From c28f7cb29f1d5521d9db273b73e0581f1d7c80a6 Mon Sep 17 00:00:00 2001
From: Lukas
Date: Sun, 14 Nov 2021 21:01:54 +0100
Subject: [PATCH] Add golangci-lint (#502)

Initial part of #435
---
 .golangci.yml | 10 + .woodpecker/main.yml | 2 - Makefile | 10 +- go.mod | 1 + go.sum | 524 +- tools.go | 1 + vendor/4d63.com/gochecknoglobals/LICENSE | 21 + .../checknoglobals/check_no_globals.go | 154 + vendor/github.com/Antonboom/errname/LICENSE | 21 + .../errname/pkg/analyzer/analyzer.go | 134 + .../Antonboom/errname/pkg/analyzer/facts.go | 237 + vendor/github.com/BurntSushi/toml/.gitignore | 2 + vendor/github.com/BurntSushi/toml/COMPATIBLE | 1 + vendor/github.com/BurntSushi/toml/COPYING | 21 + vendor/github.com/BurntSushi/toml/README.md | 220 + vendor/github.com/BurntSushi/toml/decode.go | 511 + .../BurntSushi/toml/decode_go116.go | 18 + .../github.com/BurntSushi/toml/decode_meta.go | 123 + .../github.com/BurntSushi/toml/deprecated.go | 33 + vendor/github.com/BurntSushi/toml/doc.go | 13 + vendor/github.com/BurntSushi/toml/encode.go | 650 + vendor/github.com/BurntSushi/toml/go.mod | 3 + vendor/github.com/BurntSushi/toml/go.sum | 0 .../github.com/BurntSushi/toml/internal/tz.go | 36 + vendor/github.com/BurntSushi/toml/lex.go | 1225 + vendor/github.com/BurntSushi/toml/parse.go | 739 + .../github.com/BurntSushi/toml/type_check.go | 70 + .../github.com/BurntSushi/toml/type_fields.go | 242 + .../github.com/Djarvur/go-err113/.gitignore | 15 + .../Djarvur/go-err113/.golangci.yml | 150 + .../github.com/Djarvur/go-err113/.travis.yml | 24 + vendor/github.com/Djarvur/go-err113/LICENSE | 21 + .../github.com/Djarvur/go-err113/README.adoc | 75 + .../Djarvur/go-err113/comparison.go | 123 + .../Djarvur/go-err113/definition.go | 74 + vendor/github.com/Djarvur/go-err113/err113.go | 90 + vendor/github.com/Djarvur/go-err113/go.mod | 5 + vendor/github.com/Djarvur/go-err113/go.sum | 20 + .../github.com/Masterminds/semver/.travis.yml | 29 + .../Masterminds/semver/CHANGELOG.md | 109 + .../github.com/Masterminds/semver/LICENSE.txt | 19 + vendor/github.com/Masterminds/semver/Makefile | 36 + .../github.com/Masterminds/semver/README.md | 194 + .../Masterminds/semver/appveyor.yml | 44 + .../Masterminds/semver/collection.go | 24 + .../Masterminds/semver/constraints.go | 423 + vendor/github.com/Masterminds/semver/doc.go | 115 + .../github.com/Masterminds/semver/version.go | 425 + .../Masterminds/semver/version_fuzz.go | 10 + .../OpenPeeDeeP/depguard/.gitignore | 14 + .../github.com/OpenPeeDeeP/depguard/LICENSE | 674 + .../github.com/OpenPeeDeeP/depguard/README.md | 77 + .../OpenPeeDeeP/depguard/depguard.go | 241 + vendor/github.com/OpenPeeDeeP/depguard/go.mod | 9 + vendor/github.com/OpenPeeDeeP/depguard/go.sum | 6 + vendor/github.com/alexkohler/prealloc/LICENSE | 21 + .../alexkohler/prealloc/pkg/prealloc.go | 267 + .../github.com/ashanbrown/forbidigo/LICENSE | 13 + .../forbidigo/forbidigo/config_options.go | 45 + .../forbidigo/forbidigo/forbidigo.go | 193 + vendor/github.com/ashanbrown/makezero/LICENSE | 13 + .../ashanbrown/makezero/makezero/makezero.go | 200 + vendor/github.com/bkielbasa/cyclop/LICENSE | 21 + .../bkielbasa/cyclop/pkg/analyzer/analyzer.go | 104 + vendor/github.com/bombsimon/wsl/v3/.gitignore | 70 + .../github.com/bombsimon/wsl/v3/.travis.yml | 25 + vendor/github.com/bombsimon/wsl/v3/LICENSE | 21 + vendor/github.com/bombsimon/wsl/v3/README.md | 126 + vendor/github.com/bombsimon/wsl/v3/go.mod | 12 + vendor/github.com/bombsimon/wsl/v3/go.sum | 25 + vendor/github.com/bombsimon/wsl/v3/wsl.go | 1247 +
.../charithe/durationcheck/.gitignore | 1 + .../github.com/charithe/durationcheck/LICENSE | 201 + .../charithe/durationcheck/Makefile | 5 + .../charithe/durationcheck/README.md | 48 + .../charithe/durationcheck/durationcheck.go | 188 + .../github.com/charithe/durationcheck/go.mod | 5 + .../github.com/charithe/durationcheck/go.sum | 26 + vendor/github.com/chavacava/garif/.gitignore | 3 + vendor/github.com/chavacava/garif/LICENSE | 21 + vendor/github.com/chavacava/garif/README.md | 52 + .../chavacava/garif/constructors.go | 338 + .../github.com/chavacava/garif/decorators.go | 94 + vendor/github.com/chavacava/garif/doc.go | 11 + vendor/github.com/chavacava/garif/go.mod | 5 + vendor/github.com/chavacava/garif/go.sum | 11 + vendor/github.com/chavacava/garif/io.go | 26 + vendor/github.com/chavacava/garif/models.go | 1486 + vendor/github.com/daixiang0/gci/LICENSE | 29 + .../github.com/daixiang0/gci/pkg/gci/gci.go | 383 + .../github.com/daixiang0/gci/pkg/gci/std.go | 161 + .../denis-tingajkin/go-header/.gitignore | 1 + .../denis-tingajkin/go-header/.go-header.yml | 19 + .../denis-tingajkin/go-header/LICENSE | 674 + .../denis-tingajkin/go-header/README.md | 81 + .../denis-tingajkin/go-header/analyzer.go | 146 + .../denis-tingajkin/go-header/config.go | 99 + .../denis-tingajkin/go-header/go.mod | 10 + .../denis-tingajkin/go-header/go.sum | 31 + .../denis-tingajkin/go-header/issue.go | 48 + .../denis-tingajkin/go-header/location.go | 35 + .../denis-tingajkin/go-header/option.go | 44 + .../denis-tingajkin/go-header/reader.go | 116 + .../denis-tingajkin/go-header/value.go | 128 + vendor/github.com/esimonov/ifshort/LICENSE | 21 + .../esimonov/ifshort/pkg/analyzer/analyzer.go | 247 + .../ifshort/pkg/analyzer/occurrences.go | 259 + vendor/github.com/ettle/strcase/.gitignore | 18 + vendor/github.com/ettle/strcase/.golangci.yml | 88 + vendor/github.com/ettle/strcase/.readme.tmpl | 80 + vendor/github.com/ettle/strcase/LICENSE | 21 + vendor/github.com/ettle/strcase/Makefile | 16 + vendor/github.com/ettle/strcase/README.md | 542 + vendor/github.com/ettle/strcase/caser.go | 87 + vendor/github.com/ettle/strcase/convert.go | 297 + vendor/github.com/ettle/strcase/doc.go | 155 + vendor/github.com/ettle/strcase/go.mod | 5 + vendor/github.com/ettle/strcase/go.sum | 11 + vendor/github.com/ettle/strcase/initialism.go | 43 + vendor/github.com/ettle/strcase/split.go | 164 + vendor/github.com/ettle/strcase/strcase.go | 81 + vendor/github.com/ettle/strcase/unicode.go | 48 + vendor/github.com/fatih/color/LICENSE.md | 20 + vendor/github.com/fatih/color/README.md | 178 + vendor/github.com/fatih/color/color.go | 618 + vendor/github.com/fatih/color/doc.go | 135 + vendor/github.com/fatih/color/go.mod | 8 + vendor/github.com/fatih/color/go.sum | 7 + vendor/github.com/fatih/structtag/LICENSE | 60 + vendor/github.com/fatih/structtag/README.md | 73 + vendor/github.com/fatih/structtag/go.mod | 3 + vendor/github.com/fatih/structtag/tags.go | 315 + .../fsnotify/fsnotify/.editorconfig | 12 + .../fsnotify/fsnotify/.gitattributes | 1 + .../github.com/fsnotify/fsnotify/.gitignore | 6 + .../github.com/fsnotify/fsnotify/.travis.yml | 36 + vendor/github.com/fsnotify/fsnotify/AUTHORS | 52 + .../github.com/fsnotify/fsnotify/CHANGELOG.md | 317 + .../fsnotify/fsnotify/CONTRIBUTING.md | 77 + vendor/github.com/fsnotify/fsnotify/LICENSE | 28 + vendor/github.com/fsnotify/fsnotify/README.md | 130 + vendor/github.com/fsnotify/fsnotify/fen.go | 37 + .../github.com/fsnotify/fsnotify/fsnotify.go | 68 + vendor/github.com/fsnotify/fsnotify/go.mod | 5 + 
vendor/github.com/fsnotify/fsnotify/go.sum | 2 + .../github.com/fsnotify/fsnotify/inotify.go | 337 + .../fsnotify/fsnotify/inotify_poller.go | 187 + vendor/github.com/fsnotify/fsnotify/kqueue.go | 521 + .../fsnotify/fsnotify/open_mode_bsd.go | 11 + .../fsnotify/fsnotify/open_mode_darwin.go | 12 + .../github.com/fsnotify/fsnotify/windows.go | 561 + vendor/github.com/fzipp/gocyclo/CHANGELOG.md | 38 + vendor/github.com/fzipp/gocyclo/CONTRIBUTORS | 7 + vendor/github.com/fzipp/gocyclo/LICENSE | 27 + vendor/github.com/fzipp/gocyclo/README.md | 107 + vendor/github.com/fzipp/gocyclo/analyze.go | 151 + vendor/github.com/fzipp/gocyclo/complexity.go | 48 + vendor/github.com/fzipp/gocyclo/directives.go | 39 + vendor/github.com/fzipp/gocyclo/go.mod | 3 + vendor/github.com/fzipp/gocyclo/stats.go | 73 + vendor/github.com/go-critic/go-critic/LICENSE | 22 + .../checkers/appendAssign_checker.go | 102 + .../checkers/appendCombine_checker.go | 102 + .../go-critic/checkers/argOrder_checker.go | 97 + .../go-critic/checkers/assignOp_checker.go | 102 + .../go-critic/checkers/badCall_checker.go | 63 + .../go-critic/checkers/badCond_checker.go | 147 + .../go-critic/checkers/badLock_checker.go | 116 + .../go-critic/checkers/badRegexp_checker.go | 445 + .../checkers/boolExprSimplify_checker.go | 346 + .../checkers/builtinShadowDecl_checker.go | 63 + .../checkers/builtinShadow_checker.go | 36 + .../go-critic/checkers/captLocal_checker.go | 49 + .../go-critic/checkers/caseOrder_checker.go | 88 + .../go-critic/go-critic/checkers/checkers.go | 19 + .../checkers/codegenComment_checker.go | 61 + .../checkers/commentFormatting_checker.go | 79 + .../checkers/commentedOutCode_checker.go | 157 + .../checkers/commentedOutImport_checker.go | 76 + .../checkers/defaultCaseOrder_checker.go | 65 + .../checkers/deferUnlambda_checker.go | 94 + .../checkers/deprecatedComment_checker.go | 149 + .../go-critic/checkers/docStub_checker.go | 95 + .../go-critic/checkers/dupArg_checker.go | 133 + .../checkers/dupBranchBody_checker.go | 58 + .../go-critic/checkers/dupCase_checker.go | 57 + .../go-critic/checkers/dupImports_checker.go | 63 + .../go-critic/checkers/dupSubExpr_checker.go | 102 + .../go-critic/checkers/elseif_checker.go | 71 + .../checkers/emptyFallthrough_checker.go | 70 + .../checkers/emptyStringTest_checker.go | 58 + .../go-critic/checkers/equalFold_checker.go | 87 + .../go-critic/checkers/evalOrder_checker.go | 87 + .../checkers/exitAfterDefer_checker.go | 84 + .../checkers/filepathJoin_checker.go | 50 + .../go-critic/checkers/flagDeref_checker.go | 65 + .../go-critic/checkers/flagName_checker.go | 88 + .../go-critic/checkers/hexLiteral_checker.go | 60 + .../go-critic/checkers/hugeParam_checker.go | 63 + .../go-critic/checkers/ifElseChain_checker.go | 99 + .../checkers/importShadow_checker.go | 47 + .../go-critic/checkers/indexAlloc_checker.go | 50 + .../go-critic/checkers/initClause_checker.go | 56 + .../internal/astwalk/comment_walker.go | 41 + .../internal/astwalk/doc_comment_walker.go | 48 + .../checkers/internal/astwalk/expr_walker.go | 31 + .../internal/astwalk/func_decl_walker.go | 23 + .../internal/astwalk/local_comment_walker.go | 32 + .../internal/astwalk/local_def_visitor.go | 51 + .../internal/astwalk/local_def_walker.go | 118 + .../internal/astwalk/local_expr_walker.go | 29 + .../internal/astwalk/stmt_list_walker.go | 33 + .../checkers/internal/astwalk/stmt_walker.go | 29 + .../internal/astwalk/type_expr_walker.go | 114 + .../checkers/internal/astwalk/visitor.go | 80 + .../checkers/internal/astwalk/walk_handler.go 
| 34 + .../checkers/internal/astwalk/walker.go | 57 + .../checkers/internal/lintutil/astfind.go | 27 + .../checkers/internal/lintutil/astflow.go | 86 + .../checkers/internal/lintutil/astset.go | 44 + .../checkers/internal/lintutil/zero_value.go | 94 + .../go-critic/checkers/mapKey_checker.go | 124 + .../checkers/methodExprCall_checker.go | 57 + .../checkers/nestingReduce_checker.go | 73 + .../go-critic/checkers/newDeref_checker.go | 45 + .../checkers/nilValReturn_checker.go | 71 + .../checkers/octalLiteral_checker.go | 82 + .../go-critic/checkers/offBy1_checker.go | 66 + .../checkers/paramTypeCombine_checker.go | 93 + .../checkers/ptrToRefParam_checker.go | 70 + .../checkers/rangeExprCopy_checker.go | 80 + .../checkers/rangeValCopy_checker.go | 75 + .../go-critic/checkers/regexpMust_checker.go | 47 + .../checkers/regexpPattern_checker.go | 68 + .../checkers/regexpSimplify_checker.go | 511 + .../go-critic/checkers/ruleguard_checker.go | 157 + .../checkers/singleCaseSwitch_checker.go | 84 + .../go-critic/checkers/sloppyLen_checker.go | 72 + .../checkers/sloppyReassign_checker.go | 80 + .../checkers/sloppyTypeAssert_checker.go | 75 + .../go-critic/checkers/sortSlice_checker.go | 135 + .../go-critic/checkers/sqlQuery_checker.go | 167 + .../checkers/stringXbytes_checker.go | 47 + .../go-critic/checkers/switchTrue_checker.go | 49 + .../checkers/tooManyResults_checker.go | 54 + .../go-critic/checkers/truncateCmp_checker.go | 117 + .../checkers/typeAssertChain_checker.go | 132 + .../checkers/typeDefFirst_checker.go | 88 + .../checkers/typeSwitchVar_checker.go | 97 + .../go-critic/checkers/typeUnparen_checker.go | 86 + .../go-critic/checkers/underef_checker.go | 127 + .../go-critic/checkers/unlabelStmt_checker.go | 170 + .../go-critic/checkers/unlambda_checker.go | 100 + .../checkers/unnamedResult_checker.go | 103 + .../checkers/unnecessaryBlock_checker.go | 69 + .../checkers/unnecessaryDefer_checker.go | 111 + .../go-critic/checkers/unslice_checker.go | 59 + .../go-critic/go-critic/checkers/utils.go | 309 + .../go-critic/checkers/valSwap_checker.go | 64 + .../go-critic/checkers/weakCond_checker.go | 77 + .../go-critic/checkers/whyNoLint_checker.go | 52 + .../go-critic/checkers/wrapperFunc_checker.go | 229 + .../checkers/yodaStyleExpr_checker.go | 66 + .../go-critic/framework/linter/checkers_db.go | 136 + .../go-critic/framework/linter/context.go | 35 + .../go-critic/framework/linter/lintpack.go | 269 + .../go-toolsmith/astcast/.travis.yml | 9 + .../github.com/go-toolsmith/astcast/LICENSE | 21 + .../github.com/go-toolsmith/astcast/README.md | 86 + .../go-toolsmith/astcast/astcast.go | 590 + vendor/github.com/go-toolsmith/astcast/go.mod | 6 + vendor/github.com/go-toolsmith/astcast/go.sum | 4 + .../go-toolsmith/astcopy/.travis.yml | 9 + .../github.com/go-toolsmith/astcopy/LICENSE | 21 + .../github.com/go-toolsmith/astcopy/README.md | 41 + .../go-toolsmith/astcopy/astcopy.go | 955 + vendor/github.com/go-toolsmith/astcopy/go.mod | 6 + vendor/github.com/go-toolsmith/astcopy/go.sum | 4 + .../go-toolsmith/astequal/.gitignore | 5 + .../go-toolsmith/astequal/.travis.yml | 9 + .../github.com/go-toolsmith/astequal/LICENSE | 21 + .../go-toolsmith/astequal/README.md | 67 + .../go-toolsmith/astequal/astequal.go | 734 + .../github.com/go-toolsmith/astequal/go.mod | 1 + .../go-toolsmith/astfmt/.travis.yml | 9 + vendor/github.com/go-toolsmith/astfmt/LICENSE | 21 + .../github.com/go-toolsmith/astfmt/README.md | 39 + .../github.com/go-toolsmith/astfmt/astfmt.go | 111 + vendor/github.com/go-toolsmith/astfmt/go.mod | 6 
+ vendor/github.com/go-toolsmith/astfmt/go.sum | 4 + .../github.com/go-toolsmith/astp/.gitignore | 4 + .../github.com/go-toolsmith/astp/.travis.yml | 9 + vendor/github.com/go-toolsmith/astp/LICENSE | 21 + vendor/github.com/go-toolsmith/astp/README.md | 39 + vendor/github.com/go-toolsmith/astp/decl.go | 39 + vendor/github.com/go-toolsmith/astp/expr.go | 141 + vendor/github.com/go-toolsmith/astp/go.mod | 6 + vendor/github.com/go-toolsmith/astp/go.sum | 4 + vendor/github.com/go-toolsmith/astp/stmt.go | 135 + .../go-toolsmith/strparse/.travis.yml | 9 + .../github.com/go-toolsmith/strparse/LICENSE | 21 + .../go-toolsmith/strparse/README.md | 34 + .../github.com/go-toolsmith/strparse/go.mod | 1 + .../go-toolsmith/strparse/strparse.go | 59 + .../github.com/go-toolsmith/typep/.travis.yml | 9 + vendor/github.com/go-toolsmith/typep/LICENSE | 21 + .../github.com/go-toolsmith/typep/README.md | 37 + vendor/github.com/go-toolsmith/typep/doc.go | 2 + vendor/github.com/go-toolsmith/typep/go.mod | 1 + .../go-toolsmith/typep/predicates.go | 36 + .../github.com/go-toolsmith/typep/safeExpr.go | 73 + .../go-toolsmith/typep/simplePredicates.go | 359 + vendor/github.com/go-xmlfmt/xmlfmt/LICENSE | 21 + vendor/github.com/go-xmlfmt/xmlfmt/README.md | 178 + vendor/github.com/go-xmlfmt/xmlfmt/xmlfmt.go | 56 + vendor/github.com/gobwas/glob/.gitignore | 8 + vendor/github.com/gobwas/glob/.travis.yml | 9 + vendor/github.com/gobwas/glob/LICENSE | 21 + vendor/github.com/gobwas/glob/bench.sh | 26 + .../gobwas/glob/compiler/compiler.go | 525 + vendor/github.com/gobwas/glob/glob.go | 80 + vendor/github.com/gobwas/glob/match/any.go | 45 + vendor/github.com/gobwas/glob/match/any_of.go | 82 + vendor/github.com/gobwas/glob/match/btree.go | 146 + .../github.com/gobwas/glob/match/contains.go | 58 + .../github.com/gobwas/glob/match/every_of.go | 99 + vendor/github.com/gobwas/glob/match/list.go | 49 + vendor/github.com/gobwas/glob/match/match.go | 81 + vendor/github.com/gobwas/glob/match/max.go | 49 + vendor/github.com/gobwas/glob/match/min.go | 57 + .../github.com/gobwas/glob/match/nothing.go | 27 + vendor/github.com/gobwas/glob/match/prefix.go | 50 + .../gobwas/glob/match/prefix_any.go | 55 + .../gobwas/glob/match/prefix_suffix.go | 62 + vendor/github.com/gobwas/glob/match/range.go | 48 + vendor/github.com/gobwas/glob/match/row.go | 77 + .../github.com/gobwas/glob/match/segments.go | 91 + vendor/github.com/gobwas/glob/match/single.go | 43 + vendor/github.com/gobwas/glob/match/suffix.go | 35 + .../gobwas/glob/match/suffix_any.go | 43 + vendor/github.com/gobwas/glob/match/super.go | 33 + vendor/github.com/gobwas/glob/match/text.go | 45 + vendor/github.com/gobwas/glob/readme.md | 148 + .../github.com/gobwas/glob/syntax/ast/ast.go | 122 + .../gobwas/glob/syntax/ast/parser.go | 157 + .../gobwas/glob/syntax/lexer/lexer.go | 273 + .../gobwas/glob/syntax/lexer/token.go | 88 + .../github.com/gobwas/glob/syntax/syntax.go | 14 + .../gobwas/glob/util/runes/runes.go | 154 + .../gobwas/glob/util/strings/strings.go | 39 + vendor/github.com/gofrs/flock/.gitignore | 24 + vendor/github.com/gofrs/flock/.travis.yml | 10 + vendor/github.com/gofrs/flock/LICENSE | 27 + vendor/github.com/gofrs/flock/README.md | 41 + vendor/github.com/gofrs/flock/appveyor.yml | 25 + vendor/github.com/gofrs/flock/flock.go | 144 + vendor/github.com/gofrs/flock/flock_aix.go | 281 + vendor/github.com/gofrs/flock/flock_unix.go | 197 + vendor/github.com/gofrs/flock/flock_winapi.go | 76 + .../github.com/gofrs/flock/flock_windows.go | 142 + vendor/github.com/golangci/check/LICENSE 
| 674 + .../check/cmd/structcheck/structcheck.go | 193 + .../golangci/check/cmd/varcheck/varcheck.go | 163 + vendor/github.com/golangci/dupl/.travis.yml | 5 + vendor/github.com/golangci/dupl/LICENSE | 21 + vendor/github.com/golangci/dupl/README.md | 63 + .../github.com/golangci/dupl/job/buildtree.go | 22 + vendor/github.com/golangci/dupl/job/parse.go | 36 + vendor/github.com/golangci/dupl/main.go | 148 + .../github.com/golangci/dupl/printer/html.go | 120 + .../golangci/dupl/printer/plumbing.go | 50 + .../golangci/dupl/printer/printer.go | 11 + .../github.com/golangci/dupl/printer/text.go | 100 + .../golangci/dupl/suffixtree/dupl.go | 98 + .../golangci/dupl/suffixtree/suffixtree.go | 216 + .../golangci/dupl/syntax/golang/golang.go | 392 + .../github.com/golangci/dupl/syntax/syntax.go | 175 + vendor/github.com/golangci/go-misc/LICENSE | 27 + .../golangci/go-misc/deadcode/README.md | 18 + .../golangci/go-misc/deadcode/deadcode.go | 135 + .../github.com/golangci/gofmt/gofmt/LICENSE | 27 + vendor/github.com/golangci/gofmt/gofmt/doc.go | 104 + .../github.com/golangci/gofmt/gofmt/gofmt.go | 327 + .../golangci/gofmt/gofmt/golangci.go | 46 + .../golangci/gofmt/gofmt/internal.go | 176 + .../golangci/gofmt/gofmt/rewrite.go | 303 + .../golangci/gofmt/gofmt/simplify.go | 165 + .../golangci/gofmt/goimports/LICENSE | 27 + .../golangci/gofmt/goimports/goimports.go | 358 + .../golangci/gofmt/goimports/golangci.go | 33 + .../github.com/golangci/golangci-lint/LICENSE | 674 + .../golangci-lint/cmd/golangci-lint/main.go | 25 + .../cmd/golangci-lint/mod_version.go | 17 + .../golangci-lint/internal/cache/cache.go | 527 + .../golangci-lint/internal/cache/default.go | 87 + .../golangci-lint/internal/cache/hash.go | 186 + .../internal/errorutil/errors.go | 23 + .../internal/pkgcache/pkgcache.go | 229 + .../internal/renameio/renameio.go | 93 + .../internal/robustio/robustio.go | 53 + .../internal/robustio/robustio_darwin.go | 29 + .../internal/robustio/robustio_flaky.go | 93 + .../internal/robustio/robustio_other.go | 28 + .../internal/robustio/robustio_windows.go | 33 + .../golangci-lint/pkg/commands/cache.go | 84 + .../golangci-lint/pkg/commands/config.go | 66 + .../golangci-lint/pkg/commands/executor.go | 250 + .../golangci-lint/pkg/commands/help.go | 92 + .../golangci-lint/pkg/commands/linters.go | 51 + .../golangci-lint/pkg/commands/root.go | 165 + .../golangci-lint/pkg/commands/run.go | 566 + .../golangci-lint/pkg/commands/version.go | 60 + .../golangci-lint/pkg/config/config.go | 26 + .../golangci-lint/pkg/config/issues.go | 204 + .../golangci-lint/pkg/config/linters.go | 11 + .../pkg/config/linters_settings.go | 486 + .../pkg/config/linters_settings_gocritic.go | 365 + .../golangci-lint/pkg/config/output.go | 36 + .../golangci-lint/pkg/config/reader.go | 221 + .../golangci/golangci-lint/pkg/config/run.go | 37 + .../golangci-lint/pkg/config/severity.go | 18 + .../golangci-lint/pkg/exitcodes/exitcodes.go | 34 + .../golangci-lint/pkg/fsutils/filecache.go | 67 + .../golangci-lint/pkg/fsutils/fsutils.go | 100 + .../golangci-lint/pkg/fsutils/linecache.go | 70 + .../golangci-lint/pkg/golinters/asciicheck.go | 19 + .../golangci-lint/pkg/golinters/bodyclose.go | 21 + .../golangci-lint/pkg/golinters/cyclop.go | 39 + .../golangci-lint/pkg/golinters/deadcode.go | 52 + .../golangci-lint/pkg/golinters/depguard.go | 112 + .../golangci-lint/pkg/golinters/dogsled.go | 97 + .../golangci-lint/pkg/golinters/dupl.go | 83 + .../pkg/golinters/durationcheck.go | 15 + .../golangci-lint/pkg/golinters/errcheck.go | 251 + 
.../golangci-lint/pkg/golinters/errname.go | 21 + .../golangci-lint/pkg/golinters/errorlint.go | 31 + .../golangci-lint/pkg/golinters/exhaustive.go | 27 + .../pkg/golinters/exhaustivestruct.go | 31 + .../pkg/golinters/exportloopref.go | 19 + .../golangci-lint/pkg/golinters/forbidigo.go | 70 + .../pkg/golinters/forcetypeassert.go | 19 + .../golangci-lint/pkg/golinters/funlen.go | 64 + .../golangci-lint/pkg/golinters/gci.go | 96 + .../pkg/golinters/goanalysis/adapters.go | 36 + .../pkg/golinters/goanalysis/errors.go | 72 + .../pkg/golinters/goanalysis/issue.go | 31 + .../pkg/golinters/goanalysis/linter.go | 213 + .../pkg/golinters/goanalysis/load/guard.go | 30 + .../pkg/golinters/goanalysis/metalinter.go | 90 + .../pkg/golinters/goanalysis/runner.go | 342 + .../pkg/golinters/goanalysis/runner_action.go | 381 + .../pkg/golinters/goanalysis/runner_facts.go | 125 + .../goanalysis/runner_loadingpackage.go | 497 + .../pkg/golinters/goanalysis/runners.go | 269 + .../pkg/golinters/gochecknoglobals.go | 29 + .../pkg/golinters/gochecknoinits.go | 73 + .../golangci-lint/pkg/golinters/gocognit.go | 68 + .../golangci-lint/pkg/golinters/goconst.go | 92 + .../golangci-lint/pkg/golinters/gocritic.go | 188 + .../golangci-lint/pkg/golinters/gocyclo.go | 61 + .../golangci-lint/pkg/golinters/godot.go | 84 + .../golangci-lint/pkg/golinters/godox.go | 63 + .../golangci-lint/pkg/golinters/goerr113.go | 19 + .../golangci-lint/pkg/golinters/gofmt.go | 72 + .../pkg/golinters/gofmt_common.go | 286 + .../golangci-lint/pkg/golinters/gofumpt.go | 108 + .../golangci-lint/pkg/golinters/goheader.go | 85 + .../golangci-lint/pkg/golinters/goimports.go | 73 + .../golangci-lint/pkg/golinters/golint.go | 78 + .../golangci-lint/pkg/golinters/gomnd.go | 27 + .../pkg/golinters/gomoddirectives.go | 64 + .../golangci-lint/pkg/golinters/gomodguard.go | 99 + .../pkg/golinters/goprintffuncname.go | 17 + .../golangci-lint/pkg/golinters/gosec.go | 128 + .../golangci-lint/pkg/golinters/gosimple.go | 21 + .../golangci-lint/pkg/golinters/govet.go | 185 + .../golangci-lint/pkg/golinters/ifshort.go | 28 + .../golangci-lint/pkg/golinters/importas.go | 48 + .../pkg/golinters/ineffassign.go | 17 + .../golangci-lint/pkg/golinters/interfacer.go | 67 + .../golangci-lint/pkg/golinters/lll.go | 124 + .../golangci-lint/pkg/golinters/makezero.go | 60 + .../golangci-lint/pkg/golinters/maligned.go | 58 + .../golangci-lint/pkg/golinters/misspell.go | 132 + .../golangci-lint/pkg/golinters/nakedret.go | 123 + .../golangci-lint/pkg/golinters/nestif.go | 65 + .../golangci-lint/pkg/golinters/nilerr.go | 18 + .../golangci-lint/pkg/golinters/nlreturn.go | 19 + .../golangci-lint/pkg/golinters/noctx.go | 21 + .../golangci-lint/pkg/golinters/nolintlint.go | 93 + .../pkg/golinters/nolintlint/README.md | 31 + .../pkg/golinters/nolintlint/nolintlint.go | 305 + .../pkg/golinters/paralleltest.go | 21 + .../golangci-lint/pkg/golinters/prealloc.go | 57 + .../pkg/golinters/predeclared.go | 26 + .../golangci-lint/pkg/golinters/promlinter.go | 63 + .../golangci-lint/pkg/golinters/revive.go | 295 + .../pkg/golinters/rowerrcheck.go | 23 + .../golangci-lint/pkg/golinters/scopelint.go | 177 + .../pkg/golinters/sqlclosecheck.go | 21 + .../pkg/golinters/staticcheck.go | 21 + .../pkg/golinters/staticcheck_common.go | 168 + .../pkg/golinters/structcheck.go | 55 + .../golangci-lint/pkg/golinters/stylecheck.go | 30 + .../pkg/golinters/tagliatelle.go | 30 + .../pkg/golinters/testpackage.go | 23 + .../golangci-lint/pkg/golinters/thelper.go | 61 + 
.../golangci-lint/pkg/golinters/tparallel.go | 21 + .../golangci-lint/pkg/golinters/typecheck.go | 28 + .../golangci-lint/pkg/golinters/unconvert.go | 53 + .../golangci-lint/pkg/golinters/unparam.go | 76 + .../golangci-lint/pkg/golinters/unused.go | 69 + .../golangci-lint/pkg/golinters/util.go | 24 + .../golangci-lint/pkg/golinters/varcheck.go | 55 + .../pkg/golinters/wastedassign.go | 21 + .../golangci-lint/pkg/golinters/whitespace.go | 85 + .../golangci-lint/pkg/golinters/wrapcheck.go | 32 + .../golangci-lint/pkg/golinters/wsl.go | 83 + .../golangci/golangci-lint/pkg/goutil/env.go | 61 + .../golangci-lint/pkg/lint/linter/config.go | 127 + .../golangci-lint/pkg/lint/linter/context.go | 49 + .../golangci-lint/pkg/lint/linter/linter.go | 13 + .../pkg/lint/lintersdb/enabled_set.go | 207 + .../pkg/lint/lintersdb/manager.go | 618 + .../pkg/lint/lintersdb/validator.go | 107 + .../golangci/golangci-lint/pkg/lint/load.go | 315 + .../golangci/golangci-lint/pkg/lint/runner.go | 329 + .../golangci-lint/pkg/logutils/log.go | 31 + .../golangci-lint/pkg/logutils/logutils.go | 49 + .../golangci-lint/pkg/logutils/mock.go | 47 + .../golangci-lint/pkg/logutils/out.go | 9 + .../golangci-lint/pkg/logutils/stderr_log.go | 121 + .../golangci-lint/pkg/packages/errors.go | 39 + .../golangci-lint/pkg/packages/skip.go | 25 + .../golangci-lint/pkg/packages/util.go | 102 + .../golangci-lint/pkg/printers/checkstyle.go | 87 + .../golangci-lint/pkg/printers/codeclimate.go | 57 + .../golangci-lint/pkg/printers/github.go | 46 + .../golangci-lint/pkg/printers/html.go | 155 + .../golangci-lint/pkg/printers/json.go | 41 + .../golangci-lint/pkg/printers/junitxml.go | 79 + .../golangci-lint/pkg/printers/printer.go | 11 + .../golangci-lint/pkg/printers/tab.go | 58 + .../golangci-lint/pkg/printers/text.go | 91 + .../golangci/golangci-lint/pkg/report/data.go | 26 + .../golangci/golangci-lint/pkg/report/log.go | 64 + .../golangci-lint/pkg/result/issue.go | 98 + .../processors/autogenerated_exclude.go | 134 + .../pkg/result/processors/base_rule.go | 69 + .../pkg/result/processors/cgo.go | 58 + .../pkg/result/processors/diff.go | 74 + .../pkg/result/processors/exclude.go | 59 + .../pkg/result/processors/exclude_rules.go | 90 + .../result/processors/filename_unadjuster.go | 131 + .../pkg/result/processors/fixer.go | 248 + .../result/processors/identifier_marker.go | 125 + .../pkg/result/processors/max_from_linter.go | 54 + .../processors/max_per_file_from_linter.go | 59 + .../pkg/result/processors/max_same_issues.go | 81 + .../pkg/result/processors/nolint.go | 309 + .../pkg/result/processors/path_prefixer.go | 37 + .../pkg/result/processors/path_prettifier.go | 48 + .../pkg/result/processors/path_shortener.go | 40 + .../pkg/result/processors/processor.go | 11 + .../pkg/result/processors/severity_rules.go | 103 + .../pkg/result/processors/skip_dirs.go | 143 + .../pkg/result/processors/skip_files.go | 52 + .../pkg/result/processors/sort_results.go | 173 + .../pkg/result/processors/source_code.go | 47 + .../pkg/result/processors/uniq_by_line.go | 58 + .../pkg/result/processors/utils.go | 62 + .../golangci-lint/pkg/sliceutil/sliceutil.go | 17 + .../golangci-lint/pkg/timeutils/stopwatch.go | 116 + vendor/github.com/golangci/lint-1/.travis.yml | 19 + .../golangci/lint-1/CONTRIBUTING.md | 15 + vendor/github.com/golangci/lint-1/LICENSE | 27 + vendor/github.com/golangci/lint-1/README.md | 88 + vendor/github.com/golangci/lint-1/go.mod | 3 + vendor/github.com/golangci/lint-1/go.sum | 6 + vendor/github.com/golangci/lint-1/lint.go | 1655 + 
vendor/github.com/golangci/maligned/LICENSE | 27 + vendor/github.com/golangci/maligned/README | 7 + .../github.com/golangci/maligned/maligned.go | 253 + .../github.com/golangci/misspell/.gitignore | 34 + .../github.com/golangci/misspell/.travis.yml | 20 + .../github.com/golangci/misspell/Dockerfile | 37 + .../github.com/golangci/misspell/Gopkg.lock | 24 + .../github.com/golangci/misspell/Gopkg.toml | 34 + vendor/github.com/golangci/misspell/LICENSE | 22 + vendor/github.com/golangci/misspell/Makefile | 74 + vendor/github.com/golangci/misspell/README.md | 424 + .../golangci/misspell/RELEASE-HOWTO.md | 38 + vendor/github.com/golangci/misspell/ascii.go | 62 + vendor/github.com/golangci/misspell/case.go | 59 + .../golangci/misspell/goreleaser.yml | 38 + .../golangci/misspell/install-misspell.sh | 362 + vendor/github.com/golangci/misspell/legal.go | 48 + vendor/github.com/golangci/misspell/mime.go | 210 + .../github.com/golangci/misspell/notwords.go | 85 + .../github.com/golangci/misspell/replace.go | 246 + .../golangci/misspell/stringreplacer.go | 336 + .../golangci/misspell/stringreplacer_test.gox | 421 + vendor/github.com/golangci/misspell/url.go | 17 + vendor/github.com/golangci/misspell/words.go | 31194 ++++++++++++++++ vendor/github.com/golangci/revgrep/.gitignore | 1 + .../github.com/golangci/revgrep/.travis.yml | 7 + vendor/github.com/golangci/revgrep/LICENSE | 201 + vendor/github.com/golangci/revgrep/README.md | 58 + vendor/github.com/golangci/revgrep/go.mod | 3 + vendor/github.com/golangci/revgrep/go.sum | 0 vendor/github.com/golangci/revgrep/revgrep.go | 410 + vendor/github.com/golangci/unconvert/LICENSE | 27 + vendor/github.com/golangci/unconvert/README | 36 + .../golangci/unconvert/unconvert.go | 665 + vendor/github.com/google/go-cmp/LICENSE | 27 + .../github.com/google/go-cmp/cmp/compare.go | 682 + .../google/go-cmp/cmp/export_panic.go | 15 + .../google/go-cmp/cmp/export_unsafe.go | 35 + .../go-cmp/cmp/internal/diff/debug_disable.go | 17 + .../go-cmp/cmp/internal/diff/debug_enable.go | 122 + .../google/go-cmp/cmp/internal/diff/diff.go | 398 + .../google/go-cmp/cmp/internal/flags/flags.go | 9 + .../cmp/internal/flags/toolchain_legacy.go | 10 + .../cmp/internal/flags/toolchain_recent.go | 10 + .../go-cmp/cmp/internal/function/func.go | 99 + .../google/go-cmp/cmp/internal/value/name.go | 157 + .../cmp/internal/value/pointer_purego.go | 33 + .../cmp/internal/value/pointer_unsafe.go | 36 + .../google/go-cmp/cmp/internal/value/sort.go | 106 + .../google/go-cmp/cmp/internal/value/zero.go | 48 + .../github.com/google/go-cmp/cmp/options.go | 552 + vendor/github.com/google/go-cmp/cmp/path.go | 378 + vendor/github.com/google/go-cmp/cmp/report.go | 54 + .../google/go-cmp/cmp/report_compare.go | 432 + .../google/go-cmp/cmp/report_references.go | 264 + .../google/go-cmp/cmp/report_reflect.go | 402 + .../google/go-cmp/cmp/report_slices.go | 613 + .../google/go-cmp/cmp/report_text.go | 431 + .../google/go-cmp/cmp/report_value.go | 121 + .../gordonklaus/ineffassign/LICENSE | 21 + .../pkg/ineffassign/ineffassign.go | 591 + .../gostaticanalysis/analysisutil/LICENSE | 21 + .../gostaticanalysis/analysisutil/README.md | 5 + .../gostaticanalysis/analysisutil/call.go | 405 + .../analysisutil/diagnostic.go | 45 + .../gostaticanalysis/analysisutil/file.go | 18 + .../gostaticanalysis/analysisutil/go.mod | 8 + .../gostaticanalysis/analysisutil/go.sum | 37 + .../gostaticanalysis/analysisutil/pkg.go | 49 + .../gostaticanalysis/analysisutil/ssa.go | 146 + .../analysisutil/ssainspect.go | 37 + 
.../gostaticanalysis/analysisutil/types.go | 208 + .../gostaticanalysis/comment/LICENSE | 21 + .../gostaticanalysis/comment/README.md | 10 + .../gostaticanalysis/comment/comment.go | 147 + .../gostaticanalysis/comment/go.mod | 8 + .../gostaticanalysis/comment/go.sum | 24 + .../comment/passes/commentmap/commentmap.go | 20 + .../forcetypeassert/.reviewdog.yml | 8 + .../gostaticanalysis/forcetypeassert/LICENSE | 21 + .../forcetypeassert/README.md | 17 + .../forcetypeassert/forcetypeassert.go | 67 + .../gostaticanalysis/forcetypeassert/go.mod | 5 + .../gostaticanalysis/forcetypeassert/go.sum | 6 + .../gostaticanalysis/nilerr/LICENSE | 21 + .../gostaticanalysis/nilerr/README.md | 41 + .../github.com/gostaticanalysis/nilerr/go.mod | 8 + .../github.com/gostaticanalysis/nilerr/go.sum | 34 + .../gostaticanalysis/nilerr/nilerr.go | 291 + vendor/github.com/hashicorp/errwrap/LICENSE | 354 + vendor/github.com/hashicorp/errwrap/README.md | 89 + .../github.com/hashicorp/errwrap/errwrap.go | 169 + vendor/github.com/hashicorp/errwrap/go.mod | 1 + .../hashicorp/go-multierror/LICENSE | 353 + .../hashicorp/go-multierror/Makefile | 31 + .../hashicorp/go-multierror/README.md | 150 + .../hashicorp/go-multierror/append.go | 43 + .../hashicorp/go-multierror/flatten.go | 26 + .../hashicorp/go-multierror/format.go | 27 + .../github.com/hashicorp/go-multierror/go.mod | 5 + .../github.com/hashicorp/go-multierror/go.sum | 2 + .../hashicorp/go-multierror/group.go | 38 + .../hashicorp/go-multierror/multierror.go | 121 + .../hashicorp/go-multierror/prefix.go | 37 + .../hashicorp/go-multierror/sort.go | 16 + vendor/github.com/hashicorp/hcl/.gitignore | 9 + vendor/github.com/hashicorp/hcl/.travis.yml | 13 + vendor/github.com/hashicorp/hcl/LICENSE | 354 + vendor/github.com/hashicorp/hcl/Makefile | 18 + vendor/github.com/hashicorp/hcl/README.md | 125 + vendor/github.com/hashicorp/hcl/appveyor.yml | 19 + vendor/github.com/hashicorp/hcl/decoder.go | 729 + vendor/github.com/hashicorp/hcl/go.mod | 3 + vendor/github.com/hashicorp/hcl/go.sum | 2 + vendor/github.com/hashicorp/hcl/hcl.go | 11 + .../github.com/hashicorp/hcl/hcl/ast/ast.go | 219 + .../github.com/hashicorp/hcl/hcl/ast/walk.go | 52 + .../hashicorp/hcl/hcl/parser/error.go | 17 + .../hashicorp/hcl/hcl/parser/parser.go | 532 + .../hashicorp/hcl/hcl/printer/nodes.go | 789 + .../hashicorp/hcl/hcl/printer/printer.go | 66 + .../hashicorp/hcl/hcl/scanner/scanner.go | 652 + .../hashicorp/hcl/hcl/strconv/quote.go | 241 + .../hashicorp/hcl/hcl/token/position.go | 46 + .../hashicorp/hcl/hcl/token/token.go | 219 + .../hashicorp/hcl/json/parser/flatten.go | 117 + .../hashicorp/hcl/json/parser/parser.go | 313 + .../hashicorp/hcl/json/scanner/scanner.go | 451 + .../hashicorp/hcl/json/token/position.go | 46 + .../hashicorp/hcl/json/token/token.go | 118 + vendor/github.com/hashicorp/hcl/lex.go | 38 + vendor/github.com/hashicorp/hcl/parse.go | 39 + .../inconshreveable/mousetrap/LICENSE | 13 + .../inconshreveable/mousetrap/README.md | 23 + .../inconshreveable/mousetrap/trap_others.go | 15 + .../inconshreveable/mousetrap/trap_windows.go | 98 + .../mousetrap/trap_windows_1.4.go | 46 + vendor/github.com/jgautheron/goconst/LICENSE | 21 + .../github.com/jgautheron/goconst/README.md | 50 + vendor/github.com/jgautheron/goconst/api.go | 74 + vendor/github.com/jgautheron/goconst/go.mod | 3 + .../github.com/jgautheron/goconst/parser.go | 176 + .../github.com/jgautheron/goconst/visitor.go | 160 + .../github.com/jingyugao/rowserrcheck/LICENSE | 21 + .../rowserrcheck/passes/rowserr/rowserr.go | 331 + 
.../jirfag/go-printf-func-name/LICENSE | 21 + .../pkg/analyzer/analyzer.go | 74 + vendor/github.com/julz/importas/.gitignore | 2 + vendor/github.com/julz/importas/LICENSE | 201 + vendor/github.com/julz/importas/README.md | 47 + vendor/github.com/julz/importas/analyzer.go | 116 + vendor/github.com/julz/importas/config.go | 69 + vendor/github.com/julz/importas/flags.go | 31 + vendor/github.com/julz/importas/go.mod | 5 + vendor/github.com/julz/importas/go.sum | 26 + vendor/github.com/kisielk/errcheck/LICENSE | 22 + .../errcheck/errcheck/embedded_walker.go | 144 + .../kisielk/errcheck/errcheck/errcheck.go | 676 + .../kisielk/errcheck/errcheck/tags.go | 12 + .../kisielk/errcheck/errcheck/tags_compat.go | 13 + vendor/github.com/kisielk/gotool/.travis.yml | 23 + vendor/github.com/kisielk/gotool/LEGAL | 32 + vendor/github.com/kisielk/gotool/LICENSE | 20 + vendor/github.com/kisielk/gotool/README.md | 6 + vendor/github.com/kisielk/gotool/go.mod | 1 + vendor/github.com/kisielk/gotool/go13.go | 15 + vendor/github.com/kisielk/gotool/go14-15.go | 15 + vendor/github.com/kisielk/gotool/go16-18.go | 15 + .../kisielk/gotool/internal/load/path.go | 27 + .../kisielk/gotool/internal/load/pkg.go | 25 + .../kisielk/gotool/internal/load/search.go | 354 + vendor/github.com/kisielk/gotool/match.go | 56 + vendor/github.com/kisielk/gotool/match18.go | 317 + vendor/github.com/kisielk/gotool/tool.go | 48 + vendor/github.com/kulti/thelper/LICENSE | 21 + .../kulti/thelper/pkg/analyzer/analyzer.go | 416 + .../kulti/thelper/pkg/analyzer/report.go | 56 + .../kunwardeep/paralleltest/LICENSE | 21 + .../pkg/paralleltest/paralleltest.go | 256 + .../kyoh86/exportloopref/.golangci.yml | 4 + .../kyoh86/exportloopref/.goreleaser.yml | 43 + .../github.com/kyoh86/exportloopref/LICENSE | 21 + .../github.com/kyoh86/exportloopref/Makefile | 16 + .../github.com/kyoh86/exportloopref/README.md | 221 + .../kyoh86/exportloopref/exportloopref.go | 305 + vendor/github.com/kyoh86/exportloopref/go.mod | 5 + vendor/github.com/kyoh86/exportloopref/go.sum | 20 + .../ldez/gomoddirectives/.gitignore | 2 + .../ldez/gomoddirectives/.golangci.yml | 87 + .../github.com/ldez/gomoddirectives/LICENSE | 190 + .../github.com/ldez/gomoddirectives/Makefile | 15 + vendor/github.com/ldez/gomoddirectives/go.mod | 8 + vendor/github.com/ldez/gomoddirectives/go.sum | 25 + .../ldez/gomoddirectives/gomoddirectives.go | 125 + .../github.com/ldez/gomoddirectives/module.go | 47 + .../github.com/ldez/gomoddirectives/readme.md | 16 + vendor/github.com/ldez/tagliatelle/.gitignore | 3 + .../github.com/ldez/tagliatelle/.golangci.yml | 77 + vendor/github.com/ldez/tagliatelle/LICENSE | 190 + vendor/github.com/ldez/tagliatelle/Makefile | 15 + vendor/github.com/ldez/tagliatelle/go.mod | 8 + vendor/github.com/ldez/tagliatelle/go.sum | 38 + vendor/github.com/ldez/tagliatelle/readme.md | 31 + .../ldez/tagliatelle/tagliatelle.go | 192 + .../magiconair/properties/.gitignore | 6 + .../magiconair/properties/.travis.yml | 17 + .../magiconair/properties/CHANGELOG.md | 160 + .../magiconair/properties/LICENSE.md | 24 + .../magiconair/properties/README.md | 128 + .../magiconair/properties/decode.go | 289 + .../github.com/magiconair/properties/doc.go | 156 + .../github.com/magiconair/properties/go.mod | 3 + .../magiconair/properties/integrate.go | 34 + .../github.com/magiconair/properties/lex.go | 407 + .../github.com/magiconair/properties/load.go | 293 + .../magiconair/properties/parser.go | 95 + .../magiconair/properties/properties.go | 854 + .../magiconair/properties/rangecheck.go | 31 + 
.../github.com/maratori/testpackage/LICENSE | 21 + .../pkg/testpackage/testpackage.go | 53 + vendor/github.com/matoous/godox/.gitignore | 19 + vendor/github.com/matoous/godox/.golangci.yml | 71 + vendor/github.com/matoous/godox/.revive.toml | 135 + vendor/github.com/matoous/godox/LICENSE | 21 + vendor/github.com/matoous/godox/README.md | 23 + vendor/github.com/matoous/godox/go.mod | 5 + vendor/github.com/matoous/godox/go.sum | 8 + vendor/github.com/matoous/godox/godox.go | 84 + .../github.com/mattn/go-colorable/.travis.yml | 15 + vendor/github.com/mattn/go-colorable/LICENSE | 21 + .../github.com/mattn/go-colorable/README.md | 48 + .../mattn/go-colorable/colorable_appengine.go | 37 + .../mattn/go-colorable/colorable_others.go | 38 + .../mattn/go-colorable/colorable_windows.go | 1043 + vendor/github.com/mattn/go-colorable/go.mod | 8 + vendor/github.com/mattn/go-colorable/go.sum | 5 + .../github.com/mattn/go-colorable/go.test.sh | 12 + .../mattn/go-colorable/noncolorable.go | 55 + .../github.com/mattn/go-runewidth/.travis.yml | 16 + vendor/github.com/mattn/go-runewidth/LICENSE | 21 + .../github.com/mattn/go-runewidth/README.md | 27 + vendor/github.com/mattn/go-runewidth/go.mod | 3 + .../github.com/mattn/go-runewidth/go.test.sh | 12 + .../mattn/go-runewidth/runewidth.go | 257 + .../mattn/go-runewidth/runewidth_appengine.go | 8 + .../mattn/go-runewidth/runewidth_js.go | 9 + .../mattn/go-runewidth/runewidth_posix.go | 82 + .../mattn/go-runewidth/runewidth_table.go | 437 + .../mattn/go-runewidth/runewidth_windows.go | 28 + .../mbilski/exhaustivestruct/LICENSE | 21 + .../exhaustivestruct/pkg/analyzer/analyzer.go | 187 + vendor/github.com/mgechev/dots/.travis.yml | 2 + vendor/github.com/mgechev/dots/LICENSE | 21 + vendor/github.com/mgechev/dots/README.md | 100 + vendor/github.com/mgechev/dots/resolve.go | 456 + vendor/github.com/mgechev/revive/LICENSE | 21 + .../mgechev/revive/config/config.go | 226 + .../mgechev/revive/formatter/checkstyle.go | 76 + .../mgechev/revive/formatter/default.go | 26 + .../mgechev/revive/formatter/friendly.go | 149 + .../mgechev/revive/formatter/json.go | 40 + .../mgechev/revive/formatter/ndjson.go | 34 + .../mgechev/revive/formatter/plain.go | 26 + .../mgechev/revive/formatter/sarif.go | 107 + .../mgechev/revive/formatter/severity.go | 13 + .../mgechev/revive/formatter/stylish.go | 89 + .../mgechev/revive/formatter/unix.go | 27 + .../github.com/mgechev/revive/lint/config.go | 35 + .../github.com/mgechev/revive/lint/failure.go | 39 + vendor/github.com/mgechev/revive/lint/file.go | 278 + .../mgechev/revive/lint/formatter.go | 14 + .../github.com/mgechev/revive/lint/linter.go | 99 + .../github.com/mgechev/revive/lint/package.go | 178 + vendor/github.com/mgechev/revive/lint/rule.go | 31 + .../github.com/mgechev/revive/lint/utils.go | 128 + .../mgechev/revive/rule/add-constant.go | 152 + .../mgechev/revive/rule/argument-limit.go | 65 + .../github.com/mgechev/revive/rule/atomic.go | 94 + .../mgechev/revive/rule/bare-return.go | 84 + .../mgechev/revive/rule/blank-imports.go | 75 + .../revive/rule/bool-literal-in-expr.go | 73 + .../mgechev/revive/rule/call-to-gc.go | 70 + .../revive/rule/cognitive-complexity.go | 193 + .../mgechev/revive/rule/confusing-naming.go | 190 + .../mgechev/revive/rule/confusing-results.go | 67 + .../revive/rule/constant-logical-expr.go | 88 + .../revive/rule/context-as-argument.go | 63 + .../mgechev/revive/rule/context-keys-type.go | 81 + .../mgechev/revive/rule/cyclomatic.go | 117 + .../mgechev/revive/rule/deep-exit.go | 94 + 
.../github.com/mgechev/revive/rule/defer.go | 137 + .../mgechev/revive/rule/dot-imports.go | 54 + .../mgechev/revive/rule/duplicated-imports.go | 39 + .../mgechev/revive/rule/early-return.go | 78 + .../mgechev/revive/rule/empty-block.go | 65 + .../mgechev/revive/rule/empty-lines.go | 113 + .../mgechev/revive/rule/error-naming.go | 79 + .../mgechev/revive/rule/error-return.go | 67 + .../mgechev/revive/rule/error-strings.go | 98 + .../github.com/mgechev/revive/rule/errorf.go | 93 + .../mgechev/revive/rule/exported.go | 315 + .../mgechev/revive/rule/file-header.go | 67 + .../mgechev/revive/rule/flag-param.go | 104 + .../mgechev/revive/rule/function-length.go | 153 + .../revive/rule/function-result-limit.go | 66 + .../mgechev/revive/rule/get-return.go | 70 + .../mgechev/revive/rule/identical-branches.go | 82 + .../mgechev/revive/rule/if-return.go | 115 + .../mgechev/revive/rule/import-shadowing.go | 108 + .../mgechev/revive/rule/imports-blacklist.go | 52 + .../revive/rule/increment-decrement.go | 74 + .../mgechev/revive/rule/indent-error-flow.go | 78 + .../mgechev/revive/rule/line-length-limit.go | 82 + .../mgechev/revive/rule/max-public-structs.go | 69 + .../mgechev/revive/rule/modifies-param.go | 80 + .../revive/rule/modifies-value-receiver.go | 134 + .../mgechev/revive/rule/nested-structs.go | 61 + .../mgechev/revive/rule/package-comments.go | 121 + .../mgechev/revive/rule/range-val-address.go | 144 + .../revive/rule/range-val-in-closure.go | 111 + .../github.com/mgechev/revive/rule/range.go | 82 + .../mgechev/revive/rule/receiver-naming.go | 81 + .../revive/rule/redefines-builtin-id.go | 145 + .../mgechev/revive/rule/string-format.go | 282 + .../mgechev/revive/rule/string-of-int.go | 95 + .../mgechev/revive/rule/struct-tag.go | 236 + .../mgechev/revive/rule/superfluous-else.go | 114 + .../mgechev/revive/rule/time-naming.go | 93 + .../revive/rule/unconditional-recursion.go | 183 + .../mgechev/revive/rule/unexported-naming.go | 115 + .../mgechev/revive/rule/unexported-return.go | 106 + .../mgechev/revive/rule/unhandled-error.go | 120 + .../mgechev/revive/rule/unnecessary-stmt.go | 107 + .../mgechev/revive/rule/unreachable-code.go | 114 + .../mgechev/revive/rule/unused-param.go | 102 + .../mgechev/revive/rule/unused-receiver.go | 77 + .../mgechev/revive/rule/useless-break.go | 82 + .../github.com/mgechev/revive/rule/utils.go | 199 + .../mgechev/revive/rule/var-declarations.go | 120 + .../mgechev/revive/rule/var-naming.go | 230 + .../mgechev/revive/rule/waitgroup-by-value.go | 66 + .../github.com/mitchellh/go-homedir/LICENSE | 21 + .../github.com/mitchellh/go-homedir/README.md | 14 + vendor/github.com/mitchellh/go-homedir/go.mod | 1 + .../mitchellh/go-homedir/homedir.go | 167 + .../mitchellh/mapstructure/CHANGELOG.md | 73 + .../github.com/mitchellh/mapstructure/LICENSE | 21 + .../mitchellh/mapstructure/README.md | 46 + .../mitchellh/mapstructure/decode_hooks.go | 256 + .../mitchellh/mapstructure/error.go | 50 + .../github.com/mitchellh/mapstructure/go.mod | 3 + .../mitchellh/mapstructure/mapstructure.go | 1462 + .../github.com/moricho/tparallel/.gitignore | 3 + .../moricho/tparallel/.goreleaser.yml | 38 + vendor/github.com/moricho/tparallel/LICENSE | 21 + vendor/github.com/moricho/tparallel/Makefile | 13 + vendor/github.com/moricho/tparallel/README.md | 100 + vendor/github.com/moricho/tparallel/go.mod | 8 + vendor/github.com/moricho/tparallel/go.sum | 34 + .../moricho/tparallel/pkg/ssafunc/ssafunc.go | 34 + .../tparallel/pkg/ssainstr/ssainstr.go | 63 + 
.../github.com/moricho/tparallel/testmap.go | 63 + .../github.com/moricho/tparallel/tparallel.go | 72 + vendor/github.com/nakabonne/nestif/.gitignore | 16 + vendor/github.com/nakabonne/nestif/LICENSE | 25 + vendor/github.com/nakabonne/nestif/README.md | 122 + vendor/github.com/nakabonne/nestif/go.mod | 8 + vendor/github.com/nakabonne/nestif/go.sum | 12 + vendor/github.com/nakabonne/nestif/nestif.go | 148 + .../github.com/nbutton23/zxcvbn-go/.gitignore | 2 + .../nbutton23/zxcvbn-go/LICENSE.txt | 20 + .../github.com/nbutton23/zxcvbn-go/Makefile | 15 + .../github.com/nbutton23/zxcvbn-go/README.md | 78 + .../zxcvbn-go/adjacency/adjcmartix.go | 108 + .../nbutton23/zxcvbn-go/data/bindata.go | 444 + .../zxcvbn-go/entropy/entropyCalculator.go | 216 + .../zxcvbn-go/frequency/frequency.go | 50 + vendor/github.com/nbutton23/zxcvbn-go/go.mod | 9 + vendor/github.com/nbutton23/zxcvbn-go/go.sum | 5 + .../nbutton23/zxcvbn-go/match/match.go | 44 + .../zxcvbn-go/matching/dateMatchers.go | 209 + .../zxcvbn-go/matching/dictionaryMatch.go | 57 + .../nbutton23/zxcvbn-go/matching/leet.go | 234 + .../nbutton23/zxcvbn-go/matching/matching.go | 82 + .../zxcvbn-go/matching/repeatMatch.go | 67 + .../zxcvbn-go/matching/sequenceMatch.go | 76 + .../zxcvbn-go/matching/spatialMatch.go | 88 + .../nbutton23/zxcvbn-go/scoring/scoring.go | 177 + .../zxcvbn-go/utils/math/mathutils.go | 40 + .../github.com/nbutton23/zxcvbn-go/zxcvbn.go | 22 + .../nishanths/exhaustive/.gitignore | 7 + .../nishanths/exhaustive/.travis.yml | 12 + .../github.com/nishanths/exhaustive/LICENSE | 25 + .../github.com/nishanths/exhaustive/README.md | 70 + .../github.com/nishanths/exhaustive/enum.go | 146 + .../nishanths/exhaustive/exhaustive.go | 207 + .../nishanths/exhaustive/generated.go | 34 + vendor/github.com/nishanths/exhaustive/go.mod | 8 + vendor/github.com/nishanths/exhaustive/go.sum | 28 + .../nishanths/exhaustive/regexp_flag.go | 35 + .../github.com/nishanths/exhaustive/switch.go | 444 + .../github.com/nishanths/predeclared/LICENSE | 29 + .../predeclared/passes/predeclared/go18.go | 9 + .../passes/predeclared/pre_go18.go | 53 + .../passes/predeclared/predeclared.go | 202 + .../olekukonko/tablewriter/.gitignore | 15 + .../olekukonko/tablewriter/.travis.yml | 22 + .../olekukonko/tablewriter/LICENSE.md | 19 + .../olekukonko/tablewriter/README.md | 431 + .../github.com/olekukonko/tablewriter/csv.go | 52 + .../github.com/olekukonko/tablewriter/go.mod | 5 + .../github.com/olekukonko/tablewriter/go.sum | 2 + .../olekukonko/tablewriter/table.go | 967 + .../tablewriter/table_with_color.go | 136 + .../github.com/olekukonko/tablewriter/util.go | 93 + .../github.com/olekukonko/tablewriter/wrap.go | 99 + .../pelletier/go-toml/.dockerignore | 2 + .../github.com/pelletier/go-toml/.gitignore | 5 + .../pelletier/go-toml/CONTRIBUTING.md | 132 + .../github.com/pelletier/go-toml/Dockerfile | 11 + vendor/github.com/pelletier/go-toml/LICENSE | 247 + vendor/github.com/pelletier/go-toml/Makefile | 29 + .../go-toml/PULL_REQUEST_TEMPLATE.md | 5 + vendor/github.com/pelletier/go-toml/README.md | 176 + .../pelletier/go-toml/azure-pipelines.yml | 188 + .../github.com/pelletier/go-toml/benchmark.sh | 35 + vendor/github.com/pelletier/go-toml/doc.go | 23 + .../pelletier/go-toml/example-crlf.toml | 30 + .../github.com/pelletier/go-toml/example.toml | 30 + vendor/github.com/pelletier/go-toml/fuzz.go | 31 + vendor/github.com/pelletier/go-toml/fuzz.sh | 15 + vendor/github.com/pelletier/go-toml/go.mod | 3 + .../pelletier/go-toml/keysparsing.go | 112 + 
vendor/github.com/pelletier/go-toml/lexer.go | 1031 + .../github.com/pelletier/go-toml/localtime.go | 287 + .../github.com/pelletier/go-toml/marshal.go | 1308 + .../go-toml/marshal_OrderPreserve_test.toml | 39 + .../pelletier/go-toml/marshal_test.toml | 39 + vendor/github.com/pelletier/go-toml/parser.go | 508 + .../github.com/pelletier/go-toml/position.go | 29 + vendor/github.com/pelletier/go-toml/token.go | 136 + vendor/github.com/pelletier/go-toml/toml.go | 533 + .../github.com/pelletier/go-toml/tomlpub.go | 71 + .../pelletier/go-toml/tomltree_create.go | 155 + .../pelletier/go-toml/tomltree_write.go | 552 + .../pelletier/go-toml/tomltree_writepub.go | 6 + .../phayes/checkstyle/.scrutinizer.yml | 15 + vendor/github.com/phayes/checkstyle/LICENSE | 29 + vendor/github.com/phayes/checkstyle/README.md | 44 + .../phayes/checkstyle/checkstyle.go | 112 + vendor/github.com/phayes/checkstyle/godoc.go | 36 + .../github.com/polyfloyd/go-errorlint/LICENSE | 21 + .../go-errorlint/errorlint/allowed.go | 143 + .../go-errorlint/errorlint/analysis.go | 52 + .../polyfloyd/go-errorlint/errorlint/lint.go | 249 + .../prometheus/testutil/promlint/promlint.go | 386 + .../github.com/quasilyte/go-ruleguard/LICENSE | 29 + .../go-ruleguard/internal/gogrep/compile.go | 976 + .../internal/gogrep/gen_operations.go | 311 + .../go-ruleguard/internal/gogrep/gogrep.go | 66 + .../internal/gogrep/instructions.go | 107 + .../go-ruleguard/internal/gogrep/match.go | 731 + .../internal/gogrep/operation_string.go | 129 + .../internal/gogrep/operations.gen.go | 1249 + .../go-ruleguard/internal/gogrep/parse.go | 360 + .../go-ruleguard/internal/gogrep/slices.go | 51 + .../go-ruleguard/internal/golist/golist.go | 30 + .../go-ruleguard/internal/xtypes/xtypes.go | 256 + .../quasilyte/go-ruleguard/nodetag/nodetag.go | 277 + .../go-ruleguard/ruleguard/bundle.go | 19 + .../go-ruleguard/ruleguard/engine.go | 171 + .../go-ruleguard/ruleguard/filters.go | 267 + .../go-ruleguard/ruleguard/gorule.go | 146 + .../go-ruleguard/ruleguard/goutil/goutil.go | 21 + .../go-ruleguard/ruleguard/goutil/resolve.go | 33 + .../go-ruleguard/ruleguard/importer.go | 116 + .../go-ruleguard/ruleguard/libdsl.go | 276 + .../go-ruleguard/ruleguard/match_data.go | 46 + .../go-ruleguard/ruleguard/parser.go | 988 + .../go-ruleguard/ruleguard/quasigo/compile.go | 707 + .../ruleguard/quasigo/debug_info.go | 16 + .../go-ruleguard/ruleguard/quasigo/disasm.go | 74 + .../go-ruleguard/ruleguard/quasigo/env.go | 42 + .../go-ruleguard/ruleguard/quasigo/eval.go | 239 + .../ruleguard/quasigo/gen_opcodes.go | 184 + .../ruleguard/quasigo/opcode_string.go | 63 + .../ruleguard/quasigo/opcodes.gen.go | 219 + .../go-ruleguard/ruleguard/quasigo/quasigo.go | 165 + .../go-ruleguard/ruleguard/quasigo/utils.go | 60 + .../go-ruleguard/ruleguard/ruleguard.go | 87 + .../go-ruleguard/ruleguard/runner.go | 349 + .../ruleguard/typematch/patternop_string.go | 34 + .../ruleguard/typematch/typematch.go | 536 + .../quasilyte/go-ruleguard/ruleguard/utils.go | 251 + .../github.com/quasilyte/regex/syntax/LICENSE | 21 + .../quasilyte/regex/syntax/README.md | 26 + .../github.com/quasilyte/regex/syntax/ast.go | 147 + .../quasilyte/regex/syntax/errors.go | 27 + .../github.com/quasilyte/regex/syntax/go.mod | 3 + .../quasilyte/regex/syntax/lexer.go | 455 + .../quasilyte/regex/syntax/operation.go | 189 + .../regex/syntax/operation_string.go | 59 + .../quasilyte/regex/syntax/parser.go | 471 + .../github.com/quasilyte/regex/syntax/pos.go | 10 + .../regex/syntax/tokenkind_string.go | 59 + 
.../quasilyte/regex/syntax/utils.go | 30 + .../ryancurrah/gomodguard/.dockerignore | 1 + .../ryancurrah/gomodguard/.gitignore | 25 + .../ryancurrah/gomodguard/.golangci.yml | 132 + .../ryancurrah/gomodguard/.goreleaser.yml | 29 + .../ryancurrah/gomodguard/Dockerfile | 17 + .../gomodguard/Dockerfile.goreleaser | 10 + .../github.com/ryancurrah/gomodguard/LICENSE | 21 + .../github.com/ryancurrah/gomodguard/Makefile | 42 + .../ryancurrah/gomodguard/README.md | 131 + .../github.com/ryancurrah/gomodguard/cmd.go | 247 + .../github.com/ryancurrah/gomodguard/go.mod | 12 + .../github.com/ryancurrah/gomodguard/go.sum | 26 + .../ryancurrah/gomodguard/gomodguard.go | 486 + .../ryanrolds/sqlclosecheck/LICENSE | 19 + .../sqlclosecheck/pkg/analyzer/analyzer.go | 311 + .../sanposhiho/wastedassign/v2/LICENSE | 21 + .../sanposhiho/wastedassign/v2/README.md | 66 + .../sanposhiho/wastedassign/v2/go.mod | 5 + .../sanposhiho/wastedassign/v2/go.sum | 26 + .../wastedassign/v2/wastedassign.go | 272 + .../github.com/securego/gosec/v2/.gitignore | 35 + .../securego/gosec/v2/.golangci.yml | 23 + .../securego/gosec/v2/.goreleaser.yml | 21 + .../github.com/securego/gosec/v2/Dockerfile | 15 + .../github.com/securego/gosec/v2/LICENSE.txt | 154 + vendor/github.com/securego/gosec/v2/Makefile | 71 + vendor/github.com/securego/gosec/v2/README.md | 401 + vendor/github.com/securego/gosec/v2/USERS.md | 28 + .../github.com/securego/gosec/v2/action.yml | 19 + .../github.com/securego/gosec/v2/analyzer.go | 375 + .../github.com/securego/gosec/v2/call_list.go | 109 + vendor/github.com/securego/gosec/v2/config.go | 125 + .../github.com/securego/gosec/v2/cwe/data.go | 143 + .../github.com/securego/gosec/v2/cwe/types.go | 34 + .../securego/gosec/v2/entrypoint.sh | 7 + vendor/github.com/securego/gosec/v2/errors.go | 33 + vendor/github.com/securego/gosec/v2/go.mod | 18 + vendor/github.com/securego/gosec/v2/go.sum | 702 + .../github.com/securego/gosec/v2/helpers.go | 451 + .../securego/gosec/v2/import_tracker.go | 75 + .../github.com/securego/gosec/v2/install.sh | 375 + vendor/github.com/securego/gosec/v2/issue.go | 201 + .../securego/gosec/v2/renovate.json | 24 + vendor/github.com/securego/gosec/v2/report.go | 24 + .../github.com/securego/gosec/v2/resolve.go | 95 + vendor/github.com/securego/gosec/v2/rule.go | 59 + .../securego/gosec/v2/rules/archive.go | 65 + .../securego/gosec/v2/rules/bad_defer.go | 68 + .../securego/gosec/v2/rules/bind.go | 83 + .../securego/gosec/v2/rules/blocklist.go | 94 + .../gosec/v2/rules/decompression-bomb.go | 110 + .../securego/gosec/v2/rules/errors.go | 120 + .../securego/gosec/v2/rules/fileperms.go | 113 + .../gosec/v2/rules/hardcoded_credentials.go | 173 + .../gosec/v2/rules/implicit_aliasing.go | 119 + .../gosec/v2/rules/integer_overflow.go | 89 + .../securego/gosec/v2/rules/pprof.go | 42 + .../securego/gosec/v2/rules/rand.go | 58 + .../securego/gosec/v2/rules/readfile.go | 128 + .../github.com/securego/gosec/v2/rules/rsa.go | 58 + .../securego/gosec/v2/rules/rulelist.go | 116 + .../github.com/securego/gosec/v2/rules/sql.go | 303 + .../github.com/securego/gosec/v2/rules/ssh.go | 38 + .../securego/gosec/v2/rules/ssrf.go | 66 + .../securego/gosec/v2/rules/subproc.go | 85 + .../securego/gosec/v2/rules/tempfiles.go | 58 + .../securego/gosec/v2/rules/templates.go | 60 + .../github.com/securego/gosec/v2/rules/tls.go | 171 + .../securego/gosec/v2/rules/tls_config.go | 92 + .../securego/gosec/v2/rules/unsafe.go | 53 + .../securego/gosec/v2/rules/weakcrypto.go | 58 + vendor/github.com/shazow/go-diff/LICENSE | 22 + 
.../shazow/go-diff/difflib/differ.go | 39 + vendor/github.com/sonatard/noctx/.gitignore | 1 + .../github.com/sonatard/noctx/.golangci.yml | 20 + vendor/github.com/sonatard/noctx/LICENSE | 21 + vendor/github.com/sonatard/noctx/Makefile | 16 + vendor/github.com/sonatard/noctx/README.md | 95 + vendor/github.com/sonatard/noctx/go.mod | 8 + vendor/github.com/sonatard/noctx/go.sum | 16 + .../github.com/sonatard/noctx/ngfunc/main.go | 57 + .../sonatard/noctx/ngfunc/report.go | 29 + .../github.com/sonatard/noctx/ngfunc/types.go | 65 + vendor/github.com/sonatard/noctx/noctx.go | 31 + .../sonatard/noctx/reqwithoutctx/main.go | 14 + .../sonatard/noctx/reqwithoutctx/report.go | 26 + .../sonatard/noctx/reqwithoutctx/ssa.go | 180 + vendor/github.com/sourcegraph/go-diff/LICENSE | 35 + .../sourcegraph/go-diff/diff/diff.go | 132 + .../sourcegraph/go-diff/diff/doc.go | 2 + .../sourcegraph/go-diff/diff/parse.go | 725 + .../sourcegraph/go-diff/diff/print.go | 141 + .../sourcegraph/go-diff/diff/reader_util.go | 37 + vendor/github.com/spf13/afero/.gitignore | 2 + vendor/github.com/spf13/afero/.travis.yml | 26 + vendor/github.com/spf13/afero/LICENSE.txt | 174 + vendor/github.com/spf13/afero/README.md | 430 + vendor/github.com/spf13/afero/afero.go | 111 + vendor/github.com/spf13/afero/appveyor.yml | 15 + vendor/github.com/spf13/afero/basepath.go | 211 + .../github.com/spf13/afero/cacheOnReadFs.go | 311 + vendor/github.com/spf13/afero/const_bsds.go | 22 + .../github.com/spf13/afero/const_win_unix.go | 26 + .../github.com/spf13/afero/copyOnWriteFs.go | 326 + vendor/github.com/spf13/afero/go.mod | 9 + vendor/github.com/spf13/afero/go.sum | 29 + vendor/github.com/spf13/afero/httpFs.go | 114 + vendor/github.com/spf13/afero/iofs.go | 288 + vendor/github.com/spf13/afero/ioutil.go | 240 + vendor/github.com/spf13/afero/lstater.go | 27 + vendor/github.com/spf13/afero/match.go | 110 + vendor/github.com/spf13/afero/mem/dir.go | 37 + vendor/github.com/spf13/afero/mem/dirmap.go | 43 + vendor/github.com/spf13/afero/mem/file.go | 338 + vendor/github.com/spf13/afero/memmap.go | 404 + vendor/github.com/spf13/afero/os.go | 113 + vendor/github.com/spf13/afero/path.go | 106 + vendor/github.com/spf13/afero/readonlyfs.go | 96 + vendor/github.com/spf13/afero/regexpfs.go | 224 + vendor/github.com/spf13/afero/symlink.go | 55 + vendor/github.com/spf13/afero/unionFile.go | 317 + vendor/github.com/spf13/afero/util.go | 330 + vendor/github.com/spf13/cast/.gitignore | 25 + vendor/github.com/spf13/cast/.travis.yml | 16 + vendor/github.com/spf13/cast/LICENSE | 21 + vendor/github.com/spf13/cast/Makefile | 40 + vendor/github.com/spf13/cast/README.md | 75 + vendor/github.com/spf13/cast/cast.go | 171 + vendor/github.com/spf13/cast/caste.go | 1249 + vendor/github.com/spf13/cast/go.mod | 7 + vendor/github.com/spf13/cast/go.sum | 6 + vendor/github.com/spf13/cobra/.gitignore | 39 + vendor/github.com/spf13/cobra/.golangci.yml | 48 + vendor/github.com/spf13/cobra/.mailmap | 3 + vendor/github.com/spf13/cobra/CHANGELOG.md | 51 + vendor/github.com/spf13/cobra/CONDUCT.md | 37 + vendor/github.com/spf13/cobra/CONTRIBUTING.md | 50 + vendor/github.com/spf13/cobra/LICENSE.txt | 174 + vendor/github.com/spf13/cobra/Makefile | 40 + vendor/github.com/spf13/cobra/README.md | 125 + vendor/github.com/spf13/cobra/args.go | 109 + .../spf13/cobra/bash_completions.go | 685 + .../spf13/cobra/bash_completions.md | 93 + .../spf13/cobra/bash_completionsV2.go | 302 + vendor/github.com/spf13/cobra/cobra.go | 222 + vendor/github.com/spf13/cobra/command.go | 1680 + 
.../github.com/spf13/cobra/command_notwin.go | 5 + vendor/github.com/spf13/cobra/command_win.go | 26 + vendor/github.com/spf13/cobra/completions.go | 781 + .../spf13/cobra/fish_completions.go | 219 + .../spf13/cobra/fish_completions.md | 4 + vendor/github.com/spf13/cobra/go.mod | 11 + vendor/github.com/spf13/cobra/go.sum | 592 + .../spf13/cobra/powershell_completions.go | 285 + .../spf13/cobra/powershell_completions.md | 3 + .../spf13/cobra/projects_using_cobra.md | 38 + .../spf13/cobra/shell_completions.go | 84 + .../spf13/cobra/shell_completions.md | 546 + vendor/github.com/spf13/cobra/user_guide.md | 637 + .../github.com/spf13/cobra/zsh_completions.go | 258 + .../github.com/spf13/cobra/zsh_completions.md | 48 + .../spf13/jwalterweatherman/.gitignore | 24 + .../spf13/jwalterweatherman/LICENSE | 21 + .../spf13/jwalterweatherman/README.md | 148 + .../jwalterweatherman/default_notepad.go | 111 + .../github.com/spf13/jwalterweatherman/go.mod | 7 + .../spf13/jwalterweatherman/log_counter.go | 46 + .../spf13/jwalterweatherman/notepad.go | 225 + vendor/github.com/spf13/pflag/.gitignore | 2 + vendor/github.com/spf13/pflag/.travis.yml | 22 + vendor/github.com/spf13/pflag/LICENSE | 28 + vendor/github.com/spf13/pflag/README.md | 296 + vendor/github.com/spf13/pflag/bool.go | 94 + vendor/github.com/spf13/pflag/bool_slice.go | 185 + vendor/github.com/spf13/pflag/bytes.go | 209 + vendor/github.com/spf13/pflag/count.go | 96 + vendor/github.com/spf13/pflag/duration.go | 86 + .../github.com/spf13/pflag/duration_slice.go | 166 + vendor/github.com/spf13/pflag/flag.go | 1239 + vendor/github.com/spf13/pflag/float32.go | 88 + .../github.com/spf13/pflag/float32_slice.go | 174 + vendor/github.com/spf13/pflag/float64.go | 84 + .../github.com/spf13/pflag/float64_slice.go | 166 + vendor/github.com/spf13/pflag/go.mod | 3 + vendor/github.com/spf13/pflag/go.sum | 0 vendor/github.com/spf13/pflag/golangflag.go | 105 + vendor/github.com/spf13/pflag/int.go | 84 + vendor/github.com/spf13/pflag/int16.go | 88 + vendor/github.com/spf13/pflag/int32.go | 88 + vendor/github.com/spf13/pflag/int32_slice.go | 174 + vendor/github.com/spf13/pflag/int64.go | 84 + vendor/github.com/spf13/pflag/int64_slice.go | 166 + vendor/github.com/spf13/pflag/int8.go | 88 + vendor/github.com/spf13/pflag/int_slice.go | 158 + vendor/github.com/spf13/pflag/ip.go | 94 + vendor/github.com/spf13/pflag/ip_slice.go | 186 + vendor/github.com/spf13/pflag/ipmask.go | 122 + vendor/github.com/spf13/pflag/ipnet.go | 98 + vendor/github.com/spf13/pflag/string.go | 80 + vendor/github.com/spf13/pflag/string_array.go | 129 + vendor/github.com/spf13/pflag/string_slice.go | 163 + .../github.com/spf13/pflag/string_to_int.go | 149 + .../github.com/spf13/pflag/string_to_int64.go | 149 + .../spf13/pflag/string_to_string.go | 160 + vendor/github.com/spf13/pflag/uint.go | 88 + vendor/github.com/spf13/pflag/uint16.go | 88 + vendor/github.com/spf13/pflag/uint32.go | 88 + vendor/github.com/spf13/pflag/uint64.go | 88 + vendor/github.com/spf13/pflag/uint8.go | 88 + vendor/github.com/spf13/pflag/uint_slice.go | 168 + vendor/github.com/spf13/viper/.editorconfig | 15 + vendor/github.com/spf13/viper/.gitignore | 5 + vendor/github.com/spf13/viper/.golangci.yml | 93 + vendor/github.com/spf13/viper/LICENSE | 21 + vendor/github.com/spf13/viper/Makefile | 76 + vendor/github.com/spf13/viper/README.md | 865 + .../github.com/spf13/viper/TROUBLESHOOTING.md | 23 + vendor/github.com/spf13/viper/flags.go | 57 + vendor/github.com/spf13/viper/go.mod | 21 + vendor/github.com/spf13/viper/go.sum | 632 
+ vendor/github.com/spf13/viper/util.go | 230 + vendor/github.com/spf13/viper/viper.go | 2169 ++ vendor/github.com/spf13/viper/watch.go | 11 + vendor/github.com/spf13/viper/watch_wasm.go | 30 + vendor/github.com/ssgreg/nlreturn/v2/LICENSE | 21 + .../nlreturn/v2/pkg/nlreturn/nlreturn.go | 86 + vendor/github.com/subosito/gotenv/.env | 1 + .../github.com/subosito/gotenv/.env.invalid | 1 + vendor/github.com/subosito/gotenv/.gitignore | 3 + vendor/github.com/subosito/gotenv/.travis.yml | 10 + .../github.com/subosito/gotenv/CHANGELOG.md | 47 + vendor/github.com/subosito/gotenv/LICENSE | 21 + vendor/github.com/subosito/gotenv/README.md | 131 + .../github.com/subosito/gotenv/appveyor.yml | 9 + vendor/github.com/subosito/gotenv/gotenv.go | 265 + .../github.com/tdakkota/asciicheck/.gitignore | 33 + vendor/github.com/tdakkota/asciicheck/LICENSE | 21 + .../github.com/tdakkota/asciicheck/README.md | 72 + .../github.com/tdakkota/asciicheck/ascii.go | 18 + .../tdakkota/asciicheck/asciicheck.go | 49 + vendor/github.com/tdakkota/asciicheck/go.mod | 5 + vendor/github.com/tetafro/godot/.gitignore | 4 + vendor/github.com/tetafro/godot/.godot.yaml | 16 + vendor/github.com/tetafro/godot/.golangci.yml | 67 + .../github.com/tetafro/godot/.goreleaser.yml | 11 + vendor/github.com/tetafro/godot/LICENSE | 21 + vendor/github.com/tetafro/godot/Makefile | 25 + vendor/github.com/tetafro/godot/README.md | 84 + vendor/github.com/tetafro/godot/checks.go | 269 + vendor/github.com/tetafro/godot/getters.go | 283 + vendor/github.com/tetafro/godot/go.mod | 5 + vendor/github.com/tetafro/godot/go.sum | 4 + vendor/github.com/tetafro/godot/godot.go | 135 + vendor/github.com/tetafro/godot/settings.go | 29 + vendor/github.com/timakin/bodyclose/LICENSE | 21 + .../bodyclose/passes/bodyclose/bodyclose.go | 368 + .../github.com/tomarrell/wrapcheck/v2/LICENSE | 21 + .../wrapcheck/v2/wrapcheck/wrapcheck.go | 337 + .../tommy-muehle/go-mnd/v2/.editorconfig | 21 + .../tommy-muehle/go-mnd/v2/.gitattributes | 9 + .../tommy-muehle/go-mnd/v2/.gitignore | 3 + .../tommy-muehle/go-mnd/v2/.goreleaser.yml | 29 + .../tommy-muehle/go-mnd/v2/Dockerfile | 17 + .../github.com/tommy-muehle/go-mnd/v2/LICENSE | 21 + .../tommy-muehle/go-mnd/v2/Makefile | 32 + .../tommy-muehle/go-mnd/v2/README.md | 230 + .../tommy-muehle/go-mnd/v2/action.yml | 19 + .../tommy-muehle/go-mnd/v2/analyzer.go | 118 + .../tommy-muehle/go-mnd/v2/checks/argument.go | 121 + .../tommy-muehle/go-mnd/v2/checks/assign.go | 86 + .../tommy-muehle/go-mnd/v2/checks/case.go | 68 + .../tommy-muehle/go-mnd/v2/checks/checks.go | 3 + .../go-mnd/v2/checks/condition.go | 55 + .../go-mnd/v2/checks/operation.go | 77 + .../tommy-muehle/go-mnd/v2/checks/return.go | 68 + .../tommy-muehle/go-mnd/v2/config/config.go | 118 + .../tommy-muehle/go-mnd/v2/entrypoint.sh | 7 + .../github.com/tommy-muehle/go-mnd/v2/go.mod | 9 + .../github.com/tommy-muehle/go-mnd/v2/go.sum | 28 + vendor/github.com/ultraware/funlen/LICENSE | 7 + vendor/github.com/ultraware/funlen/README.md | 9 + vendor/github.com/ultraware/funlen/main.go | 104 + .../github.com/ultraware/whitespace/LICENSE | 7 + .../github.com/ultraware/whitespace/README.md | 7 + .../github.com/ultraware/whitespace/main.go | 158 + vendor/github.com/uudashr/gocognit/LICENSE | 21 + vendor/github.com/uudashr/gocognit/README.md | 185 + vendor/github.com/uudashr/gocognit/doc.go | 2 + vendor/github.com/uudashr/gocognit/go.mod | 5 + vendor/github.com/uudashr/gocognit/go.sum | 27 + .../github.com/uudashr/gocognit/gocognit.go | 385 + .../github.com/yeya24/promlinter/.gitignore | 
20 + vendor/github.com/yeya24/promlinter/LICENSE | 201 + vendor/github.com/yeya24/promlinter/Makefile | 39 + vendor/github.com/yeya24/promlinter/README.md | 74 + vendor/github.com/yeya24/promlinter/go.mod | 9 + vendor/github.com/yeya24/promlinter/go.sum | 114 + .../yeya24/promlinter/promlinter.go | 664 + vendor/golang.org/x/mod/LICENSE | 27 + vendor/golang.org/x/mod/PATENTS | 22 + .../x/mod/internal/lazyregexp/lazyre.go | 78 + vendor/golang.org/x/mod/modfile/print.go | 174 + vendor/golang.org/x/mod/modfile/read.go | 956 + vendor/golang.org/x/mod/modfile/rule.go | 1180 + vendor/golang.org/x/mod/module/module.go | 822 + vendor/golang.org/x/mod/semver/semver.go | 391 + vendor/golang.org/x/text/width/kind_string.go | 28 + .../golang.org/x/text/width/tables10.0.0.go | 1319 + .../golang.org/x/text/width/tables11.0.0.go | 1331 + .../golang.org/x/text/width/tables12.0.0.go | 1351 + .../golang.org/x/text/width/tables13.0.0.go | 1352 + vendor/golang.org/x/text/width/tables9.0.0.go | 1287 + vendor/golang.org/x/text/width/transform.go | 239 + vendor/golang.org/x/text/width/trieval.go | 30 + vendor/golang.org/x/text/width/width.go | 206 + .../x/tools/go/analysis/analysis.go | 242 + .../x/tools/go/analysis/diagnostic.go | 65 + vendor/golang.org/x/tools/go/analysis/doc.go | 321 + .../go/analysis/passes/asmdecl/asmdecl.go | 802 + .../tools/go/analysis/passes/assign/assign.go | 76 + .../tools/go/analysis/passes/atomic/atomic.go | 96 + .../passes/atomicalign/atomicalign.go | 117 + .../x/tools/go/analysis/passes/bools/bools.go | 221 + .../go/analysis/passes/buildssa/buildssa.go | 117 + .../go/analysis/passes/buildtag/buildtag.go | 367 + .../analysis/passes/buildtag/buildtag_old.go | 174 + .../go/analysis/passes/cgocall/cgocall.go | 376 + .../go/analysis/passes/composite/composite.go | 117 + .../go/analysis/passes/composite/whitelist.go | 34 + .../go/analysis/passes/copylock/copylock.go | 300 + .../go/analysis/passes/ctrlflow/ctrlflow.go | 226 + .../passes/deepequalerrors/deepequalerrors.go | 115 + .../go/analysis/passes/errorsas/errorsas.go | 75 + .../passes/fieldalignment/fieldalignment.go | 368 + .../go/analysis/passes/findcall/findcall.go | 98 + .../passes/framepointer/framepointer.go | 91 + .../passes/httpresponse/httpresponse.go | 169 + .../passes/ifaceassert/ifaceassert.go | 105 + .../go/analysis/passes/inspect/inspect.go | 49 + .../passes/internal/analysisutil/util.go | 120 + .../passes/loopclosure/loopclosure.go | 165 + .../analysis/passes/lostcancel/lostcancel.go | 330 + .../go/analysis/passes/nilfunc/nilfunc.go | 74 + .../go/analysis/passes/nilness/nilness.go | 354 + .../go/analysis/passes/pkgfact/pkgfact.go | 127 + .../tools/go/analysis/passes/printf/printf.go | 1122 + .../tools/go/analysis/passes/printf/types.go | 246 + .../reflectvaluecompare.go | 99 + .../tools/go/analysis/passes/shadow/shadow.go | 290 + .../x/tools/go/analysis/passes/shift/dead.go | 101 + .../x/tools/go/analysis/passes/shift/shift.go | 101 + .../passes/sigchanyzer/sigchanyzer.go | 154 + .../go/analysis/passes/sortslice/analyzer.go | 123 + .../analysis/passes/stdmethods/stdmethods.go | 204 + .../analysis/passes/stringintconv/string.go | 126 + .../go/analysis/passes/structtag/structtag.go | 313 + .../testinggoroutine/testinggoroutine.go | 154 + .../x/tools/go/analysis/passes/tests/tests.go | 188 + .../go/analysis/passes/unmarshal/unmarshal.go | 100 + .../passes/unreachable/unreachable.go | 325 + .../go/analysis/passes/unsafeptr/unsafeptr.go | 168 + .../passes/unusedresult/unusedresult.go | 131 + 
.../passes/unusedwrite/unusedwrite.go | 184 + .../x/tools/go/analysis/validate.go | 130 + .../x/tools/go/ast/inspector/inspector.go | 186 + .../x/tools/go/ast/inspector/typeof.go | 220 + vendor/golang.org/x/tools/go/cfg/builder.go | 510 + vendor/golang.org/x/tools/go/cfg/cfg.go | 150 + .../x/tools/go/gcexportdata/gcexportdata.go | 133 + .../x/tools/go/gcexportdata/importer.go | 73 + .../x/tools/go/internal/gcimporter/bexport.go | 852 + .../x/tools/go/internal/gcimporter/bimport.go | 1039 + .../go/internal/gcimporter/exportdata.go | 93 + .../go/internal/gcimporter/gcimporter.go | 1078 + .../x/tools/go/internal/gcimporter/iexport.go | 781 + .../x/tools/go/internal/gcimporter/iimport.go | 676 + .../go/internal/gcimporter/newInterface10.go | 22 + .../go/internal/gcimporter/newInterface11.go | 14 + .../tools/go/internal/packagesdriver/sizes.go | 49 + vendor/golang.org/x/tools/go/packages/doc.go | 221 + .../x/tools/go/packages/external.go | 101 + .../golang.org/x/tools/go/packages/golist.go | 1099 + .../x/tools/go/packages/golist_overlay.go | 575 + .../x/tools/go/packages/loadmode_string.go | 57 + .../x/tools/go/packages/packages.go | 1239 + .../golang.org/x/tools/go/packages/visit.go | 59 + vendor/golang.org/x/tools/go/ssa/blockopt.go | 187 + vendor/golang.org/x/tools/go/ssa/builder.go | 2386 ++ vendor/golang.org/x/tools/go/ssa/const.go | 169 + vendor/golang.org/x/tools/go/ssa/create.go | 270 + vendor/golang.org/x/tools/go/ssa/doc.go | 125 + vendor/golang.org/x/tools/go/ssa/dom.go | 341 + vendor/golang.org/x/tools/go/ssa/emit.go | 478 + vendor/golang.org/x/tools/go/ssa/func.go | 691 + vendor/golang.org/x/tools/go/ssa/identical.go | 12 + .../golang.org/x/tools/go/ssa/identical_17.go | 12 + vendor/golang.org/x/tools/go/ssa/lift.go | 653 + vendor/golang.org/x/tools/go/ssa/lvalue.go | 120 + vendor/golang.org/x/tools/go/ssa/methods.go | 239 + vendor/golang.org/x/tools/go/ssa/mode.go | 105 + vendor/golang.org/x/tools/go/ssa/print.go | 431 + vendor/golang.org/x/tools/go/ssa/sanity.go | 539 + vendor/golang.org/x/tools/go/ssa/source.go | 293 + vendor/golang.org/x/tools/go/ssa/ssa.go | 1696 + .../golang.org/x/tools/go/ssa/ssautil/load.go | 175 + .../x/tools/go/ssa/ssautil/switch.go | 234 + .../x/tools/go/ssa/ssautil/visit.go | 79 + vendor/golang.org/x/tools/go/ssa/testmain.go | 274 + vendor/golang.org/x/tools/go/ssa/util.go | 89 + vendor/golang.org/x/tools/go/ssa/wrappers.go | 290 + .../x/tools/go/types/objectpath/objectpath.go | 524 + .../x/tools/go/types/typeutil/callee.go | 46 + .../x/tools/go/types/typeutil/imports.go | 31 + .../x/tools/go/types/typeutil/map.go | 313 + .../tools/go/types/typeutil/methodsetcache.go | 72 + .../x/tools/go/types/typeutil/ui.go | 52 + vendor/golang.org/x/tools/imports/forward.go | 77 + .../internal/analysisinternal/analysis.go | 425 + .../x/tools/internal/event/core/event.go | 85 + .../x/tools/internal/event/core/export.go | 70 + .../x/tools/internal/event/core/fast.go | 77 + .../golang.org/x/tools/internal/event/doc.go | 7 + .../x/tools/internal/event/event.go | 127 + .../x/tools/internal/event/keys/keys.go | 564 + .../x/tools/internal/event/keys/standard.go | 22 + .../x/tools/internal/event/label/label.go | 215 + .../x/tools/internal/fastwalk/fastwalk.go | 196 + .../fastwalk/fastwalk_dirent_fileno.go | 14 + .../internal/fastwalk/fastwalk_dirent_ino.go | 15 + .../fastwalk/fastwalk_dirent_namlen_bsd.go | 14 + .../fastwalk/fastwalk_dirent_namlen_linux.go | 29 + .../internal/fastwalk/fastwalk_portable.go | 38 + .../tools/internal/fastwalk/fastwalk_unix.go | 153 + 
.../x/tools/internal/gocommand/invoke.go | 273 + .../x/tools/internal/gocommand/vendor.go | 107 + .../x/tools/internal/gocommand/version.go | 51 + .../x/tools/internal/gopathwalk/walk.go | 264 + .../x/tools/internal/imports/fix.go | 1730 + .../x/tools/internal/imports/imports.go | 346 + .../x/tools/internal/imports/mod.go | 695 + .../x/tools/internal/imports/mod_cache.go | 236 + .../x/tools/internal/imports/sortimports.go | 280 + .../x/tools/internal/imports/zstdlib.go | 10733 ++++++ .../x/tools/internal/lsp/fuzzy/input.go | 168 + .../x/tools/internal/lsp/fuzzy/matcher.go | 398 + .../internal/packagesinternal/packages.go | 28 + .../tools/internal/typesinternal/errorcode.go | 1368 + .../typesinternal/errorcode_string.go | 153 + .../x/tools/internal/typesinternal/types.go | 45 + vendor/golang.org/x/xerrors/LICENSE | 27 + vendor/golang.org/x/xerrors/PATENTS | 22 + vendor/golang.org/x/xerrors/README | 2 + vendor/golang.org/x/xerrors/adaptor.go | 193 + vendor/golang.org/x/xerrors/codereview.cfg | 1 + vendor/golang.org/x/xerrors/doc.go | 22 + vendor/golang.org/x/xerrors/errors.go | 33 + vendor/golang.org/x/xerrors/fmt.go | 187 + vendor/golang.org/x/xerrors/format.go | 34 + vendor/golang.org/x/xerrors/frame.go | 56 + vendor/golang.org/x/xerrors/go.mod | 3 + .../golang.org/x/xerrors/internal/internal.go | 8 + vendor/golang.org/x/xerrors/wrap.go | 106 + vendor/gopkg.in/ini.v1/.gitignore | 6 + vendor/gopkg.in/ini.v1/LICENSE | 191 + vendor/gopkg.in/ini.v1/Makefile | 15 + vendor/gopkg.in/ini.v1/README.md | 43 + vendor/gopkg.in/ini.v1/codecov.yml | 9 + vendor/gopkg.in/ini.v1/data_source.go | 76 + vendor/gopkg.in/ini.v1/deprecated.go | 25 + vendor/gopkg.in/ini.v1/error.go | 34 + vendor/gopkg.in/ini.v1/file.go | 517 + vendor/gopkg.in/ini.v1/helper.go | 24 + vendor/gopkg.in/ini.v1/ini.go | 176 + vendor/gopkg.in/ini.v1/key.go | 829 + vendor/gopkg.in/ini.v1/parser.go | 535 + vendor/gopkg.in/ini.v1/section.go | 256 + vendor/gopkg.in/ini.v1/struct.go | 747 + vendor/honnef.co/go/tools/LICENSE | 20 + vendor/honnef.co/go/tools/LICENSE-THIRD-PARTY | 121 + .../honnef.co/go/tools/analysis/code/code.go | 294 + .../honnef.co/go/tools/analysis/code/visit.go | 51 + .../honnef.co/go/tools/analysis/edit/edit.go | 74 + .../go/tools/analysis/facts/deprecated.go | 145 + .../go/tools/analysis/facts/directives.go | 20 + .../go/tools/analysis/facts/generated.go | 97 + .../tools/analysis/facts/nilness/nilness.go | 242 + .../go/tools/analysis/facts/purity.go | 178 + .../go/tools/analysis/facts/token.go | 24 + .../analysis/facts/typedness/typedness.go | 242 + .../honnef.co/go/tools/analysis/lint/lint.go | 198 + .../go/tools/analysis/report/report.go | 247 + vendor/honnef.co/go/tools/config/config.go | 245 + vendor/honnef.co/go/tools/config/example.conf | 10 + .../go/tools/go/ast/astutil/upstream.go | 20 + .../honnef.co/go/tools/go/ast/astutil/util.go | 299 + vendor/honnef.co/go/tools/go/ir/LICENSE | 28 + vendor/honnef.co/go/tools/go/ir/UPSTREAM | 9 + vendor/honnef.co/go/tools/go/ir/blockopt.go | 209 + vendor/honnef.co/go/tools/go/ir/builder.go | 2479 ++ vendor/honnef.co/go/tools/go/ir/const.go | 153 + vendor/honnef.co/go/tools/go/ir/create.go | 288 + vendor/honnef.co/go/tools/go/ir/doc.go | 129 + vendor/honnef.co/go/tools/go/ir/dom.go | 469 + vendor/honnef.co/go/tools/go/ir/emit.go | 461 + vendor/honnef.co/go/tools/go/ir/exits.go | 317 + vendor/honnef.co/go/tools/go/ir/func.go | 983 + vendor/honnef.co/go/tools/go/ir/html.go | 1124 + vendor/honnef.co/go/tools/go/ir/identical.go | 7 + .../honnef.co/go/tools/go/ir/identical_17.go | 
7 + .../honnef.co/go/tools/go/ir/irutil/load.go | 184 + .../honnef.co/go/tools/go/ir/irutil/loops.go | 54 + .../honnef.co/go/tools/go/ir/irutil/stub.go | 32 + .../honnef.co/go/tools/go/ir/irutil/switch.go | 264 + .../go/tools/go/ir/irutil/terminates.go | 70 + .../honnef.co/go/tools/go/ir/irutil/util.go | 165 + .../honnef.co/go/tools/go/ir/irutil/visit.go | 79 + vendor/honnef.co/go/tools/go/ir/lift.go | 1075 + vendor/honnef.co/go/tools/go/ir/lvalue.go | 116 + vendor/honnef.co/go/tools/go/ir/methods.go | 239 + vendor/honnef.co/go/tools/go/ir/mode.go | 98 + vendor/honnef.co/go/tools/go/ir/print.go | 472 + vendor/honnef.co/go/tools/go/ir/sanity.go | 561 + vendor/honnef.co/go/tools/go/ir/source.go | 270 + vendor/honnef.co/go/tools/go/ir/ssa.go | 1898 + .../honnef.co/go/tools/go/ir/staticcheck.conf | 3 + vendor/honnef.co/go/tools/go/ir/util.go | 89 + vendor/honnef.co/go/tools/go/ir/wrappers.go | 290 + vendor/honnef.co/go/tools/go/ir/write.go | 5 + .../go/tools/go/types/typeutil/upstream.go | 25 + .../go/tools/go/types/typeutil/util.go | 131 + .../tools/internal/passes/buildir/buildir.go | 107 + .../go/tools/internal/sharedcheck/lint.go | 206 + vendor/honnef.co/go/tools/knowledge/arg.go | 64 + .../go/tools/knowledge/deprecated.go | 217 + vendor/honnef.co/go/tools/pattern/convert.go | 242 + vendor/honnef.co/go/tools/pattern/doc.go | 273 + vendor/honnef.co/go/tools/pattern/fuzz.go | 50 + vendor/honnef.co/go/tools/pattern/lexer.go | 221 + vendor/honnef.co/go/tools/pattern/match.go | 547 + vendor/honnef.co/go/tools/pattern/parser.go | 463 + vendor/honnef.co/go/tools/pattern/pattern.go | 496 + vendor/honnef.co/go/tools/printf/fuzz.go | 11 + vendor/honnef.co/go/tools/printf/printf.go | 197 + vendor/honnef.co/go/tools/simple/analysis.go | 152 + vendor/honnef.co/go/tools/simple/doc.go | 499 + vendor/honnef.co/go/tools/simple/lint.go | 1938 + .../go/tools/staticcheck/analysis.go | 302 + .../go/tools/staticcheck/buildtag.go | 21 + vendor/honnef.co/go/tools/staticcheck/doc.go | 1126 + vendor/honnef.co/go/tools/staticcheck/lint.go | 4793 +++ .../honnef.co/go/tools/staticcheck/rules.go | 291 + .../go/tools/staticcheck/structtag.go | 58 + .../honnef.co/go/tools/stylecheck/analysis.go | 83 + vendor/honnef.co/go/tools/stylecheck/doc.go | 237 + vendor/honnef.co/go/tools/stylecheck/lint.go | 936 + vendor/honnef.co/go/tools/stylecheck/names.go | 281 + vendor/honnef.co/go/tools/unused/edge.go | 55 + .../go/tools/unused/edgekind_string.go | 109 + .../honnef.co/go/tools/unused/implements.go | 82 + .../go/tools/unused/typemap/identical.go | 149 + .../honnef.co/go/tools/unused/typemap/map.go | 318 + vendor/honnef.co/go/tools/unused/unused.go | 1716 + vendor/modules.txt | 410 + vendor/mvdan.cc/gofumpt/LICENSE | 27 + vendor/mvdan.cc/gofumpt/LICENSE.google | 27 + vendor/mvdan.cc/gofumpt/format/format.go | 703 + vendor/mvdan.cc/interfacer/LICENSE | 27 + vendor/mvdan.cc/interfacer/check/cache.go | 50 + vendor/mvdan.cc/interfacer/check/check.go | 462 + vendor/mvdan.cc/interfacer/check/types.go | 170 + vendor/mvdan.cc/lint/.travis.yml | 7 + vendor/mvdan.cc/lint/LICENSE | 27 + vendor/mvdan.cc/lint/README.md | 27 + vendor/mvdan.cc/lint/lint.go | 28 + vendor/mvdan.cc/unparam/LICENSE | 27 + vendor/mvdan.cc/unparam/check/check.go | 979 + 1662 files changed, 298368 insertions(+), 14 deletions(-) create mode 100644 .golangci.yml create mode 100644 vendor/4d63.com/gochecknoglobals/LICENSE create mode 100644 vendor/4d63.com/gochecknoglobals/checknoglobals/check_no_globals.go create mode 100644 
vendor/github.com/Antonboom/errname/LICENSE create mode 100644 vendor/github.com/Antonboom/errname/pkg/analyzer/analyzer.go create mode 100644 vendor/github.com/Antonboom/errname/pkg/analyzer/facts.go create mode 100644 vendor/github.com/BurntSushi/toml/.gitignore create mode 100644 vendor/github.com/BurntSushi/toml/COMPATIBLE create mode 100644 vendor/github.com/BurntSushi/toml/COPYING create mode 100644 vendor/github.com/BurntSushi/toml/README.md create mode 100644 vendor/github.com/BurntSushi/toml/decode.go create mode 100644 vendor/github.com/BurntSushi/toml/decode_go116.go create mode 100644 vendor/github.com/BurntSushi/toml/decode_meta.go create mode 100644 vendor/github.com/BurntSushi/toml/deprecated.go create mode 100644 vendor/github.com/BurntSushi/toml/doc.go create mode 100644 vendor/github.com/BurntSushi/toml/encode.go create mode 100644 vendor/github.com/BurntSushi/toml/go.mod create mode 100644 vendor/github.com/BurntSushi/toml/go.sum create mode 100644 vendor/github.com/BurntSushi/toml/internal/tz.go create mode 100644 vendor/github.com/BurntSushi/toml/lex.go create mode 100644 vendor/github.com/BurntSushi/toml/parse.go create mode 100644 vendor/github.com/BurntSushi/toml/type_check.go create mode 100644 vendor/github.com/BurntSushi/toml/type_fields.go create mode 100644 vendor/github.com/Djarvur/go-err113/.gitignore create mode 100644 vendor/github.com/Djarvur/go-err113/.golangci.yml create mode 100644 vendor/github.com/Djarvur/go-err113/.travis.yml create mode 100644 vendor/github.com/Djarvur/go-err113/LICENSE create mode 100644 vendor/github.com/Djarvur/go-err113/README.adoc create mode 100644 vendor/github.com/Djarvur/go-err113/comparison.go create mode 100644 vendor/github.com/Djarvur/go-err113/definition.go create mode 100644 vendor/github.com/Djarvur/go-err113/err113.go create mode 100644 vendor/github.com/Djarvur/go-err113/go.mod create mode 100644 vendor/github.com/Djarvur/go-err113/go.sum create mode 100644 vendor/github.com/Masterminds/semver/.travis.yml create mode 100644 vendor/github.com/Masterminds/semver/CHANGELOG.md create mode 100644 vendor/github.com/Masterminds/semver/LICENSE.txt create mode 100644 vendor/github.com/Masterminds/semver/Makefile create mode 100644 vendor/github.com/Masterminds/semver/README.md create mode 100644 vendor/github.com/Masterminds/semver/appveyor.yml create mode 100644 vendor/github.com/Masterminds/semver/collection.go create mode 100644 vendor/github.com/Masterminds/semver/constraints.go create mode 100644 vendor/github.com/Masterminds/semver/doc.go create mode 100644 vendor/github.com/Masterminds/semver/version.go create mode 100644 vendor/github.com/Masterminds/semver/version_fuzz.go create mode 100644 vendor/github.com/OpenPeeDeeP/depguard/.gitignore create mode 100644 vendor/github.com/OpenPeeDeeP/depguard/LICENSE create mode 100644 vendor/github.com/OpenPeeDeeP/depguard/README.md create mode 100644 vendor/github.com/OpenPeeDeeP/depguard/depguard.go create mode 100644 vendor/github.com/OpenPeeDeeP/depguard/go.mod create mode 100644 vendor/github.com/OpenPeeDeeP/depguard/go.sum create mode 100644 vendor/github.com/alexkohler/prealloc/LICENSE create mode 100644 vendor/github.com/alexkohler/prealloc/pkg/prealloc.go create mode 100644 vendor/github.com/ashanbrown/forbidigo/LICENSE create mode 100644 vendor/github.com/ashanbrown/forbidigo/forbidigo/config_options.go create mode 100644 vendor/github.com/ashanbrown/forbidigo/forbidigo/forbidigo.go create mode 100644 vendor/github.com/ashanbrown/makezero/LICENSE create mode 100644 
vendor/github.com/ashanbrown/makezero/makezero/makezero.go create mode 100644 vendor/github.com/bkielbasa/cyclop/LICENSE create mode 100644 vendor/github.com/bkielbasa/cyclop/pkg/analyzer/analyzer.go create mode 100644 vendor/github.com/bombsimon/wsl/v3/.gitignore create mode 100644 vendor/github.com/bombsimon/wsl/v3/.travis.yml create mode 100644 vendor/github.com/bombsimon/wsl/v3/LICENSE create mode 100644 vendor/github.com/bombsimon/wsl/v3/README.md create mode 100644 vendor/github.com/bombsimon/wsl/v3/go.mod create mode 100644 vendor/github.com/bombsimon/wsl/v3/go.sum create mode 100644 vendor/github.com/bombsimon/wsl/v3/wsl.go create mode 100644 vendor/github.com/charithe/durationcheck/.gitignore create mode 100644 vendor/github.com/charithe/durationcheck/LICENSE create mode 100644 vendor/github.com/charithe/durationcheck/Makefile create mode 100644 vendor/github.com/charithe/durationcheck/README.md create mode 100644 vendor/github.com/charithe/durationcheck/durationcheck.go create mode 100644 vendor/github.com/charithe/durationcheck/go.mod create mode 100644 vendor/github.com/charithe/durationcheck/go.sum create mode 100644 vendor/github.com/chavacava/garif/.gitignore create mode 100644 vendor/github.com/chavacava/garif/LICENSE create mode 100644 vendor/github.com/chavacava/garif/README.md create mode 100644 vendor/github.com/chavacava/garif/constructors.go create mode 100644 vendor/github.com/chavacava/garif/decorators.go create mode 100644 vendor/github.com/chavacava/garif/doc.go create mode 100644 vendor/github.com/chavacava/garif/go.mod create mode 100644 vendor/github.com/chavacava/garif/go.sum create mode 100644 vendor/github.com/chavacava/garif/io.go create mode 100644 vendor/github.com/chavacava/garif/models.go create mode 100644 vendor/github.com/daixiang0/gci/LICENSE create mode 100644 vendor/github.com/daixiang0/gci/pkg/gci/gci.go create mode 100644 vendor/github.com/daixiang0/gci/pkg/gci/std.go create mode 100644 vendor/github.com/denis-tingajkin/go-header/.gitignore create mode 100644 vendor/github.com/denis-tingajkin/go-header/.go-header.yml create mode 100644 vendor/github.com/denis-tingajkin/go-header/LICENSE create mode 100644 vendor/github.com/denis-tingajkin/go-header/README.md create mode 100644 vendor/github.com/denis-tingajkin/go-header/analyzer.go create mode 100644 vendor/github.com/denis-tingajkin/go-header/config.go create mode 100644 vendor/github.com/denis-tingajkin/go-header/go.mod create mode 100644 vendor/github.com/denis-tingajkin/go-header/go.sum create mode 100644 vendor/github.com/denis-tingajkin/go-header/issue.go create mode 100644 vendor/github.com/denis-tingajkin/go-header/location.go create mode 100644 vendor/github.com/denis-tingajkin/go-header/option.go create mode 100644 vendor/github.com/denis-tingajkin/go-header/reader.go create mode 100644 vendor/github.com/denis-tingajkin/go-header/value.go create mode 100644 vendor/github.com/esimonov/ifshort/LICENSE create mode 100644 vendor/github.com/esimonov/ifshort/pkg/analyzer/analyzer.go create mode 100644 vendor/github.com/esimonov/ifshort/pkg/analyzer/occurrences.go create mode 100644 vendor/github.com/ettle/strcase/.gitignore create mode 100644 vendor/github.com/ettle/strcase/.golangci.yml create mode 100644 vendor/github.com/ettle/strcase/.readme.tmpl create mode 100644 vendor/github.com/ettle/strcase/LICENSE create mode 100644 vendor/github.com/ettle/strcase/Makefile create mode 100644 vendor/github.com/ettle/strcase/README.md create mode 100644 vendor/github.com/ettle/strcase/caser.go 
create mode 100644 vendor/github.com/ettle/strcase/convert.go create mode 100644 vendor/github.com/ettle/strcase/doc.go create mode 100644 vendor/github.com/ettle/strcase/go.mod create mode 100644 vendor/github.com/ettle/strcase/go.sum create mode 100644 vendor/github.com/ettle/strcase/initialism.go create mode 100644 vendor/github.com/ettle/strcase/split.go create mode 100644 vendor/github.com/ettle/strcase/strcase.go create mode 100644 vendor/github.com/ettle/strcase/unicode.go create mode 100644 vendor/github.com/fatih/color/LICENSE.md create mode 100644 vendor/github.com/fatih/color/README.md create mode 100644 vendor/github.com/fatih/color/color.go create mode 100644 vendor/github.com/fatih/color/doc.go create mode 100644 vendor/github.com/fatih/color/go.mod create mode 100644 vendor/github.com/fatih/color/go.sum create mode 100644 vendor/github.com/fatih/structtag/LICENSE create mode 100644 vendor/github.com/fatih/structtag/README.md create mode 100644 vendor/github.com/fatih/structtag/go.mod create mode 100644 vendor/github.com/fatih/structtag/tags.go create mode 100644 vendor/github.com/fsnotify/fsnotify/.editorconfig create mode 100644 vendor/github.com/fsnotify/fsnotify/.gitattributes create mode 100644 vendor/github.com/fsnotify/fsnotify/.gitignore create mode 100644 vendor/github.com/fsnotify/fsnotify/.travis.yml create mode 100644 vendor/github.com/fsnotify/fsnotify/AUTHORS create mode 100644 vendor/github.com/fsnotify/fsnotify/CHANGELOG.md create mode 100644 vendor/github.com/fsnotify/fsnotify/CONTRIBUTING.md create mode 100644 vendor/github.com/fsnotify/fsnotify/LICENSE create mode 100644 vendor/github.com/fsnotify/fsnotify/README.md create mode 100644 vendor/github.com/fsnotify/fsnotify/fen.go create mode 100644 vendor/github.com/fsnotify/fsnotify/fsnotify.go create mode 100644 vendor/github.com/fsnotify/fsnotify/go.mod create mode 100644 vendor/github.com/fsnotify/fsnotify/go.sum create mode 100644 vendor/github.com/fsnotify/fsnotify/inotify.go create mode 100644 vendor/github.com/fsnotify/fsnotify/inotify_poller.go create mode 100644 vendor/github.com/fsnotify/fsnotify/kqueue.go create mode 100644 vendor/github.com/fsnotify/fsnotify/open_mode_bsd.go create mode 100644 vendor/github.com/fsnotify/fsnotify/open_mode_darwin.go create mode 100644 vendor/github.com/fsnotify/fsnotify/windows.go create mode 100644 vendor/github.com/fzipp/gocyclo/CHANGELOG.md create mode 100644 vendor/github.com/fzipp/gocyclo/CONTRIBUTORS create mode 100644 vendor/github.com/fzipp/gocyclo/LICENSE create mode 100644 vendor/github.com/fzipp/gocyclo/README.md create mode 100644 vendor/github.com/fzipp/gocyclo/analyze.go create mode 100644 vendor/github.com/fzipp/gocyclo/complexity.go create mode 100644 vendor/github.com/fzipp/gocyclo/directives.go create mode 100644 vendor/github.com/fzipp/gocyclo/go.mod create mode 100644 vendor/github.com/fzipp/gocyclo/stats.go create mode 100644 vendor/github.com/go-critic/go-critic/LICENSE create mode 100644 vendor/github.com/go-critic/go-critic/checkers/appendAssign_checker.go create mode 100644 vendor/github.com/go-critic/go-critic/checkers/appendCombine_checker.go create mode 100644 vendor/github.com/go-critic/go-critic/checkers/argOrder_checker.go create mode 100644 vendor/github.com/go-critic/go-critic/checkers/assignOp_checker.go create mode 100644 vendor/github.com/go-critic/go-critic/checkers/badCall_checker.go create mode 100644 vendor/github.com/go-critic/go-critic/checkers/badCond_checker.go create mode 100644 
vendor/github.com/go-critic/go-critic/checkers/badLock_checker.go create mode 100644 vendor/github.com/go-critic/go-critic/checkers/badRegexp_checker.go create mode 100644 vendor/github.com/go-critic/go-critic/checkers/boolExprSimplify_checker.go create mode 100644 vendor/github.com/go-critic/go-critic/checkers/builtinShadowDecl_checker.go create mode 100644 vendor/github.com/go-critic/go-critic/checkers/builtinShadow_checker.go create mode 100644 vendor/github.com/go-critic/go-critic/checkers/captLocal_checker.go create mode 100644 vendor/github.com/go-critic/go-critic/checkers/caseOrder_checker.go create mode 100644 vendor/github.com/go-critic/go-critic/checkers/checkers.go create mode 100644 vendor/github.com/go-critic/go-critic/checkers/codegenComment_checker.go create mode 100644 vendor/github.com/go-critic/go-critic/checkers/commentFormatting_checker.go create mode 100644 vendor/github.com/go-critic/go-critic/checkers/commentedOutCode_checker.go create mode 100644 vendor/github.com/go-critic/go-critic/checkers/commentedOutImport_checker.go create mode 100644 vendor/github.com/go-critic/go-critic/checkers/defaultCaseOrder_checker.go create mode 100644 vendor/github.com/go-critic/go-critic/checkers/deferUnlambda_checker.go create mode 100644 vendor/github.com/go-critic/go-critic/checkers/deprecatedComment_checker.go create mode 100644 vendor/github.com/go-critic/go-critic/checkers/docStub_checker.go create mode 100644 vendor/github.com/go-critic/go-critic/checkers/dupArg_checker.go create mode 100644 vendor/github.com/go-critic/go-critic/checkers/dupBranchBody_checker.go create mode 100644 vendor/github.com/go-critic/go-critic/checkers/dupCase_checker.go create mode 100644 vendor/github.com/go-critic/go-critic/checkers/dupImports_checker.go create mode 100644 vendor/github.com/go-critic/go-critic/checkers/dupSubExpr_checker.go create mode 100644 vendor/github.com/go-critic/go-critic/checkers/elseif_checker.go create mode 100644 vendor/github.com/go-critic/go-critic/checkers/emptyFallthrough_checker.go create mode 100644 vendor/github.com/go-critic/go-critic/checkers/emptyStringTest_checker.go create mode 100644 vendor/github.com/go-critic/go-critic/checkers/equalFold_checker.go create mode 100644 vendor/github.com/go-critic/go-critic/checkers/evalOrder_checker.go create mode 100644 vendor/github.com/go-critic/go-critic/checkers/exitAfterDefer_checker.go create mode 100644 vendor/github.com/go-critic/go-critic/checkers/filepathJoin_checker.go create mode 100644 vendor/github.com/go-critic/go-critic/checkers/flagDeref_checker.go create mode 100644 vendor/github.com/go-critic/go-critic/checkers/flagName_checker.go create mode 100644 vendor/github.com/go-critic/go-critic/checkers/hexLiteral_checker.go create mode 100644 vendor/github.com/go-critic/go-critic/checkers/hugeParam_checker.go create mode 100644 vendor/github.com/go-critic/go-critic/checkers/ifElseChain_checker.go create mode 100644 vendor/github.com/go-critic/go-critic/checkers/importShadow_checker.go create mode 100644 vendor/github.com/go-critic/go-critic/checkers/indexAlloc_checker.go create mode 100644 vendor/github.com/go-critic/go-critic/checkers/initClause_checker.go create mode 100644 vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/comment_walker.go create mode 100644 vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/doc_comment_walker.go create mode 100644 vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/expr_walker.go create mode 100644 
vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/func_decl_walker.go create mode 100644 vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/local_comment_walker.go create mode 100644 vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/local_def_visitor.go create mode 100644 vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/local_def_walker.go create mode 100644 vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/local_expr_walker.go create mode 100644 vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/stmt_list_walker.go create mode 100644 vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/stmt_walker.go create mode 100644 vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/type_expr_walker.go create mode 100644 vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/visitor.go create mode 100644 vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/walk_handler.go create mode 100644 vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/walker.go create mode 100644 vendor/github.com/go-critic/go-critic/checkers/internal/lintutil/astfind.go create mode 100644 vendor/github.com/go-critic/go-critic/checkers/internal/lintutil/astflow.go create mode 100644 vendor/github.com/go-critic/go-critic/checkers/internal/lintutil/astset.go create mode 100644 vendor/github.com/go-critic/go-critic/checkers/internal/lintutil/zero_value.go create mode 100644 vendor/github.com/go-critic/go-critic/checkers/mapKey_checker.go create mode 100644 vendor/github.com/go-critic/go-critic/checkers/methodExprCall_checker.go create mode 100644 vendor/github.com/go-critic/go-critic/checkers/nestingReduce_checker.go create mode 100644 vendor/github.com/go-critic/go-critic/checkers/newDeref_checker.go create mode 100644 vendor/github.com/go-critic/go-critic/checkers/nilValReturn_checker.go create mode 100644 vendor/github.com/go-critic/go-critic/checkers/octalLiteral_checker.go create mode 100644 vendor/github.com/go-critic/go-critic/checkers/offBy1_checker.go create mode 100644 vendor/github.com/go-critic/go-critic/checkers/paramTypeCombine_checker.go create mode 100644 vendor/github.com/go-critic/go-critic/checkers/ptrToRefParam_checker.go create mode 100644 vendor/github.com/go-critic/go-critic/checkers/rangeExprCopy_checker.go create mode 100644 vendor/github.com/go-critic/go-critic/checkers/rangeValCopy_checker.go create mode 100644 vendor/github.com/go-critic/go-critic/checkers/regexpMust_checker.go create mode 100644 vendor/github.com/go-critic/go-critic/checkers/regexpPattern_checker.go create mode 100644 vendor/github.com/go-critic/go-critic/checkers/regexpSimplify_checker.go create mode 100644 vendor/github.com/go-critic/go-critic/checkers/ruleguard_checker.go create mode 100644 vendor/github.com/go-critic/go-critic/checkers/singleCaseSwitch_checker.go create mode 100644 vendor/github.com/go-critic/go-critic/checkers/sloppyLen_checker.go create mode 100644 vendor/github.com/go-critic/go-critic/checkers/sloppyReassign_checker.go create mode 100644 vendor/github.com/go-critic/go-critic/checkers/sloppyTypeAssert_checker.go create mode 100644 vendor/github.com/go-critic/go-critic/checkers/sortSlice_checker.go create mode 100644 vendor/github.com/go-critic/go-critic/checkers/sqlQuery_checker.go create mode 100644 vendor/github.com/go-critic/go-critic/checkers/stringXbytes_checker.go create mode 100644 vendor/github.com/go-critic/go-critic/checkers/switchTrue_checker.go create mode 100644 
vendor/github.com/go-critic/go-critic/checkers/tooManyResults_checker.go create mode 100644 vendor/github.com/go-critic/go-critic/checkers/truncateCmp_checker.go create mode 100644 vendor/github.com/go-critic/go-critic/checkers/typeAssertChain_checker.go create mode 100644 vendor/github.com/go-critic/go-critic/checkers/typeDefFirst_checker.go create mode 100644 vendor/github.com/go-critic/go-critic/checkers/typeSwitchVar_checker.go create mode 100644 vendor/github.com/go-critic/go-critic/checkers/typeUnparen_checker.go create mode 100644 vendor/github.com/go-critic/go-critic/checkers/underef_checker.go create mode 100644 vendor/github.com/go-critic/go-critic/checkers/unlabelStmt_checker.go create mode 100644 vendor/github.com/go-critic/go-critic/checkers/unlambda_checker.go create mode 100644 vendor/github.com/go-critic/go-critic/checkers/unnamedResult_checker.go create mode 100644 vendor/github.com/go-critic/go-critic/checkers/unnecessaryBlock_checker.go create mode 100644 vendor/github.com/go-critic/go-critic/checkers/unnecessaryDefer_checker.go create mode 100644 vendor/github.com/go-critic/go-critic/checkers/unslice_checker.go create mode 100644 vendor/github.com/go-critic/go-critic/checkers/utils.go create mode 100644 vendor/github.com/go-critic/go-critic/checkers/valSwap_checker.go create mode 100644 vendor/github.com/go-critic/go-critic/checkers/weakCond_checker.go create mode 100644 vendor/github.com/go-critic/go-critic/checkers/whyNoLint_checker.go create mode 100644 vendor/github.com/go-critic/go-critic/checkers/wrapperFunc_checker.go create mode 100644 vendor/github.com/go-critic/go-critic/checkers/yodaStyleExpr_checker.go create mode 100644 vendor/github.com/go-critic/go-critic/framework/linter/checkers_db.go create mode 100644 vendor/github.com/go-critic/go-critic/framework/linter/context.go create mode 100644 vendor/github.com/go-critic/go-critic/framework/linter/lintpack.go create mode 100644 vendor/github.com/go-toolsmith/astcast/.travis.yml create mode 100644 vendor/github.com/go-toolsmith/astcast/LICENSE create mode 100644 vendor/github.com/go-toolsmith/astcast/README.md create mode 100644 vendor/github.com/go-toolsmith/astcast/astcast.go create mode 100644 vendor/github.com/go-toolsmith/astcast/go.mod create mode 100644 vendor/github.com/go-toolsmith/astcast/go.sum create mode 100644 vendor/github.com/go-toolsmith/astcopy/.travis.yml create mode 100644 vendor/github.com/go-toolsmith/astcopy/LICENSE create mode 100644 vendor/github.com/go-toolsmith/astcopy/README.md create mode 100644 vendor/github.com/go-toolsmith/astcopy/astcopy.go create mode 100644 vendor/github.com/go-toolsmith/astcopy/go.mod create mode 100644 vendor/github.com/go-toolsmith/astcopy/go.sum create mode 100644 vendor/github.com/go-toolsmith/astequal/.gitignore create mode 100644 vendor/github.com/go-toolsmith/astequal/.travis.yml create mode 100644 vendor/github.com/go-toolsmith/astequal/LICENSE create mode 100644 vendor/github.com/go-toolsmith/astequal/README.md create mode 100644 vendor/github.com/go-toolsmith/astequal/astequal.go create mode 100644 vendor/github.com/go-toolsmith/astequal/go.mod create mode 100644 vendor/github.com/go-toolsmith/astfmt/.travis.yml create mode 100644 vendor/github.com/go-toolsmith/astfmt/LICENSE create mode 100644 vendor/github.com/go-toolsmith/astfmt/README.md create mode 100644 vendor/github.com/go-toolsmith/astfmt/astfmt.go create mode 100644 vendor/github.com/go-toolsmith/astfmt/go.mod create mode 100644 vendor/github.com/go-toolsmith/astfmt/go.sum create mode 
100644 vendor/github.com/go-toolsmith/astp/.gitignore create mode 100644 vendor/github.com/go-toolsmith/astp/.travis.yml create mode 100644 vendor/github.com/go-toolsmith/astp/LICENSE create mode 100644 vendor/github.com/go-toolsmith/astp/README.md create mode 100644 vendor/github.com/go-toolsmith/astp/decl.go create mode 100644 vendor/github.com/go-toolsmith/astp/expr.go create mode 100644 vendor/github.com/go-toolsmith/astp/go.mod create mode 100644 vendor/github.com/go-toolsmith/astp/go.sum create mode 100644 vendor/github.com/go-toolsmith/astp/stmt.go create mode 100644 vendor/github.com/go-toolsmith/strparse/.travis.yml create mode 100644 vendor/github.com/go-toolsmith/strparse/LICENSE create mode 100644 vendor/github.com/go-toolsmith/strparse/README.md create mode 100644 vendor/github.com/go-toolsmith/strparse/go.mod create mode 100644 vendor/github.com/go-toolsmith/strparse/strparse.go create mode 100644 vendor/github.com/go-toolsmith/typep/.travis.yml create mode 100644 vendor/github.com/go-toolsmith/typep/LICENSE create mode 100644 vendor/github.com/go-toolsmith/typep/README.md create mode 100644 vendor/github.com/go-toolsmith/typep/doc.go create mode 100644 vendor/github.com/go-toolsmith/typep/go.mod create mode 100644 vendor/github.com/go-toolsmith/typep/predicates.go create mode 100644 vendor/github.com/go-toolsmith/typep/safeExpr.go create mode 100644 vendor/github.com/go-toolsmith/typep/simplePredicates.go create mode 100644 vendor/github.com/go-xmlfmt/xmlfmt/LICENSE create mode 100644 vendor/github.com/go-xmlfmt/xmlfmt/README.md create mode 100644 vendor/github.com/go-xmlfmt/xmlfmt/xmlfmt.go create mode 100644 vendor/github.com/gobwas/glob/.gitignore create mode 100644 vendor/github.com/gobwas/glob/.travis.yml create mode 100644 vendor/github.com/gobwas/glob/LICENSE create mode 100644 vendor/github.com/gobwas/glob/bench.sh create mode 100644 vendor/github.com/gobwas/glob/compiler/compiler.go create mode 100644 vendor/github.com/gobwas/glob/glob.go create mode 100644 vendor/github.com/gobwas/glob/match/any.go create mode 100644 vendor/github.com/gobwas/glob/match/any_of.go create mode 100644 vendor/github.com/gobwas/glob/match/btree.go create mode 100644 vendor/github.com/gobwas/glob/match/contains.go create mode 100644 vendor/github.com/gobwas/glob/match/every_of.go create mode 100644 vendor/github.com/gobwas/glob/match/list.go create mode 100644 vendor/github.com/gobwas/glob/match/match.go create mode 100644 vendor/github.com/gobwas/glob/match/max.go create mode 100644 vendor/github.com/gobwas/glob/match/min.go create mode 100644 vendor/github.com/gobwas/glob/match/nothing.go create mode 100644 vendor/github.com/gobwas/glob/match/prefix.go create mode 100644 vendor/github.com/gobwas/glob/match/prefix_any.go create mode 100644 vendor/github.com/gobwas/glob/match/prefix_suffix.go create mode 100644 vendor/github.com/gobwas/glob/match/range.go create mode 100644 vendor/github.com/gobwas/glob/match/row.go create mode 100644 vendor/github.com/gobwas/glob/match/segments.go create mode 100644 vendor/github.com/gobwas/glob/match/single.go create mode 100644 vendor/github.com/gobwas/glob/match/suffix.go create mode 100644 vendor/github.com/gobwas/glob/match/suffix_any.go create mode 100644 vendor/github.com/gobwas/glob/match/super.go create mode 100644 vendor/github.com/gobwas/glob/match/text.go create mode 100644 vendor/github.com/gobwas/glob/readme.md create mode 100644 vendor/github.com/gobwas/glob/syntax/ast/ast.go create mode 100644 
vendor/github.com/gobwas/glob/syntax/ast/parser.go create mode 100644 vendor/github.com/gobwas/glob/syntax/lexer/lexer.go create mode 100644 vendor/github.com/gobwas/glob/syntax/lexer/token.go create mode 100644 vendor/github.com/gobwas/glob/syntax/syntax.go create mode 100644 vendor/github.com/gobwas/glob/util/runes/runes.go create mode 100644 vendor/github.com/gobwas/glob/util/strings/strings.go create mode 100644 vendor/github.com/gofrs/flock/.gitignore create mode 100644 vendor/github.com/gofrs/flock/.travis.yml create mode 100644 vendor/github.com/gofrs/flock/LICENSE create mode 100644 vendor/github.com/gofrs/flock/README.md create mode 100644 vendor/github.com/gofrs/flock/appveyor.yml create mode 100644 vendor/github.com/gofrs/flock/flock.go create mode 100644 vendor/github.com/gofrs/flock/flock_aix.go create mode 100644 vendor/github.com/gofrs/flock/flock_unix.go create mode 100644 vendor/github.com/gofrs/flock/flock_winapi.go create mode 100644 vendor/github.com/gofrs/flock/flock_windows.go create mode 100644 vendor/github.com/golangci/check/LICENSE create mode 100644 vendor/github.com/golangci/check/cmd/structcheck/structcheck.go create mode 100644 vendor/github.com/golangci/check/cmd/varcheck/varcheck.go create mode 100644 vendor/github.com/golangci/dupl/.travis.yml create mode 100644 vendor/github.com/golangci/dupl/LICENSE create mode 100644 vendor/github.com/golangci/dupl/README.md create mode 100644 vendor/github.com/golangci/dupl/job/buildtree.go create mode 100644 vendor/github.com/golangci/dupl/job/parse.go create mode 100644 vendor/github.com/golangci/dupl/main.go create mode 100644 vendor/github.com/golangci/dupl/printer/html.go create mode 100644 vendor/github.com/golangci/dupl/printer/plumbing.go create mode 100644 vendor/github.com/golangci/dupl/printer/printer.go create mode 100644 vendor/github.com/golangci/dupl/printer/text.go create mode 100644 vendor/github.com/golangci/dupl/suffixtree/dupl.go create mode 100644 vendor/github.com/golangci/dupl/suffixtree/suffixtree.go create mode 100644 vendor/github.com/golangci/dupl/syntax/golang/golang.go create mode 100644 vendor/github.com/golangci/dupl/syntax/syntax.go create mode 100644 vendor/github.com/golangci/go-misc/LICENSE create mode 100644 vendor/github.com/golangci/go-misc/deadcode/README.md create mode 100644 vendor/github.com/golangci/go-misc/deadcode/deadcode.go create mode 100644 vendor/github.com/golangci/gofmt/gofmt/LICENSE create mode 100644 vendor/github.com/golangci/gofmt/gofmt/doc.go create mode 100644 vendor/github.com/golangci/gofmt/gofmt/gofmt.go create mode 100644 vendor/github.com/golangci/gofmt/gofmt/golangci.go create mode 100644 vendor/github.com/golangci/gofmt/gofmt/internal.go create mode 100644 vendor/github.com/golangci/gofmt/gofmt/rewrite.go create mode 100644 vendor/github.com/golangci/gofmt/gofmt/simplify.go create mode 100644 vendor/github.com/golangci/gofmt/goimports/LICENSE create mode 100644 vendor/github.com/golangci/gofmt/goimports/goimports.go create mode 100644 vendor/github.com/golangci/gofmt/goimports/golangci.go create mode 100644 vendor/github.com/golangci/golangci-lint/LICENSE create mode 100644 vendor/github.com/golangci/golangci-lint/cmd/golangci-lint/main.go create mode 100644 vendor/github.com/golangci/golangci-lint/cmd/golangci-lint/mod_version.go create mode 100644 vendor/github.com/golangci/golangci-lint/internal/cache/cache.go create mode 100644 vendor/github.com/golangci/golangci-lint/internal/cache/default.go create mode 100644 
vendor/github.com/golangci/golangci-lint/internal/cache/hash.go create mode 100644 vendor/github.com/golangci/golangci-lint/internal/errorutil/errors.go create mode 100644 vendor/github.com/golangci/golangci-lint/internal/pkgcache/pkgcache.go create mode 100644 vendor/github.com/golangci/golangci-lint/internal/renameio/renameio.go create mode 100644 vendor/github.com/golangci/golangci-lint/internal/robustio/robustio.go create mode 100644 vendor/github.com/golangci/golangci-lint/internal/robustio/robustio_darwin.go create mode 100644 vendor/github.com/golangci/golangci-lint/internal/robustio/robustio_flaky.go create mode 100644 vendor/github.com/golangci/golangci-lint/internal/robustio/robustio_other.go create mode 100644 vendor/github.com/golangci/golangci-lint/internal/robustio/robustio_windows.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/commands/cache.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/commands/config.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/commands/executor.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/commands/help.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/commands/linters.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/commands/root.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/commands/run.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/commands/version.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/config/config.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/config/issues.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/config/linters.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/config/linters_settings.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/config/linters_settings_gocritic.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/config/output.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/config/reader.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/config/run.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/config/severity.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/exitcodes/exitcodes.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/fsutils/filecache.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/fsutils/fsutils.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/fsutils/linecache.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/golinters/asciicheck.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/golinters/bodyclose.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/golinters/cyclop.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/golinters/deadcode.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/golinters/depguard.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/golinters/dogsled.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/golinters/dupl.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/golinters/durationcheck.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/golinters/errcheck.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/golinters/errname.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/golinters/errorlint.go create mode 100644 
vendor/github.com/golangci/golangci-lint/pkg/golinters/exhaustive.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/golinters/exhaustivestruct.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/golinters/exportloopref.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/golinters/forbidigo.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/golinters/forcetypeassert.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/golinters/funlen.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/golinters/gci.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/adapters.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/errors.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/issue.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/linter.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/load/guard.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/metalinter.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/runner.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/runner_action.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/runner_facts.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/runner_loadingpackage.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/runners.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/golinters/gochecknoglobals.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/golinters/gochecknoinits.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/golinters/gocognit.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/golinters/goconst.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/golinters/gocritic.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/golinters/gocyclo.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/golinters/godot.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/golinters/godox.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/golinters/goerr113.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/golinters/gofmt.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/golinters/gofmt_common.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/golinters/gofumpt.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/golinters/goheader.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/golinters/goimports.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/golinters/golint.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/golinters/gomnd.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/golinters/gomoddirectives.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/golinters/gomodguard.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/golinters/goprintffuncname.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/golinters/gosec.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/golinters/gosimple.go create mode 100644 
vendor/github.com/golangci/golangci-lint/pkg/golinters/govet.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/golinters/ifshort.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/golinters/importas.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/golinters/ineffassign.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/golinters/interfacer.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/golinters/lll.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/golinters/makezero.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/golinters/maligned.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/golinters/misspell.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/golinters/nakedret.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/golinters/nestif.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/golinters/nilerr.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/golinters/nlreturn.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/golinters/noctx.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/golinters/nolintlint.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/golinters/nolintlint/README.md create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/golinters/nolintlint/nolintlint.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/golinters/paralleltest.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/golinters/prealloc.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/golinters/predeclared.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/golinters/promlinter.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/golinters/revive.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/golinters/rowerrcheck.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/golinters/scopelint.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/golinters/sqlclosecheck.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/golinters/staticcheck.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/golinters/staticcheck_common.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/golinters/structcheck.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/golinters/stylecheck.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/golinters/tagliatelle.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/golinters/testpackage.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/golinters/thelper.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/golinters/tparallel.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/golinters/typecheck.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/golinters/unconvert.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/golinters/unparam.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/golinters/unused.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/golinters/util.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/golinters/varcheck.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/golinters/wastedassign.go create mode 100644 
vendor/github.com/golangci/golangci-lint/pkg/golinters/whitespace.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/golinters/wrapcheck.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/golinters/wsl.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/goutil/env.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/lint/linter/config.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/lint/linter/context.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/lint/linter/linter.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/lint/lintersdb/enabled_set.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/lint/lintersdb/manager.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/lint/lintersdb/validator.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/lint/load.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/lint/runner.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/logutils/log.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/logutils/logutils.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/logutils/mock.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/logutils/out.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/logutils/stderr_log.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/packages/errors.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/packages/skip.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/packages/util.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/printers/checkstyle.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/printers/codeclimate.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/printers/github.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/printers/html.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/printers/json.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/printers/junitxml.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/printers/printer.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/printers/tab.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/printers/text.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/report/data.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/report/log.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/result/issue.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/result/processors/autogenerated_exclude.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/result/processors/base_rule.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/result/processors/cgo.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/result/processors/diff.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/result/processors/exclude.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/result/processors/exclude_rules.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/result/processors/filename_unadjuster.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/result/processors/fixer.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/result/processors/identifier_marker.go create mode 100644 
vendor/github.com/golangci/golangci-lint/pkg/result/processors/max_from_linter.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/result/processors/max_per_file_from_linter.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/result/processors/max_same_issues.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/result/processors/nolint.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/result/processors/path_prefixer.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/result/processors/path_prettifier.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/result/processors/path_shortener.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/result/processors/processor.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/result/processors/severity_rules.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/result/processors/skip_dirs.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/result/processors/skip_files.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/result/processors/sort_results.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/result/processors/source_code.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/result/processors/uniq_by_line.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/result/processors/utils.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/sliceutil/sliceutil.go create mode 100644 vendor/github.com/golangci/golangci-lint/pkg/timeutils/stopwatch.go create mode 100644 vendor/github.com/golangci/lint-1/.travis.yml create mode 100644 vendor/github.com/golangci/lint-1/CONTRIBUTING.md create mode 100644 vendor/github.com/golangci/lint-1/LICENSE create mode 100644 vendor/github.com/golangci/lint-1/README.md create mode 100644 vendor/github.com/golangci/lint-1/go.mod create mode 100644 vendor/github.com/golangci/lint-1/go.sum create mode 100644 vendor/github.com/golangci/lint-1/lint.go create mode 100644 vendor/github.com/golangci/maligned/LICENSE create mode 100644 vendor/github.com/golangci/maligned/README create mode 100644 vendor/github.com/golangci/maligned/maligned.go create mode 100644 vendor/github.com/golangci/misspell/.gitignore create mode 100644 vendor/github.com/golangci/misspell/.travis.yml create mode 100644 vendor/github.com/golangci/misspell/Dockerfile create mode 100644 vendor/github.com/golangci/misspell/Gopkg.lock create mode 100644 vendor/github.com/golangci/misspell/Gopkg.toml create mode 100644 vendor/github.com/golangci/misspell/LICENSE create mode 100644 vendor/github.com/golangci/misspell/Makefile create mode 100644 vendor/github.com/golangci/misspell/README.md create mode 100644 vendor/github.com/golangci/misspell/RELEASE-HOWTO.md create mode 100644 vendor/github.com/golangci/misspell/ascii.go create mode 100644 vendor/github.com/golangci/misspell/case.go create mode 100644 vendor/github.com/golangci/misspell/goreleaser.yml create mode 100644 vendor/github.com/golangci/misspell/install-misspell.sh create mode 100644 vendor/github.com/golangci/misspell/legal.go create mode 100644 vendor/github.com/golangci/misspell/mime.go create mode 100644 vendor/github.com/golangci/misspell/notwords.go create mode 100644 vendor/github.com/golangci/misspell/replace.go create mode 100644 vendor/github.com/golangci/misspell/stringreplacer.go create mode 100644 vendor/github.com/golangci/misspell/stringreplacer_test.gox create mode 
100644 vendor/github.com/golangci/misspell/url.go create mode 100644 vendor/github.com/golangci/misspell/words.go create mode 100644 vendor/github.com/golangci/revgrep/.gitignore create mode 100644 vendor/github.com/golangci/revgrep/.travis.yml create mode 100644 vendor/github.com/golangci/revgrep/LICENSE create mode 100644 vendor/github.com/golangci/revgrep/README.md create mode 100644 vendor/github.com/golangci/revgrep/go.mod create mode 100644 vendor/github.com/golangci/revgrep/go.sum create mode 100644 vendor/github.com/golangci/revgrep/revgrep.go create mode 100644 vendor/github.com/golangci/unconvert/LICENSE create mode 100644 vendor/github.com/golangci/unconvert/README create mode 100644 vendor/github.com/golangci/unconvert/unconvert.go create mode 100644 vendor/github.com/google/go-cmp/LICENSE create mode 100644 vendor/github.com/google/go-cmp/cmp/compare.go create mode 100644 vendor/github.com/google/go-cmp/cmp/export_panic.go create mode 100644 vendor/github.com/google/go-cmp/cmp/export_unsafe.go create mode 100644 vendor/github.com/google/go-cmp/cmp/internal/diff/debug_disable.go create mode 100644 vendor/github.com/google/go-cmp/cmp/internal/diff/debug_enable.go create mode 100644 vendor/github.com/google/go-cmp/cmp/internal/diff/diff.go create mode 100644 vendor/github.com/google/go-cmp/cmp/internal/flags/flags.go create mode 100644 vendor/github.com/google/go-cmp/cmp/internal/flags/toolchain_legacy.go create mode 100644 vendor/github.com/google/go-cmp/cmp/internal/flags/toolchain_recent.go create mode 100644 vendor/github.com/google/go-cmp/cmp/internal/function/func.go create mode 100644 vendor/github.com/google/go-cmp/cmp/internal/value/name.go create mode 100644 vendor/github.com/google/go-cmp/cmp/internal/value/pointer_purego.go create mode 100644 vendor/github.com/google/go-cmp/cmp/internal/value/pointer_unsafe.go create mode 100644 vendor/github.com/google/go-cmp/cmp/internal/value/sort.go create mode 100644 vendor/github.com/google/go-cmp/cmp/internal/value/zero.go create mode 100644 vendor/github.com/google/go-cmp/cmp/options.go create mode 100644 vendor/github.com/google/go-cmp/cmp/path.go create mode 100644 vendor/github.com/google/go-cmp/cmp/report.go create mode 100644 vendor/github.com/google/go-cmp/cmp/report_compare.go create mode 100644 vendor/github.com/google/go-cmp/cmp/report_references.go create mode 100644 vendor/github.com/google/go-cmp/cmp/report_reflect.go create mode 100644 vendor/github.com/google/go-cmp/cmp/report_slices.go create mode 100644 vendor/github.com/google/go-cmp/cmp/report_text.go create mode 100644 vendor/github.com/google/go-cmp/cmp/report_value.go create mode 100644 vendor/github.com/gordonklaus/ineffassign/LICENSE create mode 100644 vendor/github.com/gordonklaus/ineffassign/pkg/ineffassign/ineffassign.go create mode 100644 vendor/github.com/gostaticanalysis/analysisutil/LICENSE create mode 100644 vendor/github.com/gostaticanalysis/analysisutil/README.md create mode 100644 vendor/github.com/gostaticanalysis/analysisutil/call.go create mode 100644 vendor/github.com/gostaticanalysis/analysisutil/diagnostic.go create mode 100644 vendor/github.com/gostaticanalysis/analysisutil/file.go create mode 100644 vendor/github.com/gostaticanalysis/analysisutil/go.mod create mode 100644 vendor/github.com/gostaticanalysis/analysisutil/go.sum create mode 100644 vendor/github.com/gostaticanalysis/analysisutil/pkg.go create mode 100644 vendor/github.com/gostaticanalysis/analysisutil/ssa.go create mode 100644 
vendor/github.com/gostaticanalysis/analysisutil/ssainspect.go create mode 100644 vendor/github.com/gostaticanalysis/analysisutil/types.go create mode 100644 vendor/github.com/gostaticanalysis/comment/LICENSE create mode 100644 vendor/github.com/gostaticanalysis/comment/README.md create mode 100644 vendor/github.com/gostaticanalysis/comment/comment.go create mode 100644 vendor/github.com/gostaticanalysis/comment/go.mod create mode 100644 vendor/github.com/gostaticanalysis/comment/go.sum create mode 100644 vendor/github.com/gostaticanalysis/comment/passes/commentmap/commentmap.go create mode 100644 vendor/github.com/gostaticanalysis/forcetypeassert/.reviewdog.yml create mode 100644 vendor/github.com/gostaticanalysis/forcetypeassert/LICENSE create mode 100644 vendor/github.com/gostaticanalysis/forcetypeassert/README.md create mode 100644 vendor/github.com/gostaticanalysis/forcetypeassert/forcetypeassert.go create mode 100644 vendor/github.com/gostaticanalysis/forcetypeassert/go.mod create mode 100644 vendor/github.com/gostaticanalysis/forcetypeassert/go.sum create mode 100644 vendor/github.com/gostaticanalysis/nilerr/LICENSE create mode 100644 vendor/github.com/gostaticanalysis/nilerr/README.md create mode 100644 vendor/github.com/gostaticanalysis/nilerr/go.mod create mode 100644 vendor/github.com/gostaticanalysis/nilerr/go.sum create mode 100644 vendor/github.com/gostaticanalysis/nilerr/nilerr.go create mode 100644 vendor/github.com/hashicorp/errwrap/LICENSE create mode 100644 vendor/github.com/hashicorp/errwrap/README.md create mode 100644 vendor/github.com/hashicorp/errwrap/errwrap.go create mode 100644 vendor/github.com/hashicorp/errwrap/go.mod create mode 100644 vendor/github.com/hashicorp/go-multierror/LICENSE create mode 100644 vendor/github.com/hashicorp/go-multierror/Makefile create mode 100644 vendor/github.com/hashicorp/go-multierror/README.md create mode 100644 vendor/github.com/hashicorp/go-multierror/append.go create mode 100644 vendor/github.com/hashicorp/go-multierror/flatten.go create mode 100644 vendor/github.com/hashicorp/go-multierror/format.go create mode 100644 vendor/github.com/hashicorp/go-multierror/go.mod create mode 100644 vendor/github.com/hashicorp/go-multierror/go.sum create mode 100644 vendor/github.com/hashicorp/go-multierror/group.go create mode 100644 vendor/github.com/hashicorp/go-multierror/multierror.go create mode 100644 vendor/github.com/hashicorp/go-multierror/prefix.go create mode 100644 vendor/github.com/hashicorp/go-multierror/sort.go create mode 100644 vendor/github.com/hashicorp/hcl/.gitignore create mode 100644 vendor/github.com/hashicorp/hcl/.travis.yml create mode 100644 vendor/github.com/hashicorp/hcl/LICENSE create mode 100644 vendor/github.com/hashicorp/hcl/Makefile create mode 100644 vendor/github.com/hashicorp/hcl/README.md create mode 100644 vendor/github.com/hashicorp/hcl/appveyor.yml create mode 100644 vendor/github.com/hashicorp/hcl/decoder.go create mode 100644 vendor/github.com/hashicorp/hcl/go.mod create mode 100644 vendor/github.com/hashicorp/hcl/go.sum create mode 100644 vendor/github.com/hashicorp/hcl/hcl.go create mode 100644 vendor/github.com/hashicorp/hcl/hcl/ast/ast.go create mode 100644 vendor/github.com/hashicorp/hcl/hcl/ast/walk.go create mode 100644 vendor/github.com/hashicorp/hcl/hcl/parser/error.go create mode 100644 vendor/github.com/hashicorp/hcl/hcl/parser/parser.go create mode 100644 vendor/github.com/hashicorp/hcl/hcl/printer/nodes.go create mode 100644 vendor/github.com/hashicorp/hcl/hcl/printer/printer.go create 
mode 100644 vendor/github.com/hashicorp/hcl/hcl/scanner/scanner.go create mode 100644 vendor/github.com/hashicorp/hcl/hcl/strconv/quote.go create mode 100644 vendor/github.com/hashicorp/hcl/hcl/token/position.go create mode 100644 vendor/github.com/hashicorp/hcl/hcl/token/token.go create mode 100644 vendor/github.com/hashicorp/hcl/json/parser/flatten.go create mode 100644 vendor/github.com/hashicorp/hcl/json/parser/parser.go create mode 100644 vendor/github.com/hashicorp/hcl/json/scanner/scanner.go create mode 100644 vendor/github.com/hashicorp/hcl/json/token/position.go create mode 100644 vendor/github.com/hashicorp/hcl/json/token/token.go create mode 100644 vendor/github.com/hashicorp/hcl/lex.go create mode 100644 vendor/github.com/hashicorp/hcl/parse.go create mode 100644 vendor/github.com/inconshreveable/mousetrap/LICENSE create mode 100644 vendor/github.com/inconshreveable/mousetrap/README.md create mode 100644 vendor/github.com/inconshreveable/mousetrap/trap_others.go create mode 100644 vendor/github.com/inconshreveable/mousetrap/trap_windows.go create mode 100644 vendor/github.com/inconshreveable/mousetrap/trap_windows_1.4.go create mode 100644 vendor/github.com/jgautheron/goconst/LICENSE create mode 100644 vendor/github.com/jgautheron/goconst/README.md create mode 100644 vendor/github.com/jgautheron/goconst/api.go create mode 100644 vendor/github.com/jgautheron/goconst/go.mod create mode 100644 vendor/github.com/jgautheron/goconst/parser.go create mode 100644 vendor/github.com/jgautheron/goconst/visitor.go create mode 100644 vendor/github.com/jingyugao/rowserrcheck/LICENSE create mode 100644 vendor/github.com/jingyugao/rowserrcheck/passes/rowserr/rowserr.go create mode 100644 vendor/github.com/jirfag/go-printf-func-name/LICENSE create mode 100644 vendor/github.com/jirfag/go-printf-func-name/pkg/analyzer/analyzer.go create mode 100644 vendor/github.com/julz/importas/.gitignore create mode 100644 vendor/github.com/julz/importas/LICENSE create mode 100644 vendor/github.com/julz/importas/README.md create mode 100644 vendor/github.com/julz/importas/analyzer.go create mode 100644 vendor/github.com/julz/importas/config.go create mode 100644 vendor/github.com/julz/importas/flags.go create mode 100644 vendor/github.com/julz/importas/go.mod create mode 100644 vendor/github.com/julz/importas/go.sum create mode 100644 vendor/github.com/kisielk/errcheck/LICENSE create mode 100644 vendor/github.com/kisielk/errcheck/errcheck/embedded_walker.go create mode 100644 vendor/github.com/kisielk/errcheck/errcheck/errcheck.go create mode 100644 vendor/github.com/kisielk/errcheck/errcheck/tags.go create mode 100644 vendor/github.com/kisielk/errcheck/errcheck/tags_compat.go create mode 100644 vendor/github.com/kisielk/gotool/.travis.yml create mode 100644 vendor/github.com/kisielk/gotool/LEGAL create mode 100644 vendor/github.com/kisielk/gotool/LICENSE create mode 100644 vendor/github.com/kisielk/gotool/README.md create mode 100644 vendor/github.com/kisielk/gotool/go.mod create mode 100644 vendor/github.com/kisielk/gotool/go13.go create mode 100644 vendor/github.com/kisielk/gotool/go14-15.go create mode 100644 vendor/github.com/kisielk/gotool/go16-18.go create mode 100644 vendor/github.com/kisielk/gotool/internal/load/path.go create mode 100644 vendor/github.com/kisielk/gotool/internal/load/pkg.go create mode 100644 vendor/github.com/kisielk/gotool/internal/load/search.go create mode 100644 vendor/github.com/kisielk/gotool/match.go create mode 100644 vendor/github.com/kisielk/gotool/match18.go create mode 
100644 vendor/github.com/kisielk/gotool/tool.go create mode 100644 vendor/github.com/kulti/thelper/LICENSE create mode 100644 vendor/github.com/kulti/thelper/pkg/analyzer/analyzer.go create mode 100644 vendor/github.com/kulti/thelper/pkg/analyzer/report.go create mode 100644 vendor/github.com/kunwardeep/paralleltest/LICENSE create mode 100644 vendor/github.com/kunwardeep/paralleltest/pkg/paralleltest/paralleltest.go create mode 100644 vendor/github.com/kyoh86/exportloopref/.golangci.yml create mode 100644 vendor/github.com/kyoh86/exportloopref/.goreleaser.yml create mode 100644 vendor/github.com/kyoh86/exportloopref/LICENSE create mode 100644 vendor/github.com/kyoh86/exportloopref/Makefile create mode 100644 vendor/github.com/kyoh86/exportloopref/README.md create mode 100644 vendor/github.com/kyoh86/exportloopref/exportloopref.go create mode 100644 vendor/github.com/kyoh86/exportloopref/go.mod create mode 100644 vendor/github.com/kyoh86/exportloopref/go.sum create mode 100644 vendor/github.com/ldez/gomoddirectives/.gitignore create mode 100644 vendor/github.com/ldez/gomoddirectives/.golangci.yml create mode 100644 vendor/github.com/ldez/gomoddirectives/LICENSE create mode 100644 vendor/github.com/ldez/gomoddirectives/Makefile create mode 100644 vendor/github.com/ldez/gomoddirectives/go.mod create mode 100644 vendor/github.com/ldez/gomoddirectives/go.sum create mode 100644 vendor/github.com/ldez/gomoddirectives/gomoddirectives.go create mode 100644 vendor/github.com/ldez/gomoddirectives/module.go create mode 100644 vendor/github.com/ldez/gomoddirectives/readme.md create mode 100644 vendor/github.com/ldez/tagliatelle/.gitignore create mode 100644 vendor/github.com/ldez/tagliatelle/.golangci.yml create mode 100644 vendor/github.com/ldez/tagliatelle/LICENSE create mode 100644 vendor/github.com/ldez/tagliatelle/Makefile create mode 100644 vendor/github.com/ldez/tagliatelle/go.mod create mode 100644 vendor/github.com/ldez/tagliatelle/go.sum create mode 100644 vendor/github.com/ldez/tagliatelle/readme.md create mode 100644 vendor/github.com/ldez/tagliatelle/tagliatelle.go create mode 100644 vendor/github.com/magiconair/properties/.gitignore create mode 100644 vendor/github.com/magiconair/properties/.travis.yml create mode 100644 vendor/github.com/magiconair/properties/CHANGELOG.md create mode 100644 vendor/github.com/magiconair/properties/LICENSE.md create mode 100644 vendor/github.com/magiconair/properties/README.md create mode 100644 vendor/github.com/magiconair/properties/decode.go create mode 100644 vendor/github.com/magiconair/properties/doc.go create mode 100644 vendor/github.com/magiconair/properties/go.mod create mode 100644 vendor/github.com/magiconair/properties/integrate.go create mode 100644 vendor/github.com/magiconair/properties/lex.go create mode 100644 vendor/github.com/magiconair/properties/load.go create mode 100644 vendor/github.com/magiconair/properties/parser.go create mode 100644 vendor/github.com/magiconair/properties/properties.go create mode 100644 vendor/github.com/magiconair/properties/rangecheck.go create mode 100644 vendor/github.com/maratori/testpackage/LICENSE create mode 100644 vendor/github.com/maratori/testpackage/pkg/testpackage/testpackage.go create mode 100644 vendor/github.com/matoous/godox/.gitignore create mode 100644 vendor/github.com/matoous/godox/.golangci.yml create mode 100644 vendor/github.com/matoous/godox/.revive.toml create mode 100644 vendor/github.com/matoous/godox/LICENSE create mode 100644 vendor/github.com/matoous/godox/README.md create mode 
100644 vendor/github.com/matoous/godox/go.mod create mode 100644 vendor/github.com/matoous/godox/go.sum create mode 100644 vendor/github.com/matoous/godox/godox.go create mode 100644 vendor/github.com/mattn/go-colorable/.travis.yml create mode 100644 vendor/github.com/mattn/go-colorable/LICENSE create mode 100644 vendor/github.com/mattn/go-colorable/README.md create mode 100644 vendor/github.com/mattn/go-colorable/colorable_appengine.go create mode 100644 vendor/github.com/mattn/go-colorable/colorable_others.go create mode 100644 vendor/github.com/mattn/go-colorable/colorable_windows.go create mode 100644 vendor/github.com/mattn/go-colorable/go.mod create mode 100644 vendor/github.com/mattn/go-colorable/go.sum create mode 100644 vendor/github.com/mattn/go-colorable/go.test.sh create mode 100644 vendor/github.com/mattn/go-colorable/noncolorable.go create mode 100644 vendor/github.com/mattn/go-runewidth/.travis.yml create mode 100644 vendor/github.com/mattn/go-runewidth/LICENSE create mode 100644 vendor/github.com/mattn/go-runewidth/README.md create mode 100644 vendor/github.com/mattn/go-runewidth/go.mod create mode 100644 vendor/github.com/mattn/go-runewidth/go.test.sh create mode 100644 vendor/github.com/mattn/go-runewidth/runewidth.go create mode 100644 vendor/github.com/mattn/go-runewidth/runewidth_appengine.go create mode 100644 vendor/github.com/mattn/go-runewidth/runewidth_js.go create mode 100644 vendor/github.com/mattn/go-runewidth/runewidth_posix.go create mode 100644 vendor/github.com/mattn/go-runewidth/runewidth_table.go create mode 100644 vendor/github.com/mattn/go-runewidth/runewidth_windows.go create mode 100644 vendor/github.com/mbilski/exhaustivestruct/LICENSE create mode 100644 vendor/github.com/mbilski/exhaustivestruct/pkg/analyzer/analyzer.go create mode 100644 vendor/github.com/mgechev/dots/.travis.yml create mode 100644 vendor/github.com/mgechev/dots/LICENSE create mode 100644 vendor/github.com/mgechev/dots/README.md create mode 100644 vendor/github.com/mgechev/dots/resolve.go create mode 100644 vendor/github.com/mgechev/revive/LICENSE create mode 100644 vendor/github.com/mgechev/revive/config/config.go create mode 100644 vendor/github.com/mgechev/revive/formatter/checkstyle.go create mode 100644 vendor/github.com/mgechev/revive/formatter/default.go create mode 100644 vendor/github.com/mgechev/revive/formatter/friendly.go create mode 100644 vendor/github.com/mgechev/revive/formatter/json.go create mode 100644 vendor/github.com/mgechev/revive/formatter/ndjson.go create mode 100644 vendor/github.com/mgechev/revive/formatter/plain.go create mode 100644 vendor/github.com/mgechev/revive/formatter/sarif.go create mode 100644 vendor/github.com/mgechev/revive/formatter/severity.go create mode 100644 vendor/github.com/mgechev/revive/formatter/stylish.go create mode 100644 vendor/github.com/mgechev/revive/formatter/unix.go create mode 100644 vendor/github.com/mgechev/revive/lint/config.go create mode 100644 vendor/github.com/mgechev/revive/lint/failure.go create mode 100644 vendor/github.com/mgechev/revive/lint/file.go create mode 100644 vendor/github.com/mgechev/revive/lint/formatter.go create mode 100644 vendor/github.com/mgechev/revive/lint/linter.go create mode 100644 vendor/github.com/mgechev/revive/lint/package.go create mode 100644 vendor/github.com/mgechev/revive/lint/rule.go create mode 100644 vendor/github.com/mgechev/revive/lint/utils.go create mode 100644 vendor/github.com/mgechev/revive/rule/add-constant.go create mode 100644 
vendor/github.com/mgechev/revive/rule/argument-limit.go create mode 100644 vendor/github.com/mgechev/revive/rule/atomic.go create mode 100644 vendor/github.com/mgechev/revive/rule/bare-return.go create mode 100644 vendor/github.com/mgechev/revive/rule/blank-imports.go create mode 100644 vendor/github.com/mgechev/revive/rule/bool-literal-in-expr.go create mode 100644 vendor/github.com/mgechev/revive/rule/call-to-gc.go create mode 100644 vendor/github.com/mgechev/revive/rule/cognitive-complexity.go create mode 100644 vendor/github.com/mgechev/revive/rule/confusing-naming.go create mode 100644 vendor/github.com/mgechev/revive/rule/confusing-results.go create mode 100644 vendor/github.com/mgechev/revive/rule/constant-logical-expr.go create mode 100644 vendor/github.com/mgechev/revive/rule/context-as-argument.go create mode 100644 vendor/github.com/mgechev/revive/rule/context-keys-type.go create mode 100644 vendor/github.com/mgechev/revive/rule/cyclomatic.go create mode 100644 vendor/github.com/mgechev/revive/rule/deep-exit.go create mode 100644 vendor/github.com/mgechev/revive/rule/defer.go create mode 100644 vendor/github.com/mgechev/revive/rule/dot-imports.go create mode 100644 vendor/github.com/mgechev/revive/rule/duplicated-imports.go create mode 100644 vendor/github.com/mgechev/revive/rule/early-return.go create mode 100644 vendor/github.com/mgechev/revive/rule/empty-block.go create mode 100644 vendor/github.com/mgechev/revive/rule/empty-lines.go create mode 100644 vendor/github.com/mgechev/revive/rule/error-naming.go create mode 100644 vendor/github.com/mgechev/revive/rule/error-return.go create mode 100644 vendor/github.com/mgechev/revive/rule/error-strings.go create mode 100644 vendor/github.com/mgechev/revive/rule/errorf.go create mode 100644 vendor/github.com/mgechev/revive/rule/exported.go create mode 100644 vendor/github.com/mgechev/revive/rule/file-header.go create mode 100644 vendor/github.com/mgechev/revive/rule/flag-param.go create mode 100644 vendor/github.com/mgechev/revive/rule/function-length.go create mode 100644 vendor/github.com/mgechev/revive/rule/function-result-limit.go create mode 100644 vendor/github.com/mgechev/revive/rule/get-return.go create mode 100644 vendor/github.com/mgechev/revive/rule/identical-branches.go create mode 100644 vendor/github.com/mgechev/revive/rule/if-return.go create mode 100644 vendor/github.com/mgechev/revive/rule/import-shadowing.go create mode 100644 vendor/github.com/mgechev/revive/rule/imports-blacklist.go create mode 100644 vendor/github.com/mgechev/revive/rule/increment-decrement.go create mode 100644 vendor/github.com/mgechev/revive/rule/indent-error-flow.go create mode 100644 vendor/github.com/mgechev/revive/rule/line-length-limit.go create mode 100644 vendor/github.com/mgechev/revive/rule/max-public-structs.go create mode 100644 vendor/github.com/mgechev/revive/rule/modifies-param.go create mode 100644 vendor/github.com/mgechev/revive/rule/modifies-value-receiver.go create mode 100644 vendor/github.com/mgechev/revive/rule/nested-structs.go create mode 100644 vendor/github.com/mgechev/revive/rule/package-comments.go create mode 100644 vendor/github.com/mgechev/revive/rule/range-val-address.go create mode 100644 vendor/github.com/mgechev/revive/rule/range-val-in-closure.go create mode 100644 vendor/github.com/mgechev/revive/rule/range.go create mode 100644 vendor/github.com/mgechev/revive/rule/receiver-naming.go create mode 100644 vendor/github.com/mgechev/revive/rule/redefines-builtin-id.go create mode 100644 
vendor/github.com/mgechev/revive/rule/string-format.go create mode 100644 vendor/github.com/mgechev/revive/rule/string-of-int.go create mode 100644 vendor/github.com/mgechev/revive/rule/struct-tag.go create mode 100644 vendor/github.com/mgechev/revive/rule/superfluous-else.go create mode 100644 vendor/github.com/mgechev/revive/rule/time-naming.go create mode 100644 vendor/github.com/mgechev/revive/rule/unconditional-recursion.go create mode 100644 vendor/github.com/mgechev/revive/rule/unexported-naming.go create mode 100644 vendor/github.com/mgechev/revive/rule/unexported-return.go create mode 100644 vendor/github.com/mgechev/revive/rule/unhandled-error.go create mode 100644 vendor/github.com/mgechev/revive/rule/unnecessary-stmt.go create mode 100644 vendor/github.com/mgechev/revive/rule/unreachable-code.go create mode 100644 vendor/github.com/mgechev/revive/rule/unused-param.go create mode 100644 vendor/github.com/mgechev/revive/rule/unused-receiver.go create mode 100644 vendor/github.com/mgechev/revive/rule/useless-break.go create mode 100644 vendor/github.com/mgechev/revive/rule/utils.go create mode 100644 vendor/github.com/mgechev/revive/rule/var-declarations.go create mode 100644 vendor/github.com/mgechev/revive/rule/var-naming.go create mode 100644 vendor/github.com/mgechev/revive/rule/waitgroup-by-value.go create mode 100644 vendor/github.com/mitchellh/go-homedir/LICENSE create mode 100644 vendor/github.com/mitchellh/go-homedir/README.md create mode 100644 vendor/github.com/mitchellh/go-homedir/go.mod create mode 100644 vendor/github.com/mitchellh/go-homedir/homedir.go create mode 100644 vendor/github.com/mitchellh/mapstructure/CHANGELOG.md create mode 100644 vendor/github.com/mitchellh/mapstructure/LICENSE create mode 100644 vendor/github.com/mitchellh/mapstructure/README.md create mode 100644 vendor/github.com/mitchellh/mapstructure/decode_hooks.go create mode 100644 vendor/github.com/mitchellh/mapstructure/error.go create mode 100644 vendor/github.com/mitchellh/mapstructure/go.mod create mode 100644 vendor/github.com/mitchellh/mapstructure/mapstructure.go create mode 100644 vendor/github.com/moricho/tparallel/.gitignore create mode 100644 vendor/github.com/moricho/tparallel/.goreleaser.yml create mode 100644 vendor/github.com/moricho/tparallel/LICENSE create mode 100644 vendor/github.com/moricho/tparallel/Makefile create mode 100644 vendor/github.com/moricho/tparallel/README.md create mode 100644 vendor/github.com/moricho/tparallel/go.mod create mode 100644 vendor/github.com/moricho/tparallel/go.sum create mode 100644 vendor/github.com/moricho/tparallel/pkg/ssafunc/ssafunc.go create mode 100644 vendor/github.com/moricho/tparallel/pkg/ssainstr/ssainstr.go create mode 100644 vendor/github.com/moricho/tparallel/testmap.go create mode 100644 vendor/github.com/moricho/tparallel/tparallel.go create mode 100644 vendor/github.com/nakabonne/nestif/.gitignore create mode 100644 vendor/github.com/nakabonne/nestif/LICENSE create mode 100644 vendor/github.com/nakabonne/nestif/README.md create mode 100644 vendor/github.com/nakabonne/nestif/go.mod create mode 100644 vendor/github.com/nakabonne/nestif/go.sum create mode 100644 vendor/github.com/nakabonne/nestif/nestif.go create mode 100644 vendor/github.com/nbutton23/zxcvbn-go/.gitignore create mode 100644 vendor/github.com/nbutton23/zxcvbn-go/LICENSE.txt create mode 100644 vendor/github.com/nbutton23/zxcvbn-go/Makefile create mode 100644 vendor/github.com/nbutton23/zxcvbn-go/README.md create mode 100644 
vendor/github.com/nbutton23/zxcvbn-go/adjacency/adjcmartix.go create mode 100644 vendor/github.com/nbutton23/zxcvbn-go/data/bindata.go create mode 100644 vendor/github.com/nbutton23/zxcvbn-go/entropy/entropyCalculator.go create mode 100644 vendor/github.com/nbutton23/zxcvbn-go/frequency/frequency.go create mode 100644 vendor/github.com/nbutton23/zxcvbn-go/go.mod create mode 100644 vendor/github.com/nbutton23/zxcvbn-go/go.sum create mode 100644 vendor/github.com/nbutton23/zxcvbn-go/match/match.go create mode 100644 vendor/github.com/nbutton23/zxcvbn-go/matching/dateMatchers.go create mode 100644 vendor/github.com/nbutton23/zxcvbn-go/matching/dictionaryMatch.go create mode 100644 vendor/github.com/nbutton23/zxcvbn-go/matching/leet.go create mode 100644 vendor/github.com/nbutton23/zxcvbn-go/matching/matching.go create mode 100644 vendor/github.com/nbutton23/zxcvbn-go/matching/repeatMatch.go create mode 100644 vendor/github.com/nbutton23/zxcvbn-go/matching/sequenceMatch.go create mode 100644 vendor/github.com/nbutton23/zxcvbn-go/matching/spatialMatch.go create mode 100644 vendor/github.com/nbutton23/zxcvbn-go/scoring/scoring.go create mode 100644 vendor/github.com/nbutton23/zxcvbn-go/utils/math/mathutils.go create mode 100644 vendor/github.com/nbutton23/zxcvbn-go/zxcvbn.go create mode 100644 vendor/github.com/nishanths/exhaustive/.gitignore create mode 100644 vendor/github.com/nishanths/exhaustive/.travis.yml create mode 100644 vendor/github.com/nishanths/exhaustive/LICENSE create mode 100644 vendor/github.com/nishanths/exhaustive/README.md create mode 100644 vendor/github.com/nishanths/exhaustive/enum.go create mode 100644 vendor/github.com/nishanths/exhaustive/exhaustive.go create mode 100644 vendor/github.com/nishanths/exhaustive/generated.go create mode 100644 vendor/github.com/nishanths/exhaustive/go.mod create mode 100644 vendor/github.com/nishanths/exhaustive/go.sum create mode 100644 vendor/github.com/nishanths/exhaustive/regexp_flag.go create mode 100644 vendor/github.com/nishanths/exhaustive/switch.go create mode 100644 vendor/github.com/nishanths/predeclared/LICENSE create mode 100644 vendor/github.com/nishanths/predeclared/passes/predeclared/go18.go create mode 100644 vendor/github.com/nishanths/predeclared/passes/predeclared/pre_go18.go create mode 100644 vendor/github.com/nishanths/predeclared/passes/predeclared/predeclared.go create mode 100644 vendor/github.com/olekukonko/tablewriter/.gitignore create mode 100644 vendor/github.com/olekukonko/tablewriter/.travis.yml create mode 100644 vendor/github.com/olekukonko/tablewriter/LICENSE.md create mode 100644 vendor/github.com/olekukonko/tablewriter/README.md create mode 100644 vendor/github.com/olekukonko/tablewriter/csv.go create mode 100644 vendor/github.com/olekukonko/tablewriter/go.mod create mode 100644 vendor/github.com/olekukonko/tablewriter/go.sum create mode 100644 vendor/github.com/olekukonko/tablewriter/table.go create mode 100644 vendor/github.com/olekukonko/tablewriter/table_with_color.go create mode 100644 vendor/github.com/olekukonko/tablewriter/util.go create mode 100644 vendor/github.com/olekukonko/tablewriter/wrap.go create mode 100644 vendor/github.com/pelletier/go-toml/.dockerignore create mode 100644 vendor/github.com/pelletier/go-toml/.gitignore create mode 100644 vendor/github.com/pelletier/go-toml/CONTRIBUTING.md create mode 100644 vendor/github.com/pelletier/go-toml/Dockerfile create mode 100644 vendor/github.com/pelletier/go-toml/LICENSE create mode 100644 vendor/github.com/pelletier/go-toml/Makefile create 
mode 100644 vendor/github.com/pelletier/go-toml/PULL_REQUEST_TEMPLATE.md create mode 100644 vendor/github.com/pelletier/go-toml/README.md create mode 100644 vendor/github.com/pelletier/go-toml/azure-pipelines.yml create mode 100644 vendor/github.com/pelletier/go-toml/benchmark.sh create mode 100644 vendor/github.com/pelletier/go-toml/doc.go create mode 100644 vendor/github.com/pelletier/go-toml/example-crlf.toml create mode 100644 vendor/github.com/pelletier/go-toml/example.toml create mode 100644 vendor/github.com/pelletier/go-toml/fuzz.go create mode 100644 vendor/github.com/pelletier/go-toml/fuzz.sh create mode 100644 vendor/github.com/pelletier/go-toml/go.mod create mode 100644 vendor/github.com/pelletier/go-toml/keysparsing.go create mode 100644 vendor/github.com/pelletier/go-toml/lexer.go create mode 100644 vendor/github.com/pelletier/go-toml/localtime.go create mode 100644 vendor/github.com/pelletier/go-toml/marshal.go create mode 100644 vendor/github.com/pelletier/go-toml/marshal_OrderPreserve_test.toml create mode 100644 vendor/github.com/pelletier/go-toml/marshal_test.toml create mode 100644 vendor/github.com/pelletier/go-toml/parser.go create mode 100644 vendor/github.com/pelletier/go-toml/position.go create mode 100644 vendor/github.com/pelletier/go-toml/token.go create mode 100644 vendor/github.com/pelletier/go-toml/toml.go create mode 100644 vendor/github.com/pelletier/go-toml/tomlpub.go create mode 100644 vendor/github.com/pelletier/go-toml/tomltree_create.go create mode 100644 vendor/github.com/pelletier/go-toml/tomltree_write.go create mode 100644 vendor/github.com/pelletier/go-toml/tomltree_writepub.go create mode 100644 vendor/github.com/phayes/checkstyle/.scrutinizer.yml create mode 100644 vendor/github.com/phayes/checkstyle/LICENSE create mode 100644 vendor/github.com/phayes/checkstyle/README.md create mode 100644 vendor/github.com/phayes/checkstyle/checkstyle.go create mode 100644 vendor/github.com/phayes/checkstyle/godoc.go create mode 100644 vendor/github.com/polyfloyd/go-errorlint/LICENSE create mode 100644 vendor/github.com/polyfloyd/go-errorlint/errorlint/allowed.go create mode 100644 vendor/github.com/polyfloyd/go-errorlint/errorlint/analysis.go create mode 100644 vendor/github.com/polyfloyd/go-errorlint/errorlint/lint.go create mode 100644 vendor/github.com/prometheus/client_golang/prometheus/testutil/promlint/promlint.go create mode 100644 vendor/github.com/quasilyte/go-ruleguard/LICENSE create mode 100644 vendor/github.com/quasilyte/go-ruleguard/internal/gogrep/compile.go create mode 100644 vendor/github.com/quasilyte/go-ruleguard/internal/gogrep/gen_operations.go create mode 100644 vendor/github.com/quasilyte/go-ruleguard/internal/gogrep/gogrep.go create mode 100644 vendor/github.com/quasilyte/go-ruleguard/internal/gogrep/instructions.go create mode 100644 vendor/github.com/quasilyte/go-ruleguard/internal/gogrep/match.go create mode 100644 vendor/github.com/quasilyte/go-ruleguard/internal/gogrep/operation_string.go create mode 100644 vendor/github.com/quasilyte/go-ruleguard/internal/gogrep/operations.gen.go create mode 100644 vendor/github.com/quasilyte/go-ruleguard/internal/gogrep/parse.go create mode 100644 vendor/github.com/quasilyte/go-ruleguard/internal/gogrep/slices.go create mode 100644 vendor/github.com/quasilyte/go-ruleguard/internal/golist/golist.go create mode 100644 vendor/github.com/quasilyte/go-ruleguard/internal/xtypes/xtypes.go create mode 100644 vendor/github.com/quasilyte/go-ruleguard/nodetag/nodetag.go create mode 100644 
vendor/github.com/quasilyte/go-ruleguard/ruleguard/bundle.go create mode 100644 vendor/github.com/quasilyte/go-ruleguard/ruleguard/engine.go create mode 100644 vendor/github.com/quasilyte/go-ruleguard/ruleguard/filters.go create mode 100644 vendor/github.com/quasilyte/go-ruleguard/ruleguard/gorule.go create mode 100644 vendor/github.com/quasilyte/go-ruleguard/ruleguard/goutil/goutil.go create mode 100644 vendor/github.com/quasilyte/go-ruleguard/ruleguard/goutil/resolve.go create mode 100644 vendor/github.com/quasilyte/go-ruleguard/ruleguard/importer.go create mode 100644 vendor/github.com/quasilyte/go-ruleguard/ruleguard/libdsl.go create mode 100644 vendor/github.com/quasilyte/go-ruleguard/ruleguard/match_data.go create mode 100644 vendor/github.com/quasilyte/go-ruleguard/ruleguard/parser.go create mode 100644 vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/compile.go create mode 100644 vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/debug_info.go create mode 100644 vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/disasm.go create mode 100644 vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/env.go create mode 100644 vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/eval.go create mode 100644 vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/gen_opcodes.go create mode 100644 vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/opcode_string.go create mode 100644 vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/opcodes.gen.go create mode 100644 vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/quasigo.go create mode 100644 vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/utils.go create mode 100644 vendor/github.com/quasilyte/go-ruleguard/ruleguard/ruleguard.go create mode 100644 vendor/github.com/quasilyte/go-ruleguard/ruleguard/runner.go create mode 100644 vendor/github.com/quasilyte/go-ruleguard/ruleguard/typematch/patternop_string.go create mode 100644 vendor/github.com/quasilyte/go-ruleguard/ruleguard/typematch/typematch.go create mode 100644 vendor/github.com/quasilyte/go-ruleguard/ruleguard/utils.go create mode 100644 vendor/github.com/quasilyte/regex/syntax/LICENSE create mode 100644 vendor/github.com/quasilyte/regex/syntax/README.md create mode 100644 vendor/github.com/quasilyte/regex/syntax/ast.go create mode 100644 vendor/github.com/quasilyte/regex/syntax/errors.go create mode 100644 vendor/github.com/quasilyte/regex/syntax/go.mod create mode 100644 vendor/github.com/quasilyte/regex/syntax/lexer.go create mode 100644 vendor/github.com/quasilyte/regex/syntax/operation.go create mode 100644 vendor/github.com/quasilyte/regex/syntax/operation_string.go create mode 100644 vendor/github.com/quasilyte/regex/syntax/parser.go create mode 100644 vendor/github.com/quasilyte/regex/syntax/pos.go create mode 100644 vendor/github.com/quasilyte/regex/syntax/tokenkind_string.go create mode 100644 vendor/github.com/quasilyte/regex/syntax/utils.go create mode 100644 vendor/github.com/ryancurrah/gomodguard/.dockerignore create mode 100644 vendor/github.com/ryancurrah/gomodguard/.gitignore create mode 100644 vendor/github.com/ryancurrah/gomodguard/.golangci.yml create mode 100644 vendor/github.com/ryancurrah/gomodguard/.goreleaser.yml create mode 100644 vendor/github.com/ryancurrah/gomodguard/Dockerfile create mode 100644 vendor/github.com/ryancurrah/gomodguard/Dockerfile.goreleaser create mode 100644 vendor/github.com/ryancurrah/gomodguard/LICENSE create mode 100644 
vendor/github.com/ryancurrah/gomodguard/Makefile create mode 100644 vendor/github.com/ryancurrah/gomodguard/README.md create mode 100644 vendor/github.com/ryancurrah/gomodguard/cmd.go create mode 100644 vendor/github.com/ryancurrah/gomodguard/go.mod create mode 100644 vendor/github.com/ryancurrah/gomodguard/go.sum create mode 100644 vendor/github.com/ryancurrah/gomodguard/gomodguard.go create mode 100644 vendor/github.com/ryanrolds/sqlclosecheck/LICENSE create mode 100644 vendor/github.com/ryanrolds/sqlclosecheck/pkg/analyzer/analyzer.go create mode 100644 vendor/github.com/sanposhiho/wastedassign/v2/LICENSE create mode 100644 vendor/github.com/sanposhiho/wastedassign/v2/README.md create mode 100644 vendor/github.com/sanposhiho/wastedassign/v2/go.mod create mode 100644 vendor/github.com/sanposhiho/wastedassign/v2/go.sum create mode 100644 vendor/github.com/sanposhiho/wastedassign/v2/wastedassign.go create mode 100644 vendor/github.com/securego/gosec/v2/.gitignore create mode 100644 vendor/github.com/securego/gosec/v2/.golangci.yml create mode 100644 vendor/github.com/securego/gosec/v2/.goreleaser.yml create mode 100644 vendor/github.com/securego/gosec/v2/Dockerfile create mode 100644 vendor/github.com/securego/gosec/v2/LICENSE.txt create mode 100644 vendor/github.com/securego/gosec/v2/Makefile create mode 100644 vendor/github.com/securego/gosec/v2/README.md create mode 100644 vendor/github.com/securego/gosec/v2/USERS.md create mode 100644 vendor/github.com/securego/gosec/v2/action.yml create mode 100644 vendor/github.com/securego/gosec/v2/analyzer.go create mode 100644 vendor/github.com/securego/gosec/v2/call_list.go create mode 100644 vendor/github.com/securego/gosec/v2/config.go create mode 100644 vendor/github.com/securego/gosec/v2/cwe/data.go create mode 100644 vendor/github.com/securego/gosec/v2/cwe/types.go create mode 100644 vendor/github.com/securego/gosec/v2/entrypoint.sh create mode 100644 vendor/github.com/securego/gosec/v2/errors.go create mode 100644 vendor/github.com/securego/gosec/v2/go.mod create mode 100644 vendor/github.com/securego/gosec/v2/go.sum create mode 100644 vendor/github.com/securego/gosec/v2/helpers.go create mode 100644 vendor/github.com/securego/gosec/v2/import_tracker.go create mode 100644 vendor/github.com/securego/gosec/v2/install.sh create mode 100644 vendor/github.com/securego/gosec/v2/issue.go create mode 100644 vendor/github.com/securego/gosec/v2/renovate.json create mode 100644 vendor/github.com/securego/gosec/v2/report.go create mode 100644 vendor/github.com/securego/gosec/v2/resolve.go create mode 100644 vendor/github.com/securego/gosec/v2/rule.go create mode 100644 vendor/github.com/securego/gosec/v2/rules/archive.go create mode 100644 vendor/github.com/securego/gosec/v2/rules/bad_defer.go create mode 100644 vendor/github.com/securego/gosec/v2/rules/bind.go create mode 100644 vendor/github.com/securego/gosec/v2/rules/blocklist.go create mode 100644 vendor/github.com/securego/gosec/v2/rules/decompression-bomb.go create mode 100644 vendor/github.com/securego/gosec/v2/rules/errors.go create mode 100644 vendor/github.com/securego/gosec/v2/rules/fileperms.go create mode 100644 vendor/github.com/securego/gosec/v2/rules/hardcoded_credentials.go create mode 100644 vendor/github.com/securego/gosec/v2/rules/implicit_aliasing.go create mode 100644 vendor/github.com/securego/gosec/v2/rules/integer_overflow.go create mode 100644 vendor/github.com/securego/gosec/v2/rules/pprof.go create mode 100644 vendor/github.com/securego/gosec/v2/rules/rand.go create mode 
100644 vendor/github.com/securego/gosec/v2/rules/readfile.go create mode 100644 vendor/github.com/securego/gosec/v2/rules/rsa.go create mode 100644 vendor/github.com/securego/gosec/v2/rules/rulelist.go create mode 100644 vendor/github.com/securego/gosec/v2/rules/sql.go create mode 100644 vendor/github.com/securego/gosec/v2/rules/ssh.go create mode 100644 vendor/github.com/securego/gosec/v2/rules/ssrf.go create mode 100644 vendor/github.com/securego/gosec/v2/rules/subproc.go create mode 100644 vendor/github.com/securego/gosec/v2/rules/tempfiles.go create mode 100644 vendor/github.com/securego/gosec/v2/rules/templates.go create mode 100644 vendor/github.com/securego/gosec/v2/rules/tls.go create mode 100644 vendor/github.com/securego/gosec/v2/rules/tls_config.go create mode 100644 vendor/github.com/securego/gosec/v2/rules/unsafe.go create mode 100644 vendor/github.com/securego/gosec/v2/rules/weakcrypto.go create mode 100644 vendor/github.com/shazow/go-diff/LICENSE create mode 100644 vendor/github.com/shazow/go-diff/difflib/differ.go create mode 100644 vendor/github.com/sonatard/noctx/.gitignore create mode 100644 vendor/github.com/sonatard/noctx/.golangci.yml create mode 100644 vendor/github.com/sonatard/noctx/LICENSE create mode 100644 vendor/github.com/sonatard/noctx/Makefile create mode 100644 vendor/github.com/sonatard/noctx/README.md create mode 100644 vendor/github.com/sonatard/noctx/go.mod create mode 100644 vendor/github.com/sonatard/noctx/go.sum create mode 100644 vendor/github.com/sonatard/noctx/ngfunc/main.go create mode 100644 vendor/github.com/sonatard/noctx/ngfunc/report.go create mode 100644 vendor/github.com/sonatard/noctx/ngfunc/types.go create mode 100644 vendor/github.com/sonatard/noctx/noctx.go create mode 100644 vendor/github.com/sonatard/noctx/reqwithoutctx/main.go create mode 100644 vendor/github.com/sonatard/noctx/reqwithoutctx/report.go create mode 100644 vendor/github.com/sonatard/noctx/reqwithoutctx/ssa.go create mode 100644 vendor/github.com/sourcegraph/go-diff/LICENSE create mode 100644 vendor/github.com/sourcegraph/go-diff/diff/diff.go create mode 100644 vendor/github.com/sourcegraph/go-diff/diff/doc.go create mode 100644 vendor/github.com/sourcegraph/go-diff/diff/parse.go create mode 100644 vendor/github.com/sourcegraph/go-diff/diff/print.go create mode 100644 vendor/github.com/sourcegraph/go-diff/diff/reader_util.go create mode 100644 vendor/github.com/spf13/afero/.gitignore create mode 100644 vendor/github.com/spf13/afero/.travis.yml create mode 100644 vendor/github.com/spf13/afero/LICENSE.txt create mode 100644 vendor/github.com/spf13/afero/README.md create mode 100644 vendor/github.com/spf13/afero/afero.go create mode 100644 vendor/github.com/spf13/afero/appveyor.yml create mode 100644 vendor/github.com/spf13/afero/basepath.go create mode 100644 vendor/github.com/spf13/afero/cacheOnReadFs.go create mode 100644 vendor/github.com/spf13/afero/const_bsds.go create mode 100644 vendor/github.com/spf13/afero/const_win_unix.go create mode 100644 vendor/github.com/spf13/afero/copyOnWriteFs.go create mode 100644 vendor/github.com/spf13/afero/go.mod create mode 100644 vendor/github.com/spf13/afero/go.sum create mode 100644 vendor/github.com/spf13/afero/httpFs.go create mode 100644 vendor/github.com/spf13/afero/iofs.go create mode 100644 vendor/github.com/spf13/afero/ioutil.go create mode 100644 vendor/github.com/spf13/afero/lstater.go create mode 100644 vendor/github.com/spf13/afero/match.go create mode 100644 vendor/github.com/spf13/afero/mem/dir.go create mode 100644 
vendor/github.com/spf13/afero/mem/dirmap.go create mode 100644 vendor/github.com/spf13/afero/mem/file.go create mode 100644 vendor/github.com/spf13/afero/memmap.go create mode 100644 vendor/github.com/spf13/afero/os.go create mode 100644 vendor/github.com/spf13/afero/path.go create mode 100644 vendor/github.com/spf13/afero/readonlyfs.go create mode 100644 vendor/github.com/spf13/afero/regexpfs.go create mode 100644 vendor/github.com/spf13/afero/symlink.go create mode 100644 vendor/github.com/spf13/afero/unionFile.go create mode 100644 vendor/github.com/spf13/afero/util.go create mode 100644 vendor/github.com/spf13/cast/.gitignore create mode 100644 vendor/github.com/spf13/cast/.travis.yml create mode 100644 vendor/github.com/spf13/cast/LICENSE create mode 100644 vendor/github.com/spf13/cast/Makefile create mode 100644 vendor/github.com/spf13/cast/README.md create mode 100644 vendor/github.com/spf13/cast/cast.go create mode 100644 vendor/github.com/spf13/cast/caste.go create mode 100644 vendor/github.com/spf13/cast/go.mod create mode 100644 vendor/github.com/spf13/cast/go.sum create mode 100644 vendor/github.com/spf13/cobra/.gitignore create mode 100644 vendor/github.com/spf13/cobra/.golangci.yml create mode 100644 vendor/github.com/spf13/cobra/.mailmap create mode 100644 vendor/github.com/spf13/cobra/CHANGELOG.md create mode 100644 vendor/github.com/spf13/cobra/CONDUCT.md create mode 100644 vendor/github.com/spf13/cobra/CONTRIBUTING.md create mode 100644 vendor/github.com/spf13/cobra/LICENSE.txt create mode 100644 vendor/github.com/spf13/cobra/Makefile create mode 100644 vendor/github.com/spf13/cobra/README.md create mode 100644 vendor/github.com/spf13/cobra/args.go create mode 100644 vendor/github.com/spf13/cobra/bash_completions.go create mode 100644 vendor/github.com/spf13/cobra/bash_completions.md create mode 100644 vendor/github.com/spf13/cobra/bash_completionsV2.go create mode 100644 vendor/github.com/spf13/cobra/cobra.go create mode 100644 vendor/github.com/spf13/cobra/command.go create mode 100644 vendor/github.com/spf13/cobra/command_notwin.go create mode 100644 vendor/github.com/spf13/cobra/command_win.go create mode 100644 vendor/github.com/spf13/cobra/completions.go create mode 100644 vendor/github.com/spf13/cobra/fish_completions.go create mode 100644 vendor/github.com/spf13/cobra/fish_completions.md create mode 100644 vendor/github.com/spf13/cobra/go.mod create mode 100644 vendor/github.com/spf13/cobra/go.sum create mode 100644 vendor/github.com/spf13/cobra/powershell_completions.go create mode 100644 vendor/github.com/spf13/cobra/powershell_completions.md create mode 100644 vendor/github.com/spf13/cobra/projects_using_cobra.md create mode 100644 vendor/github.com/spf13/cobra/shell_completions.go create mode 100644 vendor/github.com/spf13/cobra/shell_completions.md create mode 100644 vendor/github.com/spf13/cobra/user_guide.md create mode 100644 vendor/github.com/spf13/cobra/zsh_completions.go create mode 100644 vendor/github.com/spf13/cobra/zsh_completions.md create mode 100644 vendor/github.com/spf13/jwalterweatherman/.gitignore create mode 100644 vendor/github.com/spf13/jwalterweatherman/LICENSE create mode 100644 vendor/github.com/spf13/jwalterweatherman/README.md create mode 100644 vendor/github.com/spf13/jwalterweatherman/default_notepad.go create mode 100644 vendor/github.com/spf13/jwalterweatherman/go.mod create mode 100644 vendor/github.com/spf13/jwalterweatherman/log_counter.go create mode 100644 vendor/github.com/spf13/jwalterweatherman/notepad.go create mode 
100644 vendor/github.com/spf13/pflag/.gitignore create mode 100644 vendor/github.com/spf13/pflag/.travis.yml create mode 100644 vendor/github.com/spf13/pflag/LICENSE create mode 100644 vendor/github.com/spf13/pflag/README.md create mode 100644 vendor/github.com/spf13/pflag/bool.go create mode 100644 vendor/github.com/spf13/pflag/bool_slice.go create mode 100644 vendor/github.com/spf13/pflag/bytes.go create mode 100644 vendor/github.com/spf13/pflag/count.go create mode 100644 vendor/github.com/spf13/pflag/duration.go create mode 100644 vendor/github.com/spf13/pflag/duration_slice.go create mode 100644 vendor/github.com/spf13/pflag/flag.go create mode 100644 vendor/github.com/spf13/pflag/float32.go create mode 100644 vendor/github.com/spf13/pflag/float32_slice.go create mode 100644 vendor/github.com/spf13/pflag/float64.go create mode 100644 vendor/github.com/spf13/pflag/float64_slice.go create mode 100644 vendor/github.com/spf13/pflag/go.mod create mode 100644 vendor/github.com/spf13/pflag/go.sum create mode 100644 vendor/github.com/spf13/pflag/golangflag.go create mode 100644 vendor/github.com/spf13/pflag/int.go create mode 100644 vendor/github.com/spf13/pflag/int16.go create mode 100644 vendor/github.com/spf13/pflag/int32.go create mode 100644 vendor/github.com/spf13/pflag/int32_slice.go create mode 100644 vendor/github.com/spf13/pflag/int64.go create mode 100644 vendor/github.com/spf13/pflag/int64_slice.go create mode 100644 vendor/github.com/spf13/pflag/int8.go create mode 100644 vendor/github.com/spf13/pflag/int_slice.go create mode 100644 vendor/github.com/spf13/pflag/ip.go create mode 100644 vendor/github.com/spf13/pflag/ip_slice.go create mode 100644 vendor/github.com/spf13/pflag/ipmask.go create mode 100644 vendor/github.com/spf13/pflag/ipnet.go create mode 100644 vendor/github.com/spf13/pflag/string.go create mode 100644 vendor/github.com/spf13/pflag/string_array.go create mode 100644 vendor/github.com/spf13/pflag/string_slice.go create mode 100644 vendor/github.com/spf13/pflag/string_to_int.go create mode 100644 vendor/github.com/spf13/pflag/string_to_int64.go create mode 100644 vendor/github.com/spf13/pflag/string_to_string.go create mode 100644 vendor/github.com/spf13/pflag/uint.go create mode 100644 vendor/github.com/spf13/pflag/uint16.go create mode 100644 vendor/github.com/spf13/pflag/uint32.go create mode 100644 vendor/github.com/spf13/pflag/uint64.go create mode 100644 vendor/github.com/spf13/pflag/uint8.go create mode 100644 vendor/github.com/spf13/pflag/uint_slice.go create mode 100644 vendor/github.com/spf13/viper/.editorconfig create mode 100644 vendor/github.com/spf13/viper/.gitignore create mode 100644 vendor/github.com/spf13/viper/.golangci.yml create mode 100644 vendor/github.com/spf13/viper/LICENSE create mode 100644 vendor/github.com/spf13/viper/Makefile create mode 100644 vendor/github.com/spf13/viper/README.md create mode 100644 vendor/github.com/spf13/viper/TROUBLESHOOTING.md create mode 100644 vendor/github.com/spf13/viper/flags.go create mode 100644 vendor/github.com/spf13/viper/go.mod create mode 100644 vendor/github.com/spf13/viper/go.sum create mode 100644 vendor/github.com/spf13/viper/util.go create mode 100644 vendor/github.com/spf13/viper/viper.go create mode 100644 vendor/github.com/spf13/viper/watch.go create mode 100644 vendor/github.com/spf13/viper/watch_wasm.go create mode 100644 vendor/github.com/ssgreg/nlreturn/v2/LICENSE create mode 100644 vendor/github.com/ssgreg/nlreturn/v2/pkg/nlreturn/nlreturn.go create mode 100644 
vendor/github.com/subosito/gotenv/.env create mode 100644 vendor/github.com/subosito/gotenv/.env.invalid create mode 100644 vendor/github.com/subosito/gotenv/.gitignore create mode 100644 vendor/github.com/subosito/gotenv/.travis.yml create mode 100644 vendor/github.com/subosito/gotenv/CHANGELOG.md create mode 100644 vendor/github.com/subosito/gotenv/LICENSE create mode 100644 vendor/github.com/subosito/gotenv/README.md create mode 100644 vendor/github.com/subosito/gotenv/appveyor.yml create mode 100644 vendor/github.com/subosito/gotenv/gotenv.go create mode 100644 vendor/github.com/tdakkota/asciicheck/.gitignore create mode 100644 vendor/github.com/tdakkota/asciicheck/LICENSE create mode 100644 vendor/github.com/tdakkota/asciicheck/README.md create mode 100644 vendor/github.com/tdakkota/asciicheck/ascii.go create mode 100644 vendor/github.com/tdakkota/asciicheck/asciicheck.go create mode 100644 vendor/github.com/tdakkota/asciicheck/go.mod create mode 100644 vendor/github.com/tetafro/godot/.gitignore create mode 100644 vendor/github.com/tetafro/godot/.godot.yaml create mode 100644 vendor/github.com/tetafro/godot/.golangci.yml create mode 100644 vendor/github.com/tetafro/godot/.goreleaser.yml create mode 100644 vendor/github.com/tetafro/godot/LICENSE create mode 100644 vendor/github.com/tetafro/godot/Makefile create mode 100644 vendor/github.com/tetafro/godot/README.md create mode 100644 vendor/github.com/tetafro/godot/checks.go create mode 100644 vendor/github.com/tetafro/godot/getters.go create mode 100644 vendor/github.com/tetafro/godot/go.mod create mode 100644 vendor/github.com/tetafro/godot/go.sum create mode 100644 vendor/github.com/tetafro/godot/godot.go create mode 100644 vendor/github.com/tetafro/godot/settings.go create mode 100644 vendor/github.com/timakin/bodyclose/LICENSE create mode 100644 vendor/github.com/timakin/bodyclose/passes/bodyclose/bodyclose.go create mode 100644 vendor/github.com/tomarrell/wrapcheck/v2/LICENSE create mode 100644 vendor/github.com/tomarrell/wrapcheck/v2/wrapcheck/wrapcheck.go create mode 100644 vendor/github.com/tommy-muehle/go-mnd/v2/.editorconfig create mode 100644 vendor/github.com/tommy-muehle/go-mnd/v2/.gitattributes create mode 100644 vendor/github.com/tommy-muehle/go-mnd/v2/.gitignore create mode 100644 vendor/github.com/tommy-muehle/go-mnd/v2/.goreleaser.yml create mode 100644 vendor/github.com/tommy-muehle/go-mnd/v2/Dockerfile create mode 100644 vendor/github.com/tommy-muehle/go-mnd/v2/LICENSE create mode 100644 vendor/github.com/tommy-muehle/go-mnd/v2/Makefile create mode 100644 vendor/github.com/tommy-muehle/go-mnd/v2/README.md create mode 100644 vendor/github.com/tommy-muehle/go-mnd/v2/action.yml create mode 100644 vendor/github.com/tommy-muehle/go-mnd/v2/analyzer.go create mode 100644 vendor/github.com/tommy-muehle/go-mnd/v2/checks/argument.go create mode 100644 vendor/github.com/tommy-muehle/go-mnd/v2/checks/assign.go create mode 100644 vendor/github.com/tommy-muehle/go-mnd/v2/checks/case.go create mode 100644 vendor/github.com/tommy-muehle/go-mnd/v2/checks/checks.go create mode 100644 vendor/github.com/tommy-muehle/go-mnd/v2/checks/condition.go create mode 100644 vendor/github.com/tommy-muehle/go-mnd/v2/checks/operation.go create mode 100644 vendor/github.com/tommy-muehle/go-mnd/v2/checks/return.go create mode 100644 vendor/github.com/tommy-muehle/go-mnd/v2/config/config.go create mode 100644 vendor/github.com/tommy-muehle/go-mnd/v2/entrypoint.sh create mode 100644 vendor/github.com/tommy-muehle/go-mnd/v2/go.mod create mode 100644 
vendor/github.com/tommy-muehle/go-mnd/v2/go.sum create mode 100644 vendor/github.com/ultraware/funlen/LICENSE create mode 100644 vendor/github.com/ultraware/funlen/README.md create mode 100644 vendor/github.com/ultraware/funlen/main.go create mode 100644 vendor/github.com/ultraware/whitespace/LICENSE create mode 100644 vendor/github.com/ultraware/whitespace/README.md create mode 100644 vendor/github.com/ultraware/whitespace/main.go create mode 100644 vendor/github.com/uudashr/gocognit/LICENSE create mode 100644 vendor/github.com/uudashr/gocognit/README.md create mode 100644 vendor/github.com/uudashr/gocognit/doc.go create mode 100644 vendor/github.com/uudashr/gocognit/go.mod create mode 100644 vendor/github.com/uudashr/gocognit/go.sum create mode 100644 vendor/github.com/uudashr/gocognit/gocognit.go create mode 100644 vendor/github.com/yeya24/promlinter/.gitignore create mode 100644 vendor/github.com/yeya24/promlinter/LICENSE create mode 100644 vendor/github.com/yeya24/promlinter/Makefile create mode 100644 vendor/github.com/yeya24/promlinter/README.md create mode 100644 vendor/github.com/yeya24/promlinter/go.mod create mode 100644 vendor/github.com/yeya24/promlinter/go.sum create mode 100644 vendor/github.com/yeya24/promlinter/promlinter.go create mode 100644 vendor/golang.org/x/mod/LICENSE create mode 100644 vendor/golang.org/x/mod/PATENTS create mode 100644 vendor/golang.org/x/mod/internal/lazyregexp/lazyre.go create mode 100644 vendor/golang.org/x/mod/modfile/print.go create mode 100644 vendor/golang.org/x/mod/modfile/read.go create mode 100644 vendor/golang.org/x/mod/modfile/rule.go create mode 100644 vendor/golang.org/x/mod/module/module.go create mode 100644 vendor/golang.org/x/mod/semver/semver.go create mode 100644 vendor/golang.org/x/text/width/kind_string.go create mode 100644 vendor/golang.org/x/text/width/tables10.0.0.go create mode 100644 vendor/golang.org/x/text/width/tables11.0.0.go create mode 100644 vendor/golang.org/x/text/width/tables12.0.0.go create mode 100644 vendor/golang.org/x/text/width/tables13.0.0.go create mode 100644 vendor/golang.org/x/text/width/tables9.0.0.go create mode 100644 vendor/golang.org/x/text/width/transform.go create mode 100644 vendor/golang.org/x/text/width/trieval.go create mode 100644 vendor/golang.org/x/text/width/width.go create mode 100644 vendor/golang.org/x/tools/go/analysis/analysis.go create mode 100644 vendor/golang.org/x/tools/go/analysis/diagnostic.go create mode 100644 vendor/golang.org/x/tools/go/analysis/doc.go create mode 100644 vendor/golang.org/x/tools/go/analysis/passes/asmdecl/asmdecl.go create mode 100644 vendor/golang.org/x/tools/go/analysis/passes/assign/assign.go create mode 100644 vendor/golang.org/x/tools/go/analysis/passes/atomic/atomic.go create mode 100644 vendor/golang.org/x/tools/go/analysis/passes/atomicalign/atomicalign.go create mode 100644 vendor/golang.org/x/tools/go/analysis/passes/bools/bools.go create mode 100644 vendor/golang.org/x/tools/go/analysis/passes/buildssa/buildssa.go create mode 100644 vendor/golang.org/x/tools/go/analysis/passes/buildtag/buildtag.go create mode 100644 vendor/golang.org/x/tools/go/analysis/passes/buildtag/buildtag_old.go create mode 100644 vendor/golang.org/x/tools/go/analysis/passes/cgocall/cgocall.go create mode 100644 vendor/golang.org/x/tools/go/analysis/passes/composite/composite.go create mode 100644 vendor/golang.org/x/tools/go/analysis/passes/composite/whitelist.go create mode 100644 vendor/golang.org/x/tools/go/analysis/passes/copylock/copylock.go create mode 100644 
vendor/golang.org/x/tools/go/analysis/passes/ctrlflow/ctrlflow.go create mode 100644 vendor/golang.org/x/tools/go/analysis/passes/deepequalerrors/deepequalerrors.go create mode 100644 vendor/golang.org/x/tools/go/analysis/passes/errorsas/errorsas.go create mode 100644 vendor/golang.org/x/tools/go/analysis/passes/fieldalignment/fieldalignment.go create mode 100644 vendor/golang.org/x/tools/go/analysis/passes/findcall/findcall.go create mode 100644 vendor/golang.org/x/tools/go/analysis/passes/framepointer/framepointer.go create mode 100644 vendor/golang.org/x/tools/go/analysis/passes/httpresponse/httpresponse.go create mode 100644 vendor/golang.org/x/tools/go/analysis/passes/ifaceassert/ifaceassert.go create mode 100644 vendor/golang.org/x/tools/go/analysis/passes/inspect/inspect.go create mode 100644 vendor/golang.org/x/tools/go/analysis/passes/internal/analysisutil/util.go create mode 100644 vendor/golang.org/x/tools/go/analysis/passes/loopclosure/loopclosure.go create mode 100644 vendor/golang.org/x/tools/go/analysis/passes/lostcancel/lostcancel.go create mode 100644 vendor/golang.org/x/tools/go/analysis/passes/nilfunc/nilfunc.go create mode 100644 vendor/golang.org/x/tools/go/analysis/passes/nilness/nilness.go create mode 100644 vendor/golang.org/x/tools/go/analysis/passes/pkgfact/pkgfact.go create mode 100644 vendor/golang.org/x/tools/go/analysis/passes/printf/printf.go create mode 100644 vendor/golang.org/x/tools/go/analysis/passes/printf/types.go create mode 100644 vendor/golang.org/x/tools/go/analysis/passes/reflectvaluecompare/reflectvaluecompare.go create mode 100644 vendor/golang.org/x/tools/go/analysis/passes/shadow/shadow.go create mode 100644 vendor/golang.org/x/tools/go/analysis/passes/shift/dead.go create mode 100644 vendor/golang.org/x/tools/go/analysis/passes/shift/shift.go create mode 100644 vendor/golang.org/x/tools/go/analysis/passes/sigchanyzer/sigchanyzer.go create mode 100644 vendor/golang.org/x/tools/go/analysis/passes/sortslice/analyzer.go create mode 100644 vendor/golang.org/x/tools/go/analysis/passes/stdmethods/stdmethods.go create mode 100644 vendor/golang.org/x/tools/go/analysis/passes/stringintconv/string.go create mode 100644 vendor/golang.org/x/tools/go/analysis/passes/structtag/structtag.go create mode 100644 vendor/golang.org/x/tools/go/analysis/passes/testinggoroutine/testinggoroutine.go create mode 100644 vendor/golang.org/x/tools/go/analysis/passes/tests/tests.go create mode 100644 vendor/golang.org/x/tools/go/analysis/passes/unmarshal/unmarshal.go create mode 100644 vendor/golang.org/x/tools/go/analysis/passes/unreachable/unreachable.go create mode 100644 vendor/golang.org/x/tools/go/analysis/passes/unsafeptr/unsafeptr.go create mode 100644 vendor/golang.org/x/tools/go/analysis/passes/unusedresult/unusedresult.go create mode 100644 vendor/golang.org/x/tools/go/analysis/passes/unusedwrite/unusedwrite.go create mode 100644 vendor/golang.org/x/tools/go/analysis/validate.go create mode 100644 vendor/golang.org/x/tools/go/ast/inspector/inspector.go create mode 100644 vendor/golang.org/x/tools/go/ast/inspector/typeof.go create mode 100644 vendor/golang.org/x/tools/go/cfg/builder.go create mode 100644 vendor/golang.org/x/tools/go/cfg/cfg.go create mode 100644 vendor/golang.org/x/tools/go/gcexportdata/gcexportdata.go create mode 100644 vendor/golang.org/x/tools/go/gcexportdata/importer.go create mode 100644 vendor/golang.org/x/tools/go/internal/gcimporter/bexport.go create mode 100644 vendor/golang.org/x/tools/go/internal/gcimporter/bimport.go create mode 
100644 vendor/golang.org/x/tools/go/internal/gcimporter/exportdata.go create mode 100644 vendor/golang.org/x/tools/go/internal/gcimporter/gcimporter.go create mode 100644 vendor/golang.org/x/tools/go/internal/gcimporter/iexport.go create mode 100644 vendor/golang.org/x/tools/go/internal/gcimporter/iimport.go create mode 100644 vendor/golang.org/x/tools/go/internal/gcimporter/newInterface10.go create mode 100644 vendor/golang.org/x/tools/go/internal/gcimporter/newInterface11.go create mode 100644 vendor/golang.org/x/tools/go/internal/packagesdriver/sizes.go create mode 100644 vendor/golang.org/x/tools/go/packages/doc.go create mode 100644 vendor/golang.org/x/tools/go/packages/external.go create mode 100644 vendor/golang.org/x/tools/go/packages/golist.go create mode 100644 vendor/golang.org/x/tools/go/packages/golist_overlay.go create mode 100644 vendor/golang.org/x/tools/go/packages/loadmode_string.go create mode 100644 vendor/golang.org/x/tools/go/packages/packages.go create mode 100644 vendor/golang.org/x/tools/go/packages/visit.go create mode 100644 vendor/golang.org/x/tools/go/ssa/blockopt.go create mode 100644 vendor/golang.org/x/tools/go/ssa/builder.go create mode 100644 vendor/golang.org/x/tools/go/ssa/const.go create mode 100644 vendor/golang.org/x/tools/go/ssa/create.go create mode 100644 vendor/golang.org/x/tools/go/ssa/doc.go create mode 100644 vendor/golang.org/x/tools/go/ssa/dom.go create mode 100644 vendor/golang.org/x/tools/go/ssa/emit.go create mode 100644 vendor/golang.org/x/tools/go/ssa/func.go create mode 100644 vendor/golang.org/x/tools/go/ssa/identical.go create mode 100644 vendor/golang.org/x/tools/go/ssa/identical_17.go create mode 100644 vendor/golang.org/x/tools/go/ssa/lift.go create mode 100644 vendor/golang.org/x/tools/go/ssa/lvalue.go create mode 100644 vendor/golang.org/x/tools/go/ssa/methods.go create mode 100644 vendor/golang.org/x/tools/go/ssa/mode.go create mode 100644 vendor/golang.org/x/tools/go/ssa/print.go create mode 100644 vendor/golang.org/x/tools/go/ssa/sanity.go create mode 100644 vendor/golang.org/x/tools/go/ssa/source.go create mode 100644 vendor/golang.org/x/tools/go/ssa/ssa.go create mode 100644 vendor/golang.org/x/tools/go/ssa/ssautil/load.go create mode 100644 vendor/golang.org/x/tools/go/ssa/ssautil/switch.go create mode 100644 vendor/golang.org/x/tools/go/ssa/ssautil/visit.go create mode 100644 vendor/golang.org/x/tools/go/ssa/testmain.go create mode 100644 vendor/golang.org/x/tools/go/ssa/util.go create mode 100644 vendor/golang.org/x/tools/go/ssa/wrappers.go create mode 100644 vendor/golang.org/x/tools/go/types/objectpath/objectpath.go create mode 100644 vendor/golang.org/x/tools/go/types/typeutil/callee.go create mode 100644 vendor/golang.org/x/tools/go/types/typeutil/imports.go create mode 100644 vendor/golang.org/x/tools/go/types/typeutil/map.go create mode 100644 vendor/golang.org/x/tools/go/types/typeutil/methodsetcache.go create mode 100644 vendor/golang.org/x/tools/go/types/typeutil/ui.go create mode 100644 vendor/golang.org/x/tools/imports/forward.go create mode 100644 vendor/golang.org/x/tools/internal/analysisinternal/analysis.go create mode 100644 vendor/golang.org/x/tools/internal/event/core/event.go create mode 100644 vendor/golang.org/x/tools/internal/event/core/export.go create mode 100644 vendor/golang.org/x/tools/internal/event/core/fast.go create mode 100644 vendor/golang.org/x/tools/internal/event/doc.go create mode 100644 vendor/golang.org/x/tools/internal/event/event.go create mode 100644 
vendor/golang.org/x/tools/internal/event/keys/keys.go create mode 100644 vendor/golang.org/x/tools/internal/event/keys/standard.go create mode 100644 vendor/golang.org/x/tools/internal/event/label/label.go create mode 100644 vendor/golang.org/x/tools/internal/fastwalk/fastwalk.go create mode 100644 vendor/golang.org/x/tools/internal/fastwalk/fastwalk_dirent_fileno.go create mode 100644 vendor/golang.org/x/tools/internal/fastwalk/fastwalk_dirent_ino.go create mode 100644 vendor/golang.org/x/tools/internal/fastwalk/fastwalk_dirent_namlen_bsd.go create mode 100644 vendor/golang.org/x/tools/internal/fastwalk/fastwalk_dirent_namlen_linux.go create mode 100644 vendor/golang.org/x/tools/internal/fastwalk/fastwalk_portable.go create mode 100644 vendor/golang.org/x/tools/internal/fastwalk/fastwalk_unix.go create mode 100644 vendor/golang.org/x/tools/internal/gocommand/invoke.go create mode 100644 vendor/golang.org/x/tools/internal/gocommand/vendor.go create mode 100644 vendor/golang.org/x/tools/internal/gocommand/version.go create mode 100644 vendor/golang.org/x/tools/internal/gopathwalk/walk.go create mode 100644 vendor/golang.org/x/tools/internal/imports/fix.go create mode 100644 vendor/golang.org/x/tools/internal/imports/imports.go create mode 100644 vendor/golang.org/x/tools/internal/imports/mod.go create mode 100644 vendor/golang.org/x/tools/internal/imports/mod_cache.go create mode 100644 vendor/golang.org/x/tools/internal/imports/sortimports.go create mode 100644 vendor/golang.org/x/tools/internal/imports/zstdlib.go create mode 100644 vendor/golang.org/x/tools/internal/lsp/fuzzy/input.go create mode 100644 vendor/golang.org/x/tools/internal/lsp/fuzzy/matcher.go create mode 100644 vendor/golang.org/x/tools/internal/packagesinternal/packages.go create mode 100644 vendor/golang.org/x/tools/internal/typesinternal/errorcode.go create mode 100644 vendor/golang.org/x/tools/internal/typesinternal/errorcode_string.go create mode 100644 vendor/golang.org/x/tools/internal/typesinternal/types.go create mode 100644 vendor/golang.org/x/xerrors/LICENSE create mode 100644 vendor/golang.org/x/xerrors/PATENTS create mode 100644 vendor/golang.org/x/xerrors/README create mode 100644 vendor/golang.org/x/xerrors/adaptor.go create mode 100644 vendor/golang.org/x/xerrors/codereview.cfg create mode 100644 vendor/golang.org/x/xerrors/doc.go create mode 100644 vendor/golang.org/x/xerrors/errors.go create mode 100644 vendor/golang.org/x/xerrors/fmt.go create mode 100644 vendor/golang.org/x/xerrors/format.go create mode 100644 vendor/golang.org/x/xerrors/frame.go create mode 100644 vendor/golang.org/x/xerrors/go.mod create mode 100644 vendor/golang.org/x/xerrors/internal/internal.go create mode 100644 vendor/golang.org/x/xerrors/wrap.go create mode 100644 vendor/gopkg.in/ini.v1/.gitignore create mode 100644 vendor/gopkg.in/ini.v1/LICENSE create mode 100644 vendor/gopkg.in/ini.v1/Makefile create mode 100644 vendor/gopkg.in/ini.v1/README.md create mode 100644 vendor/gopkg.in/ini.v1/codecov.yml create mode 100644 vendor/gopkg.in/ini.v1/data_source.go create mode 100644 vendor/gopkg.in/ini.v1/deprecated.go create mode 100644 vendor/gopkg.in/ini.v1/error.go create mode 100644 vendor/gopkg.in/ini.v1/file.go create mode 100644 vendor/gopkg.in/ini.v1/helper.go create mode 100644 vendor/gopkg.in/ini.v1/ini.go create mode 100644 vendor/gopkg.in/ini.v1/key.go create mode 100644 vendor/gopkg.in/ini.v1/parser.go create mode 100644 vendor/gopkg.in/ini.v1/section.go create mode 100644 vendor/gopkg.in/ini.v1/struct.go create mode 
100644 vendor/honnef.co/go/tools/LICENSE create mode 100644 vendor/honnef.co/go/tools/LICENSE-THIRD-PARTY create mode 100644 vendor/honnef.co/go/tools/analysis/code/code.go create mode 100644 vendor/honnef.co/go/tools/analysis/code/visit.go create mode 100644 vendor/honnef.co/go/tools/analysis/edit/edit.go create mode 100644 vendor/honnef.co/go/tools/analysis/facts/deprecated.go create mode 100644 vendor/honnef.co/go/tools/analysis/facts/directives.go create mode 100644 vendor/honnef.co/go/tools/analysis/facts/generated.go create mode 100644 vendor/honnef.co/go/tools/analysis/facts/nilness/nilness.go create mode 100644 vendor/honnef.co/go/tools/analysis/facts/purity.go create mode 100644 vendor/honnef.co/go/tools/analysis/facts/token.go create mode 100644 vendor/honnef.co/go/tools/analysis/facts/typedness/typedness.go create mode 100644 vendor/honnef.co/go/tools/analysis/lint/lint.go create mode 100644 vendor/honnef.co/go/tools/analysis/report/report.go create mode 100644 vendor/honnef.co/go/tools/config/config.go create mode 100644 vendor/honnef.co/go/tools/config/example.conf create mode 100644 vendor/honnef.co/go/tools/go/ast/astutil/upstream.go create mode 100644 vendor/honnef.co/go/tools/go/ast/astutil/util.go create mode 100644 vendor/honnef.co/go/tools/go/ir/LICENSE create mode 100644 vendor/honnef.co/go/tools/go/ir/UPSTREAM create mode 100644 vendor/honnef.co/go/tools/go/ir/blockopt.go create mode 100644 vendor/honnef.co/go/tools/go/ir/builder.go create mode 100644 vendor/honnef.co/go/tools/go/ir/const.go create mode 100644 vendor/honnef.co/go/tools/go/ir/create.go create mode 100644 vendor/honnef.co/go/tools/go/ir/doc.go create mode 100644 vendor/honnef.co/go/tools/go/ir/dom.go create mode 100644 vendor/honnef.co/go/tools/go/ir/emit.go create mode 100644 vendor/honnef.co/go/tools/go/ir/exits.go create mode 100644 vendor/honnef.co/go/tools/go/ir/func.go create mode 100644 vendor/honnef.co/go/tools/go/ir/html.go create mode 100644 vendor/honnef.co/go/tools/go/ir/identical.go create mode 100644 vendor/honnef.co/go/tools/go/ir/identical_17.go create mode 100644 vendor/honnef.co/go/tools/go/ir/irutil/load.go create mode 100644 vendor/honnef.co/go/tools/go/ir/irutil/loops.go create mode 100644 vendor/honnef.co/go/tools/go/ir/irutil/stub.go create mode 100644 vendor/honnef.co/go/tools/go/ir/irutil/switch.go create mode 100644 vendor/honnef.co/go/tools/go/ir/irutil/terminates.go create mode 100644 vendor/honnef.co/go/tools/go/ir/irutil/util.go create mode 100644 vendor/honnef.co/go/tools/go/ir/irutil/visit.go create mode 100644 vendor/honnef.co/go/tools/go/ir/lift.go create mode 100644 vendor/honnef.co/go/tools/go/ir/lvalue.go create mode 100644 vendor/honnef.co/go/tools/go/ir/methods.go create mode 100644 vendor/honnef.co/go/tools/go/ir/mode.go create mode 100644 vendor/honnef.co/go/tools/go/ir/print.go create mode 100644 vendor/honnef.co/go/tools/go/ir/sanity.go create mode 100644 vendor/honnef.co/go/tools/go/ir/source.go create mode 100644 vendor/honnef.co/go/tools/go/ir/ssa.go create mode 100644 vendor/honnef.co/go/tools/go/ir/staticcheck.conf create mode 100644 vendor/honnef.co/go/tools/go/ir/util.go create mode 100644 vendor/honnef.co/go/tools/go/ir/wrappers.go create mode 100644 vendor/honnef.co/go/tools/go/ir/write.go create mode 100644 vendor/honnef.co/go/tools/go/types/typeutil/upstream.go create mode 100644 vendor/honnef.co/go/tools/go/types/typeutil/util.go create mode 100644 vendor/honnef.co/go/tools/internal/passes/buildir/buildir.go create mode 100644 
vendor/honnef.co/go/tools/internal/sharedcheck/lint.go create mode 100644 vendor/honnef.co/go/tools/knowledge/arg.go create mode 100644 vendor/honnef.co/go/tools/knowledge/deprecated.go create mode 100644 vendor/honnef.co/go/tools/pattern/convert.go create mode 100644 vendor/honnef.co/go/tools/pattern/doc.go create mode 100644 vendor/honnef.co/go/tools/pattern/fuzz.go create mode 100644 vendor/honnef.co/go/tools/pattern/lexer.go create mode 100644 vendor/honnef.co/go/tools/pattern/match.go create mode 100644 vendor/honnef.co/go/tools/pattern/parser.go create mode 100644 vendor/honnef.co/go/tools/pattern/pattern.go create mode 100644 vendor/honnef.co/go/tools/printf/fuzz.go create mode 100644 vendor/honnef.co/go/tools/printf/printf.go create mode 100644 vendor/honnef.co/go/tools/simple/analysis.go create mode 100644 vendor/honnef.co/go/tools/simple/doc.go create mode 100644 vendor/honnef.co/go/tools/simple/lint.go create mode 100644 vendor/honnef.co/go/tools/staticcheck/analysis.go create mode 100644 vendor/honnef.co/go/tools/staticcheck/buildtag.go create mode 100644 vendor/honnef.co/go/tools/staticcheck/doc.go create mode 100644 vendor/honnef.co/go/tools/staticcheck/lint.go create mode 100644 vendor/honnef.co/go/tools/staticcheck/rules.go create mode 100644 vendor/honnef.co/go/tools/staticcheck/structtag.go create mode 100644 vendor/honnef.co/go/tools/stylecheck/analysis.go create mode 100644 vendor/honnef.co/go/tools/stylecheck/doc.go create mode 100644 vendor/honnef.co/go/tools/stylecheck/lint.go create mode 100644 vendor/honnef.co/go/tools/stylecheck/names.go create mode 100644 vendor/honnef.co/go/tools/unused/edge.go create mode 100644 vendor/honnef.co/go/tools/unused/edgekind_string.go create mode 100644 vendor/honnef.co/go/tools/unused/implements.go create mode 100644 vendor/honnef.co/go/tools/unused/typemap/identical.go create mode 100644 vendor/honnef.co/go/tools/unused/typemap/map.go create mode 100644 vendor/honnef.co/go/tools/unused/unused.go create mode 100644 vendor/mvdan.cc/gofumpt/LICENSE create mode 100644 vendor/mvdan.cc/gofumpt/LICENSE.google create mode 100644 vendor/mvdan.cc/gofumpt/format/format.go create mode 100644 vendor/mvdan.cc/interfacer/LICENSE create mode 100644 vendor/mvdan.cc/interfacer/check/cache.go create mode 100644 vendor/mvdan.cc/interfacer/check/check.go create mode 100644 vendor/mvdan.cc/interfacer/check/types.go create mode 100644 vendor/mvdan.cc/lint/.travis.yml create mode 100644 vendor/mvdan.cc/lint/LICENSE create mode 100644 vendor/mvdan.cc/lint/README.md create mode 100644 vendor/mvdan.cc/lint/lint.go create mode 100644 vendor/mvdan.cc/unparam/LICENSE create mode 100644 vendor/mvdan.cc/unparam/check/check.go

diff --git a/.golangci.yml b/.golangci.yml
new file mode 100644
index 000000000..cabf6c094
--- /dev/null
+++ b/.golangci.yml
@@ -0,0 +1,10 @@
+linters-settings:
+  gofmt:
+    simplify: false
+
+linters:
+  disable-all: true
+  enable:
+    - gofmt
+    - goimports
+    - govet
diff --git a/.woodpecker/main.yml b/.woodpecker/main.yml
index 66791a01f..ba893292b 100644
--- a/.woodpecker/main.yml
+++ b/.woodpecker/main.yml
@@ -63,9 +63,7 @@ pipeline:
     group: test
     commands:
       - make test
-      - make vet
       - make lint
-      - make formatcheck
 
   test-postgres:
     image: golang:1.16
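Note on the Makefile change below: the lint target invokes the vendored linter with "go run vendor/github.com/golangci/golangci-lint/cmd/golangci-lint/main.go run", which only works because golangci-lint is declared as a regular module dependency (see the go.mod hunk further down) and copied into vendor/ by "go mod vendor". The one-line tools.go change listed in this patch's diffstat is what keeps that dependency from being pruned. A minimal sketch of the conventional blank-import pattern follows; the build tag and package name here are the usual convention, not a quote of the actual tools.go, whose contents are not shown in this excerpt:

    //go:build tools
    // +build tools

    package tools

    // Blank-import the linter's main package so that "go mod tidy" and
    // "go mod vendor" keep it (and its sources) in the module graph.
    import (
        _ "github.com/golangci/golangci-lint/cmd/golangci-lint"
    )

With the module vendored this way, contributors can run "make lint" without installing a separate golangci-lint binary, and CI runs the exact version pinned in go.mod (v1.42.1).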
diff --git a/Makefile b/Makefile
index b5432602f..f68baa1f5 100644
--- a/Makefile
+++ b/Makefile
@@ -25,9 +25,6 @@ vendor:
 	go mod tidy
 	go mod vendor
 
-formatcheck:
-	@([ -z "$(shell gofmt -d $(GOFILES_NOVENDOR) | head)" ]) || (echo "Source is unformatted"; exit 1)
-
 format:
 	@gofmt -w ${GOFILES_NOVENDOR}
 
@@ -36,13 +33,10 @@ clean:
 	go clean -i ./...
 	rm -rf build
 
-.PHONY: vet
-vet:
-	@echo "Running go vet..."
-	@go vet $(GO_PACKAGES)
-
 .PHONY: lint
 lint:
+	@echo "Running golangci-lint"
+	go run vendor/github.com/golangci/golangci-lint/cmd/golangci-lint/main.go run
 	@echo "Running zerolog linter"
 	go run vendor/github.com/rs/zerolog/cmd/lint/lint.go github.com/woodpecker-ci/woodpecker/cmd/agent
 	go run vendor/github.com/rs/zerolog/cmd/lint/lint.go github.com/woodpecker-ci/woodpecker/cmd/cli
diff --git a/go.mod b/go.mod
index b60fe2098..5191c08f5 100644
--- a/go.mod
+++ b/go.mod
@@ -23,6 +23,7 @@ require (
 	github.com/go-sql-driver/mysql v1.6.0
 	github.com/gogits/go-gogs-client v0.0.0-20210131175652-1d7215cd8d85
 	github.com/golang-jwt/jwt/v4 v4.1.0
+	github.com/golangci/golangci-lint v1.42.1
 	github.com/google/go-github/v39 v39.2.0
 	github.com/gorilla/securecookie v1.1.1
 	github.com/hashicorp/go-cleanhttp v0.5.2 // indirect
diff --git a/go.sum b/go.sum
index 51c952987..aac950201 100644
--- a/go.sum
+++ b/go.sum
@@ -1,4 +1,7 @@ +4d63.com/gochecknoglobals v0.0.0-20201008074935-acfc0b28355a h1:wFEQiK85fRsEVF0CRrPAos5LoAryUsIX1kPW/WrIqFw= +4d63.com/gochecknoglobals v0.0.0-20201008074935-acfc0b28355a/go.mod h1:wfdC5ZjKSPr7CybKEcgJhUOgeAQW1+7WcyK8OvUilfo= bazil.org/fuse v0.0.0-20160811212531-371fbbdaa898/go.mod h1:Xbm+BRKSBEpa4q4hTSxohYNQpsxXPbPry4JJWOB3LB8= +bitbucket.org/creachadair/shell v0.0.6/go.mod h1:8Qqi/cYk7vPnsOePHroKXDJYmb5x7ENhtiFtfZq8K+M= cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= cloud.google.com/go v0.38.0/go.mod h1:990N+gfupTy94rShfmMCWGDn0LpTmnzTp2qbd1dvSRU= @@ -12,8 +15,14 @@ cloud.google.com/go v0.53.0/go.mod h1:fp/UouUEsRkN6ryDKNW/Upv/JBKnv6WDthjR6+vze6 cloud.google.com/go v0.54.0/go.mod h1:1rq2OEkV3YMf6n/9ZvGWI3GWw0VoqH/1x2nd8Is/bPc= cloud.google.com/go v0.56.0/go.mod h1:jr7tqZxxKOVYizybht9+26Z/gUq7tiRzu+ACVAMbKVk= cloud.google.com/go v0.57.0/go.mod h1:oXiQ6Rzq3RAkkY7N6t3TcE6jE+CIBBbA36lwQ1JyzZs= +cloud.google.com/go v0.60.0/go.mod h1:yw2G51M9IfRboUH61Us8GqCeF1PzPblB823Mn2q2eAU= cloud.google.com/go v0.62.0/go.mod h1:jmCYTdRCQuc1PHIIJ/maLInMho30T/Y0M4hTdTShOYc= cloud.google.com/go v0.65.0/go.mod h1:O5N8zS7uWy9vkA9vayVHs65eM1ubvY4h553ofrNHObY= +cloud.google.com/go v0.72.0/go.mod h1:M+5Vjvlc2wnp6tjzE102Dw08nGShTscUx2nZMufOKPI= +cloud.google.com/go v0.74.0/go.mod h1:VV1xSbzvo+9QJOxLDaJfTjx5e+MePCpCWwvftOeQmWk= +cloud.google.com/go v0.78.0/go.mod h1:QjdrLG0uq+YwhjoVOLsS1t7TW8fs36kLs4XO5R5ECHg= +cloud.google.com/go v0.79.0/go.mod h1:3bzgcEeQlzbuEAYu4mrWhKqWjmpprinYgKJLgKHnbb8= +cloud.google.com/go v0.81.0/go.mod h1:mk/AM35KwGk/Nm2YSeZbxXdrNK3KZOYHmLkOqC2V6E0= cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o= cloud.google.com/go/bigquery v1.3.0/go.mod h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE= cloud.google.com/go/bigquery v1.4.0/go.mod h1:S8dzgnTigyfTmLBfrtrhyYhwRxG72rYxvftPBK2Dvzc= @@ -22,10 +31,13 @@ cloud.google.com/go/bigquery v1.7.0/go.mod h1://okPTzCYNXSlb24MZs83e2Do+h+VXtc4g cloud.google.com/go/bigquery v1.8.0/go.mod h1:J5hqkt3O0uAFnINi6JXValWIb1v0goeZM77hZzJN/fQ= cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE= cloud.google.com/go/datastore v1.1.0/go.mod h1:umbIZjpQpHh4hmRpGhH4tLFup+FVzqBi1b3c64qFpCk= +cloud.google.com/go/firestore v1.1.0/go.mod h1:ulACoGHTpvq5r8rxGJ4ddJZBZqakUQqClKRT5SZwBmk= cloud.google.com/go/pubsub v1.0.1/go.mod
h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I= cloud.google.com/go/pubsub v1.1.0/go.mod h1:EwwdRX2sKPjnvnqCa270oGRyludottCI76h+R3AArQw= cloud.google.com/go/pubsub v1.2.0/go.mod h1:jhfEVHT8odbXTkndysNHCcx0awwzvfOlguIAii9o8iA= cloud.google.com/go/pubsub v1.3.1/go.mod h1:i+ucay31+CNRpDW4Lu78I4xXG+O1r/MAHgjpRVR+TSU= +cloud.google.com/go/pubsub v1.5.0/go.mod h1:ZEwJccE3z93Z2HWvstpri00jOg7oO4UZDtKhwDwqF0w= +cloud.google.com/go/spanner v1.7.0/go.mod h1:sd3K2gZ9Fd0vMPLXzeCrF6fq4i63Q7aTLW/lBIfBkIk= cloud.google.com/go/storage v1.0.0/go.mod h1:IhtSnM/ZTZV8YYJWCY8RULGVqBDmpoyjwiyrjsg+URw= cloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0ZeosJ0Rtdos= cloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohlUTyfDhBk= @@ -34,9 +46,12 @@ cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9 code.gitea.io/gitea-vet v0.2.1/go.mod h1:zcNbT/aJEmivCAhfmkHOlT645KNOf9W2KnkLgFjGGfE= code.gitea.io/sdk/gitea v0.15.0 h1:tsNhxDM/2N1Ohv1Xq5UWrht/esg0WmtRj4wsHVHriTg= code.gitea.io/sdk/gitea v0.15.0/go.mod h1:klY2LVI3s3NChzIk/MzMn7G1FHrfU7qd63iSMVoHRBA= +contrib.go.opencensus.io/exporter/stackdriver v0.13.4/go.mod h1:aXENhDJ1Y4lIg4EUaVTwzvYETVNZk10Pu26tevFKLUc= dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= gitea.com/xorm/sqlfiddle v0.0.0-20180821085327-62ce714f951a h1:lSA0F4e9A2NcQSqGqTOXqu2aRi/XEQxDCBwM8yJtE6s= gitea.com/xorm/sqlfiddle v0.0.0-20180821085327-62ce714f951a/go.mod h1:EXuID2Zs0pAQhH8yz+DNjUbjppKQzKFAn28TMYPB6IU= +github.com/Antonboom/errname v0.1.4 h1:lGSlI42Gm4bI1e+IITtXJXvxFM8N7naWimVFKcb0McY= +github.com/Antonboom/errname v0.1.4/go.mod h1:jRXo3m0E0EuCnK3wbsSVH3X55Z4iTDLl6ZfCxwFj4TM= github.com/Azure/azure-sdk-for-go v16.2.1+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc= github.com/Azure/go-ansiterm v0.0.0-20170929234023-d6e3b3328b78/go.mod h1:LmzpDX56iTiv29bbRTIsUNlaFfuhWRQBWjQdVyAevI8= github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1 h1:UQHMgLO+TxOElx5B5HZ4hJQsoJ/PvUvKRhJHDQXO8P8= @@ -52,9 +67,19 @@ github.com/Azure/go-autorest/autorest/mocks v0.4.1/go.mod h1:LTp+uSrOhSkaKrUy935 github.com/Azure/go-autorest/logger v0.2.0/go.mod h1:T9E3cAhj2VqvPOtCYAvby9aBXkZmbF5NWuPV8+WeEW8= github.com/Azure/go-autorest/tracing v0.6.0/go.mod h1:+vhtPC754Xsa23ID7GlGsrdKBpUA79WCAKPPZVC2DeU= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= +github.com/BurntSushi/toml v0.4.1 h1:GaI7EiDXDRfa8VshkTj7Fym7ha+y8/XxIgD2okUIjLw= +github.com/BurntSushi/toml v0.4.1/go.mod h1:CxXYINrC8qIiEnFrOxCa7Jy5BFHlXnUU2pbicEuybxQ= github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= +github.com/Djarvur/go-err113 v0.0.0-20210108212216-aea10b59be24 h1:sHglBQTwgx+rWPdisA5ynNEsoARbiCBOyGcJM4/OzsM= +github.com/Djarvur/go-err113 v0.0.0-20210108212216-aea10b59be24/go.mod h1:4UJr5HIiMZrwgkSPdsjy2uOQExX/WEILpIrO9UPGuXs= github.com/Knetic/govaluate v3.0.1-0.20171022003610-9aa49832a739+incompatible/go.mod h1:r7JcOSlj0wfOMncg0iLm8Leh48TZaKVeNIfJntJ2wa0= +github.com/Masterminds/goutils v1.1.0/go.mod h1:8cTjp+g8YejhMuvIA5y2vz3BpJxksy863GQaJW2MFNU= +github.com/Masterminds/semver v1.4.2/go.mod h1:MB6lktGJrhw8PrUyiEoblNEGEQ+RzHPF078ddwwvV3Y= +github.com/Masterminds/semver v1.5.0 h1:H65muMkzWKEuNDnfl9d70GUjFniHKHRbFPGBuZ3QEww= +github.com/Masterminds/semver v1.5.0/go.mod h1:MB6lktGJrhw8PrUyiEoblNEGEQ+RzHPF078ddwwvV3Y= github.com/Masterminds/semver/v3 v3.1.1/go.mod 
h1:VPu/7SZ7ePZ3QOrcuXROw5FAcLl4a0cBrbBpGY/8hQs= +github.com/Masterminds/sprig v2.15.0+incompatible/go.mod h1:y6hNFY5UBTIWBxnzTeuNhlNS5hqE0NB0E6fgfo2Br3o= +github.com/Masterminds/sprig v2.22.0+incompatible/go.mod h1:y6hNFY5UBTIWBxnzTeuNhlNS5hqE0NB0E6fgfo2Br3o= github.com/Microsoft/go-winio v0.4.11/go.mod h1:VhR8bwka0BXejwEJY73c50VrPtXAaKcyvVC4A4RozmA= github.com/Microsoft/go-winio v0.4.14/go.mod h1:qXqCSQ3Xa7+6tgxaGTIe4Kpcdsi+P8jBhyzoq1bpyYA= github.com/Microsoft/go-winio v0.4.15-0.20190919025122-fc70bd9a86b5/go.mod h1:tTuCMEN+UleMWgg9dVx4Hu52b1bJo+59jBh3ajtinzw= @@ -77,11 +102,14 @@ github.com/Microsoft/hcsshim/test v0.0.0-20201218223536-d3e5debf77da/go.mod h1:5 github.com/Microsoft/hcsshim/test v0.0.0-20210227013316-43a75bb4edd3/go.mod h1:mw7qgWloBUl75W/gVH3cQszUg1+gUITj7D6NY7ywVnY= github.com/NYTimes/gziphandler v0.0.0-20170623195520-56545f4a5d46/go.mod h1:3wb06e3pkSAbeQ52E9H9iFoQsEEwGN64994WTCIhntQ= github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU= +github.com/OpenPeeDeeP/depguard v1.0.1 h1:VlW4R6jmBIv3/u1JNlawEvJMM4J+dPORPaZasQee8Us= +github.com/OpenPeeDeeP/depguard v1.0.1/go.mod h1:xsIw86fROiiwelg+jB2uM9PiKihMMmUx/1V+TNhjQvM= github.com/PuerkitoBio/purell v1.1.1/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbtSwDGJws/X0= github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578/go.mod h1:uGdkoq3SwY9Y+13GIhn11/XLaGBb4BfwItxLd5jeuXE= github.com/Shopify/logrus-bugsnag v0.0.0-20171204204709-577dee27f20d/go.mod h1:HI8ITrYtUY+O+ZhtlqUnD8+KwNPOyugEhfP9fdUIaEQ= github.com/Shopify/sarama v1.19.0/go.mod h1:FVkBWblsNy7DGZRfXLU0O9RCGt5g3g3yEuWXgklEdEo= github.com/Shopify/toxiproxy v2.1.4+incompatible/go.mod h1:OXgGpZ6Cli1/URJOF1DMxUHB2q5Ap20/P/eIdh4G0pI= +github.com/StackExchange/wmi v1.2.1/go.mod h1:rcmrprowKIVzvc+NUiLncP2uuArMWLCbu9SBzvHz7e8= github.com/VividCortex/gohistogram v1.0.0/go.mod h1:Pf5mBqqDxYaXu3hDrrU+w6nw50o/4+TcAqDqk/vUH7g= github.com/afex/hystrix-go v0.0.0-20180502004556-fa1af6a1f4f5/go.mod h1:SkGFH1ia65gfNATL8TAiHDNxPzPdmEL5uirI2Uyuz6c= github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= @@ -90,7 +118,12 @@ github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRF github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d/go.mod h1:rBZYJk541a8SKzHPHnH3zbiI+7dagKZ0cgpgrD7Fyho= github.com/alexflint/go-filemutex v0.0.0-20171022225611-72bdc8eae2ae/go.mod h1:CgnQgUtFrFz9mxFNtED3jI5tLDjKlOM+oUF/sTk6ps0= +github.com/alexkohler/prealloc v1.0.0 h1:Hbq0/3fJPQhNkN0dR95AVrr6R7tou91y0uHG5pOcUuw= +github.com/alexkohler/prealloc v1.0.0/go.mod h1:VetnK3dIgFBBKmg0YnD9F9x6Icjd+9cvfHR56wJVlKE= +github.com/andybalholm/brotli v1.0.0/go.mod h1:loMXtMfwqflxFJPmdbJO0a3KNoPuLBgiu3qAvBg8x/Y= +github.com/antihax/optional v0.0.0-20180407024304-ca021399b1a6/go.mod h1:V8iCPQYkqmusNa815XgQio277wI47sdRh1dUOLdyC6Q= github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY= +github.com/aokoli/goutils v1.0.1/go.mod h1:SijmP0QR8LtwsmDs8Yii5Z/S4trXFGFC2oO5g9DP+DQ= github.com/apache/thrift v0.12.0/go.mod h1:cp2SuWMxlEZw2r+iP2GNCdIi4C1qmUzdZFSVb+bacwQ= github.com/apache/thrift v0.13.0/go.mod h1:cp2SuWMxlEZw2r+iP2GNCdIi4C1qmUzdZFSVb+bacwQ= github.com/armon/circbuf v0.0.0-20150827004946-bbbad097214e/go.mod h1:3U/XgcO3hCbHZ8TKRvWD2dDTCfh9M9ya+I9JpbB7O8o= @@ -99,9 +132,16 @@ github.com/armon/go-metrics 
v0.0.0-20180917152333-f0300d1749da/go.mod h1:Q73ZrmV github.com/armon/go-radix v0.0.0-20180808171621-7fddfc383310/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8= github.com/aryann/difflib v0.0.0-20170710044230-e206f873d14a/go.mod h1:DAHtR1m6lCRdSC2Tm3DSWRPvIPr6xNKyeHdqDQSQT+A= github.com/asaskevich/govalidator v0.0.0-20190424111038-f61b66f89f4a/go.mod h1:lB+ZfQJz7igIIfQNfa7Ml4HSf2uFQQRzpGGRXenZAgY= +github.com/ashanbrown/forbidigo v1.2.0 h1:RMlEFupPCxQ1IogYOQUnIQwGEUGK8g5vAPMRyJoSxbc= +github.com/ashanbrown/forbidigo v1.2.0/go.mod h1:vVW7PEdqEFqapJe95xHkTfB1+XvZXBFg8t0sG2FIxmI= +github.com/ashanbrown/makezero v0.0.0-20210520155254-b6261585ddde h1:YOsoVXsZQPA9aOTy1g0lAJv5VzZUvwQuZqug8XPeqfM= +github.com/ashanbrown/makezero v0.0.0-20210520155254-b6261585ddde/go.mod h1:oG9Dnez7/ESBqc4EdrdNlryeo7d0KcW1ftXHm7nU/UU= github.com/aws/aws-lambda-go v1.13.3/go.mod h1:4UKl9IzQMoD+QF79YdCuzCwp8VbmG4VAQwij/eHl5CU= github.com/aws/aws-sdk-go v1.15.11/go.mod h1:mFuSZ37Z9YOHbQEwBWztmVzqXrEkub65tZoCYDt7FT0= +github.com/aws/aws-sdk-go v1.23.20/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo= +github.com/aws/aws-sdk-go v1.25.37/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo= github.com/aws/aws-sdk-go v1.27.0/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo= +github.com/aws/aws-sdk-go v1.36.30/go.mod h1:hcU610XS61/+aQV88ixoOzUoG7v3b31pl2zKMmprdro= github.com/aws/aws-sdk-go-v2 v0.18.0/go.mod h1:JWVYvqSMppoMJC0x5wdwiImzgXTI9FuZwxzkQq9wy+g= github.com/beorn7/perks v0.0.0-20160804104726-4c0e84591b9a/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q= github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q= @@ -111,11 +151,17 @@ github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6r github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs= github.com/bitly/go-simplejson v0.5.0/go.mod h1:cXHtHw4XUPsvGaxgjIAn8PhEWG9NfngEKAMDJEczWVA= github.com/bits-and-blooms/bitset v1.2.0/go.mod h1:gIdJ4wp64HaoK2YrL1Q5/N7Y16edYb8uY+O0FJTyyDA= +github.com/bketelsen/crypt v0.0.3-0.20200106085610-5cbc8cc4026c/go.mod h1:MKsuJmJgSg28kpZDP6UIiPt0e0Oz0kqKNGyRaWEPv84= +github.com/bketelsen/crypt v0.0.4/go.mod h1:aI6NrJ0pMGgvZKL1iVgXLnfIFJtfV+bKCoqOes/6LfM= +github.com/bkielbasa/cyclop v1.2.0 h1:7Jmnh0yL2DjKfw28p86YTd/B4lRGcNuu12sKE35sM7A= +github.com/bkielbasa/cyclop v1.2.0/go.mod h1:qOI0yy6A7dYC4Zgsa72Ppm9kONl0RoIlPbzot9mhmeI= github.com/blang/semver v3.1.0+incompatible/go.mod h1:kRBLl5iJ+tD4TcOOxsy/0fnwebNt5EWlYSAyrTnjyyk= github.com/blang/semver v3.5.1+incompatible/go.mod h1:kRBLl5iJ+tD4TcOOxsy/0fnwebNt5EWlYSAyrTnjyyk= github.com/bmatcuk/doublestar/v4 v4.0.2 h1:X0krlUVAVmtr2cRoTqR8aDMrDqnB36ht8wpWTiQ3jsA= github.com/bmatcuk/doublestar/v4 v4.0.2/go.mod h1:xBQ8jztBU6kakFMg+8WGxn0c6z1fTSPVIjEY1Wr7jzc= github.com/bmizerany/assert v0.0.0-20160611221934-b7ed37b82869/go.mod h1:Ekp36dRnpXw/yCqJaO+ZrUyxD+3VXMFFr56k5XYrpB4= +github.com/bombsimon/wsl/v3 v3.3.0 h1:Mka/+kRLoQJq7g2rggtgQsjuI/K5Efd87WX96EWFxjM= +github.com/bombsimon/wsl/v3 v3.3.0/go.mod h1:st10JtZYLE4D5sC7b8xV4zTKZwAQjCH/Hy2Pm1FNZIc= github.com/bshuster-repo/logrus-logstash-hook v0.4.1/go.mod h1:zsTqEiSzDgAa/8GZR7E1qaXrhYNDKBYy5/dWPTIflbk= github.com/buger/jsonparser v0.0.0-20180808090653-f4dd9f5a6b44/go.mod h1:bbYlZJ7hK1yFx9hf58LP0zeX7UjIGs20ufpu3evjr+s= github.com/bugsnag/bugsnag-go v0.0.0-20141110184014-b1d153021fcd/go.mod h1:2oa8nejYd4cQ/b0hMIopN0lCRxU0bueqREvZLWFrtK8= @@ -128,6 +174,10 @@ github.com/cespare/xxhash v1.1.0 
h1:a6HrQnmkObjyL+Gs60czilIUGqrzKutQD6XZog3p+ko= github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc= github.com/cespare/xxhash/v2 v2.1.1 h1:6MnRN8NT7+YBpUIWxHtefFZOKTAPgGjpQSxqLNn0+qY= github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/charithe/durationcheck v0.0.8 h1:cnZrThioNW9gSV5JsRIXmkyHUbcDH7Y9hkzFDVc9/j0= +github.com/charithe/durationcheck v0.0.8/go.mod h1:SSbRIBVfMjCi/kEB6K65XEA83D6prSM8ap1UCpNKtgg= +github.com/chavacava/garif v0.0.0-20210405164556-e8a0a408d6af h1:spmv8nSH9h5oCQf40jt/ufBCt9j0/58u4G+rkeMqXGI= +github.com/chavacava/garif v0.0.0-20210405164556-e8a0a408d6af/go.mod h1:Qjyv4H3//PWVzTeCezG2b9IRn6myJxJSr4TD/xo6ojU= github.com/checkpoint-restore/go-criu/v4 v4.1.0/go.mod h1:xUQBLp4RLc5zJtWY++yjOoMoB5lihDt7fai+75m+rGw= github.com/checkpoint-restore/go-criu/v5 v5.0.0/go.mod h1:cfwC0EG7HMUenopBsUf9d89JlCLQIfgVcNsNN0t6T2M= github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= @@ -141,6 +191,7 @@ github.com/cilium/ebpf v0.6.2/go.mod h1:4tRaxcgiL706VnOzHOdBlY8IEAIdxINsQBcU4xJJ github.com/clbanning/x2j v0.0.0-20191024224557-825249438eec/go.mod h1:jMjuTZXRI4dUb/I5gc9Hdhagfvm9+RyrPryS/auMzxE= github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= +github.com/cncf/udpa/go v0.0.0-20200629203442-efcf912fb354/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= github.com/cncf/xds/go v0.0.0-20210312221358-fbca930ec8ed/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/cncf/xds/go v0.0.0-20210805033703-aa0b78936158/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= @@ -232,6 +283,8 @@ github.com/containers/ocicrypt v1.1.0/go.mod h1:b8AOe0YR67uU8OqfVNcznfFpAzu3rdgU github.com/containers/ocicrypt v1.1.1/go.mod h1:Dm55fwWm1YZAjYRaJ94z2mfZikIyIN4B0oB3dj3jFxY= github.com/coreos/bbolt v1.3.2/go.mod h1:iRUV2dpdMOn7Bo10OQBFzIJO9kkE559Wcmn+qkEiiKk= github.com/coreos/etcd v3.3.10+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE= +github.com/coreos/etcd v3.3.13+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE= +github.com/coreos/go-etcd v2.0.0+incompatible/go.mod h1:Jez6KQU2B/sWsbdaef3ED8NzMklzPG4d5KIOhIy30Tk= github.com/coreos/go-iptables v0.4.5/go.mod h1:/mVI274lEDI2ns62jHCDnCyBF9Iwsmekav8Dbxlm1MU= github.com/coreos/go-iptables v0.5.0/go.mod h1:/mVI274lEDI2ns62jHCDnCyBF9Iwsmekav8Dbxlm1MU= github.com/coreos/go-oidc v2.1.0+incompatible/go.mod h1:CgnwVTmzoESiwO9qyAFEMiHoZ1nMCKZlZ9V6mm3/LKc= @@ -240,12 +293,15 @@ github.com/coreos/go-semver v0.3.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3Ee github.com/coreos/go-systemd v0.0.0-20161114122254-48702e0da86b/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= github.com/coreos/go-systemd v0.0.0-20180511133405-39ca1b05acc7/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= github.com/coreos/go-systemd v0.0.0-20190321100706-95778dfbb74e/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= +github.com/coreos/go-systemd v0.0.0-20190620071333-e64a0ec8b42a/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= github.com/coreos/go-systemd v0.0.0-20190719114852-fd7a80b32e1f/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= github.com/coreos/go-systemd/v22 v22.0.0/go.mod 
h1:xO0FLkIi5MaZafQlIrOotqXZ90ih+1atmu1JpKERPPk= github.com/coreos/go-systemd/v22 v22.1.0/go.mod h1:xO0FLkIi5MaZafQlIrOotqXZ90ih+1atmu1JpKERPPk= github.com/coreos/go-systemd/v22 v22.3.2/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc= github.com/coreos/pkg v0.0.0-20160727233714-3ac0863d7acf/go.mod h1:E3G3o1h8I7cfcXa63jLwjI0eiQQMgzzUDFVpN/nH/eA= github.com/coreos/pkg v0.0.0-20180928190104-399ea9e2e55f/go.mod h1:E3G3o1h8I7cfcXa63jLwjI0eiQQMgzzUDFVpN/nH/eA= +github.com/cpuguy83/go-md2man v1.0.10 h1:BSKMNlYxDvnunlTymqtgONjNnaRV1sTpcovwwjF22jk= +github.com/cpuguy83/go-md2man v1.0.10/go.mod h1:SmD6nW6nTyfqj6ABTjUi3V3JVMnlJmwcJI5acqYI6dE= github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= github.com/cpuguy83/go-md2man/v2 v2.0.0/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= github.com/cpuguy83/go-md2man/v2 v2.0.1 h1:r/myEWzV9lfsM1tFLgDyu0atFtJ1fXn261LKYj/3DxU= @@ -259,10 +315,15 @@ github.com/d2g/dhcp4 v0.0.0-20170904100407-a1d1b6c41b1c/go.mod h1:Ct2BUK8SB0YC1S github.com/d2g/dhcp4client v1.0.0/go.mod h1:j0hNfjhrt2SxUOw55nL0ATM/z4Yt3t2Kd1mW34z5W5s= github.com/d2g/dhcp4server v0.0.0-20181031114812-7d4a0a7f59a5/go.mod h1:Eo87+Kg/IX2hfWJfwxMzLyuSZyxSoAug2nGa1G2QAi8= github.com/d2g/hardwareaddr v0.0.0-20190221164911-e7d9fbe030e4/go.mod h1:bMl4RjIciD2oAxI7DmWRx6gbeqrkoLqv3MV0vzNad+I= +github.com/daixiang0/gci v0.2.9 h1:iwJvwQpBZmMg31w+QQ6jsyZ54KEATn6/nfARbBNW294= +github.com/daixiang0/gci v0.2.9/go.mod h1:+4dZ7TISfSmqfAGv59ePaHfNzgGtIkHAhhdKggP1JAc= github.com/danieljoos/wincred v1.1.0/go.mod h1:XYlo+eRTsVA9aHGp7NGjFkPla4m+DCL7hqDjlFjiygg= +github.com/davecgh/go-spew v0.0.0-20161028175848-04cdfd42973b/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/denis-tingajkin/go-header v0.4.2 h1:jEeSF4sdv8/3cT/WY8AgDHUoItNSoEZ7qg9dX7pc218= +github.com/denis-tingajkin/go-header v0.4.2/go.mod h1:eLRHAVXzE5atsKAnNRDB90WHCFFnBUn4RN0nRcs1LJA= github.com/denisenkom/go-mssqldb v0.10.0/go.mod h1:xbL0rPBG9cCiLr28tMa8zpbdarY27NDyej4t/EjAShU= github.com/denverdino/aliyungo v0.0.0-20190125010748-a747050bb1ba/go.mod h1:dV8lFg6daOBZbT6/BDGIz6Y3WFGn8juu6G+CQ6LHtl0= github.com/dgrijalva/jwt-go v0.0.0-20170104182250-a601269ab70c/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ= @@ -305,12 +366,25 @@ github.com/envoyproxy/go-control-plane v0.6.9/go.mod h1:SBwIajubJHhxtWwsL9s8ss4s github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98= +github.com/envoyproxy/go-control-plane v0.9.7/go.mod h1:cwu0lG7PUMfa9snN8LXBig5ynNVH9qI8YYLbd1fK2po= github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= +github.com/envoyproxy/go-control-plane v0.9.9-0.20210217033140-668b12f5399d/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= github.com/envoyproxy/go-control-plane v0.9.9-0.20210512163311-63b5d3c536b0/go.mod h1:hliV/p42l8fGbc6Y9bQ70uLwIvmJyVE5k4iMKlh8wCQ= github.com/envoyproxy/go-control-plane 
v0.9.10-0.20210907150352-cf90f659a021/go.mod h1:AFq3mo9L8Lqqiid3OhADV3RfLJnjiw63cSpi+fDTRC0= +github.com/envoyproxy/protoc-gen-validate v0.0.14/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= +github.com/esimonov/ifshort v1.0.2 h1:K5s1W2fGfkoWXsFlxBNqT6J0ZCncPaKrGM5qe0bni68= +github.com/esimonov/ifshort v1.0.2/go.mod h1:yZqNJUrNn20K8Q9n2CrjTKYyVEmX209Hgu+M1LBpeZE= +github.com/ettle/strcase v0.1.1 h1:htFueZyVeE1XNnMEfbqp5r67qAN/4r6ya1ysq8Q+Zcw= +github.com/ettle/strcase v0.1.1/go.mod h1:hzDLsPC7/lwKyBOywSHEP89nt2pDgdy+No1NBA9o9VY= github.com/evanphx/json-patch v4.9.0+incompatible/go.mod h1:50XU6AFN0ol/bzJsmQLiYLvXMP4fmwYFNcr97nuDLSk= github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4= +github.com/fatih/color v1.9.0/go.mod h1:eQcE1qtQxscV5RaZvpXrrb8Drkc3/DdQ+uUYCNjL+zU= +github.com/fatih/color v1.10.0/go.mod h1:ELkj/draVOlAH/xkhN6mQ50Qd0MPOk5AAr3maGEBuJM= +github.com/fatih/color v1.12.0 h1:mRhaKNwANqRgUBGKmnI5ZxEk7QXmjQeCcuYFMX2bfcc= +github.com/fatih/color v1.12.0/go.mod h1:ELkj/draVOlAH/xkhN6mQ50Qd0MPOk5AAr3maGEBuJM= +github.com/fatih/structtag v1.2.0 h1:/OdNE99OxoI/PqaW/SuSK9uxxT3f/tcSZgon/ssNSx4= +github.com/fatih/structtag v1.2.0/go.mod h1:mBJUNpUnHmRKrKlQQlmCrh5PuhftFbNv8Ys4/aAZl94= github.com/flynn/go-shlex v0.0.0-20150515145356-3f9db97f8568 h1:BHsljHzVlRcyQhjrss6TZTdY2VfCqZPbv5k3iBFa2ZQ= github.com/flynn/go-shlex v0.0.0-20150515145356-3f9db97f8568/go.mod h1:xEzjJPgXI435gkrCt3MPfRiAkVrwSbHsst4LCFVfpJc= github.com/form3tech-oss/jwt-go v3.2.2+incompatible/go.mod h1:pbq4aXjuKjdthFRnoDwaVPLA+WlJuPGy+QneDUgJi2k= @@ -323,6 +397,9 @@ github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMo github.com/fsnotify/fsnotify v1.4.9 h1:hsms1Qyu0jgnwNXIxa+/V/PDsU6CfLf6CNO8H7IWoS4= github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ= github.com/fullsailor/pkcs7 v0.0.0-20190404230743-d7302db945fa/go.mod h1:KnogPXtdwXqoenmZCw6S+25EAm2MkxbG0deNDu4cbSA= +github.com/fullstorydev/grpcurl v1.6.0/go.mod h1:ZQ+ayqbKMJNhzLmbpCiurTVlaK2M/3nqZCxaQ2Ze/sM= +github.com/fzipp/gocyclo v0.3.1 h1:A9UeX3HJSXTBzvHzhqoYVuE0eAhe+aM8XBCCwsPMZOc= +github.com/fzipp/gocyclo v0.3.1/go.mod h1:DJHO6AUmbdqj2ET4Z9iArSuwWgYDRryYt2wASxc7x3E= github.com/garyburd/redigo v0.0.0-20150301180006-535138d7bcd7/go.mod h1:NR3MbYisc3/PwhQ00EMzDiPmrwpPxAn5GI05/YaO1SY= github.com/ghodss/yaml v0.0.0-20150909031657-73d445a93680/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= github.com/ghodss/yaml v1.0.0 h1:wQHKEahhL6wmXdzwWG11gIVCkOv05bNOh+Rxn0yngAk= @@ -331,6 +408,8 @@ github.com/gin-contrib/sse v0.1.0 h1:Y/yl/+YNO8GZSjAhjMsSuLt29uWRFHdHYUb5lYOV9qE github.com/gin-contrib/sse v0.1.0/go.mod h1:RHrZQHXnP2xjPF+u1gW/2HnVO7nvIa9PG3Gm+fLHvGI= github.com/gin-gonic/gin v1.7.4 h1:QmUZXrvJ9qZ3GfWvQ+2wnW/1ePrTEJqPKMYEU3lD/DM= github.com/gin-gonic/gin v1.7.4/go.mod h1:jD2toBW3GZUr5UMcdrwQA10I7RuaFOl/SGeDjXkfUtY= +github.com/go-critic/go-critic v0.5.6 h1:siUR1+322iVikWXoV75I1YRfNaC/yaLzhdF9Zwd8Tus= +github.com/go-critic/go-critic v0.5.6/go.mod h1:cVjj0DfqewQVIlIAGexPCaGaZDAqGE29PYDDADIVNEo= github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= @@ -344,6 
+423,7 @@ github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V github.com/go-logfmt/logfmt v0.5.0/go.mod h1:wCYkCAKZfumFQihp8CzCvQ3paCTfi41vtzG1KdI/P7A= github.com/go-logr/logr v0.1.0/go.mod h1:ixOQHD9gLJUVQQ2ZOR7zLEifBX6tGkNJF4QyIY7sIas= github.com/go-logr/logr v0.2.0/go.mod h1:z6/tIYblkpsD+a4lm/fGIIU9mZ+XfAiaFtq7xTgseGU= +github.com/go-ole/go-ole v1.2.5/go.mod h1:pprOEPIfldk/42T2oK7lQ4v4JSDwmV0As9GaiUsvbm0= github.com/go-openapi/jsonpointer v0.19.2/go.mod h1:3akKfEdA7DF1sugOqz1dVQHBcuDBPKZGEoHC/NkiQRg= github.com/go-openapi/jsonpointer v0.19.3/go.mod h1:Pl9vOtqEWErmShwVjC8pYs9cog34VGT37dQOVbmoatg= github.com/go-openapi/jsonreference v0.19.2/go.mod h1:jMjeRr2HHw6nAVajTXJ4eiUwohSTlpa0o73RUL1owJc= @@ -362,10 +442,35 @@ github.com/go-playground/universal-translator v0.18.0/go.mod h1:UvRDBj+xPUEGrFYl github.com/go-playground/validator/v10 v10.4.1/go.mod h1:nlOn6nFhuKACm19sB/8EGNn9GlaMV7XkbRSipzJ0Ii4= github.com/go-playground/validator/v10 v10.9.0 h1:NgTtmN58D0m8+UuxtYmGztBJB7VnPgjj221I1QHci2A= github.com/go-playground/validator/v10 v10.9.0/go.mod h1:74x4gJWsvQexRdW8Pn3dXSGrTK4nAUsbPlLADvpJkos= +github.com/go-redis/redis v6.15.8+incompatible/go.mod h1:NAIEuMOZ/fxfXJIrKDQDz8wamY7mA7PouImQ2Jvg6kA= github.com/go-sql-driver/mysql v1.4.0/go.mod h1:zAC/RDZ24gD3HViQzih4MyKcchzm+sOG5ZlKdlhCg5w= +github.com/go-sql-driver/mysql v1.5.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg= github.com/go-sql-driver/mysql v1.6.0 h1:BCTh4TKNUYmOmMUcQ3IipzF5prigylS7XXjEkfCHuOE= github.com/go-sql-driver/mysql v1.6.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg= github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= +github.com/go-task/slim-sprig v0.0.0-20210107165309-348f09dbbbc0/go.mod h1:fyg7847qk6SyHyPtNmDHnmrv/HOrqktSC+C9fM+CJOE= +github.com/go-toolsmith/astcast v1.0.0 h1:JojxlmI6STnFVG9yOImLeGREv8W2ocNUM+iOhR6jE7g= +github.com/go-toolsmith/astcast v1.0.0/go.mod h1:mt2OdQTeAQcY4DQgPSArJjHCcOwlX+Wl/kwN+LbLGQ4= +github.com/go-toolsmith/astcopy v1.0.0 h1:OMgl1b1MEpjFQ1m5ztEO06rz5CUd3oBv9RF7+DyvdG8= +github.com/go-toolsmith/astcopy v1.0.0/go.mod h1:vrgyG+5Bxrnz4MZWPF+pI4R8h3qKRjjyvV/DSez4WVQ= +github.com/go-toolsmith/astequal v1.0.0 h1:4zxD8j3JRFNyLN46lodQuqz3xdKSrur7U/sr0SDS/gQ= +github.com/go-toolsmith/astequal v1.0.0/go.mod h1:H+xSiq0+LtiDC11+h1G32h7Of5O3CYFJ99GVbS5lDKY= +github.com/go-toolsmith/astfmt v1.0.0 h1:A0vDDXt+vsvLEdbMFJAUBI/uTbRw1ffOPnxsILnFL6k= +github.com/go-toolsmith/astfmt v1.0.0/go.mod h1:cnWmsOAuq4jJY6Ct5YWlVLmcmLMn1JUPuQIHCY7CJDw= +github.com/go-toolsmith/astinfo v0.0.0-20180906194353-9809ff7efb21/go.mod h1:dDStQCHtmZpYOmjRP/8gHHnCCch3Zz3oEgCdZVdtweU= +github.com/go-toolsmith/astp v1.0.0 h1:alXE75TXgcmupDsMK1fRAy0YUzLzqPVvBKoyWV+KPXg= +github.com/go-toolsmith/astp v1.0.0/go.mod h1:RSyrtpVlfTFGDYRbrjyWP1pYu//tSFcvdYrA8meBmLI= +github.com/go-toolsmith/pkgload v1.0.0 h1:4DFWWMXVfbcN5So1sBNW9+yeiMqLFGl1wFLTL5R0Tgg= +github.com/go-toolsmith/pkgload v1.0.0/go.mod h1:5eFArkbO80v7Z0kdngIxsRXRMTaX4Ilcwuh3clNrQJc= +github.com/go-toolsmith/strparse v1.0.0 h1:Vcw78DnpCAKlM20kSbAyO4mPfJn/lyYA4BJUDxe2Jb4= +github.com/go-toolsmith/strparse v1.0.0/go.mod h1:YI2nUKP9YGZnL/L1/DLFBfixrcjslWct4wyljWhSRy8= +github.com/go-toolsmith/typep v1.0.0/go.mod h1:JSQCQMUPdRlMZFswiq3TGpNp1GMktqkR2Ns5AIQkATU= +github.com/go-toolsmith/typep v1.0.2 h1:8xdsa1+FSIH/RhEkgnD1j2CJOy5mNllW1Q9tRiYwvlk= +github.com/go-toolsmith/typep v1.0.2/go.mod h1:JSQCQMUPdRlMZFswiq3TGpNp1GMktqkR2Ns5AIQkATU= +github.com/go-xmlfmt/xmlfmt v0.0.0-20191208150333-d5b6f63a941b 
h1:khEcpUM4yFcxg4/FHQWkvVRmgijNXRfzkIDHh23ggEo= +github.com/go-xmlfmt/xmlfmt v0.0.0-20191208150333-d5b6f63a941b/go.mod h1:aUCEOzzezBEjDBbFBoSiya/gduyIiWYRP6CnSFIV8AM= +github.com/gobwas/glob v0.2.3 h1:A4xDbljILXROh+kObIiy5kIaPYD8e96x1tgBhUI5J+Y= +github.com/gobwas/glob v0.2.3/go.mod h1:d3Ez4x06l9bZtSvzIay5+Yzi0fmZzPgnTbPcKjJAkT8= github.com/goccy/go-json v0.7.4 h1:B44qRUFwz/vxPKPISQ1KhvzRi9kZ28RAf6YtjriBZ5k= github.com/goccy/go-json v0.7.4/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I= github.com/godbus/dbus v0.0.0-20151105175453-c7fdd8b5cd55/go.mod h1:/YcGZj5zSblfDWMMoOzV4fas9FZnQYTkDnsGvmh2Grw= @@ -373,6 +478,8 @@ github.com/godbus/dbus v0.0.0-20180201030542-885f9cc04c9c/go.mod h1:/YcGZj5zSblf github.com/godbus/dbus v0.0.0-20190422162347-ade71ed3457e/go.mod h1:bBOAhwG1umN6/6ZUMtDFBMQR8jRg9O75tm9K00oMsK4= github.com/godbus/dbus/v5 v5.0.3/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA= github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA= +github.com/gofrs/flock v0.8.1 h1:+gYjHKf32LDeiEEFhQaotPbLuUXjY5ZqxKgXy7n59aw= +github.com/gofrs/flock v0.8.1/go.mod h1:F1TvTiK9OcQqauNUHlbJvyl9Qa1QvF/gOUDKA14jxHU= github.com/gofrs/uuid v3.2.0+incompatible/go.mod h1:b2aQJv3Z4Fp6yNu3cdSllBxTCLRxnplIgP/c0N/04lM= github.com/gofrs/uuid v4.0.0+incompatible/go.mod h1:b2aQJv3Z4Fp6yNu3cdSllBxTCLRxnplIgP/c0N/04lM= github.com/gogits/go-gogs-client v0.0.0-20210131175652-1d7215cd8d85 h1:04sojTxgYxu1L4Hn7Tgf7UVtIosVa6CuHtvNY+7T1K4= @@ -404,6 +511,8 @@ github.com/golang/mock v1.4.0/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt github.com/golang/mock v1.4.1/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= github.com/golang/mock v1.4.3/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= github.com/golang/mock v1.4.4/go.mod h1:l3mdAwkq5BuhzHwde/uurv3sEJeZMXNpwsxVWU71h+4= +github.com/golang/mock v1.5.0/go.mod h1:CWnOUgYIOo4TcNZ0wHX3YZCqsaM1I1Jvs6v3mP3KVu8= +github.com/golang/protobuf v1.1.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= @@ -419,12 +528,35 @@ github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QD github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= +github.com/golang/protobuf v1.5.1/go.mod h1:DopwsBzvsk0Fs44TXzsVbJyPhcCPeIwnvohx4u74HPM= github.com/golang/protobuf v1.5.2 h1:ROPKBNFfQgOUMifHyP+KYbvpjbdoFNs+aK7DXlji0Tw= github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= github.com/golang/snappy v0.0.0-20180518054509-2e65f85255db h1:woRePGFeVFfLKN/pOkfl+p/TAqKOfFu+7KPlMVpok/w= github.com/golang/snappy v0.0.0-20180518054509-2e65f85255db/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= +github.com/golangci/check v0.0.0-20180506172741-cfe4005ccda2 h1:23T5iq8rbUYlhpt5DB4XJkc6BU31uODLD1o1gKvZmD0= +github.com/golangci/check v0.0.0-20180506172741-cfe4005ccda2/go.mod h1:k9Qvh+8juN+UKMCS/3jFtGICgW8O96FVaZsaxdzDkR4= +github.com/golangci/dupl v0.0.0-20180902072040-3e9179ac440a h1:w8hkcTqaFpzKqonE9uMCefW1WDie15eSP/4MssdenaM= +github.com/golangci/dupl v0.0.0-20180902072040-3e9179ac440a/go.mod 
h1:ryS0uhF+x9jgbj/N71xsEqODy9BN81/GonCZiOzirOk= +github.com/golangci/go-misc v0.0.0-20180628070357-927a3d87b613 h1:9kfjN3AdxcbsZBf8NjltjWihK2QfBBBZuv91cMFfDHw= +github.com/golangci/go-misc v0.0.0-20180628070357-927a3d87b613/go.mod h1:SyvUF2NxV+sN8upjjeVYr5W7tyxaT1JVtvhKhOn2ii8= +github.com/golangci/gofmt v0.0.0-20190930125516-244bba706f1a h1:iR3fYXUjHCR97qWS8ch1y9zPNsgXThGwjKPrYfqMPks= +github.com/golangci/gofmt v0.0.0-20190930125516-244bba706f1a/go.mod h1:9qCChq59u/eW8im404Q2WWTrnBUQKjpNYKMbU4M7EFU= +github.com/golangci/golangci-lint v1.42.1 h1:nC4WyrbdnNdohDVUoNKjy/4N4FTM1gCFaVeXecy6vzM= +github.com/golangci/golangci-lint v1.42.1/go.mod h1:MuInrVlgg2jq4do6XI1jbkErbVHVbwdrLLtGv6p2wPI= +github.com/golangci/lint-1 v0.0.0-20191013205115-297bf364a8e0 h1:MfyDlzVjl1hoaPzPD4Gpb/QgoRfSBR0jdhwGyAWwMSA= +github.com/golangci/lint-1 v0.0.0-20191013205115-297bf364a8e0/go.mod h1:66R6K6P6VWk9I95jvqGxkqJxVWGFy9XlDwLwVz1RCFg= +github.com/golangci/maligned v0.0.0-20180506175553-b1d89398deca h1:kNY3/svz5T29MYHubXix4aDDuE3RWHkPvopM/EDv/MA= +github.com/golangci/maligned v0.0.0-20180506175553-b1d89398deca/go.mod h1:tvlJhZqDe4LMs4ZHD0oMUlt9G2LWuDGoisJTBzLMV9o= +github.com/golangci/misspell v0.3.5 h1:pLzmVdl3VxTOncgzHcvLOKirdvcx/TydsClUQXTehjo= +github.com/golangci/misspell v0.3.5/go.mod h1:dEbvlSfYbMQDtrpRMQU675gSDLDNa8sCPPChZ7PhiVA= +github.com/golangci/revgrep v0.0.0-20210208091834-cd28932614b5 h1:c9Mqqrm/Clj5biNaG7rABrmwUq88nHh0uABo2b/WYmc= +github.com/golangci/revgrep v0.0.0-20210208091834-cd28932614b5/go.mod h1:LK+zW4MpyytAWQRz0M4xnzEk50lSvqDQKfx304apFkY= +github.com/golangci/unconvert v0.0.0-20180507085042-28b1c447d1f4 h1:zwtduBRr5SSWhqsYNgcuWO2kFlpdOZbP0+yRjmvPGys= +github.com/golangci/unconvert v0.0.0-20180507085042-28b1c447d1f4/go.mod h1:Izgrg8RkN3rCIMLGE9CyYmU9pY2Jer6DgANEnZ/L/cQ= github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= +github.com/google/certificate-transparency-go v1.0.21/go.mod h1:QeJfpSbVSfYc7RgB3gJFj9cbuQMMchQxrWXz8Ruopmg= +github.com/google/certificate-transparency-go v1.1.1/go.mod h1:FDKqPvSXawb2ecErVRrD+nfy23RCzyl7eqVCEmlT1Zs= github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= @@ -447,14 +579,22 @@ github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/ github.com/google/gofuzz v1.1.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= +github.com/google/martian/v3 v3.1.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= github.com/google/pprof v0.0.0-20191218002539-d4f498aebedc/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= github.com/google/pprof v0.0.0-20200212024743-f11f1df84d12/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= github.com/google/pprof v0.0.0-20200229191704-1ebb73c60ed3/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= github.com/google/pprof 
v0.0.0-20200430221834-fc25d7d30c6d/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200507031123-427632fa3b1c/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= github.com/google/pprof v0.0.0-20200708004538-1a94d8640e99/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20201023163331-3e6fc7fc9c4c/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20201203190320-1bf35d6f28c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20210122040257-d980be63207e/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20210226084205-cbba55b83ad5/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= +github.com/google/trillian v1.3.11/go.mod h1:0tPraVHrSDkA3BO6vKX67zgLXs6SsOAbHEivX+9mPgw= +github.com/google/uuid v0.0.0-20161128191214-064e2069ce9c/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/uuid v1.0.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= @@ -462,28 +602,54 @@ github.com/google/uuid v1.2.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+ github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= github.com/googleapis/gnostic v0.4.1/go.mod h1:LRhVm6pbyptWbWbuZ38d1eyptfvIytN3ir6b65WBswg= +github.com/gookit/color v1.4.2/go.mod h1:fqRyamkC1W8uxl+lxCQxOT09l/vYfZ+QeiX3rKQHCoQ= +github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1 h1:EGx4pi6eqNxGaHF6qqu48+N2wcFQ5qg5FXgOdqsJ5d8= github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= +github.com/gordonklaus/ineffassign v0.0.0-20200309095847-7953dde2c7bf/go.mod h1:cuNKsD1zp2v6XfE/orVX2QE1LC+i254ceGcVeDT3pTU= +github.com/gordonklaus/ineffassign v0.0.0-20210225214923-2e10b2664254 h1:Nb2aRlC404yz7gQIfRZxX9/MLvQiqXyiBTJtgAy6yrI= +github.com/gordonklaus/ineffassign v0.0.0-20210225214923-2e10b2664254/go.mod h1:M9mZEtGIsR1oDaZagNPNG9iq9n2HrhZ17dsXk73V3Lw= +github.com/gorhill/cronexpr v0.0.0-20180427100037-88b0669f7d75/go.mod h1:g2644b03hfBX9Ov0ZBDgXXens4rxSxmqFBbhvKv2yVA= github.com/gorilla/context v1.1.1/go.mod h1:kBGZzfjB9CEq2AlWe17Uuf7NDRt0dE0s8S51q0aT7Yg= github.com/gorilla/handlers v0.0.0-20150720190736-60c7bfde3e33/go.mod h1:Qkdc/uu4tH4g6mTK6auzZ766c4CA0Ng8+o/OAirnOIQ= github.com/gorilla/mux v1.6.2/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2zaAs= github.com/gorilla/mux v1.7.2/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2zaAs= -github.com/gorilla/mux v1.7.3 h1:gnP5JzjVOuiZD07fKKToCAOjS0yOpj/qPETTXCCS6hw= github.com/gorilla/mux v1.7.3/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2zaAs= +github.com/gorilla/mux v1.8.0 h1:i40aqfkR1h2SlN9hojwV5ZA91wcXFOvkdNIeFDP5koI= +github.com/gorilla/mux v1.8.0/go.mod h1:DVbg23sWSpFRCP0SfiEN6jmj59UnW/n46BH5rLB71So= github.com/gorilla/securecookie v1.1.1 h1:miw7JPhV+b/lAHSXz4qd/nN9jRiAFV5FwjeKyCS8BvQ= github.com/gorilla/securecookie v1.1.1/go.mod h1:ra0sb63/xPlUeL+yeDciTfxMRAA+MP+HVt/4epWDjd4= github.com/gorilla/websocket v0.0.0-20170926233335-4201258b820c/go.mod h1:E7qHFY5m1UJ88s3WnNqhKjPHQ0heANvMoAMk2YaljkQ= github.com/gorilla/websocket 
v1.4.0/go.mod h1:E7qHFY5m1UJ88s3WnNqhKjPHQ0heANvMoAMk2YaljkQ= +github.com/gorilla/websocket v1.4.1/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= github.com/gorilla/websocket v1.4.2/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= +github.com/gostaticanalysis/analysisutil v0.0.0-20190318220348-4088753ea4d3/go.mod h1:eEOZF4jCKGi+aprrirO9e7WKB3beBRtWgqGunKl6pKE= +github.com/gostaticanalysis/analysisutil v0.0.3/go.mod h1:eEOZF4jCKGi+aprrirO9e7WKB3beBRtWgqGunKl6pKE= +github.com/gostaticanalysis/analysisutil v0.1.0/go.mod h1:dMhHRU9KTiDcuLGdy87/2gTR8WruwYZrKdRq9m1O6uw= +github.com/gostaticanalysis/analysisutil v0.4.1 h1:/7clKqrVfiVwiBQLM0Uke4KvXnO6JcCTS7HwF2D6wG8= +github.com/gostaticanalysis/analysisutil v0.4.1/go.mod h1:18U/DLpRgIUd459wGxVHE0fRgmo1UgHDcbw7F5idXu0= +github.com/gostaticanalysis/comment v1.3.0/go.mod h1:xMicKDx7XRXYdVwY9f9wQpDJVnqWxw9wCauCMKp+IBI= +github.com/gostaticanalysis/comment v1.4.1 h1:xHopR5L2lRz6OsjH4R2HG5wRhW9ySl3FsHIvi5pcXwc= +github.com/gostaticanalysis/comment v1.4.1/go.mod h1:ih6ZxzTHLdadaiSnF5WY3dxUoXfXAlTaRzuaNDlSado= +github.com/gostaticanalysis/forcetypeassert v0.0.0-20200621232751-01d4955beaa5 h1:rx8127mFPqXXsfPSo8BwnIU97MKFZc89WHAHt8PwDVY= +github.com/gostaticanalysis/forcetypeassert v0.0.0-20200621232751-01d4955beaa5/go.mod h1:qZEedyP/sY1lTGV1uJ3VhWZ2mqag3IkWsDHVbplHXak= +github.com/gostaticanalysis/nilerr v0.1.1 h1:ThE+hJP0fEp4zWLkWHWcRyI2Od0p7DlgYG3Uqrmrcpk= +github.com/gostaticanalysis/nilerr v0.1.1/go.mod h1:wZYb6YI5YAxxq0i1+VJbY0s2YONW0HU0GPE3+5PWN4A= github.com/gregjones/httpcache v0.0.0-20180305231024-9cad4c3443a7/go.mod h1:FecbI9+v66THATjSRHfNgh1IVFe/9kFxbXtjV0ctIMA= +github.com/gregjones/httpcache v0.0.0-20190611155906-901d90724c79/go.mod h1:FecbI9+v66THATjSRHfNgh1IVFe/9kFxbXtjV0ctIMA= github.com/grpc-ecosystem/go-grpc-middleware v1.0.0/go.mod h1:FiyG127CGDf3tlThmgyCl78X/SZQqEOJBCDaAfeWzPs= github.com/grpc-ecosystem/go-grpc-middleware v1.0.1-0.20190118093823-f849b5445de4/go.mod h1:FiyG127CGDf3tlThmgyCl78X/SZQqEOJBCDaAfeWzPs= +github.com/grpc-ecosystem/go-grpc-middleware v1.2.2/go.mod h1:EaizFBKfUKtMIF5iaDEhniwNedqGo9FuLFzppDr3uwI= github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0/go.mod h1:8NvIoxWQoOIhqOTXgfV/d3M/q6VIi02HzZEHgUlZvzk= github.com/grpc-ecosystem/grpc-gateway v1.9.0/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY= github.com/grpc-ecosystem/grpc-gateway v1.9.5/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY= +github.com/grpc-ecosystem/grpc-gateway v1.12.1/go.mod h1:8XEsbTttt/W+VvjtQhLACqCisSPWTxCZ7sBRjU6iH9c= github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw= +github.com/hashicorp/consul/api v1.1.0/go.mod h1:VmuI/Lkw1nC05EYQWNKwWGbkg+FbDBtguAZLlVdkD9Q= github.com/hashicorp/consul/api v1.3.0/go.mod h1:MmDNSzIMUjNpY/mQ398R4bk2FnqQLoPndWW5VkKPlCE= +github.com/hashicorp/consul/sdk v0.1.1/go.mod h1:VKf9jXwCTEY1QZP2MOLRhb5i/I/ssyNV1vwHyQBF0x8= github.com/hashicorp/consul/sdk v0.3.0/go.mod h1:VKf9jXwCTEY1QZP2MOLRhb5i/I/ssyNV1vwHyQBF0x8= github.com/hashicorp/errwrap v0.0.0-20141028054710-7554cd9344ce/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= +github.com/hashicorp/errwrap v1.0.0 h1:hLrqtEDnRye3+sgx6z4qVLNuviH3MR5aQ0ykNJa/UYA= github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= github.com/hashicorp/go-cleanhttp v0.5.1/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80= github.com/hashicorp/go-cleanhttp v0.5.2 h1:035FKYIWjmULyFRBKPs8TBQoi0x6d9G4xc9neXJWAZQ= @@ -494,6 +660,8 @@ github.com/hashicorp/go-immutable-radix 
v1.0.0/go.mod h1:0y9vanUI8NX6FsYoO3zeMjh github.com/hashicorp/go-msgpack v0.5.3/go.mod h1:ahLV/dePpqEmjfWmKiqvPkv/twdG7iPBM1vqhUKIvfM= github.com/hashicorp/go-multierror v0.0.0-20161216184304-ed905158d874/go.mod h1:JMRHfdO9jKNzS/+BTlxCjKNQHg/jZAft8U7LloJvN7I= github.com/hashicorp/go-multierror v1.0.0/go.mod h1:dHtQlpGsu+cZNNAkkCN/P3hoUDHhCYQXV3UM06sGGrk= +github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo= +github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM= github.com/hashicorp/go-retryablehttp v0.6.8 h1:92lWxgpa+fF3FozM4B3UZtHZMJX8T5XT+TFdCxsPyWs= github.com/hashicorp/go-retryablehttp v0.6.8/go.mod h1:vAew36LZh98gCBJNLH42IQ1ER/9wtLZZ8meHqQvEYWY= github.com/hashicorp/go-rootcerts v1.0.0/go.mod h1:K6zTfqpRlCUIjkwsN4Z+hiSfzSTQa6eBIzfwKfwNnHU= @@ -507,19 +675,26 @@ github.com/hashicorp/go-version v1.2.1/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09 github.com/hashicorp/go.net v0.0.1/go.mod h1:hjKkEWcCURg++eb33jQU7oqQcI9XDCnUzHA0oac0k90= github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= +github.com/hashicorp/golang-lru v0.5.4/go.mod h1:iADmTwqILo4mZ8BN3D2Q6+9jd8WM5uGBxy+E8yxSoD4= +github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4= github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ= github.com/hashicorp/logutils v1.0.0/go.mod h1:QIAnNjmIWmVIIkWDTG1z5v++HQmx9WQRO+LraFDTW64= github.com/hashicorp/mdns v1.0.0/go.mod h1:tL+uN++7HEJ6SQLQ2/p+z2pH24WQKWjBPkE0mNTz8vQ= github.com/hashicorp/memberlist v0.1.3/go.mod h1:ajVTdAv/9Im8oMAAj5G31PhhMCZJV2pPBoIllUwCN7I= github.com/hashicorp/serf v0.8.2/go.mod h1:6hOLApaqBFA1NXqRQAsxw9QxuDEvNxSQRwA/JwenrHc= github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU= +github.com/huandu/xstrings v1.0.0/go.mod h1:4qWG/gcEcfX4z/mBDHJ++3ReCw9ibxbsNJbcucJdbSo= +github.com/huandu/xstrings v1.2.0/go.mod h1:DvyZB1rfVYsBIigL8HwpZgxHwXozlTgGqn63UyNX5k4= github.com/hudl/fargo v1.3.0/go.mod h1:y3CKSmjA+wD2gak7sUSXTAoopbhU08POFhmITJgmKTg= github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= +github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= +github.com/imdario/mergo v0.3.4/go.mod h1:2EnlNZ0deacrJVfApfmtdGgDfMuh/nq6Ok1EcJh5FfA= github.com/imdario/mergo v0.3.5/go.mod h1:2EnlNZ0deacrJVfApfmtdGgDfMuh/nq6Ok1EcJh5FfA= github.com/imdario/mergo v0.3.8/go.mod h1:2EnlNZ0deacrJVfApfmtdGgDfMuh/nq6Ok1EcJh5FfA= github.com/imdario/mergo v0.3.10/go.mod h1:jmQim1M+e3UYxmgPu/WyfjB3N3VflVyUjjjwH0dnCYA= github.com/imdario/mergo v0.3.11/go.mod h1:jmQim1M+e3UYxmgPu/WyfjB3N3VflVyUjjjwH0dnCYA= github.com/imdario/mergo v0.3.12/go.mod h1:jmQim1M+e3UYxmgPu/WyfjB3N3VflVyUjjjwH0dnCYA= +github.com/inconshreveable/mousetrap v1.0.0 h1:Z8tu5sraLXCXIcARxBp/8cbvlwVa7Z1NHg9XEKhtSvM= github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8= github.com/influxdata/influxdb1-client v0.0.0-20191209144304-8bf82d3c094d/go.mod h1:qj24IKcXYK6Iy9ceXlo3Tc+vtHo9lIhSX5JddghvEPo= github.com/j-keck/arping v0.0.0-20160618110441-2cf9dc699c56/go.mod h1:ymszkNOg6tORTn+6F6j+Jc8TOr5osrynvN6ivFWZ2GA= @@ -570,12 +745,23 @@ github.com/jackc/puddle v0.0.0-20190608224051-11cab39313c9/go.mod h1:m4B5Dj62Y0f github.com/jackc/puddle v1.1.0/go.mod 
h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk= github.com/jackc/puddle v1.1.1/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk= github.com/jackc/puddle v1.1.3/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk= +github.com/jgautheron/goconst v1.5.1 h1:HxVbL1MhydKs8R8n/HE5NPvzfaYmQJA3o879lE4+WcM= +github.com/jgautheron/goconst v1.5.1/go.mod h1:aAosetZ5zaeC/2EfMeRswtxUFBpe2Hr7HzkgX4fanO4= +github.com/jhump/protoreflect v1.6.1/go.mod h1:RZQ/lnuN+zqeRVpQigTwO6o0AJUkxbnSnpuG7toUTG4= +github.com/jingyugao/rowserrcheck v1.1.0 h1:u6h4eiNuCLqk73Ic5TXQq9yZS+uEXTdusn7c3w1Mr6A= +github.com/jingyugao/rowserrcheck v1.1.0/go.mod h1:TOQpc2SLx6huPfoFGK3UOnEG+u02D3C1GeosjupAKCA= +github.com/jirfag/go-printf-func-name v0.0.0-20200119135958-7558a9eaa5af h1:KA9BjwUk7KlCh6S9EAGWBt1oExIUv9WyNCiRz5amv48= +github.com/jirfag/go-printf-func-name v0.0.0-20200119135958-7558a9eaa5af/go.mod h1:HEWGJkRDzjJY2sqdDwxccsGicWEf9BQOZsq2tV+xzM0= github.com/jmespath/go-jmespath v0.0.0-20160202185014-0b12d6b521d8/go.mod h1:Nht3zPeWKUH0NzdCt2Blrr5ys8VGpn0CEB0cQHVjt7k= github.com/jmespath/go-jmespath v0.0.0-20160803190731-bd40a432e4c7/go.mod h1:Nht3zPeWKUH0NzdCt2Blrr5ys8VGpn0CEB0cQHVjt7k= github.com/jmespath/go-jmespath v0.0.0-20180206201540-c2b33e8439af/go.mod h1:Nht3zPeWKUH0NzdCt2Blrr5ys8VGpn0CEB0cQHVjt7k= +github.com/jmespath/go-jmespath v0.4.0/go.mod h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo= +github.com/jmespath/go-jmespath/internal/testify v1.5.1/go.mod h1:L3OGu8Wl2/fWfCI6z80xFu9LTZmf1ZRjMHUOPmWr69U= +github.com/jmoiron/sqlx v1.2.0/go.mod h1:1FEQNm3xlJgrMD+FBdI9+xvCksHtbpVBBw5dYhBSsks= github.com/joho/godotenv v1.4.0 h1:3l4+N6zfMWnkbPEXKng2o2/MR5mSwTrBih4ZEkkz1lg= github.com/joho/godotenv v1.4.0/go.mod h1:f4LDr5Voq0i2e/R5DDNOoa2zzDfwtkZa6DnEwAbqwq4= github.com/jonboulle/clockwork v0.1.0/go.mod h1:Ii8DK3G1RaLaWxj9trq07+26W01tbo22gdxWY5EU2bo= +github.com/jonboulle/clockwork v0.2.0/go.mod h1:Pkfl5aHPm1nk2H9h0bjmnJD/BcgbGXUBGnn1kMkgxc8= github.com/jpillora/backoff v1.0.0/go.mod h1:J/6gKK9jxlEcS3zixgDgUAsiuZ7yrSoa/FX5e0EB2j4= github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= github.com/json-iterator/go v1.1.7/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= @@ -586,20 +772,31 @@ github.com/json-iterator/go v1.1.11 h1:uVUAXhF2To8cbw/3xN3pxj6kk7TYKs98NIrTqPlMW github.com/json-iterator/go v1.1.11/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk= +github.com/jtolds/gls v4.20.0+incompatible h1:xdiiI2gbIgH/gLH7ADydsJ1uDOEzR8yvV7C0MuV77Wo= github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU= +github.com/juju/ratelimit v1.0.1/go.mod h1:qapgC/Gy+xNh9UxzV13HGGl/6UXNN+ct+vwSgWNm/qk= github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w= github.com/julienschmidt/httprouter v1.3.0/go.mod h1:JR6WtHb+2LUe8TCKY3cZOxFyyO8IZAc4RVcycCCAKdM= +github.com/julz/importas v0.0.0-20210419104244-841f0c0fe66d h1:XeSMXURZPtUffuWAaq90o6kLgZdgu+QA8wk4MPC8ikI= +github.com/julz/importas v0.0.0-20210419104244-841f0c0fe66d/go.mod h1:oSFU2R4XK/P7kNBrnL/FEQlDGN1/6WoxXEjSSXO0DV0= +github.com/k0kubun/colorstring v0.0.0-20150214042306-9440f1994b88/go.mod h1:3w7q1U84EfirKl04SVQ/s7nPm1ZPhiXd34z40TNz36k= github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51 
h1:Z9n2FFNUXsshfwJMBgNA0RU6/i7WVaAegv3PtuIHPMs= github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51/go.mod h1:CzGEWj7cYgsdH8dAjBGEr58BoE7ScuLd+fwFZ44+/x8= github.com/kisielk/errcheck v1.1.0/go.mod h1:EZBBE59ingxPouuu3KfxchcWSUPOHkagtvWXihfKN4Q= github.com/kisielk/errcheck v1.2.0/go.mod h1:/BMXB+zMLi60iA8Vv6Ksmxu/1UDYcXs4uQLJ+jE2L00= github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= +github.com/kisielk/errcheck v1.6.0 h1:YTDO4pNy7AUN/021p+JGHycQyYNIyMoenM1YDVK6RlY= +github.com/kisielk/errcheck v1.6.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= +github.com/kisielk/gotool v1.0.0 h1:AV2c/EiW3KqPNT9ZKl07ehoAGi4C5/01Cfbblndcapg= github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= +github.com/klauspost/compress v1.10.7/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs= +github.com/klauspost/compress v1.11.0/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs= github.com/klauspost/compress v1.11.3/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs= github.com/klauspost/compress v1.11.13/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs= github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= github.com/konsorten/go-windows-terminal-sequences v1.0.2/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= github.com/konsorten/go-windows-terminal-sequences v1.0.3/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= +github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg= github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc= github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= github.com/kr/pretty v0.2.0/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= @@ -612,57 +809,105 @@ github.com/kr/pty v1.1.8/go.mod h1:O1sed60cT9XZ5uDucP5qwvh+TE3NnUj51EiZO/lmSfw= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= +github.com/kulti/thelper v0.4.0 h1:2Nx7XbdbE/BYZeoip2mURKUdtHQRuy6Ug+wR7K9ywNM= +github.com/kulti/thelper v0.4.0/go.mod h1:vMu2Cizjy/grP+jmsvOFDx1kYP6+PD1lqg4Yu5exl2U= +github.com/kunwardeep/paralleltest v1.0.2 h1:/jJRv0TiqPoEy/Y8dQxCFJhD56uS/pnvtatgTZBHokU= +github.com/kunwardeep/paralleltest v1.0.2/go.mod h1:ZPqNm1fVHPllh5LPVujzbVz1JN2GhLxSfY+oqUsvG30= +github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw= +github.com/kyoh86/exportloopref v0.1.8 h1:5Ry/at+eFdkX9Vsdw3qU4YkvGtzuVfzT4X7S77LoN/M= +github.com/kyoh86/exportloopref v0.1.8/go.mod h1:1tUcJeiioIs7VWe5gcOObrux3lb66+sBqGZrRkMwPgg= +github.com/ldez/gomoddirectives v0.2.2 h1:p9/sXuNFArS2RLc+UpYZSI4KQwGMEDWC/LbtF5OPFVg= +github.com/ldez/gomoddirectives v0.2.2/go.mod h1:cpgBogWITnCfRq2qGoDkKMEVSaarhdBr6g8G04uz6d0= +github.com/ldez/tagliatelle v0.2.0 h1:693V8Bf1NdShJ8eu/s84QySA0J2VWBanVBa2WwXD/Wk= +github.com/ldez/tagliatelle v0.2.0/go.mod h1:8s6WJQwEYHbKZDsp/LjArytKOG8qaMrKQQ3mFukHs88= github.com/leodido/go-urn v1.2.0/go.mod h1:+8+nEpDfqqsY+g338gtMEUOtuK+4dEMhiQEgxpxOKII= github.com/leodido/go-urn v1.2.1 h1:BqpAaACuzVSgi/VLzGZIobT2z4v53pjosyNd9Yv6n/w= github.com/leodido/go-urn v1.2.1/go.mod h1:zt4jvISO2HfUBqxjfIshjdMTYS56ZS/qv49ictyFfxY= +github.com/letsencrypt/pkcs11key/v4 v4.0.0/go.mod h1:EFUvBDay26dErnNb70Nd0/VW3tJiIbETBPTl9ATXQag= 
github.com/lib/pq v1.0.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= github.com/lib/pq v1.1.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= github.com/lib/pq v1.2.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= github.com/lib/pq v1.3.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= +github.com/lib/pq v1.8.0/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= +github.com/lib/pq v1.9.0/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= github.com/lib/pq v1.10.2/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= github.com/lib/pq v1.10.3 h1:v9QZf2Sn6AmjXtQeFpdoq/eaNtYP6IN+7lcrygsIAtg= github.com/lib/pq v1.10.3/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= github.com/lightstep/lightstep-tracer-common/golang/gogo v0.0.0-20190605223551-bc2310a04743/go.mod h1:qklhhLq1aX+mtWk9cPHPzaBjWImj5ULL6C7HFJtXQMM= github.com/lightstep/lightstep-tracer-go v0.18.1/go.mod h1:jlF1pusYV4pidLvZ+XD0UBX0ZE6WURAspgAczcDHrL4= +github.com/logrusorgru/aurora v0.0.0-20181002194514-a7b3b318ed4e/go.mod h1:7rIyQOR62GCctdiQpZ/zOJlFyk6y+94wXzv6RNZgaR4= github.com/lyft/protoc-gen-validate v0.0.13/go.mod h1:XbGvPuh87YZc5TdIa2/I4pLk0QoUACkjt2znoq26NVQ= github.com/magiconair/properties v1.8.0/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ= +github.com/magiconair/properties v1.8.1/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ= +github.com/magiconair/properties v1.8.5 h1:b6kJs+EmPFMYGkow9GiUyCyOvIwYetYJ3fSaWak/Gls= +github.com/magiconair/properties v1.8.5/go.mod h1:y3VJvCyxH9uVvJTWEGAELF3aiYNyPKd5NZ3oSwXrF60= github.com/mailru/easyjson v0.0.0-20190614124828-94de47d64c63/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= github.com/mailru/easyjson v0.0.0-20190626092158-b2ccc519800e/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= github.com/mailru/easyjson v0.7.0/go.mod h1:KAzv3t3aY1NaHWoQz1+4F1ccyAH66Jk7yos7ldAVICs= +github.com/maratori/testpackage v1.0.1 h1:QtJ5ZjqapShm0w5DosRjg0PRlSdAdlx+W6cCKoALdbQ= +github.com/maratori/testpackage v1.0.1/go.mod h1:ddKdw+XG0Phzhx8BFDTKgpWP4i7MpApTE5fXSKAqwDU= github.com/marstr/guid v1.1.0/go.mod h1:74gB1z2wpxxInTG6yaqA7KrtM0NZ+RbrcqDvYHefzho= +github.com/matoous/godox v0.0.0-20210227103229-6504466cf951 h1:pWxk9e//NbPwfxat7RXkts09K+dEBJWakUWwICVqYbA= +github.com/matoous/godox v0.0.0-20210227103229-6504466cf951/go.mod h1:1BELzlh859Sh1c6+90blK8lbYy0kwQf1bYlBhBysy1s= github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU= github.com/mattn/go-colorable v0.1.1/go.mod h1:FuOcm+DKB9mbwrcAfNl7/TZVBZ6rcnceauSikq3lYCQ= github.com/mattn/go-colorable v0.1.2/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE= +github.com/mattn/go-colorable v0.1.4/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE= github.com/mattn/go-colorable v0.1.6/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= +github.com/mattn/go-colorable v0.1.8 h1:c1ghPdyEDarC70ftn0y+A/Ee++9zz8ljHG1b13eJ0s8= +github.com/mattn/go-colorable v0.1.8/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= github.com/mattn/go-isatty v0.0.3/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4= github.com/mattn/go-isatty v0.0.4/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4= github.com/mattn/go-isatty v0.0.5/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= github.com/mattn/go-isatty v0.0.7/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= github.com/mattn/go-isatty v0.0.9/go.mod h1:YNRxwqDuOph6SZLI9vUUz6OYw3QyUt7WiY2yME+cCiQ= 
+github.com/mattn/go-isatty v0.0.10/go.mod h1:qgIWMr58cqv1PHHyhnkY9lrL7etaEgOFcMEpPG5Rm84= +github.com/mattn/go-isatty v0.0.11/go.mod h1:PhnuNfih5lzO57/f3n+odYbM4JtupLOxQOAqxQCu2WE= github.com/mattn/go-isatty v0.0.12 h1:wuysRhFDzyxgEmMf5xjvJ2M9dZoWAXNNr5LSBS7uHXY= github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= github.com/mattn/go-runewidth v0.0.2/go.mod h1:LwmH8dsx7+W8Uxz3IHJYH5QSwggIsqBzpuz5H//U1FU= +github.com/mattn/go-runewidth v0.0.4/go.mod h1:LwmH8dsx7+W8Uxz3IHJYH5QSwggIsqBzpuz5H//U1FU= +github.com/mattn/go-runewidth v0.0.6/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI= +github.com/mattn/go-runewidth v0.0.9 h1:Lm995f3rfxdpd6TSmuVCHVb/QhupuXlYr8sCI/QdE+0= +github.com/mattn/go-runewidth v0.0.9/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI= github.com/mattn/go-shellwords v1.0.3/go.mod h1:3xCvwCdWdlDJUrvuMn7Wuy9eWs4pE8vqg+NOMyg4B2o= +github.com/mattn/go-sqlite3 v1.9.0/go.mod h1:FPy6KqzDD04eiIsT53CuJW3U88zkxoIYsOqkbpncsNc= github.com/mattn/go-sqlite3 v1.14.6/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU= github.com/mattn/go-sqlite3 v1.14.8/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU= github.com/mattn/go-sqlite3 v1.14.9 h1:10HX2Td0ocZpYEjhilsuo6WWtUqttj2Kb0KtD86/KYA= github.com/mattn/go-sqlite3 v1.14.9/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU= +github.com/mattn/goveralls v0.0.2/go.mod h1:8d1ZMHsd7fW6IRPKQh46F2WRpyib5/X4FOpevwGNQEw= github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0= github.com/matttproud/golang_protobuf_extensions v1.0.2-0.20181231171920-c182affec369 h1:I0XW9+e1XWDxdcEniV4rQAIOPUGDq67JSCiRCgGCZLI= github.com/matttproud/golang_protobuf_extensions v1.0.2-0.20181231171920-c182affec369/go.mod h1:BSXmuO+STAnVfrANrmjBb36TMTDstsz7MSK+HVaYKv4= +github.com/mbilski/exhaustivestruct v1.2.0 h1:wCBmUnSYufAHO6J4AVWY6ff+oxWxsVFrwgOdMUQePUo= +github.com/mbilski/exhaustivestruct v1.2.0/go.mod h1:OeTBVxQWoEmB2J2JCHmXWPJ0aksxSUOUy+nvtVEfzXc= +github.com/mgechev/dots v0.0.0-20190921121421-c36f7dcfbb81 h1:QASJXOGm2RZ5Ardbc86qNFvby9AqkLDibfChMtAg5QM= +github.com/mgechev/dots v0.0.0-20190921121421-c36f7dcfbb81/go.mod h1:KQ7+USdGKfpPjXk4Ga+5XxQM4Lm4e3gAogrreFAYpOg= +github.com/mgechev/revive v1.1.1 h1:mkXNHP14Y6tfq+ocnQaiKEtgJDM41yaoyQq4qn6TD/4= +github.com/mgechev/revive v1.1.1/go.mod h1:PKqk4L74K6wVNwY2b6fr+9Qqr/3hIsHVfZCJdbvozrY= github.com/miekg/dns v1.0.14/go.mod h1:W1PPwlIAgtquWBMBEV9nkV9Cazfe8ScdGz/Lj7v3Nrg= +github.com/miekg/dns v1.1.35/go.mod h1:KNUDUusw/aVsxyTYZM1oqvCicbwhgbNgztCETuNZ7xM= +github.com/miekg/pkcs11 v1.0.2/go.mod h1:XsNlhZGX73bx86s2hdc/FuaLm2CPZJemRLMA+WTFxgs= github.com/miekg/pkcs11 v1.0.3/go.mod h1:XsNlhZGX73bx86s2hdc/FuaLm2CPZJemRLMA+WTFxgs= github.com/mistifyio/go-zfs v2.1.2-0.20190413222219-f784269be439+incompatible/go.mod h1:8AuVvqP/mXw1px98n46wfvcGfQ4ci2FwoAjKYxuo3Z4= github.com/mitchellh/cli v1.0.0/go.mod h1:hNIlj7HEI86fIcpObd7a0FcrxTWetlwJDGcceTlRvqc= +github.com/mitchellh/copystructure v1.0.0/go.mod h1:SNtv71yrdKgLRyLFxmLdkAbkKEFWgYaq1OVrnRcwhnw= github.com/mitchellh/go-homedir v1.0.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= +github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y= github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= +github.com/mitchellh/go-ps v1.0.0/go.mod h1:J4lOc8z8yJs6vUwklHw2XEIiT4z4C40KtWVN3nvg8Pg= github.com/mitchellh/go-testing-interface v1.0.0/go.mod h1:kRemZodwjscx+RGhAo8eIhFbs2+BFgRtFPeD/KE+zxI= 
github.com/mitchellh/gox v0.4.0/go.mod h1:Sd9lOJ0+aimLBi73mGofS1ycjY8lL3uZM3JPS42BGNg= github.com/mitchellh/iochan v1.0.0/go.mod h1:JwYml1nuB7xOzsp52dPpHFffvOCDupsG0QubkSMEySY= github.com/mitchellh/mapstructure v0.0.0-20160808181253-ca63d7c062ee/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= +github.com/mitchellh/mapstructure v1.4.1 h1:CpVNEelQCZBooIPDn+AR3NpivK/TIKU8bDxdASFVQag= +github.com/mitchellh/mapstructure v1.4.1/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= github.com/mitchellh/osext v0.0.0-20151018003038-5e2d6d41470f/go.mod h1:OkQIRizQZAeMln+1tSwduZz7+Af5oFlKirV/MSYes2A= +github.com/mitchellh/reflectwalk v1.0.0/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw= +github.com/mitchellh/reflectwalk v1.0.1/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw= github.com/moby/locker v1.0.1/go.mod h1:S7SDdo5zpBK84bzzVlKr2V0hz+7x9hWbYC/kq7oQppc= github.com/moby/moby v20.10.10+incompatible h1:KriJ8Zcm+NrFlaI0HNi2GtbfsT6J33o+XHmpAWZ6E7M= github.com/moby/moby v20.10.10+incompatible/go.mod h1:fDXVQ6+S340veQPv35CzDahGBmHsiclFwfEygB/TWMc= @@ -678,8 +923,13 @@ github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJ github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= github.com/modern-go/reflect2 v1.0.1 h1:9f412s+6RmYXLWZSEzVVgPGK7C2PphHj5RJrvfx9AWI= github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= +github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826/go.mod h1:TaXosZuwdSHYgviHp1DAtfrULt5eUgsSMsZf+YrPgl8= +github.com/moricho/tparallel v0.2.1 h1:95FytivzT6rYzdJLdtfn6m1bfFJylOJK41+lgv/EHf4= +github.com/moricho/tparallel v0.2.1/go.mod h1:fXEIZxG2vdfl0ZF8b42f5a78EhjjD5mX8qUplsoSU4k= github.com/morikuni/aec v1.0.0 h1:nP9CBfwrvYnBRgY6qfDQkygYDmYwOilePFkwzv4dU8A= github.com/morikuni/aec v1.0.0/go.mod h1:BbKIizmSmc5MMPqRYbxO4ZU0S0+P200+tUnFx7PXmsc= +github.com/mozilla/scribe v0.0.0-20180711195314-fb71baf557c1/go.mod h1:FIczTrinKo8VaLxe6PWTPEXRXDIHz2QAwiaBaP5/4a8= +github.com/mozilla/tls-observatory v0.0.0-20210609171429-7bc42856d2e5/go.mod h1:FUqVoUPHSEdDR0MnFM3Dh8AU0pZHLXUD127SAJGER/s= github.com/mrjones/oauth v0.0.0-20190623134757-126b35219450 h1:j2kD3MT1z4PXCiUllUJF9mWUESr9TWKS7iEKsQ/IipM= github.com/mrjones/oauth v0.0.0-20190623134757-126b35219450/go.mod h1:skjdDftzkFALcuGzYSklqYd8gvat6F1gZJ4YPVbkZpM= github.com/mrunalp/fileutils v0.5.0/go.mod h1:M1WthSahJixYnrXQl/DFQuteStB1weuxD2QJNHXfbSQ= @@ -687,7 +937,11 @@ github.com/munnerz/goautoneg v0.0.0-20120707110453-a547fc61f48d/go.mod h1:+n7T8m github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822/go.mod h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ= github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= +github.com/mwitkow/go-proto-validators v0.0.0-20180403085117-0950a7990007/go.mod h1:m2XC9Qq0AlmmVksL6FktJCdTYyLk7V3fKyp0sl1yWQo= +github.com/mwitkow/go-proto-validators v0.2.0/go.mod h1:ZfA1hW+UH/2ZHOWvQ3HnQaU0DtnpXu850MZiy+YUgcc= github.com/mxk/go-flowrate v0.0.0-20140419014527-cca7078d478f/go.mod h1:ZdcZmHo+o7JKHSa8/e818NopupXU1YMK5fe1lsApnBw= +github.com/nakabonne/nestif v0.3.0 h1:+yOViDGhg8ygGrmII72nV9B/zGxY188TYpfolntsaPw= +github.com/nakabonne/nestif v0.3.0/go.mod 
h1:dI314BppzXjJ4HsCnbo7XzrJHPszZsjnk5wEBSYHI2c= github.com/nats-io/jwt v0.3.0/go.mod h1:fRYCDE99xlTsqUzISS1Bi75UBJ6ljOJQOAAu5VglpSg= github.com/nats-io/jwt v0.3.2/go.mod h1:/euKqTS1ZD+zzjYrY7pseZrTtWQSjujC7xjPc8wL6eU= github.com/nats-io/nats-server/v2 v2.1.2/go.mod h1:Afk+wRZqkMQs/p45uXdrVLuab3gwv3Z8C4HTBu8GD/k= @@ -695,13 +949,26 @@ github.com/nats-io/nats.go v1.9.1/go.mod h1:ZjDU1L/7fJ09jvUSRVBR2e7+RnLiiIQyqyzE github.com/nats-io/nkeys v0.1.0/go.mod h1:xpnFELMwJABBLVhffcfd1MZx6VsNRFpEugbxziKVo7w= github.com/nats-io/nkeys v0.1.3/go.mod h1:xpnFELMwJABBLVhffcfd1MZx6VsNRFpEugbxziKVo7w= github.com/nats-io/nuid v1.0.1/go.mod h1:19wcPz3Ph3q0Jbyiqsd0kePYG7A95tJPxeL+1OSON2c= +github.com/nbutton23/zxcvbn-go v0.0.0-20210217022336-fa2cb2858354 h1:4kuARK6Y6FxaNu/BnU2OAaLF86eTVhP2hjTB6iMvItA= +github.com/nbutton23/zxcvbn-go v0.0.0-20210217022336-fa2cb2858354/go.mod h1:KSVJerMDfblTH7p5MZaTt+8zaT2iEk3AkVb9PQdZuE8= github.com/ncw/swift v1.0.47/go.mod h1:23YIA4yWVnGwv2dQlN4bB7egfYX6YLn0Yo/S6zZO/ZM= -github.com/nxadm/tail v1.4.4 h1:DQuhQpB1tVlglWS2hLQ5OV6B5r8aGxSrPc5Qo6uTN78= +github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno= +github.com/nishanths/exhaustive v0.2.3 h1:+ANTMqRNrqwInnP9aszg/0jDo+zbXa4x66U19Bx/oTk= +github.com/nishanths/exhaustive v0.2.3/go.mod h1:bhIX678Nx8inLM9PbpvK1yv6oGtoP8BfaIeMzgBNKvc= +github.com/nishanths/predeclared v0.0.0-20190419143655-18a43bb90ffc/go.mod h1:62PewwiQTlm/7Rj+cxVYqZvDIUc+JjZq6GHAC1fsObQ= +github.com/nishanths/predeclared v0.2.1 h1:1TXtjmy4f3YCFjTxRd8zcFHOmoUir+gp0ESzjFzG2sw= +github.com/nishanths/predeclared v0.2.1/go.mod h1:HvkGJcA3naj4lOwnFXFDkFxVtSqQMB9sbB1usJ+xjQE= github.com/nxadm/tail v1.4.4/go.mod h1:kenIhsEOeOJmVchQTgglprH7qJGnHDVpk1VPCcaMI8A= +github.com/nxadm/tail v1.4.8 h1:nPr65rt6Y5JFSKQO7qToXr7pePgD6Gwiw05lkbyAQTE= +github.com/nxadm/tail v1.4.8/go.mod h1:+ncqLTQzXmGhMZNUePPaPqPvBxHAIsmXswZKocGu+AU= github.com/oklog/oklog v0.3.2/go.mod h1:FCV+B7mhrz4o+ueLpx+KqkyXRGMWOYEvfiXtdGtbWGs= github.com/oklog/run v1.0.0/go.mod h1:dlhp/R75TPv97u0XWUtDeV/lRKWPKSdTuV0TZvrmrQA= github.com/oklog/ulid v1.3.1/go.mod h1:CirwcVhetQ6Lv90oh/F+FBtV6XMibvdAFo93nm5qn4U= github.com/olekukonko/tablewriter v0.0.0-20170122224234-a0225b3f23b5/go.mod h1:vsDQFd/mU46D+Z4whnwzcISnGGzXWMclvtLoiIKAKIo= +github.com/olekukonko/tablewriter v0.0.1/go.mod h1:vsDQFd/mU46D+Z4whnwzcISnGGzXWMclvtLoiIKAKIo= +github.com/olekukonko/tablewriter v0.0.2/go.mod h1:rSAaSIOAGT9odnlyGlUfAJaoc5w2fSBUmeGDbRWPxyQ= +github.com/olekukonko/tablewriter v0.0.5 h1:P2Ga83D34wi1o9J6Wh1mRuqd4mF/x/lgBS7N7AbDhec= +github.com/olekukonko/tablewriter v0.0.5/go.mod h1:hPp6KlRPjbx+hW8ykQs1w3UBbZlj6HuIJcUGPhkA7kY= github.com/onsi/ginkgo v0.0.0-20151202141238-7f8ab55aaf3b/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= github.com/onsi/ginkgo v0.0.0-20170829012221-11459a886d9c/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= @@ -709,15 +976,19 @@ github.com/onsi/ginkgo v1.7.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+W github.com/onsi/ginkgo v1.10.1/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= github.com/onsi/ginkgo v1.10.3/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= github.com/onsi/ginkgo v1.11.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= -github.com/onsi/ginkgo v1.12.1 h1:mFwc4LvZ0xpSvDZ3E+k8Yte0hLOMxXUlP+yXtJqkYfQ= github.com/onsi/ginkgo v1.12.1/go.mod h1:zj2OWP4+oCPe1qIXoGWkgMRwljMUYCdkwsT2108oapk= +github.com/onsi/ginkgo 
v1.16.2/go.mod h1:CObGmKUOKaSC0RjmoAK7tKyn4Azo5P2IWuoMnvwxz1E= +github.com/onsi/ginkgo v1.16.4 h1:29JGrr5oVBm5ulCWet69zQkzWipVXIol6ygQUe/EzNc= +github.com/onsi/ginkgo v1.16.4/go.mod h1:dX+/inL/fNMqNlz0e9LfyB9TswhZpCVdJM/Z6Vvnwo0= github.com/onsi/gomega v0.0.0-20151007035656-2152b45fa28a/go.mod h1:C1qb7wdrVGGVU+Z6iS04AVkA3Q65CEZX59MT0QO5uiA= github.com/onsi/gomega v0.0.0-20170829124025-dcabb60a477c/go.mod h1:C1qb7wdrVGGVU+Z6iS04AVkA3Q65CEZX59MT0QO5uiA= github.com/onsi/gomega v1.4.3/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY= github.com/onsi/gomega v1.7.0/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY= github.com/onsi/gomega v1.7.1/go.mod h1:XdKZgCCFLUoM/7CFJVPcG8C1xQ1AJ0vpAezJrB7JYyY= -github.com/onsi/gomega v1.10.3 h1:gph6h/qe9GSUw1NhH1gp+qb+h8rXD8Cy60Z32Qw3ELA= +github.com/onsi/gomega v1.10.1/go.mod h1:iN09h71vgCQne3DLsj+A5owkum+a2tYe+TOCB1ybHNo= github.com/onsi/gomega v1.10.3/go.mod h1:V9xEwhxec5O8UDM77eCW8vLymOMltsqPVYWrpDsH8xc= +github.com/onsi/gomega v1.13.0 h1:7lLHu94wT9Ij0o6EWWclhu0aOh32VxhkwEJvzuWPeak= +github.com/onsi/gomega v1.13.0/go.mod h1:lRk9szgn8TxENtWd0Tp4c3wjlRfMTMH27I+3Je41yGY= github.com/op/go-logging v0.0.0-20160315200505-970db520ece7/go.mod h1:HzydrMdWErDVzsI23lYNej1Htcns9BCg93Dk0bBINWk= github.com/opencontainers/go-digest v0.0.0-20170106003457-a6d0ee40d420/go.mod h1:cMLVZDEM3+U2I4VmLI6N8jQYUd2OVphdqWwCJHrFt2s= github.com/opencontainers/go-digest v0.0.0-20180430190053-c9281466c8b2/go.mod h1:cMLVZDEM3+U2I4VmLI6N8jQYUd2OVphdqWwCJHrFt2s= @@ -757,8 +1028,12 @@ github.com/pascaldekloe/goe v0.0.0-20180627143212-57f6aae5913c/go.mod h1:lzWF7FI github.com/pborman/uuid v1.2.0/go.mod h1:X/NO0urCmaxf9VXbdlT7C2Yzkj2IKimNn4k+gtPdI/k= github.com/pelletier/go-toml v1.2.0/go.mod h1:5z9KED0ma1S8pY6P1sdut58dfprrGBbd/94hg7ilaic= github.com/pelletier/go-toml v1.8.1/go.mod h1:T2/BmBdy8dvIRq1a/8aqjN41wvWlN4lrapLU/GW4pbc= +github.com/pelletier/go-toml v1.9.3 h1:zeC5b1GviRUyKYd6OJPvBU/mcVDVoL1OhT17FCt5dSQ= +github.com/pelletier/go-toml v1.9.3/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c= github.com/performancecopilot/speed v3.0.0+incompatible/go.mod h1:/CLtqpZ5gBg1M9iaPbIdPPGyKcA8hKdoy6hAWba7Yac= github.com/peterbourgon/diskv v2.0.1+incompatible/go.mod h1:uqqh8zWWbv1HBMNONnaR/tNboyR3/BZd58JJSHlUSCU= +github.com/phayes/checkstyle v0.0.0-20170904204023-bfd46e6a821d h1:CdDQnGF8Nq9ocOS/xlSptM1N3BbrA6/kmaep5ggwaIA= +github.com/phayes/checkstyle v0.0.0-20170904204023-bfd46e6a821d/go.mod h1:3OzsM7FXDQlpCiw2j81fOmAwQLnZnLGXVKUzeKQXIAw= github.com/pierrec/lz4 v1.0.2-0.20190131084431-473cd7ce01a1/go.mod h1:3/3N9NVKO0jef7pBehbT1qWhCMrIgbYNnFAZCqQ5LRc= github.com/pierrec/lz4 v2.0.5+incompatible/go.mod h1:pdkljMzZIN41W+lC3N2tnIh5sFi+IEE17M5jbnwPHcY= github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA= @@ -768,8 +1043,12 @@ github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINE github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/profile v1.2.1/go.mod h1:hJw3o1OdXxsrSjjVksARp5W95eeEaEfptyVZyv6JUPA= +github.com/pkg/sftp v1.10.1/go.mod h1:lYOWFsE0bwd1+KfKJaKeuokY15vzFx25BLbzYYoAxZI= +github.com/pmezard/go-difflib v0.0.0-20151028094244-d8ed2627bdf0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= 
+github.com/polyfloyd/go-errorlint v0.0.0-20210722154253-910bb7978349 h1:Kq/3kL0k033ds3tyez5lFPrfQ74fNJ+OqCclRipubwA= +github.com/polyfloyd/go-errorlint v0.0.0-20210722154253-910bb7978349/go.mod h1:wi9BfjxjF/bwiZ701TzmfKu6UKC357IOAtNr0Td0Lvw= github.com/posener/complete v1.1.1/go.mod h1:em0nMJCgc9GFtwrmVmEMR/ZL6WyhyjMBndrE9hABlRI= github.com/pquerna/cachecontrol v0.0.0-20171018203845-0dec1b30a021/go.mod h1:prYjPmNq4d1NPVmpShWobRqXY3q7Vp+80DqgxxUrUIA= github.com/prometheus/client_golang v0.0.0-20180209125602-c332b6f63c06/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw= @@ -814,6 +1093,18 @@ github.com/prometheus/procfs v0.2.0/go.mod h1:lV6e/gmhEcM9IjHGsFOCxxuZ+z1YqCvr4O github.com/prometheus/procfs v0.6.0 h1:mxy4L2jP6qMonqmq+aTtOx1ifVWUgG/TAmntgbh3xv4= github.com/prometheus/procfs v0.6.0/go.mod h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1xBZuNvfVA= github.com/prometheus/tsdb v0.7.1/go.mod h1:qhTCs0VvXwvX/y3TZrWD7rabWM+ijKTux40TwIPHuXU= +github.com/pseudomuto/protoc-gen-doc v1.3.2/go.mod h1:y5+P6n3iGrbKG+9O04V5ld71in3v/bX88wUwgt+U8EA= +github.com/pseudomuto/protokit v0.2.0/go.mod h1:2PdH30hxVHsup8KpBTOXTBeMVhJZVio3Q8ViKSAXT0Q= +github.com/quasilyte/go-consistent v0.0.0-20190521200055-c6f3937de18c/go.mod h1:5STLWrekHfjyYwxBRVRXNOSewLJ3PWfDJd1VyTS21fI= +github.com/quasilyte/go-ruleguard v0.3.1-0.20210203134552-1b5a410e1cc8/go.mod h1:KsAh3x0e7Fkpgs+Q9pNLS5XpFSvYCEVl5gP9Pp1xp30= +github.com/quasilyte/go-ruleguard v0.3.4 h1:F6l5p6+7WBcTKS7foNQ4wqA39zjn2+RbdbyzGxIq1B0= +github.com/quasilyte/go-ruleguard v0.3.4/go.mod h1:57FZgMnoo6jqxkYKmVj5Fc8vOt0rVzoE/UNAmFFIPqA= +github.com/quasilyte/go-ruleguard/dsl v0.3.0/go.mod h1:KeCP03KrjuSO0H1kTuZQCWlQPulDV6YMIXmpQss17rU= +github.com/quasilyte/go-ruleguard/dsl v0.3.2/go.mod h1:KeCP03KrjuSO0H1kTuZQCWlQPulDV6YMIXmpQss17rU= +github.com/quasilyte/go-ruleguard/rules v0.0.0-20201231183845-9e62ed36efe1/go.mod h1:7JTjp89EGyU1d6XfBiXihJNG37wB2VRkd125Q1u7Plc= +github.com/quasilyte/go-ruleguard/rules v0.0.0-20210203162857-b223e0831f88/go.mod h1:4cgAphtvu7Ftv7vOT2ZOYhC6CvBxZixcasr8qIOTA50= +github.com/quasilyte/regex/syntax v0.0.0-20200407221936-30656e2c4a95 h1:L8QM9bvf68pVdQ3bCFZMDmnt9yqcMBro1pC7F+IPYMY= +github.com/quasilyte/regex/syntax v0.0.0-20200407221936-30656e2c4a95/go.mod h1:rlzQ04UMyJXu/aOvhd8qT+hvDrFpiwqp8MRXDY9szc0= github.com/rcrowley/go-metrics v0.0.0-20181016184325-3113b8401b8a/go.mod h1:bCqnVzQkZxMG4s8nGwiZ5l3QUCyqpo9Y+/ZMZ9VjZe4= github.com/remyoudompheng/bigfft v0.0.0-20200410134404-eec4a21b6bb0 h1:OdAsTTz6OkFY5QxjkYwrChwuRruF69c169dPK26NUlk= github.com/remyoudompheng/bigfft v0.0.0-20200410134404-eec4a21b6bb0/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo= @@ -821,26 +1112,44 @@ github.com/rogpeppe/fastuuid v0.0.0-20150106093220-6724a57986af/go.mod h1:XWv6So github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ= github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= github.com/rogpeppe/go-internal v1.6.1/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc= +github.com/rogpeppe/go-internal v1.6.2/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc= github.com/rogpeppe/go-internal v1.8.0 h1:FCbCCtXNOY3UtUuHUYaghJg4y7Fd14rXifAYUAtL9R8= github.com/rogpeppe/go-internal v1.8.0/go.mod h1:WmiCO8CzOY8rg0OYDC4/i/2WRWAB6poM+XZ2dLUbcbE= +github.com/rs/cors v1.7.0/go.mod h1:gFx+x8UowdsKA9AchylcLynDq+nNFfI8FkUZdN/jGCU= github.com/rs/xid v1.2.1/go.mod h1:+uKXf+4Djp6Md1KODXJxgGQPKngRmWyn10oCKFzNHOQ= github.com/rs/xid v1.3.0/go.mod h1:trrq9SKmegXys3aeAKXMUTdJsYXVwGY3RLcfgqegfbg= 
github.com/rs/zerolog v1.13.0/go.mod h1:YbFCdg8HfsridGWAh22vktObvhZbQsZXe4/zB0OKkWU= github.com/rs/zerolog v1.15.0/go.mod h1:xYTKnLHcpfU2225ny5qZjxnj9NvkumZYjJHlAThCjNc= github.com/rs/zerolog v1.25.0 h1:Rj7XygbUHKUlDPcVdoLyR91fJBsduXj5fRxyqIQj/II= github.com/rs/zerolog v1.25.0/go.mod h1:7KHcEGe0QZPOm2IE4Kpb5rTh6n1h2hIgS5OOnu1rUaI= +github.com/russross/blackfriday v1.5.2 h1:HyvC0ARfnZBqnXwABFeSZHpKvJHJJfPz81GNueLj0oo= +github.com/russross/blackfriday v1.5.2/go.mod h1:JO/DiYxRf+HjHt06OyowR9PTA263kcR/rfWxYHBV53g= github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk= github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= +github.com/ryancurrah/gomodguard v1.2.3 h1:ww2fsjqocGCAFamzvv/b8IsRduuHHeK2MHTcTxZTQX8= +github.com/ryancurrah/gomodguard v1.2.3/go.mod h1:rYbA/4Tg5c54mV1sv4sQTP5WOPBcoLtnBZ7/TEhXAbg= +github.com/ryanrolds/sqlclosecheck v0.3.0 h1:AZx+Bixh8zdUBxUA1NxbxVAS78vTPq4rCb8OUZI9xFw= +github.com/ryanrolds/sqlclosecheck v0.3.0/go.mod h1:1gREqxyTGR3lVtpngyFo3hZAgk0KCtEdgEkHwDbigdA= github.com/ryanuber/columnize v0.0.0-20160712163229-9b3edd62028f/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts= github.com/safchain/ethtool v0.0.0-20190326074333-42ed695e3de8/go.mod h1:Z0q5wiBQGYcxhMZ6gUqHn6pYNLypFAvaL3UvgZLR0U4= github.com/samuel/go-zookeeper v0.0.0-20190923202752-2cc03de413da/go.mod h1:gi+0XIa01GRL2eRQVjQkKGqKF3SF9vZR/HnPullcV2E= +github.com/sanposhiho/wastedassign/v2 v2.0.6 h1:+6/hQIHKNJAUixEj6EmOngGIisyeI+T3335lYTyxRoA= +github.com/sanposhiho/wastedassign/v2 v2.0.6/go.mod h1:KyZ0MWTwxxBmfwn33zh3k1dmsbF2ud9pAAGfoLfjhtI= github.com/satori/go.uuid v1.2.0/go.mod h1:dA0hQrYB0VpLJoorglMZABFdXlWrHn1NEOzdhQKdks0= github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc= github.com/seccomp/libseccomp-golang v0.9.1/go.mod h1:GbW5+tmTXfcxTToHLXlScSlAvWlF4P2Ca7zGrPiEpWo= +github.com/securego/gosec/v2 v2.8.1 h1:Tyy/nsH39TYCOkqf5HAgRE+7B5D8sHDwPdXRgFWokh8= +github.com/securego/gosec/v2 v2.8.1/go.mod h1:pUmsq6+VyFEElJMUX+QB3p3LWNHXg1R3xh2ssVJPs8Q= +github.com/sergi/go-diff v1.1.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM= +github.com/shazow/go-diff v0.0.0-20160112020656-b6b7b6733b8c h1:W65qqJCIOVP4jpqPQ0YvHYKwcMEMVWIzWC5iNQQfBTU= +github.com/shazow/go-diff v0.0.0-20160112020656-b6b7b6733b8c/go.mod h1:/PevMnwAxekIXwN8qQyfc5gl2NlkB3CQlkizAbOkeBs= +github.com/shirou/gopsutil/v3 v3.21.7/go.mod h1:RGl11Y7XMTQPmHh8F0ayC6haKNBgH4PXMJuTAcMOlz4= github.com/shopspring/decimal v0.0.0-20180709203117-cd690d0c9e24/go.mod h1:M+9NzErvs504Cn4c5DxATwIqPbtswREoFCre64PpcG4= github.com/shopspring/decimal v0.0.0-20200227202807-02e2044944cc/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o= github.com/shopspring/decimal v1.2.0/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o= +github.com/shurcooL/go v0.0.0-20180423040247-9e1955d9fb6e/go.mod h1:TDJrrUr11Vxrven61rcy3hJMUqaf/CLWYhHNPmT14Lk= +github.com/shurcooL/go-goon v0.0.0-20170922171312-37c2f522c041/go.mod h1:N5mDOmsrJOB+vfqUK+7DmDyjhSLIIBnXo9lvZJj3MWQ= github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc= github.com/sirupsen/logrus v1.0.4-0.20170822132746-89742aefa4b2/go.mod h1:pMByvHTf9Beacp5x1UXfOR9xyW/9antXMhjMPG0dEzc= github.com/sirupsen/logrus v1.0.6/go.mod h1:pMByvHTf9Beacp5x1UXfOR9xyW/9antXMhjMPG0dEzc= @@ -851,25 +1160,47 @@ github.com/sirupsen/logrus v1.6.0/go.mod 
h1:7uNnSEd1DgxDLC74fIahvMZmmYsHGZGEOFrf github.com/sirupsen/logrus v1.7.0/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0= github.com/sirupsen/logrus v1.8.1 h1:dJKuHgqk1NNQlqoA6BTlM1Wf9DOH3NBjQyu0h9+AZZE= github.com/sirupsen/logrus v1.8.1/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0= +github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d h1:zE9ykElWQ6/NYmHa3jpm/yHnI4xSofP+UP6SpjHcSeM= github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc= github.com/smartystreets/goconvey v0.0.0-20190330032615-68dc04aab96a/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA= +github.com/smartystreets/goconvey v1.6.4 h1:fv0U8FUIMPNf1L9lnHLvLhgicrIVChEkdzIKYqbNC9s= github.com/smartystreets/goconvey v1.6.4/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA= github.com/soheilhy/cmux v0.1.4/go.mod h1:IM3LyeVVIOuxMH7sFAkER9+bJ4dT7Ms6E4xg4kGIyLM= +github.com/sonatard/noctx v0.0.1 h1:VC1Qhl6Oxx9vvWo3UDgrGXYCeKCe3Wbw7qAWL6FrmTY= +github.com/sonatard/noctx v0.0.1/go.mod h1:9D2D/EoULe8Yy2joDHJj7bv3sZoq9AaSb8B4lqBjiZI= github.com/sony/gobreaker v0.4.1/go.mod h1:ZKptC7FHNvhBz7dN2LGjPVBz2sZJmc0/PkyDJOjmxWY= +github.com/sourcegraph/go-diff v0.6.1 h1:hmA1LzxW0n1c3Q4YbrFgg4P99GSnebYa3x8gr0HZqLQ= +github.com/sourcegraph/go-diff v0.6.1/go.mod h1:iBszgVvyxdc8SFZ7gm69go2KDdt3ag071iBaWPF6cjs= github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA= github.com/spf13/afero v1.1.2/go.mod h1:j4pytiNVoe2o6bmDsKpLACNPDBIoEAkihy7loJ1B0CQ= github.com/spf13/afero v1.2.2/go.mod h1:9ZxEEn6pIJ8Rxe320qSDBk6AsU0r9pR7Q4OcevTdifk= +github.com/spf13/afero v1.6.0 h1:xoax2sJ2DT8S8xA2paPFjDCScCNeWsg75VG0DLRreiY= +github.com/spf13/afero v1.6.0/go.mod h1:Ai8FlHk4v/PARR026UzYexafAt9roJ7LcLMAmO6Z93I= github.com/spf13/cast v1.3.0/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= +github.com/spf13/cast v1.3.1 h1:nFm6S0SMdyzrzcmThSipiEubIDy8WEXKNZ0UOgiRpng= +github.com/spf13/cast v1.3.1/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= github.com/spf13/cobra v0.0.2-0.20171109065643-2da4a54c5cee/go.mod h1:1l0Ry5zgKvJasoi3XT1TypsSe7PqH0Sj9dhYf7v3XqQ= github.com/spf13/cobra v0.0.3/go.mod h1:1l0Ry5zgKvJasoi3XT1TypsSe7PqH0Sj9dhYf7v3XqQ= +github.com/spf13/cobra v0.0.5/go.mod h1:3K3wKZymM7VvHMDS9+Akkh4K60UwM26emMESw8tLCHU= github.com/spf13/cobra v1.0.0/go.mod h1:/6GTrnGXV9HjY+aR4k0oJ5tcvakLuG6EuKReYlHNrgE= +github.com/spf13/cobra v1.2.1 h1:+KmjbUw1hriSNMF55oPrkZcb27aECyrj8V2ytv7kWDw= +github.com/spf13/cobra v1.2.1/go.mod h1:ExllRjgxM/piMAM+3tAZvg8fsklGAf3tPfi+i8t68Nk= github.com/spf13/jwalterweatherman v1.0.0/go.mod h1:cQK4TGJAtQXfYWX+Ddv3mKDzgVb68N+wFjFa4jdeBTo= +github.com/spf13/jwalterweatherman v1.1.0 h1:ue6voC5bR5F8YxI5S67j9i582FU4Qvo2bmqnqMYADFk= +github.com/spf13/jwalterweatherman v1.1.0/go.mod h1:aNWZUN0dPAAO/Ljvb5BEdw96iTZ0EXowPYD95IqWIGo= github.com/spf13/pflag v0.0.0-20170130214245-9ff6c6923cff/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= github.com/spf13/pflag v1.0.1-0.20171106142849-4c012f6dcd95/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= github.com/spf13/pflag v1.0.1/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= +github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= +github.com/spf13/viper v1.3.2/go.mod h1:ZiWeW+zYFKm7srdB9IoDzzZXaJaI5eL9QjNiN/DMA2s= 
github.com/spf13/viper v1.4.0/go.mod h1:PTJ7Z/lr49W6bUbkmS1V3by4uWynFiR9p7+dSq/yZzE= +github.com/spf13/viper v1.7.1/go.mod h1:8WkrPz2fc9jxqZNCJI/76HCieCp4Q8HaLFoCha5qpdg= +github.com/spf13/viper v1.8.1 h1:Kq1fyeebqsBfbjZj4EL7gj2IO0mMaiyjYUWcUsl2O44= +github.com/spf13/viper v1.8.1/go.mod h1:o0Pch8wJ9BVSWGQMbra6iw0oQ5oktSIBaujf1rJH9Ns= +github.com/ssgreg/nlreturn/v2 v2.1.0 h1:6/s4Rc49L6Uo6RLjhWZGBpWWjfzk2yrf1nIW8m4wgVA= +github.com/ssgreg/nlreturn/v2 v2.1.0/go.mod h1:E/iiPB78hV7Szg2YfRgyIrk1AD6JVMTRkkxBiELzh2I= github.com/stefanberger/go-pkcs11uri v0.0.0-20201008174630-78d3cae3a980/go.mod h1:AO3tvPzVZ/ayst6UlUKUv6rcPQInYe3IknH3jYhAKu8= github.com/streadway/amqp v0.0.0-20190404075320-75d898a42a94/go.mod h1:AZpEONHx3DKn8O/DFsRAY58/XVQiIPMTMB1SddzLXVw= github.com/streadway/amqp v0.0.0-20190827072141-edfb9018d271/go.mod h1:AZpEONHx3DKn8O/DFsRAY58/XVQiIPMTMB1SddzLXVw= @@ -880,7 +1211,9 @@ github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+ github.com/stretchr/objx v0.2.0/go.mod h1:qt09Ya8vawLte6SNmTgCsAVtYtaKzEcn8ATUoHMkEqE= github.com/stretchr/objx v0.3.0 h1:NGXK3lHquSN08v5vWalVI/L8XU9hdzE/G6xsrze47As= github.com/stretchr/objx v0.3.0/go.mod h1:qt09Ya8vawLte6SNmTgCsAVtYtaKzEcn8ATUoHMkEqE= +github.com/stretchr/testify v0.0.0-20170130113145-4d4bfba8f1d1/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= github.com/stretchr/testify v0.0.0-20180303142811-b89eecf5ca5d/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= +github.com/stretchr/testify v1.1.4/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= @@ -888,23 +1221,44 @@ github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5 github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY= github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/subosito/gotenv v1.2.0 h1:Slr1R9HxAlEKefgq5jn9U+DnETlIUa6HfgEzj0g5d7s= +github.com/subosito/gotenv v1.2.0/go.mod h1:N0PQaV/YGNqwC0u51sEeR/aUtSLEXKX9iv69rRypqCw= github.com/syndtr/gocapability v0.0.0-20170704070218-db04d3cc01c8/go.mod h1:hkRG7XYTFWNJGYcbNJQlaLq0fg1yr4J4t/NcTQtrfww= github.com/syndtr/gocapability v0.0.0-20180916011248-d98352740cb2/go.mod h1:hkRG7XYTFWNJGYcbNJQlaLq0fg1yr4J4t/NcTQtrfww= github.com/syndtr/gocapability v0.0.0-20200815063812-42c35b437635/go.mod h1:hkRG7XYTFWNJGYcbNJQlaLq0fg1yr4J4t/NcTQtrfww= github.com/syndtr/goleveldb v1.0.0 h1:fBdIW9lB4Iz0n9khmH8w27SJ3QEJ7+IgjPEwGSZiFdE= github.com/syndtr/goleveldb v1.0.0/go.mod h1:ZVVdQEZoIme9iO1Ch2Jdy24qqXrMMOU6lpPAyBWyWuQ= github.com/tchap/go-patricia v2.2.6+incompatible/go.mod h1:bmLyhP68RS6kStMGxByiQ23RP/odRBOTVjwp2cDyi6I= +github.com/tdakkota/asciicheck v0.0.0-20200416200610-e657995f937b h1:HxLVTlqcHhFAz3nWUcuvpH7WuOMv8LQoCWmruLfFH2U= +github.com/tdakkota/asciicheck v0.0.0-20200416200610-e657995f937b/go.mod h1:yHp0ai0Z9gUljN3o0xMhYJnH/IcvkdTBOX2fmJ93JEM= +github.com/tetafro/godot v1.4.9 h1:wsNd0RuUxISqqudFqchsSsMqsM188DoZVPBeKl87tP0= +github.com/tetafro/godot v1.4.9/go.mod h1:LR3CJpxDVGlYOWn3ZZg1PgNZdTUvzsZWu8xaEohUpn8= github.com/tevino/abool v1.2.0 h1:heAkClL8H6w+mK5md9dzsuohKeXHUpY7Vw0ZCKW+huA= github.com/tevino/abool v1.2.0/go.mod 
h1:qc66Pna1RiIsPa7O4Egxxs9OqkuxDX55zznh9K07Tzg= +github.com/timakin/bodyclose v0.0.0-20200424151742-cb6215831a94 h1:ig99OeTyDwQWhPe2iw9lwfQVF1KB3Q4fpP3X7/2VBG8= +github.com/timakin/bodyclose v0.0.0-20200424151742-cb6215831a94/go.mod h1:Qimiffbc6q9tBWlVV6x0P9sat/ao1xEkREYPPj9hphk= +github.com/tklauser/go-sysconf v0.3.7/go.mod h1:JZIdXh4RmBvZDBZ41ld2bGxRV3n4daiiqA3skYhAoQ4= +github.com/tklauser/numcpus v0.2.3/go.mod h1:vpEPS/JC+oZGGQ/My/vJnNsvMDQL6PwOqt8dsCw5j+E= github.com/tmc/grpc-websocket-proxy v0.0.0-20170815181823-89b8d40f7ca8/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U= github.com/tmc/grpc-websocket-proxy v0.0.0-20190109142713-0ad062ec5ee5/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U= +github.com/tmc/grpc-websocket-proxy v0.0.0-20200427203606-3cfed13b9966/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U= +github.com/tomarrell/wrapcheck/v2 v2.3.0 h1:i3DNjtyyL1xwaBQOsPPk8LAcpayWfQv2rxNi9b/eEx4= +github.com/tomarrell/wrapcheck/v2 v2.3.0/go.mod h1:aF5rnkdtqNWP/gC7vPUO5pKsB0Oac2FDTQP4F+dpZMU= +github.com/tomasen/realip v0.0.0-20180522021738-f0c99a92ddce/go.mod h1:o8v6yHRoik09Xen7gje4m9ERNah1d1PPsVq1VEx9vE4= +github.com/tommy-muehle/go-mnd/v2 v2.4.0 h1:1t0f8Uiaq+fqKteUR4N9Umr6E99R+lDnLnq7PwX2PPE= +github.com/tommy-muehle/go-mnd/v2 v2.4.0/go.mod h1:WsUAkMJMYww6l/ufffCD3m+P7LEvr8TnZn9lwVDlgzw= github.com/ugorji/go v1.1.4/go.mod h1:uQMGLiO92mf5W77hV/PUCpI3pbzQx3CRekS0kk+RGrc= github.com/ugorji/go v1.1.7/go.mod h1:kZn38zHttfInRq0xu/PH0az30d+z6vm202qpg1oXVMw= github.com/ugorji/go v1.2.6 h1:tGiWC9HENWE2tqYycIqFTNorMmFRVhNwCpDOpWqnk8E= github.com/ugorji/go v1.2.6/go.mod h1:anCg0y61KIhDlPZmnH+so+RQbysYVyDko0IMgJv0Nn0= +github.com/ugorji/go/codec v0.0.0-20181204163529-d75b2dcb6bc8/go.mod h1:VFNgLljTbGfSG7qAOspJ7OScBnGdDN/yBr0sguwnwf0= github.com/ugorji/go/codec v1.1.7/go.mod h1:Ax+UKWsSmolVDwsd+7N3ZtXu+yMGCf907BLYF3GoBXY= github.com/ugorji/go/codec v1.2.6 h1:7kbGefxLoDBuYXOms4yD7223OpNMMPNPZxXk5TvFcyQ= github.com/ugorji/go/codec v1.2.6/go.mod h1:V6TCNZ4PHqoHGFZuSG1W8nrCzzdgA2DozYxWFFpvxTw= +github.com/ultraware/funlen v0.0.3 h1:5ylVWm8wsNwH5aWo9438pwvsK0QiqVuUrt9bn7S/iLA= +github.com/ultraware/funlen v0.0.3/go.mod h1:Dp4UiAus7Wdb9KUZsYWZEWiRzGuM2kXM1lPbfaF6xhA= +github.com/ultraware/whitespace v0.0.4 h1:If7Va4cM03mpgrNH9k49/VOicWpGoG70XPBFFODYDsg= +github.com/ultraware/whitespace v0.0.4/go.mod h1:aVMh/gQve5Maj9hQ/hg+F75lr/X5A89uZnzAmWSineA= github.com/urfave/cli v0.0.0-20171014202726-7bc6a0acffa5/go.mod h1:70zkFmudgCuE/ngEzBv17Jvp/497gISqfk5gWijbERA= github.com/urfave/cli v1.20.0/go.mod h1:70zkFmudgCuE/ngEzBv17Jvp/497gISqfk5gWijbERA= github.com/urfave/cli v1.22.1/go.mod h1:Gos4lmkARVdJ6EkW0WaNv/tZAAMe9V7XWyB60NtXRu0= @@ -912,6 +1266,13 @@ github.com/urfave/cli v1.22.2 h1:gsqYFH8bb9ekPA12kRo0hfjngWQjkJPlN9R0N78BoUo= github.com/urfave/cli v1.22.2/go.mod h1:Gos4lmkARVdJ6EkW0WaNv/tZAAMe9V7XWyB60NtXRu0= github.com/urfave/cli/v2 v2.3.0 h1:qph92Y649prgesehzOrQjdWyxFOp/QVM+6imKHad91M= github.com/urfave/cli/v2 v2.3.0/go.mod h1:LJmUH05zAU44vOAcrfzZQKsZbVcdbOG8rtL3/XcUArI= +github.com/uudashr/gocognit v1.0.5 h1:rrSex7oHr3/pPLQ0xoWq108XMU8s678FJcQ+aSfOHa4= +github.com/uudashr/gocognit v1.0.5/go.mod h1:wgYz0mitoKOTysqxTDMOUXg+Jb5SvtihkfmugIZYpEA= +github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc= +github.com/valyala/fasthttp v1.16.0/go.mod h1:YOKImeEosDdBPnxc0gy7INqi3m1zK6A+xl6TwOBhHCA= +github.com/valyala/quicktemplate v1.6.3/go.mod h1:fwPzK2fHuYEODzJ9pkw0ipCPNHZ2tD5KW4lOuSdPKzY= +github.com/valyala/tcplisten 
v0.0.0-20161114210144-ceec8f93295a/go.mod h1:v3UYOV9WzVtRmSR+PDvWpU/qWl4Wa5LApYYX4ZtKbio= +github.com/viki-org/dnscache v0.0.0-20130720023526-c70c1f23c5d8/go.mod h1:dniwbG03GafCjFohMDmz6Zc6oCuiqgH6tGNyXTkHzXE= github.com/vishvananda/netlink v0.0.0-20181108222139-023a6dafdcdf/go.mod h1:+SR5DhBJrl6ZM7CoCKvpw5BKroDKQ+PJqOg65H/2ktk= github.com/vishvananda/netlink v1.1.0/go.mod h1:cTgwzPIzzgDAYoQrMm0EdrjRUBkTqKYppBueQtXaqoE= github.com/vishvananda/netlink v1.1.1-0.20201029203352-d40f9887b852/go.mod h1:twkDnbuQxJYemMlGd4JFIcuhgX83tXhKS2B/PRMpOho= @@ -933,7 +1294,13 @@ github.com/xeipuuv/gojsonschema v0.0.0-20180618132009-1d523034197f/go.mod h1:5yf github.com/xeipuuv/gojsonschema v1.2.0 h1:LhYJRs+L4fBtjZUfuSZIKGeVu0QRy8e5Xi7D17UxZ74= github.com/xeipuuv/gojsonschema v1.2.0/go.mod h1:anYRn/JVcOK2ZgGU+IjEV4nwlhoK5sQluxsYJ78Id3Y= github.com/xiang90/probing v0.0.0-20190116061207-43a291ad63a2/go.mod h1:UETIi67q53MR2AWcXfiuqkDkRtnGDLqkBTpCHuJHxtU= +github.com/xo/terminfo v0.0.0-20210125001918-ca9a967f8778/go.mod h1:2MuV+tbUrU1zIOPMxZ5EncGwgmMJsa+9ucAQZXxsObs= github.com/xordataexchange/crypt v0.0.3-0.20170626215501-b2862e3d0a77/go.mod h1:aYKd//L2LvnjZzWKhF00oedf4jCCReLcmhLdhm1A27Q= +github.com/yeya24/promlinter v0.1.0 h1:goWULN0jH5Yajmu/K+v1xCqIREeB+48OiJ2uu2ssc7U= +github.com/yeya24/promlinter v0.1.0/go.mod h1:rs5vtZzeBHqqMwXqFScncpCF6u06lezhZepno9AB1Oc= +github.com/yudai/gojsondiff v1.0.0/go.mod h1:AY32+k2cwILAkW1fbgxQ5mUmMiZFgLIV+FBNExI05xg= +github.com/yudai/golcs v0.0.0-20170316035057-ecda9a501e82/go.mod h1:lgjkn3NuSvDfVJdfcVVdX+jpBxNmX4rDAzaS45IcYoM= +github.com/yudai/pp v2.0.1+incompatible/go.mod h1:PuxR/8QJ7cyCkFp/aUDS+JY727OFEZkTdatxwunjIkc= github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= @@ -946,9 +1313,15 @@ github.com/zenazn/goji v0.9.0/go.mod h1:7S9M489iMyHBNxwZnk9/EHS098H4/F6TATF2mIxt github.com/ziutek/mymysql v1.5.4/go.mod h1:LMSpPZ6DbqWFxNCHW77HeMg9I646SAhApZ/wKdgO/C0= go.etcd.io/bbolt v1.3.2/go.mod h1:IbVyRI1SCnLcuJnV2u8VeU0CEYM7e686BmAb1XKL+uU= go.etcd.io/bbolt v1.3.3/go.mod h1:IbVyRI1SCnLcuJnV2u8VeU0CEYM7e686BmAb1XKL+uU= +go.etcd.io/bbolt v1.3.4/go.mod h1:G5EMThwa9y8QZGBClrRx5EY+Yw9kAhnjy3bSjsnlVTQ= go.etcd.io/bbolt v1.3.5/go.mod h1:G5EMThwa9y8QZGBClrRx5EY+Yw9kAhnjy3bSjsnlVTQ= go.etcd.io/etcd v0.0.0-20191023171146-3cf2f69b5738/go.mod h1:dnLIgRNXwCJa5e+c6mIZCrds/GIG4ncV9HhK5PX7jPg= +go.etcd.io/etcd v0.0.0-20200513171258-e048e166ab9c/go.mod h1:xCI7ZzBfRuGgBXyXO6yfWfDmlWd35khcWpUa4L0xI/k= go.etcd.io/etcd v0.5.0-alpha.5.0.20200910180754-dd1b699fc489/go.mod h1:yVHk9ub3CSBatqGNg7GRmsnfLWtoW60w4eDYfh7vHDg= +go.etcd.io/etcd/api/v3 v3.5.0/go.mod h1:cbVKeC6lCfl7j/8jBhAK6aIYO9XOjdptoxU/nLQcPvs= +go.etcd.io/etcd/client/pkg/v3 v3.5.0/go.mod h1:IJHfcCEKxYu1Os13ZdwCwIUTUVGYTSAM3YSwc9/Ac1g= +go.etcd.io/etcd/client/v2 v2.305.0/go.mod h1:h9puh54ZTgAKtEbut2oe9P4L/oqKCVB6xsXlzd7alYQ= +go.mozilla.org/mozlog v0.0.0-20170222151521-4bb13139d403/go.mod h1:jHoPAGnDrCy6kaI2tAze5Prf0Nr0w/oNkROt2lw3n3o= go.mozilla.org/pkcs7 v0.0.0-20200128120323-432b2356ecb1/go.mod h1:SNgMg+EgDFwmvSmLRTNKC5fegJjB7v23qTQ0XLGUNHk= go.opencensus.io v0.20.1/go.mod h1:6WKK9ahsWS3RSO+PY9ZHZUfv2irvY6gN279GOPZjmmk= go.opencensus.io v0.20.2/go.mod h1:6WKK9ahsWS3RSO+PY9ZHZUfv2irvY6gN279GOPZjmmk= @@ -957,22 +1330,30 @@ go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8= go.opencensus.io v0.22.2/go.mod 
h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= go.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= go.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= +go.opencensus.io v0.22.5/go.mod h1:5pWMHQbX5EPX2/62yrJeAkowc+lfs/XD7Uxpq3pI6kk= +go.opencensus.io v0.23.0/go.mod h1:XItmlyltB5F7CS4xOC1DcqMoFqwtC6OG2xF7mCv7P7E= go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI= go.uber.org/atomic v1.3.2/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= go.uber.org/atomic v1.4.0/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= go.uber.org/atomic v1.5.0/go.mod h1:sABNBOSYdrvTF6hTgEIbc7YasKWGhgEQZyfxyTvoXHQ= go.uber.org/atomic v1.6.0/go.mod h1:sABNBOSYdrvTF6hTgEIbc7YasKWGhgEQZyfxyTvoXHQ= +go.uber.org/atomic v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc= go.uber.org/multierr v1.1.0/go.mod h1:wR5kodmAFQ0UK8QlbwjlSNy0Z68gJhDJUG5sjR94q/0= go.uber.org/multierr v1.3.0/go.mod h1:VgVr7evmIr6uPjLBxg28wmKNXyqE9akIJ5XnfpiKl+4= +go.uber.org/multierr v1.4.0/go.mod h1:VgVr7evmIr6uPjLBxg28wmKNXyqE9akIJ5XnfpiKl+4= go.uber.org/multierr v1.5.0/go.mod h1:FeouvMocqHpRaaGuG9EjoKcStLC43Zu/fmqdUMPcKYU= +go.uber.org/multierr v1.6.0/go.mod h1:cdWPpRnG4AhwMwsgIHip0KRBQjJy5kYEpYjJxpXp9iU= go.uber.org/tools v0.0.0-20190618225709-2cfd321de3ee/go.mod h1:vJERXedbb3MVM5f9Ejo0C68/HhF8uaILCdgjnY+goOA= go.uber.org/zap v1.9.1/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q= go.uber.org/zap v1.10.0/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q= go.uber.org/zap v1.13.0/go.mod h1:zwrFLgMcdUuIBviXEYEH1YKNaOBnKXsx2IPda5bBwHM= +go.uber.org/zap v1.17.0/go.mod h1:MXVU+bhUf/A7Xi2HNOnopQOrmycQ5Ih87HtOu4q5SSo= golang.org/x/crypto v0.0.0-20171113213409-9f005a07e0d3/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= +golang.org/x/crypto v0.0.0-20180501155221-613d6eafa307/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= golang.org/x/crypto v0.0.0-20181009213950-7c1a557ab941/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= golang.org/x/crypto v0.0.0-20181029021203-45a5f77698d3/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= +golang.org/x/crypto v0.0.0-20181203042331-505ab145d0a9/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20190325154230-a5d413f7728c/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20190411191339-88737f569e3a/go.mod h1:WFFai1msRO1wXaEeE5yQxYXgSfI8pQAWXbQop6sCtWE= @@ -988,7 +1369,9 @@ golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPh golang.org/x/crypto v0.0.0-20200728195943-123391ffb6de/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20201002170205-7f63de1d35b0/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20201203163018-be400aefbc4c/go.mod h1:jdWPYTVW3xRLrWPugEBEK3UY2ZEsg3UU495nc5E+M+I= +golang.org/x/crypto v0.0.0-20201221181555-eec23a3978ad/go.mod h1:jdWPYTVW3xRLrWPugEBEK3UY2ZEsg3UU495nc5E+M+I= golang.org/x/crypto v0.0.0-20210322153248-0c34fe9e7dc2/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4= +golang.org/x/crypto v0.0.0-20210513164829-c07d793c2f9a/go.mod h1:P+XmwS30IXTQdn5tA2iutPOUgjI07+tq3H3K9MVA1s8= golang.org/x/crypto v0.0.0-20210616213533-5ff15b29337e/go.mod 
h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.0.0-20210711020723-a769d52b0f97/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.0.0-20210817164053-32db794688a5/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= @@ -1004,6 +1387,7 @@ golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u0 golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM= golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU= +golang.org/x/exp v0.0.0-20200331195152-e8c3332aa8e5/go.mod h1:4M0jN8W1tt0AVLNr8HDosyJCDCDuyL9N9+3m7wDWgKw= golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= @@ -1016,6 +1400,8 @@ golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHl golang.org/x/lint v0.0.0-20191125180803-fdd1cda4f05f/go.mod h1:5qLYkcX4OjUUV8bRuDixDT3tpyyb+LUpUlRWLxfhWrs= golang.org/x/lint v0.0.0-20200130185559-910be7a94367/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= golang.org/x/lint v0.0.0-20200302205851-738671d3881b/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/lint v0.0.0-20201208152925-83fdc39ff7b5/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/lint v0.0.0-20210508222113-6edffad5e616/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE= golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028/go.mod h1:E/iHnbuqvinMTCcRqshq8CkpyQDoeVncDDYHnLhea+o= golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc= @@ -1024,6 +1410,8 @@ golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzB golang.org/x/mod v0.1.1-0.20191107180719-034126e5016b/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.4.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.4.2 h1:Gz96sIWK3OalVv/I/qNygP42zyoKp3xptRVCWRFEBvo= golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= @@ -1050,6 +1438,8 @@ golang.org/x/net v0.0.0-20190628185345-da137c7871d7/go.mod h1:z5CRVTTTmAJ677TzLL golang.org/x/net v0.0.0-20190724013045-ca1201d0de80/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20190813141303-74dc4d7220e7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20190827160401-ba9fcec4b297/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190923162816-aa69164e4478/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20191002035440-2ec189313ef0/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net 
v0.0.0-20191004110552-13f9640d40b9/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20191209160850-c0dbc17a3553/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20200114155413-6afb5195e5aa/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= @@ -1058,19 +1448,27 @@ golang.org/x/net v0.0.0-20200222125558-5a598a2470a0/go.mod h1:z5CRVTTTmAJ677TzLL golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20200301022130-244492dfa37a/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20200324143707-d3edc9973b7e/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200421231249-e086a090c8fd/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= golang.org/x/net v0.0.0-20200501053045-e0ff5e5a1de5/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= golang.org/x/net v0.0.0-20200506145744-7e3656a0809f/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= golang.org/x/net v0.0.0-20200513185701-a91f0712d120/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200520004742-59133d7f0dd7/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= golang.org/x/net v0.0.0-20200520182314-0ba52f642ac2/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200602114024-627f9648deb9/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= golang.org/x/net v0.0.0-20200625001655-4c5254603344/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= golang.org/x/net v0.0.0-20200707034311-ab3426394381/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= golang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= golang.org/x/net v0.0.0-20201006153459-a7d1128ccaa0/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20201031054903-ff519b6c9102/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20201209123823-ac852fbbde11/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20201224014010-6772e930b67b/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20210119194325-5f4716e94777/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20210316092652-d523dce5a7f4/go.mod h1:RBQZq4jEuRlivfhVLdyRGr576XBO4/greRjx4P4O3yc= golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= +golang.org/x/net v0.0.0-20210428140749-89ef3d95e781/go.mod h1:OJAsFXCWl8Ukc7SiCT/9KSuxbyM7479/AVlXFRxuMCk= golang.org/x/net v0.0.0-20211020060615-d418f374d309 h1:A0lJIi+hcTR6aajJH4YqKWwohY4aW9RO7oRMcdv+HKI= golang.org/x/net v0.0.0-20211020060615-d418f374d309/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= @@ -1079,12 +1477,20 @@ golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4Iltr golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 
v0.0.0-20191202225959-858c2ad4c8b6/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20200902213428-5d25da1a8d43/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20201109201403-9fd604954f58/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20201208152858-08078c50e5b5/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210218202405-ba52d332ba99/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210220000619-9bb904979d93/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210313182246-cd4f82c27b84/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210402161424-2e8d93401602/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/oauth2 v0.0.0-20211005180243-6b3c2da341f1 h1:B333XXssMuKQeBwiNODx4TupZy7bf4sxFZnN2ZOcvUE= golang.org/x/oauth2 v0.0.0-20211005180243-6b3c2da341f1/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190412183630-56d357773e84/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20200317015054-43a5402ce75a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= @@ -1101,6 +1507,7 @@ golang.org/x/sys v0.0.0-20181026203630-95b1ffbd15a5/go.mod h1:STP8DvDyc/dI5b8T5h golang.org/x/sys v0.0.0-20181107165924-66b7b1311ac8/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20181122145206-62eef0e2fa9b/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20181205085412-a5c9d58dba9a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -1123,8 +1530,10 @@ golang.org/x/sys v0.0.0-20190813064441-fde4db37ae7a/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20190826190057-c7b8b68b1456/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190904154756-749cb33beabd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190916202348-b4ddaad3f8a3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190924154521-2837fb4f24fe/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= 
golang.org/x/sys v0.0.0-20191005200804-aed5e4c7ecf9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191008105621-543471e840be/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191022100944-742c48ecaeb7/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191115151921-52ab43148777/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -1146,16 +1555,19 @@ golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200331124033-c3d80250170d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200420163511-1957bb5e6d1f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200501052902-10377860bb8e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200511232937-7e40ca221e25/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200515095857-1151b9dac4a9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200523222454-059865788121/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200602225109-6fdc65e7d980/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200615200032-f1bc736245b1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200622214017-ed371f2e16b4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200625212154-ddb9806d33ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200728102440-3e129f6d46b1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200803210538-64077c9b5642/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200817155316-9781c653f443/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200905004654-be1d3432aa8f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200909081042-eff7692f9009/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200916030750-2334cc1a136f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200922070232-aee5d888a860/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -1166,16 +1578,24 @@ golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20201126233918-771906719818/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20201201145000-ef89a241ccb3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20201202213521-69691e467435/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210104204734-6f8348627aad/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210112080510-489259a85091/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210124154548-22da62e12c0c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys 
v0.0.0-20210220050731-9a76102bfb43/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210305230114-8fe3ee5dd75b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210315160823-c6e025ad8005/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210320140829-1e4c9ba3b0c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210324051608-47abb6519492/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210403161142-5e06dd20ab57/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210426230700-d19ff857e887/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210603081109-ebe580a85c40/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210616094352-59db8d763f22/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210806184541-e5e7981a1069/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20211025201205-69cdffdb9359 h1:2B5p2L5IfGiD7+b9BOoRMC6DgObAVZV+Fsp050NqXik= golang.org/x/sys v0.0.0-20211025201205-69cdffdb9359/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= @@ -1195,18 +1615,25 @@ golang.org/x/time v0.0.0-20180412165947-fbb02b2291d2/go.mod h1:tRJNPiyCQ0inRvYxb golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20200416051211-89c76fbcd5d1/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20200630173020-3af7569d3a1e/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20210723032227-1f47c861a9ac h1:7zkz7BUtwNFFqcowJ+RIgu2MaV/MapERkDIy+mwPyjs= golang.org/x/time v0.0.0-20210723032227-1f47c861a9ac/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/tools v0.0.0-20180221164845-07fd8470d635/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20180525024113-a5b4c53f6e8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20180828015842-6cd1fcedba52/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20181030221726-6c7e314b6563/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20190110163146-51295c7ec13a/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= +golang.org/x/tools 
v0.0.0-20190307163923-6a08e3108db3/go.mod h1:25r3+/G6/xytQM8iWZKq3Hn0kr0rgFKPUNVEL/dr3z4= golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190311215038-5c2858a9cfe5/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= golang.org/x/tools v0.0.0-20190312151545-0bb0c0a6e846/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190321232350-e250d351ecad/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190322203728-c1a832b0ad89/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= golang.org/x/tools v0.0.0-20190328211700-ab21143f2384/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= golang.org/x/tools v0.0.0-20190425163242-31fd60d6bfdc/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= @@ -1219,19 +1646,26 @@ golang.org/x/tools v0.0.0-20190624222133-a101b041ded4/go.mod h1:/rFqwRUd4F7ZHNgw golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= golang.org/x/tools v0.0.0-20190816200558-6889da9d5479/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20190823170909-c4a336ef6a2f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20190910044552-dd2b5c81c578/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20190916130336-e45ffcd953cc/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191010075000-0337d82405ff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191029041327-9cc4af7d6b2c/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191029190741-b9c20aec41a5/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191108193012-7d206e10da11/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191112195655-aa38f8e97acc/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191113191852-77e3bb0ad9e7/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191115202509-3a792d9c32b2/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191125144606-a911d9008d1f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191130070609-6e064ea0cf2d/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191216052735-49a3e744a425/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= golang.org/x/tools v0.0.0-20191216173652-a0e659d51361/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= golang.org/x/tools v0.0.0-20191227053925-7b8e75db28f4/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= golang.org/x/tools v0.0.0-20200103221440-774c71fcf114/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= golang.org/x/tools v0.0.0-20200117161641-43d50277825c/go.mod 
h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200117220505-0cba7a3a9ee9/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= golang.org/x/tools v0.0.0-20200122220014-bf1340f18c4a/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= golang.org/x/tools v0.0.0-20200204074204-1cc6d1ef6c74/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= @@ -1241,18 +1675,52 @@ golang.org/x/tools v0.0.0-20200224181240-023911ca70b2/go.mod h1:TB2adYChydJhpapK golang.org/x/tools v0.0.0-20200227222343-706bc42d1f0d/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= golang.org/x/tools v0.0.0-20200304193943-95d2e580d8eb/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= golang.org/x/tools v0.0.0-20200312045724-11d5b4c81c7d/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= +golang.org/x/tools v0.0.0-20200324003944-a576cf524670/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8= golang.org/x/tools v0.0.0-20200325010219-a49f79bcc224/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8= +golang.org/x/tools v0.0.0-20200329025819-fd4102a86c65/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8= golang.org/x/tools v0.0.0-20200331025713-a30bf2db82d4/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8= +golang.org/x/tools v0.0.0-20200414032229-332987a829c3/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200422022333-3d57cf2e726e/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200426102838-f3a5411a4c3b/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20200501065659-ab2804fb9c9d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20200512131952-2bc93b1c0c88/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20200515010526-7d3b6ebf133d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20200618134242-20370b0cb4b2/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200622203043-20e05c1c8ffa/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200624225443-88f3c62a19ff/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200625211823-6506e20df31f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200626171337-aa94e735be7f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200630154851-b2d8b0336632/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200706234117-b22de6825cf7/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200724022722-7017fd6b1305/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= golang.org/x/tools v0.0.0-20200729194436-6467de6f59a7/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= golang.org/x/tools v0.0.0-20200804011535-6c149bb5ef0d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200812195022-5ae4c3c160a0/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200820010801-b793a1359eac/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= golang.org/x/tools v0.0.0-20200825202427-b303f430e36d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= 
+golang.org/x/tools v0.0.0-20200831203904-5a2aa26beb65/go.mod h1:Cj7w3i3Rnn0Xh82ur9kSqwfTHTeVxaDqrfMjpcNT6bE= +golang.org/x/tools v0.0.0-20200904185747-39188db58858/go.mod h1:Cj7w3i3Rnn0Xh82ur9kSqwfTHTeVxaDqrfMjpcNT6bE= +golang.org/x/tools v0.0.0-20201001104356-43ebab892c4c/go.mod h1:z6u4i615ZeAfBE4XtMziQW1fSVJXACjjbWkB/mvPzlU= +golang.org/x/tools v0.0.0-20201002184944-ecd9fd270d5d/go.mod h1:z6u4i615ZeAfBE4XtMziQW1fSVJXACjjbWkB/mvPzlU= +golang.org/x/tools v0.0.0-20201023174141-c8cfbd0f21e6/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20201028025901-8cd080b735b3/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20201110124207-079ba7bd75cd/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20201114224030-61ea331ec02b/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20201118003311-bd56c0adb394/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20201124115921-2c860bdd6e78/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20201201161351-ac6f37ff4c2a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20201208233053-a543418bbed2/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20201224043029-2b0845dc783e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20201230224404-63754364767c/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20210101214203-2dba1e4ea05c/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20210104081019-d8d6ddbec6ee/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20210105154028-b0ab187a4818/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0= +golang.org/x/tools v0.1.2/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= +golang.org/x/tools v0.1.3/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= +golang.org/x/tools v0.1.4/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/tools v0.1.5 h1:ouewzE6p+/VEB31YYnTbEJdi8pFqKp4P4n85vwo3DHA= golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/xerrors v0.0.0-20190410155217-1f06c39b4373/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= @@ -1268,6 +1736,7 @@ google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEt google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M= google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= google.golang.org/api v0.9.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= +google.golang.org/api v0.10.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= google.golang.org/api v0.13.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= google.golang.org/api v0.14.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= google.golang.org/api v0.15.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= @@ -1280,18 +1749,27 @@ google.golang.org/api v0.24.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0M google.golang.org/api v0.28.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE= google.golang.org/api v0.29.0/go.mod h1:Lcubydp8VUV7KeIHD9z2Bys/sm/vGKnG1UHuDBSrHWM= 
google.golang.org/api v0.30.0/go.mod h1:QGmEvQ87FHZNiUVJkT14jQNYJ4ZJjdRF23ZXz5138Fc= +google.golang.org/api v0.35.0/go.mod h1:/XrVsuzM0rZmrsbjJutiuftIzeuTQcEeaYcSk/mQ1dg= +google.golang.org/api v0.36.0/go.mod h1:+z5ficQTmoYpPn8LCUNVpK5I7hwkpjbcgqA7I34qYtE= +google.golang.org/api v0.40.0/go.mod h1:fYKFpnQN0DsDSKRVRcQSDQNtqWPfM9i+zNPxepjRCQ8= +google.golang.org/api v0.41.0/go.mod h1:RkxM5lITDfTzmyKFPt+wGrCJbVfniCr2ool8kTBzRTU= +google.golang.org/api v0.43.0/go.mod h1:nQsDGjRXMo4lvh5hP0TKqF244gqhGcr/YSIykhUk/94= +google.golang.org/api v0.44.0/go.mod h1:EBOGZqzyhtvMDoxwS97ctnh0zUmYY6CxqXsc1AvkYD8= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= google.golang.org/appengine v1.2.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/appengine v1.3.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/appengine v1.6.1/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0= +google.golang.org/appengine v1.6.2/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0= google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= google.golang.org/appengine v1.6.6/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= google.golang.org/appengine v1.6.7 h1:FZR1q0exgwxzPzp/aF+VccGrSfxfPpkBqjIIEq3ru6c= google.golang.org/appengine v1.6.7/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= google.golang.org/cloud v0.0.0-20151119220103-975617b05ea8/go.mod h1:0H1ncTHf11KCFhTc/+EFRbzSCOZx+VUbRMk55Yv5MYk= +google.golang.org/genproto v0.0.0-20170818010345-ee236bd376b0/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= +google.golang.org/genproto v0.0.0-20181107211654-5fc9ac540362/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= google.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= google.golang.org/genproto v0.0.0-20190425155659-357c62f0e4bb/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= @@ -1301,6 +1779,7 @@ google.golang.org/genproto v0.0.0-20190530194941-fb225487d101/go.mod h1:z3L6/3dT google.golang.org/genproto v0.0.0-20190801165951-fa694d86fc64/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= google.golang.org/genproto v0.0.0-20190911173649-1774047e7e51/go.mod h1:IbNlFCBrqXvoKpeg0TB2l7cyZUmoaFKYIwrEpbDKLA8= +google.golang.org/genproto v0.0.0-20190927181202-20e1ac93f88c/go.mod h1:IbNlFCBrqXvoKpeg0TB2l7cyZUmoaFKYIwrEpbDKLA8= google.golang.org/genproto v0.0.0-20191108220845-16a3f7862a1a/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= google.golang.org/genproto v0.0.0-20191115194625-c23dd37a84c9/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= google.golang.org/genproto v0.0.0-20191216164720-4f79533eabd1/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= @@ -1315,19 +1794,34 @@ google.golang.org/genproto v0.0.0-20200228133532-8c2c7df3a383/go.mod h1:55QSHmfG google.golang.org/genproto v0.0.0-20200305110556-506484158171/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= 
google.golang.org/genproto v0.0.0-20200312145019-da6875a35672/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= google.golang.org/genproto v0.0.0-20200331122359-1ee6d9798940/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200423170343-7949de9c1215/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= google.golang.org/genproto v0.0.0-20200430143042-b979b6f78d84/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= google.golang.org/genproto v0.0.0-20200511104702-f5ebc3bea380/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= google.golang.org/genproto v0.0.0-20200513103714-09dca8ec2884/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= google.golang.org/genproto v0.0.0-20200515170657-fc4c6c6a6587/go.mod h1:YsZOwe1myG/8QRHRsmBRE1LrgQY60beZKjly0O1fX9U= google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= google.golang.org/genproto v0.0.0-20200618031413-b414f8b61790/go.mod h1:jDfRM7FcilCzHH/e9qn6dsT145K34l5v+OpcnNgKAAA= +google.golang.org/genproto v0.0.0-20200626011028-ee7919e894b5/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20200707001353-8e8330bf89df/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20200729003335-053ba62fc06f/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20200804131852-c06518451d9c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20200825200019-8632dd797987/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20200904004341-0bd0a958aa1d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201109203340-2640f1f9cdfb/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20201110150050-8816d57aaa9a/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201201144952-b05cb90ed32e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201210142538-e3217bee35cc/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201214200347-8c77b98c765d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210222152913-aa3ee6e6a81c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210303154014-9728d6b83eeb/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210310155132-4ce2db91004e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210319143718-93e7006c17a6/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210402141018-6c239bbf2bb1/go.mod h1:9lPAdzaEmUacj36I+k7YKbEc5CXzPIeORRgDAUOu28A= +google.golang.org/genproto v0.0.0-20210602131652-f16073e35f0c/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0= google.golang.org/genproto v0.0.0-20211027162914-98a5263abeca h1:+e+aQDO4/c9KaG8PXWHTc6/+Du6kz+BKcXCSnV4SSTE= google.golang.org/genproto v0.0.0-20211027162914-98a5263abeca/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc= google.golang.org/grpc v0.0.0-20160317175043-d3ddb4469d5a/go.mod h1:yo6s7OP7yaDglbqo1J04qKzAhqBH6lvTonzMVmEdcZw= +google.golang.org/grpc v1.8.0/go.mod h1:yo6s7OP7yaDglbqo1J04qKzAhqBH6lvTonzMVmEdcZw= google.golang.org/grpc v1.17.0/go.mod 
h1:6QZJwpn2B+Zp71q/5VxRsJ6NXXVCE5NRUHRo+f3cWCs= google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= google.golang.org/grpc v1.20.0/go.mod h1:chYK+tFQF0nDUGJgXMSgLCQk3phJEuONr2DCgLDdAQM= @@ -1343,12 +1837,18 @@ google.golang.org/grpc v1.26.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8 google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= google.golang.org/grpc v1.27.1/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= google.golang.org/grpc v1.28.0/go.mod h1:rpkK4SK4GF4Ach/+MFLZUBavHOvF2JJB5uozKKal+60= +google.golang.org/grpc v1.29.0/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3IjizoKk= google.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3IjizoKk= google.golang.org/grpc v1.30.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= google.golang.org/grpc v1.31.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= +google.golang.org/grpc v1.31.1/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= google.golang.org/grpc v1.33.1/go.mod h1:fr5YgcSWrqhRRxogOsw7RzIpsmvOZ6IcH4kBYTpR3n0= google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc= +google.golang.org/grpc v1.34.0/go.mod h1:WotjhfgOW/POjDeRt8vscBtXq+2VjORFy659qA51WJ8= +google.golang.org/grpc v1.35.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= google.golang.org/grpc v1.36.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= +google.golang.org/grpc v1.36.1/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= +google.golang.org/grpc v1.38.0/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM= google.golang.org/grpc v1.40.0/go.mod h1:ogyxbiOoUXAkP+4+xa6PZSE9DZgIHtSpzjDTB9KAK34= google.golang.org/grpc v1.41.0 h1:f+PlOh7QV4iIJkPrx5NQ7qaNGFQ3OTse67yaDHfju4E= google.golang.org/grpc v1.41.0/go.mod h1:U3l9uK9J0sini8mHphKoXyaqDA/8VyGnDee1zzIUK6k= @@ -1372,15 +1872,20 @@ gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8 gopkg.in/check.v1 v1.0.0-20141024133853-64131543e789/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= gopkg.in/cheggaaa/pb.v1 v1.0.25/go.mod h1:V/YB90LKu/1FcN3WVnfiiE5oMCibMjukxqG/qStrOgw= +gopkg.in/cheggaaa/pb.v1 v1.0.28/go.mod h1:V/YB90LKu/1FcN3WVnfiiE5oMCibMjukxqG/qStrOgw= gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys= gopkg.in/gcfg.v1 v1.2.3/go.mod h1:yesOnuUOFQAhST5vPY4nbZsb/huCgGGXlipJsBn0b3o= gopkg.in/gemnasium/logrus-airbrake-hook.v2 v2.1.2/go.mod h1:Xk6kEKp8OKb+X14hQBKWaSkCsqBpgog8nAV2xsGOxlo= gopkg.in/inconshreveable/log15.v2 v2.0.0-20180818164646-67afb5ed74ec/go.mod h1:aPpfJ7XW+gOuirDoZ8gHhLh3kZ1B08FtV2bbmy7Jv3s= gopkg.in/inf.v0 v0.9.1/go.mod h1:cWUDdTG/fYaXco+Dcufb5Vnc6Gp2YChqWtbxRZE0mXw= +gopkg.in/ini.v1 v1.51.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= +gopkg.in/ini.v1 v1.62.0 h1:duBzk771uxoUuOlyRLkHsygud9+5lrlGjdFBb4mSKDU= +gopkg.in/ini.v1 v1.62.0/go.mod 
h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= gopkg.in/natefinch/lumberjack.v2 v2.0.0/go.mod h1:l0ndWWf7gzL7RNwBG7wST/UCcT4T24xpD6X8LsfU/+k= gopkg.in/resty.v1 v1.12.0/go.mod h1:mDo4pnntr5jdWRML875a/NmxYqAlA73dVijT2AXvQQo= gopkg.in/square/go-jose.v2 v2.2.2/go.mod h1:M9dMgbHiYLoDGQrXy7OpJDJWiKiU//h+vD76mk0e1AI= @@ -1395,6 +1900,7 @@ gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.5/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.6/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= @@ -1415,6 +1921,8 @@ honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWh honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg= honnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= +honnef.co/go/tools v0.2.1 h1:/EPr//+UMMXwMTkXvCCoaJDq8cpjMO80Ou+L4PDo2mY= +honnef.co/go/tools v0.2.1/go.mod h1:lPVVZ2BS5TfnjLyizF7o7hv7j9/L+8cZY2hLyjP9cGY= k8s.io/api v0.20.1/go.mod h1:KqwcCVogGxQY3nBlRpwt+wpAMF/KjaCc7RpywacvqUo= k8s.io/api v0.20.4/go.mod h1:++lNL1AJMkDymriNniQsWRkMDzRaX2Y/POTUi8yvqYQ= k8s.io/api v0.20.6/go.mod h1:X9e8Qag6JV/bL5G6bU8sdVRltWKmdHsFUGS3eVndqE8= @@ -1467,6 +1975,14 @@ modernc.org/tcl v1.5.5/go.mod h1:ADkaTUuwukkrlhqwERyq0SM8OvyXo7+TjFz7yAF56EI= modernc.org/token v1.0.0 h1:a0jaWiNMDhDUtqOj09wvjWWAqd3q7WpBulmL9H2egsk= modernc.org/token v1.0.0/go.mod h1:UGzOrNV1mAFSEB63lOFHIpNRUVMvYTc6yu1SMY/XTDM= modernc.org/z v1.0.1/go.mod h1:8/SRk5C/HgiQWCgXdfpb+1RvhORdkz5sw72d3jjtyqA= +mvdan.cc/gofumpt v0.1.1 h1:bi/1aS/5W00E2ny5q65w9SnKpWEF/UIOqDYBILpo9rA= +mvdan.cc/gofumpt v0.1.1/go.mod h1:yXG1r1WqZVKWbVRtBWKWX9+CxGYfA51nSomhM0woR48= +mvdan.cc/interfacer v0.0.0-20180901003855-c20040233aed h1:WX1yoOaKQfddO/mLzdV4wptyWgoH/6hwLs7QHTixo0I= +mvdan.cc/interfacer v0.0.0-20180901003855-c20040233aed/go.mod h1:Xkxe497xwlCKkIaQYRfC7CSLworTXY9RMqwhhCm+8Nc= +mvdan.cc/lint v0.0.0-20170908181259-adc824a0674b h1:DxJ5nJdkhDlLok9K6qO+5290kphDJbHOQO1DFFFTeBo= +mvdan.cc/lint v0.0.0-20170908181259-adc824a0674b/go.mod h1:2odslEg/xrtNQqCYg2/jCoyKnw3vv5biOc3JnIcYfL4= +mvdan.cc/unparam v0.0.0-20210104141923-aac4ce9116a7 h1:HT3e4Krq+IE44tiN36RvVEb6tvqeIdtsVSsxmNPqlFU= +mvdan.cc/unparam v0.0.0-20210104141923-aac4ce9116a7/go.mod h1:hBpJkZE8H/sb+VRFvw2+rBpHNsTBcvSpk61hr8mzXZE= rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8= rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0= rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA= diff --git a/tools.go b/tools.go index c5603957b..ce7cd899b 100644 --- a/tools.go +++ b/tools.go @@ -5,5 +5,6 @@ package tools import ( + _ "github.com/golangci/golangci-lint/cmd/golangci-lint" _ "github.com/rs/zerolog/cmd/lint" ) diff --git a/vendor/4d63.com/gochecknoglobals/LICENSE b/vendor/4d63.com/gochecknoglobals/LICENSE new file mode 100644 index 000000000..c401e6608 --- /dev/null +++ b/vendor/4d63.com/gochecknoglobals/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2018 Leigh 
McCulloch + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/4d63.com/gochecknoglobals/checknoglobals/check_no_globals.go b/vendor/4d63.com/gochecknoglobals/checknoglobals/check_no_globals.go new file mode 100644 index 000000000..5b6325dd9 --- /dev/null +++ b/vendor/4d63.com/gochecknoglobals/checknoglobals/check_no_globals.go @@ -0,0 +1,154 @@ +package checknoglobals + +import ( + "flag" + "fmt" + "go/ast" + "go/token" + "strings" + + "golang.org/x/tools/go/analysis" +) + +// allowedExpression is a struct representing packages and methods that will +// be an allowed combination to use as a global variable, f.ex. Name `regexp` +// and SelName `MustCompile`. +type allowedExpression struct { + Name string + SelName string +} + +const Doc = `check that no global variables exist + +This analyzer checks for global variables and errors on any found. + +A global variable is a variable declared in package scope and that can be read +and written to by any function within the package. Global variables can cause +side effects which are difficult to keep track of. A code in one function may +change the variables state while another unrelated chunk of code may be +effected by it.` + +// Analyzer provides an Analyzer that checks that there are no global +// variables, except for errors and variables containing regular +// expressions. +func Analyzer() *analysis.Analyzer { + return &analysis.Analyzer{ + Name: "gochecknoglobals", + Doc: Doc, + Run: checkNoGlobals, + Flags: flags(), + RunDespiteErrors: true, + } +} + +func flags() flag.FlagSet { + flags := flag.NewFlagSet("", flag.ExitOnError) + flags.Bool("t", false, "Include tests") + + return *flags +} + +func isAllowed(v ast.Node) bool { + switch i := v.(type) { + case *ast.Ident: + return i.Name == "_" || i.Name == "version" || looksLikeError(i) + case *ast.CallExpr: + if expr, ok := i.Fun.(*ast.SelectorExpr); ok { + return isAllowedSelectorExpression(expr) + } + case *ast.CompositeLit: + if expr, ok := i.Type.(*ast.SelectorExpr); ok { + return isAllowedSelectorExpression(expr) + } + } + + return false +} + +func isAllowedSelectorExpression(v *ast.SelectorExpr) bool { + x, ok := v.X.(*ast.Ident) + if !ok { + return false + } + + allowList := []allowedExpression{ + {Name: "regexp", SelName: "MustCompile"}, + } + + for _, i := range allowList { + if x.Name == i.Name && v.Sel.Name == i.SelName { + return true + } + } + + return false +} + +// looksLikeError returns true if the AST identifier starts +// with 'err' or 'Err', or false otherwise. 
+// +// TODO: https://github.com/leighmcculloch/gochecknoglobals/issues/5 +func looksLikeError(i *ast.Ident) bool { + prefix := "err" + if i.IsExported() { + prefix = "Err" + } + return strings.HasPrefix(i.Name, prefix) +} + +func checkNoGlobals(pass *analysis.Pass) (interface{}, error) { + includeTests := pass.Analyzer.Flags.Lookup("t").Value.(flag.Getter).Get().(bool) + + for _, file := range pass.Files { + filename := pass.Fset.Position(file.Pos()).Filename + if !strings.HasSuffix(filename, ".go") { + continue + } + if !includeTests && strings.HasSuffix(filename, "_test.go") { + continue + } + + for _, decl := range file.Decls { + genDecl, ok := decl.(*ast.GenDecl) + if !ok { + continue + } + if genDecl.Tok != token.VAR { + continue + } + for _, spec := range genDecl.Specs { + valueSpec := spec.(*ast.ValueSpec) + onlyAllowedValues := false + + for _, vn := range valueSpec.Values { + if isAllowed(vn) { + onlyAllowedValues = true + continue + } + + onlyAllowedValues = false + break + } + + if onlyAllowedValues { + continue + } + + for _, vn := range valueSpec.Names { + if isAllowed(vn) { + continue + } + + message := fmt.Sprintf("%s is a global variable", vn.Name) + pass.Report(analysis.Diagnostic{ + Pos: vn.Pos(), + Category: "global", + Message: message, + }) + } + } + } + } + + return nil, nil +} diff --git a/vendor/github.com/Antonboom/errname/LICENSE b/vendor/github.com/Antonboom/errname/LICENSE new file mode 100644 index 000000000..e2002e4d4 --- /dev/null +++ b/vendor/github.com/Antonboom/errname/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2021 Anton Telyshev + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/github.com/Antonboom/errname/pkg/analyzer/analyzer.go b/vendor/github.com/Antonboom/errname/pkg/analyzer/analyzer.go new file mode 100644 index 000000000..984a6a9d8 --- /dev/null +++ b/vendor/github.com/Antonboom/errname/pkg/analyzer/analyzer.go @@ -0,0 +1,134 @@ +package analyzer + +import ( + "go/ast" + "go/token" + "strconv" + "strings" + "unicode" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/ast/inspector" +) + +// New returns new errname analyzer. 
+func New() *analysis.Analyzer { + return &analysis.Analyzer{ + Name: "errname", + Doc: "Checks that sentinel errors are prefixed with the `Err` and error types are suffixed with the `Error`.", + Run: run, + Requires: []*analysis.Analyzer{inspect.Analyzer}, + } +} + +type stringSet = map[string]struct{} + +var ( + imports = []ast.Node{(*ast.ImportSpec)(nil)} + types = []ast.Node{(*ast.TypeSpec)(nil)} + funcs = []ast.Node{(*ast.FuncDecl)(nil)} +) + +func run(pass *analysis.Pass) (interface{}, error) { + insp := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + + pkgAliases := map[string]string{} + insp.Preorder(imports, func(node ast.Node) { + i := node.(*ast.ImportSpec) + if n := i.Name; n != nil && i.Path != nil { + if path, err := strconv.Unquote(i.Path.Value); err == nil { + pkgAliases[n.Name] = getPkgFromPath(path) + } + } + }) + + allTypes := stringSet{} + typesSpecs := map[string]*ast.TypeSpec{} + insp.Preorder(types, func(node ast.Node) { + t := node.(*ast.TypeSpec) + allTypes[t.Name.Name] = struct{}{} + typesSpecs[t.Name.Name] = t + }) + + errorTypes := stringSet{} + insp.Preorder(funcs, func(node ast.Node) { + f := node.(*ast.FuncDecl) + t, ok := isMethodError(f) + if !ok { + return + } + errorTypes[t] = struct{}{} + + tSpec, ok := typesSpecs[t] + if !ok { + panic("no specification for type " + t) + } + + if _, ok := tSpec.Type.(*ast.ArrayType); ok { + if !isValidErrorArrayTypeName(t) { + reportAboutErrorType(pass, tSpec.Pos(), t, true) + } + } else if !isValidErrorTypeName(t) { + reportAboutErrorType(pass, tSpec.Pos(), t, false) + } + }) + + errorFuncs := stringSet{} + insp.Preorder(funcs, func(node ast.Node) { + f := node.(*ast.FuncDecl) + if isFuncReturningErr(f.Type, allTypes, errorTypes) { + errorFuncs[f.Name.Name] = struct{}{} + } + }) + + inspectPkgLevelVarsOnly := func(node ast.Node) bool { + switch v := node.(type) { + case *ast.FuncDecl: + return false + + case *ast.ValueSpec: + if name, ok := isSentinelError(v, pkgAliases, allTypes, errorTypes, errorFuncs); ok && !isValidErrorVarName(name) { + reportAboutErrorVar(pass, v.Pos(), name) + } + } + return true + } + for _, f := range pass.Files { + ast.Inspect(f, inspectPkgLevelVarsOnly) + } + + return nil, nil +} + +func reportAboutErrorType(pass *analysis.Pass, typePos token.Pos, typeName string, isArrayType bool) { + var form string + if unicode.IsLower([]rune(typeName)[0]) { + form = "xxxError" + } else { + form = "XxxError" + } + + if isArrayType { + form += "s" + } + pass.Reportf(typePos, "the type name `%s` should conform to the `%s` format", typeName, form) +} + +func reportAboutErrorVar(pass *analysis.Pass, pos token.Pos, varName string) { + var form string + if unicode.IsLower([]rune(varName)[0]) { + form = "errXxx" + } else { + form = "ErrXxx" + } + pass.Reportf(pos, "the variable name `%s` should conform to the `%s` format", varName, form) +} + +func getPkgFromPath(p string) string { + idx := strings.LastIndex(p, "/") + if idx == -1 { + return p + } + return p[idx+1:] +} diff --git a/vendor/github.com/Antonboom/errname/pkg/analyzer/facts.go b/vendor/github.com/Antonboom/errname/pkg/analyzer/facts.go new file mode 100644 index 000000000..ce57159b0 --- /dev/null +++ b/vendor/github.com/Antonboom/errname/pkg/analyzer/facts.go @@ -0,0 +1,237 @@ +package analyzer + +import ( + "go/ast" + "go/token" + "strings" + "unicode" +) + +func isMethodError(f *ast.FuncDecl) (typeName string, ok bool) { + if f.Recv == nil { + return "", false + } + if f.Name.Name != "Error" { + return "", false + } + + if f.Type == nil || 
f.Type.Results == nil || len(f.Type.Results.List) != 1 { + return "", false + } + + returnType, ok := f.Type.Results.List[0].Type.(*ast.Ident) + if !ok { + return "", false + } + + var receiverType string + + switch rt := f.Recv.List[0].Type.(type) { + case *ast.Ident: + receiverType = rt.Name + case *ast.StarExpr: + if i, ok := rt.X.(*ast.Ident); ok { + receiverType = i.Name + } + } + + return receiverType, returnType.Name == "string" +} + +func isValidErrorTypeName(s string) bool { + if isInitialism(s) { + return true + } + + words := split(s) + wordsCnt := wordsCount(words) + + if wordsCnt["error"] != 1 { + return false + } + return words[len(words)-1] == "error" +} + +func isValidErrorArrayTypeName(s string) bool { + if isInitialism(s) { + return true + } + + words := split(s) + wordsCnt := wordsCount(words) + + if wordsCnt["errors"] != 1 { + return false + } + return words[len(words)-1] == "errors" +} + +func isFuncReturningErr(fType *ast.FuncType, allTypes, errorTypes stringSet) bool { + if fType == nil || fType.Results == nil || len(fType.Results.List) != 1 { + return false + } + + var returnTypeName string + switch rt := fType.Results.List[0].Type.(type) { + case *ast.Ident: + returnTypeName = rt.Name + case *ast.StarExpr: + if i, ok := rt.X.(*ast.Ident); ok { + returnTypeName = i.Name + } + } + + return isErrorType(returnTypeName, allTypes, errorTypes) +} + +func isErrorType(tName string, allTypes, errorTypes stringSet) bool { + _, isUserType := allTypes[tName] + _, isErrType := errorTypes[tName] + return isErrType || (tName == "error" && !isUserType) +} + +var knownErrConstructors = stringSet{ + "fmt.Errorf": {}, + "errors.Errorf": {}, + "errors.New": {}, + "errors.Newf": {}, + "errors.NewWithDepth": {}, + "errors.NewWithDepthf": {}, + "errors.NewAssertionErrorWithWrappedErrf": {}, +} + +func isSentinelError( //nolint:gocognit + v *ast.ValueSpec, + pkgAliases map[string]string, + allTypes, errorTypes, errorFuncs stringSet, +) (varName string, ok bool) { + if len(v.Names) != 1 { + return "", false + } + varName = v.Names[0].Name + + switch vv := v.Type.(type) { + // var ErrEndOfFile error + // var ErrEndOfFile SomeErrType + case *ast.Ident: + if isErrorType(vv.Name, allTypes, errorTypes) { + return varName, true + } + + // var ErrEndOfFile *SomeErrType + case *ast.StarExpr: + if i, ok := vv.X.(*ast.Ident); ok && isErrorType(i.Name, allTypes, errorTypes) { + return varName, true + } + } + + if len(v.Values) != 1 { + return "", false + } + + switch vv := v.Values[0].(type) { + case *ast.CallExpr: + switch fun := vv.Fun.(type) { + // var ErrEndOfFile = errors.New("end of file") + case *ast.SelectorExpr: + pkg, ok := fun.X.(*ast.Ident) + if !ok { + return "", false + } + pkgFun := fun.Sel + + pkgName := pkg.Name + if a, ok := pkgAliases[pkgName]; ok { + pkgName = a + } + + _, ok = knownErrConstructors[pkgName+"."+pkgFun.Name] + return varName, ok + + // var ErrEndOfFile = newErrEndOfFile() + // var ErrEndOfFile = new(EndOfFileError) + // const ErrEndOfFile = constError("end of file") + case *ast.Ident: + if isErrorType(fun.Name, allTypes, errorTypes) { + return varName, true + } + + if _, ok := errorFuncs[fun.Name]; ok { + return varName, true + } + + if fun.Name == "new" && len(vv.Args) == 1 { + if i, ok := vv.Args[0].(*ast.Ident); ok { + return varName, isErrorType(i.Name, allTypes, errorTypes) + } + } + + // var ErrEndOfFile = func() error { ... 
} + case *ast.FuncLit: + return varName, isFuncReturningErr(fun.Type, allTypes, errorTypes) + } + + // var ErrEndOfFile = &EndOfFileError{} + case *ast.UnaryExpr: + if vv.Op == token.AND { // & + if lit, ok := vv.X.(*ast.CompositeLit); ok { + if i, ok := lit.Type.(*ast.Ident); ok { + return varName, isErrorType(i.Name, allTypes, errorTypes) + } + } + } + + // var ErrEndOfFile = EndOfFileError{} + case *ast.CompositeLit: + if i, ok := vv.Type.(*ast.Ident); ok { + return varName, isErrorType(i.Name, allTypes, errorTypes) + } + } + + return "", false +} + +func isValidErrorVarName(s string) bool { + if isInitialism(s) { + return true + } + + words := split(s) + wordsCnt := wordsCount(words) + + if wordsCnt["err"] != 1 { + return false + } + return words[0] == "err" +} + +func isInitialism(s string) bool { + return strings.ToLower(s) == s || strings.ToUpper(s) == s +} + +func split(s string) []string { + var words []string + ss := []rune(s) + + var b strings.Builder + b.WriteRune(ss[0]) + + for _, r := range ss[1:] { + if unicode.IsUpper(r) { + words = append(words, strings.ToLower(b.String())) + b.Reset() + } + b.WriteRune(r) + } + + words = append(words, strings.ToLower(b.String())) + return words +} + +func wordsCount(w []string) map[string]int { + result := make(map[string]int, len(w)) + for _, ww := range w { + result[ww]++ + } + return result +} diff --git a/vendor/github.com/BurntSushi/toml/.gitignore b/vendor/github.com/BurntSushi/toml/.gitignore new file mode 100644 index 000000000..cd11be965 --- /dev/null +++ b/vendor/github.com/BurntSushi/toml/.gitignore @@ -0,0 +1,2 @@ +toml.test +/toml-test diff --git a/vendor/github.com/BurntSushi/toml/COMPATIBLE b/vendor/github.com/BurntSushi/toml/COMPATIBLE new file mode 100644 index 000000000..f621b0119 --- /dev/null +++ b/vendor/github.com/BurntSushi/toml/COMPATIBLE @@ -0,0 +1 @@ +Compatible with TOML version [v1.0.0](https://toml.io/en/v1.0.0). diff --git a/vendor/github.com/BurntSushi/toml/COPYING b/vendor/github.com/BurntSushi/toml/COPYING new file mode 100644 index 000000000..01b574320 --- /dev/null +++ b/vendor/github.com/BurntSushi/toml/COPYING @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2013 TOML authors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/vendor/github.com/BurntSushi/toml/README.md b/vendor/github.com/BurntSushi/toml/README.md new file mode 100644 index 000000000..64410cf75 --- /dev/null +++ b/vendor/github.com/BurntSushi/toml/README.md @@ -0,0 +1,220 @@ +## TOML parser and encoder for Go with reflection + +TOML stands for Tom's Obvious, Minimal Language. This Go package provides a +reflection interface similar to Go's standard library `json` and `xml` +packages. This package also supports the `encoding.TextUnmarshaler` and +`encoding.TextMarshaler` interfaces so that you can define custom data +representations. (There is an example of this below.) + +Compatible with TOML version [v1.0.0](https://toml.io/en/v1.0.0). + +Documentation: https://godocs.io/github.com/BurntSushi/toml + +See the [releases page](https://github.com/BurntSushi/toml/releases) for a +changelog; this information is also in the git tag annotations (e.g. `git show +v0.4.0`). + +This library requires Go 1.13 or newer; install it with: + + $ go get github.com/BurntSushi/toml + +It also comes with a TOML validator CLI tool: + + $ go get github.com/BurntSushi/toml/cmd/tomlv + $ tomlv some-toml-file.toml + +### Testing + +This package passes all tests in +[toml-test](https://github.com/BurntSushi/toml-test) for both the decoder +and the encoder. + +### Examples + +This package works similarly to how the Go standard library handles XML and +JSON. Namely, data is loaded into Go values via reflection. + +For the simplest example, consider some TOML file as just a list of keys +and values: + +```toml +Age = 25 +Cats = [ "Cauchy", "Plato" ] +Pi = 3.14 +Perfection = [ 6, 28, 496, 8128 ] +DOB = 1987-07-05T05:45:00Z +``` + +Which could be defined in Go as: + +```go +type Config struct { + Age int + Cats []string + Pi float64 + Perfection []int + DOB time.Time // requires `import time` +} +``` + +And then decoded with: + +```go +var conf Config +if _, err := toml.Decode(tomlData, &conf); err != nil { + // handle error +} +``` + +You can also use struct tags if your struct field name doesn't map to a TOML +key value directly: + +```toml +some_key_NAME = "wat" +``` + +```go +type TOML struct { + ObscureKey string `toml:"some_key_NAME"` +} +``` + +Beware that like other most other decoders **only exported fields** are +considered when encoding and decoding; private fields are silently ignored. + +### Using the `encoding.TextUnmarshaler` interface + +Here's an example that automatically parses duration strings into +`time.Duration` values: + +```toml +[[song]] +name = "Thunder Road" +duration = "4m49s" + +[[song]] +name = "Stairway to Heaven" +duration = "8m03s" +``` + +Which can be decoded with: + +```go +type song struct { + Name string + Duration duration +} +type songs struct { + Song []song +} +var favorites songs +if _, err := toml.Decode(blob, &favorites); err != nil { + log.Fatal(err) +} + +for _, s := range favorites.Song { + fmt.Printf("%s (%s)\n", s.Name, s.Duration) +} +``` + +And you'll also need a `duration` type that satisfies the +`encoding.TextUnmarshaler` interface: + +```go +type duration struct { + time.Duration +} + +func (d *duration) UnmarshalText(text []byte) error { + var err error + d.Duration, err = time.ParseDuration(string(text)) + return err +} +``` + +To target TOML specifically you can implement `UnmarshalTOML` TOML interface in +a similar way. + +### More complex usage + +Here's an example of how to load the example from the official spec page: + +```toml +# This is a TOML document. Boom. 
+ +title = "TOML Example" + +[owner] +name = "Tom Preston-Werner" +organization = "GitHub" +bio = "GitHub Cofounder & CEO\nLikes tater tots and beer." +dob = 1979-05-27T07:32:00Z # First class dates? Why not? + +[database] +server = "192.168.1.1" +ports = [ 8001, 8001, 8002 ] +connection_max = 5000 +enabled = true + +[servers] + + # You can indent as you please. Tabs or spaces. TOML don't care. + [servers.alpha] + ip = "10.0.0.1" + dc = "eqdc10" + + [servers.beta] + ip = "10.0.0.2" + dc = "eqdc10" + +[clients] +data = [ ["gamma", "delta"], [1, 2] ] # just an update to make sure parsers support it + +# Line breaks are OK when inside arrays +hosts = [ + "alpha", + "omega" +] +``` + +And the corresponding Go types are: + +```go +type tomlConfig struct { + Title string + Owner ownerInfo + DB database `toml:"database"` + Servers map[string]server + Clients clients +} + +type ownerInfo struct { + Name string + Org string `toml:"organization"` + Bio string + DOB time.Time +} + +type database struct { + Server string + Ports []int + ConnMax int `toml:"connection_max"` + Enabled bool +} + +type server struct { + IP string + DC string +} + +type clients struct { + Data [][]interface{} + Hosts []string +} +``` + +Note that a case insensitive match will be tried if an exact match can't be +found. + +A working example of the above can be found in `_examples/example.{go,toml}`. + diff --git a/vendor/github.com/BurntSushi/toml/decode.go b/vendor/github.com/BurntSushi/toml/decode.go new file mode 100644 index 000000000..d3d3b8397 --- /dev/null +++ b/vendor/github.com/BurntSushi/toml/decode.go @@ -0,0 +1,511 @@ +package toml + +import ( + "encoding" + "fmt" + "io" + "io/ioutil" + "math" + "os" + "reflect" + "strings" + "time" +) + +// Unmarshaler is the interface implemented by objects that can unmarshal a +// TOML description of themselves. +type Unmarshaler interface { + UnmarshalTOML(interface{}) error +} + +// Unmarshal decodes the contents of `p` in TOML format into a pointer `v`. +func Unmarshal(p []byte, v interface{}) error { + _, err := Decode(string(p), v) + return err +} + +// Primitive is a TOML value that hasn't been decoded into a Go value. +// +// This type can be used for any value, which will cause decoding to be delayed. +// You can use the PrimitiveDecode() function to "manually" decode these values. +// +// NOTE: The underlying representation of a `Primitive` value is subject to +// change. Do not rely on it. +// +// NOTE: Primitive values are still parsed, so using them will only avoid the +// overhead of reflection. They can be useful when you don't know the exact type +// of TOML data until runtime. +type Primitive struct { + undecoded interface{} + context Key +} + +// PrimitiveDecode is just like the other `Decode*` functions, except it +// decodes a TOML value that has already been parsed. Valid primitive values +// can *only* be obtained from values filled by the decoder functions, +// including this method. (i.e., `v` may contain more `Primitive` +// values.) +// +// Meta data for primitive values is included in the meta data returned by +// the `Decode*` functions with one exception: keys returned by the Undecoded +// method will only reflect keys that were decoded. Namely, any keys hidden +// behind a Primitive will be considered undecoded. Executing this method will +// update the undecoded keys in the meta data. (See the example.) 
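The Primitive type and MetaData.PrimitiveDecode documented here let part of a document be decoded later, once its shape is known. A small usage sketch against the package's public API; the TOML blob and struct layout are invented for illustration:

```go
package main

import (
	"fmt"
	"log"

	"github.com/BurntSushi/toml"
)

const blob = `
ranking = ["Spirit", "Opportunity"]

[loves.dog]
food = "kibble"
`

func main() {
	var doc struct {
		Ranking []string
		Loves   toml.Primitive // decoding of this subtree is delayed
	}
	md, err := toml.Decode(blob, &doc)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("ranking:", doc.Ranking)

	// Decode the delayed part once we decide what it should look like.
	var loves map[string]struct{ Food string }
	if err := md.PrimitiveDecode(doc.Loves, &loves); err != nil {
		log.Fatal(err)
	}
	fmt.Println("dog food:", loves["dog"].Food)
}
```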
+func (md *MetaData) PrimitiveDecode(primValue Primitive, v interface{}) error { + md.context = primValue.context + defer func() { md.context = nil }() + return md.unify(primValue.undecoded, rvalue(v)) +} + +// Decoder decodes TOML data. +// +// TOML tables correspond to Go structs or maps (dealer's choice – they can be +// used interchangeably). +// +// TOML table arrays correspond to either a slice of structs or a slice of maps. +// +// TOML datetimes correspond to Go time.Time values. Local datetimes are parsed +// in the local timezone. +// +// All other TOML types (float, string, int, bool and array) correspond to the +// obvious Go types. +// +// An exception to the above rules is if a type implements the TextUnmarshaler +// interface, in which case any primitive TOML value (floats, strings, integers, +// booleans, datetimes) will be converted to a []byte and given to the value's +// UnmarshalText method. See the Unmarshaler example for a demonstration with +// time duration strings. +// +// Key mapping +// +// TOML keys can map to either keys in a Go map or field names in a Go struct. +// The special `toml` struct tag can be used to map TOML keys to struct fields +// that don't match the key name exactly (see the example). A case insensitive +// match to struct names will be tried if an exact match can't be found. +// +// The mapping between TOML values and Go values is loose. That is, there may +// exist TOML values that cannot be placed into your representation, and there +// may be parts of your representation that do not correspond to TOML values. +// This loose mapping can be made stricter by using the IsDefined and/or +// Undecoded methods on the MetaData returned. +// +// This decoder does not handle cyclic types. Decode will not terminate if a +// cyclic type is passed. +type Decoder struct { + r io.Reader +} + +// NewDecoder creates a new Decoder. +func NewDecoder(r io.Reader) *Decoder { + return &Decoder{r: r} +} + +// Decode TOML data in to the pointer `v`. +func (dec *Decoder) Decode(v interface{}) (MetaData, error) { + rv := reflect.ValueOf(v) + if rv.Kind() != reflect.Ptr { + return MetaData{}, e("Decode of non-pointer %s", reflect.TypeOf(v)) + } + if rv.IsNil() { + return MetaData{}, e("Decode of nil %s", reflect.TypeOf(v)) + } + + // TODO: have parser should read from io.Reader? Or at the very least, make + // it read from []byte rather than string + data, err := ioutil.ReadAll(dec.r) + if err != nil { + return MetaData{}, err + } + + p, err := parse(string(data)) + if err != nil { + return MetaData{}, err + } + md := MetaData{ + p.mapping, p.types, p.ordered, + make(map[string]bool, len(p.ordered)), nil, + } + return md, md.unify(p.mapping, indirect(rv)) +} + +// Decode the TOML data in to the pointer v. +// +// See the documentation on Decoder for a description of the decoding process. +func Decode(data string, v interface{}) (MetaData, error) { + return NewDecoder(strings.NewReader(data)).Decode(v) +} + +// DecodeFile is just like Decode, except it will automatically read the +// contents of the file at path and decode it for you. +func DecodeFile(path string, v interface{}) (MetaData, error) { + fp, err := os.Open(path) + if err != nil { + return MetaData{}, err + } + defer fp.Close() + return NewDecoder(fp).Decode(v) +} + +// unify performs a sort of type unification based on the structure of `rv`, +// which is the client representation. +// +// Any type mismatch produces an error. 
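Decode, DecodeFile and the Decoder type above are the usual entry points. A hedged usage sketch; "config.toml" is a hypothetical path and the config struct exists only for the example:

```go
package main

import (
	"fmt"
	"log"
	"os"

	"github.com/BurntSushi/toml"
)

type config struct {
	Title string
	Owner struct{ Name string }
}

func main() {
	// Directly from a file on disk.
	var c config
	if _, err := toml.DecodeFile("config.toml", &c); err != nil {
		log.Fatal(err)
	}

	// Equivalent, but streaming from any io.Reader.
	f, err := os.Open("config.toml")
	if err != nil {
		log.Fatal(err)
	}
	defer f.Close()

	var c2 config
	md, err := toml.NewDecoder(f).Decode(&c2)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(c.Title, c2.Owner.Name, md.IsDefined("owner", "name"))
}
```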
Finding a type that we don't know +// how to handle produces an unsupported type error. +func (md *MetaData) unify(data interface{}, rv reflect.Value) error { + // Special case. Look for a `Primitive` value. + // TODO: #76 would make this superfluous after implemented. + if rv.Type() == reflect.TypeOf((*Primitive)(nil)).Elem() { + // Save the undecoded data and the key context into the primitive + // value. + context := make(Key, len(md.context)) + copy(context, md.context) + rv.Set(reflect.ValueOf(Primitive{ + undecoded: data, + context: context, + })) + return nil + } + + // Special case. Unmarshaler Interface support. + if rv.CanAddr() { + if v, ok := rv.Addr().Interface().(Unmarshaler); ok { + return v.UnmarshalTOML(data) + } + } + + // Special case. Look for a value satisfying the TextUnmarshaler interface. + if v, ok := rv.Interface().(encoding.TextUnmarshaler); ok { + return md.unifyText(data, v) + } + // TODO: + // The behavior here is incorrect whenever a Go type satisfies the + // encoding.TextUnmarshaler interface but also corresponds to a TOML hash or + // array. In particular, the unmarshaler should only be applied to primitive + // TOML values. But at this point, it will be applied to all kinds of values + // and produce an incorrect error whenever those values are hashes or arrays + // (including arrays of tables). + + k := rv.Kind() + + // laziness + if k >= reflect.Int && k <= reflect.Uint64 { + return md.unifyInt(data, rv) + } + switch k { + case reflect.Ptr: + elem := reflect.New(rv.Type().Elem()) + err := md.unify(data, reflect.Indirect(elem)) + if err != nil { + return err + } + rv.Set(elem) + return nil + case reflect.Struct: + return md.unifyStruct(data, rv) + case reflect.Map: + return md.unifyMap(data, rv) + case reflect.Array: + return md.unifyArray(data, rv) + case reflect.Slice: + return md.unifySlice(data, rv) + case reflect.String: + return md.unifyString(data, rv) + case reflect.Bool: + return md.unifyBool(data, rv) + case reflect.Interface: + // we only support empty interfaces. + if rv.NumMethod() > 0 { + return e("unsupported type %s", rv.Type()) + } + return md.unifyAnything(data, rv) + case reflect.Float32: + fallthrough + case reflect.Float64: + return md.unifyFloat64(data, rv) + } + return e("unsupported type %s", rv.Kind()) +} + +func (md *MetaData) unifyStruct(mapping interface{}, rv reflect.Value) error { + tmap, ok := mapping.(map[string]interface{}) + if !ok { + if mapping == nil { + return nil + } + return e("type mismatch for %s: expected table but found %T", + rv.Type().String(), mapping) + } + + for key, datum := range tmap { + var f *field + fields := cachedTypeFields(rv.Type()) + for i := range fields { + ff := &fields[i] + if ff.name == key { + f = ff + break + } + if f == nil && strings.EqualFold(ff.name, key) { + f = ff + } + } + if f != nil { + subv := rv + for _, i := range f.index { + subv = indirect(subv.Field(i)) + } + if isUnifiable(subv) { + md.decoded[md.context.add(key).String()] = true + md.context = append(md.context, key) + if err := md.unify(datum, subv); err != nil { + return err + } + md.context = md.context[0 : len(md.context)-1] + } else if f.name != "" { + // Bad user! No soup for you! 
+ return e("cannot write unexported field %s.%s", + rv.Type().String(), f.name) + } + } + } + return nil +} + +func (md *MetaData) unifyMap(mapping interface{}, rv reflect.Value) error { + if k := rv.Type().Key().Kind(); k != reflect.String { + return fmt.Errorf( + "toml: cannot decode to a map with non-string key type (%s in %q)", + k, rv.Type()) + } + + tmap, ok := mapping.(map[string]interface{}) + if !ok { + if tmap == nil { + return nil + } + return badtype("map", mapping) + } + if rv.IsNil() { + rv.Set(reflect.MakeMap(rv.Type())) + } + for k, v := range tmap { + md.decoded[md.context.add(k).String()] = true + md.context = append(md.context, k) + + rvkey := indirect(reflect.New(rv.Type().Key())) + rvval := reflect.Indirect(reflect.New(rv.Type().Elem())) + if err := md.unify(v, rvval); err != nil { + return err + } + md.context = md.context[0 : len(md.context)-1] + + rvkey.SetString(k) + rv.SetMapIndex(rvkey, rvval) + } + return nil +} + +func (md *MetaData) unifyArray(data interface{}, rv reflect.Value) error { + datav := reflect.ValueOf(data) + if datav.Kind() != reflect.Slice { + if !datav.IsValid() { + return nil + } + return badtype("slice", data) + } + if l := datav.Len(); l != rv.Len() { + return e("expected array length %d; got TOML array of length %d", rv.Len(), l) + } + return md.unifySliceArray(datav, rv) +} + +func (md *MetaData) unifySlice(data interface{}, rv reflect.Value) error { + datav := reflect.ValueOf(data) + if datav.Kind() != reflect.Slice { + if !datav.IsValid() { + return nil + } + return badtype("slice", data) + } + n := datav.Len() + if rv.IsNil() || rv.Cap() < n { + rv.Set(reflect.MakeSlice(rv.Type(), n, n)) + } + rv.SetLen(n) + return md.unifySliceArray(datav, rv) +} + +func (md *MetaData) unifySliceArray(data, rv reflect.Value) error { + l := data.Len() + for i := 0; i < l; i++ { + err := md.unify(data.Index(i).Interface(), indirect(rv.Index(i))) + if err != nil { + return err + } + } + return nil +} + +func (md *MetaData) unifyDatetime(data interface{}, rv reflect.Value) error { + if _, ok := data.(time.Time); ok { + rv.Set(reflect.ValueOf(data)) + return nil + } + return badtype("time.Time", data) +} + +func (md *MetaData) unifyString(data interface{}, rv reflect.Value) error { + if s, ok := data.(string); ok { + rv.SetString(s) + return nil + } + return badtype("string", data) +} + +func (md *MetaData) unifyFloat64(data interface{}, rv reflect.Value) error { + if num, ok := data.(float64); ok { + switch rv.Kind() { + case reflect.Float32: + fallthrough + case reflect.Float64: + rv.SetFloat(num) + default: + panic("bug") + } + return nil + } + return badtype("float", data) +} + +func (md *MetaData) unifyInt(data interface{}, rv reflect.Value) error { + if num, ok := data.(int64); ok { + if rv.Kind() >= reflect.Int && rv.Kind() <= reflect.Int64 { + switch rv.Kind() { + case reflect.Int, reflect.Int64: + // No bounds checking necessary. + case reflect.Int8: + if num < math.MinInt8 || num > math.MaxInt8 { + return e("value %d is out of range for int8", num) + } + case reflect.Int16: + if num < math.MinInt16 || num > math.MaxInt16 { + return e("value %d is out of range for int16", num) + } + case reflect.Int32: + if num < math.MinInt32 || num > math.MaxInt32 { + return e("value %d is out of range for int32", num) + } + } + rv.SetInt(num) + } else if rv.Kind() >= reflect.Uint && rv.Kind() <= reflect.Uint64 { + unum := uint64(num) + switch rv.Kind() { + case reflect.Uint, reflect.Uint64: + // No bounds checking necessary. 
+ case reflect.Uint8: + if num < 0 || unum > math.MaxUint8 { + return e("value %d is out of range for uint8", num) + } + case reflect.Uint16: + if num < 0 || unum > math.MaxUint16 { + return e("value %d is out of range for uint16", num) + } + case reflect.Uint32: + if num < 0 || unum > math.MaxUint32 { + return e("value %d is out of range for uint32", num) + } + } + rv.SetUint(unum) + } else { + panic("unreachable") + } + return nil + } + return badtype("integer", data) +} + +func (md *MetaData) unifyBool(data interface{}, rv reflect.Value) error { + if b, ok := data.(bool); ok { + rv.SetBool(b) + return nil + } + return badtype("boolean", data) +} + +func (md *MetaData) unifyAnything(data interface{}, rv reflect.Value) error { + rv.Set(reflect.ValueOf(data)) + return nil +} + +func (md *MetaData) unifyText(data interface{}, v encoding.TextUnmarshaler) error { + var s string + switch sdata := data.(type) { + case TextMarshaler: + text, err := sdata.MarshalText() + if err != nil { + return err + } + s = string(text) + case fmt.Stringer: + s = sdata.String() + case string: + s = sdata + case bool: + s = fmt.Sprintf("%v", sdata) + case int64: + s = fmt.Sprintf("%d", sdata) + case float64: + s = fmt.Sprintf("%f", sdata) + default: + return badtype("primitive (string-like)", data) + } + if err := v.UnmarshalText([]byte(s)); err != nil { + return err + } + return nil +} + +// rvalue returns a reflect.Value of `v`. All pointers are resolved. +func rvalue(v interface{}) reflect.Value { + return indirect(reflect.ValueOf(v)) +} + +// indirect returns the value pointed to by a pointer. +// Pointers are followed until the value is not a pointer. +// New values are allocated for each nil pointer. +// +// An exception to this rule is if the value satisfies an interface of +// interest to us (like encoding.TextUnmarshaler). +func indirect(v reflect.Value) reflect.Value { + if v.Kind() != reflect.Ptr { + if v.CanSet() { + pv := v.Addr() + if _, ok := pv.Interface().(encoding.TextUnmarshaler); ok { + return pv + } + } + return v + } + if v.IsNil() { + v.Set(reflect.New(v.Type().Elem())) + } + return indirect(reflect.Indirect(v)) +} + +func isUnifiable(rv reflect.Value) bool { + if rv.CanSet() { + return true + } + if _, ok := rv.Interface().(encoding.TextUnmarshaler); ok { + return true + } + return false +} + +func e(format string, args ...interface{}) error { + return fmt.Errorf("toml: "+format, args...) +} + +func badtype(expected string, data interface{}) error { + return e("cannot load TOML value of type %T into a Go %s", data, expected) +} diff --git a/vendor/github.com/BurntSushi/toml/decode_go116.go b/vendor/github.com/BurntSushi/toml/decode_go116.go new file mode 100644 index 000000000..38aa75fdc --- /dev/null +++ b/vendor/github.com/BurntSushi/toml/decode_go116.go @@ -0,0 +1,18 @@ +// +build go1.16 + +package toml + +import ( + "io/fs" +) + +// DecodeFS is just like Decode, except it will automatically read the contents +// of the file at `path` from a fs.FS instance. 
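Since embed.FS satisfies fs.FS, DecodeFS pairs naturally with go:embed on Go 1.16+. An illustrative sketch; it assumes a config.toml file sits next to the source so the embed directive compiles:

```go
package main

import (
	"embed"
	"fmt"
	"log"

	"github.com/BurntSushi/toml"
)

//go:embed config.toml
var configFS embed.FS // hypothetical embedded file

func main() {
	var conf struct{ Title string }
	if _, err := toml.DecodeFS(configFS, "config.toml", &conf); err != nil {
		log.Fatal(err)
	}
	fmt.Println(conf.Title)
}
```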
+func DecodeFS(fsys fs.FS, path string, v interface{}) (MetaData, error) { + fp, err := fsys.Open(path) + if err != nil { + return MetaData{}, err + } + defer fp.Close() + return NewDecoder(fp).Decode(v) +} diff --git a/vendor/github.com/BurntSushi/toml/decode_meta.go b/vendor/github.com/BurntSushi/toml/decode_meta.go new file mode 100644 index 000000000..ad8899c6c --- /dev/null +++ b/vendor/github.com/BurntSushi/toml/decode_meta.go @@ -0,0 +1,123 @@ +package toml + +import "strings" + +// MetaData allows access to meta information about TOML data that may not be +// inferable via reflection. In particular, whether a key has been defined and +// the TOML type of a key. +type MetaData struct { + mapping map[string]interface{} + types map[string]tomlType + keys []Key + decoded map[string]bool + context Key // Used only during decoding. +} + +// IsDefined reports if the key exists in the TOML data. +// +// The key should be specified hierarchically, for example to access the TOML +// key "a.b.c" you would use: +// +// IsDefined("a", "b", "c") +// +// IsDefined will return false if an empty key given. Keys are case sensitive. +func (md *MetaData) IsDefined(key ...string) bool { + if len(key) == 0 { + return false + } + + var hash map[string]interface{} + var ok bool + var hashOrVal interface{} = md.mapping + for _, k := range key { + if hash, ok = hashOrVal.(map[string]interface{}); !ok { + return false + } + if hashOrVal, ok = hash[k]; !ok { + return false + } + } + return true +} + +// Type returns a string representation of the type of the key specified. +// +// Type will return the empty string if given an empty key or a key that does +// not exist. Keys are case sensitive. +func (md *MetaData) Type(key ...string) string { + fullkey := strings.Join(key, ".") + if typ, ok := md.types[fullkey]; ok { + return typ.typeString() + } + return "" +} + +// Key represents any TOML key, including key groups. Use (MetaData).Keys to get +// values of this type. +type Key []string + +func (k Key) String() string { return strings.Join(k, ".") } + +func (k Key) maybeQuotedAll() string { + var ss []string + for i := range k { + ss = append(ss, k.maybeQuoted(i)) + } + return strings.Join(ss, ".") +} + +func (k Key) maybeQuoted(i int) string { + if k[i] == "" { + return `""` + } + quote := false + for _, c := range k[i] { + if !isBareKeyChar(c) { + quote = true + break + } + } + if quote { + return `"` + quotedReplacer.Replace(k[i]) + `"` + } + return k[i] +} + +func (k Key) add(piece string) Key { + newKey := make(Key, len(k)+1) + copy(newKey, k) + newKey[len(k)] = piece + return newKey +} + +// Keys returns a slice of every key in the TOML data, including key groups. +// +// Each key is itself a slice, where the first element is the top of the +// hierarchy and the last is the most specific. The list will have the same +// order as the keys appeared in the TOML data. +// +// All keys returned are non-empty. +func (md *MetaData) Keys() []Key { + return md.keys +} + +// Undecoded returns all keys that have not been decoded in the order in which +// they appear in the original TOML document. +// +// This includes keys that haven't been decoded because of a Primitive value. +// Once the Primitive value is decoded, the keys will be considered decoded. +// +// Also note that decoding into an empty interface will result in no decoding, +// and so no keys will be considered decoded. 
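MetaData is what the Decode* functions return alongside the error, and it is the only way to learn about keys that exist in the document but not in the target value. A usage sketch with made-up keys; the comments describe intent rather than exact print formatting:

```go
package main

import (
	"fmt"
	"log"

	"github.com/BurntSushi/toml"
)

func main() {
	const blob = `
name = "demo"
port = 8080

[extra]
debug = true
`
	var conf struct {
		Name string
		Port int
	}
	md, err := toml.Decode(blob, &conf)
	if err != nil {
		log.Fatal(err)
	}

	fmt.Println(md.IsDefined("extra", "debug")) // true: the key is in the document
	fmt.Println(md.Type("port"))                // TOML type name of the key
	fmt.Println(md.Keys())                      // every key, in document order
	fmt.Println(md.Undecoded())                 // keys present in the TOML but unused by conf
}
```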
+// +// In this sense, the Undecoded keys correspond to keys in the TOML document +// that do not have a concrete type in your representation. +func (md *MetaData) Undecoded() []Key { + undecoded := make([]Key, 0, len(md.keys)) + for _, key := range md.keys { + if !md.decoded[key.String()] { + undecoded = append(undecoded, key) + } + } + return undecoded +} diff --git a/vendor/github.com/BurntSushi/toml/deprecated.go b/vendor/github.com/BurntSushi/toml/deprecated.go new file mode 100644 index 000000000..db89eac1d --- /dev/null +++ b/vendor/github.com/BurntSushi/toml/deprecated.go @@ -0,0 +1,33 @@ +package toml + +import ( + "encoding" + "io" +) + +// DEPRECATED! +// +// Use the identical encoding.TextMarshaler instead. It is defined here to +// support Go 1.1 and older. +type TextMarshaler encoding.TextMarshaler + +// DEPRECATED! +// +// Use the identical encoding.TextUnmarshaler instead. It is defined here to +// support Go 1.1 and older. +type TextUnmarshaler encoding.TextUnmarshaler + +// DEPRECATED! +// +// Use MetaData.PrimitiveDecode instead. +func PrimitiveDecode(primValue Primitive, v interface{}) error { + md := MetaData{decoded: make(map[string]bool)} + return md.unify(primValue.undecoded, rvalue(v)) +} + +// DEPRECATED! +// +// Use NewDecoder(reader).Decode(&v) instead. +func DecodeReader(r io.Reader, v interface{}) (MetaData, error) { + return NewDecoder(r).Decode(v) +} diff --git a/vendor/github.com/BurntSushi/toml/doc.go b/vendor/github.com/BurntSushi/toml/doc.go new file mode 100644 index 000000000..099c4a77d --- /dev/null +++ b/vendor/github.com/BurntSushi/toml/doc.go @@ -0,0 +1,13 @@ +/* +Package toml implements decoding and encoding of TOML files. + +This package supports TOML v1.0.0, as listed on https://toml.io + +There is also support for delaying decoding with the Primitive type, and +querying the set of keys in a TOML document with the MetaData type. + +The github.com/BurntSushi/toml/cmd/tomlv package implements a TOML validator, +and can be used to verify if TOML document is valid. It can also be used to +print the type of each key. 
+*/ +package toml diff --git a/vendor/github.com/BurntSushi/toml/encode.go b/vendor/github.com/BurntSushi/toml/encode.go new file mode 100644 index 000000000..10d88ac63 --- /dev/null +++ b/vendor/github.com/BurntSushi/toml/encode.go @@ -0,0 +1,650 @@ +package toml + +import ( + "bufio" + "encoding" + "errors" + "fmt" + "io" + "math" + "reflect" + "sort" + "strconv" + "strings" + "time" + + "github.com/BurntSushi/toml/internal" +) + +type tomlEncodeError struct{ error } + +var ( + errArrayNilElement = errors.New("toml: cannot encode array with nil element") + errNonString = errors.New("toml: cannot encode a map with non-string key type") + errAnonNonStruct = errors.New("toml: cannot encode an anonymous field that is not a struct") + errNoKey = errors.New("toml: top-level values must be Go maps or structs") + errAnything = errors.New("") // used in testing +) + +var quotedReplacer = strings.NewReplacer( + "\"", "\\\"", + "\\", "\\\\", + "\x00", `\u0000`, + "\x01", `\u0001`, + "\x02", `\u0002`, + "\x03", `\u0003`, + "\x04", `\u0004`, + "\x05", `\u0005`, + "\x06", `\u0006`, + "\x07", `\u0007`, + "\b", `\b`, + "\t", `\t`, + "\n", `\n`, + "\x0b", `\u000b`, + "\f", `\f`, + "\r", `\r`, + "\x0e", `\u000e`, + "\x0f", `\u000f`, + "\x10", `\u0010`, + "\x11", `\u0011`, + "\x12", `\u0012`, + "\x13", `\u0013`, + "\x14", `\u0014`, + "\x15", `\u0015`, + "\x16", `\u0016`, + "\x17", `\u0017`, + "\x18", `\u0018`, + "\x19", `\u0019`, + "\x1a", `\u001a`, + "\x1b", `\u001b`, + "\x1c", `\u001c`, + "\x1d", `\u001d`, + "\x1e", `\u001e`, + "\x1f", `\u001f`, + "\x7f", `\u007f`, +) + +// Encoder encodes a Go to a TOML document. +// +// The mapping between Go values and TOML values should be precisely the same as +// for the Decode* functions. Similarly, the TextMarshaler interface is +// supported by encoding the resulting bytes as strings. If you want to write +// arbitrary binary data then you will need to use something like base64 since +// TOML does not have any binary types. +// +// When encoding TOML hashes (Go maps or structs), keys without any sub-hashes +// are encoded first. +// +// Go maps will be sorted alphabetically by key for deterministic output. +// +// Encoding Go values without a corresponding TOML representation will return an +// error. Examples of this includes maps with non-string keys, slices with nil +// elements, embedded non-struct types, and nested slices containing maps or +// structs. (e.g. [][]map[string]string is not allowed but []map[string]string +// is okay, as is []map[string][]string). +// +// NOTE: Only exported keys are encoded due to the use of reflection. Unexported +// keys are silently discarded. +type Encoder struct { + // The string to use for a single indentation level. The default is two + // spaces. + Indent string + + // hasWritten is whether we have written any output to w yet. + hasWritten bool + w *bufio.Writer +} + +// NewEncoder create a new Encoder. +func NewEncoder(w io.Writer) *Encoder { + return &Encoder{ + w: bufio.NewWriter(w), + Indent: " ", + } +} + +// Encode writes a TOML representation of the Go value to the Encoder's writer. +// +// An error is returned if the value given cannot be encoded to a valid TOML +// document. 
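A short sketch of the encoding side using NewEncoder, the Indent field and Encode; the config/server types are invented for the example:

```go
package main

import (
	"bytes"
	"fmt"
	"log"

	"github.com/BurntSushi/toml"
)

type config struct {
	Title   string
	Debug   bool
	Ports   []int
	Servers map[string]server
}

type server struct {
	IP string `toml:"ip"`
}

func main() {
	conf := config{
		Title:   "demo",
		Debug:   true,
		Ports:   []int{8001, 8002},
		Servers: map[string]server{"alpha": {IP: "10.0.0.1"}},
	}

	var buf bytes.Buffer
	enc := toml.NewEncoder(&buf)
	enc.Indent = "    " // default is two spaces
	if err := enc.Encode(conf); err != nil {
		log.Fatal(err)
	}
	fmt.Print(buf.String())
}
```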
+func (enc *Encoder) Encode(v interface{}) error { + rv := eindirect(reflect.ValueOf(v)) + if err := enc.safeEncode(Key([]string{}), rv); err != nil { + return err + } + return enc.w.Flush() +} + +func (enc *Encoder) safeEncode(key Key, rv reflect.Value) (err error) { + defer func() { + if r := recover(); r != nil { + if terr, ok := r.(tomlEncodeError); ok { + err = terr.error + return + } + panic(r) + } + }() + enc.encode(key, rv) + return nil +} + +func (enc *Encoder) encode(key Key, rv reflect.Value) { + // Special case. Time needs to be in ISO8601 format. + // Special case. If we can marshal the type to text, then we used that. + // Basically, this prevents the encoder for handling these types as + // generic structs (or whatever the underlying type of a TextMarshaler is). + switch t := rv.Interface().(type) { + case time.Time, encoding.TextMarshaler: + enc.writeKeyValue(key, rv, false) + return + // TODO: #76 would make this superfluous after implemented. + case Primitive: + enc.encode(key, reflect.ValueOf(t.undecoded)) + return + } + + k := rv.Kind() + switch k { + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, + reflect.Int64, + reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, + reflect.Uint64, + reflect.Float32, reflect.Float64, reflect.String, reflect.Bool: + enc.writeKeyValue(key, rv, false) + case reflect.Array, reflect.Slice: + if typeEqual(tomlArrayHash, tomlTypeOfGo(rv)) { + enc.eArrayOfTables(key, rv) + } else { + enc.writeKeyValue(key, rv, false) + } + case reflect.Interface: + if rv.IsNil() { + return + } + enc.encode(key, rv.Elem()) + case reflect.Map: + if rv.IsNil() { + return + } + enc.eTable(key, rv) + case reflect.Ptr: + if rv.IsNil() { + return + } + enc.encode(key, rv.Elem()) + case reflect.Struct: + enc.eTable(key, rv) + default: + encPanic(fmt.Errorf("unsupported type for key '%s': %s", key, k)) + } +} + +// eElement encodes any value that can be an array element. +func (enc *Encoder) eElement(rv reflect.Value) { + switch v := rv.Interface().(type) { + case time.Time: // Using TextMarshaler adds extra quotes, which we don't want. + format := time.RFC3339Nano + switch v.Location() { + case internal.LocalDatetime: + format = "2006-01-02T15:04:05.999999999" + case internal.LocalDate: + format = "2006-01-02" + case internal.LocalTime: + format = "15:04:05.999999999" + } + switch v.Location() { + default: + enc.wf(v.Format(format)) + case internal.LocalDatetime, internal.LocalDate, internal.LocalTime: + enc.wf(v.In(time.UTC).Format(format)) + } + return + case encoding.TextMarshaler: + // Use text marshaler if it's available for this value. 
+ if s, err := v.MarshalText(); err != nil { + encPanic(err) + } else { + enc.writeQuoted(string(s)) + } + return + } + + switch rv.Kind() { + case reflect.String: + enc.writeQuoted(rv.String()) + case reflect.Bool: + enc.wf(strconv.FormatBool(rv.Bool())) + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + enc.wf(strconv.FormatInt(rv.Int(), 10)) + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64: + enc.wf(strconv.FormatUint(rv.Uint(), 10)) + case reflect.Float32: + f := rv.Float() + if math.IsNaN(f) { + enc.wf("nan") + } else if math.IsInf(f, 0) { + enc.wf("%cinf", map[bool]byte{true: '-', false: '+'}[math.Signbit(f)]) + } else { + enc.wf(floatAddDecimal(strconv.FormatFloat(f, 'f', -1, 32))) + } + case reflect.Float64: + f := rv.Float() + if math.IsNaN(f) { + enc.wf("nan") + } else if math.IsInf(f, 0) { + enc.wf("%cinf", map[bool]byte{true: '-', false: '+'}[math.Signbit(f)]) + } else { + enc.wf(floatAddDecimal(strconv.FormatFloat(f, 'f', -1, 64))) + } + case reflect.Array, reflect.Slice: + enc.eArrayOrSliceElement(rv) + case reflect.Struct: + enc.eStruct(nil, rv, true) + case reflect.Map: + enc.eMap(nil, rv, true) + case reflect.Interface: + enc.eElement(rv.Elem()) + default: + encPanic(fmt.Errorf("unexpected primitive type: %T", rv.Interface())) + } +} + +// By the TOML spec, all floats must have a decimal with at least one number on +// either side. +func floatAddDecimal(fstr string) string { + if !strings.Contains(fstr, ".") { + return fstr + ".0" + } + return fstr +} + +func (enc *Encoder) writeQuoted(s string) { + enc.wf("\"%s\"", quotedReplacer.Replace(s)) +} + +func (enc *Encoder) eArrayOrSliceElement(rv reflect.Value) { + length := rv.Len() + enc.wf("[") + for i := 0; i < length; i++ { + elem := rv.Index(i) + enc.eElement(elem) + if i != length-1 { + enc.wf(", ") + } + } + enc.wf("]") +} + +func (enc *Encoder) eArrayOfTables(key Key, rv reflect.Value) { + if len(key) == 0 { + encPanic(errNoKey) + } + for i := 0; i < rv.Len(); i++ { + trv := rv.Index(i) + if isNil(trv) { + continue + } + enc.newline() + enc.wf("%s[[%s]]", enc.indentStr(key), key.maybeQuotedAll()) + enc.newline() + enc.eMapOrStruct(key, trv, false) + } +} + +func (enc *Encoder) eTable(key Key, rv reflect.Value) { + if len(key) == 1 { + // Output an extra newline between top-level tables. + // (The newline isn't written if nothing else has been written though.) + enc.newline() + } + if len(key) > 0 { + enc.wf("%s[%s]", enc.indentStr(key), key.maybeQuotedAll()) + enc.newline() + } + enc.eMapOrStruct(key, rv, false) +} + +func (enc *Encoder) eMapOrStruct(key Key, rv reflect.Value, inline bool) { + switch rv := eindirect(rv); rv.Kind() { + case reflect.Map: + enc.eMap(key, rv, inline) + case reflect.Struct: + enc.eStruct(key, rv, inline) + default: + // Should never happen? + panic("eTable: unhandled reflect.Value Kind: " + rv.Kind().String()) + } +} + +func (enc *Encoder) eMap(key Key, rv reflect.Value, inline bool) { + rt := rv.Type() + if rt.Key().Kind() != reflect.String { + encPanic(errNonString) + } + + // Sort keys so that we have deterministic output. And write keys directly + // underneath this key first, before writing sub-structs or sub-maps. 
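Because eElement prefers encoding.TextMarshaler, a type can control its own TOML string form when encoded. A sketch mirroring the duration example from the README, this time on the encoding side; the duration type and field names are illustrative:

```go
package main

import (
	"bytes"
	"fmt"
	"log"
	"time"

	"github.com/BurntSushi/toml"
)

// duration marshals as a human-readable string ("4m49s") instead of a table.
type duration struct {
	time.Duration
}

func (d duration) MarshalText() ([]byte, error) {
	return []byte(d.Duration.String()), nil
}

func main() {
	var buf bytes.Buffer
	err := toml.NewEncoder(&buf).Encode(struct {
		Name    string
		Timeout duration
	}{Name: "demo", Timeout: duration{4*time.Minute + 49*time.Second}})
	if err != nil {
		log.Fatal(err)
	}
	fmt.Print(buf.String()) // Timeout = "4m49s"
}
```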
+ var mapKeysDirect, mapKeysSub []string + for _, mapKey := range rv.MapKeys() { + k := mapKey.String() + if typeIsHash(tomlTypeOfGo(rv.MapIndex(mapKey))) { + mapKeysSub = append(mapKeysSub, k) + } else { + mapKeysDirect = append(mapKeysDirect, k) + } + } + + var writeMapKeys = func(mapKeys []string, trailC bool) { + sort.Strings(mapKeys) + for i, mapKey := range mapKeys { + val := rv.MapIndex(reflect.ValueOf(mapKey)) + if isNil(val) { + continue + } + + if inline { + enc.writeKeyValue(Key{mapKey}, val, true) + if trailC || i != len(mapKeys)-1 { + enc.wf(", ") + } + } else { + enc.encode(key.add(mapKey), val) + } + } + } + + if inline { + enc.wf("{") + } + writeMapKeys(mapKeysDirect, len(mapKeysSub) > 0) + writeMapKeys(mapKeysSub, false) + if inline { + enc.wf("}") + } +} + +func (enc *Encoder) eStruct(key Key, rv reflect.Value, inline bool) { + // Write keys for fields directly under this key first, because if we write + // a field that creates a new table then all keys under it will be in that + // table (not the one we're writing here). + // + // Fields is a [][]int: for fieldsDirect this always has one entry (the + // struct index). For fieldsSub it contains two entries: the parent field + // index from tv, and the field indexes for the fields of the sub. + var ( + rt = rv.Type() + fieldsDirect, fieldsSub [][]int + addFields func(rt reflect.Type, rv reflect.Value, start []int) + ) + addFields = func(rt reflect.Type, rv reflect.Value, start []int) { + for i := 0; i < rt.NumField(); i++ { + f := rt.Field(i) + if f.PkgPath != "" && !f.Anonymous { /// Skip unexported fields. + continue + } + + frv := rv.Field(i) + + // Treat anonymous struct fields with tag names as though they are + // not anonymous, like encoding/json does. + // + // Non-struct anonymous fields use the normal encoding logic. + if f.Anonymous { + t := f.Type + switch t.Kind() { + case reflect.Struct: + if getOptions(f.Tag).name == "" { + addFields(t, frv, append(start, f.Index...)) + continue + } + case reflect.Ptr: + if t.Elem().Kind() == reflect.Struct && getOptions(f.Tag).name == "" { + if !frv.IsNil() { + addFields(t.Elem(), frv.Elem(), append(start, f.Index...)) + } + continue + } + } + } + + if typeIsHash(tomlTypeOfGo(frv)) { + fieldsSub = append(fieldsSub, append(start, f.Index...)) + } else { + fieldsDirect = append(fieldsDirect, append(start, f.Index...)) + } + } + } + addFields(rt, rv, nil) + + writeFields := func(fields [][]int) { + for _, fieldIndex := range fields { + fieldType := rt.FieldByIndex(fieldIndex) + fieldVal := rv.FieldByIndex(fieldIndex) + + if isNil(fieldVal) { /// Don't write anything for nil fields. + continue + } + + opts := getOptions(fieldType.Tag) + if opts.skip { + continue + } + keyName := fieldType.Name + if opts.name != "" { + keyName = opts.name + } + if opts.omitempty && isEmpty(fieldVal) { + continue + } + if opts.omitzero && isZero(fieldVal) { + continue + } + + if inline { + enc.writeKeyValue(Key{keyName}, fieldVal, true) + if fieldIndex[0] != len(fields)-1 { + enc.wf(", ") + } + } else { + enc.encode(key.add(keyName), fieldVal) + } + } + } + + if inline { + enc.wf("{") + } + writeFields(fieldsDirect) + writeFields(fieldsSub) + if inline { + enc.wf("}") + } +} + +// tomlTypeName returns the TOML type name of the Go value's type. It is +// used to determine whether the types of array elements are mixed (which is +// forbidden). If the Go value is nil, then it is illegal for it to be an array +// element, and valueIsNil is returned as true. + +// Returns the TOML type of a Go value. 
The type may be `nil`, which means +// no concrete TOML type could be found. +func tomlTypeOfGo(rv reflect.Value) tomlType { + if isNil(rv) || !rv.IsValid() { + return nil + } + switch rv.Kind() { + case reflect.Bool: + return tomlBool + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, + reflect.Int64, + reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, + reflect.Uint64: + return tomlInteger + case reflect.Float32, reflect.Float64: + return tomlFloat + case reflect.Array, reflect.Slice: + if typeEqual(tomlHash, tomlArrayType(rv)) { + return tomlArrayHash + } + return tomlArray + case reflect.Ptr, reflect.Interface: + return tomlTypeOfGo(rv.Elem()) + case reflect.String: + return tomlString + case reflect.Map: + return tomlHash + case reflect.Struct: + switch rv.Interface().(type) { + case time.Time: + return tomlDatetime + case encoding.TextMarshaler: + return tomlString + default: + // Someone used a pointer receiver: we can make it work for pointer + // values. + if rv.CanAddr() { + _, ok := rv.Addr().Interface().(encoding.TextMarshaler) + if ok { + return tomlString + } + } + return tomlHash + } + default: + _, ok := rv.Interface().(encoding.TextMarshaler) + if ok { + return tomlString + } + encPanic(errors.New("unsupported type: " + rv.Kind().String())) + panic("") // Need *some* return value + } +} + +// tomlArrayType returns the element type of a TOML array. The type returned +// may be nil if it cannot be determined (e.g., a nil slice or a zero length +// slize). This function may also panic if it finds a type that cannot be +// expressed in TOML (such as nil elements, heterogeneous arrays or directly +// nested arrays of tables). +func tomlArrayType(rv reflect.Value) tomlType { + if isNil(rv) || !rv.IsValid() || rv.Len() == 0 { + return nil + } + + /// Don't allow nil. + rvlen := rv.Len() + for i := 1; i < rvlen; i++ { + if tomlTypeOfGo(rv.Index(i)) == nil { + encPanic(errArrayNilElement) + } + } + + firstType := tomlTypeOfGo(rv.Index(0)) + if firstType == nil { + encPanic(errArrayNilElement) + } + return firstType +} + +type tagOptions struct { + skip bool // "-" + name string + omitempty bool + omitzero bool +} + +func getOptions(tag reflect.StructTag) tagOptions { + t := tag.Get("toml") + if t == "-" { + return tagOptions{skip: true} + } + var opts tagOptions + parts := strings.Split(t, ",") + opts.name = parts[0] + for _, s := range parts[1:] { + switch s { + case "omitempty": + opts.omitempty = true + case "omitzero": + opts.omitzero = true + } + } + return opts +} + +func isZero(rv reflect.Value) bool { + switch rv.Kind() { + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + return rv.Int() == 0 + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64: + return rv.Uint() == 0 + case reflect.Float32, reflect.Float64: + return rv.Float() == 0.0 + } + return false +} + +func isEmpty(rv reflect.Value) bool { + switch rv.Kind() { + case reflect.Array, reflect.Slice, reflect.Map, reflect.String: + return rv.Len() == 0 + case reflect.Bool: + return !rv.Bool() + } + return false +} + +func (enc *Encoder) newline() { + if enc.hasWritten { + enc.wf("\n") + } +} + +// Write a key/value pair: +// +// key = +// +// If inline is true it won't add a newline at the end. 
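getOptions above recognizes a rename, "-" to drop a field, and the omitempty/omitzero flags checked by isEmpty and isZero. A sketch of how those tags interact when encoding; the settings type is invented:

```go
package main

import (
	"log"
	"os"

	"github.com/BurntSushi/toml"
)

type settings struct {
	Name     string   `toml:"name"`
	Password string   `toml:"-"`                // never encoded
	Tags     []string `toml:"tags,omitempty"`   // skipped when empty
	Retries  int      `toml:"retries,omitzero"` // skipped when zero
}

func main() {
	s := settings{Name: "demo", Password: "hunter2"}
	// Only `name = "demo"` is written: Password is dropped by "-",
	// Tags is empty and Retries is zero.
	if err := toml.NewEncoder(os.Stdout).Encode(s); err != nil {
		log.Fatal(err)
	}
}
```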
+func (enc *Encoder) writeKeyValue(key Key, val reflect.Value, inline bool) { + if len(key) == 0 { + encPanic(errNoKey) + } + enc.wf("%s%s = ", enc.indentStr(key), key.maybeQuoted(len(key)-1)) + enc.eElement(val) + if !inline { + enc.newline() + } +} + +func (enc *Encoder) wf(format string, v ...interface{}) { + if _, err := fmt.Fprintf(enc.w, format, v...); err != nil { + encPanic(err) + } + enc.hasWritten = true +} + +func (enc *Encoder) indentStr(key Key) string { + return strings.Repeat(enc.Indent, len(key)-1) +} + +func encPanic(err error) { + panic(tomlEncodeError{err}) +} + +func eindirect(v reflect.Value) reflect.Value { + switch v.Kind() { + case reflect.Ptr, reflect.Interface: + return eindirect(v.Elem()) + default: + return v + } +} + +func isNil(rv reflect.Value) bool { + switch rv.Kind() { + case reflect.Interface, reflect.Map, reflect.Ptr, reflect.Slice: + return rv.IsNil() + default: + return false + } +} diff --git a/vendor/github.com/BurntSushi/toml/go.mod b/vendor/github.com/BurntSushi/toml/go.mod new file mode 100644 index 000000000..82989481d --- /dev/null +++ b/vendor/github.com/BurntSushi/toml/go.mod @@ -0,0 +1,3 @@ +module github.com/BurntSushi/toml + +go 1.16 diff --git a/vendor/github.com/BurntSushi/toml/go.sum b/vendor/github.com/BurntSushi/toml/go.sum new file mode 100644 index 000000000..e69de29bb diff --git a/vendor/github.com/BurntSushi/toml/internal/tz.go b/vendor/github.com/BurntSushi/toml/internal/tz.go new file mode 100644 index 000000000..022f15bc2 --- /dev/null +++ b/vendor/github.com/BurntSushi/toml/internal/tz.go @@ -0,0 +1,36 @@ +package internal + +import "time" + +// Timezones used for local datetime, date, and time TOML types. +// +// The exact way times and dates without a timezone should be interpreted is not +// well-defined in the TOML specification and left to the implementation. These +// defaults to current local timezone offset of the computer, but this can be +// changed by changing these variables before decoding. +// +// TODO: +// Ideally we'd like to offer people the ability to configure the used timezone +// by setting Decoder.Timezone and Encoder.Timezone; however, this is a bit +// tricky: the reason we use three different variables for this is to support +// round-tripping – without these specific TZ names we wouldn't know which +// format to use. +// +// There isn't a good way to encode this right now though, and passing this sort +// of information also ties in to various related issues such as string format +// encoding, encoding of comments, etc. +// +// So, for the time being, just put this in internal until we can write a good +// comprehensive API for doing all of this. +// +// The reason they're exported is because they're referred from in e.g. +// internal/tag. +// +// Note that this behaviour is valid according to the TOML spec as the exact +// behaviour is left up to implementations. 
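The fixed zones defined in this file are what local (offset-free) datetimes and dates end up in after decoding. A small sketch; the expected zone names in the comment follow from the variables above rather than from output I have verified:

```go
package main

import (
	"fmt"
	"log"
	"time"

	"github.com/BurntSushi/toml"
)

func main() {
	// An offset-free (local) datetime and a local date.
	const blob = `
start = 2021-11-14T21:01:54
day = 2021-11-14
`
	var v struct {
		Start time.Time
		Day   time.Time
	}
	if _, err := toml.Decode(blob, &v); err != nil {
		log.Fatal(err)
	}
	// The locations should be the fixed zones defined above
	// ("datetime-local", "date-local"); the offset is the machine's local one.
	fmt.Println(v.Start, v.Start.Location())
	fmt.Println(v.Day, v.Day.Location())
}
```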
+var ( + localOffset = func() int { _, o := time.Now().Zone(); return o }() + LocalDatetime = time.FixedZone("datetime-local", localOffset) + LocalDate = time.FixedZone("date-local", localOffset) + LocalTime = time.FixedZone("time-local", localOffset) +) diff --git a/vendor/github.com/BurntSushi/toml/lex.go b/vendor/github.com/BurntSushi/toml/lex.go new file mode 100644 index 000000000..adc4eb5d5 --- /dev/null +++ b/vendor/github.com/BurntSushi/toml/lex.go @@ -0,0 +1,1225 @@ +package toml + +import ( + "fmt" + "reflect" + "runtime" + "strings" + "unicode" + "unicode/utf8" +) + +type itemType int + +const ( + itemError itemType = iota + itemNIL // used in the parser to indicate no type + itemEOF + itemText + itemString + itemRawString + itemMultilineString + itemRawMultilineString + itemBool + itemInteger + itemFloat + itemDatetime + itemArray // the start of an array + itemArrayEnd + itemTableStart + itemTableEnd + itemArrayTableStart + itemArrayTableEnd + itemKeyStart + itemKeyEnd + itemCommentStart + itemInlineTableStart + itemInlineTableEnd +) + +const ( + eof = 0 + comma = ',' + tableStart = '[' + tableEnd = ']' + arrayTableStart = '[' + arrayTableEnd = ']' + tableSep = '.' + keySep = '=' + arrayStart = '[' + arrayEnd = ']' + commentStart = '#' + stringStart = '"' + stringEnd = '"' + rawStringStart = '\'' + rawStringEnd = '\'' + inlineTableStart = '{' + inlineTableEnd = '}' +) + +type stateFn func(lx *lexer) stateFn + +type lexer struct { + input string + start int + pos int + line int + state stateFn + items chan item + + // Allow for backing up up to four runes. + // This is necessary because TOML contains 3-rune tokens (""" and '''). + prevWidths [4]int + nprev int // how many of prevWidths are in use + // If we emit an eof, we can still back up, but it is not OK to call + // next again. + atEOF bool + + // A stack of state functions used to maintain context. + // The idea is to reuse parts of the state machine in various places. + // For example, values can appear at the top level or within arbitrarily + // nested arrays. The last state on the stack is used after a value has + // been lexed. Similarly for comments. 
+ stack []stateFn +} + +type item struct { + typ itemType + val string + line int +} + +func (lx *lexer) nextItem() item { + for { + select { + case item := <-lx.items: + return item + default: + lx.state = lx.state(lx) + //fmt.Printf(" STATE %-24s current: %-10q stack: %s\n", lx.state, lx.current(), lx.stack) + } + } +} + +func lex(input string) *lexer { + lx := &lexer{ + input: input, + state: lexTop, + line: 1, + items: make(chan item, 10), + stack: make([]stateFn, 0, 10), + } + return lx +} + +func (lx *lexer) push(state stateFn) { + lx.stack = append(lx.stack, state) +} + +func (lx *lexer) pop() stateFn { + if len(lx.stack) == 0 { + return lx.errorf("BUG in lexer: no states to pop") + } + last := lx.stack[len(lx.stack)-1] + lx.stack = lx.stack[0 : len(lx.stack)-1] + return last +} + +func (lx *lexer) current() string { + return lx.input[lx.start:lx.pos] +} + +func (lx *lexer) emit(typ itemType) { + lx.items <- item{typ, lx.current(), lx.line} + lx.start = lx.pos +} + +func (lx *lexer) emitTrim(typ itemType) { + lx.items <- item{typ, strings.TrimSpace(lx.current()), lx.line} + lx.start = lx.pos +} + +func (lx *lexer) next() (r rune) { + if lx.atEOF { + panic("BUG in lexer: next called after EOF") + } + if lx.pos >= len(lx.input) { + lx.atEOF = true + return eof + } + + if lx.input[lx.pos] == '\n' { + lx.line++ + } + lx.prevWidths[3] = lx.prevWidths[2] + lx.prevWidths[2] = lx.prevWidths[1] + lx.prevWidths[1] = lx.prevWidths[0] + if lx.nprev < 4 { + lx.nprev++ + } + + r, w := utf8.DecodeRuneInString(lx.input[lx.pos:]) + if r == utf8.RuneError { + lx.errorf("invalid UTF-8 byte at position %d (line %d): 0x%02x", lx.pos, lx.line, lx.input[lx.pos]) + return utf8.RuneError + } + + lx.prevWidths[0] = w + lx.pos += w + return r +} + +// ignore skips over the pending input before this point. +func (lx *lexer) ignore() { + lx.start = lx.pos +} + +// backup steps back one rune. Can be called 4 times between calls to next. +func (lx *lexer) backup() { + if lx.atEOF { + lx.atEOF = false + return + } + if lx.nprev < 1 { + panic("BUG in lexer: backed up too far") + } + w := lx.prevWidths[0] + lx.prevWidths[0] = lx.prevWidths[1] + lx.prevWidths[1] = lx.prevWidths[2] + lx.prevWidths[2] = lx.prevWidths[3] + lx.nprev-- + lx.pos -= w + if lx.pos < len(lx.input) && lx.input[lx.pos] == '\n' { + lx.line-- + } +} + +// accept consumes the next rune if it's equal to `valid`. +func (lx *lexer) accept(valid rune) bool { + if lx.next() == valid { + return true + } + lx.backup() + return false +} + +// peek returns but does not consume the next rune in the input. +func (lx *lexer) peek() rune { + r := lx.next() + lx.backup() + return r +} + +// skip ignores all input that matches the given predicate. +func (lx *lexer) skip(pred func(rune) bool) { + for { + r := lx.next() + if pred(r) { + continue + } + lx.backup() + lx.ignore() + return + } +} + +// errorf stops all lexing by emitting an error and returning `nil`. +// Note that any value that is a character is escaped if it's a special +// character (newlines, tabs, etc.). +func (lx *lexer) errorf(format string, values ...interface{}) stateFn { + lx.items <- item{ + itemError, + fmt.Sprintf(format, values...), + lx.line, + } + return nil +} + +// lexTop consumes elements at the top level of TOML data. 
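The lexer uses the stateFn pattern: each state function returns the next state, and nextItem drives the machine until an item is buffered on the channel. A stripped-down, self-contained toy in the same style; it deliberately does not touch the toml package's unexported types:

```go
package main

import (
	"fmt"
	"unicode"
)

// stateFn is a state that returns the next state, or nil when done.
type stateFn func(*lexer) stateFn

type lexer struct {
	input      string
	start, pos int
	state      stateFn
	items      chan string
}

func lex(input string) *lexer {
	return &lexer{input: input, state: lexWord, items: make(chan string, 2)}
}

// nextItem pumps the state machine until an item is available or lexing ends.
func (l *lexer) nextItem() (string, bool) {
	for {
		select {
		case it := <-l.items:
			return it, true
		default:
			if l.state == nil {
				return "", false
			}
			l.state = l.state(l)
		}
	}
}

// lexWord emits a run of non-space characters.
func lexWord(l *lexer) stateFn {
	for l.pos < len(l.input) && !unicode.IsSpace(rune(l.input[l.pos])) {
		l.pos++
	}
	if l.pos > l.start {
		l.items <- l.input[l.start:l.pos]
	}
	if l.pos >= len(l.input) {
		return nil
	}
	return lexSpace
}

// lexSpace skips whitespace between words.
func lexSpace(l *lexer) stateFn {
	for l.pos < len(l.input) && unicode.IsSpace(rune(l.input[l.pos])) {
		l.pos++
	}
	l.start = l.pos
	return lexWord
}

func main() {
	lx := lex("key = value")
	for it, ok := lx.nextItem(); ok; it, ok = lx.nextItem() {
		fmt.Printf("%q\n", it)
	}
}
```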
+func lexTop(lx *lexer) stateFn { + r := lx.next() + if isWhitespace(r) || isNL(r) { + return lexSkip(lx, lexTop) + } + switch r { + case commentStart: + lx.push(lexTop) + return lexCommentStart + case tableStart: + return lexTableStart + case eof: + if lx.pos > lx.start { + return lx.errorf("unexpected EOF") + } + lx.emit(itemEOF) + return nil + } + + // At this point, the only valid item can be a key, so we back up + // and let the key lexer do the rest. + lx.backup() + lx.push(lexTopEnd) + return lexKeyStart +} + +// lexTopEnd is entered whenever a top-level item has been consumed. (A value +// or a table.) It must see only whitespace, and will turn back to lexTop +// upon a newline. If it sees EOF, it will quit the lexer successfully. +func lexTopEnd(lx *lexer) stateFn { + r := lx.next() + switch { + case r == commentStart: + // a comment will read to a newline for us. + lx.push(lexTop) + return lexCommentStart + case isWhitespace(r): + return lexTopEnd + case isNL(r): + lx.ignore() + return lexTop + case r == eof: + lx.emit(itemEOF) + return nil + } + return lx.errorf( + "expected a top-level item to end with a newline, comment, or EOF, but got %q instead", + r) +} + +// lexTable lexes the beginning of a table. Namely, it makes sure that +// it starts with a character other than '.' and ']'. +// It assumes that '[' has already been consumed. +// It also handles the case that this is an item in an array of tables. +// e.g., '[[name]]'. +func lexTableStart(lx *lexer) stateFn { + if lx.peek() == arrayTableStart { + lx.next() + lx.emit(itemArrayTableStart) + lx.push(lexArrayTableEnd) + } else { + lx.emit(itemTableStart) + lx.push(lexTableEnd) + } + return lexTableNameStart +} + +func lexTableEnd(lx *lexer) stateFn { + lx.emit(itemTableEnd) + return lexTopEnd +} + +func lexArrayTableEnd(lx *lexer) stateFn { + if r := lx.next(); r != arrayTableEnd { + return lx.errorf( + "expected end of table array name delimiter %q, but got %q instead", + arrayTableEnd, r) + } + lx.emit(itemArrayTableEnd) + return lexTopEnd +} + +func lexTableNameStart(lx *lexer) stateFn { + lx.skip(isWhitespace) + switch r := lx.peek(); { + case r == tableEnd || r == eof: + return lx.errorf("unexpected end of table name (table names cannot be empty)") + case r == tableSep: + return lx.errorf("unexpected table separator (table names cannot be empty)") + case r == stringStart || r == rawStringStart: + lx.ignore() + lx.push(lexTableNameEnd) + return lexQuotedName + default: + lx.push(lexTableNameEnd) + return lexBareName + } +} + +// lexTableNameEnd reads the end of a piece of a table name, optionally +// consuming whitespace. +func lexTableNameEnd(lx *lexer) stateFn { + lx.skip(isWhitespace) + switch r := lx.next(); { + case isWhitespace(r): + return lexTableNameEnd + case r == tableSep: + lx.ignore() + return lexTableNameStart + case r == tableEnd: + return lx.pop() + default: + return lx.errorf("expected '.' or ']' to end table name, but got %q instead", r) + } +} + +// lexBareName lexes one part of a key or table. +// +// It assumes that at least one valid character for the table has already been +// read. +// +// Lexes only one part, e.g. only 'a' inside 'a.b'. +func lexBareName(lx *lexer) stateFn { + r := lx.next() + if isBareKeyChar(r) { + return lexBareName + } + lx.backup() + lx.emit(itemText) + return lx.pop() +} + +// lexBareName lexes one part of a key or table. +// +// It assumes that at least one valid character for the table has already been +// read. +// +// Lexes only one part, e.g. 
only '"a"' inside '"a".b'. +func lexQuotedName(lx *lexer) stateFn { + r := lx.next() + switch { + case isWhitespace(r): + return lexSkip(lx, lexValue) + case r == stringStart: + lx.ignore() // ignore the '"' + return lexString + case r == rawStringStart: + lx.ignore() // ignore the "'" + return lexRawString + case r == eof: + return lx.errorf("unexpected EOF; expected value") + default: + return lx.errorf("expected value but found %q instead", r) + } +} + +// lexKeyStart consumes all key parts until a '='. +func lexKeyStart(lx *lexer) stateFn { + lx.skip(isWhitespace) + switch r := lx.peek(); { + case r == '=' || r == eof: + return lx.errorf("unexpected '=': key name appears blank") + case r == '.': + return lx.errorf("unexpected '.': keys cannot start with a '.'") + case r == stringStart || r == rawStringStart: + lx.ignore() + fallthrough + default: // Bare key + lx.emit(itemKeyStart) + return lexKeyNameStart + } +} + +func lexKeyNameStart(lx *lexer) stateFn { + lx.skip(isWhitespace) + switch r := lx.peek(); { + case r == '=' || r == eof: + return lx.errorf("unexpected '='") + case r == '.': + return lx.errorf("unexpected '.'") + case r == stringStart || r == rawStringStart: + lx.ignore() + lx.push(lexKeyEnd) + return lexQuotedName + default: + lx.push(lexKeyEnd) + return lexBareName + } +} + +// lexKeyEnd consumes the end of a key and trims whitespace (up to the key +// separator). +func lexKeyEnd(lx *lexer) stateFn { + lx.skip(isWhitespace) + switch r := lx.next(); { + case isWhitespace(r): + return lexSkip(lx, lexKeyEnd) + case r == eof: + return lx.errorf("unexpected EOF; expected key separator %q", keySep) + case r == '.': + lx.ignore() + return lexKeyNameStart + case r == '=': + lx.emit(itemKeyEnd) + return lexSkip(lx, lexValue) + default: + return lx.errorf("expected '.' or '=', but got %q instead", r) + } +} + +// lexValue starts the consumption of a value anywhere a value is expected. +// lexValue will ignore whitespace. +// After a value is lexed, the last state on the next is popped and returned. +func lexValue(lx *lexer) stateFn { + // We allow whitespace to precede a value, but NOT newlines. + // In array syntax, the array states are responsible for ignoring newlines. + r := lx.next() + switch { + case isWhitespace(r): + return lexSkip(lx, lexValue) + case isDigit(r): + lx.backup() // avoid an extra state and use the same as above + return lexNumberOrDateStart + } + switch r { + case arrayStart: + lx.ignore() + lx.emit(itemArray) + return lexArrayValue + case inlineTableStart: + lx.ignore() + lx.emit(itemInlineTableStart) + return lexInlineTableValue + case stringStart: + if lx.accept(stringStart) { + if lx.accept(stringStart) { + lx.ignore() // Ignore """ + return lexMultilineString + } + lx.backup() + } + lx.ignore() // ignore the '"' + return lexString + case rawStringStart: + if lx.accept(rawStringStart) { + if lx.accept(rawStringStart) { + lx.ignore() // Ignore """ + return lexMultilineRawString + } + lx.backup() + } + lx.ignore() // ignore the "'" + return lexRawString + case '.': // special error case, be kind to users + return lx.errorf("floats must start with a digit, not '.'") + case 'i', 'n': + if (lx.accept('n') && lx.accept('f')) || (lx.accept('a') && lx.accept('n')) { + lx.emit(itemFloat) + return lx.pop() + } + case '-', '+': + return lexDecimalNumberStart + } + if unicode.IsLetter(r) { + // Be permissive here; lexBool will give a nice error if the + // user wrote something like + // x = foo + // (i.e. not 'true' or 'false' but is something else word-like.) 
+ lx.backup() + return lexBool + } + if r == eof { + return lx.errorf("unexpected EOF; expected value") + } + return lx.errorf("expected value but found %q instead", r) +} + +// lexArrayValue consumes one value in an array. It assumes that '[' or ',' +// have already been consumed. All whitespace and newlines are ignored. +func lexArrayValue(lx *lexer) stateFn { + r := lx.next() + switch { + case isWhitespace(r) || isNL(r): + return lexSkip(lx, lexArrayValue) + case r == commentStart: + lx.push(lexArrayValue) + return lexCommentStart + case r == comma: + return lx.errorf("unexpected comma") + case r == arrayEnd: + // NOTE(caleb): The spec isn't clear about whether you can have + // a trailing comma or not, so we'll allow it. + return lexArrayEnd + } + + lx.backup() + lx.push(lexArrayValueEnd) + return lexValue +} + +// lexArrayValueEnd consumes everything between the end of an array value and +// the next value (or the end of the array): it ignores whitespace and newlines +// and expects either a ',' or a ']'. +func lexArrayValueEnd(lx *lexer) stateFn { + r := lx.next() + switch { + case isWhitespace(r) || isNL(r): + return lexSkip(lx, lexArrayValueEnd) + case r == commentStart: + lx.push(lexArrayValueEnd) + return lexCommentStart + case r == comma: + lx.ignore() + return lexArrayValue // move on to the next value + case r == arrayEnd: + return lexArrayEnd + } + return lx.errorf( + "expected a comma or array terminator %q, but got %s instead", + arrayEnd, runeOrEOF(r)) +} + +// lexArrayEnd finishes the lexing of an array. +// It assumes that a ']' has just been consumed. +func lexArrayEnd(lx *lexer) stateFn { + lx.ignore() + lx.emit(itemArrayEnd) + return lx.pop() +} + +// lexInlineTableValue consumes one key/value pair in an inline table. +// It assumes that '{' or ',' have already been consumed. Whitespace is ignored. +func lexInlineTableValue(lx *lexer) stateFn { + r := lx.next() + switch { + case isWhitespace(r): + return lexSkip(lx, lexInlineTableValue) + case isNL(r): + return lx.errorf("newlines not allowed within inline tables") + case r == commentStart: + lx.push(lexInlineTableValue) + return lexCommentStart + case r == comma: + return lx.errorf("unexpected comma") + case r == inlineTableEnd: + return lexInlineTableEnd + } + lx.backup() + lx.push(lexInlineTableValueEnd) + return lexKeyStart +} + +// lexInlineTableValueEnd consumes everything between the end of an inline table +// key/value pair and the next pair (or the end of the table): +// it ignores whitespace and expects either a ',' or a '}'. +func lexInlineTableValueEnd(lx *lexer) stateFn { + switch r := lx.next(); { + case isWhitespace(r): + return lexSkip(lx, lexInlineTableValueEnd) + case isNL(r): + return lx.errorf("newlines not allowed within inline tables") + case r == commentStart: + lx.push(lexInlineTableValueEnd) + return lexCommentStart + case r == comma: + lx.ignore() + lx.skip(isWhitespace) + if lx.peek() == '}' { + return lx.errorf("trailing comma not allowed in inline tables") + } + return lexInlineTableValue + case r == inlineTableEnd: + return lexInlineTableEnd + default: + return lx.errorf( + "expected a comma or an inline table terminator %q, but got %s instead", + inlineTableEnd, runeOrEOF(r)) + } +} + +func runeOrEOF(r rune) string { + if r == eof { + return "end of file" + } + return "'" + string(r) + "'" +} + +// lexInlineTableEnd finishes the lexing of an inline table. +// It assumes that a '}' has just been consumed. 
+func lexInlineTableEnd(lx *lexer) stateFn { + lx.ignore() + lx.emit(itemInlineTableEnd) + return lx.pop() +} + +// lexString consumes the inner contents of a string. It assumes that the +// beginning '"' has already been consumed and ignored. +func lexString(lx *lexer) stateFn { + r := lx.next() + switch { + case r == eof: + return lx.errorf(`unexpected EOF; expected '"'`) + case isControl(r) || r == '\r': + return lx.errorf("control characters are not allowed inside strings: '0x%02x'", r) + case isNL(r): + return lx.errorf("strings cannot contain newlines") + case r == '\\': + lx.push(lexString) + return lexStringEscape + case r == stringEnd: + lx.backup() + lx.emit(itemString) + lx.next() + lx.ignore() + return lx.pop() + } + return lexString +} + +// lexMultilineString consumes the inner contents of a string. It assumes that +// the beginning '"""' has already been consumed and ignored. +func lexMultilineString(lx *lexer) stateFn { + r := lx.next() + switch r { + case eof: + return lx.errorf(`unexpected EOF; expected '"""'`) + case '\r': + if lx.peek() != '\n' { + return lx.errorf("control characters are not allowed inside strings: '0x%02x'", r) + } + return lexMultilineString + case '\\': + return lexMultilineStringEscape + case stringEnd: + /// Found " → try to read two more "". + if lx.accept(stringEnd) { + if lx.accept(stringEnd) { + /// Peek ahead: the string can contain " and "", including at the + /// end: """str""""" + /// 6 or more at the end, however, is an error. + if lx.peek() == stringEnd { + /// Check if we already lexed 5 's; if so we have 6 now, and + /// that's just too many man! + if strings.HasSuffix(lx.current(), `"""""`) { + return lx.errorf(`unexpected '""""""'`) + } + lx.backup() + lx.backup() + return lexMultilineString + } + + lx.backup() /// backup: don't include the """ in the item. + lx.backup() + lx.backup() + lx.emit(itemMultilineString) + lx.next() /// Read over ''' again and discard it. + lx.next() + lx.next() + lx.ignore() + return lx.pop() + } + lx.backup() + } + } + + if isControl(r) { + return lx.errorf("control characters are not allowed inside strings: '0x%02x'", r) + } + return lexMultilineString +} + +// lexRawString consumes a raw string. Nothing can be escaped in such a string. +// It assumes that the beginning "'" has already been consumed and ignored. +func lexRawString(lx *lexer) stateFn { + r := lx.next() + switch { + case r == eof: + return lx.errorf(`unexpected EOF; expected "'"`) + case isControl(r) || r == '\r': + return lx.errorf("control characters are not allowed inside strings: '0x%02x'", r) + case isNL(r): + return lx.errorf("strings cannot contain newlines") + case r == rawStringEnd: + lx.backup() + lx.emit(itemRawString) + lx.next() + lx.ignore() + return lx.pop() + } + return lexRawString +} + +// lexMultilineRawString consumes a raw string. Nothing can be escaped in such +// a string. It assumes that the beginning "'''" has already been consumed and +// ignored. +func lexMultilineRawString(lx *lexer) stateFn { + r := lx.next() + switch r { + case eof: + return lx.errorf(`unexpected EOF; expected "'''"`) + case '\r': + if lx.peek() != '\n' { + return lx.errorf("control characters are not allowed inside strings: '0x%02x'", r) + } + return lexMultilineRawString + case rawStringEnd: + /// Found ' → try to read two more ''. + if lx.accept(rawStringEnd) { + if lx.accept(rawStringEnd) { + /// Peek ahead: the string can contain ' and '', including at the + /// end: '''str''''' + /// 6 or more at the end, however, is an error. 
+ if lx.peek() == rawStringEnd { + /// Check if we already lexed 5 's; if so we have 6 now, and + /// that's just too many man! + if strings.HasSuffix(lx.current(), "'''''") { + return lx.errorf(`unexpected "''''''"`) + } + lx.backup() + lx.backup() + return lexMultilineRawString + } + + lx.backup() /// backup: don't include the ''' in the item. + lx.backup() + lx.backup() + lx.emit(itemRawMultilineString) + lx.next() /// Read over ''' again and discard it. + lx.next() + lx.next() + lx.ignore() + return lx.pop() + } + lx.backup() + } + } + + if isControl(r) { + return lx.errorf("control characters are not allowed inside strings: '0x%02x'", r) + } + return lexMultilineRawString +} + +// lexMultilineStringEscape consumes an escaped character. It assumes that the +// preceding '\\' has already been consumed. +func lexMultilineStringEscape(lx *lexer) stateFn { + // Handle the special case first: + if isNL(lx.next()) { + return lexMultilineString + } + lx.backup() + lx.push(lexMultilineString) + return lexStringEscape(lx) +} + +func lexStringEscape(lx *lexer) stateFn { + r := lx.next() + switch r { + case 'b': + fallthrough + case 't': + fallthrough + case 'n': + fallthrough + case 'f': + fallthrough + case 'r': + fallthrough + case '"': + fallthrough + case ' ', '\t': + // Inside """ .. """ strings you can use \ to escape newlines, and any + // amount of whitespace can be between the \ and \n. + fallthrough + case '\\': + return lx.pop() + case 'u': + return lexShortUnicodeEscape + case 'U': + return lexLongUnicodeEscape + } + return lx.errorf("invalid escape character %q; only the following escape characters are allowed: "+ + `\b, \t, \n, \f, \r, \", \\, \uXXXX, and \UXXXXXXXX`, r) +} + +func lexShortUnicodeEscape(lx *lexer) stateFn { + var r rune + for i := 0; i < 4; i++ { + r = lx.next() + if !isHexadecimal(r) { + return lx.errorf( + `expected four hexadecimal digits after '\u', but got %q instead`, + lx.current()) + } + } + return lx.pop() +} + +func lexLongUnicodeEscape(lx *lexer) stateFn { + var r rune + for i := 0; i < 8; i++ { + r = lx.next() + if !isHexadecimal(r) { + return lx.errorf( + `expected eight hexadecimal digits after '\U', but got %q instead`, + lx.current()) + } + } + return lx.pop() +} + +// lexNumberOrDateStart processes the first character of a value which begins +// with a digit. It exists to catch values starting with '0', so that +// lexBaseNumberOrDate can differentiate base prefixed integers from other +// types. +func lexNumberOrDateStart(lx *lexer) stateFn { + r := lx.next() + switch r { + case '0': + return lexBaseNumberOrDate + } + + if !isDigit(r) { + // The only way to reach this state is if the value starts + // with a digit, so specifically treat anything else as an + // error. + return lx.errorf("expected a digit but got %q", r) + } + + return lexNumberOrDate +} + +// lexNumberOrDate consumes either an integer, float or datetime. +func lexNumberOrDate(lx *lexer) stateFn { + r := lx.next() + if isDigit(r) { + return lexNumberOrDate + } + switch r { + case '-', ':': + return lexDatetime + case '_': + return lexDecimalNumber + case '.', 'e', 'E': + return lexFloat + } + + lx.backup() + lx.emit(itemInteger) + return lx.pop() +} + +// lexDatetime consumes a Datetime, to a first approximation. +// The parser validates that it matches one of the accepted formats. 
+func lexDatetime(lx *lexer) stateFn { + r := lx.next() + if isDigit(r) { + return lexDatetime + } + switch r { + case '-', ':', 'T', 't', ' ', '.', 'Z', 'z', '+': + return lexDatetime + } + + lx.backup() + lx.emitTrim(itemDatetime) + return lx.pop() +} + +// lexHexInteger consumes a hexadecimal integer after seeing the '0x' prefix. +func lexHexInteger(lx *lexer) stateFn { + r := lx.next() + if isHexadecimal(r) { + return lexHexInteger + } + switch r { + case '_': + return lexHexInteger + } + + lx.backup() + lx.emit(itemInteger) + return lx.pop() +} + +// lexOctalInteger consumes an octal integer after seeing the '0o' prefix. +func lexOctalInteger(lx *lexer) stateFn { + r := lx.next() + if isOctal(r) { + return lexOctalInteger + } + switch r { + case '_': + return lexOctalInteger + } + + lx.backup() + lx.emit(itemInteger) + return lx.pop() +} + +// lexBinaryInteger consumes a binary integer after seeing the '0b' prefix. +func lexBinaryInteger(lx *lexer) stateFn { + r := lx.next() + if isBinary(r) { + return lexBinaryInteger + } + switch r { + case '_': + return lexBinaryInteger + } + + lx.backup() + lx.emit(itemInteger) + return lx.pop() +} + +// lexDecimalNumber consumes a decimal float or integer. +func lexDecimalNumber(lx *lexer) stateFn { + r := lx.next() + if isDigit(r) { + return lexDecimalNumber + } + switch r { + case '.', 'e', 'E': + return lexFloat + case '_': + return lexDecimalNumber + } + + lx.backup() + lx.emit(itemInteger) + return lx.pop() +} + +// lexDecimalNumber consumes the first digit of a number beginning with a sign. +// It assumes the sign has already been consumed. Values which start with a sign +// are only allowed to be decimal integers or floats. +// +// The special "nan" and "inf" values are also recognized. +func lexDecimalNumberStart(lx *lexer) stateFn { + r := lx.next() + + // Special error cases to give users better error messages + switch r { + case 'i': + if !lx.accept('n') || !lx.accept('f') { + return lx.errorf("invalid float: '%s'", lx.current()) + } + lx.emit(itemFloat) + return lx.pop() + case 'n': + if !lx.accept('a') || !lx.accept('n') { + return lx.errorf("invalid float: '%s'", lx.current()) + } + lx.emit(itemFloat) + return lx.pop() + case '0': + p := lx.peek() + switch p { + case 'b', 'o', 'x': + return lx.errorf("cannot use sign with non-decimal numbers: '%s%c'", lx.current(), p) + } + case '.': + return lx.errorf("floats must start with a digit, not '.'") + } + + if isDigit(r) { + return lexDecimalNumber + } + + return lx.errorf("expected a digit but got %q", r) +} + +// lexBaseNumberOrDate differentiates between the possible values which +// start with '0'. It assumes that before reaching this state, the initial '0' +// has been consumed. +func lexBaseNumberOrDate(lx *lexer) stateFn { + r := lx.next() + // Note: All datetimes start with at least two digits, so we don't + // handle date characters (':', '-', etc.) here. + if isDigit(r) { + return lexNumberOrDate + } + switch r { + case '_': + // Can only be decimal, because there can't be an underscore + // between the '0' and the base designator, and dates can't + // contain underscores. 
+		return lexDecimalNumber
+	case '.', 'e', 'E':
+		return lexFloat
+	case 'b':
+		r = lx.peek()
+		if !isBinary(r) {
+			return lx.errorf("not a binary number: '%s%c'", lx.current(), r)
+		}
+		return lexBinaryInteger
+	case 'o':
+		r = lx.peek()
+		if !isOctal(r) {
+			return lx.errorf("not an octal number: '%s%c'", lx.current(), r)
+		}
+		return lexOctalInteger
+	case 'x':
+		r = lx.peek()
+		if !isHexadecimal(r) {
+			return lx.errorf("not a hexadecimal number: '%s%c'", lx.current(), r)
+		}
+		return lexHexInteger
+	}
+
+	lx.backup()
+	lx.emit(itemInteger)
+	return lx.pop()
+}
+
+// lexFloat consumes the elements of a float. It allows any sequence of
+// float-like characters, so floats emitted by the lexer are only a first
+// approximation and must be validated by the parser.
+func lexFloat(lx *lexer) stateFn {
+	r := lx.next()
+	if isDigit(r) {
+		return lexFloat
+	}
+	switch r {
+	case '_', '.', '-', '+', 'e', 'E':
+		return lexFloat
+	}
+
+	lx.backup()
+	lx.emit(itemFloat)
+	return lx.pop()
+}
+
+// lexBool consumes a bool string: 'true' or 'false'.
+func lexBool(lx *lexer) stateFn {
+	var rs []rune
+	for {
+		r := lx.next()
+		if !unicode.IsLetter(r) {
+			lx.backup()
+			break
+		}
+		rs = append(rs, r)
+	}
+	s := string(rs)
+	switch s {
+	case "true", "false":
+		lx.emit(itemBool)
+		return lx.pop()
+	}
+	return lx.errorf("expected value but found %q instead", s)
+}
+
+// lexCommentStart begins the lexing of a comment. It will emit
+// itemCommentStart and consume no characters, passing control to lexComment.
+func lexCommentStart(lx *lexer) stateFn {
+	lx.ignore()
+	lx.emit(itemCommentStart)
+	return lexComment
+}
+
+// lexComment lexes an entire comment. It assumes that '#' has been consumed.
+// It will consume *up to* the first newline character, and pass control
+// back to the last state on the stack.
+func lexComment(lx *lexer) stateFn {
+	switch r := lx.next(); {
+	case isNL(r) || r == eof:
+		lx.backup()
+		lx.emit(itemText)
+		return lx.pop()
+	case isControl(r):
+		return lx.errorf("control characters are not allowed inside comments: '0x%02x'", r)
+	default:
+		return lexComment
+	}
+}
+
+// lexSkip ignores all slurped input and moves on to the next state.
+func lexSkip(lx *lexer, nextState stateFn) stateFn {
+	lx.ignore()
+	return nextState
+}
+
+// isWhitespace returns true if `r` is a whitespace character according
+// to the spec.
+func isWhitespace(r rune) bool { + return r == '\t' || r == ' ' +} + +func isNL(r rune) bool { + return r == '\n' || r == '\r' +} + +// Control characters except \n, \t +func isControl(r rune) bool { + switch r { + case '\t', '\r', '\n': + return false + default: + return (r >= 0x00 && r <= 0x1f) || r == 0x7f + } +} + +func isDigit(r rune) bool { + return r >= '0' && r <= '9' +} + +func isHexadecimal(r rune) bool { + return (r >= '0' && r <= '9') || + (r >= 'a' && r <= 'f') || + (r >= 'A' && r <= 'F') +} + +func isOctal(r rune) bool { + return r >= '0' && r <= '7' +} + +func isBinary(r rune) bool { + return r == '0' || r == '1' +} + +func isBareKeyChar(r rune) bool { + return (r >= 'A' && r <= 'Z') || + (r >= 'a' && r <= 'z') || + (r >= '0' && r <= '9') || + r == '_' || + r == '-' +} + +func (s stateFn) String() string { + name := runtime.FuncForPC(reflect.ValueOf(s).Pointer()).Name() + if i := strings.LastIndexByte(name, '.'); i > -1 { + name = name[i+1:] + } + if s == nil { + name = "" + } + return name + "()" +} + +func (itype itemType) String() string { + switch itype { + case itemError: + return "Error" + case itemNIL: + return "NIL" + case itemEOF: + return "EOF" + case itemText: + return "Text" + case itemString, itemRawString, itemMultilineString, itemRawMultilineString: + return "String" + case itemBool: + return "Bool" + case itemInteger: + return "Integer" + case itemFloat: + return "Float" + case itemDatetime: + return "DateTime" + case itemTableStart: + return "TableStart" + case itemTableEnd: + return "TableEnd" + case itemKeyStart: + return "KeyStart" + case itemKeyEnd: + return "KeyEnd" + case itemArray: + return "Array" + case itemArrayEnd: + return "ArrayEnd" + case itemCommentStart: + return "CommentStart" + case itemInlineTableStart: + return "InlineTableStart" + case itemInlineTableEnd: + return "InlineTableEnd" + } + panic(fmt.Sprintf("BUG: Unknown type '%d'.", int(itype))) +} + +func (item item) String() string { + return fmt.Sprintf("(%s, %s)", item.typ.String(), item.val) +} diff --git a/vendor/github.com/BurntSushi/toml/parse.go b/vendor/github.com/BurntSushi/toml/parse.go new file mode 100644 index 000000000..d9ae5db94 --- /dev/null +++ b/vendor/github.com/BurntSushi/toml/parse.go @@ -0,0 +1,739 @@ +package toml + +import ( + "errors" + "fmt" + "strconv" + "strings" + "time" + "unicode/utf8" + + "github.com/BurntSushi/toml/internal" +) + +type parser struct { + mapping map[string]interface{} + types map[string]tomlType + lx *lexer + + ordered []Key // List of keys in the order that they appear in the TOML data. + context Key // Full key for the current hash in scope. + currentKey string // Base key name for everything except hashes. + approxLine int // Rough approximation of line number + implicits map[string]bool // Record implied keys (e.g. 'key.group.names'). +} + +// ParseError is used when a file can't be parsed: for example invalid integer +// literals, duplicate keys, etc. +type ParseError struct { + Message string + Line int + LastKey string +} + +func (pe ParseError) Error() string { + return fmt.Sprintf("Near line %d (last key parsed '%s'): %s", + pe.Line, pe.LastKey, pe.Message) +} + +func parse(data string) (p *parser, err error) { + defer func() { + if r := recover(); r != nil { + var ok bool + if err, ok = r.(ParseError); ok { + return + } + panic(r) + } + }() + + // Read over BOM; do this here as the lexer calls utf8.DecodeRuneInString() + // which mangles stuff. 
+ if strings.HasPrefix(data, "\xff\xfe") || strings.HasPrefix(data, "\xfe\xff") { + data = data[2:] + } + + // Examine first few bytes for NULL bytes; this probably means it's a UTF-16 + // file (second byte in surrogate pair being NULL). Again, do this here to + // avoid having to deal with UTF-8/16 stuff in the lexer. + ex := 6 + if len(data) < 6 { + ex = len(data) + } + if strings.ContainsRune(data[:ex], 0) { + return nil, errors.New("files cannot contain NULL bytes; probably using UTF-16; TOML files must be UTF-8") + } + + p = &parser{ + mapping: make(map[string]interface{}), + types: make(map[string]tomlType), + lx: lex(data), + ordered: make([]Key, 0), + implicits: make(map[string]bool), + } + for { + item := p.next() + if item.typ == itemEOF { + break + } + p.topLevel(item) + } + + return p, nil +} + +func (p *parser) panicf(format string, v ...interface{}) { + msg := fmt.Sprintf(format, v...) + panic(ParseError{ + Message: msg, + Line: p.approxLine, + LastKey: p.current(), + }) +} + +func (p *parser) next() item { + it := p.lx.nextItem() + //fmt.Printf("ITEM %-18s line %-3d │ %q\n", it.typ, it.line, it.val) + if it.typ == itemError { + p.panicf("%s", it.val) + } + return it +} + +func (p *parser) bug(format string, v ...interface{}) { + panic(fmt.Sprintf("BUG: "+format+"\n\n", v...)) +} + +func (p *parser) expect(typ itemType) item { + it := p.next() + p.assertEqual(typ, it.typ) + return it +} + +func (p *parser) assertEqual(expected, got itemType) { + if expected != got { + p.bug("Expected '%s' but got '%s'.", expected, got) + } +} + +func (p *parser) topLevel(item item) { + switch item.typ { + case itemCommentStart: // # .. + p.approxLine = item.line + p.expect(itemText) + case itemTableStart: // [ .. ] + name := p.next() + p.approxLine = name.line + + var key Key + for ; name.typ != itemTableEnd && name.typ != itemEOF; name = p.next() { + key = append(key, p.keyString(name)) + } + p.assertEqual(itemTableEnd, name.typ) + + p.addContext(key, false) + p.setType("", tomlHash) + p.ordered = append(p.ordered, key) + case itemArrayTableStart: // [[ .. ]] + name := p.next() + p.approxLine = name.line + + var key Key + for ; name.typ != itemArrayTableEnd && name.typ != itemEOF; name = p.next() { + key = append(key, p.keyString(name)) + } + p.assertEqual(itemArrayTableEnd, name.typ) + + p.addContext(key, true) + p.setType("", tomlArrayHash) + p.ordered = append(p.ordered, key) + case itemKeyStart: // key = .. + outerContext := p.context + /// Read all the key parts (e.g. 'a' and 'b' in 'a.b') + k := p.next() + p.approxLine = k.line + var key Key + for ; k.typ != itemKeyEnd && k.typ != itemEOF; k = p.next() { + key = append(key, p.keyString(k)) + } + p.assertEqual(itemKeyEnd, k.typ) + + /// The current key is the last part. + p.currentKey = key[len(key)-1] + + /// All the other parts (if any) are the context; need to set each part + /// as implicit. + context := key[:len(key)-1] + for i := range context { + p.addImplicitContext(append(p.context, context[i:i+1]...)) + } + + /// Set value. + val, typ := p.value(p.next(), false) + p.set(p.currentKey, val, typ) + p.ordered = append(p.ordered, p.context.add(p.currentKey)) + + /// Remove the context we added (preserving any context from [tbl] lines). + p.context = outerContext + p.currentKey = "" + default: + p.bug("Unexpected type at top level: %s", item.typ) + } +} + +// Gets a string for a key (or part of a key in a table name). 
+func (p *parser) keyString(it item) string { + switch it.typ { + case itemText: + return it.val + case itemString, itemMultilineString, + itemRawString, itemRawMultilineString: + s, _ := p.value(it, false) + return s.(string) + default: + p.bug("Unexpected key type: %s", it.typ) + } + panic("unreachable") +} + +var datetimeRepl = strings.NewReplacer( + "z", "Z", + "t", "T", + " ", "T") + +// value translates an expected value from the lexer into a Go value wrapped +// as an empty interface. +func (p *parser) value(it item, parentIsArray bool) (interface{}, tomlType) { + switch it.typ { + case itemString: + return p.replaceEscapes(it.val), p.typeOfPrimitive(it) + case itemMultilineString: + return p.replaceEscapes(stripFirstNewline(stripEscapedNewlines(it.val))), p.typeOfPrimitive(it) + case itemRawString: + return it.val, p.typeOfPrimitive(it) + case itemRawMultilineString: + return stripFirstNewline(it.val), p.typeOfPrimitive(it) + case itemInteger: + return p.valueInteger(it) + case itemFloat: + return p.valueFloat(it) + case itemBool: + switch it.val { + case "true": + return true, p.typeOfPrimitive(it) + case "false": + return false, p.typeOfPrimitive(it) + default: + p.bug("Expected boolean value, but got '%s'.", it.val) + } + case itemDatetime: + return p.valueDatetime(it) + case itemArray: + return p.valueArray(it) + case itemInlineTableStart: + return p.valueInlineTable(it, parentIsArray) + default: + p.bug("Unexpected value type: %s", it.typ) + } + panic("unreachable") +} + +func (p *parser) valueInteger(it item) (interface{}, tomlType) { + if !numUnderscoresOK(it.val) { + p.panicf("Invalid integer %q: underscores must be surrounded by digits", it.val) + } + if numHasLeadingZero(it.val) { + p.panicf("Invalid integer %q: cannot have leading zeroes", it.val) + } + + num, err := strconv.ParseInt(it.val, 0, 64) + if err != nil { + // Distinguish integer values. Normally, it'd be a bug if the lexer + // provides an invalid integer, but it's possible that the number is + // out of range of valid values (which the lexer cannot determine). + // So mark the former as a bug but the latter as a legitimate user + // error. + if e, ok := err.(*strconv.NumError); ok && e.Err == strconv.ErrRange { + p.panicf("Integer '%s' is out of the range of 64-bit signed integers.", it.val) + } else { + p.bug("Expected integer value, but got '%s'.", it.val) + } + } + return num, p.typeOfPrimitive(it) +} + +func (p *parser) valueFloat(it item) (interface{}, tomlType) { + parts := strings.FieldsFunc(it.val, func(r rune) bool { + switch r { + case '.', 'e', 'E': + return true + } + return false + }) + for _, part := range parts { + if !numUnderscoresOK(part) { + p.panicf("Invalid float %q: underscores must be surrounded by digits", it.val) + } + } + if len(parts) > 0 && numHasLeadingZero(parts[0]) { + p.panicf("Invalid float %q: cannot have leading zeroes", it.val) + } + if !numPeriodsOK(it.val) { + // As a special case, numbers like '123.' or '1.e2', + // which are valid as far as Go/strconv are concerned, + // must be rejected because TOML says that a fractional + // part consists of '.' followed by 1+ digits. + p.panicf("Invalid float %q: '.' must be followed by one or more digits", it.val) + } + val := strings.Replace(it.val, "_", "", -1) + if val == "+nan" || val == "-nan" { // Go doesn't support this, but TOML spec does. 
+ val = "nan" + } + num, err := strconv.ParseFloat(val, 64) + if err != nil { + if e, ok := err.(*strconv.NumError); ok && e.Err == strconv.ErrRange { + p.panicf("Float '%s' is out of the range of 64-bit IEEE-754 floating-point numbers.", it.val) + } else { + p.panicf("Invalid float value: %q", it.val) + } + } + return num, p.typeOfPrimitive(it) +} + +var dtTypes = []struct { + fmt string + zone *time.Location +}{ + {time.RFC3339Nano, time.Local}, + {"2006-01-02T15:04:05.999999999", internal.LocalDatetime}, + {"2006-01-02", internal.LocalDate}, + {"15:04:05.999999999", internal.LocalTime}, +} + +func (p *parser) valueDatetime(it item) (interface{}, tomlType) { + it.val = datetimeRepl.Replace(it.val) + var ( + t time.Time + ok bool + err error + ) + for _, dt := range dtTypes { + t, err = time.ParseInLocation(dt.fmt, it.val, dt.zone) + if err == nil { + ok = true + break + } + } + if !ok { + p.panicf("Invalid TOML Datetime: %q.", it.val) + } + return t, p.typeOfPrimitive(it) +} + +func (p *parser) valueArray(it item) (interface{}, tomlType) { + p.setType(p.currentKey, tomlArray) + + // p.setType(p.currentKey, typ) + var ( + array []interface{} + types []tomlType + ) + for it = p.next(); it.typ != itemArrayEnd; it = p.next() { + if it.typ == itemCommentStart { + p.expect(itemText) + continue + } + + val, typ := p.value(it, true) + array = append(array, val) + types = append(types, typ) + } + return array, tomlArray +} + +func (p *parser) valueInlineTable(it item, parentIsArray bool) (interface{}, tomlType) { + var ( + hash = make(map[string]interface{}) + outerContext = p.context + outerKey = p.currentKey + ) + + p.context = append(p.context, p.currentKey) + prevContext := p.context + p.currentKey = "" + + p.addImplicit(p.context) + p.addContext(p.context, parentIsArray) + + /// Loop over all table key/value pairs. + for it := p.next(); it.typ != itemInlineTableEnd; it = p.next() { + if it.typ == itemCommentStart { + p.expect(itemText) + continue + } + + /// Read all key parts. + k := p.next() + p.approxLine = k.line + var key Key + for ; k.typ != itemKeyEnd && k.typ != itemEOF; k = p.next() { + key = append(key, p.keyString(k)) + } + p.assertEqual(itemKeyEnd, k.typ) + + /// The current key is the last part. + p.currentKey = key[len(key)-1] + + /// All the other parts (if any) are the context; need to set each part + /// as implicit. + context := key[:len(key)-1] + for i := range context { + p.addImplicitContext(append(p.context, context[i:i+1]...)) + } + + /// Set the value. + val, typ := p.value(p.next(), false) + p.set(p.currentKey, val, typ) + p.ordered = append(p.ordered, p.context.add(p.currentKey)) + hash[p.currentKey] = val + + /// Restore context. + p.context = prevContext + } + p.context = outerContext + p.currentKey = outerKey + return hash, tomlHash +} + +// numHasLeadingZero checks if this number has leading zeroes, allowing for '0', +// +/- signs, and base prefixes. +func numHasLeadingZero(s string) bool { + if len(s) > 1 && s[0] == '0' && isDigit(rune(s[1])) { // >1 to allow "0" and isDigit to allow 0x + return true + } + if len(s) > 2 && (s[0] == '-' || s[0] == '+') && s[1] == '0' { + return true + } + return false +} + +// numUnderscoresOK checks whether each underscore in s is surrounded by +// characters that are not underscores. 
+func numUnderscoresOK(s string) bool { + switch s { + case "nan", "+nan", "-nan", "inf", "-inf", "+inf": + return true + } + accept := false + for _, r := range s { + if r == '_' { + if !accept { + return false + } + } + + // isHexadecimal is a superset of all the permissable characters + // surrounding an underscore. + accept = isHexadecimal(r) + } + return accept +} + +// numPeriodsOK checks whether every period in s is followed by a digit. +func numPeriodsOK(s string) bool { + period := false + for _, r := range s { + if period && !isDigit(r) { + return false + } + period = r == '.' + } + return !period +} + +// Set the current context of the parser, where the context is either a hash or +// an array of hashes, depending on the value of the `array` parameter. +// +// Establishing the context also makes sure that the key isn't a duplicate, and +// will create implicit hashes automatically. +func (p *parser) addContext(key Key, array bool) { + var ok bool + + // Always start at the top level and drill down for our context. + hashContext := p.mapping + keyContext := make(Key, 0) + + // We only need implicit hashes for key[0:-1] + for _, k := range key[0 : len(key)-1] { + _, ok = hashContext[k] + keyContext = append(keyContext, k) + + // No key? Make an implicit hash and move on. + if !ok { + p.addImplicit(keyContext) + hashContext[k] = make(map[string]interface{}) + } + + // If the hash context is actually an array of tables, then set + // the hash context to the last element in that array. + // + // Otherwise, it better be a table, since this MUST be a key group (by + // virtue of it not being the last element in a key). + switch t := hashContext[k].(type) { + case []map[string]interface{}: + hashContext = t[len(t)-1] + case map[string]interface{}: + hashContext = t + default: + p.panicf("Key '%s' was already created as a hash.", keyContext) + } + } + + p.context = keyContext + if array { + // If this is the first element for this array, then allocate a new + // list of tables for it. + k := key[len(key)-1] + if _, ok := hashContext[k]; !ok { + hashContext[k] = make([]map[string]interface{}, 0, 4) + } + + // Add a new table. But make sure the key hasn't already been used + // for something else. + if hash, ok := hashContext[k].([]map[string]interface{}); ok { + hashContext[k] = append(hash, make(map[string]interface{})) + } else { + p.panicf("Key '%s' was already created and cannot be used as an array.", keyContext) + } + } else { + p.setValue(key[len(key)-1], make(map[string]interface{})) + } + p.context = append(p.context, key[len(key)-1]) +} + +// set calls setValue and setType. +func (p *parser) set(key string, val interface{}, typ tomlType) { + p.setValue(p.currentKey, val) + p.setType(p.currentKey, typ) +} + +// setValue sets the given key to the given value in the current context. +// It will make sure that the key hasn't already been defined, account for +// implicit key groups. +func (p *parser) setValue(key string, value interface{}) { + var ( + tmpHash interface{} + ok bool + hash = p.mapping + keyContext Key + ) + for _, k := range p.context { + keyContext = append(keyContext, k) + if tmpHash, ok = hash[k]; !ok { + p.bug("Context for key '%s' has not been established.", keyContext) + } + switch t := tmpHash.(type) { + case []map[string]interface{}: + // The context is a table of hashes. Pick the most recent table + // defined as the current hash. 
+ hash = t[len(t)-1] + case map[string]interface{}: + hash = t + default: + p.panicf("Key '%s' has already been defined.", keyContext) + } + } + keyContext = append(keyContext, key) + + if _, ok := hash[key]; ok { + // Normally redefining keys isn't allowed, but the key could have been + // defined implicitly and it's allowed to be redefined concretely. (See + // the `valid/implicit-and-explicit-after.toml` in toml-test) + // + // But we have to make sure to stop marking it as an implicit. (So that + // another redefinition provokes an error.) + // + // Note that since it has already been defined (as a hash), we don't + // want to overwrite it. So our business is done. + if p.isArray(keyContext) { + p.removeImplicit(keyContext) + hash[key] = value + return + } + if p.isImplicit(keyContext) { + p.removeImplicit(keyContext) + return + } + + // Otherwise, we have a concrete key trying to override a previous + // key, which is *always* wrong. + p.panicf("Key '%s' has already been defined.", keyContext) + } + + hash[key] = value +} + +// setType sets the type of a particular value at a given key. +// It should be called immediately AFTER setValue. +// +// Note that if `key` is empty, then the type given will be applied to the +// current context (which is either a table or an array of tables). +func (p *parser) setType(key string, typ tomlType) { + keyContext := make(Key, 0, len(p.context)+1) + for _, k := range p.context { + keyContext = append(keyContext, k) + } + if len(key) > 0 { // allow type setting for hashes + keyContext = append(keyContext, key) + } + p.types[keyContext.String()] = typ +} + +// Implicit keys need to be created when tables are implied in "a.b.c.d = 1" and +// "[a.b.c]" (the "a", "b", and "c" hashes are never created explicitly). +func (p *parser) addImplicit(key Key) { p.implicits[key.String()] = true } +func (p *parser) removeImplicit(key Key) { p.implicits[key.String()] = false } +func (p *parser) isImplicit(key Key) bool { return p.implicits[key.String()] } +func (p *parser) isArray(key Key) bool { return p.types[key.String()] == tomlArray } +func (p *parser) addImplicitContext(key Key) { + p.addImplicit(key) + p.addContext(key, false) +} + +// current returns the full key name of the current context. +func (p *parser) current() string { + if len(p.currentKey) == 0 { + return p.context.String() + } + if len(p.context) == 0 { + return p.currentKey + } + return fmt.Sprintf("%s.%s", p.context, p.currentKey) +} + +func stripFirstNewline(s string) string { + if len(s) > 0 && s[0] == '\n' { + return s[1:] + } + if len(s) > 1 && s[0] == '\r' && s[1] == '\n' { + return s[2:] + } + return s +} + +// Remove newlines inside triple-quoted strings if a line ends with "\". +func stripEscapedNewlines(s string) string { + split := strings.Split(s, "\n") + if len(split) < 1 { + return s + } + + escNL := false // Keep track of the last non-blank line was escaped. 
+ for i, line := range split { + line = strings.TrimRight(line, " \t\r") + + if len(line) == 0 || line[len(line)-1] != '\\' { + split[i] = strings.TrimRight(split[i], "\r") + if !escNL && i != len(split)-1 { + split[i] += "\n" + } + continue + } + + escBS := true + for j := len(line) - 1; j >= 0 && line[j] == '\\'; j-- { + escBS = !escBS + } + if escNL { + line = strings.TrimLeft(line, " \t\r") + } + escNL = !escBS + + if escBS { + split[i] += "\n" + continue + } + + split[i] = line[:len(line)-1] // Remove \ + if len(split)-1 > i { + split[i+1] = strings.TrimLeft(split[i+1], " \t\r") + } + } + return strings.Join(split, "") +} + +func (p *parser) replaceEscapes(str string) string { + var replaced []rune + s := []byte(str) + r := 0 + for r < len(s) { + if s[r] != '\\' { + c, size := utf8.DecodeRune(s[r:]) + r += size + replaced = append(replaced, c) + continue + } + r += 1 + if r >= len(s) { + p.bug("Escape sequence at end of string.") + return "" + } + switch s[r] { + default: + p.bug("Expected valid escape code after \\, but got %q.", s[r]) + return "" + case ' ', '\t': + p.panicf("invalid escape: '\\%c'", s[r]) + return "" + case 'b': + replaced = append(replaced, rune(0x0008)) + r += 1 + case 't': + replaced = append(replaced, rune(0x0009)) + r += 1 + case 'n': + replaced = append(replaced, rune(0x000A)) + r += 1 + case 'f': + replaced = append(replaced, rune(0x000C)) + r += 1 + case 'r': + replaced = append(replaced, rune(0x000D)) + r += 1 + case '"': + replaced = append(replaced, rune(0x0022)) + r += 1 + case '\\': + replaced = append(replaced, rune(0x005C)) + r += 1 + case 'u': + // At this point, we know we have a Unicode escape of the form + // `uXXXX` at [r, r+5). (Because the lexer guarantees this + // for us.) + escaped := p.asciiEscapeToUnicode(s[r+1 : r+5]) + replaced = append(replaced, escaped) + r += 5 + case 'U': + // At this point, we know we have a Unicode escape of the form + // `uXXXX` at [r, r+9). (Because the lexer guarantees this + // for us.) + escaped := p.asciiEscapeToUnicode(s[r+1 : r+9]) + replaced = append(replaced, escaped) + r += 9 + } + } + return string(replaced) +} + +func (p *parser) asciiEscapeToUnicode(bs []byte) rune { + s := string(bs) + hex, err := strconv.ParseUint(strings.ToLower(s), 16, 32) + if err != nil { + p.bug("Could not parse '%s' as a hexadecimal number, but the "+ + "lexer claims it's OK: %s", s, err) + } + if !utf8.ValidRune(rune(hex)) { + p.panicf("Escaped character '\\u%s' is not valid UTF-8.", s) + } + return rune(hex) +} diff --git a/vendor/github.com/BurntSushi/toml/type_check.go b/vendor/github.com/BurntSushi/toml/type_check.go new file mode 100644 index 000000000..d56aa80fa --- /dev/null +++ b/vendor/github.com/BurntSushi/toml/type_check.go @@ -0,0 +1,70 @@ +package toml + +// tomlType represents any Go type that corresponds to a TOML type. +// While the first draft of the TOML spec has a simplistic type system that +// probably doesn't need this level of sophistication, we seem to be militating +// toward adding real composite types. +type tomlType interface { + typeString() string +} + +// typeEqual accepts any two types and returns true if they are equal. 
+func typeEqual(t1, t2 tomlType) bool { + if t1 == nil || t2 == nil { + return false + } + return t1.typeString() == t2.typeString() +} + +func typeIsHash(t tomlType) bool { + return typeEqual(t, tomlHash) || typeEqual(t, tomlArrayHash) +} + +type tomlBaseType string + +func (btype tomlBaseType) typeString() string { + return string(btype) +} + +func (btype tomlBaseType) String() string { + return btype.typeString() +} + +var ( + tomlInteger tomlBaseType = "Integer" + tomlFloat tomlBaseType = "Float" + tomlDatetime tomlBaseType = "Datetime" + tomlString tomlBaseType = "String" + tomlBool tomlBaseType = "Bool" + tomlArray tomlBaseType = "Array" + tomlHash tomlBaseType = "Hash" + tomlArrayHash tomlBaseType = "ArrayHash" +) + +// typeOfPrimitive returns a tomlType of any primitive value in TOML. +// Primitive values are: Integer, Float, Datetime, String and Bool. +// +// Passing a lexer item other than the following will cause a BUG message +// to occur: itemString, itemBool, itemInteger, itemFloat, itemDatetime. +func (p *parser) typeOfPrimitive(lexItem item) tomlType { + switch lexItem.typ { + case itemInteger: + return tomlInteger + case itemFloat: + return tomlFloat + case itemDatetime: + return tomlDatetime + case itemString: + return tomlString + case itemMultilineString: + return tomlString + case itemRawString: + return tomlString + case itemRawMultilineString: + return tomlString + case itemBool: + return tomlBool + } + p.bug("Cannot infer primitive type of lex item '%s'.", lexItem) + panic("unreachable") +} diff --git a/vendor/github.com/BurntSushi/toml/type_fields.go b/vendor/github.com/BurntSushi/toml/type_fields.go new file mode 100644 index 000000000..608997c22 --- /dev/null +++ b/vendor/github.com/BurntSushi/toml/type_fields.go @@ -0,0 +1,242 @@ +package toml + +// Struct field handling is adapted from code in encoding/json: +// +// Copyright 2010 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the Go distribution. + +import ( + "reflect" + "sort" + "sync" +) + +// A field represents a single field found in a struct. +type field struct { + name string // the name of the field (`toml` tag included) + tag bool // whether field has a `toml` tag + index []int // represents the depth of an anonymous field + typ reflect.Type // the type of the field +} + +// byName sorts field by name, breaking ties with depth, +// then breaking ties with "name came from toml tag", then +// breaking ties with index sequence. +type byName []field + +func (x byName) Len() int { return len(x) } + +func (x byName) Swap(i, j int) { x[i], x[j] = x[j], x[i] } + +func (x byName) Less(i, j int) bool { + if x[i].name != x[j].name { + return x[i].name < x[j].name + } + if len(x[i].index) != len(x[j].index) { + return len(x[i].index) < len(x[j].index) + } + if x[i].tag != x[j].tag { + return x[i].tag + } + return byIndex(x).Less(i, j) +} + +// byIndex sorts field by index sequence. +type byIndex []field + +func (x byIndex) Len() int { return len(x) } + +func (x byIndex) Swap(i, j int) { x[i], x[j] = x[j], x[i] } + +func (x byIndex) Less(i, j int) bool { + for k, xik := range x[i].index { + if k >= len(x[j].index) { + return false + } + if xik != x[j].index[k] { + return xik < x[j].index[k] + } + } + return len(x[i].index) < len(x[j].index) +} + +// typeFields returns a list of fields that TOML should recognize for the given +// type. 
The algorithm is breadth-first search over the set of structs to +// include - the top struct and then any reachable anonymous structs. +func typeFields(t reflect.Type) []field { + // Anonymous fields to explore at the current level and the next. + current := []field{} + next := []field{{typ: t}} + + // Count of queued names for current level and the next. + count := map[reflect.Type]int{} + nextCount := map[reflect.Type]int{} + + // Types already visited at an earlier level. + visited := map[reflect.Type]bool{} + + // Fields found. + var fields []field + + for len(next) > 0 { + current, next = next, current[:0] + count, nextCount = nextCount, map[reflect.Type]int{} + + for _, f := range current { + if visited[f.typ] { + continue + } + visited[f.typ] = true + + // Scan f.typ for fields to include. + for i := 0; i < f.typ.NumField(); i++ { + sf := f.typ.Field(i) + if sf.PkgPath != "" && !sf.Anonymous { // unexported + continue + } + opts := getOptions(sf.Tag) + if opts.skip { + continue + } + index := make([]int, len(f.index)+1) + copy(index, f.index) + index[len(f.index)] = i + + ft := sf.Type + if ft.Name() == "" && ft.Kind() == reflect.Ptr { + // Follow pointer. + ft = ft.Elem() + } + + // Record found field and index sequence. + if opts.name != "" || !sf.Anonymous || ft.Kind() != reflect.Struct { + tagged := opts.name != "" + name := opts.name + if name == "" { + name = sf.Name + } + fields = append(fields, field{name, tagged, index, ft}) + if count[f.typ] > 1 { + // If there were multiple instances, add a second, + // so that the annihilation code will see a duplicate. + // It only cares about the distinction between 1 or 2, + // so don't bother generating any more copies. + fields = append(fields, fields[len(fields)-1]) + } + continue + } + + // Record new anonymous struct to explore in next round. + nextCount[ft]++ + if nextCount[ft] == 1 { + f := field{name: ft.Name(), index: index, typ: ft} + next = append(next, f) + } + } + } + } + + sort.Sort(byName(fields)) + + // Delete all fields that are hidden by the Go rules for embedded fields, + // except that fields with TOML tags are promoted. + + // The fields are sorted in primary order of name, secondary order + // of field index length. Loop over names; for each name, delete + // hidden fields by choosing the one dominant field that survives. + out := fields[:0] + for advance, i := 0, 0; i < len(fields); i += advance { + // One iteration per name. + // Find the sequence of fields with the name of this first field. + fi := fields[i] + name := fi.name + for advance = 1; i+advance < len(fields); advance++ { + fj := fields[i+advance] + if fj.name != name { + break + } + } + if advance == 1 { // Only one field with this name + out = append(out, fi) + continue + } + dominant, ok := dominantField(fields[i : i+advance]) + if ok { + out = append(out, dominant) + } + } + + fields = out + sort.Sort(byIndex(fields)) + + return fields +} + +// dominantField looks through the fields, all of which are known to +// have the same name, to find the single field that dominates the +// others using Go's embedding rules, modified by the presence of +// TOML tags. If there are multiple top-level fields, the boolean +// will be false: This condition is an error in Go and we skip all +// the fields. +func dominantField(fields []field) (field, bool) { + // The fields are sorted in increasing index-length order. The winner + // must therefore be one with the shortest index length. Drop all + // longer entries, which is easy: just truncate the slice. 
+ length := len(fields[0].index) + tagged := -1 // Index of first tagged field. + for i, f := range fields { + if len(f.index) > length { + fields = fields[:i] + break + } + if f.tag { + if tagged >= 0 { + // Multiple tagged fields at the same level: conflict. + // Return no field. + return field{}, false + } + tagged = i + } + } + if tagged >= 0 { + return fields[tagged], true + } + // All remaining fields have the same length. If there's more than one, + // we have a conflict (two fields named "X" at the same level) and we + // return no field. + if len(fields) > 1 { + return field{}, false + } + return fields[0], true +} + +var fieldCache struct { + sync.RWMutex + m map[reflect.Type][]field +} + +// cachedTypeFields is like typeFields but uses a cache to avoid repeated work. +func cachedTypeFields(t reflect.Type) []field { + fieldCache.RLock() + f := fieldCache.m[t] + fieldCache.RUnlock() + if f != nil { + return f + } + + // Compute fields without lock. + // Might duplicate effort but won't hold other computations back. + f = typeFields(t) + if f == nil { + f = []field{} + } + + fieldCache.Lock() + if fieldCache.m == nil { + fieldCache.m = map[reflect.Type][]field{} + } + fieldCache.m[t] = f + fieldCache.Unlock() + return f +} diff --git a/vendor/github.com/Djarvur/go-err113/.gitignore b/vendor/github.com/Djarvur/go-err113/.gitignore new file mode 100644 index 000000000..66fd13c90 --- /dev/null +++ b/vendor/github.com/Djarvur/go-err113/.gitignore @@ -0,0 +1,15 @@ +# Binaries for programs and plugins +*.exe +*.exe~ +*.dll +*.so +*.dylib + +# Test binary, built with `go test -c` +*.test + +# Output of the go coverage tool, specifically when used with LiteIDE +*.out + +# Dependency directories (remove the comment below to include it) +# vendor/ diff --git a/vendor/github.com/Djarvur/go-err113/.golangci.yml b/vendor/github.com/Djarvur/go-err113/.golangci.yml new file mode 100644 index 000000000..2abdfc639 --- /dev/null +++ b/vendor/github.com/Djarvur/go-err113/.golangci.yml @@ -0,0 +1,150 @@ +# This file contains all available configuration options +# with their default values. + +# options for analysis running +run: + # default concurrency is a available CPU number + concurrency: 4 + + # timeout for analysis, e.g. 30s, 5m, default is 1m + deadline: 15m + + # exit code when at least one issue was found, default is 1 + issues-exit-code: 1 + + # include test files or not, default is true + tests: false + + # list of build tags, all linters use it. Default is empty list. + #build-tags: + # - mytag + + # which dirs to skip: they won't be analyzed; + # can use regexp here: generated.*, regexp is applied on full path; + # default value is empty list, but next dirs are always skipped independently + # from this option's value: + # vendor$, third_party$, testdata$, examples$, Godeps$, builtin$ + skip-dirs: + - /gen$ + + # which files to skip: they will be analyzed, but issues from them + # won't be reported. Default value is empty list, but there is + # no need to include all autogenerated files, we confidently recognize + # autogenerated files. If it's not please let us know. 
+ skip-files: + - ".*\\.my\\.go$" + - lib/bad.go + - ".*\\.template\\.go$" + +# output configuration options +output: + # colored-line-number|line-number|json|tab|checkstyle, default is "colored-line-number" + format: colored-line-number + + # print lines of code with issue, default is true + print-issued-lines: true + + # print linter name in the end of issue text, default is true + print-linter-name: true + +# all available settings of specific linters +linters-settings: + errcheck: + # report about not checking of errors in type assetions: `a := b.(MyStruct)`; + # default is false: such cases aren't reported by default. + check-type-assertions: false + + # report about assignment of errors to blank identifier: `num, _ := strconv.Atoi(numStr)`; + # default is false: such cases aren't reported by default. + check-blank: false + govet: + # report about shadowed variables + check-shadowing: true + + # Obtain type information from installed (to $GOPATH/pkg) package files: + # golangci-lint will execute `go install -i` and `go test -i` for analyzed packages + # before analyzing them. + # By default this option is disabled and govet gets type information by loader from source code. + # Loading from source code is slow, but it's done only once for all linters. + # Go-installing of packages first time is much slower than loading them from source code, + # therefore this option is disabled by default. + # But repeated installation is fast in go >= 1.10 because of build caching. + # Enable this option only if all conditions are met: + # 1. you use only "fast" linters (--fast e.g.): no program loading occurs + # 2. you use go >= 1.10 + # 3. you do repeated runs (false for CI) or cache $GOPATH/pkg or `go env GOCACHE` dir in CI. + use-installed-packages: false + golint: + # minimal confidence for issues, default is 0.8 + min-confidence: 0.8 + gofmt: + # simplify code: gofmt with `-s` option, true by default + simplify: true + gocyclo: + # minimal code complexity to report, 30 by default (but we recommend 10-20) + min-complexity: 10 + maligned: + # print struct with more effective memory layout or not, false by default + suggest-new: true + dupl: + # tokens count to trigger issue, 150 by default + threshold: 100 + goconst: + # minimal length of string constant, 3 by default + min-len: 3 + # minimal occurrences count to trigger, 3 by default + min-occurrences: 3 + depguard: + list-type: blacklist + include-go-root: false + packages: + - github.com/davecgh/go-spew/spew + +linters: + #enable: + # - staticcheck + # - unused + # - gosimple + enable-all: true + disable: + - lll + disable-all: false + #presets: + # - bugs + # - unused + fast: false + +issues: + # List of regexps of issue texts to exclude, empty list by default. + # But independently from this option we use default exclude patterns, + # it can be disabled by `exclude-use-default: false`. To list all + # excluded by default patterns execute `golangci-lint run --help` + exclude: + - "`parseTained` is unused" + - "`parseState` is unused" + + # Independently from option `exclude` we use default exclude patterns, + # it can be disabled by this option. To list all + # excluded by default patterns execute `golangci-lint run --help`. + # Default value for this option is false. + exclude-use-default: false + + # Maximum issues count per one linter. Set to 0 to disable. Default is 50. + max-per-linter: 0 + + # Maximum count of issues with the same text. Set to 0 to disable. Default is 3. 
+ max-same: 0 + + # Show only new issues: if there are unstaged changes or untracked files, + # only those changes are analyzed, else only changes in HEAD~ are analyzed. + # It's a super-useful option for integration of golangci-lint into existing + # large codebase. It's not practical to fix all existing issues at the moment + # of integration: much better don't allow issues in new code. + # Default is false. + new: false + + # Show only new issues created after git revision `REV` + #new-from-rev: REV + + # Show only new issues created in git patch with set file path. + #new-from-patch: path/to/patch/file \ No newline at end of file diff --git a/vendor/github.com/Djarvur/go-err113/.travis.yml b/vendor/github.com/Djarvur/go-err113/.travis.yml new file mode 100644 index 000000000..44fe77d53 --- /dev/null +++ b/vendor/github.com/Djarvur/go-err113/.travis.yml @@ -0,0 +1,24 @@ +language: go + +go: + - "1.13" + - "1.14" + - tip + +env: + - GO111MODULE=on + +before_install: + - go get github.com/axw/gocov/gocov + - go get github.com/mattn/goveralls + - go get golang.org/x/tools/cmd/cover + - go get golang.org/x/tools/cmd/goimports + - wget -O - -q https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh| sh + +script: + - test -z "$(goimports -d ./ 2>&1)" + - ./bin/golangci-lint run + - go test -v -race ./... + +after_success: + - test "$TRAVIS_GO_VERSION" = "1.14" && goveralls -service=travis-ci diff --git a/vendor/github.com/Djarvur/go-err113/LICENSE b/vendor/github.com/Djarvur/go-err113/LICENSE new file mode 100644 index 000000000..a78ad8c77 --- /dev/null +++ b/vendor/github.com/Djarvur/go-err113/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2020 Djarvur + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/vendor/github.com/Djarvur/go-err113/README.adoc b/vendor/github.com/Djarvur/go-err113/README.adoc
new file mode 100644
index 000000000..b26af4038
--- /dev/null
+++ b/vendor/github.com/Djarvur/go-err113/README.adoc
@@ -0,0 +1,75 @@
+= err113 image:https://godoc.org/github.com/Djarvur/go-err113?status.svg["GoDoc",link="http://godoc.org/github.com/Djarvur/go-err113"] image:https://travis-ci.org/Djarvur/go-err113.svg["Build Status",link="https://travis-ci.org/Djarvur/go-err113"] image:https://coveralls.io/repos/Djarvur/go-err113/badge.svg?branch=master&service=github["Coverage Status",link="https://coveralls.io/github/Djarvur/go-err113?branch=master"]
+Daniel Podolsky
+:toc:
+
+A Go linter that checks error handling expressions.
+
+== Details
+
+Starting with Go 1.13, the behaviour of the standard `error` type changed: one `error` can be derived from another with `fmt.Errorf()` using the `%w` format specifier.
+
+This makes it possible to build an error hierarchy for flexible and reliable error processing.
+
+To take advantage of this, at least two simple rules should be followed:
+
+1. `error` values should not be compared directly, but with `errors.Is()`.
+1. an `error` should not be created dynamically from scratch, but by wrapping a static (package-level) error.
+
+This linter checks the code for compliance with these two rules.
+
+=== Reports
+
+`err113` reports every `==` and `!=` comparison between variables of the exact `error` type, except comparisons to `nil` and `io.EOF`.
+
+It also reports every call to `errors.New()` and `fmt.Errorf()`, except calls used to initialise package-level variables and `fmt.Errorf()` calls that wrap another error.
+
+Note: non-standard packages, such as `github.com/pkg/errors`, are ignored completely.
+
+== Install
+
+```
+go get -u github.com/Djarvur/go-err113/cmd/err113
+```
+
+== Usage
+
+Defined by the link:https://pkg.go.dev/golang.org/x/tools/go/analysis/singlechecker[singlechecker] package.
+
+```
+err113: checks the error handling rules according to the Go 1.13 new error type
+
+Usage: err113 [-flag] [package]
+
+
+Flags:
+  -V	print version and exit
+  -all
+    	no effect (deprecated)
+  -c int
+    	display offending line with this many lines of context (default -1)
+  -cpuprofile string
+    	write CPU profile to this file
+  -debug string
+    	debug flags, any subset of "fpstv"
+  -fix
+    	apply all suggested fixes
+  -flags
+    	print analyzer flags in JSON
+  -json
+    	emit JSON output
+  -memprofile string
+    	write memory profile to this file
+  -source
+    	no effect (deprecated)
+  -tags string
+    	no effect (deprecated)
+  -trace string
+    	write trace log to this file
+  -v	no effect (deprecated)
+```
+
+== Thanks
+
+To link:https://github.com/quasilyte[Iskander (Alex) Sharipov] for the really useful advice.
+
+To link:https://github.com/jackwhelpton[Jack Whelpton] for the bugfix provided.
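+
+== Example
+
+For illustration only, a minimal sketch of the style the two rules above encourage. The `ErrNotFound` sentinel, the `user` package, and the `FindUser` function are invented for this example and are not part of this repository:
+
+```
+// Hypothetical example package; names are illustrative only.
+package user
+
+import (
+	"errors"
+	"fmt"
+)
+
+// ErrNotFound is a static, package-level sentinel error (rule 2).
+var ErrNotFound = errors.New("user not found")
+
+// FindUser wraps the sentinel with %w instead of creating a new
+// dynamic error on every call.
+func FindUser(name string) error {
+	if name == "" {
+		return fmt.Errorf("find user %q: %w", name, ErrNotFound)
+	}
+	return nil
+}
+
+// Callers check the result with errors.Is() instead of == (rule 1):
+//
+//	if errors.Is(FindUser(n), ErrNotFound) { ... }
+```
+
+Returning `errors.New("user not found")` directly from `FindUser`, or comparing its result with `==`, is exactly what `err113` would report.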
\ No newline at end of file diff --git a/vendor/github.com/Djarvur/go-err113/comparison.go b/vendor/github.com/Djarvur/go-err113/comparison.go new file mode 100644 index 000000000..8a8555783 --- /dev/null +++ b/vendor/github.com/Djarvur/go-err113/comparison.go @@ -0,0 +1,123 @@ +package err113 + +import ( + "fmt" + "go/ast" + "go/token" + "go/types" + + "golang.org/x/tools/go/analysis" +) + +func inspectComparision(pass *analysis.Pass, n ast.Node) bool { // nolint: unparam + // check whether the call expression matches time.Now().Sub() + be, ok := n.(*ast.BinaryExpr) + if !ok { + return true + } + + // check if it is a comparison operation + if be.Op != token.EQL && be.Op != token.NEQ { + return true + } + + if !areBothErrors(be.X, be.Y, pass.TypesInfo) { + return true + } + + oldExpr := render(pass.Fset, be) + + negate := "" + if be.Op == token.NEQ { + negate = "!" + } + + newExpr := fmt.Sprintf("%s%s.Is(%s, %s)", negate, "errors", rawString(be.X), rawString(be.Y)) + + pass.Report( + analysis.Diagnostic{ + Pos: be.Pos(), + Message: fmt.Sprintf("do not compare errors directly %q, use %q instead", oldExpr, newExpr), + SuggestedFixes: []analysis.SuggestedFix{ + { + Message: fmt.Sprintf("should replace %q with %q", oldExpr, newExpr), + TextEdits: []analysis.TextEdit{ + { + Pos: be.Pos(), + End: be.End(), + NewText: []byte(newExpr), + }, + }, + }, + }, + }, + ) + + return true +} + +func isError(v ast.Expr, info *types.Info) bool { + if intf, ok := info.TypeOf(v).Underlying().(*types.Interface); ok { + return intf.NumMethods() == 1 && intf.Method(0).FullName() == "(error).Error" + } + + return false +} + +func isEOF(ex ast.Expr, info *types.Info) bool { + se, ok := ex.(*ast.SelectorExpr) + if !ok || se.Sel.Name != "EOF" { + return false + } + + if ep, ok := asImportedName(se.X, info); !ok || ep != "io" { + return false + } + + return true +} + +func asImportedName(ex ast.Expr, info *types.Info) (string, bool) { + ei, ok := ex.(*ast.Ident) + if !ok { + return "", false + } + + ep, ok := info.ObjectOf(ei).(*types.PkgName) + if !ok { + return "", false + } + + return ep.Imported().Path(), true +} + +func areBothErrors(x, y ast.Expr, typesInfo *types.Info) bool { + // check that both left and right hand side are not nil + if typesInfo.Types[x].IsNil() || typesInfo.Types[y].IsNil() { + return false + } + + // check that both left and right hand side are not io.EOF + if isEOF(x, typesInfo) || isEOF(y, typesInfo) { + return false + } + + // check that both left and right hand side are errors + if !isError(x, typesInfo) && !isError(y, typesInfo) { + return false + } + + return true +} + +func rawString(x ast.Expr) string { + switch t := x.(type) { + case *ast.Ident: + return t.Name + case *ast.SelectorExpr: + return fmt.Sprintf("%s.%s", rawString(t.X), t.Sel.Name) + case *ast.CallExpr: + return fmt.Sprintf("%s()", rawString(t.Fun)) + } + return fmt.Sprintf("%s", x) +} diff --git a/vendor/github.com/Djarvur/go-err113/definition.go b/vendor/github.com/Djarvur/go-err113/definition.go new file mode 100644 index 000000000..689236bac --- /dev/null +++ b/vendor/github.com/Djarvur/go-err113/definition.go @@ -0,0 +1,74 @@ +package err113 + +import ( + "go/ast" + "go/types" + "strings" + + "golang.org/x/tools/go/analysis" +) + +var methods2check = map[string]map[string]func(*ast.CallExpr, *types.Info) bool{ // nolint: gochecknoglobals + "errors": {"New": justTrue}, + "fmt": {"Errorf": checkWrap}, +} + +func justTrue(*ast.CallExpr, *types.Info) bool { + return true +} + +func checkWrap(ce *ast.CallExpr, info 
*types.Info) bool { + return !(len(ce.Args) > 0 && strings.Contains(toString(ce.Args[0], info), `%w`)) +} + +func inspectDefinition(pass *analysis.Pass, tlds map[*ast.CallExpr]struct{}, n ast.Node) bool { //nolint: unparam + // check whether the call expression matches time.Now().Sub() + ce, ok := n.(*ast.CallExpr) + if !ok { + return true + } + + if _, ok = tlds[ce]; ok { + return true + } + + fn, ok := ce.Fun.(*ast.SelectorExpr) + if !ok { + return true + } + + fxName, ok := asImportedName(fn.X, pass.TypesInfo) + if !ok { + return true + } + + methods, ok := methods2check[fxName] + if !ok { + return true + } + + checkFunc, ok := methods[fn.Sel.Name] + if !ok { + return true + } + + if !checkFunc(ce, pass.TypesInfo) { + return true + } + + pass.Reportf( + ce.Pos(), + "do not define dynamic errors, use wrapped static errors instead: %q", + render(pass.Fset, ce), + ) + + return true +} + +func toString(ex ast.Expr, info *types.Info) string { + if tv, ok := info.Types[ex]; ok && tv.Value != nil { + return tv.Value.ExactString() + } + + return "" +} diff --git a/vendor/github.com/Djarvur/go-err113/err113.go b/vendor/github.com/Djarvur/go-err113/err113.go new file mode 100644 index 000000000..ec4f52ac7 --- /dev/null +++ b/vendor/github.com/Djarvur/go-err113/err113.go @@ -0,0 +1,90 @@ +// Package err113 is a Golang linter to check the errors handling expressions +package err113 + +import ( + "bytes" + "go/ast" + "go/printer" + "go/token" + + "golang.org/x/tools/go/analysis" +) + +// NewAnalyzer creates a new analysis.Analyzer instance tuned to run err113 checks. +func NewAnalyzer() *analysis.Analyzer { + return &analysis.Analyzer{ + Name: "err113", + Doc: "checks the error handling rules according to the Go 1.13 new error type", + Run: run, + } +} + +func run(pass *analysis.Pass) (interface{}, error) { + for _, file := range pass.Files { + tlds := enumerateFileDecls(file) + + ast.Inspect( + file, + func(n ast.Node) bool { + return inspectComparision(pass, n) && + inspectDefinition(pass, tlds, n) + }, + ) + } + + return nil, nil +} + +// render returns the pretty-print of the given node. +func render(fset *token.FileSet, x interface{}) string { + var buf bytes.Buffer + if err := printer.Fprint(&buf, fset, x); err != nil { + panic(err) + } + + return buf.String() +} + +func enumerateFileDecls(f *ast.File) map[*ast.CallExpr]struct{} { + res := make(map[*ast.CallExpr]struct{}) + + var ces []*ast.CallExpr // nolint: prealloc + + for _, d := range f.Decls { + ces = append(ces, enumerateDeclVars(d)...) + } + + for _, ce := range ces { + res[ce] = struct{}{} + } + + return res +} + +func enumerateDeclVars(d ast.Decl) (res []*ast.CallExpr) { + td, ok := d.(*ast.GenDecl) + if !ok || td.Tok != token.VAR { + return nil + } + + for _, s := range td.Specs { + res = append(res, enumerateSpecValues(s)...) 
+ } + + return res +} + +func enumerateSpecValues(s ast.Spec) (res []*ast.CallExpr) { + vs, ok := s.(*ast.ValueSpec) + if !ok { + return nil + } + + for _, v := range vs.Values { + if ce, ok := v.(*ast.CallExpr); ok { + res = append(res, ce) + } + } + + return res +} diff --git a/vendor/github.com/Djarvur/go-err113/go.mod b/vendor/github.com/Djarvur/go-err113/go.mod new file mode 100644 index 000000000..6e28e9336 --- /dev/null +++ b/vendor/github.com/Djarvur/go-err113/go.mod @@ -0,0 +1,5 @@ +module github.com/Djarvur/go-err113 + +go 1.13 + +require golang.org/x/tools v0.0.0-20200324003944-a576cf524670 diff --git a/vendor/github.com/Djarvur/go-err113/go.sum b/vendor/github.com/Djarvur/go-err113/go.sum new file mode 100644 index 000000000..dab64209d --- /dev/null +++ b/vendor/github.com/Djarvur/go-err113/go.sum @@ -0,0 +1,20 @@ +github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/mod v0.2.0 h1:KU7oHjnv3XNWfa5COkzUifxZmxp1TyI7ImMXqFxLwvQ= +golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20200324003944-a576cf524670 h1:fW7EP/GZqIvbHessHd1PLca+77TBOsRBqtaybMgXJq8= +golang.org/x/tools v0.0.0-20200324003944-a576cf524670/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8= +golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543 h1:E7g+9GITq07hpfrRu66IVDexMakfv52eLZ2CXBWiKr4= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= diff --git a/vendor/github.com/Masterminds/semver/.travis.yml b/vendor/github.com/Masterminds/semver/.travis.yml new file mode 100644 index 000000000..096369d44 --- /dev/null +++ b/vendor/github.com/Masterminds/semver/.travis.yml @@ -0,0 +1,29 @@ +language: go + +go: + - 1.6.x + - 1.7.x + - 1.8.x + - 1.9.x + - 1.10.x + - 1.11.x + - 1.12.x + - tip + +# Setting sudo access to false will let Travis CI use containers rather than +# VMs to run the tests. 
For more details see: +# - http://docs.travis-ci.com/user/workers/container-based-infrastructure/ +# - http://docs.travis-ci.com/user/workers/standard-infrastructure/ +sudo: false + +script: + - make setup + - make test + +notifications: + webhooks: + urls: + - https://webhooks.gitter.im/e/06e3328629952dabe3e0 + on_success: change # options: [always|never|change] default: always + on_failure: always # options: [always|never|change] default: always + on_start: never # options: [always|never|change] default: always diff --git a/vendor/github.com/Masterminds/semver/CHANGELOG.md b/vendor/github.com/Masterminds/semver/CHANGELOG.md new file mode 100644 index 000000000..e405c9a84 --- /dev/null +++ b/vendor/github.com/Masterminds/semver/CHANGELOG.md @@ -0,0 +1,109 @@ +# 1.5.0 (2019-09-11) + +## Added + +- #103: Add basic fuzzing for `NewVersion()` (thanks @jesse-c) + +## Changed + +- #82: Clarify wildcard meaning in range constraints and update tests for it (thanks @greysteil) +- #83: Clarify caret operator range for pre-1.0.0 dependencies (thanks @greysteil) +- #72: Adding docs comment pointing to vert for a cli +- #71: Update the docs on pre-release comparator handling +- #89: Test with new go versions (thanks @thedevsaddam) +- #87: Added $ to ValidPrerelease for better validation (thanks @jeremycarroll) + +## Fixed + +- #78: Fix unchecked error in example code (thanks @ravron) +- #70: Fix the handling of pre-releases and the 0.0.0 release edge case +- #97: Fixed copyright file for proper display on GitHub +- #107: Fix handling prerelease when sorting alphanum and num +- #109: Fixed where Validate sometimes returns wrong message on error + +# 1.4.2 (2018-04-10) + +## Changed +- #72: Updated the docs to point to vert for a console appliaction +- #71: Update the docs on pre-release comparator handling + +## Fixed +- #70: Fix the handling of pre-releases and the 0.0.0 release edge case + +# 1.4.1 (2018-04-02) + +## Fixed +- Fixed #64: Fix pre-release precedence issue (thanks @uudashr) + +# 1.4.0 (2017-10-04) + +## Changed +- #61: Update NewVersion to parse ints with a 64bit int size (thanks @zknill) + +# 1.3.1 (2017-07-10) + +## Fixed +- Fixed #57: number comparisons in prerelease sometimes inaccurate + +# 1.3.0 (2017-05-02) + +## Added +- #45: Added json (un)marshaling support (thanks @mh-cbon) +- Stability marker. See https://masterminds.github.io/stability/ + +## Fixed +- #51: Fix handling of single digit tilde constraint (thanks @dgodd) + +## Changed +- #55: The godoc icon moved from png to svg + +# 1.2.3 (2017-04-03) + +## Fixed +- #46: Fixed 0.x.x and 0.0.x in constraints being treated as * + +# Release 1.2.2 (2016-12-13) + +## Fixed +- #34: Fixed issue where hyphen range was not working with pre-release parsing. + +# Release 1.2.1 (2016-11-28) + +## Fixed +- #24: Fixed edge case issue where constraint "> 0" does not handle "0.0.1-alpha" + properly. + +# Release 1.2.0 (2016-11-04) + +## Added +- #20: Added MustParse function for versions (thanks @adamreese) +- #15: Added increment methods on versions (thanks @mh-cbon) + +## Fixed +- Issue #21: Per the SemVer spec (section 9) a pre-release is unstable and + might not satisfy the intended compatibility. The change here ignores pre-releases + on constraint checks (e.g., ~ or ^) when a pre-release is not part of the + constraint. For example, `^1.2.3` will ignore pre-releases while + `^1.2.3-alpha` will include them. 
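The Issue #21 note above describes constraint behaviour that a small sketch can illustrate, assuming the `Constraints`/`Version` API documented in this package's README later in this patch (the version and constraint strings here are invented):

```go
package main

import (
	"fmt"

	"github.com/Masterminds/semver"
)

func main() {
	v := semver.MustParse("1.2.4-alpha")

	plain, _ := semver.NewConstraint("^1.2.3")         // no pre-release in the constraint
	withPre, _ := semver.NewConstraint("^1.2.3-alpha") // pre-release opts in

	fmt.Println(plain.Check(v))   // false: pre-release versions are skipped
	fmt.Println(withPre.Check(v)) // true: the constraint includes a pre-release
}
```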
+ +# Release 1.1.1 (2016-06-30) + +## Changed +- Issue #9: Speed up version comparison performance (thanks @sdboyer) +- Issue #8: Added benchmarks (thanks @sdboyer) +- Updated Go Report Card URL to new location +- Updated Readme to add code snippet formatting (thanks @mh-cbon) +- Updating tagging to v[SemVer] structure for compatibility with other tools. + +# Release 1.1.0 (2016-03-11) + +- Issue #2: Implemented validation to provide reasons a versions failed a + constraint. + +# Release 1.0.1 (2015-12-31) + +- Fixed #1: * constraint failing on valid versions. + +# Release 1.0.0 (2015-10-20) + +- Initial release diff --git a/vendor/github.com/Masterminds/semver/LICENSE.txt b/vendor/github.com/Masterminds/semver/LICENSE.txt new file mode 100644 index 000000000..9ff7da9c4 --- /dev/null +++ b/vendor/github.com/Masterminds/semver/LICENSE.txt @@ -0,0 +1,19 @@ +Copyright (C) 2014-2019, Matt Butcher and Matt Farina + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/vendor/github.com/Masterminds/semver/Makefile b/vendor/github.com/Masterminds/semver/Makefile new file mode 100644 index 000000000..a7a1b4e36 --- /dev/null +++ b/vendor/github.com/Masterminds/semver/Makefile @@ -0,0 +1,36 @@ +.PHONY: setup +setup: + go get -u gopkg.in/alecthomas/gometalinter.v1 + gometalinter.v1 --install + +.PHONY: test +test: validate lint + @echo "==> Running tests" + go test -v + +.PHONY: validate +validate: + @echo "==> Running static validations" + @gometalinter.v1 \ + --disable-all \ + --enable deadcode \ + --severity deadcode:error \ + --enable gofmt \ + --enable gosimple \ + --enable ineffassign \ + --enable misspell \ + --enable vet \ + --tests \ + --vendor \ + --deadline 60s \ + ./... || exit_code=1 + +.PHONY: lint +lint: + @echo "==> Running linters" + @gometalinter.v1 \ + --disable-all \ + --enable golint \ + --vendor \ + --deadline 60s \ + ./... || : diff --git a/vendor/github.com/Masterminds/semver/README.md b/vendor/github.com/Masterminds/semver/README.md new file mode 100644 index 000000000..1b52d2f43 --- /dev/null +++ b/vendor/github.com/Masterminds/semver/README.md @@ -0,0 +1,194 @@ +# SemVer + +The `semver` package provides the ability to work with [Semantic Versions](http://semver.org) in Go. 
Specifically it provides the ability to: + +* Parse semantic versions +* Sort semantic versions +* Check if a semantic version fits within a set of constraints +* Optionally work with a `v` prefix + +[![Stability: +Active](https://masterminds.github.io/stability/active.svg)](https://masterminds.github.io/stability/active.html) +[![Build Status](https://travis-ci.org/Masterminds/semver.svg)](https://travis-ci.org/Masterminds/semver) [![Build status](https://ci.appveyor.com/api/projects/status/jfk66lib7hb985k8/branch/master?svg=true&passingText=windows%20build%20passing&failingText=windows%20build%20failing)](https://ci.appveyor.com/project/mattfarina/semver/branch/master) [![GoDoc](https://godoc.org/github.com/Masterminds/semver?status.svg)](https://godoc.org/github.com/Masterminds/semver) [![Go Report Card](https://goreportcard.com/badge/github.com/Masterminds/semver)](https://goreportcard.com/report/github.com/Masterminds/semver) + +If you are looking for a command line tool for version comparisons please see +[vert](https://github.com/Masterminds/vert) which uses this library. + +## Parsing Semantic Versions + +To parse a semantic version use the `NewVersion` function. For example, + +```go + v, err := semver.NewVersion("1.2.3-beta.1+build345") +``` + +If there is an error the version wasn't parseable. The version object has methods +to get the parts of the version, compare it to other versions, convert the +version back into a string, and get the original string. For more details +please see the [documentation](https://godoc.org/github.com/Masterminds/semver). + +## Sorting Semantic Versions + +A set of versions can be sorted using the [`sort`](https://golang.org/pkg/sort/) +package from the standard library. For example, + +```go + raw := []string{"1.2.3", "1.0", "1.3", "2", "0.4.2",} + vs := make([]*semver.Version, len(raw)) + for i, r := range raw { + v, err := semver.NewVersion(r) + if err != nil { + t.Errorf("Error parsing version: %s", err) + } + + vs[i] = v + } + + sort.Sort(semver.Collection(vs)) +``` + +## Checking Version Constraints + +Checking a version against version constraints is one of the most featureful +parts of the package. + +```go + c, err := semver.NewConstraint(">= 1.2.3") + if err != nil { + // Handle constraint not being parseable. + } + + v, _ := semver.NewVersion("1.3") + if err != nil { + // Handle version not being parseable. + } + // Check if the version meets the constraints. The a variable will be true. + a := c.Check(v) +``` + +## Basic Comparisons + +There are two elements to the comparisons. First, a comparison string is a list +of comma separated and comparisons. These are then separated by || separated or +comparisons. For example, `">= 1.2, < 3.0.0 || >= 4.2.3"` is looking for a +comparison that's greater than or equal to 1.2 and less than 3.0.0 or is +greater than or equal to 4.2.3. + +The basic comparisons are: + +* `=`: equal (aliased to no operator) +* `!=`: not equal +* `>`: greater than +* `<`: less than +* `>=`: greater than or equal to +* `<=`: less than or equal to + +## Working With Pre-release Versions + +Pre-releases, for those not familiar with them, are used for software releases +prior to stable or generally available releases. Examples of pre-releases include +development, alpha, beta, and release candidate releases. A pre-release may be +a version such as `1.2.3-beta.1` while the stable release would be `1.2.3`. In the +order of precidence, pre-releases come before their associated releases. 
In this +example `1.2.3-beta.1 < 1.2.3`. + +According to the Semantic Version specification pre-releases may not be +API compliant with their release counterpart. It says, + +> A pre-release version indicates that the version is unstable and might not satisfy the intended compatibility requirements as denoted by its associated normal version. + +SemVer comparisons without a pre-release comparator will skip pre-release versions. +For example, `>=1.2.3` will skip pre-releases when looking at a list of releases +while `>=1.2.3-0` will evaluate and find pre-releases. + +The reason for the `0` as a pre-release version in the example comparison is +because pre-releases can only contain ASCII alphanumerics and hyphens (along with +`.` separators), per the spec. Sorting happens in ASCII sort order, again per the spec. The lowest character is a `0` in ASCII sort order (see an [ASCII Table](http://www.asciitable.com/)) + +Understanding ASCII sort ordering is important because A-Z comes before a-z. That +means `>=1.2.3-BETA` will return `1.2.3-alpha`. What you might expect from case +sensitivity doesn't apply here. This is due to ASCII sort ordering which is what +the spec specifies. + +## Hyphen Range Comparisons + +There are multiple methods to handle ranges and the first is hyphens ranges. +These look like: + +* `1.2 - 1.4.5` which is equivalent to `>= 1.2, <= 1.4.5` +* `2.3.4 - 4.5` which is equivalent to `>= 2.3.4, <= 4.5` + +## Wildcards In Comparisons + +The `x`, `X`, and `*` characters can be used as a wildcard character. This works +for all comparison operators. When used on the `=` operator it falls +back to the pack level comparison (see tilde below). For example, + +* `1.2.x` is equivalent to `>= 1.2.0, < 1.3.0` +* `>= 1.2.x` is equivalent to `>= 1.2.0` +* `<= 2.x` is equivalent to `< 3` +* `*` is equivalent to `>= 0.0.0` + +## Tilde Range Comparisons (Patch) + +The tilde (`~`) comparison operator is for patch level ranges when a minor +version is specified and major level changes when the minor number is missing. +For example, + +* `~1.2.3` is equivalent to `>= 1.2.3, < 1.3.0` +* `~1` is equivalent to `>= 1, < 2` +* `~2.3` is equivalent to `>= 2.3, < 2.4` +* `~1.2.x` is equivalent to `>= 1.2.0, < 1.3.0` +* `~1.x` is equivalent to `>= 1, < 2` + +## Caret Range Comparisons (Major) + +The caret (`^`) comparison operator is for major level changes. This is useful +when comparisons of API versions as a major change is API breaking. For example, + +* `^1.2.3` is equivalent to `>= 1.2.3, < 2.0.0` +* `^0.0.1` is equivalent to `>= 0.0.1, < 1.0.0` +* `^1.2.x` is equivalent to `>= 1.2.0, < 2.0.0` +* `^2.3` is equivalent to `>= 2.3, < 3` +* `^2.x` is equivalent to `>= 2.0.0, < 3` + +# Validation + +In addition to testing a version against a constraint, a version can be validated +against a constraint. When validation fails a slice of errors containing why a +version didn't meet the constraint is returned. For example, + +```go + c, err := semver.NewConstraint("<= 1.2.3, >= 1.4") + if err != nil { + // Handle constraint not being parseable. + } + + v, _ := semver.NewVersion("1.3") + if err != nil { + // Handle version not being parseable. + } + + // Validate a version against a constraint. + a, msgs := c.Validate(v) + // a is false + for _, m := range msgs { + fmt.Println(m) + + // Loops over the errors which would read + // "1.3 is greater than 1.2.3" + // "1.3 is less than 1.4" + } +``` + +# Fuzzing + + [dvyukov/go-fuzz](https://github.com/dvyukov/go-fuzz) is used for fuzzing. + +1. 
`go-fuzz-build` +2. `go-fuzz -workdir=fuzz` + +# Contribute + +If you find an issue or want to contribute please file an [issue](https://github.com/Masterminds/semver/issues) +or [create a pull request](https://github.com/Masterminds/semver/pulls). diff --git a/vendor/github.com/Masterminds/semver/appveyor.yml b/vendor/github.com/Masterminds/semver/appveyor.yml new file mode 100644 index 000000000..b2778df15 --- /dev/null +++ b/vendor/github.com/Masterminds/semver/appveyor.yml @@ -0,0 +1,44 @@ +version: build-{build}.{branch} + +clone_folder: C:\gopath\src\github.com\Masterminds\semver +shallow_clone: true + +environment: + GOPATH: C:\gopath + +platform: + - x64 + +install: + - go version + - go env + - go get -u gopkg.in/alecthomas/gometalinter.v1 + - set PATH=%PATH%;%GOPATH%\bin + - gometalinter.v1.exe --install + +build_script: + - go install -v ./... + +test_script: + - "gometalinter.v1 \ + --disable-all \ + --enable deadcode \ + --severity deadcode:error \ + --enable gofmt \ + --enable gosimple \ + --enable ineffassign \ + --enable misspell \ + --enable vet \ + --tests \ + --vendor \ + --deadline 60s \ + ./... || exit_code=1" + - "gometalinter.v1 \ + --disable-all \ + --enable golint \ + --vendor \ + --deadline 60s \ + ./... || :" + - go test -v + +deploy: off diff --git a/vendor/github.com/Masterminds/semver/collection.go b/vendor/github.com/Masterminds/semver/collection.go new file mode 100644 index 000000000..a78235895 --- /dev/null +++ b/vendor/github.com/Masterminds/semver/collection.go @@ -0,0 +1,24 @@ +package semver + +// Collection is a collection of Version instances and implements the sort +// interface. See the sort package for more details. +// https://golang.org/pkg/sort/ +type Collection []*Version + +// Len returns the length of a collection. The number of Version instances +// on the slice. +func (c Collection) Len() int { + return len(c) +} + +// Less is needed for the sort interface to compare two Version objects on the +// slice. If checks if one is less than the other. +func (c Collection) Less(i, j int) bool { + return c[i].LessThan(c[j]) +} + +// Swap is needed for the sort interface to replace the Version objects +// at two different positions in the slice. +func (c Collection) Swap(i, j int) { + c[i], c[j] = c[j], c[i] +} diff --git a/vendor/github.com/Masterminds/semver/constraints.go b/vendor/github.com/Masterminds/semver/constraints.go new file mode 100644 index 000000000..b94b93413 --- /dev/null +++ b/vendor/github.com/Masterminds/semver/constraints.go @@ -0,0 +1,423 @@ +package semver + +import ( + "errors" + "fmt" + "regexp" + "strings" +) + +// Constraints is one or more constraint that a semantic version can be +// checked against. +type Constraints struct { + constraints [][]*constraint +} + +// NewConstraint returns a Constraints instance that a Version instance can +// be checked against. If there is a parse error it will be returned. +func NewConstraint(c string) (*Constraints, error) { + + // Rewrite - ranges into a comparison operation. + c = rewriteRange(c) + + ors := strings.Split(c, "||") + or := make([][]*constraint, len(ors)) + for k, v := range ors { + cs := strings.Split(v, ",") + result := make([]*constraint, len(cs)) + for i, s := range cs { + pc, err := parseConstraint(s) + if err != nil { + return nil, err + } + + result[i] = pc + } + or[k] = result + } + + o := &Constraints{constraints: or} + return o, nil +} + +// Check tests if a version satisfies the constraints. 
+func (cs Constraints) Check(v *Version) bool { + // loop over the ORs and check the inner ANDs + for _, o := range cs.constraints { + joy := true + for _, c := range o { + if !c.check(v) { + joy = false + break + } + } + + if joy { + return true + } + } + + return false +} + +// Validate checks if a version satisfies a constraint. If not a slice of +// reasons for the failure are returned in addition to a bool. +func (cs Constraints) Validate(v *Version) (bool, []error) { + // loop over the ORs and check the inner ANDs + var e []error + + // Capture the prerelease message only once. When it happens the first time + // this var is marked + var prerelesase bool + for _, o := range cs.constraints { + joy := true + for _, c := range o { + // Before running the check handle the case there the version is + // a prerelease and the check is not searching for prereleases. + if c.con.pre == "" && v.pre != "" { + if !prerelesase { + em := fmt.Errorf("%s is a prerelease version and the constraint is only looking for release versions", v) + e = append(e, em) + prerelesase = true + } + joy = false + + } else { + + if !c.check(v) { + em := fmt.Errorf(c.msg, v, c.orig) + e = append(e, em) + joy = false + } + } + } + + if joy { + return true, []error{} + } + } + + return false, e +} + +var constraintOps map[string]cfunc +var constraintMsg map[string]string +var constraintRegex *regexp.Regexp + +func init() { + constraintOps = map[string]cfunc{ + "": constraintTildeOrEqual, + "=": constraintTildeOrEqual, + "!=": constraintNotEqual, + ">": constraintGreaterThan, + "<": constraintLessThan, + ">=": constraintGreaterThanEqual, + "=>": constraintGreaterThanEqual, + "<=": constraintLessThanEqual, + "=<": constraintLessThanEqual, + "~": constraintTilde, + "~>": constraintTilde, + "^": constraintCaret, + } + + constraintMsg = map[string]string{ + "": "%s is not equal to %s", + "=": "%s is not equal to %s", + "!=": "%s is equal to %s", + ">": "%s is less than or equal to %s", + "<": "%s is greater than or equal to %s", + ">=": "%s is less than %s", + "=>": "%s is less than %s", + "<=": "%s is greater than %s", + "=<": "%s is greater than %s", + "~": "%s does not have same major and minor version as %s", + "~>": "%s does not have same major and minor version as %s", + "^": "%s does not have same major version as %s", + } + + ops := make([]string, 0, len(constraintOps)) + for k := range constraintOps { + ops = append(ops, regexp.QuoteMeta(k)) + } + + constraintRegex = regexp.MustCompile(fmt.Sprintf( + `^\s*(%s)\s*(%s)\s*$`, + strings.Join(ops, "|"), + cvRegex)) + + constraintRangeRegex = regexp.MustCompile(fmt.Sprintf( + `\s*(%s)\s+-\s+(%s)\s*`, + cvRegex, cvRegex)) +} + +// An individual constraint +type constraint struct { + // The callback function for the restraint. It performs the logic for + // the constraint. + function cfunc + + msg string + + // The version used in the constraint check. For example, if a constraint + // is '<= 2.0.0' the con a version instance representing 2.0.0. 
+ con *Version + + // The original parsed version (e.g., 4.x from != 4.x) + orig string + + // When an x is used as part of the version (e.g., 1.x) + minorDirty bool + dirty bool + patchDirty bool +} + +// Check if a version meets the constraint +func (c *constraint) check(v *Version) bool { + return c.function(v, c) +} + +type cfunc func(v *Version, c *constraint) bool + +func parseConstraint(c string) (*constraint, error) { + m := constraintRegex.FindStringSubmatch(c) + if m == nil { + return nil, fmt.Errorf("improper constraint: %s", c) + } + + ver := m[2] + orig := ver + minorDirty := false + patchDirty := false + dirty := false + if isX(m[3]) { + ver = "0.0.0" + dirty = true + } else if isX(strings.TrimPrefix(m[4], ".")) || m[4] == "" { + minorDirty = true + dirty = true + ver = fmt.Sprintf("%s.0.0%s", m[3], m[6]) + } else if isX(strings.TrimPrefix(m[5], ".")) { + dirty = true + patchDirty = true + ver = fmt.Sprintf("%s%s.0%s", m[3], m[4], m[6]) + } + + con, err := NewVersion(ver) + if err != nil { + + // The constraintRegex should catch any regex parsing errors. So, + // we should never get here. + return nil, errors.New("constraint Parser Error") + } + + cs := &constraint{ + function: constraintOps[m[1]], + msg: constraintMsg[m[1]], + con: con, + orig: orig, + minorDirty: minorDirty, + patchDirty: patchDirty, + dirty: dirty, + } + return cs, nil +} + +// Constraint functions +func constraintNotEqual(v *Version, c *constraint) bool { + if c.dirty { + + // If there is a pre-release on the version but the constraint isn't looking + // for them assume that pre-releases are not compatible. See issue 21 for + // more details. + if v.Prerelease() != "" && c.con.Prerelease() == "" { + return false + } + + if c.con.Major() != v.Major() { + return true + } + if c.con.Minor() != v.Minor() && !c.minorDirty { + return true + } else if c.minorDirty { + return false + } + + return false + } + + return !v.Equal(c.con) +} + +func constraintGreaterThan(v *Version, c *constraint) bool { + + // If there is a pre-release on the version but the constraint isn't looking + // for them assume that pre-releases are not compatible. See issue 21 for + // more details. + if v.Prerelease() != "" && c.con.Prerelease() == "" { + return false + } + + return v.Compare(c.con) == 1 +} + +func constraintLessThan(v *Version, c *constraint) bool { + // If there is a pre-release on the version but the constraint isn't looking + // for them assume that pre-releases are not compatible. See issue 21 for + // more details. + if v.Prerelease() != "" && c.con.Prerelease() == "" { + return false + } + + if !c.dirty { + return v.Compare(c.con) < 0 + } + + if v.Major() > c.con.Major() { + return false + } else if v.Minor() > c.con.Minor() && !c.minorDirty { + return false + } + + return true +} + +func constraintGreaterThanEqual(v *Version, c *constraint) bool { + + // If there is a pre-release on the version but the constraint isn't looking + // for them assume that pre-releases are not compatible. See issue 21 for + // more details. + if v.Prerelease() != "" && c.con.Prerelease() == "" { + return false + } + + return v.Compare(c.con) >= 0 +} + +func constraintLessThanEqual(v *Version, c *constraint) bool { + // If there is a pre-release on the version but the constraint isn't looking + // for them assume that pre-releases are not compatible. See issue 21 for + // more details. 
+ if v.Prerelease() != "" && c.con.Prerelease() == "" { + return false + } + + if !c.dirty { + return v.Compare(c.con) <= 0 + } + + if v.Major() > c.con.Major() { + return false + } else if v.Minor() > c.con.Minor() && !c.minorDirty { + return false + } + + return true +} + +// ~*, ~>* --> >= 0.0.0 (any) +// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0, <3.0.0 +// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0, <2.1.0 +// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0, <1.3.0 +// ~1.2.3, ~>1.2.3 --> >=1.2.3, <1.3.0 +// ~1.2.0, ~>1.2.0 --> >=1.2.0, <1.3.0 +func constraintTilde(v *Version, c *constraint) bool { + // If there is a pre-release on the version but the constraint isn't looking + // for them assume that pre-releases are not compatible. See issue 21 for + // more details. + if v.Prerelease() != "" && c.con.Prerelease() == "" { + return false + } + + if v.LessThan(c.con) { + return false + } + + // ~0.0.0 is a special case where all constraints are accepted. It's + // equivalent to >= 0.0.0. + if c.con.Major() == 0 && c.con.Minor() == 0 && c.con.Patch() == 0 && + !c.minorDirty && !c.patchDirty { + return true + } + + if v.Major() != c.con.Major() { + return false + } + + if v.Minor() != c.con.Minor() && !c.minorDirty { + return false + } + + return true +} + +// When there is a .x (dirty) status it automatically opts in to ~. Otherwise +// it's a straight = +func constraintTildeOrEqual(v *Version, c *constraint) bool { + // If there is a pre-release on the version but the constraint isn't looking + // for them assume that pre-releases are not compatible. See issue 21 for + // more details. + if v.Prerelease() != "" && c.con.Prerelease() == "" { + return false + } + + if c.dirty { + c.msg = constraintMsg["~"] + return constraintTilde(v, c) + } + + return v.Equal(c.con) +} + +// ^* --> (any) +// ^2, ^2.x, ^2.x.x --> >=2.0.0, <3.0.0 +// ^2.0, ^2.0.x --> >=2.0.0, <3.0.0 +// ^1.2, ^1.2.x --> >=1.2.0, <2.0.0 +// ^1.2.3 --> >=1.2.3, <2.0.0 +// ^1.2.0 --> >=1.2.0, <2.0.0 +func constraintCaret(v *Version, c *constraint) bool { + // If there is a pre-release on the version but the constraint isn't looking + // for them assume that pre-releases are not compatible. See issue 21 for + // more details. + if v.Prerelease() != "" && c.con.Prerelease() == "" { + return false + } + + if v.LessThan(c.con) { + return false + } + + if v.Major() != c.con.Major() { + return false + } + + return true +} + +var constraintRangeRegex *regexp.Regexp + +const cvRegex string = `v?([0-9|x|X|\*]+)(\.[0-9|x|X|\*]+)?(\.[0-9|x|X|\*]+)?` + + `(-([0-9A-Za-z\-]+(\.[0-9A-Za-z\-]+)*))?` + + `(\+([0-9A-Za-z\-]+(\.[0-9A-Za-z\-]+)*))?` + +func isX(x string) bool { + switch x { + case "x", "*", "X": + return true + default: + return false + } +} + +func rewriteRange(i string) string { + m := constraintRangeRegex.FindAllStringSubmatch(i, -1) + if m == nil { + return i + } + o := i + for _, v := range m { + t := fmt.Sprintf(">= %s, <= %s", v[1], v[11]) + o = strings.Replace(o, v[0], t, 1) + } + + return o +} diff --git a/vendor/github.com/Masterminds/semver/doc.go b/vendor/github.com/Masterminds/semver/doc.go new file mode 100644 index 000000000..6a6c24c6d --- /dev/null +++ b/vendor/github.com/Masterminds/semver/doc.go @@ -0,0 +1,115 @@ +/* +Package semver provides the ability to work with Semantic Versions (http://semver.org) in Go. 
+ +Specifically it provides the ability to: + + * Parse semantic versions + * Sort semantic versions + * Check if a semantic version fits within a set of constraints + * Optionally work with a `v` prefix + +Parsing Semantic Versions + +To parse a semantic version use the `NewVersion` function. For example, + + v, err := semver.NewVersion("1.2.3-beta.1+build345") + +If there is an error the version wasn't parseable. The version object has methods +to get the parts of the version, compare it to other versions, convert the +version back into a string, and get the original string. For more details +please see the documentation at https://godoc.org/github.com/Masterminds/semver. + +Sorting Semantic Versions + +A set of versions can be sorted using the `sort` package from the standard library. +For example, + + raw := []string{"1.2.3", "1.0", "1.3", "2", "0.4.2",} + vs := make([]*semver.Version, len(raw)) + for i, r := range raw { + v, err := semver.NewVersion(r) + if err != nil { + t.Errorf("Error parsing version: %s", err) + } + + vs[i] = v + } + + sort.Sort(semver.Collection(vs)) + +Checking Version Constraints + +Checking a version against version constraints is one of the most featureful +parts of the package. + + c, err := semver.NewConstraint(">= 1.2.3") + if err != nil { + // Handle constraint not being parseable. + } + + v, err := semver.NewVersion("1.3") + if err != nil { + // Handle version not being parseable. + } + // Check if the version meets the constraints. The a variable will be true. + a := c.Check(v) + +Basic Comparisons + +There are two elements to the comparisons. First, a comparison string is a list +of comma separated and comparisons. These are then separated by || separated or +comparisons. For example, `">= 1.2, < 3.0.0 || >= 4.2.3"` is looking for a +comparison that's greater than or equal to 1.2 and less than 3.0.0 or is +greater than or equal to 4.2.3. + +The basic comparisons are: + + * `=`: equal (aliased to no operator) + * `!=`: not equal + * `>`: greater than + * `<`: less than + * `>=`: greater than or equal to + * `<=`: less than or equal to + +Hyphen Range Comparisons + +There are multiple methods to handle ranges and the first is hyphens ranges. +These look like: + + * `1.2 - 1.4.5` which is equivalent to `>= 1.2, <= 1.4.5` + * `2.3.4 - 4.5` which is equivalent to `>= 2.3.4, <= 4.5` + +Wildcards In Comparisons + +The `x`, `X`, and `*` characters can be used as a wildcard character. This works +for all comparison operators. When used on the `=` operator it falls +back to the pack level comparison (see tilde below). For example, + + * `1.2.x` is equivalent to `>= 1.2.0, < 1.3.0` + * `>= 1.2.x` is equivalent to `>= 1.2.0` + * `<= 2.x` is equivalent to `<= 3` + * `*` is equivalent to `>= 0.0.0` + +Tilde Range Comparisons (Patch) + +The tilde (`~`) comparison operator is for patch level ranges when a minor +version is specified and major level changes when the minor number is missing. +For example, + + * `~1.2.3` is equivalent to `>= 1.2.3, < 1.3.0` + * `~1` is equivalent to `>= 1, < 2` + * `~2.3` is equivalent to `>= 2.3, < 2.4` + * `~1.2.x` is equivalent to `>= 1.2.0, < 1.3.0` + * `~1.x` is equivalent to `>= 1, < 2` + +Caret Range Comparisons (Major) + +The caret (`^`) comparison operator is for major level changes. This is useful +when comparisons of API versions as a major change is API breaking. 
For example, + + * `^1.2.3` is equivalent to `>= 1.2.3, < 2.0.0` + * `^1.2.x` is equivalent to `>= 1.2.0, < 2.0.0` + * `^2.3` is equivalent to `>= 2.3, < 3` + * `^2.x` is equivalent to `>= 2.0.0, < 3` +*/ +package semver diff --git a/vendor/github.com/Masterminds/semver/version.go b/vendor/github.com/Masterminds/semver/version.go new file mode 100644 index 000000000..400d4f934 --- /dev/null +++ b/vendor/github.com/Masterminds/semver/version.go @@ -0,0 +1,425 @@ +package semver + +import ( + "bytes" + "encoding/json" + "errors" + "fmt" + "regexp" + "strconv" + "strings" +) + +// The compiled version of the regex created at init() is cached here so it +// only needs to be created once. +var versionRegex *regexp.Regexp +var validPrereleaseRegex *regexp.Regexp + +var ( + // ErrInvalidSemVer is returned a version is found to be invalid when + // being parsed. + ErrInvalidSemVer = errors.New("Invalid Semantic Version") + + // ErrInvalidMetadata is returned when the metadata is an invalid format + ErrInvalidMetadata = errors.New("Invalid Metadata string") + + // ErrInvalidPrerelease is returned when the pre-release is an invalid format + ErrInvalidPrerelease = errors.New("Invalid Prerelease string") +) + +// SemVerRegex is the regular expression used to parse a semantic version. +const SemVerRegex string = `v?([0-9]+)(\.[0-9]+)?(\.[0-9]+)?` + + `(-([0-9A-Za-z\-]+(\.[0-9A-Za-z\-]+)*))?` + + `(\+([0-9A-Za-z\-]+(\.[0-9A-Za-z\-]+)*))?` + +// ValidPrerelease is the regular expression which validates +// both prerelease and metadata values. +const ValidPrerelease string = `^([0-9A-Za-z\-]+(\.[0-9A-Za-z\-]+)*)$` + +// Version represents a single semantic version. +type Version struct { + major, minor, patch int64 + pre string + metadata string + original string +} + +func init() { + versionRegex = regexp.MustCompile("^" + SemVerRegex + "$") + validPrereleaseRegex = regexp.MustCompile(ValidPrerelease) +} + +// NewVersion parses a given version and returns an instance of Version or +// an error if unable to parse the version. +func NewVersion(v string) (*Version, error) { + m := versionRegex.FindStringSubmatch(v) + if m == nil { + return nil, ErrInvalidSemVer + } + + sv := &Version{ + metadata: m[8], + pre: m[5], + original: v, + } + + var temp int64 + temp, err := strconv.ParseInt(m[1], 10, 64) + if err != nil { + return nil, fmt.Errorf("Error parsing version segment: %s", err) + } + sv.major = temp + + if m[2] != "" { + temp, err = strconv.ParseInt(strings.TrimPrefix(m[2], "."), 10, 64) + if err != nil { + return nil, fmt.Errorf("Error parsing version segment: %s", err) + } + sv.minor = temp + } else { + sv.minor = 0 + } + + if m[3] != "" { + temp, err = strconv.ParseInt(strings.TrimPrefix(m[3], "."), 10, 64) + if err != nil { + return nil, fmt.Errorf("Error parsing version segment: %s", err) + } + sv.patch = temp + } else { + sv.patch = 0 + } + + return sv, nil +} + +// MustParse parses a given version and panics on error. +func MustParse(v string) *Version { + sv, err := NewVersion(v) + if err != nil { + panic(err) + } + return sv +} + +// String converts a Version object to a string. +// Note, if the original version contained a leading v this version will not. +// See the Original() method to retrieve the original value. Semantic Versions +// don't contain a leading v per the spec. Instead it's optional on +// implementation. 
+func (v *Version) String() string { + var buf bytes.Buffer + + fmt.Fprintf(&buf, "%d.%d.%d", v.major, v.minor, v.patch) + if v.pre != "" { + fmt.Fprintf(&buf, "-%s", v.pre) + } + if v.metadata != "" { + fmt.Fprintf(&buf, "+%s", v.metadata) + } + + return buf.String() +} + +// Original returns the original value passed in to be parsed. +func (v *Version) Original() string { + return v.original +} + +// Major returns the major version. +func (v *Version) Major() int64 { + return v.major +} + +// Minor returns the minor version. +func (v *Version) Minor() int64 { + return v.minor +} + +// Patch returns the patch version. +func (v *Version) Patch() int64 { + return v.patch +} + +// Prerelease returns the pre-release version. +func (v *Version) Prerelease() string { + return v.pre +} + +// Metadata returns the metadata on the version. +func (v *Version) Metadata() string { + return v.metadata +} + +// originalVPrefix returns the original 'v' prefix if any. +func (v *Version) originalVPrefix() string { + + // Note, only lowercase v is supported as a prefix by the parser. + if v.original != "" && v.original[:1] == "v" { + return v.original[:1] + } + return "" +} + +// IncPatch produces the next patch version. +// If the current version does not have prerelease/metadata information, +// it unsets metadata and prerelease values, increments patch number. +// If the current version has any of prerelease or metadata information, +// it unsets both values and keeps curent patch value +func (v Version) IncPatch() Version { + vNext := v + // according to http://semver.org/#spec-item-9 + // Pre-release versions have a lower precedence than the associated normal version. + // according to http://semver.org/#spec-item-10 + // Build metadata SHOULD be ignored when determining version precedence. + if v.pre != "" { + vNext.metadata = "" + vNext.pre = "" + } else { + vNext.metadata = "" + vNext.pre = "" + vNext.patch = v.patch + 1 + } + vNext.original = v.originalVPrefix() + "" + vNext.String() + return vNext +} + +// IncMinor produces the next minor version. +// Sets patch to 0. +// Increments minor number. +// Unsets metadata. +// Unsets prerelease status. +func (v Version) IncMinor() Version { + vNext := v + vNext.metadata = "" + vNext.pre = "" + vNext.patch = 0 + vNext.minor = v.minor + 1 + vNext.original = v.originalVPrefix() + "" + vNext.String() + return vNext +} + +// IncMajor produces the next major version. +// Sets patch to 0. +// Sets minor to 0. +// Increments major number. +// Unsets metadata. +// Unsets prerelease status. +func (v Version) IncMajor() Version { + vNext := v + vNext.metadata = "" + vNext.pre = "" + vNext.patch = 0 + vNext.minor = 0 + vNext.major = v.major + 1 + vNext.original = v.originalVPrefix() + "" + vNext.String() + return vNext +} + +// SetPrerelease defines the prerelease value. +// Value must not include the required 'hypen' prefix. +func (v Version) SetPrerelease(prerelease string) (Version, error) { + vNext := v + if len(prerelease) > 0 && !validPrereleaseRegex.MatchString(prerelease) { + return vNext, ErrInvalidPrerelease + } + vNext.pre = prerelease + vNext.original = v.originalVPrefix() + "" + vNext.String() + return vNext, nil +} + +// SetMetadata defines metadata value. +// Value must not include the required 'plus' prefix. 
+func (v Version) SetMetadata(metadata string) (Version, error) { + vNext := v + if len(metadata) > 0 && !validPrereleaseRegex.MatchString(metadata) { + return vNext, ErrInvalidMetadata + } + vNext.metadata = metadata + vNext.original = v.originalVPrefix() + "" + vNext.String() + return vNext, nil +} + +// LessThan tests if one version is less than another one. +func (v *Version) LessThan(o *Version) bool { + return v.Compare(o) < 0 +} + +// GreaterThan tests if one version is greater than another one. +func (v *Version) GreaterThan(o *Version) bool { + return v.Compare(o) > 0 +} + +// Equal tests if two versions are equal to each other. +// Note, versions can be equal with different metadata since metadata +// is not considered part of the comparable version. +func (v *Version) Equal(o *Version) bool { + return v.Compare(o) == 0 +} + +// Compare compares this version to another one. It returns -1, 0, or 1 if +// the version smaller, equal, or larger than the other version. +// +// Versions are compared by X.Y.Z. Build metadata is ignored. Prerelease is +// lower than the version without a prerelease. +func (v *Version) Compare(o *Version) int { + // Compare the major, minor, and patch version for differences. If a + // difference is found return the comparison. + if d := compareSegment(v.Major(), o.Major()); d != 0 { + return d + } + if d := compareSegment(v.Minor(), o.Minor()); d != 0 { + return d + } + if d := compareSegment(v.Patch(), o.Patch()); d != 0 { + return d + } + + // At this point the major, minor, and patch versions are the same. + ps := v.pre + po := o.Prerelease() + + if ps == "" && po == "" { + return 0 + } + if ps == "" { + return 1 + } + if po == "" { + return -1 + } + + return comparePrerelease(ps, po) +} + +// UnmarshalJSON implements JSON.Unmarshaler interface. +func (v *Version) UnmarshalJSON(b []byte) error { + var s string + if err := json.Unmarshal(b, &s); err != nil { + return err + } + temp, err := NewVersion(s) + if err != nil { + return err + } + v.major = temp.major + v.minor = temp.minor + v.patch = temp.patch + v.pre = temp.pre + v.metadata = temp.metadata + v.original = temp.original + temp = nil + return nil +} + +// MarshalJSON implements JSON.Marshaler interface. +func (v *Version) MarshalJSON() ([]byte, error) { + return json.Marshal(v.String()) +} + +func compareSegment(v, o int64) int { + if v < o { + return -1 + } + if v > o { + return 1 + } + + return 0 +} + +func comparePrerelease(v, o string) int { + + // split the prelease versions by their part. The separator, per the spec, + // is a . + sparts := strings.Split(v, ".") + oparts := strings.Split(o, ".") + + // Find the longer length of the parts to know how many loop iterations to + // go through. + slen := len(sparts) + olen := len(oparts) + + l := slen + if olen > slen { + l = olen + } + + // Iterate over each part of the prereleases to compare the differences. + for i := 0; i < l; i++ { + // Since the lentgh of the parts can be different we need to create + // a placeholder. This is to avoid out of bounds issues. + stemp := "" + if i < slen { + stemp = sparts[i] + } + + otemp := "" + if i < olen { + otemp = oparts[i] + } + + d := comparePrePart(stemp, otemp) + if d != 0 { + return d + } + } + + // Reaching here means two versions are of equal value but have different + // metadata (the part following a +). They are not identical in string form + // but the version comparison finds them to be equal. 
+ return 0 +} + +func comparePrePart(s, o string) int { + // Fastpath if they are equal + if s == o { + return 0 + } + + // When s or o are empty we can use the other in an attempt to determine + // the response. + if s == "" { + if o != "" { + return -1 + } + return 1 + } + + if o == "" { + if s != "" { + return 1 + } + return -1 + } + + // When comparing strings "99" is greater than "103". To handle + // cases like this we need to detect numbers and compare them. According + // to the semver spec, numbers are always positive. If there is a - at the + // start like -99 this is to be evaluated as an alphanum. numbers always + // have precedence over alphanum. Parsing as Uints because negative numbers + // are ignored. + + oi, n1 := strconv.ParseUint(o, 10, 64) + si, n2 := strconv.ParseUint(s, 10, 64) + + // The case where both are strings compare the strings + if n1 != nil && n2 != nil { + if s > o { + return 1 + } + return -1 + } else if n1 != nil { + // o is a string and s is a number + return -1 + } else if n2 != nil { + // s is a string and o is a number + return 1 + } + // Both are numbers + if si > oi { + return 1 + } + return -1 + +} diff --git a/vendor/github.com/Masterminds/semver/version_fuzz.go b/vendor/github.com/Masterminds/semver/version_fuzz.go new file mode 100644 index 000000000..b42bcd62b --- /dev/null +++ b/vendor/github.com/Masterminds/semver/version_fuzz.go @@ -0,0 +1,10 @@ +// +build gofuzz + +package semver + +func Fuzz(data []byte) int { + if _, err := NewVersion(string(data)); err != nil { + return 0 + } + return 1 +} diff --git a/vendor/github.com/OpenPeeDeeP/depguard/.gitignore b/vendor/github.com/OpenPeeDeeP/depguard/.gitignore new file mode 100644 index 000000000..97cca67c6 --- /dev/null +++ b/vendor/github.com/OpenPeeDeeP/depguard/.gitignore @@ -0,0 +1,14 @@ +# Binaries for programs and plugins +*.exe +*.exe~ +*.dll +*.so +*.dylib + +# Test binary, build with `go test -c` +*.test + +# Output of the go coverage tool, specifically when used with LiteIDE +*.out + +.idea diff --git a/vendor/github.com/OpenPeeDeeP/depguard/LICENSE b/vendor/github.com/OpenPeeDeeP/depguard/LICENSE new file mode 100644 index 000000000..94a9ed024 --- /dev/null +++ b/vendor/github.com/OpenPeeDeeP/depguard/LICENSE @@ -0,0 +1,674 @@ + GNU GENERAL PUBLIC LICENSE + Version 3, 29 June 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The GNU General Public License is a free, copyleft license for +software and other kinds of works. + + The licenses for most software and other practical works are designed +to take away your freedom to share and change the works. By contrast, +the GNU General Public License is intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. We, the Free Software Foundation, use the +GNU General Public License for most of our software; it applies also to +any other work released this way by its authors. You can apply it to +your programs, too. + + When we speak of free software, we are referring to freedom, not +price. 
Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. + + To protect your rights, we need to prevent others from denying you +these rights or asking you to surrender the rights. Therefore, you have +certain responsibilities if you distribute copies of the software, or if +you modify it: responsibilities to respect the freedom of others. + + For example, if you distribute copies of such a program, whether +gratis or for a fee, you must pass on to the recipients the same +freedoms that you received. You must make sure that they, too, receive +or can get the source code. And you must show them these terms so they +know their rights. + + Developers that use the GNU GPL protect your rights with two steps: +(1) assert copyright on the software, and (2) offer you this License +giving you legal permission to copy, distribute and/or modify it. + + For the developers' and authors' protection, the GPL clearly explains +that there is no warranty for this free software. For both users' and +authors' sake, the GPL requires that modified versions be marked as +changed, so that their problems will not be attributed erroneously to +authors of previous versions. + + Some devices are designed to deny users access to install or run +modified versions of the software inside them, although the manufacturer +can do so. This is fundamentally incompatible with the aim of +protecting users' freedom to change the software. The systematic +pattern of such abuse occurs in the area of products for individuals to +use, which is precisely where it is most unacceptable. Therefore, we +have designed this version of the GPL to prohibit the practice for those +products. If such problems arise substantially in other domains, we +stand ready to extend this provision to those domains in future versions +of the GPL, as needed to protect the freedom of users. + + Finally, every program is threatened constantly by software patents. +States should not allow patents to restrict development and use of +software on general-purpose computers, but in those that do, we wish to +avoid the special danger that patents applied to a free program could +make it effectively proprietary. To prevent this, the GPL assures that +patents cannot be used to render the program non-free. + + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. + + "This License" refers to version 3 of the GNU General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. 
+ + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. + + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. + + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. + + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. 
This License explicitly affirms your unlimited +permission to run the unmodified Program. The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. + + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. + + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. + + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. 
This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. + + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. + + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. + + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. 
+ + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. + + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. + + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. + + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. + + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. 
+ + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. + + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. + + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. 
+ + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. + + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. + + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. 
+ + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. + + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. + + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. 
If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Use with the GNU Affero General Public License. + + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU Affero General Public License into a single +combined work, and to convey the resulting work. The terms of this +License will continue to apply to the part which is the covered work, +but the special requirements of the GNU Affero General Public License, +section 13, concerning interaction through a network will apply to the +combination as such. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU General Public License from time to time. Such new versions will +be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU General Public License, you may choose any version ever published +by the Free Software Foundation. + + If the Program specifies that a proxy can decide which future +versions of the GNU General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. + + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS +THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY +GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE +USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF +DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD +PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), +EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF +SUCH DAMAGES. + + 17. 
Interpretation of Sections 15 and 16. + + If the disclaimer of warranty and limitation of liability provided +above cannot be given local legal effect according to their terms, +reviewing courts shall apply local law that most closely approximates +an absolute waiver of all civil liability in connection with the +Program, unless a warranty or assumption of liability accompanies a +copy of the Program in return for a fee. + + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Programs + + If you develop a new program, and you want it to be of the greatest +possible use to the public, the best way to achieve this is to make it +free software which everyone can redistribute and change under these terms. + + To do so, attach the following notices to the program. It is safest +to attach them to the start of each source file to most effectively +state the exclusion of warranty; and each file should have at least +the "copyright" line and a pointer to where the full notice is found. + + + Copyright (C) + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program. If not, see . + +Also add information on how to contact you by electronic and paper mail. + + If the program does terminal interaction, make it output a short +notice like this when it starts in an interactive mode: + + Copyright (C) + This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. + This is free software, and you are welcome to redistribute it + under certain conditions; type `show c' for details. + +The hypothetical commands `show w' and `show c' should show the appropriate +parts of the General Public License. Of course, your program's commands +might be different; for a GUI interface, you would use an "about box". + + You should also get your employer (if you work as a programmer) or school, +if any, to sign a "copyright disclaimer" for the program, if necessary. +For more information on this, and how to apply and follow the GNU GPL, see +. + + The GNU General Public License does not permit incorporating your program +into proprietary programs. If your program is a subroutine library, you +may consider it more useful to permit linking proprietary applications with +the library. If this is what you want to do, use the GNU Lesser General +Public License instead of this License. But first, please read +. diff --git a/vendor/github.com/OpenPeeDeeP/depguard/README.md b/vendor/github.com/OpenPeeDeeP/depguard/README.md new file mode 100644 index 000000000..d704ce6ad --- /dev/null +++ b/vendor/github.com/OpenPeeDeeP/depguard/README.md @@ -0,0 +1,77 @@ +# Depguard + +Go linter that checks package imports are in a list of acceptable packages. It +supports a white list and black list option and can do prefix or glob matching. +This allows you to allow imports from a whole organization or only +allow specific packages within a repository. It is recommended to use prefix +matching as it is faster than glob matching. The fewer glob matches the better. 
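As a rough illustration of the two matching modes when depguard is driven as a library (a sketch based only on its exported `Depguard` type, not on an official example), a guard list can mix plain prefixes and glob patterns:

```go
package main

import "github.com/OpenPeeDeeP/depguard"

// newGuard builds a blacklist mixing both styles: the first entry is a plain
// prefix (the fast path), while the second contains glob characters and is
// therefore compiled and matched as a glob. Both entries are hypothetical.
func newGuard() *depguard.Depguard {
	return &depguard.Depguard{
		ListType: depguard.LTBlacklist,
		Packages: []string{
			"github.com/pkg/errors",  // prefix: anything starting with this path
			"github.com/*/legacy/**", // glob: any organisation's legacy subtree
		},
	}
}

func main() {
	_ = newGuard() // Run additionally needs a loader.Config and loader.Program
}
```

The CLI described below does the same thing through the `packages` list in `.depguard.json`.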
+
+> If a pattern is matched by prefix it does not try to match via glob.
+
+## Install
+
+```bash
+go get -u github.com/OpenPeeDeeP/depguard
+```
+
+## Config
+
+By default, Depguard looks for a file named `.depguard.json` in the current
+working directory. If it is somewhere else, pass in the `-c` flag with
+the location of your configuration file.
+
+The following is an example configuration file.
+
+```json
+{
+  "type": "whitelist",
+  "packages": ["github.com/OpenPeeDeeP/depguard"],
+  "packageErrorMessages": {
+    "github.com/OpenPeeDeeP/depguards": "Please use \"github.com/OpenPeeDeeP/depguard\","
+  },
+  "inTests": ["github.com/stretchr/testify"],
+  "includeGoStdLib": true
+}
+```
+
+- `type` can be either `whitelist` or `blacklist`. This check is case insensitive.
+  If not specified the default is `blacklist`.
+- `packages` is a list of packages for the list type specified.
+- `packageErrorMessages` is a mapping from packages to the error message to display.
+- `inTests` is a list of packages allowed/disallowed only in test files.
+- Set `includeGoStdLib` (`includeGoRoot` for backwards compatibility) to true if you want to check the list against the standard library.
+  If not specified the default is false.
+
+## Gometalinter
+
+The binary installation of this linter can be used with
+[Gometalinter](https://github.com/alecthomas/gometalinter).
+
+If you use a configuration file for Gometalinter, then the following will need to
+be added to your configuration file.
+
+```json
+{
+  "linters": {
+    "depguard": {
+      "command": "depguard -c path/to/config.json",
+      "pattern": "PATH:LINE:COL:MESSAGE",
+      "installFrom": "github.com/OpenPeeDeeP/depguard",
+      "isFast": true,
+      "partitionStrategy": "packages"
+    }
+  }
+}
+```
+
+If you prefer the command line, the following will work for you as well.
+
+```bash
+gometalinter --linter='depguard:depguard -c path/to/config.json:PATH:LINE:COL:MESSAGE'
+```
+
+## Golangci-lint
+
+This linter was built with
+[Golangci-lint](https://github.com/golangci/golangci-lint) in mind. It is compatible
+with it; read their docs to see how to enable all of their linters, including this one.
diff --git a/vendor/github.com/OpenPeeDeeP/depguard/depguard.go b/vendor/github.com/OpenPeeDeeP/depguard/depguard.go
new file mode 100644
index 000000000..1dbffb7d6
--- /dev/null
+++ b/vendor/github.com/OpenPeeDeeP/depguard/depguard.go
@@ -0,0 +1,241 @@
+package depguard
+
+import (
+	"go/build"
+	"go/token"
+	"io/ioutil"
+	"path"
+	"sort"
+	"strings"
+
+	"github.com/gobwas/glob"
+	"golang.org/x/tools/go/loader"
+)
+
+// ListType states what kind of list is passed in.
+type ListType int
+
+const (
+	// LTBlacklist states the list given is a blacklist. (default)
+	LTBlacklist ListType = iota
+	// LTWhitelist states the list given is a whitelist.
+	LTWhitelist
+)
+
+// StringToListType makes it easier to turn a string into a ListType.
+// It assumes that the string representation is lower case.
+var StringToListType = map[string]ListType{
+	"whitelist": LTWhitelist,
+	"blacklist": LTBlacklist,
+}
+
+// Issue with the package with PackageName at the Position.
+type Issue struct {
+	PackageName string
+	Position    token.Position
+}
+
+// Depguard checks imports to make sure they follow the given list and constraints.
+type Depguard struct { + ListType ListType + IncludeGoRoot bool + + Packages []string + prefixPackages []string + globPackages []glob.Glob + + TestPackages []string + prefixTestPackages []string + globTestPackages []glob.Glob + + prefixRoot []string +} + +// Run checks for dependencies given the program and validates them against +// Packages. +func (dg *Depguard) Run(config *loader.Config, prog *loader.Program) ([]*Issue, error) { + // Shortcut execution on an empty blacklist as that means every package is allowed + if dg.ListType == LTBlacklist && len(dg.Packages) == 0 { + return nil, nil + } + + if err := dg.initialize(config, prog); err != nil { + return nil, err + } + directImports, err := dg.createImportMap(prog) + if err != nil { + return nil, err + } + var issues []*Issue + for pkg, positions := range directImports { + for _, pos := range positions { + + prefixList, globList := dg.prefixPackages, dg.globPackages + if len(dg.TestPackages) > 0 && strings.Index(pos.Filename, "_test.go") != -1 { + prefixList, globList = dg.prefixTestPackages, dg.globTestPackages + } + + if dg.flagIt(pkg, prefixList, globList) { + issues = append(issues, &Issue{ + PackageName: pkg, + Position: pos, + }) + } + } + } + return issues, nil +} + +func (dg *Depguard) initialize(config *loader.Config, prog *loader.Program) error { + // parse ordinary guarded packages + for _, pkg := range dg.Packages { + if strings.ContainsAny(pkg, "!?*[]{}") { + g, err := glob.Compile(pkg, '/') + if err != nil { + return err + } + dg.globPackages = append(dg.globPackages, g) + } else { + dg.prefixPackages = append(dg.prefixPackages, pkg) + } + } + + // Sort the packages so we can have a faster search in the array + sort.Strings(dg.prefixPackages) + + // parse guarded tests packages + for _, pkg := range dg.TestPackages { + if strings.ContainsAny(pkg, "!?*[]{}") { + g, err := glob.Compile(pkg, '/') + if err != nil { + return err + } + dg.globTestPackages = append(dg.globTestPackages, g) + } else { + dg.prefixTestPackages = append(dg.prefixTestPackages, pkg) + } + } + + // Sort the test packages so we can have a faster search in the array + sort.Strings(dg.prefixTestPackages) + + if !dg.IncludeGoRoot { + var err error + dg.prefixRoot, err = listRootPrefixs(config.Build) + if err != nil { + return err + } + } + + return nil +} + +func (dg *Depguard) createImportMap(prog *loader.Program) (map[string][]token.Position, error) { + importMap := make(map[string][]token.Position) + // For the directly imported packages + for _, imported := range prog.InitialPackages() { + // Go through their files + for _, file := range imported.Files { + // And populate a map of all direct imports and their positions + // This will filter out GoRoot depending on the Depguard.IncludeGoRoot + for _, fileImport := range file.Imports { + fileImportPath := cleanBasicLitString(fileImport.Path.Value) + if !dg.IncludeGoRoot && dg.isRoot(fileImportPath) { + continue + } + position := prog.Fset.Position(fileImport.Pos()) + positions, found := importMap[fileImportPath] + if !found { + importMap[fileImportPath] = []token.Position{ + position, + } + continue + } + importMap[fileImportPath] = append(positions, position) + } + } + } + return importMap, nil +} + +func pkgInList(pkg string, prefixList []string, globList []glob.Glob) bool { + if pkgInPrefixList(pkg, prefixList) { + return true + } + return pkgInGlobList(pkg, globList) +} + +func pkgInPrefixList(pkg string, prefixList []string) bool { + // Idx represents where in the package slice the passed in package 
would go
+	// when sorted. -1 just means that it would be at the very front of the slice.
+	idx := sort.Search(len(prefixList), func(i int) bool {
+		return prefixList[i] > pkg
+	}) - 1
+	// This means that the package passed in has no way to be prefixed by anything
+	// in the package list as it is already smaller than everything
+	if idx == -1 {
+		return false
+	}
+	return strings.HasPrefix(pkg, prefixList[idx])
+}
+
+func pkgInGlobList(pkg string, globList []glob.Glob) bool {
+	for _, g := range globList {
+		if g.Match(pkg) {
+			return true
+		}
+	}
+	return false
+}
+
+// InList | WhiteList | BlackList
+// y      |           |     x
+// n      |     x     |
+func (dg *Depguard) flagIt(pkg string, prefixList []string, globList []glob.Glob) bool {
+	return pkgInList(pkg, prefixList, globList) == (dg.ListType == LTBlacklist)
+}
+
+func cleanBasicLitString(value string) string {
+	return strings.Trim(value, "\"\\")
+}
+
+// We can do this as all imports that are not root are either prefixed with a domain
+// or prefixed with `./` or `/` to dictate it is a local file reference
+func listRootPrefixs(buildCtx *build.Context) ([]string, error) {
+	if buildCtx == nil {
+		buildCtx = &build.Default
+	}
+	root := path.Join(buildCtx.GOROOT, "src")
+	fs, err := ioutil.ReadDir(root)
+	if err != nil {
+		return nil, err
+	}
+	var pkgPrefix []string
+	for _, f := range fs {
+		if !f.IsDir() {
+			continue
+		}
+		pkgPrefix = append(pkgPrefix, f.Name())
+	}
+	return pkgPrefix, nil
+}
+
+func (dg *Depguard) isRoot(importPath string) bool {
+	// Idx represents where in the package slice the passed in package would go
+	// when sorted. -1 just means that it would be at the very front of the slice.
+	idx := sort.Search(len(dg.prefixRoot), func(i int) bool {
+		return dg.prefixRoot[i] > importPath
+	}) - 1
+	// This means that the package passed in has no way to be prefixed by anything
+	// in the package list as it is already smaller than everything
+	if idx == -1 {
+		return false
+	}
+	// if it is prefixed by a root prefix we need to check if it is an exact match
+	// or prefixed with `/`, as this could return a false positive if the domain was
+	// `archive.com` for example, as `archive` is a go root package.
+ if strings.HasPrefix(importPath, dg.prefixRoot[idx]) { + return strings.HasPrefix(importPath, dg.prefixRoot[idx]+"/") || importPath == dg.prefixRoot[idx] + } + return false +} diff --git a/vendor/github.com/OpenPeeDeeP/depguard/go.mod b/vendor/github.com/OpenPeeDeeP/depguard/go.mod new file mode 100644 index 000000000..5ad37edb8 --- /dev/null +++ b/vendor/github.com/OpenPeeDeeP/depguard/go.mod @@ -0,0 +1,9 @@ +module github.com/OpenPeeDeeP/depguard + +go 1.13 + +require ( + github.com/gobwas/glob v0.2.3 + github.com/kisielk/gotool v1.0.0 + golang.org/x/tools v0.0.0-20180525024113-a5b4c53f6e8b +) diff --git a/vendor/github.com/OpenPeeDeeP/depguard/go.sum b/vendor/github.com/OpenPeeDeeP/depguard/go.sum new file mode 100644 index 000000000..24693c36d --- /dev/null +++ b/vendor/github.com/OpenPeeDeeP/depguard/go.sum @@ -0,0 +1,6 @@ +github.com/gobwas/glob v0.2.3 h1:A4xDbljILXROh+kObIiy5kIaPYD8e96x1tgBhUI5J+Y= +github.com/gobwas/glob v0.2.3/go.mod h1:d3Ez4x06l9bZtSvzIay5+Yzi0fmZzPgnTbPcKjJAkT8= +github.com/kisielk/gotool v1.0.0 h1:AV2c/EiW3KqPNT9ZKl07ehoAGi4C5/01Cfbblndcapg= +github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= +golang.org/x/tools v0.0.0-20180525024113-a5b4c53f6e8b h1:7tibmaEqrQYA+q6ri7NQjuxqSwechjtDHKq6/e85S38= +golang.org/x/tools v0.0.0-20180525024113-a5b4c53f6e8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= diff --git a/vendor/github.com/alexkohler/prealloc/LICENSE b/vendor/github.com/alexkohler/prealloc/LICENSE new file mode 100644 index 000000000..9310fbcff --- /dev/null +++ b/vendor/github.com/alexkohler/prealloc/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2017 Alex Kohler + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
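Stepping back to depguard.go above: both `pkgInPrefixList` and `isRoot` use the same lookup trick, where `sort.Search` over a sorted prefix slice returns the insertion point for the import path, and the entry just before that point is the only possible prefix. A minimal standalone sketch of that lookup, with a made-up prefix list, could look like this:

```go
package main

import (
	"fmt"
	"sort"
	"strings"
)

// hasSortedPrefix reports whether pkg lives under any entry of prefixes,
// which must already be sorted. sort.Search finds the first entry greater
// than pkg, so only the entry immediately before that index can be a prefix.
func hasSortedPrefix(pkg string, prefixes []string) bool {
	idx := sort.Search(len(prefixes), func(i int) bool {
		return prefixes[i] > pkg
	}) - 1
	if idx < 0 {
		return false
	}
	// As in isRoot, require an exact match or a following "/" so that, for
	// example, "network/x" is not treated as living under the "net" prefix.
	return pkg == prefixes[idx] || strings.HasPrefix(pkg, prefixes[idx]+"/")
}

func main() {
	prefixes := []string{"archive", "fmt", "net"} // sorted, like dg.prefixRoot
	fmt.Println(hasSortedPrefix("net/http", prefixes))       // true
	fmt.Println(hasSortedPrefix("network/x", prefixes))      // false
	fmt.Println(hasSortedPrefix("github.com/a/b", prefixes)) // false
}
```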
diff --git a/vendor/github.com/alexkohler/prealloc/pkg/prealloc.go b/vendor/github.com/alexkohler/prealloc/pkg/prealloc.go new file mode 100644 index 000000000..72d8b95f7 --- /dev/null +++ b/vendor/github.com/alexkohler/prealloc/pkg/prealloc.go @@ -0,0 +1,267 @@ +package pkg + +import ( + "fmt" + "go/ast" + "go/token" +) + +type sliceDeclaration struct { + name string + // sType string + genD *ast.GenDecl +} + +type returnsVisitor struct { + // flags + simple bool + includeRangeLoops bool + includeForLoops bool + // visitor fields + sliceDeclarations []*sliceDeclaration + preallocHints []Hint + returnsInsideOfLoop bool + arrayTypes []string +} + +func Check(files []*ast.File, simple, includeRangeLoops, includeForLoops bool) []Hint { + hints := []Hint{} + for _, f := range files { + retVis := &returnsVisitor{ + simple: simple, + includeRangeLoops: includeRangeLoops, + includeForLoops: includeForLoops, + } + ast.Walk(retVis, f) + // if simple is true, then we actually have to check if we had returns + // inside of our loop. Otherwise, we can just report all messages. + if !retVis.simple || !retVis.returnsInsideOfLoop { + hints = append(hints, retVis.preallocHints...) + } + } + + return hints +} + +func contains(slice []string, item string) bool { + for _, s := range slice { + if s == item { + return true + } + } + + return false +} + +func (v *returnsVisitor) Visit(node ast.Node) ast.Visitor { + + v.sliceDeclarations = nil + v.returnsInsideOfLoop = false + + switch n := node.(type) { + case *ast.TypeSpec: + if _, ok := n.Type.(*ast.ArrayType); ok { + if n.Name != nil { + v.arrayTypes = append(v.arrayTypes, n.Name.Name) + } + } + case *ast.FuncDecl: + if n.Body != nil { + for _, stmt := range n.Body.List { + switch s := stmt.(type) { + // Find non pre-allocated slices + case *ast.DeclStmt: + genD, ok := s.Decl.(*ast.GenDecl) + if !ok { + continue + } + if genD.Tok == token.TYPE { + for _, spec := range genD.Specs { + tSpec, ok := spec.(*ast.TypeSpec) + if !ok { + continue + } + + if _, ok := tSpec.Type.(*ast.ArrayType); ok { + if tSpec.Name != nil { + v.arrayTypes = append(v.arrayTypes, tSpec.Name.Name) + } + } + } + } else if genD.Tok == token.VAR { + for _, spec := range genD.Specs { + vSpec, ok := spec.(*ast.ValueSpec) + if !ok { + continue + } + var isArrType bool + switch val := vSpec.Type.(type) { + case *ast.ArrayType: + isArrType = true + case *ast.Ident: + isArrType = contains(v.arrayTypes, val.Name) + } + if isArrType { + if vSpec.Names != nil { + /*atID, ok := arrayType.Elt.(*ast.Ident) + if !ok { + continue + }*/ + + // We should handle multiple slices declared on same line e.g. var mySlice1, mySlice2 []uint32 + for _, vName := range vSpec.Names { + v.sliceDeclarations = append(v.sliceDeclarations, &sliceDeclaration{name: vName.Name /*sType: atID.Name,*/, genD: genD}) + } + } + } + } + } + + case *ast.RangeStmt: + if v.includeRangeLoops { + if len(v.sliceDeclarations) == 0 { + continue + } + // Check the value being ranged over and ensure it's not a channel (we cannot offer any recommendations on channel ranges). 
+ rangeIdent, ok := s.X.(*ast.Ident) + if ok && rangeIdent.Obj != nil { + valueSpec, ok := rangeIdent.Obj.Decl.(*ast.ValueSpec) + if ok { + if _, rangeTargetIsChannel := valueSpec.Type.(*ast.ChanType); rangeTargetIsChannel { + continue + } + } + } + if s.Body != nil { + v.handleLoops(s.Body) + } + } + + case *ast.ForStmt: + if v.includeForLoops { + if len(v.sliceDeclarations) == 0 { + continue + } + if s.Body != nil { + v.handleLoops(s.Body) + } + } + + default: + } + } + } + } + return v +} + +// handleLoops is a helper function to share the logic required for both *ast.RangeLoops and *ast.ForLoops +func (v *returnsVisitor) handleLoops(blockStmt *ast.BlockStmt) { + + for _, stmt := range blockStmt.List { + switch bodyStmt := stmt.(type) { + case *ast.AssignStmt: + asgnStmt := bodyStmt + for index, expr := range asgnStmt.Rhs { + if index >= len(asgnStmt.Lhs) { + continue + } + + lhsIdent, ok := asgnStmt.Lhs[index].(*ast.Ident) + if !ok { + continue + } + + callExpr, ok := expr.(*ast.CallExpr) + if !ok { + continue + } + + rhsFuncIdent, ok := callExpr.Fun.(*ast.Ident) + if !ok { + continue + } + + if rhsFuncIdent.Name != "append" { + continue + } + + // e.g., `x = append(x)` + // Pointless, but pre-allocation will not help. + if len(callExpr.Args) < 2 { + continue + } + + rhsIdent, ok := callExpr.Args[0].(*ast.Ident) + if !ok { + continue + } + + // e.g., `x = append(y, a)` + // This is weird (and maybe a logic error), + // but we cannot recommend pre-allocation. + if lhsIdent.Name != rhsIdent.Name { + continue + } + + // e.g., `x = append(x, y...)` + // we should ignore this. Pre-allocating in this case + // is confusing, and is not possible in general. + if callExpr.Ellipsis.IsValid() { + continue + } + + for _, sliceDecl := range v.sliceDeclarations { + if sliceDecl.name == lhsIdent.Name { + // This is a potential mark, we just need to make sure there are no returns/continues in the + // range loop. + // now we just need to grab whatever we're ranging over + /*sxIdent, ok := s.X.(*ast.Ident) + if !ok { + continue + }*/ + + v.preallocHints = append(v.preallocHints, Hint{ + Pos: sliceDecl.genD.Pos(), + DeclaredSliceName: sliceDecl.name, + }) + } + } + } + case *ast.IfStmt: + ifStmt := bodyStmt + if ifStmt.Body != nil { + for _, ifBodyStmt := range ifStmt.Body.List { + // TODO should probably handle embedded ifs here + switch /*ift :=*/ ifBodyStmt.(type) { + case *ast.BranchStmt, *ast.ReturnStmt: + v.returnsInsideOfLoop = true + default: + } + } + } + + default: + + } + } + +} + +// Hint stores the information about an occurrence of a slice that could be +// preallocated. +type Hint struct { + Pos token.Pos + DeclaredSliceName string +} + +func (h Hint) String() string { + return fmt.Sprintf("%v: Consider preallocating %v", h.Pos, h.DeclaredSliceName) +} + +func (h Hint) StringFromFS(f *token.FileSet) string { + file := f.File(h.Pos) + lineNumber := file.Position(h.Pos).Line + + return fmt.Sprintf("%v:%v Consider preallocating %v", file.Name(), lineNumber, h.DeclaredSliceName) +} diff --git a/vendor/github.com/ashanbrown/forbidigo/LICENSE b/vendor/github.com/ashanbrown/forbidigo/LICENSE new file mode 100644 index 000000000..dc1d47ad5 --- /dev/null +++ b/vendor/github.com/ashanbrown/forbidigo/LICENSE @@ -0,0 +1,13 @@ +Copyright 2019 Andrew Shannon Brown + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. diff --git a/vendor/github.com/ashanbrown/forbidigo/forbidigo/config_options.go b/vendor/github.com/ashanbrown/forbidigo/forbidigo/config_options.go new file mode 100644 index 000000000..a39f754f0 --- /dev/null +++ b/vendor/github.com/ashanbrown/forbidigo/forbidigo/config_options.go @@ -0,0 +1,45 @@ +package forbidigo + +// Code generated by github.com/launchdarkly/go-options. DO NOT EDIT. + +type ApplyOptionFunc func(c *config) error + +func (f ApplyOptionFunc) apply(c *config) error { + return f(c) +} + +func newConfig(options ...Option) (config, error) { + var c config + err := applyConfigOptions(&c, options...) + return c, err +} + +func applyConfigOptions(c *config, options ...Option) error { + c.ExcludeGodocExamples = true + for _, o := range options { + if err := o.apply(c); err != nil { + return err + } + } + return nil +} + +type Option interface { + apply(*config) error +} + +// OptionExcludeGodocExamples don't check inside Godoc examples (see https://blog.golang.org/examples) +func OptionExcludeGodocExamples(o bool) ApplyOptionFunc { + return func(c *config) error { + c.ExcludeGodocExamples = o + return nil + } +} + +// OptionIgnorePermitDirectives don't check for `permit` directives(for example, in favor of `nolint`) +func OptionIgnorePermitDirectives(o bool) ApplyOptionFunc { + return func(c *config) error { + c.IgnorePermitDirectives = o + return nil + } +} diff --git a/vendor/github.com/ashanbrown/forbidigo/forbidigo/forbidigo.go b/vendor/github.com/ashanbrown/forbidigo/forbidigo/forbidigo.go new file mode 100644 index 000000000..2337404ae --- /dev/null +++ b/vendor/github.com/ashanbrown/forbidigo/forbidigo/forbidigo.go @@ -0,0 +1,193 @@ +// forbidigo provides a linter for forbidding the use of specific identifiers +package forbidigo + +import ( + "bytes" + "fmt" + "go/ast" + "go/printer" + "go/token" + "log" + "regexp" + "strings" + + "github.com/pkg/errors" +) + +type Issue interface { + Details() string + Position() token.Position + String() string +} + +type UsedIssue struct { + identifier string + pattern string + position token.Position +} + +func (a UsedIssue) Details() string { + return fmt.Sprintf("use of `%s` forbidden by pattern `%s`", a.identifier, a.pattern) +} + +func (a UsedIssue) Position() token.Position { + return a.position +} + +func (a UsedIssue) String() string { return toString(a) } + +func toString(i Issue) string { + return fmt.Sprintf("%s at %s", i.Details(), i.Position()) +} + +type Linter struct { + cfg config + patterns []*regexp.Regexp +} + +func DefaultPatterns() []string { + return []string{`^(fmt\.Print(|f|ln)|print|println)$`} +} + +//go:generate go-options config +type config struct { + // don't check inside Godoc examples (see https://blog.golang.org/examples) + ExcludeGodocExamples bool `options:",true"` + IgnorePermitDirectives bool // don't check for `permit` directives(for example, in favor of `nolint`) +} + +func NewLinter(patterns []string, options ...Option) (*Linter, error) { + cfg, err := newConfig(options...) 
+ if err != nil { + return nil, errors.Wrapf(err, "failed to process options") + } + + if len(patterns) == 0 { + patterns = DefaultPatterns() + } + compiledPatterns := make([]*regexp.Regexp, 0, len(patterns)) + for _, p := range patterns { + re, err := regexp.Compile(p) + if err != nil { + return nil, fmt.Errorf("unable to compile pattern `%s`: %s", p, err) + } + compiledPatterns = append(compiledPatterns, re) + } + return &Linter{ + cfg: cfg, + patterns: compiledPatterns, + }, nil +} + +type visitor struct { + cfg config + isTestFile bool // godoc only runs on test files + + linter *Linter + comments []*ast.CommentGroup + + fset *token.FileSet + issues []Issue +} + +func (l *Linter) Run(fset *token.FileSet, nodes ...ast.Node) ([]Issue, error) { + var issues []Issue //nolint:prealloc // we don't know how many there will be + for _, node := range nodes { + var comments []*ast.CommentGroup + isTestFile := false + isWholeFileExample := false + if file, ok := node.(*ast.File); ok { + comments = file.Comments + fileName := fset.Position(file.Pos()).Filename + isTestFile = strings.HasSuffix(fileName, "_test.go") + + // From https://blog.golang.org/examples, a "whole file example" is: + // a file that ends in _test.go and contains exactly one example function, + // no test or benchmark functions, and at least one other package-level declaration. + if l.cfg.ExcludeGodocExamples && isTestFile && len(file.Decls) > 1 { + numExamples := 0 + numTestsAndBenchmarks := 0 + for _, decl := range file.Decls { + funcDecl, isFuncDecl := decl.(*ast.FuncDecl) + // consider only functions, not methods + if !isFuncDecl || funcDecl.Recv != nil || funcDecl.Name == nil { + continue + } + funcName := funcDecl.Name.Name + if strings.HasPrefix(funcName, "Test") || strings.HasPrefix(funcName, "Benchmark") { + numTestsAndBenchmarks++ + break // not a whole file example + } + if strings.HasPrefix(funcName, "Example") { + numExamples++ + } + } + + // if this is a whole file example, skip this node + isWholeFileExample = numExamples == 1 && numTestsAndBenchmarks == 0 + } + } + if isWholeFileExample { + continue + } + visitor := visitor{ + cfg: l.cfg, + isTestFile: isTestFile, + linter: l, + fset: fset, + comments: comments, + } + ast.Walk(&visitor, node) + issues = append(issues, visitor.issues...) 
+ } + return issues, nil +} + +func (v *visitor) Visit(node ast.Node) ast.Visitor { + switch node := node.(type) { + case *ast.FuncDecl: + // don't descend into godoc examples if we are ignoring them + isGodocExample := v.isTestFile && node.Recv == nil && node.Name != nil && strings.HasPrefix(node.Name.Name, "Example") + if isGodocExample && v.cfg.ExcludeGodocExamples { + return nil + } + return v + case *ast.SelectorExpr: + case *ast.Ident: + default: + return v + } + for _, p := range v.linter.patterns { + if p.MatchString(v.textFor(node)) && !v.permit(node) { + v.issues = append(v.issues, UsedIssue{ + identifier: v.textFor(node), + pattern: p.String(), + position: v.fset.Position(node.Pos()), + }) + } + } + return nil +} + +func (v *visitor) textFor(node ast.Node) string { + buf := new(bytes.Buffer) + if err := printer.Fprint(buf, v.fset, node); err != nil { + log.Fatalf("ERROR: unable to print node at %s: %s", v.fset.Position(node.Pos()), err) + } + return buf.String() +} + +func (v *visitor) permit(node ast.Node) bool { + if v.cfg.IgnorePermitDirectives { + return false + } + nodePos := v.fset.Position(node.Pos()) + var nolint = regexp.MustCompile(fmt.Sprintf(`^//\s?permit:%s\b`, regexp.QuoteMeta(v.textFor(node)))) + for _, c := range v.comments { + commentPos := v.fset.Position(c.Pos()) + if commentPos.Line == nodePos.Line && len(c.List) > 0 && nolint.MatchString(c.List[0].Text) { + return true + } + } + return false +} diff --git a/vendor/github.com/ashanbrown/makezero/LICENSE b/vendor/github.com/ashanbrown/makezero/LICENSE new file mode 100644 index 000000000..dc1d47ad5 --- /dev/null +++ b/vendor/github.com/ashanbrown/makezero/LICENSE @@ -0,0 +1,13 @@ +Copyright 2019 Andrew Shannon Brown + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. diff --git a/vendor/github.com/ashanbrown/makezero/makezero/makezero.go b/vendor/github.com/ashanbrown/makezero/makezero/makezero.go new file mode 100644 index 000000000..db9b45adc --- /dev/null +++ b/vendor/github.com/ashanbrown/makezero/makezero/makezero.go @@ -0,0 +1,200 @@ +// makezero provides a linter for appends to slices initialized with non-zero length. 
+package makezero + +import ( + "bytes" + "fmt" + "go/ast" + "go/printer" + "go/token" + "go/types" + "log" + "regexp" +) + +type Issue interface { + Details() string + Position() token.Position + String() string +} + +type AppendIssue struct { + name string + position token.Position +} + +func (a AppendIssue) Details() string { + return fmt.Sprintf("append to slice `%s` with non-zero initialized length", a.name) +} + +func (a AppendIssue) Position() token.Position { + return a.position +} + +func (a AppendIssue) String() string { return toString(a) } + +type MustHaveNonZeroInitLenIssue struct { + name string + position token.Position +} + +func (i MustHaveNonZeroInitLenIssue) Details() string { + return fmt.Sprintf("slice `%s` does not have non-zero initial length", i.name) +} + +func (i MustHaveNonZeroInitLenIssue) Position() token.Position { + return i.position +} + +func (i MustHaveNonZeroInitLenIssue) String() string { return toString(i) } + +func toString(i Issue) string { + return fmt.Sprintf("%s at %s", i.Details(), i.Position()) +} + +type visitor struct { + initLenMustBeZero bool + + comments []*ast.CommentGroup // comments to apply during this visit + info *types.Info + + nonZeroLengthSliceDecls map[interface{}]struct{} + fset *token.FileSet + issues []Issue +} + +type Linter struct { + initLenMustBeZero bool +} + +func NewLinter(initialLengthMustBeZero bool) *Linter { + return &Linter{ + initLenMustBeZero: initialLengthMustBeZero, + } +} + +func (l Linter) Run(fset *token.FileSet, info *types.Info, nodes ...ast.Node) ([]Issue, error) { + var issues []Issue // nolint:prealloc // don't know how many there will be + for _, node := range nodes { + var comments []*ast.CommentGroup + if file, ok := node.(*ast.File); ok { + comments = file.Comments + } + visitor := visitor{ + nonZeroLengthSliceDecls: make(map[interface{}]struct{}), + initLenMustBeZero: l.initLenMustBeZero, + info: info, + fset: fset, + comments: comments, + } + ast.Walk(&visitor, node) + issues = append(issues, visitor.issues...) 
+ } + return issues, nil +} + +func (v *visitor) Visit(node ast.Node) ast.Visitor { + switch node := node.(type) { + case *ast.CallExpr: + fun, ok := node.Fun.(*ast.Ident) + if !ok || fun.Name != "append" { + break + } + if sliceIdent, ok := node.Args[0].(*ast.Ident); ok && + v.hasNonZeroInitialLength(sliceIdent) && + !v.hasNoLintOnSameLine(fun) { + v.issues = append(v.issues, AppendIssue{name: sliceIdent.Name, position: v.fset.Position(fun.Pos())}) + } + case *ast.AssignStmt: + for i, right := range node.Rhs { + if right, ok := right.(*ast.CallExpr); ok { + fun, ok := right.Fun.(*ast.Ident) + if !ok || fun.Name != "make" { + continue + } + left := node.Lhs[i] + if len(right.Args) == 2 { + // ignore if not a slice or it has explicit zero length + if !v.isSlice(right.Args[0]) { + break + } else if lit, ok := right.Args[1].(*ast.BasicLit); ok && lit.Kind == token.INT && lit.Value == "0" { + break + } + if v.initLenMustBeZero && !v.hasNoLintOnSameLine(fun) { + v.issues = append(v.issues, MustHaveNonZeroInitLenIssue{ + name: v.textFor(left), + position: v.fset.Position(node.Pos()), + }) + } + v.recordNonZeroLengthSlices(left) + } + } + } + } + return v +} + +func (v *visitor) textFor(node ast.Node) string { + typeBuf := new(bytes.Buffer) + if err := printer.Fprint(typeBuf, v.fset, node); err != nil { + log.Fatalf("ERROR: unable to print type: %s", err) + } + return typeBuf.String() +} + +func (v *visitor) hasNonZeroInitialLength(ident *ast.Ident) bool { + if ident.Obj == nil { + log.Printf("WARNING: could not determine with %q at %s is a slice (missing object type)", + ident.Name, v.fset.Position(ident.Pos()).String()) + return false + } + _, exists := v.nonZeroLengthSliceDecls[ident.Obj.Decl] + return exists +} + +func (v *visitor) recordNonZeroLengthSlices(node ast.Node) { + ident, ok := node.(*ast.Ident) + if !ok { + return + } + if ident.Obj == nil { + return + } + v.nonZeroLengthSliceDecls[ident.Obj.Decl] = struct{}{} +} + +func (v *visitor) isSlice(node ast.Node) bool { + // determine type if this is a user-defined type + if ident, ok := node.(*ast.Ident); ok { + obj := ident.Obj + if obj == nil { + if v.info != nil { + _, ok := v.info.ObjectOf(ident).Type().(*types.Slice) + return ok + } + return false + } + spec, ok := obj.Decl.(*ast.TypeSpec) + if !ok { + return false + } + node = spec.Type + } + + if node, ok := node.(*ast.ArrayType); ok { + return node.Len == nil // only slices have zero length + } + return false +} + +func (v *visitor) hasNoLintOnSameLine(node ast.Node) bool { + var nolint = regexp.MustCompile(`^\s*nozero\b`) + nodePos := v.fset.Position(node.Pos()) + for _, c := range v.comments { + commentPos := v.fset.Position(c.Pos()) + if commentPos.Line == nodePos.Line && nolint.MatchString(c.Text()) { + return true + } + } + return false +} diff --git a/vendor/github.com/bkielbasa/cyclop/LICENSE b/vendor/github.com/bkielbasa/cyclop/LICENSE new file mode 100644 index 000000000..b4a776a40 --- /dev/null +++ b/vendor/github.com/bkielbasa/cyclop/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2020 Bartłomiej Klimczak + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright 
notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/github.com/bkielbasa/cyclop/pkg/analyzer/analyzer.go b/vendor/github.com/bkielbasa/cyclop/pkg/analyzer/analyzer.go new file mode 100644 index 000000000..9b2801352 --- /dev/null +++ b/vendor/github.com/bkielbasa/cyclop/pkg/analyzer/analyzer.go @@ -0,0 +1,104 @@ +package analyzer + +import ( + "flag" + "go/ast" + "go/token" + "strings" + + "golang.org/x/tools/go/analysis" +) + +var ( + flagSet flag.FlagSet +) + +var maxComplexity int +var packageAverage float64 +var skipTests bool + +func NewAnalyzer() *analysis.Analyzer { + return &analysis.Analyzer{ + Name: "cyclop", + Doc: "calculates cyclomatic complexity", + Run: run, + Flags: flagSet, + } +} + +func init() { + flagSet.IntVar(&maxComplexity, "maxComplexity", 10, "max complexity the function can have") + flagSet.Float64Var(&packageAverage, "packageAverage", 0, "max avarage complexity in package") + flagSet.BoolVar(&skipTests, "skipTests", false, "should the linter execute on test files as well") +} + +func run(pass *analysis.Pass) (interface{}, error) { + var sum, count float64 + var pkgName string + var pkgPos token.Pos + + for _, f := range pass.Files { + ast.Inspect(f, func(node ast.Node) bool { + f, ok := node.(*ast.FuncDecl) + if !ok { + if node == nil { + return true + } + if file, ok := node.(*ast.File); ok { + pkgName = file.Name.Name + pkgPos = node.Pos() + } + // we check function by function + return true + } + + if skipTests && testFunc(f) { + return true + } + + count++ + comp := complexity(f) + sum += float64(comp) + if comp > maxComplexity { + pass.Reportf(node.Pos(), "calculated cyclomatic complexity for function %s is %d, max is %d", f.Name.Name, comp, maxComplexity) + } + + return true + }) + } + + if packageAverage > 0 { + avg := sum / count + if avg > packageAverage { + pass.Reportf(pkgPos, "the avarage complexity for the package %s is %f, max is %f", pkgName, avg, packageAverage) + } + } + + return nil, nil +} + +func testFunc(f *ast.FuncDecl) bool { + return strings.HasPrefix(f.Name.Name, "Test") +} + +func complexity(fn *ast.FuncDecl) int { + v := complexityVisitor{} + ast.Walk(&v, fn) + return v.Complexity +} + +type complexityVisitor struct { + Complexity int +} + +func (v *complexityVisitor) Visit(n ast.Node) ast.Visitor { + switch n := n.(type) { + case *ast.FuncDecl, *ast.IfStmt, *ast.ForStmt, *ast.RangeStmt, *ast.CaseClause, *ast.CommClause: + v.Complexity++ + case *ast.BinaryExpr: + if n.Op == token.LAND || n.Op == token.LOR { + v.Complexity++ + } + } + return v +} diff --git a/vendor/github.com/bombsimon/wsl/v3/.gitignore b/vendor/github.com/bombsimon/wsl/v3/.gitignore new file mode 100644 index 000000000..1c8eba613 --- /dev/null +++ b/vendor/github.com/bombsimon/wsl/v3/.gitignore @@ -0,0 +1,70 @@ + +# Created by https://www.gitignore.io/api/go,vim,macos + +### Go ### +# Binaries for programs and plugins +*.exe +*.exe~ +*.dll +*.so +*.dylib + +# Test binary, build with `go test -c` +*.test + +# Output of the go coverage 
tool, specifically when used with LiteIDE +*.out + +### Go Patch ### +/vendor/ +/Godeps/ + +### macOS ### +# General +.DS_Store +.AppleDouble +.LSOverride + +# Icon must end with two \r +Icon + +# Thumbnails +._* + +# Files that might appear in the root of a volume +.DocumentRevisions-V100 +.fseventsd +.Spotlight-V100 +.TemporaryItems +.Trashes +.VolumeIcon.icns +.com.apple.timemachine.donotpresent + +# Directories potentially created on remote AFP share +.AppleDB +.AppleDesktop +Network Trash Folder +Temporary Items +.apdisk + +### Vim ### +# Swap +[._]*.s[a-v][a-z] +[._]*.sw[a-p] +[._]s[a-rt-v][a-z] +[._]ss[a-gi-z] +[._]sw[a-p] + +# Session +Session.vim + +# Temporary +.netrwhist +*~ +# Auto-generated tag files +tags +# Persistent undo +[._]*.un~ + + +# End of https://www.gitignore.io/api/go,vim,macos diff --git a/vendor/github.com/bombsimon/wsl/v3/.travis.yml b/vendor/github.com/bombsimon/wsl/v3/.travis.yml new file mode 100644 index 000000000..5e2e26ed1 --- /dev/null +++ b/vendor/github.com/bombsimon/wsl/v3/.travis.yml @@ -0,0 +1,25 @@ +--- +language: go + +go: + - 1.13.x + - 1.12.x + - 1.11.x + +env: + global: + - GO111MODULE=on + +install: + - go get -v golang.org/x/tools/cmd/cover github.com/mattn/goveralls + +script: + - go test -v -covermode=count -coverprofile=coverage.out + +after_script: + - $HOME/gopath/bin/goveralls -coverprofile=coverage.out -service=travis-ci + +notifications: + email: false + +# vim: set ts=2 sw=2 et: diff --git a/vendor/github.com/bombsimon/wsl/v3/LICENSE b/vendor/github.com/bombsimon/wsl/v3/LICENSE new file mode 100644 index 000000000..4dade6d1c --- /dev/null +++ b/vendor/github.com/bombsimon/wsl/v3/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2018 Simon Sawert + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/vendor/github.com/bombsimon/wsl/v3/README.md b/vendor/github.com/bombsimon/wsl/v3/README.md new file mode 100644 index 000000000..9812f94a7 --- /dev/null +++ b/vendor/github.com/bombsimon/wsl/v3/README.md @@ -0,0 +1,126 @@ +# WSL - Whitespace Linter + +[![forthebadge](https://forthebadge.com/images/badges/made-with-go.svg)](https://forthebadge.com) +[![forthebadge](https://forthebadge.com/images/badges/built-with-love.svg)](https://forthebadge.com) + +[![Build Status](https://travis-ci.org/bombsimon/wsl.svg?branch=master)](https://travis-ci.org/bombsimon/wsl) +[![Coverage Status](https://coveralls.io/repos/github/bombsimon/wsl/badge.svg?branch=master)](https://coveralls.io/github/bombsimon/wsl?branch=master) + +WSL is a linter that enforces a very **non scientific** vision of how to make +code more readable by enforcing empty lines at the right places. + +I think too much code out there is to cuddly and a bit too warm for it's own +good, making it harder for other people to read and understand. The linter will +warn about newlines in and around blocks, in the beginning of files and other +places in the code. + +**I know this linter is aggressive** and a lot of projects I've tested it on +have failed miserably. For this linter to be useful at all I want to be open to +new ideas, configurations and discussions! Also note that some of the warnings +might be bugs or unintentional false positives so I would love an +[issue](https://github.com/bombsimon/wsl/issues/new) to fix, discuss, change or +make something configurable! + +## Installation + +### By `go get` (local installation) + +You can do that by using: + +```sh +go get -u github.com/bombsimon/wsl/cmd/... +``` + +### By golangci-lint (CI automation) + +`wsl` is already integrated with +[golangci-lint](https://github.com/golangci/golangci-lint). Please refer to the +instructions there. + +## Usage + +How to use depends on how you install `wsl`. + +### With local binary + +The general command format for `wsl` is: + +```sh +$ wsl [flags] [files...] +$ wsl [flags] + +# Examples + +$ wsl ./main.go +$ wsl --no-test ./main.go +$ wsl --allow-cuddle-declarations ./main.go +$ wsl --no-test --allow-cuddle-declaration ./main.go +$ wsl --no-test --allow-trailing-comment ./myProject/... +``` + +The "..." wildcard is not used like other `go` commands but instead can only +be to a relative or absolute path. + +By default, the linter will run on `./...` which means all go files in the +current path and all subsequent paths, including test files. To disable linting +test files, use `-n` or `--no-test`. + +### By `golangci-lint` (CI automation) + +The recommended command is: + +```sh +golangci-lint run --disable-all --enable wsl +``` + +For more information, please refer to +[golangci-lint](https://github.com/golangci/golangci-lint)'s documentation. + +## Issues and configuration + +The linter suppers a few ways to configure it to satisfy more than one kind of +code style. These settings could be set either with flags or with YAML +configuration if used via `golangci-lint`. + +The supported configuration can be found [in the documentation](doc/configuration.md). + +Below are the available checklist for any hit from `wsl`. If you do not see any, +feel free to raise an [issue](https://github.com/bombsimon/wsl/issues/new). + +> **Note**: this linter doesn't take in consideration the issues that will be +> fixed with `go fmt -s` so ensure that the code is properly formatted before +> use. 
+ +* [Anonymous switch statements should never be cuddled](doc/rules.md#anonymous-switch-statements-should-never-be-cuddled) +* [Append only allowed to cuddle with appended value](doc/rules.md#append-only-allowed-to-cuddle-with-appended-value) +* [Assignments should only be cuddled with other assignments](doc/rules.md#assignments-should-only-be-cuddled-with-other-assignments) +* [Block should not end with a whitespace (or comment)](doc/rules.md#block-should-not-end-with-a-whitespace-or-comment) +* [Block should not start with a whitespace](doc/rules.md#block-should-not-start-with-a-whitespace) +* [Case block should end with newline at this size](doc/rules.md#case-block-should-end-with-newline-at-this-size) +* [Branch statements should not be cuddled if block has more than two lines](doc/rules.md#branch-statements-should-not-be-cuddled-if-block-has-more-than-two-lines) +* [Declarations should never be cuddled](doc/rules.md#declarations-should-never-be-cuddled) +* [Defer statements should only be cuddled with expressions on same variable](doc/rules.md#defer-statements-should-only-be-cuddled-with-expressions-on-same-variable) +* [Expressions should not be cuddled with blocks](doc/rules.md#expressions-should-not-be-cuddled-with-blocks) +* [Expressions should not be cuddled with declarations or returns](doc/rules.md#expressions-should-not-be-cuddled-with-declarations-or-returns) +* [For statement without condition should never be cuddled](doc/rules.md#for-statement-without-condition-should-never-be-cuddled) +* [For statements should only be cuddled with assignments used in the iteration](doc/rules.md#for-statements-should-only-be-cuddled-with-assignments-used-in-the-iteration) +* [Go statements can only invoke functions assigned on line above](doc/rules.md#go-statements-can-only-invoke-functions-assigned-on-line-above) +* [If statements should only be cuddled with assignments](doc/rules.md#if-statements-should-only-be-cuddled-with-assignments) +* [If statements should only be cuddled with assignments used in the if + statement + itself](doc/rules.md#if-statements-should-only-be-cuddled-with-assignments-used-in-the-if-statement-itself) +* [If statements that check an error must be cuddled with the statement that assigned the error](doc/rules.md#if-statements-that-check-an-error-must-be-cuddled-with-the-statement-that-assigned-the-error) +* [Only cuddled expressions if assigning variable or using from line + above](doc/rules.md#only-cuddled-expressions-if-assigning-variable-or-using-from-line-above) +* [Only one cuddle assignment allowed before defer statement](doc/rules.md#only-one-cuddle-assignment-allowed-before-defer-statement) +* [Only one cuddle assginment allowed before for statement](doc/rules.md#only-one-cuddle-assignment-allowed-before-for-statement) +* [Only one cuddle assignment allowed before go statement](doc/rules.md#only-one-cuddle-assignment-allowed-before-go-statement) +* [Only one cuddle assignment allowed before if statement](doc/rules.md#only-one-cuddle-assignment-allowed-before-if-statement) +* [Only one cuddle assignment allowed before range statement](doc/rules.md#only-one-cuddle-assignment-allowed-before-range-statement) +* [Only one cuddle assignment allowed before switch statement](doc/rules.md#only-one-cuddle-assignment-allowed-before-switch-statement) +* [Only one cuddle assignment allowed before type switch statement](doc/rules.md#only-one-cuddle-assignment-allowed-before-type-switch-statement) +* [Ranges should only be cuddled with assignments used in the 
iteration](doc/rules.md#ranges-should-only-be-cuddled-with-assignments-used-in-the-iteration) +* [Return statements should not be cuddled if block has more than two lines](doc/rules.md#return-statements-should-not-be-cuddled-if-block-has-more-than-two-lines) +* [Short declarations should cuddle only with other short declarations](doc/rules.md#short-declaration-should-cuddle-only-with-other-short-declarations) +* [Switch statements should only be cuddled with variables switched](doc/rules.md#switch-statements-should-only-be-cuddled-with-variables-switched) +* [Type switch statements should only be cuddled with variables switched](doc/rules.md#type-switch-statements-should-only-be-cuddled-with-variables-switched) diff --git a/vendor/github.com/bombsimon/wsl/v3/go.mod b/vendor/github.com/bombsimon/wsl/v3/go.mod new file mode 100644 index 000000000..0c325eda1 --- /dev/null +++ b/vendor/github.com/bombsimon/wsl/v3/go.mod @@ -0,0 +1,12 @@ +module github.com/bombsimon/wsl/v3 + +go 1.12 + +require ( + github.com/davecgh/go-spew v1.1.1 // indirect + github.com/kr/text v0.2.0 // indirect + github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e // indirect + github.com/stretchr/testify v1.5.1 + gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f // indirect + gopkg.in/yaml.v2 v2.2.8 // indirect +) diff --git a/vendor/github.com/bombsimon/wsl/v3/go.sum b/vendor/github.com/bombsimon/wsl/v3/go.sum new file mode 100644 index 000000000..3bdb59247 --- /dev/null +++ b/vendor/github.com/bombsimon/wsl/v3/go.sum @@ -0,0 +1,25 @@ +github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= +github.com/davecgh/go-spew v1.1.0 h1:ZDRjVQ15GmhC3fiQ8ni8+OwkZQO4DARzQgrnXU1Liz8= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= +github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE= +github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= +github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= +github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= +github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e h1:fD57ERR4JtEqsWbfPhv4DMiApHyliiK5xCTNVSPiaAs= +github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/testify v1.5.1 h1:nOGnQDM7FYENwehXlg/kFVnos3rEvtKTjRvOWSzb6H4= +github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f h1:BLraFXnmrev5lT+xlilqcH8XK9/i0At2xKjWk4p6zsU= +gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/yaml.v2 v2.2.2 h1:ZCJp+EgiOT7lHqUV2J862kp8Qj64Jo6az82+3Td9dZw= +gopkg.in/yaml.v2 
v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.8 h1:obN1ZagJSUGI0Ek/LBmuj4SNLPfIny3KsKFopxRdj10= +gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= diff --git a/vendor/github.com/bombsimon/wsl/v3/wsl.go b/vendor/github.com/bombsimon/wsl/v3/wsl.go new file mode 100644 index 000000000..313b52787 --- /dev/null +++ b/vendor/github.com/bombsimon/wsl/v3/wsl.go @@ -0,0 +1,1247 @@ +package wsl + +import ( + "fmt" + "go/ast" + "go/parser" + "go/token" + "io/ioutil" + "reflect" + "strings" +) + +// Error reason strings +const ( + reasonMustCuddleErrCheck = "if statements that check an error must be cuddled with the statement that assigned the error" + reasonOnlyCuddleIfWithAssign = "if statements should only be cuddled with assignments" + reasonOnlyOneCuddle = "only one cuddle assignment allowed before if statement" + reasonOnlyCuddleWithUsedAssign = "if statements should only be cuddled with assignments used in the if statement itself" + reasonOnlyCuddle2LineReturn = "return statements should not be cuddled if block has more than two lines" + reasonMultiLineBranchCuddle = "branch statements should not be cuddled if block has more than two lines" + reasonAppendCuddledWithoutUse = "append only allowed to cuddle with appended value" + reasonAssignsCuddleAssign = "assignments should only be cuddled with other assignments" + reasonNeverCuddleDeclare = "declarations should never be cuddled" + reasonExpressionCuddledWithDeclOrRet = "expressions should not be cuddled with declarations or returns" + reasonExpressionCuddledWithBlock = "expressions should not be cuddled with blocks" + reasonExprCuddlingNonAssignedVar = "only cuddled expressions if assigning variable or using from line above" + reasonOneCuddleBeforeRange = "only one cuddle assignment allowed before range statement" + reasonRangeCuddledWithoutUse = "ranges should only be cuddled with assignments used in the iteration" + reasonOneCuddleBeforeDefer = "only one cuddle assignment allowed before defer statement" + reasonDeferCuddledWithOtherVar = "defer statements should only be cuddled with expressions on same variable" + reasonForWithoutCondition = "for statement without condition should never be cuddled" + reasonForWithMoreThanOneCuddle = "only one cuddle assignment allowed before for statement" + reasonForCuddledAssignWithoutUse = "for statements should only be cuddled with assignments used in the iteration" + reasonOneCuddleBeforeGo = "only one cuddle assignment allowed before go statement" + reasonGoFuncWithoutAssign = "go statements can only invoke functions assigned on line above" + reasonSwitchManyCuddles = "only one cuddle assignment allowed before switch statement" + reasonAnonSwitchCuddled = "anonymous switch statements should never be cuddled" + reasonSwitchCuddledWithoutUse = "switch statements should only be cuddled with variables switched" + reasonTypeSwitchTooCuddled = "only one cuddle assignment allowed before type switch statement" + reasonTypeSwitchCuddledWithoutUse = "type switch statements should only be cuddled with variables switched" + reasonBlockStartsWithWS = "block should not start with a whitespace" + reasonBlockEndsWithWS = "block should not end with a whitespace (or comment)" + reasonCaseBlockTooCuddly = "case block should end with newline at this size" + reasonShortDeclNotExclusive = "short declaration should cuddle only with other short declarations" +) + +// Warning strings +const ( + warnTypeNotImplement = "type not implemented" + 
warnStmtNotImplemented = "stmt type not implemented" + warnBodyStmtTypeNotImplemented = "body statement type not implemented " + warnWSNodeTypeNotImplemented = "whitespace node type not implemented " + warnUnknownLHS = "UNKNOWN LHS" + warnUnknownRHS = "UNKNOWN RHS" +) + +type Configuration struct { + // StrictAppend will do strict checking when assigning from append (x = + // append(x, y)). If this is set to true the append call must append either + // a variable assigned, called or used on the line above. Example on not + // allowed when this is true: + // + // x := []string{} + // y := "not going in X" + // x = append(x, "not y") // This is not allowed with StrictAppend + // z := "going in X" + // + // x = append(x, z) // This is allowed with StrictAppend + // + // m := transform(z) + // x = append(x, z) // So is this because Z is used above. + StrictAppend bool + + // AllowAssignAndCallCuddle allows assignments to be cuddled with variables + // used in calls on line above and calls to be cuddled with assignments of + // variables used in call on line above. + // Example supported with this set to true: + // + // x.Call() + // x = Assign() + // x.AnotherCall() + // x = AnotherAssign() + AllowAssignAndCallCuddle bool + + // AllowAssignAndCallCuddle allows assignments to be cuddled with anything. + // Example supported with this set to true: + // if x == 1 { + // x = 0 + // } + // z := x + 2 + // fmt.Println("x") + // y := "x" + AllowAssignAndAnythingCuddle bool + + // AllowMultiLineAssignCuddle allows cuddling to assignments even if they + // span over multiple lines. This defaults to true which allows the + // following example: + // + // err := function( + // "multiple", "lines", + // ) + // if err != nil { + // // ... + // } + AllowMultiLineAssignCuddle bool + + // If the number of lines in a case block is equal to or lager than this + // number, the case *must* end white a newline. + ForceCaseTrailingWhitespaceLimit int + + // AllowTrailingComment will allow blocks to end with comments. + AllowTrailingComment bool + + // AllowSeparatedLeadingComment will allow multiple comments in the + // beginning of a block separated with newline. Example: + // func () { + // // Comment one + // + // // Comment two + // fmt.Println("x") + // } + AllowSeparatedLeadingComment bool + + // AllowCuddleDeclaration will allow multiple var/declaration statements to + // be cuddled. This defaults to false but setting it to true will enable the + // following example: + // var foo bool + // var err error + AllowCuddleDeclaration bool + + // AllowCuddleWithCalls is a list of call idents that everything can be + // cuddled with. Defaults to calls looking like locks to support a flow like + // this: + // + // mu.Lock() + // allow := thisAssignment + AllowCuddleWithCalls []string + + // AllowCuddleWithRHS is a list of right hand side variables that is allowed + // to be cuddled with anything. Defaults to assignments or calls looking + // like unlocks to support a flow like this: + // + // allow := thisAssignment() + // mu.Unlock() + AllowCuddleWithRHS []string + + // ForceCuddleErrCheckAndAssign will cause an error when an If statement that + // checks an error variable doesn't cuddle with the assignment of that variable. 
+ // This defaults to false but setting it to true will cause the following + // to generate an error: + // + // err := ProduceError() + // + // if err != nil { + // return err + // } + ForceCuddleErrCheckAndAssign bool + + // When ForceCuddleErrCheckAndAssign is enabled this is a list of names + // used for error variables to check for in the conditional. + // Defaults to just "err" + ErrorVariableNames []string + + // ForceExclusiveShortDeclarations will cause an error if a short declaration + // (:=) cuddles with anything other than another short declaration. For example + // + // a := 2 + // b := 3 + // + // is allowed, but + // + // a := 2 + // b = 3 + // + // is not allowed. This logic overrides ForceCuddleErrCheckAndAssign among others. + ForceExclusiveShortDeclarations bool +} + +// DefaultConfig returns default configuration +func DefaultConfig() Configuration { + return Configuration{ + StrictAppend: true, + AllowAssignAndCallCuddle: true, + AllowAssignAndAnythingCuddle: false, + AllowMultiLineAssignCuddle: true, + AllowTrailingComment: false, + AllowSeparatedLeadingComment: false, + ForceCuddleErrCheckAndAssign: false, + ForceExclusiveShortDeclarations: false, + ForceCaseTrailingWhitespaceLimit: 0, + AllowCuddleWithCalls: []string{"Lock", "RLock"}, + AllowCuddleWithRHS: []string{"Unlock", "RUnlock"}, + ErrorVariableNames: []string{"err"}, + } +} + +// Result represents the result of one error. +type Result struct { + FileName string + LineNumber int + Position token.Position + Reason string +} + +// String returns the filename, line number and reason of a Result. +func (r *Result) String() string { + return fmt.Sprintf("%s:%d: %s", r.FileName, r.LineNumber, r.Reason) +} + +type Processor struct { + config Configuration + result []Result + warnings []string + fileSet *token.FileSet + file *ast.File +} + +// NewProcessor will create a Processor. +func NewProcessorWithConfig(cfg Configuration) *Processor { + return &Processor{ + result: []Result{}, + config: cfg, + } +} + +// NewProcessor will create a Processor. +func NewProcessor() *Processor { + return NewProcessorWithConfig(DefaultConfig()) +} + +// ProcessFiles takes a string slice with file names (full paths) and lints +// them. +// nolint: gocritic +func (p *Processor) ProcessFiles(filenames []string) ([]Result, []string) { + for _, filename := range filenames { + data, err := ioutil.ReadFile(filename) + if err != nil { + panic(err) + } + + p.process(filename, data) + } + + return p.result, p.warnings +} + +func (p *Processor) process(filename string, data []byte) { + fileSet := token.NewFileSet() + file, err := parser.ParseFile(fileSet, filename, data, parser.ParseComments) + + // If the file is not parsable let's add a syntax error and move on. + if err != nil { + p.result = append(p.result, Result{ + FileName: filename, + LineNumber: 0, + Reason: fmt.Sprintf("invalid syntax, file cannot be linted (%s)", err.Error()), + }) + + return + } + + p.fileSet = fileSet + p.file = file + + for _, d := range p.file.Decls { + switch v := d.(type) { + case *ast.FuncDecl: + p.parseBlockBody(v.Name, v.Body) + case *ast.GenDecl: + // `go fmt` will handle proper spacing for GenDecl such as imports, + // constants etc. + default: + p.addWarning(warnTypeNotImplement, d.Pos(), v) + } + } +} + +// parseBlockBody will parse any kind of block statements such as switch cases +// and if statements. A list of Result is returned. +func (p *Processor) parseBlockBody(ident *ast.Ident, block *ast.BlockStmt) { + // Nothing to do if there's no value. 
+ if reflect.ValueOf(block).IsNil() { + return + } + + // Start by finding leading and trailing whitespaces. + p.findLeadingAndTrailingWhitespaces(ident, block, nil) + + // Parse the block body contents. + p.parseBlockStatements(block.List) +} + +// parseBlockStatements will parse all the statements found in the body of a +// node. A list of Result is returned. +// nolint: gocognit +func (p *Processor) parseBlockStatements(statements []ast.Stmt) { + for i, stmt := range statements { + // Start by checking if this statement is another block (other than if, + // for and range). This could be assignment to a function, defer or go + // call with an inline function or similar. If this is found we start by + // parsing this body block before moving on. + for _, stmtBlocks := range p.findBlockStmt(stmt) { + p.parseBlockBody(nil, stmtBlocks) + } + + firstBodyStatement := p.firstBodyStatement(i, statements) + + // First statement, nothing to do. + if i == 0 { + continue + } + + previousStatement := statements[i-1] + previousStatementIsMultiline := p.nodeStart(previousStatement) != p.nodeEnd(previousStatement) + cuddledWithLastStmt := p.nodeEnd(previousStatement) == p.nodeStart(stmt)-1 + + // If we're not cuddled and we don't need to enforce err-check cuddling + // then we can bail out here + if !cuddledWithLastStmt && !p.config.ForceCuddleErrCheckAndAssign { + continue + } + + // We don't force error cuddling for multilines. (#86) + if p.config.ForceCuddleErrCheckAndAssign && previousStatementIsMultiline && !cuddledWithLastStmt { + continue + } + + // Extract assigned variables on the line above + // which is the only thing we allow cuddling with. If the assignment is + // made over multiple lines we should not allow cuddling. + var assignedOnLineAbove []string + + // We want to keep track of what was called on the line above to support + // special handling of things such as mutexes. + var calledOnLineAbove []string + + // Check if the previous statement spans over multiple lines. + var cuddledWithMultiLineAssignment = cuddledWithLastStmt && p.nodeStart(previousStatement) != p.nodeStart(stmt)-1 + + // Ensure previous line is not a multi line assignment and if not get + // rightAndLeftHandSide assigned variables. + if !cuddledWithMultiLineAssignment { + assignedOnLineAbove = p.findLHS(previousStatement) + calledOnLineAbove = p.findRHS(previousStatement) + } + + // If previous assignment is multi line and we allow it, fetch + // assignments (but only assignments). + if cuddledWithMultiLineAssignment && p.config.AllowMultiLineAssignCuddle { + if _, ok := previousStatement.(*ast.AssignStmt); ok { + assignedOnLineAbove = p.findLHS(previousStatement) + } + } + + // We could potentially have a block which require us to check the first + // argument before ruling out an allowed cuddle. + var calledOrAssignedFirstInBlock []string + + if firstBodyStatement != nil { + calledOrAssignedFirstInBlock = append(p.findLHS(firstBodyStatement), p.findRHS(firstBodyStatement)...) + } + + var ( + leftHandSide = p.findLHS(stmt) + rightHandSide = p.findRHS(stmt) + rightAndLeftHandSide = append(leftHandSide, rightHandSide...) + calledOrAssignedOnLineAbove = append(calledOnLineAbove, assignedOnLineAbove...) + ) + + // If we called some kind of lock on the line above we allow cuddling + // anything. + if atLeastOneInListsMatch(calledOnLineAbove, p.config.AllowCuddleWithCalls) { + continue + } + + // If we call some kind of unlock on this line we allow cuddling with + // anything. 
+ if atLeastOneInListsMatch(rightHandSide, p.config.AllowCuddleWithRHS) { + continue + } + + moreThanOneStatementAbove := func() bool { + if i < 2 { + return false + } + + statementBeforePreviousStatement := statements[i-2] + + return p.nodeStart(previousStatement)-1 == p.nodeEnd(statementBeforePreviousStatement) + } + + isLastStatementInBlockOfOnlyTwoLines := func() bool { + // If we're the last statement, check if there's no more than two + // lines from the starting statement and the end of this statement. + // This is to support short return functions such as: + // func (t *Typ) X() { + // t.X = true + // return t + // } + // nolint: gocritic + if i == len(statements)-1 && i == 1 { + if p.nodeEnd(stmt)-p.nodeStart(previousStatement) <= 2 { + return true + } + } + + return false + } + + // If it's a short declaration we should not cuddle with anything else + // if ForceExclusiveShortDeclarations is set on; either this or the + // previous statement could be the short decl, so we'll find out which + // it was and use *that* statement's position + if p.config.ForceExclusiveShortDeclarations && cuddledWithLastStmt { + if p.isShortDecl(stmt) && !p.isShortDecl(previousStatement) { + p.addError(stmt.Pos(), reasonShortDeclNotExclusive) + } else if p.isShortDecl(previousStatement) && !p.isShortDecl(stmt) { + p.addError(previousStatement.Pos(), reasonShortDeclNotExclusive) + } + } + + // If it's not an if statement and we're not cuddled move on. The only + // reason we need to keep going for if statements is to check if we + // should be cuddled with an error check. + if _, ok := stmt.(*ast.IfStmt); !ok { + if !cuddledWithLastStmt { + continue + } + } + + switch t := stmt.(type) { + case *ast.IfStmt: + checkingErrInitializedInline := func() bool { + if t.Init == nil { + return false + } + + // Variables were initialized inline in the if statement + // Let's make sure it's the err just to be safe + return atLeastOneInListsMatch(p.findLHS(t.Init), p.config.ErrorVariableNames) + } + + if !cuddledWithLastStmt { + checkingErr := atLeastOneInListsMatch(rightAndLeftHandSide, p.config.ErrorVariableNames) + if checkingErr { + // We only want to enforce cuddling error checks if the + // error was assigned on the line above. See + // https://github.com/bombsimon/wsl/issues/78. + // This is needed since `assignedOnLineAbove` is not + // actually just assignments but everything from LHS in the + // previous statement. This means that if previous line was + // `if err ...`, `err` will now be in the list + // `assignedOnLineAbove`. 
+ if _, ok := previousStatement.(*ast.AssignStmt); !ok { + continue + } + + if checkingErrInitializedInline() { + continue + } + + if atLeastOneInListsMatch(assignedOnLineAbove, p.config.ErrorVariableNames) { + p.addError(t.Pos(), reasonMustCuddleErrCheck) + } + } + + continue + } + + if len(assignedOnLineAbove) == 0 { + p.addError(t.Pos(), reasonOnlyCuddleIfWithAssign) + continue + } + + if moreThanOneStatementAbove() { + p.addError(t.Pos(), reasonOnlyOneCuddle) + continue + } + + if atLeastOneInListsMatch(rightAndLeftHandSide, assignedOnLineAbove) { + continue + } + + if atLeastOneInListsMatch(assignedOnLineAbove, calledOrAssignedFirstInBlock) { + continue + } + + p.addError(t.Pos(), reasonOnlyCuddleWithUsedAssign) + case *ast.ReturnStmt: + if isLastStatementInBlockOfOnlyTwoLines() { + continue + } + + p.addError(t.Pos(), reasonOnlyCuddle2LineReturn) + case *ast.BranchStmt: + if isLastStatementInBlockOfOnlyTwoLines() { + continue + } + + p.addError(t.Pos(), reasonMultiLineBranchCuddle) + case *ast.AssignStmt: + // append is usually an assignment but should not be allowed to be + // cuddled with anything not appended. + if len(rightHandSide) > 0 && rightHandSide[len(rightHandSide)-1] == "append" { + if p.config.StrictAppend { + if !atLeastOneInListsMatch(calledOrAssignedOnLineAbove, rightHandSide) { + p.addError(t.Pos(), reasonAppendCuddledWithoutUse) + } + } + + continue + } + + if _, ok := previousStatement.(*ast.AssignStmt); ok { + continue + } + + if p.config.AllowAssignAndAnythingCuddle { + continue + } + + if _, ok := previousStatement.(*ast.DeclStmt); ok && p.config.AllowCuddleDeclaration { + continue + } + + // If the assignment is from a type or variable called on the line + // above we can allow it by setting AllowAssignAndCallCuddle to + // true. + // Example (x is used): + // x.function() + // a.Field = x.anotherFunction() + if p.config.AllowAssignAndCallCuddle { + if atLeastOneInListsMatch(calledOrAssignedOnLineAbove, rightAndLeftHandSide) { + continue + } + } + + p.addError(t.Pos(), reasonAssignsCuddleAssign) + case *ast.DeclStmt: + if !p.config.AllowCuddleDeclaration { + p.addError(t.Pos(), reasonNeverCuddleDeclare) + } + case *ast.ExprStmt: + switch previousStatement.(type) { + case *ast.DeclStmt, *ast.ReturnStmt: + if p.config.AllowAssignAndCallCuddle && p.config.AllowCuddleDeclaration { + continue + } + + p.addError(t.Pos(), reasonExpressionCuddledWithDeclOrRet) + case *ast.IfStmt, *ast.RangeStmt, *ast.SwitchStmt: + p.addError(t.Pos(), reasonExpressionCuddledWithBlock) + } + + // If the expression is called on a type or variable used or + // assigned on the line we can allow it by setting + // AllowAssignAndCallCuddle to true. + // Example of allowed cuddled (x is used): + // a.Field = x.func() + // x.function() + if p.config.AllowAssignAndCallCuddle { + if atLeastOneInListsMatch(calledOrAssignedOnLineAbove, rightAndLeftHandSide) { + continue + } + } + + // If we assigned variables on the line above but didn't use them in + // this expression there should probably be a newline between them. 
+ if len(assignedOnLineAbove) > 0 && !atLeastOneInListsMatch(rightAndLeftHandSide, assignedOnLineAbove) { + p.addError(t.Pos(), reasonExprCuddlingNonAssignedVar) + } + case *ast.RangeStmt: + if moreThanOneStatementAbove() { + p.addError(t.Pos(), reasonOneCuddleBeforeRange) + continue + } + + if !atLeastOneInListsMatch(rightAndLeftHandSide, assignedOnLineAbove) { + if !atLeastOneInListsMatch(assignedOnLineAbove, calledOrAssignedFirstInBlock) { + p.addError(t.Pos(), reasonRangeCuddledWithoutUse) + } + } + case *ast.DeferStmt: + if _, ok := previousStatement.(*ast.DeferStmt); ok { + // We may cuddle multiple defers to group logic. + continue + } + + // Special treatment of deferring body closes after error checking + // according to best practices. See + // https://github.com/bombsimon/wsl/issues/31 which links to + // discussion about error handling after HTTP requests. This is hard + // coded and very specific but for now this is to be seen as a + // special case. What this does is that it *only* allows a defer + // statement with `Close` on the right hand side to be cuddled with + // an if-statement to support this: + // resp, err := client.Do(req) + // if err != nil { + // return err + // } + // defer resp.Body.Close() + if _, ok := previousStatement.(*ast.IfStmt); ok { + if atLeastOneInListsMatch(rightHandSide, []string{"Close"}) { + continue + } + } + + if moreThanOneStatementAbove() { + p.addError(t.Pos(), reasonOneCuddleBeforeDefer) + + continue + } + + // Be extra nice with RHS, it's common to use this for locks: + // m.Lock() + // defer m.Unlock() + previousRHS := p.findRHS(previousStatement) + if atLeastOneInListsMatch(rightHandSide, previousRHS) { + continue + } + + // Allow use to cuddled defer func literals with usages on line + // abouve. Example: + // b := getB() + // defer func() { + // makesSenseToUse(b) + // }() + if c, ok := t.Call.Fun.(*ast.FuncLit); ok { + funcLitFirstStmt := append(p.findLHS(c.Body), p.findRHS(c.Body)...) + + if atLeastOneInListsMatch(assignedOnLineAbove, funcLitFirstStmt) { + continue + } + } + + if atLeastOneInListsMatch(assignedOnLineAbove, calledOrAssignedFirstInBlock) { + continue + } + + if !atLeastOneInListsMatch(rightAndLeftHandSide, assignedOnLineAbove) { + p.addError(t.Pos(), reasonDeferCuddledWithOtherVar) + } + case *ast.ForStmt: + if len(rightAndLeftHandSide) == 0 { + p.addError(t.Pos(), reasonForWithoutCondition) + + continue + } + + if moreThanOneStatementAbove() { + p.addError(t.Pos(), reasonForWithMoreThanOneCuddle) + + continue + } + + // The same rule applies for ranges as for if statements, see + // comments regarding variable usages on the line before or as the + // first line in the block for details. 
+ if !atLeastOneInListsMatch(rightAndLeftHandSide, assignedOnLineAbove) { + if !atLeastOneInListsMatch(assignedOnLineAbove, calledOrAssignedFirstInBlock) { + p.addError(t.Pos(), reasonForCuddledAssignWithoutUse) + } + } + case *ast.GoStmt: + if _, ok := previousStatement.(*ast.GoStmt); ok { + continue + } + + if moreThanOneStatementAbove() { + p.addError(t.Pos(), reasonOneCuddleBeforeGo) + + continue + } + + if !atLeastOneInListsMatch(rightAndLeftHandSide, assignedOnLineAbove) { + p.addError(t.Pos(), reasonGoFuncWithoutAssign) + } + case *ast.SwitchStmt: + if moreThanOneStatementAbove() { + p.addError(t.Pos(), reasonSwitchManyCuddles) + + continue + } + + if !atLeastOneInListsMatch(rightAndLeftHandSide, assignedOnLineAbove) { + if len(rightAndLeftHandSide) == 0 { + p.addError(t.Pos(), reasonAnonSwitchCuddled) + } else { + p.addError(t.Pos(), reasonSwitchCuddledWithoutUse) + } + } + case *ast.TypeSwitchStmt: + if moreThanOneStatementAbove() { + p.addError(t.Pos(), reasonTypeSwitchTooCuddled) + + continue + } + + // Allowed to type assert on variable assigned on line above. + if !atLeastOneInListsMatch(rightHandSide, assignedOnLineAbove) { + // Allow type assertion on variables used in the first case + // immediately. + if !atLeastOneInListsMatch(assignedOnLineAbove, calledOrAssignedFirstInBlock) { + p.addError(t.Pos(), reasonTypeSwitchCuddledWithoutUse) + } + } + case *ast.CaseClause, *ast.CommClause: + // Case clauses will be checked by not allowing leading ot trailing + // whitespaces within the block. There's nothing in the case itself + // that may be cuddled. + default: + p.addWarning(warnStmtNotImplemented, t.Pos(), t) + } + } +} + +// firstBodyStatement returns the first statement inside a body block. This is +// because variables may be cuddled with conditions or statements if it's used +// directly as the first argument inside a body. +// The body will then be parsed as a *ast.BlockStmt (regular block) or as a list +// of []ast.Stmt (case block). +func (p *Processor) firstBodyStatement(i int, allStmt []ast.Stmt) ast.Node { + stmt := allStmt[i] + + // Start by checking if the statement has a body (probably if-statement, + // a range, switch case or similar. Whenever a body is found we start by + // parsing it before moving on in the AST. + statementBody := reflect.Indirect(reflect.ValueOf(stmt)).FieldByName("Body") + + // Some cases allow cuddling depending on the first statement in a body + // of a block or case. If possible extract the first statement. + var firstBodyStatement ast.Node + + if !statementBody.IsValid() { + return firstBodyStatement + } + + switch statementBodyContent := statementBody.Interface().(type) { + case *ast.BlockStmt: + if len(statementBodyContent.List) > 0 { + firstBodyStatement = statementBodyContent.List[0] + + // If the first body statement is a *ast.CaseClause we're + // actually interested in the **next** body to know what's + // inside the first case. + if x, ok := firstBodyStatement.(*ast.CaseClause); ok { + if len(x.Body) > 0 { + firstBodyStatement = x.Body[0] + } + } + } + + p.parseBlockBody(nil, statementBodyContent) + case []ast.Stmt: + // The Body field for an *ast.CaseClause or *ast.CommClause is of type + // []ast.Stmt. We must check leading and trailing whitespaces and then + // pass the statements to parseBlockStatements to parse it's content. + var nextStatement ast.Node + + // Check if there's more statements (potential cases) after the + // current one. 
+ if len(allStmt)-1 > i { + nextStatement = allStmt[i+1] + } + + p.findLeadingAndTrailingWhitespaces(nil, stmt, nextStatement) + p.parseBlockStatements(statementBodyContent) + default: + p.addWarning( + warnBodyStmtTypeNotImplemented, + stmt.Pos(), statementBodyContent, + ) + } + + return firstBodyStatement +} + +func (p *Processor) findLHS(node ast.Node) []string { + var lhs []string + + if node == nil { + return lhs + } + + switch t := node.(type) { + case *ast.BasicLit, *ast.FuncLit, *ast.SelectStmt, + *ast.LabeledStmt, *ast.ForStmt, *ast.SwitchStmt, + *ast.ReturnStmt, *ast.GoStmt, *ast.CaseClause, + *ast.CommClause, *ast.CallExpr, *ast.UnaryExpr, + *ast.BranchStmt, *ast.TypeSpec, *ast.ChanType, + *ast.DeferStmt, *ast.TypeAssertExpr, *ast.RangeStmt: + // Nothing to add to LHS + case *ast.IncDecStmt: + return p.findLHS(t.X) + case *ast.Ident: + return []string{t.Name} + case *ast.AssignStmt: + for _, v := range t.Lhs { + lhs = append(lhs, p.findLHS(v)...) + } + case *ast.GenDecl: + for _, v := range t.Specs { + lhs = append(lhs, p.findLHS(v)...) + } + case *ast.ValueSpec: + for _, v := range t.Names { + lhs = append(lhs, p.findLHS(v)...) + } + case *ast.BlockStmt: + for _, v := range t.List { + lhs = append(lhs, p.findLHS(v)...) + } + case *ast.BinaryExpr: + return append( + p.findLHS(t.X), + p.findLHS(t.Y)..., + ) + case *ast.DeclStmt: + return p.findLHS(t.Decl) + case *ast.IfStmt: + return p.findLHS(t.Cond) + case *ast.TypeSwitchStmt: + return p.findLHS(t.Assign) + case *ast.SendStmt: + return p.findLHS(t.Chan) + default: + if x, ok := maybeX(t); ok { + return p.findLHS(x) + } + + p.addWarning(warnUnknownLHS, t.Pos(), t) + } + + return lhs +} + +func (p *Processor) findRHS(node ast.Node) []string { + var rhs []string + + if node == nil { + return rhs + } + + switch t := node.(type) { + case *ast.BasicLit, *ast.SelectStmt, *ast.ChanType, + *ast.LabeledStmt, *ast.DeclStmt, *ast.BranchStmt, + *ast.TypeSpec, *ast.ArrayType, *ast.CaseClause, + *ast.CommClause, *ast.KeyValueExpr, *ast.MapType, + *ast.FuncLit: + // Nothing to add to RHS + case *ast.Ident: + return []string{t.Name} + case *ast.SelectorExpr: + // TODO: Should this be RHS? + // t.X is needed for defer as of now and t.Sel needed for special + // functions such as Lock() + rhs = p.findRHS(t.X) + rhs = append(rhs, p.findRHS(t.Sel)...) + case *ast.AssignStmt: + for _, v := range t.Rhs { + rhs = append(rhs, p.findRHS(v)...) + } + case *ast.CallExpr: + for _, v := range t.Args { + rhs = append(rhs, p.findRHS(v)...) + } + + rhs = append(rhs, p.findRHS(t.Fun)...) + case *ast.CompositeLit: + for _, v := range t.Elts { + rhs = append(rhs, p.findRHS(v)...) + } + case *ast.IfStmt: + rhs = append(rhs, p.findRHS(t.Cond)...) + rhs = append(rhs, p.findRHS(t.Init)...) + case *ast.BinaryExpr: + return append( + p.findRHS(t.X), + p.findRHS(t.Y)..., + ) + case *ast.TypeSwitchStmt: + return p.findRHS(t.Assign) + case *ast.ReturnStmt: + for _, v := range t.Results { + rhs = append(rhs, p.findRHS(v)...) + } + case *ast.BlockStmt: + for _, v := range t.List { + rhs = append(rhs, p.findRHS(v)...) + } + case *ast.SwitchStmt: + return p.findRHS(t.Tag) + case *ast.GoStmt: + return p.findRHS(t.Call) + case *ast.ForStmt: + return p.findRHS(t.Cond) + case *ast.DeferStmt: + return p.findRHS(t.Call) + case *ast.SendStmt: + return p.findLHS(t.Value) + case *ast.IndexExpr: + rhs = append(rhs, p.findRHS(t.Index)...) + rhs = append(rhs, p.findRHS(t.X)...) + case *ast.SliceExpr: + rhs = append(rhs, p.findRHS(t.X)...) + rhs = append(rhs, p.findRHS(t.Low)...) 
+ rhs = append(rhs, p.findRHS(t.High)...) + default: + if x, ok := maybeX(t); ok { + return p.findRHS(x) + } + + p.addWarning(warnUnknownRHS, t.Pos(), t) + } + + return rhs +} + +func (p *Processor) isShortDecl(node ast.Node) bool { + if t, ok := node.(*ast.AssignStmt); ok { + return t.Tok == token.DEFINE + } + + return false +} + +func (p *Processor) findBlockStmt(node ast.Node) []*ast.BlockStmt { + var blocks []*ast.BlockStmt + + switch t := node.(type) { + case *ast.AssignStmt: + for _, x := range t.Rhs { + blocks = append(blocks, p.findBlockStmt(x)...) + } + case *ast.CallExpr: + blocks = append(blocks, p.findBlockStmt(t.Fun)...) + case *ast.FuncLit: + blocks = append(blocks, t.Body) + case *ast.ExprStmt: + blocks = append(blocks, p.findBlockStmt(t.X)...) + case *ast.ReturnStmt: + for _, x := range t.Results { + blocks = append(blocks, p.findBlockStmt(x)...) + } + case *ast.DeferStmt: + blocks = append(blocks, p.findBlockStmt(t.Call)...) + case *ast.GoStmt: + blocks = append(blocks, p.findBlockStmt(t.Call)...) + } + + return blocks +} + +// maybeX extracts the X field from an AST node and returns it with a true value +// if it exists. If the node doesn't have an X field nil and false is returned. +// Known fields with X that are handled: +// IndexExpr, ExprStmt, SelectorExpr, StarExpr, ParentExpr, TypeAssertExpr, +// RangeStmt, UnaryExpr, ParenExpr, SliceExpr, IncDecStmt. +func maybeX(node interface{}) (ast.Node, bool) { + maybeHasX := reflect.Indirect(reflect.ValueOf(node)).FieldByName("X") + if !maybeHasX.IsValid() { + return nil, false + } + + n, ok := maybeHasX.Interface().(ast.Node) + if !ok { + return nil, false + } + + return n, true +} + +func atLeastOneInListsMatch(listOne, listTwo []string) bool { + sliceToMap := func(s []string) map[string]struct{} { + m := map[string]struct{}{} + + for _, v := range s { + m[v] = struct{}{} + } + + return m + } + + m1 := sliceToMap(listOne) + m2 := sliceToMap(listTwo) + + for k1 := range m1 { + if _, ok := m2[k1]; ok { + return true + } + } + + for k2 := range m2 { + if _, ok := m1[k2]; ok { + return true + } + } + + return false +} + +// findLeadingAndTrailingWhitespaces will find leading and trailing whitespaces +// in a node. The method takes comments in consideration which will make the +// parser more gentle. +// nolint: gocognit +func (p *Processor) findLeadingAndTrailingWhitespaces(ident *ast.Ident, stmt, nextStatement ast.Node) { + var ( + allowedLinesBeforeFirstStatement = 1 + commentMap = ast.NewCommentMap(p.fileSet, stmt, p.file.Comments) + blockStatements []ast.Stmt + blockStartLine int + blockEndLine int + blockStartPos token.Pos + blockEndPos token.Pos + ) + + // Depending on the block type, get the statements in the block and where + // the block starts (and ends). + switch t := stmt.(type) { + case *ast.BlockStmt: + blockStatements = t.List + blockStartPos = t.Lbrace + blockEndPos = t.Rbrace + case *ast.CaseClause: + blockStatements = t.Body + blockStartPos = t.Colon + case *ast.CommClause: + blockStatements = t.Body + blockStartPos = t.Colon + default: + p.addWarning(warnWSNodeTypeNotImplemented, stmt.Pos(), stmt) + + return + } + + // Ignore empty blocks even if they have newlines or just comments. + if len(blockStatements) < 1 { + return + } + + blockStartLine = p.fileSet.Position(blockStartPos).Line + blockEndLine = p.fileSet.Position(blockEndPos).Line + + // No whitespace possible if LBrace and RBrace is on the same line. 
+ if blockStartLine == blockEndLine { + return + } + + var ( + firstStatement = blockStatements[0] + lastStatement = blockStatements[len(blockStatements)-1] + seenCommentGroups = 0 + ) + + // Get the comment related to the first statement, we do allow commends in + // the beginning of a block before the first statement. + if c, ok := commentMap[firstStatement]; ok { + for _, commentGroup := range c { + // If the comment group is on the same line as the block start + // (LBrace) we should not consider it. + if p.nodeStart(commentGroup) == blockStartLine { + continue + } + + // We only care about comments before our statement from the comment + // map. As soon as we hit comments after our statement let's break + // out! + if commentGroup.Pos() > firstStatement.Pos() { + break + } + + // We store number of seen comment groups because we allow multiple + // groups with a newline between them; but if the first one has WS + // before it, we're not going to count it to force an error. + if p.config.AllowSeparatedLeadingComment { + cg := p.fileSet.Position(commentGroup.Pos()).Line + + if seenCommentGroups > 0 || cg == blockStartLine+1 { + seenCommentGroups++ + } + } else { + seenCommentGroups++ + } + + // Support both /* multiline */ and //single line comments + for _, c := range commentGroup.List { + allowedLinesBeforeFirstStatement += len(strings.Split(c.Text, "\n")) + } + } + } + + // If we allow separated comments, allow for a space after each group + if p.config.AllowSeparatedLeadingComment { + if seenCommentGroups > 1 { + allowedLinesBeforeFirstStatement += seenCommentGroups - 1 + } else if seenCommentGroups == 1 { + allowedLinesBeforeFirstStatement += 1 + } + } + + // And now if the first statement is passed the number of allowed lines, + // then we had extra WS, possibly before the first comment group. + if p.nodeStart(firstStatement) > blockStartLine+allowedLinesBeforeFirstStatement { + p.addError( + blockStartPos, + reasonBlockStartsWithWS, + ) + } + + // If the blockEndLine is not 0 we're a regular block (not case). + if blockEndLine != 0 { + if p.config.AllowTrailingComment { + if lastComment, ok := commentMap[lastStatement]; ok { + var ( + lastCommentGroup = lastComment[len(lastComment)-1] + lastCommentLine = lastCommentGroup.List[len(lastCommentGroup.List)-1] + countNewlines = 0 + ) + + countNewlines += len(strings.Split(lastCommentLine.Text, "\n")) + + // No newlines between trailing comments and end of block. + if p.nodeStart(lastCommentLine)+countNewlines != blockEndLine-1 { + return + } + } + } + + if p.nodeEnd(lastStatement) != blockEndLine-1 && !isExampleFunc(ident) { + p.addError(blockEndPos, reasonBlockEndsWithWS) + } + + return + } + + // If we don't have any nextStatement the trailing whitespace will be + // handled when parsing the switch. If we do have a next statement we can + // see where it starts by getting it's colon position. We set the end of the + // current case to the position of the next case. + switch n := nextStatement.(type) { + case *ast.CaseClause: + blockEndPos = n.Case + case *ast.CommClause: + blockEndPos = n.Case + default: + // No more cases + return + } + + blockEndLine = p.fileSet.Position(blockEndPos).Line - 1 + + var ( + blockSize = blockEndLine - blockStartLine + caseTrailingCommentLines int + ) + + // TODO: I don't know what comments are bound to in cases. For regular + // blocks the last comment is bound to the last statement but for cases + // they are bound to the case clause expression. 
This will however get us all + // comments and depending on the case expression this gets tricky. + // + // To handle this I get the comment map from the current statement (the case + // itself) and iterate through all groups and all comment within all groups. + // I then get the comments after the last statement but before the next case + // clause and just map each line of comment that way. + for _, commentGroups := range commentMap { + for _, commentGroup := range commentGroups { + for _, comment := range commentGroup.List { + commentLine := p.fileSet.Position(comment.Pos()).Line + + // Ignore comments before the last statement. + if commentLine <= p.nodeStart(lastStatement) { + continue + } + + // Ignore comments after the end of this case. + if commentLine > blockEndLine { + continue + } + + // This allows /* multiline */ comments with newlines as well + // as regular (//) ones + caseTrailingCommentLines += len(strings.Split(comment.Text, "\n")) + } + } + } + + hasTrailingWhitespace := p.nodeEnd(lastStatement)+caseTrailingCommentLines != blockEndLine + + // If the force trailing limit is configured and we don't end with a newline. + if p.config.ForceCaseTrailingWhitespaceLimit > 0 && !hasTrailingWhitespace { + // Check if the block size is too big to miss the newline. + if blockSize >= p.config.ForceCaseTrailingWhitespaceLimit { + p.addError(lastStatement.Pos(), reasonCaseBlockTooCuddly) + } + } +} + +func isExampleFunc(ident *ast.Ident) bool { + return ident != nil && strings.HasPrefix(ident.Name, "Example") +} + +func (p *Processor) nodeStart(node ast.Node) int { + return p.fileSet.Position(node.Pos()).Line +} + +func (p *Processor) nodeEnd(node ast.Node) int { + var line = p.fileSet.Position(node.End()).Line + + if isEmptyLabeledStmt(node) { + return p.fileSet.Position(node.Pos()).Line + } + + return line +} + +func isEmptyLabeledStmt(node ast.Node) bool { + v, ok := node.(*ast.LabeledStmt) + if !ok { + return false + } + + _, empty := v.Stmt.(*ast.EmptyStmt) + + return empty +} + +// Add an error for the file and line number for the current token.Pos with the +// given reason. +func (p *Processor) addError(pos token.Pos, reason string) { + position := p.fileSet.Position(pos) + + p.result = append(p.result, Result{ + FileName: position.Filename, + LineNumber: position.Line, + Position: position, + Reason: reason, + }) +} + +func (p *Processor) addWarning(w string, pos token.Pos, t interface{}) { + position := p.fileSet.Position(pos) + + p.warnings = append(p.warnings, + fmt.Sprintf("%s:%d: %s (%T)", position.Filename, position.Line, w, t), + ) +} diff --git a/vendor/github.com/charithe/durationcheck/.gitignore b/vendor/github.com/charithe/durationcheck/.gitignore new file mode 100644 index 000000000..c2b126a84 --- /dev/null +++ b/vendor/github.com/charithe/durationcheck/.gitignore @@ -0,0 +1 @@ +/durationcheck diff --git a/vendor/github.com/charithe/durationcheck/LICENSE b/vendor/github.com/charithe/durationcheck/LICENSE new file mode 100644 index 000000000..261eeb9e9 --- /dev/null +++ b/vendor/github.com/charithe/durationcheck/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. 
+ + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
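To make the appendix concrete (this example is editorial and not part of the vendored LICENSE file), the boilerplate notice enclosed in Go comment syntax would look like the following, with the bracketed fields left as placeholders:

```go
// Copyright [yyyy] [name of copyright owner]
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// Package example shows where the notice sits relative to the package clause.
package example
```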
diff --git a/vendor/github.com/charithe/durationcheck/Makefile b/vendor/github.com/charithe/durationcheck/Makefile new file mode 100644 index 000000000..8e2f81ae8 --- /dev/null +++ b/vendor/github.com/charithe/durationcheck/Makefile @@ -0,0 +1,5 @@ +build: + @GO111MODULE=on go build -ldflags '-s -w' -o durationcheck ./cmd/durationcheck/main.go + +install: + @GO111MODULE=on go install -ldflags '-s -w' ./cmd/durationcheck diff --git a/vendor/github.com/charithe/durationcheck/README.md b/vendor/github.com/charithe/durationcheck/README.md new file mode 100644 index 000000000..122edb745 --- /dev/null +++ b/vendor/github.com/charithe/durationcheck/README.md @@ -0,0 +1,48 @@ +[![CircleCI](https://circleci.com/gh/charithe/durationcheck.svg?style=svg)](https://circleci.com/gh/charithe/durationcheck) + + + +Duration Check +=============== + +A Go linter to detect cases where two `time.Duration` values are being multiplied in possibly erroneous ways. + +For example, consider the following (highly contrived) function: + +```go +func waitFor(someDuration time.Duration) { + timeToWait := someDuration * time.Second + time.Sleep(timeToWait) +} +``` + +Although the above code would compile without any errors, its runtime behaviour would almost certainly be incorrect. +A caller would reasonably expect `waitFor(5 * time.Second)` to wait for ~5 seconds but they would actually end up +waiting for ~1,388,889 hours (a five-second `time.Duration` is 5,000,000,000 nanoseconds, and multiplying it by `time.Second` scales it by another factor of 1,000,000,000). + +The above example is for illustration purposes only. The problem is glaringly obvious in such a simple function +and even the greenest Gopher would discover the issue immediately. However, imagine a much more complicated function +with many more lines; it is not inconceivable that such logic errors could go unnoticed. + +See the [test cases](testdata/src/a/a.go) for more examples of the types of errors detected by the linter. + + +Installation +------------- + +Requires Go 1.11 or above. + +``` +go get -u github.com/charithe/durationcheck/cmd/durationcheck +``` + +Usage +----- + +Invoke `durationcheck` with your package name: + +``` +durationcheck ./... +# or +durationcheck github.com/you/yourproject/...
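+# or, through golangci-lint (a possible invocation; assumes the durationcheck linter is enabled in .golangci.yml): +golangci-lint run --enable durationcheck ./...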
+``` diff --git a/vendor/github.com/charithe/durationcheck/durationcheck.go b/vendor/github.com/charithe/durationcheck/durationcheck.go new file mode 100644 index 000000000..7f7008e91 --- /dev/null +++ b/vendor/github.com/charithe/durationcheck/durationcheck.go @@ -0,0 +1,188 @@ +package durationcheck + +import ( + "bytes" + "fmt" + "go/ast" + "go/format" + "go/token" + "go/types" + "log" + "os" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/ast/inspector" +) + +var Analyzer = &analysis.Analyzer{ + Name: "durationcheck", + Doc: "check for two durations multiplied together", + Run: run, + Requires: []*analysis.Analyzer{inspect.Analyzer}, +} + +func run(pass *analysis.Pass) (interface{}, error) { + // if the package does not import time, it can be skipped from analysis + if !hasImport(pass.Pkg, "time") { + return nil, nil + } + + inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + + nodeTypes := []ast.Node{ + (*ast.BinaryExpr)(nil), + } + + inspect.Preorder(nodeTypes, check(pass)) + + return nil, nil +} + +func hasImport(pkg *types.Package, importPath string) bool { + for _, imp := range pkg.Imports() { + if imp.Path() == importPath { + return true + } + } + + return false +} + +// check contains the logic for checking that time.Duration is used correctly in the code being analysed +func check(pass *analysis.Pass) func(ast.Node) { + return func(node ast.Node) { + expr := node.(*ast.BinaryExpr) + // we are only interested in multiplication + if expr.Op != token.MUL { + return + } + + // get the types of the two operands + x, xOK := pass.TypesInfo.Types[expr.X] + y, yOK := pass.TypesInfo.Types[expr.Y] + + if !xOK || !yOK { + return + } + + if isDuration(x.Type) && isDuration(y.Type) { + // check that both sides are acceptable expressions + if isUnacceptableExpr(pass, expr.X) && isUnacceptableExpr(pass, expr.Y) { + pass.Reportf(expr.Pos(), "Multiplication of durations: `%s`", formatNode(expr)) + } + } + } +} + +func isDuration(x types.Type) bool { + return x.String() == "time.Duration" || x.String() == "*time.Duration" +} + +// isUnacceptableExpr returns true if the argument is not an acceptable time.Duration expression +func isUnacceptableExpr(pass *analysis.Pass, expr ast.Expr) bool { + switch e := expr.(type) { + case *ast.BasicLit: + return false + case *ast.Ident: + return !isAcceptableNestedExpr(pass, e) + case *ast.CallExpr: + return !isAcceptableCast(pass, e) + case *ast.BinaryExpr: + return !isAcceptableNestedExpr(pass, e) + case *ast.UnaryExpr: + return !isAcceptableNestedExpr(pass, e) + case *ast.SelectorExpr: + return !isAcceptableNestedExpr(pass, e) + case *ast.StarExpr: + return !isAcceptableNestedExpr(pass, e) + case *ast.ParenExpr: + return !isAcceptableNestedExpr(pass, e) + case *ast.IndexExpr: + return !isAcceptableNestedExpr(pass, e) + default: + return true + } +} + +// isAcceptableCast returns true if the argument is an acceptable expression cast to time.Duration +func isAcceptableCast(pass *analysis.Pass, e *ast.CallExpr) bool { + // check that there's a single argument + if len(e.Args) != 1 { + return false + } + + // check that the argument is acceptable + if !isAcceptableNestedExpr(pass, e.Args[0]) { + return false + } + + // check for time.Duration cast + selector, ok := e.Fun.(*ast.SelectorExpr) + if !ok { + return false + } + + return isDurationCast(selector) +} + +func isDurationCast(selector *ast.SelectorExpr) bool { + pkg, ok := selector.X.(*ast.Ident) + if !ok { + return false + } + 
+ if pkg.Name != "time" { + return false + } + + return selector.Sel.Name == "Duration" +} + +func isAcceptableNestedExpr(pass *analysis.Pass, n ast.Expr) bool { + switch e := n.(type) { + case *ast.BasicLit: + return true + case *ast.BinaryExpr: + return isAcceptableNestedExpr(pass, e.X) && isAcceptableNestedExpr(pass, e.Y) + case *ast.UnaryExpr: + return isAcceptableNestedExpr(pass, e.X) + case *ast.Ident: + return isAcceptableIdent(pass, e) + case *ast.CallExpr: + t := pass.TypesInfo.TypeOf(e) + return !isDuration(t) + case *ast.SelectorExpr: + return isAcceptableNestedExpr(pass, e.X) && isAcceptableIdent(pass, e.Sel) + case *ast.StarExpr: + return isAcceptableNestedExpr(pass, e.X) + case *ast.ParenExpr: + return isAcceptableNestedExpr(pass, e.X) + case *ast.IndexExpr: + t := pass.TypesInfo.TypeOf(e) + return !isDuration(t) + default: + return false + } +} + +func isAcceptableIdent(pass *analysis.Pass, ident *ast.Ident) bool { + obj := pass.TypesInfo.ObjectOf(ident) + return !isDuration(obj.Type()) +} + +func formatNode(node ast.Node) string { + buf := new(bytes.Buffer) + if err := format.Node(buf, token.NewFileSet(), node); err != nil { + log.Printf("Error formatting expression: %v", err) + return "" + } + + return buf.String() +} + +func printAST(msg string, node ast.Node) { + fmt.Printf(">>> %s:\n%s\n\n\n", msg, formatNode(node)) + ast.Fprint(os.Stdout, nil, node, nil) + fmt.Println("--------------") +} diff --git a/vendor/github.com/charithe/durationcheck/go.mod b/vendor/github.com/charithe/durationcheck/go.mod new file mode 100644 index 000000000..eb058f21d --- /dev/null +++ b/vendor/github.com/charithe/durationcheck/go.mod @@ -0,0 +1,5 @@ +module github.com/charithe/durationcheck + +go 1.14 + +require golang.org/x/tools v0.1.0 diff --git a/vendor/github.com/charithe/durationcheck/go.sum b/vendor/github.com/charithe/durationcheck/go.sum new file mode 100644 index 000000000..21d696a65 --- /dev/null +++ b/vendor/github.com/charithe/durationcheck/go.sum @@ -0,0 +1,26 @@ +github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/mod v0.3.0 h1:RM4zey1++hCTbCVQfnWeKs9/IEsaBLA8vTkd0WVtmH4= +golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4 h1:myAQVi0cGEoqQVR5POX+8RR2mrocKqNN1hmeMqhX27k= 
+golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.1.0 h1:po9/4sTYwZU9lPhi1tOrb4hCv3qrhiQ77LZfGa2OjwY= +golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0= +golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= diff --git a/vendor/github.com/chavacava/garif/.gitignore b/vendor/github.com/chavacava/garif/.gitignore new file mode 100644 index 000000000..5dee1052c --- /dev/null +++ b/vendor/github.com/chavacava/garif/.gitignore @@ -0,0 +1,3 @@ +*.test +*.out +.devcontainer/ \ No newline at end of file diff --git a/vendor/github.com/chavacava/garif/LICENSE b/vendor/github.com/chavacava/garif/LICENSE new file mode 100644 index 000000000..2bba73fb7 --- /dev/null +++ b/vendor/github.com/chavacava/garif/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2021 Salvador Cavadini + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/github.com/chavacava/garif/README.md b/vendor/github.com/chavacava/garif/README.md new file mode 100644 index 000000000..6a19c6147 --- /dev/null +++ b/vendor/github.com/chavacava/garif/README.md @@ -0,0 +1,52 @@ +# garif + +A GO package to create and manipulate SARIF logs. + +SARIF, from _Static Analysis Results Interchange Format_, is a standard JSON-based format for the output of static analysis tools defined and promoted by [OASIS](https://www.oasis-open.org/). + +Current supported version of the standard is [SARIF-v2.1.0](https://docs.oasis-open.org/sarif/sarif/v2.1.0/csprd01/sarif-v2.1.0-csprd01.html +). + +## Usage + +The package provides access to every element of the SARIF model, therefore you are free to manipulate it at every detail. 
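For instance, here is a minimal sketch (editorial, not part of the vendored README) of building a log by populating the structures directly and printing it as JSON; the driver name `hand-rolled-linter` is purely illustrative, and the types and the `PrettyWrite` method used here are defined in models.go and io.go later in this patch:

```go
package main

import (
    "os"

    "github.com/chavacava/garif"
)

func main() {
    // Assemble the SARIF model field by field, without any helpers.
    run := &garif.Run{
        Tool: &garif.Tool{
            Driver: &garif.ToolComponent{Name: "hand-rolled-linter"},
        },
    }

    logFile := &garif.LogFile{
        Version: garif.Version210,
        Runs:    []*garif.Run{run},
    }

    // PrettyWrite marshals the log as indented JSON to any io.Writer.
    if err := logFile.PrettyWrite(os.Stdout); err != nil {
        panic(err)
    }
}
```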
+ +The package also provides constructor functions (`New...`) and decorator methods (`With...`) that simplify the creation of SARIF files for common use cases. + +Using these constructors and decorators, we can easily create the example SARIF file from the [Microsoft SARIF pages](https://github.com/microsoft/sarif-tutorials/blob/master/docs/1-Introduction.md): + + +```go +import "github.com/chavacava/garif" + +// ... + +rule := garif.NewRule("no-unused-vars"). + WithHelpUri("https://eslint.org/docs/rules/no-unused-vars"). + WithShortDescription("disallow unused variables"). + WithProperties("category", "Variables") + +driver := garif.NewDriver("ESLint"). + WithInformationUri("https://eslint.org"). + WithRules(rule) + +run := garif.NewRun(NewTool(driver)). + WithArtifactsURIs("file:///C:/dev/sarif/sarif-tutorials/samples/Introduction/simple-example.js") + +run.WithResult(rule.Id, "'x' is assigned a value but never used.", "file:///C:/dev/sarif/sarif-tutorials/samples/Introduction/simple-example.js", 1, 5) + +logFile := garif.NewLogFile([]*Run{run}, Version210) + +logFile.Write(os.Stdout) +``` + +## Why this package? +This package was initiated during my work on adding a SARIF output formatter to [`revive`](https://github.com/mgechev/revive). +I tried to use [go-sarif](https://github.com/owenrumney/go-sarif) by [Owen Rumney](https://github.com/owenrumney), but it is too focused on the use case of the static analyzer [tfsec](https://tfsec.dev), so I decided to create a package flexible enough to generate SARIF files in broader cases. + +## More information about SARIF +For more information about SARIF, you can visit the [Oasis Open](https://www.oasis-open.org/committees/tc_home.php?wg_abbrev=sarif) site. + + +## Contributing +Of course, contributions are welcome!
\ No newline at end of file diff --git a/vendor/github.com/chavacava/garif/constructors.go b/vendor/github.com/chavacava/garif/constructors.go new file mode 100644 index 000000000..8910e396e --- /dev/null +++ b/vendor/github.com/chavacava/garif/constructors.go @@ -0,0 +1,338 @@ +package garif + +// NewAddress creates a valid Address +func NewAddress() *Address { + return &Address{} +} + +// NewArtifact creates a valid Artifact +func NewArtifact() *Artifact { + return &Artifact{} +} + +// NewArtifactChange creates a valid ArtifactChange +func NewArtifactChange(location *ArtifactLocation, replacements ...*Replacement) *ArtifactChange { + return &ArtifactChange{ + ArtifactLocation: location, + Replacements: replacements, + } +} + +// NewArtifactContent creates a valid ArtifactContent +func NewArtifactContent() *ArtifactContent { + return &ArtifactContent{} +} + +// NewArtifactLocation creates a valid ArtifactLocation +func NewArtifactLocation() *ArtifactLocation { + return &ArtifactLocation{} +} + +// NewAttachment creates a valid Attachment +func NewAttachment(location *ArtifactLocation) *Attachment { + return &Attachment{ArtifactLocation: location} +} + +// NewCodeFlow creates a valid CodeFlow +func NewCodeFlow(threadFlows ...*ThreadFlow) *CodeFlow { + return &CodeFlow{ThreadFlows: threadFlows} +} + +// NewConfigurationOverride creates a valid ConfigurationOverride +func NewConfigurationOverride(configuration *ReportingConfiguration, descriptor *ReportingDescriptorReference) *ConfigurationOverride { + return &ConfigurationOverride{ + Configuration: configuration, + Descriptor: descriptor, + } +} + +// NewConversion creates a valid Conversion +func NewConversion(tool *Tool) *Conversion { + return &Conversion{Tool: tool} +} + +// NewEdge creates a valid Edge +func NewEdge(id, sourceNodeId, targetNodeId string) *Edge { + return &Edge{ + Id: id, + SourceNodeId: sourceNodeId, + TargetNodeId: targetNodeId, + } +} + +// NewEdgeTraversal creates a valid EdgeTraversal +func NewEdgeTraversal(edgeId string) *EdgeTraversal { + return &EdgeTraversal{ + EdgeId: edgeId, + } +} + +// NewException creates a valid Exception +func NewException() *Exception { + return &Exception{} +} + +// NewExternalProperties creates a valid ExternalProperties +func NewExternalProperties() *ExternalProperties { + return &ExternalProperties{} +} + +// NewExternalPropertyFileReference creates a valid ExternalPropertyFileReference +func NewExternalPropertyFileReference() *ExternalPropertyFileReference { + return &ExternalPropertyFileReference{} +} + +// NewExternalPropertyFileReferences creates a valid ExternalPropertyFileReferences +func NewExternalPropertyFileReferences() *ExternalPropertyFileReferences { + return &ExternalPropertyFileReferences{} +} + +// NewFix creates a valid Fix +func NewFix(artifactChanges ...*ArtifactChange) *Fix { + return &Fix{ + ArtifactChanges: artifactChanges, + } +} + +// NewGraph creates a valid Graph +func NewGraph() *Graph { + return &Graph{} +} + +// NewGraphTraversal creates a valid GraphTraversal +func NewGraphTraversal() *GraphTraversal { + return &GraphTraversal{} +} + +// NewInvocation creates a valid Invocation +func NewInvocation(executionSuccessful bool) *Invocation { + return &Invocation{ + ExecutionSuccessful: executionSuccessful, + } +} + +// NewLocation creates a valid Location +func NewLocation() *Location { + return &Location{} +} + +// NewLocationRelationship creates a valid LocationRelationship +func NewLocationRelationship(target int) *LocationRelationship { + return 
&LocationRelationship{ + Target: target, + } +} + +type LogFileVersion string + +const Version210 LogFileVersion = "2.1.0" + +// NewLogFile creates a valid LogFile +func NewLogFile(runs []*Run, version LogFileVersion) *LogFile { + return &LogFile{ + Runs: runs, + Version: version, + } +} + +// NewLogicalLocation creates a valid LogicalLocation +func NewLogicalLocation() *LogicalLocation { + return &LogicalLocation{} +} + +// NewMessage creates a valid Message +func NewMessage() *Message { + return &Message{} +} + +// NewMessageFromText creates a valid Message with the given text +func NewMessageFromText(text string) *Message { + return &Message{ + Text: text, + } +} + +// NewMultiformatMessageString creates a valid MultiformatMessageString +func NewMultiformatMessageString(text string) *MultiformatMessageString { + return &MultiformatMessageString{ + Text: text, + } +} + +// NewNode creates a valid Node +func NewNode(id string) *Node { + return &Node{ + Id: id, + } +} + +// NewNotification creates a valid Notification +func NewNotification(message *Message) *Notification { + return &Notification{ + Message: message, + } +} + +// NewPhysicalLocation creates a valid PhysicalLocation +func NewPhysicalLocation() *PhysicalLocation { + return &PhysicalLocation{} +} + +// NewPropertyBag creates a valid PropertyBag +func NewPropertyBag() *PropertyBag { + return &PropertyBag{} +} + +// NewRectangle creates a valid Rectangle +func NewRectangle() *Rectangle { + return &Rectangle{} +} + +// NewRegion creates a valid Region +func NewRegion() *Region { + return &Region{} +} + +// NewReplacement creates a valid Replacement +func NewReplacement(deletedRegion *Region) *Replacement { + return &Replacement{ + DeletedRegion: deletedRegion, + } +} + +// NewReportingConfiguration creates a valid ReportingConfiguration +func NewReportingConfiguration() *ReportingConfiguration { + return &ReportingConfiguration{} +} + +// NewReportingDescriptor creates a valid ReportingDescriptor +func NewReportingDescriptor(id string) *ReportingDescriptor { + return &ReportingDescriptor{ + Id: id, + } +} + +// NewRule is an alias for NewReportingDescriptor +func NewRule(id string) *ReportingDescriptor { + return NewReportingDescriptor(id) +} + +// NewReportingDescriptorReference creates a valid ReportingDescriptorReference +func NewReportingDescriptorReference() *ReportingDescriptorReference { + return &ReportingDescriptorReference{} +} + +// NewReportingDescriptorRelationship creates a valid ReportingDescriptorRelationship +func NewReportingDescriptorRelationship(target *ReportingDescriptorReference) *ReportingDescriptorRelationship { + return &ReportingDescriptorRelationship{ + Target: target, + } +} + +// NewResult creates a valid Result +func NewResult(message *Message) *Result { + return &Result{ + Message: message, + } +} + +// NewResultProvenance creates a valid ResultProvenance +func NewResultProvenance() *ResultProvenance { + return &ResultProvenance{} +} + +// NewRun creates a valid Run +func NewRun(tool *Tool) *Run { + return &Run{ + Tool: tool, + } +} + +// NewRunAutomationDetails creates a valid RunAutomationDetails +func NewRunAutomationDetails() *RunAutomationDetails { + return &RunAutomationDetails{} +} + +// New creates a valid +func NewSpecialLocations() *SpecialLocations { + return &SpecialLocations{} +} + +// NewStack creates a valid Stack +func NewStack(frames ...*StackFrame) *Stack { + return &Stack{ + Frames: frames, + } +} + +// NewStackFrame creates a valid StackFrame +func NewStackFrame() *StackFrame { 
+ return &StackFrame{} +} + +// NewSuppression creates a valid Suppression +func NewSuppression(kind string) *Suppression { + return &Suppression{ + Kind: kind, + } +} + +// NewThreadFlow creates a valid ThreadFlow +func NewThreadFlow(locations []*ThreadFlowLocation) *ThreadFlow { + return &ThreadFlow{ + Locations: locations, + } +} + +// NewThreadFlowLocation creates a valid ThreadFlowLocation +func NewThreadFlowLocation() *ThreadFlowLocation { + return &ThreadFlowLocation{} +} + +// NewTool creates a valid Tool +func NewTool(driver *ToolComponent) *Tool { + return &Tool{ + Driver: driver, + } +} + +// NewToolComponent creates a valid ToolComponent +func NewToolComponent(name string) *ToolComponent { + return &ToolComponent{ + Name: name, + } +} + +// NewDriver is an alias for NewToolComponent +func NewDriver(name string) *ToolComponent { + return NewToolComponent(name) +} + +// NewToolComponentReference creates a valid ToolComponentReference +func NewToolComponentReference() *ToolComponentReference { + return &ToolComponentReference{} +} + +// NewTranslationMetadata creates a valid TranslationMetadata +func NewTranslationMetadata(name string) *TranslationMetadata { + return &TranslationMetadata{ + Name: name, + } +} + +// NewVersionControlDetails creates a valid VersionControlDetails +func NewVersionControlDetails(repositoryUri string) *VersionControlDetails { + return &VersionControlDetails{ + RepositoryUri: repositoryUri, + } +} + +// NewWebRequest creates a valid WebRequest +func NewWebRequest() *WebRequest { + return &WebRequest{} +} + +// NewWebResponse creates a valid WebResponse +func NewWebResponse() *WebResponse { + return &WebResponse{} +} diff --git a/vendor/github.com/chavacava/garif/decorators.go b/vendor/github.com/chavacava/garif/decorators.go new file mode 100644 index 000000000..00b599fb8 --- /dev/null +++ b/vendor/github.com/chavacava/garif/decorators.go @@ -0,0 +1,94 @@ +package garif + +// WithLineColumn sets a physical location with the given line and column +func (l *Location) WithLineColumn(line, column int) *Location { + if l.PhysicalLocation == nil { + l.PhysicalLocation = NewPhysicalLocation() + } + + l.PhysicalLocation.Region = NewRegion() + l.PhysicalLocation.Region.StartLine = line + l.PhysicalLocation.Region.StartColumn = column + + return l +} + +// WithURI sets a physical location with the given URI +func (l *Location) WithURI(uri string) *Location { + if l.PhysicalLocation == nil { + l.PhysicalLocation = NewPhysicalLocation() + } + + l.PhysicalLocation.ArtifactLocation = NewArtifactLocation() + l.PhysicalLocation.ArtifactLocation.Uri = uri + + return l +} + +// WithKeyValue sets (overwrites) the value of the given key +func (b PropertyBag) WithKeyValue(key string, value interface{}) PropertyBag { + b[key] = value + return b +} + +// WithHelpUri sets the help URI for this ReportingDescriptor +func (r *ReportingDescriptor) WithHelpUri(uri string) *ReportingDescriptor { + r.HelpUri = uri + return r +} + +// WithProperties adds the key & value to the properties of this ReportingDescriptor +func (r *ReportingDescriptor) WithProperties(key string, value interface{}) *ReportingDescriptor { + if r.Properties == nil { + r.Properties = NewPropertyBag() + } + + r.Properties.WithKeyValue(key, value) + + return r +} + +// WithArtifactsURIs adds the given URI as artifacts of this Run +func (r *Run) WithArtifactsURIs(uris ...string) *Run { + if r.Artifacts == nil { + r.Artifacts = []*Artifact{} + } + + for _, uri := range uris { + a := NewArtifact() + a.Location = 
NewArtifactLocation() + a.Location.Uri = uri + r.Artifacts = append(r.Artifacts, a) + } + + return r +} + +// WithResult adds a result to this Run +func (r *Run) WithResult(ruleId string, message string, uri string, line int, column int) *Run { + if r.Results == nil { + r.Results = []*Result{} + } + + msg := NewMessage() + msg.Text = message + result := NewResult(msg) + location := NewLocation().WithURI(uri).WithLineColumn(line, column) + + result.Locations = append(result.Locations, location) + result.RuleId = ruleId + r.Results = append(r.Results, result) + return r +} + +// WithInformationUri sets the information URI +func (t *ToolComponent) WithInformationUri(uri string) *ToolComponent { + t.InformationUri = uri + return t +} + +// WithRules sets (overwrites) the rules +func (t *ToolComponent) WithRules(rules ...*ReportingDescriptor) *ToolComponent { + t.Rules = rules + return t +} diff --git a/vendor/github.com/chavacava/garif/doc.go b/vendor/github.com/chavacava/garif/doc.go new file mode 100644 index 000000000..50fa6dfe5 --- /dev/null +++ b/vendor/github.com/chavacava/garif/doc.go @@ -0,0 +1,11 @@ +// Package garif defines all the GO structures required to model a SARIF log file. +// These structures were created using the JSON-schema sarif-schema-2.1.0.json of SARIF logfiles +// available at https://github.com/oasis-tcs/sarif-spec/tree/master/Schemata. +// +// The package provides constructors for all structures (see constructors.go) These constructors +// ensure that the returned structure instantiation is valid with respect to the JSON schema and +// should be used in place of plain structure instantiation. +// The root structure is LogFile. +// +// The package provides utility decorators for the most commonly used structures (see decorators.go) +package garif diff --git a/vendor/github.com/chavacava/garif/go.mod b/vendor/github.com/chavacava/garif/go.mod new file mode 100644 index 000000000..4c8a7f5ce --- /dev/null +++ b/vendor/github.com/chavacava/garif/go.mod @@ -0,0 +1,5 @@ +module github.com/chavacava/garif + +go 1.16 + +require github.com/stretchr/testify v1.7.0 diff --git a/vendor/github.com/chavacava/garif/go.sum b/vendor/github.com/chavacava/garif/go.sum new file mode 100644 index 000000000..acb88a48f --- /dev/null +++ b/vendor/github.com/chavacava/garif/go.sum @@ -0,0 +1,11 @@ +github.com/davecgh/go-spew v1.1.0 h1:ZDRjVQ15GmhC3fiQ8ni8+OwkZQO4DARzQgrnXU1Liz8= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY= +github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c h1:dUUwHk2QECo/6vqA44rthZ8ie2QXMNeKRTHCNY2nXvo= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/vendor/github.com/chavacava/garif/io.go b/vendor/github.com/chavacava/garif/io.go new file mode 100644 index 000000000..ce5719c96 --- /dev/null +++ 
b/vendor/github.com/chavacava/garif/io.go @@ -0,0 +1,26 @@ +package garif + +import ( + "encoding/json" + "io" +) + +// Write writes the JSON +func (l *LogFile) Write(w io.Writer) error { + marshal, err := json.Marshal(l) + if err != nil { + return err + } + _, err = w.Write(marshal) + return err +} + +// PrettyWrite writes indented JSON +func (l *LogFile) PrettyWrite(w io.Writer) error { + marshal, err := json.MarshalIndent(l, "", " ") + if err != nil { + return err + } + _, err = w.Write(marshal) + return err +} diff --git a/vendor/github.com/chavacava/garif/models.go b/vendor/github.com/chavacava/garif/models.go new file mode 100644 index 000000000..3668436a3 --- /dev/null +++ b/vendor/github.com/chavacava/garif/models.go @@ -0,0 +1,1486 @@ +package garif + +// Address A physical or virtual address, or a range of addresses, in an 'addressable region' (memory or a binary file). +type Address struct { + + // The address expressed as a byte offset from the start of the addressable region. + AbsoluteAddress int `json:"absoluteAddress,omitempty"` + + // A human-readable fully qualified name that is associated with the address. + FullyQualifiedName string `json:"fullyQualifiedName,omitempty"` + + // The index within run.addresses of the cached object for this address. + Index int `json:"index,omitempty"` + + // An open-ended string that identifies the address kind. + // 'data', 'function', 'header','instruction', 'module', 'page', 'section', + // 'segment', 'stack', 'stackFrame', 'table' are well-known values. + Kind string `json:"kind,omitempty"` + + // The number of bytes in this range of addresses. + Length int `json:"length,omitempty"` + + // A name that is associated with the address, e.g., '.text'. + Name string `json:"name,omitempty"` + + // The byte offset of this address from the absolute or relative address of the parent object. + OffsetFromParent int `json:"offsetFromParent,omitempty"` + + // The index within run.addresses of the parent object. + ParentIndex int `json:"parentIndex,omitempty"` + + // Key/value pairs that provide additional information about the address. + Properties *PropertyBag `json:"properties,omitempty"` + + // The address expressed as a byte offset from the absolute address of the top-most parent object. + RelativeAddress int `json:"relativeAddress,omitempty"` +} + +// Artifact A single artifact. In some cases, this artifact might be nested within another artifact. +type Artifact struct { + + // The contents of the artifact. + Contents *ArtifactContent `json:"contents,omitempty"` + + // A short description of the artifact. + Description *Message `json:"description,omitempty"` + + // Specifies the encoding for an artifact object that refers to a text file. + Encoding string `json:"encoding,omitempty"` + + // A dictionary, each of whose keys is the name of a hash function and each of whose values is + // the hashed value of the artifact produced by the specified hash function. + Hashes map[string]string `json:"hashes,omitempty"` + + // The Coordinated Universal Time (UTC) date and time at which the artifact was most recently modified. + // See "Date/time properties" in the SARIF spec for the required format. + LastModifiedTimeUtc string `json:"lastModifiedTimeUtc,omitempty"` + + // The length of the artifact in bytes. + Length int `json:"length,omitempty"` + + // The location of the artifact. + Location *ArtifactLocation `json:"location,omitempty"` + + // The MIME type (RFC 2045) of the artifact. 
+ MimeType string `json:"mimeType,omitempty"` + + // The offset in bytes of the artifact within its containing artifact. + Offset int `json:"offset,omitempty"` + + // Identifies the index of the immediate parent of the artifact, if this artifact is nested. + ParentIndex int `json:"parentIndex,omitempty"` + + // Key/value pairs that provide additional information about the artifact. + Properties *PropertyBag `json:"properties,omitempty"` + + // The role or roles played by the artifact in the analysis. + Roles []interface{} `json:"roles,omitempty"` + + // Specifies the source language for any artifact object that refers to a text file that contains source code. + SourceLanguage string `json:"sourceLanguage,omitempty"` +} + +// ArtifactChange A change to a single artifact. +type ArtifactChange struct { + + // The location of the artifact to change. + ArtifactLocation *ArtifactLocation `json:"artifactLocation"` + + // Key/value pairs that provide additional information about the change. + Properties *PropertyBag `json:"properties,omitempty"` + + // An array of replacement objects, each of which represents the replacement of a single region in a + // single artifact specified by 'artifactLocation'. + Replacements []*Replacement `json:"replacements"` +} + +// ArtifactContent Represents the contents of an artifact. +type ArtifactContent struct { + + // MIME Base64-encoded content from a binary artifact, or from a text artifact in its original encoding. + Binary string `json:"binary,omitempty"` + + // Key/value pairs that provide additional information about the artifact content. + Properties *PropertyBag `json:"properties,omitempty"` + + // An alternate rendered representation of the artifact (e.g., a decompiled representation of a binary region). + Rendered *MultiformatMessageString `json:"rendered,omitempty"` + + // UTF-8-encoded content from a text artifact. + Text string `json:"text,omitempty"` +} + +// ArtifactLocation Specifies the location of an artifact. +type ArtifactLocation struct { + + // A short description of the artifact location. + Description *Message `json:"description,omitempty"` + + // The index within the run artifacts array of the artifact object associated with the artifact location. + Index int `json:"index,omitempty"` + + // Key/value pairs that provide additional information about the artifact location. + Properties *PropertyBag `json:"properties,omitempty"` + + // A string containing a valid relative or absolute URI. + Uri string `json:"uri,omitempty"` + + // A string which indirectly specifies the absolute URI with respect to which a relative URI in the "uri" property is interpreted. + UriBaseId string `json:"uriBaseId,omitempty"` +} + +// Attachment An artifact relevant to a result. +type Attachment struct { + + // The location of the attachment. + ArtifactLocation *ArtifactLocation `json:"artifactLocation"` + + // A message describing the role played by the attachment. + Description *Message `json:"description,omitempty"` + + // Key/value pairs that provide additional information about the attachment. + Properties *PropertyBag `json:"properties,omitempty"` + + // An array of rectangles specifying areas of interest within the image. + Rectangles []*Rectangle `json:"rectangles,omitempty"` + + // An array of regions of interest within the attachment. + Regions []*Region `json:"regions,omitempty"` +} + +// CodeFlow A set of threadFlows which together describe a pattern of code execution relevant to detecting a result. 
+type CodeFlow struct { + + // A message relevant to the code flow. + Message *Message `json:"message,omitempty"` + + // Key/value pairs that provide additional information about the code flow. + Properties *PropertyBag `json:"properties,omitempty"` + + // An array of one or more unique threadFlow objects, each of which describes the progress of a program + // through a thread of execution. + ThreadFlows []*ThreadFlow `json:"threadFlows"` +} + +// ConfigurationOverride Information about how a specific rule or notification was reconfigured at runtime. +type ConfigurationOverride struct { + + // Specifies how the rule or notification was configured during the scan. + Configuration *ReportingConfiguration `json:"configuration"` + + // A reference used to locate the descriptor whose configuration was overridden. + Descriptor *ReportingDescriptorReference `json:"descriptor"` + + // Key/value pairs that provide additional information about the configuration override. + Properties *PropertyBag `json:"properties,omitempty"` +} + +// Conversion Describes how a converter transformed the output of a static analysis tool from the analysis tool's native output format into the SARIF format. +type Conversion struct { + + // The locations of the analysis tool's per-run log files. + AnalysisToolLogFiles []*ArtifactLocation `json:"analysisToolLogFiles,omitempty"` + + // An invocation object that describes the invocation of the converter. + Invocation *Invocation `json:"invocation,omitempty"` + + // Key/value pairs that provide additional information about the conversion. + Properties *PropertyBag `json:"properties,omitempty"` + + // A tool object that describes the converter. + Tool *Tool `json:"tool"` +} + +// Edge Represents a directed edge in a graph. +type Edge struct { + + // A string that uniquely identifies the edge within its graph. + Id string `json:"id"` + + // A short description of the edge. + Label *Message `json:"label,omitempty"` + + // Key/value pairs that provide additional information about the edge. + Properties *PropertyBag `json:"properties,omitempty"` + + // Identifies the source node (the node at which the edge starts). + SourceNodeId string `json:"sourceNodeId"` + + // Identifies the target node (the node at which the edge ends). + TargetNodeId string `json:"targetNodeId"` +} + +// EdgeTraversal Represents the traversal of a single edge during a graph traversal. +type EdgeTraversal struct { + + // Identifies the edge being traversed. + EdgeId string `json:"edgeId"` + + // The values of relevant expressions after the edge has been traversed. + FinalState map[string]*MultiformatMessageString `json:"finalState,omitempty"` + + // A message to display to the user as the edge is traversed. + Message *Message `json:"message,omitempty"` + + // Key/value pairs that provide additional information about the edge traversal. + Properties *PropertyBag `json:"properties,omitempty"` + + // The number of edge traversals necessary to return from a nested graph. + StepOverEdgeCount int `json:"stepOverEdgeCount,omitempty"` +} + +// Exception Describes a runtime exception encountered during the execution of an analysis tool. +type Exception struct { + + // An array of exception objects each of which is considered a cause of this exception. + InnerExceptions []*Exception `json:"innerExceptions,omitempty"` + + // A string that identifies the kind of exception, for example, the fully qualified type name of an object that was thrown, or the symbolic name of a signal. 
+ Kind string `json:"kind,omitempty"` + + // A message that describes the exception. + Message string `json:"message,omitempty"` + + // Key/value pairs that provide additional information about the exception. + Properties *PropertyBag `json:"properties,omitempty"` + + // The sequence of function calls leading to the exception. + Stack *Stack `json:"stack,omitempty"` +} + +// ExternalProperties The top-level element of an external property file. +type ExternalProperties struct { + + // Addresses that will be merged with a separate run. + Addresses []*Address `json:"addresses,omitempty"` + + // An array of artifact objects that will be merged with a separate run. + Artifacts []*Artifact `json:"artifacts,omitempty"` + + // A conversion object that will be merged with a separate run. + Conversion *Conversion `json:"conversion,omitempty"` + + // The analysis tool object that will be merged with a separate run. + Driver *ToolComponent `json:"driver,omitempty"` + + // Tool extensions that will be merged with a separate run. + Extensions []*ToolComponent `json:"extensions,omitempty"` + + // Key/value pairs that provide additional information that will be merged with a separate run. + ExternalizedProperties *PropertyBag `json:"externalizedProperties,omitempty"` + + // An array of graph objects that will be merged with a separate run. + Graphs []*Graph `json:"graphs,omitempty"` + + // A stable, unique identifer for this external properties object, in the form of a GUID. + Guid string `json:"guid,omitempty"` + + // Describes the invocation of the analysis tool that will be merged with a separate run. + Invocations []*Invocation `json:"invocations,omitempty"` + + // An array of logical locations such as namespaces, types or functions that will be merged with a separate run. + LogicalLocations []*LogicalLocation `json:"logicalLocations,omitempty"` + + // Tool policies that will be merged with a separate run. + Policies []*ToolComponent `json:"policies,omitempty"` + + // Key/value pairs that provide additional information about the external properties. + Properties *PropertyBag `json:"properties,omitempty"` + + // An array of result objects that will be merged with a separate run. + Results []*Result `json:"results,omitempty"` + + // A stable, unique identifer for the run associated with this external properties object, in the form of a GUID. + RunGuid string `json:"runGuid,omitempty"` + + // The URI of the JSON schema corresponding to the version of the external property file format. + Schema string `json:"schema,omitempty"` + + // Tool taxonomies that will be merged with a separate run. + Taxonomies []*ToolComponent `json:"taxonomies,omitempty"` + + // An array of threadFlowLocation objects that will be merged with a separate run. + ThreadFlowLocations []*ThreadFlowLocation `json:"threadFlowLocations,omitempty"` + + // Tool translations that will be merged with a separate run. + Translations []*ToolComponent `json:"translations,omitempty"` + + // The SARIF format version of this external properties object. + Version interface{} `json:"version,omitempty"` + + // Requests that will be merged with a separate run. + WebRequests []*WebRequest `json:"webRequests,omitempty"` + + // Responses that will be merged with a separate run. + WebResponses []*WebResponse `json:"webResponses,omitempty"` +} + +// ExternalPropertyFileReference Contains information that enables a SARIF consumer to locate the external property file that contains the value of an externalized property associated with the run. 
+type ExternalPropertyFileReference struct { + + // A stable, unique identifer for the external property file in the form of a GUID. + Guid string `json:"guid,omitempty"` + + // A non-negative integer specifying the number of items contained in the external property file. + ItemCount int `json:"itemCount,omitempty"` + + // The location of the external property file. + Location *ArtifactLocation `json:"location,omitempty"` + + // Key/value pairs that provide additional information about the external property file. + Properties *PropertyBag `json:"properties,omitempty"` +} + +// ExternalPropertyFileReferences References to external property files that should be inlined with the content of a root log file. +type ExternalPropertyFileReferences struct { + + // An array of external property files containing run.addresses arrays to be merged with the root log file. + Addresses []*ExternalPropertyFileReference `json:"addresses,omitempty"` + + // An array of external property files containing run.artifacts arrays to be merged with the root log file. + Artifacts []*ExternalPropertyFileReference `json:"artifacts,omitempty"` + + // An external property file containing a run.conversion object to be merged with the root log file. + Conversion *ExternalPropertyFileReference `json:"conversion,omitempty"` + + // An external property file containing a run.driver object to be merged with the root log file. + Driver *ExternalPropertyFileReference `json:"driver,omitempty"` + + // An array of external property files containing run.extensions arrays to be merged with the root log file. + Extensions []*ExternalPropertyFileReference `json:"extensions,omitempty"` + + // An external property file containing a run.properties object to be merged with the root log file. + ExternalizedProperties *ExternalPropertyFileReference `json:"externalizedProperties,omitempty"` + + // An array of external property files containing a run.graphs object to be merged with the root log file. + Graphs []*ExternalPropertyFileReference `json:"graphs,omitempty"` + + // An array of external property files containing run.invocations arrays to be merged with the root log file. + Invocations []*ExternalPropertyFileReference `json:"invocations,omitempty"` + + // An array of external property files containing run.logicalLocations arrays to be merged with the root log file. + LogicalLocations []*ExternalPropertyFileReference `json:"logicalLocations,omitempty"` + + // An array of external property files containing run.policies arrays to be merged with the root log file. + Policies []*ExternalPropertyFileReference `json:"policies,omitempty"` + + // Key/value pairs that provide additional information about the external property files. + Properties *PropertyBag `json:"properties,omitempty"` + + // An array of external property files containing run.results arrays to be merged with the root log file. + Results []*ExternalPropertyFileReference `json:"results,omitempty"` + + // An array of external property files containing run.taxonomies arrays to be merged with the root log file. + Taxonomies []*ExternalPropertyFileReference `json:"taxonomies,omitempty"` + + // An array of external property files containing run.threadFlowLocations arrays to be merged with the root log file. + ThreadFlowLocations []*ExternalPropertyFileReference `json:"threadFlowLocations,omitempty"` + + // An array of external property files containing run.translations arrays to be merged with the root log file. 
+ Translations []*ExternalPropertyFileReference `json:"translations,omitempty"` + + // An array of external property files containing run.requests arrays to be merged with the root log file. + WebRequests []*ExternalPropertyFileReference `json:"webRequests,omitempty"` + + // An array of external property files containing run.responses arrays to be merged with the root log file. + WebResponses []*ExternalPropertyFileReference `json:"webResponses,omitempty"` +} + +// Fix A proposed fix for the problem represented by a result object. +// A fix specifies a set of artifacts to modify. For each artifact, +// it specifies a set of bytes to remove, and provides a set of new bytes to replace them. +type Fix struct { + + // One or more artifact changes that comprise a fix for a result. + ArtifactChanges []*ArtifactChange `json:"artifactChanges"` + + // A message that describes the proposed fix, enabling viewers to present the proposed change to an end user. + Description *Message `json:"description,omitempty"` + + // Key/value pairs that provide additional information about the fix. + Properties *PropertyBag `json:"properties,omitempty"` +} + +// Graph A network of nodes and directed edges that describes some aspect of the +// structure of the code (for example, a call graph). +type Graph struct { + + // A description of the graph. + Description *Message `json:"description,omitempty"` + + // An array of edge objects representing the edges of the graph. + Edges []*Edge `json:"edges,omitempty"` + + // An array of node objects representing the nodes of the graph. + Nodes []*Node `json:"nodes,omitempty"` + + // Key/value pairs that provide additional information about the graph. + Properties *PropertyBag `json:"properties,omitempty"` +} + +// GraphTraversal Represents a path through a graph. +type GraphTraversal struct { + + // A description of this graph traversal. + Description *Message `json:"description,omitempty"` + + // The sequences of edges traversed by this graph traversal. + EdgeTraversals []*EdgeTraversal `json:"edgeTraversals,omitempty"` + + // Values of relevant expressions at the start of the graph traversal that remain constant for the graph traversal. + ImmutableState map[string]*MultiformatMessageString `json:"immutableState,omitempty"` + + // Values of relevant expressions at the start of the graph traversal that may change during graph traversal. + InitialState map[string]*MultiformatMessageString `json:"initialState,omitempty"` + + // Key/value pairs that provide additional information about the graph traversal. + Properties *PropertyBag `json:"properties,omitempty"` + + // The index within the result.graphs to be associated with the result. + ResultGraphIndex int `json:"resultGraphIndex,omitempty"` + + // The index within the run.graphs to be associated with the result. + RunGraphIndex int `json:"runGraphIndex,omitempty"` +} + +// Invocation The runtime environment of the analysis tool run. +type Invocation struct { + + // The account under which the invocation occurred. + Account string `json:"account,omitempty"` + + // An array of strings, containing in order the command line arguments passed to the tool from the operating system. + Arguments []string `json:"arguments,omitempty"` + + // The command line used to invoke the tool. + CommandLine string `json:"commandLine,omitempty"` + + // The Coordinated Universal Time (UTC) date and time at which the invocation ended. See "Date/time properties" in the SARIF spec for the required format. 
+ EndTimeUtc string `json:"endTimeUtc,omitempty"` + + // The environment variables associated with the analysis tool process, expressed as key/value pairs. + EnvironmentVariables map[string]string `json:"environmentVariables,omitempty"` + + // An absolute URI specifying the location of the executable that was invoked. + ExecutableLocation *ArtifactLocation `json:"executableLocation,omitempty"` + + // Specifies whether the tool's execution completed successfully. + ExecutionSuccessful bool `json:"executionSuccessful"` + + // The process exit code. + ExitCode int `json:"exitCode,omitempty"` + + // The reason for the process exit. + ExitCodeDescription string `json:"exitCodeDescription,omitempty"` + + // The name of the signal that caused the process to exit. + ExitSignalName string `json:"exitSignalName,omitempty"` + + // The numeric value of the signal that caused the process to exit. + ExitSignalNumber int `json:"exitSignalNumber,omitempty"` + + // The machine on which the invocation occurred. + Machine string `json:"machine,omitempty"` + + // An array of configurationOverride objects that describe notifications related runtime overrides. + NotificationConfigurationOverrides []*ConfigurationOverride `json:"notificationConfigurationOverrides,omitempty"` + + // The id of the process in which the invocation occurred. + ProcessId int `json:"processId,omitempty"` + + // The reason given by the operating system that the process failed to start. + ProcessStartFailureMessage string `json:"processStartFailureMessage,omitempty"` + + // Key/value pairs that provide additional information about the invocation. + Properties *PropertyBag `json:"properties,omitempty"` + + // The locations of any response files specified on the tool's command line. + ResponseFiles []*ArtifactLocation `json:"responseFiles,omitempty"` + + // An array of configurationOverride objects that describe rules related runtime overrides. + RuleConfigurationOverrides []*ConfigurationOverride `json:"ruleConfigurationOverrides,omitempty"` + + // The Coordinated Universal Time (UTC) date and time at which the invocation started. See "Date/time properties" in the SARIF spec for the required format. + StartTimeUtc string `json:"startTimeUtc,omitempty"` + + // A file containing the standard error stream from the process that was invoked. + Stderr *ArtifactLocation `json:"stderr,omitempty"` + + // A file containing the standard input stream to the process that was invoked. + Stdin *ArtifactLocation `json:"stdin,omitempty"` + + // A file containing the standard output stream from the process that was invoked. + Stdout *ArtifactLocation `json:"stdout,omitempty"` + + // A file containing the interleaved standard output and standard error stream from the process that was invoked. + StdoutStderr *ArtifactLocation `json:"stdoutStderr,omitempty"` + + // A list of conditions detected by the tool that are relevant to the tool's configuration. + ToolConfigurationNotifications []*Notification `json:"toolConfigurationNotifications,omitempty"` + + // A list of runtime conditions detected by the tool during the analysis. + ToolExecutionNotifications []*Notification `json:"toolExecutionNotifications,omitempty"` + + // The working directory for the invocation. + WorkingDirectory *ArtifactLocation `json:"workingDirectory,omitempty"` +} + +// Location A location within a programming artifact. +type Location struct { + + // A set of regions relevant to the location. 
+ Annotations []*Region `json:"annotations,omitempty"` + + // Value that distinguishes this location from all other locations within a single result object. + Id int `json:"id,omitempty"` + + // The logical locations associated with the result. + LogicalLocations []*LogicalLocation `json:"logicalLocations,omitempty"` + + // A message relevant to the location. + Message *Message `json:"message,omitempty"` + + // Identifies the artifact and region. + PhysicalLocation *PhysicalLocation `json:"physicalLocation,omitempty"` + + // Key/value pairs that provide additional information about the location. + Properties *PropertyBag `json:"properties,omitempty"` + + // An array of objects that describe relationships between this location and others. + Relationships []*LocationRelationship `json:"relationships,omitempty"` +} + +// LocationRelationship Information about the relation of one location to another. +type LocationRelationship struct { + + // A description of the location relationship. + Description *Message `json:"description,omitempty"` + + // A set of distinct strings that categorize the relationship. Well-known kinds include 'includes', 'isIncludedBy' and 'relevant'. + Kinds []string `json:"kinds,omitempty"` + + // Key/value pairs that provide additional information about the location relationship. + Properties *PropertyBag `json:"properties,omitempty"` + + // A reference to the related location. + Target int `json:"target"` +} + +// LogFile Static Analysis Results Format (SARIF) Version 2.1.0 JSON Schema. +type LogFile struct { + + // References to external property files that share data between runs. + InlineExternalProperties []*ExternalProperties `json:"inlineExternalProperties,omitempty"` + + // Key/value pairs that provide additional information about the log file. + Properties *PropertyBag `json:"properties,omitempty"` + + // The set of runs contained in this log file. + Runs []*Run `json:"runs"` + + // The URI of the JSON schema corresponding to the version. + Schema string `json:"$schema,omitempty"` + + // The SARIF format version of this log file. + Version interface{} `json:"version"` +} + +// LogicalLocation A logical location of a construct that produced a result. +type LogicalLocation struct { + + // The machine-readable name for the logical location, such as a mangled function name provided by a C++ compiler that encodes calling convention, return type and other details along with the function name. + DecoratedName string `json:"decoratedName,omitempty"` + + // The human-readable fully qualified name of the logical location. + FullyQualifiedName string `json:"fullyQualifiedName,omitempty"` + + // The index within the logical locations array. + Index int `json:"index,omitempty"` + + // The type of construct this logical location component refers to. Should be one of 'function', 'member', 'module', 'namespace', 'parameter', 'resource', 'returnType', 'type', 'variable', 'object', 'array', 'property', 'value', 'element', 'text', 'attribute', 'comment', 'declaration', 'dtd' or 'processingInstruction', if any of those accurately describe the construct. + Kind string `json:"kind,omitempty"` + + // Identifies the construct in which the result occurred. For example, this property might contain the name of a class or a method. + Name string `json:"name,omitempty"` + + // Identifies the index of the immediate parent of the construct in which the result was detected. For example, this property might point to a logical location that represents the namespace that holds a type. 
+ ParentIndex int `json:"parentIndex,omitempty"` + + // Key/value pairs that provide additional information about the logical location. + Properties *PropertyBag `json:"properties,omitempty"` +} + +// Message Encapsulates a message intended to be read by the end user. +type Message struct { + + // An array of strings to substitute into the message string. + Arguments []string `json:"arguments,omitempty"` + + // The identifier for this message. + Id string `json:"id,omitempty"` + + // A Markdown message string. + Markdown string `json:"markdown,omitempty"` + + // Key/value pairs that provide additional information about the message. + Properties *PropertyBag `json:"properties,omitempty"` + + // A plain text message string. + Text string `json:"text,omitempty"` +} + +// MultiformatMessageString A message string or message format string rendered in multiple formats. +type MultiformatMessageString struct { + + // A Markdown message string or format string. + Markdown string `json:"markdown,omitempty"` + + // Key/value pairs that provide additional information about the message. + Properties *PropertyBag `json:"properties,omitempty"` + + // A plain text message string or format string. + Text string `json:"text"` +} + +// Node Represents a node in a graph. +type Node struct { + + // Array of child nodes. + Children []*Node `json:"children,omitempty"` + + // A string that uniquely identifies the node within its graph. + Id string `json:"id"` + + // A short description of the node. + Label *Message `json:"label,omitempty"` + + // A code location associated with the node. + Location *Location `json:"location,omitempty"` + + // Key/value pairs that provide additional information about the node. + Properties *PropertyBag `json:"properties,omitempty"` +} + +// Notification Describes a condition relevant to the tool itself, as opposed to being relevant to a target being analyzed by the tool. +type Notification struct { + + // A reference used to locate the rule descriptor associated with this notification. + AssociatedRule *ReportingDescriptorReference `json:"associatedRule,omitempty"` + + // A reference used to locate the descriptor relevant to this notification. + Descriptor *ReportingDescriptorReference `json:"descriptor,omitempty"` + + // The runtime exception, if any, relevant to this notification. + Exception *Exception `json:"exception,omitempty"` + + // A value specifying the severity level of the notification. + Level interface{} `json:"level,omitempty"` + + // The locations relevant to this notification. + Locations []*Location `json:"locations,omitempty"` + + // A message that describes the condition that was encountered. + Message *Message `json:"message"` + + // Key/value pairs that provide additional information about the notification. + Properties *PropertyBag `json:"properties,omitempty"` + + // The thread identifier of the code that generated the notification. + ThreadId int `json:"threadId,omitempty"` + + // The Coordinated Universal Time (UTC) date and time at which the analysis tool generated the notification. + TimeUtc string `json:"timeUtc,omitempty"` +} + +// PhysicalLocation A physical location relevant to a result. Specifies a reference to a programming artifact together with a range of bytes or characters within that artifact. +type PhysicalLocation struct { + + // The address of the location. + Address *Address `json:"address,omitempty"` + + // The location of the artifact. 
+ ArtifactLocation *ArtifactLocation `json:"artifactLocation,omitempty"` + + // Specifies a portion of the artifact that encloses the region. Allows a viewer to display additional context around the region. + ContextRegion *Region `json:"contextRegion,omitempty"` + + // Key/value pairs that provide additional information about the physical location. + Properties *PropertyBag `json:"properties,omitempty"` + + // Specifies a portion of the artifact. + Region *Region `json:"region,omitempty"` +} + +type PropertyBag map[string]interface{} + +/* +// PropertyBag Key/value pairs that provide additional information about the object. +type PropertyBag struct { + AdditionalProperties map[string]interface{} `json:"-,omitempty"` + + // A set of distinct strings that provide additional information. + Tags []string `json:"tags,omitempty"` +} +*/ +// Rectangle An area within an image. +type Rectangle struct { + + // The Y coordinate of the bottom edge of the rectangle, measured in the image's natural units. + Bottom float64 `json:"bottom,omitempty"` + + // The X coordinate of the left edge of the rectangle, measured in the image's natural units. + Left float64 `json:"left,omitempty"` + + // A message relevant to the rectangle. + Message *Message `json:"message,omitempty"` + + // Key/value pairs that provide additional information about the rectangle. + Properties *PropertyBag `json:"properties,omitempty"` + + // The X coordinate of the right edge of the rectangle, measured in the image's natural units. + Right float64 `json:"right,omitempty"` + + // The Y coordinate of the top edge of the rectangle, measured in the image's natural units. + Top float64 `json:"top,omitempty"` +} + +// Region A region within an artifact where a result was detected. +type Region struct { + + // The length of the region in bytes. + ByteLength int `json:"byteLength,omitempty"` + + // The zero-based offset from the beginning of the artifact of the first byte in the region. + ByteOffset int `json:"byteOffset,omitempty"` + + // The length of the region in characters. + CharLength int `json:"charLength,omitempty"` + + // The zero-based offset from the beginning of the artifact of the first character in the region. + CharOffset int `json:"charOffset,omitempty"` + + // The column number of the character following the end of the region. + EndColumn int `json:"endColumn,omitempty"` + + // The line number of the last character in the region. + EndLine int `json:"endLine,omitempty"` + + // A message relevant to the region. + Message *Message `json:"message,omitempty"` + + // Key/value pairs that provide additional information about the region. + Properties *PropertyBag `json:"properties,omitempty"` + + // The portion of the artifact contents within the specified region. + Snippet *ArtifactContent `json:"snippet,omitempty"` + + // Specifies the source language, if any, of the portion of the artifact specified by the region object. + SourceLanguage string `json:"sourceLanguage,omitempty"` + + // The column number of the first character in the region. + StartColumn int `json:"startColumn,omitempty"` + + // The line number of the first character in the region. + StartLine int `json:"startLine,omitempty"` +} + +// Replacement The replacement of a single region of an artifact. +type Replacement struct { + + // The region of the artifact to delete. + DeletedRegion *Region `json:"deletedRegion"` + + // The content to insert at the location specified by the 'deletedRegion' property. 
+ InsertedContent *ArtifactContent `json:"insertedContent,omitempty"` + + // Key/value pairs that provide additional information about the replacement. + Properties *PropertyBag `json:"properties,omitempty"` +} + +// ReportingConfiguration Information about a rule or notification that can be configured at runtime. +type ReportingConfiguration struct { + + // Specifies whether the report may be produced during the scan. + Enabled bool `json:"enabled,omitempty"` + + // Specifies the failure level for the report. + Level interface{} `json:"level,omitempty"` + + // Contains configuration information specific to a report. + Parameters *PropertyBag `json:"parameters,omitempty"` + + // Key/value pairs that provide additional information about the reporting configuration. + Properties *PropertyBag `json:"properties,omitempty"` + + // Specifies the relative priority of the report. Used for analysis output only. + Rank float64 `json:"rank,omitempty"` +} + +// ReportingDescriptor Metadata that describes a specific report produced by the tool, as part of the analysis it provides or its runtime reporting. +type ReportingDescriptor struct { + + // Default reporting configuration information. + DefaultConfiguration *ReportingConfiguration `json:"defaultConfiguration,omitempty"` + + // An array of unique identifies in the form of a GUID by which this report was known in some previous version of the analysis tool. + DeprecatedGuids []string `json:"deprecatedGuids,omitempty"` + + // An array of stable, opaque identifiers by which this report was known in some previous version of the analysis tool. + DeprecatedIds []string `json:"deprecatedIds,omitempty"` + + // An array of readable identifiers by which this report was known in some previous version of the analysis tool. + DeprecatedNames []string `json:"deprecatedNames,omitempty"` + + // A description of the report. Should, as far as possible, provide details sufficient to enable resolution of any problem indicated by the result. + FullDescription *MultiformatMessageString `json:"fullDescription,omitempty"` + + // A unique identifer for the reporting descriptor in the form of a GUID. + Guid string `json:"guid,omitempty"` + + // Provides the primary documentation for the report, useful when there is no online documentation. + Help *MultiformatMessageString `json:"help,omitempty"` + + // A URI where the primary documentation for the report can be found. + HelpUri string `json:"helpUri,omitempty"` + + // A stable, opaque identifier for the report. + Id string `json:"id"` + + // A set of name/value pairs with arbitrary names. Each value is a multiformatMessageString object, which holds message strings in plain text and (optionally) Markdown format. The strings can include placeholders, which can be used to construct a message in combination with an arbitrary number of additional string arguments. + MessageStrings map[string]*MultiformatMessageString `json:"messageStrings,omitempty"` + + // A report identifier that is understandable to an end user. + Name string `json:"name,omitempty"` + + // Key/value pairs that provide additional information about the report. + Properties *PropertyBag `json:"properties,omitempty"` + + // An array of objects that describe relationships between this reporting descriptor and others. + Relationships []*ReportingDescriptorRelationship `json:"relationships,omitempty"` + + // A concise description of the report. Should be a single sentence that is understandable when visible space is limited to a single line of text. 
+ ShortDescription *MultiformatMessageString `json:"shortDescription,omitempty"` +} + +// ReportingDescriptorReference Information about how to locate a relevant reporting descriptor. +type ReportingDescriptorReference struct { + + // A guid that uniquely identifies the descriptor. + Guid string `json:"guid,omitempty"` + + // The id of the descriptor. + Id string `json:"id,omitempty"` + + // The index into an array of descriptors in toolComponent.ruleDescriptors, toolComponent.notificationDescriptors, or toolComponent.taxonomyDescriptors, depending on context. + Index int `json:"index,omitempty"` + + // Key/value pairs that provide additional information about the reporting descriptor reference. + Properties *PropertyBag `json:"properties,omitempty"` + + // A reference used to locate the toolComponent associated with the descriptor. + ToolComponent *ToolComponentReference `json:"toolComponent,omitempty"` +} + +// ReportingDescriptorRelationship Information about the relation of one reporting descriptor to another. +type ReportingDescriptorRelationship struct { + + // A description of the reporting descriptor relationship. + Description *Message `json:"description,omitempty"` + + // A set of distinct strings that categorize the relationship. Well-known kinds include 'canPrecede', 'canFollow', 'willPrecede', 'willFollow', 'superset', 'subset', 'equal', 'disjoint', 'relevant', and 'incomparable'. + Kinds []string `json:"kinds,omitempty"` + + // Key/value pairs that provide additional information about the reporting descriptor reference. + Properties *PropertyBag `json:"properties,omitempty"` + + // A reference to the related reporting descriptor. + Target *ReportingDescriptorReference `json:"target"` +} + +// Result A result produced by an analysis tool. +type Result struct { + + // Identifies the artifact that the analysis tool was instructed to scan. This need not be the same as the artifact where the result actually occurred. + AnalysisTarget *ArtifactLocation `json:"analysisTarget,omitempty"` + + // A set of artifacts relevant to the result. + Attachments []*Attachment `json:"attachments,omitempty"` + + // The state of a result relative to a baseline of a previous run. + BaselineState interface{} `json:"baselineState,omitempty"` + + // An array of 'codeFlow' objects relevant to the result. + CodeFlows []*CodeFlow `json:"codeFlows,omitempty"` + + // A stable, unique identifier for the equivalence class of logically identical results to which this result belongs, in the form of a GUID. + CorrelationGuid string `json:"correlationGuid,omitempty"` + + // A set of strings each of which individually defines a stable, unique identity for the result. + Fingerprints map[string]string `json:"fingerprints,omitempty"` + + // An array of 'fix' objects, each of which represents a proposed fix to the problem indicated by the result. + Fixes []*Fix `json:"fixes,omitempty"` + + // An array of one or more unique 'graphTraversal' objects. + GraphTraversals []*GraphTraversal `json:"graphTraversals,omitempty"` + + // An array of zero or more unique graph objects associated with the result. + Graphs []*Graph `json:"graphs,omitempty"` + + // A stable, unique identifer for the result in the form of a GUID. + Guid string `json:"guid,omitempty"` + + // An absolute URI at which the result can be viewed. + HostedViewerUri string `json:"hostedViewerUri,omitempty"` + + // A value that categorizes results by evaluation state. 
+ Kind interface{} `json:"kind,omitempty"` + + // A value specifying the severity level of the result. + Level interface{} `json:"level,omitempty"` + + // The set of locations where the result was detected. Specify only one location unless the problem indicated by the result can only be corrected by making a change at every specified location. + Locations []*Location `json:"locations,omitempty"` + + // A message that describes the result. The first sentence of the message only will be displayed when visible space is limited. + Message *Message `json:"message"` + + // A positive integer specifying the number of times this logically unique result was observed in this run. + OccurrenceCount int `json:"occurrenceCount,omitempty"` + + // A set of strings that contribute to the stable, unique identity of the result. + PartialFingerprints map[string]string `json:"partialFingerprints,omitempty"` + + // Key/value pairs that provide additional information about the result. + Properties *PropertyBag `json:"properties,omitempty"` + + // Information about how and when the result was detected. + Provenance *ResultProvenance `json:"provenance,omitempty"` + + // A number representing the priority or importance of the result. + Rank float64 `json:"rank,omitempty"` + + // A set of locations relevant to this result. + RelatedLocations []*Location `json:"relatedLocations,omitempty"` + + // A reference used to locate the rule descriptor relevant to this result. + Rule *ReportingDescriptorReference `json:"rule,omitempty"` + + // The stable, unique identifier of the rule, if any, to which this result is relevant. + RuleId string `json:"ruleId,omitempty"` + + // The index within the tool component rules array of the rule object associated with this result. + RuleIndex int `json:"ruleIndex,omitempty"` + + // An array of 'stack' objects relevant to the result. + Stacks []*Stack `json:"stacks,omitempty"` + + // A set of suppressions relevant to this result. + Suppressions []*Suppression `json:"suppressions,omitempty"` + + // An array of references to taxonomy reporting descriptors that are applicable to the result. + Taxa []*ReportingDescriptorReference `json:"taxa,omitempty"` + + // A web request associated with this result. + WebRequest *WebRequest `json:"webRequest,omitempty"` + + // A web response associated with this result. + WebResponse *WebResponse `json:"webResponse,omitempty"` + + // The URIs of the work items associated with this result. + WorkItemUris []string `json:"workItemUris,omitempty"` +} + +// ResultProvenance Contains information about how and when a result was detected. +type ResultProvenance struct { + + // An array of physicalLocation objects which specify the portions of an analysis tool's output that a converter transformed into the result. + ConversionSources []*PhysicalLocation `json:"conversionSources,omitempty"` + + // A GUID-valued string equal to the automationDetails.guid property of the run in which the result was first detected. + FirstDetectionRunGuid string `json:"firstDetectionRunGuid,omitempty"` + + // The Coordinated Universal Time (UTC) date and time at which the result was first detected. See "Date/time properties" in the SARIF spec for the required format. + FirstDetectionTimeUtc string `json:"firstDetectionTimeUtc,omitempty"` + + // The index within the run.invocations array of the invocation object which describes the tool invocation that detected the result. 
+ InvocationIndex int `json:"invocationIndex,omitempty"` + + // A GUID-valued string equal to the automationDetails.guid property of the run in which the result was most recently detected. + LastDetectionRunGuid string `json:"lastDetectionRunGuid,omitempty"` + + // The Coordinated Universal Time (UTC) date and time at which the result was most recently detected. See "Date/time properties" in the SARIF spec for the required format. + LastDetectionTimeUtc string `json:"lastDetectionTimeUtc,omitempty"` + + // Key/value pairs that provide additional information about the result. + Properties *PropertyBag `json:"properties,omitempty"` +} + +// Run Describes a single run of an analysis tool, and contains the reported output of that run. +type Run struct { + + // Addresses associated with this run instance, if any. + Addresses []*Address `json:"addresses,omitempty"` + + // An array of artifact objects relevant to the run. + Artifacts []*Artifact `json:"artifacts,omitempty"` + + // Automation details that describe this run. + AutomationDetails *RunAutomationDetails `json:"automationDetails,omitempty"` + + // The 'guid' property of a previous SARIF 'run' that comprises the baseline that was used to compute result 'baselineState' properties for the run. + BaselineGuid string `json:"baselineGuid,omitempty"` + + // Specifies the unit in which the tool measures columns. + ColumnKind interface{} `json:"columnKind,omitempty"` + + // A conversion object that describes how a converter transformed an analysis tool's native reporting format into the SARIF format. + Conversion *Conversion `json:"conversion,omitempty"` + + // Specifies the default encoding for any artifact object that refers to a text file. + DefaultEncoding string `json:"defaultEncoding,omitempty"` + + // Specifies the default source language for any artifact object that refers to a text file that contains source code. + DefaultSourceLanguage string `json:"defaultSourceLanguage,omitempty"` + + // References to external property files that should be inlined with the content of a root log file. + ExternalPropertyFileReferences *ExternalPropertyFileReferences `json:"externalPropertyFileReferences,omitempty"` + + // An array of zero or more unique graph objects associated with the run. + Graphs []*Graph `json:"graphs,omitempty"` + + // Describes the invocation of the analysis tool. + Invocations []*Invocation `json:"invocations,omitempty"` + + // The language of the messages emitted into the log file during this run (expressed as an ISO 639-1 two-letter lowercase culture code) and an optional region (expressed as an ISO 3166-1 two-letter uppercase subculture code associated with a country or region). The casing is recommended but not required (in order for this data to conform to RFC5646). + Language string `json:"language,omitempty"` + + // An array of logical locations such as namespaces, types or functions. + LogicalLocations []*LogicalLocation `json:"logicalLocations,omitempty"` + + // An ordered list of character sequences that were treated as line breaks when computing region information for the run. + NewlineSequences []string `json:"newlineSequences,omitempty"` + + // The artifact location specified by each uriBaseId symbol on the machine where the tool originally ran. 
+ OriginalUriBaseIds map[string]*ArtifactLocation `json:"originalUriBaseIds,omitempty"` + + // Contains configurations that may potentially override both reportingDescriptor.defaultConfiguration (the tool's default severities) and invocation.configurationOverrides (severities established at run-time from the command line). + Policies []*ToolComponent `json:"policies,omitempty"` + + // Key/value pairs that provide additional information about the run. + Properties *PropertyBag `json:"properties,omitempty"` + + // An array of strings used to replace sensitive information in a redaction-aware property. + RedactionTokens []string `json:"redactionTokens,omitempty"` + + // The set of results contained in an SARIF log. The results array can be omitted when a run is solely exporting rules metadata. It must be present (but may be empty) if a log file represents an actual scan. + Results []*Result `json:"results,omitempty"` + + // Automation details that describe the aggregate of runs to which this run belongs. + RunAggregates []*RunAutomationDetails `json:"runAggregates,omitempty"` + + // A specialLocations object that defines locations of special significance to SARIF consumers. + SpecialLocations *SpecialLocations `json:"specialLocations,omitempty"` + + // An array of toolComponent objects relevant to a taxonomy in which results are categorized. + Taxonomies []*ToolComponent `json:"taxonomies,omitempty"` + + // An array of threadFlowLocation objects cached at run level. + ThreadFlowLocations []*ThreadFlowLocation `json:"threadFlowLocations,omitempty"` + + // Information about the tool or tool pipeline that generated the results in this run. A run can only contain results produced by a single tool or tool pipeline. A run can aggregate results from multiple log files, as long as context around the tool run (tool command-line arguments and the like) is identical for all aggregated files. + Tool *Tool `json:"tool"` + + // The set of available translations of the localized data provided by the tool. + Translations []*ToolComponent `json:"translations,omitempty"` + + // Specifies the revision in version control of the artifacts that were scanned. + VersionControlProvenance []*VersionControlDetails `json:"versionControlProvenance,omitempty"` + + // An array of request objects cached at run level. + WebRequests []*WebRequest `json:"webRequests,omitempty"` + + // An array of response objects cached at run level. + WebResponses []*WebResponse `json:"webResponses,omitempty"` +} + +// RunAutomationDetails Information that describes a run's identity and role within an engineering system process. +type RunAutomationDetails struct { + + // A stable, unique identifier for the equivalence class of runs to which this object's containing run object belongs in the form of a GUID. + CorrelationGuid string `json:"correlationGuid,omitempty"` + + // A description of the identity and role played within the engineering system by this object's containing run object. + Description *Message `json:"description,omitempty"` + + // A stable, unique identifer for this object's containing run object in the form of a GUID. + Guid string `json:"guid,omitempty"` + + // A hierarchical string that uniquely identifies this object's containing run object. + Id string `json:"id,omitempty"` + + // Key/value pairs that provide additional information about the run automation details. + Properties *PropertyBag `json:"properties,omitempty"` +} + +// SpecialLocations Defines locations of special significance to SARIF consumers. 
+type SpecialLocations struct { + + // Provides a suggestion to SARIF consumers to display file paths relative to the specified location. + DisplayBase *ArtifactLocation `json:"displayBase,omitempty"` + + // Key/value pairs that provide additional information about the special locations. + Properties *PropertyBag `json:"properties,omitempty"` +} + +// Stack A call stack that is relevant to a result. +type Stack struct { + + // An array of stack frames that represents a sequence of calls, rendered in reverse chronological order, that comprise the call stack. + Frames []*StackFrame `json:"frames"` + + // A message relevant to this call stack. + Message *Message `json:"message,omitempty"` + + // Key/value pairs that provide additional information about the stack. + Properties *PropertyBag `json:"properties,omitempty"` +} + +// StackFrame A function call within a stack trace. +type StackFrame struct { + + // The location to which this stack frame refers. + Location *Location `json:"location,omitempty"` + + // The name of the module that contains the code of this stack frame. + Module string `json:"module,omitempty"` + + // The parameters of the call that is executing. + Parameters []string `json:"parameters,omitempty"` + + // Key/value pairs that provide additional information about the stack frame. + Properties *PropertyBag `json:"properties,omitempty"` + + // The thread identifier of the stack frame. + ThreadId int `json:"threadId,omitempty"` +} + +// Suppression A suppression that is relevant to a result. +type Suppression struct { + + // A stable, unique identifer for the supression in the form of a GUID. + Guid string `json:"guid,omitempty"` + + // A string representing the justification for the suppression. + Justification string `json:"justification,omitempty"` + + // A string that indicates where the suppression is persisted. + Kind string `json:"kind"` + + // Identifies the location associated with the suppression. + Location *Location `json:"location,omitempty"` + + // Key/value pairs that provide additional information about the suppression. + Properties *PropertyBag `json:"properties,omitempty"` + + // A string that indicates the review status of the suppression. + Status interface{} `json:"status,omitempty"` +} + +// ThreadFlow Describes a sequence of code locations that specify a path through a single thread of execution such as an operating system or fiber. +type ThreadFlow struct { + + // An string that uniquely identifies the threadFlow within the codeFlow in which it occurs. + Id string `json:"id,omitempty"` + + // Values of relevant expressions at the start of the thread flow that remain constant. + ImmutableState map[string]*MultiformatMessageString `json:"immutableState,omitempty"` + + // Values of relevant expressions at the start of the thread flow that may change during thread flow execution. + InitialState map[string]*MultiformatMessageString `json:"initialState,omitempty"` + + // A temporally ordered array of 'threadFlowLocation' objects, each of which describes a location visited by the tool while producing the result. + Locations []*ThreadFlowLocation `json:"locations"` + + // A message relevant to the thread flow. + Message *Message `json:"message,omitempty"` + + // Key/value pairs that provide additional information about the thread flow. + Properties *PropertyBag `json:"properties,omitempty"` +} + +// ThreadFlowLocation A location visited by an analysis tool while simulating or monitoring the execution of a program. 
+type ThreadFlowLocation struct { + + // An integer representing the temporal order in which execution reached this location. + ExecutionOrder int `json:"executionOrder,omitempty"` + + // The Coordinated Universal Time (UTC) date and time at which this location was executed. + ExecutionTimeUtc string `json:"executionTimeUtc,omitempty"` + + // Specifies the importance of this location in understanding the code flow in which it occurs. The order from most to least important is "essential", "important", "unimportant". Default: "important". + Importance interface{} `json:"importance,omitempty"` + + // The index within the run threadFlowLocations array. + Index int `json:"index,omitempty"` + + // A set of distinct strings that categorize the thread flow location. Well-known kinds include 'acquire', 'release', 'enter', 'exit', 'call', 'return', 'branch', 'implicit', 'false', 'true', 'caution', 'danger', 'unknown', 'unreachable', 'taint', 'function', 'handler', 'lock', 'memory', 'resource', 'scope' and 'value'. + Kinds []string `json:"kinds,omitempty"` + + // The code location. + Location *Location `json:"location,omitempty"` + + // The name of the module that contains the code that is executing. + Module string `json:"module,omitempty"` + + // An integer representing a containment hierarchy within the thread flow. + NestingLevel int `json:"nestingLevel,omitempty"` + + // Key/value pairs that provide additional information about the threadflow location. + Properties *PropertyBag `json:"properties,omitempty"` + + // The call stack leading to this location. + Stack *Stack `json:"stack,omitempty"` + + // A dictionary, each of whose keys specifies a variable or expression, the associated value of which represents the variable or expression value. For an annotation of kind 'continuation', for example, this dictionary might hold the current assumed values of a set of global variables. + State map[string]*MultiformatMessageString `json:"state,omitempty"` + + // An array of references to rule or taxonomy reporting descriptors that are applicable to the thread flow location. + Taxa []*ReportingDescriptorReference `json:"taxa,omitempty"` + + // A web request associated with this thread flow location. + WebRequest *WebRequest `json:"webRequest,omitempty"` + + // A web response associated with this thread flow location. + WebResponse *WebResponse `json:"webResponse,omitempty"` +} + +// Tool The analysis tool that was run. +type Tool struct { + + // The analysis tool that was run. + Driver *ToolComponent `json:"driver"` + + // Tool extensions that contributed to or reconfigured the analysis tool that was run. + Extensions []*ToolComponent `json:"extensions,omitempty"` + + // Key/value pairs that provide additional information about the tool. + Properties *PropertyBag `json:"properties,omitempty"` +} + +// ToolComponent A component, such as a plug-in or the driver, of the analysis tool that was run. +type ToolComponent struct { + + // The component which is strongly associated with this component. For a translation, this refers to the component which has been translated. For an extension, this is the driver that provides the extension's plugin model. + AssociatedComponent *ToolComponentReference `json:"associatedComponent,omitempty"` + + // The kinds of data contained in this object. 
+ Contents []interface{} `json:"contents,omitempty"` + + // The binary version of the tool component's primary executable file expressed as four non-negative integers separated by a period (for operating systems that express file versions in this way). + DottedQuadFileVersion string `json:"dottedQuadFileVersion,omitempty"` + + // The absolute URI from which the tool component can be downloaded. + DownloadUri string `json:"downloadUri,omitempty"` + + // A comprehensive description of the tool component. + FullDescription *MultiformatMessageString `json:"fullDescription,omitempty"` + + // The name of the tool component along with its version and any other useful identifying information, such as its locale. + FullName string `json:"fullName,omitempty"` + + // A dictionary, each of whose keys is a resource identifier and each of whose values is a multiformatMessageString object, which holds message strings in plain text and (optionally) Markdown format. The strings can include placeholders, which can be used to construct a message in combination with an arbitrary number of additional string arguments. + GlobalMessageStrings map[string]*MultiformatMessageString `json:"globalMessageStrings,omitempty"` + + // A unique identifer for the tool component in the form of a GUID. + Guid string `json:"guid,omitempty"` + + // The absolute URI at which information about this version of the tool component can be found. + InformationUri string `json:"informationUri,omitempty"` + + // Specifies whether this object contains a complete definition of the localizable and/or non-localizable data for this component, as opposed to including only data that is relevant to the results persisted to this log file. + IsComprehensive bool `json:"isComprehensive,omitempty"` + + // The language of the messages emitted into the log file during this run (expressed as an ISO 639-1 two-letter lowercase language code) and an optional region (expressed as an ISO 3166-1 two-letter uppercase subculture code associated with a country or region). The casing is recommended but not required (in order for this data to conform to RFC5646). + Language string `json:"language,omitempty"` + + // The semantic version of the localized strings defined in this component; maintained by components that provide translations. + LocalizedDataSemanticVersion string `json:"localizedDataSemanticVersion,omitempty"` + + // An array of the artifactLocation objects associated with the tool component. + Locations []*ArtifactLocation `json:"locations,omitempty"` + + // The minimum value of localizedDataSemanticVersion required in translations consumed by this component; used by components that consume translations. + MinimumRequiredLocalizedDataSemanticVersion string `json:"minimumRequiredLocalizedDataSemanticVersion,omitempty"` + + // The name of the tool component. + Name string `json:"name"` + + // An array of reportingDescriptor objects relevant to the notifications related to the configuration and runtime execution of the tool component. + Notifications []*ReportingDescriptor `json:"notifications,omitempty"` + + // The organization or company that produced the tool component. + Organization string `json:"organization,omitempty"` + + // A product suite to which the tool component belongs. + Product string `json:"product,omitempty"` + + // A localizable string containing the name of the suite of products to which the tool component belongs. 
+ ProductSuite string `json:"productSuite,omitempty"` + + // Key/value pairs that provide additional information about the tool component. + Properties *PropertyBag `json:"properties,omitempty"` + + // A string specifying the UTC date (and optionally, the time) of the component's release. + ReleaseDateUtc string `json:"releaseDateUtc,omitempty"` + + // An array of reportingDescriptor objects relevant to the analysis performed by the tool component. + Rules []*ReportingDescriptor `json:"rules,omitempty"` + + // The tool component version in the format specified by Semantic Versioning 2.0. + SemanticVersion string `json:"semanticVersion,omitempty"` + + // A brief description of the tool component. + ShortDescription *MultiformatMessageString `json:"shortDescription,omitempty"` + + // An array of toolComponentReference objects to declare the taxonomies supported by the tool component. + SupportedTaxonomies []*ToolComponentReference `json:"supportedTaxonomies,omitempty"` + + // An array of reportingDescriptor objects relevant to the definitions of both standalone and tool-defined taxonomies. + Taxa []*ReportingDescriptor `json:"taxa,omitempty"` + + // Translation metadata, required for a translation, not populated by other component types. + TranslationMetadata *TranslationMetadata `json:"translationMetadata,omitempty"` + + // The tool component version, in whatever format the component natively provides. + Version string `json:"version,omitempty"` +} + +// ToolComponentReference Identifies a particular toolComponent object, either the driver or an extension. +type ToolComponentReference struct { + + // The 'guid' property of the referenced toolComponent. + Guid string `json:"guid,omitempty"` + + // An index into the referenced toolComponent in tool.extensions. + Index int `json:"index,omitempty"` + + // The 'name' property of the referenced toolComponent. + Name string `json:"name,omitempty"` + + // Key/value pairs that provide additional information about the toolComponentReference. + Properties *PropertyBag `json:"properties,omitempty"` +} + +// TranslationMetadata Provides additional metadata related to translation. +type TranslationMetadata struct { + + // The absolute URI from which the translation metadata can be downloaded. + DownloadUri string `json:"downloadUri,omitempty"` + + // A comprehensive description of the translation metadata. + FullDescription *MultiformatMessageString `json:"fullDescription,omitempty"` + + // The full name associated with the translation metadata. + FullName string `json:"fullName,omitempty"` + + // The absolute URI from which information related to the translation metadata can be downloaded. + InformationUri string `json:"informationUri,omitempty"` + + // The name associated with the translation metadata. + Name string `json:"name"` + + // Key/value pairs that provide additional information about the translation metadata. + Properties *PropertyBag `json:"properties,omitempty"` + + // A brief description of the translation metadata. + ShortDescription *MultiformatMessageString `json:"shortDescription,omitempty"` +} + +// VersionControlDetails Specifies the information necessary to retrieve a desired revision from a version control system. +type VersionControlDetails struct { + + // A Coordinated Universal Time (UTC) date and time that can be used to synchronize an enlistment to the state of the repository at that time. + AsOfTimeUtc string `json:"asOfTimeUtc,omitempty"` + + // The name of a branch containing the revision. 
+ Branch string `json:"branch,omitempty"` + + // The location in the local file system to which the root of the repository was mapped at the time of the analysis. + MappedTo *ArtifactLocation `json:"mappedTo,omitempty"` + + // Key/value pairs that provide additional information about the version control details. + Properties *PropertyBag `json:"properties,omitempty"` + + // The absolute URI of the repository. + RepositoryUri string `json:"repositoryUri"` + + // A string that uniquely and permanently identifies the revision within the repository. + RevisionId string `json:"revisionId,omitempty"` + + // A tag that has been applied to the revision. + RevisionTag string `json:"revisionTag,omitempty"` +} + +// WebRequest Describes an HTTP request. +type WebRequest struct { + + // The body of the request. + Body *ArtifactContent `json:"body,omitempty"` + + // The request headers. + Headers map[string]string `json:"headers,omitempty"` + + // The index within the run.webRequests array of the request object associated with this result. + Index int `json:"index,omitempty"` + + // The HTTP method. Well-known values are 'GET', 'PUT', 'POST', 'DELETE', 'PATCH', 'HEAD', 'OPTIONS', 'TRACE', 'CONNECT'. + Method string `json:"method,omitempty"` + + // The request parameters. + Parameters map[string]string `json:"parameters,omitempty"` + + // Key/value pairs that provide additional information about the request. + Properties *PropertyBag `json:"properties,omitempty"` + + // The request protocol. Example: 'http'. + Protocol string `json:"protocol,omitempty"` + + // The target of the request. + Target string `json:"target,omitempty"` + + // The request version. Example: '1.1'. + Version string `json:"version,omitempty"` +} + +// WebResponse Describes the response to an HTTP request. +type WebResponse struct { + + // The body of the response. + Body *ArtifactContent `json:"body,omitempty"` + + // The response headers. + Headers map[string]string `json:"headers,omitempty"` + + // The index within the run.webResponses array of the response object associated with this result. + Index int `json:"index,omitempty"` + + // Specifies whether a response was received from the server. + NoResponseReceived bool `json:"noResponseReceived,omitempty"` + + // Key/value pairs that provide additional information about the response. + Properties *PropertyBag `json:"properties,omitempty"` + + // The response protocol. Example: 'http'. + Protocol string `json:"protocol,omitempty"` + + // The response reason. Example: 'Not found'. + ReasonPhrase string `json:"reasonPhrase,omitempty"` + + // The response status code. Example: 451. + StatusCode int `json:"statusCode,omitempty"` + + // The response version. Example: '1.1'. + Version string `json:"version,omitempty"` +} diff --git a/vendor/github.com/daixiang0/gci/LICENSE b/vendor/github.com/daixiang0/gci/LICENSE new file mode 100644 index 000000000..e1292f738 --- /dev/null +++ b/vendor/github.com/daixiang0/gci/LICENSE @@ -0,0 +1,29 @@ +BSD 3-Clause License + +Copyright (c) 2020, Xiang Dai +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +2. 
Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/github.com/daixiang0/gci/pkg/gci/gci.go b/vendor/github.com/daixiang0/gci/pkg/gci/gci.go new file mode 100644 index 000000000..7efa576ca --- /dev/null +++ b/vendor/github.com/daixiang0/gci/pkg/gci/gci.go @@ -0,0 +1,383 @@ +package gci + +import ( + "bytes" + "fmt" + "io" + "io/ioutil" + "os" + "os/exec" + "path/filepath" + "sort" + "strings" +) + +const ( + // pkg type: standard, remote, local + standard int = iota + // 3rd-party packages + remote + local + + commentFlag = "//" +) + +var ( + importStartFlag = []byte(` +import ( +`) + importEndFlag = []byte(` +) +`) +) + +type FlagSet struct { + LocalFlag []string + DoWrite, DoDiff *bool +} + +type pkg struct { + list map[int][]string + comment map[string]string + alias map[string]string +} + +// ParseLocalFlag takes a comma-separated list of +// package-name-prefixes (as passed to the "-local" flag), and splits +// it in to a list. This is different than strings.Split in that it +// handles the empty string and empty entries in the list. +func ParseLocalFlag(str string) []string { + return strings.FieldsFunc(str, func(c rune) bool { return c == ',' }) +} + +func newPkg(data [][]byte, localFlag []string) *pkg { + listMap := make(map[int][]string) + commentMap := make(map[string]string) + aliasMap := make(map[string]string) + p := &pkg{ + list: listMap, + comment: commentMap, + alias: aliasMap, + } + + formatData := make([]string, 0) + // remove all empty lines + for _, v := range data { + if len(v) > 0 { + formatData = append(formatData, strings.TrimSpace(string(v))) + } + } + + n := len(formatData) + for i := n - 1; i >= 0; i-- { + line := formatData[i] + + // check commentFlag: + // 1. one line commentFlag + // 2. 
commentFlag after import path + commentIndex := strings.Index(line, commentFlag) + if commentIndex == 0 { + // comment in the last line is useless, ignore it + if i+1 >= n { + continue + } + pkg, _, _ := getPkgInfo(formatData[i+1], strings.Index(formatData[i+1], commentFlag) >= 0) + p.comment[pkg] = line + continue + } else if commentIndex > 0 { + pkg, alias, comment := getPkgInfo(line, true) + if alias != "" { + p.alias[pkg] = alias + } + + p.comment[pkg] = comment + pkgType := getPkgType(pkg, localFlag) + p.list[pkgType] = append(p.list[pkgType], pkg) + continue + } + + pkg, alias, _ := getPkgInfo(line, false) + + if alias != "" { + p.alias[pkg] = alias + } + + pkgType := getPkgType(pkg, localFlag) + p.list[pkgType] = append(p.list[pkgType], pkg) + } + + return p +} + +// fmt format import pkgs as expected +func (p *pkg) fmt() []byte { + ret := make([]string, 0, 100) + + for pkgType := range []int{standard, remote, local} { + sort.Strings(p.list[pkgType]) + for _, s := range p.list[pkgType] { + if p.comment[s] != "" { + l := fmt.Sprintf("%s%s%s%s", linebreak, indent, p.comment[s], linebreak) + ret = append(ret, l) + } + + if p.alias[s] != "" { + s = fmt.Sprintf("%s%s%s%s%s", indent, p.alias[s], blank, s, linebreak) + } else { + s = fmt.Sprintf("%s%s%s", indent, s, linebreak) + } + + ret = append(ret, s) + } + + if len(p.list[pkgType]) > 0 { + ret = append(ret, linebreak) + } + } + if len(ret) > 0 && ret[len(ret)-1] == linebreak { + ret = ret[:len(ret)-1] + } + + // remove duplicate empty lines + s1 := fmt.Sprintf("%s%s%s%s", linebreak, linebreak, linebreak, indent) + s2 := fmt.Sprintf("%s%s%s", linebreak, linebreak, indent) + return []byte(strings.ReplaceAll(strings.Join(ret, ""), s1, s2)) +} + +// getPkgInfo assume line is a import path, and return (path, alias, comment) +func getPkgInfo(line string, comment bool) (string, string, string) { + if comment { + s := strings.Split(line, commentFlag) + pkgArray := strings.Split(s[0], blank) + if len(pkgArray) > 1 { + return pkgArray[1], pkgArray[0], fmt.Sprintf("%s%s%s", commentFlag, blank, strings.TrimSpace(s[1])) + } else { + return strings.TrimSpace(pkgArray[0]), "", fmt.Sprintf("%s%s%s", commentFlag, blank, strings.TrimSpace(s[1])) + } + } else { + pkgArray := strings.Split(line, blank) + if len(pkgArray) > 1 { + return pkgArray[1], pkgArray[0], "" + } else { + return pkgArray[0], "", "" + } + } +} + +func getPkgType(line string, localFlag []string) int { + pkgName := strings.Trim(line, "\"\\`") + + for _, localPkg := range localFlag { + if strings.HasPrefix(pkgName, localPkg) { + return local + } + } + + if isStandardPackage(pkgName) { + return standard + } + + return remote +} + +const ( + blank = " " + indent = "\t" + linebreak = "\n" +) + +func diff(b1, b2 []byte, filename string) (data []byte, err error) { + f1, err := writeTempFile("", "gci", b1) + if err != nil { + return + } + defer os.Remove(f1) + + f2, err := writeTempFile("", "gci", b2) + if err != nil { + return + } + defer os.Remove(f2) + + cmd := "diff" + + data, err = exec.Command(cmd, "-u", f1, f2).CombinedOutput() + if len(data) > 0 { + // diff exits with a non-zero status when the files don't match. + // Ignore that failure as long as we get output. 
+ return replaceTempFilename(data, filename) + } + return +} + +func writeTempFile(dir, prefix string, data []byte) (string, error) { + file, err := ioutil.TempFile(dir, prefix) + if err != nil { + return "", err + } + _, err = file.Write(data) + if err1 := file.Close(); err == nil { + err = err1 + } + if err != nil { + os.Remove(file.Name()) + return "", err + } + return file.Name(), nil +} + +// replaceTempFilename replaces temporary filenames in diff with actual one. +// +// --- /tmp/gofmt316145376 2017-02-03 19:13:00.280468375 -0500 +// +++ /tmp/gofmt617882815 2017-02-03 19:13:00.280468375 -0500 +// ... +// -> +// --- path/to/file.go.orig 2017-02-03 19:13:00.280468375 -0500 +// +++ path/to/file.go 2017-02-03 19:13:00.280468375 -0500 +// ... +func replaceTempFilename(diff []byte, filename string) ([]byte, error) { + bs := bytes.SplitN(diff, []byte{'\n'}, 3) + if len(bs) < 3 { + return nil, fmt.Errorf("got unexpected diff for %s", filename) + } + // Preserve timestamps. + var t0, t1 []byte + if i := bytes.LastIndexByte(bs[0], '\t'); i != -1 { + t0 = bs[0][i:] + } + if i := bytes.LastIndexByte(bs[1], '\t'); i != -1 { + t1 = bs[1][i:] + } + // Always print filepath with slash separator. + f := filepath.ToSlash(filename) + bs[0] = []byte(fmt.Sprintf("--- %s%s", f+".orig", t0)) + bs[1] = []byte(fmt.Sprintf("+++ %s%s", f, t1)) + return bytes.Join(bs, []byte{'\n'}), nil +} + +func visitFile(set *FlagSet) filepath.WalkFunc { + return func(path string, f os.FileInfo, err error) error { + if err == nil && isGoFile(f) { + err = processFile(path, os.Stdout, set) + } + return err + } +} + +func WalkDir(path string, set *FlagSet) error { + return filepath.Walk(path, visitFile(set)) +} + +func isGoFile(f os.FileInfo) bool { + // ignore non-Go files + name := f.Name() + return !f.IsDir() && !strings.HasPrefix(name, ".") && strings.HasSuffix(name, ".go") +} + +func ProcessFile(filename string, out io.Writer, set *FlagSet) error { + return processFile(filename, out, set) +} + +func processFile(filename string, out io.Writer, set *FlagSet) error { + var err error + + f, err := os.Open(filename) + if err != nil { + return err + } + defer f.Close() + + src, err := ioutil.ReadAll(f) + if err != nil { + return err + } + + ori := make([]byte, len(src)) + copy(ori, src) + start := bytes.Index(src, importStartFlag) + // in case no importStartFlag or importStartFlag exist in the commentFlag + if start < 0 { + fmt.Printf("skip file %s since no import\n", filename) + return nil + } + end := bytes.Index(src[start:], importEndFlag) + start + + ret := bytes.Split(src[start+len(importStartFlag):end], []byte(linebreak)) + + p := newPkg(ret, set.LocalFlag) + + res := append(src[:start+len(importStartFlag)], append(p.fmt(), src[end+1:]...)...) + + if !bytes.Equal(ori, res) { + if *set.DoWrite { + // On Windows, we need to re-set the permissions from the file. See golang/go#38225. 
+ var perms os.FileMode + if fi, err := os.Stat(filename); err == nil { + perms = fi.Mode() & os.ModePerm + } + err = ioutil.WriteFile(filename, res, perms) + if err != nil { + return err + } + } + if *set.DoDiff { + data, err := diff(ori, res, filename) + if err != nil { + return fmt.Errorf("failed to diff: %v", err) + } + fmt.Printf("diff -u %s %s\n", filepath.ToSlash(filename+".orig"), filepath.ToSlash(filename)) + if _, err := out.Write(data); err != nil { + return fmt.Errorf("failed to write: %v", err) + } + } + } + if !*set.DoWrite && !*set.DoDiff { + if _, err = out.Write(res); err != nil { + return fmt.Errorf("failed to write: %v", err) + } + } + + return err +} + +// Run return source and result in []byte if succeed +func Run(filename string, set *FlagSet) ([]byte, []byte, error) { + var err error + + f, err := os.Open(filename) + if err != nil { + return nil, nil, err + } + defer f.Close() + + src, err := ioutil.ReadAll(f) + if err != nil { + return nil, nil, err + } + + ori := make([]byte, len(src)) + copy(ori, src) + start := bytes.Index(src, importStartFlag) + // in case no importStartFlag or importStartFlag exist in the commentFlag + if start < 0 { + return nil, nil, nil + } + end := bytes.Index(src[start:], importEndFlag) + start + + // in case import flags are part of a codegen template, or otherwise "wrong" + if start+len(importStartFlag) > end { + return nil, nil, nil + } + + ret := bytes.Split(src[start+len(importStartFlag):end], []byte(linebreak)) + + p := newPkg(ret, set.LocalFlag) + + res := append(src[:start+len(importStartFlag)], append(p.fmt(), src[end+1:]...)...) + + if bytes.Equal(ori, res) { + return ori, nil, nil + } + + return ori, res, nil +} diff --git a/vendor/github.com/daixiang0/gci/pkg/gci/std.go b/vendor/github.com/daixiang0/gci/pkg/gci/std.go new file mode 100644 index 000000000..ac96b55ab --- /dev/null +++ b/vendor/github.com/daixiang0/gci/pkg/gci/std.go @@ -0,0 +1,161 @@ +package gci + +// Code generated based on go1.16beta1. DO NOT EDIT. 
+ +var standardPackages = map[string]struct{}{ + "archive/tar": {}, + "archive/zip": {}, + "bufio": {}, + "bytes": {}, + "compress/bzip2": {}, + "compress/flate": {}, + "compress/gzip": {}, + "compress/lzw": {}, + "compress/zlib": {}, + "container/heap": {}, + "container/list": {}, + "container/ring": {}, + "context": {}, + "crypto": {}, + "crypto/aes": {}, + "crypto/cipher": {}, + "crypto/des": {}, + "crypto/dsa": {}, + "crypto/ecdsa": {}, + "crypto/ed25519": {}, + "crypto/elliptic": {}, + "crypto/hmac": {}, + "crypto/md5": {}, + "crypto/rand": {}, + "crypto/rc4": {}, + "crypto/rsa": {}, + "crypto/sha1": {}, + "crypto/sha256": {}, + "crypto/sha512": {}, + "crypto/subtle": {}, + "crypto/tls": {}, + "crypto/x509": {}, + "crypto/x509/pkix": {}, + "database/sql": {}, + "database/sql/driver": {}, + "debug/dwarf": {}, + "debug/elf": {}, + "debug/gosym": {}, + "debug/macho": {}, + "debug/pe": {}, + "debug/plan9obj": {}, + "embed": {}, + "encoding": {}, + "encoding/ascii85": {}, + "encoding/asn1": {}, + "encoding/base32": {}, + "encoding/base64": {}, + "encoding/binary": {}, + "encoding/csv": {}, + "encoding/gob": {}, + "encoding/hex": {}, + "encoding/json": {}, + "encoding/pem": {}, + "encoding/xml": {}, + "errors": {}, + "expvar": {}, + "flag": {}, + "fmt": {}, + "go/ast": {}, + "go/build": {}, + "go/constant": {}, + "go/doc": {}, + "go/format": {}, + "go/importer": {}, + "go/parser": {}, + "go/printer": {}, + "go/scanner": {}, + "go/token": {}, + "go/types": {}, + "hash": {}, + "hash/adler32": {}, + "hash/crc32": {}, + "hash/crc64": {}, + "hash/fnv": {}, + "hash/maphash": {}, + "html": {}, + "html/template": {}, + "image": {}, + "image/color": {}, + "image/color/palette": {}, + "image/draw": {}, + "image/gif": {}, + "image/jpeg": {}, + "image/png": {}, + "index/suffixarray": {}, + "io": {}, + "io/fs": {}, + "io/ioutil": {}, + "log": {}, + "log/syslog": {}, + "math": {}, + "math/big": {}, + "math/bits": {}, + "math/cmplx": {}, + "math/rand": {}, + "mime": {}, + "mime/multipart": {}, + "mime/quotedprintable": {}, + "net": {}, + "net/http": {}, + "net/http/cgi": {}, + "net/http/cookiejar": {}, + "net/http/fcgi": {}, + "net/http/httptest": {}, + "net/http/httptrace": {}, + "net/http/httputil": {}, + "net/http/pprof": {}, + "net/mail": {}, + "net/rpc": {}, + "net/rpc/jsonrpc": {}, + "net/smtp": {}, + "net/textproto": {}, + "net/url": {}, + "os": {}, + "os/exec": {}, + "os/signal": {}, + "os/user": {}, + "path": {}, + "path/filepath": {}, + "plugin": {}, + "reflect": {}, + "regexp": {}, + "regexp/syntax": {}, + "runtime": {}, + "runtime/cgo": {}, + "runtime/debug": {}, + "runtime/metrics": {}, + "runtime/pprof": {}, + "runtime/race": {}, + "runtime/trace": {}, + "sort": {}, + "strconv": {}, + "strings": {}, + "sync": {}, + "sync/atomic": {}, + "syscall": {}, + "testing": {}, + "testing/fstest": {}, + "testing/iotest": {}, + "testing/quick": {}, + "text/scanner": {}, + "text/tabwriter": {}, + "text/template": {}, + "text/template/parse": {}, + "time": {}, + "time/tzdata": {}, + "unicode": {}, + "unicode/utf16": {}, + "unicode/utf8": {}, + "unsafe": {}, +} + +func isStandardPackage(pkg string) bool { + _, ok := standardPackages[pkg] + return ok +} diff --git a/vendor/github.com/denis-tingajkin/go-header/.gitignore b/vendor/github.com/denis-tingajkin/go-header/.gitignore new file mode 100644 index 000000000..62c893550 --- /dev/null +++ b/vendor/github.com/denis-tingajkin/go-header/.gitignore @@ -0,0 +1 @@ +.idea/ \ No newline at end of file diff --git 
a/vendor/github.com/denis-tingajkin/go-header/.go-header.yml b/vendor/github.com/denis-tingajkin/go-header/.go-header.yml new file mode 100644 index 000000000..446d7317e --- /dev/null +++ b/vendor/github.com/denis-tingajkin/go-header/.go-header.yml @@ -0,0 +1,19 @@ +values: + regexp: + copyright-holder: Copyright \(c\) {{year-range}} Denis Tingajkin +template: | + {{copyright-holder}} + + SPDX-License-Identifier: Apache-2.0 + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at: + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. \ No newline at end of file diff --git a/vendor/github.com/denis-tingajkin/go-header/LICENSE b/vendor/github.com/denis-tingajkin/go-header/LICENSE new file mode 100644 index 000000000..a2c9fda21 --- /dev/null +++ b/vendor/github.com/denis-tingajkin/go-header/LICENSE @@ -0,0 +1,674 @@ + GNU GENERAL PUBLIC LICENSE + Version 3, 29 June 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The GNU General Public License is a free, copyleft license for +software and other kinds of works. + + The licenses for most software and other practical works are designed +to take away your freedom to share and change the works. By contrast, +the GNU General Public License is intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. We, the Free Software Foundation, use the +GNU General Public License for most of our software; it applies also to +any other work released this way by its authors. You can apply it to +your programs, too. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. + + To protect your rights, we need to prevent others from denying you +these rights or asking you to surrender the rights. Therefore, you have +certain responsibilities if you distribute copies of the software, or if +you modify it: responsibilities to respect the freedom of others. + + For example, if you distribute copies of such a program, whether +gratis or for a fee, you must pass on to the recipients the same +freedoms that you received. You must make sure that they, too, receive +or can get the source code. And you must show them these terms so they +know their rights. + + Developers that use the GNU GPL protect your rights with two steps: +(1) assert copyright on the software, and (2) offer you this License +giving you legal permission to copy, distribute and/or modify it. + + For the developers' and authors' protection, the GPL clearly explains +that there is no warranty for this free software. 
For both users' and +authors' sake, the GPL requires that modified versions be marked as +changed, so that their problems will not be attributed erroneously to +authors of previous versions. + + Some devices are designed to deny users access to install or run +modified versions of the software inside them, although the manufacturer +can do so. This is fundamentally incompatible with the aim of +protecting users' freedom to change the software. The systematic +pattern of such abuse occurs in the area of products for individuals to +use, which is precisely where it is most unacceptable. Therefore, we +have designed this version of the GPL to prohibit the practice for those +products. If such problems arise substantially in other domains, we +stand ready to extend this provision to those domains in future versions +of the GPL, as needed to protect the freedom of users. + + Finally, every program is threatened constantly by software patents. +States should not allow patents to restrict development and use of +software on general-purpose computers, but in those that do, we wish to +avoid the special danger that patents applied to a free program could +make it effectively proprietary. To prevent this, the GPL assures that +patents cannot be used to render the program non-free. + + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. + + "This License" refers to version 3 of the GNU General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. + + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. 
+ + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. + + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. + + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. This License explicitly affirms your unlimited +permission to run the unmodified Program. The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. 
+ + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. + + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. + + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. + + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. + + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. 
+ + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. + + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. + + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. 
+ + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. + + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. + + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. + + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. + + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. 
+ + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. + + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. + + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. + + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. 
If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. + + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. + + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. + + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). 
To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. + + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Use with the GNU Affero General Public License. 
+ + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU Affero General Public License into a single +combined work, and to convey the resulting work. The terms of this +License will continue to apply to the part which is the covered work, +but the special requirements of the GNU Affero General Public License, +section 13, concerning interaction through a network will apply to the +combination as such. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU General Public License from time to time. Such new versions will +be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU General Public License, you may choose any version ever published +by the Free Software Foundation. + + If the Program specifies that a proxy can decide which future +versions of the GNU General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. + + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS +THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY +GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE +USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF +DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD +PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), +EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF +SUCH DAMAGES. + + 17. Interpretation of Sections 15 and 16. + + If the disclaimer of warranty and limitation of liability provided +above cannot be given local legal effect according to their terms, +reviewing courts shall apply local law that most closely approximates +an absolute waiver of all civil liability in connection with the +Program, unless a warranty or assumption of liability accompanies a +copy of the Program in return for a fee. 
+ + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Programs + + If you develop a new program, and you want it to be of the greatest +possible use to the public, the best way to achieve this is to make it +free software which everyone can redistribute and change under these terms. + + To do so, attach the following notices to the program. It is safest +to attach them to the start of each source file to most effectively +state the exclusion of warranty; and each file should have at least +the "copyright" line and a pointer to where the full notice is found. + + + Copyright (C) + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program. If not, see . + +Also add information on how to contact you by electronic and paper mail. + + If the program does terminal interaction, make it output a short +notice like this when it starts in an interactive mode: + + Copyright (C) + This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. + This is free software, and you are welcome to redistribute it + under certain conditions; type `show c' for details. + +The hypothetical commands `show w' and `show c' should show the appropriate +parts of the General Public License. Of course, your program's commands +might be different; for a GUI interface, you would use an "about box". + + You should also get your employer (if you work as a programmer) or school, +if any, to sign a "copyright disclaimer" for the program, if necessary. +For more information on this, and how to apply and follow the GNU GPL, see +. + + The GNU General Public License does not permit incorporating your program +into proprietary programs. If your program is a subroutine library, you +may consider it more useful to permit linking proprietary applications with +the library. If this is what you want to do, use the GNU Lesser General +Public License instead of this License. But first, please read +. diff --git a/vendor/github.com/denis-tingajkin/go-header/README.md b/vendor/github.com/denis-tingajkin/go-header/README.md new file mode 100644 index 000000000..1a2a3d9a6 --- /dev/null +++ b/vendor/github.com/denis-tingajkin/go-header/README.md @@ -0,0 +1,81 @@ +# go-header +[![Actions Status](https://github.com/denis-tingajkin/go-header/workflows/ci/badge.svg)](https://github.com/denis-tingajkin/go-header/actions) + +Go source code linter providing checks for license headers. + +## Installation + +For installation you can simply use `go get`. + +```bash +go get github.com/denis-tingajkin/go-header/cmd/go-header +``` + +## Configuration + +To configuring `.go-header.yml` linter you simply need to fill the next fields: + +```yaml +--- +temaplte: # expects header template string. +tempalte-path: # expects path to file with license header string. +values: # expects `const` or `regexp` node with values where values is a map string to string. + const: + key1: value1 # const value just checks equality. Note `key1` should be used in template string as {{ key1 }} or {{ KEY1 }}. 
+ regexp: + key2: value2 # regexp value just checks regex match. The value should be a valid regexp pattern. Note `key2` should be used in template string as {{ key2 }} or {{ KEY2 }}. +``` + +Where `values` also can be used recursively. Example: + +```yaml +values: + const: + key1: "value" + regexp: + key2: "{{key1}} value1" # Reads as regex pattern "value value1" +``` + +## Bult-in values + +- **YEAR** - Expects current year. Example header value: `2020`. Example of template using: `{{YEAR}}` or `{{year}}`. +- **YEAR-RANGE** - Expects any valid year interval or current year. Example header value: `2020` or `2000-2020`. Example of template using: `{{year-range}}` or `{{YEAR-RANGE}}`. + +## Execution + +`go-header` linter expects file paths on input. If you want to run `go-header` only on diff files, then you can use this command: + +```bash +go-header $(git diff --name-only | grep -E '.*\.go') +``` + +## Setup example + +### Step 1 + +Create configuration file `.go-header.yml` in the root of project. + +```yaml +--- +values: + const: + MY COMPANY: mycompany.com +template: | + {{ MY COMPANY }} + SPDX-License-Identifier: Apache-2.0 + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at: + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +``` + +### Step 2 +You are ready! Execute `go-header ${PATH_TO_FILES}` from the root of the project. diff --git a/vendor/github.com/denis-tingajkin/go-header/analyzer.go b/vendor/github.com/denis-tingajkin/go-header/analyzer.go new file mode 100644 index 000000000..5707890b0 --- /dev/null +++ b/vendor/github.com/denis-tingajkin/go-header/analyzer.go @@ -0,0 +1,146 @@ +// Copyright (c) 2020 Denis Tingajkin +// +// SPDX-License-Identifier: Apache-2.0 +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at: +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
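For orientation, here is a minimal, hypothetical usage sketch (not part of this patch) showing how the go-header pieces vendored below fit together: a `Configuration` parsed from `.go-header.yml` supplies the template and values, `New` assembles the analyzer via the `WithTemplate`/`WithValues` options, and `Analyze` is run against a parsed file. The `main` wrapper and the `analyzer.go` target path are illustrative assumptions; the API names come from the vendored sources in this diff.

```go
// Hypothetical usage sketch only; not part of this patch. It wires together the
// Configuration, options and Analyzer defined in the vendored go-header sources.
package main

import (
	"fmt"
	"go/parser"
	"go/token"
	"log"

	goheader "github.com/denis-tingajkin/go-header"
)

func main() {
	// Parse the linter configuration (template + values) from .go-header.yml.
	cfg := &goheader.Configuration{}
	if err := cfg.Parse(".go-header.yml"); err != nil {
		log.Fatal(err)
	}
	values, err := cfg.GetValues()
	if err != nil {
		log.Fatal(err)
	}
	template, err := cfg.GetTemplate()
	if err != nil {
		log.Fatal(err)
	}

	// Parse a target Go file, keeping comments so the license header is visible.
	const path = "analyzer.go" // illustrative target file
	fset := token.NewFileSet()
	file, err := parser.ParseFile(fset, path, nil, parser.ParseComments)
	if err != nil {
		log.Fatal(err)
	}

	// Build the analyzer from the configuration and report any header issue.
	a := goheader.New(goheader.WithTemplate(template), goheader.WithValues(values))
	if issue := a.Analyze(&goheader.Target{Path: path, File: file}); issue != nil {
		fmt.Printf("%s: %s\n", issue.Location(), issue.Message())
	}
}
```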
+ +package goheader + +import ( + "fmt" + "go/ast" + "os" + "os/exec" + "strings" + "time" +) + +type Target struct { + Path string + File *ast.File +} + +const iso = "2006-01-02 15:04:05 -0700" + +func (t *Target) ModTime() (time.Time, error) { + diff, err := exec.Command("git", "diff", t.Path).CombinedOutput() + if err == nil && len(diff) == 0 { + line, err := exec.Command("git", "log", "-1", "--pretty=format:%cd", "--date=iso", "--", t.Path).CombinedOutput() + if err == nil { + return time.Parse(iso, string(line)) + } + } + info, err := os.Stat(t.Path) + if err != nil { + return time.Time{}, err + } + return info.ModTime(), nil +} + +type Analyzer struct { + values map[string]Value + template string +} + +func (a *Analyzer) Analyze(target *Target) Issue { + if a.template == "" { + return NewIssue("Missed template for check") + } + if t, err := target.ModTime(); err == nil { + if t.Year() != time.Now().Year() { + return nil + } + } + file := target.File + var header string + var offset = Location{ + Position: 1, + } + if len(file.Comments) > 0 && file.Comments[0].Pos() < file.Package { + if strings.HasPrefix(file.Comments[0].List[0].Text, "/*") { + header = (&ast.CommentGroup{List: []*ast.Comment{file.Comments[0].List[0]}}).Text() + } else { + header = file.Comments[0].Text() + offset.Position += 3 + } + } + header = strings.TrimSpace(header) + if header == "" { + return NewIssue("Missed header for check") + } + s := NewReader(header) + s.SetOffset(offset) + t := NewReader(a.template) + for !s.Done() && !t.Done() { + templateCh := t.Peek() + if templateCh == '{' { + name := a.readField(t) + if a.values[name] == nil { + return NewIssue(fmt.Sprintf("Template has unknown value: %v", name)) + } + if i := a.values[name].Read(s); i != nil { + return i + } + continue + } + sourceCh := s.Peek() + if sourceCh != templateCh { + l := s.Location() + notNextLine := func(r rune) bool { + return r != '\n' + } + actual := s.ReadWhile(notNextLine) + expected := t.ReadWhile(notNextLine) + return NewIssueWithLocation(fmt.Sprintf("Actual: %v\nExpected:%v", actual, expected), l) + } + s.Next() + t.Next() + } + if !s.Done() { + l := s.Location() + return NewIssueWithLocation(fmt.Sprintf("Unexpected string: %v", s.Finish()), l) + } + if !t.Done() { + l := s.Location() + return NewIssueWithLocation(fmt.Sprintf("Missed string: %v", t.Finish()), l) + } + return nil +} + +func (a *Analyzer) readField(reader *Reader) string { + _ = reader.Next() + _ = reader.Next() + + r := reader.ReadWhile(func(r rune) bool { + return r != '}' + }) + + _ = reader.Next() + _ = reader.Next() + + return strings.ToLower(strings.TrimSpace(r)) +} + +func New(options ...Option) *Analyzer { + a := &Analyzer{} + for _, o := range options { + o.apply(a) + } + for _, v := range a.values { + err := v.Calculate(a.values) + if err != nil { + panic(err.Error()) + } + } + return a +} diff --git a/vendor/github.com/denis-tingajkin/go-header/config.go b/vendor/github.com/denis-tingajkin/go-header/config.go new file mode 100644 index 000000000..fa8b23c2d --- /dev/null +++ b/vendor/github.com/denis-tingajkin/go-header/config.go @@ -0,0 +1,99 @@ +// Copyright (c) 2020 Denis Tingajkin +// +// SPDX-License-Identifier: Apache-2.0 +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at: +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package goheader + +import ( + "errors" + "fmt" + "io/ioutil" + "strings" + "time" + + "gopkg.in/yaml.v2" +) + +// Configuration represents go-header linter setup parameters +type Configuration struct { + // Values is map of values. Supports two types 'const` and `regexp`. Values can be used recursively. + Values map[string]map[string]string `yaml:"values"'` + // Template is template for checking. Uses values. + Template string `yaml:"template"` + // TemplatePath path to the template file. Useful if need to load the template from a specific file. + TemplatePath string `yaml:"template-path"` +} + +func (c *Configuration) builtInValues() map[string]Value { + var result = make(map[string]Value) + year := fmt.Sprint(time.Now().Year()) + result["year-range"] = &RegexpValue{ + RawValue: strings.ReplaceAll(`(20\d\d\-YEAR)|(YEAR)`, "YEAR", year), + } + result["year"] = &ConstValue{ + RawValue: year, + } + return result +} + +func (c *Configuration) GetValues() (map[string]Value, error) { + var result = c.builtInValues() + createConst := func(raw string) Value { + return &ConstValue{RawValue: raw} + } + createRegexp := func(raw string) Value { + return &RegexpValue{RawValue: raw} + } + appendValues := func(m map[string]string, create func(string) Value) { + for k, v := range m { + key := strings.ToLower(k) + result[key] = create(v) + } + } + for k, v := range c.Values { + switch k { + case "const": + appendValues(v, createConst) + case "regexp": + appendValues(v, createRegexp) + default: + return nil, fmt.Errorf("unknown value type %v", k) + } + } + return result, nil +} + +func (c *Configuration) GetTemplate() (string, error) { + if c.Template != "" { + return c.Template, nil + } + if c.TemplatePath == "" { + return "", errors.New("template has not passed") + } + if b, err := ioutil.ReadFile(c.TemplatePath); err != nil { + return "", err + } else { + c.Template = strings.TrimSpace(string(b)) + return c.Template, nil + } +} + +func (c *Configuration) Parse(p string) error { + b, err := ioutil.ReadFile(p) + if err != nil { + return err + } + return yaml.Unmarshal(b, c) +} diff --git a/vendor/github.com/denis-tingajkin/go-header/go.mod b/vendor/github.com/denis-tingajkin/go-header/go.mod new file mode 100644 index 000000000..68984cb02 --- /dev/null +++ b/vendor/github.com/denis-tingajkin/go-header/go.mod @@ -0,0 +1,10 @@ +module github.com/denis-tingajkin/go-header + +go 1.15 + +require ( + github.com/fatih/color v1.9.0 + github.com/sirupsen/logrus v1.6.0 + github.com/stretchr/testify v1.5.1 + gopkg.in/yaml.v2 v2.2.2 +) diff --git a/vendor/github.com/denis-tingajkin/go-header/go.sum b/vendor/github.com/denis-tingajkin/go-header/go.sum new file mode 100644 index 000000000..4033b08f0 --- /dev/null +++ b/vendor/github.com/denis-tingajkin/go-header/go.sum @@ -0,0 +1,31 @@ +github.com/davecgh/go-spew v1.1.0 h1:ZDRjVQ15GmhC3fiQ8ni8+OwkZQO4DARzQgrnXU1Liz8= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= +github.com/davecgh/go-spew v1.1.1/go.mod 
h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/fatih/color v1.9.0 h1:8xPHl4/q1VyqGIPif1F+1V3Y3lSmrq01EabUW3CoW5s= +github.com/fatih/color v1.9.0/go.mod h1:eQcE1qtQxscV5RaZvpXrrb8Drkc3/DdQ+uUYCNjL+zU= +github.com/konsorten/go-windows-terminal-sequences v1.0.3 h1:CE8S1cTafDpPvMhIxNJKvHsGVBgn1xWYf1NbHQhywc8= +github.com/konsorten/go-windows-terminal-sequences v1.0.3/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= +github.com/mattn/go-colorable v0.1.4 h1:snbPLB8fVfU9iwbbo30TPtbLRzwWu6aJS6Xh4eaaviA= +github.com/mattn/go-colorable v0.1.4/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE= +github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= +github.com/mattn/go-isatty v0.0.11 h1:FxPOTFNqGkuDUGi3H/qkUbQO4ZiBa2brKq5r0l8TGeM= +github.com/mattn/go-isatty v0.0.11/go.mod h1:PhnuNfih5lzO57/f3n+odYbM4JtupLOxQOAqxQCu2WE= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/sirupsen/logrus v1.6.0 h1:UBcNElsrwanuuMsnGSlYmtmgbb23qDR5dG+6X6Oo89I= +github.com/sirupsen/logrus v1.6.0/go.mod h1:7uNnSEd1DgxDLC74fIahvMZmmYsHGZGEOFrfsX/uA88= +github.com/stretchr/objx v0.1.0 h1:4G4v2dO3VZwixGIRoQ5Lfboy6nUhCyYzaqnIAPPhYs4= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= +github.com/stretchr/testify v1.5.1 h1:nOGnQDM7FYENwehXlg/kFVnos3rEvtKTjRvOWSzb6H4= +github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= +golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190422165155-953cdadca894 h1:Cz4ceDQGXuKRnVBDTS23GTn/pU5OE2C0WrNTOYK1Uuc= +golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191026070338-33540a1f6037 h1:YyJpGZS1sBuBCzLAR1VEpK193GlqGZbnPFnPV/5Rsb4= +golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/yaml.v2 v2.2.2 h1:ZCJp+EgiOT7lHqUV2J862kp8Qj64Jo6az82+3Td9dZw= +gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= diff --git a/vendor/github.com/denis-tingajkin/go-header/issue.go b/vendor/github.com/denis-tingajkin/go-header/issue.go new file mode 100644 index 000000000..2ff7bfd3c --- /dev/null +++ b/vendor/github.com/denis-tingajkin/go-header/issue.go @@ -0,0 +1,48 @@ +// Copyright (c) 2020 Denis Tingajkin +// +// SPDX-License-Identifier: Apache-2.0 +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at: +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
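As a side note on the built-in `year-range` value constructed in the vendored `config.go` earlier in this diff, the short standalone demo below (hypothetical, not part of the patch) shows which header years the generated pattern accepts: the current year alone, or a `20xx-<current year>` range. The pattern string is taken from `builtInValues`; the surrounding program is illustrative.

```go
// Hypothetical demo of go-header's built-in {{year-range}} pattern; not part of this patch.
package main

import (
	"fmt"
	"regexp"
	"strings"
	"time"
)

func main() {
	year := fmt.Sprint(time.Now().Year())
	// Same replacement go-header performs internally for the year-range value.
	pattern := strings.ReplaceAll(`(20\d\d\-YEAR)|(YEAR)`, "YEAR", year)
	re := regexp.MustCompile(pattern)

	for _, candidate := range []string{year, "2019-" + year, "1999"} {
		fmt.Printf("%-12s matches: %v\n", candidate, re.MatchString(candidate))
	}
}
```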
+ +package goheader + +type Issue interface { + Location() Location + Message() string +} + +type issue struct { + msg string + location Location +} + +func (i *issue) Location() Location { + return i.location +} + +func (i *issue) Message() string { + return i.msg +} + +func NewIssueWithLocation(msg string, location Location) Issue { + return &issue{ + msg: msg, + location: location, + } +} + +func NewIssue(msg string) Issue { + return &issue{ + msg: msg, + } +} diff --git a/vendor/github.com/denis-tingajkin/go-header/location.go b/vendor/github.com/denis-tingajkin/go-header/location.go new file mode 100644 index 000000000..ba4d1907b --- /dev/null +++ b/vendor/github.com/denis-tingajkin/go-header/location.go @@ -0,0 +1,35 @@ +// Copyright (c) 2020 Denis Tingajkin +// +// SPDX-License-Identifier: Apache-2.0 +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at: +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package goheader + +import "fmt" + +type Location struct { + Line int + Position int +} + +func (l Location) String() string { + return fmt.Sprintf("%v:%v", l.Line+1, l.Position) +} + +func (l Location) Add(other Location) Location { + return Location{ + Line: l.Line + other.Line, + Position: l.Position + other.Position, + } +} diff --git a/vendor/github.com/denis-tingajkin/go-header/option.go b/vendor/github.com/denis-tingajkin/go-header/option.go new file mode 100644 index 000000000..afbcb62e1 --- /dev/null +++ b/vendor/github.com/denis-tingajkin/go-header/option.go @@ -0,0 +1,44 @@ +// Copyright (c) 2020 Denis Tingajkin +// +// SPDX-License-Identifier: Apache-2.0 +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at: +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package goheader + +import "strings" + +type Option interface { + apply(*Analyzer) +} + +type applyAnalyzerOptionFunc func(*Analyzer) + +func (f applyAnalyzerOptionFunc) apply(a *Analyzer) { + f(a) +} + +func WithValues(values map[string]Value) Option { + return applyAnalyzerOptionFunc(func(a *Analyzer) { + a.values = make(map[string]Value) + for k, v := range values { + a.values[strings.ToLower(k)] = v + } + }) +} + +func WithTemplate(template string) Option { + return applyAnalyzerOptionFunc(func(a *Analyzer) { + a.template = template + }) +} diff --git a/vendor/github.com/denis-tingajkin/go-header/reader.go b/vendor/github.com/denis-tingajkin/go-header/reader.go new file mode 100644 index 000000000..2393c9488 --- /dev/null +++ b/vendor/github.com/denis-tingajkin/go-header/reader.go @@ -0,0 +1,116 @@ +/* +Copyright (c) 2020 Denis Tingajkin + +SPDX-License-Identifier: Apache-2.0 + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at: + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ +package goheader + +func NewReader(text string) *Reader { + return &Reader{source: text} +} + +type Reader struct { + source string + position int + location Location + offset Location +} + +func (r *Reader) SetOffset(offset Location) { + r.offset = offset +} + +func (r *Reader) Position() int { + return r.position +} + +func (r *Reader) Location() Location { + return r.location.Add(r.offset) +} + +func (r *Reader) Peek() rune { + if r.Done() { + return rune(0) + } + return rune(r.source[r.position]) +} + +func (r *Reader) Done() bool { + return r.position >= len(r.source) +} + +func (r *Reader) Next() rune { + if r.Done() { + return rune(0) + } + reuslt := r.Peek() + if reuslt == '\n' { + r.location.Line++ + r.location.Position = 0 + } else { + r.location.Position++ + } + r.position++ + return reuslt +} + +func (r *Reader) Finish() string { + if r.position >= len(r.source) { + return "" + } + defer r.till() + return r.source[r.position:] +} + +func (r *Reader) SetPosition(pos int) { + if pos < 0 { + r.position = 0 + } + r.position = pos + r.location = r.calculateLocation() +} + +func (r *Reader) ReadWhile(match func(rune) bool) string { + if match == nil { + return "" + } + start := r.position + for !r.Done() && match(r.Peek()) { + r.Next() + } + return r.source[start:r.position] +} + +func (r *Reader) till() { + r.position = len(r.source) + r.location = r.calculateLocation() +} + +func (r *Reader) calculateLocation() Location { + min := len(r.source) + if min > r.position { + min = r.position + } + x, y := 0, 0 + for i := 0; i < min; i++ { + if r.source[i] == '\n' { + y++ + x = 0 + } else { + x++ + } + } + return Location{Line: y, Position: x} +} diff --git a/vendor/github.com/denis-tingajkin/go-header/value.go b/vendor/github.com/denis-tingajkin/go-header/value.go new file mode 100644 index 000000000..2a3adcdce --- /dev/null +++ b/vendor/github.com/denis-tingajkin/go-header/value.go @@ -0,0 +1,128 @@ +// Copyright (c) 2020 Denis Tingajkin +// +// SPDX-License-Identifier: Apache-2.0 +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this 
file except in compliance with the License. +// You may obtain a copy of the License at: +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package goheader + +import ( + "errors" + "fmt" + "regexp" + "strings" +) + +type Calculable interface { + Calculate(map[string]Value) error + Get() string +} + +type Value interface { + Calculable + Read(*Reader) Issue +} + +func calculateValue(calculable Calculable, values map[string]Value) (string, error) { + sb := strings.Builder{} + r := calculable.Get() + var endIndex int + var startIndex int + for startIndex = strings.Index(r, "{{"); startIndex >= 0; startIndex = strings.Index(r, "{{") { + _, _ = sb.WriteString(r[:startIndex]) + endIndex = strings.Index(r, "}}") + if endIndex < 0 { + return "", errors.New("missed value ending") + } + subVal := strings.ToLower(strings.TrimSpace(r[startIndex+2 : endIndex])) + if val := values[subVal]; val != nil { + if err := val.Calculate(values); err != nil { + return "", err + } + sb.WriteString(val.Get()) + } else { + return "", fmt.Errorf("unknown value name %v", subVal) + } + endIndex += 2 + r = r[endIndex:] + } + _, _ = sb.WriteString(r) + return sb.String(), nil +} + +type ConstValue struct { + RawValue string +} + +func (c *ConstValue) Calculate(values map[string]Value) error { + v, err := calculateValue(c, values) + if err != nil { + return err + } + c.RawValue = v + return nil +} + +func (c *ConstValue) Get() string { + return c.RawValue +} + +func (c *ConstValue) Read(s *Reader) Issue { + l := s.Location() + p := s.Position() + for _, ch := range c.Get() { + if ch != s.Peek() { + s.SetPosition(p) + f := s.ReadWhile(func(r rune) bool { + return r != '\n' + }) + return NewIssueWithLocation(fmt.Sprintf("Expected:%v, Actual: %v", c.Get(), f), l) + } + s.Next() + } + return nil +} + +type RegexpValue struct { + RawValue string +} + +func (r *RegexpValue) Calculate(values map[string]Value) error { + v, err := calculateValue(r, values) + if err != nil { + return err + } + r.RawValue = v + return nil +} + +func (r *RegexpValue) Get() string { + return r.RawValue +} + +func (r *RegexpValue) Read(s *Reader) Issue { + l := s.Location() + p := regexp.MustCompile(r.Get()) + pos := s.Position() + str := s.Finish() + s.SetPosition(pos) + indexes := p.FindAllIndex([]byte(str), -1) + if len(indexes) == 0 { + return NewIssueWithLocation(fmt.Sprintf("Pattern %v doesn't match.", p.String()), l) + } + s.SetPosition(pos + indexes[0][1]) + return nil +} + +var _ Value = &ConstValue{} +var _ Value = &RegexpValue{} diff --git a/vendor/github.com/esimonov/ifshort/LICENSE b/vendor/github.com/esimonov/ifshort/LICENSE new file mode 100644 index 000000000..a04e339c0 --- /dev/null +++ b/vendor/github.com/esimonov/ifshort/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2020 Eugene Simonov + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the 
following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/github.com/esimonov/ifshort/pkg/analyzer/analyzer.go b/vendor/github.com/esimonov/ifshort/pkg/analyzer/analyzer.go new file mode 100644 index 000000000..7e4df7da1 --- /dev/null +++ b/vendor/github.com/esimonov/ifshort/pkg/analyzer/analyzer.go @@ -0,0 +1,247 @@ +package analyzer + +import ( + "go/ast" + "go/token" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/ast/inspector" +) + +var maxDeclChars, maxDeclLines int + +const ( + maxDeclLinesUsage = `maximum length of variable declaration measured in number of lines, after which the linter won't suggest using short syntax. +Has precedence over max-decl-chars.` + maxDeclCharsUsage = `maximum length of variable declaration measured in number of characters, after which the linter won't suggest using short syntax.` +) + +func init() { + Analyzer.Flags.IntVar(&maxDeclLines, "max-decl-lines", 1, maxDeclLinesUsage) + Analyzer.Flags.IntVar(&maxDeclChars, "max-decl-chars", 30, maxDeclCharsUsage) +} + +// Analyzer is an analysis.Analyzer instance for ifshort linter. +var Analyzer = &analysis.Analyzer{ + Name: "ifshort", + Doc: "Checks that your code uses short syntax for if-statements whenever possible.", + Run: run, + Requires: []*analysis.Analyzer{inspect.Analyzer}, +} + +func run(pass *analysis.Pass) (interface{}, error) { + inspector := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + nodeFilter := []ast.Node{ + (*ast.FuncDecl)(nil), + } + + inspector.Preorder(nodeFilter, func(node ast.Node) { + fdecl := node.(*ast.FuncDecl) + + /*if fdecl.Name.Name != "notUsed_BinaryExpressionInIndex_OK" { + return + }*/ + + if fdecl == nil || fdecl.Body == nil { + return + } + + candidates := getNamedOccurrenceMap(fdecl, pass) + + for _, stmt := range fdecl.Body.List { + candidates.checkStatement(stmt, token.NoPos) + } + + for varName := range candidates { + for marker, occ := range candidates[varName] { + // If two or more vars with the same scope marker - skip them. 
+ if candidates.isFoundByScopeMarker(marker) { + continue + } + + pass.Reportf(occ.declarationPos, + "variable '%s' is only used in the if-statement (%s); consider using short syntax", + varName, pass.Fset.Position(occ.ifStmtPos)) + } + } + }) + return nil, nil +} + +func (nom namedOccurrenceMap) checkStatement(stmt ast.Stmt, ifPos token.Pos) { + switch v := stmt.(type) { + case *ast.AssignStmt: + for _, el := range v.Rhs { + nom.checkExpression(el, ifPos) + } + if isAssign(v.Tok) { + for _, el := range v.Lhs { + nom.checkExpression(el, ifPos) + } + } + case *ast.DeferStmt: + for _, a := range v.Call.Args { + nom.checkExpression(a, ifPos) + } + case *ast.ExprStmt: + if callExpr, ok := v.X.(*ast.CallExpr); ok { + nom.checkExpression(callExpr, ifPos) + } + case *ast.ForStmt: + for _, el := range v.Body.List { + nom.checkStatement(el, ifPos) + } + + if bexpr, ok := v.Cond.(*ast.BinaryExpr); ok { + nom.checkExpression(bexpr.X, ifPos) + nom.checkExpression(bexpr.Y, ifPos) + } + + nom.checkStatement(v.Post, ifPos) + case *ast.GoStmt: + for _, a := range v.Call.Args { + nom.checkExpression(a, ifPos) + } + case *ast.IfStmt: + for _, el := range v.Body.List { + nom.checkStatement(el, v.If) + } + + switch cond := v.Cond.(type) { + case *ast.BinaryExpr: + nom.checkExpression(cond.X, v.If) + nom.checkExpression(cond.Y, v.If) + case *ast.CallExpr: + nom.checkExpression(cond, v.If) + } + + if init, ok := v.Init.(*ast.AssignStmt); ok { + for _, e := range init.Rhs { + nom.checkExpression(e, v.If) + } + } + case *ast.IncDecStmt: + nom.checkExpression(v.X, ifPos) + case *ast.RangeStmt: + nom.checkExpression(v.X, ifPos) + if v.Body != nil { + for _, e := range v.Body.List { + nom.checkStatement(e, ifPos) + } + } + case *ast.ReturnStmt: + for _, r := range v.Results { + nom.checkExpression(r, ifPos) + } + case *ast.SendStmt: + nom.checkExpression(v.Chan, ifPos) + nom.checkExpression(v.Value, ifPos) + case *ast.SwitchStmt: + nom.checkExpression(v.Tag, ifPos) + + for _, el := range v.Body.List { + clauses, ok := el.(*ast.CaseClause) + if !ok { + continue + } + + for _, c := range clauses.List { + switch v := c.(type) { + case *ast.BinaryExpr: + nom.checkExpression(v.X, ifPos) + nom.checkExpression(v.Y, ifPos) + case *ast.Ident: + nom.checkExpression(v, ifPos) + } + } + + for _, c := range clauses.Body { + if est, ok := c.(*ast.ExprStmt); ok { + nom.checkExpression(est.X, ifPos) + } + + switch v := c.(type) { + case *ast.AssignStmt: + for _, el := range v.Rhs { + nom.checkExpression(el, ifPos) + } + case *ast.ExprStmt: + nom.checkExpression(v.X, ifPos) + } + } + } + } +} + +func (nom namedOccurrenceMap) checkExpression(candidate ast.Expr, ifPos token.Pos) { + switch v := candidate.(type) { + case *ast.BinaryExpr: + nom.checkExpression(v.X, ifPos) + nom.checkExpression(v.Y, ifPos) + case *ast.CallExpr: + for _, arg := range v.Args { + nom.checkExpression(arg, ifPos) + } + nom.checkExpression(v.Fun, ifPos) + if fun, ok := v.Fun.(*ast.SelectorExpr); ok { + nom.checkExpression(fun.X, ifPos) + } + case *ast.CompositeLit: + for _, el := range v.Elts { + switch v := el.(type) { + case *ast.Ident: + nom.checkExpression(v, ifPos) + case *ast.KeyValueExpr: + nom.checkExpression(v.Key, ifPos) + nom.checkExpression(v.Value, ifPos) + } + } + case *ast.FuncLit: + for _, el := range v.Body.List { + nom.checkStatement(el, ifPos) + } + case *ast.Ident: + if _, ok := nom[v.Name]; !ok || nom[v.Name].isEmponymousKey(ifPos) { + return + } + + scopeMarker1 := nom[v.Name].getScopeMarkerForPosition(v.Pos()) + + delete(nom[v.Name], 
scopeMarker1) + + for k := range nom { + for scopeMarker2 := range nom[k] { + if scopeMarker1 == scopeMarker2 { + delete(nom[k], scopeMarker2) + } + } + } + case *ast.IndexExpr: + nom.checkExpression(v.X, ifPos) + switch index := v.Index.(type) { + case *ast.BinaryExpr: + nom.checkExpression(index.X, ifPos) + case *ast.Ident: + nom.checkExpression(index, ifPos) + } + case *ast.SelectorExpr: + nom.checkExpression(v.X, ifPos) + case *ast.SliceExpr: + nom.checkExpression(v.High, ifPos) + nom.checkExpression(v.Low, ifPos) + nom.checkExpression(v.X, ifPos) + case *ast.TypeAssertExpr: + nom.checkExpression(v.X, ifPos) + case *ast.UnaryExpr: + nom.checkExpression(v.X, ifPos) + } +} + +func isAssign(tok token.Token) bool { + return (tok == token.ASSIGN || + tok == token.ADD_ASSIGN || tok == token.SUB_ASSIGN || + tok == token.MUL_ASSIGN || tok == token.QUO_ASSIGN || tok == token.REM_ASSIGN || + tok == token.AND_ASSIGN || tok == token.OR_ASSIGN || tok == token.XOR_ASSIGN || tok == token.AND_NOT_ASSIGN || + tok == token.SHL_ASSIGN || tok == token.SHR_ASSIGN) +} diff --git a/vendor/github.com/esimonov/ifshort/pkg/analyzer/occurrences.go b/vendor/github.com/esimonov/ifshort/pkg/analyzer/occurrences.go new file mode 100644 index 000000000..34224c93a --- /dev/null +++ b/vendor/github.com/esimonov/ifshort/pkg/analyzer/occurrences.go @@ -0,0 +1,259 @@ +package analyzer + +import ( + "go/ast" + "go/token" + "time" + + "golang.org/x/tools/go/analysis" +) + +// occurrence is a variable occurrence. +type occurrence struct { + declarationPos token.Pos + ifStmtPos token.Pos +} + +func (occ *occurrence) isComplete() bool { + return occ.ifStmtPos != token.NoPos && occ.declarationPos != token.NoPos +} + +// scopeMarkeredOccurences is a map of scope markers to variable occurrences. +type scopeMarkeredOccurences map[int64]occurrence + +func (smo scopeMarkeredOccurences) getGreatestMarker() int64 { + var maxScopeMarker int64 + + for marker := range smo { + if marker > maxScopeMarker { + maxScopeMarker = marker + } + } + return maxScopeMarker +} + +// find scope marker of the greatest token.Pos that is smaller than provided. +func (smo scopeMarkeredOccurences) getScopeMarkerForPosition(pos token.Pos) int64 { + var m int64 + var foundPos token.Pos + + for marker, occ := range smo { + if occ.declarationPos < pos && occ.declarationPos >= foundPos { + m = marker + foundPos = occ.declarationPos + } + } + return m +} + +func (smo scopeMarkeredOccurences) isEmponymousKey(pos token.Pos) bool { + if pos == token.NoPos { + return false + } + + for _, occ := range smo { + if occ.ifStmtPos == pos { + return true + } + } + return false +} + +// namedOccurrenceMap is a map of variable names to scopeMarkeredOccurences. 
+type namedOccurrenceMap map[string]scopeMarkeredOccurences + +func getNamedOccurrenceMap(fdecl *ast.FuncDecl, pass *analysis.Pass) namedOccurrenceMap { + nom := namedOccurrenceMap(map[string]scopeMarkeredOccurences{}) + + if fdecl == nil || fdecl.Body == nil { + return nom + } + + for _, stmt := range fdecl.Body.List { + switch v := stmt.(type) { + case *ast.AssignStmt: + nom.addFromAssignment(pass, v) + case *ast.IfStmt: + nom.addFromCondition(v) + nom.addFromIfClause(v) + nom.addFromElseClause(v) + } + } + + candidates := namedOccurrenceMap(map[string]scopeMarkeredOccurences{}) + + for varName, markeredOccs := range nom { + for marker, occ := range markeredOccs { + if !occ.isComplete() && !nom.isFoundByScopeMarker(marker) { + continue + } + if _, ok := candidates[varName]; !ok { + candidates[varName] = scopeMarkeredOccurences{ + marker: occ, + } + } else { + candidates[varName][marker] = occ + } + } + } + return candidates +} + +func (nom namedOccurrenceMap) isFoundByScopeMarker(scopeMarker int64) bool { + var i int + + for _, markeredOccs := range nom { + for marker := range markeredOccs { + if marker == scopeMarker { + i++ + } + } + } + return i >= 2 +} + +func (nom namedOccurrenceMap) addFromAssignment(pass *analysis.Pass, assignment *ast.AssignStmt) { + if assignment.Tok != token.DEFINE { + return + } + + scopeMarker := time.Now().UnixNano() + + for i, el := range assignment.Lhs { + ident, ok := el.(*ast.Ident) + if !ok { + continue + } + + if ident.Name == "_" || ident.Obj == nil || isUnshortenableAssignment(ident.Obj.Decl) { + continue + } + + if markeredOccs, ok := nom[ident.Name]; ok { + markeredOccs[scopeMarker] = occurrence{ + declarationPos: ident.Pos(), + } + nom[ident.Name] = markeredOccs + } else { + newOcc := occurrence{} + if areFlagSettingsSatisfied(pass, assignment, i) { + newOcc.declarationPos = ident.Pos() + } + nom[ident.Name] = scopeMarkeredOccurences{scopeMarker: newOcc} + } + } +} + +func isUnshortenableAssignment(decl interface{}) bool { + assign, ok := decl.(*ast.AssignStmt) + if !ok { + return false + } + + for _, el := range assign.Rhs { + u, ok := el.(*ast.UnaryExpr) + if !ok { + continue + } + + if u.Op == token.AND { + if _, ok := u.X.(*ast.CompositeLit); ok { + return true + } + } + } + return false +} + +func areFlagSettingsSatisfied(pass *analysis.Pass, assignment *ast.AssignStmt, i int) bool { + lh := assignment.Lhs[i] + rh := assignment.Rhs[len(assignment.Rhs)-1] + + if len(assignment.Rhs) == len(assignment.Lhs) { + rh = assignment.Rhs[i] + } + + if pass.Fset.Position(rh.End()).Line-pass.Fset.Position(rh.Pos()).Line > maxDeclLines { + return false + } + if int(rh.End()-lh.Pos()) > maxDeclChars { + return false + } + return true +} + +func (nom namedOccurrenceMap) addFromCondition(stmt *ast.IfStmt) { + switch v := stmt.Cond.(type) { + case *ast.BinaryExpr: + for _, v := range [2]ast.Expr{v.X, v.Y} { + switch e := v.(type) { + case *ast.Ident: + nom.addFromIdent(stmt.If, e) + case *ast.SelectorExpr: + nom.addFromIdent(stmt.If, e.X) + } + } + case *ast.Ident: + nom.addFromIdent(stmt.If, v) + case *ast.CallExpr: + for _, a := range v.Args { + switch e := a.(type) { + case *ast.Ident: + nom.addFromIdent(stmt.If, e) + case *ast.CallExpr: + nom.addFromCallExpr(stmt.If, e) + } + } + } +} + +func (nom namedOccurrenceMap) addFromIfClause(stmt *ast.IfStmt) { + nom.addFromBlockStmt(stmt.Body, stmt.If) +} + +func (nom namedOccurrenceMap) addFromElseClause(stmt *ast.IfStmt) { + nom.addFromBlockStmt(stmt.Else, stmt.If) +} + +func (nom namedOccurrenceMap) 
addFromBlockStmt(stmt ast.Stmt, ifPos token.Pos) { + blockStmt, ok := stmt.(*ast.BlockStmt) + if !ok { + return + } + + for _, el := range blockStmt.List { + exptStmt, ok := el.(*ast.ExprStmt) + if !ok { + continue + } + + if callExpr, ok := exptStmt.X.(*ast.CallExpr); ok { + nom.addFromCallExpr(ifPos, callExpr) + } + } +} + +func (nom namedOccurrenceMap) addFromCallExpr(ifPos token.Pos, callExpr *ast.CallExpr) { + for _, arg := range callExpr.Args { + nom.addFromIdent(ifPos, arg) + } +} + +func (nom namedOccurrenceMap) addFromIdent(ifPos token.Pos, v ast.Expr) { + ident, ok := v.(*ast.Ident) + if !ok { + return + } + + if markeredOccs, ok := nom[ident.Name]; ok { + marker := nom[ident.Name].getGreatestMarker() + + occ := markeredOccs[marker] + if occ.isComplete() { + return + } + + occ.ifStmtPos = ifPos + nom[ident.Name][marker] = occ + } +} diff --git a/vendor/github.com/ettle/strcase/.gitignore b/vendor/github.com/ettle/strcase/.gitignore new file mode 100644 index 000000000..54bc1fbff --- /dev/null +++ b/vendor/github.com/ettle/strcase/.gitignore @@ -0,0 +1,18 @@ +# Binaries for programs and plugins +*.exe +*.exe~ +*.dll +*.so +*.dylib + +# Test binary, built with `go test -c` +*.test + +# Output of the go coverage tool, specifically when used with LiteIDE +*.out + +# CPU and memory profiles +*.prof + +# Dependency directories +vendor/ diff --git a/vendor/github.com/ettle/strcase/.golangci.yml b/vendor/github.com/ettle/strcase/.golangci.yml new file mode 100644 index 000000000..4d31fcc5b --- /dev/null +++ b/vendor/github.com/ettle/strcase/.golangci.yml @@ -0,0 +1,88 @@ +linters-settings: + dupl: + threshold: 100 + gocyclo: + min-complexity: 15 + gocritic: + enabled-tags: + - diagnostic + - experimental + - opinionated + - performance + - style + disabled-checks: + - ifElseChain + - whyNoLint + - wrapperFunc + golint: + min-confidence: 0.5 + govet: + check-shadowing: true + lll: + line-length: 140 + maligned: + suggest-new: true + misspell: + locale: US + nolintlint: + allow-leading-space: false + allow-unused: false + require-specific: true + + require-explanation: true + allow-no-explanation: + - gocyclo + +linters: + disable-all: true + enable: + - bodyclose + - deadcode + - depguard + - dogsled + - dupl + - errcheck + - gochecknoinits + - gocritic + - gocyclo + - gofmt + - goimports + - golint + - goprintffuncname + - gosec + - gosimple + - govet + - ineffassign + - interfacer + - lll + - misspell + - nakedret + - nolintlint + - rowserrcheck + - staticcheck + - structcheck + - stylecheck + - typecheck + - unconvert + - unparam + - unused + - varcheck + - whitespace + + # don't enable: + # - asciicheck + # - gochecknoglobals + # - gocognit + # - godot + # - godox + # - goerr113 + # - maligned + # - nestif + # - prealloc + # - testpackage + # - wsl + +issues: + exclude-use-default: false + max-issues-per-linter: 0 + max-same-issues: 0 diff --git a/vendor/github.com/ettle/strcase/.readme.tmpl b/vendor/github.com/ettle/strcase/.readme.tmpl new file mode 100644 index 000000000..135765c40 --- /dev/null +++ b/vendor/github.com/ettle/strcase/.readme.tmpl @@ -0,0 +1,80 @@ +{{with .PDoc}} +# Go Strcase + +[![Go Report Card](https://goreportcard.com/badge/github.com/ettle/strcase)](https://goreportcard.com/report/github.com/ettle/strcase) +[![Coverage](http://gocover.io/_badge/github.com/ettle/strcase?0)](http://gocover.io/github.com/ettle/strcase) +[![GoDoc](https://godoc.org/github.com/ettle/strcase?status.svg)](https://pkg.go.dev/github.com/ettle/strcase) + +Convert strings to `snake_case`, 
`camelCase`, `PascalCase`, `kebab-case` and more! Supports Go initialisms, customization, and Unicode. + +`import "{{.ImportPath}}"` + +## Overview +{{comment_md .Doc}} +{{example_html $ ""}} + +## Index{{if .Consts}} +* [Constants](#pkg-constants){{end}}{{if .Vars}} +* [Variables](#pkg-variables){{end}}{{- range .Funcs -}}{{$name_html := html .Name}} +* [{{node_html $ .Decl false | sanitize}}](#{{$name_html}}){{- end}}{{- range .Types}}{{$tname_html := html .Name}} +* [type {{$tname_html}}](#{{$tname_html}}){{- range .Funcs}}{{$name_html := html .Name}} + * [{{node_html $ .Decl false | sanitize}}](#{{$name_html}}){{- end}}{{- range .Methods}}{{$name_html := html .Name}} + * [{{node_html $ .Decl false | sanitize}}](#{{$tname_html}}.{{$name_html}}){{- end}}{{- end}}{{- if $.Notes}}{{- range $marker, $item := $.Notes}} +* [{{noteTitle $marker | html}}s](#pkg-note-{{$marker}}){{end}}{{end}} +{{if $.Examples}} +#### Examples{{- range $.Examples}} +* [{{example_name .Name}}](#example_{{.Name}}){{- end}}{{- end}} + +{{with .Consts}}## Constants +{{range .}}{{node $ .Decl | pre}} +{{comment_md .Doc}}{{end}}{{end}} +{{with .Vars}}## Variables +{{range .}}{{node $ .Decl | pre}} +{{comment_md .Doc}}{{end}}{{end}} + +{{range .Funcs}}{{$name_html := html .Name}}## func [{{$name_html}}]({{gh_url $ .Decl}}) +{{node $ .Decl | pre}} +{{comment_md .Doc}} +{{example_html $ .Name}} +{{callgraph_html $ "" .Name}}{{end}} +{{range .Types}}{{$tname := .Name}}{{$tname_html := html .Name}}## type [{{$tname_html}}]({{gh_url $ .Decl}}) +{{node $ .Decl | pre}} +{{comment_md .Doc}}{{range .Consts}} +{{node $ .Decl | pre }} +{{comment_md .Doc}}{{end}}{{range .Vars}} +{{node $ .Decl | pre }} +{{comment_md .Doc}}{{end}} + +{{example_html $ $tname}} +{{implements_html $ $tname}} +{{methodset_html $ $tname}} + +{{range .Funcs}}{{$name_html := html .Name}}### func [{{$name_html}}]({{gh_url $ .Decl}}) +{{node $ .Decl | pre}} +{{comment_md .Doc}} +{{example_html $ .Name}}{{end}} +{{callgraph_html $ "" .Name}} + +{{range .Methods}}{{$name_html := html .Name}}### func ({{md .Recv}}) [{{$name_html}}]({{gh_url $ .Decl}}) +{{node $ .Decl | pre}} +{{comment_md .Doc}} +{{$name := printf "%s_%s" $tname .Name}}{{example_html $ $name}} +{{callgraph_html $ .Recv .Name}} +{{end}}{{end}}{{end}} + +{{with $.Notes}} +{{range $marker, $content := .}} +## {{noteTitle $marker | html}}s + +{{end}} +{{end}} +{{if .Dirs}} +## Subdirectories +{{range $.Dirs.List}} +{{indent .Depth}}* [{{.Name | html}}]({{print "./" .Path}}){{if .Synopsis}} {{ .Synopsis}}{{end -}} +{{end}} +{{end}} diff --git a/vendor/github.com/ettle/strcase/LICENSE b/vendor/github.com/ettle/strcase/LICENSE new file mode 100644 index 000000000..4f0116be2 --- /dev/null +++ b/vendor/github.com/ettle/strcase/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2020 Liyan David Chang + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/github.com/ettle/strcase/Makefile b/vendor/github.com/ettle/strcase/Makefile new file mode 100644 index 000000000..462f8b473 --- /dev/null +++ b/vendor/github.com/ettle/strcase/Makefile @@ -0,0 +1,16 @@ +.PHONY: benchmark docs lint test + +docs: + which godoc2ghmd || ( go get github.com/DevotedHealth/godoc2ghmd && go mod tidy ) + godoc2ghmd -template .readme.tmpl github.com/ettle/strcase > README.md + +test: + go test -cover ./... + +lint: + which golangci-lint || ( go get github.com/golangci/golangci-lint/cmd/golangci-lint@v1.27.0 && go mod tidy ) + golangci-lint run + golangci-lint run benchmark/*.go + +benchmark: + cd benchmark && go test -bench=. -test.benchmem && go mod tidy diff --git a/vendor/github.com/ettle/strcase/README.md b/vendor/github.com/ettle/strcase/README.md new file mode 100644 index 000000000..ee165e3e5 --- /dev/null +++ b/vendor/github.com/ettle/strcase/README.md @@ -0,0 +1,542 @@ + +# Go Strcase + +[![Go Report Card](https://goreportcard.com/badge/github.com/ettle/strcase)](https://goreportcard.com/report/github.com/ettle/strcase) +[![Coverage](http://gocover.io/_badge/github.com/ettle/strcase?0)](http://gocover.io/github.com/ettle/strcase) +[![GoDoc](https://godoc.org/github.com/ettle/strcase?status.svg)](https://pkg.go.dev/github.com/ettle/strcase) + +Convert strings to `snake_case`, `camelCase`, `PascalCase`, `kebab-case` and more! Supports Go initialisms, customization, and Unicode. + +`import "github.com/ettle/strcase"` + +## Overview +Package strcase is a package for converting strings into various word cases +(e.g. snake_case, camelCase) + + + go get -u github.com/ettle/strcase + +Example usage + + + strcase.ToSnake("Hello World") // hello_world + strcase.ToSNAKE("Hello World") // HELLO_WORLD + + strcase.ToKebab("helloWorld") // hello-world + strcase.ToKEBAB("helloWorld") // HELLO-WORLD + + strcase.ToPascal("hello-world") // HelloWorld + strcase.ToCamel("hello-world") // helloWorld + + // Handle odd cases + strcase.ToSnake("FOOBar") // foo_bar + + // Support Go initialisms + strcase.ToGoCamel("http_response") // HTTPResponse + + // Specify case and delimiter + strcase.ToCase("HelloWorld", strcase.UpperCase, '.') // HELLO.WORLD + +### Why this package +String strcase is pretty straight forward and there are a number of methods to +do it. This package is fully featured, more customizable, better tested, and +faster* than other packages and what you would probably whip up yourself. + +### Unicode support +We work for with unicode strings and pay very little performance penalty for it +as we optimized for the common use case of ASCII only strings. + +### Customization +You can create a custom caser that changes the behavior to what you want. This +customization also reduces the pressure for us to change the default behavior +which means that things are more stable for everyone involved. The goal is to +make the common path easy and fast, while making the uncommon path possible. + + + c := NewCaser( + // Use Go's default initialisms e.g. 
ID, HTML + true, + // Override initialisms (e.g. don't initialize HTML but initialize SSL + map[string]bool{"SSL": true, "HTML": false}, + // Write your own custom SplitFn + // + NewSplitFn( + []rune{'*', '.', ','}, + SplitCase, + SplitAcronym, + PreserveNumberFormatting, + SplitBeforeNumber, + SplitAfterNumber, + )) + assert.Equal(t, "http_200", c.ToSnake("http200")) + +### Initialism support +By default, we use the golint intialisms list. You can customize and override +the initialisms if you wish to add additional ones, such as "SSL" or "CMS" or +domain specific ones to your industry. + + + ToGoCamel("http_response") // HTTPResponse + ToGoSnake("http_response") // HTTP_response + +### Test coverage +We have a wide ranging test suite to make sure that we understand our behavior. +Test coverage isn't everything, but we aim for 100% coverage. + +### Fast +Optimized to reduce memory allocations with Builder. Benchmarked and optimized +around common cases. + +We're on par with the fastest packages (that have less features) and much +faster than others. We also benchmarked against code snippets. Using string +builders to reduce memory allocation and reordering boolean checks for the +common cases have a large performance impact. + +Hopefully I was fair to each library and happy to rerun benchmarks differently +or reword my commentary based on suggestions or updates. + + + // This package + // Go intialisms and custom casers are slower + BenchmarkToTitle-4 992491 1559 ns/op 32 B/op 1 allocs/op + BenchmarkToSnake-4 1000000 1475 ns/op 32 B/op 1 allocs/op + BenchmarkToSNAKE-4 1000000 1609 ns/op 32 B/op 1 allocs/op + BenchmarkToGoSnake-4 275010 3697 ns/op 44 B/op 4 allocs/op + BenchmarkToCustomCaser-4 342704 4191 ns/op 56 B/op 4 allocs/op + + // Segment has very fast snake case and camel case libraries + // No features or customization, but very very fast + BenchmarkSegment-4 1303809 938 ns/op 16 B/op 1 allocs/op + + // Stdlib strings.Title for comparison, even though it only splits on spaces + BenchmarkToTitleStrings-4 1213467 1164 ns/op 16 B/op 1 allocs/op + + // Other libraries or code snippets + // - Most are slower, by up to an order of magnitude + // - None support initialisms or customization + // - Some generate only camelCase or snake_case + // - Many lack unicode support + BenchmarkToSnakeStoewer-4 973200 2075 ns/op 64 B/op 2 allocs/op + // Copying small rune arrays is slow + BenchmarkToSnakeSiongui-4 264315 4229 ns/op 48 B/op 10 allocs/op + BenchmarkGoValidator-4 206811 5152 ns/op 184 B/op 9 allocs/op + // String alloction is slow + BenchmarkToSnakeFatih-4 82675 12280 ns/op 392 B/op 26 allocs/op + BenchmarkToSnakeIanColeman-4 83276 13903 ns/op 145 B/op 13 allocs/op + // Regexp is slow + BenchmarkToSnakeGolangPrograms-4 74448 18586 ns/op 176 B/op 11 allocs/op + + // These results aren't a surprise - my initial version of this library was + // painfully slow. I think most of us, without spending some time with + // profilers and benchmarks, would write also something on the slower side. + +### Why not this package +If every nanosecond matters and this is used in a tight loop, use segment.io's +libraries (https://github.com/segmentio/go-snakecase and +https://github.com/segmentio/go-camelcase). They lack features, but make up for +it by being blazing fast. Alternatively, if you need your code to work slightly +differently, fork them and tailor it for your use case. + +If you don't like having external imports, I get it. 
This package only imports +packages for testing, otherwise it only uses the standard library. If that's +not enough, you can use this repo as the foundation for your own. MIT Licensed. + +This package is still relatively new and while I've used it for a while +personally, it doesn't have the miles that other packages do. I've tested this +code agains't their test cases to make sure that there aren't any surprises. + +### Migrating from other packages +If you are migrating from from another package, you may find slight differences +in output. To reduce the delta, you may find it helpful to use the following +custom casers to mimic the behavior of the other package. + + + // From https://github.com/iancoleman/strcase + var c = NewCaser(false, nil, NewSplitFn([]rune{'_', '-', '.'}, SplitCase, SplitAcronym, SplitBeforeNumber)) + + // From https://github.com/stoewer/go-strcase + var c = NewCaser(false, nil, NewSplitFn([]rune{'_', '-'}, SplitCase), SplitAcronym) + + + + +## Index +* [func ToCamel(s string) string](#ToCamel) +* [func ToCase(s string, wordCase WordCase, delimiter rune) string](#ToCase) +* [func ToGoCamel(s string) string](#ToGoCamel) +* [func ToGoCase(s string, wordCase WordCase, delimiter rune) string](#ToGoCase) +* [func ToGoKebab(s string) string](#ToGoKebab) +* [func ToGoPascal(s string) string](#ToGoPascal) +* [func ToGoSnake(s string) string](#ToGoSnake) +* [func ToKEBAB(s string) string](#ToKEBAB) +* [func ToKebab(s string) string](#ToKebab) +* [func ToPascal(s string) string](#ToPascal) +* [func ToSNAKE(s string) string](#ToSNAKE) +* [func ToSnake(s string) string](#ToSnake) +* [type Caser](#Caser) + * [func NewCaser(goInitialisms bool, initialismOverrides map[string]bool, splitFn SplitFn) *Caser](#NewCaser) + * [func (c *Caser) ToCamel(s string) string](#Caser.ToCamel) + * [func (c *Caser) ToCase(s string, wordCase WordCase, delimiter rune) string](#Caser.ToCase) + * [func (c *Caser) ToKEBAB(s string) string](#Caser.ToKEBAB) + * [func (c *Caser) ToKebab(s string) string](#Caser.ToKebab) + * [func (c *Caser) ToPascal(s string) string](#Caser.ToPascal) + * [func (c *Caser) ToSNAKE(s string) string](#Caser.ToSNAKE) + * [func (c *Caser) ToSnake(s string) string](#Caser.ToSnake) +* [type SplitAction](#SplitAction) +* [type SplitFn](#SplitFn) + * [func NewSplitFn(delimiters []rune, splitOptions ...SplitOption) SplitFn](#NewSplitFn) +* [type SplitOption](#SplitOption) +* [type WordCase](#WordCase) + + + + + +## func [ToCamel](./strcase.go#L57) +``` go +func ToCamel(s string) string +``` +ToCamel returns words in camelCase (capitalized words concatenated together, with first word lower case). +Also known as lowerCamelCase or mixedCase. + + + +## func [ToCase](./strcase.go#L70) +``` go +func ToCase(s string, wordCase WordCase, delimiter rune) string +``` +ToCase returns words in given case and delimiter. + + + +## func [ToGoCamel](./strcase.go#L65) +``` go +func ToGoCamel(s string) string +``` +ToGoCamel returns words in camelCase (capitalized words concatenated together, with first word lower case). +Also known as lowerCamelCase or mixedCase. + +Respects Go's common initialisms (e.g. httpResponse -> HTTPResponse). + + + +## func [ToGoCase](./strcase.go#L77) +``` go +func ToGoCase(s string, wordCase WordCase, delimiter rune) string +``` +ToGoCase returns words in given case and delimiter. + +Respects Go's common initialisms (e.g. httpResponse -> HTTPResponse). 
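
To tie the function index above together, here is a minimal, self-contained sketch of the package-level strcase API; the expected outputs in the comments are taken from the documentation above rather than re-verified here:

```go
package main

import (
	"fmt"

	"github.com/ettle/strcase"
)

func main() {
	// Plain conversions (no initialism handling).
	fmt.Println(strcase.ToSnake("Hello World"))  // hello_world
	fmt.Println(strcase.ToKebab("helloWorld"))   // hello-world
	fmt.Println(strcase.ToPascal("hello-world")) // HelloWorld

	// Go-initialism-aware variants.
	fmt.Println(strcase.ToGoSnake("http_response"))  // HTTP_response
	fmt.Println(strcase.ToGoPascal("http_response")) // HTTPResponse

	// Arbitrary word case and delimiter combination.
	fmt.Println(strcase.ToCase("HelloWorld", strcase.UpperCase, '.')) // HELLO.WORLD
}
```
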
+ + + +## func [ToGoKebab](./strcase.go#L31) +``` go +func ToGoKebab(s string) string +``` +ToGoKebab returns words in kebab-case (lower case words with dashes). +Also known as dash-case. + +Respects Go's common initialisms (e.g. http-response -> HTTP-response). + + + +## func [ToGoPascal](./strcase.go#L51) +``` go +func ToGoPascal(s string) string +``` +ToGoPascal returns words in PascalCase (capitalized words concatenated together). +Also known as UpperPascalCase. + +Respects Go's common initialisms (e.g. HttpResponse -> HTTPResponse). + + + +## func [ToGoSnake](./strcase.go#L11) +``` go +func ToGoSnake(s string) string +``` +ToGoSnake returns words in snake_case (lower case words with underscores). + +Respects Go's common initialisms (e.g. http_response -> HTTP_response). + + + +## func [ToKEBAB](./strcase.go#L37) +``` go +func ToKEBAB(s string) string +``` +ToKEBAB returns words in KEBAB-CASE (upper case words with dashes). +Also known as SCREAMING-KEBAB-CASE or SCREAMING-DASH-CASE. + + + +## func [ToKebab](./strcase.go#L23) +``` go +func ToKebab(s string) string +``` +ToKebab returns words in kebab-case (lower case words with dashes). +Also known as dash-case. + + + +## func [ToPascal](./strcase.go#L43) +``` go +func ToPascal(s string) string +``` +ToPascal returns words in PascalCase (capitalized words concatenated together). +Also known as UpperPascalCase. + + + +## func [ToSNAKE](./strcase.go#L17) +``` go +func ToSNAKE(s string) string +``` +ToSNAKE returns words in SNAKE_CASE (upper case words with underscores). +Also known as SCREAMING_SNAKE_CASE or UPPER_CASE. + + + +## func [ToSnake](./strcase.go#L4) +``` go +func ToSnake(s string) string +``` +ToSnake returns words in snake_case (lower case words with underscores). + + + + +## type [Caser](./caser.go#L4-L7) +``` go +type Caser struct { + // contains filtered or unexported fields +} + +``` +Caser allows for customization of parsing and intialisms + + + + + + + +### func [NewCaser](./caser.go#L24) +``` go +func NewCaser(goInitialisms bool, initialismOverrides map[string]bool, splitFn SplitFn) *Caser +``` +NewCaser returns a configured Caser. + +A Caser should be created when you want fine grained control over how the words are split. + + + Notes on function arguments + + goInitialisms: Whether to use Golint's intialisms + + initialismOverrides: A mapping of extra initialisms + Keys must be in ALL CAPS. Merged with Golint's if goInitialisms is set. + Setting a key to false will override Golint's. + + splitFn: How to separate words + Override the default split function. Consider using NewSplitFn to + configure one instead of writing your own. + + + + + +### func (\*Caser) [ToCamel](./caser.go#L80) +``` go +func (c *Caser) ToCamel(s string) string +``` +ToCamel returns words in camelCase (capitalized words concatenated together, with first word lower case). +Also known as lowerCamelCase or mixedCase. + + + + +### func (\*Caser) [ToCase](./caser.go#L85) +``` go +func (c *Caser) ToCase(s string, wordCase WordCase, delimiter rune) string +``` +ToCase returns words with a given case and delimiter. + + + + +### func (\*Caser) [ToKEBAB](./caser.go#L68) +``` go +func (c *Caser) ToKEBAB(s string) string +``` +ToKEBAB returns words in KEBAB-CASE (upper case words with dashes). +Also known as SCREAMING-KEBAB-CASE or SCREAMING-DASH-CASE. + + + + +### func (\*Caser) [ToKebab](./caser.go#L62) +``` go +func (c *Caser) ToKebab(s string) string +``` +ToKebab returns words in kebab-case (lower case words with dashes). +Also known as dash-case. 
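
For the Caser type documented here, a short sketch of building a custom caser with NewCaser and NewSplitFn; the particular initialism override ("SSL") and delimiter set are illustrative choices, and the commented outputs follow from the conversion rules described above:

```go
package main

import (
	"fmt"

	"github.com/ettle/strcase"
)

func main() {
	c := strcase.NewCaser(
		true,                         // start from Go's (golint) initialisms
		map[string]bool{"SSL": true}, // add SSL as an extra initialism
		strcase.NewSplitFn(
			[]rune{'-', '_', '.'}, // treat these runes as word delimiters
			strcase.SplitCase,     // split fooBar -> foo, Bar
			strcase.SplitAcronym,  // split FOOBar -> FOO, Bar
		),
	)

	// Initialisms are written in upper case regardless of the target word
	// case, mirroring the ToGoSnake/ToGoPascal behaviour documented above.
	fmt.Println(c.ToPascal("ssl_certificate")) // SSLCertificate
	fmt.Println(c.ToSnake("sslCertificate"))   // SSL_certificate
}
```
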
+ + + + +### func (\*Caser) [ToPascal](./caser.go#L74) +``` go +func (c *Caser) ToPascal(s string) string +``` +ToPascal returns words in PascalCase (capitalized words concatenated together). +Also known as UpperPascalCase. + + + + +### func (\*Caser) [ToSNAKE](./caser.go#L56) +``` go +func (c *Caser) ToSNAKE(s string) string +``` +ToSNAKE returns words in SNAKE_CASE (upper case words with underscores). +Also known as SCREAMING_SNAKE_CASE or UPPER_CASE. + + + + +### func (\*Caser) [ToSnake](./caser.go#L50) +``` go +func (c *Caser) ToSnake(s string) string +``` +ToSnake returns words in snake_case (lower case words with underscores). + + + + +## type [SplitAction](./split.go#L110) +``` go +type SplitAction int +``` +SplitAction defines if and how to split a string + + +``` go +const ( + // Noop - Continue to next character + Noop SplitAction = iota + // Split - Split between words + // e.g. to split between wordsWithoutDelimiters + Split + // SkipSplit - Split the word and drop the character + // e.g. to split words with delimiters + SkipSplit + // Skip - Remove the character completely + Skip +) +``` + + + + + + + + + +## type [SplitFn](./split.go#L6) +``` go +type SplitFn func(prev, curr, next rune) SplitAction +``` +SplitFn defines how to split a string into words + + + + + + + +### func [NewSplitFn](./split.go#L14-L17) +``` go +func NewSplitFn( + delimiters []rune, + splitOptions ...SplitOption, +) SplitFn +``` +NewSplitFn returns a SplitFn based on the options provided. + +NewSplitFn covers the majority of common options that other strcase +libraries provide and should allow you to simply create a custom caser. +For more complicated use cases, feel free to write your own SplitFn +nolint:gocyclo + + + + + +## type [SplitOption](./split.go#L93) +``` go +type SplitOption int +``` +SplitOption are options that allow for configuring NewSplitFn + + +``` go +const ( + // SplitCase - FooBar -> Foo_Bar + SplitCase SplitOption = iota + // SplitAcronym - FOOBar -> Foo_Bar + // It won't preserve FOO's case. If you want, you can set the Caser's initialisms so FOO will be in all caps + SplitAcronym + // SplitBeforeNumber - port80 -> port_80 + SplitBeforeNumber + // SplitAfterNumber - 200status -> 200_status + SplitAfterNumber + // PreserveNumberFormatting - a.b.2,000.3.c -> a_b_2,000.3_c + PreserveNumberFormatting +) +``` + + + + + + + + + +## type [WordCase](./convert.go#L6) +``` go +type WordCase int +``` +WordCase is an enumeration of the ways to format a word. + + +``` go +const ( + // Original - Preserve the original input strcase + Original WordCase = iota + // LowerCase - All letters lower cased (example) + LowerCase + // UpperCase - All letters upper cased (EXAMPLE) + UpperCase + // TitleCase - Only first letter upper cased (Example) + TitleCase + // CamelCase - TitleCase except lower case first word (exampleText) + CamelCase +) +``` + + + + + + + + + + + + + diff --git a/vendor/github.com/ettle/strcase/caser.go b/vendor/github.com/ettle/strcase/caser.go new file mode 100644 index 000000000..891a67189 --- /dev/null +++ b/vendor/github.com/ettle/strcase/caser.go @@ -0,0 +1,87 @@ +package strcase + +// Caser allows for customization of parsing and intialisms +type Caser struct { + initialisms map[string]bool + splitFn SplitFn +} + +// NewCaser returns a configured Caser. +// +// A Caser should be created when you want fine grained control over how the words are split. 
+// +// Notes on function arguments +// +// goInitialisms: Whether to use Golint's intialisms +// +// initialismOverrides: A mapping of extra initialisms +// Keys must be in ALL CAPS. Merged with Golint's if goInitialisms is set. +// Setting a key to false will override Golint's. +// +// splitFn: How to separate words +// Override the default split function. Consider using NewSplitFn to +// configure one instead of writing your own. +func NewCaser(goInitialisms bool, initialismOverrides map[string]bool, splitFn SplitFn) *Caser { + c := &Caser{ + initialisms: golintInitialisms, + splitFn: splitFn, + } + + if c.splitFn == nil { + c.splitFn = defaultSplitFn + } + + if goInitialisms && initialismOverrides != nil { + c.initialisms = map[string]bool{} + for k, v := range golintInitialisms { + c.initialisms[k] = v + } + for k, v := range initialismOverrides { + c.initialisms[k] = v + } + } else if !goInitialisms { + c.initialisms = initialismOverrides + } + + return c +} + +// ToSnake returns words in snake_case (lower case words with underscores). +func (c *Caser) ToSnake(s string) string { + return convert(s, c.splitFn, '_', LowerCase, c.initialisms) +} + +// ToSNAKE returns words in SNAKE_CASE (upper case words with underscores). +// Also known as SCREAMING_SNAKE_CASE or UPPER_CASE. +func (c *Caser) ToSNAKE(s string) string { + return convert(s, c.splitFn, '_', UpperCase, c.initialisms) +} + +// ToKebab returns words in kebab-case (lower case words with dashes). +// Also known as dash-case. +func (c *Caser) ToKebab(s string) string { + return convert(s, c.splitFn, '-', LowerCase, c.initialisms) +} + +// ToKEBAB returns words in KEBAB-CASE (upper case words with dashes). +// Also known as SCREAMING-KEBAB-CASE or SCREAMING-DASH-CASE. +func (c *Caser) ToKEBAB(s string) string { + return convert(s, c.splitFn, '-', UpperCase, c.initialisms) +} + +// ToPascal returns words in PascalCase (capitalized words concatenated together). +// Also known as UpperPascalCase. +func (c *Caser) ToPascal(s string) string { + return convert(s, c.splitFn, '\x00', TitleCase, c.initialisms) +} + +// ToCamel returns words in camelCase (capitalized words concatenated together, with first word lower case). +// Also known as lowerCamelCase or mixedCase. +func (c *Caser) ToCamel(s string) string { + return convert(s, c.splitFn, '\x00', CamelCase, c.initialisms) +} + +// ToCase returns words with a given case and delimiter. +func (c *Caser) ToCase(s string, wordCase WordCase, delimiter rune) string { + return convert(s, c.splitFn, delimiter, wordCase, c.initialisms) +} diff --git a/vendor/github.com/ettle/strcase/convert.go b/vendor/github.com/ettle/strcase/convert.go new file mode 100644 index 000000000..70fedb144 --- /dev/null +++ b/vendor/github.com/ettle/strcase/convert.go @@ -0,0 +1,297 @@ +package strcase + +import "strings" + +// WordCase is an enumeration of the ways to format a word. +type WordCase int + +const ( + // Original - Preserve the original input strcase + Original WordCase = iota + // LowerCase - All letters lower cased (example) + LowerCase + // UpperCase - All letters upper cased (EXAMPLE) + UpperCase + // TitleCase - Only first letter upper cased (Example) + TitleCase + // CamelCase - TitleCase except lower case first word (exampleText) + // Notably, even if the first word is an initialism, it will be lower + // cased. This is important for code generators where capital letters + // mean exported functions. i.e. 
jsonString(), not JSONString() + CamelCase +) + +// We have 3 convert functions for performance reasons +// The general convert could handle everything, but is not optimized +// +// The other two functions are optimized for the general use cases - that is the non-custom caser functions +// Case 1: Any Case and supports Go Initialisms +// Case 2: UpperCase words, which don't need to support initialisms since everything is in upper case + +// convertWithoutInitialims only works for to UpperCase and LowerCase +//nolint:gocyclo +func convertWithoutInitialisms(input string, delimiter rune, wordCase WordCase) string { + input = strings.TrimSpace(input) + runes := []rune(input) + if len(runes) == 0 { + return "" + } + + var b strings.Builder + b.Grow(len(input) * 2) // In case we need to write delimiters where they weren't before + + var prev, curr rune + next := runes[0] // 0 length will have already returned so safe to index + inWord := false + firstWord := true + for i := 0; i < len(runes); i++ { + prev = curr + curr = next + if i+1 == len(runes) { + next = 0 + } else { + next = runes[i+1] + } + + switch defaultSplitFn(prev, curr, next) { + case SkipSplit: + if inWord && delimiter != 0 { + b.WriteRune(delimiter) + } + inWord = false + continue + case Split: + if inWord && delimiter != 0 { + b.WriteRune(delimiter) + } + inWord = false + } + switch wordCase { + case UpperCase: + b.WriteRune(toUpper(curr)) + case LowerCase: + b.WriteRune(toLower(curr)) + case TitleCase: + if inWord { + b.WriteRune(toLower(curr)) + } else { + b.WriteRune(toUpper(curr)) + } + case CamelCase: + if inWord { + b.WriteRune(toLower(curr)) + } else if firstWord { + b.WriteRune(toLower(curr)) + firstWord = false + } else { + b.WriteRune(toUpper(curr)) + } + default: + // Must be original case + b.WriteRune(curr) + } + inWord = inWord || true + } + return b.String() +} + +// convertWithGoInitialisms changes a input string to a certain case with a +// delimiter, respecting go initialisms but not skip runes +//nolint:gocyclo +func convertWithGoInitialisms(input string, delimiter rune, wordCase WordCase) string { + input = strings.TrimSpace(input) + runes := []rune(input) + if len(runes) == 0 { + return "" + } + + var b strings.Builder + b.Grow(len(input) * 2) // In case we need to write delimiters where they weren't before + + firstWord := true + + addWord := func(start, end int) { + if start == end { + return + } + + if !firstWord && delimiter != 0 { + b.WriteRune(delimiter) + } + + // Don't bother with initialisms if the word is longer than 5 + // A quick proxy to avoid the extra memory allocations + if end-start <= 5 { + key := strings.ToUpper(string(runes[start:end])) + if golintInitialisms[key] { + if !firstWord || wordCase != CamelCase { + b.WriteString(key) + firstWord = false + return + } + } + } + + for i := start; i < end; i++ { + r := runes[i] + switch wordCase { + case UpperCase: + panic("use convertWithoutInitialisms instead") + case LowerCase: + b.WriteRune(toLower(r)) + case TitleCase: + if i == start { + b.WriteRune(toUpper(r)) + } else { + b.WriteRune(toLower(r)) + } + case CamelCase: + if !firstWord && i == start { + b.WriteRune(toUpper(r)) + } else { + b.WriteRune(toLower(r)) + } + default: + b.WriteRune(r) + } + } + firstWord = false + } + + var prev, curr rune + next := runes[0] // 0 length will have already returned so safe to index + wordStart := 0 + for i := 0; i < len(runes); i++ { + prev = curr + curr = next + if i+1 == len(runes) { + next = 0 + } else { + next = runes[i+1] + } + + switch 
defaultSplitFn(prev, curr, next) { + case Split: + addWord(wordStart, i) + wordStart = i + case SkipSplit: + addWord(wordStart, i) + wordStart = i + 1 + } + } + + if wordStart != len(runes) { + addWord(wordStart, len(runes)) + } + return b.String() +} + +// convert changes a input string to a certain case with a delimiter, +// respecting arbitrary initialisms and skip characters +//nolint:gocyclo +func convert(input string, fn SplitFn, delimiter rune, wordCase WordCase, + initialisms map[string]bool) string { + input = strings.TrimSpace(input) + runes := []rune(input) + if len(runes) == 0 { + return "" + } + + var b strings.Builder + b.Grow(len(input) * 2) // In case we need to write delimiters where they weren't before + + firstWord := true + var skipIndexes []int + + addWord := func(start, end int) { + // If you have nothing good to say, say nothing at all + if start == end || len(skipIndexes) == end-start { + skipIndexes = nil + return + } + + // If you have something to say, start with a delimiter + if !firstWord && delimiter != 0 { + b.WriteRune(delimiter) + } + + // Check if you're an initialism + // Note - we don't check skip characters here since initialisms + // will probably never have junk characters in between + // I'm open to it if there is a use case + if initialisms != nil { + var word strings.Builder + for i := start; i < end; i++ { + word.WriteRune(toUpper(runes[i])) + } + key := word.String() + if initialisms[key] { + if !firstWord || wordCase != CamelCase { + b.WriteString(key) + firstWord = false + return + } + } + } + + skipIdx := 0 + for i := start; i < end; i++ { + if len(skipIndexes) > 0 && skipIdx < len(skipIndexes) && i == skipIndexes[skipIdx] { + skipIdx++ + continue + } + r := runes[i] + switch wordCase { + case UpperCase: + b.WriteRune(toUpper(r)) + case LowerCase: + b.WriteRune(toLower(r)) + case TitleCase: + if i == start { + b.WriteRune(toUpper(r)) + } else { + b.WriteRune(toLower(r)) + } + case CamelCase: + if !firstWord && i == start { + b.WriteRune(toUpper(r)) + } else { + b.WriteRune(toLower(r)) + } + default: + b.WriteRune(r) + } + } + firstWord = false + skipIndexes = nil + } + + var prev, curr rune + next := runes[0] // 0 length will have already returned so safe to index + wordStart := 0 + for i := 0; i < len(runes); i++ { + prev = curr + curr = next + if i+1 == len(runes) { + next = 0 + } else { + next = runes[i+1] + } + + switch fn(prev, curr, next) { + case Skip: + skipIndexes = append(skipIndexes, i) + case Split: + addWord(wordStart, i) + wordStart = i + case SkipSplit: + addWord(wordStart, i) + wordStart = i + 1 + } + } + + if wordStart != len(runes) { + addWord(wordStart, len(runes)) + } + return b.String() +} diff --git a/vendor/github.com/ettle/strcase/doc.go b/vendor/github.com/ettle/strcase/doc.go new file mode 100644 index 000000000..b898a4e45 --- /dev/null +++ b/vendor/github.com/ettle/strcase/doc.go @@ -0,0 +1,155 @@ +/* +Package strcase is a package for converting strings into various word cases +(e.g. 
snake_case, camelCase) + + go get -u github.com/ettle/strcase + +Example usage + + strcase.ToSnake("Hello World") // hello_world + strcase.ToSNAKE("Hello World") // HELLO_WORLD + + strcase.ToKebab("helloWorld") // hello-world + strcase.ToKEBAB("helloWorld") // HELLO-WORLD + + strcase.ToPascal("hello-world") // HelloWorld + strcase.ToCamel("hello-world") // helloWorld + + // Handle odd cases + strcase.ToSnake("FOOBar") // foo_bar + + // Support Go initialisms + strcase.ToGoPascal("http_response") // HTTPResponse + + // Specify case and delimiter + strcase.ToCase("HelloWorld", strcase.UpperCase, '.') // HELLO.WORLD + +Why this package + +String strcase is pretty straight forward and there are a number of methods to +do it. This package is fully featured, more customizable, better tested, and +faster* than other packages and what you would probably whip up yourself. + +Unicode support + +We work for with unicode strings and pay very little performance penalty for it +as we optimized for the common use case of ASCII only strings. + +Customization + +You can create a custom caser that changes the behavior to what you want. This +customization also reduces the pressure for us to change the default behavior +which means that things are more stable for everyone involved. The goal is to +make the common path easy and fast, while making the uncommon path possible. + + c := NewCaser( + // Use Go's default initialisms e.g. ID, HTML + true, + // Override initialisms (e.g. don't initialize HTML but initialize SSL + map[string]bool{"SSL": true, "HTML": false}, + // Write your own custom SplitFn + // + NewSplitFn( + []rune{'*', '.', ','}, + SplitCase, + SplitAcronym, + PreserveNumberFormatting, + SplitBeforeNumber, + SplitAfterNumber, + )) + assert.Equal(t, "http_200", c.ToSnake("http200")) + +Initialism support + +By default, we use the golint intialisms list. You can customize and override +the initialisms if you wish to add additional ones, such as "SSL" or "CMS" or +domain specific ones to your industry. + + ToGoPascal("http_response") // HTTPResponse + ToGoSnake("http_response") // HTTP_response + +Test coverage + +We have a wide ranging test suite to make sure that we understand our behavior. +Test coverage isn't everything, but we aim for 100% coverage. + +Fast + +Optimized to reduce memory allocations with Builder. Benchmarked and optimized +around common cases. + +We're on par with the fastest packages (that have less features) and much +faster than others. We also benchmarked against code snippets. Using string +builders to reduce memory allocation and reordering boolean checks for the +common cases have a large performance impact. + +Hopefully I was fair to each library and happy to rerun benchmarks differently +or reword my commentary based on suggestions or updates. 
+ + // This package - faster then almost all libraries + // Initialisms are more complicated and slightly slower, but still faster then other libraries that do less + BenchmarkToTitle-4 7821166 221 ns/op 32 B/op 1 allocs/op + BenchmarkToSnake-4 9378589 202 ns/op 32 B/op 1 allocs/op + BenchmarkToSNAKE-4 6174453 223 ns/op 32 B/op 1 allocs/op + BenchmarkToGoSnake-4 3114266 434 ns/op 44 B/op 4 allocs/op + BenchmarkToCustomCaser-4 2973855 448 ns/op 56 B/op 4 allocs/op + + // Segment has very fast snake case and camel case libraries + // No features or customization, but very very fast + BenchmarkSegment-4 24003495 64.9 ns/op 16 B/op 1 allocs/op + + // Stdlib strings.Title for comparison, even though it only splits on spaces + BenchmarkToTitleStrings-4 11259376 161 ns/op 16 B/op 1 allocs/op + + // Other libraries or code snippets + // - Most are slower, by up to an order of magnitude + // - None support initialisms or customization + // - Some generate only camelCase or snake_case + // - Many lack unicode support + BenchmarkToSnakeStoewer-4 7103268 297 ns/op 64 B/op 2 allocs/op + // Copying small rune arrays is slow + BenchmarkToSnakeSiongui-4 3710768 413 ns/op 48 B/op 10 allocs/op + BenchmarkGoValidator-4 2416479 1049 ns/op 184 B/op 9 allocs/op + // String alloction is slow + BenchmarkToSnakeFatih-4 1000000 2407 ns/op 624 B/op 26 allocs/op + BenchmarkToSnakeIanColeman-4 1005766 1426 ns/op 160 B/op 13 allocs/op + // Regexp is slow + BenchmarkToSnakeGolangPrograms-4 614689 2237 ns/op 225 B/op 11 allocs/op + + + + // These results aren't a surprise - my initial version of this library was + // painfully slow. I think most of us, without spending some time with + // profilers and benchmarks, would write also something on the slower side. + + +Why not this package + +If every nanosecond matters and this is used in a tight loop, use segment.io's +libraries (https://github.com/segmentio/go-snakecase and +https://github.com/segmentio/go-camelcase). They lack features, but make up for +it by being blazing fast. Alternatively, if you need your code to work slightly +differently, fork them and tailor it for your use case. + +If you don't like having external imports, I get it. This package only imports +packages for testing, otherwise it only uses the standard library. If that's +not enough, you can use this repo as the foundation for your own. MIT Licensed. + +This package is still relatively new and while I've used it for a while +personally, it doesn't have the miles that other packages do. I've tested this +code agains't their test cases to make sure that there aren't any surprises. + +Migrating from other packages + +If you are migrating from from another package, you may find slight differences +in output. To reduce the delta, you may find it helpful to use the following +custom casers to mimic the behavior of the other package. 
+ + // From https://github.com/iancoleman/strcase + var c = NewCaser(false, nil, NewSplitFn([]rune{'_', '-', '.'}, SplitCase, SplitAcronym, SplitBeforeNumber)) + + // From https://github.com/stoewer/go-strcase + var c = NewCaser(false, nil, NewSplitFn([]rune{'_', '-'}, SplitCase), SplitAcronym) + +*/ +package strcase diff --git a/vendor/github.com/ettle/strcase/go.mod b/vendor/github.com/ettle/strcase/go.mod new file mode 100644 index 000000000..0219bfaa0 --- /dev/null +++ b/vendor/github.com/ettle/strcase/go.mod @@ -0,0 +1,5 @@ +module github.com/ettle/strcase + +go 1.12 + +require github.com/stretchr/testify v1.5.1 diff --git a/vendor/github.com/ettle/strcase/go.sum b/vendor/github.com/ettle/strcase/go.sum new file mode 100644 index 000000000..331fa6982 --- /dev/null +++ b/vendor/github.com/ettle/strcase/go.sum @@ -0,0 +1,11 @@ +github.com/davecgh/go-spew v1.1.0 h1:ZDRjVQ15GmhC3fiQ8ni8+OwkZQO4DARzQgrnXU1Liz8= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/testify v1.5.1 h1:nOGnQDM7FYENwehXlg/kFVnos3rEvtKTjRvOWSzb6H4= +github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/yaml.v2 v2.2.2 h1:ZCJp+EgiOT7lHqUV2J862kp8Qj64Jo6az82+3Td9dZw= +gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= diff --git a/vendor/github.com/ettle/strcase/initialism.go b/vendor/github.com/ettle/strcase/initialism.go new file mode 100644 index 000000000..3c313d3e9 --- /dev/null +++ b/vendor/github.com/ettle/strcase/initialism.go @@ -0,0 +1,43 @@ +package strcase + +// golintInitialisms are the golint initialisms +var golintInitialisms = map[string]bool{ + "ACL": true, + "API": true, + "ASCII": true, + "CPU": true, + "CSS": true, + "DNS": true, + "EOF": true, + "GUID": true, + "HTML": true, + "HTTP": true, + "HTTPS": true, + "ID": true, + "IP": true, + "JSON": true, + "LHS": true, + "QPS": true, + "RAM": true, + "RHS": true, + "RPC": true, + "SLA": true, + "SMTP": true, + "SQL": true, + "SSH": true, + "TCP": true, + "TLS": true, + "TTL": true, + "UDP": true, + "UI": true, + "UID": true, + "UUID": true, + "URI": true, + "URL": true, + "UTF8": true, + "VM": true, + "XML": true, + "XMPP": true, + "XSRF": true, + "XSS": true, +} diff --git a/vendor/github.com/ettle/strcase/split.go b/vendor/github.com/ettle/strcase/split.go new file mode 100644 index 000000000..84381106b --- /dev/null +++ b/vendor/github.com/ettle/strcase/split.go @@ -0,0 +1,164 @@ +package strcase + +import "unicode" + +// SplitFn defines how to split a string into words +type SplitFn func(prev, curr, next rune) SplitAction + +// NewSplitFn returns a SplitFn based on the options provided. +// +// NewSplitFn covers the majority of common options that other strcase +// libraries provide and should allow you to simply create a custom caser. 
+// For more complicated use cases, feel free to write your own SplitFn +//nolint:gocyclo +func NewSplitFn( + delimiters []rune, + splitOptions ...SplitOption, +) SplitFn { + var splitCase, splitAcronym, splitBeforeNumber, splitAfterNumber, preserveNumberFormatting bool + + for _, option := range splitOptions { + switch option { + case SplitCase: + splitCase = true + case SplitAcronym: + splitAcronym = true + case SplitBeforeNumber: + splitBeforeNumber = true + case SplitAfterNumber: + splitAfterNumber = true + case PreserveNumberFormatting: + preserveNumberFormatting = true + } + } + + return func(prev, curr, next rune) SplitAction { + // The most common case will be that it's just a letter + // There are safe cases to process + if isLower(curr) && !isNumber(prev) { + return Noop + } + if isUpper(prev) && isUpper(curr) && isUpper(next) { + return Noop + } + + if preserveNumberFormatting { + if (curr == '.' || curr == ',') && + isNumber(prev) && isNumber(next) { + return Noop + } + } + + if unicode.IsSpace(curr) { + return SkipSplit + } + for _, d := range delimiters { + if curr == d { + return SkipSplit + } + } + + if splitBeforeNumber { + if isNumber(curr) && !isNumber(prev) { + if preserveNumberFormatting && (prev == '.' || prev == ',') { + return Noop + } + return Split + } + } + + if splitAfterNumber { + if isNumber(prev) && !isNumber(curr) { + return Split + } + } + + if splitCase { + if !isUpper(prev) && isUpper(curr) { + return Split + } + } + + if splitAcronym { + if isUpper(prev) && isUpper(curr) && isLower(next) { + return Split + } + } + + return Noop + } +} + +// SplitOption are options that allow for configuring NewSplitFn +type SplitOption int + +const ( + // SplitCase - FooBar -> Foo_Bar + SplitCase SplitOption = iota + // SplitAcronym - FOOBar -> Foo_Bar + // It won't preserve FOO's case. If you want, you can set the Caser's initialisms so FOO will be in all caps + SplitAcronym + // SplitBeforeNumber - port80 -> port_80 + SplitBeforeNumber + // SplitAfterNumber - 200status -> 200_status + SplitAfterNumber + // PreserveNumberFormatting - a.b.2,000.3.c -> a_b_2,000.3_c + PreserveNumberFormatting +) + +// SplitAction defines if and how to split a string +type SplitAction int + +const ( + // Noop - Continue to next character + Noop SplitAction = iota + // Split - Split between words + // e.g. to split between wordsWithoutDelimiters + Split + // SkipSplit - Split the word and drop the character + // e.g. to split words with delimiters + SkipSplit + // Skip - Remove the character completely + Skip +) + +//nolint:gocyclo +func defaultSplitFn(prev, curr, next rune) SplitAction { + // The most common case will be that it's just a letter so let lowercase letters return early since we know what they should do + if isLower(curr) { + return Noop + } + // Delimiters are _, -, ., and unicode spaces + // Handle . lower down as it needs to happen after number exceptions + if curr == '_' || curr == '-' || isSpace(curr) { + return SkipSplit + } + + if isUpper(curr) { + if isLower(prev) { + // fooBar + return Split + } else if isUpper(prev) && isLower(next) { + // FOOBar + return Split + } + } + + // Do numeric exceptions last to avoid perf penalty + if unicode.IsNumber(prev) { + // v4.3 is not split + if (curr == '.' || curr == ',') && unicode.IsNumber(next) { + return Noop + } + if !unicode.IsNumber(curr) && curr != '.' { + return Split + } + } + // While period is a default delimiter, keep it down here to avoid + // penalty for other delimiters + if curr == '.' 
{ + return SkipSplit + } + + return Noop +} diff --git a/vendor/github.com/ettle/strcase/strcase.go b/vendor/github.com/ettle/strcase/strcase.go new file mode 100644 index 000000000..46b4f7a68 --- /dev/null +++ b/vendor/github.com/ettle/strcase/strcase.go @@ -0,0 +1,81 @@ +package strcase + +// ToSnake returns words in snake_case (lower case words with underscores). +func ToSnake(s string) string { + return convertWithoutInitialisms(s, '_', LowerCase) +} + +// ToGoSnake returns words in snake_case (lower case words with underscores). +// +// Respects Go's common initialisms (e.g. http_response -> HTTP_response). +func ToGoSnake(s string) string { + return convertWithGoInitialisms(s, '_', LowerCase) +} + +// ToSNAKE returns words in SNAKE_CASE (upper case words with underscores). +// Also known as SCREAMING_SNAKE_CASE or UPPER_CASE. +func ToSNAKE(s string) string { + return convertWithoutInitialisms(s, '_', UpperCase) +} + +// ToKebab returns words in kebab-case (lower case words with dashes). +// Also known as dash-case. +func ToKebab(s string) string { + return convertWithoutInitialisms(s, '-', LowerCase) +} + +// ToGoKebab returns words in kebab-case (lower case words with dashes). +// Also known as dash-case. +// +// Respects Go's common initialisms (e.g. http-response -> HTTP-response). +func ToGoKebab(s string) string { + return convertWithGoInitialisms(s, '-', LowerCase) +} + +// ToKEBAB returns words in KEBAB-CASE (upper case words with dashes). +// Also known as SCREAMING-KEBAB-CASE or SCREAMING-DASH-CASE. +func ToKEBAB(s string) string { + return convertWithoutInitialisms(s, '-', UpperCase) +} + +// ToPascal returns words in PascalCase (capitalized words concatenated together). +// Also known as UpperPascalCase. +func ToPascal(s string) string { + return convertWithoutInitialisms(s, 0, TitleCase) +} + +// ToGoPascal returns words in PascalCase (capitalized words concatenated together). +// Also known as UpperPascalCase. +// +// Respects Go's common initialisms (e.g. HttpResponse -> HTTPResponse). +func ToGoPascal(s string) string { + return convertWithGoInitialisms(s, 0, TitleCase) +} + +// ToCamel returns words in camelCase (capitalized words concatenated together, with first word lower case). +// Also known as lowerCamelCase or mixedCase. +func ToCamel(s string) string { + return convertWithoutInitialisms(s, 0, CamelCase) +} + +// ToGoCamel returns words in camelCase (capitalized words concatenated together, with first word lower case). +// Also known as lowerCamelCase or mixedCase. +// +// Respects Go's common initialisms, but first word remains lowercased which is +// important for code generator use cases (e.g. toJson -> toJSON, httpResponse +// -> httpResponse). +func ToGoCamel(s string) string { + return convertWithGoInitialisms(s, 0, CamelCase) +} + +// ToCase returns words in given case and delimiter. +func ToCase(s string, wordCase WordCase, delimiter rune) string { + return convertWithoutInitialisms(s, delimiter, wordCase) +} + +// ToGoCase returns words in given case and delimiter. +// +// Respects Go's common initialisms (e.g. httpResponse -> HTTPResponse). 
+func ToGoCase(s string, wordCase WordCase, delimiter rune) string { + return convertWithGoInitialisms(s, delimiter, wordCase) +} diff --git a/vendor/github.com/ettle/strcase/unicode.go b/vendor/github.com/ettle/strcase/unicode.go new file mode 100644 index 000000000..b75e25a51 --- /dev/null +++ b/vendor/github.com/ettle/strcase/unicode.go @@ -0,0 +1,48 @@ +package strcase + +import "unicode" + +// Unicode functions, optimized for the common case of ascii +// No performance lost by wrapping since these functions get inlined by the compiler + +func isUpper(r rune) bool { + return unicode.IsUpper(r) +} + +func isLower(r rune) bool { + return unicode.IsLower(r) +} + +func isNumber(r rune) bool { + if r >= '0' && r <= '9' { + return true + } + return unicode.IsNumber(r) +} + +func isSpace(r rune) bool { + if r == ' ' || r == '\t' || r == '\n' || r == '\r' { + return true + } else if r < 128 { + return false + } + return unicode.IsSpace(r) +} + +func toUpper(r rune) rune { + if r >= 'a' && r <= 'z' { + return r - 32 + } else if r < 128 { + return r + } + return unicode.ToUpper(r) +} + +func toLower(r rune) rune { + if r >= 'A' && r <= 'Z' { + return r + 32 + } else if r < 128 { + return r + } + return unicode.ToLower(r) +} diff --git a/vendor/github.com/fatih/color/LICENSE.md b/vendor/github.com/fatih/color/LICENSE.md new file mode 100644 index 000000000..25fdaf639 --- /dev/null +++ b/vendor/github.com/fatih/color/LICENSE.md @@ -0,0 +1,20 @@ +The MIT License (MIT) + +Copyright (c) 2013 Fatih Arslan + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/vendor/github.com/fatih/color/README.md b/vendor/github.com/fatih/color/README.md new file mode 100644 index 000000000..5c751f215 --- /dev/null +++ b/vendor/github.com/fatih/color/README.md @@ -0,0 +1,178 @@ +# color [![](https://github.com/fatih/color/workflows/build/badge.svg)](https://github.com/fatih/color/actions) [![PkgGoDev](https://pkg.go.dev/badge/github.com/fatih/color)](https://pkg.go.dev/github.com/fatih/color) + +Color lets you use colorized outputs in terms of [ANSI Escape +Codes](http://en.wikipedia.org/wiki/ANSI_escape_code#Colors) in Go (Golang). It +has support for Windows too! The API can be used in several ways, pick one that +suits you. 
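
Stepping back briefly to the strcase package vendored above: the conversion helpers in strcase.go and the options accepted by NewSplitFn compose as in the following minimal sketch. This is illustrative only, not taken from the vendored files; the import path is assumed from the vendor directory, and the expected values in the comments follow the package's own doc comments (e.g. http_response -> HTTP_response for ToGoSnake).

```go
package main

import (
	"fmt"

	"github.com/ettle/strcase"
)

func main() {
	// Plain conversions from strcase.go.
	fmt.Println(strcase.ToSnake("FooBar"))          // foo_bar
	fmt.Println(strcase.ToGoSnake("http_response")) // HTTP_response (Go initialisms respected)
	fmt.Println(strcase.ToSNAKE("fooBar"))          // FOO_BAR

	// A custom split function built with NewSplitFn: split on case changes
	// and before numbers, treating '-' and '_' as delimiters.
	split := strcase.NewSplitFn(
		[]rune{'-', '_'},
		strcase.SplitCase,
		strcase.SplitBeforeNumber,
	)
	fmt.Println(split('o', 'B', 'a') == strcase.Split)     // true: lower -> upper boundary
	fmt.Println(split('t', '8', '0') == strcase.Split)     // true: split before the number
	fmt.Println(split('a', '_', 'b') == strcase.SkipSplit) // true: delimiter is dropped
}
```

The SplitAction comparisons mirror the branches of NewSplitFn shown above: SplitCase and SplitBeforeNumber trigger Split at a lower-to-upper or letter-to-digit boundary, while configured delimiters return SkipSplit and are removed from the output.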
+ +![Color](https://user-images.githubusercontent.com/438920/96832689-03b3e000-13f4-11eb-9803-46f4c4de3406.jpg) + + +## Install + +```bash +go get github.com/fatih/color +``` + +## Examples + +### Standard colors + +```go +// Print with default helper functions +color.Cyan("Prints text in cyan.") + +// A newline will be appended automatically +color.Blue("Prints %s in blue.", "text") + +// These are using the default foreground colors +color.Red("We have red") +color.Magenta("And many others ..") + +``` + +### Mix and reuse colors + +```go +// Create a new color object +c := color.New(color.FgCyan).Add(color.Underline) +c.Println("Prints cyan text with an underline.") + +// Or just add them to New() +d := color.New(color.FgCyan, color.Bold) +d.Printf("This prints bold cyan %s\n", "too!.") + +// Mix up foreground and background colors, create new mixes! +red := color.New(color.FgRed) + +boldRed := red.Add(color.Bold) +boldRed.Println("This will print text in bold red.") + +whiteBackground := red.Add(color.BgWhite) +whiteBackground.Println("Red text with white background.") +``` + +### Use your own output (io.Writer) + +```go +// Use your own io.Writer output +color.New(color.FgBlue).Fprintln(myWriter, "blue color!") + +blue := color.New(color.FgBlue) +blue.Fprint(writer, "This will print text in blue.") +``` + +### Custom print functions (PrintFunc) + +```go +// Create a custom print function for convenience +red := color.New(color.FgRed).PrintfFunc() +red("Warning") +red("Error: %s", err) + +// Mix up multiple attributes +notice := color.New(color.Bold, color.FgGreen).PrintlnFunc() +notice("Don't forget this...") +``` + +### Custom fprint functions (FprintFunc) + +```go +blue := color.New(FgBlue).FprintfFunc() +blue(myWriter, "important notice: %s", stars) + +// Mix up with multiple attributes +success := color.New(color.Bold, color.FgGreen).FprintlnFunc() +success(myWriter, "Don't forget this...") +``` + +### Insert into noncolor strings (SprintFunc) + +```go +// Create SprintXxx functions to mix strings with other non-colorized strings: +yellow := color.New(color.FgYellow).SprintFunc() +red := color.New(color.FgRed).SprintFunc() +fmt.Printf("This is a %s and this is %s.\n", yellow("warning"), red("error")) + +info := color.New(color.FgWhite, color.BgGreen).SprintFunc() +fmt.Printf("This %s rocks!\n", info("package")) + +// Use helper functions +fmt.Println("This", color.RedString("warning"), "should be not neglected.") +fmt.Printf("%v %v\n", color.GreenString("Info:"), "an important message.") + +// Windows supported too! Just don't forget to change the output to color.Output +fmt.Fprintf(color.Output, "Windows support: %s", color.GreenString("PASS")) +``` + +### Plug into existing code + +```go +// Use handy standard colors +color.Set(color.FgYellow) + +fmt.Println("Existing text will now be in yellow") +fmt.Printf("This one %s\n", "too") + +color.Unset() // Don't forget to unset + +// You can mix up parameters +color.Set(color.FgMagenta, color.Bold) +defer color.Unset() // Use it in your function + +fmt.Println("All text will now be bold magenta.") +``` + +### Disable/Enable color + +There might be a case where you want to explicitly disable/enable color output. the +`go-isatty` package will automatically disable color output for non-tty output streams +(for example if the output were piped directly to `less`). + +The `color` package also disables color output if the [`NO_COLOR`](https://no-color.org) environment +variable is set (regardless of its value). 
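
The reverse override is also possible: when output is piped or collected by a CI system, the automatic non-tty detection can be bypassed by hand. A minimal sketch, assuming the standard import path:

```go
package main

import "github.com/fatih/color"

func main() {
	// NoColor is initialized at package load from the tty check and the
	// NO_COLOR variable; setting it to false afterwards forces ANSI output
	// even when stdout is not a terminal (e.g. piped into a log collector).
	color.NoColor = false
	color.Green("build succeeded")
}
```

This is the same override the GitHub Actions note below relies on.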
+ +`Color` has support to disable/enable colors programatically both globally and +for single color definitions. For example suppose you have a CLI app and a +`--no-color` bool flag. You can easily disable the color output with: + +```go +var flagNoColor = flag.Bool("no-color", false, "Disable color output") + +if *flagNoColor { + color.NoColor = true // disables colorized output +} +``` + +It also has support for single color definitions (local). You can +disable/enable color output on the fly: + +```go +c := color.New(color.FgCyan) +c.Println("Prints cyan text") + +c.DisableColor() +c.Println("This is printed without any color") + +c.EnableColor() +c.Println("This prints again cyan...") +``` + +## GitHub Actions + +To output color in GitHub Actions (or other CI systems that support ANSI colors), make sure to set `color.NoColor = false` so that it bypasses the check for non-tty output streams. + +## Todo + +* Save/Return previous values +* Evaluate fmt.Formatter interface + + +## Credits + + * [Fatih Arslan](https://github.com/fatih) + * Windows support via @mattn: [colorable](https://github.com/mattn/go-colorable) + +## License + +The MIT License (MIT) - see [`LICENSE.md`](https://github.com/fatih/color/blob/master/LICENSE.md) for more details diff --git a/vendor/github.com/fatih/color/color.go b/vendor/github.com/fatih/color/color.go new file mode 100644 index 000000000..98a60f3c8 --- /dev/null +++ b/vendor/github.com/fatih/color/color.go @@ -0,0 +1,618 @@ +package color + +import ( + "fmt" + "io" + "os" + "strconv" + "strings" + "sync" + + "github.com/mattn/go-colorable" + "github.com/mattn/go-isatty" +) + +var ( + // NoColor defines if the output is colorized or not. It's dynamically set to + // false or true based on the stdout's file descriptor referring to a terminal + // or not. It's also set to true if the NO_COLOR environment variable is + // set (regardless of its value). This is a global option and affects all + // colors. For more control over each color block use the methods + // DisableColor() individually. + NoColor = noColorExists() || os.Getenv("TERM") == "dumb" || + (!isatty.IsTerminal(os.Stdout.Fd()) && !isatty.IsCygwinTerminal(os.Stdout.Fd())) + + // Output defines the standard output of the print functions. By default + // os.Stdout is used. + Output = colorable.NewColorableStdout() + + // Error defines a color supporting writer for os.Stderr. + Error = colorable.NewColorableStderr() + + // colorsCache is used to reduce the count of created Color objects and + // allows to reuse already created objects with required Attribute. + colorsCache = make(map[Attribute]*Color) + colorsCacheMu sync.Mutex // protects colorsCache +) + +// noColorExists returns true if the environment variable NO_COLOR exists. +func noColorExists() bool { + _, exists := os.LookupEnv("NO_COLOR") + return exists +} + +// Color defines a custom color object which is defined by SGR parameters. 
+type Color struct { + params []Attribute + noColor *bool +} + +// Attribute defines a single SGR Code +type Attribute int + +const escape = "\x1b" + +// Base attributes +const ( + Reset Attribute = iota + Bold + Faint + Italic + Underline + BlinkSlow + BlinkRapid + ReverseVideo + Concealed + CrossedOut +) + +// Foreground text colors +const ( + FgBlack Attribute = iota + 30 + FgRed + FgGreen + FgYellow + FgBlue + FgMagenta + FgCyan + FgWhite +) + +// Foreground Hi-Intensity text colors +const ( + FgHiBlack Attribute = iota + 90 + FgHiRed + FgHiGreen + FgHiYellow + FgHiBlue + FgHiMagenta + FgHiCyan + FgHiWhite +) + +// Background text colors +const ( + BgBlack Attribute = iota + 40 + BgRed + BgGreen + BgYellow + BgBlue + BgMagenta + BgCyan + BgWhite +) + +// Background Hi-Intensity text colors +const ( + BgHiBlack Attribute = iota + 100 + BgHiRed + BgHiGreen + BgHiYellow + BgHiBlue + BgHiMagenta + BgHiCyan + BgHiWhite +) + +// New returns a newly created color object. +func New(value ...Attribute) *Color { + c := &Color{ + params: make([]Attribute, 0), + } + + if noColorExists() { + c.noColor = boolPtr(true) + } + + c.Add(value...) + return c +} + +// Set sets the given parameters immediately. It will change the color of +// output with the given SGR parameters until color.Unset() is called. +func Set(p ...Attribute) *Color { + c := New(p...) + c.Set() + return c +} + +// Unset resets all escape attributes and clears the output. Usually should +// be called after Set(). +func Unset() { + if NoColor { + return + } + + fmt.Fprintf(Output, "%s[%dm", escape, Reset) +} + +// Set sets the SGR sequence. +func (c *Color) Set() *Color { + if c.isNoColorSet() { + return c + } + + fmt.Fprintf(Output, c.format()) + return c +} + +func (c *Color) unset() { + if c.isNoColorSet() { + return + } + + Unset() +} + +func (c *Color) setWriter(w io.Writer) *Color { + if c.isNoColorSet() { + return c + } + + fmt.Fprintf(w, c.format()) + return c +} + +func (c *Color) unsetWriter(w io.Writer) { + if c.isNoColorSet() { + return + } + + if NoColor { + return + } + + fmt.Fprintf(w, "%s[%dm", escape, Reset) +} + +// Add is used to chain SGR parameters. Use as many as parameters to combine +// and create custom color objects. Example: Add(color.FgRed, color.Underline). +func (c *Color) Add(value ...Attribute) *Color { + c.params = append(c.params, value...) + return c +} + +func (c *Color) prepend(value Attribute) { + c.params = append(c.params, 0) + copy(c.params[1:], c.params[0:]) + c.params[0] = value +} + +// Fprint formats using the default formats for its operands and writes to w. +// Spaces are added between operands when neither is a string. +// It returns the number of bytes written and any write error encountered. +// On Windows, users should wrap w with colorable.NewColorable() if w is of +// type *os.File. +func (c *Color) Fprint(w io.Writer, a ...interface{}) (n int, err error) { + c.setWriter(w) + defer c.unsetWriter(w) + + return fmt.Fprint(w, a...) +} + +// Print formats using the default formats for its operands and writes to +// standard output. Spaces are added between operands when neither is a +// string. It returns the number of bytes written and any write error +// encountered. This is the standard fmt.Print() method wrapped with the given +// color. +func (c *Color) Print(a ...interface{}) (n int, err error) { + c.Set() + defer c.unset() + + return fmt.Fprint(Output, a...) +} + +// Fprintf formats according to a format specifier and writes to w. 
+// It returns the number of bytes written and any write error encountered. +// On Windows, users should wrap w with colorable.NewColorable() if w is of +// type *os.File. +func (c *Color) Fprintf(w io.Writer, format string, a ...interface{}) (n int, err error) { + c.setWriter(w) + defer c.unsetWriter(w) + + return fmt.Fprintf(w, format, a...) +} + +// Printf formats according to a format specifier and writes to standard output. +// It returns the number of bytes written and any write error encountered. +// This is the standard fmt.Printf() method wrapped with the given color. +func (c *Color) Printf(format string, a ...interface{}) (n int, err error) { + c.Set() + defer c.unset() + + return fmt.Fprintf(Output, format, a...) +} + +// Fprintln formats using the default formats for its operands and writes to w. +// Spaces are always added between operands and a newline is appended. +// On Windows, users should wrap w with colorable.NewColorable() if w is of +// type *os.File. +func (c *Color) Fprintln(w io.Writer, a ...interface{}) (n int, err error) { + c.setWriter(w) + defer c.unsetWriter(w) + + return fmt.Fprintln(w, a...) +} + +// Println formats using the default formats for its operands and writes to +// standard output. Spaces are always added between operands and a newline is +// appended. It returns the number of bytes written and any write error +// encountered. This is the standard fmt.Print() method wrapped with the given +// color. +func (c *Color) Println(a ...interface{}) (n int, err error) { + c.Set() + defer c.unset() + + return fmt.Fprintln(Output, a...) +} + +// Sprint is just like Print, but returns a string instead of printing it. +func (c *Color) Sprint(a ...interface{}) string { + return c.wrap(fmt.Sprint(a...)) +} + +// Sprintln is just like Println, but returns a string instead of printing it. +func (c *Color) Sprintln(a ...interface{}) string { + return c.wrap(fmt.Sprintln(a...)) +} + +// Sprintf is just like Printf, but returns a string instead of printing it. +func (c *Color) Sprintf(format string, a ...interface{}) string { + return c.wrap(fmt.Sprintf(format, a...)) +} + +// FprintFunc returns a new function that prints the passed arguments as +// colorized with color.Fprint(). +func (c *Color) FprintFunc() func(w io.Writer, a ...interface{}) { + return func(w io.Writer, a ...interface{}) { + c.Fprint(w, a...) + } +} + +// PrintFunc returns a new function that prints the passed arguments as +// colorized with color.Print(). +func (c *Color) PrintFunc() func(a ...interface{}) { + return func(a ...interface{}) { + c.Print(a...) + } +} + +// FprintfFunc returns a new function that prints the passed arguments as +// colorized with color.Fprintf(). +func (c *Color) FprintfFunc() func(w io.Writer, format string, a ...interface{}) { + return func(w io.Writer, format string, a ...interface{}) { + c.Fprintf(w, format, a...) + } +} + +// PrintfFunc returns a new function that prints the passed arguments as +// colorized with color.Printf(). +func (c *Color) PrintfFunc() func(format string, a ...interface{}) { + return func(format string, a ...interface{}) { + c.Printf(format, a...) + } +} + +// FprintlnFunc returns a new function that prints the passed arguments as +// colorized with color.Fprintln(). +func (c *Color) FprintlnFunc() func(w io.Writer, a ...interface{}) { + return func(w io.Writer, a ...interface{}) { + c.Fprintln(w, a...) + } +} + +// PrintlnFunc returns a new function that prints the passed arguments as +// colorized with color.Println(). 
+func (c *Color) PrintlnFunc() func(a ...interface{}) { + return func(a ...interface{}) { + c.Println(a...) + } +} + +// SprintFunc returns a new function that returns colorized strings for the +// given arguments with fmt.Sprint(). Useful to put into or mix into other +// string. Windows users should use this in conjunction with color.Output, example: +// +// put := New(FgYellow).SprintFunc() +// fmt.Fprintf(color.Output, "This is a %s", put("warning")) +func (c *Color) SprintFunc() func(a ...interface{}) string { + return func(a ...interface{}) string { + return c.wrap(fmt.Sprint(a...)) + } +} + +// SprintfFunc returns a new function that returns colorized strings for the +// given arguments with fmt.Sprintf(). Useful to put into or mix into other +// string. Windows users should use this in conjunction with color.Output. +func (c *Color) SprintfFunc() func(format string, a ...interface{}) string { + return func(format string, a ...interface{}) string { + return c.wrap(fmt.Sprintf(format, a...)) + } +} + +// SprintlnFunc returns a new function that returns colorized strings for the +// given arguments with fmt.Sprintln(). Useful to put into or mix into other +// string. Windows users should use this in conjunction with color.Output. +func (c *Color) SprintlnFunc() func(a ...interface{}) string { + return func(a ...interface{}) string { + return c.wrap(fmt.Sprintln(a...)) + } +} + +// sequence returns a formatted SGR sequence to be plugged into a "\x1b[...m" +// an example output might be: "1;36" -> bold cyan +func (c *Color) sequence() string { + format := make([]string, len(c.params)) + for i, v := range c.params { + format[i] = strconv.Itoa(int(v)) + } + + return strings.Join(format, ";") +} + +// wrap wraps the s string with the colors attributes. The string is ready to +// be printed. +func (c *Color) wrap(s string) string { + if c.isNoColorSet() { + return s + } + + return c.format() + s + c.unformat() +} + +func (c *Color) format() string { + return fmt.Sprintf("%s[%sm", escape, c.sequence()) +} + +func (c *Color) unformat() string { + return fmt.Sprintf("%s[%dm", escape, Reset) +} + +// DisableColor disables the color output. Useful to not change any existing +// code and still being able to output. Can be used for flags like +// "--no-color". To enable back use EnableColor() method. +func (c *Color) DisableColor() { + c.noColor = boolPtr(true) +} + +// EnableColor enables the color output. Use it in conjunction with +// DisableColor(). Otherwise this method has no side effects. +func (c *Color) EnableColor() { + c.noColor = boolPtr(false) +} + +func (c *Color) isNoColorSet() bool { + // check first if we have user set action + if c.noColor != nil { + return *c.noColor + } + + // if not return the global option, which is disabled by default + return NoColor +} + +// Equals returns a boolean value indicating whether two colors are equal. 
+func (c *Color) Equals(c2 *Color) bool { + if len(c.params) != len(c2.params) { + return false + } + + for _, attr := range c.params { + if !c2.attrExists(attr) { + return false + } + } + + return true +} + +func (c *Color) attrExists(a Attribute) bool { + for _, attr := range c.params { + if attr == a { + return true + } + } + + return false +} + +func boolPtr(v bool) *bool { + return &v +} + +func getCachedColor(p Attribute) *Color { + colorsCacheMu.Lock() + defer colorsCacheMu.Unlock() + + c, ok := colorsCache[p] + if !ok { + c = New(p) + colorsCache[p] = c + } + + return c +} + +func colorPrint(format string, p Attribute, a ...interface{}) { + c := getCachedColor(p) + + if !strings.HasSuffix(format, "\n") { + format += "\n" + } + + if len(a) == 0 { + c.Print(format) + } else { + c.Printf(format, a...) + } +} + +func colorString(format string, p Attribute, a ...interface{}) string { + c := getCachedColor(p) + + if len(a) == 0 { + return c.SprintFunc()(format) + } + + return c.SprintfFunc()(format, a...) +} + +// Black is a convenient helper function to print with black foreground. A +// newline is appended to format by default. +func Black(format string, a ...interface{}) { colorPrint(format, FgBlack, a...) } + +// Red is a convenient helper function to print with red foreground. A +// newline is appended to format by default. +func Red(format string, a ...interface{}) { colorPrint(format, FgRed, a...) } + +// Green is a convenient helper function to print with green foreground. A +// newline is appended to format by default. +func Green(format string, a ...interface{}) { colorPrint(format, FgGreen, a...) } + +// Yellow is a convenient helper function to print with yellow foreground. +// A newline is appended to format by default. +func Yellow(format string, a ...interface{}) { colorPrint(format, FgYellow, a...) } + +// Blue is a convenient helper function to print with blue foreground. A +// newline is appended to format by default. +func Blue(format string, a ...interface{}) { colorPrint(format, FgBlue, a...) } + +// Magenta is a convenient helper function to print with magenta foreground. +// A newline is appended to format by default. +func Magenta(format string, a ...interface{}) { colorPrint(format, FgMagenta, a...) } + +// Cyan is a convenient helper function to print with cyan foreground. A +// newline is appended to format by default. +func Cyan(format string, a ...interface{}) { colorPrint(format, FgCyan, a...) } + +// White is a convenient helper function to print with white foreground. A +// newline is appended to format by default. +func White(format string, a ...interface{}) { colorPrint(format, FgWhite, a...) } + +// BlackString is a convenient helper function to return a string with black +// foreground. +func BlackString(format string, a ...interface{}) string { return colorString(format, FgBlack, a...) } + +// RedString is a convenient helper function to return a string with red +// foreground. +func RedString(format string, a ...interface{}) string { return colorString(format, FgRed, a...) } + +// GreenString is a convenient helper function to return a string with green +// foreground. +func GreenString(format string, a ...interface{}) string { return colorString(format, FgGreen, a...) } + +// YellowString is a convenient helper function to return a string with yellow +// foreground. +func YellowString(format string, a ...interface{}) string { return colorString(format, FgYellow, a...) 
} + +// BlueString is a convenient helper function to return a string with blue +// foreground. +func BlueString(format string, a ...interface{}) string { return colorString(format, FgBlue, a...) } + +// MagentaString is a convenient helper function to return a string with magenta +// foreground. +func MagentaString(format string, a ...interface{}) string { + return colorString(format, FgMagenta, a...) +} + +// CyanString is a convenient helper function to return a string with cyan +// foreground. +func CyanString(format string, a ...interface{}) string { return colorString(format, FgCyan, a...) } + +// WhiteString is a convenient helper function to return a string with white +// foreground. +func WhiteString(format string, a ...interface{}) string { return colorString(format, FgWhite, a...) } + +// HiBlack is a convenient helper function to print with hi-intensity black foreground. A +// newline is appended to format by default. +func HiBlack(format string, a ...interface{}) { colorPrint(format, FgHiBlack, a...) } + +// HiRed is a convenient helper function to print with hi-intensity red foreground. A +// newline is appended to format by default. +func HiRed(format string, a ...interface{}) { colorPrint(format, FgHiRed, a...) } + +// HiGreen is a convenient helper function to print with hi-intensity green foreground. A +// newline is appended to format by default. +func HiGreen(format string, a ...interface{}) { colorPrint(format, FgHiGreen, a...) } + +// HiYellow is a convenient helper function to print with hi-intensity yellow foreground. +// A newline is appended to format by default. +func HiYellow(format string, a ...interface{}) { colorPrint(format, FgHiYellow, a...) } + +// HiBlue is a convenient helper function to print with hi-intensity blue foreground. A +// newline is appended to format by default. +func HiBlue(format string, a ...interface{}) { colorPrint(format, FgHiBlue, a...) } + +// HiMagenta is a convenient helper function to print with hi-intensity magenta foreground. +// A newline is appended to format by default. +func HiMagenta(format string, a ...interface{}) { colorPrint(format, FgHiMagenta, a...) } + +// HiCyan is a convenient helper function to print with hi-intensity cyan foreground. A +// newline is appended to format by default. +func HiCyan(format string, a ...interface{}) { colorPrint(format, FgHiCyan, a...) } + +// HiWhite is a convenient helper function to print with hi-intensity white foreground. A +// newline is appended to format by default. +func HiWhite(format string, a ...interface{}) { colorPrint(format, FgHiWhite, a...) } + +// HiBlackString is a convenient helper function to return a string with hi-intensity black +// foreground. +func HiBlackString(format string, a ...interface{}) string { + return colorString(format, FgHiBlack, a...) +} + +// HiRedString is a convenient helper function to return a string with hi-intensity red +// foreground. +func HiRedString(format string, a ...interface{}) string { return colorString(format, FgHiRed, a...) } + +// HiGreenString is a convenient helper function to return a string with hi-intensity green +// foreground. +func HiGreenString(format string, a ...interface{}) string { + return colorString(format, FgHiGreen, a...) +} + +// HiYellowString is a convenient helper function to return a string with hi-intensity yellow +// foreground. +func HiYellowString(format string, a ...interface{}) string { + return colorString(format, FgHiYellow, a...) 
+} + +// HiBlueString is a convenient helper function to return a string with hi-intensity blue +// foreground. +func HiBlueString(format string, a ...interface{}) string { return colorString(format, FgHiBlue, a...) } + +// HiMagentaString is a convenient helper function to return a string with hi-intensity magenta +// foreground. +func HiMagentaString(format string, a ...interface{}) string { + return colorString(format, FgHiMagenta, a...) +} + +// HiCyanString is a convenient helper function to return a string with hi-intensity cyan +// foreground. +func HiCyanString(format string, a ...interface{}) string { return colorString(format, FgHiCyan, a...) } + +// HiWhiteString is a convenient helper function to return a string with hi-intensity white +// foreground. +func HiWhiteString(format string, a ...interface{}) string { + return colorString(format, FgHiWhite, a...) +} diff --git a/vendor/github.com/fatih/color/doc.go b/vendor/github.com/fatih/color/doc.go new file mode 100644 index 000000000..04541de78 --- /dev/null +++ b/vendor/github.com/fatih/color/doc.go @@ -0,0 +1,135 @@ +/* +Package color is an ANSI color package to output colorized or SGR defined +output to the standard output. The API can be used in several way, pick one +that suits you. + +Use simple and default helper functions with predefined foreground colors: + + color.Cyan("Prints text in cyan.") + + // a newline will be appended automatically + color.Blue("Prints %s in blue.", "text") + + // More default foreground colors.. + color.Red("We have red") + color.Yellow("Yellow color too!") + color.Magenta("And many others ..") + + // Hi-intensity colors + color.HiGreen("Bright green color.") + color.HiBlack("Bright black means gray..") + color.HiWhite("Shiny white color!") + +However there are times where custom color mixes are required. Below are some +examples to create custom color objects and use the print functions of each +separate color object. + + // Create a new color object + c := color.New(color.FgCyan).Add(color.Underline) + c.Println("Prints cyan text with an underline.") + + // Or just add them to New() + d := color.New(color.FgCyan, color.Bold) + d.Printf("This prints bold cyan %s\n", "too!.") + + + // Mix up foreground and background colors, create new mixes! 
+ red := color.New(color.FgRed) + + boldRed := red.Add(color.Bold) + boldRed.Println("This will print text in bold red.") + + whiteBackground := red.Add(color.BgWhite) + whiteBackground.Println("Red text with White background.") + + // Use your own io.Writer output + color.New(color.FgBlue).Fprintln(myWriter, "blue color!") + + blue := color.New(color.FgBlue) + blue.Fprint(myWriter, "This will print text in blue.") + +You can create PrintXxx functions to simplify even more: + + // Create a custom print function for convenient + red := color.New(color.FgRed).PrintfFunc() + red("warning") + red("error: %s", err) + + // Mix up multiple attributes + notice := color.New(color.Bold, color.FgGreen).PrintlnFunc() + notice("don't forget this...") + +You can also FprintXxx functions to pass your own io.Writer: + + blue := color.New(FgBlue).FprintfFunc() + blue(myWriter, "important notice: %s", stars) + + // Mix up with multiple attributes + success := color.New(color.Bold, color.FgGreen).FprintlnFunc() + success(myWriter, don't forget this...") + + +Or create SprintXxx functions to mix strings with other non-colorized strings: + + yellow := New(FgYellow).SprintFunc() + red := New(FgRed).SprintFunc() + + fmt.Printf("this is a %s and this is %s.\n", yellow("warning"), red("error")) + + info := New(FgWhite, BgGreen).SprintFunc() + fmt.Printf("this %s rocks!\n", info("package")) + +Windows support is enabled by default. All Print functions work as intended. +However only for color.SprintXXX functions, user should use fmt.FprintXXX and +set the output to color.Output: + + fmt.Fprintf(color.Output, "Windows support: %s", color.GreenString("PASS")) + + info := New(FgWhite, BgGreen).SprintFunc() + fmt.Fprintf(color.Output, "this %s rocks!\n", info("package")) + +Using with existing code is possible. Just use the Set() method to set the +standard output to the given parameters. That way a rewrite of an existing +code is not required. + + // Use handy standard colors. + color.Set(color.FgYellow) + + fmt.Println("Existing text will be now in Yellow") + fmt.Printf("This one %s\n", "too") + + color.Unset() // don't forget to unset + + // You can mix up parameters + color.Set(color.FgMagenta, color.Bold) + defer color.Unset() // use it in your function + + fmt.Println("All text will be now bold magenta.") + +There might be a case where you want to disable color output (for example to +pipe the standard output of your app to somewhere else). `Color` has support to +disable colors both globally and for single color definition. For example +suppose you have a CLI app and a `--no-color` bool flag. You can easily disable +the color output with: + + var flagNoColor = flag.Bool("no-color", false, "Disable color output") + + if *flagNoColor { + color.NoColor = true // disables colorized output + } + +You can also disable the color by setting the NO_COLOR environment variable to any value. + +It also has support for single color definitions (local). 
You can +disable/enable color output on the fly: + + c := color.New(color.FgCyan) + c.Println("Prints cyan text") + + c.DisableColor() + c.Println("This is printed without any color") + + c.EnableColor() + c.Println("This prints again cyan...") +*/ +package color diff --git a/vendor/github.com/fatih/color/go.mod b/vendor/github.com/fatih/color/go.mod new file mode 100644 index 000000000..78872815e --- /dev/null +++ b/vendor/github.com/fatih/color/go.mod @@ -0,0 +1,8 @@ +module github.com/fatih/color + +go 1.13 + +require ( + github.com/mattn/go-colorable v0.1.8 + github.com/mattn/go-isatty v0.0.12 +) diff --git a/vendor/github.com/fatih/color/go.sum b/vendor/github.com/fatih/color/go.sum new file mode 100644 index 000000000..54f7c46e8 --- /dev/null +++ b/vendor/github.com/fatih/color/go.sum @@ -0,0 +1,7 @@ +github.com/mattn/go-colorable v0.1.8 h1:c1ghPdyEDarC70ftn0y+A/Ee++9zz8ljHG1b13eJ0s8= +github.com/mattn/go-colorable v0.1.8/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= +github.com/mattn/go-isatty v0.0.12 h1:wuysRhFDzyxgEmMf5xjvJ2M9dZoWAXNNr5LSBS7uHXY= +github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= +golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae h1:/WDfKMnPU+m5M4xB+6x4kaepxRw6jWvR5iDRdvjHgy8= +golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= diff --git a/vendor/github.com/fatih/structtag/LICENSE b/vendor/github.com/fatih/structtag/LICENSE new file mode 100644 index 000000000..4fd15f9f8 --- /dev/null +++ b/vendor/github.com/fatih/structtag/LICENSE @@ -0,0 +1,60 @@ +Copyright (c) 2017, Fatih Arslan +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +* Neither the name of structtag nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +This software includes some portions from Go. Go is used under the terms of the +BSD like license. + +Copyright (c) 2012 The Go Authors. All rights reserved. 
+ +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +The Go gopher was designed by Renee French. http://reneefrench.blogspot.com/ The design is licensed under the Creative Commons 3.0 Attributions license. Read this article for more details: https://blog.golang.org/gopher diff --git a/vendor/github.com/fatih/structtag/README.md b/vendor/github.com/fatih/structtag/README.md new file mode 100644 index 000000000..c4e8b1e86 --- /dev/null +++ b/vendor/github.com/fatih/structtag/README.md @@ -0,0 +1,73 @@ +# structtag [![GoDoc](http://img.shields.io/badge/go-documentation-blue.svg?style=flat-square)](http://godoc.org/github.com/fatih/structtag) + +structtag provides an easy way of parsing and manipulating struct tag fields. +Please vendor the library as it might change in future versions. + +# Install + +```bash +go get github.com/fatih/structtag +``` + +# Example + +```go +package main + +import ( + "fmt" + "reflect" + "sort" + + "github.com/fatih/structtag" +) + +func main() { + type t struct { + t string `json:"foo,omitempty,string" xml:"foo"` + } + + // get field tag + tag := reflect.TypeOf(t{}).Field(0).Tag + + // ... 
and start using structtag by parsing the tag + tags, err := structtag.Parse(string(tag)) + if err != nil { + panic(err) + } + + // iterate over all tags + for _, t := range tags.Tags() { + fmt.Printf("tag: %+v\n", t) + } + + // get a single tag + jsonTag, err := tags.Get("json") + if err != nil { + panic(err) + } + fmt.Println(jsonTag) // Output: json:"foo,omitempty,string" + fmt.Println(jsonTag.Key) // Output: json + fmt.Println(jsonTag.Name) // Output: foo + fmt.Println(jsonTag.Options) // Output: [omitempty string] + + // change existing tag + jsonTag.Name = "foo_bar" + jsonTag.Options = nil + tags.Set(jsonTag) + + // add new tag + tags.Set(&structtag.Tag{ + Key: "hcl", + Name: "foo", + Options: []string{"squash"}, + }) + + // print the tags + fmt.Println(tags) // Output: json:"foo_bar" xml:"foo" hcl:"foo,squash" + + // sort tags according to keys + sort.Sort(tags) + fmt.Println(tags) // Output: hcl:"foo,squash" json:"foo_bar" xml:"foo" +} +``` diff --git a/vendor/github.com/fatih/structtag/go.mod b/vendor/github.com/fatih/structtag/go.mod new file mode 100644 index 000000000..660d6a1f1 --- /dev/null +++ b/vendor/github.com/fatih/structtag/go.mod @@ -0,0 +1,3 @@ +module github.com/fatih/structtag + +go 1.12 diff --git a/vendor/github.com/fatih/structtag/tags.go b/vendor/github.com/fatih/structtag/tags.go new file mode 100644 index 000000000..c168fb21c --- /dev/null +++ b/vendor/github.com/fatih/structtag/tags.go @@ -0,0 +1,315 @@ +package structtag + +import ( + "bytes" + "errors" + "fmt" + "strconv" + "strings" +) + +var ( + errTagSyntax = errors.New("bad syntax for struct tag pair") + errTagKeySyntax = errors.New("bad syntax for struct tag key") + errTagValueSyntax = errors.New("bad syntax for struct tag value") + + errKeyNotSet = errors.New("tag key does not exist") + errTagNotExist = errors.New("tag does not exist") + errTagKeyMismatch = errors.New("mismatch between key and tag.key") +) + +// Tags represent a set of tags from a single struct field +type Tags struct { + tags []*Tag +} + +// Tag defines a single struct's string literal tag +type Tag struct { + // Key is the tag key, such as json, xml, etc.. + // i.e: `json:"foo,omitempty". Here key is: "json" + Key string + + // Name is a part of the value + // i.e: `json:"foo,omitempty". Here name is: "foo" + Name string + + // Options is a part of the value. It contains a slice of tag options i.e: + // `json:"foo,omitempty". Here options is: ["omitempty"] + Options []string +} + +// Parse parses a single struct field tag and returns the set of tags. +func Parse(tag string) (*Tags, error) { + var tags []*Tag + + hasTag := tag != "" + + // NOTE(arslan) following code is from reflect and vet package with some + // modifications to collect all necessary information and extend it with + // usable methods + for tag != "" { + // Skip leading space. + i := 0 + for i < len(tag) && tag[i] == ' ' { + i++ + } + tag = tag[i:] + if tag == "" { + break + } + + // Scan to colon. A space, a quote or a control character is a syntax + // error. Strictly speaking, control chars include the range [0x7f, + // 0x9f], not just [0x00, 0x1f], but in practice, we ignore the + // multi-byte control characters as it is simpler to inspect the tag's + // bytes than the tag's runes. 
+ i = 0 + for i < len(tag) && tag[i] > ' ' && tag[i] != ':' && tag[i] != '"' && tag[i] != 0x7f { + i++ + } + + if i == 0 { + return nil, errTagKeySyntax + } + if i+1 >= len(tag) || tag[i] != ':' { + return nil, errTagSyntax + } + if tag[i+1] != '"' { + return nil, errTagValueSyntax + } + + key := string(tag[:i]) + tag = tag[i+1:] + + // Scan quoted string to find value. + i = 1 + for i < len(tag) && tag[i] != '"' { + if tag[i] == '\\' { + i++ + } + i++ + } + if i >= len(tag) { + return nil, errTagValueSyntax + } + + qvalue := string(tag[:i+1]) + tag = tag[i+1:] + + value, err := strconv.Unquote(qvalue) + if err != nil { + return nil, errTagValueSyntax + } + + res := strings.Split(value, ",") + name := res[0] + options := res[1:] + if len(options) == 0 { + options = nil + } + + tags = append(tags, &Tag{ + Key: key, + Name: name, + Options: options, + }) + } + + if hasTag && len(tags) == 0 { + return nil, nil + } + + return &Tags{ + tags: tags, + }, nil +} + +// Get returns the tag associated with the given key. If the key is present +// in the tag the value (which may be empty) is returned. Otherwise the +// returned value will be the empty string. The ok return value reports whether +// the tag exists or not (which the return value is nil). +func (t *Tags) Get(key string) (*Tag, error) { + for _, tag := range t.tags { + if tag.Key == key { + return tag, nil + } + } + + return nil, errTagNotExist +} + +// Set sets the given tag. If the tag key already exists it'll override it +func (t *Tags) Set(tag *Tag) error { + if tag.Key == "" { + return errKeyNotSet + } + + added := false + for i, tg := range t.tags { + if tg.Key == tag.Key { + added = true + t.tags[i] = tag + } + } + + if !added { + // this means this is a new tag, add it + t.tags = append(t.tags, tag) + } + + return nil +} + +// AddOptions adds the given option for the given key. If the option already +// exists it doesn't add it again. +func (t *Tags) AddOptions(key string, options ...string) { + for i, tag := range t.tags { + if tag.Key != key { + continue + } + + for _, opt := range options { + if !tag.HasOption(opt) { + tag.Options = append(tag.Options, opt) + } + } + + t.tags[i] = tag + } +} + +// DeleteOptions deletes the given options for the given key +func (t *Tags) DeleteOptions(key string, options ...string) { + hasOption := func(option string) bool { + for _, opt := range options { + if opt == option { + return true + } + } + return false + } + + for i, tag := range t.tags { + if tag.Key != key { + continue + } + + var updated []string + for _, opt := range tag.Options { + if !hasOption(opt) { + updated = append(updated, opt) + } + } + + tag.Options = updated + t.tags[i] = tag + } +} + +// Delete deletes the tag for the given keys +func (t *Tags) Delete(keys ...string) { + hasKey := func(key string) bool { + for _, k := range keys { + if k == key { + return true + } + } + return false + } + + var updated []*Tag + for _, tag := range t.tags { + if !hasKey(tag.Key) { + updated = append(updated, tag) + } + } + + t.tags = updated +} + +// Tags returns a slice of tags. The order is the original tag order unless it +// was changed. +func (t *Tags) Tags() []*Tag { + return t.tags +} + +// Tags returns a slice of tags. The order is the original tag order unless it +// was changed. 
+func (t *Tags) Keys() []string { + var keys []string + for _, tag := range t.tags { + keys = append(keys, tag.Key) + } + return keys +} + +// String reassembles the tags into a valid literal tag field representation +func (t *Tags) String() string { + tags := t.Tags() + if len(tags) == 0 { + return "" + } + + var buf bytes.Buffer + for i, tag := range t.Tags() { + buf.WriteString(tag.String()) + if i != len(tags)-1 { + buf.WriteString(" ") + } + } + return buf.String() +} + +// HasOption returns true if the given option is available in options +func (t *Tag) HasOption(opt string) bool { + for _, tagOpt := range t.Options { + if tagOpt == opt { + return true + } + } + + return false +} + +// Value returns the raw value of the tag, i.e. if the tag is +// `json:"foo,omitempty", the Value is "foo,omitempty" +func (t *Tag) Value() string { + options := strings.Join(t.Options, ",") + if options != "" { + return fmt.Sprintf(`%s,%s`, t.Name, options) + } + return t.Name +} + +// String reassembles the tag into a valid tag field representation +func (t *Tag) String() string { + return fmt.Sprintf(`%s:%q`, t.Key, t.Value()) +} + +// GoString implements the fmt.GoStringer interface +func (t *Tag) GoString() string { + template := `{ + Key: '%s', + Name: '%s', + Option: '%s', + }` + + if t.Options == nil { + return fmt.Sprintf(template, t.Key, t.Name, "nil") + } + + options := strings.Join(t.Options, ",") + return fmt.Sprintf(template, t.Key, t.Name, options) +} + +func (t *Tags) Len() int { + return len(t.tags) +} + +func (t *Tags) Less(i int, j int) bool { + return t.tags[i].Key < t.tags[j].Key +} + +func (t *Tags) Swap(i int, j int) { + t.tags[i], t.tags[j] = t.tags[j], t.tags[i] +} diff --git a/vendor/github.com/fsnotify/fsnotify/.editorconfig b/vendor/github.com/fsnotify/fsnotify/.editorconfig new file mode 100644 index 000000000..fad895851 --- /dev/null +++ b/vendor/github.com/fsnotify/fsnotify/.editorconfig @@ -0,0 +1,12 @@ +root = true + +[*.go] +indent_style = tab +indent_size = 4 +insert_final_newline = true + +[*.{yml,yaml}] +indent_style = space +indent_size = 2 +insert_final_newline = true +trim_trailing_whitespace = true diff --git a/vendor/github.com/fsnotify/fsnotify/.gitattributes b/vendor/github.com/fsnotify/fsnotify/.gitattributes new file mode 100644 index 000000000..32f1001be --- /dev/null +++ b/vendor/github.com/fsnotify/fsnotify/.gitattributes @@ -0,0 +1 @@ +go.sum linguist-generated diff --git a/vendor/github.com/fsnotify/fsnotify/.gitignore b/vendor/github.com/fsnotify/fsnotify/.gitignore new file mode 100644 index 000000000..4cd0cbaf4 --- /dev/null +++ b/vendor/github.com/fsnotify/fsnotify/.gitignore @@ -0,0 +1,6 @@ +# Setup a Global .gitignore for OS and editor generated files: +# https://help.github.com/articles/ignoring-files +# git config --global core.excludesfile ~/.gitignore_global + +.vagrant +*.sublime-project diff --git a/vendor/github.com/fsnotify/fsnotify/.travis.yml b/vendor/github.com/fsnotify/fsnotify/.travis.yml new file mode 100644 index 000000000..a9c30165c --- /dev/null +++ b/vendor/github.com/fsnotify/fsnotify/.travis.yml @@ -0,0 +1,36 @@ +sudo: false +language: go + +go: + - "stable" + - "1.11.x" + - "1.10.x" + - "1.9.x" + +matrix: + include: + - go: "stable" + env: GOLINT=true + allow_failures: + - go: tip + fast_finish: true + + +before_install: + - if [ ! -z "${GOLINT}" ]; then go get -u golang.org/x/lint/golint; fi + +script: + - go test --race ./... + +after_script: + - test -z "$(gofmt -s -l -w . | tee /dev/stderr)" + - if [ ! 
-z "${GOLINT}" ]; then echo running golint; golint --set_exit_status ./...; else echo skipping golint; fi + - go vet ./... + +os: + - linux + - osx + - windows + +notifications: + email: false diff --git a/vendor/github.com/fsnotify/fsnotify/AUTHORS b/vendor/github.com/fsnotify/fsnotify/AUTHORS new file mode 100644 index 000000000..5ab5d41c5 --- /dev/null +++ b/vendor/github.com/fsnotify/fsnotify/AUTHORS @@ -0,0 +1,52 @@ +# Names should be added to this file as +# Name or Organization +# The email address is not required for organizations. + +# You can update this list using the following command: +# +# $ git shortlog -se | awk '{print $2 " " $3 " " $4}' + +# Please keep the list sorted. + +Aaron L +Adrien Bustany +Amit Krishnan +Anmol Sethi +Bjørn Erik Pedersen +Bruno Bigras +Caleb Spare +Case Nelson +Chris Howey +Christoffer Buchholz +Daniel Wagner-Hall +Dave Cheney +Evan Phoenix +Francisco Souza +Hari haran +John C Barstow +Kelvin Fo +Ken-ichirou MATSUZAWA +Matt Layher +Nathan Youngman +Nickolai Zeldovich +Patrick +Paul Hammond +Pawel Knap +Pieter Droogendijk +Pursuit92 +Riku Voipio +Rob Figueiredo +Rodrigo Chiossi +Slawek Ligus +Soge Zhang +Tiffany Jernigan +Tilak Sharma +Tom Payne +Travis Cline +Tudor Golubenco +Vahe Khachikyan +Yukang +bronze1man +debrando +henrikedwards +铁哥 diff --git a/vendor/github.com/fsnotify/fsnotify/CHANGELOG.md b/vendor/github.com/fsnotify/fsnotify/CHANGELOG.md new file mode 100644 index 000000000..be4d7ea2c --- /dev/null +++ b/vendor/github.com/fsnotify/fsnotify/CHANGELOG.md @@ -0,0 +1,317 @@ +# Changelog + +## v1.4.7 / 2018-01-09 + +* BSD/macOS: Fix possible deadlock on closing the watcher on kqueue (thanks @nhooyr and @glycerine) +* Tests: Fix missing verb on format string (thanks @rchiossi) +* Linux: Fix deadlock in Remove (thanks @aarondl) +* Linux: Watch.Add improvements (avoid race, fix consistency, reduce garbage) (thanks @twpayne) +* Docs: Moved FAQ into the README (thanks @vahe) +* Linux: Properly handle inotify's IN_Q_OVERFLOW event (thanks @zeldovich) +* Docs: replace references to OS X with macOS + +## v1.4.2 / 2016-10-10 + +* Linux: use InotifyInit1 with IN_CLOEXEC to stop leaking a file descriptor to a child process when using fork/exec [#178](https://github.com/fsnotify/fsnotify/pull/178) (thanks @pattyshack) + +## v1.4.1 / 2016-10-04 + +* Fix flaky inotify stress test on Linux [#177](https://github.com/fsnotify/fsnotify/pull/177) (thanks @pattyshack) + +## v1.4.0 / 2016-10-01 + +* add a String() method to Event.Op [#165](https://github.com/fsnotify/fsnotify/pull/165) (thanks @oozie) + +## v1.3.1 / 2016-06-28 + +* Windows: fix for double backslash when watching the root of a drive [#151](https://github.com/fsnotify/fsnotify/issues/151) (thanks @brunoqc) + +## v1.3.0 / 2016-04-19 + +* Support linux/arm64 by [patching](https://go-review.googlesource.com/#/c/21971/) x/sys/unix and switching to to it from syscall (thanks @suihkulokki) [#135](https://github.com/fsnotify/fsnotify/pull/135) + +## v1.2.10 / 2016-03-02 + +* Fix golint errors in windows.go [#121](https://github.com/fsnotify/fsnotify/pull/121) (thanks @tiffanyfj) + +## v1.2.9 / 2016-01-13 + +kqueue: Fix logic for CREATE after REMOVE [#111](https://github.com/fsnotify/fsnotify/pull/111) (thanks @bep) + +## v1.2.8 / 2015-12-17 + +* kqueue: fix race condition in Close [#105](https://github.com/fsnotify/fsnotify/pull/105) (thanks @djui for reporting the issue and @ppknap for writing a failing test) +* inotify: fix race in test +* enable race detection for continuous integration (Linux, Mac, 
Windows) + +## v1.2.5 / 2015-10-17 + +* inotify: use epoll_create1 for arm64 support (requires Linux 2.6.27 or later) [#100](https://github.com/fsnotify/fsnotify/pull/100) (thanks @suihkulokki) +* inotify: fix path leaks [#73](https://github.com/fsnotify/fsnotify/pull/73) (thanks @chamaken) +* kqueue: watch for rename events on subdirectories [#83](https://github.com/fsnotify/fsnotify/pull/83) (thanks @guotie) +* kqueue: avoid infinite loops from symlinks cycles [#101](https://github.com/fsnotify/fsnotify/pull/101) (thanks @illicitonion) + +## v1.2.1 / 2015-10-14 + +* kqueue: don't watch named pipes [#98](https://github.com/fsnotify/fsnotify/pull/98) (thanks @evanphx) + +## v1.2.0 / 2015-02-08 + +* inotify: use epoll to wake up readEvents [#66](https://github.com/fsnotify/fsnotify/pull/66) (thanks @PieterD) +* inotify: closing watcher should now always shut down goroutine [#63](https://github.com/fsnotify/fsnotify/pull/63) (thanks @PieterD) +* kqueue: close kqueue after removing watches, fixes [#59](https://github.com/fsnotify/fsnotify/issues/59) + +## v1.1.1 / 2015-02-05 + +* inotify: Retry read on EINTR [#61](https://github.com/fsnotify/fsnotify/issues/61) (thanks @PieterD) + +## v1.1.0 / 2014-12-12 + +* kqueue: rework internals [#43](https://github.com/fsnotify/fsnotify/pull/43) + * add low-level functions + * only need to store flags on directories + * less mutexes [#13](https://github.com/fsnotify/fsnotify/issues/13) + * done can be an unbuffered channel + * remove calls to os.NewSyscallError +* More efficient string concatenation for Event.String() [#52](https://github.com/fsnotify/fsnotify/pull/52) (thanks @mdlayher) +* kqueue: fix regression in rework causing subdirectories to be watched [#48](https://github.com/fsnotify/fsnotify/issues/48) +* kqueue: cleanup internal watch before sending remove event [#51](https://github.com/fsnotify/fsnotify/issues/51) + +## v1.0.4 / 2014-09-07 + +* kqueue: add dragonfly to the build tags. +* Rename source code files, rearrange code so exported APIs are at the top. +* Add done channel to example code. [#37](https://github.com/fsnotify/fsnotify/pull/37) (thanks @chenyukang) + +## v1.0.3 / 2014-08-19 + +* [Fix] Windows MOVED_TO now translates to Create like on BSD and Linux. [#36](https://github.com/fsnotify/fsnotify/issues/36) + +## v1.0.2 / 2014-08-17 + +* [Fix] Missing create events on macOS. [#14](https://github.com/fsnotify/fsnotify/issues/14) (thanks @zhsso) +* [Fix] Make ./path and path equivalent. (thanks @zhsso) + +## v1.0.0 / 2014-08-15 + +* [API] Remove AddWatch on Windows, use Add. +* Improve documentation for exported identifiers. [#30](https://github.com/fsnotify/fsnotify/issues/30) +* Minor updates based on feedback from golint. + +## dev / 2014-07-09 + +* Moved to [github.com/fsnotify/fsnotify](https://github.com/fsnotify/fsnotify). +* Use os.NewSyscallError instead of returning errno (thanks @hariharan-uno) + +## dev / 2014-07-04 + +* kqueue: fix incorrect mutex used in Close() +* Update example to demonstrate usage of Op. + +## dev / 2014-06-28 + +* [API] Don't set the Write Op for attribute notifications [#4](https://github.com/fsnotify/fsnotify/issues/4) +* Fix for String() method on Event (thanks Alex Brainman) +* Don't build on Plan 9 or Solaris (thanks @4ad) + +## dev / 2014-06-21 + +* Events channel of type Event rather than *Event. +* [internal] use syscall constants directly for inotify and kqueue. +* [internal] kqueue: rename events to kevents and fileEvent to event. 
+ +## dev / 2014-06-19 + +* Go 1.3+ required on Windows (uses syscall.ERROR_MORE_DATA internally). +* [internal] remove cookie from Event struct (unused). +* [internal] Event struct has the same definition across every OS. +* [internal] remove internal watch and removeWatch methods. + +## dev / 2014-06-12 + +* [API] Renamed Watch() to Add() and RemoveWatch() to Remove(). +* [API] Pluralized channel names: Events and Errors. +* [API] Renamed FileEvent struct to Event. +* [API] Op constants replace methods like IsCreate(). + +## dev / 2014-06-12 + +* Fix data race on kevent buffer (thanks @tilaks) [#98](https://github.com/howeyc/fsnotify/pull/98) + +## dev / 2014-05-23 + +* [API] Remove current implementation of WatchFlags. + * current implementation doesn't take advantage of OS for efficiency + * provides little benefit over filtering events as they are received, but has extra bookkeeping and mutexes + * no tests for the current implementation + * not fully implemented on Windows [#93](https://github.com/howeyc/fsnotify/issues/93#issuecomment-39285195) + +## v0.9.3 / 2014-12-31 + +* kqueue: cleanup internal watch before sending remove event [#51](https://github.com/fsnotify/fsnotify/issues/51) + +## v0.9.2 / 2014-08-17 + +* [Backport] Fix missing create events on macOS. [#14](https://github.com/fsnotify/fsnotify/issues/14) (thanks @zhsso) + +## v0.9.1 / 2014-06-12 + +* Fix data race on kevent buffer (thanks @tilaks) [#98](https://github.com/howeyc/fsnotify/pull/98) + +## v0.9.0 / 2014-01-17 + +* IsAttrib() for events that only concern a file's metadata [#79][] (thanks @abustany) +* [Fix] kqueue: fix deadlock [#77][] (thanks @cespare) +* [NOTICE] Development has moved to `code.google.com/p/go.exp/fsnotify` in preparation for inclusion in the Go standard library. 
+ +## v0.8.12 / 2013-11-13 + +* [API] Remove FD_SET and friends from Linux adapter + +## v0.8.11 / 2013-11-02 + +* [Doc] Add Changelog [#72][] (thanks @nathany) +* [Doc] Spotlight and double modify events on macOS [#62][] (reported by @paulhammond) + +## v0.8.10 / 2013-10-19 + +* [Fix] kqueue: remove file watches when parent directory is removed [#71][] (reported by @mdwhatcott) +* [Fix] kqueue: race between Close and readEvents [#70][] (reported by @bernerdschaefer) +* [Doc] specify OS-specific limits in README (thanks @debrando) + +## v0.8.9 / 2013-09-08 + +* [Doc] Contributing (thanks @nathany) +* [Doc] update package path in example code [#63][] (thanks @paulhammond) +* [Doc] GoCI badge in README (Linux only) [#60][] +* [Doc] Cross-platform testing with Vagrant [#59][] (thanks @nathany) + +## v0.8.8 / 2013-06-17 + +* [Fix] Windows: handle `ERROR_MORE_DATA` on Windows [#49][] (thanks @jbowtie) + +## v0.8.7 / 2013-06-03 + +* [API] Make syscall flags internal +* [Fix] inotify: ignore event changes +* [Fix] race in symlink test [#45][] (reported by @srid) +* [Fix] tests on Windows +* lower case error messages + +## v0.8.6 / 2013-05-23 + +* kqueue: Use EVT_ONLY flag on Darwin +* [Doc] Update README with full example + +## v0.8.5 / 2013-05-09 + +* [Fix] inotify: allow monitoring of "broken" symlinks (thanks @tsg) + +## v0.8.4 / 2013-04-07 + +* [Fix] kqueue: watch all file events [#40][] (thanks @ChrisBuchholz) + +## v0.8.3 / 2013-03-13 + +* [Fix] inoitfy/kqueue memory leak [#36][] (reported by @nbkolchin) +* [Fix] kqueue: use fsnFlags for watching a directory [#33][] (reported by @nbkolchin) + +## v0.8.2 / 2013-02-07 + +* [Doc] add Authors +* [Fix] fix data races for map access [#29][] (thanks @fsouza) + +## v0.8.1 / 2013-01-09 + +* [Fix] Windows path separators +* [Doc] BSD License + +## v0.8.0 / 2012-11-09 + +* kqueue: directory watching improvements (thanks @vmirage) +* inotify: add `IN_MOVED_TO` [#25][] (requested by @cpisto) +* [Fix] kqueue: deleting watched directory [#24][] (reported by @jakerr) + +## v0.7.4 / 2012-10-09 + +* [Fix] inotify: fixes from https://codereview.appspot.com/5418045/ (ugorji) +* [Fix] kqueue: preserve watch flags when watching for delete [#21][] (reported by @robfig) +* [Fix] kqueue: watch the directory even if it isn't a new watch (thanks @robfig) +* [Fix] kqueue: modify after recreation of file + +## v0.7.3 / 2012-09-27 + +* [Fix] kqueue: watch with an existing folder inside the watched folder (thanks @vmirage) +* [Fix] kqueue: no longer get duplicate CREATE events + +## v0.7.2 / 2012-09-01 + +* kqueue: events for created directories + +## v0.7.1 / 2012-07-14 + +* [Fix] for renaming files + +## v0.7.0 / 2012-07-02 + +* [Feature] FSNotify flags +* [Fix] inotify: Added file name back to event path + +## v0.6.0 / 2012-06-06 + +* kqueue: watch files after directory created (thanks @tmc) + +## v0.5.1 / 2012-05-22 + +* [Fix] inotify: remove all watches before Close() + +## v0.5.0 / 2012-05-03 + +* [API] kqueue: return errors during watch instead of sending over channel +* kqueue: match symlink behavior on Linux +* inotify: add `DELETE_SELF` (requested by @taralx) +* [Fix] kqueue: handle EINTR (reported by @robfig) +* [Doc] Godoc example [#1][] (thanks @davecheney) + +## v0.4.0 / 2012-03-30 + +* Go 1 released: build with go tool +* [Feature] Windows support using winfsnotify +* Windows does not have attribute change notifications +* Roll attribute notifications into IsModify + +## v0.3.0 / 2012-02-19 + +* kqueue: add files when watch directory + +## v0.2.0 / 
2011-12-30 + +* update to latest Go weekly code + +## v0.1.0 / 2011-10-19 + +* kqueue: add watch on file creation to match inotify +* kqueue: create file event +* inotify: ignore `IN_IGNORED` events +* event String() +* linux: common FileEvent functions +* initial commit + +[#79]: https://github.com/howeyc/fsnotify/pull/79 +[#77]: https://github.com/howeyc/fsnotify/pull/77 +[#72]: https://github.com/howeyc/fsnotify/issues/72 +[#71]: https://github.com/howeyc/fsnotify/issues/71 +[#70]: https://github.com/howeyc/fsnotify/issues/70 +[#63]: https://github.com/howeyc/fsnotify/issues/63 +[#62]: https://github.com/howeyc/fsnotify/issues/62 +[#60]: https://github.com/howeyc/fsnotify/issues/60 +[#59]: https://github.com/howeyc/fsnotify/issues/59 +[#49]: https://github.com/howeyc/fsnotify/issues/49 +[#45]: https://github.com/howeyc/fsnotify/issues/45 +[#40]: https://github.com/howeyc/fsnotify/issues/40 +[#36]: https://github.com/howeyc/fsnotify/issues/36 +[#33]: https://github.com/howeyc/fsnotify/issues/33 +[#29]: https://github.com/howeyc/fsnotify/issues/29 +[#25]: https://github.com/howeyc/fsnotify/issues/25 +[#24]: https://github.com/howeyc/fsnotify/issues/24 +[#21]: https://github.com/howeyc/fsnotify/issues/21 diff --git a/vendor/github.com/fsnotify/fsnotify/CONTRIBUTING.md b/vendor/github.com/fsnotify/fsnotify/CONTRIBUTING.md new file mode 100644 index 000000000..828a60b24 --- /dev/null +++ b/vendor/github.com/fsnotify/fsnotify/CONTRIBUTING.md @@ -0,0 +1,77 @@ +# Contributing + +## Issues + +* Request features and report bugs using the [GitHub Issue Tracker](https://github.com/fsnotify/fsnotify/issues). +* Please indicate the platform you are using fsnotify on. +* A code example to reproduce the problem is appreciated. + +## Pull Requests + +### Contributor License Agreement + +fsnotify is derived from code in the [golang.org/x/exp](https://godoc.org/golang.org/x/exp) package and it may be included [in the standard library](https://github.com/fsnotify/fsnotify/issues/1) in the future. Therefore fsnotify carries the same [LICENSE](https://github.com/fsnotify/fsnotify/blob/master/LICENSE) as Go. Contributors retain their copyright, so you need to fill out a short form before we can accept your contribution: [Google Individual Contributor License Agreement](https://developers.google.com/open-source/cla/individual). + +Please indicate that you have signed the CLA in your pull request. + +### How fsnotify is Developed + +* Development is done on feature branches. +* Tests are run on BSD, Linux, macOS and Windows. +* Pull requests are reviewed and [applied to master][am] using [hub][]. + * Maintainers may modify or squash commits rather than asking contributors to. +* To issue a new release, the maintainers will: + * Update the CHANGELOG + * Tag a version, which will become available through gopkg.in. + +### How to Fork + +For smooth sailing, always use the original import path. Installing with `go get` makes this easy. + +1. Install from GitHub (`go get -u github.com/fsnotify/fsnotify`) +2. Create your feature branch (`git checkout -b my-new-feature`) +3. Ensure everything works and the tests pass (see below) +4. Commit your changes (`git commit -am 'Add some feature'`) + +Contribute upstream: + +1. Fork fsnotify on GitHub +2. Add your remote (`git remote add fork git@github.com:mycompany/repo.git`) +3. Push to the branch (`git push fork my-new-feature`) +4. 
Create a new Pull Request on GitHub + +This workflow is [thoroughly explained by Katrina Owen](https://splice.com/blog/contributing-open-source-git-repositories-go/). + +### Testing + +fsnotify uses build tags to compile different code on Linux, BSD, macOS, and Windows. + +Before doing a pull request, please do your best to test your changes on multiple platforms, and list which platforms you were able/unable to test on. + +To aid in cross-platform testing there is a Vagrantfile for Linux and BSD. + +* Install [Vagrant](http://www.vagrantup.com/) and [VirtualBox](https://www.virtualbox.org/) +* Setup [Vagrant Gopher](https://github.com/nathany/vagrant-gopher) in your `src` folder. +* Run `vagrant up` from the project folder. You can also setup just one box with `vagrant up linux` or `vagrant up bsd` (note: the BSD box doesn't support Windows hosts at this time, and NFS may prompt for your host OS password) +* Once setup, you can run the test suite on a given OS with a single command `vagrant ssh linux -c 'cd fsnotify/fsnotify; go test'`. +* When you're done, you will want to halt or destroy the Vagrant boxes. + +Notice: fsnotify file system events won't trigger in shared folders. The tests get around this limitation by using the /tmp directory. + +Right now there is no equivalent solution for Windows and macOS, but there are Windows VMs [freely available from Microsoft](http://www.modern.ie/en-us/virtualization-tools#downloads). + +### Maintainers + +Help maintaining fsnotify is welcome. To be a maintainer: + +* Submit a pull request and sign the CLA as above. +* You must be able to run the test suite on Mac, Windows, Linux and BSD. + +To keep master clean, the fsnotify project uses the "apply mail" workflow outlined in Nathaniel Talbott's post ["Merge pull request" Considered Harmful][am]. This requires installing [hub][]. + +All code changes should be internal pull requests. + +Releases are tagged using [Semantic Versioning](http://semver.org/). + +[hub]: https://github.com/github/hub +[am]: http://blog.spreedly.com/2014/06/24/merge-pull-request-considered-harmful/#.VGa5yZPF_Zs diff --git a/vendor/github.com/fsnotify/fsnotify/LICENSE b/vendor/github.com/fsnotify/fsnotify/LICENSE new file mode 100644 index 000000000..e180c8fb0 --- /dev/null +++ b/vendor/github.com/fsnotify/fsnotify/LICENSE @@ -0,0 +1,28 @@ +Copyright (c) 2012 The Go Authors. All rights reserved. +Copyright (c) 2012-2019 fsnotify Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/github.com/fsnotify/fsnotify/README.md b/vendor/github.com/fsnotify/fsnotify/README.md new file mode 100644 index 000000000..b2629e522 --- /dev/null +++ b/vendor/github.com/fsnotify/fsnotify/README.md @@ -0,0 +1,130 @@ +# File system notifications for Go + +[![GoDoc](https://godoc.org/github.com/fsnotify/fsnotify?status.svg)](https://godoc.org/github.com/fsnotify/fsnotify) [![Go Report Card](https://goreportcard.com/badge/github.com/fsnotify/fsnotify)](https://goreportcard.com/report/github.com/fsnotify/fsnotify) + +fsnotify utilizes [golang.org/x/sys](https://godoc.org/golang.org/x/sys) rather than `syscall` from the standard library. Ensure you have the latest version installed by running: + +```console +go get -u golang.org/x/sys/... +``` + +Cross platform: Windows, Linux, BSD and macOS. + +| Adapter | OS | Status | +| --------------------- | -------------------------------- | ------------------------------------------------------------------------------------------------------------------------------- | +| inotify | Linux 2.6.27 or later, Android\* | Supported [![Build Status](https://travis-ci.org/fsnotify/fsnotify.svg?branch=master)](https://travis-ci.org/fsnotify/fsnotify) | +| kqueue | BSD, macOS, iOS\* | Supported [![Build Status](https://travis-ci.org/fsnotify/fsnotify.svg?branch=master)](https://travis-ci.org/fsnotify/fsnotify) | +| ReadDirectoryChangesW | Windows | Supported [![Build Status](https://travis-ci.org/fsnotify/fsnotify.svg?branch=master)](https://travis-ci.org/fsnotify/fsnotify) | +| FSEvents | macOS | [Planned](https://github.com/fsnotify/fsnotify/issues/11) | +| FEN | Solaris 11 | [In Progress](https://github.com/fsnotify/fsnotify/issues/12) | +| fanotify | Linux 2.6.37+ | [Planned](https://github.com/fsnotify/fsnotify/issues/114) | +| USN Journals | Windows | [Maybe](https://github.com/fsnotify/fsnotify/issues/53) | +| Polling | *All* | [Maybe](https://github.com/fsnotify/fsnotify/issues/9) | + +\* Android and iOS are untested. + +Please see [the documentation](https://godoc.org/github.com/fsnotify/fsnotify) and consult the [FAQ](#faq) for usage information. + +## API stability + +fsnotify is a fork of [howeyc/fsnotify](https://godoc.org/github.com/howeyc/fsnotify) with a new API as of v1.0. The API is based on [this design document](http://goo.gl/MrYxyA). + +All [releases](https://github.com/fsnotify/fsnotify/releases) are tagged based on [Semantic Versioning](http://semver.org/). Further API changes are [planned](https://github.com/fsnotify/fsnotify/milestones), and will be tagged with a new major revision number. + +Go 1.6 supports dependencies located in the `vendor/` folder. Unless you are creating a library, it is recommended that you copy fsnotify into `vendor/github.com/fsnotify/fsnotify` within your project, and likewise for `golang.org/x/sys`. 
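+For illustration only, a project that vendors both packages could end up with a layout like the sketch below (the `myproject` and `main.go` names are placeholders; the two vendor paths are the ones recommended above):
+
+```
+myproject/
+  main.go
+  vendor/
+    github.com/fsnotify/fsnotify/
+    golang.org/x/sys/
+```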
+ +## Usage + +```go +package main + +import ( + "log" + + "github.com/fsnotify/fsnotify" +) + +func main() { + watcher, err := fsnotify.NewWatcher() + if err != nil { + log.Fatal(err) + } + defer watcher.Close() + + done := make(chan bool) + go func() { + for { + select { + case event, ok := <-watcher.Events: + if !ok { + return + } + log.Println("event:", event) + if event.Op&fsnotify.Write == fsnotify.Write { + log.Println("modified file:", event.Name) + } + case err, ok := <-watcher.Errors: + if !ok { + return + } + log.Println("error:", err) + } + } + }() + + err = watcher.Add("/tmp/foo") + if err != nil { + log.Fatal(err) + } + <-done +} +``` + +## Contributing + +Please refer to [CONTRIBUTING][] before opening an issue or pull request. + +## Example + +See [example_test.go](https://github.com/fsnotify/fsnotify/blob/master/example_test.go). + +## FAQ + +**When a file is moved to another directory is it still being watched?** + +No (it shouldn't be, unless you are watching where it was moved to). + +**When I watch a directory, are all subdirectories watched as well?** + +No, you must add watches for any directory you want to watch (a recursive watcher is on the roadmap [#18][]). + +**Do I have to watch the Error and Event channels in a separate goroutine?** + +As of now, yes. Looking into making this single-thread friendly (see [howeyc #7][#7]) + +**Why am I receiving multiple events for the same file on OS X?** + +Spotlight indexing on OS X can result in multiple events (see [howeyc #62][#62]). A temporary workaround is to add your folder(s) to the *Spotlight Privacy settings* until we have a native FSEvents implementation (see [#11][]). + +**How many files can be watched at once?** + +There are OS-specific limits as to how many watches can be created: +* Linux: /proc/sys/fs/inotify/max_user_watches contains the limit, reaching this limit results in a "no space left on device" error. +* BSD / OSX: sysctl variables "kern.maxfiles" and "kern.maxfilesperproc", reaching these limits results in a "too many open files" error. + +**Why don't notifications work with NFS filesystems or filesystem in userspace (FUSE)?** + +fsnotify requires support from underlying OS to work. The current NFS protocol does not provide network level support for file notifications. + +[#62]: https://github.com/howeyc/fsnotify/issues/62 +[#18]: https://github.com/fsnotify/fsnotify/issues/18 +[#11]: https://github.com/fsnotify/fsnotify/issues/11 +[#7]: https://github.com/howeyc/fsnotify/issues/7 + +[contributing]: https://github.com/fsnotify/fsnotify/blob/master/CONTRIBUTING.md + +## Related Projects + +* [notify](https://github.com/rjeczalik/notify) +* [fsevents](https://github.com/fsnotify/fsevents) + diff --git a/vendor/github.com/fsnotify/fsnotify/fen.go b/vendor/github.com/fsnotify/fsnotify/fen.go new file mode 100644 index 000000000..ced39cb88 --- /dev/null +++ b/vendor/github.com/fsnotify/fsnotify/fen.go @@ -0,0 +1,37 @@ +// Copyright 2010 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build solaris + +package fsnotify + +import ( + "errors" +) + +// Watcher watches a set of files, delivering events to a channel. +type Watcher struct { + Events chan Event + Errors chan error +} + +// NewWatcher establishes a new watcher with the underlying OS and begins waiting for events. 
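+// The Solaris/FEN backend is not implemented yet, so this currently always returns an error.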
+func NewWatcher() (*Watcher, error) { + return nil, errors.New("FEN based watcher not yet supported for fsnotify\n") +} + +// Close removes all watches and closes the events channel. +func (w *Watcher) Close() error { + return nil +} + +// Add starts watching the named file or directory (non-recursively). +func (w *Watcher) Add(name string) error { + return nil +} + +// Remove stops watching the the named file or directory (non-recursively). +func (w *Watcher) Remove(name string) error { + return nil +} diff --git a/vendor/github.com/fsnotify/fsnotify/fsnotify.go b/vendor/github.com/fsnotify/fsnotify/fsnotify.go new file mode 100644 index 000000000..89cab046d --- /dev/null +++ b/vendor/github.com/fsnotify/fsnotify/fsnotify.go @@ -0,0 +1,68 @@ +// Copyright 2012 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build !plan9 + +// Package fsnotify provides a platform-independent interface for file system notifications. +package fsnotify + +import ( + "bytes" + "errors" + "fmt" +) + +// Event represents a single file system notification. +type Event struct { + Name string // Relative path to the file or directory. + Op Op // File operation that triggered the event. +} + +// Op describes a set of file operations. +type Op uint32 + +// These are the generalized file operations that can trigger a notification. +const ( + Create Op = 1 << iota + Write + Remove + Rename + Chmod +) + +func (op Op) String() string { + // Use a buffer for efficient string concatenation + var buffer bytes.Buffer + + if op&Create == Create { + buffer.WriteString("|CREATE") + } + if op&Remove == Remove { + buffer.WriteString("|REMOVE") + } + if op&Write == Write { + buffer.WriteString("|WRITE") + } + if op&Rename == Rename { + buffer.WriteString("|RENAME") + } + if op&Chmod == Chmod { + buffer.WriteString("|CHMOD") + } + if buffer.Len() == 0 { + return "" + } + return buffer.String()[1:] // Strip leading pipe +} + +// String returns a string representation of the event in the form +// "file: REMOVE|WRITE|..." +func (e Event) String() string { + return fmt.Sprintf("%q: %s", e.Name, e.Op.String()) +} + +// Common errors that can be reported by a watcher +var ( + ErrEventOverflow = errors.New("fsnotify queue overflow") +) diff --git a/vendor/github.com/fsnotify/fsnotify/go.mod b/vendor/github.com/fsnotify/fsnotify/go.mod new file mode 100644 index 000000000..ff11e13f2 --- /dev/null +++ b/vendor/github.com/fsnotify/fsnotify/go.mod @@ -0,0 +1,5 @@ +module github.com/fsnotify/fsnotify + +go 1.13 + +require golang.org/x/sys v0.0.0-20191005200804-aed5e4c7ecf9 diff --git a/vendor/github.com/fsnotify/fsnotify/go.sum b/vendor/github.com/fsnotify/fsnotify/go.sum new file mode 100644 index 000000000..f60af9855 --- /dev/null +++ b/vendor/github.com/fsnotify/fsnotify/go.sum @@ -0,0 +1,2 @@ +golang.org/x/sys v0.0.0-20191005200804-aed5e4c7ecf9 h1:L2auWcuQIvxz9xSEqzESnV/QN/gNRXNApHi3fYwl2w0= +golang.org/x/sys v0.0.0-20191005200804-aed5e4c7ecf9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= diff --git a/vendor/github.com/fsnotify/fsnotify/inotify.go b/vendor/github.com/fsnotify/fsnotify/inotify.go new file mode 100644 index 000000000..d9fd1b88a --- /dev/null +++ b/vendor/github.com/fsnotify/fsnotify/inotify.go @@ -0,0 +1,337 @@ +// Copyright 2010 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +// +build linux + +package fsnotify + +import ( + "errors" + "fmt" + "io" + "os" + "path/filepath" + "strings" + "sync" + "unsafe" + + "golang.org/x/sys/unix" +) + +// Watcher watches a set of files, delivering events to a channel. +type Watcher struct { + Events chan Event + Errors chan error + mu sync.Mutex // Map access + fd int + poller *fdPoller + watches map[string]*watch // Map of inotify watches (key: path) + paths map[int]string // Map of watched paths (key: watch descriptor) + done chan struct{} // Channel for sending a "quit message" to the reader goroutine + doneResp chan struct{} // Channel to respond to Close +} + +// NewWatcher establishes a new watcher with the underlying OS and begins waiting for events. +func NewWatcher() (*Watcher, error) { + // Create inotify fd + fd, errno := unix.InotifyInit1(unix.IN_CLOEXEC) + if fd == -1 { + return nil, errno + } + // Create epoll + poller, err := newFdPoller(fd) + if err != nil { + unix.Close(fd) + return nil, err + } + w := &Watcher{ + fd: fd, + poller: poller, + watches: make(map[string]*watch), + paths: make(map[int]string), + Events: make(chan Event), + Errors: make(chan error), + done: make(chan struct{}), + doneResp: make(chan struct{}), + } + + go w.readEvents() + return w, nil +} + +func (w *Watcher) isClosed() bool { + select { + case <-w.done: + return true + default: + return false + } +} + +// Close removes all watches and closes the events channel. +func (w *Watcher) Close() error { + if w.isClosed() { + return nil + } + + // Send 'close' signal to goroutine, and set the Watcher to closed. + close(w.done) + + // Wake up goroutine + w.poller.wake() + + // Wait for goroutine to close + <-w.doneResp + + return nil +} + +// Add starts watching the named file or directory (non-recursively). +func (w *Watcher) Add(name string) error { + name = filepath.Clean(name) + if w.isClosed() { + return errors.New("inotify instance already closed") + } + + const agnosticEvents = unix.IN_MOVED_TO | unix.IN_MOVED_FROM | + unix.IN_CREATE | unix.IN_ATTRIB | unix.IN_MODIFY | + unix.IN_MOVE_SELF | unix.IN_DELETE | unix.IN_DELETE_SELF + + var flags uint32 = agnosticEvents + + w.mu.Lock() + defer w.mu.Unlock() + watchEntry := w.watches[name] + if watchEntry != nil { + flags |= watchEntry.flags | unix.IN_MASK_ADD + } + wd, errno := unix.InotifyAddWatch(w.fd, name, flags) + if wd == -1 { + return errno + } + + if watchEntry == nil { + w.watches[name] = &watch{wd: uint32(wd), flags: flags} + w.paths[wd] = name + } else { + watchEntry.wd = uint32(wd) + watchEntry.flags = flags + } + + return nil +} + +// Remove stops watching the named file or directory (non-recursively). +func (w *Watcher) Remove(name string) error { + name = filepath.Clean(name) + + // Fetch the watch. + w.mu.Lock() + defer w.mu.Unlock() + watch, ok := w.watches[name] + + // Remove it from inotify. + if !ok { + return fmt.Errorf("can't remove non-existent inotify watch for: %s", name) + } + + // We successfully removed the watch if InotifyRmWatch doesn't return an + // error, we need to clean up our internal state to ensure it matches + // inotify's kernel state. + delete(w.paths, int(watch.wd)) + delete(w.watches, name) + + // inotify_rm_watch will return EINVAL if the file has been deleted; + // the inotify will already have been removed. + // watches and pathes are deleted in ignoreLinux() implicitly and asynchronously + // by calling inotify_rm_watch() below. e.g. 
readEvents() goroutine receives IN_IGNORE + // so that EINVAL means that the wd is being rm_watch()ed or its file removed + // by another thread and we have not received IN_IGNORE event. + success, errno := unix.InotifyRmWatch(w.fd, watch.wd) + if success == -1 { + // TODO: Perhaps it's not helpful to return an error here in every case. + // the only two possible errors are: + // EBADF, which happens when w.fd is not a valid file descriptor of any kind. + // EINVAL, which is when fd is not an inotify descriptor or wd is not a valid watch descriptor. + // Watch descriptors are invalidated when they are removed explicitly or implicitly; + // explicitly by inotify_rm_watch, implicitly when the file they are watching is deleted. + return errno + } + + return nil +} + +type watch struct { + wd uint32 // Watch descriptor (as returned by the inotify_add_watch() syscall) + flags uint32 // inotify flags of this watch (see inotify(7) for the list of valid flags) +} + +// readEvents reads from the inotify file descriptor, converts the +// received events into Event objects and sends them via the Events channel +func (w *Watcher) readEvents() { + var ( + buf [unix.SizeofInotifyEvent * 4096]byte // Buffer for a maximum of 4096 raw events + n int // Number of bytes read with read() + errno error // Syscall errno + ok bool // For poller.wait + ) + + defer close(w.doneResp) + defer close(w.Errors) + defer close(w.Events) + defer unix.Close(w.fd) + defer w.poller.close() + + for { + // See if we have been closed. + if w.isClosed() { + return + } + + ok, errno = w.poller.wait() + if errno != nil { + select { + case w.Errors <- errno: + case <-w.done: + return + } + continue + } + + if !ok { + continue + } + + n, errno = unix.Read(w.fd, buf[:]) + // If a signal interrupted execution, see if we've been asked to close, and try again. + // http://man7.org/linux/man-pages/man7/signal.7.html : + // "Before Linux 3.8, reads from an inotify(7) file descriptor were not restartable" + if errno == unix.EINTR { + continue + } + + // unix.Read might have been woken up by Close. If so, we're done. + if w.isClosed() { + return + } + + if n < unix.SizeofInotifyEvent { + var err error + if n == 0 { + // If EOF is received. This should really never happen. + err = io.EOF + } else if n < 0 { + // If an error occurred while reading. + err = errno + } else { + // Read was too short. + err = errors.New("notify: short read in readEvents()") + } + select { + case w.Errors <- err: + case <-w.done: + return + } + continue + } + + var offset uint32 + // We don't know how many events we just read into the buffer + // While the offset points to at least one whole event... + for offset <= uint32(n-unix.SizeofInotifyEvent) { + // Point "raw" to the event in the buffer + raw := (*unix.InotifyEvent)(unsafe.Pointer(&buf[offset])) + + mask := uint32(raw.Mask) + nameLen := uint32(raw.Len) + + if mask&unix.IN_Q_OVERFLOW != 0 { + select { + case w.Errors <- ErrEventOverflow: + case <-w.done: + return + } + } + + // If the event happened to the watched directory or the watched file, the kernel + // doesn't append the filename to the event, but we would like to always fill the + // the "Name" field with a valid filename. We retrieve the path of the watch from + // the "paths" map. + w.mu.Lock() + name, ok := w.paths[int(raw.Wd)] + // IN_DELETE_SELF occurs when the file/directory being watched is removed. 
+ // This is a sign to clean up the maps, otherwise we are no longer in sync + // with the inotify kernel state which has already deleted the watch + // automatically. + if ok && mask&unix.IN_DELETE_SELF == unix.IN_DELETE_SELF { + delete(w.paths, int(raw.Wd)) + delete(w.watches, name) + } + w.mu.Unlock() + + if nameLen > 0 { + // Point "bytes" at the first byte of the filename + bytes := (*[unix.PathMax]byte)(unsafe.Pointer(&buf[offset+unix.SizeofInotifyEvent])) + // The filename is padded with NULL bytes. TrimRight() gets rid of those. + name += "/" + strings.TrimRight(string(bytes[0:nameLen]), "\000") + } + + event := newEvent(name, mask) + + // Send the events that are not ignored on the events channel + if !event.ignoreLinux(mask) { + select { + case w.Events <- event: + case <-w.done: + return + } + } + + // Move to the next event in the buffer + offset += unix.SizeofInotifyEvent + nameLen + } + } +} + +// Certain types of events can be "ignored" and not sent over the Events +// channel. Such as events marked ignore by the kernel, or MODIFY events +// against files that do not exist. +func (e *Event) ignoreLinux(mask uint32) bool { + // Ignore anything the inotify API says to ignore + if mask&unix.IN_IGNORED == unix.IN_IGNORED { + return true + } + + // If the event is not a DELETE or RENAME, the file must exist. + // Otherwise the event is ignored. + // *Note*: this was put in place because it was seen that a MODIFY + // event was sent after the DELETE. This ignores that MODIFY and + // assumes a DELETE will come or has come if the file doesn't exist. + if !(e.Op&Remove == Remove || e.Op&Rename == Rename) { + _, statErr := os.Lstat(e.Name) + return os.IsNotExist(statErr) + } + return false +} + +// newEvent returns an platform-independent Event based on an inotify mask. +func newEvent(name string, mask uint32) Event { + e := Event{Name: name} + if mask&unix.IN_CREATE == unix.IN_CREATE || mask&unix.IN_MOVED_TO == unix.IN_MOVED_TO { + e.Op |= Create + } + if mask&unix.IN_DELETE_SELF == unix.IN_DELETE_SELF || mask&unix.IN_DELETE == unix.IN_DELETE { + e.Op |= Remove + } + if mask&unix.IN_MODIFY == unix.IN_MODIFY { + e.Op |= Write + } + if mask&unix.IN_MOVE_SELF == unix.IN_MOVE_SELF || mask&unix.IN_MOVED_FROM == unix.IN_MOVED_FROM { + e.Op |= Rename + } + if mask&unix.IN_ATTRIB == unix.IN_ATTRIB { + e.Op |= Chmod + } + return e +} diff --git a/vendor/github.com/fsnotify/fsnotify/inotify_poller.go b/vendor/github.com/fsnotify/fsnotify/inotify_poller.go new file mode 100644 index 000000000..b33f2b4d4 --- /dev/null +++ b/vendor/github.com/fsnotify/fsnotify/inotify_poller.go @@ -0,0 +1,187 @@ +// Copyright 2015 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build linux + +package fsnotify + +import ( + "errors" + + "golang.org/x/sys/unix" +) + +type fdPoller struct { + fd int // File descriptor (as returned by the inotify_init() syscall) + epfd int // Epoll file descriptor + pipe [2]int // Pipe for waking up +} + +func emptyPoller(fd int) *fdPoller { + poller := new(fdPoller) + poller.fd = fd + poller.epfd = -1 + poller.pipe[0] = -1 + poller.pipe[1] = -1 + return poller +} + +// Create a new inotify poller. +// This creates an inotify handler, and an epoll handler. 
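+// Both the inotify fd and the read end of an internal pipe are registered with
+// epoll, so writing to the pipe (see wake) can unblock wait() when the watcher
+// is shutting down.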
+func newFdPoller(fd int) (*fdPoller, error) { + var errno error + poller := emptyPoller(fd) + defer func() { + if errno != nil { + poller.close() + } + }() + poller.fd = fd + + // Create epoll fd + poller.epfd, errno = unix.EpollCreate1(unix.EPOLL_CLOEXEC) + if poller.epfd == -1 { + return nil, errno + } + // Create pipe; pipe[0] is the read end, pipe[1] the write end. + errno = unix.Pipe2(poller.pipe[:], unix.O_NONBLOCK|unix.O_CLOEXEC) + if errno != nil { + return nil, errno + } + + // Register inotify fd with epoll + event := unix.EpollEvent{ + Fd: int32(poller.fd), + Events: unix.EPOLLIN, + } + errno = unix.EpollCtl(poller.epfd, unix.EPOLL_CTL_ADD, poller.fd, &event) + if errno != nil { + return nil, errno + } + + // Register pipe fd with epoll + event = unix.EpollEvent{ + Fd: int32(poller.pipe[0]), + Events: unix.EPOLLIN, + } + errno = unix.EpollCtl(poller.epfd, unix.EPOLL_CTL_ADD, poller.pipe[0], &event) + if errno != nil { + return nil, errno + } + + return poller, nil +} + +// Wait using epoll. +// Returns true if something is ready to be read, +// false if there is not. +func (poller *fdPoller) wait() (bool, error) { + // 3 possible events per fd, and 2 fds, makes a maximum of 6 events. + // I don't know whether epoll_wait returns the number of events returned, + // or the total number of events ready. + // I decided to catch both by making the buffer one larger than the maximum. + events := make([]unix.EpollEvent, 7) + for { + n, errno := unix.EpollWait(poller.epfd, events, -1) + if n == -1 { + if errno == unix.EINTR { + continue + } + return false, errno + } + if n == 0 { + // If there are no events, try again. + continue + } + if n > 6 { + // This should never happen. More events were returned than should be possible. + return false, errors.New("epoll_wait returned more events than I know what to do with") + } + ready := events[:n] + epollhup := false + epollerr := false + epollin := false + for _, event := range ready { + if event.Fd == int32(poller.fd) { + if event.Events&unix.EPOLLHUP != 0 { + // This should not happen, but if it does, treat it as a wakeup. + epollhup = true + } + if event.Events&unix.EPOLLERR != 0 { + // If an error is waiting on the file descriptor, we should pretend + // something is ready to read, and let unix.Read pick up the error. + epollerr = true + } + if event.Events&unix.EPOLLIN != 0 { + // There is data to read. + epollin = true + } + } + if event.Fd == int32(poller.pipe[0]) { + if event.Events&unix.EPOLLHUP != 0 { + // Write pipe descriptor was closed, by us. This means we're closing down the + // watcher, and we should wake up. + } + if event.Events&unix.EPOLLERR != 0 { + // If an error is waiting on the pipe file descriptor. + // This is an absolute mystery, and should never ever happen. + return false, errors.New("Error on the pipe descriptor.") + } + if event.Events&unix.EPOLLIN != 0 { + // This is a regular wakeup, so we have to clear the buffer. + err := poller.clearWake() + if err != nil { + return false, err + } + } + } + } + + if epollhup || epollerr || epollin { + return true, nil + } + return false, nil + } +} + +// Close the write end of the poller. +func (poller *fdPoller) wake() error { + buf := make([]byte, 1) + n, errno := unix.Write(poller.pipe[1], buf) + if n == -1 { + if errno == unix.EAGAIN { + // Buffer is full, poller will wake. + return nil + } + return errno + } + return nil +} + +func (poller *fdPoller) clearWake() error { + // You have to be woken up a LOT in order to get to 100! 
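+	// Drain any wake-up bytes that have accumulated on the read end of the pipe.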
+ buf := make([]byte, 100) + n, errno := unix.Read(poller.pipe[0], buf) + if n == -1 { + if errno == unix.EAGAIN { + // Buffer is empty, someone else cleared our wake. + return nil + } + return errno + } + return nil +} + +// Close all poller file descriptors, but not the one passed to it. +func (poller *fdPoller) close() { + if poller.pipe[1] != -1 { + unix.Close(poller.pipe[1]) + } + if poller.pipe[0] != -1 { + unix.Close(poller.pipe[0]) + } + if poller.epfd != -1 { + unix.Close(poller.epfd) + } +} diff --git a/vendor/github.com/fsnotify/fsnotify/kqueue.go b/vendor/github.com/fsnotify/fsnotify/kqueue.go new file mode 100644 index 000000000..86e76a3d6 --- /dev/null +++ b/vendor/github.com/fsnotify/fsnotify/kqueue.go @@ -0,0 +1,521 @@ +// Copyright 2010 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build freebsd openbsd netbsd dragonfly darwin + +package fsnotify + +import ( + "errors" + "fmt" + "io/ioutil" + "os" + "path/filepath" + "sync" + "time" + + "golang.org/x/sys/unix" +) + +// Watcher watches a set of files, delivering events to a channel. +type Watcher struct { + Events chan Event + Errors chan error + done chan struct{} // Channel for sending a "quit message" to the reader goroutine + + kq int // File descriptor (as returned by the kqueue() syscall). + + mu sync.Mutex // Protects access to watcher data + watches map[string]int // Map of watched file descriptors (key: path). + externalWatches map[string]bool // Map of watches added by user of the library. + dirFlags map[string]uint32 // Map of watched directories to fflags used in kqueue. + paths map[int]pathInfo // Map file descriptors to path names for processing kqueue events. + fileExists map[string]bool // Keep track of if we know this file exists (to stop duplicate create events). + isClosed bool // Set to true when Close() is first called +} + +type pathInfo struct { + name string + isDir bool +} + +// NewWatcher establishes a new watcher with the underlying OS and begins waiting for events. +func NewWatcher() (*Watcher, error) { + kq, err := kqueue() + if err != nil { + return nil, err + } + + w := &Watcher{ + kq: kq, + watches: make(map[string]int), + dirFlags: make(map[string]uint32), + paths: make(map[int]pathInfo), + fileExists: make(map[string]bool), + externalWatches: make(map[string]bool), + Events: make(chan Event), + Errors: make(chan error), + done: make(chan struct{}), + } + + go w.readEvents() + return w, nil +} + +// Close removes all watches and closes the events channel. +func (w *Watcher) Close() error { + w.mu.Lock() + if w.isClosed { + w.mu.Unlock() + return nil + } + w.isClosed = true + + // copy paths to remove while locked + var pathsToRemove = make([]string, 0, len(w.watches)) + for name := range w.watches { + pathsToRemove = append(pathsToRemove, name) + } + w.mu.Unlock() + // unlock before calling Remove, which also locks + + for _, name := range pathsToRemove { + w.Remove(name) + } + + // send a "quit" message to the reader goroutine + close(w.done) + + return nil +} + +// Add starts watching the named file or directory (non-recursively). +func (w *Watcher) Add(name string) error { + w.mu.Lock() + w.externalWatches[name] = true + w.mu.Unlock() + _, err := w.addWatch(name, noteAllEvents) + return err +} + +// Remove stops watching the the named file or directory (non-recursively). 
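+// Removing a watched directory also removes the watches that were added internally
+// for the files inside it.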
+func (w *Watcher) Remove(name string) error { + name = filepath.Clean(name) + w.mu.Lock() + watchfd, ok := w.watches[name] + w.mu.Unlock() + if !ok { + return fmt.Errorf("can't remove non-existent kevent watch for: %s", name) + } + + const registerRemove = unix.EV_DELETE + if err := register(w.kq, []int{watchfd}, registerRemove, 0); err != nil { + return err + } + + unix.Close(watchfd) + + w.mu.Lock() + isDir := w.paths[watchfd].isDir + delete(w.watches, name) + delete(w.paths, watchfd) + delete(w.dirFlags, name) + w.mu.Unlock() + + // Find all watched paths that are in this directory that are not external. + if isDir { + var pathsToRemove []string + w.mu.Lock() + for _, path := range w.paths { + wdir, _ := filepath.Split(path.name) + if filepath.Clean(wdir) == name { + if !w.externalWatches[path.name] { + pathsToRemove = append(pathsToRemove, path.name) + } + } + } + w.mu.Unlock() + for _, name := range pathsToRemove { + // Since these are internal, not much sense in propagating error + // to the user, as that will just confuse them with an error about + // a path they did not explicitly watch themselves. + w.Remove(name) + } + } + + return nil +} + +// Watch all events (except NOTE_EXTEND, NOTE_LINK, NOTE_REVOKE) +const noteAllEvents = unix.NOTE_DELETE | unix.NOTE_WRITE | unix.NOTE_ATTRIB | unix.NOTE_RENAME + +// keventWaitTime to block on each read from kevent +var keventWaitTime = durationToTimespec(100 * time.Millisecond) + +// addWatch adds name to the watched file set. +// The flags are interpreted as described in kevent(2). +// Returns the real path to the file which was added, if any, which may be different from the one passed in the case of symlinks. +func (w *Watcher) addWatch(name string, flags uint32) (string, error) { + var isDir bool + // Make ./name and name equivalent + name = filepath.Clean(name) + + w.mu.Lock() + if w.isClosed { + w.mu.Unlock() + return "", errors.New("kevent instance already closed") + } + watchfd, alreadyWatching := w.watches[name] + // We already have a watch, but we can still override flags. + if alreadyWatching { + isDir = w.paths[watchfd].isDir + } + w.mu.Unlock() + + if !alreadyWatching { + fi, err := os.Lstat(name) + if err != nil { + return "", err + } + + // Don't watch sockets. + if fi.Mode()&os.ModeSocket == os.ModeSocket { + return "", nil + } + + // Don't watch named pipes. + if fi.Mode()&os.ModeNamedPipe == os.ModeNamedPipe { + return "", nil + } + + // Follow Symlinks + // Unfortunately, Linux can add bogus symlinks to watch list without + // issue, and Windows can't do symlinks period (AFAIK). To maintain + // consistency, we will act like everything is fine. There will simply + // be no file events for broken symlinks. + // Hence the returns of nil on errors. 
+ if fi.Mode()&os.ModeSymlink == os.ModeSymlink { + name, err = filepath.EvalSymlinks(name) + if err != nil { + return "", nil + } + + w.mu.Lock() + _, alreadyWatching = w.watches[name] + w.mu.Unlock() + + if alreadyWatching { + return name, nil + } + + fi, err = os.Lstat(name) + if err != nil { + return "", nil + } + } + + watchfd, err = unix.Open(name, openMode, 0700) + if watchfd == -1 { + return "", err + } + + isDir = fi.IsDir() + } + + const registerAdd = unix.EV_ADD | unix.EV_CLEAR | unix.EV_ENABLE + if err := register(w.kq, []int{watchfd}, registerAdd, flags); err != nil { + unix.Close(watchfd) + return "", err + } + + if !alreadyWatching { + w.mu.Lock() + w.watches[name] = watchfd + w.paths[watchfd] = pathInfo{name: name, isDir: isDir} + w.mu.Unlock() + } + + if isDir { + // Watch the directory if it has not been watched before, + // or if it was watched before, but perhaps only a NOTE_DELETE (watchDirectoryFiles) + w.mu.Lock() + + watchDir := (flags&unix.NOTE_WRITE) == unix.NOTE_WRITE && + (!alreadyWatching || (w.dirFlags[name]&unix.NOTE_WRITE) != unix.NOTE_WRITE) + // Store flags so this watch can be updated later + w.dirFlags[name] = flags + w.mu.Unlock() + + if watchDir { + if err := w.watchDirectoryFiles(name); err != nil { + return "", err + } + } + } + return name, nil +} + +// readEvents reads from kqueue and converts the received kevents into +// Event values that it sends down the Events channel. +func (w *Watcher) readEvents() { + eventBuffer := make([]unix.Kevent_t, 10) + +loop: + for { + // See if there is a message on the "done" channel + select { + case <-w.done: + break loop + default: + } + + // Get new events + kevents, err := read(w.kq, eventBuffer, &keventWaitTime) + // EINTR is okay, the syscall was interrupted before timeout expired. + if err != nil && err != unix.EINTR { + select { + case w.Errors <- err: + case <-w.done: + break loop + } + continue + } + + // Flush the events we received to the Events channel + for len(kevents) > 0 { + kevent := &kevents[0] + watchfd := int(kevent.Ident) + mask := uint32(kevent.Fflags) + w.mu.Lock() + path := w.paths[watchfd] + w.mu.Unlock() + event := newEvent(path.name, mask) + + if path.isDir && !(event.Op&Remove == Remove) { + // Double check to make sure the directory exists. This can happen when + // we do a rm -fr on a recursively watched folders and we receive a + // modification event first but the folder has been deleted and later + // receive the delete event + if _, err := os.Lstat(event.Name); os.IsNotExist(err) { + // mark is as delete event + event.Op |= Remove + } + } + + if event.Op&Rename == Rename || event.Op&Remove == Remove { + w.Remove(event.Name) + w.mu.Lock() + delete(w.fileExists, event.Name) + w.mu.Unlock() + } + + if path.isDir && event.Op&Write == Write && !(event.Op&Remove == Remove) { + w.sendDirectoryChangeEvents(event.Name) + } else { + // Send the event on the Events channel. + select { + case w.Events <- event: + case <-w.done: + break loop + } + } + + if event.Op&Remove == Remove { + // Look for a file that may have overwritten this. + // For example, mv f1 f2 will delete f2, then create f2. + if path.isDir { + fileDir := filepath.Clean(event.Name) + w.mu.Lock() + _, found := w.watches[fileDir] + w.mu.Unlock() + if found { + // make sure the directory exists before we watch for changes. 
When we + // do a recursive watch and perform rm -fr, the parent directory might + // have gone missing, ignore the missing directory and let the + // upcoming delete event remove the watch from the parent directory. + if _, err := os.Lstat(fileDir); err == nil { + w.sendDirectoryChangeEvents(fileDir) + } + } + } else { + filePath := filepath.Clean(event.Name) + if fileInfo, err := os.Lstat(filePath); err == nil { + w.sendFileCreatedEventIfNew(filePath, fileInfo) + } + } + } + + // Move to next event + kevents = kevents[1:] + } + } + + // cleanup + err := unix.Close(w.kq) + if err != nil { + // only way the previous loop breaks is if w.done was closed so we need to async send to w.Errors. + select { + case w.Errors <- err: + default: + } + } + close(w.Events) + close(w.Errors) +} + +// newEvent returns an platform-independent Event based on kqueue Fflags. +func newEvent(name string, mask uint32) Event { + e := Event{Name: name} + if mask&unix.NOTE_DELETE == unix.NOTE_DELETE { + e.Op |= Remove + } + if mask&unix.NOTE_WRITE == unix.NOTE_WRITE { + e.Op |= Write + } + if mask&unix.NOTE_RENAME == unix.NOTE_RENAME { + e.Op |= Rename + } + if mask&unix.NOTE_ATTRIB == unix.NOTE_ATTRIB { + e.Op |= Chmod + } + return e +} + +func newCreateEvent(name string) Event { + return Event{Name: name, Op: Create} +} + +// watchDirectoryFiles to mimic inotify when adding a watch on a directory +func (w *Watcher) watchDirectoryFiles(dirPath string) error { + // Get all files + files, err := ioutil.ReadDir(dirPath) + if err != nil { + return err + } + + for _, fileInfo := range files { + filePath := filepath.Join(dirPath, fileInfo.Name()) + filePath, err = w.internalWatch(filePath, fileInfo) + if err != nil { + return err + } + + w.mu.Lock() + w.fileExists[filePath] = true + w.mu.Unlock() + } + + return nil +} + +// sendDirectoryEvents searches the directory for newly created files +// and sends them over the event channel. This functionality is to have +// the BSD version of fsnotify match Linux inotify which provides a +// create event for files created in a watched directory. +func (w *Watcher) sendDirectoryChangeEvents(dirPath string) { + // Get all files + files, err := ioutil.ReadDir(dirPath) + if err != nil { + select { + case w.Errors <- err: + case <-w.done: + return + } + } + + // Search for new files + for _, fileInfo := range files { + filePath := filepath.Join(dirPath, fileInfo.Name()) + err := w.sendFileCreatedEventIfNew(filePath, fileInfo) + + if err != nil { + return + } + } +} + +// sendFileCreatedEvent sends a create event if the file isn't already being tracked. 
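+// It also adds a watch for the file so that subsequent events for it are reported,
+// and records it in fileExists.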
+func (w *Watcher) sendFileCreatedEventIfNew(filePath string, fileInfo os.FileInfo) (err error) { + w.mu.Lock() + _, doesExist := w.fileExists[filePath] + w.mu.Unlock() + if !doesExist { + // Send create event + select { + case w.Events <- newCreateEvent(filePath): + case <-w.done: + return + } + } + + // like watchDirectoryFiles (but without doing another ReadDir) + filePath, err = w.internalWatch(filePath, fileInfo) + if err != nil { + return err + } + + w.mu.Lock() + w.fileExists[filePath] = true + w.mu.Unlock() + + return nil +} + +func (w *Watcher) internalWatch(name string, fileInfo os.FileInfo) (string, error) { + if fileInfo.IsDir() { + // mimic Linux providing delete events for subdirectories + // but preserve the flags used if currently watching subdirectory + w.mu.Lock() + flags := w.dirFlags[name] + w.mu.Unlock() + + flags |= unix.NOTE_DELETE | unix.NOTE_RENAME + return w.addWatch(name, flags) + } + + // watch file to mimic Linux inotify + return w.addWatch(name, noteAllEvents) +} + +// kqueue creates a new kernel event queue and returns a descriptor. +func kqueue() (kq int, err error) { + kq, err = unix.Kqueue() + if kq == -1 { + return kq, err + } + return kq, nil +} + +// register events with the queue +func register(kq int, fds []int, flags int, fflags uint32) error { + changes := make([]unix.Kevent_t, len(fds)) + + for i, fd := range fds { + // SetKevent converts int to the platform-specific types: + unix.SetKevent(&changes[i], fd, unix.EVFILT_VNODE, flags) + changes[i].Fflags = fflags + } + + // register the events + success, err := unix.Kevent(kq, changes, nil, nil) + if success == -1 { + return err + } + return nil +} + +// read retrieves pending events, or waits until an event occurs. +// A timeout of nil blocks indefinitely, while 0 polls the queue. +func read(kq int, events []unix.Kevent_t, timeout *unix.Timespec) ([]unix.Kevent_t, error) { + n, err := unix.Kevent(kq, nil, events, timeout) + if err != nil { + return nil, err + } + return events[0:n], nil +} + +// durationToTimespec prepares a timeout value +func durationToTimespec(d time.Duration) unix.Timespec { + return unix.NsecToTimespec(d.Nanoseconds()) +} diff --git a/vendor/github.com/fsnotify/fsnotify/open_mode_bsd.go b/vendor/github.com/fsnotify/fsnotify/open_mode_bsd.go new file mode 100644 index 000000000..2306c4620 --- /dev/null +++ b/vendor/github.com/fsnotify/fsnotify/open_mode_bsd.go @@ -0,0 +1,11 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build freebsd openbsd netbsd dragonfly + +package fsnotify + +import "golang.org/x/sys/unix" + +const openMode = unix.O_NONBLOCK | unix.O_RDONLY | unix.O_CLOEXEC diff --git a/vendor/github.com/fsnotify/fsnotify/open_mode_darwin.go b/vendor/github.com/fsnotify/fsnotify/open_mode_darwin.go new file mode 100644 index 000000000..870c4d6d1 --- /dev/null +++ b/vendor/github.com/fsnotify/fsnotify/open_mode_darwin.go @@ -0,0 +1,12 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +// +build darwin + +package fsnotify + +import "golang.org/x/sys/unix" + +// note: this constant is not defined on BSD +const openMode = unix.O_EVTONLY | unix.O_CLOEXEC diff --git a/vendor/github.com/fsnotify/fsnotify/windows.go b/vendor/github.com/fsnotify/fsnotify/windows.go new file mode 100644 index 000000000..09436f31d --- /dev/null +++ b/vendor/github.com/fsnotify/fsnotify/windows.go @@ -0,0 +1,561 @@ +// Copyright 2011 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build windows + +package fsnotify + +import ( + "errors" + "fmt" + "os" + "path/filepath" + "runtime" + "sync" + "syscall" + "unsafe" +) + +// Watcher watches a set of files, delivering events to a channel. +type Watcher struct { + Events chan Event + Errors chan error + isClosed bool // Set to true when Close() is first called + mu sync.Mutex // Map access + port syscall.Handle // Handle to completion port + watches watchMap // Map of watches (key: i-number) + input chan *input // Inputs to the reader are sent on this channel + quit chan chan<- error +} + +// NewWatcher establishes a new watcher with the underlying OS and begins waiting for events. +func NewWatcher() (*Watcher, error) { + port, e := syscall.CreateIoCompletionPort(syscall.InvalidHandle, 0, 0, 0) + if e != nil { + return nil, os.NewSyscallError("CreateIoCompletionPort", e) + } + w := &Watcher{ + port: port, + watches: make(watchMap), + input: make(chan *input, 1), + Events: make(chan Event, 50), + Errors: make(chan error), + quit: make(chan chan<- error, 1), + } + go w.readEvents() + return w, nil +} + +// Close removes all watches and closes the events channel. +func (w *Watcher) Close() error { + if w.isClosed { + return nil + } + w.isClosed = true + + // Send "quit" message to the reader goroutine + ch := make(chan error) + w.quit <- ch + if err := w.wakeupReader(); err != nil { + return err + } + return <-ch +} + +// Add starts watching the named file or directory (non-recursively). +func (w *Watcher) Add(name string) error { + if w.isClosed { + return errors.New("watcher already closed") + } + in := &input{ + op: opAddWatch, + path: filepath.Clean(name), + flags: sysFSALLEVENTS, + reply: make(chan error), + } + w.input <- in + if err := w.wakeupReader(); err != nil { + return err + } + return <-in.reply +} + +// Remove stops watching the the named file or directory (non-recursively). 
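+// The request is forwarded to the I/O goroutine, which performs the actual removal.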
+func (w *Watcher) Remove(name string) error { + in := &input{ + op: opRemoveWatch, + path: filepath.Clean(name), + reply: make(chan error), + } + w.input <- in + if err := w.wakeupReader(); err != nil { + return err + } + return <-in.reply +} + +const ( + // Options for AddWatch + sysFSONESHOT = 0x80000000 + sysFSONLYDIR = 0x1000000 + + // Events + sysFSACCESS = 0x1 + sysFSALLEVENTS = 0xfff + sysFSATTRIB = 0x4 + sysFSCLOSE = 0x18 + sysFSCREATE = 0x100 + sysFSDELETE = 0x200 + sysFSDELETESELF = 0x400 + sysFSMODIFY = 0x2 + sysFSMOVE = 0xc0 + sysFSMOVEDFROM = 0x40 + sysFSMOVEDTO = 0x80 + sysFSMOVESELF = 0x800 + + // Special events + sysFSIGNORED = 0x8000 + sysFSQOVERFLOW = 0x4000 +) + +func newEvent(name string, mask uint32) Event { + e := Event{Name: name} + if mask&sysFSCREATE == sysFSCREATE || mask&sysFSMOVEDTO == sysFSMOVEDTO { + e.Op |= Create + } + if mask&sysFSDELETE == sysFSDELETE || mask&sysFSDELETESELF == sysFSDELETESELF { + e.Op |= Remove + } + if mask&sysFSMODIFY == sysFSMODIFY { + e.Op |= Write + } + if mask&sysFSMOVE == sysFSMOVE || mask&sysFSMOVESELF == sysFSMOVESELF || mask&sysFSMOVEDFROM == sysFSMOVEDFROM { + e.Op |= Rename + } + if mask&sysFSATTRIB == sysFSATTRIB { + e.Op |= Chmod + } + return e +} + +const ( + opAddWatch = iota + opRemoveWatch +) + +const ( + provisional uint64 = 1 << (32 + iota) +) + +type input struct { + op int + path string + flags uint32 + reply chan error +} + +type inode struct { + handle syscall.Handle + volume uint32 + index uint64 +} + +type watch struct { + ov syscall.Overlapped + ino *inode // i-number + path string // Directory path + mask uint64 // Directory itself is being watched with these notify flags + names map[string]uint64 // Map of names being watched and their notify flags + rename string // Remembers the old name while renaming a file + buf [4096]byte +} + +type indexMap map[uint64]*watch +type watchMap map[uint32]indexMap + +func (w *Watcher) wakeupReader() error { + e := syscall.PostQueuedCompletionStatus(w.port, 0, 0, nil) + if e != nil { + return os.NewSyscallError("PostQueuedCompletionStatus", e) + } + return nil +} + +func getDir(pathname string) (dir string, err error) { + attr, e := syscall.GetFileAttributes(syscall.StringToUTF16Ptr(pathname)) + if e != nil { + return "", os.NewSyscallError("GetFileAttributes", e) + } + if attr&syscall.FILE_ATTRIBUTE_DIRECTORY != 0 { + dir = pathname + } else { + dir, _ = filepath.Split(pathname) + dir = filepath.Clean(dir) + } + return +} + +func getIno(path string) (ino *inode, err error) { + h, e := syscall.CreateFile(syscall.StringToUTF16Ptr(path), + syscall.FILE_LIST_DIRECTORY, + syscall.FILE_SHARE_READ|syscall.FILE_SHARE_WRITE|syscall.FILE_SHARE_DELETE, + nil, syscall.OPEN_EXISTING, + syscall.FILE_FLAG_BACKUP_SEMANTICS|syscall.FILE_FLAG_OVERLAPPED, 0) + if e != nil { + return nil, os.NewSyscallError("CreateFile", e) + } + var fi syscall.ByHandleFileInformation + if e = syscall.GetFileInformationByHandle(h, &fi); e != nil { + syscall.CloseHandle(h) + return nil, os.NewSyscallError("GetFileInformationByHandle", e) + } + ino = &inode{ + handle: h, + volume: fi.VolumeSerialNumber, + index: uint64(fi.FileIndexHigh)<<32 | uint64(fi.FileIndexLow), + } + return ino, nil +} + +// Must run within the I/O thread. +func (m watchMap) get(ino *inode) *watch { + if i := m[ino.volume]; i != nil { + return i[ino.index] + } + return nil +} + +// Must run within the I/O thread. 
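+// set stores the watch keyed by the directory's volume serial number and file index.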
+func (m watchMap) set(ino *inode, watch *watch) { + i := m[ino.volume] + if i == nil { + i = make(indexMap) + m[ino.volume] = i + } + i[ino.index] = watch +} + +// Must run within the I/O thread. +func (w *Watcher) addWatch(pathname string, flags uint64) error { + dir, err := getDir(pathname) + if err != nil { + return err + } + if flags&sysFSONLYDIR != 0 && pathname != dir { + return nil + } + ino, err := getIno(dir) + if err != nil { + return err + } + w.mu.Lock() + watchEntry := w.watches.get(ino) + w.mu.Unlock() + if watchEntry == nil { + if _, e := syscall.CreateIoCompletionPort(ino.handle, w.port, 0, 0); e != nil { + syscall.CloseHandle(ino.handle) + return os.NewSyscallError("CreateIoCompletionPort", e) + } + watchEntry = &watch{ + ino: ino, + path: dir, + names: make(map[string]uint64), + } + w.mu.Lock() + w.watches.set(ino, watchEntry) + w.mu.Unlock() + flags |= provisional + } else { + syscall.CloseHandle(ino.handle) + } + if pathname == dir { + watchEntry.mask |= flags + } else { + watchEntry.names[filepath.Base(pathname)] |= flags + } + if err = w.startRead(watchEntry); err != nil { + return err + } + if pathname == dir { + watchEntry.mask &= ^provisional + } else { + watchEntry.names[filepath.Base(pathname)] &= ^provisional + } + return nil +} + +// Must run within the I/O thread. +func (w *Watcher) remWatch(pathname string) error { + dir, err := getDir(pathname) + if err != nil { + return err + } + ino, err := getIno(dir) + if err != nil { + return err + } + w.mu.Lock() + watch := w.watches.get(ino) + w.mu.Unlock() + if watch == nil { + return fmt.Errorf("can't remove non-existent watch for: %s", pathname) + } + if pathname == dir { + w.sendEvent(watch.path, watch.mask&sysFSIGNORED) + watch.mask = 0 + } else { + name := filepath.Base(pathname) + w.sendEvent(filepath.Join(watch.path, name), watch.names[name]&sysFSIGNORED) + delete(watch.names, name) + } + return w.startRead(watch) +} + +// Must run within the I/O thread. +func (w *Watcher) deleteWatch(watch *watch) { + for name, mask := range watch.names { + if mask&provisional == 0 { + w.sendEvent(filepath.Join(watch.path, name), mask&sysFSIGNORED) + } + delete(watch.names, name) + } + if watch.mask != 0 { + if watch.mask&provisional == 0 { + w.sendEvent(watch.path, watch.mask&sysFSIGNORED) + } + watch.mask = 0 + } +} + +// Must run within the I/O thread. 
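+// startRead cancels any in-flight read on the directory handle and issues a new
+// ReadDirectoryChanges call; if no notify flags remain, it instead closes the
+// handle and forgets the watch.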
+func (w *Watcher) startRead(watch *watch) error { + if e := syscall.CancelIo(watch.ino.handle); e != nil { + w.Errors <- os.NewSyscallError("CancelIo", e) + w.deleteWatch(watch) + } + mask := toWindowsFlags(watch.mask) + for _, m := range watch.names { + mask |= toWindowsFlags(m) + } + if mask == 0 { + if e := syscall.CloseHandle(watch.ino.handle); e != nil { + w.Errors <- os.NewSyscallError("CloseHandle", e) + } + w.mu.Lock() + delete(w.watches[watch.ino.volume], watch.ino.index) + w.mu.Unlock() + return nil + } + e := syscall.ReadDirectoryChanges(watch.ino.handle, &watch.buf[0], + uint32(unsafe.Sizeof(watch.buf)), false, mask, nil, &watch.ov, 0) + if e != nil { + err := os.NewSyscallError("ReadDirectoryChanges", e) + if e == syscall.ERROR_ACCESS_DENIED && watch.mask&provisional == 0 { + // Watched directory was probably removed + if w.sendEvent(watch.path, watch.mask&sysFSDELETESELF) { + if watch.mask&sysFSONESHOT != 0 { + watch.mask = 0 + } + } + err = nil + } + w.deleteWatch(watch) + w.startRead(watch) + return err + } + return nil +} + +// readEvents reads from the I/O completion port, converts the +// received events into Event objects and sends them via the Events channel. +// Entry point to the I/O thread. +func (w *Watcher) readEvents() { + var ( + n, key uint32 + ov *syscall.Overlapped + ) + runtime.LockOSThread() + + for { + e := syscall.GetQueuedCompletionStatus(w.port, &n, &key, &ov, syscall.INFINITE) + watch := (*watch)(unsafe.Pointer(ov)) + + if watch == nil { + select { + case ch := <-w.quit: + w.mu.Lock() + var indexes []indexMap + for _, index := range w.watches { + indexes = append(indexes, index) + } + w.mu.Unlock() + for _, index := range indexes { + for _, watch := range index { + w.deleteWatch(watch) + w.startRead(watch) + } + } + var err error + if e := syscall.CloseHandle(w.port); e != nil { + err = os.NewSyscallError("CloseHandle", e) + } + close(w.Events) + close(w.Errors) + ch <- err + return + case in := <-w.input: + switch in.op { + case opAddWatch: + in.reply <- w.addWatch(in.path, uint64(in.flags)) + case opRemoveWatch: + in.reply <- w.remWatch(in.path) + } + default: + } + continue + } + + switch e { + case syscall.ERROR_MORE_DATA: + if watch == nil { + w.Errors <- errors.New("ERROR_MORE_DATA has unexpectedly null lpOverlapped buffer") + } else { + // The i/o succeeded but the buffer is full. + // In theory we should be building up a full packet. + // In practice we can get away with just carrying on. 
+ n = uint32(unsafe.Sizeof(watch.buf)) + } + case syscall.ERROR_ACCESS_DENIED: + // Watched directory was probably removed + w.sendEvent(watch.path, watch.mask&sysFSDELETESELF) + w.deleteWatch(watch) + w.startRead(watch) + continue + case syscall.ERROR_OPERATION_ABORTED: + // CancelIo was called on this handle + continue + default: + w.Errors <- os.NewSyscallError("GetQueuedCompletionPort", e) + continue + case nil: + } + + var offset uint32 + for { + if n == 0 { + w.Events <- newEvent("", sysFSQOVERFLOW) + w.Errors <- errors.New("short read in readEvents()") + break + } + + // Point "raw" to the event in the buffer + raw := (*syscall.FileNotifyInformation)(unsafe.Pointer(&watch.buf[offset])) + buf := (*[syscall.MAX_PATH]uint16)(unsafe.Pointer(&raw.FileName)) + name := syscall.UTF16ToString(buf[:raw.FileNameLength/2]) + fullname := filepath.Join(watch.path, name) + + var mask uint64 + switch raw.Action { + case syscall.FILE_ACTION_REMOVED: + mask = sysFSDELETESELF + case syscall.FILE_ACTION_MODIFIED: + mask = sysFSMODIFY + case syscall.FILE_ACTION_RENAMED_OLD_NAME: + watch.rename = name + case syscall.FILE_ACTION_RENAMED_NEW_NAME: + if watch.names[watch.rename] != 0 { + watch.names[name] |= watch.names[watch.rename] + delete(watch.names, watch.rename) + mask = sysFSMOVESELF + } + } + + sendNameEvent := func() { + if w.sendEvent(fullname, watch.names[name]&mask) { + if watch.names[name]&sysFSONESHOT != 0 { + delete(watch.names, name) + } + } + } + if raw.Action != syscall.FILE_ACTION_RENAMED_NEW_NAME { + sendNameEvent() + } + if raw.Action == syscall.FILE_ACTION_REMOVED { + w.sendEvent(fullname, watch.names[name]&sysFSIGNORED) + delete(watch.names, name) + } + if w.sendEvent(fullname, watch.mask&toFSnotifyFlags(raw.Action)) { + if watch.mask&sysFSONESHOT != 0 { + watch.mask = 0 + } + } + if raw.Action == syscall.FILE_ACTION_RENAMED_NEW_NAME { + fullname = filepath.Join(watch.path, watch.rename) + sendNameEvent() + } + + // Move to the next event in the buffer + if raw.NextEntryOffset == 0 { + break + } + offset += raw.NextEntryOffset + + // Error! 
+ if offset >= n { + w.Errors <- errors.New("Windows system assumed buffer larger than it is, events have likely been missed.") + break + } + } + + if err := w.startRead(watch); err != nil { + w.Errors <- err + } + } +} + +func (w *Watcher) sendEvent(name string, mask uint64) bool { + if mask == 0 { + return false + } + event := newEvent(name, uint32(mask)) + select { + case ch := <-w.quit: + w.quit <- ch + case w.Events <- event: + } + return true +} + +func toWindowsFlags(mask uint64) uint32 { + var m uint32 + if mask&sysFSACCESS != 0 { + m |= syscall.FILE_NOTIFY_CHANGE_LAST_ACCESS + } + if mask&sysFSMODIFY != 0 { + m |= syscall.FILE_NOTIFY_CHANGE_LAST_WRITE + } + if mask&sysFSATTRIB != 0 { + m |= syscall.FILE_NOTIFY_CHANGE_ATTRIBUTES + } + if mask&(sysFSMOVE|sysFSCREATE|sysFSDELETE) != 0 { + m |= syscall.FILE_NOTIFY_CHANGE_FILE_NAME | syscall.FILE_NOTIFY_CHANGE_DIR_NAME + } + return m +} + +func toFSnotifyFlags(action uint32) uint64 { + switch action { + case syscall.FILE_ACTION_ADDED: + return sysFSCREATE + case syscall.FILE_ACTION_REMOVED: + return sysFSDELETE + case syscall.FILE_ACTION_MODIFIED: + return sysFSMODIFY + case syscall.FILE_ACTION_RENAMED_OLD_NAME: + return sysFSMOVEDFROM + case syscall.FILE_ACTION_RENAMED_NEW_NAME: + return sysFSMOVEDTO + } + return 0 +} diff --git a/vendor/github.com/fzipp/gocyclo/CHANGELOG.md b/vendor/github.com/fzipp/gocyclo/CHANGELOG.md new file mode 100644 index 000000000..3959a62a5 --- /dev/null +++ b/vendor/github.com/fzipp/gocyclo/CHANGELOG.md @@ -0,0 +1,38 @@ +# Changelog +All notable changes to this project will be documented in this file. + +The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), +and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). + +## [0.3.1] +### Added +- Test coverage + +### Fixed +- Fix cyclomatic complexity for function literals (base complexity of 1 was missing) + +## [0.3.0] - 2020-10-17 +### Added +- New `-avg-short` and `-total-short` options for printing average and total cyclomatic complexities without label +- Export the `AnalyzeASTFile` function in package API +- Doc comments for exported functions and types + +### Fixed +- Ignore `default` cases + +## [0.2.0] - 2020-10-17 +### Added +- Support for gocyclo as a package +- Support for ignoring of individual functions via a new `gocyclo:ignore` directive +- New `-total` option to compute total cyclomatic complexity +- New `-ignore` option to ignore files matching a regular expression +- Analysis of function literals at declaration level + +### Changed +- Breaking: installation changed to `go get github.com/fzipp/gocyclo/cmd/gocyclo` + +## [0.1.0] - 2020-10-17 + +### Added +- `go.mod` file; beginning of versioning + diff --git a/vendor/github.com/fzipp/gocyclo/CONTRIBUTORS b/vendor/github.com/fzipp/gocyclo/CONTRIBUTORS new file mode 100644 index 000000000..1c09f1a06 --- /dev/null +++ b/vendor/github.com/fzipp/gocyclo/CONTRIBUTORS @@ -0,0 +1,7 @@ +# Names should be added to this file like so: +# Name + +# Please keep the list sorted. + +Frederik Zipp +Harshavardhana diff --git a/vendor/github.com/fzipp/gocyclo/LICENSE b/vendor/github.com/fzipp/gocyclo/LICENSE new file mode 100644 index 000000000..45f88d6cb --- /dev/null +++ b/vendor/github.com/fzipp/gocyclo/LICENSE @@ -0,0 +1,27 @@ +Copyright (c) 2013 Frederik Zipp. All rights reserved. 
+ +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of the copyright owner nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/github.com/fzipp/gocyclo/README.md b/vendor/github.com/fzipp/gocyclo/README.md new file mode 100644 index 000000000..f1056934c --- /dev/null +++ b/vendor/github.com/fzipp/gocyclo/README.md @@ -0,0 +1,107 @@ +# gocyclo + +[![PkgGoDev](https://pkg.go.dev/badge/github.com/fzipp/gocyclo)](https://pkg.go.dev/github.com/fzipp/gocyclo) +[![Go Report Card](https://goreportcard.com/badge/github.com/fzipp/gocyclo)](https://goreportcard.com/report/github.com/fzipp/gocyclo) + +Gocyclo calculates +[cyclomatic complexities](https://en.wikipedia.org/wiki/Cyclomatic_complexity) +of functions in Go source code. + +Cyclomatic complexity is a +[code quality metric](https://en.wikipedia.org/wiki/Software_metric) +which can be used to identify code that needs refactoring. +It measures the number of linearly independent paths through a function's +source code. + +The cyclomatic complexity of a function is calculated according to the +following rules: + +``` + 1 is the base complexity of a function ++1 for each 'if', 'for', 'case', '&&' or '||' +``` + +A function with a higher cyclomatic complexity requires more test cases to +cover all possible paths and is potentially harder to understand. The +complexity can be reduced by applying common refactoring techniques that lead +to smaller functions. + +## Installation + +To install the `gocyclo` command, run + +``` +$ go get github.com/fzipp/gocyclo/cmd/gocyclo +``` + +and put the resulting binary in one of your PATH directories if +`$GOPATH/bin` isn't already in your PATH. + +## Usage + +``` +Calculate cyclomatic complexities of Go functions. +Usage: + gocyclo [flags] ... 
+ +Flags: + -over N show functions with complexity > N only and + return exit code 1 if the set is non-empty + -top N show the top N most complex functions only + -avg, -avg-short show the average complexity over all functions; + the short option prints the value without a label + -total, -total-short show the total complexity for all functions; + the short option prints the value without a label + -ignore REGEX exclude files matching the given regular expression + +The output fields for each line are: + +``` + +## Examples + +``` +$ gocyclo . +$ gocyclo main.go +$ gocyclo -top 10 src/ +$ gocyclo -over 25 docker +$ gocyclo -avg . +$ gocyclo -top 20 -ignore "_test|Godeps|vendor/" . +$ gocyclo -over 3 -avg gocyclo/ +``` + +Example output: + +``` +9 gocyclo (*complexityVisitor).Visit complexity.go:30:1 +8 main main cmd/gocyclo/main.go:53:1 +7 gocyclo (*fileAnalyzer).analyzeDecl analyze.go:96:1 +4 gocyclo Analyze analyze.go:24:1 +4 gocyclo parseDirectives directives.go:27:1 +4 gocyclo (Stats).SortAndFilter stats.go:52:1 +Average: 2.72 +``` + +Note that the average is calculated over all analyzed functions, +not just the printed ones. + +### Ignoring individual functions + +Individual functions can be ignored with a `gocyclo:ignore` directive: + +``` +//gocyclo:ignore +func f1() { + // ... +} + +//gocyclo:ignore +var f2 = func() { + // ... +} +``` + +## License + +This project is free and open source software licensed under the +[BSD 3-Clause License](LICENSE). diff --git a/vendor/github.com/fzipp/gocyclo/analyze.go b/vendor/github.com/fzipp/gocyclo/analyze.go new file mode 100644 index 000000000..c053e83e6 --- /dev/null +++ b/vendor/github.com/fzipp/gocyclo/analyze.go @@ -0,0 +1,151 @@ +// Copyright 2020 Frederik Zipp. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gocyclo + +import ( + "fmt" + "go/ast" + "go/parser" + "go/token" + "log" + "os" + "path/filepath" + "regexp" + "strings" +) + +// Analyze calculates the cyclomatic complexities of the functions and methods +// in the Go source code files in the given paths. If a path is a directory +// all Go files under that directory are analyzed recursively. +// Files with paths matching the 'ignore' regular expressions are skipped. +// The 'ignore' parameter can be nil, meaning that no files are skipped. 
+func Analyze(paths []string, ignore *regexp.Regexp) Stats { + var stats Stats + for _, path := range paths { + info, err := os.Stat(path) + if err != nil { + log.Printf("could not get file info for path %q: %s\n", path, err) + continue + } + if info.IsDir() { + stats = analyzeDir(path, ignore, stats) + } else { + stats = analyzeFile(path, ignore, stats) + } + } + return stats +} + +func analyzeDir(dirname string, ignore *regexp.Regexp, stats Stats) Stats { + filepath.Walk(dirname, func(path string, info os.FileInfo, err error) error { + if err == nil && isGoFile(info) { + stats = analyzeFile(path, ignore, stats) + } + return err + }) + return stats +} + +func isGoFile(f os.FileInfo) bool { + return !f.IsDir() && strings.HasSuffix(f.Name(), ".go") +} + +func analyzeFile(path string, ignore *regexp.Regexp, stats Stats) Stats { + if isIgnored(path, ignore) { + return stats + } + fset := token.NewFileSet() + f, err := parser.ParseFile(fset, path, nil, parser.ParseComments) + if err != nil { + log.Fatal(err) + } + return AnalyzeASTFile(f, fset, stats) +} + +func isIgnored(path string, ignore *regexp.Regexp) bool { + return ignore != nil && ignore.MatchString(path) +} + +// AnalyzeASTFile calculates the cyclomatic complexities of the functions +// and methods in the abstract syntax tree (AST) of a parsed Go file and +// appends the results to the given Stats slice. +func AnalyzeASTFile(f *ast.File, fs *token.FileSet, s Stats) Stats { + analyzer := &fileAnalyzer{ + file: f, + fileSet: fs, + stats: s, + } + return analyzer.analyze() +} + +type fileAnalyzer struct { + file *ast.File + fileSet *token.FileSet + stats Stats +} + +func (a *fileAnalyzer) analyze() Stats { + for _, decl := range a.file.Decls { + a.analyzeDecl(decl) + } + return a.stats +} + +func (a *fileAnalyzer) analyzeDecl(d ast.Decl) { + switch decl := d.(type) { + case *ast.FuncDecl: + a.addStatIfNotIgnored(decl, funcName(decl), decl.Doc) + case *ast.GenDecl: + for _, spec := range decl.Specs { + valueSpec, ok := spec.(*ast.ValueSpec) + if !ok { + continue + } + for _, value := range valueSpec.Values { + funcLit, ok := value.(*ast.FuncLit) + if !ok { + continue + } + a.addStatIfNotIgnored(funcLit, valueSpec.Names[0].Name, decl.Doc) + } + } + } +} + +func (a *fileAnalyzer) addStatIfNotIgnored(node ast.Node, funcName string, doc *ast.CommentGroup) { + if parseDirectives(doc).HasIgnore() { + return + } + a.stats = append(a.stats, Stat{ + PkgName: a.file.Name.Name, + FuncName: funcName, + Complexity: Complexity(node), + Pos: a.fileSet.Position(node.Pos()), + }) +} + +// funcName returns the name representation of a function or method: +// "(Type).Name" for methods or simply "Name" for functions. +func funcName(fn *ast.FuncDecl) string { + if fn.Recv != nil { + if fn.Recv.NumFields() > 0 { + typ := fn.Recv.List[0].Type + return fmt.Sprintf("(%s).%s", recvString(typ), fn.Name) + } + } + return fn.Name.Name +} + +// recvString returns a string representation of recv of the +// form "T", "*T", or "BADRECV" (if not a proper receiver type). +func recvString(recv ast.Expr) string { + switch t := recv.(type) { + case *ast.Ident: + return t.Name + case *ast.StarExpr: + return "*" + recvString(t.X) + } + return "BADRECV" +} diff --git a/vendor/github.com/fzipp/gocyclo/complexity.go b/vendor/github.com/fzipp/gocyclo/complexity.go new file mode 100644 index 000000000..65f5077e8 --- /dev/null +++ b/vendor/github.com/fzipp/gocyclo/complexity.go @@ -0,0 +1,48 @@ +// Copyright 2020 Frederik Zipp. All rights reserved. 
+// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package gocyclo calculates the cyclomatic complexities of functions and +// methods in Go source code. +package gocyclo + +import ( + "go/ast" + "go/token" +) + +// Complexity calculates the cyclomatic complexity of a function. +// The 'fn' node is either a *ast.FuncDecl or a *ast.FuncLit. +func Complexity(fn ast.Node) int { + v := complexityVisitor{ + complexity: 1, + } + ast.Walk(&v, fn) + return v.complexity +} + +type complexityVisitor struct { + // complexity is the cyclomatic complexity + complexity int +} + +// Visit implements the ast.Visitor interface. +func (v *complexityVisitor) Visit(n ast.Node) ast.Visitor { + switch n := n.(type) { + case *ast.IfStmt, *ast.ForStmt, *ast.RangeStmt: + v.complexity++ + case *ast.CaseClause: + if n.List != nil { // ignore default case + v.complexity++ + } + case *ast.CommClause: + if n.Comm != nil { // ignore default case + v.complexity++ + } + case *ast.BinaryExpr: + if n.Op == token.LAND || n.Op == token.LOR { + v.complexity++ + } + } + return v +} diff --git a/vendor/github.com/fzipp/gocyclo/directives.go b/vendor/github.com/fzipp/gocyclo/directives.go new file mode 100644 index 000000000..b4ee3c448 --- /dev/null +++ b/vendor/github.com/fzipp/gocyclo/directives.go @@ -0,0 +1,39 @@ +// Copyright 2020 Frederik Zipp. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gocyclo + +import ( + "go/ast" + "strings" +) + +type directives []string + +func (ds directives) HasIgnore() bool { + return ds.isPresent("ignore") +} + +func (ds directives) isPresent(name string) bool { + for _, d := range ds { + if d == name { + return true + } + } + return false +} + +func parseDirectives(doc *ast.CommentGroup) directives { + if doc == nil { + return directives{} + } + const prefix = "//gocyclo:" + var ds directives + for _, comment := range doc.List { + if strings.HasPrefix(comment.Text, prefix) { + ds = append(ds, strings.TrimSpace(strings.TrimPrefix(comment.Text, prefix))) + } + } + return ds +} diff --git a/vendor/github.com/fzipp/gocyclo/go.mod b/vendor/github.com/fzipp/gocyclo/go.mod new file mode 100644 index 000000000..c80982786 --- /dev/null +++ b/vendor/github.com/fzipp/gocyclo/go.mod @@ -0,0 +1,3 @@ +module github.com/fzipp/gocyclo + +go 1.15 diff --git a/vendor/github.com/fzipp/gocyclo/stats.go b/vendor/github.com/fzipp/gocyclo/stats.go new file mode 100644 index 000000000..90f5eefc2 --- /dev/null +++ b/vendor/github.com/fzipp/gocyclo/stats.go @@ -0,0 +1,73 @@ +// Copyright 2020 Frederik Zipp. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gocyclo + +import ( + "fmt" + "go/token" + "sort" +) + +// Stat holds the cyclomatic complexity of a function, along with its package +// and and function name and its position in the source code. +type Stat struct { + PkgName string + FuncName string + Complexity int + Pos token.Position +} + +// String formats the cyclomatic complexity information of a function in +// the following format: " " +func (s Stat) String() string { + return fmt.Sprintf("%d %s %s %s", s.Complexity, s.PkgName, s.FuncName, s.Pos) +} + +// Stats hold the cyclomatic complexities of many functions. +type Stats []Stat + +// AverageComplexity calculates the average cyclomatic complexity of the +// cyclomatic complexities in s. 
+func (s Stats) AverageComplexity() float64 { + return float64(s.TotalComplexity()) / float64(len(s)) +} + +// TotalComplexity calculates the total sum of all cyclomatic +// complexities in s. +func (s Stats) TotalComplexity() uint64 { + total := uint64(0) + for _, stat := range s { + total += uint64(stat.Complexity) + } + return total +} + +// SortAndFilter sorts the cyclomatic complexities in s in descending order +// and returns a slice of s limited to the 'top' N entries with a cyclomatic +// complexity greater than 'over'. If 'top' is negative, i.e. -1, it does +// not limit the result. If 'over' is <= 0 it does not limit the result either, +// because a function has a base cyclomatic complexity of at least 1. +func (s Stats) SortAndFilter(top, over int) Stats { + result := make(Stats, len(s)) + copy(result, s) + sort.Sort(byComplexityDesc(result)) + for i, stat := range result { + if i == top { + return result[:i] + } + if stat.Complexity <= over { + return result[:i] + } + } + return result +} + +type byComplexityDesc Stats + +func (s byComplexityDesc) Len() int { return len(s) } +func (s byComplexityDesc) Swap(i, j int) { s[i], s[j] = s[j], s[i] } +func (s byComplexityDesc) Less(i, j int) bool { + return s[i].Complexity >= s[j].Complexity +} diff --git a/vendor/github.com/go-critic/go-critic/LICENSE b/vendor/github.com/go-critic/go-critic/LICENSE new file mode 100644 index 000000000..b944b4bbd --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/LICENSE @@ -0,0 +1,22 @@ +MIT License + +Copyright (c) 2018-2019 Alekseev Artem +Copyright (c) 2018-2019 Ravil Bikbulatov + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
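For context on how the pieces of the vendored gocyclo package fit together (the README above documents only the CLI), here is a minimal, illustrative sketch of the programmatic API added in analyze.go and stats.go. It is not part of this patch or of golangci-lint's wiring; the paths, thresholds, and ignore pattern are made-up values for the example.

```go
// Illustrative only: a tiny program built against the gocyclo package API
// vendored above (Analyze, Stats.SortAndFilter, Stats.AverageComplexity).
package main

import (
	"fmt"
	"regexp"

	"github.com/fzipp/gocyclo"
)

func main() {
	// Skip tests and vendored code, mirroring the README's -ignore flag.
	ignore := regexp.MustCompile(`_test\.go$|vendor/`)

	// Analyze walks directories recursively and returns one Stat per
	// function or function literal.
	stats := gocyclo.Analyze([]string{"."}, ignore)

	// Keep the 10 most complex functions with complexity greater than 15,
	// roughly the programmatic equivalent of `gocyclo -top 10 -over 15 .`.
	for _, stat := range stats.SortAndFilter(10, 15) {
		// Stat.String prints: complexity, package, function, position.
		fmt.Println(stat)
	}
	fmt.Printf("Average: %.2f\n", stats.AverageComplexity())
}
```

Per the doc comment on SortAndFilter in stats.go, passing a negative top and a non-positive over (e.g. `stats.SortAndFilter(-1, 0)`) disables both limits and simply returns all functions sorted by descending complexity.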
diff --git a/vendor/github.com/go-critic/go-critic/checkers/appendAssign_checker.go b/vendor/github.com/go-critic/go-critic/checkers/appendAssign_checker.go new file mode 100644 index 000000000..a9324dd02 --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/appendAssign_checker.go @@ -0,0 +1,102 @@ +package checkers + +import ( + "go/ast" + "go/token" + "go/types" + + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" + "github.com/go-toolsmith/astequal" + "github.com/go-toolsmith/astp" + "golang.org/x/tools/go/ast/astutil" +) + +func init() { + var info linter.CheckerInfo + info.Name = "appendAssign" + info.Tags = []string{"diagnostic"} + info.Summary = "Detects suspicious append result assignments" + info.Before = ` +p.positives = append(p.negatives, x) +p.negatives = append(p.negatives, y)` + info.After = ` +p.positives = append(p.positives, x) +p.negatives = append(p.negatives, y)` + + collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { + return astwalk.WalkerForStmt(&appendAssignChecker{ctx: ctx}), nil + }) +} + +type appendAssignChecker struct { + astwalk.WalkHandler + ctx *linter.CheckerContext +} + +func (c *appendAssignChecker) VisitStmt(stmt ast.Stmt) { + assign, ok := stmt.(*ast.AssignStmt) + if !ok || (assign.Tok != token.ASSIGN && assign.Tok != token.DEFINE) || len(assign.Lhs) != len(assign.Rhs) { + return + } + for i, rhs := range assign.Rhs { + call, ok := rhs.(*ast.CallExpr) + if !ok || qualifiedName(call.Fun) != "append" { + continue + } + c.checkAppend(assign.Lhs[i], call) + } +} + +func (c *appendAssignChecker) checkAppend(x ast.Expr, call *ast.CallExpr) { + if call.Ellipsis != token.NoPos { + // Try to detect `xs = append(ys, xs...)` idiom. + for _, arg := range call.Args[1:] { + y := arg + if arg, ok := arg.(*ast.SliceExpr); ok { + y = arg.X + } + if astequal.Expr(x, y) { + return + } + } + } + + switch x := x.(type) { + case *ast.Ident: + if x.Name == "_" { + return // Don't check assignments to blank ident + } + case *ast.IndexExpr: + if !astp.IsIndexExpr(call.Args[0]) { + // Most likely `m[k] = append(x, ...)` + // pattern, where x was retrieved by m[k] before. + // + // TODO: it's possible to record such map/slice reads + // and check whether it was done before this call. + // But for now, treat it like x belongs to m[k]. + return + } + } + + switch y := call.Args[0].(type) { + case *ast.SliceExpr: + if _, ok := c.ctx.TypeOf(y.X).(*types.Array); ok { + // Arrays are frequently used as scratch storages. 
+ return + } + c.matchSlices(call, x, y.X) + case *ast.IndexExpr, *ast.Ident, *ast.SelectorExpr: + c.matchSlices(call, x, y) + } +} + +func (c *appendAssignChecker) matchSlices(cause ast.Node, x, y ast.Expr) { + if !astequal.Expr(x, astutil.Unparen(y)) { + c.warn(cause) + } +} + +func (c *appendAssignChecker) warn(cause ast.Node) { + c.ctx.Warn(cause, "append result not assigned to the same slice") +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/appendCombine_checker.go b/vendor/github.com/go-critic/go-critic/checkers/appendCombine_checker.go new file mode 100644 index 000000000..03662fc21 --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/appendCombine_checker.go @@ -0,0 +1,102 @@ +package checkers + +import ( + "go/ast" + "go/token" + + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" + "github.com/go-toolsmith/astcast" + "github.com/go-toolsmith/astequal" +) + +func init() { + var info linter.CheckerInfo + info.Name = "appendCombine" + info.Tags = []string{"performance"} + info.Summary = "Detects `append` chains to the same slice that can be done in a single `append` call" + info.Before = ` +xs = append(xs, 1) +xs = append(xs, 2)` + info.After = `xs = append(xs, 1, 2)` + + collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { + return astwalk.WalkerForStmtList(&appendCombineChecker{ctx: ctx}), nil + }) +} + +type appendCombineChecker struct { + astwalk.WalkHandler + ctx *linter.CheckerContext +} + +func (c *appendCombineChecker) VisitStmtList(list []ast.Stmt) { + var cause ast.Node // First append + var slice ast.Expr // Slice being appended to + chain := 0 // How much appends in a row we've seen + + // Break the chain. + // If enough appends are in chain, print warning. + flush := func() { + if chain > 1 { + c.warn(cause, chain) + } + chain = 0 + slice = nil + } + + for _, stmt := range list { + call := c.matchAppend(stmt, slice) + if call == nil { + flush() + continue + } + + if chain == 0 { + // First append in a chain. + chain = 1 + slice = call.Args[0] + cause = stmt + } else { + chain++ + } + } + + // Required for printing chains that consist of trailing + // statements from the list. + flush() +} + +func (c *appendCombineChecker) matchAppend(stmt ast.Stmt, slice ast.Expr) *ast.CallExpr { + // Seeking for: + // slice = append(slice, xs...) + // xs are 0-N append arguments, but not variadic argument, + // because it makes append combining impossible. + + assign := astcast.ToAssignStmt(stmt) + if len(assign.Lhs) != 1 || len(assign.Rhs) != 1 { + return nil + } + + call, ok := assign.Rhs[0].(*ast.CallExpr) + { + cond := ok && + qualifiedName(call.Fun) == "append" && + call.Ellipsis == token.NoPos && + astequal.Expr(assign.Lhs[0], call.Args[0]) + if !cond { + return nil + } + } + + // Check that current append slice match previous append slice. + // Otherwise we should break the chain. 
+ if slice == nil || astequal.Expr(slice, call.Args[0]) { + return call + } + return nil +} + +func (c *appendCombineChecker) warn(cause ast.Node, chain int) { + c.ctx.Warn(cause, "can combine chain of %d appends into one", chain) +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/argOrder_checker.go b/vendor/github.com/go-critic/go-critic/checkers/argOrder_checker.go new file mode 100644 index 000000000..98cabc54f --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/argOrder_checker.go @@ -0,0 +1,97 @@ +package checkers + +import ( + "go/ast" + "go/types" + + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" + "github.com/go-toolsmith/astcast" + "github.com/go-toolsmith/astcopy" + "github.com/go-toolsmith/astp" + "github.com/go-toolsmith/typep" +) + +func init() { + var info linter.CheckerInfo + info.Name = "argOrder" + info.Tags = []string{"diagnostic"} + info.Summary = "Detects suspicious arguments order" + info.Before = `strings.HasPrefix("#", userpass)` + info.After = `strings.HasPrefix(userpass, "#")` + + collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { + return astwalk.WalkerForExpr(&argOrderChecker{ctx: ctx}), nil + }) +} + +type argOrderChecker struct { + astwalk.WalkHandler + ctx *linter.CheckerContext +} + +func (c *argOrderChecker) VisitExpr(expr ast.Expr) { + call := astcast.ToCallExpr(expr) + + // For now only handle functions of 2 args. + // TODO(quasilyte): generalize the algorithm and add more patterns. + if len(call.Args) != 2 { + return + } + + calledExpr := astcast.ToSelectorExpr(call.Fun) + obj, ok := c.ctx.TypesInfo.ObjectOf(astcast.ToIdent(calledExpr.X)).(*types.PkgName) + if !ok || !isStdlibPkg(obj.Imported()) { + return + } + + x := call.Args[0] + y := call.Args[1] + switch calledExpr.Sel.Name { + case "HasPrefix", "HasSuffix", "Contains", "TrimPrefix", "TrimSuffix", "Split": + if obj.Name() != "bytes" && obj.Name() != "strings" { + return + } + if c.isConstLiteral(x) && !c.isConstLiteral(y) { + c.warn(call) + } + } +} + +func (c *argOrderChecker) isConstLiteral(x ast.Expr) bool { + // Also permit byte slices. + switch x := x.(type) { + case *ast.BasicLit: + return true + + case *ast.CallExpr: + // Handle `[]byte("abc")` as well. + if len(x.Args) != 1 || !astp.IsBasicLit(x.Args[0]) { + return false + } + typ, ok := c.ctx.TypeOf(x.Fun).(*types.Slice) + return ok && typep.HasUint8Kind(typ.Elem()) + + case *ast.CompositeLit: + // Check if it's a const byte slice. 
+ typ, ok := c.ctx.TypeOf(x).(*types.Slice) + if !ok || !typep.HasUint8Kind(typ.Elem()) { + return false + } + for _, elt := range x.Elts { + if !astp.IsBasicLit(elt) { + return false + } + } + return true + + default: + return false + } +} + +func (c *argOrderChecker) warn(call *ast.CallExpr) { + fixed := astcopy.CallExpr(call) + fixed.Args[0], fixed.Args[1] = fixed.Args[1], fixed.Args[0] + c.ctx.Warn(call, "probably meant `%s`", fixed) +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/assignOp_checker.go b/vendor/github.com/go-critic/go-critic/checkers/assignOp_checker.go new file mode 100644 index 000000000..d0bf64417 --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/assignOp_checker.go @@ -0,0 +1,102 @@ +package checkers + +import ( + "go/ast" + "go/token" + + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" + "github.com/go-toolsmith/astcopy" + "github.com/go-toolsmith/astequal" + "github.com/go-toolsmith/typep" +) + +func init() { + var info linter.CheckerInfo + info.Name = "assignOp" + info.Tags = []string{"style"} + info.Summary = "Detects assignments that can be simplified by using assignment operators" + info.Before = `x = x * 2` + info.After = `x *= 2` + + collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { + return astwalk.WalkerForStmt(&assignOpChecker{ctx: ctx}), nil + }) +} + +type assignOpChecker struct { + astwalk.WalkHandler + ctx *linter.CheckerContext +} + +func (c *assignOpChecker) VisitStmt(stmt ast.Stmt) { + assign, ok := stmt.(*ast.AssignStmt) + cond := ok && + assign.Tok == token.ASSIGN && + len(assign.Lhs) == 1 && + len(assign.Rhs) == 1 && + typep.SideEffectFree(c.ctx.TypesInfo, assign.Lhs[0]) + if !cond { + return + } + + // TODO(quasilyte): can take commutativity into account. + expr, ok := assign.Rhs[0].(*ast.BinaryExpr) + if !ok || !astequal.Expr(assign.Lhs[0], expr.X) { + return + } + + // TODO(quasilyte): perform unparen? 
+ switch expr.Op { + case token.MUL: + c.warn(assign, token.MUL_ASSIGN, expr.Y) + case token.QUO: + c.warn(assign, token.QUO_ASSIGN, expr.Y) + case token.REM: + c.warn(assign, token.REM_ASSIGN, expr.Y) + case token.ADD: + c.warn(assign, token.ADD_ASSIGN, expr.Y) + case token.SUB: + c.warn(assign, token.SUB_ASSIGN, expr.Y) + case token.AND: + c.warn(assign, token.AND_ASSIGN, expr.Y) + case token.OR: + c.warn(assign, token.OR_ASSIGN, expr.Y) + case token.XOR: + c.warn(assign, token.XOR_ASSIGN, expr.Y) + case token.SHL: + c.warn(assign, token.SHL_ASSIGN, expr.Y) + case token.SHR: + c.warn(assign, token.SHR_ASSIGN, expr.Y) + case token.AND_NOT: + c.warn(assign, token.AND_NOT_ASSIGN, expr.Y) + } +} + +func (c *assignOpChecker) warn(cause *ast.AssignStmt, op token.Token, rhs ast.Expr) { + suggestion := c.simplify(cause, op, rhs) + c.ctx.Warn(cause, "replace `%s` with `%s`", cause, suggestion) +} + +func (c *assignOpChecker) simplify(cause *ast.AssignStmt, op token.Token, rhs ast.Expr) ast.Stmt { + if lit, ok := rhs.(*ast.BasicLit); ok && lit.Kind == token.INT && lit.Value == "1" { + switch op { + case token.ADD_ASSIGN: + return &ast.IncDecStmt{ + X: cause.Lhs[0], + TokPos: cause.TokPos, + Tok: token.INC, + } + case token.SUB_ASSIGN: + return &ast.IncDecStmt{ + X: cause.Lhs[0], + TokPos: cause.TokPos, + Tok: token.DEC, + } + } + } + suggestion := astcopy.AssignStmt(cause) + suggestion.Tok = op + suggestion.Rhs[0] = rhs + return suggestion +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/badCall_checker.go b/vendor/github.com/go-critic/go-critic/checkers/badCall_checker.go new file mode 100644 index 000000000..7435ee57b --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/badCall_checker.go @@ -0,0 +1,63 @@ +package checkers + +import ( + "go/ast" + + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" + "github.com/go-toolsmith/astcast" + "github.com/go-toolsmith/astcopy" +) + +func init() { + var info linter.CheckerInfo + info.Name = "badCall" + info.Tags = []string{"diagnostic"} + info.Summary = "Detects suspicious function calls" + info.Before = `strings.Replace(s, from, to, 0)` + info.After = `strings.Replace(s, from, to, -1)` + + collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { + return astwalk.WalkerForExpr(&badCallChecker{ctx: ctx}), nil + }) +} + +type badCallChecker struct { + astwalk.WalkHandler + ctx *linter.CheckerContext +} + +func (c *badCallChecker) VisitExpr(expr ast.Expr) { + call := astcast.ToCallExpr(expr) + if len(call.Args) == 0 { + return + } + + // TODO(quasilyte): handle methods. 
+ + switch qualifiedName(call.Fun) { + case "strings.Replace", "bytes.Replace": + if n := astcast.ToBasicLit(call.Args[3]); n.Value == "0" { + c.warnBadArg(n, "-1") + } + case "strings.SplitN", "bytes.SplitN": + if n := astcast.ToBasicLit(call.Args[2]); n.Value == "0" { + c.warnBadArg(n, "-1") + } + case "append": + if len(call.Args) == 1 { + c.warnAppend(call) + } + } +} + +func (c *badCallChecker) warnBadArg(badArg *ast.BasicLit, correction string) { + goodArg := astcopy.BasicLit(badArg) + goodArg.Value = correction + c.ctx.Warn(badArg, "suspicious arg %s, probably meant %s", + badArg, goodArg) +} + +func (c *badCallChecker) warnAppend(call *ast.CallExpr) { + c.ctx.Warn(call, "no-op append call, probably missing arguments") +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/badCond_checker.go b/vendor/github.com/go-critic/go-critic/checkers/badCond_checker.go new file mode 100644 index 000000000..149f0ac88 --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/badCond_checker.go @@ -0,0 +1,147 @@ +package checkers + +import ( + "go/ast" + "go/constant" + "go/token" + + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/checkers/internal/lintutil" + "github.com/go-critic/go-critic/framework/linter" + "github.com/go-toolsmith/astcast" + "github.com/go-toolsmith/astcopy" + "github.com/go-toolsmith/astequal" + "github.com/go-toolsmith/typep" + "golang.org/x/tools/go/ast/astutil" +) + +func init() { + var info linter.CheckerInfo + info.Name = "badCond" + info.Tags = []string{"diagnostic"} + info.Summary = "Detects suspicious condition expressions" + info.Before = ` +for i := 0; i > n; i++ { + xs[i] = 0 +}` + info.After = ` +for i := 0; i < n; i++ { + xs[i] = 0 +}` + + collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { + return astwalk.WalkerForFuncDecl(&badCondChecker{ctx: ctx}), nil + }) +} + +type badCondChecker struct { + astwalk.WalkHandler + ctx *linter.CheckerContext +} + +func (c *badCondChecker) VisitFuncDecl(decl *ast.FuncDecl) { + ast.Inspect(decl.Body, func(n ast.Node) bool { + switch n := n.(type) { + case *ast.ForStmt: + c.checkForStmt(n) + case ast.Expr: + c.checkExpr(n) + } + return true + }) +} + +func (c *badCondChecker) checkExpr(expr ast.Expr) { + // TODO(quasilyte): recognize more patterns. + + cond := astcast.ToBinaryExpr(expr) + lhs := astcast.ToBinaryExpr(astutil.Unparen(cond.X)) + rhs := astcast.ToBinaryExpr(astutil.Unparen(cond.Y)) + + if cond.Op != token.LAND { + return + } + + // Notes: + // `x != a || x != b` handled by go vet. + + // Pattern 1. + // `x < a && x > b`; Where `a` is less than `b`. + if c.lessAndGreater(lhs, rhs) { + c.warnCond(cond, "always false") + return + } + + // Pattern 2. + // `x == a && x == b` + // + // Valid when `b == a` is intended, but still reported. + // We can disable "just suspicious" warnings by default + // is users are upset with the current behavior. 
+ if c.equalToBoth(lhs, rhs) { + c.warnCond(cond, "suspicious") + return + } +} + +func (c *badCondChecker) equalToBoth(lhs, rhs *ast.BinaryExpr) bool { + return lhs.Op == token.EQL && rhs.Op == token.EQL && + astequal.Expr(lhs.X, rhs.X) +} + +func (c *badCondChecker) lessAndGreater(lhs, rhs *ast.BinaryExpr) bool { + if lhs.Op != token.LSS || rhs.Op != token.GTR { + return false + } + if !astequal.Expr(lhs.X, rhs.X) { + return false + } + a := c.ctx.TypesInfo.Types[lhs.Y].Value + b := c.ctx.TypesInfo.Types[rhs.Y].Value + return a != nil && b != nil && constant.Compare(a, token.LSS, b) +} + +func (c *badCondChecker) checkForStmt(stmt *ast.ForStmt) { + // TODO(quasilyte): handle other kinds of bad conditionals. + + init := astcast.ToAssignStmt(stmt.Init) + if init.Tok != token.DEFINE || len(init.Lhs) != 1 || len(init.Rhs) != 1 { + return + } + if astcast.ToBasicLit(init.Rhs[0]).Value != "0" { + return + } + + iter := astcast.ToIdent(init.Lhs[0]) + cond := astcast.ToBinaryExpr(stmt.Cond) + if cond.Op != token.GTR || !astequal.Expr(iter, cond.X) { + return + } + if !typep.SideEffectFree(c.ctx.TypesInfo, cond.Y) { + return + } + + post := astcast.ToIncDecStmt(stmt.Post) + if post.Tok != token.INC || !astequal.Expr(iter, post.X) { + return + } + + mutated := lintutil.CouldBeMutated(c.ctx.TypesInfo, stmt.Body, cond.Y) || + lintutil.CouldBeMutated(c.ctx.TypesInfo, stmt.Body, iter) + if mutated { + return + } + + c.warnForStmt(stmt, cond) +} + +func (c *badCondChecker) warnForStmt(cause ast.Node, cond *ast.BinaryExpr) { + suggest := astcopy.BinaryExpr(cond) + suggest.Op = token.LSS + c.ctx.Warn(cause, "`%s` in loop; probably meant `%s`?", + cond, suggest) +} + +func (c *badCondChecker) warnCond(cond *ast.BinaryExpr, tag string) { + c.ctx.Warn(cond, "`%s` condition is %s", cond, tag) +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/badLock_checker.go b/vendor/github.com/go-critic/go-critic/checkers/badLock_checker.go new file mode 100644 index 000000000..8628ff2d7 --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/badLock_checker.go @@ -0,0 +1,116 @@ +package checkers + +import ( + "go/ast" + + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" + "github.com/go-toolsmith/astequal" +) + +func init() { + var info linter.CheckerInfo + info.Name = "badLock" + info.Tags = []string{"diagnostic", "experimental"} + info.Summary = "Detects suspicious mutex lock/unlock operations" + info.Before = ` +mu.Lock() +mu.Unlock()` + info.After = ` +mu.Lock() +defer mu.Unlock()` + + collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { + return astwalk.WalkerForStmtList(&badLockChecker{ctx: ctx}), nil + }) +} + +type badLockChecker struct { + astwalk.WalkHandler + ctx *linter.CheckerContext +} + +func (c *badLockChecker) VisitStmtList(list []ast.Stmt) { + if len(list) < 2 { + return + } + + for i := 0; i < len(list)-1; i++ { + current, ok := list[i].(*ast.ExprStmt) + if !ok { + continue + } + deferred := false + var next ast.Expr + switch x := list[i+1].(type) { + case *ast.ExprStmt: + next = x.X + case *ast.DeferStmt: + next = x.Call + deferred = true + default: + continue + } + + mutex1, lockFunc, ok := c.asLockedMutex(current.X) + if !ok { + continue + } + mutex2, unlockFunc, ok := c.asUnlockedMutex(next) + if !ok { + continue + } + if !astequal.Expr(mutex1, mutex2) { + continue + } + + switch { + case !deferred: + c.warnImmediateUnlock(mutex2) + case lockFunc == "Lock" && unlockFunc == 
"RUnlock": + c.warnMismatchingUnlock(mutex2, "Unlock") + case lockFunc == "RLock" && unlockFunc == "Unlock": + c.warnMismatchingUnlock(mutex2, "RUnlock") + } + } +} + +func (c *badLockChecker) asLockedMutex(e ast.Expr) (ast.Expr, string, bool) { + call, ok := e.(*ast.CallExpr) + if !ok || len(call.Args) != 0 { + return nil, "", false + } + switch fn := call.Fun.(type) { + case *ast.SelectorExpr: + if fn.Sel.Name == "Lock" || fn.Sel.Name == "RLock" { + return fn.X, fn.Sel.Name, true + } + return nil, "", false + default: + return nil, "", false + } +} + +func (c *badLockChecker) asUnlockedMutex(e ast.Expr) (ast.Expr, string, bool) { + call, ok := e.(*ast.CallExpr) + if !ok || len(call.Args) != 0 { + return nil, "", false + } + switch fn := call.Fun.(type) { + case *ast.SelectorExpr: + if fn.Sel.Name == "Unlock" || fn.Sel.Name == "RUnlock" { + return fn.X, fn.Sel.Name, true + } + return nil, "", false + default: + return nil, "", false + } +} + +func (c *badLockChecker) warnImmediateUnlock(cause ast.Node) { + c.ctx.Warn(cause, "defer is missing, mutex is unlocked immediately") +} + +func (c *badLockChecker) warnMismatchingUnlock(cause ast.Node, suggestion string) { + c.ctx.Warn(cause, "suspicious unlock, maybe %s was intended?", suggestion) +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/badRegexp_checker.go b/vendor/github.com/go-critic/go-critic/checkers/badRegexp_checker.go new file mode 100644 index 000000000..e0d4b7487 --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/badRegexp_checker.go @@ -0,0 +1,445 @@ +package checkers + +import ( + "go/ast" + "go/constant" + "sort" + "strconv" + "unicode" + "unicode/utf8" + + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" + "github.com/quasilyte/regex/syntax" +) + +func init() { + var info linter.CheckerInfo + info.Name = "badRegexp" + info.Tags = []string{"diagnostic", "experimental"} + info.Summary = "Detects suspicious regexp patterns" + info.Before = "regexp.MustCompile(`(?:^aa|bb|cc)foo[aba]`)" + info.After = "regexp.MustCompile(`^(?:aa|bb|cc)foo[ab]`)" + + collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { + opts := &syntax.ParserOptions{} + c := &badRegexpChecker{ + ctx: ctx, + parser: syntax.NewParser(opts), + } + return astwalk.WalkerForExpr(c), nil + }) +} + +type badRegexpChecker struct { + astwalk.WalkHandler + ctx *linter.CheckerContext + + parser *syntax.Parser + cause ast.Expr + + flagStates []regexpFlagState + goodAnchors []syntax.Position +} + +type regexpFlagState [utf8.RuneSelf]bool + +func (c *badRegexpChecker) VisitExpr(x ast.Expr) { + call, ok := x.(*ast.CallExpr) + if !ok { + return + } + + switch qualifiedName(call.Fun) { + case "regexp.Compile", "regexp.MustCompile": + cv := c.ctx.TypesInfo.Types[call.Args[0]].Value + if cv == nil || cv.Kind() != constant.String { + return + } + pat := constant.StringVal(cv) + c.cause = call.Args[0] + c.checkPattern(pat) + } +} + +func (c *badRegexpChecker) checkPattern(pat string) { + re, err := c.parser.Parse(pat) + if err != nil { + return + } + + c.flagStates = c.flagStates[:0] + c.goodAnchors = c.goodAnchors[:0] + + // In Go all flags (modifiers) are set to false by default, + // so we start from the empty flag set. 
+ c.flagStates = append(c.flagStates, regexpFlagState{}) + + c.markGoodCarets(re.Expr) + c.walk(re.Expr) +} + +func (c *badRegexpChecker) markGoodCarets(e syntax.Expr) { + canSkip := func(e syntax.Expr) bool { + switch e.Op { + case syntax.OpFlagOnlyGroup: + return true + case syntax.OpGroup: + x := e.Args[0] + return x.Op == syntax.OpConcat && len(x.Args) == 0 + } + return false + } + + if e.Op == syntax.OpConcat && len(e.Args) > 1 { + i := 0 + for i < len(e.Args) && canSkip(e.Args[i]) { + i++ + } + if i < len(e.Args) { + c.markGoodCarets(e.Args[i]) + } + return + } + if e.Op == syntax.OpCaret { + c.addGoodAnchor(e.Pos) + } + for _, a := range e.Args { + c.markGoodCarets(a) + } +} + +func (c *badRegexpChecker) walk(e syntax.Expr) { + switch e.Op { + case syntax.OpAlt: + c.checkAltAnchor(e) + c.checkAltDups(e) + for _, a := range e.Args { + c.walk(a) + } + + case syntax.OpCharClass, syntax.OpNegCharClass: + if c.checkCharClassRanges(e) { + c.checkCharClassDups(e) + } + + case syntax.OpStar, syntax.OpPlus: + c.checkNestedQuantifier(e) + c.walk(e.Args[0]) + + case syntax.OpFlagOnlyGroup: + c.updateFlagState(c.currentFlagState(), e, e.Args[0].Value) + case syntax.OpGroupWithFlags: + // Creates a new context using the current context copy. + // New flags are evaluated inside a new context. + // After nested expressions are processed, previous context is restored. + nflags := len(c.flagStates) + c.flagStates = append(c.flagStates, *c.currentFlagState()) + c.updateFlagState(c.currentFlagState(), e, e.Args[1].Value) + c.walk(e.Args[0]) + c.flagStates = c.flagStates[:nflags] + case syntax.OpGroup, syntax.OpCapture, syntax.OpNamedCapture: + // Like with OpGroupWithFlags, but doesn't evaluate any new flags. + nflags := len(c.flagStates) + c.flagStates = append(c.flagStates, *c.currentFlagState()) + c.walk(e.Args[0]) + c.flagStates = c.flagStates[:nflags] + + case syntax.OpCaret: + if !c.isGoodAnchor(e) { + c.warn("dangling or redundant ^, maybe \\^ is intended?") + } + + default: + for _, a := range e.Args { + c.walk(a) + } + } +} + +func (c *badRegexpChecker) currentFlagState() *regexpFlagState { + return &c.flagStates[len(c.flagStates)-1] +} + +func (c *badRegexpChecker) updateFlagState(state *regexpFlagState, e syntax.Expr, flagString string) { + clearing := false + for i := 0; i < len(flagString); i++ { + ch := flagString[i] + if ch == '-' { + clearing = true + continue + } + if int(ch) >= len(state) { + continue // Should never happen in practice, but we don't want a panic + } + + if clearing { + if !state[ch] { + c.warn("clearing unset flag %c in %s", ch, e.Value) + } + } else { + if state[ch] { + c.warn("redundant flag %c in %s", ch, e.Value) + } + } + state[ch] = !clearing + } +} + +func (c *badRegexpChecker) checkNestedQuantifier(e syntax.Expr) { + x := e.Args[0] + switch x.Op { + case syntax.OpGroup, syntax.OpCapture, syntax.OpGroupWithFlags: + if len(e.Args) == 1 { + x = x.Args[0] + } + } + + switch x.Op { + case syntax.OpPlus, syntax.OpStar: + c.warn("repeated greedy quantifier in %s", e.Value) + } +} + +func (c *badRegexpChecker) checkAltDups(alt syntax.Expr) { + // Seek duplicated alternation expressions. 
+ + set := make(map[string]struct{}, len(alt.Args)) + for _, a := range alt.Args { + if _, ok := set[a.Value]; ok { + c.warn("`%s` is duplicated in %s", a.Value, alt.Value) + } + set[a.Value] = struct{}{} + } +} + +func (c *badRegexpChecker) isCharOrLit(e syntax.Expr) bool { + return e.Op == syntax.OpChar || e.Op == syntax.OpLiteral +} + +func (c *badRegexpChecker) checkAltAnchor(alt syntax.Expr) { + // Seek suspicious anchors. + + // Case 1: an alternation of literals where 1st expr begins with ^ anchor. + first := alt.Args[0] + if first.Op == syntax.OpConcat && len(first.Args) == 2 && first.Args[0].Op == syntax.OpCaret && c.isCharOrLit(first.Args[1]) { + matched := true + for _, a := range alt.Args[1:] { + if !c.isCharOrLit(a) { + matched = false + break + } + } + if matched { + c.warn("^ applied only to `%s` in %s", first.Value[len(`^`):], alt.Value) + } + } + + // Case 2: an alternation of literals where last expr ends with $ anchor. + last := alt.Args[len(alt.Args)-1] + if last.Op == syntax.OpConcat && len(last.Args) == 2 && last.Args[1].Op == syntax.OpDollar && c.isCharOrLit(last.Args[0]) { + matched := true + for _, a := range alt.Args[:len(alt.Args)-1] { + if !c.isCharOrLit(a) { + matched = false + break + } + } + if matched { + c.warn("$ applied only to `%s` in %s", last.Value[:len(last.Value)-len(`$`)], alt.Value) + } + } +} + +func (c *badRegexpChecker) checkCharClassRanges(cc syntax.Expr) bool { + // Seek for suspicious ranges like `!-_`. + // + // We permit numerical ranges (0-9, hex and octal literals) + // and simple ascii letter ranges. + + for _, e := range cc.Args { + if e.Op != syntax.OpCharRange { + continue + } + switch e.Args[0].Op { + case syntax.OpEscapeOctal, syntax.OpEscapeHex: + continue + } + ch := c.charClassBoundRune(e.Args[0]) + if ch == 0 { + return false + } + good := unicode.IsLetter(ch) || (ch >= '0' && ch <= '9') + if !good { + c.warnSloppyCharRange(e.Value, cc.Value) + } + } + + return true +} + +func (c *badRegexpChecker) checkCharClassDups(cc syntax.Expr) { + // Seek for excessive elements inside a character class. + // Report them as intersections. + + if len(cc.Args) == 1 { + return // Can't had duplicates. + } + + type charRange struct { + low rune + high rune + source string + } + ranges := make([]charRange, 0, 8) + addRange := func(source string, low, high rune) { + ranges = append(ranges, charRange{source: source, low: low, high: high}) + } + addRange1 := func(source string, ch rune) { + addRange(source, ch, ch) + } + + // 1. Collect ranges, O(n). + for _, e := range cc.Args { + switch e.Op { + case syntax.OpEscapeOctal: + addRange1(e.Value, c.octalToRune(e)) + case syntax.OpEscapeHex: + addRange1(e.Value, c.hexToRune(e)) + case syntax.OpChar: + addRange1(e.Value, c.stringToRune(e.Value)) + case syntax.OpCharRange: + addRange(e.Value, c.charClassBoundRune(e.Args[0]), c.charClassBoundRune(e.Args[1])) + case syntax.OpEscapeMeta: + addRange1(e.Value, rune(e.Value[1])) + case syntax.OpEscapeChar: + ch := c.stringToRune(e.Value[len(`\`):]) + if unicode.IsPunct(ch) { + addRange1(e.Value, ch) + break + } + switch e.Value { + case `\|`, `\<`, `\>`, `\+`, `\=`: // How to cover all symbols? 
+ addRange1(e.Value, c.stringToRune(e.Value[len(`\`):])) + case `\t`: + addRange1(e.Value, '\t') + case `\n`: + addRange1(e.Value, '\n') + case `\r`: + addRange1(e.Value, '\r') + case `\v`: + addRange1(e.Value, '\v') + case `\d`: + addRange(e.Value, '0', '9') + case `\D`: + addRange(e.Value, 0, '0'-1) + addRange(e.Value, '9'+1, utf8.MaxRune) + case `\s`: + addRange(e.Value, '\t', '\n') // 9-10 + addRange(e.Value, '\f', '\r') // 12-13 + addRange1(e.Value, ' ') // 32 + case `\S`: + addRange(e.Value, 0, '\t'-1) + addRange(e.Value, '\n'+1, '\f'-1) + addRange(e.Value, '\r'+1, ' '-1) + addRange(e.Value, ' '+1, utf8.MaxRune) + case `\w`: + addRange(e.Value, '0', '9') // 48-57 + addRange(e.Value, 'A', 'Z') // 65-90 + addRange1(e.Value, '_') // 95 + addRange(e.Value, 'a', 'z') // 97-122 + case `\W`: + addRange(e.Value, 0, '0'-1) + addRange(e.Value, '9'+1, 'A'-1) + addRange(e.Value, 'Z'+1, '_'-1) + addRange(e.Value, '_'+1, 'a'-1) + addRange(e.Value, 'z'+1, utf8.MaxRune) + default: + // Give up: unknown escape sequence. + return + } + default: + // Give up: unexpected operation inside char class. + return + } + } + + // 2. Sort ranges, O(nlogn). + sort.Slice(ranges, func(i, j int) bool { + return ranges[i].low < ranges[j].low + }) + + // 3. Search for duplicates, O(n). + for i := 0; i < len(ranges)-1; i++ { + x := ranges[i+0] + y := ranges[i+1] + if x.high >= y.low { + c.warnCharClassDup(x.source, y.source, cc.Value) + break + } + } +} + +func (c *badRegexpChecker) charClassBoundRune(e syntax.Expr) rune { + switch e.Op { + case syntax.OpChar: + return c.stringToRune(e.Value) + case syntax.OpEscapeHex: + return c.hexToRune(e) + case syntax.OpEscapeOctal: + return c.octalToRune(e) + default: + return 0 + } +} + +func (c *badRegexpChecker) octalToRune(e syntax.Expr) rune { + v, _ := strconv.ParseInt(e.Value[len(`\`):], 8, 32) + return rune(v) +} + +func (c *badRegexpChecker) hexToRune(e syntax.Expr) rune { + var s string + switch e.Form { + case syntax.FormEscapeHexFull: + s = e.Value[len(`\x{`) : len(e.Value)-len(`}`)] + default: + s = e.Value[len(`\x`):] + } + v, _ := strconv.ParseInt(s, 16, 32) + return rune(v) +} + +func (c *badRegexpChecker) stringToRune(s string) rune { + ch, _ := utf8.DecodeRuneInString(s) + return ch +} + +func (c *badRegexpChecker) addGoodAnchor(pos syntax.Position) { + c.goodAnchors = append(c.goodAnchors, pos) +} + +func (c *badRegexpChecker) isGoodAnchor(e syntax.Expr) bool { + for _, pos := range c.goodAnchors { + if e.Pos == pos { + return true + } + } + return false +} + +func (c *badRegexpChecker) warn(format string, args ...interface{}) { + c.ctx.Warn(c.cause, format, args...) 
+} + +func (c *badRegexpChecker) warnSloppyCharRange(rng, charClass string) { + c.ctx.Warn(c.cause, "suspicious char range `%s` in %s", rng, charClass) +} + +func (c *badRegexpChecker) warnCharClassDup(x, y, charClass string) { + if x == y { + c.ctx.Warn(c.cause, "`%s` is duplicated in %s", x, charClass) + } else { + c.ctx.Warn(c.cause, "`%s` intersects with `%s` in %s", x, y, charClass) + } +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/boolExprSimplify_checker.go b/vendor/github.com/go-critic/go-critic/checkers/boolExprSimplify_checker.go new file mode 100644 index 000000000..325fb56a3 --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/boolExprSimplify_checker.go @@ -0,0 +1,346 @@ +package checkers + +import ( + "fmt" + "go/ast" + "go/token" + "strconv" + + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/checkers/internal/lintutil" + "github.com/go-critic/go-critic/framework/linter" + "github.com/go-toolsmith/astcast" + "github.com/go-toolsmith/astcopy" + "github.com/go-toolsmith/astequal" + "github.com/go-toolsmith/astp" + "github.com/go-toolsmith/typep" + "golang.org/x/tools/go/ast/astutil" +) + +func init() { + var info linter.CheckerInfo + info.Name = "boolExprSimplify" + info.Tags = []string{"style", "experimental"} + info.Summary = "Detects bool expressions that can be simplified" + info.Before = ` +a := !(elapsed >= expectElapsedMin) +b := !(x) == !(y)` + info.After = ` +a := elapsed < expectElapsedMin +b := (x) == (y)` + + collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { + return astwalk.WalkerForExpr(&boolExprSimplifyChecker{ctx: ctx}), nil + }) +} + +type boolExprSimplifyChecker struct { + astwalk.WalkHandler + ctx *linter.CheckerContext + hasFloats bool +} + +func (c *boolExprSimplifyChecker) VisitExpr(x ast.Expr) { + if !astp.IsBinaryExpr(x) && !astp.IsUnaryExpr(x) { + return + } + + // Throw away non-bool expressions and avoid redundant + // AST copying below. + if typ := c.ctx.TypeOf(x); typ == nil || !typep.HasBoolKind(typ.Underlying()) { + return + } + + // We'll loose all types info after a copy, + // this is why we record valuable info before doing it. 
+ c.hasFloats = lintutil.ContainsNode(x, func(n ast.Node) bool { + if x, ok := n.(*ast.BinaryExpr); ok { + return typep.HasFloatProp(c.ctx.TypeOf(x.X).Underlying()) || + typep.HasFloatProp(c.ctx.TypeOf(x.Y).Underlying()) + } + return false + }) + + y := c.simplifyBool(astcopy.Expr(x)) + if !astequal.Expr(x, y) { + c.warn(x, y) + } +} + +func (c *boolExprSimplifyChecker) simplifyBool(x ast.Expr) ast.Expr { + return astutil.Apply(x, nil, func(cur *astutil.Cursor) bool { + return c.doubleNegation(cur) || + c.negatedEquals(cur) || + c.invertComparison(cur) || + c.combineChecks(cur) || + c.removeIncDec(cur) || + c.foldRanges(cur) || + true + }).(ast.Expr) +} + +func (c *boolExprSimplifyChecker) doubleNegation(cur *astutil.Cursor) bool { + neg1 := astcast.ToUnaryExpr(cur.Node()) + neg2 := astcast.ToUnaryExpr(astutil.Unparen(neg1.X)) + if neg1.Op == token.NOT && neg2.Op == token.NOT { + cur.Replace(astutil.Unparen(neg2.X)) + return true + } + return false +} + +func (c *boolExprSimplifyChecker) negatedEquals(cur *astutil.Cursor) bool { + x, ok := cur.Node().(*ast.BinaryExpr) + if !ok || x.Op != token.EQL { + return false + } + neg1 := astcast.ToUnaryExpr(x.X) + neg2 := astcast.ToUnaryExpr(x.Y) + if neg1.Op == token.NOT && neg2.Op == token.NOT { + x.X = neg1.X + x.Y = neg2.X + return true + } + return false +} + +func (c *boolExprSimplifyChecker) invertComparison(cur *astutil.Cursor) bool { + if c.hasFloats { // See #673 + return false + } + + neg := astcast.ToUnaryExpr(cur.Node()) + cmp := astcast.ToBinaryExpr(astutil.Unparen(neg.X)) + if neg.Op != token.NOT { + return false + } + + // Replace operator to its negated form. + switch cmp.Op { + case token.EQL: + cmp.Op = token.NEQ + case token.NEQ: + cmp.Op = token.EQL + case token.LSS: + cmp.Op = token.GEQ + case token.GTR: + cmp.Op = token.LEQ + case token.LEQ: + cmp.Op = token.GTR + case token.GEQ: + cmp.Op = token.LSS + + default: + return false + } + cur.Replace(cmp) + return true +} + +func (c *boolExprSimplifyChecker) isSafe(x ast.Expr) bool { + return typep.SideEffectFree(c.ctx.TypesInfo, x) +} + +func (c *boolExprSimplifyChecker) combineChecks(cur *astutil.Cursor) bool { + or, ok := cur.Node().(*ast.BinaryExpr) + if !ok || or.Op != token.LOR { + return false + } + + lhs := astcast.ToBinaryExpr(astutil.Unparen(or.X)) + rhs := astcast.ToBinaryExpr(astutil.Unparen(or.Y)) + + if !astequal.Expr(lhs.X, rhs.X) || !astequal.Expr(lhs.Y, rhs.Y) { + return false + } + if !c.isSafe(lhs.X) || !c.isSafe(lhs.Y) { + return false + } + + combTable := [...]struct { + x token.Token + y token.Token + result token.Token + }{ + {token.GTR, token.EQL, token.GEQ}, + {token.EQL, token.GTR, token.GEQ}, + {token.LSS, token.EQL, token.LEQ}, + {token.EQL, token.LSS, token.LEQ}, + } + for _, comb := range &combTable { + if comb.x == lhs.Op && comb.y == rhs.Op { + lhs.Op = comb.result + cur.Replace(lhs) + return true + } + } + return false +} + +func (c *boolExprSimplifyChecker) removeIncDec(cur *astutil.Cursor) bool { + cmp := astcast.ToBinaryExpr(cur.Node()) + + matchOneWay := func(op token.Token, x, y *ast.BinaryExpr) bool { + if x.Op != op || astcast.ToBasicLit(x.Y).Value != "1" { + return false + } + if y.Op == op && astcast.ToBasicLit(y.Y).Value == "1" { + return false + } + return true + } + replace := func(lhsOp, rhsOp, replacement token.Token) bool { + lhs := astcast.ToBinaryExpr(cmp.X) + rhs := astcast.ToBinaryExpr(cmp.Y) + switch { + case matchOneWay(lhsOp, lhs, rhs): + cmp.X = lhs.X + cmp.Op = replacement + cur.Replace(cmp) + return true + case 
matchOneWay(rhsOp, rhs, lhs): + cmp.Y = rhs.X + cmp.Op = replacement + cur.Replace(cmp) + return true + default: + return false + } + } + + switch cmp.Op { + case token.GTR: + // `x > y-1` => `x >= y` + // `x+1 > y` => `x >= y` + return replace(token.ADD, token.SUB, token.GEQ) + + case token.GEQ: + // `x >= y+1` => `x > y` + // `x-1 >= y` => `x > y` + return replace(token.SUB, token.ADD, token.GTR) + + case token.LSS: + // `x < y+1` => `x <= y` + // `x-1 < y` => `x <= y` + return replace(token.SUB, token.ADD, token.LEQ) + + case token.LEQ: + // `x <= y-1` => `x < y` + // `x+1 <= y` => `x < y` + return replace(token.ADD, token.SUB, token.LSS) + + default: + return false + } +} + +func (c *boolExprSimplifyChecker) foldRanges(cur *astutil.Cursor) bool { + if c.hasFloats { // See #848 + return false + } + + e, ok := cur.Node().(*ast.BinaryExpr) + if !ok { + return false + } + lhs := astcast.ToBinaryExpr(e.X) + rhs := astcast.ToBinaryExpr(e.Y) + if !c.isSafe(lhs.X) || !c.isSafe(rhs.X) { + return false + } + if !astequal.Expr(lhs.X, rhs.X) { + return false + } + + c1, ok := c.int64val(lhs.Y) + if !ok { + return false + } + c2, ok := c.int64val(rhs.Y) + if !ok { + return false + } + + type combination struct { + lhsOp token.Token + rhsOp token.Token + rhsDiff int64 + resDelta int64 + } + match := func(comb *combination) bool { + if lhs.Op != comb.lhsOp || rhs.Op != comb.rhsOp { + return false + } + if c2-c1 != comb.rhsDiff { + return false + } + return true + } + + switch e.Op { + case token.LAND: + combTable := [...]combination{ + // `x > c && x < c+2` => `x == c+1` + {token.GTR, token.LSS, 2, 1}, + // `x >= c && x < c+1` => `x == c` + {token.GEQ, token.LSS, 1, 0}, + // `x > c && x <= c+1` => `x == c+1` + {token.GTR, token.LEQ, 1, 1}, + // `x >= c && x <= c` => `x == c` + {token.GEQ, token.LEQ, 0, 0}, + } + for i := range combTable { + comb := combTable[i] + if match(&comb) { + lhs.Op = token.EQL + v := c1 + comb.resDelta + lhs.Y.(*ast.BasicLit).Value = fmt.Sprint(v) + cur.Replace(lhs) + return true + } + } + + case token.LOR: + combTable := [...]combination{ + // `x < c || x > c` => `x != c` + {token.LSS, token.GTR, 0, 0}, + // `x <= c || x > c+1` => `x != c+1` + {token.LEQ, token.GTR, 1, 1}, + // `x < c || x >= c+1` => `x != c` + {token.LSS, token.GEQ, 1, 0}, + // `x <= c || x >= c+2` => `x != c+1` + {token.LEQ, token.GEQ, 2, 1}, + } + for i := range combTable { + comb := combTable[i] + if match(&comb) { + lhs.Op = token.NEQ + v := c1 + comb.resDelta + lhs.Y.(*ast.BasicLit).Value = fmt.Sprint(v) + cur.Replace(lhs) + return true + } + } + } + + return false +} + +func (c *boolExprSimplifyChecker) int64val(x ast.Expr) (int64, bool) { + // TODO(quasilyte): if we had types info, we could use TypesInfo.Types[x].Value, + // but since copying erases leaves us without it, only basic literals are handled. 
+ lit, ok := x.(*ast.BasicLit) + if !ok { + return 0, false + } + v, err := strconv.ParseInt(lit.Value, 10, 64) + if err != nil { + return 0, false + } + return v, true +} + +func (c *boolExprSimplifyChecker) warn(cause, suggestion ast.Expr) { + c.SkipChilds = true + c.ctx.Warn(cause, "can simplify `%s` to `%s`", cause, suggestion) +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/builtinShadowDecl_checker.go b/vendor/github.com/go-critic/go-critic/checkers/builtinShadowDecl_checker.go new file mode 100644 index 000000000..94d51a996 --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/builtinShadowDecl_checker.go @@ -0,0 +1,63 @@ +package checkers + +import ( + "go/ast" + + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" +) + +func init() { + var info linter.CheckerInfo + info.Name = "builtinShadowDecl" + info.Tags = []string{"diagnostic", "experimental"} + info.Summary = "Detects top-level declarations that shadow the predeclared identifiers" + info.Before = `type int struct {}` + info.After = `type myInt struct {}` + + collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { + return &builtinShadowDeclChecker{ctx: ctx}, nil + }) +} + +type builtinShadowDeclChecker struct { + astwalk.WalkHandler + ctx *linter.CheckerContext +} + +func (c *builtinShadowDeclChecker) WalkFile(f *ast.File) { + for _, decl := range f.Decls { + switch decl := decl.(type) { + case *ast.FuncDecl: + // Don't check methods. They can shadow anything safely. + if decl.Recv == nil { + c.checkName(decl.Name) + } + case *ast.GenDecl: + c.visitGenDecl(decl) + } + } +} + +func (c *builtinShadowDeclChecker) visitGenDecl(decl *ast.GenDecl) { + for _, spec := range decl.Specs { + switch spec := spec.(type) { + case *ast.ValueSpec: + for _, name := range spec.Names { + c.checkName(name) + } + case *ast.TypeSpec: + c.checkName(spec.Name) + } + } +} + +func (c *builtinShadowDeclChecker) checkName(name *ast.Ident) { + if isBuiltin(name.Name) { + c.warn(name) + } +} + +func (c *builtinShadowDeclChecker) warn(ident *ast.Ident) { + c.ctx.Warn(ident, "shadowing of predeclared identifier: %s", ident) +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/builtinShadow_checker.go b/vendor/github.com/go-critic/go-critic/checkers/builtinShadow_checker.go new file mode 100644 index 000000000..1e1661deb --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/builtinShadow_checker.go @@ -0,0 +1,36 @@ +package checkers + +import ( + "go/ast" + + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" +) + +func init() { + var info linter.CheckerInfo + info.Name = "builtinShadow" + info.Tags = []string{"style", "opinionated"} + info.Summary = "Detects when predeclared identifiers are shadowed in assignments" + info.Before = `len := 10` + info.After = `length := 10` + + collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { + return astwalk.WalkerForLocalDef(&builtinShadowChecker{ctx: ctx}, ctx.TypesInfo), nil + }) +} + +type builtinShadowChecker struct { + astwalk.WalkHandler + ctx *linter.CheckerContext +} + +func (c *builtinShadowChecker) VisitLocalDef(name astwalk.Name, _ ast.Expr) { + if isBuiltin(name.ID.Name) { + c.warn(name.ID) + } +} + +func (c *builtinShadowChecker) warn(ident *ast.Ident) { + c.ctx.Warn(ident, "shadowing of predeclared identifier: %s", ident) +} diff --git 
a/vendor/github.com/go-critic/go-critic/checkers/captLocal_checker.go b/vendor/github.com/go-critic/go-critic/checkers/captLocal_checker.go new file mode 100644 index 000000000..d9b4b7e75 --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/captLocal_checker.go @@ -0,0 +1,49 @@ +package checkers + +import ( + "go/ast" + + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" +) + +func init() { + var info linter.CheckerInfo + info.Name = "captLocal" + info.Tags = []string{"style"} + info.Params = linter.CheckerParams{ + "paramsOnly": { + Value: true, + Usage: "whether to restrict checker to params only", + }, + } + info.Summary = "Detects capitalized names for local variables" + info.Before = `func f(IN int, OUT *int) (ERR error) {}` + info.After = `func f(in int, out *int) (err error) {}` + + collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { + c := &captLocalChecker{ctx: ctx} + c.paramsOnly = info.Params.Bool("paramsOnly") + return astwalk.WalkerForLocalDef(c, ctx.TypesInfo), nil + }) +} + +type captLocalChecker struct { + astwalk.WalkHandler + ctx *linter.CheckerContext + + paramsOnly bool +} + +func (c *captLocalChecker) VisitLocalDef(def astwalk.Name, _ ast.Expr) { + if c.paramsOnly && def.Kind != astwalk.NameParam { + return + } + if ast.IsExported(def.ID.Name) { + c.warn(def.ID) + } +} + +func (c *captLocalChecker) warn(id ast.Node) { + c.ctx.Warn(id, "`%s' should not be capitalized", id) +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/caseOrder_checker.go b/vendor/github.com/go-critic/go-critic/checkers/caseOrder_checker.go new file mode 100644 index 000000000..047ea4fee --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/caseOrder_checker.go @@ -0,0 +1,88 @@ +package checkers + +import ( + "go/ast" + "go/types" + + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" +) + +func init() { + var info linter.CheckerInfo + info.Name = "caseOrder" + info.Tags = []string{"diagnostic"} + info.Summary = "Detects erroneous case order inside switch statements" + info.Before = ` +switch x.(type) { +case ast.Expr: + fmt.Println("expr") +case *ast.BasicLit: + fmt.Println("basic lit") // Never executed +}` + info.After = ` +switch x.(type) { +case *ast.BasicLit: + fmt.Println("basic lit") // Now reachable +case ast.Expr: + fmt.Println("expr") +}` + + collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { + return astwalk.WalkerForStmt(&caseOrderChecker{ctx: ctx}), nil + }) +} + +type caseOrderChecker struct { + astwalk.WalkHandler + ctx *linter.CheckerContext +} + +func (c *caseOrderChecker) VisitStmt(stmt ast.Stmt) { + switch stmt := stmt.(type) { + case *ast.TypeSwitchStmt: + c.checkTypeSwitch(stmt) + case *ast.SwitchStmt: + c.checkSwitch(stmt) + } +} + +func (c *caseOrderChecker) checkTypeSwitch(s *ast.TypeSwitchStmt) { + type ifaceType struct { + node ast.Node + typ *types.Interface + } + var ifaces []ifaceType // Interfaces seen so far + for _, cc := range s.Body.List { + cc := cc.(*ast.CaseClause) + for _, x := range cc.List { + typ := c.ctx.TypeOf(x) + if typ == linter.UnknownType { + c.warnUnknownType(cc, x) + return + } + for _, iface := range ifaces { + if types.Implements(typ, iface.typ) { + c.warnTypeSwitch(cc, x, iface.node) + break + } + } + if iface, ok := typ.Underlying().(*types.Interface); ok { + ifaces = append(ifaces, ifaceType{node: x, typ: 
iface}) + } + } + } +} + +func (c *caseOrderChecker) warnTypeSwitch(cause, concrete, iface ast.Node) { + c.ctx.Warn(cause, "case %s must go before the %s case", concrete, iface) +} + +func (c *caseOrderChecker) warnUnknownType(cause, concrete ast.Node) { + c.ctx.Warn(cause, "type is not defined %s", concrete) +} + +func (c *caseOrderChecker) checkSwitch(s *ast.SwitchStmt) { + // TODO(quasilyte): can handle expression cases that overlap. + // Cases that have narrower value range should go before wider ones. +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/checkers.go b/vendor/github.com/go-critic/go-critic/checkers/checkers.go new file mode 100644 index 000000000..0c2ebc00c --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/checkers.go @@ -0,0 +1,19 @@ +// Package checkers is a gocritic linter main checkers collection. +package checkers + +import ( + "os" + + "github.com/go-critic/go-critic/framework/linter" +) + +var collection = &linter.CheckerCollection{ + URL: "https://github.com/go-critic/go-critic/checkers", +} + +var debug = func() func() bool { + v := os.Getenv("DEBUG") != "" + return func() bool { + return v + } +}() diff --git a/vendor/github.com/go-critic/go-critic/checkers/codegenComment_checker.go b/vendor/github.com/go-critic/go-critic/checkers/codegenComment_checker.go new file mode 100644 index 000000000..52a72d28c --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/codegenComment_checker.go @@ -0,0 +1,61 @@ +package checkers + +import ( + "go/ast" + "regexp" + "strings" + + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" +) + +func init() { + var info linter.CheckerInfo + info.Name = "codegenComment" + info.Tags = []string{"diagnostic"} + info.Summary = "Detects malformed 'code generated' file comments" + info.Before = `// This file was automatically generated by foogen` + info.After = `// Code generated by foogen. DO NOT EDIT.` + + collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { + patterns := []string{ + "this (?:file|code) (?:was|is) auto(?:matically)? generated", + "this (?:file|code) (?:was|is) generated automatically", + "this (?:file|code) (?:was|is) generated by", + "this (?:file|code) (?:was|is) (?:auto(?:matically)? )?generated", + "this (?:file|code) (?:was|is) generated", + "code in this file (?:was|is) auto(?:matically)? generated", + "generated (?:file|code) - do not edit", + // TODO(quasilyte): more of these. 
+ } + re := regexp.MustCompile("(?i)" + strings.Join(patterns, "|")) + return &codegenCommentChecker{ + ctx: ctx, + badCommentRE: re, + }, nil + }) +} + +type codegenCommentChecker struct { + astwalk.WalkHandler + ctx *linter.CheckerContext + + badCommentRE *regexp.Regexp +} + +func (c *codegenCommentChecker) WalkFile(f *ast.File) { + if f.Doc == nil { + return + } + + for _, comment := range f.Doc.List { + if c.badCommentRE.MatchString(comment.Text) { + c.warn(comment) + return + } + } +} + +func (c *codegenCommentChecker) warn(cause ast.Node) { + c.ctx.Warn(cause, "comment should match `Code generated .* DO NOT EDIT.` regexp") +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/commentFormatting_checker.go b/vendor/github.com/go-critic/go-critic/checkers/commentFormatting_checker.go new file mode 100644 index 000000000..d4939f3f6 --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/commentFormatting_checker.go @@ -0,0 +1,79 @@ +package checkers + +import ( + "go/ast" + "regexp" + "strings" + "unicode" + "unicode/utf8" + + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" +) + +func init() { + var info linter.CheckerInfo + info.Name = "commentFormatting" + info.Tags = []string{"style"} + info.Summary = "Detects comments with non-idiomatic formatting" + info.Before = `//This is a comment` + info.After = `// This is a comment` + + collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { + parts := []string{ + `^//go:generate .*$`, // e.g.: go:generate value + `^//[\w-]+:.*$`, // e.g.: key: value + `^//nolint\b`, // e.g.: nolint + `^//line /.*:\d+`, // e.g.: line /path/to/file:123 + `^//export \w+$`, // e.g.: export Foo + } + pat := "(?m)" + strings.Join(parts, "|") + pragmaRE := regexp.MustCompile(pat) + return astwalk.WalkerForComment(&commentFormattingChecker{ + ctx: ctx, + pragmaRE: pragmaRE, + }), nil + }) +} + +type commentFormattingChecker struct { + astwalk.WalkHandler + ctx *linter.CheckerContext + + pragmaRE *regexp.Regexp +} + +func (c *commentFormattingChecker) VisitComment(cg *ast.CommentGroup) { + if strings.HasPrefix(cg.List[0].Text, "/*") { + return + } + for _, comment := range cg.List { + if len(comment.Text) <= len("// ") { + continue + } + if c.pragmaRE.MatchString(comment.Text) { + continue + } + + // Make a decision based on a first comment text rune. + r, _ := utf8.DecodeRuneInString(comment.Text[len("//"):]) + if !c.specialChar(r) && !unicode.IsSpace(r) { + c.warn(comment) + return + } + } +} + +func (c *commentFormattingChecker) specialChar(r rune) bool { + // Permitted list to avoid false-positives. 
+ switch r { + case '+', '-', '#', '!': + return true + default: + return false + } +} + +func (c *commentFormattingChecker) warn(comment *ast.Comment) { + c.ctx.Warn(comment, "put a space between `//` and comment text") +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/commentedOutCode_checker.go b/vendor/github.com/go-critic/go-critic/checkers/commentedOutCode_checker.go new file mode 100644 index 000000000..554e0621f --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/commentedOutCode_checker.go @@ -0,0 +1,157 @@ +package checkers + +import ( + "fmt" + "go/ast" + "go/token" + "regexp" + "strings" + + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" + "github.com/go-toolsmith/strparse" +) + +func init() { + var info linter.CheckerInfo + info.Name = "commentedOutCode" + info.Tags = []string{"diagnostic", "experimental"} + info.Summary = "Detects commented-out code inside function bodies" + info.Before = ` +// fmt.Println("Debugging hard") +foo(1, 2)` + info.After = `foo(1, 2)` + + collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { + return astwalk.WalkerForLocalComment(&commentedOutCodeChecker{ + ctx: ctx, + notQuiteFuncCall: regexp.MustCompile(`\w+\s+\([^)]*\)\s*$`), + }), nil + }) +} + +type commentedOutCodeChecker struct { + astwalk.WalkHandler + ctx *linter.CheckerContext + fn *ast.FuncDecl + + notQuiteFuncCall *regexp.Regexp +} + +func (c *commentedOutCodeChecker) EnterFunc(fn *ast.FuncDecl) bool { + c.fn = fn // Need to store current function inside checker context + return fn.Body != nil +} + +func (c *commentedOutCodeChecker) VisitLocalComment(cg *ast.CommentGroup) { + s := cg.Text() // Collect text once + + // We do multiple heuristics to avoid false positives. + // Many things can be improved here. + + markers := []string{ + "TODO", // TODO comments with code are permitted. + + // "http://" is interpreted as a label with comment. + // There are other protocols we might want to include. + "http://", + "https://", + + "e.g. ", // Clearly not a "selector expr" (mostly due to extra space) + } + for _, m := range markers { + if strings.Contains(s, m) { + return + } + } + + // Some very short comment that can be skipped. + // Usually triggering on these results in false positive. + // Unless there is a very popular call like print/println. + cond := len(s) < len("quite too short") && + !strings.Contains(s, "print") && + !strings.Contains(s, "fmt.") && + !strings.Contains(s, "log.") + if cond { + return + } + + // Almost looks like a commented-out function call, + // but there is a whitespace between function name and + // parameters list. Skip these to avoid false positives. + if c.notQuiteFuncCall.MatchString(s) { + return + } + + stmt := strparse.Stmt(s) + + if c.isPermittedStmt(stmt) { + return + } + + if stmt != strparse.BadStmt { + c.warn(cg) + return + } + + // Don't try to parse one-liner as block statement + if len(cg.List) == 1 && !strings.Contains(s, "\n") { + return + } + + // Some attempts to avoid false positives. + if c.skipBlock(s) { + return + } + + // Add braces to make block statement from + // multiple statements. + stmt = strparse.Stmt(fmt.Sprintf("{ %s }", s)) + + if stmt, ok := stmt.(*ast.BlockStmt); ok && len(stmt.List) != 0 { + c.warn(cg) + } +} + +func (c *commentedOutCodeChecker) skipBlock(s string) bool { + lines := strings.Split(s, "\n") // There is at least 1 line, that's invariant + + // Special example test block. 
+ if isExampleTestFunc(c.fn) && strings.Contains(lines[0], "Output:") { + return true + } + + return false +} + +func (c *commentedOutCodeChecker) isPermittedStmt(stmt ast.Stmt) bool { + switch stmt := stmt.(type) { + case *ast.ExprStmt: + return c.isPermittedExpr(stmt.X) + case *ast.LabeledStmt: + return c.isPermittedStmt(stmt.Stmt) + case *ast.DeclStmt: + decl := stmt.Decl.(*ast.GenDecl) + return decl.Tok == token.TYPE + default: + return false + } +} + +func (c *commentedOutCodeChecker) isPermittedExpr(x ast.Expr) bool { + // Permit anything except expressions that can be used + // with complete result discarding. + switch x := x.(type) { + case *ast.CallExpr: + return false + case *ast.UnaryExpr: + // "<-" channel receive is not permitted. + return x.Op != token.ARROW + default: + return true + } +} + +func (c *commentedOutCodeChecker) warn(cause ast.Node) { + c.ctx.Warn(cause, "may want to remove commented-out code") +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/commentedOutImport_checker.go b/vendor/github.com/go-critic/go-critic/checkers/commentedOutImport_checker.go new file mode 100644 index 000000000..3c086569b --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/commentedOutImport_checker.go @@ -0,0 +1,76 @@ +package checkers + +import ( + "go/ast" + "go/token" + "regexp" + + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" +) + +func init() { + var info linter.CheckerInfo + info.Name = "commentedOutImport" + info.Tags = []string{"style", "experimental"} + info.Summary = "Detects commented-out imports" + info.Before = ` +import ( + "fmt" + //"os" +)` + info.After = ` +import ( + "fmt" +)` + + collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { + const pattern = `(?m)^(?://|/\*)?\s*"([a-zA-Z0-9_/]+)"\s*(?:\*/)?$` + return &commentedOutImportChecker{ + ctx: ctx, + importStringRE: regexp.MustCompile(pattern), + }, nil + }) +} + +type commentedOutImportChecker struct { + astwalk.WalkHandler + ctx *linter.CheckerContext + + importStringRE *regexp.Regexp +} + +func (c *commentedOutImportChecker) WalkFile(f *ast.File) { + // TODO(quasilyte): handle commented-out import spec, + // for example: // import "errors". + + for _, decl := range f.Decls { + decl, ok := decl.(*ast.GenDecl) + if !ok || decl.Tok != token.IMPORT { + // Import decls can only be in the beginning of the file. + // If we've met some other decl, there will be no more + // import decls. + break + } + + // Find comments inside this import decl span. + for _, cg := range f.Comments { + if cg.Pos() > decl.Rparen { + break // Below the decl, stop. + } + if cg.Pos() < decl.Lparen { + continue // Before the decl, skip. 
+ } + + for _, comment := range cg.List { + for _, m := range c.importStringRE.FindAllStringSubmatch(comment.Text, -1) { + c.warn(comment, m[1]) + } + } + } + } +} + +func (c *commentedOutImportChecker) warn(cause ast.Node, path string) { + c.ctx.Warn(cause, "remove commented-out %q import", path) +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/defaultCaseOrder_checker.go b/vendor/github.com/go-critic/go-critic/checkers/defaultCaseOrder_checker.go new file mode 100644 index 000000000..e06944d62 --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/defaultCaseOrder_checker.go @@ -0,0 +1,65 @@ +package checkers + +import ( + "go/ast" + + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" +) + +func init() { + var info linter.CheckerInfo + info.Name = "defaultCaseOrder" + info.Tags = []string{"style"} + info.Summary = "Detects when default case in switch isn't on 1st or last position" + info.Before = ` +switch { +case x > y: + // ... +default: // <- not the best position + // ... +case x == 10: + // ... +}` + info.After = ` +switch { +case x > y: + // ... +case x == 10: + // ... +default: // <- last case (could also be the first one) + // ... +}` + + collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { + return astwalk.WalkerForStmt(&defaultCaseOrderChecker{ctx: ctx}), nil + }) +} + +type defaultCaseOrderChecker struct { + astwalk.WalkHandler + ctx *linter.CheckerContext +} + +func (c *defaultCaseOrderChecker) VisitStmt(stmt ast.Stmt) { + swtch, ok := stmt.(*ast.SwitchStmt) + if !ok { + return + } + for i, stmt := range swtch.Body.List { + caseStmt, ok := stmt.(*ast.CaseClause) + if !ok { + continue + } + // is `default` case + if caseStmt.List == nil { + if i != 0 && i != len(swtch.Body.List)-1 { + c.warn(caseStmt) + } + } + } +} + +func (c *defaultCaseOrderChecker) warn(cause *ast.CaseClause) { + c.ctx.Warn(cause, "consider to make `default` case as first or as last case") +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/deferUnlambda_checker.go b/vendor/github.com/go-critic/go-critic/checkers/deferUnlambda_checker.go new file mode 100644 index 000000000..b312bfb68 --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/deferUnlambda_checker.go @@ -0,0 +1,94 @@ +package checkers + +import ( + "go/ast" + "go/types" + + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" + "github.com/go-toolsmith/astcast" +) + +func init() { + var info linter.CheckerInfo + info.Name = "deferUnlambda" + info.Tags = []string{"style", "experimental"} + info.Summary = "Detects deferred function literals that can be simplified" + info.Before = `defer func() { f() }()` + info.After = `f()` + + collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { + return astwalk.WalkerForStmt(&deferUnlambdaChecker{ctx: ctx}), nil + }) +} + +type deferUnlambdaChecker struct { + astwalk.WalkHandler + ctx *linter.CheckerContext +} + +func (c *deferUnlambdaChecker) VisitStmt(x ast.Stmt) { + def, ok := x.(*ast.DeferStmt) + if !ok { + return + } + + // We don't analyze deferred function args. + // Most deferred calls don't have them, so it's not a big deal to skip them. 
+ if len(def.Call.Args) != 0 { + return + } + + fn, ok := def.Call.Fun.(*ast.FuncLit) + if !ok { + return + } + + if len(fn.Body.List) != 1 { + return + } + + call, ok := astcast.ToExprStmt(fn.Body.List[0]).X.(*ast.CallExpr) + if !ok || !c.isFunctionCall(call) { + return + } + + // Skip recover() as it can't be moved outside of the lambda. + // Skip panic() to avoid affecting the stack trace. + switch qualifiedName(call.Fun) { + case "recover", "panic": + return + } + + for _, arg := range call.Args { + if !c.isConstExpr(arg) { + return + } + } + + c.warn(def, call) +} + +func (c *deferUnlambdaChecker) isFunctionCall(e *ast.CallExpr) bool { + switch fnExpr := e.Fun.(type) { + case *ast.Ident: + return true + case *ast.SelectorExpr: + x, ok := fnExpr.X.(*ast.Ident) + if !ok { + return false + } + _, ok = c.ctx.TypesInfo.ObjectOf(x).(*types.PkgName) + return ok + default: + return false + } +} + +func (c *deferUnlambdaChecker) isConstExpr(e ast.Expr) bool { + return c.ctx.TypesInfo.Types[e].Value != nil +} + +func (c *deferUnlambdaChecker) warn(cause, suggestion ast.Node) { + c.ctx.Warn(cause, "can rewrite as `defer %s`", suggestion) +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/deprecatedComment_checker.go b/vendor/github.com/go-critic/go-critic/checkers/deprecatedComment_checker.go new file mode 100644 index 000000000..f60e58b58 --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/deprecatedComment_checker.go @@ -0,0 +1,149 @@ +package checkers + +import ( + "go/ast" + "regexp" + "strings" + + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" +) + +func init() { + var info linter.CheckerInfo + info.Name = "deprecatedComment" + info.Tags = []string{"diagnostic"} + info.Summary = "Detects malformed 'deprecated' doc-comments" + info.Before = ` +// deprecated, use FuncNew instead +func FuncOld() int` + info.After = ` +// Deprecated: use FuncNew instead +func FuncOld() int` + + collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { + c := &deprecatedCommentChecker{ctx: ctx} + + c.commonPatterns = []*regexp.Regexp{ + regexp.MustCompile(`(?i)this (?:function|type) is deprecated`), + regexp.MustCompile(`(?i)deprecated[.!]? use \S* instead`), + regexp.MustCompile(`(?i)\[\[deprecated\]\].*`), + regexp.MustCompile(`(?i)note: deprecated\b.*`), + regexp.MustCompile(`(?i)deprecated in.*`), + // TODO(quasilyte): more of these? + } + + // TODO(quasilyte): may want to generate this list programmatically. + // + // TODO(quasilyte): currently it only handles a single missing letter. + // Might want to handle other kinds of common misspell/typo kinds. + c.commonTypos = []string{ + "Dprecated: ", + "Derecated: ", + "Depecated: ", + "Deprcated: ", + "Depreated: ", + "Deprected: ", + "Deprecaed: ", + "Deprecatd: ", + "Deprecate: ", + "Derpecate: ", + "Derpecated: ", + "Depreacted: ", + } + for i := range c.commonTypos { + c.commonTypos[i] = strings.ToUpper(c.commonTypos[i]) + } + + return astwalk.WalkerForDocComment(c), nil + }) +} + +type deprecatedCommentChecker struct { + astwalk.WalkHandler + ctx *linter.CheckerContext + + commonPatterns []*regexp.Regexp + commonTypos []string +} + +func (c *deprecatedCommentChecker) VisitDocComment(doc *ast.CommentGroup) { + // There are 3 accepted forms of deprecation comments: + // + // 1. inline, that can't be handled with a DocCommentVisitor. + // Note that "Deprecated: " may not even be the comment prefix there. 
+ // Example: "The line number in the input. Deprecated: Kept for compatibility." + // TODO(quasilyte): fix it. + // + // 2. Longer form-1. It's a doc-comment that only contains "deprecation" notice. + // + // 3. Like form-2, but may also include doc-comment text. + // Distinguished by an empty line. + // + // See https://github.com/golang/go/issues/10909#issuecomment-136492606. + // + // It's desirable to see how people make mistakes with the format, + // this is why there is currently no special treatment for these cases. + // TODO(quasilyte): do more audits and grow the negative tests suite. + // + // TODO(quasilyte): there are also multi-line deprecation comments. + + for _, comment := range doc.List { + if strings.HasPrefix(comment.Text, "/*") { + // TODO(quasilyte): handle multi-line doc comments. + continue + } + l := comment.Text[len("//"):] + if len(l) < len("Deprecated: ") { + continue + } + l = strings.TrimSpace(l) + + // Check whether someone messed up with a prefix casing. + upcase := strings.ToUpper(l) + if strings.HasPrefix(upcase, "DEPRECATED: ") && !strings.HasPrefix(l, "Deprecated: ") { + c.warnCasing(comment, l) + return + } + + // Check is someone used comma instead of a colon. + if strings.HasPrefix(l, "Deprecated, ") { + c.warnComma(comment) + return + } + + // Check for other commonly used patterns. + for _, pat := range c.commonPatterns { + if pat.MatchString(l) { + c.warnPattern(comment) + return + } + } + + // Detect some simple typos. + for _, prefixWithTypo := range c.commonTypos { + if strings.HasPrefix(upcase, prefixWithTypo) { + c.warnTypo(comment, l) + return + } + } + } +} + +func (c *deprecatedCommentChecker) warnCasing(cause ast.Node, line string) { + prefix := line[:len("DEPRECATED: ")] + c.ctx.Warn(cause, "use `Deprecated: ` (note the casing) instead of `%s`", prefix) +} + +func (c *deprecatedCommentChecker) warnPattern(cause ast.Node) { + c.ctx.Warn(cause, "the proper format is `Deprecated: `") +} + +func (c *deprecatedCommentChecker) warnComma(cause ast.Node) { + c.ctx.Warn(cause, "use `:` instead of `,` in `Deprecated, `") +} + +func (c *deprecatedCommentChecker) warnTypo(cause ast.Node, line string) { + word := strings.Split(line, ":")[0] + c.ctx.Warn(cause, "typo in `%s`; should be `Deprecated`", word) +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/docStub_checker.go b/vendor/github.com/go-critic/go-critic/checkers/docStub_checker.go new file mode 100644 index 000000000..d8aaaf743 --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/docStub_checker.go @@ -0,0 +1,95 @@ +package checkers + +import ( + "go/ast" + "go/token" + "regexp" + "strings" + + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" +) + +func init() { + var info linter.CheckerInfo + info.Name = "docStub" + info.Tags = []string{"style", "experimental"} + info.Summary = "Detects comments that silence go lint complaints about doc-comment" + info.Before = ` +// Foo ... +func Foo() { +}` + info.After = ` +// (A) - remove the doc-comment stub +func Foo() {} +// (B) - replace it with meaningful comment +// Foo is a demonstration-only function. 
+func Foo() {}` + + collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { + re := `(?i)^\.\.\.$|^\.$|^xxx\.?$|^whatever\.?$` + c := &docStubChecker{ + ctx: ctx, + stubCommentRE: regexp.MustCompile(re), + } + return c, nil + }) +} + +type docStubChecker struct { + astwalk.WalkHandler + ctx *linter.CheckerContext + + stubCommentRE *regexp.Regexp +} + +func (c *docStubChecker) WalkFile(f *ast.File) { + for _, decl := range f.Decls { + switch decl := decl.(type) { + case *ast.FuncDecl: + c.visitDoc(decl, decl.Name, decl.Doc, false) + case *ast.GenDecl: + if decl.Tok != token.TYPE { + continue + } + if len(decl.Specs) == 1 { + spec := decl.Specs[0].(*ast.TypeSpec) + // Only 1 spec, use doc from the decl itself. + c.visitDoc(spec, spec.Name, decl.Doc, true) + } + // N specs, use per-spec doc. + for _, spec := range decl.Specs { + spec := spec.(*ast.TypeSpec) + c.visitDoc(spec, spec.Name, spec.Doc, true) + } + } + } +} + +func (c *docStubChecker) visitDoc(decl ast.Node, sym *ast.Ident, doc *ast.CommentGroup, article bool) { + if !sym.IsExported() || doc == nil { + return + } + line := strings.TrimSpace(doc.List[0].Text[len("//"):]) + if article { + // Skip optional article. + for _, a := range []string{"The ", "An ", "A "} { + if strings.HasPrefix(line, a) { + line = line[len(a):] + break + } + } + } + if !strings.HasPrefix(line, sym.Name) { + return + } + line = strings.TrimSpace(line[len(sym.Name):]) + // Now try to detect the "stub" part. + if c.stubCommentRE.MatchString(line) { + c.warn(decl) + } +} + +func (c *docStubChecker) warn(cause ast.Node) { + c.ctx.Warn(cause, "silencing go lint doc-comment warnings is unadvised") +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/dupArg_checker.go b/vendor/github.com/go-critic/go-critic/checkers/dupArg_checker.go new file mode 100644 index 000000000..9f116d781 --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/dupArg_checker.go @@ -0,0 +1,133 @@ +package checkers + +import ( + "go/ast" + "go/types" + + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" + "github.com/go-toolsmith/astcast" + "github.com/go-toolsmith/astequal" +) + +func init() { + var info linter.CheckerInfo + info.Name = "dupArg" + info.Tags = []string{"diagnostic"} + info.Summary = "Detects suspicious duplicated arguments" + info.Before = `copy(dst, dst)` + info.After = `copy(dst, src)` + + collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { + c := &dupArgChecker{ctx: ctx} + // newMatcherFunc returns a function that matches a call if + // args[xIndex] and args[yIndex] are equal. + newMatcherFunc := func(xIndex, yIndex int) func(*ast.CallExpr) bool { + return func(call *ast.CallExpr) bool { + if len(call.Args) <= xIndex || len(call.Args) <= yIndex { + return false + } + x := call.Args[xIndex] + y := call.Args[yIndex] + return astequal.Expr(x, y) + } + } + + // m maps pattern string to a matching function. + // String patterns are used for documentation purposes (readability). + m := map[string]func(*ast.CallExpr) bool{ + "(x, x, ...)": newMatcherFunc(0, 1), + "(x, _, x, ...)": newMatcherFunc(0, 2), + "(_, x, x, ...)": newMatcherFunc(1, 2), + } + + // TODO(quasilyte): handle x.Equal(x) cases. + // Example: *math/Big.Int.Cmp method. + + // TODO(quasilyte): more perky mode that will also + // report things like io.Copy(x, x). 
+ // Probably safe thing to do even without that option + // if `x` is not interface (requires type checks + // that are not incorporated into this checker yet). + + c.matchers = map[string]func(*ast.CallExpr) bool{ + "copy": m["(x, x, ...)"], + + "math.Max": m["(x, x, ...)"], + "math.Min": m["(x, x, ...)"], + + "reflect.Copy": m["(x, x, ...)"], + "reflect.DeepEqual": m["(x, x, ...)"], + + "strings.Contains": m["(x, x, ...)"], + "strings.Compare": m["(x, x, ...)"], + "strings.EqualFold": m["(x, x, ...)"], + "strings.HasPrefix": m["(x, x, ...)"], + "strings.HasSuffix": m["(x, x, ...)"], + "strings.Index": m["(x, x, ...)"], + "strings.LastIndex": m["(x, x, ...)"], + "strings.Split": m["(x, x, ...)"], + "strings.SplitAfter": m["(x, x, ...)"], + "strings.SplitAfterN": m["(x, x, ...)"], + "strings.SplitN": m["(x, x, ...)"], + "strings.Replace": m["(_, x, x, ...)"], + "strings.ReplaceAll": m["(_, x, x, ...)"], + + "bytes.Contains": m["(x, x, ...)"], + "bytes.Compare": m["(x, x, ...)"], + "bytes.Equal": m["(x, x, ...)"], + "bytes.EqualFold": m["(x, x, ...)"], + "bytes.HasPrefix": m["(x, x, ...)"], + "bytes.HasSuffix": m["(x, x, ...)"], + "bytes.Index": m["(x, x, ...)"], + "bytes.LastIndex": m["(x, x, ...)"], + "bytes.Split": m["(x, x, ...)"], + "bytes.SplitAfter": m["(x, x, ...)"], + "bytes.SplitAfterN": m["(x, x, ...)"], + "bytes.SplitN": m["(x, x, ...)"], + "bytes.Replace": m["(_, x, x, ...)"], + "bytes.ReplaceAll": m["(_, x, x, ...)"], + + "types.Identical": m["(x, x, ...)"], + "types.IdenticalIgnoreTags": m["(x, x, ...)"], + + "draw.Draw": m["(x, _, x, ...)"], + + // TODO(quasilyte): more of these. + } + return astwalk.WalkerForExpr(c), nil + }) +} + +type dupArgChecker struct { + astwalk.WalkHandler + ctx *linter.CheckerContext + + matchers map[string]func(*ast.CallExpr) bool +} + +func (c *dupArgChecker) VisitExpr(expr ast.Expr) { + call, ok := expr.(*ast.CallExpr) + if !ok { + return + } + + // TODO(quasilyte): this kind of check is needed in multiple + // places and the code is somewhat duplicated around. + // We probably need to stop using qualifiedName for non-experimental checkers. 
+ if calledExpr, ok := call.Fun.(*ast.SelectorExpr); ok { + obj, ok := c.ctx.TypesInfo.ObjectOf(astcast.ToIdent(calledExpr.X)).(*types.PkgName) + if !ok || !isStdlibPkg(obj.Imported()) { + return + } + } + + m := c.matchers[qualifiedName(call.Fun)] + if m != nil && m(call) { + c.warn(call) + } +} + +func (c *dupArgChecker) warn(cause ast.Node) { + c.ctx.Warn(cause, "suspicious duplicated args in `%s`", cause) +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/dupBranchBody_checker.go b/vendor/github.com/go-critic/go-critic/checkers/dupBranchBody_checker.go new file mode 100644 index 000000000..83de50528 --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/dupBranchBody_checker.go @@ -0,0 +1,58 @@ +package checkers + +import ( + "go/ast" + + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" + "github.com/go-toolsmith/astequal" +) + +func init() { + var info linter.CheckerInfo + info.Name = "dupBranchBody" + info.Tags = []string{"diagnostic"} + info.Summary = "Detects duplicated branch bodies inside conditional statements" + info.Before = ` +if cond { + println("cond=true") +} else { + println("cond=true") +}` + info.After = ` +if cond { + println("cond=true") +} else { + println("cond=false") +}` + + collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { + return astwalk.WalkerForStmt(&dupBranchBodyChecker{ctx: ctx}), nil + }) +} + +type dupBranchBodyChecker struct { + astwalk.WalkHandler + ctx *linter.CheckerContext +} + +func (c *dupBranchBodyChecker) VisitStmt(stmt ast.Stmt) { + // TODO(quasilyte): extend to check switch statements as well. + // Should be very careful with type switches. + + if stmt, ok := stmt.(*ast.IfStmt); ok { + c.checkIf(stmt) + } +} + +func (c *dupBranchBodyChecker) checkIf(stmt *ast.IfStmt) { + thenBody := stmt.Body + elseBody, ok := stmt.Else.(*ast.BlockStmt) + if ok && astequal.Stmt(thenBody, elseBody) { + c.warnIf(stmt) + } +} + +func (c *dupBranchBodyChecker) warnIf(cause ast.Node) { + c.ctx.Warn(cause, "both branches in if statement has same body") +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/dupCase_checker.go b/vendor/github.com/go-critic/go-critic/checkers/dupCase_checker.go new file mode 100644 index 000000000..0c1962682 --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/dupCase_checker.go @@ -0,0 +1,57 @@ +package checkers + +import ( + "go/ast" + + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/checkers/internal/lintutil" + "github.com/go-critic/go-critic/framework/linter" +) + +func init() { + var info linter.CheckerInfo + info.Name = "dupCase" + info.Tags = []string{"diagnostic"} + info.Summary = "Detects duplicated case clauses inside switch statements" + info.Before = ` +switch x { +case ys[0], ys[1], ys[2], ys[0], ys[4]: +}` + info.After = ` +switch x { +case ys[0], ys[1], ys[2], ys[3], ys[4]: +}` + + collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { + return astwalk.WalkerForStmt(&dupCaseChecker{ctx: ctx}), nil + }) +} + +type dupCaseChecker struct { + astwalk.WalkHandler + ctx *linter.CheckerContext + + astSet lintutil.AstSet +} + +func (c *dupCaseChecker) VisitStmt(stmt ast.Stmt) { + if stmt, ok := stmt.(*ast.SwitchStmt); ok { + c.checkSwitch(stmt) + } +} + +func (c *dupCaseChecker) checkSwitch(stmt *ast.SwitchStmt) { + c.astSet.Clear() + for i := range stmt.Body.List { + cc := 
stmt.Body.List[i].(*ast.CaseClause) + for _, x := range cc.List { + if !c.astSet.Insert(x) { + c.warn(x) + } + } + } +} + +func (c *dupCaseChecker) warn(cause ast.Node) { + c.ctx.Warn(cause, "'case %s' is duplicated", cause) +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/dupImports_checker.go b/vendor/github.com/go-critic/go-critic/checkers/dupImports_checker.go new file mode 100644 index 000000000..54658eb9f --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/dupImports_checker.go @@ -0,0 +1,63 @@ +package checkers + +import ( + "fmt" + "go/ast" + + "github.com/go-critic/go-critic/framework/linter" +) + +func init() { + var info linter.CheckerInfo + info.Name = "dupImport" + info.Tags = []string{"style", "experimental"} + info.Summary = "Detects multiple imports of the same package under different aliases" + info.Before = ` +import ( + "fmt" + priting "fmt" // Imported the second time +)` + info.After = ` +import( + "fmt" +)` + + collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { + return &dupImportChecker{ctx: ctx}, nil + }) +} + +type dupImportChecker struct { + ctx *linter.CheckerContext +} + +func (c *dupImportChecker) WalkFile(f *ast.File) { + imports := make(map[string][]*ast.ImportSpec) + for _, importDcl := range f.Imports { + pkg := importDcl.Path.Value + imports[pkg] = append(imports[pkg], importDcl) + } + + for _, importList := range imports { + if len(importList) == 1 { + continue + } + c.warn(importList) + } +} + +func (c *dupImportChecker) warn(importList []*ast.ImportSpec) { + msg := fmt.Sprintf("package is imported %d times under different aliases on lines", len(importList)) + for idx, importDcl := range importList { + switch { + case idx == len(importList)-1: + msg += " and" + case idx > 0: + msg += "," + } + msg += fmt.Sprintf(" %d", c.ctx.FileSet.Position(importDcl.Pos()).Line) + } + for _, importDcl := range importList { + c.ctx.Warn(importDcl, msg) + } +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/dupSubExpr_checker.go b/vendor/github.com/go-critic/go-critic/checkers/dupSubExpr_checker.go new file mode 100644 index 000000000..00f8fd0eb --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/dupSubExpr_checker.go @@ -0,0 +1,102 @@ +package checkers + +import ( + "go/ast" + "go/token" + "go/types" + + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" + "github.com/go-toolsmith/astequal" + "github.com/go-toolsmith/typep" +) + +func init() { + var info linter.CheckerInfo + info.Name = "dupSubExpr" + info.Tags = []string{"diagnostic"} + info.Summary = "Detects suspicious duplicated sub-expressions" + info.Before = ` +sort.Slice(xs, func(i, j int) bool { + return xs[i].v < xs[i].v // Duplicated index +})` + info.After = ` +sort.Slice(xs, func(i, j int) bool { + return xs[i].v < xs[j].v +})` + + collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { + c := &dupSubExprChecker{ctx: ctx} + + ops := []struct { + op token.Token + float bool // Whether float args require special care + }{ + {op: token.LOR}, // x || x + {op: token.LAND}, // x && x + {op: token.OR}, // x | x + {op: token.AND}, // x & x + {op: token.XOR}, // x ^ x + {op: token.LSS}, // x < x + {op: token.GTR}, // x > x + {op: token.AND_NOT}, // x &^ x + {op: token.REM}, // x % x + + {op: token.EQL, float: true}, // x == x + {op: token.NEQ, float: true}, // x != x + {op: token.LEQ, float: true}, // x <= x + {op: token.GEQ, 
float: true}, // x >= x + {op: token.QUO, float: true}, // x / x + {op: token.SUB, float: true}, // x - x + } + + c.opSet = make(map[token.Token]bool) + c.floatOpsSet = make(map[token.Token]bool) + for _, opInfo := range ops { + c.opSet[opInfo.op] = true + if opInfo.float { + c.floatOpsSet[opInfo.op] = true + } + } + + return astwalk.WalkerForExpr(c), nil + }) +} + +type dupSubExprChecker struct { + astwalk.WalkHandler + ctx *linter.CheckerContext + + // opSet is a set of binary operations that do not make + // sense with duplicated (same) RHS and LHS. + opSet map[token.Token]bool + + floatOpsSet map[token.Token]bool +} + +func (c *dupSubExprChecker) VisitExpr(expr ast.Expr) { + if expr, ok := expr.(*ast.BinaryExpr); ok { + c.checkBinaryExpr(expr) + } +} + +func (c *dupSubExprChecker) checkBinaryExpr(expr *ast.BinaryExpr) { + if !c.opSet[expr.Op] { + return + } + if c.resultIsFloat(expr.X) && c.floatOpsSet[expr.Op] { + return + } + if typep.SideEffectFree(c.ctx.TypesInfo, expr) && c.opSet[expr.Op] && astequal.Expr(expr.X, expr.Y) { + c.warn(expr) + } +} + +func (c *dupSubExprChecker) resultIsFloat(expr ast.Expr) bool { + typ, ok := c.ctx.TypeOf(expr).(*types.Basic) + return ok && typ.Info()&types.IsFloat != 0 +} + +func (c *dupSubExprChecker) warn(cause *ast.BinaryExpr) { + c.ctx.Warn(cause, "suspicious identical LHS and RHS for `%s` operator", cause.Op) +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/elseif_checker.go b/vendor/github.com/go-critic/go-critic/checkers/elseif_checker.go new file mode 100644 index 000000000..d017ee6ca --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/elseif_checker.go @@ -0,0 +1,71 @@ +package checkers + +import ( + "go/ast" + + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" + "github.com/go-toolsmith/astp" +) + +func init() { + var info linter.CheckerInfo + info.Name = "elseif" + info.Tags = []string{"style"} + info.Params = linter.CheckerParams{ + "skipBalanced": { + Value: true, + Usage: "whether to skip balanced if-else pairs", + }, + } + info.Summary = "Detects else with nested if statement that can be replaced with else-if" + info.Before = ` +if cond1 { +} else { + if x := cond2; x { + } +}` + info.After = ` +if cond1 { +} else if x := cond2; x { +}` + + collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { + c := &elseifChecker{ctx: ctx} + c.skipBalanced = info.Params.Bool("skipBalanced") + return astwalk.WalkerForStmt(c), nil + }) +} + +type elseifChecker struct { + astwalk.WalkHandler + ctx *linter.CheckerContext + + skipBalanced bool +} + +func (c *elseifChecker) VisitStmt(stmt ast.Stmt) { + if stmt, ok := stmt.(*ast.IfStmt); ok { + elseBody, ok := stmt.Else.(*ast.BlockStmt) + if !ok || len(elseBody.List) != 1 { + return + } + innerIfStmt, ok := elseBody.List[0].(*ast.IfStmt) + if !ok { + return + } + balanced := len(stmt.Body.List) == 1 && + astp.IsIfStmt(stmt.Body.List[0]) + if balanced && c.skipBalanced { + return // Configured to skip balanced statements + } + if innerIfStmt.Else != nil { + return + } + c.warn(stmt.Else) + } +} + +func (c *elseifChecker) warn(cause ast.Node) { + c.ctx.Warn(cause, "can replace 'else {if cond {}}' with 'else if cond {}'") +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/emptyFallthrough_checker.go b/vendor/github.com/go-critic/go-critic/checkers/emptyFallthrough_checker.go new file mode 100644 index 000000000..ebb8dad45 --- /dev/null +++ 
b/vendor/github.com/go-critic/go-critic/checkers/emptyFallthrough_checker.go @@ -0,0 +1,70 @@ +package checkers + +import ( + "go/ast" + "go/token" + + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" +) + +func init() { + var info linter.CheckerInfo + info.Name = "emptyFallthrough" + info.Tags = []string{"style", "experimental"} + info.Summary = "Detects fallthrough that can be avoided by using multi case values" + info.Before = `switch kind { +case reflect.Int: + fallthrough +case reflect.Int32: + return Int +}` + info.After = `switch kind { +case reflect.Int, reflect.Int32: + return Int +}` + + collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { + return astwalk.WalkerForStmt(&emptyFallthroughChecker{ctx: ctx}), nil + }) +} + +type emptyFallthroughChecker struct { + astwalk.WalkHandler + ctx *linter.CheckerContext +} + +func (c *emptyFallthroughChecker) VisitStmt(stmt ast.Stmt) { + ss, ok := stmt.(*ast.SwitchStmt) + if !ok { + return + } + + prevCaseDefault := false + for i := len(ss.Body.List) - 1; i >= 0; i-- { + if cc, ok := ss.Body.List[i].(*ast.CaseClause); ok { + warn := false + if len(cc.Body) == 1 { + if bs, ok := cc.Body[0].(*ast.BranchStmt); ok && bs.Tok == token.FALLTHROUGH { + warn = true + if prevCaseDefault { + c.warnDefault(bs) + } else if cc.List != nil { + c.warn(bs) + } + } + } + if !warn { + prevCaseDefault = cc.List == nil + } + } + } +} + +func (c *emptyFallthroughChecker) warnDefault(cause ast.Node) { + c.ctx.Warn(cause, "remove empty case containing only fallthrough to default case") +} + +func (c *emptyFallthroughChecker) warn(cause ast.Node) { + c.ctx.Warn(cause, "replace empty case containing only fallthrough with expression list") +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/emptyStringTest_checker.go b/vendor/github.com/go-critic/go-critic/checkers/emptyStringTest_checker.go new file mode 100644 index 000000000..27ccbd2f2 --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/emptyStringTest_checker.go @@ -0,0 +1,58 @@ +package checkers + +import ( + "go/ast" + "go/token" + + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" + "github.com/go-toolsmith/astcast" + "github.com/go-toolsmith/astcopy" + "github.com/go-toolsmith/typep" +) + +func init() { + var info linter.CheckerInfo + info.Name = "emptyStringTest" + info.Tags = []string{"style", "experimental"} + info.Summary = "Detects empty string checks that can be written more idiomatically" + info.Before = `len(s) == 0` + info.After = `s == ""` + info.Note = "See https://dmitri.shuralyov.com/idiomatic-go#empty-string-check." 
+ + collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { + return astwalk.WalkerForExpr(&emptyStringTestChecker{ctx: ctx}), nil + }) +} + +type emptyStringTestChecker struct { + astwalk.WalkHandler + ctx *linter.CheckerContext +} + +func (c *emptyStringTestChecker) VisitExpr(e ast.Expr) { + cmp := astcast.ToBinaryExpr(e) + if cmp.Op != token.EQL && cmp.Op != token.NEQ { + return + } + lenCall := astcast.ToCallExpr(cmp.X) + if astcast.ToIdent(lenCall.Fun).Name != "len" { + return + } + s := lenCall.Args[0] + if !typep.HasStringProp(c.ctx.TypeOf(s)) { + return + } + zero := astcast.ToBasicLit(cmp.Y) + if zero.Value != "0" { + return + } + c.warn(cmp, s) +} + +func (c *emptyStringTestChecker) warn(cmp *ast.BinaryExpr, s ast.Expr) { + suggest := astcopy.BinaryExpr(cmp) + suggest.X = s + suggest.Y = &ast.BasicLit{Value: `""`} + c.ctx.Warn(cmp, "replace `%s` with `%s`", cmp, suggest) +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/equalFold_checker.go b/vendor/github.com/go-critic/go-critic/checkers/equalFold_checker.go new file mode 100644 index 000000000..13f7fdbe2 --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/equalFold_checker.go @@ -0,0 +1,87 @@ +package checkers + +import ( + "go/ast" + "go/token" + + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" + "github.com/go-toolsmith/astcast" + "github.com/go-toolsmith/astequal" +) + +func init() { + var info linter.CheckerInfo + info.Name = "equalFold" + info.Tags = []string{"performance", "experimental"} + info.Summary = "Detects unoptimal strings/bytes case-insensitive comparison" + info.Before = `strings.ToLower(x) == strings.ToLower(y)` + info.After = `strings.EqualFold(x, y)` + + collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { + return astwalk.WalkerForExpr(&equalFoldChecker{ctx: ctx}), nil + }) +} + +type equalFoldChecker struct { + astwalk.WalkHandler + ctx *linter.CheckerContext +} + +func (c *equalFoldChecker) VisitExpr(e ast.Expr) { + switch e := e.(type) { + case *ast.CallExpr: + c.checkBytes(e) + case *ast.BinaryExpr: + c.checkStrings(e) + } +} + +// uncaseCall simplifies lower(x) or upper(x) to x. +// If no simplification is applied, second return value is false. 
+func (c *equalFoldChecker) uncaseCall(x ast.Expr, lower, upper string) (ast.Expr, bool) { + call := astcast.ToCallExpr(x) + name := qualifiedName(call.Fun) + if name != lower && name != upper { + return x, false + } + return call.Args[0], true +} + +func (c *equalFoldChecker) checkBytes(expr *ast.CallExpr) { + if qualifiedName(expr.Fun) != "bytes.Equal" { + return + } + + x, ok1 := c.uncaseCall(expr.Args[0], "bytes.ToLower", "bytes.ToUpper") + y, ok2 := c.uncaseCall(expr.Args[1], "bytes.ToLower", "bytes.ToUpper") + if !ok1 && !ok2 { + return + } + if !astequal.Expr(x, y) { + c.warnBytes(expr, x, y) + } +} + +func (c *equalFoldChecker) checkStrings(expr *ast.BinaryExpr) { + if expr.Op != token.EQL && expr.Op != token.NEQ { + return + } + + x, ok1 := c.uncaseCall(expr.X, "strings.ToLower", "strings.ToUpper") + y, ok2 := c.uncaseCall(expr.Y, "strings.ToLower", "strings.ToUpper") + if !ok1 && !ok2 { + return + } + if !astequal.Expr(x, y) { + c.warnStrings(expr, x, y) + } +} + +func (c *equalFoldChecker) warnStrings(cause ast.Node, x, y ast.Expr) { + c.ctx.Warn(cause, "consider replacing with strings.EqualFold(%s, %s)", x, y) +} + +func (c *equalFoldChecker) warnBytes(cause ast.Node, x, y ast.Expr) { + c.ctx.Warn(cause, "consider replacing with bytes.EqualFold(%s, %s)", x, y) +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/evalOrder_checker.go b/vendor/github.com/go-critic/go-critic/checkers/evalOrder_checker.go new file mode 100644 index 000000000..6ba07fe86 --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/evalOrder_checker.go @@ -0,0 +1,87 @@ +package checkers + +import ( + "go/ast" + "go/token" + "go/types" + + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/checkers/internal/lintutil" + "github.com/go-critic/go-critic/framework/linter" + "github.com/go-toolsmith/astcast" + "github.com/go-toolsmith/astequal" + "github.com/go-toolsmith/typep" +) + +func init() { + var info linter.CheckerInfo + info.Name = "evalOrder" + info.Tags = []string{"diagnostic", "experimental"} + info.Summary = "Detects unwanted dependencies on the evaluation order" + info.Before = `return x, f(&x)` + info.After = ` +err := f(&x) +return x, err +` + + collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { + return astwalk.WalkerForStmt(&evalOrderChecker{ctx: ctx}), nil + }) +} + +type evalOrderChecker struct { + astwalk.WalkHandler + ctx *linter.CheckerContext +} + +func (c *evalOrderChecker) VisitStmt(stmt ast.Stmt) { + ret := astcast.ToReturnStmt(stmt) + if len(ret.Results) < 2 { + return + } + + // TODO(quasilyte): handle selector expressions like o.val in addition + // to bare identifiers. + addrTake := &ast.UnaryExpr{Op: token.AND} + for _, res := range ret.Results { + id, ok := res.(*ast.Ident) + if !ok { + continue + } + addrTake.X = id // addrTake is &id now + for _, res := range ret.Results { + call, ok := res.(*ast.CallExpr) + if !ok { + continue + } + + // 1. Check if there is a call in form of id.method() where + // method takes id by a pointer. + if sel, ok := call.Fun.(*ast.SelectorExpr); ok { + if astequal.Node(sel.X, id) && c.hasPtrRecv(sel.Sel) { + c.warn(call) + } + } + + // 2. Check that there is no call that uses &id as an argument. 
+ dependency := lintutil.ContainsNode(call, func(n ast.Node) bool { + return astequal.Node(addrTake, n) + }) + if dependency { + c.warn(call) + } + } + } +} + +func (c *evalOrderChecker) hasPtrRecv(fn *ast.Ident) bool { + sig, ok := c.ctx.TypeOf(fn).(*types.Signature) + if !ok { + return false + } + return typep.IsPointer(sig.Recv().Type()) +} + +func (c *evalOrderChecker) warn(call *ast.CallExpr) { + c.ctx.Warn(call, "may want to evaluate %s before the return statement", call) +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/exitAfterDefer_checker.go b/vendor/github.com/go-critic/go-critic/checkers/exitAfterDefer_checker.go new file mode 100644 index 000000000..63e0049f2 --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/exitAfterDefer_checker.go @@ -0,0 +1,84 @@ +package checkers + +import ( + "go/ast" + + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" + "github.com/go-toolsmith/astfmt" + "github.com/go-toolsmith/astp" + "golang.org/x/tools/go/ast/astutil" +) + +func init() { + var info linter.CheckerInfo + info.Name = "exitAfterDefer" + info.Tags = []string{"diagnostic"} + info.Summary = "Detects calls to exit/fatal inside functions that use defer" + info.Before = ` +defer os.Remove(filename) +if bad { + log.Fatalf("something bad happened") +}` + info.After = ` +defer os.Remove(filename) +if bad { + log.Printf("something bad happened") + return +}` + + collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { + return astwalk.WalkerForFuncDecl(&exitAfterDeferChecker{ctx: ctx}), nil + }) +} + +type exitAfterDeferChecker struct { + astwalk.WalkHandler + ctx *linter.CheckerContext +} + +func (c *exitAfterDeferChecker) VisitFuncDecl(fn *ast.FuncDecl) { + // TODO(quasilyte): handle goto and other kinds of flow that break + // the algorithm below that expects the latter statement to be + // executed after the ones that come before it. + + var deferStmt *ast.DeferStmt + pre := func(cur *astutil.Cursor) bool { + // Don't recurse into local anonymous functions. + return !astp.IsFuncLit(cur.Node()) + } + post := func(cur *astutil.Cursor) bool { + switch n := cur.Node().(type) { + case *ast.DeferStmt: + deferStmt = n + case *ast.CallExpr: + // See #995. We allow `defer os.Exit()` calls + // as it's harder to determine whether they're going + // to clutter anything without actually trying to + // simulate the defer stack + understanding the control flow. + // TODO: can we use CFG here? + if _, ok := cur.Parent().(*ast.DeferStmt); ok { + return true + } + if deferStmt != nil { + switch qualifiedName(n.Fun) { + case "log.Fatal", "log.Fatalf", "log.Fatalln", "os.Exit": + c.warn(n, deferStmt) + return false + } + } + } + return true + } + astutil.Apply(fn.Body, pre, post) +} + +func (c *exitAfterDeferChecker) warn(cause *ast.CallExpr, deferStmt *ast.DeferStmt) { + s := astfmt.Sprint(deferStmt) + if fnlit, ok := deferStmt.Call.Fun.(*ast.FuncLit); ok { + // To avoid long and multi-line warning messages, + // collapse the function literals. 
+ s = "defer " + astfmt.Sprint(fnlit.Type) + "{...}(...)" + } + c.ctx.Warn(cause, "%s will exit, and `%s` will not run", cause.Fun, s) +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/filepathJoin_checker.go b/vendor/github.com/go-critic/go-critic/checkers/filepathJoin_checker.go new file mode 100644 index 000000000..698f5366d --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/filepathJoin_checker.go @@ -0,0 +1,50 @@ +package checkers + +import ( + "go/ast" + "strings" + + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" + "github.com/go-toolsmith/astcast" +) + +func init() { + var info linter.CheckerInfo + info.Name = "filepathJoin" + info.Tags = []string{"diagnostic", "experimental"} + info.Summary = "Detects problems in filepath.Join() function calls" + info.Before = `filepath.Join("dir/", filename)` + info.After = `filepath.Join("dir", filename)` + + collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { + return astwalk.WalkerForExpr(&filepathJoinChecker{ctx: ctx}), nil + }) +} + +type filepathJoinChecker struct { + astwalk.WalkHandler + ctx *linter.CheckerContext +} + +func (c *filepathJoinChecker) VisitExpr(expr ast.Expr) { + call := astcast.ToCallExpr(expr) + if qualifiedName(call.Fun) != "filepath.Join" { + return + } + + for _, arg := range call.Args { + arg, ok := arg.(*ast.BasicLit) + if ok && c.hasSeparator(arg) { + c.warnSeparator(arg) + } + } +} + +func (c *filepathJoinChecker) hasSeparator(v *ast.BasicLit) bool { + return strings.ContainsAny(v.Value, `/\`) +} + +func (c *filepathJoinChecker) warnSeparator(sep ast.Expr) { + c.ctx.Warn(sep, "%s contains a path separator", sep) +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/flagDeref_checker.go b/vendor/github.com/go-critic/go-critic/checkers/flagDeref_checker.go new file mode 100644 index 000000000..3fe5e52fb --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/flagDeref_checker.go @@ -0,0 +1,65 @@ +package checkers + +import ( + "go/ast" + + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" +) + +func init() { + var info linter.CheckerInfo + info.Name = "flagDeref" + info.Tags = []string{"diagnostic"} + info.Summary = "Detects immediate dereferencing of `flag` package pointers" + info.Details = "Suggests to use pointer to array to avoid the copy using `&` on range expression." 
+ info.Before = `b := *flag.Bool("b", false, "b docs")` + info.After = ` +var b bool +flag.BoolVar(&b, "b", false, "b docs")` + info.Note = ` +Dereferencing returned pointers will lead to hard to find errors +where flag values are not updated after flag.Parse().` + + collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { + c := &flagDerefChecker{ + ctx: ctx, + flagPtrFuncs: map[string]bool{ + "flag.Bool": true, + "flag.Duration": true, + "flag.Float64": true, + "flag.Int": true, + "flag.Int64": true, + "flag.String": true, + "flag.Uint": true, + "flag.Uint64": true, + }, + } + return astwalk.WalkerForExpr(c), nil + }) +} + +type flagDerefChecker struct { + astwalk.WalkHandler + ctx *linter.CheckerContext + + flagPtrFuncs map[string]bool +} + +func (c *flagDerefChecker) VisitExpr(expr ast.Expr) { + if expr, ok := expr.(*ast.StarExpr); ok { + call, ok := expr.X.(*ast.CallExpr) + if !ok { + return + } + called := qualifiedName(call.Fun) + if c.flagPtrFuncs[called] { + c.warn(expr, called+"Var") + } + } +} + +func (c *flagDerefChecker) warn(x ast.Node, suggestion string) { + c.ctx.Warn(x, "immediate deref in %s is most likely an error; consider using %s", + x, suggestion) +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/flagName_checker.go b/vendor/github.com/go-critic/go-critic/checkers/flagName_checker.go new file mode 100644 index 000000000..7f6ce3c01 --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/flagName_checker.go @@ -0,0 +1,88 @@ +package checkers + +import ( + "go/ast" + "go/constant" + "go/types" + "strings" + + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" + "github.com/go-toolsmith/astcast" +) + +func init() { + var info linter.CheckerInfo + info.Name = "flagName" + info.Tags = []string{"diagnostic"} + info.Summary = "Detects suspicious flag names" + info.Before = `b := flag.Bool(" foo ", false, "description")` + info.After = `b := flag.Bool("foo", false, "description")` + info.Note = "https://github.com/golang/go/issues/41792" + + collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { + return astwalk.WalkerForExpr(&flagNameChecker{ctx: ctx}), nil + }) +} + +type flagNameChecker struct { + astwalk.WalkHandler + ctx *linter.CheckerContext +} + +func (c *flagNameChecker) VisitExpr(expr ast.Expr) { + call := astcast.ToCallExpr(expr) + calledExpr := astcast.ToSelectorExpr(call.Fun) + obj, ok := c.ctx.TypesInfo.ObjectOf(astcast.ToIdent(calledExpr.X)).(*types.PkgName) + if !ok { + return + } + sym := calledExpr.Sel + pkg := obj.Imported() + if pkg.Path() != "flag" { + return + } + + switch sym.Name { + case "Bool", "Duration", "Float64", "String", + "Int", "Int64", "Uint", "Uint64": + c.checkFlagName(call, call.Args[0]) + case "BoolVar", "DurationVar", "Float64Var", "StringVar", + "IntVar", "Int64Var", "UintVar", "Uint64Var": + c.checkFlagName(call, call.Args[1]) + } +} + +func (c *flagNameChecker) checkFlagName(call *ast.CallExpr, arg ast.Expr) { + cv := c.ctx.TypesInfo.Types[arg].Value + if cv == nil { + return // Non-constant name + } + name := constant.StringVal(cv) + switch { + case name == "": + c.warnEmpty(call) + case strings.HasPrefix(name, "-"): + c.warnHypenPrefix(call, name) + case strings.Contains(name, "="): + c.warnEq(call, name) + case strings.Contains(name, " "): + c.warnWhitespace(call, name) + } +} + +func (c *flagNameChecker) warnEmpty(cause ast.Node) { + c.ctx.Warn(cause, "empty flag name") +} + 
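
For context, a minimal sketch of the pitfall the flagDeref checker (above) guards against, based on its own Before/After and Note; this is illustrative only and not part of the vendored sources, and the flag names are invented:

package main

import (
	"flag"
	"fmt"
)

func main() {
	// Dereferencing the pointer returned by flag.Bool before flag.Parse()
	// copies the default value; later command-line input never reaches it.
	verbose := *flag.Bool("verbose", false, "enable verbose output")

	// The rewrite the checker suggests: bind to a variable via flag.BoolVar
	// (or keep the pointer) and read it only after flag.Parse().
	var debug bool
	flag.BoolVar(&debug, "debug", false, "enable debug output")

	flag.Parse()
	fmt.Println("verbose:", verbose, "debug:", debug)
}
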
+func (c *flagNameChecker) warnHypenPrefix(cause ast.Node, name string) { + c.ctx.Warn(cause, "flag name %q should not start with a hypen", name) +} + +func (c *flagNameChecker) warnEq(cause ast.Node, name string) { + c.ctx.Warn(cause, "flag name %q should not contain '='", name) +} + +func (c *flagNameChecker) warnWhitespace(cause ast.Node, name string) { + c.ctx.Warn(cause, "flag name %q contains whitespace", name) +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/hexLiteral_checker.go b/vendor/github.com/go-critic/go-critic/checkers/hexLiteral_checker.go new file mode 100644 index 000000000..ae61a1125 --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/hexLiteral_checker.go @@ -0,0 +1,60 @@ +package checkers + +import ( + "go/ast" + "go/token" + "strings" + + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" + "github.com/go-toolsmith/astcast" +) + +func init() { + var info linter.CheckerInfo + info.Name = "hexLiteral" + info.Tags = []string{"style", "experimental"} + info.Summary = "Detects hex literals that have mixed case letter digits" + info.Before = ` +x := 0X12 +y := 0xfF` + info.After = ` +x := 0x12 +// (A) +y := 0xff +// (B) +y := 0xFF` + + collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { + return astwalk.WalkerForExpr(&hexLiteralChecker{ctx: ctx}), nil + }) +} + +type hexLiteralChecker struct { + astwalk.WalkHandler + ctx *linter.CheckerContext +} + +func (c *hexLiteralChecker) warn0X(lit *ast.BasicLit) { + suggest := "0x" + lit.Value[len("0X"):] + c.ctx.Warn(lit, "prefer 0x over 0X, s/%s/%s/", lit.Value, suggest) +} + +func (c *hexLiteralChecker) warnMixedDigits(lit *ast.BasicLit) { + c.ctx.Warn(lit, "don't mix hex literal letter digits casing") +} + +func (c *hexLiteralChecker) VisitExpr(expr ast.Expr) { + lit := astcast.ToBasicLit(expr) + if lit.Kind != token.INT || len(lit.Value) < 3 { + return + } + if strings.HasPrefix(lit.Value, "0X") { + c.warn0X(lit) + return + } + digits := lit.Value[len("0x"):] + if strings.ToLower(digits) != digits && strings.ToUpper(digits) != digits { + c.warnMixedDigits(lit) + } +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/hugeParam_checker.go b/vendor/github.com/go-critic/go-critic/checkers/hugeParam_checker.go new file mode 100644 index 000000000..c430431a7 --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/hugeParam_checker.go @@ -0,0 +1,63 @@ +package checkers + +import ( + "go/ast" + + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" +) + +func init() { + var info linter.CheckerInfo + info.Name = "hugeParam" + info.Tags = []string{"performance"} + info.Params = linter.CheckerParams{ + "sizeThreshold": { + Value: 80, + Usage: "size in bytes that makes the warning trigger", + }, + } + info.Summary = "Detects params that incur excessive amount of copying" + info.Before = `func f(x [1024]int) {}` + info.After = `func f(x *[1024]int) {}` + + collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { + return astwalk.WalkerForFuncDecl(&hugeParamChecker{ + ctx: ctx, + sizeThreshold: int64(info.Params.Int("sizeThreshold")), + }), nil + }) +} + +type hugeParamChecker struct { + astwalk.WalkHandler + ctx *linter.CheckerContext + + sizeThreshold int64 +} + +func (c *hugeParamChecker) VisitFuncDecl(decl *ast.FuncDecl) { + // TODO(quasilyte): maybe it's worthwhile to permit skipping + // test files 
for this checker? + if decl.Recv != nil { + c.checkParams(decl.Recv.List) + } + c.checkParams(decl.Type.Params.List) +} + +func (c *hugeParamChecker) checkParams(params []*ast.Field) { + for _, p := range params { + for _, id := range p.Names { + typ := c.ctx.TypeOf(id) + size := c.ctx.SizesInfo.Sizeof(typ) + if size >= c.sizeThreshold { + c.warn(id, size) + } + } + } +} + +func (c *hugeParamChecker) warn(cause *ast.Ident, size int64) { + c.ctx.Warn(cause, "%s is heavy (%d bytes); consider passing it by pointer", + cause, size) +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/ifElseChain_checker.go b/vendor/github.com/go-critic/go-critic/checkers/ifElseChain_checker.go new file mode 100644 index 000000000..b1fcf4147 --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/ifElseChain_checker.go @@ -0,0 +1,99 @@ +package checkers + +import ( + "go/ast" + + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" +) + +func init() { + var info linter.CheckerInfo + info.Name = "ifElseChain" + info.Tags = []string{"style"} + info.Summary = "Detects repeated if-else statements and suggests to replace them with switch statement" + info.Before = ` +if cond1 { + // Code A. +} else if cond2 { + // Code B. +} else { + // Code C. +}` + info.After = ` +switch { +case cond1: + // Code A. +case cond2: + // Code B. +default: + // Code C. +}` + info.Note = ` +Permits single else or else-if; repeated else-if or else + else-if +will trigger suggestion to use switch statement. +See [EffectiveGo#switch](https://golang.org/doc/effective_go.html#switch).` + + collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { + return astwalk.WalkerForStmt(&ifElseChainChecker{ctx: ctx}), nil + }) +} + +type ifElseChainChecker struct { + astwalk.WalkHandler + ctx *linter.CheckerContext + + cause *ast.IfStmt + visited map[*ast.IfStmt]bool +} + +func (c *ifElseChainChecker) EnterFunc(fn *ast.FuncDecl) bool { + if fn.Body == nil { + return false + } + c.visited = make(map[*ast.IfStmt]bool) + return true +} + +func (c *ifElseChainChecker) VisitStmt(stmt ast.Stmt) { + if stmt, ok := stmt.(*ast.IfStmt); ok { + if c.visited[stmt] { + return + } + c.cause = stmt + c.checkIfStmt(stmt) + } +} + +func (c *ifElseChainChecker) checkIfStmt(stmt *ast.IfStmt) { + const minThreshold = 2 + if c.countIfelseLen(stmt) >= minThreshold { + c.warn() + } +} + +func (c *ifElseChainChecker) countIfelseLen(stmt *ast.IfStmt) int { + count := 0 + for { + switch e := stmt.Else.(type) { + case *ast.IfStmt: + if e.Init != nil { + return 0 // Give up + } + // Else if. + stmt = e + count++ + c.visited[e] = true + case *ast.BlockStmt: + // Else branch. + return count + 1 + default: + // No else or else if. 
+ return count + } + } +} + +func (c *ifElseChainChecker) warn() { + c.ctx.Warn(c.cause, "rewrite if-else to switch statement") +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/importShadow_checker.go b/vendor/github.com/go-critic/go-critic/checkers/importShadow_checker.go new file mode 100644 index 000000000..5ac711fc1 --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/importShadow_checker.go @@ -0,0 +1,47 @@ +package checkers + +import ( + "go/ast" + "go/types" + + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" +) + +func init() { + var info linter.CheckerInfo + info.Name = "importShadow" + info.Tags = []string{"style", "opinionated"} + info.Summary = "Detects when imported package names shadowed in the assignments" + info.Before = ` +// "path/filepath" is imported. +filepath := "foo.txt"` + info.After = ` +filename := "foo.txt"` + + collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { + ctx.Require.PkgObjects = true + return astwalk.WalkerForLocalDef(&importShadowChecker{ctx: ctx}, ctx.TypesInfo), nil + }) +} + +type importShadowChecker struct { + astwalk.WalkHandler + ctx *linter.CheckerContext +} + +func (c *importShadowChecker) VisitLocalDef(def astwalk.Name, _ ast.Expr) { + for pkgObj, name := range c.ctx.PkgObjects { + if name == def.ID.Name && name != "_" { + c.warn(def.ID, name, pkgObj.Imported()) + } + } +} + +func (c *importShadowChecker) warn(id ast.Node, importedName string, pkg *types.Package) { + if isStdlibPkg(pkg) { + c.ctx.Warn(id, "shadow of imported package '%s'", importedName) + } else { + c.ctx.Warn(id, "shadow of imported from '%s' package '%s'", pkg.Path(), importedName) + } +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/indexAlloc_checker.go b/vendor/github.com/go-critic/go-critic/checkers/indexAlloc_checker.go new file mode 100644 index 000000000..908285c03 --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/indexAlloc_checker.go @@ -0,0 +1,50 @@ +package checkers + +import ( + "go/ast" + + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" + "github.com/go-toolsmith/astcast" + "github.com/go-toolsmith/typep" +) + +func init() { + var info linter.CheckerInfo + info.Name = "indexAlloc" + info.Tags = []string{"performance"} + info.Summary = "Detects strings.Index calls that may cause unwanted allocs" + info.Before = `strings.Index(string(x), y)` + info.After = `bytes.Index(x, []byte(y))` + info.Note = `See Go issue for details: https://github.com/golang/go/issues/25864` + + collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { + return astwalk.WalkerForExpr(&indexAllocChecker{ctx: ctx}), nil + }) +} + +type indexAllocChecker struct { + astwalk.WalkHandler + ctx *linter.CheckerContext +} + +func (c *indexAllocChecker) VisitExpr(e ast.Expr) { + call := astcast.ToCallExpr(e) + if qualifiedName(call.Fun) != "strings.Index" { + return + } + stringConv := astcast.ToCallExpr(call.Args[0]) + if qualifiedName(stringConv.Fun) != "string" { + return + } + x := stringConv.Args[0] + y := call.Args[1] + if typep.SideEffectFree(c.ctx.TypesInfo, x) && typep.SideEffectFree(c.ctx.TypesInfo, y) { + c.warn(e, x, y) + } +} + +func (c *indexAllocChecker) warn(cause ast.Node, x, y ast.Expr) { + c.ctx.Warn(cause, "consider replacing %s with bytes.Index(%s, []byte(%s))", + cause, x, y) +} diff --git 
a/vendor/github.com/go-critic/go-critic/checkers/initClause_checker.go b/vendor/github.com/go-critic/go-critic/checkers/initClause_checker.go new file mode 100644 index 000000000..a1b6b2a8a --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/initClause_checker.go @@ -0,0 +1,56 @@ +package checkers + +import ( + "go/ast" + + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" + "github.com/go-toolsmith/astp" +) + +func init() { + var info linter.CheckerInfo + info.Name = "initClause" + info.Tags = []string{"style", "opinionated", "experimental"} + info.Summary = "Detects non-assignment statements inside if/switch init clause" + info.Before = `if sideEffect(); cond { +}` + info.After = `sideEffect() +if cond { +}` + + collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { + return astwalk.WalkerForStmt(&initClauseChecker{ctx: ctx}), nil + }) +} + +type initClauseChecker struct { + astwalk.WalkHandler + ctx *linter.CheckerContext +} + +func (c *initClauseChecker) VisitStmt(stmt ast.Stmt) { + initClause := c.getInitClause(stmt) + if initClause != nil && !astp.IsAssignStmt(initClause) { + c.warn(stmt, initClause) + } +} + +func (c *initClauseChecker) getInitClause(x ast.Stmt) ast.Stmt { + switch x := x.(type) { + case *ast.IfStmt: + return x.Init + case *ast.SwitchStmt: + return x.Init + default: + return nil + } +} + +func (c *initClauseChecker) warn(stmt, clause ast.Stmt) { + name := "if" + if astp.IsSwitchStmt(stmt) { + name = "switch" + } + c.ctx.Warn(stmt, "consider to move `%s` before %s", clause, name) +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/comment_walker.go b/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/comment_walker.go new file mode 100644 index 000000000..6c60e3fed --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/comment_walker.go @@ -0,0 +1,41 @@ +package astwalk + +import ( + "go/ast" + "strings" +) + +type commentWalker struct { + visitor CommentVisitor +} + +func (w *commentWalker) WalkFile(f *ast.File) { + if !w.visitor.EnterFile(f) { + return + } + + for _, cg := range f.Comments { + visitCommentGroups(cg, w.visitor.VisitComment) + } +} + +func visitCommentGroups(cg *ast.CommentGroup, visit func(*ast.CommentGroup)) { + var group []*ast.Comment + visitGroup := func(list []*ast.Comment) { + if len(list) == 0 { + return + } + cg := &ast.CommentGroup{List: list} + visit(cg) + } + for _, comment := range cg.List { + if strings.HasPrefix(comment.Text, "/*") { + visitGroup(group) + group = group[:0] + visitGroup([]*ast.Comment{comment}) + } else { + group = append(group, comment) + } + } + visitGroup(group) +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/doc_comment_walker.go b/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/doc_comment_walker.go new file mode 100644 index 000000000..39b536508 --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/doc_comment_walker.go @@ -0,0 +1,48 @@ +package astwalk + +import ( + "go/ast" +) + +type docCommentWalker struct { + visitor DocCommentVisitor +} + +func (w *docCommentWalker) WalkFile(f *ast.File) { + for _, decl := range f.Decls { + switch decl := decl.(type) { + case *ast.FuncDecl: + if decl.Doc != nil { + w.visitor.VisitDocComment(decl.Doc) + } + case *ast.GenDecl: + if decl.Doc != nil { + w.visitor.VisitDocComment(decl.Doc) + } + for _, spec := range 
decl.Specs { + switch spec := spec.(type) { + case *ast.ImportSpec: + if spec.Doc != nil { + w.visitor.VisitDocComment(spec.Doc) + } + case *ast.ValueSpec: + if spec.Doc != nil { + w.visitor.VisitDocComment(spec.Doc) + } + case *ast.TypeSpec: + if spec.Doc != nil { + w.visitor.VisitDocComment(spec.Doc) + } + ast.Inspect(spec.Type, func(n ast.Node) bool { + if n, ok := n.(*ast.Field); ok { + if n.Doc != nil { + w.visitor.VisitDocComment(n.Doc) + } + } + return true + }) + } + } + } + } +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/expr_walker.go b/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/expr_walker.go new file mode 100644 index 000000000..de66c1081 --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/expr_walker.go @@ -0,0 +1,31 @@ +package astwalk + +import ( + "go/ast" +) + +type exprWalker struct { + visitor ExprVisitor +} + +func (w *exprWalker) WalkFile(f *ast.File) { + if !w.visitor.EnterFile(f) { + return + } + + for _, decl := range f.Decls { + if decl, ok := decl.(*ast.FuncDecl); ok { + if !w.visitor.EnterFunc(decl) { + continue + } + } + + ast.Inspect(decl, func(x ast.Node) bool { + if x, ok := x.(ast.Expr); ok { + w.visitor.VisitExpr(x) + return !w.visitor.skipChilds() + } + return true + }) + } +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/func_decl_walker.go b/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/func_decl_walker.go new file mode 100644 index 000000000..c7e3a4371 --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/func_decl_walker.go @@ -0,0 +1,23 @@ +package astwalk + +import ( + "go/ast" +) + +type funcDeclWalker struct { + visitor FuncDeclVisitor +} + +func (w *funcDeclWalker) WalkFile(f *ast.File) { + if !w.visitor.EnterFile(f) { + return + } + + for _, decl := range f.Decls { + decl, ok := decl.(*ast.FuncDecl) + if !ok || !w.visitor.EnterFunc(decl) { + continue + } + w.visitor.VisitFuncDecl(decl) + } +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/local_comment_walker.go b/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/local_comment_walker.go new file mode 100644 index 000000000..e042f0d5e --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/local_comment_walker.go @@ -0,0 +1,32 @@ +package astwalk + +import ( + "go/ast" +) + +type localCommentWalker struct { + visitor LocalCommentVisitor +} + +func (w *localCommentWalker) WalkFile(f *ast.File) { + if !w.visitor.EnterFile(f) { + return + } + + for _, decl := range f.Decls { + decl, ok := decl.(*ast.FuncDecl) + if !ok || !w.visitor.EnterFunc(decl) { + continue + } + + for _, cg := range f.Comments { + // Not sure that decls/comments are sorted + // by positions, so do a naive full scan for now. + if cg.Pos() < decl.Pos() || cg.Pos() > decl.End() { + continue + } + + visitCommentGroups(cg, w.visitor.VisitLocalComment) + } + } +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/local_def_visitor.go b/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/local_def_visitor.go new file mode 100644 index 000000000..bed0f44ab --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/local_def_visitor.go @@ -0,0 +1,51 @@ +package astwalk + +import ( + "go/ast" +) + +// LocalDefVisitor visits every name definitions inside a function. 
+// +// Next elements are considered as name definitions: +// - Function parameters (input, output, receiver) +// - Every LHS of ":=" assignment that defines a new name +// - Every local var/const declaration. +// +// NOTE: this visitor is experimental. +// This is also why it lives in a separate file. +type LocalDefVisitor interface { + walkerEvents + VisitLocalDef(Name, ast.Expr) +} + +type ( + // NameKind describes what kind of name Name object holds. + NameKind int + + // Name holds ver/const/param definition symbol info. + Name struct { + ID *ast.Ident + Kind NameKind + + // Index is NameVar-specific field that is used to + // specify nth tuple element being assigned to the name. + Index int + } +) + +// NOTE: set of name kinds is not stable and may change over time. +// +// TODO(quasilyte): is NameRecv/NameParam/NameResult granularity desired? +// TODO(quasilyte): is NameVar/NameBind (var vs :=) granularity desired? +const ( + // NameParam is function/method receiver/input/output name. + // Initializing expression is always nil. + NameParam NameKind = iota + // NameVar is var or ":=" declared name. + // Initizlizing expression may be nil for var-declared names + // without explicit initializing expression. + NameVar + // NameConst is const-declared name. + // Initializing expression is never nil. + NameConst +) diff --git a/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/local_def_walker.go b/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/local_def_walker.go new file mode 100644 index 000000000..f6808cbb4 --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/local_def_walker.go @@ -0,0 +1,118 @@ +package astwalk + +import ( + "go/ast" + "go/token" + "go/types" +) + +type localDefWalker struct { + visitor LocalDefVisitor + info *types.Info +} + +func (w *localDefWalker) WalkFile(f *ast.File) { + for _, decl := range f.Decls { + decl, ok := decl.(*ast.FuncDecl) + if !ok || !w.visitor.EnterFunc(decl) { + continue + } + w.walkFunc(decl) + } +} + +func (w *localDefWalker) walkFunc(decl *ast.FuncDecl) { + w.walkSignature(decl) + w.walkFuncBody(decl) +} + +func (w *localDefWalker) walkFuncBody(decl *ast.FuncDecl) { + ast.Inspect(decl.Body, func(x ast.Node) bool { + switch x := x.(type) { + case *ast.AssignStmt: + if x.Tok != token.DEFINE { + return false + } + if len(x.Lhs) != len(x.Rhs) { + // Multi-value assignment. + // Invariant: there is only 1 RHS. + for i, lhs := range x.Lhs { + id, ok := lhs.(*ast.Ident) + if !ok || w.info.Defs[id] == nil { + continue + } + def := Name{ID: id, Kind: NameVar, Index: i} + w.visitor.VisitLocalDef(def, x.Rhs[0]) + } + } else { + // Simple 1-1 assignments. + for i, lhs := range x.Lhs { + id, ok := lhs.(*ast.Ident) + if !ok || w.info.Defs[id] == nil { + continue + } + def := Name{ID: id, Kind: NameVar} + w.visitor.VisitLocalDef(def, x.Rhs[i]) + } + } + return false + + case *ast.GenDecl: + // Decls always introduce new names. + for _, spec := range x.Specs { + spec, ok := spec.(*ast.ValueSpec) + if !ok { // Ignore type/import specs + return false + } + switch { + case len(spec.Values) == 0: + // var-specific decls without explicit init. + for _, id := range spec.Names { + def := Name{ID: id, Kind: NameVar} + w.visitor.VisitLocalDef(def, nil) + } + case len(spec.Names) != len(spec.Values): + // var-specific decls that assign tuple results. 
+ for i, id := range spec.Names { + def := Name{ID: id, Kind: NameVar, Index: i} + w.visitor.VisitLocalDef(def, spec.Values[0]) + } + default: + // Can be either var or const decl. + kind := NameVar + if x.Tok == token.CONST { + kind = NameConst + } + for i, id := range spec.Names { + def := Name{ID: id, Kind: kind} + w.visitor.VisitLocalDef(def, spec.Values[i]) + } + } + } + return false + } + + return true + }) +} + +func (w *localDefWalker) walkSignature(decl *ast.FuncDecl) { + for _, p := range decl.Type.Params.List { + for _, id := range p.Names { + def := Name{ID: id, Kind: NameParam} + w.visitor.VisitLocalDef(def, nil) + } + } + if decl.Type.Results != nil { + for _, p := range decl.Type.Results.List { + for _, id := range p.Names { + def := Name{ID: id, Kind: NameParam} + w.visitor.VisitLocalDef(def, nil) + } + } + } + if decl.Recv != nil && len(decl.Recv.List[0].Names) != 0 { + def := Name{ID: decl.Recv.List[0].Names[0], Kind: NameParam} + w.visitor.VisitLocalDef(def, nil) + } +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/local_expr_walker.go b/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/local_expr_walker.go new file mode 100644 index 000000000..e455b3f8b --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/local_expr_walker.go @@ -0,0 +1,29 @@ +package astwalk + +import ( + "go/ast" +) + +type localExprWalker struct { + visitor LocalExprVisitor +} + +func (w *localExprWalker) WalkFile(f *ast.File) { + if !w.visitor.EnterFile(f) { + return + } + + for _, decl := range f.Decls { + decl, ok := decl.(*ast.FuncDecl) + if !ok || !w.visitor.EnterFunc(decl) { + continue + } + ast.Inspect(decl.Body, func(x ast.Node) bool { + if x, ok := x.(ast.Expr); ok { + w.visitor.VisitLocalExpr(x) + return !w.visitor.skipChilds() + } + return true + }) + } +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/stmt_list_walker.go b/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/stmt_list_walker.go new file mode 100644 index 000000000..45c406e7e --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/stmt_list_walker.go @@ -0,0 +1,33 @@ +package astwalk + +import ( + "go/ast" +) + +type stmtListWalker struct { + visitor StmtListVisitor +} + +func (w *stmtListWalker) WalkFile(f *ast.File) { + if !w.visitor.EnterFile(f) { + return + } + + for _, decl := range f.Decls { + decl, ok := decl.(*ast.FuncDecl) + if !ok || !w.visitor.EnterFunc(decl) { + continue + } + ast.Inspect(decl.Body, func(x ast.Node) bool { + switch x := x.(type) { + case *ast.BlockStmt: + w.visitor.VisitStmtList(x.List) + case *ast.CaseClause: + w.visitor.VisitStmtList(x.Body) + case *ast.CommClause: + w.visitor.VisitStmtList(x.Body) + } + return !w.visitor.skipChilds() + }) + } +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/stmt_walker.go b/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/stmt_walker.go new file mode 100644 index 000000000..912de867d --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/stmt_walker.go @@ -0,0 +1,29 @@ +package astwalk + +import ( + "go/ast" +) + +type stmtWalker struct { + visitor StmtVisitor +} + +func (w *stmtWalker) WalkFile(f *ast.File) { + if !w.visitor.EnterFile(f) { + return + } + + for _, decl := range f.Decls { + decl, ok := decl.(*ast.FuncDecl) + if !ok || !w.visitor.EnterFunc(decl) { + continue + } + ast.Inspect(decl.Body, func(x ast.Node) bool { + if x, ok := 
x.(ast.Stmt); ok { + w.visitor.VisitStmt(x) + return !w.visitor.skipChilds() + } + return true + }) + } +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/type_expr_walker.go b/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/type_expr_walker.go new file mode 100644 index 000000000..24c150084 --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/type_expr_walker.go @@ -0,0 +1,114 @@ +package astwalk + +import ( + "go/ast" + "go/token" + "go/types" + + "github.com/go-toolsmith/astp" + "github.com/go-toolsmith/typep" +) + +type typeExprWalker struct { + visitor TypeExprVisitor + info *types.Info +} + +func (w *typeExprWalker) WalkFile(f *ast.File) { + if !w.visitor.EnterFile(f) { + return + } + + for _, decl := range f.Decls { + if decl, ok := decl.(*ast.FuncDecl); ok { + if !w.visitor.EnterFunc(decl) { + continue + } + } + switch decl := decl.(type) { + case *ast.FuncDecl: + if !w.visitor.EnterFunc(decl) { + continue + } + w.walkSignature(decl.Type) + ast.Inspect(decl.Body, w.walk) + case *ast.GenDecl: + if decl.Tok == token.IMPORT { + continue + } + ast.Inspect(decl, w.walk) + } + } +} + +func (w *typeExprWalker) visit(x ast.Expr) bool { + w.visitor.VisitTypeExpr(x) + return !w.visitor.skipChilds() +} + +func (w *typeExprWalker) walk(x ast.Node) bool { + switch x := x.(type) { + case *ast.ParenExpr: + if typep.IsTypeExpr(w.info, x.X) { + return w.visit(x) + } + return true + case *ast.CallExpr: + // Pointer conversions require parenthesis around pointer type. + // These casts are represented as call expressions. + // Because it's impossible for the visitor to distinguish such + // "required" parenthesis, walker skips outmost parenthesis in such cases. + return w.inspectInner(x.Fun) + case *ast.SelectorExpr: + // Like with conversions, method expressions are another special. + return w.inspectInner(x.X) + case *ast.StarExpr: + if typep.IsTypeExpr(w.info, x.X) { + return w.visit(x) + } + return true + case *ast.MapType: + return w.visit(x) + case *ast.FuncType: + return w.visit(x) + case *ast.StructType: + return w.visit(x) + case *ast.InterfaceType: + if !w.visit(x) { + return false + } + for _, method := range x.Methods.List { + switch x := method.Type.(type) { + case *ast.FuncType: + w.walkSignature(x) + default: + // Embedded interface. + w.walk(x) + } + } + return false + case *ast.ArrayType: + return w.visit(x) + } + return true +} + +func (w *typeExprWalker) inspectInner(x ast.Expr) bool { + parens, ok := x.(*ast.ParenExpr) + if ok && typep.IsTypeExpr(w.info, parens.X) && astp.IsStarExpr(parens.X) { + ast.Inspect(parens.X, w.walk) + return false + } + return true +} + +func (w *typeExprWalker) walkSignature(typ *ast.FuncType) { + for _, p := range typ.Params.List { + ast.Inspect(p.Type, w.walk) + } + if typ.Results != nil { + for _, p := range typ.Results.List { + ast.Inspect(p.Type, w.walk) + } + } +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/visitor.go b/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/visitor.go new file mode 100644 index 000000000..9f973a2b3 --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/visitor.go @@ -0,0 +1,80 @@ +package astwalk + +import ( + "go/ast" +) + +// Visitor interfaces. +type ( + // DocCommentVisitor visits every doc-comment. + // Does not visit doc-comments for function-local definitions (types, etc). + // Also does not visit package doc-comment (file-level doc-comments). 
+ DocCommentVisitor interface { + VisitDocComment(*ast.CommentGroup) + } + + // FuncDeclVisitor visits every top-level function declaration. + FuncDeclVisitor interface { + walkerEvents + VisitFuncDecl(*ast.FuncDecl) + } + + // ExprVisitor visits every expression inside AST file. + ExprVisitor interface { + walkerEvents + VisitExpr(ast.Expr) + } + + // LocalExprVisitor visits every expression inside function body. + LocalExprVisitor interface { + walkerEvents + VisitLocalExpr(ast.Expr) + } + + // StmtListVisitor visits every statement list inside function body. + // This includes block statement bodies as well as implicit blocks + // introduced by case clauses and alike. + StmtListVisitor interface { + walkerEvents + VisitStmtList([]ast.Stmt) + } + + // StmtVisitor visits every statement inside function body. + StmtVisitor interface { + walkerEvents + VisitStmt(ast.Stmt) + } + + // TypeExprVisitor visits every type describing expression. + // It also traverses struct types and interface types to run + // checker over their fields/method signatures. + TypeExprVisitor interface { + walkerEvents + VisitTypeExpr(ast.Expr) + } + + // LocalCommentVisitor visits every comment inside function body. + LocalCommentVisitor interface { + walkerEvents + VisitLocalComment(*ast.CommentGroup) + } + + // CommentVisitor visits every comment. + CommentVisitor interface { + walkerEvents + VisitComment(*ast.CommentGroup) + } +) + +// walkerEvents describes common hooks available for most visitor types. +type walkerEvents interface { + // EnterFile is called for every file that is about to be traversed. + // If false is returned, file is not visited. + EnterFile(*ast.File) bool + + // EnterFunc is called for every function declaration that is about + // to be traversed. If false is returned, function is not visited. + EnterFunc(*ast.FuncDecl) bool + + skipChilds() bool +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/walk_handler.go b/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/walk_handler.go new file mode 100644 index 000000000..1f6e948d5 --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/walk_handler.go @@ -0,0 +1,34 @@ +package astwalk + +import ( + "go/ast" +) + +// WalkHandler is a type to be embedded into every checker +// that uses astwalk walkers. +type WalkHandler struct { + // SkipChilds controls whether currently analyzed + // node childs should be traversed. + // + // Value is reset after each visitor invocation, + // so there is no need to set value back to false. + SkipChilds bool +} + +// EnterFile is a default walkerEvents.EnterFile implementation +// that reports every file as accepted candidate for checking. +func (w *WalkHandler) EnterFile(f *ast.File) bool { + return true +} + +// EnterFunc is a default walkerEvents.EnterFunc implementation +// that skips extern function (ones that do not have body). 
+func (w *WalkHandler) EnterFunc(decl *ast.FuncDecl) bool { + return decl.Body != nil +} + +func (w *WalkHandler) skipChilds() bool { + v := w.SkipChilds + w.SkipChilds = false + return v +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/walker.go b/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/walker.go new file mode 100644 index 000000000..cd5e1c979 --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/walker.go @@ -0,0 +1,57 @@ +package astwalk + +import ( + "go/types" + + "github.com/go-critic/go-critic/framework/linter" +) + +// WalkerForFuncDecl returns file walker implementation for FuncDeclVisitor. +func WalkerForFuncDecl(v FuncDeclVisitor) linter.FileWalker { + return &funcDeclWalker{visitor: v} +} + +// WalkerForExpr returns file walker implementation for ExprVisitor. +func WalkerForExpr(v ExprVisitor) linter.FileWalker { + return &exprWalker{visitor: v} +} + +// WalkerForLocalExpr returns file walker implementation for LocalExprVisitor. +func WalkerForLocalExpr(v LocalExprVisitor) linter.FileWalker { + return &localExprWalker{visitor: v} +} + +// WalkerForStmtList returns file walker implementation for StmtListVisitor. +func WalkerForStmtList(v StmtListVisitor) linter.FileWalker { + return &stmtListWalker{visitor: v} +} + +// WalkerForStmt returns file walker implementation for StmtVisitor. +func WalkerForStmt(v StmtVisitor) linter.FileWalker { + return &stmtWalker{visitor: v} +} + +// WalkerForTypeExpr returns file walker implementation for TypeExprVisitor. +func WalkerForTypeExpr(v TypeExprVisitor, info *types.Info) linter.FileWalker { + return &typeExprWalker{visitor: v, info: info} +} + +// WalkerForLocalComment returns file walker implementation for LocalCommentVisitor. +func WalkerForLocalComment(v LocalCommentVisitor) linter.FileWalker { + return &localCommentWalker{visitor: v} +} + +// WalkerForComment returns file walker implementation for CommentVisitor. +func WalkerForComment(v CommentVisitor) linter.FileWalker { + return &commentWalker{visitor: v} +} + +// WalkerForDocComment returns file walker implementation for DocCommentVisitor. +func WalkerForDocComment(v DocCommentVisitor) linter.FileWalker { + return &docCommentWalker{visitor: v} +} + +// WalkerForLocalDef returns file walker implementation for LocalDefVisitor. +func WalkerForLocalDef(v LocalDefVisitor, info *types.Info) linter.FileWalker { + return &localDefWalker{visitor: v, info: info} +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/internal/lintutil/astfind.go b/vendor/github.com/go-critic/go-critic/checkers/internal/lintutil/astfind.go new file mode 100644 index 000000000..3c0a95afc --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/internal/lintutil/astfind.go @@ -0,0 +1,27 @@ +package lintutil + +import ( + "go/ast" + + "golang.org/x/tools/go/ast/astutil" +) + +// FindNode applies pred for root and all it's childs until it returns true. +// Matched node is returned. +// If none of the nodes matched predicate, nil is returned. +func FindNode(root ast.Node, pred func(ast.Node) bool) ast.Node { + var found ast.Node + astutil.Apply(root, nil, func(cur *astutil.Cursor) bool { + if pred(cur.Node()) { + found = cur.Node() + return false + } + return true + }) + return found +} + +// ContainsNode reports whether `FindNode(root, pred)!=nil`. 
+func ContainsNode(root ast.Node, pred func(ast.Node) bool) bool { + return FindNode(root, pred) != nil +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/internal/lintutil/astflow.go b/vendor/github.com/go-critic/go-critic/checkers/internal/lintutil/astflow.go new file mode 100644 index 000000000..63d181e5e --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/internal/lintutil/astflow.go @@ -0,0 +1,86 @@ +package lintutil + +import ( + "go/ast" + "go/token" + "go/types" + + "github.com/go-toolsmith/astequal" + "github.com/go-toolsmith/astp" + "github.com/go-toolsmith/typep" +) + +// Different utilities to make simple analysis over typed ast values flow. +// +// It's primitive and can't replace SSA, but the bright side is that +// it does not require building an additional IR eagerly. +// Expected to be used sparingly inside a few checkers. +// +// If proven really useful, can be moved to go-toolsmith library. + +// IsImmutable reports whether n can be midified through any operation. +func IsImmutable(info *types.Info, n ast.Expr) bool { + if astp.IsBasicLit(n) { + return true + } + tv, ok := info.Types[n] + return ok && !tv.Assignable() && !tv.Addressable() +} + +// CouldBeMutated reports whether dst can be modified inside body. +// +// Note that it does not take already existing pointers to dst. +// An example of safe and correct usage is checking of something +// that was just defined, so the dst is a result of that definition. +func CouldBeMutated(info *types.Info, body ast.Node, dst ast.Expr) bool { + if IsImmutable(info, dst) { // Fast path. + return false + } + + // We don't track pass-by-value. + // If it's already a pointer, passing it by value + // means that there can be a potential indirect modification. + // + // It's possible to be less conservative here and find at least + // one such value pass before giving up. + if typep.IsPointer(info.TypeOf(dst)) { + return true + } + + var isDst func(x ast.Expr) bool + if dst, ok := dst.(*ast.Ident); ok { + // Identifier can be shadowed, + // so we need to check the object as well. + obj := info.ObjectOf(dst) + if obj == nil { + return true // Being conservative + } + isDst = func(x ast.Expr) bool { + id, ok := x.(*ast.Ident) + return ok && id.Name == dst.Name && info.ObjectOf(id) == obj + } + } else { + isDst = func(x ast.Expr) bool { + return astequal.Expr(dst, x) + } + } + + return ContainsNode(body, func(n ast.Node) bool { + switch n := n.(type) { + case *ast.UnaryExpr: + if n.Op == token.AND && isDst(n.X) { + return true // Address taken + } + case *ast.AssignStmt: + for _, lhs := range n.Lhs { + if isDst(lhs) { + return true + } + } + case *ast.IncDecStmt: + // Incremented or decremented. + return isDst(n.X) + } + return false + }) +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/internal/lintutil/astset.go b/vendor/github.com/go-critic/go-critic/checkers/internal/lintutil/astset.go new file mode 100644 index 000000000..ebe7835e5 --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/internal/lintutil/astset.go @@ -0,0 +1,44 @@ +package lintutil + +import ( + "go/ast" + + "github.com/go-toolsmith/astequal" +) + +// AstSet is a simple ast.Node set. +// Zero value is ready to use set. +// Can be reused after Clear call. +type AstSet struct { + items []ast.Node +} + +// Contains reports whether s contains x. 
+func (s *AstSet) Contains(x ast.Node) bool { + for i := range s.items { + if astequal.Node(s.items[i], x) { + return true + } + } + return false +} + +// Insert pushes x in s if it's not already there. +// Returns true if element was inserted. +func (s *AstSet) Insert(x ast.Node) bool { + if s.Contains(x) { + return false + } + s.items = append(s.items, x) + return true +} + +// Clear removes all element from set. +func (s *AstSet) Clear() { + s.items = s.items[:0] +} + +// Len returns the number of elements contained inside s. +func (s *AstSet) Len() int { + return len(s.items) +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/internal/lintutil/zero_value.go b/vendor/github.com/go-critic/go-critic/checkers/internal/lintutil/zero_value.go new file mode 100644 index 000000000..4370f5818 --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/internal/lintutil/zero_value.go @@ -0,0 +1,94 @@ +package lintutil + +import ( + "go/ast" + "go/constant" + "go/token" + "go/types" +) + +// IsZeroValue reports whether x represents zero value of its type. +// +// The functions is conservative and may return false for zero values +// if some cases are not handled in a comprehensive way +// but is should never return true for something that's not a proper zv. +func IsZeroValue(info *types.Info, x ast.Expr) bool { + switch x := x.(type) { + case *ast.BasicLit: + typ := info.TypeOf(x).Underlying().(*types.Basic) + v := info.Types[x].Value + var z constant.Value + switch { + case typ.Kind() == types.String: + z = constant.MakeString("") + case typ.Info()&types.IsInteger != 0: + z = constant.MakeInt64(0) + case typ.Info()&types.IsUnsigned != 0: + z = constant.MakeUint64(0) + case typ.Info()&types.IsFloat != 0: + z = constant.MakeFloat64(0) + default: + return false + } + return constant.Compare(v, token.EQL, z) + + case *ast.CompositeLit: + return len(x.Elts) == 0 + + default: + // Note that this function is not comprehensive. + return false + } +} + +// ZeroValueOf returns a zero value expression for typeExpr of type typ. +// If function can't find such a value, nil is returned. 
+func ZeroValueOf(typeExpr ast.Expr, typ types.Type) ast.Expr { + switch utyp := typ.Underlying().(type) { + case *types.Basic: + info := utyp.Info() + var zv ast.Expr + switch { + case info&types.IsInteger != 0: + zv = &ast.BasicLit{Kind: token.INT, Value: "0"} + case info&types.IsFloat != 0: + zv = &ast.BasicLit{Kind: token.FLOAT, Value: "0.0"} + case info&types.IsString != 0: + zv = &ast.BasicLit{Kind: token.STRING, Value: `""`} + case info&types.IsBoolean != 0: + zv = &ast.Ident{Name: "false"} + } + if isDefaultLiteralType(typ) { + return zv + } + return &ast.CallExpr{ + Fun: typeExpr, + Args: []ast.Expr{zv}, + } + + case *types.Slice, *types.Map, *types.Pointer, *types.Interface: + return &ast.CallExpr{ + Fun: typeExpr, + Args: []ast.Expr{&ast.Ident{Name: "nil"}}, + } + + case *types.Array, *types.Struct: + return &ast.CompositeLit{Type: typeExpr} + + default: + return nil + } +} + +func isDefaultLiteralType(typ types.Type) bool { + btyp, ok := typ.(*types.Basic) + if !ok { + return false + } + switch btyp.Kind() { + case types.Bool, types.Int, types.Float64, types.String: + return true + default: + return false + } +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/mapKey_checker.go b/vendor/github.com/go-critic/go-critic/checkers/mapKey_checker.go new file mode 100644 index 000000000..64c2821dd --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/mapKey_checker.go @@ -0,0 +1,124 @@ +package checkers + +import ( + "go/ast" + "go/types" + "strings" + + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/checkers/internal/lintutil" + "github.com/go-critic/go-critic/framework/linter" + "github.com/go-toolsmith/astcast" + "github.com/go-toolsmith/astp" + "github.com/go-toolsmith/typep" +) + +func init() { + var info linter.CheckerInfo + info.Name = "mapKey" + info.Tags = []string{"diagnostic"} + info.Summary = "Detects suspicious map literal keys" + info.Before = ` +_ = map[string]int{ + "foo": 1, + "bar ": 2, +}` + info.After = ` +_ = map[string]int{ + "foo": 1, + "bar": 2, +}` + + collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { + return astwalk.WalkerForExpr(&mapKeyChecker{ctx: ctx}), nil + }) +} + +type mapKeyChecker struct { + astwalk.WalkHandler + ctx *linter.CheckerContext + + astSet lintutil.AstSet +} + +func (c *mapKeyChecker) VisitExpr(expr ast.Expr) { + lit := astcast.ToCompositeLit(expr) + if len(lit.Elts) < 2 { + return + } + + typ, ok := c.ctx.TypeOf(lit).Underlying().(*types.Map) + if !ok { + return + } + if !typep.HasStringKind(typ.Key().Underlying()) { + return + } + + c.checkWhitespace(lit) + c.checkDuplicates(lit) +} + +func (c *mapKeyChecker) checkDuplicates(lit *ast.CompositeLit) { + c.astSet.Clear() + + for _, elt := range lit.Elts { + kv := astcast.ToKeyValueExpr(elt) + if astp.IsBasicLit(kv.Key) { + // Basic lits are handled by the compiler. + continue + } + if !typep.SideEffectFree(c.ctx.TypesInfo, kv.Key) { + continue + } + if !c.astSet.Insert(kv.Key) { + c.warnDupKey(kv.Key) + } + } +} + +func (c *mapKeyChecker) checkWhitespace(lit *ast.CompositeLit) { + var whitespaceKey ast.Node + for _, elt := range lit.Elts { + key := astcast.ToBasicLit(astcast.ToKeyValueExpr(elt).Key) + if len(key.Value) < len(`" "`) { + continue + } + // s is unquoted string literal value. + s := key.Value[len(`"`) : len(key.Value)-len(`"`)] + if !strings.Contains(s, " ") { + continue + } + if whitespaceKey != nil { + // Already seen something with a whitespace. 
+ // More than one entry => not suspicious. + return + } + if s == " " { + // If space is used as a key, maybe this map + // has something to do with spaces. Give up. + return + } + // Check if it has exactly 1 space prefix or suffix. + bad := strings.HasPrefix(s, " ") && !strings.HasPrefix(s, " ") || + strings.HasSuffix(s, " ") && !strings.HasSuffix(s, " ") + if !bad { + // These spaces can be a padding, + // or a legitimate part of a key. Give up. + return + } + whitespaceKey = key + } + + if whitespaceKey != nil { + c.warnWhitespace(whitespaceKey) + } +} + +func (c *mapKeyChecker) warnWhitespace(key ast.Node) { + c.ctx.Warn(key, "suspucious whitespace in %s key", key) +} + +func (c *mapKeyChecker) warnDupKey(key ast.Node) { + c.ctx.Warn(key, "suspicious duplicate %s key", key) +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/methodExprCall_checker.go b/vendor/github.com/go-critic/go-critic/checkers/methodExprCall_checker.go new file mode 100644 index 000000000..2553def14 --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/methodExprCall_checker.go @@ -0,0 +1,57 @@ +package checkers + +import ( + "go/ast" + "go/token" + + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" + "github.com/go-toolsmith/astcast" + "github.com/go-toolsmith/astcopy" + "github.com/go-toolsmith/typep" +) + +func init() { + var info linter.CheckerInfo + info.Name = "methodExprCall" + info.Tags = []string{"style", "experimental"} + info.Summary = "Detects method expression call that can be replaced with a method call" + info.Before = `f := foo{} +foo.bar(f)` + info.After = `f := foo{} +f.bar()` + + collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { + return astwalk.WalkerForExpr(&methodExprCallChecker{ctx: ctx}), nil + }) +} + +type methodExprCallChecker struct { + astwalk.WalkHandler + ctx *linter.CheckerContext +} + +func (c *methodExprCallChecker) VisitExpr(x ast.Expr) { + call := astcast.ToCallExpr(x) + s := astcast.ToSelectorExpr(call.Fun) + + if len(call.Args) < 1 || astcast.ToIdent(call.Args[0]).Name == "nil" { + return + } + + if typep.IsTypeExpr(c.ctx.TypesInfo, s.X) { + c.warn(call, s) + } +} + +func (c *methodExprCallChecker) warn(cause *ast.CallExpr, s *ast.SelectorExpr) { + selector := astcopy.SelectorExpr(s) + selector.X = cause.Args[0] + + // Remove "&" from the receiver (if any). 
+ if u, ok := selector.X.(*ast.UnaryExpr); ok && u.Op == token.AND { + selector.X = u.X + } + + c.ctx.Warn(cause, "consider to change `%s` to `%s`", cause.Fun, selector) +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/nestingReduce_checker.go b/vendor/github.com/go-critic/go-critic/checkers/nestingReduce_checker.go new file mode 100644 index 000000000..a68acecca --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/nestingReduce_checker.go @@ -0,0 +1,73 @@ +package checkers + +import ( + "go/ast" + + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" +) + +func init() { + var info linter.CheckerInfo + info.Name = "nestingReduce" + info.Tags = []string{"style", "opinionated", "experimental"} + info.Params = linter.CheckerParams{ + "bodyWidth": { + Value: 5, + Usage: "min number of statements inside a branch to trigger a warning", + }, + } + info.Summary = "Finds where nesting level could be reduced" + info.Before = ` +for _, v := range a { + if v.Bool { + body() + } +}` + info.After = ` +for _, v := range a { + if !v.Bool { + continue + } + body() +}` + + collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { + c := &nestingReduceChecker{ctx: ctx} + c.bodyWidth = info.Params.Int("bodyWidth") + return astwalk.WalkerForStmt(c), nil + }) +} + +type nestingReduceChecker struct { + astwalk.WalkHandler + ctx *linter.CheckerContext + + bodyWidth int +} + +func (c *nestingReduceChecker) VisitStmt(stmt ast.Stmt) { + switch stmt := stmt.(type) { + case *ast.ForStmt: + c.checkLoopBody(stmt.Body.List) + case *ast.RangeStmt: + c.checkLoopBody(stmt.Body.List) + } +} + +func (c *nestingReduceChecker) checkLoopBody(body []ast.Stmt) { + if len(body) != 1 { + return + } + stmt, ok := body[0].(*ast.IfStmt) + if !ok { + return + } + if len(stmt.Body.List) >= c.bodyWidth && stmt.Else == nil { + c.warnLoop(stmt) + } +} + +func (c *nestingReduceChecker) warnLoop(cause ast.Node) { + c.ctx.Warn(cause, "invert if cond, replace body with `continue`, move old body after the statement") +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/newDeref_checker.go b/vendor/github.com/go-critic/go-critic/checkers/newDeref_checker.go new file mode 100644 index 000000000..7e564b70f --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/newDeref_checker.go @@ -0,0 +1,45 @@ +package checkers + +import ( + "go/ast" + + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/checkers/internal/lintutil" + "github.com/go-critic/go-critic/framework/linter" + "github.com/go-toolsmith/astcast" + "golang.org/x/tools/go/ast/astutil" +) + +func init() { + var info linter.CheckerInfo + info.Name = "newDeref" + info.Tags = []string{"style"} + info.Summary = "Detects immediate dereferencing of `new` expressions" + info.Before = `x := *new(bool)` + info.After = `x := false` + + collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { + return astwalk.WalkerForExpr(&newDerefChecker{ctx: ctx}), nil + }) +} + +type newDerefChecker struct { + astwalk.WalkHandler + ctx *linter.CheckerContext +} + +func (c *newDerefChecker) VisitExpr(expr ast.Expr) { + deref := astcast.ToStarExpr(expr) + call := astcast.ToCallExpr(deref.X) + if astcast.ToIdent(call.Fun).Name == "new" { + typ := c.ctx.TypeOf(call.Args[0]) + zv := lintutil.ZeroValueOf(astutil.Unparen(call.Args[0]), typ) + if zv != nil { + c.warn(expr, zv) + } + } +} + +func (c 
*newDerefChecker) warn(cause, suggestion ast.Expr) { + c.ctx.Warn(cause, "replace `%s` with `%s`", cause, suggestion) +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/nilValReturn_checker.go b/vendor/github.com/go-critic/go-critic/checkers/nilValReturn_checker.go new file mode 100644 index 000000000..0a8e793ee --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/nilValReturn_checker.go @@ -0,0 +1,71 @@ +package checkers + +import ( + "go/ast" + "go/token" + + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" + "github.com/go-toolsmith/astequal" + "github.com/go-toolsmith/typep" +) + +func init() { + var info linter.CheckerInfo + info.Name = "nilValReturn" + info.Tags = []string{"diagnostic", "experimental"} + info.Summary = "Detects return statements those results evaluate to nil" + info.Before = ` +if err == nil { + return err +}` + info.After = ` +// (A) - return nil explicitly +if err == nil { + return nil +} +// (B) - typo in "==", change to "!=" +if err != nil { + return err +}` + + collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { + return astwalk.WalkerForStmt(&nilValReturnChecker{ctx: ctx}), nil + }) +} + +type nilValReturnChecker struct { + astwalk.WalkHandler + ctx *linter.CheckerContext +} + +func (c *nilValReturnChecker) VisitStmt(stmt ast.Stmt) { + ifStmt, ok := stmt.(*ast.IfStmt) + if !ok || len(ifStmt.Body.List) != 1 { + return + } + ret, ok := ifStmt.Body.List[0].(*ast.ReturnStmt) + if !ok { + return + } + expr, ok := ifStmt.Cond.(*ast.BinaryExpr) + if !ok { + return + } + xIsNil := expr.Op == token.EQL && + typep.SideEffectFree(c.ctx.TypesInfo, expr.X) && + qualifiedName(expr.Y) == "nil" + if !xIsNil { + return + } + for _, res := range ret.Results { + if astequal.Expr(expr.X, res) { + c.warn(ret, expr.X) + break + } + } +} + +func (c *nilValReturnChecker) warn(cause, val ast.Node) { + c.ctx.Warn(cause, "returned expr is always nil; replace %s with nil", val) +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/octalLiteral_checker.go b/vendor/github.com/go-critic/go-critic/checkers/octalLiteral_checker.go new file mode 100644 index 000000000..486940452 --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/octalLiteral_checker.go @@ -0,0 +1,82 @@ +package checkers + +import ( + "go/ast" + "go/token" + "go/types" + + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" + "github.com/go-toolsmith/astcast" +) + +func init() { + var info linter.CheckerInfo + info.Name = "octalLiteral" + info.Tags = []string{"diagnostic", "experimental"} + info.Summary = "Detects octal literals passed to functions" + info.Before = `foo(02)` + info.After = `foo(2)` + + collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { + c := &octalLiteralChecker{ + ctx: ctx, + octFriendlyPkg: map[string]bool{ + "os": true, + "io/ioutil": true, + }, + } + return astwalk.WalkerForExpr(c), nil + }) +} + +type octalLiteralChecker struct { + astwalk.WalkHandler + ctx *linter.CheckerContext + + octFriendlyPkg map[string]bool +} + +func (c *octalLiteralChecker) VisitExpr(expr ast.Expr) { + call := astcast.ToCallExpr(expr) + calledExpr := astcast.ToSelectorExpr(call.Fun) + ident := astcast.ToIdent(calledExpr.X) + + if obj, ok := c.ctx.TypesInfo.ObjectOf(ident).(*types.PkgName); ok { + pkg := obj.Imported() + if c.octFriendlyPkg[pkg.Path()] { + return + } + } + + for _, arg 
:= range call.Args { + if lit := astcast.ToBasicLit(c.unsign(arg)); len(lit.Value) > 1 && + c.isIntLiteral(lit) && + c.isOctalLiteral(lit) { + c.warn(call) + return + } + } +} + +func (c *octalLiteralChecker) unsign(e ast.Expr) ast.Expr { + u, ok := e.(*ast.UnaryExpr) + if !ok { + return e + } + return u.X +} + +func (c *octalLiteralChecker) isIntLiteral(lit *ast.BasicLit) bool { + return lit.Kind == token.INT +} + +func (c *octalLiteralChecker) isOctalLiteral(lit *ast.BasicLit) bool { + return lit.Value[0] == '0' && + lit.Value[1] != 'x' && + lit.Value[1] != 'X' +} + +func (c *octalLiteralChecker) warn(expr ast.Expr) { + c.ctx.Warn(expr, "suspicious octal args in `%s`", expr) +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/offBy1_checker.go b/vendor/github.com/go-critic/go-critic/checkers/offBy1_checker.go new file mode 100644 index 000000000..ece3fdfdb --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/offBy1_checker.go @@ -0,0 +1,66 @@ +package checkers + +import ( + "go/ast" + "go/token" + + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" + "github.com/go-toolsmith/astcast" + "github.com/go-toolsmith/astcopy" + "github.com/go-toolsmith/astequal" + "github.com/go-toolsmith/typep" +) + +func init() { + var info linter.CheckerInfo + info.Name = "offBy1" + info.Tags = []string{"diagnostic"} + info.Summary = "Detects various off-by-one kind of errors" + info.Before = `xs[len(xs)]` + info.After = `xs[len(xs)-1]` + + collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { + return astwalk.WalkerForExpr(&offBy1Checker{ctx: ctx}), nil + }) +} + +type offBy1Checker struct { + astwalk.WalkHandler + ctx *linter.CheckerContext +} + +func (c *offBy1Checker) VisitExpr(e ast.Expr) { + // TODO(quasilyte): handle more off-by-1 patterns. + // TODO(quasilyte): check whether go/analysis can help here. + + // Detect s[len(s)] expressions that always panic. + // The correct form is s[len(s)-1]. 
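+	// For example, `xs[len(xs)]` always panics for a slice xs,
+	// while `xs[len(xs)-1]` reads the last element.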
+ + indexExpr := astcast.ToIndexExpr(e) + indexed := indexExpr.X + if !typep.IsSlice(c.ctx.TypeOf(indexed)) { + return + } + if !typep.SideEffectFree(c.ctx.TypesInfo, indexed) { + return + } + call := astcast.ToCallExpr(indexExpr.Index) + if astcast.ToIdent(call.Fun).Name != "len" { + return + } + if len(call.Args) != 1 || !astequal.Expr(call.Args[0], indexed) { + return + } + c.warnLenIndex(indexExpr) +} + +func (c *offBy1Checker) warnLenIndex(cause *ast.IndexExpr) { + suggest := astcopy.IndexExpr(cause) + suggest.Index = &ast.BinaryExpr{ + Op: token.SUB, + X: cause.Index, + Y: &ast.BasicLit{Value: "1"}, + } + c.ctx.Warn(cause, "index expr always panics; maybe you wanted %s?", suggest) +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/paramTypeCombine_checker.go b/vendor/github.com/go-critic/go-critic/checkers/paramTypeCombine_checker.go new file mode 100644 index 000000000..8cdad4eee --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/paramTypeCombine_checker.go @@ -0,0 +1,93 @@ +package checkers + +import ( + "go/ast" + + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" + "github.com/go-toolsmith/astequal" +) + +func init() { + var info linter.CheckerInfo + info.Name = "paramTypeCombine" + info.Tags = []string{"style", "opinionated"} + info.Summary = "Detects if function parameters could be combined by type and suggest the way to do it" + info.Before = `func foo(a, b int, c, d int, e, f int, g int) {}` + info.After = `func foo(a, b, c, d, e, f, g int) {}` + + collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { + return astwalk.WalkerForFuncDecl(¶mTypeCombineChecker{ctx: ctx}), nil + }) +} + +type paramTypeCombineChecker struct { + astwalk.WalkHandler + ctx *linter.CheckerContext +} + +func (c *paramTypeCombineChecker) EnterFunc(*ast.FuncDecl) bool { + return true +} + +func (c *paramTypeCombineChecker) VisitFuncDecl(decl *ast.FuncDecl) { + typ := c.optimizeFuncType(decl.Type) + if !astequal.Expr(typ, decl.Type) { + c.warn(decl.Type, typ) + } +} + +func (c *paramTypeCombineChecker) optimizeFuncType(f *ast.FuncType) *ast.FuncType { + return &ast.FuncType{ + Params: c.optimizeParams(f.Params), + Results: c.optimizeParams(f.Results), + } +} +func (c *paramTypeCombineChecker) optimizeParams(params *ast.FieldList) *ast.FieldList { + // To avoid false positives, skip unnamed param lists. + // + // We're using a property that Go only permits unnamed params + // for the whole list, so it's enough to check whether any of + // ast.Field have empty name list. + skip := params == nil || + len(params.List) < 2 || + len(params.List[0].Names) == 0 || + c.paramsAreMultiLine(params) + if skip { + return params + } + + list := []*ast.Field{} + names := make([]*ast.Ident, len(params.List[0].Names)) + copy(names, params.List[0].Names) + list = append(list, &ast.Field{ + Names: names, + Type: params.List[0].Type, + }) + for i, p := range params.List[1:] { + names = make([]*ast.Ident, len(p.Names)) + copy(names, p.Names) + if astequal.Expr(p.Type, params.List[i].Type) { + list[len(list)-1].Names = + append(list[len(list)-1].Names, names...) 
+ } else { + list = append(list, &ast.Field{ + Names: names, + Type: params.List[i+1].Type, + }) + } + } + return &ast.FieldList{ + List: list, + } +} + +func (c *paramTypeCombineChecker) warn(f1, f2 *ast.FuncType) { + c.ctx.Warn(f1, "%s could be replaced with %s", f1, f2) +} + +func (c *paramTypeCombineChecker) paramsAreMultiLine(params *ast.FieldList) bool { + startPos := c.ctx.FileSet.Position(params.Opening) + endPos := c.ctx.FileSet.Position(params.Closing) + return startPos.Line != endPos.Line +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/ptrToRefParam_checker.go b/vendor/github.com/go-critic/go-critic/checkers/ptrToRefParam_checker.go new file mode 100644 index 000000000..88c8f4cb3 --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/ptrToRefParam_checker.go @@ -0,0 +1,70 @@ +package checkers + +import ( + "go/ast" + "go/types" + + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" +) + +func init() { + var info linter.CheckerInfo + info.Name = "ptrToRefParam" + info.Tags = []string{"style", "opinionated", "experimental"} + info.Summary = "Detects input and output parameters that have a type of pointer to referential type" + info.Before = `func f(m *map[string]int) (*chan *int)` + info.After = `func f(m map[string]int) (chan *int)` + + collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { + return astwalk.WalkerForFuncDecl(&ptrToRefParamChecker{ctx: ctx}), nil + }) +} + +type ptrToRefParamChecker struct { + astwalk.WalkHandler + ctx *linter.CheckerContext +} + +func (c *ptrToRefParamChecker) VisitFuncDecl(fn *ast.FuncDecl) { + c.checkParams(fn.Type.Params.List) + if fn.Type.Results != nil { + c.checkParams(fn.Type.Results.List) + } +} + +func (c *ptrToRefParamChecker) checkParams(params []*ast.Field) { + for _, param := range params { + ptr, ok := c.ctx.TypeOf(param.Type).(*types.Pointer) + if !ok { + continue + } + + if c.isRefType(ptr.Elem()) { + if len(param.Names) == 0 { + c.ctx.Warn(param, "consider to make non-pointer type for `%s`", param.Type) + } else { + for i := range param.Names { + c.warn(param.Names[i]) + } + } + } + } +} + +func (c *ptrToRefParamChecker) isRefType(x types.Type) bool { + switch typ := x.(type) { + case *types.Map, *types.Chan, *types.Interface: + return true + case *types.Named: + // Handle underlying type only for interfaces. 
+ if _, ok := typ.Underlying().(*types.Interface); ok { + return true + } + } + return false +} + +func (c *ptrToRefParamChecker) warn(id *ast.Ident) { + c.ctx.Warn(id, "consider `%s' to be of non-pointer type", id) +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/rangeExprCopy_checker.go b/vendor/github.com/go-critic/go-critic/checkers/rangeExprCopy_checker.go new file mode 100644 index 000000000..5615af467 --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/rangeExprCopy_checker.go @@ -0,0 +1,80 @@ +package checkers + +import ( + "go/ast" + "go/types" + + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" +) + +func init() { + var info linter.CheckerInfo + info.Name = "rangeExprCopy" + info.Tags = []string{"performance"} + info.Params = linter.CheckerParams{ + "sizeThreshold": { + Value: 512, + Usage: "size in bytes that makes the warning trigger", + }, + "skipTestFuncs": { + Value: true, + Usage: "whether to check test functions", + }, + } + info.Summary = "Detects expensive copies of `for` loop range expressions" + info.Details = "Suggests to use pointer to array to avoid the copy using `&` on range expression." + info.Before = ` +var xs [2048]byte +for _, x := range xs { // Copies 2048 bytes + // Loop body. +}` + info.After = ` +var xs [2048]byte +for _, x := range &xs { // No copy + // Loop body. +}` + info.Note = "See Go issue for details: https://github.com/golang/go/issues/15812." + + collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { + c := &rangeExprCopyChecker{ctx: ctx} + c.sizeThreshold = int64(info.Params.Int("sizeThreshold")) + c.skipTestFuncs = info.Params.Bool("skipTestFuncs") + return astwalk.WalkerForStmt(c), nil + }) +} + +type rangeExprCopyChecker struct { + astwalk.WalkHandler + ctx *linter.CheckerContext + + sizeThreshold int64 + skipTestFuncs bool +} + +func (c *rangeExprCopyChecker) EnterFunc(fn *ast.FuncDecl) bool { + return fn.Body != nil && + !(c.skipTestFuncs && isUnitTestFunc(c.ctx, fn)) +} + +func (c *rangeExprCopyChecker) VisitStmt(stmt ast.Stmt) { + rng, ok := stmt.(*ast.RangeStmt) + if !ok || rng.Key == nil || rng.Value == nil { + return + } + tv := c.ctx.TypesInfo.Types[rng.X] + if !tv.Addressable() { + return + } + if _, ok := tv.Type.(*types.Array); !ok { + return + } + if size := c.ctx.SizesInfo.Sizeof(tv.Type); size >= c.sizeThreshold { + c.warn(rng, size) + } +} + +func (c *rangeExprCopyChecker) warn(rng *ast.RangeStmt, size int64) { + c.ctx.Warn(rng, "copy of %s (%d bytes) can be avoided with &%s", + rng.X, size, rng.X) +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/rangeValCopy_checker.go b/vendor/github.com/go-critic/go-critic/checkers/rangeValCopy_checker.go new file mode 100644 index 000000000..b34aa5c28 --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/rangeValCopy_checker.go @@ -0,0 +1,75 @@ +package checkers + +import ( + "go/ast" + + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" +) + +func init() { + var info linter.CheckerInfo + info.Name = "rangeValCopy" + info.Tags = []string{"performance"} + info.Params = linter.CheckerParams{ + "sizeThreshold": { + Value: 128, + Usage: "size in bytes that makes the warning trigger", + }, + "skipTestFuncs": { + Value: true, + Usage: "whether to check test functions", + }, + } + info.Summary = "Detects loops that copy big objects during each iteration" + info.Details = "Suggests to 
use index access or take address and make use pointer instead." + info.Before = ` +xs := make([][1024]byte, length) +for _, x := range xs { + // Loop body. +}` + info.After = ` +xs := make([][1024]byte, length) +for i := range xs { + x := &xs[i] + // Loop body. +}` + + collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { + c := &rangeValCopyChecker{ctx: ctx} + c.sizeThreshold = int64(info.Params.Int("sizeThreshold")) + c.skipTestFuncs = info.Params.Bool("skipTestFuncs") + return astwalk.WalkerForStmt(c), nil + }) +} + +type rangeValCopyChecker struct { + astwalk.WalkHandler + ctx *linter.CheckerContext + + sizeThreshold int64 + skipTestFuncs bool +} + +func (c *rangeValCopyChecker) EnterFunc(fn *ast.FuncDecl) bool { + return fn.Body != nil && + !(c.skipTestFuncs && isUnitTestFunc(c.ctx, fn)) +} + +func (c *rangeValCopyChecker) VisitStmt(stmt ast.Stmt) { + rng, ok := stmt.(*ast.RangeStmt) + if !ok || rng.Value == nil { + return + } + typ := c.ctx.TypeOf(rng.Value) + if typ == nil { + return + } + if size := c.ctx.SizesInfo.Sizeof(typ); size >= c.sizeThreshold { + c.warn(rng, size) + } +} + +func (c *rangeValCopyChecker) warn(n ast.Node, size int64) { + c.ctx.Warn(n, "each iteration copies %d bytes (consider pointers or indexing)", size) +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/regexpMust_checker.go b/vendor/github.com/go-critic/go-critic/checkers/regexpMust_checker.go new file mode 100644 index 000000000..600aa73d0 --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/regexpMust_checker.go @@ -0,0 +1,47 @@ +package checkers + +import ( + "go/ast" + "strings" + + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" + "github.com/go-toolsmith/astp" + "golang.org/x/tools/go/ast/astutil" +) + +func init() { + var info linter.CheckerInfo + info.Name = "regexpMust" + info.Tags = []string{"style"} + info.Summary = "Detects `regexp.Compile*` that can be replaced with `regexp.MustCompile*`" + info.Before = `re, _ := regexp.Compile("const pattern")` + info.After = `re := regexp.MustCompile("const pattern")` + + collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { + return astwalk.WalkerForExpr(®expMustChecker{ctx: ctx}), nil + }) +} + +type regexpMustChecker struct { + astwalk.WalkHandler + ctx *linter.CheckerContext +} + +func (c *regexpMustChecker) VisitExpr(x ast.Expr) { + if x, ok := x.(*ast.CallExpr); ok { + switch name := qualifiedName(x.Fun); name { + case "regexp.Compile", "regexp.CompilePOSIX": + // Only check for trivial string args, permit parenthesis. 
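+			// e.g. regexp.Compile("p") and regexp.Compile(("p")) are reported,
+			// while regexp.Compile(patternVar) is left alone since the pattern
+			// may not be constant.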
+ if !astp.IsBasicLit(astutil.Unparen(x.Args[0])) { + return + } + c.warn(x, strings.Replace(name, "Compile", "MustCompile", 1)) + } + } +} + +func (c *regexpMustChecker) warn(cause *ast.CallExpr, suggestion string) { + c.ctx.Warn(cause, "for const patterns like %s, use %s", + cause.Args[0], suggestion) +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/regexpPattern_checker.go b/vendor/github.com/go-critic/go-critic/checkers/regexpPattern_checker.go new file mode 100644 index 000000000..31dc4aad3 --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/regexpPattern_checker.go @@ -0,0 +1,68 @@ +package checkers + +import ( + "go/ast" + "go/constant" + "regexp" + "strings" + + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" +) + +func init() { + var info linter.CheckerInfo + info.Name = "regexpPattern" + info.Tags = []string{"diagnostic", "experimental"} + info.Summary = "Detects suspicious regexp patterns" + info.Before = "regexp.MustCompile(`google.com|yandex.ru`)" + info.After = "regexp.MustCompile(`google\\.com|yandex\\.ru`)" + + collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { + domains := []string{ + "com", + "org", + "info", + "net", + "ru", + "de", + } + + allDomains := strings.Join(domains, "|") + domainRE := regexp.MustCompile(`[^\\]\.(` + allDomains + `)\b`) + return astwalk.WalkerForExpr(®expPatternChecker{ + ctx: ctx, + domainRE: domainRE, + }), nil + }) +} + +type regexpPatternChecker struct { + astwalk.WalkHandler + ctx *linter.CheckerContext + + domainRE *regexp.Regexp +} + +func (c *regexpPatternChecker) VisitExpr(x ast.Expr) { + call, ok := x.(*ast.CallExpr) + if !ok { + return + } + + switch qualifiedName(call.Fun) { + case "regexp.Compile", "regexp.CompilePOSIX", "regexp.MustCompile", "regexp.MustCompilePosix": + cv := c.ctx.TypesInfo.Types[call.Args[0]].Value + if cv == nil || cv.Kind() != constant.String { + return + } + s := constant.StringVal(cv) + if m := c.domainRE.FindStringSubmatch(s); m != nil { + c.warnDomain(call.Args[0], m[1]) + } + } +} + +func (c *regexpPatternChecker) warnDomain(cause ast.Expr, domain string) { + c.ctx.Warn(cause, "'.%s' should probably be '\\.%s'", domain, domain) +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/regexpSimplify_checker.go b/vendor/github.com/go-critic/go-critic/checkers/regexpSimplify_checker.go new file mode 100644 index 000000000..b7dd15948 --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/regexpSimplify_checker.go @@ -0,0 +1,511 @@ +package checkers + +import ( + "fmt" + "go/ast" + "go/constant" + "log" + "strings" + "unicode/utf8" + + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" + "github.com/quasilyte/regex/syntax" +) + +func init() { + var info linter.CheckerInfo + info.Name = "regexpSimplify" + info.Tags = []string{"style", "experimental", "opinionated"} + info.Summary = "Detects regexp patterns that can be simplified" + info.Before = "regexp.MustCompile(`(?:a|b|c) [a-z][a-z]*`)" + info.After = "regexp.MustCompile(`[abc] {3}[a-z]+`)" + + // TODO(quasilyte): add params to control most opinionated replacements + // like `[0-9] -> \d` + // `[[:digit:]] -> \d` + // `[A-Za-z0-9_]` -> `\w` + + collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { + opts := &syntax.ParserOptions{ + NoLiterals: true, + } + c := ®expSimplifyChecker{ + ctx: ctx, + parser: 
syntax.NewParser(opts), + out: &strings.Builder{}, + } + return astwalk.WalkerForExpr(c), nil + }) +} + +type regexpSimplifyChecker struct { + astwalk.WalkHandler + ctx *linter.CheckerContext + parser *syntax.Parser + + // out is a tmp buffer where we build a simplified regexp pattern. + out *strings.Builder + // score is a number of applied simplifications + score int +} + +func (c *regexpSimplifyChecker) VisitExpr(x ast.Expr) { + call, ok := x.(*ast.CallExpr) + if !ok { + return + } + + switch qualifiedName(call.Fun) { + case "regexp.Compile", "regexp.MustCompile": + cv := c.ctx.TypesInfo.Types[call.Args[0]].Value + if cv == nil || cv.Kind() != constant.String { + return + } + pat := constant.StringVal(cv) + if len(pat) > 60 { + // Skip scary regexp patterns for now. + break + } + + // Only do 2 passes. + simplified := pat + for pass := 0; pass < 2; pass++ { + candidate := c.simplify(pass, simplified) + if candidate == "" { + break + } + simplified = candidate + } + if simplified != "" && simplified != pat { + c.warn(call.Args[0], pat, simplified) + } + } +} + +func (c *regexpSimplifyChecker) simplify(pass int, pat string) string { + re, err := c.parser.Parse(pat) + if err != nil { + return "" + } + + c.score = 0 + c.out.Reset() + + // TODO(quasilyte): suggest char ranges for things like [012345689]? + // TODO(quasilyte): evaluate char range to suggest better replacements. + // TODO(quasilyte): (?:ab|ac) -> a[bc] + // TODO(quasilyte): suggest "s" and "." flag if things like [\w\W] are used. + // TODO(quasilyte): x{n}x? -> x{n,n+1} + + c.walk(re.Expr) + + if debug() { + // This happens only in one of two cases: + // 1. Parser has a bug and we got invalid AST for the given pattern. + // 2. Simplifier incorrectly built a replacement string from the AST. + if c.score == 0 && c.out.String() != pat { + log.Printf("pass %d: unexpected pattern diff:\n\thave: %q\n\twant: %q", + pass, c.out.String(), pat) + } + } + + if c.score > 0 { + return c.out.String() + } + return "" +} + +func (c *regexpSimplifyChecker) walk(e syntax.Expr) { + out := c.out + + switch e.Op { + case syntax.OpConcat: + c.walkConcat(e) + + case syntax.OpAlt: + c.walkAlt(e) + + case syntax.OpCharRange: + s := c.simplifyCharRange(e) + if s != "" { + out.WriteString(s) + c.score++ + } else { + out.WriteString(e.Value) + } + + case syntax.OpGroupWithFlags: + out.WriteString("(") + out.WriteString(e.Args[1].Value) + out.WriteString(":") + c.walk(e.Args[0]) + out.WriteString(")") + case syntax.OpGroup: + c.walkGroup(e) + case syntax.OpCapture: + out.WriteString("(") + c.walk(e.Args[0]) + out.WriteString(")") + case syntax.OpNamedCapture: + out.WriteString("(?P<") + out.WriteString(e.Args[1].Value) + out.WriteString(">") + c.walk(e.Args[0]) + out.WriteString(")") + + case syntax.OpRepeat: + // TODO(quasilyte): is it worth it to analyze repeat argument + // more closely and handle `{n,n} -> {n}` cases? + rep := e.Args[1].Value + switch rep { + case "{0,1}": + c.walk(e.Args[0]) + out.WriteString("?") + c.score++ + case "{1,}": + c.walk(e.Args[0]) + out.WriteString("+") + c.score++ + case "{0,}": + c.walk(e.Args[0]) + out.WriteString("*") + c.score++ + case "{0}": + // Maybe {0} should be reported by another check, regexpLint? 
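+			// `x{0}` matches only the empty string, so the repeated node is
+			// simply dropped from the rewritten pattern.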
+ c.score++ + case "{1}": + c.walk(e.Args[0]) + c.score++ + default: + c.walk(e.Args[0]) + out.WriteString(rep) + } + + case syntax.OpPosixClass: + out.WriteString(e.Value) + + case syntax.OpNegCharClass: + s := c.simplifyNegCharClass(e) + if s != "" { + c.out.WriteString(s) + c.score++ + } else { + out.WriteString("[^") + for _, e := range e.Args { + c.walk(e) + } + out.WriteString("]") + } + + case syntax.OpCharClass: + s := c.simplifyCharClass(e) + if s != "" { + c.out.WriteString(s) + c.score++ + } else { + out.WriteString("[") + for _, e := range e.Args { + c.walk(e) + } + out.WriteString("]") + } + + case syntax.OpEscapeChar: + switch e.Value { + case `\&`, `\#`, `\!`, `\@`, `\%`, `\<`, `\>`, `\:`, `\;`, `\/`, `\,`, `\=`, `\.`: + c.score++ + out.WriteString(e.Value[len(`\`):]) + default: + out.WriteString(e.Value) + } + + case syntax.OpQuestion, syntax.OpNonGreedy: + c.walk(e.Args[0]) + out.WriteString("?") + case syntax.OpStar: + c.walk(e.Args[0]) + out.WriteString("*") + case syntax.OpPlus: + c.walk(e.Args[0]) + out.WriteString("+") + + default: + out.WriteString(e.Value) + } +} + +func (c *regexpSimplifyChecker) walkGroup(g syntax.Expr) { + switch g.Args[0].Op { + case syntax.OpChar, syntax.OpEscapeChar, syntax.OpEscapeMeta, syntax.OpCharClass: + c.walk(g.Args[0]) + c.score++ + return + } + + c.out.WriteString("(?:") + c.walk(g.Args[0]) + c.out.WriteString(")") +} + +func (c *regexpSimplifyChecker) simplifyNegCharClass(e syntax.Expr) string { + switch e.Value { + case `[^0-9]`: + return `\D` + case `[^\s]`: + return `\S` + case `[^\S]`: + return `\s` + case `[^\w]`: + return `\W` + case `[^\W]`: + return `\w` + case `[^\d]`: + return `\D` + case `[^\D]`: + return `\d` + case `[^[:^space:]]`: + return `\s` + case `[^[:space:]]`: + return `\S` + case `[^[:^word:]]`: + return `\w` + case `[^[:word:]]`: + return `\W` + case `[^[:^digit:]]`: + return `\d` + case `[^[:digit:]]`: + return `\D` + } + + return "" +} + +func (c *regexpSimplifyChecker) simplifyCharClass(e syntax.Expr) string { + switch e.Value { + case `[0-9]`: + return `\d` + case `[[:word:]]`: + return `\w` + case `[[:^word:]]`: + return `\W` + case `[[:digit:]]`: + return `\d` + case `[[:^digit:]]`: + return `\D` + case `[[:space:]]`: + return `\s` + case `[[:^space:]]`: + return `\S` + case `[][]`: + return `\]\[` + case `[]]`: + return `\]` + } + + if len(e.Args) == 1 { + switch e.Args[0].Op { + case syntax.OpChar: + switch v := e.Args[0].Value; v { + case "|", "*", "+", "?", ".", "[", "^", "$", "(", ")": + // Can't take outside of the char group without escaping. 
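+			// e.g. `[x]` can become plain `x`, but `[*]` must keep the class
+			// (a bare `*` would be read as a quantifier).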
+ default: + return v + } + case syntax.OpEscapeChar: + return e.Args[0].Value + } + } + + return "" +} + +func (c *regexpSimplifyChecker) canMerge(x, y syntax.Expr) bool { + if x.Op != y.Op { + return false + } + switch x.Op { + case syntax.OpChar, syntax.OpCharClass, syntax.OpEscapeMeta, syntax.OpEscapeChar, syntax.OpNegCharClass, syntax.OpGroup: + return x.Value == y.Value + default: + return false + } +} + +func (c *regexpSimplifyChecker) canCombine(x, y syntax.Expr) (threshold int, ok bool) { + if x.Op != y.Op { + return 0, false + } + + switch x.Op { + case syntax.OpDot: + return 3, true + + case syntax.OpChar: + if x.Value != y.Value { + return 0, false + } + if x.Value == " " { + return 1, true + } + return 4, true + + case syntax.OpEscapeMeta, syntax.OpEscapeChar: + if x.Value == y.Value { + return 2, true + } + + case syntax.OpCharClass, syntax.OpNegCharClass, syntax.OpGroup: + if x.Value == y.Value { + return 1, true + } + } + + return 0, false +} + +func (c *regexpSimplifyChecker) concatLiteral(e syntax.Expr) string { + if e.Op == syntax.OpConcat && c.allChars(e) { + return e.Value + } + return "" +} + +func (c *regexpSimplifyChecker) allChars(e syntax.Expr) bool { + for _, a := range e.Args { + if a.Op != syntax.OpChar { + return false + } + } + return true +} + +func (c *regexpSimplifyChecker) factorPrefixSuffix(alt syntax.Expr) bool { + // TODO: more forms of prefixes/suffixes? + // + // A more generalized algorithm could handle `fo|fo1|fo2` -> `fo[12]?`. + // but it's an open question whether the latter form universally better. + // + // Right now it handles only the simplest cases: + // `http|https` -> `https?` + // `xfoo|foo` -> `x?foo` + if len(alt.Args) != 2 { + return false + } + x := c.concatLiteral(alt.Args[0]) + y := c.concatLiteral(alt.Args[1]) + if x == y { + return false // Reject non-literals and identical strings early + } + + // Let x be a shorter string. + if len(x) > len(y) { + x, y = y, x + } + // Do we have a common prefix? + tail := strings.TrimPrefix(y, x) + if len(tail) <= utf8.UTFMax && utf8.RuneCountInString(tail) == 1 { + c.out.WriteString(x + tail + "?") + c.score++ + return true + } + // Do we have a common suffix? + head := strings.TrimSuffix(y, x) + if len(head) <= utf8.UTFMax && utf8.RuneCountInString(head) == 1 { + c.out.WriteString(head + "?" + x) + c.score++ + return true + } + return false +} + +func (c *regexpSimplifyChecker) walkAlt(alt syntax.Expr) { + // `x|y|z` -> `[xyz]`. + if c.allChars(alt) { + c.score++ + c.out.WriteString("[") + for _, e := range alt.Args { + c.out.WriteString(e.Value) + } + c.out.WriteString("]") + return + } + + if c.factorPrefixSuffix(alt) { + return + } + + for i, e := range alt.Args { + c.walk(e) + if i != len(alt.Args)-1 { + c.out.WriteString("|") + } + } +} + +func (c *regexpSimplifyChecker) walkConcat(concat syntax.Expr) { + i := 0 + for i < len(concat.Args) { + x := concat.Args[i] + c.walk(x) + i++ + + if i >= len(concat.Args) { + break + } + + // Try merging `xy*` into `x+` where x=y. + if concat.Args[i].Op == syntax.OpStar { + if c.canMerge(x, concat.Args[i].Args[0]) { + c.out.WriteString("+") + c.score++ + i++ + continue + } + } + + // Try combining `xy` into `x{2}` where x=y. + threshold, ok := c.canCombine(x, concat.Args[i]) + if !ok { + continue + } + n := 1 // Can combine at least 1 pair. 
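+		// Count how many of the following nodes can still be combined with x;
+		// e.g. with a high enough count `aaaaa` is rewritten as `a{5}`.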
+ for j := i + 1; j < len(concat.Args); j++ { + _, ok := c.canCombine(x, concat.Args[j]) + if !ok { + break + } + n++ + } + if n >= threshold { + fmt.Fprintf(c.out, "{%d}", n+1) + c.score++ + i += n + } + } +} + +func (c *regexpSimplifyChecker) simplifyCharRange(rng syntax.Expr) string { + if rng.Args[0].Op != syntax.OpChar || rng.Args[1].Op != syntax.OpChar { + return "" + } + + lo := rng.Args[0].Value + hi := rng.Args[1].Value + if len(lo) == 1 && len(hi) == 1 { + switch hi[0] - lo[0] { + case 0: + return lo + case 1: + return fmt.Sprintf("%s%s", lo, hi) + case 2: + return fmt.Sprintf("%s%s%s", lo, string(lo[0]+1), hi) + } + } + + return "" +} + +func (c *regexpSimplifyChecker) warn(cause ast.Expr, orig, suggest string) { + c.ctx.Warn(cause, "can re-write `%s` as `%s`", orig, suggest) +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/ruleguard_checker.go b/vendor/github.com/go-critic/go-critic/checkers/ruleguard_checker.go new file mode 100644 index 000000000..ecb3dc9ee --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/ruleguard_checker.go @@ -0,0 +1,157 @@ +package checkers + +import ( + "bytes" + "fmt" + "go/ast" + "go/token" + "io/ioutil" + "log" + "os" + "path/filepath" + "sort" + "strings" + + "github.com/go-critic/go-critic/framework/linter" + "github.com/quasilyte/go-ruleguard/ruleguard" +) + +func init() { + var info linter.CheckerInfo + info.Name = "ruleguard" + info.Tags = []string{"style", "experimental"} + info.Params = linter.CheckerParams{ + "rules": { + Value: "", + Usage: "comma-separated list of gorule file paths. Glob patterns such as 'rules-*.go' may be specified", + }, + "debug": { + Value: "", + Usage: "enable debug for the specified named rules group", + }, + "failOnError": { + Value: false, + Usage: "If true, panic when the gorule files contain a syntax error. If false, log and skip rules that contain an error", + }, + } + info.Summary = "Runs user-defined rules using ruleguard linter" + info.Details = "Reads a rules file and turns them into go-critic checkers." + info.Before = `N/A` + info.After = `N/A` + info.Note = "See https://github.com/quasilyte/go-ruleguard." + + collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { + return newRuleguardChecker(&info, ctx) + }) +} + +func newRuleguardChecker(info *linter.CheckerInfo, ctx *linter.CheckerContext) (*ruleguardChecker, error) { + c := &ruleguardChecker{ + ctx: ctx, + debugGroup: info.Params.String("debug"), + } + rulesFlag := info.Params.String("rules") + if rulesFlag == "" { + return c, nil + } + failOnErrorFlag := info.Params.Bool("failOnError") + + // TODO(quasilyte): handle initialization errors better when we make + // a transition to the go/analysis framework. + // + // For now, we log error messages and return a ruleguard checker + // with an empty rules set. + + engine := ruleguard.NewEngine() + fset := token.NewFileSet() + filePatterns := strings.Split(rulesFlag, ",") + + parseContext := &ruleguard.ParseContext{ + Fset: fset, + } + + loaded := 0 + for _, filePattern := range filePatterns { + filenames, err := filepath.Glob(strings.TrimSpace(filePattern)) + if err != nil { + // The only possible returned error is ErrBadPattern, when pattern is malformed. 
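+			// (A pattern that matches no files is not an error: Glob returns
+			// an empty slice and the filename loop below is skipped.)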
+ log.Printf("ruleguard init error: %+v", err) + continue + } + for _, filename := range filenames { + data, err := ioutil.ReadFile(filename) + if err != nil { + if failOnErrorFlag { + return nil, fmt.Errorf("ruleguard init error: %+v", err) + } + log.Printf("ruleguard init error: %+v", err) + continue + } + if err := engine.Load(parseContext, filename, bytes.NewReader(data)); err != nil { + if failOnErrorFlag { + return nil, fmt.Errorf("ruleguard init error: %+v", err) + } + log.Printf("ruleguard init error: %+v", err) + continue + } + loaded++ + } + } + + if loaded != 0 { + c.engine = engine + } + return c, nil +} + +type ruleguardChecker struct { + ctx *linter.CheckerContext + + debugGroup string + engine *ruleguard.Engine +} + +func (c *ruleguardChecker) WalkFile(f *ast.File) { + if c.engine == nil { + return + } + + type ruleguardReport struct { + node ast.Node + message string + } + var reports []ruleguardReport + + ctx := &ruleguard.RunContext{ + Debug: c.debugGroup, + DebugPrint: func(s string) { + fmt.Fprintln(os.Stderr, s) + }, + Pkg: c.ctx.Pkg, + Types: c.ctx.TypesInfo, + Sizes: c.ctx.SizesInfo, + Fset: c.ctx.FileSet, + Report: func(_ ruleguard.GoRuleInfo, n ast.Node, msg string, _ *ruleguard.Suggestion) { + // TODO(quasilyte): investigate whether we should add a rule name as + // a message prefix here. + reports = append(reports, ruleguardReport{ + node: n, + message: msg, + }) + }, + } + + if err := c.engine.Run(ctx, f); err != nil { + // Normally this should never happen, but since + // we don't have a better mechanism to report errors, + // emit a warning. + c.ctx.Warn(f, "execution error: %v", err) + } + + sort.Slice(reports, func(i, j int) bool { + return reports[i].message < reports[j].message + }) + for _, report := range reports { + c.ctx.Warn(report.node, report.message) + } +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/singleCaseSwitch_checker.go b/vendor/github.com/go-critic/go-critic/checkers/singleCaseSwitch_checker.go new file mode 100644 index 000000000..b369a4344 --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/singleCaseSwitch_checker.go @@ -0,0 +1,84 @@ +package checkers + +import ( + "go/ast" + "go/token" + + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" + "golang.org/x/tools/go/ast/astutil" +) + +func init() { + var info linter.CheckerInfo + info.Name = "singleCaseSwitch" + info.Tags = []string{"style"} + info.Summary = "Detects switch statements that could be better written as if statement" + info.Before = ` +switch x := x.(type) { +case int: + body() +}` + info.After = ` +if x, ok := x.(int); ok { + body() +}` + + collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { + return astwalk.WalkerForStmt(&singleCaseSwitchChecker{ctx: ctx}), nil + }) +} + +type singleCaseSwitchChecker struct { + astwalk.WalkHandler + ctx *linter.CheckerContext +} + +func (c *singleCaseSwitchChecker) VisitStmt(stmt ast.Stmt) { + switch stmt := stmt.(type) { + case *ast.SwitchStmt: + c.checkSwitchStmt(stmt, stmt.Body) + case *ast.TypeSwitchStmt: + c.checkSwitchStmt(stmt, stmt.Body) + } +} + +func (c *singleCaseSwitchChecker) checkSwitchStmt(stmt ast.Stmt, body *ast.BlockStmt) { + if len(body.List) != 1 { + return + } + cc := body.List[0].(*ast.CaseClause) + if c.hasBreak(cc) { + return + } + switch { + case cc.List == nil: + c.warnDefault(stmt) + case len(cc.List) == 1: + c.warn(stmt) + } +} + +func (c *singleCaseSwitchChecker) hasBreak(stmt 
ast.Stmt) bool { + found := false + astutil.Apply(stmt, func(cur *astutil.Cursor) bool { + switch n := cur.Node().(type) { + case *ast.BranchStmt: + if n.Tok == token.BREAK { + found = true + } + case *ast.ForStmt, *ast.RangeStmt, *ast.SelectStmt, *ast.SwitchStmt: + return false + } + return true + }, nil) + return found +} + +func (c *singleCaseSwitchChecker) warn(stmt ast.Stmt) { + c.ctx.Warn(stmt, "should rewrite switch statement to if statement") +} + +func (c *singleCaseSwitchChecker) warnDefault(stmt ast.Stmt) { + c.ctx.Warn(stmt, "found switch with default case only") +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/sloppyLen_checker.go b/vendor/github.com/go-critic/go-critic/checkers/sloppyLen_checker.go new file mode 100644 index 000000000..a08ef0a5c --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/sloppyLen_checker.go @@ -0,0 +1,72 @@ +package checkers + +import ( + "go/ast" + "go/token" + + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" + "github.com/go-toolsmith/astcopy" + "github.com/go-toolsmith/astfmt" +) + +func init() { + var info linter.CheckerInfo + info.Name = "sloppyLen" + info.Tags = []string{"style"} + info.Summary = "Detects usage of `len` when result is obvious or doesn't make sense" + info.Before = ` +len(arr) >= 0 // Sloppy +len(arr) <= 0 // Sloppy +len(arr) < 0 // Doesn't make sense at all` + info.After = ` +len(arr) > 0 +len(arr) == 0` + + collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { + return astwalk.WalkerForExpr(&sloppyLenChecker{ctx: ctx}), nil + }) +} + +type sloppyLenChecker struct { + astwalk.WalkHandler + ctx *linter.CheckerContext +} + +func (c *sloppyLenChecker) VisitExpr(x ast.Expr) { + expr, ok := x.(*ast.BinaryExpr) + if !ok { + return + } + + if expr.Op == token.LSS || expr.Op == token.GEQ || expr.Op == token.LEQ { + if c.isLenCall(expr.X) && c.isZero(expr.Y) { + c.warn(expr) + } + } +} + +func (c *sloppyLenChecker) isLenCall(x ast.Expr) bool { + call, ok := x.(*ast.CallExpr) + return ok && qualifiedName(call.Fun) == "len" && len(call.Args) == 1 +} + +func (c *sloppyLenChecker) isZero(x ast.Expr) bool { + value, ok := x.(*ast.BasicLit) + return ok && value.Value == "0" +} + +func (c *sloppyLenChecker) warn(cause *ast.BinaryExpr) { + info := "" + switch cause.Op { + case token.LSS: + info = "is always false" + case token.GEQ: + info = "is always true" + case token.LEQ: + expr := astcopy.BinaryExpr(cause) + expr.Op = token.EQL + info = astfmt.Sprintf("can be %s", expr) + } + c.ctx.Warn(cause, "%s %s", cause, info) +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/sloppyReassign_checker.go b/vendor/github.com/go-critic/go-critic/checkers/sloppyReassign_checker.go new file mode 100644 index 000000000..2f9ac62e1 --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/sloppyReassign_checker.go @@ -0,0 +1,80 @@ +package checkers + +import ( + "go/ast" + "go/token" + + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" + "github.com/go-toolsmith/astcast" + "github.com/go-toolsmith/astcopy" + "github.com/go-toolsmith/astequal" +) + +func init() { + var info linter.CheckerInfo + info.Name = "sloppyReassign" + info.Tags = []string{"diagnostic", "experimental"} + info.Summary = "Detects suspicious/confusing re-assignments" + info.Before = `if err = f(); err != nil { return err }` + info.After = `if err := f(); err != nil { return err }` + 
+ collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { + return astwalk.WalkerForStmt(&sloppyReassignChecker{ctx: ctx}), nil + }) +} + +type sloppyReassignChecker struct { + astwalk.WalkHandler + ctx *linter.CheckerContext +} + +func (c *sloppyReassignChecker) VisitStmt(stmt ast.Stmt) { + // Right now only check assignments in if statements init. + ifStmt := astcast.ToIfStmt(stmt) + assign := astcast.ToAssignStmt(ifStmt.Init) + if assign.Tok != token.ASSIGN { + return + } + + // TODO(quasilyte): is handling of multi-value assignments worthwhile? + if len(assign.Lhs) != 1 || len(assign.Rhs) != 1 { + return + } + + // TODO(quasilyte): handle not only the simplest, return-only case. + body := ifStmt.Body.List + if len(body) != 1 { + return + } + + // Variable that is being re-assigned. + reAssigned := astcast.ToIdent(assign.Lhs[0]) + if reAssigned.Name == "" { + return + } + + // TODO(quasilyte): handle not only nil comparisons. + eqToNil := &ast.BinaryExpr{ + Op: token.NEQ, + X: reAssigned, + Y: &ast.Ident{Name: "nil"}, + } + if !astequal.Expr(ifStmt.Cond, eqToNil) { + return + } + + results := astcast.ToReturnStmt(body[0]).Results + for _, res := range results { + if astequal.Expr(reAssigned, res) { + c.warnAssignToDefine(assign, reAssigned.Name) + break + } + } +} + +func (c *sloppyReassignChecker) warnAssignToDefine(assign *ast.AssignStmt, name string) { + suggest := astcopy.AssignStmt(assign) + suggest.Tok = token.DEFINE + c.ctx.Warn(assign, "re-assignment to `%s` can be replaced with `%s`", name, suggest) +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/sloppyTypeAssert_checker.go b/vendor/github.com/go-critic/go-critic/checkers/sloppyTypeAssert_checker.go new file mode 100644 index 000000000..243925368 --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/sloppyTypeAssert_checker.go @@ -0,0 +1,75 @@ +package checkers + +import ( + "go/ast" + "go/types" + + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" + "github.com/go-toolsmith/astcast" +) + +func init() { + var info linter.CheckerInfo + info.Name = "sloppyTypeAssert" + info.Tags = []string{"diagnostic", "experimental"} + info.Summary = "Detects redundant type assertions" + info.Before = ` +func f(r io.Reader) interface{} { + return r.(interface{}) +} +` + info.After = ` +func f(r io.Reader) interface{} { + return r +} +` + + collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { + return astwalk.WalkerForExpr(&sloppyTypeAssertChecker{ctx: ctx}), nil + }) +} + +type sloppyTypeAssertChecker struct { + astwalk.WalkHandler + ctx *linter.CheckerContext +} + +func (c *sloppyTypeAssertChecker) VisitExpr(expr ast.Expr) { + assert := astcast.ToTypeAssertExpr(expr) + if assert.Type == nil { + return + } + + toType := c.ctx.TypeOf(expr) + fromType := c.ctx.TypeOf(assert.X) + + if types.Identical(toType, fromType) { + c.warnIdentical(expr) + return + } + + toIface, ok := toType.Underlying().(*types.Interface) + if !ok { + return + } + + switch { + case toIface.Empty(): + c.warnEmpty(expr) + case types.Implements(fromType, toIface): + c.warnImplements(expr, assert.X) + } +} + +func (c *sloppyTypeAssertChecker) warnIdentical(cause ast.Expr) { + c.ctx.Warn(cause, "type assertion from/to types are identical") +} + +func (c *sloppyTypeAssertChecker) warnEmpty(cause ast.Expr) { + c.ctx.Warn(cause, "type assertion to interface{} may be redundant") +} + +func (c *sloppyTypeAssertChecker) 
warnImplements(cause, val ast.Expr) { + c.ctx.Warn(cause, "type assertion may be redundant as %s always implements selected interface", val) +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/sortSlice_checker.go b/vendor/github.com/go-critic/go-critic/checkers/sortSlice_checker.go new file mode 100644 index 000000000..29550da3f --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/sortSlice_checker.go @@ -0,0 +1,135 @@ +package checkers + +import ( + "go/ast" + "go/token" + + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/checkers/internal/lintutil" + "github.com/go-critic/go-critic/framework/linter" + "github.com/go-toolsmith/astcast" + "github.com/go-toolsmith/astequal" + "github.com/go-toolsmith/typep" + "golang.org/x/tools/go/ast/astutil" +) + +func init() { + var info linter.CheckerInfo + info.Name = "sortSlice" + info.Tags = []string{"diagnostic", "experimental"} + info.Summary = "Detects suspicious sort.Slice calls" + info.Before = `sort.Slice(xs, func(i, j) bool { return keys[i] < keys[j] })` + info.After = `sort.Slice(kv, func(i, j) bool { return kv[i].key < kv[j].key })` + + collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { + return astwalk.WalkerForExpr(&sortSliceChecker{ctx: ctx}), nil + }) +} + +type sortSliceChecker struct { + astwalk.WalkHandler + ctx *linter.CheckerContext +} + +func (c *sortSliceChecker) VisitExpr(expr ast.Expr) { + call := astcast.ToCallExpr(expr) + if len(call.Args) != 2 { + return + } + switch qualifiedName(call.Fun) { + case "sort.Slice", "sort.SliceStable": + // OK. + default: + return + } + + slice := c.unwrapSlice(call.Args[0]) + lessFunc, ok := call.Args[1].(*ast.FuncLit) + if !ok { + return + } + if !typep.SideEffectFree(c.ctx.TypesInfo, slice) { + return // Don't check unpredictable slice values + } + + ivar, jvar := c.paramIdents(lessFunc.Type) + if ivar == nil || jvar == nil { + return + } + + if len(lessFunc.Body.List) != 1 { + return + } + ret, ok := lessFunc.Body.List[0].(*ast.ReturnStmt) + if !ok { + return + } + cmp := astcast.ToBinaryExpr(astutil.Unparen(ret.Results[0])) + if !typep.SideEffectFree(c.ctx.TypesInfo, cmp) { + return + } + switch cmp.Op { + case token.LSS, token.LEQ, token.GTR, token.GEQ: + // Both cmp.X and cmp.Y are expected to be some expressions + // over the `slice` expression. In the simplest case, + // it's a `slice[i] slice[j]`. + if !c.containsSlice(cmp.X, slice) && !c.containsSlice(cmp.Y, slice) { + c.warnSlice(cmp, slice) + } + + // This one is more about the style, but can reveal potential issue + // or misprint in sorting condition. + // We give a warn if X contains indexing with `i` index and Y + // contains indexing with `j`. + if c.containsIndex(cmp.X, jvar) && c.containsIndex(cmp.Y, ivar) { + c.warnIndex(cmp, ivar, jvar) + } + } +} + +func (c *sortSliceChecker) paramIdents(e *ast.FuncType) (ivar, jvar *ast.Ident) { + // Covers both `i, j int` and `i int, j int`. + idents := make([]*ast.Ident, 0, 2) + for _, field := range e.Params.List { + idents = append(idents, field.Names...) 
+ } + if len(idents) == 2 { + return idents[0], idents[1] + } + return nil, nil +} + +func (c *sortSliceChecker) unwrapSlice(e ast.Expr) ast.Expr { + switch e := e.(type) { + case *ast.ParenExpr: + return c.unwrapSlice(e.X) + case *ast.SliceExpr: + return e.X + default: + return e + } +} + +func (c *sortSliceChecker) containsIndex(e, index ast.Expr) bool { + return lintutil.ContainsNode(e, func(n ast.Node) bool { + indexing, ok := n.(*ast.IndexExpr) + if !ok { + return false + } + return astequal.Expr(indexing.Index, index) + }) +} + +func (c *sortSliceChecker) containsSlice(e, slice ast.Expr) bool { + return lintutil.ContainsNode(e, func(n ast.Node) bool { + return astequal.Node(n, slice) + }) +} + +func (c *sortSliceChecker) warnSlice(cause ast.Node, slice ast.Expr) { + c.ctx.Warn(cause, "cmp func must use %s slice in comparison", slice) +} + +func (c *sortSliceChecker) warnIndex(cause ast.Node, ivar, jvar *ast.Ident) { + c.ctx.Warn(cause, "unusual order of {%s,%s} params in comparison", ivar, jvar) +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/sqlQuery_checker.go b/vendor/github.com/go-critic/go-critic/checkers/sqlQuery_checker.go new file mode 100644 index 000000000..eb3b49d88 --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/sqlQuery_checker.go @@ -0,0 +1,167 @@ +package checkers + +import ( + "go/ast" + "go/types" + + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" + "github.com/go-toolsmith/astcast" +) + +func init() { + var info linter.CheckerInfo + info.Name = "sqlQuery" + info.Tags = []string{"diagnostic", "experimental"} + info.Summary = "Detects issue in Query() and Exec() calls" + info.Before = `_, err := db.Query("UPDATE ...")` + info.After = `_, err := db.Exec("UPDATE ...")` + + collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { + return astwalk.WalkerForStmt(&sqlQueryChecker{ctx: ctx}), nil + }) +} + +type sqlQueryChecker struct { + astwalk.WalkHandler + ctx *linter.CheckerContext +} + +func (c *sqlQueryChecker) VisitStmt(stmt ast.Stmt) { + assign := astcast.ToAssignStmt(stmt) + if len(assign.Lhs) != 2 { // Query() has 2 return values. + return + } + if len(assign.Rhs) != 1 { + return + } + + // If Query() is called, but first return value is ignored, + // there is no way to close/read the returned rows. + // This can cause a connection leak. + if id, ok := assign.Lhs[0].(*ast.Ident); ok && id.Name != "_" { + return + } + + call := astcast.ToCallExpr(assign.Rhs[0]) + funcExpr := astcast.ToSelectorExpr(call.Fun) + if !c.funcIsQuery(funcExpr) { + return + } + + if c.typeHasExecMethod(c.ctx.TypeOf(funcExpr.X)) { + c.warnAndSuggestExec(funcExpr) + } else { + c.warnRowsIgnored(funcExpr) + } +} + +func (c *sqlQueryChecker) funcIsQuery(funcExpr *ast.SelectorExpr) bool { + if funcExpr.Sel == nil { + return false + } + switch funcExpr.Sel.Name { + case "Query", "QueryContext": + // Stdlib and friends. + case "Queryx", "QueryxContext": + // sqlx. + default: + return false + } + + // To avoid false positives (unrelated types can have Query method) + // check that the 1st returned type has Row-like name. 
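+	// e.g. database/sql Query returns (*sql.Rows, error) and sqlx Queryx
+	// returns (*sqlx.Rows, error): two results with a Rows-named first type.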
+ typ, ok := c.ctx.TypeOf(funcExpr).Underlying().(*types.Signature) + if !ok || typ.Results() == nil || typ.Results().Len() != 2 { + return false + } + if !c.typeIsRowsLike(typ.Results().At(0).Type()) { + return false + } + + return true +} + +func (c *sqlQueryChecker) typeIsRowsLike(typ types.Type) bool { + switch typ := typ.(type) { + case *types.Pointer: + return c.typeIsRowsLike(typ.Elem()) + case *types.Named: + return typ.Obj().Name() == "Rows" + default: + return false + } +} + +func (c *sqlQueryChecker) funcIsExec(fn *types.Func) bool { + if fn.Name() != "Exec" { + return false + } + + // Expect exactly 2 results. + sig := fn.Type().(*types.Signature) + if sig.Results() == nil || sig.Results().Len() != 2 { + return false + } + + // Expect at least 1 param and it should be a string (query). + params := sig.Params() + if params == nil || params.Len() == 0 { + return false + } + if typ, ok := params.At(0).Type().(*types.Basic); !ok || typ.Kind() != types.String { + return false + } + + return true +} + +func (c *sqlQueryChecker) typeHasExecMethod(typ types.Type) bool { + switch typ := typ.(type) { + case *types.Struct: + for i := 0; i < typ.NumFields(); i++ { + if c.typeHasExecMethod(typ.Field(i).Type()) { + return true + } + } + case *types.Interface: + for i := 0; i < typ.NumMethods(); i++ { + if c.funcIsExec(typ.Method(i)) { + return true + } + } + case *types.Pointer: + return c.typeHasExecMethod(typ.Elem()) + case *types.Named: + for i := 0; i < typ.NumMethods(); i++ { + if c.funcIsExec(typ.Method(i)) { + return true + } + } + switch ut := typ.Underlying().(type) { + case *types.Interface: + return c.typeHasExecMethod(ut) + case *types.Struct: + // Check embedded types. + for i := 0; i < ut.NumFields(); i++ { + field := ut.Field(i) + if !field.Embedded() { + continue + } + if c.typeHasExecMethod(field.Type()) { + return true + } + } + } + } + + return false +} + +func (c *sqlQueryChecker) warnAndSuggestExec(funcExpr *ast.SelectorExpr) { + c.ctx.Warn(funcExpr, "use %s.Exec() if returned result is not needed", funcExpr.X) +} + +func (c *sqlQueryChecker) warnRowsIgnored(funcExpr *ast.SelectorExpr) { + c.ctx.Warn(funcExpr, "ignoring Query() rows result may lead to a connection leak") +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/stringXbytes_checker.go b/vendor/github.com/go-critic/go-critic/checkers/stringXbytes_checker.go new file mode 100644 index 000000000..bb9f16c07 --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/stringXbytes_checker.go @@ -0,0 +1,47 @@ +package checkers + +import ( + "go/ast" + + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" + "github.com/go-toolsmith/typep" +) + +func init() { + var info linter.CheckerInfo + info.Name = "stringXbytes" + info.Tags = []string{"style"} + info.Summary = "Detects redundant conversions between string and []byte" + info.Before = `copy(b, []byte(s))` + info.After = `copy(b, s)` + + collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { + return astwalk.WalkerForExpr(&stringXbytes{ctx: ctx}), nil + }) +} + +type stringXbytes struct { + astwalk.WalkHandler + ctx *linter.CheckerContext +} + +func (c *stringXbytes) VisitExpr(expr ast.Expr) { + x, ok := expr.(*ast.CallExpr) + if !ok || qualifiedName(x.Fun) != "copy" || len(x.Args) != 2 { + return + } + + src := x.Args[1] + + byteCast, ok := src.(*ast.CallExpr) + if ok && typep.IsTypeExpr(c.ctx.TypesInfo, byteCast.Fun) && + 
typep.HasStringProp(c.ctx.TypeOf(byteCast.Args[0])) { + + c.warn(byteCast, byteCast.Args[0]) + } +} + +func (c *stringXbytes) warn(cause *ast.CallExpr, suggestion ast.Expr) { + c.ctx.Warn(cause, "can simplify `%s` to `%s`", cause, suggestion) +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/switchTrue_checker.go b/vendor/github.com/go-critic/go-critic/checkers/switchTrue_checker.go new file mode 100644 index 000000000..0501a0ba1 --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/switchTrue_checker.go @@ -0,0 +1,49 @@ +package checkers + +import ( + "go/ast" + + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" +) + +func init() { + var info linter.CheckerInfo + info.Name = "switchTrue" + info.Tags = []string{"style"} + info.Summary = "Detects switch-over-bool statements that use explicit `true` tag value" + info.Before = ` +switch true { +case x > y: +}` + info.After = ` +switch { +case x > y: +}` + + collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { + return astwalk.WalkerForStmt(&switchTrueChecker{ctx: ctx}), nil + }) +} + +type switchTrueChecker struct { + astwalk.WalkHandler + ctx *linter.CheckerContext +} + +func (c *switchTrueChecker) VisitStmt(stmt ast.Stmt) { + if stmt, ok := stmt.(*ast.SwitchStmt); ok { + if qualifiedName(stmt.Tag) == "true" { + c.warn(stmt) + } + } +} + +func (c *switchTrueChecker) warn(cause *ast.SwitchStmt) { + if cause.Init == nil { + c.ctx.Warn(cause, "replace 'switch true {}' with 'switch {}'") + } else { + c.ctx.Warn(cause, "replace 'switch %s; true {}' with 'switch %s; {}'", + cause.Init, cause.Init) + } +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/tooManyResults_checker.go b/vendor/github.com/go-critic/go-critic/checkers/tooManyResults_checker.go new file mode 100644 index 000000000..4d4dcc26e --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/tooManyResults_checker.go @@ -0,0 +1,54 @@ +package checkers + +import ( + "go/ast" + "go/types" + + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" +) + +func init() { + var info linter.CheckerInfo + info.Name = "tooManyResultsChecker" + info.Tags = []string{"style", "opinionated", "experimental"} + info.Params = linter.CheckerParams{ + "maxResults": { + Value: 5, + Usage: "maximum number of results", + }, + } + info.Summary = "Detects function with too many results" + info.Before = `func fn() (a, b, c, d float32, _ int, _ bool)` + info.After = `func fn() (resultStruct, bool)` + + collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { + c := astwalk.WalkerForFuncDecl(&tooManyResultsChecker{ + ctx: ctx, + maxParams: info.Params.Int("maxResults"), + }) + return c, nil + }) +} + +type tooManyResultsChecker struct { + astwalk.WalkHandler + ctx *linter.CheckerContext + maxParams int +} + +func (c *tooManyResultsChecker) VisitFuncDecl(decl *ast.FuncDecl) { + typ := c.ctx.TypeOf(decl.Name) + sig, ok := typ.(*types.Signature) + if !ok { + return + } + + if count := sig.Results().Len(); count > c.maxParams { + c.warn(decl) + } +} + +func (c *tooManyResultsChecker) warn(n ast.Node) { + c.ctx.Warn(n, "function has more than %d results, consider to simplify the function", c.maxParams) +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/truncateCmp_checker.go b/vendor/github.com/go-critic/go-critic/checkers/truncateCmp_checker.go new file mode 100644 index 
000000000..cd2346c78 --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/truncateCmp_checker.go @@ -0,0 +1,117 @@ +package checkers + +import ( + "go/ast" + "go/token" + "go/types" + + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" + "github.com/go-toolsmith/astcast" + "github.com/go-toolsmith/astp" +) + +func init() { + var info linter.CheckerInfo + info.Name = "truncateCmp" + info.Tags = []string{"diagnostic", "experimental"} + info.Params = linter.CheckerParams{ + "skipArchDependent": { + Value: true, + Usage: "whether to skip int/uint/uintptr types", + }, + } + info.Summary = "Detects potential truncation issues when comparing ints of different sizes" + info.Before = ` +func f(x int32, y int16) bool { + return int16(x) < y +}` + info.After = ` +func f(x int32, int16) bool { + return x < int32(y) +}` + + collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { + c := &truncateCmpChecker{ctx: ctx} + c.skipArchDependent = info.Params.Bool("skipArchDependent") + return astwalk.WalkerForExpr(c), nil + }) +} + +type truncateCmpChecker struct { + astwalk.WalkHandler + ctx *linter.CheckerContext + + skipArchDependent bool +} + +func (c *truncateCmpChecker) VisitExpr(expr ast.Expr) { + cmp := astcast.ToBinaryExpr(expr) + switch cmp.Op { + case token.LSS, token.GTR, token.LEQ, token.GEQ, token.EQL, token.NEQ: + if astp.IsBasicLit(cmp.X) || astp.IsBasicLit(cmp.Y) { + return // Don't bother about untyped consts + } + leftCast := c.isTruncCast(cmp.X) + rightCast := c.isTruncCast(cmp.Y) + switch { + case leftCast && rightCast: + return + case leftCast: + c.checkCmp(cmp.X, cmp.Y) + case rightCast: + c.checkCmp(cmp.Y, cmp.X) + } + default: + return + } +} + +func (c *truncateCmpChecker) isTruncCast(x ast.Expr) bool { + switch astcast.ToIdent(astcast.ToCallExpr(x).Fun).Name { + case "int8", "int16", "int32", "uint8", "uint16", "uint32": + return true + default: + return false + } +} + +func (c *truncateCmpChecker) checkCmp(cmpX, cmpY ast.Expr) { + // Check if we have a cast to a type that can truncate. + xcast := astcast.ToCallExpr(cmpX) + if len(xcast.Args) != 1 { + return // Just in case of the shadowed builtin + } + + x := xcast.Args[0] + y := cmpY + + // Check that both x and y are signed or unsigned int-typed. 
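+	// e.g. in `int16(x) < y` with x int32 and y int16, converting x down to
+	// int16 may truncate it; the safer fix is to widen y: `x < int32(y)`.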
+ xtyp, ok := c.ctx.TypeOf(x).Underlying().(*types.Basic) + if !ok || xtyp.Info()&types.IsInteger == 0 { + return + } + ytyp, ok := c.ctx.TypeOf(y).Underlying().(*types.Basic) + if !ok || xtyp.Info() != ytyp.Info() { + return + } + + xsize := c.ctx.SizesInfo.Sizeof(xtyp) + ysize := c.ctx.SizesInfo.Sizeof(ytyp) + if xsize <= ysize { + return + } + + if c.skipArchDependent { + switch xtyp.Kind() { + case types.Int, types.Uint, types.Uintptr: + return + } + } + + c.warn(xcast, xsize*8, ysize*8, xtyp.String()) +} + +func (c *truncateCmpChecker) warn(cause ast.Expr, xsize, ysize int64, suggest string) { + c.ctx.Warn(cause, "truncation in comparison %d->%d bit; cast the other operand to %s instead", xsize, ysize, suggest) +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/typeAssertChain_checker.go b/vendor/github.com/go-critic/go-critic/checkers/typeAssertChain_checker.go new file mode 100644 index 000000000..d87657c3b --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/typeAssertChain_checker.go @@ -0,0 +1,132 @@ +package checkers + +import ( + "go/ast" + "go/token" + + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/checkers/internal/lintutil" + "github.com/go-critic/go-critic/framework/linter" + "github.com/go-toolsmith/astcast" + "github.com/go-toolsmith/astequal" + "github.com/go-toolsmith/astp" +) + +func init() { + var info linter.CheckerInfo + info.Name = "typeAssertChain" + info.Tags = []string{"style", "experimental"} + info.Summary = "Detects repeated type assertions and suggests to replace them with type switch statement" + info.Before = ` +if x, ok := v.(T1); ok { + // Code A, uses x. +} else if x, ok := v.(T2); ok { + // Code B, uses x. +} else if x, ok := v.(T3); ok { + // Code C, uses x. +}` + info.After = ` +switch x := v.(T1) { +case cond1: + // Code A, uses x. +case cond2: + // Code B, uses x. +default: + // Code C, uses x. 
+}` + + collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { + return astwalk.WalkerForStmt(&typeAssertChainChecker{ctx: ctx}), nil + }) +} + +type typeAssertChainChecker struct { + astwalk.WalkHandler + ctx *linter.CheckerContext + + cause *ast.IfStmt + visited map[*ast.IfStmt]bool + typeSet lintutil.AstSet +} + +func (c *typeAssertChainChecker) EnterFunc(fn *ast.FuncDecl) bool { + if fn.Body == nil { + return false + } + c.visited = make(map[*ast.IfStmt]bool) + return true +} + +func (c *typeAssertChainChecker) VisitStmt(stmt ast.Stmt) { + ifstmt, ok := stmt.(*ast.IfStmt) + if !ok || c.visited[ifstmt] || ifstmt.Init == nil { + return + } + assertion := c.getTypeAssert(ifstmt) + if assertion == nil { + return + } + c.cause = ifstmt + c.checkIfStmt(ifstmt, assertion) +} + +func (c *typeAssertChainChecker) getTypeAssert(ifstmt *ast.IfStmt) *ast.TypeAssertExpr { + assign := astcast.ToAssignStmt(ifstmt.Init) + if len(assign.Lhs) != 2 || len(assign.Rhs) != 1 { + return nil + } + if !astp.IsIdent(assign.Lhs[0]) || assign.Tok != token.DEFINE { + return nil + } + if !astequal.Expr(assign.Lhs[1], ifstmt.Cond) { + return nil + } + + assertion, ok := assign.Rhs[0].(*ast.TypeAssertExpr) + if !ok { + return nil + } + return assertion +} + +func (c *typeAssertChainChecker) checkIfStmt(stmt *ast.IfStmt, assertion *ast.TypeAssertExpr) { + if c.countTypeAssertions(stmt, assertion) >= 2 { + c.warn() + } +} + +func (c *typeAssertChainChecker) countTypeAssertions(stmt *ast.IfStmt, assertion *ast.TypeAssertExpr) int { + c.typeSet.Clear() + + count := 1 + x := assertion.X + c.typeSet.Insert(assertion.Type) + for { + e, ok := stmt.Else.(*ast.IfStmt) + if !ok { + return count + } + assertion = c.getTypeAssert(e) + if assertion == nil { + return count + } + if !c.typeSet.Insert(assertion.Type) { + // Asserted type is duplicated. + // Type switch does not permit duplicate cases, + // so give up. + return 0 + } + if !astequal.Expr(x, assertion.X) { + // Mixed type asserting chain. + // Can't be easily translated to a type switch. 
+ return 0 + } + stmt = e + count++ + c.visited[e] = true + } +} + +func (c *typeAssertChainChecker) warn() { + c.ctx.Warn(c.cause, "rewrite if-else to type switch statement") +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/typeDefFirst_checker.go b/vendor/github.com/go-critic/go-critic/checkers/typeDefFirst_checker.go new file mode 100644 index 000000000..491e71dfd --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/typeDefFirst_checker.go @@ -0,0 +1,88 @@ +package checkers + +import ( + "go/ast" + "go/token" + + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" +) + +func init() { + var info linter.CheckerInfo + info.Name = "typeDefFirst" + info.Tags = []string{"style", "experimental"} + info.Summary = "Detects method declarations preceding the type definition itself" + info.Before = ` +func (r rec) Method() {} +type rec struct{} +` + info.After = ` +type rec struct{} +func (r rec) Method() {} +` + collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { + return &typeDefFirstChecker{ + ctx: ctx, + }, nil + }) +} + +type typeDefFirstChecker struct { + astwalk.WalkHandler + ctx *linter.CheckerContext + trackedTypes map[string]bool +} + +func (c *typeDefFirstChecker) WalkFile(f *ast.File) { + if len(f.Decls) == 0 { + return + } + + c.trackedTypes = make(map[string]bool) + for _, decl := range f.Decls { + c.walkDecl(decl) + } +} + +func (c *typeDefFirstChecker) walkDecl(decl ast.Decl) { + switch decl := decl.(type) { + case *ast.FuncDecl: + if decl.Recv == nil { + return + } + receiver := decl.Recv.List[0] + typeName := c.receiverType(receiver.Type) + c.trackedTypes[typeName] = true + + case *ast.GenDecl: + if decl.Tok != token.TYPE { + return + } + for _, spec := range decl.Specs { + spec, ok := spec.(*ast.TypeSpec) + if !ok { + return + } + typeName := spec.Name.Name + if val, ok := c.trackedTypes[typeName]; ok && val { + c.warn(decl, typeName) + } + } + } +} + +func (c *typeDefFirstChecker) receiverType(e ast.Expr) string { + switch e := e.(type) { + case *ast.StarExpr: + return c.receiverType(e.X) + case *ast.Ident: + return e.Name + default: + panic("unreachable") + } +} + +func (c *typeDefFirstChecker) warn(cause ast.Node, typeName string) { + c.ctx.Warn(cause, "definition of type '%s' should appear before its methods", typeName) +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/typeSwitchVar_checker.go b/vendor/github.com/go-critic/go-critic/checkers/typeSwitchVar_checker.go new file mode 100644 index 000000000..6bbec5037 --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/typeSwitchVar_checker.go @@ -0,0 +1,97 @@ +package checkers + +import ( + "go/ast" + + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/checkers/internal/lintutil" + "github.com/go-critic/go-critic/framework/linter" + "github.com/go-toolsmith/astequal" + "github.com/go-toolsmith/astp" +) + +func init() { + var info linter.CheckerInfo + info.Name = "typeSwitchVar" + info.Tags = []string{"style"} + info.Summary = "Detects type switches that can benefit from type guard clause with variable" + info.Before = ` +switch v.(type) { +case int: + return v.(int) +case point: + return v.(point).x + v.(point).y +default: + return 0 +}` + info.After = ` +switch v := v.(type) { +case int: + return v +case point: + return v.x + v.y +default: + return 0 +}` + + collection.AddChecker(&info, func(ctx *linter.CheckerContext) 
(linter.FileWalker, error) { + return astwalk.WalkerForStmt(&typeSwitchVarChecker{ctx: ctx}), nil + }) +} + +type typeSwitchVarChecker struct { + astwalk.WalkHandler + ctx *linter.CheckerContext + count int +} + +func (c *typeSwitchVarChecker) VisitStmt(stmt ast.Stmt) { + if stmt, ok := stmt.(*ast.TypeSwitchStmt); ok { + c.count = 0 + c.checkTypeSwitch(stmt) + } +} + +func (c *typeSwitchVarChecker) checkTypeSwitch(root *ast.TypeSwitchStmt) { + if astp.IsAssignStmt(root.Assign) { + return // Already with type guard + } + // Must be a *ast.ExprStmt then. + expr := root.Assign.(*ast.ExprStmt).X.(*ast.TypeAssertExpr).X + object := c.ctx.TypesInfo.ObjectOf(identOf(expr)) + if object == nil { + return // Give up: can't handle shadowing without object + } + + for _, clause := range root.Body.List { + clause := clause.(*ast.CaseClause) + // Multiple types in a list mean that assert.X will have + // a type of interface{} inside clause body. + // We are looking for precise type case. + if len(clause.List) != 1 { + continue + } + // Create artificial node just for matching. + assert1 := ast.TypeAssertExpr{X: expr, Type: clause.List[0]} + for _, stmt := range clause.Body { + assert2 := lintutil.FindNode(stmt, func(x ast.Node) bool { + return astequal.Node(&assert1, x) + }) + if object == c.ctx.TypesInfo.ObjectOf(identOf(assert2)) { + c.count++ + break + } + } + } + if c.count > 0 { + c.warn(root) + } +} + +func (c *typeSwitchVarChecker) warn(n ast.Node) { + msg := "case" + if c.count > 1 { + msg = "cases" + } + c.ctx.Warn(n, "%d "+msg+" can benefit from type switch with assignment", c.count) +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/typeUnparen_checker.go b/vendor/github.com/go-critic/go-critic/checkers/typeUnparen_checker.go new file mode 100644 index 000000000..a3f02e14c --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/typeUnparen_checker.go @@ -0,0 +1,86 @@ +package checkers + +import ( + "go/ast" + + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/checkers/internal/lintutil" + "github.com/go-critic/go-critic/framework/linter" + "github.com/go-toolsmith/astcopy" + "github.com/go-toolsmith/astp" + "golang.org/x/tools/go/ast/astutil" +) + +func init() { + var info linter.CheckerInfo + info.Name = "typeUnparen" + info.Tags = []string{"style", "opinionated"} + info.Summary = "Detects unneded parenthesis inside type expressions and suggests to remove them" + info.Before = `type foo [](func([](func())))` + info.After = `type foo []func([]func())` + + collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { + return astwalk.WalkerForTypeExpr(&typeUnparenChecker{ctx: ctx}, ctx.TypesInfo), nil + }) +} + +type typeUnparenChecker struct { + astwalk.WalkHandler + ctx *linter.CheckerContext +} + +func (c *typeUnparenChecker) VisitTypeExpr(x ast.Expr) { + switch x := x.(type) { + case *ast.ParenExpr: + switch x.X.(type) { + case *ast.StructType: + c.ctx.Warn(x, "could simplify (struct{...}) to struct{...}") + case *ast.InterfaceType: + c.ctx.Warn(x, "could simplify (interface{...}) to interface{...}") + default: + c.warn(x, c.unparenExpr(astcopy.Expr(x))) + } + default: + c.checkTypeExpr(x) + } +} + +func (c *typeUnparenChecker) checkTypeExpr(x ast.Expr) { + switch x := x.(type) { + case *ast.ArrayType: + // Arrays require extra care: we don't want to unparen + // length expression as they are not type expressions. 
+ if !c.hasParens(x.Elt) { + return + } + noParens := astcopy.ArrayType(x) + noParens.Elt = c.unparenExpr(noParens.Elt) + c.warn(x, noParens) + case *ast.StructType, *ast.InterfaceType: + // Only nested fields are to be reported. + default: + if !c.hasParens(x) { + return + } + c.warn(x, c.unparenExpr(astcopy.Expr(x))) + } +} + +func (c *typeUnparenChecker) hasParens(x ast.Expr) bool { + return lintutil.ContainsNode(x, astp.IsParenExpr) +} + +func (c *typeUnparenChecker) unparenExpr(x ast.Expr) ast.Expr { + // Replace every paren expr with expression it encloses. + return astutil.Apply(x, nil, func(cur *astutil.Cursor) bool { + if paren, ok := cur.Node().(*ast.ParenExpr); ok { + cur.Replace(paren.X) + } + return true + }).(ast.Expr) +} + +func (c *typeUnparenChecker) warn(cause, noParens ast.Expr) { + c.SkipChilds = true + c.ctx.Warn(cause, "could simplify %s to %s", cause, noParens) +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/underef_checker.go b/vendor/github.com/go-critic/go-critic/checkers/underef_checker.go new file mode 100644 index 000000000..d0426a9a5 --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/underef_checker.go @@ -0,0 +1,127 @@ +package checkers + +import ( + "go/ast" + "go/types" + + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" + "github.com/go-toolsmith/astcast" + "github.com/go-toolsmith/astp" +) + +func init() { + var info linter.CheckerInfo + info.Name = "underef" + info.Tags = []string{"style"} + info.Params = linter.CheckerParams{ + "skipRecvDeref": { + Value: true, + Usage: "whether to skip (*x).method() calls where x is a pointer receiver", + }, + } + info.Summary = "Detects dereference expressions that can be omitted" + info.Before = ` +(*k).field = 5 +v := (*a)[5] // only if a is array` + info.After = ` +k.field = 5 +v := a[5]` + + collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { + c := &underefChecker{ctx: ctx} + c.skipRecvDeref = info.Params.Bool("skipRecvDeref") + return astwalk.WalkerForExpr(c), nil + }) +} + +type underefChecker struct { + astwalk.WalkHandler + ctx *linter.CheckerContext + + skipRecvDeref bool +} + +func (c *underefChecker) VisitExpr(expr ast.Expr) { + switch n := expr.(type) { + case *ast.SelectorExpr: + expr := astcast.ToParenExpr(n.X) + if c.skipRecvDeref && c.isPtrRecvMethodCall(n.Sel) { + return + } + + if expr, ok := expr.X.(*ast.StarExpr); ok { + if c.checkStarExpr(expr) { + c.warnSelect(n) + } + } + case *ast.IndexExpr: + expr := astcast.ToParenExpr(n.X) + if expr, ok := expr.X.(*ast.StarExpr); ok { + if !c.checkStarExpr(expr) { + return + } + if c.checkArray(expr) { + c.warnArray(n) + } + } + } +} + +func (c *underefChecker) isPtrRecvMethodCall(fn *ast.Ident) bool { + typ, ok := c.ctx.TypeOf(fn).(*types.Signature) + if ok && typ != nil && typ.Recv() != nil { + _, ok := typ.Recv().Type().(*types.Pointer) + return ok + } + return false +} + +func (c *underefChecker) underef(x *ast.ParenExpr) ast.Expr { + // If there is only 1 deref, can remove parenthesis, + // otherwise can remove StarExpr only. + dereferenced := x.X.(*ast.StarExpr).X + if astp.IsStarExpr(dereferenced) { + return &ast.ParenExpr{X: dereferenced} + } + return dereferenced +} + +func (c *underefChecker) warnSelect(expr *ast.SelectorExpr) { + // TODO: add () to function output. 
+ c.ctx.Warn(expr, "could simplify %s to %s.%s", + expr, + c.underef(expr.X.(*ast.ParenExpr)), + expr.Sel.Name) +} + +func (c *underefChecker) warnArray(expr *ast.IndexExpr) { + c.ctx.Warn(expr, "could simplify %s to %s[%s]", + expr, + c.underef(expr.X.(*ast.ParenExpr)), + expr.Index) +} + +// checkStarExpr checks if ast.StarExpr could be simplified. +func (c *underefChecker) checkStarExpr(expr *ast.StarExpr) bool { + typ, ok := c.ctx.TypeOf(expr.X).Underlying().(*types.Pointer) + if !ok { + return false + } + + switch typ.Elem().Underlying().(type) { + case *types.Pointer, *types.Interface: + return false + default: + return true + } +} + +func (c *underefChecker) checkArray(expr *ast.StarExpr) bool { + typ, ok := c.ctx.TypeOf(expr.X).(*types.Pointer) + if !ok { + return false + } + _, ok = typ.Elem().(*types.Array) + return ok +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/unlabelStmt_checker.go b/vendor/github.com/go-critic/go-critic/checkers/unlabelStmt_checker.go new file mode 100644 index 000000000..fab864ec5 --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/unlabelStmt_checker.go @@ -0,0 +1,170 @@ +package checkers + +import ( + "go/ast" + "go/token" + + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/checkers/internal/lintutil" + "github.com/go-critic/go-critic/framework/linter" +) + +func init() { + var info linter.CheckerInfo + info.Name = "unlabelStmt" + info.Tags = []string{"style", "experimental"} + info.Summary = "Detects redundant statement labels" + info.Before = ` +derp: +for x := range xs { + if x == 0 { + break derp + } +}` + info.After = ` +for x := range xs { + if x == 0 { + break + } +}` + + collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { + return astwalk.WalkerForStmt(&unlabelStmtChecker{ctx: ctx}), nil + }) +} + +type unlabelStmtChecker struct { + astwalk.WalkHandler + ctx *linter.CheckerContext +} + +func (c *unlabelStmtChecker) EnterFunc(fn *ast.FuncDecl) bool { + if fn.Body == nil { + return false + } + // TODO(quasilyte): should not do additional traversal here. + // For now, skip all functions that contain goto statement. + return !lintutil.ContainsNode(fn.Body, func(n ast.Node) bool { + br, ok := n.(*ast.BranchStmt) + return ok && br.Tok == token.GOTO + }) +} + +func (c *unlabelStmtChecker) VisitStmt(stmt ast.Stmt) { + labeled, ok := stmt.(*ast.LabeledStmt) + if !ok || !c.canBreakFrom(labeled.Stmt) { + return + } + + // We have a labeled statement from that have labeled continue/break. + // This is an invariant, since unused label is a compile-time error + // and we're currently skipping functions containing goto. + // + // Also note that Go labels are function-scoped and there + // can be no re-definitions. This means that we don't + // need to care about label shadowing or things like that. + // + // The task is to find cases where labeled branch (continue/break) + // is redundant and can be re-written, decreasing the label usages + // and potentially leading to its redundancy, + // or finding the redundant labels right away. + + name := labeled.Label.Name + + // Simplest case that can prove that label is redundant. + // + // If labeled branch is somewhere inside the statement block itself + // and none of the nested break'able statements refer to that label, + // the label can be removed. 
+ matchUsage := func(n ast.Node) bool { + return c.canBreakFrom(n) && c.usesLabel(c.blockStmtOf(n), name) + } + if !lintutil.ContainsNode(c.blockStmtOf(labeled.Stmt), matchUsage) { + c.warnRedundant(labeled) + return + } + + // Only for loops: if last stmt in list is a loop + // that contains labeled "continue" to the outer loop label, + // it can be refactored to use "break" instead. + if c.isLoop(labeled.Stmt) { + body := c.blockStmtOf(labeled.Stmt) + if len(body.List) == 0 { + return + } + last := body.List[len(body.List)-1] + if !c.isLoop(last) { + return + } + br := lintutil.FindNode(c.blockStmtOf(last), func(n ast.Node) bool { + br, ok := n.(*ast.BranchStmt) + return ok && br.Label != nil && + br.Label.Name == name && br.Tok == token.CONTINUE + }) + if br != nil { + c.warnLabeledContinue(br, name) + } + } +} + +// isLoop reports whether n is a loop of some kind. +// In other words, it tells whether n body can contain "continue" +// associated with n. +func (c *unlabelStmtChecker) isLoop(n ast.Node) bool { + switch n.(type) { + case *ast.ForStmt, *ast.RangeStmt: + return true + default: + return false + } +} + +// canBreakFrom reports whether it is possible to "break" or "continue" from n body. +func (c *unlabelStmtChecker) canBreakFrom(n ast.Node) bool { + switch n.(type) { + case *ast.RangeStmt, *ast.ForStmt, *ast.SwitchStmt, *ast.TypeSwitchStmt, *ast.SelectStmt: + return true + default: + return false + } +} + +// blockStmtOf returns body of specified node. +// +// TODO(quasilyte): handle other statements and see if it can be useful +// in other checkers. +func (c *unlabelStmtChecker) blockStmtOf(n ast.Node) *ast.BlockStmt { + switch n := n.(type) { + case *ast.RangeStmt: + return n.Body + case *ast.ForStmt: + return n.Body + case *ast.SwitchStmt: + return n.Body + case *ast.TypeSwitchStmt: + return n.Body + case *ast.SelectStmt: + return n.Body + + default: + return nil + } +} + +// usesLabel reports whether n contains a usage of label. 
+func (c *unlabelStmtChecker) usesLabel(n *ast.BlockStmt, label string) bool { + return lintutil.ContainsNode(n, func(n ast.Node) bool { + branch, ok := n.(*ast.BranchStmt) + return ok && branch.Label != nil && + branch.Label.Name == label && + (branch.Tok == token.CONTINUE || branch.Tok == token.BREAK) + }) +} + +func (c *unlabelStmtChecker) warnRedundant(cause *ast.LabeledStmt) { + c.ctx.Warn(cause, "label %s is redundant", cause.Label) +} + +func (c *unlabelStmtChecker) warnLabeledContinue(cause ast.Node, label string) { + c.ctx.Warn(cause, "change `continue %s` to `break`", label) +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/unlambda_checker.go b/vendor/github.com/go-critic/go-critic/checkers/unlambda_checker.go new file mode 100644 index 000000000..cce995d7a --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/unlambda_checker.go @@ -0,0 +1,100 @@ +package checkers + +import ( + "go/ast" + "go/token" + "go/types" + + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/checkers/internal/lintutil" + "github.com/go-critic/go-critic/framework/linter" + "github.com/go-toolsmith/astcast" + "github.com/go-toolsmith/astequal" + "github.com/go-toolsmith/typep" +) + +func init() { + var info linter.CheckerInfo + info.Name = "unlambda" + info.Tags = []string{"style"} + info.Summary = "Detects function literals that can be simplified" + info.Before = `func(x int) int { return fn(x) }` + info.After = `fn` + + collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { + return astwalk.WalkerForExpr(&unlambdaChecker{ctx: ctx}), nil + }) +} + +type unlambdaChecker struct { + astwalk.WalkHandler + ctx *linter.CheckerContext +} + +func (c *unlambdaChecker) VisitExpr(x ast.Expr) { + fn, ok := x.(*ast.FuncLit) + if !ok || len(fn.Body.List) != 1 { + return + } + + ret, ok := fn.Body.List[0].(*ast.ReturnStmt) + if !ok || len(ret.Results) != 1 { + return + } + + result := astcast.ToCallExpr(ret.Results[0]) + callable := qualifiedName(result.Fun) + if callable == "" { + return // Skip tricky cases; only handle simple calls + } + if isBuiltin(callable) { + return // See #762 + } + hasVars := lintutil.ContainsNode(result.Fun, func(n ast.Node) bool { + id, ok := n.(*ast.Ident) + if !ok { + return false + } + obj, ok := c.ctx.TypesInfo.ObjectOf(id).(*types.Var) + if !ok { + return false + } + // Permit only non-pointer struct method values. + return !typep.IsStruct(obj.Type().Underlying()) + }) + if hasVars { + return // See #888 #1007 + } + + fnType := c.ctx.TypeOf(fn) + resultType := c.ctx.TypeOf(result.Fun) + if !types.Identical(fnType, resultType) { + return + } + // Now check that all arguments match the parameters. 
+	n := 0
+	for _, params := range fn.Type.Params.List {
+		if _, ok := params.Type.(*ast.Ellipsis); ok {
+			if result.Ellipsis == token.NoPos {
+				return
+			}
+			n++
+			continue
+		}
+
+		for _, id := range params.Names {
+			if !astequal.Expr(id, result.Args[n]) {
+				return
+			}
+			n++
+		}
+	}
+
+	if len(result.Args) == n {
+		c.warn(fn, callable)
+	}
+}
+
+func (c *unlambdaChecker) warn(cause ast.Node, suggestion string) {
+	c.ctx.Warn(cause, "replace `%s` with `%s`", cause, suggestion)
+}
diff --git a/vendor/github.com/go-critic/go-critic/checkers/unnamedResult_checker.go b/vendor/github.com/go-critic/go-critic/checkers/unnamedResult_checker.go
new file mode 100644
index 000000000..3149d9e87
--- /dev/null
+++ b/vendor/github.com/go-critic/go-critic/checkers/unnamedResult_checker.go
@@ -0,0 +1,103 @@
+package checkers
+
+import (
+	"go/ast"
+	"go/types"
+
+	"github.com/go-critic/go-critic/checkers/internal/astwalk"
+	"github.com/go-critic/go-critic/framework/linter"
+)
+
+func init() {
+	var info linter.CheckerInfo
+	info.Name = "unnamedResult"
+	info.Tags = []string{"style", "opinionated", "experimental"}
+	info.Params = linter.CheckerParams{
+		"checkExported": {
+			Value: false,
+			Usage: "whether to check exported functions",
+		},
+	}
+	info.Summary = "Detects unnamed results that may benefit from names"
+	info.Before = `func f() (float64, float64)`
+	info.After = `func f() (x, y float64)`
+
+	collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) {
+		c := &unnamedResultChecker{ctx: ctx}
+		c.checkExported = info.Params.Bool("checkExported")
+		return astwalk.WalkerForFuncDecl(c), nil
+	})
+}
+
+type unnamedResultChecker struct {
+	astwalk.WalkHandler
+	ctx *linter.CheckerContext
+
+	checkExported bool
+}
+
+func (c *unnamedResultChecker) VisitFuncDecl(decl *ast.FuncDecl) {
+	if !c.checkExported && ast.IsExported(decl.Name.Name) {
+		return
+	}
+	results := decl.Type.Results
+	switch {
+	case results == nil:
+		return // Function has no results
+	case len(results.List) != 0 && results.List[0].Names != nil:
+		return // Skip named results
+	}
+
+	typeName := func(x ast.Expr) string { return c.typeName(c.ctx.TypeOf(x)) }
+	isError := func(x ast.Expr) bool { return qualifiedName(x) == "error" }
+	isBool := func(x ast.Expr) bool { return qualifiedName(x) == "bool" }
+
+	// Main difference with case of len=2 is that we permit any
+	// typ1 as long as second type is either error or bool.
+ if results.NumFields() == 2 { + typ1, typ2 := results.List[0].Type, results.List[1].Type + name1, name2 := typeName(typ1), typeName(typ2) + cond := (name1 != name2 && name2 != "") || + (!isError(typ1) && isError(typ2)) || + (!isBool(typ1) && isBool(typ2)) + if !cond { + c.warn(decl) + } + return + } + + seen := make(map[string]bool, len(results.List)) + for i := range results.List { + typ := results.List[i].Type + name := typeName(typ) + isLast := i == len(results.List)-1 + + cond := !seen[name] || + (isLast && (isError(typ) || isBool(typ))) + if !cond { + c.warn(decl) + return + } + + seen[name] = true + } +} + +func (c *unnamedResultChecker) typeName(typ types.Type) string { + switch typ := typ.(type) { + case *types.Array: + return c.typeName(typ.Elem()) + case *types.Pointer: + return c.typeName(typ.Elem()) + case *types.Slice: + return c.typeName(typ.Elem()) + case *types.Named: + return typ.Obj().Name() + default: + return "" + } +} + +func (c *unnamedResultChecker) warn(n ast.Node) { + c.ctx.Warn(n, "consider giving a name to these results") +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/unnecessaryBlock_checker.go b/vendor/github.com/go-critic/go-critic/checkers/unnecessaryBlock_checker.go new file mode 100644 index 000000000..72807ddbf --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/unnecessaryBlock_checker.go @@ -0,0 +1,69 @@ +package checkers + +import ( + "go/ast" + "go/token" + + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" +) + +func init() { + var info linter.CheckerInfo + info.Name = "unnecessaryBlock" + info.Tags = []string{"style", "opinionated", "experimental"} + info.Summary = "Detects unnecessary braced statement blocks" + info.Before = ` +x := 1 +{ + print(x) +}` + info.After = ` +x := 1 +print(x)` + + collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { + return astwalk.WalkerForStmtList(&unnecessaryBlockChecker{ctx: ctx}), nil + }) +} + +type unnecessaryBlockChecker struct { + astwalk.WalkHandler + ctx *linter.CheckerContext +} + +func (c *unnecessaryBlockChecker) VisitStmtList(statements []ast.Stmt) { + // Using StmtListVisitor instead of StmtVisitor makes it easier to avoid + // false positives on IfStmt, RangeStmt, ForStmt and alike. + // We only inspect BlockStmt inside statement lists, so this method is not + // called for IfStmt itself, for example. 
+ + for _, stmt := range statements { + stmt, ok := stmt.(*ast.BlockStmt) + if ok && !c.hasDefinitions(stmt) { + c.warn(stmt) + } + } +} + +func (c *unnecessaryBlockChecker) hasDefinitions(stmt *ast.BlockStmt) bool { + for _, bs := range stmt.List { + switch stmt := bs.(type) { + case *ast.AssignStmt: + if stmt.Tok == token.DEFINE { + return true + } + case *ast.DeclStmt: + decl := stmt.Decl.(*ast.GenDecl) + if len(decl.Specs) != 0 { + return true + } + } + } + + return false +} + +func (c *unnecessaryBlockChecker) warn(expr ast.Stmt) { + c.ctx.Warn(expr, "block doesn't have definitions, can be simply deleted") +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/unnecessaryDefer_checker.go b/vendor/github.com/go-critic/go-critic/checkers/unnecessaryDefer_checker.go new file mode 100644 index 000000000..ef72142a1 --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/unnecessaryDefer_checker.go @@ -0,0 +1,111 @@ +package checkers + +import ( + "go/ast" + + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" + "github.com/go-toolsmith/astfmt" +) + +func init() { + var info linter.CheckerInfo + info.Name = "unnecessaryDefer" + info.Tags = []string{"diagnostic", "experimental"} + info.Summary = "Detects redundantly deferred calls" + info.Before = ` +func() { + defer os.Remove(filename) +}` + info.After = ` +func() { + os.Remove(filename) +}` + + collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { + return astwalk.WalkerForFuncDecl(&unnecessaryDeferChecker{ctx: ctx}), nil + }) +} + +type unnecessaryDeferChecker struct { + astwalk.WalkHandler + ctx *linter.CheckerContext + isFunc bool +} + +// Visit implements the ast.Visitor. This visitor keeps track of the block +// statement belongs to a function or any other block. If the block is not a +// function and ends with a defer statement that should be OK since it's +// defering the outer function. +func (c *unnecessaryDeferChecker) Visit(node ast.Node) ast.Visitor { + switch n := node.(type) { + case *ast.FuncDecl, *ast.FuncLit: + c.isFunc = true + case *ast.BlockStmt: + c.checkDeferBeforeReturn(n) + default: + c.isFunc = false + } + + return c +} + +func (c *unnecessaryDeferChecker) VisitFuncDecl(funcDecl *ast.FuncDecl) { + // We always start as a function (*ast.FuncDecl.Body passed) + c.isFunc = true + + ast.Walk(c, funcDecl.Body) +} + +func (c *unnecessaryDeferChecker) checkDeferBeforeReturn(funcDecl *ast.BlockStmt) { + // Check if we have an explicit return or if it's just the end of the scope. + explicitReturn := false + retIndex := len(funcDecl.List) + for i, stmt := range funcDecl.List { + retStmt, ok := stmt.(*ast.ReturnStmt) + if !ok { + continue + } + explicitReturn = true + if !c.isTrivialReturn(retStmt) { + continue + } + retIndex = i + break + } + if retIndex == 0 { + return + } + + if deferStmt, ok := funcDecl.List[retIndex-1].(*ast.DeferStmt); ok { + // If the block is a function and ending with return or if we have an + // explicit return in any other block we should warn about + // unnecessary defer. 
+ if c.isFunc || explicitReturn { + c.warn(deferStmt) + } + } +} + +func (c *unnecessaryDeferChecker) isTrivialReturn(ret *ast.ReturnStmt) bool { + for _, e := range ret.Results { + if !c.isConstExpr(e) { + return false + } + } + return true +} + +func (c *unnecessaryDeferChecker) isConstExpr(e ast.Expr) bool { + return c.ctx.TypesInfo.Types[e].Value != nil +} + +func (c *unnecessaryDeferChecker) warn(deferStmt *ast.DeferStmt) { + s := astfmt.Sprint(deferStmt) + if fnlit, ok := deferStmt.Call.Fun.(*ast.FuncLit); ok { + // To avoid long and multi-line warning messages, + // collapse the function literals. + s = "defer " + astfmt.Sprint(fnlit.Type) + "{...}(...)" + } + c.ctx.Warn(deferStmt, "%s is placed just before return", s) +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/unslice_checker.go b/vendor/github.com/go-critic/go-critic/checkers/unslice_checker.go new file mode 100644 index 000000000..26a4de061 --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/unslice_checker.go @@ -0,0 +1,59 @@ +package checkers + +import ( + "go/ast" + "go/types" + + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" + "github.com/go-toolsmith/astequal" +) + +func init() { + var info linter.CheckerInfo + info.Name = "unslice" + info.Tags = []string{"style"} + info.Summary = "Detects slice expressions that can be simplified to sliced expression itself" + info.Before = ` +f(s[:]) // s is string +copy(b[:], values...) // b is []byte` + info.After = ` +f(s) +copy(b, values...)` + + collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { + return astwalk.WalkerForExpr(&unsliceChecker{ctx: ctx}), nil + }) +} + +type unsliceChecker struct { + astwalk.WalkHandler + ctx *linter.CheckerContext +} + +func (c *unsliceChecker) VisitExpr(expr ast.Expr) { + unsliced := c.unslice(expr) + if !astequal.Expr(expr, unsliced) { + c.warn(expr, unsliced) + c.SkipChilds = true + } +} + +func (c *unsliceChecker) unslice(expr ast.Expr) ast.Expr { + slice, ok := expr.(*ast.SliceExpr) + if !ok || slice.Low != nil || slice.High != nil { + // No need to worry about 3-index slicing, + // because it's only permitted if expr.High is not nil. + return expr + } + switch c.ctx.TypeOf(slice.X).(type) { + case *types.Slice, *types.Basic: + // Basic kind catches strings, Slice cathes everything else. + return c.unslice(slice.X) + } + return expr +} + +func (c *unsliceChecker) warn(cause, unsliced ast.Expr) { + c.ctx.Warn(cause, "could simplify %s to %s", cause, unsliced) +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/utils.go b/vendor/github.com/go-critic/go-critic/checkers/utils.go new file mode 100644 index 000000000..b71f24d74 --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/utils.go @@ -0,0 +1,309 @@ +package checkers + +import ( + "go/ast" + "go/types" + "strings" + + "github.com/go-critic/go-critic/framework/linter" +) + +// goStdlib contains `go list std` command output list. +// Used to detect packages that belong to standard Go packages distribution. 
+var goStdlib = map[string]bool{ + "archive/tar": true, + "archive/zip": true, + "bufio": true, + "bytes": true, + "compress/bzip2": true, + "compress/flate": true, + "compress/gzip": true, + "compress/lzw": true, + "compress/zlib": true, + "container/heap": true, + "container/list": true, + "container/ring": true, + "context": true, + "crypto": true, + "crypto/aes": true, + "crypto/cipher": true, + "crypto/des": true, + "crypto/dsa": true, + "crypto/ecdsa": true, + "crypto/elliptic": true, + "crypto/hmac": true, + "crypto/internal/randutil": true, + "crypto/internal/subtle": true, + "crypto/md5": true, + "crypto/rand": true, + "crypto/rc4": true, + "crypto/rsa": true, + "crypto/sha1": true, + "crypto/sha256": true, + "crypto/sha512": true, + "crypto/subtle": true, + "crypto/tls": true, + "crypto/x509": true, + "crypto/x509/pkix": true, + "database/sql": true, + "database/sql/driver": true, + "debug/dwarf": true, + "debug/elf": true, + "debug/gosym": true, + "debug/macho": true, + "debug/pe": true, + "debug/plan9obj": true, + "encoding": true, + "encoding/ascii85": true, + "encoding/asn1": true, + "encoding/base32": true, + "encoding/base64": true, + "encoding/binary": true, + "encoding/csv": true, + "encoding/gob": true, + "encoding/hex": true, + "encoding/json": true, + "encoding/pem": true, + "encoding/xml": true, + "errors": true, + "expvar": true, + "flag": true, + "fmt": true, + "go/ast": true, + "go/build": true, + "go/constant": true, + "go/doc": true, + "go/format": true, + "go/importer": true, + "go/internal/gccgoimporter": true, + "go/internal/gcimporter": true, + "go/internal/srcimporter": true, + "go/parser": true, + "go/printer": true, + "go/scanner": true, + "go/token": true, + "go/types": true, + "hash": true, + "hash/adler32": true, + "hash/crc32": true, + "hash/crc64": true, + "hash/fnv": true, + "html": true, + "html/template": true, + "image": true, + "image/color": true, + "image/color/palette": true, + "image/draw": true, + "image/gif": true, + "image/internal/imageutil": true, + "image/jpeg": true, + "image/png": true, + "index/suffixarray": true, + "internal/bytealg": true, + "internal/cpu": true, + "internal/nettrace": true, + "internal/poll": true, + "internal/race": true, + "internal/singleflight": true, + "internal/syscall/unix": true, + "internal/syscall/windows": true, + "internal/syscall/windows/registry": true, + "internal/syscall/windows/sysdll": true, + "internal/testenv": true, + "internal/testlog": true, + "internal/trace": true, + "io": true, + "io/ioutil": true, + "log": true, + "log/syslog": true, + "math": true, + "math/big": true, + "math/bits": true, + "math/cmplx": true, + "math/rand": true, + "mime": true, + "mime/multipart": true, + "mime/quotedprintable": true, + "net": true, + "net/http": true, + "net/http/cgi": true, + "net/http/cookiejar": true, + "net/http/fcgi": true, + "net/http/httptest": true, + "net/http/httptrace": true, + "net/http/httputil": true, + "net/http/internal": true, + "net/http/pprof": true, + "net/internal/socktest": true, + "net/mail": true, + "net/rpc": true, + "net/rpc/jsonrpc": true, + "net/smtp": true, + "net/textproto": true, + "net/url": true, + "os": true, + "os/exec": true, + "os/signal": true, + "os/signal/internal/pty": true, + "os/user": true, + "path": true, + "path/filepath": true, + "plugin": true, + "reflect": true, + "regexp": true, + "regexp/syntax": true, + "runtime": true, + "runtime/cgo": true, + "runtime/debug": true, + "runtime/internal/atomic": true, + "runtime/internal/sys": true, + 
"runtime/pprof": true, + "runtime/pprof/internal/profile": true, + "runtime/race": true, + "runtime/trace": true, + "sort": true, + "strconv": true, + "strings": true, + "sync": true, + "sync/atomic": true, + "syscall": true, + "testing": true, + "testing/internal/testdeps": true, + "testing/iotest": true, + "testing/quick": true, + "text/scanner": true, + "text/tabwriter": true, + "text/template": true, + "text/template/parse": true, + "time": true, + "unicode": true, + "unicode/utf16": true, + "unicode/utf8": true, + "unsafe": true, +} + +var goBuiltins = map[string]bool{ + // Types + "bool": true, + "byte": true, + "complex64": true, + "complex128": true, + "error": true, + "float32": true, + "float64": true, + "int": true, + "int8": true, + "int16": true, + "int32": true, + "int64": true, + "rune": true, + "string": true, + "uint": true, + "uint8": true, + "uint16": true, + "uint32": true, + "uint64": true, + "uintptr": true, + + // Constants + "true": true, + "false": true, + "iota": true, + + // Zero value + "nil": true, + + // Functions + "append": true, + "cap": true, + "close": true, + "complex": true, + "copy": true, + "delete": true, + "imag": true, + "len": true, + "make": true, + "new": true, + "panic": true, + "print": true, + "println": true, + "real": true, + "recover": true, +} + +// isBuiltin reports whether sym belongs to a predefined identifier set. +func isBuiltin(sym string) bool { + return goBuiltins[sym] +} + +// isStdlibPkg reports whether pkg is a package from the Go standard library. +func isStdlibPkg(pkg *types.Package) bool { + return pkg != nil && goStdlib[pkg.Path()] +} + +// isExampleTestFunc reports whether FuncDecl looks like a testable example function. +func isExampleTestFunc(fn *ast.FuncDecl) bool { + return len(fn.Type.Params.List) == 0 && strings.HasPrefix(fn.Name.String(), "Example") +} + +// isUnitTestFunc reports whether FuncDecl declares testing function. +func isUnitTestFunc(ctx *linter.CheckerContext, fn *ast.FuncDecl) bool { + if !strings.HasPrefix(fn.Name.Name, "Test") { + return false + } + typ := ctx.TypesInfo.TypeOf(fn.Name) + if sig, ok := typ.(*types.Signature); ok { + return sig.Results().Len() == 0 && + sig.Params().Len() == 1 && + sig.Params().At(0).Type().String() == "*testing.T" + } + return false +} + +// qualifiedName returns called expr fully-quallified name. +// +// It works for simple identifiers like f => "f" and identifiers +// from other package like pkg.f => "pkg.f". +// +// For all unexpected expressions returns empty string. +func qualifiedName(x ast.Expr) string { + switch x := x.(type) { + case *ast.SelectorExpr: + pkg, ok := x.X.(*ast.Ident) + if !ok { + return "" + } + return pkg.Name + "." + x.Sel.Name + case *ast.Ident: + return x.Name + default: + return "" + } +} + +// identOf returns identifier for x that can be used to obtain associated types.Object. +// Returns nil for expressions that yield temporary results, like `f().field`. +func identOf(x ast.Node) *ast.Ident { + switch x := x.(type) { + case *ast.Ident: + return x + case *ast.SelectorExpr: + return identOf(x.Sel) + case *ast.TypeAssertExpr: + // x.(type) - x may contain ident. + return identOf(x.X) + case *ast.IndexExpr: + // x[i] - x may contain ident. + return identOf(x.X) + case *ast.StarExpr: + // *x - x may contain ident. + return identOf(x.X) + case *ast.SliceExpr: + // x[:] - x may contain ident. + return identOf(x.X) + + default: + // Note that this function is not comprehensive. 
+ return nil + } +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/valSwap_checker.go b/vendor/github.com/go-critic/go-critic/checkers/valSwap_checker.go new file mode 100644 index 000000000..d03e11223 --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/valSwap_checker.go @@ -0,0 +1,64 @@ +package checkers + +import ( + "go/ast" + "go/token" + + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" + "github.com/go-toolsmith/astcast" + "github.com/go-toolsmith/astequal" +) + +func init() { + var info linter.CheckerInfo + info.Name = "valSwap" + info.Tags = []string{"style"} + info.Summary = "Detects value swapping code that are not using parallel assignment" + info.Before = ` +tmp := *x +*x = *y +*y = tmp` + info.After = `*x, *y = *y, *x` + + collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { + return astwalk.WalkerForStmtList(&valSwapChecker{ctx: ctx}), nil + }) +} + +type valSwapChecker struct { + astwalk.WalkHandler + ctx *linter.CheckerContext +} + +func (c *valSwapChecker) VisitStmtList(list []ast.Stmt) { + for len(list) >= 3 { + tmpAssign := astcast.ToAssignStmt(list[0]) + assignX := astcast.ToAssignStmt(list[1]) + assignY := astcast.ToAssignStmt(list[2]) + + cond := c.isSimpleAssign(tmpAssign) && + c.isSimpleAssign(assignX) && + c.isSimpleAssign(assignY) && + assignX.Tok == token.ASSIGN && + assignY.Tok == token.ASSIGN && + astequal.Expr(assignX.Lhs[0], tmpAssign.Rhs[0]) && + astequal.Expr(assignX.Rhs[0], assignY.Lhs[0]) && + astequal.Expr(assignY.Rhs[0], tmpAssign.Lhs[0]) + if cond { + c.warn(tmpAssign, assignX.Lhs[0], assignY.Lhs[0]) + list = list[3:] + } else { + list = list[1:] + } + } +} + +func (c *valSwapChecker) isSimpleAssign(x *ast.AssignStmt) bool { + return len(x.Lhs) == 1 && len(x.Rhs) == 1 +} + +func (c *valSwapChecker) warn(cause, x, y ast.Node) { + c.ctx.Warn(cause, "can re-write as `%s, %s = %s, %s`", + x, y, y, x) +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/weakCond_checker.go b/vendor/github.com/go-critic/go-critic/checkers/weakCond_checker.go new file mode 100644 index 000000000..831857c41 --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/weakCond_checker.go @@ -0,0 +1,77 @@ +package checkers + +import ( + "go/ast" + "go/token" + + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/checkers/internal/lintutil" + "github.com/go-critic/go-critic/framework/linter" + "github.com/go-toolsmith/astcast" + "github.com/go-toolsmith/astequal" + "github.com/go-toolsmith/typep" + "golang.org/x/tools/go/ast/astutil" +) + +func init() { + var info linter.CheckerInfo + info.Name = "weakCond" + info.Tags = []string{"diagnostic", "experimental"} + info.Summary = "Detects conditions that are unsafe due to not being exhaustive" + info.Before = `xs != nil && xs[0] != nil` + info.After = `len(xs) != 0 && xs[0] != nil` + + collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { + return astwalk.WalkerForExpr(&weakCondChecker{ctx: ctx}), nil + }) +} + +type weakCondChecker struct { + astwalk.WalkHandler + ctx *linter.CheckerContext +} + +func (c *weakCondChecker) VisitExpr(expr ast.Expr) { + // TODO(Quasilyte): more patterns. + // TODO(Quasilyte): analyze and fix false positives. + + cond := astcast.ToBinaryExpr(expr) + lhs := astcast.ToBinaryExpr(astutil.Unparen(cond.X)) + rhs := astutil.Unparen(cond.Y) + + // Pattern 1. 
+ // `x != nil && usageOf(x[i])` + // Pattern 2. + // `x == nil || usageOf(x[i])` + + // lhs is `x nil` + x := lhs.X + if !typep.IsSlice(c.ctx.TypeOf(x)) { + return + } + if astcast.ToIdent(lhs.Y).Name != "nil" { + return + } + + pat1prefix := cond.Op == token.LAND && lhs.Op == token.NEQ + pat2prefix := cond.Op == token.LOR && lhs.Op == token.EQL + if !pat1prefix && !pat2prefix { + return + } + + if c.isIndexed(rhs, x) { + c.warn(expr, "nil check may not be enough, check for len") + } +} + +// isIndexed reports whether x is indexed inside given expr tree. +func (c *weakCondChecker) isIndexed(tree, x ast.Expr) bool { + return lintutil.ContainsNode(tree, func(n ast.Node) bool { + indexing := astcast.ToIndexExpr(n) + return astequal.Expr(x, indexing.X) + }) +} + +func (c *weakCondChecker) warn(cause ast.Node, suggest string) { + c.ctx.Warn(cause, "suspicious `%s`; %s", cause, suggest) +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/whyNoLint_checker.go b/vendor/github.com/go-critic/go-critic/checkers/whyNoLint_checker.go new file mode 100644 index 000000000..260039f2b --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/whyNoLint_checker.go @@ -0,0 +1,52 @@ +package checkers + +import ( + "go/ast" + "regexp" + "strings" + + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" +) + +func init() { + info := linter.CheckerInfo{ + Name: "whyNoLint", + Tags: []string{"style", "experimental"}, + Summary: "Ensures that `//nolint` comments include an explanation", + Before: `//nolint`, + After: `//nolint // reason`, + } + re := regexp.MustCompile(`^// *nolint(?::[^ ]+)? *(.*)$`) + + collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { + return astwalk.WalkerForComment(&whyNoLintChecker{ + ctx: ctx, + re: re, + }), nil + }) +} + +type whyNoLintChecker struct { + astwalk.WalkHandler + + ctx *linter.CheckerContext + re *regexp.Regexp +} + +func (c whyNoLintChecker) VisitComment(cg *ast.CommentGroup) { + if strings.HasPrefix(cg.List[0].Text, "/*") { + return + } + for _, comment := range cg.List { + sl := c.re.FindStringSubmatch(comment.Text) + if len(sl) < 2 { + continue + } + + if s := sl[1]; !strings.HasPrefix(s, "//") || strings.TrimPrefix(s, "//") == "" { + c.ctx.Warn(cg, "include an explanation for nolint directive") + return + } + } +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/wrapperFunc_checker.go b/vendor/github.com/go-critic/go-critic/checkers/wrapperFunc_checker.go new file mode 100644 index 000000000..d474989d0 --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/wrapperFunc_checker.go @@ -0,0 +1,229 @@ +package checkers + +import ( + "go/ast" + "go/token" + "go/types" + "strings" + + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" + "github.com/go-toolsmith/astcast" +) + +func init() { + var info linter.CheckerInfo + info.Name = "wrapperFunc" + info.Tags = []string{"style"} + info.Summary = "Detects function calls that can be replaced with convenience wrappers" + info.Before = `wg.Add(-1)` + info.After = `wg.Done()` + + collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { + type arg struct { + index int + value string + } + type pattern struct { + pkg string + typ string // Only for typ patterns + args []arg + suggestion string + } + type matcher struct { + pkgPatterns []pattern + typPatterns []pattern + } + + typPatterns := 
map[string][]arg{ + "sync.WaitGroup.Add => WaitGroup.Done": { + {0, "-1"}, + }, + + "bytes.Buffer.Truncate => Buffer.Reset": { + {0, "0"}, + }, + } + + pkgPatterns := map[string][]arg{ + "http.HandlerFunc => http.NotFoundHandler": { + {0, "http.NotFound"}, + }, + + "strings.SplitN => strings.Split": { + {2, "-1"}, + }, + "strings.Replace => strings.ReplaceAll": { + {3, "-1"}, + }, + "strings.TrimFunc => strings.TrimSpace": { + {1, "unicode.IsSpace"}, + }, + "strings.Map => strings.ToTitle": { + {0, "unicode.ToTitle"}, + }, + + "bytes.SplitN => bytes.Split": { + {2, "-1"}, + }, + "bytes.Replace => bytes.ReplaceAll": { + {3, "-1"}, + }, + "bytes.TrimFunc => bytes.TrimSpace": { + {1, "unicode.IsSpace"}, + }, + "bytes.Map => bytes.ToUpper": { + {0, "unicode.ToUpper"}, + }, + "bytes.Map => bytes.ToLower": { + {0, "unicode.ToLower"}, + }, + "bytes.Map => bytes.ToTitle": { + {0, "unicode.ToTitle"}, + }, + + "draw.DrawMask => draw.Draw": { + {4, "nil"}, + {5, "image.Point{}"}, + }, + } + + matchers := make(map[string]*matcher) + + type templateKey struct { + from string + to string + } + decodeKey := func(key string) templateKey { + parts := strings.Split(key, " => ") + return templateKey{from: parts[0], to: parts[1]} + } + + // Expand pkg patterns. + for key, args := range pkgPatterns { + key := decodeKey(key) + parts := strings.Split(key.from, ".") + fn := parts[1] + m := matchers[fn] + if m == nil { + m = &matcher{} + matchers[fn] = m + } + m.pkgPatterns = append(m.pkgPatterns, pattern{ + pkg: parts[0], + args: args, + suggestion: key.to, + }) + } + // Expand typ patterns. + for key, args := range typPatterns { + key := decodeKey(key) + parts := strings.Split(key.from, ".") + fn := parts[2] + m := matchers[fn] + if m == nil { + m = &matcher{} + matchers[fn] = m + } + m.typPatterns = append(m.typPatterns, pattern{ + pkg: parts[0], + typ: parts[1], + args: args, + suggestion: key.to, + }) + } + + var valueOf func(x ast.Expr) string + valueOf = func(x ast.Expr) string { + switch x := x.(type) { + case *ast.Ident: + return x.Name + case *ast.SelectorExpr: + id, ok := x.X.(*ast.Ident) + if ok { + return id.Name + "." 
+ x.Sel.Name + } + case *ast.BasicLit: + return x.Value + case *ast.UnaryExpr: + switch x.Op { + case token.SUB: + return "-" + valueOf(x.X) + case token.ADD: + return valueOf(x.X) + } + } + return "" + } + + findSuggestion := func(call *ast.CallExpr, pkg, typ string, patterns []pattern) string { + for _, pat := range patterns { + if pat.pkg != pkg || pat.typ != typ { + continue + } + for _, arg := range pat.args { + if arg.value == valueOf(call.Args[arg.index]) { + return pat.suggestion + } + } + } + return "" + } + + c := &wrapperFuncChecker{ctx: ctx} + c.findSuggestion = func(call *ast.CallExpr) string { + sel := astcast.ToSelectorExpr(call.Fun).Sel + if sel == nil { + return "" + } + x := astcast.ToSelectorExpr(call.Fun).X + + m := matchers[sel.Name] + if m == nil { + return "" + } + + if x, ok := x.(*ast.Ident); ok { + obj, ok := c.ctx.TypesInfo.ObjectOf(x).(*types.PkgName) + if ok { + return findSuggestion(call, obj.Name(), "", m.pkgPatterns) + } + } + + typ := c.ctx.TypeOf(x) + tn, ok := typ.(*types.Named) + if !ok { + return "" + } + return findSuggestion( + call, + tn.Obj().Pkg().Name(), + tn.Obj().Name(), + m.typPatterns) + } + + return astwalk.WalkerForExpr(c), nil + }) +} + +type wrapperFuncChecker struct { + astwalk.WalkHandler + ctx *linter.CheckerContext + + findSuggestion func(*ast.CallExpr) string +} + +func (c *wrapperFuncChecker) VisitExpr(expr ast.Expr) { + call := astcast.ToCallExpr(expr) + if len(call.Args) == 0 { + return + } + + if suggest := c.findSuggestion(call); suggest != "" { + c.warn(call, suggest) + } +} + +func (c *wrapperFuncChecker) warn(cause ast.Node, suggest string) { + c.ctx.Warn(cause, "use %s method in `%s`", suggest, cause) +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/yodaStyleExpr_checker.go b/vendor/github.com/go-critic/go-critic/checkers/yodaStyleExpr_checker.go new file mode 100644 index 000000000..c533d143b --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/yodaStyleExpr_checker.go @@ -0,0 +1,66 @@ +package checkers + +import ( + "go/ast" + "go/token" + + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/framework/linter" + "github.com/go-toolsmith/astcopy" + "github.com/go-toolsmith/astp" +) + +func init() { + var info linter.CheckerInfo + info.Name = "yodaStyleExpr" + info.Tags = []string{"style", "experimental"} + info.Summary = "Detects Yoda style expressions and suggests to replace them" + info.Before = `return nil != ptr` + info.After = `return ptr != nil` + + collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { + return astwalk.WalkerForLocalExpr(&yodaStyleExprChecker{ctx: ctx}), nil + }) +} + +type yodaStyleExprChecker struct { + astwalk.WalkHandler + ctx *linter.CheckerContext +} + +func (c *yodaStyleExprChecker) VisitLocalExpr(expr ast.Expr) { + binexpr, ok := expr.(*ast.BinaryExpr) + if !ok { + return + } + switch binexpr.Op { + case token.EQL, token.NEQ, token.LSS, token.LEQ, token.GEQ, token.GTR: + if c.isConstExpr(binexpr.X) && !c.isConstExpr(binexpr.Y) { + c.warn(binexpr) + } + } +} + +func (c *yodaStyleExprChecker) isConstExpr(expr ast.Expr) bool { + return qualifiedName(expr) == "nil" || astp.IsBasicLit(expr) +} + +func (c *yodaStyleExprChecker) invert(expr *ast.BinaryExpr) { + expr.X, expr.Y = expr.Y, expr.X + switch expr.Op { + case token.LSS: + expr.Op = token.GEQ + case token.LEQ: + expr.Op = token.GTR + case token.GEQ: + expr.Op = token.LSS + case token.GTR: + expr.Op = token.LEQ + } +} + +func (c 
*yodaStyleExprChecker) warn(expr *ast.BinaryExpr) { + e := astcopy.BinaryExpr(expr) + c.invert(e) + c.ctx.Warn(expr, "consider to change order in expression to %s", e) +} diff --git a/vendor/github.com/go-critic/go-critic/framework/linter/checkers_db.go b/vendor/github.com/go-critic/go-critic/framework/linter/checkers_db.go new file mode 100644 index 000000000..0a3fc0292 --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/framework/linter/checkers_db.go @@ -0,0 +1,136 @@ +package linter + +import ( + "fmt" + "regexp" + "sort" + "strings" + + "github.com/go-toolsmith/astfmt" +) + +type checkerProto struct { + info *CheckerInfo + constructor func(*Context) (*Checker, error) +} + +// prototypes is a set of registered checkers that are not yet instantiated. +// Registration should be done with AddChecker function. +// Initialized checkers can be obtained with NewChecker function. +var prototypes = make(map[string]checkerProto) + +func getCheckersInfo() []*CheckerInfo { + infoList := make([]*CheckerInfo, 0, len(prototypes)) + for _, proto := range prototypes { + infoCopy := *proto.info + infoList = append(infoList, &infoCopy) + } + sort.Slice(infoList, func(i, j int) bool { + return infoList[i].Name < infoList[j].Name + }) + return infoList +} + +func addChecker(info *CheckerInfo, constructor func(*CheckerContext) (FileWalker, error)) { + if _, ok := prototypes[info.Name]; ok { + panic(fmt.Sprintf("checker with name %q already registered", info.Name)) + } + + // Validate param value type. + for pname, param := range info.Params { + switch param.Value.(type) { + case string, int, bool: + // OK. + default: + panic(fmt.Sprintf("unsupported %q param type value: %T", + pname, param.Value)) + } + } + + trimDocumentation := func(info *CheckerInfo) { + fields := []*string{ + &info.Summary, + &info.Details, + &info.Before, + &info.After, + &info.Note, + } + for _, f := range fields { + *f = strings.TrimSpace(*f) + } + } + + trimDocumentation(info) + + if err := validateCheckerInfo(info); err != nil { + panic(err) + } + + proto := checkerProto{ + info: info, + constructor: func(ctx *Context) (*Checker, error) { + var c Checker + c.Info = info + c.ctx = CheckerContext{ + Context: ctx, + printer: astfmt.NewPrinter(ctx.FileSet), + } + var err error + c.fileWalker, err = constructor(&c.ctx) + return &c, err + }, + } + + prototypes[info.Name] = proto +} + +func newChecker(ctx *Context, info *CheckerInfo) (*Checker, error) { + proto, ok := prototypes[info.Name] + if !ok { + panic(fmt.Sprintf("checker with name %q not registered", info.Name)) + } + return proto.constructor(ctx) +} + +func validateCheckerInfo(info *CheckerInfo) error { + steps := []func(*CheckerInfo) error{ + validateCheckerName, + validateCheckerDocumentation, + validateCheckerTags, + } + + for _, step := range steps { + if err := step(info); err != nil { + return fmt.Errorf("%q validation error: %v", info.Name, err) + } + } + return nil +} + +var validIdentRE = regexp.MustCompile(`^\w+$`) + +func validateCheckerName(info *CheckerInfo) error { + if !validIdentRE.MatchString(info.Name) { + return fmt.Errorf("checker name contains illegal chars") + } + return nil +} + +func validateCheckerDocumentation(info *CheckerInfo) error { + // TODO(quasilyte): validate documentation. 
+ return nil +} + +func validateCheckerTags(info *CheckerInfo) error { + tagSet := make(map[string]bool) + for _, tag := range info.Tags { + if tagSet[tag] { + return fmt.Errorf("duplicated tag %q", tag) + } + if !validIdentRE.MatchString(tag) { + return fmt.Errorf("checker tag %q contains illegal chars", tag) + } + tagSet[tag] = true + } + return nil +} diff --git a/vendor/github.com/go-critic/go-critic/framework/linter/context.go b/vendor/github.com/go-critic/go-critic/framework/linter/context.go new file mode 100644 index 000000000..6e108ab6a --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/framework/linter/context.go @@ -0,0 +1,35 @@ +package linter + +import ( + "go/ast" + "go/types" + "strconv" +) + +func resolvePkgObjects(ctx *Context, f *ast.File) { + ctx.PkgObjects = make(map[*types.PkgName]string, len(f.Imports)) + + for _, spec := range f.Imports { + if spec.Name != nil { + obj := ctx.TypesInfo.ObjectOf(spec.Name) + ctx.PkgObjects[obj.(*types.PkgName)] = spec.Name.Name + } else { + obj := ctx.TypesInfo.Implicits[spec] + ctx.PkgObjects[obj.(*types.PkgName)] = obj.Name() + } + } +} + +func resolvePkgRenames(ctx *Context, f *ast.File) { + ctx.PkgRenames = make(map[string]string) + + for _, spec := range f.Imports { + if spec.Name != nil { + path, err := strconv.Unquote(spec.Path.Value) + if err != nil { + panic(err) + } + ctx.PkgRenames[path] = spec.Name.Name + } + } +} diff --git a/vendor/github.com/go-critic/go-critic/framework/linter/lintpack.go b/vendor/github.com/go-critic/go-critic/framework/linter/lintpack.go new file mode 100644 index 000000000..5c8662c69 --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/framework/linter/lintpack.go @@ -0,0 +1,269 @@ +package linter + +import ( + "go/ast" + "go/token" + "go/types" + + "github.com/go-toolsmith/astfmt" +) + +// CheckerCollection provides additional information for a group of checkers. +type CheckerCollection struct { + // URL is a link for a main source of information on the collection. + URL string +} + +// AddChecker registers a new checker into a checkers pool. +// Constructor is used to create a new checker instance. +// Checker name (defined in CheckerInfo.Name) must be unique. +// +// CheckerInfo.Collection is automatically set to the coll (the receiver). +// +// If checker is never needed, for example if it is disabled, +// constructor will not be called. +func (coll *CheckerCollection) AddChecker(info *CheckerInfo, constructor func(*CheckerContext) (FileWalker, error)) { + if coll == nil { + panic("adding checker to a nil collection") + } + info.Collection = coll + addChecker(info, constructor) +} + +// CheckerParam describes a single checker customizable parameter. +type CheckerParam struct { + // Value holds parameter bound value. + // It might be overwritten by the integrating linter. + // + // Permitted types include: + // - int + // - bool + // - string + Value interface{} + + // Usage gives an overview about what parameter does. + Usage string +} + +// CheckerParams holds all checker-specific parameters. +// +// Provides convenient access to the loosely typed underlying map. +type CheckerParams map[string]*CheckerParam + +// Int lookups pname key in underlying map and type-asserts it to int. +func (params CheckerParams) Int(pname string) int { return params[pname].Value.(int) } + +// Bool lookups pname key in underlying map and type-asserts it to bool. 
+func (params CheckerParams) Bool(pname string) bool { return params[pname].Value.(bool) } + +// String lookups pname key in underlying map and type-asserts it to string. +func (params CheckerParams) String(pname string) string { return params[pname].Value.(string) } + +// CheckerInfo holds checker metadata and structured documentation. +type CheckerInfo struct { + // Name is a checker name. + Name string + + // Tags is a list of labels that can be used to enable or disable checker. + // Common tags are "experimental" and "performance". + Tags []string + + // Params declares checker-specific parameters. Optional. + Params CheckerParams + + // Summary is a short one sentence description. + // Should not end with a period. + Summary string + + // Details extends summary with additional info. Optional. + Details string + + // Before is a code snippet of code that will violate rule. + Before string + + // After is a code snippet of fixed code that complies to the rule. + After string + + // Note is an optional caution message or advice. + Note string + + // Collection establishes a checker-to-collection relationship. + Collection *CheckerCollection +} + +// GetCheckersInfo returns a checkers info list for all registered checkers. +// The slice is sorted by a checker name. +// +// Info objects can be used to instantiate checkers with NewChecker function. +func GetCheckersInfo() []*CheckerInfo { + return getCheckersInfo() +} + +// HasTag reports whether checker described by the info has specified tag. +func (info *CheckerInfo) HasTag(tag string) bool { + for i := range info.Tags { + if info.Tags[i] == tag { + return true + } + } + return false +} + +// Checker is an implementation of a check that is described by the associated info. +type Checker struct { + // Info is an info object that was used to instantiate this checker. + Info *CheckerInfo + + ctx CheckerContext + + fileWalker FileWalker +} + +// Check runs rule checker over file f. +func (c *Checker) Check(f *ast.File) []Warning { + c.ctx.warnings = c.ctx.warnings[:0] + c.fileWalker.WalkFile(f) + return c.ctx.warnings +} + +// Warning represents issue that is found by checker. +type Warning struct { + // Node is an AST node that caused warning to trigger. + // Can be used to obtain proper error location. + Node ast.Node + + // Text is warning message without source location info. + Text string +} + +// NewChecker returns initialized checker identified by an info. +// info must be non-nil. +// Returns an error if info describes a checker that was not properly registered, +// or if checker fails to initialize. +func NewChecker(ctx *Context, info *CheckerInfo) (*Checker, error) { + return newChecker(ctx, info) +} + +// Context is a readonly state shared among every checker. +type Context struct { + // TypesInfo carries parsed packages types information. + TypesInfo *types.Info + + // SizesInfo carries alignment and type size information. + // Arch-dependent. + SizesInfo types.Sizes + + // FileSet is a file set that was used during the program loading. + FileSet *token.FileSet + + // Pkg describes package that is being checked. + Pkg *types.Package + + // Filename is a currently checked file name. + Filename string + + // Require records what optional resources are required + // by the checkers set that use this context. + // + // Every require fields makes associated context field + // to be properly initialized. + // For example, Context.require.PkgObjects => Context.PkgObjects. 
+ Require struct { + PkgObjects bool + PkgRenames bool + } + + // PkgObjects stores all imported packages and their local names. + PkgObjects map[*types.PkgName]string + + // PkgRenames maps package path to its local renaming. + // Contains no entries for packages that were imported without + // explicit local names. + PkgRenames map[string]string +} + +// NewContext returns new shared context to be used by every checker. +// +// All data carried by the context is readonly for checkers, +// but can be modified by the integrating application. +func NewContext(fset *token.FileSet, sizes types.Sizes) *Context { + return &Context{ + FileSet: fset, + SizesInfo: sizes, + TypesInfo: &types.Info{}, + } +} + +// SetPackageInfo sets package-related metadata. +// +// Must be called for every package being checked. +func (c *Context) SetPackageInfo(info *types.Info, pkg *types.Package) { + if info != nil { + // We do this kind of assignment to avoid + // changing c.typesInfo field address after + // every re-assignment. + *c.TypesInfo = *info + } + c.Pkg = pkg +} + +// SetFileInfo sets file-related metadata. +// +// Must be called for every source code file being checked. +func (c *Context) SetFileInfo(name string, f *ast.File) { + c.Filename = name + if c.Require.PkgObjects { + resolvePkgObjects(c, f) + } + if c.Require.PkgRenames { + resolvePkgRenames(c, f) + } +} + +// CheckerContext is checker-local context copy. +// Fields that are not from Context itself are writeable. +type CheckerContext struct { + *Context + + // printer used to format warning text. + printer *astfmt.Printer + + warnings []Warning +} + +// Warn adds a Warning to checker output. +func (ctx *CheckerContext) Warn(node ast.Node, format string, args ...interface{}) { + ctx.warnings = append(ctx.warnings, Warning{ + Text: ctx.printer.Sprintf(format, args...), + Node: node, + }) +} + +// UnknownType is a special sentinel value that is returned from the CheckerContext.TypeOf +// method instead of the nil type. +var UnknownType types.Type = types.Typ[types.Invalid] + +// TypeOf returns the type of expression x. +// +// Unlike TypesInfo.TypeOf, it never returns nil. +// Instead, it returns the Invalid type as a sentinel UnknownType value. +func (ctx *CheckerContext) TypeOf(x ast.Expr) types.Type { + typ := ctx.TypesInfo.TypeOf(x) + if typ != nil { + return typ + } + // Usually it means that some incorrect type info was loaded + // or the analyzed package was only partially (?) correct. + // To avoid nil pointer panics we can return a sentinel value + // that will fail most type assertions as well as kind checks + // (if the call side expects a *types.Basic). + return UnknownType +} + +// FileWalker is an interface every checker should implement. +// +// The WalkFile method is executed for every Go file inside the +// package that is being checked. +type FileWalker interface { + WalkFile(*ast.File) +} diff --git a/vendor/github.com/go-toolsmith/astcast/.travis.yml b/vendor/github.com/go-toolsmith/astcast/.travis.yml new file mode 100644 index 000000000..c32ac0062 --- /dev/null +++ b/vendor/github.com/go-toolsmith/astcast/.travis.yml @@ -0,0 +1,9 @@ +language: go +go: + - 1.x +install: + - # Prevent default install action "go get -t -v ./...". +script: + - go get -t -v ./... + - go tool vet . + - go test -v -race ./... 
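For orientation, the linter framework vendored above (checkers_db.go and lintpack.go) is consumed by registering checkers against the `CheckerCollection.AddChecker` / `FileWalker` / `CheckerContext.Warn` API. The sketch below is illustrative only and not taken from the vendored sources: the package name, checker name `emptyBlock`, collection URL, and warning text are made-up examples.

```go
package checkers

import (
	"go/ast"

	"github.com/go-critic/go-critic/framework/linter"
)

// collection groups related checkers; the URL here is illustrative.
var collection = &linter.CheckerCollection{
	URL: "https://example.com/my-checkers",
}

func init() {
	info := &linter.CheckerInfo{
		Name:    "emptyBlock",             // must match ^\w+$ (see validateCheckerName)
		Tags:    []string{"experimental"}, // tags are validated the same way
		Summary: "Detects empty statement blocks",
		Before:  `if cond { }`,
		After:   `if cond { doSomething() }`,
	}
	// The constructor is only invoked if the checker is actually enabled.
	collection.AddChecker(info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) {
		return &emptyBlockChecker{ctx: ctx}, nil
	})
}

type emptyBlockChecker struct {
	ctx *linter.CheckerContext
}

// WalkFile satisfies linter.FileWalker; it is called once per checked file.
func (c *emptyBlockChecker) WalkFile(f *ast.File) {
	ast.Inspect(f, func(n ast.Node) bool {
		if block, ok := n.(*ast.BlockStmt); ok && len(block.List) == 0 {
			c.ctx.Warn(block, "empty block")
		}
		return true
	})
}
```

An integrating application would then list checkers via `linter.GetCheckersInfo`, instantiate the enabled ones with `linter.NewChecker`, call `Context.SetPackageInfo` and `Context.SetFileInfo` as it iterates packages and files, and collect warnings from `Checker.Check`.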
diff --git a/vendor/github.com/go-toolsmith/astcast/LICENSE b/vendor/github.com/go-toolsmith/astcast/LICENSE new file mode 100644 index 000000000..eef17180f --- /dev/null +++ b/vendor/github.com/go-toolsmith/astcast/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2018 go-toolsmith + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/github.com/go-toolsmith/astcast/README.md b/vendor/github.com/go-toolsmith/astcast/README.md new file mode 100644 index 000000000..b618da461 --- /dev/null +++ b/vendor/github.com/go-toolsmith/astcast/README.md @@ -0,0 +1,86 @@ +[![Go Report Card](https://goreportcard.com/badge/github.com/go-toolsmith/astcast)](https://goreportcard.com/report/github.com/go-toolsmith/astcast) +[![GoDoc](https://godoc.org/github.com/go-toolsmith/astcast?status.svg)](https://godoc.org/github.com/go-toolsmith/astcast) + +# astcast + +Package astcast wraps type assertion operations in such way that you don't have +to worry about nil pointer results anymore. + +## Installation + +```bash +go get -v github.com/go-toolsmith/astcast +``` + +## Example + +```go +package main + +import ( + "fmt" + + "github.com/go-toolsmith/astcast" + "github.com/go-toolsmith/strparse" +) + +func main() { + x := strparse.Expr(`(foo * bar) + 1`) + + // x type is ast.Expr, we want to access bar operand + // that is a RHS of the LHS of the addition. + // Note that addition LHS (X field) is has parenthesis, + // so we have to remove them too. + + add := astcast.ToBinaryExpr(x) + mul := astcast.ToBinaryExpr(astcast.ToParenExpr(add.X).X) + bar := astcast.ToIdent(mul.Y) + fmt.Printf("%T %s\n", bar, bar.Name) // => *ast.Ident bar + + // If argument has different dynamic type, + // non-nil sentinel object of requested type is returned. + // Those sentinel objects are exported so if you need + // to know whether it was a nil interface value of + // failed type assertion, you can compare returned + // object with such a sentinel. 
+ + y := astcast.ToCallExpr(strparse.Expr(`x`)) + if y == astcast.NilCallExpr { + fmt.Println("it is a sentinel, type assertion failed") + } +} +``` + +Without `astcast`, you would have to do a lots of type assertions: + +```go +package main + +import ( + "fmt" + + "github.com/go-toolsmith/strparse" +) + +func main() { + x := strparse.Expr(`(foo * bar) + 1`) + + add, ok := x.(*ast.BinaryExpr) + if !ok || add == nil { + return + } + additionLHS, ok := add.X.(*ast.ParenExpr) + if !ok || additionLHS == nil { + return + } + mul, ok := additionLHS.X.(*ast.BinaryExpr) + if !ok || mul == nil { + return + } + bar, ok := mul.Y.(*ast.Ident) + if !ok || bar == nil { + return + } + fmt.Printf("%T %s\n", bar, bar.Name) +} +``` diff --git a/vendor/github.com/go-toolsmith/astcast/astcast.go b/vendor/github.com/go-toolsmith/astcast/astcast.go new file mode 100644 index 000000000..746d568aa --- /dev/null +++ b/vendor/github.com/go-toolsmith/astcast/astcast.go @@ -0,0 +1,590 @@ +// Code generated by astcast_generate.go; DO NOT EDIT + +// Package astcast wraps type assertion operations in such way that you don't have +// to worry about nil pointer results anymore. +package astcast + +import ( + "go/ast" +) + +// A set of sentinel nil-like values that are returned +// by all "casting" functions in case of failed type assertion. +var ( + NilArrayType = &ast.ArrayType{} + NilBadExpr = &ast.BadExpr{} + NilBasicLit = &ast.BasicLit{} + NilBinaryExpr = &ast.BinaryExpr{} + NilCallExpr = &ast.CallExpr{} + NilChanType = &ast.ChanType{} + NilCompositeLit = &ast.CompositeLit{} + NilEllipsis = &ast.Ellipsis{} + NilFuncLit = &ast.FuncLit{} + NilFuncType = &ast.FuncType{} + NilIdent = &ast.Ident{} + NilIndexExpr = &ast.IndexExpr{} + NilInterfaceType = &ast.InterfaceType{} + NilKeyValueExpr = &ast.KeyValueExpr{} + NilMapType = &ast.MapType{} + NilParenExpr = &ast.ParenExpr{} + NilSelectorExpr = &ast.SelectorExpr{} + NilSliceExpr = &ast.SliceExpr{} + NilStarExpr = &ast.StarExpr{} + NilStructType = &ast.StructType{} + NilTypeAssertExpr = &ast.TypeAssertExpr{} + NilUnaryExpr = &ast.UnaryExpr{} + NilAssignStmt = &ast.AssignStmt{} + NilBadStmt = &ast.BadStmt{} + NilBlockStmt = &ast.BlockStmt{} + NilBranchStmt = &ast.BranchStmt{} + NilCaseClause = &ast.CaseClause{} + NilCommClause = &ast.CommClause{} + NilDeclStmt = &ast.DeclStmt{} + NilDeferStmt = &ast.DeferStmt{} + NilEmptyStmt = &ast.EmptyStmt{} + NilExprStmt = &ast.ExprStmt{} + NilForStmt = &ast.ForStmt{} + NilGoStmt = &ast.GoStmt{} + NilIfStmt = &ast.IfStmt{} + NilIncDecStmt = &ast.IncDecStmt{} + NilLabeledStmt = &ast.LabeledStmt{} + NilRangeStmt = &ast.RangeStmt{} + NilReturnStmt = &ast.ReturnStmt{} + NilSelectStmt = &ast.SelectStmt{} + NilSendStmt = &ast.SendStmt{} + NilSwitchStmt = &ast.SwitchStmt{} + NilTypeSwitchStmt = &ast.TypeSwitchStmt{} + NilComment = &ast.Comment{} + NilCommentGroup = &ast.CommentGroup{} + NilFieldList = &ast.FieldList{} + NilFile = &ast.File{} + NilPackage = &ast.Package{} +) + +// ToArrayType returns x as a non-nil *ast.ArrayType. +// If ast.Node actually has such dynamic type, the result is +// identical to normal type assertion. In case if it has +// different type, the returned value is NilArrayType. +func ToArrayType(x ast.Node) *ast.ArrayType { + if x, ok := x.(*ast.ArrayType); ok { + return x + } + return NilArrayType +} + +// ToBadExpr returns x as a non-nil *ast.BadExpr. +// If ast.Node actually has such dynamic type, the result is +// identical to normal type assertion. 
In case if it has +// different type, the returned value is NilBadExpr. +func ToBadExpr(x ast.Node) *ast.BadExpr { + if x, ok := x.(*ast.BadExpr); ok { + return x + } + return NilBadExpr +} + +// ToBasicLit returns x as a non-nil *ast.BasicLit. +// If ast.Node actually has such dynamic type, the result is +// identical to normal type assertion. In case if it has +// different type, the returned value is NilBasicLit. +func ToBasicLit(x ast.Node) *ast.BasicLit { + if x, ok := x.(*ast.BasicLit); ok { + return x + } + return NilBasicLit +} + +// ToBinaryExpr returns x as a non-nil *ast.BinaryExpr. +// If ast.Node actually has such dynamic type, the result is +// identical to normal type assertion. In case if it has +// different type, the returned value is NilBinaryExpr. +func ToBinaryExpr(x ast.Node) *ast.BinaryExpr { + if x, ok := x.(*ast.BinaryExpr); ok { + return x + } + return NilBinaryExpr +} + +// ToCallExpr returns x as a non-nil *ast.CallExpr. +// If ast.Node actually has such dynamic type, the result is +// identical to normal type assertion. In case if it has +// different type, the returned value is NilCallExpr. +func ToCallExpr(x ast.Node) *ast.CallExpr { + if x, ok := x.(*ast.CallExpr); ok { + return x + } + return NilCallExpr +} + +// ToChanType returns x as a non-nil *ast.ChanType. +// If ast.Node actually has such dynamic type, the result is +// identical to normal type assertion. In case if it has +// different type, the returned value is NilChanType. +func ToChanType(x ast.Node) *ast.ChanType { + if x, ok := x.(*ast.ChanType); ok { + return x + } + return NilChanType +} + +// ToCompositeLit returns x as a non-nil *ast.CompositeLit. +// If ast.Node actually has such dynamic type, the result is +// identical to normal type assertion. In case if it has +// different type, the returned value is NilCompositeLit. +func ToCompositeLit(x ast.Node) *ast.CompositeLit { + if x, ok := x.(*ast.CompositeLit); ok { + return x + } + return NilCompositeLit +} + +// ToEllipsis returns x as a non-nil *ast.Ellipsis. +// If ast.Node actually has such dynamic type, the result is +// identical to normal type assertion. In case if it has +// different type, the returned value is NilEllipsis. +func ToEllipsis(x ast.Node) *ast.Ellipsis { + if x, ok := x.(*ast.Ellipsis); ok { + return x + } + return NilEllipsis +} + +// ToFuncLit returns x as a non-nil *ast.FuncLit. +// If ast.Node actually has such dynamic type, the result is +// identical to normal type assertion. In case if it has +// different type, the returned value is NilFuncLit. +func ToFuncLit(x ast.Node) *ast.FuncLit { + if x, ok := x.(*ast.FuncLit); ok { + return x + } + return NilFuncLit +} + +// ToFuncType returns x as a non-nil *ast.FuncType. +// If ast.Node actually has such dynamic type, the result is +// identical to normal type assertion. In case if it has +// different type, the returned value is NilFuncType. +func ToFuncType(x ast.Node) *ast.FuncType { + if x, ok := x.(*ast.FuncType); ok { + return x + } + return NilFuncType +} + +// ToIdent returns x as a non-nil *ast.Ident. +// If ast.Node actually has such dynamic type, the result is +// identical to normal type assertion. In case if it has +// different type, the returned value is NilIdent. +func ToIdent(x ast.Node) *ast.Ident { + if x, ok := x.(*ast.Ident); ok { + return x + } + return NilIdent +} + +// ToIndexExpr returns x as a non-nil *ast.IndexExpr. +// If ast.Node actually has such dynamic type, the result is +// identical to normal type assertion. 
In case if it has +// different type, the returned value is NilIndexExpr. +func ToIndexExpr(x ast.Node) *ast.IndexExpr { + if x, ok := x.(*ast.IndexExpr); ok { + return x + } + return NilIndexExpr +} + +// ToInterfaceType returns x as a non-nil *ast.InterfaceType. +// If ast.Node actually has such dynamic type, the result is +// identical to normal type assertion. In case if it has +// different type, the returned value is NilInterfaceType. +func ToInterfaceType(x ast.Node) *ast.InterfaceType { + if x, ok := x.(*ast.InterfaceType); ok { + return x + } + return NilInterfaceType +} + +// ToKeyValueExpr returns x as a non-nil *ast.KeyValueExpr. +// If ast.Node actually has such dynamic type, the result is +// identical to normal type assertion. In case if it has +// different type, the returned value is NilKeyValueExpr. +func ToKeyValueExpr(x ast.Node) *ast.KeyValueExpr { + if x, ok := x.(*ast.KeyValueExpr); ok { + return x + } + return NilKeyValueExpr +} + +// ToMapType returns x as a non-nil *ast.MapType. +// If ast.Node actually has such dynamic type, the result is +// identical to normal type assertion. In case if it has +// different type, the returned value is NilMapType. +func ToMapType(x ast.Node) *ast.MapType { + if x, ok := x.(*ast.MapType); ok { + return x + } + return NilMapType +} + +// ToParenExpr returns x as a non-nil *ast.ParenExpr. +// If ast.Node actually has such dynamic type, the result is +// identical to normal type assertion. In case if it has +// different type, the returned value is NilParenExpr. +func ToParenExpr(x ast.Node) *ast.ParenExpr { + if x, ok := x.(*ast.ParenExpr); ok { + return x + } + return NilParenExpr +} + +// ToSelectorExpr returns x as a non-nil *ast.SelectorExpr. +// If ast.Node actually has such dynamic type, the result is +// identical to normal type assertion. In case if it has +// different type, the returned value is NilSelectorExpr. +func ToSelectorExpr(x ast.Node) *ast.SelectorExpr { + if x, ok := x.(*ast.SelectorExpr); ok { + return x + } + return NilSelectorExpr +} + +// ToSliceExpr returns x as a non-nil *ast.SliceExpr. +// If ast.Node actually has such dynamic type, the result is +// identical to normal type assertion. In case if it has +// different type, the returned value is NilSliceExpr. +func ToSliceExpr(x ast.Node) *ast.SliceExpr { + if x, ok := x.(*ast.SliceExpr); ok { + return x + } + return NilSliceExpr +} + +// ToStarExpr returns x as a non-nil *ast.StarExpr. +// If ast.Node actually has such dynamic type, the result is +// identical to normal type assertion. In case if it has +// different type, the returned value is NilStarExpr. +func ToStarExpr(x ast.Node) *ast.StarExpr { + if x, ok := x.(*ast.StarExpr); ok { + return x + } + return NilStarExpr +} + +// ToStructType returns x as a non-nil *ast.StructType. +// If ast.Node actually has such dynamic type, the result is +// identical to normal type assertion. In case if it has +// different type, the returned value is NilStructType. +func ToStructType(x ast.Node) *ast.StructType { + if x, ok := x.(*ast.StructType); ok { + return x + } + return NilStructType +} + +// ToTypeAssertExpr returns x as a non-nil *ast.TypeAssertExpr. +// If ast.Node actually has such dynamic type, the result is +// identical to normal type assertion. In case if it has +// different type, the returned value is NilTypeAssertExpr. 
+func ToTypeAssertExpr(x ast.Node) *ast.TypeAssertExpr { + if x, ok := x.(*ast.TypeAssertExpr); ok { + return x + } + return NilTypeAssertExpr +} + +// ToUnaryExpr returns x as a non-nil *ast.UnaryExpr. +// If ast.Node actually has such dynamic type, the result is +// identical to normal type assertion. In case if it has +// different type, the returned value is NilUnaryExpr. +func ToUnaryExpr(x ast.Node) *ast.UnaryExpr { + if x, ok := x.(*ast.UnaryExpr); ok { + return x + } + return NilUnaryExpr +} + +// ToAssignStmt returns x as a non-nil *ast.AssignStmt. +// If ast.Node actually has such dynamic type, the result is +// identical to normal type assertion. In case if it has +// different type, the returned value is NilAssignStmt. +func ToAssignStmt(x ast.Node) *ast.AssignStmt { + if x, ok := x.(*ast.AssignStmt); ok { + return x + } + return NilAssignStmt +} + +// ToBadStmt returns x as a non-nil *ast.BadStmt. +// If ast.Node actually has such dynamic type, the result is +// identical to normal type assertion. In case if it has +// different type, the returned value is NilBadStmt. +func ToBadStmt(x ast.Node) *ast.BadStmt { + if x, ok := x.(*ast.BadStmt); ok { + return x + } + return NilBadStmt +} + +// ToBlockStmt returns x as a non-nil *ast.BlockStmt. +// If ast.Node actually has such dynamic type, the result is +// identical to normal type assertion. In case if it has +// different type, the returned value is NilBlockStmt. +func ToBlockStmt(x ast.Node) *ast.BlockStmt { + if x, ok := x.(*ast.BlockStmt); ok { + return x + } + return NilBlockStmt +} + +// ToBranchStmt returns x as a non-nil *ast.BranchStmt. +// If ast.Node actually has such dynamic type, the result is +// identical to normal type assertion. In case if it has +// different type, the returned value is NilBranchStmt. +func ToBranchStmt(x ast.Node) *ast.BranchStmt { + if x, ok := x.(*ast.BranchStmt); ok { + return x + } + return NilBranchStmt +} + +// ToCaseClause returns x as a non-nil *ast.CaseClause. +// If ast.Node actually has such dynamic type, the result is +// identical to normal type assertion. In case if it has +// different type, the returned value is NilCaseClause. +func ToCaseClause(x ast.Node) *ast.CaseClause { + if x, ok := x.(*ast.CaseClause); ok { + return x + } + return NilCaseClause +} + +// ToCommClause returns x as a non-nil *ast.CommClause. +// If ast.Node actually has such dynamic type, the result is +// identical to normal type assertion. In case if it has +// different type, the returned value is NilCommClause. +func ToCommClause(x ast.Node) *ast.CommClause { + if x, ok := x.(*ast.CommClause); ok { + return x + } + return NilCommClause +} + +// ToDeclStmt returns x as a non-nil *ast.DeclStmt. +// If ast.Node actually has such dynamic type, the result is +// identical to normal type assertion. In case if it has +// different type, the returned value is NilDeclStmt. +func ToDeclStmt(x ast.Node) *ast.DeclStmt { + if x, ok := x.(*ast.DeclStmt); ok { + return x + } + return NilDeclStmt +} + +// ToDeferStmt returns x as a non-nil *ast.DeferStmt. +// If ast.Node actually has such dynamic type, the result is +// identical to normal type assertion. In case if it has +// different type, the returned value is NilDeferStmt. +func ToDeferStmt(x ast.Node) *ast.DeferStmt { + if x, ok := x.(*ast.DeferStmt); ok { + return x + } + return NilDeferStmt +} + +// ToEmptyStmt returns x as a non-nil *ast.EmptyStmt. +// If ast.Node actually has such dynamic type, the result is +// identical to normal type assertion. 
In case if it has +// different type, the returned value is NilEmptyStmt. +func ToEmptyStmt(x ast.Node) *ast.EmptyStmt { + if x, ok := x.(*ast.EmptyStmt); ok { + return x + } + return NilEmptyStmt +} + +// ToExprStmt returns x as a non-nil *ast.ExprStmt. +// If ast.Node actually has such dynamic type, the result is +// identical to normal type assertion. In case if it has +// different type, the returned value is NilExprStmt. +func ToExprStmt(x ast.Node) *ast.ExprStmt { + if x, ok := x.(*ast.ExprStmt); ok { + return x + } + return NilExprStmt +} + +// ToForStmt returns x as a non-nil *ast.ForStmt. +// If ast.Node actually has such dynamic type, the result is +// identical to normal type assertion. In case if it has +// different type, the returned value is NilForStmt. +func ToForStmt(x ast.Node) *ast.ForStmt { + if x, ok := x.(*ast.ForStmt); ok { + return x + } + return NilForStmt +} + +// ToGoStmt returns x as a non-nil *ast.GoStmt. +// If ast.Node actually has such dynamic type, the result is +// identical to normal type assertion. In case if it has +// different type, the returned value is NilGoStmt. +func ToGoStmt(x ast.Node) *ast.GoStmt { + if x, ok := x.(*ast.GoStmt); ok { + return x + } + return NilGoStmt +} + +// ToIfStmt returns x as a non-nil *ast.IfStmt. +// If ast.Node actually has such dynamic type, the result is +// identical to normal type assertion. In case if it has +// different type, the returned value is NilIfStmt. +func ToIfStmt(x ast.Node) *ast.IfStmt { + if x, ok := x.(*ast.IfStmt); ok { + return x + } + return NilIfStmt +} + +// ToIncDecStmt returns x as a non-nil *ast.IncDecStmt. +// If ast.Node actually has such dynamic type, the result is +// identical to normal type assertion. In case if it has +// different type, the returned value is NilIncDecStmt. +func ToIncDecStmt(x ast.Node) *ast.IncDecStmt { + if x, ok := x.(*ast.IncDecStmt); ok { + return x + } + return NilIncDecStmt +} + +// ToLabeledStmt returns x as a non-nil *ast.LabeledStmt. +// If ast.Node actually has such dynamic type, the result is +// identical to normal type assertion. In case if it has +// different type, the returned value is NilLabeledStmt. +func ToLabeledStmt(x ast.Node) *ast.LabeledStmt { + if x, ok := x.(*ast.LabeledStmt); ok { + return x + } + return NilLabeledStmt +} + +// ToRangeStmt returns x as a non-nil *ast.RangeStmt. +// If ast.Node actually has such dynamic type, the result is +// identical to normal type assertion. In case if it has +// different type, the returned value is NilRangeStmt. +func ToRangeStmt(x ast.Node) *ast.RangeStmt { + if x, ok := x.(*ast.RangeStmt); ok { + return x + } + return NilRangeStmt +} + +// ToReturnStmt returns x as a non-nil *ast.ReturnStmt. +// If ast.Node actually has such dynamic type, the result is +// identical to normal type assertion. In case if it has +// different type, the returned value is NilReturnStmt. +func ToReturnStmt(x ast.Node) *ast.ReturnStmt { + if x, ok := x.(*ast.ReturnStmt); ok { + return x + } + return NilReturnStmt +} + +// ToSelectStmt returns x as a non-nil *ast.SelectStmt. +// If ast.Node actually has such dynamic type, the result is +// identical to normal type assertion. In case if it has +// different type, the returned value is NilSelectStmt. +func ToSelectStmt(x ast.Node) *ast.SelectStmt { + if x, ok := x.(*ast.SelectStmt); ok { + return x + } + return NilSelectStmt +} + +// ToSendStmt returns x as a non-nil *ast.SendStmt. 
+// If ast.Node actually has such dynamic type, the result is +// identical to normal type assertion. In case if it has +// different type, the returned value is NilSendStmt. +func ToSendStmt(x ast.Node) *ast.SendStmt { + if x, ok := x.(*ast.SendStmt); ok { + return x + } + return NilSendStmt +} + +// ToSwitchStmt returns x as a non-nil *ast.SwitchStmt. +// If ast.Node actually has such dynamic type, the result is +// identical to normal type assertion. In case if it has +// different type, the returned value is NilSwitchStmt. +func ToSwitchStmt(x ast.Node) *ast.SwitchStmt { + if x, ok := x.(*ast.SwitchStmt); ok { + return x + } + return NilSwitchStmt +} + +// ToTypeSwitchStmt returns x as a non-nil *ast.TypeSwitchStmt. +// If ast.Node actually has such dynamic type, the result is +// identical to normal type assertion. In case if it has +// different type, the returned value is NilTypeSwitchStmt. +func ToTypeSwitchStmt(x ast.Node) *ast.TypeSwitchStmt { + if x, ok := x.(*ast.TypeSwitchStmt); ok { + return x + } + return NilTypeSwitchStmt +} + +// ToComment returns x as a non-nil *ast.Comment. +// If ast.Node actually has such dynamic type, the result is +// identical to normal type assertion. In case if it has +// different type, the returned value is NilComment. +func ToComment(x ast.Node) *ast.Comment { + if x, ok := x.(*ast.Comment); ok { + return x + } + return NilComment +} + +// ToCommentGroup returns x as a non-nil *ast.CommentGroup. +// If ast.Node actually has such dynamic type, the result is +// identical to normal type assertion. In case if it has +// different type, the returned value is NilCommentGroup. +func ToCommentGroup(x ast.Node) *ast.CommentGroup { + if x, ok := x.(*ast.CommentGroup); ok { + return x + } + return NilCommentGroup +} + +// ToFieldList returns x as a non-nil *ast.FieldList. +// If ast.Node actually has such dynamic type, the result is +// identical to normal type assertion. In case if it has +// different type, the returned value is NilFieldList. +func ToFieldList(x ast.Node) *ast.FieldList { + if x, ok := x.(*ast.FieldList); ok { + return x + } + return NilFieldList +} + +// ToFile returns x as a non-nil *ast.File. +// If ast.Node actually has such dynamic type, the result is +// identical to normal type assertion. In case if it has +// different type, the returned value is NilFile. +func ToFile(x ast.Node) *ast.File { + if x, ok := x.(*ast.File); ok { + return x + } + return NilFile +} + +// ToPackage returns x as a non-nil *ast.Package. +// If ast.Node actually has such dynamic type, the result is +// identical to normal type assertion. In case if it has +// different type, the returned value is NilPackage. 
+func ToPackage(x ast.Node) *ast.Package { + if x, ok := x.(*ast.Package); ok { + return x + } + return NilPackage +} diff --git a/vendor/github.com/go-toolsmith/astcast/go.mod b/vendor/github.com/go-toolsmith/astcast/go.mod new file mode 100644 index 000000000..3e431993e --- /dev/null +++ b/vendor/github.com/go-toolsmith/astcast/go.mod @@ -0,0 +1,6 @@ +module github.com/go-toolsmith/astcast + +require ( + github.com/go-toolsmith/astequal v1.0.0 // indirect + github.com/go-toolsmith/strparse v1.0.0 +) diff --git a/vendor/github.com/go-toolsmith/astcast/go.sum b/vendor/github.com/go-toolsmith/astcast/go.sum new file mode 100644 index 000000000..aa0857030 --- /dev/null +++ b/vendor/github.com/go-toolsmith/astcast/go.sum @@ -0,0 +1,4 @@ +github.com/go-toolsmith/astequal v1.0.0 h1:4zxD8j3JRFNyLN46lodQuqz3xdKSrur7U/sr0SDS/gQ= +github.com/go-toolsmith/astequal v1.0.0/go.mod h1:H+xSiq0+LtiDC11+h1G32h7Of5O3CYFJ99GVbS5lDKY= +github.com/go-toolsmith/strparse v1.0.0 h1:Vcw78DnpCAKlM20kSbAyO4mPfJn/lyYA4BJUDxe2Jb4= +github.com/go-toolsmith/strparse v1.0.0/go.mod h1:YI2nUKP9YGZnL/L1/DLFBfixrcjslWct4wyljWhSRy8= diff --git a/vendor/github.com/go-toolsmith/astcopy/.travis.yml b/vendor/github.com/go-toolsmith/astcopy/.travis.yml new file mode 100644 index 000000000..8994d395c --- /dev/null +++ b/vendor/github.com/go-toolsmith/astcopy/.travis.yml @@ -0,0 +1,9 @@ +language: go +go: + - 1.x +install: + - # Prevent default install action "go get -t -v ./...". +script: + - go get -t -v ./... + - go tool vet . + - go test -v -race ./... \ No newline at end of file diff --git a/vendor/github.com/go-toolsmith/astcopy/LICENSE b/vendor/github.com/go-toolsmith/astcopy/LICENSE new file mode 100644 index 000000000..eef17180f --- /dev/null +++ b/vendor/github.com/go-toolsmith/astcopy/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2018 go-toolsmith + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/vendor/github.com/go-toolsmith/astcopy/README.md b/vendor/github.com/go-toolsmith/astcopy/README.md new file mode 100644 index 000000000..4dae5c41b --- /dev/null +++ b/vendor/github.com/go-toolsmith/astcopy/README.md @@ -0,0 +1,41 @@ +[![Go Report Card](https://goreportcard.com/badge/github.com/go-toolsmith/astcopy)](https://goreportcard.com/report/github.com/go-toolsmith/astcopy) +[![GoDoc](https://godoc.org/github.com/go-toolsmith/astcopy?status.svg)](https://godoc.org/github.com/go-toolsmith/astcopy) +[![Build Status](https://travis-ci.org/go-toolsmith/astcopy.svg?branch=master)](https://travis-ci.org/go-toolsmith/astcopy) + +# astcopy + +Package astcopy implements Go AST reflection-free deep copy operations. + +## Installation: + +```bash +go get github.com/go-toolsmith/astcopy +``` + +## Example + +```go +package main + +import ( + "fmt" + "go/ast" + "go/token" + + "github.com/go-toolsmith/astcopy" + "github.com/go-toolsmith/astequal" + "github.com/go-toolsmith/strparse" +) + +func main() { + x := strparse.Expr(`1 + 2`).(*ast.BinaryExpr) + y := astcopy.BinaryExpr(x) + fmt.Println(astequal.Expr(x, y)) // => true + + // Now modify x and make sure y is not modified. + z := astcopy.BinaryExpr(y) + x.Op = token.SUB + fmt.Println(astequal.Expr(y, z)) // => true + fmt.Println(astequal.Expr(x, y)) // => false +} +``` diff --git a/vendor/github.com/go-toolsmith/astcopy/astcopy.go b/vendor/github.com/go-toolsmith/astcopy/astcopy.go new file mode 100644 index 000000000..2feffb199 --- /dev/null +++ b/vendor/github.com/go-toolsmith/astcopy/astcopy.go @@ -0,0 +1,955 @@ +// Package astcopy implements Go AST reflection-free deep copy operations. +package astcopy + +import ( + "go/ast" +) + +// Node returns x node deep copy. +// Copy of nil argument is nil. +func Node(x ast.Node) ast.Node { + return copyNode(x) +} + +// NodeList returns xs node slice deep copy. +// Copy of nil argument is nil. +func NodeList(xs []ast.Node) []ast.Node { + if xs == nil { + return nil + } + cp := make([]ast.Node, len(xs)) + for i := range xs { + cp[i] = copyNode(xs[i]) + } + return cp +} + +// Expr returns x expression deep copy. +// Copy of nil argument is nil. +func Expr(x ast.Expr) ast.Expr { + return copyExpr(x) +} + +// ExprList returns xs expression slice deep copy. +// Copy of nil argument is nil. +func ExprList(xs []ast.Expr) []ast.Expr { + if xs == nil { + return nil + } + cp := make([]ast.Expr, len(xs)) + for i := range xs { + cp[i] = copyExpr(xs[i]) + } + return cp +} + +// Stmt returns x statement deep copy. +// Copy of nil argument is nil. +func Stmt(x ast.Stmt) ast.Stmt { + return copyStmt(x) +} + +// StmtList returns xs statement slice deep copy. +// Copy of nil argument is nil. +func StmtList(xs []ast.Stmt) []ast.Stmt { + if xs == nil { + return nil + } + cp := make([]ast.Stmt, len(xs)) + for i := range xs { + cp[i] = copyStmt(xs[i]) + } + return cp +} + +// Decl returns x declaration deep copy. +// Copy of nil argument is nil. +func Decl(x ast.Decl) ast.Decl { + return copyDecl(x) +} + +// DeclList returns xs declaration slice deep copy. +// Copy of nil argument is nil. +func DeclList(xs []ast.Decl) []ast.Decl { + if xs == nil { + return nil + } + cp := make([]ast.Decl, len(xs)) + for i := range xs { + cp[i] = copyDecl(xs[i]) + } + return cp +} + +// BadExpr returns x deep copy. +// Copy of nil argument is nil. +func BadExpr(x *ast.BadExpr) *ast.BadExpr { + if x == nil { + return nil + } + cp := *x + return &cp +} + +// Ident returns x deep copy. +// Copy of nil argument is nil. 
+func Ident(x *ast.Ident) *ast.Ident { + if x == nil { + return nil + } + cp := *x + return &cp +} + +// IdentList returns xs identifier slice deep copy. +// Copy of nil argument is nil. +func IdentList(xs []*ast.Ident) []*ast.Ident { + if xs == nil { + return nil + } + cp := make([]*ast.Ident, len(xs)) + for i := range xs { + cp[i] = Ident(xs[i]) + } + return cp +} + +// Ellipsis returns x deep copy. +// Copy of nil argument is nil. +func Ellipsis(x *ast.Ellipsis) *ast.Ellipsis { + if x == nil { + return nil + } + cp := *x + cp.Elt = copyExpr(x.Elt) + return &cp +} + +// BasicLit returns x deep copy. +// Copy of nil argument is nil. +func BasicLit(x *ast.BasicLit) *ast.BasicLit { + if x == nil { + return nil + } + cp := *x + return &cp +} + +// FuncLit returns x deep copy. +// Copy of nil argument is nil. +func FuncLit(x *ast.FuncLit) *ast.FuncLit { + if x == nil { + return nil + } + cp := *x + cp.Type = FuncType(x.Type) + cp.Body = BlockStmt(x.Body) + return &cp +} + +// CompositeLit returns x deep copy. +// Copy of nil argument is nil. +func CompositeLit(x *ast.CompositeLit) *ast.CompositeLit { + if x == nil { + return nil + } + cp := *x + cp.Type = copyExpr(x.Type) + cp.Elts = ExprList(x.Elts) + return &cp +} + +// ParenExpr returns x deep copy. +// Copy of nil argument is nil. +func ParenExpr(x *ast.ParenExpr) *ast.ParenExpr { + if x == nil { + return nil + } + cp := *x + cp.X = copyExpr(x.X) + return &cp +} + +// SelectorExpr returns x deep copy. +// Copy of nil argument is nil. +func SelectorExpr(x *ast.SelectorExpr) *ast.SelectorExpr { + if x == nil { + return nil + } + cp := *x + cp.X = copyExpr(x.X) + cp.Sel = Ident(x.Sel) + return &cp +} + +// IndexExpr returns x deep copy. +// Copy of nil argument is nil. +func IndexExpr(x *ast.IndexExpr) *ast.IndexExpr { + if x == nil { + return nil + } + cp := *x + cp.X = copyExpr(x.X) + cp.Index = copyExpr(x.Index) + return &cp +} + +// SliceExpr returns x deep copy. +// Copy of nil argument is nil. +func SliceExpr(x *ast.SliceExpr) *ast.SliceExpr { + if x == nil { + return nil + } + cp := *x + cp.X = copyExpr(x.X) + cp.Low = copyExpr(x.Low) + cp.High = copyExpr(x.High) + cp.Max = copyExpr(x.Max) + return &cp +} + +// TypeAssertExpr returns x deep copy. +// Copy of nil argument is nil. +func TypeAssertExpr(x *ast.TypeAssertExpr) *ast.TypeAssertExpr { + if x == nil { + return nil + } + cp := *x + cp.X = copyExpr(x.X) + cp.Type = copyExpr(x.Type) + return &cp +} + +// CallExpr returns x deep copy. +// Copy of nil argument is nil. +func CallExpr(x *ast.CallExpr) *ast.CallExpr { + if x == nil { + return nil + } + cp := *x + cp.Fun = copyExpr(x.Fun) + cp.Args = ExprList(x.Args) + return &cp +} + +// StarExpr returns x deep copy. +// Copy of nil argument is nil. +func StarExpr(x *ast.StarExpr) *ast.StarExpr { + if x == nil { + return nil + } + cp := *x + cp.X = copyExpr(x.X) + return &cp +} + +// UnaryExpr returns x deep copy. +// Copy of nil argument is nil. +func UnaryExpr(x *ast.UnaryExpr) *ast.UnaryExpr { + if x == nil { + return nil + } + cp := *x + cp.X = copyExpr(x.X) + return &cp +} + +// BinaryExpr returns x deep copy. +// Copy of nil argument is nil. +func BinaryExpr(x *ast.BinaryExpr) *ast.BinaryExpr { + if x == nil { + return nil + } + cp := *x + cp.X = copyExpr(x.X) + cp.Y = copyExpr(x.Y) + return &cp +} + +// KeyValueExpr returns x deep copy. +// Copy of nil argument is nil. 
+func KeyValueExpr(x *ast.KeyValueExpr) *ast.KeyValueExpr { + if x == nil { + return nil + } + cp := *x + cp.Key = copyExpr(x.Key) + cp.Value = copyExpr(x.Value) + return &cp +} + +// ArrayType returns x deep copy. +// Copy of nil argument is nil. +func ArrayType(x *ast.ArrayType) *ast.ArrayType { + if x == nil { + return nil + } + cp := *x + cp.Len = copyExpr(x.Len) + cp.Elt = copyExpr(x.Elt) + return &cp +} + +// StructType returns x deep copy. +// Copy of nil argument is nil. +func StructType(x *ast.StructType) *ast.StructType { + if x == nil { + return nil + } + cp := *x + cp.Fields = FieldList(x.Fields) + return &cp +} + +// Field returns x deep copy. +// Copy of nil argument is nil. +func Field(x *ast.Field) *ast.Field { + if x == nil { + return nil + } + cp := *x + cp.Names = IdentList(x.Names) + cp.Type = copyExpr(x.Type) + cp.Tag = BasicLit(x.Tag) + cp.Doc = CommentGroup(x.Doc) + cp.Comment = CommentGroup(x.Comment) + return &cp +} + +// FieldList returns x deep copy. +// Copy of nil argument is nil. +func FieldList(x *ast.FieldList) *ast.FieldList { + if x == nil { + return nil + } + cp := *x + if x.List != nil { + cp.List = make([]*ast.Field, len(x.List)) + for i := range x.List { + cp.List[i] = Field(x.List[i]) + } + } + return &cp +} + +// FuncType returns x deep copy. +// Copy of nil argument is nil. +func FuncType(x *ast.FuncType) *ast.FuncType { + if x == nil { + return nil + } + cp := *x + cp.Params = FieldList(x.Params) + cp.Results = FieldList(x.Results) + return &cp +} + +// InterfaceType returns x deep copy. +// Copy of nil argument is nil. +func InterfaceType(x *ast.InterfaceType) *ast.InterfaceType { + if x == nil { + return nil + } + cp := *x + cp.Methods = FieldList(x.Methods) + return &cp +} + +// MapType returns x deep copy. +// Copy of nil argument is nil. +func MapType(x *ast.MapType) *ast.MapType { + if x == nil { + return nil + } + cp := *x + cp.Key = copyExpr(x.Key) + cp.Value = copyExpr(x.Value) + return &cp +} + +// ChanType returns x deep copy. +// Copy of nil argument is nil. +func ChanType(x *ast.ChanType) *ast.ChanType { + if x == nil { + return nil + } + cp := *x + cp.Value = copyExpr(x.Value) + return &cp +} + +// BlockStmt returns x deep copy. +// Copy of nil argument is nil. +func BlockStmt(x *ast.BlockStmt) *ast.BlockStmt { + if x == nil { + return nil + } + cp := *x + cp.List = StmtList(x.List) + return &cp +} + +// ImportSpec returns x deep copy. +// Copy of nil argument is nil. +func ImportSpec(x *ast.ImportSpec) *ast.ImportSpec { + if x == nil { + return nil + } + cp := *x + cp.Name = Ident(x.Name) + cp.Path = BasicLit(x.Path) + cp.Doc = CommentGroup(x.Doc) + cp.Comment = CommentGroup(x.Comment) + return &cp +} + +// ValueSpec returns x deep copy. +// Copy of nil argument is nil. +func ValueSpec(x *ast.ValueSpec) *ast.ValueSpec { + if x == nil { + return nil + } + cp := *x + cp.Names = IdentList(x.Names) + cp.Values = ExprList(x.Values) + cp.Type = copyExpr(x.Type) + cp.Doc = CommentGroup(x.Doc) + cp.Comment = CommentGroup(x.Comment) + return &cp +} + +// TypeSpec returns x deep copy. +// Copy of nil argument is nil. +func TypeSpec(x *ast.TypeSpec) *ast.TypeSpec { + if x == nil { + return nil + } + cp := *x + cp.Name = Ident(x.Name) + cp.Type = copyExpr(x.Type) + cp.Doc = CommentGroup(x.Doc) + cp.Comment = CommentGroup(x.Comment) + return &cp +} + +// Spec returns x deep copy. +// Copy of nil argument is nil. 
+func Spec(x ast.Spec) ast.Spec { + if x == nil { + return nil + } + + switch x := x.(type) { + case *ast.ImportSpec: + return ImportSpec(x) + case *ast.ValueSpec: + return ValueSpec(x) + case *ast.TypeSpec: + return TypeSpec(x) + default: + panic("unhandled spec") + } +} + +// SpecList returns xs spec slice deep copy. +// Copy of nil argument is nil. +func SpecList(xs []ast.Spec) []ast.Spec { + if xs == nil { + return nil + } + cp := make([]ast.Spec, len(xs)) + for i := range xs { + cp[i] = Spec(xs[i]) + } + return cp +} + +// BadStmt returns x deep copy. +// Copy of nil argument is nil. +func BadStmt(x *ast.BadStmt) *ast.BadStmt { + if x == nil { + return nil + } + cp := *x + return &cp +} + +// DeclStmt returns x deep copy. +// Copy of nil argument is nil. +func DeclStmt(x *ast.DeclStmt) *ast.DeclStmt { + if x == nil { + return nil + } + cp := *x + cp.Decl = copyDecl(x.Decl) + return &cp +} + +// EmptyStmt returns x deep copy. +// Copy of nil argument is nil. +func EmptyStmt(x *ast.EmptyStmt) *ast.EmptyStmt { + if x == nil { + return nil + } + cp := *x + return &cp +} + +// LabeledStmt returns x deep copy. +// Copy of nil argument is nil. +func LabeledStmt(x *ast.LabeledStmt) *ast.LabeledStmt { + if x == nil { + return nil + } + cp := *x + cp.Label = Ident(x.Label) + cp.Stmt = copyStmt(x.Stmt) + return &cp +} + +// ExprStmt returns x deep copy. +// Copy of nil argument is nil. +func ExprStmt(x *ast.ExprStmt) *ast.ExprStmt { + if x == nil { + return nil + } + cp := *x + cp.X = copyExpr(x.X) + return &cp +} + +// SendStmt returns x deep copy. +// Copy of nil argument is nil. +func SendStmt(x *ast.SendStmt) *ast.SendStmt { + if x == nil { + return nil + } + cp := *x + cp.Chan = copyExpr(x.Chan) + cp.Value = copyExpr(x.Value) + return &cp +} + +// IncDecStmt returns x deep copy. +// Copy of nil argument is nil. +func IncDecStmt(x *ast.IncDecStmt) *ast.IncDecStmt { + if x == nil { + return nil + } + cp := *x + cp.X = copyExpr(x.X) + return &cp +} + +// AssignStmt returns x deep copy. +// Copy of nil argument is nil. +func AssignStmt(x *ast.AssignStmt) *ast.AssignStmt { + if x == nil { + return nil + } + cp := *x + cp.Lhs = ExprList(x.Lhs) + cp.Rhs = ExprList(x.Rhs) + return &cp +} + +// GoStmt returns x deep copy. +// Copy of nil argument is nil. +func GoStmt(x *ast.GoStmt) *ast.GoStmt { + if x == nil { + return nil + } + cp := *x + cp.Call = CallExpr(x.Call) + return &cp +} + +// DeferStmt returns x deep copy. +// Copy of nil argument is nil. +func DeferStmt(x *ast.DeferStmt) *ast.DeferStmt { + if x == nil { + return nil + } + cp := *x + cp.Call = CallExpr(x.Call) + return &cp +} + +// ReturnStmt returns x deep copy. +// Copy of nil argument is nil. +func ReturnStmt(x *ast.ReturnStmt) *ast.ReturnStmt { + if x == nil { + return nil + } + cp := *x + cp.Results = ExprList(x.Results) + return &cp +} + +// BranchStmt returns x deep copy. +// Copy of nil argument is nil. +func BranchStmt(x *ast.BranchStmt) *ast.BranchStmt { + if x == nil { + return nil + } + cp := *x + cp.Label = Ident(x.Label) + return &cp +} + +// IfStmt returns x deep copy. +// Copy of nil argument is nil. +func IfStmt(x *ast.IfStmt) *ast.IfStmt { + if x == nil { + return nil + } + cp := *x + cp.Init = copyStmt(x.Init) + cp.Cond = copyExpr(x.Cond) + cp.Body = BlockStmt(x.Body) + cp.Else = copyStmt(x.Else) + return &cp +} + +// CaseClause returns x deep copy. +// Copy of nil argument is nil. 
+func CaseClause(x *ast.CaseClause) *ast.CaseClause { + if x == nil { + return nil + } + cp := *x + cp.List = ExprList(x.List) + cp.Body = StmtList(x.Body) + return &cp +} + +// SwitchStmt returns x deep copy. +// Copy of nil argument is nil. +func SwitchStmt(x *ast.SwitchStmt) *ast.SwitchStmt { + if x == nil { + return nil + } + cp := *x + cp.Init = copyStmt(x.Init) + cp.Tag = copyExpr(x.Tag) + cp.Body = BlockStmt(x.Body) + return &cp +} + +// TypeSwitchStmt returns x deep copy. +// Copy of nil argument is nil. +func TypeSwitchStmt(x *ast.TypeSwitchStmt) *ast.TypeSwitchStmt { + if x == nil { + return nil + } + cp := *x + cp.Init = copyStmt(x.Init) + cp.Assign = copyStmt(x.Assign) + cp.Body = BlockStmt(x.Body) + return &cp +} + +// CommClause returns x deep copy. +// Copy of nil argument is nil. +func CommClause(x *ast.CommClause) *ast.CommClause { + if x == nil { + return nil + } + cp := *x + cp.Comm = copyStmt(x.Comm) + cp.Body = StmtList(x.Body) + return &cp +} + +// SelectStmt returns x deep copy. +// Copy of nil argument is nil. +func SelectStmt(x *ast.SelectStmt) *ast.SelectStmt { + if x == nil { + return nil + } + cp := *x + cp.Body = BlockStmt(x.Body) + return &cp +} + +// ForStmt returns x deep copy. +// Copy of nil argument is nil. +func ForStmt(x *ast.ForStmt) *ast.ForStmt { + if x == nil { + return nil + } + cp := *x + cp.Init = copyStmt(x.Init) + cp.Cond = copyExpr(x.Cond) + cp.Post = copyStmt(x.Post) + cp.Body = BlockStmt(x.Body) + return &cp +} + +// RangeStmt returns x deep copy. +// Copy of nil argument is nil. +func RangeStmt(x *ast.RangeStmt) *ast.RangeStmt { + if x == nil { + return nil + } + cp := *x + cp.Key = copyExpr(x.Key) + cp.Value = copyExpr(x.Value) + cp.X = copyExpr(x.X) + cp.Body = BlockStmt(x.Body) + return &cp +} + +// Comment returns x deep copy. +// Copy of nil argument is nil. +func Comment(x *ast.Comment) *ast.Comment { + if x == nil { + return nil + } + cp := *x + return &cp +} + +// CommentGroup returns x deep copy. +// Copy of nil argument is nil. +func CommentGroup(x *ast.CommentGroup) *ast.CommentGroup { + if x == nil { + return nil + } + cp := *x + if x.List != nil { + cp.List = make([]*ast.Comment, len(x.List)) + for i := range x.List { + cp.List[i] = Comment(x.List[i]) + } + } + return &cp +} + +// File returns x deep copy. +// Copy of nil argument is nil. +func File(x *ast.File) *ast.File { + if x == nil { + return nil + } + cp := *x + cp.Doc = CommentGroup(x.Doc) + cp.Name = Ident(x.Name) + cp.Decls = DeclList(x.Decls) + cp.Imports = make([]*ast.ImportSpec, len(x.Imports)) + for i := range x.Imports { + cp.Imports[i] = ImportSpec(x.Imports[i]) + } + cp.Unresolved = IdentList(x.Unresolved) + cp.Comments = make([]*ast.CommentGroup, len(x.Comments)) + for i := range x.Comments { + cp.Comments[i] = CommentGroup(x.Comments[i]) + } + return &cp +} + +// Package returns x deep copy. +// Copy of nil argument is nil. +func Package(x *ast.Package) *ast.Package { + if x == nil { + return nil + } + cp := *x + cp.Files = make(map[string]*ast.File) + for filename, f := range x.Files { + cp.Files[filename] = f + } + return &cp +} + +// BadDecl returns x deep copy. +// Copy of nil argument is nil. +func BadDecl(x *ast.BadDecl) *ast.BadDecl { + if x == nil { + return nil + } + cp := *x + return &cp +} + +// GenDecl returns x deep copy. +// Copy of nil argument is nil. 
+func GenDecl(x *ast.GenDecl) *ast.GenDecl { + if x == nil { + return nil + } + cp := *x + cp.Specs = SpecList(x.Specs) + cp.Doc = CommentGroup(x.Doc) + return &cp +} + +// FuncDecl returns x deep copy. +// Copy of nil argument is nil. +func FuncDecl(x *ast.FuncDecl) *ast.FuncDecl { + if x == nil { + return nil + } + cp := *x + cp.Recv = FieldList(x.Recv) + cp.Name = Ident(x.Name) + cp.Type = FuncType(x.Type) + cp.Body = BlockStmt(x.Body) + cp.Doc = CommentGroup(x.Doc) + return &cp +} + +func copyNode(x ast.Node) ast.Node { + switch x := x.(type) { + case ast.Expr: + return copyExpr(x) + case ast.Stmt: + return copyStmt(x) + case ast.Decl: + return copyDecl(x) + + case ast.Spec: + return Spec(x) + case *ast.FieldList: + return FieldList(x) + case *ast.Comment: + return Comment(x) + case *ast.CommentGroup: + return CommentGroup(x) + case *ast.File: + return File(x) + case *ast.Package: + return Package(x) + + default: + panic("unhandled node") + } +} + +func copyExpr(x ast.Expr) ast.Expr { + if x == nil { + return nil + } + + switch x := x.(type) { + case *ast.BadExpr: + return BadExpr(x) + case *ast.Ident: + return Ident(x) + case *ast.Ellipsis: + return Ellipsis(x) + case *ast.BasicLit: + return BasicLit(x) + case *ast.FuncLit: + return FuncLit(x) + case *ast.CompositeLit: + return CompositeLit(x) + case *ast.ParenExpr: + return ParenExpr(x) + case *ast.SelectorExpr: + return SelectorExpr(x) + case *ast.IndexExpr: + return IndexExpr(x) + case *ast.SliceExpr: + return SliceExpr(x) + case *ast.TypeAssertExpr: + return TypeAssertExpr(x) + case *ast.CallExpr: + return CallExpr(x) + case *ast.StarExpr: + return StarExpr(x) + case *ast.UnaryExpr: + return UnaryExpr(x) + case *ast.BinaryExpr: + return BinaryExpr(x) + case *ast.KeyValueExpr: + return KeyValueExpr(x) + case *ast.ArrayType: + return ArrayType(x) + case *ast.StructType: + return StructType(x) + case *ast.FuncType: + return FuncType(x) + case *ast.InterfaceType: + return InterfaceType(x) + case *ast.MapType: + return MapType(x) + case *ast.ChanType: + return ChanType(x) + + default: + panic("unhandled expr") + } +} + +func copyStmt(x ast.Stmt) ast.Stmt { + if x == nil { + return nil + } + + switch x := x.(type) { + case *ast.BadStmt: + return BadStmt(x) + case *ast.DeclStmt: + return DeclStmt(x) + case *ast.EmptyStmt: + return EmptyStmt(x) + case *ast.LabeledStmt: + return LabeledStmt(x) + case *ast.ExprStmt: + return ExprStmt(x) + case *ast.SendStmt: + return SendStmt(x) + case *ast.IncDecStmt: + return IncDecStmt(x) + case *ast.AssignStmt: + return AssignStmt(x) + case *ast.GoStmt: + return GoStmt(x) + case *ast.DeferStmt: + return DeferStmt(x) + case *ast.ReturnStmt: + return ReturnStmt(x) + case *ast.BranchStmt: + return BranchStmt(x) + case *ast.BlockStmt: + return BlockStmt(x) + case *ast.IfStmt: + return IfStmt(x) + case *ast.CaseClause: + return CaseClause(x) + case *ast.SwitchStmt: + return SwitchStmt(x) + case *ast.TypeSwitchStmt: + return TypeSwitchStmt(x) + case *ast.CommClause: + return CommClause(x) + case *ast.SelectStmt: + return SelectStmt(x) + case *ast.ForStmt: + return ForStmt(x) + case *ast.RangeStmt: + return RangeStmt(x) + + default: + panic("unhandled stmt") + } +} + +func copyDecl(x ast.Decl) ast.Decl { + if x == nil { + return nil + } + + switch x := x.(type) { + case *ast.BadDecl: + return BadDecl(x) + case *ast.GenDecl: + return GenDecl(x) + case *ast.FuncDecl: + return FuncDecl(x) + + default: + panic("unhandled decl") + } +} diff --git a/vendor/github.com/go-toolsmith/astcopy/go.mod 
b/vendor/github.com/go-toolsmith/astcopy/go.mod new file mode 100644 index 000000000..6f3b3027a --- /dev/null +++ b/vendor/github.com/go-toolsmith/astcopy/go.mod @@ -0,0 +1,6 @@ +module github.com/go-toolsmith/astcopy + +require ( + github.com/go-toolsmith/astequal v1.0.0 + github.com/go-toolsmith/strparse v1.0.0 +) diff --git a/vendor/github.com/go-toolsmith/astcopy/go.sum b/vendor/github.com/go-toolsmith/astcopy/go.sum new file mode 100644 index 000000000..aa0857030 --- /dev/null +++ b/vendor/github.com/go-toolsmith/astcopy/go.sum @@ -0,0 +1,4 @@ +github.com/go-toolsmith/astequal v1.0.0 h1:4zxD8j3JRFNyLN46lodQuqz3xdKSrur7U/sr0SDS/gQ= +github.com/go-toolsmith/astequal v1.0.0/go.mod h1:H+xSiq0+LtiDC11+h1G32h7Of5O3CYFJ99GVbS5lDKY= +github.com/go-toolsmith/strparse v1.0.0 h1:Vcw78DnpCAKlM20kSbAyO4mPfJn/lyYA4BJUDxe2Jb4= +github.com/go-toolsmith/strparse v1.0.0/go.mod h1:YI2nUKP9YGZnL/L1/DLFBfixrcjslWct4wyljWhSRy8= diff --git a/vendor/github.com/go-toolsmith/astequal/.gitignore b/vendor/github.com/go-toolsmith/astequal/.gitignore new file mode 100644 index 000000000..f38c2b852 --- /dev/null +++ b/vendor/github.com/go-toolsmith/astequal/.gitignore @@ -0,0 +1,5 @@ +bin +pkg +src/main +tmp + diff --git a/vendor/github.com/go-toolsmith/astequal/.travis.yml b/vendor/github.com/go-toolsmith/astequal/.travis.yml new file mode 100644 index 000000000..8994d395c --- /dev/null +++ b/vendor/github.com/go-toolsmith/astequal/.travis.yml @@ -0,0 +1,9 @@ +language: go +go: + - 1.x +install: + - # Prevent default install action "go get -t -v ./...". +script: + - go get -t -v ./... + - go tool vet . + - go test -v -race ./... \ No newline at end of file diff --git a/vendor/github.com/go-toolsmith/astequal/LICENSE b/vendor/github.com/go-toolsmith/astequal/LICENSE new file mode 100644 index 000000000..717f894f5 --- /dev/null +++ b/vendor/github.com/go-toolsmith/astequal/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2017 Iskander Sharipov / Quasilyte + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
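The go-toolsmith helpers vendored in this patch are typically combined inside checkers, mirroring the copy-then-mutate pattern of the yoda-style checker shown earlier in this hunk: copy a node with astcopy, modify only the copy, then compare or print it. A minimal stand-alone sketch, not taken from the vendored sources (the expression and printed results are illustrative):

```go
package main

import (
	"fmt"
	"go/ast"

	"github.com/go-toolsmith/astcopy"
	"github.com/go-toolsmith/astequal"
	"github.com/go-toolsmith/strparse"
)

func main() {
	// Parse a yoda-style comparison and make a deep copy of it.
	orig := strparse.Expr(`0 == len(xs)`).(*ast.BinaryExpr)
	suggested := astcopy.BinaryExpr(orig)

	// Mutate only the copy: swap the operands to get `len(xs) == 0`.
	suggested.X, suggested.Y = suggested.Y, suggested.X

	// The original node is untouched, so the two no longer compare equal.
	fmt.Println(astequal.Expr(orig, suggested)) // => false
	fmt.Println(astequal.Expr(orig, orig))      // => true
}
```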
diff --git a/vendor/github.com/go-toolsmith/astequal/README.md b/vendor/github.com/go-toolsmith/astequal/README.md new file mode 100644 index 000000000..b14f80f6f --- /dev/null +++ b/vendor/github.com/go-toolsmith/astequal/README.md @@ -0,0 +1,67 @@ +[![Go Report Card](https://goreportcard.com/badge/github.com/go-toolsmith/astequal)](https://goreportcard.com/report/github.com/go-toolsmith/astequal) +[![GoDoc](https://godoc.org/github.com/go-toolsmith/astequal?status.svg)](https://godoc.org/github.com/go-toolsmith/astequal) +[![Build Status](https://travis-ci.org/go-toolsmith/astequal.svg?branch=master)](https://travis-ci.org/go-toolsmith/astequal) + + +# astequal + +Package astequal provides AST (deep) equallity check operations. + +## Installation: + +```bash +go get github.com/go-toolsmith/astequal +``` + +## Example + +```go +package main + +import ( + "fmt" + "go/ast" + "go/parser" + "go/token" + "log" + "reflect" + + "github.com/go-toolsmith/astequal" +) + +func main() { + const code = ` + package foo + + func main() { + x := []int{1, 2, 3} + x := []int{1, 2, 3} + }` + + fset := token.NewFileSet() + pkg, err := parser.ParseFile(fset, "string", code, 0) + if err != nil { + log.Fatalf("parse error: %+v", err) + } + + fn := pkg.Decls[0].(*ast.FuncDecl) + x := fn.Body.List[0] + y := fn.Body.List[1] + + // Reflect DeepEqual will fail due to different Pos values. + // astequal only checks whether two nodes describe AST. + fmt.Println(reflect.DeepEqual(x, y)) // => false + fmt.Println(astequal.Node(x, y)) // => true + fmt.Println(astequal.Stmt(x, y)) // => true +} +``` + +## Performance + +`astequal` outperforms reflection-based comparison by a big margin: + +``` +BenchmarkEqualExpr/astequal.Expr-8 5000000 298 ns/op 0 B/op 0 allocs/op +BenchmarkEqualExpr/astequal.Node-8 3000000 409 ns/op 0 B/op 0 allocs/op +BenchmarkEqualExpr/reflect.DeepEqual-8 50000 38898 ns/op 10185 B/op 156 allocs/op +``` diff --git a/vendor/github.com/go-toolsmith/astequal/astequal.go b/vendor/github.com/go-toolsmith/astequal/astequal.go new file mode 100644 index 000000000..6a32d7218 --- /dev/null +++ b/vendor/github.com/go-toolsmith/astequal/astequal.go @@ -0,0 +1,734 @@ +// Package astequal provides AST (deep) equallity check operations. +package astequal + +import ( + "go/ast" + "go/token" +) + +// Node reports whether two AST nodes are structurally (deep) equal. +// +// Nil arguments are permitted: true is returned if x and y are both nils. +// +// See also: Expr, Stmt, Decl functions. +func Node(x, y ast.Node) bool { + return astNodeEq(x, y) +} + +// Expr reports whether two AST expressions are structurally (deep) equal. +// +// Nil arguments are permitted: true is returned if x and y are both nils. +// ast.BadExpr comparison always yields false. +func Expr(x, y ast.Expr) bool { + return astExprEq(x, y) +} + +// Stmt reports whether two AST statements are structurally (deep) equal. +// +// Nil arguments are permitted: true is returned if x and y are both nils. +// ast.BadStmt comparison always yields false. +func Stmt(x, y ast.Stmt) bool { + return astStmtEq(x, y) +} + +// Decl reports whether two AST declarations are structurally (deep) equal. +// +// Nil arguments are permitted: true is returned if x and y are both nils. +// ast.BadDecl comparison always yields false. +func Decl(x, y ast.Decl) bool { + return astDeclEq(x, y) +} + +// Functions to perform deep equallity checks between arbitrary AST nodes. + +// Compare interface node types. +// +// Interfaces, as well as their values, can be nil. 
+// +// Even if AST does expect field X to be mandatory, +// nil checks are required as nodes can be constructed +// manually, or be partially invalid/incomplete. + +func astNodeEq(x, y ast.Node) bool { + switch x := x.(type) { + case ast.Expr: + y, ok := y.(ast.Expr) + return ok && astExprEq(x, y) + case ast.Stmt: + y, ok := y.(ast.Stmt) + return ok && astStmtEq(x, y) + case ast.Decl: + y, ok := y.(ast.Decl) + return ok && astDeclEq(x, y) + default: + return false + } +} + +func astExprEq(x, y ast.Expr) bool { + if x == nil || y == nil { + return x == y + } + + switch x := x.(type) { + case *ast.Ident: + y, ok := y.(*ast.Ident) + return ok && astIdentEq(x, y) + + case *ast.BasicLit: + y, ok := y.(*ast.BasicLit) + return ok && astBasicLitEq(x, y) + + case *ast.FuncLit: + y, ok := y.(*ast.FuncLit) + return ok && astFuncLitEq(x, y) + + case *ast.CompositeLit: + y, ok := y.(*ast.CompositeLit) + return ok && astCompositeLitEq(x, y) + + case *ast.ParenExpr: + y, ok := y.(*ast.ParenExpr) + return ok && astParenExprEq(x, y) + + case *ast.SelectorExpr: + y, ok := y.(*ast.SelectorExpr) + return ok && astSelectorExprEq(x, y) + + case *ast.IndexExpr: + y, ok := y.(*ast.IndexExpr) + return ok && astIndexExprEq(x, y) + + case *ast.SliceExpr: + y, ok := y.(*ast.SliceExpr) + return ok && astSliceExprEq(x, y) + + case *ast.TypeAssertExpr: + y, ok := y.(*ast.TypeAssertExpr) + return ok && astTypeAssertExprEq(x, y) + + case *ast.CallExpr: + y, ok := y.(*ast.CallExpr) + return ok && astCallExprEq(x, y) + + case *ast.StarExpr: + y, ok := y.(*ast.StarExpr) + return ok && astStarExprEq(x, y) + + case *ast.UnaryExpr: + y, ok := y.(*ast.UnaryExpr) + return ok && astUnaryExprEq(x, y) + + case *ast.BinaryExpr: + y, ok := y.(*ast.BinaryExpr) + return ok && astBinaryExprEq(x, y) + + case *ast.KeyValueExpr: + y, ok := y.(*ast.KeyValueExpr) + return ok && astKeyValueExprEq(x, y) + + case *ast.ArrayType: + y, ok := y.(*ast.ArrayType) + return ok && astArrayTypeEq(x, y) + + case *ast.StructType: + y, ok := y.(*ast.StructType) + return ok && astStructTypeEq(x, y) + + case *ast.FuncType: + y, ok := y.(*ast.FuncType) + return ok && astFuncTypeEq(x, y) + + case *ast.InterfaceType: + y, ok := y.(*ast.InterfaceType) + return ok && astInterfaceTypeEq(x, y) + + case *ast.MapType: + y, ok := y.(*ast.MapType) + return ok && astMapTypeEq(x, y) + + case *ast.ChanType: + y, ok := y.(*ast.ChanType) + return ok && astChanTypeEq(x, y) + + case *ast.Ellipsis: + y, ok := y.(*ast.Ellipsis) + return ok && astEllipsisEq(x, y) + + default: + return false + } +} + +func astStmtEq(x, y ast.Stmt) bool { + if x == nil || y == nil { + return x == y + } + + switch x := x.(type) { + case *ast.ExprStmt: + y, ok := y.(*ast.ExprStmt) + return ok && astExprStmtEq(x, y) + + case *ast.SendStmt: + y, ok := y.(*ast.SendStmt) + return ok && astSendStmtEq(x, y) + + case *ast.IncDecStmt: + y, ok := y.(*ast.IncDecStmt) + return ok && astIncDecStmtEq(x, y) + + case *ast.AssignStmt: + y, ok := y.(*ast.AssignStmt) + return ok && astAssignStmtEq(x, y) + + case *ast.GoStmt: + y, ok := y.(*ast.GoStmt) + return ok && astGoStmtEq(x, y) + + case *ast.DeferStmt: + y, ok := y.(*ast.DeferStmt) + return ok && astDeferStmtEq(x, y) + + case *ast.ReturnStmt: + y, ok := y.(*ast.ReturnStmt) + return ok && astReturnStmtEq(x, y) + + case *ast.BranchStmt: + y, ok := y.(*ast.BranchStmt) + return ok && astBranchStmtEq(x, y) + + case *ast.BlockStmt: + y, ok := y.(*ast.BlockStmt) + return ok && astBlockStmtEq(x, y) + + case *ast.IfStmt: + y, ok := y.(*ast.IfStmt) + return ok && 
astIfStmtEq(x, y) + + case *ast.CaseClause: + y, ok := y.(*ast.CaseClause) + return ok && astCaseClauseEq(x, y) + + case *ast.SwitchStmt: + y, ok := y.(*ast.SwitchStmt) + return ok && astSwitchStmtEq(x, y) + + case *ast.TypeSwitchStmt: + y, ok := y.(*ast.TypeSwitchStmt) + return ok && astTypeSwitchStmtEq(x, y) + + case *ast.CommClause: + y, ok := y.(*ast.CommClause) + return ok && astCommClauseEq(x, y) + + case *ast.SelectStmt: + y, ok := y.(*ast.SelectStmt) + return ok && astSelectStmtEq(x, y) + + case *ast.ForStmt: + y, ok := y.(*ast.ForStmt) + return ok && astForStmtEq(x, y) + + case *ast.RangeStmt: + y, ok := y.(*ast.RangeStmt) + return ok && astRangeStmtEq(x, y) + + case *ast.DeclStmt: + y, ok := y.(*ast.DeclStmt) + return ok && astDeclStmtEq(x, y) + + case *ast.LabeledStmt: + y, ok := y.(*ast.LabeledStmt) + return ok && astLabeledStmtEq(x, y) + + case *ast.EmptyStmt: + y, ok := y.(*ast.EmptyStmt) + return ok && astEmptyStmtEq(x, y) + + default: + return false + } +} + +func astDeclEq(x, y ast.Decl) bool { + if x == nil || y == nil { + return x == y + } + + switch x := x.(type) { + case *ast.GenDecl: + y, ok := y.(*ast.GenDecl) + return ok && astGenDeclEq(x, y) + + case *ast.FuncDecl: + y, ok := y.(*ast.FuncDecl) + return ok && astFuncDeclEq(x, y) + + default: + return false + } +} + +// Compare concrete nodes for equallity. +// +// Any node of pointer type permitted to be nil, +// hence nil checks are mandatory. + +func astIdentEq(x, y *ast.Ident) bool { + if x == nil || y == nil { + return x == y + } + return x.Name == y.Name +} + +func astKeyValueExprEq(x, y *ast.KeyValueExpr) bool { + if x == nil || y == nil { + return x == y + } + return astExprEq(x.Key, y.Key) && astExprEq(x.Value, y.Value) +} + +func astArrayTypeEq(x, y *ast.ArrayType) bool { + if x == nil || y == nil { + return x == y + } + return astExprEq(x.Len, y.Len) && astExprEq(x.Elt, y.Elt) +} + +func astStructTypeEq(x, y *ast.StructType) bool { + if x == nil || y == nil { + return x == y + } + return astFieldListEq(x.Fields, y.Fields) +} + +func astFuncTypeEq(x, y *ast.FuncType) bool { + if x == nil || y == nil { + return x == y + } + return astFieldListEq(x.Params, y.Params) && + astFieldListEq(x.Results, y.Results) +} + +func astBasicLitEq(x, y *ast.BasicLit) bool { + if x == nil || y == nil { + return x == y + } + return x.Kind == y.Kind && x.Value == y.Value +} + +func astBlockStmtEq(x, y *ast.BlockStmt) bool { + if x == nil || y == nil { + return x == y + } + return astStmtSliceEq(x.List, y.List) +} + +func astFieldEq(x, y *ast.Field) bool { + if x == nil || y == nil { + return x == y + } + return astIdentSliceEq(x.Names, y.Names) && + astExprEq(x.Type, y.Type) +} + +func astFuncLitEq(x, y *ast.FuncLit) bool { + if x == nil || y == nil { + return x == y + } + return astFuncTypeEq(x.Type, y.Type) && + astBlockStmtEq(x.Body, y.Body) +} + +func astCompositeLitEq(x, y *ast.CompositeLit) bool { + if x == nil || y == nil { + return x == y + } + return astExprEq(x.Type, y.Type) && + astExprSliceEq(x.Elts, y.Elts) +} + +func astSelectorExprEq(x, y *ast.SelectorExpr) bool { + if x == nil || y == nil { + return x == y + } + return astExprEq(x.X, y.X) && astIdentEq(x.Sel, y.Sel) +} + +func astIndexExprEq(x, y *ast.IndexExpr) bool { + if x == nil || y == nil { + return x == y + } + return astExprEq(x.X, y.X) && astExprEq(x.Index, y.Index) +} + +func astSliceExprEq(x, y *ast.SliceExpr) bool { + if x == nil || y == nil { + return x == y + } + return astExprEq(x.X, y.X) && + astExprEq(x.Low, y.Low) && + astExprEq(x.High, y.High) 
&& + astExprEq(x.Max, y.Max) +} + +func astTypeAssertExprEq(x, y *ast.TypeAssertExpr) bool { + if x == nil || y == nil { + return x == y + } + return astExprEq(x.X, y.X) && astExprEq(x.Type, y.Type) +} + +func astInterfaceTypeEq(x, y *ast.InterfaceType) bool { + if x == nil || y == nil { + return x == y + } + return astFieldListEq(x.Methods, y.Methods) +} + +func astMapTypeEq(x, y *ast.MapType) bool { + if x == nil || y == nil { + return x == y + } + return astExprEq(x.Key, y.Key) && astExprEq(x.Value, y.Value) +} + +func astChanTypeEq(x, y *ast.ChanType) bool { + if x == nil || y == nil { + return x == y + } + return x.Dir == y.Dir && astExprEq(x.Value, y.Value) +} + +func astCallExprEq(x, y *ast.CallExpr) bool { + if x == nil || y == nil { + return x == y + } + return astExprEq(x.Fun, y.Fun) && + astExprSliceEq(x.Args, y.Args) && + (x.Ellipsis == 0) == (y.Ellipsis == 0) +} + +func astEllipsisEq(x, y *ast.Ellipsis) bool { + if x == nil || y == nil { + return x == y + } + return astExprEq(x.Elt, y.Elt) +} + +func astUnaryExprEq(x, y *ast.UnaryExpr) bool { + if x == nil || y == nil { + return x == y + } + return x.Op == y.Op && astExprEq(x.X, y.X) +} + +func astBinaryExprEq(x, y *ast.BinaryExpr) bool { + if x == nil || y == nil { + return x == y + } + return x.Op == y.Op && + astExprEq(x.X, y.X) && + astExprEq(x.Y, y.Y) +} + +func astParenExprEq(x, y *ast.ParenExpr) bool { + if x == nil || y == nil { + return x == y + } + return astExprEq(x.X, y.X) +} + +func astStarExprEq(x, y *ast.StarExpr) bool { + if x == nil || y == nil { + return x == y + } + return astExprEq(x.X, y.X) +} + +func astFieldListEq(x, y *ast.FieldList) bool { + if x == nil || y == nil { + return x == y + } + return astFieldSliceEq(x.List, y.List) +} + +func astEmptyStmtEq(x, y *ast.EmptyStmt) bool { + if x == nil || y == nil { + return x == y + } + return x.Implicit == y.Implicit +} + +func astLabeledStmtEq(x, y *ast.LabeledStmt) bool { + if x == nil || y == nil { + return x == y + } + return astIdentEq(x.Label, y.Label) && astStmtEq(x.Stmt, y.Stmt) +} + +func astExprStmtEq(x, y *ast.ExprStmt) bool { + if x == nil || y == nil { + return x == y + } + return astExprEq(x.X, y.X) +} + +func astSendStmtEq(x, y *ast.SendStmt) bool { + if x == nil || y == nil { + return x == y + } + return astExprEq(x.Chan, y.Chan) && astExprEq(x.Value, y.Value) +} + +func astDeclStmtEq(x, y *ast.DeclStmt) bool { + if x == nil || y == nil { + return x == y + } + return astDeclEq(x.Decl, y.Decl) +} + +func astIncDecStmtEq(x, y *ast.IncDecStmt) bool { + if x == nil || y == nil { + return x == y + } + return x.Tok == y.Tok && astExprEq(x.X, y.X) +} + +func astAssignStmtEq(x, y *ast.AssignStmt) bool { + if x == nil || y == nil { + return x == y + } + return x.Tok == y.Tok && + astExprSliceEq(x.Lhs, y.Lhs) && + astExprSliceEq(x.Rhs, y.Rhs) +} + +func astGoStmtEq(x, y *ast.GoStmt) bool { + if x == nil || y == nil { + return x == y + } + return astCallExprEq(x.Call, y.Call) +} + +func astDeferStmtEq(x, y *ast.DeferStmt) bool { + if x == nil || y == nil { + return x == y + } + return astCallExprEq(x.Call, y.Call) +} + +func astReturnStmtEq(x, y *ast.ReturnStmt) bool { + if x == nil || y == nil { + return x == y + } + return astExprSliceEq(x.Results, y.Results) +} + +func astBranchStmtEq(x, y *ast.BranchStmt) bool { + if x == nil || y == nil { + return x == y + } + return x.Tok == y.Tok && astIdentEq(x.Label, y.Label) +} + +func astIfStmtEq(x, y *ast.IfStmt) bool { + if x == nil || y == nil { + return x == y + } + return astStmtEq(x.Init, y.Init) && + 
astExprEq(x.Cond, y.Cond) && + astBlockStmtEq(x.Body, y.Body) && + astStmtEq(x.Else, y.Else) +} + +func astCaseClauseEq(x, y *ast.CaseClause) bool { + if x == nil || y == nil { + return x == y + } + return astExprSliceEq(x.List, y.List) && + astStmtSliceEq(x.Body, y.Body) +} + +func astSwitchStmtEq(x, y *ast.SwitchStmt) bool { + if x == nil || y == nil { + return x == y + } + return astStmtEq(x.Init, y.Init) && + astExprEq(x.Tag, y.Tag) && + astBlockStmtEq(x.Body, y.Body) +} + +func astTypeSwitchStmtEq(x, y *ast.TypeSwitchStmt) bool { + if x == nil || y == nil { + return x == y + } + return astStmtEq(x.Init, y.Init) && + astStmtEq(x.Assign, y.Assign) && + astBlockStmtEq(x.Body, y.Body) +} + +func astCommClauseEq(x, y *ast.CommClause) bool { + if x == nil || y == nil { + return x == y + } + return astStmtEq(x.Comm, y.Comm) && astStmtSliceEq(x.Body, y.Body) +} + +func astSelectStmtEq(x, y *ast.SelectStmt) bool { + if x == nil || y == nil { + return x == y + } + return astBlockStmtEq(x.Body, y.Body) +} + +func astForStmtEq(x, y *ast.ForStmt) bool { + if x == nil || y == nil { + return x == y + } + return astStmtEq(x.Init, y.Init) && + astExprEq(x.Cond, y.Cond) && + astStmtEq(x.Post, y.Post) && + astBlockStmtEq(x.Body, y.Body) +} + +func astRangeStmtEq(x, y *ast.RangeStmt) bool { + if x == nil || y == nil { + return x == y + } + return x.Tok == y.Tok && + astExprEq(x.Key, y.Key) && + astExprEq(x.Value, y.Value) && + astExprEq(x.X, y.X) && + astBlockStmtEq(x.Body, y.Body) +} + +func astFuncDeclEq(x, y *ast.FuncDecl) bool { + if x == nil || y == nil { + return x == y + } + return astFieldListEq(x.Recv, y.Recv) && + astIdentEq(x.Name, y.Name) && + astFuncTypeEq(x.Type, y.Type) && + astBlockStmtEq(x.Body, y.Body) +} + +func astGenDeclEq(x, y *ast.GenDecl) bool { + if x == nil || y == nil { + return x == y + } + + if x.Tok != y.Tok { + return false + } + if len(x.Specs) != len(y.Specs) { + return false + } + + switch x.Tok { + case token.IMPORT: + for i := range x.Specs { + xspec := x.Specs[i].(*ast.ImportSpec) + yspec := y.Specs[i].(*ast.ImportSpec) + if !astImportSpecEq(xspec, yspec) { + return false + } + } + case token.TYPE: + for i := range x.Specs { + xspec := x.Specs[i].(*ast.TypeSpec) + yspec := y.Specs[i].(*ast.TypeSpec) + if !astTypeSpecEq(xspec, yspec) { + return false + } + } + default: + for i := range x.Specs { + xspec := x.Specs[i].(*ast.ValueSpec) + yspec := y.Specs[i].(*ast.ValueSpec) + if !astValueSpecEq(xspec, yspec) { + return false + } + } + } + + return true +} + +func astImportSpecEq(x, y *ast.ImportSpec) bool { + if x == nil || y == nil { + return x == y + } + return astIdentEq(x.Name, y.Name) && astBasicLitEq(x.Path, y.Path) +} + +func astTypeSpecEq(x, y *ast.TypeSpec) bool { + if x == nil || y == nil { + return x == y + } + return astIdentEq(x.Name, y.Name) && astExprEq(x.Type, y.Type) +} + +func astValueSpecEq(x, y *ast.ValueSpec) bool { + if x == nil || y == nil { + return x == y + } + return astIdentSliceEq(x.Names, y.Names) && + astExprEq(x.Type, y.Type) && + astExprSliceEq(x.Values, y.Values) +} + +// Compare slices for equallity. +// +// Each slice element that has pointer type permitted to be nil, +// hence instead of using adhoc comparison of values, +// equallity functions that are defined above are used. 
+ +func astIdentSliceEq(xs, ys []*ast.Ident) bool { + if len(xs) != len(ys) { + return false + } + for i := range xs { + if !astIdentEq(xs[i], ys[i]) { + return false + } + } + return true +} + +func astFieldSliceEq(xs, ys []*ast.Field) bool { + if len(xs) != len(ys) { + return false + } + for i := range xs { + if !astFieldEq(xs[i], ys[i]) { + return false + } + } + return true +} + +func astStmtSliceEq(xs, ys []ast.Stmt) bool { + if len(xs) != len(ys) { + return false + } + for i := range xs { + if !astStmtEq(xs[i], ys[i]) { + return false + } + } + return true +} + +func astExprSliceEq(xs, ys []ast.Expr) bool { + if len(xs) != len(ys) { + return false + } + for i := range xs { + if !astExprEq(xs[i], ys[i]) { + return false + } + } + return true +} diff --git a/vendor/github.com/go-toolsmith/astequal/go.mod b/vendor/github.com/go-toolsmith/astequal/go.mod new file mode 100644 index 000000000..86fa40772 --- /dev/null +++ b/vendor/github.com/go-toolsmith/astequal/go.mod @@ -0,0 +1 @@ +module github.com/go-toolsmith/astequal diff --git a/vendor/github.com/go-toolsmith/astfmt/.travis.yml b/vendor/github.com/go-toolsmith/astfmt/.travis.yml new file mode 100644 index 000000000..c32ac0062 --- /dev/null +++ b/vendor/github.com/go-toolsmith/astfmt/.travis.yml @@ -0,0 +1,9 @@ +language: go +go: + - 1.x +install: + - # Prevent default install action "go get -t -v ./...". +script: + - go get -t -v ./... + - go tool vet . + - go test -v -race ./... diff --git a/vendor/github.com/go-toolsmith/astfmt/LICENSE b/vendor/github.com/go-toolsmith/astfmt/LICENSE new file mode 100644 index 000000000..eef17180f --- /dev/null +++ b/vendor/github.com/go-toolsmith/astfmt/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2018 go-toolsmith + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/github.com/go-toolsmith/astfmt/README.md b/vendor/github.com/go-toolsmith/astfmt/README.md new file mode 100644 index 000000000..954c92bf4 --- /dev/null +++ b/vendor/github.com/go-toolsmith/astfmt/README.md @@ -0,0 +1,39 @@ +[![Go Report Card](https://goreportcard.com/badge/github.com/go-toolsmith/strparse)](https://goreportcard.com/report/github.com/go-toolsmith/strparse) +[![GoDoc](https://godoc.org/github.com/go-toolsmith/strparse?status.svg)](https://godoc.org/github.com/go-toolsmith/strparse) + + +# astfmt + +Package astfmt implements ast.Node formatting with fmt-like API. 
+ +## Installation + +```bash +go get github.com/go-toolsmith/astfmt +``` + +## Example + +```go +package main + +import ( + "go/token" + "os" + + "github.com/go-toolsmith/astfmt" + "github.com/go-toolsmith/strparse" +) + +func Example() { + x := strparse.Expr(`foo(bar(baz(1+2)))`) + // astfmt functions add %s support for ast.Node arguments. + astfmt.Println(x) // => foo(bar(baz(1 + 2))) + astfmt.Fprintf(os.Stdout, "node=%s\n", x) // => node=foo(bar(baz(1 + 2))) + + // Can use specific file set with printer. + fset := token.NewFileSet() // Suppose this fset is used when parsing + pp := astfmt.NewPrinter(fset) + pp.Println(x) // => foo(bar(baz(1 + 2))) +} +``` diff --git a/vendor/github.com/go-toolsmith/astfmt/astfmt.go b/vendor/github.com/go-toolsmith/astfmt/astfmt.go new file mode 100644 index 000000000..ca993e033 --- /dev/null +++ b/vendor/github.com/go-toolsmith/astfmt/astfmt.go @@ -0,0 +1,111 @@ +// Package astfmt implements `ast.Node` formatting with fmt-like API. +package astfmt + +import ( + "bytes" + "fmt" + "go/ast" + "go/printer" + "go/token" + "io" +) + +// Println calls fmt.Println with additional support of %s format +// for ast.Node arguments. +// +// Uses empty file set for AST printing. +func Println(args ...interface{}) error { + return defaultPrinter.Println(args...) +} + +// Fprintf calls fmt.Fprintf with additional support of %s format +// for ast.Node arguments. +// +// Uses empty file set for AST printing. +func Fprintf(w io.Writer, format string, args ...interface{}) error { + return defaultPrinter.Fprintf(w, format, args...) +} + +// Sprintf calls fmt.Sprintf with additional support of %s format +// for ast.Node arguments. +// +// Uses empty file set for AST printing. +func Sprintf(format string, args ...interface{}) string { + return defaultPrinter.Sprintf(format, args...) +} + +// Sprint calls fmt.Sprint with additional support of %s format +// for ast.Node arguments. +// +// Uses empty file set for AST printing. +func Sprint(args ...interface{}) string { + return defaultPrinter.Sprint(args...) +} + +// NewPrinter returns printer that uses bound file set when printing AST nodes. +func NewPrinter(fset *token.FileSet) *Printer { + return &Printer{fset: fset} +} + +// Printer provides API close to fmt package for printing AST nodes. +// Unlike freestanding functions from this package, it makes it possible +// to associate appropriate file set for better output. +type Printer struct { + fset *token.FileSet +} + +// Println printer method is like Println function, but uses bound file set when printing. +func (p *Printer) Println(args ...interface{}) error { + _, err := fmt.Println(wrapArgs(p.fset, args)...) + return err +} + +// Fprintf printer method is like Fprintf function, but uses bound file set when printing. +func (p *Printer) Fprintf(w io.Writer, format string, args ...interface{}) error { + _, err := fmt.Fprintf(w, format, wrapArgs(p.fset, args)...) + return err +} + +// Sprintf printer method is like Sprintf function, but uses bound file set when printing. +func (p *Printer) Sprintf(format string, args ...interface{}) string { + return fmt.Sprintf(format, wrapArgs(p.fset, args)...) +} + +// Sprint printer method is like Sprint function, but uses bound file set when printing. +func (p *Printer) Sprint(args ...interface{}) string { + return fmt.Sprint(wrapArgs(p.fset, args)...) +} + +// defaultPrinter is used in printing functions like Println. +// Uses empty file set. 
+var defaultPrinter = NewPrinter(token.NewFileSet()) + +// wrapArgs returns arguments slice with every ast.Node element +// replaced with fmtNode wrapper that supports additional formatting. +func wrapArgs(fset *token.FileSet, args []interface{}) []interface{} { + for i := range args { + if x, ok := args[i].(ast.Node); ok { + args[i] = fmtNode{fset: fset, node: x} + } + } + return args +} + +type fmtNode struct { + fset *token.FileSet + node ast.Node +} + +func (n fmtNode) String() string { + var buf bytes.Buffer + if err := printer.Fprint(&buf, n.fset, n.node); err != nil { + return fmt.Sprintf("%%!s(ast.Node=%s)", err) + } + return buf.String() +} + +func (n fmtNode) GoString() string { + var buf bytes.Buffer + fmt.Fprintf(&buf, "%#v", n.node) + return buf.String() +} diff --git a/vendor/github.com/go-toolsmith/astfmt/go.mod b/vendor/github.com/go-toolsmith/astfmt/go.mod new file mode 100644 index 000000000..d23db1566 --- /dev/null +++ b/vendor/github.com/go-toolsmith/astfmt/go.mod @@ -0,0 +1,6 @@ +module github.com/go-toolsmith/astfmt + +require ( + github.com/go-toolsmith/astequal v1.0.0 // indirect + github.com/go-toolsmith/strparse v1.0.0 +) diff --git a/vendor/github.com/go-toolsmith/astfmt/go.sum b/vendor/github.com/go-toolsmith/astfmt/go.sum new file mode 100644 index 000000000..aa0857030 --- /dev/null +++ b/vendor/github.com/go-toolsmith/astfmt/go.sum @@ -0,0 +1,4 @@ +github.com/go-toolsmith/astequal v1.0.0 h1:4zxD8j3JRFNyLN46lodQuqz3xdKSrur7U/sr0SDS/gQ= +github.com/go-toolsmith/astequal v1.0.0/go.mod h1:H+xSiq0+LtiDC11+h1G32h7Of5O3CYFJ99GVbS5lDKY= +github.com/go-toolsmith/strparse v1.0.0 h1:Vcw78DnpCAKlM20kSbAyO4mPfJn/lyYA4BJUDxe2Jb4= +github.com/go-toolsmith/strparse v1.0.0/go.mod h1:YI2nUKP9YGZnL/L1/DLFBfixrcjslWct4wyljWhSRy8= diff --git a/vendor/github.com/go-toolsmith/astp/.gitignore b/vendor/github.com/go-toolsmith/astp/.gitignore new file mode 100644 index 000000000..1f6187ecd --- /dev/null +++ b/vendor/github.com/go-toolsmith/astp/.gitignore @@ -0,0 +1,4 @@ +bin +pkg +src/main +tmp \ No newline at end of file diff --git a/vendor/github.com/go-toolsmith/astp/.travis.yml b/vendor/github.com/go-toolsmith/astp/.travis.yml new file mode 100644 index 000000000..8994d395c --- /dev/null +++ b/vendor/github.com/go-toolsmith/astp/.travis.yml @@ -0,0 +1,9 @@ +language: go +go: + - 1.x +install: + - # Prevent default install action "go get -t -v ./...". +script: + - go get -t -v ./... + - go tool vet . + - go test -v -race ./... \ No newline at end of file diff --git a/vendor/github.com/go-toolsmith/astp/LICENSE b/vendor/github.com/go-toolsmith/astp/LICENSE new file mode 100644 index 000000000..eef17180f --- /dev/null +++ b/vendor/github.com/go-toolsmith/astp/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2018 go-toolsmith + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/github.com/go-toolsmith/astp/README.md b/vendor/github.com/go-toolsmith/astp/README.md new file mode 100644 index 000000000..7313c6ab8 --- /dev/null +++ b/vendor/github.com/go-toolsmith/astp/README.md @@ -0,0 +1,39 @@ +[![Go Report Card](https://goreportcard.com/badge/github.com/go-toolsmith/astp)](https://goreportcard.com/report/github.com/go-toolsmith/astp) +[![GoDoc](https://godoc.org/github.com/go-toolsmith/astp?status.svg)](https://godoc.org/github.com/go-toolsmith/astp) +[![Build Status](https://travis-ci.org/go-toolsmith/astp.svg?branch=master)](https://travis-ci.org/go-toolsmith/astp) + + +# astp + +Package astp provides AST predicates. + +## Installation: + +```bash +go get github.com/go-toolsmith/astp +``` + +## Example + +```go +package main + +import ( + "fmt" + + "github.com/go-toolsmith/astp" + "github.com/go-toolsmith/strparse" +) + +func main() { + if astp.IsIdent(strparse.Expr(`x`)) { + fmt.Println("ident") + } + if astp.IsBlockStmt(strparse.Stmt(`{f()}`)) { + fmt.Println("block stmt") + } + if astp.IsGenDecl(strparse.Decl(`var x int = 10`)) { + fmt.Println("gen decl") + } +} +``` diff --git a/vendor/github.com/go-toolsmith/astp/decl.go b/vendor/github.com/go-toolsmith/astp/decl.go new file mode 100644 index 000000000..4654ad95c --- /dev/null +++ b/vendor/github.com/go-toolsmith/astp/decl.go @@ -0,0 +1,39 @@ +package astp + +import "go/ast" + +// IsDecl reports whether a node is a ast.Decl. +func IsDecl(node ast.Node) bool { + _, ok := node.(ast.Decl) + return ok +} + +// IsFuncDecl reports whether a given ast.Node is a function declaration (*ast.FuncDecl). +func IsFuncDecl(node ast.Node) bool { + _, ok := node.(*ast.FuncDecl) + return ok +} + +// IsGenDecl reports whether a given ast.Node is a generic declaration (*ast.GenDecl). +func IsGenDecl(node ast.Node) bool { + _, ok := node.(*ast.GenDecl) + return ok +} + +// IsImportSpec reports whether a given ast.Node is an import declaration (*ast.ImportSpec). +func IsImportSpec(node ast.Node) bool { + _, ok := node.(*ast.ImportSpec) + return ok +} + +// IsValueSpec reports whether a given ast.Node is a value declaration (*ast.ValueSpec). +func IsValueSpec(node ast.Node) bool { + _, ok := node.(*ast.ValueSpec) + return ok +} + +// IsTypeSpec reports whether a given ast.Node is a type declaration (*ast.TypeSpec). +func IsTypeSpec(node ast.Node) bool { + _, ok := node.(*ast.TypeSpec) + return ok +} diff --git a/vendor/github.com/go-toolsmith/astp/expr.go b/vendor/github.com/go-toolsmith/astp/expr.go new file mode 100644 index 000000000..adf9668ce --- /dev/null +++ b/vendor/github.com/go-toolsmith/astp/expr.go @@ -0,0 +1,141 @@ +package astp + +import "go/ast" + +// IsExpr reports whether a given ast.Node is an expression(ast.Expr). +func IsExpr(node ast.Node) bool { + _, ok := node.(ast.Expr) + return ok +} + +// IsBadExpr reports whether a given ast.Node is a bad expression (*ast.IsBadExpr). 
+func IsBadExpr(node ast.Node) bool { + _, ok := node.(*ast.BadExpr) + return ok +} + +// IsIdent reports whether a given ast.Node is an identifier (*ast.IsIdent). +func IsIdent(node ast.Node) bool { + _, ok := node.(*ast.Ident) + return ok +} + +// IsEllipsis reports whether a given ast.Node is an `...` (ellipsis) (*ast.IsEllipsis). +func IsEllipsis(node ast.Node) bool { + _, ok := node.(*ast.Ellipsis) + return ok +} + +// IsBasicLit reports whether a given ast.Node is a literal of basic type (*ast.IsBasicLit). +func IsBasicLit(node ast.Node) bool { + _, ok := node.(*ast.BasicLit) + return ok +} + +// IsFuncLit reports whether a given ast.Node is a function literal (*ast.IsFuncLit). +func IsFuncLit(node ast.Node) bool { + _, ok := node.(*ast.FuncLit) + return ok +} + +// IsCompositeLit reports whether a given ast.Node is a composite literal (*ast.IsCompositeLit). +func IsCompositeLit(node ast.Node) bool { + _, ok := node.(*ast.CompositeLit) + return ok +} + +// IsParenExpr reports whether a given ast.Node is a parenthesized expression (*ast.IsParenExpr). +func IsParenExpr(node ast.Node) bool { + _, ok := node.(*ast.ParenExpr) + return ok +} + +// IsSelectorExpr reports whether a given ast.Node is a selector expression (*ast.IsSelectorExpr). +func IsSelectorExpr(node ast.Node) bool { + _, ok := node.(*ast.SelectorExpr) + return ok +} + +// IsIndexExpr reports whether a given ast.Node is an index expression (*ast.IsIndexExpr). +func IsIndexExpr(node ast.Node) bool { + _, ok := node.(*ast.IndexExpr) + return ok +} + +// IsSliceExpr reports whether a given ast.Node is a slice expression (*ast.IsSliceExpr). +func IsSliceExpr(node ast.Node) bool { + _, ok := node.(*ast.SliceExpr) + return ok +} + +// IsTypeAssertExpr reports whether a given ast.Node is a type assert expression (*ast.IsTypeAssertExpr). +func IsTypeAssertExpr(node ast.Node) bool { + _, ok := node.(*ast.TypeAssertExpr) + return ok +} + +// IsCallExpr reports whether a given ast.Node is an expression followed by an argument list (*ast.IsCallExpr). +func IsCallExpr(node ast.Node) bool { + _, ok := node.(*ast.CallExpr) + return ok +} + +// IsStarExpr reports whether a given ast.Node is a star expression(unary "*" or apointer) (*ast.IsStarExpr) +func IsStarExpr(node ast.Node) bool { + _, ok := node.(*ast.StarExpr) + return ok +} + +// IsUnaryExpr reports whether a given ast.Node is a unary expression (*ast.IsUnaryExpr). +func IsUnaryExpr(node ast.Node) bool { + _, ok := node.(*ast.UnaryExpr) + return ok +} + +// IsBinaryExpr reports whether a given ast.Node is a binary expression (*ast.IsBinaryExpr). +func IsBinaryExpr(node ast.Node) bool { + _, ok := node.(*ast.BinaryExpr) + return ok +} + +// IsKeyValueExpr reports whether a given ast.Node is a (key:value) pair (*ast.IsKeyValueExpr). +func IsKeyValueExpr(node ast.Node) bool { + _, ok := node.(*ast.KeyValueExpr) + return ok +} + +// IsArrayType reports whether a given ast.Node is an array or slice type (*ast.IsArrayType). +func IsArrayType(node ast.Node) bool { + _, ok := node.(*ast.ArrayType) + return ok +} + +// IsStructType reports whether a given ast.Node is a struct type (*ast.IsStructType). +func IsStructType(node ast.Node) bool { + _, ok := node.(*ast.StructType) + return ok +} + +// IsFuncType reports whether a given ast.Node is a function type (*ast.IsFuncType). +func IsFuncType(node ast.Node) bool { + _, ok := node.(*ast.FuncType) + return ok +} + +// IsInterfaceType reports whether a given ast.Node is an interface type (*ast.IsInterfaceType). 
+func IsInterfaceType(node ast.Node) bool { + _, ok := node.(*ast.InterfaceType) + return ok +} + +// IsMapType reports whether a given ast.Node is a map type (*ast.IsMapType). +func IsMapType(node ast.Node) bool { + _, ok := node.(*ast.MapType) + return ok +} + +// IsChanType reports whether a given ast.Node is a channel type (*ast.IsChanType). +func IsChanType(node ast.Node) bool { + _, ok := node.(*ast.ChanType) + return ok +} diff --git a/vendor/github.com/go-toolsmith/astp/go.mod b/vendor/github.com/go-toolsmith/astp/go.mod new file mode 100644 index 000000000..023a09392 --- /dev/null +++ b/vendor/github.com/go-toolsmith/astp/go.mod @@ -0,0 +1,6 @@ +module github.com/go-toolsmith/astp + +require ( + github.com/go-toolsmith/astequal v1.0.0 // indirect + github.com/go-toolsmith/strparse v1.0.0 +) diff --git a/vendor/github.com/go-toolsmith/astp/go.sum b/vendor/github.com/go-toolsmith/astp/go.sum new file mode 100644 index 000000000..aa0857030 --- /dev/null +++ b/vendor/github.com/go-toolsmith/astp/go.sum @@ -0,0 +1,4 @@ +github.com/go-toolsmith/astequal v1.0.0 h1:4zxD8j3JRFNyLN46lodQuqz3xdKSrur7U/sr0SDS/gQ= +github.com/go-toolsmith/astequal v1.0.0/go.mod h1:H+xSiq0+LtiDC11+h1G32h7Of5O3CYFJ99GVbS5lDKY= +github.com/go-toolsmith/strparse v1.0.0 h1:Vcw78DnpCAKlM20kSbAyO4mPfJn/lyYA4BJUDxe2Jb4= +github.com/go-toolsmith/strparse v1.0.0/go.mod h1:YI2nUKP9YGZnL/L1/DLFBfixrcjslWct4wyljWhSRy8= diff --git a/vendor/github.com/go-toolsmith/astp/stmt.go b/vendor/github.com/go-toolsmith/astp/stmt.go new file mode 100644 index 000000000..19645d212 --- /dev/null +++ b/vendor/github.com/go-toolsmith/astp/stmt.go @@ -0,0 +1,135 @@ +package astp + +import "go/ast" + +// IsStmt reports whether a given ast.Node is a statement(ast.Stmt). +func IsStmt(node ast.Node) bool { + _, ok := node.(ast.Stmt) + return ok +} + +// IsBadStmt reports whether a given ast.Node is a bad statement(*ast.BadStmt) +func IsBadStmt(node ast.Node) bool { + _, ok := node.(*ast.BadStmt) + return ok +} + +// IsDeclStmt reports whether a given ast.Node is a declaration statement(*ast.DeclStmt) +func IsDeclStmt(node ast.Node) bool { + _, ok := node.(*ast.DeclStmt) + return ok +} + +// IsEmptyStmt reports whether a given ast.Node is an empty statement(*ast.EmptyStmt) +func IsEmptyStmt(node ast.Node) bool { + _, ok := node.(*ast.EmptyStmt) + return ok +} + +// IsLabeledStmt reports whether a given ast.Node is a label statement(*ast.LabeledStmt) +func IsLabeledStmt(node ast.Node) bool { + _, ok := node.(*ast.LabeledStmt) + return ok +} + +// IsExprStmt reports whether a given ast.Node is an expression statement(*ast.ExprStmt) +func IsExprStmt(node ast.Node) bool { + _, ok := node.(*ast.ExprStmt) + return ok +} + +// IsSendStmt reports whether a given ast.Node is a send to chan statement(*ast.SendStmt) +func IsSendStmt(node ast.Node) bool { + _, ok := node.(*ast.SendStmt) + return ok +} + +// IsIncDecStmt reports whether a given ast.Node is a increment/decrement statement(*ast.IncDecStmt) +func IsIncDecStmt(node ast.Node) bool { + _, ok := node.(*ast.IncDecStmt) + return ok +} + +// IsAssignStmt reports whether a given ast.Node is an assignment statement(*ast.AssignStmt) +func IsAssignStmt(node ast.Node) bool { + _, ok := node.(*ast.AssignStmt) + return ok +} + +// IsGoStmt reports whether a given ast.Node is a go statement(*ast.GoStmt) +func IsGoStmt(node ast.Node) bool { + _, ok := node.(*ast.GoStmt) + return ok +} + +// IsDeferStmt reports whether a given ast.Node is a defer statement(*ast.DeferStmt) +func IsDeferStmt(node ast.Node) bool { 
+ _, ok := node.(*ast.DeferStmt) + return ok +} + +// IsReturnStmt reports whether a given ast.Node is a return statement(*ast.ReturnStmt) +func IsReturnStmt(node ast.Node) bool { + _, ok := node.(*ast.ReturnStmt) + return ok +} + +// IsBranchStmt reports whether a given ast.Node is a branch(goto/continue/break/fallthrough)statement(*ast.BranchStmt) +func IsBranchStmt(node ast.Node) bool { + _, ok := node.(*ast.BranchStmt) + return ok +} + +// IsBlockStmt reports whether a given ast.Node is a block statement(*ast.BlockStmt) +func IsBlockStmt(node ast.Node) bool { + _, ok := node.(*ast.BlockStmt) + return ok +} + +// IsIfStmt reports whether a given ast.Node is an if statement(*ast.IfStmt) +func IsIfStmt(node ast.Node) bool { + _, ok := node.(*ast.IfStmt) + return ok +} + +// IsCaseClause reports whether a given ast.Node is a case statement(*ast.CaseClause) +func IsCaseClause(node ast.Node) bool { + _, ok := node.(*ast.CaseClause) + return ok +} + +// IsSwitchStmt reports whether a given ast.Node is a switch statement(*ast.SwitchStmt) +func IsSwitchStmt(node ast.Node) bool { + _, ok := node.(*ast.SwitchStmt) + return ok +} + +// IsTypeSwitchStmt reports whether a given ast.Node is a type switch statement(*ast.TypeSwitchStmt) +func IsTypeSwitchStmt(node ast.Node) bool { + _, ok := node.(*ast.TypeSwitchStmt) + return ok +} + +// IsCommClause reports whether a given ast.Node is a select statement(*ast.CommClause) +func IsCommClause(node ast.Node) bool { + _, ok := node.(*ast.CommClause) + return ok +} + +// IsSelectStmt reports whether a given ast.Node is a selection statement(*ast.SelectStmt) +func IsSelectStmt(node ast.Node) bool { + _, ok := node.(*ast.SelectStmt) + return ok +} + +// IsForStmt reports whether a given ast.Node is a for statement(*ast.ForStmt) +func IsForStmt(node ast.Node) bool { + _, ok := node.(*ast.ForStmt) + return ok +} + +// IsRangeStmt reports whether a given ast.Node is a range statement(*ast.RangeStmt) +func IsRangeStmt(node ast.Node) bool { + _, ok := node.(*ast.RangeStmt) + return ok +} diff --git a/vendor/github.com/go-toolsmith/strparse/.travis.yml b/vendor/github.com/go-toolsmith/strparse/.travis.yml new file mode 100644 index 000000000..8994d395c --- /dev/null +++ b/vendor/github.com/go-toolsmith/strparse/.travis.yml @@ -0,0 +1,9 @@ +language: go +go: + - 1.x +install: + - # Prevent default install action "go get -t -v ./...". +script: + - go get -t -v ./... + - go tool vet . + - go test -v -race ./... \ No newline at end of file diff --git a/vendor/github.com/go-toolsmith/strparse/LICENSE b/vendor/github.com/go-toolsmith/strparse/LICENSE new file mode 100644 index 000000000..eef17180f --- /dev/null +++ b/vendor/github.com/go-toolsmith/strparse/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2018 go-toolsmith + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/github.com/go-toolsmith/strparse/README.md b/vendor/github.com/go-toolsmith/strparse/README.md new file mode 100644 index 000000000..ae80a5398 --- /dev/null +++ b/vendor/github.com/go-toolsmith/strparse/README.md @@ -0,0 +1,34 @@ +[![Go Report Card](https://goreportcard.com/badge/github.com/go-toolsmith/strparse)](https://goreportcard.com/report/github.com/go-toolsmith/strparse) +[![GoDoc](https://godoc.org/github.com/go-toolsmith/strparse?status.svg)](https://godoc.org/github.com/go-toolsmith/strparse) +[![Build Status](https://travis-ci.org/go-toolsmith/strparse.svg?branch=master)](https://travis-ci.org/go-toolsmith/strparse) + + +# strparse + +Package strparse provides convenience wrappers around `go/parser` for simple +expression, statement and declaretion parsing from string. + +## Installation + +```bash +go get github.com/go-toolsmith/strparse +``` + +## Example + +```go +package main + +import ( + "go-toolsmith/astequal" + "go-toolsmith/strparse" +) + +func main() { + // Comparing AST strings for equallity (note different spacing): + x := strparse.Expr(`1 + f(v[0].X)`) + y := strparse.Expr(` 1+f( v[0].X ) `) + fmt.Println(astequal.Expr(x, y)) // => true +} + +``` diff --git a/vendor/github.com/go-toolsmith/strparse/go.mod b/vendor/github.com/go-toolsmith/strparse/go.mod new file mode 100644 index 000000000..ed9d88136 --- /dev/null +++ b/vendor/github.com/go-toolsmith/strparse/go.mod @@ -0,0 +1 @@ +module github.com/go-toolsmith/strparse diff --git a/vendor/github.com/go-toolsmith/strparse/strparse.go b/vendor/github.com/go-toolsmith/strparse/strparse.go new file mode 100644 index 000000000..894c7ebac --- /dev/null +++ b/vendor/github.com/go-toolsmith/strparse/strparse.go @@ -0,0 +1,59 @@ +// Package strparse provides convenience wrappers around `go/parser` for simple +// expression, statement and declaration parsing from string. +// +// Can be used to construct AST nodes using source syntax. +package strparse + +import ( + "go/ast" + "go/parser" + "go/token" +) + +var ( + // BadExpr is returned as a parse result for malformed expressions. + // Should be treated as constant or readonly variable. + BadExpr = &ast.BadExpr{} + + // BadStmt is returned as a parse result for malformed statmenents. + // Should be treated as constant or readonly variable. + BadStmt = &ast.BadStmt{} + + // BadDecl is returned as a parse result for malformed declarations. + // Should be treated as constant or readonly variable. + BadDecl = &ast.BadDecl{} +) + +// Expr parses single expression node from s. +// In case of parse error, BadExpr is returned. +func Expr(s string) ast.Expr { + node, err := parser.ParseExpr(s) + if err != nil { + return BadExpr + } + return node +} + +// Stmt parses single statement node from s. +// In case of parse error, BadStmt is returned. 
+func Stmt(s string) ast.Stmt { + node, err := parser.ParseFile(token.NewFileSet(), "", "package main;func main() {"+s+"}", 0) + if err != nil { + return BadStmt + } + fn := node.Decls[0].(*ast.FuncDecl) + if len(fn.Body.List) != 1 { + return BadStmt + } + return fn.Body.List[0] +} + +// Decl parses single declaration node from s. +// In case of parse error, BadDecl is returned. +func Decl(s string) ast.Decl { + node, err := parser.ParseFile(token.NewFileSet(), "", "package main;"+s, 0) + if err != nil || len(node.Decls) != 1 { + return BadDecl + } + return node.Decls[0] +} diff --git a/vendor/github.com/go-toolsmith/typep/.travis.yml b/vendor/github.com/go-toolsmith/typep/.travis.yml new file mode 100644 index 000000000..d3ff3cca8 --- /dev/null +++ b/vendor/github.com/go-toolsmith/typep/.travis.yml @@ -0,0 +1,9 @@ +language: go +go: + - 1.x +install: + - # Prevent default install action "go get -t -v ./...". +script: + - go get -t -v ./... + - go vet ./... + - go test -v -race ./... diff --git a/vendor/github.com/go-toolsmith/typep/LICENSE b/vendor/github.com/go-toolsmith/typep/LICENSE new file mode 100644 index 000000000..eef17180f --- /dev/null +++ b/vendor/github.com/go-toolsmith/typep/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2018 go-toolsmith + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/github.com/go-toolsmith/typep/README.md b/vendor/github.com/go-toolsmith/typep/README.md new file mode 100644 index 000000000..f7979148f --- /dev/null +++ b/vendor/github.com/go-toolsmith/typep/README.md @@ -0,0 +1,37 @@ +[![Go Report Card](https://goreportcard.com/badge/github.com/go-toolsmith/typep)](https://goreportcard.com/report/github.com/go-toolsmith/typep) +[![GoDoc](https://godoc.org/github.com/go-toolsmith/typep?status.svg)](https://godoc.org/github.com/go-toolsmith/typep) +[![Build Status](https://travis-ci.org/go-toolsmith/typep.svg?branch=master)](https://travis-ci.org/go-toolsmith/typep) + +# typep + +Package typep provides type predicates. 
+ +## Installation: + +```bash +go get -v github.com/go-toolsmith/typep +``` + +## Example + +```go +package main + +import ( + "fmt" + + "github.com/go-toolsmith/typep" + "github.com/go-toolsmith/strparse" +) + +func main() { + floatTyp := types.Typ[types.Float32] + intTyp := types.Typ[types.Int] + ptr := types.NewPointer(intTyp) + arr := types.NewArray(intTyp, 64) + fmt.Println(typep.HasFloatProp(floatTyp)) // => true + fmt.Println(typep.HasFloatProp(intTyp)) // => false + fmt.Println(typep.IsPointer(ptr)) // => true + fmt.Println(typep.IsArray(arr)) // => true +} +``` diff --git a/vendor/github.com/go-toolsmith/typep/doc.go b/vendor/github.com/go-toolsmith/typep/doc.go new file mode 100644 index 000000000..990bc402c --- /dev/null +++ b/vendor/github.com/go-toolsmith/typep/doc.go @@ -0,0 +1,2 @@ +// Package typep provides type predicates. +package typep diff --git a/vendor/github.com/go-toolsmith/typep/go.mod b/vendor/github.com/go-toolsmith/typep/go.mod new file mode 100644 index 000000000..197a57d30 --- /dev/null +++ b/vendor/github.com/go-toolsmith/typep/go.mod @@ -0,0 +1 @@ +module github.com/go-toolsmith/typep diff --git a/vendor/github.com/go-toolsmith/typep/predicates.go b/vendor/github.com/go-toolsmith/typep/predicates.go new file mode 100644 index 000000000..b07325a72 --- /dev/null +++ b/vendor/github.com/go-toolsmith/typep/predicates.go @@ -0,0 +1,36 @@ +package typep + +import ( + "go/ast" + "go/types" +) + +// IsTypeExpr reports whether x represents a type expression. +// +// Type expression does not evaluate to any run time value, +// but rather describes a type that is used inside Go expression. +// +// For example, (*T)(v) is a CallExpr that "calls" (*T). +// (*T) is a type expression that tells Go compiler type v should be converted to. +func IsTypeExpr(info *types.Info, x ast.Expr) bool { + switch x := x.(type) { + case *ast.StarExpr: + return IsTypeExpr(info, x.X) + case *ast.ParenExpr: + return IsTypeExpr(info, x.X) + case *ast.SelectorExpr: + return IsTypeExpr(info, x.Sel) + + case *ast.Ident: + // Identifier may be a type expression if object + // it reffers to is a type name. + _, ok := info.ObjectOf(x).(*types.TypeName) + return ok + + case *ast.FuncType, *ast.StructType, *ast.InterfaceType, *ast.ArrayType, *ast.MapType, *ast.ChanType: + return true + + default: + return false + } +} diff --git a/vendor/github.com/go-toolsmith/typep/safeExpr.go b/vendor/github.com/go-toolsmith/typep/safeExpr.go new file mode 100644 index 000000000..d5835d97b --- /dev/null +++ b/vendor/github.com/go-toolsmith/typep/safeExpr.go @@ -0,0 +1,73 @@ +package typep + +import ( + "go/ast" + "go/token" + "go/types" +) + +// SideEffectFree reports whether expr is softly safe expression and contains +// no significant side-effects. As opposed to strictly safe expressions, +// soft safe expressions permit some forms of side-effects, like +// panic possibility during indexing or nil pointer dereference. +// +// Uses types info to determine type conversion expressions that +// are the only permitted kinds of call expressions. +// Note that is does not check whether called function really +// has any side effects. The analysis is very conservative. +func SideEffectFree(info *types.Info, expr ast.Expr) bool { + // This list switch is not comprehensive and uses + // whitelist to be on the conservative side. + // Can be extended as needed. 
+ + if expr == nil { + return true + } + + switch expr := expr.(type) { + case *ast.StarExpr: + return SideEffectFree(info, expr.X) + case *ast.BinaryExpr: + return SideEffectFree(info, expr.X) && + SideEffectFree(info, expr.Y) + case *ast.UnaryExpr: + return expr.Op != token.ARROW && + SideEffectFree(info, expr.X) + case *ast.BasicLit, *ast.Ident: + return true + case *ast.SliceExpr: + return SideEffectFree(info, expr.X) && + SideEffectFree(info, expr.Low) && + SideEffectFree(info, expr.High) && + SideEffectFree(info, expr.Max) + case *ast.IndexExpr: + return SideEffectFree(info, expr.X) && + SideEffectFree(info, expr.Index) + case *ast.SelectorExpr: + return SideEffectFree(info, expr.X) + case *ast.ParenExpr: + return SideEffectFree(info, expr.X) + case *ast.TypeAssertExpr: + return SideEffectFree(info, expr.X) + case *ast.CompositeLit: + return SideEffectFreeList(info, expr.Elts) + case *ast.CallExpr: + return IsTypeExpr(info, expr.Fun) && + SideEffectFreeList(info, expr.Args) + + default: + return false + } +} + +// SideEffectFreeList reports whether every expr in list is safe. +// +// See SideEffectFree. +func SideEffectFreeList(info *types.Info, list []ast.Expr) bool { + for _, expr := range list { + if !SideEffectFree(info, expr) { + return false + } + } + return true +} diff --git a/vendor/github.com/go-toolsmith/typep/simplePredicates.go b/vendor/github.com/go-toolsmith/typep/simplePredicates.go new file mode 100644 index 000000000..3bc9c29c8 --- /dev/null +++ b/vendor/github.com/go-toolsmith/typep/simplePredicates.go @@ -0,0 +1,359 @@ +// Code generated by simplePredicates_generate.go; DO NOT EDIT + +package typep + +import ( + "go/types" +) + +// Simple 1-to-1 type predicates via type assertion. + +// IsBasic reports whether a given type has *types.Basic type. +func IsBasic(typ types.Type) bool { + _, ok := typ.(*types.Basic) + return ok +} + +// IsArray reports whether a given type has *types.Array type. +func IsArray(typ types.Type) bool { + _, ok := typ.(*types.Array) + return ok +} + +// IsSlice reports whether a given type has *types.Slice type. +func IsSlice(typ types.Type) bool { + _, ok := typ.(*types.Slice) + return ok +} + +// IsStruct reports whether a given type has *types.Struct type. +func IsStruct(typ types.Type) bool { + _, ok := typ.(*types.Struct) + return ok +} + +// IsPointer reports whether a given type has *types.Pointer type. +func IsPointer(typ types.Type) bool { + _, ok := typ.(*types.Pointer) + return ok +} + +// IsTuple reports whether a given type has *types.Tuple type. +func IsTuple(typ types.Type) bool { + _, ok := typ.(*types.Tuple) + return ok +} + +// IsSignature reports whether a given type has *types.Signature type. +func IsSignature(typ types.Type) bool { + _, ok := typ.(*types.Signature) + return ok +} + +// IsInterface reports whether a given type has *types.Interface type. +func IsInterface(typ types.Type) bool { + _, ok := typ.(*types.Interface) + return ok +} + +// IsMap reports whether a given type has *types.Map type. +func IsMap(typ types.Type) bool { + _, ok := typ.(*types.Map) + return ok +} + +// IsChan reports whether a given type has *types.Chan type. +func IsChan(typ types.Type) bool { + _, ok := typ.(*types.Chan) + return ok +} + +// IsNamed reports whether a given type has *types.Named type. +func IsNamed(typ types.Type) bool { + _, ok := typ.(*types.Named) + return ok +} + +// *types.Basic predicates for the info field. + +// HasBooleanProp reports whether typ is a *types.Basic has IsBoolean property. 
+func HasBooleanProp(typ types.Type) bool { + if typ, ok := typ.(*types.Basic); ok { + return typ.Info()&types.IsBoolean != 0 + } + return false +} + +// HasIntegerProp reports whether typ is a *types.Basic has IsInteger property. +func HasIntegerProp(typ types.Type) bool { + if typ, ok := typ.(*types.Basic); ok { + return typ.Info()&types.IsInteger != 0 + } + return false +} + +// HasUnsignedProp reports whether typ is a *types.Basic has IsUnsigned property. +func HasUnsignedProp(typ types.Type) bool { + if typ, ok := typ.(*types.Basic); ok { + return typ.Info()&types.IsUnsigned != 0 + } + return false +} + +// HasFloatProp reports whether typ is a *types.Basic has IsFloat property. +func HasFloatProp(typ types.Type) bool { + if typ, ok := typ.(*types.Basic); ok { + return typ.Info()&types.IsFloat != 0 + } + return false +} + +// HasComplexProp reports whether typ is a *types.Basic has IsComplex property. +func HasComplexProp(typ types.Type) bool { + if typ, ok := typ.(*types.Basic); ok { + return typ.Info()&types.IsComplex != 0 + } + return false +} + +// HasStringProp reports whether typ is a *types.Basic has IsString property. +func HasStringProp(typ types.Type) bool { + if typ, ok := typ.(*types.Basic); ok { + return typ.Info()&types.IsString != 0 + } + return false +} + +// HasUntypedProp reports whether typ is a *types.Basic has IsUntyped property. +func HasUntypedProp(typ types.Type) bool { + if typ, ok := typ.(*types.Basic); ok { + return typ.Info()&types.IsUntyped != 0 + } + return false +} + +// HasOrderedProp reports whether typ is a *types.Basic has IsOrdered property. +func HasOrderedProp(typ types.Type) bool { + if typ, ok := typ.(*types.Basic); ok { + return typ.Info()&types.IsOrdered != 0 + } + return false +} + +// HasNumericProp reports whether typ is a *types.Basic has IsNumeric property. +func HasNumericProp(typ types.Type) bool { + if typ, ok := typ.(*types.Basic); ok { + return typ.Info()&types.IsNumeric != 0 + } + return false +} + +// HasConstTypeProp reports whether typ is a *types.Basic has IsConstType property. +func HasConstTypeProp(typ types.Type) bool { + if typ, ok := typ.(*types.Basic); ok { + return typ.Info()&types.IsConstType != 0 + } + return false +} + +// *types.Basic predicates for the kind field. + +// HasBoolKind reports whether typ is a *types.Basic with its kind set to types.Bool. +func HasBoolKind(typ types.Type) bool { + if typ, ok := typ.(*types.Basic); ok { + return typ.Kind() == types.Bool + } + return false +} + +// HasIntKind reports whether typ is a *types.Basic with its kind set to types.Int. +func HasIntKind(typ types.Type) bool { + if typ, ok := typ.(*types.Basic); ok { + return typ.Kind() == types.Int + } + return false +} + +// HasInt8Kind reports whether typ is a *types.Basic with its kind set to types.Int8. +func HasInt8Kind(typ types.Type) bool { + if typ, ok := typ.(*types.Basic); ok { + return typ.Kind() == types.Int8 + } + return false +} + +// HasInt16Kind reports whether typ is a *types.Basic with its kind set to types.Int16. +func HasInt16Kind(typ types.Type) bool { + if typ, ok := typ.(*types.Basic); ok { + return typ.Kind() == types.Int16 + } + return false +} + +// HasInt32Kind reports whether typ is a *types.Basic with its kind set to types.Int32. +func HasInt32Kind(typ types.Type) bool { + if typ, ok := typ.(*types.Basic); ok { + return typ.Kind() == types.Int32 + } + return false +} + +// HasInt64Kind reports whether typ is a *types.Basic with its kind set to types.Int64. 
+func HasInt64Kind(typ types.Type) bool { + if typ, ok := typ.(*types.Basic); ok { + return typ.Kind() == types.Int64 + } + return false +} + +// HasUintKind reports whether typ is a *types.Basic with its kind set to types.Uint. +func HasUintKind(typ types.Type) bool { + if typ, ok := typ.(*types.Basic); ok { + return typ.Kind() == types.Uint + } + return false +} + +// HasUint8Kind reports whether typ is a *types.Basic with its kind set to types.Uint8. +func HasUint8Kind(typ types.Type) bool { + if typ, ok := typ.(*types.Basic); ok { + return typ.Kind() == types.Uint8 + } + return false +} + +// HasUint16Kind reports whether typ is a *types.Basic with its kind set to types.Uint16. +func HasUint16Kind(typ types.Type) bool { + if typ, ok := typ.(*types.Basic); ok { + return typ.Kind() == types.Uint16 + } + return false +} + +// HasUint32Kind reports whether typ is a *types.Basic with its kind set to types.Uint32. +func HasUint32Kind(typ types.Type) bool { + if typ, ok := typ.(*types.Basic); ok { + return typ.Kind() == types.Uint32 + } + return false +} + +// HasUint64Kind reports whether typ is a *types.Basic with its kind set to types.Uint64. +func HasUint64Kind(typ types.Type) bool { + if typ, ok := typ.(*types.Basic); ok { + return typ.Kind() == types.Uint64 + } + return false +} + +// HasUintptrKind reports whether typ is a *types.Basic with its kind set to types.Uintptr. +func HasUintptrKind(typ types.Type) bool { + if typ, ok := typ.(*types.Basic); ok { + return typ.Kind() == types.Uintptr + } + return false +} + +// HasFloat32Kind reports whether typ is a *types.Basic with its kind set to types.Float32. +func HasFloat32Kind(typ types.Type) bool { + if typ, ok := typ.(*types.Basic); ok { + return typ.Kind() == types.Float32 + } + return false +} + +// HasFloat64Kind reports whether typ is a *types.Basic with its kind set to types.Float64. +func HasFloat64Kind(typ types.Type) bool { + if typ, ok := typ.(*types.Basic); ok { + return typ.Kind() == types.Float64 + } + return false +} + +// HasComplex64Kind reports whether typ is a *types.Basic with its kind set to types.Complex64. +func HasComplex64Kind(typ types.Type) bool { + if typ, ok := typ.(*types.Basic); ok { + return typ.Kind() == types.Complex64 + } + return false +} + +// HasComplex128Kind reports whether typ is a *types.Basic with its kind set to types.Complex128. +func HasComplex128Kind(typ types.Type) bool { + if typ, ok := typ.(*types.Basic); ok { + return typ.Kind() == types.Complex128 + } + return false +} + +// HasStringKind reports whether typ is a *types.Basic with its kind set to types.String. +func HasStringKind(typ types.Type) bool { + if typ, ok := typ.(*types.Basic); ok { + return typ.Kind() == types.String + } + return false +} + +// HasUnsafePointerKind reports whether typ is a *types.Basic with its kind set to types.UnsafePointer. +func HasUnsafePointerKind(typ types.Type) bool { + if typ, ok := typ.(*types.Basic); ok { + return typ.Kind() == types.UnsafePointer + } + return false +} + +// HasUntypedBoolKind reports whether typ is a *types.Basic with its kind set to types.UntypedBool. +func HasUntypedBoolKind(typ types.Type) bool { + if typ, ok := typ.(*types.Basic); ok { + return typ.Kind() == types.UntypedBool + } + return false +} + +// HasUntypedIntKind reports whether typ is a *types.Basic with its kind set to types.UntypedInt. 
+func HasUntypedIntKind(typ types.Type) bool { + if typ, ok := typ.(*types.Basic); ok { + return typ.Kind() == types.UntypedInt + } + return false +} + +// HasUntypedRuneKind reports whether typ is a *types.Basic with its kind set to types.UntypedRune. +func HasUntypedRuneKind(typ types.Type) bool { + if typ, ok := typ.(*types.Basic); ok { + return typ.Kind() == types.UntypedRune + } + return false +} + +// HasUntypedFloatKind reports whether typ is a *types.Basic with its kind set to types.UntypedFloat. +func HasUntypedFloatKind(typ types.Type) bool { + if typ, ok := typ.(*types.Basic); ok { + return typ.Kind() == types.UntypedFloat + } + return false +} + +// HasUntypedComplexKind reports whether typ is a *types.Basic with its kind set to types.UntypedComplex. +func HasUntypedComplexKind(typ types.Type) bool { + if typ, ok := typ.(*types.Basic); ok { + return typ.Kind() == types.UntypedComplex + } + return false +} + +// HasUntypedStringKind reports whether typ is a *types.Basic with its kind set to types.UntypedString. +func HasUntypedStringKind(typ types.Type) bool { + if typ, ok := typ.(*types.Basic); ok { + return typ.Kind() == types.UntypedString + } + return false +} + +// HasUntypedNilKind reports whether typ is a *types.Basic with its kind set to types.UntypedNil. +func HasUntypedNilKind(typ types.Type) bool { + if typ, ok := typ.(*types.Basic); ok { + return typ.Kind() == types.UntypedNil + } + return false +} diff --git a/vendor/github.com/go-xmlfmt/xmlfmt/LICENSE b/vendor/github.com/go-xmlfmt/xmlfmt/LICENSE new file mode 100644 index 000000000..890776ab7 --- /dev/null +++ b/vendor/github.com/go-xmlfmt/xmlfmt/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2016 go-xmlfmt + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
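The `Has*Prop` and `Has*Kind` helpers vendored above are thin wrappers around `go/types`: assert the type to `*types.Basic`, then test either the `Info()` bit flags or the `Kind()` value. As a minimal sketch of how such a predicate gets used (illustrative only, not part of this patch; `hasFloatProp` below is a local stand-in that mirrors the vendored `HasFloatProp` rather than importing it):

```go
package main

import (
	"fmt"
	"go/types"
)

// hasFloatProp mirrors the vendored HasFloatProp helper: it reports whether
// typ is a *types.Basic whose Info() flags include types.IsFloat.
func hasFloatProp(typ types.Type) bool {
	if b, ok := typ.(*types.Basic); ok {
		return b.Info()&types.IsFloat != 0
	}
	return false
}

func main() {
	// types.Typ exposes the predeclared basic types, so the predicate can be
	// exercised without type-checking any source code.
	fmt.Println(hasFloatProp(types.Typ[types.Float64]))                 // true
	fmt.Println(hasFloatProp(types.Typ[types.String]))                  // false
	fmt.Println(hasFloatProp(types.NewSlice(types.Typ[types.Float64]))) // false: a slice is not a *types.Basic
}
```

Keeping the type assertion inside the helper is what lets callers pass any `types.Type` without first checking whether it is a basic type.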
diff --git a/vendor/github.com/go-xmlfmt/xmlfmt/README.md b/vendor/github.com/go-xmlfmt/xmlfmt/README.md new file mode 100644 index 000000000..4eb6d69a0 --- /dev/null +++ b/vendor/github.com/go-xmlfmt/xmlfmt/README.md @@ -0,0 +1,178 @@ +# Go XML Formatter + +[![MIT License](http://img.shields.io/badge/License-MIT-blue.svg)](LICENSE) +[![Go Doc](https://img.shields.io/badge/godoc-reference-4b68a3.svg)](https://godoc.org/github.com/go-xmlfmt/xmlfmt) +[![Go Report Card](https://goreportcard.com/badge/github.com/go-xmlfmt/xmlfmt)](https://goreportcard.com/report/github.com/go-xmlfmt/xmlfmt) +[![Codeship Status](https://codeship.com/projects/c49f02b0-a384-0134-fb20-2e0351080565/status?branch=master)](https://codeship.com/projects/190297) + +## Synopsis + +The Go XML Formatter, xmlfmt, will format the XML string in a readable way. + +```go +package main + +import "github.com/go-xmlfmt/xmlfmt" + +func main() { + xml1 := `aSome org-or-otherWouldnt you like to knowPatCalifia` + x := xmlfmt.FormatXML(xml1, "\t", " ") + print(x) +} + +``` + +Output: + +```xml + + + a + + + + + + Some org-or-other + + Wouldnt you like to know + + + + Pat + + Califia + + + + + +``` + +There is no XML decoding and encoding involved, only pure regular expression matching and replacing. So it is much faster than going through decoding and encoding procedures. Moreover, the exact XML source string is preserved, instead of being changed by the encoder. This is why this package exists in the first place. + +## Command + +To use it on command line, check out [xmlfmt](https://github.com/AntonioSun/xmlfmt): + + +``` +$ xmlfmt +XML Formatter +built on 2019-12-08 + +The xmlfmt will format the XML string without rewriting the document + +Options: + + -h, --help display help information + -f, --file *The xml file to read from (or stdin) + -p, --prefix each element begins on a new line and this prefix + -i, --indent[= ] indent string for nested elements +``` + + +## Justification + +### The format + +The Go XML Formatter is not called XML Beautifier because the result is not *exactly* as what people would expect -- some, but not all, closing tags stays on the same line, just as shown above. Having been looking at the result and thinking over it, I now think it is actually a better way to present it, as those closing tags on the same line are better stay that way in my opinion. I.e., + +When it comes to very big XML strings, which is what I’m dealing every day, saving spaces by not allowing those closing tags taking extra lines is plus instead of negative to me. + +### The alternative + +To format it “properly”, i.e., as what people would normally see, is very hard using pure regular expression. In fact, according to Sam Whited from the go-nuts mlist, + +> Regular expression is, well, regular. This means that they can parse regular grammars, but can't parse context free grammars (like XML). It is actually impossible to use a regex to do this task; it will always be fragile, unfortunately. + +So if the output format is so important to you, then unfortunately you have to go through decoding and encoding procedures. But there are some drawbacks as well, as put by James McGill, in http://stackoverflow.com/questions/21117161, besides such method being slow: + +> I like this solution, but am still in search of a Golang XML formatter/prettyprinter that doesn't rewrite the document (other than formatting whitespace). Marshalling or using the Encoder will change namespace declarations. 
+>
+> For example an element like "< ns1:Element />" will be translated to something like '< Element xmlns="http://bla...bla/ns1" >< /Element >' which seems harmless enough except when the intent is to not alter the xml other than formatting. -- James McGill Nov 12 '15
+
+Using Sam's code as an example,
+
+https://play.golang.org/p/JUqQY3WpW5
+
+The above code formats the following XML
+
+```xml
+<!-- sample lost in this copy: the element markup was stripped; only the text nodes "123" and "John Brown" remain -->
+```
+
+into this:
+
+```xml
+<!-- formatted sample lost in this copy: the same elements, re-indented one per line, with "123" and "John Brown" unchanged -->
+```
+
+I know they are syntactically the same, however the problem is that they *look* totally different.
+
+That's why there is this package, an XML Beautifier that doesn't rewrite the document.
+
+## Credit
+
+The credit goes to **diotalevi** from his post at http://www.perlmonks.org/?node_id=261292.
+
+However, it does not work for all cases. For example,
+
+```sh
+$ echo '123John Brown' | perl -pe 's/(?<=>)\s+(?=<)//g; s(<(/?)([^/>]+)(/?)>\s*(?=(".($1&&($4 eq"
+123
+John Brown
+```
+
+I simplified the algorithm, and now it should work for all cases:
+
+```sh
+echo '123John Brown' | perl -pe 's/(?<=>)\s+(?=<)//g; s(<(/?)([^>]+)(/?)>)($indent+=$3?0:$1?-1:1;"<$1$2$3>"."\n".(" "x$indent))ge'
+```
+```xml
+<!-- output lost in this copy: the markup re-indented one element per line; only the text nodes "123" and "John Brown" remain -->
+``` + +This package is a direct translate from above Perl code into Go, +then further enhanced by @ruandao. diff --git a/vendor/github.com/go-xmlfmt/xmlfmt/xmlfmt.go b/vendor/github.com/go-xmlfmt/xmlfmt/xmlfmt.go new file mode 100644 index 000000000..b744f5b35 --- /dev/null +++ b/vendor/github.com/go-xmlfmt/xmlfmt/xmlfmt.go @@ -0,0 +1,56 @@ +//////////////////////////////////////////////////////////////////////////// +// Porgram: xmlfmt.go +// Purpose: Go XML Beautify from XML string using pure string manipulation +// Authors: Antonio Sun (c) 2016-2019, All rights reserved +//////////////////////////////////////////////////////////////////////////// + +package xmlfmt + +import ( + "regexp" + "strings" +) + +var ( + reg = regexp.MustCompile(`<([/!]?)([^>]+?)(/?)>`) + // NL is the newline string used in XML output, define for DOS-convenient. + NL = "\r\n" +) + +// FormatXML will (purly) reformat the XML string in a readable way, without any rewriting/altering the structure +func FormatXML(xmls, prefix, indent string) string { + src := regexp.MustCompile(`(?s)>\s+<`).ReplaceAllString(xmls, "><") + + rf := replaceTag(prefix, indent) + return (prefix + reg.ReplaceAllStringFunc(src, rf)) +} + +// replaceTag returns a closure function to do 's/(?<=>)\s+(?=<)//g; s(<(/?)([^>]+?)(/?)>)($indent+=$3?0:$1?-1:1;"<$1$2$3>"."\n".(" "x$indent))ge' as in Perl +// and deal with comments as well +func replaceTag(prefix, indent string) func(string) string { + indentLevel := 0 + return func(m string) string { + // head elem + if strings.HasPrefix(m, "") { + return NL + prefix + strings.Repeat(indent, indentLevel) + m + } + // comment elem + if strings.HasPrefix(m, " "${filename}" -benchmem + echo "OK" + git checkout ${backup} + sleep 5 + fi +} + + +to=$1 +current=`git rev-parse --abbrev-ref HEAD` + +bench ${to} $2 +bench ${current} $2 + +benchcmp $3 "/tmp/${to}-$2.bench" "/tmp/${current}-$2.bench" diff --git a/vendor/github.com/gobwas/glob/compiler/compiler.go b/vendor/github.com/gobwas/glob/compiler/compiler.go new file mode 100644 index 000000000..02e7de80a --- /dev/null +++ b/vendor/github.com/gobwas/glob/compiler/compiler.go @@ -0,0 +1,525 @@ +package compiler + +// TODO use constructor with all matchers, and to their structs private +// TODO glue multiple Text nodes (like after QuoteMeta) + +import ( + "fmt" + "reflect" + + "github.com/gobwas/glob/match" + "github.com/gobwas/glob/syntax/ast" + "github.com/gobwas/glob/util/runes" +) + +func optimizeMatcher(matcher match.Matcher) match.Matcher { + switch m := matcher.(type) { + + case match.Any: + if len(m.Separators) == 0 { + return match.NewSuper() + } + + case match.AnyOf: + if len(m.Matchers) == 1 { + return m.Matchers[0] + } + + return m + + case match.List: + if m.Not == false && len(m.List) == 1 { + return match.NewText(string(m.List)) + } + + return m + + case match.BTree: + m.Left = optimizeMatcher(m.Left) + m.Right = optimizeMatcher(m.Right) + + r, ok := m.Value.(match.Text) + if !ok { + return m + } + + var ( + leftNil = m.Left == nil + rightNil = m.Right == nil + ) + if leftNil && rightNil { + return match.NewText(r.Str) + } + + _, leftSuper := m.Left.(match.Super) + lp, leftPrefix := m.Left.(match.Prefix) + la, leftAny := m.Left.(match.Any) + + _, rightSuper := m.Right.(match.Super) + rs, rightSuffix := m.Right.(match.Suffix) + ra, rightAny := m.Right.(match.Any) + + switch { + case leftSuper && rightSuper: + return match.NewContains(r.Str, false) + + case leftSuper && rightNil: + return match.NewSuffix(r.Str) + + case rightSuper && leftNil: + 
return match.NewPrefix(r.Str) + + case leftNil && rightSuffix: + return match.NewPrefixSuffix(r.Str, rs.Suffix) + + case rightNil && leftPrefix: + return match.NewPrefixSuffix(lp.Prefix, r.Str) + + case rightNil && leftAny: + return match.NewSuffixAny(r.Str, la.Separators) + + case leftNil && rightAny: + return match.NewPrefixAny(r.Str, ra.Separators) + } + + return m + } + + return matcher +} + +func compileMatchers(matchers []match.Matcher) (match.Matcher, error) { + if len(matchers) == 0 { + return nil, fmt.Errorf("compile error: need at least one matcher") + } + if len(matchers) == 1 { + return matchers[0], nil + } + if m := glueMatchers(matchers); m != nil { + return m, nil + } + + idx := -1 + maxLen := -1 + var val match.Matcher + for i, matcher := range matchers { + if l := matcher.Len(); l != -1 && l >= maxLen { + maxLen = l + idx = i + val = matcher + } + } + + if val == nil { // not found matcher with static length + r, err := compileMatchers(matchers[1:]) + if err != nil { + return nil, err + } + return match.NewBTree(matchers[0], nil, r), nil + } + + left := matchers[:idx] + var right []match.Matcher + if len(matchers) > idx+1 { + right = matchers[idx+1:] + } + + var l, r match.Matcher + var err error + if len(left) > 0 { + l, err = compileMatchers(left) + if err != nil { + return nil, err + } + } + + if len(right) > 0 { + r, err = compileMatchers(right) + if err != nil { + return nil, err + } + } + + return match.NewBTree(val, l, r), nil +} + +func glueMatchers(matchers []match.Matcher) match.Matcher { + if m := glueMatchersAsEvery(matchers); m != nil { + return m + } + if m := glueMatchersAsRow(matchers); m != nil { + return m + } + return nil +} + +func glueMatchersAsRow(matchers []match.Matcher) match.Matcher { + if len(matchers) <= 1 { + return nil + } + + var ( + c []match.Matcher + l int + ) + for _, matcher := range matchers { + if ml := matcher.Len(); ml == -1 { + return nil + } else { + c = append(c, matcher) + l += ml + } + } + return match.NewRow(l, c...) 
+} + +func glueMatchersAsEvery(matchers []match.Matcher) match.Matcher { + if len(matchers) <= 1 { + return nil + } + + var ( + hasAny bool + hasSuper bool + hasSingle bool + min int + separator []rune + ) + + for i, matcher := range matchers { + var sep []rune + + switch m := matcher.(type) { + case match.Super: + sep = []rune{} + hasSuper = true + + case match.Any: + sep = m.Separators + hasAny = true + + case match.Single: + sep = m.Separators + hasSingle = true + min++ + + case match.List: + if !m.Not { + return nil + } + sep = m.List + hasSingle = true + min++ + + default: + return nil + } + + // initialize + if i == 0 { + separator = sep + } + + if runes.Equal(sep, separator) { + continue + } + + return nil + } + + if hasSuper && !hasAny && !hasSingle { + return match.NewSuper() + } + + if hasAny && !hasSuper && !hasSingle { + return match.NewAny(separator) + } + + if (hasAny || hasSuper) && min > 0 && len(separator) == 0 { + return match.NewMin(min) + } + + every := match.NewEveryOf() + + if min > 0 { + every.Add(match.NewMin(min)) + + if !hasAny && !hasSuper { + every.Add(match.NewMax(min)) + } + } + + if len(separator) > 0 { + every.Add(match.NewContains(string(separator), true)) + } + + return every +} + +func minimizeMatchers(matchers []match.Matcher) []match.Matcher { + var done match.Matcher + var left, right, count int + + for l := 0; l < len(matchers); l++ { + for r := len(matchers); r > l; r-- { + if glued := glueMatchers(matchers[l:r]); glued != nil { + var swap bool + + if done == nil { + swap = true + } else { + cl, gl := done.Len(), glued.Len() + swap = cl > -1 && gl > -1 && gl > cl + swap = swap || count < r-l + } + + if swap { + done = glued + left = l + right = r + count = r - l + } + } + } + } + + if done == nil { + return matchers + } + + next := append(append([]match.Matcher{}, matchers[:left]...), done) + if right < len(matchers) { + next = append(next, matchers[right:]...) + } + + if len(next) == len(matchers) { + return next + } + + return minimizeMatchers(next) +} + +// minimizeAnyOf tries to apply some heuristics to minimize number of nodes in given tree +func minimizeTree(tree *ast.Node) *ast.Node { + switch tree.Kind { + case ast.KindAnyOf: + return minimizeTreeAnyOf(tree) + default: + return nil + } +} + +// minimizeAnyOf tries to find common children of given node of AnyOf pattern +// it searches for common children from left and from right +// if any common children are found – then it returns new optimized ast tree +// else it returns nil +func minimizeTreeAnyOf(tree *ast.Node) *ast.Node { + if !areOfSameKind(tree.Children, ast.KindPattern) { + return nil + } + + commonLeft, commonRight := commonChildren(tree.Children) + commonLeftCount, commonRightCount := len(commonLeft), len(commonRight) + if commonLeftCount == 0 && commonRightCount == 0 { // there are no common parts + return nil + } + + var result []*ast.Node + if commonLeftCount > 0 { + result = append(result, ast.NewNode(ast.KindPattern, nil, commonLeft...)) + } + + var anyOf []*ast.Node + for _, child := range tree.Children { + reuse := child.Children[commonLeftCount : len(child.Children)-commonRightCount] + var node *ast.Node + if len(reuse) == 0 { + // this pattern is completely reduced by commonLeft and commonRight patterns + // so it become nothing + node = ast.NewNode(ast.KindNothing, nil) + } else { + node = ast.NewNode(ast.KindPattern, nil, reuse...) 
+ } + anyOf = appendIfUnique(anyOf, node) + } + switch { + case len(anyOf) == 1 && anyOf[0].Kind != ast.KindNothing: + result = append(result, anyOf[0]) + case len(anyOf) > 1: + result = append(result, ast.NewNode(ast.KindAnyOf, nil, anyOf...)) + } + + if commonRightCount > 0 { + result = append(result, ast.NewNode(ast.KindPattern, nil, commonRight...)) + } + + return ast.NewNode(ast.KindPattern, nil, result...) +} + +func commonChildren(nodes []*ast.Node) (commonLeft, commonRight []*ast.Node) { + if len(nodes) <= 1 { + return + } + + // find node that has least number of children + idx := leastChildren(nodes) + if idx == -1 { + return + } + tree := nodes[idx] + treeLength := len(tree.Children) + + // allocate max able size for rightCommon slice + // to get ability insert elements in reverse order (from end to start) + // without sorting + commonRight = make([]*ast.Node, treeLength) + lastRight := treeLength // will use this to get results as commonRight[lastRight:] + + var ( + breakLeft bool + breakRight bool + commonTotal int + ) + for i, j := 0, treeLength-1; commonTotal < treeLength && j >= 0 && !(breakLeft && breakRight); i, j = i+1, j-1 { + treeLeft := tree.Children[i] + treeRight := tree.Children[j] + + for k := 0; k < len(nodes) && !(breakLeft && breakRight); k++ { + // skip least children node + if k == idx { + continue + } + + restLeft := nodes[k].Children[i] + restRight := nodes[k].Children[j+len(nodes[k].Children)-treeLength] + + breakLeft = breakLeft || !treeLeft.Equal(restLeft) + + // disable searching for right common parts, if left part is already overlapping + breakRight = breakRight || (!breakLeft && j <= i) + breakRight = breakRight || !treeRight.Equal(restRight) + } + + if !breakLeft { + commonTotal++ + commonLeft = append(commonLeft, treeLeft) + } + if !breakRight { + commonTotal++ + lastRight = j + commonRight[j] = treeRight + } + } + + commonRight = commonRight[lastRight:] + + return +} + +func appendIfUnique(target []*ast.Node, val *ast.Node) []*ast.Node { + for _, n := range target { + if reflect.DeepEqual(n, val) { + return target + } + } + return append(target, val) +} + +func areOfSameKind(nodes []*ast.Node, kind ast.Kind) bool { + for _, n := range nodes { + if n.Kind != kind { + return false + } + } + return true +} + +func leastChildren(nodes []*ast.Node) int { + min := -1 + idx := -1 + for i, n := range nodes { + if idx == -1 || (len(n.Children) < min) { + min = len(n.Children) + idx = i + } + } + return idx +} + +func compileTreeChildren(tree *ast.Node, sep []rune) ([]match.Matcher, error) { + var matchers []match.Matcher + for _, desc := range tree.Children { + m, err := compile(desc, sep) + if err != nil { + return nil, err + } + matchers = append(matchers, optimizeMatcher(m)) + } + return matchers, nil +} + +func compile(tree *ast.Node, sep []rune) (m match.Matcher, err error) { + switch tree.Kind { + case ast.KindAnyOf: + // todo this could be faster on pattern_alternatives_combine_lite (see glob_test.go) + if n := minimizeTree(tree); n != nil { + return compile(n, sep) + } + matchers, err := compileTreeChildren(tree, sep) + if err != nil { + return nil, err + } + return match.NewAnyOf(matchers...), nil + + case ast.KindPattern: + if len(tree.Children) == 0 { + return match.NewNothing(), nil + } + matchers, err := compileTreeChildren(tree, sep) + if err != nil { + return nil, err + } + m, err = compileMatchers(minimizeMatchers(matchers)) + if err != nil { + return nil, err + } + + case ast.KindAny: + m = match.NewAny(sep) + + case ast.KindSuper: + m = 
match.NewSuper() + + case ast.KindSingle: + m = match.NewSingle(sep) + + case ast.KindNothing: + m = match.NewNothing() + + case ast.KindList: + l := tree.Value.(ast.List) + m = match.NewList([]rune(l.Chars), l.Not) + + case ast.KindRange: + r := tree.Value.(ast.Range) + m = match.NewRange(r.Lo, r.Hi, r.Not) + + case ast.KindText: + t := tree.Value.(ast.Text) + m = match.NewText(t.Text) + + default: + return nil, fmt.Errorf("could not compile tree: unknown node type") + } + + return optimizeMatcher(m), nil +} + +func Compile(tree *ast.Node, sep []rune) (match.Matcher, error) { + m, err := compile(tree, sep) + if err != nil { + return nil, err + } + + return m, nil +} diff --git a/vendor/github.com/gobwas/glob/glob.go b/vendor/github.com/gobwas/glob/glob.go new file mode 100644 index 000000000..2afde343a --- /dev/null +++ b/vendor/github.com/gobwas/glob/glob.go @@ -0,0 +1,80 @@ +package glob + +import ( + "github.com/gobwas/glob/compiler" + "github.com/gobwas/glob/syntax" +) + +// Glob represents compiled glob pattern. +type Glob interface { + Match(string) bool +} + +// Compile creates Glob for given pattern and strings (if any present after pattern) as separators. +// The pattern syntax is: +// +// pattern: +// { term } +// +// term: +// `*` matches any sequence of non-separator characters +// `**` matches any sequence of characters +// `?` matches any single non-separator character +// `[` [ `!` ] { character-range } `]` +// character class (must be non-empty) +// `{` pattern-list `}` +// pattern alternatives +// c matches character c (c != `*`, `**`, `?`, `\`, `[`, `{`, `}`) +// `\` c matches character c +// +// character-range: +// c matches character c (c != `\\`, `-`, `]`) +// `\` c matches character c +// lo `-` hi matches character c for lo <= c <= hi +// +// pattern-list: +// pattern { `,` pattern } +// comma-separated (without spaces) patterns +// +func Compile(pattern string, separators ...rune) (Glob, error) { + ast, err := syntax.Parse(pattern) + if err != nil { + return nil, err + } + + matcher, err := compiler.Compile(ast, separators) + if err != nil { + return nil, err + } + + return matcher, nil +} + +// MustCompile is the same as Compile, except that if Compile returns error, this will panic +func MustCompile(pattern string, separators ...rune) Glob { + g, err := Compile(pattern, separators...) + if err != nil { + panic(err) + } + + return g +} + +// QuoteMeta returns a string that quotes all glob pattern meta characters +// inside the argument text; For example, QuoteMeta(`{foo*}`) returns `\[foo\*\]`. 
+func QuoteMeta(s string) string { + b := make([]byte, 2*len(s)) + + // a byte loop is correct because all meta characters are ASCII + j := 0 + for i := 0; i < len(s); i++ { + if syntax.Special(s[i]) { + b[j] = '\\' + j++ + } + b[j] = s[i] + j++ + } + + return string(b[0:j]) +} diff --git a/vendor/github.com/gobwas/glob/match/any.go b/vendor/github.com/gobwas/glob/match/any.go new file mode 100644 index 000000000..514a9a5c4 --- /dev/null +++ b/vendor/github.com/gobwas/glob/match/any.go @@ -0,0 +1,45 @@ +package match + +import ( + "fmt" + "github.com/gobwas/glob/util/strings" +) + +type Any struct { + Separators []rune +} + +func NewAny(s []rune) Any { + return Any{s} +} + +func (self Any) Match(s string) bool { + return strings.IndexAnyRunes(s, self.Separators) == -1 +} + +func (self Any) Index(s string) (int, []int) { + found := strings.IndexAnyRunes(s, self.Separators) + switch found { + case -1: + case 0: + return 0, segments0 + default: + s = s[:found] + } + + segments := acquireSegments(len(s)) + for i := range s { + segments = append(segments, i) + } + segments = append(segments, len(s)) + + return 0, segments +} + +func (self Any) Len() int { + return lenNo +} + +func (self Any) String() string { + return fmt.Sprintf("", string(self.Separators)) +} diff --git a/vendor/github.com/gobwas/glob/match/any_of.go b/vendor/github.com/gobwas/glob/match/any_of.go new file mode 100644 index 000000000..8e65356cd --- /dev/null +++ b/vendor/github.com/gobwas/glob/match/any_of.go @@ -0,0 +1,82 @@ +package match + +import "fmt" + +type AnyOf struct { + Matchers Matchers +} + +func NewAnyOf(m ...Matcher) AnyOf { + return AnyOf{Matchers(m)} +} + +func (self *AnyOf) Add(m Matcher) error { + self.Matchers = append(self.Matchers, m) + return nil +} + +func (self AnyOf) Match(s string) bool { + for _, m := range self.Matchers { + if m.Match(s) { + return true + } + } + + return false +} + +func (self AnyOf) Index(s string) (int, []int) { + index := -1 + + segments := acquireSegments(len(s)) + for _, m := range self.Matchers { + idx, seg := m.Index(s) + if idx == -1 { + continue + } + + if index == -1 || idx < index { + index = idx + segments = append(segments[:0], seg...) 
+ continue + } + + if idx > index { + continue + } + + // here idx == index + segments = appendMerge(segments, seg) + } + + if index == -1 { + releaseSegments(segments) + return -1, nil + } + + return index, segments +} + +func (self AnyOf) Len() (l int) { + l = -1 + for _, m := range self.Matchers { + ml := m.Len() + switch { + case l == -1: + l = ml + continue + + case ml == -1: + return -1 + + case l != ml: + return -1 + } + } + + return +} + +func (self AnyOf) String() string { + return fmt.Sprintf("", self.Matchers) +} diff --git a/vendor/github.com/gobwas/glob/match/btree.go b/vendor/github.com/gobwas/glob/match/btree.go new file mode 100644 index 000000000..a8130e93e --- /dev/null +++ b/vendor/github.com/gobwas/glob/match/btree.go @@ -0,0 +1,146 @@ +package match + +import ( + "fmt" + "unicode/utf8" +) + +type BTree struct { + Value Matcher + Left Matcher + Right Matcher + ValueLengthRunes int + LeftLengthRunes int + RightLengthRunes int + LengthRunes int +} + +func NewBTree(Value, Left, Right Matcher) (tree BTree) { + tree.Value = Value + tree.Left = Left + tree.Right = Right + + lenOk := true + if tree.ValueLengthRunes = Value.Len(); tree.ValueLengthRunes == -1 { + lenOk = false + } + + if Left != nil { + if tree.LeftLengthRunes = Left.Len(); tree.LeftLengthRunes == -1 { + lenOk = false + } + } + + if Right != nil { + if tree.RightLengthRunes = Right.Len(); tree.RightLengthRunes == -1 { + lenOk = false + } + } + + if lenOk { + tree.LengthRunes = tree.LeftLengthRunes + tree.ValueLengthRunes + tree.RightLengthRunes + } else { + tree.LengthRunes = -1 + } + + return tree +} + +func (self BTree) Len() int { + return self.LengthRunes +} + +// todo? +func (self BTree) Index(s string) (int, []int) { + return -1, nil +} + +func (self BTree) Match(s string) bool { + inputLen := len(s) + + // self.Length, self.RLen and self.LLen are values meaning the length of runes for each part + // here we manipulating byte length for better optimizations + // but these checks still works, cause minLen of 1-rune string is 1 byte. 
+ if self.LengthRunes != -1 && self.LengthRunes > inputLen { + return false + } + + // try to cut unnecessary parts + // by knowledge of length of right and left part + var offset, limit int + if self.LeftLengthRunes >= 0 { + offset = self.LeftLengthRunes + } + if self.RightLengthRunes >= 0 { + limit = inputLen - self.RightLengthRunes + } else { + limit = inputLen + } + + for offset < limit { + // search for matching part in substring + index, segments := self.Value.Index(s[offset:limit]) + if index == -1 { + releaseSegments(segments) + return false + } + + l := s[:offset+index] + var left bool + if self.Left != nil { + left = self.Left.Match(l) + } else { + left = l == "" + } + + if left { + for i := len(segments) - 1; i >= 0; i-- { + length := segments[i] + + var right bool + var r string + // if there is no string for the right branch + if inputLen <= offset+index+length { + r = "" + } else { + r = s[offset+index+length:] + } + + if self.Right != nil { + right = self.Right.Match(r) + } else { + right = r == "" + } + + if right { + releaseSegments(segments) + return true + } + } + } + + _, step := utf8.DecodeRuneInString(s[offset+index:]) + offset += index + step + + releaseSegments(segments) + } + + return false +} + +func (self BTree) String() string { + const n string = "" + var l, r string + if self.Left == nil { + l = n + } else { + l = self.Left.String() + } + if self.Right == nil { + r = n + } else { + r = self.Right.String() + } + + return fmt.Sprintf("%s]>", l, self.Value, r) +} diff --git a/vendor/github.com/gobwas/glob/match/contains.go b/vendor/github.com/gobwas/glob/match/contains.go new file mode 100644 index 000000000..0998e95b0 --- /dev/null +++ b/vendor/github.com/gobwas/glob/match/contains.go @@ -0,0 +1,58 @@ +package match + +import ( + "fmt" + "strings" +) + +type Contains struct { + Needle string + Not bool +} + +func NewContains(needle string, not bool) Contains { + return Contains{needle, not} +} + +func (self Contains) Match(s string) bool { + return strings.Contains(s, self.Needle) != self.Not +} + +func (self Contains) Index(s string) (int, []int) { + var offset int + + idx := strings.Index(s, self.Needle) + + if !self.Not { + if idx == -1 { + return -1, nil + } + + offset = idx + len(self.Needle) + if len(s) <= offset { + return 0, []int{offset} + } + s = s[offset:] + } else if idx != -1 { + s = s[:idx] + } + + segments := acquireSegments(len(s) + 1) + for i := range s { + segments = append(segments, offset+i) + } + + return 0, append(segments, offset+len(s)) +} + +func (self Contains) Len() int { + return lenNo +} + +func (self Contains) String() string { + var not string + if self.Not { + not = "!" 
+ } + return fmt.Sprintf("", not, self.Needle) +} diff --git a/vendor/github.com/gobwas/glob/match/every_of.go b/vendor/github.com/gobwas/glob/match/every_of.go new file mode 100644 index 000000000..7c968ee36 --- /dev/null +++ b/vendor/github.com/gobwas/glob/match/every_of.go @@ -0,0 +1,99 @@ +package match + +import ( + "fmt" +) + +type EveryOf struct { + Matchers Matchers +} + +func NewEveryOf(m ...Matcher) EveryOf { + return EveryOf{Matchers(m)} +} + +func (self *EveryOf) Add(m Matcher) error { + self.Matchers = append(self.Matchers, m) + return nil +} + +func (self EveryOf) Len() (l int) { + for _, m := range self.Matchers { + if ml := m.Len(); l > 0 { + l += ml + } else { + return -1 + } + } + + return +} + +func (self EveryOf) Index(s string) (int, []int) { + var index int + var offset int + + // make `in` with cap as len(s), + // cause it is the maximum size of output segments values + next := acquireSegments(len(s)) + current := acquireSegments(len(s)) + + sub := s + for i, m := range self.Matchers { + idx, seg := m.Index(sub) + if idx == -1 { + releaseSegments(next) + releaseSegments(current) + return -1, nil + } + + if i == 0 { + // we use copy here instead of `current = seg` + // cause seg is a slice from reusable buffer `in` + // and it could be overwritten in next iteration + current = append(current, seg...) + } else { + // clear the next + next = next[:0] + + delta := index - (idx + offset) + for _, ex := range current { + for _, n := range seg { + if ex+delta == n { + next = append(next, n) + } + } + } + + if len(next) == 0 { + releaseSegments(next) + releaseSegments(current) + return -1, nil + } + + current = append(current[:0], next...) + } + + index = idx + offset + sub = s[index:] + offset += idx + } + + releaseSegments(next) + + return index, current +} + +func (self EveryOf) Match(s string) bool { + for _, m := range self.Matchers { + if !m.Match(s) { + return false + } + } + + return true +} + +func (self EveryOf) String() string { + return fmt.Sprintf("", self.Matchers) +} diff --git a/vendor/github.com/gobwas/glob/match/list.go b/vendor/github.com/gobwas/glob/match/list.go new file mode 100644 index 000000000..7fd763ecd --- /dev/null +++ b/vendor/github.com/gobwas/glob/match/list.go @@ -0,0 +1,49 @@ +package match + +import ( + "fmt" + "github.com/gobwas/glob/util/runes" + "unicode/utf8" +) + +type List struct { + List []rune + Not bool +} + +func NewList(list []rune, not bool) List { + return List{list, not} +} + +func (self List) Match(s string) bool { + r, w := utf8.DecodeRuneInString(s) + if len(s) > w { + return false + } + + inList := runes.IndexRune(self.List, r) != -1 + return inList == !self.Not +} + +func (self List) Len() int { + return lenOne +} + +func (self List) Index(s string) (int, []int) { + for i, r := range s { + if self.Not == (runes.IndexRune(self.List, r) == -1) { + return i, segmentsByRuneLength[utf8.RuneLen(r)] + } + } + + return -1, nil +} + +func (self List) String() string { + var not string + if self.Not { + not = "!" 
+ } + + return fmt.Sprintf("", not, string(self.List)) +} diff --git a/vendor/github.com/gobwas/glob/match/match.go b/vendor/github.com/gobwas/glob/match/match.go new file mode 100644 index 000000000..f80e007fb --- /dev/null +++ b/vendor/github.com/gobwas/glob/match/match.go @@ -0,0 +1,81 @@ +package match + +// todo common table of rune's length + +import ( + "fmt" + "strings" +) + +const lenOne = 1 +const lenZero = 0 +const lenNo = -1 + +type Matcher interface { + Match(string) bool + Index(string) (int, []int) + Len() int + String() string +} + +type Matchers []Matcher + +func (m Matchers) String() string { + var s []string + for _, matcher := range m { + s = append(s, fmt.Sprint(matcher)) + } + + return fmt.Sprintf("%s", strings.Join(s, ",")) +} + +// appendMerge merges and sorts given already SORTED and UNIQUE segments. +func appendMerge(target, sub []int) []int { + lt, ls := len(target), len(sub) + out := make([]int, 0, lt+ls) + + for x, y := 0, 0; x < lt || y < ls; { + if x >= lt { + out = append(out, sub[y:]...) + break + } + + if y >= ls { + out = append(out, target[x:]...) + break + } + + xValue := target[x] + yValue := sub[y] + + switch { + + case xValue == yValue: + out = append(out, xValue) + x++ + y++ + + case xValue < yValue: + out = append(out, xValue) + x++ + + case yValue < xValue: + out = append(out, yValue) + y++ + + } + } + + target = append(target[:0], out...) + + return target +} + +func reverseSegments(input []int) { + l := len(input) + m := l / 2 + + for i := 0; i < m; i++ { + input[i], input[l-i-1] = input[l-i-1], input[i] + } +} diff --git a/vendor/github.com/gobwas/glob/match/max.go b/vendor/github.com/gobwas/glob/match/max.go new file mode 100644 index 000000000..d72f69eff --- /dev/null +++ b/vendor/github.com/gobwas/glob/match/max.go @@ -0,0 +1,49 @@ +package match + +import ( + "fmt" + "unicode/utf8" +) + +type Max struct { + Limit int +} + +func NewMax(l int) Max { + return Max{l} +} + +func (self Max) Match(s string) bool { + var l int + for range s { + l += 1 + if l > self.Limit { + return false + } + } + + return true +} + +func (self Max) Index(s string) (int, []int) { + segments := acquireSegments(self.Limit + 1) + segments = append(segments, 0) + var count int + for i, r := range s { + count++ + if count > self.Limit { + break + } + segments = append(segments, i+utf8.RuneLen(r)) + } + + return 0, segments +} + +func (self Max) Len() int { + return lenNo +} + +func (self Max) String() string { + return fmt.Sprintf("", self.Limit) +} diff --git a/vendor/github.com/gobwas/glob/match/min.go b/vendor/github.com/gobwas/glob/match/min.go new file mode 100644 index 000000000..db57ac8eb --- /dev/null +++ b/vendor/github.com/gobwas/glob/match/min.go @@ -0,0 +1,57 @@ +package match + +import ( + "fmt" + "unicode/utf8" +) + +type Min struct { + Limit int +} + +func NewMin(l int) Min { + return Min{l} +} + +func (self Min) Match(s string) bool { + var l int + for range s { + l += 1 + if l >= self.Limit { + return true + } + } + + return false +} + +func (self Min) Index(s string) (int, []int) { + var count int + + c := len(s) - self.Limit + 1 + if c <= 0 { + return -1, nil + } + + segments := acquireSegments(c) + for i, r := range s { + count++ + if count >= self.Limit { + segments = append(segments, i+utf8.RuneLen(r)) + } + } + + if len(segments) == 0 { + return -1, nil + } + + return 0, segments +} + +func (self Min) Len() int { + return lenNo +} + +func (self Min) String() string { + return fmt.Sprintf("", self.Limit) +} diff --git 
a/vendor/github.com/gobwas/glob/match/nothing.go b/vendor/github.com/gobwas/glob/match/nothing.go new file mode 100644 index 000000000..0d4ecd36b --- /dev/null +++ b/vendor/github.com/gobwas/glob/match/nothing.go @@ -0,0 +1,27 @@ +package match + +import ( + "fmt" +) + +type Nothing struct{} + +func NewNothing() Nothing { + return Nothing{} +} + +func (self Nothing) Match(s string) bool { + return len(s) == 0 +} + +func (self Nothing) Index(s string) (int, []int) { + return 0, segments0 +} + +func (self Nothing) Len() int { + return lenZero +} + +func (self Nothing) String() string { + return fmt.Sprintf("") +} diff --git a/vendor/github.com/gobwas/glob/match/prefix.go b/vendor/github.com/gobwas/glob/match/prefix.go new file mode 100644 index 000000000..a7347250e --- /dev/null +++ b/vendor/github.com/gobwas/glob/match/prefix.go @@ -0,0 +1,50 @@ +package match + +import ( + "fmt" + "strings" + "unicode/utf8" +) + +type Prefix struct { + Prefix string +} + +func NewPrefix(p string) Prefix { + return Prefix{p} +} + +func (self Prefix) Index(s string) (int, []int) { + idx := strings.Index(s, self.Prefix) + if idx == -1 { + return -1, nil + } + + length := len(self.Prefix) + var sub string + if len(s) > idx+length { + sub = s[idx+length:] + } else { + sub = "" + } + + segments := acquireSegments(len(sub) + 1) + segments = append(segments, length) + for i, r := range sub { + segments = append(segments, length+i+utf8.RuneLen(r)) + } + + return idx, segments +} + +func (self Prefix) Len() int { + return lenNo +} + +func (self Prefix) Match(s string) bool { + return strings.HasPrefix(s, self.Prefix) +} + +func (self Prefix) String() string { + return fmt.Sprintf("", self.Prefix) +} diff --git a/vendor/github.com/gobwas/glob/match/prefix_any.go b/vendor/github.com/gobwas/glob/match/prefix_any.go new file mode 100644 index 000000000..8ee58fe1b --- /dev/null +++ b/vendor/github.com/gobwas/glob/match/prefix_any.go @@ -0,0 +1,55 @@ +package match + +import ( + "fmt" + "strings" + "unicode/utf8" + + sutil "github.com/gobwas/glob/util/strings" +) + +type PrefixAny struct { + Prefix string + Separators []rune +} + +func NewPrefixAny(s string, sep []rune) PrefixAny { + return PrefixAny{s, sep} +} + +func (self PrefixAny) Index(s string) (int, []int) { + idx := strings.Index(s, self.Prefix) + if idx == -1 { + return -1, nil + } + + n := len(self.Prefix) + sub := s[idx+n:] + i := sutil.IndexAnyRunes(sub, self.Separators) + if i > -1 { + sub = sub[:i] + } + + seg := acquireSegments(len(sub) + 1) + seg = append(seg, n) + for i, r := range sub { + seg = append(seg, n+i+utf8.RuneLen(r)) + } + + return idx, seg +} + +func (self PrefixAny) Len() int { + return lenNo +} + +func (self PrefixAny) Match(s string) bool { + if !strings.HasPrefix(s, self.Prefix) { + return false + } + return sutil.IndexAnyRunes(s[len(self.Prefix):], self.Separators) == -1 +} + +func (self PrefixAny) String() string { + return fmt.Sprintf("", self.Prefix, string(self.Separators)) +} diff --git a/vendor/github.com/gobwas/glob/match/prefix_suffix.go b/vendor/github.com/gobwas/glob/match/prefix_suffix.go new file mode 100644 index 000000000..8208085a1 --- /dev/null +++ b/vendor/github.com/gobwas/glob/match/prefix_suffix.go @@ -0,0 +1,62 @@ +package match + +import ( + "fmt" + "strings" +) + +type PrefixSuffix struct { + Prefix, Suffix string +} + +func NewPrefixSuffix(p, s string) PrefixSuffix { + return PrefixSuffix{p, s} +} + +func (self PrefixSuffix) Index(s string) (int, []int) { + prefixIdx := strings.Index(s, self.Prefix) + if prefixIdx 
== -1 { + return -1, nil + } + + suffixLen := len(self.Suffix) + if suffixLen <= 0 { + return prefixIdx, []int{len(s) - prefixIdx} + } + + if (len(s) - prefixIdx) <= 0 { + return -1, nil + } + + segments := acquireSegments(len(s) - prefixIdx) + for sub := s[prefixIdx:]; ; { + suffixIdx := strings.LastIndex(sub, self.Suffix) + if suffixIdx == -1 { + break + } + + segments = append(segments, suffixIdx+suffixLen) + sub = sub[:suffixIdx] + } + + if len(segments) == 0 { + releaseSegments(segments) + return -1, nil + } + + reverseSegments(segments) + + return prefixIdx, segments +} + +func (self PrefixSuffix) Len() int { + return lenNo +} + +func (self PrefixSuffix) Match(s string) bool { + return strings.HasPrefix(s, self.Prefix) && strings.HasSuffix(s, self.Suffix) +} + +func (self PrefixSuffix) String() string { + return fmt.Sprintf("", self.Prefix, self.Suffix) +} diff --git a/vendor/github.com/gobwas/glob/match/range.go b/vendor/github.com/gobwas/glob/match/range.go new file mode 100644 index 000000000..ce30245a4 --- /dev/null +++ b/vendor/github.com/gobwas/glob/match/range.go @@ -0,0 +1,48 @@ +package match + +import ( + "fmt" + "unicode/utf8" +) + +type Range struct { + Lo, Hi rune + Not bool +} + +func NewRange(lo, hi rune, not bool) Range { + return Range{lo, hi, not} +} + +func (self Range) Len() int { + return lenOne +} + +func (self Range) Match(s string) bool { + r, w := utf8.DecodeRuneInString(s) + if len(s) > w { + return false + } + + inRange := r >= self.Lo && r <= self.Hi + + return inRange == !self.Not +} + +func (self Range) Index(s string) (int, []int) { + for i, r := range s { + if self.Not != (r >= self.Lo && r <= self.Hi) { + return i, segmentsByRuneLength[utf8.RuneLen(r)] + } + } + + return -1, nil +} + +func (self Range) String() string { + var not string + if self.Not { + not = "!" 
+ } + return fmt.Sprintf("", not, string(self.Lo), string(self.Hi)) +} diff --git a/vendor/github.com/gobwas/glob/match/row.go b/vendor/github.com/gobwas/glob/match/row.go new file mode 100644 index 000000000..4379042e4 --- /dev/null +++ b/vendor/github.com/gobwas/glob/match/row.go @@ -0,0 +1,77 @@ +package match + +import ( + "fmt" +) + +type Row struct { + Matchers Matchers + RunesLength int + Segments []int +} + +func NewRow(len int, m ...Matcher) Row { + return Row{ + Matchers: Matchers(m), + RunesLength: len, + Segments: []int{len}, + } +} + +func (self Row) matchAll(s string) bool { + var idx int + for _, m := range self.Matchers { + length := m.Len() + + var next, i int + for next = range s[idx:] { + i++ + if i == length { + break + } + } + + if i < length || !m.Match(s[idx:idx+next+1]) { + return false + } + + idx += next + 1 + } + + return true +} + +func (self Row) lenOk(s string) bool { + var i int + for range s { + i++ + if i > self.RunesLength { + return false + } + } + return self.RunesLength == i +} + +func (self Row) Match(s string) bool { + return self.lenOk(s) && self.matchAll(s) +} + +func (self Row) Len() (l int) { + return self.RunesLength +} + +func (self Row) Index(s string) (int, []int) { + for i := range s { + if len(s[i:]) < self.RunesLength { + break + } + if self.matchAll(s[i:]) { + return i, self.Segments + } + } + return -1, nil +} + +func (self Row) String() string { + return fmt.Sprintf("", self.RunesLength, self.Matchers) +} diff --git a/vendor/github.com/gobwas/glob/match/segments.go b/vendor/github.com/gobwas/glob/match/segments.go new file mode 100644 index 000000000..9ea6f3094 --- /dev/null +++ b/vendor/github.com/gobwas/glob/match/segments.go @@ -0,0 +1,91 @@ +package match + +import ( + "sync" +) + +type SomePool interface { + Get() []int + Put([]int) +} + +var segmentsPools [1024]sync.Pool + +func toPowerOfTwo(v int) int { + v-- + v |= v >> 1 + v |= v >> 2 + v |= v >> 4 + v |= v >> 8 + v |= v >> 16 + v++ + + return v +} + +const ( + cacheFrom = 16 + cacheToAndHigher = 1024 + cacheFromIndex = 15 + cacheToAndHigherIndex = 1023 +) + +var ( + segments0 = []int{0} + segments1 = []int{1} + segments2 = []int{2} + segments3 = []int{3} + segments4 = []int{4} +) + +var segmentsByRuneLength [5][]int = [5][]int{ + 0: segments0, + 1: segments1, + 2: segments2, + 3: segments3, + 4: segments4, +} + +func init() { + for i := cacheToAndHigher; i >= cacheFrom; i >>= 1 { + func(i int) { + segmentsPools[i-1] = sync.Pool{New: func() interface{} { + return make([]int, 0, i) + }} + }(i) + } +} + +func getTableIndex(c int) int { + p := toPowerOfTwo(c) + switch { + case p >= cacheToAndHigher: + return cacheToAndHigherIndex + case p <= cacheFrom: + return cacheFromIndex + default: + return p - 1 + } +} + +func acquireSegments(c int) []int { + // make []int with less capacity than cacheFrom + // is faster than acquiring it from pool + if c < cacheFrom { + return make([]int, 0, c) + } + + return segmentsPools[getTableIndex(c)].Get().([]int)[:0] +} + +func releaseSegments(s []int) { + c := cap(s) + + // make []int with less capacity than cacheFrom + // is faster than acquiring it from pool + if c < cacheFrom { + return + } + + segmentsPools[getTableIndex(c)].Put(s) +} diff --git a/vendor/github.com/gobwas/glob/match/single.go b/vendor/github.com/gobwas/glob/match/single.go new file mode 100644 index 000000000..ee6e3954c --- /dev/null +++ b/vendor/github.com/gobwas/glob/match/single.go @@ -0,0 +1,43 @@ +package match + +import ( + "fmt" + "github.com/gobwas/glob/util/runes" + 
"unicode/utf8" +) + +// single represents ? +type Single struct { + Separators []rune +} + +func NewSingle(s []rune) Single { + return Single{s} +} + +func (self Single) Match(s string) bool { + r, w := utf8.DecodeRuneInString(s) + if len(s) > w { + return false + } + + return runes.IndexRune(self.Separators, r) == -1 +} + +func (self Single) Len() int { + return lenOne +} + +func (self Single) Index(s string) (int, []int) { + for i, r := range s { + if runes.IndexRune(self.Separators, r) == -1 { + return i, segmentsByRuneLength[utf8.RuneLen(r)] + } + } + + return -1, nil +} + +func (self Single) String() string { + return fmt.Sprintf("", string(self.Separators)) +} diff --git a/vendor/github.com/gobwas/glob/match/suffix.go b/vendor/github.com/gobwas/glob/match/suffix.go new file mode 100644 index 000000000..85bea8c68 --- /dev/null +++ b/vendor/github.com/gobwas/glob/match/suffix.go @@ -0,0 +1,35 @@ +package match + +import ( + "fmt" + "strings" +) + +type Suffix struct { + Suffix string +} + +func NewSuffix(s string) Suffix { + return Suffix{s} +} + +func (self Suffix) Len() int { + return lenNo +} + +func (self Suffix) Match(s string) bool { + return strings.HasSuffix(s, self.Suffix) +} + +func (self Suffix) Index(s string) (int, []int) { + idx := strings.Index(s, self.Suffix) + if idx == -1 { + return -1, nil + } + + return 0, []int{idx + len(self.Suffix)} +} + +func (self Suffix) String() string { + return fmt.Sprintf("", self.Suffix) +} diff --git a/vendor/github.com/gobwas/glob/match/suffix_any.go b/vendor/github.com/gobwas/glob/match/suffix_any.go new file mode 100644 index 000000000..c5106f819 --- /dev/null +++ b/vendor/github.com/gobwas/glob/match/suffix_any.go @@ -0,0 +1,43 @@ +package match + +import ( + "fmt" + "strings" + + sutil "github.com/gobwas/glob/util/strings" +) + +type SuffixAny struct { + Suffix string + Separators []rune +} + +func NewSuffixAny(s string, sep []rune) SuffixAny { + return SuffixAny{s, sep} +} + +func (self SuffixAny) Index(s string) (int, []int) { + idx := strings.Index(s, self.Suffix) + if idx == -1 { + return -1, nil + } + + i := sutil.LastIndexAnyRunes(s[:idx], self.Separators) + 1 + + return i, []int{idx + len(self.Suffix) - i} +} + +func (self SuffixAny) Len() int { + return lenNo +} + +func (self SuffixAny) Match(s string) bool { + if !strings.HasSuffix(s, self.Suffix) { + return false + } + return sutil.IndexAnyRunes(s[:len(s)-len(self.Suffix)], self.Separators) == -1 +} + +func (self SuffixAny) String() string { + return fmt.Sprintf("", string(self.Separators), self.Suffix) +} diff --git a/vendor/github.com/gobwas/glob/match/super.go b/vendor/github.com/gobwas/glob/match/super.go new file mode 100644 index 000000000..3875950bb --- /dev/null +++ b/vendor/github.com/gobwas/glob/match/super.go @@ -0,0 +1,33 @@ +package match + +import ( + "fmt" +) + +type Super struct{} + +func NewSuper() Super { + return Super{} +} + +func (self Super) Match(s string) bool { + return true +} + +func (self Super) Len() int { + return lenNo +} + +func (self Super) Index(s string) (int, []int) { + segments := acquireSegments(len(s) + 1) + for i := range s { + segments = append(segments, i) + } + segments = append(segments, len(s)) + + return 0, segments +} + +func (self Super) String() string { + return fmt.Sprintf("") +} diff --git a/vendor/github.com/gobwas/glob/match/text.go b/vendor/github.com/gobwas/glob/match/text.go new file mode 100644 index 000000000..0a17616d3 --- /dev/null +++ b/vendor/github.com/gobwas/glob/match/text.go @@ -0,0 +1,45 @@ +package match 
+ +import ( + "fmt" + "strings" + "unicode/utf8" +) + +// raw represents raw string to match +type Text struct { + Str string + RunesLength int + BytesLength int + Segments []int +} + +func NewText(s string) Text { + return Text{ + Str: s, + RunesLength: utf8.RuneCountInString(s), + BytesLength: len(s), + Segments: []int{len(s)}, + } +} + +func (self Text) Match(s string) bool { + return self.Str == s +} + +func (self Text) Len() int { + return self.RunesLength +} + +func (self Text) Index(s string) (int, []int) { + index := strings.Index(s, self.Str) + if index == -1 { + return -1, nil + } + + return index, self.Segments +} + +func (self Text) String() string { + return fmt.Sprintf("", self.Str) +} diff --git a/vendor/github.com/gobwas/glob/readme.md b/vendor/github.com/gobwas/glob/readme.md new file mode 100644 index 000000000..f58144e73 --- /dev/null +++ b/vendor/github.com/gobwas/glob/readme.md @@ -0,0 +1,148 @@ +# glob.[go](https://golang.org) + +[![GoDoc][godoc-image]][godoc-url] [![Build Status][travis-image]][travis-url] + +> Go Globbing Library. + +## Install + +```shell + go get github.com/gobwas/glob +``` + +## Example + +```go + +package main + +import "github.com/gobwas/glob" + +func main() { + var g glob.Glob + + // create simple glob + g = glob.MustCompile("*.github.com") + g.Match("api.github.com") // true + + // quote meta characters and then create simple glob + g = glob.MustCompile(glob.QuoteMeta("*.github.com")) + g.Match("*.github.com") // true + + // create new glob with set of delimiters as ["."] + g = glob.MustCompile("api.*.com", '.') + g.Match("api.github.com") // true + g.Match("api.gi.hub.com") // false + + // create new glob with set of delimiters as ["."] + // but now with super wildcard + g = glob.MustCompile("api.**.com", '.') + g.Match("api.github.com") // true + g.Match("api.gi.hub.com") // true + + // create glob with single symbol wildcard + g = glob.MustCompile("?at") + g.Match("cat") // true + g.Match("fat") // true + g.Match("at") // false + + // create glob with single symbol wildcard and delimiters ['f'] + g = glob.MustCompile("?at", 'f') + g.Match("cat") // true + g.Match("fat") // false + g.Match("at") // false + + // create glob with character-list matchers + g = glob.MustCompile("[abc]at") + g.Match("cat") // true + g.Match("bat") // true + g.Match("fat") // false + g.Match("at") // false + + // create glob with character-list matchers + g = glob.MustCompile("[!abc]at") + g.Match("cat") // false + g.Match("bat") // false + g.Match("fat") // true + g.Match("at") // false + + // create glob with character-range matchers + g = glob.MustCompile("[a-c]at") + g.Match("cat") // true + g.Match("bat") // true + g.Match("fat") // false + g.Match("at") // false + + // create glob with character-range matchers + g = glob.MustCompile("[!a-c]at") + g.Match("cat") // false + g.Match("bat") // false + g.Match("fat") // true + g.Match("at") // false + + // create glob with pattern-alternatives list + g = glob.MustCompile("{cat,bat,[fr]at}") + g.Match("cat") // true + g.Match("bat") // true + g.Match("fat") // true + g.Match("rat") // true + g.Match("at") // false + g.Match("zat") // false +} + +``` + +## Performance + +This library is created for compile-once patterns. This means, that compilation could take time, but +strings matching is done faster, than in case when always parsing template. + +If you will not use compiled `glob.Glob` object, and do `g := glob.MustCompile(pattern); g.Match(...)` every time, then your code will be much more slower. 
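To make the compile-once point concrete, here is a minimal usage sketch (illustrative, not part of the vendored files): the pattern is compiled once into a package-level `glob.Glob`, and only the cheap `Match` call runs per input.

```go
package main

import (
	"fmt"

	"github.com/gobwas/glob"
)

// Compiled once at program start; every call below reuses the same matcher.
var hostGlob = glob.MustCompile("api.*.com", '.')

func main() {
	for _, host := range []string{"api.github.com", "api.gi.hub.com"} {
		// Match on an already-compiled Glob avoids re-parsing the pattern.
		fmt.Println(host, hostGlob.Match(host)) // true, then false ('.' is a separator)
	}
}
```

`MustCompile` panics on a bad pattern, which is fine for a fixed literal like this one; patterns that come from user input should go through `Compile` and handle the error instead.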
+ +Run `go test -bench=.` from source root to see the benchmarks: + +Pattern | Fixture | Match | Speed (ns/op) +--------|---------|-------|-------------- +`[a-z][!a-x]*cat*[h][!b]*eyes*` | `my cat has very bright eyes` | `true` | 432 +`[a-z][!a-x]*cat*[h][!b]*eyes*` | `my dog has very bright eyes` | `false` | 199 +`https://*.google.*` | `https://account.google.com` | `true` | 96 +`https://*.google.*` | `https://google.com` | `false` | 66 +`{https://*.google.*,*yandex.*,*yahoo.*,*mail.ru}` | `http://yahoo.com` | `true` | 163 +`{https://*.google.*,*yandex.*,*yahoo.*,*mail.ru}` | `http://google.com` | `false` | 197 +`{https://*gobwas.com,http://exclude.gobwas.com}` | `https://safe.gobwas.com` | `true` | 22 +`{https://*gobwas.com,http://exclude.gobwas.com}` | `http://safe.gobwas.com` | `false` | 24 +`abc*` | `abcdef` | `true` | 8.15 +`abc*` | `af` | `false` | 5.68 +`*def` | `abcdef` | `true` | 8.84 +`*def` | `af` | `false` | 5.74 +`ab*ef` | `abcdef` | `true` | 15.2 +`ab*ef` | `af` | `false` | 10.4 + +The same things with `regexp` package: + +Pattern | Fixture | Match | Speed (ns/op) +--------|---------|-------|-------------- +`^[a-z][^a-x].*cat.*[h][^b].*eyes.*$` | `my cat has very bright eyes` | `true` | 2553 +`^[a-z][^a-x].*cat.*[h][^b].*eyes.*$` | `my dog has very bright eyes` | `false` | 1383 +`^https:\/\/.*\.google\..*$` | `https://account.google.com` | `true` | 1205 +`^https:\/\/.*\.google\..*$` | `https://google.com` | `false` | 767 +`^(https:\/\/.*\.google\..*|.*yandex\..*|.*yahoo\..*|.*mail\.ru)$` | `http://yahoo.com` | `true` | 1435 +`^(https:\/\/.*\.google\..*|.*yandex\..*|.*yahoo\..*|.*mail\.ru)$` | `http://google.com` | `false` | 1674 +`^(https:\/\/.*gobwas\.com|http://exclude.gobwas.com)$` | `https://safe.gobwas.com` | `true` | 1039 +`^(https:\/\/.*gobwas\.com|http://exclude.gobwas.com)$` | `http://safe.gobwas.com` | `false` | 272 +`^abc.*$` | `abcdef` | `true` | 237 +`^abc.*$` | `af` | `false` | 100 +`^.*def$` | `abcdef` | `true` | 464 +`^.*def$` | `af` | `false` | 265 +`^ab.*ef$` | `abcdef` | `true` | 375 +`^ab.*ef$` | `af` | `false` | 145 + +[godoc-image]: https://godoc.org/github.com/gobwas/glob?status.svg +[godoc-url]: https://godoc.org/github.com/gobwas/glob +[travis-image]: https://travis-ci.org/gobwas/glob.svg?branch=master +[travis-url]: https://travis-ci.org/gobwas/glob + +## Syntax + +Syntax is inspired by [standard wildcards](http://tldp.org/LDP/GNU-Linux-Tools-Summary/html/x11655.htm), +except that `**` is aka super-asterisk, that do not sensitive for separators. 
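The benchmark tables pair each glob with an equivalent regular expression; the short sketch below (illustrative only, not taken from the library) runs one such pair side by side to show that both accept and reject the same URLs.

```go
package main

import (
	"fmt"
	"regexp"

	"github.com/gobwas/glob"
)

func main() {
	// The same rule as one row of the tables above, written as a glob and as a regexp.
	g := glob.MustCompile("https://*.google.*")
	re := regexp.MustCompile(`^https://.*\.google\..*$`)

	for _, u := range []string{"https://account.google.com", "https://google.com"} {
		// Both matchers agree: true for the first URL, false for the second.
		fmt.Printf("%-28s glob=%v regexp=%v\n", u, g.Match(u), re.MatchString(u))
	}
}
```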
\ No newline at end of file diff --git a/vendor/github.com/gobwas/glob/syntax/ast/ast.go b/vendor/github.com/gobwas/glob/syntax/ast/ast.go new file mode 100644 index 000000000..3220a694a --- /dev/null +++ b/vendor/github.com/gobwas/glob/syntax/ast/ast.go @@ -0,0 +1,122 @@ +package ast + +import ( + "bytes" + "fmt" +) + +type Node struct { + Parent *Node + Children []*Node + Value interface{} + Kind Kind +} + +func NewNode(k Kind, v interface{}, ch ...*Node) *Node { + n := &Node{ + Kind: k, + Value: v, + } + for _, c := range ch { + Insert(n, c) + } + return n +} + +func (a *Node) Equal(b *Node) bool { + if a.Kind != b.Kind { + return false + } + if a.Value != b.Value { + return false + } + if len(a.Children) != len(b.Children) { + return false + } + for i, c := range a.Children { + if !c.Equal(b.Children[i]) { + return false + } + } + return true +} + +func (a *Node) String() string { + var buf bytes.Buffer + buf.WriteString(a.Kind.String()) + if a.Value != nil { + buf.WriteString(" =") + buf.WriteString(fmt.Sprintf("%v", a.Value)) + } + if len(a.Children) > 0 { + buf.WriteString(" [") + for i, c := range a.Children { + if i > 0 { + buf.WriteString(", ") + } + buf.WriteString(c.String()) + } + buf.WriteString("]") + } + return buf.String() +} + +func Insert(parent *Node, children ...*Node) { + parent.Children = append(parent.Children, children...) + for _, ch := range children { + ch.Parent = parent + } +} + +type List struct { + Not bool + Chars string +} + +type Range struct { + Not bool + Lo, Hi rune +} + +type Text struct { + Text string +} + +type Kind int + +const ( + KindNothing Kind = iota + KindPattern + KindList + KindRange + KindText + KindAny + KindSuper + KindSingle + KindAnyOf +) + +func (k Kind) String() string { + switch k { + case KindNothing: + return "Nothing" + case KindPattern: + return "Pattern" + case KindList: + return "List" + case KindRange: + return "Range" + case KindText: + return "Text" + case KindAny: + return "Any" + case KindSuper: + return "Super" + case KindSingle: + return "Single" + case KindAnyOf: + return "AnyOf" + default: + return "" + } +} diff --git a/vendor/github.com/gobwas/glob/syntax/ast/parser.go b/vendor/github.com/gobwas/glob/syntax/ast/parser.go new file mode 100644 index 000000000..429b40943 --- /dev/null +++ b/vendor/github.com/gobwas/glob/syntax/ast/parser.go @@ -0,0 +1,157 @@ +package ast + +import ( + "errors" + "fmt" + "github.com/gobwas/glob/syntax/lexer" + "unicode/utf8" +) + +type Lexer interface { + Next() lexer.Token +} + +type parseFn func(*Node, Lexer) (parseFn, *Node, error) + +func Parse(lexer Lexer) (*Node, error) { + var parser parseFn + + root := NewNode(KindPattern, nil) + + var ( + tree *Node + err error + ) + for parser, tree = parserMain, root; parser != nil; { + parser, tree, err = parser(tree, lexer) + if err != nil { + return nil, err + } + } + + return root, nil +} + +func parserMain(tree *Node, lex Lexer) (parseFn, *Node, error) { + for { + token := lex.Next() + switch token.Type { + case lexer.EOF: + return nil, tree, nil + + case lexer.Error: + return nil, tree, errors.New(token.Raw) + + case lexer.Text: + Insert(tree, NewNode(KindText, Text{token.Raw})) + return parserMain, tree, nil + + case lexer.Any: + Insert(tree, NewNode(KindAny, nil)) + return parserMain, tree, nil + + case lexer.Super: + Insert(tree, NewNode(KindSuper, nil)) + return parserMain, tree, nil + + case lexer.Single: + Insert(tree, NewNode(KindSingle, nil)) + return parserMain, tree, nil + + case lexer.RangeOpen: + return parserRange, tree, 
nil + + case lexer.TermsOpen: + a := NewNode(KindAnyOf, nil) + Insert(tree, a) + + p := NewNode(KindPattern, nil) + Insert(a, p) + + return parserMain, p, nil + + case lexer.Separator: + p := NewNode(KindPattern, nil) + Insert(tree.Parent, p) + + return parserMain, p, nil + + case lexer.TermsClose: + return parserMain, tree.Parent.Parent, nil + + default: + return nil, tree, fmt.Errorf("unexpected token: %s", token) + } + } + return nil, tree, fmt.Errorf("unknown error") +} + +func parserRange(tree *Node, lex Lexer) (parseFn, *Node, error) { + var ( + not bool + lo rune + hi rune + chars string + ) + for { + token := lex.Next() + switch token.Type { + case lexer.EOF: + return nil, tree, errors.New("unexpected end") + + case lexer.Error: + return nil, tree, errors.New(token.Raw) + + case lexer.Not: + not = true + + case lexer.RangeLo: + r, w := utf8.DecodeRuneInString(token.Raw) + if len(token.Raw) > w { + return nil, tree, fmt.Errorf("unexpected length of lo character") + } + lo = r + + case lexer.RangeBetween: + // + + case lexer.RangeHi: + r, w := utf8.DecodeRuneInString(token.Raw) + if len(token.Raw) > w { + return nil, tree, fmt.Errorf("unexpected length of lo character") + } + + hi = r + + if hi < lo { + return nil, tree, fmt.Errorf("hi character '%s' should be greater than lo '%s'", string(hi), string(lo)) + } + + case lexer.Text: + chars = token.Raw + + case lexer.RangeClose: + isRange := lo != 0 && hi != 0 + isChars := chars != "" + + if isChars == isRange { + return nil, tree, fmt.Errorf("could not parse range") + } + + if isRange { + Insert(tree, NewNode(KindRange, Range{ + Lo: lo, + Hi: hi, + Not: not, + })) + } else { + Insert(tree, NewNode(KindList, List{ + Chars: chars, + Not: not, + })) + } + + return parserMain, tree, nil + } + } +} diff --git a/vendor/github.com/gobwas/glob/syntax/lexer/lexer.go b/vendor/github.com/gobwas/glob/syntax/lexer/lexer.go new file mode 100644 index 000000000..a1c8d1962 --- /dev/null +++ b/vendor/github.com/gobwas/glob/syntax/lexer/lexer.go @@ -0,0 +1,273 @@ +package lexer + +import ( + "bytes" + "fmt" + "github.com/gobwas/glob/util/runes" + "unicode/utf8" +) + +const ( + char_any = '*' + char_comma = ',' + char_single = '?' + char_escape = '\\' + char_range_open = '[' + char_range_close = ']' + char_terms_open = '{' + char_terms_close = '}' + char_range_not = '!' 
+ char_range_between = '-' +) + +var specials = []byte{ + char_any, + char_single, + char_escape, + char_range_open, + char_range_close, + char_terms_open, + char_terms_close, +} + +func Special(c byte) bool { + return bytes.IndexByte(specials, c) != -1 +} + +type tokens []Token + +func (i *tokens) shift() (ret Token) { + ret = (*i)[0] + copy(*i, (*i)[1:]) + *i = (*i)[:len(*i)-1] + return +} + +func (i *tokens) push(v Token) { + *i = append(*i, v) +} + +func (i *tokens) empty() bool { + return len(*i) == 0 +} + +var eof rune = 0 + +type lexer struct { + data string + pos int + err error + + tokens tokens + termsLevel int + + lastRune rune + lastRuneSize int + hasRune bool +} + +func NewLexer(source string) *lexer { + l := &lexer{ + data: source, + tokens: tokens(make([]Token, 0, 4)), + } + return l +} + +func (l *lexer) Next() Token { + if l.err != nil { + return Token{Error, l.err.Error()} + } + if !l.tokens.empty() { + return l.tokens.shift() + } + + l.fetchItem() + return l.Next() +} + +func (l *lexer) peek() (r rune, w int) { + if l.pos == len(l.data) { + return eof, 0 + } + + r, w = utf8.DecodeRuneInString(l.data[l.pos:]) + if r == utf8.RuneError { + l.errorf("could not read rune") + r = eof + w = 0 + } + + return +} + +func (l *lexer) read() rune { + if l.hasRune { + l.hasRune = false + l.seek(l.lastRuneSize) + return l.lastRune + } + + r, s := l.peek() + l.seek(s) + + l.lastRune = r + l.lastRuneSize = s + + return r +} + +func (l *lexer) seek(w int) { + l.pos += w +} + +func (l *lexer) unread() { + if l.hasRune { + l.errorf("could not unread rune") + return + } + l.seek(-l.lastRuneSize) + l.hasRune = true +} + +func (l *lexer) errorf(f string, v ...interface{}) { + l.err = fmt.Errorf(f, v...) +} + +func (l *lexer) inTerms() bool { + return l.termsLevel > 0 +} + +func (l *lexer) termsEnter() { + l.termsLevel++ +} + +func (l *lexer) termsLeave() { + l.termsLevel-- +} + +var inTextBreakers = []rune{char_single, char_any, char_range_open, char_terms_open} +var inTermsBreakers = append(inTextBreakers, char_terms_close, char_comma) + +func (l *lexer) fetchItem() { + r := l.read() + switch { + case r == eof: + l.tokens.push(Token{EOF, ""}) + + case r == char_terms_open: + l.termsEnter() + l.tokens.push(Token{TermsOpen, string(r)}) + + case r == char_comma && l.inTerms(): + l.tokens.push(Token{Separator, string(r)}) + + case r == char_terms_close && l.inTerms(): + l.tokens.push(Token{TermsClose, string(r)}) + l.termsLeave() + + case r == char_range_open: + l.tokens.push(Token{RangeOpen, string(r)}) + l.fetchRange() + + case r == char_single: + l.tokens.push(Token{Single, string(r)}) + + case r == char_any: + if l.read() == char_any { + l.tokens.push(Token{Super, string(r) + string(r)}) + } else { + l.unread() + l.tokens.push(Token{Any, string(r)}) + } + + default: + l.unread() + + var breakers []rune + if l.inTerms() { + breakers = inTermsBreakers + } else { + breakers = inTextBreakers + } + l.fetchText(breakers) + } +} + +func (l *lexer) fetchRange() { + var wantHi bool + var wantClose bool + var seenNot bool + for { + r := l.read() + if r == eof { + l.errorf("unexpected end of input") + return + } + + if wantClose { + if r != char_range_close { + l.errorf("expected close range character") + } else { + l.tokens.push(Token{RangeClose, string(r)}) + } + return + } + + if wantHi { + l.tokens.push(Token{RangeHi, string(r)}) + wantClose = true + continue + } + + if !seenNot && r == char_range_not { + l.tokens.push(Token{Not, string(r)}) + seenNot = true + continue + } + + if n, w := l.peek(); n 
== char_range_between { + l.seek(w) + l.tokens.push(Token{RangeLo, string(r)}) + l.tokens.push(Token{RangeBetween, string(n)}) + wantHi = true + continue + } + + l.unread() // unread first peek and fetch as text + l.fetchText([]rune{char_range_close}) + wantClose = true + } +} + +func (l *lexer) fetchText(breakers []rune) { + var data []rune + var escaped bool + +reading: + for { + r := l.read() + if r == eof { + break + } + + if !escaped { + if r == char_escape { + escaped = true + continue + } + + if runes.IndexRune(breakers, r) != -1 { + l.unread() + break reading + } + } + + escaped = false + data = append(data, r) + } + + if len(data) > 0 { + l.tokens.push(Token{Text, string(data)}) + } +} diff --git a/vendor/github.com/gobwas/glob/syntax/lexer/token.go b/vendor/github.com/gobwas/glob/syntax/lexer/token.go new file mode 100644 index 000000000..2797c4e83 --- /dev/null +++ b/vendor/github.com/gobwas/glob/syntax/lexer/token.go @@ -0,0 +1,88 @@ +package lexer + +import "fmt" + +type TokenType int + +const ( + EOF TokenType = iota + Error + Text + Char + Any + Super + Single + Not + Separator + RangeOpen + RangeClose + RangeLo + RangeHi + RangeBetween + TermsOpen + TermsClose +) + +func (tt TokenType) String() string { + switch tt { + case EOF: + return "eof" + + case Error: + return "error" + + case Text: + return "text" + + case Char: + return "char" + + case Any: + return "any" + + case Super: + return "super" + + case Single: + return "single" + + case Not: + return "not" + + case Separator: + return "separator" + + case RangeOpen: + return "range_open" + + case RangeClose: + return "range_close" + + case RangeLo: + return "range_lo" + + case RangeHi: + return "range_hi" + + case RangeBetween: + return "range_between" + + case TermsOpen: + return "terms_open" + + case TermsClose: + return "terms_close" + + default: + return "undef" + } +} + +type Token struct { + Type TokenType + Raw string +} + +func (t Token) String() string { + return fmt.Sprintf("%v<%q>", t.Type, t.Raw) +} diff --git a/vendor/github.com/gobwas/glob/syntax/syntax.go b/vendor/github.com/gobwas/glob/syntax/syntax.go new file mode 100644 index 000000000..1d168b148 --- /dev/null +++ b/vendor/github.com/gobwas/glob/syntax/syntax.go @@ -0,0 +1,14 @@ +package syntax + +import ( + "github.com/gobwas/glob/syntax/ast" + "github.com/gobwas/glob/syntax/lexer" +) + +func Parse(s string) (*ast.Node, error) { + return ast.Parse(lexer.NewLexer(s)) +} + +func Special(b byte) bool { + return lexer.Special(b) +} diff --git a/vendor/github.com/gobwas/glob/util/runes/runes.go b/vendor/github.com/gobwas/glob/util/runes/runes.go new file mode 100644 index 000000000..a72355641 --- /dev/null +++ b/vendor/github.com/gobwas/glob/util/runes/runes.go @@ -0,0 +1,154 @@ +package runes + +func Index(s, needle []rune) int { + ls, ln := len(s), len(needle) + + switch { + case ln == 0: + return 0 + case ln == 1: + return IndexRune(s, needle[0]) + case ln == ls: + if Equal(s, needle) { + return 0 + } + return -1 + case ln > ls: + return -1 + } + +head: + for i := 0; i < ls && ls-i >= ln; i++ { + for y := 0; y < ln; y++ { + if s[i+y] != needle[y] { + continue head + } + } + + return i + } + + return -1 +} + +func LastIndex(s, needle []rune) int { + ls, ln := len(s), len(needle) + + switch { + case ln == 0: + if ls == 0 { + return 0 + } + return ls + case ln == 1: + return IndexLastRune(s, needle[0]) + case ln == ls: + if Equal(s, needle) { + return 0 + } + return -1 + case ln > ls: + return -1 + } + +head: + for i := ls - 1; i >= 0 && i >= ln; i-- { + 
for y := ln - 1; y >= 0; y-- { + if s[i-(ln-y-1)] != needle[y] { + continue head + } + } + + return i - ln + 1 + } + + return -1 +} + +// IndexAny returns the index of the first instance of any Unicode code point +// from chars in s, or -1 if no Unicode code point from chars is present in s. +func IndexAny(s, chars []rune) int { + if len(chars) > 0 { + for i, c := range s { + for _, m := range chars { + if c == m { + return i + } + } + } + } + return -1 +} + +func Contains(s, needle []rune) bool { + return Index(s, needle) >= 0 +} + +func Max(s []rune) (max rune) { + for _, r := range s { + if r > max { + max = r + } + } + + return +} + +func Min(s []rune) rune { + min := rune(-1) + for _, r := range s { + if min == -1 { + min = r + continue + } + + if r < min { + min = r + } + } + + return min +} + +func IndexRune(s []rune, r rune) int { + for i, c := range s { + if c == r { + return i + } + } + return -1 +} + +func IndexLastRune(s []rune, r rune) int { + for i := len(s) - 1; i >= 0; i-- { + if s[i] == r { + return i + } + } + + return -1 +} + +func Equal(a, b []rune) bool { + if len(a) == len(b) { + for i := 0; i < len(a); i++ { + if a[i] != b[i] { + return false + } + } + + return true + } + + return false +} + +// HasPrefix tests whether the string s begins with prefix. +func HasPrefix(s, prefix []rune) bool { + return len(s) >= len(prefix) && Equal(s[0:len(prefix)], prefix) +} + +// HasSuffix tests whether the string s ends with suffix. +func HasSuffix(s, suffix []rune) bool { + return len(s) >= len(suffix) && Equal(s[len(s)-len(suffix):], suffix) +} diff --git a/vendor/github.com/gobwas/glob/util/strings/strings.go b/vendor/github.com/gobwas/glob/util/strings/strings.go new file mode 100644 index 000000000..e8ee1920b --- /dev/null +++ b/vendor/github.com/gobwas/glob/util/strings/strings.go @@ -0,0 +1,39 @@ +package strings + +import ( + "strings" + "unicode/utf8" +) + +func IndexAnyRunes(s string, rs []rune) int { + for _, r := range rs { + if i := strings.IndexRune(s, r); i != -1 { + return i + } + } + + return -1 +} + +func LastIndexAnyRunes(s string, rs []rune) int { + for _, r := range rs { + i := -1 + if 0 <= r && r < utf8.RuneSelf { + i = strings.LastIndexByte(s, byte(r)) + } else { + sub := s + for len(sub) > 0 { + j := strings.IndexRune(s, r) + if j == -1 { + break + } + i = j + sub = sub[i+1:] + } + } + if i != -1 { + return i + } + } + return -1 +} diff --git a/vendor/github.com/gofrs/flock/.gitignore b/vendor/github.com/gofrs/flock/.gitignore new file mode 100644 index 000000000..daf913b1b --- /dev/null +++ b/vendor/github.com/gofrs/flock/.gitignore @@ -0,0 +1,24 @@ +# Compiled Object files, Static and Dynamic libs (Shared Objects) +*.o +*.a +*.so + +# Folders +_obj +_test + +# Architecture specific extensions/prefixes +*.[568vq] +[568vq].out + +*.cgo1.go +*.cgo2.c +_cgo_defun.c +_cgo_gotypes.go +_cgo_export.* + +_testmain.go + +*.exe +*.test +*.prof diff --git a/vendor/github.com/gofrs/flock/.travis.yml b/vendor/github.com/gofrs/flock/.travis.yml new file mode 100644 index 000000000..b16d040fa --- /dev/null +++ b/vendor/github.com/gofrs/flock/.travis.yml @@ -0,0 +1,10 @@ +language: go +go: + - 1.14.x + - 1.15.x +script: go test -v -check.vv -race ./... 
+sudo: false +notifications: + email: + on_success: never + on_failure: always diff --git a/vendor/github.com/gofrs/flock/LICENSE b/vendor/github.com/gofrs/flock/LICENSE new file mode 100644 index 000000000..8b8ff36fe --- /dev/null +++ b/vendor/github.com/gofrs/flock/LICENSE @@ -0,0 +1,27 @@ +Copyright (c) 2015-2020, Tim Heckman +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +* Neither the name of gofrs nor the names of its contributors may be used + to endorse or promote products derived from this software without + specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/github.com/gofrs/flock/README.md b/vendor/github.com/gofrs/flock/README.md new file mode 100644 index 000000000..71ce63692 --- /dev/null +++ b/vendor/github.com/gofrs/flock/README.md @@ -0,0 +1,41 @@ +# flock +[![TravisCI Build Status](https://img.shields.io/travis/gofrs/flock/master.svg?style=flat)](https://travis-ci.org/gofrs/flock) +[![GoDoc](https://img.shields.io/badge/godoc-flock-blue.svg?style=flat)](https://godoc.org/github.com/gofrs/flock) +[![License](https://img.shields.io/badge/license-BSD_3--Clause-brightgreen.svg?style=flat)](https://github.com/gofrs/flock/blob/master/LICENSE) +[![Go Report Card](https://goreportcard.com/badge/github.com/gofrs/flock)](https://goreportcard.com/report/github.com/gofrs/flock) + +`flock` implements a thread-safe sync.Locker interface for file locking. It also +includes a non-blocking TryLock() function to allow locking without blocking execution. + +## License +`flock` is released under the BSD 3-Clause License. See the `LICENSE` file for more details. + +## Go Compatibility +This package makes use of the `context` package that was introduced in Go 1.7. As such, this +package has an implicit dependency on Go 1.7+. + +## Installation +``` +go get -u github.com/gofrs/flock +``` + +## Usage +```Go +import "github.com/gofrs/flock" + +fileLock := flock.New("/var/lock/go-lock.lock") + +locked, err := fileLock.TryLock() + +if err != nil { + // handle locking error +} + +if locked { + // do work + fileLock.Unlock() +} +``` + +For more detailed usage information take a look at the package API docs on +[GoDoc](https://godoc.org/github.com/gofrs/flock). 
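Beyond the plain `TryLock()` snippet in the README above, the vendored package also exposes context-based retrying via `TryLockContext` (its implementation appears later in this diff). A minimal usage sketch, assuming that API and a hypothetical lock path:

```Go
package main

import (
	"context"
	"fmt"
	"time"

	"github.com/gofrs/flock"
)

func main() {
	fileLock := flock.New("/var/lock/go-lock.lock")

	// Give up after 5 seconds, retrying the non-blocking TryLock every 100ms.
	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
	defer cancel()

	locked, err := fileLock.TryLockContext(ctx, 100*time.Millisecond)
	if err != nil {
		fmt.Println("could not acquire lock:", err)
		return
	}
	if locked {
		defer fileLock.Unlock()
		// do work while holding the exclusive lock
	}
}
```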
diff --git a/vendor/github.com/gofrs/flock/appveyor.yml b/vendor/github.com/gofrs/flock/appveyor.yml new file mode 100644 index 000000000..909b4bf7c --- /dev/null +++ b/vendor/github.com/gofrs/flock/appveyor.yml @@ -0,0 +1,25 @@ +version: '{build}' + +build: false +deploy: false + +clone_folder: 'c:\gopath\src\github.com\gofrs\flock' + +environment: + GOPATH: 'c:\gopath' + GOVERSION: '1.15' + +init: + - git config --global core.autocrlf input + +install: + - rmdir c:\go /s /q + - appveyor DownloadFile https://storage.googleapis.com/golang/go%GOVERSION%.windows-amd64.msi + - msiexec /i go%GOVERSION%.windows-amd64.msi /q + - set Path=c:\go\bin;c:\gopath\bin;%Path% + - go version + - go env + +test_script: + - go get -t ./... + - go test -race -v ./... diff --git a/vendor/github.com/gofrs/flock/flock.go b/vendor/github.com/gofrs/flock/flock.go new file mode 100644 index 000000000..95c784ca5 --- /dev/null +++ b/vendor/github.com/gofrs/flock/flock.go @@ -0,0 +1,144 @@ +// Copyright 2015 Tim Heckman. All rights reserved. +// Use of this source code is governed by the BSD 3-Clause +// license that can be found in the LICENSE file. + +// Package flock implements a thread-safe interface for file locking. +// It also includes a non-blocking TryLock() function to allow locking +// without blocking execution. +// +// Package flock is released under the BSD 3-Clause License. See the LICENSE file +// for more details. +// +// While using this library, remember that the locking behaviors are not +// guaranteed to be the same on each platform. For example, some UNIX-like +// operating systems will transparently convert a shared lock to an exclusive +// lock. If you Unlock() the flock from a location where you believe that you +// have the shared lock, you may accidentally drop the exclusive lock. +package flock + +import ( + "context" + "os" + "runtime" + "sync" + "time" +) + +// Flock is the struct type to handle file locking. All fields are unexported, +// with access to some of the fields provided by getter methods (Path() and Locked()). +type Flock struct { + path string + m sync.RWMutex + fh *os.File + l bool + r bool +} + +// New returns a new instance of *Flock. The only parameter +// it takes is the path to the desired lockfile. +func New(path string) *Flock { + return &Flock{path: path} +} + +// NewFlock returns a new instance of *Flock. The only parameter +// it takes is the path to the desired lockfile. +// +// Deprecated: Use New instead. +func NewFlock(path string) *Flock { + return New(path) +} + +// Close is equivalent to calling Unlock. +// +// This will release the lock and close the underlying file descriptor. +// It will not remove the file from disk, that's up to your application. +func (f *Flock) Close() error { + return f.Unlock() +} + +// Path returns the path as provided in NewFlock(). +func (f *Flock) Path() string { + return f.path +} + +// Locked returns the lock state (locked: true, unlocked: false). +// +// Warning: by the time you use the returned value, the state may have changed. +func (f *Flock) Locked() bool { + f.m.RLock() + defer f.m.RUnlock() + return f.l +} + +// RLocked returns the read lock state (locked: true, unlocked: false). +// +// Warning: by the time you use the returned value, the state may have changed. 
+func (f *Flock) RLocked() bool { + f.m.RLock() + defer f.m.RUnlock() + return f.r +} + +func (f *Flock) String() string { + return f.path +} + +// TryLockContext repeatedly tries to take an exclusive lock until one of the +// conditions is met: TryLock succeeds, TryLock fails with error, or Context +// Done channel is closed. +func (f *Flock) TryLockContext(ctx context.Context, retryDelay time.Duration) (bool, error) { + return tryCtx(ctx, f.TryLock, retryDelay) +} + +// TryRLockContext repeatedly tries to take a shared lock until one of the +// conditions is met: TryRLock succeeds, TryRLock fails with error, or Context +// Done channel is closed. +func (f *Flock) TryRLockContext(ctx context.Context, retryDelay time.Duration) (bool, error) { + return tryCtx(ctx, f.TryRLock, retryDelay) +} + +func tryCtx(ctx context.Context, fn func() (bool, error), retryDelay time.Duration) (bool, error) { + if ctx.Err() != nil { + return false, ctx.Err() + } + for { + if ok, err := fn(); ok || err != nil { + return ok, err + } + select { + case <-ctx.Done(): + return false, ctx.Err() + case <-time.After(retryDelay): + // try again + } + } +} + +func (f *Flock) setFh() error { + // open a new os.File instance + // create it if it doesn't exist, and open the file read-only. + flags := os.O_CREATE + if runtime.GOOS == "aix" { + // AIX cannot preform write-lock (ie exclusive) on a + // read-only file. + flags |= os.O_RDWR + } else { + flags |= os.O_RDONLY + } + fh, err := os.OpenFile(f.path, flags, os.FileMode(0600)) + if err != nil { + return err + } + + // set the filehandle on the struct + f.fh = fh + return nil +} + +// ensure the file handle is closed if no lock is held +func (f *Flock) ensureFhState() { + if !f.l && !f.r && f.fh != nil { + f.fh.Close() + f.fh = nil + } +} diff --git a/vendor/github.com/gofrs/flock/flock_aix.go b/vendor/github.com/gofrs/flock/flock_aix.go new file mode 100644 index 000000000..7277c1b6b --- /dev/null +++ b/vendor/github.com/gofrs/flock/flock_aix.go @@ -0,0 +1,281 @@ +// Copyright 2019 Tim Heckman. All rights reserved. Use of this source code is +// governed by the BSD 3-Clause license that can be found in the LICENSE file. + +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// This code implements the filelock API using POSIX 'fcntl' locks, which attach +// to an (inode, process) pair rather than a file descriptor. To avoid unlocking +// files prematurely when the same file is opened through different descriptors, +// we allow only one read-lock at a time. +// +// This code is adapted from the Go package: +// cmd/go/internal/lockedfile/internal/filelock + +//+build aix + +package flock + +import ( + "errors" + "io" + "os" + "sync" + "syscall" + + "golang.org/x/sys/unix" +) + +type lockType int16 + +const ( + readLock lockType = unix.F_RDLCK + writeLock lockType = unix.F_WRLCK +) + +type cmdType int + +const ( + tryLock cmdType = unix.F_SETLK + waitLock cmdType = unix.F_SETLKW +) + +type inode = uint64 + +type inodeLock struct { + owner *Flock + queue []<-chan *Flock +} + +var ( + mu sync.Mutex + inodes = map[*Flock]inode{} + locks = map[inode]inodeLock{} +) + +// Lock is a blocking call to try and take an exclusive file lock. It will wait +// until it is able to obtain the exclusive file lock. It's recommended that +// TryLock() be used over this function. 
This function may block the ability to +// query the current Locked() or RLocked() status due to a RW-mutex lock. +// +// If we are already exclusive-locked, this function short-circuits and returns +// immediately assuming it can take the mutex lock. +// +// If the *Flock has a shared lock (RLock), this may transparently replace the +// shared lock with an exclusive lock on some UNIX-like operating systems. Be +// careful when using exclusive locks in conjunction with shared locks +// (RLock()), because calling Unlock() may accidentally release the exclusive +// lock that was once a shared lock. +func (f *Flock) Lock() error { + return f.lock(&f.l, writeLock) +} + +// RLock is a blocking call to try and take a shared file lock. It will wait +// until it is able to obtain the shared file lock. It's recommended that +// TryRLock() be used over this function. This function may block the ability to +// query the current Locked() or RLocked() status due to a RW-mutex lock. +// +// If we are already shared-locked, this function short-circuits and returns +// immediately assuming it can take the mutex lock. +func (f *Flock) RLock() error { + return f.lock(&f.r, readLock) +} + +func (f *Flock) lock(locked *bool, flag lockType) error { + f.m.Lock() + defer f.m.Unlock() + + if *locked { + return nil + } + + if f.fh == nil { + if err := f.setFh(); err != nil { + return err + } + defer f.ensureFhState() + } + + if _, err := f.doLock(waitLock, flag, true); err != nil { + return err + } + + *locked = true + return nil +} + +func (f *Flock) doLock(cmd cmdType, lt lockType, blocking bool) (bool, error) { + // POSIX locks apply per inode and process, and the lock for an inode is + // released when *any* descriptor for that inode is closed. So we need to + // synchronize access to each inode internally, and must serialize lock and + // unlock calls that refer to the same inode through different descriptors. + fi, err := f.fh.Stat() + if err != nil { + return false, err + } + ino := inode(fi.Sys().(*syscall.Stat_t).Ino) + + mu.Lock() + if i, dup := inodes[f]; dup && i != ino { + mu.Unlock() + return false, &os.PathError{ + Path: f.Path(), + Err: errors.New("inode for file changed since last Lock or RLock"), + } + } + + inodes[f] = ino + + var wait chan *Flock + l := locks[ino] + if l.owner == f { + // This file already owns the lock, but the call may change its lock type. + } else if l.owner == nil { + // No owner: it's ours now. + l.owner = f + } else if !blocking { + // Already owned: cannot take the lock. + mu.Unlock() + return false, nil + } else { + // Already owned: add a channel to wait on. 
+ wait = make(chan *Flock) + l.queue = append(l.queue, wait) + } + locks[ino] = l + mu.Unlock() + + if wait != nil { + wait <- f + } + + err = setlkw(f.fh.Fd(), cmd, lt) + + if err != nil { + f.doUnlock() + if cmd == tryLock && err == unix.EACCES { + return false, nil + } + return false, err + } + + return true, nil +} + +func (f *Flock) Unlock() error { + f.m.Lock() + defer f.m.Unlock() + + // if we aren't locked or if the lockfile instance is nil + // just return a nil error because we are unlocked + if (!f.l && !f.r) || f.fh == nil { + return nil + } + + if err := f.doUnlock(); err != nil { + return err + } + + f.fh.Close() + + f.l = false + f.r = false + f.fh = nil + + return nil +} + +func (f *Flock) doUnlock() (err error) { + var owner *Flock + mu.Lock() + ino, ok := inodes[f] + if ok { + owner = locks[ino].owner + } + mu.Unlock() + + if owner == f { + err = setlkw(f.fh.Fd(), waitLock, unix.F_UNLCK) + } + + mu.Lock() + l := locks[ino] + if len(l.queue) == 0 { + // No waiters: remove the map entry. + delete(locks, ino) + } else { + // The first waiter is sending us their file now. + // Receive it and update the queue. + l.owner = <-l.queue[0] + l.queue = l.queue[1:] + locks[ino] = l + } + delete(inodes, f) + mu.Unlock() + + return err +} + +// TryLock is the preferred function for taking an exclusive file lock. This +// function takes an RW-mutex lock before it tries to lock the file, so there is +// the possibility that this function may block for a short time if another +// goroutine is trying to take any action. +// +// The actual file lock is non-blocking. If we are unable to get the exclusive +// file lock, the function will return false instead of waiting for the lock. If +// we get the lock, we also set the *Flock instance as being exclusive-locked. +func (f *Flock) TryLock() (bool, error) { + return f.try(&f.l, writeLock) +} + +// TryRLock is the preferred function for taking a shared file lock. This +// function takes an RW-mutex lock before it tries to lock the file, so there is +// the possibility that this function may block for a short time if another +// goroutine is trying to take any action. +// +// The actual file lock is non-blocking. If we are unable to get the shared file +// lock, the function will return false instead of waiting for the lock. If we +// get the lock, we also set the *Flock instance as being share-locked. +func (f *Flock) TryRLock() (bool, error) { + return f.try(&f.r, readLock) +} + +func (f *Flock) try(locked *bool, flag lockType) (bool, error) { + f.m.Lock() + defer f.m.Unlock() + + if *locked { + return true, nil + } + + if f.fh == nil { + if err := f.setFh(); err != nil { + return false, err + } + defer f.ensureFhState() + } + + haslock, err := f.doLock(tryLock, flag, false) + if err != nil { + return false, err + } + + *locked = haslock + return haslock, nil +} + +// setlkw calls FcntlFlock with cmd for the entire file indicated by fd. +func setlkw(fd uintptr, cmd cmdType, lt lockType) error { + for { + err := unix.FcntlFlock(fd, int(cmd), &unix.Flock_t{ + Type: int16(lt), + Whence: io.SeekStart, + Start: 0, + Len: 0, // All bytes. + }) + if err != unix.EINTR { + return err + } + } +} diff --git a/vendor/github.com/gofrs/flock/flock_unix.go b/vendor/github.com/gofrs/flock/flock_unix.go new file mode 100644 index 000000000..c315a3e29 --- /dev/null +++ b/vendor/github.com/gofrs/flock/flock_unix.go @@ -0,0 +1,197 @@ +// Copyright 2015 Tim Heckman. All rights reserved. 
+// Use of this source code is governed by the BSD 3-Clause +// license that can be found in the LICENSE file. + +// +build !aix,!windows + +package flock + +import ( + "os" + "syscall" +) + +// Lock is a blocking call to try and take an exclusive file lock. It will wait +// until it is able to obtain the exclusive file lock. It's recommended that +// TryLock() be used over this function. This function may block the ability to +// query the current Locked() or RLocked() status due to a RW-mutex lock. +// +// If we are already exclusive-locked, this function short-circuits and returns +// immediately assuming it can take the mutex lock. +// +// If the *Flock has a shared lock (RLock), this may transparently replace the +// shared lock with an exclusive lock on some UNIX-like operating systems. Be +// careful when using exclusive locks in conjunction with shared locks +// (RLock()), because calling Unlock() may accidentally release the exclusive +// lock that was once a shared lock. +func (f *Flock) Lock() error { + return f.lock(&f.l, syscall.LOCK_EX) +} + +// RLock is a blocking call to try and take a shared file lock. It will wait +// until it is able to obtain the shared file lock. It's recommended that +// TryRLock() be used over this function. This function may block the ability to +// query the current Locked() or RLocked() status due to a RW-mutex lock. +// +// If we are already shared-locked, this function short-circuits and returns +// immediately assuming it can take the mutex lock. +func (f *Flock) RLock() error { + return f.lock(&f.r, syscall.LOCK_SH) +} + +func (f *Flock) lock(locked *bool, flag int) error { + f.m.Lock() + defer f.m.Unlock() + + if *locked { + return nil + } + + if f.fh == nil { + if err := f.setFh(); err != nil { + return err + } + defer f.ensureFhState() + } + + if err := syscall.Flock(int(f.fh.Fd()), flag); err != nil { + shouldRetry, reopenErr := f.reopenFDOnError(err) + if reopenErr != nil { + return reopenErr + } + + if !shouldRetry { + return err + } + + if err = syscall.Flock(int(f.fh.Fd()), flag); err != nil { + return err + } + } + + *locked = true + return nil +} + +// Unlock is a function to unlock the file. This file takes a RW-mutex lock, so +// while it is running the Locked() and RLocked() functions will be blocked. +// +// This function short-circuits if we are unlocked already. If not, it calls +// syscall.LOCK_UN on the file and closes the file descriptor. It does not +// remove the file from disk. It's up to your application to do. +// +// Please note, if your shared lock became an exclusive lock this may +// unintentionally drop the exclusive lock if called by the consumer that +// believes they have a shared lock. Please see Lock() for more details. +func (f *Flock) Unlock() error { + f.m.Lock() + defer f.m.Unlock() + + // if we aren't locked or if the lockfile instance is nil + // just return a nil error because we are unlocked + if (!f.l && !f.r) || f.fh == nil { + return nil + } + + // mark the file as unlocked + if err := syscall.Flock(int(f.fh.Fd()), syscall.LOCK_UN); err != nil { + return err + } + + f.fh.Close() + + f.l = false + f.r = false + f.fh = nil + + return nil +} + +// TryLock is the preferred function for taking an exclusive file lock. This +// function takes an RW-mutex lock before it tries to lock the file, so there is +// the possibility that this function may block for a short time if another +// goroutine is trying to take any action. +// +// The actual file lock is non-blocking. 
If we are unable to get the exclusive +// file lock, the function will return false instead of waiting for the lock. If +// we get the lock, we also set the *Flock instance as being exclusive-locked. +func (f *Flock) TryLock() (bool, error) { + return f.try(&f.l, syscall.LOCK_EX) +} + +// TryRLock is the preferred function for taking a shared file lock. This +// function takes an RW-mutex lock before it tries to lock the file, so there is +// the possibility that this function may block for a short time if another +// goroutine is trying to take any action. +// +// The actual file lock is non-blocking. If we are unable to get the shared file +// lock, the function will return false instead of waiting for the lock. If we +// get the lock, we also set the *Flock instance as being share-locked. +func (f *Flock) TryRLock() (bool, error) { + return f.try(&f.r, syscall.LOCK_SH) +} + +func (f *Flock) try(locked *bool, flag int) (bool, error) { + f.m.Lock() + defer f.m.Unlock() + + if *locked { + return true, nil + } + + if f.fh == nil { + if err := f.setFh(); err != nil { + return false, err + } + defer f.ensureFhState() + } + + var retried bool +retry: + err := syscall.Flock(int(f.fh.Fd()), flag|syscall.LOCK_NB) + + switch err { + case syscall.EWOULDBLOCK: + return false, nil + case nil: + *locked = true + return true, nil + } + if !retried { + if shouldRetry, reopenErr := f.reopenFDOnError(err); reopenErr != nil { + return false, reopenErr + } else if shouldRetry { + retried = true + goto retry + } + } + + return false, err +} + +// reopenFDOnError determines whether we should reopen the file handle +// in readwrite mode and try again. This comes from util-linux/sys-utils/flock.c: +// Since Linux 3.4 (commit 55725513) +// Probably NFSv4 where flock() is emulated by fcntl(). +func (f *Flock) reopenFDOnError(err error) (bool, error) { + if err != syscall.EIO && err != syscall.EBADF { + return false, nil + } + if st, err := f.fh.Stat(); err == nil { + // if the file is able to be read and written + if st.Mode()&0600 == 0600 { + f.fh.Close() + f.fh = nil + + // reopen in read-write mode and set the filehandle + fh, err := os.OpenFile(f.path, os.O_CREATE|os.O_RDWR, os.FileMode(0600)) + if err != nil { + return false, err + } + f.fh = fh + return true, nil + } + } + + return false, nil +} diff --git a/vendor/github.com/gofrs/flock/flock_winapi.go b/vendor/github.com/gofrs/flock/flock_winapi.go new file mode 100644 index 000000000..fe405a255 --- /dev/null +++ b/vendor/github.com/gofrs/flock/flock_winapi.go @@ -0,0 +1,76 @@ +// Copyright 2015 Tim Heckman. All rights reserved. +// Use of this source code is governed by the BSD 3-Clause +// license that can be found in the LICENSE file. + +// +build windows + +package flock + +import ( + "syscall" + "unsafe" +) + +var ( + kernel32, _ = syscall.LoadLibrary("kernel32.dll") + procLockFileEx, _ = syscall.GetProcAddress(kernel32, "LockFileEx") + procUnlockFileEx, _ = syscall.GetProcAddress(kernel32, "UnlockFileEx") +) + +const ( + winLockfileFailImmediately = 0x00000001 + winLockfileExclusiveLock = 0x00000002 + winLockfileSharedLock = 0x00000000 +) + +// Use of 0x00000000 for the shared lock is a guess based on some the MS Windows +// `LockFileEX` docs, which document the `LOCKFILE_EXCLUSIVE_LOCK` flag as: +// +// > The function requests an exclusive lock. Otherwise, it requests a shared +// > lock. 
+// +// https://msdn.microsoft.com/en-us/library/windows/desktop/aa365203(v=vs.85).aspx + +func lockFileEx(handle syscall.Handle, flags uint32, reserved uint32, numberOfBytesToLockLow uint32, numberOfBytesToLockHigh uint32, offset *syscall.Overlapped) (bool, syscall.Errno) { + r1, _, errNo := syscall.Syscall6( + uintptr(procLockFileEx), + 6, + uintptr(handle), + uintptr(flags), + uintptr(reserved), + uintptr(numberOfBytesToLockLow), + uintptr(numberOfBytesToLockHigh), + uintptr(unsafe.Pointer(offset))) + + if r1 != 1 { + if errNo == 0 { + return false, syscall.EINVAL + } + + return false, errNo + } + + return true, 0 +} + +func unlockFileEx(handle syscall.Handle, reserved uint32, numberOfBytesToLockLow uint32, numberOfBytesToLockHigh uint32, offset *syscall.Overlapped) (bool, syscall.Errno) { + r1, _, errNo := syscall.Syscall6( + uintptr(procUnlockFileEx), + 5, + uintptr(handle), + uintptr(reserved), + uintptr(numberOfBytesToLockLow), + uintptr(numberOfBytesToLockHigh), + uintptr(unsafe.Pointer(offset)), + 0) + + if r1 != 1 { + if errNo == 0 { + return false, syscall.EINVAL + } + + return false, errNo + } + + return true, 0 +} diff --git a/vendor/github.com/gofrs/flock/flock_windows.go b/vendor/github.com/gofrs/flock/flock_windows.go new file mode 100644 index 000000000..ddb534cce --- /dev/null +++ b/vendor/github.com/gofrs/flock/flock_windows.go @@ -0,0 +1,142 @@ +// Copyright 2015 Tim Heckman. All rights reserved. +// Use of this source code is governed by the BSD 3-Clause +// license that can be found in the LICENSE file. + +package flock + +import ( + "syscall" +) + +// ErrorLockViolation is the error code returned from the Windows syscall when a +// lock would block and you ask to fail immediately. +const ErrorLockViolation syscall.Errno = 0x21 // 33 + +// Lock is a blocking call to try and take an exclusive file lock. It will wait +// until it is able to obtain the exclusive file lock. It's recommended that +// TryLock() be used over this function. This function may block the ability to +// query the current Locked() or RLocked() status due to a RW-mutex lock. +// +// If we are already locked, this function short-circuits and returns +// immediately assuming it can take the mutex lock. +func (f *Flock) Lock() error { + return f.lock(&f.l, winLockfileExclusiveLock) +} + +// RLock is a blocking call to try and take a shared file lock. It will wait +// until it is able to obtain the shared file lock. It's recommended that +// TryRLock() be used over this function. This function may block the ability to +// query the current Locked() or RLocked() status due to a RW-mutex lock. +// +// If we are already locked, this function short-circuits and returns +// immediately assuming it can take the mutex lock. +func (f *Flock) RLock() error { + return f.lock(&f.r, winLockfileSharedLock) +} + +func (f *Flock) lock(locked *bool, flag uint32) error { + f.m.Lock() + defer f.m.Unlock() + + if *locked { + return nil + } + + if f.fh == nil { + if err := f.setFh(); err != nil { + return err + } + defer f.ensureFhState() + } + + if _, errNo := lockFileEx(syscall.Handle(f.fh.Fd()), flag, 0, 1, 0, &syscall.Overlapped{}); errNo > 0 { + return errNo + } + + *locked = true + return nil +} + +// Unlock is a function to unlock the file. This file takes a RW-mutex lock, so +// while it is running the Locked() and RLocked() functions will be blocked. +// +// This function short-circuits if we are unlocked already. If not, it calls +// UnlockFileEx() on the file and closes the file descriptor. 
It does not remove +// the file from disk. It's up to your application to do. +func (f *Flock) Unlock() error { + f.m.Lock() + defer f.m.Unlock() + + // if we aren't locked or if the lockfile instance is nil + // just return a nil error because we are unlocked + if (!f.l && !f.r) || f.fh == nil { + return nil + } + + // mark the file as unlocked + if _, errNo := unlockFileEx(syscall.Handle(f.fh.Fd()), 0, 1, 0, &syscall.Overlapped{}); errNo > 0 { + return errNo + } + + f.fh.Close() + + f.l = false + f.r = false + f.fh = nil + + return nil +} + +// TryLock is the preferred function for taking an exclusive file lock. This +// function does take a RW-mutex lock before it tries to lock the file, so there +// is the possibility that this function may block for a short time if another +// goroutine is trying to take any action. +// +// The actual file lock is non-blocking. If we are unable to get the exclusive +// file lock, the function will return false instead of waiting for the lock. If +// we get the lock, we also set the *Flock instance as being exclusive-locked. +func (f *Flock) TryLock() (bool, error) { + return f.try(&f.l, winLockfileExclusiveLock) +} + +// TryRLock is the preferred function for taking a shared file lock. This +// function does take a RW-mutex lock before it tries to lock the file, so there +// is the possibility that this function may block for a short time if another +// goroutine is trying to take any action. +// +// The actual file lock is non-blocking. If we are unable to get the shared file +// lock, the function will return false instead of waiting for the lock. If we +// get the lock, we also set the *Flock instance as being shared-locked. +func (f *Flock) TryRLock() (bool, error) { + return f.try(&f.r, winLockfileSharedLock) +} + +func (f *Flock) try(locked *bool, flag uint32) (bool, error) { + f.m.Lock() + defer f.m.Unlock() + + if *locked { + return true, nil + } + + if f.fh == nil { + if err := f.setFh(); err != nil { + return false, err + } + defer f.ensureFhState() + } + + _, errNo := lockFileEx(syscall.Handle(f.fh.Fd()), flag|winLockfileFailImmediately, 0, 1, 0, &syscall.Overlapped{}) + + if errNo > 0 { + if errNo == ErrorLockViolation || errNo == syscall.ERROR_IO_PENDING { + return false, nil + } + + return false, errNo + } + + *locked = true + + return true, nil +} diff --git a/vendor/github.com/golangci/check/LICENSE b/vendor/github.com/golangci/check/LICENSE new file mode 100644 index 000000000..5a1774b8e --- /dev/null +++ b/vendor/github.com/golangci/check/LICENSE @@ -0,0 +1,674 @@ +GNU GENERAL PUBLIC LICENSE + Version 3, 29 June 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. {http://fsf.org/} + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The GNU General Public License is a free, copyleft license for +software and other kinds of works. + + The licenses for most software and other practical works are designed +to take away your freedom to share and change the works. By contrast, +the GNU General Public License is intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. We, the Free Software Foundation, use the +GNU General Public License for most of our software; it applies also to +any other work released this way by its authors. You can apply it to +your programs, too. + + When we speak of free software, we are referring to freedom, not +price. 
Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. + + To protect your rights, we need to prevent others from denying you +these rights or asking you to surrender the rights. Therefore, you have +certain responsibilities if you distribute copies of the software, or if +you modify it: responsibilities to respect the freedom of others. + + For example, if you distribute copies of such a program, whether +gratis or for a fee, you must pass on to the recipients the same +freedoms that you received. You must make sure that they, too, receive +or can get the source code. And you must show them these terms so they +know their rights. + + Developers that use the GNU GPL protect your rights with two steps: +(1) assert copyright on the software, and (2) offer you this License +giving you legal permission to copy, distribute and/or modify it. + + For the developers' and authors' protection, the GPL clearly explains +that there is no warranty for this free software. For both users' and +authors' sake, the GPL requires that modified versions be marked as +changed, so that their problems will not be attributed erroneously to +authors of previous versions. + + Some devices are designed to deny users access to install or run +modified versions of the software inside them, although the manufacturer +can do so. This is fundamentally incompatible with the aim of +protecting users' freedom to change the software. The systematic +pattern of such abuse occurs in the area of products for individuals to +use, which is precisely where it is most unacceptable. Therefore, we +have designed this version of the GPL to prohibit the practice for those +products. If such problems arise substantially in other domains, we +stand ready to extend this provision to those domains in future versions +of the GPL, as needed to protect the freedom of users. + + Finally, every program is threatened constantly by software patents. +States should not allow patents to restrict development and use of +software on general-purpose computers, but in those that do, we wish to +avoid the special danger that patents applied to a free program could +make it effectively proprietary. To prevent this, the GPL assures that +patents cannot be used to render the program non-free. + + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. + + "This License" refers to version 3 of the GNU General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. 
+ + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. + + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. + + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. + + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. 
This License explicitly affirms your unlimited +permission to run the unmodified Program. The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. + + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. + + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. + + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. 
This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. + + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. + + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. + + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. 
+ + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. + + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. + + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. + + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. + + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. 
+ + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. + + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. + + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. 
+ + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. + + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. + + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. 
+ + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. + + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. + + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. 
If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Use with the GNU Affero General Public License. + + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU Affero General Public License into a single +combined work, and to convey the resulting work. The terms of this +License will continue to apply to the part which is the covered work, +but the special requirements of the GNU Affero General Public License, +section 13, concerning interaction through a network will apply to the +combination as such. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU General Public License from time to time. Such new versions will +be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU General Public License, you may choose any version ever published +by the Free Software Foundation. + + If the Program specifies that a proxy can decide which future +versions of the GNU General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. + + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS +THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY +GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE +USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF +DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD +PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), +EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF +SUCH DAMAGES. + + 17. 
Interpretation of Sections 15 and 16. + + If the disclaimer of warranty and limitation of liability provided +above cannot be given local legal effect according to their terms, +reviewing courts shall apply local law that most closely approximates +an absolute waiver of all civil liability in connection with the +Program, unless a warranty or assumption of liability accompanies a +copy of the Program in return for a fee. + + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Programs + + If you develop a new program, and you want it to be of the greatest +possible use to the public, the best way to achieve this is to make it +free software which everyone can redistribute and change under these terms. + + To do so, attach the following notices to the program. It is safest +to attach them to the start of each source file to most effectively +state the exclusion of warranty; and each file should have at least +the "copyright" line and a pointer to where the full notice is found. + + {one line to give the program's name and a brief idea of what it does.} + Copyright (C) {year} {name of author} + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program. If not, see {http://www.gnu.org/licenses/}. + +Also add information on how to contact you by electronic and paper mail. + + If the program does terminal interaction, make it output a short +notice like this when it starts in an interactive mode: + + opennota Copyright (C) 2013 opennota + This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. + This is free software, and you are welcome to redistribute it + under certain conditions; type `show c' for details. + +The hypothetical commands `show w' and `show c' should show the appropriate +parts of the General Public License. Of course, your program's commands +might be different; for a GUI interface, you would use an "about box". + + You should also get your employer (if you work as a programmer) or school, +if any, to sign a "copyright disclaimer" for the program, if necessary. +For more information on this, and how to apply and follow the GNU GPL, see +{http://www.gnu.org/licenses/}. + + The GNU General Public License does not permit incorporating your program +into proprietary programs. If your program is a subroutine library, you +may consider it more useful to permit linking proprietary applications with +the library. If this is what you want to do, use the GNU Lesser General +Public License instead of this License. But first, please read +{http://www.gnu.org/philosophy/why-not-lgpl.html}. 
diff --git a/vendor/github.com/golangci/check/cmd/structcheck/structcheck.go b/vendor/github.com/golangci/check/cmd/structcheck/structcheck.go new file mode 100644 index 000000000..5dc5f8380 --- /dev/null +++ b/vendor/github.com/golangci/check/cmd/structcheck/structcheck.go @@ -0,0 +1,193 @@ +// structcheck +// This program is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. +// +// You should have received a copy of the GNU General Public License +// along with this program. If not, see . + +package structcheck + +import ( + "flag" + "fmt" + "go/ast" + "go/token" + "go/types" + + "golang.org/x/tools/go/loader" +) + +var ( + assignmentsOnly = flag.Bool("structcheck.a", false, "Count assignments only") + loadTestFiles = flag.Bool("structcheck.t", false, "Load test files too") + buildTags = flag.String("structcheck.tags", "", "Build tags") +) + +type visitor struct { + prog *loader.Program + pkg *loader.PackageInfo + m map[types.Type]map[string]int + skip map[types.Type]struct{} +} + +func (v *visitor) decl(t types.Type, fieldName string) { + if _, ok := v.m[t]; !ok { + v.m[t] = make(map[string]int) + } + if _, ok := v.m[t][fieldName]; !ok { + v.m[t][fieldName] = 0 + } +} + +func (v *visitor) assignment(t types.Type, fieldName string) { + if _, ok := v.m[t]; !ok { + v.m[t] = make(map[string]int) + } + if _, ok := v.m[t][fieldName]; ok { + v.m[t][fieldName]++ + } else { + v.m[t][fieldName] = 1 + } +} + +func (v *visitor) typeSpec(node *ast.TypeSpec) { + if strukt, ok := node.Type.(*ast.StructType); ok { + t := v.pkg.Info.Defs[node.Name].Type() + for _, f := range strukt.Fields.List { + if len(f.Names) > 0 { + fieldName := f.Names[0].Name + v.decl(t, fieldName) + } + } + } +} + +func (v *visitor) typeAndFieldName(expr *ast.SelectorExpr) (types.Type, string, bool) { + selection := v.pkg.Info.Selections[expr] + if selection == nil { + return nil, "", false + } + recv := selection.Recv() + if ptr, ok := recv.(*types.Pointer); ok { + recv = ptr.Elem() + } + return recv, selection.Obj().Name(), true +} + +func (v *visitor) assignStmt(node *ast.AssignStmt) { + for _, lhs := range node.Lhs { + var selector *ast.SelectorExpr + switch expr := lhs.(type) { + case *ast.SelectorExpr: + selector = expr + case *ast.IndexExpr: + if expr, ok := expr.X.(*ast.SelectorExpr); ok { + selector = expr + } + } + if selector != nil { + if t, fn, ok := v.typeAndFieldName(selector); ok { + v.assignment(t, fn) + } + } + } +} + +func (v *visitor) compositeLiteral(node *ast.CompositeLit) { + t := v.pkg.Info.Types[node.Type].Type + for _, expr := range node.Elts { + if kv, ok := expr.(*ast.KeyValueExpr); ok { + if ident, ok := kv.Key.(*ast.Ident); ok { + v.assignment(t, ident.Name) + } + } else { + // Struct literal with positional values. + // All the fields are assigned. 
+ v.skip[t] = struct{}{} + break + } + } +} + +func (v *visitor) Visit(node ast.Node) ast.Visitor { + switch node := node.(type) { + case *ast.TypeSpec: + v.typeSpec(node) + + case *ast.AssignStmt: + if *assignmentsOnly { + v.assignStmt(node) + } + + case *ast.SelectorExpr: + if !*assignmentsOnly { + if t, fn, ok := v.typeAndFieldName(node); ok { + v.assignment(t, fn) + } + } + + case *ast.CompositeLit: + v.compositeLiteral(node) + } + + return v +} + +type Issue struct { + Pos token.Position + Type string + FieldName string +} + +func Run(program *loader.Program, reportExported bool) []Issue { + var issues []Issue + for _, pkg := range program.InitialPackages() { + visitor := &visitor{ + m: make(map[types.Type]map[string]int), + skip: make(map[types.Type]struct{}), + prog: program, + pkg: pkg, + } + for _, f := range pkg.Files { + ast.Walk(visitor, f) + } + + for t := range visitor.m { + if _, skip := visitor.skip[t]; skip { + continue + } + for fieldName, v := range visitor.m[t] { + if !reportExported && ast.IsExported(fieldName) { + continue + } + if v == 0 { + field, _, _ := types.LookupFieldOrMethod(t, false, pkg.Pkg, fieldName) + if field == nil { + fmt.Printf("%s: unknown field or method: %s.%s\n", pkg.Pkg.Path(), t, fieldName) + continue + } + if fieldName == "XMLName" { + if named, ok := field.Type().(*types.Named); ok && named.Obj().Pkg().Path() == "encoding/xml" { + continue + } + } + pos := program.Fset.Position(field.Pos()) + issues = append(issues, Issue{ + Pos: pos, + Type: types.TypeString(t, nil), + FieldName: fieldName, + }) + } + } + } + } + + return issues +} diff --git a/vendor/github.com/golangci/check/cmd/varcheck/varcheck.go b/vendor/github.com/golangci/check/cmd/varcheck/varcheck.go new file mode 100644 index 000000000..8e93e0473 --- /dev/null +++ b/vendor/github.com/golangci/check/cmd/varcheck/varcheck.go @@ -0,0 +1,163 @@ +// This program is free software: you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. +// +// You should have received a copy of the GNU General Public License +// along with this program. If not, see . 
+ +package varcheck + +import ( + "flag" + "go/ast" + "go/token" + "strings" + + "go/types" + + "golang.org/x/tools/go/loader" +) + +var ( + buildTags = flag.String("varcheck.tags", "", "Build tags") +) + +type object struct { + pkgPath string + name string +} + +type visitor struct { + prog *loader.Program + pkg *loader.PackageInfo + uses map[object]int + positions map[object]token.Position + insideFunc bool +} + +func getKey(obj types.Object) object { + if obj == nil { + return object{} + } + + pkg := obj.Pkg() + pkgPath := "" + if pkg != nil { + pkgPath = pkg.Path() + } + + return object{ + pkgPath: pkgPath, + name: obj.Name(), + } +} + +func (v *visitor) decl(obj types.Object) { + key := getKey(obj) + if _, ok := v.uses[key]; !ok { + v.uses[key] = 0 + } + if _, ok := v.positions[key]; !ok { + v.positions[key] = v.prog.Fset.Position(obj.Pos()) + } +} + +func (v *visitor) use(obj types.Object) { + key := getKey(obj) + if _, ok := v.uses[key]; ok { + v.uses[key]++ + } else { + v.uses[key] = 1 + } +} + +func isReserved(name string) bool { + return name == "_" || strings.HasPrefix(strings.ToLower(name), "_cgo_") +} + +func (v *visitor) Visit(node ast.Node) ast.Visitor { + switch node := node.(type) { + case *ast.Ident: + v.use(v.pkg.Info.Uses[node]) + + case *ast.ValueSpec: + if !v.insideFunc { + for _, ident := range node.Names { + if !isReserved(ident.Name) { + v.decl(v.pkg.Info.Defs[ident]) + } + } + } + for _, val := range node.Values { + ast.Walk(v, val) + } + if node.Type != nil { + ast.Walk(v, node.Type) + } + return nil + + case *ast.FuncDecl: + if node.Body != nil { + v.insideFunc = true + ast.Walk(v, node.Body) + v.insideFunc = false + } + + if node.Recv != nil { + ast.Walk(v, node.Recv) + } + if node.Type != nil { + ast.Walk(v, node.Type) + } + + return nil + } + + return v +} + +type Issue struct { + Pos token.Position + VarName string +} + +func Run(program *loader.Program, reportExported bool) []Issue { + var issues []Issue + uses := make(map[object]int) + positions := make(map[object]token.Position) + + for _, pkgInfo := range program.InitialPackages() { + if pkgInfo.Pkg.Path() == "unsafe" { + continue + } + + v := &visitor{ + prog: program, + pkg: pkgInfo, + uses: uses, + positions: positions, + } + + for _, f := range v.pkg.Files { + ast.Walk(v, f) + } + } + + for obj, useCount := range uses { + if useCount == 0 && (reportExported || !ast.IsExported(obj.name)) { + pos := positions[obj] + issues = append(issues, Issue{ + Pos: pos, + VarName: obj.name, + }) + } + } + + return issues +} diff --git a/vendor/github.com/golangci/dupl/.travis.yml b/vendor/github.com/golangci/dupl/.travis.yml new file mode 100644 index 000000000..33de24c0f --- /dev/null +++ b/vendor/github.com/golangci/dupl/.travis.yml @@ -0,0 +1,5 @@ +language: go +go: + - 1.3 + - 1.8 + - 1.9 diff --git a/vendor/github.com/golangci/dupl/LICENSE b/vendor/github.com/golangci/dupl/LICENSE new file mode 100644 index 000000000..ab317d841 --- /dev/null +++ b/vendor/github.com/golangci/dupl/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2015 Michal Bohuslávek + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The 
above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/github.com/golangci/dupl/README.md b/vendor/github.com/golangci/dupl/README.md new file mode 100644 index 000000000..f34901d7a --- /dev/null +++ b/vendor/github.com/golangci/dupl/README.md @@ -0,0 +1,63 @@ +# dupl [![Build Status](https://travis-ci.org/mibk/dupl.png)](https://travis-ci.org/mibk/dupl) + +**dupl** is a tool written in Go for finding code clones. So far it can find clones only +in the Go source files. The method uses suffix tree for serialized ASTs. It ignores values +of AST nodes. It just operates with their types (e.g. `if a == 13 {}` and `if x == 100 {}` are +considered the same provided it exceeds the minimal token sequence size). + +Due to the used method dupl can report so called "false positives" on the output. These are +the ones we do not consider clones (whether they are too small, or the values of the matched +tokens are completely different). + +## Installation + +```bash +go get -u github.com/golangci/dupl +``` + +## Usage + +``` +Usage of dupl: + dupl [flags] [paths] + +Paths: + If the given path is a file, dupl will use it regardless of + the file extension. If it is a directory it will recursively + search for *.go files in that directory. + + If no path is given dupl will recursively search for *.go + files in the current directory. + +Flags: + -files + read file names from stdin one at each line + -html + output the results as HTML, including duplicate code fragments + -plumbing + plumbing (easy-to-parse) output for consumption by scripts or tools + -t, -threshold size + minimum token sequence size as a clone (default 15) + -vendor + check files in vendor directory + -v, -verbose + explain what is being done + +Examples: + dupl -t 100 + Search clones in the current directory of size at least + 100 tokens. + dupl $(find app/ -name '*_test.go') + Search for clones in tests in the app directory. + find app/ -name '*_test.go' |dupl -files + The same as above. +``` + +## Example + +The reduced output of this command with the following parameters for the [Docker](https://www.docker.com) source code +looks like [this](http://htmlpreview.github.io/?https://github.com/golangci/dupl/blob/master/_output_example/docker.html). + +```bash +$ dupl -t 200 -html >docker.html +``` diff --git a/vendor/github.com/golangci/dupl/job/buildtree.go b/vendor/github.com/golangci/dupl/job/buildtree.go new file mode 100644 index 000000000..e9aad54c0 --- /dev/null +++ b/vendor/github.com/golangci/dupl/job/buildtree.go @@ -0,0 +1,22 @@ +package job + +import ( + "github.com/golangci/dupl/suffixtree" + "github.com/golangci/dupl/syntax" +) + +func BuildTree(schan chan []*syntax.Node) (t *suffixtree.STree, d *[]*syntax.Node, done chan bool) { + t = suffixtree.New() + data := make([]*syntax.Node, 0, 100) + done = make(chan bool) + go func() { + for seq := range schan { + data = append(data, seq...) 
+ for _, node := range seq { + t.Update(node) + } + } + done <- true + }() + return t, &data, done +} diff --git a/vendor/github.com/golangci/dupl/job/parse.go b/vendor/github.com/golangci/dupl/job/parse.go new file mode 100644 index 000000000..eb9d7c625 --- /dev/null +++ b/vendor/github.com/golangci/dupl/job/parse.go @@ -0,0 +1,36 @@ +package job + +import ( + "log" + + "github.com/golangci/dupl/syntax" + "github.com/golangci/dupl/syntax/golang" +) + +func Parse(fchan chan string) chan []*syntax.Node { + + // parse AST + achan := make(chan *syntax.Node) + go func() { + for file := range fchan { + ast, err := golang.Parse(file) + if err != nil { + log.Println(err) + continue + } + achan <- ast + } + close(achan) + }() + + // serialize + schan := make(chan []*syntax.Node) + go func() { + for ast := range achan { + seq := syntax.Serialize(ast) + schan <- seq + } + close(schan) + }() + return schan +} diff --git a/vendor/github.com/golangci/dupl/main.go b/vendor/github.com/golangci/dupl/main.go new file mode 100644 index 000000000..3030a97ae --- /dev/null +++ b/vendor/github.com/golangci/dupl/main.go @@ -0,0 +1,148 @@ +package dupl + +import ( + "flag" + "fmt" + "io/ioutil" + "os" + "path/filepath" + "sort" + + "github.com/golangci/dupl/job" + "github.com/golangci/dupl/printer" + "github.com/golangci/dupl/syntax" +) + +const defaultThreshold = 15 + +var ( + paths = []string{"."} + vendor = flag.Bool("dupl.vendor", false, "") + verbose = flag.Bool("dupl.verbose", false, "") + files = flag.Bool("dupl.files", false, "") + + html = flag.Bool("dupl.html", false, "") + plumbing = flag.Bool("dupl.plumbing", false, "") +) + +const ( + vendorDirPrefix = "vendor" + string(filepath.Separator) + vendorDirInPath = string(filepath.Separator) + vendorDirPrefix +) + +func init() { + flag.BoolVar(verbose, "dupl.v", false, "alias for -verbose") +} + +func Run(files []string, threshold int) ([]printer.Issue, error) { + fchan := make(chan string, 1024) + go func() { + for _, f := range files { + fchan <- f + } + close(fchan) + }() + schan := job.Parse(fchan) + t, data, done := job.BuildTree(schan) + <-done + + // finish stream + t.Update(&syntax.Node{Type: -1}) + + mchan := t.FindDuplOver(threshold) + duplChan := make(chan syntax.Match) + go func() { + for m := range mchan { + match := syntax.FindSyntaxUnits(*data, m, threshold) + if len(match.Frags) > 0 { + duplChan <- match + } + } + close(duplChan) + }() + + return makeIssues(duplChan) +} + +func makeIssues(duplChan <-chan syntax.Match) ([]printer.Issue, error) { + groups := make(map[string][][]*syntax.Node) + for dupl := range duplChan { + groups[dupl.Hash] = append(groups[dupl.Hash], dupl.Frags...) + } + keys := make([]string, 0, len(groups)) + for k := range groups { + keys = append(keys, k) + } + sort.Strings(keys) + + p := printer.NewPlumbing(ioutil.ReadFile) + + var issues []printer.Issue + for _, k := range keys { + uniq := unique(groups[k]) + if len(uniq) > 1 { + i, err := p.MakeIssues(uniq) + if err != nil { + return nil, err + } + issues = append(issues, i...) 
+ } + } + + return issues, nil +} + +func unique(group [][]*syntax.Node) [][]*syntax.Node { + fileMap := make(map[string]map[int]struct{}) + + var newGroup [][]*syntax.Node + for _, seq := range group { + node := seq[0] + file, ok := fileMap[node.Filename] + if !ok { + file = make(map[int]struct{}) + fileMap[node.Filename] = file + } + if _, ok := file[node.Pos]; !ok { + file[node.Pos] = struct{}{} + newGroup = append(newGroup, seq) + } + } + return newGroup +} + +func usage() { + fmt.Fprintln(os.Stderr, `Usage: dupl [flags] [paths] + +Paths: + If the given path is a file, dupl will use it regardless of + the file extension. If it is a directory, it will recursively + search for *.go files in that directory. + + If no path is given, dupl will recursively search for *.go + files in the current directory. + +Flags: + -files + read file names from stdin one at each line + -html + output the results as HTML, including duplicate code fragments + -plumbing + plumbing (easy-to-parse) output for consumption by scripts or tools + -t, -threshold size + minimum token sequence size as a clone (default 15) + -vendor + check files in vendor directory + -v, -verbose + explain what is being done + +Examples: + dupl -t 100 + Search clones in the current directory of size at least + 100 tokens. + dupl $(find app/ -name '*_test.go') + Search for clones in tests in the app directory. + find app/ -name '*_test.go' |dupl -files + The same as above.`) + os.Exit(2) +} diff --git a/vendor/github.com/golangci/dupl/printer/html.go b/vendor/github.com/golangci/dupl/printer/html.go new file mode 100644 index 000000000..5ad9e25c7 --- /dev/null +++ b/vendor/github.com/golangci/dupl/printer/html.go @@ -0,0 +1,120 @@ +package printer + +import ( + "bytes" + "fmt" + "io" + "regexp" + "sort" + + "github.com/golangci/dupl/syntax" +) + +type html struct { + iota int + w io.Writer + ReadFile +} + +func NewHTML(w io.Writer, fread ReadFile) Printer { + return &html{w: w, ReadFile: fread} +} + +func (p *html) PrintHeader() error { + _, err := fmt.Fprint(p.w, ` + +Duplicates + +`) + return err +} + +func (p *html) PrintClones(dups [][]*syntax.Node) error { + p.iota++ + fmt.Fprintf(p.w, "
<h1>#%d found %d clones</h1>
\n", p.iota, len(dups)) + + clones := make([]clone, len(dups)) + for i, dup := range dups { + cnt := len(dup) + if cnt == 0 { + panic("zero length dup") + } + nstart := dup[0] + nend := dup[cnt-1] + + file, err := p.ReadFile(nstart.Filename) + if err != nil { + return err + } + + lineStart, _ := blockLines(file, nstart.Pos, nend.End) + cl := clone{filename: nstart.Filename, lineStart: lineStart} + start := findLineBeg(file, nstart.Pos) + content := append(toWhitespace(file[start:nstart.Pos]), file[nstart.Pos:nend.End]...) + cl.fragment = deindent(content) + clones[i] = cl + } + + sort.Sort(byNameAndLine(clones)) + for _, cl := range clones { + fmt.Fprintf(p.w, "
<h2>%s:%d</h2>\n<pre>%s</pre>
\n", cl.filename, cl.lineStart, cl.fragment) + } + return nil +} + +func (*html) PrintFooter() error { return nil } + +func findLineBeg(file []byte, index int) int { + for i := index; i >= 0; i-- { + if file[i] == '\n' { + return i + 1 + } + } + return 0 +} + +func toWhitespace(str []byte) []byte { + var out []byte + for _, c := range bytes.Runes(str) { + if c == '\t' { + out = append(out, '\t') + } else { + out = append(out, ' ') + } + } + return out +} + +func deindent(block []byte) []byte { + const maxVal = 99 + min := maxVal + re := regexp.MustCompile(`(^|\n)(\t*)\S`) + for _, line := range re.FindAllSubmatch(block, -1) { + indent := line[2] + if len(indent) < min { + min = len(indent) + } + } + if min == 0 || min == maxVal { + return block + } + block = block[min:] +Loop: + for i := 0; i < len(block); i++ { + if block[i] == '\n' && i != len(block)-1 { + for j := 0; j < min; j++ { + if block[i+j+1] != '\t' { + continue Loop + } + } + block = append(block[:i+1], block[i+1+min:]...) + } + } + return block +} diff --git a/vendor/github.com/golangci/dupl/printer/plumbing.go b/vendor/github.com/golangci/dupl/printer/plumbing.go new file mode 100644 index 000000000..cf39d01b7 --- /dev/null +++ b/vendor/github.com/golangci/dupl/printer/plumbing.go @@ -0,0 +1,50 @@ +package printer + +import ( + "sort" + + "github.com/golangci/dupl/syntax" +) + +type Clone clone + +func (c Clone) Filename() string { + return c.filename +} + +func (c Clone) LineStart() int { + return c.lineStart +} + +func (c Clone) LineEnd() int { + return c.lineEnd +} + +type Issue struct { + From, To Clone +} + +type Plumbing struct { + ReadFile +} + +func NewPlumbing(fread ReadFile) *Plumbing { + return &Plumbing{fread} +} + +func (p *Plumbing) MakeIssues(dups [][]*syntax.Node) ([]Issue, error) { + clones, err := prepareClonesInfo(p.ReadFile, dups) + if err != nil { + return nil, err + } + sort.Sort(byNameAndLine(clones)) + var issues []Issue + for i, cl := range clones { + nextCl := clones[(i+1)%len(clones)] + issues = append(issues, Issue{ + From: Clone(cl), + To: Clone(nextCl), + }) + } + return issues, nil +} diff --git a/vendor/github.com/golangci/dupl/printer/printer.go b/vendor/github.com/golangci/dupl/printer/printer.go new file mode 100644 index 000000000..385217bfc --- /dev/null +++ b/vendor/github.com/golangci/dupl/printer/printer.go @@ -0,0 +1,11 @@ +package printer + +import "github.com/golangci/dupl/syntax" + +type ReadFile func(filename string) ([]byte, error) + +type Printer interface { + PrintHeader() error + PrintClones(dups [][]*syntax.Node) error + PrintFooter() error +} diff --git a/vendor/github.com/golangci/dupl/printer/text.go b/vendor/github.com/golangci/dupl/printer/text.go new file mode 100644 index 000000000..8359fa76f --- /dev/null +++ b/vendor/github.com/golangci/dupl/printer/text.go @@ -0,0 +1,100 @@ +package printer + +import ( + "fmt" + "io" + "sort" + + "github.com/golangci/dupl/syntax" +) + +type text struct { + cnt int + w io.Writer + ReadFile +} + +func NewText(w io.Writer, fread ReadFile) Printer { + return &text{w: w, ReadFile: fread} +} + +func (p *text) PrintHeader() error { return nil } + +func (p *text) PrintClones(dups [][]*syntax.Node) error { + p.cnt++ + fmt.Fprintf(p.w, "found %d clones:\n", len(dups)) + clones, err := prepareClonesInfo(p.ReadFile, dups) + if err != nil { + return err + } + sort.Sort(byNameAndLine(clones)) + for _, cl := range clones { + fmt.Fprintf(p.w, " %s:%d,%d\n", cl.filename, cl.lineStart, cl.lineEnd) + } + return nil +} + +func (p *text) PrintFooter() 
error { + _, err := fmt.Fprintf(p.w, "\nFound total %d clone groups.\n", p.cnt) + return err +} + +func prepareClonesInfo(fread ReadFile, dups [][]*syntax.Node) ([]clone, error) { + clones := make([]clone, len(dups)) + for i, dup := range dups { + cnt := len(dup) + if cnt == 0 { + panic("zero length dup") + } + nstart := dup[0] + nend := dup[cnt-1] + + file, err := fread(nstart.Filename) + if err != nil { + return nil, err + } + + cl := clone{filename: nstart.Filename} + cl.lineStart, cl.lineEnd = blockLines(file, nstart.Pos, nend.End) + clones[i] = cl + } + return clones, nil +} + +func blockLines(file []byte, from, to int) (int, int) { + line := 1 + lineStart, lineEnd := 0, 0 + for offset, b := range file { + if b == '\n' { + line++ + } + if offset == from { + lineStart = line + } + if offset == to-1 { + lineEnd = line + break + } + } + return lineStart, lineEnd +} + +type clone struct { + filename string + lineStart int + lineEnd int + fragment []byte +} + +type byNameAndLine []clone + +func (c byNameAndLine) Len() int { return len(c) } + +func (c byNameAndLine) Swap(i, j int) { c[i], c[j] = c[j], c[i] } + +func (c byNameAndLine) Less(i, j int) bool { + if c[i].filename == c[j].filename { + return c[i].lineStart < c[j].lineStart + } + return c[i].filename < c[j].filename +} diff --git a/vendor/github.com/golangci/dupl/suffixtree/dupl.go b/vendor/github.com/golangci/dupl/suffixtree/dupl.go new file mode 100644 index 000000000..ab145b4f3 --- /dev/null +++ b/vendor/github.com/golangci/dupl/suffixtree/dupl.go @@ -0,0 +1,98 @@ +package suffixtree + +import "sort" + +type Match struct { + Ps []Pos + Len Pos +} + +type posList struct { + positions []Pos +} + +func newPosList() *posList { + return &posList{make([]Pos, 0)} +} + +func (p *posList) append(p2 *posList) { + p.positions = append(p.positions, p2.positions...) +} + +func (p *posList) add(pos Pos) { + p.positions = append(p.positions, pos) +} + +type contextList struct { + lists map[int]*posList +} + +func newContextList() *contextList { + return &contextList{make(map[int]*posList)} +} + +func (c *contextList) getAll() []Pos { + keys := make([]int, 0, len(c.lists)) + for k := range c.lists { + keys = append(keys, k) + } + sort.Ints(keys) + var ps []Pos + for _, k := range keys { + ps = append(ps, c.lists[k].positions...) + } + return ps +} + +func (c *contextList) append(c2 *contextList) { + for lc, pl := range c2.lists { + if _, ok := c.lists[lc]; ok { + c.lists[lc].append(pl) + } else { + c.lists[lc] = pl + } + } +} + +// FindDuplOver find pairs of maximal duplicities over a threshold +// length. 
+func (t *STree) FindDuplOver(threshold int) <-chan Match { + auxTran := newTran(0, 0, t.root) + ch := make(chan Match) + go func() { + walkTrans(auxTran, 0, threshold, ch) + close(ch) + }() + return ch +} + +func walkTrans(parent *tran, length, threshold int, ch chan<- Match) *contextList { + s := parent.state + + cl := newContextList() + + if len(s.trans) == 0 { + pl := newPosList() + start := parent.end + 1 - Pos(length) + pl.add(start) + ch := 0 + if start > 0 { + ch = s.tree.data[start-1].Val() + } + cl.lists[ch] = pl + return cl + } + + for _, t := range s.trans { + ln := length + t.len() + cl2 := walkTrans(t, ln, threshold, ch) + if ln >= threshold { + cl.append(cl2) + } + } + if length >= threshold && len(cl.lists) > 1 { + m := Match{cl.getAll(), Pos(length)} + ch <- m + } + return cl +} diff --git a/vendor/github.com/golangci/dupl/suffixtree/suffixtree.go b/vendor/github.com/golangci/dupl/suffixtree/suffixtree.go new file mode 100644 index 000000000..738015025 --- /dev/null +++ b/vendor/github.com/golangci/dupl/suffixtree/suffixtree.go @@ -0,0 +1,216 @@ +package suffixtree + +import ( + "bytes" + "fmt" + "math" + "strings" +) + +const infinity = math.MaxInt32 + +// Pos denotes position in data slice. +type Pos int32 + +type Token interface { + Val() int +} + +// STree is a struct representing a suffix tree. +type STree struct { + data []Token + root *state + auxState *state // auxiliary state + + // active point + s *state + start, end Pos +} + +// New creates new suffix tree. +func New() *STree { + t := new(STree) + t.data = make([]Token, 0, 50) + t.root = newState(t) + t.auxState = newState(t) + t.root.linkState = t.auxState + t.s = t.root + return t +} + +// Update refreshes the suffix tree to by new data. +func (t *STree) Update(data ...Token) { + t.data = append(t.data, data...) + for _ = range data { + t.update() + t.s, t.start = t.canonize(t.s, t.start, t.end) + t.end++ + } +} + +// update transforms suffix tree T(n) to T(n+1). +func (t *STree) update() { + oldr := t.root + + // (s, (start, end)) is the canonical reference pair for the active point + s := t.s + start, end := t.start, t.end + var r *state + for { + var endPoint bool + r, endPoint = t.testAndSplit(s, start, end-1) + if endPoint { + break + } + r.fork(end) + if oldr != t.root { + oldr.linkState = r + } + oldr = r + s, start = t.canonize(s.linkState, start, end-1) + } + if oldr != t.root { + oldr.linkState = r + } + + // update active point + t.s = s + t.start = start +} + +// testAndSplit tests whether a state with canonical ref. pair +// (s, (start, end)) is the end point, that is, a state that have +// a c-transition. If not, then state (exs, (start, end)) is made +// explicit (if not already so). +func (t *STree) testAndSplit(s *state, start, end Pos) (exs *state, endPoint bool) { + c := t.data[t.end] + if start <= end { + tr := s.findTran(t.data[start]) + splitPoint := tr.start + end - start + 1 + if t.data[splitPoint].Val() == c.Val() { + return s, true + } + // make the (s, (start, end)) state explicit + newSt := newState(s.tree) + newSt.addTran(splitPoint, tr.end, tr.state) + tr.end = splitPoint - 1 + tr.state = newSt + return newSt, false + } + if s == t.auxState || s.findTran(c) != nil { + return s, true + } + return s, false +} + +// canonize returns updated state and start position for ref. pair +// (s, (start, end)) of state r so the new ref. pair is canonical, +// that is, referenced from the closest explicit ancestor of r. 
+func (t *STree) canonize(s *state, start, end Pos) (*state, Pos) { + if s == t.auxState { + s, start = t.root, start+1 + } + if start > end { + return s, start + } + + var tr *tran + for { + if start <= end { + tr = s.findTran(t.data[start]) + if tr == nil { + panic(fmt.Sprintf("there should be some transition for '%d' at %d", + t.data[start].Val(), start)) + } + } + if tr.end-tr.start > end-start { + break + } + start += tr.end - tr.start + 1 + s = tr.state + } + if s == nil { + panic("there should always be some suffix link resolution") + } + return s, start +} + +func (t *STree) At(p Pos) Token { + if p < 0 || p >= Pos(len(t.data)) { + panic("position out of bounds") + } + return t.data[p] +} + +func (t *STree) String() string { + buf := new(bytes.Buffer) + printState(buf, t.root, 0) + return buf.String() +} + +func printState(buf *bytes.Buffer, s *state, ident int) { + for _, tr := range s.trans { + fmt.Fprint(buf, strings.Repeat(" ", ident)) + fmt.Fprintf(buf, "* (%d, %d)\n", tr.start, tr.ActEnd()) + printState(buf, tr.state, ident+1) + } +} + +// state is an explicit state of the suffix tree. +type state struct { + tree *STree + trans []*tran + linkState *state +} + +func newState(t *STree) *state { + return &state{ + tree: t, + trans: make([]*tran, 0), + linkState: nil, + } +} + +func (s *state) addTran(start, end Pos, r *state) { + s.trans = append(s.trans, newTran(start, end, r)) +} + +// fork creates a new branch from the state s. +func (s *state) fork(i Pos) *state { + r := newState(s.tree) + s.addTran(i, infinity, r) + return r +} + +// findTran finds c-transition. +func (s *state) findTran(c Token) *tran { + for _, tran := range s.trans { + if s.tree.data[tran.start].Val() == c.Val() { + return tran + } + } + return nil +} + +// tran represents a state's transition. +type tran struct { + start, end Pos + state *state +} + +func newTran(start, end Pos, s *state) *tran { + return &tran{start, end, s} +} + +func (t *tran) len() int { + return int(t.end - t.start + 1) +} + +// ActEnd returns actual end position as consistent with +// the actual length of the data in the STree. +func (t *tran) ActEnd() Pos { + if t.end == infinity { + return Pos(len(t.state.tree.data)) - 1 + } + return t.end +} diff --git a/vendor/github.com/golangci/dupl/syntax/golang/golang.go b/vendor/github.com/golangci/dupl/syntax/golang/golang.go new file mode 100644 index 000000000..a0b1e77e1 --- /dev/null +++ b/vendor/github.com/golangci/dupl/syntax/golang/golang.go @@ -0,0 +1,392 @@ +package golang + +import ( + "go/ast" + "go/parser" + "go/token" + + "github.com/golangci/dupl/syntax" +) + +const ( + BadNode = iota + File + ArrayType + AssignStmt + BasicLit + BinaryExpr + BlockStmt + BranchStmt + CallExpr + CaseClause + ChanType + CommClause + CompositeLit + DeclStmt + DeferStmt + Ellipsis + EmptyStmt + ExprStmt + Field + FieldList + ForStmt + FuncDecl + FuncLit + FuncType + GenDecl + GoStmt + Ident + IfStmt + IncDecStmt + IndexExpr + InterfaceType + KeyValueExpr + LabeledStmt + MapType + ParenExpr + RangeStmt + ReturnStmt + SelectStmt + SelectorExpr + SendStmt + SliceExpr + StarExpr + StructType + SwitchStmt + TypeAssertExpr + TypeSpec + TypeSwitchStmt + UnaryExpr + ValueSpec +) + +// Parse the given file and return uniform syntax tree. 
+func Parse(filename string) (*syntax.Node, error) { + fset := token.NewFileSet() + file, err := parser.ParseFile(fset, filename, nil, 0) + if err != nil { + return nil, err + } + t := &transformer{ + fileset: fset, + filename: filename, + } + return t.trans(file), nil +} + +type transformer struct { + fileset *token.FileSet + filename string +} + +// trans transforms given golang AST to uniform tree structure. +func (t *transformer) trans(node ast.Node) (o *syntax.Node) { + o = syntax.NewNode() + o.Filename = t.filename + st, end := node.Pos(), node.End() + o.Pos, o.End = t.fileset.File(st).Offset(st), t.fileset.File(end).Offset(end) + + switch n := node.(type) { + case *ast.ArrayType: + o.Type = ArrayType + if n.Len != nil { + o.AddChildren(t.trans(n.Len)) + } + o.AddChildren(t.trans(n.Elt)) + + case *ast.AssignStmt: + o.Type = AssignStmt + for _, e := range n.Rhs { + o.AddChildren(t.trans(e)) + } + + for _, e := range n.Lhs { + o.AddChildren(t.trans(e)) + } + + case *ast.BasicLit: + o.Type = BasicLit + + case *ast.BinaryExpr: + o.Type = BinaryExpr + o.AddChildren(t.trans(n.X), t.trans(n.Y)) + + case *ast.BlockStmt: + o.Type = BlockStmt + for _, stmt := range n.List { + o.AddChildren(t.trans(stmt)) + } + + case *ast.BranchStmt: + o.Type = BranchStmt + if n.Label != nil { + o.AddChildren(t.trans(n.Label)) + } + + case *ast.CallExpr: + o.Type = CallExpr + o.AddChildren(t.trans(n.Fun)) + for _, arg := range n.Args { + o.AddChildren(t.trans(arg)) + } + + case *ast.CaseClause: + o.Type = CaseClause + for _, e := range n.List { + o.AddChildren(t.trans(e)) + } + for _, stmt := range n.Body { + o.AddChildren(t.trans(stmt)) + } + + case *ast.ChanType: + o.Type = ChanType + o.AddChildren(t.trans(n.Value)) + + case *ast.CommClause: + o.Type = CommClause + if n.Comm != nil { + o.AddChildren(t.trans(n.Comm)) + } + for _, stmt := range n.Body { + o.AddChildren(t.trans(stmt)) + } + + case *ast.CompositeLit: + o.Type = CompositeLit + if n.Type != nil { + o.AddChildren(t.trans(n.Type)) + } + for _, e := range n.Elts { + o.AddChildren(t.trans(e)) + } + + case *ast.DeclStmt: + o.Type = DeclStmt + o.AddChildren(t.trans(n.Decl)) + + case *ast.DeferStmt: + o.Type = DeferStmt + o.AddChildren(t.trans(n.Call)) + + case *ast.Ellipsis: + o.Type = Ellipsis + if n.Elt != nil { + o.AddChildren(t.trans(n.Elt)) + } + + case *ast.EmptyStmt: + o.Type = EmptyStmt + + case *ast.ExprStmt: + o.Type = ExprStmt + o.AddChildren(t.trans(n.X)) + + case *ast.Field: + o.Type = Field + for _, name := range n.Names { + o.AddChildren(t.trans(name)) + } + o.AddChildren(t.trans(n.Type)) + + case *ast.FieldList: + o.Type = FieldList + for _, field := range n.List { + o.AddChildren(t.trans(field)) + } + + case *ast.File: + o.Type = File + for _, decl := range n.Decls { + if genDecl, ok := decl.(*ast.GenDecl); ok && genDecl.Tok == token.IMPORT { + // skip import declarations + continue + } + o.AddChildren(t.trans(decl)) + } + + case *ast.ForStmt: + o.Type = ForStmt + if n.Init != nil { + o.AddChildren(t.trans(n.Init)) + } + if n.Cond != nil { + o.AddChildren(t.trans(n.Cond)) + } + if n.Post != nil { + o.AddChildren(t.trans(n.Post)) + } + o.AddChildren(t.trans(n.Body)) + + case *ast.FuncDecl: + o.Type = FuncDecl + if n.Recv != nil { + o.AddChildren(t.trans(n.Recv)) + } + o.AddChildren(t.trans(n.Name), t.trans(n.Type)) + if n.Body != nil { + o.AddChildren(t.trans(n.Body)) + } + + case *ast.FuncLit: + o.Type = FuncLit + o.AddChildren(t.trans(n.Type), t.trans(n.Body)) + + case *ast.FuncType: + o.Type = FuncType + 
o.AddChildren(t.trans(n.Params)) + if n.Results != nil { + o.AddChildren(t.trans(n.Results)) + } + + case *ast.GenDecl: + o.Type = GenDecl + for _, spec := range n.Specs { + o.AddChildren(t.trans(spec)) + } + + case *ast.GoStmt: + o.Type = GoStmt + o.AddChildren(t.trans(n.Call)) + + case *ast.Ident: + o.Type = Ident + + case *ast.IfStmt: + o.Type = IfStmt + if n.Init != nil { + o.AddChildren(t.trans(n.Init)) + } + o.AddChildren(t.trans(n.Cond), t.trans(n.Body)) + if n.Else != nil { + o.AddChildren(t.trans(n.Else)) + } + + case *ast.IncDecStmt: + o.Type = IncDecStmt + o.AddChildren(t.trans(n.X)) + + case *ast.IndexExpr: + o.Type = IndexExpr + o.AddChildren(t.trans(n.X), t.trans(n.Index)) + + case *ast.InterfaceType: + o.Type = InterfaceType + o.AddChildren(t.trans(n.Methods)) + + case *ast.KeyValueExpr: + o.Type = KeyValueExpr + o.AddChildren(t.trans(n.Key), t.trans(n.Value)) + + case *ast.LabeledStmt: + o.Type = LabeledStmt + o.AddChildren(t.trans(n.Label), t.trans(n.Stmt)) + + case *ast.MapType: + o.Type = MapType + o.AddChildren(t.trans(n.Key), t.trans(n.Value)) + + case *ast.ParenExpr: + o.Type = ParenExpr + o.AddChildren(t.trans(n.X)) + + case *ast.RangeStmt: + o.Type = RangeStmt + if n.Key != nil { + o.AddChildren(t.trans(n.Key)) + } + if n.Value != nil { + o.AddChildren(t.trans(n.Value)) + } + o.AddChildren(t.trans(n.X), t.trans(n.Body)) + + case *ast.ReturnStmt: + o.Type = ReturnStmt + for _, e := range n.Results { + o.AddChildren(t.trans(e)) + } + + case *ast.SelectStmt: + o.Type = SelectStmt + o.AddChildren(t.trans(n.Body)) + + case *ast.SelectorExpr: + o.Type = SelectorExpr + o.AddChildren(t.trans(n.X), t.trans(n.Sel)) + + case *ast.SendStmt: + o.Type = SendStmt + o.AddChildren(t.trans(n.Chan), t.trans(n.Value)) + + case *ast.SliceExpr: + o.Type = SliceExpr + o.AddChildren(t.trans(n.X)) + if n.Low != nil { + o.AddChildren(t.trans(n.Low)) + } + if n.High != nil { + o.AddChildren(t.trans(n.High)) + } + if n.Max != nil { + o.AddChildren(t.trans(n.Max)) + } + + case *ast.StarExpr: + o.Type = StarExpr + o.AddChildren(t.trans(n.X)) + + case *ast.StructType: + o.Type = StructType + o.AddChildren(t.trans(n.Fields)) + + case *ast.SwitchStmt: + o.Type = SwitchStmt + if n.Init != nil { + o.AddChildren(t.trans(n.Init)) + } + if n.Tag != nil { + o.AddChildren(t.trans(n.Tag)) + } + o.AddChildren(t.trans(n.Body)) + + case *ast.TypeAssertExpr: + o.Type = TypeAssertExpr + o.AddChildren(t.trans(n.X)) + if n.Type != nil { + o.AddChildren(t.trans(n.Type)) + } + + case *ast.TypeSpec: + o.Type = TypeSpec + o.AddChildren(t.trans(n.Name), t.trans(n.Type)) + + case *ast.TypeSwitchStmt: + o.Type = TypeSwitchStmt + if n.Init != nil { + o.AddChildren(t.trans(n.Init)) + } + o.AddChildren(t.trans(n.Assign), t.trans(n.Body)) + + case *ast.UnaryExpr: + o.Type = UnaryExpr + o.AddChildren(t.trans(n.X)) + + case *ast.ValueSpec: + o.Type = ValueSpec + for _, name := range n.Names { + o.AddChildren(t.trans(name)) + } + if n.Type != nil { + o.AddChildren(t.trans(n.Type)) + } + for _, val := range n.Values { + o.AddChildren(t.trans(val)) + } + + default: + o.Type = BadNode + + } + + return o +} diff --git a/vendor/github.com/golangci/dupl/syntax/syntax.go b/vendor/github.com/golangci/dupl/syntax/syntax.go new file mode 100644 index 000000000..e2c750afd --- /dev/null +++ b/vendor/github.com/golangci/dupl/syntax/syntax.go @@ -0,0 +1,175 @@ +package syntax + +import ( + "crypto/sha1" + + "github.com/golangci/dupl/suffixtree" +) + +type Node struct { + Type int + Filename string + Pos, End int + Children []*Node + Owns 
int +} + +func NewNode() *Node { + return &Node{} +} + +func (n *Node) AddChildren(children ...*Node) { + n.Children = append(n.Children, children...) +} + +func (n *Node) Val() int { + return n.Type +} + +type Match struct { + Hash string + Frags [][]*Node +} + +func Serialize(n *Node) []*Node { + stream := make([]*Node, 0, 10) + serial(n, &stream) + return stream +} + +func serial(n *Node, stream *[]*Node) int { + *stream = append(*stream, n) + var count int + for _, child := range n.Children { + count += serial(child, stream) + } + n.Owns = count + return count + 1 +} + +// FindSyntaxUnits finds all complete syntax units in the match group and returns them +// with the corresponding hash. +func FindSyntaxUnits(data []*Node, m suffixtree.Match, threshold int) Match { + if len(m.Ps) == 0 { + return Match{} + } + firstSeq := data[m.Ps[0] : m.Ps[0]+m.Len] + indexes := getUnitsIndexes(firstSeq, threshold) + + // TODO: is this really working? + indexCnt := len(indexes) + if indexCnt > 0 { + lasti := indexes[indexCnt-1] + firstn := firstSeq[lasti] + for i := 1; i < len(m.Ps); i++ { + n := data[int(m.Ps[i])+lasti] + if firstn.Owns != n.Owns { + indexes = indexes[:indexCnt-1] + break + } + } + } + if len(indexes) == 0 || isCyclic(indexes, firstSeq) || spansMultipleFiles(indexes, firstSeq) { + return Match{} + } + + match := Match{Frags: make([][]*Node, len(m.Ps))} + for i, pos := range m.Ps { + match.Frags[i] = make([]*Node, len(indexes)) + for j, index := range indexes { + match.Frags[i][j] = data[int(pos)+index] + } + } + + lastIndex := indexes[len(indexes)-1] + match.Hash = hashSeq(firstSeq[indexes[0] : lastIndex+firstSeq[lastIndex].Owns]) + return match +} + +func getUnitsIndexes(nodeSeq []*Node, threshold int) []int { + var indexes []int + var split bool + for i := 0; i < len(nodeSeq); { + n := nodeSeq[i] + switch { + case n.Owns >= len(nodeSeq)-i: + // not complete syntax unit + i++ + split = true + continue + case n.Owns+1 < threshold: + split = true + default: + if split { + indexes = indexes[:0] + split = false + } + indexes = append(indexes, i) + } + i += n.Owns + 1 + } + return indexes +} + +// isCyclic finds out whether there is a repetive pattern in the found clone. If positive, +// it return false to point out that the clone would be redundant. 
+func isCyclic(indexes []int, nodes []*Node) bool { + cnt := len(indexes) + if cnt <= 1 { + return false + } + + alts := make(map[int]bool) + for i := 1; i <= cnt/2; i++ { + if cnt%i == 0 { + alts[i] = true + } + } + + for i := 0; i < indexes[cnt/2]; i++ { + nstart := nodes[i+indexes[0]] + AltLoop: + for alt := range alts { + for j := alt; j < cnt; j += alt { + index := i + indexes[j] + if index < len(nodes) { + nalt := nodes[index] + if nstart.Owns == nalt.Owns && nstart.Type == nalt.Type { + continue + } + } else if i >= indexes[alt] { + return true + } + delete(alts, alt) + continue AltLoop + } + } + if len(alts) == 0 { + return false + } + } + return true +} + +func spansMultipleFiles(indexes []int, nodes []*Node) bool { + if len(indexes) < 2 { + return false + } + f := nodes[indexes[0]].Filename + for i := 1; i < len(indexes); i++ { + if nodes[indexes[i]].Filename != f { + return true + } + } + return false +} + +func hashSeq(nodes []*Node) string { + h := sha1.New() + bytes := make([]byte, len(nodes)) + for i, node := range nodes { + bytes[i] = byte(node.Type) + } + h.Write(bytes) + return string(h.Sum(nil)) +} diff --git a/vendor/github.com/golangci/go-misc/LICENSE b/vendor/github.com/golangci/go-misc/LICENSE new file mode 100644 index 000000000..cc42dd45d --- /dev/null +++ b/vendor/github.com/golangci/go-misc/LICENSE @@ -0,0 +1,27 @@ +Copyright (c) 2012 Rémy Oudompheng. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * The name of Rémy Oudompheng may not be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + diff --git a/vendor/github.com/golangci/go-misc/deadcode/README.md b/vendor/github.com/golangci/go-misc/deadcode/README.md new file mode 100644 index 000000000..550423128 --- /dev/null +++ b/vendor/github.com/golangci/go-misc/deadcode/README.md @@ -0,0 +1,18 @@ +# deadcode + +`deadcode` is a very simple utility which detects unused declarations in a Go package. 
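Aside (not part of the upstream README): besides the standalone CLI described just below, the vendored package exposes a programmatic entry point, `deadcode.Run`, defined in `deadcode.go` later in this patch. A minimal sketch of driving it through `golang.org/x/tools/go/loader` might look like the following; the import path `example.com/somepkg` is purely illustrative.

```go
package main

import (
	"fmt"
	"log"

	"github.com/golangci/go-misc/deadcode"
	"golang.org/x/tools/go/loader"
)

func main() {
	var conf loader.Config
	// Illustrative package path; point this at a real package to analyze.
	conf.Import("example.com/somepkg")
	prog, err := conf.Load()
	if err != nil {
		log.Fatal(err)
	}

	// Run reports package-level identifiers that are never used.
	issues, err := deadcode.Run(prog)
	if err != nil {
		log.Fatal(err)
	}
	for _, issue := range issues {
		fmt.Printf("%s: %s is unused\n", issue.Pos, issue.UnusedIdentName)
	}
}
```

This API form, rather than the CLI, is presumably what golangci-lint itself drives when the `deadcode` linter is enabled.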
+ +## Usage +``` +deadcode [-test] [packages] + + -test Include test files + packages A list of packages using the same conventions as the go tool +``` + +## Limitations + +* Self-referential unused code is not currently reported +* A single package can be tested at a time +* Unused methods are not reported + diff --git a/vendor/github.com/golangci/go-misc/deadcode/deadcode.go b/vendor/github.com/golangci/go-misc/deadcode/deadcode.go new file mode 100644 index 000000000..2e7cfc962 --- /dev/null +++ b/vendor/github.com/golangci/go-misc/deadcode/deadcode.go @@ -0,0 +1,135 @@ +package deadcode + +import ( + "fmt" + "go/ast" + "go/token" + "go/types" + "os" + "path/filepath" + "sort" + "strings" + + "golang.org/x/tools/go/loader" +) + +var exitCode int + +var ( + withTestFiles bool +) + +type Issue struct { + Pos token.Position + UnusedIdentName string +} + +func Run(program *loader.Program) ([]Issue, error) { + ctx := &Context{ + program: program, + } + report := ctx.Process() + var issues []Issue + for _, obj := range report { + issues = append(issues, Issue{ + Pos: program.Fset.Position(obj.Pos()), + UnusedIdentName: obj.Name(), + }) + } + + return issues, nil +} + +func fatalf(format string, args ...interface{}) { + panic(fmt.Errorf(format, args...)) +} + +type Context struct { + cwd string + withTests bool + + program *loader.Program +} + +// pos resolves a compact position encoding into a verbose one +func (ctx *Context) pos(pos token.Pos) token.Position { + if ctx.cwd == "" { + ctx.cwd, _ = os.Getwd() + } + p := ctx.program.Fset.Position(pos) + f, err := filepath.Rel(ctx.cwd, p.Filename) + if err == nil { + p.Filename = f + } + return p +} + +// error formats the error to standard error, adding program +// identification and a newline +func (ctx *Context) errorf(pos token.Pos, format string, args ...interface{}) { + p := ctx.pos(pos) + fmt.Fprintf(os.Stderr, p.String()+": "+format+"\n", args...) + exitCode = 2 +} + +func (ctx *Context) Load(args ...string) { + // TODO +} + +func (ctx *Context) Process() []types.Object { + prog := ctx.program + var allUnused []types.Object + for _, pkg := range prog.Imported { + unused := ctx.doPackage(prog, pkg) + allUnused = append(allUnused, unused...) + } + for _, pkg := range prog.Created { + unused := ctx.doPackage(prog, pkg) + allUnused = append(allUnused, unused...) 
+ } + sort.Sort(objects(allUnused)) + return allUnused +} + +func isTestFuncByName(name string) bool { + return strings.HasPrefix(name, "Test") || strings.HasPrefix(name, "Benchmark") || strings.HasPrefix(name, "Example") +} + +func (ctx *Context) doPackage(prog *loader.Program, pkg *loader.PackageInfo) []types.Object { + used := make(map[types.Object]bool) + for _, file := range pkg.Files { + ast.Inspect(file, func(n ast.Node) bool { + id, ok := n.(*ast.Ident) + if !ok { + return true + } + obj := pkg.Info.Uses[id] + if obj != nil { + used[obj] = true + } + return false + }) + } + + global := pkg.Pkg.Scope() + var unused []types.Object + for _, name := range global.Names() { + if pkg.Pkg.Name() == "main" && name == "main" { + continue + } + obj := global.Lookup(name) + _, isSig := obj.Type().(*types.Signature) + pos := ctx.pos(obj.Pos()) + isTestMethod := isSig && isTestFuncByName(obj.Name()) && strings.HasSuffix(pos.Filename, "_test.go") + if !used[obj] && ((pkg.Pkg.Name() == "main" && !isTestMethod) || !ast.IsExported(name)) { + unused = append(unused, obj) + } + } + return unused +} + +type objects []types.Object + +func (s objects) Len() int { return len(s) } +func (s objects) Swap(i, j int) { s[i], s[j] = s[j], s[i] } +func (s objects) Less(i, j int) bool { return s[i].Pos() < s[j].Pos() } diff --git a/vendor/github.com/golangci/gofmt/gofmt/LICENSE b/vendor/github.com/golangci/gofmt/gofmt/LICENSE new file mode 100644 index 000000000..6a66aea5e --- /dev/null +++ b/vendor/github.com/golangci/gofmt/gofmt/LICENSE @@ -0,0 +1,27 @@ +Copyright (c) 2009 The Go Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/github.com/golangci/gofmt/gofmt/doc.go b/vendor/github.com/golangci/gofmt/gofmt/doc.go new file mode 100644 index 000000000..da0c8581d --- /dev/null +++ b/vendor/github.com/golangci/gofmt/gofmt/doc.go @@ -0,0 +1,104 @@ +// Copyright 2009 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +/* +Gofmt formats Go programs. +It uses tabs for indentation and blanks for alignment. 
+Alignment assumes that an editor is using a fixed-width font. + +Without an explicit path, it processes the standard input. Given a file, +it operates on that file; given a directory, it operates on all .go files in +that directory, recursively. (Files starting with a period are ignored.) +By default, gofmt prints the reformatted sources to standard output. + +Usage: + gofmt [flags] [path ...] + +The flags are: + -d + Do not print reformatted sources to standard output. + If a file's formatting is different than gofmt's, print diffs + to standard output. + -e + Print all (including spurious) errors. + -l + Do not print reformatted sources to standard output. + If a file's formatting is different from gofmt's, print its name + to standard output. + -r rule + Apply the rewrite rule to the source before reformatting. + -s + Try to simplify code (after applying the rewrite rule, if any). + -w + Do not print reformatted sources to standard output. + If a file's formatting is different from gofmt's, overwrite it + with gofmt's version. If an error occurred during overwriting, + the original file is restored from an automatic backup. + +Debugging support: + -cpuprofile filename + Write cpu profile to the specified file. + + +The rewrite rule specified with the -r flag must be a string of the form: + + pattern -> replacement + +Both pattern and replacement must be valid Go expressions. +In the pattern, single-character lowercase identifiers serve as +wildcards matching arbitrary sub-expressions; those expressions +will be substituted for the same identifiers in the replacement. + +When gofmt reads from standard input, it accepts either a full Go program +or a program fragment. A program fragment must be a syntactically +valid declaration list, statement list, or expression. When formatting +such a fragment, gofmt preserves leading indentation as well as leading +and trailing spaces, so that individual sections of a Go program can be +formatted by piping them through gofmt. + +Examples + +To check files for unnecessary parentheses: + + gofmt -r '(a) -> a' -l *.go + +To remove the parentheses: + + gofmt -r '(a) -> a' -w *.go + +To convert the package tree from explicit slice upper bounds to implicit ones: + + gofmt -r 'α[β:len(α)] -> α[β:]' -w $GOROOT/src + +The simplify command + +When invoked with -s gofmt will make the following source transformations where possible. + + An array, slice, or map composite literal of the form: + []T{T{}, T{}} + will be simplified to: + []T{{}, {}} + + A slice expression of the form: + s[a:len(s)] + will be simplified to: + s[a:] + + A range of the form: + for x, _ = range v {...} + will be simplified to: + for x = range v {...} + + A range of the form: + for _ = range v {...} + will be simplified to: + for range v {...} + +This may result in changes that are incompatible with earlier versions of Go. +*/ +package gofmt + +// BUG(rsc): The implementation of -r is a bit slow. +// BUG(gri): If -w fails, the restored original file may not have some of the +// original file attributes. diff --git a/vendor/github.com/golangci/gofmt/gofmt/gofmt.go b/vendor/github.com/golangci/gofmt/gofmt/gofmt.go new file mode 100644 index 000000000..fb9c8cb37 --- /dev/null +++ b/vendor/github.com/golangci/gofmt/gofmt/gofmt.go @@ -0,0 +1,327 @@ +// Copyright 2009 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package gofmt + +import ( + "bytes" + "flag" + "fmt" + "go/ast" + "go/parser" + "go/printer" + "go/scanner" + "go/token" + "io" + "io/ioutil" + "os" + "os/exec" + "path/filepath" + "runtime" + "runtime/pprof" + "strings" + "sync" +) + +var ( + // main operation modes + list = flag.Bool("gofmt.l", false, "list files whose formatting differs from gofmt's") + write = flag.Bool("gofmt.w", false, "write result to (source) file instead of stdout") + rewriteRule = flag.String("gofmt.r", "", "rewrite rule (e.g., 'a[b:len(a)] -> a[b:]')") + simplifyAST = flag.Bool("gofmt.s", false, "simplify code") + doDiff = flag.Bool("gofmt.d", false, "display diffs instead of rewriting files") + allErrors = flag.Bool("gofmt.e", false, "report all errors (not just the first 10 on different lines)") + + // debugging + cpuprofile = flag.String("gofmt.cpuprofile", "", "write cpu profile to this file") +) + +const ( + tabWidth = 8 + printerMode = printer.UseSpaces | printer.TabIndent +) + +var ( + fileSet = token.NewFileSet() // per process FileSet + exitCode = 0 + rewrite func(*ast.File) *ast.File + parserMode parser.Mode + parserModeInitOnce sync.Once +) + +func report(err error) { + scanner.PrintError(os.Stderr, err) + exitCode = 2 +} + +func usage() { + fmt.Fprintf(os.Stderr, "usage: gofmt [flags] [path ...]\n") + flag.PrintDefaults() +} + +func initParserMode() { + parserModeInitOnce.Do(func() { + parserMode = parser.ParseComments + if *allErrors { + parserMode |= parser.AllErrors + } + }) +} + +func isGoFile(f os.FileInfo) bool { + // ignore non-Go files + name := f.Name() + return !f.IsDir() && !strings.HasPrefix(name, ".") && strings.HasSuffix(name, ".go") +} + +// If in == nil, the source is the contents of the file with the given filename. +func processFile(filename string, in io.Reader, out io.Writer, stdin bool) error { + var perm os.FileMode = 0644 + if in == nil { + f, err := os.Open(filename) + if err != nil { + return err + } + defer f.Close() + fi, err := f.Stat() + if err != nil { + return err + } + in = f + perm = fi.Mode().Perm() + } + + src, err := ioutil.ReadAll(in) + if err != nil { + return err + } + + file, sourceAdj, indentAdj, err := parse(fileSet, filename, src, stdin) + if err != nil { + return err + } + + if rewrite != nil { + if sourceAdj == nil { + file = rewrite(file) + } else { + fmt.Fprintf(os.Stderr, "warning: rewrite ignored for incomplete programs\n") + } + } + + ast.SortImports(fileSet, file) + + if *simplifyAST { + simplify(file) + } + + res, err := format(fileSet, file, sourceAdj, indentAdj, src, printer.Config{Mode: printerMode, Tabwidth: tabWidth}) + if err != nil { + return err + } + + if !bytes.Equal(src, res) { + // formatting has changed + if *list { + fmt.Fprintln(out, filename) + } + if *write { + // make a temporary backup before overwriting original + bakname, err := backupFile(filename+".", src, perm) + if err != nil { + return err + } + err = ioutil.WriteFile(filename, res, perm) + if err != nil { + os.Rename(bakname, filename) + return err + } + err = os.Remove(bakname) + if err != nil { + return err + } + } + if *doDiff { + data, err := diff(src, res, filename) + if err != nil { + return fmt.Errorf("computing diff: %s", err) + } + fmt.Printf("diff -u %s %s\n", filepath.ToSlash(filename+".orig"), filepath.ToSlash(filename)) + out.Write(data) + } + } + + if !*list && !*write && !*doDiff { + _, err = out.Write(res) + } + + return err +} + +func visitFile(path string, f os.FileInfo, err error) error { + if err == nil && isGoFile(f) { + err = processFile(path, nil, 
os.Stdout, false) + } + // Don't complain if a file was deleted in the meantime (i.e. + // the directory changed concurrently while running gofmt). + if err != nil && !os.IsNotExist(err) { + report(err) + } + return nil +} + +func walkDir(path string) { + filepath.Walk(path, visitFile) +} + +func gofmtMain() { + flag.Usage = usage + flag.Parse() + + if *cpuprofile != "" { + f, err := os.Create(*cpuprofile) + if err != nil { + fmt.Fprintf(os.Stderr, "creating cpu profile: %s\n", err) + exitCode = 2 + return + } + defer f.Close() + pprof.StartCPUProfile(f) + defer pprof.StopCPUProfile() + } + + initParserMode() + initRewrite() + + if flag.NArg() == 0 { + if *write { + fmt.Fprintln(os.Stderr, "error: cannot use -w with standard input") + exitCode = 2 + return + } + if err := processFile("", os.Stdin, os.Stdout, true); err != nil { + report(err) + } + return + } + + for i := 0; i < flag.NArg(); i++ { + path := flag.Arg(i) + switch dir, err := os.Stat(path); { + case err != nil: + report(err) + case dir.IsDir(): + walkDir(path) + default: + if err := processFile(path, nil, os.Stdout, false); err != nil { + report(err) + } + } + } +} + +func writeTempFile(dir, prefix string, data []byte) (string, error) { + file, err := ioutil.TempFile(dir, prefix) + if err != nil { + return "", err + } + _, err = file.Write(data) + if err1 := file.Close(); err == nil { + err = err1 + } + if err != nil { + os.Remove(file.Name()) + return "", err + } + return file.Name(), nil +} + +func diff(b1, b2 []byte, filename string) (data []byte, err error) { + f1, err := writeTempFile("", "gofmt", b1) + if err != nil { + return + } + defer os.Remove(f1) + + f2, err := writeTempFile("", "gofmt", b2) + if err != nil { + return + } + defer os.Remove(f2) + + cmd := "diff" + if runtime.GOOS == "plan9" { + cmd = "/bin/ape/diff" + } + + data, err = exec.Command(cmd, "-u", f1, f2).CombinedOutput() + if len(data) > 0 { + // diff exits with a non-zero status when the files don't match. + // Ignore that failure as long as we get output. + return replaceTempFilename(data, filename) + } + return +} + +// replaceTempFilename replaces temporary filenames in diff with actual one. +// +// --- /tmp/gofmt316145376 2017-02-03 19:13:00.280468375 -0500 +// +++ /tmp/gofmt617882815 2017-02-03 19:13:00.280468375 -0500 +// ... +// -> +// --- path/to/file.go.orig 2017-02-03 19:13:00.280468375 -0500 +// +++ path/to/file.go 2017-02-03 19:13:00.280468375 -0500 +// ... +func replaceTempFilename(diff []byte, filename string) ([]byte, error) { + bs := bytes.SplitN(diff, []byte{'\n'}, 3) + if len(bs) < 3 { + return nil, fmt.Errorf("got unexpected diff for %s", filename) + } + // Preserve timestamps. + var t0, t1 []byte + if i := bytes.LastIndexByte(bs[0], '\t'); i != -1 { + t0 = bs[0][i:] + } + if i := bytes.LastIndexByte(bs[1], '\t'); i != -1 { + t1 = bs[1][i:] + } + // Always print filepath with slash separator. + f := filepath.ToSlash(filename) + bs[0] = []byte(fmt.Sprintf("--- %s%s", f+".orig", t0)) + bs[1] = []byte(fmt.Sprintf("+++ %s%s", f, t1)) + return bytes.Join(bs, []byte{'\n'}), nil +} + +const chmodSupported = runtime.GOOS != "windows" + +// backupFile writes data to a new file named filename with permissions perm, +// with 0 && isSpace(src[i-1]) { + i-- + } + return append(res, src[i:]...), nil +} + +// isSpace reports whether the byte is a space character. +// isSpace defines a space as being among the following bytes: ' ', '\t', '\n' and '\r'. 
+func isSpace(b byte) bool { + return b == ' ' || b == '\t' || b == '\n' || b == '\r' +} diff --git a/vendor/github.com/golangci/gofmt/gofmt/rewrite.go b/vendor/github.com/golangci/gofmt/gofmt/rewrite.go new file mode 100644 index 000000000..73741e0a9 --- /dev/null +++ b/vendor/github.com/golangci/gofmt/gofmt/rewrite.go @@ -0,0 +1,303 @@ +// Copyright 2009 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gofmt + +import ( + "fmt" + "go/ast" + "go/parser" + "go/token" + "os" + "reflect" + "strings" + "unicode" + "unicode/utf8" +) + +func initRewrite() { + if *rewriteRule == "" { + rewrite = nil // disable any previous rewrite + return + } + f := strings.Split(*rewriteRule, "->") + if len(f) != 2 { + fmt.Fprintf(os.Stderr, "rewrite rule must be of the form 'pattern -> replacement'\n") + os.Exit(2) + } + pattern := parseExpr(f[0], "pattern") + replace := parseExpr(f[1], "replacement") + rewrite = func(p *ast.File) *ast.File { return rewriteFile(pattern, replace, p) } +} + +// parseExpr parses s as an expression. +// It might make sense to expand this to allow statement patterns, +// but there are problems with preserving formatting and also +// with what a wildcard for a statement looks like. +func parseExpr(s, what string) ast.Expr { + x, err := parser.ParseExpr(s) + if err != nil { + fmt.Fprintf(os.Stderr, "parsing %s %s at %s\n", what, s, err) + os.Exit(2) + } + return x +} + +// Keep this function for debugging. +/* +func dump(msg string, val reflect.Value) { + fmt.Printf("%s:\n", msg) + ast.Print(fileSet, val.Interface()) + fmt.Println() +} +*/ + +// rewriteFile applies the rewrite rule 'pattern -> replace' to an entire file. +func rewriteFile(pattern, replace ast.Expr, p *ast.File) *ast.File { + cmap := ast.NewCommentMap(fileSet, p, p.Comments) + m := make(map[string]reflect.Value) + pat := reflect.ValueOf(pattern) + repl := reflect.ValueOf(replace) + + var rewriteVal func(val reflect.Value) reflect.Value + rewriteVal = func(val reflect.Value) reflect.Value { + // don't bother if val is invalid to start with + if !val.IsValid() { + return reflect.Value{} + } + val = apply(rewriteVal, val) + for k := range m { + delete(m, k) + } + if match(m, pat, val) { + val = subst(m, repl, reflect.ValueOf(val.Interface().(ast.Node).Pos())) + } + return val + } + + r := apply(rewriteVal, reflect.ValueOf(p)).Interface().(*ast.File) + r.Comments = cmap.Filter(r).Comments() // recreate comments list + return r +} + +// set is a wrapper for x.Set(y); it protects the caller from panics if x cannot be changed to y. +func set(x, y reflect.Value) { + // don't bother if x cannot be set or y is invalid + if !x.CanSet() || !y.IsValid() { + return + } + defer func() { + if x := recover(); x != nil { + if s, ok := x.(string); ok && + (strings.Contains(s, "type mismatch") || strings.Contains(s, "not assignable")) { + // x cannot be set to y - ignore this rewrite + return + } + panic(x) + } + }() + x.Set(y) +} + +// Values/types for special cases. +var ( + objectPtrNil = reflect.ValueOf((*ast.Object)(nil)) + scopePtrNil = reflect.ValueOf((*ast.Scope)(nil)) + + identType = reflect.TypeOf((*ast.Ident)(nil)) + objectPtrType = reflect.TypeOf((*ast.Object)(nil)) + positionType = reflect.TypeOf(token.NoPos) + callExprType = reflect.TypeOf((*ast.CallExpr)(nil)) + scopePtrType = reflect.TypeOf((*ast.Scope)(nil)) +) + +// apply replaces each AST field x in val with f(x), returning val. 
+// To avoid extra conversions, f operates on the reflect.Value form. +func apply(f func(reflect.Value) reflect.Value, val reflect.Value) reflect.Value { + if !val.IsValid() { + return reflect.Value{} + } + + // *ast.Objects introduce cycles and are likely incorrect after + // rewrite; don't follow them but replace with nil instead + if val.Type() == objectPtrType { + return objectPtrNil + } + + // similarly for scopes: they are likely incorrect after a rewrite; + // replace them with nil + if val.Type() == scopePtrType { + return scopePtrNil + } + + switch v := reflect.Indirect(val); v.Kind() { + case reflect.Slice: + for i := 0; i < v.Len(); i++ { + e := v.Index(i) + set(e, f(e)) + } + case reflect.Struct: + for i := 0; i < v.NumField(); i++ { + e := v.Field(i) + set(e, f(e)) + } + case reflect.Interface: + e := v.Elem() + set(v, f(e)) + } + return val +} + +func isWildcard(s string) bool { + rune, size := utf8.DecodeRuneInString(s) + return size == len(s) && unicode.IsLower(rune) +} + +// match reports whether pattern matches val, +// recording wildcard submatches in m. +// If m == nil, match checks whether pattern == val. +func match(m map[string]reflect.Value, pattern, val reflect.Value) bool { + // Wildcard matches any expression. If it appears multiple + // times in the pattern, it must match the same expression + // each time. + if m != nil && pattern.IsValid() && pattern.Type() == identType { + name := pattern.Interface().(*ast.Ident).Name + if isWildcard(name) && val.IsValid() { + // wildcards only match valid (non-nil) expressions. + if _, ok := val.Interface().(ast.Expr); ok && !val.IsNil() { + if old, ok := m[name]; ok { + return match(nil, old, val) + } + m[name] = val + return true + } + } + } + + // Otherwise, pattern and val must match recursively. + if !pattern.IsValid() || !val.IsValid() { + return !pattern.IsValid() && !val.IsValid() + } + if pattern.Type() != val.Type() { + return false + } + + // Special cases. + switch pattern.Type() { + case identType: + // For identifiers, only the names need to match + // (and none of the other *ast.Object information). + // This is a common case, handle it all here instead + // of recursing down any further via reflection. + p := pattern.Interface().(*ast.Ident) + v := val.Interface().(*ast.Ident) + return p == nil && v == nil || p != nil && v != nil && p.Name == v.Name + case objectPtrType, positionType: + // object pointers and token positions always match + return true + case callExprType: + // For calls, the Ellipsis fields (token.Position) must + // match since that is how f(x) and f(x...) are different. + // Check them here but fall through for the remaining fields. + p := pattern.Interface().(*ast.CallExpr) + v := val.Interface().(*ast.CallExpr) + if p.Ellipsis.IsValid() != v.Ellipsis.IsValid() { + return false + } + } + + p := reflect.Indirect(pattern) + v := reflect.Indirect(val) + if !p.IsValid() || !v.IsValid() { + return !p.IsValid() && !v.IsValid() + } + + switch p.Kind() { + case reflect.Slice: + if p.Len() != v.Len() { + return false + } + for i := 0; i < p.Len(); i++ { + if !match(m, p.Index(i), v.Index(i)) { + return false + } + } + return true + + case reflect.Struct: + for i := 0; i < p.NumField(); i++ { + if !match(m, p.Field(i), v.Field(i)) { + return false + } + } + return true + + case reflect.Interface: + return match(m, p.Elem(), v.Elem()) + } + + // Handle token integers, etc. 
+ return p.Interface() == v.Interface() +} + +// subst returns a copy of pattern with values from m substituted in place +// of wildcards and pos used as the position of tokens from the pattern. +// if m == nil, subst returns a copy of pattern and doesn't change the line +// number information. +func subst(m map[string]reflect.Value, pattern reflect.Value, pos reflect.Value) reflect.Value { + if !pattern.IsValid() { + return reflect.Value{} + } + + // Wildcard gets replaced with map value. + if m != nil && pattern.Type() == identType { + name := pattern.Interface().(*ast.Ident).Name + if isWildcard(name) { + if old, ok := m[name]; ok { + return subst(nil, old, reflect.Value{}) + } + } + } + + if pos.IsValid() && pattern.Type() == positionType { + // use new position only if old position was valid in the first place + if old := pattern.Interface().(token.Pos); !old.IsValid() { + return pattern + } + return pos + } + + // Otherwise copy. + switch p := pattern; p.Kind() { + case reflect.Slice: + v := reflect.MakeSlice(p.Type(), p.Len(), p.Len()) + for i := 0; i < p.Len(); i++ { + v.Index(i).Set(subst(m, p.Index(i), pos)) + } + return v + + case reflect.Struct: + v := reflect.New(p.Type()).Elem() + for i := 0; i < p.NumField(); i++ { + v.Field(i).Set(subst(m, p.Field(i), pos)) + } + return v + + case reflect.Ptr: + v := reflect.New(p.Type()).Elem() + if elem := p.Elem(); elem.IsValid() { + v.Set(subst(m, elem, pos).Addr()) + } + return v + + case reflect.Interface: + v := reflect.New(p.Type()).Elem() + if elem := p.Elem(); elem.IsValid() { + v.Set(subst(m, elem, pos)) + } + return v + } + + return pattern +} diff --git a/vendor/github.com/golangci/gofmt/gofmt/simplify.go b/vendor/github.com/golangci/gofmt/gofmt/simplify.go new file mode 100644 index 000000000..2c75495a6 --- /dev/null +++ b/vendor/github.com/golangci/gofmt/gofmt/simplify.go @@ -0,0 +1,165 @@ +// Copyright 2010 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gofmt + +import ( + "go/ast" + "go/token" + "reflect" +) + +type simplifier struct{} + +func (s simplifier) Visit(node ast.Node) ast.Visitor { + switch n := node.(type) { + case *ast.CompositeLit: + // array, slice, and map composite literals may be simplified + outer := n + var keyType, eltType ast.Expr + switch typ := outer.Type.(type) { + case *ast.ArrayType: + eltType = typ.Elt + case *ast.MapType: + keyType = typ.Key + eltType = typ.Value + } + + if eltType != nil { + var ktyp reflect.Value + if keyType != nil { + ktyp = reflect.ValueOf(keyType) + } + typ := reflect.ValueOf(eltType) + for i, x := range outer.Elts { + px := &outer.Elts[i] + // look at value of indexed/named elements + if t, ok := x.(*ast.KeyValueExpr); ok { + if keyType != nil { + s.simplifyLiteral(ktyp, keyType, t.Key, &t.Key) + } + x = t.Value + px = &t.Value + } + s.simplifyLiteral(typ, eltType, x, px) + } + // node was simplified - stop walk (there are no subnodes to simplify) + return nil + } + + case *ast.SliceExpr: + // a slice expression of the form: s[a:len(s)] + // can be simplified to: s[a:] + // if s is "simple enough" (for now we only accept identifiers) + // + // Note: This may not be correct because len may have been redeclared in another + // file belonging to the same package. However, this is extremely unlikely + // and so far (April 2016, after years of supporting this rewrite feature) + // has never come up, so let's keep it working as is (see also #15153). 
+ if n.Max != nil { + // - 3-index slices always require the 2nd and 3rd index + break + } + if s, _ := n.X.(*ast.Ident); s != nil && s.Obj != nil { + // the array/slice object is a single, resolved identifier + if call, _ := n.High.(*ast.CallExpr); call != nil && len(call.Args) == 1 && !call.Ellipsis.IsValid() { + // the high expression is a function call with a single argument + if fun, _ := call.Fun.(*ast.Ident); fun != nil && fun.Name == "len" && fun.Obj == nil { + // the function called is "len" and it is not locally defined; and + // because we don't have dot imports, it must be the predefined len() + if arg, _ := call.Args[0].(*ast.Ident); arg != nil && arg.Obj == s.Obj { + // the len argument is the array/slice object + n.High = nil + } + } + } + } + // Note: We could also simplify slice expressions of the form s[0:b] to s[:b] + // but we leave them as is since sometimes we want to be very explicit + // about the lower bound. + // An example where the 0 helps: + // x, y, z := b[0:2], b[2:4], b[4:6] + // An example where it does not: + // x, y := b[:n], b[n:] + + case *ast.RangeStmt: + // - a range of the form: for x, _ = range v {...} + // can be simplified to: for x = range v {...} + // - a range of the form: for _ = range v {...} + // can be simplified to: for range v {...} + if isBlank(n.Value) { + n.Value = nil + } + if isBlank(n.Key) && n.Value == nil { + n.Key = nil + } + } + + return s +} + +func (s simplifier) simplifyLiteral(typ reflect.Value, astType, x ast.Expr, px *ast.Expr) { + ast.Walk(s, x) // simplify x + + // if the element is a composite literal and its literal type + // matches the outer literal's element type exactly, the inner + // literal type may be omitted + if inner, ok := x.(*ast.CompositeLit); ok { + if match(nil, typ, reflect.ValueOf(inner.Type)) { + inner.Type = nil + } + } + // if the outer literal's element type is a pointer type *T + // and the element is & of a composite literal of type T, + // the inner &T may be omitted. + if ptr, ok := astType.(*ast.StarExpr); ok { + if addr, ok := x.(*ast.UnaryExpr); ok && addr.Op == token.AND { + if inner, ok := addr.X.(*ast.CompositeLit); ok { + if match(nil, reflect.ValueOf(ptr.X), reflect.ValueOf(inner.Type)) { + inner.Type = nil // drop T + *px = inner // drop & + } + } + } + } +} + +func isBlank(x ast.Expr) bool { + ident, ok := x.(*ast.Ident) + return ok && ident.Name == "_" +} + +func simplify(f *ast.File) { + // remove empty declarations such as "const ()", etc + removeEmptyDeclGroups(f) + + var s simplifier + ast.Walk(s, f) +} + +func removeEmptyDeclGroups(f *ast.File) { + i := 0 + for _, d := range f.Decls { + if g, ok := d.(*ast.GenDecl); !ok || !isEmpty(f, g) { + f.Decls[i] = d + i++ + } + } + f.Decls = f.Decls[:i] +} + +func isEmpty(f *ast.File, g *ast.GenDecl) bool { + if g.Doc != nil || g.Specs != nil { + return false + } + + for _, c := range f.Comments { + // if there is a comment in the declaration, it is not considered empty + if g.Pos() <= c.Pos() && c.End() <= g.End() { + return false + } + } + + return true +} diff --git a/vendor/github.com/golangci/gofmt/goimports/LICENSE b/vendor/github.com/golangci/gofmt/goimports/LICENSE new file mode 100644 index 000000000..6a66aea5e --- /dev/null +++ b/vendor/github.com/golangci/gofmt/goimports/LICENSE @@ -0,0 +1,27 @@ +Copyright (c) 2009 The Go Authors. All rights reserved. 
+ +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/github.com/golangci/gofmt/goimports/goimports.go b/vendor/github.com/golangci/gofmt/goimports/goimports.go new file mode 100644 index 000000000..8878b700d --- /dev/null +++ b/vendor/github.com/golangci/gofmt/goimports/goimports.go @@ -0,0 +1,358 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package goimports + +import ( + "bufio" + "bytes" + "errors" + "flag" + "fmt" + "go/scanner" + "io" + "io/ioutil" + "log" + "os" + "os/exec" + "path/filepath" + "runtime" + "runtime/pprof" + "strings" + + "golang.org/x/tools/imports" +) + +var ( + // main operation modes + list = flag.Bool("goimports.l", false, "list files whose formatting differs from goimport's") + write = flag.Bool("goimports.w", false, "write result to (source) file instead of stdout") + doDiff = flag.Bool("goimports.d", false, "display diffs instead of rewriting files") + srcdir = flag.String("goimports.srcdir", "", "choose imports as if source code is from `dir`. 
When operating on a single file, dir may instead be the complete file name.") + verbose bool // verbose logging + + cpuProfile = flag.String("goimports.cpuprofile", "", "CPU profile output") + memProfile = flag.String("goimports.memprofile", "", "memory profile output") + memProfileRate = flag.Int("goimports.memrate", 0, "if > 0, sets runtime.MemProfileRate") + + options = &imports.Options{ + TabWidth: 8, + TabIndent: true, + Comments: true, + Fragment: true, + } + exitCode = 0 +) + +func init() { + flag.BoolVar(&options.AllErrors, "goimports.e", false, "report all errors (not just the first 10 on different lines)") + flag.StringVar(&imports.LocalPrefix, "goimports.local", "", "put imports beginning with this string after 3rd-party packages; comma-separated list") +} + +func report(err error) { + scanner.PrintError(os.Stderr, err) + exitCode = 2 +} + +func usage() { + fmt.Fprintf(os.Stderr, "usage: goimports [flags] [path ...]\n") + flag.PrintDefaults() + os.Exit(2) +} + +func isGoFile(f os.FileInfo) bool { + // ignore non-Go files + name := f.Name() + return !f.IsDir() && !strings.HasPrefix(name, ".") && strings.HasSuffix(name, ".go") +} + +// argumentType is which mode goimports was invoked as. +type argumentType int + +const ( + // fromStdin means the user is piping their source into goimports. + fromStdin argumentType = iota + + // singleArg is the common case from editors, when goimports is run on + // a single file. + singleArg + + // multipleArg is when the user ran "goimports file1.go file2.go" + // or ran goimports on a directory tree. + multipleArg +) + +func processFile(filename string, in io.Reader, out io.Writer, argType argumentType) error { + opt := options + if argType == fromStdin { + nopt := *options + nopt.Fragment = true + opt = &nopt + } + + if in == nil { + f, err := os.Open(filename) + if err != nil { + return err + } + defer f.Close() + in = f + } + + src, err := ioutil.ReadAll(in) + if err != nil { + return err + } + + target := filename + if *srcdir != "" { + // Determine whether the provided -srcdirc is a directory or file + // and then use it to override the target. + // + // See https://github.com/dominikh/go-mode.el/issues/146 + if isFile(*srcdir) { + if argType == multipleArg { + return errors.New("-srcdir value can't be a file when passing multiple arguments or when walking directories") + } + target = *srcdir + } else if argType == singleArg && strings.HasSuffix(*srcdir, ".go") && !isDir(*srcdir) { + // For a file which doesn't exist on disk yet, but might shortly. + // e.g. user in editor opens $DIR/newfile.go and newfile.go doesn't yet exist on disk. + // The goimports on-save hook writes the buffer to a temp file + // first and runs goimports before the actual save to newfile.go. + // The editor's buffer is named "newfile.go" so that is passed to goimports as: + // goimports -srcdir=/gopath/src/pkg/newfile.go /tmp/gofmtXXXXXXXX.go + // and then the editor reloads the result from the tmp file and writes + // it to newfile.go. + target = *srcdir + } else { + // Pretend that file is from *srcdir in order to decide + // visible imports correctly. 
+ target = filepath.Join(*srcdir, filepath.Base(filename)) + } + } + + res, err := imports.Process(target, src, opt) + if err != nil { + return err + } + + if !bytes.Equal(src, res) { + // formatting has changed + if *list { + fmt.Fprintln(out, filename) + } + if *write { + if argType == fromStdin { + // filename is "" + return errors.New("can't use -w on stdin") + } + err = ioutil.WriteFile(filename, res, 0) + if err != nil { + return err + } + } + if *doDiff { + if argType == fromStdin { + filename = "stdin.go" // because .orig looks silly + } + data, err := diff(src, res, filename) + if err != nil { + return fmt.Errorf("computing diff: %s", err) + } + fmt.Printf("diff -u %s %s\n", filepath.ToSlash(filename+".orig"), filepath.ToSlash(filename)) + out.Write(data) + } + } + + if !*list && !*write && !*doDiff { + _, err = out.Write(res) + } + + return err +} + +func visitFile(path string, f os.FileInfo, err error) error { + if err == nil && isGoFile(f) { + err = processFile(path, nil, os.Stdout, multipleArg) + } + if err != nil { + report(err) + } + return nil +} + +func walkDir(path string) { + filepath.Walk(path, visitFile) +} + +// parseFlags parses command line flags and returns the paths to process. +// It's a var so that custom implementations can replace it in other files. +var parseFlags = func() []string { + flag.BoolVar(&verbose, "v", false, "verbose logging") + + flag.Parse() + return flag.Args() +} + +func bufferedFileWriter(dest string) (w io.Writer, close func()) { + f, err := os.Create(dest) + if err != nil { + log.Fatal(err) + } + bw := bufio.NewWriter(f) + return bw, func() { + if err := bw.Flush(); err != nil { + log.Fatalf("error flushing %v: %v", dest, err) + } + if err := f.Close(); err != nil { + log.Fatal(err) + } + } +} + +func gofmtMain() { + flag.Usage = usage + paths := parseFlags() + + if *cpuProfile != "" { + bw, flush := bufferedFileWriter(*cpuProfile) + pprof.StartCPUProfile(bw) + defer flush() + defer pprof.StopCPUProfile() + } + // doTrace is a conditionally compiled wrapper around runtime/trace. It is + // used to allow goimports to compile under gccgo, which does not support + // runtime/trace. See https://golang.org/issue/15544. 
+ if *memProfileRate > 0 { + runtime.MemProfileRate = *memProfileRate + bw, flush := bufferedFileWriter(*memProfile) + defer func() { + runtime.GC() // materialize all statistics + if err := pprof.WriteHeapProfile(bw); err != nil { + log.Fatal(err) + } + flush() + }() + } + + if verbose { + log.SetFlags(log.LstdFlags | log.Lmicroseconds) + imports.Debug = true + } + if options.TabWidth < 0 { + fmt.Fprintf(os.Stderr, "negative tabwidth %d\n", options.TabWidth) + exitCode = 2 + return + } + + if len(paths) == 0 { + if err := processFile("", os.Stdin, os.Stdout, fromStdin); err != nil { + report(err) + } + return + } + + argType := singleArg + if len(paths) > 1 { + argType = multipleArg + } + + for _, path := range paths { + switch dir, err := os.Stat(path); { + case err != nil: + report(err) + case dir.IsDir(): + walkDir(path) + default: + if err := processFile(path, nil, os.Stdout, argType); err != nil { + report(err) + } + } + } +} + +func writeTempFile(dir, prefix string, data []byte) (string, error) { + file, err := ioutil.TempFile(dir, prefix) + if err != nil { + return "", err + } + _, err = file.Write(data) + if err1 := file.Close(); err == nil { + err = err1 + } + if err != nil { + os.Remove(file.Name()) + return "", err + } + return file.Name(), nil +} + +func diff(b1, b2 []byte, filename string) (data []byte, err error) { + f1, err := writeTempFile("", "gofmt", b1) + if err != nil { + return + } + defer os.Remove(f1) + + f2, err := writeTempFile("", "gofmt", b2) + if err != nil { + return + } + defer os.Remove(f2) + + cmd := "diff" + if runtime.GOOS == "plan9" { + cmd = "/bin/ape/diff" + } + + data, err = exec.Command(cmd, "-u", f1, f2).CombinedOutput() + if len(data) > 0 { + // diff exits with a non-zero status when the files don't match. + // Ignore that failure as long as we get output. + return replaceTempFilename(data, filename) + } + return +} + +// replaceTempFilename replaces temporary filenames in diff with actual one. +// +// --- /tmp/gofmt316145376 2017-02-03 19:13:00.280468375 -0500 +// +++ /tmp/gofmt617882815 2017-02-03 19:13:00.280468375 -0500 +// ... +// -> +// --- path/to/file.go.orig 2017-02-03 19:13:00.280468375 -0500 +// +++ path/to/file.go 2017-02-03 19:13:00.280468375 -0500 +// ... +func replaceTempFilename(diff []byte, filename string) ([]byte, error) { + bs := bytes.SplitN(diff, []byte{'\n'}, 3) + if len(bs) < 3 { + return nil, fmt.Errorf("got unexpected diff for %s", filename) + } + // Preserve timestamps. + var t0, t1 []byte + if i := bytes.LastIndexByte(bs[0], '\t'); i != -1 { + t0 = bs[0][i:] + } + if i := bytes.LastIndexByte(bs[1], '\t'); i != -1 { + t1 = bs[1][i:] + } + // Always print filepath with slash separator. + f := filepath.ToSlash(filename) + bs[0] = []byte(fmt.Sprintf("--- %s%s", f+".orig", t0)) + bs[1] = []byte(fmt.Sprintf("+++ %s%s", f, t1)) + return bytes.Join(bs, []byte{'\n'}), nil +} + +// isFile reports whether name is a file. +func isFile(name string) bool { + fi, err := os.Stat(name) + return err == nil && fi.Mode().IsRegular() +} + +// isDir reports whether name is a directory. 
+func isDir(name string) bool { + fi, err := os.Stat(name) + return err == nil && fi.IsDir() +} diff --git a/vendor/github.com/golangci/gofmt/goimports/golangci.go b/vendor/github.com/golangci/gofmt/goimports/golangci.go new file mode 100644 index 000000000..e9d013d58 --- /dev/null +++ b/vendor/github.com/golangci/gofmt/goimports/golangci.go @@ -0,0 +1,33 @@ +package goimports + +import ( + "bytes" + "fmt" + "io/ioutil" + + "golang.org/x/tools/imports" +) + +func Run(filename string) ([]byte, error) { + src, err := ioutil.ReadFile(filename) + if err != nil { + return nil, err + } + + res, err := imports.Process(filename, src, options) + if err != nil { + return nil, err + } + + if bytes.Equal(src, res) { + return nil, nil + } + + // formatting has changed + data, err := diff(src, res, filename) + if err != nil { + return nil, fmt.Errorf("error computing diff: %s", err) + } + + return data, nil +} diff --git a/vendor/github.com/golangci/golangci-lint/LICENSE b/vendor/github.com/golangci/golangci-lint/LICENSE new file mode 100644 index 000000000..e72bfddab --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/LICENSE @@ -0,0 +1,674 @@ + GNU GENERAL PUBLIC LICENSE + Version 3, 29 June 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The GNU General Public License is a free, copyleft license for +software and other kinds of works. + + The licenses for most software and other practical works are designed +to take away your freedom to share and change the works. By contrast, +the GNU General Public License is intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. We, the Free Software Foundation, use the +GNU General Public License for most of our software; it applies also to +any other work released this way by its authors. You can apply it to +your programs, too. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. + + To protect your rights, we need to prevent others from denying you +these rights or asking you to surrender the rights. Therefore, you have +certain responsibilities if you distribute copies of the software, or if +you modify it: responsibilities to respect the freedom of others. + + For example, if you distribute copies of such a program, whether +gratis or for a fee, you must pass on to the recipients the same +freedoms that you received. You must make sure that they, too, receive +or can get the source code. And you must show them these terms so they +know their rights. + + Developers that use the GNU GPL protect your rights with two steps: +(1) assert copyright on the software, and (2) offer you this License +giving you legal permission to copy, distribute and/or modify it. + + For the developers' and authors' protection, the GPL clearly explains +that there is no warranty for this free software. For both users' and +authors' sake, the GPL requires that modified versions be marked as +changed, so that their problems will not be attributed erroneously to +authors of previous versions. 
+ + Some devices are designed to deny users access to install or run +modified versions of the software inside them, although the manufacturer +can do so. This is fundamentally incompatible with the aim of +protecting users' freedom to change the software. The systematic +pattern of such abuse occurs in the area of products for individuals to +use, which is precisely where it is most unacceptable. Therefore, we +have designed this version of the GPL to prohibit the practice for those +products. If such problems arise substantially in other domains, we +stand ready to extend this provision to those domains in future versions +of the GPL, as needed to protect the freedom of users. + + Finally, every program is threatened constantly by software patents. +States should not allow patents to restrict development and use of +software on general-purpose computers, but in those that do, we wish to +avoid the special danger that patents applied to a free program could +make it effectively proprietary. To prevent this, the GPL assures that +patents cannot be used to render the program non-free. + + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. + + "This License" refers to version 3 of the GNU General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. + + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. 
+ + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. + + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. + + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. This License explicitly affirms your unlimited +permission to run the unmodified Program. The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. 
+ + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. + + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. + + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. + + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. + + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. 
+ + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. + + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. + + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. 
+ + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. + + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. + + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. + + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. + + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. 
+ + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. + + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. + + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. + + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. 
If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. + + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. + + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. + + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). 
To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. + + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Use with the GNU Affero General Public License. 
+ + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU Affero General Public License into a single +combined work, and to convey the resulting work. The terms of this +License will continue to apply to the part which is the covered work, +but the special requirements of the GNU Affero General Public License, +section 13, concerning interaction through a network will apply to the +combination as such. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU General Public License from time to time. Such new versions will +be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU General Public License, you may choose any version ever published +by the Free Software Foundation. + + If the Program specifies that a proxy can decide which future +versions of the GNU General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. + + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS +THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY +GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE +USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF +DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD +PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), +EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF +SUCH DAMAGES. + + 17. Interpretation of Sections 15 and 16. + + If the disclaimer of warranty and limitation of liability provided +above cannot be given local legal effect according to their terms, +reviewing courts shall apply local law that most closely approximates +an absolute waiver of all civil liability in connection with the +Program, unless a warranty or assumption of liability accompanies a +copy of the Program in return for a fee. 
+ + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Programs + + If you develop a new program, and you want it to be of the greatest +possible use to the public, the best way to achieve this is to make it +free software which everyone can redistribute and change under these terms. + + To do so, attach the following notices to the program. It is safest +to attach them to the start of each source file to most effectively +state the exclusion of warranty; and each file should have at least +the "copyright" line and a pointer to where the full notice is found. + + + Copyright (C) + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program. If not, see . + +Also add information on how to contact you by electronic and paper mail. + + If the program does terminal interaction, make it output a short +notice like this when it starts in an interactive mode: + + Copyright (C) + This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. + This is free software, and you are welcome to redistribute it + under certain conditions; type `show c' for details. + +The hypothetical commands `show w' and `show c' should show the appropriate +parts of the General Public License. Of course, your program's commands +might be different; for a GUI interface, you would use an "about box". + + You should also get your employer (if you work as a programmer) or school, +if any, to sign a "copyright disclaimer" for the program, if necessary. +For more information on this, and how to apply and follow the GNU GPL, see +. + + The GNU General Public License does not permit incorporating your program +into proprietary programs. If your program is a subroutine library, you +may consider it more useful to permit linking proprietary applications with +the library. If this is what you want to do, use the GNU Lesser General +Public License instead of this License. But first, please read +. \ No newline at end of file diff --git a/vendor/github.com/golangci/golangci-lint/cmd/golangci-lint/main.go b/vendor/github.com/golangci/golangci-lint/cmd/golangci-lint/main.go new file mode 100644 index 000000000..282d794b8 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/cmd/golangci-lint/main.go @@ -0,0 +1,25 @@ +package main + +import ( + "fmt" + "os" + + "github.com/golangci/golangci-lint/pkg/commands" + "github.com/golangci/golangci-lint/pkg/exitcodes" +) + +var ( + // Populated by goreleaser during build + version = "master" + commit = "?" 
+ date = "" +) + +func main() { + e := commands.NewExecutor(version, commit, date) + + if err := e.Execute(); err != nil { + fmt.Fprintf(os.Stderr, "failed executing command with error %v\n", err) + os.Exit(exitcodes.Failure) + } +} diff --git a/vendor/github.com/golangci/golangci-lint/cmd/golangci-lint/mod_version.go b/vendor/github.com/golangci/golangci-lint/cmd/golangci-lint/mod_version.go new file mode 100644 index 000000000..119a8a60d --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/cmd/golangci-lint/mod_version.go @@ -0,0 +1,17 @@ +package main + +import ( + "fmt" + "runtime/debug" +) + +//nolint:gochecknoinits +func init() { + if info, available := debug.ReadBuildInfo(); available { + if date == "" { + version = info.Main.Version + commit = fmt.Sprintf("(unknown, mod sum: %q)", info.Main.Sum) + date = "(unknown)" + } + } +} diff --git a/vendor/github.com/golangci/golangci-lint/internal/cache/cache.go b/vendor/github.com/golangci/golangci-lint/internal/cache/cache.go new file mode 100644 index 000000000..51c75a77d --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/internal/cache/cache.go @@ -0,0 +1,527 @@ +// Copyright 2017 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package cache implements a build artifact cache. +// +// This package is a slightly modified fork of Go's +// cmd/go/internal/cache package. +package cache + +import ( + "bytes" + "crypto/sha256" + "encoding/hex" + "fmt" + "io" + "os" + "path/filepath" + "strconv" + "strings" + "time" + + "github.com/pkg/errors" + + "github.com/golangci/golangci-lint/internal/renameio" + "github.com/golangci/golangci-lint/internal/robustio" +) + +// An ActionID is a cache action key, the hash of a complete description of a +// repeatable computation (command line, environment variables, +// input file contents, executable contents). +type ActionID [HashSize]byte + +// An OutputID is a cache output key, the hash of an output of a computation. +type OutputID [HashSize]byte + +// A Cache is a package cache, backed by a file system directory tree. +type Cache struct { + dir string + now func() time.Time +} + +// Open opens and returns the cache in the given directory. +// +// It is safe for multiple processes on a single machine to use the +// same cache directory in a local file system simultaneously. +// They will coordinate using operating system file locks and may +// duplicate effort but will not corrupt the cache. +// +// However, it is NOT safe for multiple processes on different machines +// to share a cache directory (for example, if the directory were stored +// in a network file system). File locking is notoriously unreliable in +// network file systems and may not suffice to protect the cache. +// +func Open(dir string) (*Cache, error) { + info, err := os.Stat(dir) + if err != nil { + return nil, err + } + if !info.IsDir() { + return nil, &os.PathError{Op: "open", Path: dir, Err: fmt.Errorf("not a directory")} + } + for i := 0; i < 256; i++ { + name := filepath.Join(dir, fmt.Sprintf("%02x", i)) + if err := os.MkdirAll(name, 0744); err != nil { + return nil, err + } + } + c := &Cache{ + dir: dir, + now: time.Now, + } + return c, nil +} + +// fileName returns the name of the file corresponding to the given id. 
+func (c *Cache) fileName(id [HashSize]byte, key string) string { + return filepath.Join(c.dir, fmt.Sprintf("%02x", id[0]), fmt.Sprintf("%x", id)+"-"+key) +} + +var errMissing = errors.New("cache entry not found") + +func IsErrMissing(err error) bool { + return errors.Cause(err) == errMissing +} + +const ( + // action entry file is "v1 \n" + hexSize = HashSize * 2 + entrySize = 2 + 1 + hexSize + 1 + hexSize + 1 + 20 + 1 + 20 + 1 +) + +// verify controls whether to run the cache in verify mode. +// In verify mode, the cache always returns errMissing from Get +// but then double-checks in Put that the data being written +// exactly matches any existing entry. This provides an easy +// way to detect program behavior that would have been different +// had the cache entry been returned from Get. +// +// verify is enabled by setting the environment variable +// GODEBUG=gocacheverify=1. +var verify = false + +// DebugTest is set when GODEBUG=gocachetest=1 is in the environment. +var DebugTest = false + +func init() { initEnv() } + +func initEnv() { + verify = false + debugHash = false + debug := strings.Split(os.Getenv("GODEBUG"), ",") + for _, f := range debug { + if f == "gocacheverify=1" { + verify = true + } + if f == "gocachehash=1" { + debugHash = true + } + if f == "gocachetest=1" { + DebugTest = true + } + } +} + +// Get looks up the action ID in the cache, +// returning the corresponding output ID and file size, if any. +// Note that finding an output ID does not guarantee that the +// saved file for that output ID is still available. +func (c *Cache) Get(id ActionID) (Entry, error) { + if verify { + return Entry{}, errMissing + } + return c.get(id) +} + +type Entry struct { + OutputID OutputID + Size int64 + Time time.Time +} + +// get is Get but does not respect verify mode, so that Put can use it. 
+func (c *Cache) get(id ActionID) (Entry, error) { + missing := func() (Entry, error) { + return Entry{}, errMissing + } + failed := func(err error) (Entry, error) { + return Entry{}, err + } + fileName := c.fileName(id, "a") + f, err := os.Open(fileName) + if err != nil { + if os.IsNotExist(err) { + return missing() + } + return failed(err) + } + defer f.Close() + entry := make([]byte, entrySize+1) // +1 to detect whether f is too long + if n, readErr := io.ReadFull(f, entry); n != entrySize || readErr != io.ErrUnexpectedEOF { + return failed(fmt.Errorf("read %d/%d bytes from %s with error %s", n, entrySize, fileName, readErr)) + } + if entry[0] != 'v' || entry[1] != '1' || entry[2] != ' ' || entry[3+hexSize] != ' ' || entry[3+hexSize+1+hexSize] != ' ' || entry[3+hexSize+1+hexSize+1+20] != ' ' || entry[entrySize-1] != '\n' { + return failed(fmt.Errorf("bad data in %s", fileName)) + } + eid, entry := entry[3:3+hexSize], entry[3+hexSize:] + eout, entry := entry[1:1+hexSize], entry[1+hexSize:] + esize, entry := entry[1:1+20], entry[1+20:] + etime := entry[1 : 1+20] + var buf [HashSize]byte + if _, err = hex.Decode(buf[:], eid); err != nil || buf != id { + return failed(errors.Wrapf(err, "failed to hex decode eid data in %s", fileName)) + } + if _, err = hex.Decode(buf[:], eout); err != nil { + return failed(errors.Wrapf(err, "failed to hex decode eout data in %s", fileName)) + } + i := 0 + for i < len(esize) && esize[i] == ' ' { + i++ + } + size, err := strconv.ParseInt(string(esize[i:]), 10, 64) + if err != nil || size < 0 { + return failed(fmt.Errorf("failed to parse esize int from %s with error %s", fileName, err)) + } + i = 0 + for i < len(etime) && etime[i] == ' ' { + i++ + } + tm, err := strconv.ParseInt(string(etime[i:]), 10, 64) + if err != nil || tm < 0 { + return failed(fmt.Errorf("failed to parse etime int from %s with error %s", fileName, err)) + } + + if err = c.used(fileName); err != nil { + return failed(errors.Wrapf(err, "failed to mark %s as used", fileName)) + } + + return Entry{buf, size, time.Unix(0, tm)}, nil +} + +// GetBytes looks up the action ID in the cache and returns +// the corresponding output bytes. +// GetBytes should only be used for data that can be expected to fit in memory. +func (c *Cache) GetBytes(id ActionID) ([]byte, Entry, error) { + entry, err := c.Get(id) + if err != nil { + return nil, entry, err + } + outputFile, err := c.OutputFile(entry.OutputID) + if err != nil { + return nil, entry, err + } + + data, err := robustio.ReadFile(outputFile) + if err != nil { + return nil, entry, err + } + + if sha256.Sum256(data) != entry.OutputID { + return nil, entry, errMissing + } + return data, entry, nil +} + +// OutputFile returns the name of the cache file storing output with the given OutputID. +func (c *Cache) OutputFile(out OutputID) (string, error) { + file := c.fileName(out, "d") + if err := c.used(file); err != nil { + return "", err + } + return file, nil +} + +// Time constants for cache expiration. +// +// We set the mtime on a cache file on each use, but at most one per mtimeInterval (1 hour), +// to avoid causing many unnecessary inode updates. The mtimes therefore +// roughly reflect "time of last use" but may in fact be older by at most an hour. +// +// We scan the cache for entries to delete at most once per trimInterval (1 day). +// +// When we do scan the cache, we delete entries that have not been used for +// at least trimLimit (5 days). 
Statistics gathered from a month of usage by +// Go developers found that essentially all reuse of cached entries happened +// within 5 days of the previous reuse. See golang.org/issue/22990. +const ( + mtimeInterval = 1 * time.Hour + trimInterval = 24 * time.Hour + trimLimit = 5 * 24 * time.Hour +) + +// used makes a best-effort attempt to update mtime on file, +// so that mtime reflects cache access time. +// +// Because the reflection only needs to be approximate, +// and to reduce the amount of disk activity caused by using +// cache entries, used only updates the mtime if the current +// mtime is more than an hour old. This heuristic eliminates +// nearly all of the mtime updates that would otherwise happen, +// while still keeping the mtimes useful for cache trimming. +func (c *Cache) used(file string) error { + info, err := os.Stat(file) + if err != nil { + if os.IsNotExist(err) { + return errMissing + } + return errors.Wrapf(err, "failed to stat file %s", file) + } + + if c.now().Sub(info.ModTime()) < mtimeInterval { + return nil + } + + if err := os.Chtimes(file, c.now(), c.now()); err != nil { + return errors.Wrapf(err, "failed to change time of file %s", file) + } + + return nil +} + +// Trim removes old cache entries that are likely not to be reused. +func (c *Cache) Trim() { + now := c.now() + + // We maintain in dir/trim.txt the time of the last completed cache trim. + // If the cache has been trimmed recently enough, do nothing. + // This is the common case. + data, _ := renameio.ReadFile(filepath.Join(c.dir, "trim.txt")) + t, err := strconv.ParseInt(strings.TrimSpace(string(data)), 10, 64) + if err == nil && now.Sub(time.Unix(t, 0)) < trimInterval { + return + } + + // Trim each of the 256 subdirectories. + // We subtract an additional mtimeInterval + // to account for the imprecision of our "last used" mtimes. + cutoff := now.Add(-trimLimit - mtimeInterval) + for i := 0; i < 256; i++ { + subdir := filepath.Join(c.dir, fmt.Sprintf("%02x", i)) + c.trimSubdir(subdir, cutoff) + } + + // Ignore errors from here: if we don't write the complete timestamp, the + // cache will appear older than it is, and we'll trim it again next time. + _ = renameio.WriteFile(filepath.Join(c.dir, "trim.txt"), []byte(fmt.Sprintf("%d", now.Unix())), 0666) +} + +// trimSubdir trims a single cache subdirectory. +func (c *Cache) trimSubdir(subdir string, cutoff time.Time) { + // Read all directory entries from subdir before removing + // any files, in case removing files invalidates the file offset + // in the directory scan. Also, ignore error from f.Readdirnames, + // because we don't care about reporting the error and we still + // want to process any entries found before the error. + f, err := os.Open(subdir) + if err != nil { + return + } + names, _ := f.Readdirnames(-1) + f.Close() + + for _, name := range names { + // Remove only cache entries (xxxx-a and xxxx-d). + if !strings.HasSuffix(name, "-a") && !strings.HasSuffix(name, "-d") { + continue + } + entry := filepath.Join(subdir, name) + info, err := os.Stat(entry) + if err == nil && info.ModTime().Before(cutoff) { + os.Remove(entry) + } + } +} + +// putIndexEntry adds an entry to the cache recording that executing the action +// with the given id produces an output with the given output id (hash) and size. 
+func (c *Cache) putIndexEntry(id ActionID, out OutputID, size int64, allowVerify bool) error { + // Note: We expect that for one reason or another it may happen + // that repeating an action produces a different output hash + // (for example, if the output contains a time stamp or temp dir name). + // While not ideal, this is also not a correctness problem, so we + // don't make a big deal about it. In particular, we leave the action + // cache entries writable specifically so that they can be overwritten. + // + // Setting GODEBUG=gocacheverify=1 does make a big deal: + // in verify mode we are double-checking that the cache entries + // are entirely reproducible. As just noted, this may be unrealistic + // in some cases but the check is also useful for shaking out real bugs. + entry := fmt.Sprintf("v1 %x %x %20d %20d\n", id, out, size, time.Now().UnixNano()) + + if verify && allowVerify { + old, err := c.get(id) + if err == nil && (old.OutputID != out || old.Size != size) { + // panic to show stack trace, so we can see what code is generating this cache entry. + msg := fmt.Sprintf("go: internal cache error: cache verify failed: id=%x changed:<<<\n%s\n>>>\nold: %x %d\nnew: %x %d", id, reverseHash(id), out, size, old.OutputID, old.Size) + panic(msg) + } + } + file := c.fileName(id, "a") + + // Copy file to cache directory. + mode := os.O_WRONLY | os.O_CREATE + f, err := os.OpenFile(file, mode, 0666) + if err != nil { + return err + } + _, err = f.WriteString(entry) + if err == nil { + // Truncate the file only *after* writing it. + // (This should be a no-op, but truncate just in case of previous corruption.) + // + // This differs from ioutil.WriteFile, which truncates to 0 *before* writing + // via os.O_TRUNC. Truncating only after writing ensures that a second write + // of the same content to the same file is idempotent, and does not — even + // temporarily! — undo the effect of the first write. + err = f.Truncate(int64(len(entry))) + } + if closeErr := f.Close(); err == nil { + err = closeErr + } + if err != nil { + // TODO(bcmills): This Remove potentially races with another go command writing to file. + // Can we eliminate it? + os.Remove(file) + return err + } + if err = os.Chtimes(file, c.now(), c.now()); err != nil { // mainly for tests + return errors.Wrapf(err, "failed to change time of file %s", file) + } + + return nil +} + +// Put stores the given output in the cache as the output for the action ID. +// It may read file twice. The content of file must not change between the two passes. +func (c *Cache) Put(id ActionID, file io.ReadSeeker) (OutputID, int64, error) { + return c.put(id, file, true) +} + +// PutNoVerify is like Put but disables the verify check +// when GODEBUG=goverifycache=1 is set. +// It is meant for data that is OK to cache but that we expect to vary slightly from run to run, +// like test output containing times and the like. +func (c *Cache) PutNoVerify(id ActionID, file io.ReadSeeker) (OutputID, int64, error) { + return c.put(id, file, false) +} + +func (c *Cache) put(id ActionID, file io.ReadSeeker, allowVerify bool) (OutputID, int64, error) { + // Compute output ID. + h := sha256.New() + if _, err := file.Seek(0, 0); err != nil { + return OutputID{}, 0, err + } + size, err := io.Copy(h, file) + if err != nil { + return OutputID{}, 0, err + } + var out OutputID + h.Sum(out[:0]) + + // Copy to cached output file (if not already present). + if err := c.copyFile(file, out, size); err != nil { + return out, size, err + } + + // Add to cache index. 
+ return out, size, c.putIndexEntry(id, out, size, allowVerify) +} + +// PutBytes stores the given bytes in the cache as the output for the action ID. +func (c *Cache) PutBytes(id ActionID, data []byte) error { + _, _, err := c.Put(id, bytes.NewReader(data)) + return err +} + +// copyFile copies file into the cache, expecting it to have the given +// output ID and size, if that file is not present already. +func (c *Cache) copyFile(file io.ReadSeeker, out OutputID, size int64) error { + name := c.fileName(out, "d") + info, err := os.Stat(name) + if err == nil && info.Size() == size { + // Check hash. + if f, openErr := os.Open(name); openErr == nil { + h := sha256.New() + if _, copyErr := io.Copy(h, f); copyErr != nil { + return errors.Wrap(copyErr, "failed to copy to sha256") + } + + f.Close() + var out2 OutputID + h.Sum(out2[:0]) + if out == out2 { + return nil + } + } + // Hash did not match. Fall through and rewrite file. + } + + // Copy file to cache directory. + mode := os.O_RDWR | os.O_CREATE + if err == nil && info.Size() > size { // shouldn't happen but fix in case + mode |= os.O_TRUNC + } + f, err := os.OpenFile(name, mode, 0666) + if err != nil { + return err + } + defer f.Close() + if size == 0 { + // File now exists with correct size. + // Only one possible zero-length file, so contents are OK too. + // Early return here makes sure there's a "last byte" for code below. + return nil + } + + // From here on, if any of the I/O writing the file fails, + // we make a best-effort attempt to truncate the file f + // before returning, to avoid leaving bad bytes in the file. + + // Copy file to f, but also into h to double-check hash. + if _, err = file.Seek(0, 0); err != nil { + _ = f.Truncate(0) + return err + } + h := sha256.New() + w := io.MultiWriter(f, h) + if _, err = io.CopyN(w, file, size-1); err != nil { + _ = f.Truncate(0) + return err + } + // Check last byte before writing it; writing it will make the size match + // what other processes expect to find and might cause them to start + // using the file. + buf := make([]byte, 1) + if _, err = file.Read(buf); err != nil { + _ = f.Truncate(0) + return err + } + if n, wErr := h.Write(buf); n != len(buf) { + return fmt.Errorf("wrote to hash %d/%d bytes with error %s", n, len(buf), wErr) + } + + sum := h.Sum(nil) + if !bytes.Equal(sum, out[:]) { + _ = f.Truncate(0) + return fmt.Errorf("file content changed underfoot") + } + + // Commit cache file entry. + if _, err = f.Write(buf); err != nil { + _ = f.Truncate(0) + return err + } + if err = f.Close(); err != nil { + // Data might not have been written, + // but file may look like it is the right size. + // To be extra careful, remove cached file. + os.Remove(name) + return err + } + if err = os.Chtimes(name, c.now(), c.now()); err != nil { // mainly for tests + return errors.Wrapf(err, "failed to change time of file %s", name) + } + + return nil +} diff --git a/vendor/github.com/golangci/golangci-lint/internal/cache/default.go b/vendor/github.com/golangci/golangci-lint/internal/cache/default.go new file mode 100644 index 000000000..e8866cb30 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/internal/cache/default.go @@ -0,0 +1,87 @@ +// Copyright 2017 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package cache + +import ( + "fmt" + "io/ioutil" + "log" + "os" + "path/filepath" + "sync" +) + +// Default returns the default cache to use. 
+func Default() (*Cache, error) { + defaultOnce.Do(initDefaultCache) + return defaultCache, defaultDirErr +} + +var ( + defaultOnce sync.Once + defaultCache *Cache +) + +// cacheREADME is a message stored in a README in the cache directory. +// Because the cache lives outside the normal Go trees, we leave the +// README as a courtesy to explain where it came from. +const cacheREADME = `This directory holds cached build artifacts from golangci-lint. +` + +// initDefaultCache does the work of finding the default cache +// the first time Default is called. +func initDefaultCache() { + dir := DefaultDir() + if err := os.MkdirAll(dir, 0744); err != nil { + log.Fatalf("failed to initialize build cache at %s: %s\n", dir, err) + } + if _, err := os.Stat(filepath.Join(dir, "README")); err != nil { + // Best effort. + if wErr := ioutil.WriteFile(filepath.Join(dir, "README"), []byte(cacheREADME), 0666); wErr != nil { + log.Fatalf("Failed to write README file to cache dir %s: %s", dir, err) + } + } + + c, err := Open(dir) + if err != nil { + log.Fatalf("failed to initialize build cache at %s: %s\n", dir, err) + } + defaultCache = c +} + +var ( + defaultDirOnce sync.Once + defaultDir string + defaultDirErr error +) + +// DefaultDir returns the effective GOLANGCI_LINT_CACHE setting. +func DefaultDir() string { + // Save the result of the first call to DefaultDir for later use in + // initDefaultCache. cmd/go/main.go explicitly sets GOCACHE so that + // subprocesses will inherit it, but that means initDefaultCache can't + // otherwise distinguish between an explicit "off" and a UserCacheDir error. + + defaultDirOnce.Do(func() { + defaultDir = os.Getenv("GOLANGCI_LINT_CACHE") + if filepath.IsAbs(defaultDir) { + return + } + if defaultDir != "" { + defaultDirErr = fmt.Errorf("GOLANGCI_LINT_CACHE is not an absolute path") + return + } + + // Compute default location. + dir, err := os.UserCacheDir() + if err != nil { + defaultDirErr = fmt.Errorf("GOLANGCI_LINT_CACHE is not defined and %v", err) + return + } + defaultDir = filepath.Join(dir, "golangci-lint") + }) + + return defaultDir +} diff --git a/vendor/github.com/golangci/golangci-lint/internal/cache/hash.go b/vendor/github.com/golangci/golangci-lint/internal/cache/hash.go new file mode 100644 index 000000000..4ce79e325 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/internal/cache/hash.go @@ -0,0 +1,186 @@ +// Copyright 2017 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package cache + +import ( + "bytes" + "crypto/sha256" + "fmt" + "hash" + "io" + "os" + "sync" +) + +var debugHash = false // set when GODEBUG=gocachehash=1 + +// HashSize is the number of bytes in a hash. +const HashSize = 32 + +// A Hash provides access to the canonical hash function used to index the cache. +// The current implementation uses salted SHA256, but clients must not assume this. +type Hash struct { + h hash.Hash + name string // for debugging + buf *bytes.Buffer // for verify +} + +// hashSalt is a salt string added to the beginning of every hash +// created by NewHash. Using the golangci-lint version makes sure that different +// versions of the command do not address the same cache +// entries, so that a bug in one version does not affect the execution +// of other versions. This salt will result in additional ActionID files +// in the cache, but not additional copies of the large output files, +// which are still addressed by unsalted SHA256. 
+var hashSalt []byte + +func SetSalt(b []byte) { + hashSalt = b +} + +// Subkey returns an action ID corresponding to mixing a parent +// action ID with a string description of the subkey. +func Subkey(parent ActionID, desc string) (ActionID, error) { + h := sha256.New() + const subkeyPrefix = "subkey:" + if n, err := h.Write([]byte(subkeyPrefix)); n != len(subkeyPrefix) { + return ActionID{}, fmt.Errorf("wrote %d/%d bytes of subkey prefix with error %s", n, len(subkeyPrefix), err) + } + if n, err := h.Write(parent[:]); n != len(parent) { + return ActionID{}, fmt.Errorf("wrote %d/%d bytes of parent with error %s", n, len(parent), err) + } + if n, err := h.Write([]byte(desc)); n != len(desc) { + return ActionID{}, fmt.Errorf("wrote %d/%d bytes of desc with error %s", n, len(desc), err) + } + + var out ActionID + h.Sum(out[:0]) + if debugHash { + fmt.Fprintf(os.Stderr, "HASH subkey %x %q = %x\n", parent, desc, out) + } + if verify { + hashDebug.Lock() + hashDebug.m[out] = fmt.Sprintf("subkey %x %q", parent, desc) + hashDebug.Unlock() + } + return out, nil +} + +// NewHash returns a new Hash. +// The caller is expected to Write data to it and then call Sum. +func NewHash(name string) (*Hash, error) { + h := &Hash{h: sha256.New(), name: name} + if debugHash { + fmt.Fprintf(os.Stderr, "HASH[%s]\n", h.name) + } + if n, err := h.Write(hashSalt); n != len(hashSalt) { + return nil, fmt.Errorf("wrote %d/%d bytes of hash salt with error %s", n, len(hashSalt), err) + } + if verify { + h.buf = new(bytes.Buffer) + } + return h, nil +} + +// Write writes data to the running hash. +func (h *Hash) Write(b []byte) (int, error) { + if debugHash { + fmt.Fprintf(os.Stderr, "HASH[%s]: %q\n", h.name, b) + } + if h.buf != nil { + h.buf.Write(b) + } + return h.h.Write(b) +} + +// Sum returns the hash of the data written previously. +func (h *Hash) Sum() [HashSize]byte { + var out [HashSize]byte + h.h.Sum(out[:0]) + if debugHash { + fmt.Fprintf(os.Stderr, "HASH[%s]: %x\n", h.name, out) + } + if h.buf != nil { + hashDebug.Lock() + if hashDebug.m == nil { + hashDebug.m = make(map[[HashSize]byte]string) + } + hashDebug.m[out] = h.buf.String() + hashDebug.Unlock() + } + return out +} + +// In GODEBUG=gocacheverify=1 mode, +// hashDebug holds the input to every computed hash ID, +// so that we can work backward from the ID involved in a +// cache entry mismatch to a description of what should be there. +var hashDebug struct { + sync.Mutex + m map[[HashSize]byte]string +} + +// reverseHash returns the input used to compute the hash id. +func reverseHash(id [HashSize]byte) string { + hashDebug.Lock() + s := hashDebug.m[id] + hashDebug.Unlock() + return s +} + +var hashFileCache struct { + sync.Mutex + m map[string][HashSize]byte +} + +// FileHash returns the hash of the named file. +// It caches repeated lookups for a given file, +// and the cache entry for a file can be initialized +// using SetFileHash. +// The hash used by FileHash is not the same as +// the hash used by NewHash. 
+func FileHash(file string) ([HashSize]byte, error) { + hashFileCache.Lock() + out, ok := hashFileCache.m[file] + hashFileCache.Unlock() + + if ok { + return out, nil + } + + h := sha256.New() + f, err := os.Open(file) + if err != nil { + if debugHash { + fmt.Fprintf(os.Stderr, "HASH %s: %v\n", file, err) + } + return [HashSize]byte{}, err + } + _, err = io.Copy(h, f) + f.Close() + if err != nil { + if debugHash { + fmt.Fprintf(os.Stderr, "HASH %s: %v\n", file, err) + } + return [HashSize]byte{}, err + } + h.Sum(out[:0]) + if debugHash { + fmt.Fprintf(os.Stderr, "HASH %s: %x\n", file, out) + } + + SetFileHash(file, out) + return out, nil +} + +// SetFileHash sets the hash returned by FileHash for file. +func SetFileHash(file string, sum [HashSize]byte) { + hashFileCache.Lock() + if hashFileCache.m == nil { + hashFileCache.m = make(map[string][HashSize]byte) + } + hashFileCache.m[file] = sum + hashFileCache.Unlock() +} diff --git a/vendor/github.com/golangci/golangci-lint/internal/errorutil/errors.go b/vendor/github.com/golangci/golangci-lint/internal/errorutil/errors.go new file mode 100644 index 000000000..5cb86d669 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/internal/errorutil/errors.go @@ -0,0 +1,23 @@ +package errorutil + +import ( + "fmt" +) + +// PanicError can be used to not print stacktrace twice +type PanicError struct { + recovered interface{} + stack []byte +} + +func NewPanicError(recovered interface{}, stack []byte) *PanicError { + return &PanicError{recovered: recovered, stack: stack} +} + +func (e PanicError) Error() string { + return fmt.Sprint(e.recovered) +} + +func (e PanicError) Stack() []byte { + return e.stack +} diff --git a/vendor/github.com/golangci/golangci-lint/internal/pkgcache/pkgcache.go b/vendor/github.com/golangci/golangci-lint/internal/pkgcache/pkgcache.go new file mode 100644 index 000000000..86007d042 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/internal/pkgcache/pkgcache.go @@ -0,0 +1,229 @@ +package pkgcache + +import ( + "bytes" + "encoding/gob" + "encoding/hex" + "fmt" + "runtime" + "sort" + "sync" + + "github.com/pkg/errors" + "golang.org/x/tools/go/packages" + + "github.com/golangci/golangci-lint/internal/cache" + "github.com/golangci/golangci-lint/pkg/logutils" + "github.com/golangci/golangci-lint/pkg/timeutils" +) + +type HashMode int + +const ( + HashModeNeedOnlySelf HashMode = iota + HashModeNeedDirectDeps + HashModeNeedAllDeps +) + +// Cache is a per-package data cache. A cached data is invalidated when +// package or it's dependencies change. 
+type Cache struct { + lowLevelCache *cache.Cache + pkgHashes sync.Map + sw *timeutils.Stopwatch + log logutils.Log // not used now, but may be needed for future debugging purposes + ioSem chan struct{} // semaphore limiting parallel IO +} + +func NewCache(sw *timeutils.Stopwatch, log logutils.Log) (*Cache, error) { + c, err := cache.Default() + if err != nil { + return nil, err + } + return &Cache{ + lowLevelCache: c, + sw: sw, + log: log, + ioSem: make(chan struct{}, runtime.GOMAXPROCS(-1)), + }, nil +} + +func (c *Cache) Trim() { + c.sw.TrackStage("trim", func() { + c.lowLevelCache.Trim() + }) +} + +func (c *Cache) Put(pkg *packages.Package, mode HashMode, key string, data interface{}) error { + var err error + buf := &bytes.Buffer{} + c.sw.TrackStage("gob", func() { + err = gob.NewEncoder(buf).Encode(data) + }) + if err != nil { + return errors.Wrap(err, "failed to gob encode") + } + + var aID cache.ActionID + + c.sw.TrackStage("key build", func() { + aID, err = c.pkgActionID(pkg, mode) + if err == nil { + subkey, subkeyErr := cache.Subkey(aID, key) + if subkeyErr != nil { + err = errors.Wrap(subkeyErr, "failed to build subkey") + } + aID = subkey + } + }) + if err != nil { + return errors.Wrapf(err, "failed to calculate package %s action id", pkg.Name) + } + c.ioSem <- struct{}{} + c.sw.TrackStage("cache io", func() { + err = c.lowLevelCache.PutBytes(aID, buf.Bytes()) + }) + <-c.ioSem + if err != nil { + return errors.Wrapf(err, "failed to save data to low-level cache by key %s for package %s", key, pkg.Name) + } + + return nil +} + +var ErrMissing = errors.New("missing data") + +func (c *Cache) Get(pkg *packages.Package, mode HashMode, key string, data interface{}) error { + var aID cache.ActionID + var err error + c.sw.TrackStage("key build", func() { + aID, err = c.pkgActionID(pkg, mode) + if err == nil { + subkey, subkeyErr := cache.Subkey(aID, key) + if subkeyErr != nil { + err = errors.Wrap(subkeyErr, "failed to build subkey") + } + aID = subkey + } + }) + if err != nil { + return errors.Wrapf(err, "failed to calculate package %s action id", pkg.Name) + } + + var b []byte + c.ioSem <- struct{}{} + c.sw.TrackStage("cache io", func() { + b, _, err = c.lowLevelCache.GetBytes(aID) + }) + <-c.ioSem + if err != nil { + if cache.IsErrMissing(err) { + return ErrMissing + } + return errors.Wrapf(err, "failed to get data from low-level cache by key %s for package %s", key, pkg.Name) + } + + c.sw.TrackStage("gob", func() { + err = gob.NewDecoder(bytes.NewReader(b)).Decode(data) + }) + if err != nil { + return errors.Wrap(err, "failed to gob decode") + } + + return nil +} + +func (c *Cache) pkgActionID(pkg *packages.Package, mode HashMode) (cache.ActionID, error) { + hash, err := c.packageHash(pkg, mode) + if err != nil { + return cache.ActionID{}, errors.Wrap(err, "failed to get package hash") + } + + key, err := cache.NewHash("action ID") + if err != nil { + return cache.ActionID{}, errors.Wrap(err, "failed to make a hash") + } + fmt.Fprintf(key, "pkgpath %s\n", pkg.PkgPath) + fmt.Fprintf(key, "pkghash %s\n", hash) + + return key.Sum(), nil +} + +// packageHash computes a package's hash. The hash is based on all Go +// files that make up the package, as well as the hashes of imported +// packages. 
+func (c *Cache) packageHash(pkg *packages.Package, mode HashMode) (string, error) { + type hashResults map[HashMode]string + hashResI, ok := c.pkgHashes.Load(pkg) + if ok { + hashRes := hashResI.(hashResults) + if _, ok := hashRes[mode]; !ok { + return "", fmt.Errorf("no mode %d in hash result", mode) + } + return hashRes[mode], nil + } + + hashRes := hashResults{} + + key, err := cache.NewHash("package hash") + if err != nil { + return "", errors.Wrap(err, "failed to make a hash") + } + + fmt.Fprintf(key, "pkgpath %s\n", pkg.PkgPath) + for _, f := range pkg.CompiledGoFiles { + c.ioSem <- struct{}{} + h, fErr := cache.FileHash(f) + <-c.ioSem + if fErr != nil { + return "", errors.Wrapf(fErr, "failed to calculate file %s hash", f) + } + fmt.Fprintf(key, "file %s %x\n", f, h) + } + curSum := key.Sum() + hashRes[HashModeNeedOnlySelf] = hex.EncodeToString(curSum[:]) + + imps := make([]*packages.Package, 0, len(pkg.Imports)) + for _, imp := range pkg.Imports { + imps = append(imps, imp) + } + sort.Slice(imps, func(i, j int) bool { + return imps[i].PkgPath < imps[j].PkgPath + }) + + calcDepsHash := func(depMode HashMode) error { + for _, dep := range imps { + if dep.PkgPath == "unsafe" { + continue + } + + depHash, depErr := c.packageHash(dep, depMode) + if depErr != nil { + return errors.Wrapf(depErr, "failed to calculate hash for dependency %s with mode %d", dep.Name, depMode) + } + + fmt.Fprintf(key, "import %s %s\n", dep.PkgPath, depHash) + } + return nil + } + + if err := calcDepsHash(HashModeNeedOnlySelf); err != nil { + return "", err + } + + curSum = key.Sum() + hashRes[HashModeNeedDirectDeps] = hex.EncodeToString(curSum[:]) + + if err := calcDepsHash(HashModeNeedAllDeps); err != nil { + return "", err + } + curSum = key.Sum() + hashRes[HashModeNeedAllDeps] = hex.EncodeToString(curSum[:]) + + if _, ok := hashRes[mode]; !ok { + return "", fmt.Errorf("invalid mode %d", mode) + } + + c.pkgHashes.Store(pkg, hashRes) + return hashRes[mode], nil +} diff --git a/vendor/github.com/golangci/golangci-lint/internal/renameio/renameio.go b/vendor/github.com/golangci/golangci-lint/internal/renameio/renameio.go new file mode 100644 index 000000000..fa9d93bf7 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/internal/renameio/renameio.go @@ -0,0 +1,93 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package renameio writes files atomically by renaming temporary files. +package renameio + +import ( + "bytes" + "io" + "math/rand" + "os" + "path/filepath" + "strconv" + + "github.com/golangci/golangci-lint/internal/robustio" +) + +const patternSuffix = ".tmp" + +// Pattern returns a glob pattern that matches the unrenamed temporary files +// created when writing to filename. +func Pattern(filename string) string { + return filepath.Join(filepath.Dir(filename), filepath.Base(filename)+patternSuffix) +} + +// WriteFile is like ioutil.WriteFile, but first writes data to an arbitrary +// file in the same directory as filename, then renames it atomically to the +// final name. +// +// That ensures that the final location, if it exists, is always a complete file. +func WriteFile(filename string, data []byte, perm os.FileMode) (err error) { + return WriteToFile(filename, bytes.NewReader(data), perm) +} + +// WriteToFile is a variant of WriteFile that accepts the data as an io.Reader +// instead of a slice. 
+func WriteToFile(filename string, data io.Reader, perm os.FileMode) (err error) { + f, err := tempFile(filepath.Dir(filename), filepath.Base(filename), perm) + if err != nil { + return err + } + defer func() { + // Only call os.Remove on f.Name() if we failed to rename it: otherwise, + // some other process may have created a new file with the same name after + // that. + if err != nil { + f.Close() + os.Remove(f.Name()) + } + }() + + if _, err := io.Copy(f, data); err != nil { + return err + } + // Sync the file before renaming it: otherwise, after a crash the reader may + // observe a 0-length file instead of the actual contents. + // See https://golang.org/issue/22397#issuecomment-380831736. + if err := f.Sync(); err != nil { + return err + } + if err := f.Close(); err != nil { + return err + } + + return robustio.Rename(f.Name(), filename) +} + +// tempFile creates a new temporary file with given permission bits. +func tempFile(dir, prefix string, perm os.FileMode) (f *os.File, err error) { + for i := 0; i < 10000; i++ { + name := filepath.Join(dir, prefix+strconv.Itoa(rand.Intn(1000000000))+patternSuffix) + f, err = os.OpenFile(name, os.O_RDWR|os.O_CREATE|os.O_EXCL, perm) + if os.IsExist(err) { + continue + } + break + } + return +} + +// ReadFile is like ioutil.ReadFile, but on Windows retries spurious errors that +// may occur if the file is concurrently replaced. +// +// Errors are classified heuristically and retries are bounded, so even this +// function may occasionally return a spurious error on Windows. +// If so, the error will likely wrap one of: +// - syscall.ERROR_ACCESS_DENIED +// - syscall.ERROR_FILE_NOT_FOUND +// - internal/syscall/windows.ERROR_SHARING_VIOLATION +func ReadFile(filename string) ([]byte, error) { + return robustio.ReadFile(filename) +} diff --git a/vendor/github.com/golangci/golangci-lint/internal/robustio/robustio.go b/vendor/github.com/golangci/golangci-lint/internal/robustio/robustio.go new file mode 100644 index 000000000..76e47ad1f --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/internal/robustio/robustio.go @@ -0,0 +1,53 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package robustio wraps I/O functions that are prone to failure on Windows, +// transparently retrying errors up to an arbitrary timeout. +// +// Errors are classified heuristically and retries are bounded, so the functions +// in this package do not completely eliminate spurious errors. However, they do +// significantly reduce the rate of failure in practice. +// +// If so, the error will likely wrap one of: +// The functions in this package do not completely eliminate spurious errors, +// but substantially reduce their rate of occurrence in practice. +package robustio + +// Rename is like os.Rename, but on Windows retries errors that may occur if the +// file is concurrently read or overwritten. +// +// (See golang.org/issue/31247 and golang.org/issue/32188.) +func Rename(oldpath, newpath string) error { + return rename(oldpath, newpath) +} + +// ReadFile is like ioutil.ReadFile, but on Windows retries errors that may +// occur if the file is concurrently replaced. +// +// (See golang.org/issue/31247 and golang.org/issue/32188.) 
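The package comment above explains that these wrappers transparently retry ephemeral Windows errors up to an arbitrary timeout; the concrete loop lives in robustio_flaky.go further down. A stand-alone sketch of that retry shape, with jittered, growing sleeps and a hard deadline (the timeout parameter and names are assumptions, not this package's API):

package sketch

import (
	"math/rand"
	"time"
)

// retryEphemeral keeps calling f while it reports a retryable error, sleeping
// a little longer (with jitter) each round, and gives up once the next sleep
// would push past the given timeout. This mirrors the bounded-retry loop used
// by robustio on Windows and Darwin.
func retryEphemeral(timeout time.Duration, f func() (err error, mayRetry bool)) error {
	start := time.Now()
	sleep := 1 * time.Millisecond
	for {
		err, mayRetry := f()
		if err == nil || !mayRetry {
			return err
		}
		if time.Since(start)+sleep >= timeout {
			return err // still failing near the deadline: report the last error
		}
		time.Sleep(sleep)
		sleep += time.Duration(rand.Int63n(int64(sleep))) // jittered growth
	}
}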
+func ReadFile(filename string) ([]byte, error) { + return readFile(filename) +} + +// RemoveAll is like os.RemoveAll, but on Windows retries errors that may occur +// if an executable file in the directory has recently been executed. +// +// (See golang.org/issue/19491.) +func RemoveAll(path string) error { + return removeAll(path) +} + +// IsEphemeralError reports whether err is one of the errors that the functions +// in this package attempt to mitigate. +// +// Errors considered ephemeral include: +// - syscall.ERROR_ACCESS_DENIED +// - syscall.ERROR_FILE_NOT_FOUND +// - internal/syscall/windows.ERROR_SHARING_VIOLATION +// +// This set may be expanded in the future; programs must not rely on the +// non-ephemerality of any given error. +func IsEphemeralError(err error) bool { + return isEphemeralError(err) +} diff --git a/vendor/github.com/golangci/golangci-lint/internal/robustio/robustio_darwin.go b/vendor/github.com/golangci/golangci-lint/internal/robustio/robustio_darwin.go new file mode 100644 index 000000000..1ac0d10d7 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/internal/robustio/robustio_darwin.go @@ -0,0 +1,29 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package robustio + +import ( + "os" + "syscall" +) + +const errFileNotFound = syscall.ENOENT + +// isEphemeralError returns true if err may be resolved by waiting. +func isEphemeralError(err error) bool { + switch werr := err.(type) { + case *os.PathError: + err = werr.Err + case *os.LinkError: + err = werr.Err + case *os.SyscallError: + err = werr.Err + + } + if errno, ok := err.(syscall.Errno); ok { + return errno == errFileNotFound + } + return false +} diff --git a/vendor/github.com/golangci/golangci-lint/internal/robustio/robustio_flaky.go b/vendor/github.com/golangci/golangci-lint/internal/robustio/robustio_flaky.go new file mode 100644 index 000000000..e0bf5b9b3 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/internal/robustio/robustio_flaky.go @@ -0,0 +1,93 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build windows darwin + +package robustio + +import ( + "io/ioutil" + "math/rand" + "os" + "syscall" + "time" +) + +const arbitraryTimeout = 500 * time.Millisecond + +const ERROR_SHARING_VIOLATION = 32 + +// retry retries ephemeral errors from f up to an arbitrary timeout +// to work around filesystem flakiness on Windows and Darwin. +func retry(f func() (err error, mayRetry bool)) error { + var ( + bestErr error + lowestErrno syscall.Errno + start time.Time + nextSleep time.Duration = 1 * time.Millisecond + ) + for { + err, mayRetry := f() + if err == nil || !mayRetry { + return err + } + + if errno, ok := err.(syscall.Errno); ok && (lowestErrno == 0 || errno < lowestErrno) { + bestErr = err + lowestErrno = errno + } else if bestErr == nil { + bestErr = err + } + + if start.IsZero() { + start = time.Now() + } else if d := time.Since(start) + nextSleep; d >= arbitraryTimeout { + break + } + time.Sleep(nextSleep) + nextSleep += time.Duration(rand.Int63n(int64(nextSleep))) + } + + return bestErr +} + +// rename is like os.Rename, but retries ephemeral errors. +// +// On windows it wraps os.Rename, which (as of 2019-06-04) uses MoveFileEx with +// MOVEFILE_REPLACE_EXISTING. 
+// +// Windows also provides a different system call, ReplaceFile, +// that provides similar semantics, but perhaps preserves more metadata. (The +// documentation on the differences between the two is very sparse.) +// +// Empirical error rates with MoveFileEx are lower under modest concurrency, so +// for now we're sticking with what the os package already provides. +func rename(oldpath, newpath string) (err error) { + return retry(func() (err error, mayRetry bool) { + err = os.Rename(oldpath, newpath) + return err, isEphemeralError(err) + }) +} + +// readFile is like ioutil.ReadFile, but retries ephemeral errors. +func readFile(filename string) ([]byte, error) { + var b []byte + err := retry(func() (err error, mayRetry bool) { + b, err = ioutil.ReadFile(filename) + + // Unlike in rename, we do not retry errFileNotFound here: it can occur + // as a spurious error, but the file may also genuinely not exist, so the + // increase in robustness is probably not worth the extra latency. + + return err, isEphemeralError(err) && err != errFileNotFound + }) + return b, err +} + +func removeAll(path string) error { + return retry(func() (err error, mayRetry bool) { + err = os.RemoveAll(path) + return err, isEphemeralError(err) + }) +} diff --git a/vendor/github.com/golangci/golangci-lint/internal/robustio/robustio_other.go b/vendor/github.com/golangci/golangci-lint/internal/robustio/robustio_other.go new file mode 100644 index 000000000..a2428856f --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/internal/robustio/robustio_other.go @@ -0,0 +1,28 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//+build !windows,!darwin + +package robustio + +import ( + "io/ioutil" + "os" +) + +func rename(oldpath, newpath string) error { + return os.Rename(oldpath, newpath) +} + +func readFile(filename string) ([]byte, error) { + return ioutil.ReadFile(filename) +} + +func removeAll(path string) error { + return os.RemoveAll(path) +} + +func isEphemeralError(err error) bool { + return false +} diff --git a/vendor/github.com/golangci/golangci-lint/internal/robustio/robustio_windows.go b/vendor/github.com/golangci/golangci-lint/internal/robustio/robustio_windows.go new file mode 100644 index 000000000..a35237d44 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/internal/robustio/robustio_windows.go @@ -0,0 +1,33 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package robustio + +import ( + "os" + "syscall" +) + +const errFileNotFound = syscall.ERROR_FILE_NOT_FOUND + +// isEphemeralError returns true if err may be resolved by waiting. 
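isEphemeralError below unwraps *os.PathError, *os.LinkError and *os.SyscallError by hand to reach the underlying syscall.Errno. On Go 1.13+ the same classification can be expressed with errors.As, as in this hedged sketch (errors.As is an alternative to the type switch shipped in this vendored code, not what the patch uses; the example errno mirrors the darwin constant above):

package sketch

import (
	"errors"
	"os"
	"syscall"
)

// isEphemeral reports whether err boils down to one of the given errno values.
// errors.As walks the *os.PathError / *os.LinkError / *os.SyscallError
// wrappers that the manual type switch handles explicitly.
func isEphemeral(err error, retryable ...syscall.Errno) bool {
	var errno syscall.Errno
	if !errors.As(err, &errno) {
		return false
	}
	for _, e := range retryable {
		if errno == e {
			return true
		}
	}
	return false
}

// Example: treat "file not found" from a rename as ephemeral.
func exampleUsage() bool {
	err := os.Rename("a.tmp", "b.txt")
	return isEphemeral(err, syscall.ENOENT)
}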
+func isEphemeralError(err error) bool { + switch werr := err.(type) { + case *os.PathError: + err = werr.Err + case *os.LinkError: + err = werr.Err + case *os.SyscallError: + err = werr.Err + } + if errno, ok := err.(syscall.Errno); ok { + switch errno { + case syscall.ERROR_ACCESS_DENIED, + syscall.ERROR_FILE_NOT_FOUND, + ERROR_SHARING_VIOLATION: + return true + } + } + return false +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/commands/cache.go b/vendor/github.com/golangci/golangci-lint/pkg/commands/cache.go new file mode 100644 index 000000000..359e2d63c --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/commands/cache.go @@ -0,0 +1,84 @@ +package commands + +import ( + "fmt" + "os" + "path/filepath" + + "github.com/spf13/cobra" + + "github.com/golangci/golangci-lint/internal/cache" + "github.com/golangci/golangci-lint/pkg/fsutils" + "github.com/golangci/golangci-lint/pkg/logutils" +) + +func (e *Executor) initCache() { + cacheCmd := &cobra.Command{ + Use: "cache", + Short: "Cache control and information", + Run: func(cmd *cobra.Command, args []string) { + if len(args) != 0 { + e.log.Fatalf("Usage: golangci-lint cache") + } + if err := cmd.Help(); err != nil { + e.log.Fatalf("Can't run cache: %s", err) + } + }, + } + e.rootCmd.AddCommand(cacheCmd) + + cacheCmd.AddCommand(&cobra.Command{ + Use: "clean", + Short: "Clean cache", + Run: e.executeCleanCache, + }) + cacheCmd.AddCommand(&cobra.Command{ + Use: "status", + Short: "Show cache status", + Run: e.executeCacheStatus, + }) + + // TODO: add trim command? +} + +func (e *Executor) executeCleanCache(_ *cobra.Command, args []string) { + if len(args) != 0 { + e.log.Fatalf("Usage: golangci-lint cache clean") + } + + cacheDir := cache.DefaultDir() + if err := os.RemoveAll(cacheDir); err != nil { + e.log.Fatalf("Failed to remove dir %s: %s", cacheDir, err) + } + + os.Exit(0) +} + +func (e *Executor) executeCacheStatus(_ *cobra.Command, args []string) { + if len(args) != 0 { + e.log.Fatalf("Usage: golangci-lint cache status") + } + + cacheDir := cache.DefaultDir() + fmt.Fprintf(logutils.StdOut, "Dir: %s\n", cacheDir) + cacheSizeBytes, err := dirSizeBytes(cacheDir) + if err == nil { + fmt.Fprintf(logutils.StdOut, "Size: %s\n", fsutils.PrettifyBytesCount(cacheSizeBytes)) + } + + os.Exit(0) +} + +func dirSizeBytes(path string) (int64, error) { + var size int64 + err := filepath.Walk(path, func(_ string, info os.FileInfo, err error) error { + if err != nil { + return err + } + if !info.IsDir() { + size += info.Size() + } + return err + }) + return size, err +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/commands/config.go b/vendor/github.com/golangci/golangci-lint/pkg/commands/config.go new file mode 100644 index 000000000..4b63e2e52 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/commands/config.go @@ -0,0 +1,66 @@ +package commands + +import ( + "fmt" + "os" + + "github.com/spf13/cobra" + "github.com/spf13/viper" + + "github.com/golangci/golangci-lint/pkg/exitcodes" + "github.com/golangci/golangci-lint/pkg/fsutils" +) + +func (e *Executor) initConfig() { + cmd := &cobra.Command{ + Use: "config", + Short: "Config", + Run: func(cmd *cobra.Command, args []string) { + if len(args) != 0 { + e.log.Fatalf("Usage: golangci-lint config") + } + if err := cmd.Help(); err != nil { + e.log.Fatalf("Can't run help: %s", err) + } + }, + } + e.rootCmd.AddCommand(cmd) + + pathCmd := &cobra.Command{ + Use: "path", + Short: "Print used config path", + Run: e.executePathCmd, + } + 
e.initRunConfiguration(pathCmd) // allow --config + cmd.AddCommand(pathCmd) +} + +func (e *Executor) getUsedConfig() string { + usedConfigFile := viper.ConfigFileUsed() + if usedConfigFile == "" { + return "" + } + + prettyUsedConfigFile, err := fsutils.ShortestRelPath(usedConfigFile, "") + if err != nil { + e.log.Warnf("Can't pretty print config file path: %s", err) + return usedConfigFile + } + + return prettyUsedConfigFile +} + +func (e *Executor) executePathCmd(_ *cobra.Command, args []string) { + if len(args) != 0 { + e.log.Fatalf("Usage: golangci-lint config path") + } + + usedConfigFile := e.getUsedConfig() + if usedConfigFile == "" { + e.log.Warnf("No config file detected") + os.Exit(exitcodes.NoConfigFileDetected) + } + + fmt.Println(usedConfigFile) + os.Exit(0) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/commands/executor.go b/vendor/github.com/golangci/golangci-lint/pkg/commands/executor.go new file mode 100644 index 000000000..3edb6e4b0 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/commands/executor.go @@ -0,0 +1,250 @@ +package commands + +import ( + "bytes" + "context" + "crypto/sha256" + "io" + "os" + "path/filepath" + "strings" + "time" + + "github.com/fatih/color" + "github.com/gofrs/flock" + "github.com/pkg/errors" + "github.com/spf13/cobra" + "github.com/spf13/pflag" + "gopkg.in/yaml.v3" + + "github.com/golangci/golangci-lint/internal/cache" + "github.com/golangci/golangci-lint/internal/pkgcache" + "github.com/golangci/golangci-lint/pkg/config" + "github.com/golangci/golangci-lint/pkg/fsutils" + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis/load" + "github.com/golangci/golangci-lint/pkg/goutil" + "github.com/golangci/golangci-lint/pkg/lint" + "github.com/golangci/golangci-lint/pkg/lint/lintersdb" + "github.com/golangci/golangci-lint/pkg/logutils" + "github.com/golangci/golangci-lint/pkg/report" + "github.com/golangci/golangci-lint/pkg/timeutils" +) + +type Executor struct { + rootCmd *cobra.Command + runCmd *cobra.Command + lintersCmd *cobra.Command + + exitCode int + version, commit, date string + + cfg *config.Config + log logutils.Log + reportData report.Data + DBManager *lintersdb.Manager + EnabledLintersSet *lintersdb.EnabledSet + contextLoader *lint.ContextLoader + goenv *goutil.Env + fileCache *fsutils.FileCache + lineCache *fsutils.LineCache + pkgCache *pkgcache.Cache + debugf logutils.DebugFunc + sw *timeutils.Stopwatch + + loadGuard *load.Guard + flock *flock.Flock +} + +func NewExecutor(version, commit, date string) *Executor { + startedAt := time.Now() + e := &Executor{ + cfg: config.NewDefault(), + version: version, + commit: commit, + date: date, + DBManager: lintersdb.NewManager(nil, nil), + debugf: logutils.Debug("exec"), + } + + e.debugf("Starting execution...") + e.log = report.NewLogWrapper(logutils.NewStderrLog(""), &e.reportData) + + // to setup log level early we need to parse config from command line extra time to + // find `-v` option + commandLineCfg, err := e.getConfigForCommandLine() + if err != nil && err != pflag.ErrHelp { + e.log.Fatalf("Can't get config for command line: %s", err) + } + if commandLineCfg != nil { + logutils.SetupVerboseLog(e.log, commandLineCfg.Run.IsVerbose) + + switch commandLineCfg.Output.Color { + case "always": + color.NoColor = false + case "never": + color.NoColor = true + case "auto": + // nothing + default: + e.log.Fatalf("invalid value %q for --color; must be 'always', 'auto', or 'never'", commandLineCfg.Output.Color) + } + } + + // init of commands must be done 
before config file reading because + // init sets config with the default values of flags + e.initRoot() + e.initRun() + e.initHelp() + e.initLinters() + e.initConfig() + e.initVersion() + e.initCache() + + // init e.cfg by values from config: flags parse will see these values + // like the default ones. It will overwrite them only if the same option + // is found in command-line: it's ok, command-line has higher priority. + + r := config.NewFileReader(e.cfg, commandLineCfg, e.log.Child("config_reader")) + if err = r.Read(); err != nil { + e.log.Fatalf("Can't read config: %s", err) + } + + // recreate after getting config + e.DBManager = lintersdb.NewManager(e.cfg, e.log).WithCustomLinters() + + e.cfg.LintersSettings.Gocritic.InferEnabledChecks(e.log) + if err = e.cfg.LintersSettings.Gocritic.Validate(e.log); err != nil { + e.log.Fatalf("Invalid gocritic settings: %s", err) + } + + // Slice options must be explicitly set for proper merging of config and command-line options. + fixSlicesFlags(e.runCmd.Flags()) + fixSlicesFlags(e.lintersCmd.Flags()) + + e.EnabledLintersSet = lintersdb.NewEnabledSet(e.DBManager, + lintersdb.NewValidator(e.DBManager), e.log.Child("lintersdb"), e.cfg) + e.goenv = goutil.NewEnv(e.log.Child("goenv")) + e.fileCache = fsutils.NewFileCache() + e.lineCache = fsutils.NewLineCache(e.fileCache) + + e.sw = timeutils.NewStopwatch("pkgcache", e.log.Child("stopwatch")) + e.pkgCache, err = pkgcache.NewCache(e.sw, e.log.Child("pkgcache")) + if err != nil { + e.log.Fatalf("Failed to build packages cache: %s", err) + } + e.loadGuard = load.NewGuard() + e.contextLoader = lint.NewContextLoader(e.cfg, e.log.Child("loader"), e.goenv, + e.lineCache, e.fileCache, e.pkgCache, e.loadGuard) + if err = e.initHashSalt(version); err != nil { + e.log.Fatalf("Failed to init hash salt: %s", err) + } + e.debugf("Initialized executor in %s", time.Since(startedAt)) + return e +} + +func (e *Executor) Execute() error { + return e.rootCmd.Execute() +} + +func (e *Executor) initHashSalt(version string) error { + binSalt, err := computeBinarySalt(version) + if err != nil { + return errors.Wrap(err, "failed to calculate binary salt") + } + + configSalt, err := computeConfigSalt(e.cfg) + if err != nil { + return errors.Wrap(err, "failed to calculate config salt") + } + + var b bytes.Buffer + b.Write(binSalt) + b.Write(configSalt) + cache.SetSalt(b.Bytes()) + return nil +} + +func computeBinarySalt(version string) ([]byte, error) { + if version != "" && version != "(devel)" { + return []byte(version), nil + } + + if logutils.HaveDebugTag("bin_salt") { + return []byte("debug"), nil + } + + p, err := os.Executable() + if err != nil { + return nil, err + } + f, err := os.Open(p) + if err != nil { + return nil, err + } + defer f.Close() + h := sha256.New() + if _, err := io.Copy(h, f); err != nil { + return nil, err + } + return h.Sum(nil), nil +} + +func computeConfigSalt(cfg *config.Config) ([]byte, error) { + // We don't hash all config fields to reduce meaningless cache + // invalidations. At least, it has a huge impact on tests speed. 
+ + lintersSettingsBytes, err := yaml.Marshal(cfg.LintersSettings) + if err != nil { + return nil, errors.Wrap(err, "failed to json marshal config linter settings") + } + + var configData bytes.Buffer + configData.WriteString("linters-settings=") + configData.Write(lintersSettingsBytes) + configData.WriteString("\nbuild-tags=%s" + strings.Join(cfg.Run.BuildTags, ",")) + + h := sha256.New() + if _, err := h.Write(configData.Bytes()); err != nil { + return nil, err + } + return h.Sum(nil), nil +} + +func (e *Executor) acquireFileLock() bool { + if e.cfg.Run.AllowParallelRunners { + e.debugf("Parallel runners are allowed, no locking") + return true + } + + lockFile := filepath.Join(os.TempDir(), "golangci-lint.lock") + e.debugf("Locking on file %s...", lockFile) + f := flock.New(lockFile) + const retryDelay = time.Second + + ctx := context.Background() + if !e.cfg.Run.AllowSerialRunners { + const totalTimeout = 5 * time.Second + var cancel context.CancelFunc + ctx, cancel = context.WithTimeout(ctx, totalTimeout) + defer cancel() + } + if ok, _ := f.TryLockContext(ctx, retryDelay); !ok { + return false + } + + e.flock = f + return true +} + +func (e *Executor) releaseFileLock() { + if e.cfg.Run.AllowParallelRunners { + return + } + + if err := e.flock.Unlock(); err != nil { + e.debugf("Failed to unlock on file: %s", err) + } + if err := os.Remove(e.flock.Path()); err != nil { + e.debugf("Failed to remove lock file: %s", err) + } +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/commands/help.go b/vendor/github.com/golangci/golangci-lint/pkg/commands/help.go new file mode 100644 index 000000000..ef276481c --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/commands/help.go @@ -0,0 +1,92 @@ +package commands + +import ( + "fmt" + "os" + "sort" + "strings" + + "github.com/fatih/color" + "github.com/spf13/cobra" + + "github.com/golangci/golangci-lint/pkg/lint/linter" + "github.com/golangci/golangci-lint/pkg/logutils" +) + +func (e *Executor) initHelp() { + helpCmd := &cobra.Command{ + Use: "help", + Short: "Help", + Run: func(cmd *cobra.Command, args []string) { + if len(args) != 0 { + e.log.Fatalf("Usage: golangci-lint help") + } + if err := cmd.Help(); err != nil { + e.log.Fatalf("Can't run help: %s", err) + } + }, + } + e.rootCmd.SetHelpCommand(helpCmd) + + lintersHelpCmd := &cobra.Command{ + Use: "linters", + Short: "Help about linters", + Run: e.executeLintersHelp, + } + helpCmd.AddCommand(lintersHelpCmd) +} + +func printLinterConfigs(lcs []*linter.Config) { + sort.Slice(lcs, func(i, j int) bool { + return strings.Compare(lcs[i].Name(), lcs[j].Name()) < 0 + }) + for _, lc := range lcs { + altNamesStr := "" + if len(lc.AlternativeNames) != 0 { + altNamesStr = fmt.Sprintf(" (%s)", strings.Join(lc.AlternativeNames, ", ")) + } + + // If the linter description spans multiple lines, truncate everything following the first newline + linterDescription := lc.Linter.Desc() + firstNewline := strings.IndexRune(linterDescription, '\n') + if firstNewline > 0 { + linterDescription = linterDescription[:firstNewline] + } + + fmt.Fprintf(logutils.StdOut, "%s%s: %s [fast: %t, auto-fix: %t]\n", color.YellowString(lc.Name()), + altNamesStr, linterDescription, !lc.IsSlowLinter(), lc.CanAutoFix) + } +} + +func (e *Executor) executeLintersHelp(_ *cobra.Command, args []string) { + if len(args) != 0 { + e.log.Fatalf("Usage: golangci-lint help linters") + } + + var enabledLCs, disabledLCs []*linter.Config + for _, lc := range e.DBManager.GetAllSupportedLinterConfigs() { + if lc.EnabledByDefault 
{ + enabledLCs = append(enabledLCs, lc) + } else { + disabledLCs = append(disabledLCs, lc) + } + } + + color.Green("Enabled by default linters:\n") + printLinterConfigs(enabledLCs) + color.Red("\nDisabled by default linters:\n") + printLinterConfigs(disabledLCs) + + color.Green("\nLinters presets:") + for _, p := range e.DBManager.AllPresets() { + linters := e.DBManager.GetAllLinterConfigsForPreset(p) + linterNames := []string{} + for _, lc := range linters { + linterNames = append(linterNames, lc.Name()) + } + sort.Strings(linterNames) + fmt.Fprintf(logutils.StdOut, "%s: %s\n", color.YellowString(p), strings.Join(linterNames, ", ")) + } + + os.Exit(0) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/commands/linters.go b/vendor/github.com/golangci/golangci-lint/pkg/commands/linters.go new file mode 100644 index 000000000..873dab817 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/commands/linters.go @@ -0,0 +1,51 @@ +package commands + +import ( + "log" + "os" + + "github.com/fatih/color" + "github.com/spf13/cobra" + + "github.com/golangci/golangci-lint/pkg/lint/linter" +) + +func (e *Executor) initLinters() { + e.lintersCmd = &cobra.Command{ + Use: "linters", + Short: "List current linters configuration", + Run: e.executeLinters, + } + e.rootCmd.AddCommand(e.lintersCmd) + e.initRunConfiguration(e.lintersCmd) +} + +func (e *Executor) executeLinters(_ *cobra.Command, args []string) { + if len(args) != 0 { + e.log.Fatalf("Usage: golangci-lint linters") + } + + enabledLintersMap, err := e.EnabledLintersSet.GetEnabledLintersMap() + if err != nil { + log.Fatalf("Can't get enabled linters: %s", err) + } + + color.Green("Enabled by your configuration linters:\n") + enabledLinters := make([]*linter.Config, 0, len(enabledLintersMap)) + for _, linter := range enabledLintersMap { + enabledLinters = append(enabledLinters, linter) + } + printLinterConfigs(enabledLinters) + + var disabledLCs []*linter.Config + for _, lc := range e.DBManager.GetAllSupportedLinterConfigs() { + if enabledLintersMap[lc.Name()] == nil { + disabledLCs = append(disabledLCs, lc) + } + } + + color.Red("\nDisabled by your configuration linters:\n") + printLinterConfigs(disabledLCs) + + os.Exit(0) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/commands/root.go b/vendor/github.com/golangci/golangci-lint/pkg/commands/root.go new file mode 100644 index 000000000..f90df9901 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/commands/root.go @@ -0,0 +1,165 @@ +package commands + +import ( + "fmt" + "os" + "runtime" + "runtime/pprof" + "runtime/trace" + "strconv" + + "github.com/spf13/cobra" + "github.com/spf13/pflag" + + "github.com/golangci/golangci-lint/pkg/config" + "github.com/golangci/golangci-lint/pkg/logutils" +) + +func (e *Executor) persistentPreRun(_ *cobra.Command, _ []string) { + if e.cfg.Run.PrintVersion { + fmt.Fprintf(logutils.StdOut, "golangci-lint has version %s built from %s on %s\n", e.version, e.commit, e.date) + os.Exit(0) + } + + runtime.GOMAXPROCS(e.cfg.Run.Concurrency) + + if e.cfg.Run.CPUProfilePath != "" { + f, err := os.Create(e.cfg.Run.CPUProfilePath) + if err != nil { + e.log.Fatalf("Can't create file %s: %s", e.cfg.Run.CPUProfilePath, err) + } + if err := pprof.StartCPUProfile(f); err != nil { + e.log.Fatalf("Can't start CPU profiling: %s", err) + } + } + + if e.cfg.Run.MemProfilePath != "" { + if rate := os.Getenv("GL_MEMPROFILE_RATE"); rate != "" { + runtime.MemProfileRate, _ = strconv.Atoi(rate) + } + } + + if e.cfg.Run.TracePath != "" { + f, err 
:= os.Create(e.cfg.Run.TracePath) + if err != nil { + e.log.Fatalf("Can't create file %s: %s", e.cfg.Run.TracePath, err) + } + if err = trace.Start(f); err != nil { + e.log.Fatalf("Can't start tracing: %s", err) + } + } +} + +func (e *Executor) persistentPostRun(_ *cobra.Command, _ []string) { + if e.cfg.Run.CPUProfilePath != "" { + pprof.StopCPUProfile() + } + if e.cfg.Run.MemProfilePath != "" { + f, err := os.Create(e.cfg.Run.MemProfilePath) + if err != nil { + e.log.Fatalf("Can't create file %s: %s", e.cfg.Run.MemProfilePath, err) + } + + var ms runtime.MemStats + runtime.ReadMemStats(&ms) + printMemStats(&ms, e.log) + + if err := pprof.WriteHeapProfile(f); err != nil { + e.log.Fatalf("Can't write heap profile: %s", err) + } + f.Close() + } + if e.cfg.Run.TracePath != "" { + trace.Stop() + } + + os.Exit(e.exitCode) +} + +func printMemStats(ms *runtime.MemStats, logger logutils.Log) { + logger.Infof("Mem stats: alloc=%s total_alloc=%s sys=%s "+ + "heap_alloc=%s heap_sys=%s heap_idle=%s heap_released=%s heap_in_use=%s "+ + "stack_in_use=%s stack_sys=%s "+ + "mspan_sys=%s mcache_sys=%s buck_hash_sys=%s gc_sys=%s other_sys=%s "+ + "mallocs_n=%d frees_n=%d heap_objects_n=%d gc_cpu_fraction=%.2f", + formatMemory(ms.Alloc), formatMemory(ms.TotalAlloc), formatMemory(ms.Sys), + formatMemory(ms.HeapAlloc), formatMemory(ms.HeapSys), + formatMemory(ms.HeapIdle), formatMemory(ms.HeapReleased), formatMemory(ms.HeapInuse), + formatMemory(ms.StackInuse), formatMemory(ms.StackSys), + formatMemory(ms.MSpanSys), formatMemory(ms.MCacheSys), formatMemory(ms.BuckHashSys), + formatMemory(ms.GCSys), formatMemory(ms.OtherSys), + ms.Mallocs, ms.Frees, ms.HeapObjects, ms.GCCPUFraction) +} + +func formatMemory(memBytes uint64) string { + const Kb = 1024 + const Mb = Kb * 1024 + + if memBytes < Kb { + return fmt.Sprintf("%db", memBytes) + } + if memBytes < Mb { + return fmt.Sprintf("%dkb", memBytes/Kb) + } + return fmt.Sprintf("%dmb", memBytes/Mb) +} + +func getDefaultConcurrency() int { + if os.Getenv("HELP_RUN") == "1" { + // Make stable concurrency for README help generating builds. + const prettyConcurrency = 8 + return prettyConcurrency + } + + return runtime.NumCPU() +} + +func (e *Executor) initRoot() { + rootCmd := &cobra.Command{ + Use: "golangci-lint", + Short: "golangci-lint is a smart linters runner.", + Long: `Smart, fast linters runner. 
Run it in cloud for every GitHub pull request on https://golangci.com`, + Run: func(cmd *cobra.Command, args []string) { + if len(args) != 0 { + e.log.Fatalf("Usage: golangci-lint") + } + if err := cmd.Help(); err != nil { + e.log.Fatalf("Can't run help: %s", err) + } + }, + PersistentPreRun: e.persistentPreRun, + PersistentPostRun: e.persistentPostRun, + } + + initRootFlagSet(rootCmd.PersistentFlags(), e.cfg, e.needVersionOption()) + e.rootCmd = rootCmd +} + +func (e *Executor) needVersionOption() bool { + return e.date != "" +} + +func initRootFlagSet(fs *pflag.FlagSet, cfg *config.Config, needVersionOption bool) { + fs.BoolVarP(&cfg.Run.IsVerbose, "verbose", "v", false, wh("verbose output")) + + var silent bool + fs.BoolVarP(&silent, "silent", "s", false, wh("disables congrats outputs")) + if err := fs.MarkHidden("silent"); err != nil { + panic(err) + } + err := fs.MarkDeprecated("silent", + "now golangci-lint by default is silent: it doesn't print Congrats message") + if err != nil { + panic(err) + } + + fs.StringVar(&cfg.Run.CPUProfilePath, "cpu-profile-path", "", wh("Path to CPU profile output file")) + fs.StringVar(&cfg.Run.MemProfilePath, "mem-profile-path", "", wh("Path to memory profile output file")) + fs.StringVar(&cfg.Run.TracePath, "trace-path", "", wh("Path to trace output file")) + fs.IntVarP(&cfg.Run.Concurrency, "concurrency", "j", getDefaultConcurrency(), wh("Concurrency (default NumCPU)")) + if needVersionOption { + fs.BoolVar(&cfg.Run.PrintVersion, "version", false, wh("Print version")) + } + + fs.StringVar(&cfg.Output.Color, "color", "auto", wh("Use color when printing; can be 'always', 'auto', or 'never'")) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/commands/run.go b/vendor/github.com/golangci/golangci-lint/pkg/commands/run.go new file mode 100644 index 000000000..271fffe94 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/commands/run.go @@ -0,0 +1,566 @@ +package commands + +import ( + "context" + "fmt" + "io/ioutil" + "log" + "os" + "runtime" + "strings" + "time" + + "github.com/fatih/color" + "github.com/pkg/errors" + "github.com/spf13/cobra" + "github.com/spf13/pflag" + + "github.com/golangci/golangci-lint/pkg/config" + "github.com/golangci/golangci-lint/pkg/exitcodes" + "github.com/golangci/golangci-lint/pkg/lint" + "github.com/golangci/golangci-lint/pkg/lint/lintersdb" + "github.com/golangci/golangci-lint/pkg/logutils" + "github.com/golangci/golangci-lint/pkg/packages" + "github.com/golangci/golangci-lint/pkg/printers" + "github.com/golangci/golangci-lint/pkg/result" + "github.com/golangci/golangci-lint/pkg/result/processors" +) + +func getDefaultIssueExcludeHelp() string { + parts := []string{"Use or not use default excludes:"} + for _, ep := range config.DefaultExcludePatterns { + parts = append(parts, + fmt.Sprintf(" # %s %s: %s", ep.ID, ep.Linter, ep.Why), + fmt.Sprintf(" - %s", color.YellowString(ep.Pattern)), + "", + ) + } + return strings.Join(parts, "\n") +} + +func getDefaultDirectoryExcludeHelp() string { + parts := []string{"Use or not use default excluded directories:"} + for _, dir := range packages.StdExcludeDirRegexps { + parts = append(parts, fmt.Sprintf(" - %s", color.YellowString(dir))) + } + parts = append(parts, "") + return strings.Join(parts, "\n") +} + +func wh(text string) string { + return color.GreenString(text) +} + +const defaultTimeout = time.Minute + +//nolint:funlen +func initFlagSet(fs *pflag.FlagSet, cfg *config.Config, m *lintersdb.Manager, isFinalInit bool) { + hideFlag := func(name string) { 
+ if err := fs.MarkHidden(name); err != nil { + panic(err) + } + + // we run initFlagSet multiple times, but we wouldn't like to see deprecation message multiple times + if isFinalInit { + const deprecateMessage = "flag will be removed soon, please, use .golangci.yml config" + if err := fs.MarkDeprecated(name, deprecateMessage); err != nil { + panic(err) + } + } + } + + // Output config + oc := &cfg.Output + fs.StringVar(&oc.Format, "out-format", + config.OutFormatColoredLineNumber, + wh(fmt.Sprintf("Format of output: %s", strings.Join(config.OutFormats, "|")))) + fs.BoolVar(&oc.PrintIssuedLine, "print-issued-lines", true, wh("Print lines of code with issue")) + fs.BoolVar(&oc.PrintLinterName, "print-linter-name", true, wh("Print linter name in issue line")) + fs.BoolVar(&oc.UniqByLine, "uniq-by-line", true, wh("Make issues output unique by line")) + fs.BoolVar(&oc.SortResults, "sort-results", false, wh("Sort linter results")) + fs.BoolVar(&oc.PrintWelcomeMessage, "print-welcome", false, wh("Print welcome message")) + fs.StringVar(&oc.PathPrefix, "path-prefix", "", wh("Path prefix to add to output")) + hideFlag("print-welcome") // no longer used + + fs.BoolVar(&cfg.InternalCmdTest, "internal-cmd-test", false, wh("Option is used only for testing golangci-lint command, don't use it")) + if err := fs.MarkHidden("internal-cmd-test"); err != nil { + panic(err) + } + + // Run config + rc := &cfg.Run + fs.StringVar(&rc.ModulesDownloadMode, "modules-download-mode", "", + "Modules download mode. If not empty, passed as -mod= to go tools") + fs.IntVar(&rc.ExitCodeIfIssuesFound, "issues-exit-code", + exitcodes.IssuesFound, wh("Exit code when issues were found")) + fs.StringSliceVar(&rc.BuildTags, "build-tags", nil, wh("Build tags")) + + fs.DurationVar(&rc.Timeout, "deadline", defaultTimeout, wh("Deadline for total work")) + if err := fs.MarkHidden("deadline"); err != nil { + panic(err) + } + fs.DurationVar(&rc.Timeout, "timeout", defaultTimeout, wh("Timeout for total work")) + + fs.BoolVar(&rc.AnalyzeTests, "tests", true, wh("Analyze tests (*_test.go)")) + fs.BoolVar(&rc.PrintResourcesUsage, "print-resources-usage", false, + wh("Print avg and max memory usage of golangci-lint and total time")) + fs.StringVarP(&rc.Config, "config", "c", "", wh("Read config from file path `PATH`")) + fs.BoolVar(&rc.NoConfig, "no-config", false, wh("Don't read config")) + fs.StringSliceVar(&rc.SkipDirs, "skip-dirs", nil, wh("Regexps of directories to skip")) + fs.BoolVar(&rc.UseDefaultSkipDirs, "skip-dirs-use-default", true, getDefaultDirectoryExcludeHelp()) + fs.StringSliceVar(&rc.SkipFiles, "skip-files", nil, wh("Regexps of files to skip")) + + const allowParallelDesc = "Allow multiple parallel golangci-lint instances running. " + + "If false (default) - golangci-lint acquires file lock on start." + fs.BoolVar(&rc.AllowParallelRunners, "allow-parallel-runners", false, wh(allowParallelDesc)) + const allowSerialDesc = "Allow multiple golangci-lint instances running, but serialize them around a lock. " + + "If false (default) - golangci-lint exits with an error if it fails to acquire file lock on start." + fs.BoolVar(&rc.AllowSerialRunners, "allow-serial-runners", false, wh(allowSerialDesc)) + + // Linters settings config + lsc := &cfg.LintersSettings + + // Hide all linters settings flags: they were initially visible, + // but when number of linters started to grow it became obvious that + // we can't fill 90% of flags by linters settings: common flags became hard to find. 
+ // New linters settings should be done only through config file. + fs.BoolVar(&lsc.Errcheck.CheckTypeAssertions, "errcheck.check-type-assertions", + false, "Errcheck: check for ignored type assertion results") + hideFlag("errcheck.check-type-assertions") + fs.BoolVar(&lsc.Errcheck.CheckAssignToBlank, "errcheck.check-blank", false, + "Errcheck: check for errors assigned to blank identifier: _ = errFunc()") + hideFlag("errcheck.check-blank") + fs.StringVar(&lsc.Errcheck.Exclude, "errcheck.exclude", "", + "Path to a file containing a list of functions to exclude from checking") + hideFlag("errcheck.exclude") + fs.StringVar(&lsc.Errcheck.Ignore, "errcheck.ignore", "fmt:.*", + `Comma-separated list of pairs of the form pkg:regex. The regex is used to ignore names within pkg`) + hideFlag("errcheck.ignore") + + fs.BoolVar(&lsc.Govet.CheckShadowing, "govet.check-shadowing", false, + "Govet: check for shadowed variables") + hideFlag("govet.check-shadowing") + + fs.Float64Var(&lsc.Golint.MinConfidence, "golint.min-confidence", 0.8, + "Golint: minimum confidence of a problem to print it") + hideFlag("golint.min-confidence") + + fs.BoolVar(&lsc.Gofmt.Simplify, "gofmt.simplify", true, "Gofmt: simplify code") + hideFlag("gofmt.simplify") + + fs.IntVar(&lsc.Gocyclo.MinComplexity, "gocyclo.min-complexity", + 30, "Minimal complexity of function to report it") + hideFlag("gocyclo.min-complexity") + + fs.BoolVar(&lsc.Maligned.SuggestNewOrder, "maligned.suggest-new", false, + "Maligned: print suggested more optimal struct fields ordering") + hideFlag("maligned.suggest-new") + + fs.IntVar(&lsc.Dupl.Threshold, "dupl.threshold", + 150, "Dupl: Minimal threshold to detect copy-paste") + hideFlag("dupl.threshold") + + fs.BoolVar(&lsc.Goconst.MatchWithConstants, "goconst.match-constant", + true, "Goconst: look for existing constants matching the values") + hideFlag("goconst.match-constant") + fs.IntVar(&lsc.Goconst.MinStringLen, "goconst.min-len", + 3, "Goconst: minimum constant string length") + hideFlag("goconst.min-len") + fs.IntVar(&lsc.Goconst.MinOccurrencesCount, "goconst.min-occurrences", + 3, "Goconst: minimum occurrences of constant string count to trigger issue") + hideFlag("goconst.min-occurrences") + fs.BoolVar(&lsc.Goconst.ParseNumbers, "goconst.numbers", + false, "Goconst: search also for duplicated numbers") + hideFlag("goconst.numbers") + fs.IntVar(&lsc.Goconst.NumberMin, "goconst.min", + 3, "minimum value, only works with goconst.numbers") + hideFlag("goconst.min") + fs.IntVar(&lsc.Goconst.NumberMax, "goconst.max", + 3, "maximum value, only works with goconst.numbers") + hideFlag("goconst.max") + fs.BoolVar(&lsc.Goconst.IgnoreCalls, "goconst.ignore-calls", + true, "Goconst: ignore when constant is not used as function argument") + hideFlag("goconst.ignore-calls") + + // (@dixonwille) These flag is only used for testing purposes. 
+ fs.StringSliceVar(&lsc.Depguard.Packages, "depguard.packages", nil, + "Depguard: packages to add to the list") + hideFlag("depguard.packages") + + fs.BoolVar(&lsc.Depguard.IncludeGoRoot, "depguard.include-go-root", false, + "Depguard: check list against standard lib") + hideFlag("depguard.include-go-root") + + fs.IntVar(&lsc.Lll.TabWidth, "lll.tab-width", 1, + "Lll: tab width in spaces") + hideFlag("lll.tab-width") + + // Linters config + lc := &cfg.Linters + fs.StringSliceVarP(&lc.Enable, "enable", "E", nil, wh("Enable specific linter")) + fs.StringSliceVarP(&lc.Disable, "disable", "D", nil, wh("Disable specific linter")) + fs.BoolVar(&lc.EnableAll, "enable-all", false, wh("Enable all linters")) + if err := fs.MarkHidden("enable-all"); err != nil { + panic(err) + } + + fs.BoolVar(&lc.DisableAll, "disable-all", false, wh("Disable all linters")) + fs.StringSliceVarP(&lc.Presets, "presets", "p", nil, + wh(fmt.Sprintf("Enable presets (%s) of linters. Run 'golangci-lint linters' to see "+ + "them. This option implies option --disable-all", strings.Join(m.AllPresets(), "|")))) + fs.BoolVar(&lc.Fast, "fast", false, wh("Run only fast linters from enabled linters set (first run won't be fast)")) + + // Issues config + ic := &cfg.Issues + fs.StringSliceVarP(&ic.ExcludePatterns, "exclude", "e", nil, wh("Exclude issue by regexp")) + fs.BoolVar(&ic.UseDefaultExcludes, "exclude-use-default", true, getDefaultIssueExcludeHelp()) + fs.BoolVar(&ic.ExcludeCaseSensitive, "exclude-case-sensitive", false, wh("If set to true exclude "+ + "and exclude rules regular expressions are case sensitive")) + + fs.IntVar(&ic.MaxIssuesPerLinter, "max-issues-per-linter", 50, + wh("Maximum issues count per one linter. Set to 0 to disable")) + fs.IntVar(&ic.MaxSameIssues, "max-same-issues", 3, + wh("Maximum count of issues with the same text. Set to 0 to disable")) + + fs.BoolVarP(&ic.Diff, "new", "n", false, + wh("Show only new issues: if there are unstaged changes or untracked files, only those changes "+ + "are analyzed, else only changes in HEAD~ are analyzed.\nIt's a super-useful option for integration "+ + "of golangci-lint into existing large codebase.\nIt's not practical to fix all existing issues at "+ + "the moment of integration: much better to not allow issues in new code.\nFor CI setups, prefer "+ + "--new-from-rev=HEAD~, as --new can skip linting the current patch if any scripts generate "+ + "unstaged files before golangci-lint runs.")) + fs.StringVar(&ic.DiffFromRevision, "new-from-rev", "", + wh("Show only new issues created after git revision `REV`")) + fs.StringVar(&ic.DiffPatchFilePath, "new-from-patch", "", + wh("Show only new issues created in git patch with file path `PATH`")) + fs.BoolVar(&ic.NeedFix, "fix", false, "Fix found issues (if it's supported by the linter)") +} + +func (e *Executor) initRunConfiguration(cmd *cobra.Command) { + fs := cmd.Flags() + fs.SortFlags = false // sort them as they are defined here + initFlagSet(fs, e.cfg, e.DBManager, true) +} + +func (e *Executor) getConfigForCommandLine() (*config.Config, error) { + // We use another pflag.FlagSet here to not set `changed` flag + // on cmd.Flags() options. Otherwise string slice options will be duplicated. + fs := pflag.NewFlagSet("config flag set", pflag.ContinueOnError) + + var cfg config.Config + // Don't do `fs.AddFlagSet(cmd.Flags())` because it shares flags representations: + // `changed` variable inside string slice vars will be shared. 
+ // Use another config variable here, not e.cfg, to not + // affect main parsing by this parsing of only config option. + initFlagSet(fs, &cfg, e.DBManager, false) + initVersionFlagSet(fs, &cfg) + + // Parse max options, even force version option: don't want + // to get access to Executor here: it's error-prone to use + // cfg vs e.cfg. + initRootFlagSet(fs, &cfg, true) + + fs.Usage = func() {} // otherwise help text will be printed twice + if err := fs.Parse(os.Args); err != nil { + if err == pflag.ErrHelp { + return nil, err + } + + return nil, fmt.Errorf("can't parse args: %s", err) + } + + return &cfg, nil +} + +func (e *Executor) initRun() { + e.runCmd = &cobra.Command{ + Use: "run", + Short: "Run the linters", + Run: e.executeRun, + PreRun: func(_ *cobra.Command, _ []string) { + if ok := e.acquireFileLock(); !ok { + e.log.Fatalf("Parallel golangci-lint is running") + } + }, + PostRun: func(_ *cobra.Command, _ []string) { + e.releaseFileLock() + }, + } + e.rootCmd.AddCommand(e.runCmd) + + e.runCmd.SetOut(logutils.StdOut) // use custom output to properly color it in Windows terminals + e.runCmd.SetErr(logutils.StdErr) + + e.initRunConfiguration(e.runCmd) +} + +func fixSlicesFlags(fs *pflag.FlagSet) { + // It's a dirty hack to set flag.Changed to true for every string slice flag. + // It's necessary to merge config and command-line slices: otherwise command-line + // flags will always overwrite ones from the config. + fs.VisitAll(func(f *pflag.Flag) { + if f.Value.Type() != "stringSlice" { + return + } + + s, err := fs.GetStringSlice(f.Name) + if err != nil { + return + } + + if s == nil { // assume that every string slice flag has nil as the default + return + } + + var safe []string + for _, v := range s { + // add quotes to escape comma because spf13/pflag use a CSV parser: + // https://github.com/spf13/pflag/blob/85dd5c8bc61cfa382fecd072378089d4e856579d/string_slice.go#L43 + safe = append(safe, `"`+v+`"`) + } + + // calling Set sets Changed to true: next Set calls will append, not overwrite + _ = f.Value.Set(strings.Join(safe, ",")) + }) +} + +func (e *Executor) runAnalysis(ctx context.Context, args []string) ([]result.Issue, error) { + e.cfg.Run.Args = args + + lintersToRun, err := e.EnabledLintersSet.GetOptimizedLinters() + if err != nil { + return nil, err + } + + enabledLintersMap, err := e.EnabledLintersSet.GetEnabledLintersMap() + if err != nil { + return nil, err + } + + for _, lc := range e.DBManager.GetAllSupportedLinterConfigs() { + isEnabled := enabledLintersMap[lc.Name()] != nil + e.reportData.AddLinter(lc.Name(), isEnabled, lc.EnabledByDefault) + } + + lintCtx, err := e.contextLoader.Load(ctx, lintersToRun) + if err != nil { + return nil, errors.Wrap(err, "context loading failed") + } + lintCtx.Log = e.log.Child("linters context") + + runner, err := lint.NewRunner(e.cfg, e.log.Child("runner"), + e.goenv, e.EnabledLintersSet, e.lineCache, e.DBManager, lintCtx.Packages) + if err != nil { + return nil, err + } + + issues, err := runner.Run(ctx, lintersToRun, lintCtx) + if err != nil { + return nil, err + } + + fixer := processors.NewFixer(e.cfg, e.log, e.fileCache) + return fixer.Process(issues), nil +} + +func (e *Executor) setOutputToDevNull() (savedStdout, savedStderr *os.File) { + savedStdout, savedStderr = os.Stdout, os.Stderr + devNull, err := os.Open(os.DevNull) + if err != nil { + e.log.Warnf("Can't open null device %q: %s", os.DevNull, err) + return + } + + os.Stdout, os.Stderr = devNull, devNull + return +} + +func (e *Executor) setExitCodeIfIssuesFound(issues 
[]result.Issue) { + if len(issues) != 0 { + e.exitCode = e.cfg.Run.ExitCodeIfIssuesFound + } +} + +func (e *Executor) runAndPrint(ctx context.Context, args []string) error { + if err := e.goenv.Discover(ctx); err != nil { + e.log.Warnf("Failed to discover go env: %s", err) + } + + if !logutils.HaveDebugTag("linters_output") { + // Don't allow linters and loader to print anything + log.SetOutput(ioutil.Discard) + savedStdout, savedStderr := e.setOutputToDevNull() + defer func() { + os.Stdout, os.Stderr = savedStdout, savedStderr + }() + } + + issues, err := e.runAnalysis(ctx, args) + if err != nil { + return err // XXX: don't loose type + } + + p, err := e.createPrinter() + if err != nil { + return err + } + + e.setExitCodeIfIssuesFound(issues) + + if err = p.Print(ctx, issues); err != nil { + return fmt.Errorf("can't print %d issues: %s", len(issues), err) + } + + e.fileCache.PrintStats(e.log) + + return nil +} + +func (e *Executor) createPrinter() (printers.Printer, error) { + var p printers.Printer + format := e.cfg.Output.Format + switch format { + case config.OutFormatJSON: + p = printers.NewJSON(&e.reportData) + case config.OutFormatColoredLineNumber, config.OutFormatLineNumber: + p = printers.NewText(e.cfg.Output.PrintIssuedLine, + format == config.OutFormatColoredLineNumber, e.cfg.Output.PrintLinterName, + e.log.Child("text_printer")) + case config.OutFormatTab: + p = printers.NewTab(e.cfg.Output.PrintLinterName, e.log.Child("tab_printer")) + case config.OutFormatCheckstyle: + p = printers.NewCheckstyle() + case config.OutFormatCodeClimate: + p = printers.NewCodeClimate() + case config.OutFormatHTML: + p = printers.NewHTML() + case config.OutFormatJunitXML: + p = printers.NewJunitXML() + case config.OutFormatGithubActions: + p = printers.NewGithub() + default: + return nil, fmt.Errorf("unknown output format %s", format) + } + + return p, nil +} + +func (e *Executor) executeRun(_ *cobra.Command, args []string) { + needTrackResources := e.cfg.Run.IsVerbose || e.cfg.Run.PrintResourcesUsage + trackResourcesEndCh := make(chan struct{}) + defer func() { // XXX: this defer must be before ctx.cancel defer + if needTrackResources { // wait until resource tracking finished to print properly + <-trackResourcesEndCh + } + }() + + e.setTimeoutToDeadlineIfOnlyDeadlineIsSet() + ctx, cancel := context.WithTimeout(context.Background(), e.cfg.Run.Timeout) + defer cancel() + + if needTrackResources { + go watchResources(ctx, trackResourcesEndCh, e.log, e.debugf) + } + + if err := e.runAndPrint(ctx, args); err != nil { + e.log.Errorf("Running error: %s", err) + if e.exitCode == exitcodes.Success { + if exitErr, ok := errors.Cause(err).(*exitcodes.ExitError); ok { + e.exitCode = exitErr.Code + } else { + e.exitCode = exitcodes.Failure + } + } + } + + e.setupExitCode(ctx) +} + +// to be removed when deadline is finally decommissioned +func (e *Executor) setTimeoutToDeadlineIfOnlyDeadlineIsSet() { + // nolint:staticcheck + deadlineValue := e.cfg.Run.Deadline + if deadlineValue != 0 && e.cfg.Run.Timeout == defaultTimeout { + e.cfg.Run.Timeout = deadlineValue + } +} + +func (e *Executor) setupExitCode(ctx context.Context) { + if ctx.Err() != nil { + e.exitCode = exitcodes.Timeout + e.log.Errorf("Timeout exceeded: try increasing it by passing --timeout option") + return + } + + if e.exitCode != exitcodes.Success { + return + } + + needFailOnWarnings := (os.Getenv("GL_TEST_RUN") == "1" || os.Getenv("FAIL_ON_WARNINGS") == "1") + if needFailOnWarnings && len(e.reportData.Warnings) != 0 { + e.exitCode = 
exitcodes.WarningInTest + return + } + + if e.reportData.Error != "" { + // it's a case e.g. when typecheck linter couldn't parse and error and just logged it + e.exitCode = exitcodes.ErrorWasLogged + return + } +} + +func watchResources(ctx context.Context, done chan struct{}, logger logutils.Log, debugf logutils.DebugFunc) { + startedAt := time.Now() + debugf("Started tracking time") + + var maxRSSMB, totalRSSMB float64 + var iterationsCount int + + const intervalMS = 100 + ticker := time.NewTicker(intervalMS * time.Millisecond) + defer ticker.Stop() + + logEveryRecord := os.Getenv("GL_MEM_LOG_EVERY") == "1" + const MB = 1024 * 1024 + + track := func() { + var m runtime.MemStats + runtime.ReadMemStats(&m) + + if logEveryRecord { + debugf("Stopping memory tracing iteration, printing ...") + printMemStats(&m, logger) + } + + rssMB := float64(m.Sys) / MB + if rssMB > maxRSSMB { + maxRSSMB = rssMB + } + totalRSSMB += rssMB + iterationsCount++ + } + + for { + track() + + stop := false + select { + case <-ctx.Done(): + stop = true + debugf("Stopped resources tracking") + case <-ticker.C: + } + + if stop { + break + } + } + track() + + avgRSSMB := totalRSSMB / float64(iterationsCount) + + logger.Infof("Memory: %d samples, avg is %.1fMB, max is %.1fMB", + iterationsCount, avgRSSMB, maxRSSMB) + logger.Infof("Execution took %s", time.Since(startedAt)) + close(done) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/commands/version.go b/vendor/github.com/golangci/golangci-lint/pkg/commands/version.go new file mode 100644 index 000000000..8b48e515b --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/commands/version.go @@ -0,0 +1,60 @@ +package commands + +import ( + "encoding/json" + "fmt" + "strings" + + "github.com/spf13/cobra" + "github.com/spf13/pflag" + + "github.com/golangci/golangci-lint/pkg/config" +) + +type jsonVersion struct { + Version string `json:"version"` + Commit string `json:"commit"` + Date string `json:"date"` +} + +func (e *Executor) initVersionConfiguration(cmd *cobra.Command) { + fs := cmd.Flags() + fs.SortFlags = false // sort them as they are defined here + initVersionFlagSet(fs, e.cfg) +} + +func initVersionFlagSet(fs *pflag.FlagSet, cfg *config.Config) { + // Version config + vc := &cfg.Version + fs.StringVar(&vc.Format, "format", "", wh("The version's format can be: 'short', 'json'")) +} + +func (e *Executor) initVersion() { + versionCmd := &cobra.Command{ + Use: "version", + Short: "Version", + RunE: func(cmd *cobra.Command, _ []string) error { + switch strings.ToLower(e.cfg.Version.Format) { + case "short": + fmt.Println(e.version) + case "json": + ver := jsonVersion{ + Version: e.version, + Commit: e.commit, + Date: e.date, + } + data, err := json.Marshal(&ver) + if err != nil { + return err + } + fmt.Println(string(data)) + default: + fmt.Printf("golangci-lint has version %s built from %s on %s\n", e.version, e.commit, e.date) + } + return nil + }, + } + + e.rootCmd.AddCommand(versionCmd) + e.initVersionConfiguration(versionCmd) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/config/config.go b/vendor/github.com/golangci/golangci-lint/pkg/config/config.go new file mode 100644 index 000000000..931ddbbbe --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/config/config.go @@ -0,0 +1,26 @@ +package config + +type Config struct { + Run Run + + Output Output + + LintersSettings LintersSettings `mapstructure:"linters-settings"` + Linters Linters + Issues Issues + Severity Severity + Version Version + + InternalCmdTest bool 
`mapstructure:"internal-cmd-test"` // Option is used only for testing golangci-lint command, don't use it + InternalTest bool // Option is used only for testing golangci-lint code, don't use it +} + +func NewDefault() *Config { + return &Config{ + LintersSettings: defaultLintersSettings, + } +} + +type Version struct { + Format string `mapstructure:"format"` +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/config/issues.go b/vendor/github.com/golangci/golangci-lint/pkg/config/issues.go new file mode 100644 index 000000000..71bf2a90e --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/config/issues.go @@ -0,0 +1,204 @@ +package config + +import ( + "fmt" + "regexp" +) + +const excludeRuleMinConditionsCount = 2 + +var DefaultExcludePatterns = []ExcludePattern{ + { + ID: "EXC0001", + Pattern: "Error return value of .((os\\.)?std(out|err)\\..*|.*Close" + + "|.*Flush|os\\.Remove(All)?|.*print(f|ln)?|os\\.(Un)?Setenv). is not checked", + Linter: "errcheck", + Why: "Almost all programs ignore errors on these functions and in most cases it's ok", + }, + { + ID: "EXC0002", + Pattern: "(comment on exported (method|function|type|const)|" + + "should have( a package)? comment|comment should be of the form)", + Linter: "golint", + Why: "Annoying issue about not having a comment. The rare codebase has such comments", + }, + { + ID: "EXC0003", + Pattern: "func name will be used as test\\.Test.* by other packages, and that stutters; consider calling this", + Linter: "golint", + Why: "False positive when tests are defined in package 'test'", + }, + { + ID: "EXC0004", + Pattern: "(possible misuse of unsafe.Pointer|should have signature)", + Linter: "govet", + Why: "Common false positives", + }, + { + ID: "EXC0005", + Pattern: "ineffective break statement. Did you mean to break out of the outer loop", + Linter: "staticcheck", + Why: "Developers tend to write in C-style with an explicit 'break' in a 'switch', so it's ok to ignore", + }, + { + ID: "EXC0006", + Pattern: "Use of unsafe calls should be audited", + Linter: "gosec", + Why: "Too many false-positives on 'unsafe' usage", + }, + { + ID: "EXC0007", + Pattern: "Subprocess launch(ed with variable|ing should be audited)", + Linter: "gosec", + Why: "Too many false-positives for parametrized shell calls", + }, + { + ID: "EXC0008", + Pattern: "(G104|G307)", + Linter: "gosec", + Why: "Duplicated errcheck checks", + }, + { + ID: "EXC0009", + Pattern: "(Expect directory permissions to be 0750 or less|Expect file permissions to be 0600 or less)", + Linter: "gosec", + Why: "Too many issues in popular repos", + }, + { + ID: "EXC0010", + Pattern: "Potential file inclusion via variable", + Linter: "gosec", + Why: "False positive is triggered by 'src, err := ioutil.ReadFile(filename)'", + }, + { + ID: "EXC0011", + Pattern: "(comment on exported (method|function|type|const)|" + + "should have( a package)? comment|comment should be of the form)", + Linter: "stylecheck", + Why: "Annoying issue about not having a comment. The rare codebase has such comments", + }, + { + ID: "EXC0012", + Pattern: `exported (.+) should have comment( \(or a comment on this block\))? or be unexported`, + Linter: "revive", + Why: "Annoying issue about not having a comment. The rare codebase has such comments", + }, + { + ID: "EXC0013", + Pattern: `package comment should be of the form "(.+)...`, + Linter: "revive", + Why: "Annoying issue about not having a comment. 
The rare codebase has such comments", + }, + { + ID: "EXC0014", + Pattern: `comment on exported (.+) should be of the form "(.+)..."`, + Linter: "revive", + Why: "Annoying issue about not having a comment. The rare codebase has such comments", + }, + { + ID: "EXC0015", + Pattern: `should have a package comment, unless it's in another file for this package`, + Linter: "revive", + Why: "Annoying issue about not having a comment. The rare codebase has such comments", + }, +} + +type Issues struct { + IncludeDefaultExcludes []string `mapstructure:"include"` + ExcludeCaseSensitive bool `mapstructure:"exclude-case-sensitive"` + ExcludePatterns []string `mapstructure:"exclude"` + ExcludeRules []ExcludeRule `mapstructure:"exclude-rules"` + UseDefaultExcludes bool `mapstructure:"exclude-use-default"` + + MaxIssuesPerLinter int `mapstructure:"max-issues-per-linter"` + MaxSameIssues int `mapstructure:"max-same-issues"` + + DiffFromRevision string `mapstructure:"new-from-rev"` + DiffPatchFilePath string `mapstructure:"new-from-patch"` + Diff bool `mapstructure:"new"` + + NeedFix bool `mapstructure:"fix"` +} + +type ExcludeRule struct { + BaseRule `mapstructure:",squash"` +} + +func (e ExcludeRule) Validate() error { + return e.BaseRule.Validate(excludeRuleMinConditionsCount) +} + +type BaseRule struct { + Linters []string + Path string + Text string + Source string +} + +func (b BaseRule) Validate(minConditionsCount int) error { + if err := validateOptionalRegex(b.Path); err != nil { + return fmt.Errorf("invalid path regex: %v", err) + } + if err := validateOptionalRegex(b.Text); err != nil { + return fmt.Errorf("invalid text regex: %v", err) + } + if err := validateOptionalRegex(b.Source); err != nil { + return fmt.Errorf("invalid source regex: %v", err) + } + nonBlank := 0 + if len(b.Linters) > 0 { + nonBlank++ + } + if b.Path != "" { + nonBlank++ + } + if b.Text != "" { + nonBlank++ + } + if b.Source != "" { + nonBlank++ + } + if nonBlank < minConditionsCount { + return fmt.Errorf("at least %d of (text, source, path, linters) should be set", minConditionsCount) + } + return nil +} + +func validateOptionalRegex(value string) error { + if value == "" { + return nil + } + _, err := regexp.Compile(value) + return err +} + +type ExcludePattern struct { + ID string + Pattern string + Linter string + Why string +} + +func GetDefaultExcludePatternsStrings() []string { + ret := make([]string, len(DefaultExcludePatterns)) + for i, p := range DefaultExcludePatterns { + ret[i] = p.Pattern + } + return ret +} + +func GetExcludePatterns(include []string) []ExcludePattern { + includeMap := make(map[string]bool, len(include)) + for _, inc := range include { + includeMap[inc] = true + } + + var ret []ExcludePattern + for _, p := range DefaultExcludePatterns { + if !includeMap[p.ID] { + ret = append(ret, p) + } + } + + return ret +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/config/linters.go b/vendor/github.com/golangci/golangci-lint/pkg/config/linters.go new file mode 100644 index 000000000..ccbdc123a --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/config/linters.go @@ -0,0 +1,11 @@ +package config + +type Linters struct { + Enable []string + Disable []string + EnableAll bool `mapstructure:"enable-all"` + DisableAll bool `mapstructure:"disable-all"` + Fast bool + + Presets []string +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/config/linters_settings.go b/vendor/github.com/golangci/golangci-lint/pkg/config/linters_settings.go new file mode 100644 index 
000000000..fd5f41318 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/config/linters_settings.go @@ -0,0 +1,486 @@ +package config + +import "github.com/pkg/errors" + +var defaultLintersSettings = LintersSettings{ + Lll: LllSettings{ + LineLength: 120, + TabWidth: 1, + }, + Unparam: UnparamSettings{ + Algo: "cha", + }, + Nakedret: NakedretSettings{ + MaxFuncLines: 30, + }, + Prealloc: PreallocSettings{ + Simple: true, + RangeLoops: true, + ForLoops: false, + }, + Gocritic: GocriticSettings{ + SettingsPerCheck: map[string]GocriticCheckSettings{}, + }, + Godox: GodoxSettings{ + Keywords: []string{}, + }, + Dogsled: DogsledSettings{ + MaxBlankIdentifiers: 2, + }, + Gocognit: GocognitSettings{ + MinComplexity: 30, + }, + WSL: WSLSettings{ + StrictAppend: true, + AllowAssignAndCallCuddle: true, + AllowAssignAndAnythingCuddle: false, + AllowMultiLineAssignCuddle: true, + AllowCuddleDeclaration: false, + AllowTrailingComment: false, + AllowSeparatedLeadingComment: false, + ForceCuddleErrCheckAndAssign: false, + ForceExclusiveShortDeclarations: false, + ForceCaseTrailingWhitespaceLimit: 0, + }, + NoLintLint: NoLintLintSettings{ + RequireExplanation: false, + AllowLeadingSpace: true, + RequireSpecific: false, + AllowUnused: false, + }, + Testpackage: TestpackageSettings{ + SkipRegexp: `(export|internal)_test\.go`, + }, + Nestif: NestifSettings{ + MinComplexity: 5, + }, + Exhaustive: ExhaustiveSettings{ + CheckGenerated: false, + DefaultSignifiesExhaustive: false, + }, + Gofumpt: GofumptSettings{ + LangVersion: "", + ExtraRules: false, + }, + ErrorLint: ErrorLintSettings{ + Errorf: true, + Asserts: true, + Comparison: true, + }, + Ifshort: IfshortSettings{ + MaxDeclLines: 1, + MaxDeclChars: 30, + }, + Predeclared: PredeclaredSettings{ + Ignore: "", + Qualified: false, + }, + Forbidigo: ForbidigoSettings{ + ExcludeGodocExamples: true, + }, +} + +type LintersSettings struct { + Cyclop Cyclop + Depguard DepGuardSettings + Dogsled DogsledSettings + Dupl DuplSettings + Errcheck ErrcheckSettings + ErrorLint ErrorLintSettings + Exhaustive ExhaustiveSettings + ExhaustiveStruct ExhaustiveStructSettings + Forbidigo ForbidigoSettings + Funlen FunlenSettings + Gci GciSettings + Gocognit GocognitSettings + Goconst GoConstSettings + Gocritic GocriticSettings + Gocyclo GoCycloSettings + Godot GodotSettings + Godox GodoxSettings + Gofmt GoFmtSettings + Gofumpt GofumptSettings + Goheader GoHeaderSettings + Goimports GoImportsSettings + Golint GoLintSettings + Gomnd GoMndSettings + GoModDirectives GoModDirectivesSettings + Gomodguard GoModGuardSettings + Gosec GoSecSettings + Gosimple StaticCheckSettings + Govet GovetSettings + Ifshort IfshortSettings + ImportAs ImportAsSettings + Lll LllSettings + Makezero MakezeroSettings + Maligned MalignedSettings + Misspell MisspellSettings + Nakedret NakedretSettings + Nestif NestifSettings + NoLintLint NoLintLintSettings + Prealloc PreallocSettings + Predeclared PredeclaredSettings + Promlinter PromlinterSettings + Revive ReviveSettings + RowsErrCheck RowsErrCheckSettings + Staticcheck StaticCheckSettings + Structcheck StructCheckSettings + Stylecheck StaticCheckSettings + Tagliatelle TagliatelleSettings + Testpackage TestpackageSettings + Thelper ThelperSettings + Unparam UnparamSettings + Unused StaticCheckSettings + Varcheck VarCheckSettings + Whitespace WhitespaceSettings + Wrapcheck WrapcheckSettings + WSL WSLSettings + + Custom map[string]CustomLinterSettings +} + +type Cyclop struct { + MaxComplexity int `mapstructure:"max-complexity"` + PackageAverage 
float64 `mapstructure:"package-average"` + SkipTests bool `mapstructure:"skip-tests"` +} + +type DepGuardSettings struct { + ListType string `mapstructure:"list-type"` + Packages []string + IncludeGoRoot bool `mapstructure:"include-go-root"` + PackagesWithErrorMessage map[string]string `mapstructure:"packages-with-error-message"` +} + +type DogsledSettings struct { + MaxBlankIdentifiers int `mapstructure:"max-blank-identifiers"` +} + +type DuplSettings struct { + Threshold int +} + +type ErrcheckSettings struct { + CheckTypeAssertions bool `mapstructure:"check-type-assertions"` + CheckAssignToBlank bool `mapstructure:"check-blank"` + Ignore string `mapstructure:"ignore"` + ExcludeFunctions []string `mapstructure:"exclude-functions"` + + // Deprecated: use ExcludeFunctions instead + Exclude string `mapstructure:"exclude"` +} + +type ErrorLintSettings struct { + Errorf bool `mapstructure:"errorf"` + Asserts bool `mapstructure:"asserts"` + Comparison bool `mapstructure:"comparison"` +} + +type ExhaustiveSettings struct { + CheckGenerated bool `mapstructure:"check-generated"` + DefaultSignifiesExhaustive bool `mapstructure:"default-signifies-exhaustive"` + IgnorePattern string `mapstructure:"ignore-pattern"` +} + +type ExhaustiveStructSettings struct { + StructPatterns []string `mapstructure:"struct-patterns"` +} + +type ForbidigoSettings struct { + Forbid []string `mapstructure:"forbid"` + ExcludeGodocExamples bool `mapstructure:"exclude-godoc-examples"` +} + +type FunlenSettings struct { + Lines int + Statements int +} + +type GciSettings struct { + LocalPrefixes string `mapstructure:"local-prefixes"` +} + +type GocognitSettings struct { + MinComplexity int `mapstructure:"min-complexity"` +} + +type GoConstSettings struct { + IgnoreTests bool `mapstructure:"ignore-tests"` + MatchWithConstants bool `mapstructure:"match-constant"` + MinStringLen int `mapstructure:"min-len"` + MinOccurrencesCount int `mapstructure:"min-occurrences"` + ParseNumbers bool `mapstructure:"numbers"` + NumberMin int `mapstructure:"min"` + NumberMax int `mapstructure:"max"` + IgnoreCalls bool `mapstructure:"ignore-calls"` +} + +type GoCycloSettings struct { + MinComplexity int `mapstructure:"min-complexity"` +} + +type GodotSettings struct { + Scope string `mapstructure:"scope"` + Exclude []string `mapstructure:"exclude"` + Capital bool `mapstructure:"capital"` + + // Deprecated: use `Scope` instead + CheckAll bool `mapstructure:"check-all"` +} + +type GodoxSettings struct { + Keywords []string +} + +type GoFmtSettings struct { + Simplify bool +} + +type GofumptSettings struct { + LangVersion string `mapstructure:"lang-version"` + ExtraRules bool `mapstructure:"extra-rules"` +} + +type GoHeaderSettings struct { + Values map[string]map[string]string `mapstructure:"values"` + Template string `mapstructure:"template"` + TemplatePath string `mapstructure:"template-path"` +} + +type GoImportsSettings struct { + LocalPrefixes string `mapstructure:"local-prefixes"` +} + +type GoLintSettings struct { + MinConfidence float64 `mapstructure:"min-confidence"` +} + +type GoMndSettings struct { + Settings map[string]map[string]interface{} +} + +type GoModDirectivesSettings struct { + ReplaceAllowList []string `mapstructure:"replace-allow-list"` + ReplaceLocal bool `mapstructure:"replace-local"` + ExcludeForbidden bool `mapstructure:"exclude-forbidden"` + RetractAllowNoExplanation bool `mapstructure:"retract-allow-no-explanation"` +} + +type GoModGuardSettings struct { + Allowed struct { + Modules []string `mapstructure:"modules"` + 
Domains []string `mapstructure:"domains"` + } `mapstructure:"allowed"` + Blocked struct { + Modules []map[string]struct { + Recommendations []string `mapstructure:"recommendations"` + Reason string `mapstructure:"reason"` + } `mapstructure:"modules"` + Versions []map[string]struct { + Version string `mapstructure:"version"` + Reason string `mapstructure:"reason"` + } `mapstructure:"versions"` + LocalReplaceDirectives bool `mapstructure:"local_replace_directives"` + } `mapstructure:"blocked"` +} + +type GoSecSettings struct { + Includes []string + Excludes []string + Config map[string]interface{} `mapstructure:"config"` +} + +type GovetSettings struct { + CheckShadowing bool `mapstructure:"check-shadowing"` + Settings map[string]map[string]interface{} + + Enable []string + Disable []string + EnableAll bool `mapstructure:"enable-all"` + DisableAll bool `mapstructure:"disable-all"` +} + +func (cfg GovetSettings) Validate() error { + if cfg.EnableAll && cfg.DisableAll { + return errors.New("enable-all and disable-all can't be combined") + } + if cfg.EnableAll && len(cfg.Enable) != 0 { + return errors.New("enable-all and enable can't be combined") + } + if cfg.DisableAll && len(cfg.Disable) != 0 { + return errors.New("disable-all and disable can't be combined") + } + return nil +} + +type IfshortSettings struct { + MaxDeclLines int `mapstructure:"max-decl-lines"` + MaxDeclChars int `mapstructure:"max-decl-chars"` +} + +type ImportAsSettings struct { + Alias []ImportAsAlias + NoUnaliased bool `mapstructure:"no-unaliased"` +} + +type ImportAsAlias struct { + Pkg string + Alias string +} + +type LllSettings struct { + LineLength int `mapstructure:"line-length"` + TabWidth int `mapstructure:"tab-width"` +} + +type MakezeroSettings struct { + Always bool +} + +type MalignedSettings struct { + SuggestNewOrder bool `mapstructure:"suggest-new"` +} + +type MisspellSettings struct { + Locale string + IgnoreWords []string `mapstructure:"ignore-words"` +} + +type NakedretSettings struct { + MaxFuncLines int `mapstructure:"max-func-lines"` +} + +type NestifSettings struct { + MinComplexity int `mapstructure:"min-complexity"` +} + +type NoLintLintSettings struct { + RequireExplanation bool `mapstructure:"require-explanation"` + AllowLeadingSpace bool `mapstructure:"allow-leading-space"` + RequireSpecific bool `mapstructure:"require-specific"` + AllowNoExplanation []string `mapstructure:"allow-no-explanation"` + AllowUnused bool `mapstructure:"allow-unused"` +} + +type PreallocSettings struct { + Simple bool + RangeLoops bool `mapstructure:"range-loops"` + ForLoops bool `mapstructure:"for-loops"` +} + +type PredeclaredSettings struct { + Ignore string `mapstructure:"ignore"` + Qualified bool `mapstructure:"q"` +} + +type PromlinterSettings struct { + Strict bool `mapstructure:"strict"` + DisabledLinters []string `mapstructure:"disabled-linters"` +} + +type ReviveSettings struct { + IgnoreGeneratedHeader bool `mapstructure:"ignore-generated-header"` + Confidence float64 + Severity string + EnableAllRules bool `mapstructure:"enable-all-rules"` + Rules []struct { + Name string + Arguments []interface{} + Severity string + Disabled bool + } + ErrorCode int `mapstructure:"error-code"` + WarningCode int `mapstructure:"warning-code"` + Directives []struct { + Name string + Severity string + } +} + +type RowsErrCheckSettings struct { + Packages []string +} + +type StaticCheckSettings struct { + GoVersion string `mapstructure:"go"` + + Checks []string `mapstructure:"checks"` + Initialisms []string 
`mapstructure:"initialisms"` // only for stylecheck + DotImportWhitelist []string `mapstructure:"dot-import-whitelist"` // only for stylecheck + HTTPStatusCodeWhitelist []string `mapstructure:"http-status-code-whitelist"` // only for stylecheck +} + +func (s *StaticCheckSettings) HasConfiguration() bool { + return len(s.Initialisms) > 0 || len(s.HTTPStatusCodeWhitelist) > 0 || len(s.DotImportWhitelist) > 0 || len(s.Checks) > 0 +} + +type StructCheckSettings struct { + CheckExportedFields bool `mapstructure:"exported-fields"` +} + +type TagliatelleSettings struct { + Case struct { + Rules map[string]string + UseFieldName bool `mapstructure:"use-field-name"` + } +} + +type TestpackageSettings struct { + SkipRegexp string `mapstructure:"skip-regexp"` +} + +type ThelperSettings struct { + Test struct { + First bool `mapstructure:"first"` + Name bool `mapstructure:"name"` + Begin bool `mapstructure:"begin"` + } `mapstructure:"test"` + Benchmark struct { + First bool `mapstructure:"first"` + Name bool `mapstructure:"name"` + Begin bool `mapstructure:"begin"` + } `mapstructure:"benchmark"` + TB struct { + First bool `mapstructure:"first"` + Name bool `mapstructure:"name"` + Begin bool `mapstructure:"begin"` + } `mapstructure:"tb"` +} + +type UnparamSettings struct { + CheckExported bool `mapstructure:"check-exported"` + Algo string +} + +type VarCheckSettings struct { + CheckExportedFields bool `mapstructure:"exported-fields"` +} + +type WhitespaceSettings struct { + MultiIf bool `mapstructure:"multi-if"` + MultiFunc bool `mapstructure:"multi-func"` +} + +type WrapcheckSettings struct { + IgnoreSigs []string `mapstructure:"ignoreSigs"` + IgnorePackageGlobs []string `mapstructure:"ignorePackageGlobs"` +} + +type WSLSettings struct { + StrictAppend bool `mapstructure:"strict-append"` + AllowAssignAndCallCuddle bool `mapstructure:"allow-assign-and-call"` + AllowAssignAndAnythingCuddle bool `mapstructure:"allow-assign-and-anything"` + AllowMultiLineAssignCuddle bool `mapstructure:"allow-multiline-assign"` + AllowCuddleDeclaration bool `mapstructure:"allow-cuddle-declarations"` + AllowTrailingComment bool `mapstructure:"allow-trailing-comment"` + AllowSeparatedLeadingComment bool `mapstructure:"allow-separated-leading-comment"` + ForceCuddleErrCheckAndAssign bool `mapstructure:"force-err-cuddling"` + ForceExclusiveShortDeclarations bool `mapstructure:"force-short-decl-cuddling"` + ForceCaseTrailingWhitespaceLimit int `mapstructure:"force-case-trailing-whitespace"` +} + +type CustomLinterSettings struct { + Path string + Description string + OriginalURL string `mapstructure:"original-url"` +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/config/linters_settings_gocritic.go b/vendor/github.com/golangci/golangci-lint/pkg/config/linters_settings_gocritic.go new file mode 100644 index 000000000..34f850758 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/config/linters_settings_gocritic.go @@ -0,0 +1,365 @@ +package config + +import ( + "fmt" + "sort" + "strings" + + _ "github.com/go-critic/go-critic/checkers" // this import register checkers + "github.com/go-critic/go-critic/framework/linter" + "github.com/pkg/errors" + + "github.com/golangci/golangci-lint/pkg/logutils" +) + +const gocriticDebugKey = "gocritic" + +var ( + gocriticDebugf = logutils.Debug(gocriticDebugKey) + isGocriticDebug = logutils.HaveDebugTag(gocriticDebugKey) + allGocriticCheckers = linter.GetCheckersInfo() + allGocriticCheckerMap = func() map[string]*linter.CheckerInfo { + checkInfoMap := 
make(map[string]*linter.CheckerInfo) + for _, checkInfo := range allGocriticCheckers { + checkInfoMap[checkInfo.Name] = checkInfo + } + return checkInfoMap + }() +) + +type GocriticCheckSettings map[string]interface{} + +type GocriticSettings struct { + EnabledChecks []string `mapstructure:"enabled-checks"` + DisabledChecks []string `mapstructure:"disabled-checks"` + EnabledTags []string `mapstructure:"enabled-tags"` + DisabledTags []string `mapstructure:"disabled-tags"` + SettingsPerCheck map[string]GocriticCheckSettings `mapstructure:"settings"` + + inferredEnabledChecks map[string]bool +} + +func debugChecksListf(checks []string, format string, args ...interface{}) { + if isGocriticDebug { + prefix := fmt.Sprintf(format, args...) + gocriticDebugf(prefix+" checks (%d): %s", len(checks), sprintStrings(checks)) + } +} + +func stringsSliceToSet(ss []string) map[string]bool { + ret := map[string]bool{} + for _, s := range ss { + ret[s] = true + } + + return ret +} + +func buildGocriticTagToCheckersMap() map[string][]string { + tagToCheckers := map[string][]string{} + for _, checker := range allGocriticCheckers { + for _, tag := range checker.Tags { + tagToCheckers[tag] = append(tagToCheckers[tag], checker.Name) + } + } + return tagToCheckers +} + +func gocriticCheckerTagsDebugf() { + if !isGocriticDebug { + return + } + + tagToCheckers := buildGocriticTagToCheckersMap() + + var allTags []string + for tag := range tagToCheckers { + allTags = append(allTags, tag) + } + sort.Strings(allTags) + + gocriticDebugf("All gocritic existing tags and checks:") + for _, tag := range allTags { + debugChecksListf(tagToCheckers[tag], " tag %q", tag) + } +} + +func (s *GocriticSettings) gocriticDisabledCheckersDebugf() { + if !isGocriticDebug { + return + } + + var disabledCheckers []string + for _, checker := range allGocriticCheckers { + if s.inferredEnabledChecks[strings.ToLower(checker.Name)] { + continue + } + + disabledCheckers = append(disabledCheckers, checker.Name) + } + + if len(disabledCheckers) == 0 { + gocriticDebugf("All checks are enabled") + } else { + debugChecksListf(disabledCheckers, "Final not used") + } +} + +func (s *GocriticSettings) InferEnabledChecks(log logutils.Log) { + gocriticCheckerTagsDebugf() + + enabledByDefaultChecks := getDefaultEnabledGocriticCheckersNames() + debugChecksListf(enabledByDefaultChecks, "Enabled by default") + + disabledByDefaultChecks := getDefaultDisabledGocriticCheckersNames() + debugChecksListf(disabledByDefaultChecks, "Disabled by default") + + var enabledChecks []string + + // EnabledTags + if len(s.EnabledTags) != 0 { + tagToCheckers := buildGocriticTagToCheckersMap() + for _, tag := range s.EnabledTags { + enabledChecks = append(enabledChecks, tagToCheckers[tag]...) + } + debugChecksListf(enabledChecks, "Enabled by config tags %s", sprintStrings(s.EnabledTags)) + } + + if !(len(s.EnabledTags) == 0 && len(s.EnabledChecks) != 0) { + // don't use default checks only if we have no enabled tags and enable some checks manually + enabledChecks = append(enabledChecks, enabledByDefaultChecks...) 
+ } + + // DisabledTags + if len(s.DisabledTags) != 0 { + enabledChecks = filterByDisableTags(enabledChecks, s.DisabledTags, log) + } + + // EnabledChecks + if len(s.EnabledChecks) != 0 { + debugChecksListf(s.EnabledChecks, "Enabled by config") + + alreadyEnabledChecksSet := stringsSliceToSet(enabledChecks) + for _, enabledCheck := range s.EnabledChecks { + if alreadyEnabledChecksSet[enabledCheck] { + log.Warnf("No need to enable check %q: it's already enabled", enabledCheck) + continue + } + enabledChecks = append(enabledChecks, enabledCheck) + } + } + + // DisabledChecks + if len(s.DisabledChecks) != 0 { + debugChecksListf(s.DisabledChecks, "Disabled by config") + + enabledChecksSet := stringsSliceToSet(enabledChecks) + for _, disabledCheck := range s.DisabledChecks { + if !enabledChecksSet[disabledCheck] { + log.Warnf("Gocritic check %q was explicitly disabled via config. However, as this check"+ + "is disabled by default, there is no need to explicitly disable it via config.", disabledCheck) + continue + } + delete(enabledChecksSet, disabledCheck) + } + + enabledChecks = nil + for enabledCheck := range enabledChecksSet { + enabledChecks = append(enabledChecks, enabledCheck) + } + } + + s.inferredEnabledChecks = map[string]bool{} + for _, check := range enabledChecks { + s.inferredEnabledChecks[strings.ToLower(check)] = true + } + + debugChecksListf(enabledChecks, "Final used") + s.gocriticDisabledCheckersDebugf() +} + +func validateStringsUniq(ss []string) error { + set := map[string]bool{} + for _, s := range ss { + _, ok := set[s] + if ok { + return fmt.Errorf("%q occurs multiple times in list", s) + } + set[s] = true + } + + return nil +} + +func intersectStringSlice(s1, s2 []string) []string { + s1Map := make(map[string]struct{}) + for _, s := range s1 { + s1Map[s] = struct{}{} + } + + result := make([]string, 0) + for _, s := range s2 { + if _, exists := s1Map[s]; exists { + result = append(result, s) + } + } + + return result +} + +func (s *GocriticSettings) Validate(log logutils.Log) error { + if len(s.EnabledTags) == 0 { + if len(s.EnabledChecks) != 0 && len(s.DisabledChecks) != 0 { + return errors.New("both enabled and disabled check aren't allowed for gocritic") + } + } else { + if err := validateStringsUniq(s.EnabledTags); err != nil { + return errors.Wrap(err, "validate enabled tags") + } + + tagToCheckers := buildGocriticTagToCheckersMap() + for _, tag := range s.EnabledTags { + if _, ok := tagToCheckers[tag]; !ok { + return fmt.Errorf("gocritic [enabled]tag %q doesn't exist", tag) + } + } + } + + if len(s.DisabledTags) > 0 { + tagToCheckers := buildGocriticTagToCheckersMap() + for _, tag := range s.EnabledTags { + if _, ok := tagToCheckers[tag]; !ok { + return fmt.Errorf("gocritic [disabled]tag %q doesn't exist", tag) + } + } + } + + if err := validateStringsUniq(s.EnabledChecks); err != nil { + return errors.Wrap(err, "validate enabled checks") + } + if err := validateStringsUniq(s.DisabledChecks); err != nil { + return errors.Wrap(err, "validate disabled checks") + } + + if err := s.validateCheckerNames(log); err != nil { + return errors.Wrap(err, "validation failed") + } + + return nil +} + +func (s *GocriticSettings) IsCheckEnabled(name string) bool { + return s.inferredEnabledChecks[strings.ToLower(name)] +} + +func sprintAllowedCheckerNames(allowedNames map[string]bool) string { + var namesSlice []string + for name := range allowedNames { + namesSlice = append(namesSlice, name) + } + return sprintStrings(namesSlice) +} + +func sprintStrings(ss []string) string { + 
sort.Strings(ss) + return fmt.Sprint(ss) +} + +// getAllCheckerNames returns a map containing all checker names supported by gocritic. +func getAllCheckerNames() map[string]bool { + allCheckerNames := map[string]bool{} + for _, checker := range allGocriticCheckers { + allCheckerNames[strings.ToLower(checker.Name)] = true + } + + return allCheckerNames +} + +func isEnabledByDefaultGocriticCheck(info *linter.CheckerInfo) bool { + return !info.HasTag("experimental") && + !info.HasTag("opinionated") && + !info.HasTag("performance") +} + +func getDefaultEnabledGocriticCheckersNames() []string { + var enabled []string + for _, info := range allGocriticCheckers { + enable := isEnabledByDefaultGocriticCheck(info) + if enable { + enabled = append(enabled, info.Name) + } + } + + return enabled +} + +func getDefaultDisabledGocriticCheckersNames() []string { + var disabled []string + for _, info := range allGocriticCheckers { + enable := isEnabledByDefaultGocriticCheck(info) + if !enable { + disabled = append(disabled, info.Name) + } + } + + return disabled +} + +func (s *GocriticSettings) validateCheckerNames(log logutils.Log) error { + allowedNames := getAllCheckerNames() + + for _, name := range s.EnabledChecks { + if !allowedNames[strings.ToLower(name)] { + return fmt.Errorf("enabled checker %s doesn't exist, all existing checkers: %s", + name, sprintAllowedCheckerNames(allowedNames)) + } + } + + for _, name := range s.DisabledChecks { + if !allowedNames[strings.ToLower(name)] { + return fmt.Errorf("disabled checker %s doesn't exist, all existing checkers: %s", + name, sprintAllowedCheckerNames(allowedNames)) + } + } + + for checkName := range s.SettingsPerCheck { + if _, ok := allowedNames[checkName]; !ok { + return fmt.Errorf("invalid setting, checker %s doesn't exist, all existing checkers: %s", + checkName, sprintAllowedCheckerNames(allowedNames)) + } + if !s.IsCheckEnabled(checkName) { + log.Warnf("Gocritic settings were provided for not enabled check %q", checkName) + } + } + + return nil +} + +func (s *GocriticSettings) GetLowercasedParams() map[string]GocriticCheckSettings { + ret := map[string]GocriticCheckSettings{} + for checker, params := range s.SettingsPerCheck { + ret[strings.ToLower(checker)] = params + } + return ret +} + +func filterByDisableTags(enabledChecks, disableTags []string, log logutils.Log) []string { + enabledChecksSet := stringsSliceToSet(enabledChecks) + for _, enabledCheck := range enabledChecks { + checkInfo, checkInfoExists := allGocriticCheckerMap[enabledCheck] + if !checkInfoExists { + log.Warnf("Gocritic check %q was not exists via filtering disabled tags", enabledCheck) + continue + } + hitTags := intersectStringSlice(checkInfo.Tags, disableTags) + if len(hitTags) != 0 { + delete(enabledChecksSet, enabledCheck) + } + debugChecksListf(enabledChecks, "Disabled by config tags %s", sprintStrings(disableTags)) + } + enabledChecks = nil + for enabledCheck := range enabledChecksSet { + enabledChecks = append(enabledChecks, enabledCheck) + } + return enabledChecks +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/config/output.go b/vendor/github.com/golangci/golangci-lint/pkg/config/output.go new file mode 100644 index 000000000..d67f110f6 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/config/output.go @@ -0,0 +1,36 @@ +package config + +const ( + OutFormatJSON = "json" + OutFormatLineNumber = "line-number" + OutFormatColoredLineNumber = "colored-line-number" + OutFormatTab = "tab" + OutFormatCheckstyle = "checkstyle" + 
OutFormatCodeClimate = "code-climate" + OutFormatHTML = "html" + OutFormatJunitXML = "junit-xml" + OutFormatGithubActions = "github-actions" +) + +var OutFormats = []string{ + OutFormatColoredLineNumber, + OutFormatLineNumber, + OutFormatJSON, + OutFormatTab, + OutFormatCheckstyle, + OutFormatCodeClimate, + OutFormatHTML, + OutFormatJunitXML, + OutFormatGithubActions, +} + +type Output struct { + Format string + Color string + PrintIssuedLine bool `mapstructure:"print-issued-lines"` + PrintLinterName bool `mapstructure:"print-linter-name"` + UniqByLine bool `mapstructure:"uniq-by-line"` + SortResults bool `mapstructure:"sort-results"` + PrintWelcomeMessage bool `mapstructure:"print-welcome"` + PathPrefix string `mapstructure:"path-prefix"` +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/config/reader.go b/vendor/github.com/golangci/golangci-lint/pkg/config/reader.go new file mode 100644 index 000000000..6e97277da --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/config/reader.go @@ -0,0 +1,221 @@ +package config + +import ( + "errors" + "fmt" + "os" + "path/filepath" + "strings" + + "github.com/mitchellh/go-homedir" + "github.com/spf13/viper" + + "github.com/golangci/golangci-lint/pkg/fsutils" + "github.com/golangci/golangci-lint/pkg/logutils" + "github.com/golangci/golangci-lint/pkg/sliceutil" +) + +type FileReader struct { + log logutils.Log + cfg *Config + commandLineCfg *Config +} + +func NewFileReader(toCfg, commandLineCfg *Config, log logutils.Log) *FileReader { + return &FileReader{ + log: log, + cfg: toCfg, + commandLineCfg: commandLineCfg, + } +} + +func (r *FileReader) Read() error { + // XXX: hack with double parsing for 2 purposes: + // 1. to access "config" option here. + // 2. to give config less priority than command line. 
+ + configFile, err := r.parseConfigOption() + if err != nil { + if err == errConfigDisabled { + return nil + } + + return fmt.Errorf("can't parse --config option: %s", err) + } + + if configFile != "" { + viper.SetConfigFile(configFile) + } else { + r.setupConfigFileSearch() + } + + return r.parseConfig() +} + +func (r *FileReader) parseConfig() error { + if err := viper.ReadInConfig(); err != nil { + if _, ok := err.(viper.ConfigFileNotFoundError); ok { + return nil + } + + return fmt.Errorf("can't read viper config: %s", err) + } + + usedConfigFile := viper.ConfigFileUsed() + if usedConfigFile == "" { + return nil + } + + usedConfigFile, err := fsutils.ShortestRelPath(usedConfigFile, "") + if err != nil { + r.log.Warnf("Can't pretty print config file path: %s", err) + } + r.log.Infof("Used config file %s", usedConfigFile) + + if err := viper.Unmarshal(r.cfg); err != nil { + return fmt.Errorf("can't unmarshal config by viper: %s", err) + } + + if err := r.validateConfig(); err != nil { + return fmt.Errorf("can't validate config: %s", err) + } + + if r.cfg.InternalTest { // just for testing purposes: to detect config file usage + fmt.Fprintln(logutils.StdOut, "test") + os.Exit(0) + } + + return nil +} + +func (r *FileReader) validateConfig() error { + c := r.cfg + if len(c.Run.Args) != 0 { + return errors.New("option run.args in config isn't supported now") + } + + if c.Run.CPUProfilePath != "" { + return errors.New("option run.cpuprofilepath in config isn't allowed") + } + + if c.Run.MemProfilePath != "" { + return errors.New("option run.memprofilepath in config isn't allowed") + } + + if c.Run.TracePath != "" { + return errors.New("option run.tracepath in config isn't allowed") + } + + if c.Run.IsVerbose { + return errors.New("can't set run.verbose option with config: only on command-line") + } + for i, rule := range c.Issues.ExcludeRules { + if err := rule.Validate(); err != nil { + return fmt.Errorf("error in exclude rule #%d: %v", i, err) + } + } + if len(c.Severity.Rules) > 0 && c.Severity.Default == "" { + return errors.New("can't set severity rule option: no default severity defined") + } + for i, rule := range c.Severity.Rules { + if err := rule.Validate(); err != nil { + return fmt.Errorf("error in severity rule #%d: %v", i, err) + } + } + if err := c.LintersSettings.Govet.Validate(); err != nil { + return fmt.Errorf("error in govet config: %v", err) + } + return nil +} + +func getFirstPathArg() string { + args := os.Args + + // skip all args ([golangci-lint, run/linters]) before files/dirs list + for len(args) != 0 { + if args[0] == "run" { + args = args[1:] + break + } + + args = args[1:] + } + + // find first file/dir arg + firstArg := "./..." 
+ for _, arg := range args { + if !strings.HasPrefix(arg, "-") { + firstArg = arg + break + } + } + + return firstArg +} + +func (r *FileReader) setupConfigFileSearch() { + firstArg := getFirstPathArg() + absStartPath, err := filepath.Abs(firstArg) + if err != nil { + r.log.Warnf("Can't make abs path for %q: %s", firstArg, err) + absStartPath = filepath.Clean(firstArg) + } + + // start from it + var curDir string + if fsutils.IsDir(absStartPath) { + curDir = absStartPath + } else { + curDir = filepath.Dir(absStartPath) + } + + // find all dirs from it up to the root + configSearchPaths := []string{"./"} + + for { + configSearchPaths = append(configSearchPaths, curDir) + newCurDir := filepath.Dir(curDir) + if curDir == newCurDir || newCurDir == "" { + break + } + curDir = newCurDir + } + + // find home directory for global config + if home, err := homedir.Dir(); err != nil { + r.log.Warnf("Can't get user's home directory: %s", err.Error()) + } else if !sliceutil.Contains(configSearchPaths, home) { + configSearchPaths = append(configSearchPaths, home) + } + + r.log.Infof("Config search paths: %s", configSearchPaths) + viper.SetConfigName(".golangci") + for _, p := range configSearchPaths { + viper.AddConfigPath(p) + } +} + +var errConfigDisabled = errors.New("config is disabled by --no-config") + +func (r *FileReader) parseConfigOption() (string, error) { + cfg := r.commandLineCfg + if cfg == nil { + return "", nil + } + + configFile := cfg.Run.Config + if cfg.Run.NoConfig && configFile != "" { + return "", fmt.Errorf("can't combine option --config and --no-config") + } + + if cfg.Run.NoConfig { + return "", errConfigDisabled + } + + configFile, err := homedir.Expand(configFile) + if err != nil { + return "", fmt.Errorf("failed to expand configuration path") + } + + return configFile, nil +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/config/run.go b/vendor/github.com/golangci/golangci-lint/pkg/config/run.go new file mode 100644 index 000000000..ff6347945 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/config/run.go @@ -0,0 +1,37 @@ +package config + +import "time" + +type Run struct { + IsVerbose bool `mapstructure:"verbose"` + Silent bool + CPUProfilePath string + MemProfilePath string + TracePath string + Concurrency int + PrintResourcesUsage bool `mapstructure:"print-resources-usage"` + + Config string + NoConfig bool + + Args []string + + BuildTags []string `mapstructure:"build-tags"` + ModulesDownloadMode string `mapstructure:"modules-download-mode"` + + ExitCodeIfIssuesFound int `mapstructure:"issues-exit-code"` + AnalyzeTests bool `mapstructure:"tests"` + + // Deprecated: Deadline exists for historical compatibility + // and should not be used. To set run timeout use Timeout instead. 
+ Deadline time.Duration + Timeout time.Duration + + PrintVersion bool + SkipFiles []string `mapstructure:"skip-files"` + SkipDirs []string `mapstructure:"skip-dirs"` + UseDefaultSkipDirs bool `mapstructure:"skip-dirs-use-default"` + + AllowParallelRunners bool `mapstructure:"allow-parallel-runners"` + AllowSerialRunners bool `mapstructure:"allow-serial-runners"` +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/config/severity.go b/vendor/github.com/golangci/golangci-lint/pkg/config/severity.go new file mode 100644 index 000000000..3068a0ed6 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/config/severity.go @@ -0,0 +1,18 @@ +package config + +const severityRuleMinConditionsCount = 1 + +type Severity struct { + Default string `mapstructure:"default-severity"` + CaseSensitive bool `mapstructure:"case-sensitive"` + Rules []SeverityRule `mapstructure:"rules"` +} + +type SeverityRule struct { + BaseRule `mapstructure:",squash"` + Severity string +} + +func (s *SeverityRule) Validate() error { + return s.BaseRule.Validate(severityRuleMinConditionsCount) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/exitcodes/exitcodes.go b/vendor/github.com/golangci/golangci-lint/pkg/exitcodes/exitcodes.go new file mode 100644 index 000000000..536f90361 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/exitcodes/exitcodes.go @@ -0,0 +1,34 @@ +package exitcodes + +const ( + Success = 0 + IssuesFound = 1 + WarningInTest = 2 + Failure = 3 + Timeout = 4 + NoGoFiles = 5 + NoConfigFileDetected = 6 + ErrorWasLogged = 7 +) + +type ExitError struct { + Message string + Code int +} + +func (e ExitError) Error() string { + return e.Message +} + +var ( + ErrNoGoFiles = &ExitError{ + Message: "no go files to analyze", + Code: NoGoFiles, + } + ErrFailure = &ExitError{ + Message: "failed to analyze", + Code: Failure, + } +) + +// 1 diff --git a/vendor/github.com/golangci/golangci-lint/pkg/fsutils/filecache.go b/vendor/github.com/golangci/golangci-lint/pkg/fsutils/filecache.go new file mode 100644 index 000000000..2b17a0398 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/fsutils/filecache.go @@ -0,0 +1,67 @@ +package fsutils + +import ( + "fmt" + "io/ioutil" + "sync" + + "github.com/pkg/errors" + + "github.com/golangci/golangci-lint/pkg/logutils" +) + +type FileCache struct { + files sync.Map +} + +func NewFileCache() *FileCache { + return &FileCache{} +} + +func (fc *FileCache) GetFileBytes(filePath string) ([]byte, error) { + cachedBytes, ok := fc.files.Load(filePath) + if ok { + return cachedBytes.([]byte), nil + } + + fileBytes, err := ioutil.ReadFile(filePath) + if err != nil { + return nil, errors.Wrapf(err, "can't read file %s", filePath) + } + + fc.files.Store(filePath, fileBytes) + return fileBytes, nil +} + +func PrettifyBytesCount(n int64) string { + const ( + Multiplexer = 1024 + KiB = 1 * Multiplexer + MiB = KiB * Multiplexer + GiB = MiB * Multiplexer + ) + + if n >= GiB { + return fmt.Sprintf("%.1fGiB", float64(n)/GiB) + } + if n >= MiB { + return fmt.Sprintf("%.1fMiB", float64(n)/MiB) + } + if n >= KiB { + return fmt.Sprintf("%.1fKiB", float64(n)/KiB) + } + return fmt.Sprintf("%dB", n) +} + +func (fc *FileCache) PrintStats(log logutils.Log) { + var size int64 + var mapLen int + fc.files.Range(func(_, fileBytes interface{}) bool { + mapLen++ + size += int64(len(fileBytes.([]byte))) + + return true + }) + + log.Infof("File cache stats: %d entries of total size %s", mapLen, PrettifyBytesCount(size)) +} diff --git 
a/vendor/github.com/golangci/golangci-lint/pkg/fsutils/fsutils.go b/vendor/github.com/golangci/golangci-lint/pkg/fsutils/fsutils.go new file mode 100644 index 000000000..a39c105e4 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/fsutils/fsutils.go @@ -0,0 +1,100 @@ +package fsutils + +import ( + "fmt" + "os" + "path/filepath" + "sync" +) + +func IsDir(filename string) bool { + fi, err := os.Stat(filename) + return err == nil && fi.IsDir() +} + +var cachedWd string +var cachedWdError error +var getWdOnce sync.Once +var useCache = true + +func UseWdCache(use bool) { + useCache = use +} + +func Getwd() (string, error) { + if !useCache { // for tests + return os.Getwd() + } + + getWdOnce.Do(func() { + cachedWd, cachedWdError = os.Getwd() + if cachedWdError != nil { + return + } + + evaledWd, err := EvalSymlinks(cachedWd) + if err != nil { + cachedWd, cachedWdError = "", fmt.Errorf("can't eval symlinks on wd %s: %s", cachedWd, err) + return + } + + cachedWd = evaledWd + }) + + return cachedWd, cachedWdError +} + +var evalSymlinkCache sync.Map + +type evalSymlinkRes struct { + path string + err error +} + +func EvalSymlinks(path string) (string, error) { + r, ok := evalSymlinkCache.Load(path) + if ok { + er := r.(evalSymlinkRes) + return er.path, er.err + } + + var er evalSymlinkRes + er.path, er.err = filepath.EvalSymlinks(path) + evalSymlinkCache.Store(path, er) + + return er.path, er.err +} + +func ShortestRelPath(path, wd string) (string, error) { + if wd == "" { // get it if user don't have cached working dir + var err error + wd, err = Getwd() + if err != nil { + return "", fmt.Errorf("can't get working directory: %s", err) + } + } + + evaledPath, err := EvalSymlinks(path) + if err != nil { + return "", fmt.Errorf("can't eval symlinks for path %s: %s", path, err) + } + path = evaledPath + + // make path absolute and then relative to be able to fix this case: + // we are in /test dir, we want to normalize ../test, and have file file.go in this dir; + // it must have normalized path file.go, not ../test/file.go, + var absPath string + if filepath.IsAbs(path) { + absPath = path + } else { + absPath = filepath.Join(wd, path) + } + + relPath, err := filepath.Rel(wd, absPath) + if err != nil { + return "", fmt.Errorf("can't get relative path for path %s and root %s: %s", + absPath, wd, err) + } + + return relPath, nil +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/fsutils/linecache.go b/vendor/github.com/golangci/golangci-lint/pkg/fsutils/linecache.go new file mode 100644 index 000000000..ab408e7d5 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/fsutils/linecache.go @@ -0,0 +1,70 @@ +package fsutils + +import ( + "bytes" + "fmt" + "sync" + + "github.com/pkg/errors" +) + +type fileLinesCache [][]byte + +type LineCache struct { + files sync.Map + fileCache *FileCache +} + +func NewLineCache(fc *FileCache) *LineCache { + return &LineCache{ + fileCache: fc, + } +} + +// GetLine returns a index1-th (1-based index) line from the file on filePath +func (lc *LineCache) GetLine(filePath string, index1 int) (string, error) { + if index1 == 0 { // some linters, e.g. 
gosec can do it: it really means first line + index1 = 1 + } + + const index1To0Offset = -1 + rawLine, err := lc.getRawLine(filePath, index1+index1To0Offset) + if err != nil { + return "", err + } + + return string(bytes.Trim(rawLine, "\r")), nil +} + +func (lc *LineCache) getRawLine(filePath string, index0 int) ([]byte, error) { + fc, err := lc.getFileCache(filePath) + if err != nil { + return nil, errors.Wrapf(err, "failed to get file %s lines cache", filePath) + } + + if index0 < 0 { + return nil, fmt.Errorf("invalid file line index0 < 0: %d", index0) + } + + if index0 >= len(fc) { + return nil, fmt.Errorf("invalid file line index0 (%d) >= len(fc) (%d)", index0, len(fc)) + } + + return fc[index0], nil +} + +func (lc *LineCache) getFileCache(filePath string) (fileLinesCache, error) { + loadedFc, ok := lc.files.Load(filePath) + if ok { + return loadedFc.(fileLinesCache), nil + } + + fileBytes, err := lc.fileCache.GetFileBytes(filePath) + if err != nil { + return nil, errors.Wrapf(err, "can't get file %s bytes from cache", filePath) + } + + fc := bytes.Split(fileBytes, []byte("\n")) + lc.files.Store(filePath, fileLinesCache(fc)) + return fc, nil +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/asciicheck.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/asciicheck.go new file mode 100644 index 000000000..1bf8c7b7d --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/asciicheck.go @@ -0,0 +1,19 @@ +package golinters + +import ( + "github.com/tdakkota/asciicheck" + "golang.org/x/tools/go/analysis" + + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" +) + +func NewAsciicheck() *goanalysis.Linter { + return goanalysis.NewLinter( + "asciicheck", + "Simple linter to check that your code does not contain non-ASCII identifiers", + []*analysis.Analyzer{ + asciicheck.NewAnalyzer(), + }, + nil, + ).WithLoadMode(goanalysis.LoadModeSyntax) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/bodyclose.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/bodyclose.go new file mode 100644 index 000000000..0e03813d1 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/bodyclose.go @@ -0,0 +1,21 @@ +package golinters + +import ( + "github.com/timakin/bodyclose/passes/bodyclose" + "golang.org/x/tools/go/analysis" + + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" +) + +func NewBodyclose() *goanalysis.Linter { + analyzers := []*analysis.Analyzer{ + bodyclose.Analyzer, + } + + return goanalysis.NewLinter( + "bodyclose", + "checks whether HTTP response body is closed successfully", + analyzers, + nil, + ).WithLoadMode(goanalysis.LoadModeTypesInfo) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/cyclop.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/cyclop.go new file mode 100644 index 000000000..6f55b2797 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/cyclop.go @@ -0,0 +1,39 @@ +package golinters + +import ( + "github.com/bkielbasa/cyclop/pkg/analyzer" + "golang.org/x/tools/go/analysis" + + "github.com/golangci/golangci-lint/pkg/config" + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" +) + +const cyclopName = "cyclop" + +func NewCyclop(settings *config.Cyclop) *goanalysis.Linter { + a := analyzer.NewAnalyzer() + + var cfg map[string]map[string]interface{} + if settings != nil { + d := map[string]interface{}{ + "skipTests": settings.SkipTests, + } + + if settings.MaxComplexity != 0 { + d["maxComplexity"] = 
settings.MaxComplexity + } + + if settings.PackageAverage != 0 { + d["packageAverage"] = settings.PackageAverage + } + + cfg = map[string]map[string]interface{}{a.Name: d} + } + + return goanalysis.NewLinter( + cyclopName, + "checks function and package cyclomatic complexity", + []*analysis.Analyzer{a}, + cfg, + ).WithLoadMode(goanalysis.LoadModeTypesInfo) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/deadcode.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/deadcode.go new file mode 100644 index 000000000..6ff38909f --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/deadcode.go @@ -0,0 +1,52 @@ +package golinters + +import ( + "fmt" + "sync" + + deadcodeAPI "github.com/golangci/go-misc/deadcode" + "golang.org/x/tools/go/analysis" + + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" + "github.com/golangci/golangci-lint/pkg/lint/linter" + "github.com/golangci/golangci-lint/pkg/result" +) + +func NewDeadcode() *goanalysis.Linter { + const linterName = "deadcode" + var mu sync.Mutex + var resIssues []goanalysis.Issue + + analyzer := &analysis.Analyzer{ + Name: linterName, + Doc: goanalysis.TheOnlyanalyzerDoc, + Run: func(pass *analysis.Pass) (interface{}, error) { + prog := goanalysis.MakeFakeLoaderProgram(pass) + issues, err := deadcodeAPI.Run(prog) + if err != nil { + return nil, err + } + res := make([]goanalysis.Issue, 0, len(issues)) + for _, i := range issues { + res = append(res, goanalysis.NewIssue(&result.Issue{ + Pos: i.Pos, + Text: fmt.Sprintf("%s is unused", formatCode(i.UnusedIdentName, nil)), + FromLinter: linterName, + }, pass)) + } + mu.Lock() + resIssues = append(resIssues, res...) + mu.Unlock() + + return nil, nil + }, + } + return goanalysis.NewLinter( + linterName, + "Finds unused code", + []*analysis.Analyzer{analyzer}, + nil, + ).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue { + return resIssues + }).WithLoadMode(goanalysis.LoadModeTypesInfo) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/depguard.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/depguard.go new file mode 100644 index 000000000..aa372e956 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/depguard.go @@ -0,0 +1,112 @@ +package golinters + +import ( + "fmt" + "strings" + "sync" + + "github.com/OpenPeeDeeP/depguard" + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/loader" //nolint:staticcheck // require changes in github.com/OpenPeeDeeP/depguard + + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" + "github.com/golangci/golangci-lint/pkg/lint/linter" + "github.com/golangci/golangci-lint/pkg/result" +) + +func setDepguardListType(dg *depguard.Depguard, lintCtx *linter.Context) error { + listType := lintCtx.Settings().Depguard.ListType + var found bool + dg.ListType, found = depguard.StringToListType[strings.ToLower(listType)] + if !found { + if listType != "" { + return fmt.Errorf("unsure what list type %s is", listType) + } + dg.ListType = depguard.LTBlacklist + } + + return nil +} + +func setupDepguardPackages(dg *depguard.Depguard, lintCtx *linter.Context) { + if dg.ListType == depguard.LTBlacklist { + // if the list type was a blacklist the packages with error messages should + // be included in the blacklist package list + + noMessagePackages := make(map[string]bool) + for _, pkg := range dg.Packages { + noMessagePackages[pkg] = true + } + + for pkg := range lintCtx.Settings().Depguard.PackagesWithErrorMessage { + if _, ok := 
noMessagePackages[pkg]; !ok { + dg.Packages = append(dg.Packages, pkg) + } + } + } +} + +func NewDepguard() *goanalysis.Linter { + const linterName = "depguard" + var mu sync.Mutex + var resIssues []goanalysis.Issue + + analyzer := &analysis.Analyzer{ + Name: linterName, + Doc: goanalysis.TheOnlyanalyzerDoc, + } + return goanalysis.NewLinter( + linterName, + "Go linter that checks if package imports are in a list of acceptable packages", + []*analysis.Analyzer{analyzer}, + nil, + ).WithContextSetter(func(lintCtx *linter.Context) { + dgSettings := &lintCtx.Settings().Depguard + analyzer.Run = func(pass *analysis.Pass) (interface{}, error) { + prog := goanalysis.MakeFakeLoaderProgram(pass) + dg := &depguard.Depguard{ + Packages: dgSettings.Packages, + IncludeGoRoot: dgSettings.IncludeGoRoot, + } + if err := setDepguardListType(dg, lintCtx); err != nil { + return nil, err + } + setupDepguardPackages(dg, lintCtx) + + loadConfig := &loader.Config{ + Cwd: "", // fallbacked to os.Getcwd + Build: nil, // fallbacked to build.Default + } + issues, err := dg.Run(loadConfig, prog) + if err != nil { + return nil, err + } + if len(issues) == 0 { + return nil, nil + } + msgSuffix := "is in the blacklist" + if dg.ListType == depguard.LTWhitelist { + msgSuffix = "is not in the whitelist" + } + res := make([]goanalysis.Issue, 0, len(issues)) + for _, i := range issues { + userSuppliedMsgSuffix := dgSettings.PackagesWithErrorMessage[i.PackageName] + if userSuppliedMsgSuffix != "" { + userSuppliedMsgSuffix = ": " + userSuppliedMsgSuffix + } + res = append(res, goanalysis.NewIssue(&result.Issue{ + Pos: i.Position, + Text: fmt.Sprintf("%s %s%s", formatCode(i.PackageName, lintCtx.Cfg), msgSuffix, userSuppliedMsgSuffix), + FromLinter: linterName, + }, pass)) + } + mu.Lock() + resIssues = append(resIssues, res...) + mu.Unlock() + + return nil, nil + } + }).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue { + return resIssues + }).WithLoadMode(goanalysis.LoadModeTypesInfo) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/dogsled.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/dogsled.go new file mode 100644 index 000000000..8978ff913 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/dogsled.go @@ -0,0 +1,97 @@ +package golinters + +import ( + "fmt" + "go/ast" + "go/token" + "sync" + + "golang.org/x/tools/go/analysis" + + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" + "github.com/golangci/golangci-lint/pkg/lint/linter" + "github.com/golangci/golangci-lint/pkg/result" +) + +const dogsledLinterName = "dogsled" + +func NewDogsled() *goanalysis.Linter { + var mu sync.Mutex + var resIssues []goanalysis.Issue + + analyzer := &analysis.Analyzer{ + Name: dogsledLinterName, + Doc: goanalysis.TheOnlyanalyzerDoc, + } + return goanalysis.NewLinter( + dogsledLinterName, + "Checks assignments with too many blank identifiers (e.g. x, _, _, _, := f())", + []*analysis.Analyzer{analyzer}, + nil, + ).WithContextSetter(func(lintCtx *linter.Context) { + analyzer.Run = func(pass *analysis.Pass) (interface{}, error) { + var pkgIssues []goanalysis.Issue + for _, f := range pass.Files { + v := returnsVisitor{ + maxBlanks: lintCtx.Settings().Dogsled.MaxBlankIdentifiers, + f: pass.Fset, + } + ast.Walk(&v, f) + for i := range v.issues { + pkgIssues = append(pkgIssues, goanalysis.NewIssue(&v.issues[i], pass)) + } + } + + mu.Lock() + resIssues = append(resIssues, pkgIssues...) 
+ mu.Unlock() + + return nil, nil + } + }).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue { + return resIssues + }).WithLoadMode(goanalysis.LoadModeSyntax) +} + +type returnsVisitor struct { + f *token.FileSet + maxBlanks int + issues []result.Issue +} + +func (v *returnsVisitor) Visit(node ast.Node) ast.Visitor { + funcDecl, ok := node.(*ast.FuncDecl) + if !ok { + return v + } + if funcDecl.Body == nil { + return v + } + + for _, expr := range funcDecl.Body.List { + assgnStmt, ok := expr.(*ast.AssignStmt) + if !ok { + continue + } + + numBlank := 0 + for _, left := range assgnStmt.Lhs { + ident, ok := left.(*ast.Ident) + if !ok { + continue + } + if ident.Name == "_" { + numBlank++ + } + } + + if numBlank > v.maxBlanks { + v.issues = append(v.issues, result.Issue{ + FromLinter: dogsledLinterName, + Text: fmt.Sprintf("declaration has %v blank identifiers", numBlank), + Pos: v.f.Position(assgnStmt.Pos()), + }) + } + } + return v +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/dupl.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/dupl.go new file mode 100644 index 000000000..ed1c4fcbd --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/dupl.go @@ -0,0 +1,83 @@ +package golinters + +import ( + "fmt" + "go/token" + "sync" + + duplAPI "github.com/golangci/dupl" + "github.com/pkg/errors" + "golang.org/x/tools/go/analysis" + + "github.com/golangci/golangci-lint/pkg/fsutils" + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" + "github.com/golangci/golangci-lint/pkg/lint/linter" + "github.com/golangci/golangci-lint/pkg/result" +) + +const duplLinterName = "dupl" + +func NewDupl() *goanalysis.Linter { + var mu sync.Mutex + var resIssues []goanalysis.Issue + + analyzer := &analysis.Analyzer{ + Name: duplLinterName, + Doc: goanalysis.TheOnlyanalyzerDoc, + } + return goanalysis.NewLinter( + duplLinterName, + "Tool for code clone detection", + []*analysis.Analyzer{analyzer}, + nil, + ).WithContextSetter(func(lintCtx *linter.Context) { + analyzer.Run = func(pass *analysis.Pass) (interface{}, error) { + var fileNames []string + for _, f := range pass.Files { + pos := pass.Fset.PositionFor(f.Pos(), false) + fileNames = append(fileNames, pos.Filename) + } + + issues, err := duplAPI.Run(fileNames, lintCtx.Settings().Dupl.Threshold) + if err != nil { + return nil, err + } + + if len(issues) == 0 { + return nil, nil + } + + res := make([]goanalysis.Issue, 0, len(issues)) + for _, i := range issues { + toFilename, err := fsutils.ShortestRelPath(i.To.Filename(), "") + if err != nil { + return nil, errors.Wrapf(err, "failed to get shortest rel path for %q", i.To.Filename()) + } + dupl := fmt.Sprintf("%s:%d-%d", toFilename, i.To.LineStart(), i.To.LineEnd()) + text := fmt.Sprintf("%d-%d lines are duplicate of %s", + i.From.LineStart(), i.From.LineEnd(), + formatCode(dupl, lintCtx.Cfg)) + res = append(res, goanalysis.NewIssue(&result.Issue{ + Pos: token.Position{ + Filename: i.From.Filename(), + Line: i.From.LineStart(), + }, + LineRange: &result.Range{ + From: i.From.LineStart(), + To: i.From.LineEnd(), + }, + Text: text, + FromLinter: duplLinterName, + }, pass)) + } + + mu.Lock() + resIssues = append(resIssues, res...) 
+ mu.Unlock() + + return nil, nil + } + }).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue { + return resIssues + }).WithLoadMode(goanalysis.LoadModeSyntax) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/durationcheck.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/durationcheck.go new file mode 100644 index 000000000..9c452af50 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/durationcheck.go @@ -0,0 +1,15 @@ +package golinters + +import ( + "github.com/charithe/durationcheck" + "golang.org/x/tools/go/analysis" + + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" +) + +func NewDurationCheck() *goanalysis.Linter { + a := durationcheck.Analyzer + + return goanalysis.NewLinter(a.Name, a.Doc, []*analysis.Analyzer{a}, nil). + WithLoadMode(goanalysis.LoadModeTypesInfo) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/errcheck.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/errcheck.go new file mode 100644 index 000000000..2d9a4fc4c --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/errcheck.go @@ -0,0 +1,251 @@ +package golinters + +import ( + "bufio" + "fmt" + "os" + "os/user" + "path/filepath" + "regexp" + "strings" + "sync" + + "github.com/kisielk/errcheck/errcheck" + "github.com/pkg/errors" + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/packages" + + "github.com/golangci/golangci-lint/pkg/config" + "github.com/golangci/golangci-lint/pkg/fsutils" + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" + "github.com/golangci/golangci-lint/pkg/lint/linter" + "github.com/golangci/golangci-lint/pkg/result" +) + +func NewErrcheck() *goanalysis.Linter { + const linterName = "errcheck" + + var mu sync.Mutex + var res []goanalysis.Issue + + analyzer := &analysis.Analyzer{ + Name: linterName, + Doc: goanalysis.TheOnlyanalyzerDoc, + } + + return goanalysis.NewLinter( + linterName, + "Errcheck is a program for checking for unchecked errors "+ + "in go programs. These unchecked errors can be critical bugs in some cases", + []*analysis.Analyzer{analyzer}, + nil, + ).WithContextSetter(func(lintCtx *linter.Context) { + // copied from errcheck + checker, err := getChecker(&lintCtx.Settings().Errcheck) + if err != nil { + lintCtx.Log.Errorf("failed to get checker: %v", err) + return + } + + checker.Tags = lintCtx.Cfg.Run.BuildTags + + analyzer.Run = func(pass *analysis.Pass) (interface{}, error) { + pkg := &packages.Package{ + Fset: pass.Fset, + Syntax: pass.Files, + Types: pass.Pkg, + TypesInfo: pass.TypesInfo, + } + + errcheckIssues := checker.CheckPackage(pkg).Unique() + if len(errcheckIssues.UncheckedErrors) == 0 { + return nil, nil + } + + issues := make([]goanalysis.Issue, len(errcheckIssues.UncheckedErrors)) + for i, err := range errcheckIssues.UncheckedErrors { + var text string + if err.FuncName != "" { + text = fmt.Sprintf( + "Error return value of %s is not checked", + formatCode(err.SelectorName, lintCtx.Cfg), + ) + } else { + text = "Error return value is not checked" + } + + issues[i] = goanalysis.NewIssue( + &result.Issue{ + FromLinter: linterName, + Text: text, + Pos: err.Pos, + }, + pass, + ) + } + + mu.Lock() + res = append(res, issues...) + mu.Unlock() + + return nil, nil + } + }).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue { + return res + }).WithLoadMode(goanalysis.LoadModeTypesInfo) +} + +// parseIgnoreConfig was taken from errcheck in order to keep the API identical. 
+// https://github.com/kisielk/errcheck/blob/1787c4bee836470bf45018cfbc783650db3c6501/main.go#L25-L60 +func parseIgnoreConfig(s string) (map[string]*regexp.Regexp, error) { + if s == "" { + return nil, nil + } + + cfg := map[string]*regexp.Regexp{} + + for _, pair := range strings.Split(s, ",") { + colonIndex := strings.Index(pair, ":") + var pkg, re string + if colonIndex == -1 { + pkg = "" + re = pair + } else { + pkg = pair[:colonIndex] + re = pair[colonIndex+1:] + } + regex, err := regexp.Compile(re) + if err != nil { + return nil, err + } + cfg[pkg] = regex + } + + return cfg, nil +} + +func getChecker(errCfg *config.ErrcheckSettings) (*errcheck.Checker, error) { + ignoreConfig, err := parseIgnoreConfig(errCfg.Ignore) + if err != nil { + return nil, errors.Wrap(err, "failed to parse 'ignore' directive") + } + + checker := errcheck.Checker{ + Exclusions: errcheck.Exclusions{ + BlankAssignments: !errCfg.CheckAssignToBlank, + TypeAssertions: !errCfg.CheckTypeAssertions, + SymbolRegexpsByPackage: map[string]*regexp.Regexp{}, + Symbols: append([]string{}, errcheck.DefaultExcludedSymbols...), + }, + } + + for pkg, re := range ignoreConfig { + checker.Exclusions.SymbolRegexpsByPackage[pkg] = re + } + + if errCfg.Exclude != "" { + exclude, err := readExcludeFile(errCfg.Exclude) + if err != nil { + return nil, err + } + + checker.Exclusions.Symbols = append(checker.Exclusions.Symbols, exclude...) + } + + checker.Exclusions.Symbols = append(checker.Exclusions.Symbols, errCfg.ExcludeFunctions...) + + return &checker, nil +} + +func getFirstPathArg() string { + args := os.Args + + // skip all args ([golangci-lint, run/linters]) before files/dirs list + for len(args) != 0 { + if args[0] == "run" { + args = args[1:] + break + } + + args = args[1:] + } + + // find first file/dir arg + firstArg := "./..." 
+ for _, arg := range args { + if !strings.HasPrefix(arg, "-") { + firstArg = arg + break + } + } + + return firstArg +} + +func setupConfigFileSearch(name string) []string { + if strings.HasPrefix(name, "~") { + if u, err := user.Current(); err == nil { + name = strings.Replace(name, "~", u.HomeDir, 1) + } + } + + if filepath.IsAbs(name) { + return []string{name} + } + + firstArg := getFirstPathArg() + + absStartPath, err := filepath.Abs(firstArg) + if err != nil { + absStartPath = filepath.Clean(firstArg) + } + + // start from it + var curDir string + if fsutils.IsDir(absStartPath) { + curDir = absStartPath + } else { + curDir = filepath.Dir(absStartPath) + } + + // find all dirs from it up to the root + configSearchPaths := []string{filepath.Join(".", name)} + for { + configSearchPaths = append(configSearchPaths, filepath.Join(curDir, name)) + newCurDir := filepath.Dir(curDir) + if curDir == newCurDir || newCurDir == "" { + break + } + curDir = newCurDir + } + + return configSearchPaths +} + +func readExcludeFile(name string) ([]string, error) { + var err error + var fh *os.File + + for _, path := range setupConfigFileSearch(name) { + if fh, err = os.Open(path); err == nil { + break + } + } + + if fh == nil { + return nil, errors.Wrapf(err, "failed reading exclude file: %s", name) + } + + scanner := bufio.NewScanner(fh) + + var excludes []string + for scanner.Scan() { + excludes = append(excludes, scanner.Text()) + } + + if err := scanner.Err(); err != nil { + return nil, errors.Wrapf(err, "failed scanning file: %s", name) + } + + return excludes, nil +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/errname.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/errname.go new file mode 100644 index 000000000..7ee811347 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/errname.go @@ -0,0 +1,21 @@ +package golinters + +import ( + "github.com/Antonboom/errname/pkg/analyzer" + "golang.org/x/tools/go/analysis" + + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" +) + +func NewErrName() *goanalysis.Linter { + analyzers := []*analysis.Analyzer{ + analyzer.New(), + } + + return goanalysis.NewLinter( + "errname", + "Checks that sentinel errors are prefixed with the `Err` and error types are suffixed with the `Error`.", + analyzers, + nil, + ).WithLoadMode(goanalysis.LoadModeTypesInfo) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/errorlint.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/errorlint.go new file mode 100644 index 000000000..dd9d90161 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/errorlint.go @@ -0,0 +1,31 @@ +package golinters + +import ( + "github.com/polyfloyd/go-errorlint/errorlint" + "golang.org/x/tools/go/analysis" + + "github.com/golangci/golangci-lint/pkg/config" + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" +) + +func NewErrorLint(cfg *config.ErrorLintSettings) *goanalysis.Linter { + a := errorlint.NewAnalyzer() + + cfgMap := map[string]map[string]interface{}{} + + if cfg != nil { + cfgMap[a.Name] = map[string]interface{}{ + "errorf": cfg.Errorf, + "asserts": cfg.Asserts, + "comparison": cfg.Comparison, + } + } + + return goanalysis.NewLinter( + a.Name, + "errorlint is a linter for that can be used to find code "+ + "that will cause problems with the error wrapping scheme introduced in Go 1.13.", + []*analysis.Analyzer{a}, + cfgMap, + ).WithLoadMode(goanalysis.LoadModeTypesInfo) +} diff --git 
a/vendor/github.com/golangci/golangci-lint/pkg/golinters/exhaustive.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/exhaustive.go new file mode 100644 index 000000000..9acee6a80 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/exhaustive.go @@ -0,0 +1,27 @@ +package golinters + +import ( + "github.com/nishanths/exhaustive" + "golang.org/x/tools/go/analysis" + + "github.com/golangci/golangci-lint/pkg/config" + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" +) + +func NewExhaustive(settings *config.ExhaustiveSettings) *goanalysis.Linter { + a := exhaustive.Analyzer + + var cfg map[string]map[string]interface{} + if settings != nil { + cfg = map[string]map[string]interface{}{ + a.Name: { + exhaustive.CheckGeneratedFlag: settings.CheckGenerated, + exhaustive.DefaultSignifiesExhaustiveFlag: settings.DefaultSignifiesExhaustive, + exhaustive.IgnorePatternFlag: settings.IgnorePattern, + }, + } + } + + return goanalysis.NewLinter(a.Name, a.Doc, []*analysis.Analyzer{a}, cfg). + WithLoadMode(goanalysis.LoadModeTypesInfo) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/exhaustivestruct.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/exhaustivestruct.go new file mode 100644 index 000000000..6a1dbd71c --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/exhaustivestruct.go @@ -0,0 +1,31 @@ +package golinters + +import ( + "strings" + + "github.com/mbilski/exhaustivestruct/pkg/analyzer" + "golang.org/x/tools/go/analysis" + + "github.com/golangci/golangci-lint/pkg/config" + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" +) + +func NewExhaustiveStruct(settings *config.ExhaustiveStructSettings) *goanalysis.Linter { + a := analyzer.Analyzer + + var cfg map[string]map[string]interface{} + if settings != nil { + cfg = map[string]map[string]interface{}{ + a.Name: { + "struct_patterns": strings.Join(settings.StructPatterns, ","), + }, + } + } + + return goanalysis.NewLinter( + a.Name, + a.Doc, + []*analysis.Analyzer{a}, + cfg, + ).WithLoadMode(goanalysis.LoadModeTypesInfo) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/exportloopref.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/exportloopref.go new file mode 100644 index 000000000..1131c575b --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/exportloopref.go @@ -0,0 +1,19 @@ +package golinters + +import ( + "github.com/kyoh86/exportloopref" + "golang.org/x/tools/go/analysis" + + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" +) + +func NewExportLoopRef() *goanalysis.Linter { + a := exportloopref.Analyzer + + return goanalysis.NewLinter( + a.Name, + a.Doc, + []*analysis.Analyzer{a}, + nil, + ).WithLoadMode(goanalysis.LoadModeTypesInfo) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/forbidigo.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/forbidigo.go new file mode 100644 index 000000000..2fa9d5183 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/forbidigo.go @@ -0,0 +1,70 @@ +package golinters + +import ( + "sync" + + "github.com/ashanbrown/forbidigo/forbidigo" + "github.com/pkg/errors" + "golang.org/x/tools/go/analysis" + + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" + "github.com/golangci/golangci-lint/pkg/lint/linter" + "github.com/golangci/golangci-lint/pkg/result" +) + +func NewForbidigo() *goanalysis.Linter { + const linterName = "forbidigo" + var mu sync.Mutex + var resIssues 
[]goanalysis.Issue + + analyzer := &analysis.Analyzer{ + Name: linterName, + Doc: goanalysis.TheOnlyanalyzerDoc, + } + return goanalysis.NewLinter( + linterName, + "Forbids identifiers", + []*analysis.Analyzer{analyzer}, + nil, + ).WithContextSetter(func(lintCtx *linter.Context) { + s := &lintCtx.Settings().Forbidigo + + analyzer.Run = func(pass *analysis.Pass) (interface{}, error) { + var res []goanalysis.Issue + options := []forbidigo.Option{ + forbidigo.OptionExcludeGodocExamples(s.ExcludeGodocExamples), + // disable "//permit" directives so only "//nolint" directives matters within golangci lint + forbidigo.OptionIgnorePermitDirectives(true), + } + forbid, err := forbidigo.NewLinter(s.Forbid, options...) + if err != nil { + return nil, errors.Wrapf(err, "failed to create linter %q", linterName) + } + + for _, file := range pass.Files { + hints, err := forbid.Run(pass.Fset, file) + if err != nil { + return nil, errors.Wrapf(err, "forbidigo linter failed on file %q", file.Name.String()) + } + for _, hint := range hints { + res = append(res, goanalysis.NewIssue(&result.Issue{ + Pos: hint.Position(), + Text: hint.Details(), + FromLinter: linterName, + }, pass)) + } + } + + if len(res) == 0 { + return nil, nil + } + + mu.Lock() + resIssues = append(resIssues, res...) + mu.Unlock() + return nil, nil + } + }).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue { + return resIssues + }).WithLoadMode(goanalysis.LoadModeSyntax) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/forcetypeassert.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/forcetypeassert.go new file mode 100644 index 000000000..873c833b5 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/forcetypeassert.go @@ -0,0 +1,19 @@ +package golinters + +import ( + "github.com/gostaticanalysis/forcetypeassert" + "golang.org/x/tools/go/analysis" + + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" +) + +func NewForceTypeAssert() *goanalysis.Linter { + a := forcetypeassert.Analyzer + + return goanalysis.NewLinter( + a.Name, + "finds forced type assertions", + []*analysis.Analyzer{a}, + nil, + ).WithLoadMode(goanalysis.LoadModeSyntax) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/funlen.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/funlen.go new file mode 100644 index 000000000..29cb6b7ef --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/funlen.go @@ -0,0 +1,64 @@ +package golinters + +import ( + "go/token" + "strings" + "sync" + + "github.com/ultraware/funlen" + "golang.org/x/tools/go/analysis" + + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" + "github.com/golangci/golangci-lint/pkg/lint/linter" + "github.com/golangci/golangci-lint/pkg/result" +) + +const funlenLinterName = "funlen" + +func NewFunlen() *goanalysis.Linter { + var mu sync.Mutex + var resIssues []goanalysis.Issue + + analyzer := &analysis.Analyzer{ + Name: funlenLinterName, + Doc: goanalysis.TheOnlyanalyzerDoc, + } + return goanalysis.NewLinter( + funlenLinterName, + "Tool for detection of long functions", + []*analysis.Analyzer{analyzer}, + nil, + ).WithContextSetter(func(lintCtx *linter.Context) { + analyzer.Run = func(pass *analysis.Pass) (interface{}, error) { + var issues []funlen.Message + for _, file := range pass.Files { + fileIssues := funlen.Run(file, pass.Fset, lintCtx.Settings().Funlen.Lines, lintCtx.Settings().Funlen.Statements) + issues = append(issues, fileIssues...) 
+ } + + if len(issues) == 0 { + return nil, nil + } + + res := make([]goanalysis.Issue, len(issues)) + for k, i := range issues { + res[k] = goanalysis.NewIssue(&result.Issue{ + Pos: token.Position{ + Filename: i.Pos.Filename, + Line: i.Pos.Line, + }, + Text: strings.TrimRight(i.Message, "\n"), + FromLinter: funlenLinterName, + }, pass) + } + + mu.Lock() + resIssues = append(resIssues, res...) + mu.Unlock() + + return nil, nil + } + }).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue { + return resIssues + }).WithLoadMode(goanalysis.LoadModeSyntax) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gci.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gci.go new file mode 100644 index 000000000..9886fc5f2 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gci.go @@ -0,0 +1,96 @@ +package golinters + +import ( + "bytes" + "fmt" + "sync" + + "github.com/daixiang0/gci/pkg/gci" + "github.com/pkg/errors" + "github.com/shazow/go-diff/difflib" + "golang.org/x/tools/go/analysis" + + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" + "github.com/golangci/golangci-lint/pkg/lint/linter" +) + +const gciName = "gci" + +func NewGci() *goanalysis.Linter { + var mu sync.Mutex + var resIssues []goanalysis.Issue + differ := difflib.New() + + analyzer := &analysis.Analyzer{ + Name: gciName, + Doc: goanalysis.TheOnlyanalyzerDoc, + } + return goanalysis.NewLinter( + gciName, + "Gci control golang package import order and make it always deterministic.", + []*analysis.Analyzer{analyzer}, + nil, + ).WithContextSetter(func(lintCtx *linter.Context) { + localFlag := lintCtx.Settings().Gci.LocalPrefixes + goimportsFlag := lintCtx.Settings().Goimports.LocalPrefixes + if localFlag == "" && goimportsFlag != "" { + localFlag = goimportsFlag + } + + analyzer.Run = func(pass *analysis.Pass) (interface{}, error) { + var fileNames []string + for _, f := range pass.Files { + pos := pass.Fset.PositionFor(f.Pos(), false) + fileNames = append(fileNames, pos.Filename) + } + + var issues []goanalysis.Issue + + flagSet := gci.FlagSet{ + LocalFlag: gci.ParseLocalFlag(localFlag), + } + + for _, f := range fileNames { + source, result, err := gci.Run(f, &flagSet) + if err != nil { + return nil, err + } + if result == nil { + continue + } + + diff := bytes.Buffer{} + _, err = diff.WriteString(fmt.Sprintf("--- %[1]s\n+++ %[1]s\n", f)) + if err != nil { + return nil, fmt.Errorf("can't write diff header: %v", err) + } + + err = differ.Diff(&diff, bytes.NewReader(source), bytes.NewReader(result)) + if err != nil { + return nil, fmt.Errorf("can't get gci diff output: %v", err) + } + + is, err := extractIssuesFromPatch(diff.String(), lintCtx.Log, lintCtx, gciName) + if err != nil { + return nil, errors.Wrapf(err, "can't extract issues from gci diff output %q", diff.String()) + } + + for i := range is { + issues = append(issues, goanalysis.NewIssue(&is[i], pass)) + } + } + + if len(issues) == 0 { + return nil, nil + } + + mu.Lock() + resIssues = append(resIssues, issues...) 
+ mu.Unlock() + + return nil, nil + } + }).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue { + return resIssues + }).WithLoadMode(goanalysis.LoadModeSyntax) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/adapters.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/adapters.go new file mode 100644 index 000000000..b702d1660 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/adapters.go @@ -0,0 +1,36 @@ +package goanalysis + +import ( + "go/types" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/loader" //nolint:staticcheck // it's an adapter for golang.org/x/tools/go/packages +) + +func MakeFakeLoaderProgram(pass *analysis.Pass) *loader.Program { + prog := &loader.Program{ + Fset: pass.Fset, + Created: []*loader.PackageInfo{ + { + Pkg: pass.Pkg, + Importable: true, // not used + TransitivelyErrorFree: true, // TODO + + Files: pass.Files, + Errors: nil, + Info: *pass.TypesInfo, + }, + }, + AllPackages: map[*types.Package]*loader.PackageInfo{ + pass.Pkg: { + Pkg: pass.Pkg, + Importable: true, + TransitivelyErrorFree: true, + Files: pass.Files, + Errors: nil, + Info: *pass.TypesInfo, + }, + }, + } + return prog +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/errors.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/errors.go new file mode 100644 index 000000000..13b9ccf0a --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/errors.go @@ -0,0 +1,72 @@ +package goanalysis + +import ( + "fmt" + + "github.com/pkg/errors" + "golang.org/x/tools/go/packages" + + "github.com/golangci/golangci-lint/pkg/lint/linter" + libpackages "github.com/golangci/golangci-lint/pkg/packages" + "github.com/golangci/golangci-lint/pkg/result" +) + +type IllTypedError struct { + Pkg *packages.Package +} + +func (e *IllTypedError) Error() string { + return fmt.Sprintf("errors in package: %v", e.Pkg.Errors) +} + +func buildIssuesFromIllTypedError(errs []error, lintCtx *linter.Context) ([]result.Issue, error) { + var issues []result.Issue + uniqReportedIssues := map[string]bool{} + + var other error + + for _, err := range errs { + err := err + + var ill *IllTypedError + if !errors.As(err, &ill) { + if other == nil { + other = err + } + continue + } + + for _, err := range libpackages.ExtractErrors(ill.Pkg) { + i, perr := parseError(err) + if perr != nil { // failed to parse + if uniqReportedIssues[err.Msg] { + continue + } + uniqReportedIssues[err.Msg] = true + lintCtx.Log.Errorf("typechecking error: %s", err.Msg) + } else { + i.Pkg = ill.Pkg // to save to cache later + issues = append(issues, *i) + } + } + } + + if len(issues) == 0 && other != nil { + return nil, other + } + + return issues, nil +} + +func parseError(srcErr packages.Error) (*result.Issue, error) { + pos, err := libpackages.ParseErrorPosition(srcErr.Pos) + if err != nil { + return nil, err + } + + return &result.Issue{ + Pos: *pos, + Text: srcErr.Msg, + FromLinter: "typecheck", + }, nil +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/issue.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/issue.go new file mode 100644 index 000000000..f331a3ab9 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/issue.go @@ -0,0 +1,31 @@ +package goanalysis + +import ( + "go/token" + + "golang.org/x/tools/go/analysis" + + "github.com/golangci/golangci-lint/pkg/result" +) + +type Issue 
struct { + result.Issue + Pass *analysis.Pass +} + +func NewIssue(i *result.Issue, pass *analysis.Pass) Issue { + return Issue{ + Issue: *i, + Pass: pass, + } +} + +type EncodingIssue struct { + FromLinter string + Text string + Pos token.Position + LineRange *result.Range + Replacement *result.Replacement + ExpectNoLint bool + ExpectedNoLintLinter string +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/linter.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/linter.go new file mode 100644 index 000000000..ef49e4284 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/linter.go @@ -0,0 +1,213 @@ +package goanalysis + +import ( + "context" + "flag" + "fmt" + "strings" + + "github.com/pkg/errors" + "golang.org/x/tools/go/analysis" + + "github.com/golangci/golangci-lint/pkg/lint/linter" + "github.com/golangci/golangci-lint/pkg/result" +) + +const ( + TheOnlyAnalyzerName = "the_only_name" + TheOnlyanalyzerDoc = "the_only_doc" +) + +type LoadMode int + +func (loadMode LoadMode) String() string { + switch loadMode { + case LoadModeNone: + return "none" + case LoadModeSyntax: + return "syntax" + case LoadModeTypesInfo: + return "types info" + case LoadModeWholeProgram: + return "whole program" + } + panic(fmt.Sprintf("unknown load mode %d", loadMode)) +} + +const ( + LoadModeNone LoadMode = iota + LoadModeSyntax + LoadModeTypesInfo + LoadModeWholeProgram +) + +type Linter struct { + name, desc string + analyzers []*analysis.Analyzer + cfg map[string]map[string]interface{} + issuesReporter func(*linter.Context) []Issue + contextSetter func(*linter.Context) + loadMode LoadMode + needUseOriginalPackages bool +} + +func NewLinter(name, desc string, analyzers []*analysis.Analyzer, cfg map[string]map[string]interface{}) *Linter { + return &Linter{name: name, desc: desc, analyzers: analyzers, cfg: cfg} +} + +func (lnt *Linter) Run(_ context.Context, lintCtx *linter.Context) ([]result.Issue, error) { + if err := lnt.preRun(lintCtx); err != nil { + return nil, err + } + + return runAnalyzers(lnt, lintCtx) +} + +func (lnt *Linter) UseOriginalPackages() { + lnt.needUseOriginalPackages = true +} + +func (lnt *Linter) LoadMode() LoadMode { + return lnt.loadMode +} + +func (lnt *Linter) WithLoadMode(loadMode LoadMode) *Linter { + lnt.loadMode = loadMode + return lnt +} + +func (lnt *Linter) WithIssuesReporter(r func(*linter.Context) []Issue) *Linter { + lnt.issuesReporter = r + return lnt +} + +func (lnt *Linter) WithContextSetter(cs func(*linter.Context)) *Linter { + lnt.contextSetter = cs + return lnt +} + +func (lnt *Linter) Name() string { + return lnt.name +} + +func (lnt *Linter) Desc() string { + return lnt.desc +} + +func (lnt *Linter) allAnalyzerNames() []string { + var ret []string + for _, a := range lnt.analyzers { + ret = append(ret, a.Name) + } + return ret +} + +func (lnt *Linter) configureAnalyzer(a *analysis.Analyzer, cfg map[string]interface{}) error { + for k, v := range cfg { + f := a.Flags.Lookup(k) + if f == nil { + validFlagNames := allFlagNames(&a.Flags) + if len(validFlagNames) == 0 { + return fmt.Errorf("analyzer doesn't have settings") + } + + return fmt.Errorf("analyzer doesn't have setting %q, valid settings: %v", + k, validFlagNames) + } + + if err := f.Value.Set(valueToString(v)); err != nil { + return errors.Wrapf(err, "failed to set analyzer setting %q with value %v", k, v) + } + } + + return nil +} + +func (lnt *Linter) configure() error { + analyzersMap := map[string]*analysis.Analyzer{} + 
for _, a := range lnt.analyzers { + analyzersMap[a.Name] = a + } + + for analyzerName, analyzerSettings := range lnt.cfg { + a := analyzersMap[analyzerName] + if a == nil { + return fmt.Errorf("settings key %q must be valid analyzer name, valid analyzers: %v", + analyzerName, lnt.allAnalyzerNames()) + } + + if err := lnt.configureAnalyzer(a, analyzerSettings); err != nil { + return errors.Wrapf(err, "failed to configure analyzer %s", analyzerName) + } + } + + return nil +} + +func (lnt *Linter) preRun(lintCtx *linter.Context) error { + if err := analysis.Validate(lnt.analyzers); err != nil { + return errors.Wrap(err, "failed to validate analyzers") + } + + if err := lnt.configure(); err != nil { + return errors.Wrap(err, "failed to configure analyzers") + } + + if lnt.contextSetter != nil { + lnt.contextSetter(lintCtx) + } + + return nil +} + +func (lnt *Linter) getName() string { + return lnt.name +} + +func (lnt *Linter) getLinterNameForDiagnostic(*Diagnostic) string { + return lnt.name +} + +func (lnt *Linter) getAnalyzers() []*analysis.Analyzer { + return lnt.analyzers +} + +func (lnt *Linter) useOriginalPackages() bool { + return lnt.needUseOriginalPackages +} + +func (lnt *Linter) reportIssues(lintCtx *linter.Context) []Issue { + if lnt.issuesReporter != nil { + return lnt.issuesReporter(lintCtx) + } + return nil +} + +func (lnt *Linter) getLoadMode() LoadMode { + return lnt.loadMode +} + +func allFlagNames(fs *flag.FlagSet) []string { + var ret []string + fs.VisitAll(func(f *flag.Flag) { + ret = append(ret, f.Name) + }) + return ret +} + +func valueToString(v interface{}) string { + if ss, ok := v.([]string); ok { + return strings.Join(ss, ",") + } + + if is, ok := v.([]interface{}); ok { + var ss []string + for _, i := range is { + ss = append(ss, fmt.Sprint(i)) + } + + return valueToString(ss) + } + + return fmt.Sprint(v) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/load/guard.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/load/guard.go new file mode 100644 index 000000000..ab7775cc8 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/load/guard.go @@ -0,0 +1,30 @@ +package load + +import ( + "sync" + + "golang.org/x/tools/go/packages" +) + +type Guard struct { + loadMutexes map[*packages.Package]*sync.Mutex + mutex sync.Mutex +} + +func NewGuard() *Guard { + return &Guard{ + loadMutexes: map[*packages.Package]*sync.Mutex{}, + } +} + +func (g *Guard) AddMutexForPkg(pkg *packages.Package) { + g.loadMutexes[pkg] = &sync.Mutex{} +} + +func (g *Guard) MutexForPkg(pkg *packages.Package) *sync.Mutex { + return g.loadMutexes[pkg] +} + +func (g *Guard) Mutex() *sync.Mutex { + return &g.mutex +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/metalinter.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/metalinter.go new file mode 100644 index 000000000..5c24d1096 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/metalinter.go @@ -0,0 +1,90 @@ +package goanalysis + +import ( + "context" + + "github.com/pkg/errors" + "golang.org/x/tools/go/analysis" + + "github.com/golangci/golangci-lint/pkg/lint/linter" + "github.com/golangci/golangci-lint/pkg/result" +) + +type MetaLinter struct { + linters []*Linter + analyzerToLinterName map[*analysis.Analyzer]string +} + +func NewMetaLinter(linters []*Linter) *MetaLinter { + ml := &MetaLinter{linters: linters} + ml.analyzerToLinterName = ml.getAnalyzerToLinterNameMapping() 
+ return ml +} + +func (ml MetaLinter) Run(_ context.Context, lintCtx *linter.Context) ([]result.Issue, error) { + for _, l := range ml.linters { + if err := l.preRun(lintCtx); err != nil { + return nil, errors.Wrapf(err, "failed to pre-run %s", l.Name()) + } + } + + return runAnalyzers(ml, lintCtx) +} + +func (ml MetaLinter) Name() string { + return "goanalysis_metalinter" +} + +func (ml MetaLinter) Desc() string { + return "" +} + +func (ml MetaLinter) getLoadMode() LoadMode { + loadMode := LoadModeNone + for _, l := range ml.linters { + if l.loadMode > loadMode { + loadMode = l.loadMode + } + } + return loadMode +} + +func (ml MetaLinter) getAnalyzers() []*analysis.Analyzer { + var allAnalyzers []*analysis.Analyzer + for _, l := range ml.linters { + allAnalyzers = append(allAnalyzers, l.analyzers...) + } + return allAnalyzers +} + +func (ml MetaLinter) getName() string { + return "metalinter" +} + +func (ml MetaLinter) useOriginalPackages() bool { + return false // `unused` can't be run by this metalinter +} + +func (ml MetaLinter) reportIssues(lintCtx *linter.Context) []Issue { + var ret []Issue + for _, lnt := range ml.linters { + if lnt.issuesReporter != nil { + ret = append(ret, lnt.issuesReporter(lintCtx)...) + } + } + return ret +} + +func (ml MetaLinter) getLinterNameForDiagnostic(diag *Diagnostic) string { + return ml.analyzerToLinterName[diag.Analyzer] +} + +func (ml MetaLinter) getAnalyzerToLinterNameMapping() map[*analysis.Analyzer]string { + analyzerToLinterName := map[*analysis.Analyzer]string{} + for _, l := range ml.linters { + for _, a := range l.analyzers { + analyzerToLinterName[a] = l.Name() + } + } + return analyzerToLinterName +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/runner.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/runner.go new file mode 100644 index 000000000..8b460d16b --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/runner.go @@ -0,0 +1,342 @@ +// checker is a partial copy of https://github.com/golang/tools/blob/master/go/analysis/internal/checker +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package goanalysis defines the implementation of the checker commands. +// The same code drives the multi-analysis driver, the single-analysis +// driver that is conventionally provided for convenience along with +// each analysis package, and the test driver. 
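Every adapter in the files above follows the same recipe: wrap one or more *analysis.Analyzer values in a goanalysis.Linter, pass per-analyzer settings as a nested map keyed by analyzer name, and declare how much type information is needed through the load mode. A minimal sketch of that shape for a hypothetical analyzer; NewMyLint and the "max-len" flag are illustrative and not part of this patch:

package golinters

import (
    "golang.org/x/tools/go/analysis"

    "github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
)

// NewMyLint shows only the adapter shape; it is not a real linter.
func NewMyLint(a *analysis.Analyzer, maxLen int) *goanalysis.Linter {
    // Settings are analyzer name -> flag name -> value; the runner applies
    // them to the analyzer's flag.FlagSet (see configureAnalyzer in linter.go).
    cfg := map[string]map[string]interface{}{
        a.Name: {"max-len": maxLen}, // assumes the analyzer registers a "max-len" flag
    }

    return goanalysis.NewLinter(a.Name, a.Doc, []*analysis.Analyzer{a}, cfg).
        WithLoadMode(goanalysis.LoadModeTypesInfo) // the analyzer reads pass.TypesInfo
}

Linters that need golangci-lint's own settings, or that build result.Issue values themselves, additionally chain WithContextSetter and WithIssuesReporter and collect issues under a mutex, as errcheck, forbidigo, funlen and gci do above.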
+package goanalysis + +import ( + "encoding/gob" + "go/token" + "runtime" + "sort" + "sync" + + "github.com/pkg/errors" + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/packages" + + "github.com/golangci/golangci-lint/internal/errorutil" + "github.com/golangci/golangci-lint/internal/pkgcache" + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis/load" + "github.com/golangci/golangci-lint/pkg/logutils" + "github.com/golangci/golangci-lint/pkg/timeutils" +) + +var ( + debugf = logutils.Debug("goanalysis") + + analyzeDebugf = logutils.Debug("goanalysis/analyze") + isMemoryDebug = logutils.HaveDebugTag("goanalysis/memory") + issuesCacheDebugf = logutils.Debug("goanalysis/issues/cache") + + factsDebugf = logutils.Debug("goanalysis/facts") + factsCacheDebugf = logutils.Debug("goanalysis/facts/cache") + factsInheritDebugf = logutils.Debug("goanalysis/facts/inherit") + factsExportDebugf = logutils.Debug("goanalysis/facts") + isFactsExportDebug = logutils.HaveDebugTag("goanalysis/facts/export") +) + +type Diagnostic struct { + analysis.Diagnostic + Analyzer *analysis.Analyzer + Position token.Position + Pkg *packages.Package +} + +type runner struct { + log logutils.Log + prefix string // ensure unique analyzer names + pkgCache *pkgcache.Cache + loadGuard *load.Guard + loadMode LoadMode + passToPkg map[*analysis.Pass]*packages.Package + passToPkgGuard sync.Mutex + sw *timeutils.Stopwatch +} + +func newRunner(prefix string, logger logutils.Log, pkgCache *pkgcache.Cache, loadGuard *load.Guard, + loadMode LoadMode, sw *timeutils.Stopwatch) *runner { + return &runner{ + prefix: prefix, + log: logger, + pkgCache: pkgCache, + loadGuard: loadGuard, + loadMode: loadMode, + passToPkg: map[*analysis.Pass]*packages.Package{}, + sw: sw, + } +} + +// Run loads the packages specified by args using go/packages, +// then applies the specified analyzers to them. +// Analysis flags must already have been set. +// It provides most of the logic for the main functions of both the +// singlechecker and the multi-analysis commands. +// It returns the appropriate exit code. 
+func (r *runner) run(analyzers []*analysis.Analyzer, initialPackages []*packages.Package) ([]Diagnostic, + []error, map[*analysis.Pass]*packages.Package) { + debugf("Analyzing %d packages on load mode %s", len(initialPackages), r.loadMode) + defer r.pkgCache.Trim() + + roots := r.analyze(initialPackages, analyzers) + + diags, errs := extractDiagnostics(roots) + + return diags, errs, r.passToPkg +} + +type actKey struct { + *analysis.Analyzer + *packages.Package +} + +func (r *runner) markAllActions(a *analysis.Analyzer, pkg *packages.Package, markedActions map[actKey]struct{}) { + k := actKey{a, pkg} + if _, ok := markedActions[k]; ok { + return + } + + for _, req := range a.Requires { + r.markAllActions(req, pkg, markedActions) + } + + if len(a.FactTypes) != 0 { + for path := range pkg.Imports { + r.markAllActions(a, pkg.Imports[path], markedActions) + } + } + + markedActions[k] = struct{}{} +} + +func (r *runner) makeAction(a *analysis.Analyzer, pkg *packages.Package, + initialPkgs map[*packages.Package]bool, actions map[actKey]*action, actAlloc *actionAllocator) *action { + k := actKey{a, pkg} + act, ok := actions[k] + if ok { + return act + } + + act = actAlloc.alloc() + act.a = a + act.pkg = pkg + act.r = r + act.isInitialPkg = initialPkgs[pkg] + act.needAnalyzeSource = initialPkgs[pkg] + act.analysisDoneCh = make(chan struct{}) + + depsCount := len(a.Requires) + if len(a.FactTypes) > 0 { + depsCount += len(pkg.Imports) + } + act.deps = make([]*action, 0, depsCount) + + // Add a dependency on each required analyzers. + for _, req := range a.Requires { + act.deps = append(act.deps, r.makeAction(req, pkg, initialPkgs, actions, actAlloc)) + } + + r.buildActionFactDeps(act, a, pkg, initialPkgs, actions, actAlloc) + + actions[k] = act + + return act +} + +func (r *runner) buildActionFactDeps(act *action, a *analysis.Analyzer, pkg *packages.Package, + initialPkgs map[*packages.Package]bool, actions map[actKey]*action, actAlloc *actionAllocator) { + // An analysis that consumes/produces facts + // must run on the package's dependencies too. + if len(a.FactTypes) == 0 { + return + } + + act.objectFacts = make(map[objectFactKey]analysis.Fact) + act.packageFacts = make(map[packageFactKey]analysis.Fact) + + paths := make([]string, 0, len(pkg.Imports)) + for path := range pkg.Imports { + paths = append(paths, path) + } + sort.Strings(paths) // for determinism + for _, path := range paths { + dep := r.makeAction(a, pkg.Imports[path], initialPkgs, actions, actAlloc) + act.deps = append(act.deps, dep) + } + + // Need to register fact types for pkgcache proper gob encoding. + for _, f := range a.FactTypes { + gob.Register(f) + } +} + +//nolint:gocritic +func (r *runner) prepareAnalysis(pkgs []*packages.Package, + analyzers []*analysis.Analyzer) (map[*packages.Package]bool, []*action, []*action) { + // Construct the action graph. + + // Each graph node (action) is one unit of analysis. + // Edges express package-to-package (vertical) dependencies, + // and analysis-to-analysis (horizontal) dependencies. + + // This place is memory-intensive: e.g. Istio project has 120k total actions. + // Therefore optimize it carefully. 
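The action graph built here is easiest to see on a toy input: one action per (analyzer, package) pair, created at most once, with horizontal edges from Analyzer.Requires and vertical edges into package imports when facts are involved. A reduced, illustrative sketch; plain strings stand in for *analysis.Analyzer and *packages.Package, and the import edges are added unconditionally rather than only for fact-producing analyzers:

package main

import "fmt"

type actionKey struct{ analyzer, pkg string }

type node struct {
    key  actionKey
    deps []*node
}

// makeNode mirrors makeAction: memoize on the (analyzer, package) key so each
// unit of work exists exactly once, then wire horizontal (Requires) and
// vertical (imports) dependencies recursively.
func makeNode(k actionKey, requires, imports map[string][]string, memo map[actionKey]*node) *node {
    if n, ok := memo[k]; ok {
        return n
    }
    n := &node{key: k}
    memo[k] = n
    for _, req := range requires[k.analyzer] {
        n.deps = append(n.deps, makeNode(actionKey{req, k.pkg}, requires, imports, memo))
    }
    for _, imp := range imports[k.pkg] {
        n.deps = append(n.deps, makeNode(actionKey{k.analyzer, imp}, requires, imports, memo))
    }
    return n
}

func main() {
    requires := map[string][]string{"nilness": {"buildssa"}}
    imports := map[string][]string{"app": {"lib"}}

    memo := map[actionKey]*node{}
    root := makeNode(actionKey{"nilness", "app"}, requires, imports, memo)
    fmt.Printf("%d actions rooted at %v\n", len(memo), root.key) // 4 actions rooted at {nilness app}
}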
+ markedActions := make(map[actKey]struct{}, len(analyzers)*len(pkgs)) + for _, a := range analyzers { + for _, pkg := range pkgs { + r.markAllActions(a, pkg, markedActions) + } + } + totalActionsCount := len(markedActions) + + actions := make(map[actKey]*action, totalActionsCount) + actAlloc := newActionAllocator(totalActionsCount) + + initialPkgs := make(map[*packages.Package]bool, len(pkgs)) + for _, pkg := range pkgs { + initialPkgs[pkg] = true + } + + // Build nodes for initial packages. + roots := make([]*action, 0, len(pkgs)*len(analyzers)) + for _, a := range analyzers { + for _, pkg := range pkgs { + root := r.makeAction(a, pkg, initialPkgs, actions, actAlloc) + root.isroot = true + roots = append(roots, root) + } + } + + allActions := make([]*action, 0, len(actions)) + for _, act := range actions { + allActions = append(allActions, act) + } + + debugf("Built %d actions", len(actions)) + + return initialPkgs, allActions, roots +} + +func (r *runner) analyze(pkgs []*packages.Package, analyzers []*analysis.Analyzer) []*action { + initialPkgs, actions, rootActions := r.prepareAnalysis(pkgs, analyzers) + + actionPerPkg := map[*packages.Package][]*action{} + for _, act := range actions { + actionPerPkg[act.pkg] = append(actionPerPkg[act.pkg], act) + } + + // Fill Imports field. + loadingPackages := map[*packages.Package]*loadingPackage{} + var dfs func(pkg *packages.Package) + dfs = func(pkg *packages.Package) { + if loadingPackages[pkg] != nil { + return + } + + imports := map[string]*loadingPackage{} + for impPath, imp := range pkg.Imports { + dfs(imp) + impLp := loadingPackages[imp] + impLp.dependents++ + imports[impPath] = impLp + } + + loadingPackages[pkg] = &loadingPackage{ + pkg: pkg, + imports: imports, + isInitial: initialPkgs[pkg], + log: r.log, + actions: actionPerPkg[pkg], + loadGuard: r.loadGuard, + dependents: 1, // self dependent + } + } + for _, act := range actions { + dfs(act.pkg) + } + + // Limit memory and IO usage. + gomaxprocs := runtime.GOMAXPROCS(-1) + debugf("Analyzing at most %d packages in parallel", gomaxprocs) + loadSem := make(chan struct{}, gomaxprocs) + + var wg sync.WaitGroup + debugf("There are %d initial and %d total packages", len(initialPkgs), len(loadingPackages)) + for _, lp := range loadingPackages { + if lp.isInitial { + wg.Add(1) + go func(lp *loadingPackage) { + lp.analyzeRecursive(r.loadMode, loadSem) + wg.Done() + }(lp) + } + } + wg.Wait() + + return rootActions +} + +//nolint:nakedret +func extractDiagnostics(roots []*action) (retDiags []Diagnostic, retErrors []error) { + extracted := make(map[*action]bool) + var extract func(*action) + var visitAll func(actions []*action) + visitAll = func(actions []*action) { + for _, act := range actions { + if !extracted[act] { + extracted[act] = true + visitAll(act.deps) + extract(act) + } + } + } + + // De-duplicate diagnostics by position (not token.Pos) to + // avoid double-reporting in source files that belong to + // multiple packages, such as foo and foo.test. + type key struct { + token.Position + *analysis.Analyzer + message string + } + seen := make(map[key]bool) + + extract = func(act *action) { + if act.err != nil { + if pe, ok := act.err.(*errorutil.PanicError); ok { + panic(pe) + } + retErrors = append(retErrors, errors.Wrap(act.err, act.a.Name)) + return + } + + if act.isroot { + for _, diag := range act.diagnostics { + // We don't display a.Name/f.Category + // as most users don't care. 
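analyze bounds loading and analysis parallelism with a buffered channel sized to GOMAXPROCS plus a WaitGroup over the initial packages. The same pattern in isolation, as a self-contained sketch with a sleep standing in for the real work:

package main

import (
    "fmt"
    "runtime"
    "sync"
    "time"
)

func main() {
    // At most GOMAXPROCS units of work run at once: acquiring a slot is a
    // channel send, releasing it is the deferred receive.
    sem := make(chan struct{}, runtime.GOMAXPROCS(-1))

    var wg sync.WaitGroup
    for i := 0; i < 16; i++ {
        wg.Add(1)
        go func(i int) {
            defer wg.Done()

            sem <- struct{}{}
            defer func() { <-sem }()

            time.Sleep(10 * time.Millisecond) // stand-in for loading and analyzing one package
            fmt.Println("done", i)
        }(i)
    }
    wg.Wait()
}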
+ + posn := act.pkg.Fset.Position(diag.Pos) + k := key{posn, act.a, diag.Message} + if seen[k] { + continue // duplicate + } + seen[k] = true + + retDiag := Diagnostic{ + Diagnostic: diag, + Analyzer: act.a, + Position: posn, + Pkg: act.pkg, + } + retDiags = append(retDiags, retDiag) + } + } + } + visitAll(roots) + return +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/runner_action.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/runner_action.go new file mode 100644 index 000000000..96c613e83 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/runner_action.go @@ -0,0 +1,381 @@ +package goanalysis + +import ( + "fmt" + "go/types" + "reflect" + "runtime/debug" + "time" + + "github.com/hashicorp/go-multierror" + "github.com/pkg/errors" + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/packages" + "golang.org/x/tools/go/types/objectpath" + + "github.com/golangci/golangci-lint/internal/errorutil" + "github.com/golangci/golangci-lint/internal/pkgcache" +) + +type actionAllocator struct { + allocatedActions []action + nextFreeIndex int +} + +func newActionAllocator(maxCount int) *actionAllocator { + return &actionAllocator{ + allocatedActions: make([]action, maxCount), + nextFreeIndex: 0, + } +} + +func (actAlloc *actionAllocator) alloc() *action { + if actAlloc.nextFreeIndex == len(actAlloc.allocatedActions) { + panic(fmt.Sprintf("Made too many allocations of actions: %d allowed", len(actAlloc.allocatedActions))) + } + act := &actAlloc.allocatedActions[actAlloc.nextFreeIndex] + actAlloc.nextFreeIndex++ + return act +} + +// An action represents one unit of analysis work: the application of +// one analysis to one package. Actions form a DAG, both within a +// package (as different analyzers are applied, either in sequence or +// parallel), and across packages (as dependencies are analyzed). 
+type action struct { + a *analysis.Analyzer + pkg *packages.Package + pass *analysis.Pass + deps []*action + objectFacts map[objectFactKey]analysis.Fact + packageFacts map[packageFactKey]analysis.Fact + result interface{} + diagnostics []analysis.Diagnostic + err error + r *runner + analysisDoneCh chan struct{} + loadCachedFactsDone bool + loadCachedFactsOk bool + isroot bool + isInitialPkg bool + needAnalyzeSource bool +} + +func (act *action) String() string { + return fmt.Sprintf("%s@%s", act.a, act.pkg) +} + +func (act *action) loadCachedFacts() bool { + if act.loadCachedFactsDone { // can't be set in parallel + return act.loadCachedFactsOk + } + + res := func() bool { + if act.isInitialPkg { + return true // load cached facts only for non-initial packages + } + + if len(act.a.FactTypes) == 0 { + return true // no need to load facts + } + + return act.loadPersistedFacts() + }() + act.loadCachedFactsDone = true + act.loadCachedFactsOk = res + return res +} + +func (act *action) waitUntilDependingAnalyzersWorked() { + for _, dep := range act.deps { + if dep.pkg == act.pkg { + <-dep.analysisDoneCh + } + } +} + +func (act *action) analyzeSafe() { + defer func() { + if p := recover(); p != nil { + act.err = errorutil.NewPanicError(fmt.Sprintf("%s: package %q (isInitialPkg: %t, needAnalyzeSource: %t): %s", + act.a.Name, act.pkg.Name, act.isInitialPkg, act.needAnalyzeSource, p), debug.Stack()) + } + }() + act.r.sw.TrackStage(act.a.Name, func() { + act.analyze() + }) +} + +func (act *action) analyze() { + defer close(act.analysisDoneCh) // unblock actions depending on this action + + if !act.needAnalyzeSource { + return + } + + defer func(now time.Time) { + analyzeDebugf("go/analysis: %s: %s: analyzed package %q in %s", act.r.prefix, act.a.Name, act.pkg.Name, time.Since(now)) + }(time.Now()) + + // Report an error if any dependency failures. + var depErrors *multierror.Error + for _, dep := range act.deps { + if dep.err == nil { + continue + } + + depErrors = multierror.Append(depErrors, errors.Cause(dep.err)) + } + if depErrors != nil { + depErrors.ErrorFormat = func(e []error) string { + return fmt.Sprintf("failed prerequisites: %v", e) + } + + act.err = depErrors + return + } + + // Plumb the output values of the dependencies + // into the inputs of this action. Also facts. + inputs := make(map[*analysis.Analyzer]interface{}) + startedAt := time.Now() + for _, dep := range act.deps { + if dep.pkg == act.pkg { + // Same package, different analysis (horizontal edge): + // in-memory outputs of prerequisite analyzers + // become inputs to this analysis pass. + inputs[dep.a] = dep.result + } else if dep.a == act.a { // (always true) + // Same analysis, different package (vertical edge): + // serialized facts produced by prerequisite analysis + // become available to this analysis pass. + inheritFacts(act, dep) + } + } + factsDebugf("%s: Inherited facts in %s", act, time.Since(startedAt)) + + // Run the analysis. 
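analyzeSafe converts a panicking analyzer into an ordinary error carrying the stack trace, so a single misbehaving analyzer cannot abort the whole run. A minimal version of that wrapper; the panicError type here is an illustrative stand-in for the internal errorutil.PanicError:

package main

import (
    "fmt"
    "runtime/debug"
)

// panicError is an illustrative stand-in for the internal PanicError type.
type panicError struct {
    msg   string
    stack []byte
}

func (e *panicError) Error() string { return e.msg }

// safeRun runs fn and turns a panic into an error with the stack attached.
func safeRun(fn func() error) (err error) {
    defer func() {
        if p := recover(); p != nil {
            err = &panicError{
                msg:   fmt.Sprintf("analyzer panicked: %v", p),
                stack: debug.Stack(),
            }
        }
    }()
    return fn()
}

func main() {
    err := safeRun(func() error { panic("boom") })
    fmt.Println(err) // analyzer panicked: boom
}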
+ pass := &analysis.Pass{ + Analyzer: act.a, + Fset: act.pkg.Fset, + Files: act.pkg.Syntax, + OtherFiles: act.pkg.OtherFiles, + Pkg: act.pkg.Types, + TypesInfo: act.pkg.TypesInfo, + TypesSizes: act.pkg.TypesSizes, + ResultOf: inputs, + Report: func(d analysis.Diagnostic) { act.diagnostics = append(act.diagnostics, d) }, + ImportObjectFact: act.importObjectFact, + ExportObjectFact: act.exportObjectFact, + ImportPackageFact: act.importPackageFact, + ExportPackageFact: act.exportPackageFact, + AllObjectFacts: act.allObjectFacts, + AllPackageFacts: act.allPackageFacts, + } + act.pass = pass + act.r.passToPkgGuard.Lock() + act.r.passToPkg[pass] = act.pkg + act.r.passToPkgGuard.Unlock() + + if act.pkg.IllTyped { + // It looks like there should be !pass.Analyzer.RunDespiteErrors + // but govet's cgocall crashes on it. Govet itself contains !pass.Analyzer.RunDespiteErrors condition here + // but it exit before it if packages.Load have failed. + act.err = errors.Wrap(&IllTypedError{Pkg: act.pkg}, "analysis skipped") + } else { + startedAt = time.Now() + act.result, act.err = pass.Analyzer.Run(pass) + analyzedIn := time.Since(startedAt) + if analyzedIn > time.Millisecond*10 { + debugf("%s: run analyzer in %s", act, analyzedIn) + } + } + + // disallow calls after Run + pass.ExportObjectFact = nil + pass.ExportPackageFact = nil + + if err := act.persistFactsToCache(); err != nil { + act.r.log.Warnf("Failed to persist facts to cache: %s", err) + } +} + +// importObjectFact implements Pass.ImportObjectFact. +// Given a non-nil pointer ptr of type *T, where *T satisfies Fact, +// importObjectFact copies the fact value to *ptr. +func (act *action) importObjectFact(obj types.Object, ptr analysis.Fact) bool { + if obj == nil { + panic("nil object") + } + key := objectFactKey{obj, act.factType(ptr)} + if v, ok := act.objectFacts[key]; ok { + reflect.ValueOf(ptr).Elem().Set(reflect.ValueOf(v).Elem()) + return true + } + return false +} + +// exportObjectFact implements Pass.ExportObjectFact. +func (act *action) exportObjectFact(obj types.Object, fact analysis.Fact) { + if obj.Pkg() != act.pkg.Types { + act.r.log.Panicf("internal error: in analysis %s of package %s: Fact.Set(%s, %T): can't set facts on objects belonging another package", + act.a, act.pkg, obj, fact) + } + + key := objectFactKey{obj, act.factType(fact)} + act.objectFacts[key] = fact // clobber any existing entry + if isFactsExportDebug { + objstr := types.ObjectString(obj, (*types.Package).Name) + factsExportDebugf("%s: object %s has fact %s\n", + act.pkg.Fset.Position(obj.Pos()), objstr, fact) + } +} + +func (act *action) allObjectFacts() []analysis.ObjectFact { + out := make([]analysis.ObjectFact, 0, len(act.objectFacts)) + for key, fact := range act.objectFacts { + out = append(out, analysis.ObjectFact{ + Object: key.obj, + Fact: fact, + }) + } + return out +} + +// importPackageFact implements Pass.ImportPackageFact. +// Given a non-nil pointer ptr of type *T, where *T satisfies Fact, +// fact copies the fact value to *ptr. +func (act *action) importPackageFact(pkg *types.Package, ptr analysis.Fact) bool { + if pkg == nil { + panic("nil package") + } + key := packageFactKey{pkg, act.factType(ptr)} + if v, ok := act.packageFacts[key]; ok { + reflect.ValueOf(ptr).Elem().Set(reflect.ValueOf(v).Elem()) + return true + } + return false +} + +// exportPackageFact implements Pass.ExportPackageFact. 
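The ImportObjectFact/ExportObjectFact callbacks wired into the pass above are what an analyzer's own code ends up calling. A sketch of the producer side under the standard go/analysis fact conventions; the analyzer name, the IsDeprecatedFact type and the Deprecated: heuristic are illustrative:

package deprecationmark

import (
    "go/ast"
    "go/types"
    "strings"

    "golang.org/x/tools/go/analysis"
)

// IsDeprecatedFact is an object fact: it is attached to a types.Object and
// becomes visible to analyses of importing packages (serialized via gob,
// hence the FactTypes registration below).
type IsDeprecatedFact struct{ Msg string }

func (*IsDeprecatedFact) AFact() {}

var Analyzer = &analysis.Analyzer{
    Name:      "deprecationmark",
    Doc:       "marks functions whose doc comment starts with Deprecated:",
    Run:       run,
    FactTypes: []analysis.Fact{new(IsDeprecatedFact)},
}

func run(pass *analysis.Pass) (interface{}, error) {
    for _, file := range pass.Files {
        for _, decl := range file.Decls {
            fn, ok := decl.(*ast.FuncDecl)
            if !ok || fn.Doc == nil || !strings.HasPrefix(fn.Doc.Text(), "Deprecated:") {
                continue
            }
            if obj, ok := pass.TypesInfo.Defs[fn.Name].(*types.Func); ok {
                pass.ExportObjectFact(obj, &IsDeprecatedFact{Msg: strings.TrimSpace(fn.Doc.Text())})
            }
        }
    }
    return nil, nil
}

A consumer in a downstream analysis would call pass.ImportObjectFact(calleeObj, new(IsDeprecatedFact)); the runner here is what persists those facts to the package cache and inherits them across package boundaries.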
+func (act *action) exportPackageFact(fact analysis.Fact) { + key := packageFactKey{act.pass.Pkg, act.factType(fact)} + act.packageFacts[key] = fact // clobber any existing entry + factsDebugf("%s: package %s has fact %s\n", + act.pkg.Fset.Position(act.pass.Files[0].Pos()), act.pass.Pkg.Path(), fact) +} + +func (act *action) allPackageFacts() []analysis.PackageFact { + out := make([]analysis.PackageFact, 0, len(act.packageFacts)) + for key, fact := range act.packageFacts { + out = append(out, analysis.PackageFact{ + Package: key.pkg, + Fact: fact, + }) + } + return out +} + +func (act *action) factType(fact analysis.Fact) reflect.Type { + t := reflect.TypeOf(fact) + if t.Kind() != reflect.Ptr { + act.r.log.Fatalf("invalid Fact type: got %T, want pointer", t) + } + return t +} + +func (act *action) persistFactsToCache() error { + analyzer := act.a + if len(analyzer.FactTypes) == 0 { + return nil + } + + // Merge new facts into the package and persist them. + var facts []Fact + for key, fact := range act.packageFacts { + if key.pkg != act.pkg.Types { + // The fact is from inherited facts from another package + continue + } + facts = append(facts, Fact{ + Path: "", + Fact: fact, + }) + } + for key, fact := range act.objectFacts { + obj := key.obj + if obj.Pkg() != act.pkg.Types { + // The fact is from inherited facts from another package + continue + } + + path, err := objectpath.For(obj) + if err != nil { + // The object is not globally addressable + continue + } + + facts = append(facts, Fact{ + Path: string(path), + Fact: fact, + }) + } + + factsCacheDebugf("Caching %d facts for package %q and analyzer %s", len(facts), act.pkg.Name, act.a.Name) + + key := fmt.Sprintf("%s/facts", analyzer.Name) + return act.r.pkgCache.Put(act.pkg, pkgcache.HashModeNeedAllDeps, key, facts) +} + +func (act *action) loadPersistedFacts() bool { + var facts []Fact + key := fmt.Sprintf("%s/facts", act.a.Name) + if err := act.r.pkgCache.Get(act.pkg, pkgcache.HashModeNeedAllDeps, key, &facts); err != nil { + if err != pkgcache.ErrMissing { + act.r.log.Warnf("Failed to get persisted facts: %s", err) + } + + factsCacheDebugf("No cached facts for package %q and analyzer %s", act.pkg.Name, act.a.Name) + return false + } + + factsCacheDebugf("Loaded %d cached facts for package %q and analyzer %s", len(facts), act.pkg.Name, act.a.Name) + + for _, f := range facts { + if f.Path == "" { // this is a package fact + key := packageFactKey{act.pkg.Types, act.factType(f.Fact)} + act.packageFacts[key] = f.Fact + continue + } + obj, err := objectpath.Object(act.pkg.Types, objectpath.Path(f.Path)) + if err != nil { + // Be lenient about these errors. For example, when + // analyzing io/ioutil from source, we may get a fact + // for methods on the devNull type, and objectpath + // will happily create a path for them. However, when + // we later load io/ioutil from export data, the path + // no longer resolves. + // + // If an exported type embeds the unexported type, + // then (part of) the unexported type will become part + // of the type information and our path will resolve + // again. + continue + } + factKey := objectFactKey{obj, act.factType(f.Fact)} + act.objectFacts[factKey] = f.Fact + } + + return true +} + +func (act *action) markDepsForAnalyzingSource() { + // Horizontal deps (analyzer.Requires) must be loaded from source and analyzed before analyzing + // this action. + for _, dep := range act.deps { + if dep.pkg == act.pkg { + // Analyze source only for horizontal dependencies, e.g. from "buildssa". 
+ dep.needAnalyzeSource = true // can't be set in parallel + } + } +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/runner_facts.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/runner_facts.go new file mode 100644 index 000000000..1d0fb974e --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/runner_facts.go @@ -0,0 +1,125 @@ +package goanalysis + +import ( + "bytes" + "encoding/gob" + "fmt" + "go/types" + "reflect" + + "golang.org/x/tools/go/analysis" +) + +type objectFactKey struct { + obj types.Object + typ reflect.Type +} + +type packageFactKey struct { + pkg *types.Package + typ reflect.Type +} + +type Fact struct { + Path string // non-empty only for object facts + Fact analysis.Fact +} + +// inheritFacts populates act.facts with +// those it obtains from its dependency, dep. +func inheritFacts(act, dep *action) { + serialize := false + + for key, fact := range dep.objectFacts { + // Filter out facts related to objects + // that are irrelevant downstream + // (equivalently: not in the compiler export data). + if !exportedFrom(key.obj, dep.pkg.Types) { + factsInheritDebugf("%v: discarding %T fact from %s for %s: %s", act, fact, dep, key.obj, fact) + continue + } + + // Optionally serialize/deserialize fact + // to verify that it works across address spaces. + if serialize { + var err error + fact, err = codeFact(fact) + if err != nil { + act.r.log.Panicf("internal error: encoding of %T fact failed in %v", fact, act) + } + } + + factsInheritDebugf("%v: inherited %T fact for %s: %s", act, fact, key.obj, fact) + act.objectFacts[key] = fact + } + + for key, fact := range dep.packageFacts { + // TODO: filter out facts that belong to + // packages not mentioned in the export data + // to prevent side channels. + + // Optionally serialize/deserialize fact + // to verify that it works across address spaces + // and is deterministic. + if serialize { + var err error + fact, err = codeFact(fact) + if err != nil { + act.r.log.Panicf("internal error: encoding of %T fact failed in %v", fact, act) + } + } + + factsInheritDebugf("%v: inherited %T fact for %s: %s", act, fact, key.pkg.Path(), fact) + act.packageFacts[key] = fact + } +} + +// codeFact encodes then decodes a fact, +// just to exercise that logic. +func codeFact(fact analysis.Fact) (analysis.Fact, error) { + // We encode facts one at a time. + // A real modular driver would emit all facts + // into one encoder to improve gob efficiency. + var buf bytes.Buffer + if err := gob.NewEncoder(&buf).Encode(fact); err != nil { + return nil, err + } + + // Encode it twice and assert that we get the same bits. + // This helps detect nondeterministic Gob encoding (e.g. of maps). + var buf2 bytes.Buffer + if err := gob.NewEncoder(&buf2).Encode(fact); err != nil { + return nil, err + } + if !bytes.Equal(buf.Bytes(), buf2.Bytes()) { + return nil, fmt.Errorf("encoding of %T fact is nondeterministic", fact) + } + + newFact := reflect.New(reflect.TypeOf(fact).Elem()).Interface().(analysis.Fact) + if err := gob.NewDecoder(&buf).Decode(newFact); err != nil { + return nil, err + } + return newFact, nil +} + +// exportedFrom reports whether obj may be visible to a package that imports pkg. +// This includes not just the exported members of pkg, but also unexported +// constants, types, fields, and methods, perhaps belonging to other packages, +// that find there way into the API. 
+// This is an over-approximation of the more accurate approach used by +// gc export data, which walks the type graph, but it's much simpler. +// +// TODO(adonovan): do more accurate filtering by walking the type graph. +func exportedFrom(obj types.Object, pkg *types.Package) bool { + switch obj := obj.(type) { + case *types.Func: + return obj.Exported() && obj.Pkg() == pkg || + obj.Type().(*types.Signature).Recv() != nil + case *types.Var: + return obj.Exported() && obj.Pkg() == pkg || + obj.IsField() + case *types.TypeName, *types.Const: + return true + } + return false // Nil, Builtin, Label, or PkgName +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/runner_loadingpackage.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/runner_loadingpackage.go new file mode 100644 index 000000000..9fa396854 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/runner_loadingpackage.go @@ -0,0 +1,497 @@ +package goanalysis + +import ( + "fmt" + "go/ast" + "go/parser" + "go/scanner" + "go/types" + "os" + "reflect" + "sync" + "sync/atomic" + + "github.com/pkg/errors" + "golang.org/x/tools/go/gcexportdata" + "golang.org/x/tools/go/packages" + + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis/load" + "github.com/golangci/golangci-lint/pkg/logutils" +) + +const unsafePkgName = "unsafe" + +type loadingPackage struct { + pkg *packages.Package + imports map[string]*loadingPackage + isInitial bool + log logutils.Log + actions []*action // all actions with this package + loadGuard *load.Guard + dependents int32 // number of depending on it packages + analyzeOnce sync.Once + decUseMutex sync.Mutex +} + +func (lp *loadingPackage) analyzeRecursive(loadMode LoadMode, loadSem chan struct{}) { + lp.analyzeOnce.Do(func() { + // Load the direct dependencies, in parallel. + var wg sync.WaitGroup + wg.Add(len(lp.imports)) + for _, imp := range lp.imports { + go func(imp *loadingPackage) { + imp.analyzeRecursive(loadMode, loadSem) + wg.Done() + }(imp) + } + wg.Wait() + lp.analyze(loadMode, loadSem) + }) +} + +func (lp *loadingPackage) analyze(loadMode LoadMode, loadSem chan struct{}) { + loadSem <- struct{}{} + defer func() { + <-loadSem + }() + + // Save memory on unused more fields. + defer lp.decUse(loadMode < LoadModeWholeProgram) + + if err := lp.loadWithFacts(loadMode); err != nil { + werr := errors.Wrapf(err, "failed to load package %s", lp.pkg.Name) + // Don't need to write error to errCh, it will be extracted and reported on another layer. + // Unblock depending actions and propagate error. + for _, act := range lp.actions { + close(act.analysisDoneCh) + act.err = werr + } + return + } + + var actsWg sync.WaitGroup + actsWg.Add(len(lp.actions)) + for _, act := range lp.actions { + go func(act *action) { + defer actsWg.Done() + + act.waitUntilDependingAnalyzersWorked() + + act.analyzeSafe() + }(act) + } + actsWg.Wait() +} + +func (lp *loadingPackage) loadFromSource(loadMode LoadMode) error { + pkg := lp.pkg + + // Many packages have few files, much fewer than there + // are CPU cores. Additionally, parsing each individual file is + // very fast. A naive parallel implementation of this loop won't + // be faster, and tends to be slower due to extra scheduling, + // bookkeeping and potentially false sharing of cache lines. 
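codeFact above guards against nondeterministic fact encodings (typically facts containing Go maps) by encoding each fact twice, comparing the bytes, and only then decoding. The same round-trip check reduced to a self-contained sketch with a toy value in place of an analysis.Fact:

package main

import (
    "bytes"
    "encoding/gob"
    "fmt"
    "reflect"
)

type toyFact struct{ Names []string }

// roundTrip gob-encodes v twice, insists both encodings are identical, and
// decodes into a fresh value of the same concrete type.
func roundTrip(v interface{}) (interface{}, error) {
    var buf1, buf2 bytes.Buffer
    if err := gob.NewEncoder(&buf1).Encode(v); err != nil {
        return nil, err
    }
    if err := gob.NewEncoder(&buf2).Encode(v); err != nil {
        return nil, err
    }
    if !bytes.Equal(buf1.Bytes(), buf2.Bytes()) {
        return nil, fmt.Errorf("encoding of %T is nondeterministic", v)
    }

    out := reflect.New(reflect.TypeOf(v).Elem()).Interface()
    if err := gob.NewDecoder(&buf1).Decode(out); err != nil {
        return nil, err
    }
    return out, nil
}

func main() {
    got, err := roundTrip(&toyFact{Names: []string{"a", "b"}})
    fmt.Println(got, err) // &{[a b]} <nil>
}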
+ pkg.Syntax = make([]*ast.File, 0, len(pkg.CompiledGoFiles)) + for _, file := range pkg.CompiledGoFiles { + f, err := parser.ParseFile(pkg.Fset, file, nil, parser.ParseComments) + if err != nil { + pkg.Errors = append(pkg.Errors, lp.convertError(err)...) + continue + } + pkg.Syntax = append(pkg.Syntax, f) + } + if len(pkg.Errors) != 0 { + pkg.IllTyped = true + return nil + } + + if loadMode == LoadModeSyntax { + return nil + } + + // Call NewPackage directly with explicit name. + // This avoids skew between golist and go/types when the files' + // package declarations are inconsistent. + // Subtle: we populate all Types fields with an empty Package + // before loading export data so that export data processing + // never has to create a types.Package for an indirect dependency, + // which would then require that such created packages be explicitly + // inserted back into the Import graph as a final step after export data loading. + pkg.Types = types.NewPackage(pkg.PkgPath, pkg.Name) + + pkg.IllTyped = true + + pkg.TypesInfo = &types.Info{ + Types: make(map[ast.Expr]types.TypeAndValue), + Defs: make(map[*ast.Ident]types.Object), + Uses: make(map[*ast.Ident]types.Object), + Implicits: make(map[ast.Node]types.Object), + Scopes: make(map[ast.Node]*types.Scope), + Selections: make(map[*ast.SelectorExpr]*types.Selection), + } + + importer := func(path string) (*types.Package, error) { + if path == unsafePkgName { + return types.Unsafe, nil + } + if path == "C" { + // go/packages doesn't tell us that cgo preprocessing + // failed. When we subsequently try to parse the package, + // we'll encounter the raw C import. + return nil, errors.New("cgo preprocessing failed") + } + imp := pkg.Imports[path] + if imp == nil { + return nil, nil + } + if len(imp.Errors) > 0 { + return nil, imp.Errors[0] + } + return imp.Types, nil + } + tc := &types.Config{ + Importer: importerFunc(importer), + Error: func(err error) { + pkg.Errors = append(pkg.Errors, lp.convertError(err)...) + }, + } + _ = types.NewChecker(tc, pkg.Fset, pkg.Types, pkg.TypesInfo).Files(pkg.Syntax) + // Don't handle error here: errors are adding by tc.Error function. + + illTyped := len(pkg.Errors) != 0 + if !illTyped { + for _, imp := range lp.imports { + if imp.pkg.IllTyped { + illTyped = true + break + } + } + } + pkg.IllTyped = illTyped + return nil +} + +func (lp *loadingPackage) loadFromExportData() error { + pkg := lp.pkg + + // Call NewPackage directly with explicit name. + // This avoids skew between golist and go/types when the files' + // package declarations are inconsistent. + // Subtle: we populate all Types fields with an empty Package + // before loading export data so that export data processing + // never has to create a types.Package for an indirect dependency, + // which would then require that such created packages be explicitly + // inserted back into the Import graph as a final step after export data loading. 
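loadFromSource above is essentially the standard parse-then-type-check sequence from go/parser and go/types, with a custom Importer that resolves imports from the already loaded go/packages graph and an Error hook that records problems instead of aborting. A self-contained sketch of that sequence for one in-memory file; the "source" importer here stands in for the package-graph lookup:

package main

import (
    "fmt"
    "go/ast"
    "go/importer"
    "go/parser"
    "go/token"
    "go/types"
)

const src = `package demo

import "strings"

func Upper(s string) string { return strings.ToUpper(s) }
`

func main() {
    fset := token.NewFileSet()
    f, err := parser.ParseFile(fset, "demo.go", src, parser.ParseComments)
    if err != nil {
        panic(err)
    }

    info := &types.Info{
        Defs: make(map[*ast.Ident]types.Object),
        Uses: make(map[*ast.Ident]types.Object),
    }

    var typeErrs []error
    conf := &types.Config{
        Importer: importer.ForCompiler(fset, "source", nil),              // stand-in for resolving via pkg.Imports
        Error:    func(err error) { typeErrs = append(typeErrs, err) },   // record, don't abort
    }

    pkg := types.NewPackage("example.com/demo", "demo")
    _ = types.NewChecker(conf, fset, pkg, info).Files([]*ast.File{f})

    fmt.Println("type errors:", len(typeErrs), "defs:", len(info.Defs))
}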
+ pkg.Types = types.NewPackage(pkg.PkgPath, pkg.Name) + + pkg.IllTyped = true + for path, pkg := range pkg.Imports { + if pkg.Types == nil { + return fmt.Errorf("dependency %q hasn't been loaded yet", path) + } + } + if pkg.ExportFile == "" { + return fmt.Errorf("no export data for %q", pkg.ID) + } + f, err := os.Open(pkg.ExportFile) + if err != nil { + return err + } + defer f.Close() + + r, err := gcexportdata.NewReader(f) + if err != nil { + return err + } + + view := make(map[string]*types.Package) // view seen by gcexportdata + seen := make(map[*packages.Package]bool) // all visited packages + var visit func(pkgs map[string]*packages.Package) + visit = func(pkgs map[string]*packages.Package) { + for _, pkg := range pkgs { + if !seen[pkg] { + seen[pkg] = true + view[pkg.PkgPath] = pkg.Types + visit(pkg.Imports) + } + } + } + visit(pkg.Imports) + tpkg, err := gcexportdata.Read(r, pkg.Fset, view, pkg.PkgPath) + if err != nil { + return err + } + pkg.Types = tpkg + pkg.IllTyped = false + return nil +} + +func (lp *loadingPackage) loadWithFacts(loadMode LoadMode) error { + pkg := lp.pkg + + if pkg.PkgPath == unsafePkgName { + // Fill in the blanks to avoid surprises. + pkg.Syntax = []*ast.File{} + if loadMode >= LoadModeTypesInfo { + pkg.Types = types.Unsafe + pkg.TypesInfo = new(types.Info) + } + return nil + } + + if pkg.TypesInfo != nil { + // Already loaded package, e.g. because another not go/analysis linter required types for deps. + // Try load cached facts for it. + + for _, act := range lp.actions { + if !act.loadCachedFacts() { + // Cached facts loading failed: analyze later the action from source. + act.needAnalyzeSource = true + factsCacheDebugf("Loading of facts for already loaded %s failed, analyze it from source later", act) + act.markDepsForAnalyzingSource() + } + } + return nil + } + + if lp.isInitial { + // No need to load cached facts: the package will be analyzed from source + // because it's the initial. + return lp.loadFromSource(loadMode) + } + + return lp.loadImportedPackageWithFacts(loadMode) +} + +func (lp *loadingPackage) loadImportedPackageWithFacts(loadMode LoadMode) error { + pkg := lp.pkg + + // Load package from export data + if loadMode >= LoadModeTypesInfo { + if err := lp.loadFromExportData(); err != nil { + // We asked Go to give us up to date export data, yet + // we can't load it. There must be something wrong. + // + // Attempt loading from source. This should fail (because + // otherwise there would be export data); we just want to + // get the compile errors. If loading from source succeeds + // we discard the result, anyway. Otherwise we'll fail + // when trying to reload from export data later. + + // Otherwise it panics because uses already existing (from exported data) types. + pkg.Types = types.NewPackage(pkg.PkgPath, pkg.Name) + if srcErr := lp.loadFromSource(loadMode); srcErr != nil { + return srcErr + } + // Make sure this package can't be imported successfully + pkg.Errors = append(pkg.Errors, packages.Error{ + Pos: "-", + Msg: fmt.Sprintf("could not load export data: %s", err), + Kind: packages.ParseError, + }) + return errors.Wrap(err, "could not load export data") + } + } + + needLoadFromSource := false + for _, act := range lp.actions { + if act.loadCachedFacts() { + continue + } + + // Cached facts loading failed: analyze later the action from source. 
+ factsCacheDebugf("Loading of facts for %s failed, analyze it from source later", act) + act.needAnalyzeSource = true // can't be set in parallel + needLoadFromSource = true + + act.markDepsForAnalyzingSource() + } + + if needLoadFromSource { + // Cached facts loading failed: analyze later the action from source. To perform + // the analysis we need to load the package from source code. + + // Otherwise it panics because uses already existing (from exported data) types. + if loadMode >= LoadModeTypesInfo { + pkg.Types = types.NewPackage(pkg.PkgPath, pkg.Name) + } + return lp.loadFromSource(loadMode) + } + + return nil +} + +func (lp *loadingPackage) decUse(canClearTypes bool) { + lp.decUseMutex.Lock() + defer lp.decUseMutex.Unlock() + + for _, act := range lp.actions { + pass := act.pass + if pass == nil { + continue + } + + pass.Files = nil + pass.TypesInfo = nil + pass.TypesSizes = nil + pass.ResultOf = nil + pass.Pkg = nil + pass.OtherFiles = nil + pass.AllObjectFacts = nil + pass.AllPackageFacts = nil + pass.ImportObjectFact = nil + pass.ExportObjectFact = nil + pass.ImportPackageFact = nil + pass.ExportPackageFact = nil + act.pass = nil + act.deps = nil + if act.result != nil { + if isMemoryDebug { + debugf("%s: decUse: nilling act result of size %d bytes", act, sizeOfValueTreeBytes(act.result)) + } + act.result = nil + } + } + + lp.pkg.Syntax = nil + lp.pkg.TypesInfo = nil + lp.pkg.TypesSizes = nil + + // Can't set lp.pkg.Imports to nil because of loadFromExportData.visit. + + dependents := atomic.AddInt32(&lp.dependents, -1) + if dependents != 0 { + return + } + + if canClearTypes { + // canClearTypes is set to true if we can discard type + // information after the package and its dependents have been + // processed. This is the case when no whole program checkers (unused) are + // being run. + lp.pkg.Types = nil + } + lp.pkg = nil + + for _, imp := range lp.imports { + imp.decUse(canClearTypes) + } + lp.imports = nil + + for _, act := range lp.actions { + if !lp.isInitial { + act.pkg = nil + } + act.packageFacts = nil + act.objectFacts = nil + } + lp.actions = nil +} + +func (lp *loadingPackage) convertError(err error) []packages.Error { + var errs []packages.Error + // taken from go/packages + switch err := err.(type) { + case packages.Error: + // from driver + errs = append(errs, err) + + case *os.PathError: + // from parser + errs = append(errs, packages.Error{ + Pos: err.Path + ":1", + Msg: err.Err.Error(), + Kind: packages.ParseError, + }) + + case scanner.ErrorList: + // from parser + for _, err := range err { + errs = append(errs, packages.Error{ + Pos: err.Pos.String(), + Msg: err.Msg, + Kind: packages.ParseError, + }) + } + + case types.Error: + // from type checker + errs = append(errs, packages.Error{ + Pos: err.Fset.Position(err.Pos).String(), + Msg: err.Msg, + Kind: packages.TypeError, + }) + + default: + // unexpected impoverished error from parser? + errs = append(errs, packages.Error{ + Pos: "-", + Msg: err.Error(), + Kind: packages.UnknownError, + }) + + // If you see this error message, please file a bug. 
+ lp.log.Warnf("Internal error: error %q (%T) without position", err, err) + } + return errs +} + +func (lp *loadingPackage) String() string { + return fmt.Sprintf("%s@%s", lp.pkg.PkgPath, lp.pkg.Name) +} + +type importerFunc func(path string) (*types.Package, error) + +func (f importerFunc) Import(path string) (*types.Package, error) { return f(path) } + +func sizeOfValueTreeBytes(v interface{}) int { + return sizeOfReflectValueTreeBytes(reflect.ValueOf(v), map[uintptr]struct{}{}) +} + +func sizeOfReflectValueTreeBytes(rv reflect.Value, visitedPtrs map[uintptr]struct{}) int { + switch rv.Kind() { + case reflect.Ptr: + ptrSize := int(rv.Type().Size()) + if rv.IsNil() { + return ptrSize + } + ptr := rv.Pointer() + if _, ok := visitedPtrs[ptr]; ok { + return 0 + } + visitedPtrs[ptr] = struct{}{} + return ptrSize + sizeOfReflectValueTreeBytes(rv.Elem(), visitedPtrs) + case reflect.Interface: + if rv.IsNil() { + return 0 + } + return sizeOfReflectValueTreeBytes(rv.Elem(), visitedPtrs) + case reflect.Struct: + ret := 0 + for i := 0; i < rv.NumField(); i++ { + ret += sizeOfReflectValueTreeBytes(rv.Field(i), visitedPtrs) + } + return ret + case reflect.Slice, reflect.Array, reflect.Chan: + return int(rv.Type().Size()) + rv.Cap()*int(rv.Type().Elem().Size()) + case reflect.Map: + ret := 0 + for _, key := range rv.MapKeys() { + mv := rv.MapIndex(key) + ret += sizeOfReflectValueTreeBytes(key, visitedPtrs) + ret += sizeOfReflectValueTreeBytes(mv, visitedPtrs) + } + return ret + case reflect.String: + return rv.Len() + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, + reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, + reflect.Uintptr, reflect.Bool, reflect.Float32, reflect.Float64, + reflect.Complex64, reflect.Complex128, reflect.Func, reflect.UnsafePointer: + return int(rv.Type().Size()) + case reflect.Invalid: + return 0 + default: + panic("unknown rv of type " + fmt.Sprint(rv)) + } +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/runners.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/runners.go new file mode 100644 index 000000000..7e4cf902e --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/goanalysis/runners.go @@ -0,0 +1,269 @@ +package goanalysis + +import ( + "fmt" + "runtime" + "sort" + "strings" + "sync" + "sync/atomic" + "time" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/packages" + + "github.com/golangci/golangci-lint/internal/pkgcache" + "github.com/golangci/golangci-lint/pkg/lint/linter" + "github.com/golangci/golangci-lint/pkg/result" + "github.com/golangci/golangci-lint/pkg/timeutils" +) + +type runAnalyzersConfig interface { + getName() string + getLinterNameForDiagnostic(*Diagnostic) string + getAnalyzers() []*analysis.Analyzer + useOriginalPackages() bool + reportIssues(*linter.Context) []Issue + getLoadMode() LoadMode +} + +func runAnalyzers(cfg runAnalyzersConfig, lintCtx *linter.Context) ([]result.Issue, error) { + log := lintCtx.Log.Child("goanalysis") + sw := timeutils.NewStopwatch("analyzers", log) + + const stagesToPrint = 10 + defer sw.PrintTopStages(stagesToPrint) + + runner := newRunner(cfg.getName(), log, lintCtx.PkgCache, lintCtx.LoadGuard, cfg.getLoadMode(), sw) + + pkgs := lintCtx.Packages + if cfg.useOriginalPackages() { + pkgs = lintCtx.OriginalPackages + } + + issues, pkgsFromCache := loadIssuesFromCache(pkgs, lintCtx, cfg.getAnalyzers()) + var pkgsToAnalyze []*packages.Package + for _, pkg := range pkgs { + if 
!pkgsFromCache[pkg] { + pkgsToAnalyze = append(pkgsToAnalyze, pkg) + } + } + + diags, errs, passToPkg := runner.run(cfg.getAnalyzers(), pkgsToAnalyze) + + defer func() { + if len(errs) == 0 { + // If we try to save to cache even if we have compilation errors + // we won't see them on repeated runs. + saveIssuesToCache(pkgs, pkgsFromCache, issues, lintCtx, cfg.getAnalyzers()) + } + }() + + buildAllIssues := func() []result.Issue { + var retIssues []result.Issue + reportedIssues := cfg.reportIssues(lintCtx) + for i := range reportedIssues { + issue := &reportedIssues[i].Issue + if issue.Pkg == nil { + issue.Pkg = passToPkg[reportedIssues[i].Pass] + } + retIssues = append(retIssues, *issue) + } + retIssues = append(retIssues, buildIssues(diags, cfg.getLinterNameForDiagnostic)...) + return retIssues + } + + errIssues, err := buildIssuesFromIllTypedError(errs, lintCtx) + if err != nil { + return nil, err + } + + issues = append(issues, errIssues...) + issues = append(issues, buildAllIssues()...) + + return issues, nil +} + +func buildIssues(diags []Diagnostic, linterNameBuilder func(diag *Diagnostic) string) []result.Issue { + var issues []result.Issue + for i := range diags { + diag := &diags[i] + linterName := linterNameBuilder(diag) + + var text string + if diag.Analyzer.Name == linterName { + text = diag.Message + } else { + text = fmt.Sprintf("%s: %s", diag.Analyzer.Name, diag.Message) + } + + issues = append(issues, result.Issue{ + FromLinter: linterName, + Text: text, + Pos: diag.Position, + Pkg: diag.Pkg, + }) + + if len(diag.Related) > 0 { + for _, info := range diag.Related { + issues = append(issues, result.Issue{ + FromLinter: linterName, + Text: fmt.Sprintf("%s(related information): %s", diag.Analyzer.Name, info.Message), + Pos: diag.Pkg.Fset.Position(info.Pos), + Pkg: diag.Pkg, + }) + } + } + } + return issues +} + +func getIssuesCacheKey(analyzers []*analysis.Analyzer) string { + return "lint/result:" + analyzersHashID(analyzers) +} + +func saveIssuesToCache(allPkgs []*packages.Package, pkgsFromCache map[*packages.Package]bool, + issues []result.Issue, lintCtx *linter.Context, analyzers []*analysis.Analyzer) { + startedAt := time.Now() + perPkgIssues := map[*packages.Package][]result.Issue{} + for ind := range issues { + i := &issues[ind] + perPkgIssues[i.Pkg] = append(perPkgIssues[i.Pkg], *i) + } + + savedIssuesCount := int32(0) + lintResKey := getIssuesCacheKey(analyzers) + + workerCount := runtime.GOMAXPROCS(-1) + var wg sync.WaitGroup + wg.Add(workerCount) + + pkgCh := make(chan *packages.Package, len(allPkgs)) + for i := 0; i < workerCount; i++ { + go func() { + defer wg.Done() + for pkg := range pkgCh { + pkgIssues := perPkgIssues[pkg] + encodedIssues := make([]EncodingIssue, 0, len(pkgIssues)) + for ind := range pkgIssues { + i := &pkgIssues[ind] + encodedIssues = append(encodedIssues, EncodingIssue{ + FromLinter: i.FromLinter, + Text: i.Text, + Pos: i.Pos, + LineRange: i.LineRange, + Replacement: i.Replacement, + ExpectNoLint: i.ExpectNoLint, + ExpectedNoLintLinter: i.ExpectedNoLintLinter, + }) + } + + atomic.AddInt32(&savedIssuesCount, int32(len(encodedIssues))) + if err := lintCtx.PkgCache.Put(pkg, pkgcache.HashModeNeedAllDeps, lintResKey, encodedIssues); err != nil { + lintCtx.Log.Infof("Failed to save package %s issues (%d) to cache: %s", pkg, len(pkgIssues), err) + } else { + issuesCacheDebugf("Saved package %s issues (%d) to cache", pkg, len(pkgIssues)) + } + } + }() + } + + for _, pkg := range allPkgs { + if pkgsFromCache[pkg] { + continue + } + + pkgCh <- pkg + } 
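+
+ // All packages that need saving have been queued; closing the channel
+ // lets the workers' range loops finish, and wg.Wait() below blocks until
+ // they have drained it.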
+ close(pkgCh) + wg.Wait() + + issuesCacheDebugf("Saved %d issues from %d packages to cache in %s", savedIssuesCount, len(allPkgs), time.Since(startedAt)) +} + +//nolint:gocritic +func loadIssuesFromCache(pkgs []*packages.Package, lintCtx *linter.Context, + analyzers []*analysis.Analyzer) ([]result.Issue, map[*packages.Package]bool) { + startedAt := time.Now() + + lintResKey := getIssuesCacheKey(analyzers) + type cacheRes struct { + issues []result.Issue + loadErr error + } + pkgToCacheRes := make(map[*packages.Package]*cacheRes, len(pkgs)) + for _, pkg := range pkgs { + pkgToCacheRes[pkg] = &cacheRes{} + } + + workerCount := runtime.GOMAXPROCS(-1) + var wg sync.WaitGroup + wg.Add(workerCount) + + pkgCh := make(chan *packages.Package, len(pkgs)) + for i := 0; i < workerCount; i++ { + go func() { + defer wg.Done() + for pkg := range pkgCh { + var pkgIssues []EncodingIssue + err := lintCtx.PkgCache.Get(pkg, pkgcache.HashModeNeedAllDeps, lintResKey, &pkgIssues) + cacheRes := pkgToCacheRes[pkg] + cacheRes.loadErr = err + if err != nil { + continue + } + if len(pkgIssues) == 0 { + continue + } + + issues := make([]result.Issue, 0, len(pkgIssues)) + for _, i := range pkgIssues { + issues = append(issues, result.Issue{ + FromLinter: i.FromLinter, + Text: i.Text, + Pos: i.Pos, + LineRange: i.LineRange, + Replacement: i.Replacement, + Pkg: pkg, + ExpectNoLint: i.ExpectNoLint, + ExpectedNoLintLinter: i.ExpectedNoLintLinter, + }) + } + cacheRes.issues = issues + } + }() + } + + for _, pkg := range pkgs { + pkgCh <- pkg + } + close(pkgCh) + wg.Wait() + + loadedIssuesCount := 0 + var issues []result.Issue + pkgsFromCache := map[*packages.Package]bool{} + for pkg, cacheRes := range pkgToCacheRes { + if cacheRes.loadErr == nil { + loadedIssuesCount += len(cacheRes.issues) + pkgsFromCache[pkg] = true + issues = append(issues, cacheRes.issues...) + issuesCacheDebugf("Loaded package %s issues (%d) from cache", pkg, len(cacheRes.issues)) + } else { + issuesCacheDebugf("Didn't load package %s issues from cache: %s", pkg, cacheRes.loadErr) + } + } + issuesCacheDebugf("Loaded %d issues from cache in %s, analyzing %d/%d packages", + loadedIssuesCount, time.Since(startedAt), len(pkgs)-len(pkgsFromCache), len(pkgs)) + return issues, pkgsFromCache +} + +func analyzersHashID(analyzers []*analysis.Analyzer) string { + names := make([]string, 0, len(analyzers)) + for _, a := range analyzers { + names = append(names, a.Name) + } + + sort.Strings(names) + return strings.Join(names, ",") +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gochecknoglobals.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gochecknoglobals.go new file mode 100644 index 000000000..804865cfc --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gochecknoglobals.go @@ -0,0 +1,29 @@ +package golinters + +import ( + "4d63.com/gochecknoglobals/checknoglobals" + "golang.org/x/tools/go/analysis" + + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" +) + +func NewGochecknoglobals() *goanalysis.Linter { + gochecknoglobals := checknoglobals.Analyzer() + + // gochecknoglobals only lints test files if the `-t` flag is passed so we + // pass the `t` flag as true to the analyzer before running it. This can be + // turned of by using the regular golangci-lint flags such as `--tests` or + // `--skip-files`. 
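+ //
+ // For example, running `golangci-lint run --tests=false` (or setting
+ // `run.tests: false` in .golangci.yml) keeps _test.go files out of the
+ // run entirely, which also disables this analyzer for them.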
+ linterConfig := map[string]map[string]interface{}{ + gochecknoglobals.Name: { + "t": true, + }, + } + + return goanalysis.NewLinter( + gochecknoglobals.Name, + gochecknoglobals.Doc, + []*analysis.Analyzer{gochecknoglobals}, + linterConfig, + ).WithLoadMode(goanalysis.LoadModeSyntax) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gochecknoinits.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gochecknoinits.go new file mode 100644 index 000000000..f9715bda8 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gochecknoinits.go @@ -0,0 +1,73 @@ +package golinters + +import ( + "fmt" + "go/ast" + "go/token" + "sync" + + "golang.org/x/tools/go/analysis" + + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" + "github.com/golangci/golangci-lint/pkg/lint/linter" + "github.com/golangci/golangci-lint/pkg/result" +) + +const gochecknoinitsName = "gochecknoinits" + +func NewGochecknoinits() *goanalysis.Linter { + var mu sync.Mutex + var resIssues []goanalysis.Issue + + analyzer := &analysis.Analyzer{ + Name: gochecknoinitsName, + Doc: goanalysis.TheOnlyanalyzerDoc, + Run: func(pass *analysis.Pass) (interface{}, error) { + var res []goanalysis.Issue + for _, file := range pass.Files { + fileIssues := checkFileForInits(file, pass.Fset) + for i := range fileIssues { + res = append(res, goanalysis.NewIssue(&fileIssues[i], pass)) + } + } + if len(res) == 0 { + return nil, nil + } + + mu.Lock() + resIssues = append(resIssues, res...) + mu.Unlock() + + return nil, nil + }, + } + return goanalysis.NewLinter( + gochecknoinitsName, + "Checks that no init functions are present in Go code", + []*analysis.Analyzer{analyzer}, + nil, + ).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue { + return resIssues + }).WithLoadMode(goanalysis.LoadModeSyntax) +} + +func checkFileForInits(f *ast.File, fset *token.FileSet) []result.Issue { + var res []result.Issue + for _, decl := range f.Decls { + funcDecl, ok := decl.(*ast.FuncDecl) + if !ok { + continue + } + + name := funcDecl.Name.Name + if name == "init" && funcDecl.Recv.NumFields() == 0 { + res = append(res, result.Issue{ + Pos: fset.Position(funcDecl.Pos()), + Text: fmt.Sprintf("don't use %s function", formatCode(name, nil)), + FromLinter: gochecknoinitsName, + }) + } + } + + return res +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gocognit.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gocognit.go new file mode 100644 index 000000000..eb42dd149 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gocognit.go @@ -0,0 +1,68 @@ +package golinters + +import ( + "fmt" + "sort" + "sync" + + "github.com/uudashr/gocognit" + "golang.org/x/tools/go/analysis" + + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" + "github.com/golangci/golangci-lint/pkg/lint/linter" + "github.com/golangci/golangci-lint/pkg/result" +) + +const gocognitName = "gocognit" + +func NewGocognit() *goanalysis.Linter { + var mu sync.Mutex + var resIssues []goanalysis.Issue + + analyzer := &analysis.Analyzer{ + Name: goanalysis.TheOnlyAnalyzerName, + Doc: goanalysis.TheOnlyanalyzerDoc, + } + return goanalysis.NewLinter( + gocognitName, + "Computes and checks the cognitive complexity of functions", + []*analysis.Analyzer{analyzer}, + nil, + ).WithContextSetter(func(lintCtx *linter.Context) { + analyzer.Run = func(pass *analysis.Pass) (interface{}, error) { + var stats []gocognit.Stat + for _, f := range pass.Files { + stats = gocognit.ComplexityStats(f, 
pass.Fset, stats) + } + if len(stats) == 0 { + return nil, nil + } + + sort.SliceStable(stats, func(i, j int) bool { + return stats[i].Complexity > stats[j].Complexity + }) + + res := make([]goanalysis.Issue, 0, len(stats)) + for _, s := range stats { + if s.Complexity <= lintCtx.Settings().Gocognit.MinComplexity { + break // Break as the stats is already sorted from greatest to least + } + + res = append(res, goanalysis.NewIssue(&result.Issue{ + Pos: s.Pos, + Text: fmt.Sprintf("cognitive complexity %d of func %s is high (> %d)", + s.Complexity, formatCode(s.FuncName, lintCtx.Cfg), lintCtx.Settings().Gocognit.MinComplexity), + FromLinter: gocognitName, + }, pass)) + } + + mu.Lock() + resIssues = append(resIssues, res...) + mu.Unlock() + + return nil, nil + } + }).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue { + return resIssues + }).WithLoadMode(goanalysis.LoadModeSyntax) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goconst.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/goconst.go new file mode 100644 index 000000000..bdec4e10b --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/goconst.go @@ -0,0 +1,92 @@ +package golinters + +import ( + "fmt" + "sync" + + goconstAPI "github.com/jgautheron/goconst" + "golang.org/x/tools/go/analysis" + + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" + "github.com/golangci/golangci-lint/pkg/lint/linter" + "github.com/golangci/golangci-lint/pkg/result" +) + +const goconstName = "goconst" + +func NewGoconst() *goanalysis.Linter { + var mu sync.Mutex + var resIssues []goanalysis.Issue + + analyzer := &analysis.Analyzer{ + Name: goconstName, + Doc: goanalysis.TheOnlyanalyzerDoc, + } + return goanalysis.NewLinter( + goconstName, + "Finds repeated strings that could be replaced by a constant", + []*analysis.Analyzer{analyzer}, + nil, + ).WithContextSetter(func(lintCtx *linter.Context) { + analyzer.Run = func(pass *analysis.Pass) (interface{}, error) { + issues, err := checkConstants(pass, lintCtx) + if err != nil || len(issues) == 0 { + return nil, err + } + + mu.Lock() + resIssues = append(resIssues, issues...) 
+ mu.Unlock() + + return nil, nil + } + }).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue { + return resIssues + }).WithLoadMode(goanalysis.LoadModeSyntax) +} + +func checkConstants(pass *analysis.Pass, lintCtx *linter.Context) ([]goanalysis.Issue, error) { + settings := lintCtx.Settings().Goconst + + cfg := goconstAPI.Config{ + IgnoreTests: settings.IgnoreTests, + MatchWithConstants: settings.MatchWithConstants, + MinStringLength: settings.MinStringLen, + MinOccurrences: settings.MinOccurrencesCount, + ParseNumbers: settings.ParseNumbers, + NumberMin: settings.NumberMin, + NumberMax: settings.NumberMax, + ExcludeTypes: map[goconstAPI.Type]bool{}, + } + + if settings.IgnoreCalls { + cfg.ExcludeTypes[goconstAPI.Call] = true + } + + goconstIssues, err := goconstAPI.Run(pass.Files, pass.Fset, &cfg) + if err != nil { + return nil, err + } + + if len(goconstIssues) == 0 { + return nil, nil + } + + res := make([]goanalysis.Issue, 0, len(goconstIssues)) + for _, i := range goconstIssues { + textBegin := fmt.Sprintf("string %s has %d occurrences", formatCode(i.Str, lintCtx.Cfg), i.OccurrencesCount) + var textEnd string + if i.MatchingConst == "" { + textEnd = ", make it a constant" + } else { + textEnd = fmt.Sprintf(", but such constant %s already exists", formatCode(i.MatchingConst, lintCtx.Cfg)) + } + res = append(res, goanalysis.NewIssue(&result.Issue{ + Pos: i.Pos, + Text: textBegin + textEnd, + FromLinter: goconstName, + }, pass)) + } + + return res, nil +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gocritic.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gocritic.go new file mode 100644 index 000000000..e98f23445 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gocritic.go @@ -0,0 +1,188 @@ +package golinters + +import ( + "fmt" + "go/ast" + "go/types" + "path/filepath" + "reflect" + "runtime" + "sort" + "strings" + "sync" + + gocriticlinter "github.com/go-critic/go-critic/framework/linter" + "golang.org/x/tools/go/analysis" + + "github.com/golangci/golangci-lint/pkg/config" + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" + "github.com/golangci/golangci-lint/pkg/lint/linter" + "github.com/golangci/golangci-lint/pkg/result" +) + +const gocriticName = "gocritic" + +func NewGocritic() *goanalysis.Linter { + var mu sync.Mutex + var resIssues []goanalysis.Issue + + sizes := types.SizesFor("gc", runtime.GOARCH) + + analyzer := &analysis.Analyzer{ + Name: gocriticName, + Doc: goanalysis.TheOnlyanalyzerDoc, + } + return goanalysis.NewLinter( + gocriticName, + `Provides many diagnostics that check for bugs, performance and style issues. +Extensible without recompilation through dynamic rules. +Dynamic rules are written declaratively with AST patterns, filters, report message and optional suggestion.`, + []*analysis.Analyzer{analyzer}, + nil, + ).WithContextSetter(func(lintCtx *linter.Context) { + analyzer.Run = func(pass *analysis.Pass) (interface{}, error) { + linterCtx := gocriticlinter.NewContext(pass.Fset, sizes) + enabledCheckers, err := buildEnabledCheckers(lintCtx, linterCtx) + if err != nil { + return nil, err + } + + linterCtx.SetPackageInfo(pass.TypesInfo, pass.Pkg) + var res []goanalysis.Issue + pkgIssues := runGocriticOnPackage(linterCtx, enabledCheckers, pass.Files) + for i := range pkgIssues { + res = append(res, goanalysis.NewIssue(&pkgIssues[i], pass)) + } + if len(res) == 0 { + return nil, nil + } + + mu.Lock() + resIssues = append(resIssues, res...) 
+ mu.Unlock() + + return nil, nil + } + }).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue { + return resIssues + }).WithLoadMode(goanalysis.LoadModeTypesInfo) +} + +func normalizeCheckerInfoParams(info *gocriticlinter.CheckerInfo) gocriticlinter.CheckerParams { + // lowercase info param keys here because golangci-lint's config parser lowercases all strings + ret := gocriticlinter.CheckerParams{} + for k, v := range info.Params { + ret[strings.ToLower(k)] = v + } + + return ret +} + +func configureCheckerInfo(info *gocriticlinter.CheckerInfo, allParams map[string]config.GocriticCheckSettings) error { + params := allParams[strings.ToLower(info.Name)] + if params == nil { // no config for this checker + return nil + } + + infoParams := normalizeCheckerInfoParams(info) + for k, p := range params { + v, ok := infoParams[k] + if ok { + v.Value = normalizeCheckerParamsValue(p) + continue + } + + // param `k` isn't supported + if len(info.Params) == 0 { + return fmt.Errorf("checker %s config param %s doesn't exist: checker doesn't have params", + info.Name, k) + } + + var supportedKeys []string + for sk := range info.Params { + supportedKeys = append(supportedKeys, sk) + } + sort.Strings(supportedKeys) + + return fmt.Errorf("checker %s config param %s doesn't exist, all existing: %s", + info.Name, k, supportedKeys) + } + + return nil +} + +// normalizeCheckerParamsValue normalizes value types. +// go-critic asserts that CheckerParam.Value has some specific types, +// but the file parsers (TOML, YAML, JSON) don't create the same representation for raw type. +// then we have to convert value types into the expected value types. +// Maybe in the future, this kind of conversion will be done in go-critic itself. +//nolint:exhaustive // only 3 types (int, bool, and string) are supported by CheckerParam.Value +func normalizeCheckerParamsValue(p interface{}) interface{} { + rv := reflect.ValueOf(p) + switch rv.Type().Kind() { + case reflect.Int64, reflect.Int32, reflect.Int16, reflect.Int8, reflect.Int: + return int(rv.Int()) + case reflect.Bool: + return rv.Bool() + case reflect.String: + return rv.String() + default: + return p + } +} + +func buildEnabledCheckers(lintCtx *linter.Context, linterCtx *gocriticlinter.Context) ([]*gocriticlinter.Checker, error) { + s := lintCtx.Settings().Gocritic + allParams := s.GetLowercasedParams() + + var enabledCheckers []*gocriticlinter.Checker + for _, info := range gocriticlinter.GetCheckersInfo() { + if !s.IsCheckEnabled(info.Name) { + continue + } + + if err := configureCheckerInfo(info, allParams); err != nil { + return nil, err + } + + c, err := gocriticlinter.NewChecker(linterCtx, info) + if err != nil { + return nil, err + } + enabledCheckers = append(enabledCheckers, c) + } + + return enabledCheckers, nil +} + +func runGocriticOnPackage(linterCtx *gocriticlinter.Context, checkers []*gocriticlinter.Checker, + files []*ast.File) []result.Issue { + var res []result.Issue + for _, f := range files { + filename := filepath.Base(linterCtx.FileSet.Position(f.Pos()).Filename) + linterCtx.SetFileInfo(filename, f) + + issues := runGocriticOnFile(linterCtx, f, checkers) + res = append(res, issues...) + } + return res +} + +func runGocriticOnFile(ctx *gocriticlinter.Context, f *ast.File, checkers []*gocriticlinter.Checker) []result.Issue { + var res []result.Issue + + for _, c := range checkers { + // All checkers are expected to use *lint.Context + // as read-only structure, so no copying is required. 
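+ // Convert the warnings reported by this checker on the file into
+ // golangci-lint issues, prefixing each message with the checker name.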
+ for _, warn := range c.Check(f) { + pos := ctx.FileSet.Position(warn.Node.Pos()) + res = append(res, result.Issue{ + Pos: pos, + Text: fmt.Sprintf("%s: %s", c.Info.Name, warn.Text), + FromLinter: gocriticName, + }) + } + } + + return res +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gocyclo.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gocyclo.go new file mode 100644 index 000000000..5c61fec72 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gocyclo.go @@ -0,0 +1,61 @@ +package golinters + +import ( + "fmt" + "sync" + + "github.com/fzipp/gocyclo" + "golang.org/x/tools/go/analysis" + + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" + "github.com/golangci/golangci-lint/pkg/lint/linter" + "github.com/golangci/golangci-lint/pkg/result" +) + +const gocycloName = "gocyclo" + +func NewGocyclo() *goanalysis.Linter { + var mu sync.Mutex + var resIssues []goanalysis.Issue + + analyzer := &analysis.Analyzer{ + Name: gocycloName, + Doc: goanalysis.TheOnlyanalyzerDoc, + } + return goanalysis.NewLinter( + gocycloName, + "Computes and checks the cyclomatic complexity of functions", + []*analysis.Analyzer{analyzer}, + nil, + ).WithContextSetter(func(lintCtx *linter.Context) { + analyzer.Run = func(pass *analysis.Pass) (interface{}, error) { + var stats gocyclo.Stats + for _, f := range pass.Files { + stats = gocyclo.AnalyzeASTFile(f, pass.Fset, stats) + } + if len(stats) == 0 { + return nil, nil + } + + stats = stats.SortAndFilter(-1, lintCtx.Settings().Gocyclo.MinComplexity) + + res := make([]goanalysis.Issue, 0, len(stats)) + for _, s := range stats { + res = append(res, goanalysis.NewIssue(&result.Issue{ + Pos: s.Pos, + Text: fmt.Sprintf("cyclomatic complexity %d of func %s is high (> %d)", + s.Complexity, formatCode(s.FuncName, lintCtx.Cfg), lintCtx.Settings().Gocyclo.MinComplexity), + FromLinter: gocycloName, + }, pass)) + } + + mu.Lock() + resIssues = append(resIssues, res...) 
+ mu.Unlock() + + return nil, nil + } + }).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue { + return resIssues + }).WithLoadMode(goanalysis.LoadModeSyntax) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/godot.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/godot.go new file mode 100644 index 000000000..625245890 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/godot.go @@ -0,0 +1,84 @@ +package golinters + +import ( + "sync" + + "github.com/tetafro/godot" + "golang.org/x/tools/go/analysis" + + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" + "github.com/golangci/golangci-lint/pkg/lint/linter" + "github.com/golangci/golangci-lint/pkg/result" +) + +const godotName = "godot" + +func NewGodot() *goanalysis.Linter { + var mu sync.Mutex + var resIssues []goanalysis.Issue + + analyzer := &analysis.Analyzer{ + Name: godotName, + Doc: goanalysis.TheOnlyanalyzerDoc, + } + return goanalysis.NewLinter( + godotName, + "Check if comments end in a period", + []*analysis.Analyzer{analyzer}, + nil, + ).WithContextSetter(func(lintCtx *linter.Context) { + cfg := lintCtx.Cfg.LintersSettings.Godot + settings := godot.Settings{ + Scope: godot.Scope(cfg.Scope), + Exclude: cfg.Exclude, + Period: true, + Capital: cfg.Capital, + } + + // Convert deprecated setting + if cfg.CheckAll { // nolint: staticcheck + settings.Scope = godot.TopLevelScope + } + + if settings.Scope == "" { + settings.Scope = godot.DeclScope + } + + analyzer.Run = func(pass *analysis.Pass) (interface{}, error) { + var issues []godot.Issue + for _, file := range pass.Files { + iss, err := godot.Run(file, pass.Fset, settings) + if err != nil { + return nil, err + } + issues = append(issues, iss...) + } + + if len(issues) == 0 { + return nil, nil + } + + res := make([]goanalysis.Issue, len(issues)) + for k, i := range issues { + issue := result.Issue{ + Pos: i.Pos, + Text: i.Message, + FromLinter: godotName, + Replacement: &result.Replacement{ + NewLines: []string{i.Replacement}, + }, + } + + res[k] = goanalysis.NewIssue(&issue, pass) + } + + mu.Lock() + resIssues = append(resIssues, res...) 
+ mu.Unlock() + + return nil, nil + } + }).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue { + return resIssues + }).WithLoadMode(goanalysis.LoadModeSyntax) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/godox.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/godox.go new file mode 100644 index 000000000..2a4dd9faf --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/godox.go @@ -0,0 +1,63 @@ +package golinters + +import ( + "go/token" + "strings" + "sync" + + "github.com/matoous/godox" + "golang.org/x/tools/go/analysis" + + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" + "github.com/golangci/golangci-lint/pkg/lint/linter" + "github.com/golangci/golangci-lint/pkg/result" +) + +const godoxName = "godox" + +func NewGodox() *goanalysis.Linter { + var mu sync.Mutex + var resIssues []goanalysis.Issue + + analyzer := &analysis.Analyzer{ + Name: godoxName, + Doc: goanalysis.TheOnlyanalyzerDoc, + } + return goanalysis.NewLinter( + godoxName, + "Tool for detection of FIXME, TODO and other comment keywords", + []*analysis.Analyzer{analyzer}, + nil, + ).WithContextSetter(func(lintCtx *linter.Context) { + analyzer.Run = func(pass *analysis.Pass) (interface{}, error) { + var issues []godox.Message + for _, file := range pass.Files { + issues = append(issues, godox.Run(file, pass.Fset, lintCtx.Settings().Godox.Keywords...)...) + } + + if len(issues) == 0 { + return nil, nil + } + + res := make([]goanalysis.Issue, len(issues)) + for k, i := range issues { + res[k] = goanalysis.NewIssue(&result.Issue{ + Pos: token.Position{ + Filename: i.Pos.Filename, + Line: i.Pos.Line, + }, + Text: strings.TrimRight(i.Message, "\n"), + FromLinter: godoxName, + }, pass) + } + + mu.Lock() + resIssues = append(resIssues, res...) 
+ mu.Unlock() + + return nil, nil + } + }).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue { + return resIssues + }).WithLoadMode(goanalysis.LoadModeSyntax) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goerr113.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/goerr113.go new file mode 100644 index 000000000..0c10005a0 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/goerr113.go @@ -0,0 +1,19 @@ +package golinters + +import ( + "github.com/Djarvur/go-err113" + "golang.org/x/tools/go/analysis" + + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" +) + +func NewGoerr113() *goanalysis.Linter { + return goanalysis.NewLinter( + "goerr113", + "Golang linter to check the errors handling expressions", + []*analysis.Analyzer{ + err113.NewAnalyzer(), + }, + nil, + ).WithLoadMode(goanalysis.LoadModeTypesInfo) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gofmt.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gofmt.go new file mode 100644 index 000000000..aa340dcf3 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gofmt.go @@ -0,0 +1,72 @@ +package golinters + +import ( + "sync" + + gofmtAPI "github.com/golangci/gofmt/gofmt" + "github.com/pkg/errors" + "golang.org/x/tools/go/analysis" + + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" + "github.com/golangci/golangci-lint/pkg/lint/linter" +) + +const gofmtName = "gofmt" + +func NewGofmt() *goanalysis.Linter { + var mu sync.Mutex + var resIssues []goanalysis.Issue + + analyzer := &analysis.Analyzer{ + Name: gofmtName, + Doc: goanalysis.TheOnlyanalyzerDoc, + } + return goanalysis.NewLinter( + gofmtName, + "Gofmt checks whether code was gofmt-ed. By default "+ + "this tool runs with -s option to check for code simplification", + []*analysis.Analyzer{analyzer}, + nil, + ).WithContextSetter(func(lintCtx *linter.Context) { + analyzer.Run = func(pass *analysis.Pass) (interface{}, error) { + var fileNames []string + for _, f := range pass.Files { + pos := pass.Fset.PositionFor(f.Pos(), false) + fileNames = append(fileNames, pos.Filename) + } + + var issues []goanalysis.Issue + + for _, f := range fileNames { + diff, err := gofmtAPI.Run(f, lintCtx.Settings().Gofmt.Simplify) + if err != nil { // TODO: skip + return nil, err + } + if diff == nil { + continue + } + + is, err := extractIssuesFromPatch(string(diff), lintCtx.Log, lintCtx, gofmtName) + if err != nil { + return nil, errors.Wrapf(err, "can't extract issues from gofmt diff output %q", string(diff)) + } + + for i := range is { + issues = append(issues, goanalysis.NewIssue(&is[i], pass)) + } + } + + if len(issues) == 0 { + return nil, nil + } + + mu.Lock() + resIssues = append(resIssues, issues...) 
+ mu.Unlock() + + return nil, nil + } + }).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue { + return resIssues + }).WithLoadMode(goanalysis.LoadModeSyntax) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gofmt_common.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gofmt_common.go new file mode 100644 index 000000000..39e8092e9 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gofmt_common.go @@ -0,0 +1,286 @@ +package golinters + +import ( + "bytes" + "fmt" + "go/token" + "strings" + + "github.com/pkg/errors" + diffpkg "github.com/sourcegraph/go-diff/diff" + + "github.com/golangci/golangci-lint/pkg/lint/linter" + "github.com/golangci/golangci-lint/pkg/logutils" + "github.com/golangci/golangci-lint/pkg/result" +) + +type Change struct { + LineRange result.Range + Replacement result.Replacement +} + +type diffLineType string + +const ( + diffLineAdded diffLineType = "added" + diffLineOriginal diffLineType = "original" + diffLineDeleted diffLineType = "deleted" +) + +type diffLine struct { + originalNumber int // 1-based original line number + typ diffLineType + data string // "+" or "-" stripped line +} + +type hunkChangesParser struct { + // needed because we merge currently added lines with the last original line + lastOriginalLine *diffLine + + // if the first line of diff is an adding we save all additions to replacementLinesToPrepend + replacementLinesToPrepend []string + + log logutils.Log + + lines []diffLine + + ret []Change +} + +func (p *hunkChangesParser) parseDiffLines(h *diffpkg.Hunk) { + lines := bytes.Split(h.Body, []byte{'\n'}) + currentOriginalLineNumer := int(h.OrigStartLine) + var ret []diffLine + + for i, line := range lines { + dl := diffLine{ + originalNumber: currentOriginalLineNumer, + } + + lineStr := string(line) + + if strings.HasPrefix(lineStr, "-") { + dl.typ = diffLineDeleted + dl.data = strings.TrimPrefix(lineStr, "-") + currentOriginalLineNumer++ + } else if strings.HasPrefix(lineStr, "+") { + dl.typ = diffLineAdded + dl.data = strings.TrimPrefix(lineStr, "+") + } else { + if i == len(lines)-1 && lineStr == "" { + // handle last \n: don't add an empty original line + break + } + + dl.typ = diffLineOriginal + dl.data = strings.TrimPrefix(lineStr, " ") + currentOriginalLineNumer++ + } + + ret = append(ret, dl) + } + + p.lines = ret +} + +func (p *hunkChangesParser) handleOriginalLine(line diffLine, i *int) { + if len(p.replacementLinesToPrepend) == 0 { + p.lastOriginalLine = &line + *i++ + return + } + + // check following added lines for the case: + // + added line 1 + // original line + // + added line 2 + + *i++ + var followingAddedLines []string + for ; *i < len(p.lines) && p.lines[*i].typ == diffLineAdded; *i++ { + followingAddedLines = append(followingAddedLines, p.lines[*i].data) + } + + p.ret = append(p.ret, Change{ + LineRange: result.Range{ + From: line.originalNumber, + To: line.originalNumber, + }, + Replacement: result.Replacement{ + NewLines: append(p.replacementLinesToPrepend, append([]string{line.data}, followingAddedLines...)...), + }, + }) + p.replacementLinesToPrepend = nil + p.lastOriginalLine = &line +} + +func (p *hunkChangesParser) handleDeletedLines(deletedLines []diffLine, addedLines []string) { + change := Change{ + LineRange: result.Range{ + From: deletedLines[0].originalNumber, + To: deletedLines[len(deletedLines)-1].originalNumber, + }, + } + + if len(addedLines) != 0 { + //nolint:gocritic + change.Replacement.NewLines = append(p.replacementLinesToPrepend, 
addedLines...) + if len(p.replacementLinesToPrepend) != 0 { + p.replacementLinesToPrepend = nil + } + + p.ret = append(p.ret, change) + return + } + + // delete-only change with possible prepending + if len(p.replacementLinesToPrepend) != 0 { + change.Replacement.NewLines = p.replacementLinesToPrepend + p.replacementLinesToPrepend = nil + } else { + change.Replacement.NeedOnlyDelete = true + } + + p.ret = append(p.ret, change) +} + +func (p *hunkChangesParser) handleAddedOnlyLines(addedLines []string) { + if p.lastOriginalLine == nil { + // the first line is added; the diff looks like: + // 1. + ... + // 2. - ... + // or + // 1. + ... + // 2. ... + + p.replacementLinesToPrepend = addedLines + return + } + + // add-only change merged into the last original line with possible prepending + p.ret = append(p.ret, Change{ + LineRange: result.Range{ + From: p.lastOriginalLine.originalNumber, + To: p.lastOriginalLine.originalNumber, + }, + Replacement: result.Replacement{ + NewLines: append(p.replacementLinesToPrepend, append([]string{p.lastOriginalLine.data}, addedLines...)...), + }, + }) + p.replacementLinesToPrepend = nil +} + +func (p *hunkChangesParser) parse(h *diffpkg.Hunk) []Change { + p.parseDiffLines(h) + + for i := 0; i < len(p.lines); { + line := p.lines[i] + if line.typ == diffLineOriginal { + p.handleOriginalLine(line, &i) + continue + } + + var deletedLines []diffLine + for ; i < len(p.lines) && p.lines[i].typ == diffLineDeleted; i++ { + deletedLines = append(deletedLines, p.lines[i]) + } + + var addedLines []string + for ; i < len(p.lines) && p.lines[i].typ == diffLineAdded; i++ { + addedLines = append(addedLines, p.lines[i].data) + } + + if len(deletedLines) != 0 { + p.handleDeletedLines(deletedLines, addedLines) + continue + } + + // no deletions, only additions + p.handleAddedOnlyLines(addedLines) + } + + if len(p.replacementLinesToPrepend) != 0 { + p.log.Infof("The diff contains only additions: no original or deleted lines: %#v", p.lines) + return nil + } + + return p.ret +} + +func getErrorTextForLinter(lintCtx *linter.Context, linterName string) string { + text := "File is not formatted" + switch linterName { + case gofumptName: + text = "File is not `gofumpt`-ed" + if lintCtx.Settings().Gofumpt.ExtraRules { + text += " with `-extra`" + } + case gofmtName: + text = "File is not `gofmt`-ed" + if lintCtx.Settings().Gofmt.Simplify { + text += " with `-s`" + } + case goimportsName: + text = "File is not `goimports`-ed" + if lintCtx.Settings().Goimports.LocalPrefixes != "" { + text += " with -local " + lintCtx.Settings().Goimports.LocalPrefixes + } + case gciName: + text = "File is not `gci`-ed" + localPrefixes := lintCtx.Settings().Gci.LocalPrefixes + goimportsFlag := lintCtx.Settings().Goimports.LocalPrefixes + if localPrefixes == "" && goimportsFlag != "" { + localPrefixes = goimportsFlag + } + + if localPrefixes != "" { + text += " with -local " + localPrefixes + } + } + return text +} + +func extractIssuesFromPatch(patch string, log logutils.Log, lintCtx *linter.Context, linterName string) ([]result.Issue, error) { + diffs, err := diffpkg.ParseMultiFileDiff([]byte(patch)) + if err != nil { + return nil, errors.Wrap(err, "can't parse patch") + } + + if len(diffs) == 0 { + return nil, fmt.Errorf("got no diffs from patch parser: %v", diffs) + } + + issues := []result.Issue{} + for _, d := range diffs { + if len(d.Hunks) == 0 { + log.Warnf("Got no hunks in diff %+v", d) + continue + } + + for _, hunk := range d.Hunks { + p := hunkChangesParser{ + log: log, + } + changes := 
p.parse(hunk) + for _, change := range changes { + change := change // fix scope + i := result.Issue{ + FromLinter: linterName, + Pos: token.Position{ + Filename: d.NewName, + Line: change.LineRange.From, + }, + Text: getErrorTextForLinter(lintCtx, linterName), + Replacement: &change.Replacement, + } + if change.LineRange.From != change.LineRange.To { + i.LineRange = &change.LineRange + } + + issues = append(issues, i) + } + } + } + + return issues, nil +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gofumpt.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gofumpt.go new file mode 100644 index 000000000..75c088144 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gofumpt.go @@ -0,0 +1,108 @@ +package golinters + +import ( + "bytes" + "fmt" + "io/ioutil" + "sync" + + "github.com/pkg/errors" + "github.com/shazow/go-diff/difflib" + "golang.org/x/tools/go/analysis" + "mvdan.cc/gofumpt/format" + + "github.com/golangci/golangci-lint/pkg/config" + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" + "github.com/golangci/golangci-lint/pkg/lint/linter" +) + +const gofumptName = "gofumpt" + +func NewGofumpt() *goanalysis.Linter { + var mu sync.Mutex + var resIssues []goanalysis.Issue + differ := difflib.New() + + analyzer := &analysis.Analyzer{ + Name: gofumptName, + Doc: goanalysis.TheOnlyanalyzerDoc, + } + return goanalysis.NewLinter( + gofumptName, + "Gofumpt checks whether code was gofumpt-ed.", + []*analysis.Analyzer{analyzer}, + nil, + ).WithContextSetter(func(lintCtx *linter.Context) { + settings := lintCtx.Settings().Gofumpt + + options := format.Options{ + LangVersion: getLangVersion(settings), + ExtraRules: settings.ExtraRules, + } + + analyzer.Run = func(pass *analysis.Pass) (interface{}, error) { + var fileNames []string + for _, f := range pass.Files { + pos := pass.Fset.PositionFor(f.Pos(), false) + fileNames = append(fileNames, pos.Filename) + } + + var issues []goanalysis.Issue + + for _, f := range fileNames { + input, err := ioutil.ReadFile(f) + if err != nil { + return nil, fmt.Errorf("unable to open file %s: %w", f, err) + } + + output, err := format.Source(input, options) + if err != nil { + return nil, fmt.Errorf("error while running gofumpt: %w", err) + } + + if !bytes.Equal(input, output) { + out := bytes.Buffer{} + _, err = out.WriteString(fmt.Sprintf("--- %[1]s\n+++ %[1]s\n", f)) + if err != nil { + return nil, fmt.Errorf("error while running gofumpt: %w", err) + } + + err = differ.Diff(&out, bytes.NewReader(input), bytes.NewReader(output)) + if err != nil { + return nil, fmt.Errorf("error while running gofumpt: %w", err) + } + + diff := out.String() + is, err := extractIssuesFromPatch(diff, lintCtx.Log, lintCtx, gofumptName) + if err != nil { + return nil, errors.Wrapf(err, "can't extract issues from gofumpt diff output %q", diff) + } + + for i := range is { + issues = append(issues, goanalysis.NewIssue(&is[i], pass)) + } + } + } + + if len(issues) == 0 { + return nil, nil + } + + mu.Lock() + resIssues = append(resIssues, issues...) + mu.Unlock() + + return nil, nil + } + }).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue { + return resIssues + }).WithLoadMode(goanalysis.LoadModeSyntax) +} + +func getLangVersion(settings config.GofumptSettings) string { + if settings.LangVersion == "" { + // TODO: defaults to "1.15", in the future (v2) must be set by using build.Default.ReleaseTags like staticcheck. 
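+ //
+ // A minimal sketch of that approach (illustrative only; assumes only the
+ // standard library go/build and strings packages):
+ //
+ //   tags := build.Default.ReleaseTags       // e.g. [..., "go1.16", "go1.17"]
+ //   return strings.TrimPrefix(tags[len(tags)-1], "go") // -> "1.17"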
+ return "1.15" + } + return settings.LangVersion +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goheader.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/goheader.go new file mode 100644 index 000000000..2ff587b0d --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/goheader.go @@ -0,0 +1,85 @@ +package golinters + +import ( + "go/token" + "sync" + + goheader "github.com/denis-tingajkin/go-header" + "golang.org/x/tools/go/analysis" + + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" + "github.com/golangci/golangci-lint/pkg/lint/linter" + "github.com/golangci/golangci-lint/pkg/result" +) + +const goHeaderName = "goheader" + +func NewGoHeader() *goanalysis.Linter { + var mu sync.Mutex + var issues []goanalysis.Issue + + analyzer := &analysis.Analyzer{ + Name: goHeaderName, + Doc: goanalysis.TheOnlyanalyzerDoc, + } + return goanalysis.NewLinter( + goHeaderName, + "Checks is file header matches to pattern", + []*analysis.Analyzer{analyzer}, + nil, + ).WithContextSetter(func(lintCtx *linter.Context) { + cfg := lintCtx.Cfg.LintersSettings.Goheader + c := &goheader.Configuration{ + Values: cfg.Values, + Template: cfg.Template, + TemplatePath: cfg.TemplatePath, + } + analyzer.Run = func(pass *analysis.Pass) (interface{}, error) { + if c.TemplatePath == "" && c.Template == "" { + // User did not pass template, so then do not run go-header linter + return nil, nil + } + template, err := c.GetTemplate() + if err != nil { + return nil, err + } + values, err := c.GetValues() + if err != nil { + return nil, err + } + a := goheader.New(goheader.WithTemplate(template), goheader.WithValues(values)) + var res []goanalysis.Issue + for _, file := range pass.Files { + path := pass.Fset.Position(file.Pos()).Filename + i := a.Analyze(&goheader.Target{ + File: file, + Path: path, + }) + if i == nil { + continue + } + issue := result.Issue{ + Pos: token.Position{ + Line: i.Location().Line + 1, + Column: i.Location().Position, + Filename: path, + }, + Text: i.Message(), + FromLinter: goHeaderName, + } + res = append(res, goanalysis.NewIssue(&issue, pass)) + } + if len(res) == 0 { + return nil, nil + } + + mu.Lock() + issues = append(issues, res...) 
+ mu.Unlock() + + return nil, nil + } + }).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue { + return issues + }).WithLoadMode(goanalysis.LoadModeSyntax) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goimports.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/goimports.go new file mode 100644 index 000000000..b58af1967 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/goimports.go @@ -0,0 +1,73 @@ +package golinters + +import ( + "sync" + + goimportsAPI "github.com/golangci/gofmt/goimports" + "github.com/pkg/errors" + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/imports" + + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" + "github.com/golangci/golangci-lint/pkg/lint/linter" +) + +const goimportsName = "goimports" + +func NewGoimports() *goanalysis.Linter { + var mu sync.Mutex + var resIssues []goanalysis.Issue + + analyzer := &analysis.Analyzer{ + Name: goimportsName, + Doc: goanalysis.TheOnlyanalyzerDoc, + } + return goanalysis.NewLinter( + goimportsName, + "In addition to fixing imports, goimports also formats your code in the same style as gofmt.", + []*analysis.Analyzer{analyzer}, + nil, + ).WithContextSetter(func(lintCtx *linter.Context) { + imports.LocalPrefix = lintCtx.Settings().Goimports.LocalPrefixes + analyzer.Run = func(pass *analysis.Pass) (interface{}, error) { + var fileNames []string + for _, f := range pass.Files { + pos := pass.Fset.PositionFor(f.Pos(), false) + fileNames = append(fileNames, pos.Filename) + } + + var issues []goanalysis.Issue + + for _, f := range fileNames { + diff, err := goimportsAPI.Run(f) + if err != nil { // TODO: skip + return nil, err + } + if diff == nil { + continue + } + + is, err := extractIssuesFromPatch(string(diff), lintCtx.Log, lintCtx, goimportsName) + if err != nil { + return nil, errors.Wrapf(err, "can't extract issues from gofmt diff output %q", string(diff)) + } + + for i := range is { + issues = append(issues, goanalysis.NewIssue(&is[i], pass)) + } + } + + if len(issues) == 0 { + return nil, nil + } + + mu.Lock() + resIssues = append(resIssues, issues...) 
+ mu.Unlock() + + return nil, nil + } + }).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue { + return resIssues + }).WithLoadMode(goanalysis.LoadModeSyntax) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/golint.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/golint.go new file mode 100644 index 000000000..3b1b1b66f --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/golint.go @@ -0,0 +1,78 @@ +package golinters + +import ( + "fmt" + "go/ast" + "go/token" + "go/types" + "sync" + + lintAPI "github.com/golangci/lint-1" + "golang.org/x/tools/go/analysis" + + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" + "github.com/golangci/golangci-lint/pkg/lint/linter" + "github.com/golangci/golangci-lint/pkg/result" +) + +func golintProcessPkg(minConfidence float64, files []*ast.File, fset *token.FileSet, + typesPkg *types.Package, typesInfo *types.Info) ([]result.Issue, error) { + l := new(lintAPI.Linter) + ps, err := l.LintPkg(files, fset, typesPkg, typesInfo) + if err != nil { + return nil, fmt.Errorf("can't lint %d files: %s", len(files), err) + } + + if len(ps) == 0 { + return nil, nil + } + + issues := make([]result.Issue, 0, len(ps)) // This is worst case + for idx := range ps { + if ps[idx].Confidence >= minConfidence { + issues = append(issues, result.Issue{ + Pos: ps[idx].Position, + Text: ps[idx].Text, + FromLinter: golintName, + }) + // TODO: use p.Link and p.Category + } + } + + return issues, nil +} + +const golintName = "golint" + +func NewGolint() *goanalysis.Linter { + var mu sync.Mutex + var resIssues []goanalysis.Issue + + analyzer := &analysis.Analyzer{ + Name: golintName, + Doc: goanalysis.TheOnlyanalyzerDoc, + } + return goanalysis.NewLinter( + golintName, + "Golint differs from gofmt. 
Gofmt reformats Go source code, whereas golint prints out style mistakes", + []*analysis.Analyzer{analyzer}, + nil, + ).WithContextSetter(func(lintCtx *linter.Context) { + analyzer.Run = func(pass *analysis.Pass) (interface{}, error) { + res, err := golintProcessPkg(lintCtx.Settings().Golint.MinConfidence, pass.Files, pass.Fset, pass.Pkg, pass.TypesInfo) + if err != nil || len(res) == 0 { + return nil, err + } + + mu.Lock() + for i := range res { + resIssues = append(resIssues, goanalysis.NewIssue(&res[i], pass)) + } + mu.Unlock() + + return nil, nil + } + }).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue { + return resIssues + }).WithLoadMode(goanalysis.LoadModeTypesInfo) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gomnd.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gomnd.go new file mode 100644 index 000000000..f7e71b7da --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gomnd.go @@ -0,0 +1,27 @@ +package golinters + +import ( + mnd "github.com/tommy-muehle/go-mnd/v2" + "golang.org/x/tools/go/analysis" + + "github.com/golangci/golangci-lint/pkg/config" + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" +) + +func NewGoMND(cfg *config.Config) *goanalysis.Linter { + analyzers := []*analysis.Analyzer{ + mnd.Analyzer, + } + + var linterCfg map[string]map[string]interface{} + if cfg != nil { + linterCfg = cfg.LintersSettings.Gomnd.Settings + } + + return goanalysis.NewLinter( + "gomnd", + "An analyzer to detect magic numbers.", + analyzers, + linterCfg, + ).WithLoadMode(goanalysis.LoadModeSyntax) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gomoddirectives.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gomoddirectives.go new file mode 100644 index 000000000..40d3bf786 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gomoddirectives.go @@ -0,0 +1,64 @@ +package golinters + +import ( + "sync" + + "github.com/ldez/gomoddirectives" + "golang.org/x/tools/go/analysis" + + "github.com/golangci/golangci-lint/pkg/config" + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" + "github.com/golangci/golangci-lint/pkg/lint/linter" + "github.com/golangci/golangci-lint/pkg/result" +) + +const goModDirectivesName = "gomoddirectives" + +// NewGoModDirectives returns a new gomoddirectives linter. 
+func NewGoModDirectives(settings *config.GoModDirectivesSettings) *goanalysis.Linter { + var issues []goanalysis.Issue + var once sync.Once + + var opts gomoddirectives.Options + if settings != nil { + opts.ReplaceAllowLocal = settings.ReplaceLocal + opts.ReplaceAllowList = settings.ReplaceAllowList + opts.RetractAllowNoExplanation = settings.RetractAllowNoExplanation + opts.ExcludeForbidden = settings.ExcludeForbidden + } + + analyzer := &analysis.Analyzer{ + Name: goanalysis.TheOnlyAnalyzerName, + Doc: goanalysis.TheOnlyanalyzerDoc, + } + + return goanalysis.NewLinter( + goModDirectivesName, + "Manage the use of 'replace', 'retract', and 'excludes' directives in go.mod.", + []*analysis.Analyzer{analyzer}, + nil, + ).WithContextSetter(func(lintCtx *linter.Context) { + analyzer.Run = func(pass *analysis.Pass) (interface{}, error) { + once.Do(func() { + results, err := gomoddirectives.Analyze(opts) + if err != nil { + lintCtx.Log.Warnf("running %s failed: %s: "+ + "if you are not using go modules it is suggested to disable this linter", goModDirectivesName, err) + return + } + + for _, p := range results { + issues = append(issues, goanalysis.NewIssue(&result.Issue{ + FromLinter: goModDirectivesName, + Pos: p.Start, + Text: p.Reason, + }, pass)) + } + }) + + return nil, nil + } + }).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue { + return issues + }).WithLoadMode(goanalysis.LoadModeSyntax) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gomodguard.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gomodguard.go new file mode 100644 index 000000000..30ca6cc3d --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gomodguard.go @@ -0,0 +1,99 @@ +package golinters + +import ( + "sync" + + "github.com/ryancurrah/gomodguard" + "golang.org/x/tools/go/analysis" + + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" + "github.com/golangci/golangci-lint/pkg/lint/linter" + "github.com/golangci/golangci-lint/pkg/result" +) + +const ( + gomodguardName = "gomodguard" + gomodguardDesc = "Allow and block list linter for direct Go module dependencies. " + + "This is different from depguard where there are different block " + + "types for example version constraints and module recommendations." +) + +// NewGomodguard returns a new Gomodguard linter. 
+func NewGomodguard() *goanalysis.Linter { + var ( + issues []goanalysis.Issue + mu = sync.Mutex{} + analyzer = &analysis.Analyzer{ + Name: goanalysis.TheOnlyAnalyzerName, + Doc: goanalysis.TheOnlyanalyzerDoc, + } + ) + + return goanalysis.NewLinter( + gomodguardName, + gomodguardDesc, + []*analysis.Analyzer{analyzer}, + nil, + ).WithContextSetter(func(lintCtx *linter.Context) { + linterCfg := lintCtx.Cfg.LintersSettings.Gomodguard + + processorCfg := &gomodguard.Configuration{} + processorCfg.Allowed.Modules = linterCfg.Allowed.Modules + processorCfg.Allowed.Domains = linterCfg.Allowed.Domains + processorCfg.Blocked.LocalReplaceDirectives = linterCfg.Blocked.LocalReplaceDirectives + + for n := range linterCfg.Blocked.Modules { + for k, v := range linterCfg.Blocked.Modules[n] { + m := map[string]gomodguard.BlockedModule{k: { + Recommendations: v.Recommendations, + Reason: v.Reason, + }} + processorCfg.Blocked.Modules = append(processorCfg.Blocked.Modules, m) + break + } + } + + for n := range linterCfg.Blocked.Versions { + for k, v := range linterCfg.Blocked.Versions[n] { + m := map[string]gomodguard.BlockedVersion{k: { + Version: v.Version, + Reason: v.Reason, + }} + processorCfg.Blocked.Versions = append(processorCfg.Blocked.Versions, m) + break + } + } + + processor, err := gomodguard.NewProcessor(processorCfg) + if err != nil { + lintCtx.Log.Warnf("running gomodguard failed: %s: if you are not using go modules "+ + "it is suggested to disable this linter", err) + return + } + + analyzer.Run = func(pass *analysis.Pass) (interface{}, error) { + var files []string + + for _, file := range pass.Files { + files = append(files, pass.Fset.PositionFor(file.Pos(), false).Filename) + } + + gomodguardIssues := processor.ProcessFiles(files) + + mu.Lock() + defer mu.Unlock() + + for _, gomodguardIssue := range gomodguardIssues { + issues = append(issues, goanalysis.NewIssue(&result.Issue{ + FromLinter: gomodguardName, + Pos: gomodguardIssue.Position, + Text: gomodguardIssue.Reason, + }, pass)) + } + + return nil, nil + } + }).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue { + return issues + }).WithLoadMode(goanalysis.LoadModeSyntax) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goprintffuncname.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/goprintffuncname.go new file mode 100644 index 000000000..c5516dc7f --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/goprintffuncname.go @@ -0,0 +1,17 @@ +package golinters + +import ( + "github.com/jirfag/go-printf-func-name/pkg/analyzer" + "golang.org/x/tools/go/analysis" + + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" +) + +func NewGoPrintfFuncName() *goanalysis.Linter { + return goanalysis.NewLinter( + "goprintffuncname", + "Checks that printf-like functions are named with `f` at the end", + []*analysis.Analyzer{analyzer.Analyzer}, + nil, + ).WithLoadMode(goanalysis.LoadModeSyntax) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gosec.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gosec.go new file mode 100644 index 000000000..328ba5ccc --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gosec.go @@ -0,0 +1,128 @@ +package golinters + +import ( + "fmt" + "go/token" + "io/ioutil" + "log" + "strconv" + "strings" + "sync" + + "github.com/securego/gosec/v2" + "github.com/securego/gosec/v2/rules" + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/packages" + + 
"github.com/golangci/golangci-lint/pkg/config" + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" + "github.com/golangci/golangci-lint/pkg/lint/linter" + "github.com/golangci/golangci-lint/pkg/result" +) + +const gosecName = "gosec" + +func NewGosec(settings *config.GoSecSettings) *goanalysis.Linter { + var mu sync.Mutex + var resIssues []goanalysis.Issue + + gasConfig := gosec.NewConfig() + + var filters []rules.RuleFilter + if settings != nil { + filters = gosecRuleFilters(settings.Includes, settings.Excludes) + + for k, v := range settings.Config { + // Uses ToUpper because the parsing of the map's key change the key to lowercase. + // The value is not impacted by that: the case is respected. + gasConfig.Set(strings.ToUpper(k), v) + } + } + + ruleDefinitions := rules.Generate(filters...) + + logger := log.New(ioutil.Discard, "", 0) + + analyzer := &analysis.Analyzer{ + Name: gosecName, + Doc: goanalysis.TheOnlyanalyzerDoc, + } + return goanalysis.NewLinter( + gosecName, + "Inspects source code for security problems", + []*analysis.Analyzer{analyzer}, + nil, + ).WithContextSetter(func(lintCtx *linter.Context) { + analyzer.Run = func(pass *analysis.Pass) (interface{}, error) { + gosecAnalyzer := gosec.NewAnalyzer(gasConfig, true, logger) + gosecAnalyzer.LoadRules(ruleDefinitions.Builders()) + + pkg := &packages.Package{ + Fset: pass.Fset, + Syntax: pass.Files, + Types: pass.Pkg, + TypesInfo: pass.TypesInfo, + } + gosecAnalyzer.Check(pkg) + issues, _, _ := gosecAnalyzer.Report() + if len(issues) == 0 { + return nil, nil + } + + res := make([]goanalysis.Issue, 0, len(issues)) + for _, i := range issues { + text := fmt.Sprintf("%s: %s", i.RuleID, i.What) // TODO: use severity and confidence + var r *result.Range + line, err := strconv.Atoi(i.Line) + if err != nil { + r = &result.Range{} + if n, rerr := fmt.Sscanf(i.Line, "%d-%d", &r.From, &r.To); rerr != nil || n != 2 { + lintCtx.Log.Warnf("Can't convert gosec line number %q of %v to int: %s", i.Line, i, err) + continue + } + line = r.From + } + + column, err := strconv.Atoi(i.Col) + if err != nil { + lintCtx.Log.Warnf("Can't convert gosec column number %q of %v to int: %s", i.Col, i, err) + continue + } + + res = append(res, goanalysis.NewIssue(&result.Issue{ + Pos: token.Position{ + Filename: i.File, + Line: line, + Column: column, + }, + Text: text, + LineRange: r, + FromLinter: gosecName, + }, pass)) + } + + mu.Lock() + resIssues = append(resIssues, res...) 
+ mu.Unlock() + + return nil, nil + } + }).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue { + return resIssues + }).WithLoadMode(goanalysis.LoadModeTypesInfo) +} + +// based on https://github.com/securego/gosec/blob/569328eade2ccbad4ce2d0f21ee158ab5356a5cf/cmd/gosec/main.go#L170-L188 +func gosecRuleFilters(includes, excludes []string) []rules.RuleFilter { + var filters []rules.RuleFilter + + if len(includes) > 0 { + filters = append(filters, rules.NewRuleFilter(false, includes...)) + } + + if len(excludes) > 0 { + filters = append(filters, rules.NewRuleFilter(true, excludes...)) + } + + return filters +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gosimple.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gosimple.go new file mode 100644 index 000000000..fa14f1a96 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gosimple.go @@ -0,0 +1,21 @@ +package golinters + +import ( + "honnef.co/go/tools/simple" + + "github.com/golangci/golangci-lint/pkg/config" + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" +) + +func NewGosimple(settings *config.StaticCheckSettings) *goanalysis.Linter { + cfg := staticCheckConfig(settings) + + analyzers := setupStaticCheckAnalyzers(simple.Analyzers, getGoVersion(settings), cfg.Checks) + + return goanalysis.NewLinter( + "gosimple", + "Linter for Go source code that specializes in simplifying a code", + analyzers, + nil, + ).WithLoadMode(goanalysis.LoadModeTypesInfo) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/govet.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/govet.go new file mode 100644 index 000000000..b3860e017 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/govet.go @@ -0,0 +1,185 @@ +package golinters + +import ( + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/asmdecl" + "golang.org/x/tools/go/analysis/passes/assign" + "golang.org/x/tools/go/analysis/passes/atomic" + "golang.org/x/tools/go/analysis/passes/atomicalign" + "golang.org/x/tools/go/analysis/passes/bools" + _ "golang.org/x/tools/go/analysis/passes/buildssa" // unused, internal analyzer + "golang.org/x/tools/go/analysis/passes/buildtag" + "golang.org/x/tools/go/analysis/passes/cgocall" + "golang.org/x/tools/go/analysis/passes/composite" + "golang.org/x/tools/go/analysis/passes/copylock" + _ "golang.org/x/tools/go/analysis/passes/ctrlflow" // unused, internal analyzer + "golang.org/x/tools/go/analysis/passes/deepequalerrors" + "golang.org/x/tools/go/analysis/passes/errorsas" + "golang.org/x/tools/go/analysis/passes/fieldalignment" + "golang.org/x/tools/go/analysis/passes/findcall" + "golang.org/x/tools/go/analysis/passes/framepointer" + "golang.org/x/tools/go/analysis/passes/httpresponse" + "golang.org/x/tools/go/analysis/passes/ifaceassert" + _ "golang.org/x/tools/go/analysis/passes/inspect" // unused internal analyzer + "golang.org/x/tools/go/analysis/passes/loopclosure" + "golang.org/x/tools/go/analysis/passes/lostcancel" + "golang.org/x/tools/go/analysis/passes/nilfunc" + "golang.org/x/tools/go/analysis/passes/nilness" + _ "golang.org/x/tools/go/analysis/passes/pkgfact" // unused, internal analyzer + "golang.org/x/tools/go/analysis/passes/printf" + "golang.org/x/tools/go/analysis/passes/reflectvaluecompare" + "golang.org/x/tools/go/analysis/passes/shadow" + "golang.org/x/tools/go/analysis/passes/shift" + "golang.org/x/tools/go/analysis/passes/sigchanyzer" + "golang.org/x/tools/go/analysis/passes/sortslice" + 
"golang.org/x/tools/go/analysis/passes/stdmethods" + "golang.org/x/tools/go/analysis/passes/stringintconv" + "golang.org/x/tools/go/analysis/passes/structtag" + "golang.org/x/tools/go/analysis/passes/testinggoroutine" + "golang.org/x/tools/go/analysis/passes/tests" + "golang.org/x/tools/go/analysis/passes/unmarshal" + "golang.org/x/tools/go/analysis/passes/unreachable" + "golang.org/x/tools/go/analysis/passes/unsafeptr" + "golang.org/x/tools/go/analysis/passes/unusedresult" + "golang.org/x/tools/go/analysis/passes/unusedwrite" + + "github.com/golangci/golangci-lint/pkg/config" + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" +) + +var ( + allAnalyzers = []*analysis.Analyzer{ + asmdecl.Analyzer, + assign.Analyzer, + atomic.Analyzer, + atomicalign.Analyzer, + bools.Analyzer, + buildtag.Analyzer, + cgocall.Analyzer, + composite.Analyzer, + copylock.Analyzer, + deepequalerrors.Analyzer, + errorsas.Analyzer, + fieldalignment.Analyzer, + findcall.Analyzer, + framepointer.Analyzer, + httpresponse.Analyzer, + ifaceassert.Analyzer, + loopclosure.Analyzer, + lostcancel.Analyzer, + nilfunc.Analyzer, + nilness.Analyzer, + printf.Analyzer, + reflectvaluecompare.Analyzer, + shadow.Analyzer, + shift.Analyzer, + sigchanyzer.Analyzer, + sortslice.Analyzer, + stdmethods.Analyzer, + stringintconv.Analyzer, + structtag.Analyzer, + testinggoroutine.Analyzer, + tests.Analyzer, + unmarshal.Analyzer, + unreachable.Analyzer, + unsafeptr.Analyzer, + unusedresult.Analyzer, + unusedwrite.Analyzer, + } + + // https://github.com/golang/go/blob/879db69ce2de814bc3203c39b45617ba51cc5366/src/cmd/vet/main.go#L40-L68 + defaultAnalyzers = []*analysis.Analyzer{ + asmdecl.Analyzer, + assign.Analyzer, + atomic.Analyzer, + bools.Analyzer, + buildtag.Analyzer, + cgocall.Analyzer, + composite.Analyzer, + copylock.Analyzer, + errorsas.Analyzer, + framepointer.Analyzer, + httpresponse.Analyzer, + ifaceassert.Analyzer, + loopclosure.Analyzer, + lostcancel.Analyzer, + nilfunc.Analyzer, + printf.Analyzer, + shift.Analyzer, + sigchanyzer.Analyzer, + stdmethods.Analyzer, + stringintconv.Analyzer, + structtag.Analyzer, + testinggoroutine.Analyzer, + tests.Analyzer, + unmarshal.Analyzer, + unreachable.Analyzer, + unsafeptr.Analyzer, + unusedresult.Analyzer, + } +) + +func isAnalyzerEnabled(name string, cfg *config.GovetSettings, defaultAnalyzers []*analysis.Analyzer) bool { + if cfg.EnableAll { + for _, n := range cfg.Disable { + if n == name { + return false + } + } + return true + } + // Raw for loops should be OK on small slice lengths. + for _, n := range cfg.Enable { + if n == name { + return true + } + } + for _, n := range cfg.Disable { + if n == name { + return false + } + } + if cfg.DisableAll { + return false + } + for _, a := range defaultAnalyzers { + if a.Name == name { + return true + } + } + return false +} + +func analyzersFromConfig(cfg *config.GovetSettings) []*analysis.Analyzer { + if cfg == nil { + return defaultAnalyzers + } + + if cfg.CheckShadowing { + // Keeping for backward compatibility. 
+ cfg.Enable = append(cfg.Enable, shadow.Analyzer.Name) + } + + var enabledAnalyzers []*analysis.Analyzer + for _, a := range allAnalyzers { + if isAnalyzerEnabled(a.Name, cfg, defaultAnalyzers) { + enabledAnalyzers = append(enabledAnalyzers, a) + } + } + + return enabledAnalyzers +} + +func NewGovet(cfg *config.GovetSettings) *goanalysis.Linter { + var settings map[string]map[string]interface{} + if cfg != nil { + settings = cfg.Settings + } + return goanalysis.NewLinter( + "govet", + "Vet examines Go source code and reports suspicious constructs, "+ + "such as Printf calls whose arguments do not align with the format string", + analyzersFromConfig(cfg), + settings, + ).WithLoadMode(goanalysis.LoadModeTypesInfo) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/ifshort.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/ifshort.go new file mode 100644 index 000000000..c26f08e40 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/ifshort.go @@ -0,0 +1,28 @@ +package golinters + +import ( + "github.com/esimonov/ifshort/pkg/analyzer" + "golang.org/x/tools/go/analysis" + + "github.com/golangci/golangci-lint/pkg/config" + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" +) + +func NewIfshort(settings *config.IfshortSettings) *goanalysis.Linter { + var cfg map[string]map[string]interface{} + if settings != nil { + cfg = map[string]map[string]interface{}{ + analyzer.Analyzer.Name: { + "max-decl-lines": settings.MaxDeclLines, + "max-decl-chars": settings.MaxDeclChars, + }, + } + } + + return goanalysis.NewLinter( + "ifshort", + "Checks that your code uses short syntax for if-statements whenever possible", + []*analysis.Analyzer{analyzer.Analyzer}, + cfg, + ).WithLoadMode(goanalysis.LoadModeSyntax) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/importas.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/importas.go new file mode 100644 index 000000000..523aa257b --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/importas.go @@ -0,0 +1,48 @@ +package golinters + +import ( + "fmt" + "strconv" + + "github.com/julz/importas" // nolint: misspell + "golang.org/x/tools/go/analysis" + + "github.com/golangci/golangci-lint/pkg/config" + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" + "github.com/golangci/golangci-lint/pkg/lint/linter" +) + +func NewImportAs(settings *config.ImportAsSettings) *goanalysis.Linter { + analyzer := importas.Analyzer + + return goanalysis.NewLinter( + analyzer.Name, + analyzer.Doc, + []*analysis.Analyzer{analyzer}, + nil, + ).WithContextSetter(func(lintCtx *linter.Context) { + if settings == nil { + return + } + if len(settings.Alias) == 0 { + lintCtx.Log.Infof("importas settings found, but no aliases listed. 
List aliases under alias: key.") // nolint: misspell + } + + err := analyzer.Flags.Set("no-unaliased", strconv.FormatBool(settings.NoUnaliased)) + if err != nil { + lintCtx.Log.Errorf("failed to parse configuration: %v", err) + } + + for _, a := range settings.Alias { + if a.Pkg == "" { + lintCtx.Log.Errorf("invalid configuration, empty package: pkg=%s alias=%s", a.Pkg, a.Alias) + continue + } + + err := analyzer.Flags.Set("alias", fmt.Sprintf("%s:%s", a.Pkg, a.Alias)) + if err != nil { + lintCtx.Log.Errorf("failed to parse configuration: %v", err) + } + } + }).WithLoadMode(goanalysis.LoadModeTypesInfo) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/ineffassign.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/ineffassign.go new file mode 100644 index 000000000..c87bb2fa5 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/ineffassign.go @@ -0,0 +1,17 @@ +package golinters + +import ( + "github.com/gordonklaus/ineffassign/pkg/ineffassign" + "golang.org/x/tools/go/analysis" + + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" +) + +func NewIneffassign() *goanalysis.Linter { + return goanalysis.NewLinter( + "ineffassign", + "Detects when assignments to existing variables are not used", + []*analysis.Analyzer{ineffassign.Analyzer}, + nil, + ).WithLoadMode(goanalysis.LoadModeSyntax) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/interfacer.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/interfacer.go new file mode 100644 index 000000000..1edbe894c --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/interfacer.go @@ -0,0 +1,67 @@ +package golinters + +import ( + "sync" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/buildssa" + "mvdan.cc/interfacer/check" + + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" + "github.com/golangci/golangci-lint/pkg/lint/linter" + "github.com/golangci/golangci-lint/pkg/result" +) + +const interfacerName = "interfacer" + +func NewInterfacer() *goanalysis.Linter { + var mu sync.Mutex + var resIssues []goanalysis.Issue + + analyzer := &analysis.Analyzer{ + Name: interfacerName, + Doc: goanalysis.TheOnlyanalyzerDoc, + Requires: []*analysis.Analyzer{buildssa.Analyzer}, + } + return goanalysis.NewLinter( + interfacerName, + "Linter that suggests narrower interface types", + []*analysis.Analyzer{analyzer}, + nil, + ).WithContextSetter(func(lintCtx *linter.Context) { + analyzer.Run = func(pass *analysis.Pass) (interface{}, error) { + ssa := pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA) + ssaPkg := ssa.Pkg + c := &check.Checker{} + prog := goanalysis.MakeFakeLoaderProgram(pass) + c.Program(prog) + c.ProgramSSA(ssaPkg.Prog) + + issues, err := c.Check() + if err != nil { + return nil, err + } + if len(issues) == 0 { + return nil, nil + } + + res := make([]goanalysis.Issue, 0, len(issues)) + for _, i := range issues { + pos := pass.Fset.Position(i.Pos()) + res = append(res, goanalysis.NewIssue(&result.Issue{ + Pos: pos, + Text: i.Message(), + FromLinter: interfacerName, + }, pass)) + } + + mu.Lock() + resIssues = append(resIssues, res...) 
+ mu.Unlock() + + return nil, nil + } + }).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue { + return resIssues + }).WithLoadMode(goanalysis.LoadModeTypesInfo) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/lll.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/lll.go new file mode 100644 index 000000000..5f26e91dd --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/lll.go @@ -0,0 +1,124 @@ +package golinters + +import ( + "bufio" + "fmt" + "go/token" + "os" + "strings" + "sync" + "unicode/utf8" + + "golang.org/x/tools/go/analysis" + + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" + "github.com/golangci/golangci-lint/pkg/lint/linter" + "github.com/golangci/golangci-lint/pkg/result" +) + +func getLLLIssuesForFile(filename string, maxLineLen int, tabSpaces string) ([]result.Issue, error) { + var res []result.Issue + + f, err := os.Open(filename) + if err != nil { + return nil, fmt.Errorf("can't open file %s: %s", filename, err) + } + defer f.Close() + + lineNumber := 1 + scanner := bufio.NewScanner(f) + for scanner.Scan() { + line := scanner.Text() + line = strings.Replace(line, "\t", tabSpaces, -1) + lineLen := utf8.RuneCountInString(line) + if lineLen > maxLineLen { + res = append(res, result.Issue{ + Pos: token.Position{ + Filename: filename, + Line: lineNumber, + }, + Text: fmt.Sprintf("line is %d characters", lineLen), + FromLinter: lllName, + }) + } + lineNumber++ + } + + if err := scanner.Err(); err != nil { + if err == bufio.ErrTooLong && maxLineLen < bufio.MaxScanTokenSize { + // scanner.Scan() might fail if the line is longer than bufio.MaxScanTokenSize + // In the case where the specified maxLineLen is smaller than bufio.MaxScanTokenSize + // we can return this line as a long line instead of returning an error. + // The reason for this change is that this case might happen with autogenerated files + // The go-bindata tool for instance might generate a file with a very long line. + // In this case, as it's a auto generated file, the warning returned by lll will + // be ignored. + // But if we return a linter error here, and this error happens for an autogenerated + // file the error will be discarded (fine), but all the subsequent errors for lll will + // be discarded for other files and we'll miss legit error. 
+ res = append(res, result.Issue{ + Pos: token.Position{ + Filename: filename, + Line: lineNumber, + Column: 1, + }, + Text: fmt.Sprintf("line is more than %d characters", bufio.MaxScanTokenSize), + FromLinter: lllName, + }) + } else { + return nil, fmt.Errorf("can't scan file %s: %s", filename, err) + } + } + + return res, nil +} + +const lllName = "lll" + +func NewLLL() *goanalysis.Linter { + var mu sync.Mutex + var resIssues []goanalysis.Issue + + analyzer := &analysis.Analyzer{ + Name: lllName, + Doc: goanalysis.TheOnlyanalyzerDoc, + } + return goanalysis.NewLinter( + lllName, + "Reports long lines", + []*analysis.Analyzer{analyzer}, + nil, + ).WithContextSetter(func(lintCtx *linter.Context) { + analyzer.Run = func(pass *analysis.Pass) (interface{}, error) { + var fileNames []string + for _, f := range pass.Files { + pos := pass.Fset.PositionFor(f.Pos(), false) + fileNames = append(fileNames, pos.Filename) + } + + var res []goanalysis.Issue + spaces := strings.Repeat(" ", lintCtx.Settings().Lll.TabWidth) + for _, f := range fileNames { + issues, err := getLLLIssuesForFile(f, lintCtx.Settings().Lll.LineLength, spaces) + if err != nil { + return nil, err + } + for i := range issues { + res = append(res, goanalysis.NewIssue(&issues[i], pass)) + } + } + + if len(res) == 0 { + return nil, nil + } + + mu.Lock() + resIssues = append(resIssues, res...) + mu.Unlock() + + return nil, nil + } + }).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue { + return resIssues + }).WithLoadMode(goanalysis.LoadModeSyntax) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/makezero.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/makezero.go new file mode 100644 index 000000000..cdde09291 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/makezero.go @@ -0,0 +1,60 @@ +package golinters + +import ( + "sync" + + "github.com/ashanbrown/makezero/makezero" + "github.com/pkg/errors" + "golang.org/x/tools/go/analysis" + + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" + "github.com/golangci/golangci-lint/pkg/lint/linter" + "github.com/golangci/golangci-lint/pkg/result" +) + +const makezeroName = "makezero" + +func NewMakezero() *goanalysis.Linter { + var mu sync.Mutex + var resIssues []goanalysis.Issue + + analyzer := &analysis.Analyzer{ + Name: makezeroName, + Doc: goanalysis.TheOnlyanalyzerDoc, + } + return goanalysis.NewLinter( + makezeroName, + "Finds slice declarations with non-zero initial length", + []*analysis.Analyzer{analyzer}, + nil, + ).WithContextSetter(func(lintCtx *linter.Context) { + s := &lintCtx.Settings().Makezero + + analyzer.Run = func(pass *analysis.Pass) (interface{}, error) { + var res []goanalysis.Issue + linter := makezero.NewLinter(s.Always) + for _, file := range pass.Files { + hints, err := linter.Run(pass.Fset, pass.TypesInfo, file) + if err != nil { + return nil, errors.Wrapf(err, "makezero linter failed on file %q", file.Name.String()) + } + for _, hint := range hints { + res = append(res, goanalysis.NewIssue(&result.Issue{ + Pos: hint.Position(), + Text: hint.Details(), + FromLinter: makezeroName, + }, pass)) + } + } + if len(res) == 0 { + return nil, nil + } + mu.Lock() + resIssues = append(resIssues, res...) 
+ mu.Unlock() + return nil, nil + } + }).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue { + return resIssues + }).WithLoadMode(goanalysis.LoadModeTypesInfo) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/maligned.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/maligned.go new file mode 100644 index 000000000..22422b8c6 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/maligned.go @@ -0,0 +1,58 @@ +package golinters + +import ( + "fmt" + "sync" + + malignedAPI "github.com/golangci/maligned" + "golang.org/x/tools/go/analysis" + + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" + "github.com/golangci/golangci-lint/pkg/lint/linter" + "github.com/golangci/golangci-lint/pkg/result" +) + +func NewMaligned() *goanalysis.Linter { + const linterName = "maligned" + var mu sync.Mutex + var res []goanalysis.Issue + analyzer := &analysis.Analyzer{ + Name: linterName, + Doc: goanalysis.TheOnlyanalyzerDoc, + } + return goanalysis.NewLinter( + linterName, + "Tool to detect Go structs that would take less memory if their fields were sorted", + []*analysis.Analyzer{analyzer}, + nil, + ).WithContextSetter(func(lintCtx *linter.Context) { + analyzer.Run = func(pass *analysis.Pass) (interface{}, error) { + prog := goanalysis.MakeFakeLoaderProgram(pass) + + malignedIssues := malignedAPI.Run(prog) + if len(malignedIssues) == 0 { + return nil, nil + } + + issues := make([]goanalysis.Issue, 0, len(malignedIssues)) + for _, i := range malignedIssues { + text := fmt.Sprintf("struct of size %d bytes could be of size %d bytes", i.OldSize, i.NewSize) + if lintCtx.Settings().Maligned.SuggestNewOrder { + text += fmt.Sprintf(":\n%s", formatCodeBlock(i.NewStructDef, lintCtx.Cfg)) + } + issues = append(issues, goanalysis.NewIssue(&result.Issue{ + Pos: i.Pos, + Text: text, + FromLinter: linterName, + }, pass)) + } + + mu.Lock() + res = append(res, issues...) + mu.Unlock() + return nil, nil + } + }).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue { + return res + }).WithLoadMode(goanalysis.LoadModeTypesInfo) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/misspell.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/misspell.go new file mode 100644 index 000000000..80ecf9bb6 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/misspell.go @@ -0,0 +1,132 @@ +package golinters + +import ( + "fmt" + "go/token" + "strings" + "sync" + + "github.com/golangci/misspell" + "golang.org/x/tools/go/analysis" + + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" + "github.com/golangci/golangci-lint/pkg/lint/linter" + "github.com/golangci/golangci-lint/pkg/result" +) + +func runMisspellOnFile(fileName string, r *misspell.Replacer, lintCtx *linter.Context) ([]result.Issue, error) { + var res []result.Issue + fileContent, err := lintCtx.FileCache.GetFileBytes(fileName) + if err != nil { + return nil, fmt.Errorf("can't get file %s contents: %s", fileName, err) + } + + // use r.Replace, not r.ReplaceGo because r.ReplaceGo doesn't find + // issues inside strings: it searches only inside comments. r.Replace + // searches all words: it treats input as a plain text. A standalone misspell + // tool uses r.Replace by default. 
+ _, diffs := r.Replace(string(fileContent)) + for _, diff := range diffs { + text := fmt.Sprintf("`%s` is a misspelling of `%s`", diff.Original, diff.Corrected) + pos := token.Position{ + Filename: fileName, + Line: diff.Line, + Column: diff.Column + 1, + } + replacement := &result.Replacement{ + Inline: &result.InlineFix{ + StartCol: diff.Column, + Length: len(diff.Original), + NewString: diff.Corrected, + }, + } + + res = append(res, result.Issue{ + Pos: pos, + Text: text, + FromLinter: misspellName, + Replacement: replacement, + }) + } + + return res, nil +} + +const misspellName = "misspell" + +func NewMisspell() *goanalysis.Linter { + var mu sync.Mutex + var resIssues []goanalysis.Issue + var ruleErr error + + analyzer := &analysis.Analyzer{ + Name: misspellName, + Doc: goanalysis.TheOnlyanalyzerDoc, + } + return goanalysis.NewLinter( + misspellName, + "Finds commonly misspelled English words in comments", + []*analysis.Analyzer{analyzer}, + nil, + ).WithContextSetter(func(lintCtx *linter.Context) { + r := misspell.Replacer{ + Replacements: misspell.DictMain, + } + + // Figure out regional variations + settings := lintCtx.Settings().Misspell + locale := settings.Locale + switch strings.ToUpper(locale) { + case "": + // nothing + case "US": + r.AddRuleList(misspell.DictAmerican) + case "UK", "GB": + r.AddRuleList(misspell.DictBritish) + case "NZ", "AU", "CA": + ruleErr = fmt.Errorf("unknown locale: %q", locale) + } + + if ruleErr == nil { + if len(settings.IgnoreWords) != 0 { + r.RemoveRule(settings.IgnoreWords) + } + + r.Compile() + } + + analyzer.Run = func(pass *analysis.Pass) (interface{}, error) { + if ruleErr != nil { + return nil, ruleErr + } + + var fileNames []string + for _, f := range pass.Files { + pos := pass.Fset.PositionFor(f.Pos(), false) + fileNames = append(fileNames, pos.Filename) + } + + var res []goanalysis.Issue + for _, f := range fileNames { + issues, err := runMisspellOnFile(f, &r, lintCtx) + if err != nil { + return nil, err + } + for i := range issues { + res = append(res, goanalysis.NewIssue(&issues[i], pass)) + } + } + if len(res) == 0 { + return nil, nil + } + + mu.Lock() + resIssues = append(resIssues, res...) 
+ mu.Unlock() + + return nil, nil + } + }).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue { + return resIssues + }).WithLoadMode(goanalysis.LoadModeSyntax) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/nakedret.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/nakedret.go new file mode 100644 index 000000000..86735a51a --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/nakedret.go @@ -0,0 +1,123 @@ +package golinters + +import ( + "fmt" + "go/ast" + "go/token" + "sync" + + "golang.org/x/tools/go/analysis" + + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" + "github.com/golangci/golangci-lint/pkg/lint/linter" + "github.com/golangci/golangci-lint/pkg/result" +) + +type nakedretVisitor struct { + maxLength int + f *token.FileSet + issues []result.Issue +} + +func (v *nakedretVisitor) processFuncDecl(funcDecl *ast.FuncDecl) { + file := v.f.File(funcDecl.Pos()) + functionLineLength := file.Position(funcDecl.End()).Line - file.Position(funcDecl.Pos()).Line + + // Scan the body for usage of the named returns + for _, stmt := range funcDecl.Body.List { + s, ok := stmt.(*ast.ReturnStmt) + if !ok { + continue + } + + if len(s.Results) != 0 { + continue + } + + file := v.f.File(s.Pos()) + if file == nil || functionLineLength <= v.maxLength { + continue + } + if funcDecl.Name == nil { + continue + } + + v.issues = append(v.issues, result.Issue{ + FromLinter: nakedretName, + Text: fmt.Sprintf("naked return in func `%s` with %d lines of code", + funcDecl.Name.Name, functionLineLength), + Pos: v.f.Position(s.Pos()), + }) + } +} + +func (v *nakedretVisitor) Visit(node ast.Node) ast.Visitor { + funcDecl, ok := node.(*ast.FuncDecl) + if !ok { + return v + } + + var namedReturns []*ast.Ident + + // We've found a function + if funcDecl.Type != nil && funcDecl.Type.Results != nil { + for _, field := range funcDecl.Type.Results.List { + for _, ident := range field.Names { + if ident != nil { + namedReturns = append(namedReturns, ident) + } + } + } + } + + if len(namedReturns) == 0 || funcDecl.Body == nil { + return v + } + + v.processFuncDecl(funcDecl) + return v +} + +const nakedretName = "nakedret" + +func NewNakedret() *goanalysis.Linter { + var mu sync.Mutex + var resIssues []goanalysis.Issue + + analyzer := &analysis.Analyzer{ + Name: nakedretName, + Doc: goanalysis.TheOnlyanalyzerDoc, + } + return goanalysis.NewLinter( + nakedretName, + "Finds naked returns in functions greater than a specified function length", + []*analysis.Analyzer{analyzer}, + nil, + ).WithContextSetter(func(lintCtx *linter.Context) { + analyzer.Run = func(pass *analysis.Pass) (interface{}, error) { + var res []goanalysis.Issue + for _, file := range pass.Files { + v := nakedretVisitor{ + maxLength: lintCtx.Settings().Nakedret.MaxFuncLines, + f: pass.Fset, + } + ast.Walk(&v, file) + for i := range v.issues { + res = append(res, goanalysis.NewIssue(&v.issues[i], pass)) + } + } + + if len(res) == 0 { + return nil, nil + } + + mu.Lock() + resIssues = append(resIssues, res...) 
+ mu.Unlock() + + return nil, nil + } + }).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue { + return resIssues + }).WithLoadMode(goanalysis.LoadModeSyntax) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/nestif.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/nestif.go new file mode 100644 index 000000000..0998a8ce2 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/nestif.go @@ -0,0 +1,65 @@ +package golinters + +import ( + "sort" + "sync" + + "github.com/nakabonne/nestif" + "golang.org/x/tools/go/analysis" + + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" + "github.com/golangci/golangci-lint/pkg/lint/linter" + "github.com/golangci/golangci-lint/pkg/result" +) + +const nestifName = "nestif" + +func NewNestif() *goanalysis.Linter { + var mu sync.Mutex + var resIssues []goanalysis.Issue + + analyzer := &analysis.Analyzer{ + Name: goanalysis.TheOnlyAnalyzerName, + Doc: goanalysis.TheOnlyanalyzerDoc, + } + return goanalysis.NewLinter( + nestifName, + "Reports deeply nested if statements", + []*analysis.Analyzer{analyzer}, + nil, + ).WithContextSetter(func(lintCtx *linter.Context) { + analyzer.Run = func(pass *analysis.Pass) (interface{}, error) { + checker := &nestif.Checker{ + MinComplexity: lintCtx.Settings().Nestif.MinComplexity, + } + var issues []nestif.Issue + for _, f := range pass.Files { + issues = append(issues, checker.Check(f, pass.Fset)...) + } + if len(issues) == 0 { + return nil, nil + } + + sort.SliceStable(issues, func(i, j int) bool { + return issues[i].Complexity > issues[j].Complexity + }) + + res := make([]goanalysis.Issue, 0, len(issues)) + for _, i := range issues { + res = append(res, goanalysis.NewIssue(&result.Issue{ + Pos: i.Pos, + Text: i.Message, + FromLinter: nestifName, + }, pass)) + } + + mu.Lock() + resIssues = append(resIssues, res...) 
+ mu.Unlock() + + return nil, nil + } + }).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue { + return resIssues + }).WithLoadMode(goanalysis.LoadModeSyntax) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/nilerr.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/nilerr.go new file mode 100644 index 000000000..d8a9a613e --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/nilerr.go @@ -0,0 +1,18 @@ +package golinters + +import ( + "github.com/gostaticanalysis/nilerr" + "golang.org/x/tools/go/analysis" + + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" +) + +func NewNilErr() *goanalysis.Linter { + a := nilerr.Analyzer + return goanalysis.NewLinter( + a.Name, + "Finds the code that returns nil even if it checks that the error is not nil.", + []*analysis.Analyzer{a}, + nil, + ).WithLoadMode(goanalysis.LoadModeTypesInfo) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/nlreturn.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/nlreturn.go new file mode 100644 index 000000000..3b661c64c --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/nlreturn.go @@ -0,0 +1,19 @@ +package golinters + +import ( + "github.com/ssgreg/nlreturn/v2/pkg/nlreturn" + "golang.org/x/tools/go/analysis" + + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" +) + +func NewNLReturn() *goanalysis.Linter { + return goanalysis.NewLinter( + "nlreturn", + "nlreturn checks for a new line before return and branch statements to increase code clarity", + []*analysis.Analyzer{ + nlreturn.NewAnalyzer(), + }, + nil, + ).WithLoadMode(goanalysis.LoadModeSyntax) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/noctx.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/noctx.go new file mode 100644 index 000000000..b5c4a4be2 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/noctx.go @@ -0,0 +1,21 @@ +package golinters + +import ( + "github.com/sonatard/noctx" + "golang.org/x/tools/go/analysis" + + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" +) + +func NewNoctx() *goanalysis.Linter { + analyzers := []*analysis.Analyzer{ + noctx.Analyzer, + } + + return goanalysis.NewLinter( + "noctx", + "noctx finds sending http request without context.Context", + analyzers, + nil, + ).WithLoadMode(goanalysis.LoadModeTypesInfo) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/nolintlint.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/nolintlint.go new file mode 100644 index 000000000..889cff864 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/nolintlint.go @@ -0,0 +1,93 @@ +package golinters + +import ( + "fmt" + "go/ast" + "sync" + + "golang.org/x/tools/go/analysis" + + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" + "github.com/golangci/golangci-lint/pkg/golinters/nolintlint" + "github.com/golangci/golangci-lint/pkg/lint/linter" + "github.com/golangci/golangci-lint/pkg/result" +) + +const NolintlintName = "nolintlint" + +func NewNoLintLint() *goanalysis.Linter { + var mu sync.Mutex + var resIssues []goanalysis.Issue + + analyzer := &analysis.Analyzer{ + Name: NolintlintName, + Doc: goanalysis.TheOnlyanalyzerDoc, + } + return goanalysis.NewLinter( + NolintlintName, + "Reports ill-formed or insufficient nolint directives", + []*analysis.Analyzer{analyzer}, + nil, + ).WithContextSetter(func(lintCtx *linter.Context) { + analyzer.Run = func(pass *analysis.Pass) (interface{}, 
error) { + var needs nolintlint.Needs + settings := lintCtx.Settings().NoLintLint + if settings.RequireExplanation { + needs |= nolintlint.NeedsExplanation + } + if !settings.AllowLeadingSpace { + needs |= nolintlint.NeedsMachineOnly + } + if settings.RequireSpecific { + needs |= nolintlint.NeedsSpecific + } + if !settings.AllowUnused { + needs |= nolintlint.NeedsUnused + } + + lnt, err := nolintlint.NewLinter(needs, settings.AllowNoExplanation) + if err != nil { + return nil, err + } + + nodes := make([]ast.Node, 0, len(pass.Files)) + for _, n := range pass.Files { + nodes = append(nodes, n) + } + issues, err := lnt.Run(pass.Fset, nodes...) + if err != nil { + return nil, fmt.Errorf("linter failed to run: %s", err) + } + var res []goanalysis.Issue + for _, i := range issues { + expectNoLint := false + var expectedNolintLinter string + if ii, ok := i.(nolintlint.UnusedCandidate); ok { + expectedNolintLinter = ii.ExpectedLinter + expectNoLint = true + } + issue := &result.Issue{ + FromLinter: NolintlintName, + Text: i.Details(), + Pos: i.Position(), + ExpectNoLint: expectNoLint, + ExpectedNoLintLinter: expectedNolintLinter, + Replacement: i.Replacement(), + } + res = append(res, goanalysis.NewIssue(issue, pass)) + } + + if len(res) == 0 { + return nil, nil + } + + mu.Lock() + resIssues = append(resIssues, res...) + mu.Unlock() + + return nil, nil + } + }).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue { + return resIssues + }).WithLoadMode(goanalysis.LoadModeSyntax) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/nolintlint/README.md b/vendor/github.com/golangci/golangci-lint/pkg/golinters/nolintlint/README.md new file mode 100644 index 000000000..3d440d5a5 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/nolintlint/README.md @@ -0,0 +1,31 @@ +# nolintlint + +nolintlint is a Go static analysis tool to find ill-formed or insufficiently explained `// nolint` directives for golangci +(or any other linter, using th ) + +## Purpose + +To ensure that lint exceptions have explanations. Consider the case below: + +```Go +import "crypto/md5" //nolint + +func hash(data []byte) []byte { + return md5.New().Sum(data) //nolint +} +``` + +In the above case, nolint directives are present but the user has no idea why this is being done or which linter +is being suppressed (in this case, gosec recommends against use of md5). `nolintlint` can require that the code provide an explanation, which might look as follows: + +```Go +import "crypto/md5" //nolint:gosec // this is not used in a secure application + +func hash(data []byte) []byte { + return md5.New().Sum(data) //nolint:gosec // this result is not used in a secure application +} +``` + +`nolintlint` can also identify cases where you may have written `// nolint`. Finally `nolintlint`, can also enforce that you +use the machine-readable nolint directive format `//nolint` and that you mention what linter is being suppressed, as shown above when we write `//nolint:gosec`. 
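
For reference, the exported surface of this package is small: `NewLinter(needs, excludes)` builds a checker from a `Needs` bit mask, and `Run(fset, nodes...)` reports offending directives as `Issue` values, exactly as the `NewNoLintLint` wrapper above wires it up. Below is a minimal, hypothetical standalone sketch of that API under the `NeedsAll` mask defined in `nolintlint.go` further down; the demo source string and file name are invented for illustration, and it assumes the package is importable at the path it is vendored under in this patch.

```Go
package main

import (
	"fmt"
	"go/parser"
	"go/token"
	"log"

	"github.com/golangci/golangci-lint/pkg/golinters/nolintlint"
)

// Invented demo input: one bare directive and one well-formed one.
const src = `package demo

import "crypto/md5" //nolint

func hash(data []byte) []byte {
	return md5.New().Sum(data) //nolint:gosec // result is not used in a secure context
}
`

func main() {
	fset := token.NewFileSet()
	// ParseComments is required: the linter walks file.Comments.
	file, err := parser.ParseFile(fset, "demo.go", src, parser.ParseComments)
	if err != nil {
		log.Fatal(err)
	}

	// Require the machine-readable form, a specific linter, and an explanation
	// (NeedsAll, as defined in nolintlint.go); no linters are excluded from the
	// explanation requirement.
	lnt, err := nolintlint.NewLinter(nolintlint.NeedsAll, nil)
	if err != nil {
		log.Fatal(err)
	}

	issues, err := lnt.Run(fset, file)
	if err != nil {
		log.Fatal(err)
	}

	// The bare //nolint above should be reported as both unspecific and unexplained.
	for _, issue := range issues {
		fmt.Printf("%s: %s\n", issue.Position(), issue.Details())
	}
}
```

In golangci-lint itself this mask is assembled from the `nolintlint` settings (see `NewNoLintLint` above) rather than hard-coded, and the resulting `Issue` values are converted to `result.Issue` so the unused-directive candidates can be filtered later by the nolint processor.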
+ diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/nolintlint/nolintlint.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/nolintlint/nolintlint.go new file mode 100644 index 000000000..4466cab41 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/nolintlint/nolintlint.go @@ -0,0 +1,305 @@ +// Package nolintlint provides a linter to ensure that all //nolint directives are followed by explanations +package nolintlint + +import ( + "fmt" + "go/ast" + "go/token" + "regexp" + "strings" + "unicode" + + "github.com/golangci/golangci-lint/pkg/result" +) + +type BaseIssue struct { + fullDirective string + directiveWithOptionalLeadingSpace string + position token.Position + replacement *result.Replacement +} + +//nolint:gocritic // TODO must be change in the future. +func (b BaseIssue) Position() token.Position { + return b.position +} + +//nolint:gocritic // TODO must be change in the future. +func (b BaseIssue) Replacement() *result.Replacement { + return b.replacement +} + +type ExtraLeadingSpace struct { + BaseIssue +} + +//nolint:gocritic // TODO must be change in the future. +func (i ExtraLeadingSpace) Details() string { + return fmt.Sprintf("directive `%s` should not have more than one leading space", i.fullDirective) +} + +//nolint:gocritic // TODO must be change in the future. +func (i ExtraLeadingSpace) String() string { return toString(i) } + +type NotMachine struct { + BaseIssue +} + +//nolint:gocritic // TODO must be change in the future. +func (i NotMachine) Details() string { + expected := i.fullDirective[:2] + strings.TrimLeftFunc(i.fullDirective[2:], unicode.IsSpace) + return fmt.Sprintf("directive `%s` should be written without leading space as `%s`", + i.fullDirective, expected) +} + +//nolint:gocritic // TODO must be change in the future. +func (i NotMachine) String() string { return toString(i) } + +type NotSpecific struct { + BaseIssue +} + +//nolint:gocritic // TODO must be change in the future. +func (i NotSpecific) Details() string { + return fmt.Sprintf("directive `%s` should mention specific linter such as `%s:my-linter`", + i.fullDirective, i.directiveWithOptionalLeadingSpace) +} + +//nolint:gocritic // TODO must be change in the future. +func (i NotSpecific) String() string { return toString(i) } + +type ParseError struct { + BaseIssue +} + +//nolint:gocritic // TODO must be change in the future. +func (i ParseError) Details() string { + return fmt.Sprintf("directive `%s` should match `%s[:] [// ]`", + i.fullDirective, + i.directiveWithOptionalLeadingSpace) +} + +//nolint:gocritic // TODO must be change in the future. +func (i ParseError) String() string { return toString(i) } + +type NoExplanation struct { + BaseIssue + fullDirectiveWithoutExplanation string +} + +//nolint:gocritic // TODO must be change in the future. +func (i NoExplanation) Details() string { + return fmt.Sprintf("directive `%s` should provide explanation such as `%s // this is why`", + i.fullDirective, i.fullDirectiveWithoutExplanation) +} + +//nolint:gocritic // TODO must be change in the future. +func (i NoExplanation) String() string { return toString(i) } + +type UnusedCandidate struct { + BaseIssue + ExpectedLinter string +} + +//nolint:gocritic // TODO must be change in the future. 
+func (i UnusedCandidate) Details() string { + details := fmt.Sprintf("directive `%s` is unused", i.fullDirective) + if i.ExpectedLinter != "" { + details += fmt.Sprintf(" for linter %q", i.ExpectedLinter) + } + return details +} + +//nolint:gocritic // TODO must be change in the future. +func (i UnusedCandidate) String() string { return toString(i) } + +func toString(i Issue) string { + return fmt.Sprintf("%s at %s", i.Details(), i.Position()) +} + +type Issue interface { + Details() string + Position() token.Position + String() string + Replacement() *result.Replacement +} + +type Needs uint + +const ( + NeedsMachineOnly Needs = 1 << iota + NeedsSpecific + NeedsExplanation + NeedsUnused + NeedsAll = NeedsMachineOnly | NeedsSpecific | NeedsExplanation +) + +var commentPattern = regexp.MustCompile(`^//\s*(nolint)(:\s*[\w-]+\s*(?:,\s*[\w-]+\s*)*)?\b`) + +// matches a complete nolint directive +var fullDirectivePattern = regexp.MustCompile(`^//\s*nolint(?::(\s*[\w-]+\s*(?:,\s*[\w-]+\s*)*))?\s*(//.*)?\s*\n?$`) + +type Linter struct { + needs Needs // indicates which linter checks to perform + excludeByLinter map[string]bool +} + +// NewLinter creates a linter that enforces that the provided directives fulfill the provided requirements +func NewLinter(needs Needs, excludes []string) (*Linter, error) { + excludeByName := make(map[string]bool) + for _, e := range excludes { + excludeByName[e] = true + } + + return &Linter{ + needs: needs, + excludeByLinter: excludeByName, + }, nil +} + +var leadingSpacePattern = regexp.MustCompile(`^//(\s*)`) +var trailingBlankExplanation = regexp.MustCompile(`\s*(//\s*)?$`) + +//nolint:funlen,gocyclo +func (l Linter) Run(fset *token.FileSet, nodes ...ast.Node) ([]Issue, error) { + var issues []Issue + + for _, node := range nodes { + file, ok := node.(*ast.File) + if !ok { + continue + } + + for _, c := range file.Comments { + for _, comment := range c.List { + if !commentPattern.MatchString(comment.Text) { + continue + } + + // check for a space between the "//" and the directive + leadingSpaceMatches := leadingSpacePattern.FindStringSubmatch(comment.Text) + + var leadingSpace string + if len(leadingSpaceMatches) > 0 { + leadingSpace = leadingSpaceMatches[1] + } + + directiveWithOptionalLeadingSpace := comment.Text + if len(leadingSpace) > 0 { + split := strings.Split(strings.SplitN(comment.Text, ":", 2)[0], "//") + directiveWithOptionalLeadingSpace = "// " + strings.TrimSpace(split[1]) + } + + pos := fset.Position(comment.Pos()) + end := fset.Position(comment.End()) + + base := BaseIssue{ + fullDirective: comment.Text, + directiveWithOptionalLeadingSpace: directiveWithOptionalLeadingSpace, + position: pos, + } + + // check for, report and eliminate leading spaces so we can check for other issues + if len(leadingSpace) > 0 { + removeWhitespace := &result.Replacement{ + Inline: &result.InlineFix{ + StartCol: pos.Column + 1, + Length: len(leadingSpace), + NewString: "", + }, + } + if (l.needs & NeedsMachineOnly) != 0 { + issue := NotMachine{BaseIssue: base} + issue.BaseIssue.replacement = removeWhitespace + issues = append(issues, issue) + } else if len(leadingSpace) > 1 { + issue := ExtraLeadingSpace{BaseIssue: base} + issue.BaseIssue.replacement = removeWhitespace + issue.BaseIssue.replacement.Inline.NewString = " " // assume a single space was intended + issues = append(issues, issue) + } + } + + fullMatches := fullDirectivePattern.FindStringSubmatch(comment.Text) + if len(fullMatches) == 0 { + issues = append(issues, ParseError{BaseIssue: base}) + continue + 
} + + lintersText, explanation := fullMatches[1], fullMatches[2] + var linters []string + if len(lintersText) > 0 { + lls := strings.Split(lintersText, ",") + linters = make([]string, 0, len(lls)) + rangeStart := (pos.Column - 1) + len("//") + len(leadingSpace) + len("nolint:") + for i, ll := range lls { + rangeEnd := rangeStart + len(ll) + if i < len(lls)-1 { + rangeEnd++ // include trailing comma + } + trimmedLinterName := strings.TrimSpace(ll) + if trimmedLinterName != "" { + linters = append(linters, trimmedLinterName) + } + rangeStart = rangeEnd + } + } + + if (l.needs & NeedsSpecific) != 0 { + if len(linters) == 0 { + issues = append(issues, NotSpecific{BaseIssue: base}) + } + } + + // when detecting unused directives, we send all the directives through and filter them out in the nolint processor + if (l.needs & NeedsUnused) != 0 { + removeNolintCompletely := &result.Replacement{ + Inline: &result.InlineFix{ + StartCol: pos.Column - 1, + Length: end.Column - pos.Column, + NewString: "", + }, + } + + if len(linters) == 0 { + issue := UnusedCandidate{BaseIssue: base} + issue.replacement = removeNolintCompletely + issues = append(issues, issue) + } else { + for _, linter := range linters { + issue := UnusedCandidate{BaseIssue: base, ExpectedLinter: linter} + // only offer replacement if there is a single linter + // because of issues around commas and the possibility of all + // linters being removed + if len(linters) == 1 { + issue.replacement = removeNolintCompletely + } + issues = append(issues, issue) + } + } + } + + if (l.needs&NeedsExplanation) != 0 && (explanation == "" || strings.TrimSpace(explanation) == "//") { + needsExplanation := len(linters) == 0 // if no linters are mentioned, we must have explanation + // otherwise, check if we are excluding all of the mentioned linters + for _, ll := range linters { + if !l.excludeByLinter[ll] { // if a linter does require explanation + needsExplanation = true + break + } + } + + if needsExplanation { + fullDirectiveWithoutExplanation := trailingBlankExplanation.ReplaceAllString(comment.Text, "") + issues = append(issues, NoExplanation{ + BaseIssue: base, + fullDirectiveWithoutExplanation: fullDirectiveWithoutExplanation, + }) + } + } + } + } + } + + return issues, nil +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/paralleltest.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/paralleltest.go new file mode 100644 index 000000000..3b784baf5 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/paralleltest.go @@ -0,0 +1,21 @@ +package golinters + +import ( + "github.com/kunwardeep/paralleltest/pkg/paralleltest" + "golang.org/x/tools/go/analysis" + + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" +) + +func NewParallelTest() *goanalysis.Linter { + analyzers := []*analysis.Analyzer{ + paralleltest.NewAnalyzer(), + } + + return goanalysis.NewLinter( + "paralleltest", + "paralleltest detects missing usage of t.Parallel() method in your Go test", + analyzers, + nil, + ).WithLoadMode(goanalysis.LoadModeSyntax) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/prealloc.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/prealloc.go new file mode 100644 index 000000000..3d06cf147 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/prealloc.go @@ -0,0 +1,57 @@ +package golinters + +import ( + "fmt" + "sync" + + "github.com/alexkohler/prealloc/pkg" + "golang.org/x/tools/go/analysis" + + 
"github.com/golangci/golangci-lint/pkg/golinters/goanalysis" + "github.com/golangci/golangci-lint/pkg/lint/linter" + "github.com/golangci/golangci-lint/pkg/result" +) + +const preallocName = "prealloc" + +func NewPrealloc() *goanalysis.Linter { + var mu sync.Mutex + var resIssues []goanalysis.Issue + + analyzer := &analysis.Analyzer{ + Name: preallocName, + Doc: goanalysis.TheOnlyanalyzerDoc, + } + return goanalysis.NewLinter( + preallocName, + "Finds slice declarations that could potentially be preallocated", + []*analysis.Analyzer{analyzer}, + nil, + ).WithContextSetter(func(lintCtx *linter.Context) { + s := &lintCtx.Settings().Prealloc + + analyzer.Run = func(pass *analysis.Pass) (interface{}, error) { + var res []goanalysis.Issue + hints := pkg.Check(pass.Files, s.Simple, s.RangeLoops, s.ForLoops) + for _, hint := range hints { + res = append(res, goanalysis.NewIssue(&result.Issue{ + Pos: pass.Fset.Position(hint.Pos), + Text: fmt.Sprintf("Consider preallocating %s", formatCode(hint.DeclaredSliceName, lintCtx.Cfg)), + FromLinter: preallocName, + }, pass)) + } + + if len(res) == 0 { + return nil, nil + } + + mu.Lock() + resIssues = append(resIssues, res...) + mu.Unlock() + + return nil, nil + } + }).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue { + return resIssues + }).WithLoadMode(goanalysis.LoadModeSyntax) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/predeclared.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/predeclared.go new file mode 100644 index 000000000..caccd4823 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/predeclared.go @@ -0,0 +1,26 @@ +package golinters + +import ( + "github.com/nishanths/predeclared/passes/predeclared" + "golang.org/x/tools/go/analysis" + + "github.com/golangci/golangci-lint/pkg/config" + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" +) + +func NewPredeclared(settings *config.PredeclaredSettings) *goanalysis.Linter { + a := predeclared.Analyzer + + var cfg map[string]map[string]interface{} + if settings != nil { + cfg = map[string]map[string]interface{}{ + a.Name: { + predeclared.IgnoreFlag: settings.Ignore, + predeclared.QualifiedFlag: settings.Qualified, + }, + } + } + + return goanalysis.NewLinter(a.Name, a.Doc, []*analysis.Analyzer{a}, cfg). 
+ WithLoadMode(goanalysis.LoadModeSyntax) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/promlinter.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/promlinter.go new file mode 100644 index 000000000..4fba3d274 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/promlinter.go @@ -0,0 +1,63 @@ +package golinters + +import ( + "fmt" + "sync" + + "github.com/yeya24/promlinter" + "golang.org/x/tools/go/analysis" + + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" + "github.com/golangci/golangci-lint/pkg/lint/linter" + "github.com/golangci/golangci-lint/pkg/result" +) + +func NewPromlinter() *goanalysis.Linter { + var mu sync.Mutex + var resIssues []goanalysis.Issue + + const linterName = "promlinter" + analyzer := &analysis.Analyzer{ + Name: linterName, + Doc: goanalysis.TheOnlyanalyzerDoc, + } + return goanalysis.NewLinter( + linterName, + "Check Prometheus metrics naming via promlint", + []*analysis.Analyzer{analyzer}, + nil, + ).WithContextSetter(func(lintCtx *linter.Context) { + strict := lintCtx.Cfg.LintersSettings.Promlinter.Strict + disabledLinters := lintCtx.Cfg.LintersSettings.Promlinter.DisabledLinters + + analyzer.Run = func(pass *analysis.Pass) (interface{}, error) { + issues := promlinter.RunLint(pass.Fset, pass.Files, promlinter.Setting{ + Strict: strict, + DisabledLintFuncs: disabledLinters, + }) + + if len(issues) == 0 { + return nil, nil + } + + res := make([]goanalysis.Issue, len(issues)) + for k, i := range issues { + issue := result.Issue{ + Pos: i.Pos, + Text: fmt.Sprintf("Metric: %s Error: %s", i.Metric, i.Text), + FromLinter: linterName, + } + + res[k] = goanalysis.NewIssue(&issue, pass) + } + + mu.Lock() + resIssues = append(resIssues, res...) + mu.Unlock() + + return nil, nil + } + }).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue { + return resIssues + }).WithLoadMode(goanalysis.LoadModeSyntax) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/revive.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/revive.go new file mode 100644 index 000000000..590332e66 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/revive.go @@ -0,0 +1,295 @@ +package golinters + +import ( + "bytes" + "encoding/json" + "fmt" + "go/token" + "io/ioutil" + "reflect" + + "github.com/BurntSushi/toml" + "github.com/mgechev/dots" + reviveConfig "github.com/mgechev/revive/config" + "github.com/mgechev/revive/lint" + "github.com/mgechev/revive/rule" + "github.com/pkg/errors" + "golang.org/x/tools/go/analysis" + + "github.com/golangci/golangci-lint/pkg/config" + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" + "github.com/golangci/golangci-lint/pkg/lint/linter" + "github.com/golangci/golangci-lint/pkg/logutils" + "github.com/golangci/golangci-lint/pkg/result" +) + +const reviveName = "revive" + +var reviveDebugf = logutils.Debug("revive") + +// jsonObject defines a JSON object of a failure +type jsonObject struct { + Severity lint.Severity + lint.Failure `json:",inline"` +} + +// NewRevive returns a new Revive linter. +func NewRevive(cfg *config.ReviveSettings) *goanalysis.Linter { + var issues []goanalysis.Issue + + analyzer := &analysis.Analyzer{ + Name: goanalysis.TheOnlyAnalyzerName, + Doc: goanalysis.TheOnlyanalyzerDoc, + } + + return goanalysis.NewLinter( + reviveName, + "Fast, configurable, extensible, flexible, and beautiful linter for Go. 
Drop-in replacement of golint.", + []*analysis.Analyzer{analyzer}, + nil, + ).WithContextSetter(func(lintCtx *linter.Context) { + analyzer.Run = func(pass *analysis.Pass) (interface{}, error) { + var files []string + + for _, file := range pass.Files { + files = append(files, pass.Fset.PositionFor(file.Pos(), false).Filename) + } + + conf, err := getReviveConfig(cfg) + if err != nil { + return nil, err + } + + formatter, err := reviveConfig.GetFormatter("json") + if err != nil { + return nil, err + } + + revive := lint.New(ioutil.ReadFile) + + lintingRules, err := reviveConfig.GetLintingRules(conf) + if err != nil { + return nil, err + } + + packages, err := dots.ResolvePackages(files, []string{}) + if err != nil { + return nil, err + } + + failures, err := revive.Lint(packages, lintingRules, *conf) + if err != nil { + return nil, err + } + + formatChan := make(chan lint.Failure) + exitChan := make(chan bool) + + var output string + go func() { + output, err = formatter.Format(formatChan, *conf) + if err != nil { + lintCtx.Log.Errorf("Format error: %v", err) + } + exitChan <- true + }() + + for f := range failures { + if f.Confidence < conf.Confidence { + continue + } + + formatChan <- f + } + + close(formatChan) + <-exitChan + + var results []jsonObject + err = json.Unmarshal([]byte(output), &results) + if err != nil { + return nil, err + } + + for i := range results { + issues = append(issues, reviveToIssue(pass, &results[i])) + } + + return nil, nil + } + }).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue { + return issues + }).WithLoadMode(goanalysis.LoadModeSyntax) +} + +func reviveToIssue(pass *analysis.Pass, object *jsonObject) goanalysis.Issue { + lineRangeTo := object.Position.End.Line + if object.RuleName == (&rule.ExportedRule{}).Name() { + lineRangeTo = object.Position.Start.Line + } + + return goanalysis.NewIssue(&result.Issue{ + Severity: string(object.Severity), + Text: fmt.Sprintf("%s: %s", object.RuleName, object.Failure.Failure), + Pos: token.Position{ + Filename: object.Position.Start.Filename, + Line: object.Position.Start.Line, + Offset: object.Position.Start.Offset, + Column: object.Position.Start.Column, + }, + LineRange: &result.Range{ + From: object.Position.Start.Line, + To: lineRangeTo, + }, + FromLinter: reviveName, + }, pass) +} + +// This function mimics the GetConfig function of revive. +// This allow to get default values and right types. 
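getReviveConfig, defined just below, turns the loosely typed revive settings into revive's own lint.Config by encoding them to TOML and decoding the result back into the typed struct, so defaults and field types come out right. The following standalone sketch shows that round-trip in isolation; the target struct and values are made up for illustration, and only the BurntSushi/toml calls mirror what the vendored code itself uses.

// Standalone sketch (not part of the patch) of the TOML round-trip used by
// getReviveConfig below: encode a generic map, then decode it into a typed
// struct so field types and defaults line up. "target" is illustrative.
package main

import (
	"bytes"
	"fmt"

	"github.com/BurntSushi/toml"
)

type target struct {
	Confidence float64
	Severity   string
}

func main() {
	// Loosely typed input, e.g. as it arrives from a decoded YAML config.
	raw := map[string]interface{}{
		"confidence": 0.8,
		"severity":   "warning",
	}

	buf := bytes.NewBuffer(nil)
	if err := toml.NewEncoder(buf).Encode(raw); err != nil {
		panic(err)
	}

	var out target
	if _, err := toml.DecodeReader(buf, &out); err != nil {
		panic(err)
	}

	fmt.Printf("%+v\n", out) // {Confidence:0.8 Severity:warning}
}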
+// https://github.com/golangci/golangci-lint/issues/1745 +// https://github.com/mgechev/revive/blob/389ba853b0b3587f0c3b71b5f0c61ea4e23928ec/config/config.go#L155 +func getReviveConfig(cfg *config.ReviveSettings) (*lint.Config, error) { + conf := defaultConfig() + + if !reflect.DeepEqual(cfg, &config.ReviveSettings{}) { + rawRoot := createConfigMap(cfg) + buf := bytes.NewBuffer(nil) + + err := toml.NewEncoder(buf).Encode(rawRoot) + if err != nil { + return nil, errors.Wrap(err, "failed to encode configuration") + } + + conf = &lint.Config{} + _, err = toml.DecodeReader(buf, conf) + if err != nil { + return nil, errors.Wrap(err, "failed to decode configuration") + } + } + + normalizeConfig(conf) + + reviveDebugf("revive configuration: %#v", conf) + + return conf, nil +} + +func createConfigMap(cfg *config.ReviveSettings) map[string]interface{} { + rawRoot := map[string]interface{}{ + "ignoreGeneratedHeader": cfg.IgnoreGeneratedHeader, + "confidence": cfg.Confidence, + "severity": cfg.Severity, + "errorCode": cfg.ErrorCode, + "warningCode": cfg.WarningCode, + "enableAllRules": cfg.EnableAllRules, + } + + rawDirectives := map[string]map[string]interface{}{} + for _, directive := range cfg.Directives { + rawDirectives[directive.Name] = map[string]interface{}{ + "severity": directive.Severity, + } + } + + if len(rawDirectives) > 0 { + rawRoot["directive"] = rawDirectives + } + + rawRules := map[string]map[string]interface{}{} + for _, s := range cfg.Rules { + rawRules[s.Name] = map[string]interface{}{ + "severity": s.Severity, + "arguments": safeTomlSlice(s.Arguments), + "disabled": s.Disabled, + } + } + + if len(rawRules) > 0 { + rawRoot["rule"] = rawRules + } + + return rawRoot +} + +func safeTomlSlice(r []interface{}) []interface{} { + if len(r) == 0 { + return nil + } + + if _, ok := r[0].(map[interface{}]interface{}); !ok { + return r + } + + var typed []interface{} + for _, elt := range r { + item := map[string]interface{}{} + for k, v := range elt.(map[interface{}]interface{}) { + item[k.(string)] = v + } + + typed = append(typed, item) + } + + return typed +} + +// This element is not exported by revive, so we need copy the code. +// Extracted from https://github.com/mgechev/revive/blob/389ba853b0b3587f0c3b71b5f0c61ea4e23928ec/config/config.go#L15 +var defaultRules = []lint.Rule{ + &rule.VarDeclarationsRule{}, + &rule.PackageCommentsRule{}, + &rule.DotImportsRule{}, + &rule.BlankImportsRule{}, + &rule.ExportedRule{}, + &rule.VarNamingRule{}, + &rule.IndentErrorFlowRule{}, + &rule.RangeRule{}, + &rule.ErrorfRule{}, + &rule.ErrorNamingRule{}, + &rule.ErrorStringsRule{}, + &rule.ReceiverNamingRule{}, + &rule.IncrementDecrementRule{}, + &rule.ErrorReturnRule{}, + &rule.UnexportedReturnRule{}, + &rule.TimeNamingRule{}, + &rule.ContextKeysType{}, + &rule.ContextAsArgumentRule{}, +} + +// This element is not exported by revive, so we need copy the code. +// Extracted from https://github.com/mgechev/revive/blob/389ba853b0b3587f0c3b71b5f0c61ea4e23928ec/config/config.go#L133 +func normalizeConfig(cfg *lint.Config) { + if cfg.Confidence == 0 { + cfg.Confidence = 0.8 + } + severity := cfg.Severity + if severity != "" { + for k, v := range cfg.Rules { + if v.Severity == "" { + v.Severity = severity + } + cfg.Rules[k] = v + } + for k, v := range cfg.Directives { + if v.Severity == "" { + v.Severity = severity + } + cfg.Directives[k] = v + } + } +} + +// This element is not exported by revive, so we need copy the code. 
+// Extracted from https://github.com/mgechev/revive/blob/389ba853b0b3587f0c3b71b5f0c61ea4e23928ec/config/config.go#L182 +func defaultConfig() *lint.Config { + defaultConfig := lint.Config{ + Confidence: 0.0, + Severity: lint.SeverityWarning, + Rules: map[string]lint.RuleConfig{}, + } + for _, r := range defaultRules { + defaultConfig.Rules[r.Name()] = lint.RuleConfig{} + } + return &defaultConfig +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/rowerrcheck.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/rowerrcheck.go new file mode 100644 index 000000000..d4c89d382 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/rowerrcheck.go @@ -0,0 +1,23 @@ +package golinters + +import ( + "github.com/jingyugao/rowserrcheck/passes/rowserr" + "golang.org/x/tools/go/analysis" + + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" + "github.com/golangci/golangci-lint/pkg/lint/linter" +) + +func NewRowsErrCheck() *goanalysis.Linter { + analyzer := rowserr.NewAnalyzer() + return goanalysis.NewLinter( + "rowserrcheck", + "checks whether Err of rows is checked successfully", + []*analysis.Analyzer{analyzer}, + nil, + ).WithLoadMode(goanalysis.LoadModeTypesInfo). + WithContextSetter(func(lintCtx *linter.Context) { + pkgs := lintCtx.Settings().RowsErrCheck.Packages + analyzer.Run = rowserr.NewRun(pkgs...) + }) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/scopelint.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/scopelint.go new file mode 100644 index 000000000..ba3921e19 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/scopelint.go @@ -0,0 +1,177 @@ +package golinters + +import ( + "fmt" + "go/ast" + "go/token" + "sync" + + "golang.org/x/tools/go/analysis" + + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" + "github.com/golangci/golangci-lint/pkg/lint/linter" + "github.com/golangci/golangci-lint/pkg/result" +) + +const scopelintName = "scopelint" + +func NewScopelint() *goanalysis.Linter { + var mu sync.Mutex + var resIssues []goanalysis.Issue + + analyzer := &analysis.Analyzer{ + Name: scopelintName, + Doc: goanalysis.TheOnlyanalyzerDoc, + } + return goanalysis.NewLinter( + scopelintName, + "Scopelint checks for unpinned variables in go programs", + []*analysis.Analyzer{analyzer}, + nil, + ).WithContextSetter(func(lintCtx *linter.Context) { + analyzer.Run = func(pass *analysis.Pass) (interface{}, error) { + var res []result.Issue + for _, file := range pass.Files { + n := Node{ + fset: pass.Fset, + DangerObjects: map[*ast.Object]int{}, + UnsafeObjects: map[*ast.Object]int{}, + SkipFuncs: map[*ast.FuncLit]int{}, + issues: &res, + } + ast.Walk(&n, file) + } + + if len(res) == 0 { + return nil, nil + } + + mu.Lock() + for i := range res { + resIssues = append(resIssues, goanalysis.NewIssue(&res[i], pass)) + } + mu.Unlock() + + return nil, nil + } + }).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue { + return resIssues + }).WithLoadMode(goanalysis.LoadModeSyntax) +} + +// The code below is copy-pasted from https://github.com/kyoh86/scopelint 92cbe2cc9276abda0e309f52cc9e309d407f174e + +// Node represents a Node being linted. +type Node struct { + fset *token.FileSet + DangerObjects map[*ast.Object]int + UnsafeObjects map[*ast.Object]int + SkipFuncs map[*ast.FuncLit]int + issues *[]result.Issue +} + +// Visit method is invoked for each node encountered by Walk. 
+// If the result visitor w is not nil, Walk visits each of the children +// of node with the visitor w, followed by a call of w.Visit(nil). +//nolint:gocyclo,gocritic +func (f *Node) Visit(node ast.Node) ast.Visitor { + switch typedNode := node.(type) { + case *ast.ForStmt: + switch init := typedNode.Init.(type) { + case *ast.AssignStmt: + for _, lh := range init.Lhs { + switch tlh := lh.(type) { + case *ast.Ident: + f.UnsafeObjects[tlh.Obj] = 0 + } + } + } + + case *ast.RangeStmt: + // Memory variables declared in range statement + switch k := typedNode.Key.(type) { + case *ast.Ident: + f.UnsafeObjects[k.Obj] = 0 + } + switch v := typedNode.Value.(type) { + case *ast.Ident: + f.UnsafeObjects[v.Obj] = 0 + } + + case *ast.UnaryExpr: + if typedNode.Op == token.AND { + switch ident := typedNode.X.(type) { + case *ast.Ident: + if _, unsafe := f.UnsafeObjects[ident.Obj]; unsafe { + f.errorf(ident, "Using a reference for the variable on range scope %s", formatCode(ident.Name, nil)) + } + } + } + + case *ast.Ident: + if _, obj := f.DangerObjects[typedNode.Obj]; obj { + // It is the naked variable in scope of range statement. + f.errorf(node, "Using the variable on range scope %s in function literal", formatCode(typedNode.Name, nil)) + break + } + + case *ast.CallExpr: + // Ignore func literals that'll be called immediately. + switch funcLit := typedNode.Fun.(type) { + case *ast.FuncLit: + f.SkipFuncs[funcLit] = 0 + } + + case *ast.FuncLit: + if _, skip := f.SkipFuncs[typedNode]; !skip { + dangers := map[*ast.Object]int{} + for d := range f.DangerObjects { + dangers[d] = 0 + } + for u := range f.UnsafeObjects { + dangers[u] = 0 + f.UnsafeObjects[u]++ + } + return &Node{ + fset: f.fset, + DangerObjects: dangers, + UnsafeObjects: f.UnsafeObjects, + SkipFuncs: f.SkipFuncs, + issues: f.issues, + } + } + + case *ast.ReturnStmt: + unsafe := map[*ast.Object]int{} + for u := range f.UnsafeObjects { + if f.UnsafeObjects[u] == 0 { + continue + } + unsafe[u] = f.UnsafeObjects[u] + } + return &Node{ + fset: f.fset, + DangerObjects: f.DangerObjects, + UnsafeObjects: unsafe, + SkipFuncs: f.SkipFuncs, + issues: f.issues, + } + } + return f +} + +// The variadic arguments may start with link and category types, +// and must end with a format string and any arguments. +//nolint:interfacer +func (f *Node) errorf(n ast.Node, format string, args ...interface{}) { + pos := f.fset.Position(n.Pos()) + f.errorAtf(pos, format, args...) 
+} + +func (f *Node) errorAtf(pos token.Position, format string, args ...interface{}) { + *f.issues = append(*f.issues, result.Issue{ + Pos: pos, + Text: fmt.Sprintf(format, args...), + FromLinter: scopelintName, + }) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/sqlclosecheck.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/sqlclosecheck.go new file mode 100644 index 000000000..48ca246e7 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/sqlclosecheck.go @@ -0,0 +1,21 @@ +package golinters + +import ( + "github.com/ryanrolds/sqlclosecheck/pkg/analyzer" + "golang.org/x/tools/go/analysis" + + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" +) + +func NewSQLCloseCheck() *goanalysis.Linter { + analyzers := []*analysis.Analyzer{ + analyzer.NewAnalyzer(), + } + + return goanalysis.NewLinter( + "sqlclosecheck", + "Checks that sql.Rows and sql.Stmt are closed.", + analyzers, + nil, + ).WithLoadMode(goanalysis.LoadModeTypesInfo) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/staticcheck.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/staticcheck.go new file mode 100644 index 000000000..2226eabb4 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/staticcheck.go @@ -0,0 +1,21 @@ +package golinters + +import ( + "honnef.co/go/tools/staticcheck" + + "github.com/golangci/golangci-lint/pkg/config" + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" +) + +func NewStaticcheck(settings *config.StaticCheckSettings) *goanalysis.Linter { + cfg := staticCheckConfig(settings) + + analyzers := setupStaticCheckAnalyzers(staticcheck.Analyzers, getGoVersion(settings), cfg.Checks) + + return goanalysis.NewLinter( + "staticcheck", + "Staticcheck is a go vet on steroids, applying a ton of static analysis checks", + analyzers, + nil, + ).WithLoadMode(goanalysis.LoadModeTypesInfo) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/staticcheck_common.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/staticcheck_common.go new file mode 100644 index 000000000..dc6360d7e --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/staticcheck_common.go @@ -0,0 +1,168 @@ +package golinters + +import ( + "strings" + "unicode" + + "golang.org/x/tools/go/analysis" + "honnef.co/go/tools/analysis/lint" + scconfig "honnef.co/go/tools/config" + + "github.com/golangci/golangci-lint/pkg/config" + "github.com/golangci/golangci-lint/pkg/logutils" +) + +var debugf = logutils.Debug("megacheck") + +func getGoVersion(settings *config.StaticCheckSettings) string { + var goVersion string + if settings != nil { + goVersion = settings.GoVersion + } + + if goVersion != "" { + return goVersion + } + + // TODO: uses "1.13" for backward compatibility, but in the future (v2) must be set by using build.Default.ReleaseTags like staticcheck. 
+ return "1.13" +} + +func setupStaticCheckAnalyzers(src []*lint.Analyzer, goVersion string, checks []string) []*analysis.Analyzer { + var names []string + for _, a := range src { + names = append(names, a.Analyzer.Name) + } + + filter := filterAnalyzerNames(names, checks) + + var ret []*analysis.Analyzer + for _, a := range src { + if filter[a.Analyzer.Name] { + setAnalyzerGoVersion(a.Analyzer, goVersion) + ret = append(ret, a.Analyzer) + } + } + + return ret +} + +func setAnalyzerGoVersion(a *analysis.Analyzer, goVersion string) { + if v := a.Flags.Lookup("go"); v != nil { + if err := v.Value.Set(goVersion); err != nil { + debugf("Failed to set go version: %s", err) + } + } +} + +func staticCheckConfig(settings *config.StaticCheckSettings) *scconfig.Config { + var cfg *scconfig.Config + + if settings == nil || !settings.HasConfiguration() { + return &scconfig.Config{ + Checks: []string{"*"}, // override for compatibility reason. Must drop in the next major version. + Initialisms: scconfig.DefaultConfig.Initialisms, + DotImportWhitelist: scconfig.DefaultConfig.DotImportWhitelist, + HTTPStatusCodeWhitelist: scconfig.DefaultConfig.HTTPStatusCodeWhitelist, + } + } + + cfg = &scconfig.Config{ + Checks: settings.Checks, + Initialisms: settings.Initialisms, + DotImportWhitelist: settings.DotImportWhitelist, + HTTPStatusCodeWhitelist: settings.HTTPStatusCodeWhitelist, + } + + if len(cfg.Checks) == 0 { + cfg.Checks = append(cfg.Checks, "*") // override for compatibility reason. Must drop in the next major version. + } + + if len(cfg.Initialisms) == 0 { + cfg.Initialisms = append(cfg.Initialisms, scconfig.DefaultConfig.Initialisms...) + } + + if len(cfg.DotImportWhitelist) == 0 { + cfg.DotImportWhitelist = append(cfg.DotImportWhitelist, scconfig.DefaultConfig.DotImportWhitelist...) + } + + if len(cfg.HTTPStatusCodeWhitelist) == 0 { + cfg.HTTPStatusCodeWhitelist = append(cfg.HTTPStatusCodeWhitelist, scconfig.DefaultConfig.HTTPStatusCodeWhitelist...) + } + + cfg.Checks = normalizeList(cfg.Checks) + cfg.Initialisms = normalizeList(cfg.Initialisms) + cfg.DotImportWhitelist = normalizeList(cfg.DotImportWhitelist) + cfg.HTTPStatusCodeWhitelist = normalizeList(cfg.HTTPStatusCodeWhitelist) + + return cfg +} + +// https://github.com/dominikh/go-tools/blob/9bf17c0388a65710524ba04c2d821469e639fdc2/lintcmd/lint.go#L437-L477 +// nolint // Keep the original source code. 
+func filterAnalyzerNames(analyzers []string, checks []string) map[string]bool { + allowedChecks := map[string]bool{} + + for _, check := range checks { + b := true + if len(check) > 1 && check[0] == '-' { + b = false + check = check[1:] + } + + if check == "*" || check == "all" { + // Match all + for _, c := range analyzers { + allowedChecks[c] = b + } + } else if strings.HasSuffix(check, "*") { + // Glob + prefix := check[:len(check)-1] + isCat := strings.IndexFunc(prefix, func(r rune) bool { return unicode.IsNumber(r) }) == -1 + + for _, a := range analyzers { + idx := strings.IndexFunc(a, func(r rune) bool { return unicode.IsNumber(r) }) + if isCat { + // Glob is S*, which should match S1000 but not SA1000 + cat := a[:idx] + if prefix == cat { + allowedChecks[a] = b + } + } else { + // Glob is S1* + if strings.HasPrefix(a, prefix) { + allowedChecks[a] = b + } + } + } + } else { + // Literal check name + allowedChecks[check] = b + } + } + return allowedChecks +} + +// https://github.com/dominikh/go-tools/blob/9bf17c0388a65710524ba04c2d821469e639fdc2/config/config.go#L95-L116 +func normalizeList(list []string) []string { + if len(list) > 1 { + nlist := make([]string, 0, len(list)) + nlist = append(nlist, list[0]) + for i, el := range list[1:] { + if el != list[i] { + nlist = append(nlist, el) + } + } + list = nlist + } + + for _, el := range list { + if el == "inherit" { + // This should never happen, because the default config + // should not use "inherit" + panic(`unresolved "inherit"`) + } + } + + return list +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/structcheck.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/structcheck.go new file mode 100644 index 000000000..7c16f8ec3 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/structcheck.go @@ -0,0 +1,55 @@ +package golinters // nolint:dupl + +import ( + "fmt" + "sync" + + structcheckAPI "github.com/golangci/check/cmd/structcheck" + "golang.org/x/tools/go/analysis" + + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" + "github.com/golangci/golangci-lint/pkg/lint/linter" + "github.com/golangci/golangci-lint/pkg/result" +) + +func NewStructcheck() *goanalysis.Linter { + const linterName = "structcheck" + var mu sync.Mutex + var res []goanalysis.Issue + analyzer := &analysis.Analyzer{ + Name: linterName, + Doc: goanalysis.TheOnlyanalyzerDoc, + } + return goanalysis.NewLinter( + linterName, + "Finds unused struct fields", + []*analysis.Analyzer{analyzer}, + nil, + ).WithContextSetter(func(lintCtx *linter.Context) { + checkExported := lintCtx.Settings().Structcheck.CheckExportedFields + analyzer.Run = func(pass *analysis.Pass) (interface{}, error) { + prog := goanalysis.MakeFakeLoaderProgram(pass) + + structcheckIssues := structcheckAPI.Run(prog, checkExported) + if len(structcheckIssues) == 0 { + return nil, nil + } + + issues := make([]goanalysis.Issue, 0, len(structcheckIssues)) + for _, i := range structcheckIssues { + issues = append(issues, goanalysis.NewIssue(&result.Issue{ + Pos: i.Pos, + Text: fmt.Sprintf("%s is unused", formatCode(i.FieldName, lintCtx.Cfg)), + FromLinter: linterName, + }, pass)) + } + + mu.Lock() + res = append(res, issues...) 
+ mu.Unlock() + return nil, nil + } + }).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue { + return res + }).WithLoadMode(goanalysis.LoadModeTypesInfo) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/stylecheck.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/stylecheck.go new file mode 100644 index 000000000..899f6ff58 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/stylecheck.go @@ -0,0 +1,30 @@ +package golinters + +import ( + "golang.org/x/tools/go/analysis" + scconfig "honnef.co/go/tools/config" + "honnef.co/go/tools/stylecheck" + + "github.com/golangci/golangci-lint/pkg/config" + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" +) + +func NewStylecheck(settings *config.StaticCheckSettings) *goanalysis.Linter { + cfg := staticCheckConfig(settings) + + // `scconfig.Analyzer` is a singleton, then it's not possible to have more than one instance for all staticcheck "sub-linters". + // When we will merge the 4 "sub-linters", the problem will disappear: https://github.com/golangci/golangci-lint/issues/357 + // Currently only stylecheck analyzer has a configuration in staticcheck. + scconfig.Analyzer.Run = func(pass *analysis.Pass) (interface{}, error) { + return cfg, nil + } + + analyzers := setupStaticCheckAnalyzers(stylecheck.Analyzers, getGoVersion(settings), cfg.Checks) + + return goanalysis.NewLinter( + "stylecheck", + "Stylecheck is a replacement for golint", + analyzers, + nil, + ).WithLoadMode(goanalysis.LoadModeTypesInfo) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/tagliatelle.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/tagliatelle.go new file mode 100644 index 000000000..5f58fc1d3 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/tagliatelle.go @@ -0,0 +1,30 @@ +package golinters + +import ( + "github.com/ldez/tagliatelle" + "golang.org/x/tools/go/analysis" + + "github.com/golangci/golangci-lint/pkg/config" + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" +) + +func NewTagliatelle(settings *config.TagliatelleSettings) *goanalysis.Linter { + cfg := tagliatelle.Config{ + Rules: map[string]string{ + "json": "camel", + "yaml": "camel", + }, + } + + if settings != nil { + for k, v := range settings.Case.Rules { + cfg.Rules[k] = v + } + cfg.UseFieldName = settings.Case.UseFieldName + } + + a := tagliatelle.New(cfg) + + return goanalysis.NewLinter(a.Name, a.Doc, []*analysis.Analyzer{a}, nil). + WithLoadMode(goanalysis.LoadModeSyntax) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/testpackage.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/testpackage.go new file mode 100644 index 000000000..1248e78fd --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/testpackage.go @@ -0,0 +1,23 @@ +package golinters + +import ( + "github.com/maratori/testpackage/pkg/testpackage" + "golang.org/x/tools/go/analysis" + + "github.com/golangci/golangci-lint/pkg/config" + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" +) + +func NewTestpackage(cfg *config.TestpackageSettings) *goanalysis.Linter { + var a = testpackage.NewAnalyzer() + var settings map[string]map[string]interface{} + if cfg != nil { + settings = map[string]map[string]interface{}{ + a.Name: { + testpackage.SkipRegexpFlagName: cfg.SkipRegexp, + }, + } + } + return goanalysis.NewLinter(a.Name, a.Doc, []*analysis.Analyzer{a}, settings). 
+ WithLoadMode(goanalysis.LoadModeSyntax) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/thelper.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/thelper.go new file mode 100644 index 000000000..1d92f2fbf --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/thelper.go @@ -0,0 +1,61 @@ +package golinters + +import ( + "strings" + + "github.com/kulti/thelper/pkg/analyzer" + "golang.org/x/tools/go/analysis" + + "github.com/golangci/golangci-lint/pkg/config" + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" +) + +func NewThelper(cfg *config.ThelperSettings) *goanalysis.Linter { + a := analyzer.NewAnalyzer() + + cfgMap := map[string]map[string]interface{}{} + if cfg != nil { + var opts []string + + if cfg.Test.Name { + opts = append(opts, "t_name") + } + if cfg.Test.Begin { + opts = append(opts, "t_begin") + } + if cfg.Test.First { + opts = append(opts, "t_first") + } + + if cfg.Benchmark.Name { + opts = append(opts, "b_name") + } + if cfg.Benchmark.Begin { + opts = append(opts, "b_begin") + } + if cfg.Benchmark.First { + opts = append(opts, "b_first") + } + + if cfg.TB.Name { + opts = append(opts, "tb_name") + } + if cfg.TB.Begin { + opts = append(opts, "tb_begin") + } + if cfg.TB.First { + opts = append(opts, "tb_first") + } + + cfgMap[a.Name] = map[string]interface{}{ + "checks": strings.Join(opts, ","), + } + } + + return goanalysis.NewLinter( + "thelper", + "thelper detects golang test helpers without t.Helper() call and checks the consistency of test helpers", + []*analysis.Analyzer{a}, + cfgMap, + ).WithLoadMode(goanalysis.LoadModeTypesInfo) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/tparallel.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/tparallel.go new file mode 100644 index 000000000..a4b96eb73 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/tparallel.go @@ -0,0 +1,21 @@ +package golinters + +import ( + "github.com/moricho/tparallel" + "golang.org/x/tools/go/analysis" + + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" +) + +func NewTparallel() *goanalysis.Linter { + analyzers := []*analysis.Analyzer{ + tparallel.Analyzer, + } + + return goanalysis.NewLinter( + "tparallel", + "tparallel detects inappropriate usage of t.Parallel() method in your Go test codes", + analyzers, + nil, + ).WithLoadMode(goanalysis.LoadModeTypesInfo) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/typecheck.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/typecheck.go new file mode 100644 index 000000000..24f4339fb --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/typecheck.go @@ -0,0 +1,28 @@ +package golinters + +import ( + "golang.org/x/tools/go/analysis" + + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" +) + +func NewTypecheck() *goanalysis.Linter { + const linterName = "typecheck" + + analyzer := &analysis.Analyzer{ + Name: linterName, + Doc: goanalysis.TheOnlyanalyzerDoc, + Run: func(pass *analysis.Pass) (interface{}, error) { + return nil, nil + }, + } + + linter := goanalysis.NewLinter( + linterName, + "Like the front-end of a Go compiler, parses and type-checks Go code", + []*analysis.Analyzer{analyzer}, + nil, + ).WithLoadMode(goanalysis.LoadModeTypesInfo) + + return linter +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/unconvert.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/unconvert.go new file mode 100644 index 000000000..456f6836c --- 
/dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/unconvert.go @@ -0,0 +1,53 @@ +package golinters + +import ( + "sync" + + unconvertAPI "github.com/golangci/unconvert" + "golang.org/x/tools/go/analysis" + + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" + "github.com/golangci/golangci-lint/pkg/lint/linter" + "github.com/golangci/golangci-lint/pkg/result" +) + +func NewUnconvert() *goanalysis.Linter { + const linterName = "unconvert" + var mu sync.Mutex + var res []goanalysis.Issue + analyzer := &analysis.Analyzer{ + Name: linterName, + Doc: goanalysis.TheOnlyanalyzerDoc, + } + return goanalysis.NewLinter( + linterName, + "Remove unnecessary type conversions", + []*analysis.Analyzer{analyzer}, + nil, + ).WithContextSetter(func(lintCtx *linter.Context) { + analyzer.Run = func(pass *analysis.Pass) (interface{}, error) { + prog := goanalysis.MakeFakeLoaderProgram(pass) + + positions := unconvertAPI.Run(prog) + if len(positions) == 0 { + return nil, nil + } + + issues := make([]goanalysis.Issue, 0, len(positions)) + for _, pos := range positions { + issues = append(issues, goanalysis.NewIssue(&result.Issue{ + Pos: pos, + Text: "unnecessary conversion", + FromLinter: linterName, + }, pass)) + } + + mu.Lock() + res = append(res, issues...) + mu.Unlock() + return nil, nil + } + }).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue { + return res + }).WithLoadMode(goanalysis.LoadModeTypesInfo) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/unparam.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/unparam.go new file mode 100644 index 000000000..33dd55c9b --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/unparam.go @@ -0,0 +1,76 @@ +package golinters + +import ( + "sync" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/buildssa" + "golang.org/x/tools/go/packages" + "mvdan.cc/unparam/check" + + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" + "github.com/golangci/golangci-lint/pkg/lint/linter" + "github.com/golangci/golangci-lint/pkg/result" +) + +func NewUnparam() *goanalysis.Linter { + const linterName = "unparam" + var mu sync.Mutex + var resIssues []goanalysis.Issue + + analyzer := &analysis.Analyzer{ + Name: linterName, + Doc: goanalysis.TheOnlyanalyzerDoc, + Requires: []*analysis.Analyzer{buildssa.Analyzer}, + } + return goanalysis.NewLinter( + linterName, + "Reports unused function parameters", + []*analysis.Analyzer{analyzer}, + nil, + ).WithContextSetter(func(lintCtx *linter.Context) { + us := &lintCtx.Settings().Unparam + if us.Algo != "cha" { + lintCtx.Log.Warnf("`linters-settings.unparam.algo` isn't supported by the newest `unparam`") + } + + analyzer.Run = func(pass *analysis.Pass) (interface{}, error) { + ssa := pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA) + ssaPkg := ssa.Pkg + + pkg := &packages.Package{ + Fset: pass.Fset, + Syntax: pass.Files, + Types: pass.Pkg, + TypesInfo: pass.TypesInfo, + } + + c := &check.Checker{} + c.CheckExportedFuncs(us.CheckExported) + c.Packages([]*packages.Package{pkg}) + c.ProgramSSA(ssaPkg.Prog) + + unparamIssues, err := c.Check() + if err != nil { + return nil, err + } + + var res []goanalysis.Issue + for _, i := range unparamIssues { + res = append(res, goanalysis.NewIssue(&result.Issue{ + Pos: pass.Fset.Position(i.Pos()), + Text: i.Message(), + FromLinter: linterName, + }, pass)) + } + + mu.Lock() + resIssues = append(resIssues, res...) 
+ mu.Unlock() + + return nil, nil + } + }).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue { + return resIssues + }).WithLoadMode(goanalysis.LoadModeTypesInfo) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/unused.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/unused.go new file mode 100644 index 000000000..cfdf1f2ca --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/unused.go @@ -0,0 +1,69 @@ +package golinters + +import ( + "fmt" + "sync" + + "golang.org/x/tools/go/analysis" + "honnef.co/go/tools/unused" + + "github.com/golangci/golangci-lint/pkg/config" + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" + "github.com/golangci/golangci-lint/pkg/lint/linter" + "github.com/golangci/golangci-lint/pkg/result" +) + +type UnusedSettings struct { + GoVersion string +} + +func NewUnused(settings *config.StaticCheckSettings) *goanalysis.Linter { + const name = "unused" + + var mu sync.Mutex + var resIssues []goanalysis.Issue + + analyzer := &analysis.Analyzer{ + Name: name, + Doc: unused.Analyzer.Analyzer.Doc, + Requires: unused.Analyzer.Analyzer.Requires, + Run: func(pass *analysis.Pass) (interface{}, error) { + res, err := unused.Analyzer.Analyzer.Run(pass) + if err != nil { + return nil, err + } + + sr := unused.Serialize(pass, res.(unused.Result), pass.Fset) + + var issues []goanalysis.Issue + for _, object := range sr.Unused { + issue := goanalysis.NewIssue(&result.Issue{ + FromLinter: name, + Text: fmt.Sprintf("%s %s is unused", object.Kind, object.Name), + Pos: object.Position, + }, pass) + + issues = append(issues, issue) + } + + mu.Lock() + resIssues = append(resIssues, issues...) + mu.Unlock() + + return nil, nil + }, + } + + setAnalyzerGoVersion(analyzer, getGoVersion(settings)) + + lnt := goanalysis.NewLinter( + name, + "Checks Go code for unused constants, variables, functions and types", + []*analysis.Analyzer{analyzer}, + nil, + ).WithIssuesReporter(func(lintCtx *linter.Context) []goanalysis.Issue { + return resIssues + }).WithLoadMode(goanalysis.LoadModeTypesInfo) + + return lnt +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/util.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/util.go new file mode 100644 index 000000000..1940f30e3 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/util.go @@ -0,0 +1,24 @@ +package golinters + +import ( + "fmt" + "strings" + + "github.com/golangci/golangci-lint/pkg/config" +) + +func formatCode(code string, _ *config.Config) string { + if strings.Contains(code, "`") { + return code // TODO: properly escape or remove + } + + return fmt.Sprintf("`%s`", code) +} + +func formatCodeBlock(code string, _ *config.Config) string { + if strings.Contains(code, "`") { + return code // TODO: properly escape or remove + } + + return fmt.Sprintf("```\n%s\n```", code) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/varcheck.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/varcheck.go new file mode 100644 index 000000000..dcf2e7de8 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/varcheck.go @@ -0,0 +1,55 @@ +package golinters // nolint:dupl + +import ( + "fmt" + "sync" + + varcheckAPI "github.com/golangci/check/cmd/varcheck" + "golang.org/x/tools/go/analysis" + + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" + "github.com/golangci/golangci-lint/pkg/lint/linter" + "github.com/golangci/golangci-lint/pkg/result" +) + +func NewVarcheck() 
*goanalysis.Linter { + const linterName = "varcheck" + var mu sync.Mutex + var res []goanalysis.Issue + analyzer := &analysis.Analyzer{ + Name: linterName, + Doc: goanalysis.TheOnlyanalyzerDoc, + } + return goanalysis.NewLinter( + linterName, + "Finds unused global variables and constants", + []*analysis.Analyzer{analyzer}, + nil, + ).WithContextSetter(func(lintCtx *linter.Context) { + checkExported := lintCtx.Settings().Varcheck.CheckExportedFields + analyzer.Run = func(pass *analysis.Pass) (interface{}, error) { + prog := goanalysis.MakeFakeLoaderProgram(pass) + + varcheckIssues := varcheckAPI.Run(prog, checkExported) + if len(varcheckIssues) == 0 { + return nil, nil + } + + issues := make([]goanalysis.Issue, 0, len(varcheckIssues)) + for _, i := range varcheckIssues { + issues = append(issues, goanalysis.NewIssue(&result.Issue{ + Pos: i.Pos, + Text: fmt.Sprintf("%s is unused", formatCode(i.VarName, lintCtx.Cfg)), + FromLinter: linterName, + }, pass)) + } + + mu.Lock() + res = append(res, issues...) + mu.Unlock() + return nil, nil + } + }).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue { + return res + }).WithLoadMode(goanalysis.LoadModeTypesInfo) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/wastedassign.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/wastedassign.go new file mode 100644 index 000000000..d359fb019 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/wastedassign.go @@ -0,0 +1,21 @@ +package golinters + +import ( + "github.com/sanposhiho/wastedassign/v2" + "golang.org/x/tools/go/analysis" + + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" +) + +func NewWastedAssign() *goanalysis.Linter { + analyzers := []*analysis.Analyzer{ + wastedassign.Analyzer, + } + + return goanalysis.NewLinter( + "wastedassign", + "wastedassign finds wasted assignment statements.", + analyzers, + nil, + ).WithLoadMode(goanalysis.LoadModeTypesInfo) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/whitespace.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/whitespace.go new file mode 100644 index 000000000..d475465a2 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/whitespace.go @@ -0,0 +1,85 @@ +package golinters + +import ( + "go/token" + "sync" + + "github.com/pkg/errors" + "github.com/ultraware/whitespace" + "golang.org/x/tools/go/analysis" + + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" + "github.com/golangci/golangci-lint/pkg/lint/linter" + "github.com/golangci/golangci-lint/pkg/result" +) + +func NewWhitespace() *goanalysis.Linter { + const linterName = "whitespace" + var mu sync.Mutex + var resIssues []goanalysis.Issue + + analyzer := &analysis.Analyzer{ + Name: linterName, + Doc: goanalysis.TheOnlyanalyzerDoc, + } + return goanalysis.NewLinter( + linterName, + "Tool for detection of leading and trailing whitespace", + []*analysis.Analyzer{analyzer}, + nil, + ).WithContextSetter(func(lintCtx *linter.Context) { + cfg := lintCtx.Cfg.LintersSettings.Whitespace + settings := whitespace.Settings{MultiIf: cfg.MultiIf, MultiFunc: cfg.MultiFunc} + + analyzer.Run = func(pass *analysis.Pass) (interface{}, error) { + var issues []whitespace.Message + for _, file := range pass.Files { + issues = append(issues, whitespace.Run(file, pass.Fset, settings)...) 
+ } + + if len(issues) == 0 { + return nil, nil + } + + res := make([]goanalysis.Issue, len(issues)) + for k, i := range issues { + issue := result.Issue{ + Pos: token.Position{ + Filename: i.Pos.Filename, + Line: i.Pos.Line, + }, + LineRange: &result.Range{From: i.Pos.Line, To: i.Pos.Line}, + Text: i.Message, + FromLinter: linterName, + Replacement: &result.Replacement{}, + } + + bracketLine, err := lintCtx.LineCache.GetLine(issue.Pos.Filename, issue.Pos.Line) + if err != nil { + return nil, errors.Wrapf(err, "failed to get line %s:%d", issue.Pos.Filename, issue.Pos.Line) + } + + switch i.Type { + case whitespace.MessageTypeLeading: + issue.LineRange.To++ // cover two lines by the issue: opening bracket "{" (issue.Pos.Line) and following empty line + case whitespace.MessageTypeTrailing: + issue.LineRange.From-- // cover two lines by the issue: closing bracket "}" (issue.Pos.Line) and preceding empty line + issue.Pos.Line-- // set in sync with LineRange.From to not break fixer and other code features + case whitespace.MessageTypeAddAfter: + bracketLine += "\n" + } + issue.Replacement.NewLines = []string{bracketLine} + + res[k] = goanalysis.NewIssue(&issue, pass) + } + + mu.Lock() + resIssues = append(resIssues, res...) + mu.Unlock() + + return nil, nil + } + }).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue { + return resIssues + }).WithLoadMode(goanalysis.LoadModeSyntax) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/wrapcheck.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/wrapcheck.go new file mode 100644 index 000000000..5eaf085d7 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/wrapcheck.go @@ -0,0 +1,32 @@ +package golinters + +import ( + "github.com/tomarrell/wrapcheck/v2/wrapcheck" + "golang.org/x/tools/go/analysis" + + "github.com/golangci/golangci-lint/pkg/config" + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" +) + +const wrapcheckName = "wrapcheck" + +func NewWrapcheck(settings *config.WrapcheckSettings) *goanalysis.Linter { + cfg := wrapcheck.NewDefaultConfig() + if settings != nil { + if len(settings.IgnoreSigs) != 0 { + cfg.IgnoreSigs = settings.IgnoreSigs + } + if len(settings.IgnorePackageGlobs) != 0 { + cfg.IgnorePackageGlobs = settings.IgnorePackageGlobs + } + } + + a := wrapcheck.NewAnalyzer(cfg) + + return goanalysis.NewLinter( + wrapcheckName, + a.Doc, + []*analysis.Analyzer{a}, + nil, + ).WithLoadMode(goanalysis.LoadModeTypesInfo) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/wsl.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/wsl.go new file mode 100644 index 000000000..29d00faea --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/wsl.go @@ -0,0 +1,83 @@ +package golinters + +import ( + "sync" + + "github.com/bombsimon/wsl/v3" + "golang.org/x/tools/go/analysis" + + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" + "github.com/golangci/golangci-lint/pkg/lint/linter" + "github.com/golangci/golangci-lint/pkg/result" +) + +const ( + name = "wsl" +) + +// NewWSL returns a new WSL linter. 
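NewWSL, defined next, follows the same wrapper shape used by most linters in this part of the patch: a bare analysis.Analyzer whose Run is injected via WithContextSetter so it can read the linter settings, issues accumulated under a mutex, and WithIssuesReporter handing them back to the runner. Below is a condensed sketch of that shape in isolation; "examplelinter" and findIssues are hypothetical placeholders rather than a real upstream linter.

// Condensed sketch (not part of the patch) of the context-setter /
// issues-reporter wrapper pattern used throughout pkg/golinters.
package golinters

import (
	"sync"

	"golang.org/x/tools/go/analysis"

	"github.com/golangci/golangci-lint/pkg/golinters/goanalysis"
	"github.com/golangci/golangci-lint/pkg/lint/linter"
	"github.com/golangci/golangci-lint/pkg/result"
)

// findIssues is a hypothetical stand-in for a call into an upstream checker.
func findIssues(pass *analysis.Pass) []result.Issue {
	return nil
}

func NewExampleLinter() *goanalysis.Linter {
	const linterName = "examplelinter" // hypothetical linter name

	var mu sync.Mutex
	var resIssues []goanalysis.Issue

	analyzer := &analysis.Analyzer{
		Name: linterName,
		Doc:  goanalysis.TheOnlyanalyzerDoc,
	}

	return goanalysis.NewLinter(
		linterName,
		"Example of the context-setter / issues-reporter wrapper pattern",
		[]*analysis.Analyzer{analyzer},
		nil,
	).WithContextSetter(func(lintCtx *linter.Context) {
		// A real wrapper would read its settings from lintCtx here.
		analyzer.Run = func(pass *analysis.Pass) (interface{}, error) {
			issues := findIssues(pass)
			if len(issues) == 0 {
				return nil, nil
			}

			mu.Lock()
			for i := range issues {
				resIssues = append(resIssues, goanalysis.NewIssue(&issues[i], pass))
			}
			mu.Unlock()

			return nil, nil
		}
	}).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue {
		return resIssues
	}).WithLoadMode(goanalysis.LoadModeSyntax)
}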
+func NewWSL() *goanalysis.Linter { + var ( + issues []goanalysis.Issue + mu = sync.Mutex{} + analyzer = &analysis.Analyzer{ + Name: goanalysis.TheOnlyAnalyzerName, + Doc: goanalysis.TheOnlyanalyzerDoc, + } + ) + + return goanalysis.NewLinter( + name, + "Whitespace Linter - Forces you to use empty lines!", + []*analysis.Analyzer{analyzer}, + nil, + ).WithContextSetter(func(lintCtx *linter.Context) { + analyzer.Run = func(pass *analysis.Pass) (interface{}, error) { + var ( + files = []string{} + linterCfg = lintCtx.Cfg.LintersSettings.WSL + processorCfg = wsl.Configuration{ + StrictAppend: linterCfg.StrictAppend, + AllowAssignAndCallCuddle: linterCfg.AllowAssignAndCallCuddle, + AllowAssignAndAnythingCuddle: linterCfg.AllowAssignAndAnythingCuddle, + AllowMultiLineAssignCuddle: linterCfg.AllowMultiLineAssignCuddle, + AllowCuddleDeclaration: linterCfg.AllowCuddleDeclaration, + AllowTrailingComment: linterCfg.AllowTrailingComment, + AllowSeparatedLeadingComment: linterCfg.AllowSeparatedLeadingComment, + ForceCuddleErrCheckAndAssign: linterCfg.ForceCuddleErrCheckAndAssign, + ForceCaseTrailingWhitespaceLimit: linterCfg.ForceCaseTrailingWhitespaceLimit, + ForceExclusiveShortDeclarations: linterCfg.ForceExclusiveShortDeclarations, + AllowCuddleWithCalls: []string{"Lock", "RLock"}, + AllowCuddleWithRHS: []string{"Unlock", "RUnlock"}, + ErrorVariableNames: []string{"err"}, + } + ) + + for _, file := range pass.Files { + files = append(files, pass.Fset.PositionFor(file.Pos(), false).Filename) + } + + wslErrors, _ := wsl.NewProcessorWithConfig(processorCfg). + ProcessFiles(files) + + if len(wslErrors) == 0 { + return nil, nil + } + + mu.Lock() + defer mu.Unlock() + + for _, err := range wslErrors { + issues = append(issues, goanalysis.NewIssue(&result.Issue{ + FromLinter: name, + Pos: err.Position, + Text: err.Reason, + }, pass)) + } + + return nil, nil + } + }).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue { + return issues + }).WithLoadMode(goanalysis.LoadModeSyntax) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/goutil/env.go b/vendor/github.com/golangci/golangci-lint/pkg/goutil/env.go new file mode 100644 index 000000000..1c05b9805 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/goutil/env.go @@ -0,0 +1,61 @@ +package goutil + +import ( + "context" + "encoding/json" + "os" + "os/exec" + "strings" + "time" + + "github.com/pkg/errors" + + "github.com/golangci/golangci-lint/pkg/logutils" +) + +type EnvKey string + +const ( + EnvGoCache EnvKey = "GOCACHE" + EnvGoRoot EnvKey = "GOROOT" +) + +type Env struct { + vars map[string]string + log logutils.Log + debugf logutils.DebugFunc +} + +func NewEnv(log logutils.Log) *Env { + return &Env{ + vars: map[string]string{}, + log: log, + debugf: logutils.Debug("env"), + } +} + +func (e *Env) Discover(ctx context.Context) error { + startedAt := time.Now() + args := []string{"env", "-json"} + args = append(args, string(EnvGoCache), string(EnvGoRoot)) + out, err := exec.CommandContext(ctx, "go", args...).Output() + if err != nil { + return errors.Wrap(err, "failed to run 'go env'") + } + + if err = json.Unmarshal(out, &e.vars); err != nil { + return errors.Wrapf(err, "failed to parse 'go %s' json", strings.Join(args, " ")) + } + + e.debugf("Read go env for %s: %#v", time.Since(startedAt), e.vars) + return nil +} + +func (e Env) Get(k EnvKey) string { + envValue := os.Getenv(string(k)) + if envValue != "" { + return envValue + } + + return e.vars[string(k)] +} diff --git 
a/vendor/github.com/golangci/golangci-lint/pkg/lint/linter/config.go b/vendor/github.com/golangci/golangci-lint/pkg/lint/linter/config.go new file mode 100644 index 000000000..2372a011e --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/lint/linter/config.go @@ -0,0 +1,127 @@ +package linter + +import ( + "golang.org/x/tools/go/packages" +) + +const ( + PresetBugs = "bugs" // Related to bugs detection. + PresetComment = "comment" // Related to comments analysis. + PresetComplexity = "complexity" // Related to code complexity analysis. + PresetError = "error" // Related to error handling analysis. + PresetFormatting = "format" // Related to code formatting. + PresetImport = "import" // Related to imports analysis. + PresetMetaLinter = "metalinter" // Related to linter that contains multiple rules or multiple linters. + PresetModule = "module" // Related to Go modules analysis. + PresetPerformance = "performance" // Related to performance. + PresetSQL = "sql" // Related to SQL. + PresetStyle = "style" // Related to coding style. + PresetTest = "test" // Related to the analysis of the code of the tests. + PresetUnused = "unused" // Related to the detection of unused code. +) + +// LastLinter nolintlint must be last because it looks at the results of all the previous linters for unused nolint directives. +const LastLinter = "nolintlint" + +type Deprecation struct { + Since string + Message string + Replacement string +} + +type Config struct { + Linter Linter + EnabledByDefault bool + + LoadMode packages.LoadMode + + InPresets []string + AlternativeNames []string + + OriginalURL string // URL of original (not forked) repo, needed for autogenerated README + CanAutoFix bool + IsSlow bool + DoesChangeTypes bool + + Since string + Deprecation *Deprecation +} + +func (lc *Config) ConsiderSlow() *Config { + lc.IsSlow = true + return lc +} + +func (lc *Config) IsSlowLinter() bool { + return lc.IsSlow +} + +func (lc *Config) WithLoadFiles() *Config { + lc.LoadMode |= packages.NeedName | packages.NeedFiles | packages.NeedCompiledGoFiles + return lc +} + +func (lc *Config) WithLoadForGoAnalysis() *Config { + lc = lc.WithLoadFiles() + lc.LoadMode |= packages.NeedImports | packages.NeedDeps | packages.NeedExportsFile | packages.NeedTypesSizes + lc.IsSlow = true + return lc +} + +func (lc *Config) WithPresets(presets ...string) *Config { + lc.InPresets = presets + return lc +} + +func (lc *Config) WithURL(url string) *Config { + lc.OriginalURL = url + return lc +} + +func (lc *Config) WithAlternativeNames(names ...string) *Config { + lc.AlternativeNames = names + return lc +} + +func (lc *Config) WithAutoFix() *Config { + lc.CanAutoFix = true + return lc +} + +func (lc *Config) WithChangeTypes() *Config { + lc.DoesChangeTypes = true + return lc +} + +func (lc *Config) WithSince(version string) *Config { + lc.Since = version + return lc +} + +func (lc *Config) Deprecated(message, version, replacement string) *Config { + lc.Deprecation = &Deprecation{ + Since: version, + Message: message, + Replacement: replacement, + } + return lc +} + +func (lc *Config) IsDeprecated() bool { + return lc.Deprecation != nil +} + +func (lc *Config) AllNames() []string { + return append([]string{lc.Name()}, lc.AlternativeNames...) 
+} + +func (lc *Config) Name() string { + return lc.Linter.Name() +} + +func NewConfig(linter Linter) *Config { + lc := &Config{ + Linter: linter, + } + return lc.WithLoadFiles() +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/lint/linter/context.go b/vendor/github.com/golangci/golangci-lint/pkg/lint/linter/context.go new file mode 100644 index 000000000..a9f9d7d7f --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/lint/linter/context.go @@ -0,0 +1,49 @@ +package linter + +import ( + "go/ast" + + "golang.org/x/tools/go/packages" + + "github.com/golangci/golangci-lint/internal/pkgcache" + "github.com/golangci/golangci-lint/pkg/config" + "github.com/golangci/golangci-lint/pkg/fsutils" + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis/load" + "github.com/golangci/golangci-lint/pkg/logutils" +) + +type Context struct { + // Packages are deduplicated (test and normal packages) packages + Packages []*packages.Package + + // OriginalPackages aren't deduplicated: they contain both normal and test + // version for each of packages + OriginalPackages []*packages.Package + + Cfg *config.Config + FileCache *fsutils.FileCache + LineCache *fsutils.LineCache + Log logutils.Log + + PkgCache *pkgcache.Cache + LoadGuard *load.Guard +} + +func (c *Context) Settings() *config.LintersSettings { + return &c.Cfg.LintersSettings +} + +func (c *Context) ClearTypesInPackages() { + for _, p := range c.Packages { + clearTypes(p) + } + for _, p := range c.OriginalPackages { + clearTypes(p) + } +} + +func clearTypes(p *packages.Package) { + p.Types = nil + p.TypesInfo = nil + p.Syntax = []*ast.File{} +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/lint/linter/linter.go b/vendor/github.com/golangci/golangci-lint/pkg/lint/linter/linter.go new file mode 100644 index 000000000..cfe9ec020 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/lint/linter/linter.go @@ -0,0 +1,13 @@ +package linter + +import ( + "context" + + "github.com/golangci/golangci-lint/pkg/result" +) + +type Linter interface { + Run(ctx context.Context, lintCtx *Context) ([]result.Issue, error) + Name() string + Desc() string +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/lint/lintersdb/enabled_set.go b/vendor/github.com/golangci/golangci-lint/pkg/lint/lintersdb/enabled_set.go new file mode 100644 index 000000000..9814aa857 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/lint/lintersdb/enabled_set.go @@ -0,0 +1,207 @@ +package lintersdb + +import ( + "os" + "sort" + "strings" + + "github.com/golangci/golangci-lint/pkg/config" + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" + "github.com/golangci/golangci-lint/pkg/lint/linter" + "github.com/golangci/golangci-lint/pkg/logutils" +) + +type EnabledSet struct { + m *Manager + v *Validator + log logutils.Log + cfg *config.Config + debugf logutils.DebugFunc +} + +func NewEnabledSet(m *Manager, v *Validator, log logutils.Log, cfg *config.Config) *EnabledSet { + return &EnabledSet{ + m: m, + v: v, + log: log, + cfg: cfg, + debugf: logutils.Debug("enabled_linters"), + } +} + +func (es EnabledSet) build(lcfg *config.Linters, enabledByDefaultLinters []*linter.Config) map[string]*linter.Config { + es.debugf("Linters config: %#v", lcfg) + resultLintersSet := map[string]*linter.Config{} + switch { + case len(lcfg.Presets) != 0: + break // imply --disable-all + case lcfg.EnableAll: + resultLintersSet = linterConfigsToMap(es.m.GetAllSupportedLinterConfigs()) + case lcfg.DisableAll: + break + default: + 
resultLintersSet = linterConfigsToMap(enabledByDefaultLinters) + } + + // --presets can only add linters to default set + for _, p := range lcfg.Presets { + for _, lc := range es.m.GetAllLinterConfigsForPreset(p) { + lc := lc + resultLintersSet[lc.Name()] = lc + } + } + + // --fast removes slow linters from current set. + // It should be after --presets to be able to run only fast linters in preset. + // It should be before --enable and --disable to be able to enable or disable specific linter. + if lcfg.Fast { + for name, lc := range resultLintersSet { + if lc.IsSlowLinter() { + delete(resultLintersSet, name) + } + } + } + + for _, name := range lcfg.Enable { + for _, lc := range es.m.GetLinterConfigs(name) { + // it's important to use lc.Name() nor name because name can be alias + resultLintersSet[lc.Name()] = lc + } + } + + for _, name := range lcfg.Disable { + for _, lc := range es.m.GetLinterConfigs(name) { + // it's important to use lc.Name() nor name because name can be alias + delete(resultLintersSet, lc.Name()) + } + } + + return resultLintersSet +} + +func (es EnabledSet) GetEnabledLintersMap() (map[string]*linter.Config, error) { + if err := es.v.validateEnabledDisabledLintersConfig(&es.cfg.Linters); err != nil { + return nil, err + } + + enabledLinters := es.build(&es.cfg.Linters, es.m.GetAllEnabledByDefaultLinters()) + if os.Getenv("GL_TEST_RUN") == "1" { + es.verbosePrintLintersStatus(enabledLinters) + } + return enabledLinters, nil +} + +// GetOptimizedLinters returns enabled linters after optimization (merging) of multiple linters +// into a fewer number of linters. E.g. some go/analysis linters can be optimized into +// one metalinter for data reuse and speed up. +func (es EnabledSet) GetOptimizedLinters() ([]*linter.Config, error) { + if err := es.v.validateEnabledDisabledLintersConfig(&es.cfg.Linters); err != nil { + return nil, err + } + + resultLintersSet := es.build(&es.cfg.Linters, es.m.GetAllEnabledByDefaultLinters()) + es.verbosePrintLintersStatus(resultLintersSet) + es.combineGoAnalysisLinters(resultLintersSet) + + var resultLinters []*linter.Config + for _, lc := range resultLintersSet { + resultLinters = append(resultLinters, lc) + } + + // Make order of execution of linters (go/analysis metalinter and unused) stable. + sort.Slice(resultLinters, func(i, j int) bool { + a, b := resultLinters[i], resultLinters[j] + + if b.Name() == linter.LastLinter { + return true + } + + if a.Name() == linter.LastLinter { + return false + } + + if a.DoesChangeTypes != b.DoesChangeTypes { + return b.DoesChangeTypes // move type-changing linters to the end to optimize speed + } + return strings.Compare(a.Name(), b.Name()) < 0 + }) + + return resultLinters, nil +} + +func (es EnabledSet) combineGoAnalysisLinters(linters map[string]*linter.Config) { + var goanalysisLinters []*goanalysis.Linter + goanalysisPresets := map[string]bool{} + for _, linter := range linters { + lnt, ok := linter.Linter.(*goanalysis.Linter) + if !ok { + continue + } + if lnt.LoadMode() == goanalysis.LoadModeWholeProgram { + // It's ineffective by CPU and memory to run whole-program and incremental analyzers at once. 
+ continue + } + goanalysisLinters = append(goanalysisLinters, lnt) + for _, p := range linter.InPresets { + goanalysisPresets[p] = true + } + } + + if len(goanalysisLinters) <= 1 { + es.debugf("Didn't combine go/analysis linters: got only %d linters", len(goanalysisLinters)) + return + } + + for _, lnt := range goanalysisLinters { + delete(linters, lnt.Name()) + } + + // Make order of execution of go/analysis analyzers stable. + sort.Slice(goanalysisLinters, func(i, j int) bool { + a, b := goanalysisLinters[i], goanalysisLinters[j] + + if b.Name() == linter.LastLinter { + return true + } + + if a.Name() == linter.LastLinter { + return false + } + + return strings.Compare(a.Name(), b.Name()) <= 0 + }) + + ml := goanalysis.NewMetaLinter(goanalysisLinters) + + var presets []string + for p := range goanalysisPresets { + presets = append(presets, p) + } + + mlConfig := &linter.Config{ + Linter: ml, + EnabledByDefault: false, + InPresets: presets, + AlternativeNames: nil, + OriginalURL: "", + } + + mlConfig = mlConfig.WithLoadForGoAnalysis() + + linters[ml.Name()] = mlConfig + es.debugf("Combined %d go/analysis linters into one metalinter", len(goanalysisLinters)) +} + +func (es EnabledSet) verbosePrintLintersStatus(lcs map[string]*linter.Config) { + var linterNames []string + for _, lc := range lcs { + linterNames = append(linterNames, lc.Name()) + } + sort.StringSlice(linterNames).Sort() + es.log.Infof("Active %d linters: %s", len(linterNames), linterNames) + + if len(es.cfg.Linters.Presets) != 0 { + sort.StringSlice(es.cfg.Linters.Presets).Sort() + es.log.Infof("Active presets: %s", es.cfg.Linters.Presets) + } +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/lint/lintersdb/manager.go b/vendor/github.com/golangci/golangci-lint/pkg/lint/lintersdb/manager.go new file mode 100644 index 000000000..a69a6ec38 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/lint/lintersdb/manager.go @@ -0,0 +1,618 @@ +package lintersdb + +import ( + "fmt" + "path/filepath" + "plugin" + + "github.com/spf13/viper" + "golang.org/x/tools/go/analysis" + + "github.com/golangci/golangci-lint/pkg/config" + "github.com/golangci/golangci-lint/pkg/golinters" + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis" + "github.com/golangci/golangci-lint/pkg/lint/linter" + "github.com/golangci/golangci-lint/pkg/logutils" + "github.com/golangci/golangci-lint/pkg/report" +) + +type Manager struct { + nameToLCs map[string][]*linter.Config + cfg *config.Config + log logutils.Log +} + +func NewManager(cfg *config.Config, log logutils.Log) *Manager { + m := &Manager{cfg: cfg, log: log} + nameToLCs := make(map[string][]*linter.Config) + for _, lc := range m.GetAllSupportedLinterConfigs() { + for _, name := range lc.AllNames() { + nameToLCs[name] = append(nameToLCs[name], lc) + } + } + + m.nameToLCs = nameToLCs + return m +} + +func (m *Manager) WithCustomLinters() *Manager { + if m.log == nil { + m.log = report.NewLogWrapper(logutils.NewStderrLog(""), &report.Data{}) + } + if m.cfg != nil { + for name, settings := range m.cfg.LintersSettings.Custom { + lc, err := m.loadCustomLinterConfig(name, settings) + + if err != nil { + m.log.Errorf("Unable to load custom analyzer %s:%s, %v", + name, + settings.Path, + err) + } else { + m.nameToLCs[name] = append(m.nameToLCs[name], lc) + } + } + } + return m +} + +func (Manager) AllPresets() []string { + return []string{ + linter.PresetBugs, + linter.PresetComment, + linter.PresetComplexity, + linter.PresetError, + linter.PresetFormatting, + linter.PresetImport, + 
linter.PresetMetaLinter, + linter.PresetModule, + linter.PresetPerformance, + linter.PresetSQL, + linter.PresetStyle, + linter.PresetTest, + linter.PresetUnused, + } +} + +func (m Manager) allPresetsSet() map[string]bool { + ret := map[string]bool{} + for _, p := range m.AllPresets() { + ret[p] = true + } + return ret +} + +func (m Manager) GetLinterConfigs(name string) []*linter.Config { + return m.nameToLCs[name] +} + +func enableLinterConfigs(lcs []*linter.Config, isEnabled func(lc *linter.Config) bool) []*linter.Config { + var ret []*linter.Config + for _, lc := range lcs { + lc := lc + lc.EnabledByDefault = isEnabled(lc) + ret = append(ret, lc) + } + + return ret +} + +//nolint:funlen +func (m Manager) GetAllSupportedLinterConfigs() []*linter.Config { + var govetCfg *config.GovetSettings + var testpackageCfg *config.TestpackageSettings + var exhaustiveCfg *config.ExhaustiveSettings + var exhaustiveStructCfg *config.ExhaustiveStructSettings + var errorlintCfg *config.ErrorLintSettings + var thelperCfg *config.ThelperSettings + var predeclaredCfg *config.PredeclaredSettings + var ifshortCfg *config.IfshortSettings + var reviveCfg *config.ReviveSettings + var cyclopCfg *config.Cyclop + var importAsCfg *config.ImportAsSettings + var goModDirectivesCfg *config.GoModDirectivesSettings + var tagliatelleCfg *config.TagliatelleSettings + var gosecCfg *config.GoSecSettings + var gosimpleCfg *config.StaticCheckSettings + var staticcheckCfg *config.StaticCheckSettings + var stylecheckCfg *config.StaticCheckSettings + var unusedCfg *config.StaticCheckSettings + var wrapcheckCfg *config.WrapcheckSettings + + if m.cfg != nil { + govetCfg = &m.cfg.LintersSettings.Govet + testpackageCfg = &m.cfg.LintersSettings.Testpackage + exhaustiveCfg = &m.cfg.LintersSettings.Exhaustive + exhaustiveStructCfg = &m.cfg.LintersSettings.ExhaustiveStruct + errorlintCfg = &m.cfg.LintersSettings.ErrorLint + thelperCfg = &m.cfg.LintersSettings.Thelper + predeclaredCfg = &m.cfg.LintersSettings.Predeclared + ifshortCfg = &m.cfg.LintersSettings.Ifshort + reviveCfg = &m.cfg.LintersSettings.Revive + cyclopCfg = &m.cfg.LintersSettings.Cyclop + importAsCfg = &m.cfg.LintersSettings.ImportAs + goModDirectivesCfg = &m.cfg.LintersSettings.GoModDirectives + tagliatelleCfg = &m.cfg.LintersSettings.Tagliatelle + gosecCfg = &m.cfg.LintersSettings.Gosec + gosimpleCfg = &m.cfg.LintersSettings.Gosimple + staticcheckCfg = &m.cfg.LintersSettings.Staticcheck + stylecheckCfg = &m.cfg.LintersSettings.Stylecheck + unusedCfg = &m.cfg.LintersSettings.Unused + wrapcheckCfg = &m.cfg.LintersSettings.Wrapcheck + } + + const megacheckName = "megacheck" + + lcs := []*linter.Config{ + linter.NewConfig(golinters.NewGovet(govetCfg)). + WithSince("v1.0.0"). + WithLoadForGoAnalysis(). + WithPresets(linter.PresetBugs, linter.PresetMetaLinter). + WithAlternativeNames("vet", "vetshadow"). + WithURL("https://golang.org/cmd/vet/"), + linter.NewConfig(golinters.NewBodyclose()). + WithSince("v1.18.0"). + WithLoadForGoAnalysis(). + WithPresets(linter.PresetPerformance, linter.PresetBugs). + WithURL("https://github.com/timakin/bodyclose"), + linter.NewConfig(golinters.NewNoctx()). + WithSince("v1.28.0"). + WithLoadForGoAnalysis(). + WithPresets(linter.PresetPerformance, linter.PresetBugs). + WithURL("https://github.com/sonatard/noctx"), + linter.NewConfig(golinters.NewErrcheck()). + WithSince("v1.0.0"). + WithLoadForGoAnalysis(). + WithPresets(linter.PresetBugs, linter.PresetError). 
+ WithURL("https://github.com/kisielk/errcheck"), + linter.NewConfig(golinters.NewGolint()). + WithSince("v1.0.0"). + WithLoadForGoAnalysis(). + WithPresets(linter.PresetStyle). + WithURL("https://github.com/golang/lint"). + Deprecated("The repository of the linter has been archived by the owner.", "v1.41.0", "revive"), + linter.NewConfig(golinters.NewRowsErrCheck()). + WithSince("v1.23.0"). + WithLoadForGoAnalysis(). + WithPresets(linter.PresetBugs, linter.PresetSQL). + WithURL("https://github.com/jingyugao/rowserrcheck"), + + linter.NewConfig(golinters.NewStaticcheck(staticcheckCfg)). + WithSince("v1.0.0"). + WithLoadForGoAnalysis(). + WithPresets(linter.PresetBugs, linter.PresetMetaLinter). + WithAlternativeNames(megacheckName). + WithURL("https://staticcheck.io/"), + linter.NewConfig(golinters.NewUnused(unusedCfg)). + WithSince("v1.20.0"). + WithLoadForGoAnalysis(). + WithPresets(linter.PresetUnused). + WithAlternativeNames(megacheckName). + ConsiderSlow(). + WithChangeTypes(). + WithURL("https://github.com/dominikh/go-tools/tree/master/unused"), + linter.NewConfig(golinters.NewGosimple(gosimpleCfg)). + WithSince("v1.20.0"). + WithLoadForGoAnalysis(). + WithPresets(linter.PresetStyle). + WithAlternativeNames(megacheckName). + WithURL("https://github.com/dominikh/go-tools/tree/master/simple"), + + linter.NewConfig(golinters.NewStylecheck(stylecheckCfg)). + WithSince("v1.20.0"). + WithLoadForGoAnalysis(). + WithPresets(linter.PresetStyle). + WithURL("https://github.com/dominikh/go-tools/tree/master/stylecheck"), + linter.NewConfig(golinters.NewGosec(gosecCfg)). + WithSince("v1.0.0"). + WithLoadForGoAnalysis(). + WithPresets(linter.PresetBugs). + WithURL("https://github.com/securego/gosec"). + WithAlternativeNames("gas"), + linter.NewConfig(golinters.NewStructcheck()). + WithSince("v1.0.0"). + WithLoadForGoAnalysis(). + WithPresets(linter.PresetUnused). + WithURL("https://github.com/opennota/check"), + linter.NewConfig(golinters.NewVarcheck()). + WithSince("v1.0.0"). + WithLoadForGoAnalysis(). + WithPresets(linter.PresetUnused). + WithURL("https://github.com/opennota/check"), + linter.NewConfig(golinters.NewInterfacer()). + WithSince("v1.0.0"). + WithLoadForGoAnalysis(). + WithPresets(linter.PresetStyle). + WithURL("https://github.com/mvdan/interfacer"). + Deprecated("The repository of the linter has been archived by the owner.", "v1.38.0", ""), + linter.NewConfig(golinters.NewUnconvert()). + WithSince("v1.0.0"). + WithLoadForGoAnalysis(). + WithPresets(linter.PresetStyle). + WithURL("https://github.com/mdempsky/unconvert"), + linter.NewConfig(golinters.NewIneffassign()). + WithSince("v1.0.0"). + WithPresets(linter.PresetUnused). + WithURL("https://github.com/gordonklaus/ineffassign"), + linter.NewConfig(golinters.NewDupl()). + WithSince("v1.0.0"). + WithPresets(linter.PresetStyle). + WithURL("https://github.com/mibk/dupl"), + linter.NewConfig(golinters.NewGoconst()). + WithSince("v1.0.0"). + WithPresets(linter.PresetStyle). + WithURL("https://github.com/jgautheron/goconst"), + linter.NewConfig(golinters.NewDeadcode()). + WithSince("v1.0.0"). + WithLoadForGoAnalysis(). + WithPresets(linter.PresetUnused). + WithURL("https://github.com/remyoudompheng/go-misc/tree/master/deadcode"), + linter.NewConfig(golinters.NewGocyclo()). + WithSince("v1.0.0"). + WithPresets(linter.PresetComplexity). + WithURL("https://github.com/fzipp/gocyclo"), + linter.NewConfig(golinters.NewCyclop(cyclopCfg)). + WithSince("v1.37.0"). + WithLoadForGoAnalysis(). + WithPresets(linter.PresetComplexity). 
+ WithURL("https://github.com/bkielbasa/cyclop"), + linter.NewConfig(golinters.NewGocognit()). + WithSince("v1.20.0"). + WithPresets(linter.PresetComplexity). + WithURL("https://github.com/uudashr/gocognit"), + linter.NewConfig(golinters.NewTypecheck()). + WithSince("v1.3.0"). + WithLoadForGoAnalysis(). + WithPresets(linter.PresetBugs). + WithURL(""), + linter.NewConfig(golinters.NewAsciicheck()). + WithSince("v1.26.0"). + WithPresets(linter.PresetBugs, linter.PresetStyle). + WithURL("https://github.com/tdakkota/asciicheck"), + + linter.NewConfig(golinters.NewGofmt()). + WithSince("v1.0.0"). + WithPresets(linter.PresetFormatting). + WithAutoFix(). + WithURL("https://golang.org/cmd/gofmt/"), + linter.NewConfig(golinters.NewGofumpt()). + WithSince("v1.28.0"). + WithPresets(linter.PresetFormatting). + WithAutoFix(). + WithURL("https://github.com/mvdan/gofumpt"), + linter.NewConfig(golinters.NewGoimports()). + WithSince("v1.20.0"). + WithPresets(linter.PresetFormatting, linter.PresetImport). + WithAutoFix(). + WithURL("https://godoc.org/golang.org/x/tools/cmd/goimports"), + linter.NewConfig(golinters.NewGoHeader()). + WithSince("v1.28.0"). + WithPresets(linter.PresetStyle). + WithURL("https://github.com/denis-tingajkin/go-header"), + linter.NewConfig(golinters.NewGci()). + WithSince("v1.30.0"). + WithPresets(linter.PresetFormatting, linter.PresetImport). + WithAutoFix(). + WithURL("https://github.com/daixiang0/gci"), + linter.NewConfig(golinters.NewMaligned()). + WithSince("v1.0.0"). + WithLoadForGoAnalysis(). + WithPresets(linter.PresetPerformance). + WithURL("https://github.com/mdempsky/maligned"). + Deprecated("The repository of the linter has been archived by the owner.", "v1.38.0", "govet 'fieldalignment'"), + linter.NewConfig(golinters.NewDepguard()). + WithSince("v1.4.0"). + WithLoadForGoAnalysis(). + WithPresets(linter.PresetStyle, linter.PresetImport, linter.PresetModule). + WithURL("https://github.com/OpenPeeDeeP/depguard"), + linter.NewConfig(golinters.NewMisspell()). + WithSince("v1.8.0"). + WithPresets(linter.PresetStyle, linter.PresetComment). + WithAutoFix(). + WithURL("https://github.com/client9/misspell"), + linter.NewConfig(golinters.NewLLL()). + WithSince("v1.8.0"). + WithPresets(linter.PresetStyle), + linter.NewConfig(golinters.NewUnparam()). + WithSince("v1.9.0"). + WithPresets(linter.PresetUnused). + WithLoadForGoAnalysis(). + WithURL("https://github.com/mvdan/unparam"), + linter.NewConfig(golinters.NewDogsled()). + WithSince("v1.19.0"). + WithPresets(linter.PresetStyle). + WithURL("https://github.com/alexkohler/dogsled"), + linter.NewConfig(golinters.NewNakedret()). + WithSince("v1.19.0"). + WithPresets(linter.PresetStyle). + WithURL("https://github.com/alexkohler/nakedret"), + linter.NewConfig(golinters.NewPrealloc()). + WithSince("v1.19.0"). + WithPresets(linter.PresetPerformance). + WithURL("https://github.com/alexkohler/prealloc"), + linter.NewConfig(golinters.NewScopelint()). + WithSince("v1.12.0"). + WithPresets(linter.PresetBugs). + WithURL("https://github.com/kyoh86/scopelint"). + Deprecated("The repository of the linter has been deprecated by the owner.", "v1.39.0", "exportloopref"), + linter.NewConfig(golinters.NewGocritic()). + WithSince("v1.12.0"). + WithPresets(linter.PresetStyle, linter.PresetMetaLinter). + WithLoadForGoAnalysis(). + WithURL("https://github.com/go-critic/go-critic"), + linter.NewConfig(golinters.NewGochecknoinits()). + WithSince("v1.12.0"). + WithPresets(linter.PresetStyle). 
+ WithURL("https://github.com/leighmcculloch/gochecknoinits"), + linter.NewConfig(golinters.NewGochecknoglobals()). + WithSince("v1.12.0"). + WithPresets(linter.PresetStyle). + WithURL("https://github.com/leighmcculloch/gochecknoglobals"), + linter.NewConfig(golinters.NewGodox()). + WithSince("v1.19.0"). + WithPresets(linter.PresetStyle, linter.PresetComment). + WithURL("https://github.com/matoous/godox"), + linter.NewConfig(golinters.NewFunlen()). + WithSince("v1.18.0"). + WithPresets(linter.PresetComplexity). + WithURL("https://github.com/ultraware/funlen"), + linter.NewConfig(golinters.NewWhitespace()). + WithSince("v1.19.0"). + WithPresets(linter.PresetStyle). + WithAutoFix(). + WithURL("https://github.com/ultraware/whitespace"), + linter.NewConfig(golinters.NewWSL()). + WithSince("v1.20.0"). + WithPresets(linter.PresetStyle). + WithURL("https://github.com/bombsimon/wsl"), + linter.NewConfig(golinters.NewGoPrintfFuncName()). + WithSince("v1.23.0"). + WithPresets(linter.PresetStyle). + WithURL("https://github.com/jirfag/go-printf-func-name"), + linter.NewConfig(golinters.NewGoMND(m.cfg)). + WithSince("v1.22.0"). + WithPresets(linter.PresetStyle). + WithURL("https://github.com/tommy-muehle/go-mnd"), + linter.NewConfig(golinters.NewGoerr113()). + WithSince("v1.26.0"). + WithPresets(linter.PresetStyle, linter.PresetError). + WithLoadForGoAnalysis(). + WithURL("https://github.com/Djarvur/go-err113"), + linter.NewConfig(golinters.NewGomodguard()). + WithSince("v1.25.0"). + WithPresets(linter.PresetStyle, linter.PresetImport, linter.PresetModule). + WithURL("https://github.com/ryancurrah/gomodguard"), + linter.NewConfig(golinters.NewGodot()). + WithSince("v1.25.0"). + WithPresets(linter.PresetStyle, linter.PresetComment). + WithAutoFix(). + WithURL("https://github.com/tetafro/godot"), + linter.NewConfig(golinters.NewTestpackage(testpackageCfg)). + WithSince("v1.25.0"). + WithPresets(linter.PresetStyle, linter.PresetTest). + WithURL("https://github.com/maratori/testpackage"), + linter.NewConfig(golinters.NewNestif()). + WithSince("v1.25.0"). + WithPresets(linter.PresetComplexity). + WithURL("https://github.com/nakabonne/nestif"), + linter.NewConfig(golinters.NewExportLoopRef()). + WithSince("v1.28.0"). + WithPresets(linter.PresetBugs). + WithLoadForGoAnalysis(). + WithURL("https://github.com/kyoh86/exportloopref"), + linter.NewConfig(golinters.NewExhaustive(exhaustiveCfg)). + WithSince(" v1.28.0"). + WithPresets(linter.PresetBugs). + WithLoadForGoAnalysis(). + WithURL("https://github.com/nishanths/exhaustive"), + linter.NewConfig(golinters.NewSQLCloseCheck()). + WithSince("v1.28.0"). + WithPresets(linter.PresetBugs, linter.PresetSQL). + WithLoadForGoAnalysis(). + WithURL("https://github.com/ryanrolds/sqlclosecheck"), + linter.NewConfig(golinters.NewNLReturn()). + WithSince("v1.30.0"). + WithPresets(linter.PresetStyle). + WithURL("https://github.com/ssgreg/nlreturn"), + linter.NewConfig(golinters.NewWrapcheck(wrapcheckCfg)). + WithSince("v1.32.0"). + WithPresets(linter.PresetStyle, linter.PresetError). + WithLoadForGoAnalysis(). + WithURL("https://github.com/tomarrell/wrapcheck"), + linter.NewConfig(golinters.NewThelper(thelperCfg)). + WithSince("v1.34.0"). + WithPresets(linter.PresetStyle). + WithLoadForGoAnalysis(). + WithURL("https://github.com/kulti/thelper"), + linter.NewConfig(golinters.NewTparallel()). + WithSince("v1.32.0"). + WithPresets(linter.PresetStyle, linter.PresetTest). + WithLoadForGoAnalysis(). 
+ WithURL("https://github.com/moricho/tparallel"), + linter.NewConfig(golinters.NewExhaustiveStruct(exhaustiveStructCfg)). + WithSince("v1.32.0"). + WithPresets(linter.PresetStyle, linter.PresetTest). + WithLoadForGoAnalysis(). + WithURL("https://github.com/mbilski/exhaustivestruct"), + linter.NewConfig(golinters.NewErrorLint(errorlintCfg)). + WithSince("v1.32.0"). + WithPresets(linter.PresetBugs, linter.PresetError). + WithLoadForGoAnalysis(). + WithURL("https://github.com/polyfloyd/go-errorlint"), + linter.NewConfig(golinters.NewParallelTest()). + WithSince("v1.33.0"). + WithPresets(linter.PresetStyle, linter.PresetTest). + WithURL("https://github.com/kunwardeep/paralleltest"), + linter.NewConfig(golinters.NewMakezero()). + WithSince("v1.34.0"). + WithPresets(linter.PresetStyle, linter.PresetBugs). + WithLoadForGoAnalysis(). + WithURL("https://github.com/ashanbrown/makezero"), + linter.NewConfig(golinters.NewForbidigo()). + WithSince("v1.34.0"). + WithPresets(linter.PresetStyle). + WithURL("https://github.com/ashanbrown/forbidigo"), + linter.NewConfig(golinters.NewIfshort(ifshortCfg)). + WithSince("v1.36.0"). + WithPresets(linter.PresetStyle). + WithURL("https://github.com/esimonov/ifshort"), + linter.NewConfig(golinters.NewPredeclared(predeclaredCfg)). + WithSince("v1.35.0"). + WithPresets(linter.PresetStyle). + WithURL("https://github.com/nishanths/predeclared"), + linter.NewConfig(golinters.NewRevive(reviveCfg)). + WithSince("v1.37.0"). + WithPresets(linter.PresetStyle, linter.PresetMetaLinter). + ConsiderSlow(). + WithURL("https://github.com/mgechev/revive"), + linter.NewConfig(golinters.NewDurationCheck()). + WithSince("v1.37.0"). + WithPresets(linter.PresetBugs). + WithLoadForGoAnalysis(). + WithURL("https://github.com/charithe/durationcheck"), + linter.NewConfig(golinters.NewWastedAssign()). + WithSince("v1.38.0"). + WithPresets(linter.PresetStyle). + WithLoadForGoAnalysis(). + WithURL("https://github.com/sanposhiho/wastedassign"), + linter.NewConfig(golinters.NewImportAs(importAsCfg)). + WithSince("v1.38.0"). + WithPresets(linter.PresetStyle). + WithLoadForGoAnalysis(). + WithURL("https://github.com/julz/importas"), + linter.NewConfig(golinters.NewNilErr()). + WithSince("v1.38.0"). + WithLoadForGoAnalysis(). + WithPresets(linter.PresetBugs). + WithURL("https://github.com/gostaticanalysis/nilerr"), + linter.NewConfig(golinters.NewForceTypeAssert()). + WithSince("v1.38.0"). + WithPresets(linter.PresetStyle). + WithURL("https://github.com/gostaticanalysis/forcetypeassert"), + linter.NewConfig(golinters.NewGoModDirectives(goModDirectivesCfg)). + WithSince("v1.39.0"). + WithPresets(linter.PresetStyle, linter.PresetModule). + WithURL("https://github.com/ldez/gomoddirectives"), + linter.NewConfig(golinters.NewPromlinter()). + WithSince("v1.40.0"). + WithPresets(linter.PresetStyle). + WithURL("https://github.com/yeya24/promlinter"), + linter.NewConfig(golinters.NewTagliatelle(tagliatelleCfg)). + WithSince("v1.40.0"). + WithPresets(linter.PresetStyle). + WithURL("https://github.com/ldez/tagliatelle"), + linter.NewConfig(golinters.NewErrName()). + WithPresets(linter.PresetStyle). + WithLoadForGoAnalysis(). + WithURL("https://github.com/Antonboom/errname"). + WithSince("v1.42.0"), + + // nolintlint must be last because it looks at the results of all the previous linters for unused nolint directives + linter.NewConfig(golinters.NewNoLintLint()). + WithSince("v1.26.0"). + WithPresets(linter.PresetStyle). 
+ WithURL("https://github.com/golangci/golangci-lint/blob/master/pkg/golinters/nolintlint/README.md"), + } + + enabledByDefault := map[string]bool{ + golinters.NewGovet(nil).Name(): true, + golinters.NewErrcheck().Name(): true, + golinters.NewStaticcheck(staticcheckCfg).Name(): true, + golinters.NewUnused(unusedCfg).Name(): true, + golinters.NewGosimple(gosimpleCfg).Name(): true, + golinters.NewStructcheck().Name(): true, + golinters.NewVarcheck().Name(): true, + golinters.NewIneffassign().Name(): true, + golinters.NewDeadcode().Name(): true, + golinters.NewTypecheck().Name(): true, + } + return enableLinterConfigs(lcs, func(lc *linter.Config) bool { + return enabledByDefault[lc.Name()] + }) +} + +func (m Manager) GetAllEnabledByDefaultLinters() []*linter.Config { + var ret []*linter.Config + for _, lc := range m.GetAllSupportedLinterConfigs() { + if lc.EnabledByDefault { + ret = append(ret, lc) + } + } + + return ret +} + +func linterConfigsToMap(lcs []*linter.Config) map[string]*linter.Config { + ret := map[string]*linter.Config{} + for _, lc := range lcs { + lc := lc // local copy + ret[lc.Name()] = lc + } + + return ret +} + +func (m Manager) GetAllLinterConfigsForPreset(p string) []*linter.Config { + var ret []*linter.Config + for _, lc := range m.GetAllSupportedLinterConfigs() { + for _, ip := range lc.InPresets { + if p == ip { + ret = append(ret, lc) + break + } + } + } + + return ret +} + +func (m Manager) loadCustomLinterConfig(name string, settings config.CustomLinterSettings) (*linter.Config, error) { + analyzer, err := m.getAnalyzerPlugin(settings.Path) + if err != nil { + return nil, err + } + m.log.Infof("Loaded %s: %s", settings.Path, name) + customLinter := goanalysis.NewLinter( + name, + settings.Description, + analyzer.GetAnalyzers(), + nil).WithLoadMode(goanalysis.LoadModeTypesInfo) + linterConfig := linter.NewConfig(customLinter) + linterConfig.EnabledByDefault = true + linterConfig.IsSlow = false + linterConfig.WithURL(settings.OriginalURL) + return linterConfig, nil +} + +type AnalyzerPlugin interface { + GetAnalyzers() []*analysis.Analyzer +} + +func (m Manager) getAnalyzerPlugin(path string) (AnalyzerPlugin, error) { + if !filepath.IsAbs(path) { + // resolve non-absolute paths relative to config file's directory + configFilePath := viper.ConfigFileUsed() + absConfigFilePath, err := filepath.Abs(configFilePath) + if err != nil { + return nil, fmt.Errorf("could not get absolute representation of config file path %q: %v", configFilePath, err) + } + path = filepath.Join(filepath.Dir(absConfigFilePath), path) + } + + plug, err := plugin.Open(path) + if err != nil { + return nil, err + } + + symbol, err := plug.Lookup("AnalyzerPlugin") + if err != nil { + return nil, err + } + + analyzerPlugin, ok := symbol.(AnalyzerPlugin) + if !ok { + return nil, fmt.Errorf("plugin %s does not abide by 'AnalyzerPlugin' interface", path) + } + + return analyzerPlugin, nil +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/lint/lintersdb/validator.go b/vendor/github.com/golangci/golangci-lint/pkg/lint/lintersdb/validator.go new file mode 100644 index 000000000..47c128930 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/lint/lintersdb/validator.go @@ -0,0 +1,107 @@ +package lintersdb + +import ( + "fmt" + "strings" + + "github.com/golangci/golangci-lint/pkg/config" +) + +type Validator struct { + m *Manager +} + +func NewValidator(m *Manager) *Validator { + return &Validator{ + m: m, + } +} + +func (v Validator) validateLintersNames(cfg *config.Linters) error { + 
allNames := append([]string{}, cfg.Enable...) + allNames = append(allNames, cfg.Disable...) + + unknownNames := []string{} + + for _, name := range allNames { + if v.m.GetLinterConfigs(name) == nil { + unknownNames = append(unknownNames, name) + } + } + + if len(unknownNames) > 0 { + return fmt.Errorf("unknown linters: '%v', run 'golangci-lint help linters' to see the list of supported linters", + strings.Join(unknownNames, ",")) + } + + return nil +} + +func (v Validator) validatePresets(cfg *config.Linters) error { + allPresets := v.m.allPresetsSet() + for _, p := range cfg.Presets { + if !allPresets[p] { + return fmt.Errorf("no such preset %q: only next presets exist: (%s)", + p, strings.Join(v.m.AllPresets(), "|")) + } + } + + if len(cfg.Presets) != 0 && cfg.EnableAll { + return fmt.Errorf("--presets is incompatible with --enable-all") + } + + return nil +} + +func (v Validator) validateAllDisableEnableOptions(cfg *config.Linters) error { + if cfg.EnableAll && cfg.DisableAll { + return fmt.Errorf("--enable-all and --disable-all options must not be combined") + } + + if cfg.DisableAll { + if len(cfg.Enable) == 0 && len(cfg.Presets) == 0 { + return fmt.Errorf("all linters were disabled, but no one linter was enabled: must enable at least one") + } + + if len(cfg.Disable) != 0 { + return fmt.Errorf("can't combine options --disable-all and --disable %s", cfg.Disable[0]) + } + } + + if cfg.EnableAll && len(cfg.Enable) != 0 && !cfg.Fast { + return fmt.Errorf("can't combine options --enable-all and --enable %s", cfg.Enable[0]) + } + + return nil +} + +func (v Validator) validateDisabledAndEnabledAtOneMoment(cfg *config.Linters) error { + enabledLintersSet := map[string]bool{} + for _, name := range cfg.Enable { + enabledLintersSet[name] = true + } + + for _, name := range cfg.Disable { + if enabledLintersSet[name] { + return fmt.Errorf("linter %q can't be disabled and enabled at one moment", name) + } + } + + return nil +} + +func (v Validator) validateEnabledDisabledLintersConfig(cfg *config.Linters) error { + validators := []func(cfg *config.Linters) error{ + v.validateLintersNames, + v.validatePresets, + v.validateAllDisableEnableOptions, + v.validateDisabledAndEnabledAtOneMoment, + } + for _, v := range validators { + if err := v(cfg); err != nil { + return err + } + } + + return nil +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/lint/load.go b/vendor/github.com/golangci/golangci-lint/pkg/lint/load.go new file mode 100644 index 000000000..69852afb9 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/lint/load.go @@ -0,0 +1,315 @@ +package lint + +import ( + "context" + "fmt" + "go/build" + "go/token" + "os" + "path/filepath" + "regexp" + "strings" + "time" + + "github.com/pkg/errors" + "golang.org/x/tools/go/packages" + + "github.com/golangci/golangci-lint/internal/pkgcache" + "github.com/golangci/golangci-lint/pkg/config" + "github.com/golangci/golangci-lint/pkg/exitcodes" + "github.com/golangci/golangci-lint/pkg/fsutils" + "github.com/golangci/golangci-lint/pkg/golinters/goanalysis/load" + "github.com/golangci/golangci-lint/pkg/goutil" + "github.com/golangci/golangci-lint/pkg/lint/linter" + "github.com/golangci/golangci-lint/pkg/logutils" +) + +type ContextLoader struct { + cfg *config.Config + log logutils.Log + debugf logutils.DebugFunc + goenv *goutil.Env + pkgTestIDRe *regexp.Regexp + lineCache *fsutils.LineCache + fileCache *fsutils.FileCache + pkgCache *pkgcache.Cache + loadGuard *load.Guard +} + +func NewContextLoader(cfg *config.Config, log 
logutils.Log, goenv *goutil.Env, + lineCache *fsutils.LineCache, fileCache *fsutils.FileCache, pkgCache *pkgcache.Cache, loadGuard *load.Guard) *ContextLoader { + return &ContextLoader{ + cfg: cfg, + log: log, + debugf: logutils.Debug("loader"), + goenv: goenv, + pkgTestIDRe: regexp.MustCompile(`^(.*) \[(.*)\.test\]`), + lineCache: lineCache, + fileCache: fileCache, + pkgCache: pkgCache, + loadGuard: loadGuard, + } +} + +func (cl *ContextLoader) prepareBuildContext() { + // Set GOROOT to have working cross-compilation: cross-compiled binaries + // have invalid GOROOT. XXX: can't use runtime.GOROOT(). + goroot := cl.goenv.Get(goutil.EnvGoRoot) + if goroot == "" { + return + } + + os.Setenv("GOROOT", goroot) + build.Default.GOROOT = goroot + build.Default.BuildTags = cl.cfg.Run.BuildTags +} + +func (cl *ContextLoader) findLoadMode(linters []*linter.Config) packages.LoadMode { + loadMode := packages.LoadMode(0) + for _, lc := range linters { + loadMode |= lc.LoadMode + } + + return loadMode +} + +func (cl *ContextLoader) buildArgs() []string { + args := cl.cfg.Run.Args + if len(args) == 0 { + return []string{"./..."} + } + + var retArgs []string + for _, arg := range args { + if strings.HasPrefix(arg, ".") || filepath.IsAbs(arg) { + retArgs = append(retArgs, arg) + } else { + // go/packages doesn't work well if we don't have prefix ./ for local packages + retArgs = append(retArgs, fmt.Sprintf(".%c%s", filepath.Separator, arg)) + } + } + + return retArgs +} + +func (cl *ContextLoader) makeBuildFlags() ([]string, error) { + var buildFlags []string + + if len(cl.cfg.Run.BuildTags) != 0 { + // go help build + buildFlags = append(buildFlags, "-tags", strings.Join(cl.cfg.Run.BuildTags, " ")) + cl.log.Infof("Using build tags: %v", cl.cfg.Run.BuildTags) + } + + mod := cl.cfg.Run.ModulesDownloadMode + if mod != "" { + // go help modules + allowedMods := []string{"mod", "readonly", "vendor"} + var ok bool + for _, am := range allowedMods { + if am == mod { + ok = true + break + } + } + if !ok { + return nil, fmt.Errorf("invalid modules download path %s, only (%s) allowed", mod, strings.Join(allowedMods, "|")) + } + + buildFlags = append(buildFlags, fmt.Sprintf("-mod=%s", cl.cfg.Run.ModulesDownloadMode)) + } + + return buildFlags, nil +} + +func stringifyLoadMode(mode packages.LoadMode) string { + m := map[packages.LoadMode]string{ + packages.NeedCompiledGoFiles: "compiled_files", + packages.NeedDeps: "deps", + packages.NeedExportsFile: "exports_file", + packages.NeedFiles: "files", + packages.NeedImports: "imports", + packages.NeedName: "name", + packages.NeedSyntax: "syntax", + packages.NeedTypes: "types", + packages.NeedTypesInfo: "types_info", + packages.NeedTypesSizes: "types_sizes", + } + + var flags []string + for flag, flagStr := range m { + if mode&flag != 0 { + flags = append(flags, flagStr) + } + } + + return fmt.Sprintf("%d (%s)", mode, strings.Join(flags, "|")) +} + +func (cl *ContextLoader) debugPrintLoadedPackages(pkgs []*packages.Package) { + cl.debugf("loaded %d pkgs", len(pkgs)) + for i, pkg := range pkgs { + var syntaxFiles []string + for _, sf := range pkg.Syntax { + syntaxFiles = append(syntaxFiles, pkg.Fset.Position(sf.Pos()).Filename) + } + cl.debugf("Loaded pkg #%d: ID=%s GoFiles=%s CompiledGoFiles=%s Syntax=%s", + i, pkg.ID, pkg.GoFiles, pkg.CompiledGoFiles, syntaxFiles) + } +} + +func (cl *ContextLoader) parseLoadedPackagesErrors(pkgs []*packages.Package) error { + for _, pkg := range pkgs { + for _, err := range pkg.Errors { + if strings.Contains(err.Msg, "no Go files") { + 
return errors.Wrapf(exitcodes.ErrNoGoFiles, "package %s", pkg.PkgPath) + } + if strings.Contains(err.Msg, "cannot find package") { + // when analyzing not existing directory + return errors.Wrap(exitcodes.ErrFailure, err.Msg) + } + } + } + + return nil +} + +func (cl *ContextLoader) loadPackages(ctx context.Context, loadMode packages.LoadMode) ([]*packages.Package, error) { + defer func(startedAt time.Time) { + cl.log.Infof("Go packages loading at mode %s took %s", stringifyLoadMode(loadMode), time.Since(startedAt)) + }(time.Now()) + + cl.prepareBuildContext() + + buildFlags, err := cl.makeBuildFlags() + if err != nil { + return nil, errors.Wrap(err, "failed to make build flags for go list") + } + + conf := &packages.Config{ + Mode: loadMode, + Tests: cl.cfg.Run.AnalyzeTests, + Context: ctx, + BuildFlags: buildFlags, + Logf: cl.debugf, + //TODO: use fset, parsefile, overlay + } + + args := cl.buildArgs() + cl.debugf("Built loader args are %s", args) + pkgs, err := packages.Load(conf, args...) + if err != nil { + return nil, errors.Wrap(err, "failed to load with go/packages") + } + + // Currently, go/packages doesn't guarantee that error will be returned + // if context was canceled. See + // https://github.com/golang/tools/commit/c5cec6710e927457c3c29d6c156415e8539a5111#r39261855 + if ctx.Err() != nil { + return nil, errors.Wrap(ctx.Err(), "timed out to load packages") + } + + if loadMode&packages.NeedSyntax == 0 { + // Needed e.g. for go/analysis loading. + fset := token.NewFileSet() + packages.Visit(pkgs, nil, func(pkg *packages.Package) { + pkg.Fset = fset + cl.loadGuard.AddMutexForPkg(pkg) + }) + } + + cl.debugPrintLoadedPackages(pkgs) + + if err := cl.parseLoadedPackagesErrors(pkgs); err != nil { + return nil, err + } + + return cl.filterTestMainPackages(pkgs), nil +} + +func (cl *ContextLoader) tryParseTestPackage(pkg *packages.Package) (name string, isTest bool) { + matches := cl.pkgTestIDRe.FindStringSubmatch(pkg.ID) + if matches == nil { + return "", false + } + + return matches[1], true +} + +func (cl *ContextLoader) filterTestMainPackages(pkgs []*packages.Package) []*packages.Package { + var retPkgs []*packages.Package + for _, pkg := range pkgs { + if pkg.Name == "main" && strings.HasSuffix(pkg.PkgPath, ".test") { + // it's an implicit testmain package + cl.debugf("skip pkg ID=%s", pkg.ID) + continue + } + + retPkgs = append(retPkgs, pkg) + } + + return retPkgs +} + +func (cl *ContextLoader) filterDuplicatePackages(pkgs []*packages.Package) []*packages.Package { + packagesWithTests := map[string]bool{} + for _, pkg := range pkgs { + name, isTest := cl.tryParseTestPackage(pkg) + if !isTest { + continue + } + packagesWithTests[name] = true + } + + cl.debugf("package with tests: %#v", packagesWithTests) + + var retPkgs []*packages.Package + for _, pkg := range pkgs { + _, isTest := cl.tryParseTestPackage(pkg) + if !isTest && packagesWithTests[pkg.PkgPath] { + // If tests loading is enabled, + // for package with files a.go and a_test.go go/packages loads two packages: + // 1. ID=".../a" GoFiles=[a.go] + // 2. ID=".../a [.../a.test]" GoFiles=[a.go a_test.go] + // We need only the second package, otherwise we can get warnings about unused variables/fields/functions + // in a.go if they are used only in a_test.go. 
+ cl.debugf("skip pkg ID=%s because we load it with test package", pkg.ID) + continue + } + + retPkgs = append(retPkgs, pkg) + } + + return retPkgs +} + +func (cl *ContextLoader) Load(ctx context.Context, linters []*linter.Config) (*linter.Context, error) { + loadMode := cl.findLoadMode(linters) + pkgs, err := cl.loadPackages(ctx, loadMode) + if err != nil { + return nil, errors.Wrap(err, "failed to load packages") + } + + deduplicatedPkgs := cl.filterDuplicatePackages(pkgs) + + if len(deduplicatedPkgs) == 0 { + return nil, exitcodes.ErrNoGoFiles + } + + ret := &linter.Context{ + Packages: deduplicatedPkgs, + + // At least `unused` linters works properly only on original (not deduplicated) packages, + // see https://github.com/golangci/golangci-lint/pull/585. + OriginalPackages: pkgs, + + Cfg: cl.cfg, + Log: cl.log, + FileCache: cl.fileCache, + LineCache: cl.lineCache, + PkgCache: cl.pkgCache, + LoadGuard: cl.loadGuard, + } + + return ret, nil +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/lint/runner.go b/vendor/github.com/golangci/golangci-lint/pkg/lint/runner.go new file mode 100644 index 000000000..8882b9300 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/lint/runner.go @@ -0,0 +1,329 @@ +package lint + +import ( + "context" + "fmt" + "runtime/debug" + "strings" + + "github.com/pkg/errors" + gopackages "golang.org/x/tools/go/packages" + + "github.com/golangci/golangci-lint/internal/errorutil" + "github.com/golangci/golangci-lint/pkg/config" + "github.com/golangci/golangci-lint/pkg/fsutils" + "github.com/golangci/golangci-lint/pkg/goutil" + "github.com/golangci/golangci-lint/pkg/lint/linter" + "github.com/golangci/golangci-lint/pkg/lint/lintersdb" + "github.com/golangci/golangci-lint/pkg/logutils" + "github.com/golangci/golangci-lint/pkg/packages" + "github.com/golangci/golangci-lint/pkg/result" + "github.com/golangci/golangci-lint/pkg/result/processors" + "github.com/golangci/golangci-lint/pkg/timeutils" +) + +type Runner struct { + Processors []processors.Processor + Log logutils.Log +} + +func NewRunner(cfg *config.Config, log logutils.Log, goenv *goutil.Env, es *lintersdb.EnabledSet, + lineCache *fsutils.LineCache, dbManager *lintersdb.Manager, pkgs []*gopackages.Package) (*Runner, error) { + skipFilesProcessor, err := processors.NewSkipFiles(cfg.Run.SkipFiles) + if err != nil { + return nil, err + } + + skipDirs := cfg.Run.SkipDirs + if cfg.Run.UseDefaultSkipDirs { + skipDirs = append(skipDirs, packages.StdExcludeDirRegexps...) + } + skipDirsProcessor, err := processors.NewSkipDirs(skipDirs, log.Child("skip dirs"), cfg.Run.Args) + if err != nil { + return nil, err + } + + enabledLinters, err := es.GetEnabledLintersMap() + if err != nil { + return nil, errors.Wrap(err, "failed to get enabled linters") + } + + // print deprecated messages + if !cfg.InternalCmdTest { + for name, lc := range enabledLinters { + if !lc.IsDeprecated() { + continue + } + + var extra string + if lc.Deprecation.Replacement != "" { + extra = fmt.Sprintf(" Replaced by %s.", lc.Deprecation.Replacement) + } + + log.Warnf("The linter '%s' is deprecated (since %s) due to: %s %s", name, lc.Deprecation.Since, lc.Deprecation.Message, extra) + } + } + + return &Runner{ + Processors: []processors.Processor{ + processors.NewCgo(goenv), + + // Must go after Cgo. + processors.NewFilenameUnadjuster(pkgs, log.Child("filename_unadjuster")), + + // Must be before diff, nolint and exclude autogenerated processor at least. 
+ processors.NewPathPrettifier(), + skipFilesProcessor, + skipDirsProcessor, // must be after path prettifier + + processors.NewAutogeneratedExclude(), + + // Must be before exclude because users see already marked output and configure excluding by it. + processors.NewIdentifierMarker(), + + getExcludeProcessor(&cfg.Issues), + getExcludeRulesProcessor(&cfg.Issues, log, lineCache), + processors.NewNolint(log.Child("nolint"), dbManager, enabledLinters), + + processors.NewUniqByLine(cfg), + processors.NewDiff(cfg.Issues.Diff, cfg.Issues.DiffFromRevision, cfg.Issues.DiffPatchFilePath), + processors.NewMaxPerFileFromLinter(cfg), + processors.NewMaxSameIssues(cfg.Issues.MaxSameIssues, log.Child("max_same_issues"), cfg), + processors.NewMaxFromLinter(cfg.Issues.MaxIssuesPerLinter, log.Child("max_from_linter"), cfg), + processors.NewSourceCode(lineCache, log.Child("source_code")), + processors.NewPathShortener(), + getSeverityRulesProcessor(&cfg.Severity, log, lineCache), + processors.NewPathPrefixer(cfg.Output.PathPrefix), + processors.NewSortResults(cfg), + }, + Log: log, + }, nil +} + +func (r *Runner) runLinterSafe(ctx context.Context, lintCtx *linter.Context, + lc *linter.Config) (ret []result.Issue, err error) { + defer func() { + if panicData := recover(); panicData != nil { + if pe, ok := panicData.(*errorutil.PanicError); ok { + err = fmt.Errorf("%s: %w", lc.Name(), pe) + + // Don't print stacktrace from goroutines twice + r.Log.Errorf("Panic: %s: %s", pe, pe.Stack()) + } else { + err = fmt.Errorf("panic occurred: %s", panicData) + r.Log.Errorf("Panic stack trace: %s", debug.Stack()) + } + } + }() + + issues, err := lc.Linter.Run(ctx, lintCtx) + + if lc.DoesChangeTypes { + // Packages in lintCtx might be dirty due to the last analysis, + // which affects to the next analysis. + // To avoid this issue, we clear type information from the packages. + // See https://github.com/golangci/golangci-lint/pull/944. + // Currently DoesChangeTypes is true only for `unused`. 
+ lintCtx.ClearTypesInPackages() + } + + if err != nil { + return nil, err + } + + for i := range issues { + if issues[i].FromLinter == "" { + issues[i].FromLinter = lc.Name() + } + } + + return issues, nil +} + +type processorStat struct { + inCount int + outCount int +} + +func (r Runner) processLintResults(inIssues []result.Issue) []result.Issue { + sw := timeutils.NewStopwatch("processing", r.Log) + + var issuesBefore, issuesAfter int + statPerProcessor := map[string]processorStat{} + + var outIssues []result.Issue + if len(inIssues) != 0 { + issuesBefore += len(inIssues) + outIssues = r.processIssues(inIssues, sw, statPerProcessor) + issuesAfter += len(outIssues) + } + + // finalize processors: logging, clearing, no heavy work here + + for _, p := range r.Processors { + p := p + sw.TrackStage(p.Name(), func() { + p.Finish() + }) + } + + if issuesBefore != issuesAfter { + r.Log.Infof("Issues before processing: %d, after processing: %d", issuesBefore, issuesAfter) + } + r.printPerProcessorStat(statPerProcessor) + sw.PrintStages() + + return outIssues +} + +func (r Runner) printPerProcessorStat(stat map[string]processorStat) { + parts := make([]string, 0, len(stat)) + for name, ps := range stat { + if ps.inCount != 0 { + parts = append(parts, fmt.Sprintf("%s: %d/%d", name, ps.outCount, ps.inCount)) + } + } + if len(parts) != 0 { + r.Log.Infof("Processors filtering stat (out/in): %s", strings.Join(parts, ", ")) + } +} + +func (r Runner) Run(ctx context.Context, linters []*linter.Config, lintCtx *linter.Context) ([]result.Issue, error) { + sw := timeutils.NewStopwatch("linters", r.Log) + defer sw.Print() + + var issues []result.Issue + for _, lc := range linters { + lc := lc + sw.TrackStage(lc.Name(), func() { + linterIssues, err := r.runLinterSafe(ctx, lintCtx, lc) + if err != nil { + r.Log.Warnf("Can't run linter %s: %v", lc.Linter.Name(), err) + return + } + issues = append(issues, linterIssues...) 
+ }) + } + + return r.processLintResults(issues), nil +} + +func (r *Runner) processIssues(issues []result.Issue, sw *timeutils.Stopwatch, statPerProcessor map[string]processorStat) []result.Issue { + for _, p := range r.Processors { + var newIssues []result.Issue + var err error + p := p + sw.TrackStage(p.Name(), func() { + newIssues, err = p.Process(issues) + }) + + if err != nil { + r.Log.Warnf("Can't process result by %s processor: %s", p.Name(), err) + } else { + stat := statPerProcessor[p.Name()] + stat.inCount += len(issues) + stat.outCount += len(newIssues) + statPerProcessor[p.Name()] = stat + issues = newIssues + } + + if issues == nil { + issues = []result.Issue{} + } + } + + return issues +} + +func getExcludeProcessor(cfg *config.Issues) processors.Processor { + var excludeTotalPattern string + + if len(cfg.ExcludePatterns) != 0 { + excludeTotalPattern = fmt.Sprintf("(%s)", strings.Join(cfg.ExcludePatterns, "|")) + } + + var excludeProcessor processors.Processor + if cfg.ExcludeCaseSensitive { + excludeProcessor = processors.NewExcludeCaseSensitive(excludeTotalPattern) + } else { + excludeProcessor = processors.NewExclude(excludeTotalPattern) + } + + return excludeProcessor +} + +func getExcludeRulesProcessor(cfg *config.Issues, log logutils.Log, lineCache *fsutils.LineCache) processors.Processor { + var excludeRules []processors.ExcludeRule + for _, r := range cfg.ExcludeRules { + excludeRules = append(excludeRules, processors.ExcludeRule{ + BaseRule: processors.BaseRule{ + Text: r.Text, + Source: r.Source, + Path: r.Path, + Linters: r.Linters, + }, + }) + } + + if cfg.UseDefaultExcludes { + for _, r := range config.GetExcludePatterns(cfg.IncludeDefaultExcludes) { + excludeRules = append(excludeRules, processors.ExcludeRule{ + BaseRule: processors.BaseRule{ + Text: r.Pattern, + Linters: []string{r.Linter}, + }, + }) + } + } + + var excludeRulesProcessor processors.Processor + if cfg.ExcludeCaseSensitive { + excludeRulesProcessor = processors.NewExcludeRulesCaseSensitive( + excludeRules, + lineCache, + log.Child("exclude_rules"), + ) + } else { + excludeRulesProcessor = processors.NewExcludeRules( + excludeRules, + lineCache, + log.Child("exclude_rules"), + ) + } + + return excludeRulesProcessor +} + +func getSeverityRulesProcessor(cfg *config.Severity, log logutils.Log, lineCache *fsutils.LineCache) processors.Processor { + var severityRules []processors.SeverityRule + for _, r := range cfg.Rules { + severityRules = append(severityRules, processors.SeverityRule{ + Severity: r.Severity, + BaseRule: processors.BaseRule{ + Text: r.Text, + Source: r.Source, + Path: r.Path, + Linters: r.Linters, + }, + }) + } + + var severityRulesProcessor processors.Processor + if cfg.CaseSensitive { + severityRulesProcessor = processors.NewSeverityRulesCaseSensitive( + cfg.Default, + severityRules, + lineCache, + log.Child("severity_rules"), + ) + } else { + severityRulesProcessor = processors.NewSeverityRules( + cfg.Default, + severityRules, + lineCache, + log.Child("severity_rules"), + ) + } + + return severityRulesProcessor +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/logutils/log.go b/vendor/github.com/golangci/golangci-lint/pkg/logutils/log.go new file mode 100644 index 000000000..b955417a8 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/logutils/log.go @@ -0,0 +1,31 @@ +package logutils + +type Log interface { + Fatalf(format string, args ...interface{}) + Panicf(format string, args ...interface{}) + Errorf(format string, args ...interface{}) + Warnf(format 
string, args ...interface{}) + Infof(format string, args ...interface{}) + + Child(name string) Log + SetLevel(level LogLevel) +} + +type LogLevel int + +const ( + // Debug messages, write to debug logs only by logutils.Debug. + LogLevelDebug LogLevel = 0 + + // Information messages, don't write too much messages, + // only useful ones: they are shown when running with -v. + LogLevelInfo LogLevel = 1 + + // Hidden errors: non critical errors: work can be continued, no need to fail whole program; + // tests will crash if any warning occurred. + LogLevelWarn LogLevel = 2 + + // Only not hidden from user errors: whole program failing, usually + // error logging happens in 1-2 places: in the "main" function. + LogLevelError LogLevel = 3 +) diff --git a/vendor/github.com/golangci/golangci-lint/pkg/logutils/logutils.go b/vendor/github.com/golangci/golangci-lint/pkg/logutils/logutils.go new file mode 100644 index 000000000..93c9873d9 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/logutils/logutils.go @@ -0,0 +1,49 @@ +package logutils + +import ( + "os" + "strings" +) + +func getEnabledDebugs() map[string]bool { + ret := map[string]bool{} + debugVar := os.Getenv("GL_DEBUG") + if debugVar == "" { + return ret + } + + for _, tag := range strings.Split(debugVar, ",") { + ret[tag] = true + } + + return ret +} + +var enabledDebugs = getEnabledDebugs() + +type DebugFunc func(format string, args ...interface{}) + +func nopDebugf(format string, args ...interface{}) {} + +func Debug(tag string) DebugFunc { + if !enabledDebugs[tag] { + return nopDebugf + } + + logger := NewStderrLog(tag) + logger.SetLevel(LogLevelDebug) + + return func(format string, args ...interface{}) { + logger.Debugf(format, args...) + } +} + +func HaveDebugTag(tag string) bool { + return enabledDebugs[tag] +} + +func SetupVerboseLog(log Log, isVerbose bool) { + if isVerbose { + log.SetLevel(LogLevelInfo) + } +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/logutils/mock.go b/vendor/github.com/golangci/golangci-lint/pkg/logutils/mock.go new file mode 100644 index 000000000..e897ce1ed --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/logutils/mock.go @@ -0,0 +1,47 @@ +package logutils + +import ( + "github.com/stretchr/testify/mock" +) + +type MockLog struct { + mock.Mock +} + +func NewMockLog() *MockLog { + return &MockLog{} +} + +func (m *MockLog) Fatalf(format string, args ...interface{}) { + mArgs := []interface{}{format} + m.Called(append(mArgs, args...)...) +} + +func (m *MockLog) Panicf(format string, args ...interface{}) { + mArgs := []interface{}{format} + m.Called(append(mArgs, args...)...) +} + +func (m *MockLog) Errorf(format string, args ...interface{}) { + mArgs := []interface{}{format} + m.Called(append(mArgs, args...)...) +} + +func (m *MockLog) Warnf(format string, args ...interface{}) { + mArgs := []interface{}{format} + m.Called(append(mArgs, args...)...) +} + +func (m *MockLog) Infof(format string, args ...interface{}) { + mArgs := []interface{}{format} + m.Called(append(mArgs, args...)...) 
+} + +func (m *MockLog) Child(name string) Log { + m.Called(name) + return m +} + +func (m *MockLog) SetLevel(level LogLevel) { + m.Called(level) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/logutils/out.go b/vendor/github.com/golangci/golangci-lint/pkg/logutils/out.go new file mode 100644 index 000000000..67c70dc8f --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/logutils/out.go @@ -0,0 +1,9 @@ +package logutils + +import ( + "github.com/fatih/color" + colorable "github.com/mattn/go-colorable" +) + +var StdOut = color.Output // https://github.com/golangci/golangci-lint/issues/14 +var StdErr = colorable.NewColorableStderr() diff --git a/vendor/github.com/golangci/golangci-lint/pkg/logutils/stderr_log.go b/vendor/github.com/golangci/golangci-lint/pkg/logutils/stderr_log.go new file mode 100644 index 000000000..b4697ee4c --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/logutils/stderr_log.go @@ -0,0 +1,121 @@ +package logutils + +import ( + "fmt" + "os" + "time" + + "github.com/sirupsen/logrus" //nolint:depguard + + "github.com/golangci/golangci-lint/pkg/exitcodes" +) + +type StderrLog struct { + name string + logger *logrus.Logger + level LogLevel +} + +var _ Log = NewStderrLog("") + +func NewStderrLog(name string) *StderrLog { + sl := &StderrLog{ + name: name, + logger: logrus.New(), + level: LogLevelWarn, + } + + switch os.Getenv("LOG_LEVEL") { + case "error", "err": + sl.logger.SetLevel(logrus.ErrorLevel) + case "warning", "warn": + sl.logger.SetLevel(logrus.WarnLevel) + case "info": + sl.logger.SetLevel(logrus.InfoLevel) + default: + sl.logger.SetLevel(logrus.DebugLevel) + } + + sl.logger.Out = StdErr + formatter := &logrus.TextFormatter{ + DisableTimestamp: true, // `INFO[0007] msg` -> `INFO msg` + } + if os.Getenv("LOG_TIMESTAMP") == "1" { + formatter.DisableTimestamp = false + formatter.FullTimestamp = true + formatter.TimestampFormat = time.StampMilli + } + sl.logger.Formatter = formatter + + return sl +} + +func (sl StderrLog) prefix() string { + prefix := "" + if sl.name != "" { + prefix = fmt.Sprintf("[%s] ", sl.name) + } + + return prefix +} + +func (sl StderrLog) Fatalf(format string, args ...interface{}) { + sl.logger.Errorf("%s%s", sl.prefix(), fmt.Sprintf(format, args...)) + os.Exit(exitcodes.Failure) +} + +func (sl StderrLog) Panicf(format string, args ...interface{}) { + v := fmt.Sprintf("%s%s", sl.prefix(), fmt.Sprintf(format, args...)) + panic(v) +} + +func (sl StderrLog) Errorf(format string, args ...interface{}) { + if sl.level > LogLevelError { + return + } + + sl.logger.Errorf("%s%s", sl.prefix(), fmt.Sprintf(format, args...)) + // don't call exitIfTest() because the idea is to + // crash on hidden errors (warnings); but Errorf MUST NOT be + // called on hidden errors, see log levels comments. 
+} + +func (sl StderrLog) Warnf(format string, args ...interface{}) { + if sl.level > LogLevelWarn { + return + } + + sl.logger.Warnf("%s%s", sl.prefix(), fmt.Sprintf(format, args...)) +} + +func (sl StderrLog) Infof(format string, args ...interface{}) { + if sl.level > LogLevelInfo { + return + } + + sl.logger.Infof("%s%s", sl.prefix(), fmt.Sprintf(format, args...)) +} + +func (sl StderrLog) Debugf(format string, args ...interface{}) { + if sl.level > LogLevelDebug { + return + } + + sl.logger.Debugf("%s%s", sl.prefix(), fmt.Sprintf(format, args...)) +} + +func (sl StderrLog) Child(name string) Log { + prefix := "" + if sl.name != "" { + prefix = sl.name + "/" + } + + child := sl + child.name = prefix + name + + return &child +} + +func (sl *StderrLog) SetLevel(level LogLevel) { + sl.level = level +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/packages/errors.go b/vendor/github.com/golangci/golangci-lint/pkg/packages/errors.go new file mode 100644 index 000000000..c620573b9 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/packages/errors.go @@ -0,0 +1,39 @@ +package packages + +import ( + "fmt" + "go/token" + "strconv" + "strings" + + "github.com/pkg/errors" +) + +//nolint:gomnd +func ParseErrorPosition(pos string) (*token.Position, error) { + // file:line(:colon) + parts := strings.Split(pos, ":") + if len(parts) == 1 { + return nil, errors.New("no colons") + } + + file := parts[0] + line, err := strconv.Atoi(parts[1]) + if err != nil { + return nil, fmt.Errorf("can't parse line number %q: %s", parts[1], err) + } + + var column int + if len(parts) == 3 { // no column + column, err = strconv.Atoi(parts[2]) + if err != nil { + return nil, errors.Wrapf(err, "failed to parse column from %q", parts[2]) + } + } + + return &token.Position{ + Filename: file, + Line: line, + Column: column, + }, nil +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/packages/skip.go b/vendor/github.com/golangci/golangci-lint/pkg/packages/skip.go new file mode 100644 index 000000000..cdd327f5d --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/packages/skip.go @@ -0,0 +1,25 @@ +package packages + +import ( + "fmt" + "path/filepath" + "regexp" +) + +func pathElemReImpl(e string, sep rune) string { + escapedSep := regexp.QuoteMeta(string(sep)) // needed for windows sep '\\' + return fmt.Sprintf(`(^|%s)%s($|%s)`, escapedSep, e, escapedSep) +} + +func pathElemRe(e string) string { + return pathElemReImpl(e, filepath.Separator) +} + +var StdExcludeDirRegexps = []string{ + pathElemRe("vendor"), + pathElemRe("third_party"), + pathElemRe("testdata"), + pathElemRe("examples"), + pathElemRe("Godeps"), + pathElemRe("builtin"), +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/packages/util.go b/vendor/github.com/golangci/golangci-lint/pkg/packages/util.go new file mode 100644 index 000000000..e4268897f --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/packages/util.go @@ -0,0 +1,102 @@ +package packages + +import ( + "fmt" + "regexp" + "strings" + + "golang.org/x/tools/go/packages" +) + +// reFile matches a line who starts with path and position. 
+// ex: `/example/main.go:11:17: foobar` +var reFile = regexp.MustCompile(`^.+\.go:\d+:\d+: .+`) + +func ExtractErrors(pkg *packages.Package) []packages.Error { + errors := extractErrorsImpl(pkg, map[*packages.Package]bool{}) + if len(errors) == 0 { + return errors + } + + seenErrors := map[string]bool{} + var uniqErrors []packages.Error + for _, err := range errors { + msg := stackCrusher(err.Error()) + if seenErrors[msg] { + continue + } + + if msg != err.Error() { + continue + } + + seenErrors[msg] = true + + uniqErrors = append(uniqErrors, err) + } + + if len(pkg.GoFiles) != 0 { + // errors were extracted from deps and have at least one file in package + for i := range uniqErrors { + if _, parseErr := ParseErrorPosition(uniqErrors[i].Pos); parseErr == nil { + continue + } + + // change pos to local file to properly process it by processors (properly read line etc) + uniqErrors[i].Msg = fmt.Sprintf("%s: %s", uniqErrors[i].Pos, uniqErrors[i].Msg) + uniqErrors[i].Pos = fmt.Sprintf("%s:1", pkg.GoFiles[0]) + } + + // some errors like "code in directory expects import" don't have Pos, set it here + for i := range uniqErrors { + err := &uniqErrors[i] + if err.Pos == "" { + err.Pos = fmt.Sprintf("%s:1", pkg.GoFiles[0]) + } + } + } + + return uniqErrors +} + +func extractErrorsImpl(pkg *packages.Package, seenPackages map[*packages.Package]bool) []packages.Error { + if seenPackages[pkg] { + return nil + } + seenPackages[pkg] = true + + if !pkg.IllTyped { // otherwise it may take hours to traverse all deps many times + return nil + } + + if len(pkg.Errors) > 0 { + return pkg.Errors + } + + var errors []packages.Error + for _, iPkg := range pkg.Imports { + iPkgErrors := extractErrorsImpl(iPkg, seenPackages) + if iPkgErrors != nil { + errors = append(errors, iPkgErrors...) 
+ } + } + + return errors +} + +func stackCrusher(msg string) string { + index := strings.Index(msg, "(") + lastIndex := strings.LastIndex(msg, ")") + + if index == -1 || index == len(msg)-1 || lastIndex == -1 || lastIndex != len(msg)-1 { + return msg + } + + frag := msg[index+1 : lastIndex] + + if !reFile.MatchString(frag) { + return msg + } + + return stackCrusher(frag) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/printers/checkstyle.go b/vendor/github.com/golangci/golangci-lint/pkg/printers/checkstyle.go new file mode 100644 index 000000000..c5b948a98 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/printers/checkstyle.go @@ -0,0 +1,87 @@ +package printers + +import ( + "context" + "encoding/xml" + "fmt" + + "github.com/go-xmlfmt/xmlfmt" + + "github.com/golangci/golangci-lint/pkg/logutils" + "github.com/golangci/golangci-lint/pkg/result" +) + +type checkstyleOutput struct { + XMLName xml.Name `xml:"checkstyle"` + Version string `xml:"version,attr"` + Files []*checkstyleFile `xml:"file"` +} + +type checkstyleFile struct { + Name string `xml:"name,attr"` + Errors []*checkstyleError `xml:"error"` +} + +type checkstyleError struct { + Column int `xml:"column,attr"` + Line int `xml:"line,attr"` + Message string `xml:"message,attr"` + Severity string `xml:"severity,attr"` + Source string `xml:"source,attr"` +} + +const defaultCheckstyleSeverity = "error" + +type Checkstyle struct{} + +func NewCheckstyle() *Checkstyle { + return &Checkstyle{} +} + +func (Checkstyle) Print(ctx context.Context, issues []result.Issue) error { + out := checkstyleOutput{ + Version: "5.0", + } + + files := map[string]*checkstyleFile{} + + for i := range issues { + issue := &issues[i] + file, ok := files[issue.FilePath()] + if !ok { + file = &checkstyleFile{ + Name: issue.FilePath(), + } + + files[issue.FilePath()] = file + } + + severity := defaultCheckstyleSeverity + if issue.Severity != "" { + severity = issue.Severity + } + + newError := &checkstyleError{ + Column: issue.Column(), + Line: issue.Line(), + Message: issue.Text, + Source: issue.FromLinter, + Severity: severity, + } + + file.Errors = append(file.Errors, newError) + } + + out.Files = make([]*checkstyleFile, 0, len(files)) + for _, file := range files { + out.Files = append(out.Files, file) + } + + data, err := xml.Marshal(&out) + if err != nil { + return err + } + + fmt.Fprintf(logutils.StdOut, "%s%s\n", xml.Header, xmlfmt.FormatXML(string(data), "", " ")) + return nil +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/printers/codeclimate.go b/vendor/github.com/golangci/golangci-lint/pkg/printers/codeclimate.go new file mode 100644 index 000000000..35a22ce99 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/printers/codeclimate.go @@ -0,0 +1,57 @@ +package printers + +import ( + "context" + "encoding/json" + "fmt" + + "github.com/golangci/golangci-lint/pkg/logutils" + "github.com/golangci/golangci-lint/pkg/result" +) + +// CodeClimateIssue is a subset of the Code Climate spec - https://github.com/codeclimate/spec/blob/master/SPEC.md#data-types +// It is just enough to support GitLab CI Code Quality - https://docs.gitlab.com/ee/user/project/merge_requests/code_quality.html +type CodeClimateIssue struct { + Description string `json:"description"` + Severity string `json:"severity,omitempty"` + Fingerprint string `json:"fingerprint"` + Location struct { + Path string `json:"path"` + Lines struct { + Begin int `json:"begin"` + } `json:"lines"` + } `json:"location"` +} + +type CodeClimate struct { +} 
+ +func NewCodeClimate() *CodeClimate { + return &CodeClimate{} +} + +func (p CodeClimate) Print(ctx context.Context, issues []result.Issue) error { + codeClimateIssues := []CodeClimateIssue{} + for i := range issues { + issue := &issues[i] + codeClimateIssue := CodeClimateIssue{} + codeClimateIssue.Description = issue.Description() + codeClimateIssue.Location.Path = issue.Pos.Filename + codeClimateIssue.Location.Lines.Begin = issue.Pos.Line + codeClimateIssue.Fingerprint = issue.Fingerprint() + + if issue.Severity != "" { + codeClimateIssue.Severity = issue.Severity + } + + codeClimateIssues = append(codeClimateIssues, codeClimateIssue) + } + + outputJSON, err := json.Marshal(codeClimateIssues) + if err != nil { + return err + } + + fmt.Fprint(logutils.StdOut, string(outputJSON)) + return nil +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/printers/github.go b/vendor/github.com/golangci/golangci-lint/pkg/printers/github.go new file mode 100644 index 000000000..4ebc26685 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/printers/github.go @@ -0,0 +1,46 @@ +package printers + +import ( + "context" + "fmt" + + "github.com/golangci/golangci-lint/pkg/logutils" + "github.com/golangci/golangci-lint/pkg/result" +) + +type github struct { +} + +const defaultGithubSeverity = "error" + +// NewGithub output format outputs issues according to Github actions format: +// https://help.github.com/en/actions/reference/workflow-commands-for-github-actions#setting-an-error-message +func NewGithub() Printer { + return &github{} +} + +// print each line as: ::error file=app.js,line=10,col=15::Something went wrong +func formatIssueAsGithub(issue *result.Issue) string { + severity := defaultGithubSeverity + if issue.Severity != "" { + severity = issue.Severity + } + + ret := fmt.Sprintf("::%s file=%s,line=%d", severity, issue.FilePath(), issue.Line()) + if issue.Pos.Column != 0 { + ret += fmt.Sprintf(",col=%d", issue.Pos.Column) + } + + ret += fmt.Sprintf("::%s (%s)", issue.Text, issue.FromLinter) + return ret +} + +func (g *github) Print(_ context.Context, issues []result.Issue) error { + for ind := range issues { + _, err := fmt.Fprintln(logutils.StdOut, formatIssueAsGithub(&issues[ind])) + if err != nil { + return err + } + } + return nil +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/printers/html.go b/vendor/github.com/golangci/golangci-lint/pkg/printers/html.go new file mode 100644 index 000000000..65ab753bd --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/printers/html.go @@ -0,0 +1,155 @@ +package printers + +import ( + "context" + "fmt" + "html/template" + "strings" + + "github.com/golangci/golangci-lint/pkg/logutils" + "github.com/golangci/golangci-lint/pkg/result" +) + +const templateContent = ` + + + + golangci-lint + + + + + + + + + + +
+ <!-- HTML template markup (styles, scripts, issue table) stripped during extraction -->
+ + + +` + +type htmlIssue struct { + Title string + Pos string + Linter string + Code string +} + +type HTML struct{} + +func NewHTML() *HTML { + return &HTML{} +} + +func (h HTML) Print(_ context.Context, issues []result.Issue) error { + var htmlIssues []htmlIssue + + for i := range issues { + pos := fmt.Sprintf("%s:%d", issues[i].FilePath(), issues[i].Line()) + if issues[i].Pos.Column != 0 { + pos += fmt.Sprintf(":%d", issues[i].Pos.Column) + } + + htmlIssues = append(htmlIssues, htmlIssue{ + Title: strings.TrimSpace(issues[i].Text), + Pos: pos, + Linter: issues[i].FromLinter, + Code: strings.Join(issues[i].SourceLines, "\n"), + }) + } + + t, err := template.New("golangci-lint").Parse(templateContent) + if err != nil { + return err + } + + return t.Execute(logutils.StdOut, struct{ Issues []htmlIssue }{Issues: htmlIssues}) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/printers/json.go b/vendor/github.com/golangci/golangci-lint/pkg/printers/json.go new file mode 100644 index 000000000..6ffa996fb --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/printers/json.go @@ -0,0 +1,41 @@ +package printers + +import ( + "context" + "encoding/json" + "fmt" + + "github.com/golangci/golangci-lint/pkg/logutils" + "github.com/golangci/golangci-lint/pkg/report" + "github.com/golangci/golangci-lint/pkg/result" +) + +type JSON struct { + rd *report.Data +} + +func NewJSON(rd *report.Data) *JSON { + return &JSON{ + rd: rd, + } +} + +type JSONResult struct { + Issues []result.Issue + Report *report.Data +} + +func (p JSON) Print(ctx context.Context, issues []result.Issue) error { + res := JSONResult{ + Issues: issues, + Report: p.rd, + } + + outputJSON, err := json.Marshal(res) + if err != nil { + return err + } + + fmt.Fprint(logutils.StdOut, string(outputJSON)) + return nil +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/printers/junitxml.go b/vendor/github.com/golangci/golangci-lint/pkg/printers/junitxml.go new file mode 100644 index 000000000..9277cd66f --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/printers/junitxml.go @@ -0,0 +1,79 @@ +package printers + +import ( + "context" + "encoding/xml" + "strings" + + "github.com/golangci/golangci-lint/pkg/logutils" + "github.com/golangci/golangci-lint/pkg/result" +) + +type testSuitesXML struct { + XMLName xml.Name `xml:"testsuites"` + TestSuites []testSuiteXML +} + +type testSuiteXML struct { + XMLName xml.Name `xml:"testsuite"` + Suite string `xml:"name,attr"` + Tests int `xml:"tests,attr"` + Errors int `xml:"errors,attr"` + Failures int `xml:"failures,attr"` + TestCases []testCaseXML `xml:"testcase"` +} + +type testCaseXML struct { + Name string `xml:"name,attr"` + ClassName string `xml:"classname,attr"` + Failure failureXML `xml:"failure"` +} + +type failureXML struct { + Message string `xml:"message,attr"` + Content string `xml:",cdata"` +} + +type JunitXML struct { +} + +func NewJunitXML() *JunitXML { + return &JunitXML{} +} + +func (JunitXML) Print(ctx context.Context, issues []result.Issue) error { + suites := make(map[string]testSuiteXML) // use a map to group by file + + for ind := range issues { + i := &issues[ind] + suiteName := i.FilePath() + testSuite := suites[suiteName] + testSuite.Suite = i.FilePath() + testSuite.Tests++ + testSuite.Failures++ + + tc := testCaseXML{ + Name: i.FromLinter, + ClassName: i.Pos.String(), + Failure: failureXML{ + Message: i.Text, + Content: strings.Join(i.SourceLines, "\n"), + }, + } + + testSuite.TestCases = append(testSuite.TestCases, tc) + 
suites[suiteName] = testSuite + } + + var res testSuitesXML + for _, val := range suites { + res.TestSuites = append(res.TestSuites, val) + } + + enc := xml.NewEncoder(logutils.StdOut) + enc.Indent("", " ") + if err := enc.Encode(res); err != nil { + return err + } + return nil +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/printers/printer.go b/vendor/github.com/golangci/golangci-lint/pkg/printers/printer.go new file mode 100644 index 000000000..bfafb88e2 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/printers/printer.go @@ -0,0 +1,11 @@ +package printers + +import ( + "context" + + "github.com/golangci/golangci-lint/pkg/result" +) + +type Printer interface { + Print(ctx context.Context, issues []result.Issue) error +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/printers/tab.go b/vendor/github.com/golangci/golangci-lint/pkg/printers/tab.go new file mode 100644 index 000000000..d3cdce673 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/printers/tab.go @@ -0,0 +1,58 @@ +package printers + +import ( + "context" + "fmt" + "io" + "text/tabwriter" + + "github.com/fatih/color" + + "github.com/golangci/golangci-lint/pkg/logutils" + "github.com/golangci/golangci-lint/pkg/result" +) + +type Tab struct { + printLinterName bool + log logutils.Log +} + +func NewTab(printLinterName bool, log logutils.Log) *Tab { + return &Tab{ + printLinterName: printLinterName, + log: log, + } +} + +func (p Tab) SprintfColored(ca color.Attribute, format string, args ...interface{}) string { + c := color.New(ca) + return c.Sprintf(format, args...) +} + +func (p *Tab) Print(ctx context.Context, issues []result.Issue) error { + w := tabwriter.NewWriter(logutils.StdOut, 0, 0, 2, ' ', 0) + + for i := range issues { + p.printIssue(&issues[i], w) + } + + if err := w.Flush(); err != nil { + p.log.Warnf("Can't flush tab writer: %s", err) + } + + return nil +} + +func (p Tab) printIssue(i *result.Issue, w io.Writer) { + text := p.SprintfColored(color.FgRed, "%s", i.Text) + if p.printLinterName { + text = fmt.Sprintf("%s\t%s", i.FromLinter, text) + } + + pos := p.SprintfColored(color.Bold, "%s:%d", i.FilePath(), i.Line()) + if i.Pos.Column != 0 { + pos += fmt.Sprintf(":%d", i.Pos.Column) + } + + fmt.Fprintf(w, "%s\t%s\n", pos, text) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/printers/text.go b/vendor/github.com/golangci/golangci-lint/pkg/printers/text.go new file mode 100644 index 000000000..181452888 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/printers/text.go @@ -0,0 +1,91 @@ +package printers + +import ( + "context" + "fmt" + "strings" + + "github.com/fatih/color" + + "github.com/golangci/golangci-lint/pkg/logutils" + "github.com/golangci/golangci-lint/pkg/result" +) + +type Text struct { + printIssuedLine bool + useColors bool + printLinterName bool + + log logutils.Log +} + +func NewText(printIssuedLine, useColors, printLinterName bool, log logutils.Log) *Text { + return &Text{ + printIssuedLine: printIssuedLine, + useColors: useColors, + printLinterName: printLinterName, + log: log, + } +} + +func (p Text) SprintfColored(ca color.Attribute, format string, args ...interface{}) string { + if !p.useColors { + return fmt.Sprintf(format, args...) + } + + c := color.New(ca) + return c.Sprintf(format, args...) 
+} + +func (p *Text) Print(ctx context.Context, issues []result.Issue) error { + for i := range issues { + p.printIssue(&issues[i]) + + if !p.printIssuedLine { + continue + } + + p.printSourceCode(&issues[i]) + p.printUnderLinePointer(&issues[i]) + } + + return nil +} + +func (p Text) printIssue(i *result.Issue) { + text := p.SprintfColored(color.FgRed, "%s", strings.TrimSpace(i.Text)) + if p.printLinterName { + text += fmt.Sprintf(" (%s)", i.FromLinter) + } + pos := p.SprintfColored(color.Bold, "%s:%d", i.FilePath(), i.Line()) + if i.Pos.Column != 0 { + pos += fmt.Sprintf(":%d", i.Pos.Column) + } + fmt.Fprintf(logutils.StdOut, "%s: %s\n", pos, text) +} + +func (p Text) printSourceCode(i *result.Issue) { + for _, line := range i.SourceLines { + fmt.Fprintln(logutils.StdOut, line) + } +} + +func (p Text) printUnderLinePointer(i *result.Issue) { + // if column == 0 it means column is unknown (e.g. for gosec) + if len(i.SourceLines) != 1 || i.Pos.Column == 0 { + return + } + + col0 := i.Pos.Column - 1 + line := i.SourceLines[0] + prefixRunes := make([]rune, 0, len(line)) + for j := 0; j < len(line) && j < col0; j++ { + if line[j] == '\t' { + prefixRunes = append(prefixRunes, '\t') + } else { + prefixRunes = append(prefixRunes, ' ') + } + } + + fmt.Fprintf(logutils.StdOut, "%s%s\n", string(prefixRunes), p.SprintfColored(color.FgYellow, "^")) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/report/data.go b/vendor/github.com/golangci/golangci-lint/pkg/report/data.go new file mode 100644 index 000000000..f083fa9f5 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/report/data.go @@ -0,0 +1,26 @@ +package report + +type Warning struct { + Tag string `json:",omitempty"` + Text string +} + +type LinterData struct { + Name string + Enabled bool `json:",omitempty"` + EnabledByDefault bool `json:",omitempty"` +} + +type Data struct { + Warnings []Warning `json:",omitempty"` + Linters []LinterData `json:",omitempty"` + Error string `json:",omitempty"` +} + +func (d *Data) AddLinter(name string, enabled, enabledByDefault bool) { + d.Linters = append(d.Linters, LinterData{ + Name: name, + Enabled: enabled, + EnabledByDefault: enabledByDefault, + }) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/report/log.go b/vendor/github.com/golangci/golangci-lint/pkg/report/log.go new file mode 100644 index 000000000..45ab6cae8 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/report/log.go @@ -0,0 +1,64 @@ +package report + +import ( + "fmt" + "strings" + + "github.com/golangci/golangci-lint/pkg/logutils" +) + +type LogWrapper struct { + rd *Data + tags []string + origLog logutils.Log +} + +func NewLogWrapper(log logutils.Log, reportData *Data) *LogWrapper { + return &LogWrapper{ + rd: reportData, + origLog: log, + } +} + +func (lw LogWrapper) Fatalf(format string, args ...interface{}) { + lw.origLog.Fatalf(format, args...) +} + +func (lw LogWrapper) Panicf(format string, args ...interface{}) { + lw.origLog.Panicf(format, args...) +} + +func (lw LogWrapper) Errorf(format string, args ...interface{}) { + lw.origLog.Errorf(format, args...) + lw.rd.Error = fmt.Sprintf(format, args...) +} + +func (lw LogWrapper) Warnf(format string, args ...interface{}) { + lw.origLog.Warnf(format, args...) + w := Warning{ + Tag: strings.Join(lw.tags, "/"), + Text: fmt.Sprintf(format, args...), + } + + lw.rd.Warnings = append(lw.rd.Warnings, w) +} + +func (lw LogWrapper) Infof(format string, args ...interface{}) { + lw.origLog.Infof(format, args...) 
+} + +func (lw LogWrapper) Child(name string) logutils.Log { + c := lw + c.origLog = lw.origLog.Child(name) + c.tags = append([]string{}, lw.tags...) + c.tags = append(c.tags, name) + return c +} + +func (lw LogWrapper) SetLevel(level logutils.LogLevel) { + lw.origLog.SetLevel(level) +} + +func (lw LogWrapper) GoString() string { + return fmt.Sprintf("lw: %+v, orig log: %#v", lw, lw.origLog) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/issue.go b/vendor/github.com/golangci/golangci-lint/pkg/result/issue.go new file mode 100644 index 000000000..707a2b17c --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/result/issue.go @@ -0,0 +1,98 @@ +package result + +import ( + "crypto/md5" //nolint:gosec + "fmt" + "go/token" + + "golang.org/x/tools/go/packages" +) + +type Range struct { + From, To int +} + +type Replacement struct { + NeedOnlyDelete bool // need to delete all lines of the issue without replacement with new lines + NewLines []string // if NeedDelete is false it's the replacement lines + Inline *InlineFix +} + +type InlineFix struct { + StartCol int // zero-based + Length int // length of chunk to be replaced + NewString string +} + +type Issue struct { + FromLinter string + Text string + + Severity string + + // Source lines of a code with the issue to show + SourceLines []string + + // If we know how to fix the issue we can provide replacement lines + Replacement *Replacement + + // Pkg is needed for proper caching of linting results + Pkg *packages.Package `json:"-"` + + LineRange *Range `json:",omitempty"` + + Pos token.Position + + // HunkPos is used only when golangci-lint is run over a diff + HunkPos int `json:",omitempty"` + + // If we are expecting a nolint (because this is from nolintlint), record the expected linter + ExpectNoLint bool + ExpectedNoLintLinter string +} + +func (i *Issue) FilePath() string { + return i.Pos.Filename +} + +func (i *Issue) Line() int { + return i.Pos.Line +} + +func (i *Issue) Column() int { + return i.Pos.Column +} + +func (i *Issue) GetLineRange() Range { + if i.LineRange == nil { + return Range{ + From: i.Line(), + To: i.Line(), + } + } + + if i.LineRange.From == 0 { + return Range{ + From: i.Line(), + To: i.Line(), + } + } + + return *i.LineRange +} + +func (i *Issue) Description() string { + return fmt.Sprintf("%s: %s", i.FromLinter, i.Text) +} + +func (i *Issue) Fingerprint() string { + firstLine := "" + if len(i.SourceLines) > 0 { + firstLine = i.SourceLines[0] + } + + hash := md5.New() //nolint:gosec + _, _ = hash.Write([]byte(fmt.Sprintf("%s%s%s", i.Pos.Filename, i.Text, firstLine))) + + return fmt.Sprintf("%X", hash.Sum(nil)) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/autogenerated_exclude.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/autogenerated_exclude.go new file mode 100644 index 000000000..57388f64f --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/autogenerated_exclude.go @@ -0,0 +1,134 @@ +package processors + +import ( + "fmt" + "go/parser" + "go/token" + "path/filepath" + "strings" + + "github.com/pkg/errors" + + "github.com/golangci/golangci-lint/pkg/logutils" + "github.com/golangci/golangci-lint/pkg/result" +) + +var autogenDebugf = logutils.Debug("autogen_exclude") + +type ageFileSummary struct { + isGenerated bool +} + +type ageFileSummaryCache map[string]*ageFileSummary + +type AutogeneratedExclude struct { + fileSummaryCache ageFileSummaryCache +} + +func NewAutogeneratedExclude() 
*AutogeneratedExclude { + return &AutogeneratedExclude{ + fileSummaryCache: ageFileSummaryCache{}, + } +} + +var _ Processor = &AutogeneratedExclude{} + +func (p AutogeneratedExclude) Name() string { + return "autogenerated_exclude" +} + +func (p *AutogeneratedExclude) Process(issues []result.Issue) ([]result.Issue, error) { + return filterIssuesErr(issues, p.shouldPassIssue) +} + +func isSpecialAutogeneratedFile(filePath string) bool { + fileName := filepath.Base(filePath) + // fake files or generation definitions to which //line points to for generated files + return filepath.Ext(fileName) != ".go" +} + +func (p *AutogeneratedExclude) shouldPassIssue(i *result.Issue) (bool, error) { + if i.FromLinter == "typecheck" { + // don't hide typechecking errors in generated files: users expect to see why the project isn't compiling + return true, nil + } + + if filepath.Base(i.FilePath()) == "go.mod" { + return true, nil + } + + if isSpecialAutogeneratedFile(i.FilePath()) { + return false, nil + } + + fs, err := p.getOrCreateFileSummary(i) + if err != nil { + return false, err + } + + // don't report issues for autogenerated files + return !fs.isGenerated, nil +} + +// isGenerated reports whether the source file is generated code. +// Using a bit laxer rules than https://golang.org/s/generatedcode to +// match more generated code. See #48 and #72. +func isGeneratedFileByComment(doc string) bool { + const ( + genCodeGenerated = "code generated" + genDoNotEdit = "do not edit" + genAutoFile = "autogenerated file" // easyjson + ) + + markers := []string{genCodeGenerated, genDoNotEdit, genAutoFile} + doc = strings.ToLower(doc) + for _, marker := range markers { + if strings.Contains(doc, marker) { + autogenDebugf("doc contains marker %q: file is generated", marker) + return true + } + } + + autogenDebugf("doc of len %d doesn't contain any of markers: %s", len(doc), markers) + return false +} + +func (p *AutogeneratedExclude) getOrCreateFileSummary(i *result.Issue) (*ageFileSummary, error) { + fs := p.fileSummaryCache[i.FilePath()] + if fs != nil { + return fs, nil + } + + fs = &ageFileSummary{} + p.fileSummaryCache[i.FilePath()] = fs + + if i.FilePath() == "" { + return nil, fmt.Errorf("no file path for issue") + } + + doc, err := getDoc(i.FilePath()) + if err != nil { + return nil, errors.Wrapf(err, "failed to get doc of file %s", i.FilePath()) + } + + fs.isGenerated = isGeneratedFileByComment(doc) + autogenDebugf("file %q is generated: %t", i.FilePath(), fs.isGenerated) + return fs, nil +} + +func getDoc(filePath string) (string, error) { + fset := token.NewFileSet() + syntax, err := parser.ParseFile(fset, filePath, nil, parser.PackageClauseOnly|parser.ParseComments) + if err != nil { + return "", errors.Wrap(err, "failed to parse file") + } + + var docLines []string + for _, c := range syntax.Comments { + docLines = append(docLines, strings.TrimSpace(c.Text())) + } + + return strings.Join(docLines, "\n"), nil +} + +func (p AutogeneratedExclude) Finish() {} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/base_rule.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/base_rule.go new file mode 100644 index 000000000..b6ce4f215 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/base_rule.go @@ -0,0 +1,69 @@ +package processors + +import ( + "regexp" + + "github.com/golangci/golangci-lint/pkg/fsutils" + "github.com/golangci/golangci-lint/pkg/logutils" + "github.com/golangci/golangci-lint/pkg/result" +) + +type BaseRule struct { 
+ Text string + Source string + Path string + Linters []string +} + +type baseRule struct { + text *regexp.Regexp + source *regexp.Regexp + path *regexp.Regexp + linters []string +} + +func (r *baseRule) isEmpty() bool { + return r.text == nil && r.source == nil && r.path == nil && len(r.linters) == 0 +} + +func (r *baseRule) match(issue *result.Issue, lineCache *fsutils.LineCache, log logutils.Log) bool { + if r.isEmpty() { + return false + } + if r.text != nil && !r.text.MatchString(issue.Text) { + return false + } + if r.path != nil && !r.path.MatchString(issue.FilePath()) { + return false + } + if len(r.linters) != 0 && !r.matchLinter(issue) { + return false + } + + // the most heavyweight checking last + if r.source != nil && !r.matchSource(issue, lineCache, log) { + return false + } + + return true +} + +func (r *baseRule) matchLinter(issue *result.Issue) bool { + for _, linter := range r.linters { + if linter == issue.FromLinter { + return true + } + } + + return false +} + +func (r *baseRule) matchSource(issue *result.Issue, lineCache *fsutils.LineCache, log logutils.Log) bool { // nolint:interfacer + sourceLine, errSourceLine := lineCache.GetLine(issue.FilePath(), issue.Line()) + if errSourceLine != nil { + log.Warnf("Failed to get line %s:%d from line cache: %s", issue.FilePath(), issue.Line(), errSourceLine) + return false // can't properly match + } + + return r.source.MatchString(sourceLine) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/cgo.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/cgo.go new file mode 100644 index 000000000..c8793871a --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/cgo.go @@ -0,0 +1,58 @@ +package processors + +import ( + "path/filepath" + "strings" + + "github.com/pkg/errors" + + "github.com/golangci/golangci-lint/pkg/goutil" + "github.com/golangci/golangci-lint/pkg/result" +) + +type Cgo struct { + goCacheDir string +} + +var _ Processor = Cgo{} + +func NewCgo(goenv *goutil.Env) *Cgo { + return &Cgo{ + goCacheDir: goenv.Get(goutil.EnvGoCache), + } +} + +func (p Cgo) Name() string { + return "cgo" +} + +func (p Cgo) Process(issues []result.Issue) ([]result.Issue, error) { + return filterIssuesErr(issues, func(i *result.Issue) (bool, error) { + // some linters (.e.g gosec, deadcode) return incorrect filepaths for cgo issues, + // also cgo files have strange issues looking like false positives. 
+ + // cache dir contains all preprocessed files including cgo files + + issueFilePath := i.FilePath() + if !filepath.IsAbs(i.FilePath()) { + absPath, err := filepath.Abs(i.FilePath()) + if err != nil { + return false, errors.Wrapf(err, "failed to build abs path for %q", i.FilePath()) + } + issueFilePath = absPath + } + + if p.goCacheDir != "" && strings.HasPrefix(issueFilePath, p.goCacheDir) { + return false, nil + } + + if filepath.Base(i.FilePath()) == "_cgo_gotypes.go" { + // skip cgo warning for go1.10 + return false, nil + } + + return true, nil + }) +} + +func (Cgo) Finish() {} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/diff.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/diff.go new file mode 100644 index 000000000..fc4aba4b9 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/diff.go @@ -0,0 +1,74 @@ +package processors + +import ( + "bytes" + "fmt" + "io" + "io/ioutil" + "os" + "strings" + + "github.com/golangci/revgrep" + + "github.com/golangci/golangci-lint/pkg/result" +) + +type Diff struct { + onlyNew bool + fromRev string + patchFilePath string + patch string +} + +var _ Processor = Diff{} + +func NewDiff(onlyNew bool, fromRev, patchFilePath string) *Diff { + return &Diff{ + onlyNew: onlyNew, + fromRev: fromRev, + patchFilePath: patchFilePath, + patch: os.Getenv("GOLANGCI_DIFF_PROCESSOR_PATCH"), + } +} + +func (p Diff) Name() string { + return "diff" +} + +func (p Diff) Process(issues []result.Issue) ([]result.Issue, error) { + if !p.onlyNew && p.fromRev == "" && p.patchFilePath == "" && p.patch == "" { // no need to work + return issues, nil + } + + var patchReader io.Reader + if p.patchFilePath != "" { + patch, err := ioutil.ReadFile(p.patchFilePath) + if err != nil { + return nil, fmt.Errorf("can't read from patch file %s: %s", p.patchFilePath, err) + } + patchReader = bytes.NewReader(patch) + } else if p.patch != "" { + patchReader = strings.NewReader(p.patch) + } + + c := revgrep.Checker{ + Patch: patchReader, + RevisionFrom: p.fromRev, + } + if err := c.Prepare(); err != nil { + return nil, fmt.Errorf("can't prepare diff by revgrep: %s", err) + } + + return transformIssues(issues, func(i *result.Issue) *result.Issue { + hunkPos, isNew := c.IsNewIssue(i) + if !isNew { + return nil + } + + newI := *i + newI.HunkPos = hunkPos + return &newI + }), nil +} + +func (Diff) Finish() {} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/exclude.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/exclude.go new file mode 100644 index 000000000..92959a328 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/exclude.go @@ -0,0 +1,59 @@ +package processors + +import ( + "regexp" + + "github.com/golangci/golangci-lint/pkg/result" +) + +type Exclude struct { + pattern *regexp.Regexp +} + +var _ Processor = Exclude{} + +func NewExclude(pattern string) *Exclude { + var patternRe *regexp.Regexp + if pattern != "" { + patternRe = regexp.MustCompile("(?i)" + pattern) + } + return &Exclude{ + pattern: patternRe, + } +} + +func (p Exclude) Name() string { + return "exclude" +} + +func (p Exclude) Process(issues []result.Issue) ([]result.Issue, error) { + if p.pattern == nil { + return issues, nil + } + + return filterIssues(issues, func(i *result.Issue) bool { + return !p.pattern.MatchString(i.Text) + }), nil +} + +func (p Exclude) Finish() {} + +type ExcludeCaseSensitive struct { + *Exclude +} + +var _ Processor = ExcludeCaseSensitive{} + 
+func NewExcludeCaseSensitive(pattern string) *ExcludeCaseSensitive { + var patternRe *regexp.Regexp + if pattern != "" { + patternRe = regexp.MustCompile(pattern) + } + return &ExcludeCaseSensitive{ + &Exclude{pattern: patternRe}, + } +} + +func (p ExcludeCaseSensitive) Name() string { + return "exclude-case-sensitive" +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/exclude_rules.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/exclude_rules.go new file mode 100644 index 000000000..d4d6569f4 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/exclude_rules.go @@ -0,0 +1,90 @@ +package processors + +import ( + "regexp" + + "github.com/golangci/golangci-lint/pkg/fsutils" + "github.com/golangci/golangci-lint/pkg/logutils" + "github.com/golangci/golangci-lint/pkg/result" +) + +type excludeRule struct { + baseRule +} + +type ExcludeRule struct { + BaseRule +} + +type ExcludeRules struct { + rules []excludeRule + lineCache *fsutils.LineCache + log logutils.Log +} + +func NewExcludeRules(rules []ExcludeRule, lineCache *fsutils.LineCache, log logutils.Log) *ExcludeRules { + r := &ExcludeRules{ + lineCache: lineCache, + log: log, + } + r.rules = createRules(rules, "(?i)") + + return r +} + +func createRules(rules []ExcludeRule, prefix string) []excludeRule { + parsedRules := make([]excludeRule, 0, len(rules)) + for _, rule := range rules { + parsedRule := excludeRule{} + parsedRule.linters = rule.Linters + if rule.Text != "" { + parsedRule.text = regexp.MustCompile(prefix + rule.Text) + } + if rule.Source != "" { + parsedRule.source = regexp.MustCompile(prefix + rule.Source) + } + if rule.Path != "" { + parsedRule.path = regexp.MustCompile(rule.Path) + } + parsedRules = append(parsedRules, parsedRule) + } + return parsedRules +} + +func (p ExcludeRules) Process(issues []result.Issue) ([]result.Issue, error) { + if len(p.rules) == 0 { + return issues, nil + } + return filterIssues(issues, func(i *result.Issue) bool { + for _, rule := range p.rules { + rule := rule + if rule.match(i, p.lineCache, p.log) { + return false + } + } + return true + }), nil +} + +func (ExcludeRules) Name() string { return "exclude-rules" } +func (ExcludeRules) Finish() {} + +var _ Processor = ExcludeRules{} + +type ExcludeRulesCaseSensitive struct { + *ExcludeRules +} + +func NewExcludeRulesCaseSensitive(rules []ExcludeRule, lineCache *fsutils.LineCache, log logutils.Log) *ExcludeRulesCaseSensitive { + r := &ExcludeRules{ + lineCache: lineCache, + log: log, + } + r.rules = createRules(rules, "") + + return &ExcludeRulesCaseSensitive{r} +} + +func (ExcludeRulesCaseSensitive) Name() string { return "exclude-rules-case-sensitive" } + +var _ Processor = ExcludeCaseSensitive{} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/filename_unadjuster.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/filename_unadjuster.go new file mode 100644 index 000000000..96540245b --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/filename_unadjuster.go @@ -0,0 +1,131 @@ +package processors + +import ( + "go/parser" + "go/token" + "path/filepath" + "strings" + "sync" + "time" + + "golang.org/x/tools/go/packages" + + "github.com/golangci/golangci-lint/pkg/logutils" + "github.com/golangci/golangci-lint/pkg/result" +) + +type posMapper func(pos token.Position) token.Position + +type adjustMap struct { + sync.Mutex + m map[string]posMapper +} + +// FilenameUnadjuster is needed because a lot of 
linters use fset.Position(f.Pos()) +// to get filename. And they return adjusted filename (e.g. *.qtpl) for an issue. We need +// restore real .go filename to properly output it, parse it, etc. +type FilenameUnadjuster struct { + m map[string]posMapper // map from adjusted filename to position mapper: adjusted -> unadjusted position + log logutils.Log + loggedUnadjustments map[string]bool +} + +var _ Processor = &FilenameUnadjuster{} + +func processUnadjusterPkg(m *adjustMap, pkg *packages.Package, log logutils.Log) { + fset := token.NewFileSet() // it's more memory efficient to not store all in one fset + + for _, filename := range pkg.CompiledGoFiles { + // It's important to call func here to run GC + processUnadjusterFile(filename, m, log, fset) + } +} + +func processUnadjusterFile(filename string, m *adjustMap, log logutils.Log, fset *token.FileSet) { + syntax, err := parser.ParseFile(fset, filename, nil, parser.ParseComments) + if err != nil { + // Error will be reported by typecheck + return + } + + adjustedFilename := fset.PositionFor(syntax.Pos(), true).Filename + if adjustedFilename == "" { + return + } + + unadjustedFilename := fset.PositionFor(syntax.Pos(), false).Filename + if unadjustedFilename == "" || unadjustedFilename == adjustedFilename { + return + } + + if !strings.HasSuffix(unadjustedFilename, ".go") { + return // file.go -> /caches/cgo-xxx + } + + m.Lock() + defer m.Unlock() + m.m[adjustedFilename] = func(adjustedPos token.Position) token.Position { + tokenFile := fset.File(syntax.Pos()) + if tokenFile == nil { + log.Warnf("Failed to get token file for %s", adjustedFilename) + return adjustedPos + } + return fset.PositionFor(tokenFile.Pos(adjustedPos.Offset), false) + } +} + +func NewFilenameUnadjuster(pkgs []*packages.Package, log logutils.Log) *FilenameUnadjuster { + m := adjustMap{m: map[string]posMapper{}} + + startedAt := time.Now() + var wg sync.WaitGroup + wg.Add(len(pkgs)) + for _, pkg := range pkgs { + go func(pkg *packages.Package) { + // It's important to call func here to run GC + processUnadjusterPkg(&m, pkg, log) + wg.Done() + }(pkg) + } + wg.Wait() + log.Infof("Pre-built %d adjustments in %s", len(m.m), time.Since(startedAt)) + + return &FilenameUnadjuster{ + m: m.m, + log: log, + loggedUnadjustments: map[string]bool{}, + } +} + +func (p FilenameUnadjuster) Name() string { + return "filename_unadjuster" +} + +func (p *FilenameUnadjuster) Process(issues []result.Issue) ([]result.Issue, error) { + return transformIssues(issues, func(i *result.Issue) *result.Issue { + issueFilePath := i.FilePath() + if !filepath.IsAbs(i.FilePath()) { + absPath, err := filepath.Abs(i.FilePath()) + if err != nil { + p.log.Warnf("failed to build abs path for %q: %s", i.FilePath(), err) + return i + } + issueFilePath = absPath + } + + mapper := p.m[issueFilePath] + if mapper == nil { + return i + } + + newI := *i + newI.Pos = mapper(i.Pos) + if !p.loggedUnadjustments[i.Pos.Filename] { + p.log.Infof("Unadjusted from %v to %v", i.Pos, newI.Pos) + p.loggedUnadjustments[i.Pos.Filename] = true + } + return &newI + }), nil +} + +func (FilenameUnadjuster) Finish() {} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/fixer.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/fixer.go new file mode 100644 index 000000000..17f519e32 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/fixer.go @@ -0,0 +1,248 @@ +package processors + +import ( + "bytes" + "fmt" + "os" + "path/filepath" + "sort" + "strings" + + 
"github.com/pkg/errors" + + "github.com/golangci/golangci-lint/pkg/config" + "github.com/golangci/golangci-lint/pkg/fsutils" + "github.com/golangci/golangci-lint/pkg/logutils" + "github.com/golangci/golangci-lint/pkg/result" + "github.com/golangci/golangci-lint/pkg/timeutils" +) + +type Fixer struct { + cfg *config.Config + log logutils.Log + fileCache *fsutils.FileCache + sw *timeutils.Stopwatch +} + +func NewFixer(cfg *config.Config, log logutils.Log, fileCache *fsutils.FileCache) *Fixer { + return &Fixer{ + cfg: cfg, + log: log, + fileCache: fileCache, + sw: timeutils.NewStopwatch("fixer", log), + } +} + +func (f Fixer) printStat() { + f.sw.PrintStages() +} + +func (f Fixer) Process(issues []result.Issue) []result.Issue { + if !f.cfg.Issues.NeedFix { + return issues + } + + outIssues := make([]result.Issue, 0, len(issues)) + issuesToFixPerFile := map[string][]result.Issue{} + for i := range issues { + issue := &issues[i] + if issue.Replacement == nil { + outIssues = append(outIssues, *issue) + continue + } + + issuesToFixPerFile[issue.FilePath()] = append(issuesToFixPerFile[issue.FilePath()], *issue) + } + + for file, issuesToFix := range issuesToFixPerFile { + var err error + f.sw.TrackStage("all", func() { + err = f.fixIssuesInFile(file, issuesToFix) + }) + if err != nil { + f.log.Errorf("Failed to fix issues in file %s: %s", file, err) + + // show issues only if can't fix them + outIssues = append(outIssues, issuesToFix...) + } + } + + f.printStat() + return outIssues +} + +func (f Fixer) fixIssuesInFile(filePath string, issues []result.Issue) error { + // TODO: don't read the whole file into memory: read line by line; + // can't just use bufio.scanner: it has a line length limit + origFileData, err := f.fileCache.GetFileBytes(filePath) + if err != nil { + return errors.Wrapf(err, "failed to get file bytes for %s", filePath) + } + origFileLines := bytes.Split(origFileData, []byte("\n")) + + tmpFileName := filepath.Join(filepath.Dir(filePath), fmt.Sprintf(".%s.golangci_fix", filepath.Base(filePath))) + tmpOutFile, err := os.Create(tmpFileName) + if err != nil { + return errors.Wrapf(err, "failed to make file %s", tmpFileName) + } + + // merge multiple issues per line into one issue + issuesPerLine := map[int][]result.Issue{} + for i := range issues { + issue := &issues[i] + issuesPerLine[issue.Line()] = append(issuesPerLine[issue.Line()], *issue) + } + + issues = issues[:0] // reuse the same memory + for line, lineIssues := range issuesPerLine { + if mergedIssue := f.mergeLineIssues(line, lineIssues, origFileLines); mergedIssue != nil { + issues = append(issues, *mergedIssue) + } + } + + issues = f.findNotIntersectingIssues(issues) + + if err = f.writeFixedFile(origFileLines, issues, tmpOutFile); err != nil { + tmpOutFile.Close() + os.Remove(tmpOutFile.Name()) + return err + } + + tmpOutFile.Close() + if err = os.Rename(tmpOutFile.Name(), filePath); err != nil { + os.Remove(tmpOutFile.Name()) + return errors.Wrapf(err, "failed to rename %s -> %s", tmpOutFile.Name(), filePath) + } + + return nil +} + +func (f Fixer) mergeLineIssues(lineNum int, lineIssues []result.Issue, origFileLines [][]byte) *result.Issue { + origLine := origFileLines[lineNum-1] // lineNum is 1-based + + if len(lineIssues) == 1 && lineIssues[0].Replacement.Inline == nil { + return &lineIssues[0] + } + + // check issues first + for ind := range lineIssues { + i := &lineIssues[ind] + if i.LineRange != nil { + f.log.Infof("Line %d has multiple issues but at least one of them is ranged: %#v", lineNum, lineIssues) + 
return &lineIssues[0] + } + + r := i.Replacement + if r.Inline == nil || len(r.NewLines) != 0 || r.NeedOnlyDelete { + f.log.Infof("Line %d has multiple issues but at least one of them isn't inline: %#v", lineNum, lineIssues) + return &lineIssues[0] + } + + if r.Inline.StartCol < 0 || r.Inline.Length <= 0 || r.Inline.StartCol+r.Inline.Length > len(origLine) { + f.log.Warnf("Line %d (%q) has invalid inline fix: %#v, %#v", lineNum, origLine, i, r.Inline) + return nil + } + } + + return f.applyInlineFixes(lineIssues, origLine, lineNum) +} + +func (f Fixer) applyInlineFixes(lineIssues []result.Issue, origLine []byte, lineNum int) *result.Issue { + sort.Slice(lineIssues, func(i, j int) bool { + return lineIssues[i].Replacement.Inline.StartCol < lineIssues[j].Replacement.Inline.StartCol + }) + + var newLineBuf bytes.Buffer + newLineBuf.Grow(len(origLine)) + + //nolint:misspell + // example: origLine="it's becouse of them", StartCol=5, Length=7, NewString="because" + + curOrigLinePos := 0 + for i := range lineIssues { + fix := lineIssues[i].Replacement.Inline + if fix.StartCol < curOrigLinePos { + f.log.Warnf("Line %d has multiple intersecting issues: %#v", lineNum, lineIssues) + return nil + } + + if curOrigLinePos != fix.StartCol { + newLineBuf.Write(origLine[curOrigLinePos:fix.StartCol]) + } + newLineBuf.WriteString(fix.NewString) + curOrigLinePos = fix.StartCol + fix.Length + } + if curOrigLinePos != len(origLine) { + newLineBuf.Write(origLine[curOrigLinePos:]) + } + + mergedIssue := lineIssues[0] // use text from the first issue (it's not really used) + mergedIssue.Replacement = &result.Replacement{ + NewLines: []string{newLineBuf.String()}, + } + return &mergedIssue +} + +func (f Fixer) findNotIntersectingIssues(issues []result.Issue) []result.Issue { + sort.SliceStable(issues, func(i, j int) bool { + a, b := issues[i], issues[j] + return a.Line() < b.Line() + }) + + var ret []result.Issue + var currentEnd int + for i := range issues { + issue := &issues[i] + rng := issue.GetLineRange() + if rng.From <= currentEnd { + f.log.Infof("Skip issue %#v: intersects with end %d", issue, currentEnd) + continue // skip intersecting issue + } + f.log.Infof("Fix issue %#v with range %v", issue, issue.GetLineRange()) + ret = append(ret, *issue) + currentEnd = rng.To + } + + return ret +} + +func (f Fixer) writeFixedFile(origFileLines [][]byte, issues []result.Issue, tmpOutFile *os.File) error { + // issues aren't intersecting + + nextIssueIndex := 0 + for i := 0; i < len(origFileLines); i++ { + var outLine string + var nextIssue *result.Issue + if nextIssueIndex != len(issues) { + nextIssue = &issues[nextIssueIndex] + } + + origFileLineNumber := i + 1 + if nextIssue == nil || origFileLineNumber != nextIssue.GetLineRange().From { + outLine = string(origFileLines[i]) + } else { + nextIssueIndex++ + rng := nextIssue.GetLineRange() + if rng.From > rng.To { + // Maybe better decision is to skip such issues, re-evaluate if regressed. 
+ f.log.Warnf("[fixer]: issue line range is probably invalid, fix can be incorrect (from=%d, to=%d, linter=%s)", + rng.From, rng.To, nextIssue.FromLinter, + ) + } + i += rng.To - rng.From + if nextIssue.Replacement.NeedOnlyDelete { + continue + } + outLine = strings.Join(nextIssue.Replacement.NewLines, "\n") + } + + if i < len(origFileLines)-1 { + outLine += "\n" + } + if _, err := tmpOutFile.WriteString(outLine); err != nil { + return errors.Wrap(err, "failed to write output line") + } + } + + return nil +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/identifier_marker.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/identifier_marker.go new file mode 100644 index 000000000..5cc4e56ba --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/identifier_marker.go @@ -0,0 +1,125 @@ +package processors + +import ( + "regexp" + + "github.com/golangci/golangci-lint/pkg/result" +) + +type replacePattern struct { + re string + repl string +} + +type replaceRegexp struct { + re *regexp.Regexp + repl string +} + +var replacePatterns = []replacePattern{ + // unparam + {`^(\S+) - (\S+) is unused$`, "`${1}` - `${2}` is unused"}, + {`^(\S+) - (\S+) always receives (\S+) \((.*)\)$`, "`${1}` - `${2}` always receives `${3}` (`${4}`)"}, + {`^(\S+) - (\S+) always receives (.*)$`, "`${1}` - `${2}` always receives `${3}`"}, + {`^(\S+) - result (\S+) is always (\S+)`, "`${1}` - result `${2}` is always `${3}`"}, + + // interfacer + {`^(\S+) can be (\S+)$`, "`${1}` can be `${2}`"}, + + // govet + {`^printf: (\S+) arg list ends with redundant newline$`, "printf: `${1}` arg list ends with redundant newline"}, + {`^composites: (\S+) composite literal uses unkeyed fields$`, "composites: `${1}` composite literal uses unkeyed fields"}, + + // gosec + {`^(\S+): Blacklisted import (\S+): weak cryptographic primitive$`, + "${1}: Blacklisted import `${2}`: weak cryptographic primitive"}, + {`^TLS InsecureSkipVerify set true.$`, "TLS `InsecureSkipVerify` set true."}, + + // gosimple + {`should replace loop with (.*)$`, "should replace loop with `${1}`"}, + {`should use a simple channel send/receive instead of select with a single case`, + "should use a simple channel send/receive instead of `select` with a single case"}, + {`should omit comparison to bool constant, can be simplified to (.+)$`, + "should omit comparison to bool constant, can be simplified to `${1}`"}, + {`should write (.+) instead of (.+)$`, "should write `${1}` instead of `${2}`"}, + {`redundant return statement$`, "redundant `return` statement"}, + {`should replace this if statement with an unconditional strings.TrimPrefix`, + "should replace this `if` statement with an unconditional `strings.TrimPrefix`"}, + + // staticcheck + {`this value of (\S+) is never used$`, "this value of `${1}` is never used"}, + {`should use time.Since instead of time.Now\(\).Sub$`, + "should use `time.Since` instead of `time.Now().Sub`"}, + {`should check returned error before deferring response.Close\(\)$`, + "should check returned error before deferring `response.Close()`"}, + {`no value of type uint is less than 0$`, "no value of type `uint` is less than `0`"}, + + // unused + {`(func|const|field|type|var) (\S+) is unused$`, "${1} `${2}` is unused"}, + + // typecheck + {`^unknown field (\S+) in struct literal$`, "unknown field `${1}` in struct literal"}, + {`^invalid operation: (\S+) \(variable of type (\S+)\) has no field or method (\S+)$`, + "invalid operation: `${1}` (variable of type `${2}`) has 
no field or method `${3}`"}, + {`^undeclared name: (\S+)$`, "undeclared name: `${1}`"}, + {`^cannot use addr \(variable of type (\S+)\) as (\S+) value in argument to (\S+)$`, + "cannot use addr (variable of type `${1}`) as `${2}` value in argument to `${3}`"}, + {`^other declaration of (\S+)$`, "other declaration of `${1}`"}, + {`^(\S+) redeclared in this block$`, "`${1}` redeclared in this block"}, + + // golint + {`^exported (type|method|function|var|const) (\S+) should have comment or be unexported$`, + "exported ${1} `${2}` should have comment or be unexported"}, + {`^comment on exported (type|method|function|var|const) (\S+) should be of the form "(\S+) ..."$`, + "comment on exported ${1} `${2}` should be of the form `${3} ...`"}, + {`^should replace (.+) with (.+)$`, "should replace `${1}` with `${2}`"}, + {`^if block ends with a return statement, so drop this else and outdent its block$`, + "`if` block ends with a `return` statement, so drop this `else` and outdent its block"}, + {`^(struct field|var|range var|const|type|(?:func|method|interface method) (?:parameter|result)) (\S+) should be (\S+)$`, + "${1} `${2}` should be `${3}`"}, + {`^don't use underscores in Go names; var (\S+) should be (\S+)$`, + "don't use underscores in Go names; var `${1}` should be `${2}`"}, +} + +type IdentifierMarker struct { + replaceRegexps []replaceRegexp +} + +func NewIdentifierMarker() *IdentifierMarker { + var replaceRegexps []replaceRegexp + for _, p := range replacePatterns { + r := replaceRegexp{ + re: regexp.MustCompile(p.re), + repl: p.repl, + } + replaceRegexps = append(replaceRegexps, r) + } + + return &IdentifierMarker{ + replaceRegexps: replaceRegexps, + } +} + +func (im IdentifierMarker) Process(issues []result.Issue) ([]result.Issue, error) { + return transformIssues(issues, func(i *result.Issue) *result.Issue { + iCopy := *i + iCopy.Text = im.markIdentifiers(iCopy.Text) + return &iCopy + }), nil +} + +func (im IdentifierMarker) markIdentifiers(s string) string { + for _, rr := range im.replaceRegexps { + rs := rr.re.ReplaceAllString(s, rr.repl) + if rs != s { + return rs + } + } + + return s +} + +func (im IdentifierMarker) Name() string { + return "identifier_marker" +} +func (im IdentifierMarker) Finish() {} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/max_from_linter.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/max_from_linter.go new file mode 100644 index 000000000..c58666c56 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/max_from_linter.go @@ -0,0 +1,54 @@ +package processors + +import ( + "github.com/golangci/golangci-lint/pkg/config" + "github.com/golangci/golangci-lint/pkg/logutils" + "github.com/golangci/golangci-lint/pkg/result" +) + +type MaxFromLinter struct { + lc linterToCountMap + limit int + log logutils.Log + cfg *config.Config +} + +var _ Processor = &MaxFromLinter{} + +func NewMaxFromLinter(limit int, log logutils.Log, cfg *config.Config) *MaxFromLinter { + return &MaxFromLinter{ + lc: linterToCountMap{}, + limit: limit, + log: log, + cfg: cfg, + } +} + +func (p MaxFromLinter) Name() string { + return "max_from_linter" +} + +func (p *MaxFromLinter) Process(issues []result.Issue) ([]result.Issue, error) { + if p.limit <= 0 { // no limit + return issues, nil + } + + return filterIssues(issues, func(i *result.Issue) bool { + if i.Replacement != nil && p.cfg.Issues.NeedFix { + // we need to fix all issues at once => we need to return all of them + return true + } + + 
p.lc[i.FromLinter]++ // always inc for stat + return p.lc[i.FromLinter] <= p.limit + }), nil +} + +func (p MaxFromLinter) Finish() { + walkStringToIntMapSortedByValue(p.lc, func(linter string, count int) { + if count > p.limit { + p.log.Infof("%d/%d issues from linter %s were hidden, use --max-issues-per-linter", + count-p.limit, count, linter) + } + }) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/max_per_file_from_linter.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/max_per_file_from_linter.go new file mode 100644 index 000000000..e36446c9f --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/max_per_file_from_linter.go @@ -0,0 +1,59 @@ +package processors + +import ( + "github.com/golangci/golangci-lint/pkg/config" + "github.com/golangci/golangci-lint/pkg/result" +) + +type linterToCountMap map[string]int +type fileToLinterToCountMap map[string]linterToCountMap + +type MaxPerFileFromLinter struct { + flc fileToLinterToCountMap + maxPerFileFromLinterConfig map[string]int +} + +var _ Processor = &MaxPerFileFromLinter{} + +func NewMaxPerFileFromLinter(cfg *config.Config) *MaxPerFileFromLinter { + maxPerFileFromLinterConfig := map[string]int{} + + if !cfg.Issues.NeedFix { + // if we don't fix we do this limiting to not annoy user; + // otherwise we need to fix all issues in the file at once + maxPerFileFromLinterConfig["gofmt"] = 1 + maxPerFileFromLinterConfig["goimports"] = 1 + } + + return &MaxPerFileFromLinter{ + flc: fileToLinterToCountMap{}, + maxPerFileFromLinterConfig: maxPerFileFromLinterConfig, + } +} + +func (p MaxPerFileFromLinter) Name() string { + return "max_per_file_from_linter" +} + +func (p *MaxPerFileFromLinter) Process(issues []result.Issue) ([]result.Issue, error) { + return filterIssues(issues, func(i *result.Issue) bool { + limit := p.maxPerFileFromLinterConfig[i.FromLinter] + if limit == 0 { + return true + } + + lm := p.flc[i.FilePath()] + if lm == nil { + p.flc[i.FilePath()] = linterToCountMap{} + } + count := p.flc[i.FilePath()][i.FromLinter] + if count >= limit { + return false + } + + p.flc[i.FilePath()][i.FromLinter]++ + return true + }), nil +} + +func (p MaxPerFileFromLinter) Finish() {} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/max_same_issues.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/max_same_issues.go new file mode 100644 index 000000000..84fdf0c05 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/max_same_issues.go @@ -0,0 +1,81 @@ +package processors + +import ( + "sort" + + "github.com/golangci/golangci-lint/pkg/config" + "github.com/golangci/golangci-lint/pkg/logutils" + "github.com/golangci/golangci-lint/pkg/result" +) + +type textToCountMap map[string]int + +type MaxSameIssues struct { + tc textToCountMap + limit int + log logutils.Log + cfg *config.Config +} + +var _ Processor = &MaxSameIssues{} + +func NewMaxSameIssues(limit int, log logutils.Log, cfg *config.Config) *MaxSameIssues { + return &MaxSameIssues{ + tc: textToCountMap{}, + limit: limit, + log: log, + cfg: cfg, + } +} + +func (MaxSameIssues) Name() string { + return "max_same_issues" +} + +func (p *MaxSameIssues) Process(issues []result.Issue) ([]result.Issue, error) { + if p.limit <= 0 { // no limit + return issues, nil + } + + return filterIssues(issues, func(i *result.Issue) bool { + if i.Replacement != nil && p.cfg.Issues.NeedFix { + // we need to fix all issues at once => we need to return all of them + return 
true + } + + p.tc[i.Text]++ // always inc for stat + return p.tc[i.Text] <= p.limit + }), nil +} + +func (p MaxSameIssues) Finish() { + walkStringToIntMapSortedByValue(p.tc, func(text string, count int) { + if count > p.limit { + p.log.Infof("%d/%d issues with text %q were hidden, use --max-same-issues", + count-p.limit, count, text) + } + }) +} + +type kv struct { + Key string + Value int +} + +func walkStringToIntMapSortedByValue(m map[string]int, walk func(k string, v int)) { + var ss []kv + for k, v := range m { + ss = append(ss, kv{ + Key: k, + Value: v, + }) + } + + sort.Slice(ss, func(i, j int) bool { + return ss[i].Value > ss[j].Value + }) + + for _, kv := range ss { + walk(kv.Key, kv.Value) + } +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/nolint.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/nolint.go new file mode 100644 index 000000000..0788a7160 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/nolint.go @@ -0,0 +1,309 @@ +package processors + +import ( + "fmt" + "go/ast" + "go/parser" + "go/token" + "regexp" + "sort" + "strings" + + "github.com/golangci/golangci-lint/pkg/golinters" + "github.com/golangci/golangci-lint/pkg/lint/linter" + "github.com/golangci/golangci-lint/pkg/lint/lintersdb" + "github.com/golangci/golangci-lint/pkg/logutils" + "github.com/golangci/golangci-lint/pkg/result" +) + +var nolintDebugf = logutils.Debug("nolint") + +type ignoredRange struct { + linters []string + matchedIssueFromLinter map[string]bool + result.Range + col int + originalRange *ignoredRange // pre-expanded range (used to match nolintlint issues) +} + +func (i *ignoredRange) doesMatch(issue *result.Issue) bool { + if issue.Line() < i.From || issue.Line() > i.To { + return false + } + + // only allow selective nolinting of nolintlint + nolintFoundForLinter := len(i.linters) == 0 && issue.FromLinter != golinters.NolintlintName + + for _, linterName := range i.linters { + if linterName == issue.FromLinter { + nolintFoundForLinter = true + break + } + } + + if nolintFoundForLinter { + return true + } + + // handle possible unused nolint directives + // nolintlint generates potential issues for every nolint directive and they are filtered out here + if issue.FromLinter == golinters.NolintlintName && issue.ExpectNoLint { + if issue.ExpectedNoLintLinter != "" { + return i.matchedIssueFromLinter[issue.ExpectedNoLintLinter] + } + return len(i.matchedIssueFromLinter) > 0 + } + + return false +} + +type fileData struct { + ignoredRanges []ignoredRange +} + +type filesCache map[string]*fileData + +type Nolint struct { + cache filesCache + dbManager *lintersdb.Manager + enabledLinters map[string]*linter.Config + log logutils.Log + + unknownLintersSet map[string]bool +} + +func NewNolint(log logutils.Log, dbManager *lintersdb.Manager, enabledLinters map[string]*linter.Config) *Nolint { + return &Nolint{ + cache: filesCache{}, + dbManager: dbManager, + enabledLinters: enabledLinters, + log: log, + unknownLintersSet: map[string]bool{}, + } +} + +var _ Processor = &Nolint{} + +func (p Nolint) Name() string { + return "nolint" +} + +func (p *Nolint) Process(issues []result.Issue) ([]result.Issue, error) { + // put nolintlint issues last because we process other issues first to determine which nolint directives are unused + sort.Stable(sortWithNolintlintLast(issues)) + return filterIssuesErr(issues, p.shouldPassIssue) +} + +func (p *Nolint) getOrCreateFileData(i *result.Issue) (*fileData, error) { + fd := 
p.cache[i.FilePath()] + if fd != nil { + return fd, nil + } + + fd = &fileData{} + p.cache[i.FilePath()] = fd + + if i.FilePath() == "" { + return nil, fmt.Errorf("no file path for issue") + } + + // TODO: migrate this parsing to go/analysis facts + // or cache them somehow per file. + + // Don't use cached AST because they consume a lot of memory on large projects. + fset := token.NewFileSet() + f, err := parser.ParseFile(fset, i.FilePath(), nil, parser.ParseComments) + if err != nil { + // Don't report error because it's already must be reporter by typecheck or go/analysis. + return fd, nil + } + + fd.ignoredRanges = p.buildIgnoredRangesForFile(f, fset, i.FilePath()) + nolintDebugf("file %s: built nolint ranges are %+v", i.FilePath(), fd.ignoredRanges) + return fd, nil +} + +func (p *Nolint) buildIgnoredRangesForFile(f *ast.File, fset *token.FileSet, filePath string) []ignoredRange { + inlineRanges := p.extractFileCommentsInlineRanges(fset, f.Comments...) + nolintDebugf("file %s: inline nolint ranges are %+v", filePath, inlineRanges) + + if len(inlineRanges) == 0 { + return nil + } + + e := rangeExpander{ + fset: fset, + inlineRanges: inlineRanges, + } + + ast.Walk(&e, f) + + // TODO: merge all ranges: there are repeated ranges + allRanges := append([]ignoredRange{}, inlineRanges...) + allRanges = append(allRanges, e.expandedRanges...) + + return allRanges +} + +func (p *Nolint) shouldPassIssue(i *result.Issue) (bool, error) { + nolintDebugf("got issue: %v", *i) + if i.FromLinter == golinters.NolintlintName && i.ExpectNoLint && i.ExpectedNoLintLinter != "" { + // don't expect disabled linters to cover their nolint statements + nolintDebugf("enabled linters: %v", p.enabledLinters) + if p.enabledLinters[i.ExpectedNoLintLinter] == nil { + return false, nil + } + nolintDebugf("checking that lint issue was used for %s: %v", i.ExpectedNoLintLinter, i) + } + + fd, err := p.getOrCreateFileData(i) + if err != nil { + return false, err + } + + for _, ir := range fd.ignoredRanges { + if ir.doesMatch(i) { + nolintDebugf("found ignored range for issue %v: %v", i, ir) + ir.matchedIssueFromLinter[i.FromLinter] = true + if ir.originalRange != nil { + ir.originalRange.matchedIssueFromLinter[i.FromLinter] = true + } + return false, nil + } + } + + return true, nil +} + +type rangeExpander struct { + fset *token.FileSet + inlineRanges []ignoredRange + expandedRanges []ignoredRange +} + +func (e *rangeExpander) Visit(node ast.Node) ast.Visitor { + if node == nil { + return e + } + + nodeStartPos := e.fset.Position(node.Pos()) + nodeStartLine := nodeStartPos.Line + nodeEndLine := e.fset.Position(node.End()).Line + + var foundRange *ignoredRange + for _, r := range e.inlineRanges { + if r.To == nodeStartLine-1 && nodeStartPos.Column == r.col { + r := r + foundRange = &r + break + } + } + if foundRange == nil { + return e + } + + expandedRange := *foundRange + // store the original unexpanded range for matching nolintlint issues + if expandedRange.originalRange == nil { + expandedRange.originalRange = foundRange + } + if expandedRange.To < nodeEndLine { + expandedRange.To = nodeEndLine + } + + nolintDebugf("found range is %v for node %#v [%d;%d], expanded range is %v", + *foundRange, node, nodeStartLine, nodeEndLine, expandedRange) + e.expandedRanges = append(e.expandedRanges, expandedRange) + + return e +} + +func (p *Nolint) extractFileCommentsInlineRanges(fset *token.FileSet, comments ...*ast.CommentGroup) []ignoredRange { + var ret []ignoredRange + for _, g := range comments { + for _, c := range g.List { 
+ ir := p.extractInlineRangeFromComment(c.Text, g, fset) + if ir != nil { + ret = append(ret, *ir) + } + } + } + + return ret +} + +func (p *Nolint) extractInlineRangeFromComment(text string, g ast.Node, fset *token.FileSet) *ignoredRange { + text = strings.TrimLeft(text, "/ ") + if ok, _ := regexp.MatchString(`^nolint( |:|$)`, text); !ok { + return nil + } + + buildRange := func(linters []string) *ignoredRange { + pos := fset.Position(g.Pos()) + return &ignoredRange{ + Range: result.Range{ + From: pos.Line, + To: fset.Position(g.End()).Line, + }, + col: pos.Column, + linters: linters, + matchedIssueFromLinter: make(map[string]bool), + } + } + + if !strings.HasPrefix(text, "nolint:") { + return buildRange(nil) // ignore all linters + } + + // ignore specific linters + var linters []string + text = strings.Split(text, "//")[0] // allow another comment after this comment + linterItems := strings.Split(strings.TrimPrefix(text, "nolint:"), ",") + for _, linter := range linterItems { + linterName := strings.ToLower(strings.TrimSpace(linter)) + + lcs := p.dbManager.GetLinterConfigs(linterName) + if lcs == nil { + p.unknownLintersSet[linterName] = true + linters = append(linters, linterName) + nolintDebugf("unknown linter %s on line %d", linterName, fset.Position(g.Pos()).Line) + continue + } + + for _, lc := range lcs { + linters = append(linters, lc.Name()) // normalize name to work with aliases + } + } + + nolintDebugf("%d: linters are %s", fset.Position(g.Pos()).Line, linters) + return buildRange(linters) +} + +func (p Nolint) Finish() { + if len(p.unknownLintersSet) == 0 { + return + } + + unknownLinters := []string{} + for name := range p.unknownLintersSet { + unknownLinters = append(unknownLinters, name) + } + sort.Strings(unknownLinters) + + p.log.Warnf("Found unknown linters in //nolint directives: %s", strings.Join(unknownLinters, ", ")) +} + +// put nolintlint last +type sortWithNolintlintLast []result.Issue + +func (issues sortWithNolintlintLast) Len() int { + return len(issues) +} + +func (issues sortWithNolintlintLast) Less(i, j int) bool { + return issues[i].FromLinter != golinters.NolintlintName && issues[j].FromLinter == golinters.NolintlintName +} + +func (issues sortWithNolintlintLast) Swap(i, j int) { + issues[j], issues[i] = issues[i], issues[j] +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/path_prefixer.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/path_prefixer.go new file mode 100644 index 000000000..5ce940b39 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/path_prefixer.go @@ -0,0 +1,37 @@ +package processors + +import ( + "path" + + "github.com/golangci/golangci-lint/pkg/result" +) + +// PathPrefixer adds a customizable prefix to every output path +type PathPrefixer struct { + prefix string +} + +var _ Processor = new(PathPrefixer) + +// NewPathPrefixer returns a new path prefixer for the provided string +func NewPathPrefixer(prefix string) *PathPrefixer { + return &PathPrefixer{prefix: prefix} +} + +// Name returns the name of this processor +func (*PathPrefixer) Name() string { + return "path_prefixer" +} + +// Process adds the prefix to each path +func (p *PathPrefixer) Process(issues []result.Issue) ([]result.Issue, error) { + if p.prefix != "" { + for i := range issues { + issues[i].Pos.Filename = path.Join(p.prefix, issues[i].Pos.Filename) + } + } + return issues, nil +} + +// Finish is implemented to satisfy the Processor interface +func (*PathPrefixer) Finish() {} diff --git 
a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/path_prettifier.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/path_prettifier.go new file mode 100644 index 000000000..3a140999c --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/path_prettifier.go @@ -0,0 +1,48 @@ +package processors + +import ( + "fmt" + "path/filepath" + + "github.com/golangci/golangci-lint/pkg/fsutils" + "github.com/golangci/golangci-lint/pkg/result" +) + +type PathPrettifier struct { + root string +} + +var _ Processor = PathPrettifier{} + +func NewPathPrettifier() *PathPrettifier { + root, err := fsutils.Getwd() + if err != nil { + panic(fmt.Sprintf("Can't get working dir: %s", err)) + } + return &PathPrettifier{ + root: root, + } +} + +func (p PathPrettifier) Name() string { + return "path_prettifier" +} + +func (p PathPrettifier) Process(issues []result.Issue) ([]result.Issue, error) { + return transformIssues(issues, func(i *result.Issue) *result.Issue { + if !filepath.IsAbs(i.FilePath()) { + return i + } + + rel, err := fsutils.ShortestRelPath(i.FilePath(), "") + if err != nil { + return i + } + + newI := i + newI.Pos.Filename = rel + return newI + }), nil +} + +func (p PathPrettifier) Finish() {} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/path_shortener.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/path_shortener.go new file mode 100644 index 000000000..484f7f1f1 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/path_shortener.go @@ -0,0 +1,40 @@ +package processors + +import ( + "fmt" + "strings" + + "github.com/golangci/golangci-lint/pkg/fsutils" + "github.com/golangci/golangci-lint/pkg/result" +) + +type PathShortener struct { + wd string +} + +var _ Processor = PathShortener{} + +func NewPathShortener() *PathShortener { + wd, err := fsutils.Getwd() + if err != nil { + panic(fmt.Sprintf("Can't get working dir: %s", err)) + } + return &PathShortener{ + wd: wd, + } +} + +func (p PathShortener) Name() string { + return "path_shortener" +} + +func (p PathShortener) Process(issues []result.Issue) ([]result.Issue, error) { + return transformIssues(issues, func(i *result.Issue) *result.Issue { + newI := i + newI.Text = strings.Replace(newI.Text, p.wd+"/", "", -1) + newI.Text = strings.Replace(newI.Text, p.wd, "", -1) + return newI + }), nil +} + +func (p PathShortener) Finish() {} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/processor.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/processor.go new file mode 100644 index 000000000..1a7a40434 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/processor.go @@ -0,0 +1,11 @@ +package processors + +import ( + "github.com/golangci/golangci-lint/pkg/result" +) + +type Processor interface { + Process(issues []result.Issue) ([]result.Issue, error) + Name() string + Finish() +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/severity_rules.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/severity_rules.go new file mode 100644 index 000000000..7c9a4c1d6 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/severity_rules.go @@ -0,0 +1,103 @@ +package processors + +import ( + "regexp" + + "github.com/golangci/golangci-lint/pkg/fsutils" + "github.com/golangci/golangci-lint/pkg/logutils" + "github.com/golangci/golangci-lint/pkg/result" +) + +type severityRule struct { + 
baseRule + severity string +} + +type SeverityRule struct { + BaseRule + Severity string +} + +type SeverityRules struct { + defaultSeverity string + rules []severityRule + lineCache *fsutils.LineCache + log logutils.Log +} + +func NewSeverityRules(defaultSeverity string, rules []SeverityRule, lineCache *fsutils.LineCache, log logutils.Log) *SeverityRules { + r := &SeverityRules{ + lineCache: lineCache, + log: log, + defaultSeverity: defaultSeverity, + } + r.rules = createSeverityRules(rules, "(?i)") + + return r +} + +func createSeverityRules(rules []SeverityRule, prefix string) []severityRule { + parsedRules := make([]severityRule, 0, len(rules)) + for _, rule := range rules { + parsedRule := severityRule{} + parsedRule.linters = rule.Linters + parsedRule.severity = rule.Severity + if rule.Text != "" { + parsedRule.text = regexp.MustCompile(prefix + rule.Text) + } + if rule.Source != "" { + parsedRule.source = regexp.MustCompile(prefix + rule.Source) + } + if rule.Path != "" { + parsedRule.path = regexp.MustCompile(rule.Path) + } + parsedRules = append(parsedRules, parsedRule) + } + return parsedRules +} + +func (p SeverityRules) Process(issues []result.Issue) ([]result.Issue, error) { + if len(p.rules) == 0 && p.defaultSeverity == "" { + return issues, nil + } + return transformIssues(issues, func(i *result.Issue) *result.Issue { + for _, rule := range p.rules { + rule := rule + + ruleSeverity := p.defaultSeverity + if rule.severity != "" { + ruleSeverity = rule.severity + } + + if rule.match(i, p.lineCache, p.log) { + i.Severity = ruleSeverity + return i + } + } + i.Severity = p.defaultSeverity + return i + }), nil +} + +func (SeverityRules) Name() string { return "severity-rules" } +func (SeverityRules) Finish() {} + +var _ Processor = SeverityRules{} + +type SeverityRulesCaseSensitive struct { + *SeverityRules +} + +func NewSeverityRulesCaseSensitive(defaultSeverity string, rules []SeverityRule, + lineCache *fsutils.LineCache, log logutils.Log) *SeverityRulesCaseSensitive { + r := &SeverityRules{ + lineCache: lineCache, + log: log, + defaultSeverity: defaultSeverity, + } + r.rules = createSeverityRules(rules, "") + + return &SeverityRulesCaseSensitive{r} +} + +func (SeverityRulesCaseSensitive) Name() string { return "severity-rules-case-sensitive" } diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/skip_dirs.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/skip_dirs.go new file mode 100644 index 000000000..d657c5a04 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/skip_dirs.go @@ -0,0 +1,143 @@ +package processors + +import ( + "path/filepath" + "regexp" + "strings" + + "github.com/pkg/errors" + + "github.com/golangci/golangci-lint/pkg/logutils" + "github.com/golangci/golangci-lint/pkg/result" +) + +type skipStat struct { + pattern string + count int +} + +type SkipDirs struct { + patterns []*regexp.Regexp + log logutils.Log + skippedDirs map[string]*skipStat + absArgsDirs []string + skippedDirsCache map[string]bool +} + +var _ Processor = (*SkipDirs)(nil) + +const goFileSuffix = ".go" + +func NewSkipDirs(patterns []string, log logutils.Log, runArgs []string) (*SkipDirs, error) { + var patternsRe []*regexp.Regexp + for _, p := range patterns { + p = normalizePathInRegex(p) + patternRe, err := regexp.Compile(p) + if err != nil { + return nil, errors.Wrapf(err, "can't compile regexp %q", p) + } + patternsRe = append(patternsRe, patternRe) + } + + if len(runArgs) == 0 { + runArgs = append(runArgs, "./...") + 
} + var absArgsDirs []string + for _, arg := range runArgs { + base := filepath.Base(arg) + if base == "..." || strings.HasSuffix(base, goFileSuffix) { + arg = filepath.Dir(arg) + } + + absArg, err := filepath.Abs(arg) + if err != nil { + return nil, errors.Wrapf(err, "failed to abs-ify arg %q", arg) + } + absArgsDirs = append(absArgsDirs, absArg) + } + + return &SkipDirs{ + patterns: patternsRe, + log: log, + skippedDirs: map[string]*skipStat{}, + absArgsDirs: absArgsDirs, + skippedDirsCache: map[string]bool{}, + }, nil +} + +func (p *SkipDirs) Name() string { + return "skip_dirs" +} + +func (p *SkipDirs) Process(issues []result.Issue) ([]result.Issue, error) { + if len(p.patterns) == 0 { + return issues, nil + } + + return filterIssues(issues, p.shouldPassIssue), nil +} + +func (p *SkipDirs) shouldPassIssue(i *result.Issue) bool { + if filepath.IsAbs(i.FilePath()) { + if !isSpecialAutogeneratedFile(i.FilePath()) { + p.log.Warnf("Got abs path %s in skip dirs processor, it should be relative", i.FilePath()) + } + return true + } + + issueRelDir := filepath.Dir(i.FilePath()) + + if toPass, ok := p.skippedDirsCache[issueRelDir]; ok { + if !toPass { + p.skippedDirs[issueRelDir].count++ + } + return toPass + } + + issueAbsDir, err := filepath.Abs(issueRelDir) + if err != nil { + p.log.Warnf("Can't abs-ify path %q: %s", issueRelDir, err) + return true + } + + toPass := p.shouldPassIssueDirs(issueRelDir, issueAbsDir) + p.skippedDirsCache[issueRelDir] = toPass + return toPass +} + +func (p *SkipDirs) shouldPassIssueDirs(issueRelDir, issueAbsDir string) bool { + for _, absArgDir := range p.absArgsDirs { + if absArgDir == issueAbsDir { + // we must not skip issues if they are from explicitly set dirs + // even if they match skip patterns + return true + } + } + + // We use issueRelDir for matching: it's the relative to the current + // work dir path of directory of source file with the issue. It can lead + // to unexpected behavior if we're analyzing files out of current work dir. + // The alternative solution is to find relative to args path, but it has + // disadvantages (https://github.com/golangci/golangci-lint/pull/313). 
+ + for _, pattern := range p.patterns { + if pattern.MatchString(issueRelDir) { + ps := pattern.String() + if p.skippedDirs[issueRelDir] == nil { + p.skippedDirs[issueRelDir] = &skipStat{ + pattern: ps, + } + } + p.skippedDirs[issueRelDir].count++ + return false + } + } + + return true +} + +func (p *SkipDirs) Finish() { + for dir, stat := range p.skippedDirs { + p.log.Infof("Skipped %d issues from dir %s by pattern %s", stat.count, dir, stat.pattern) + } +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/skip_files.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/skip_files.go new file mode 100644 index 000000000..1e2ca7aeb --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/skip_files.go @@ -0,0 +1,52 @@ +package processors + +import ( + "fmt" + "regexp" + + "github.com/golangci/golangci-lint/pkg/result" +) + +type SkipFiles struct { + patterns []*regexp.Regexp +} + +var _ Processor = (*SkipFiles)(nil) + +func NewSkipFiles(patterns []string) (*SkipFiles, error) { + var patternsRe []*regexp.Regexp + for _, p := range patterns { + p = normalizePathInRegex(p) + patternRe, err := regexp.Compile(p) + if err != nil { + return nil, fmt.Errorf("can't compile regexp %q: %s", p, err) + } + patternsRe = append(patternsRe, patternRe) + } + + return &SkipFiles{ + patterns: patternsRe, + }, nil +} + +func (p SkipFiles) Name() string { + return "skip_files" +} + +func (p SkipFiles) Process(issues []result.Issue) ([]result.Issue, error) { + if len(p.patterns) == 0 { + return issues, nil + } + + return filterIssues(issues, func(i *result.Issue) bool { + for _, p := range p.patterns { + if p.MatchString(i.FilePath()) { + return false + } + } + + return true + }), nil +} + +func (p SkipFiles) Finish() {} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/sort_results.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/sort_results.go new file mode 100644 index 000000000..e726c3adf --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/sort_results.go @@ -0,0 +1,173 @@ +package processors + +import ( + "sort" + "strings" + + "github.com/golangci/golangci-lint/pkg/config" + "github.com/golangci/golangci-lint/pkg/result" +) + +// Base propose of this functionality to sort results (issues) +// produced by various linters by analyzing code. We achieving this +// by sorting results.Issues using processor step, and chain based +// rules that can compare different properties of the Issues struct. + +var _ Processor = (*SortResults)(nil) + +type SortResults struct { + cmp comparator + cfg *config.Config +} + +func NewSortResults(cfg *config.Config) *SortResults { + // For sorting we are comparing (in next order): file names, line numbers, + // position, and finally - giving up. + return &SortResults{ + cmp: ByName{ + next: ByLine{ + next: ByColumn{}, + }, + }, + cfg: cfg, + } +} + +// Process is performing sorting of the result issues. 
+func (sr SortResults) Process(issues []result.Issue) ([]result.Issue, error) { + if !sr.cfg.Output.SortResults { + return issues, nil + } + + sort.Slice(issues, func(i, j int) bool { + return sr.cmp.Compare(&issues[i], &issues[j]) == Less + }) + + return issues, nil +} + +func (sr SortResults) Name() string { return "sort_results" } +func (sr SortResults) Finish() {} + +type compareResult int + +const ( + Less compareResult = iota - 1 + Equal + Greater + None +) + +func (c compareResult) isNeutral() bool { + // return true if compare result is incomparable or equal. + return c == None || c == Equal +} + +//nolint:exhaustive +func (c compareResult) String() string { + switch c { + case Less: + return "Less" + case Equal: + return "Equal" + case Greater: + return "Greater" + } + + return "None" +} + +// comparator describe how to implement compare for two "issues" lexicographically +type comparator interface { + Compare(a, b *result.Issue) compareResult + Next() comparator +} + +var ( + _ comparator = (*ByName)(nil) + _ comparator = (*ByLine)(nil) + _ comparator = (*ByColumn)(nil) +) + +type ByName struct{ next comparator } + +//nolint:golint +func (cmp ByName) Next() comparator { return cmp.next } + +//nolint:golint +func (cmp ByName) Compare(a, b *result.Issue) compareResult { + var res compareResult + + if res = compareResult(strings.Compare(a.FilePath(), b.FilePath())); !res.isNeutral() { + return res + } + + if next := cmp.Next(); next != nil { + return next.Compare(a, b) + } + + return res +} + +type ByLine struct{ next comparator } + +//nolint:golint +func (cmp ByLine) Next() comparator { return cmp.next } + +//nolint:golint +func (cmp ByLine) Compare(a, b *result.Issue) compareResult { + var res compareResult + + if res = numericCompare(a.Line(), b.Line()); !res.isNeutral() { + return res + } + + if next := cmp.Next(); next != nil { + return next.Compare(a, b) + } + + return res +} + +type ByColumn struct{ next comparator } + +//nolint:golint +func (cmp ByColumn) Next() comparator { return cmp.next } + +//nolint:golint +func (cmp ByColumn) Compare(a, b *result.Issue) compareResult { + var res compareResult + + if res = numericCompare(a.Column(), b.Column()); !res.isNeutral() { + return res + } + + if next := cmp.Next(); next != nil { + return next.Compare(a, b) + } + + return res +} + +func numericCompare(a, b int) compareResult { + var ( + isValuesInvalid = a < 0 || b < 0 + isZeroValuesBoth = a == 0 && b == 0 + isEqual = a == b + isZeroValueInA = b > 0 && a == 0 + isZeroValueInB = a > 0 && b == 0 + ) + + switch { + case isZeroValuesBoth || isEqual: + return Equal + case isValuesInvalid || isZeroValueInA || isZeroValueInB: + return None + case a > b: + return Greater + case a < b: + return Less + } + + return Equal +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/source_code.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/source_code.go new file mode 100644 index 000000000..cfd73cb98 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/source_code.go @@ -0,0 +1,47 @@ +package processors + +import ( + "github.com/golangci/golangci-lint/pkg/fsutils" + "github.com/golangci/golangci-lint/pkg/logutils" + "github.com/golangci/golangci-lint/pkg/result" +) + +type SourceCode struct { + lineCache *fsutils.LineCache + log logutils.Log +} + +var _ Processor = SourceCode{} + +func NewSourceCode(lc *fsutils.LineCache, log logutils.Log) *SourceCode { + return &SourceCode{ + lineCache: lc, + log: log, + } +} + +func (p 
SourceCode) Name() string { + return "source_code" +} + +func (p SourceCode) Process(issues []result.Issue) ([]result.Issue, error) { + return transformIssues(issues, func(i *result.Issue) *result.Issue { + newI := *i + + lineRange := i.GetLineRange() + for lineNumber := lineRange.From; lineNumber <= lineRange.To; lineNumber++ { + line, err := p.lineCache.GetLine(i.FilePath(), lineNumber) + if err != nil { + p.log.Warnf("Failed to get line %d for file %s: %s", + lineNumber, i.FilePath(), err) + return i + } + + newI.SourceLines = append(newI.SourceLines, line) + } + + return &newI + }), nil +} + +func (p SourceCode) Finish() {} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/uniq_by_line.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/uniq_by_line.go new file mode 100644 index 000000000..17167dde5 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/uniq_by_line.go @@ -0,0 +1,58 @@ +package processors + +import ( + "github.com/golangci/golangci-lint/pkg/config" + "github.com/golangci/golangci-lint/pkg/result" +) + +type lineToCount map[int]int +type fileToLineToCount map[string]lineToCount + +type UniqByLine struct { + flc fileToLineToCount + cfg *config.Config +} + +func NewUniqByLine(cfg *config.Config) *UniqByLine { + return &UniqByLine{ + flc: fileToLineToCount{}, + cfg: cfg, + } +} + +var _ Processor = &UniqByLine{} + +func (p UniqByLine) Name() string { + return "uniq_by_line" +} + +func (p *UniqByLine) Process(issues []result.Issue) ([]result.Issue, error) { + if !p.cfg.Output.UniqByLine { + return issues, nil + } + + return filterIssues(issues, func(i *result.Issue) bool { + if i.Replacement != nil && p.cfg.Issues.NeedFix { + // if issue will be auto-fixed we shouldn't collapse issues: + // e.g. one line can contain 2 misspellings, they will be in 2 issues and misspell should fix both of them. 
+ return true + } + + lc := p.flc[i.FilePath()] + if lc == nil { + lc = lineToCount{} + p.flc[i.FilePath()] = lc + } + + const limit = 1 + count := lc[i.Line()] + if count == limit { + return false + } + + lc[i.Line()]++ + return true + }), nil +} + +func (p UniqByLine) Finish() {} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/utils.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/utils.go new file mode 100644 index 000000000..7108fd3b3 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/utils.go @@ -0,0 +1,62 @@ +package processors + +import ( + "path/filepath" + "regexp" + "strings" + + "github.com/pkg/errors" + + "github.com/golangci/golangci-lint/pkg/result" +) + +func filterIssues(issues []result.Issue, filter func(i *result.Issue) bool) []result.Issue { + retIssues := make([]result.Issue, 0, len(issues)) + for i := range issues { + if filter(&issues[i]) { + retIssues = append(retIssues, issues[i]) + } + } + + return retIssues +} + +func filterIssuesErr(issues []result.Issue, filter func(i *result.Issue) (bool, error)) ([]result.Issue, error) { + retIssues := make([]result.Issue, 0, len(issues)) + for i := range issues { + ok, err := filter(&issues[i]) + if err != nil { + return nil, errors.Wrapf(err, "can't filter issue %#v", issues[i]) + } + + if ok { + retIssues = append(retIssues, issues[i]) + } + } + + return retIssues, nil +} + +func transformIssues(issues []result.Issue, transform func(i *result.Issue) *result.Issue) []result.Issue { + retIssues := make([]result.Issue, 0, len(issues)) + for i := range issues { + newI := transform(&issues[i]) + if newI != nil { + retIssues = append(retIssues, *newI) + } + } + + return retIssues +} + +var separatorToReplace = regexp.QuoteMeta(string(filepath.Separator)) + +func normalizePathInRegex(path string) string { + if filepath.Separator == '/' { + return path + } + + // This replacing should be safe because "/" are disallowed in Windows + // https://docs.microsoft.com/ru-ru/windows/win32/fileio/naming-a-file + return strings.ReplaceAll(path, "/", separatorToReplace) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/sliceutil/sliceutil.go b/vendor/github.com/golangci/golangci-lint/pkg/sliceutil/sliceutil.go new file mode 100644 index 000000000..cb89e34e0 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/sliceutil/sliceutil.go @@ -0,0 +1,17 @@ +package sliceutil + +// IndexOf get the index of the given value in the given string slice, +// or -1 if not found. +func IndexOf(slice []string, value string) int { + for i, v := range slice { + if v == value { + return i + } + } + return -1 +} + +// Contains check if a string slice contains a value. 
+func Contains(slice []string, value string) bool { + return IndexOf(slice, value) != -1 +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/timeutils/stopwatch.go b/vendor/github.com/golangci/golangci-lint/pkg/timeutils/stopwatch.go new file mode 100644 index 000000000..9628bd80f --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/timeutils/stopwatch.go @@ -0,0 +1,116 @@ +package timeutils + +import ( + "fmt" + "sort" + "strings" + "sync" + "time" + + "github.com/golangci/golangci-lint/pkg/logutils" +) + +const noStagesText = "no stages" + +type Stopwatch struct { + name string + startedAt time.Time + stages map[string]time.Duration + log logutils.Log + + sync.Mutex +} + +func NewStopwatch(name string, log logutils.Log) *Stopwatch { + return &Stopwatch{ + name: name, + startedAt: time.Now(), + stages: map[string]time.Duration{}, + log: log, + } +} + +type stageDuration struct { + name string + d time.Duration +} + +func (s *Stopwatch) stageDurationsSorted() []stageDuration { + stageDurations := []stageDuration{} + for n, d := range s.stages { + stageDurations = append(stageDurations, stageDuration{ + name: n, + d: d, + }) + } + sort.Slice(stageDurations, func(i, j int) bool { + return stageDurations[i].d > stageDurations[j].d + }) + return stageDurations +} + +func (s *Stopwatch) sprintStages() string { + if len(s.stages) == 0 { + return noStagesText + } + + stageDurations := s.stageDurationsSorted() + + stagesStrings := []string{} + for _, s := range stageDurations { + stagesStrings = append(stagesStrings, fmt.Sprintf("%s: %s", s.name, s.d)) + } + + return fmt.Sprintf("stages: %s", strings.Join(stagesStrings, ", ")) +} + +func (s *Stopwatch) sprintTopStages(n int) string { + if len(s.stages) == 0 { + return noStagesText + } + + stageDurations := s.stageDurationsSorted() + + stagesStrings := []string{} + for i := 0; i < len(stageDurations) && i < n; i++ { + s := stageDurations[i] + stagesStrings = append(stagesStrings, fmt.Sprintf("%s: %s", s.name, s.d)) + } + + return fmt.Sprintf("top %d stages: %s", n, strings.Join(stagesStrings, ", ")) +} + +func (s *Stopwatch) Print() { + p := fmt.Sprintf("%s took %s", s.name, time.Since(s.startedAt)) + if len(s.stages) == 0 { + s.log.Infof("%s", p) + return + } + + s.log.Infof("%s with %s", p, s.sprintStages()) +} + +func (s *Stopwatch) PrintStages() { + var stagesDuration time.Duration + for _, s := range s.stages { + stagesDuration += s + } + s.log.Infof("%s took %s with %s", s.name, stagesDuration, s.sprintStages()) +} + +func (s *Stopwatch) PrintTopStages(n int) { + var stagesDuration time.Duration + for _, s := range s.stages { + stagesDuration += s + } + s.log.Infof("%s took %s with %s", s.name, stagesDuration, s.sprintTopStages(n)) +} + +func (s *Stopwatch) TrackStage(name string, f func()) { + startedAt := time.Now() + f() + + s.Lock() + s.stages[name] += time.Since(startedAt) + s.Unlock() +} diff --git a/vendor/github.com/golangci/lint-1/.travis.yml b/vendor/github.com/golangci/lint-1/.travis.yml new file mode 100644 index 000000000..bc2f4b311 --- /dev/null +++ b/vendor/github.com/golangci/lint-1/.travis.yml @@ -0,0 +1,19 @@ +sudo: false +language: go +go: + - 1.10.x + - 1.11.x + - master + +go_import_path: github.com/golangci/lint-1 + +install: + - go get -t -v ./... + +script: + - go test -v -race ./... 
+ +matrix: + allow_failures: + - go: master + fast_finish: true diff --git a/vendor/github.com/golangci/lint-1/CONTRIBUTING.md b/vendor/github.com/golangci/lint-1/CONTRIBUTING.md new file mode 100644 index 000000000..2e39a1c67 --- /dev/null +++ b/vendor/github.com/golangci/lint-1/CONTRIBUTING.md @@ -0,0 +1,15 @@ +# Contributing to Golint + +## Before filing an issue: + +### Are you having trouble building golint? + +Check you have the latest version of its dependencies. Run +``` +go get -u github.com/golangci/lint-1/golint +``` +If you still have problems, consider searching for existing issues before filing a new issue. + +## Before sending a pull request: + +Have you understood the purpose of golint? Make sure to carefully read `README`. diff --git a/vendor/github.com/golangci/lint-1/LICENSE b/vendor/github.com/golangci/lint-1/LICENSE new file mode 100644 index 000000000..65d761bc9 --- /dev/null +++ b/vendor/github.com/golangci/lint-1/LICENSE @@ -0,0 +1,27 @@ +Copyright (c) 2013 The Go Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/github.com/golangci/lint-1/README.md b/vendor/github.com/golangci/lint-1/README.md new file mode 100644 index 000000000..2de6ee835 --- /dev/null +++ b/vendor/github.com/golangci/lint-1/README.md @@ -0,0 +1,88 @@ +Golint is a linter for Go source code. + +[![Build Status](https://travis-ci.org/golang/lint.svg?branch=master)](https://travis-ci.org/golang/lint) + +## Installation + +Golint requires a +[supported release of Go](https://golang.org/doc/devel/release.html#policy). + + go get -u github.com/golangci/lint-1/golint + +To find out where `golint` was installed you can run `go list -f {{.Target}} github.com/golangci/lint-1/golint`. For `golint` to be used globally add that directory to the `$PATH` environment setting. + +## Usage + +Invoke `golint` with one or more filenames, directories, or packages named +by its import path. Golint uses the same +[import path syntax](https://golang.org/cmd/go/#hdr-Import_path_syntax) as +the `go` command and therefore +also supports relative import paths like `./...`. 
Additionally the `...` +wildcard can be used as suffix on relative and absolute file paths to recurse +into them. + +The output of this tool is a list of suggestions in Vim quickfix format, +which is accepted by lots of different editors. + +## Purpose + +Golint differs from gofmt. Gofmt reformats Go source code, whereas +golint prints out style mistakes. + +Golint differs from govet. Govet is concerned with correctness, whereas +golint is concerned with coding style. Golint is in use at Google, and it +seeks to match the accepted style of the open source Go project. + +The suggestions made by golint are exactly that: suggestions. +Golint is not perfect, and has both false positives and false negatives. +Do not treat its output as a gold standard. We will not be adding pragmas +or other knobs to suppress specific warnings, so do not expect or require +code to be completely "lint-free". +In short, this tool is not, and will never be, trustworthy enough for its +suggestions to be enforced automatically, for example as part of a build process. +Golint makes suggestions for many of the mechanically checkable items listed in +[Effective Go](https://golang.org/doc/effective_go.html) and the +[CodeReviewComments wiki page](https://golang.org/wiki/CodeReviewComments). + +## Scope + +Golint is meant to carry out the stylistic conventions put forth in +[Effective Go](https://golang.org/doc/effective_go.html) and +[CodeReviewComments](https://golang.org/wiki/CodeReviewComments). +Changes that are not aligned with those documents will not be considered. + +## Contributions + +Contributions to this project are welcome provided they are [in scope](#scope), +though please send mail before starting work on anything major. +Contributors retain their copyright, so we need you to fill out +[a short form](https://developers.google.com/open-source/cla/individual) +before we can accept your contribution. + +## Vim + +Add this to your ~/.vimrc: + + set rtp+=$GOPATH/src/github.com/golangci/lint-1/misc/vim + +If you have multiple entries in your GOPATH, replace `$GOPATH` with the right value. + +Running `:Lint` will run golint on the current file and populate the quickfix list. + +Optionally, add this to your `~/.vimrc` to automatically run `golint` on `:w` + + autocmd BufWritePost,FileWritePost *.go execute 'Lint' | cwindow + + +## Emacs + +Add this to your `.emacs` file: + + (add-to-list 'load-path (concat (getenv "GOPATH") "/src/github.com/golang/lint/misc/emacs")) + (require 'golint) + +If you have multiple entries in your GOPATH, replace `$GOPATH` with the right value. + +Running M-x golint will run golint on the current file. + +For more usage, see [Compilation-Mode](http://www.gnu.org/software/emacs/manual/html_node/emacs/Compilation-Mode.html). 
diff --git a/vendor/github.com/golangci/lint-1/go.mod b/vendor/github.com/golangci/lint-1/go.mod new file mode 100644 index 000000000..fafbd340b --- /dev/null +++ b/vendor/github.com/golangci/lint-1/go.mod @@ -0,0 +1,3 @@ +module github.com/golangci/lint-1 + +require golang.org/x/tools v0.0.0-20190311212946-11955173bddd diff --git a/vendor/github.com/golangci/lint-1/go.sum b/vendor/github.com/golangci/lint-1/go.sum new file mode 100644 index 000000000..7d0e2e618 --- /dev/null +++ b/vendor/github.com/golangci/lint-1/go.sum @@ -0,0 +1,6 @@ +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/tools v0.0.0-20190311212946-11955173bddd h1:/e+gpKk9r3dJobndpTytxS2gOy6m5uvpg+ISQoEcusQ= +golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= diff --git a/vendor/github.com/golangci/lint-1/lint.go b/vendor/github.com/golangci/lint-1/lint.go new file mode 100644 index 000000000..886c85bf0 --- /dev/null +++ b/vendor/github.com/golangci/lint-1/lint.go @@ -0,0 +1,1655 @@ +// Copyright (c) 2013 The Go Authors. All rights reserved. +// +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file or at +// https://developers.google.com/open-source/licenses/bsd. + +// Package lint contains a linter for Go source code. +package lint // import "github.com/golangci/lint-1" + +import ( + "bufio" + "bytes" + "fmt" + "go/ast" + "go/parser" + "go/printer" + "go/token" + "go/types" + "io/ioutil" + "regexp" + "sort" + "strconv" + "strings" + "unicode" + "unicode/utf8" + + "golang.org/x/tools/go/ast/astutil" + "golang.org/x/tools/go/gcexportdata" +) + +const styleGuideBase = "https://golang.org/wiki/CodeReviewComments" + +// A Linter lints Go source code. +type Linter struct { +} + +// Problem represents a problem in some source code. +type Problem struct { + Position token.Position // position in source file + Text string // the prose that describes the problem + Link string // (optional) the link to the style guide for the problem + Confidence float64 // a value in (0,1] estimating the confidence in this problem's correctness + LineText string // the source line + Category string // a short name for the general category of the problem + + // If the problem has a suggested fix (the minority case), + // ReplacementLine is a full replacement for the relevant line of the source file. + ReplacementLine string +} + +func (p *Problem) String() string { + if p.Link != "" { + return p.Text + "\n\n" + p.Link + } + return p.Text +} + +type byPosition []Problem + +func (p byPosition) Len() int { return len(p) } +func (p byPosition) Swap(i, j int) { p[i], p[j] = p[j], p[i] } + +func (p byPosition) Less(i, j int) bool { + pi, pj := p[i].Position, p[j].Position + + if pi.Filename != pj.Filename { + return pi.Filename < pj.Filename + } + if pi.Line != pj.Line { + return pi.Line < pj.Line + } + if pi.Column != pj.Column { + return pi.Column < pj.Column + } + + return p[i].Text < p[j].Text +} + +// Lint lints src. 
+func (l *Linter) Lint(filename string, src []byte) ([]Problem, error) { + return l.LintFiles(map[string][]byte{filename: src}) +} + +// LintFiles lints a set of files of a single package. +// The argument is a map of filename to source. +func (l *Linter) LintFiles(files map[string][]byte) ([]Problem, error) { + pkg := &pkg{ + fset: token.NewFileSet(), + files: make(map[string]*file), + } + var pkgName string + for filename, src := range files { + if isGenerated(src) { + continue // See issue #239 + } + f, err := parser.ParseFile(pkg.fset, filename, src, parser.ParseComments) + if err != nil { + return nil, err + } + if pkgName == "" { + pkgName = f.Name.Name + } else if f.Name.Name != pkgName { + return nil, fmt.Errorf("%s is in package %s, not %s", filename, f.Name.Name, pkgName) + } + pkg.files[filename] = &file{ + pkg: pkg, + f: f, + fset: pkg.fset, + src: src, + filename: filename, + } + } + if len(pkg.files) == 0 { + return nil, nil + } + return pkg.lint(), nil +} + +// LintFiles lints a set of files of a single package. +// The argument is a map of filename to source. +func (l *Linter) LintPkg(files []*ast.File, fset *token.FileSet, typesPkg *types.Package, typesInfo *types.Info) ([]Problem, error) { + pkg := &pkg{ + fset: fset, + files: make(map[string]*file), + typesPkg: typesPkg, + typesInfo: typesInfo, + } + var pkgName string + for _, f := range files { + // use PositionFor, not Position because of //line directives: + // this filename will be used for source lines extraction. + filename := fset.PositionFor(f.Pos(), false).Filename + if filename == "" { + return nil, fmt.Errorf("no file name for file %+v", f) + } + + if pkgName == "" { + pkgName = f.Name.Name + } else if f.Name.Name != pkgName { + return nil, fmt.Errorf("%s is in package %s, not %s", filename, f.Name.Name, pkgName) + } + + // TODO: reuse golangci-lint lines cache + src, err := ioutil.ReadFile(filename) + if err != nil { + return nil, fmt.Errorf("can't read file %s: %s", filename, err) + } + + pkg.files[filename] = &file{ + pkg: pkg, + f: f, + fset: pkg.fset, + src: src, + filename: filename, + } + } + if len(pkg.files) == 0 { + return nil, nil + } + return pkg.lint(), nil +} + +var ( + genHdr = []byte("// Code generated ") + genFtr = []byte(" DO NOT EDIT.") +) + +// isGenerated reports whether the source file is generated code +// according the rules from https://golang.org/s/generatedcode. +func isGenerated(src []byte) bool { + sc := bufio.NewScanner(bytes.NewReader(src)) + for sc.Scan() { + b := sc.Bytes() + if bytes.HasPrefix(b, genHdr) && bytes.HasSuffix(b, genFtr) && len(b) >= len(genHdr)+len(genFtr) { + return true + } + } + return false +} + +// pkg represents a package being linted. +type pkg struct { + fset *token.FileSet + files map[string]*file + + typesPkg *types.Package + typesInfo *types.Info + + // sortable is the set of types in the package that implement sort.Interface. + sortable map[string]bool + // main is whether this is a "main" package. + main bool + + problems []Problem +} + +func (p *pkg) lint() []Problem { + p.scanSortable() + p.main = p.isMain() + + for _, f := range p.files { + f.lint() + } + + sort.Sort(byPosition(p.problems)) + + return p.problems +} + +// file represents a file being linted. 
+type file struct { + pkg *pkg + f *ast.File + fset *token.FileSet + src []byte + filename string +} + +func (f *file) isTest() bool { return strings.HasSuffix(f.filename, "_test.go") } + +func (f *file) lint() { + f.lintPackageComment() + f.lintImports() + f.lintBlankImports() + f.lintExported() + f.lintNames() + f.lintElses() + f.lintRanges() + f.lintErrorf() + f.lintErrors() + f.lintErrorStrings() + f.lintReceiverNames() + f.lintIncDec() + f.lintErrorReturn() + f.lintUnexportedReturn() + f.lintTimeNames() + f.lintContextKeyTypes() + f.lintContextArgs() +} + +type link string +type category string + +// The variadic arguments may start with link and category types, +// and must end with a format string and any arguments. +// It returns the new Problem. +func (f *file) errorf(n ast.Node, confidence float64, args ...interface{}) *Problem { + pos := f.fset.Position(n.Pos()) + if pos.Filename == "" { + pos.Filename = f.filename + } + return f.pkg.errorfAt(pos, confidence, args...) +} + +func (p *pkg) errorfAt(pos token.Position, confidence float64, args ...interface{}) *Problem { + problem := Problem{ + Position: pos, + Confidence: confidence, + } + if pos.Filename != "" { + // The file might not exist in our mapping if a //line directive was encountered. + if f, ok := p.files[pos.Filename]; ok { + problem.LineText = srcLine(f.src, pos) + } + } + +argLoop: + for len(args) > 1 { // always leave at least the format string in args + switch v := args[0].(type) { + case link: + problem.Link = string(v) + case category: + problem.Category = string(v) + default: + break argLoop + } + args = args[1:] + } + + problem.Text = fmt.Sprintf(args[0].(string), args[1:]...) + + p.problems = append(p.problems, problem) + return &p.problems[len(p.problems)-1] +} + +var newImporter = func(fset *token.FileSet) types.ImporterFrom { + return gcexportdata.NewImporter(fset, make(map[string]*types.Package)) +} + +func (p *pkg) typeCheck() error { + config := &types.Config{ + // By setting a no-op error reporter, the type checker does as much work as possible. + Error: func(error) {}, + Importer: newImporter(p.fset), + } + info := &types.Info{ + Types: make(map[ast.Expr]types.TypeAndValue), + Defs: make(map[*ast.Ident]types.Object), + Uses: make(map[*ast.Ident]types.Object), + Scopes: make(map[ast.Node]*types.Scope), + } + var anyFile *file + var astFiles []*ast.File + for _, f := range p.files { + anyFile = f + astFiles = append(astFiles, f.f) + } + pkg, err := config.Check(anyFile.f.Name.Name, p.fset, astFiles, info) + // Remember the typechecking info, even if config.Check failed, + // since we will get partial information. + p.typesPkg = pkg + p.typesInfo = info + return err +} + +func (p *pkg) typeOf(expr ast.Expr) types.Type { + if p.typesInfo == nil { + return nil + } + return p.typesInfo.TypeOf(expr) +} + +func (p *pkg) isNamedType(typ types.Type, importPath, name string) bool { + n, ok := typ.(*types.Named) + if !ok { + return false + } + tn := n.Obj() + return tn != nil && tn.Pkg() != nil && tn.Pkg().Path() == importPath && tn.Name() == name +} + +// scopeOf returns the tightest scope encompassing id. +func (p *pkg) scopeOf(id *ast.Ident) *types.Scope { + var scope *types.Scope + if obj := p.typesInfo.ObjectOf(id); obj != nil { + scope = obj.Parent() + } + if scope == p.typesPkg.Scope() { + // We were given a top-level identifier. + // Use the file-level scope instead of the package-level scope. 
+ pos := id.Pos() + for _, f := range p.files { + if f.f.Pos() <= pos && pos < f.f.End() { + scope = p.typesInfo.Scopes[f.f] + break + } + } + } + return scope +} + +func (p *pkg) scanSortable() { + p.sortable = make(map[string]bool) + + // bitfield for which methods exist on each type. + const ( + Len = 1 << iota + Less + Swap + ) + nmap := map[string]int{"Len": Len, "Less": Less, "Swap": Swap} + has := make(map[string]int) + for _, f := range p.files { + f.walk(func(n ast.Node) bool { + fn, ok := n.(*ast.FuncDecl) + if !ok || fn.Recv == nil || len(fn.Recv.List) == 0 { + return true + } + // TODO(dsymonds): We could check the signature to be more precise. + recv := receiverType(fn) + if i, ok := nmap[fn.Name.Name]; ok { + has[recv] |= i + } + return false + }) + } + for typ, ms := range has { + if ms == Len|Less|Swap { + p.sortable[typ] = true + } + } +} + +func (p *pkg) isMain() bool { + for _, f := range p.files { + if f.isMain() { + return true + } + } + return false +} + +func (f *file) isMain() bool { + if f.f.Name.Name == "main" { + return true + } + return false +} + +// lintPackageComment checks package comments. It complains if +// there is no package comment, or if it is not of the right form. +// This has a notable false positive in that a package comment +// could rightfully appear in a different file of the same package, +// but that's not easy to fix since this linter is file-oriented. +func (f *file) lintPackageComment() { + if f.isTest() { + return + } + + const ref = styleGuideBase + "#package-comments" + prefix := "Package " + f.f.Name.Name + " " + + // Look for a detached package comment. + // First, scan for the last comment that occurs before the "package" keyword. + var lastCG *ast.CommentGroup + for _, cg := range f.f.Comments { + if cg.Pos() > f.f.Package { + // Gone past "package" keyword. + break + } + lastCG = cg + } + if lastCG != nil && strings.HasPrefix(lastCG.Text(), prefix) { + endPos := f.fset.Position(lastCG.End()) + pkgPos := f.fset.Position(f.f.Package) + if endPos.Line+1 < pkgPos.Line { + // There isn't a great place to anchor this error; + // the start of the blank lines between the doc and the package statement + // is at least pointing at the location of the problem. + pos := token.Position{ + Filename: endPos.Filename, + // Offset not set; it is non-trivial, and doesn't appear to be needed. + Line: endPos.Line + 1, + Column: 1, + } + f.pkg.errorfAt(pos, 0.9, link(ref), category("comments"), "package comment is detached; there should be no blank lines between it and the package statement") + return + } + } + + if f.f.Doc == nil { + f.errorf(f.f, 0.2, link(ref), category("comments"), "should have a package comment, unless it's in another file for this package") + return + } + s := f.f.Doc.Text() + if ts := strings.TrimLeft(s, " \t"); ts != s { + f.errorf(f.f.Doc, 1, link(ref), category("comments"), "package comment should not have leading space") + s = ts + } + // Only non-main packages need to keep to this form. 
+ if !f.pkg.main && !strings.HasPrefix(s, prefix) { + f.errorf(f.f.Doc, 1, link(ref), category("comments"), `package comment should be of the form "%s..."`, prefix) + } +} + +func (f *file) isCgo() bool { + if f.src == nil { + return false + } + newLinePos := bytes.Index(f.src, []byte("\n")) + if newLinePos < 0 { + return false + } + firstLine := string(f.src[:newLinePos]) + + // files using cgo have implicitly added comment "Created by cgo - DO NOT EDIT" for go <= 1.10 + // and "Code generated by cmd/cgo" for go >= 1.11 + return strings.Contains(firstLine, "Created by cgo") || strings.Contains(firstLine, "Code generated by cmd/cgo") +} + +// lintBlankImports complains if a non-main package has blank imports that are +// not documented. +func (f *file) lintBlankImports() { + // In package main and in tests, we don't complain about blank imports. + if f.pkg.main || f.isTest() || f.isCgo() { + return + } + + // The first element of each contiguous group of blank imports should have + // an explanatory comment of some kind. + for i, imp := range f.f.Imports { + pos := f.fset.Position(imp.Pos()) + + if !isBlank(imp.Name) { + continue // Ignore non-blank imports. + } + if i > 0 { + prev := f.f.Imports[i-1] + prevPos := f.fset.Position(prev.Pos()) + if isBlank(prev.Name) && prevPos.Line+1 == pos.Line { + continue // A subsequent blank in a group. + } + } + + // This is the first blank import of a group. + if imp.Doc == nil && imp.Comment == nil { + ref := "" + f.errorf(imp, 1, link(ref), category("imports"), "a blank import should be only in a main or test package, or have a comment justifying it") + } + } +} + +// lintImports examines import blocks. +func (f *file) lintImports() { + for i, is := range f.f.Imports { + _ = i + if is.Name != nil && is.Name.Name == "." && !f.isTest() { + f.errorf(is, 1, link(styleGuideBase+"#import-dot"), category("imports"), "should not use dot imports") + } + + } +} + +const docCommentsLink = styleGuideBase + "#doc-comments" + +// lintExported examines the exported names. +// It complains if any required doc comments are missing, +// or if they are not of the right form. The exact rules are in +// lintFuncDoc, lintTypeDoc and lintValueSpecDoc; this function +// also tracks the GenDecl structure being traversed to permit +// doc comments for constants to be on top of the const block. +// It also complains if the names stutter when combined with +// the package name. +func (f *file) lintExported() { + if f.isTest() { + return + } + + var lastGen *ast.GenDecl // last GenDecl entered. + + // Set of GenDecls that have already had missing comments flagged. + genDeclMissingComments := make(map[*ast.GenDecl]bool) + + f.walk(func(node ast.Node) bool { + switch v := node.(type) { + case *ast.GenDecl: + if v.Tok == token.IMPORT { + return false + } + // token.CONST, token.TYPE or token.VAR + lastGen = v + return true + case *ast.FuncDecl: + f.lintFuncDoc(v) + if v.Recv == nil { + // Only check for stutter on functions, not methods. + // Method names are not used package-qualified. + f.checkStutter(v.Name, "func") + } + // Don't proceed inside funcs. + return false + case *ast.TypeSpec: + // inside a GenDecl, which usually has the doc + doc := v.Doc + if doc == nil { + doc = lastGen.Doc + } + f.lintTypeDoc(v, doc) + f.checkStutter(v.Name, "type") + // Don't proceed inside types. 
+ return false + case *ast.ValueSpec: + f.lintValueSpecDoc(v, lastGen, genDeclMissingComments) + return false + } + return true + }) +} + +var ( + allCapsRE = regexp.MustCompile(`^[A-Z0-9_]+$`) + anyCapsRE = regexp.MustCompile(`[A-Z]`) +) + +// knownNameExceptions is a set of names that are known to be exempt from naming checks. +// This is usually because they are constrained by having to match names in the +// standard library. +var knownNameExceptions = map[string]bool{ + "LastInsertId": true, // must match database/sql + "kWh": true, +} + +func isInTopLevel(f *ast.File, ident *ast.Ident) bool { + path, _ := astutil.PathEnclosingInterval(f, ident.Pos(), ident.End()) + for _, f := range path { + switch f.(type) { + case *ast.File, *ast.GenDecl, *ast.ValueSpec, *ast.Ident: + continue + } + return false + } + return true +} + +// lintNames examines all names in the file. +// It complains if any use underscores or incorrect known initialisms. +func (f *file) lintNames() { + // Package names need slightly different handling than other names. + if strings.Contains(f.f.Name.Name, "_") && !strings.HasSuffix(f.f.Name.Name, "_test") { + f.errorf(f.f, 1, link("http://golang.org/doc/effective_go.html#package-names"), category("naming"), "don't use an underscore in package name") + } + if anyCapsRE.MatchString(f.f.Name.Name) { + f.errorf(f.f, 1, link("http://golang.org/doc/effective_go.html#package-names"), category("mixed-caps"), "don't use MixedCaps in package name; %s should be %s", f.f.Name.Name, strings.ToLower(f.f.Name.Name)) + } + + check := func(id *ast.Ident, thing string) { + if id.Name == "_" { + return + } + if knownNameExceptions[id.Name] { + return + } + + // Handle two common styles from other languages that don't belong in Go. + if len(id.Name) >= 5 && allCapsRE.MatchString(id.Name) && strings.Contains(id.Name, "_") { + capCount := 0 + for _, c := range id.Name { + if 'A' <= c && c <= 'Z' { + capCount++ + } + } + if capCount >= 2 { + f.errorf(id, 0.8, link(styleGuideBase+"#mixed-caps"), category("naming"), "don't use ALL_CAPS in Go names; use CamelCase") + return + } + } + if thing == "const" || (thing == "var" && isInTopLevel(f.f, id)) { + if len(id.Name) > 2 && id.Name[0] == 'k' && id.Name[1] >= 'A' && id.Name[1] <= 'Z' { + should := string(id.Name[1]+'a'-'A') + id.Name[2:] + f.errorf(id, 0.8, link(styleGuideBase+"#mixed-caps"), category("naming"), "don't use leading k in Go names; %s %s should be %s", thing, id.Name, should) + } + } + + should := lintName(id.Name) + if id.Name == should { + return + } + + if len(id.Name) > 2 && strings.Contains(id.Name[1:], "_") { + f.errorf(id, 0.9, link("http://golang.org/doc/effective_go.html#mixed-caps"), category("naming"), "don't use underscores in Go names; %s %s should be %s", thing, id.Name, should) + return + } + f.errorf(id, 0.8, link(styleGuideBase+"#initialisms"), category("naming"), "%s %s should be %s", thing, id.Name, should) + } + checkList := func(fl *ast.FieldList, thing string) { + if fl == nil { + return + } + for _, f := range fl.List { + for _, id := range f.Names { + check(id, thing) + } + } + } + f.walk(func(node ast.Node) bool { + switch v := node.(type) { + case *ast.AssignStmt: + if v.Tok == token.ASSIGN { + return true + } + for _, exp := range v.Lhs { + if id, ok := exp.(*ast.Ident); ok { + check(id, "var") + } + } + case *ast.FuncDecl: + if f.isTest() && (strings.HasPrefix(v.Name.Name, "Example") || strings.HasPrefix(v.Name.Name, "Test") || strings.HasPrefix(v.Name.Name, "Benchmark")) { + return true + } + + thing := 
"func" + if v.Recv != nil { + thing = "method" + } + + // Exclude naming warnings for functions that are exported to C but + // not exported in the Go API. + // See https://github.com/golang/lint/issues/144. + if ast.IsExported(v.Name.Name) || !isCgoExported(v) { + check(v.Name, thing) + } + + checkList(v.Type.Params, thing+" parameter") + checkList(v.Type.Results, thing+" result") + case *ast.GenDecl: + if v.Tok == token.IMPORT { + return true + } + var thing string + switch v.Tok { + case token.CONST: + thing = "const" + case token.TYPE: + thing = "type" + case token.VAR: + thing = "var" + } + for _, spec := range v.Specs { + switch s := spec.(type) { + case *ast.TypeSpec: + check(s.Name, thing) + case *ast.ValueSpec: + for _, id := range s.Names { + check(id, thing) + } + } + } + case *ast.InterfaceType: + // Do not check interface method names. + // They are often constrainted by the method names of concrete types. + for _, x := range v.Methods.List { + ft, ok := x.Type.(*ast.FuncType) + if !ok { // might be an embedded interface name + continue + } + checkList(ft.Params, "interface method parameter") + checkList(ft.Results, "interface method result") + } + case *ast.RangeStmt: + if v.Tok == token.ASSIGN { + return true + } + if id, ok := v.Key.(*ast.Ident); ok { + check(id, "range var") + } + if id, ok := v.Value.(*ast.Ident); ok { + check(id, "range var") + } + case *ast.StructType: + for _, f := range v.Fields.List { + for _, id := range f.Names { + check(id, "struct field") + } + } + } + return true + }) +} + +// lintName returns a different name if it should be different. +func lintName(name string) (should string) { + // Fast path for simple cases: "_" and all lowercase. + if name == "_" { + return name + } + allLower := true + for _, r := range name { + if !unicode.IsLower(r) { + allLower = false + break + } + } + if allLower { + return name + } + + // Split camelCase at any lower->upper transition, and split on underscores. + // Check each word for common initialisms. + runes := []rune(name) + w, i := 0, 0 // index of start of word, scan + for i+1 <= len(runes) { + eow := false // whether we hit the end of a word + if i+1 == len(runes) { + eow = true + } else if runes[i+1] == '_' { + // underscore; shift the remainder forward over any run of underscores + eow = true + n := 1 + for i+n+1 < len(runes) && runes[i+n+1] == '_' { + n++ + } + + // Leave at most one underscore if the underscore is between two digits + if i+n+1 < len(runes) && unicode.IsDigit(runes[i]) && unicode.IsDigit(runes[i+n+1]) { + n-- + } + + copy(runes[i+1:], runes[i+n+1:]) + runes = runes[:len(runes)-n] + } else if unicode.IsLower(runes[i]) && !unicode.IsLower(runes[i+1]) { + // lower->non-lower + eow = true + } + i++ + if !eow { + continue + } + + // [w,i) is a word. + word := string(runes[w:i]) + if u := strings.ToUpper(word); commonInitialisms[u] { + // Keep consistent case, which is lowercase only at the start. + if w == 0 && unicode.IsLower(runes[w]) { + u = strings.ToLower(u) + } + // All the common initialisms are ASCII, + // so we can replace the bytes exactly. + copy(runes[w:], []rune(u)) + } else if w > 0 && strings.ToLower(word) == word { + // already all lowercase, and not the first word, so uppercase the first character. + runes[w] = unicode.ToUpper(runes[w]) + } + w = i + } + return string(runes) +} + +// commonInitialisms is a set of common initialisms. +// Only add entries that are highly unlikely to be non-initialisms. +// For instance, "ID" is fine (Freudian code is rare), but "AND" is not. 
+var commonInitialisms = map[string]bool{ + "ACL": true, + "API": true, + "ASCII": true, + "CPU": true, + "CSS": true, + "DNS": true, + "EOF": true, + "GUID": true, + "HTML": true, + "HTTP": true, + "HTTPS": true, + "ID": true, + "IP": true, + "JSON": true, + "LHS": true, + "QPS": true, + "RAM": true, + "RHS": true, + "RPC": true, + "SLA": true, + "SMTP": true, + "SQL": true, + "SSH": true, + "TCP": true, + "TLS": true, + "TTL": true, + "UDP": true, + "UI": true, + "UID": true, + "UUID": true, + "URI": true, + "URL": true, + "UTF8": true, + "VM": true, + "XML": true, + "XMPP": true, + "XSRF": true, + "XSS": true, +} + +// lintTypeDoc examines the doc comment on a type. +// It complains if they are missing from an exported type, +// or if they are not of the standard form. +func (f *file) lintTypeDoc(t *ast.TypeSpec, doc *ast.CommentGroup) { + if !ast.IsExported(t.Name.Name) { + return + } + if doc == nil { + f.errorf(t, 1, link(docCommentsLink), category("comments"), "exported type %v should have comment or be unexported", t.Name) + return + } + + s := doc.Text() + articles := [...]string{"A", "An", "The"} + for _, a := range articles { + if strings.HasPrefix(s, a+" ") { + s = s[len(a)+1:] + break + } + } + if !strings.HasPrefix(s, t.Name.Name+" ") { + f.errorf(doc, 1, link(docCommentsLink), category("comments"), `comment on exported type %v should be of the form "%v ..." (with optional leading article)`, t.Name, t.Name) + } +} + +var commonMethods = map[string]bool{ + "Error": true, + "Read": true, + "ServeHTTP": true, + "String": true, + "Write": true, +} + +// lintFuncDoc examines doc comments on functions and methods. +// It complains if they are missing, or not of the right form. +// It has specific exclusions for well-known methods (see commonMethods above). +func (f *file) lintFuncDoc(fn *ast.FuncDecl) { + if !ast.IsExported(fn.Name.Name) { + // func is unexported + return + } + kind := "function" + name := fn.Name.Name + if fn.Recv != nil && len(fn.Recv.List) > 0 { + // method + kind = "method" + recv := receiverType(fn) + if !ast.IsExported(recv) { + // receiver is unexported + return + } + if commonMethods[name] { + return + } + switch name { + case "Len", "Less", "Swap": + if f.pkg.sortable[recv] { + return + } + } + name = recv + "." + name + } + if fn.Doc == nil { + f.errorf(fn, 1, link(docCommentsLink), category("comments"), "exported %s %s should have comment or be unexported", kind, name) + return + } + s := fn.Doc.Text() + prefix := fn.Name.Name + " " + if !strings.HasPrefix(s, prefix) { + f.errorf(fn.Doc, 1, link(docCommentsLink), category("comments"), `comment on exported %s %s should be of the form "%s..."`, kind, name, prefix) + } +} + +// lintValueSpecDoc examines package-global variables and constants. +// It complains if they are not individually declared, +// or if they are not suitably documented in the right form (unless they are in a block that is commented). +func (f *file) lintValueSpecDoc(vs *ast.ValueSpec, gd *ast.GenDecl, genDeclMissingComments map[*ast.GenDecl]bool) { + kind := "var" + if gd.Tok == token.CONST { + kind = "const" + } + + if len(vs.Names) > 1 { + // Check that none are exported except for the first. + for _, n := range vs.Names[1:] { + if ast.IsExported(n.Name) { + f.errorf(vs, 1, category("comments"), "exported %s %s should have its own declaration", kind, n.Name) + return + } + } + } + + // Only one name. 
+ name := vs.Names[0].Name + if !ast.IsExported(name) { + return + } + + if vs.Doc == nil && gd.Doc == nil { + if genDeclMissingComments[gd] { + return + } + block := "" + if kind == "const" && gd.Lparen.IsValid() { + block = " (or a comment on this block)" + } + f.errorf(vs, 1, link(docCommentsLink), category("comments"), "exported %s %s should have comment%s or be unexported", kind, name, block) + genDeclMissingComments[gd] = true + return + } + // If this GenDecl has parens and a comment, we don't check its comment form. + if gd.Lparen.IsValid() && gd.Doc != nil { + return + } + // The relevant text to check will be on either vs.Doc or gd.Doc. + // Use vs.Doc preferentially. + doc := vs.Doc + if doc == nil { + doc = gd.Doc + } + prefix := name + " " + if !strings.HasPrefix(doc.Text(), prefix) { + f.errorf(doc, 1, link(docCommentsLink), category("comments"), `comment on exported %s %s should be of the form "%s..."`, kind, name, prefix) + } +} + +func (f *file) checkStutter(id *ast.Ident, thing string) { + pkg, name := f.f.Name.Name, id.Name + if !ast.IsExported(name) { + // unexported name + return + } + // A name stutters if the package name is a strict prefix + // and the next character of the name starts a new word. + if len(name) <= len(pkg) { + // name is too short to stutter. + // This permits the name to be the same as the package name. + return + } + if !strings.EqualFold(pkg, name[:len(pkg)]) { + return + } + // We can assume the name is well-formed UTF-8. + // If the next rune after the package name is uppercase or an underscore + // the it's starting a new word and thus this name stutters. + rem := name[len(pkg):] + if next, _ := utf8.DecodeRuneInString(rem); next == '_' || unicode.IsUpper(next) { + f.errorf(id, 0.8, link(styleGuideBase+"#package-names"), category("naming"), "%s name will be used as %s.%s by other packages, and that stutters; consider calling this %s", thing, pkg, name, rem) + } +} + +// zeroLiteral is a set of ast.BasicLit values that are zero values. +// It is not exhaustive. +var zeroLiteral = map[string]bool{ + "false": true, // bool + // runes + `'\x00'`: true, + `'\000'`: true, + // strings + `""`: true, + "``": true, + // numerics + "0": true, + "0.": true, + "0.0": true, + "0i": true, +} + +// lintElses examines else blocks. It complains about any else block whose if block ends in a return. +func (f *file) lintElses() { + // We don't want to flag if { } else if { } else { } constructions. + // They will appear as an IfStmt whose Else field is also an IfStmt. + // Record such a node so we ignore it when we visit it. + ignore := make(map[*ast.IfStmt]bool) + + f.walk(func(node ast.Node) bool { + ifStmt, ok := node.(*ast.IfStmt) + if !ok || ifStmt.Else == nil { + return true + } + if elseif, ok := ifStmt.Else.(*ast.IfStmt); ok { + ignore[elseif] = true + return true + } + if ignore[ifStmt] { + return true + } + if _, ok := ifStmt.Else.(*ast.BlockStmt); !ok { + // only care about elses without conditions + return true + } + if len(ifStmt.Body.List) == 0 { + return true + } + shortDecl := false // does the if statement have a ":=" initialization statement? 
+ if ifStmt.Init != nil { + if as, ok := ifStmt.Init.(*ast.AssignStmt); ok && as.Tok == token.DEFINE { + shortDecl = true + } + } + lastStmt := ifStmt.Body.List[len(ifStmt.Body.List)-1] + if _, ok := lastStmt.(*ast.ReturnStmt); ok { + extra := "" + if shortDecl { + extra = " (move short variable declaration to its own line if necessary)" + } + f.errorf(ifStmt.Else, 1, link(styleGuideBase+"#indent-error-flow"), category("indent"), "if block ends with a return statement, so drop this else and outdent its block"+extra) + } + return true + }) +} + +// lintRanges examines range clauses. It complains about redundant constructions. +func (f *file) lintRanges() { + f.walk(func(node ast.Node) bool { + rs, ok := node.(*ast.RangeStmt) + if !ok { + return true + } + + if isIdent(rs.Key, "_") && (rs.Value == nil || isIdent(rs.Value, "_")) { + p := f.errorf(rs.Key, 1, category("range-loop"), "should omit values from range; this loop is equivalent to `for range ...`") + + newRS := *rs // shallow copy + newRS.Value = nil + newRS.Key = nil + p.ReplacementLine = f.firstLineOf(&newRS, rs) + + return true + } + + if isIdent(rs.Value, "_") { + p := f.errorf(rs.Value, 1, category("range-loop"), "should omit 2nd value from range; this loop is equivalent to `for %s %s range ...`", f.render(rs.Key), rs.Tok) + + newRS := *rs // shallow copy + newRS.Value = nil + p.ReplacementLine = f.firstLineOf(&newRS, rs) + } + + return true + }) +} + +// lintErrorf examines errors.New and testing.Error calls. It complains if its only argument is an fmt.Sprintf invocation. +func (f *file) lintErrorf() { + f.walk(func(node ast.Node) bool { + ce, ok := node.(*ast.CallExpr) + if !ok || len(ce.Args) != 1 { + return true + } + isErrorsNew := isPkgDot(ce.Fun, "errors", "New") + var isTestingError bool + se, ok := ce.Fun.(*ast.SelectorExpr) + if ok && se.Sel.Name == "Error" { + if typ := f.pkg.typeOf(se.X); typ != nil { + isTestingError = typ.String() == "*testing.T" + } + } + if !isErrorsNew && !isTestingError { + return true + } + if !f.imports("errors") { + return true + } + arg := ce.Args[0] + ce, ok = arg.(*ast.CallExpr) + if !ok || !isPkgDot(ce.Fun, "fmt", "Sprintf") { + return true + } + errorfPrefix := "fmt" + if isTestingError { + errorfPrefix = f.render(se.X) + } + p := f.errorf(node, 1, category("errors"), "should replace %s(fmt.Sprintf(...)) with %s.Errorf(...)", f.render(se), errorfPrefix) + + m := f.srcLineWithMatch(ce, `^(.*)`+f.render(se)+`\(fmt\.Sprintf\((.*)\)\)(.*)$`) + if m != nil { + p.ReplacementLine = m[1] + errorfPrefix + ".Errorf(" + m[2] + ")" + m[3] + } + + return true + }) +} + +// lintErrors examines global error vars. It complains if they aren't named in the standard way. 
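The lintErrorf check above flags errors.New (and t.Error) calls whose single argument is an fmt.Sprintf invocation. A minimal editorial sketch of the pattern it flags and the rewrite it suggests; the package name and function names are illustrative, not part of golint:

```go
package example

import (
	"errors"
	"fmt"
)

// flagged: the message is built with fmt.Sprintf and then wrapped in errors.New.
func flagged(v int) error {
	return errors.New(fmt.Sprintf("bad value %d", v))
}

// suggested: fmt.Errorf formats and constructs the error in one step.
func suggested(v int) error {
	return fmt.Errorf("bad value %d", v)
}
```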
+func (f *file) lintErrors() { + for _, decl := range f.f.Decls { + gd, ok := decl.(*ast.GenDecl) + if !ok || gd.Tok != token.VAR { + continue + } + for _, spec := range gd.Specs { + spec := spec.(*ast.ValueSpec) + if len(spec.Names) != 1 || len(spec.Values) != 1 { + continue + } + ce, ok := spec.Values[0].(*ast.CallExpr) + if !ok { + continue + } + if !isPkgDot(ce.Fun, "errors", "New") && !isPkgDot(ce.Fun, "fmt", "Errorf") { + continue + } + + id := spec.Names[0] + prefix := "err" + if id.IsExported() { + prefix = "Err" + } + if !strings.HasPrefix(id.Name, prefix) { + f.errorf(id, 0.9, category("naming"), "error var %s should have name of the form %sFoo", id.Name, prefix) + } + } + } +} + +func lintErrorString(s string) (isClean bool, conf float64) { + const basicConfidence = 0.8 + const capConfidence = basicConfidence - 0.2 + first, firstN := utf8.DecodeRuneInString(s) + last, _ := utf8.DecodeLastRuneInString(s) + if last == '.' || last == ':' || last == '!' || last == '\n' { + return false, basicConfidence + } + if unicode.IsUpper(first) { + // People use proper nouns and exported Go identifiers in error strings, + // so decrease the confidence of warnings for capitalization. + if len(s) <= firstN { + return false, capConfidence + } + // Flag strings starting with something that doesn't look like an initialism. + if second, _ := utf8.DecodeRuneInString(s[firstN:]); !unicode.IsUpper(second) { + return false, capConfidence + } + } + return true, 0 +} + +// lintErrorStrings examines error strings. +// It complains if they are capitalized or end in punctuation or a newline. +func (f *file) lintErrorStrings() { + f.walk(func(node ast.Node) bool { + ce, ok := node.(*ast.CallExpr) + if !ok { + return true + } + if !isPkgDot(ce.Fun, "errors", "New") && !isPkgDot(ce.Fun, "fmt", "Errorf") { + return true + } + if len(ce.Args) < 1 { + return true + } + str, ok := ce.Args[0].(*ast.BasicLit) + if !ok || str.Kind != token.STRING { + return true + } + s, _ := strconv.Unquote(str.Value) // can assume well-formed Go + if s == "" { + return true + } + clean, conf := lintErrorString(s) + if clean { + return true + } + + f.errorf(str, conf, link(styleGuideBase+"#error-strings"), category("errors"), + "error strings should not be capitalized or end with punctuation or a newline") + return true + }) +} + +// lintReceiverNames examines receiver names. It complains about inconsistent +// names used for the same type and names such as "this". 
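As context for the two checks implemented above (lintErrors and lintErrorStrings), a short sketch of the conventions they enforce; all identifiers and messages here are illustrative only:

```go
package example

import "errors"

// Flagged by lintErrors: an exported error variable without the "Err" prefix.
var BadName = errors.New("example: bad name")

// Accepted: exported error variables are expected to be named ErrFoo,
// unexported ones errFoo.
var ErrBadName = errors.New("example: bad name")

// Flagged by lintErrorStrings: capitalized and ending in punctuation.
var errShouty = errors.New("Something went wrong!")

// Accepted: lower-case, no trailing punctuation or newline.
var errQuiet = errors.New("something went wrong")
```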
+func (f *file) lintReceiverNames() { + typeReceiver := map[string]string{} + f.walk(func(n ast.Node) bool { + fn, ok := n.(*ast.FuncDecl) + if !ok || fn.Recv == nil || len(fn.Recv.List) == 0 { + return true + } + names := fn.Recv.List[0].Names + if len(names) < 1 { + return true + } + name := names[0].Name + const ref = styleGuideBase + "#receiver-names" + if name == "_" { + f.errorf(n, 1, link(ref), category("naming"), `receiver name should not be an underscore, omit the name if it is unused`) + return true + } + if name == "this" || name == "self" { + f.errorf(n, 1, link(ref), category("naming"), `receiver name should be a reflection of its identity; don't use generic names such as "this" or "self"`) + return true + } + recv := receiverType(fn) + if prev, ok := typeReceiver[recv]; ok && prev != name { + f.errorf(n, 1, link(ref), category("naming"), "receiver name %s should be consistent with previous receiver name %s for %s", name, prev, recv) + return true + } + typeReceiver[recv] = name + return true + }) +} + +// lintIncDec examines statements that increment or decrement a variable. +// It complains if they don't use x++ or x--. +func (f *file) lintIncDec() { + f.walk(func(n ast.Node) bool { + as, ok := n.(*ast.AssignStmt) + if !ok { + return true + } + if len(as.Lhs) != 1 { + return true + } + if !isOne(as.Rhs[0]) { + return true + } + var suffix string + switch as.Tok { + case token.ADD_ASSIGN: + suffix = "++" + case token.SUB_ASSIGN: + suffix = "--" + default: + return true + } + f.errorf(as, 0.8, category("unary-op"), "should replace %s with %s%s", f.render(as), f.render(as.Lhs[0]), suffix) + return true + }) +} + +// lintErrorReturn examines function declarations that return an error. +// It complains if the error isn't the last parameter. +func (f *file) lintErrorReturn() { + f.walk(func(n ast.Node) bool { + fn, ok := n.(*ast.FuncDecl) + if !ok || fn.Type.Results == nil { + return true + } + ret := fn.Type.Results.List + if len(ret) <= 1 { + return true + } + if isIdent(ret[len(ret)-1].Type, "error") { + return true + } + // An error return parameter should be the last parameter. + // Flag any error parameters found before the last. + for _, r := range ret[:len(ret)-1] { + if isIdent(r.Type, "error") { + f.errorf(fn, 0.9, category("arg-order"), "error should be the last type when returning multiple items") + break // only flag one + } + } + return true + }) +} + +// lintUnexportedReturn examines exported function declarations. +// It complains if any return an unexported type. +func (f *file) lintUnexportedReturn() { + f.walk(func(n ast.Node) bool { + fn, ok := n.(*ast.FuncDecl) + if !ok { + return true + } + if fn.Type.Results == nil { + return false + } + if !fn.Name.IsExported() { + return false + } + thing := "func" + if fn.Recv != nil && len(fn.Recv.List) > 0 { + thing = "method" + if !ast.IsExported(receiverType(fn)) { + // Don't report exported methods of unexported types, + // such as private implementations of sort.Interface. + return false + } + } + for _, ret := range fn.Type.Results.List { + typ := f.pkg.typeOf(ret.Type) + if exportedType(typ) { + continue + } + f.errorf(ret.Type, 0.8, category("unexported-type-in-api"), + "exported %s %s returns unexported type %s, which can be annoying to use", + thing, fn.Name.Name, typ) + break // only flag one + } + return false + }) +} + +// exportedType reports whether typ is an exported type. +// It is imprecise, and will err on the side of returning true, +// such as for composite types. 
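The lintErrorReturn check above only looks at result lists with more than one entry and flags any error that is not in the last position. A small sketch of a flagged and an accepted signature, using made-up function names:

```go
package example

import "strconv"

// Flagged by lintErrorReturn: error is not the last value in the result list.
func parseFlagged(s string) (error, int) {
	n, err := strconv.Atoi(s)
	return err, n
}

// Accepted: the error comes last, as callers conventionally expect.
func parseAccepted(s string) (int, error) {
	return strconv.Atoi(s)
}
```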
+func exportedType(typ types.Type) bool { + switch T := typ.(type) { + case *types.Named: + // Builtin types have no package. + return T.Obj().Pkg() == nil || T.Obj().Exported() + case *types.Map: + return exportedType(T.Key()) && exportedType(T.Elem()) + case interface { + Elem() types.Type + }: // array, slice, pointer, chan + return exportedType(T.Elem()) + } + // Be conservative about other types, such as struct, interface, etc. + return true +} + +// timeSuffixes is a list of name suffixes that imply a time unit. +// This is not an exhaustive list. +var timeSuffixes = []string{ + "Sec", "Secs", "Seconds", + "Msec", "Msecs", + "Milli", "Millis", "Milliseconds", + "Usec", "Usecs", "Microseconds", + "MS", "Ms", +} + +func (f *file) lintTimeNames() { + f.walk(func(node ast.Node) bool { + v, ok := node.(*ast.ValueSpec) + if !ok { + return true + } + for _, name := range v.Names { + origTyp := f.pkg.typeOf(name) + // Look for time.Duration or *time.Duration; + // the latter is common when using flag.Duration. + typ := origTyp + if pt, ok := typ.(*types.Pointer); ok { + typ = pt.Elem() + } + if !f.pkg.isNamedType(typ, "time", "Duration") { + continue + } + suffix := "" + for _, suf := range timeSuffixes { + if strings.HasSuffix(name.Name, suf) { + suffix = suf + break + } + } + if suffix == "" { + continue + } + f.errorf(v, 0.9, category("time"), "var %s is of type %v; don't use unit-specific suffix %q", name.Name, origTyp, suffix) + } + return true + }) +} + +// lintContextKeyTypes checks for call expressions to context.WithValue with +// basic types used for the key argument. +// See: https://golang.org/issue/17293 +func (f *file) lintContextKeyTypes() { + f.walk(func(node ast.Node) bool { + switch node := node.(type) { + case *ast.CallExpr: + f.checkContextKeyType(node) + } + + return true + }) +} + +// checkContextKeyType reports an error if the call expression calls +// context.WithValue with a key argument of basic type. +func (f *file) checkContextKeyType(x *ast.CallExpr) { + sel, ok := x.Fun.(*ast.SelectorExpr) + if !ok { + return + } + pkg, ok := sel.X.(*ast.Ident) + if !ok || pkg.Name != "context" { + return + } + if sel.Sel.Name != "WithValue" { + return + } + + // key is second argument to context.WithValue + if len(x.Args) != 3 { + return + } + key := f.pkg.typesInfo.Types[x.Args[1]] + + if ktyp, ok := key.Type.(*types.Basic); ok && ktyp.Kind() != types.Invalid { + f.errorf(x, 1.0, category("context"), fmt.Sprintf("should not use basic type %s as key in context.WithValue", key.Type)) + } +} + +// lintContextArgs examines function declarations that contain an +// argument with a type of context.Context +// It complains if that argument isn't the first parameter. +func (f *file) lintContextArgs() { + f.walk(func(n ast.Node) bool { + fn, ok := n.(*ast.FuncDecl) + if !ok || len(fn.Type.Params.List) <= 1 { + return true + } + // A context.Context should be the first parameter of a function. + // Flag any that show up after the first. + for _, arg := range fn.Type.Params.List[1:] { + if isPkgDot(arg.Type, "context", "Context") { + f.errorf(fn, 0.9, link("https://golang.org/pkg/context/"), category("arg-order"), "context.Context should be the first parameter of a function") + break // only flag one + } + } + return true + }) +} + +// containsComments returns whether the interval [start, end) contains any +// comments without "// MATCH " prefix. 
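The two context checks above (lintContextKeyTypes and lintContextArgs) are easiest to see side by side. A sketch of code that passes and code that would be flagged; the key type and function names are illustrative:

```go
package example

import "context"

// A dedicated, unexported key type avoids the lintContextKeyTypes warning
// about using a basic type (string, int, ...) as a context.WithValue key.
type requestIDKey struct{}

// Accepted: ctx is the first parameter (lintContextArgs) and the key is not
// a basic type (lintContextKeyTypes).
func WithRequestID(ctx context.Context, id string) context.Context {
	return context.WithValue(ctx, requestIDKey{}, id)
}

// Flagged: ctx is not the first parameter, and the key is a plain string.
func withRequestIDFlagged(id string, ctx context.Context) context.Context {
	return context.WithValue(ctx, "request-id", id)
}
```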
+func (f *file) containsComments(start, end token.Pos) bool { + for _, cgroup := range f.f.Comments { + comments := cgroup.List + if comments[0].Slash >= end { + // All comments starting with this group are after end pos. + return false + } + if comments[len(comments)-1].Slash < start { + // Comments group ends before start pos. + continue + } + for _, c := range comments { + if start <= c.Slash && c.Slash < end && !strings.HasPrefix(c.Text, "// MATCH ") { + return true + } + } + } + return false +} + +// receiverType returns the named type of the method receiver, sans "*", +// or "invalid-type" if fn.Recv is ill formed. +func receiverType(fn *ast.FuncDecl) string { + switch e := fn.Recv.List[0].Type.(type) { + case *ast.Ident: + return e.Name + case *ast.StarExpr: + if id, ok := e.X.(*ast.Ident); ok { + return id.Name + } + } + // The parser accepts much more than just the legal forms. + return "invalid-type" +} + +func (f *file) walk(fn func(ast.Node) bool) { + ast.Walk(walker(fn), f.f) +} + +func (f *file) render(x interface{}) string { + var buf bytes.Buffer + if err := printer.Fprint(&buf, f.fset, x); err != nil { + panic(err) + } + return buf.String() +} + +func (f *file) debugRender(x interface{}) string { + var buf bytes.Buffer + if err := ast.Fprint(&buf, f.fset, x, nil); err != nil { + panic(err) + } + return buf.String() +} + +// walker adapts a function to satisfy the ast.Visitor interface. +// The function return whether the walk should proceed into the node's children. +type walker func(ast.Node) bool + +func (w walker) Visit(node ast.Node) ast.Visitor { + if w(node) { + return w + } + return nil +} + +func isIdent(expr ast.Expr, ident string) bool { + id, ok := expr.(*ast.Ident) + return ok && id.Name == ident +} + +// isBlank returns whether id is the blank identifier "_". +// If id == nil, the answer is false. +func isBlank(id *ast.Ident) bool { return id != nil && id.Name == "_" } + +func isPkgDot(expr ast.Expr, pkg, name string) bool { + sel, ok := expr.(*ast.SelectorExpr) + return ok && isIdent(sel.X, pkg) && isIdent(sel.Sel, name) +} + +func isOne(expr ast.Expr) bool { + lit, ok := expr.(*ast.BasicLit) + return ok && lit.Kind == token.INT && lit.Value == "1" +} + +func isCgoExported(f *ast.FuncDecl) bool { + if f.Recv != nil || f.Doc == nil { + return false + } + + cgoExport := regexp.MustCompile(fmt.Sprintf("(?m)^//export %s$", regexp.QuoteMeta(f.Name.Name))) + for _, c := range f.Doc.List { + if cgoExport.MatchString(c.Text) { + return true + } + } + return false +} + +var basicTypeKinds = map[types.BasicKind]string{ + types.UntypedBool: "bool", + types.UntypedInt: "int", + types.UntypedRune: "rune", + types.UntypedFloat: "float64", + types.UntypedComplex: "complex128", + types.UntypedString: "string", +} + +// isUntypedConst reports whether expr is an untyped constant, +// and indicates what its default type is. +// scope may be nil. +func (f *file) isUntypedConst(expr ast.Expr) (defType string, ok bool) { + // Re-evaluate expr outside of its context to see if it's untyped. + // (An expr evaluated within, for example, an assignment context will get the type of the LHS.) + exprStr := f.render(expr) + tv, err := types.Eval(f.fset, f.pkg.typesPkg, expr.Pos(), exprStr) + if err != nil { + return "", false + } + if b, ok := tv.Type.(*types.Basic); ok { + if dt, ok := basicTypeKinds[b.Kind()]; ok { + return dt, true + } + } + + return "", false +} + +// firstLineOf renders the given node and returns its first line. +// It will also match the indentation of another node. 
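The walker type above is a small but widely reused trick: it adapts a plain function to the ast.Visitor interface so that f.walk can drive ast.Walk with a closure. A self-contained sketch of the same pattern outside golint; the file name and source string are made up:

```go
package main

import (
	"fmt"
	"go/ast"
	"go/parser"
	"go/token"
)

// walker adapts a function to ast.Visitor; returning false stops descent
// into the node's children, mirroring the helper above.
type walker func(ast.Node) bool

func (w walker) Visit(n ast.Node) ast.Visitor {
	if w(n) {
		return w
	}
	return nil
}

func main() {
	fset := token.NewFileSet()
	src := "package p\n\nfunc Exported() {}\nfunc unexported() {}\n"
	f, err := parser.ParseFile(fset, "example.go", src, 0)
	if err != nil {
		panic(err)
	}
	ast.Walk(walker(func(n ast.Node) bool {
		if fn, ok := n.(*ast.FuncDecl); ok {
			fmt.Println("found func:", fn.Name.Name)
		}
		return true
	}), f)
}
```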
+func (f *file) firstLineOf(node, match ast.Node) string { + line := f.render(node) + if i := strings.Index(line, "\n"); i >= 0 { + line = line[:i] + } + return f.indentOf(match) + line +} + +func (f *file) indentOf(node ast.Node) string { + line := srcLine(f.src, f.fset.Position(node.Pos())) + for i, r := range line { + switch r { + case ' ', '\t': + default: + return line[:i] + } + } + return line // unusual or empty line +} + +func (f *file) srcLineWithMatch(node ast.Node, pattern string) (m []string) { + line := srcLine(f.src, f.fset.Position(node.Pos())) + line = strings.TrimSuffix(line, "\n") + rx := regexp.MustCompile(pattern) + return rx.FindStringSubmatch(line) +} + +// imports returns true if the current file imports the specified package path. +func (f *file) imports(importPath string) bool { + all := astutil.Imports(f.fset, f.f) + for _, p := range all { + for _, i := range p { + uq, err := strconv.Unquote(i.Path.Value) + if err == nil && importPath == uq { + return true + } + } + } + return false +} + +// srcLine returns the complete line at p, including the terminating newline. +func srcLine(src []byte, p token.Position) string { + // Run to end of line in both directions if not at line start/end. + lo, hi := p.Offset, p.Offset+1 + for lo > 0 && src[lo-1] != '\n' { + lo-- + } + for hi < len(src) && src[hi-1] != '\n' { + hi++ + } + return string(src[lo:hi]) +} diff --git a/vendor/github.com/golangci/maligned/LICENSE b/vendor/github.com/golangci/maligned/LICENSE new file mode 100644 index 000000000..744875676 --- /dev/null +++ b/vendor/github.com/golangci/maligned/LICENSE @@ -0,0 +1,27 @@ +Copyright (c) 2012 The Go Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
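Before moving on to the next vendored package: the behaviour of the lintName function and commonInitialisms table in the golint source earlier in this patch is easiest to see from concrete inputs. A hypothetical test sketch; the package name, the test itself, and the sample names are assumptions, while the expected values follow from the code shown above:

```go
package lint // assumed: same package as the lintName shown earlier in this patch

import "testing"

// TestLintNameSketch is an editorial sketch, not an upstream test.
func TestLintNameSketch(t *testing.T) {
	cases := map[string]string{
		"userId":     "userID",     // final word matches an initialism
		"parse_url":  "parseURL",   // underscore removed, "url" upcased
		"HTTPServer": "HTTPServer", // already acceptable, left unchanged
		"id":         "id",         // all-lowercase fast path
	}
	for in, want := range cases {
		if got := lintName(in); got != want {
			t.Errorf("lintName(%q) = %q, want %q", in, got, want)
		}
	}
}
```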
diff --git a/vendor/github.com/golangci/maligned/README b/vendor/github.com/golangci/maligned/README new file mode 100644 index 000000000..4e57f6eab --- /dev/null +++ b/vendor/github.com/golangci/maligned/README @@ -0,0 +1,7 @@ +Install: + + go get github.com/mdempsky/maligned + +Usage: + + maligned cmd/compile/internal/gc cmd/link/internal/ld diff --git a/vendor/github.com/golangci/maligned/maligned.go b/vendor/github.com/golangci/maligned/maligned.go new file mode 100644 index 000000000..c2492b2ff --- /dev/null +++ b/vendor/github.com/golangci/maligned/maligned.go @@ -0,0 +1,253 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package maligned + +import ( + "fmt" + "go/ast" + "go/build" + "go/token" + "go/types" + "sort" + "strings" + + "golang.org/x/tools/go/loader" +) + +var fset = token.NewFileSet() + +type Issue struct { + OldSize, NewSize int + NewStructDef string + Pos token.Position +} + +func Run(prog *loader.Program) []Issue { + flagVerbose := true + fset = prog.Fset + + var issues []Issue + + for _, pkg := range prog.InitialPackages() { + for _, file := range pkg.Files { + ast.Inspect(file, func(node ast.Node) bool { + if s, ok := node.(*ast.StructType); ok { + i := malign(node.Pos(), pkg.Types[s].Type.(*types.Struct), flagVerbose) + if i != nil { + issues = append(issues, *i) + } + } + return true + }) + } + } + + return issues +} + +func malign(pos token.Pos, str *types.Struct, verbose bool) *Issue { + wordSize := int64(8) + maxAlign := int64(8) + switch build.Default.GOARCH { + case "386", "arm": + wordSize, maxAlign = 4, 4 + case "amd64p32": + wordSize = 4 + } + + s := gcSizes{wordSize, maxAlign} + sz := s.Sizeof(str) + opt, fields := optimalSize(str, &s, verbose) + if sz == opt { + return nil + } + + newStructDefParts := []string{"struct{"} + + var w int + for _, f := range fields { + if n := len(f.Name()); n > w { + w = n + } + } + spaces := strings.Repeat(" ", w) + for _, f := range fields { + line := fmt.Sprintf("\t%s%s\t%s,", f.Name(), spaces[len(f.Name()):], f.Type().String()) + newStructDefParts = append(newStructDefParts, line) + } + newStructDefParts = append(newStructDefParts, "}") + + return &Issue{ + OldSize: int(sz), + NewSize: int(opt), + NewStructDef: strings.Join(newStructDefParts, "\n"), + Pos: fset.Position(pos), + } +} + +func optimalSize(str *types.Struct, sizes *gcSizes, stable bool) (int64, []*types.Var) { + nf := str.NumFields() + fields := make([]*types.Var, nf) + alignofs := make([]int64, nf) + sizeofs := make([]int64, nf) + for i := 0; i < nf; i++ { + fields[i] = str.Field(i) + ft := fields[i].Type() + alignofs[i] = sizes.Alignof(ft) + sizeofs[i] = sizes.Sizeof(ft) + } + if stable { // Stable keeps as much of the order as possible, but slower + sort.Stable(&byAlignAndSize{fields, alignofs, sizeofs}) + } else { + sort.Sort(&byAlignAndSize{fields, alignofs, sizeofs}) + } + return sizes.Sizeof(types.NewStruct(fields, nil)), fields +} + +type byAlignAndSize struct { + fields []*types.Var + alignofs []int64 + sizeofs []int64 +} + +func (s *byAlignAndSize) Len() int { return len(s.fields) } +func (s *byAlignAndSize) Swap(i, j int) { + s.fields[i], s.fields[j] = s.fields[j], s.fields[i] + s.alignofs[i], s.alignofs[j] = s.alignofs[j], s.alignofs[i] + s.sizeofs[i], s.sizeofs[j] = s.sizeofs[j], s.sizeofs[i] +} + +func (s *byAlignAndSize) Less(i, j int) bool { + // Place zero sized objects before non-zero sized objects. 
+ if s.sizeofs[i] == 0 && s.sizeofs[j] != 0 { + return true + } + if s.sizeofs[j] == 0 && s.sizeofs[i] != 0 { + return false + } + + // Next, place more tightly aligned objects before less tightly aligned objects. + if s.alignofs[i] != s.alignofs[j] { + return s.alignofs[i] > s.alignofs[j] + } + + // Lastly, order by size. + if s.sizeofs[i] != s.sizeofs[j] { + return s.sizeofs[i] > s.sizeofs[j] + } + + return false +} + +// Code below based on go/types.StdSizes. + +type gcSizes struct { + WordSize int64 + MaxAlign int64 +} + +func (s *gcSizes) Alignof(T types.Type) int64 { + // NOTE: On amd64, complex64 is 8 byte aligned, + // even though float32 is only 4 byte aligned. + + // For arrays and structs, alignment is defined in terms + // of alignment of the elements and fields, respectively. + switch t := T.Underlying().(type) { + case *types.Array: + // spec: "For a variable x of array type: unsafe.Alignof(x) + // is the same as unsafe.Alignof(x[0]), but at least 1." + return s.Alignof(t.Elem()) + case *types.Struct: + // spec: "For a variable x of struct type: unsafe.Alignof(x) + // is the largest of the values unsafe.Alignof(x.f) for each + // field f of x, but at least 1." + max := int64(1) + for i, nf := 0, t.NumFields(); i < nf; i++ { + if a := s.Alignof(t.Field(i).Type()); a > max { + max = a + } + } + return max + } + a := s.Sizeof(T) // may be 0 + // spec: "For a variable x of any type: unsafe.Alignof(x) is at least 1." + if a < 1 { + return 1 + } + if a > s.MaxAlign { + return s.MaxAlign + } + return a +} + +var basicSizes = [...]byte{ + types.Bool: 1, + types.Int8: 1, + types.Int16: 2, + types.Int32: 4, + types.Int64: 8, + types.Uint8: 1, + types.Uint16: 2, + types.Uint32: 4, + types.Uint64: 8, + types.Float32: 4, + types.Float64: 8, + types.Complex64: 8, + types.Complex128: 16, +} + +func (s *gcSizes) Sizeof(T types.Type) int64 { + switch t := T.Underlying().(type) { + case *types.Basic: + k := t.Kind() + if int(k) < len(basicSizes) { + if s := basicSizes[k]; s > 0 { + return int64(s) + } + } + if k == types.String { + return s.WordSize * 2 + } + case *types.Array: + n := t.Len() + if n == 0 { + return 0 + } + a := s.Alignof(t.Elem()) + z := s.Sizeof(t.Elem()) + return align(z, a)*(n-1) + z + case *types.Slice: + return s.WordSize * 3 + case *types.Struct: + nf := t.NumFields() + if nf == 0 { + return 0 + } + + var o int64 + max := int64(1) + for i := 0; i < nf; i++ { + ft := t.Field(i).Type() + a, sz := s.Alignof(ft), s.Sizeof(ft) + if a > max { + max = a + } + if i == nf-1 && sz == 0 && o != 0 { + sz = 1 + } + o = align(o, a) + sz + } + return align(o, max) + case *types.Interface: + return s.WordSize * 2 + } + return s.WordSize // catch-all +} + +// align returns the smallest y >= x such that y % a == 0. 
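The Alignof and Sizeof computations above are what let maligned compare a struct's current size with the size of the optimal field order. A small sketch of the kind of layout it reports; the type names are illustrative, and the byte counts assume the 64-bit word size and alignment rules encoded in gcSizes:

```go
package example

// On a 64-bit target this order forces 7 bytes of padding after each bool,
// so the struct occupies 24 bytes.
type loose struct {
	a bool
	b int64
	c bool
}

// The field order maligned would suggest packs the same data into 16 bytes:
// the int64 first, then the two bools share the final word.
type packed struct {
	b int64
	a bool
	c bool
}
```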
+func align(x, a int64) int64 { + y := x + a - 1 + return y - y%a +} diff --git a/vendor/github.com/golangci/misspell/.gitignore b/vendor/github.com/golangci/misspell/.gitignore new file mode 100644 index 000000000..b1b707e32 --- /dev/null +++ b/vendor/github.com/golangci/misspell/.gitignore @@ -0,0 +1,34 @@ +dist/ +bin/ +vendor/ + +# editor turds +*~ +*.gz +*.bz2 +*.csv + +# Compiled Object files, Static and Dynamic libs (Shared Objects) +*.o +*.a +*.so + +# Folders +_obj +_test + +# Architecture specific extensions/prefixes +*.[568vq] +[568vq].out + +*.cgo1.go +*.cgo2.c +_cgo_defun.c +_cgo_gotypes.go +_cgo_export.* + +_testmain.go + +*.exe +*.test +*.prof diff --git a/vendor/github.com/golangci/misspell/.travis.yml b/vendor/github.com/golangci/misspell/.travis.yml new file mode 100644 index 000000000..e63e6c2bd --- /dev/null +++ b/vendor/github.com/golangci/misspell/.travis.yml @@ -0,0 +1,20 @@ +sudo: required +dist: trusty +group: edge +language: go +go: + - "1.10" +git: + depth: 1 + +script: + - ./scripts/travis.sh + +# calls goreleaser when a new tag is pushed +deploy: +- provider: script + skip_cleanup: true + script: curl -sL http://git.io/goreleaser | bash + on: + tags: true + condition: $TRAVIS_OS_NAME = linux diff --git a/vendor/github.com/golangci/misspell/Dockerfile b/vendor/github.com/golangci/misspell/Dockerfile new file mode 100644 index 000000000..b8ea37b4c --- /dev/null +++ b/vendor/github.com/golangci/misspell/Dockerfile @@ -0,0 +1,37 @@ +FROM golang:1.10.0-alpine + +# cache buster +RUN echo 4 + +# git is needed for "go get" below +RUN apk add --no-cache git make + +# these are my standard testing / linting tools +RUN /bin/true \ + && go get -u github.com/golang/dep/cmd/dep \ + && go get -u github.com/alecthomas/gometalinter \ + && gometalinter --install \ + && rm -rf /go/src /go/pkg +# +# * SCOWL word list +# +# Downloads +# http://wordlist.aspell.net/dicts/ +# --> http://app.aspell.net/create +# + +# use en_US large size +# use regular size for others +ENV SOURCE_US_BIG http://app.aspell.net/create?max_size=70&spelling=US&max_variant=2&diacritic=both&special=hacker&special=roman-numerals&download=wordlist&encoding=utf-8&format=inline + +# should be able tell difference between English variations using this +ENV SOURCE_US http://app.aspell.net/create?max_size=60&spelling=US&max_variant=1&diacritic=both&download=wordlist&encoding=utf-8&format=inline +ENV SOURCE_GB_ISE http://app.aspell.net/create?max_size=60&spelling=GBs&max_variant=2&diacritic=both&download=wordlist&encoding=utf-8&format=inline +ENV SOURCE_GB_IZE http://app.aspell.net/create?max_size=60&spelling=GBz&max_variant=2&diacritic=both&download=wordlist&encoding=utf-8&format=inline +ENV SOURCE_CA http://app.aspell.net/create?max_size=60&spelling=CA&max_variant=2&diacritic=both&download=wordlist&encoding=utf-8&format=inline + +RUN /bin/true \ + && mkdir /scowl-wl \ + && wget -O /scowl-wl/words-US-60.txt ${SOURCE_US} \ + && wget -O /scowl-wl/words-GB-ise-60.txt ${SOURCE_GB_ISE} + diff --git a/vendor/github.com/golangci/misspell/Gopkg.lock b/vendor/github.com/golangci/misspell/Gopkg.lock new file mode 100644 index 000000000..90ed45115 --- /dev/null +++ b/vendor/github.com/golangci/misspell/Gopkg.lock @@ -0,0 +1,24 @@ +# This file is autogenerated, do not edit; changes may be undone by the next 'dep ensure'. 
+ + +[[projects]] + name = "github.com/gobwas/glob" + packages = [ + ".", + "compiler", + "match", + "syntax", + "syntax/ast", + "syntax/lexer", + "util/runes", + "util/strings" + ] + revision = "5ccd90ef52e1e632236f7326478d4faa74f99438" + version = "v0.2.3" + +[solve-meta] + analyzer-name = "dep" + analyzer-version = 1 + inputs-digest = "087ea4c49358ea8258ad9edfe514cd5ce9975c889c258e5ec7b5d2b720aae113" + solver-name = "gps-cdcl" + solver-version = 1 diff --git a/vendor/github.com/golangci/misspell/Gopkg.toml b/vendor/github.com/golangci/misspell/Gopkg.toml new file mode 100644 index 000000000..e9b8e6a45 --- /dev/null +++ b/vendor/github.com/golangci/misspell/Gopkg.toml @@ -0,0 +1,34 @@ +# Gopkg.toml example +# +# Refer to https://golang.github.io/dep/docs/Gopkg.toml.html +# for detailed Gopkg.toml documentation. +# +# required = ["github.com/user/thing/cmd/thing"] +# ignored = ["github.com/user/project/pkgX", "bitbucket.org/user/project/pkgA/pkgY"] +# +# [[constraint]] +# name = "github.com/user/project" +# version = "1.0.0" +# +# [[constraint]] +# name = "github.com/user/project2" +# branch = "dev" +# source = "github.com/myfork/project2" +# +# [[override]] +# name = "github.com/x/y" +# version = "2.4.0" +# +# [prune] +# non-go = false +# go-tests = true +# unused-packages = true + + +[[constraint]] + name = "github.com/gobwas/glob" + version = "0.2.3" + +[prune] + go-tests = true + unused-packages = true diff --git a/vendor/github.com/golangci/misspell/LICENSE b/vendor/github.com/golangci/misspell/LICENSE new file mode 100644 index 000000000..423e1f9e0 --- /dev/null +++ b/vendor/github.com/golangci/misspell/LICENSE @@ -0,0 +1,22 @@ +The MIT License (MIT) + +Copyright (c) 2015-2017 Nick Galbreath + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + diff --git a/vendor/github.com/golangci/misspell/Makefile b/vendor/github.com/golangci/misspell/Makefile new file mode 100644 index 000000000..862ab77b0 --- /dev/null +++ b/vendor/github.com/golangci/misspell/Makefile @@ -0,0 +1,74 @@ +CONTAINER=nickg/misspell + +install: ## install misspell into GOPATH/bin + go install ./cmd/misspell + +build: hooks ## build and lint misspell + ./scripts/build.sh + +test: ## run all tests + go test . + +# real publishing is done only by travis +publish: ## test goreleaser + ./scripts/goreleaser-dryrun.sh + +# the grep in line 2 is to remove misspellings in the spelling dictionary +# that trigger false positives!! 
+falsepositives: /scowl-wl + cat /scowl-wl/words-US-60.txt | \ + grep -i -v -E "payed|Tyre|Euclidian|nonoccurence|dependancy|reenforced|accidently|surprize|dependance|idealogy|binominal|causalities|conquerer|withing|casette|analyse|analogue|dialogue|paralyse|catalogue|archaeolog|clarinettist|catalyses|cancell|chisell|ageing|cataloguing" | \ + misspell -debug -error + cat /scowl-wl/words-GB-ise-60.txt | \ + grep -v -E "payed|nonoccurence|withing" | \ + misspell -locale=UK -debug -error +# cat /scowl-wl/words-GB-ize-60.txt | \ +# grep -v -E "withing" | \ +# misspell -debug -error +# cat /scowl-wl/words-CA-60.txt | \ +# grep -v -E "withing" | \ +# misspell -debug -error + +bench: ## run benchmarks + go test -bench '.*' + +clean: ## clean up time + rm -rf dist/ bin/ + go clean ./... + git gc --aggressive + +ci: ## run test like travis-ci does, requires docker + docker run --rm \ + -v $(PWD):/go/src/github.com/client9/misspell \ + -w /go/src/github.com/client9/misspell \ + ${CONTAINER} \ + make build falsepositives + +docker-build: ## build a docker test image + docker build -t ${CONTAINER} . + +docker-pull: ## pull latest test image + docker pull ${CONTAINER} + +docker-console: ## log into the test image + docker run --rm -it \ + -v $(PWD):/go/src/github.com/client9/misspell \ + -w /go/src/github.com/client9/misspell \ + ${CONTAINER} sh + +.git/hooks/pre-commit: scripts/pre-commit.sh + cp -f scripts/pre-commit.sh .git/hooks/pre-commit +.git/hooks/commit-msg: scripts/commit-msg.sh + cp -f scripts/commit-msg.sh .git/hooks/commit-msg +hooks: .git/hooks/pre-commit .git/hooks/commit-msg ## install git precommit hooks + +.PHONY: help ci console docker-build bench + +# https://www.client9.com/self-documenting-makefiles/ +help: + @awk -F ':|##' '/^[^\t].+?:.*?##/ {\ + printf "\033[36m%-30s\033[0m %s\n", $$1, $$NF \ + }' $(MAKEFILE_LIST) +.DEFAULT_GOAL=help +.PHONY=help + diff --git a/vendor/github.com/golangci/misspell/README.md b/vendor/github.com/golangci/misspell/README.md new file mode 100644 index 000000000..5b68af04d --- /dev/null +++ b/vendor/github.com/golangci/misspell/README.md @@ -0,0 +1,424 @@ +[![Build Status](https://travis-ci.org/client9/misspell.svg?branch=master)](https://travis-ci.org/client9/misspell) [![Go Report Card](https://goreportcard.com/badge/github.com/client9/misspell)](https://goreportcard.com/report/github.com/client9/misspell) [![GoDoc](https://godoc.org/github.com/client9/misspell?status.svg)](https://godoc.org/github.com/client9/misspell) [![Coverage](http://gocover.io/_badge/github.com/client9/misspell)](http://gocover.io/github.com/client9/misspell) [![license](https://img.shields.io/badge/license-MIT-blue.svg?style=flat)](https://raw.githubusercontent.com/client9/misspell/master/LICENSE) + +Correct commonly misspelled English words... quickly. + +### Install + + +If you just want a binary and to start using `misspell`: + +``` +curl -L -o ./install-misspell.sh https://git.io/misspell +sh ./install-misspell.sh +``` + + +Both will install as `./bin/misspell`. You can adjust the download location using the `-b` flag. File a ticket if you want another platform supported. + + +If you use [Go](https://golang.org/), the best way to run `misspell` is by using [gometalinter](#gometalinter). 
Otherwise, install `misspell` the old-fashioned way: + +``` +go get -u github.com/client9/misspell/cmd/misspell +``` + +and misspell will be in your `GOPATH` + + +Also if you like to live dangerously, one could do + +```bash +curl -L https://git.io/misspell | bash +``` + +### Usage + + +```bash +$ misspell all.html your.txt important.md files.go +your.txt:42:10 found "langauge" a misspelling of "language" + +# ^ file, line, column +``` + +``` +$ misspell -help +Usage of misspell: + -debug + Debug matching, very slow + -error + Exit with 2 if misspelling found + -f string + 'csv', 'sqlite3' or custom Golang template for output + -i string + ignore the following corrections, comma separated + -j int + Number of workers, 0 = number of CPUs + -legal + Show legal information and exit + -locale string + Correct spellings using locale perferances for US or UK. Default is to use a neutral variety of English. Setting locale to US will correct the British spelling of 'colour' to 'color' + -o string + output file or [stderr|stdout|] (default "stdout") + -q Do not emit misspelling output + -source string + Source mode: auto=guess, go=golang source, text=plain or markdown-like text (default "auto") + -w Overwrite file with corrections (default is just to display) +``` + +## FAQ + +* [Automatic Corrections](#correct) +* [Converting UK spellings to US](#locale) +* [Using pipes and stdin](#stdin) +* [Golang special support](#golang) +* [gometalinter support](#gometalinter) +* [CSV Output](#csv) +* [Using SQLite3](#sqlite) +* [Changing output format](#output) +* [Checking a folder recursively](#recursive) +* [Performance](#performance) +* [Known Issues](#issues) +* [Debugging](#debug) +* [False Negatives and missing words](#missing) +* [Origin of Word Lists](#words) +* [Software License](#license) +* [Problem statement](#problem) +* [Other spelling correctors](#others) +* [Other ideas](#otherideas) + + +### How can I make the corrections automatically? + +Just add the `-w` flag! + +``` +$ misspell -w all.html your.txt important.md files.go +your.txt:9:21:corrected "langauge" to "language" + +# ^ File is rewritten only if a misspelling is found +``` + + +### How do I convert British spellings to American (or vice-versa)? + +Add the `-locale US` flag! + +```bash +$ misspell -locale US important.txt +important.txt:10:20 found "colour" a misspelling of "color" +``` + +Add the `-locale UK` flag! + +```bash +$ echo "My favorite color is blue" | misspell -locale UK +stdin:1:3:found "favorite color" a misspelling of "favourite colour" +``` + +Help is appreciated as I'm neither British nor an +expert in the English language. + + +### How do you check an entire folder recursively? + +Just list a directory you'd like to check + +```bash +misspell . +misspell aDirectory anotherDirectory aFile +``` + +You can also run misspell recursively using the following shell tricks: + +```bash +misspell directory/**/* +``` + +or + +```bash +find . -type f | xargs misspell +``` + +You can select a type of file as well. The following examples selects all `.txt` files that are *not* in the `vendor` directory: + +```bash +find . -type f -name '*.txt' | grep -v vendor/ | xargs misspell -error +``` + + +### Can I use pipes or `stdin` for input? + +Yes! 
+ +Print messages to `stderr` only: + +```bash +$ echo "zeebra" | misspell +stdin:1:0:found "zeebra" a misspelling of "zebra" +``` + +Print messages to `stderr`, and corrected text to `stdout`: + +```bash +$ echo "zeebra" | misspell -w +stdin:1:0:corrected "zeebra" to "zebra" +zebra +``` + +Only print the corrected text to `stdout`: + +```bash +$ echo "zeebra" | misspell -w -q +zebra +``` + + +### Are there special rules for golang source files? + +Yes! If the file ends in `.go`, then misspell will only check spelling in +comments. + +If you want to force a file to be checked as a golang source, use `-source=go` +on the command line. Conversely, you can check a golang source as if it were +pure text by using `-source=text`. You might want to do this since many +variable names have misspellings in them! + +### Can I check only-comments in other other programming languages? + +I'm told the using `-source=go` works well for ruby, javascript, java, c and +c++. + +It doesn't work well for python and bash. + + +### Does this work with gometalinter? + +[gometalinter](https://github.com/alecthomas/gometalinter) runs +multiple golang linters. Starting on [2016-06-12](https://github.com/alecthomas/gometalinter/pull/134) +gometalinter supports `misspell` natively but it is disabled by default. + +```bash +# update your copy of gometalinter +go get -u github.com/alecthomas/gometalinter + +# install updates and misspell +gometalinter --install --update +``` + +To use, just enable `misspell` + +``` +gometalinter --enable misspell ./... +``` + +Note that gometalinter only checks golang files, and uses the default options +of `misspell` + +You may wish to run this on your plaintext (.txt) and/or markdown files too. + + + +### How Can I Get CSV Output? + +Using `-f csv`, the output is standard comma-seprated values with headers in the first row. + +``` +misspell -f csv * +file,line,column,typo,corrected +"README.md",9,22,langauge,language +"README.md",47,25,langauge,language +``` + + +### How can I export to SQLite3? + +Using `-f sqlite`, the output is a [sqlite3](https://www.sqlite.org/index.html) dump-file. + +```bash +$ misspell -f sqlite * > /tmp/misspell.sql +$ cat /tmp/misspell.sql + +PRAGMA foreign_keys=OFF; +BEGIN TRANSACTION; +CREATE TABLE misspell( + "file" TEXT, + "line" INTEGER,i + "column" INTEGER,i + "typo" TEXT, + "corrected" TEXT +); +INSERT INTO misspell VALUES("install.txt",202,31,"immediatly","immediately"); +# etc... +COMMIT; +``` + +```bash +$ sqlite3 -init /tmp/misspell.sql :memory: 'select count(*) from misspell' +1 +``` + +With some tricks you can directly pipe output to sqlite3 by using `-init /dev/stdin`: + +``` +misspell -f sqlite * | sqlite3 -init /dev/stdin -column -cmd '.width 60 15' ':memory' \ + 'select substr(file,35),typo,count(*) as count from misspell group by file, typo order by count desc;' +``` + + +### How can I ignore rules? + +Using the `-i "comma,separated,rules"` flag you can specify corrections to ignore. + +For example, if you were to run `misspell -w -error -source=text` against document that contains the string `Guy Finkelshteyn Braswell`, misspell would change the text to `Guy Finkelstheyn Bras well`. You can then +determine the rules to ignore by reverting the change and running the with the `-debug` flag. You can then see +that the corrections were `htey -> they` and `aswell -> as well`. To ignore these two rules, you add `-i "htey,aswell"` to +your command. With debug mode on, you can see it print the corrections, but it will no longer make them. 
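The flags above describe the standalone CLI, but the reason this copy of misspell is vendored here is to drive it as a library. A minimal sketch of programmatic use: the Replacer, New, Compile, RemoveRule names and the Diff fields appear in replace.go further below in this patch, while the exact Replace and RemoveRule signatures are assumed from upstream misspell and may differ:

```go
package main

import (
	"fmt"

	"github.com/golangci/misspell"
)

func main() {
	r := misspell.New()                // Replacer preloaded with the default rule list
	r.RemoveRule([]string{"langauge"}) // assumed signature: drop corrections to ignore
	r.Compile()                        // recompile after editing the rule set

	// assumed signature: returns the corrected text plus one Diff per change
	corrected, diffs := r.Replace("A langauge typo and a zeebra.")
	fmt.Println(corrected)
	for _, d := range diffs {
		fmt.Printf("%s -> %s (line %d, col %d)\n", d.Original, d.Corrected, d.Line, d.Column)
	}
}
```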
+ + +### How can I change the output format? + +Using the `-f template` flag you can pass in a +[golang text template](https://golang.org/pkg/text/template/) to format the output. + +One can use `printf "%q" VALUE` to safely quote a value. + +The default template is compatible with [gometalinter](https://github.com/alecthomas/gometalinter) +``` +{{ .Filename }}:{{ .Line }}:{{ .Column }}:corrected {{ printf "%q" .Original }} to "{{ printf "%q" .Corrected }}" +``` + +To just print probable misspellings: + +``` +-f '{{ .Original }}' +``` + + +### What problem does this solve? + +This corrects commonly misspelled English words in computer source +code, and other text-based formats (`.txt`, `.md`, etc). + +It is designed to run quickly so it can be +used as a [pre-commit hook](https://git-scm.com/book/en/v2/Customizing-Git-Git-Hooks) +with minimal burden on the developer. + +It does not work with binary formats (e.g. Word, etc). + +It is not a complete spell-checking program nor a grammar checker. + + +### What are other misspelling correctors and what's wrong with them? + +Some other misspelling correctors: + +* https://github.com/vlajos/misspell_fixer +* https://github.com/lyda/misspell-check +* https://github.com/lucasdemarchi/codespell + +They all work but had problems that prevented me from using them at scale: + +* slow, all of the above check one misspelling at a time (i.e. linear) using regexps +* not MIT/Apache2 licensed (or equivalent) +* have dependencies that don't work for me (python3, bash, linux sed, etc) +* don't understand American vs. British English and sometimes makes unwelcome "corrections" + +That said, they might be perfect for you and many have more features +than this project! + + +### How fast is it? + +Misspell is easily 100x to 1000x faster than other spelling correctors. You +should be able to check and correct 1000 files in under 250ms. + +This uses the mighty power of golang's +[strings.Replacer](https://golang.org/pkg/strings/#Replacer) which is +a implementation or variation of the +[Aho–Corasick algorithm](https://en.wikipedia.org/wiki/Aho–Corasick_algorithm). +This makes multiple substring matches *simultaneously*. + +In addition this uses multiple CPU cores to work on multiple files. + + +### What problems does it have? + +Unlike the other projects, this doesn't know what a "word" is. There may be +more false positives and false negatives due to this. On the other hand, it +sometimes catches things others don't. + +Either way, please file bugs and we'll fix them! + +Since it operates in parallel to make corrections, it can be non-obvious to +determine exactly what word was corrected. + + +### It's making mistakes. How can I debug? + +Run using `-debug` flag on the file you want. It should then print what word +it is trying to correct. Then [file a +bug](https://github.com/client9/misspell/issues) describing the problem. +Thanks! + + +### Why is it making mistakes or missing items in golang files? + +The matching function is *case-sensitive*, so variable names that are multiple +worlds either in all-upper or all-lower case sometimes can cause false +positives. For instance a variable named `bodyreader` could trigger a false +positive since `yrea` is in the middle that could be corrected to `year`. +Other problems happen if the variable name uses a English contraction that +should use an apostrophe. 
The best way of fixing this is to use the +[Effective Go naming +conventions](https://golang.org/doc/effective_go.html#mixed-caps) and use +[camelCase](https://en.wikipedia.org/wiki/CamelCase) for variable names. You +can check your code using [golint](https://github.com/golang/lint) + + +### What license is this? + +The main code is [MIT](https://github.com/client9/misspell/blob/master/LICENSE). + +Misspell also makes uses of the Golang standard library and contains a modified version of Golang's [strings.Replacer](https://golang.org/pkg/strings/#Replacer) +which are covered under a [BSD License](https://github.com/golang/go/blob/master/LICENSE). Type `misspell -legal` for more details or see [legal.go](https://github.com/client9/misspell/blob/master/legal.go) + + +### Where do the word lists come from? + +It started with a word list from +[Wikipedia](https://en.wikipedia.org/wiki/Wikipedia:Lists_of_common_misspellings/For_machines). +Unfortunately, this list had to be highly edited as many of the words are +obsolete or based from mistakes on mechanical typewriters (I'm guessing). + +Additional words were added based on actually mistakes seen in +the wild (meaning self-generated). + +Variations of UK and US spellings are based on many sources including: + +* http://www.tysto.com/uk-us-spelling-list.html (with heavy editing, many are incorrect) +* http://www.oxforddictionaries.com/us/words/american-and-british-spelling-american (excellent site but incomplete) +* Diffing US and UK [scowl dictionaries](http://wordlist.aspell.net) + +American English is more accepting of spelling variations than is British +English, so "what is American or not" is subject to opinion. Corrections and help welcome. + + +### What are some other enhancements that could be done? + +Here's some ideas for enhancements: + +*Capitalization of proper nouns* could be done (e.g. weekday and month names, country names, language names) + +*Opinionated US spellings* US English has a number of words with alternate +spellings. Think [adviser vs. +advisor](http://grammarist.com/spelling/adviser-advisor/). While "advisor" is not wrong, the opinionated US +locale would correct "advisor" to "adviser". + +*Versioning* Some type of versioning is needed so reporting mistakes and errors is easier. + +*Feedback* Mistakes would be sent to some server for agregation and feedback review. + +*Contractions and Apostrophes* This would optionally correct "isnt" to +"isn't", etc. diff --git a/vendor/github.com/golangci/misspell/RELEASE-HOWTO.md b/vendor/github.com/golangci/misspell/RELEASE-HOWTO.md new file mode 100644 index 000000000..55b52d962 --- /dev/null +++ b/vendor/github.com/golangci/misspell/RELEASE-HOWTO.md @@ -0,0 +1,38 @@ +# Release HOWTO + +since I forget. + + +1. Review existing tags and pick new release number + + ```sh + git tag + ``` + +2. Tag locally + + ```sh + git tag -a v0.1.0 -m "First release" + ``` + + If things get screwed up, delete the tag with + + ```sh + git tag -d v0.1.0 + ``` + +3. Test goreleaser + + TODO: how to install goreleaser + + ```sh + ./scripts/goreleaser-dryrun.sh + ``` + +4. Push + + ```bash + git push origin v0.1.0 + ``` + +5. Verify release and edit notes. 
See https://github.com/client9/misspell/releases diff --git a/vendor/github.com/golangci/misspell/ascii.go b/vendor/github.com/golangci/misspell/ascii.go new file mode 100644 index 000000000..1430718d6 --- /dev/null +++ b/vendor/github.com/golangci/misspell/ascii.go @@ -0,0 +1,62 @@ +package misspell + +// ByteToUpper converts an ascii byte to upper cases +// Uses a branchless algorithm +func ByteToUpper(x byte) byte { + b := byte(0x80) | x + c := b - byte(0x61) + d := ^(b - byte(0x7b)) + e := (c & d) & (^x & 0x7f) + return x - (e >> 2) +} + +// ByteToLower converts an ascii byte to lower case +// uses a branchless algorithm +func ByteToLower(eax byte) byte { + ebx := eax&byte(0x7f) + byte(0x25) + ebx = ebx&byte(0x7f) + byte(0x1a) + ebx = ((ebx & ^eax) >> 2) & byte(0x20) + return eax + ebx +} + +// ByteEqualFold does ascii compare, case insensitive +func ByteEqualFold(a, b byte) bool { + return a == b || ByteToLower(a) == ByteToLower(b) +} + +// StringEqualFold ASCII case-insensitive comparison +// golang toUpper/toLower for both bytes and strings +// appears to be Unicode based which is super slow +// based from https://codereview.appspot.com/5180044/patch/14007/21002 +func StringEqualFold(s1, s2 string) bool { + if len(s1) != len(s2) { + return false + } + for i := 0; i < len(s1); i++ { + c1 := s1[i] + c2 := s2[i] + // c1 & c2 + if c1 != c2 { + c1 |= 'a' - 'A' + c2 |= 'a' - 'A' + if c1 != c2 || c1 < 'a' || c1 > 'z' { + return false + } + } + } + return true +} + +// StringHasPrefixFold is similar to strings.HasPrefix but comparison +// is done ignoring ASCII case. +// / +func StringHasPrefixFold(s1, s2 string) bool { + // prefix is bigger than input --> false + if len(s1) < len(s2) { + return false + } + if len(s1) == len(s2) { + return StringEqualFold(s1, s2) + } + return StringEqualFold(s1[:len(s2)], s2) +} diff --git a/vendor/github.com/golangci/misspell/case.go b/vendor/github.com/golangci/misspell/case.go new file mode 100644 index 000000000..2ea3850df --- /dev/null +++ b/vendor/github.com/golangci/misspell/case.go @@ -0,0 +1,59 @@ +package misspell + +import ( + "strings" +) + +// WordCase is an enum of various word casing styles +type WordCase int + +// Various WordCase types.. 
likely to be not correct +const ( + CaseUnknown WordCase = iota + CaseLower + CaseUpper + CaseTitle +) + +// CaseStyle returns what case style a word is in +func CaseStyle(word string) WordCase { + upperCount := 0 + lowerCount := 0 + + // this iterates over RUNES not BYTES + for i := 0; i < len(word); i++ { + ch := word[i] + switch { + case ch >= 'a' && ch <= 'z': + lowerCount++ + case ch >= 'A' && ch <= 'Z': + upperCount++ + } + } + + switch { + case upperCount != 0 && lowerCount == 0: + return CaseUpper + case upperCount == 0 && lowerCount != 0: + return CaseLower + case upperCount == 1 && lowerCount > 0 && word[0] >= 'A' && word[0] <= 'Z': + return CaseTitle + } + return CaseUnknown +} + +// CaseVariations returns +// If AllUpper or First-Letter-Only is upcased: add the all upper case version +// If AllLower, add the original, the title and upcase forms +// If Mixed, return the original, and the all upcase form +// +func CaseVariations(word string, style WordCase) []string { + switch style { + case CaseLower: + return []string{word, strings.ToUpper(word[0:1]) + word[1:], strings.ToUpper(word)} + case CaseUpper: + return []string{strings.ToUpper(word)} + default: + return []string{word, strings.ToUpper(word)} + } +} diff --git a/vendor/github.com/golangci/misspell/goreleaser.yml b/vendor/github.com/golangci/misspell/goreleaser.yml new file mode 100644 index 000000000..560cb3810 --- /dev/null +++ b/vendor/github.com/golangci/misspell/goreleaser.yml @@ -0,0 +1,38 @@ +# goreleaser.yml +# https://github.com/goreleaser/goreleaser + +project_name: misspell + +builds: + - + main: cmd/misspell/main.go + binary: misspell + ldflags: -s -w -X main.version={{.Version}} + goos: + - darwin + - linux + - windows + goarch: + - amd64 + env: + - CGO_ENABLED=0 + ignore: + - goos: darwin + goarch: 386 + - goos: windows + goarch: 386 + +archive: + name_template: "{{ .Binary }}_{{ .Version }}_{{ .Os }}_{{ .Arch }}" + replacements: + amd64: 64bit + 386: 32bit + darwin: mac + files: + - none* + +checksum: + name_template: "{{ .ProjectName }}_{{ .Version }}_checksums.txt" + +snapshot: + name_template: "SNAPSHOT-{{.Commit}}" diff --git a/vendor/github.com/golangci/misspell/install-misspell.sh b/vendor/github.com/golangci/misspell/install-misspell.sh new file mode 100644 index 000000000..e24a84a20 --- /dev/null +++ b/vendor/github.com/golangci/misspell/install-misspell.sh @@ -0,0 +1,362 @@ +#!/bin/sh +set -e +# Code generated by godownloader. DO NOT EDIT. 
+# + +usage() { + this=$1 + cat </dev/null +} +echoerr() { + echo "$@" 1>&2 +} +log_prefix() { + echo "$0" +} +_logp=6 +log_set_priority() { + _logp="$1" +} +log_priority() { + if test -z "$1"; then + echo "$_logp" + return + fi + [ "$1" -ge "$_logp" ] +} +log_debug() { + log_priority 7 && echoerr "$(log_prefix)" "DEBUG" "$@" +} +log_info() { + log_priority 6 && echoerr "$(log_prefix)" "INFO" "$@" +} +log_err() { + log_priority 3 && echoerr "$(log_prefix)" "ERR" "$@" +} +log_crit() { + log_priority 2 && echoerr "$(log_prefix)" "CRIT" "$@" +} +uname_os() { + os=$(uname -s | tr '[:upper:]' '[:lower:]') + case "$os" in + msys_nt) os="windows" ;; + esac + echo "$os" +} +uname_arch() { + arch=$(uname -m) + case $arch in + x86_64) arch="amd64" ;; + x86) arch="386" ;; + i686) arch="386" ;; + i386) arch="386" ;; + aarch64) arch="arm64" ;; + armv5*) arch="arm5" ;; + armv6*) arch="arm6" ;; + armv7*) arch="arm7" ;; + esac + echo ${arch} +} +uname_os_check() { + os=$(uname_os) + case "$os" in + darwin) return 0 ;; + dragonfly) return 0 ;; + freebsd) return 0 ;; + linux) return 0 ;; + android) return 0 ;; + nacl) return 0 ;; + netbsd) return 0 ;; + openbsd) return 0 ;; + plan9) return 0 ;; + solaris) return 0 ;; + windows) return 0 ;; + esac + log_crit "uname_os_check '$(uname -s)' got converted to '$os' which is not a GOOS value. Please file bug at https://github.com/client9/shlib" + return 1 +} +uname_arch_check() { + arch=$(uname_arch) + case "$arch" in + 386) return 0 ;; + amd64) return 0 ;; + arm64) return 0 ;; + armv5) return 0 ;; + armv6) return 0 ;; + armv7) return 0 ;; + ppc64) return 0 ;; + ppc64le) return 0 ;; + mips) return 0 ;; + mipsle) return 0 ;; + mips64) return 0 ;; + mips64le) return 0 ;; + s390x) return 0 ;; + amd64p32) return 0 ;; + esac + log_crit "uname_arch_check '$(uname -m)' got converted to '$arch' which is not a GOARCH value. 
Please file bug report at https://github.com/client9/shlib" + return 1 +} +untar() { + tarball=$1 + case "${tarball}" in + *.tar.gz | *.tgz) tar -xzf "${tarball}" ;; + *.tar) tar -xf "${tarball}" ;; + *.zip) unzip "${tarball}" ;; + *) + log_err "untar unknown archive format for ${tarball}" + return 1 + ;; + esac +} +mktmpdir() { + test -z "$TMPDIR" && TMPDIR="$(mktemp -d)" + mkdir -p "${TMPDIR}" + echo "${TMPDIR}" +} +http_download() { + local_file=$1 + source_url=$2 + header=$3 + headerflag='' + destflag='' + if is_command curl; then + cmd='curl --fail -sSL' + destflag='-o' + headerflag='-H' + elif is_command wget; then + cmd='wget -q' + destflag='-O' + headerflag='--header' + else + log_crit "http_download unable to find wget or curl" + return 1 + fi + if [ -z "$header" ]; then + $cmd $destflag "$local_file" "$source_url" + else + $cmd $headerflag "$header" $destflag "$local_file" "$source_url" + fi +} +github_api() { + local_file=$1 + source_url=$2 + header="" + case "$source_url" in + https://api.github.com*) + test -z "$GITHUB_TOKEN" || header="Authorization: token $GITHUB_TOKEN" + ;; + esac + http_download "$local_file" "$source_url" "$header" +} +github_last_release() { + owner_repo=$1 + version=$2 + test -z "$version" && version="latest" + giturl="https://github.com/${owner_repo}/releases/${version}" + json=$(http_download "-" "$giturl" "Accept:application/json") + version=$(echo "$json" | tr -s '\n' ' ' | sed 's/.*"tag_name":"//' | sed 's/".*//') + test -z "$version" && return 1 + echo "$version" +} +hash_sha256() { + TARGET=${1:-/dev/stdin} + if is_command gsha256sum; then + hash=$(gsha256sum "$TARGET") || return 1 + echo "$hash" | cut -d ' ' -f 1 + elif is_command sha256sum; then + hash=$(sha256sum "$TARGET") || return 1 + echo "$hash" | cut -d ' ' -f 1 + elif is_command shasum; then + hash=$(shasum -a 256 "$TARGET" 2>/dev/null) || return 1 + echo "$hash" | cut -d ' ' -f 1 + elif is_command openssl; then + hash=$(openssl -dst openssl dgst -sha256 "$TARGET") || return 1 + echo "$hash" | cut -d ' ' -f a + else + log_crit "hash_sha256 unable to find command to compute sha-256 hash" + return 1 + fi +} +hash_sha256_verify() { + TARGET=$1 + checksums=$2 + if [ -z "$checksums" ]; then + log_err "hash_sha256_verify checksum file not specified in arg2" + return 1 + fi + BASENAME=${TARGET##*/} + want=$(grep "${BASENAME}" "${checksums}" 2>/dev/null | tr '\t' ' ' | cut -d ' ' -f 1) + if [ -z "$want" ]; then + log_err "hash_sha256_verify unable to find checksum for '${TARGET}' in '${checksums}'" + return 1 + fi + got=$(hash_sha256 "$TARGET") + if [ "$want" != "$got" ]; then + log_err "hash_sha256_verify checksum for '$TARGET' did not verify ${want} vs $got" + return 1 + fi +} +cat /dev/null < 50000 { + fin, err := os.Open(filename) + if err != nil { + return "", fmt.Errorf("Unable to open large file %q: %s", filename, err) + } + defer fin.Close() + buf := make([]byte, 512) + _, err = io.ReadFull(fin, buf) + if err != nil { + return "", fmt.Errorf("Unable to read 512 bytes from %q: %s", filename, err) + } + if !isTextFile(buf) { + return "", nil + } + + // set so we don't double check this file + isText = true + } + + // read in whole file + raw, err := ioutil.ReadFile(filename) + if err != nil { + return "", fmt.Errorf("Unable to read all %q: %s", filename, err) + } + + if !isText && !isTextFile(raw) { + return "", nil + } + return string(raw), nil +} diff --git a/vendor/github.com/golangci/misspell/notwords.go b/vendor/github.com/golangci/misspell/notwords.go new file mode 100644 index 
000000000..06d0d5a5a --- /dev/null +++ b/vendor/github.com/golangci/misspell/notwords.go @@ -0,0 +1,85 @@ +package misspell + +import ( + "bytes" + "regexp" + "strings" +) + +var ( + reEmail = regexp.MustCompile(`[a-zA-Z0-9_.%+-]+@[a-zA-Z0-9-.]+\.[a-zA-Z]{2,6}[^a-zA-Z]`) + reHost = regexp.MustCompile(`[a-zA-Z0-9-.]+\.[a-zA-Z]+`) + reBackslash = regexp.MustCompile(`\\[a-z]`) +) + +// RemovePath attempts to strip away embedded file system paths, e.g. +// /foo/bar or /static/myimg.png +// +// TODO: windows style +// +func RemovePath(s string) string { + out := bytes.Buffer{} + var idx int + for len(s) > 0 { + if idx = strings.IndexByte(s, '/'); idx == -1 { + out.WriteString(s) + break + } + + if idx > 0 { + idx-- + } + + var chclass string + switch s[idx] { + case '/', ' ', '\n', '\t', '\r': + chclass = " \n\r\t" + case '[': + chclass = "]\n" + case '(': + chclass = ")\n" + default: + out.WriteString(s[:idx+2]) + s = s[idx+2:] + continue + } + + endx := strings.IndexAny(s[idx+1:], chclass) + if endx != -1 { + out.WriteString(s[:idx+1]) + out.Write(bytes.Repeat([]byte{' '}, endx)) + s = s[idx+endx+1:] + } else { + out.WriteString(s) + break + } + } + return out.String() +} + +// replaceWithBlanks returns a string with the same number of spaces as the input +func replaceWithBlanks(s string) string { + return strings.Repeat(" ", len(s)) +} + +// RemoveEmail remove email-like strings, e.g. "nickg+junk@xfoobar.com", "nickg@xyz.abc123.biz" +func RemoveEmail(s string) string { + return reEmail.ReplaceAllStringFunc(s, replaceWithBlanks) +} + +// RemoveHost removes host-like strings "foobar.com" "abc123.fo1231.biz" +func RemoveHost(s string) string { + return reHost.ReplaceAllStringFunc(s, replaceWithBlanks) +} + +// RemoveBackslashEscapes removes characters that are preceeded by a backslash +// commonly found in printf format stringd "\nto" +func removeBackslashEscapes(s string) string { + return reBackslash.ReplaceAllStringFunc(s, replaceWithBlanks) +} + +// RemoveNotWords blanks out all the not words +func RemoveNotWords(s string) string { + // do most selective/specific first + return removeBackslashEscapes(RemoveHost(RemoveEmail(RemovePath(StripURL(s))))) +} diff --git a/vendor/github.com/golangci/misspell/replace.go b/vendor/github.com/golangci/misspell/replace.go new file mode 100644 index 000000000..a99bbcc58 --- /dev/null +++ b/vendor/github.com/golangci/misspell/replace.go @@ -0,0 +1,246 @@ +package misspell + +import ( + "bufio" + "bytes" + "io" + "regexp" + "strings" + "text/scanner" +) + +func max(x, y int) int { + if x > y { + return x + } + return y +} + +func inArray(haystack []string, needle string) bool { + for _, word := range haystack { + if needle == word { + return true + } + } + return false +} + +var wordRegexp = regexp.MustCompile(`[a-zA-Z0-9']+`) + +// Diff is datastructure showing what changed in a single line +type Diff struct { + Filename string + FullLine string + Line int + Column int + Original string + Corrected string +} + +// Replacer is the main struct for spelling correction +type Replacer struct { + Replacements []string + Debug bool + engine *StringReplacer + corrected map[string]string +} + +// New creates a new default Replacer using the main rule list +func New() *Replacer { + r := Replacer{ + Replacements: DictMain, + } + r.Compile() + return &r +} + +// RemoveRule deletes existings rules. 
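+//
+// Illustrative sketch (the ignored word is an assumption, not a verified
+// DictMain entry): to keep a known false positive uncorrected, drop its rule
+// and recompile before replacing:
+//
+//	r := New()
+//	r.RemoveRule([]string{"nto"}) // hypothetical "old" word to ignore
+//	r.Compile()                   // RemoveRule clears the engine, so recompile
+//	fixed, _ := r.Replace("do nto panic") // left as-is once the rule is gone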
+// TODO: make inplace to save memory +func (r *Replacer) RemoveRule(ignore []string) { + newwords := make([]string, 0, len(r.Replacements)) + for i := 0; i < len(r.Replacements); i += 2 { + if inArray(ignore, r.Replacements[i]) { + continue + } + newwords = append(newwords, r.Replacements[i:i+2]...) + } + r.engine = nil + r.Replacements = newwords +} + +// AddRuleList appends new rules. +// Input is in the same form as Strings.Replacer: [ old1, new1, old2, new2, ....] +// Note: does not check for duplictes +func (r *Replacer) AddRuleList(additions []string) { + r.engine = nil + r.Replacements = append(r.Replacements, additions...) +} + +// Compile compiles the rules. Required before using the Replace functions +func (r *Replacer) Compile() { + + r.corrected = make(map[string]string, len(r.Replacements)/2) + for i := 0; i < len(r.Replacements); i += 2 { + r.corrected[r.Replacements[i]] = r.Replacements[i+1] + } + r.engine = NewStringReplacer(r.Replacements...) +} + +/* +line1 and line2 are different +extract words from each line1 + +replace word -> newword +if word == new-word + continue +if new-word in list of replacements + continue +new word not original, and not in list of replacements + some substring got mixed up. UNdo +*/ +func (r *Replacer) recheckLine(s string, lineNum int, buf io.Writer, next func(Diff)) { + first := 0 + redacted := RemoveNotWords(s) + + idx := wordRegexp.FindAllStringIndex(redacted, -1) + for _, ab := range idx { + word := s[ab[0]:ab[1]] + newword := r.engine.Replace(word) + if newword == word { + // no replacement done + continue + } + + // ignore camelCase words + // https://github.com/client9/misspell/issues/113 + if CaseStyle(word) == CaseUnknown { + continue + } + + if StringEqualFold(r.corrected[strings.ToLower(word)], newword) { + // word got corrected into something we know + io.WriteString(buf, s[first:ab[0]]) + io.WriteString(buf, newword) + first = ab[1] + next(Diff{ + FullLine: s, + Line: lineNum, + Original: word, + Corrected: newword, + Column: ab[0], + }) + continue + } + // Word got corrected into something unknown. Ignore it + } + io.WriteString(buf, s[first:]) +} + +// ReplaceGo is a specialized routine for correcting Golang source +// files. Currently only checks comments, not identifiers for +// spelling. 
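+//
+// Illustrative sketch (the misspelling/correction pair is an assumption about
+// DictMain, not taken from this file): identifiers and string literals pass
+// through untouched; only comment text is rewritten:
+//
+//	r := New()
+//	src := "package main\n\n// definately a comment\nvar definately = 1\n"
+//	fixed, diffs := r.ReplaceGo(src)
+//	// assuming a "definately" -> "definitely" rule exists, only the comment
+//	// changes and diffs records that single correction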
+func (r *Replacer) ReplaceGo(input string) (string, []Diff) { + var s scanner.Scanner + s.Init(strings.NewReader(input)) + s.Mode = scanner.ScanIdents | scanner.ScanFloats | scanner.ScanChars | scanner.ScanStrings | scanner.ScanRawStrings | scanner.ScanComments + lastPos := 0 + output := "" +Loop: + for { + switch s.Scan() { + case scanner.Comment: + origComment := s.TokenText() + newComment := r.engine.Replace(origComment) + + if origComment != newComment { + // s.Pos().Offset is the end of the current token + // subtract len(origComment) to get the start of the token + offset := s.Pos().Offset + output = output + input[lastPos:offset-len(origComment)] + newComment + lastPos = offset + } + case scanner.EOF: + break Loop + } + } + + if lastPos == 0 { + // no changes, no copies + return input, nil + } + if lastPos < len(input) { + output = output + input[lastPos:] + } + diffs := make([]Diff, 0, 8) + buf := bytes.NewBuffer(make([]byte, 0, max(len(input), len(output))+100)) + // faster that making a bytes.Buffer and bufio.ReadString + outlines := strings.SplitAfter(output, "\n") + inlines := strings.SplitAfter(input, "\n") + for i := 0; i < len(inlines); i++ { + if inlines[i] == outlines[i] { + buf.WriteString(outlines[i]) + continue + } + r.recheckLine(inlines[i], i+1, buf, func(d Diff) { + diffs = append(diffs, d) + }) + } + + return buf.String(), diffs + +} + +// Replace is corrects misspellings in input, returning corrected version +// along with a list of diffs. +func (r *Replacer) Replace(input string) (string, []Diff) { + output := r.engine.Replace(input) + if input == output { + return input, nil + } + diffs := make([]Diff, 0, 8) + buf := bytes.NewBuffer(make([]byte, 0, max(len(input), len(output))+100)) + // faster that making a bytes.Buffer and bufio.ReadString + outlines := strings.SplitAfter(output, "\n") + inlines := strings.SplitAfter(input, "\n") + for i := 0; i < len(inlines); i++ { + if inlines[i] == outlines[i] { + buf.WriteString(outlines[i]) + continue + } + r.recheckLine(inlines[i], i+1, buf, func(d Diff) { + diffs = append(diffs, d) + }) + } + + return buf.String(), diffs +} + +// ReplaceReader applies spelling corrections to a reader stream. Diffs are +// emitted through a callback. +func (r *Replacer) ReplaceReader(raw io.Reader, w io.Writer, next func(Diff)) error { + var ( + err error + line string + lineNum int + ) + reader := bufio.NewReader(raw) + for err == nil { + lineNum++ + line, err = reader.ReadString('\n') + + // if it's EOF, then line has the last line + // don't like the check of err here and + // in for loop + if err != nil && err != io.EOF { + return err + } + // easily 5x faster than regexp+map + if line == r.engine.Replace(line) { + io.WriteString(w, line) + continue + } + // but it can be inaccurate, so we need to double check + r.recheckLine(line, lineNum, w, next) + } + return nil +} diff --git a/vendor/github.com/golangci/misspell/stringreplacer.go b/vendor/github.com/golangci/misspell/stringreplacer.go new file mode 100644 index 000000000..3151eceb7 --- /dev/null +++ b/vendor/github.com/golangci/misspell/stringreplacer.go @@ -0,0 +1,336 @@ +// Copyright 2011 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package misspell + +import ( + "io" + // "log" + "strings" +) + +// StringReplacer replaces a list of strings with replacements. +// It is safe for concurrent use by multiple goroutines. 
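+//
+// Unlike strings.NewReplacer in the standard library, matching here is ASCII
+// case-insensitive and the replacement is re-cased to follow the original
+// word (see genericReplacer.WriteString). Illustrative sketch:
+//
+//	r := NewStringReplacer("teh", "the", "adn", "and")
+//	fixed := r.Replace("Teh cat adn the dog") // "The cat and the dog"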
+type StringReplacer struct { + r replacer +} + +// replacer is the interface that a replacement algorithm needs to implement. +type replacer interface { + Replace(s string) string + WriteString(w io.Writer, s string) (n int, err error) +} + +// NewStringReplacer returns a new Replacer from a list of old, new string pairs. +// Replacements are performed in order, without overlapping matches. +func NewStringReplacer(oldnew ...string) *StringReplacer { + if len(oldnew)%2 == 1 { + panic("strings.NewReplacer: odd argument count") + } + + return &StringReplacer{r: makeGenericReplacer(oldnew)} +} + +// Replace returns a copy of s with all replacements performed. +func (r *StringReplacer) Replace(s string) string { + return r.r.Replace(s) +} + +// WriteString writes s to w with all replacements performed. +func (r *StringReplacer) WriteString(w io.Writer, s string) (n int, err error) { + return r.r.WriteString(w, s) +} + +// trieNode is a node in a lookup trie for prioritized key/value pairs. Keys +// and values may be empty. For example, the trie containing keys "ax", "ay", +// "bcbc", "x" and "xy" could have eight nodes: +// +// n0 - +// n1 a- +// n2 .x+ +// n3 .y+ +// n4 b- +// n5 .cbc+ +// n6 x+ +// n7 .y+ +// +// n0 is the root node, and its children are n1, n4 and n6; n1's children are +// n2 and n3; n4's child is n5; n6's child is n7. Nodes n0, n1 and n4 (marked +// with a trailing "-") are partial keys, and nodes n2, n3, n5, n6 and n7 +// (marked with a trailing "+") are complete keys. +type trieNode struct { + // value is the value of the trie node's key/value pair. It is empty if + // this node is not a complete key. + value string + // priority is the priority (higher is more important) of the trie node's + // key/value pair; keys are not necessarily matched shortest- or longest- + // first. Priority is positive if this node is a complete key, and zero + // otherwise. In the example above, positive/zero priorities are marked + // with a trailing "+" or "-". + priority int + + // A trie node may have zero, one or more child nodes: + // * if the remaining fields are zero, there are no children. + // * if prefix and next are non-zero, there is one child in next. + // * if table is non-zero, it defines all the children. + // + // Prefixes are preferred over tables when there is one child, but the + // root node always uses a table for lookup efficiency. + + // prefix is the difference in keys between this trie node and the next. + // In the example above, node n4 has prefix "cbc" and n4's next node is n5. + // Node n5 has no children and so has zero prefix, next and table fields. + prefix string + next *trieNode + + // table is a lookup table indexed by the next byte in the key, after + // remapping that byte through genericReplacer.mapping to create a dense + // index. In the example above, the keys only use 'a', 'b', 'c', 'x' and + // 'y', which remap to 0, 1, 2, 3 and 4. All other bytes remap to 5, and + // genericReplacer.tableSize will be 5. Node n0's table will be + // []*trieNode{ 0:n1, 1:n4, 3:n6 }, where the 0, 1 and 3 are the remapped + // 'a', 'b' and 'x'. + table []*trieNode +} + +func (t *trieNode) add(key, val string, priority int, r *genericReplacer) { + if key == "" { + if t.priority == 0 { + t.value = val + t.priority = priority + } + return + } + + if t.prefix != "" { + // Need to split the prefix among multiple nodes. 
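+		// For example: with an existing prefix "abc", adding "abd" shares a
+		// two-byte prefix, so this node keeps "ab" and an intermediate node is
+		// created whose lookup table splits on 'c' vs 'd'. Re-adding "abc"
+		// (n == len(t.prefix)) just recurses into t.next, where the earlier,
+		// higher-priority value is kept, and a key sharing no first byte
+		// (n == 0) converts this node to a table.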
+ var n int // length of the longest common prefix + for ; n < len(t.prefix) && n < len(key); n++ { + if t.prefix[n] != key[n] { + break + } + } + if n == len(t.prefix) { + t.next.add(key[n:], val, priority, r) + } else if n == 0 { + // First byte differs, start a new lookup table here. Looking up + // what is currently t.prefix[0] will lead to prefixNode, and + // looking up key[0] will lead to keyNode. + var prefixNode *trieNode + if len(t.prefix) == 1 { + prefixNode = t.next + } else { + prefixNode = &trieNode{ + prefix: t.prefix[1:], + next: t.next, + } + } + keyNode := new(trieNode) + t.table = make([]*trieNode, r.tableSize) + t.table[r.mapping[t.prefix[0]]] = prefixNode + t.table[r.mapping[key[0]]] = keyNode + t.prefix = "" + t.next = nil + keyNode.add(key[1:], val, priority, r) + } else { + // Insert new node after the common section of the prefix. + next := &trieNode{ + prefix: t.prefix[n:], + next: t.next, + } + t.prefix = t.prefix[:n] + t.next = next + next.add(key[n:], val, priority, r) + } + } else if t.table != nil { + // Insert into existing table. + m := r.mapping[key[0]] + if t.table[m] == nil { + t.table[m] = new(trieNode) + } + t.table[m].add(key[1:], val, priority, r) + } else { + t.prefix = key + t.next = new(trieNode) + t.next.add("", val, priority, r) + } +} + +func (r *genericReplacer) lookup(s string, ignoreRoot bool) (val string, keylen int, found bool) { + // Iterate down the trie to the end, and grab the value and keylen with + // the highest priority. + bestPriority := 0 + node := &r.root + n := 0 + for node != nil { + if node.priority > bestPriority && !(ignoreRoot && node == &r.root) { + bestPriority = node.priority + val = node.value + keylen = n + found = true + } + + if s == "" { + break + } + if node.table != nil { + index := r.mapping[ByteToLower(s[0])] + if int(index) == r.tableSize { + break + } + node = node.table[index] + s = s[1:] + n++ + } else if node.prefix != "" && StringHasPrefixFold(s, node.prefix) { + n += len(node.prefix) + s = s[len(node.prefix):] + node = node.next + } else { + break + } + } + return +} + +// genericReplacer is the fully generic algorithm. +// It's used as a fallback when nothing faster can be used. +type genericReplacer struct { + root trieNode + // tableSize is the size of a trie node's lookup table. It is the number + // of unique key bytes. + tableSize int + // mapping maps from key bytes to a dense index for trieNode.table. + mapping [256]byte +} + +func makeGenericReplacer(oldnew []string) *genericReplacer { + r := new(genericReplacer) + // Find each byte used, then assign them each an index. + for i := 0; i < len(oldnew); i += 2 { + key := strings.ToLower(oldnew[i]) + for j := 0; j < len(key); j++ { + r.mapping[key[j]] = 1 + } + } + + for _, b := range r.mapping { + r.tableSize += int(b) + } + + var index byte + for i, b := range r.mapping { + if b == 0 { + r.mapping[i] = byte(r.tableSize) + } else { + r.mapping[i] = index + index++ + } + } + // Ensure root node uses a lookup table (for performance). + r.root.table = make([]*trieNode, r.tableSize) + + for i := 0; i < len(oldnew); i += 2 { + r.root.add(strings.ToLower(oldnew[i]), oldnew[i+1], len(oldnew)-i, r) + } + return r +} + +type appendSliceWriter []byte + +// Write writes to the buffer to satisfy io.Writer. +func (w *appendSliceWriter) Write(p []byte) (int, error) { + *w = append(*w, p...) + return len(p), nil +} + +// WriteString writes to the buffer without string->[]byte->string allocations. 
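+// Both Write and WriteString append through the pointer receiver, so
+// genericReplacer.Replace can build its whole output by appending into a
+// single preallocated slice.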
+func (w *appendSliceWriter) WriteString(s string) (int, error) { + *w = append(*w, s...) + return len(s), nil +} + +type stringWriterIface interface { + WriteString(string) (int, error) +} + +type stringWriter struct { + w io.Writer +} + +func (w stringWriter) WriteString(s string) (int, error) { + return w.w.Write([]byte(s)) +} + +func getStringWriter(w io.Writer) stringWriterIface { + sw, ok := w.(stringWriterIface) + if !ok { + sw = stringWriter{w} + } + return sw +} + +func (r *genericReplacer) Replace(s string) string { + buf := make(appendSliceWriter, 0, len(s)) + r.WriteString(&buf, s) + return string(buf) +} + +func (r *genericReplacer) WriteString(w io.Writer, s string) (n int, err error) { + sw := getStringWriter(w) + var last, wn int + var prevMatchEmpty bool + for i := 0; i <= len(s); { + // Fast path: s[i] is not a prefix of any pattern. + if i != len(s) && r.root.priority == 0 { + index := int(r.mapping[ByteToLower(s[i])]) + if index == r.tableSize || r.root.table[index] == nil { + i++ + continue + } + } + + // Ignore the empty match iff the previous loop found the empty match. + val, keylen, match := r.lookup(s[i:], prevMatchEmpty) + prevMatchEmpty = match && keylen == 0 + if match { + orig := s[i : i+keylen] + switch CaseStyle(orig) { + case CaseUnknown: + // pretend we didn't match + // i++ + // continue + case CaseUpper: + val = strings.ToUpper(val) + case CaseLower: + val = strings.ToLower(val) + case CaseTitle: + if len(val) < 2 { + val = strings.ToUpper(val) + } else { + val = strings.ToUpper(val[:1]) + strings.ToLower(val[1:]) + } + } + wn, err = sw.WriteString(s[last:i]) + n += wn + if err != nil { + return + } + //log.Printf("%d: Going to correct %q with %q", i, s[i:i+keylen], val) + wn, err = sw.WriteString(val) + n += wn + if err != nil { + return + } + i += keylen + last = i + continue + } + i++ + } + if last != len(s) { + wn, err = sw.WriteString(s[last:]) + n += wn + } + return +} diff --git a/vendor/github.com/golangci/misspell/stringreplacer_test.gox b/vendor/github.com/golangci/misspell/stringreplacer_test.gox new file mode 100644 index 000000000..70da997f6 --- /dev/null +++ b/vendor/github.com/golangci/misspell/stringreplacer_test.gox @@ -0,0 +1,421 @@ +// Copyright 2009 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package misspell_test + +import ( + "bytes" + "fmt" + "strings" + "testing" + + . "github.com/client9/misspell" +) + +var htmlEscaper = NewStringReplacer( + "&", "&", + "<", "<", + ">", ">", + `"`, """, + "'", "'", +) + +var htmlUnescaper = NewStringReplacer( + "&", "&", + "<", "<", + ">", ">", + """, `"`, + "'", "'", +) + +// The http package's old HTML escaping function. +func oldHTMLEscape(s string) string { + s = strings.Replace(s, "&", "&", -1) + s = strings.Replace(s, "<", "<", -1) + s = strings.Replace(s, ">", ">", -1) + s = strings.Replace(s, `"`, """, -1) + s = strings.Replace(s, "'", "'", -1) + return s +} + +var capitalLetters = NewStringReplacer("a", "A", "b", "B") + +// TestReplacer tests the replacer implementations. +func TestReplacer(t *testing.T) { + type testCase struct { + r *StringReplacer + in, out string + } + var testCases []testCase + + // str converts 0xff to "\xff". This isn't just string(b) since that converts to UTF-8. + str := func(b byte) string { + return string([]byte{b}) + } + var s []string + + // inc maps "\x00"->"\x01", ..., "a"->"b", "b"->"c", ..., "\xff"->"\x00". 
+ for i := 0; i < 256; i++ { + s = append(s, str(byte(i)), str(byte(i+1))) + } + inc := NewStringReplacer(s...) + + // Test cases with 1-byte old strings, 1-byte new strings. + testCases = append(testCases, + testCase{capitalLetters, "brad", "BrAd"}, + testCase{capitalLetters, strings.Repeat("a", (32<<10)+123), strings.Repeat("A", (32<<10)+123)}, + testCase{capitalLetters, "", ""}, + + testCase{inc, "brad", "csbe"}, + testCase{inc, "\x00\xff", "\x01\x00"}, + testCase{inc, "", ""}, + + testCase{NewStringReplacer("a", "1", "a", "2"), "brad", "br1d"}, + ) + + // repeat maps "a"->"a", "b"->"bb", "c"->"ccc", ... + s = nil + for i := 0; i < 256; i++ { + n := i + 1 - 'a' + if n < 1 { + n = 1 + } + s = append(s, str(byte(i)), strings.Repeat(str(byte(i)), n)) + } + repeat := NewStringReplacer(s...) + + // Test cases with 1-byte old strings, variable length new strings. + testCases = append(testCases, + testCase{htmlEscaper, "No changes", "No changes"}, + testCase{htmlEscaper, "I <3 escaping & stuff", "I <3 escaping & stuff"}, + testCase{htmlEscaper, "&&&", "&&&"}, + testCase{htmlEscaper, "", ""}, + + testCase{repeat, "brad", "bbrrrrrrrrrrrrrrrrrradddd"}, + testCase{repeat, "abba", "abbbba"}, + testCase{repeat, "", ""}, + + testCase{NewStringReplacer("a", "11", "a", "22"), "brad", "br11d"}, + ) + + // The remaining test cases have variable length old strings. + + testCases = append(testCases, + testCase{htmlUnescaper, "&amp;", "&"}, + testCase{htmlUnescaper, "<b>HTML's neat</b>", "HTML's neat"}, + testCase{htmlUnescaper, "", ""}, + + testCase{NewStringReplacer("a", "1", "a", "2", "xxx", "xxx"), "brad", "br1d"}, + + testCase{NewStringReplacer("a", "1", "aa", "2", "aaa", "3"), "aaaa", "1111"}, + + testCase{NewStringReplacer("aaa", "3", "aa", "2", "a", "1"), "aaaa", "31"}, + ) + + // gen1 has multiple old strings of variable length. There is no + // overall non-empty common prefix, but some pairwise common prefixes. + gen1 := NewStringReplacer( + "aaa", "3[aaa]", + "aa", "2[aa]", + "a", "1[a]", + "i", "i", + "longerst", "most long", + "longer", "medium", + "long", "short", + "xx", "xx", + "x", "X", + "X", "Y", + "Y", "Z", + ) + testCases = append(testCases, + testCase{gen1, "fooaaabar", "foo3[aaa]b1[a]r"}, + testCase{gen1, "long, longerst, longer", "short, most long, medium"}, + testCase{gen1, "xxxxx", "xxxxX"}, + testCase{gen1, "XiX", "YiY"}, + testCase{gen1, "", ""}, + ) + + // gen2 has multiple old strings with no pairwise common prefix. + gen2 := NewStringReplacer( + "roses", "red", + "violets", "blue", + "sugar", "sweet", + ) + testCases = append(testCases, + testCase{gen2, "roses are red, violets are blue...", "red are red, blue are blue..."}, + testCase{gen2, "", ""}, + ) + + // gen3 has multiple old strings with an overall common prefix. + gen3 := NewStringReplacer( + "abracadabra", "poof", + "abracadabrakazam", "splat", + "abraham", "lincoln", + "abrasion", "scrape", + "abraham", "isaac", + ) + testCases = append(testCases, + testCase{gen3, "abracadabrakazam abraham", "poofkazam lincoln"}, + testCase{gen3, "abrasion abracad", "scrape abracad"}, + testCase{gen3, "abba abram abrasive", "abba abram abrasive"}, + testCase{gen3, "", ""}, + ) + + // foo{1,2,3,4} have multiple old strings with an overall common prefix + // and 1- or 2- byte extensions from the common prefix. 
+ foo1 := NewStringReplacer( + "foo1", "A", + "foo2", "B", + "foo3", "C", + ) + foo2 := NewStringReplacer( + "foo1", "A", + "foo2", "B", + "foo31", "C", + "foo32", "D", + ) + foo3 := NewStringReplacer( + "foo11", "A", + "foo12", "B", + "foo31", "C", + "foo32", "D", + ) + foo4 := NewStringReplacer( + "foo12", "B", + "foo32", "D", + ) + testCases = append(testCases, + testCase{foo1, "fofoofoo12foo32oo", "fofooA2C2oo"}, + testCase{foo1, "", ""}, + + testCase{foo2, "fofoofoo12foo32oo", "fofooA2Doo"}, + testCase{foo2, "", ""}, + + testCase{foo3, "fofoofoo12foo32oo", "fofooBDoo"}, + testCase{foo3, "", ""}, + + testCase{foo4, "fofoofoo12foo32oo", "fofooBDoo"}, + testCase{foo4, "", ""}, + ) + + // genAll maps "\x00\x01\x02...\xfe\xff" to "[all]", amongst other things. + allBytes := make([]byte, 256) + for i := range allBytes { + allBytes[i] = byte(i) + } + allString := string(allBytes) + genAll := NewStringReplacer( + allString, "[all]", + "\xff", "[ff]", + "\x00", "[00]", + ) + testCases = append(testCases, + testCase{genAll, allString, "[all]"}, + testCase{genAll, "a\xff" + allString + "\x00", "a[ff][all][00]"}, + testCase{genAll, "", ""}, + ) + + // Test cases with empty old strings. + + blankToX1 := NewStringReplacer("", "X") + blankToX2 := NewStringReplacer("", "X", "", "") + blankHighPriority := NewStringReplacer("", "X", "o", "O") + blankLowPriority := NewStringReplacer("o", "O", "", "X") + blankNoOp1 := NewStringReplacer("", "") + blankNoOp2 := NewStringReplacer("", "", "", "A") + blankFoo := NewStringReplacer("", "X", "foobar", "R", "foobaz", "Z") + testCases = append(testCases, + testCase{blankToX1, "foo", "XfXoXoX"}, + testCase{blankToX1, "", "X"}, + + testCase{blankToX2, "foo", "XfXoXoX"}, + testCase{blankToX2, "", "X"}, + + testCase{blankHighPriority, "oo", "XOXOX"}, + testCase{blankHighPriority, "ii", "XiXiX"}, + testCase{blankHighPriority, "oiio", "XOXiXiXOX"}, + testCase{blankHighPriority, "iooi", "XiXOXOXiX"}, + testCase{blankHighPriority, "", "X"}, + + testCase{blankLowPriority, "oo", "OOX"}, + testCase{blankLowPriority, "ii", "XiXiX"}, + testCase{blankLowPriority, "oiio", "OXiXiOX"}, + testCase{blankLowPriority, "iooi", "XiOOXiX"}, + testCase{blankLowPriority, "", "X"}, + + testCase{blankNoOp1, "foo", "foo"}, + testCase{blankNoOp1, "", ""}, + + testCase{blankNoOp2, "foo", "foo"}, + testCase{blankNoOp2, "", ""}, + + testCase{blankFoo, "foobarfoobaz", "XRXZX"}, + testCase{blankFoo, "foobar-foobaz", "XRX-XZX"}, + testCase{blankFoo, "", "X"}, + ) + + // single string replacer + + abcMatcher := NewStringReplacer("abc", "[match]") + + testCases = append(testCases, + testCase{abcMatcher, "", ""}, + testCase{abcMatcher, "ab", "ab"}, + testCase{abcMatcher, "abc", "[match]"}, + testCase{abcMatcher, "abcd", "[match]d"}, + testCase{abcMatcher, "cabcabcdabca", "c[match][match]d[match]a"}, + ) + + // Issue 6659 cases (more single string replacer) + + noHello := NewStringReplacer("Hello", "") + testCases = append(testCases, + testCase{noHello, "Hello", ""}, + testCase{noHello, "Hellox", "x"}, + testCase{noHello, "xHello", "x"}, + testCase{noHello, "xHellox", "xx"}, + ) + + // No-arg test cases. + + nop := NewStringReplacer() + testCases = append(testCases, + testCase{nop, "abc", "abc"}, + testCase{nop, "", ""}, + ) + + // Run the test cases. + + for i, tc := range testCases { + if s := tc.r.Replace(tc.in); s != tc.out { + t.Errorf("%d. strings.Replace(%q) = %q, want %q", i, tc.in, s, tc.out) + } + var buf bytes.Buffer + n, err := tc.r.WriteString(&buf, tc.in) + if err != nil { + t.Errorf("%d. 
WriteString: %v", i, err) + continue + } + got := buf.String() + if got != tc.out { + t.Errorf("%d. WriteString(%q) wrote %q, want %q", i, tc.in, got, tc.out) + continue + } + if n != len(tc.out) { + t.Errorf("%d. WriteString(%q) wrote correct string but reported %d bytes; want %d (%q)", + i, tc.in, n, len(tc.out), tc.out) + } + } +} + +type errWriter struct{} + +func (errWriter) Write(p []byte) (n int, err error) { + return 0, fmt.Errorf("unwritable") +} + +func BenchmarkGenericNoMatch(b *testing.B) { + str := strings.Repeat("A", 100) + strings.Repeat("B", 100) + generic := NewStringReplacer("a", "A", "b", "B", "12", "123") // varying lengths forces generic + for i := 0; i < b.N; i++ { + generic.Replace(str) + } +} + +func BenchmarkGenericMatch1(b *testing.B) { + str := strings.Repeat("a", 100) + strings.Repeat("b", 100) + generic := NewStringReplacer("a", "A", "b", "B", "12", "123") + for i := 0; i < b.N; i++ { + generic.Replace(str) + } +} + +func BenchmarkGenericMatch2(b *testing.B) { + str := strings.Repeat("It's <b>HTML</b>!", 100) + for i := 0; i < b.N; i++ { + htmlUnescaper.Replace(str) + } +} + +func benchmarkSingleString(b *testing.B, pattern, text string) { + r := NewStringReplacer(pattern, "[match]") + b.SetBytes(int64(len(text))) + b.ResetTimer() + for i := 0; i < b.N; i++ { + r.Replace(text) + } +} + +func BenchmarkSingleMaxSkipping(b *testing.B) { + benchmarkSingleString(b, strings.Repeat("b", 25), strings.Repeat("a", 10000)) +} + +func BenchmarkSingleLongSuffixFail(b *testing.B) { + benchmarkSingleString(b, "b"+strings.Repeat("a", 500), strings.Repeat("a", 1002)) +} + +func BenchmarkSingleMatch(b *testing.B) { + benchmarkSingleString(b, "abcdef", strings.Repeat("abcdefghijklmno", 1000)) +} + +func BenchmarkByteByteNoMatch(b *testing.B) { + str := strings.Repeat("A", 100) + strings.Repeat("B", 100) + for i := 0; i < b.N; i++ { + capitalLetters.Replace(str) + } +} + +func BenchmarkByteByteMatch(b *testing.B) { + str := strings.Repeat("a", 100) + strings.Repeat("b", 100) + for i := 0; i < b.N; i++ { + capitalLetters.Replace(str) + } +} + +func BenchmarkByteStringMatch(b *testing.B) { + str := "<" + strings.Repeat("a", 99) + strings.Repeat("b", 99) + ">" + for i := 0; i < b.N; i++ { + htmlEscaper.Replace(str) + } +} + +func BenchmarkHTMLEscapeNew(b *testing.B) { + str := "I <3 to escape HTML & other text too." + for i := 0; i < b.N; i++ { + htmlEscaper.Replace(str) + } +} + +func BenchmarkHTMLEscapeOld(b *testing.B) { + str := "I <3 to escape HTML & other text too." + for i := 0; i < b.N; i++ { + oldHTMLEscape(str) + } +} + +func BenchmarkByteStringReplacerWriteString(b *testing.B) { + str := strings.Repeat("I <3 to escape HTML & other text too.", 100) + buf := new(bytes.Buffer) + for i := 0; i < b.N; i++ { + htmlEscaper.WriteString(buf, str) + buf.Reset() + } +} + +func BenchmarkByteReplacerWriteString(b *testing.B) { + str := strings.Repeat("abcdefghijklmnopqrstuvwxyz", 100) + buf := new(bytes.Buffer) + for i := 0; i < b.N; i++ { + capitalLetters.WriteString(buf, str) + buf.Reset() + } +} + +// BenchmarkByteByteReplaces compares byteByteImpl against multiple Replaces. 
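+//
+// This vendored copy keeps the file under a .gox extension, so the Go
+// toolchain ignores it; upstream, benchmarks like these would be exercised
+// with the standard tooling, e.g. (standard `go test` flags, not a command
+// taken from this repository):
+//
+//	go test -run '^$' -bench . -benchmem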
+func BenchmarkByteByteReplaces(b *testing.B) { + str := strings.Repeat("a", 100) + strings.Repeat("b", 100) + for i := 0; i < b.N; i++ { + strings.Replace(strings.Replace(str, "a", "A", -1), "b", "B", -1) + } +} diff --git a/vendor/github.com/golangci/misspell/url.go b/vendor/github.com/golangci/misspell/url.go new file mode 100644 index 000000000..1a259f5f9 --- /dev/null +++ b/vendor/github.com/golangci/misspell/url.go @@ -0,0 +1,17 @@ +package misspell + +import ( + "regexp" +) + +// Regexp for URL https://mathiasbynens.be/demo/url-regex +// +// original @imme_emosol (54 chars) has trouble with dashes in hostname +// @(https?|ftp)://(-\.)?([^\s/?\.#-]+\.?)+(/[^\s]*)?$@iS +var reURL = regexp.MustCompile(`(?i)(https?|ftp)://(-\.)?([^\s/?\.#]+\.?)+(/[^\s]*)?`) + +// StripURL attemps to replace URLs with blank spaces, e.g. +// "xxx http://foo.com/ yyy -> "xxx yyyy" +func StripURL(s string) string { + return reURL.ReplaceAllStringFunc(s, replaceWithBlanks) +} diff --git a/vendor/github.com/golangci/misspell/words.go b/vendor/github.com/golangci/misspell/words.go new file mode 100644 index 000000000..1603d87e6 --- /dev/null +++ b/vendor/github.com/golangci/misspell/words.go @@ -0,0 +1,31194 @@ +package misspell + +// Code generated automatically. DO NOT EDIT. + +// DictMain is the main rule set, not including locale-specific spellings +var DictMain = []string{ + "differentiatiations", "differentiations", + "disproportionaltely", "disproportionately", + "oversimplificiation", "oversimplification", + "transcendentational", "transcendental", + "anthromorphization", "anthropomorphization", + "disporportionately", "disproportionately", + "dispraportionately", "disproportionately", + "disproportianately", "disproportionately", + "disproportionatley", "disproportionately", + "disproprotionately", "disproportionately", + "fundamentalistisch", "fundamentalists", + "fundamentalistiska", "fundamentalists", + "fundamentalistiske", "fundamentalists", + "fundamentalistiskt", "fundamentalists", + "histocompatability", "histocompatibility", + "microtransacations", "microtransactions", + "microtransacciones", "microtransactions", + "microtransactional", "microtransactions", + "microtransactioned", "microtransactions", + "misunderstandingly", "misunderstandings", + "oversemplification", "oversimplification", + "oversimplifacation", "oversimplification", + "oversimplificaiton", "oversimplification", + "oversimplificating", "oversimplification", + "oversimplyfication", "oversimplification", + "cardiovasculaires", "cardiovascular", + "certificationkits", "certifications", + "counterporductive", "counterproductive", + "coutnerproductive", "counterproductive", + "disporportionatly", "disproportionately", + "disproportiantely", "disproportionately", + "disproportionatly", "disproportionately", + "disproportionnate", "disproportionate", + "disrepresentation", "misrepresentation", + "fundamentalistisk", "fundamentalists", + "incompatabilities", "incompatibilities", + "inconsequentional", "inconsequential", + "indistinguishible", "indistinguishable", + "indistingusihable", "indistinguishable", + "indistinquishable", "indistinguishable", + "indistuingishable", "indistinguishable", + "instatutionalized", "institutionalized", + "institucionalized", "institutionalized", + "institutionilized", "institutionalized", + "instutitionalized", "institutionalized", + "instututionalized", "institutionalized", + "interchangeablely", "interchangeably", + "interchangeablity", "interchangeably", + "intercontinential", 
"intercontinental", + "micortransactions", "microtransactions", + "microstansactions", "microtransactions", + "microtramsactions", "microtransactions", + "microtranasctions", "microtransactions", + "microtransacitons", "microtransactions", + "microtransacrions", "microtransactions", + "microtransactioms", "microtransactions", + "microtransactiosn", "microtransactions", + "microtranscations", "microtransactions", + "microtrasnactions", "microtransactions", + "mircotransactions", "microtransactions", + "misinterpretating", "misinterpreting", + "misrepresantation", "misrepresentation", + "misrepresentaiton", "misrepresentation", + "misrepresentating", "misrepresenting", + "misunderstantings", "misunderstandings", + "mocrotransactions", "microtransactions", + "oversimplifaction", "oversimplification", + "oversimplificaton", "oversimplification", + "oversimplifiction", "oversimplification", + "responsibillities", "responsibilities", + "unconstitutionnal", "unconstitutional", + "accomplishements", "accomplishments", + "admininistrative", "administrative", + "antidepresssants", "antidepressants", + "architechturally", "architecturally", + "cardiovasculaire", "cardiovascular", + "charactarization", "characterization", + "characterazation", "characterization", + "characterisitics", "characteristics", + "characteristsics", "characteristic", + "characterizarion", "characterization", + "charecterization", "characterization", + "charicterization", "characterization", + "circumstantional", "circumstantial", + "conversationable", "conversational", + "counterprodutive", "counterproductive", + "demonstrationens", "demonstrations", + "deterministische", "deterministic", + "differenciations", "differentiation", + "differentiantion", "differentiation", + "differentiatiors", "differentiation", + "differentitation", "differentiation", + "disperportionate", "disproportionate", + "disporportionate", "disproportionate", + "dispraportionate", "disproportionate", + "disproportianate", "disproportionate", + "disproportionaly", "disproportionately", + "disproprotionate", "disproportionate", + "electromagnectic", "electromagnetic", + "enviornmentalist", "environmentalist", + "environmentality", "environmentally", + "extraordinairily", "extraordinarily", + "extraordinarilly", "extraordinary", + "extraterrestials", "extraterrestrials", + "fundamentalismos", "fundamentalists", + "fundamentalismus", "fundamentalists", + "fundamentalistas", "fundamentalists", + "fundamentalisten", "fundamentalists", + "fundamentalister", "fundamentalists", + "imcomprehensible", "incomprehensible", + "immunosupressant", "immunosuppressant", + "imperfectionists", "imperfections", + "implementaciones", "implementations", + "implementationen", "implementations", + "implementationer", "implementations", + "inappropriatelly", "inappropriately", + "incompatablities", "incompatibilities", + "incompatiblities", "incompatibilities", + "incomprehencible", "incomprehensible", + "incomprehendible", "incomprehensible", + "incomprehenisble", "incomprehensible", + "incomprehensable", "incomprehensible", + "incomprehinsible", "incomprehensible", + "incomprihensible", "incomprehensible", + "inconprehensible", "incomprehensible", + "inconsistentcies", "inconsistencies", + "inconstitutional", "unconstitutional", + "incrompehensible", "incomprehensible", + "indistinguisable", "indistinguishable", + "institutionlized", "institutionalized", + "intellectualiser", "intellectuals", + "intellectualisme", "intellectuals", + "interchangeabley", "interchangeably", + 
"internationnally", "internationally", + "interpretaciones", "interpretations", + "interpretationen", "interpretations", + "manoeuverability", "maneuverability", + "massachusettians", "massachusetts", + "microtransacions", "microtransactions", + "microtransacting", "microtransactions", + "microtransactios", "microtransactions", + "microtransactons", "microtransactions", + "microtransations", "microtransactions", + "microtranscation", "microtransactions", + "mircotransaction", "microtransactions", + "miscommunciation", "miscommunication", + "miscommunicaiton", "miscommunication", + "miscomunnication", "miscommunication", + "miscummunication", "miscommunication", + "misinterpretated", "misinterpreted", + "misinterpretions", "misinterpreting", + "misinterpretting", "misinterpreting", + "misproportionate", "disproportionate", + "misrepresenation", "misrepresentation", + "misrepresentaion", "misrepresentation", + "misrepresentated", "misrepresented", + "misrepresentatie", "misrepresentation", + "misrepresentativ", "misrepresentation", + "misubderstanding", "misunderstandings", + "misudnerstanding", "misunderstandings", + "misundarstanding", "misunderstandings", + "misunderatanding", "misunderstandings", + "misunderdtanding", "misunderstandings", + "misundersatnding", "misunderstandings", + "misundersranding", "misunderstandings", + "misunderstadings", "misunderstandings", + "misunderstadning", "misunderstandings", + "misunderstamding", "misunderstandings", + "misunderstandigs", "misunderstandings", + "misunderstandimg", "misunderstandings", + "misunderstandind", "misunderstandings", + "misunderstanging", "misunderstandings", + "misunderstanidng", "misunderstandings", + "misunderstanings", "misunderstandings", + "misunderstansing", "misunderstandings", + "misunderstanting", "misunderstandings", + "misunderstending", "misunderstandings", + "misunderstnading", "misunderstandings", + "misunderstsnding", "misunderstandings", + "misunderstunding", "misunderstandings", + "misundertsanding", "misunderstandings", + "misundrestanding", "misunderstandings", + "misunterstanding", "misunderstandings", + "nationalistische", "nationalistic", + "nationalististic", "nationalistic", + "neconstitutional", "unconstitutional", + "notwhithstanding", "notwithstanding", + "objectificiation", "objectification", + "organisationnels", "organisations", + "perpendiculaires", "perpendicular", + "phillosophically", "philosophically", + "preinitalization", "preinitialization", + "prescriptionists", "prescriptions", + "procrastinarting", "procrastinating", + "procrastinationg", "procrastinating", + "procrastinazione", "procrastination", + "professionalisim", "professionalism", + "professionalisme", "professionals", + "professionallism", "professionalism", + "professionnalism", "professionalism", + "programattically", "programmatically", + "proportionallity", "proportionally", + "reaponsibilities", "responsibilities", + "reinitalizations", "reinitializations", + "representaciones", "representations", + "representationen", "representations", + "representationer", "representations", + "repsonsibilities", "responsibilities", + "responcibilities", "responsibilities", + "responisbilities", "responsibilities", + "responsabilities", "responsibilities", + "responsebilities", "responsibilities", + "straightforeward", "straightforward", + "surrepetitiously", "surreptitiously", + "technologicially", "technologically", + "unconditionnally", "unconditionally", + "unconfortability", "discomfort", + "unconstititional", "unconstitutional", + 
"uncontrollablely", "uncontrollably", + "underestimateing", "underestimating", + "understandablely", "understandably", + "unintentionnally", "unintentionally", + "unsubstantianted", "unsubstantiated", + "unsubstantiative", "unsubstantiated", + "acclimitization", "acclimatization", + "accomplishemnts", "accomplishments", + "accountabillity", "accountability", + "acknolwedgement", "acknowledgement", + "acknoweldgement", "acknowledgement", + "acknowldegement", "acknowledgement", + "acknowlegdement", "acknowledgement", + "administratieve", "administrative", + "administratiors", "administrators", + "administrativne", "administrative", + "aforementionned", "aforementioned", + "anitdepressants", "antidepressants", + "antidepressents", "antidepressants", + "archetecturally", "architecturally", + "associationthis", "associations", + "authobiographic", "autobiographic", + "awknowledgement", "acknowledgement", + "bureaucratische", "bureaucratic", + "cardiovascualar", "cardiovascular", + "carnagie-mellon", "carnegie-mellon", + "carnigie-mellon", "carnegie-mellon", + "celebrationists", "celebrations", + "charactaristics", "characteristics", + "characterisitcs", "characteristics", + "characterisitic", "characteristic", + "characterizaton", "characterization", + "charactersistic", "characteristic", + "charactersitics", "characteristics", + "charactoristics", "characteristics", + "charecteristics", "characteristics", + "comfrontational", "confrontational", + "commuinications", "communications", + "compatabilities", "compatibilities", + "complimentarity", "complimentary", + "compositionwise", "compositions", + "confidenciality", "confidential", + "confidentuality", "confidential", + "confrentational", "confrontational", + "confrontacional", "confrontational", + "conglaturations", "congratulations", + "congradulations", "congratulations", + "congragulations", "congratulations", + "congratualtions", "congratulations", + "congraturations", "congratulations", + "consequentually", "consequently", + "constitutionnal", "constitutional", + "deinitalization", "deinitialization", + "denominationals", "denominations", + "destinationhash", "destinations", + "deterministisch", "deterministic", + "developmentwise", "developments", + "differantiation", "differentiation", + "differenciation", "differentiation", + "differientation", "differentiation", + "discriminatoire", "discriminate", + "discriminatorie", "discriminate", + "disproportiante", "disproportionate", + "disproportinate", "disproportionate", + "elecrtomagnetic", "electromagnetic", + "electormagnetic", "electromagnetic", + "electromagentic", "electromagnetic", + "electromagnatic", "electromagnetic", + "electromangetic", "electromagnetic", + "electromegnetic", "electromagnetic", + "electronagnetic", "electromagnetic", + "enivronmentally", "environmentally", + "entrepreneurers", "entrepreneurs", + "enviornmentally", "environmentally", + "enviromentalist", "environmentalist", + "environemntally", "environmentally", + "envrionmentally", "environmentally", + "evolutionarilly", "evolutionary", + "experementation", "experimentation", + "experimantation", "experimentation", + "experimentacion", "experimentation", + "experimentating", "experimentation", + "experimenterade", "experimented", + "experimintation", "experimentation", + "expirementation", "experimentation", + "extraodrinarily", "extraordinarily", + "extraordinairly", "extraordinarily", + "extraordinarely", "extraordinarily", + "extraordinaryly", "extraordinarily", + "extraterrestial", "extraterrestrial", + 
"extroardinarily", "extraordinarily", + "fondamentalists", "fundamentalists", + "fundamendalists", "fundamentalists", + "fundamentalisme", "fundamentals", + "fundamentalismo", "fundamentals", + "fundamentalista", "fundamentals", + "fundamentalisti", "fundamentals", + "fundamnetalists", "fundamentalists", + "fundemantalists", "fundamentalists", + "fundimentalists", "fundamentalists", + "fundumentalists", "fundamentalists", + "gongratulations", "congratulations", + "grammaticallity", "grammatically", + "gundamentalists", "fundamentalists", + "idiosynchracies", "idiosyncrasies", + "implementaitons", "implementations", + "implimentations", "implementations", + "inapporpriately", "inappropriately", + "inappropraitely", "inappropriately", + "inappropriatley", "inappropriately", + "incompatability", "incompatibility", + "incompetentence", "incompetence", + "incomprehensibe", "incomprehensible", + "incomprehesible", "incomprehensible", + "inconcequential", "inconsequential", + "inconcistencies", "inconsistencies", + "inconditionally", "unconditionally", + "inconsecuential", "inconsequential", + "inconsequantial", "inconsequential", + "inconsequencial", "inconsequential", + "inconsequentual", "inconsequential", + "inconsiquential", "inconsequential", + "inconsistancies", "inconsistencies", + "inconsistencias", "inconsistencies", + "inconsistensies", "inconsistencies", + "inconsistenties", "inconsistencies", + "independentisme", "independents", + "independentiste", "independents", + "independentness", "independents", + "inexperiencable", "inexperience", + "inplementations", "implementations", + "instantaneoulsy", "instantaneous", + "institutionella", "institutional", + "institutionnels", "institutions", + "instutionalized", "institutionalized", + "insubstantiated", "unsubstantiated", + "interchangabley", "interchangeably", + "interchangebale", "interchangeable", + "intercontinetal", "intercontinental", + "interpertations", "interpretations", + "interpratations", "interpretations", + "interpritations", "interpretations", + "intersectionals", "intersections", + "intrepretations", "interpretations", + "investigationes", "investigations", + "journalistische", "journalistic", + "libertarianisim", "libertarianism", + "libertarianisme", "libertarians", + "libertarianismo", "libertarians", + "libertarianists", "libertarians", + "libertariansism", "libertarianism", + "manisfestations", "manifestations", + "manouverability", "maneuverability", + "manufacturerers", "manufacturers", + "marshmallowiest", "marshmallows", + "marshmallowness", "marshmallows", + "microtransacton", "microtransactions", + "mininterpreting", "misinterpreting", + "miscommuniation", "miscommunication", + "miscommunicatie", "miscommunication", + "miscommuniction", "miscommunication", + "misinterperting", "misinterpreting", + "misinterprating", "misinterpreting", + "misinterprented", "misinterpret", + "misinterprested", "misinterpret", + "misinterpretion", "misinterpreting", + "misinterpretted", "misinterpreted", + "misinterpriting", "misinterpreting", + "misintrepreting", "misinterpreting", + "misrepresention", "misrepresenting", + "misunderstading", "misunderstanding", + "misunderstandig", "misunderstandings", + "misunderstandng", "misunderstandings", + "misunderstaning", "misunderstanding", + "multicultralism", "multiculturalism", + "multinationella", "multinational", + "nationalistisch", "nationalists", + "nationalistisen", "nationalists", + "nationalistiska", "nationalists", + "nationalistiske", "nationalists", + "nationalistiskt", 
"nationalists", + "nationalistista", "nationalists", + "objectificaiton", "objectification", + "objectivication", "objectification", + "organisationens", "organisations", + "organisationers", "organisations", + "overestimateing", "overestimating", + "paychologically", "psychologically", + "performancetest", "performances", + "performancewise", "performances", + "perpendiculaire", "perpendicular", + "pharamceuticals", "pharmaceutical", + "pharmacueticals", "pharmaceutical", + "philoshopically", "philosophically", + "philosohpically", "philosophically", + "philosophycally", "philosophically", + "phsycologically", "psychologically", + "phychologically", "psychologically", + "phylosophically", "philosophically", + "physcologically", "psychologically", + "precrastination", "procrastination", + "prefessionalism", "professionalism", + "premonasterians", "premonstratensians", + "procastrinating", "procrastinating", + "procastrination", "procrastination", + "procrascinating", "procrastinating", + "procrastenating", "procrastinating", + "procrastiantion", "procrastination", + "procrastibating", "procrastinating", + "procrastibation", "procrastination", + "procrastonating", "procrastinating", + "procrestinating", "procrastinating", + "procrestination", "procrastination", + "professionalsim", "professionalism", + "prograstination", "procrastination", + "progressionists", "progressions", + "progressionwise", "progressions", + "prokrastination", "procrastination", + "proportionallly", "proportionally", + "proscratination", "procrastination", + "pscyhologically", "psychologically", + "pshycologically", "psychologically", + "psichologically", "psychologically", + "psychedelicious", "psychedelics", + "psychedelicness", "psychedelics", + "psycholigically", "psychologically", + "psychopathische", "psychopathic", + "pyschologically", "psychologically", + "racionalization", "rationalization", + "rationalizaiton", "rationalization", + "rationalizating", "rationalization", + "reccomendations", "recommendations", + "recommandations", "recommendations", + "recommondations", "recommendations", + "reinitalization", "reinitialization", + "repersentations", "representations", + "represantations", "representations", + "represantatives", "representatives", + "representatieve", "representative", + "representativas", "representatives", + "representetives", "representatives", + "representitives", "representatives", + "responibilities", "responsibilities", + "responsibilites", "responsibilities", + "responsibilitys", "responsibilities", + "responsibillity", "responsibility", + "responsibilties", "responsibilities", + "responsiblities", "responsibilities", + "ridiculoussness", "ridiculousness", + "saskatchewinian", "saskatchewan", + "satisfactorally", "satisfactory", + "satisfactorilly", "satisfactory", + "schizophreniiic", "schizophrenic", + "sensationalisim", "sensationalism", + "spreadsheeticus", "spreadsheets", + "starightforward", "straightforward", + "straigthforward", "straightforward", + "striaghtforward", "straightforward", + "sustainabillity", "sustainability", + "technoligically", "technologically", + "troubelshooting", "troubleshooting", + "troublehsooting", "troubleshooting", + "troubleshotting", "troubleshooting", + "trustworthyness", "trustworthiness", + "ubsubstantiated", "unsubstantiated", + "unappropriately", "inappropriately", + "uncomfortablely", "uncomfortably", + "uncomfortablity", "uncomfortably", + "unconditionable", "unconditional", + "unconstituional", "unconstitutional", + "uncontitutional", 
"unconstitutional", + "uncontrollabley", "uncontrollably", + "uncontrollablly", "uncontrollably", + "unconventionnal", "unconventional", + "underastimating", "underestimating", + "underestemating", "underestimating", + "understandabley", "understandably", + "unintensionally", "unintentionally", + "unprofessionnal", "unprofessional", + "unresponsivness", "unresponsive", + "unsibstantiated", "unsubstantiated", + "unsubstanciated", "unsubstantiated", + "unsubstansiated", "unsubstantiated", + "unsusbtantiated", "unsubstantiated", + "untranslateable", "untranslatable", + "vulernabilities", "vulnerabilities", + "vulnarabilities", "vulnerabilities", + "vulnurabilities", "vulnerabilities", + "vunlerabilities", "vulnerabilities", + "vurnerabilities", "vulnerabilities", + "accomplishemnt", "accomplishment", + "accomplishents", "accomplishes", + "acconplishment", "accomplishment", + "acknowledgeing", "acknowledging", + "acknowledgemnt", "acknowledgement", + "acomplishments", "accomplishments", + "administartion", "administration", + "administartors", "administrators", + "administraters", "administrators", + "administratief", "administrative", + "administratiei", "administrative", + "administratior", "administrator", + "administrativo", "administration", + "adminsitration", "administration", + "adminsitrative", "administrative", + "adminsitrators", "administrators", + "affectionatley", "affectionate", + "aforememtioned", "aforementioned", + "aforementioend", "aforementioned", + "alternativelly", "alternatively", + "amministrative", "administrative", + "anitdepressant", "antidepressants", + "approproximate", "approximate", + "approximatelly", "approximately", + "archeaologists", "archeologists", + "architechtures", "architectures", + "architectureal", "architectural", + "architecturial", "architectural", + "assassintation", "assassination", + "authenitcation", "authentication", + "authenticaiton", "authentication", + "authobiography", "autobiography", + "breakthroughts", "breakthroughs", + "bureaucratisch", "bureaucratic", + "calssification", "classification", + "capatilization", "capitalization", + "capitalizacion", "capitalization", + "capitalizaiton", "capitalization", + "capitalizating", "capitalization", + "capitilazation", "capitalization", + "capitolization", "capitalization", + "captialization", "capitalization", + "cardiocascular", "cardiovascular", + "cardiovascualr", "cardiovascular", + "cardiovasuclar", "cardiovascular", + "caridovascular", "cardiovascular", + "cessationalism", "sensationalism", + "cessationalist", "sensationalist", + "charactaristic", "characteristic", + "characterisics", "characteristics", + "characterisitc", "characteristics", + "characteristcs", "characteristics", + "characteritics", "characteristic", + "charactersitic", "characteristics", + "charasteristic", "characteristics", + "charecteristic", "characteristic", + "cheeseburguers", "cheeseburgers", + "cinematagraphy", "cinematography", + "cinematagrophy", "cinematography", + "cinematograhpy", "cinematography", + "cinematogrophy", "cinematography", + "cinematogrpahy", "cinematography", + "cinemetography", "cinematography", + "cinimatography", "cinematography", + "circumstansial", "circumstantial", + "circumstantual", "circumstantial", + "circumstential", "circumstantial", + "circunstantial", "circumstantial", + "classificaiton", "classification", + "coincedentally", "coincidentally", + "coinsidentally", "coincidentally", + "commemmorating", "commemorating", + "communciations", "communications", + "compatablities", 
"compatibilities", + "compatibillity", "compatibility", + "compatiblities", "compatibilities", + "competitioners", "competitions", + "comphrehensive", "comprehensive", + "computationnal", "computational", + "concatentation", "concatenation", + "conciderations", "considerations", + "condescenscion", "condescension", + "condradictions", "contradictions", + "configuartions", "configurations", + "confugurations", "configurations", + "conglaturation", "congratulations", + "congratulatons", "congratulations", + "conicidentally", "coincidentally", + "conifgurations", "configurations", + "conscioussness", "consciousness", + "consentrations", "concentrations", + "consiciousness", "consciousness", + "considerablely", "considerably", + "considerstions", "considerations", + "constititional", "constitutional", + "constitucional", "constitutional", + "contamporaries", "contemporaries", + "contemporaneus", "contemporaneous", + "contraceptivos", "contraceptives", + "contradicitons", "contradictions", + "contradictiong", "contradicting", + "contriceptives", "contraceptives", + "controceptives", "contraceptives", + "controdictions", "contradictions", + "conversacional", "conversational", + "converstaional", "conversational", + "correpsondence", "correspondence", + "correspondants", "correspondents", + "correspondense", "correspondence", + "correspondente", "correspondence", + "corrispondants", "correspondents", + "corrispondence", "correspondence", + "corrospondence", "correspondence", + "costumizations", "customization", + "councidentally", "coincidentally", + "crystalisation", "crystallisation", + "curcumstantial", "circumstantial", + "demenstrations", "demonstrations", + "deminstrations", "demonstrations", + "demonstartions", "demonstrations", + "demonstrativno", "demonstrations", + "demonstrativos", "demonstrations", + "demosntrations", "demonstrations", + "desintegration", "disintegration", + "deterioriating", "deteriorating", + "determinisitic", "deterministic", + "differentiaton", "differentiation", + "disatisfaction", "dissatisfaction", + "discrimanatory", "discriminatory", + "discriminacion", "discrimination", + "discriminitory", "discriminatory", + "disillusionned", "disillusioned", + "diskrimination", "discrimination", + "disproportiate", "disproportionate", + "distingiushing", "distinguishing", + "distingquished", "distinguished", + "distingusihing", "distinguishing", + "distinquishing", "distinguishing", + "distuingishing", "distinguishing", + "dysfunctionnal", "dysfunctional", + "eldistribution", "redistribution", + "electromagnetc", "electromagnetic", + "electromagntic", "electromagnetic", + "endoctrination", "indoctrination", + "enthusiastisch", "enthusiastic", + "entrepreneuers", "entrepreneurs", + "entrepreneures", "entrepreneurs", + "enviormentally", "environmentally", + "enviromentally", "environmentally", + "environmentals", "environments", + "environmentaly", "environmentally", + "experimentaion", "experimentation", + "experimentella", "experimental", + "extraordinairy", "extraordinary", + "extraordinarly", "extraordinary", + "extrordinarily", "extraordinarily", + "fondamentalist", "fundamentalist", + "foreshadowning", "foreshadowing", + "functionallity", "functionality", + "fundamendalist", "fundamentalist", + "fundamentalits", "fundamentalists", + "fundamnetalist", "fundamentalist", + "fundemantalist", "fundamentalist", + "fundimentalist", "fundamentalist", + "fundumentalist", "fundamentalist", + "generalizacion", "generalization", + "generalizating", "generalization", + 
"generelization", "generalization", + "geographacilly", "geographically", + "geographycally", "geographically", + "geogrpahically", "geographically", + "geopraphically", "geographically", + "goegraphically", "geographically", + "grandchilderen", "grandchildren", + "gravitationnal", "gravitational", + "groubdbreaking", "groundbreaking", + "groudnbreaking", "groundbreaking", + "hallcuinations", "hallucination", + "hallicunations", "hallucinations", + "hallucenations", "hallucinations", + "halluciantions", "hallucinations", + "hallucinaitons", "hallucination", + "hallunications", "hallucinations", + "hallusinations", "hallucinations", + "halluzinations", "hallucinations", + "hellucinations", "hallucinations", + "heterosexuella", "heterosexual", + "hipothetically", "hypothetically", + "homosexuallity", "homosexuality", + "hullucinations", "hallucinations", + "hyopthetically", "hypothetically", + "hypathetically", "hypothetically", + "hypethetically", "hypothetically", + "hypotehtically", "hypothetically", + "hypotethically", "hypothetically", + "identificacion", "identification", + "identificaiton", "identification", + "identificativo", "identification", + "identifikation", "identification", + "imlpementation", "implementations", + "impelmentation", "implementations", + "impersonationg", "impersonating", + "implementacion", "implementation", + "implementaiton", "implementation", + "implementating", "implementation", + "implementatino", "implementations", + "implemetnation", "implementations", + "implimentation", "implementation", + "impossibillity", "impossibility", + "inadvertantely", "inadvertently", + "inappropriatly", "inappropriately", + "inapproprietly", "inappropriately", + "incompatablity", "incompatibility", + "incompatiblity", "incompatibility", + "inconsequental", "inconsequential", + "inconsistentcy", "inconsistency", + "incontrollably", "uncontrollably", + "inconventional", "unconventional", + "inconvienenced", "inconvenience", + "indestrictible", "indestructible", + "indestructuble", "indestructible", + "indetification", "identification", + "indistructible", "indestructible", + "individuallity", "individuality", + "indocrtination", "indoctrination", + "indoctrication", "indoctrination", + "indoktrination", "indoctrination", + "industiralized", "industrialized", + "industrailized", "industrialized", + "industrualized", "industrialized", + "industructible", "indestructible", + "inexplicablely", "inexplicably", + "infrastracture", "infrastructure", + "infrastructuur", "infrastructure", + "infrastrucutre", "infrastructure", + "infrastrukture", "infrastructure", + "infrastrutture", "infrastructure", + "infrasturcture", "infrastructure", + "initalisations", "initialisations", + "initalizations", "initializations", + "inplementation", "implementation", + "inspirationnal", "inspirational", + "instinctivelly", "instinctively", + "institutionale", "institutionalized", + "institutionals", "institutions", + "institutionnal", "institutional", + "intellectualis", "intellectuals", + "intellectualls", "intellectuals", + "intellecutally", "intellectually", + "intercepticons", "interceptions", + "interchangable", "interchangeable", + "interchangably", "interchangeably", + "interchangeble", "interchangeable", + "interchangebly", "interchangeably", + "interlectually", "intellectually", + "internationaal", "international", + "internationaly", "internationally", + "internationnal", "international", + "interpersonnal", "interpersonal", + "interpertation", "interpretation", + "interpratation", 
"interpretation", + "interpretacion", "interpretation", + "interpretaiton", "interpretations", + "interpretating", "interpretation", + "interpritation", "interpretation", + "interstellaire", "interstellar", + "intillectually", "intellectually", + "intrepretation", "interpretation", + "invesitgations", "investigations", + "investiagtions", "investigations", + "investigatiors", "investigations", + "investigativos", "investigations", + "investigstions", "investigations", + "irrationallity", "irrationally", + "irresponsibile", "irresponsible", + "journalistisch", "journalistic", + "justificativos", "justifications", + "koncentrations", "concentrations", + "liberatrianism", "libertarianism", + "libertarainism", "libertarianism", + "libertariansim", "libertarianism", + "libertarinaism", "libertarianism", + "libertaryanism", "libertarianism", + "libertatianism", "libertarianism", + "liberterianism", "libertarianism", + "libretarianism", "libertarianism", + "manufactureers", "manufactures", + "manufactureras", "manufactures", + "manufacturered", "manufactured", + "manufactureres", "manufacturers", + "manufactureros", "manufactures", + "massachusettes", "massachusetts", + "massachussetts", "massachusetts", + "mataphorically", "metaphorically", + "mathameticians", "mathematicians", + "mathemagically", "mathematically", + "mathematitians", "mathematicians", + "mathemetically", "mathematically", + "mathemeticians", "mathematicians", + "mathimatically", "mathematically", + "mediterainnean", "mediterranean", + "mediterrannean", "mediterranean", + "metaphotically", "metaphorically", + "metephorically", "metaphorically", + "methaporically", "metaphorically", + "metiphorically", "metaphorically", + "metophorically", "metaphorically", + "metropolitaine", "metropolitan", + "misconseptions", "misconceptions", + "misinterperted", "misinterpreted", + "misintrepreted", "misinterpreted", + "mulitnationals", "multinational", + "mulitplication", "multiplication", + "multiplicacion", "multiplication", + "multiplicaiton", "multiplication", + "multiplicativo", "multiplication", + "multiplikation", "multiplication", + "mutlinationals", "multinational", + "mutliplication", "multiplication", + "nationalisitic", "nationalistic", + "nationalistics", "nationalists", + "nationalisties", "nationalists", + "nationalistisk", "nationalists", + "neighbourhoood", "neighbourhood", + "nieghbourhoods", "neighbourhood", + "northereastern", "northeastern", + "objectificaton", "objectification", + "opthalmologist", "ophthalmologist", + "organizacional", "organizational", + "organizaitonal", "organizational", + "organziational", "organizational", + "orginazational", "organizational", + "overestemating", "overestimating", + "overextimating", "overestimating", + "overhwelmingly", "overwhelmingly", + "overhwlemingly", "overwhelmingly", + "overpolulation", "overpopulation", + "overpopluation", "overpopulation", + "oversetimating", "overestimating", + "overshadowered", "overshadowed", + "overwhemlingly", "overwhelmingly", + "overwhlemingly", "overwhelmingly", + "paliamentarian", "parliamentarian", + "parliamentiary", "parliamentary", + "performancepcs", "performances", + "personalitites", "personalities", + "pharamceutical", "pharmaceutical", + "pharmaceudical", "pharmaceutical", + "pharmacuetical", "pharmaceutical", + "pharmaseutical", "pharmaceutical", + "pharmeceutical", "pharmaceutical", + "philosophicaly", "philosophically", + "phramaceutical", "pharmaceutical", + "playthroughers", "playthroughs", + "porportionally", "proportionally", + 
"practitionners", "practitioners", + "predeterminded", "predetermined", + "predominantely", "predominantly", + "predominantley", "predominantly", + "preinitalizing", "preinitializing", + "prerequisities", "prerequisite", + "procrastinatin", "procrastination", + "procrastinaton", "procrastination", + "professionials", "professionalism", + "professionnals", "professionals", + "profitabillity", "profitability", + "progressivelly", "progressively", + "progressivisme", "progressives", + "pronounciation", "pronunciation", + "proportianally", "proportionally", + "proportionalty", "proportionally", + "proportionella", "proportionally", + "proprotionally", "proportionally", + "protruberances", "protuberances", + "pseudononymous", "pseudonymous", + "psychologicaly", "psychologically", + "qaulifications", "qualification", + "qualifiactions", "qualification", + "qualificaitons", "qualifications", + "quarterbackers", "quarterbacks", + "rationalizaton", "rationalization", + "reaponsibility", "responsibility", + "recommandation", "recommendation", + "recommedations", "recommendations", + "recommondation", "recommendation", + "reconnaissence", "reconnaissance", + "reconstruccion", "reconstruction", + "reconsturction", "reconstruction", + "redistirbution", "redistribution", + "redistribucion", "redistribution", + "redistributivo", "redistribution", + "redistrubition", "redistribution", + "refridgeration", "refrigeration", + "rehabilitacion", "rehabilitation", + "rehabilitaiton", "rehabilitation", + "reinforcemnets", "reinforcements", + "rekommendation", "recommendation", + "rektifications", "certifications", + "reniforcements", "reinforcements", + "repersentation", "representation", + "represantation", "representation", + "represantative", "representative", + "representacion", "representation", + "representaiton", "representations", + "representatief", "representative", + "representating", "representation", + "representativo", "representation", + "representetive", "representative", + "representitive", "representative", + "representstion", "representations", + "representstive", "representatives", + "represetnation", "representations", + "represnetation", "representations", + "reprezentative", "representative", + "repsonsibility", "responsibility", + "resistribution", "redistribution", + "responcibility", "responsibility", + "responisbility", "responsibility", + "responnsibilty", "responsibility", + "responsability", "responsibility", + "responsibilies", "responsibilities", + "responsibities", "responsibilities", + "restaraunteurs", "restaurateurs", + "retroactivelly", "retroactively", + "revolutionairy", "revolutionary", + "revolutionnary", "revolutionary", + "ridicilousness", "ridiculousness", + "ridicoulusness", "ridiculousness", + "rienforcements", "reinforcements", + "righteoussness", "righteousness", + "satisfactoraly", "satisfactory", + "satisfactority", "satisfactorily", + "sceintifically", "scientifically", + "schizophrentic", "schizophrenic", + "screenwrighter", "screenwriter", + "sensacionalism", "sensationalism", + "sensacionalist", "sensationalist", + "sensasionalism", "sensationalism", + "sensasionalist", "sensationalist", + "sensationality", "sensationalist", + "sensationalizm", "sensationalism", + "sensationalsim", "sensationalism", + "sensationilism", "sensationalism", + "sensationilist", "sensationalist", + "sensationslism", "sensationalism", + "sensetionalism", "sensationalism", + "sensibilisiert", "sensibilities", + "sentationalism", "sensationalism", + "sentationalist", "sensationalist", 
+ "senzationalism", "sensationalism", + "senzationalist", "sensationalist", + "sepcifications", "specification", + "simaltaneously", "simultaneously", + "simeltaneously", "simultaneously", + "similtaneously", "simultaneously", + "simlutaneously", "simultaneously", + "simplificacion", "simplification", + "simplificaiton", "simplification", + "simplificating", "simplification", + "simulatenously", "simultaneously", + "simulatneously", "simultaneously", + "simultaenously", "simultaneously", + "simultainously", "simultaneously", + "simultaneoulsy", "simultaneously", + "simultaniously", "simultaneously", + "simulteanously", "simultaneously", + "sistematically", "systematically", + "slaugterhouses", "slaughterhouses", + "specailization", "specialization", + "specialication", "specialization", + "specializaiton", "specialization", + "specificaitons", "specification", + "speciliazation", "specialization", + "spectacularely", "spectacularly", + "spectacularily", "spectacularly", + "spesifications", "specifications", + "spezialisation", "specialization", + "sportsmansship", "sportsmanship", + "spreadsheeters", "spreadsheets", + "straightforwad", "straightforward", + "subconcsiously", "subconsciously", + "subconsicously", "subconsciously", + "subsconciously", "subconsciously", + "sunconsciously", "subconsciously", + "superintendant", "superintendent", + "suppliementing", "supplementing", + "surrepetitious", "surreptitious", + "survivabililty", "survivability", + "survivabillity", "survivability", + "sustainabiltiy", "sustainability", + "syncronization", "synchronization", + "systemetically", "systematically", + "systimatically", "systematically", + "technologicaly", "technologically", + "thermodinamics", "thermodynamics", + "thermodyanmics", "thermodynamics", + "thermodymamics", "thermodynamics", + "thermodymanics", "thermodynamics", + "thermodynamcis", "thermodynamics", + "thermodynanics", "thermodynamics", + "thermodynmaics", "thermodynamics", + "thernodynamics", "thermodynamics", + "theromdynamics", "thermodynamics", + "transformacion", "transformation", + "transfromation", "transformation", + "transitionable", "transitional", + "transitionning", "transitioning", + "transofrmation", "transformation", + "trasnformation", "transformation", + "trasnportation", "transportation", + "unbelievablely", "unbelievably", + "unchallengable", "unchallengeable", + "uncomfortabley", "uncomfortably", + "uncomfortablly", "uncomfortably", + "unconciousness", "unconsciousness", + "unconditionaly", "unconditionally", + "unconditionnal", "unconditional", + "unconsciouslly", "unconsciously", + "uncontrallable", "uncontrollable", + "uncontrallably", "uncontrollably", + "uncontrolablly", "uncontrollably", + "unconvectional", "unconventional", + "unconvencional", "unconventional", + "unconvensional", "unconventional", + "unconventianal", "unconventional", + "underastimated", "underestimated", + "underestamated", "underestimated", + "underestemated", "underestimated", + "underestimeted", "underestimated", + "undersetimated", "underestimated", + "understandebly", "understandably", + "understandible", "understandable", + "understandibly", "understandably", + "undestructible", "indestructible", + "unforetunately", "unfortunately", + "unfortunatelly", "unfortunately", + "unfourtunately", "unfortunately", + "uninitalizable", "uninitializable", + "unintelligient", "unintelligent", + "unintentionaly", "unintentionally", + "unintentionnal", "unintentional", + "unmanouverable", "unmaneuverable", + "unneccessarily", "unnecessarily", 
+ "unnecessarilly", "unnecessarily", + "unprecendented", "unprecedented", + "unprofessionel", "unprofessional", + "unreasonablely", "unreasonably", + "unsubstantiaed", "unsubstantiated", + "unsurprizingly", "unsurprisingly", + "vizualisations", "visualization", + "vulnerabilites", "vulnerabilities", + "vulnerabillity", "vulnerability", + "vulnerablility", "vulnerability", + "wholeheartadly", "wholeheartedly", + "wholeheartidly", "wholeheartedly", + "abbrievations", "abbreviation", + "accelleration", "acceleration", + "accomadations", "accommodations", + "accommadating", "accommodating", + "accommadation", "accommodation", + "accommidation", "accommodation", + "accomodations", "accommodations", + "accomondating", "accommodating", + "accomondation", "accommodation", + "accomplishent", "accomplishment", + "accountabilty", "accountability", + "accredidation", "accreditation", + "acknolwedging", "acknowledging", + "acknowlegding", "acknowledging", + "acomplishment", "accomplishment", + "acquaintaince", "acquaintance", + "acquaintences", "acquaintances", + "acquaintinces", "acquaintances", + "acquanitances", "acquaintance", + "acquantainces", "acquaintances", + "acquantiances", "acquaintances", + "acquiantances", "acquaintances", + "acquiantences", "acquaintances", + "adminastrator", "administrator", + "administartor", "administrator", + "administraion", "administration", + "administraron", "administrator", + "administrater", "administrator", + "administratio", "administrator", + "administraton", "administration", + "adminsitrator", "administrator", + "adminstration", "administration", + "adminstrative", "administrative", + "admissability", "admissibility", + "adnimistrator", "administrators", + "adverticement", "advertisement", + "advertisiment", "advertisement", + "advertisments", "advertisements", + "advirtisement", "advertisement", + "aestethically", "aesthetically", + "aesthatically", "aesthetically", + "aesthitically", "aesthetically", + "affectionnate", "affectionate", + "aforementiond", "aforementioned", + "agriculturual", "agricultural", + "agrumentative", "argumentative", + "alterantively", "alternatively", + "alternativets", "alternatives", + "alternativley", "alternatively", + "alternitavely", "alternatively", + "alternitively", "alternatively", + "aninteresting", "uninteresting", + "annoucnements", "announcements", + "antagonisitic", "antagonistic", + "anthropolgist", "anthropologist", + "apporpriately", "appropriately", + "apporpriation", "appropriation", + "apporximately", "approximately", + "appreciateing", "appreciating", + "appreciateive", "appreciative", + "appreciationg", "appreciating", + "appropirately", "appropriately", + "appropiration", "appropriation", + "appropraitely", "appropriately", + "appropreation", "appropriation", + "appropriatley", "appropriately", + "appropropiate", "appropriate", + "approrpiation", "appropriation", + "approxamately", "approximately", + "approxiamtely", "approximately", + "approximatley", "approximately", + "approximitely", "approximately", + "aqcuaintances", "acquaintances", + "aqquaintances", "acquaintances", + "archaelogical", "archaeological", + "archaelogists", "archaeologists", + "archeaologist", "archeologist", + "archetectural", "architectural", + "architechture", "architecture", + "architechural", "architectural", + "architectrual", "architectural", + "architecutral", "architectural", + "argumentitive", "argumentative", + "arugmentative", "argumentative", + "asethetically", "aesthetically", + "assasinations", "assassinations", + 
"audomoderator", "automoderator", + "australianess", "australians", + "authenticaion", "authentication", + "authenticaton", "authentication", + "autherization", "authorization", + "authoratitive", "authoritative", + "authoritatian", "authoritarian", + "authoritation", "authorization", + "authorititive", "authoritative", + "authoritorian", "authoritarian", + "authorotative", "authoritative", + "authroization", "authorization", + "automoderador", "automoderator", + "automoderater", "automoderator", + "automodorator", "automoderator", + "automoterator", "automoderator", + "autoritharian", "authoritarian", + "availabillity", "availability", + "awknowledging", "acknowledging", + "billingualism", "bilingualism", + "billionairres", "billionaire", + "borderlanders", "borderlands", + "breadtfeeding", "breastfeeding", + "breastfeading", "breastfeeding", + "breatsfeeding", "breastfeeding", + "broadacasting", "broadcasting", + "bureaucractic", "bureaucratic", + "bureaucratics", "bureaucrats", + "bureaucratius", "bureaucrats", + "californiaman", "californian", + "calrification", "clarification", + "capitalizaton", "capitalization", + "carbohdyrates", "carbohydrates", + "carbohidrates", "carbohydrates", + "carbohyrdates", "carbohydrates", + "carboyhdrates", "carbohydrates", + "carthographer", "cartographer", + "catagorically", "categorically", + "catastrophies", "catastrophe", + "catastrophize", "catastrophe", + "catigorically", "categorically", + "catterpillars", "caterpillars", + "celebrationis", "celebrations", + "ceritfication", "certifications", + "certificaiton", "certification", + "championchips", "championship", + "championshiop", "championships", + "championsship", "championships", + "chanpionships", "championships", + "charactarized", "characterized", + "characterisic", "characteristic", + "characteristc", "characteristics", + "characterists", "characteristics", + "charicterized", "characterized", + "charismatisch", "charismatic", + "checkpointusa", "checkpoints", + "cheeseburgare", "cheeseburger", + "cheeseburgler", "cheeseburger", + "cheeseburguer", "cheeseburger", + "cheezeburgers", "cheeseburgers", + "chornological", "chronological", + "chronoligical", "chronological", + "chronologicly", "chronological", + "cinematograhy", "cinematography", + "cinematograpy", "cinematography", + "circomference", "circumference", + "circumcission", "circumcision", + "circumferance", "circumference", + "circumsicions", "circumcision", + "circumstanial", "circumstantial", + "circumstantal", "circumstantial", + "circumstnaces", "circumstance", + "circunference", "circumference", + "circunstances", "circumstances", + "cirucmference", "circumference", + "cirucmstances", "circumstances", + "civilications", "civilizations", + "civilizaitons", "civilizations", + "clarificaiton", "clarification", + "clasification", "clarification", + "clerification", "clarification", + "coincidentaly", "coincidentally", + "coincidential", "coincidental", + "colaborations", "collaborations", + "collabaration", "collaboration", + "collaberation", "collaboration", + "collaberative", "collaborative", + "collaboratore", "collaborate", + "collectioners", "collections", + "collectivelly", "collectively", + "collobaration", "collaboration", + "combatibility", "compatibility", + "comeptitively", "competitively", + "comfortablely", "comfortably", + "comfortablity", "comfortably", + "comfrontation", "confrontation", + "commemerative", "commemorative", + "commericially", "commercially", + "commerorative", "commemorative", + "comminication", 
"communication", + "comminucation", "communications", + "commissionees", "commissions", + "commissionned", "commissioned", + "commissionner", "commissioner", + "commmemorated", "commemorated", + "commuications", "communications", + "commuincation", "communications", + "communciation", "communication", + "communiaction", "communications", + "communicaiton", "communication", + "communicatoin", "communications", + "communicatons", "communications", + "compadibility", "compatibility", + "comparativley", "comparatively", + "comparetively", "comparatively", + "comparitavely", "comparatively", + "comparitively", "comparatively", + "compatability", "compatibility", + "compatibiltiy", "compatibility", + "compeditively", "competitively", + "compensantion", "compensation", + "compensationg", "compensating", + "comperatively", "comparatively", + "comperhension", "comprehension", + "competatively", "competitively", + "competitavely", "competitively", + "competitevely", "competitively", + "competitivley", "competitively", + "competiveness", "competitiveness", + "compilcations", "complication", + "compitability", "compatibility", + "complciations", "complication", + "complecations", "complications", + "compliactions", "complication", + "complicaitons", "complication", + "complilations", "complications", + "complimentery", "complimentary", + "complimentoni", "complimenting", + "complimentory", "complimentary", + "comprehention", "comprehension", + "computacional", "computational", + "comtamination", "contamination", + "comtemplating", "contemplating", + "concatination", "contamination", + "conceivablely", "conceivably", + "concencration", "concentration", + "concenrtation", "concentrations", + "concentartion", "concentrations", + "concentracion", "concentration", + "concentraited", "concentrated", + "concentraiton", "concentrations", + "concentratons", "concentrations", + "concervatives", "conservatives", + "concideration", "consideration", + "concioussness", "consciousness", + "concnetration", "concentrations", + "concsiousness", "consciousness", + "condascending", "condescending", + "condescencion", "condescension", + "condescendion", "condescension", + "condescensing", "condescension", + "condiscending", "condescending", + "conditionning", "conditioning", + "condradicting", "contradicting", + "condradiction", "contradiction", + "condradictory", "contradictory", + "conecntration", "concentrations", + "conenctration", "concentrations", + "confidentally", "confidentially", + "configrations", "configurations", + "configruation", "configurations", + "configuartion", "configuration", + "configuracion", "configuration", + "configuraiton", "configuration", + "configuratoin", "configurations", + "configureable", "configurable", + "confrentation", "confrontation", + "confrontacion", "confrontation", + "confrontating", "confrontation", + "confrontativo", "confrontation", + "congratualted", "congratulate", + "conifguration", "configurations", + "conisderation", "considerations", + "connecticunts", "connecticut", + "connectivitiy", "connectivity", + "conpassionate", "compassionate", + "conplications", "complications", + "conplimentary", "complimentary", + "conplimenting", "complimenting", + "conprehension", "comprehension", + "consdieration", "considerations", + "consenquently", "consequently", + "consentrating", "concentrating", + "consentration", "concentration", + "consequencies", "consequence", + "consequentely", "consequently", + "consequeseces", "consequences", + "conservatisim", "conservatism", + 
"conservativsm", "conservatism", + "conservitives", "conservatives", + "consicousness", "consciousness", + "considerabely", "considerable", + "considerabile", "considerable", + "considerabley", "considerably", + "considerablly", "considerably", + "consideracion", "consideration", + "consideratoin", "considerations", + "considerstion", "considerations", + "considertaion", "considerations", + "consituencies", "constituencies", + "consitutional", "constitutional", + "constallation", "constellation", + "constarnation", "consternation", + "constillation", "constellation", + "constituintes", "constituents", + "constituional", "constitutional", + "constitutents", "constitutes", + "constitutinal", "constitutional", + "constructicon", "construction", + "constructieve", "constructive", + "constructiong", "constructing", + "consttruction", "construction", + "contaminacion", "contamination", + "contaminanted", "contaminated", + "contanimation", "contamination", + "contenplating", "contemplating", + "contimplating", "contemplating", + "contraceptivo", "contraception", + "contradiccion", "contradiction", + "contradicitng", "contradicting", + "contradiciton", "contradiction", + "contradictary", "contradictory", + "contradictons", "contradicts", + "contraticting", "contradicting", + "contravercial", "controversial", + "contraversial", "controversial", + "contreception", "contraception", + "contreversial", "controversial", + "contributeurs", "contributes", + "contributiors", "contributors", + "contriception", "contraception", + "contridictory", "contradictory", + "contritutions", "contributions", + "contriversial", "controversial", + "controception", "contraception", + "controdicting", "contradicting", + "controdiction", "contradiction", + "controvercial", "controversial", + "controverisal", "controversial", + "controversary", "controversy", + "controversity", "controversy", + "controvertial", "controversial", + "contstruction", "construction", + "conventionnal", "conventional", + "converastions", "conservation", + "conversationa", "conservation", + "conversationg", "conservation", + "conversationy", "conservation", + "conversatiosn", "conservation", + "conversatives", "conservatives", + "converstaions", "conversations", + "convorsations", "conversations", + "cooresponding", "corresponding", + "coorperations", "corporations", + "correctionals", "corrections", + "correpsonding", "corresponding", + "correspondant", "correspondent", + "correspondece", "correspondence", + "corresponders", "corresponds", + "corresponsing", "corresponding", + "corrispondant", "correspondent", + "corrisponding", "corresponding", + "corrosponding", "corresponding", + "costomization", "customization", + "costumization", "customization", + "counterfeight", "counterfeit", + "creationistas", "creationists", + "cricumference", "circumference", + "cringeworthey", "cringeworthy", + "cringeworthly", "cringeworthy", + "crytopgraphic", "cryptographic", + "curcumference", "circumference", + "curcumstances", "circumstances", + "custumization", "customization", + "cuztomization", "customization", + "decentraliced", "decentralized", + "decentrilized", "decentralized", + "decomissioned", "decommissioned", + "decompositing", "decomposing", + "definitivelly", "definitively", + "deinitalizing", "deinitializing", + "demenstration", "demonstration", + "democraticaly", "democratically", + "democraticlly", "democratically", + "demoninations", "denominations", + "demonstarting", "demonstrating", + "demonstartion", "demonstration", + "demonstraiton", 
"demonstrations", + "demonstratbly", "demonstrably", + "demonstraties", "demonstrate", + "demonstrativo", "demonstration", + "demosntrating", "demonstrating", + "demosntration", "demonstrations", + "denomenations", "denominations", + "denomonations", "denominations", + "deomnstration", "demonstrations", + "dermatalogist", "dermatologist", + "dermatolagist", "dermatologist", + "dermatoligist", "dermatologist", + "dermatologyst", "dermatologist", + "dermetologist", "dermatologist", + "dermitologist", "dermatologist", + "derpatologist", "dermatologist", + "desentralized", "decentralized", + "desillusioned", "disillusioned", + "desintegrated", "disintegrated", + "desinterested", "disinterested", + "determenation", "determination", + "determinacion", "determination", + "determinining", "determining", + "determinisitc", "deterministic", + "determinsitic", "deterministic", + "detmatologist", "dermatologist", + "developmently", "developmental", + "dezentralized", "decentralized", + "differantiate", "differentiate", + "differenciate", "differentiate", + "differintiate", "differentiate", + "diffirentiate", "differentiate", + "disadvandages", "disadvantaged", + "disadvantadge", "disadvantaged", + "disadvanteged", "disadvantaged", + "disadvanteges", "disadvantages", + "disadvatanges", "disadvantages", + "disadventaged", "disadvantaged", + "disadventages", "disadvantages", + "disallusioned", "disillusioned", + "disappearence", "disappearance", + "disappearnace", "disappearance", + "disappearring", "disappearing", + "disatvantaged", "disadvantaged", + "disatvantages", "disadvantages", + "disciplinairy", "disciplinary", + "disciplinerad", "disciplined", + "discipliniary", "disciplinary", + "disconnecters", "disconnects", + "discontinuted", "discontinued", + "discrimianted", "discriminated", + "discriminante", "discriminate", + "discriminatie", "discriminate", + "discriminatin", "discrimination", + "disillisioned", "disillusioned", + "disillutioned", "disillusioned", + "disingenuious", "disingenuous", + "disollusioned", "disillusioned", + "disrecpectful", "disrespectful", + "disrecpecting", "disrespecting", + "disrepsectful", "disrespectful", + "disrepsecting", "disrespecting", + "disresepctful", "disrespectful", + "disresepcting", "disrespecting", + "disrespection", "disrespecting", + "disrespekting", "disrespecting", + "disrispectful", "disrespectful", + "disrispecting", "disrespecting", + "dissagreement", "disagreement", + "dissapearance", "disappearance", + "dissapointted", "dissapointed", + "dissappointed", "disappointed", + "dissobediance", "disobedience", + "dissobedience", "disobedience", + "distingishing", "distinguishing", + "distinguising", "distinguishing", + "distinquished", "distinguished", + "distirbutions", "distributions", + "distiungished", "distinguished", + "distribustion", "distributions", + "distributiors", "distributors", + "distributivos", "distributions", + "distrobutions", "distributions", + "distrubitions", "distributions", + "distuingished", "distinguished", + "documantaries", "documentaries", + "documenatries", "documentaries", + "documentacion", "documentation", + "documentaires", "documentaries", + "documentaiton", "documentation", + "documentarios", "documentaries", + "documentaties", "documentaries", + "documentating", "documentation", + "documenteries", "documentaries", + "documentories", "documentaries", + "drammatically", "grammatically", + "dsyfunctional", "dysfunctional", + "dumbfoundeads", "dumbfounded", + "dusfunctional", "dysfunctional", + "dustification", 
"justification", + "dysfonctional", "dysfunctional", + "dysfucntional", "dysfunctional", + "dysfuncitonal", "dysfunctional", + "dysfunktional", "dysfunctional", + "easthetically", "aesthetically", + "effectiviness", "effectiveness", + "effictiveness", "effectiveness", + "effortlessely", "effortlessly", + "effortlessley", "effortlessly", + "embarrasement", "embarrassment", + "embarrasments", "embarrassment", + "embarressment", "embarrassment", + "emberrassment", "embarrassment", + "encarceration", "incarceration", + "encorporating", "incorporating", + "encyclopeadia", "encyclopedia", + "encyclopeadic", "encyclopedia", + "encyclopeedia", "encyclopedia", + "encycolpedias", "encyclopedia", + "endoctrinated", "indoctrinated", + "enlightenting", "enlightening", + "enlightnement", "enlightenment", + "enligthenment", "enlightenment", + "enteratinment", "entertainment", + "enterpreneurs", "entrepreneurs", + "enterprenuers", "entrepreneurs", + "enterpreuners", "entrepreneurs", + "entertianment", "entertainment", + "enthusiasists", "enthusiasts", + "enthusiastics", "enthusiasts", + "entrepraneurs", "entrepreneurs", + "entreprenaurs", "entrepreneurs", + "entrepreneuer", "entrepreneurs", + "entreprenours", "entrepreneurs", + "entreprenuers", "entrepreneurs", + "entreprenures", "entrepreneurs", + "entrepreuners", "entrepreneurs", + "entretainment", "entertainment", + "enviornmental", "environmental", + "environemntal", "environmental", + "environmently", "environmental", + "envolutionary", "evolutionary", + "envrionmental", "environmental", + "estabilshment", "establishments", + "establishemnt", "establishments", + "establishmnet", "establishments", + "establsihment", "establishments", + "estbalishment", "establishments", + "ethnocentricm", "ethnocentrism", + "evolutionairy", "evolutionary", + "evolutionarly", "evolutionary", + "evolutionnary", "evolutionary", + "exaggeratting", "exaggerating", + "excpetionally", "exceptionally", + "executioneers", "executioner", + "existentiella", "existential", + "expectionally", "exceptionally", + "experementing", "experimenting", + "experienceing", "experiencing", + "experimentais", "experiments", + "experimention", "experimenting", + "experimentors", "experiments", + "expirementing", "experimenting", + "expodentially", "exponentially", + "exponantially", "exponentially", + "exponencially", "exponentially", + "exponentiella", "exponential", + "extraodrinary", "extraordinary", + "extraordianry", "extraordinary", + "extraordinair", "extraordinary", + "extraordinaly", "extraordinary", + "extraoridnary", "extraordinary", + "extremeophile", "extremophile", + "extroardinary", "extraordinary", + "familiarizate", "familiarize", + "fantasitcally", "fantastically", + "fantasmically", "fantastically", + "fantistically", "fantastically", + "faptastically", "fantastically", + "figurativeley", "figuratively", + "figurativelly", "figuratively", + "frankenstiens", "frankenstein", + "frankenstined", "frankenstein", + "frankenstiner", "frankenstein", + "frankenstines", "frankenstein", + "friendzoneado", "friendzoned", + "fucntionality", "functionality", + "funcitonality", "functionality", + "functionailty", "functionality", + "fundamentalis", "fundamentals", + "fundamnetally", "fundamentally", + "fundementally", "fundamentally", + "fundimentally", "fundamentally", + "gamifications", "ramifications", + "generalizaing", "generalizing", + "generalizaton", "generalization", + "generationals", "generations", + "generationens", "generations", + "generationers", "generations", + 
"generationnal", "generational", + "geographicaly", "geographically", + "geographicial", "geographical", + "geometricians", "geometers", + "goreshadowing", "foreshadowing", + "governmential", "governmental", + "gradification", "gratification", + "grammarically", "grammatically", + "grandchildern", "grandchildren", + "gratificacion", "gratification", + "gratificaiton", "gratification", + "grativational", "gravitational", + "gravitacional", "gravitational", + "gravitaitonal", "gravitational", + "hallcuination", "hallucination", + "hallicunation", "hallucination", + "hallucenation", "hallucination", + "halluciantion", "hallucinations", + "hallukination", "hallucination", + "hallunication", "hallucination", + "hallusination", "hallucination", + "halluzination", "hallucination", + "heiroglyphics", "hieroglyphics", + "hellucination", "hallucination", + "highlightning", "highlighting", + "homesexuality", "homosexuality", + "homosexualtiy", "homosexuality", + "homosexulaity", "homosexuality", + "horizontallly", "horizontally", + "hullucination", "hallucination", + "hypocriticial", "hypocritical", + "hypotheticaly", "hypothetically", + "hystericallly", "hysterically", + "identificaton", "identification", + "ideoligically", "ideologically", + "ideosyncratic", "idiosyncratic", + "idiologically", "ideologically", + "illistrations", "illustrations", + "illustartions", "illustrations", + "imperfactions", "imperfections", + "impersinating", "impersonating", + "implementaion", "implementation", + "implementatin", "implementations", + "implimenation", "implementation", + "imprefections", "imperfections", + "impresonating", "impersonating", + "inaccessibile", "inaccessible", + "inadventently", "inadvertently", + "inadverdently", "inadvertently", + "inadvertantly", "inadvertently", + "inadvertendly", "inadvertently", + "inapporpriate", "inappropriate", + "inappropirate", "inappropriate", + "inappropraite", "inappropriate", + "inaproppriate", "inappropriate", + "incarcaration", "incarceration", + "incarciration", "incarceration", + "incarseration", "incarceration", + "incerceration", "incarceration", + "incidentially", "incidentally", + "incomfortable", "uncomfortable", + "incomfortably", "uncomfortably", + "incompatabile", "incompatible", + "incompatiable", "incompatible", + "incompatibile", "incompatible", + "inconciderate", "inconsiderate", + "inconcistency", "inconsistency", + "inconditional", "unconditional", + "inconsciously", "unconsciously", + "inconsiderant", "inconsiderate", + "inconsistance", "inconsistency", + "inconsistancy", "inconsistency", + "inconsistenly", "inconsistency", + "inconsistensy", "inconsistency", + "inconsistenty", "inconsistency", + "inconveinence", "inconvenience", + "inconveniance", "inconvenience", + "inconveniente", "inconvenience", + "inconvienence", "inconvenience", + "incoroporated", "incorporated", + "incorparating", "incorporating", + "incorperating", "incorporating", + "incorperation", "incorporation", + "incorruptable", "incorruptible", + "incramentally", "incrementally", + "incrementarla", "incremental", + "incrementarlo", "incremental", + "indavertently", "inadvertently", + "indefinitelly", "indefinitely", + "independantes", "independents", + "independantly", "independently", + "independendet", "independent", + "independendly", "independently", + "indepentently", "independently", + "indespensable", "indispensable", + "indespensible", "indispensable", + "indestructble", "indestructible", + "indestructibe", "indestructible", + "indictrinated", "indoctrinated", + 
"indipendently", "independently", + "indispensible", "indispensable", + "indivuduality", "individuality", + "indocrtinated", "indoctrinated", + "indocternated", "indoctrinated", + "indoctornated", "indoctrinated", + "indoctrinatie", "indoctrinated", + "indoctrinatin", "indoctrination", + "indoctronated", "indoctrinated", + "industrialied", "industrialized", + "industrialzed", "industrialized", + "inexeprienced", "inexperience", + "inexpeirenced", "inexperience", + "inexpereinced", "inexperienced", + "inexperianced", "inexperienced", + "inexperiecned", "inexperience", + "inexperineced", "inexperience", + "inexpierenced", "inexperienced", + "inexplicabley", "inexplicably", + "inexplicablly", "inexplicably", + "infilitration", "infiltration", + "infrastructre", "infrastructure", + "infrastrucure", "infrastructure", + "inintelligent", "unintelligent", + "ininteresting", "uninteresting", + "initalisation", "initialisation", + "initalization", "initialization", + "inperfections", "imperfections", + "inpersonating", "impersonating", + "inpossibility", "impossibility", + "inpredictable", "unpredictable", + "inresponsible", "irresponsible", + "insectiverous", "insectivorous", + "insecuritites", "insecurities", + "insiginficant", "insignificant", + "insiginifcant", "insignificant", + "insignificent", "insignificant", + "insignificunt", "insignificant", + "insignifigant", "insignificant", + "insiprational", "inspirational", + "insperational", "inspirational", + "inspiritional", "inspirational", + "inspriational", "inspirational", + "instantaenous", "instantaneous", + "instantanious", "instantaneous", + "instanteneous", "instantaneous", + "instantenious", "instantaneous", + "instincitvely", "instinctively", + "instinctivley", "instinctively", + "instititional", "institutional", + "institutionel", "institutional", + "insturmentals", "instrumental", + "instutitional", "institutional", + "insustainable", "unsustainable", + "intelelctuals", "intellectuals", + "intellectualy", "intellectually", + "intellectuels", "intellectuals", + "intellecutals", "intellectuals", + "intellegently", "intelligently", + "intelluctuals", "intellectuals", + "intepretation", "interpretation", + "intereactions", "intersections", + "interesctions", "intersections", + "interlectuals", "intellectuals", + "intermittient", "intermittent", + "intermittment", "intermittent", + "internacional", "international", + "interpersonel", "interpersonal", + "interpresonal", "interpersonal", + "interpretaion", "interpretation", + "interpretarea", "interpreter", + "interpretarem", "interpreter", + "interpretares", "interpreter", + "interpretarse", "interpreter", + "interpretarte", "interpreter", + "interpretatin", "interpretations", + "interpreteert", "interpreter", + "interragation", "interrogation", + "interregation", "interrogation", + "interrigation", "interrogation", + "interrogacion", "interrogation", + "interrogativo", "interrogation", + "intertainment", "entertainment", + "intillectuals", "intellectuals", + "intraspection", "introspection", + "intrensically", "intrinsically", + "intriniscally", "intrinsically", + "intrinsecally", "intrinsically", + "intrisincally", "intrinsically", + "intristically", "intrinsically", + "introductiory", "introductory", + "introspeccion", "introspection", + "introspectivo", "introspection", + "introspektion", "introspection", + "invertibrates", "invertebrates", + "invesitgation", "investigation", + "invesitgative", "investigative", + "invesitgators", "investigators", + "investagators", "investigators", + 
"investegating", "investigating", + "investegators", "investigators", + "investiagtion", "investigation", + "investiagtive", "investigative", + "investigacion", "investigation", + "investigaiton", "investigations", + "investigaters", "investigators", + "investigativo", "investigation", + "investigatons", "investigations", + "investigsting", "investigating", + "investigstion", "investigations", + "investogators", "investigators", + "invisibillity", "invisibility", + "involuntarely", "involuntary", + "involuntarity", "involuntary", + "invulnerabile", "invulnerable", + "irrationallly", "irrationally", + "irresponcible", "irresponsible", + "irresponisble", "irresponsible", + "irresponsable", "irresponsible", + "irresponsbile", "irresponsible", + "irreversiable", "irreversible", + "irreversibelt", "irreversible", + "irreversibile", "irreversible", + "irrisponsible", "irresponsible", + "jacksonvillle", "jacksonville", + "journalisitic", "journalistic", + "journalistens", "journalists", + "journalisters", "journalists", + "journalistisk", "journalists", + "jsutification", "justifications", + "jurisdicitons", "jurisdictions", + "jurisidctions", "jurisdictions", + "juristictions", "jurisdictions", + "jursidictions", "jurisdictions", + "jusitfication", "justifications", + "justifiaction", "justifications", + "justificacion", "justification", + "justificaiton", "justification", + "justificativo", "justification", + "justificatons", "justifications", + "justificstion", "justifications", + "justiifcation", "justifications", + "karbohydrates", "carbohydrates", + "knoweldgeable", "knowledgeable", + "knowledegable", "knowledgeable", + "knowledgebale", "knowledgable", + "knowlegdeable", "knowledgeable", + "kollaboration", "collaboration", + "koncentration", "concentration", + "konfiguration", "configuration", + "konfrontation", "confrontation", + "konservatives", "conservatives", + "konstellation", "constellation", + "kontamination", "contamination", + "legitimatelly", "legitimately", + "libertariaism", "libertarianism", + "libertariansm", "libertarianism", + "libitarianisn", "libertarianism", + "lighthearthed", "lighthearted", + "mainfestation", "manifestation", + "manafacturers", "manufacturers", + "manafacturing", "manufacturing", + "manafestation", "manifestation", + "manefestation", "manifestation", + "manfuacturers", "manufactures", + "manifacturers", "manufacturers", + "manifacturing", "manufacturing", + "manifastation", "manifestation", + "manifestacion", "manifestation", + "manifestating", "manifestation", + "manifistation", "manifestation", + "manipulationg", "manipulating", + "manufacterers", "manufacturers", + "manufactering", "manufacturing", + "manufacterurs", "manufactures", + "manufactorers", "manufacturers", + "manufactoring", "manufacturing", + "manufactuered", "manufactured", + "manufactuerer", "manufacturer", + "manufactueres", "manufactures", + "manufacturedd", "manufactured", + "manufactureds", "manufactures", + "manufacturerd", "manufactured", + "manufacturier", "manufacturer", + "manufacturors", "manufacturers", + "manufactuters", "manufactures", + "manufacutrers", "manufactures", + "manufcaturers", "manufactures", + "marshmalllows", "marshmallows", + "massachsuetts", "massachusetts", + "massachucetts", "massachusetts", + "massachuestts", "massachusetts", + "massachusents", "massachusetts", + "massachusites", "massachusetts", + "massachussets", "massachusetts", + "massechusetts", "massachusetts", + "masturbateing", "masturbating", + "materialisimo", "materialism", + "mathamatician", 
"mathematician", + "mathametician", "mathematician", + "mathematicals", "mathematics", + "mathematicaly", "mathematically", + "mathematicans", "mathematics", + "mathematicion", "mathematician", + "mathematitian", "mathematician", + "mathemetician", "mathematician", + "mathmatically", "mathematically", + "mathmaticians", "mathematicians", + "mechanicallly", "mechanically", + "medeterranean", "mediterranean", + "meditarrenean", "mediterranean", + "meditereanean", "mediterranean", + "membranaphone", "membranophone", + "metamorphysis", "metamorphosis", + "metaphoricaly", "metaphorically", + "metaphoricial", "metaphorical", + "metaphysicals", "metaphysics", + "metaphysicans", "metaphysics", + "methamatician", "mathematician", + "methematician", "mathematician", + "metropolitain", "metropolitan", + "metropolitcan", "metropolitan", + "metropolitian", "metropolitan", + "millionairres", "millionaire", + "minneapolites", "minneapolis", + "misanderstood", "misunderstood", + "miscellanious", "miscellaneous", + "misconcpetion", "misconceptions", + "misconecption", "misconceptions", + "misinterperet", "misinterpret", + "misinterprate", "misinterpret", + "misinterprent", "misinterpret", + "misinterprted", "misinterpret", + "misogynisitic", "misogynistic", + "misrepreseted", "misrepresented", + "misunterstood", "misunderstood", + "modificaitons", "modifications", + "motivationals", "motivations", + "motivationnal", "motivational", + "mulitnational", "multinational", + "multimational", "multinational", + "multiplicaton", "multiplication", + "muncipalities", "municipalities", + "munnicipality", "municipality", + "mutlinational", "multinational", + "nacionalistic", "nationalistic", + "narcissisitic", "narcissistic", + "narcississtic", "narcissistic", + "natioanlistic", "nationalistic", + "nationalisitc", "nationalistic", + "nationalistes", "nationalists", + "nationalsitic", "nationalistic", + "neigbhourhood", "neighbourhood", + "neighboorhoud", "neighbourhood", + "neighborehood", "neighbourhood", + "neighborhoood", "neighborhoods", + "neighbourbood", "neighbourhood", + "neighbourgood", "neighbourhood", + "neighbourhoud", "neighbourhood", + "neighourhoods", "neighborhoods", + "nieghborhoods", "neighborhoods", + "nieghbourhood", "neighbourhood", + "noncombatents", "noncombatants", + "noninitalized", "noninitialized", + "northwestener", "northwestern", + "notificaitons", "notifications", + "occassionally", "occasionally", + "operationable", "operational", + "oppertunities", "opportunities", + "opprotunities", "opportunities", + "oppurtunities", "opportunities", + "opthamologist", "ophthalmologist", + "organistaions", "organisations", + "organizatinal", "organizational", + "organizativos", "organizations", + "organsiations", "organisations", + "organziations", "organizations", + "orginasations", "organisations", + "orginazations", "organizations", + "overpopulaton", "overpopulation", + "overreactiong", "overreacting", + "overshaddowed", "overshadowed", + "overwheliming", "overwhelming", + "overwhelmigly", "overwhelmingly", + "overwhelmingy", "overwhelmingly", + "overwhelminly", "overwhelmingly", + "palestininans", "palestinians", + "paraphraseing", "paraphrasing", + "paraphrashing", "paraphrasing", + "parilamentary", "parliamentary", + "parlaimentary", "parliamentary", + "parliamantary", "parliamentary", + "parliamentery", "parliamentary", + "parliamnetary", "parliamentary", + "parliementary", "parliamentary", + "particiaption", "participation", + "participacion", "participation", + "participantes", 
"participants", + "participativo", "participation", + "particularely", "particularly", + "particularily", "particularly", + "particularlly", "particularly", + "partizipation", "participation", + "passionatelly", "passionately", + "paychiatrists", "psychiatrists", + "paychologists", "psychologists", + "pennsylvainia", "pennsylvania", + "pennsylvanica", "pennsylvania", + "pennsylvannia", "pennsylvania", + "perdominantly", "predominantly", + "perpandicular", "perpendicular", + "perpendicualr", "perpendicular", + "perpenticular", "perpendicular", + "perpetuationg", "perpetuating", + "perpindicular", "perpendicular", + "personalitits", "personalities", + "pessimistisch", "pessimistic", + "pharmaceutial", "pharmaceutical", + "philisophical", "philosophical", + "philosiphical", "philosophical", + "philosohpical", "philosophical", + "philosophycal", "philosophically", + "philospohical", "philosophical", + "phisiological", "physiological", + "photagraphers", "photographers", + "photographics", "photographs", + "photographied", "photographed", + "photographier", "photographer", + "photograpphed", "photographed", + "photogrophers", "photographers", + "photogrpahers", "photographers", + "photoshoppade", "photoshopped", + "photoshoppped", "photoshopped", + "phsyiological", "physiological", + "phychiatrists", "psychiatrists", + "phychological", "psychological", + "phychologists", "psychologists", + "phylosophical", "philosophical", + "physciatrists", "psychiatrists", + "physcological", "psychological", + "physcologists", "psychologists", + "physioligical", "physiological", + "planeswlakers", "planeswalker", + "plansewalkers", "planeswalker", + "playthroughts", "playthroughs", + "polysaccaride", "polysaccharide", + "practicallity", "practically", + "practicioners", "practitioners", + "practisioners", "practitioners", + "practitioneer", "practitioners", + "practitionner", "practitioner", + "pratictioners", "practitioners", + "preconceieved", "preconceived", + "predecessores", "predecessors", + "predetermiend", "predetermined", + "predetirmined", "predetermined", + "preditermined", "predetermined", + "predomenantly", "predominantly", + "predominently", "predominantly", + "pregressively", "progressively", + "preinitalized", "preinitialized", + "preinitalizes", "preinitializes", + "preliferation", "proliferation", + "prependicular", "perpendicular", + "preposterious", "preposterous", + "prerequisties", "prerequisite", + "prerequistite", "prerequisite", + "prescribtions", "prescriptions", + "presumptuious", "presumptuous", + "pretedermined", "predetermined", + "problematisch", "problematic", + "proclaimation", "proclamation", + "prodominantly", "predominantly", + "professionnal", "professional", + "profitiablity", "profitability", + "profitibality", "profitability", + "progressivily", "progressively", + "progressivley", "progressively", + "prononciation", "pronunciation", + "pronouciation", "pronunciation", + "pronunciacion", "pronunciation", + "pronunciating", "pronunciation", + "pronuncuation", "pronunciation", + "pronunication", "pronunciation", + "pronuntiation", "pronunciation", + "propabilities", "probabilities", + "proportionaly", "proportionally", + "proportionnal", "proportional", + "proseletyzing", "proselytizing", + "protagonistas", "protagonists", + "protagonistes", "protagonists", + "protestantisk", "protestants", + "protruberance", "protuberance", + "provocativley", "provocative", + "pscyhiatrists", "psychiatrists", + "pscyhological", "psychological", + "pscyhologists", "psychologists", + 
"pshycological", "psychological", + "pshycologists", "psychologists", + "psichological", "psychological", + "psychaitrists", "psychiatrists", + "psychedellics", "psychedelics", + "psychiatrisch", "psychiatric", + "psycholigical", "psychological", + "psycholigists", "psychologists", + "psychologycal", "psychologically", + "psychologysts", "psychologists", + "psychyatrists", "psychiatrists", + "psysiological", "physiological", + "purpendicular", "perpendicular", + "pyschiatrists", "psychiatrists", + "pyschological", "psychological", + "pyschologists", "psychologists", + "qaulification", "qualification", + "qualifiaction", "qualification", + "qualificaiton", "qualifications", + "qualificatons", "qualifications", + "qualifikation", "qualification", + "questionalble", "questionable", + "quinessential", "quintessential", + "ramificaitons", "ramifications", + "realisitcally", "realistically", + "realtionships", "relationships", + "reccommending", "recommending", + "receptionnist", "receptionist", + "receptionsist", "receptionist", + "reconaissance", "reconnaissance", + "reconcilation", "reconciliation", + "reconnaisance", "reconnaissance", + "reconstrucion", "reconstruction", + "recreationnal", "recreational", + "rectangulaire", "rectangular", + "redistribuito", "redistribution", + "redistributin", "redistribution", + "reencarnation", "reincarnation", + "refridgerator", "refrigerator", + "rehabilitaion", "rehabilitation", + "rehabilitatin", "rehabilitation", + "rehabilitaton", "rehabilitation", + "reincarantion", "reincarnation", + "reincatnation", "reincarnation", + "reinforcemens", "reinforcements", + "reinforcemnts", "reinforcements", + "reinitalising", "reinitialising", + "reinitalizing", "reinitializing", + "reinkarnation", "reincarnation", + "reinstallling", "reinstalling", + "reintarnation", "reincarnation", + "relationshits", "relationships", + "relationsship", "relationships", + "relatiopnship", "relationship", + "relentlessely", "relentlessly", + "relentlessley", "relentlessly", + "relinqushment", "relinquishment", + "remifications", "ramifications", + "reprehenisble", "reprehensible", + "reprehensable", "reprehensible", + "reprehinsible", "reprehensible", + "represenation", "representation", + "represensible", "reprehensible", + "representaion", "representation", + "representatie", "representatives", + "representatin", "representations", + "representerad", "represented", + "representitve", "representative", + "representives", "representatives", + "repricussions", "repercussions", + "reprihensible", "reprehensible", + "resolutionary", "revolutionary", + "respectivelly", "respectively", + "responsibiliy", "responsibility", + "responsibilty", "responsibility", + "responsiblity", "responsibility", + "respositories", "repositories", + "resssurecting", "resurrecting", + "ressurrection", "resurrection", + "restaraunteur", "restaurateur", + "retoractively", "retroactively", + "retroactivily", "retroactively", + "retroactivley", "retroactively", + "retrocatively", "retroactively", + "revelutionary", "revolutionary", + "revolutionair", "revolutionary", + "revolutionens", "revolutions", + "revolutioners", "revolutions", + "revoultionary", "revolutionary", + "ridiculouness", "ridiculousness", + "rienforcement", "reinforcements", + "righetousness", "righteousness", + "rightiousness", "righteousness", + "rigtheousness", "righteousness", + "rollarcoaster", "rollercoaster", + "rollercaoster", "rollercoaster", + "rollercoaters", "rollercoaster", + "rollercoatser", "rollercoaster", + "rollerocaster", 
"rollercoaster", + "rollertoaster", "rollercoaster", + "rollorcoaster", "rollercoaster", + "sacrastically", "sarcastically", + "sarcasitcally", "sarcastically", + "satisfactorly", "satisfactory", + "scandianvians", "scandinavian", + "scateboarding", "skateboarding", + "schisophrenic", "schizophrenic", + "schiziphrenic", "schizophrenic", + "schizophernia", "schizophrenia", + "schizophernic", "schizophrenic", + "schizophrania", "schizophrenia", + "schizoprhenia", "schizophrenia", + "schizoprhenic", "schizophrenic", + "schozophrenia", "schizophrenia", + "schozophrenic", "schizophrenic", + "schyzophrenia", "schizophrenia", + "schyzophrenic", "schizophrenic", + "schziophrenia", "schizophrenia", + "schziophrenic", "schizophrenic", + "scientificaly", "scientifically", + "scientificlly", "scientifically", + "segementation", "segmentation", + "sensationable", "sensational", + "sensationails", "sensationalism", + "sensationaism", "sensationalism", + "sensationalim", "sensationalism", + "sensationella", "sensational", + "shcizophrenic", "schizophrenic", + "significanlty", "significantly", + "significently", "significantly", + "signifigantly", "significantly", + "simultaneosly", "simultaneously", + "simultaneuous", "simultaneous", + "simultanously", "simultaneously", + "singificantly", "significantly", + "skatebaording", "skateboarding", + "skateborading", "skateboarding", + "socioecenomic", "socioeconomic", + "socioecomonic", "socioeconomic", + "socioeconimic", "socioeconomic", + "sohpisticated", "sophisticated", + "sophisitcated", "sophisticated", + "sophistacated", "sophisticated", + "sophistocated", "sophisticated", + "sophosticated", "sophisticated", + "spacification", "specification", + "specializaton", "specialization", + "specificaiton", "specifications", + "specificatons", "specifications", + "specifikation", "specification", + "spectaculaire", "spectacular", + "spectaculalry", "spectacularly", + "spectatularly", "spectacularly", + "spesification", "specification", + "spirituallity", "spiritually", + "sponatenously", "spontaneously", + "spontaenously", "spontaneously", + "spontainously", "spontaneously", + "spontaneoulsy", "spontaneously", + "spontaneuosly", "spontaneously", + "spontaniously", "spontaneously", + "spontanuously", "spontaneously", + "sponteanously", "spontaneously", + "sponteneously", "spontaneously", + "sporstmanship", "sportsmanship", + "sportmansship", "sportsmanship", + "sportsmamship", "sportsmanship", + "sportsmenship", "sportsmanship", + "sprotsmanship", "sportsmanship", + "stakeboarding", "skateboarding", + "startegically", "strategically", + "statisitcally", "statistically", + "statistacally", "statistically", + "stereotipical", "stereotypical", + "stereotpyical", "stereotypical", + "stereotypcial", "stereotypical", + "stereotypeing", "stereotyping", + "stereotypying", "stereotyping", + "steriotypical", "stereotypical", + "steroetypical", "stereotypical", + "steryotypical", "stereotypical", + "storytellling", "storytelling", + "stragegically", "strategically", + "stragetically", "strategically", + "straightenend", "straightened", + "straitforward", "straightforward", + "stratagically", "strategically", + "stratigically", "strategically", + "strawberrries", "strawberries", + "stregnthening", "strengthening", + "strenghtening", "strengthening", + "strengthining", "strengthening", + "stretegically", "strategically", + "subcatagories", "subcategories", + "subconsciosly", "subconsciously", + "subconsciouly", "subconsciously", + "subconsiously", "subconsciously", + 
"subjectivelly", "subjectively", + "subscribtions", "subscriptions", + "substancially", "substantially", + "substanitally", "substantially", + "substansially", "substantially", + "substantiable", "substantial", + "substantually", "substantially", + "substitutents", "substitutes", + "successfullly", "successfully", + "supermarkedet", "supermarket", + "supermarkerts", "supermarkets", + "superpowereds", "superpowers", + "supersticious", "superstitious", + "superstisious", "superstitious", + "superstitiosi", "superstitious", + "superstitiuos", "superstitious", + "superstituous", "superstitious", + "suphisticated", "sophisticated", + "supscriptions", "subscriptions", + "surreptiously", "surreptitiously", + "survavibility", "survivability", + "survibability", "survivability", + "survivabiltiy", "survivability", + "survivalibity", "survivability", + "survivavility", "survivability", + "survivebility", "survivability", + "susbtantially", "substantially", + "sustainabilty", "sustainability", + "synchornously", "synchronously", + "systematicaly", "systematically", + "systematiclly", "systematically", + "techmological", "technological", + "technicallity", "technically", + "technoligical", "technological", + "technologicly", "technological", + "techonlogical", "technological", + "telaportation", "teleportation", + "teleportating", "teleportation", + "teleprotation", "teleportation", + "teliportation", "teleportation", + "teloportation", "teleportation", + "territoriella", "territorial", + "theoratically", "theoretically", + "theoritically", "theoretically", + "therapeutisch", "therapeutic", + "thereotically", "theoretically", + "thermodynaics", "thermodynamics", + "thermodynamcs", "thermodynamics", + "theroetically", "theoretically", + "thoeretically", "theoretically", + "tranistioning", "transitioning", + "transcendance", "transcendence", + "transcribtion", "transcription", + "transcripcion", "transcription", + "transferrring", "transferring", + "transformarea", "transformer", + "transformarem", "transformer", + "transformarse", "transformers", + "transformaton", "transformation", + "transformered", "transformed", + "transgengered", "transgendered", + "transisioning", "transitioning", + "transitionals", "transitions", + "transitionnal", "transitional", + "transitionned", "transitioned", + "transkription", "transcription", + "translyvanian", "transylvania", + "transmisisons", "transmissions", + "transmissable", "transmissible", + "transmisssion", "transmissions", + "transparantie", "transparent", + "transparentcy", "transparency", + "transplantees", "transplants", + "transporation", "transportation", + "transportaion", "transportation", + "transportarme", "transporter", + "transportarse", "transporter", + "transportarte", "transporter", + "transporteurs", "transporter", + "transsexuella", "transsexual", + "transylvannia", "transylvania", + "trasngendered", "transgendered", + "troubleshooot", "troubleshoot", + "udnerestimate", "underestimated", + "umcomfortable", "uncomfortable", + "umcomfortably", "uncomfortably", + "umpredictable", "unpredictable", + "unappropriate", "inappropriate", + "unbelievabley", "unbelievably", + "unbelievablly", "unbelievably", + "uncertaintity", "uncertainty", + "uncomfertable", "uncomfortable", + "uncomfertably", "uncomfortably", + "uncomfortabel", "uncomfortably", + "uncomforyable", "uncomfortably", + "uncomfrotable", "uncomfortable", + "uncomfrotably", "uncomfortably", + "uncomftorable", "uncomfortable", + "uncomftorably", "uncomfortably", + "unconcsiously", 
"unconsciously", + "unconfortable", "uncomfortable", + "unconfortably", "uncomfortably", + "unconscioulsy", "unconsciously", + "unconsicously", "unconsciously", + "unconsiderate", "inconsiderate", + "uncontrollabe", "uncontrollable", + "uncontrollaby", "uncontrollably", + "unconventinal", "unconventional", + "uncounciously", "unconsciously", + "uncousciously", "unconsciously", + "underastimate", "underestimate", + "underesitmate", "underestimated", + "underestamate", "underestimate", + "underestemate", "underestimate", + "underestiamte", "underestimated", + "undergratuate", "undergraduate", + "underhwelming", "underwhelming", + "underhwleming", "underwhelming", + "underminining", "undermining", + "underpowererd", "underpowered", + "undersetimate", "underestimate", + "understandble", "understandable", + "understandbly", "understandably", + "underwealming", "underwhelming", + "underwhemling", "underwhelming", + "underwhleming", "underwhelming", + "undoctrinated", "indoctrinated", + "unexpectadely", "unexpectedly", + "unfomfortable", "uncomfortable", + "unforgiveable", "unforgivable", + "unfortuantely", "unfortunately", + "unfortunantly", "unfortunately", + "unfortunatley", "unfortunately", + "unfortuneatly", "unfortunately", + "unfortunetely", "unfortunately", + "unilaterallly", "unilaterally", + "uninstallling", "uninstalling", + "unintellegent", "unintelligent", + "unintelligant", "unintelligent", + "unintensional", "unintentional", + "uninteristing", "uninteresting", + "universitites", "universities", + "unnecassarily", "unnecessarily", + "unneccesarily", "unnecessarily", + "unnecessairly", "unnecessarily", + "unnecessarely", "unnecessarily", + "unnecessarity", "unnecessarily", + "unnecesserily", "unnecessarily", + "unnecissarily", "unnecessarily", + "unnessecarily", "unnecessarily", + "unoperational", "nonoperational", + "unprecendeted", "unprecedented", + "unprecidented", "unprecedented", + "unpredecented", "unprecedented", + "unpredicatble", "unpredictable", + "unpredictible", "unpredictable", + "unpresedented", "unprecedented", + "unpridictable", "unpredictable", + "unprofessinal", "unprofessional", + "unrealistisch", "unrealistic", + "unreasonabley", "unreasonably", + "unreasonablly", "unreasonably", + "unrestrictred", "unrestricted", + "unsistainable", "unsustainable", + "unsubscribade", "unsubscribed", + "unsubscribbed", "unsubscribe", + "unsuccesfully", "unsuccessfully", + "unsuccessfull", "unsuccessful", + "unsucessfully", "unsuccessfully", + "unsuprisingly", "unsurprisingly", + "unsuprizingly", "unsurprisingly", + "unsustainible", "unsustainable", + "unsustianable", "unsustainable", + "vertification", "certification", + "villification", "vilification", + "virualization", "visualization", + "visualizacion", "visualization", + "visualizaiton", "visualization", + "visualizating", "visualization", + "vitualization", "visualization", + "vizualization", "visualization", + "volounteering", "volunteering", + "vulberability", "vulnerability", + "vulernability", "vulnerability", + "vulnarability", "vulnerability", + "vulnerabiltiy", "vulnerability", + "vulnurability", "vulnerability", + "vunlerability", "vulnerability", + "vurnerability", "vulnerability", + "weightlfiting", "weightlifting", + "weightlifitng", "weightlifting", + "weightligting", "weightlifting", + "weigthlifting", "weightlifting", + "wholeheartdly", "wholeheartedly", + "wholeheartedy", "wholeheartedly", + "wholeheartely", "wholeheartedly", + "wieghtlifting", "weightlifting", + "abberivation", "abbreviation", + 
"abberviation", "abbreviation", + "abbreivation", "abbreviation", + "abbreveation", "abbreviation", + "abbrievation", "abbreviation", + "abortificant", "abortifacient", + "abrreviation", "abbreviation", + "academcially", "academically", + "accedentally", "accidentally", + "accelarating", "accelerating", + "accelaration", "acceleration", + "acceleartion", "acceleration", + "acceleraptor", "accelerator", + "accelorating", "accelerating", + "accessibilty", "accessibility", + "accidentlaly", "accidently", + "accomadating", "accommodating", + "accomadation", "accommodation", + "accomodating", "accommodating", + "accomodation", "accommodation", + "accrediation", "accreditation", + "acculumation", "accumulation", + "accumalation", "accumulation", + "accumilation", "accumulation", + "acedemically", "academically", + "acheivements", "achievements", + "acknolwedged", "acknowledged", + "acknolwedges", "acknowledges", + "acknoweldged", "acknowledged", + "acknoweldges", "acknowledges", + "acknowiedged", "acknowledged", + "acknowladges", "acknowledges", + "acknowldeged", "acknowledged", + "acknowledget", "acknowledgement", + "acknowleding", "acknowledging", + "acknowlegded", "acknowledged", + "acknowlegdes", "acknowledges", + "ackumulation", "accumulation", + "acquaintaces", "acquaintances", + "acquaintence", "acquaintance", + "acquantaince", "acquaintance", + "acquantiance", "acquaintances", + "acquiantance", "acquaintances", + "acquiantence", "acquaintance", + "adknowledged", "acknowledged", + "adknowledges", "acknowledges", + "administored", "administer", + "adminsitered", "administered", + "adminstrator", "administrator", + "advantagious", "advantageous", + "advantegeous", "advantageous", + "adventageous", "advantageous", + "adventureous", "adventures", + "adventureres", "adventures", + "adventurious", "adventurous", + "adventuruous", "adventurous", + "advertisiers", "advertisers", + "advertisment", "advertisement", + "advertisters", "advertisers", + "advertisting", "advertising", + "aestheticaly", "aesthetically", + "aestheticlly", "aesthetically", + "afficianados", "aficionados", + "afficionados", "aficionados", + "afghanisthan", "afghanistan", + "afterhtought", "afterthought", + "afterthougth", "afterthought", + "aggressivley", "aggressively", + "agircultural", "agricultural", + "agknowledged", "acknowledged", + "agnosticisim", "agnosticism", + "agracultural", "agricultural", + "agriculteral", "agricultural", + "agriculteurs", "agriculture", + "agricultrual", "agricultural", + "agriculutral", "agricultural", + "agrigultural", "agricultural", + "agrocultural", "agricultural", + "allegiancies", "allegiance", + "alterantives", "alternatives", + "alternatevly", "alternately", + "alternatiely", "alternately", + "alternatieve", "alternative", + "alternativly", "alternatively", + "alternativos", "alternatives", + "alternatvely", "alternately", + "alternitives", "alternatives", + "altruistisch", "altruistic", + "amendmenters", "amendments", + "amohetamines", "amphetamines", + "ampehtamines", "amphetamines", + "ampethamines", "amphetamines", + "amphatamines", "amphetamines", + "amphedamines", "amphetamines", + "amphetamenes", "amphetamines", + "amphetemines", "amphetamines", + "amphetimines", "amphetamines", + "amphetmaines", "amphetamines", + "anecdotallly", "anecdotally", + "annhiliation", "annihilation", + "annihalition", "annihilation", + "annihilatron", "annihilation", + "annihliation", "annihilation", + "annilihation", "annihilation", + "anniversairy", "anniversary", + "anniversarry", "anniversary", 
+ "anniversiary", "anniversary", + "annoucenment", "announcements", + "annoucnement", "announcement", + "announcemnet", "announcements", + "announcemnts", "announcements", + "anphetamines", "amphetamines", + "ansalisation", "nasalisation", + "ansalization", "nasalization", + "antaganistic", "antagonistic", + "antagonisitc", "antagonistic", + "antagonostic", "antagonist", + "antibioticos", "antibiotics", + "anticiaption", "anticipation", + "anticipacion", "anticipation", + "antisipation", "anticipation", + "antogonistic", "antagonistic", + "antrhopology", "anthropology", + "antrophology", "anthropology", + "apllications", "applications", + "apocalypitic", "apocalyptic", + "apologistics", "apologists", + "apologizeing", "apologizing", + "apostrophied", "apostrophe", + "apostrophies", "apostrophe", + "apperciation", "appreciation", + "applicaitons", "applications", + "appoitnments", "appointments", + "apporachable", "approachable", + "appraochable", "approachable", + "appreceating", "appreciating", + "appreciaters", "appreciates", + "appreciatied", "appreciative", + "appreicating", "appreciating", + "appreication", "appreciation", + "appretiation", "appreciation", + "appropriatin", "appropriation", + "appropriatly", "appropriately", + "appropriaton", "appropriation", + "approprietly", "appropriately", + "approstraphe", "apostrophe", + "approxiately", "approximately", + "approximatly", "approximately", + "approximetly", "approximately", + "aproximately", "approximately", + "aqcuaintance", "acquaintance", + "aqquaintance", "acquaintance", + "arbitrariliy", "arbitrarily", + "arbitrarilly", "arbitrarily", + "archetecture", "architecture", + "architechure", "architecture", + "architectual", "architectural", + "architectuur", "architecture", + "architecutre", "architecture", + "architexture", "architecture", + "arcitechture", "architecture", + "areodynamics", "aerodynamics", + "argicultural", "agricultural", + "argumentatie", "argumentative", + "arithmetisch", "arithmetic", + "armageddomon", "armageddon", + "arrengements", "arrangements", + "articifially", "artificially", + "artificailly", "artificially", + "artificiella", "artificial", + "artificually", "artificially", + "artifiically", "artificially", + "assasination", "assassination", + "assassinatin", "assassination", + "assissinated", "assassinated", + "associationg", "associating", + "assoications", "associations", + "assosiations", "associations", + "assosication", "assassination", + "assotiations", "associations", + "assymetrical", "asymmetrical", + "asthetically", "aesthetically", + "astranomical", "astronomical", + "astromonical", "astronomical", + "astronautlis", "astronauts", + "astronimical", "astronomical", + "astronomicly", "astronomical", + "athleticisim", "athleticism", + "atmosphereic", "atmospheric", + "audiobookmrs", "audiobooks", + "auhtenticate", "authenticate", + "australianas", "australians", + "australianos", "australians", + "authentisity", "authenticity", + "authorithies", "authorities", + "authoritiers", "authorities", + "authorizaton", "authorization", + "authrorities", "authorities", + "autochtonous", "autochthonous", + "autocorrrect", "autocorrect", + "automobilies", "automobile", + "automodertor", "automoderator", + "automonomous", "autonomous", + "auxilliaries", "auxiliaries", + "avaliability", "availability", + "avialability", "availability", + "awknowledged", "acknowledged", + "awknowledges", "acknowledges", + "awkwardsness", "awkwardness", + "babysittting", "babysitting", + "beaurocratic", "bureaucratic", + 
"beautifullly", "beautifully", + "belligerante", "belligerent", + "beuraucratic", "bureaucratic", + "billionairre", "billionaire", + "billionaries", "billionaires", + "billioniares", "billionaires", + "bioligically", "biologically", + "birmingharam", "birmingham", + "bittersweeet", "bittersweet", + "blamethrower", "flamethrower", + "blueberrries", "blueberries", + "blueprintcss", "blueprints", + "boardcasting", "broadcasting", + "bobybuilding", "bodybuilding", + "bodybuidling", "bodybuilding", + "bodybuilidng", "bodybuilding", + "bodybuliding", "bodybuilding", + "bodydbuilder", "bodybuilder", + "bombardement", "bombardment", + "boradcasting", "broadcasting", + "botivational", "motivational", + "brainwahsing", "brainwashing", + "brakethrough", "breakthrough", + "braodcasting", "broadcasting", + "brazilianese", "brazilians", + "brazilianess", "brazilians", + "breakthorugh", "breakthrough", + "breaktrhough", "breakthrough", + "breastfeedig", "breastfeeding", + "breastfeeing", "breastfeeding", + "breasttaking", "breathtaking", + "brianwashing", "brainwashing", + "broadcastors", "broadcasts", + "brotherhoood", "brotherhood", + "buearucratic", "bureaucratic", + "bueraucratic", "bureaucratic", + "bulletprooof", "bulletproof", + "bureaocratic", "bureaucratic", + "bureaucracie", "bureaucratic", + "bureaucracts", "bureaucrats", + "bureaucrates", "bureaucrats", + "bureuacratic", "bureaucratic", + "businessemen", "businessmen", + "cababilities", "capabilities", + "caclulations", "calculations", + "calcluations", "calculation", + "calcualtions", "calculations", + "calculationg", "calculating", + "calculatoare", "calculator", + "californains", "californian", + "californican", "californian", + "californinan", "californian", + "caluclations", "calculations", + "camouflagued", "camouflage", + "canceltation", "cancellation", + "cannibalisim", "cannibalism", + "canniballism", "cannibalism", + "cannotations", "connotations", + "capitalistes", "capitalists", + "caracterized", "characterized", + "carbohydrats", "carbohydrates", + "carbohyrdate", "carbohydrates", + "caricaturale", "caricature", + "caricaturile", "caricature", + "caricaturise", "caricature", + "caricaturize", "caricature", + "catastraphic", "catastrophic", + "catastrohpic", "catastrophic", + "catastrophie", "catastrophe", + "categoricaly", "categorically", + "categoriezed", "categorized", + "catergorized", "categorized", + "caterpillers", "caterpillars", + "catestrophic", "catastrophic", + "catholicisim", "catholicism", + "catholocisim", "catholicism", + "catistrophic", "catastrophic", + "catostraphic", "catastrophic", + "catostrophic", "catastrophic", + "catterpilars", "caterpillars", + "catterpillar", "caterpillar", + "celebratings", "celebrations", + "celebritites", "celebrities", + "celibrations", "celebrations", + "cententenial", "centennial", + "cercumstance", "circumstance", + "cerification", "verification", + "certificiate", "certificate", + "challengeing", "challenging", + "chamiponship", "championships", + "champinoship", "championships", + "championchip", "championship", + "championsihp", "championships", + "championsips", "championships", + "champiosnhip", "championships", + "champoinship", "championship", + "chanpionship", "championship", + "charactarize", "characterize", + "charaterized", "characterized", + "charismastic", "charismatic", + "cheerlearder", "cheerleader", + "cheerleeders", "cheerleaders", + "cheeseberger", "cheeseburger", + "cheeseborger", "cheeseburger", + "cheesebruger", "cheeseburgers", + "cheeseburges", 
"cheeseburgers", + "cheeseburgie", "cheeseburger", + "cheezeburger", "cheeseburger", + "chirstianity", "christianity", + "chocolateers", "chocolates", + "chrisitanity", "christianity", + "christainity", "christianity", + "christiantiy", "christianity", + "christinaity", "christianity", + "chromosomers", "chromosomes", + "chronologial", "chronological", + "chrsitianity", "christianity", + "cilivization", "civilizations", + "circulatiing", "circulating", + "circulationg", "circulating", + "circumcisied", "circumcised", + "circumcition", "circumcision", + "circumsicion", "circumcision", + "circumsision", "circumcision", + "circumsition", "circumcision", + "circumsizion", "circumcision", + "circumstanes", "circumstance", + "circumstanta", "circumstantial", + "circumstante", "circumstance", + "circuncision", "circumcision", + "circunstance", "circumstance", + "civiliaztion", "civilizations", + "civilizacion", "civilization", + "civilizaiton", "civilization", + "civilizatoin", "civilizations", + "civilizatons", "civilizations", + "civilziation", "civilizations", + "civizilation", "civilizations", + "claculations", "calculations", + "classificato", "classification", + "cockroachers", "cockroaches", + "coefficienct", "coefficient", + "coencidental", "coincidental", + "coincedental", "coincidental", + "coincidencal", "coincidental", + "coincidentia", "coincidental", + "coindidental", "coincidental", + "coinsidental", "coincidental", + "cointerpoint", "counterpoint", + "collaberator", "collaborate", + "collaboratie", "collaborate", + "collaboratin", "collaboration", + "collectivily", "collectively", + "collectivley", "collectively", + "colonialisim", "colonialism", + "colonizacion", "colonization", + "colonizators", "colonizers", + "colonozation", "colonization", + "combanations", "combinations", + "combonations", "combinations", + "comdemnation", "condemnation", + "comemmorates", "commemorates", + "comemoretion", "commemoration", + "comeptitions", "competitions", + "comfirmation", "confirmation", + "comfortabley", "comfortably", + "comfortablly", "comfortably", + "comissioning", "commissioning", + "commandemnts", "commandment", + "commandmants", "commandments", + "commandmends", "commandments", + "commemmorate", "commemorate", + "commendments", "commandments", + "commenteries", "commenters", + "commenwealth", "commonwealth", + "commerciales", "commercials", + "commerically", "commercially", + "comminicated", "communicated", + "commishioned", "commissioned", + "commishioner", "commissioner", + "commisioning", "commissioning", + "commissionar", "commissioner", + "commissionor", "commissioner", + "committments", "commitments", + "commoditites", "commodities", + "commomwealth", "commonwealth", + "commonhealth", "commonwealth", + "commonweatlh", "commonwealth", + "commonwelath", "commonwealth", + "communciated", "communicated", + "communiation", "communication", + "communicatie", "communicate", + "communicatin", "communications", + "communicaton", "communication", + "communitites", "communities", + "compansating", "compensating", + "compansation", "compensation", + "comparativly", "comparatively", + "comparisions", "comparisons", + "comparission", "comparisons", + "comparissons", "comparisons", + "compatablity", "compatibility", + "compatibiliy", "compatibility", + "compatibilty", "compatibility", + "compatiblity", "compatibility", + "compensacion", "compensation", + "compensative", "compensate", + "compesitions", "compositions", + "competetions", "competitions", + "competitevly", "competitively", + 
"competitiion", "competition", + "competitiors", "competitors", + "competitivly", "competitively", + "competitivos", "competitions", + "compinsating", "compensating", + "compinsation", "compensation", + "complainging", "complaining", + "completetion", "completion", + "compliations", "compilation", + "complicacion", "complication", + "complicatied", "complicate", + "complicaties", "complicate", + "complicatred", "complicate", + "complicatted", "complicate", + "complilation", "complication", + "complimation", "complication", + "complimenary", "complimentary", + "complimentje", "complimented", + "complimentry", "complimentary", + "complination", "complication", + "complitation", "complication", + "composistion", "compositions", + "compramising", "compromising", + "compremising", "compromising", + "compresssion", "compression", + "compromissen", "compromise", + "compromisses", "compromises", + "compromizing", "compromising", + "compromosing", "compromising", + "comptability", "compatibility", + "compulsivley", "compulsive", + "compulsorary", "compulsory", + "computarized", "computerized", + "comrpomising", "compromising", + "comtaminated", "contaminated", + "comtemporary", "contemporary", + "conbinations", "combinations", + "concatinated", "contaminated", + "conceivabley", "conceivably", + "concellation", "cancellation", + "concentraded", "concentrated", + "concentraing", "concentrating", + "concentraion", "concentration", + "concentrarte", "concentrate", + "concentratie", "concentrate", + "concentratin", "concentration", + "concequences", "consequences", + "concequently", "consequently", + "concersation", "conservation", + "concervation", "conservation", + "concervatism", "conservatism", + "concervative", "conservative", + "conciderable", "considerable", + "conciderably", "considerably", + "conciousness", "consciousness", + "conclusiones", "conclusions", + "conclusivley", "conclusive", + "condamnation", "condemnation", + "condemantion", "condemnation", + "condenmation", "condemnation", + "condescening", "condescending", + "condescenion", "condescension", + "conditionnal", "conditional", + "conditionned", "conditioned", + "conditionner", "conditioner", + "condmenation", "condemnation", + "condolencies", "condolences", + "condolensces", "condolences", + "condomnation", "condemnation", + "condradicted", "contradicted", + "conenctivity", "connectivity", + "confedential", "confidential", + "confederancy", "confederacy", + "confederatie", "confederate", + "confermation", "confirmation", + "confersation", "conservation", + "confessionis", "confessions", + "confidencial", "confidential", + "confidentail", "confidential", + "confidentaly", "confidently", + "confidentely", "confidently", + "confidentiel", "confidential", + "configuratin", "configurations", + "configuraton", "configuration", + "confirmacion", "confirmation", + "confrimation", "confirmation", + "confrontaion", "confrontation", + "congegration", "congregation", + "congergation", "congregation", + "congradulate", "congratulate", + "congragation", "congregation", + "congragulate", "congratulate", + "congratualte", "congratulate", + "congregacion", "congregation", + "congresional", "congressional", + "congresssman", "congressman", + "congresssmen", "congressmen", + "congretation", "congregation", + "congrigation", "congregation", + "conicidental", "coincidental", + "connatations", "connotations", + "connecticuit", "connecticut", + "connectivety", "connectivity", + "connetations", "connotations", + "connitations", "connotations", + 
"connonations", "connotations", + "conolization", "colonization", + "conpensating", "compensating", + "conpensation", "compensation", + "conpetitions", "competitions", + "conplimented", "complimented", + "conpromising", "compromising", + "consciouness", "consciousness", + "consciouslly", "consciously", + "consectutive", "consecutive", + "consecuences", "consequences", + "consecuentes", "consequences", + "consecuently", "consequently", + "consensuarlo", "consensual", + "consentrated", "concentrated", + "consentrates", "concentrates", + "conseqeunces", "consequence", + "consequenses", "consequences", + "consequental", "consequently", + "consequneces", "consequence", + "conservacion", "conservation", + "conservaties", "conservatives", + "conservativo", "conservation", + "conservativs", "conservatism", + "conservitave", "conservatives", + "conservitism", "conservatism", + "conservitive", "conservative", + "considerarle", "considerable", + "considerarte", "considerate", + "consideraste", "considerate", + "consideratie", "considerate", + "consideratin", "considerations", + "consideribly", "considerably", + "consilidated", "consolidated", + "consipracies", "conspiracies", + "consiquently", "consequently", + "consistantly", "consistently", + "consistencey", "consistency", + "consistentcy", "consistently", + "consitutents", "constituents", + "consoldiated", "consolidated", + "consolitated", "consolidate", + "consolodated", "consolidated", + "consoltation", "consultation", + "conspericies", "conspiracies", + "conspiracize", "conspiracies", + "conspiriator", "conspirator", + "conspiricies", "conspiracies", + "conspriacies", "conspiracies", + "consqeuences", "consequence", + "constinually", "continually", + "constitition", "constitution", + "constituante", "constituents", + "constituants", "constituents", + "constituates", "constitutes", + "constitucion", "constitution", + "constituient", "constitute", + "constituinte", "constituents", + "constitutiei", "constitute", + "constitutues", "constitute", + "constiutents", "constituents", + "constracting", "constructing", + "constraction", "construction", + "constrainsts", "constraints", + "construccion", "construction", + "construciton", "construction", + "constructeds", "constructs", + "constructief", "constructive", + "constructies", "constructs", + "constructifs", "constructs", + "constructiin", "constructing", + "constructivo", "construction", + "consturction", "construction", + "consultating", "consultation", + "consumerisim", "consumerism", + "contaiminate", "contaminate", + "contaminatie", "contaminated", + "contaminaton", "contamination", + "contaminents", "containment", + "contamporary", "contemporary", + "contanimated", "contaminated", + "contaniments", "containment", + "contemperary", "contemporary", + "contemporany", "contemporary", + "continentais", "continents", + "continential", "continental", + "contineously", "continuously", + "continiously", "continuously", + "continuacion", "continuation", + "continuating", "continuation", + "continuativo", "continuation", + "continuining", "continuing", + "contirbution", "contribution", + "contirbutors", "contributors", + "contiunation", "continuation", + "contrabution", "contribution", + "contraceptie", "contraceptives", + "contradicing", "contradicting", + "contradicion", "contradiction", + "contradicory", "contradictory", + "contradictie", "contradicted", + "contradictin", "contradiction", + "contradicton", "contradiction", + "contraticted", "contradicted", + "contribucion", "contribution", + 
"contribuitor", "contributor", + "contributers", "contributors", + "contributivo", "contribution", + "contributons", "contributors", + "contrictions", "contractions", + "contridicted", "contradicted", + "controlleras", "controllers", + "controlllers", "controllers", + "controverial", "controversial", + "controveries", "controversies", + "controversal", "controversial", + "controversey", "controversy", + "contructions", "contractions", + "conveinently", "conveniently", + "convencional", "conventional", + "conveniantly", "conveniently", + "converastion", "conversations", + "converdation", "conservation", + "conversacion", "conversation", + "conversaiton", "conversations", + "conversatino", "conservation", + "conversatism", "conservatism", + "conversatoin", "conversations", + "conversiones", "conversions", + "converstaion", "conversation", + "convertables", "convertibles", + "convertiable", "convertible", + "convertibile", "convertible", + "convervation", "conservation", + "convervatism", "conservatism", + "converzation", "conservation", + "convesration", "conservation", + "convienently", "conveniently", + "convorsation", "conversation", + "convseration", "conservation", + "coordenation", "coordination", + "coordiantion", "coordination", + "coordinacion", "coordination", + "coordinaters", "coordinates", + "coordinatior", "coordinator", + "coordinatore", "coordinate", + "coordonation", "coordination", + "cooridnation", "coordination", + "coorperation", "cooperation", + "coprorations", "corporations", + "corinthianos", "corinthians", + "corinthinans", "corinthians", + "corparations", "corporations", + "corperations", "corporations", + "corporativos", "corporations", + "corproations", "corporations", + "corrdination", "coordination", + "correponding", "corresponding", + "correposding", "corresponding", + "correspondes", "corresponds", + "correspondig", "corresponding", + "corresponing", "corresponding", + "corrisponded", "corresponded", + "costomizable", "customizable", + "costumizable", "customizable", + "councidental", "coincidental", + "counsellling", "counselling", + "counterfiets", "counterfeit", + "counterfited", "counterfeit", + "counterracts", "counterparts", + "countertraps", "counterparts", + "countrywides", "countryside", + "coutnerparts", "counterparts", + "coutnerpoint", "counterpoint", + "covnersation", "conservation", + "crankenstein", "frankenstein", + "creationisim", "creationism", + "creationnism", "creationism", + "creationnist", "creationist", + "creationsism", "creationism", + "creationsist", "creationist", + "creationsits", "creationists", + "credibillity", "credibility", + "crigneworthy", "cringeworthy", + "cringewhorty", "cringeworthy", + "cringeworhty", "cringeworthy", + "cringewrothy", "cringeworthy", + "cringyworthy", "cringeworthy", + "criticallity", "critically", + "criticiszing", "criticising", + "croporations", "corporations", + "crucifiction", "crucifixion", + "cuestionable", "questionable", + "culiminating", "culminating", + "cumulatative", "cumulative", + "cuntaminated", "contaminated", + "curcumcision", "circumcision", + "curcumstance", "circumstance", + "custamizable", "customizable", + "custimizable", "customizable", + "customizaton", "customization", + "customizeble", "customizable", + "customizible", "customizable", + "custumizable", "customizable", + "cuztomizable", "customizable", + "dabilitating", "debilitating", + "dangerousely", "dangerously", + "decensitized", "desensitized", + "deceptionist", "receptionist", + "declareation", "declaration", + 
"decomposeion", "decomposition", + "decomposited", "decomposed", + "decscription", "description", + "deffensively", "defensively", + "deficiancies", "deficiencies", + "deficiencias", "deficiencies", + "deficiensies", "deficiencies", + "definatively", "definitively", + "defininitely", "definitively", + "definitavely", "definitively", + "definitevely", "definitively", + "definitifely", "definitively", + "definitinely", "definitively", + "definititely", "definitively", + "definitivley", "definitively", + "deinitalized", "deinitialized", + "deinitalizes", "deinitializes", + "delibaretely", "deliberately", + "deliberatley", "deliberately", + "delibirately", "deliberately", + "delibitating", "debilitating", + "deliverately", "deliberately", + "delusionally", "delusively", + "demesticated", "domesticated", + "democracries", "democracies", + "democraphics", "demographics", + "democratisch", "democratic", + "demograhpics", "demographics", + "demogrpahics", "demographics", + "demonination", "denominations", + "demonstarted", "demonstrated", + "demonstartes", "demonstrates", + "demonstrabil", "demonstrably", + "demonstraion", "demonstration", + "demonstraits", "demonstrates", + "demonstrants", "demonstrates", + "demonstratie", "demonstrate", + "demonstratin", "demonstration", + "demonstrerat", "demonstrate", + "demosntrably", "demonstrably", + "demosntrated", "demonstrated", + "demosntrates", "demonstrates", + "demostration", "demonstration", + "denomenation", "denomination", + "denominacion", "denomination", + "denominatior", "denominator", + "denominatons", "denominations", + "denomonation", "denomination", + "deomgraphics", "demographics", + "depencencies", "dependencies", + "dependancies", "dependencies", + "dependencias", "dependencies", + "dependenices", "dependencies", + "dependensies", "dependencies", + "deperecation", "deprecation", + "deplacements", "replacements", + "deregualtion", "deregulation", + "deregulaiton", "deregulation", + "derugulation", "deregulation", + "describtions", "descriptions", + "descriminant", "discriminant", + "descriptivos", "descriptions", + "desctiptions", "descriptions", + "desctruction", "destruction", + "desencitized", "desensitized", + "desensatized", "desensitized", + "desensitived", "desensitized", + "desentisized", "desensitized", + "desentitized", "desensitized", + "desentizised", "desensitized", + "desginations", "destinations", + "desgustingly", "disgustingly", + "desitnations", "destinations", + "despectively", "respectively", + "despensaries", "dispensaries", + "desperatedly", "desperately", + "desperatelly", "desperately", + "desqualified", "disqualified", + "desregarding", "disregarding", + "dessertation", "dissertation", + "destiantions", "destinations", + "destinctions", "destinations", + "destractions", "distractions", + "destributors", "distributors", + "determinanti", "determination", + "determinaton", "determination", + "determinging", "determining", + "determinisic", "deterministic", + "determinisim", "determinism", + "deterministc", "deterministic", + "determinitic", "deterministic", + "detrimential", "detrimental", + "developement", "development", + "developmenet", "developments", + "develpoments", "developments", + "devolopement", "development", + "devolopments", "developments", + "diasspointed", "dissapointed", + "dicitonaries", "dictionaries", + "dictadorship", "dictatorship", + "dictarorship", "dictatorship", + "dictatorshop", "dictatorship", + "dictionaires", "dictionaries", + "didsapointed", "dissapointed", + "differencial", 
"differential", + "differencies", "differences", + "differentate", "differentiate", + "differnetial", "differential", + "difficulites", "difficulties", + "difficutlies", "difficulties", + "diffuculties", "difficulties", + "dimensionals", "dimensions", + "dimensionnal", "dimensional", + "dimensionsal", "dimensional", + "diplomatisch", "diplomatic", + "directionnal", "directional", + "disaapointed", "dissapointed", + "disadvandage", "disadvantaged", + "disadvantged", "disadvantaged", + "disadvantges", "disadvantages", + "disadvatange", "disadvantage", + "disadventage", "disadvantage", + "disagremeent", "disagreements", + "disapointing", "disappointing", + "disappearnce", "disappearance", + "disappearred", "disappeared", + "disapperaing", "disappearing", + "disaspointed", "dissapointed", + "disastisfied", "dissatisfied", + "disatissfied", "dissatisfied", + "disatvantage", "disadvantage", + "discertation", "dissertation", + "disciniplary", "disciplinary", + "disciplanary", "disciplinary", + "disciplenary", "disciplinary", + "disciplinare", "discipline", + "disciplinera", "disciplinary", + "disciplinery", "disciplinary", + "disclipinary", "disciplinary", + "disconencted", "disconnected", + "disconnectes", "disconnects", + "disconnectme", "disconnected", + "disconnectus", "disconnects", + "discontiuned", "discontinued", + "discountined", "discontinued", + "discreditied", "discredited", + "discreditted", "discredited", + "discriminare", "discriminate", + "discriminted", "discriminated", + "disctinction", "distinction", + "disctinctive", "distinctive", + "disctintions", "distinctions", + "discualified", "disqualified", + "discustingly", "disgustingly", + "disemination", "dissemination", + "disenchanged", "disenchanted", + "disengenuous", "disingenuous", + "disenginuous", "disingenuous", + "disensitized", "desensitized", + "disgareement", "disagreements", + "disgruntaled", "disgruntled", + "disgrunteled", "disgruntled", + "disguntingly", "disgustingly", + "disingeneous", "disingenuous", + "disingenious", "disingenuous", + "disinteresed", "disinterested", + "disintereted", "disinterested", + "dismantleing", "dismantling", + "disobediance", "disobedience", + "disobeidence", "disobedience", + "dispalcement", "displacement", + "dispapointed", "dissapointed", + "dispencaries", "dispensaries", + "dispensaires", "dispensaries", + "dispensarios", "dispensaries", + "dispensiries", "dispensaries", + "dispensories", "dispensaries", + "disqaulified", "disqualified", + "disqualifyed", "disqualified", + "disqustingly", "disgustingly", + "disrecpected", "disrespected", + "disrepsected", "disrespected", + "disresepcted", "disrespected", + "disrespecful", "disrespectful", + "disrespecing", "disrespecting", + "disrespectul", "disrespectful", + "disrespekted", "disrespected", + "disrtibution", "distributions", + "dissapearing", "disappearing", + "dissapionted", "dissapointed", + "dissapoimted", "dissapointed", + "dissapoitned", "dissapointed", + "dissaponited", "dissapointed", + "dissapoonted", "dissapointed", + "dissapounted", "dissapointed", + "dissappinted", "dissapointed", + "dissapponted", "dissapointed", + "dissastified", "dissatisfied", + "dissatisifed", "dissatisfied", + "dissatsified", "dissatisfied", + "dissepointed", "dissapointed", + "dissipointed", "dissapointed", + "dissobediant", "disobedient", + "dissobedient", "disobedient", + "dissopointed", "dissapointed", + "disspaointed", "dissapointed", + "dissppointed", "dissapointed", + "dissspointed", "dissapointed", + "distinations", "distinctions", + 
"distincitons", "distinctions", + "distingished", "distinguished", + "distingishes", "distinguishes", + "distinguised", "distinguished", + "distirbuting", "distributing", + "distirbution", "distribution", + "distrabution", "distribution", + "distribitors", "distributors", + "distribtuion", "distributions", + "distribucion", "distribution", + "distribuited", "distributed", + "distribuiton", "distributions", + "distribuitor", "distributor", + "distribusion", "distributions", + "distributino", "distributions", + "distributior", "distributor", + "distributons", "distributors", + "distributore", "distribute", + "distriubtion", "distributions", + "distrobution", "distribution", + "distrubances", "disturbance", + "distrubiting", "distributing", + "distrubition", "distribution", + "distrubitors", "distributors", + "distrubution", "distribution", + "distrubutors", "distributors", + "distructions", "distractions", + "distustingly", "disgustingly", + "ditactorship", "dictatorship", + "documenation", "documentation", + "documentaion", "documentation", + "documentaire", "documentaries", + "documentarse", "documentaries", + "documentarsi", "documentaries", + "domesitcated", "domesticated", + "domisticated", "domesticated", + "donesticated", "domesticated", + "donwloadable", "downloadable", + "dossapointed", "dissapointed", + "downlaodable", "downloadable", + "downloadbale", "downloadable", + "downloadeble", "downloadable", + "drankenstein", "frankenstein", + "dublications", "publications", + "dusgustingly", "disgustingly", + "dynamicallly", "dynamically", + "dyregulation", "deregulation", + "earthquackes", "earthquakes", + "earthquakers", "earthquakes", + "econimically", "economically", + "economisesti", "economists", + "educationnal", "educational", + "effectionate", "affectionate", + "effectivelly", "effectively", + "effectivenss", "effectiveness", + "efficienctly", "efficiency", + "effordlessly", "effortlessly", + "ejacualtions", "ejaculation", + "electorlytes", "electrolytes", + "electricrain", "electrician", + "electrictian", "electrician", + "electrobytes", "electrolytes", + "electrocytes", "electrolytes", + "electrolites", "electrolytes", + "electroltyes", "electrolytes", + "electronicas", "electronics", + "electronicos", "electronics", + "electroyltes", "electrolytes", + "elektrolytes", "electrolytes", + "eloctrolytes", "electrolytes", + "embarassment", "embarrassment", + "embarasssing", "embarassing", + "embarrasment", "embarrassment", + "embarressing", "embarrassing", + "embarrissing", "embarrassing", + "emberrassing", "embarrassing", + "emphetamines", "amphetamines", + "emprisonment", "imprisonment", + "encarcerated", "incarcerated", + "enceclopedia", "encyclopedia", + "enchancement", "enhancement", + "enchancments", "enchantments", + "enchantmants", "enchantments", + "enchentments", "enchantments", + "enciclopedia", "encyclopedia", + "enclycopedia", "encyclopedia", + "encorporated", "incorporated", + "encourageing", "encouraging", + "encyclapedia", "encyclopedia", + "encyclepedia", "encyclopedia", + "encyclopadia", "encyclopedia", + "encyclopeida", "encyclopedia", + "encyclopidia", "encyclopedia", + "encycolpedia", "encyclopedia", + "encyklopedia", "encyclopedia", + "encylcopedia", "encyclopedia", + "encyplopedia", "encyclopedia", + "endoresments", "endorsement", + "enemployment", "unemployment", + "enfringement", "infringement", + "enlightended", "enlightened", + "enlightenend", "enlightened", + "enlightented", "enlightened", + "enlightining", "enlightening", + "enligthening", 
"enlightening", + "entaglements", "entanglements", + "entartaining", "entertaining", + "enterpreneur", "entrepreneurs", + "enterprenuer", "entrepreneur", + "entertainted", "entertained", + "enthusiaists", "enthusiasts", + "enthusuastic", "enthusiastic", + "entoxication", "intoxication", + "entrepeneurs", "entrepreneurs", + "entreperneur", "entrepreneurs", + "entreprenaur", "entrepreneur", + "entrepreners", "entrepreneurs", + "entrepreneus", "entrepreneurs", + "entreprenour", "entrepreneur", + "entreprenure", "entrepreneurs", + "entreprenurs", "entrepreneurs", + "entrepreuner", "entrepreneurs", + "entretaining", "entertaining", + "enviormental", "environmental", + "enviornments", "environments", + "enviromental", "environmental", + "environemnts", "environments", + "environmentl", "environmentally", + "environmetal", "environmental", + "envrionments", "environments", + "errorneously", "erroneously", + "establishmet", "establishments", + "evelutionary", "evolutionary", + "exagerrating", "exaggerating", + "exaggarating", "exaggerating", + "exaggaration", "exaggeration", + "exaggeratted", "exaggerated", + "exaggurating", "exaggerating", + "exagguration", "exaggeration", + "exceptionaly", "exceptionally", + "exceptionnal", "exceptional", + "exclusiveity", "exclusivity", + "exclusivelly", "exclusively", + "exclusivitiy", "exclusivity", + "excorciating", "excruciating", + "excrusiating", "excruciating", + "excurciating", "excruciating", + "exectuioners", "executioner", + "executioneer", "executioner", + "executionees", "executions", + "executioness", "executions", + "executionier", "executioner", + "executionner", "executioner", + "exeggerating", "exaggerating", + "exeggeration", "exaggeration", + "expeditonary", "expeditionary", + "expendatures", "expenditures", + "expendetures", "expenditures", + "expentitures", "expenditures", + "experamental", "experimental", + "expereincing", "experiencing", + "experemental", "experimental", + "experiancing", "experiencing", + "experiemntal", "experimental", + "experiemnted", "experimented", + "experimantal", "experimental", + "experimentan", "experimentation", + "experimentes", "experiments", + "experimentle", "experimented", + "experimentos", "experiments", + "experimentul", "experimental", + "expidentures", "expenditures", + "expierencing", "experiencing", + "expiremental", "experimental", + "expiremented", "experimented", + "explaination", "explanation", + "explenations", "explanations", + "expliotation", "exploitation", + "exploitaiton", "exploitation", + "exploitating", "exploitation", + "exploititive", "exploitative", + "explortation", "exploitation", + "explotiation", "exploitation", + "explotiative", "exploitative", + "expolitation", "exploitation", + "expolitative", "exploitative", + "exponentialy", "exponentially", + "expropiation", "expropriation", + "extensivelly", "extensively", + "extradiction", "extradition", + "extraordiary", "extraordinary", + "extraordinay", "extraordinary", + "extrapolerat", "extrapolate", + "extrapoloate", "extrapolate", + "extremistisk", "extremists", + "extrordinary", "extraordinary", + "extruciating", "excruciating", + "facilitatile", "facilitate", + "fahrenheight", "fahrenheit", + "falmethrower", "flamethrower", + "familiarlize", "familiarize", + "fanslaughter", "manslaughter", + "fantasticaly", "fantastically", + "fantasticlly", "fantastically", + "fashionalble", "fashionable", + "fermantation", "fermentation", + "fermentacion", "fermentation", + "fermentaiton", "fermentation", + "fermentating", "fermentation", + 
"fermintation", "fermentation", + "fictionaries", "dictionaries", + "figuartively", "figuratively", + "figuratevely", "figuratively", + "figurativley", "figuratively", + "figuretively", "figuratively", + "figuritively", "figuratively", + "fingerpoints", "fingerprints", + "firefigthers", "firefighters", + "flamethorwer", "flamethrower", + "flametrhower", "flamethrower", + "flanethrower", "flamethrower", + "flexibillity", "flexibility", + "flourishment", "flourishing", + "fluctiations", "fluctuations", + "flucutations", "fluctuations", + "fluxtuations", "fluctuations", + "forgivenness", "forgiveness", + "fortunatelly", "fortunately", + "framethrower", "flamethrower", + "frankenstain", "frankenstein", + "frankensteen", "frankenstein", + "frankenstine", "frankenstein", + "frankinstein", "frankenstein", + "frementation", "fermentation", + "friendzonded", "friendzoned", + "friendzonned", "friendzoned", + "friendzowned", "friendzoned", + "fringeworthy", "cringeworthy", + "fronkenstein", "frankenstein", + "fruitsations", "frustrations", + "frustrastion", "frustrations", + "fucntionally", "functionally", + "funcitonally", "functionally", + "functionable", "functional", + "functionaliy", "functionally", + "functionalty", "functionality", + "functionlity", "functionality", + "functionning", "functioning", + "fundamentais", "fundamentals", + "fundamentalt", "fundamentalist", + "fundamentaly", "fundamentally", + "fundemantals", "fundamentals", + "fundementals", "fundamentals", + "fundimentals", "fundamentals", + "furstrations", "frustrations", + "futuristisch", "futuristic", + "fwankenstein", "frankenstein", + "geneological", "genealogical", + "generacional", "generational", + "generalizare", "generalize", + "generalizate", "generalize", + "generelizing", "generalizing", + "geograhpical", "geographical", + "geographicly", "geographical", + "geographisch", "geographic", + "geogrpahical", "geographical", + "goegraphical", "geographical", + "governemntal", "governmental", + "governmently", "governmental", + "grammaticaal", "grammatical", + "grammaticaly", "grammatically", + "grandchilden", "grandchildren", + "grandchilder", "grandchildren", + "grandchilren", "grandchildren", + "grassrooters", "grassroots", + "gringeworthy", "cringeworthy", + "guantanameow", "guantanamo", + "guantanamero", "guantanamo", + "hallucinatin", "hallucinations", + "hallucinaton", "hallucination", + "handwritting", "handwriting", + "harrassments", "harassments", + "headqaurters", "headquarters", + "headquatered", "headquartered", + "healthercare", "healthcare", + "heavywieghts", "heavyweight", + "helicopteros", "helicopters", + "hererosexual", "heterosexual", + "heretosexual", "heterosexual", + "heteresexual", "heterosexual", + "hetreosexual", "heterosexual", + "highligthing", "highlighting", + "hipocritical", "hypocritical", + "hipothetical", "hypothetical", + "histarically", "historically", + "histerically", "historically", + "historicians", "historians", + "homogeneized", "homogenized", + "homogenenous", "homogeneous", + "homosexuales", "homosexuals", + "homosexualiy", "homosexuality", + "homosexualls", "homosexuals", + "homosexualty", "homosexuality", + "homosexuella", "homosexual", + "hopsitalized", "hospitalized", + "horisontally", "horizontally", + "horizantally", "horizontally", + "horiztonally", "horizontally", + "horozontally", "horizontally", + "hospitallity", "hospitality", + "hospitilized", "hospitalized", + "hospitolized", "hospitalized", + "hosptialized", "hospitalized", + "humanitarien", "humanitarian", + 
"humanitarion", "humanitarian", + "humanitatian", "humanitarian", + "humaniterian", "humanitarian", + "humantiarian", "humanitarian", + "huminatarian", "humanitarian", + "hurricanefps", "hurricanes", + "hyopthetical", "hypothetical", + "hypathetical", "hypothetical", + "hypertrophey", "hypertrophy", + "hypethetical", "hypothetical", + "hypocrticial", "hypocritical", + "hypocrytical", "hypocritical", + "hypotehtical", "hypothetical", + "hypotethical", "hypothetical", + "hypotherical", "hypothetical", + "hypotheticly", "hypothetical", + "hystarically", "hysterically", + "hystorically", "hysterically", + "idealistisch", "idealistic", + "identificato", "identification", + "identifierad", "identified", + "identifieras", "identifies", + "identifyable", "identifiable", + "ideologicaly", "ideologically", + "idiosyncracy", "idiosyncrasy", + "illegetimate", "illegitimate", + "illegitamate", "illegitimate", + "illegitamite", "illegitimate", + "illegitemate", "illegitimate", + "illegitimite", "illegitimate", + "illigetimate", "illegitimate", + "illigitemate", "illegitimate", + "illistration", "illustration", + "illsutration", "illustrations", + "illustartion", "illustration", + "illustraitor", "illustrator", + "illustraties", "illustrate", + "illustratior", "illustrator", + "imcompatible", "incompatible", + "imcompetence", "incompetence", + "imexperience", "inexperience", + "immediatelly", "immediately", + "immortallity", "immortality", + "imperialfist", "imperialist", + "imperialisim", "imperialism", + "imperialstic", "imperialist", + "implamenting", "implementing", + "implausibile", "implausible", + "implecations", "implications", + "implementase", "implements", + "implementasi", "implements", + "implementato", "implementation", + "implentation", "implementation", + "implimenting", "implementing", + "imporvements", "improvements", + "impossibilty", "impossibility", + "impossiblely", "impossibly", + "impossiblity", "impossibly", + "impovershied", "impoverished", + "impoversihed", "impoverished", + "imprefection", "imperfections", + "improsonment", "imprisonment", + "improviserad", "improvised", + "imrpovements", "improvements", + "imtimidating", "intimidating", + "imtimidation", "intimidation", + "inaccesibles", "inaccessible", + "inaccessable", "inaccessible", + "inaccessbile", "inaccessible", + "inaccurasies", "inaccuracies", + "inaccuraties", "inaccuracies", + "inaccuricies", "inaccuracies", + "inacuraccies", "inaccuracies", + "inadvertenly", "inadvertently", + "inappropiate", "inappropriate", + "inapproprate", "inappropriate", + "inappropriae", "inappropriately", + "inappropriet", "inappropriately", + "inattractive", "unattractive", + "inbelievable", "unbelievable", + "incarcelated", "incarcerated", + "incarcirated", "incarcerated", + "incarserated", "incarcerated", + "incedentally", "incidentally", + "incentiveise", "incentives", + "incestigator", "investigator", + "incomaptible", "incompatible", + "incomparible", "incompatible", + "incompatable", "incompatible", + "incompatibil", "incompatible", + "incompetance", "incompetence", + "incompetente", "incompetence", + "incompitable", "incompatible", + "incomptetent", "incompetent", + "inconcistent", "inconsistent", + "inconsistant", "inconsistent", + "inconsistecy", "inconsistency", + "inconsisteny", "inconsistency", + "inconveinent", "inconvenient", + "inconveniant", "inconvenient", + "inconveniece", "inconvenience", + "inconvenince", "inconvenience", + "inconvienent", "inconvenient", + "incorparated", "incorporated", + "incorperated", 
"incorporated", + "incorportaed", "incorporated", + "incorportate", "incorporate", + "incrediblely", "incredibly", + "incrementers", "increments", + "incremential", "incremental", + "indefinately", "indefinitely", + "indefineable", "undefinable", + "indefinetely", "indefinitely", + "indefinitive", "indefinite", + "indefinitley", "indefinitely", + "indefintiely", "indefinitely", + "indepedantly", "independently", + "indepencence", "independence", + "independance", "independence", + "independante", "independents", + "independenet", "independents", + "independenly", "independently", + "independense", "independents", + "independente", "independence", + "independetly", "independently", + "indepentents", "independents", + "indetifiable", "identifiable", + "indianaoplis", "indianapolis", + "indianopolis", "indianapolis", + "indicentally", "incidentally", + "indifferance", "indifference", + "indifferente", "indifference", + "indiffernece", "indifference", + "indimidating", "intimidating", + "indimidation", "intimidation", + "indipendence", "independence", + "indisputible", "indisputable", + "indisputibly", "indisputably", + "individuales", "individuals", + "individualty", "individuality", + "individuella", "individual", + "indiviudally", "individually", + "indivudually", "individually", + "indpendently", "independently", + "indroduction", "introduction", + "indroductory", "introductory", + "industriella", "industrial", + "industrijske", "industries", + "inefficienct", "inefficient", + "inefficienty", "inefficiently", + "inevitablely", "inevitably", + "inevitablity", "inevitably", + "inevititably", "inevitably", + "inexblicably", "inexplicably", + "inexpectedly", "unexpectedly", + "inexpereince", "inexperience", + "inexperiance", "inexperience", + "inexperieced", "inexperienced", + "inexperiened", "inexperienced", + "inexperiente", "inexperience", + "inexpierence", "inexperienced", + "inexplicabil", "inexplicably", + "inexplicibly", "inexplicably", + "infalability", "infallibility", + "infilitrated", "infiltrated", + "infiltraitor", "infiltrator", + "infiltratior", "infiltrator", + "infiltratred", "infiltrate", + "influenceing", "influencing", + "infogrpahics", "infographic", + "inforgivable", "unforgivable", + "infrantryman", "infantryman", + "infridgement", "infringement", + "infrignement", "infringement", + "ingestigator", "investigator", + "ingredientes", "ingredients", + "ingreediants", "ingredients", + "ininterested", "uninterested", + "initalizable", "initializable", + "inkompatible", "incompatible", + "inkompetence", "incompetence", + "inkonsistent", "inconsistent", + "inlightening", "enlightening", + "innersection", "intersection", + "innerstellar", "interstellar", + "inpenetrable", "impenetrable", + "inplementing", "implementing", + "inplications", "implications", + "inpoverished", "impoverished", + "inprisonment", "imprisonment", + "inproductive", "unproductive", + "inprovements", "improvements", + "inresponsive", "unresponsive", + "insentivised", "insensitive", + "insentivises", "insensitive", + "insignifiant", "insignificant", + "insignificat", "insignificant", + "insinuationg", "insinuating", + "instabillity", "instability", + "instalaltion", "installations", + "installatons", "installations", + "installatron", "installation", + "instantaneos", "instantaneous", + "instantaneus", "instantaneous", + "instantanous", "instantaneous", + "instinctivly", "instinctively", + "institutuion", "institution", + "instramental", "instrumental", + "instrcutions", "instruction", + "instrucitons", 
"instruction", + "instructiosn", "instruction", + "instructores", "instructors", + "instrumentos", "instruments", + "instrumentul", "instrumental", + "insturmental", "instrumental", + "instutitions", "institutions", + "insuccessful", "unsuccessful", + "insufficiant", "insufficient", + "insuffucient", "insufficient", + "insuspecting", "unsuspecting", + "intaxication", "intoxication", + "intelelctual", "intellectuals", + "intellectals", "intellectuals", + "intellectaul", "intellectuals", + "intellectuel", "intellectual", + "intellecutal", "intellectual", + "intelligance", "intelligence", + "intelligenly", "intelligently", + "intelligente", "intelligence", + "intelligenty", "intelligently", + "intelligient", "intelligent", + "intenational", "international", + "intentionnal", "intentional", + "intepretator", "interpretor", + "interatellar", "interstellar", + "interational", "international", + "intercection", "interception", + "intercepcion", "interception", + "interceptons", "interceptions", + "intereaction", "intersection", + "interections", "interactions", + "interersting", "interpreting", + "interesction", "intersection", + "interestigly", "interestingly", + "interestinly", "interestingly", + "interferance", "interference", + "interfereing", "interfering", + "interferisce", "interferes", + "interferisse", "interferes", + "interferring", "interfering", + "intergration", "integration", + "interlectual", "intellectual", + "intermediare", "intermediate", + "intermediete", "intermediate", + "intermettent", "intermittent", + "intermideate", "intermediate", + "intermidiate", "intermediate", + "internatinal", "international", + "internationl", "international", + "internations", "interactions", + "internediate", "intermediate", + "internelized", "internalized", + "internilized", "internalized", + "interperters", "interpreter", + "interperting", "interpreting", + "interprating", "interpreting", + "interpretare", "interpreter", + "interpretato", "interpretation", + "interpreteer", "interpreter", + "interpretier", "interpreter", + "interpretion", "interpreting", + "interpretter", "interpreter", + "interpriting", "interpreting", + "interraccial", "interracial", + "interractial", "interracial", + "interrogatin", "interrogation", + "interrumping", "interrupting", + "interrupteds", "interrupts", + "interruptors", "interrupts", + "interseccion", "intersection", + "interseciton", "intersections", + "interseption", "interception", + "intersetllar", "interstellar", + "interstallar", "interstellar", + "interstaller", "interstellar", + "intersteller", "interstellar", + "interstellor", "interstellar", + "intertaining", "entertaining", + "intertwinded", "intertwined", + "intertwinned", "intertwined", + "interveiwing", "interviewing", + "intervencion", "intervention", + "interveneing", "intervening", + "intervension", "intervention", + "interviening", "interviewing", + "intidimation", "intimidation", + "intillectual", "intellectual", + "intimidacion", "intimidation", + "intimidative", "intimidate", + "intimitading", "intimidating", + "intimitating", "intimidating", + "intimitation", "intimidation", + "intorduction", "introduction", + "intorductory", "introductory", + "intoxicacion", "intoxication", + "intoxination", "intoxication", + "intrepreting", "interpreting", + "intrinsicaly", "intrinsically", + "introdiction", "introduction", + "introduccion", "introduction", + "introduceras", "introduces", + "introduceres", "introduces", + "introduciton", "introduction", + "introductary", "introductory", + "introducting", 
"introduction", + "introductury", "introductory", + "introduktion", "introduction", + "introspectin", "introspection", + "intruduction", "introduction", + "intruductory", "introductory", + "intsrumental", "instrumental", + "intuitivelly", "intuitively", + "inturrupting", "interrupting", + "invervention", "intervention", + "investagated", "investigated", + "investagator", "investigator", + "investegated", "investigated", + "investegator", "investigator", + "investigaron", "investigator", + "investigater", "investigator", + "investigatie", "investigative", + "investigatin", "investigation", + "investigatio", "investigator", + "investigaton", "investigation", + "investingate", "investigate", + "investogator", "investigator", + "invicibility", "invisibility", + "invididually", "individually", + "invisibiltiy", "invisibility", + "invisilibity", "invisibility", + "invisivility", "invisibility", + "invlunerable", "invulnerable", + "involnerable", "invulnerable", + "involuntairy", "involuntary", + "involuntarly", "involuntary", + "invonvenient", "inconvenient", + "invulenrable", "invulnerable", + "invulernable", "invulnerable", + "invulnarable", "invulnerable", + "invulnerbale", "invulnerable", + "invulnurable", "invulnerable", + "invulverable", "invulnerable", + "invunlerable", "invulnerable", + "invurnerable", "invulnerable", + "irrationably", "irrationally", + "irrationatly", "irrationally", + "irrationella", "irrational", + "irreplacable", "irreplaceable", + "irresistable", "irresistible", + "irresistably", "irresistibly", + "irrespecitve", "irrespective", + "irresponsble", "irresponsible", + "irresponsibe", "irresponsible", + "irreverisble", "irreversible", + "irreversebly", "irreversible", + "irreversibel", "irreversible", + "irrevirsible", "irreversible", + "irrispective", "irrespective", + "irriversible", "irreversible", + "isdefinitely", "indefinitely", + "isntallation", "installation", + "isntrumental", "instrumental", + "jackonsville", "jacksonville", + "jounralistic", "journalistic", + "jouranlistic", "journalistic", + "journalisitc", "journalistic", + "journalistes", "journalists", + "judgementals", "judgements", + "juggernaunts", "juggernaut", + "juridisction", "jurisdictions", + "jurisdiccion", "jurisdiction", + "jurisdiciton", "jurisdiction", + "jurisdiktion", "jurisdiction", + "jurisfiction", "jurisdiction", + "jurisidction", "jurisdiction", + "juristiction", "jurisdiction", + "jursidiction", "jurisdiction", + "jusridiction", "jurisdiction", + "justificatin", "justifications", + "katastrophic", "catastrophic", + "kidnergarten", "kindergarten", + "kindergarden", "kindergarten", + "kingergarten", "kindergarten", + "kintergarten", "kindergarten", + "knolwedgable", "knowledgable", + "knoweldgable", "knowledgable", + "knowladgable", "knowledgable", + "knowldegable", "knowledgable", + "knowldgeable", "knowledgable", + "knowleagable", "knowledgable", + "knowledagble", "knowledgable", + "knowledeable", "knowledgable", + "knowledgabel", "knowledgable", + "knowledgeble", "knowledgeable", + "knowledgebly", "knowledgable", + "knowledgible", "knowledgable", + "knowlegdable", "knowledgable", + "knowlegeable", "knowledgeable", + "knwoledgable", "knowledgable", + "kolonization", "colonization", + "kombinations", "combinations", + "kommissioner", "commissioner", + "kompensation", "compensation", + "konfidential", "confidential", + "konfirmation", "confirmation", + "kongregation", "congregation", + "konservatism", "conservatism", + "konservative", "conservative", + "konsultation", "consultation", + 
"konversation", "conversation", + "koordination", "coordination", + "krankenstein", "frankenstein", + "leaglization", "legalization", + "legalizacion", "legalization", + "legalizaiton", "legalization", + "legendariske", "legendaries", + "legimitately", "legitimately", + "legislatiors", "legislators", + "legistration", "registration", + "legitamately", "legitimately", + "legitamitely", "legitimately", + "legitemately", "legitimately", + "legitimatley", "legitimately", + "legitimitely", "legitimately", + "liberatrians", "libertarians", + "libertarains", "libertarians", + "libertariens", "libertarians", + "libertaryans", "libertarians", + "libertatians", "libertarians", + "liberterians", "libertarians", + "libretarians", "libertarians", + "lighthearded", "lighthearted", + "linguisticas", "linguistics", + "linguisticos", "linguistics", + "linguistisch", "linguistics", + "litllefinger", "littlefinger", + "littelfinger", "littlefinger", + "litterfinger", "littlefinger", + "littiefinger", "littlefinger", + "littlefigner", "littlefinger", + "littlefinder", "littlefinger", + "littlepinger", "littlefinger", + "lnowledgable", "knowledgable", + "longitudonal", "longitudinal", + "madturbating", "masturbating", + "madturbation", "masturbation", + "magnificient", "magnificent", + "maintainance", "maintenance", + "maintainence", "maintenance", + "maintenaince", "maintenance", + "malfucntions", "malfunction", + "manafactured", "manufactured", + "manafacturer", "manufacturer", + "manafactures", "manufactures", + "manifactured", "manufactured", + "manifacturer", "manufacturer", + "manifactures", "manufactures", + "manifestaion", "manifestation", + "manifestanti", "manifestation", + "manipluating", "manipulating", + "manipluation", "manipulation", + "manipualting", "manipulating", + "manipualtion", "manipulation", + "manipualtive", "manipulative", + "manipulacion", "manipulation", + "manipulitive", "manipulative", + "maniuplating", "manipulating", + "maniuplation", "manipulation", + "maniuplative", "manipulative", + "manouverable", "maneuverable", + "mansalughter", "manslaughter", + "manslaugther", "manslaughter", + "mansluaghter", "manslaughter", + "manufactered", "manufactured", + "manufacterer", "manufacturer", + "manufacteres", "manufactures", + "manufacteurs", "manufactures", + "manufactored", "manufactured", + "manufactorer", "manufacturer", + "manufactores", "manufactures", + "manufactuers", "manufacturers", + "manufactuing", "manufacturing", + "manufacturas", "manufactures", + "manufacturor", "manufacturer", + "manufactuter", "manufacture", + "manufacuters", "manufactures", + "manufacutred", "manufacture", + "manufacutres", "manufactures", + "manufaturing", "manufacturing", + "manupilating", "manipulating", + "manupulating", "manipulating", + "manupulation", "manipulation", + "manupulative", "manipulative", + "marchmallows", "marshmallows", + "marganilized", "marginalized", + "margenalized", "marginalized", + "marginilized", "marginalized", + "marhsmallows", "marshmallows", + "marshamllows", "marshmallows", + "marshmallons", "marshmallows", + "masoginistic", "misogynistic", + "masogynistic", "misogynistic", + "massachusets", "massachusetts", + "massachustts", "massachusetts", + "masterbation", "masturbation", + "masterpeices", "masterpiece", + "mastrubating", "masturbating", + "mastrubation", "masturbation", + "mastubration", "masturbation", + "masturabting", "masturbating", + "masturabtion", "masturbation", + "masturbacion", "masturbation", + "masturbaited", "masturbated", + "masturbathon", 
"masturbation", + "masturbsting", "masturbating", + "masturdating", "masturbating", + "mastutbation", "masturbation", + "mataphorical", "metaphorical", + "mataphysical", "metaphysical", + "matchmakeing", "matchmaking", + "mathemathics", "mathematics", + "mathematican", "mathematician", + "mathematicas", "mathematics", + "mathematicks", "mathematics", + "mathematicly", "mathematical", + "mathematisch", "mathematics", + "mathemetical", "mathematical", + "matheticians", "mathematicians", + "mathimatical", "mathematical", + "mathmatician", "mathematician", + "mecahnically", "mechanically", + "mechancially", "mechanically", + "meditaciones", "medications", + "mediteranean", "mediterranean", + "mediterraean", "mediterranean", + "mediterranen", "mediterranean", + "memerization", "memorization", + "memorizacion", "memorization", + "memorozation", "memorization", + "metalurgical", "metallurgical", + "metaphisical", "metaphysical", + "metaphoricly", "metaphorical", + "metaphsyical", "metaphysical", + "metaphyiscal", "metaphysical", + "metaphyscial", "metaphysical", + "metaphysisch", "metaphysics", + "metephorical", "metaphorical", + "metephysical", "metaphysical", + "meterologist", "meteorologist", + "meterosexual", "heterosexual", + "methaporical", "metaphorical", + "methematical", "mathematical", + "metiphorical", "metaphorical", + "metophorical", "metaphorical", + "metorpolitan", "metropolitan", + "metrololitan", "metropolitan", + "metropilitan", "metropolitan", + "metroploitan", "metropolitan", + "metropolians", "metropolis", + "metropoliten", "metropolitan", + "metropolitin", "metropolitan", + "metropoliton", "metropolitan", + "microcentres", "microcenter", + "microphonies", "microphones", + "microscophic", "microscopic", + "microscopice", "microscope", + "microscoptic", "microscopic", + "midfieldiers", "midfielders", + "millenialism", "millennialism", + "millionairre", "millionaire", + "millionaries", "millionaires", + "millioniares", "millionaires", + "minimalisitc", "minimalist", + "minimalisity", "minimalist", + "mininterpret", "misinterpret", + "minipulating", "manipulating", + "minipulation", "manipulation", + "minipulative", "manipulative", + "miracilously", "miraculously", + "miracurously", "miraculous", + "miscarraiges", "miscarriage", + "miscelaneous", "miscellaneous", + "miscellanous", "miscellaneous", + "mischievious", "mischievous", + "misdameanors", "misdemeanors", + "misdeamenors", "misdemeanor", + "misfourtunes", "misfortunes", + "misgoynistic", "misogynistic", + "misinterpert", "misinterpret", + "misinterpred", "misinterpreted", + "misinterprit", "misinterpreting", + "misinterpted", "misinterpret", + "misintrepret", "misinterpret", + "misisonaries", "missionaries", + "misoganistic", "misogynistic", + "misogenistic", "misogynistic", + "misoginystic", "misogynistic", + "misognyistic", "misogynistic", + "misogonistic", "misogynistic", + "misogynisitc", "misogynistic", + "misogynsitic", "misogynistic", + "misogynystic", "misogynistic", + "missionaires", "missionaries", + "mississipppi", "mississippi", + "misspellling", "misspelling", + "misteriously", "mysteriously", + "misundersood", "misunderstood", + "misunderstod", "misunderstood", + "misygonistic", "misogynistic", + "modificacion", "modification", + "modificaiton", "modification", + "modificatons", "modifications", + "modifikation", "modification", + "modivational", "motivational", + "moisterizing", "moisturizing", + "moistorizing", "moisturizing", + "moisutrizing", "moisturizing", + "momentarilly", "momentarily", + 
"monolithisch", "monolithic", + "mositurizing", "moisturizing", + "motherbaords", "motherboards", + "motherborads", "motherboards", + "motivacional", "motivational", + "motovational", "motivational", + "mousturizing", "moisturizing", + "muktitasking", "multitasking", + "mulittasking", "multitasking", + "multinatinal", "multinational", + "multitaksing", "multitasking", + "munipulative", "manipulative", + "mutlitasking", "multitasking", + "mysoganistic", "misogynistic", + "mysogenistic", "misogynistic", + "mysogonistic", "misogynistic", + "mysterioulsy", "mysteriously", + "nacionalists", "nationalists", + "narcisisstic", "narcissistic", + "narcissictic", "narcissistic", + "narcissisism", "narcissism", + "narcissisist", "narcissist", + "narcissisitc", "narcissist", + "narcississts", "narcissist", + "narssicistic", "narcissistic", + "natioanlists", "nationalists", + "nationalisic", "nationalistic", + "nationalisim", "nationalism", + "nationalistc", "nationalistic", + "nationalites", "nationalist", + "nationalitic", "nationalistic", + "nationalitys", "nationalist", + "nationallity", "nationally", + "nationalsits", "nationalists", + "nationalties", "nationalist", + "nazionalists", "nationalists", + "neccessarily", "necessarily", + "neccessities", "necessities", + "necessarilly", "necessarily", + "necessitites", "necessities", + "neckbearders", "neckbeards", + "neckbeardese", "neckbeards", + "neckbeardest", "neckbeards", + "neckbeardies", "neckbeards", + "neckbeardius", "neckbeards", + "negociations", "negotiations", + "negoitations", "negotiations", + "negotiatians", "negotiations", + "negotiatiing", "negotiating", + "negotiationg", "negotiating", + "negotiatiors", "negotiations", + "neigbhorhood", "neighborhoods", + "neigbourhood", "neighbourhood", + "neighboorhod", "neighbourhood", + "neighborhing", "neighboring", + "neighborhods", "neighborhoods", + "neighbourghs", "neighbours", + "neighbourhod", "neighbourhood", + "neighbourood", "neighbourhood", + "neighbrohood", "neighborhoods", + "neighourhood", "neighborhood", + "neoroscience", "neuroscience", + "neruological", "neurological", + "neruoscience", "neuroscience", + "netropolitan", "metropolitan", + "neuorscience", "neuroscience", + "neuralogical", "neurological", + "neuroligical", "neurological", + "neurosceince", "neuroscience", + "neuroscienze", "neuroscience", + "neurosicence", "neuroscience", + "neverhteless", "nevertheless", + "nieghborhood", "neighborhood", + "norhtwestern", "northwestern", + "nothingsness", "nothingness", + "noticeablely", "noticeably", + "notificacion", "notification", + "notificaiton", "notification", + "notificatons", "notifications", + "nuerological", "neurological", + "nueroscience", "neuroscience", + "nutritionnal", "nutritional", + "obersvations", "observations", + "objectivelly", "objectively", + "objectiviser", "objectives", + "objectivitiy", "objectivity", + "obversations", "observations", + "ocassionally", "occasionally", + "occaisonally", "occasionally", + "occasioanlly", "occasionally", + "occassionaly", "occasionally", + "occationally", "occasionally", + "occurrencies", "occurrences", + "offensivelly", "offensively", + "ogranisation", "organisation", + "omniverously", "omnivorously", + "operationnal", "operational", + "opportuniste", "opportunities", + "opportunites", "opportunities", + "oppositition", "opposition", + "opthalmology", "ophthalmology", + "optimistisch", "optimistic", + "optimizacion", "optimization", + "optimizating", "optimization", + "optimziation", "optimization", + "optmizations", 
"optimizations", + "oragnisation", "organisation", + "orchastrated", "orchestrated", + "orchestarted", "orchestrated", + "orchestraded", "orchestrated", + "orchistrated", "orchestrated", + "orgainsation", "organisation", + "orgainzation", "organizations", + "organisaiton", "organisation", + "organisatons", "organisations", + "organistaion", "organisation", + "organizacion", "organization", + "organizaiton", "organization", + "organizativo", "organization", + "organizatons", "organizations", + "organsiation", "organisation", + "organziation", "organization", + "orginasation", "organisation", + "orginazation", "organization", + "orgnaisation", "organisations", + "originallity", "originality", + "outraegously", "outrageously", + "outrageoulsy", "outrageously", + "outragesouly", "outrageously", + "outrageuosly", "outrageously", + "outragiously", "outrageously", + "outsourceing", "outsourcing", + "overbearring", "overbearing", + "overblocking", "overclocking", + "overclcoking", "overclocking", + "overclicking", "overclocking", + "overcloaking", "overclocking", + "overclockign", "overclocking", + "overclokcing", "overclocking", + "overhearting", "overreacting", + "overheathing", "overheating", + "overhtinking", "overthinking", + "overhwelming", "overwhelming", + "overlappping", "overlapping", + "overlcocking", "overclocking", + "overreaktion", "overreaction", + "overwealming", "overwhelming", + "overwhelemed", "overwhelmed", + "overwhemling", "overwhelming", + "overwhleming", "overwhelming", + "owerpowering", "overpowering", + "painkilllers", "painkillers", + "palastinians", "palestinians", + "palesitnians", "palestinians", + "palestenians", "palestinians", + "palestinains", "palestinians", + "palestiniens", "palestinians", + "palestininan", "palestinian", + "palestininas", "palestinians", + "palistinians", "palestinians", + "palythroughs", "playthroughs", + "parapharsing", "paraphrasing", + "paraphenalia", "paraphernalia", + "paraphrashed", "paraphrase", + "paraphrazing", "paraphrasing", + "paraprashing", "paraphrasing", + "paraprhasing", "paraphrasing", + "parenthesees", "parentheses", + "parenthesies", "parenthesis", + "parliamentry", "parliamentary", + "partecipants", "participants", + "partecipated", "participated", + "parternships", "partnership", + "particapated", "participated", + "particiapnts", "participant", + "particiapted", "participated", + "participante", "participate", + "participaste", "participants", + "participatie", "participated", + "participatin", "participation", + "participatns", "participant", + "participaton", "participant", + "participents", "participants", + "particualrly", "particularly", + "particulalry", "particularly", + "particullary", "particularly", + "passionatley", "passionately", + "pathalogical", "pathological", + "pathelogical", "pathological", + "patholigical", "pathological", + "paychedelics", "psychedelics", + "paychiatrist", "psychiatrist", + "paychologist", "psychologist", + "paychopathic", "psychopathic", + "penetratiing", "penetrating", + "penisylvania", "pennsylvania", + "pennsilvania", "pennsylvania", + "pennslyvania", "pennsylvania", + "pennsylvaina", "pennsylvania", + "pennsyvlania", "pennsylvania", + "pennyslvania", "pennsylvania", + "penssylvania", "pennsylvania", + "pentsylvania", "pennsylvania", + "percentagens", "percentages", + "perferential", "preferential", + "performantes", "performances", + "performences", "performances", + "perfromances", "performances", + "peridoically", "periodically", + "peripathetic", "peripatetic", + 
"periphereals", "peripherals", + "peripherials", "peripherals", + "permanantely", "permanently", + "permanentely", "permanently", + "permissiable", "permissible", + "peroidically", "periodically", + "perpatrators", "perpetrators", + "perpatuating", "perpetuating", + "perpertators", "perpetrators", + "perpertrated", "perpetrated", + "perpetraitor", "perpetrator", + "perpetraters", "perpetrators", + "perpetuaters", "perpetuates", + "perpitrators", "perpetrators", + "perposefully", "purposefully", + "perposterous", "preposterous", + "perpretators", "perpetrators", + "perpsectives", "perspectives", + "perputrators", "perpetrators", + "perputuating", "perpetuating", + "persepctives", "perspectives", + "perservation", "preservation", + "perseverence", "perseverance", + "personalites", "personalities", + "personallity", "personally", + "personilized", "personalized", + "perspecitves", "perspectives", + "perspectivas", "perspectives", + "persumptuous", "presumptuous", + "perticularly", "particularly", + "pertubations", "perturbations", + "pessimisitic", "pessimistic", + "pessimisstic", "pessimistic", + "phenomenonal", "phenomenal", + "phenomenonly", "phenomenally", + "phenomonenon", "phenomenon", + "phialdelphia", "philadelphia", + "philadalphia", "philadelphia", + "philadelhpia", "philadelphia", + "philadeplhia", "philadelphia", + "philadlephia", "philadelphia", + "philedalphia", "philadelphia", + "philedelphia", "philadelphia", + "philidalphia", "philadelphia", + "philippinnes", "philippines", + "philippinoes", "philippines", + "philisophers", "philosophers", + "philisophies", "philosophies", + "phillippines", "philippines", + "philosiphers", "philosophers", + "philosiphies", "philosophies", + "philosohpers", "philosopher", + "philosohpies", "philosophies", + "philosophiae", "philosophies", + "philosophics", "philosophies", + "philosophios", "philosophies", + "philospohers", "philosophers", + "philospohies", "philosophies", + "photagrapher", "photographer", + "photochopped", "photoshopped", + "photograhper", "photographer", + "photograpers", "photographers", + "photographes", "photographs", + "photographyi", "photographic", + "photogropher", "photographer", + "photogrpahed", "photographed", + "photogrpaher", "photographer", + "photoshipped", "photoshopped", + "photoshooped", "photoshopped", + "photoshoppad", "photoshopped", + "phychedelics", "psychedelics", + "phychiatrist", "psychiatrist", + "phychologist", "psychologist", + "phychopathic", "psychopathic", + "physcedelics", "psychedelics", + "physciatrist", "psychiatrist", + "physcologist", "psychologist", + "physcopathic", "psychopathic", + "physicallity", "physically", + "physiologial", "physiological", + "pilgrimmages", "pilgrimages", + "pitchforkers", "pitchforks", + "pkaythroughs", "playthroughs", + "plabeswalker", "planeswalker", + "plaestinians", "palestinians", + "planeswaller", "planeswalker", + "planeswlaker", "planeswalker", + "planetwalker", "planeswalker", + "plansewalker", "planeswalker", + "plauthroughs", "playthroughs", + "playhtroughs", "playthroughs", + "playtgroughs", "playthroughs", + "playthorughs", "playthroughs", + "playthourghs", "playthroughs", + "playthrougth", "playthroughs", + "playthrouhgs", "playthroughs", + "playthtoughs", "playthroughs", + "playtrhoughs", "playthroughs", + "populationes", "populations", + "pornograpghy", "pornography", + "porportional", "proportional", + "portabillity", "portability", + "portagonists", "protagonists", + "positionning", "positioning", + "positivitely", "positivity", + 
"possessivize", "possessive", + "possibillity", "possibility", + "possiblility", "possibility", + "possiblities", "possibilities", + "powerfisting", "powerlifting", + "powerlfiting", "powerlifting", + "powerlifitng", "powerlifting", + "powerlisting", "powerlifting", + "powetlifting", "powerlifting", + "powrrlifting", "powerlifting", + "practicioner", "practitioner", + "practisioner", "practitioner", + "pratictioner", "practitioners", + "precedessors", "predecessors", + "preconveived", "preconceived", + "predacessors", "predecessors", + "predeccesors", "predecessor", + "predecesores", "predecessor", + "predescesors", "predecessors", + "predessecors", "predecessors", + "predetermind", "predetermined", + "predicessors", "predecessors", + "predocessors", "predecessors", + "predomiantly", "predominately", + "predominanty", "predominantly", + "predominatly", "predominantly", + "preferantial", "preferential", + "preferentail", "preferential", + "preformances", "performances", + "preinitalize", "preinitialize", + "preliminarly", "preliminary", + "prematurelly", "prematurely", + "premillenial", "premillennial", + "preocupation", "preoccupation", + "preperations", "preparations", + "prepetrators", "perpetrators", + "prepetuating", "perpetuating", + "prepostorous", "preposterous", + "preposturous", "preposterous", + "prerequisets", "prerequisite", + "prescirption", "prescriptions", + "prescribtion", "prescription", + "prescripcion", "prescription", + "prescriptons", "prescriptions", + "prescritpion", "prescriptions", + "presedential", "presidential", + "presentacion", "presentation", + "presentaiton", "presentations", + "preservacion", "preservation", + "preservating", "preservation", + "preservativo", "preservation", + "presidencial", "presidential", + "presidenital", "presidential", + "presidentail", "presidential", + "presnetation", "presentations", + "presonalized", "personalized", + "prespectives", "perspectives", + "presrciption", "prescriptions", + "presumpteous", "presumptuous", + "presumputous", "presumptuous", + "prevantative", "preventative", + "preventation", "presentation", + "preventetive", "preventative", + "preventitive", "preventative", + "prezidential", "presidential", + "principlaity", "principality", + "probabiliste", "probabilities", + "probabilites", "probabilities", + "probabillity", "probability", + "probablistic", "probabilistic", + "proclomation", "proclamation", + "proconceived", "preconceived", + "profesisonal", "professionals", + "professiinal", "professionalism", + "professioanl", "professionals", + "professiomal", "professionalism", + "professionel", "professional", + "professionsl", "professionalism", + "professoinal", "professionals", + "professonial", "professionals", + "proffesional", "professional", + "proficientcy", "proficiency", + "profissional", "professional", + "profitabiliy", "profitability", + "profitabilty", "profitability", + "profressions", "progressions", + "progatonists", "protagonists", + "programmeurs", "programmer", + "progressieve", "progressive", + "progressioin", "progressions", + "progressiong", "progressing", + "progressisme", "progresses", + "progressiste", "progresses", + "progressivas", "progressives", + "progressivey", "progressively", + "progressivly", "progressively", + "progressivsm", "progressives", + "progresssing", "progressing", + "progresssion", "progressions", + "progresssive", "progressives", + "prohibitting", "prohibiting", + "projecticles", "projectiles", + "proletariaat", "proletariat", + "proletariant", "proletariat", + 
"proletaricat", "proletariat", + "prominantely", "prominently", + "promiscuious", "promiscuous", + "promisculous", "promiscuous", + "promotionnal", "promotional", + "pronounceing", "pronouncing", + "pronunciaton", "pronunciation", + "propertional", "proportional", + "propesterous", "preposterous", + "proportianal", "proportional", + "proportionel", "proportional", + "proposterous", "preposterous", + "proprotional", "proportional", + "prostetution", "prostitution", + "prostitition", "prostitution", + "prostitucion", "prostitution", + "prostituiton", "prostitution", + "prostitutiei", "prostitute", + "protaganists", "protagonists", + "protaginists", "protagonists", + "protagnoists", "protagonists", + "protestantes", "protestants", + "protoganists", "protagonists", + "prouncements", "pronouncements", + "pruposefully", "purposefully", + "pscyhologist", "psychologist", + "pscyhopathic", "psychopathic", + "pshyciatrist", "psychiatrist", + "pshycologist", "psychologist", + "pshycopathic", "psychopathic", + "psichologist", "psychologist", + "psychaitrist", "psychiatrist", + "psychedellic", "psychedelic", + "psychedilics", "psychedelics", + "psychemedics", "psychedelics", + "psychiatirst", "psychiatrists", + "psychiatrics", "psychiatrist", + "psychiatrict", "psychiatrist", + "psychiatrits", "psychiatrists", + "psychistrist", "psychiatrist", + "psychodelics", "psychedelics", + "psycholigist", "psychologist", + "psychologial", "psychological", + "psychologits", "psychologists", + "psychologyst", "psychologist", + "psychopathes", "psychopaths", + "psychyatrist", "psychiatrist", + "puplications", "publications", + "puritannical", "puritanical", + "purpetrators", "perpetrators", + "purpetuating", "perpetuating", + "purpusefully", "purposefully", + "pyschedelics", "psychedelics", + "pyschiatrist", "psychiatrist", + "pyschologist", "psychologist", + "pyschopathic", "psychopathic", + "qualificaton", "qualification", + "qualifierais", "qualifiers", + "qualtitative", "quantitative", + "quantatitive", "quantitative", + "quantititive", "quantitative", + "quarterblack", "quarterback", + "quesitonable", "questionable", + "questionalbe", "questionable", + "questionning", "questioning", + "questionsign", "questioning", + "radioactieve", "radioactive", + "rationallity", "rationally", + "reactionairy", "reactionary", + "reactionnary", "reactionary", + "realisticaly", "realistically", + "realisticlly", "realistically", + "reasonablely", "reasonably", + "recallection", "recollection", + "reccomending", "recommending", + "reccommended", "recommended", + "recepcionist", "receptionist", + "receptionest", "receptionist", + "recgonizable", "recognizable", + "reciporcated", "reciprocate", + "reciprociate", "reciprocate", + "reciprocrate", "reciprocate", + "recognizible", "recognizable", + "recolleciton", "recollection", + "recommanding", "recommending", + "recommendeds", "recommends", + "recommendors", "recommends", + "recommeneded", "recommended", + "recommenting", "recommending", + "recongizable", "recognizable", + "recontructed", "reconstructed", + "recpetionist", "receptionist", + "recreacional", "recreational", + "recriational", "recreational", + "referenceing", "referencing", + "refirgerator", "refrigerator", + "refriderator", "refrigerator", + "refrigarator", "refrigerator", + "refrigerador", "refrigerator", + "refrigerater", "refrigerator", + "refrigirator", "refrigerator", + "regenaration", "regeneration", + "regeneracion", "regeneration", + "regestration", "registration", + "registartion", "registration", + 
"registrating", "registration", + "regrigerator", "refrigerator", + "regulatorias", "regulators", + "regulatories", "regulators", + "regulatorios", "regulators", + "reicarnation", "reincarnation", + "reinforcemnt", "reinforcement", + "reinitalised", "reinitialised", + "reinitalises", "reinitialises", + "reinitalized", "reinitialized", + "reinitalizes", "reinitializes", + "reinstallled", "reinstalled", + "reisntalling", "reinstalling", + "relaitonship", "relationships", + "relatinoship", "relationships", + "reliabillity", "reliability", + "reluctanctly", "reluctantly", + "remarkablely", "remarkably", + "rememberance", "remembrance", + "reminiscient", "reminiscent", + "renaissaince", "renaissance", + "renegeration", "regeneration", + "reorganision", "reorganisation", + "repalcements", "replacements", + "repersenting", "representing", + "reporduction", "reproduction", + "reporductive", "reproductive", + "reprecussion", "repercussions", + "representate", "representative", + "represention", "representing", + "representive", "representative", + "reproducable", "reproducible", + "reproduccion", "reproduction", + "reproduciton", "reproduction", + "reproducting", "reproduction", + "reproductivo", "reproduction", + "reproduktion", "reproduction", + "repsectfully", "respectfully", + "repsectively", "respectively", + "republicanas", "republicans", + "republicanos", "republicans", + "republicants", "republicans", + "republicians", "republicans", + "requerimento", "requirement", + "requeriments", "requirements", + "requierments", "requirements", + "requriements", "requirements", + "resembelance", "resemblance", + "reseptionist", "receptionist", + "reserrection", "resurrection", + "resintalling", "reinstalling", + "resistancies", "resistances", + "resistencias", "resistances", + "respecitvely", "respectively", + "respectabile", "respectable", + "respectivily", "respectively", + "respectivley", "respectively", + "respectuflly", "respectfully", + "respiratiory", "respiratory", + "responsabile", "responsible", + "responsaveis", "responsive", + "responsbilty", "responsibly", + "responsibile", "responsible", + "responsibily", "responsibility", + "responsibley", "responsibly", + "responsibliy", "responsibly", + "responsiblty", "responsibly", + "ressemblance", "resemblance", + "ressemblence", "resemblance", + "ressurection", "resurrection", + "restaurantes", "restaurants", + "restauration", "restoration", + "restauraunts", "restaurants", + "restirctions", "restrictions", + "restrainting", "restraining", + "restrcitions", "restriction", + "restricitons", "restrictions", + "resurreccion", "resurrection", + "resurrektion", "resurrection", + "retalitation", "retaliation", + "retributioon", "retribution", + "retroactivly", "retroactively", + "revolutionay", "revolutionary", + "revolutionos", "revolutions", + "rezurrection", "resurrection", + "rictatorship", "dictatorship", + "ridicilously", "ridiculously", + "ridicoulusly", "ridiculously", + "righteouness", "righteousness", + "rockerfeller", "rockefeller", + "rollercoaser", "rollercoaster", + "rollercoater", "rollercoaster", + "romanitcally", "romantically", + "roundabounts", "roundabout", + "rudimentatry", "rudimentary", + "rysurrection", "resurrection", + "sacksonville", "jacksonville", + "sacreligious", "sacrilegious", + "sacrificeing", "sacrificing", + "saksatchewan", "saskatchewan", + "salughtering", "slaughtering", + "sanctionning", "sanctioning", + "sarcasticaly", "sarcastically", + "sarcasticlly", "sarcastically", + "sascatchewan", "saskatchewan", + 
"saskatcehwan", "saskatchewan", + "saskatchawan", "saskatchewan", + "saskatechwan", "saskatchewan", + "sasketchawan", "saskatchewan", + "sasketchewan", "saskatchewan", + "sasktachewan", "saskatchewan", + "satasfaction", "satisfaction", + "satasfactory", "satisfactory", + "satisfaccion", "satisfaction", + "satisfacting", "satisfaction", + "satisfcation", "satisfaction", + "satisfiction", "satisfaction", + "satistactory", "satisfactory", + "satsifaction", "satisfaction", + "satsifactory", "satisfactory", + "scandanivian", "scandinavian", + "scandenavian", "scandinavian", + "scandianvian", "scandinavian", + "scandinacian", "scandinavian", + "scandinaivan", "scandinavia", + "scandinavica", "scandinavian", + "scandinavien", "scandinavian", + "scandinavion", "scandinavian", + "scandivanian", "scandinavian", + "scandonavian", "scandinavian", + "schizophrena", "schizophrenia", + "scholarhsips", "scholarships", + "scholerships", "scholarships", + "scholorships", "scholarships", + "scnadinavian", "scandinavian", + "screenshoots", "screenshot", + "sensationail", "sensational", + "sensationnal", "sensational", + "sensibilites", "sensibilities", + "sensitivitiy", "sensitivity", + "sentimentals", "sentiments", + "sertificates", "certificates", + "serveillance", "surveillance", + "seskatchewan", "saskatchewan", + "shakesperean", "shakespeare", + "shamelessely", "shamelessly", + "shamelessley", "shamelessly", + "shampionship", "championship", + "shardholders", "shareholders", + "shenanigains", "shenanigans", + "shenanigangs", "shenanigans", + "shenaniganns", "shenanigans", + "shenanighans", "shenanigans", + "shopkeeepers", "shopkeepers", + "showboarding", "snowboarding", + "siginificant", "significant", + "significanly", "significantly", + "significante", "significance", + "significanty", "significantly", + "significatly", "significantly", + "signleplayer", "singleplayer", + "simaltaneous", "simultaneous", + "simeltaneous", "simultaneous", + "similaraties", "similarities", + "similiarites", "similarities", + "similiarties", "similarities", + "similiraties", "similarities", + "similtaneous", "simultaneous", + "simliarities", "similarities", + "simlutaneous", "simultaneous", + "simpathizers", "sympathizers", + "simplistisch", "simplistic", + "simulatenous", "simultaneous", + "simulatneous", "simultaneous", + "simultaenous", "simultaneous", + "simultaneuos", "simultaneous", + "simultanious", "simultaneous", + "simulteneous", "simultaneous", + "singelplayer", "singleplayer", + "singlepalyer", "singleplayer", + "sinlgeplayer", "singleplayer", + "situationals", "situations", + "situationnal", "situational", + "skandinavian", "scandinavian", + "skateboaring", "skateboarding", + "skrawberries", "strawberries", + "slaugthering", "slaughtering", + "sloughtering", "slaughtering", + "sluaghtering", "slaughtering", + "snowballling", "snowballing", + "snowbaording", "snowboarding", + "socialistisk", "socialists", + "socialogical", "sociological", + "socioeconimc", "socioeconomic", + "socioeconmic", "socioeconomic", + "socioligical", "sociological", + "sociopolical", "sociological", + "somethingest", "somethings", + "sophisticaed", "sophisticated", + "sophisticted", "sophisticated", + "southamption", "southampton", + "southernerns", "southerners", + "sovereighnty", "sovereignty", + "sovereignety", "sovereignty", + "sovereignity", "sovereignty", + "specialistes", "specialists", + "specializare", "specialize", + "specializate", "specialize", + "specializeds", "specializes", + "specializied", "specialize", + 
"speciallized", "specialised", + "specifcation", "specification", + "spectacuarly", "spectacular", + "spectaculair", "spectacular", + "spectaculary", "spectacularly", + "spectacullar", "spectacularly", + "specualtions", "speculation", + "spermatozoan", "spermatozoon", + "spesifically", "specifically", + "spirituallly", "spiritually", + "spirtiuality", "spirituality", + "spirutuality", "spirituality", + "spontaneosly", "spontaneously", + "spontaneouly", "spontaneously", + "spreadhseets", "spreadsheets", + "spreadsheats", "spreadsheets", + "spreadsheeds", "spreadsheets", + "spreadsheeet", "spreadsheets", + "standartized", "standardized", + "standerdized", "standardized", + "stardardized", "standardized", + "starightened", "straightened", + "starwberries", "strawberries", + "statisticaly", "statistically", + "stereotpying", "stereotyping", + "stereotypers", "stereotypes", + "stereotypian", "stereotyping", + "steriotyping", "stereotyping", + "steroetyping", "stereotyping", + "steryotyping", "stereotyping", + "straigntened", "straightened", + "straigthened", "straightened", + "strategicaly", "strategically", + "strategiclly", "strategically", + "strawburries", "strawberries", + "streemlining", "streamlining", + "streightened", "straightened", + "strenghening", "strengthening", + "strenghtened", "strengthened", + "strengtheing", "strengthening", + "stroytelling", "storytelling", + "subconcsious", "subconscious", + "subconsicous", "subconscious", + "subcouncious", "subconscious", + "subcsription", "subscriptions", + "subesquently", "subsequently", + "subjectivety", "subjectively", + "subjectivily", "subjectively", + "subjectivley", "subjectively", + "subjudgation", "subjugation", + "subredditors", "subreddits", + "subscirption", "subscriptions", + "subsconcious", "subconscious", + "subscribbers", "subscribers", + "subscribbing", "subscribing", + "subscribirse", "subscriber", + "subscribtion", "subscription", + "subscriptons", "subscriptions", + "subscritpion", "subscriptions", + "subscrpition", "subscriptions", + "subsiquently", "subsequently", + "subsrciption", "subscriptions", + "subsricption", "subscriptions", + "substantialy", "substantially", + "substantitve", "substantive", + "substitition", "substitution", + "substituters", "substitutes", + "substitutivo", "substitution", + "substitutues", "substitutes", + "substracting", "subtracting", + "substraction", "subtraction", + "subterranian", "subterranean", + "succsessfull", "successful", + "sunconscious", "subconscious", + "supermarkeds", "supermarkets", + "supermarkers", "supermarkets", + "supermarkert", "supermarkets", + "supermarkten", "supermarket", + "supermarktes", "supermarkets", + "supernarkets", "supermarkets", + "supernatrual", "supernatural", + "supersticion", "superstition", + "superstision", "superstition", + "superstitios", "superstitious", + "superstitous", "superstitious", + "supervisiors", "supervisors", + "supervisoras", "supervisors", + "supervisores", "supervisors", + "supllemental", "supplemental", + "supplamental", "supplemental", + "supplamented", "supplemented", + "supplimental", "supplemental", + "suppresssion", "suppression", + "supscription", "subscription", + "supsiciously", "suspiciously", + "surprizingly", "surprisingly", + "surrenderred", "surrendered", + "surrundering", "surrendering", + "survaillance", "surveillance", + "survaillence", "surveillance", + "survallience", "surveillance", + "surveillence", "surveillance", + "survelliance", "surveillance", + "surviellance", "surveillance", + "survivabiity", 
"survivability", + "survivabiliy", "survivability", + "survivabilty", "survivability", + "susceptiable", "susceptible", + "susceptibile", "susceptible", + "suspeciously", "suspiciously", + "suspicioulsy", "suspiciously", + "suspiciuosly", "suspiciously", + "suspisiously", "suspiciously", + "sustainabily", "sustainability", + "symapthizers", "sympathizers", + "symetrically", "symmetrically", + "symmetricaly", "symmetrically", + "sympathethic", "sympathetic", + "sympathsizer", "sympathizers", + "sympathyzers", "sympathizers", + "sympethizers", "sympathizers", + "symphatizers", "sympathizers", + "sympithizers", "sympathizers", + "syncronously", "synchronously", + "sysmatically", "systematically", + "systematisch", "systematic", + "tablespooons", "tablespoon", + "tacticallity", "tactically", + "tangencially", "tangentially", + "tangenitally", "tangentially", + "tangientally", "tangentially", + "teamfighters", "teamfights", + "teansylvania", "transylvania", + "techanically", "mechanically", + "techincality", "technicality", + "technologial", "technological", + "telelevision", "television", + "teleportaion", "teleportation", + "teleportaton", "teleportation", + "temepratures", "temperatures", + "temparatures", "temperatures", + "temperaturas", "temperatures", + "temporarilly", "temporarily", + "tempreatures", "temperatures", + "tempuratures", "temperatures", + "tengentially", "tangentially", + "termendously", "tremendously", + "territorrial", "territorial", + "territorries", "territories", + "testasterone", "testosterone", + "testestorone", "testosterone", + "thanskgiving", "thanksgiving", + "theologicial", "theological", + "theoreticaly", "theoretically", + "thermomenter", "thermometer", + "thermomether", "thermometer", + "thumbnailers", "thumbnails", + "thunderboldt", "thunderbolt", + "tindergarten", "kindergarten", + "torubleshoot", "troubleshoot", + "totalitarion", "totalitarian", + "totalitatian", "totalitarian", + "touchscreeen", "touchscreen", + "traditionaly", "traditionally", + "traditionnal", "traditional", + "tradtionally", "traditionally", + "tramendously", "tremendously", + "tramsformers", "transformers", + "tramsforming", "transforming", + "tranditional", "transitional", + "tranistional", "transitional", + "tranistioned", "transitioned", + "tranlsations", "translations", + "tranmsission", "transmissions", + "transaltions", "translations", + "transaprency", "transparency", + "transational", "transitional", + "transcations", "transactions", + "transcendant", "transcendent", + "transcripton", "transcription", + "transcriptus", "transcripts", + "transesxuals", "transsexuals", + "transfarmers", "transformers", + "transfarring", "transferring", + "transferrred", "transferred", + "transformare", "transformers", + "transformase", "transforms", + "transformees", "transforms", + "transforners", "transformers", + "transfromers", "transformers", + "transfroming", "transforming", + "transgenderd", "transgendered", + "transgendred", "transgendered", + "transgenered", "transgender", + "transicional", "transitional", + "transilvania", "transylvania", + "transimssion", "transmissions", + "transisioned", "transitioned", + "translastion", "translations", + "translateing", "translating", + "translationg", "translating", + "translucient", "translucent", + "translyvania", "transylvania", + "transmisions", "transmission", + "transmisison", "transmission", + "transmissons", "transmissions", + "transmitirte", "transmitter", + "transmittted", "transmitted", + "transmorfers", "transformer", + "transofrmers", 
"transformers", + "transofrming", "transforming", + "transparancy", "transparency", + "transparenty", "transparency", + "transparrent", "transparent", + "transperancy", "transparency", + "transperency", "transparency", + "transplantes", "transplants", + "transporteur", "transporter", + "transportion", "transporting", + "transpotting", "transporting", + "transsmision", "transmissions", + "transylmania", "transylvania", + "transylvanai", "transylvania", + "trasnferring", "transferring", + "trasnformers", "transformers", + "trasnforming", "transforming", + "trasnmission", "transmissions", + "trasnparency", "transparency", + "trasnporting", "transporting", + "trememdously", "tremendously", + "tremendoulsy", "tremendously", + "tremondously", "tremendously", + "troubelshoot", "troubleshoot", + "troublehsoot", "troubleshoot", + "trumendously", "tremendously", + "trustworthly", "trustworthy", + "ubsubscribed", "unsubscribed", + "udnerpowered", "underpowered", + "umbelievable", "unbelievable", + "umemployment", "unemployment", + "unaccaptable", "unacceptable", + "unacceptible", "unacceptable", + "unaccpetable", "unacceptable", + "unacompanied", "unaccompanied", + "unappealling", "unappealing", + "unattractice", "unattractive", + "unautherized", "unauthorized", + "unauthroized", "unauthorized", + "unbeleivable", "unbelievable", + "unbeleivably", "unbelievably", + "unbeliavable", "unbelievable", + "unbeliavably", "unbelievably", + "unbeliebable", "unbelievable", + "unbelieveble", "unbelievable", + "unbelievibly", "unbelievably", + "unbeliveable", "unbelievable", + "unbeliveably", "unbelievably", + "unbelizeable", "unbelievable", + "unbolievable", "unbelievable", + "uncertainity", "uncertainty", + "uncertaintly", "uncertainty", + "uncompatible", "incompatible", + "unconditinal", "unconditional", + "unconsciosly", "unconsciously", + "unconsciouly", "unconsciously", + "unconsistent", "inconsistent", + "unconvenient", "inconvenient", + "unconvential", "unconventional", + "undecideable", "undecidable", + "undefinitely", "indefinitely", + "undeniablely", "undeniably", + "undergradate", "undergraduate", + "undergradute", "undergraduate", + "underminding", "undermining", + "undermineing", "undermining", + "undermineras", "undermines", + "undermineres", "undermines", + "underminging", "undermining", + "underminning", "undermining", + "undertakeing", "undertaking", + "underwhelimg", "underwhelming", + "underwheling", "underwhelming", + "undesireable", "undesirable", + "undoubtedbly", "undoubtedly", + "unemployemnt", "unemployment", + "unemplyoment", "unemployment", + "unempolyment", "unemployment", + "unenployment", "unemployment", + "unequalities", "inequalities", + "unexpectadly", "unexpectedly", + "unexpectetly", "unexpectedly", + "unexpectidly", "unexpectedly", + "unexperience", "inexperience", + "unexpextedly", "unexpectedly", + "unexplicably", "inexplicably", + "unforgetable", "unforgettable", + "unforgiveble", "unforgivable", + "unforgivible", "unforgivable", + "unfortunatly", "unfortunately", + "unfortunetly", "unfortunately", + "unilatreally", "unilaterally", + "uniliterally", "unilaterally", + "unimpresssed", "unimpressed", + "uninitalised", "uninitialised", + "uninitalized", "uninitialized", + "uninstallimg", "uninstalling", + "uninstallled", "uninstalled", + "unintentinal", "unintentional", + "uninteresing", "uninteresting", + "uninterneted", "uninterested", + "uninterruped", "uninterrupted", + "uninterupted", "uninterrupted", + "unisntalling", "uninstalling", + "unitesstates", "unitedstates", + 
"univerisites", "universities", + "univeristies", "universities", + "universitets", "universities", + "unliaterally", "unilaterally", + "unneccessary", "unnecessary", + "unnecesarily", "unnecessarily", + "unnecessairy", "unnecessarily", + "unnecessarly", "unnecessarily", + "unnistalling", "uninstalling", + "unpredictabe", "unpredictable", + "unpreductive", "unproductive", + "unproduktive", "unproductive", + "unrealisitic", "unrealistic", + "unreaponsive", "unresponsive", + "unreasonalby", "unreasonably", + "unrepsonsive", "unresponsive", + "unresponcive", "unresponsive", + "unresponisve", "unresponsive", + "unresponsibe", "unresponsive", + "unrestircted", "unrestricted", + "unrestrcited", "unrestricted", + "unristricted", "unrestricted", + "unseccessful", "unsuccessful", + "unsespecting", "unsuspecting", + "unsibscribed", "unsubscribed", + "unsoliciated", "unsolicited", + "unsolicitied", "unsolicited", + "unsubscirbed", "unsubscribed", + "unsubscrible", "unsubscribed", + "unsubscrided", "unsubscribed", + "unsubscriped", "unsubscribed", + "unsubscrubed", "unsubscribed", + "unsubsrcibed", "unsubscribed", + "unsucessfull", "unsuccessful", + "unsunscribed", "unsubscribed", + "unsurprizing", "unsurprising", + "unsusbcribed", "unsubscribed", + "unsustainble", "unsustainable", + "unvelievable", "unbelievable", + "unvelievably", "unbelievably", + "unviersities", "universities", + "unvulnerable", "invulnerable", + "varification", "verification", + "vegetarianas", "vegetarians", + "vegetarianos", "vegetarians", + "verficiation", "verification", + "verificacion", "verification", + "verificaiton", "verification", + "verifikation", "verification", + "vernaculaire", "vernacular", + "versatillity", "versatility", + "verticallity", "vertically", + "videogamemes", "videogames", + "visualizaton", "visualization", + "vocabularily", "vocabulary", + "vocabularity", "vocabulary", + "volonteering", "volunteering", + "volounteered", "volunteered", + "voluntarilly", "voluntarily", + "volunterring", "volunteering", + "vulnerabilty", "vulnerability", + "weightlifing", "weightlifting", + "withdrawalls", "withdrawals", + "withdrawling", "withdrawing", + "withdrawning", "withdrawing", + "wonderfullly", "wonderfully", + "worshippping", "worshipping", + "xenophobical", "xenophobia", + "abandenment", "abandonment", + "abandomnent", "abandonment", + "abandonding", "abandoning", + "abandonnent", "abandonment", + "abandonning", "abandoning", + "abbreviatin", "abbreviation", + "abbreviaton", "abbreviation", + "abdominable", "abdominal", + "abomanation", "abomination", + "abominacion", "abomination", + "abomonation", "abomination", + "abonimation", "abomination", + "aboriginial", "aboriginal", + "aborigional", "aboriginal", + "abreviation", "abbreviation", + "abritrarily", "arbitrarily", + "abritration", "arbitration", + "absolutelly", "absolutely", + "absolutelys", "absolutes", + "absolutisme", "absolutes", + "absolutiste", "absolutes", + "abstraccion", "abstraction", + "abstraktion", "abstraction", + "abstruction", "abstraction", + "abundancies", "abundances", + "academicaly", "academically", + "academicese", "academics", + "accelarated", "accelerated", + "accelarator", "accelerator", + "accelerater", "accelerator", + "acceleratie", "accelerate", + "acceleratio", "accelerator", + "acceleraton", "acceleration", + "accelorated", "accelerated", + "accelorator", "accelerator", + "acceptabelt", "acceptable", + "accesseries", "accessories", + "accessibile", "accessible", + "accessibily", "accessibility", + "accessoires", 
"accessories", + "accidantely", "accidently", + "accidentaly", "accidentally", + "accidentely", "accidently", + "accidential", "accidental", + "accidentily", "accidently", + "accidentlay", "accidently", + "accidentley", "accidently", + "accidentlly", "accidently", + "accomadated", "accommodated", + "accomadates", "accommodates", + "accommadate", "accommodate", + "accommidate", "accommodate", + "accomodated", "accommodated", + "accomodates", "accommodates", + "accomondate", "accommodate", + "accompained", "accompanied", + "accompanyed", "accompanied", + "accompianed", "accompanied", + "accompinied", "accompanied", + "accomplises", "accomplishes", + "accomplishs", "accomplishes", + "accomponied", "accompanied", + "accountatns", "accountants", + "accountents", "accountants", + "accquainted", "acquainted", + "accrediated", "accredited", + "accreditied", "accredited", + "accreditted", "accredited", + "acculumated", "accumulated", + "accumalated", "accumulated", + "accumelated", "accumulated", + "accumilated", "accumulated", + "accumulatin", "accumulation", + "accumulaton", "accumulation", + "accuratelly", "accurately", + "accustommed", "accustomed", + "acheivement", "achievement", + "acheivments", "achievements", + "achievemint", "achievement", + "achievemnts", "achievements", + "achievments", "achievements", + "achivements", "achievements", + "acknolwedge", "acknowledge", + "acknoweldge", "acknowledge", + "acknowleded", "acknowledged", + "acknowlegde", "acknowledge", + "acknowleged", "acknowledge", + "acknowleges", "acknowledges", + "acknwoledge", "acknowledges", + "acomplished", "accomplished", + "acopalyptic", "apocalyptic", + "acquaintace", "acquaintance", + "acquisation", "acquisition", + "activateing", "activating", + "activationg", "activating", + "activistion", "activision", + "additinally", "additionally", + "additionaly", "additionally", + "additonally", "additionally", + "adequatedly", "adequately", + "adjectiveus", "adjectives", + "administerd", "administered", + "administrar", "administrator", + "administren", "administer", + "administrer", "administer", + "administres", "administer", + "administrez", "administer", + "adminstered", "administered", + "adminstrate", "administrate", + "admittadely", "admittedly", + "adolencence", "adolescence", + "adolescance", "adolescence", + "adolescense", "adolescence", + "advantadges", "advantages", + "advantageos", "advantageous", + "advantageus", "advantageous", + "advantagous", "advantageous", + "adventerous", "adventures", + "adventourus", "adventurous", + "adversiting", "advertising", + "advertisors", "advertisers", + "advertisted", "advertised", + "aesthethics", "aesthetics", + "afficionado", "aficionado", + "affiliction", "affiliation", + "affirmitave", "affirmative", + "affirmitive", "affirmative", + "affixiation", "affiliation", + "affrimative", "affirmative", + "afgahnistan", "afghanistan", + "afganhistan", "afghanistan", + "afghanastan", "afghanistan", + "afghansitan", "afghanistan", + "afhganistan", "afghanistan", + "afternarket", "aftermarket", + "afterthougt", "afterthought", + "aggaravates", "aggravates", + "aggragating", "aggravating", + "aggregatore", "aggregate", + "aggressivly", "aggressively", + "aggresssion", "aggression", + "aggrovating", "aggravating", + "agnostacism", "agnosticism", + "agnostisicm", "agnosticism", + "agnostisism", "agnosticism", + "agnostocism", "agnosticism", + "agnsoticism", "agnosticism", + "agonsticism", "agnosticism", + "agressively", "aggressively", + "agressivley", "agressive", + "agressivnes", 
"agressive", + "agricolture", "agriculture", + "agriculteur", "agriculture", + "agricultral", "agricultural", + "agricultual", "agricultural", + "agricutlure", "agriculture", + "ahtleticism", "athleticism", + "alcoholicas", "alcoholics", + "alcoholicos", "alcoholics", + "alcoholisim", "alcoholism", + "algorithems", "algorithm", + "algorithims", "algorithm", + "algorithmes", "algorithms", + "algorithmns", "algorithms", + "algorithmus", "algorithms", + "algorithyms", "algorithm", + "algorythims", "algorithms", + "alientating", "alienating", + "alleigances", "allegiance", + "alltogether", "altogether", + "alterantive", "alternative", + "alternatley", "alternately", + "alternitive", "alternative", + "altheticism", "athleticism", + "altnerately", "alternately", + "altruisitic", "altruistic", + "altruistric", "altruistic", + "amalgomated", "amalgamated", + "ambulancier", "ambulance", + "amerliorate", "ameliorate", + "ammendments", "amendments", + "ampehtamine", "amphetamine", + "ampethamine", "amphetamine", + "amphetamies", "amphetamines", + "amphetamins", "amphetamines", + "amphetemine", "amphetamine", + "amphetimine", "amphetamine", + "amphetmaine", "amphetamines", + "analyticals", "analytics", + "anarchistes", "anarchists", + "ancedotally", "anecdotally", + "androgenous", "androgynous", + "anecdatally", "anecdotally", + "anecdotelly", "anecdotally", + "anecodtally", "anecdotally", + "anectodally", "anecdotally", + "anectotally", "anecdotally", + "anedoctally", "anecdotally", + "angosticism", "agnosticism", + "anihilation", "annihilation", + "anitbiotics", "antibiotics", + "annihalated", "annihilated", + "annihilaton", "annihilation", + "annihilited", "annihilated", + "annihliated", "annihilated", + "annilihated", "annihilated", + "anniversery", "anniversary", + "annonymouse", "anonymous", + "announceing", "announcing", + "announcemet", "announcements", + "announcemnt", "announcement", + "announcents", "announces", + "annoymously", "anonymously", + "anonamously", "anonymously", + "anonimously", "anonymously", + "anonmyously", "anonymously", + "anonomously", "anonymously", + "anonymousny", "anonymously", + "anouncement", "announcement", + "antagonisic", "antagonistic", + "antagonistc", "antagonistic", + "antagonstic", "antagonist", + "anthropolgy", "anthropology", + "anthropoloy", "anthropology", + "antibiodics", "antibiotics", + "antibioitcs", "antibiotic", + "antibioitic", "antibiotic", + "antibitoics", "antibiotics", + "antiboitics", "antibiotics", + "anticapated", "anticipated", + "anticiapted", "anticipated", + "anticipatin", "anticipation", + "antiobitics", "antibiotic", + "antiquaited", "antiquated", + "antisipated", "anticipated", + "apacolyptic", "apocalyptic", + "apocaliptic", "apocalyptic", + "apocalpytic", "apocalyptic", + "apocalytpic", "apocalyptic", + "apolagizing", "apologizing", + "apolegetics", "apologetics", + "apologistas", "apologists", + "apologistes", "apologists", + "apostrophie", "apostrophe", + "apparantely", "apparently", + "appareances", "appearances", + "apparentely", "apparently", + "appartments", "apartments", + "appeareance", "appearance", + "appearences", "appearances", + "apperciated", "appreciated", + "apperciates", "appreciates", + "appereances", "appearances", + "applicabile", "applicable", + "applicaiton", "application", + "applicatins", "applicants", + "applicatons", "applications", + "appoitnment", "appointments", + "apporaching", "approaching", + "apporpriate", "appropriate", + "apporximate", "approximate", + "appraoching", "approaching", + 
"apprearance", "appearance", + "apprecaited", "appreciated", + "apprecaites", "appreciates", + "appreciaite", "appreciative", + "appreciatie", "appreciative", + "appreciatin", "appreciation", + "appreciaton", "appreciation", + "appreciatve", "appreciative", + "appreicated", "appreciated", + "appreicates", "appreciates", + "apprentince", "apprentice", + "appriciated", "appreciated", + "appriciates", "appreciates", + "apprieciate", "appreciate", + "appropirate", "appropriate", + "appropraite", "appropriate", + "appropriato", "appropriation", + "approxamate", "approximate", + "approxiamte", "approximate", + "approxmiate", "approximate", + "aprehensive", "apprehensive", + "apsirations", "aspirations", + "aqcuisition", "acquisition", + "aquaintance", "acquaintance", + "aquiantance", "acquaintance", + "arbitrairly", "arbitrarily", + "arbitralily", "arbitrarily", + "arbitrarely", "arbitrarily", + "arbitrarion", "arbitration", + "arbitratily", "arbitrarily", + "arbritarily", "arbitrarily", + "arbritation", "arbitration", + "arcaheology", "archaeology", + "archaoelogy", "archeology", + "archeaology", "archaeology", + "archimedian", "archimedean", + "architechts", "architect", + "architectes", "architects", + "architecure", "architecture", + "argiculture", "agriculture", + "argumentate", "argumentative", + "aribtrarily", "arbitrarily", + "aribtration", "arbitration", + "arithmentic", "arithmetic", + "arithmethic", "arithmetic", + "arithmetric", "arithmetic", + "armagedddon", "armageddon", + "armageddeon", "armageddon", + "arrangments", "arrangements", + "arrengement", "arrangement", + "articluated", "articulated", + "articualted", "articulated", + "artifically", "artificially", + "artificialy", "artificially", + "aspergerers", "aspergers", + "asphyxation", "asphyxiation", + "aspriations", "aspirations", + "assasinated", "assassinated", + "assasinates", "assassinates", + "assassiante", "assassinate", + "assassinare", "assassinate", + "assassinatd", "assassinated", + "assassinato", "assassination", + "assassinats", "assassins", + "assassinted", "assassinated", + "assembleing", "assembling", + "assemblying", "assembling", + "assertation", "assertion", + "assignemnts", "assignments", + "assimialted", "assimilate", + "assimilatie", "assimilate", + "assimilerat", "assimilate", + "assimiliate", "assimilate", + "assimliated", "assimilate", + "assingments", "assignments", + "assistantes", "assistants", + "assocaition", "associations", + "associaiton", "associations", + "associaties", "associates", + "associatons", "associations", + "assoication", "association", + "assosiating", "associating", + "assosiation", "association", + "assoziation", "association", + "assumptious", "assumptions", + "astonashing", "astonishing", + "astonoshing", "astonishing", + "astronaught", "astronaut", + "astronaunts", "astronaut", + "astronautas", "astronauts", + "astronautes", "astronauts", + "asychronous", "asynchronous", + "asyncronous", "asynchronous", + "atatchments", "attachments", + "atheistisch", "atheistic", + "athelticism", "athleticism", + "athletecism", "athleticism", + "athleticsim", "athleticism", + "athletisicm", "athleticism", + "athletisism", "athleticism", + "atmopsheric", "atmospheric", + "atmoshperic", "atmospheric", + "atmosoheric", "atmospheric", + "atomspheric", "atmospheric", + "atrocitites", "atrocities", + "attachemnts", "attachments", + "attackerasu", "attackers", + "attackerats", "attackers", + "attactments", "attachments", + "attributred", "attributed", + "attributted", "attribute", + "attrocities", 
"atrocities", + "atttributes", "attributes", + "audiobookas", "audiobooks", + "audioboooks", "audiobook", + "auotcorrect", "autocorrect", + "austrailans", "australians", + "austrailian", "australian", + "australiaan", "australians", + "australiams", "australians", + "australiens", "australians", + "australlian", "australian", + "authenticiy", "authenticity", + "authenticor", "authenticator", + "authenticty", "authenticity", + "authorative", "authoritative", + "authoritate", "authoritative", + "authoroties", "authorities", + "autoatttack", "autoattack", + "autocoreect", "autocorrect", + "autocorrekt", "autocorrect", + "autocorrent", "autocorrect", + "autocorrext", "autocorrect", + "autoctonous", "autochthonous", + "autokorrect", "autocorrect", + "automaticly", "automatically", + "automatonic", "automation", + "automoblies", "automobile", + "auxillaries", "auxiliaries", + "availabiliy", "availability", + "availabilty", "availability", + "availablity", "availability", + "awesoneness", "awesomeness", + "babysittter", "babysitter", + "backbacking", "backpacking", + "backgorunds", "backgrounds", + "backhacking", "backpacking", + "backjacking", "backpacking", + "backtacking", "backpacking", + "bangaldeshi", "bangladesh", + "bangladesch", "bangladesh", + "barceloneta", "barcelona", + "bargainning", "bargaining", + "battelfield", "battlefield", + "battelfront", "battlefront", + "battelships", "battleship", + "battlefeild", "battlefield", + "battlefiend", "battlefield", + "battlefiled", "battlefield", + "battlefornt", "battlefront", + "battlehsips", "battleship", + "beastiality", "bestiality", + "beaurocracy", "bureaucracy", + "beautyfully", "beautifully", + "behaviorial", "behavioral", + "belittleing", "belittling", + "belittlling", "belittling", + "belligerant", "belligerent", + "belligirent", "belligerent", + "bellweather", "bellwether", + "benefitical", "beneficial", + "bestiallity", "bestiality", + "beuatifully", "beautifully", + "beuraucracy", "bureaucracy", + "beuraucrats", "bureaucrats", + "billegerent", "belligerent", + "billionairs", "billionaires", + "billionarie", "billionaire", + "billioniare", "billionaire", + "biologicaly", "biologically", + "birthdayers", "birthdays", + "birthdaymas", "birthdays", + "bittersweat", "bittersweet", + "bitterwseet", "bittersweet", + "blackberrry", "blackberry", + "blacksmitch", "blacksmith", + "bloodboorne", "bloodborne", + "bluebarries", "blueberries", + "blueburries", "blueberries", + "blueprients", "blueprints", + "bodybuildig", "bodybuilding", + "bodybuildng", "bodybuilding", + "bodybuiling", "bodybuilding", + "bombardeada", "bombarded", + "bombardeado", "bombarded", + "bombarderad", "bombarded", + "bordelrands", "borderlands", + "bordlerands", "borderlands", + "bortherhood", "brotherhood", + "bourgeousie", "bourgeois", + "boycottting", "boycotting", + "bracelettes", "bracelets", + "brainwahsed", "brainwashed", + "brainwasing", "brainwashing", + "braziliians", "brazilians", + "breakthough", "breakthrough", + "breakthrouh", "breakthrough", + "breathtakng", "breathtaking", + "brianwashed", "brainwashed", + "brillaintly", "brilliantly", + "broadcasing", "broadcasting", + "broadcastes", "broadcasts", + "broderlands", "borderlands", + "brotherwood", "brotherhood", + "buddhistisk", "buddhists", + "buearucrats", "bureaucrats", + "bueraucracy", "bureaucracy", + "bueraucrats", "bureaucrats", + "buisnessman", "businessman", + "buisnessmen", "businessmen", + "bullerproof", "bulletproof", + "bulletbroof", "bulletproof", + "bulletproff", "bulletproof", + 
"bulletprrof", "bulletproof", + "bullitproof", "bulletproof", + "bureacuracy", "bureaucracy", + "bureaocracy", "bureaucracy", + "bureaocrats", "bureaucrats", + "bureaucraps", "bureaucrats", + "bureaucrash", "bureaucrats", + "bureaucrasy", "bureaucrats", + "bureaucrazy", "bureaucracy", + "bureuacracy", "bureaucracy", + "bureuacrats", "bureaucrats", + "burueacrats", "bureaucrats", + "businessnes", "businessmen", + "busniessmen", "businessmen", + "butterfiles", "butterflies", + "butterfleye", "butterfly", + "butterflyes", "butterflies", + "butterfries", "butterflies", + "butterlfies", "butterflies", + "caclulating", "calculating", + "caclulation", "calculation", + "caclulators", "calculators", + "cailbration", "calibration", + "calbiration", "calibration", + "calcualting", "calculating", + "calcualtion", "calculations", + "calcualtors", "calculators", + "calculaters", "calculators", + "calculatios", "calculators", + "calculatons", "calculations", + "calibartion", "calibration", + "calibraiton", "calibration", + "califorinan", "californian", + "californain", "californian", + "californica", "california", + "californien", "californian", + "californiia", "californian", + "californina", "californian", + "californnia", "californian", + "califronian", "californian", + "caluclating", "calculating", + "caluclation", "calculation", + "caluclators", "calculators", + "caluculated", "calculated", + "caluiflower", "cauliflower", + "camouflague", "camouflage", + "camouflauge", "camouflage", + "campagining", "campaigning", + "campainging", "campaigning", + "canadianese", "canadians", + "cannabilism", "cannibalism", + "cannabolism", "cannibalism", + "canniablism", "cannibalism", + "cannibalizm", "cannibalism", + "cannibaljim", "cannibalism", + "cannibalsim", "cannibalism", + "cannibilism", "cannibalism", + "cannobalism", "cannibalism", + "cannotation", "connotation", + "capabilites", "capabilities", + "capabilitiy", "capability", + "capabillity", "capability", + "capacitaron", "capacitor", + "capacitores", "capacitors", + "capatilists", "capitalists", + "capatilized", "capitalized", + "caperbility", "capability", + "capitalisim", "capitalism", + "capitilists", "capitalists", + "capitilized", "capitalized", + "capitolists", "capitalists", + "capitolized", "capitalized", + "captialists", "capitalists", + "captialized", "capitalized", + "cariactures", "caricature", + "carniverous", "carnivorous", + "castatrophe", "catastrophe", + "catagorized", "categorized", + "catapillars", "caterpillars", + "catapillers", "caterpillars", + "catasthrope", "catastrophe", + "catastraphe", "catastrophe", + "catastrohpe", "catastrophe", + "catastropic", "catastrophic", + "categroized", "categorized", + "catepillars", "caterpillars", + "catergorize", "categorize", + "caterogized", "categorized", + "caterpilars", "caterpillars", + "caterpiller", "caterpillar", + "catholacism", "catholicism", + "catholicsim", "catholicism", + "catholisicm", "catholicism", + "catholisism", "catholicism", + "catholizism", "catholicism", + "catholocism", "catholicism", + "catogerized", "categorized", + "catterpilar", "caterpillar", + "cauilflower", "cauliflower", + "caulfilower", "cauliflower", + "celebartion", "celebrations", + "celebirties", "celebrities", + "celebracion", "celebration", + "celebrasion", "celebrations", + "celebratons", "celebrations", + "centipeddle", "centipede", + "cerimonious", "ceremonious", + "certaintity", "certainty", + "certificaat", "certificate", + "certificare", "certificate", + "certificato", "certification", + 
"certificats", "certificates", + "challanging", "challenging", + "challeneged", "challenged", + "challeneger", "challenger", + "challeneges", "challenges", + "chameleooon", "chameleon", + "championshp", "championship", + "championsip", "championship", + "chancellour", "chancellor", + "charachters", "characters", + "charasmatic", "charismatic", + "charimastic", "charismatic", + "charsimatic", "charismatic", + "cheerleadra", "cheerleader", + "cheerleards", "cheerleaders", + "cheerleeder", "cheerleader", + "cheesebuger", "cheeseburger", + "cheeseburgs", "cheeseburgers", + "chihuahuita", "chihuahua", + "childrenmrs", "childrens", + "chloesterol", "cholesterol", + "cholesteral", "cholesterol", + "cholestoral", "cholesterol", + "cholestorol", "cholesterol", + "cholosterol", "cholesterol", + "chormosomes", "chromosomes", + "christianty", "christianity", + "chromasomes", "chromosomes", + "chromesomes", "chromosomes", + "chromisomes", "chromosomes", + "chromosones", "chromosomes", + "chromossome", "chromosomes", + "chromozomes", "chromosomes", + "chronicales", "chronicles", + "chronichles", "chronicles", + "cicrulating", "circulating", + "cincinnasti", "cincinnati", + "cincinnatti", "cincinnati", + "cincinnnati", "cincinnati", + "circimcised", "circumcised", + "circluating", "circulating", + "circualtion", "circulation", + "circulacion", "circulation", + "circumcison", "circumcision", + "circumsiced", "circumcised", + "circumsised", "circumcised", + "circumstace", "circumstance", + "circumvrent", "circumvent", + "circuncised", "circumcised", + "cirticising", "criticising", + "ciruclating", "circulating", + "ciruclation", "circulation", + "citicenship", "citizenship", + "citisenship", "citizenship", + "citizinship", "citizenship", + "civilizatin", "civilizations", + "civilizaton", "civilization", + "claculators", "calculators", + "classifides", "classified", + "cleanilness", "cleanliness", + "cleanleness", "cleanliness", + "cleanlyness", "cleanliness", + "cleansiness", "cleanliness", + "cliffbanger", "cliffhanger", + "cliffhander", "cliffhanger", + "cliffhangar", "cliffhanger", + "clifthanger", "cliffhanger", + "cockaroches", "cockroaches", + "cockraoches", "cockroaches", + "cockroackes", "cockroaches", + "cocktailers", "cocktails", + "coefficeint", "coefficient", + "coefficiant", "coefficient", + "coincedince", "coincidence", + "coincidance", "coincidence", + "coincidense", "coincidence", + "coincidente", "coincidence", + "coincidince", "coincidence", + "coinsidence", "coincidence", + "collabarate", "collaborate", + "collaberate", "collaborate", + "collaborant", "collaborate", + "collaborare", "collaborate", + "collaborato", "collaboration", + "collapseing", "collapsing", + "collaterial", "collateral", + "collectieve", "collective", + "collectivly", "collectively", + "collectivos", "collections", + "collobarate", "collaborate", + "colloborate", "collaborate", + "colonializm", "colonialism", + "colonialsim", "colonialism", + "colonianism", "colonialism", + "colonizaton", "colonization", + "comaprisons", "comparisons", + "combiantion", "combinations", + "combinacion", "combination", + "combinaison", "combinations", + "combinaiton", "combinations", + "combinatino", "combinations", + "combinatins", "combinations", + "combinatios", "combinations", + "combinining", "combining", + "combonation", "combination", + "comediantes", "comedians", + "comeptition", "competition", + "comeptitive", "competitive", + "comeptitors", "competitors", + "comfertable", "comfortable", + "comfertably", "comfortably", + 
"comfortabel", "comfortably", + "comfortabil", "comfortably", + "comfrotable", "comfortable", + "comftorable", "comfortable", + "comftorably", "comfortably", + "comisioning", "commissioning", + "comissioned", "commissioned", + "comissioner", "commissioner", + "commandered", "commanded", + "commandmant", "commandment", + "commantator", "commentator", + "commendment", "commandment", + "commentarea", "commenter", + "commentaren", "commenter", + "commentater", "commentator", + "commenteers", "commenter", + "commentries", "commenters", + "commercialy", "commercially", + "commericals", "commercials", + "commericial", "commercial", + "comminicate", "communicate", + "comminucate", "communicate", + "commisioned", "commissioned", + "commisioner", "commissioner", + "commisssion", "commissions", + "committment", "commitment", + "commodoties", "commodities", + "commomplace", "commonplace", + "commonspace", "commonplace", + "commonweath", "commonwealth", + "commonwelth", "commonwealth", + "commuincate", "communicated", + "communciate", "communicate", + "communicted", "communicated", + "communistas", "communists", + "communistes", "communists", + "compability", "compatibility", + "compalation", "compilation", + "compansated", "compensated", + "comparabile", "comparable", + "comparasion", "comparison", + "comparasons", "comparisons", + "comparement", "compartment", + "comparetive", "comparative", + "comparision", "comparison", + "comparisson", "comparisons", + "comparitave", "comparative", + "comparitive", "comparative", + "comparsions", "comparisons", + "compassione", "compassionate", + "compasssion", "compassion", + "compatabile", "compatible", + "compatative", "comparative", + "compatiable", "compatible", + "compatibile", "compatible", + "compatibily", "compatibility", + "compeditive", "competitive", + "compeditors", "competitors", + "compeitions", "competitions", + "compeittion", "competitions", + "compelation", "compilation", + "compensante", "compensate", + "compensatie", "compensate", + "compensatin", "compensation", + "compenstate", "compensate", + "comperative", "comparative", + "compesition", "composition", + "competation", "computation", + "competative", "competitive", + "competators", "competitors", + "competetion", "competition", + "competetors", "competitors", + "competiters", "competitors", + "competiting", "competition", + "competitior", "competitor", + "competitivo", "competition", + "competitoin", "competitions", + "competitons", "competitors", + "competution", "computation", + "compilacion", "compilation", + "compilcated", "complicate", + "compination", "compilation", + "compinsated", "compensated", + "compitation", "computation", + "compitetion", "competitions", + "complacient", "complacent", + "complciated", "complicate", + "compleation", "compilation", + "complecated", "complicated", + "completaste", "completes", + "completeing", "completing", + "completeion", "completion", + "completelly", "completely", + "completelyl", "completely", + "completelys", "completes", + "completenes", "completes", + "complexitiy", "complexity", + "compliacted", "complicate", + "compliation", "compilation", + "complicarte", "complicate", + "complicatie", "complicit", + "complicatii", "complicit", + "complicatin", "complicit", + "complictaed", "complicate", + "complimente", "complement", + "complimenty", "complimentary", + "complusions", "compulsion", + "compolation", "compilation", + "componenets", "components", + "componentes", "components", + "composicion", "composition", + "composiiton", 
"compositions", + "composision", "compositions", + "compositied", "composite", + "composities", "composite", + "compositoin", "compositions", + "compositons", "compositions", + "compositore", "composite", + "compostiion", "compositions", + "compotition", "composition", + "compramised", "compromised", + "compramises", "compromises", + "compremised", "compromised", + "compremises", "compromises", + "comprension", "compression", + "compresores", "compressor", + "compresssed", "compressed", + "compresssor", "compressor", + "comprimised", "compromised", + "comprimises", "compromises", + "compromessi", "compromises", + "compromisng", "compromising", + "compromisse", "compromises", + "compromisso", "compromises", + "compromized", "compromised", + "compulstion", "compulsion", + "compunation", "computation", + "computacion", "computation", + "computating", "computation", + "computition", "computation", + "conceivibly", "conceivably", + "concencrate", "concentrate", + "concentrace", "concentrate", + "concentrade", "concentrated", + "concentrait", "concentrate", + "concentrant", "concentrate", + "concentrare", "concentrate", + "concentrato", "concentration", + "concertmate", "concentrate", + "conceviable", "conceivable", + "conceviably", "conceivably", + "concidering", "considering", + "conciveable", "conceivable", + "conciveably", "conceivably", + "conclsuions", "concussions", + "concludendo", "concluded", + "conclussion", "conclusions", + "conclussive", "conclusive", + "conclutions", "conclusions", + "concsiously", "consciously", + "conculsions", "conclusions", + "concusssion", "concussions", + "condeferacy", "confederacy", + "condicional", "conditional", + "condidtions", "conditions", + "conditionar", "conditioner", + "conditionel", "conditional", + "condolances", "condolences", + "condolenses", "condolences", + "condolonces", "condolences", + "conductiong", "conducting", + "condulences", "condolences", + "conenctions", "connections", + "conescutive", "consecutive", + "confedaracy", "confederacy", + "confedarate", "confederate", + "confederecy", "confederacy", + "conferances", "conferences", + "conferedate", "confederate", + "confererate", "confederate", + "confescated", "confiscated", + "confesssion", "confessions", + "confidantly", "confidently", + "configurare", "configure", + "configurate", "configure", + "configurato", "configuration", + "confilcting", "conflicting", + "confisgated", "confiscated", + "conflciting", "conflicting", + "confortable", "comfortable", + "confrontato", "confrontation", + "confussions", "confessions", + "congrassman", "congressman", + "congratuate", "congratulate", + "conicidence", "coincidence", + "conjonction", "conjunction", + "conjucntion", "conjunction", + "conjuncting", "conjunction", + "conlcusions", "conclusions", + "connatation", "connotation", + "connecitcut", "connecticut", + "connecticon", "connection", + "connectiong", "connecting", + "connectivty", "connectivity", + "connetation", "connotation", + "connonation", "connotation", + "connotacion", "connotation", + "conontation", "connotation", + "conotations", "connotations", + "conquerring", "conquering", + "consdidered", "considered", + "consectuive", "consecutive", + "consecuence", "consequence", + "conseguence", "consequence", + "conselation", "consolation", + "consentrate", "concentrate", + "consequenes", "consequence", + "consequense", "consequences", + "consequente", "consequence", + "consequenty", "consequently", + "consequtive", "consecutive", + "conservanti", "conservation", + "conservatie", 
"conservatives", + "conservaton", "conservation", + "consficated", "confiscated", + "considerabe", "considerate", + "considerais", "considers", + "considerant", "considerate", + "considerato", "consideration", + "considerble", "considerable", + "considerbly", "considerably", + "considereis", "considers", + "consilation", "consolation", + "consilidate", "consolidate", + "consistance", "consistency", + "consistenly", "consistently", + "consistensy", "consistency", + "consistenty", "consistently", + "consitution", "constitution", + "conslutants", "consultant", + "consolacion", "consolation", + "consoldiate", "consolidate", + "consolidare", "consolidate", + "consolodate", "consolidate", + "consomation", "consolation", + "conspiraces", "conspiracies", + "conspiracys", "conspiracies", + "conspirancy", "conspiracy", + "constantins", "constants", + "constantivs", "constants", + "constarints", "constraint", + "constituant", "constituent", + "constituion", "constitution", + "constituite", "constitute", + "constitutie", "constitutes", + "constrating", "constraint", + "constriants", "constraints", + "construcing", "constructing", + "construcion", "construction", + "construcive", "constructive", + "constructie", "constructive", + "constructos", "constructs", + "constructur", "constructor", + "constructus", "constructs", + "constuction", "construction", + "consturcted", "constructed", + "consuelling", "counselling", + "consulation", "consolation", + "consultaion", "consultation", + "consultanti", "consultation", + "consumation", "consumption", + "consumbales", "consumables", + "consumersim", "consumerism", + "consumibles", "consumables", + "contagiosum", "contagious", + "containered", "contained", + "containmemt", "containment", + "containters", "containers", + "containting", "containing", + "contaminato", "contamination", + "contaminent", "containment", + "contaminted", "contaminated", + "contancting", "contracting", + "contanimate", "contaminated", + "contemplare", "contemplate", + "contempoary", "contemporary", + "contemporay", "contemporary", + "contencious", "contentious", + "contenental", "continental", + "contengency", "contingency", + "contenintal", "continental", + "contenplate", "contemplate", + "contensious", "contentious", + "contentants", "contestants", + "contentuous", "contentious", + "contestaste", "contestants", + "contestents", "contestants", + "contianment", "containment", + "contientous", "contentious", + "contimplate", "contemplate", + "continenets", "continents", + "continentes", "continents", + "continentul", "continental", + "contingancy", "contingency", + "contingient", "contingent", + "contingincy", "contingency", + "continously", "continuously", + "continuarla", "continual", + "continuarlo", "continual", + "continuasse", "continues", + "continueing", "continuing", + "continuemos", "continues", + "continueous", "continuous", + "continuious", "continuous", + "continuning", "continuing", + "continunity", "continuity", + "continuosly", "continuously", + "continuting", "continuing", + "continutity", "continuity", + "continuuing", "continuing", + "continuuity", "continuity", + "contirbuted", "contributed", + "contiunally", "continually", + "contraccion", "contraction", + "contraddice", "contradicted", + "contradices", "contradicts", + "contradtion", "contraction", + "contraversy", "controversy", + "contreversy", "controversy", + "contribuent", "contribute", + "contribuito", "contribution", + "contributer", "contributor", + "contributie", "contribute", + "contributin", 
"contribution", + "contributos", "contributors", + "contribuyes", "contributes", + "contricting", "contracting", + "contriction", "contraction", + "contridicts", "contradicts", + "contriversy", "controversy", + "controleurs", "controllers", + "controllore", "controllers", + "controvercy", "controversy", + "controversa", "controversial", + "contrubutes", "contributes", + "contructing", "contracting", + "contruction", "construction", + "contructors", "contractors", + "conveinence", "convenience", + "conveneince", "convenience", + "conveniance", "convenience", + "conveniente", "convenience", + "convenietly", "conveniently", + "conventinal", "conventional", + "converitble", "convertible", + "conversaion", "conversion", + "conversatin", "conversations", + "converseley", "conversely", + "converstion", "conversion", + "convertirea", "converter", + "convertirle", "convertible", + "convertirme", "converter", + "convertirte", "converter", + "convicitons", "convictions", + "convienence", "convenience", + "convienient", "convenient", + "convinceing", "convincing", + "convincente", "convenient", + "convincersi", "convinces", + "convirtible", "convertible", + "cooperacion", "cooperation", + "cooperativo", "cooperation", + "cooporation", "cooperation", + "cooporative", "cooperative", + "coordenated", "coordinated", + "coordenates", "coordinates", + "coordianted", "coordinated", + "coordiantes", "coordinates", + "coordiantor", "coordinator", + "coordinador", "coordinator", + "coordinants", "coordinates", + "coordinater", "coordinator", + "coordinaton", "coordination", + "coordonated", "coordinated", + "coordonates", "coordinates", + "coordonator", "coordinator", + "cooridnated", "coordinated", + "cooridnates", "coordinates", + "cooridnator", "coordinator", + "copenhaagen", "copenhagen", + "copenhaegen", "copenhagen", + "copenhaguen", "copenhagen", + "copenhangen", "copenhagen", + "copmetitors", "competitors", + "coproration", "corporation", + "copyrigthed", "copyrighted", + "corinthains", "corinthians", + "corintheans", "corinthians", + "corinthiens", "corinthians", + "corinthinas", "corinthians", + "cornithians", "corinthians", + "corparation", "corporation", + "corperation", "corporation", + "corporacion", "corporation", + "corporativo", "corporation", + "corralation", "correlation", + "correctings", "corrections", + "correctivos", "corrections", + "correktions", "corrections", + "correktness", "correctness", + "correlacion", "correlation", + "correlaties", "correlates", + "corrilation", "correlation", + "corrisponds", "corresponds", + "corrolation", "correlation", + "corrosponds", "corresponds", + "costitution", "constitution", + "councellors", "councillors", + "counrtyside", "countryside", + "counsilling", "counselling", + "countercoat", "counteract", + "counteredit", "counterfeit", + "counterfact", "counteract", + "counterfait", "counterfeit", + "counterfest", "counterfeit", + "counterfiet", "counterfeit", + "counterpaly", "counterplay", + "counterpary", "counterplay", + "counterpath", "counterpart", + "counterpats", "counterparts", + "counterpont", "counterpoint", + "counterract", "counterpart", + "counterside", "countryside", + "countertrap", "counterpart", + "countriside", "countryside", + "countrycide", "countryside", + "countrywise", "countryside", + "courthourse", "courthouse", + "coutnerfeit", "counterfeit", + "coutnerpart", "counterpart", + "coutnerplay", "counterplay", + "creacionism", "creationism", + "creationkit", "creationist", + "creationsim", "creationism", + "creationsit", 
"creationist", + "creationsts", "creationists", + "creativelly", "creatively", + "credencials", "credentials", + "credentails", "credentials", + "credentaisl", "credentials", + "credientals", "credentials", + "credintials", "credentials", + "cricitising", "criticising", + "criculating", "circulating", + "cringeworhy", "cringeworthy", + "cringeworty", "cringeworthy", + "cringewothy", "cringeworthy", + "criticicing", "criticising", + "criticisied", "criticise", + "criticisims", "criticisms", + "criticisize", "criticise", + "criticiszed", "criticise", + "critisicing", "criticizing", + "critisising", "criticising", + "critizicing", "criticizing", + "critizising", "criticizing", + "critizizing", "criticizing", + "crockodiles", "crocodiles", + "crocodiller", "crocodile", + "crocodilule", "crocodile", + "croporation", "corporation", + "crossfiters", "crossfire", + "cultivative", "cultivate", + "curricullum", "curriculum", + "customizabe", "customizable", + "customizble", "customizable", + "dangeroulsy", "dangerously", + "dardenelles", "dardanelles", + "deadlifters", "deadlifts", + "dealershits", "dealerships", + "deceptivley", "deceptive", + "declaracion", "declaration", + "decleration", "declaration", + "declinining", "declining", + "decloration", "declaration", + "decoartions", "decoration", + "decomposits", "decomposes", + "decoratieve", "decorative", + "decorativos", "decorations", + "decotations", "decorations", + "decsendants", "descendants", + "deductiable", "deductible", + "defenderlas", "defenders", + "defenderlos", "defenders", + "defendernos", "defenders", + "defenesless", "defenseless", + "defenisvely", "defensively", + "defensivley", "defensively", + "deficiencey", "deficiency", + "deficienies", "deficiencies", + "deficientcy", "deficiency", + "definantley", "definately", + "definatedly", "definately", + "definateley", "definately", + "definatelly", "definately", + "definatelty", "definately", + "definatetly", "definately", + "definations", "definitions", + "definatlely", "definately", + "definetally", "definately", + "definetlely", "definetly", + "definitaley", "definately", + "definitelly", "definitely", + "definitevly", "definitively", + "definitiely", "definitively", + "definitieve", "definitive", + "definitiley", "definitively", + "definitivly", "definitively", + "definitivno", "definition", + "definitivos", "definitions", + "definitlely", "definitly", + "definitlety", "definitly", + "deflecticon", "deflection", + "degenererat", "degenerate", + "degradacion", "degradation", + "degradating", "degradation", + "degragation", "degradation", + "degridation", "degradation", + "dehyrdation", "dehydration", + "deinitalize", "deinitialize", + "delaerships", "dealerships", + "delapidated", "dilapidated", + "delcaration", "declaration", + "delearships", "dealerships", + "delevopment", "development", + "deliberante", "deliberate", + "deliberatly", "deliberately", + "deliberetly", "deliberately", + "delightlful", "delightful", + "deliverying", "delivering", + "delusionnal", "delusional", + "deminsional", "dimensional", + "democarcies", "democracies", + "democracize", "democracies", + "democractic", "democratic", + "democraphic", "demographic", + "democrasies", "democracies", + "democrazies", "democracies", + "democrocies", "democracies", + "demograhpic", "demographic", + "demographis", "demographics", + "demograpics", "demographics", + "demogrpahic", "demographic", + "demoninator", "denominator", + "demonstarte", "demonstrate", + "demonstates", "demonstrates", + "demonstraby", 
"demonstrably", + "demonstrant", "demonstrate", + "demonstrats", "demonstrates", + "demosntrate", "demonstrate", + "denegrating", "denigrating", + "denomenator", "denominator", + "denominador", "denominator", + "denominaron", "denominator", + "denominater", "denominator", + "denominaton", "denomination", + "denomitator", "denominator", + "denomonator", "denominator", + "denonimator", "denominator", + "deocrations", "decorations", + "deomcracies", "democracies", + "deparmental", "departmental", + "depedencies", "dependencies", + "dependancey", "dependency", + "dependencey", "dependency", + "dependencie", "dependence", + "dependenies", "dependencies", + "deplorabile", "deplorable", + "depressieve", "depressive", + "depresssion", "depression", + "deprevation", "deprivation", + "deprication", "deprivation", + "deprivating", "deprivation", + "deprivition", "deprivation", + "deprovation", "deprivation", + "depserately", "desperately", + "depseration", "desperation", + "deregulatin", "deregulation", + "derivativos", "derivatives", + "derivitaves", "derivatives", + "derivitives", "derivatives", + "derpivation", "deprivation", + "derviatives", "derivatives", + "descandants", "descendants", + "descendands", "descendants", + "descendends", "descended", + "descendenta", "descendants", + "descentants", "descendants", + "descirption", "descriptions", + "descprition", "descriptions", + "describiste", "describes", + "describtion", "description", + "descripcion", "description", + "descripiton", "descriptions", + "descripters", "descriptors", + "descriptoin", "descriptions", + "descriptons", "descriptions", + "descritpion", "descriptions", + "descrpition", "descriptions", + "desensitied", "desensitized", + "desensitzed", "desensitized", + "desentisize", "desensitized", + "desgination", "designation", + "designacion", "designation", + "designstion", "designation", + "desinations", "destinations", + "desingation", "designation", + "desitnation", "destination", + "desoriented", "disoriented", + "desparately", "desperately", + "desparation", "desperation", + "desperating", "desperation", + "desperatley", "desperately", + "despirately", "desperately", + "despiration", "desperation", + "destablized", "destabilized", + "destiantion", "destinations", + "destinaiton", "destinations", + "destinatons", "destinations", + "destinction", "destination", + "destraction", "destruction", + "destruccion", "destruction", + "destruciton", "destruction", + "destructivo", "destruction", + "destruktion", "destruction", + "destruktive", "destructive", + "deteoriated", "deteriorated", + "determanism", "determinism", + "determening", "determining", + "determenism", "determinism", + "determinare", "determine", + "determinato", "determination", + "determinded", "determine", + "determinsim", "determinism", + "detramental", "detrimental", + "detremental", "detrimental", + "detrimentul", "detrimental", + "detuschland", "deutschland", + "deustchland", "deutschland", + "deutchsland", "deutschland", + "deutcshland", "deutschland", + "deutschalnd", "deutschland", + "deutshcland", "deutschland", + "develepmont", "developments", + "develompent", "developments", + "developemnt", "developments", + "developmant", "developmental", + "developmetn", "developments", + "developmnet", "developments", + "developpers", "developers", + "develpoment", "developments", + "deveolpment", "developments", + "deveploment", "developments", + "devestating", "devastating", + "devistating", "devastating", + "deyhdration", "dehydration", + "diagnositcs", "diagnostic", 
+ "diagnositic", "diagnostic", + "diagonstics", "diagnostic", + "dictatorhip", "dictatorship", + "dictionaire", "dictionaries", + "dictionairy", "dictionary", + "dictionarys", "dictionaries", + "dictionnary", "dictionary", + "differances", "differences", + "differantly", "differently", + "differental", "differential", + "differentes", "differences", + "differneces", "differences", + "differnetly", "differently", + "difficulity", "difficulty", + "difficultes", "difficulties", + "dificulties", "difficulties", + "dimensiones", "dimensions", + "dimentional", "dimensional", + "dimesnional", "dimensional", + "diminisheds", "diminishes", + "diminsihing", "diminishing", + "diminuitive", "diminutive", + "diminushing", "diminishing", + "dinosaurios", "dinosaurs", + "direccional", "directional", + "direcitonal", "directional", + "directorguy", "directory", + "directorios", "directors", + "direktional", "directional", + "disadvantge", "disadvantage", + "disagreemet", "disagreements", + "disagreemtn", "disagreements", + "disapperead", "disappeared", + "disapporval", "disapproval", + "disapprovel", "disapproval", + "disasterous", "disastrous", + "disastreous", "disastrous", + "disastrious", "disastrous", + "disastruous", "disastrous", + "disatisfied", "dissatisfied", + "disciplened", "disciplined", + "disciplinas", "disciplines", + "disciplince", "disciplines", + "disclipined", "disciplined", + "disclipines", "disciplines", + "discogrophy", "discography", + "discogrpahy", "discography", + "disconencts", "disconnects", + "disconneted", "disconnected", + "disconnnect", "disconnect", + "discontined", "discontinued", + "discontiued", "discontinued", + "discrapency", "discrepancy", + "discretited", "discredited", + "discrimante", "discriminate", + "discrimiate", "discriminate", + "discussiong", "discussing", + "discusssion", "discussions", + "disgraseful", "disgraceful", + "disgrateful", "disgraceful", + "disgrunteld", "disgruntled", + "disgustigly", "disgustingly", + "disgustingy", "disgustingly", + "disgustinly", "disgustingly", + "disicplined", "disciplined", + "disicplines", "disciplines", + "disingenuos", "disingenuous", + "dismanlting", "dismantling", + "dismantaled", "dismantled", + "dismanteled", "dismantled", + "disobediant", "disobedient", + "disocgraphy", "discography", + "disparingly", "disparagingly", + "dispensaire", "dispensaries", + "dispensarie", "dispenser", + "dispensiary", "dispensary", + "displacemnt", "displacement", + "disposicion", "disposition", + "disputandem", "disputandum", + "disqualifed", "disqualified", + "disregaring", "disregarding", + "dissapeared", "disappeared", + "dissapoined", "dissapointed", + "dissapointd", "dissapointed", + "dissapoited", "dissapointed", + "dissappears", "disappears", + "dissatisfed", "dissatisfied", + "disscusions", "discussions", + "dissertaion", "dissertation", + "dissipatore", "dissipate", + "distatesful", "distasteful", + "distatseful", "distasteful", + "disterbance", "disturbance", + "disticntion", "distinctions", + "distinciton", "distinction", + "distincitve", "distinctive", + "distinctily", "distinctly", + "distingiush", "distinguish", + "distinguise", "distinguished", + "distinktion", "distinction", + "distinquish", "distinguish", + "distirbance", "disturbance", + "distirbuted", "distribute", + "distirbutor", "distributor", + "distraccion", "distraction", + "distractons", "distracts", + "distraktion", "distraction", + "distribitor", "distributor", + "distribuent", "distribute", + "distribuite", "distribute", + "distribuito", "distribution", 
+ "distributie", "distributed", + "distributin", "distribution", + "distributio", "distributor", + "distrobuted", "distributed", + "distrubance", "disturbance", + "distrubited", "distributed", + "distrubitor", "distributor", + "distrubuted", "distributed", + "distrubutor", "distributor", + "distructive", "destructive", + "distuingish", "distinguish", + "distunguish", "distinguish", + "disturbante", "disturbance", + "disturbence", "disturbance", + "disucssions", "discussions", + "divisionals", "divisions", + "doccumented", "documented", + "documantary", "documentary", + "documenatry", "documentary", + "documentare", "documentaries", + "documentato", "documentation", + "documentery", "documentary", + "documentory", "documentary", + "domesticted", "domesticated", + "dominateurs", "dominates", + "dominationg", "dominating", + "donwloading", "downloading", + "doublellift", "doublelift", + "downlaoding", "downloading", + "downloadbel", "downloadable", + "downloadbig", "downloading", + "downloadble", "downloadable", + "downvoteers", "downvoters", + "downvoteing", "downvoting", + "downvoteres", "downvoters", + "downvoteros", "downvoters", + "downvoteurs", "downvoters", + "downvotters", "downvoters", + "downvotting", "downvoting", + "dramaticaly", "dramatically", + "dramaticlly", "dramatically", + "drasitcally", "drastically", + "dsyfunction", "dysfunction", + "duetschland", "deutschland", + "durabillity", "durability", + "dyanmically", "dynamically", + "dymanically", "dynamically", + "dysfonction", "dysfunction", + "dysfucntion", "dysfunction", + "dysfunciton", "dysfunction", + "dysfunktion", "dysfunction", + "earhtquakes", "earthquakes", + "earthqaukes", "earthquakes", + "earthquacks", "earthquakes", + "economicaly", "economically", + "economiclly", "economically", + "economisiti", "economist", + "economistes", "economists", + "educacional", "educational", + "effeciently", "efficiently", + "effecitvely", "effectively", + "effectivley", "effectively", + "efficeintly", "efficiently", + "efficiantly", "efficiently", + "efficientcy", "efficiently", + "effortlesly", "effortlessly", + "effortlessy", "effortlessly", + "egaletarian", "egalitarian", + "egalitatian", "egalitarian", + "egaliterian", "egalitarian", + "egostitical", "egotistical", + "egotastical", "egotistical", + "egotestical", "egotistical", + "egotisitcal", "egotistical", + "egotisticle", "egotistical", + "egotystical", "egotistical", + "ehtnicities", "ethnicities", + "ejacluation", "ejaculation", + "ejacualtion", "ejaculation", + "electoratul", "electoral", + "electornics", "electronics", + "electricain", "electrician", + "electricial", "electrical", + "electricien", "electrician", + "electricion", "electrician", + "electricman", "electrician", + "electrisity", "electricity", + "electritian", "electrician", + "electrocity", "electricity", + "electrolyes", "electrolytes", + "electrolyts", "electrolytes", + "electroncis", "electrons", + "electroylte", "electrolytes", + "elementrary", "elementary", + "eleminating", "eliminating", + "elimanation", "elimination", + "eliminacion", "elimination", + "elimintates", "eliminates", + "ellipitcals", "elliptical", + "eloquentely", "eloquently", + "emabrassing", "embarassing", + "embaraasing", "embarassing", + "embarasaing", "embarassing", + "embarassign", "embarassing", + "embarassimg", "embarassing", + "embarassing", "embarrassing", + "embarissing", "embarassing", + "embarrasing", "embarrassing", + "embarressed", "embarrassed", + "embarrssing", "embarassing", + "emergancies", "emergencies", + 
"emergencias", "emergencies", + "emergenices", "emergencies", + "emmediately", "immediately", + "emmisarries", "emissaries", + "emotionella", "emotionally", + "empahsizing", "emphasizing", + "empathethic", "empathetic", + "emphacizing", "emphasizing", + "emphatising", "emphasizing", + "emphatizing", "emphasizing", + "emphazising", "emphasizing", + "emphesizing", "emphasizing", + "empiracally", "empirically", + "empirialism", "imperialism", + "empirialist", "imperialist", + "enchamtment", "enchantment", + "enchancment", "enchantment", + "enchanement", "enchantment", + "enchanthing", "enchanting", + "enchantmant", "enchantment", + "enchantmens", "enchantments", + "enchantmets", "enchantments", + "encomapsses", "encompasses", + "encompasess", "encompasses", + "encompesses", "encompasses", + "encounteres", "encounters", + "encoutnered", "encountered", + "encryptiion", "encryption", + "encyclopdia", "encyclopedia", + "encylopedia", "encyclopedia", + "endagnering", "endangering", + "endandering", "endangering", + "endorcement", "endorsement", + "endoresment", "endorsement", + "engagaments", "engagements", + "engeneering", "engineering", + "enginerring", "engineering", + "enginnering", "engineering", + "enlargments", "enlargements", + "enligthened", "enlightened", + "enourmously", "enormously", + "enterpirses", "enterprises", + "enterprices", "enterprises", + "enterprishe", "enterprises", + "entertainig", "entertaining", + "entertwined", "entertained", + "enthicities", "ethnicities", + "enthisiasts", "enthusiasts", + "enthuasists", "enthusiasts", + "enthuisasts", "enthusiasts", + "enthusaists", "enthusiasts", + "enthusiants", "enthusiast", + "enthusiasic", "enthusiastic", + "enthusiasim", "enthusiasm", + "enthusiasum", "enthusiasm", + "enthusiatic", "enthusiastic", + "enthusiests", "enthusiasts", + "enthusigasm", "enthusiasm", + "enthusisast", "enthusiasts", + "entrepeneur", "entrepreneur", + "entreperure", "entrepreneur", + "entrepeuner", "entrepreneur", + "entreprener", "entrepreneurs", + "entreprenur", "entrepreneur", + "entretained", "entertained", + "envinroment", "environments", + "enviorments", "environments", + "enviornment", "environment", + "envirnoment", "environment", + "enviroments", "environments", + "enviromnent", "environments", + "environemnt", "environment", + "environmnet", "environments", + "envrionment", "environment", + "equilavents", "equivalents", + "equilbirium", "equilibrium", + "equilevants", "equivalents", + "equilibirum", "equilibrium", + "equilibriam", "equilibrium", + "equilibruim", "equilibrium", + "equivalance", "equivalence", + "equivalants", "equivalents", + "equivalenet", "equivalents", + "equivallent", "equivalent", + "equivelance", "equivalence", + "equivelants", "equivalents", + "equivelents", "equivalents", + "equivilants", "equivalents", + "equivilence", "equivalence", + "equivilents", "equivalents", + "equivlalent", "equivalent", + "equivlanets", "equivalents", + "equivolence", "equivalence", + "equivolents", "equivalents", + "essencially", "essentially", + "essentailly", "essentially", + "essentialls", "essentials", + "essentually", "essentially", + "establising", "establishing", + "ethicallity", "ethically", + "ethincities", "ethnicities", + "ethniticies", "ethnicities", + "europeaners", "europeans", + "europeaness", "europeans", + "evaluatiing", "evaluating", + "evaluationg", "evaluating", + "evangalical", "evangelical", + "evangelikal", "evangelical", + "evengalical", "evangelical", + "evenhtually", "eventually", + "everyonehas", "everyones", + 
"everyonelse", "everyones", + "evidentally", "evidently", + "exacarbated", "exacerbated", + "exacberated", "exacerbated", + "exagerating", "exaggerating", + "exagerrated", "exaggerated", + "exagerrates", "exaggerates", + "exaggarated", "exaggerated", + "exaggareted", "exaggerate", + "exaggeratin", "exaggeration", + "exaggerrate", "exaggerate", + "exaggurated", "exaggerated", + "exarcebated", "exacerbated", + "excalmation", "exclamation", + "excepcional", "exceptional", + "exceptionel", "exceptional", + "excessivley", "excessively", + "exceutioner", "executioner", + "exchanching", "exchanging", + "exclamacion", "exclamation", + "exclamating", "exclamation", + "exclamativo", "exclamation", + "exclemation", "exclamation", + "exclimation", "exclamation", + "exclucivity", "exclusivity", + "exclusivety", "exclusivity", + "exclusivily", "exclusivity", + "exclusivley", "exclusively", + "excpetional", "exceptional", + "exculsively", "exclusively", + "exculsivity", "exclusivity", + "execitioner", "executioner", + "execptional", "exceptional", + "exectuables", "executable", + "exectuioner", "executioner", + "executionar", "executioner", + "executionor", "executioner", + "exerciseing", "exercising", + "exeuctioner", "executioner", + "existantial", "existential", + "existencial", "existential", + "existensial", "existential", + "existentiel", "existential", + "exlcamation", "exclamation", + "exlcusively", "exclusively", + "exlcusivity", "exclusivity", + "exoskelaton", "exoskeleton", + "expansiones", "expansions", + "expectantcy", "expectancy", + "expectating", "expectation", + "expectional", "exceptional", + "expendature", "expenditure", + "expendeture", "expenditure", + "expentiture", "expenditure", + "expereinced", "experienced", + "expereinces", "experiences", + "experements", "experiments", + "experianced", "experienced", + "experiances", "experiences", + "experiemnts", "experiments", + "experiening", "experiencing", + "experimetal", "experimental", + "experimeted", "experimented", + "experssions", "expressions", + "expiditions", "expeditions", + "expierenced", "experienced", + "expierences", "experiences", + "expirements", "experiments", + "explainging", "explaining", + "explaintory", "explanatory", + "explanaiton", "explanations", + "explanetary", "explanatory", + "explanetory", "explanatory", + "explanitary", "explanatory", + "explanotory", "explanatory", + "explenation", "explanation", + "explenatory", "explanatory", + "explicitely", "explicitly", + "explicitily", "explicitly", + "explination", "explanation", + "explinatory", "explanatory", + "exploitaion", "exploitation", + "exploitatie", "exploitative", + "explonation", "exploration", + "exploracion", "exploration", + "explorating", "exploration", + "explorerers", "explorers", + "explosiones", "explosions", + "explotacion", "exploration", + "expodential", "exponential", + "exponantial", "exponential", + "exponencial", "exponential", + "exponentiel", "exponential", + "expresscoin", "expression", + "expressivos", "expressions", + "expresssive", "expressive", + "expressview", "expressive", + "exprimental", "experimental", + "expropiated", "expropriated", + "extensiones", "extensions", + "extensivley", "extensively", + "extragavant", "extravagant", + "extrapalate", "extrapolate", + "extraploate", "extrapolate", + "extrapolant", "extrapolate", + "extrapolare", "extrapolate", + "extrapolite", "extrapolate", + "extrapulate", "extrapolate", + "extravagent", "extravagant", + "extravagina", "extravagant", + "extravegant", "extravagant", + 
"extravigant", "extravagant", + "extravogant", "extravagant", + "extremistas", "extremists", + "extremistes", "extremists", + "extropolate", "extrapolate", + "fabircation", "fabrication", + "fabricacion", "fabrication", + "fabrikation", "fabrication", + "facilitarte", "facilitate", + "facilitiate", "facilitate", + "facillitate", "facilitate", + "facisnation", "fascination", + "facsination", "fascination", + "factuallity", "factually", + "familairity", "familiarity", + "familairize", "familiarize", + "familiaries", "familiarize", + "familierize", "familiarize", + "fanatsizing", "fantasizing", + "fanficitons", "fanfiction", + "fantacising", "fantasizing", + "fantacizing", "fantasizing", + "fantasazing", "fantasizing", + "fantasiaing", "fantasizing", + "fantasyzing", "fantasizing", + "fantazising", "fantasizing", + "fascinacion", "fascination", + "fascinatinf", "fascination", + "fascisation", "fascination", + "fascization", "fascination", + "fashionalbe", "fashionable", + "fashoinable", "fashionable", + "fatalitites", "fatalities", + "favoritisme", "favorites", + "favoutrable", "favourable", + "felxibility", "flexibility", + "feministers", "feminists", + "feministisk", "feminists", + "fermentaion", "fermentation", + "fermenterad", "fermented", + "fertilizier", "fertilizer", + "fertizilers", "fertilizer", + "festivalens", "festivals", + "fignernails", "fingernails", + "fignerprint", "fingerprint", + "figurativly", "figuratively", + "finanically", "financially", + "finantially", "financially", + "fingerpints", "fingertips", + "fingerpoint", "fingerprint", + "fingertrips", "fingertips", + "firefighers", "firefighters", + "firefigther", "firefighters", + "firendzoned", "friendzoned", + "firghtening", "frightening", + "flatterende", "flattered", + "flawlessely", "flawlessly", + "flawlessley", "flawlessly", + "flexibiltiy", "flexibility", + "flourescent", "fluorescent", + "fluctuaties", "fluctuate", + "fluctuative", "fluctuate", + "flutteryshy", "fluttershy", + "forcefullly", "forcefully", + "foreseaable", "foreseeable", + "foresseable", "foreseeable", + "forgettting", "forgetting", + "forgiviness", "forgiveness", + "formallized", "formalized", + "formattting", "formatting", + "formidabble", "formidable", + "formidabelt", "formidable", + "formidabile", "formidable", + "fortitudine", "fortitude", + "fortuantely", "fortunately", + "fortunantly", "fortunately", + "fortunatley", "fortunately", + "fortunetely", "fortunately", + "franchieses", "franchises", + "frankensite", "frankenstein", + "frankensten", "frankenstein", + "fransiscans", "franciscans", + "freindships", "friendships", + "freindzoned", "friendzoned", + "frequenices", "frequencies", + "frequensies", "frequencies", + "frequenties", "frequencies", + "frequentily", "frequently", + "frequenzies", "frequencies", + "friendboned", "friendzoned", + "friendlines", "friendlies", + "friendzonie", "friendzoned", + "frientships", "friendships", + "frientzoned", "friendzoned", + "frightenend", "frightened", + "frightining", "frightening", + "frigthening", "frightening", + "frinedzoned", "friendzoned", + "frontlinies", "frontline", + "frontlinjen", "frontline", + "frustartion", "frustrations", + "frustracion", "frustration", + "frustraited", "frustrated", + "frustrantes", "frustrates", + "frustrasion", "frustrations", + "frustrasted", "frustrates", + "frustraties", "frustrates", + "fucntioning", "functioning", + "fulfillling", "fulfilling", + "fulfullment", "fulfillment", + "fullfilment", "fulfillment", + "fullscreeen", "fullscreen", + "funcitoning", 
"functioning", + "functionaly", "functionally", + "functionnal", "functional", + "fundamentas", "fundamentals", + "fundamently", "fundamental", + "fundametals", "fundamentals", + "fundamnetal", "fundamentals", + "fundemantal", "fundamental", + "fundemental", "fundamental", + "fundimental", "fundamental", + "furhtermore", "furthermore", + "furstration", "frustration", + "furthremore", "furthermore", + "furthurmore", "furthermore", + "futurisitic", "futuristic", + "gangsterest", "gangsters", + "gangsterous", "gangsters", + "gauntlettes", "gauntlets", + "geneologies", "genealogies", + "generalizng", "generalizing", + "generatting", "generating", + "genitaliban", "genitalia", + "gentlemanne", "gentlemen", + "girlfirends", "girlfriends", + "girlfreinds", "girlfriends", + "girlfrients", "girlfriends", + "glorifierad", "glorified", + "glorifindel", "glorified", + "goosebumbps", "goosebumps", + "govenrments", "governments", + "govermental", "governmental", + "governemnts", "governments", + "governmanet", "governmental", + "governmeant", "governmental", + "govormental", "governmental", + "gracefullly", "gracefully", + "grahpically", "graphically", + "grammarical", "grammatical", + "grammaticly", "grammatical", + "grammitical", "grammatical", + "graphcially", "graphically", + "grassrooots", "grassroots", + "gratuitious", "gratuitous", + "gratuituous", "gratuitous", + "gravitatiei", "gravitate", + "grilfriends", "girlfriends", + "grpahically", "graphically", + "guaranteeds", "guarantees", + "guerrillera", "guerrilla", + "gunslingner", "gunslinger", + "hamburgaren", "hamburger", + "hamburgeres", "hamburgers", + "hamburglers", "hamburgers", + "hamburguers", "hamburgers", + "handlebards", "handlebars", + "handrwiting", "handwriting", + "handycapped", "handicapped", + "hanidcapped", "handicapped", + "harassement", "harassment", + "harrasments", "harassments", + "harrassment", "harassment", + "harvestgain", "harvesting", + "headquartes", "headquarters", + "headquaters", "headquarters", + "hearhtstone", "hearthstone", + "heartborken", "heartbroken", + "heartbraker", "heartbreak", + "heartbrakes", "heartbreak", + "heartsthone", "hearthstone", + "heaviweight", "heavyweight", + "heavyweigth", "heavyweight", + "heavywieght", "heavyweight", + "helicoptors", "helicopters", + "helicotpers", "helicopters", + "helicpoters", "helicopters", + "helictopers", "helicopters", + "helikopters", "helicopters", + "hemipsheres", "hemisphere", + "hemishperes", "hemisphere", + "herathstone", "hearthstone", + "heterosexal", "heterosexual", + "hexidecimal", "hexadecimal", + "hierachical", "hierarchical", + "hierarcical", "hierarchical", + "highlighing", "highlighting", + "highschoool", "highschool", + "hipopotamus", "hippopotamus", + "historicaly", "historically", + "historicans", "historians", + "historietas", "histories", + "historinhas", "historians", + "homecomeing", "homecoming", + "homecomming", "homecoming", + "homelesness", "homelessness", + "homelessess", "homelessness", + "homeowneris", "homeowners", + "homoegenous", "homogeneous", + "homogeneize", "homogenize", + "homogenious", "homogeneous", + "homogenuous", "homogeneous", + "homophoboes", "homophobe", + "homosexuais", "homosexuals", + "homosexuels", "homosexuals", + "hopelessely", "hopelessly", + "hopelessley", "hopelessly", + "hopsitality", "hospitality", + "horizonatal", "horizontal", + "horizontaal", "horizontal", + "horizontaly", "horizontally", + "horrendeous", "horrendous", + "horrendious", "horrendous", + "horrenduous", "horrendous", + "hospitalzed", 
"hospitalized", + "hospotality", "hospitality", + "househoulds", "households", + "humanitarna", "humanitarian", + "humanitites", "humanities", + "humilitaing", "humiliating", + "humilitaion", "humiliation", + "humillating", "humiliating", + "humillation", "humiliation", + "hurricaines", "hurricanes", + "hurricances", "hurricanes", + "hurricanger", "hurricane", + "hyperbollic", "hyperbolic", + "hyperbrophy", "hypertrophy", + "hyperthropy", "hypertrophy", + "hypertorphy", "hypertrophy", + "hypertrohpy", "hypertrophy", + "hypocritcal", "hypocritical", + "hypocritial", "hypocritical", + "hypocrities", "hypocrite", + "hypothesees", "hypotheses", + "hypothesies", "hypothesis", + "hystericaly", "hysterically", + "hystericlly", "hysterically", + "iconclastic", "iconoclastic", + "idealisitic", "idealistic", + "identifible", "identifiable", + "identitites", "identities", + "identitties", "identities", + "ideologiers", "ideologies", + "ideologisen", "ideologies", + "ideologiset", "ideologies", + "ideologiske", "ideologies", + "illegallity", "illegally", + "illegitamte", "illegitimate", + "illegitmate", "illegitimate", + "illsutrator", "illustrator", + "illuminanti", "illuminati", + "illuminarti", "illuminati", + "illuminatti", "illuminati", + "illuminauti", "illuminati", + "illuminiati", "illuminati", + "illuminista", "illuminati", + "illumintati", "illuminati", + "illustarted", "illustrated", + "illustartor", "illustrator", + "illustraded", "illustrated", + "illustraion", "illustration", + "illustrater", "illustrator", + "illustratie", "illustrate", + "illustratin", "illustrations", + "illustraton", "illustration", + "imaganative", "imaginative", + "imaganitive", "imaginative", + "imaginacion", "imagination", + "imaginatiei", "imaginative", + "imaginating", "imagination", + "imaginativo", "imagination", + "imaginitave", "imaginative", + "imbalanaced", "imbalanced", + "imbalanaces", "imbalances", + "imbalancers", "imbalances", + "immatureity", "immaturity", + "immedeately", "immediately", + "immediantly", "immediately", + "immediatley", "immediately", + "immedietely", "immediately", + "immideately", "immediately", + "immidiately", "immediately", + "immigraiton", "immigration", + "immigrantes", "immigrants", + "immoratlity", "immortality", + "immortailty", "immortality", + "immortalisy", "immortals", + "impeccabile", "impeccable", + "imperailist", "imperialist", + "imperealist", "imperialist", + "imperialims", "imperialism", + "imperialsim", "imperialism", + "imperiarist", "imperialist", + "imperically", "empirically", + "imperislist", "imperialist", + "implausable", "implausible", + "implausbile", "implausible", + "implementas", "implements", + "implementes", "implements", + "implementig", "implementing", + "implementos", "implements", + "implicacion", "implication", + "implicatons", "implications", + "implicitely", "implicitly", + "implicitily", "implicitly", + "implikation", "implication", + "implimented", "implemented", + "importantce", "importance", + "importently", "importantly", + "imporvement", "improvement", + "impossibile", "impossible", + "impossibily", "impossibly", + "impossibley", "impossibly", + "impossiblly", "impossibly", + "impoverised", "impoverished", + "impracticle", "impractical", + "impressario", "impresario", + "impresssion", "impressions", + "imprisonent", "imprisonment", + "imprisonned", "imprisoned", + "improbabile", "improbable", + "improtantly", "importantly", + "improvemnts", "improvements", + "improvished", "improvised", + "improvision", "improvisation", + "improvments", 
"improvements", + "impulsivley", "impulsive", + "imrpovement", "improvement", + "inaccessble", "inaccessible", + "inaccuraces", "inaccuracies", + "inaccurrate", "inaccurate", + "inadvertant", "inadvertent", + "inaguration", "inauguration", + "inahbitants", "inhabitants", + "incarantion", "incarnation", + "incarcerato", "incarceration", + "incarnacion", "incarnation", + "incentivare", "incentive", + "incentivate", "incentive", + "incentivice", "incentive", + "incentivies", "incentives", + "incidencies", "incidence", + "incidentaly", "incidentally", + "incidential", "incidental", + "inclanation", "inclination", + "inclenation", "inclination", + "inclinacion", "inclination", + "inclinaison", "inclination", + "incognition", "incognito", + "incoherrent", "incoherent", + "incompatble", "incompatible", + "incompatent", "incompetent", + "incompetant", "incompetent", + "incompitent", "incompetent", + "incompotent", "incompetent", + "incomptable", "incompatible", + "inconsisent", "inconsistent", + "inconveniet", "inconvenient", + "incoroprate", "incorporate", + "incorparate", "incorporate", + "incorperate", "incorporate", + "incorporare", "incorporate", + "incorported", "incorporated", + "incorprates", "incorporates", + "incorproate", "incorporated", + "incramental", "incremental", + "increadible", "incredible", + "incrediable", "incredible", + "incrediably", "incredibly", + "incredibile", "incredible", + "incredibily", "incredibly", + "incredibley", "incredibly", + "incrememnts", "increments", + "incremenets", "increments", + "incrementas", "increments", + "incremently", "incremental", + "incrementos", "increments", + "incrimental", "incremental", + "inctroduced", "introduced", + "indefinetly", "indefinitely", + "indefininte", "indefinite", + "indefinitly", "indefinitely", + "indepdenent", "independents", + "indepedence", "independence", + "indepednent", "independents", + "independant", "independent", + "independece", "independence", + "independens", "independents", + "independetn", "independents", + "independets", "independents", + "independnet", "independents", + "indepentend", "independents", + "indepentent", "independent", + "indianapols", "indianapolis", + "indicateurs", "indicates", + "indicatiors", "indicators", + "indictement", "indictment", + "indifferant", "indifferent", + "indiffernce", "indifference", + "indigeneous", "indigenous", + "indigenious", "indigenous", + "indigenuous", "indigenous", + "indigineous", "indigenous", + "indipendent", "independent", + "indirectely", "indirectly", + "individiual", "individual", + "individuais", "individuals", + "individualy", "individually", + "individuati", "individuality", + "individuels", "individuals", + "indivuduals", "individuals", + "industriels", "industries", + "ineffecitve", "ineffective", + "ineffektive", "ineffective", + "inefficeint", "inefficient", + "inefficiant", "inefficient", + "ineffictive", "ineffective", + "ineffizient", "inefficient", + "inequallity", "inequality", + "inevitabile", "inevitable", + "inevitabily", "inevitably", + "inevitabley", "inevitably", + "inevitablly", "inevitably", + "inexpencive", "inexpensive", + "inexpenisve", "inexpensive", + "inexperiece", "inexperience", + "inexperince", "inexperience", + "inexplicaby", "inexplicably", + "infallibale", "infallible", + "infallibile", "infallible", + "infectation", "infestation", + "inferioirty", "inferiority", + "infestating", "infestation", + "infilitrate", "infiltrate", + "infiltartor", "infiltrator", + "infiltraron", "infiltrator", + "infiltrarte", "infiltrate", + 
"infiltrater", "infiltrator", + "infiltratie", "infiltrate", + "infiltrerat", "infiltrate", + "infinitelly", "infinitely", + "infintrator", "infiltrator", + "inflamation", "inflammation", + "inflatabale", "inflatable", + "inflitrator", "infiltrator", + "influancing", "influencing", + "influencial", "influential", + "influencian", "influencing", + "influenting", "influencing", + "influentual", "influential", + "influincing", "influencing", + "infograhpic", "infographic", + "infograpgic", "infographic", + "infogrpahic", "infographic", + "informacion", "information", + "informatice", "informative", + "informatief", "informative", + "informatiei", "informative", + "informatike", "informative", + "informativo", "information", + "informitive", "informative", + "infrigement", "infringement", + "infringeing", "infringing", + "infromation", "information", + "infromative", "informative", + "infulential", "influential", + "ingerdients", "ingredients", + "ingrediants", "ingredients", + "ingreidents", "ingredient", + "ingriedents", "ingredient", + "inhabitents", "inhabitants", + "inheirtance", "inheritance", + "inheratance", "inheritance", + "inheretance", "inheritance", + "inheritence", "inheritance", + "inhertiance", "inheritance", + "initaitives", "initiatives", + "initalisers", "initialisers", + "initalising", "initialising", + "initalizers", "initializers", + "initalizing", "initializing", + "initiaitive", "initiative", + "inititiaves", "initiatives", + "innocenters", "innocents", + "innocentius", "innocents", + "innoculated", "inoculated", + "inpsiration", "inspiration", + "inquisicion", "inquisition", + "inquisistor", "inquisitor", + "inquisiting", "inquisition", + "inquisitior", "inquisitor", + "inquisitivo", "inquisition", + "inquizition", "inquisition", + "insecurites", "insecurities", + "insensative", "insensitive", + "insensetive", "insensitive", + "insentitive", "insensitive", + "insepctions", "inspections", + "inseperable", "inseparable", + "insipration", "inspiration", + "insitutions", "institutions", + "insparation", "inspiration", + "inspecticon", "inspection", + "inspectoras", "inspectors", + "insperation", "inspiration", + "inspiracion", "inspiration", + "inspirating", "inspiration", + "inspriation", "inspiration", + "instalation", "installation", + "instalement", "installment", + "installatin", "installations", + "installeert", "installer", + "installemnt", "installment", + "installling", "installing", + "installmant", "installment", + "instanciate", "instantiate", + "instantaneu", "instantaneous", + "institucion", "institution", + "institutiei", "institute", + "instituttet", "institute", + "instraments", "instruments", + "instruccion", "instruction", + "instruciton", "instruction", + "instructers", "instructors", + "instructior", "instructor", + "instructios", "instructors", + "instructivo", "instruction", + "instructons", "instructors", + "instruktion", "instruction", + "instrumenal", "instrumental", + "instrumetal", "instrumental", + "insturction", "instruction", + "insturctors", "instructors", + "insturments", "instruments", + "instutition", "institution", + "instutution", "institution", + "insufficent", "insufficient", + "insuinating", "insinuating", + "insuniating", "insinuating", + "insurgencey", "insurgency", + "intangiable", "intangible", + "intangibile", "intangible", + "inteferring", "interfering", + "integracion", "integration", + "integratron", "integration", + "integrering", "interfering", + "intelectual", "intellectual", + "inteligence", "intelligence", + 
"intellectul", "intellectuals", + "intellectus", "intellectuals", + "intellecual", "intellectual", + "intellegent", "intelligent", + "intelligant", "intelligent", + "intencional", "intentional", + "intentionly", "intentional", + "interaccion", "interaction", + "interactice", "interactive", + "interacties", "interacts", + "interactifs", "interacts", + "interactins", "interacts", + "interactios", "interacts", + "interactivo", "interaction", + "interactons", "interacts", + "interaktion", "interaction", + "interaktive", "interactive", + "interasting", "interacting", + "intercation", "integration", + "interceptin", "interception", + "intercoarse", "intercourse", + "intercource", "intercourse", + "interecting", "interacting", + "interection", "interaction", + "interelated", "interrelated", + "interersted", "interpreted", + "interesring", "interfering", + "interessted", "interested", + "interferece", "interference", + "interferens", "interferes", + "interferire", "interfere", + "interfernce", "interference", + "interferred", "interfere", + "interferres", "interferes", + "intergation", "integration", + "intergrated", "integrated", + "intermedate", "intermediate", + "intermedite", "intermediate", + "intermitent", "intermittent", + "internation", "international", + "interneters", "internets", + "internetese", "internets", + "internetest", "internets", + "interneting", "interesting", + "internetors", "internets", + "internettes", "internets", + "interperted", "interpreted", + "interperter", "interpreter", + "interprered", "interpreter", + "interpretor", "interpreter", + "interratial", "interracial", + "interresing", "interfering", + "interrogato", "interrogation", + "interrputed", "interrupted", + "interruping", "interrupting", + "interruptes", "interrupts", + "interruptis", "interrupts", + "intersecton", "intersection", + "interstelar", "interstellar", + "intertained", "intertwined", + "intertvined", "intertwined", + "intertwyned", "intertwined", + "intervalles", "intervals", + "intervation", "integration", + "interveiwed", "interviewed", + "interveiwer", "interviewer", + "intervenion", "intervening", + "intervenire", "intervene", + "interventie", "intervene", + "intervewing", "intervening", + "interviened", "interviewed", + "interviewes", "interviews", + "interviewie", "interviewer", + "intervining", "intervening", + "interwebers", "interwebs", + "interwiever", "interviewer", + "intestinces", "intestines", + "inticracies", "intricacies", + "intimadated", "intimidated", + "intimidades", "intimidated", + "intimidante", "intimidate", + "intimidatie", "intimidated", + "intimidatin", "intimidation", + "intimidaton", "intimidation", + "intimidiate", "intimidate", + "intiminated", "intimidated", + "intimitaded", "intimidated", + "intimitated", "intimidated", + "intiutively", "intuitively", + "intoleranse", "intolerance", + "intolerante", "intolerance", + "intolerence", "intolerance", + "intolernace", "intolerance", + "intolorance", "intolerance", + "intolorence", "intolerance", + "intorducing", "introducing", + "intorverted", "introverted", + "intoxicatin", "intoxication", + "intoxicaton", "intoxication", + "intoxinated", "intoxicated", + "intoxocated", "intoxicated", + "intracacies", "intricacies", + "intracicies", "intricacies", + "intraverted", "introverted", + "intrecacies", "intricacies", + "intrepreted", "interpreted", + "intrepreter", "interpreter", + "intrerupted", "interrupted", + "intricasies", "intricacies", + "intricicies", "intricacies", + "intrigueing", "intriguing", + "intrinsisch", 
"intrinsic", + "introducion", "introduction", + "introducted", "introduced", + "introductie", "introduce", + "introvertie", "introverted", + "introvertis", "introverts", + "intruducing", "introducing", + "intrumental", "instrumental", + "intuatively", "intuitively", + "intuitevely", "intuitively", + "intuitivley", "intuitively", + "intuituvely", "intuitively", + "inutitively", "intuitively", + "invaldiates", "invalidates", + "invalidades", "invalidates", + "invalidante", "invalidate", + "invariabley", "invariably", + "invariablly", "invariably", + "inventiones", "inventions", + "invesitgate", "investigate", + "investagate", "investigate", + "investiagte", "investigate", + "investigare", "investigate", + "invincibile", "invincible", + "invincinble", "invincible", + "invisibiity", "invisibility", + "invisibiliy", "invisibility", + "invokations", "invocations", + "involantary", "involuntary", + "involentary", "involuntary", + "involintary", "involuntary", + "involontary", "involuntary", + "involunatry", "involuntary", + "invulnerabe", "invulnerable", + "invulnerble", "invulnerable", + "iresistable", "irresistible", + "iresistably", "irresistibly", + "iresistible", "irresistible", + "iresistibly", "irresistibly", + "irrationaly", "irrationally", + "irrationnal", "irrational", + "islamisists", "islamists", + "islamisters", "islamists", + "islamistisk", "islamists", + "isntruments", "instruments", + "jacksonvile", "jacksonville", + "jailbroaken", "jailbroken", + "jailbrocken", "jailbroken", + "jounralists", "journalists", + "jouranlists", "journalists", + "journalisim", "journalism", + "journalistc", "journalistic", + "journolists", "journalists", + "judegmental", "judgemental", + "judgamental", "judgemental", + "judgementle", "judgemental", + "judgementsl", "judgemental", + "judgenental", "judgemental", + "jugdemental", "judgemental", + "juggernaugt", "juggernaut", + "juggernault", "juggernaut", + "juggernaunt", "juggernaut", + "justifyable", "justifiable", + "kidnappning", "kidnapping", + "kidnappping", "kidnapping", + "kilometeres", "kilometers", + "kindergaten", "kindergarten", + "knowledgabe", "knowledgable", + "knowledgble", "knowledgable", + "kryptoninte", "kryptonite", + "lacklusture", "lackluster", + "laughablely", "laughably", + "legalizaing", "legalizing", + "legalizaton", "legalization", + "legalizeing", "legalizing", + "legenadries", "legendaries", + "legendaires", "legendaries", + "legendarios", "legendaries", + "legendarisk", "legendaries", + "legendaryes", "legendaries", + "legenderies", "legendaries", + "legilsation", "legislation", + "legislacion", "legislation", + "legislativo", "legislation", + "legistation", "legislation", + "legistative", "legislative", + "legistators", "legislators", + "legitematly", "legitimately", + "legitimancy", "legitimacy", + "legitimatcy", "legitimacy", + "legitimatly", "legitimately", + "legitimetly", "legitimately", + "legnedaries", "legendaries", + "lengedaries", "legendaries", + "liberalisim", "liberalism", + "liberatrian", "libertarians", + "libertairan", "libertarians", + "libertarain", "libertarian", + "libertarias", "libertarians", + "libertarien", "libertarian", + "libertaryan", "libertarian", + "libertatian", "libertarian", + "liberterian", "libertarian", + "libguistics", "linguistics", + "libretarian", "libertarian", + "lieutennant", "lieutenant", + "lieutentant", "lieutenant", + "lightenning", "lightening", + "lightenting", "lightening", + "lightheared", "lighthearted", + "lightheated", "lighthearted", + "lightweigth", "lightweight", + 
"lightwieght", "lightweight", + "lightwright", "lightweight", + "ligthweight", "lightweight", + "limitaitons", "limitation", + "linguisitcs", "linguistics", + "linguisitic", "linguistic", + "lingusitics", "linguistics", + "lithuaninan", "lithuania", + "littlefiger", "littlefinger", + "littlefiner", "littlefinger", + "lockscreeen", "lockscreen", + "longevitity", "longevity", + "lotharingen", "lothringen", + "louisvillle", "louisville", + "maginficent", "magnificent", + "magneficent", "magnificent", + "magnicifent", "magnificent", + "magnifacent", "magnificent", + "magnifecent", "magnificent", + "magnificant", "magnificent", + "magnitudine", "magnitude", + "maintainted", "maintained", + "maintanance", "maintenance", + "maintanence", "maintenance", + "maintenence", "maintenance", + "maintianing", "maintaining", + "maintinaing", "maintaining", + "maintinance", "maintenance", + "maintinence", "maintenance", + "malfonction", "malfunction", + "malfucntion", "malfunction", + "malfunciton", "malfunction", + "malfuncting", "malfunction", + "malfunktion", "malfunction", + "malpractise", "malpractice", + "malpractive", "malpractice", + "maneouvring", "manoeuvring", + "manifestado", "manifesto", + "manifestano", "manifesto", + "manifestato", "manifesto", + "manifestion", "manifesto", + "manifestior", "manifesto", + "manifestons", "manifests", + "manifestors", "manifests", + "manipluated", "manipulated", + "manipualted", "manipulated", + "manipulatie", "manipulative", + "manipulatin", "manipulation", + "manipulaton", "manipulation", + "maniuplated", "manipulated", + "mannerisims", "mannerisms", + "manslaugher", "manslaughter", + "manslaugter", "manslaughter", + "manufacters", "manufactures", + "manufacteur", "manufactures", + "manufactued", "manufactured", + "manufactuer", "manufacture", + "manufacturs", "manufactures", + "manufacuter", "manufacture", + "manufacutre", "manufactures", + "manufatured", "manufactured", + "manupilated", "manipulated", + "marganilize", "marginalized", + "marhsmallow", "marshmallow", + "marijuannas", "marijuana", + "markerplace", "marketplace", + "marketpalce", "marketplace", + "marshamllow", "marshmallow", + "marshmalows", "marshmallows", + "masculanity", "masculinity", + "masculenity", "masculinity", + "masrhmallow", "marshmallow", + "mastermined", "mastermind", + "masterpeace", "masterpiece", + "masterpeice", "masterpiece", + "mastrubated", "masturbated", + "mastrubates", "masturbate", + "masturabted", "masturbated", + "masturbaing", "masturbating", + "masturbarte", "masturbate", + "masturbathe", "masturbated", + "masturbatie", "masturbated", + "masturbatin", "masturbation", + "masturbaton", "masturbation", + "masturbsted", "masturbated", + "masturpiece", "masterpiece", + "masuclinity", "masculinity", + "matchamking", "matchmaking", + "materalists", "materialist", + "materialsim", "materialism", + "mathamatics", "mathematics", + "mathcmaking", "matchmaking", + "mathemagics", "mathematics", + "mathemetics", "mathematics", + "mathimatics", "mathematics", + "matieralism", "materialism", + "maybelleine", "maybelline", + "maybelliene", "maybelline", + "maybellinne", "maybelline", + "maybellline", "maybelline", + "mdifielders", "midfielders", + "meatballers", "meatballs", + "mecernaries", "mercenaries", + "mechanicaly", "mechanically", + "mechanichal", "mechanical", + "mechaniclly", "mechanically", + "mechanicsms", "mechanisms", + "mechanisims", "mechanism", + "mechanismus", "mechanisms", + "medicaitons", "medications", + "medicineras", "medicines", + "meditatiing", 
"meditating", + "meditationg", "meditating", + "mentionning", "mentioning", + "mercanaries", "mercenaries", + "mercaneries", "mercenaries", + "mercenaires", "mercenaries", + "mercenarias", "mercenaries", + "mercenarios", "mercenaries", + "merceneries", "mercenaries", + "merchandice", "merchandise", + "merchandies", "merchandise", + "merchanidse", "merchandise", + "merchanters", "merchants", + "merchendise", "merchandise", + "merchindise", "merchandise", + "mercinaries", "mercenaries", + "mercineries", "mercenaries", + "metabolisim", "metabolism", + "metabolitic", "metabolic", + "metaphisics", "metaphysics", + "metaphorial", "metaphorical", + "metaphorics", "metaphors", + "metaphsyics", "metaphysics", + "metaphyiscs", "metaphysics", + "methodoligy", "methodology", + "metholodogy", "methodology", + "metropolian", "metropolitan", + "metropolies", "metropolis", + "metropollis", "metropolis", + "metropolois", "metropolis", + "micorcenter", "microcenter", + "micorphones", "microphones", + "microcender", "microcenter", + "microcentre", "microcenter", + "microcentro", "microcenter", + "microhpones", "microphones", + "microscrope", "microscope", + "microwavees", "microwaves", + "microwavers", "microwaves", + "midfeilders", "midfielders", + "midfiedlers", "midfielders", + "midfileders", "midfielders", + "midifelders", "midfielders", + "millienaire", "millionaire", + "millionairs", "millionaires", + "millionarie", "millionaire", + "millioniare", "millionaire", + "mindlessely", "mindlessly", + "mindlessley", "mindlessly", + "minimalstic", "minimalist", + "ministerens", "ministers", + "ministerios", "ministers", + "minneaoplis", "minneapolis", + "minneaplois", "minneapolis", + "minniapolis", "minneapolis", + "miraculaous", "miraculous", + "miraculosly", "miraculously", + "miraculousy", "miraculously", + "mircocenter", "microcenter", + "mircophones", "microphones", + "mircoscopic", "microscopic", + "miscairrage", "miscarriage", + "miscarraige", "miscarriage", + "miscarridge", "miscarriage", + "miscarriege", "miscarriage", + "mischeivous", "mischievous", + "mischevious", "mischievous", + "misdameanor", "misdemeanor", + "misdeamenor", "misdemeanor", + "misdemeaner", "misdemeanor", + "misdemenaor", "misdemeanor", + "misdemenors", "misdemeanors", + "misdimeanor", "misdemeanor", + "misdomeanor", "misdemeanor", + "miserablely", "miserably", + "misfortunte", "misfortune", + "misimformed", "misinformed", + "misinterept", "misinterpret", + "misinterpet", "misinterpret", + "misoginysts", "misogynist", + "misognyists", "misogynist", + "misogyinsts", "misogynist", + "misogynisic", "misogynistic", + "misogynistc", "misogynistic", + "misogynstic", "misogynist", + "missionaire", "missionaries", + "missionairy", "missionary", + "missionares", "missionaries", + "missionaris", "missionaries", + "missionarry", "missionary", + "missionnary", "missionary", + "mississipis", "mississippi", + "misspeeling", "misspelling", + "misspellled", "misspelled", + "mistakengly", "mistakenly", + "mistakently", "mistakenly", + "moderatedly", "moderately", + "moderateurs", "moderates", + "moderatorin", "moderation", + "modificaton", "modification", + "moisterizer", "moisturizer", + "moistruizer", "moisturizer", + "moisturizng", "moisturizing", + "moisturizor", "moisturizer", + "moistutizer", "moisturizer", + "moisutrizer", "moisturizer", + "moleculaire", "molecular", + "molestating", "molestation", + "moleststion", "molestation", + "momemtarily", "momentarily", + "momentairly", "momentarily", + "momentaraly", "momentarily", + 
"momentarely", "momentarily", + "momenterily", "momentarily", + "monestaries", "monasteries", + "monitoreada", "monitored", + "monitoreado", "monitored", + "monogameous", "monogamous", + "monolitihic", "monolithic", + "monopollies", "monopolies", + "monstorsity", "monstrosity", + "monstrasity", "monstrosity", + "monstrisity", "monstrosity", + "monstrocity", "monstrosity", + "monstrosoty", "monstrosity", + "monstrostiy", "monstrosity", + "monumentaal", "monumental", + "monumentais", "monuments", + "monumentals", "monuments", + "monumentous", "monuments", + "mositurizer", "moisturizer", + "mosntrosity", "monstrosity", + "motehrboard", "motherboard", + "mothebroard", "motherboards", + "motherbaord", "motherboard", + "motherboads", "motherboards", + "motherboars", "motherboards", + "motherborad", "motherboard", + "motherbords", "motherboards", + "motherobard", "motherboards", + "mothreboard", "motherboards", + "motivatinal", "motivational", + "motorcicles", "motorcycles", + "motorcylces", "motorcycles", + "mouthpeices", "mouthpiece", + "mulitplayer", "multiplayer", + "mulitplying", "multiplying", + "multipalyer", "multiplayer", + "multiplater", "multiplayer", + "multiplebgs", "multiples", + "multipleies", "multiples", + "multitaskng", "multitasking", + "multitudine", "multitude", + "multiverese", "multiverse", + "multyplayer", "multiplayer", + "multyplying", "multiplying", + "muncipality", "municipality", + "murdererous", "murderers", + "musicallity", "musically", + "mutliplayer", "multiplayer", + "mutliplying", "multiplying", + "mysterieuse", "mysteries", + "mysteriosly", "mysteriously", + "mysteriouly", "mysteriously", + "mysteriousy", "mysteriously", + "napoleonian", "napoleonic", + "narcisissim", "narcissism", + "narcisissts", "narcissist", + "narcisscism", "narcissism", + "narcisscist", "narcissist", + "narcissisim", "narcissism", + "narcississm", "narcissism", + "narcississt", "narcissist", + "narcissistc", "narcissistic", + "narcissitic", "narcissistic", + "narcisssism", "narcissism", + "narcisssist", "narcissist", + "narcissstic", "narcissist", + "natioanlist", "nationalist", + "nationailty", "nationality", + "nationalesl", "nationals", + "nationalisn", "nationals", + "nationalite", "nationalist", + "nationalits", "nationalist", + "nationalizm", "nationalism", + "nationalsim", "nationalism", + "neccesarily", "necessarily", + "necessairly", "necessarily", + "necessaties", "necessities", + "necesseraly", "necessarily", + "necesserily", "necessarily", + "necessiates", "necessities", + "necessitive", "necessities", + "neckbeardos", "neckbeards", + "neckbeardus", "neckbeards", + "necormancer", "necromancer", + "necromamcer", "necromancer", + "necromanser", "necromancer", + "necromencer", "necromancer", + "needlessley", "needlessly", + "negativeity", "negativity", + "negativelly", "negatively", + "negativitiy", "negativity", + "negiotating", "negotiating", + "negligiable", "negligible", + "negociating", "negotiating", + "negociation", "negotiation", + "negoitating", "negotiating", + "negoitation", "negotiation", + "negotiatied", "negotiate", + "negotiative", "negotiate", + "negotiatons", "negotiations", + "neigborhood", "neighborhood", + "neigbouring", "neighbouring", + "neighborhod", "neighborhood", + "neighbourgs", "neighbours", + "neighouring", "neighboring", + "nercomancer", "necromancer", + "nessasarily", "necessarily", + "neurologial", "neurological", + "neurosciene", "neuroscience", + "neutrallity", "neutrality", + "neverthelss", "nevertheless", + "neverthless", "nevertheless", + 
"newspapaers", "newspapers", + "newspappers", "newspapers", + "nieghboring", "neighboring", + "nightmarket", "nightmare", + "nonsencical", "nonsensical", + "nonsenscial", "nonsensical", + "nonsensicle", "nonsensical", + "normallized", "normalized", + "northwesten", "northwestern", + "nostalgisch", "nostalgic", + "noteworthly", "noteworthy", + "noticeabley", "noticeably", + "notificaton", "notification", + "notoriuosly", "notoriously", + "numericable", "numerical", + "nurtitional", "nutritional", + "nutricional", "nutritional", + "nutrutional", "nutritional", + "obamination", "abomination", + "obersvation", "observation", + "obilterated", "obliterated", + "objectivety", "objectivity", + "objectivify", "objectivity", + "objectivily", "objectivity", + "objectivley", "objectively", + "obliberated", "obliterated", + "obliderated", "obliterated", + "obligerated", "obliterated", + "oblitarated", "obliterated", + "obliteraded", "obliterated", + "obliterared", "obliterated", + "oblitirated", "obliterated", + "oblitorated", "obliterated", + "obliverated", "obliterated", + "observacion", "observation", + "observaiton", "observant", + "observasion", "observations", + "observating", "observation", + "observerats", "observers", + "obsessivley", "obsessive", + "obstruccion", "obstruction", + "obstruktion", "obstruction", + "obsturction", "obstruction", + "obversation", "observation", + "ocasionally", "occasionally", + "ocassionaly", "occasionally", + "occasionals", "occasions", + "occasionaly", "occasionally", + "occasionnal", "occasional", + "occassional", "occasional", + "occassioned", "occasioned", + "occurrances", "occurrences", + "offensivley", "offensively", + "offesnively", "offensively", + "officiallly", "officially", + "olbiterated", "obliterated", + "omniscienct", "omniscient", + "operacional", "operational", + "operasional", "operational", + "operationel", "operational", + "oppresssing", "oppressing", + "oppresssion", "oppression", + "opprotunity", "opportunity", + "optimisitic", "optimistic", + "optimizaton", "optimization", + "optmization", "optimization", + "orchestraed", "orchestrated", + "orchestrial", "orchestra", + "oreintation", "orientation", + "organisaton", "organisation", + "organiserad", "organised", + "organistion", "organisation", + "organizarea", "organizer", + "organizarem", "organizer", + "organizarme", "organizer", + "organizarte", "organizer", + "organiztion", "organization", + "oridinarily", "ordinarily", + "orientacion", "orientation", + "originially", "originally", + "originnally", "originally", + "origniality", "originality", + "ostensiably", "ostensibly", + "ostensibily", "ostensibly", + "outclasssed", "outclassed", + "outnunbered", "outnumbered", + "outperfroms", "outperform", + "outpreforms", "outperform", + "outrageosly", "outrageously", + "outrageouly", "outrageously", + "outragerous", "outrageous", + "outskirters", "outskirts", + "outsorucing", "outsourcing", + "outsourcade", "outsourced", + "outsoursing", "outsourcing", + "overbraking", "overbearing", + "overcapping", "overlapping", + "overcharing", "overarching", + "overclcoked", "overclocked", + "overclicked", "overclocked", + "overcloaked", "overclocked", + "overclocing", "overclocking", + "overclockig", "overclocking", + "overclocled", "overclocked", + "overcomeing", "overcoming", + "overcomming", "overcoming", + "overeaching", "overarching", + "overfapping", "overlapping", + "overheading", "overheating", + "overhooking", "overlooking", + "overhwelmed", "overwhelmed", + "overkapping", "overlapping", + 
"overklocked", "overclocked", + "overlapsing", "overlapping", + "overlcocked", "overclocked", + "overlcoking", "overlooking", + "overlooming", "overlooking", + "overloooked", "overlooked", + "overlordess", "overlords", + "overmapping", "overlapping", + "overpooling", "overlooking", + "overpovered", "overpowered", + "overpoweing", "overpowering", + "overreacing", "overreacting", + "overreactin", "overreaction", + "overreacton", "overreaction", + "overshaddow", "overshadowed", + "overshadowd", "overshadowed", + "overtapping", "overlapping", + "overthining", "overthinking", + "overthinkig", "overthinking", + "overvlocked", "overclocked", + "overwealmed", "overwhelmed", + "overwelming", "overwhelming", + "overwhelemd", "overwhelmed", + "overwhelimg", "overwhelm", + "overwheling", "overwhelming", + "overwhemled", "overwhelmed", + "overwhlemed", "overwhelmed", + "overwritted", "overwrite", + "pakistanais", "pakistani", + "pakistanezi", "pakistani", + "palceholder", "placeholder", + "palesitnian", "palestinians", + "palestenian", "palestinian", + "palestinain", "palestinians", + "palestinans", "palestinians", + "palestinier", "palestine", + "palistinian", "palestinian", + "palythrough", "playthrough", + "papanicalou", "papanicolaou", + "parachutage", "parachute", + "paragraphes", "paragraphs", + "paramedicks", "paramedics", + "paramedicos", "paramedics", + "parameteres", "parameters", + "paranthesis", "parenthesis", + "parapharsed", "paraphrase", + "paraprhased", "paraphrase", + "parasitisme", "parasites", + "parenthasis", "parenthesis", + "parenthesys", "parentheses", + "parenthises", "parenthesis", + "parenthisis", "parenthesis", + "parliamenty", "parliamentary", + "parntership", "partnership", + "parrallelly", "parallelly", + "partecipant", "participant", + "partecipate", "participate", + "parternship", "partnership", + "partiarchal", "patriarchal", + "particapate", "participate", + "particiapte", "participate", + "participait", "participant", + "participans", "participants", + "participare", "participate", + "participatd", "participant", + "participati", "participant", + "participats", "participant", + "participent", "participant", + "particpiate", "participated", + "particually", "particularly", + "particulaly", "particularly", + "particulary", "particularly", + "partnetship", "partnership", + "partonizing", "patronizing", + "passionatly", "passionately", + "passionetly", "passionately", + "passionnate", "passionate", + "passporters", "passports", + "pathologial", "pathological", + "patriarchia", "patriarchal", + "patriarcial", "patriarchal", + "patriarical", "patriarchal", + "patriotisch", "patriotic", + "patriotisim", "patriotism", + "patriottism", "patriotism", + "patronozing", "patronizing", + "peacefullly", "peacefully", + "pedestirans", "pedestrians", + "pedestrains", "pedestrians", + "pedophilies", "pedophile", + "pedophilles", "pedophile", + "penetracion", "penetration", + "penetrading", "penetrating", + "penetrarion", "penetration", + "penninsular", "peninsular", + "pennsylvnia", "pennsylvania", + "pepperocini", "pepperoni", + "percantages", "percentages", + "percautions", "precautions", + "percentille", "percentile", + "percpetions", "perceptions", + "percusssion", "percussion", + "perdicament", "predicament", + "perdictable", "predictable", + "perdictions", "predictions", + "perephirals", "peripherals", + "pereptually", "perpetually", + "perferences", "preferences", + "perfomrance", "performances", + "perforamnce", "performances", + "performaces", "performances", + "performacne", 
"performances", + "performanes", "performances", + "performanse", "performances", + "performence", "performance", + "performnace", "performances", + "perfromance", "performance", + "perhiperals", "peripherals", + "perihperals", "peripherals", + "periodicaly", "periodically", + "periperhals", "peripherals", + "periphereal", "peripheral", + "peripherial", "peripheral", + "periphirals", "peripherals", + "periphreals", "peripherals", + "periphrials", "peripherals", + "perjorative", "pejorative", + "perliminary", "preliminary", + "permamently", "permanently", + "permanantly", "permanently", + "permaturely", "prematurely", + "permenantly", "permanently", + "permenently", "permanently", + "perminantly", "permanently", + "perminently", "permanently", + "permisisons", "permissions", + "permissable", "permissible", + "permisssion", "permissions", + "pernamently", "permanently", + "perosnality", "personality", + "perparation", "preparation", + "perpatrated", "perpetrated", + "perpatrator", "perpetrator", + "perpatuated", "perpetuated", + "perpatuates", "perpetuates", + "perpertated", "perpetuated", + "perpertator", "perpetrators", + "perpetraded", "perpetrated", + "perpetrador", "perpetrator", + "perpetraron", "perpetrator", + "perpetrater", "perpetrator", + "perpetuaded", "perpetuated", + "perpetutate", "perpetuate", + "perpetuties", "perpetuates", + "perpitrated", "perpetrated", + "perpitrator", "perpetrator", + "perpretated", "perpetrated", + "perpretator", "perpetrators", + "perpsective", "perspective", + "perputrator", "perpetrator", + "perputually", "perpetually", + "perputuated", "perpetuated", + "perputuates", "perpetuates", + "perrogative", "prerogative", + "persceptive", "perspectives", + "persectuion", "persecution", + "persecucion", "persecution", + "persecusion", "persecution", + "persecutted", "persecuted", + "persepctive", "perspective", + "persicution", "persecution", + "persistance", "persistence", + "persistante", "persistent", + "persistense", "persistence", + "persistente", "persistence", + "personhoood", "personhood", + "perspecitve", "perspective", + "perspectief", "perspective", + "perspektive", "perspective", + "persuassion", "persuasion", + "persuassive", "persuasive", + "persucution", "persecution", + "persumption", "presumption", + "pertubation", "perturbation", + "pessimestic", "pessimistic", + "pharamcists", "pharmacist", + "phenomenona", "phenomena", + "philadelpha", "philadelphia", + "philadelpia", "philadelphia", + "philiphines", "philippines", + "philippenes", "philippines", + "philippenis", "philippines", + "philippides", "philippines", + "philippinas", "philippines", + "philippinos", "philippines", + "philisopher", "philosopher", + "phillipines", "philippines", + "philosipher", "philosopher", + "philosopers", "philosophers", + "philosophae", "philosopher", + "philosophia", "philosophical", + "philosopies", "philosophies", + "philosphies", "philosophies", + "philospoher", "philosopher", + "photograhed", "photographed", + "photograher", "photographer", + "photograhic", "photographic", + "photograhpy", "photography", + "photograped", "photographed", + "photograper", "photographer", + "photograpgh", "photographs", + "photograpic", "photographic", + "photogrpahs", "photographs", + "photogrpahy", "photography", + "physcedelic", "psychedelic", + "physciatric", "psychiatric", + "physcopaths", "psychopaths", + "piankillers", "painkillers", + "pilgrimmage", "pilgrimage", + "pitchforcks", "pitchforks", + "pitchforkes", "pitchforks", + "plaestinian", "palestinian", + 
"plagiariasm", "plagiarism", + "planeswaker", "planeswalker", + "planeswaler", "planeswalker", + "planeswalkr", "planeswalker", + "platfromers", "platformer", + "playhtrough", "playthrough", + "playthorugh", "playthrough", + "playthourgh", "playthrough", + "playthroguh", "playthroughs", + "playthrougs", "playthroughs", + "playthrouhg", "playthroughs", + "playthtough", "playthrough", + "playtrhough", "playthrough", + "ploretariat", "proletariat", + "policitally", "politically", + "policitians", "politicians", + "politicains", "politicians", + "politicanti", "politician", + "politiciens", "politicians", + "politiicans", "politician", + "polititians", "politicians", + "polyphonyic", "polyphonic", + "pomegranite", "pomegranate", + "popluations", "populations", + "poportional", "proportional", + "popoulation", "population", + "porjectiles", "projectiles", + "porletariat", "proletariat", + "pornagraphy", "pornography", + "pornograghy", "pornography", + "pornograhpy", "pornography", + "pornograpgy", "pornography", + "pornogrophy", "pornography", + "pornogrpahy", "pornography", + "porportions", "proportions", + "portestants", "protestants", + "portuguease", "portuguese", + "portuguesse", "portuguese", + "positionial", "positional", + "positionnal", "positional", + "positionned", "positioned", + "positiveity", "positivity", + "positiviely", "positively", + "positivisme", "positives", + "positivisty", "positivity", + "positivitey", "positivity", + "positivitiy", "positivity", + "possesseurs", "possesses", + "possesssion", "possessions", + "possestions", "possessions", + "possiblilty", "possibility", + "potencially", "potentially", + "potentailly", "potentially", + "powerhourse", "powerhouse", + "powerlifing", "powerlifting", + "powerliftng", "powerlifting", + "pracitcally", "practically", + "practicarlo", "practical", + "practioners", "practitioners", + "practitions", "practitioners", + "pragmatisch", "pragmatic", + "precausions", "precautions", + "precedessor", "predecessor", + "precendence", "precedence", + "precentages", "percentages", + "preconceved", "preconceived", + "preconcieve", "preconceived", + "precuations", "precautions", + "predacessor", "predecessor", + "predecesser", "predecessor", + "predections", "predictions", + "predescesor", "predecessors", + "predesessor", "predecessors", + "predesposed", "predisposed", + "predessecor", "predecessor", + "predicatble", "predictable", + "predicement", "predicament", + "predicessor", "predecessor", + "prediciment", "predicament", + "predicitons", "predictions", + "predictible", "predictable", + "predictious", "predictions", + "predictment", "predicament", + "predisposte", "predisposed", + "predocessor", "predecessor", + "preferabbly", "preferably", + "preferabely", "preferable", + "preferabley", "preferably", + "preferablly", "preferably", + "preferances", "preferences", + "preferenser", "preferences", + "preferental", "preferential", + "preferentes", "preferences", + "preferrably", "preferably", + "preferrring", "preferring", + "preformance", "performance", + "pregnanices", "pregnancies", + "pregnencies", "pregnancies", + "pregorative", "prerogative", + "preipherals", "peripherals", + "prejudicies", "prejudice", + "preleminary", "preliminary", + "prelimanary", "preliminary", + "prelimenary", "preliminary", + "premanently", "permanently", + "prematuraly", "prematurely", + "prematurily", "prematurely", + "prematurley", "prematurely", + "premilinary", "preliminary", + "premissible", "permissible", + "premissions", "permissions", + "preorderded", 
"preordered", + "preorderers", "preorders", + "preparacion", "preparation", + "preperation", "preparation", + "prepetrated", "perpetrated", + "prepetrator", "perpetrator", + "prepetually", "perpetually", + "prepetuated", "perpetuated", + "prepetuates", "perpetuates", + "preporation", "preparation", + "preposterus", "preposterous", + "prerequesit", "prerequisite", + "prerequiste", "prerequisite", + "prerequites", "prerequisite", + "prerogitive", "prerogative", + "prerogotive", "prerogative", + "prescripton", "prescription", + "presecution", "persecution", + "presedintia", "presidential", + "presentaion", "presentation", + "presentatin", "presentations", + "preservaton", "preservation", + "preservered", "preserved", + "presidencey", "presidency", + "presidental", "presidential", + "presidentcy", "presidency", + "presistence", "persistence", + "presitgious", "prestigious", + "presitigous", "prestigious", + "presomption", "presumption", + "prespective", "perspective", + "pressureing", "pressuring", + "prestegious", "prestigious", + "prestigeous", "prestigious", + "prestigieus", "prestigious", + "prestigiosa", "prestigious", + "prestigiose", "prestigious", + "prestigiosi", "prestigious", + "prestigioso", "prestigious", + "prestiguous", "prestigious", + "presumabely", "presumably", + "presumabley", "presumably", + "presumptous", "presumptuous", + "presumptuos", "presumptuous", + "pretencious", "pretentious", + "pretendendo", "pretended", + "pretensious", "pretentious", + "pretentieus", "pretentious", + "prevailaing", "prevailing", + "prevailling", "prevailing", + "preventitve", "preventative", + "preventivno", "prevention", + "primatively", "primitively", + "princessses", "princesses", + "principales", "principles", + "principalis", "principals", + "principielt", "principle", + "privatizied", "privatized", + "priveledges", "privileges", + "privelleges", "privileges", + "privilegeds", "privileges", + "privilegied", "privileged", + "privilegien", "privilege", + "privilegier", "privilege", + "privilegies", "privilege", + "proactivley", "proactive", + "probabilaty", "probability", + "probabilite", "probabilities", + "probalibity", "probability", + "probelmatic", "problematic", + "problamatic", "problematic", + "problimatic", "problematic", + "problomatic", "problematic", + "proccedings", "proceedings", + "proccessing", "processing", + "proceddings", "proceedings", + "procedureal", "procedural", + "procedurial", "procedural", + "procedurile", "procedure", + "processesor", "processors", + "processeurs", "processes", + "processsors", "processors", + "procrastion", "procreation", + "procriation", "procreation", + "prodcutions", "productions", + "prodictions", "productions", + "producerats", "producers", + "producitons", "productions", + "productioin", "productions", + "productivos", "productions", + "productivty", "productivity", + "produktions", "productions", + "professinal", "professional", + "professionl", "professionals", + "professoras", "professors", + "professores", "professors", + "professorin", "profession", + "professsion", "professions", + "proficiancy", "proficiency", + "proficienct", "proficient", + "proficienty", "proficiency", + "proficinecy", "proficiency", + "profitabile", "profitable", + "progerssion", "progressions", + "progerssive", "progressives", + "programable", "programmable", + "programmare", "programmer", + "programmars", "programmers", + "programmate", "programme", + "programmets", "programmers", + "programmeur", "programmer", + "programmier", "programmer", + "programmmed", 
"programme", + "programmmer", "programme", + "progresison", "progressions", + "progressers", "progresses", + "progressief", "progressive", + "progressino", "progressions", + "progressivo", "progression", + "progressoin", "progressions", + "progressvie", "progressives", + "prohabition", "prohibition", + "prohibation", "prohibition", + "prohibicion", "prohibition", + "prohibiteds", "prohibits", + "prohibitied", "prohibited", + "prohibitifs", "prohibits", + "prohibitivo", "prohibition", + "prohibitons", "prohibits", + "prohibitted", "prohibited", + "projecticle", "projectile", + "projectives", "projectiles", + "projectlies", "projectiles", + "prolateriat", "proletariat", + "proletariet", "proletariat", + "proletariot", "proletariat", + "proletaryat", "proletariat", + "proleteriat", "proletariat", + "prolitariat", "proletariat", + "prologomena", "prolegomena", + "promenantly", "prominently", + "promenently", "prominently", + "prometheius", "prometheus", + "prometheous", "prometheus", + "promethesus", "prometheus", + "prometheyus", "prometheus", + "promimently", "prominently", + "prominantly", "prominently", + "prominately", "prominently", + "promiscious", "promiscuous", + "promocional", "promotional", + "promsicuous", "promiscuous", + "pronography", "pornography", + "pronoucning", "pronouncing", + "pronounched", "pronounced", + "pronunciato", "pronunciation", + "propaganada", "propaganda", + "properitary", "proprietary", + "propertiary", "proprietary", + "propertions", "proportions", + "prophechies", "prophecies", + "propiertary", "proprietary", + "propogation", "propagation", + "proponenets", "proponents", + "proponentes", "proponents", + "proporition", "proposition", + "proportians", "proportions", + "proportinal", "proportional", + "proposicion", "proposition", + "propositivo", "proposition", + "propostions", "proportions", + "propreitary", "proprietary", + "propriatary", "proprietary", + "propriatery", "proprietary", + "propriatory", "proprietary", + "proprietery", "proprietary", + "proprietory", "proprietary", + "propriotary", "proprietary", + "proprotions", "proportions", + "propsective", "prospective", + "propulstion", "propulsion", + "prosectuion", "prosecution", + "prosectuors", "prosecutors", + "prosecuters", "prosecutors", + "prosicution", "prosecution", + "prosocution", "prosecution", + "prosperious", "prosperous", + "prospertity", "prosperity", + "prospettive", "prospective", + "prostethics", "prosthetic", + "prosthethic", "prosthetic", + "prostitites", "prostitutes", + "prostitiute", "prostitute", + "prostituate", "prostitute", + "prostitudes", "prostitutes", + "prostituees", "prostitutes", + "prostituion", "prostitution", + "prostitures", "prostitutes", + "prostitutas", "prostitutes", + "prostitutie", "prostitute", + "prostitutin", "prostitution", + "prostitutke", "prostitutes", + "prostituton", "prostitution", + "prostitutos", "prostitutes", + "protability", "portability", + "protaganist", "protagonist", + "protaginist", "protagonist", + "protagnoist", "protagonist", + "protagoinst", "protagonists", + "protagonits", "protagonists", + "protagonsit", "protagonists", + "protectings", "protections", + "protectoras", "protectors", + "protectores", "protectors", + "protectrons", "protections", + "protelariat", "proletariat", + "protestents", "protestants", + "protistants", "protestants", + "protoganist", "protagonist", + "protogonist", "protagonist", + "protostants", "protestants", + "protototype", "prototype", + "provacative", "provocative", + "provacotive", "provocative", + 
"provicative", "provocative", + "providencie", "providence", + "provinciaal", "provincial", + "provinicial", "provincial", + "provisiones", "provisions", + "provoactive", "provocative", + "provocatief", "provocative", + "provocitive", "provocative", + "provocotive", "provocative", + "provokative", "provocative", + "pscyhedelic", "psychedelic", + "pscyhiatric", "psychiatric", + "pscyhopaths", "psychopaths", + "pshyciatric", "psychiatric", + "pshycopaths", "psychopaths", + "psychaitric", "psychiatric", + "psychedilic", "psychedelic", + "psychedleic", "psychedelics", + "psychiatist", "psychiatrist", + "psychidelic", "psychedelic", + "psychodelic", "psychedelic", + "psychopants", "psychopaths", + "psychopatch", "psychopath", + "psychopatic", "psychopathic", + "psychotisch", "psychotic", + "psychriatic", "psychiatric", + "publikation", "publication", + "punctiation", "punctuation", + "puncutation", "punctuation", + "punshiments", "punishments", + "punsihments", "punishments", + "purchaseing", "purchasing", + "purchashing", "purchasing", + "purposefuly", "purposefully", + "pyschedelic", "psychedelic", + "pyschiatric", "psychiatric", + "pyschopaths", "psychopaths", + "qaurterback", "quarterback", + "qualificato", "qualification", + "qualifieres", "qualifiers", + "quantitaive", "quantitative", + "quantitatve", "quantitative", + "quantitites", "quantities", + "quantitties", "quantities", + "quarantaine", "quarantine", + "quarantenni", "quarantine", + "quartercask", "quarterbacks", + "quesitoning", "questioning", + "questionned", "questioned", + "questonable", "questionable", + "radiaoctive", "radioactive", + "radioactice", "radioactive", + "radioactief", "radioactive", + "radioaktive", "radioactive", + "radiocative", "radioactive", + "raidoactive", "radioactive", + "reaccurring", "recurring", + "reactionair", "reactionary", + "realibility", "reliability", + "realistisch", "realistic", + "reaserchers", "researchers", + "reaserching", "researching", + "reasonabley", "reasonably", + "reasonablly", "reasonably", + "reassureing", "reassuring", + "reassurring", "reassuring", + "rebuildling", "rebuilding", + "rebuplicans", "republicans", + "reccomended", "recommended", + "receptionst", "receptionist", + "recgonition", "recognition", + "recgonizing", "recognizing", + "rechargable", "rechargeable", + "recipientes", "recipients", + "reciporcate", "reciprocate", + "recipricate", "reciprocate", + "reciprocant", "reciprocate", + "reciprocite", "reciprocate", + "recivership", "receivership", + "reclutantly", "reluctantly", + "recognicing", "recognizing", + "recognision", "recognition", + "recomending", "recommending", + "recommandes", "recommends", + "recommendes", "recommends", + "recommented", "recommended", + "reconcilled", "reconcile", + "recongition", "recognition", + "recongizing", "recognizing", + "reconsidder", "reconsider", + "recrational", "recreational", + "recrutiment", "recruitment", + "rectangluar", "rectangular", + "rectangualr", "rectangular", + "rectengular", "rectangular", + "recuritment", "recruitment", + "redundantcy", "redundancy", + "reevalulate", "reevaluate", + "reevalutate", "reevaluate", + "reevaulated", "reevaluate", + "refelctions", "reflections", + "referancing", "referencing", + "refereneced", "referenced", + "refereneces", "references", + "referincing", "referencing", + "referrences", "references", + "reflectivos", "reflections", + "refreshener", "refresher", + "refrubished", "refurbished", + "refubrished", "refurbished", + "refurbushed", "refurbished", + "regeneratin", 
"regeneration", + "regeneraton", "regeneration", + "registerdns", "registers", + "registeries", "registers", + "registerred", "registered", + "registraion", "registration", + "regocnition", "recognition", + "regresssion", "regression", + "regresssive", "regressive", + "regualtions", "regulations", + "regulationg", "regulating", + "regulatiors", "regulators", + "reinassance", "renaissance", + "reinforcemt", "reinforcement", + "reinfornced", "reinforced", + "reinitalise", "reinitialise", + "reinitalize", "reinitialize", + "reinstaling", "reinstalling", + "reinstallng", "reinstalling", + "reisntalled", "reinstalled", + "relaibility", "reliability", + "relatiation", "retaliation", + "relationshp", "relationships", + "relativiser", "relatives", + "relativisme", "relatives", + "relativitiy", "relativity", + "relativitly", "relativity", + "relcutantly", "reluctantly", + "relentlesly", "relentlessly", + "relentlessy", "relentlessly", + "relevations", "revelations", + "relfections", "reflections", + "religeously", "religiously", + "religionens", "religions", + "religioners", "religions", + "relpacement", "replacement", + "reluctently", "reluctantly", + "remarkabley", "remarkably", + "remarkablly", "remarkably", + "remasterred", "remastered", + "remembrence", "remembrance", + "reminescent", "reminiscent", + "reminicient", "reminiscent", + "reminiscant", "reminiscent", + "reminiscint", "reminiscent", + "reminscient", "reminiscent", + "reminsicent", "reminiscent", + "renaiisance", "renaissance", + "renaiscance", "renaissance", + "renaissanse", "renaissance", + "renaissence", "renaissance", + "renassaince", "renaissance", + "renassiance", "renaissance", + "reniassance", "renaissance", + "rennovating", "renovating", + "rennovation", "renovation", + "repalcement", "replacement", + "repbulicans", "republicans", + "repeateadly", "repeatedly", + "repectively", "respectively", + "repersented", "represented", + "replacemnet", "replacements", + "replacemnts", "replacements", + "repleacable", "replaceable", + "repositiory", "repository", + "representas", "represents", + "representes", "represents", + "represssion", "repression", + "reproducion", "reproduction", + "reproducive", "reproductive", + "repsectable", "respectable", + "repsonsible", "responsible", + "repsonsibly", "responsibly", + "republcians", "republicans", + "republician", "republican", + "republicons", "republicans", + "repuglicans", "republicans", + "requeriment", "requirement", + "requierment", "requirements", + "resemblence", "resemblance", + "resemblense", "resembles", + "reserachers", "researchers", + "reseraching", "researching", + "resgination", "resignation", + "residencial", "residential", + "residentail", "residential", + "residentual", "residential", + "resignacion", "resignation", + "resignating", "resignation", + "resignement", "resignment", + "resignition", "resignation", + "resintalled", "reinstalled", + "resistansen", "resistances", + "resistanses", "resistances", + "resistences", "resistances", + "resistnaces", "resistances", + "resoltuions", "resolutions", + "resotration", "restoration", + "resoultions", "resolutions", + "respecatble", "respectable", + "respectabil", "respectable", + "respectfuly", "respectfully", + "respectible", "respectable", + "respectivly", "respectively", + "respectuful", "respectful", + "respektable", "respectable", + "resperatory", "respiratory", + "resperitory", "respiratory", + "respiritory", "respiratory", + "respitatory", "respiratory", + "responcible", "responsible", + "responcibly", "responsibly", + 
"respondendo", "responded", + "responisble", "responsible", + "responisbly", "responsibly", + "responsable", "responsible", + "responsably", "responsibly", + "responsbile", "responsible", + "responsbily", "responsibly", + "responsibel", "responsibly", + "responsibil", "responsibly", + "responsivle", "responsive", + "resporatory", "respiratory", + "respository", "repository", + "respriatory", "respiratory", + "ressembling", "resembling", + "ressurected", "resurrected", + "restaraunts", "restaurants", + "restaruants", "restaurants", + "restauraunt", "restaurant", + "restaurents", "restaurants", + "resteraunts", "restaurants", + "restirction", "restriction", + "restorarion", "restoration", + "restorating", "restoration", + "restrainted", "restrained", + "restrective", "restrictive", + "restriccion", "restriction", + "restricitng", "restricting", + "restriciton", "restrictions", + "restricitve", "restrictive", + "restricteds", "restricts", + "restricters", "restricts", + "restrictied", "restrictive", + "restrictifs", "restricts", + "restrictins", "restricts", + "restrictios", "restricts", + "restrictivo", "restriction", + "restrictons", "restricts", + "restriktion", "restriction", + "restriktive", "restrictive", + "restrittive", "restrictive", + "restructing", "restricting", + "restruction", "restriction", + "restuarants", "restaurants", + "resturaunts", "restaurants", + "resurecting", "resurrecting", + "resurrecion", "resurrection", + "retailation", "retaliation", + "retalitated", "retaliated", + "retardathon", "retardation", + "retardating", "retardation", + "retardatron", "retardation", + "retartation", "retardation", + "retirbution", "retribution", + "retrebution", "retribution", + "retribucion", "retribution", + "retribuiton", "retribution", + "retributivo", "retribution", + "retribvtion", "retribution", + "retrobution", "retribution", + "retrubution", "retribution", + "revealtions", "revelations", + "revelaitons", "revelations", + "revolations", "revolutions", + "revoultions", "revolutions", + "ridiculious", "ridiculous", + "ridiculosly", "ridiculously", + "ridiculouly", "ridiculously", + "ridiculousy", "ridiculously", + "rightfullly", "rightfully", + "rolepalying", "roleplaying", + "romanticaly", "romantically", + "roundabaout", "roundabout", + "roundabount", "roundabout", + "rudimentery", "rudimentary", + "rudimentory", "rudimentary", + "ruidmentary", "rudimentary", + "sacrifacing", "sacrificing", + "sacrificare", "sacrifice", + "sacrificied", "sacrifice", + "sacrificies", "sacrifice", + "sacrifieced", "sacrificed", + "sacrifising", "sacrificing", + "sacrifizing", "sacrificing", + "salughtered", "slaughtered", + "sanctionned", "sanctioned", + "sarcastisch", "sarcastic", + "saskatchewn", "saskatchewan", + "saskatchwan", "saskatchewan", + "satisfacion", "satisfaction", + "satisfacory", "satisfactory", + "scandanavia", "scandinavia", + "scandanivia", "scandinavian", + "scandenavia", "scandinavia", + "scandianvia", "scandinavian", + "scandimania", "scandinavia", + "scandinaiva", "scandinavian", + "scandinavan", "scandinavian", + "scandivania", "scandinavian", + "scandonavia", "scandinavia", + "scarificing", "sacrificing", + "scheduleing", "scheduling", + "schedulling", "scheduling", + "schoalrship", "scholarships", + "scholarhips", "scholarship", + "scholarstic", "scholastic", + "scholership", "scholarship", + "scholorship", "scholarship", + "scientiests", "scientists", + "scnadinavia", "scandinavia", + "scrambleing", "scrambling", + "screenshoot", "screenshot", + "seamlessley", 
"seamlessly", + "sedentarity", "sedentary", + "seflishness", "selfishness", + "segergation", "segregation", + "segragation", "segregation", + "segregacion", "segregation", + "segretation", "segregation", + "segrigation", "segregation", + "selectivley", "selectively", + "selfeshness", "selfishness", + "senitmental", "sentimental", + "sensacional", "sensational", + "sensasional", "sensational", + "sensationel", "sensational", + "sensetional", "sensational", + "sensitivety", "sensitivity", + "sentamental", "sentimental", + "sentemental", "sentimental", + "sentenceing", "sentencing", + "sentimentos", "sentiments", + "sentimentul", "sentimental", + "separatedly", "separately", + "separatelly", "separately", + "separatisme", "separates", + "separatiste", "separates", + "sepculating", "speculating", + "serivceable", "serviceable", + "serviciable", "serviceable", + "settelement", "settlement", + "settelments", "settlements", + "settlemetns", "settlements", + "sexualizied", "sexualized", + "shakeapeare", "shakespeare", + "shakepseare", "shakespeare", + "shakesphere", "shakespeare", + "shanenigans", "shenanigans", + "shareholdes", "shareholders", + "sharpeneing", "sharpening", + "sharpenning", "sharpening", + "shatterling", "shattering", + "shatterring", "shattering", + "sheakspeare", "shakespeare", + "shenadigans", "shenanigans", + "shenanagans", "shenanigans", + "shenanagins", "shenanigans", + "shenanegans", "shenanigans", + "shenanegins", "shenanigans", + "shenangians", "shenanigans", + "shenanigens", "shenanigans", + "shenanigins", "shenanigans", + "shenenigans", "shenanigans", + "sheninigans", "shenanigans", + "shennaigans", "shenanigans", + "shortenning", "shortening", + "shortenting", "shortening", + "signficiant", "significant", + "signifantly", "significantly", + "significane", "significance", + "significato", "significant", + "signifigant", "significant", + "signifikant", "significant", + "signitories", "signatories", + "signularity", "singularity", + "similarites", "similarities", + "similarlity", "similarity", + "similiarity", "similarity", + "simluations", "simulations", + "simplefying", "simplifying", + "simplicitly", "simplicity", + "simplifiing", "simplifying", + "simplisitic", "simplistic", + "simplyifing", "simplifying", + "simualtions", "simulations", + "simulatious", "simulations", + "simultaneos", "simultaneous", + "simultaneus", "simultaneous", + "simultanous", "simultaneous", + "singluarity", "singularity", + "singualrity", "singularity", + "singulairty", "singularity", + "singularily", "singularity", + "sitautional", "situational", + "situacional", "situational", + "situationly", "situational", + "siutational", "situational", + "skatebaords", "skateboard", + "skateboader", "skateboard", + "skepticisim", "skepticism", + "skillshoots", "skillshots", + "skillshosts", "skillshots", + "slaugthered", "slaughtered", + "slefishness", "selfishness", + "sluaghtered", "slaughtered", + "smarthpones", "smartphones", + "snowboaring", "snowboarding", + "snowbolling", "snowballing", + "snowfalling", "snowballing", + "socailizing", "socializing", + "socialicing", "socializing", + "socialistes", "socialists", + "socialistos", "socialists", + "socializare", "socialize", + "sociapathic", "sociopathic", + "sociologial", "sociological", + "sociopathes", "sociopaths", + "sociopathis", "sociopaths", + "sociophatic", "sociopathic", + "solidariety", "solidarity", + "somethingis", "somethings", + "sorrounding", "surrounding", + "soundtrakcs", "soundtracks", + "southamtpon", "southampton", + 
"southanpton", "southampton", + "southapmton", "southampton", + "southernese", "southerners", + "southerness", "southerners", + "southernest", "southerners", + "southernors", "southerners", + "southmapton", "southampton", + "southtampon", "southampton", + "soveregnity", "sovereignty", + "sovereighty", "sovereignty", + "sovereingty", "sovereignty", + "sovereinity", "sovereignty", + "soveriegnty", "sovereignty", + "soveriengty", "sovereignty", + "soverignity", "sovereignty", + "specailists", "specialists", + "specailized", "specialized", + "specailizes", "specializes", + "specatcular", "spectacular", + "specialiced", "specialized", + "specialices", "specializes", + "specialites", "specializes", + "speciallist", "specialist", + "speciallity", "specially", + "speciallize", "specialize", + "specialzied", "specialized", + "specifcally", "specifically", + "specificaly", "specifically", + "specificato", "specification", + "specificies", "specifics", + "specifiying", "specifying", + "specilaized", "specialize", + "speciliazed", "specialize", + "spectatores", "spectators", + "spectatular", "spectacular", + "spectauclar", "spectacular", + "spectaulars", "spectaculars", + "spectecular", "spectacular", + "specualting", "speculating", + "specualtion", "speculation", + "specualtive", "speculative", + "specularite", "speculative", + "speculaties", "speculative", + "spiritualiy", "spiritually", + "spiritualty", "spirituality", + "spirituella", "spiritually", + "spirtiually", "spiritually", + "spirutually", "spiritually", + "spitirually", "spiritually", + "sponatenous", "spontaneous", + "sponatneous", "spontaneous", + "sponsership", "sponsorship", + "sponsorhips", "sponsorship", + "sponsorhsip", "sponsorship", + "sponsorshop", "sponsorship", + "spontaenous", "spontaneous", + "spontainous", "spontaneous", + "spontaneuos", "spontaneous", + "spontanious", "spontaneous", + "sponteanous", "spontaneous", + "sponteneous", "spontaneous", + "spreadhseet", "spreadsheet", + "spreadsheat", "spreadsheet", + "spreadshets", "spreadsheets", + "spreedsheet", "spreadsheet", + "springfeild", "springfield", + "springfiled", "springfield", + "sprinklered", "sprinkled", + "squirrelies", "squirrels", + "squirrelius", "squirrels", + "stabilizare", "stabilize", + "stabilizied", "stabilize", + "stabilizier", "stabilize", + "stabilizies", "stabilize", + "staggerring", "staggering", + "staggerwing", "staggering", + "stationairy", "stationary", + "stationerad", "stationed", + "stationnary", "stationary", + "statisitcal", "statistical", + "statisticly", "statistical", + "statistisch", "statistics", + "statsitical", "statistical", + "stereotpyes", "stereotypes", + "stereotying", "stereotyping", + "steriotypes", "stereotypes", + "steroetypes", "stereotypes", + "steryotypes", "stereotypes", + "stimluating", "stimulating", + "stimualting", "stimulating", + "stimualtion", "stimulation", + "stimulantes", "stimulants", + "stockpilled", "stockpile", + "stormfrount", "stormfront", + "storyteling", "storytelling", + "straightden", "straightened", + "straightend", "straightened", + "straightmen", "straighten", + "straightned", "straightened", + "straightner", "straighten", + "strangeshit", "strangest", + "strategisch", "strategic", + "strategiske", "strategies", + "strawberies", "strawberries", + "strawberrry", "strawberry", + "strawbrerry", "strawberry", + "strenghened", "strengthened", + "strenghtend", "strengthen", + "strenghtens", "strengthen", + "strengtened", "strengthened", + "structurels", "structures", + "strugglebus", "struggles", + 
"struggleing", "struggling", + "stubborness", "stubbornness", + "stutterring", "stuttering", + "subcatagory", "subcategory", + "subconscius", "subconscious", + "subconscous", "subconscious", + "subisdizing", "subsidizing", + "subjectivly", "subjectively", + "submergered", "submerged", + "submisisons", "submissions", + "subredddits", "subreddits", + "subscirbers", "subscribers", + "subscribbed", "subscribe", + "subscribber", "subscriber", + "subscriping", "subscribing", + "subscriptin", "subscriptions", + "subscripton", "subscription", + "subsequenty", "subsequently", + "subsidiezed", "subsidized", + "subsidizied", "subsidized", + "subsidizies", "subsidize", + "subsiziding", "subsidizing", + "subsquently", "subsequently", + "subsrcibers", "subscribers", + "substancial", "substantial", + "substansial", "substantial", + "substansive", "substantive", + "substantied", "substantive", + "substanties", "substantive", + "substential", "substantial", + "substitiute", "substitute", + "substituded", "substituted", + "substitudes", "substitutes", + "substituion", "substitution", + "substitures", "substitutes", + "substitutie", "substitutes", + "substitutos", "substitutes", + "substitutue", "substitutes", + "substracted", "subtracted", + "suburburban", "suburban", + "succesfully", "successfully", + "successeurs", "successes", + "successfull", "successful", + "successfuly", "successfully", + "successsion", "succession", + "successully", "successfully", + "succsesfull", "successfully", + "sucessfully", "successfully", + "sucseptible", "susceptible", + "sufficently", "sufficiently", + "suggestieve", "suggestive", + "sumbissions", "submissions", + "sunglassses", "sunglasses", + "superceeded", "superseded", + "superficiel", "superficial", + "superfulous", "superfluous", + "superhereos", "superhero", + "superifical", "superficial", + "superiorest", "superiors", + "supermacist", "supremacist", + "supermakert", "supermarkets", + "supermakret", "supermarkets", + "supermakter", "supermarkets", + "supermarkts", "supermarkets", + "supermaster", "supermarkets", + "supernatual", "supernatural", + "supersition", "supervision", + "superstiton", "superstition", + "supervisers", "supervisors", + "supervisior", "supervisor", + "suplimented", "supplemented", + "supplaments", "supplements", + "supplemetal", "supplemental", + "supporteurs", "supporters", + "supposedely", "supposedly", + "supposidely", "supposedly", + "supposingly", "supposedly", + "suppresions", "suppression", + "suppresssor", "suppressor", + "supramacist", "supremacist", + "supremacits", "supremacist", + "supremasist", "supremacist", + "supremicist", "supremacist", + "suprimacist", "supremacist", + "suprisingly", "surprisingly", + "suprizingly", "surprisingly", + "suroundings", "surroundings", + "surpemacist", "supremacist", + "surprisinly", "surprisingly", + "surreptious", "surreptitious", + "surroundign", "surroundings", + "surroundigs", "surrounds", + "surroundins", "surrounds", + "surroundngs", "surrounds", + "surveilence", "surveillance", + "survivabily", "survivability", + "susbtantial", "substantial", + "susbtantive", "substantive", + "suscepitble", "susceptible", + "susceptable", "susceptible", + "suscpetible", "susceptible", + "susecptible", "susceptible", + "suspectible", "susceptible", + "suspiciosly", "suspiciously", + "suspiciouly", "suspiciously", + "suspiciouns", "suspicion", + "suspicision", "suspicions", + "suspicisons", "suspicions", + "sustainible", "sustainable", + "switerzland", "switzerland", + "switserland", "switzerland", + 
"switzlerand", "switzerland", + "swizterland", "switzerland", + "swtizerland", "switzerland", + "symapthetic", "sympathetic", + "symmertical", "symmetrical", + "sympathatic", "sympathetic", + "sympathiers", "sympathizers", + "sympathsize", "sympathize", + "sympethetic", "sympathetic", + "symphatetic", "sympathetic", + "symphatized", "sympathize", + "symphatizer", "sympathizers", + "symphatizes", "sympathize", + "sympothetic", "sympathetic", + "synthesasia", "synthesis", + "synthesesia", "synthesis", + "sypmathetic", "sympathetic", + "tabelspoons", "tablespoons", + "tablepsoons", "tablespoons", + "tablespooon", "tablespoon", + "tablesppons", "tablespoons", + "tailgateing", "tailgating", + "tailgatting", "tailgating", + "tangentialy", "tangentially", + "techincally", "technically", + "techincians", "technicians", + "techiniques", "techniques", + "techncially", "technically", + "technicalty", "technicality", + "technichian", "technician", + "technicials", "technicians", + "techniciens", "technicians", + "technitians", "technicians", + "technnology", "technology", + "technologia", "technological", + "techticians", "technicians", + "teleportato", "teleportation", + "teleportion", "teleporting", + "teleproting", "teleporting", + "temeprature", "temperature", + "temparament", "temperament", + "temparature", "temperature", + "temparement", "temperament", + "tempearture", "temperatures", + "temperamant", "temperament", + "temperarily", "temporarily", + "temperatues", "temperatures", + "temperaturs", "temperatures", + "temperatuur", "temperature", + "temperement", "temperament", + "tempermeant", "temperament", + "tempertaure", "temperature", + "temporairly", "temporarily", + "temporaraly", "temporarily", + "temporarity", "temporarily", + "tempreature", "temperature", + "temproarily", "temporarily", + "tempurature", "temperature", + "tepmorarily", "temporarily", + "termanology", "terminology", + "terminacion", "termination", + "terminaison", "termination", + "terminalogy", "terminology", + "terminatior", "terminator", + "terminatorn", "termination", + "terminilogy", "terminology", + "terminoligy", "terminology", + "terratorial", "territorial", + "terratories", "territories", + "terretorial", "territorial", + "terretories", "territories", + "terrirorial", "territorial", + "terrirories", "territories", + "terriroties", "territories", + "terristrial", "territorial", + "territoires", "territories", + "territorist", "terrorist", + "territority", "territory", + "terroristas", "terrorists", + "terroristes", "terrorists", + "terrorities", "territories", + "terrotorial", "territorial", + "terrotories", "territories", + "testiclular", "testicular", + "thankfullly", "thankfully", + "thanksgivng", "thanksgiving", + "theoligical", "theological", + "theoratical", "theoretical", + "theoreticly", "theoretical", + "theoritical", "theoretical", + "therapautic", "therapeutic", + "therapeudic", "therapeutic", + "therapeutuc", "therapeutic", + "therapuetic", "therapeutic", + "theraupetic", "therapeutic", + "thereaputic", "therapeutic", + "thereotical", "theoretical", + "therepeutic", "therapeutic", + "thermometor", "thermometer", + "thermometre", "thermometer", + "thermomiter", "thermometer", + "thermomoter", "thermometer", + "thermoneter", "thermometer", + "thermostaat", "thermostat", + "theroetical", "theoretical", + "thoeretical", "theoretical", + "threataning", "threatening", + "threatended", "threatened", + "threatining", "threatening", + "throttleing", "throttling", + "throughoput", "throughput", + "throughtout", 
"throughout", + "throughtput", "throughput", + "thudnerbolt", "thunderbolt", + "thunberbolt", "thunderbolt", + "thunderblot", "thunderbolt", + "thunderboat", "thunderbolt", + "thunderbots", "thunderbolt", + "thunderbowl", "thunderbolt", + "thunderjolt", "thunderbolt", + "thundervolt", "thunderbolt", + "tightenting", "tightening", + "tocuhscreen", "touchscreen", + "torrentking", "torrenting", + "torrentting", "torrenting", + "torublesome", "troublesome", + "torunaments", "tournaments", + "totalitaran", "totalitarian", + "totalitarni", "totalitarian", + "touranments", "tournaments", + "tournamnets", "tournaments", + "tournemants", "tournaments", + "tournements", "tournaments", + "tournmanets", "tournaments", + "tradicional", "traditional", + "tradionally", "traditionally", + "tradisional", "traditional", + "traditionel", "traditional", + "traditition", "tradition", + "tragicallly", "tragically", + "tramautized", "traumatized", + "tramuatized", "traumatized", + "trancendent", "transcendent", + "trancending", "transcending", + "tranclucent", "translucent", + "trandgender", "transgender", + "tranditions", "transitions", + "tranistions", "transitions", + "tranlastion", "translations", + "tranlsating", "translating", + "tranlsation", "translation", + "tranluscent", "translucent", + "trannsexual", "transsexual", + "tranpshobic", "transphobic", + "transaccion", "transaction", + "transaciton", "transactions", + "transalting", "translating", + "transaltion", "translation", + "transations", "transitions", + "transcluent", "translucent", + "transcripto", "transcription", + "transctions", "transitions", + "transculent", "translucent", + "transending", "transcending", + "transfender", "transgender", + "transferers", "transfers", + "transfering", "transferring", + "transfersom", "transforms", + "transfomers", "transforms", + "transformas", "transforms", + "transformes", "transformers", + "transformis", "transforms", + "transformus", "transforms", + "transforums", "transforms", + "transfromed", "transformed", + "transfromer", "transformers", + "transgemder", "transgender", + "transgended", "transgendered", + "transgenger", "transgender", + "transgenres", "transgender", + "transhpobic", "transphobic", + "transisions", "transitions", + "transisitor", "transistor", + "transistion", "transition", + "transistior", "transistor", + "transitiond", "transitioned", + "transitiong", "transitioning", + "translatron", "translation", + "translusent", "translucent", + "transmatter", "transmitter", + "transmision", "transmission", + "transmissin", "transmissions", + "transmisson", "transmission", + "transmittor", "transmitter", + "transmorged", "transformed", + "transmutter", "transmitter", + "transofrmed", "transformed", + "transohobic", "transphobic", + "transparant", "transparent", + "transparecy", "transparency", + "transpareny", "transparency", + "transperant", "transparent", + "transperent", "transparent", + "transphonic", "transphobic", + "transphopic", "transphobic", + "transplanet", "transplant", + "transporder", "transporter", + "transporing", "transporting", + "transportar", "transporter", + "transportng", "transporting", + "transportor", "transporter", + "transseuxal", "transsexual", + "transsexaul", "transsexual", + "transsexuel", "transsexual", + "transulcent", "translucent", + "transylvnia", "transylvania", + "tranzformer", "transformer", + "tranzitions", "transitions", + "tranzporter", "transporter", + "trasncripts", "transcripts", + "trasnferred", "transferred", + "trasnformed", "transformed", + 
"trasnformer", "transformer", + "trasngender", "transgender", + "trasnmitted", "transmitted", + "trasnmitter", "transmitter", + "trasnparent", "transparent", + "trasnphobic", "transphobic", + "trasnported", "transported", + "trasnporter", "transporter", + "traumatisch", "traumatic", + "traumetized", "traumatized", + "traumitized", "traumatized", + "travellerhd", "travelled", + "travellodge", "travelled", + "tremendeous", "tremendous", + "tremendious", "tremendous", + "tremenduous", "tremendous", + "trespessing", "trespassing", + "tresspasing", "trespassing", + "triggereing", "triggering", + "triggerring", "triggering", + "troubelsome", "troublesome", + "truamatized", "traumatized", + "trushworthy", "trustworthy", + "trustowrthy", "trustworthy", + "trustwhorty", "trustworthy", + "trustworhty", "trustworthy", + "truthfullly", "truthfully", + "tupperwears", "tupperware", + "turstworthy", "trustworthy", + "ubiquitious", "ubiquitous", + "ubiquituous", "ubiquitous", + "ukraininans", "ukrainians", + "ultimatelly", "ultimately", + "unanimoulsy", "unanimous", + "unappeasing", "unappealing", + "unappeeling", "unappealing", + "unathorised", "unauthorised", + "unattendend", "unattended", + "unatteneded", "unattended", + "unattracive", "unattractive", + "unauthoried", "unauthorized", + "unavailible", "unavailable", + "unavaliable", "unavailable", + "unaviodable", "unavoidable", + "unbalanaced", "unbalanced", + "unbraikable", "unbreakable", + "unbrakeable", "unbreakable", + "unbreakabie", "unbreakable", + "unbreakabke", "unbreakable", + "unbreakbale", "unbreakable", + "unbreakeble", "unbreakable", + "unbrearable", "unbreakable", + "uncensorred", "uncensored", + "uncertaincy", "uncertainty", + "uncertanity", "uncertainty", + "uncertianty", "uncertainty", + "unchangable", "unchangeable", + "uncompetive", "uncompetitive", + "unconcsious", "unconscious", + "unconsicous", "unconscious", + "uncouncious", "unconscious", + "undeniabely", "undeniably", + "undeniabley", "undeniably", + "undeniablly", "undeniably", + "undenialbly", "undeniably", + "underestime", "underestimate", + "undergating", "undertaking", + "undergorund", "underground", + "underheight", "underweight", + "undermiming", "undermining", + "undermindes", "undermines", + "undernearth", "underneath", + "underneight", "underweight", + "underpining", "undermining", + "underpowerd", "underpowered", + "underpowred", "underpowered", + "underratted", "underrated", + "understannd", "understands", + "understsand", "understands", + "undertacker", "undertaker", + "underwarter", "underwater", + "underwieght", "underweight", + "underwright", "underweight", + "undesireble", "undesirable", + "undesriable", "undesirable", + "undetecable", "undetectable", + "undiserable", "undesirable", + "undoubedtly", "undoubtedly", + "undoubetdly", "undoubtedly", + "undoubtadly", "undoubtedly", + "undoubtebly", "undoubtedly", + "undoubtetly", "undoubtedly", + "undreground", "underground", + "unemployeed", "unemployed", + "unemployent", "unemployment", + "unemploymed", "unemployed", + "unexpectdly", "unexpectedly", + "unexpectely", "unexpectedly", + "unfamilliar", "unfamiliar", + "unfortuante", "unfortunate", + "ungreatfull", "ungrateful", + "unilateraly", "unilaterally", + "unilaterlly", "unilaterally", + "unimportent", "unimportant", + "uninspiried", "uninspired", + "uninstaling", "uninstalling", + "uninstallng", "uninstalling", + "uninteresed", "uninterested", + "uniquesness", "uniqueness", + "unisntalled", "uninstalled", + "universella", "universally", + "universites", 
"universities", + "univesities", "universities", + "unjustifyed", "unjustified", + "unknowinlgy", "unknowingly", + "unkowningly", "unknowingly", + "unnecassary", "unnecessary", + "unneccesary", "unnecessary", + "unnecessery", "unnecessary", + "unnecissary", "unnecessary", + "unnessecary", "unnecessary", + "unnistalled", "uninstalled", + "unoriginial", "unoriginal", + "unorigional", "unoriginal", + "unoticeable", "unnoticeable", + "unpleaseant", "unpleasant", + "unportected", "unprotected", + "unprepaired", "unprepared", + "unpreparred", "unprepared", + "unproducive", "unproductive", + "unprotexted", "unprotected", + "unqaulified", "unqualified", + "unrealisitc", "unrealistic", + "unrealsitic", "unrealistic", + "unreasonbly", "unreasonably", + "unregluated", "unregulated", + "unregualted", "unregulated", + "unregulared", "unregulated", + "unrepentent", "unrepentant", + "unresponive", "unresponsive", + "unrestriced", "unrestricted", + "unsettleing", "unsettling", + "unsintalled", "uninstalled", + "unsolicated", "unsolicited", + "unsoliticed", "unsolicited", + "unsolocited", "unsolicited", + "unsubscirbe", "unsubscribe", + "unsubscrbed", "unsubscribed", + "unsubscried", "unsubscribed", + "unsubscripe", "unsubscribe", + "unsubscrive", "unsubscribe", + "unsubscrube", "unsubscribe", + "unsubsrcibe", "unsubscribe", + "unsuccesful", "unsuccessful", + "unsuccessul", "unsuccessful", + "unsucesfuly", "unsuccessfully", + "unsucessful", "unsuccessful", + "unsunscribe", "unsubscribe", + "unsuprising", "unsurprising", + "unsuprizing", "unsurprising", + "unsurprized", "unsurprised", + "unsusbcribe", "unsubscribe", + "unviersally", "universally", + "unwarrented", "unwarranted", + "utiliatrian", "utilitarian", + "utilitatian", "utilitarian", + "utiliterian", "utilitarian", + "utilizacion", "utilization", + "utilizaiton", "utilization", + "utilizating", "utilization", + "utiltiarian", "utilitarian", + "vacciantion", "vaccination", + "vaccinaties", "vaccinate", + "vegaterians", "vegetarians", + "vegetariens", "vegetarians", + "vegetatians", "vegetarians", + "vegeterians", "vegetarians", + "vehementely", "vehemently", + "venezuelean", "venezuela", + "venezuelian", "venezuela", + "ventalation", "ventilation", + "ventelation", "ventilation", + "ventialtion", "ventilation", + "ventilacion", "ventilation", + "verastility", "versatility", + "verfication", "verification", + "versatality", "versatility", + "versitality", "versatility", + "versitilaty", "versatility", + "victorieuse", "victories", + "victoriuous", "victorious", + "vietnameese", "vietnamese", + "vietnamesse", "vietnamese", + "vietnamiese", "vietnamese", + "vietnamnese", "vietnamese", + "vigilanties", "vigilante", + "visibillity", "visibility", + "vocabularly", "vocabulary", + "volatillity", "volatility", + "volonteered", "volunteered", + "volounteers", "volunteers", + "volunatrily", "voluntarily", + "voluntairly", "voluntarily", + "volunteeers", "volunteers", + "volunteraly", "voluntarily", + "voluntereed", "volunteered", + "volunterily", "voluntarily", + "vulnerabile", "vulnerable", + "wallpapaers", "wallpapers", + "wallpappers", "wallpapers", + "washingtion", "washington", + "watermeleon", "watermelon", + "waterprooof", "waterproof", + "wavelegnths", "wavelength", + "wavelenghth", "wavelength", + "wavelenghts", "wavelength", + "weaknessses", "weaknesses", + "wellingston", "wellington", + "wellingtion", "wellington", + "westernerns", "westerners", + "westmisnter", "westminster", + "westmnister", "westminster", + "westmonster", "westminster", + 
"whisperered", "whispered", + "whitholding", "withholding", + "wikileakers", "wikileaks", + "willingless", "willingness", + "wincheseter", "winchester", + "windsheilds", "windshield", + "withdrawels", "withdrawals", + "withdrawles", "withdrawals", + "withhelding", "withholding", + "withrdawing", "withdrawing", + "witnesssing", "witnessing", + "woodowrking", "woodworking", + "woodworkign", "woodworking", + "worhsipping", "worshipping", + "workstaiton", "workstation", + "workststion", "workstation", + "worshopping", "worshipping", + "xenophoblic", "xenophobic", + "abandining", "abandoning", + "abandonned", "abandoned", + "abbreviato", "abbreviation", + "abnoramlly", "abnormally", + "abnormalty", "abnormally", + "abnornally", "abnormally", + "abominaton", "abomination", + "abondoning", "abandoning", + "aborginial", "aboriginal", + "aboriganal", "aboriginal", + "aborigenal", "aboriginal", + "aborignial", "aboriginal", + "aborigonal", "aboriginal", + "aboroginal", "aboriginal", + "aboslutely", "absolutely", + "abosrption", "absorption", + "abreviated", "abbreviated", + "absintence", "abstinence", + "absitnence", "abstinence", + "absolument", "absolute", + "absolutley", "absolutely", + "absoprtion", "absorption", + "absorbsion", "absorption", + "absorbtion", "absorption", + "absorpsion", "absorption", + "absoultely", "absolutely", + "abstanence", "abstinence", + "abstenance", "abstinence", + "abstenince", "abstinence", + "abstinense", "abstinence", + "abstinince", "abstinence", + "absurditiy", "absurdity", + "abundacies", "abundances", + "academicas", "academics", + "academicos", "academics", + "academicus", "academics", + "accdiently", "accidently", + "accelarate", "accelerate", + "accelerade", "accelerated", + "accelerare", "accelerate", + "accelerato", "acceleration", + "acceleread", "accelerated", + "accelertor", "accelerator", + "accelorate", "accelerate", + "acceptabel", "acceptable", + "acceptabil", "acceptable", + "acceptence", "acceptance", + "accepterad", "accepted", + "acceptible", "acceptable", + "accerelate", "accelerated", + "accesories", "accessories", + "accessable", "accessible", + "accessbile", "accessible", + "accessoire", "accessories", + "accessoirs", "accessories", + "accicently", "accidently", + "accidantly", "accidently", + "accidebtly", "accidently", + "accidenlty", "accidently", + "accidentes", "accidents", + "accidentky", "accidently", + "accidently", "accidentally", + "accidnetly", "accidently", + "accomadate", "accommodate", + "accomodate", "accommodate", + "accompined", "accompanied", + "accomplise", "accomplishes", + "accompliss", "accomplishes", + "accostumed", "accustomed", + "accountent", "accountant", + "accpetable", "acceptable", + "accpetance", "acceptance", + "accuastion", "accusation", + "acculumate", "accumulate", + "accumalate", "accumulate", + "accumelate", "accumulate", + "accumilate", "accumulate", + "accumulare", "accumulate", + "accumulato", "accumulation", + "accumulted", "accumulated", + "accuratley", "accurately", + "accusating", "accusation", + "accusition", "accusation", + "accustumed", "accustomed", + "acheivable", "achievable", + "acheivment", "achievement", + "acheviable", "achievable", + "achiavable", "achievable", + "achieveble", "achievable", + "achievemnt", "achievement", + "achievemts", "achieves", + "achievents", "achieves", + "achievment", "achievement", + "achilleous", "achilles", + "achiveable", "achievable", + "achivement", "achievement", + "acitvating", "activating", + "acitvision", "activision", + "acknowldge", "acknowledge", + 
"acknowlede", "acknowledge", + "acknowlege", "acknowledge", + "acommodate", "accommodate", + "acopalypse", "apocalypse", + "acordingly", "accordingly", + "acqauinted", "acquainted", + "acquanited", "acquainted", + "acquianted", "acquainted", + "acquinated", "acquainted", + "acquisiton", "acquisition", + "acticating", "activating", + "actication", "activation", + "activacion", "activation", + "activaters", "activates", + "activiates", "activist", + "activiites", "activist", + "activisiom", "activism", + "activisits", "activist", + "activistas", "activists", + "activistes", "activists", + "activiting", "activating", + "activizion", "activision", + "acustommed", "accustomed", + "adaptacion", "adaptation", + "adaptating", "adaptation", + "adaquetely", "adequately", + "addicitons", "addictions", + "addionally", "additionally", + "additivies", "additive", + "additivley", "additive", + "addittions", "addictions", + "addmission", "admission", + "addresable", "addressable", + "addressess", "addresses", + "adequatley", "adequately", + "adequetely", "adequately", + "adequitely", "adequately", + "adernaline", "adrenaline", + "adjectivos", "adjectives", + "adjustible", "adjustable", + "admendment", "amendment", + "administed", "administered", + "administor", "administer", + "administre", "administer", + "administro", "administer", + "adminsiter", "administer", + "admissable", "admissible", + "admittadly", "admittedly", + "admittetly", "admittedly", + "admittidly", "admittedly", + "adolencent", "adolescent", + "adolescant", "adolescent", + "adolescene", "adolescence", + "adoloscent", "adolescent", + "adolsecent", "adolescent", + "adpatation", "adaptation", + "adreanline", "adrenaline", + "adrelanine", "adrenaline", + "adreneline", "adrenaline", + "adreniline", "adrenaline", + "adressable", "addressable", + "advanteges", "advantages", + "advatanges", "advantages", + "adventrous", "adventurous", + "adventrues", "adventures", + "adventuers", "adventures", + "adventuous", "adventurous", + "adventuros", "adventurous", + "adventurus", "adventurous", + "adverticed", "advertised", + "aestethics", "aesthetics", + "aesthatics", "aesthetics", + "aesthestic", "aesthetics", + "affiliaton", "affiliation", + "affilliate", "affiliate", + "affirmitve", "affirmative", + "afflcition", "affliction", + "afflection", "affliction", + "affliation", "affliction", + "affliciton", "affliction", + "afforadble", "affordable", + "affordible", "affordable", + "affortable", "affordable", + "africaners", "africans", + "africaness", "africans", + "aftermaket", "aftermarket", + "afternooon", "afternoon", + "aggravanti", "aggravating", + "aggraveted", "aggravated", + "aggreement", "agreement", + "aggregious", "egregious", + "aggresions", "aggression", + "aggressivo", "aggression", + "aggrovated", "aggravated", + "agnosticim", "agnosticism", + "agnosticsm", "agnosticism", + "agnostisch", "agnostic", + "agnostiscm", "agnosticism", + "agnostisim", "agnosticism", + "agreeement", "agreement", + "agricultre", "agriculture", + "agricultue", "agriculture", + "agriculure", "agriculture", + "agricuture", "agriculture", + "ailenating", "alienating", + "ajdectives", "adjectives", + "alchoholic", "alcoholic", + "alchoolism", "alcoholism", + "alcohalics", "alcoholics", + "alcohalism", "alcoholism", + "alcoholsim", "alcoholism", + "aleinating", "alienating", + "algorhitms", "algorithms", + "algorithem", "algorithm", + "algorithim", "algorithm", + "algorithsm", "algorithms", + "algorithum", "algorithm", + "algorithym", "algorithm", + "algoritmes", 
"algorithms", + "algoritmos", "algorithms", + "algorthims", "algorithms", + "algortihms", "algorithms", + "algorythms", "algorithms", + "alievating", "alienating", + "alledgedly", "allegedly", + "allegeance", "allegiance", + "allegedely", "allegedly", + "allegedley", "allegedly", + "allegience", "allegiance", + "alleigance", "allegiance", + "allergisch", "allergic", + "alliegance", "allegiance", + "alligeance", "allegiance", + "alocholics", "alcoholics", + "alocholism", "alcoholism", + "alogrithms", "algorithms", + "alphabeast", "alphabet", + "alteracion", "alteration", + "alterarion", "alteration", + "alterating", "alteration", + "alternador", "alternator", + "alternater", "alternator", + "alternatie", "alternatives", + "alternatly", "alternately", + "alternatve", "alternate", + "alternetly", "alternately", + "altogehter", "altogether", + "altogheter", "altogether", + "altriustic", "altruistic", + "altruisitc", "altruistic", + "altrusitic", "altruistic", + "alturistic", "altruistic", + "aluminimum", "aluminum", + "amargeddon", "armageddon", + "amateurest", "amateurs", + "ambassabor", "ambassador", + "ambassader", "ambassador", + "ambassator", "ambassador", + "ambassedor", "ambassador", + "ambassidor", "ambassador", + "ambassodor", "ambassador", + "ambiguitiy", "ambiguity", + "amendmants", "amendments", + "amendmends", "amendments", + "americains", "americas", + "americanas", "americans", + "americanis", "americas", + "americanss", "americas", + "americants", "americas", + "americanus", "americans", + "americares", "americas", + "ammendment", "amendment", + "amrageddon", "armageddon", + "analitical", "analytical", + "analitycal", "analytical", + "analogeous", "analogous", + "analyitcal", "analytical", + "analyseles", "analyses", + "analyseras", "analyses", + "analyseres", "analyses", + "analysised", "analyses", + "analysises", "analyses", + "analysisto", "analysts", + "analystics", "analysts", + "anarchisim", "anarchism", + "anarchistm", "anarchism", + "anarchiszm", "anarchism", + "anarchsits", "anarchists", + "anayltical", "analytical", + "ancilliary", "ancillary", + "androiders", "androids", + "androidtvs", "androids", + "anecdotale", "anecdote", + "anecdotice", "anecdote", + "anestheisa", "anesthesia", + "anesthetia", "anesthesia", + "anesthisia", "anesthesia", + "anitbiotic", "antibiotic", + "anitquated", "antiquated", + "anitsocial", "antisocial", + "aniversary", "anniversary", + "annilihate", "annihilated", + "anniverary", "anniversary", + "anniversay", "anniversary", + "anniversry", "anniversary", + "annointing", "anointing", + "annonceurs", "announcers", + "annoucners", "announcers", + "annoucning", "announcing", + "announched", "announce", + "annyoingly", "annoyingly", + "anonymosly", "anonymously", + "anonymousy", "anonymously", + "antaganist", "antagonist", + "antagnoist", "antagonist", + "antarcitca", "antarctica", + "antarctida", "antarctica", + "anthropoly", "anthropology", + "antibiodic", "antibiotic", + "antibiotcs", "antibiotics", + "antibitoic", "antibiotic", + "antiboitic", "antibiotics", + "anticapate", "anticipate", + "anticiapte", "anticipate", + "anticipare", "anticipate", + "anticipato", "anticipation", + "anticuated", "antiquated", + "antiquited", "antiquated", + "antiqvated", "antiquated", + "antisipate", "anticipate", + "antisocail", "antisocial", + "antisosial", "antisocial", + "antoganist", "antagonist", + "antractica", "antarctica", + "apacolypse", "apocalypse", + "apartheied", "apartheid", + "aplication", "application", + "apocalipse", "apocalypse", + 
"apocalpyse", "apocalypse", + "apocalypes", "apocalypse", + "apocalypic", "apocalyptic", + "apocalyspe", "apocalypse", + "apocalytic", "apocalyptic", + "apocaplyse", "apocalypse", + "apocolapse", "apocalypse", + "apolagetic", "apologetic", + "apolagized", "apologized", + "apolegetic", "apologetic", + "apoligetic", "apologetic", + "apoligists", "apologists", + "apoligized", "apologized", + "apologisms", "apologists", + "apologiste", "apologise", + "apologitic", "apologetic", + "apostraphe", "apostrophe", + "apostrephe", "apostrophe", + "apostrohpe", "apostrophe", + "apostropes", "apostrophe", + "apparantly", "apparently", + "appareance", "appearance", + "apparenlty", "apparently", + "appartment", "apartment", + "appealling", "appealing", + "appearence", "appearance", + "appearnace", "appearances", + "apperances", "appearances", + "apperantly", "apparently", + "apperciate", "appreciate", + "appereance", "appearance", + "appetities", "appetite", + "appetitite", "appetite", + "appication", "application", + "applainces", "appliances", + "applicaple", "applicable", + "applicates", "applicants", + "applicaton", "application", + "applicible", "applicable", + "appliences", "appliances", + "appointmet", "appointments", + "appologies", "apologies", + "apporached", "approached", + "apporaches", "approaches", + "appraoched", "approached", + "appraoches", "approaches", + "apprecaite", "appreciate", + "appreciato", "appreciation", + "appreciste", "appreciates", + "apprecitae", "appreciates", + "apprecited", "appreciated", + "apprectice", "apprentice", + "appreicate", "appreciate", + "apprendice", "apprentice", + "apprentace", "apprentice", + "apprentise", "apprentice", + "appretiate", "appreciate", + "appretince", "apprentice", + "appriceate", "appreciates", + "appriciate", "appreciate", + "appriecate", "appreciates", + "approacing", "approaching", + "appropiate", "appropriate", + "approprate", "appropriate", + "apropriate", "appropriate", + "aproximate", "approximate", + "apsotrophe", "apostrophe", + "aptitudine", "aptitude", + "aqcuainted", "acquainted", + "aquisition", "acquisition", + "aramgeddon", "armageddon", + "arangement", "arrangement", + "arbitarily", "arbitrarily", + "arbitraily", "arbitrarily", + "arbitraion", "arbitration", + "arbitrairy", "arbitrarily", + "arbitrarly", "arbitrary", + "arbitraton", "arbitration", + "arcehtypes", "archetypes", + "archaelogy", "archaeology", + "archaeolgy", "archaeology", + "archaology", "archeology", + "archatypes", "archetypes", + "archetects", "architects", + "archetipes", "archetypes", + "archetpyes", "archetypes", + "archetypus", "archetypes", + "archeytpes", "archetypes", + "archictect", "architect", + "architechs", "architects", + "architecht", "architect", + "architecte", "architecture", + "architexts", "architects", + "architypes", "archetypes", + "archtiects", "architects", + "archytypes", "archetypes", + "argentinia", "argentina", + "arguements", "arguments", + "argumentas", "arguments", + "argumentos", "arguments", + "arithemtic", "arithmetic", + "arithmitic", "arithmetic", + "aritmethic", "arithmetic", + "armagaddon", "armageddon", + "armageddan", "armageddon", + "armagedden", "armageddon", + "armageddin", "armageddon", + "armagedeon", "armageddon", + "armageedon", "armageddon", + "armagideon", "armageddon", + "armegaddon", "armageddon", + "arrangerad", "arranged", + "arrangment", "arrangement", + "arthimetic", "arithmetic", + "articifial", "artificial", + "articluate", "articulate", + "articualte", "articulate", + "articulted", "articulated", + 
"artifactos", "artifacts", + "artificiel", "artificial", + "artihmetic", "arithmetic", + "artillerly", "artillery", + "asbestoast", "asbestos", + "asethetics", "aesthetics", + "asisstants", "assistants", + "aspiratons", "aspirations", + "assasinate", "assassinate", + "assassians", "assassin", + "assassinas", "assassins", + "assassines", "assassins", + "assassinos", "assassins", + "assemblare", "assemble", + "assempling", "assembling", + "assersions", "assertions", + "assesement", "assessment", + "assestment", "assessment", + "assigments", "assignments", + "assignemnt", "assignment", + "assimalate", "assimilate", + "assimilant", "assimilate", + "assimilare", "assimilate", + "assimliate", "assimilate", + "assimulate", "assimilate", + "assingment", "assignment", + "assistanat", "assistants", + "assistanse", "assistants", + "assistante", "assistance", + "assistence", "assistance", + "assistendo", "assisted", + "assistents", "assistants", + "assmebling", "assembling", + "assocaited", "associated", + "assocaites", "associates", + "assocation", "association", + "associatie", "associated", + "associatin", "associations", + "associaton", "association", + "associsted", "associates", + "assoicated", "associated", + "assoicates", "associates", + "assosiated", "associated", + "assosiates", "associates", + "asssassans", "assassins", + "assupmtion", "assumptions", + "assymetric", "asymmetric", + "asteroides", "asteroids", + "asthetical", "aesthetical", + "astonising", "astonishing", + "astornauts", "astronauts", + "astranauts", "astronauts", + "astronatus", "astronauts", + "astronaunt", "astronaut", + "astronomia", "astronomical", + "astronouts", "astronauts", + "astronuats", "astronauts", + "asutralian", "australian", + "atatchment", "attachment", + "athleticos", "athletics", + "athleticsm", "athleticism", + "athletiscm", "athleticism", + "athletisim", "athleticism", + "atmopshere", "atmosphere", + "atmoshpere", "atmosphere", + "atomsphere", "atmosphere", + "atriculate", "articulate", + "atrocoties", "atrocities", + "atrosities", "atrocities", + "attachemnt", "attachment", + "attackeras", "attackers", + "attactment", "attachment", + "attemtping", "attempting", + "attendence", "attendance", + "attendents", "attendants", + "attirbutes", "attributes", + "attmepting", "attempting", + "attracters", "attracts", + "attractice", "attractive", + "attracties", "attracts", + "attractifs", "attracts", + "attraktion", "attraction", + "attraktive", "attractive", + "attribuito", "attribution", + "attritubes", "attributes", + "auctioners", "auctions", + "audioboook", "audiobook", + "audioboost", "audiobooks", + "auidobooks", "audiobooks", + "auotattack", "autoattack", + "austrailan", "australian", + "austrailia", "australia", + "australain", "australians", + "australien", "australian", + "australina", "australians", + "austrlaian", "australians", + "authenticy", "authenticity", + "autherized", "authorized", + "authoritay", "authority", + "authorites", "authorities", + "authorithy", "authority", + "authroized", "authorized", + "autistisch", "autistic", + "autoattaks", "autoattack", + "autocorect", "autocorrect", + "autocorrct", "autocorrect", + "autocorret", "autocorrect", + "autograpgh", "autograph", + "automatice", "automate", + "automatico", "automation", + "automatied", "automate", + "automatiek", "automate", + "automatron", "automation", + "automatted", "automate", + "automibile", "automobile", + "automitive", "automotive", + "automoblie", "automobile", + "automomous", "autonomous", + "automonous", "autonomous", + 
"automotice", "automotive", + "automotion", "automation", + "automotize", "automotive", + "automotove", "automotive", + "autonamous", "autonomous", + "autonation", "automation", + "autonimous", "autonomous", + "autonomity", "autonomy", + "autononous", "autonomous", + "auttoatack", "autoattack", + "auxilliary", "auxiliary", + "availabale", "available", + "availaible", "available", + "availiable", "available", + "averageadi", "averaged", + "averageifs", "averages", + "awesomeley", "awesomely", + "awesomelly", "awesomely", + "awesomenss", "awesomeness", + "awkwardess", "awkwardness", + "babysister", "babysitter", + "babysiting", "babysitting", + "babysittng", "babysitting", + "bachelores", "bachelors", + "backgorund", "background", + "backgroudn", "backgrounds", + "backgrouds", "backgrounds", + "backgrouns", "backgrounds", + "backgruond", "backgrounds", + "backpacing", "backpacking", + "backpackng", "backpacking", + "backrgound", "backgrounds", + "backrounds", "backgrounds", + "baksetball", "basketball", + "balanceada", "balanced", + "balanceado", "balanced", + "balckberry", "blackberry", + "balckhawks", "blackhawks", + "balcksmith", "blacksmith", + "bandwagoon", "bandwagon", + "bangaldesh", "bangladesh", + "bangladash", "bangladesh", + "bangledash", "bangladesh", + "bangledesh", "bangladesh", + "banglidesh", "bangladesh", + "bankrupcty", "bankruptcy", + "bankruptsy", "bankruptcy", + "bankrutpcy", "bankruptcy", + "barabrians", "barbarians", + "barbariens", "barbarians", + "barbarions", "barbarians", + "barbarisch", "barbaric", + "barberians", "barbarians", + "bargianing", "bargaining", + "bartendars", "bartenders", + "basektball", "basketball", + "baskteball", "basketball", + "bastardous", "bastards", + "battelship", "battleship", + "battelstar", "battlestar", + "battlearts", "battlestar", + "battlechip", "battleship", + "battlefied", "battlefield", + "battlefont", "battlefront", + "battlehips", "battleship", + "battlesaur", "battlestar", + "battlescar", "battlestar", + "battleshop", "battleship", + "battlestsr", "battlestar", + "beahviours", "behaviours", + "beautifuly", "beautifully", + "beautilful", "beautifully", + "beautyfull", "beautiful", + "becnhmarks", "benchmarks", + "beethoveen", "beethoven", + "begginings", "beginnings", + "begininngs", "beginnings", + "beginninng", "beginnings", + "behaivours", "behaviours", + "behaviorly", "behavioral", + "behavoiral", "behavioral", + "behavoiurs", "behaviours", + "behavorial", "behavioral", + "behavoural", "behavioral", + "behvaiours", "behaviours", + "beleagured", "beleaguered", + "beleivable", "believable", + "beliavable", "believable", + "beliebable", "believable", + "believeble", "believable", + "beliveable", "believable", + "benchamrks", "benchmarks", + "benchmakrs", "benchmarks", + "benckmarks", "benchmarks", + "benefecial", "beneficial", + "beneficary", "beneficiary", + "beneficiul", "beneficial", + "benefitial", "beneficial", + "beneifical", "beneficial", + "benelovent", "benevolent", + "benevalent", "benevolent", + "benevelant", "benevolent", + "benevelent", "benevolent", + "benevelont", "benevolent", + "benevloent", "benevolent", + "benevolant", "benevolent", + "benificial", "beneficial", + "benovelent", "benevolent", + "bernouilli", "bernoulli", + "besitality", "bestiality", + "bestaility", "bestiality", + "besteality", "bestiality", + "betrayeado", "betrayed", + "bilateraly", "bilaterally", + "billborads", "billboards", + "bioligical", "biological", + "biologiset", "biologist", + "biologiskt", "biologist", + "birghtness", 
"brightness", + "birmignham", "birmingham", + "birmimgham", "birmingham", + "bisexuella", "bisexual", + "bitterseet", "bittersweet", + "bitterswet", "bittersweet", + "blackahwks", "blackhawks", + "blackbarry", "blackberry", + "blackbeary", "blackberry", + "blackbeery", "blackberry", + "blackcawks", "blackhawks", + "blackhakws", "blackhawks", + "blackhwaks", "blackhawks", + "blackmsith", "blacksmith", + "blackshits", "blacksmith", + "blasphemey", "blasphemy", + "blitzkreig", "blitzkrieg", + "blochchain", "blockchain", + "blockcahin", "blockchain", + "blockchian", "blockchain", + "bloodboner", "bloodborne", + "bloodbonre", "bloodborne", + "bloodborbe", "bloodborne", + "bloodbrone", "bloodborne", + "bloodporne", "bloodborne", + "bloorborne", "bloodborne", + "blueberies", "blueberries", + "blueberris", "blueberries", + "blueberrry", "blueberry", + "bluebrints", "blueprints", + "boardcasts", "broadcasts", + "bodyheight", "bodyweight", + "bodyweigth", "bodyweight", + "bodywieght", "bodyweight", + "bombarment", "bombardment", + "bookmakred", "bookmarked", + "bootlaoder", "bootloader", + "bootleader", "bootloader", + "boradcasts", "broadcasts", + "borderlads", "borderlands", + "borderlans", "borderlands", + "bottelneck", "bottleneck", + "bottlebeck", "bottleneck", + "boundaires", "boundaries", + "bounderies", "boundaries", + "bourgeoius", "bourgeois", + "boycutting", "boycotting", + "boyfirends", "boyfriends", + "boyfreinds", "boyfriends", + "boyfrients", "boyfriends", + "braceletes", "bracelets", + "braceletts", "bracelets", + "brainwased", "brainwashed", + "brakedowns", "breakdowns", + "braodcasts", "broadcasts", + "brasillian", "brazilian", + "bratenders", "bartenders", + "brazilains", "brazilians", + "brazileans", "brazilians", + "braziliaan", "brazilians", + "brazilions", "brazilians", + "brazillans", "brazilians", + "brightoner", "brighten", + "brigthness", "brightness", + "brillaince", "brilliance", + "brilliante", "brilliance", + "brillianty", "brilliantly", + "brimestone", "brimstone", + "brimingham", "birmingham", + "broacasted", "broadcast", + "brotherhod", "brotherhood", + "brotherood", "brotherhood", + "brusselers", "brussels", + "brutallity", "brutally", + "buisnesses", "businesses", + "bulgariska", "bulgaria", + "bulletpoof", "bulletproof", + "bulletprof", "bulletproof", + "bureaucats", "bureaucrats", + "businesman", "businessman", + "businesmen", "businessmen", + "businessen", "businessmen", + "butterfies", "butterflies", + "cabinettas", "cabinets", + "caclulated", "calculated", + "caclulator", "calculator", + "cahracters", "characters", + "calcluator", "calculators", + "calcualted", "calculated", + "calcualtor", "calculator", + "calculador", "calculator", + "calcularon", "calculator", + "calculater", "calculator", + "calculatin", "calculations", + "calibratin", "calibration", + "calibraton", "calibration", + "califnoria", "californian", + "califonria", "californian", + "califorian", "californian", + "califorina", "california", + "californai", "californian", + "califronia", "california", + "caligraphy", "calligraphy", + "caliofrnia", "californian", + "calrifying", "clarifying", + "calssified", "classified", + "caluclated", "calculated", + "caluclator", "calculator", + "caluculate", "calculate", + "cambodican", "cambodia", + "camofluage", "camouflage", + "camoufalge", "camouflage", + "camouglage", "camouflage", + "campaiging", "campaigning", + "campaignes", "campaigns", + "cancellato", "cancellation", + "candidatas", "candidates", + "candidatxs", "candidates", + "candidiate", 
"candidate", + "canditates", "candidates", + "cannibalsm", "cannibalism", + "cannisters", "canisters", + "cannonical", "canonical", + "capabality", "capability", + "capabiltiy", "capability", + "capacators", "capacitors", + "capaciters", "capacitors", + "capactiors", "capacitors", + "capasitors", "capacitors", + "capatilism", "capitalism", + "capatilist", "capitalist", + "capatilize", "capitalize", + "capialized", "capitalized", + "capicators", "capacitors", + "capitalisn", "capitals", + "capitalits", "capitalists", + "capitalsim", "capitalism", + "capitalsit", "capitalists", + "capitarist", "capitalist", + "capitilism", "capitalism", + "capitilist", "capitalist", + "capitilize", "capitalize", + "capitlaism", "capitalism", + "capitlaist", "capitalist", + "capitlaize", "capitalized", + "capitolism", "capitalism", + "capitolist", "capitalist", + "capitolize", "capitalize", + "captainers", "captains", + "captialism", "capitalism", + "captialist", "capitalist", + "captialize", "capitalize", + "captivitiy", "captivity", + "caraciture", "caricature", + "carciature", "caricature", + "cardinales", "cardinals", + "cardinalis", "cardinals", + "carefullly", "carefully", + "cariacture", "caricature", + "caricatore", "caricature", + "cariciture", "caricature", + "caricuture", "caricature", + "carismatic", "charismatic", + "carribbean", "caribbean", + "cartdridge", "cartridge", + "cartdriges", "cartridges", + "carthagian", "carthaginian", + "cartilidge", "cartilage", + "cartirdges", "cartridges", + "cartrdiges", "cartridges", + "cartriages", "cartridges", + "cartrigdes", "cartridges", + "casaulties", "casualties", + "cassowarry", "cassowary", + "casualites", "casualties", + "casualries", "casualties", + "casulaties", "casualties", + "cataclysim", "cataclysm", + "cataclysym", "cataclysm", + "catagories", "categories", + "catapillar", "caterpillar", + "catapiller", "caterpillar", + "catastrope", "catastrophe", + "catastrphe", "catastrophe", + "categorice", "categorize", + "categoried", "categorized", + "categoriei", "categorize", + "cateogrize", "categorized", + "catepillar", "caterpillar", + "caterpilar", "caterpillar", + "catholicsm", "catholicism", + "catholicus", "catholics", + "catholisim", "catholicism", + "cativating", "activating", + "cattleship", "battleship", + "causalties", "casualties", + "cautionsly", "cautiously", + "celebratin", "celebration", + "celebrites", "celebrities", + "celebritiy", "celebrity", + "cellpading", "cellpadding", + "cellulaire", "cellular", + "cemetaries", "cemeteries", + "censorhsip", "censorship", + "censurship", "censorship", + "centipedle", "centipede", + "ceremonias", "ceremonies", + "ceremoniis", "ceremonies", + "ceremonije", "ceremonies", + "cerimonial", "ceremonial", + "cerimonies", "ceremonies", + "certainity", "certainty", + "certainlyt", "certainty", + "chairtable", "charitable", + "chalenging", "challenging", + "challanged", "challenged", + "challanges", "challenges", + "challegner", "challenger", + "challender", "challenger", + "challengue", "challenger", + "challengur", "challenger", + "challening", "challenging", + "challneger", "challenger", + "chanceller", "chancellor", + "chancillor", "chancellor", + "chansellor", "chancellor", + "charachter", "character", + "charactere", "characterize", + "characterz", "characterize", + "charactors", "characters", + "charakters", "characters", + "charatable", "charitable", + "charecters", "characters", + "charistics", "characteristics", + "charitible", "charitable", + "chartiable", "charitable", + "chechpoint", 
"checkpoint", + "checkpiont", "checkpoint", + "checkpoins", "checkpoints", + "checkponts", "checkpoints", + "cheesecase", "cheesecake", + "cheesecave", "cheesecake", + "cheeseface", "cheesecake", + "cheezecake", "cheesecake", + "chemcially", "chemically", + "chidlbirth", "childbirth", + "chihuahuha", "chihuahua", + "childbrith", "childbirth", + "childrends", "childrens", + "childrenis", "childrens", + "childrents", "childrens", + "chirstians", "christians", + "chocalates", "chocolates", + "chocloates", "chocolates", + "chocoaltes", "chocolates", + "chocolatie", "chocolates", + "chocolatos", "chocolates", + "chocolatte", "chocolates", + "chocolotes", "chocolates", + "cholestrol", "cholesterol", + "chormosome", "chromosome", + "chornicles", "chronicles", + "chrisitans", "christians", + "christains", "christians", + "christiaan", "christian", + "christimas", "christians", + "christinas", "christians", + "christines", "christians", + "christmans", "christians", + "chromasome", "chromosome", + "chromesome", "chromosome", + "chromisome", "chromosome", + "chromosmes", "chromosomes", + "chromosoms", "chromosomes", + "chromosone", "chromosome", + "chromosoom", "chromosome", + "chromozome", "chromosome", + "chronciles", "chronicles", + "chronicals", "chronicles", + "chronicels", "chronicles", + "chronocles", "chronicles", + "chronosome", "chromosome", + "chrsitians", "christians", + "cigarattes", "cigarettes", + "cigerattes", "cigarettes", + "cincinatti", "cincinnati", + "cinncinati", "cincinnati", + "circulaire", "circular", + "circulaton", "circulation", + "circumsice", "circumcised", + "circumsied", "circumcised", + "circumwent", "circumvent", + "circunvent", "circumvent", + "cirruculum", "curriculum", + "claculator", "calculator", + "clairfying", "clarifying", + "clasically", "classically", + "classicals", "classics", + "classrooom", "classroom", + "cleanliess", "cleanliness", + "cleareance", "clearance", + "cleverleys", "cleverly", + "cliffhager", "cliffhanger", + "climateers", "climates", + "climatiser", "climates", + "clincially", "clinically", + "clitoridis", "clitoris", + "clitorious", "clitoris", + "co-incided", "coincided", + "cockroachs", "cockroaches", + "cockroahes", "cockroaches", + "coefficent", "coefficient", + "cognatious", "contagious", + "cognitivie", "cognitive", + "coincidnce", "coincide", + "colelctive", "collective", + "colelctors", "collectors", + "collapsers", "collapses", + "collaquial", "colloquial", + "collasping", "collapsing", + "collataral", "collateral", + "collaterol", "collateral", + "collatoral", "collateral", + "collcetion", "collections", + "colleauges", "colleagues", + "colleciton", "collection", + "collectems", "collects", + "collectief", "collective", + "collecties", "collects", + "collectifs", "collects", + "collectivo", "collection", + "collectoin", "collections", + "collectons", "collections", + "collectros", "collects", + "collegaues", "colleagues", + "collequial", "colloquial", + "colleteral", "collateral", + "colliquial", "colloquial", + "collission", "collisions", + "collitions", "collisions", + "colloqiual", "colloquial", + "colloquail", "colloquial", + "colloqueal", "colloquial", + "collpasing", "collapsing", + "colonialsm", "colonialism", + "colorblend", "colorblind", + "coloublind", "colorblind", + "columbidae", "columbia", + "comapnions", "companions", + "comaprable", "comparable", + "comaprison", "comparison", + "comaptible", "compatible", + "combatabts", "combatants", + "combatents", "combatants", + "combinatin", "combinations", + "combinaton", 
"combination", + "comediants", "comedians", + "comepndium", "compendium", + "comferting", "comforting", + "comforming", "comforting", + "comfortbly", "comfortably", + "comisioned", "commissioned", + "comisioner", "commissioner", + "comissions", "commissions", + "commandbox", "commando", + "commandent", "commandment", + "commandeur", "commanders", + "commandore", "commanders", + "commandpod", "commando", + "commanists", "communists", + "commemters", "commenters", + "commencera", "commerce", + "commenciez", "commence", + "commentaar", "commentary", + "commentare", "commenter", + "commentars", "commenters", + "commentart", "commentator", + "commentery", "commentary", + "commentsry", "commenters", + "commercail", "commercials", + "commercent", "commence", + "commerical", "commercial", + "comminists", "communists", + "commisison", "commissions", + "commissons", "commissions", + "commiteted", "commited", + "commodites", "commodities", + "commtiment", "commitments", + "communicae", "communicated", + "communisim", "communism", + "communiste", "communities", + "communites", "communities", + "communters", "commenters", + "compadible", "compatible", + "compagnons", "companions", + "compainons", "companions", + "compairson", "comparison", + "compalined", "complained", + "compandium", "compendium", + "companians", "companions", + "companines", "companions", + "compansate", "compensate", + "comparabil", "comparable", + "comparason", "comparison", + "comparaste", "compares", + "comparatie", "comparative", + "compareble", "comparable", + "comparemos", "compares", + "comparions", "comparison", + "compariosn", "comparisons", + "comparisen", "compares", + "comparitve", "comparative", + "comparsion", "comparison", + "compartent", "compartment", + "compartmet", "compartment", + "compatibel", "compatible", + "compatibil", "compatible", + "compeating", "completing", + "compeditor", "competitor", + "compednium", "compendium", + "compeeting", "completing", + "compeltely", "completely", + "compelting", "completing", + "compeltion", "completion", + "compemdium", "compendium", + "compenduim", "compendium", + "compenents", "components", + "compenidum", "compendium", + "compensare", "compensate", + "comperable", "comparable", + "comperhend", "comprehend", + "compession", "compassion", + "competance", "competence", + "competator", "competitor", + "competenet", "competence", + "competense", "competence", + "competenze", "competence", + "competeted", "competed", + "competetor", "competitor", + "competidor", "competitor", + "competiors", "competitors", + "competitie", "competitive", + "competitin", "competitions", + "competitio", "competitor", + "competiton", "competition", + "competitve", "competitive", + "compilance", "compliance", + "compilaton", "compilation", + "compinsate", "compensate", + "compitable", "compatible", + "compitance", "compliance", + "complacant", "complacent", + "complaince", "compliance", + "complaines", "complaints", + "complainig", "complaining", + "complainte", "complained", + "complation", "completion", + "compleatly", "completely", + "complecate", "complicate", + "completeds", "completes", + "completent", "complement", + "completily", "complexity", + "completito", "completion", + "completley", "completely", + "complexers", "complexes", + "complexety", "complexity", + "complianed", "compliance", + "compliants", "complaints", + "complicaed", "complicate", + "complicare", "complicate", + "complicati", "complicit", + "complicato", "complication", + "complicite", "complicate", + "complicted", 
"complicated", + "complience", "compliance", + "complimate", "complicate", + "complition", "completion", + "complusion", "compulsion", + "complusive", "compulsive", + "complusory", "compulsory", + "compolsive", "compulsive", + "compolsory", "compulsory", + "compolsury", "compulsory", + "componants", "components", + "componenet", "components", + "componsate", "compensate", + "comporable", "comparable", + "compositae", "composite", + "compositie", "composite", + "compositon", "composition", + "compraison", "comparisons", + "compramise", "compromise", + "comprassem", "compress", + "comprehand", "comprehend", + "compresion", "compression", + "compresors", "compressor", + "compresser", "compressor", + "compressio", "compressor", + "compresson", "compression", + "comprihend", "comprehend", + "comprimise", "compromise", + "compromiss", "compromises", + "compromize", "compromise", + "compromsie", "compromises", + "comprossor", "compressor", + "compteting", "completing", + "comptetion", "completion", + "compulisve", "compulsive", + "compulosry", "compulsory", + "compulsary", "compulsory", + "compulsery", "compulsory", + "compulsing", "compulsion", + "compulsivo", "compulsion", + "compulsury", "compulsory", + "compuslion", "compulsion", + "compuslive", "compulsive", + "compuslory", "compulsory", + "compustion", "compulsion", + "computanti", "computation", + "conatiners", "containers", + "concedendo", "conceded", + "concedered", "conceded", + "conceitual", "conceptual", + "concentate", "concentrate", + "concenting", "connecting", + "conceptial", "conceptual", + "conceptuel", "conceptual", + "concersion", "concession", + "concesions", "concession", + "concidered", "considered", + "conciously", "consciously", + "concission", "concession", + "conclsuion", "concussion", + "conclusies", "conclusive", + "conclution", "conclusion", + "concorrent", "concurrent", + "concsience", "conscience", + "conculsion", "conclusion", + "conculsive", "conclusive", + "concurment", "concurrent", + "concurrant", "concurrent", + "concurrect", "concurrent", + "concusions", "concussion", + "concusison", "concussions", + "condamning", "condemning", + "condemming", "condemning", + "condencing", "condemning", + "condenming", "condemning", + "condensend", "condensed", + "condidtion", "condition", + "conditinal", "conditional", + "conditiner", "conditioner", + "conditiond", "conditioned", + "conditiong", "conditioning", + "condmening", "condemning", + "conduiting", "conducting", + "conencting", "connecting", + "conenction", "connection", + "conenctors", "connectors", + "conesencus", "consensus", + "confedarcy", "confederacy", + "confedence", "conference", + "confedercy", "confederacy", + "conferance", "conference", + "conferenze", "conference", + "conferming", "confirming", + "confernece", "conferences", + "confessino", "confessions", + "confidance", "confidence", + "confidenly", "confidently", + "confidense", "confidence", + "confidenty", "confidently", + "conflcting", "conflating", + "conflicing", "conflicting", + "conflictos", "conflicts", + "confliting", "conflating", + "confriming", "confirming", + "confussion", "confession", + "congratule", "congratulate", + "congresman", "congressman", + "congresmen", "congressmen", + "congressen", "congressmen", + "conjecutre", "conjecture", + "conjuction", "conjunction", + "conjuncion", "conjunction", + "conlcusion", "conclusion", + "conncetion", "connections", + "conneciton", "connection", + "connecties", "connects", + "connectins", "connects", + "connectivy", "connectivity", + 
"connectpro", "connector", + "conneticut", "connecticut", + "connotaion", "connotation", + "conpsiracy", "conspiracy", + "conqeuring", "conquering", + "conqouring", "conquering", + "conquerers", "conquerors", + "conquoring", "conquering", + "consciense", "conscience", + "consciouly", "consciously", + "consdiered", "considered", + "consending", "consenting", + "consensuel", "consensual", + "consenusal", "consensual", + "consequece", "consequence", + "consequnce", "consequence", + "conservare", "conserve", + "conservato", "conservation", + "conservice", "conserve", + "conservies", "conserve", + "conservite", "conserve", + "consicence", "conscience", + "consideras", "considers", + "consideret", "considerate", + "consipracy", "conspiracy", + "consistant", "consistent", + "consistens", "consists", + "consisteny", "consistency", + "consitency", "consistency", + "consituted", "constituted", + "conslutant", "consultant", + "consluting", "consulting", + "consolidad", "consolidated", + "consonents", "consonants", + "consorcium", "consortium", + "conspirace", "conspiracies", + "conspiricy", "conspiracy", + "conspriacy", "conspiracy", + "constaints", "constraints", + "constatnly", "constantly", + "constently", "constantly", + "constitude", "constitute", + "constitued", "constitute", + "constituem", "constitute", + "constituer", "constitute", + "constitues", "constitutes", + "constituie", "constitute", + "constituit", "constitute", + "constitutn", "constituents", + "constituye", "constitute", + "constnatly", "constantly", + "constracts", "constructs", + "constraits", "constraints", + "constransi", "constraints", + "constrants", "constraints", + "construced", "constructed", + "constructo", "construction", + "construint", "constraint", + "construits", "constructs", + "construted", "constructed", + "consueling", "consulting", + "consultata", "consultant", + "consultate", "consultant", + "consultati", "consultant", + "consultato", "consultation", + "consultent", "consultant", + "consumated", "consummated", + "consumbale", "consumables", + "consuments", "consumes", + "consumirem", "consumerism", + "consumires", "consumerism", + "consumirse", "consumerism", + "consumiste", "consumes", + "consumpion", "consumption", + "contaction", "contacting", + "contageous", "contagious", + "contagiosa", "contagious", + "contagioso", "contagious", + "contaigous", "contagious", + "containors", "containers", + "contaminen", "containment", + "contanting", "contacting", + "contection", "contention", + "contectual", "contextual", + "conteiners", "contenders", + "contempate", "contemplate", + "contemplat", "contempt", + "contempory", "contemporary", + "contenants", "continents", + "contencion", "contention", + "contendors", "contenders", + "contenents", "continents", + "conteneurs", "contenders", + "contengent", "contingent", + "contension", "contention", + "contentino", "contention", + "contentios", "contentious", + "contentous", "contentious", + "contestais", "contests", + "contestans", "contests", + "contestase", "contests", + "contestion", "contention", + "contestors", "contests", + "contextful", "contextual", + "contextuel", "contextual", + "contextura", "contextual", + "contianers", "containers", + "contianing", "containing", + "contibuted", "contributed", + "contibutes", "contributes", + "contigents", "continents", + "contigious", "contagious", + "contignent", "contingent", + "continants", "continents", + "continenal", "continental", + "continenet", "continents", + "contineous", "continuous", + "continetal", 
"continental", + "contingecy", "contingency", + "contingeny", "contingency", + "continient", "contingent", + "continious", "continuous", + "continiuty", "continuity", + "contintent", "contingent", + "continualy", "continually", + "continuare", "continue", + "continuati", "continuity", + "continuato", "continuation", + "continuent", "contingent", + "continuety", "continuity", + "continunes", "continents", + "continuons", "continuous", + "continutiy", "continuity", + "continuuum", "continuum", + "contitnent", "contingent", + "contiuning", "containing", + "contiunity", "continuity", + "contorller", "controllers", + "contracing", "contracting", + "contractar", "contractor", + "contracter", "contractor", + "contractin", "contraction", + "contractos", "contracts", + "contradice", "contradicted", + "contradics", "contradicts", + "contredict", "contradict", + "contribued", "contributed", + "contribuem", "contribute", + "contribuer", "contribute", + "contribues", "contributes", + "contribuie", "contribute", + "contribuit", "contribute", + "contributo", "contribution", + "contributs", "contributes", + "contribuye", "contribute", + "contricted", "contracted", + "contridict", "contradict", + "contriubte", "contributes", + "controlelr", "controllers", + "controlers", "controls", + "controling", "controlling", + "controlles", "controls", + "controvery", "controversy", + "controvesy", "controversy", + "contrubite", "contributes", + "contrubute", "contribute", + "contuining", "continuing", + "contuinity", "continuity", + "convaluted", "convoluted", + "convcition", "convictions", + "conveinent", "convenient", + "conveluted", "convoluted", + "convencion", "convention", + "conveniant", "convenient", + "conveniece", "convenience", + "convenince", "convenience", + "convential", "conventional", + "converesly", "conversely", + "convergens", "converse", + "converison", "conversions", + "converning", "converting", + "conversare", "converse", + "conversino", "conversions", + "conversley", "conversely", + "conversoin", "conversions", + "conversons", "conversions", + "convertion", "conversion", + "convertire", "converter", + "converying", "converting", + "conveyered", "conveyed", + "conviccion", "conviction", + "conviciton", "conviction", + "convienent", "convenient", + "conviluted", "convoluted", + "convincted", "convince", + "convinsing", "convincing", + "convinving", "convincing", + "convoluded", "convoluted", + "convoulted", "convoluted", + "convulated", "convoluted", + "convuluted", "convoluted", + "cooperatve", "cooperative", + "coordenate", "coordinate", + "coordiante", "coordinate", + "coordinare", "coordinate", + "coordinato", "coordination", + "coordinats", "coordinates", + "coordonate", "coordinate", + "cooridnate", "coordinate", + "copehnagen", "copenhagen", + "copenaghen", "copenhagen", + "copenahgen", "copenhagen", + "copengagen", "copenhagen", + "copengahen", "copenhagen", + "copenhagan", "copenhagen", + "copenhague", "copenhagen", + "copenhagun", "copenhagen", + "copenhaven", "copenhagen", + "copenhegan", "copenhagen", + "copyrighed", "copyrighted", + "copyrigted", "copyrighted", + "corinthans", "corinthians", + "corinthias", "corinthians", + "corinthins", "corinthians", + "cornmitted", "committed", + "corporatie", "corporate", + "corralated", "correlated", + "corralates", "correlates", + "correccion", "correction", + "correciton", "corrections", + "correcters", "correctors", + "correctess", "correctness", + "correctivo", "correction", + "correctons", "corrections", + "corregated", "correlated", + 
"correkting", "correcting", + "correlatas", "correlates", + "correlatie", "correlated", + "correlatos", "correlates", + "correspend", "correspond", + "corrilated", "correlated", + "corrilates", "correlates", + "corrispond", "correspond", + "corrolated", "correlated", + "corrolates", "correlates", + "corrospond", "correspond", + "corrpution", "corruption", + "corrulates", "correlates", + "corrupcion", "corruption", + "cosmeticas", "cosmetics", + "cosmeticos", "cosmetics", + "costumized", "customized", + "counceling", "counseling", + "councellor", "councillor", + "councelors", "counselors", + "councilers", "councils", + "counselers", "counselors", + "counsellng", "counselling", + "counsilers", "counselors", + "counsiling", "counseling", + "counsilors", "counselors", + "counsolers", "counselors", + "counsoling", "counseling", + "countepart", "counteract", + "counteratk", "counteract", + "counterbat", "counteract", + "countercat", "counteract", + "countercut", "counteract", + "counteries", "counters", + "countoring", "countering", + "countryies", "countryside", + "countrying", "countering", + "courcework", "coursework", + "coursefork", "coursework", + "courthosue", "courthouse", + "courtrooom", "courtroom", + "cousnelors", "counselors", + "coutneract", "counteract", + "coutnering", "countering", + "covenental", "covenant", + "cranberrry", "cranberry", + "creationis", "creations", + "creationsm", "creationism", + "creationst", "creationist", + "creativily", "creatively", + "creativley", "creatively", + "credibilty", "credibility", + "creeperest", "creepers", + "crimanally", "criminally", + "criminalty", "criminally", + "criminalul", "criminally", + "criticable", "critical", + "criticarlo", "critical", + "criticiing", "criticising", + "criticisim", "criticism", + "criticisme", "criticise", + "criticisng", "criticising", + "criticists", "critics", + "criticisze", "criticise", + "criticizms", "criticisms", + "criticizng", "criticizing", + "critisiced", "criticized", + "critisicms", "criticisms", + "critisicsm", "criticisms", + "critisiscm", "criticisms", + "critisisms", "criticisms", + "critisizes", "criticises", + "critisizms", "criticisms", + "critiziced", "criticized", + "critizised", "criticized", + "critizisms", "criticisms", + "critizized", "criticized", + "crocodille", "crocodile", + "crossfiter", "crossfire", + "crutchetts", "crutches", + "crystalens", "crystals", + "crystalisk", "crystals", + "crystallis", "crystals", + "cuatiously", "cautiously", + "culterally", "culturally", + "cultrually", "culturally", + "culumative", "cumulative", + "culutrally", "culturally", + "cumbersone", "cumbersome", + "cumbursome", "cumbersome", + "cumpolsory", "compulsory", + "cumulitive", "cumulative", + "currancies", "currencies", + "currenctly", "currency", + "currenices", "currencies", + "currentfps", "currents", + "currentlys", "currents", + "currentpos", "currents", + "currentusa", "currents", + "curriculem", "curriculum", + "curriculim", "curriculum", + "curriences", "currencies", + "curroption", "corruption", + "custimized", "customized", + "customzied", "customized", + "custumized", "customized", + "cutscences", "cutscene", + "cutscenses", "cutscene", + "dangerouly", "dangerously", + "dealerhsip", "dealerships", + "deathamtch", "deathmatch", + "deathmacth", "deathmatch", + "debateable", "debatable", + "decembeard", "december", + "decendants", "descendants", + "decendents", "descendants", + "decideable", "decidable", + "deciptions", "depictions", + "decisiones", "decisions", + "declarasen", 
"declares", + "declaraste", "declares", + "declaremos", "declares", + "decomposit", "decompose", + "decoracion", "decoration", + "decorativo", "decoration", + "decoritive", "decorative", + "decroative", "decorative", + "decsending", "descending", + "dedicacion", "dedication", + "dedikation", "dedication", + "deducatble", "deductible", + "deducitble", "deductible", + "defacation", "defamation", + "defamating", "defamation", + "defanitely", "definately", + "defelction", "deflection", + "defendeers", "defender", + "defendents", "defendants", + "defenderes", "defenders", + "defenesman", "defenseman", + "defenselss", "defenseless", + "defensivly", "defensively", + "defianetly", "definately", + "defiantely", "definately", + "defiantley", "definately", + "defibately", "definately", + "deficately", "definately", + "deficiancy", "deficiency", + "deficience", "deficiencies", + "deficienct", "deficient", + "deficienty", "deficiency", + "defiintely", "definately", + "definaetly", "definately", + "definaitly", "definately", + "definaltey", "definately", + "definataly", "definately", + "definateky", "definately", + "definately", "definitely", + "definatily", "definately", + "defination", "definition", + "definative", "definitive", + "definatlly", "definately", + "definatrly", "definately", + "definayely", "definately", + "defineatly", "definately", + "definetaly", "definately", + "definetely", "definitely", + "definetily", "definately", + "definetlly", "definetly", + "definettly", "definately", + "definicion", "definition", + "definietly", "definitely", + "definining", "defining", + "definitaly", "definately", + "definiteyl", "definitly", + "definitivo", "definition", + "definitley", "definitely", + "definitlly", "definitly", + "definitlry", "definitly", + "definitlty", "definitly", + "definjtely", "definately", + "definltely", "definately", + "definotely", "definately", + "definstely", "definately", + "defintaley", "definately", + "defintiely", "definitely", + "defintiion", "definitions", + "definutely", "definately", + "deflaction", "deflection", + "defleciton", "deflection", + "deflektion", "deflection", + "defniately", "definately", + "degenarate", "degenerate", + "degenerare", "degenerate", + "degenerite", "degenerate", + "degoratory", "derogatory", + "degraderad", "degraded", + "dehydraded", "dehydrated", + "dehyrdated", "dehydrated", + "deifnately", "definately", + "deisgnated", "designated", + "delaership", "dealership", + "delearship", "dealership", + "delegaties", "delegate", + "delegative", "delegate", + "delfection", "deflection", + "delibarate", "deliberate", + "deliberant", "deliberate", + "delibirate", "deliberate", + "deligthful", "delightful", + "deliverate", "deliberate", + "deliverees", "deliveries", + "deliviered", "delivered", + "deliviring", "delivering", + "delporable", "deplorable", + "delpoyment", "deployment", + "delutional", "delusional", + "dementieva", "dementia", + "deminsions", "dimensions", + "democracis", "democracies", + "democracts", "democrat", + "democratas", "democrats", + "democrates", "democrats", + "demograhic", "demographic", + "demographs", "demographics", + "demograpic", "demographic", + "demolation", "demolition", + "demolicion", "demolition", + "demolision", "demolition", + "demolitian", "demolition", + "demoliting", "demolition", + "demoloshed", "demolished", + "demolution", "demolition", + "demonished", "demolished", + "demonstate", "demonstrate", + "demonstras", "demonstrates", + "demorcracy", "democracy", + "denegerate", "degenerate", + "denominato", 
"denomination", + "denomintor", "denominator", + "deocrative", "decorative", + "deomcratic", "democratic", + "deparments", "departments", + "departmens", "departments", + "departmnet", "departments", + "depcitions", "depictions", + "depdending", "depending", + "depencency", "dependency", + "dependance", "dependence", + "dependancy", "dependency", + "dependandt", "dependant", + "dependends", "depended", + "dependened", "depended", + "dependenta", "dependant", + "dependente", "dependence", + "depicitons", "depictions", + "deplorabel", "deplorable", + "deplorabil", "deplorable", + "deplorible", "deplorable", + "deplyoment", "deployment", + "depolyment", "deployment", + "depositers", "deposits", + "depressief", "depressive", + "depressies", "depressive", + "deprivaton", "deprivation", + "deragotory", "derogatory", + "derivaties", "derivatives", + "deriviated", "derived", + "derivitave", "derivative", + "derivitive", "derivative", + "derogatary", "derogatory", + "derogatery", "derogatory", + "derogetory", "derogatory", + "derogitory", "derogatory", + "derogotary", "derogatory", + "derogotory", "derogatory", + "derviative", "derivative", + "descendats", "descendants", + "descendend", "descended", + "descenting", "descending", + "descerning", "descending", + "descipable", "despicable", + "descisions", "decisions", + "descriibes", "describes", + "descripton", "description", + "desginated", "designated", + "desigining", "designing", + "desireable", "desirable", + "desktopbsd", "desktops", + "despciable", "despicable", + "desperatly", "desperately", + "desperetly", "desperately", + "despicaple", "despicable", + "despicible", "despicable", + "dessicated", "desiccated", + "destinatin", "destinations", + "destinaton", "destination", + "destoryers", "destroyers", + "destorying", "destroying", + "destroyeds", "destroyers", + "destroyeer", "destroyers", + "destrucion", "destruction", + "destrucive", "destructive", + "destryoing", "destroying", + "detectarlo", "detector", + "detectaron", "detector", + "detectoare", "detector", + "determinas", "determines", + "determinig", "determining", + "determinsm", "determinism", + "deutschand", "deutschland", + "devastaded", "devastated", + "devastaing", "devastating", + "devastanti", "devastating", + "devasteted", "devastated", + "develepors", "developers", + "develoeprs", "developers", + "developmet", "developments", + "developors", "develops", + "developped", "developed", + "developres", "develops", + "develpment", "development", + "devestated", "devastated", + "devolvendo", "devolved", + "deyhdrated", "dehydrated", + "diagnosied", "diagnose", + "diagnosies", "diagnosis", + "diagnositc", "diagnostic", + "diagnossed", "diagnose", + "diagnosted", "diagnose", + "diagnotics", "diagnostic", + "diagonstic", "diagnostic", + "dichotomoy", "dichotomy", + "dicitonary", "dictionary", + "diconnects", "disconnects", + "dicovering", "discovering", + "dictateurs", "dictates", + "dictionare", "dictionaries", + "differance", "difference", + "differenly", "differently", + "differense", "differences", + "differente", "difference", + "differentl", "differential", + "differenty", "differently", + "differnece", "difference", + "difficulte", "difficulties", + "difficults", "difficulties", + "difficutly", "difficulty", + "diffuculty", "difficulty", + "diganostic", "diagnostic", + "dimensinal", "dimensional", + "dimentions", "dimensions", + "dimesnions", "dimensions", + "dimineshes", "diminishes", + "diminising", "diminishing", + "dimunitive", "diminutive", + "dinosaures", "dinosaurs", + 
"dinosaurus", "dinosaurs", + "dipections", "depictions", + "diplimatic", "diplomatic", + "diplomacia", "diplomatic", + "diplomancy", "diplomacy", + "dipolmatic", "diplomatic", + "directinla", "directional", + "directionl", "directional", + "directivos", "directions", + "directores", "directors", + "directorys", "directors", + "directsong", "directions", + "disaapoint", "disappoint", + "disagreeed", "disagreed", + "disapeared", "disappeared", + "disappeard", "disappeared", + "disappered", "disappeared", + "disappiont", "disappoint", + "disaproval", "disapproval", + "disastorus", "disastrous", + "disastrosa", "disastrous", + "disastrose", "disastrous", + "disastrosi", "disastrous", + "disastroso", "disastrous", + "disaterous", "disastrous", + "discalimer", "disclaimer", + "discapline", "discipline", + "discepline", "discipline", + "disception", "discretion", + "discharded", "discharged", + "disciplers", "disciples", + "disciplies", "disciplines", + "disciplins", "disciplines", + "disciprine", "discipline", + "disclamier", "disclaimer", + "discliamer", "disclaimer", + "disclipine", "discipline", + "disclousre", "disclosure", + "disclsoure", "disclosure", + "discograhy", "discography", + "discograpy", "discography", + "discolsure", "disclosure", + "disconenct", "disconnect", + "disconncet", "disconnects", + "disconnets", "disconnects", + "discontued", "discounted", + "discoruage", "discourages", + "discources", "discourse", + "discourgae", "discourages", + "discourges", "discourages", + "discoveres", "discovers", + "discoveryd", "discovered", + "discoverys", "discovers", + "discrecion", "discretion", + "discreddit", "discredited", + "discrepany", "discrepancy", + "discresion", "discretion", + "discreting", "discretion", + "discribing", "describing", + "discrimine", "discriminate", + "discrouage", "discourages", + "discrption", "discretion", + "discusison", "discussions", + "discusting", "discussing", + "disgracful", "disgraceful", + "disgrunted", "disgruntled", + "disgruntld", "disgruntled", + "disguisted", "disguise", + "disgustiny", "disgustingly", + "disgustosa", "disgusts", + "disgustose", "disgusts", + "disgustosi", "disgusts", + "disgustoso", "disgusts", + "dishcarged", "discharged", + "dishinored", "dishonored", + "disicpline", "discipline", + "disiplined", "disciplined", + "dislcaimer", "disclaimer", + "dismanteld", "dismantled", + "dismanting", "dismantling", + "dismentled", "dismantled", + "dispecable", "despicable", + "dispencary", "dispensary", + "dispencers", "dispenser", + "dispencing", "dispensing", + "dispensare", "dispenser", + "dispensory", "dispensary", + "dispesnary", "dispensary", + "dispicable", "despicable", + "displayfps", "displays", + "dispositon", "disposition", + "dispostion", "disposition", + "disputerad", "disputed", + "disrecpect", "disrespect", + "disrection", "discretion", + "disrepsect", "disrespect", + "disresepct", "disrespect", + "disrespekt", "disrespect", + "disription", "disruption", + "disrispect", "disrespect", + "disrputing", "disrupting", + "disruptivo", "disruption", + "disruptron", "disruption", + "dissapears", "disappears", + "dissappear", "disappear", + "disscusion", "discussion", + "dissmisive", "dismissive", + "dissodance", "dissonance", + "dissonante", "dissonance", + "dissonence", "dissonance", + "distastful", "distasteful", + "disticntly", "distinctly", + "distiction", "distinction", + "distincion", "distinction", + "distincive", "distinctive", + "distinclty", "distinctly", + "distinctie", "distinctive", + "distinctin", "distinctions", + 
"distingish", "distinguish", + "distingush", "distinguish", + "distintcly", "distinctly", + "distoriton", "distortion", + "distorsion", "distortion", + "distortian", "distortion", + "distortron", "distortion", + "distractes", "distracts", + "distractia", "district", + "distractin", "district", + "distractiv", "district", + "distration", "distortion", + "distribuem", "distribute", + "distribuer", "distribute", + "distribuie", "distribute", + "distribuit", "distribute", + "distributs", "distributors", + "distribuye", "distribute", + "distrotion", "distortion", + "distrubing", "disturbing", + "distrubtes", "distrust", + "distrubute", "distribute", + "distubring", "disturbing", + "disturbace", "disturbance", + "disturping", "disrupting", + "disucssing", "discussing", + "disucssion", "discussion", + "disurption", "disruption", + "ditributed", "distributed", + "diversifiy", "diversify", + "dividendes", "dividends", + "dividendos", "dividends", + "divideneds", "dividend", + "divinition", "divination", + "divinitory", "divinity", + "divisiones", "divisions", + "dobulelift", "doublelift", + "doccuments", "documents", + "documentry", "documentary", + "dogmatisch", "dogmatic", + "dolphinese", "dolphins", + "domianting", "dominating", + "domimation", "domination", + "dominacion", "domination", + "dominaters", "dominates", + "donwgraded", "downgraded", + "donwloaded", "downloaded", + "donwvoters", "downvoters", + "donwvoting", "downvoting", + "doomsdaily", "doomsday", + "doubellift", "doublelift", + "doubleiift", "doublelift", + "doubleleft", "doublelift", + "doublelfit", "doublelift", + "doublerift", "doublelift", + "doulbelift", "doublelift", + "downgarded", "downgraded", + "downgrated", "downgrade", + "downlaoded", "downloaded", + "downloadas", "downloads", + "downloades", "downloads", + "downovting", "downvoting", + "downroaded", "downgraded", + "downsiders", "downsides", + "downstaris", "downstairs", + "downstiars", "downstairs", + "downtokers", "downvoters", + "downtoking", "downvoting", + "downtraded", "downgraded", + "downviting", "downvoting", + "downvotear", "downvoters", + "downvoteas", "downvoters", + "downvoteds", "downvoters", + "downvotees", "downvoters", + "downvotesd", "downvoters", + "downvotess", "downvoters", + "downvotest", "downvoters", + "downvoteur", "downvoters", + "downvoties", "downvoters", + "downvotres", "downvoters", + "downvotted", "downvote", + "downvottes", "downvoters", + "downwoters", "downvoters", + "downwoting", "downvoting", + "drasticaly", "drastically", + "drasticlly", "drastically", + "draughtman", "draughtsman", + "dumbbellls", "dumbbells", + "dumbfouded", "dumbfounded", + "dumbfouned", "dumbfounded", + "dungeoness", "dungeons", + "dupilcates", "duplicates", + "duplicants", "duplicates", + "duplicatas", "duplicates", + "duplicitas", "duplicates", + "duplifaces", "duplicates", + "durabiltiy", "durability", + "dyamically", "dynamically", + "dynamicaly", "dynamically", + "dynamicdns", "dynamics", + "dynamiclly", "dynamically", + "dynamicpsf", "dynamics", + "dynamitage", "dynamite", + "dysfuncion", "dysfunction", + "earhtbound", "earthbound", + "earthqauke", "earthquake", + "earthquack", "earthquake", + "earthquaks", "earthquakes", + "earthquate", "earthquake", + "earthqukes", "earthquakes", + "easthetics", "aesthetics", + "ecoligical", "ecological", + "ecomonical", "economical", + "econimical", "economical", + "econimists", "economists", + "economicas", "economics", + "economicos", "economics", + "economicus", "economics", + "economisch", "economic", + 
"economisit", "economists", + "effeciency", "efficiency", + "effectivly", "effectively", + "efficeincy", "efficiency", + "efficently", "efficiently", + "efficiancy", "efficiency", + "efficienct", "efficient", + "efficienty", "efficiently", + "egotistcal", "egotistical", + "ehtnically", "ethnically", + "ejaculaion", "ejaculation", + "ejaculatie", "ejaculate", + "ejaculatin", "ejaculation", + "ejaculaton", "ejaculation", + "ejaculatte", "ejaculate", + "electircal", "electrical", + "electivite", "elective", + "electoraat", "electorate", + "electorale", "electorate", + "electorite", "electorate", + "electornic", "electronic", + "electrican", "electrician", + "electriciy", "electricity", + "electricty", "electricity", + "electrinic", "electrician", + "electroate", "electorate", + "electrodan", "electron", + "electroinc", "electron", + "electrolye", "electrolytes", + "electroman", "electron", + "electroncs", "electrons", + "electrones", "electrons", + "electronik", "election", + "electronis", "electronics", + "electronix", "election", + "elemantary", "elementary", + "elementery", "elementary", + "elementray", "elementary", + "eleminated", "eliminated", + "elephantes", "elephants", + "elephantis", "elephants", + "elephantos", "elephants", + "elephantus", "elephants", + "eletricity", "electricity", + "elimanates", "eliminates", + "elimenates", "eliminates", + "elimentary", "elementary", + "elimimates", "eliminates", + "eliminaste", "eliminates", + "eliminatin", "elimination", + "eliminaton", "elimination", + "eliminster", "eliminates", + "ellipitcal", "elliptical", + "ellipsical", "elliptical", + "ellipticle", "elliptical", + "ellitpical", "elliptical", + "ellpitical", "elliptical", + "eloquantly", "eloquently", + "eloquintly", "eloquently", + "emapthetic", "empathetic", + "embarassed", "embarrassed", + "embarassig", "embarassing", + "embarrased", "embarrassed", + "embarrases", "embarrassed", + "embezelled", "embezzled", + "emblamatic", "emblematic", + "embodyment", "embodiment", + "emergenies", "emergencies", + "emmigrated", "emigrated", + "emminently", "eminently", + "emmisaries", "emissaries", + "emobdiment", "embodiment", + "emotionaly", "emotionally", + "empahsized", "emphasized", + "empahsizes", "emphasizes", + "empathatic", "empathetic", + "emphacized", "emphasized", + "emphatetic", "empathetic", + "emphatised", "emphasized", + "emphatized", "emphasized", + "emphatizes", "emphasizes", + "emphazised", "emphasized", + "emphazises", "emphasizes", + "emphesized", "emphasized", + "emphesizes", "emphasizes", + "emphisized", "emphasized", + "emphisizes", "emphasizes", + "empiricaly", "empirically", + "employeers", "employees", + "employeurs", "employer", + "emprisoned", "imprisoned", + "encahnting", "enchanting", + "enchancing", "enchanting", + "enchanging", "enchanting", + "enchantent", "enchantment", + "enchantmet", "enchantments", + "encompases", "encompasses", + "encounterd", "encountered", + "encountred", "encountered", + "encouraing", "encouraging", + "encoutners", "encounters", + "encription", "encryption", + "encrpytion", "encryption", + "encyrption", "encryption", + "endlessley", "endlessly", + "endolithes", "endoliths", + "enforceing", "enforcing", + "engagemnet", "engagements", + "engagemnts", "engagements", + "engieneers", "engineers", + "enginereed", "engineered", + "enivitable", "inevitable", + "enlargment", "enlargement", + "enlighment", "enlighten", + "enlightend", "enlightened", + "enlightned", "enlightened", + "enrolement", "enrollment", + "enrollemnt", "enrollment", + 
"enterpirse", "enterprise", + "enterprice", "enterprise", + "enterpries", "enterprises", + "enterprize", "enterprise", + "enterprsie", "enterprises", + "enterrpise", "enterprises", + "entertaing", "entertaining", + "enthically", "ethnically", + "enthisiast", "enthusiast", + "enthuiasts", "enthusiast", + "enthuisast", "enthusiasts", + "enthusiams", "enthusiasm", + "enthusiant", "enthusiast", + "enthusiats", "enthusiast", + "enthusiest", "enthusiast", + "enthusists", "enthusiasts", + "envelopped", "envelope", + "enveloppen", "envelope", + "enveloppes", "envelope", + "enviorment", "environment", + "enviroment", "environment", + "environmet", "environments", + "equiavlent", "equivalents", + "equilavent", "equivalent", + "equilibium", "equilibrium", + "equilibrim", "equilibrium", + "equilibrum", "equilibrium", + "equippment", "equipment", + "equitorial", "equatorial", + "equivalant", "equivalent", + "equivalnce", "equivalence", + "equivalnet", "equivalents", + "equivelant", "equivalent", + "equivelent", "equivalent", + "equivilant", "equivalent", + "equivilent", "equivalent", + "equivlaent", "equivalents", + "equivolent", "equivalent", + "eratically", "erratically", + "escalative", "escalate", + "escavation", "escalation", + "esitmation", "estimation", + "esoterisch", "esoteric", + "especailly", "especially", + "espeically", "especially", + "espressino", "espresso", + "espression", "espresso", + "essencials", "essentials", + "essensials", "essentials", + "essentails", "essentials", + "essentialy", "essentially", + "essentiels", "essentials", + "essentuals", "essentials", + "estabishes", "establishes", + "estimacion", "estimation", + "estimativo", "estimation", + "estination", "estimation", + "ethicallly", "ethically", + "ethincally", "ethnically", + "ethnicites", "ethnicities", + "ethnicitiy", "ethnicity", + "euphorical", "euphoria", + "euphorisch", "euphoric", + "euthanaisa", "euthanasia", + "euthanazia", "euthanasia", + "euthanesia", "euthanasia", + "evaluacion", "evaluation", + "evalutaion", "evaluation", + "evaulating", "evaluating", + "evaulation", "evaluation", + "eventaully", "eventually", + "eventially", "eventually", + "everyoneis", "everyones", + "exacberate", "exacerbated", + "exagerated", "exaggerated", + "exagerates", "exaggerates", + "exagerrate", "exaggerate", + "exaggarate", "exaggerate", + "exaggurate", "exaggerate", + "exahusting", "exhausting", + "exahustion", "exhaustion", + "examinated", "examined", + "examinerad", "examined", + "exapansion", "expansion", + "exapnsions", "expansions", + "exauhsting", "exhausting", + "exauhstion", "exhaustion", + "excecuting", "executing", + "excecution", "execution", + "exceedigly", "exceedingly", + "exceedinly", "exceedingly", + "excellance", "excellence", + "excellenet", "excellence", + "excellenze", "excellence", + "excerising", "exercising", + "excessivly", "excessively", + "exchangees", "exchanges", + "excitiment", "excitement", + "exclsuives", "exclusives", + "exclusivas", "exclusives", + "exclusivly", "exclusively", + "exclusivos", "exclusives", + "exclusivty", "exclusivity", + "exclussive", "exclusives", + "exclusvies", "exclusives", + "excpetions", "exceptions", + "exculsives", "exclusives", + "exculsivly", "exclusively", + "execptions", "exceptions", + "exectuable", "executable", + "exectuions", "executions", + "exectuives", "executives", + "execusions", "executions", + "executabil", "executable", + "executible", "executable", + "executiner", "executioner", + "executings", "executions", + "executivas", "executives", + 
"exeedingly", "exceedingly", + "exepmtions", "exemptions", + "exeptional", "exceptional", + "exercicing", "exercising", + "exercizing", "exercising", + "exersicing", "exercising", + "exersising", "exercising", + "exersizing", "exercising", + "exerternal", "external", + "exeuctions", "executions", + "exhasuting", "exhausting", + "exhasution", "exhaustion", + "exhaustivo", "exhaustion", + "exhibicion", "exhibition", + "exhibitons", "exhibits", + "exhuasting", "exhausting", + "exhuastion", "exhaustion", + "exibitions", "exhibitions", + "exictement", "excitement", + "exipration", "expiration", + "existantes", "existent", + "existenial", "existential", + "existental", "existential", + "exlcusives", "exclusives", + "exorbatant", "exorbitant", + "exorbatent", "exorbitant", + "exorbidant", "exorbitant", + "exorbirant", "exorbitant", + "exorbitent", "exorbitant", + "expalining", "explaining", + "expanisons", "expansions", + "expansivos", "expansions", + "expanssion", "expansions", + "expantions", "expansions", + "expecially", "especially", + "expectaion", "expectation", + "expectansy", "expectancy", + "expectency", "expectancy", + "expections", "exceptions", + "expedetion", "expedition", + "expedicion", "expedition", + "expeditivo", "expedition", + "expeiments", "experiments", + "expemtions", "exemptions", + "expendeble", "expendable", + "expendible", "expendable", + "expensable", "expendable", + "expentancy", "expectancy", + "expereince", "experience", + "experement", "experiment", + "experiance", "experience", + "experieced", "experienced", + "experieces", "experiences", + "experiemnt", "experiment", + "experiened", "experienced", + "experiense", "experiences", + "expermient", "experiments", + "experssion", "expression", + "expextancy", "expectancy", + "expidetion", "expedition", + "expierence", "experience", + "expination", "expiration", + "expirement", "experiment", + "explanatin", "explanations", + "explicatia", "explicit", + "explicatie", "explicit", + "explicatif", "explicit", + "explicatii", "explicit", + "explicetly", "explicitly", + "explicilty", "explicitly", + "explioting", "exploiting", + "exploiding", "exploiting", + "exploition", "exploiting", + "explorarea", "explorer", + "exploreres", "explorers", + "explosivas", "explosives", + "explossion", "explosions", + "explossive", "explosives", + "explosvies", "explosives", + "explotions", "explosions", + "explusions", "explosions", + "expodition", "exposition", + "expoliting", "exploiting", + "expolsions", "explosions", + "expolsives", "explosives", + "exponental", "exponential", + "exposicion", "exposition", + "expositivo", "exposition", + "expotition", "exposition", + "exprensive", "expressive", + "expresions", "expression", + "expresison", "expressions", + "expressens", "expresses", + "expressief", "expressive", + "expressley", "expressly", + "expriation", "expiration", + "extensivly", "extensively", + "extentions", "extensions", + "exterioara", "exterior", + "exterioare", "exterior", + "extermally", "externally", + "extermists", "extremists", + "extraccion", "extraction", + "extractivo", "extraction", + "extractnow", "extraction", + "extradtion", "extraction", + "extremaste", "extremes", + "extremeley", "extremely", + "extremelly", "extremely", + "extrememly", "extremely", + "extremests", "extremists", + "extremised", "extremes", + "extremisim", "extremism", + "extremisme", "extremes", + "extremiste", "extremes", + "extrenally", "externally", + "extrimists", "extremists", + "eyeballers", "eyeballs", + "fabriacted", "fabricated", + 
"fabricatie", "fabricated", + "faciliated", "facilitated", + "facilitait", "facilitate", + "facilitant", "facilitate", + "facilitare", "facilitate", + "facisnated", "fascinated", + "facitilies", "facilities", + "facsinated", "fascinated", + "fahernheit", "fahrenheit", + "fahrenhiet", "fahrenheit", + "fallatious", "fallacious", + "fallicious", "fallacious", + "falshbacks", "flashbacks", + "familiarty", "familiarity", + "familiarze", "familiarize", + "fanaticals", "fanatics", + "fanfaction", "fanfiction", + "fanfcition", "fanfiction", + "fanficiton", "fanfiction", + "fanserivce", "fanservice", + "fanservise", "fanservice", + "fanservive", "fanservice", + "fantasiose", "fantasies", + "farehnheit", "fahrenheit", + "farhenheit", "fahrenheit", + "fascianted", "fascinated", + "fascinatie", "fascinated", + "fascinatin", "fascination", + "fascistisk", "fascists", + "fatalaties", "fatalities", + "favoruites", "favourites", + "favourates", "favourites", + "favouritsm", "favourites", + "favourties", "favourites", + "federacion", "federation", + "federativo", "federation", + "fellowhsip", "fellowship", + "fellowshop", "fellowship", + "feminimity", "femininity", + "feministas", "feminists", + "feminitity", "femininity", + "fermentato", "fermentation", + "fertalizer", "fertilizer", + "fertelizer", "fertilizer", + "fertilizar", "fertilizer", + "fertilzier", "fertilizer", + "fertiziler", "fertilizer", + "festivales", "festivals", + "fetishiste", "fetishes", + "ficticious", "fictitious", + "filessytem", "filesystem", + "filesytems", "filesystem", + "filmamkers", "filmmakers", + "filmmakare", "filmmakers", + "finallizes", "finalizes", + "financialy", "financially", + "fingernals", "fingernails", + "fingerpies", "fingertips", + "fingerpint", "fingerprint", + "fingertaps", "fingertips", + "fingertits", "fingertips", + "fingertops", "fingertips", + "fireballls", "fireballs", + "firefigher", "firefighter", + "firefigter", "firefighter", + "firendlies", "friendlies", + "firghtened", "frightened", + "fisionable", "fissionable", + "flashligth", "flashlight", + "flaskbacks", "flashbacks", + "flawleslly", "flawlessly", + "flexibiliy", "flexibility", + "flexibilty", "flexibility", + "flimmakers", "filmmakers", + "fluctuatie", "fluctuate", + "fluctuatin", "fluctuations", + "flutterhsy", "fluttershy", + "fluttersky", "fluttershy", + "flutterspy", "fluttershy", + "forcifully", "forcefully", + "forecfully", "forcefully", + "foreginers", "foreigners", + "foregorund", "foreground", + "foreignese", "foreigners", + "foreigness", "foreigners", + "foreignors", "foreigners", + "foreingers", "foreigners", + "forensisch", "forensic", + "foreseeble", "foreseeable", + "forgeiners", "foreigners", + "forgieners", "foreigners", + "forgivance", "forgiven", + "forgivenss", "forgiveness", + "forgotting", "forgetting", + "foriegners", "foreigners", + "formadible", "formidable", + "formalhaut", "fomalhaut", + "formallity", "formally", + "formallize", "formalize", + "formatiing", "formatting", + "formatings", "formations", + "formativos", "formations", + "formidabel", "formidable", + "formidabil", "formidable", + "formidible", "formidable", + "forminable", "formidable", + "formitable", "formidable", + "formuladas", "formulas", + "formulados", "formulas", + "forseeable", "foreseeable", + "fortelling", "foretelling", + "fortunatly", "fortunately", + "fortunetly", "fortunately", + "foundaiton", "foundations", + "foundaries", "foundries", + "foundatoin", "foundations", + "fractalers", "fractals", + "fractalius", "fractals", + "fractalpus", 
"fractals", + "fracturare", "fracture", + "fragmanted", "fragment", + "francaises", "franchises", + "franchices", "franchises", + "franchines", "franchises", + "franchizes", "franchises", + "franchsies", "franchises", + "fransiscan", "franciscan", + "franticaly", "frantically", + "franticlly", "frantically", + "fraternaty", "fraternity", + "fraternety", "fraternity", + "fraterntiy", "fraternity", + "fraturnity", "fraternity", + "fraudalent", "fraudulent", + "fraudelant", "fraudulent", + "fraudelent", "fraudulent", + "fraudolent", "fraudulent", + "fraudulant", "fraudulent", + "freedomers", "freedoms", + "freedomest", "freedoms", + "freindlies", "friendlies", + "freindship", "friendship", + "frequencey", "frequency", + "friednship", "friendships", + "friednzone", "friendzoned", + "friendhsip", "friendship", + "friendsies", "friendlies", + "friendzies", "friendlies", + "friendzond", "friendzoned", + "frientship", "friendship", + "frigthened", "frightened", + "fromatting", "formatting", + "fromidable", "formidable", + "frontlinie", "frontline", + "fruadulent", "fraudulent", + "frustraded", "frustrated", + "frustradet", "frustrates", + "frustraits", "frustrates", + "frustrants", "frustrates", + "frustratin", "frustration", + "frustrsted", "frustrates", + "fucntional", "functional", + "fulfulling", "fulfilling", + "fullfiling", "fulfilling", + "fullfilled", "fulfilled", + "fullscrean", "fullscreen", + "fulttershy", "fluttershy", + "funcitonal", "functional", + "fundametal", "fundamental", + "furstrated", "frustrated", + "furstrates", "frustrates", + "furutistic", "futuristic", + "futhermore", "furthermore", + "futurestic", "futuristic", + "futurisitc", "futuristic", + "futurustic", "futuristic", + "galvinized", "galvanized", + "garuanteed", "guaranteed", + "garuantees", "guarantees", + "gauntanamo", "guantanamo", + "gauntlents", "gauntlet", + "gauranteed", "guaranteed", + "gaurantees", "guarantees", + "gaurenteed", "guaranteed", + "gaurentees", "guarantees", + "generalice", "generalize", + "generalife", "generalize", + "generalnie", "generalize", + "generaters", "generates", + "generaties", "generate", + "generatios", "generators", + "generatons", "generators", + "generatore", "generate", + "generelize", "generalize", + "generocity", "generosity", + "generoisty", "generosity", + "generostiy", "generosity", + "geneticaly", "genetically", + "geneticlly", "genetically", + "genitalias", "genitals", + "genuinelly", "genuinely", + "geographia", "geographical", + "geogrpahic", "geographic", + "germanisch", "germanic", + "gigantisch", "gigantic", + "gimmickers", "gimmicks", + "girlfirend", "girlfriend", + "girlfreind", "girlfriend", + "girlfriens", "girlfriends", + "girlfrinds", "girlfriends", + "girlfrined", "girlfriends", + "goalkeaper", "goalkeeper", + "goalkeeprs", "goalkeeper", + "goalkepeer", "goalkeeper", + "goegraphic", "geographic", + "golakeeper", "goalkeeper", + "goldburger", "goldberg", + "goosebumbs", "goosebumps", + "goosegumps", "goosebumps", + "goosepumps", "goosebumps", + "gothenberg", "gothenburg", + "govenrment", "government", + "govermenet", "goverment", + "govermnent", "governments", + "governemnt", "government", + "governened", "governed", + "governered", "governed", + "governmant", "governmental", + "governmetn", "governments", + "governmnet", "government", + "govnerment", "government", + "govornment", "government", + "gradiating", "graduating", + "gradiation", "graduation", + "graduacion", "graduation", + "grapefriut", "grapefruit", + "grapefrukt", "grapefruit", + "graphicaly", 
"graphically", + "graphiclly", "graphically", + "gratituous", "gratuitous", + "gratiutous", "gratuitous", + "gratuidous", "gratuitous", + "gratuituos", "gratuitous", + "gratutious", "gratuitous", + "graudating", "graduating", + "graudation", "graduation", + "gravitatie", "gravitate", + "greatfully", "gratefully", + "greenhosue", "greenhouse", + "greviances", "grievances", + "grievences", "grievances", + "grilfriend", "girlfriend", + "guaduloupe", "guadalupe", + "guanatanmo", "guantanamo", + "guantamamo", "guantanamo", + "guantamano", "guantanamo", + "guantanano", "guantanamo", + "guantanemo", "guantanamo", + "guantanoma", "guantanamo", + "guantanomo", "guantanamo", + "guantonamo", "guantanamo", + "guarantess", "guarantees", + "guardiands", "guardians", + "guardianes", "guardians", + "guardianis", "guardians", + "guarenteed", "guaranteed", + "guarentees", "guarantees", + "guarnateed", "guaranteed", + "guarnatees", "guarantees", + "guarunteed", "guaranteed", + "guaruntees", "guarantees", + "guatamalan", "guatemalan", + "gunatanamo", "guantanamo", + "gunlsinger", "gunslinger", + "gunsiinger", "gunslinger", + "gunslanger", "gunslinger", + "gunsligner", "gunslinger", + "gunstinger", "gunslinger", + "gymanstics", "gymnastics", + "gymnasitcs", "gymnastics", + "gynmastics", "gymnastics", + "haemorrage", "haemorrhage", + "halloweeen", "halloween", + "hambergers", "hamburgers", + "hamburgare", "hamburger", + "hamburgesa", "hamburgers", + "hamburgles", "hamburgers", + "hamburgurs", "hamburgers", + "handcuffes", "handcuffs", + "handelbars", "handlebars", + "handicaped", "handicapped", + "handwritng", "handwriting", + "harasments", "harassments", + "hardlinked", "hardline", + "harmoniacs", "harmonic", + "harmonisch", "harmonic", + "harrasment", "harassment", + "harrassing", "harassing", + "harvasting", "harvesting", + "haversting", "harvesting", + "headhpones", "headphones", + "headphoens", "headphones", + "headquarer", "headquarter", + "headquater", "headquarter", + "headshoots", "headshot", + "healtchare", "healthcare", + "healtheast", "healthiest", + "healthyest", "healthiest", + "heapdhones", "headphones", + "heartbeart", "heartbeat", + "heartbeast", "heartbeat", + "heartborne", "heartbroken", + "heartbrake", "heartbreak", + "hearthsone", "hearthstone", + "heatlhcare", "healthcare", + "heavyweght", "heavyweight", + "heavyweigt", "heavyweight", + "hedgehodge", "hedgehog", + "heidelburg", "heidelberg", + "heigthened", "heightened", + "heistation", "hesitation", + "helathcare", "healthcare", + "helicopers", "helicopters", + "helicoptor", "helicopter", + "helicotper", "helicopters", + "helicpoter", "helicopter", + "helictoper", "helicopters", + "helikopter", "helicopter", + "hemingwary", "hemingway", + "hemingwavy", "hemingway", + "hemipshere", "hemisphere", + "hemishpere", "hemisphere", + "hemmorhage", "hemorrhage", + "hempishere", "hemisphere", + "herculeans", "hercules", + "herculeasy", "hercules", + "herculeees", "hercules", + "hesitstion", "hesitation", + "hestiation", "hesitation", + "hieghtened", "heightened", + "hierachies", "hierarchies", + "hieroglphs", "hieroglyphs", + "highalnder", "highlander", + "highlighed", "highlighted", + "highligted", "highlighted", + "highloader", "highlander", + "highpander", "highlander", + "highscholl", "highschool", + "highshcool", "highschool", + "hillarious", "hilarious", + "hinderance", "hindrance", + "hinderence", "hindrance", + "hipsterest", "hipsters", + "hispanicos", "hispanics", + "hispanicus", "hispanics", + "histarical", "historical", + "histerical", 
"historical", + "historiaan", "historians", + "historicas", "historians", + "historicly", "historical", + "historiens", "histories", + "historisch", "historic", + "hoemopathy", "homeopathy", + "hollywoood", "hollywood", + "homecuming", "homecoming", + "homeoapthy", "homeopathy", + "homeonwers", "homeowners", + "homeopahty", "homeopathy", + "homeophaty", "homeopathy", + "homeopothy", "homeopathy", + "homeothapy", "homeopathy", + "homepoathy", "homeopathy", + "homewoners", "homeowners", + "homoepathy", "homeopathy", + "homogeneos", "homogeneous", + "homogeneus", "homogeneous", + "homophibia", "homophobia", + "homophibic", "homophobic", + "homophobie", "homophobe", + "homophonia", "homophobia", + "homophopia", "homophobia", + "homophopic", "homophobic", + "homosexaul", "homosexual", + "homosexuel", "homosexual", + "honeymooon", "honeymoon", + "hopefullly", "hopefully", + "hopeleslly", "hopelessly", + "horisontal", "horizontal", + "horizantal", "horizontal", + "horizontes", "horizons", + "horiztonal", "horizontal", + "horrendeus", "horrendous", + "horriblely", "horribly", + "hospitales", "hospitals", + "hospitalty", "hospitality", + "hospitible", "hospitable", + "hsitorians", "historians", + "humanaties", "humanities", + "humanitary", "humanity", + "humiliatin", "humiliation", + "humiliaton", "humiliation", + "humilitied", "humiliated", + "humillated", "humiliated", + "hurricance", "hurricane", + "hurriganes", "hurricanes", + "hurrikanes", "hurricanes", + "hurrycanes", "hurricanes", + "hydropilic", "hydrophilic", + "hydropobic", "hydrophobic", + "hyperbolie", "hyperbole", + "hyperlobic", "hyperbolic", + "hyperlogic", "hyperbolic", + "hypertrohy", "hypertrophy", + "hypertropy", "hypertrophy", + "hyphotesis", "hypothesis", + "hypocrates", "hypocrites", + "hypocriscy", "hypocrisy", + "hypocrises", "hypocrites", + "hypocritus", "hypocrites", + "hypocrties", "hypocrites", + "hypocrytes", "hypocrites", + "hypokrites", "hypocrites", + "hypothecis", "hypothesis", + "hypotheiss", "hypotheses", + "hypothesus", "hypotheses", + "hypothises", "hypotheses", + "hypothisis", "hypothesis", + "hypothosis", "hypothesis", + "hyprocites", "hypocrites", + "hystarical", "hysterical", + "hystericly", "hysterical", + "hysteriska", "hysteria", + "ibuprofein", "ibuprofen", + "ibuprofine", "ibuprofen", + "icelandinc", "icelandic", + "idealisitc", "idealistic", + "idealogies", "ideologies", + "identicial", "identical", + "identifyed", "identified", + "identitets", "identities", + "ideolagies", "ideologies", + "ideoligies", "ideologies", + "ideologias", "ideologies", + "ideologice", "ideologies", + "ideologije", "ideologies", + "ideologins", "ideologies", + "ideologisk", "ideologies", + "ideolouges", "ideologies", + "illegalest", "illegals", + "illegallly", "illegally", + "illegimacy", "illegitimacy", + "illegitime", "illegitimate", + "illegitimt", "illegitimate", + "illimunati", "illuminati", + "illinoians", "illinois", + "illistrate", "illiterate", + "illitarate", "illiterate", + "illitirate", "illiterate", + "illumanati", "illuminati", + "illumaniti", "illuminati", + "illumianti", "illuminati", + "illumimati", "illuminati", + "illuminaci", "illuminati", + "illuminadi", "illuminati", + "illuminami", "illuminati", + "illuminazi", "illuminati", + "illuminite", "illuminati", + "illuminiti", "illuminati", + "illuminoti", "illuminati", + "illuminuti", "illuminati", + "illumniati", "illuminati", + "illumunati", "illuminati", + "illuninati", "illuminati", + "illusiones", "illusions", + "illustrant", "illustrate", + 
"illustrare", "illustrate", + "illustrato", "illustration", + "imablanced", "imbalanced", + "imablances", "imbalances", + "imaginatie", "imaginative", + "imaginaton", "imagination", + "imaginitve", "imaginative", + "imbalenced", "imbalanced", + "imbalences", "imbalances", + "imcomplete", "incomplete", + "imediately", "immediately", + "imigration", "emigration", + "immaturaty", "immaturity", + "immaturety", "immaturity", + "immedeatly", "immediately", + "immediatly", "immediately", + "immedietly", "immediately", + "immenseley", "immensely", + "immidately", "immediately", + "immigranti", "immigration", + "immigrents", "immigrants", + "immitating", "imitating", + "immobilien", "immobile", + "immobilier", "immobile", + "immobilzed", "immobile", + "immobilzer", "immobile", + "immobilzes", "immobile", + "immortales", "immortals", + "immortalis", "immortals", + "immortaliy", "immortality", + "immortalls", "immortals", + "immortalty", "immortality", + "impartirla", "impartial", + "impecabbly", "impeccably", + "impeccible", "impeccable", + "impeckable", "impeccable", + "impelments", "implements", + "imperetive", "imperative", + "imperialsm", "imperialism", + "imperialst", "imperialist", + "imperitave", "imperative", + "imperitive", "imperative", + "implaments", "implements", + "implantase", "implants", + "implausble", "implausible", + "implausibe", "implausible", + "implemenet", "implements", + "implicatia", "implicit", + "implicatie", "implicit", + "implicatii", "implicit", + "implicetly", "implicitly", + "impliciete", "implicit", + "implicilty", "implicitly", + "impliments", "implements", + "imporbable", "improbable", + "importanly", "importantly", + "importanty", "importantly", + "importence", "importance", + "importerad", "imported", + "imporvised", "improvised", + "impossable", "impossible", + "impossbily", "impossibly", + "impossibal", "impossibly", + "impossibel", "impossibly", + "impossibry", "impossibly", + "impossibul", "impossibly", + "impractial", "impractical", + "impreative", "imperative", + "impresison", "impressions", + "impressoin", "impressions", + "impressons", "impressions", + "improbabil", "improbable", + "improbible", "improbable", + "impropable", "improbable", + "improsined", "imprisoned", + "improsoned", "imprisoned", + "improvemnt", "improvement", + "improvents", "improves", + "improvized", "improvised", + "imprsioned", "imprisoned", + "impulsemos", "impulses", + "imrpovised", "improvised", + "inablility", "inability", + "inaccruate", "inaccurate", + "inadaquate", "inadequate", + "inadaquete", "inadequate", + "inadecuate", "inadequate", + "inadeguate", "inadequate", + "inadeqaute", "inadequate", + "inadequete", "inadequate", + "inadequite", "inadequate", + "inadiquate", "inadequate", + "inagurated", "inaugurated", + "inbalanced", "imbalanced", + "inbetweeen", "inbetween", + "incarnaton", "incarnation", + "incentivos", "incentives", + "inchoerent", "incoherent", + "incidentes", "incidents", + "incidently", "incidentally", + "incidentul", "incidental", + "inclreased", "increased", + "incognitio", "incognito", + "incoherant", "incoherent", + "incohorent", "incoherent", + "incorectly", "incorrectly", + "incorrecly", "incorrectly", + "incorrecty", "incorrectly", + "incorretly", "incorrectly", + "incraments", "increments", + "incredable", "incredible", + "incredably", "incredibly", + "incremetal", "incremental", + "incriments", "increments", + "inctroduce", "introduce", + "indefenite", "indefinite", + "indefinate", "indefinite", + "indefinete", "indefinite", + "indefinity", 
"indefinitely", + "indeginous", "indigenous", + "indentical", "identical", + "independet", "independent", + "indepenent", "independent", + "inderictly", "indirectly", + "indicaters", "indicates", + "indicativo", "indication", + "indicatore", "indicate", + "indicitave", "indicative", + "indicitive", "indicative", + "indiffernt", "indifferent", + "indigenius", "indigenous", + "indiginous", "indigenous", + "indigneous", "indigenous", + "indikation", "indication", + "indireclty", "indirectly", + "indirektly", "indirectly", + "individuel", "individual", + "indiviudal", "individuals", + "indivudual", "individual", + "indoensian", "indonesian", + "indonasian", "indonesian", + "indoneisan", "indonesian", + "indonesean", "indonesian", + "indonesien", "indonesian", + "indonesion", "indonesian", + "indonisian", "indonesian", + "indonistan", "indonesian", + "indpendent", "independent", + "industiral", "industrial", + "industires", "industries", + "industrail", "industrial", + "industrees", "industries", + "industrias", "industries", + "industriel", "industrial", + "industrija", "industrial", + "industrije", "industries", + "indviduals", "individuals", + "inefficent", "inefficient", + "ineqaulity", "inequality", + "inequailty", "inequality", + "inevatible", "inevitable", + "inevetable", "inevitable", + "inevetably", "inevitably", + "inevetible", "inevitable", + "inevidable", "inevitable", + "inevidably", "inevitably", + "inevitible", "inevitable", + "inevitibly", "inevitably", + "inevtiable", "inevitable", + "inevtiably", "inevitably", + "infallable", "infallible", + "infaltable", "inflatable", + "infeccious", "infectious", + "infecteous", "infectious", + "infectuous", "infectious", + "infedility", "infidelity", + "infektious", "infectious", + "inferioara", "inferior", + "inferioare", "inferior", + "inferiorty", "inferiority", + "inferrence", "inference", + "infestaion", "infestation", + "infestaton", "infestation", + "infestions", "infections", + "infideltiy", "infidelity", + "infidility", "infidelity", + "infiltrade", "infiltrate", + "infiltrait", "infiltrate", + "infiltrare", "infiltrate", + "infiltrase", "infiltrate", + "infinately", "infinitely", + "infinetely", "infinitely", + "infiniment", "infinite", + "infinitley", "infinitely", + "infintiely", "infinitely", + "inflamable", "inflatable", + "inflateble", "inflatable", + "inflatible", "inflatable", + "infleunced", "influenced", + "inflitrate", "infiltrate", + "influanced", "influenced", + "influances", "influences", + "influencie", "influences", + "influening", "influencing", + "influensed", "influences", + "influenser", "influences", + "influenses", "influences", + "influental", "influential", + "influented", "influenced", + "influentes", "influences", + "influneced", "influenced", + "infograhic", "infographic", + "infograpic", "infographic", + "infomation", "information", + "informable", "informal", + "informarla", "informal", + "informarle", "informal", + "informarlo", "informal", + "informatie", "informative", + "informella", "informal", + "informerad", "informed", + "informtion", "information", + "infridging", "infringing", + "infrigning", "infringing", + "infulenced", "influenced", + "infulences", "influences", + "ingenuitiy", "ingenuity", + "ingrediant", "ingredient", + "ingrediens", "ingredients", + "ingrediets", "ingredient", + "inhabitans", "inhabitants", + "inhabitats", "inhabitants", + "inherantly", "inherently", + "inherintly", "inherently", + "inheritage", "heritage", + "inhernetly", "inherently", + "inifnitely", "infinitely", + 
"initaition", "initiation", + "initalised", "initialised", + "initaliser", "initialiser", + "initalises", "initialises", + "initalisms", "initialisms", + "initalized", "initialized", + "initalizer", "initializer", + "initalizes", "initializes", + "initalling", "initialling", + "initalness", "initialness", + "initiaitve", "initiatives", + "initiaties", "initiatives", + "initiativs", "initiatives", + "initiatves", "initiatives", + "initiavite", "initiatives", + "inititaive", "initiatives", + "inititiave", "initiatives", + "initmately", "intimately", + "initmidate", "intimidate", + "inituition", "initiation", + "injustaces", "injustices", + "injusticas", "injustices", + "inmigrants", "immigrants", + "innoavtion", "innovations", + "innocentes", "innocents", + "innotation", "innovation", + "innovacion", "innovation", + "innovaiton", "innovations", + "innovatief", "innovate", + "innovaties", "innovate", + "innovativo", "innovation", + "innvoation", "innovation", + "inofficial", "unofficial", + "inpsection", "inspection", + "inquisator", "inquisitor", + "inquisidor", "inquisitor", + "inquisiter", "inquisitor", + "inquisitio", "inquisitor", + "inquisitir", "inquisitor", + "inquisiton", "inquisition", + "inquistior", "inquisitor", + "inquizitor", "inquisitor", + "inqusitior", "inquisitor", + "insensitve", "insensitive", + "insepction", "inspection", + "insistance", "insistence", + "insistente", "insistence", + "insistenze", "insistence", + "insistince", "insistence", + "insitution", "institution", + "inspeccion", "inspection", + "inspeciton", "inspections", + "inspectons", "inspections", + "inspectres", "inspectors", + "inspektion", "inspection", + "inspektors", "inspectors", + "inspiraste", "inspires", + "inspiraton", "inspiration", + "inspirerad", "inspired", + "inspireras", "inspires", + "insrugency", "insurgency", + "instabiliy", "instability", + "instabilty", "instability", + "installeer", "installer", + "installent", "installment", + "installesd", "installs", + "installion", "installing", + "instatance", "instance", + "instelling", "installing", + "instituded", "instituted", + "instituion", "institution", + "institutie", "institute", + "institutue", "instituted", + "instrament", "instrument", + "instrcutor", "instructors", + "instrucion", "instruction", + "instructer", "instructor", + "instructie", "instructed", + "instruktor", "instructor", + "instuction", "instruction", + "instuments", "instruments", + "insturcted", "instructed", + "insturctor", "instructor", + "insturment", "instrument", + "instutions", "intuitions", + "instututed", "instituted", + "insurgance", "insurgency", + "insurgancy", "insurgency", + "intangable", "intangible", + "intangeble", "intangible", + "intangibil", "intangible", + "intanjible", "intangible", + "integraded", "integrated", + "integrarla", "integral", + "integrarlo", "integral", + "integratie", "integrated", + "integreres", "interferes", + "integreted", "integrated", + "inteligent", "intelligent", + "intenseley", "intensely", + "intensitiy", "intensity", + "intentinal", "intentional", + "intentines", "intestines", + "interacive", "interactive", + "interactes", "interacts", + "interactie", "interactive", + "interactue", "interacted", + "interasted", "interacted", + "interbread", "interbreed", + "intercepto", "interception", + "intercorse", "intercourse", + "intercouse", "intercourse", + "intereacts", "interfaces", + "interected", "interacted", + "interefers", "interferes", + "interesant", "interest", + "interesing", "interesting", + "interestes", "interests", + 
"interfacce", "interfaces", + "interfears", "interferes", + "interfeers", "interferes", + "interferce", "interferes", + "interferre", "interfere", + "intergated", "integrated", + "interioara", "interior", + "interioare", "interior", + "intermedie", "intermediate", + "internetbs", "internets", + "internetes", "internets", + "internetis", "internets", + "internetts", "internets", + "internetus", "internets", + "interprate", "interpret", + "interrugum", "interregnum", + "interruped", "interrupted", + "interstela", "interstellar", + "intervalls", "intervals", + "intervalos", "intervals", + "interveign", "intervening", + "interveing", "intervening", + "interveiws", "interviews", + "intervento", "intervention", + "intervenue", "intervene", + "interveres", "interferes", + "intervieni", "interviewing", + "intervieuw", "interviews", + "interviewd", "interviewed", + "interviewr", "interviewer", + "intervines", "intervenes", + "interviwed", "interviewed", + "interviwer", "interviewer", + "interwebbs", "interwebs", + "intestents", "intestines", + "intestinas", "intestines", + "intestinos", "intestines", + "intestions", "intestines", + "intidimate", "intimidate", + "intimadate", "intimidate", + "intimatley", "intimately", + "intimiated", "intimidate", + "intimidade", "intimidated", + "intimidant", "intimidate", + "intimidare", "intimidate", + "intimitade", "intimidated", + "intimitaly", "intimately", + "intimitate", "intimidate", + "intimitely", "intimately", + "intolarant", "intolerant", + "intolerace", "intolerance", + "intolerate", "intolerant", + "intolerent", "intolerant", + "intolorant", "intolerant", + "intolorent", "intolerant", + "intorduced", "introduced", + "intorduces", "introduces", + "intorverts", "introverts", + "intoxicted", "intoxicated", + "intraverts", "introverts", + "intreguing", "intriguing", + "intricaces", "intricacies", + "intriguied", "intrigue", + "intrigured", "intrigue", + "intrinseci", "intrinsic", + "intrinsinc", "intrinsic", + "intriquing", "intriguing", + "intriuging", "intriguing", + "introdecks", "introduces", + "introdused", "introduces", + "introvents", "introverts", + "introvered", "introverted", + "introversa", "introverts", + "introverse", "introverts", + "introversi", "introverts", + "introverso", "introverts", + "introversy", "introverts", + "introveted", "introverted", + "intruduced", "introduced", + "intruduces", "introduces", + "intruiging", "intriguing", + "intruments", "instruments", + "intuitevly", "intuitively", + "intuitivly", "intuitively", + "intuitivno", "intuition", + "intutively", "intuitively", + "inumerable", "enumerable", + "inusrgency", "insurgency", + "invaderats", "invaders", + "invaildate", "invalidates", + "invairably", "invariably", + "invaldiate", "invalidates", + "invalidade", "invalidate", + "invalidare", "invalidate", + "invalubale", "invaluable", + "invalueble", "invaluable", + "invaraibly", "invariably", + "invariabil", "invariably", + "invaribaly", "invariably", + "invaulable", "invaluable", + "inveitable", "inevitable", + "inveitably", "inevitably", + "invensions", "inventions", + "inventario", "inventor", + "inventarlo", "inventor", + "inventaron", "inventor", + "inventings", "inventions", + "inventivos", "inventions", + "invertendo", "inverted", + "inverterad", "inverted", + "invertions", "inventions", + "investemnt", "investments", + "investiage", "investigate", + "investions", "inventions", + "investirat", "investigator", + "investmens", "investments", + "invicinble", "invincible", + "invididual", "individual", + "invincable", 
"invincible", + "invinceble", "invincible", + "invinicble", "invincible", + "invinsible", "invincible", + "invinvible", "invincible", + "invisibily", "invisibility", + "invitacion", "invitation", + "invitating", "invitation", + "involunary", "involuntary", + "involvment", "involvement", + "ironcially", "ironically", + "irracional", "irrational", + "irrationel", "irrational", + "irrelavant", "irrelevant", + "irrelavent", "irrelevant", + "irrelevent", "irrelevant", + "irrelivant", "irrelevant", + "irrelivent", "irrelevant", + "irrevelant", "irrelevant", + "irreverant", "irrelevant", + "irridation", "irritation", + "irriration", "irritation", + "irritacion", "irritation", + "irritaties", "irritate", + "islamisist", "islamist", + "islamistas", "islamists", + "isntalling", "installing", + "isntructed", "instructed", + "isntrument", "instrument", + "israeliens", "israelis", + "israelitas", "israelis", + "italianess", "italians", + "itnroduced", "introduced", + "jailborken", "jailbroken", + "jalibroken", "jailbroken", + "jamaicains", "jamaican", + "jamaicaman", "jamaican", + "jerusaleum", "jerusalem", + "jounralism", "journalism", + "jounralist", "journalist", + "jouranlism", "journalism", + "jouranlist", "journalist", + "journalims", "journals", + "journalits", "journals", + "journalizm", "journalism", + "journalsim", "journalism", + "journolist", "journalist", + "judegments", "judgements", + "judgemenal", "judgemental", + "judgemetal", "judgemental", + "jugdements", "judgements", + "juggarnaut", "juggernaut", + "juggeranut", "juggernaut", + "juggernath", "juggernaut", + "juggernout", "juggernaut", + "juggernuat", "juggernaut", + "juggetnaut", "juggernaut", + "jugglenaut", "juggernaut", + "juggurnaut", "juggernaut", + "justifible", "justifiable", + "juvenilles", "juvenile", + "kickstarer", "kickstarter", + "kickstartr", "kickstarter", + "kickstater", "kickstarter", + "kidnapning", "kidnapping", + "kidnappade", "kidnapped", + "killingest", "killings", + "kilometros", "kilometers", + "kilomiters", "kilometers", + "kilomoters", "kilometers", + "kilomteres", "kilometers", + "kindapping", "kidnapping", + "kingdomers", "kingdoms", + "krpytonite", "kryptonite", + "krypotnite", "kryptonite", + "krypronite", "kryptonite", + "kryptinite", "kryptonite", + "kryptolite", "kryptonite", + "kryptonyte", "kryptonite", + "krypyonite", "kryptonite", + "krytponite", "kryptonite", + "kyrptonite", "kryptonite", + "labarotory", "laboratory", + "laboratroy", "laboratory", + "laborerers", "laborers", + "laboritory", "laboratory", + "laborotory", "laboratory", + "lackbuster", "lackluster", + "lacklaster", "lackluster", + "landacapes", "landscapes", + "landingers", "landings", + "landshapes", "landscapes", + "landspaces", "landscapes", + "lannasters", "lannisters", + "lannesters", "lannisters", + "lannistars", "lannisters", + "lannsiters", "lannisters", + "lateration", "alteration", + "latitudine", "latitude", + "laughabley", "laughably", + "laughablly", "laughably", + "launchered", "launched", + "leaglizing", "legalizing", + "lectureres", "lectures", + "legalazing", "legalizing", + "legalizare", "legalize", + "legalizate", "legalize", + "legendaies", "legendaries", + "legendaris", "legendaries", + "legimitacy", "legitimacy", + "legimitate", "legitimate", + "legislatie", "legislative", + "legitamacy", "legitimacy", + "legitamate", "legitimate", + "legitamicy", "legitimacy", + "legitamite", "legitimate", + "legitemacy", "legitimacy", + "legitemate", "legitimate", + "legitimaly", "legitimacy", + "legitimicy", "legitimacy", 
+ "legitimite", "legitimate", + "leiutenant", "lieutenant", + "lesbianese", "lesbians", + "lesbianest", "lesbians", + "leuitenant", "lieutenant", + "levetating", "levitating", + "liberacion", "liberation", + "liberalest", "liberate", + "liberalizm", "liberalism", + "liberalnim", "liberalism", + "liberalsim", "liberalism", + "liberarion", "liberation", + "liberaties", "liberate", + "liberatore", "liberate", + "libertania", "libertarians", + "libguistic", "linguistic", + "lietuenant", "lieutenant", + "lieutanant", "lieutenant", + "lieutanent", "lieutenant", + "lieutenent", "lieutenant", + "lifestiles", "lifestyles", + "lifestlyes", "lifestyles", + "lifesystem", "filesystem", + "lifesytles", "lifestyles", + "lifetimers", "lifetimes", + "lifetsyles", "lifestyles", + "lighhtning", "lightening", + "lightergas", "lighters", + "lighthning", "lightening", + "lighthorse", "lighthouse", + "lighthosue", "lighthouse", + "lighthours", "lighthouse", + "lightining", "lighting", + "lightneing", "lightening", + "lightnting", "lightening", + "lightrooom", "lightroom", + "lightweigt", "lightweight", + "ligitation", "litigation", + "ligthening", "lightening", + "ligthhouse", "lighthouse", + "likelyhood", "likelihood", + "limination", "limitation", + "limitacion", "limitation", + "limitaiton", "limitation", + "limitating", "limitation", + "limitativo", "limitation", + "linguisics", "linguistics", + "linguisitc", "linguistics", + "linguistcs", "linguistics", + "linguistis", "linguistics", + "linguitics", "linguistic", + "lingusitic", "linguistics", + "lingvistic", "linguistic", + "liousville", "louisville", + "listeneres", "listeners", + "literallly", "literally", + "literarely", "literary", + "literarlly", "literary", + "literatire", "literate", + "literative", "literate", + "literatute", "literate", + "lithuanina", "lithuania", + "litterally", "literally", + "liuetenant", "lieutenant", + "liveatream", "livestream", + "livelehood", "livelihood", + "liverpoool", "liverpool", + "livescream", "livestream", + "livestreem", "livestream", + "livestrems", "livestream", + "livilehood", "livelihood", + "livliehood", "livelihood", + "lobbyistes", "lobbyists", + "lockacreen", "lockscreen", + "logictical", "logistical", + "logisitcal", "logistical", + "logisticas", "logistics", + "logisticly", "logistical", + "loiusville", "louisville", + "lollipoopy", "lollipop", + "lonelyness", "loneliness", + "longevitiy", "longevity", + "lonileness", "loneliness", + "lonlieness", "loneliness", + "louieville", "louisville", + "louisiania", "louisiana", + "louisianna", "louisiana", + "louisivlle", "louisville", + "louisviile", "louisville", + "lousiville", "louisville", + "luietenant", "lieutenant", + "mabyelline", "maybelline", + "magnifient", "magnificent", + "mainpulate", "manipulate", + "mainstreem", "mainstream", + "maintaince", "maintained", + "maintaines", "maintains", + "maintainig", "maintaining", + "maintenace", "maintenance", + "maintianed", "maintained", + "maintioned", "mentioned", + "malfuncion", "malfunction", + "malpractce", "malpractice", + "managebale", "manageable", + "maneagable", "manageable", + "maneouvred", "manoeuvred", + "maneouvres", "manoeuvres", + "maneuveres", "maneuvers", + "maneuveurs", "maneuver", + "manifestas", "manifests", + "manifestes", "manifests", + "manifestus", "manifests", + "manipluate", "manipulate", + "manipualte", "manipulate", + "manipulant", "manipulate", + "manipulare", "manipulate", + "manipulted", "manipulated", + "maniuplate", "manipulate", + "mannarisms", "mannerisms", + "mannersims", 
"mannerisms", + "mannorisms", "mannerisms", + "manufacter", "manufacture", + "manufacure", "manufacture", + "manufature", "manufacture", + "maraudeurs", "marauder", + "margaritte", "margaret", + "margianlly", "marginally", + "marginaali", "marginal", + "marginable", "marginal", + "marignally", "marginally", + "marijuanna", "marijuana", + "marketting", "marketing", + "marshmalow", "marshmallow", + "masculinty", "masculinity", + "massacrare", "massacre", + "massivelly", "massively", + "masteriers", "masteries", + "masternind", "mastermind", + "masterpice", "masterpiece", + "mastrubate", "masturbate", + "mastubrate", "masturbated", + "masturabte", "masturbate", + "masturbait", "masturbate", + "masturbare", "masturbate", + "masturbeta", "masturbated", + "masturdate", "masturbate", + "materiales", "materials", + "materialsm", "materialism", + "maximazing", "maximizing", + "maximixing", "maximizing", + "mayballine", "maybelline", + "maybellene", "maybelline", + "maybellibe", "maybelline", + "maybilline", "maybelline", + "mccarthyst", "mccarthyist", + "mdifielder", "midfielder", + "meagthread", "megathread", + "meaningess", "meanings", + "meaningles", "meanings", + "meatballls", "meatballs", + "mecahnical", "mechanical", + "mecahnisms", "mechanisms", + "mechancial", "mechanical", + "mechandise", "merchandise", + "mechanichs", "mechanics", + "mechanicle", "mechanical", + "mechanicly", "mechanical", + "mechanicus", "mechanics", + "mechanincs", "mechanic", + "mechanisim", "mechanism", + "mechansims", "mechanisms", + "mechinical", "mechanical", + "mechinisms", "mechanisms", + "mediaction", "medications", + "medicacion", "medication", + "medicaiton", "medication", + "medicalert", "medicare", + "medicallly", "medically", + "medicatons", "medications", + "medicinens", "medicines", + "medicinske", "medicine", + "medicority", "mediocrity", + "medidating", "meditating", + "mediocirty", "mediocrity", + "mediocraty", "mediocrity", + "mediocrety", "mediocrity", + "mediocricy", "mediocrity", + "mediocrily", "mediocrity", + "mediocrisy", "mediocrity", + "meditacion", "medications", + "meditaiton", "meditation", + "melatonian", "melatonin", + "melatonion", "melatonin", + "mellinnium", "millennium", + "melodieuse", "melodies", + "membrances", "membrane", + "mentallity", "mentally", + "mentionnes", "mentions", + "mercenaire", "mercenaries", + "mercenares", "mercenaries", + "mercentile", "mercantile", + "merchanise", "merchandise", + "merchantos", "merchants", + "messagease", "messages", + "messagepad", "messaged", + "messenging", "messaging", + "metabalism", "metabolism", + "metabilism", "metabolism", + "metabloism", "metabolism", + "metablosim", "metabolism", + "metabolics", "metabolism", + "metabolizm", "metabolism", + "metabolsim", "metabolism", + "metalurgic", "metallurgic", + "metaphoras", "metaphors", + "metaphores", "metaphors", + "metaphyics", "metaphysics", + "meterology", "meteorology", + "methaphors", "metaphors", + "methodolgy", "methodology", + "methodoloy", "methodology", + "metrapolis", "metropolis", + "metrolopis", "metropolis", + "metropilis", "metropolis", + "metroplois", "metropolis", + "metropolin", "metropolitan", + "metropolos", "metropolis", + "metropolys", "metropolis", + "mexicanese", "mexicans", + "mexicaness", "mexicans", + "michelline", "michelle", + "micorwaves", "microwaves", + "microhpone", "microphone", + "microscoop", "microscope", + "microvaves", "microwaves", + "microvaxes", "microwaves", + "micrpohone", "microphones", + "midfeilder", "midfielder", + "midfiedler", "midfielder", + 
"midfieldes", "midfielders", + "midfielers", "midfielders", + "midfileder", "midfielder", + "midifelder", "midfielder", + "midnlessly", "mindlessly", + "migitation", "mitigation", + "migrainers", "migraines", + "miletsones", "milestones", + "milisecond", "millisecond", + "militiades", "militias", + "militiants", "militias", + "millinnium", "millennium", + "miminalist", "minimalist", + "minamilist", "minimalist", + "mindleslly", "mindlessly", + "minimazing", "minimizing", + "minimilast", "minimalist", + "minimilist", "minimalist", + "mininalist", "minimalist", + "ministeres", "ministers", + "ministerns", "ministers", + "minneaplis", "minneapolis", + "minneapols", "minneapolis", + "minnesotta", "minnesota", + "minoritets", "minorities", + "minoroties", "minorities", + "miracalous", "miraculous", + "miracluous", "miraculous", + "miracoulus", "miraculous", + "mircophone", "microphone", + "mircoscope", "microscope", + "mircowaves", "microwaves", + "misandrony", "misandry", + "miscarrage", "miscarriage", + "miscarrige", "miscarriage", + "misdemenor", "misdemeanor", + "miserabley", "miserably", + "miserablly", "miserably", + "misforture", "misfortune", + "misgoynist", "misogynist", + "misinfomed", "misinformed", + "misinterpt", "misinterpret", + "misisonary", "missionary", + "misoganist", "misogynist", + "misogenist", "misogynist", + "misoginist", "misogynist", + "misoginyst", "misogynist", + "misognyist", "misogynist", + "misogonist", "misogynist", + "misogonyst", "misogynist", + "misogyinst", "misogynist", + "misogynyst", "misogynist", + "misoygnist", "misogynist", + "mispelling", "misspelling", + "missionare", "missionaries", + "missionera", "missionary", + "missisippi", "mississippi", + "mississipi", "mississippi", + "mississppi", "mississippi", + "misspeling", "misspelling", + "misspellng", "misspelling", + "mistakedly", "mistakenly", + "mistakinly", "mistakenly", + "mistankely", "mistakenly", + "misterious", "mysterious", + "misteryous", "mysterious", + "mistreaded", "mistreated", + "misygonist", "misogynist", + "mitigaiton", "mitigation", + "moderacion", "moderation", + "moderaters", "moderates", + "moderatley", "moderately", + "moderatore", "moderate", + "moderatorn", "moderation", + "modificato", "modification", + "modifieras", "modifiers", + "modifieres", "modifiers", + "moisturier", "moisturizer", + "moleculair", "molecular", + "molestaion", "molestation", + "molestarle", "molester", + "molestarme", "molester", + "molestarse", "molester", + "molestarte", "molester", + "molestered", "molested", + "momentarly", "momentarily", + "monagomous", "monogamous", + "monetizare", "monetize", + "monitering", "monitoring", + "monogymous", "monogamous", + "monolistic", "monolithic", + "monolitich", "monolithic", + "monolopies", "monopolies", + "monolothic", "monolithic", + "monolythic", "monolithic", + "monopilies", "monopolies", + "monoploies", "monopolies", + "monopolets", "monopolies", + "monopolice", "monopolies", + "monopolios", "monopolies", + "monothilic", "monolithic", + "monsterous", "monsters", + "montioring", "monitoring", + "monumentos", "monuments", + "monumentul", "monumental", + "monumentus", "monuments", + "mormonisim", "mormonism", + "morphinate", "morphine", + "morrisette", "morissette", + "morrisound", "morrison", + "mosquitero", "mosquito", + "mosquiters", "mosquitoes", + "motherbard", "motherboard", + "motherboad", "motherboard", + "motherbord", "motherboard", + "motivaiton", "motivations", + "motiviated", "motivated", + "motorcicle", "motorcycle", + "motorcylce", "motorcycle", 
+ "motorcyles", "motorcycles", + "motorollas", "motorola", + "mouthpeace", "mouthpiece", + "mouthpeice", "mouthpiece", + "movespeeed", "movespeed", + "mozzaralla", "mozzarella", + "mozzeralla", "mozzarella", + "mozzorella", "mozzarella", + "mulitation", "mutilation", + "mulitplied", "multiplied", + "mulitplier", "multiplier", + "mulitverse", "multiverse", + "multilpier", "multiplier", + "multiplaer", "multiplier", + "multiplaye", "multiply", + "multiplayr", "multiply", + "multiplays", "multiply", + "multipleye", "multiply", + "multipling", "multiplying", + "multiplyed", "multiplied", + "multiplyer", "multiple", + "multiplyng", "multiplying", + "murderered", "murdered", + "murdereres", "murderers", + "muscicians", "musicians", + "musculaire", "muscular", + "mushroooms", "mushroom", + "mutialtion", "mutilation", + "mutiliated", "mutilated", + "mutliation", "mutilation", + "mutliplied", "multiplied", + "mutliplier", "multiplier", + "mutliverse", "multiverse", + "mysogynist", "misogynist", + "mysterieus", "mysteries", + "nagivating", "navigating", + "nagivation", "navigation", + "narcassism", "narcissism", + "narcassist", "narcissist", + "narcessist", "narcissist", + "narciscism", "narcissism", + "narciscist", "narcissist", + "narcisissm", "narcissism", + "narcisisst", "narcissist", + "narcisists", "narcissist", + "narcissicm", "narcissism", + "narcissict", "narcissist", + "narcissitc", "narcissist", + "narcissits", "narcissist", + "narcoticos", "narcotics", + "narrativas", "narratives", + "narrativos", "narratives", + "narritives", "narratives", + "nashvillle", "nashville", + "nationales", "nationals", + "nationalis", "nationals", + "nationalit", "nationalist", + "nationaliy", "nationality", + "nationalty", "nationality", + "nationella", "national", + "naturually", "naturally", + "naviagting", "navigating", + "naviagtion", "navigation", + "navigatore", "navigate", + "neccessary", "necessary", + "necesarily", "necessarily", + "necessairy", "necessarily", + "necessarly", "necessary", + "necessarry", "necessary", + "necessiate", "necessitate", + "necessites", "necessities", + "neckbeared", "neckbeard", + "neckboards", "neckbeards", + "neckbreads", "neckbeards", + "neckneards", "neckbeards", + "necromacer", "necromancer", + "necromaner", "necromancer", + "needleslly", "needlessly", + "negativaty", "negativity", + "negativley", "negatively", + "negelcting", "neglecting", + "negilgence", "negligence", + "negiotated", "negotiated", + "neglacting", "neglecting", + "neglagence", "negligence", + "neglegance", "negligence", + "neglegible", "negligible", + "neglegting", "neglecting", + "neglibible", "negligible", + "neglicence", "negligence", + "neglicible", "negligible", + "neglicting", "neglecting", + "negligable", "negligible", + "negligance", "negligence", + "negligeble", "negligible", + "negligente", "negligence", + "negociated", "negotiated", + "negogiated", "negotiated", + "negoitated", "negotiated", + "negotaited", "negotiated", + "negotation", "negotiation", + "negotiaion", "negotiation", + "negotiatie", "negotiated", + "negotiatin", "negotiations", + "negotiaton", "negotiation", + "neigbhours", "neighbours", + "neighbhors", "neighbours", + "neighbords", "neighbours", + "neighbores", "neighbours", + "netowrking", "networking", + "netruality", "neutrality", + "neturality", "neutrality", + "netwroking", "networking", + "neurologia", "neurological", + "neutrailty", "neutrality", + "newletters", "newsletters", + "newlsetter", "newsletter", + "newsettler", "newsletter", + "newslatter", "newsletter", 
+ "nieghbours", "neighbours", + "nightmates", "nightmares", + "nightmears", "nightmares", + "nightmeres", "nightmares", + "nigthmares", "nightmares", + "nipticking", "nitpicking", + "nitpciking", "nitpicking", + "nominacion", "nomination", + "nominatino", "nominations", + "nominativo", "nomination", + "nominatons", "nominations", + "nonsencial", "nonsensical", + "nontheless", "nonetheless", + "northerend", "northern", + "nostalgica", "nostalgia", + "nostalgija", "nostalgia", + "noteworhty", "noteworthy", + "nothingess", "nothingness", + "noticabely", "noticeably", + "noticabley", "noticeably", + "noticiably", "noticeably", + "notoriosly", "notoriously", + "novembeard", "november", + "nuetrality", "neutrality", + "nutricious", "nutritious", + "nutrientes", "nutrients", + "nutritents", "nutrients", + "nutritinal", "nutritional", + "nutritiuos", "nutritious", + "nutritivos", "nutritious", + "nutrituous", "nutritious", + "nutrutious", "nutritious", + "obatinable", "obtainable", + "obejctives", "objectives", + "obilgatory", "obligatory", + "objecitves", "objectives", + "objectivas", "objectives", + "objectivly", "objectively", + "objectivst", "objectives", + "objectivty", "objectivity", + "objektives", "objectives", + "obligitary", "obligatory", + "obligitory", "obligatory", + "observabil", "observable", + "observarse", "observers", + "observaton", "observation", + "observeras", "observers", + "observered", "observed", + "observeres", "observers", + "observible", "observable", + "obstancles", "obstacles", + "obstrucion", "obstruction", + "obstructin", "obstruction", + "obtainabie", "obtainable", + "obtaineble", "obtainable", + "obtainible", "obtainable", + "obtianable", "obtainable", + "ocasionaly", "occasionally", + "ocassional", "occasional", + "ocassioned", "occasioned", + "occaisonal", "occasional", + "occasionly", "occasional", + "occassions", "occasions", + "occational", "occasional", + "occulation", "occupation", + "occupaiton", "occupation", + "occurances", "occurrences", + "occurences", "occurrences", + "occurrance", "occurrence", + "octohedral", "octahedral", + "octohedron", "octahedron", + "offensivly", "offensively", + "offereings", "offerings", + "officailly", "officially", + "olbigatory", "obligatory", + "ominpotent", "omnipotent", + "ominscient", "omniscient", + "omnipetent", "omnipotent", + "omnipitent", "omnipotent", + "omnipotant", "omnipotent", + "omnisicent", "omniscient", + "omniverous", "omnivorous", + "omnsicient", "omniscient", + "on-premise", "on-premises", + "onmipotent", "omnipotent", + "onmiscient", "omniscient", + "operatings", "operations", + "operativne", "operative", + "operativos", "operations", + "oportunity", "opportunity", + "opponenets", "opponent", + "oppononent", "opponent", + "oppressiun", "oppressing", + "optimisitc", "optimistic", + "optimizare", "optimize", + "optimizate", "optimize", + "optimizied", "optimize", + "organicaly", "organically", + "organiclly", "organically", + "organisate", "organise", + "organische", "organise", + "organisera", "organizers", + "organisere", "organizers", + "organisert", "organizers", + "organisier", "organise", + "organisims", "organism", + "organismed", "organise", + "organismen", "organise", + "organismer", "organise", + "organismes", "organisms", + "organismus", "organisms", + "organisten", "organise", + "organiszed", "organise", + "organizaed", "organize", + "organizare", "organizer", + "organizate", "organize", + "organizors", "organizers", + "organizuje", "organize", + "organziers", "organizers", + "orientaion", 
"orientation", + "orientarla", "oriental", + "orientarlo", "oriental", + "origianlly", "originally", + "originales", "originals", + "originalet", "originated", + "originalis", "originals", + "originalty", "originality", + "orignially", "originally", + "origniated", "originated", + "origonally", "originally", + "origonated", "originated", + "ostencibly", "ostensibly", + "ostenisbly", "ostensibly", + "ostensably", "ostensibly", + "ostentibly", "ostensibly", + "ostrasiced", "ostracized", + "ostrasized", "ostracized", + "ostraziced", "ostracized", + "ostrazised", "ostracized", + "ostrecized", "ostracized", + "ostricized", "ostracized", + "ostrocized", "ostracized", + "oustanding", "outstanding", + "outcalssed", "outclassed", + "outlcassed", "outclassed", + "outnumberd", "outnumbered", + "outnumbred", "outnumbered", + "outperfoms", "outperform", + "outperfrom", "outperform", + "outpreform", "outperform", + "outrageuos", "outrageous", + "outragious", "outrageous", + "outragoues", "outrageous", + "outreagous", "outrageous", + "outsourcad", "outsourced", + "outsouring", "outsourcing", + "outsoursed", "outsourced", + "outweighes", "outweighs", + "overarcing", "overarching", + "overclockd", "overclocked", + "overcloked", "overclocked", + "overcoding", "overcoming", + "overheards", "overhead", + "overheared", "overhead", + "overhooked", "overlooked", + "overlanded", "overloaded", + "overlaoded", "overloaded", + "overlaping", "overlapping", + "overlauded", "overloaded", + "overloards", "overload", + "overlorded", "overloaded", + "overlordes", "overlords", + "overnurfed", "overturned", + "overpirced", "overpriced", + "overpowerd", "overpowered", + "overpowred", "overpowered", + "overprised", "overpriced", + "overtunned", "overturned", + "overtunred", "overturned", + "overturing", "overturn", + "overweigth", "overweight", + "overwhemed", "overwhelmed", + "overwieght", "overweight", + "overwritte", "overwrite", + "pahtfinder", "pathfinder", + "painfullly", "painfully", + "painkilers", "painkillers", + "pairlament", "parliament", + "pakistanti", "pakistani", + "paladinlst", "paladins", + "palcements", "placements", + "paleolitic", "paleolithic", + "palestinan", "palestinian", + "paltformer", "platformer", + "palyerbase", "playerbase", + "parachutte", "parachute", + "parademics", "paramedics", + "paradiggum", "paradigm", + "paragraghs", "paragraphs", + "paragrahps", "paragraphs", + "paragrapgh", "paragraphs", + "paragrpahs", "paragraphs", + "parahprase", "paraphrase", + "paralleles", "parallels", + "parallells", "parallels", + "paramadics", "paramedics", + "paramaters", "parameters", + "paramecias", "paramedics", + "parametics", "paramedics", + "parametros", "parameters", + "paramiters", "parameters", + "paramormal", "paranormal", + "paranoicas", "paranoia", + "paranomral", "paranormal", + "paranornal", "paranormal", + "parapharse", "paraphrase", + "paraphraze", "paraphrase", + "paraprhase", "paraphrase", + "parasitter", "parasite", + "parilament", "parliament", + "parituclar", "particular", + "parlaiment", "parliament", + "parliamant", "parliament", + "parliamone", "parliament", + "parliement", "parliament", + "parrallell", "parallel", + "parrallely", "parallelly", + "partiarchy", "patriarchy", + "participas", "participants", + "participat", "participants", + "participte", "participate", + "particualr", "particular", + "partiotism", "patriotism", + "passionais", "passions", + "passionale", "passionately", + "passionant", "passionate", + "passionite", "passionate", + "passivedns", "passives", + 
"passivelly", "passively", + "patenterad", "patented", + "pathfidner", "pathfinder", + "pathfindir", "pathfinder", + "pathifnder", "pathfinder", + "patientens", "patients", + "patrairchy", "patriarchy", + "patriachry", "patriarchy", + "patriarcal", "patriarchal", + "patriarhal", "patriarchal", + "patriatchy", "patriarchy", + "patriatism", "patriotism", + "patrionism", "patriotism", + "patriotics", "patriotism", + "patriotisk", "patriots", + "patroitism", "patriotism", + "patryarchy", "patriarchy", + "pedantisch", "pedantic", + "pedestiran", "pedestrian", + "pedestrain", "pedestrian", + "pedictions", "depictions", + "pedohpiles", "pedophiles", + "pedohpilia", "pedophilia", + "pedophilac", "pedophilia", + "pedophilea", "pedophilia", + "pedophilie", "pedophile", + "pedophilla", "pedophilia", + "pedophille", "pedophile", + "pedopholia", "pedophilia", + "penetraion", "penetration", + "penetratin", "penetration", + "penetraton", "penetration", + "penguinese", "penguins", + "penguiness", "penguins", + "peninsulla", "peninsula", + "penninsula", "peninsula", + "peodphiles", "pedophiles", + "peodphilia", "pedophilia", + "pepperment", "peppermint", + "pepperonni", "pepperoni", + "percantage", "percentage", + "percantile", "percentile", + "percaution", "precaution", + "percenatge", "percentages", + "percential", "percentile", + "percentige", "percentile", + "perceptoin", "perceptions", + "percession", "percussion", + "percetange", "percentages", + "percetnage", "percentages", + "percintile", "percentile", + "percission", "percussion", + "percpetion", "perceptions", + "percusions", "percussion", + "perdicting", "predicting", + "perdiction", "prediction", + "perdictive", "predictive", + "perenially", "perennially", + "perfeccion", "perfection", + "perfecxion", "perfection", + "perfektion", "perfection", + "perferable", "preferable", + "perferably", "preferably", + "perference", "preference", + "perferring", "preferring", + "perfexcion", "perfection", + "perfomance", "performance", + "performace", "performance", + "performane", "performances", + "performans", "performances", + "performens", "performers", + "performous", "performs", + "perfromers", "performers", + "perhiperal", "peripheral", + "peridinkle", "periwinkle", + "perihperal", "peripheral", + "periodisch", "periodic", + "periperhal", "peripheral", + "peripheals", "peripherals", + "peripheria", "peripheral", + "periphiral", "peripheral", + "periphreal", "peripheral", + "periphrial", "peripheral", + "peritinkle", "periwinkle", + "periwankle", "periwinkle", + "periwinkel", "periwinkle", + "periwinkie", "periwinkle", + "periwinlke", "periwinkle", + "permanenty", "permanently", + "permanetly", "permanently", + "permisions", "permission", + "permisison", "permissions", + "permissble", "permissible", + "permissibe", "permissible", + "permissons", "permissions", + "perogative", "prerogative", + "perordered", "preordered", + "perpatuate", "perpetuate", + "perpetualy", "perpetually", + "perpetuare", "perpetuate", + "persausion", "persuasion", + "persausive", "persuasive", + "persective", "respective", + "persectued", "persecuted", + "persecutie", "persecuted", + "persecutin", "persecution", + "perserving", "preserving", + "persicuted", "persecuted", + "persistant", "persistent", + "persistens", "persists", + "persoanlly", "personally", + "persocuted", "persecuted", + "personalie", "personalized", + "personalis", "personas", + "personarse", "personas", + "personatus", "personas", + "personnell", "personnel", + "perspecive", "perspective", + "perspectie", 
"perspectives", + "persuasian", "persuasion", + "persuasing", "persuasion", + "persuasivo", "persuasion", + "persuation", "persuasion", + "persucuted", "persecuted", + "persumably", "presumably", + "persussion", "persuasion", + "persvasive", "persuasive", + "perswasion", "persuasion", + "pertinante", "pertinent", + "pervailing", "prevailing", + "pervalence", "prevalence", + "pervention", "prevention", + "perversley", "perverse", + "pesitcides", "pesticides", + "pessimistc", "pessimistic", + "pessimitic", "pessimistic", + "pestacides", "pesticides", + "pestecides", "pesticides", + "pesticedes", "pesticides", + "pesticidas", "pesticides", + "pestisides", "pesticides", + "pestizides", "pesticides", + "pharamcist", "pharmacist", + "pharmacias", "pharmacist", + "pharmacyst", "pharmacist", + "pharmasist", "pharmacist", + "pharmicist", "pharmacist", + "phemonenon", "phenomenon", + "phenemenon", "phenomenon", + "phenemonal", "phenomenal", + "phenomanal", "phenomenal", + "phenomanon", "phenomenon", + "phenomemon", "phenomenon", + "phenomenen", "phenomenon", + "phenomenol", "phenomenal", + "phenomenom", "phenomenon", + "phenominon", "phenomenon", + "phenomonal", "phenomenal", + "phenomonen", "phenomenon", + "phenomonon", "phenomenon", + "phenonemal", "phenomenal", + "phenonemon", "phenomenon", + "phenonmena", "phenomena", + "philipines", "philippines", + "philippins", "philippines", + "philisophy", "philosophy", + "phillipine", "philippine", + "phillipses", "phillies", + "philosiphy", "philosophy", + "philosohpy", "philosophy", + "philosoper", "philosopher", + "philospher", "philosopher", + "philospohy", "philosophy", + "photogragh", "photograph", + "photograhs", "photographs", + "photograhy", "photography", + "photograps", "photographs", + "photograpy", "photography", + "photogrpah", "photographs", + "photoshopd", "photoshopped", + "photoshope", "photoshopped", + "phramacist", "pharmacist", + "phsyically", "physically", + "phsyicians", "physicians", + "phsyicists", "physicists", + "phsyiology", "physiology", + "phycisians", "physicians", + "phycisists", "physicists", + "phyiscally", "physically", + "phyisology", "physiology", + "physcially", "physically", + "physcology", "psychology", + "physcopath", "psychopath", + "physicials", "physicians", + "physiciens", "physicians", + "physioligy", "physiology", + "picthforks", "pitchforks", + "pinoneered", "pioneered", + "pitchferks", "pitchforks", + "pitchfolks", "pitchforks", + "pitchfords", "pitchforks", + "pitchworks", "pitchforks", + "pitckforks", "pitchforks", + "pittaburgh", "pittsburgh", + "pittsbrugh", "pittsburgh", + "placehoder", "placeholder", + "placeholdr", "placeholder", + "placeholer", "placeholder", + "placemenet", "placements", + "plagairism", "plagiarism", + "plagarisim", "plagiarism", + "plagiariam", "plagiarism", + "plagiarios", "plagiarism", + "plagiarius", "plagiarism", + "plagiarizm", "plagiarism", + "plagierism", "plagiarism", + "plaguarism", "plagiarism", + "plaigarism", "plagiarism", + "plasticosa", "plastics", + "platfarmer", "platformer", + "platformar", "platformer", + "platformie", "platformer", + "platfotmer", "platformer", + "platfromer", "platformer", + "platofrmer", "platformer", + "playaround", "playground", + "playersare", "playerbase", + "playgorund", "playground", + "playthrogh", "playthrough", + "playthrouh", "playthrough", + "playwrites", "playwrights", + "plethorian", "plethora", + "policitian", "politician", + "polinators", "pollinators", + "polishuset", "polishes", + "politessen", "politeness", + "politicain", 
"politician", + "politicaly", "politically", + "politicien", "politician", + "politicing", "politician", + "politicion", "politician", + "politickin", "politician", + "politiikan", "politician", + "politiness", "politeness", + "polititian", "politician", + "popualtion", "populations", + "populairty", "popularity", + "populaiton", "populations", + "popularaty", "popularity", + "popularest", "populate", + "popularily", "popularity", + "populaties", "populate", + "populatiry", "popularity", + "populative", "populate", + "populatoin", "populations", + "popultaion", "populations", + "pormetheus", "prometheus", + "pornograhy", "pornography", + "pornograpy", "pornography", + "pornogrphy", "pornography", + "porportion", "proportion", + "portabilty", "portability", + "portarying", "portraying", + "portoguese", "portuguese", + "portraiing", "portraying", + "portrating", "portraying", + "portrayels", "portrays", + "portugeuse", "portuguese", + "portuguise", "portuguese", + "posessions", "possessions", + "posicional", "positional", + "positevely", "positively", + "positioing", "positioning", + "positionly", "positional", + "positionne", "positioned", + "positivley", "positively", + "possesives", "possessive", + "possessers", "possesses", + "possessess", "possesses", + "possibiliy", "possibility", + "possibilty", "possibility", + "possissive", "possessive", + "posthomous", "posthumous", + "potentialy", "potentially", + "poulations", "populations", + "powerhorse", "powerhouse", + "powerhosue", "powerhouse", + "powerhours", "powerhouse", + "powerhsell", "powershell", + "powerprint", "powerpoint", + "powersehll", "powershell", + "ppublisher", "publisher", + "practially", "practically", + "practicaly", "practically", + "practicess", "practise", + "practiclly", "practically", + "practioner", "practitioner", + "precaucion", "precaution", + "precausion", "precaution", + "precautios", "precautions", + "precedance", "precedence", + "precedense", "precedence", + "preceeding", "preceding", + "precendece", "precedence", + "precentage", "percentage", + "precentile", "percentile", + "preciselly", "precisely", + "precuation", "precautions", + "precussion", "percussion", + "predecated", "predicated", + "predecence", "precedence", + "predecesor", "predecessor", + "predection", "prediction", + "predective", "predictive", + "prediccion", "prediction", + "prediceted", "predicated", + "predicited", "predicated", + "predicitng", "predicting", + "prediciton", "prediction", + "predicitve", "predictive", + "predickted", "predicated", + "predictave", "predictive", + "predictivo", "prediction", + "predictons", "predictions", + "predjuiced", "prejudiced", + "predjuices", "prejudices", + "preduction", "prediction", + "preductive", "predictive", + "predujiced", "prejudiced", + "predujices", "prejudices", + "prefarable", "preferable", + "prefarably", "preferably", + "prefection", "perfection", + "preferance", "preference", + "prefereble", "preferable", + "preferente", "preference", + "preferenze", "preference", + "preferible", "preferable", + "preferibly", "preferably", + "prefernece", "preferences", + "preformers", "performers", + "pregancies", "pregnancies", + "pregnanies", "pregnancies", + "preipheral", "peripheral", + "preisdents", "presidents", + "preisthood", "priesthood", + "prejeduced", "prejudiced", + "prejeduces", "prejudices", + "prejiduced", "prejudiced", + "prejiduces", "prejudices", + "prejucided", "prejudiced", + "prejucides", "prejudices", + "prejuduced", "prejudiced", + "prejuduces", "prejudices", + "prelimiary", 
"preliminary", + "prematurly", "prematurely", + "preminence", "preeminence", + "premission", "permission", + "preorderes", "preorders", + "prepartion", "preparation", + "prepetuate", "perpetuate", + "preposters", "preposterous", + "prescients", "presidents", + "prescirbed", "prescribed", + "prescriped", "prescribed", + "presearing", "preserving", + "presecuted", "persecuted", + "presedency", "presidency", + "presedents", "presidents", + "presenning", "presenting", + "presentase", "presents", + "presentato", "presentation", + "presention", "presenting", + "presentors", "presents", + "preservare", "preserve", + "preservato", "preservation", + "preserverd", "preserved", + "presidancy", "presidency", + "presidante", "presidents", + "presidenta", "presidential", + "presidenty", "presidency", + "presidunce", "presidency", + "presistent", "persistent", + "presonally", "personally", + "presonhood", "personhood", + "pressuming", "pressuring", + "prestigios", "prestigious", + "prestigous", "prestigious", + "presuambly", "presumably", + "presuasion", "persuasion", + "presuasive", "persuasive", + "presumebly", "presumably", + "presumendo", "presumed", + "presumibly", "presumably", + "presumpton", "presumption", + "pretaining", "pertaining", + "pretection", "protection", + "pretendias", "pretends", + "pretensive", "pretense", + "pretentios", "pretentious", + "pretentous", "pretentious", + "prevalecen", "prevalence", + "prevalente", "prevalence", + "prevencion", "prevention", + "preventivo", "prevention", + "preventors", "prevents", + "previaling", "prevailing", + "previosuly", "previously", + "previoulsy", "previously", + "prevolence", "prevalence", + "pricinpals", "principals", + "primarilly", "primarily", + "primatives", "primitives", + "princepals", "principals", + "princesess", "princesses", + "princibles", "principles", + "principaly", "principality", + "principels", "principals", + "principial", "principal", + "principias", "principals", + "principlas", "principals", + "prinicipal", "principal", + "prinicpals", "principals", + "prinicples", "principles", + "printerest", "printers", + "prioratize", "prioritize", + "prioretize", "prioritize", + "prioritice", "prioritize", + "prioritied", "prioritize", + "prioroties", "priorities", + "priorotize", "prioritize", + "priotities", "priorities", + "priotitize", "prioritize", + "privaleged", "privileged", + "privaleges", "privileges", + "privaticed", "privatized", + "privelaged", "privileged", + "privelages", "privileges", + "priveldges", "privileges", + "priveleged", "privileged", + "priveleges", "privileges", + "privelidge", "privileged", + "priveliged", "privileged", + "priveliges", "privileges", + "privetized", "privatized", + "privilaged", "privileged", + "privilages", "privileges", + "priviledge", "privilege", + "privilegde", "privileges", + "privilegie", "privilege", + "priviliged", "privileged", + "priviliges", "privileges", + "privitazed", "privatized", + "privitized", "privatized", + "probabiliy", "probability", + "probabilty", "probability", + "probablies", "probable", + "probablybe", "probable", + "problemita", "problematic", + "procalimed", "proclaimed", + "procceding", "proceeding", + "procedding", "proceeding", + "procederal", "procedural", + "procedings", "proceedings", + "procedrual", "procedural", + "proceededs", "proceeds", + "proceedure", "procedure", + "proceesing", "proceeding", + "processsor", "processors", + "proclamied", "proclaimed", + "proclaming", "proclaiming", + "procliamed", "proclaimed", + "procreatin", "procreation", + 
"procudures", "procedures", + "prodcution", "production", + "prodecural", "procedural", + "prodecures", "procedures", + "produccion", "production", + "produceras", "produces", + "produceres", "produces", + "producirse", "producers", + "produciton", "production", + "producting", "production", + "productino", "productions", + "productivo", "production", + "productivy", "productivity", + "productoin", "productions", + "produktion", "production", + "produktive", "productive", + "produtcion", "productions", + "profesions", "profession", + "professers", "professors", + "professorn", "profession", + "professsor", "professors", + "proffesion", "profession", + "proficeint", "proficient", + "proficiant", "proficient", + "proficieny", "proficiency", + "proficincy", "proficiency", + "profitabel", "profitable", + "profitabil", "profitable", + "profitible", "profitable", + "proftiable", "profitable", + "programmar", "programmer", + "programmme", "programme", + "progresing", "progressing", + "progresion", "progression", + "progresive", "progressive", + "progressie", "progressives", + "progressin", "progression", + "progresson", "progression", + "progressos", "progresses", + "progressus", "progresses", + "prohibirte", "prohibit", + "prohibites", "prohibits", + "prohibitng", "prohibiting", + "prohibiton", "prohibition", + "prohibitus", "prohibits", + "prohibitve", "prohibited", + "prohobited", "prohibited", + "prohpecies", "prophecies", + "projecitle", "projectiles", + "projectiel", "projectiles", + "projecties", "projectiles", + "projectils", "projectiles", + "projectles", "projectiles", + "projectlie", "projectiles", + "projectyle", "projectile", + "projektile", "projectile", + "projektion", "projection", + "prometheas", "prometheus", + "promethese", "prometheus", + "promethius", "prometheus", + "promethous", "prometheus", + "promethues", "prometheus", + "prominance", "prominence", + "prominenty", "prominently", + "prominetly", "prominently", + "promiscous", "promiscuous", + "promiscuos", "promiscuous", + "promoteurs", "promotes", + "promotheus", "prometheus", + "promotinal", "promotional", + "pronoucned", "pronounced", + "pronouning", "pronouncing", + "propechies", "prophecies", + "propencity", "propensity", + "propenents", "proponents", + "properites", "properties", + "propersity", "propensity", + "propertion", "proportion", + "propertius", "properties", + "prophacies", "prophecies", + "prophocies", "prophecies", + "propietary", "proprietary", + "proplusion", "propulsion", + "propoganda", "propaganda", + "propogates", "propagates", + "propolsion", "propulsion", + "proponants", "proponents", + "proponenet", "proponent", + "proporcion", "proportion", + "proporties", "properties", + "proporting", "proportion", + "propositon", "proposition", + "propotions", "proportions", + "proprietry", "proprietary", + "proprotion", "proportion", + "propserity", "prosperity", + "propserous", "prosperous", + "propulaios", "propulsion", + "propulsing", "propulsion", + "propultion", "propulsion", + "propuslion", "propulsion", + "prosectued", "prosecuted", + "prosectuor", "prosecutor", + "prosecuter", "prosecutor", + "prosecutie", "prosecuted", + "prosicuted", "prosecuted", + "prosicutor", "prosecutor", + "prosocuted", "prosecuted", + "prosparity", "prosperity", + "prospectos", "prospects", + "prosperety", "prosperity", + "prospertiy", "prosperity", + "prosphetic", "prosthetic", + "prosporous", "prosperous", + "prostehtic", "prosthetic", + "prosterity", "prosperity", + "prostethic", "prosthetic", + "prostitite", 
"prostitute", + "prostitude", "prostitute", + "prostituee", "prostitute", + "prostituer", "prostitute", + "prostitues", "prostitutes", + "prostiture", "prostitute", + "prostituto", "prostitution", + "prostituye", "prostitute", + "protaginst", "protagonist", + "protastant", "protestant", + "proteccion", "protection", + "proteciton", "protections", + "protectice", "protective", + "protectiei", "protective", + "protectoin", "protections", + "protectons", "protectors", + "protectron", "protection", + "protestans", "protests", + "protestare", "protesters", + "protestato", "protestant", + "protestent", "protestant", + "protestina", "protestant", + "prothsetic", "prosthetic", + "protistant", "protestant", + "protocoles", "protocols", + "protocolls", "protocols", + "protocolos", "protocols", + "protohypes", "prototypes", + "protostant", "protestant", + "prototipes", "prototypes", + "prototpyes", "prototypes", + "protraying", "portraying", + "protuguese", "portuguese", + "provencial", "provincial", + "proveribal", "proverbial", + "provervial", "proverbial", + "providance", "providence", + "providince", "providence", + "provinciae", "province", + "provincies", "province", + "provincija", "provincial", + "provinence", "providence", + "provinical", "provincial", + "provintial", "provincial", + "provinvial", "provincial", + "provisiosn", "provision", + "provisonal", "provisional", + "provocatie", "provocative", + "pscyhology", "psychology", + "pscyhopath", "psychopath", + "pshycology", "psychology", + "pshycopath", "psychopath", + "psychedlic", "psychedelic", + "psychiatic", "psychiatric", + "psycholoog", "psychology", + "psychopaat", "psychopath", + "psychopats", "psychopaths", + "ptichforks", "pitchforks", + "publicitan", "publication", + "publisheed", "published", + "publisherr", "publisher", + "publishher", "publisher", + "publissher", "publisher", + "publlisher", "publisher", + "punihsment", "punishments", + "punishemnt", "punishments", + "punishible", "punishable", + "punishmnet", "punishments", + "punissable", "punishable", + "punsihable", "punishable", + "purchacing", "purchasing", + "purpolsion", "propulsion", + "purposedly", "purposely", + "purposelly", "purposely", + "purpotedly", "purportedly", + "pususading", "persuading", + "pyschology", "psychology", + "pyschopath", "psychopath", + "qaulifiers", "qualifiers", + "quailfiers", "qualifiers", + "qualfiiers", "qualifiers", + "qualifieds", "qualifies", + "qualifiies", "qualifiers", + "qualifiing", "qualifying", + "qualifires", "qualifiers", + "qualifyers", "qualifiers", + "qualitying", "qualifying", + "quanitites", "quantities", + "quantaties", "quantities", + "quantitize", "quantities", + "quarantena", "quarantine", + "quarantene", "quarantine", + "quarantied", "quarantine", + "quarintine", "quarantine", + "quaruntine", "quarantine", + "quesitoned", "questioned", + "questional", "questionable", + "questionne", "questioned", + "rabinnical", "rabbinical", + "radiactive", "radioactive", + "radioacive", "radioactive", + "rainbowers", "rainbows", + "randmoness", "randomness", + "randomzied", "randomized", + "randonmess", "randomness", + "randumness", "randomness", + "raspberrry", "raspberry", + "rationalle", "rationale", + "readmition", "readmission", + "realitvely", "relatively", + "realtively", "relatively", + "realtivity", "relativity", + "reaserched", "researched", + "reasercher", "researcher", + "rebiulding", "rebuilding", + "reboudning", "rebounding", + "rebouncing", "rebounding", + "rebuidling", "rebuilding", + "rebuliding", "rebuilding", + 
"rebuplican", "republican", + "reccommend", "recommend", + "recepients", "recipients", + "receptoras", "receptors", + "receptores", "receptors", + "recgonised", "recognised", + "recgonized", "recognized", + "recgonizes", "recognizes", + "reciepents", "recipients", + "recipeints", "recipients", + "recipiants", "recipients", + "recocnised", "recognised", + "recoginsed", "recognised", + "recoginzed", "recognized", + "recognices", "recognizes", + "recogniton", "recognition", + "recognzied", "recognised", + "recomended", "recommended", + "recommande", "recommend", + "recommands", "recommends", + "recommeded", "recommended", + "recommened", "recommend", + "recommennd", "recommends", + "recomments", "recommends", + "recompence", "recompense", + "reconcider", "reconsider", + "reconcille", "reconcile", + "recongised", "recognised", + "recongized", "recognized", + "recongizes", "recognizes", + "reconisder", "reconsider", + "reconsiled", "reconsider", + "recordarle", "recorder", + "recordarme", "recorder", + "recordarse", "recorder", + "recordarte", "recorder", + "recreacion", "recreation", + "recreatief", "recreate", + "recreativo", "recreation", + "recrutiers", "recruiters", + "rectanglar", "rectangular", + "rectangual", "rectangular", + "rectanguar", "rectangular", + "recuriters", "recruiters", + "recurrance", "recurrence", + "recursivly", "recursively", + "redefinied", "redefine", + "redefinine", "redefine", + "redemtpion", "redemption", + "redepmtion", "redemption", + "redesiging", "redesign", + "rediculous", "ridiculous", + "redmeption", "redemption", + "redneckers", "rednecks", + "redneckese", "rednecks", + "redneckest", "rednecks", + "reduncancy", "redundancy", + "redundency", "redundancy", + "redundnacy", "redundancy", + "redunduncy", "redundancy", + "reenforced", "reinforced", + "reevaulate", "reevaluate", + "refedendum", "referendum", + "refelcting", "reflecting", + "refelction", "reflection", + "refelctive", "reflective", + "referances", "references", + "referandum", "referendum", + "referemces", "references", + "referemdum", "referendum", + "referendim", "referendum", + "referendom", "referendum", + "referenece", "reference", + "referening", "referencing", + "referenses", "referees", + "referentes", "references", + "referneces", "references", + "referrence", "reference", + "referundum", "referendum", + "refference", "reference", + "refleciton", "reflections", + "reflecters", "reflects", + "reflektion", "reflection", + "reflextion", "reflection", + "reformerad", "reformed", + "refrigerar", "refrigerator", + "refurbised", "refurbished", + "regenarate", "regenerate", + "registeres", "registers", + "registrato", "registration", + "regresives", "regressive", + "regressivo", "regression", + "regualting", "regulating", + "regualtion", "regulations", + "regualtors", "regulators", + "regulacion", "regulation", + "regulament", "regulate", + "regulaotrs", "regulators", + "regularily", "regularly", + "regularing", "regulating", + "regularlas", "regulars", + "regularlos", "regulars", + "regulaters", "regulators", + "regulatios", "regulators", + "regulatons", "regulations", + "rehtorical", "rhetorical", + "reinstaled", "reinstalled", + "reitrement", "retirement", + "relagation", "relaxation", + "relatation", "relaxation", + "relativety", "relativity", + "relativily", "relativity", + "relativley", "relatively", + "relavation", "relaxation", + "relaxating", "relaxation", + "relazation", "relaxation", + "releagtion", "relegation", + "relegetion", "relegation", + "relentness", "relentless", + 
"reletnless", "relentless", + "relevation", "revelation", + "relexation", "relegation", + "relfecting", "reflecting", + "relfection", "reflection", + "relfective", "reflective", + "reliabilty", "reliability", + "reliablely", "reliably", + "religiones", "religions", + "religiosly", "religiously", + "religiousy", "religiously", + "religously", "religiously", + "relitavely", "relatively", + "reluctanct", "reluctant", + "reluctanly", "reluctantly", + "reluctanty", "reluctantly", + "remarcably", "remarkably", + "remarkibly", "remarkably", + "rememberes", "remembers", + "remenicent", "reminiscent", + "reminisent", "reminiscent", + "reminscent", "reminiscent", + "remmebered", "remembered", + "renaissace", "renaissance", + "renderered", "rendered", + "renegerate", "regenerate", + "renewabels", "renewables", + "renewebles", "renewables", + "rennovated", "renovated", + "renweables", "renewables", + "repatition", "repetition", + "repblicans", "republicans", + "repbulican", "republican", + "repeadedly", "repeatedly", + "repeadetly", "repeatedly", + "repearable", "repeatable", + "repearedly", "repealed", + "repeatadly", "repeatedly", + "repeatedlt", "repealed", + "repeatetly", "repeatedly", + "repeatible", "repeatable", + "repeatidly", "repeatedly", + "repectable", "repeatable", + "repentable", "repeatable", + "repentence", "repentance", + "repersents", "represents", + "repetation", "repetition", + "repeteadly", "repeatedly", + "repetetion", "repetition", + "repeticion", "repetition", + "repetitivo", "repetition", + "replacated", "replicated", + "replaceble", "replaceable", + "replacemet", "replacements", + "replacemnt", "replacement", + "replacemtn", "replacements", + "replecated", "replicated", + "repoistory", "repository", + "reponsible", "responsible", + "reportadly", "reportedly", + "reporteros", "reporters", + "reportidly", "reportedly", + "repositary", "repository", + "reposotory", "repository", + "repostiory", "repository", + "representn", "representing", + "repressent", "represents", + "repressivo", "repression", + "repsectful", "respectful", + "repsecting", "respecting", + "repsective", "respective", + "repsonding", "responding", + "repsonsive", "responsive", + "reptuation", "reputation", + "repubicans", "republicans", + "republcian", "republican", + "republians", "republicans", + "republicon", "republican", + "repuglican", "republican", + "repulicans", "republicans", + "reputacion", "reputation", + "requirment", "requirement", + "requrement", "requirement", + "resemblace", "resemble", + "reserached", "researched", + "reseracher", "researchers", + "reserverad", "reserved", + "reservered", "reserved", + "residental", "residential", + "resistable", "resistible", + "resistanes", "resistances", + "resistanse", "resistances", + "resistence", "resistance", + "resistendo", "resisted", + "resistered", "resisted", + "resistnace", "resistances", + "resitsance", "resistances", + "resoltuion", "resolutions", + "resolucion", "resolution", + "resolutino", "resolutions", + "resolutoin", "resolutions", + "resolutons", "resolutions", + "resolvemos", "resolves", + "resolvendo", "resolved", + "resolveres", "resolves", + "resolverse", "resolves", + "resolviste", "resolves", + "resonabelt", "resonate", + "resoultion", "resolution", + "respecitve", "respective", + "respectifs", "respects", + "respection", "respecting", + "respectons", "respects", + "respectuos", "respects", + "respektive", "respective", + "respiratoy", "respiratory", + "responcive", "responsive", + "responisve", "responsive", + "responsibe", 
"responsive", + "responsiby", "responsibly", + "responsile", "responsive", + "responsing", "responding", + "ressembled", "resembled", + "restarants", "restaurants", + "restaraunt", "restaurant", + "restaruant", "restaurant", + "restatting", "restarting", + "restaurent", "restaurant", + "restauring", "restarting", + "resteraunt", "restaurant", + "restircted", "restricted", + "restorting", "restarting", + "restrainig", "restraining", + "restrcited", "restricted", + "restrcting", "restarting", + "restricing", "restricting", + "restricion", "restriction", + "restricive", "restrictive", + "restrictes", "restricts", + "restrictie", "restrictive", + "restricton", "restriction", + "restructed", "restricted", + "restuarant", "restaurant", + "resturants", "restaurants", + "resturaunt", "restaurant", + "retaliaton", "retaliation", + "rethorical", "rhetorical", + "retierment", "retirement", + "retribuito", "retribution", + "retrosepct", "retrospect", + "retrospekt", "retrospect", + "revaluated", "reevaluated", + "revealtion", "revelations", + "revelaiton", "revelations", + "revelatons", "revelations", + "revelution", "revelation", + "reversable", "reversible", + "reversably", "reversal", + "reviewtrue", "reviewer", + "revisiones", "revisions", + "revisionis", "revisions", + "revoltuion", "revolution", + "revoluiton", "revolutions", + "revolutoin", "revolutions", + "revoultion", "revolution", + "rewarching", "rewatching", + "rewatchibg", "rewatching", + "rewatchign", "rewatching", + "rewatchimg", "rewatching", + "rhapsodomy", "rhapsody", + "rhetorisch", "rhetoric", + "ridicilous", "ridiculous", + "ridicoulus", "ridiculous", + "ridiculise", "ridicule", + "ridiculize", "ridicule", + "ridiculled", "ridicule", + "ridiculose", "ridicule", + "ridiculued", "ridicule", + "rienforced", "reinforced", + "rigthfully", "rightfully", + "roleplaing", "roleplaying", + "romanmania", "romanian", + "roundaboot", "roundabout", + "rucuperate", "recuperate", + "rudimentry", "rudimentary", + "sacarmento", "sacramento", + "sacntioned", "sanctioned", + "sacraficed", "sacrificed", + "sacrafices", "sacrifices", + "sacramenno", "sacramento", + "sacreficed", "sacrificed", + "sacrefices", "sacrifices", + "sacremento", "sacramento", + "sacrifaced", "sacrificed", + "sacrifaces", "sacrifices", + "sacrifical", "sacrificial", + "sacrificas", "sacrifices", + "sacrificie", "sacrificed", + "sacrificng", "sacrificing", + "sacrifises", "sacrifices", + "sacrifized", "sacrificed", + "sacrifizes", "sacrifices", + "sacromento", "sacramento", + "sadistisch", "sadistic", + "sanctionne", "sanctioned", + "sandiwches", "sandwiches", + "sandviches", "sandwiches", + "sandwishes", "sandwiches", + "sanitazion", "sanitation", + "santiation", "sanitation", + "sastifying", "satisfying", + "satellitte", "satellites", + "satifsying", "satisfying", + "satrically", "satirically", + "satsifying", "satisfying", + "sattelites", "satellites", + "saturacion", "saturation", + "scandalosa", "scandals", + "scandalose", "scandals", + "scandalosi", "scandals", + "scandaloso", "scandals", + "scandaniva", "scandinavia", + "scandinava", "scandinavian", + "scandinvia", "scandinavia", + "scaramento", "sacramento", + "scarificed", "sacrificed", + "scarifices", "sacrifices", + "scarmbling", "scrambling", + "scartching", "scratching", + "sceintific", "scientific", + "sceintists", "scientists", + "scenarioes", "scenarios", + "scenarions", "scenarios", + "scenarious", "scenarios", + "scheudling", "scheduling", + "scholarhip", "scholarship", + "scholarley", "scholarly", + 
"sciencists", "scientists", + "scientests", "scientists", + "scirptures", "scriptures", + "scooterers", "scooters", + "scorebaord", "scoreboard", + "scoreborad", "scoreboard", + "scorebored", "scoreboard", + "scorpiomon", "scorpion", + "scracthing", "scratching", + "scramblies", "scramble", + "screenshat", "screenshot", + "screenshit", "screenshot", + "scriptores", "scriptures", + "scripturae", "scriptures", + "scriputres", "scriptures", + "scritpures", "scriptures", + "scrutinity", "scrutiny", + "seahawkers", "seahawks", + "sebastiaan", "sebastian", + "segegrated", "segregated", + "segragated", "segregated", + "segregaded", "segregated", + "segregatie", "segregated", + "segretated", "segregated", + "segrigated", "segregated", + "selectiose", "selections", + "selectivly", "selectively", + "selectivos", "selections", + "selfishess", "selfishness", + "senitments", "sentiments", + "sensitiviy", "sensitivity", + "sensitivty", "sensitivity", + "sentaments", "sentiments", + "sentancing", "sentencing", + "sentements", "sentiments", + "sentencian", "sentencing", + "sentensing", "sentencing", + "sentimenal", "sentimental", + "sentimetal", "sentimental", + "sentincing", "sentencing", + "sentinents", "sentiments", + "separacion", "separation", + "separaters", "separates", + "separatley", "separately", + "separatron", "separation", + "separetely", "separately", + "seperately", "separately", + "seperating", "separating", + "seperation", "separation", + "seperatism", "separatism", + "seperatist", "separatist", + "seperatley", "seperate", + "sepulchure", "sepulchre", + "serenitary", "serenity", + "serviceble", "serviceable", + "settelment", "settlement", + "settlemens", "settlements", + "settlemets", "settlements", + "settlemnts", "settlements", + "seuxalized", "sexualized", + "seventeeen", "seventeen", + "sexaulized", "sexualized", + "sexualixed", "sexualized", + "sexuallity", "sexually", + "sexualzied", "sexualized", + "sexulaized", "sexualized", + "shakespare", "shakespeare", + "shakespeer", "shakespeare", + "shakespere", "shakespeare", + "shamelesly", "shamelessly", + "shamelessy", "shamelessly", + "shaprening", "sharpening", + "shareholds", "shareholders", + "sharkening", "sharpening", + "sharpining", "sharpening", + "shartening", "sharpening", + "shatnering", "shattering", + "shattening", "shattering", + "shepharded", "shepherd", + "shilouette", "silhouette", + "shitlasses", "shitless", + "shortenend", "shortened", + "shortining", "shortening", + "sidelinien", "sideline", + "sidelinjen", "sideline", + "sidelinked", "sideline", + "sigantures", "signatures", + "sightstine", "sightstone", + "signficant", "significant", + "signifiant", "significant", + "significat", "significant", + "signitures", "signatures", + "sigthstone", "sightstone", + "sihlouette", "silhouette", + "silohuette", "silhouette", + "silouhette", "silhouette", + "similairty", "similarity", + "similarily", "similarly", + "similarlly", "similarly", + "similiarly", "similarly", + "similiarty", "similarity", + "simliarity", "similarity", + "simluation", "simulation", + "simplictic", "simplistic", + "simplifing", "simplifying", + "simplifyed", "simplified", + "simplifyng", "simplifying", + "simplisitc", "simplistic", + "simplisity", "simplicity", + "simplistes", "simplest", + "simplivity", "simplicity", + "simplyfied", "simplified", + "simualtion", "simulation", + "simulacion", "simulation", + "simulaiton", "simulations", + "simulaties", "simulate", + "simulative", "simulate", + "simulatons", "simulations", + "simulatore", "simulate", + 
"sincereley", "sincerely", + "sincerelly", "sincerely", + "singatures", "signatures", + "singulaire", "singular", + "singulariy", "singularity", + "singularty", "singularity", + "singulator", "singular", + "sitautions", "situations", + "situatinal", "situational", + "skatebaord", "skateboard", + "skateborad", "skateboard", + "skatebored", "skateboard", + "skatebrand", "skateboard", + "skeletones", "skeletons", + "skeptecism", "skepticism", + "skepticals", "skeptics", + "skepticles", "skeptics", + "skepticons", "skeptics", + "skeptisicm", "skepticism", + "skeptisism", "skepticism", + "sketchysex", "sketches", + "sketpicism", "skepticism", + "skillhosts", "skillshots", + "skillshits", "skillshots", + "skillshoot", "skillshots", + "skillslots", "skillshots", + "skillsofts", "skillshots", + "skillsshot", "skillshots", + "skirmiches", "skirmish", + "skpeticism", "skepticism", + "slaughterd", "slaughtered", + "slipperies", "slippers", + "smarpthone", "smartphones", + "smarthpone", "smartphone", + "snadwiches", "sandwiches", + "snowbaling", "snowballing", + "snowballes", "snowballs", + "snowballls", "snowballs", + "socailists", "socialists", + "socailized", "socialized", + "socialisim", "socialism", + "socializng", "socializing", + "socialsits", "socialists", + "sociapaths", "sociopaths", + "socilaists", "socialists", + "socilaized", "socialized", + "sociologia", "sociological", + "sociopatas", "sociopaths", + "sociopatch", "sociopaths", + "sociopatic", "sociopathic", + "socratease", "socrates", + "socreboard", "scoreboard", + "soemthings", "somethings", + "soldiarity", "solidarity", + "solidairty", "solidarity", + "soliditary", "solidarity", + "solitudine", "solitude", + "somehtings", "somethings", + "someonelse", "someones", + "somethibng", "somethin", + "somethigng", "somethin", + "somethigns", "somethings", + "somethihng", "somethin", + "somethiing", "somethin", + "somethijng", "somethin", + "somethikng", "somethin", + "somethimng", "somethin", + "somethinbg", "somethings", + "somethines", "somethings", + "somethinfg", "somethings", + "somethinhg", "somethings", + "somethinig", "somethings", + "somethinkg", "somethings", + "somethinks", "somethings", + "somethinmg", "somethings", + "somethinng", "somethings", + "somethintg", "somethings", + "somethiong", "somethin", + "somethiung", "somethin", + "sophicated", "sophisticated", + "sotrmfront", "stormfront", + "sotrylines", "storylines", + "soudntrack", "soundtrack", + "soundrtack", "soundtracks", + "soundtracs", "soundtracks", + "soundtrakc", "soundtracks", + "soundtrakk", "soundtrack", + "soundtraks", "soundtracks", + "southampon", "southampton", + "southamton", "southampton", + "southerers", "southerners", + "southernes", "southerners", + "southerton", "southern", + "souveniers", "souvenirs", + "sovereigny", "sovereignty", + "sovereinty", "sovereignty", + "soverignty", "sovereignty", + "spartaniis", "spartans", + "spartanops", "spartans", + "specailist", "specialist", + "specailize", "specializes", + "specialice", "specialize", + "specialied", "specialized", + "specialies", "specializes", + "specialits", "specials", + "speciallly", "specially", + "speciallty", "specially", + "specialops", "specials", + "specialsts", "specialists", + "specialtys", "specials", + "specialzed", "specialized", + "specialzes", "specializes", + "specifices", "specifics", + "specifiing", "specifying", + "specifiyng", "specifying", + "speciliast", "specialists", + "specimines", "specimen", + "spectarors", "spectators", + "spectaters", "spectators", + "spectracal", 
"spectral", + "spectraply", "spectral", + "spectrolab", "spectral", + "speculatie", "speculative", + "speculatin", "speculation", + "speecheasy", "speeches", + "speicalist", "specialist", + "spiritualy", "spiritually", + "sponsorees", "sponsors", + "sponsorhip", "sponsorship", + "sponsorise", "sponsors", + "spontaneos", "spontaneous", + "spontaneus", "spontaneous", + "spontanous", "spontaneous", + "spoonfulls", "spoonfuls", + "spreadshet", "spreadsheet", + "springfeld", "springfield", + "springfied", "springfield", + "spriritual", "spiritual", + "squirrells", "squirrels", + "squirrelus", "squirrels", + "stabelized", "stabilized", + "stabilzied", "stabilized", + "stablility", "stability", + "stablizied", "stabilized", + "staggaring", "staggering", + "stakeboard", "skateboard", + "starighten", "straighten", + "starnation", "starvation", + "startegies", "strategies", + "startupbus", "startups", + "starwberry", "strawberry", + "statememts", "statements", + "statictics", "statistics", + "stationair", "stationary", + "statisitcs", "statistics", + "statistcal", "statistical", + "statistisk", "statistics", + "stauration", "saturation", + "stealthboy", "stealthy", + "stealthely", "stealthy", + "stealthify", "stealthy", + "stealthray", "stealthy", + "steeleries", "steelers", + "stereotipe", "stereotype", + "stereotpye", "stereotypes", + "steriotype", "stereotype", + "steroetype", "stereotype", + "sterotypes", "stereotypes", + "steryotype", "stereotype", + "stimilants", "stimulants", + "stimilated", "stimulated", + "stimualted", "stimulated", + "stimulatie", "stimulated", + "stimulatin", "stimulation", + "stimulaton", "stimulation", + "stimulents", "stimulants", + "stomrfront", "stormfront", + "storelines", "storylines", + "stormfornt", "stormfront", + "stormfromt", "stormfront", + "stornfront", "stormfront", + "stornghold", "stronghold", + "stradegies", "strategies", + "strageties", "strategies", + "straighted", "straightened", + "straightie", "straighten", + "straightin", "straighten", + "straigthen", "straighten", + "stranglove", "strangle", + "strangreal", "strangle", + "stratagies", "strategies", + "strategems", "strategies", + "strategice", "strategies", + "strategisk", "strategies", + "stravation", "starvation", + "strawbarry", "strawberry", + "strawbeary", "strawberry", + "strawbeery", "strawberry", + "strawbrary", "strawberry", + "strawburry", "strawberry", + "streaching", "stretching", + "streamtrue", "streamer", + "strechting", "stretching", + "strecthing", "stretching", + "stregnthen", "strengthen", + "streichung", "stretching", + "strenghten", "strengthen", + "strengsten", "strengthen", + "strengthes", "strengths", + "strengthin", "strengthen", + "stressende", "stressed", + "striaghten", "straighten", + "stromfront", "stormfront", + "stronkhold", "stronghold", + "stroylines", "storylines", + "structered", "structured", + "structrual", "structural", + "structurel", "structural", + "strucutral", "structural", + "strucutred", "structured", + "strucutres", "structures", + "strugglign", "struggling", + "strwaberry", "strawberry", + "sttutering", "stuttering", + "stupidfree", "stupider", + "stupiditiy", "stupidity", + "sturctural", "structural", + "sturctures", "structures", + "sturggling", "struggling", + "subarmines", "submarines", + "subcultuur", "subculture", + "subesquent", "subsequent", + "subisdized", "subsidized", + "subjectief", "subjective", + "subjectifs", "subjects", + "subjectivy", "subjectively", + "subjektive", "subjective", + "submariens", "submarines", + "submarinas", 
"submarines", + "submergerd", "submerged", + "submerines", "submarines", + "submisison", "submissions", + "submissies", "submissive", + "submissons", "submissions", + "submittion", "submitting", + "subsadized", "subsidized", + "subscirbed", "subscribed", + "subscirber", "subscribers", + "subscribar", "subscriber", + "subscribir", "subscriber", + "subscrible", "subscriber", + "subscriped", "subscribed", + "subscrubed", "subscribed", + "subscryber", "subscriber", + "subsedized", "subsidized", + "subsequant", "subsequent", + "subsidezed", "subsidized", + "subsidiced", "subsidized", + "subsidizng", "subsidizing", + "subsiduary", "subsidiary", + "subsiquent", "subsequent", + "subsittute", "substitutes", + "subsizided", "subsidized", + "subsrcibed", "subscribed", + "substanial", "substantial", + "substansen", "substances", + "substanser", "substances", + "substanses", "substances", + "substantie", "substantive", + "substatial", "substantial", + "substences", "substances", + "substitite", "substitute", + "substittue", "substitutes", + "substitude", "substitute", + "substitued", "substitute", + "substituer", "substitute", + "substitues", "substitutes", + "substiture", "substitute", + "substituto", "substitution", + "substituts", "substitutes", + "substracts", "subtracts", + "substutite", "substitutes", + "subsudized", "subsidized", + "subtitltes", "subtitle", + "succceeded", "succeeded", + "succcesses", "successes", + "succesfuly", "successfully", + "succesions", "succession", + "successing", "succession", + "successivo", "succession", + "sucesfully", "successfully", + "sucessfull", "successful", + "sucessfuly", "successfully", + "sudnerland", "sunderland", + "sufferered", "suffered", + "sufferring", "suffering", + "sufficiant", "sufficient", + "suggestied", "suggestive", + "suggestief", "suggestive", + "suggestons", "suggests", + "sumbarines", "submarines", + "sumbissive", "submissive", + "sumbitting", "submitting", + "summerized", "summarized", + "summorized", "summarized", + "summurized", "summarized", + "sunderlona", "sunderland", + "sunderlund", "sunderland", + "sungalsses", "sunglasses", + "sunglesses", "sunglasses", + "sunglinger", "gunslinger", + "sunscreeen", "sunscreen", + "superfical", "superficial", + "superfluos", "superfluous", + "superioara", "superior", + "superioare", "superior", + "superioris", "superiors", + "superivsor", "supervisors", + "supermaket", "supermarket", + "supermarkt", "supermarket", + "superouman", "superhuman", + "superposer", "superpowers", + "superviors", "supervisors", + "superviosr", "supervisors", + "supervisar", "supervisor", + "superviser", "supervisor", + "supervisin", "supervision", + "supervison", "supervision", + "supervsior", "supervisors", + "supperssor", "suppressor", + "supplament", "supplement", + "supplemant", "supplemental", + "supplemets", "supplements", + "supportare", "supporters", + "supporteur", "supporter", + "supportied", "supported", + "supportors", "supporters", + "supposdely", "supposedly", + "supposebly", "supposedly", + "supposidly", "supposedly", + "suppresion", "suppression", + "suppresors", "suppressor", + "suppressin", "suppression", + "suppressio", "suppressor", + "suppresson", "suppression", + "suprassing", "surpassing", + "supressing", "suppressing", + "supression", "suppression", + "supsension", "suspension", + "supsicions", "suspicions", + "supsicious", "suspicious", + "surounding", "surrounding", + "surplanted", "supplanted", + "surpressed", "suppressed", + "surprizing", "surprising", + "surrenderd", "surrendered", + 
"surrouding", "surrounding", + "surroundes", "surrounds", + "surroundig", "surroundings", + "survivours", "survivor", + "suseptable", "susceptible", + "suseptible", "susceptible", + "suspecions", "suspicions", + "suspecious", "suspicious", + "suspencion", "suspension", + "suspendeds", "suspense", + "suspention", "suspension", + "suspicians", "suspicions", + "suspiciois", "suspicions", + "suspicioso", "suspicions", + "suspicioun", "suspicion", + "suspicison", "suspicions", + "suspiciuos", "suspicions", + "suspicsion", "suspicions", + "suspisions", "suspicions", + "suspisious", "suspicious", + "suspitions", "suspicions", + "sustainble", "sustainable", + "swaetshirt", "sweatshirt", + "swearengin", "swearing", + "swearshirt", "sweatshirt", + "sweathsirt", "sweatshirt", + "sweatshits", "sweatshirt", + "sweatshort", "sweatshirt", + "sweatshrit", "sweatshirt", + "sweerheart", "sweetheart", + "sweetshart", "sweetheart", + "switcheasy", "switches", + "switzerand", "switzerland", + "symapthize", "sympathize", + "symbolisch", "symbolic", + "symbolisim", "symbolism", + "symetrical", "symmetrical", + "sympatheic", "sympathetic", + "sympathiek", "sympathize", + "sympathien", "sympathize", + "sympathtic", "sympathetic", + "sympathyze", "sympathize", + "sympethize", "sympathize", + "symphatize", "sympathize", + "symphonity", "symphony", + "sympothize", "sympathize", + "syncronous", "synchronous", + "synomymous", "synonymous", + "synomynous", "synonymous", + "synonamous", "synonymous", + "synonimous", "synonymous", + "synonmyous", "synonymous", + "synonomous", "synonymous", + "synonumous", "synonymous", + "synonynous", "synonymous", + "sypmathize", "sympathize", + "systamatic", "systematic", + "systemetic", "systematic", + "systemisch", "systemic", + "systimatic", "systematic", + "tabelspoon", "tablespoon", + "tablespons", "tablespoons", + "tablesppon", "tablespoon", + "tacitcally", "tactically", + "taiwanesse", "taiwanese", + "taligating", "tailgating", + "tantrumers", "tantrums", + "targetting", "targeting", + "teamfigths", "teamfights", + "teamifghts", "teamfights", + "teamspeack", "teamspeak", + "techicians", "technicians", + "techincian", "technician", + "techinican", "technician", + "techinques", "techniques", + "technicain", "technician", + "technicaly", "technically", + "technicans", "technicians", + "technichan", "technician", + "technicien", "technician", + "technicion", "technician", + "technitian", "technician", + "technqiues", "techniques", + "techtician", "technician", + "tehnically", "ethnically", + "telegrapgh", "telegraph", + "teleporing", "teleporting", + "televesion", "television", + "televisivo", "television", + "temafights", "teamfights", + "temerature", "temperature", + "temperatue", "temperature", + "temperment", "temperament", + "temperture", "temperature", + "templarios", "templars", + "templarius", "templars", + "temporaily", "temporarily", + "temporarly", "temporary", + "temptating", "temptation", + "temptetion", "temptation", + "tendancies", "tendencies", + "tendencias", "tendencies", + "tendencije", "tendencies", + "tendensies", "tendencies", + "tendincies", "tendencies", + "tensionors", "tensions", + "tentacreul", "tentacle", + "termanator", "terminator", + "termendous", "tremendous", + "termiantor", "terminator", + "termigator", "terminator", + "terminales", "terminals", + "terminalis", "terminals", + "terminarla", "terminal", + "terminarlo", "terminal", + "terminaron", "terminator", + "terminater", "terminator", + "terminolgy", "terminology", + "terorrists", "terrorists", + 
"terrerists", "terrorists", + "terrestial", "terrestrial", + "terriblely", "terribly", + "terriories", "territories", + "territoral", "territorial", + "territores", "territories", + "territoris", "territories", + "territorry", "territory", + "terrorisim", "terrorism", + "terrorsits", "terrorists", + "terrurists", "terrorists", + "testiclees", "testicles", + "testiclies", "testicle", + "testimoney", "testimony", + "thankyooou", "thankyou", + "themselfes", "themselves", + "themsevles", "themselves", + "themsleves", "themselves", + "theocracry", "theocracy", + "theologial", "theological", + "therapetic", "therapeutic", + "therepists", "therapists", + "theripists", "therapists", + "thermastat", "thermostat", + "thermistat", "thermostat", + "thermomter", "thermometer", + "theromstat", "thermostat", + "thorttling", "throttling", + "thorughout", "throughout", + "thouroghly", "thoroughly", + "threadened", "threaded", + "threatenes", "threatens", + "threatning", "threatening", + "threshhold", "threshold", + "throthling", "throttling", + "throtlling", "throttling", + "throughiut", "throughput", + "thubmnails", "thumbnails", + "thumbmails", "thumbnails", + "thunderbot", "thunderbolt", + "thunderolt", "thunderbolt", + "tighetning", "tightening", + "tightining", "tightening", + "tigthening", "tightening", + "tjpanishad", "upanishad", + "toothbruch", "toothbrush", + "toothbruth", "toothbrush", + "toothbursh", "toothbrush", + "toothrbush", "toothbrush", + "toppingest", "toppings", + "torchilght", "torchlight", + "torchlgiht", "torchlight", + "torchligth", "torchlight", + "torhclight", "torchlight", + "torrentbig", "torrenting", + "torrenters", "torrents", + "torrentors", "torrents", + "tortillera", "tortilla", + "tortillias", "tortilla", + "tortillita", "tortilla", + "tortilllas", "tortilla", + "torunament", "tournament", + "totalitara", "totalitarian", + "touchsceen", "touchscreen", + "touchscren", "touchscreen", + "touranment", "tournaments", + "tourmanent", "tournaments", + "tournamets", "tournaments", + "tournamnet", "tournament", + "tournemant", "tournament", + "tournement", "tournament", + "toxicitity", "toxicity", + "trafficing", "trafficking", + "trainwreak", "trainwreck", + "traitorise", "traitors", + "tramboline", "trampoline", + "tramploine", "trampoline", + "trampolene", "trampoline", + "tranformed", "transformed", + "tranistion", "transition", + "tranlsated", "translated", + "transalted", "translated", + "transaltes", "translates", + "transaltor", "translator", + "transation", "transition", + "transciprt", "transcripts", + "transcirpt", "transcripts", + "transcrips", "transcripts", + "transcrito", "transcript", + "transcrits", "transcripts", + "transcrpit", "transcript", + "transfered", "transferred", + "transferer", "transferred", + "transferes", "transfers", + "transferrs", "transfers", + "transferts", "transfers", + "transfomed", "transformed", + "transfored", "transformed", + "transforme", "transfer", + "transfroms", "transforms", + "transgeder", "transgender", + "transgener", "transgender", + "transicion", "transition", + "transision", "transition", + "transister", "transistor", + "transitons", "transitions", + "transitors", "transistor", + "transkript", "transcript", + "translater", "translator", + "translatin", "translations", + "translatio", "translator", + "translpant", "transplants", + "transluent", "translucent", + "transmited", "transmitted", + "transmiter", "transmitter", + "transmitor", "transistor", + "transmorgs", "transforms", + "transpalnt", "transplants", + "transphoic", 
"transphobic", + "transplain", "transplant", + "transplate", "transplant", + "transplats", "transplants", + "transpoder", "transported", + "transportr", "transporter", + "transsexal", "transsexual", + "transtator", "translator", + "tranzistor", "transistor", + "trasncript", "transcript", + "trasnforms", "transforms", + "trasnlated", "translated", + "trasnlator", "translator", + "trasnplant", "transplant", + "traveleres", "travelers", + "travelodge", "traveled", + "traverlers", "traverse", + "traversare", "traverse", + "traversier", "traverse", + "treasurery", "treasury", + "trememdous", "tremendous", + "tremondous", "tremendous", + "trespasing", "trespassing", + "trianwreck", "trainwreck", + "trochlight", "torchlight", + "trustworhy", "trustworthy", + "trustworty", "trustworthy", + "trustwothy", "trustworthy", + "tryannical", "tyrannical", + "tunraround", "turnaround", + "tupparware", "tupperware", + "turnapound", "turnaround", + "turthfully", "truthfully", + "tutoriales", "tutorials", + "tyrantical", "tyrannical", + "ubiqituous", "ubiquitous", + "ubiquotous", "ubiquitous", + "ubiqutious", "ubiquitous", + "ukrainains", "ukrainians", + "ukraineans", "ukrainians", + "ukrainiens", "ukrainians", + "ukraininas", "ukrainians", + "ukrianians", "ukrainians", + "ulitmately", "ultimately", + "ulterioara", "ulterior", + "ulterioare", "ulterior", + "ultimative", "ultimate", + "ultimatley", "ultimately", + "ultimatuum", "ultimatum", + "unanwsered", "unanswered", + "unasnwered", "unanswered", + "unattanded", "unattended", + "unattented", "unattended", + "unavailabe", "unavailable", + "unavailble", "unavailable", + "unavoidble", "unavoidable", + "unawnsered", "unanswered", + "unbalenced", "unbalanced", + "unballance", "unbalance", + "unbalnaced", "unbalanced", + "unbareable", "unbearable", + "unbeakable", "unbeatable", + "unbeareble", "unbearable", + "unbeatbale", "unbeatable", + "unbeateble", "unbeatable", + "unbeerable", "unbearable", + "unbeetable", "unbeatable", + "unbeknowst", "unbeknownst", + "unbreakble", "unbreakable", + "uncencored", "uncensored", + "uncensered", "uncensored", + "uncersored", "uncensored", + "uncertainy", "uncertainty", + "uncertanty", "uncertainty", + "uncesnored", "uncensored", + "uncomitted", "uncommitted", + "uncommited", "uncommitted", + "unconcious", "unconscious", + "unconscous", "unconscious", + "undebiably", "undeniably", + "undeinable", "undeniable", + "undeinably", "undeniably", + "undenaible", "undeniable", + "undenaibly", "undeniably", + "undenyable", "undeniable", + "undenyably", "undeniably", + "underbaker", "undertaker", + "undercling", "underlying", + "underfaker", "undertaker", + "undergated", "underrated", + "undergrand", "undergrad", + "undergroud", "underground", + "undergrund", "underground", + "undermimes", "undermines", + "underminde", "undermines", + "underminig", "undermining", + "underneeth", "underneath", + "underneith", "underneath", + "undernieth", "underneath", + "underpowed", "underpowered", + "underraged", "underrated", + "underraker", "undertaker", + "underrater", "undertaker", + "undersatnd", "understands", + "understadn", "understands", + "understans", "understands", + "understnad", "understands", + "understoon", "understood", + "understsnd", "understands", + "undertoker", "undertaker", + "undertsand", "understands", + "undertunes", "undertones", + "underwager", "underwater", + "underwares", "underwater", + "underwolrd", "underworld", + "underwoord", "underworld", + "underwrold", "underworld", + "underyling", "underlying", + "undesrtand", 
"understands", + "undoubtedy", "undoubtedly", + "undoubtely", "undoubtedly", + "undoubtley", "undoubtedly", + "uneccesary", "unnecessary", + "unecessary", "unnecessary", + "unedcuated", "uneducated", + "unedicated", "uneducated", + "unempolyed", "unemployed", + "unexplaind", "unexplained", + "unexplaned", "unexplained", + "unfamilair", "unfamiliar", + "unfamilier", "unfamiliar", + "unfinsihed", "unfinished", + "unfirendly", "unfriendly", + "unfortuate", "unfortunate", + "unfreindly", "unfriendly", + "unfriednly", "unfriendly", + "unfriently", "unfriendly", + "ungrapeful", "ungrateful", + "ungreatful", "ungrateful", + "unhealthly", "unhealthy", + "unicornios", "unicorns", + "unifnished", "unfinished", + "unihabited", "uninhabited", + "unilatreal", "unilateral", + "unimporant", "unimportant", + "unimpresed", "unimpressed", + "unimpressd", "unimpressed", + "uninsipred", "uninspired", + "uninspried", "uninspired", + "uninstaled", "uninstalled", + "uniquiness", "uniqueness", + "univercity", "university", + "univeristy", "university", + "universale", "universe", + "universaly", "universally", + "universels", "universes", + "universets", "universes", + "universite", "universities", + "universtiy", "university", + "unjustifed", "unjustified", + "unknowingy", "unknowingly", + "unknowinly", "unknowingly", + "unnecesary", "unnecessary", + "unofficail", "unofficial", + "unoffocial", "unofficial", + "unorginial", "unoriginal", + "unorignial", "unoriginal", + "unorigonal", "unoriginal", + "unplacable", "unplayable", + "unplaybale", "unplayable", + "unplayeble", "unplayable", + "unpleasent", "unpleasant", + "unpopulair", "unpopular", + "unproteced", "unprotected", + "unqiueness", "uniqueness", + "unqualifed", "unqualified", + "unrealesed", "unreleased", + "unrealible", "unreliable", + "unrealistc", "unrealistic", + "unrealitic", "unrealistic", + "unreasonal", "unreasonably", + "unrelaible", "unreliable", + "unreleated", "unreleased", + "unrelyable", "unreliable", + "unrepetant", "unrepentant", + "unrepetent", "unrepentant", + "unresponse", "unresponsive", + "unsencored", "uncensored", + "unsetlling", "unsettling", + "unsolicted", "unsolicited", + "unsubscibe", "unsubscribe", + "unsubscrbe", "unsubscribe", + "unsucesful", "unsuccessful", + "unsuprised", "unsurprised", + "unsuprized", "unsurprised", + "unviersity", "university", + "unwrittern", "unwritten", + "urkainians", "ukrainians", + "utlimately", "ultimately", + "utlrasound", "ultrasound", + "vaccinatie", "vaccinated", + "vaccineras", "vaccines", + "valentians", "valentines", + "valentiens", "valentines", + "valentimes", "valentines", + "valentinas", "valentines", + "valentinos", "valentines", + "valentones", "valentines", + "validitity", "validity", + "valnetines", "valentines", + "vandalisim", "vandalism", + "vasectomey", "vasectomy", + "vegatarian", "vegetarian", + "vegaterian", "vegetarian", + "vegeratian", "vegetarians", + "vegetairan", "vegetarians", + "vegetarain", "vegetarians", + "vegetarien", "vegetarian", + "vegetarion", "vegetarian", + "vegetatian", "vegetarian", + "vegeterian", "vegetarian", + "vegitables", "vegetables", + "vehemantly", "vehemently", + "vehemontly", "vehemently", + "veitnamese", "vietnamese", + "veiwership", "viewership", + "veiwpoints", "viewpoints", + "venezuella", "venezuela", + "verificato", "verification", + "verifyable", "verifiable", + "veritcally", "vertically", + "veritiable", "verifiable", + "vernecular", "vernacular", + "vernicular", "vernacular", + "versatiliy", "versatility", + "versatille", "versatile", + 
"versatilty", "versatility", + "versitlity", "versatility", + "vewiership", "viewership", + "vibratoare", "vibrator", + "vicitmized", "victimized", + "vicotrious", "victorious", + "victemized", "victimized", + "victomized", "victimized", + "victorinos", "victorious", + "victorinus", "victorious", + "victoriosa", "victorious", + "victorioso", "victorious", + "victoriuos", "victorious", + "victumized", "victimized", + "videogaems", "videogames", + "videojames", "videogames", + "vidoegames", "videogames", + "vientamese", "vietnamese", + "vietmanese", "vietnamese", + "vietnamees", "vietnamese", + "vietnamise", "vietnamese", + "viewpionts", "viewpoints", + "vigilantie", "vigilante", + "vigoruosly", "vigorously", + "vigourosly", "vigorously", + "villageois", "villages", + "vindicitve", "vindictive", + "vindictave", "vindictive", + "visibiltiy", "visibility", + "vitenamese", "vietnamese", + "vocabluary", "vocabulary", + "volatiltiy", "volatility", + "volativity", "volatility", + "volitality", "volatility", + "volleyboll", "volleyball", + "vollyeball", "volleyball", + "volonteers", "volunteers", + "volounteer", "volunteer", + "voluntairy", "voluntarily", + "voluntarly", "voluntary", + "voluntears", "volunteers", + "volunteeer", "volunteers", + "volunteerd", "volunteered", + "voluntered", "volunteered", + "vulernable", "vulnerable", + "vulnarable", "vulnerable", + "vulnerabil", "vulnerable", + "vulnurable", "vulnerable", + "vunlerable", "vulnerable", + "warrandyte", "warranty", + "warrantles", "warranties", + "warrenties", "warranties", + "washignton", "washington", + "waterlemon", "watermelon", + "watermalon", "watermelon", + "waterproff", "waterproof", + "wavelegnth", "wavelength", + "wavelenghs", "wavelength", + "wavelenght", "wavelength", + "weakensses", "weaknesses", + "weaknesess", "weaknesses", + "weathliest", "wealthiest", + "wedensdays", "wednesdays", + "wednesdsay", "wednesdays", + "wednessday", "wednesdays", + "wednsedays", "wednesdays", + "weightened", "weighted", + "welathiest", "wealthiest", + "wellignton", "wellington", + "wellingotn", "wellington", + "wendesdays", "wednesdays", + "wereabouts", "whereabouts", + "westbroook", "westbrook", + "westernese", "westerners", + "westerness", "westerners", + "westminser", "westminster", + "westminter", "westminster", + "whatosever", "whatsoever", + "whatseover", "whatsoever", + "whipsering", "whispering", + "whsipering", "whispering", + "widepsread", "widespread", + "wikileakes", "wikileaks", + "wilderniss", "wilderness", + "wildreness", "wilderness", + "willfullly", "willfully", + "winchestor", "winchester", + "windhsield", "windshield", + "windsheild", "windshield", + "windshiled", "windshield", + "wisconsion", "wisconsin", + "wishpering", "whispering", + "withdrawan", "withdrawn", + "withdrawel", "withdrawal", + "withdrawin", "withdrawn", + "withholdng", "withholding", + "withrdawal", "withdrawals", + "witnissing", "witnessing", + "wonderfull", "wonderful", + "wonderfuly", "wonderfully", + "wonderwand", "wonderland", + "worhsiping", "worshiping", + "workingest", "workings", + "workstaion", "workstation", + "workstaton", "workstation", + "worshippig", "worshipping", + "worshoping", "worshiping", + "wrestlewar", "wrestler", + "xenohpobic", "xenophobic", + "xenophibia", "xenophobia", + "xenophibic", "xenophobic", + "xenophonic", "xenophobic", + "xenophopia", "xenophobia", + "xenophopic", "xenophobic", + "xeonphobia", "xenophobia", + "xeonphobic", "xenophobic", + "yourselfes", "yourselves", + "yoursleves", "yourselves", + "zimbabwaen", 
"zimbabwe", + "zionistisk", "zionists", + "abandonig", "abandoning", + "abandonne", "abandonment", + "abanonded", "abandoned", + "abdomnial", "abdominal", + "abdonimal", "abdominal", + "aberation", "aberration", + "abnormaly", "abnormally", + "abodminal", "abdominal", + "abondoned", "abandoned", + "aborigene", "aborigine", + "aboslutes", "absolutes", + "abosrbing", "absorbing", + "abreviate", "abbreviate", + "abritrary", "arbitrary", + "abruptley", "abruptly", + "absailing", "abseiling", + "absloutes", "absolutes", + "absolutey", "absolutely", + "absolutly", "absolutely", + "absoultes", "absolutes", + "abstracto", "abstraction", + "absurdley", "absurdly", + "absuridty", "absurdity", + "abusrdity", "absurdity", + "academica", "academia", + "accademic", "academic", + "accalimed", "acclaimed", + "accelerar", "accelerator", + "accending", "ascending", + "accension", "accession", + "accidenty", "accidently", + "acclamied", "acclaimed", + "accliamed", "acclaimed", + "accomdate", "accommodate", + "accordeon", "accordion", + "accordian", "accordion", + "accoridng", "according", + "accountas", "accountants", + "accountat", "accountants", + "accoustic", "acoustic", + "accroding", "according", + "accuraccy", "accuracy", + "acftually", "factually", + "acheiving", "achieving", + "achieveds", "achieves", + "achillees", "achilles", + "achilleos", "achilles", + "achilleus", "achilles", + "achiveing", "achieving", + "acitvates", "activates", + "aclhemist", "alchemist", + "acomplish", "accomplish", + "acquisito", "acquisition", + "acronymes", "acronyms", + "acronymns", "acronyms", + "acsending", "ascending", + "acsension", "ascension", + "activaste", "activates", + "activatin", "activation", + "activelly", "actively", + "activisim", "activism", + "activisit", "activist", + "activites", "activities", + "actresess", "actresses", + "acusation", "causation", + "acutality", "actuality", + "adavanced", "advanced", + "adbominal", "abdominal", + "additonal", "additional", + "addoptive", "adoptive", + "addresing", "addressing", + "addtional", "additional", + "adhearing", "adhering", + "adherance", "adherence", + "adjectivs", "adjectives", + "adjustabe", "adjustable", + "administr", "administer", + "admitedly", "admittedly", + "adolecent", "adolescent", + "adovcated", "advocated", + "adovcates", "advocates", + "adquiring", "acquiring", + "adresable", "addressable", + "adressing", "addressing", + "aduiobook", "audiobook", + "advatange", "advantage", + "adventurs", "adventures", + "adveristy", "adversity", + "advertisy", "adversity", + "advisorys", "advisors", + "aeorspace", "aerospace", + "aeropsace", "aerospace", + "aerosapce", "aerospace", + "aersopace", "aerospace", + "aestethic", "aesthetic", + "aethistic", "atheistic", + "affiliato", "affiliation", + "affinitiy", "affinity", + "affirmate", "affirmative", + "affliated", "affiliated", + "africanas", "africans", + "africanos", "africans", + "aggegrate", "aggregate", + "aggresive", "aggressive", + "agnosticm", "agnosticism", + "agregates", "aggregates", + "agreggate", "aggregate", + "agrentina", "argentina", + "agression", "aggression", + "agressive", "aggressive", + "agressvie", "agressive", + "agruement", "arguement", + "agruments", "arguments", + "agurement", "arguement", + "ailenated", "alienated", + "airbourne", "airborne", + "aircrafts", "aircraft", + "airplance", "airplane", + "airrcraft", "aircraft", + "aksreddit", "askreddit", + "alcehmist", "alchemist", + "alchemsit", "alchemist", + "alchimest", "alchemist", + "alchmeist", "alchemist", + "alchoolic", 
"alcoholic", + "alcoholis", "alcoholics", + "alechmist", "alchemist", + "alegience", "allegiance", + "aleinated", "alienated", + "algoriths", "algorithms", + "algoritms", "algorithms", + "algorthim", "algorithm", + "algortihm", "algorithm", + "alignemnt", "alignment", + "alimunium", "aluminium", + "alingment", "alignment", + "allainces", "alliances", + "alledgely", "allegedly", + "allegence", "allegiance", + "alleivate", "alleviate", + "allievate", "alleviate", + "alliviate", "alleviate", + "allopones", "allophones", + "allthough", "although", + "almightly", "almighty", + "alocholic", "alcoholic", + "alogrithm", "algorithm", + "alphabeat", "alphabet", + "alrightey", "alrighty", + "alrightly", "alrighty", + "alrightty", "alrighty", + "alrington", "arlington", + "alrorythm", "algorithm", + "alterante", "alternate", + "alternatr", "alternator", + "althetics", "athletics", + "althought", "although", + "altruisim", "altruism", + "amateures", "amateurs", + "ambluance", "ambulance", + "ambuigity", "ambiguity", + "amendmant", "amendment", + "amercians", "americans", + "americain", "american", + "americams", "americas", + "americaps", "americas", + "americats", "americas", + "amibguity", "ambiguity", + "aminosity", "animosity", + "amrstrong", "armstrong", + "amublance", "ambulance", + "amunition", "ammunition", + "anachrist", "anarchist", + "analagous", "analogous", + "analitycs", "analytics", + "analtyics", "analytics", + "analyitcs", "analytics", + "analyseas", "analyses", + "analysees", "analyses", + "analysens", "analyses", + "analysise", "analyses", + "analystes", "analysts", + "analzying", "analyzing", + "anarchsim", "anarchism", + "anayltics", "analytics", + "anaylzing", "analyzing", + "ancedotal", "anecdotal", + "ancedotes", "anecdotes", + "ancestory", "ancestry", + "androgeny", "androgyny", + "androides", "androids", + "androidos", "androids", + "anecdotle", "anecdote", + "anecodtal", "anecdotal", + "anecodtes", "anecdotes", + "anectodal", "anecdotal", + "anectodes", "anecdotes", + "anedoctal", "anecdotal", + "anedoctes", "anecdotes", + "animostiy", "animosity", + "anitvirus", "antivirus", + "anlaytics", "analytics", + "anniversy", "anniversary", + "annointed", "anointed", + "annoucnes", "announces", + "annoyingy", "annoyingly", + "annoymous", "anonymous", + "annoynace", "annoyance", + "annyoance", "annoyance", + "anomisity", "animosity", + "anomolies", "anomalies", + "anomolous", "anomalous", + "anomynity", "anonymity", + "anomynous", "anonymous", + "anonimity", "anonymity", + "anonmyous", "anonymous", + "anonymoys", "anonymously", + "anorexiac", "anorexic", + "anorexica", "anorexia", + "anrachist", "anarchist", + "ansestors", "ancestors", + "antarctia", "antarctica", + "antennaes", "antennas", + "antiviurs", "antivirus", + "antivrius", "antivirus", + "antivuris", "antivirus", + "anwsering", "answering", + "anynomity", "anonymity", + "anynomous", "anonymous", + "aparthide", "apartheid", + "aparthied", "apartheid", + "apartmens", "apartments", + "apocalype", "apocalypse", + "apostrope", "apostrophe", + "apparenty", "apparently", + "appearane", "appearances", + "appenines", "apennines", + "apperance", "appearance", + "appetitie", "appetite", + "applaudes", "applause", + "applicato", "application", + "appreciae", "appreciates", + "apprentie", "apprentice", + "approachs", "approaches", + "apratheid", "apartheid", + "apsaragus", "asparagus", + "apsergers", "aspergers", + "aquainted", "acquainted", + "arbirtary", "arbitrary", + "arbritary", "arbitrary", + "arcehtype", "archetype", + 
"archetect", "architect", + "archetpye", "archetype", + "archetyps", "archetypes", + "architecs", "architects", + "archtypes", "archetypes", + "aregument", "arguement", + "areospace", "aerospace", + "argessive", "agressive", + "argeument", "arguement", + "arguabley", "arguably", + "arguablly", "arguably", + "arguement", "argument", + "arguemnet", "arguement", + "arguemnts", "arguments", + "argumeent", "arguement", + "arhtritis", "arthritis", + "aribtrary", "arbitrary", + "ariplanes", "airplanes", + "aristolte", "aristotle", + "aristotel", "aristotle", + "aritfacts", "artifacts", + "arlignton", "arlington", + "arlingotn", "arlington", + "armistace", "armistice", + "armstorng", "armstrong", + "arpatheid", "apartheid", + "arthirtis", "arthritis", + "artifcats", "artifacts", + "artifical", "artificial", + "artillary", "artillery", + "arugement", "arguement", + "arugments", "arguments", + "asapragus", "asparagus", + "asbestoes", "asbestos", + "asborbing", "absorbing", + "asburdity", "absurdity", + "ascendend", "ascended", + "ascneding", "ascending", + "ascnesion", "ascension", + "asethetic", "aesthetic", + "asnwering", "answering", + "asociated", "associated", + "assasined", "assassinated", + "assassian", "assassin", + "assassine", "assassinate", + "assasssin", "assassins", + "assaultes", "assaults", + "assembeld", "assembled", + "assembley", "assembly", + "assemblie", "assemble", + "assisnate", "assassinate", + "assistans", "assistants", + "assistsnt", "assistants", + "assmebled", "assembled", + "associato", "association", + "assoicate", "associate", + "asssasins", "assassins", + "assualted", "assaulted", + "assulated", "assaulted", + "asteorids", "asteroids", + "astericks", "asterisk", + "asteriods", "asteroids", + "astroanut", "astronaut", + "astronuat", "astronaut", + "astrounat", "astronaut", + "asuterity", "austerity", + "atempting", "attempting", + "atheltics", "athletics", + "atheneans", "athenians", + "athesitic", "atheistic", + "athetlics", "athletics", + "athiestic", "atheistic", + "athleticm", "athleticism", + "atmosphir", "atmospheric", + "atributed", "attributed", + "atributes", "attributes", + "atrifacts", "artifacts", + "atrillery", "artillery", + "atrittion", "attrition", + "attachmet", "attachments", + "attaindre", "attainder", + "attemting", "attempting", + "attemtped", "attempted", + "attendent", "attendant", + "attension", "attention", + "attirbute", "attribute", + "attirtion", "attrition", + "attmepted", "attempted", + "attractes", "attracts", + "attractin", "attraction", + "attributo", "attribution", + "attributs", "attributes", + "attritube", "attribute", + "auctionrs", "auctions", + "auidobook", "audiobook", + "auromated", "automated", + "australin", "australians", + "authroity", "authority", + "autoattak", "autoattack", + "autogrpah", "autograph", + "autonomos", "autonomous", + "auxillary", "auxiliary", + "avaialble", "available", + "availible", "available", + "avalaible", "available", + "avaliable", "available", + "averageed", "averaged", + "avialable", "available", + "awakenend", "awakened", + "awesomley", "awesomely", + "awkawrdly", "awkwardly", + "awnsering", "answering", + "bacehlors", "bachelors", + "bachelour", "bachelor", + "bachleors", "bachelors", + "bacholers", "bachelors", + "backdooor", "backdoor", + "backfeild", "backfield", + "backfiled", "backfield", + "backgroud", "background", + "backpakcs", "backpacks", + "badnwagon", "bandwagon", + "badnwidth", "bandwidth", + "balckjack", "blackjack", + "balcklist", "blacklist", + "balitmore", "baltimore", + 
"ballisitc", "ballistic", + "ballsitic", "ballistic", + "balsphemy", "blasphemy", + "bandiwdth", "bandwidth", + "bandwdith", "bandwidth", + "bandwidht", "bandwidth", + "bandwitdh", "bandwidth", + "bankrupcy", "bankruptcy", + "bankrupty", "bankruptcy", + "banruptcy", "bankruptcy", + "baordwalk", "boardwalk", + "barabrian", "barbarian", + "barbarain", "barbarian", + "barbarina", "barbarian", + "barcelets", "bracelets", + "barcleona", "barcelona", + "bareclona", "barcelona", + "barrackus", "barracks", + "bascially", "basically", + "bastardes", "bastards", + "bastardos", "bastards", + "bastardus", "bastards", + "bathrooom", "bathroom", + "batlimore", "baltimore", + "battailon", "battalion", + "battlaion", "battalion", + "beahviour", "behaviour", + "beauitful", "beautiful", + "beautifyl", "beautifully", + "becnhmark", "benchmark", + "becomeing", "becoming", + "becomming", "becoming", + "beehtoven", "beethoven", + "begginers", "beginners", + "beggining", "beginning", + "begininng", "beginning", + "beginnins", "beginnings", + "behaivors", "behaviors", + "behaivour", "behaviour", + "behavoirs", "behaviors", + "behavoiur", "behaviour", + "behvaiour", "behaviour", + "beleiving", "believing", + "beliveing", "believing", + "belssings", "blessings", + "bemusemnt", "bemusement", + "benchamrk", "benchmark", + "benchmars", "benchmarks", + "benedicat", "benedict", + "benedickt", "benedict", + "benghazhi", "benghazi", + "benghazzi", "benghazi", + "bergamont", "bergamot", + "berkelely", "berkeley", + "bersekrer", "berserker", + "berskerer", "berserker", + "beseiging", "besieging", + "bestialiy", "bestiality", + "beuatiful", "beautiful", + "biginning", "beginning", + "bigrading", "brigading", + "billbaord", "billboard", + "billboars", "billboards", + "binominal", "binomial", + "birgading", "brigading", + "birghtest", "brightest", + "birhtdays", "birthdays", + "bitcoints", "bitcoins", + "blackbery", "blackberry", + "blackhaws", "blackhawks", + "blackshit", "blacksmith", + "blanketts", "blankets", + "blapshemy", "blasphemy", + "blashpemy", "blasphemy", + "blaspehmy", "blasphemy", + "blasphmey", "blasphemy", + "blatanlty", "blatantly", + "blatimore", "baltimore", + "bleuberry", "blueberry", + "bleutooth", "bluetooth", + "blisteres", "blisters", + "blizzcoin", "blizzcon", + "blockchan", "blockchain", + "blockeras", "blockers", + "bloodbore", "bloodborne", + "boardband", "broadband", + "boardcast", "broadcast", + "bodyweigt", "bodyweight", + "bookamrks", "bookmarks", + "bookmakrs", "bookmarks", + "bookmarkd", "bookmarked", + "boradband", "broadband", + "boradcast", "broadcast", + "boradwalk", "boardwalk", + "bouregois", "bourgeois", + "bourgeios", "bourgeois", + "bourgoeis", "bourgeois", + "boyfirend", "boyfriend", + "boyfreind", "boyfriend", + "boyfriens", "boyfriends", + "brabarian", "barbarian", + "bracelona", "barcelona", + "braodband", "broadband", + "braodcast", "broadcast", + "brazilias", "brazilians", + "breakdows", "breakdowns", + "breserker", "berserker", + "bretheren", "brethren", + "bridaging", "brigading", + "brightern", "brighten", + "brigthest", "brightest", + "brilliany", "brilliantly", + "brithdays", "birthdays", + "broadwalk", "boardwalk", + "bruiseres", "bruisers", + "brunettte", "brunette", + "brusseles", "brussels", + "brussells", "brussels", + "brutailty", "brutality", + "brutallly", "brutally", + "buddhisim", "buddhism", + "buddihsts", "buddhists", + "buddishts", "buddhists", + "buhddists", "buddhists", + "buidlings", "buildings", + "bulidings", "buildings", + "burgunday", "burgundy", + 
"burgundry", "burgundy", + "burritoes", "burritos", + "burtality", "brutality", + "busineses", "business", + "businessa", "businessman", + "businesse", "businessmen", + "businesss", "businesses", + "bussiness", "business", + "buthcered", "butchered", + "butterlfy", "butterfly", + "cacausian", "caucasian", + "caclulate", "calculate", + "cacuasian", "caucasian", + "caculater", "calculator", + "cafeteira", "cafeteria", + "cafetiera", "cafeteria", + "caffeinne", "caffeine", + "calcualte", "calculate", + "californa", "california", + "caluclate", "calculate", + "calulated", "calculated", + "calulater", "calculator", + "cambirdge", "cambridge", + "cambrdige", "cambridge", + "cambrigde", "cambridge", + "camoflage", "camouflage", + "campagins", "campaigns", + "campaings", "campaigns", + "campiagns", "campaigns", + "campusers", "campuses", + "camrbidge", "cambridge", + "canadains", "canadians", + "candadate", "candidate", + "candidats", "candidates", + "cannister", "canister", + "cannoical", "canonical", + "canoncial", "canonical", + "capactior", "capacitor", + "capicator", "capacitor", + "capitalis", "capitals", + "caprenter", "carpenter", + "capsulers", "capsules", + "capsulets", "capsules", + "carachter", "character", + "cardbaord", "cardboard", + "cardborad", "cardboard", + "cardianls", "cardinals", + "cardnials", "cardinals", + "caridnals", "cardinals", + "carmalite", "carmelite", + "carnberry", "cranberry", + "carolinia", "carolina", + "carpetner", "carpenter", + "carptener", "carpenter", + "carribean", "caribbean", + "cartdrige", "cartridge", + "cartilege", "cartilage", + "cartirdge", "cartridge", + "cartrdige", "cartridge", + "cartrigde", "cartridge", + "casaulity", "causality", + "cashieres", "cashiers", + "cassawory", "cassowary", + "cassettte", "cassette", + "casuation", "causation", + "cataclsym", "cataclysm", + "cataclyms", "cataclysm", + "catacylsm", "cataclysm", + "catacyslm", "cataclysm", + "catalcysm", "cataclysm", + "catalgoue", "catalogue", + "cathderal", "cathedral", + "catherdal", "cathedral", + "cathloics", "catholics", + "cathredal", "cathedral", + "caucaisan", "caucasian", + "caucasain", "caucasian", + "causacian", "caucasian", + "causailty", "causality", + "celebirty", "celebrity", + "celebrato", "celebration", + "celebrite", "celebrities", + "celesital", "celestial", + "celestail", "celestial", + "cementary", "cemetery", + "cemetarey", "cemetery", + "cenitpede", "centipede", + "centepide", "centipede", + "centipeed", "centipede", + "centruies", "centuries", + "centuties", "centuries", + "cerebrawl", "cerebral", + "certanity", "certainty", + "certianty", "certainty", + "cesspoool", "cesspool", + "chairmain", "chairman", + "challange", "challenge", + "challengr", "challenger", + "challengs", "challenges", + "chameloen", "chameleon", + "champagen", "champagne", + "champange", "champagne", + "chandlure", "chandler", + "changable", "changeable", + "charactor", "character", + "chatedral", "cathedral", + "chatolics", "catholics", + "checkmeat", "checkmate", + "checkpoit", "checkpoints", + "chekcmate", "checkmate", + "chemestry", "chemistry", + "chemicaly", "chemically", + "chemsitry", "chemistry", + "chernboyl", "chernobyl", + "chernobly", "chernobyl", + "chernoybl", "chernobyl", + "chernyobl", "chernobyl", + "cheronbyl", "chernobyl", + "chidlfree", "childfree", + "chidlrens", "childrens", + "chihauhua", "chihuahua", + "chihuahau", "chihuahua", + "childbird", "childbirth", + "childerns", "childrens", + "childisch", "childish", + "childresn", "childrens", + "chirstian", 
"christian", + "chirstmas", "christmas", + "chiuhahua", "chihuahua", + "chlidfree", "childfree", + "chlidrens", "childrens", + "chocloate", "chocolate", + "chocoalte", "chocolate", + "chocolats", "chocolates", + "chocolste", "chocolates", + "cholocate", "chocolate", + "chrenobyl", "chernobyl", + "chrisitan", "christian", + "christain", "christian", + "christams", "christmas", + "chrsitian", "christian", + "chrsitmas", "christmas", + "churchers", "churches", + "cigaretts", "cigarettes", + "cigeratte", "cigarette", + "cilivians", "civilians", + "cilpboard", "clipboard", + "cilynders", "cylinders", + "circuitos", "circuits", + "ciriculum", "curriculum", + "cirticise", "criticise", + "civilains", "civilians", + "civillian", "civilian", + "classicos", "classics", + "classicus", "classics", + "classifiy", "classify", + "cleanisng", "cleansing", + "cleasning", "cleansing", + "clikcbait", "clickbait", + "clinicaly", "clinically", + "clipbaord", "clipboard", + "clitories", "clitoris", + "clitorios", "clitoris", + "clitorius", "clitoris", + "clucthing", "clutching", + "clutchign", "clutching", + "cluthcing", "clutching", + "coca cola", "coca-cola", + "cockatils", "cocktails", + "cocktials", "cocktails", + "cognizent", "cognizant", + "colateral", "collateral", + "collabore", "collaborate", + "collasped", "collapsed", + "collaspes", "collapses", + "colleauge", "colleague", + "collectes", "collects", + "collectie", "collective", + "collecton", "collection", + "collectos", "collectors", + "collegaue", "colleague", + "collegues", "colleagues", + "collisson", "collisions", + "collonade", "colonnade", + "collonies", "colonies", + "collpased", "collapsed", + "collpases", "collapses", + "colombina", "colombia", + "columbina", "columbia", + "comapnies", "companies", + "combatans", "combatants", + "combinato", "combination", + "combusion", "combustion", + "comestics", "cosmetics", + "comisions", "commissions", + "comission", "commission", + "comitting", "committing", + "commandes", "commands", + "commentar", "commentator", + "commentes", "commenters", + "commercie", "commerce", + "commision", "commission", + "commiteed", "commited", + "commiting", "committing", + "commitmet", "commitments", + "commments", "comments", + "commongly", "commonly", + "communiss", "communists", + "communite", "communities", + "communits", "communist", + "communsim", "communism", + "compaines", "companies", + "compalins", "complains", + "compalint", "compliant", + "comparisn", "comparisons", + "compeltes", "completes", + "competant", "competent", + "competend", "competed", + "competion", "competition", + "competive", "competitive", + "compilant", "compliant", + "compilare", "compiler", + "compilato", "compilation", + "compitent", "competent", + "complaind", "complained", + "complaing", "complaining", + "completen", "complement", + "completey", "completely", + "completin", "completion", + "complians", "complains", + "componant", "component", + "comprable", "comparable", + "compresas", "compress", + "compreses", "compress", + "compteurs", "computers", + "comptuers", "computers", + "computato", "computation", + "comradets", "comrades", + "comsetics", "cosmetics", + "conanical", "canonical", + "conatiner", "container", + "concelaed", "concealed", + "concelaer", "concealer", + "concelear", "concealer", + "concensus", "consensus", + "conceptos", "concepts", + "conceptul", "conceptual", + "concernig", "concerning", + "concertas", "concerts", + "concevied", "conceived", + "conciders", "considers", + "concieted", "conceited", + "concieved", 
"conceived", + "conclusie", "conclusive", + "concsious", "conscious", + "concurret", "concurrent", + "condamned", "condemned", + "condemend", "condemned", + "condemmed", "condemned", + "condemnig", "condemning", + "condenmed", "condemned", + "condesend", "condensed", + "condesned", "condensed", + "condmened", "condemned", + "conection", "connection", + "conenctor", "connector", + "conferene", "conferences", + "confessin", "confession", + "confideny", "confidently", + "confilcts", "conflicts", + "confimred", "confirmed", + "confirmas", "confirms", + "conflcits", "conflicts", + "confrimed", "confirmed", + "congitive", "cognitive", + "conlcuded", "concluded", + "connectes", "connects", + "connectit", "connecticut", + "connectos", "connectors", + "conquerer", "conqueror", + "consdider", "consider", + "consensul", "consensual", + "conserned", "concerned", + "consicous", "conscious", + "considerd", "considered", + "considert", "considerate", + "consisent", "consistent", + "consistes", "consists", + "consolato", "consolation", + "consolide", "consolidate", + "consonent", "consonant", + "constanly", "constantly", + "constanst", "constants", + "constanty", "constantly", + "constasnt", "constants", + "constitue", "constitutes", + "constrait", "constraints", + "construcs", "constructs", + "construde", "construed", + "construst", "constructs", + "constucts", "constructs", + "constured", "construed", + "consulant", "consultant", + "consultat", "consultant", + "consumate", "consummate", + "contactes", "contacts", + "contactos", "contacts", + "contagios", "contagious", + "containes", "contains", + "containig", "containing", + "containts", "contains", + "contemple", "contemplate", + "contendor", "contender", + "contentas", "contents", + "contentes", "contents", + "contentos", "contents", + "contestas", "contests", + "contestat", "contestants", + "contestes", "contests", + "contextes", "contexts", + "contextos", "contexts", + "contianer", "container", + "contibute", "contribute", + "contigent", "contingent", + "continant", "continental", + "continens", "continents", + "continous", "continuous", + "continuos", "continuous", + "continute", "continue", + "contiunal", "continual", + "contracto", "contraction", + "contribue", "contribute", + "contribuo", "contributor", + "controlas", "controls", + "controled", "controlled", + "controles", "controls", + "controlls", "controls", + "convenant", "covenant", + "convencen", "convenience", + "conveniet", "convenient", + "conversie", "converse", + "conversin", "conversions", + "convertie", "convertible", + "convertis", "converts", + "cooldwons", "cooldowns", + "coordinar", "coordinator", + "copenhagn", "copenhagen", + "coprorate", "corporate", + "copywrite", "copyright", + "corcodile", "crocodile", + "corparate", "corporate", + "corproate", "corporate", + "correclty", "correctly", + "correctin", "correction", + "correlato", "correlation", + "corridoor", "corridor", + "corruptin", "corruption", + "corssfire", "crossfire", + "corsshair", "crosshair", + "corsspost", "crosspost", + "coruching", "crouching", + "cosemtics", "cosmetics", + "costumise", "costumes", + "counciles", "councils", + "councills", "councils", + "councilos", "councils", + "countains", "contains", + "counteres", "counters", + "countires", "countries", + "courching", "crouching", + "courtesey", "courtesy", + "courtesty", "courtesy", + "coururier", "courier", + "coutnered", "countered", + "crapenter", "carpenter", + "creativey", "creatively", + "creedence", "credence", + "crhistmas", "christmas", + 
"cricketts", "crickets", + "criminaly", "criminally", + "critereon", "criterion", + "criterias", "criteria", + "criticaly", "critically", + "criticies", "criticise", + "criticisn", "criticising", + "critisice", "criticise", + "critisicm", "criticism", + "critising", "criticising", + "critisism", "criticism", + "critisize", "criticise", + "critizing", "criticizing", + "crosshiar", "crosshair", + "crossifre", "crossfire", + "crticised", "criticised", + "crusdaers", "crusaders", + "crutchers", "crutches", + "crystalls", "crystals", + "crystalus", "crystals", + "crystalys", "crystals", + "cuacasian", "caucasian", + "cuasality", "causality", + "culitvate", "cultivate", + "culturaly", "culturally", + "culturels", "cultures", + "curiostiy", "curiosity", + "curisoity", "curiosity", + "currenlty", "currently", + "curriculm", "curriculum", + "cursaders", "crusaders", + "custcenes", "cutscenes", + "cutsceens", "cutscenes", + "cutscence", "cutscene", + "cutsences", "cutscenes", + "cyclinder", "cylinder", + "cyclistes", "cyclists", + "cylindres", "cylinders", + "cynicisim", "cynicism", + "dahsboard", "dashboard", + "dalmation", "dalmatian", + "dangeroys", "dangerously", + "dashbaord", "dashboard", + "daugthers", "daughters", + "davantage", "advantage", + "deadlfits", "deadlifts", + "deadpoool", "deadpool", + "dealershp", "dealerships", + "deathmath", "deathmatch", + "decalring", "declaring", + "decendant", "descendant", + "decendent", "descendant", + "decipting", "depicting", + "deciption", "depiction", + "decisivie", "decisive", + "declarase", "declares", + "declarees", "declares", + "decoratie", "decorative", + "decoratin", "decorations", + "decpetion", "deception", + "decpetive", "deceptive", + "decribing", "describing", + "decsended", "descended", + "deductibe", "deductible", + "defaintly", "defiantly", + "defaltion", "deflation", + "defanitly", "defiantly", + "defeintly", "definetly", + "defendent", "defendant", + "defensese", "defenseless", + "defianlty", "defiantly", + "deficeint", "deficient", + "deficieny", "deficiency", + "deficites", "deficits", + "definance", "defiance", + "definatey", "definately", + "definatly", "definitely", + "definetly", "definitely", + "definetyl", "definetly", + "definilty", "definitly", + "definitie", "definitive", + "definitin", "definitions", + "definitly", "definitely", + "definiton", "definition", + "definitve", "definite", + "definityl", "definitly", + "definltey", "definetly", + "defintaly", "defiantly", + "defintily", "definitly", + "defintion", "definition", + "defintley", "definetly", + "defitenly", "definetly", + "defitinly", "definitly", + "defitnaly", "defiantly", + "defitnely", "definetly", + "deflectin", "deflection", + "defnietly", "definetly", + "degeneret", "degenerate", + "degradato", "degradation", + "degradead", "degraded", + "degrassie", "degrasse", + "degrassse", "degrasse", + "deifnetly", "definetly", + "deifnitly", "definitly", + "deisgners", "designers", + "delagates", "delegates", + "delcaring", "declaring", + "delcining", "declining", + "delegatie", "delegate", + "delerious", "delirious", + "deleteing", "deleting", + "delfation", "deflation", + "deliveres", "delivers", + "deliverys", "delivers", + "delpoying", "deploying", + "demcorats", "democrats", + "deminsion", "dimension", + "democarcy", "democracy", + "democract", "democrat", + "demonstre", "demonstrate", + "denominar", "denominator", + "dentistas", "dentists", + "dentistes", "dentists", + "deomcrats", "democrats", + "deopsited", "deposited", + "deparment", "department", + 
"departmet", "departments", + "depciting", "depicting", + "depcition", "depiction", + "depection", "deception", + "depedency", "dependency", + "depicitng", "depicting", + "depiciton", "depiction", + "deplyoing", "deploying", + "depoisted", "deposited", + "depolying", "deploying", + "depositas", "deposits", + "deposites", "deposits", + "depositis", "deposits", + "depositos", "deposits", + "depostied", "deposited", + "depressie", "depressive", + "depressin", "depression", + "depserate", "desperate", + "depsoited", "deposited", + "descirbes", "describes", + "descision", "decision", + "desginers", "designers", + "desgining", "designing", + "desicions", "decisions", + "designade", "designated", + "designato", "designation", + "desingage", "disengage", + "desingers", "designers", + "desinging", "designing", + "desktopos", "desktops", + "desparate", "desperate", + "desperato", "desperation", + "despoited", "deposited", + "desriable", "desirable", + "dessigned", "designed", + "destinato", "destination", + "destoryed", "destroyed", + "destoryer", "destroyer", + "destroyes", "destroys", + "destructo", "destruction", + "destryoed", "destroyed", + "destryoer", "destroyer", + "desuction", "seduction", + "detailled", "detailed", + "detatched", "detached", + "detectivs", "detectives", + "deteriate", "deteriorate", + "determing", "determining", + "determins", "determines", + "developrs", "develops", + "diabetees", "diabetes", + "diablical", "diabolical", + "diagonaal", "diagonal", + "diagonsed", "diagnosed", + "diagonsis", "diagnosis", + "diagramas", "diagrams", + "diagramms", "diagrams", + "dialectes", "dialects", + "dialectos", "dialects", + "diarrheoa", "diarrhea", + "diasbling", "disabling", + "dichomoty", "dichotomy", + "dicovered", "discovered", + "dictaters", "dictates", + "dictionay", "dictionary", + "difenitly", "definitly", + "diferrent", "different", + "differene", "differences", + "differens", "differences", + "differeny", "differently", + "difficuly", "difficulty", + "diffucult", "difficult", + "dificulty", "difficulty", + "diganosed", "diagnosed", + "diganosis", "diagnosis", + "dimenions", "dimensions", + "dimention", "dimension", + "dimesnion", "dimension", + "diminishs", "diminishes", + "dinasours", "dinosaurs", + "dinosuars", "dinosaurs", + "dinsoaurs", "dinosaurs", + "dionsaurs", "dinosaurs", + "diphtongs", "diphthongs", + "dipthongs", "diphthongs", + "direcotry", "directory", + "directoty", "directory", + "directroy", "directory", + "disapears", "disappears", + "disaprity", "disparity", + "disastros", "disastrous", + "disatrous", "disastrous", + "disbaling", "disabling", + "disbeleif", "disbelief", + "disbelife", "disbelief", + "disciplen", "disciplines", + "disclamer", "disclaimer", + "disclosue", "disclosure", + "disconnet", "disconnect", + "discosure", "discourse", + "discoverd", "discovered", + "discovere", "discoveries", + "discredid", "discredited", + "discribed", "described", + "discribes", "describes", + "discussin", "discussion", + "diserable", "desirable", + "disgarees", "disagrees", + "disgiused", "disguised", + "disgusied", "disguised", + "disgustes", "disgusts", + "disgustos", "disgusts", + "disgustus", "disgusts", + "dishonesy", "dishonesty", + "dishonord", "dishonored", + "disicples", "disciples", + "dismantel", "dismantle", + "dismisals", "dismissal", + "disnegage", "disengage", + "dispairty", "disparity", + "dispalyed", "displayed", + "dispartiy", "disparity", + "dispenced", "dispensed", + "dispeners", "dispenser", + "displayes", "displays", + "disruptin", "disruption", 
+ "dissapear", "disappear", + "dissarray", "disarray", + "dissmisal", "dismissal", + "disspiate", "dissipate", + "distincte", "distinctive", + "distrcits", "districts", + "distribue", "distributed", + "distrubed", "disturbed", + "distrupts", "distrust", + "disturben", "disturbance", + "diverisfy", "diversify", + "diveristy", "diversity", + "diverstiy", "diversity", + "dividened", "dividend", + "divinitiy", "divinity", + "doccument", "document", + "docrtines", "doctrines", + "docuhebag", "douchebag", + "dogdammit", "goddammit", + "dogfather", "godfather", + "dolphines", "dolphins", + "domecracy", "democracy", + "domecrats", "democrats", + "domiantes", "dominates", + "dominatin", "domination", + "dominaton", "domination", + "dominiant", "dominant", + "donwgrade", "downgrade", + "donwloads", "downloads", + "donwsides", "downsides", + "donwvoted", "downvoted", + "donwvotes", "downvotes", + "doublelit", "doublelift", + "doucehbag", "douchebag", + "downgarde", "downgrade", + "downlaods", "downloads", + "downloaad", "download", + "downovted", "downvoted", + "dravadian", "dravidian", + "drummless", "drumless", + "dsyphoria", "dysphoria", + "dsytopian", "dystopian", + "duaghters", "daughters", + "duplicats", "duplicates", + "durabiliy", "durability", + "dynamicus", "dynamics", + "dypshoria", "dysphoria", + "dyshporia", "dysphoria", + "dysoptian", "dystopian", + "dysphoira", "dysphoria", + "dysphroia", "dysphoria", + "dyspohria", "dysphoria", + "dyspotian", "dystopian", + "dystopain", "dystopian", + "dystpoian", "dystopian", + "eachohter", "eachother", + "eachotehr", "eachother", + "eachtoher", "eachother", + "earpluggs", "earplugs", + "earthboud", "earthbound", + "eastwoood", "eastwood", + "eastwoord", "eastwood", + "ecclectic", "eclectic", + "ecomonics", "economics", + "edficient", "deficient", + "effecient", "efficient", + "efficeint", "efficient", + "efficency", "efficiency", + "efficieny", "efficiency", + "effulence", "effluence", + "egalitara", "egalitarian", + "egpytians", "egyptians", + "egyptains", "egyptians", + "egytpians", "egyptians", + "ehtically", "ethically", + "ehtnicity", "ethnicity", + "eighteeen", "eighteen", + "eitquette", "etiquette", + "ejacualte", "ejaculate", + "electivre", "elective", + "electorns", "electrons", + "electrial", "electrical", + "electricy", "electricity", + "electroal", "electoral", + "elementay", "elementary", + "elepahnts", "elephants", + "eliminase", "eliminates", + "eliminato", "elimination", + "ellignton", "ellington", + "ellingotn", "ellington", + "eloquenty", "eloquently", + "elsehwere", "elsewhere", + "emapthize", "empathize", + "embarress", "embarrassed", + "emmisarry", "emissary", + "emmisions", "emissions", + "emmitting", "emitting", + "empahsize", "emphasize", + "emperical", "empirical", + "emphaised", "emphasised", + "emphatize", "empathize", + "emphazise", "emphasize", + "emphysyma", "emphysema", + "empitness", "emptiness", + "employeer", "employer", + "employeur", "employer", + "empolyees", "employees", + "emtpiness", "emptiness", + "emualtion", "emulation", + "enahncing", "enhancing", + "enchantig", "enchanting", + "enclousre", "enclosure", + "enclsoure", "enclosure", + "encolsure", "enclosure", + "encompase", "encompass", + "enconding", "encoding", + "encounted", "encountered", + "encrpyted", "encrypted", + "encrytped", "encrypted", + "encyrpted", "encrypted", + "endangerd", "endangered", + "enevlopes", "envelopes", + "enforcees", "enforces", + "engagemet", "engagements", + "engagment", "engagement", + "engieneer", "engineer", + "engineeer", 
"engineer", + "engineerd", "engineered", + "enhacning", "enhancing", + "enhanceds", "enhances", + "enligthen", "enlighten", + "enourmous", "enormous", + "ensconsed", "ensconced", + "enthicity", "ethnicity", + "enthusiam", "enthusiasm", + "enthusiat", "enthusiast", + "entirelly", "entirely", + "entitlied", "entitled", + "enveloppe", "envelope", + "epidsodes", "episodes", + "epilepsey", "epilepsy", + "epiphanny", "epiphany", + "episonage", "espionage", + "epscially", "specially", + "epsionage", "espionage", + "eqautions", "equations", + "equialent", "equivalent", + "equivalet", "equivalents", + "ermington", "remington", + "erroenous", "erroneous", + "escalatie", "escalate", + "escalatin", "escalation", + "esitmated", "estimated", + "esitmates", "estimates", + "eslewhere", "elsewhere", + "especialy", "especially", + "espianoge", "espionage", + "espinoage", "espionage", + "espoinage", "espionage", + "esponiage", "espionage", + "espressso", "espresso", + "essencial", "essential", + "essentail", "essential", + "essentias", "essentials", + "essentual", "essential", + "essesital", "essential", + "estiamted", "estimated", + "estiamtes", "estimates", + "estimatin", "estimation", + "ethcially", "ethically", + "ethincity", "ethnicity", + "ethnicaly", "ethnically", + "ethniticy", "ethnicity", + "etmyology", "etymology", + "euclidian", "euclidean", + "euorpeans", "europeans", + "euphoriac", "euphoric", + "euphorica", "euphoria", + "europenas", "europeans", + "europians", "europeans", + "eurpoeans", "europeans", + "evangelia", "evangelical", + "evelation", "elevation", + "evenlopes", "envelopes", + "eventally", "eventually", + "eventualy", "eventually", + "everthing", "everything", + "evertyime", "everytime", + "everwhere", "everywhere", + "everyoens", "everyones", + "everyteim", "everytime", + "everytiem", "everytime", + "everyting", "everything", + "eveyrones", "everyones", + "evreyones", "everyones", + "evreytime", "everytime", + "exagerate", "exaggerate", + "exahusted", "exhausted", + "exapnsive", "expansive", + "exauhsted", "exhausted", + "excahnges", "exchanges", + "excecuted", "executed", + "excecutes", "executes", + "excellant", "excellent", + "excercise", "exercise", + "excerised", "exercised", + "excerises", "exercises", + "exceuting", "executing", + "exchnages", "exchanges", + "exclsuive", "exclusive", + "excludeds", "excludes", + "exclusivs", "exclusives", + "exclusivy", "exclusivity", + "excpetion", "exception", + "exculding", "excluding", + "exculsion", "exclusion", + "exculsive", "exclusive", + "execising", "exercising", + "execption", "exception", + "exectuing", "executing", + "exectuion", "execution", + "exectuive", "executive", + "executabe", "executable", + "exepmtion", "exemption", + "exerbated", "exacerbated", + "exercices", "exercise", + "exerciese", "exercises", + "exercizes", "exercise", + "exersices", "exercises", + "exhasuted", "exhausted", + "exhaustin", "exhaustion", + "exhibites", "exhibits", + "exhibitin", "exhibition", + "exhibtion", "exhibition", + "exhuasted", "exhausted", + "exibition", "exhibition", + "existance", "existence", + "existenta", "existential", + "existince", "existence", + "existnace", "existance", + "exlcuding", "excluding", + "exlcusion", "exclusion", + "exlcusive", "exclusive", + "exlpoding", "exploding", + "exlporers", "explorers", + "exlposion", "explosion", + "exonorate", "exonerate", + "expalined", "explained", + "expanisve", "expansive", + "expatriot", "expatriate", + "expectany", "expectancy", + "expection", "exception", + "expemtion", "exemption", 
+ "experimet", "experiments", + "explaines", "explains", + "explainig", "explaining", + "explaning", "explaining", + "expliciet", "explicit", + "explicity", "explicitly", + "explictly", "explicitly", + "explioted", "exploited", + "explodeds", "explodes", + "exploites", "exploits", + "explorare", "explorer", + "explotied", "exploited", + "expolding", "exploding", + "expolited", "exploited", + "expolsion", "explosion", + "expolsive", "explosive", + "expressie", "expressive", + "expressin", "expression", + "exsitance", "existance", + "extention", "extension", + "exteriour", "exterior", + "extermely", "extremely", + "extermism", "extremism", + "extermist", "extremist", + "externaly", "externally", + "extractin", "extraction", + "extrapole", "extrapolate", + "extreemly", "extremely", + "extremers", "extremes", + "extremley", "extremely", + "extrotion", "extortion", + "eyeballls", "eyeballs", + "eyebrowes", "eyebrows", + "eyebrowns", "eyebrows", + "eyesahdow", "eyeshadow", + "eyeshdaow", "eyeshadow", + "eygptians", "egyptians", + "eytmology", "etymology", + "faceboook", "facebook", + "faciliate", "facilitate", + "facilites", "facilities", + "facilitiy", "facility", + "facinated", "fascinated", + "facutally", "factually", + "familiair", "familiar", + "familiare", "familiarize", + "familiary", "familiarity", + "familliar", "familiar", + "fanaticas", "fanatics", + "fanaticos", "fanatics", + "fanaticus", "fanatics", + "fanatsies", "fantasies", + "fanatsize", "fantasize", + "fandation", "foundation", + "fanservie", "fanservice", + "fantazise", "fantasize", + "farenheit", "fahrenheit", + "fascistes", "fascists", + "fashoined", "fashioned", + "favorties", "favorites", + "favoruite", "favourite", + "favourits", "favourites", + "favourtie", "favourite", + "fedreally", "federally", + "feminisim", "feminism", + "feminsits", "feminists", + "femminist", "feminist", + "fesitvals", "festivals", + "fetishers", "fetishes", + "fightings", "fighting", + "filetimes", "lifetimes", + "filiament", "filament", + "filmmakes", "filmmakers", + "fingernal", "fingernails", + "flashligt", "flashlight", + "flavorade", "flavored", + "flavoures", "flavours", + "flavourus", "flavours", + "flawlessy", "flawlessly", + "flexibily", "flexibility", + "fluctaute", "fluctuate", + "flucutate", "fluctuate", + "fluttersy", "fluttershy", + "follwoing", "following", + "foootball", "football", + "forcefuly", "forcefully", + "forcibley", "forcibly", + "forciblly", "forcibly", + "forearmes", "forearms", + "foreginer", "foreigner", + "foregroud", "foreground", + "foreinger", "foreigner", + "forgeiner", "foreigner", + "forgiener", "foreigner", + "forgivens", "forgiveness", + "foriegner", "foreigner", + "forigener", "foreigner", + "formerlly", "formerly", + "formualte", "formulate", + "formulaes", "formulas", + "formulars", "formulas", + "forntline", "frontline", + "forntpage", "frontpage", + "fortuante", "fortunate", + "forumlate", "formulate", + "foundatin", "foundations", + "fourteeen", "fourteen", + "fractales", "fractals", + "fractalis", "fractals", + "fractalus", "fractals", + "fragement", "fragment", + "fragmenot", "fragment", + "franchies", "franchise", + "francsico", "francisco", + "franscico", "francisco", + "frecklers", "freckles", + "freedomes", "freedoms", + "freestlye", "freestyle", + "freesytle", "freestyle", + "fremented", "fermented", + "freqeuncy", "frequency", + "frequence", "frequencies", + "friendlis", "friendlies", + "frightend", "frightened", + "fromation", "formation", + "frontapge", "frontpage", + "frontilne", 
"frontline", + "frustrato", "frustration", + "frustrats", "frustrates", + "fucntions", "functions", + "fullscren", "fullscreen", + "funcitons", "functions", + "functiong", "functioning", + "functtion", "function", + "furiosuly", "furiously", + "furiuosly", "furiously", + "futuristc", "futuristic", + "gagnsters", "gangsters", + "galations", "galatians", + "galdiator", "gladiator", + "gallaxies", "galaxies", + "garanteed", "guaranteed", + "garantees", "guarantees", + "garuantee", "guarantee", + "gatherins", "gatherings", + "gauntelts", "gauntlets", + "gauntlent", "gauntlet", + "gaurantee", "guarantee", + "gaurentee", "guarantee", + "genatilia", "genitalia", + "geneology", "genealogy", + "generalbs", "generals", + "generalis", "generals", + "generaste", "generates", + "generatie", "generate", + "generatin", "generations", + "generatos", "generators", + "genitaila", "genitalia", + "genitales", "genitals", + "genitalis", "genitals", + "geniunely", "genuinely", + "gentailia", "genitalia", + "gentelmen", "gentlemen", + "gentialia", "genitalia", + "genuienly", "genuinely", + "genuinley", "genuinely", + "geogrpahy", "geography", + "germaniac", "germanic", + "geurrilla", "guerrilla", + "gimmickey", "gimmicky", + "gimmickly", "gimmicky", + "girlfried", "girlfriend", + "goalkeepr", "goalkeeper", + "godafther", "godfather", + "godspeeed", "godspeed", + "goegraphy", "geography", + "goldfisch", "goldfish", + "goosebums", "goosebumps", + "gorvement", "goverment", + "govemrent", "goverment", + "govenment", "government", + "goverance", "governance", + "goveremnt", "goverment", + "goverment", "government", + "govermetn", "goverment", + "govermnet", "goverment", + "governmet", "governments", + "govorment", "government", + "govrement", "goverment", + "gracefull", "graceful", + "gracefuly", "gracefully", + "graduaste", "graduates", + "graduatin", "graduation", + "grahpical", "graphical", + "grativate", "gravitate", + "graudally", "gradually", + "graudates", "graduates", + "greenalnd", "greenland", + "grenaders", "grenades", + "grpahical", "graphical", + "guadulupe", "guadalupe", + "guaranted", "guaranteed", + "guarantes", "guarantees", + "guardains", "guardians", + "guarentee", "guarantee", + "guaridans", "guardians", + "guatamala", "guatemala", + "guerrilas", "guerrillas", + "guradians", "guardians", + "guranteed", "guaranteed", + "gurantees", "guarantees", + "gutiarist", "guitarist", + "habsbourg", "habsburg", + "hairstlye", "hairstyle", + "hairsytle", "hairstyle", + "halarious", "hilarious", + "hambruger", "hamburger", + "hamburges", "hamburgers", + "hamphsire", "hampshire", + "hamsphire", "hampshire", + "handboook", "handbook", + "handedley", "handedly", + "handedlly", "handedly", + "handicape", "handicapped", + "hapmshire", "hampshire", + "happended", "happened", + "happenend", "happened", + "happenned", "happened", + "harasment", "harassment", + "hardenend", "hardened", + "hardwoord", "hardwood", + "haristyle", "hairstyle", + "harrasing", "harassing", + "harrassed", "harassed", + "harrasses", "harassed", + "hdinsight", "hindsight", + "headahces", "headaches", + "headhpone", "headphone", + "headshoot", "headshot", + "healither", "healthier", + "healtheir", "healthier", + "healthiet", "healthiest", + "healthire", "healthier", + "heapdhone", "headphone", + "hedgehoog", "hedgehog", + "hedgehorg", "hedgehog", + "heightend", "heightened", + "heirarchy", "hierarchy", + "herculase", "hercules", + "herculeas", "hercules", + "herculees", "hercules", + "herculeus", "hercules", + "heriarchy", "hierarchy", + 
"hesistant", "hesitant", + "hesistate", "hesitate", + "hesitatin", "hesitation", + "hieroglph", "hieroglyph", + "highschol", "highschool", + "hindisght", "hindsight", + "hindrence", "hindrance", + "hinduisim", "hinduism", + "hinduisum", "hinduism", + "hipsanics", "hispanics", + "hirearchy", "hierarchy", + "hirsohima", "hiroshima", + "hispancis", "hispanics", + "hitboxers", "hitboxes", + "hoepfully", "hopefully", + "holocasut", "holocaust", + "holocuast", "holocaust", + "homeonwer", "homeowner", + "homeopaty", "homeopathy", + "homewolrd", "homeworld", + "homewoner", "homeowner", + "homewrold", "homeworld", + "homogenes", "homogeneous", + "homosexul", "homosexuals", + "hopelessy", "hopelessly", + "hopsitals", "hospitals", + "horishima", "hiroshima", + "horizones", "horizons", + "horizonts", "horizons", + "horrendos", "horrendous", + "horribley", "horribly", + "horriblly", "horribly", + "horrifing", "horrifying", + "hositlity", "hostility", + "hospitaly", "hospitality", + "hosptials", "hospitals", + "hourgalss", "hourglass", + "hourlgass", "hourglass", + "househols", "households", + "humanitis", "humanities", + "humanoind", "humanoid", + "humiditiy", "humidity", + "hunagrian", "hungarian", + "hurriance", "hurricane", + "hurricans", "hurricanes", + "husbandos", "husbands", + "hydraluic", "hydraulic", + "hydropile", "hydrophile", + "hydropobe", "hydrophobe", + "hydrualic", "hydraulic", + "hyopcrite", "hypocrite", + "hypcorite", "hypocrite", + "hyperoble", "hyperbole", + "hypocracy", "hypocrisy", + "hypocrasy", "hypocrisy", + "hypocricy", "hypocrisy", + "hypocriet", "hypocrite", + "hypocrits", "hypocrites", + "hyporcite", "hypocrite", + "hypothess", "hypotheses", + "hyprocisy", "hypocrisy", + "hyprocite", "hypocrite", + "hyrdation", "hydration", + "hyrdaulic", "hydraulic", + "hysterica", "hysteria", + "hysteriia", "hysteria", + "iburpofen", "ibuprofen", + "icleandic", "icelandic", + "icongnito", "incognito", + "idealisim", "idealism", + "idealistc", "idealistic", + "identifer", "identifier", + "identifiy", "identify", + "ideologis", "ideologies", + "ignornace", "ignorance", + "illegales", "illegals", + "illegalis", "illegals", + "illegalls", "illegals", + "illnesess", "illnesses", + "illsuions", "illusions", + "illuminai", "illuminati", + "imagenary", "imaginary", + "imaginery", "imaginary", + "imaptient", "impatient", + "imigrated", "emigrated", + "immensley", "immensely", + "immerisve", "immersive", + "immesnely", "immensely", + "immidiate", "immediate", + "immigrato", "immigration", + "immitated", "imitated", + "immitator", "imitator", + "immobilie", "immobile", + "immobille", "immobile", + "immobilze", "immobile", + "immortaly", "immortality", + "immserive", "immersive", + "impaitent", "impatient", + "imparital", "impartial", + "impedence", "impedance", + "implantes", "implants", + "implicati", "implicit", + "impliciet", "implicit", + "implicity", "implicitly", + "impliment", "implement", + "implusive", "impulsive", + "importamt", "important", + "importend", "imported", + "imporving", "improving", + "impossibe", "impossible", + "imprefect", "imperfect", + "impressin", "impressions", + "imprioned", "imprisoned", + "improbabe", "improbable", + "impulisve", "impulsive", + "impuslive", "impulsive", + "imrpoving", "improving", + "inadequet", "inadequate", + "inadquate", "inadequate", + "inaugures", "inaugurates", + "inbalance", "imbalance", + "inbeetwen", "inbetween", + "inbetween", "between", + "inbewteen", "inbetween", + "incarnato", "incarnation", + "incgonito", "incognito", + "inclinato", 
"inclination", + "includeds", "includes", + "incoginto", "incognito", + "incongito", "incognito", + "incorpore", "incorporate", + "incpetion", "inception", + "incredibe", "incredible", + "incrediby", "incredibly", + "inculding", "including", + "incunabla", "incunabula", + "indicaste", "indicates", + "indicatie", "indicative", + "indicence", "incidence", + "indicents", "incidents", + "indigenos", "indigenous", + "indirecty", "indirectly", + "indisious", "insidious", + "individul", "individual", + "individus", "individuals", + "indoensia", "indonesia", + "indoneisa", "indonesia", + "indutrial", "industrial", + "inersting", "inserting", + "inexpense", "inexpensive", + "infallibe", "infallible", + "inferioir", "inferior", + "inferiour", "inferior", + "infestato", "infestation", + "infiltrar", "infiltrator", + "infinitey", "infinity", + "infinitie", "infinite", + "infinitiy", "infinity", + "infinitly", "infinity", + "inflatabe", "inflatable", + "influense", "influences", + "influenta", "influential", + "informate", "informative", + "infraread", "infrared", + "ingeniuty", "ingenuity", + "ingeunity", "ingenuity", + "ingocnito", "incognito", + "ingorance", "ignorance", + "inguenity", "ingenuity", + "inhabitat", "inhabitants", + "inheirted", "inherited", + "inhertied", "inherited", + "initailly", "initially", + "initalese", "initialese", + "initaling", "initialing", + "initalise", "initialise", + "initalism", "initialism", + "initalize", "initialize", + "initalled", "initialled", + "initation", "initiation", + "initiales", "initials", + "initiatie", "initiatives", + "initiatin", "initiation", + "initiatve", "initiate", + "injustics", "injustices", + "inlcuding", "including", + "inmigrant", "immigrant", + "innoucous", "innocuous", + "innovatin", "innovations", + "innovatve", "innovate", + "inpection", "inception", + "inpending", "impending", + "inproving", "improving", + "inpsector", "inspector", + "inpsiring", "inspiring", + "inquisito", "inquisition", + "inquisitr", "inquisitor", + "inresting", "inserting", + "insanelly", "insanely", + "insepctor", "inspector", + "insidiuos", "insidious", + "insipring", "inspiring", + "insluated", "insulated", + "inspectin", "inspection", + "instabilt", "instability", + "installes", "installs", + "installus", "installs", + "instering", "inserting", + "insticnts", "instincts", + "institude", "instituted", + "instituto", "institution", + "insualted", "insulated", + "insurence", "insurance", + "insurgeny", "insurgency", + "integirty", "integrity", + "integraal", "integral", + "integrade", "integrated", + "integrato", "integration", + "intenisty", "intensity", + "intensley", "intensely", + "interacte", "interactive", + "interents", "internets", + "interesat", "interest", + "interesst", "interests", + "interewbs", "interwebs", + "interfase", "interfaces", + "interfeer", "interfere", + "interfers", "interferes", + "intergate", "integrate", + "intergity", "integrity", + "interiour", "interior", + "internest", "internets", + "interpert", "interpret", + "interprut", "interrupt", + "interrups", "interrupts", + "interstae", "interstate", + "interveen", "intervene", + "intervied", "interviewed", + "intervier", "interviewer", + "intervies", "interviews", + "intesnely", "intensely", + "intesnity", "intensity", + "intestins", "intestines", + "intialize", "initialize", + "inticrate", "intricate", + "intimidad", "intimidated", + "intircate", "intricate", + "intiution", "intuition", + "intiutive", "intuitive", + "intorduce", "introduce", + "intorvert", "introvert", + "intracite", 
"intricate", + "intrduced", "introduced", + "intregity", "integrity", + "intrenets", "internets", + "intrepret", "interpret", + "intrerupt", "interrupt", + "intrewebs", "interwebs", + "intrinisc", "intrinsic", + "intrisinc", "intrinsic", + "intrisnic", "intrinsic", + "intriuged", "intrigued", + "introdued", "introduced", + "introduse", "introduces", + "introvers", "introverts", + "intruiged", "intrigued", + "intrument", "instrument", + "inutition", "intuition", + "inutitive", "intuitive", + "invaderas", "invaders", + "invalidas", "invalidates", + "inventios", "inventions", + "investige", "investigate", + "investmet", "investments", + "invincibe", "invincible", + "invloving", "involving", + "invovling", "involving", + "ipubrofen", "ibuprofen", + "iranianos", "iranians", + "irelevent", "irrelevant", + "ironicaly", "ironically", + "irritatie", "irritate", + "irritatin", "irritation", + "isalmists", "islamists", + "isalnders", "islanders", + "islamiskt", "islamist", + "islamsits", "islamists", + "islmaists", "islamists", + "isntaller", "installer", + "isntances", "instances", + "isntantly", "instantly", + "israelies", "israelis", + "israelits", "israelis", + "italianas", "italians", + "italianos", "italians", + "jailbrake", "jailbreak", + "jalibreak", "jailbreak", + "jamaicain", "jamaican", + "jersualem", "jerusalem", + "jeruselam", "jerusalem", + "jeruslaem", "jerusalem", + "journalis", "journals", + "judegment", "judgement", + "judgemant", "judgemental", + "judisuary", "judiciary", + "jugdement", "judgement", + "juggernat", "juggernaut", + "juvenille", "juvenile", + "keneysian", "keynesian", + "kentuckey", "kentucky", + "kenyesian", "keynesian", + "keybaords", "keyboards", + "keyensian", "keynesian", + "keyesnian", "keynesian", + "keynseian", "keynesian", + "keysenian", "keynesian", + "kilometes", "kilometers", + "kindapped", "kidnapped", + "kncokback", "knockback", + "knoweldge", "knowledge", + "knowlegde", "knowledge", + "konckback", "knockback", + "kryptonie", "kryptonite", + "labirynth", "labyrinth", + "laboratoy", "laboratory", + "laboreres", "laborers", + "labratory", "laboratory", + "labriynth", "labyrinth", + "labryinth", "labyrinth", + "labyrnith", "labyrinth", + "landscaps", "landscapes", + "landscspe", "landscapes", + "langauges", "languages", + "languague", "language", + "lanuchers", "launchers", + "lanugages", "languages", + "larington", "arlington", + "latitudie", "latitude", + "lattitude", "latitude", + "laucnhers", "launchers", + "laucnhing", "launching", + "launchign", "launching", + "laybrinth", "labyrinth", + "lebanesse", "lebanese", + "leceister", "leicester", + "leciester", "leicester", + "legitmate", "legitimate", + "legnedary", "legendary", + "lesbianas", "lesbians", + "lesbianus", "lesbians", + "letivicus", "leviticus", + "leutenant", "lieutenant", + "levaithan", "leviathan", + "levellign", "levelling", + "levetated", "levitated", + "levetates", "levitates", + "levicitus", "leviticus", + "levleling", "levelling", + "lfiesteal", "lifesteal", + "liberales", "liberals", + "liberalim", "liberalism", + "liberalis", "liberals", + "liberatin", "liberation", + "libraires", "libraries", + "liecester", "leicester", + "lieuenant", "lieutenant", + "lieutenat", "lieutenant", + "lifespawn", "lifespan", + "lifestlye", "lifestyle", + "lighnting", "lightning", + "lightnign", "lightning", + "ligthning", "lightning", + "ligthroom", "lightroom", + "lingerine", "lingerie", + "lispticks", "lipsticks", + "listenend", "listened", + "literarly", "literary", + "literarry", "literary", + 
"literatre", "literate", + "literatue", "literate", + "literture", "literature", + "lithaunia", "lithuania", + "lithuaina", "lithuania", + "lithuiana", "lithuania", + "lithunaia", "lithuania", + "litigatin", "litigation", + "lituhania", "lithuania", + "liveprool", "liverpool", + "livestrem", "livestream", + "lobbysits", "lobbyists", + "lockscren", "lockscreen", + "logisitcs", "logistics", + "logsitics", "logistics", + "loiusiana", "louisiana", + "lollipoop", "lollipop", + "louisvile", "louisville", + "luanchers", "launchers", + "luanching", "launching", + "lubicrant", "lubricant", + "lubircant", "lubricant", + "ludcrious", "ludicrous", + "ludricous", "ludicrous", + "lunaticos", "lunatics", + "lunaticus", "lunatics", + "macaronni", "macaroni", + "maestries", "masteries", + "magainzes", "magazines", + "magensium", "magnesium", + "magincian", "magician", + "magintude", "magnitude", + "magneisum", "magnesium", + "magnesuim", "magnesium", + "magnifine", "magnificent", + "mainfesto", "manifesto", + "mainfests", "manifests", + "mainstrem", "mainstream", + "maintaing", "maintaining", + "maintance", "maintenance", + "maintians", "maintains", + "mairjuana", "marijuana", + "malasyian", "malaysian", + "malayisan", "malaysian", + "malaysain", "malaysian", + "maletonin", "melatonin", + "maltesian", "maltese", + "malyasian", "malaysian", + "managable", "manageable", + "managment", "management", + "mandarian", "mandarin", + "mandarijn", "mandarin", + "mandarion", "mandarin", + "maneouvre", "manoeuvre", + "maneuveur", "maneuver", + "maneveurs", "maneuvers", + "manfiesto", "manifesto", + "manfiests", "manifests", + "mangesium", "magnesium", + "mangitude", "magnitude", + "manouvers", "maneuvers", + "mantained", "maintained", + "manuevers", "maneuvers", + "maraudeur", "marauder", + "marevlous", "marvelous", + "margarent", "margaret", + "margarite", "margaret", + "marginaal", "marginal", + "marginaly", "marginally", + "marijauna", "marijuana", + "marineras", "mariners", + "marineris", "mariners", + "marineros", "mariners", + "marjiuana", "marijuana", + "marjority", "majority", + "marmelade", "marmalade", + "marrtyred", "martyred", + "massagens", "massages", + "massivley", "massively", + "masteires", "masteries", + "mastereis", "masteries", + "masterise", "masteries", + "mastermid", "mastermind", + "mastieres", "masteries", + "masturbae", "masturbated", + "materiaal", "material", + "matierals", "materials", + "mattreses", "mattress", + "mayalsian", "malaysian", + "maylasian", "malaysian", + "mccarthey", "mccarthy", + "mecahnics", "mechanics", + "mecernary", "mercenary", + "mechancis", "mechanics", + "mechanims", "mechanism", + "mechaninc", "mechanic", + "mechansim", "mechanism", + "medicince", "medicine", + "mediciney", "mediciny", + "meditatie", "meditate", + "meditatin", "meditation", + "megathred", "megathread", + "melanotin", "melatonin", + "melborune", "melbourne", + "melbounre", "melbourne", + "membrance", "membrane", + "menstraul", "menstrual", + "menstural", "menstrual", + "mensutral", "menstrual", + "mentiones", "mentions", + "mercanery", "mercenary", + "merhcants", "merchants", + "messagers", "messages", + "messanger", "messenger", + "metabloic", "metabolic", + "metalurgy", "metallurgy", + "methaphor", "metaphor", + "methapors", "metaphors", + "methodoly", "methodology", + "metropols", "metropolis", + "mexicanas", "mexicans", + "mexicants", "mexicans", + "mexicanus", "mexicans", + "michellle", "michelle", + "micorwave", "microwave", + "micoscopy", "microscopy", + "microphen", "microphone", + 
"migrantes", "migrants", + "migrianes", "migraines", + "milawukee", "milwaukee", + "milennium", "millennium", + "milestons", "milestones", + "militians", "militias", + "millenial", "millennial", + "millenian", "millennia", + "millenium", "millennium", + "millionar", "millionaire", + "millitary", "military", + "miluwakee", "milwaukee", + "milwakuee", "milwaukee", + "milwuakee", "milwaukee", + "mindcarck", "mindcrack", + "mindlessy", "mindlessly", + "minerales", "minerals", + "minisclue", "miniscule", + "miniscuel", "miniscule", + "ministery", "ministry", + "minisucle", "miniscule", + "minitaure", "miniature", + "minituare", "miniature", + "minneosta", "minnesota", + "minnestoa", "minnesota", + "minsicule", "miniscule", + "minsiters", "ministers", + "minstries", "ministries", + "miraculos", "miraculous", + "mircowave", "microwave", + "mirrorred", "mirrored", + "miserabel", "miserable", + "mispelled", "misspelled", + "misreable", "miserable", + "misreably", "miserably", + "missisipi", "mississippi", + "missonary", "missionary", + "missourri", "missouri", + "misspelld", "misspelled", + "mobilitiy", "mobility", + "moderatey", "moderately", + "moderatin", "moderation", + "modifires", "modifiers", + "moelcules", "molecules", + "moleclues", "molecules", + "molestare", "molester", + "molestato", "molestation", + "molesterd", "molested", + "monestary", "monastery", + "monitores", "monitors", + "monolgoue", "monologue", + "monolight", "moonlight", + "monolouge", "monologue", + "monopolis", "monopolies", + "monopolly", "monopoly", + "monopoloy", "monopoly", + "monserrat", "montserrat", + "monstorus", "monstrous", + "monstruos", "monstrous", + "montanous", "mountainous", + "montoring", "monitoring", + "monumnets", "monuments", + "moratlity", "mortality", + "morbidley", "morbidly", + "morgatges", "mortgages", + "morgtages", "mortgages", + "morisette", "morissette", + "mormonsim", "mormonism", + "morroccan", "moroccan", + "mortailty", "mortality", + "mosquitto", "mosquito", + "motivatie", "motivate", + "motivatin", "motivations", + "motorcyce", "motorcycles", + "motorolja", "motorola", + "motoroloa", "motorola", + "moustahce", "moustache", + "movepseed", "movespeed", + "mozzarela", "mozzarella", + "mucisians", "musicians", + "mulitated", "mutilated", + "mulitples", "multiples", + "multipled", "multiplied", + "multplies", "multiples", + "murdererd", "murdered", + "muscially", "musically", + "muscician", "musician", + "musculair", "muscular", + "mushrooom", "mushroom", + "musicains", "musicians", + "mutatiohn", "mutation", + "mutialted", "mutilated", + "mutilatin", "mutilation", + "mutliated", "mutilated", + "mutliples", "multiples", + "mutlitude", "multitude", + "mysterise", "mysteries", + "mysterous", "mysterious", + "nacrotics", "narcotics", + "naferious", "nefarious", + "nahsville", "nashville", + "narcissim", "narcissism", + "narcissit", "narcissist", + "narcissts", "narcissist", + "narctoics", "narcotics", + "nasvhille", "nashville", + "nationaal", "national", + "nationaly", "nationally", + "nativelly", "natively", + "natrually", "naturally", + "navigatie", "navigate", + "navigatin", "navigation", + "neccesary", "necessary", + "necessite", "necessities", + "neckbears", "neckbeards", + "neckbread", "neckbeard", + "nedlessly", "endlessly", + "needlessy", "needlessly", + "negiotate", "negotiate", + "negociate", "negotiate", + "negoitate", "negotiate", + "neigbhour", "neighbour", + "neigbours", "neighbours", + "neighboor", "neighbor", + "nessecary", "necessary", + "newcaslte", "newcastle", + "newcastel", 
"newcastle", + "nieghbour", "neighbour", + "nightfa;;", "nightfall", + "nightlcub", "nightclub", + "nigthclub", "nightclub", + "nigthlife", "nightlife", + "nigthmare", "nightmare", + "nihilisim", "nihilism", + "ninteenth", "nineteenth", + "nominatie", "nominate", + "nominatin", "nomination", + "noninital", "noninitial", + "norhteast", "northeast", + "norhtwest", "northwest", + "normanday", "normandy", + "northeren", "northern", + "norwegain", "norwegian", + "norwiegan", "norwegian", + "nostaglia", "nostalgia", + "nostaglic", "nostalgic", + "nostaliga", "nostalgia", + "nostaligc", "nostalgic", + "nostlagia", "nostalgia", + "nostlagic", "nostalgic", + "nostriles", "nostrils", + "nostrills", "nostrils", + "notacible", "noticable", + "notciable", "noticable", + "noteboook", "notebook", + "noteriety", "notoriety", + "noteworty", "noteworthy", + "noticable", "noticeable", + "noticably", "noticeably", + "noticalbe", "noticable", + "noticeing", "noticing", + "noticible", "noticeable", + "notoroius", "notorious", + "novermber", "november", + "nullabour", "nullarbor", + "numberous", "numerous", + "numercial", "numerical", + "numerious", "numerous", + "nuremburg", "nuremberg", + "nurtients", "nutrients", + "nutirents", "nutrients", + "nutreints", "nutrients", + "nutritent", "nutrient", + "nutritian", "nutritional", + "nutritios", "nutritious", + "obediance", "obedience", + "obeidence", "obedience", + "obersvant", "observant", + "obersvers", "observers", + "obesssion", "obsession", + "obiedence", "obedience", + "obivously", "obviously", + "objectivs", "objectives", + "objectivy", "objectivity", + "obscruity", "obscurity", + "obscuirty", "obscurity", + "observare", "observer", + "observerd", "observed", + "obssesion", "obsession", + "obssesive", "obsessive", + "obssessed", "obsessed", + "obstruced", "obstructed", + "obsucrity", "obscurity", + "obtainabe", "obtainable", + "obviosuly", "obviously", + "obvioulsy", "obviously", + "obvisouly", "obviously", + "obvoiusly", "obviously", + "ocasional", "occasional", + "ocasioned", "occasioned", + "ocassions", "occasions", + "occaisons", "occasions", + "occassion", "occasion", + "occurance", "occurrence", + "occurence", "occurrence", + "octohedra", "octahedra", + "ocuntries", "countries", + "ocurrance", "occurrence", + "ocurrence", "occurrence", + "offcially", "officially", + "offically", "officially", + "officialy", "officially", + "offpsring", "offspring", + "offspirng", "offspring", + "offsrping", "offspring", + "ogliarchy", "oligarchy", + "oilgarchy", "oligarchy", + "oligrachy", "oligarchy", + "ommitting", "omitting", + "onlsaught", "onslaught", + "onsalught", "onslaught", + "onslaugth", "onslaught", + "onsluaght", "onslaught", + "onwership", "ownership", + "opiniones", "opinions", + "oposition", "opposition", + "opponenet", "opponent", + "opposiste", "opposites", + "opposties", "opposites", + "oppressin", "oppression", + "opression", "oppression", + "opressive", "oppressive", + "opthalmic", "ophthalmic", + "optimisim", "optimism", + "optimistc", "optimistic", + "optinally", "optimally", + "oragnered", "orangered", + "oragnised", "organised", + "oragnizer", "organizer", + "orcehstra", "orchestra", + "ordinarly", "ordinary", + "orgainsed", "organised", + "orgainzer", "organizer", + "organered", "orangered", + "organices", "organise", + "organisim", "organism", + "organiske", "organise", + "organiste", "organise", + "organites", "organise", + "organizms", "organism", + "organsied", "organised", + "organsims", "organisms", + "organzier", "organizer", + 
"orginally", "originally", + "orgnaised", "organised", + "orhcestra", "orchestra", + "orientato", "orientation", + "origanaly", "originally", + "originall", "original", + "originalt", "originality", + "originaly", "originally", + "origintea", "originate", + "origional", "original", + "orignally", "originally", + "orignials", "originals", + "oublisher", "publisher", + "oursleves", "ourselves", + "oustiders", "outsiders", + "oustpoken", "outspoken", + "outisders", "outsiders", + "outnumbed", "outnumbered", + "outpalyed", "outplayed", + "outperfom", "outperform", + "outpsoken", "outspoken", + "outrageos", "outrageous", + "outskirst", "outskirts", + "outskrits", "outskirts", + "outwieghs", "outweighs", + "overbaord", "overboard", + "overclcok", "overclock", + "overdirve", "overdrive", + "overhpyed", "overhyped", + "overhwelm", "overwhelm", + "overlcock", "overclock", + "overloard", "overload", + "overpaied", "overpaid", + "overpowed", "overpowered", + "overriden", "overridden", + "overwhlem", "overwhelm", + "overwirte", "overwrite", + "overwtach", "overwatch", + "overyhped", "overhyped", + "owernship", "ownership", + "pacificts", "pacifist", + "packageid", "packaged", + "pactivity", "captivity", + "painkills", "painkillers", + "paitently", "patiently", + "paitience", "patience", + "pakistain", "pakistani", + "pakistian", "pakistani", + "pakistnai", "pakistani", + "paksitani", "pakistani", + "paladines", "paladins", + "paladinos", "paladins", + "palestein", "palestine", + "palestina", "palestinian", + "palistian", "palestinian", + "paltforms", "platforms", + "palystyle", "playstyle", + "pancakers", "pancakes", + "pantomine", "pantomime", + "paradimes", "paradise", + "paragraps", "paragraphs", + "paragrpah", "paragraph", + "paralells", "parallels", + "paralelly", "parallelly", + "paralisys", "paralysis", + "parallely", "parallelly", + "paralzyed", "paralyzed", + "paramedis", "paramedics", + "paramters", "parameters", + "paranoica", "paranoia", + "paranoida", "paranoia", + "parasties", "parasites", + "paraylsis", "paralysis", + "paraylzed", "paralyzed", + "parellels", "parallels", + "paricular", "particular", + "parisitic", "parasitic", + "paritally", "partially", + "parliment", "parliament", + "parmesaen", "parmesan", + "parntered", "partnered", + "parrallel", "parallel", + "partchett", "pratchett", + "parterned", "partnered", + "participe", "participate", + "partiotic", "patriotic", + "partisain", "partisan", + "pasengers", "passengers", + "passagens", "passages", + "passagers", "passages", + "passerbys", "passersby", + "passiones", "passions", + "passivley", "passively", + "passowrds", "passwords", + "pateintly", "patiently", + "paticular", "particular", + "patinetly", "patiently", + "patriarca", "patriarchal", + "patriarcy", "patriarchy", + "patriotas", "patriots", + "patriotes", "patriots", + "patroitic", "patriotic", + "pattented", "patented", + "pavillion", "pavilion", + "pbulisher", "publisher", + "peacefuly", "peacefully", + "pedohpile", "pedophile", + "pedophila", "pedophilia", + "pedophils", "pedophiles", + "peircings", "piercings", + "penalites", "penalties", + "penatlies", "penalties", + "penduluum", "pendulum", + "penerator", "penetrator", + "penguines", "penguins", + "penguings", "penguins", + "penguinos", "penguins", + "peninsual", "peninsula", + "peninusla", "peninsula", + "penisnula", "peninsula", + "penisular", "peninsular", + "pennisula", "peninsula", + "pensinula", "peninsula", + "pentagoon", "pentagon", + "peodphile", "pedophile", + "pepperino", "pepperoni", + "peppermit", 
"peppermint", + "percepted", "perceived", + "percevied", "perceived", + "percieved", "perceived", + "percisely", "precisely", + "percision", "precision", + "percursor", "precursor", + "perdators", "predators", + "peremiter", "perimeter", + "perfeclty", "perfectly", + "perfomers", "performers", + "performas", "performs", + "perfromer", "performer", + "pericings", "piercings", + "perimetre", "perimeter", + "peristent", "persistent", + "periwinke", "periwinkle", + "permanant", "permanent", + "permature", "premature", + "permenant", "permanent", + "permieter", "perimeter", + "permissie", "permissible", + "permissin", "permissions", + "permisson", "permission", + "pernament", "permanent", + "perorders", "preorders", + "perpetrar", "perpetrator", + "perpetuae", "perpetuate", + "perpetuas", "perpetuates", + "persauded", "persuaded", + "perscribe", "prescribe", + "perserved", "preserved", + "persistes", "persists", + "personaes", "personas", + "personaly", "personally", + "personell", "personnel", + "personhod", "personhood", + "persuated", "persuade", + "pertended", "pretended", + "pertoleum", "petroleum", + "perusaded", "persuaded", + "pervertes", "perverse", + "pesticids", "pesticides", + "petroluem", "petroleum", + "phemonena", "phenomena", + "phenemona", "phenomena", + "phenonema", "phenomena", + "phillipse", "phillies", + "philosopy", "philosophy", + "philosphy", "philosophy", + "phonecian", "phoenecian", + "phonemena", "phenomena", + "phongraph", "phonograph", + "photograh", "photograph", + "phsyician", "physician", + "phsyicist", "physicist", + "phycisian", "physician", + "phycisist", "physicist", + "physicaly", "physically", + "physicits", "physicist", + "physisict", "physicist", + "piblisher", "publisher", + "picthfork", "pitchfork", + "pinetrest", "pinterest", + "placeheld", "placeholder", + "placemens", "placements", + "plaestine", "palestine", + "plagarism", "plagiarism", + "planatery", "planetary", + "planation", "plantation", + "planteary", "planetary", + "plasticas", "plastics", + "plasticos", "plastics", + "plasticus", "plastics", + "platfroms", "platforms", + "platofrms", "platforms", + "plausable", "plausible", + "plausbile", "plausible", + "plausibel", "plausible", + "playgroud", "playground", + "playright", "playwright", + "playstlye", "playstyle", + "playwrite", "playwright", + "plebicite", "plebiscite", + "plethoria", "plethora", + "ploarized", "polarized", + "pointeres", "pointers", + "polinator", "pollinator", + "polishees", "polishes", + "politelly", "politely", + "politican", "politician", + "politicas", "politics", + "politicin", "politician", + "politicus", "politics", + "polygammy", "polygamy", + "populatin", "populations", + "poralized", "polarized", + "porcelian", "porcelain", + "porcelina", "porcelain", + "poreclain", "porcelain", + "porftolio", "portfolio", + "porgramme", "programme", + "portfoilo", "portfolio", + "portoflio", "portfolio", + "portraing", "portraying", + "portrayes", "portrays", + "portrayls", "portrays", + "portriats", "portraits", + "portugese", "portuguese", + "portugues", "portuguese", + "posessing", "possessing", + "posession", "possession", + "positiond", "positioned", + "positiong", "positioning", + "positionl", "positional", + "positiviy", "positivity", + "possesess", "possesses", + "possesing", "possessing", + "possesion", "possession", + "possessin", "possessions", + "possibile", "possible", + "possibily", "possibility", + "possibley", "possibly", + "possiblly", "possibly", + "possition", "position", + "powderade", "powdered", + 
"powerfull", "powerful", + "pracitcal", "practical", + "practhett", "pratchett", + "practicly", "practically", + "practives", "practise", + "pragamtic", "pragmatic", + "preadtors", "predators", + "precedeed", "preceded", + "preceeded", "preceded", + "preceived", "perceived", + "preciesly", "precisely", + "precisley", "precisely", + "precurors", "precursor", + "precurosr", "precursor", + "precurser", "precursor", + "predatobr", "predator", + "predictie", "predictive", + "predictin", "prediction", + "predjuice", "prejudice", + "predujice", "prejudice", + "prefectly", "perfectly", + "preferens", "preferences", + "prefering", "preferring", + "preformer", "performer", + "pregnance", "pregnancies", + "preimeter", "perimeter", + "prejiduce", "prejudice", + "prejucide", "prejudice", + "premanent", "permanent", + "premeired", "premiered", + "preorderd", "preordered", + "preparato", "preparation", + "prepatory", "preparatory", + "presentas", "presents", + "presentes", "presents", + "presicely", "precisely", + "presicion", "precision", + "presideny", "presidency", + "prestigiu", "prestigious", + "prestigue", "prestige", + "presuaded", "persuaded", + "pretendas", "pretends", + "pretensje", "pretense", + "pretinent", "pertinent", + "prevelant", "prevalent", + "preventin", "prevention", + "previvous", "previous", + "priesthod", "priesthood", + "priestood", "priesthood", + "primaires", "primaries", + "primairly", "primarily", + "primarliy", "primarily", + "primative", "primitive", + "primordal", "primordial", + "princesas", "princess", + "princeses", "princess", + "princesss", "princesses", + "principas", "principals", + "principly", "principally", + "prinicple", "principle", + "prioritie", "prioritize", + "prioritse", "priorities", + "privalege", "privilege", + "privelege", "privilege", + "privelige", "privilege", + "privilage", "privilege", + "privilegs", "privileges", + "privledge", "privilege", + "probabily", "probability", + "probablly", "probably", + "problemas", "problems", + "procative", "proactive", + "procedger", "procedure", + "proceding", "proceeding", + "proceedes", "proceeds", + "procelain", "porcelain", + "procesess", "processes", + "processer", "processor", + "processos", "processors", + "proclamed", "proclaimed", + "procotols", "protocols", + "prodecure", "procedure", + "productie", "productive", + "productin", "productions", + "productos", "products", + "profesion", "profusion", + "professer", "professor", + "professin", "professions", + "proffesed", "professed", + "proffesor", "professor", + "progessed", "progressed", + "programas", "programs", + "programem", "programme", + "programes", "programs", + "programms", "programs", + "progresso", "progression", + "progresss", "progresses", + "projectie", "projectile", + "projectin", "projection", + "prominant", "prominent", + "promiscus", "promiscuous", + "promotted", "promoted", + "pronomial", "pronominal", + "pronouced", "pronounced", + "pronounds", "pronouns", + "pronounes", "pronouns", + "propagana", "propaganda", + "properies", "properties", + "propertly", "property", + "propeties", "properties", + "prophesie", "prophecies", + "prophetes", "prophets", + "propogate", "propagate", + "proposels", "proposes", + "proposito", "proposition", + "propperly", "properly", + "propsects", "prospects", + "prosperos", "prosperous", + "prostitue", "prostitute", + "protectes", "protects", + "protectie", "protective", + "protectos", "protectors", + "proteinas", "proteins", + "proteines", "proteins", + "protestas", "protests", + "protestat", "protestant", 
+ "protestes", "protests", + "protestos", "protests", + "protfolio", "portfolio", + "protocool", "protocol", + "prototpye", "prototype", + "prototyps", "prototypes", + "protraits", "portraits", + "protrayal", "portrayal", + "protrayed", "portrayed", + "provicial", "provincial", + "provincie", "province", + "provisios", "provisions", + "pruchased", "purchased", + "pruchases", "purchases", + "prugatory", "purgatory", + "pruposely", "purposely", + "pscyhotic", "psychotic", + "pseudonyn", "pseudonym", + "pshycosis", "psychosis", + "pshycotic", "psychotic", + "psycology", "psychology", + "psycothic", "psychotic", + "ptichfork", "pitchfork", + "pubilsher", "publisher", + "publiaher", "publisher", + "publicaly", "publicly", + "publicani", "publication", + "publicher", "publisher", + "publiclly", "publicly", + "publihser", "publisher", + "publisehr", "publisher", + "publisger", "publisher", + "publishor", "publisher", + "publishre", "publisher", + "publsiher", "publisher", + "publusher", "publisher", + "puchasing", "purchasing", + "punishmet", "punishments", + "puplisher", "publisher", + "puragtory", "purgatory", + "purcahsed", "purchased", + "purcahses", "purchases", + "purhcased", "purchased", + "purposley", "purposely", + "pursuaded", "persuaded", + "pursuades", "persuades", + "pyramidas", "pyramids", + "pyramides", "pyramids", + "pyschosis", "psychosis", + "pyschotic", "psychotic", + "qaulifies", "qualifies", + "quantifiy", "quantify", + "quantitiy", "quantity", + "quantitty", "quantity", + "quartlery", "quarterly", + "queations", "equations", + "queenland", "queensland", + "questiond", "questioned", + "questiong", "questioning", + "questionn", "questioning", + "radicalis", "radicals", + "rapsberry", "raspberry", + "rasbperry", "raspberry", + "rationaly", "rationally", + "reactiony", "reactionary", + "realisitc", "realistic", + "realoding", "reloading", + "realsitic", "realistic", + "realtable", "relatable", + "realtions", "relations", + "realtives", "relatives", + "reamining", "remaining", + "reaplying", "replaying", + "reasearch", "research", + "reaveling", "revealing", + "rebellios", "rebellious", + "rebllions", "rebellions", + "recations", "creations", + "reccomend", "recommend", + "reccuring", "recurring", + "receeding", "receding", + "recepient", "recipient", + "recgonise", "recognise", + "recgonize", "recognize", + "recidents", "residents", + "recievers", "receivers", + "recieving", "receiving", + "recipiant", "recipient", + "reciproce", "reciprocate", + "reclutant", "reluctant", + "recoginse", "recognise", + "recoginze", "recognize", + "recomends", "recommends", + "recommens", "recommends", + "reconenct", "reconnect", + "recongise", "recognise", + "recongize", "recognize", + "reconicle", "reconcile", + "reconized", "recognized", + "recordare", "recorder", + "recoveres", "recovers", + "recoverys", "recovers", + "recpetive", "receptive", + "recpetors", "receptors", + "recquired", "required", + "recreatie", "recreate", + "recruitcs", "recruits", + "recruites", "recruits", + "recrusion", "recursion", + "recrutied", "recruited", + "recrutier", "recruiter", + "rectangel", "rectangle", + "rectanlge", "rectangle", + "recuiting", "recruiting", + "recurison", "recursion", + "recurited", "recruited", + "recuriter", "recruiter", + "recusrion", "recursion", + "redeemeed", "redeemed", + "redundany", "redundancy", + "redundent", "redundant", + "reedeming", "redeeming", + "refelcted", "reflected", + "refereces", "references", + "refereees", "referees", + "refereers", "referees", + "referemce", 
"reference", + "referencs", "references", + "referense", "references", + "referiang", "referring", + "referinng", "refering", + "refernces", "references", + "refernece", "reference", + "refershed", "refreshed", + "refersher", "refresher", + "reffering", "referring", + "reflectie", "reflective", + "refrehser", "refresher", + "refrences", "references", + "refromist", "reformist", + "regionaal", "regional", + "registerd", "registered", + "registery", "registry", + "regualrly", "regularly", + "regualtor", "regulator", + "regulaion", "regulation", + "regulalry", "regularly", + "regulares", "regulars", + "regularis", "regulars", + "regulatin", "regulations", + "regurally", "regularly", + "reigining", "reigning", + "reinstale", "reinstalled", + "reisntall", "reinstall", + "reknowned", "renowned", + "relaoding", "reloading", + "relatiate", "retaliate", + "relativiy", "relativity", + "relativly", "relatively", + "relativno", "relation", + "relavence", "relevance", + "relcutant", "reluctant", + "relevence", "relevance", + "relfected", "reflected", + "reliabily", "reliability", + "reliabley", "reliably", + "religeous", "religious", + "remasterd", "remastered", + "rememberd", "remembered", + "rememebrs", "remembers", + "remianing", "remaining", + "remignton", "remington", + "remingotn", "remington", + "remmebers", "remembers", + "remotelly", "remotely", + "rendevous", "rendezvous", + "rendezous", "rendezvous", + "renedered", "rende", + "renegated", "renegade", + "rennovate", "renovate", + "repalying", "replaying", + "repblican", "republican", + "repeatedy", "repeatedly", + "repective", "receptive", + "repeition", "repetition", + "repentent", "repentant", + "rephrasse", "rephrase", + "replusive", "repulsive", + "reportedy", "reportedly", + "represend", "represented", + "repressin", "repression", + "reprtoire", "repertoire", + "repsonded", "responded", + "reptition", "repetition", + "reptuable", "reputable", + "repubican", "republican", + "republian", "republican", + "repulican", "republican", + "repulisve", "repulsive", + "repuslive", "repulsive", + "resaurant", "restaurant", + "researchs", "researchers", + "resembels", "resembles", + "reserverd", "reserved", + "resintall", "reinstall", + "resistane", "resistances", + "resistans", "resistances", + "resistend", "resisted", + "resistent", "resistant", + "resmebles", "resembles", + "resolutin", "resolutions", + "resoruces", "resources", + "respectes", "respects", + "respectos", "respects", + "responces", "response", + "respondas", "responds", + "respondis", "responds", + "respondus", "responds", + "respoting", "reposting", + "ressemble", "resemble", + "ressurect", "resurrect", + "restarant", "restaurant", + "resticted", "restricted", + "restircts", "restricts", + "restorani", "restoration", + "restraind", "restrained", + "restraing", "restraining", + "restraunt", "restraint", + "restriant", "restraint", + "restricte", "restrictive", + "resturant", "restaurant", + "retailate", "retaliate", + "retalaite", "retaliate", + "retaliers", "retailers", + "reteriver", "retriever", + "retirever", "retriever", + "retrevier", "retriever", + "retriving", "retrieving", + "reuptable", "reputable", + "reveiwers", "reviewers", + "revelaing", "revealing", + "revelance", "relevance", + "revolutin", "revolutions", + "rewachted", "rewatched", + "rewatchig", "rewatching", + "rferences", "references", + "ridiculos", "ridiculous", + "ridiculue", "ridicule", + "ridiculus", "ridiculous", + "righetous", "righteous", + "rightfuly", "rightfully", + "rightoues", "righteous", + 
"rigourous", "rigorous", + "rigtheous", "righteous", + "rilvaries", "rivalries", + "rininging", "ringing", + "rivarlies", "rivalries", + "rivlaries", "rivalries", + "roaylties", "royalties", + "roboticus", "robotics", + "roganisms", "organisms", + "royalites", "royalties", + "roylaties", "royalties", + "ruleboook", "rulebook", + "sacarstic", "sarcastic", + "sacntuary", "sanctuary", + "sacrafice", "sacrifice", + "sacrastic", "sarcastic", + "sacrifise", "sacrifices", + "salughter", "slaughter", + "samckdown", "smackdown", + "sanctiond", "sanctioned", + "sancturay", "sanctuary", + "sancutary", "sanctuary", + "sandstrom", "sandstorm", + "sandwhich", "sandwich", + "sanhedrim", "sanhedrin", + "santcuary", "sanctuary", + "santioned", "sanctioned", + "sapphirre", "sapphire", + "sastified", "satisfied", + "sastifies", "satisfies", + "satelites", "satellites", + "saterdays", "saturdays", + "satisifed", "satisfied", + "satisifes", "satisfies", + "satrudays", "saturdays", + "satsified", "satisfied", + "satsifies", "satisfies", + "sattelite", "satellite", + "saxaphone", "saxophone", + "scaepgoat", "scapegoat", + "scaleable", "scalable", + "scandales", "scandals", + "scandalos", "scandals", + "scantuary", "sanctuary", + "scaricity", "scarcity", + "scarifice", "sacrifice", + "scarmbled", "scrambled", + "scartched", "scratched", + "scartches", "scratches", + "scavanged", "scavenged", + "sceintist", "scientist", + "scholalry", "scholarly", + "sciencers", "sciences", + "scientfic", "scientific", + "scientifc", "scientific", + "scientits", "scientist", + "sclupture", "sculpture", + "scnearios", "scenarios", + "scoreboad", "scoreboard", + "scottisch", "scottish", + "scracthed", "scratched", + "scracthes", "scratches", + "scrambeld", "scrambled", + "scrathces", "scratches", + "scrollade", "scrolled", + "scrutiney", "scrutiny", + "scrutinty", "scrutiny", + "sculpteur", "sculpture", + "sculputre", "sculpture", + "scultpure", "sculpture", + "scuplture", "sculpture", + "scuptures", "sculptures", + "seamlessy", "seamlessly", + "searchign", "searching", + "sebasitan", "sebastian", + "sebastain", "sebastian", + "sebsatian", "sebastian", + "secceeded", "seceded", + "secertary", "secretary", + "secratary", "secretary", + "secratery", "secretary", + "secretery", "secretary", + "secretley", "secretly", + "sednetary", "sedentary", + "seduciton", "seduction", + "semanitcs", "semantics", + "semestres", "semesters", + "semnatics", "semantics", + "semseters", "semesters", + "senatores", "senators", + "sendetary", "sedentary", + "sensitivy", "sensitivity", + "sentimant", "sentimental", + "sepcially", "specially", + "seperated", "separated", + "seperates", "separates", + "seperator", "separator", + "septmeber", "september", + "seraching", "searching", + "seriosuly", "seriously", + "serioulsy", "seriously", + "seriuosly", "seriously", + "servantes", "servants", + "settlment", "settlement", + "sexualizd", "sexualized", + "sexuallly", "sexually", + "shadasloo", "shadaloo", + "shaprness", "sharpness", + "sharpenss", "sharpness", + "shawhsank", "shawshank", + "sheilding", "shielding", + "shephered", "shepherd", + "shileding", "shielding", + "shitstrom", "shitstorm", + "shletered", "sheltered", + "shoudlers", "shoulders", + "shouldnot", "shouldnt", + "shperical", "spherical", + "shwashank", "shawshank", + "sidebaord", "sideboard", + "siganture", "signature", + "signapore", "singapore", + "signitory", "signatory", + "silhouete", "silhouette", + "similiair", "similiar", + "simliarly", "similarly", + "simluated", "simulated", + 
"simluator", "simulator", + "simplifiy", "simplify", + "simualted", "simulated", + "simualtor", "simulator", + "simulatie", "simulate", + "simulatin", "simulation", + "sinagpore", "singapore", + "sincerley", "sincerely", + "singature", "signature", + "singpaore", "singapore", + "singulair", "singular", + "singulary", "singularity", + "skateboad", "skateboard", + "skeletaal", "skeletal", + "skepitcal", "skeptical", + "skepticim", "skepticism", + "sketpical", "skeptical", + "slaughted", "slaughtered", + "slaugther", "slaughter", + "slipperly", "slippery", + "sluaghter", "slaughter", + "smackdwon", "smackdown", + "smealting", "smelting", + "smeesters", "semesters", + "snadstorm", "sandstorm", + "snippetts", "snippets", + "snowfalke", "snowflake", + "snowflaek", "snowflake", + "snowlfake", "snowflake", + "snwoballs", "snowballs", + "snythesis", "synthesis", + "snythetic", "synthetic", + "socailism", "socialism", + "socailist", "socialist", + "socailize", "socialize", + "soceities", "societies", + "socialini", "socializing", + "socialiss", "socialists", + "socialsim", "socialism", + "socieites", "societies", + "socilaism", "socialism", + "socilaist", "socialist", + "sociopati", "sociopathic", + "sociopats", "sociopaths", + "socratees", "socrates", + "socrateks", "socrates", + "soemthing", "something", + "sohpomore", "sophomore", + "soliliquy", "soliloquy", + "somehting", "something", + "someoneis", "someones", + "somethign", "something", + "somethins", "somethings", + "sopohmore", "sophomore", + "sotryline", "storyline", + "soundtrak", "soundtrack", + "sountrack", "soundtrack", + "sourthern", "southern", + "souvenier", "souvenir", + "soveregin", "sovereign", + "sovereing", "sovereign", + "soveriegn", "sovereign", + "spacegoat", "scapegoat", + "spagehtti", "spaghetti", + "spahgetti", "spaghetti", + "sparlking", "sparkling", + "spartants", "spartans", + "specailly", "specially", + "specailty", "specialty", + "specality", "specialty", + "speciales", "specials", + "specialis", "specials", + "speciatly", "specialty", + "specifing", "specifying", + "specimine", "specimen", + "spectrail", "spectral", + "specualte", "speculate", + "speechers", "speeches", + "spehrical", "spherical", + "speically", "specially", + "spetember", "september", + "sphagetti", "spaghetti", + "splatooon", "splatoon", + "sponosred", "sponsored", + "sponsered", "sponsored", + "sponsores", "sponsors", + "spontanes", "spontaneous", + "sponzored", "sponsored", + "sprakling", "sparkling", + "sprinkeld", "sprinkled", + "squadroon", "squadron", + "squirrles", "squirrels", + "squirrtle", "squirrel", + "squrriels", "squirrels", + "srirachia", "sriracha", + "srirachra", "sriracha", + "stabliize", "stabilize", + "stainlees", "stainless", + "startegic", "strategic", + "startlxde", "startled", + "statisitc", "statistic", + "staurdays", "saturdays", + "steadilly", "steadily", + "stealthly", "stealthy", + "stichting", "stitching", + "sticthing", "stitching", + "stimulans", "stimulants", + "stockplie", "stockpile", + "stornegst", "strongest", + "stragetic", "strategic", + "straightn", "straighten", + "strangets", "strangest", + "strategis", "strategies", + "strawbery", "strawberry", + "streamade", "streamed", + "streamare", "streamer", + "streamear", "streamer", + "strechted", "stretched", + "strechtes", "stretches", + "strecthed", "stretched", + "strecthes", "stretches", + "stregnths", "strengths", + "strenghen", "strengthen", + "strengthn", "strengthen", + "strentghs", "strengths", + "stressade", "stressed", + "stressers", "stresses", + 
"strictist", "strictest", + "stringnet", "stringent", + "stroyline", "storyline", + "structual", "structural", + "structurs", "structures", + "strucutre", "structure", + "struggeld", "struggled", + "struggels", "struggles", + "stryofoam", "styrofoam", + "stuctured", "structured", + "stuggling", "struggling", + "stupitidy", "stupidity", + "sturcture", "structure", + "sturggled", "struggled", + "sturggles", "struggles", + "styrofaom", "styrofoam", + "subarmine", "submarine", + "subculter", "subculture", + "submachne", "submachine", + "subpecies", "subspecies", + "subscirbe", "subscribe", + "subsidary", "subsidiary", + "subsizide", "subsidize", + "subsquent", "subsequent", + "subsrcibe", "subscribe", + "substanse", "substances", + "substanta", "substantial", + "substante", "substantive", + "substarte", "substrate", + "substitue", "substitute", + "substract", "subtract", + "subtances", "substances", + "subtiltes", "subtitles", + "subtitels", "subtitles", + "subtletly", "subtlety", + "subtlties", "subtitles", + "succedded", "succeeded", + "succeedes", "succeeds", + "succesful", "successful", + "succesion", "succession", + "succesive", "successive", + "suceeding", "succeeding", + "sucesfuly", "successfully", + "sucessful", "successful", + "sucession", "succession", + "sucessive", "successive", + "sufferage", "suffrage", + "sufferred", "suffered", + "sufficent", "sufficient", + "suggestes", "suggests", + "suggestie", "suggestive", + "sumbarine", "submarine", + "sumberged", "submerged", + "summenors", "summoners", + "summoenrs", "summoners", + "sunderlad", "sunderland", + "sunglases", "sunglasses", + "superfluu", "superfluous", + "superiour", "superior", + "superisor", "superiors", + "supermare", "supermarket", + "superviso", "supervision", + "suposedly", "supposedly", + "supportes", "supports", + "suppreses", "suppress", + "supressed", "suppressed", + "supresses", "suppresses", + "suprising", "surprising", + "suprizing", "surprising", + "supsicion", "suspicion", + "surounded", "surrounded", + "surprized", "surprised", + "surronded", "surrounded", + "surrouded", "surrounded", + "surrouned", "surround", + "survivers", "survivors", + "survivied", "survived", + "survivour", "survivor", + "susbcribe", "subscribe", + "susbtrate", "substrate", + "susncreen", "sunscreen", + "suspectes", "suspects", + "suspendes", "suspense", + "suspensie", "suspense", + "swastikka", "swastika", + "sweatshit", "sweatshirt", + "sweetheat", "sweetheart", + "switchign", "switching", + "swithcing", "switching", + "swtiching", "switching", + "sydnicate", "syndicate", + "sykwalker", "skywalker", + "sylablles", "syllables", + "syllabels", "syllables", + "symbolsim", "symbolism", + "symettric", "symmetric", + "symmetral", "symmetric", + "symmetria", "symmetrical", + "symoblism", "symbolism", + "sympathie", "sympathize", + "symphoney", "symphony", + "symptomes", "symptoms", + "symptomps", "symptoms", + "synagouge", "synagogue", + "syndacite", "syndicate", + "syndiacte", "syndicate", + "synidcate", "syndicate", + "synonymes", "synonyms", + "synonymis", "synonyms", + "synonymns", "synonyms", + "synonymos", "synonymous", + "synonymus", "synonyms", + "synopsies", "synopsis", + "syntehsis", "synthesis", + "syntehtic", "synthetic", + "syntethic", "synthetic", + "syphyllis", "syphilis", + "syracusae", "syracuse", + "sytrofoam", "styrofoam", + "tablespon", "tablespoon", + "tacticaly", "tactically", + "tanenhill", "tannehill", + "tannheill", "tannehill", + "targetted", "targeted", + "tawainese", "taiwanese", + "tawianese", "taiwanese", + 
"taxanomic", "taxonomic", + "teamfighs", "teamfights", + "teamfigth", "teamfight", + "teamifght", "teamfight", + "teampseak", "teamspeak", + "teaspooon", "teaspoon", + "techician", "technician", + "techinque", "technique", + "technolgy", "technology", + "teeangers", "teenagers", + "tehtering", "tethering", + "telegrpah", "telegraph", + "televsion", "television", + "temafight", "teamfight", + "tempaltes", "templates", + "temparate", "temperate", + "templaras", "templars", + "templares", "templars", + "temporali", "temporarily", + "tenacitiy", "tenacity", + "tensiones", "tensions", + "tentacels", "tentacles", + "tentacuel", "tentacle", + "tentalces", "tentacles", + "termianls", "terminals", + "terminato", "termination", + "terorrism", "terrorism", + "terorrist", "terrorist", + "terrabyte", "terabyte", + "terribley", "terribly", + "terriblly", "terribly", + "terriroty", "territory", + "terrorits", "terrorist", + "terrorsim", "terrorism", + "tesitcles", "testicles", + "tesitmony", "testimony", + "testicels", "testicles", + "testomony", "testimony", + "texturers", "textures", + "thankfuly", "thankfully", + "thankyoou", "thankyou", + "themselfs", "themselves", + "themselvs", "themselves", + "themslves", "themselves", + "theologia", "theological", + "therafter", "thereafter", + "therefoer", "therefor", + "therefour", "therefor", + "theroists", "theorists", + "thetering", "tethering", + "thirlling", "thrilling", + "thirteeen", "thirteen", + "thoecracy", "theocracy", + "thoerists", "theorists", + "thoroughy", "thoroughly", + "thoughout", "throughout", + "threatend", "threatened", + "throrough", "thorough", + "throughly", "thoroughly", + "througout", "throughout", + "thrusdays", "thursdays", + "thurdsays", "thursdays", + "thursdsay", "thursdays", + "thursters", "thrusters", + "tiawanese", "taiwanese", + "timestmap", "timestamp", + "tirangles", "triangles", + "tocuhdown", "touchdown", + "toghether", "together", + "tolerence", "tolerance", + "tommorrow", "tomorrow", + "torandoes", "tornadoes", + "torchligt", "torchlight", + "torelable", "tolerable", + "toritllas", "tortillas", + "tornaodes", "tornadoes", + "torpeados", "torpedoes", + "torrentas", "torrents", + "torrentes", "torrents", + "tortialls", "tortillas", + "tortillia", "tortilla", + "tortillla", "tortilla", + "tottehnam", "tottenham", + "tottenahm", "tottenham", + "tottneham", "tottenham", + "toturials", "tutorials", + "touchdwon", "touchdown", + "touristas", "tourists", + "touristes", "tourists", + "touristey", "touristy", + "touristly", "touristy", + "touristsy", "touristy", + "tournamet", "tournament", + "toxicitiy", "toxicity", + "trafficed", "trafficked", + "tragicaly", "tragically", + "traileras", "trailers", + "traingles", "triangles", + "trainwrek", "trainwreck", + "traitoris", "traitors", + "traitorus", "traitors", + "tramautic", "traumatic", + "tranlsate", "translate", + "transalte", "translate", + "transcris", "transcripts", + "transcrit", "transcript", + "transferd", "transferred", + "transfere", "transferred", + "transfors", "transforms", + "transfrom", "transform", + "transiten", "transient", + "transitin", "transitions", + "transofrm", "transform", + "transplat", "transplant", + "trasnfers", "transfers", + "trasnform", "transform", + "trasnport", "transport", + "traversie", "traverse", + "travestry", "travesty", + "treasuers", "treasures", + "treasurey", "treasury", + "treatmens", "treatments", + "treausres", "treasures", + "tremendos", "tremendous", + "trhilling", "thrilling", + "trhusters", "thrusters", + "triangels", 
"triangles", + "trianlges", "triangles", + "tribunaal", "tribunal", + "triguered", "triggered", + "trinagles", "triangles", + "truamatic", "traumatic", + "truthfuly", "truthfully", + "tunrtable", "turntable", + "turnaroud", "turnaround", + "turntabel", "turntable", + "typcially", "typically", + "tyrranies", "tyrannies", + "ubiquitos", "ubiquitous", + "ugprading", "upgrading", + "ukrainain", "ukrainian", + "ukrainias", "ukrainians", + "ukrainina", "ukrainian", + "ukrainisn", "ukrainians", + "ukrianian", "ukrainian", + "ulitmatum", "ultimatum", + "ulteriour", "ulterior", + "umbrellla", "umbrella", + "unaminous", "unanimous", + "unanmious", "unanimous", + "unanswerd", "unanswered", + "unanymous", "unanimous", + "unbannend", "unbanned", + "uncensord", "uncensored", + "uncomited", "uncommitted", + "undercunt", "undercut", + "underdong", "underdog", + "undergard", "undergrad", + "underming", "undermining", + "understad", "understands", + "underwaer", "underwear", + "underware", "underwear", + "undescore", "underscore", + "unforseen", "unforeseen", + "unfortune", "unfortunate", + "unfriendy", "unfriendly", + "unhealhty", "unhealthy", + "unheathly", "unhealthy", + "unhelathy", "unhealthy", + "unicornis", "unicorns", + "unicornus", "unicorns", + "uniformes", "uniforms", + "uninamous", "unanimous", + "unintuive", "unintuitive", + "uniquelly", "uniquely", + "unisntall", "uninstall", + "univerity", "university", + "universse", "universes", + "univesity", "university", + "unnistall", "uninstall", + "unoffical", "unofficial", + "unopenend", "unopened", + "unplayabe", "unplayable", + "unplesant", "unpleasant", + "unpopluar", "unpopular", + "unrankend", "unranked", + "unreliabe", "unreliable", + "unrwitten", "unwritten", + "untrianed", "untrained", + "unusaully", "unusually", + "unuseable", "unusable", + "unusuable", "unusable", + "unvierses", "universes", + "unweildly", "unwieldy", + "unwieldly", "unwieldy", + "unwirtten", "unwritten", + "unworthly", "unworthy", + "upcomming", "upcoming", + "upgarding", "upgrading", + "upgradded", "upgraded", + "uplfiting", "uplifting", + "uplifitng", "uplifting", + "urkainian", "ukrainian", + "utlimatum", "ultimatum", + "vacciante", "vaccinate", + "vaccinato", "vaccination", + "vacciners", "vaccines", + "vacestomy", "vasectomy", + "vaguaries", "vagaries", + "vaibility", "viability", + "vaildated", "validated", + "vairables", "variables", + "valdiated", "validated", + "valentein", "valentine", + "valentien", "valentine", + "valentins", "valentines", + "validitiy", "validity", + "valueable", "valuable", + "vanadlism", "vandalism", + "vandalsim", "vandalism", + "varaibles", "variables", + "varations", "variations", + "variantes", "variants", + "vascetomy", "vasectomy", + "vastecomy", "vasectomy", + "veganisim", "veganism", + "vegetarin", "vegetarians", + "vegitable", "vegetable", + "vehementy", "vehemently", + "veiwpoint", "viewpoint", + "velantine", "valentine", + "vendettta", "vendetta", + "venegance", "vengeance", + "veneuzela", "venezuela", + "venezeula", "venezuela", + "venezulea", "venezuela", + "vengaence", "vengeance", + "vengenace", "vengeance", + "ventilato", "ventilation", + "verbatium", "verbatim", + "verfiying", "verifying", + "verifiyng", "verifying", + "verisions", "revisions", + "versalite", "versatile", + "versatily", "versatility", + "versiones", "versions", + "versitale", "versatile", + "verstaile", "versatile", + "verticaly", "vertically", + "veryifing", "verifying", + "vicotrian", "victorian", + "vicotries", "victories", + "victoires", "victories", + 
"victorain", "victorian", + "victorina", "victorian", + "victorios", "victorious", + "videogaem", "videogame", + "videogams", "videogames", + "vidoegame", "videogame", + "viewpiont", "viewpoint", + "vigilence", "vigilance", + "vigliante", "vigilante", + "vigourous", "vigorous", + "viligante", "vigilante", + "viloently", "violently", + "vincinity", "vicinity", + "vioalting", "violating", + "violentce", "violence", + "virbation", "vibration", + "virgintiy", "virginity", + "virignity", "virginity", + "virutally", "virtually", + "visibiliy", "visibility", + "vitaminas", "vitamins", + "vitamines", "vitamins", + "vitrually", "virtually", + "vociemail", "voicemail", + "voilating", "violating", + "voilation", "violation", + "voilently", "violently", + "volatiliy", "volatility", + "voleyball", "volleyball", + "volontary", "voluntary", + "volonteer", "volunteer", + "volunatry", "voluntary", + "volunteed", "volunteered", + "vriginity", "virginity", + "wallpapes", "wallpapers", + "warrantly", "warranty", + "warrriors", "warriors", + "wavelengh", "wavelength", + "weakenend", "weakened", + "weakneses", "weakness", + "weaknesss", "weaknesses", + "wealtheir", "wealthier", + "weaponary", "weaponry", + "wedensday", "wednesday", + "wednesdsy", "wednesdays", + "wednessay", "wednesdays", + "wednseday", "wednesday", + "welathier", "wealthier", + "wendesday", "wednesday", + "wesbtrook", "westbrook", + "westernes", "westerners", + "westrbook", "westbrook", + "whereever", "wherever", + "whietlist", "whitelist", + "whilrwind", "whirlwind", + "whilsting", "whistling", + "whipsered", "whispered", + "whislting", "whistling", + "whisperes", "whispers", + "whitelsit", "whitelist", + "whitleist", "whitelist", + "whitsling", "whistling", + "whrilwind", "whirlwind", + "whsipered", "whispered", + "whtielist", "whitelist", + "widespred", "widespread", + "widesread", "widespread", + "windshied", "windshield", + "wintesses", "witnesses", + "wisconisn", "wisconsin", + "wishlisht", "wishlist", + "wishpered", "whispered", + "withdrawl", "withdrawal", + "withelist", "whitelist", + "witnesess", "witnesses", + "wolrdview", "worldview", + "wolrdwide", "worldwide", + "wonderlad", "wonderland", + "wordlview", "worldview", + "wordlwide", "worldwide", + "worhtless", "worthless", + "workfroce", "workforce", + "worldivew", "worldview", + "worldveiw", "worldview", + "worstened", "worsened", + "worthelss", "worthless", + "xenbolade", "xenoblade", + "xenobalde", "xenoblade", + "xenophoby", "xenophobia", + "xeonblade", "xenoblade", + "yementite", "yemenite", + "yorkshrie", "yorkshire", + "yorskhire", "yorkshire", + "yosemitie", "yosemite", + "youngents", "youngest", + "yourselvs", "yourselves", + "zimbabwae", "zimbabwe", + "zionistas", "zionists", + "zionistes", "zionists", + "abandond", "abandoned", + "abdomine", "abdomen", + "abilitiy", "ability", + "abilties", "abilities", + "abondons", "abandons", + "aboslute", "absolute", + "abosrbed", "absorbed", + "abruplty", "abruptly", + "abrutply", "abruptly", + "abscence", "absence", + "absestos", "asbestos", + "absoluts", "absolutes", + "absolvte", "absolve", + "absorbes", "absorbs", + "absoulte", "absolute", + "abstante", "bastante", + "abudance", "abundance", + "abudcted", "abducted", + "abundunt", "abundant", + "aburptly", "abruptly", + "abuseres", "abusers", + "abusrdly", "absurdly", + "academis", "academics", + "accademy", "academy", + "acccused", "accused", + "acceptes", "accepts", + "accidens", "accidents", + "accideny", "accidently", + "accoring", "according", + "accountt", "accountant", + 
"accpeted", "accepted", + "accuarcy", "accuracy", + "accumule", "accumulate", + "accusato", "accusation", + "accussed", "accused", + "acedamia", "academia", + "acedemic", "academic", + "acheived", "achieved", + "acheives", "achieves", + "achieval", "achievable", + "acnedote", "anecdote", + "acording", "according", + "acornyms", "acronyms", + "acousitc", "acoustic", + "acoutsic", "acoustic", + "acovados", "avocados", + "acquifer", "acquire", + "acquited", "acquitted", + "acquried", "acquired", + "acronmys", "acronyms", + "acronysm", "acronyms", + "acroynms", "acronyms", + "acrynoms", "acronyms", + "acsended", "ascended", + "actaully", "actually", + "activite", "activities", + "activits", "activities", + "activley", "actively", + "actresss", "actresses", + "actualey", "actualy", + "actualiy", "actuality", + "actualky", "actualy", + "actualmy", "actualy", + "actualoy", "actualy", + "actualpy", "actualy", + "actualty", "actualy", + "acutally", "actually", + "acutions", "auctions", + "adaptare", "adapter", + "adbandon", "abandon", + "adbucted", "abducted", + "addictes", "addicts", + "addictin", "addictions", + "addictis", "addictions", + "addional", "additional", + "addopted", "adopted", + "addresed", "addressed", + "adealide", "adelaide", + "adecuate", "adequate", + "adeilade", "adelaide", + "adeladie", "adelaide", + "adeliade", "adelaide", + "adeqaute", "adequate", + "adheisve", "adhesive", + "adhevise", "adhesive", + "adivsors", "advisors", + "admiraal", "admiral", + "adolence", "adolescent", + "adorbale", "adorable", + "adovcacy", "advocacy", + "adpaters", "adapters", + "adquired", "acquired", + "adquires", "acquires", + "adresing", "addressing", + "adressed", "addressed", + "adroable", "adorable", + "adultrey", "adultery", + "adventue", "adventures", + "adventus", "adventures", + "advertis", "adverts", + "advesary", "adversary", + "adviseer", "adviser", + "adviseur", "adviser", + "advocade", "advocated", + "advocats", "advocates", + "advsiors", "advisors", + "aethists", "atheists", + "affaires", "affairs", + "affilate", "affiliate", + "affintiy", "affinity", + "affleunt", "affluent", + "affulent", "affluent", + "afircans", "africans", + "africain", "african", + "afternon", "afternoon", + "againnst", "against", + "agnositc", "agnostic", + "agonstic", "agnostic", + "agravate", "aggravate", + "agreemnt", "agreement", + "agregate", "aggregate", + "agressie", "agressive", + "agressor", "aggressor", + "agrieved", "aggrieved", + "agruable", "arguable", + "agruably", "arguably", + "agrument", "argument", + "ahtletes", "athletes", + "aincents", "ancients", + "airboner", "airborne", + "airbrone", "airborne", + "aircarft", "aircraft", + "airplans", "airplanes", + "airporta", "airports", + "airpsace", "airspace", + "airscape", "airspace", + "akransas", "arkansas", + "alchemey", "alchemy", + "alchohol", "alcohol", + "alcholic", "alcoholic", + "alcoholc", "alcoholics", + "aldutery", "adultery", + "aleniate", "alienate", + "algoritm", "algorithm", + "alimoney", "alimony", + "alirghty", "alrighty", + "allaince", "alliance", + "alledged", "alleged", + "alledges", "alleges", + "allegedy", "allegedly", + "allegely", "allegedly", + "allegric", "allergic", + "allergey", "allergy", + "allianse", "alliances", + "alligned", "aligned", + "allinace", "alliance", + "allopone", "allophone", + "allready", "already", + "almigthy", "almighty", + "alpahbet", "alphabet", + "alrigthy", "alrighty", + "altantic", "atlantic", + "alterato", "alteration", + "alternar", "alternator", + "althetes", "athletes", + "althetic", 
"athletic", + "altriusm", "altruism", + "altrusim", "altruism", + "alturism", "altruism", + "aluminim", "aluminium", + "alumnium", "aluminum", + "alunimum", "aluminum", + "amatersu", "amateurs", + "amaterus", "amateurs", + "amendmet", "amendments", + "amercian", "american", + "amercias", "americas", + "amernian", "armenian", + "amethsyt", "amethyst", + "ameythst", "amethyst", + "ammended", "amended", + "amnestry", "amnesty", + "amoungst", "amongst", + "amplifiy", "amplify", + "amplifly", "amplify", + "amrchair", "armchair", + "amrenian", "armenian", + "amtheyst", "amethyst", + "analgoue", "analogue", + "analisys", "analysis", + "analitic", "analytic", + "analouge", "analogue", + "analysie", "analyse", + "analysit", "analyst", + "analyste", "analyse", + "analysze", "analyse", + "analzyed", "analyzed", + "anaolgue", "analogue", + "anarchim", "anarchism", + "anaylses", "analyses", + "anaylsis", "analysis", + "anaylsts", "analysts", + "anaylzed", "analyzed", + "ancedote", "anecdote", + "anceints", "ancients", + "ancinets", "ancients", + "andoirds", "androids", + "andorids", "androids", + "andriods", "androids", + "anecdots", "anecdotes", + "anectode", "anecdote", + "anedocte", "anecdote", + "aneroxia", "anorexia", + "aneroxic", "anorexic", + "angostic", "agnostic", + "angrilly", "angrily", + "anicents", "ancients", + "animatie", "animate", + "animatte", "animate", + "anlayses", "analyses", + "annoints", "anoints", + "annouced", "announced", + "annoucne", "announce", + "anntenas", "antennas", + "anoerxia", "anorexia", + "anoerxic", "anorexic", + "anonymos", "anonymous", + "anoreixa", "anorexia", + "anounced", "announced", + "anoxeria", "anorexia", + "anoxeric", "anorexic", + "answeres", "answers", + "antartic", "antarctic", + "antennea", "antenna", + "antennna", "antenna", + "anticipe", "anticipate", + "antiquae", "antique", + "antivirs", "antivirus", + "anwsered", "answered", + "anyhting", "anything", + "anyhwere", "anywhere", + "anyoneis", "anyones", + "anythign", "anything", + "anytying", "anything", + "aparment", "apartment", + "apartmet", "apartments", + "apenines", "apennines", + "aperutre", "aperture", + "aplhabet", "alphabet", + "apologes", "apologise", + "aposltes", "apostles", + "apostels", "apostles", + "appaluse", "applause", + "apparant", "apparent", + "appareal", "apparel", + "appareil", "apparel", + "apperead", "appeared", + "applaued", "applaud", + "appluase", "applause", + "appology", "apology", + "apporach", "approach", + "appraoch", "approach", + "apreture", "aperture", + "apsotles", "apostles", + "aqaurium", "aquarium", + "aqcuired", "acquired", + "aquaduct", "aqueduct", + "aquairum", "aquarium", + "aquaruim", "aquarium", + "aquiring", "acquiring", + "aquitted", "acquitted", + "arbitary", "arbitrary", + "arbitray", "arbitrary", + "arbiture", "arbiter", + "architet", "architect", + "archtype", "archetype", + "aremnian", "armenian", + "argentia", "argentina", + "argubaly", "arguably", + "arguemet", "arguement", + "arguemtn", "arguement", + "ariborne", "airborne", + "aricraft", "aircraft", + "ariplane", "airplane", + "ariports", "airports", + "arispace", "airspace", + "aristote", "aristotle", + "aritfact", "artifact", + "arizonia", "arizona", + "arkasnas", "arkansas", + "arlighty", "alrighty", + "armamant", "armament", + "armenain", "armenian", + "armenina", "armenian", + "armpitts", "armpits", + "armstrog", "armstrong", + "arpanoid", "paranoid", + "arpeture", "aperture", + "arragned", "arranged", + "arrestes", "arrests", + "arrestos", "arrests", + "arsenaal", "arsenal", + 
"artemios", "artemis", + "artemius", "artemis", + "arthrits", "arthritis", + "articule", "articulate", + "artifacs", "artifacts", + "artifcat", "artifact", + "artilley", "artillery", + "artisitc", "artistic", + "artistas", "artists", + "arugable", "arguable", + "arugably", "arguably", + "arugment", "argument", + "asborbed", "absorbed", + "asburdly", "absurdly", + "ascneded", "ascended", + "asissted", "assisted", + "askreddt", "askreddit", + "asnwered", "answered", + "aspectos", "aspects", + "asperges", "aspergers", + "assasins", "assassins", + "assemple", "assemble", + "assertin", "assertions", + "asshates", "asshats", + "asshatts", "asshats", + "assimile", "assimilate", + "assistat", "assistants", + "assitant", "assistant", + "assmeble", "assemble", + "assmebly", "assembly", + "asssasin", "assassin", + "assualts", "assaults", + "asteorid", "asteroid", + "asteriks", "asterisk", + "asteriod", "asteroid", + "asterois", "asteroids", + "astersik", "asterisk", + "asthetic", "aesthetic", + "astronat", "astronaut", + "asutrian", "austrian", + "atheisim", "atheism", + "atheistc", "atheistic", + "atheltes", "athletes", + "atheltic", "athletic", + "athenean", "athenian", + "athesits", "atheists", + "athetlic", "athletic", + "athients", "athiest", + "atittude", "attitude", + "atlantia", "atlanta", + "atmoizer", "atomizer", + "atomzier", "atomizer", + "atribute", "attribute", + "atrifact", "artifact", + "attackes", "attackers", + "attemped", "attempted", + "attemted", "attempted", + "attemtps", "attempts", + "attidute", "attitude", + "attitide", "attitude", + "attribue", "attribute", + "aucitons", "auctions", + "audactiy", "audacity", + "audcaity", "audacity", + "audeince", "audience", + "audiobok", "audiobook", + "austeriy", "austerity", + "austiran", "austrian", + "austitic", "autistic", + "austrain", "austrian", + "australa", "australian", + "austrija", "austria", + "austrila", "austria", + "autisitc", "autistic", + "autoattk", "autoattack", + "autograh", "autograph", + "automato", "automation", + "automony", "autonomy", + "autority", "authority", + "autsitic", "autistic", + "auxilary", "auxiliary", + "avacodos", "avocados", + "avaiable", "available", + "availabe", "available", + "availble", "available", + "avaition", "aviation", + "avalable", "available", + "avalance", "avalanche", + "avataras", "avatars", + "avatards", "avatars", + "avatares", "avatars", + "averadge", "averaged", + "avergaed", "averaged", + "avergaes", "averages", + "aviaiton", "aviation", + "avilable", "available", + "avnegers", "avengers", + "avodacos", "avocados", + "awekened", "weakened", + "awesomey", "awesomely", + "awfullly", "awfully", + "awkwardy", "awkwardly", + "awnsered", "answered", + "babysite", "babysitter", + "baceause", "because", + "bacehlor", "bachelor", + "bachleor", "bachelor", + "bacholer", "bachelor", + "backeast", "backseat", + "backerds", "backers", + "backfied", "backfield", + "backpacs", "backpacks", + "balcanes", "balances", + "balconey", "balcony", + "balconny", "balcony", + "ballistc", "ballistic", + "balnaced", "balanced", + "banannas", "bananas", + "banditas", "bandits", + "bandwith", "bandwidth", + "bangkock", "bangkok", + "baptisim", "baptism", + "barabric", "barbaric", + "barbarin", "barbarian", + "barbaris", "barbarians", + "bardford", "bradford", + "bargaing", "bargaining", + "baristia", "barista", + "barrakcs", "barracks", + "barrells", "barrels", + "basicaly", "basically", + "basiclay", "basicly", + "basicley", "basicly", + "basicliy", "basicly", + "batistia", "batista", + "battalin", 
"battalion", + "bayonent", "bayonet", + "beachead", "beachhead", + "beacuoup", "beaucoup", + "beardude", "bearded", + "beastley", "beastly", + "beatiful", "beautiful", + "beccause", "because", + "becuasse", "becuase", + "befirend", "befriend", + "befreind", "befriend", + "begginer", "beginner", + "begginig", "begging", + "begginng", "begging", + "begining", "beginning", + "beginnig", "beginning", + "behaivor", "behavior", + "behavios", "behaviours", + "behavoir", "behavior", + "behavour", "behavior", + "behngazi", "benghazi", + "behtesda", "bethesda", + "beleived", "believed", + "beleiver", "believer", + "beleives", "believes", + "beliefes", "beliefs", + "benefica", "beneficial", + "bengahzi", "benghazi", + "bengalas", "bengals", + "bengalos", "bengals", + "bengazhi", "benghazi", + "benghzai", "benghazi", + "bengzhai", "benghazi", + "benhgazi", "benghazi", + "benidect", "benedict", + "benifits", "benefits", + "berekley", "berkeley", + "berserkr", "berserker", + "beseiged", "besieged", + "betehsda", "bethesda", + "beteshda", "bethesda", + "bethdesa", "bethesda", + "bethedsa", "bethesda", + "bethseda", "bethesda", + "beyoncye", "beyonce", + "bibilcal", "biblical", + "bicylces", "bicycles", + "bigfooot", "bigfoot", + "bigining", "beginning", + "bilbical", "biblical", + "billboad", "billboard", + "bilsters", "blisters", + "bilzzard", "blizzard", + "bilzzcon", "blizzcon", + "biologia", "biological", + "birhtday", "birthday", + "birsbane", "brisbane", + "birthdsy", "birthdays", + "biseuxal", "bisexual", + "bisexaul", "bisexual", + "bitcions", "bitcoins", + "bitocins", "bitcoins", + "blackade", "blacked", + "blackend", "blacked", + "blackjak", "blackjack", + "blacklit", "blacklist", + "blatanty", "blatantly", + "blessins", "blessings", + "blessure", "blessing", + "bloggare", "blogger", + "bloggeur", "blogger", + "bluebery", "blueberry", + "bluetooh", "bluetooth", + "blugaria", "bulgaria", + "boardway", "broadway", + "bollcoks", "bollocks", + "bomberos", "bombers", + "bookmars", "bookmarks", + "boradway", "broadway", + "boredoom", "boredom", + "bouldore", "boulder", + "bounites", "bounties", + "boutnies", "bounties", + "boutqiue", "boutique", + "bouyancy", "buoyancy", + "boyfried", "boyfriend", + "bradcast", "broadcast", + "bradfrod", "bradford", + "brakeout", "breakout", + "braodway", "broadway", + "braverly", "bravery", + "breathis", "breaths", + "breathos", "breaths", + "brekaout", "breakout", + "brendamn", "brendan", + "breweres", "brewers", + "brewerey", "brewery", + "brewerks", "brewers", + "brewerys", "brewers", + "brigaged", "brigade", + "brigated", "brigade", + "brigthen", "brighten", + "briliant", "brilliant", + "brillant", "brilliant", + "bristool", "bristol", + "brithday", "birthday", + "brittish", "british", + "briusers", "bruisers", + "broadbad", "broadband", + "broadcat", "broadcasts", + "broadley", "broadly", + "brocolli", "broccoli", + "brodaway", "broadway", + "broncoes", "broncos", + "broswing", "browsing", + "browines", "brownies", + "browisng", "browsing", + "brtually", "brutally", + "brugundy", "burgundy", + "bruisend", "bruised", + "brussles", "brussels", + "brusting", "bursting", + "bubblews", "bubbles", + "buddhits", "buddhist", + "buddhsim", "buddhism", + "buddishm", "buddhism", + "buddisht", "buddhist", + "buglaria", "bulgaria", + "buhddism", "buddhism", + "buhddist", "buddhist", + "buidlers", "builders", + "buidling", "building", + "buildins", "buildings", + "buisness", "business", + "bulagria", "bulgaria", + "bulgaira", "bulgaria", + "buliders", "builders", + 
"buliding", "building", + "bulletts", "bullets", + "burisers", "bruisers", + "burriots", "burritos", + "burritio", "burrito", + "burritto", "burrito", + "burrtios", "burritos", + "burssels", "brussels", + "burtally", "brutally", + "burtsing", "bursting", + "busrting", "bursting", + "butcherd", "butchered", + "butterey", "buttery", + "butterfy", "butterfly", + "butterry", "buttery", + "butthoel", "butthole", + "bycicles", "bicycles", + "cabbagge", "cabbage", + "cabients", "cabinets", + "cabinate", "cabinet", + "cabinent", "cabinet", + "cabniets", "cabinets", + "caclulus", "calculus", + "cafetera", "cafeteria", + "caffinee", "caffeine", + "cahsiers", "cashiers", + "cainster", "canister", + "calander", "calendar", + "calcular", "calculator", + "calgarry", "calgary", + "calibler", "calibre", + "caloires", "calories", + "calrkson", "clarkson", + "calroies", "calories", + "calssify", "classify", + "calulate", "calculate", + "calymore", "claymore", + "camapign", "campaign", + "cambodai", "cambodia", + "camboida", "cambodia", + "cambpell", "campbell", + "cambride", "cambridge", + "cambrige", "cambridge", + "camoufle", "camouflage", + "campagin", "campaign", + "campaing", "campaign", + "campains", "campaigns", + "camperas", "campers", + "camperos", "campers", + "canadias", "canadians", + "cananbis", "cannabis", + "cancelas", "cancels", + "canceles", "cancels", + "cancells", "cancels", + "canceres", "cancers", + "cancerns", "cancers", + "cancerus", "cancers", + "candiate", "candidate", + "candiens", "candies", + "canistre", "canister", + "cannabil", "cannibal", + "cannbial", "cannibal", + "cannibas", "cannabis", + "cansiter", "canister", + "capitans", "captains", + "capitola", "capital", + "capitulo", "capitol", + "capmbell", "campbell", + "capsuels", "capsules", + "capsulse", "capsules", + "capsumel", "capsule", + "capteurs", "captures", + "captials", "capitals", + "captians", "captains", + "capusles", "capsules", + "caputres", "captures", + "cardboad", "cardboard", + "cardianl", "cardinal", + "cardnial", "cardinal", + "careflly", "carefully", + "carefull", "careful", + "carefuly", "carefully", + "caricate", "caricature", + "caridgan", "cardigan", + "caridnal", "cardinal", + "carinval", "carnival", + "carloina", "carolina", + "carnagie", "carnegie", + "carnigie", "carnegie", + "carnvial", "carnival", + "carrotts", "carrots", + "carrotus", "carrots", + "cartells", "cartels", + "cartmaan", "cartman", + "cartride", "cartridge", + "cartrige", "cartridge", + "carvinal", "carnival", + "casaulty", "casualty", + "casheirs", "cashiers", + "cashieer", "cashier", + "cashires", "cashiers", + "castleos", "castles", + "castlers", "castles", + "casulaty", "casualty", + "cataclym", "cataclysm", + "catagory", "category", + "cataline", "catiline", + "cataloge", "catalogue", + "catalsyt", "catalyst", + "cataylst", "catalyst", + "cathloic", "catholic", + "catlayst", "catalyst", + "caucasin", "caucasian", + "causalty", "casualty", + "cellural", "cellular", + "celullar", "cellular", + "celverly", "cleverly", + "cemetary", "cemetery", + "centeres", "centers", + "centerns", "centers", + "centrase", "centres", + "centrers", "centres", + "ceratine", "creatine", + "cerberal", "cerebral", + "cerbreus", "cerberus", + "cerbures", "cerberus", + "ceremone", "ceremonies", + "cerimony", "ceremony", + "ceromony", "ceremony", + "certainy", "certainty", + "challege", "challenge", + "chambear", "chamber", + "chambres", "chambers", + "champage", "champagne", + "chanisaw", "chainsaw", + "chanlder", "chandler", + "charcaol", "charcoal", + 
"chargehr", "charger", + "chargeur", "charger", + "chariman", "chairman", + "charimsa", "charisma", + "charmisa", "charisma", + "charocal", "charcoal", + "charsima", "charisma", + "chasiers", "cashiers", + "chassids", "chassis", + "chassies", "chassis", + "chatolic", "catholic", + "chcukles", "chuckles", + "checkare", "checker", + "checkear", "checker", + "cheesees", "cheeses", + "cheeseus", "cheeses", + "cheetoos", "cheetos", + "chemcial", "chemical", + "chemisty", "chemistry", + "chernobl", "chernobyl", + "chiansaw", "chainsaw", + "chidlish", "childish", + "chihuaha", "chihuahua", + "childres", "childrens", + "chillade", "chilled", + "chillead", "chilled", + "chillend", "chilled", + "chilvary", "chivalry", + "chinesse", "chinese", + "chivarly", "chivalry", + "chivlary", "chivalry", + "chlidish", "childish", + "chlroine", "chlorine", + "chmabers", "chambers", + "chocolae", "chocolates", + "chocolet", "chocolates", + "choesive", "cohesive", + "choicers", "choices", + "cholrine", "chlorine", + "chorline", "chlorine", + "chracter", "character", + "christin", "christian", + "chroline", "chlorine", + "chromose", "chromosome", + "chronice", "chronicles", + "chruches", "churches", + "chuckels", "chuckles", + "cielings", "ceilings", + "cigarete", "cigarettes", + "cigarets", "cigarettes", + "cilmbers", "climbers", + "cilnatro", "cilantro", + "ciltoris", "clitoris", + "circiuts", "circuits", + "circkets", "crickets", + "circlebs", "circles", + "circluar", "circular", + "ciricuit", "circuit", + "cirlcing", "circling", + "ciruclar", "circular", + "clannand", "clannad", + "clarifiy", "clarify", + "clarskon", "clarkson", + "clasical", "classical", + "classrom", "classroom", + "classsic", "classics", + "clausens", "clauses", + "cleanies", "cleanse", + "cleasner", "cleanser", + "clenaser", "cleanser", + "clevelry", "cleverly", + "clhorine", "chlorine", + "cliamtes", "climates", + "cliantro", "cilantro", + "clickare", "clicker", + "clickbat", "clickbait", + "clickear", "clicker", + "clientes", "clients", + "clincial", "clinical", + "clinicas", "clinics", + "clinicos", "clinics", + "clipboad", "clipboard", + "clitiros", "clitoris", + "closeing", "closing", + "closeley", "closely", + "clyamore", "claymore", + "clyinder", "cylinder", + "cmoputer", "computer", + "coindice", "coincide", + "collapes", "collapse", + "collares", "collars", + "collaris", "collars", + "collaros", "collars", + "collaspe", "collapse", + "colleage", "colleagues", + "collecte", "collective", + "collegue", "colleague", + "collisin", "collisions", + "collosal", "colossal", + "collpase", "collapse", + "coloardo", "colorado", + "colordao", "colorado", + "colubmia", "columbia", + "columnas", "columns", + "comadres", "comrades", + "comander", "commander", + "comandos", "commandos", + "comapany", "company", + "comapres", "compares", + "combiens", "combines", + "combinig", "combining", + "comediac", "comedic", + "comedias", "comedians", + "comestic", "cosmetic", + "comision", "commission", + "comiting", "committing", + "comitted", "committed", + "comittee", "committee", + "commandd", "commanded", + "commecen", "commence", + "commedic", "comedic", + "commense", "commenters", + "commenty", "commentary", + "commiest", "commits", + "commited", "committed", + "commitee", "committee", + "commites", "commits", + "committe", "committee", + "committs", "commits", + "commitus", "commits", + "commmand", "command", + "communit", "communist", + "companis", "companions", + "comparse", "compares", + "comparte", "compare", + "compasso", "compassion", + 
"compelte", "complete", + "compense", "compensate", + "complais", "complains", + "complane", "complacent", + "complate", "complacent", + "compleet", "complete", + "completi", "complexity", + "complets", "completes", + "complety", "completely", + "complexs", "complexes", + "complext", "complexity", + "complexy", "complexity", + "complict", "complicit", + "complier", "compiler", + "compones", "compose", + "componet", "components", + "componts", "compost", + "composet", "compost", + "composit", "compost", + "composte", "compose", + "comprese", "compressed", + "compreso", "compressor", + "compsers", "compress", + "comptown", "compton", + "compunet", "compute", + "computre", "compute", + "comradre", "comrade", + "comsetic", "cosmetic", + "conatins", "contains", + "conceald", "concealed", + "conceide", "conceived", + "conceled", "concede", + "concened", "concede", + "concepta", "conceptual", + "concered", "concede", + "concernt", "concert", + "concerte", "concrete", + "concesso", "concession", + "conceted", "concede", + "conceved", "concede", + "concibes", "concise", + "concider", "consider", + "concides", "concise", + "concious", "conscious", + "conclued", "conclude", + "concluse", "conclusive", + "concluso", "conclusion", + "concreet", "concrete", + "concrets", "concerts", + "condemnd", "condemned", + "conditon", "condition", + "condomes", "condoms", + "condomns", "condoms", + "conduict", "conduit", + "conected", "connected", + "conencts", "connects", + "confeses", "confess", + "confesos", "confess", + "confesso", "confession", + "configue", "configure", + "confilct", "conflict", + "confirmd", "confirmed", + "conflcit", "conflict", + "conflics", "conflicts", + "confrims", "confirms", + "conicide", "coincide", + "conlcude", "conclude", + "conqueor", "conquer", + "conquerd", "conquered", + "conqured", "conquered", + "conscent", "consent", + "consious", "conscious", + "constans", "constants", + "constast", "constants", + "constatn", "constant", + "constrat", "constraint", + "construt", "constructs", + "containd", "contained", + "containg", "containing", + "contaire", "containers", + "contanti", "contacting", + "contense", "contenders", + "contenst", "contents", + "contexta", "contextual", + "contextl", "contextual", + "contians", "contains", + "contined", "continued", + "contines", "continents", + "continum", "continuum", + "continus", "continues", + "continut", "continuity", + "continuu", "continuous", + "contracr", "contractor", + "contracs", "contracts", + "controll", "control", + "contruct", "construct", + "convenit", "convenient", + "convento", "convention", + "converst", "converts", + "convertr", "converter", + "conviced", "convinced", + "convicto", "conviction", + "convingi", "convincing", + "convinse", "convinces", + "cooldows", "cooldowns", + "coordine", "coordinate", + "coralina", "carolina", + "corollla", "corolla", + "corolloa", "corolla", + "corosion", "corrosion", + "corpsers", "corpses", + "corrdior", "corridor", + "correcty", "correctly", + "correnti", "correcting", + "corretly", "correctly", + "corrupto", "corruption", + "cosemtic", "cosmetic", + "cosutmes", "costumes", + "couldnot", "couldnt", + "coulored", "coloured", + "counries", "countries", + "counseil", "counsel", + "counsole", "counsel", + "counterd", "countered", + "countert", "counteract", + "countres", "counters", + "courtrom", "courtroom", + "courtsey", "courtesy", + "cousines", "cousins", + "cousings", "cousins", + "coutners", "counters", + "covanent", "covenant", + "coverted", "converted", + "coyotees", "coyotes", + 
"cpatains", "captains", + "cranbery", "cranberry", + "crayones", "crayons", + "creaeted", "created", + "createin", "creatine", + "createur", "creature", + "creatien", "creatine", + "creepgin", "creeping", + "cricling", "circling", + "cringely", "cringey", + "cringery", "cringey", + "criticas", "critics", + "critices", "critics", + "criticie", "criticise", + "criticim", "criticisms", + "criticis", "critics", + "criticms", "critics", + "criticos", "critics", + "criticts", "critics", + "criticus", "critics", + "critiera", "criteria", + "critized", "criticized", + "croatioa", "croatia", + "crossfie", "crossfire", + "crosshar", "crosshair", + "crosspot", "crosspost", + "crowbahr", "crowbar", + "cruasder", "crusader", + "cruciaal", "crucial", + "crucibel", "crucible", + "cruicble", "crucible", + "crusdaer", "crusader", + "crusiers", "cruisers", + "crusiing", "cruising", + "cruthces", "crutches", + "cthulhlu", "cthulhu", + "cthulluh", "cthulhu", + "cubpoard", "cupboard", + "cuddleys", "cuddles", + "culprint", "culprit", + "cultrual", "cultural", + "culutral", "cultural", + "cupbaord", "cupboard", + "cupborad", "cupboard", + "curcible", "crucible", + "curisers", "cruisers", + "curising", "cruising", + "currecny", "currency", + "currence", "currencies", + "currenly", "currently", + "currenty", "currently", + "cursader", "crusader", + "custcene", "cutscene", + "cutsceen", "cutscene", + "cutscens", "cutscenes", + "cutsence", "cutscene", + "cylcists", "cyclists", + "cylidner", "cylinder", + "cylindre", "cylinder", + "cynisicm", "cynicism", + "cyrstals", "crystals", + "dacquiri", "daiquiri", + "daimonds", "diamonds", + "dangeros", "dangers", + "dangerus", "dangers", + "darkenss", "darkness", + "darnkess", "darkness", + "dashboad", "dashboard", + "daugther", "daughter", + "deadlfit", "deadlift", + "deadlifs", "deadlifts", + "deafauts", "defaults", + "deafeted", "defeated", + "deafults", "defaults", + "dealying", "delaying", + "deamenor", "demeanor", + "deathcat", "deathmatch", + "debuffes", "debuffs", + "debufffs", "debuffs", + "decalred", "declared", + "decalres", "declares", + "decembre", "december", + "decidely", "decidedly", + "decieved", "deceived", + "decifits", "deficits", + "decipted", "depicted", + "declears", "declares", + "declinig", "declining", + "decmeber", "december", + "decribed", "described", + "decribes", "describes", + "dedicato", "dedication", + "deductie", "deductible", + "defautls", "defaults", + "defectos", "defects", + "defectus", "defects", + "defendas", "defends", + "defendes", "defenders", + "defendis", "defends", + "defendre", "defender", + "defendrs", "defends", + "defensea", "defenseman", + "defensen", "defenseman", + "defensie", "defensive", + "defetead", "defeated", + "deffined", "defined", + "deficiet", "deficient", + "definate", "definite", + "definaty", "definately", + "definety", "definetly", + "definito", "definition", + "definitv", "definitive", + "deflatin", "deflation", + "deflecto", "deflection", + "defualts", "defaults", + "degarded", "degraded", + "degenere", "degenerate", + "degraged", "degrade", + "degrated", "degrade", + "deisgned", "designed", + "deisgner", "designer", + "dekstops", "desktops", + "delcared", "declared", + "delcares", "declares", + "delepted", "depleted", + "delivere", "deliveries", + "delpeted", "depleted", + "delpoyed", "deployed", + "delyaing", "delaying", + "demandas", "demands", + "demandes", "demands", + "demenaor", "demeanor", + "democray", "democracy", + "demolito", "demolition", + "denseley", "densely", + "densitiy", "density", + 
"deomcrat", "democrat", + "deovtion", "devotion", + "departer", "departure", + "departue", "departure", + "depcited", "depicted", + "depelted", "depleted", + "dependat", "dependant", + "depictes", "depicts", + "depictin", "depictions", + "depolyed", "deployed", + "depositd", "deposited", + "depostis", "deposits", + "depresse", "depressive", + "depresso", "depression", + "derivate", "derivative", + "descened", "descend", + "descibed", "described", + "descirbe", "describe", + "descrise", "describes", + "desgined", "designed", + "desginer", "designer", + "desicive", "decisive", + "designad", "designated", + "designes", "designs", + "designet", "designated", + "desinged", "designed", + "desinger", "designer", + "desitned", "destined", + "desktiop", "desktop", + "desorder", "disorder", + "despides", "despised", + "despiste", "despise", + "destiney", "destiny", + "destinty", "destiny", + "destkops", "desktops", + "destorys", "destroys", + "destrose", "destroyers", + "destroyd", "destroyed", + "destroyr", "destroyers", + "detalied", "detailed", + "detectas", "detects", + "detectes", "detects", + "detectie", "detectives", + "determen", "determines", + "devasted", "devastated", + "develope", "develop", + "devialet", "deviate", + "deviatie", "deviate", + "devilers", "delivers", + "devloved", "devolved", + "devovled", "devolved", + "diaganol", "diagonal", + "diagnoal", "diagonal", + "diagnoes", "diagnose", + "diagnosi", "diagnostic", + "diagonse", "diagnose", + "diahrrea", "diarrhea", + "dialetcs", "dialects", + "dialgoue", "dialogue", + "dialouge", "dialogue", + "diarreah", "diarrhea", + "diarreha", "diarrhea", + "dichtomy", "dichotomy", + "dickisch", "dickish", + "dicovers", "discovers", + "dicovery", "discovery", + "dicussed", "discussed", + "diferent", "different", + "differnt", "different", + "difficut", "difficulty", + "diffrent", "different", + "diganose", "diagnose", + "dignitiy", "dignity", + "dimaonds", "diamonds", + "dinasour", "dinosaur", + "dinosaus", "dinosaurs", + "dinosuar", "dinosaur", + "dinsoaur", "dinosaur", + "dionsaur", "dinosaur", + "diphtong", "diphthong", + "diplomma", "diploma", + "dipthong", "diphthong", + "direclty", "directly", + "directin", "directions", + "directix", "directx", + "directos", "directors", + "directoy", "directory", + "directrx", "directx", + "dirfting", "drifting", + "disabeld", "disabled", + "disabels", "disables", + "disagred", "disagreed", + "disagres", "disagrees", + "disbaled", "disabled", + "disbales", "disables", + "disbelif", "disbelief", + "dischard", "discharged", + "dischare", "discharged", + "discound", "discounted", + "discoure", "discourse", + "discoved", "discovered", + "discreto", "discretion", + "discribe", "describe", + "disentry", "dysentery", + "disgiuse", "disguise", + "dishoner", "dishonored", + "dishonet", "dishonesty", + "dislikse", "dislikes", + "dismante", "dismantle", + "dismisse", "dismissive", + "disolved", "dissolved", + "dispacth", "dispatch", + "dispalys", "displays", + "dispence", "dispense", + "dispersa", "dispensary", + "displayd", "displayed", + "disposle", "dispose", + "disposte", "dispose", + "dispoves", "dispose", + "disptach", "dispatch", + "disricts", "districts", + "dissovle", "dissolve", + "distates", "distaste", + "distatse", "distaste", + "disticnt", "distinct", + "distorto", "distortion", + "distrcit", "district", + "districs", "districts", + "disturbd", "disturbed", + "disupted", "disputed", + "disuptes", "disputes", + "diversed", "diverse", + "diversiy", "diversify", + "dividens", "dividends", + "divintiy", 
"divinity", + "divisons", "divisions", + "doapmine", "dopamine", + "docrines", "doctrines", + "docrtine", "doctrine", + "doctines", "doctrines", + "doctirne", "doctrine", + "doctrins", "doctrines", + "dogamtic", "dogmatic", + "dolhpins", "dolphins", + "domapine", "dopamine", + "domecrat", "democrat", + "domiante", "dominate", + "dominato", "domination", + "dominats", "dominates", + "dominent", "dominant", + "dominoin", "dominion", + "donwload", "download", + "donwvote", "downvote", + "doomdsay", "doomsday", + "doosmday", "doomsday", + "doplhins", "dolphins", + "dopmaine", "dopamine", + "dormtund", "dortmund", + "dortumnd", "dortmund", + "dotrmund", "dortmund", + "douchely", "douchey", + "doucheus", "douches", + "dowloads", "downloads", + "downlaod", "download", + "downloas", "downloads", + "downstar", "downstairs", + "downvore", "downvoters", + "downvotr", "downvoters", + "downvots", "downvotes", + "draculea", "dracula", + "draculla", "dracula", + "dragones", "dragons", + "dragonus", "dragons", + "drfiting", "drifting", + "driectly", "directly", + "drifitng", "drifting", + "driveris", "drivers", + "drotmund", "dortmund", + "duaghter", "daughter", + "dumbbels", "dumbbells", + "dumptser", "dumpster", + "dumspter", "dumpster", + "dunegons", "dungeons", + "dungeoun", "dungeon", + "dungoens", "dungeons", + "dupicate", "duplicate", + "duplicas", "duplicates", + "dwarvens", "dwarves", + "dyanmics", "dynamics", + "dyanmite", "dynamite", + "dymanics", "dynamics", + "dymanite", "dynamite", + "dynastry", "dynasty", + "dysentry", "dysentery", + "dysphora", "dysphoria", + "earilest", "earliest", + "eatswood", "eastwood", + "eceonomy", "economy", + "ecidious", "deciduous", + "ecologia", "ecological", + "ecomonic", "economic", + "ecstacys", "ecstasy", + "ecstascy", "ecstasy", + "ecstasty", "ecstasy", + "ectastic", "ecstatic", + "editoras", "editors", + "editores", "editors", + "efficent", "efficient", + "egpytian", "egyptian", + "egyptain", "egyptian", + "egytpian", "egyptian", + "ehtereal", "ethereal", + "ehternet", "ethernet", + "eigtheen", "eighteen", + "electhor", "electro", + "electorn", "electron", + "elementy", "elementary", + "elephans", "elephants", + "elevatin", "elevation", + "elicided", "elicited", + "eligable", "eligible", + "elimiate", "eliminate", + "eliminas", "eliminates", + "elitisim", "elitism", + "elitistm", "elitism", + "ellected", "elected", + "embarass", "embarrass", + "embargos", "embargoes", + "embarras", "embarrass", + "embassay", "embassy", + "embassey", "embassy", + "embasssy", "embassy", + "emergend", "emerged", + "emergerd", "emerged", + "eminated", "emanated", + "emminent", "eminent", + "emmisary", "emissary", + "emmision", "emission", + "emmiting", "emitting", + "emmitted", "emitted", + "empathie", "empathize", + "empirial", "empirical", + "emulatin", "emulation", + "enahnces", "enhances", + "enchanct", "enchant", + "encolsed", "enclosed", + "endanged", "endangered", + "endevors", "endeavors", + "endevour", "endeavour", + "endlessy", "endlessly", + "endorces", "endorse", + "engeneer", "engineer", + "engeries", "energies", + "engineed", "engineered", + "engrames", "engrams", + "engramms", "engrams", + "enigneer", "engineer", + "enitrely", "entirely", + "enlcosed", "enclosed", + "enlsaved", "enslaved", + "ensalved", "enslaved", + "enterity", "entirety", + "entierly", "entirely", + "entierty", "entirety", + "entilted", "entitled", + "entirley", "entirely", + "entiteld", "entitled", + "entitity", "entity", + "entropay", "entropy", + "entrophy", "entropy", + "ephipany", 
"epiphany", + "epihpany", "epiphany", + "epilespy", "epilepsy", + "epilgoue", "epilogue", + "episdoes", "episodes", + "epitomie", "epitome", + "epliepsy", "epilepsy", + "epliogue", "epilogue", + "epsiodes", "episodes", + "epsresso", "espresso", + "eqaulity", "equality", + "eqaution", "equation", + "equailty", "equality", + "eraticly", "erratically", + "erroneos", "erroneous", + "errupted", "erupted", + "escalato", "escalation", + "esctatic", "ecstatic", + "esential", "essential", + "esitmate", "estimate", + "esperate", "seperate", + "esportes", "esports", + "estiamte", "estimate", + "estoeric", "esoteric", + "estonija", "estonia", + "estoniya", "estonia", + "etherael", "ethereal", + "etherent", "ethernet", + "ethicaly", "ethically", + "etiquete", "etiquette", + "etrailer", "retailer", + "eugencis", "eugenics", + "eugneics", "eugenics", + "euhporia", "euphoria", + "euhporic", "euphoric", + "euorpean", "european", + "euphoira", "euphoria", + "euphroia", "euphoria", + "euphroic", "euphoric", + "europian", "european", + "eurpoean", "european", + "evangers", "avengers", + "everyons", "everyones", + "evidencd", "evidenced", + "evidende", "evidenced", + "evloving", "evolving", + "evolveds", "evolves", + "evolveos", "evolves", + "evovling", "evolving", + "excecute", "execute", + "excedded", "exceeded", + "excelent", "excellent", + "exceptin", "exceptions", + "excerise", "exercise", + "excisted", "existed", + "exclusie", "exclusives", + "exculded", "excluded", + "exculdes", "excludes", + "exection", "execution", + "exectued", "executed", + "executie", "executive", + "executin", "execution", + "exellent", "excellent", + "exerbate", "exacerbate", + "exercide", "exercised", + "exercies", "exercise", + "exersice", "exercise", + "exersize", "exercise", + "exhalted", "exalted", + "exhaustn", "exhaustion", + "exhausto", "exhaustion", + "exicting", "exciting", + "exisitng", "existing", + "existane", "existance", + "existant", "existent", + "existend", "existed", + "exlcuded", "excluded", + "exlcudes", "excludes", + "exlporer", "explorer", + "exoticas", "exotics", + "exoticos", "exotics", + "expalins", "explains", + "expandas", "expands", + "expandes", "expands", + "expansie", "expansive", + "expectes", "expects", + "expectus", "expects", + "expedito", "expedition", + "expences", "expense", + "expensie", "expense", + "expensve", "expense", + "expertas", "experts", + "expertis", "experts", + "expertos", "experts", + "expireds", "expires", + "explaind", "explained", + "explaing", "explaining", + "expliots", "exploits", + "explodie", "explode", + "exploint", "exploit", + "explosie", "explosive", + "explosin", "explosions", + "exploted", "explode", + "expoldes", "explodes", + "expolits", "exploits", + "exportas", "exports", + "exportes", "exports", + "exportfs", "exports", + "exposees", "exposes", + "exposito", "exposition", + "expresse", "expressive", + "expresss", "expresses", + "expressy", "expressly", + "exressed", "expressed", + "exsitent", "existent", + "exsiting", "existing", + "extactly", "exactly", + "extemely", "extremely", + "extendes", "extends", + "extendos", "extends", + "extenion", "extension", + "extensie", "extensive", + "extensis", "extensions", + "extortin", "extortion", + "extracto", "extraction", + "extreems", "extremes", + "extremly", "extremely", + "eygptian", "egyptian", + "faboulus", "fabulous", + "fabricas", "fabrics", + "fabrices", "fabrics", + "fabricus", "fabrics", + "faceplam", "facepalm", + "facilisi", "facilities", + "faciltiy", "facility", + "facsists", "fascists", + "factores", 
"factors", + "factorys", "factors", + "factualy", "factually", + "faggotts", "faggots", + "faggotus", "faggots", + "falcones", "falcons", + "falgship", "flagship", + "faliures", "failures", + "falseley", "falsely", + "falshing", "flashing", + "falvored", "flavored", + "falvours", "flavours", + "familair", "familiar", + "famoulsy", "famously", + "fanatism", "fanaticism", + "fanatsic", "fanatics", + "fanserve", "fanservice", + "fantasty", "fantasy", + "farcking", "fracking", + "fascisim", "fascism", + "fashiond", "fashioned", + "fasicsts", "fascists", + "fatigure", "fatigue", + "favorits", "favorites", + "favourie", "favourites", + "feasable", "feasible", + "feasbile", "feasible", + "febraury", "february", + "februray", "february", + "feburary", "february", + "fedility", "fidelity", + "fedorahs", "fedoras", + "fedorans", "fedoras", + "feilding", "fielding", + "feisable", "feasible", + "feitshes", "fetishes", + "feltcher", "fletcher", + "felxible", "flexible", + "feminint", "femininity", + "feminsim", "feminism", + "feromone", "pheromone", + "fesiable", "feasible", + "festivas", "festivals", + "festivle", "festive", + "fictious", "fictitious", + "fideling", "fielding", + "fideltiy", "fidelity", + "fiedling", "fielding", + "fiedlity", "fidelity", + "fighitng", "fighting", + "figthing", "fighting", + "fileding", "fielding", + "fimilies", "families", + "finacial", "financial", + "fineshes", "finesse", + "fingersi", "fingertips", + "finnisch", "finnish", + "finsihes", "finishes", + "firebals", "fireballs", + "firendly", "friendly", + "firmwear", "firmware", + "firwmare", "firmware", + "flaghsip", "flagship", + "flamable", "flammable", + "flasghip", "flagship", + "flatterd", "flattered", + "flatteur", "flatter", + "flattire", "flatter", + "flavores", "flavors", + "flechter", "fletcher", + "flecther", "fletcher", + "flemmish", "flemish", + "flethcer", "fletcher", + "flexbile", "flexible", + "flexibel", "flexible", + "flippade", "flipped", + "flitered", "filtered", + "florecen", "florence", + "floridia", "florida", + "floruide", "fluoride", + "floruish", "flourish", + "flourine", "fluorine", + "floursih", "flourish", + "fluorish", "flourish", + "fluroide", "fluoride", + "folowing", "following", + "fontrier", "fontier", + "forasken", "forsaken", + "forbiden", "forbidden", + "foreamrs", "forearms", + "foreksin", "foreskin", + "forenics", "forensic", + "forenisc", "forensic", + "foresnic", "forensic", + "foreward", "foreword", + "foricbly", "forcibly", + "forigven", "forgiven", + "formatin", "formation", + "formelly", "formerly", + "formuals", "formulas", + "fornesic", "forensic", + "forresst", "forrest", + "forsekan", "forsaken", + "forsekin", "foreskin", + "forsenic", "forensic", + "forskaen", "forsaken", + "forsting", "frosting", + "fortitue", "fortitude", + "fortunae", "fortune", + "fortunte", "fortune", + "forumlas", "formulas", + "forunner", "forerunner", + "fossiles", "fossils", + "fossilis", "fossils", + "foundary", "foundry", + "fountian", "fountain", + "fourties", "forties", + "fowrards", "forwards", + "frackign", "fracking", + "framgent", "fragment", + "franches", "franchise", + "franchie", "franchises", + "franciso", "francisco", + "frankiln", "franklin", + "franlkin", "franklin", + "freckels", "freckles", + "freindly", "friendly", + "frequeny", "frequency", + "friendle", "friendlies", + "friendsi", "friendlies", + "frimware", "firmware", + "frogiven", "forgiven", + "frointer", "frontier", + "fromerly", "formerly", + "froniter", "frontier", + "fronteir", "frontier", + "frosaken", 
"forsaken", + "frutcose", "fructose", + "fucntion", "function", + "fufilled", "fulfilled", + "fulfiled", "fulfilled", + "fullfill", "fulfill", + "funciton", "function", + "fundirse", "fundies", + "funniliy", "funnily", + "funnilly", "funnily", + "furctose", "fructose", + "furition", "fruition", + "furuther", "further", + "futurers", "futures", + "futureus", "futures", + "gamemdoe", "gamemode", + "gamepaly", "gameplay", + "gamergat", "gamertag", + "gammeode", "gamemode", + "ganerate", "generate", + "garantee", "guarantee", + "gardient", "gradient", + "garfeild", "garfield", + "garfiled", "garfield", + "garflied", "garfield", + "garnison", "garrison", + "garrions", "garrison", + "garriosn", "garrison", + "garrsion", "garrison", + "gatherig", "gatherings", + "gauarana", "guaraná", + "gauntelt", "gauntlet", + "gauntles", "gauntlets", + "gaurdian", "guardian", + "gaurding", "guarding", + "gautnlet", "gauntlet", + "gemoetry", "geometry", + "generaly", "generally", + "generase", "generates", + "generats", "generates", + "genialia", "genitalia", + "genisues", "geniuses", + "genitala", "genitalia", + "genrates", "generates", + "gentials", "genitals", + "gentlemn", "gentlemen", + "genuises", "geniuses", + "geograpy", "geography", + "geomerty", "geometry", + "geomtery", "geometry", + "germanos", "germans", + "germanus", "germans", + "gernades", "grenades", + "giagbyte", "gigabyte", + "gigabtye", "gigabyte", + "gigaybte", "gigabyte", + "gigbayte", "gigabyte", + "gignatic", "gigantic", + "giltched", "glitched", + "giltches", "glitches", + "girafffe", "giraffe", + "girefing", "griefing", + "girlling", "grilling", + "gladiatr", "gladiator", + "glichted", "glitched", + "glichtes", "glitches", + "glicthed", "glitched", + "glicthes", "glitches", + "glitchey", "glitchy", + "glitchly", "glitchy", + "glitchty", "glitchy", + "glithced", "glitched", + "glithces", "glitches", + "gloablly", "globally", + "glodberg", "goldberg", + "glodfish", "goldfish", + "gloriuos", "glorious", + "gltiched", "glitched", + "gltiches", "glitches", + "gmaertag", "gamertag", + "goblings", "goblins", + "goddammn", "goddamn", + "goddammt", "goddammit", + "godesses", "goddesses", + "godlberg", "goldberg", + "godlfish", "goldfish", + "godounov", "godunov", + "godpseed", "godspeed", + "godspede", "godspeed", + "goldifsh", "goldfish", + "gonewidl", "gonewild", + "goodlcuk", "goodluck", + "goregous", "gorgeous", + "gorgoeus", "gorgeous", + "gorillia", "gorilla", + "gorillla", "gorilla", + "gospells", "gospels", + "gottleib", "gottlieb", + "gourmelt", "gourmet", + "gourment", "gourmet", + "gouvener", "governor", + "govement", "government", + "goverend", "governed", + "govermet", "goverment", + "governer", "governor", + "gradualy", "gradually", + "grafield", "garfield", + "grafitti", "graffiti", + "grahpics", "graphics", + "grahpite", "graphite", + "graident", "gradient", + "granolla", "granola", + "graphcis", "graphics", + "grapichs", "graphics", + "grappnel", "grapple", + "greandes", "grenades", + "greatful", "grateful", + "greeneer", "greener", + "greenhoe", "greenhouse", + "greenlad", "greenland", + "greenore", "greener", + "greusome", "gruesome", + "grieifng", "griefing", + "grifeing", "griefing", + "grizzlay", "grizzly", + "grizzley", "grizzly", + "grpahics", "graphics", + "grpahite", "graphite", + "gruseome", "gruesome", + "guantano", "guantanamo", + "guardain", "guardian", + "guardias", "guardians", + "guaridan", "guardian", + "guerrila", "guerrilla", + "guidence", "guidance", + "guiseppe", "giuseppe", + "guitards", "guitars", + 
"guitares", "guitars", + "guitarit", "guitarist", + "gullbile", "gullible", + "gunanine", "guanine", + "guniness", "guinness", + "gunniess", "guinness", + "guradian", "guardian", + "gurading", "guarding", + "gurantee", "guarantee", + "guresome", "gruesome", + "guttaral", "guttural", + "gutteral", "guttural", + "hacthing", "hatching", + "hafltime", "halftime", + "haircuit", "haircut", + "halfitme", "halftime", + "hallowen", "halloween", + "hamburgr", "hamburgers", + "hamitlon", "hamilton", + "hamliton", "hamilton", + "handcufs", "handcuffs", + "handeldy", "handedly", + "handlade", "handled", + "handlare", "handler", + "handledy", "handedly", + "hannbial", "hannibal", + "haording", "hoarding", + "hapening", "happening", + "happends", "happens", + "happenes", "happens", + "happilly", "happily", + "harldine", "hardline", + "harrased", "harassed", + "harrases", "harasses", + "hatchign", "hatching", + "hatesink", "heatsink", + "hathcing", "hatching", + "headachs", "headaches", + "headests", "headsets", + "headhsot", "headshot", + "headseat", "headset", + "healthit", "healthiest", + "heastink", "heatsink", + "heathern", "heathen", + "heatskin", "heatsink", + "heaviliy", "heavily", + "heavilly", "heavily", + "heavnely", "heavenly", + "hedeghog", "hedgehog", + "hegdehog", "hedgehog", + "heighest", "heights", + "heighted", "heightened", + "heirachy", "hierarchy", + "heistant", "hesitant", + "heistate", "hesitate", + "hellifre", "hellfire", + "helluvva", "helluva", + "helpfull", "helpful", + "heratige", "heritage", + "herclues", "hercules", + "heridity", "heredity", + "heroicas", "heroics", + "heroices", "heroics", + "heroicos", "heroics", + "heroicus", "heroics", + "hertiage", "heritage", + "herucles", "hercules", + "hestiant", "hesitant", + "hestiate", "hesitate", + "heveanly", "heavenly", + "hierachy", "hierarchy", + "hierarcy", "hierarchy", + "highlane", "highlander", + "hindiusm", "hinduism", + "hindusim", "hinduism", + "hinudism", "hinduism", + "hiptsers", "hipsters", + "hispanis", "hispanics", + "hispters", "hipsters", + "histroic", "historic", + "hodlings", "holdings", + "hoenstly", "honestly", + "hoildays", "holidays", + "holdiays", "holidays", + "hollywod", "hollywood", + "homeword", "homeworld", + "homineim", "hominem", + "homineum", "hominem", + "honeslty", "honestly", + "honeymon", "honeymoon", + "honsetly", "honestly", + "hopefuly", "hopefully", + "hopkings", "hopkins", + "hopsital", "hospital", + "horading", "hoarding", + "horzions", "horizons", + "hosptial", "hospital", + "hosteles", "hostels", + "hostiliy", "hostility", + "hotshoot", "hotshot", + "hotsport", "hotspot", + "hsyteria", "hysteria", + "htaching", "hatching", + "htiboxes", "hitboxes", + "huanting", "haunting", + "humaniod", "humanoid", + "humanite", "humanities", + "humantiy", "humanity", + "humerous", "humorous", + "huminoid", "humanoid", + "humitidy", "humidity", + "humoural", "humoral", + "humouros", "humorous", + "humurous", "humorous", + "hunderds", "hundreds", + "hundread", "hundred", + "hungarin", "hungarian", + "huntmsan", "huntsman", + "hutnsman", "huntsman", + "hybrides", "hybrids", + "hybridus", "hybrids", + "hydorgen", "hydrogen", + "hydratin", "hydration", + "hydregon", "hydrogen", + "hygience", "hygiene", + "hygienne", "hygiene", + "hyperbel", "hyperbole", + "hypocrit", "hypocrite", + "hyponsis", "hypnosis", + "hyrdogen", "hydrogen", + "icefrong", "icefrog", + "icelings", "ceilings", + "idaeidae", "idea", + "idealogy", "ideology", + "idealsim", "idealism", + "idenfity", "identify", + "idenitfy", "identify", + 
"identite", "identities", + "ideologe", "ideologies", + "illiegal", "illegal", + "illinios", "illinois", + "illionis", "illinois", + "illnesss", "illnesses", + "illumini", "illuminati", + "illustre", "illustrate", + "illution", "illusion", + "ilogical", "illogical", + "ilterate", "literate", + "imapired", "impaired", + "imgrants", "migrants", + "imigrant", "emigrant", + "immboile", "immobile", + "immenint", "imminent", + "immersie", "immerse", + "immersve", "immerse", + "immitate", "imitate", + "immoblie", "immobile", + "immortas", "immortals", + "impactes", "impacts", + "impactos", "impacts", + "imparied", "impaired", + "imperavi", "imperative", + "imperfet", "imperfect", + "implemet", "implements", + "implosed", "implode", + "impluses", "impulses", + "imporper", "improper", + "importas", "imports", + "importen", "importance", + "importes", "imports", + "imporved", "improved", + "imporves", "improves", + "impropre", "improper", + "improted", "imported", + "improvie", "improvised", + "impusles", "impulses", + "imrpoved", "improved", + "imrpoves", "improves", + "inbetwen", "inbetween", + "inclince", "incline", + "inclinde", "incline", + "includng", "including", + "incorect", "incorrect", + "incuding", "including", + "inculded", "included", + "indianas", "indians", + "indiands", "indians", + "indiania", "indiana", + "indianna", "indiana", + "indianos", "indians", + "indicato", "indication", + "indicats", "indicators", + "indonesa", "indonesia", + "indulgue", "indulge", + "infantis", "infants", + "infantus", "infants", + "infarred", "infrared", + "infectin", "infections", + "infermon", "inferno", + "infiltre", "infiltrate", + "infintie", "infinite", + "infintiy", "infinity", + "inflatie", "inflate", + "influens", "influences", + "informas", "informs", + "informis", "informs", + "infromal", "informal", + "infromed", "informed", + "ingenius", "ingenious", + "ingition", "ignition", + "ingorant", "ignorant", + "inheriet", "inherit", + "inherint", "inherit", + "inhumaan", "inhuman", + "inhumain", "inhuman", + "inifnite", "infinite", + "inifnity", "infinity", + "inisghts", "insights", + "initails", "initials", + "initaite", "initiate", + "initaled", "initialed", + "initally", "initially", + "initialy", "initially", + "initmacy", "intimacy", + "initmate", "intimate", + "injustie", "injustices", + "inlcuded", "included", + "inlcudes", "includes", + "innocens", "innocents", + "innocuos", "innocuous", + "innvoate", "innovate", + "inocence", "innocence", + "inpolite", "impolite", + "inpsired", "inspired", + "inquirey", "inquiry", + "inquirie", "inquire", + "inquiriy", "inquiry", + "inrested", "inserted", + "insanley", "insanely", + "insectes", "insects", + "insectos", "insects", + "insertas", "inserts", + "insertes", "inserts", + "insertos", "inserts", + "insidios", "insidious", + "insigths", "insights", + "insipred", "inspired", + "insipres", "inspires", + "insistas", "insists", + "insistes", "insists", + "insistis", "insists", + "insmonia", "insomnia", + "insomina", "insomnia", + "insonmia", "insomnia", + "inspried", "inspired", + "inspries", "inspires", + "instanse", "instances", + "instanty", "instantly", + "instered", "inserted", + "insticnt", "instinct", + "instincs", "instincts", + "institue", "institute", + "insultas", "insults", + "insultes", "insults", + "insultos", "insults", + "intamicy", "intimacy", + "intamite", "intimate", + "intendes", "intends", + "intendos", "intends", + "intentas", "intents", + "intented", "intended", + "interace", "interacted", + "interacs", "interacts", + 
"interect", "interacted", + "interent", "internet", + "interese", "interested", + "interfce", "interface", + "intergal", "integral", + "internts", "interns", + "internus", "interns", + "interpet", "interpret", + "interrim", "interim", + "interste", "interstate", + "interupt", "interrupt", + "intevene", "intervene", + "intially", "initially", + "intiials", "initials", + "intimaty", "intimately", + "intimide", "intimidate", + "intregal", "integral", + "intriuge", "intrigue", + "introdue", "introduces", + "introdus", "introduces", + "introvet", "introvert", + "intruige", "intrigue", + "intutive", "intuitive", + "inudstry", "industry", + "inventer", "inventor", + "invertes", "inverse", + "invincil", "invincible", + "invitato", "invitation", + "invloved", "involved", + "invloves", "involves", + "invovled", "involved", + "invovles", "involves", + "iranains", "iranians", + "iraninas", "iranians", + "iritable", "irritable", + "iritated", "irritated", + "ironicly", "ironically", + "irritato", "irritation", + "isalmist", "islamist", + "isarelis", "israelis", + "islamits", "islamist", + "islamsit", "islamist", + "islandes", "islanders", + "ismalist", "islamist", + "isntalls", "installs", + "isolatie", "isolate", + "israelli", "israeli", + "israleis", "israelis", + "isralies", "israelis", + "isrealis", "israelis", + "issueing", "issuing", + "italains", "italians", + "jaguards", "jaguars", + "jaguares", "jaguars", + "jailbrek", "jailbreak", + "jaimacan", "jamaican", + "jamacain", "jamaican", + "jamaicia", "jamaica", + "jamiacan", "jamaican", + "januaray", "january", + "janurary", "january", + "jeapardy", "jeopardy", + "jefferry", "jeffery", + "jefferty", "jeffery", + "jennigns", "jennings", + "jeoprady", "jeopardy", + "jepoardy", "jeopardy", + "jerusalm", "jerusalem", + "jewelrey", "jewelry", + "jewllery", "jewellery", + "joanthan", "jonathan", + "joepardy", "jeopardy", + "johanine", "johannine", + "jonatahn", "jonathan", + "journaal", "journal", + "journied", "journeyed", + "journies", "journeys", + "joysitck", "joystick", + "juadaism", "judaism", + "judaisim", "judaism", + "judgemet", "judgements", + "juducial", "judicial", + "jugnling", "jungling", + "junglign", "jungling", + "junlging", "jungling", + "justifiy", "justify", + "juveline", "juvenile", + "juvenlie", "juvenile", + "katemine", "ketamine", + "kennedey", "kennedy", + "ketmaine", "ketamine", + "keybaord", "keyboard", + "keyboars", "keyboards", + "keyborad", "keyboard", + "keychian", "keychain", + "kicthens", "kitchens", + "kindgoms", "kingdoms", + "kittiens", "kitties", + "knockbak", "knockback", + "knowlege", "knowledge", + "knuckels", "knuckles", + "koreanos", "koreans", + "kunckles", "knuckles", + "kurdisch", "kurdish", + "labatory", "lavatory", + "labenese", "lebanese", + "laboraty", "laboratory", + "laguages", "languages", + "landscae", "landscapes", + "langauge", "language", + "lanucher", "launcher", + "lanuches", "launches", + "laodouts", "loadouts", + "larwence", "lawrence", + "lasagnea", "lasagna", + "lasagnia", "lasagna", + "laucnhed", "launched", + "laucnher", "launcher", + "laucnhes", "launches", + "laundrey", "laundry", + "lawernce", "lawrence", + "lazyness", "laziness", + "leaglize", "legalize", + "lecteurs", "lectures", + "lecutres", "lectures", + "lefitsts", "leftists", + "leftsits", "leftists", + "legenday", "legendary", + "legionis", "legions", + "legitimt", "legitimate", + "lengthes", "lengths", + "lengthly", "lengthy", + "lentiles", "lentils", + "lentills", "lentils", + "lesbains", "lesbians", + "lesibans", "lesbians", + 
"levander", "lavender", + "levelign", "leveling", + "levetate", "levitate", + "leviathn", "leviathan", + "levleing", "leveling", + "liberato", "liberation", + "libertae", "liberate", + "libertea", "liberate", + "librarse", "libraries", + "licencie", "licence", + "licencse", "licence", + "liebrals", "liberals", + "liekable", "likeable", + "lifepsan", "lifespan", + "lifestel", "lifesteal", + "lifestye", "lifestyle", + "lighitng", "lighting", + "lightnig", "lightning", + "lightres", "lighters", + "lightrom", "lightroom", + "ligthers", "lighters", + "ligthing", "lighting", + "likebale", "likeable", + "limitant", "militant", + "limitato", "limitation", + "lincolin", "lincoln", + "lincolon", "lincoln", + "lineupes", "lineups", + "lingeire", "lingerie", + "lingiere", "lingerie", + "linnaena", "linnaean", + "lipstics", "lipsticks", + "liquidas", "liquids", + "liquides", "liquids", + "liquidos", "liquids", + "liscense", "license", + "lisenced", "silenced", + "listenes", "listens", + "listents", "listens", + "listners", "listeners", + "litature", "literature", + "litecion", "litecoin", + "liteicon", "litecoin", + "literaly", "literally", + "lithuana", "lithuania", + "litigato", "litigation", + "liverpol", "liverpool", + "locagion", "location", + "logtiech", "logitech", + "longitme", "longtime", + "longtiem", "longtime", + "looseley", "loosely", + "loreplay", "roleplay", + "luanched", "launched", + "luancher", "launcher", + "luanches", "launches", + "lubricat", "lubricant", + "lucifear", "lucifer", + "luckilly", "luckily", + "macarino", "macaroni", + "machiens", "machines", + "mackeral", "mackerel", + "macthups", "matchups", + "magasine", "magazine", + "magazins", "magazines", + "magentic", "magnetic", + "magicain", "magician", + "magisine", "magazine", + "magizine", "magazine", + "magnetis", "magnets", + "magnited", "magnitude", + "magnitue", "magnitude", + "mainfest", "manifest", + "maintian", "maintain", + "majoroty", "majority", + "makrsman", "marksman", + "malariya", "malaria", + "malasiya", "malaysia", + "malasyia", "malaysia", + "malayisa", "malaysia", + "malyasia", "malaysia", + "mamalian", "mammalian", + "manadrin", "mandarin", + "manaully", "manually", + "mandaste", "mandates", + "mandrain", "mandarin", + "mandrian", "mandarin", + "maneveur", "maneuver", + "manevuer", "maneuver", + "manfiest", "manifest", + "mangetic", "magnetic", + "manglade", "mangled", + "manifeso", "manifesto", + "manipule", "manipulate", + "manouver", "maneuver", + "manuales", "manuals", + "manuever", "maneuver", + "maraconi", "macaroni", + "maradeur", "marauder", + "maraduer", "marauder", + "maragret", "margaret", + "marbleds", "marbles", + "margerat", "margaret", + "margines", "margins", + "margings", "margins", + "marginis", "margins", + "marignal", "marginal", + "marilyin", "marilyn", + "marinens", "marines", + "markedet", "marketed", + "markeras", "markers", + "markerts", "markers", + "marniers", "mariners", + "marraige", "marriage", + "marryied", "married", + "marskman", "marksman", + "maruader", "marauder", + "marvelos", "marvelous", + "marxisim", "marxism", + "mascarra", "mascara", + "massacer", "massacre", + "massarce", "massacre", + "massasge", "massages", + "masscare", "massacre", + "masteris", "masteries", + "masturbe", "masturbate", + "materias", "materials", + "mathcups", "matchups", + "mathewes", "mathews", + "matieral", "material", + "matterss", "mattress", + "mauarder", "marauder", + "maximini", "maximizing", + "mayalsia", "malaysia", + "maybelle", "maybelline", + "maylasia", "malaysia", + "mccarhty", 
"mccarthy", + "mcgergor", "mcgregor", + "mchanics", "mechanics", + "mclarean", "mclaren", + "mcreggor", "mcgregor", + "meagtron", "megatron", + "meancing", "menacing", + "meaninng", "meaning", + "meatbals", "meatballs", + "mecahnic", "mechanic", + "mechanim", "mechanism", + "mechanis", "mechanics", + "medacine", "medicine", + "medatite", "meditate", + "medeival", "medieval", + "medevial", "medieval", + "mediavel", "medieval", + "medicaly", "medically", + "mediciad", "medicaid", + "medicins", "medicines", + "medicore", "mediocre", + "medievel", "medieval", + "mediocer", "mediocre", + "mediocry", "mediocrity", + "mediorce", "mediocre", + "meditato", "meditation", + "mediveal", "medieval", + "medoicre", "mediocre", + "meerkrat", "meerkat", + "megatorn", "megatron", + "meidcare", "medicare", + "meixcans", "mexicans", + "melboure", "melbourne", + "meltodwn", "meltdown", + "memoriez", "memorize", + "mencaing", "menacing", + "menstrul", "menstrual", + "mentiong", "mentioning", + "meoldies", "melodies", + "merchans", "merchants", + "mercurcy", "mercury", + "mercurey", "mercury", + "merficul", "merciful", + "merhcant", "merchant", + "mericful", "merciful", + "messgaed", "messaged", + "messiach", "messiah", + "metagaem", "metagame", + "metahpor", "metaphor", + "metamage", "metagame", + "methapor", "metaphor", + "metldown", "meltdown", + "metricas", "metrics", + "metrices", "metrics", + "metropos", "metropolis", + "mexcians", "mexicans", + "mexicain", "mexican", + "mhytical", "mythical", + "michagan", "michigan", + "michgian", "michigan", + "microtax", "microatx", + "microwae", "microwaves", + "midfeild", "midfield", + "midfiled", "midfield", + "midifeld", "midfield", + "migrains", "migraines", + "migriane", "migraine", + "milennia", "millennia", + "miligram", "milligram", + "miliitas", "militias", + "miliraty", "military", + "militais", "militias", + "millenia", "millennia", + "millenna", "millennia", + "miltiant", "militant", + "minature", "miniature", + "mindcrak", "mindcrack", + "minerial", "mineral", + "mingiame", "minigame", + "minimage", "minigame", + "minimals", "minimalist", + "minimalt", "minimalist", + "minimini", "minimizing", + "minimium", "minimum", + "miniscue", "miniscule", + "minsiter", "minister", + "minsitry", "ministry", + "miraculu", "miraculous", + "miralces", "miracles", + "mircales", "miracles", + "mircoatx", "microatx", + "mirgaine", "migraine", + "mirorred", "mirrored", + "misnadry", "misandry", + "misogynt", "misogynist", + "missigno", "mission", + "missiony", "missionary", + "misslies", "missiles", + "missorui", "missouri", + "misspeld", "misspelled", + "mistakey", "mistakenly", + "mistread", "mistreated", + "mobiltiy", "mobility", + "moderats", "moderates", + "modulair", "modular", + "moleculs", "molecules", + "momentos", "moments", + "momentus", "moments", + "monagomy", "monogamy", + "mongoles", "mongols", + "mongolos", "mongols", + "monitord", "monitored", + "monogmay", "monogamy", + "monolite", "monolithic", + "monologe", "monologue", + "monolopy", "monopoly", + "monoploy", "monopoly", + "monopols", "monopolies", + "monrachy", "monarchy", + "monstros", "monstrous", + "montaban", "montana", + "montains", "mountains", + "montanha", "montana", + "montania", "montana", + "montanna", "montana", + "montanta", "montana", + "montanya", "montana", + "montaran", "montana", + "monteize", "monetize", + "monteral", "montreal", + "montiors", "monitors", + "montnana", "montana", + "montypic", "monotypic", + "monumnet", "monument", + "moonligt", "moonlight", + "moprhine", "morphine", 
+ "morbildy", "morbidly", + "mordibly", "morbidly", + "morevoer", "moreover", + "morhpine", "morphine", + "moribdly", "morbidly", + "mormones", "mormons", + "mormonts", "mormons", + "moroever", "moreover", + "morotola", "motorola", + "morphein", "morphine", + "morriosn", "morrison", + "morrocco", "morocco", + "morrsion", "morrison", + "mortards", "mortars", + "mortarts", "mortars", + "moruning", "mourning", + "mosnters", "monsters", + "mosqueto", "mosquitoes", + "mosquite", "mosquitoes", + "mosqutio", "mosquito", + "motoroal", "motorola", + "mounment", "monument", + "mounring", "mourning", + "mountian", "mountain", + "moustace", "moustache", + "movesped", "movespeed", + "mozillia", "mozilla", + "mozillla", "mozilla", + "msytical", "mystical", + "mucnhies", "munchies", + "mudering", "murdering", + "muffings", "muffins", + "muffinus", "muffins", + "mulitple", "multiple", + "mulitply", "multiply", + "multiplr", "multiplier", + "multipls", "multiples", + "mundance", "mundane", + "mundande", "mundane", + "muniches", "munchies", + "murderes", "murders", + "murderus", "murders", + "muscluar", "muscular", + "muscualr", "muscular", + "musicaly", "musically", + "musuclar", "muscular", + "mutliple", "multiple", + "mutliply", "multiply", + "myhtical", "mythical", + "mysitcal", "mystical", + "mysogyny", "misogyny", + "mysteris", "mysteries", + "mythraic", "mithraic", + "nagivate", "navigate", + "naopleon", "napoleon", + "napcakes", "pancakes", + "naploeon", "napoleon", + "napoelon", "napoleon", + "napolean", "napoleon", + "napoloen", "napoleon", + "narcissm", "narcissism", + "narcisst", "narcissist", + "narcotis", "narcotics", + "narwharl", "narwhal", + "naseuous", "nauseous", + "nashvile", "nashville", + "nasueous", "nauseous", + "natievly", "natively", + "nationas", "nationals", + "nationsl", "nationals", + "nativley", "natively", + "natuilus", "nautilus", + "naturaly", "naturally", + "naturels", "natures", + "naturely", "naturally", + "naturens", "natures", + "naturual", "natural", + "nauesous", "nauseous", + "naughtly", "naughty", + "nauitlus", "nautilus", + "nauseuos", "nauseous", + "nautiuls", "nautilus", + "nautlius", "nautilus", + "nautulis", "nautilus", + "naviagte", "navigate", + "navigato", "navigation", + "nazereth", "nazareth", + "necesary", "necessary", + "neckbead", "neckbeard", + "needlees", "needles", + "nefarios", "nefarious", + "negativy", "negativity", + "neglectn", "neglecting", + "neglible", "negligible", + "neigbour", "neighbour", + "neolitic", "neolithic", + "netboook", "netbook", + "neuronas", "neurons", + "neutraal", "neutral", + "neutralt", "neutrality", + "neutraly", "neutrality", + "newcaste", "newcastle", + "nickanme", "nickname", + "nickmane", "nickname", + "nieghbor", "neighbor", + "nightime", "nighttime", + "nightley", "nightly", + "nightlie", "nightlife", + "nihilsim", "nihilism", + "nilihism", "nihilism", + "nirtogen", "nitrogen", + "nirvanna", "nirvana", + "nitorgen", "nitrogen", + "niusance", "nuisance", + "noctrune", "nocturne", + "noctunre", "nocturne", + "nocturen", "nocturne", + "nominato", "nomination", + "nonsence", "nonsense", + "nonsesne", "nonsense", + "noramlly", "normally", + "norhtern", "northern", + "normalis", "normals", + "normalls", "normals", + "normalos", "normals", + "northeat", "northeast", + "northren", "northern", + "northwet", "northwest", + "norwegin", "norwegian", + "nostalga", "nostalgia", + "nostirls", "nostrils", + "notabley", "notably", + "notablly", "notably", + "noteable", "notable", + "noteably", "notably", + "noticabe", "noticable", 
+ "notorios", "notorious", + "novmeber", "november", + "nromandy", "normandy", + "nuatilus", "nautilus", + "nuculear", "nuclear", + "nuetered", "neutered", + "nuisanse", "nuisance", + "nullifiy", "nullify", + "nurtient", "nutrient", + "nusaince", "nuisance", + "nusiance", "nuisance", + "nutirent", "nutrient", + "nutriens", "nutrients", + "nuturing", "nurturing", + "obdisian", "obsidian", + "obediant", "obedient", + "obession", "obsession", + "obilvion", "oblivion", + "obisdian", "obsidian", + "obsessie", "obsessive", + "obsessin", "obsession", + "obsidain", "obsidian", + "obstacal", "obstacle", + "obvilion", "oblivion", + "ocasions", "occasions", + "ocassion", "occasion", + "occaison", "occasion", + "occupato", "occupation", + "occuring", "occurring", + "octobear", "october", + "octopuns", "octopus", + "ofcoruse", "ofcourse", + "ofcoures", "ofcourse", + "ofcousre", "ofcourse", + "ofcrouse", "ofcourse", + "officals", "officials", + "officaly", "officially", + "offsited", "offside", + "ofocurse", "ofcourse", + "oligarcy", "oligarchy", + "olmypics", "olympics", + "olymipcs", "olympics", + "olypmics", "olympics", + "ommision", "omission", + "ommiting", "omitting", + "ommitted", "omitted", + "ongewild", "gonewild", + "onslaugt", "onslaught", + "operatie", "operative", + "opinoins", "opinions", + "oppinion", "opinion", + "opponant", "opponent", + "opposits", "opposites", + "oppossed", "opposed", + "oppresso", "oppression", + "optimaal", "optimal", + "optomism", "optimism", + "oragnise", "organise", + "orangerd", "orangered", + "orangers", "oranges", + "orangism", "organism", + "orchesta", "orchestra", + "ordianry", "ordinary", + "oreintal", "oriental", + "orgainse", "organise", + "orgainze", "organize", + "organims", "organism", + "organsie", "organise", + "organsim", "organism", + "organzie", "organize", + "orgasmes", "orgasms", + "orgasmos", "orgasms", + "orgasmus", "orgasms", + "orginize", "organise", + "orhtodox", "orthodox", + "oridnary", "ordinary", + "originas", "origins", + "origines", "origins", + "originsl", "originals", + "orphanes", "orphans", + "osbidian", "obsidian", + "othrodox", "orthodox", + "ourselvs", "ourselves", + "oustider", "outsider", + "outfeild", "outfield", + "outfidel", "outfield", + "outfiled", "outfield", + "outisder", "outsider", + "outplayd", "outplayed", + "outputed", "outputted", + "outsoure", "outsourced", + "overboad", "overboard", + "overclok", "overclock", + "overdrev", "overdrive", + "overhual", "overhaul", + "overlaod", "overload", + "overpiad", "overpaid", + "overules", "overuse", + "overwath", "overwatch", + "overwhem", "overwhelm", + "oximoron", "oxymoron", + "oylmpics", "olympics", + "pacakged", "packaged", + "packadge", "packaged", + "paficist", "pacifist", + "painfuly", "painfully", + "paitence", "patience", + "paitents", "patients", + "palidans", "paladins", + "palstics", "plastics", + "paltform", "platform", + "paltinum", "platinum", + "palyable", "playable", + "palyoffs", "playoffs", + "pancaeks", "pancakes", + "panckaes", "pancakes", + "pandoria", "pandora", + "pandorra", "pandora", + "panedmic", "pandemic", + "panethon", "pantheon", + "pankaces", "pancakes", + "panmedic", "pandemic", + "pantehon", "pantheon", + "panthoen", "pantheon", + "paradies", "paradise", + "paradyse", "parades", + "paragrah", "paragraph", + "paraiste", "parasite", + "paralell", "parallel", + "paralely", "parallelly", + "paralles", "parallels", + "parameds", "paramedics", + "paramter", "parameter", + "paranioa", "paranoia", + "paraniod", "paranoid", + "paraside", 
"paradise", + "parasits", "parasites", + "parastie", "parasite", + "parctise", "practise", + "paremsan", "parmesan", + "paristan", "partisan", + "parmasen", "parmesan", + "parmenas", "parmesan", + "parmsean", "parmesan", + "parnters", "partners", + "parralel", "parallel", + "parterns", "partners", + "partialy", "partially", + "partians", "partisan", + "partical", "particular", + "particel", "particle", + "partiets", "parties", + "partiots", "patriots", + "partnerd", "partnered", + "partsian", "partisan", + "passabel", "passable", + "passione", "passionate", + "passisve", "passives", + "passpost", "passports", + "passvies", "passives", + "passwors", "passwords", + "pasttime", "pastime", + "pastural", "pastoral", + "pateince", "patience", + "pateints", "patients", + "patethic", "pathetic", + "patheitc", "pathetic", + "patienty", "patiently", + "patirots", "patriots", + "patriarh", "patriarchy", + "patroits", "patriots", + "patrolls", "patrols", + "patronas", "patrons", + "patrones", "patrons", + "patronis", "patrons", + "patronos", "patrons", + "pattened", "patented", + "patterno", "patterson", + "pattersn", "patterson", + "pblisher", "publisher", + "peageant", "pageant", + "pebbleos", "pebbles", + "pebblers", "pebbles", + "pebblets", "pebbles", + "peciluar", "peculiar", + "pecuilar", "peculiar", + "peculair", "peculiar", + "peculure", "peculiar", + "peformed", "performed", + "peircing", "piercing", + "penaltis", "penalties", + "penatgon", "pentagon", + "penciles", "pencils", + "pendatic", "pedantic", + "pengiuns", "penguins", + "penisula", "peninsula", + "pensioen", "pension", + "pepperin", "pepperoni", + "perceded", "preceded", + "percente", "percentile", + "percieve", "perceive", + "percious", "precious", + "perclude", "preclude", + "perfecty", "perfectly", + "perfroms", "performs", + "perheaps", "perhaps", + "pericing", "piercing", + "peridoic", "periodic", + "perimetr", "perimeter", + "periodes", "periods", + "periodos", "periods", + "permanet", "permanent", + "permiere", "premiere", + "permises", "premises", + "permitas", "permits", + "permites", "permits", + "permitis", "permits", + "permitts", "permits", + "permiums", "premiums", + "peroidic", "periodic", + "perosnas", "personas", + "perpetue", "perpetuate", + "persaude", "persuade", + "perserve", "preserve", + "persisit", "persist", + "personel", "personnel", + "persones", "persons", + "personis", "persons", + "personsa", "personas", + "perstige", "prestige", + "persuaso", "persuasion", + "persuded", "persuaded", + "persuing", "pursuing", + "persuits", "pursuits", + "persumed", "presumed", + "pertaing", "pertaining", + "pertians", "pertains", + "pertinet", "pertinent", + "pervents", "prevents", + "perverst", "pervert", + "perviews", "previews", + "pervious", "previous", + "perxoide", "peroxide", + "pessiary", "pessary", + "petetion", "petition", + "petrolem", "petroleum", + "phantoom", "phantom", + "pharamcy", "pharmacy", + "pharmacs", "pharmacist", + "pharmsci", "pharmacist", + "phenomon", "phenomenon", + "phramacy", "pharmacy", + "phsyical", "physical", + "phsyique", "physique", + "phyiscal", "physical", + "phyisque", "physique", + "physcial", "physical", + "physicis", "physicians", + "physicks", "physics", + "physicts", "physicist", + "physqiue", "physique", + "picthers", "pitchers", + "pillards", "pillars", + "pillaris", "pillars", + "pinancle", "pinnacle", + "pinapple", "pineapple", + "pinnalce", "pinnacle", + "pinnaple", "pineapple", + "pinncale", "pinnacle", + "pinpiont", "pinpoint", + "pinteret", "pinterest", + "piolting", 
"piloting", + "pioneeer", "pioneer", + "pithcers", "pitchers", + "placebro", "placebo", + "placemet", "placements", + "planetas", "planets", + "planetos", "planets", + "plantiff", "plaintiff", + "plantium", "platinum", + "plasitcs", "plastics", + "platfrom", "platform", + "platimun", "platinum", + "platnium", "platinum", + "platnuim", "platinum", + "plausibe", "plausible", + "playbody", "playboy", + "playstye", "playstyle", + "pleasent", "pleasant", + "plehtora", "plethora", + "pleothra", "plethora", + "plethroa", "plethora", + "ploygamy", "polygamy", + "pnatheon", "pantheon", + "poeoples", "peoples", + "poingant", "poignant", + "pointeur", "pointer", + "pointure", "pointer", + "poisones", "poisons", + "poisonis", "poisons", + "poisonos", "poisons", + "poisonus", "poisons", + "polgyamy", "polygamy", + "polietly", "politely", + "politing", "piloting", + "politley", "politely", + "poltical", "political", + "poluting", "polluting", + "polution", "pollution", + "polygoon", "polygon", + "polymore", "polymer", + "pomotion", "promotion", + "popoulus", "populous", + "populair", "popular", + "populare", "popular", + "populary", "popularity", + "porcelan", "porcelain", + "porposes", "proposes", + "portabel", "portable", + "portalis", "portals", + "portalus", "portals", + "portayed", "portrayed", + "portgual", "portugal", + "portrais", "portraits", + "portrary", "portray", + "portrayl", "portrayal", + "portriat", "portrait", + "posessed", "possessed", + "posesses", "possesses", + "posioned", "poisoned", + "positivs", "positives", + "positivy", "positivity", + "possable", "possible", + "possably", "possibly", + "possbily", "possibly", + "posseses", "possesses", + "possesse", "possessive", + "possesss", "possesses", + "potrayed", "portrayed", + "poverful", "powerful", + "powerded", "powdered", + "powerpot", "powerpoint", + "pracitse", "practise", + "practial", "practical", + "practies", "practise", + "pratcise", "practise", + "praticle", "particle", + "prceeded", "preceded", + "preadtor", "predator", + "preample", "preamble", + "preceeds", "precedes", + "precisie", "precise", + "precisly", "precisely", + "precisou", "precious", + "preculde", "preclude", + "predicat", "predict", + "predicte", "predictive", + "preferas", "prefers", + "prefered", "preferred", + "preferes", "prefers", + "preferis", "prefers", + "preferrs", "prefers", + "preimere", "premiere", + "preimums", "premiums", + "preiodic", "periodic", + "preivews", "previews", + "prejudis", "prejudices", + "prelayed", "replayed", + "premeire", "premiere", + "premesis", "premises", + "premiare", "premier", + "premines", "premise", + "premuims", "premiums", + "preorded", "preordered", + "preordes", "preorders", + "preoxide", "peroxide", + "prepaird", "prepaid", + "preqeuls", "prequels", + "prequles", "prequels", + "prescrie", "prescribed", + "presense", "presence", + "presenst", "presets", + "presidet", "presidents", + "presists", "persists", + "presitge", "prestige", + "presonas", "personas", + "presuade", "persuade", + "pretador", "predator", + "pretains", "pertains", + "preveiws", "previews", + "preverse", "perverse", + "previwes", "previews", + "pricipal", "principal", + "priciple", "principle", + "priemere", "premiere", + "priestes", "priests", + "primaris", "primaries", + "primarly", "primarily", + "princila", "principals", + "principl", "principals", + "prisitne", "pristine", + "probelms", "problems", + "probleem", "problem", + "procalim", "proclaim", + "proccess", "process", + "proceded", "proceeded", + "proceder", "procedure", + "procedes", 
"proceeds", + "procedue", "procedure", + "proceeed", "proceed", + "procesed", "proceeds", + "processs", "processes", + "proclami", "proclaim", + "procliam", "proclaim", + "procotol", "protocol", + "prodcuts", "products", + "producto", "production", + "profesor", "professor", + "proficit", "proficient", + "profilic", "prolific", + "progroms", "pogroms", + "prohibis", "prohibits", + "prohpecy", "prophecy", + "prohpets", "prophets", + "projecte", "projectile", + "projecto", "projection", + "prolouge", "prologue", + "promplty", "promptly", + "promptes", "prompts", + "promptus", "prompts", + "promtply", "promptly", + "pronoune", "pronounced", + "propechy", "prophecy", + "propehcy", "prophecy", + "propehts", "prophets", + "prophacy", "prophecy", + "propmted", "prompted", + "propmtly", "promptly", + "proponet", "proponents", + "proposse", "proposes", + "proposte", "propose", + "proprety", "property", + "propsect", "prospect", + "prosepct", "prospect", + "prostite", "prostitute", + "protable", "portable", + "protecte", "protective", + "protiens", "proteins", + "protines", "proteins", + "protocal", "protocol", + "prototye", "prototype", + "protrait", "portrait", + "protrays", "portrays", + "protugal", "portugal", + "proverai", "proverbial", + "providee", "providence", + "proximty", "proximity", + "pruchase", "purchase", + "pryamids", "pyramids", + "ptichers", "pitchers", + "pubisher", "publisher", + "publiser", "publisher", + "puinsher", "punisher", + "pulisher", "publisher", + "pumkpins", "pumpkins", + "pumpinks", "pumpkins", + "pumpknis", "pumpkins", + "punshier", "punisher", + "punsiher", "punisher", + "punsihes", "punishes", + "purcahse", "purchase", + "pyramind", "pyramid", + "pyrimads", "pyramids", + "pyrmaids", "pyramids", + "qauntity", "quantity", + "qualifiy", "qualify", + "quanitfy", "quantify", + "quantaty", "quantity", + "quantite", "quantities", + "quantuum", "quantum", + "quarante", "quarantine", + "quartery", "quarterly", + "qucikest", "quickest", + "queation", "equation", + "quention", "quentin", + "quickets", "quickest", + "quicklyu", "quickly", + "rabbitos", "rabbits", + "rabbitts", "rabbits", + "racistas", "racists", + "racistes", "racists", + "radaince", "radiance", + "rahpsody", "rhapsody", + "raidance", "radiance", + "railraod", "railroad", + "randomes", "randoms", + "randomez", "randomized", + "randomns", "randoms", + "randomrs", "randoms", + "randomus", "randoms", + "raosting", "roasting", + "raphsody", "rhapsody", + "raptores", "raptors", + "raspbery", "raspberry", + "rationel", "rationale", + "realible", "reliable", + "realibly", "reliably", + "realiest", "earliest", + "realisim", "realism", + "realisme", "realise", + "realistc", "realistic", + "realiste", "realise", + "realoded", "reloaded", + "realsied", "realised", + "realtion", "relation", + "realtive", "relative", + "reamined", "remained", + "reapired", "repaired", + "reaplugs", "earplugs", + "reaserch", "research", + "reasonal", "reasonably", + "reatiler", "retailer", + "reaveled", "revealed", + "rebellis", "rebellious", + "reboudns", "rebounds", + "rebounce", "rebound", + "rebuildt", "rebuilt", + "rebuplic", "republic", + "receeded", "receded", + "recepits", "receipts", + "receptie", "receptive", + "receptos", "receptors", + "receving", "receiving", + "recident", "resident", + "reciding", "residing", + "recieved", "received", + "reciever", "receiver", + "recieves", "receives", + "recipees", "recipes", + "recipets", "recipes", + "recogise", "recognise", + "recogize", "recognize", + "recognie", "recognizes", + 
"recomend", "recommend", + "recommed", "recommend", + "reconnet", "reconnect", + "rectange", "rectangle", + "rectifiy", "rectify", + "recuring", "recurring", + "recurits", "recruits", + "redeisgn", "redesign", + "redemeed", "redeemed", + "redesgin", "redesign", + "redesing", "redesign", + "reedemed", "redeemed", + "refeeres", "referees", + "refelcts", "reflects", + "refelxes", "reflexes", + "referede", "referee", + "referene", "referee", + "referens", "references", + "referere", "referee", + "referign", "refering", + "refering", "referring", + "refernce", "references", + "reffered", "referred", + "refilles", "refills", + "refillls", "refills", + "reflecte", "reflective", + "reflecto", "reflection", + "reformes", "reforms", + "refreing", "refering", + "refrence", "reference", + "refreshd", "refreshed", + "refreshr", "refresher", + "refromed", "reformed", + "regardes", "regards", + "regenade", "renegade", + "regenere", "regenerate", + "regiones", "regions", + "regisrty", "registry", + "registed", "registered", + "regresas", "regress", + "regreses", "regress", + "regresos", "regress", + "regresse", "regressive", + "regresso", "regression", + "regrests", "regress", + "regretts", "regrets", + "regsitry", "registry", + "regualrs", "regulars", + "regualte", "regulate", + "reguarly", "regularly", + "regulary", "regularly", + "regulatr", "regulator", + "regulats", "regulators", + "rehersal", "rehearsal", + "rehtoric", "rhetoric", + "reiceved", "recieved", + "reigment", "regiment", + "reigonal", "regional", + "rekenton", "renekton", + "relaible", "reliable", + "relaibly", "reliably", + "relaised", "realised", + "relaoded", "reloaded", + "relasped", "relapsed", + "relatabe", "relatable", + "relateds", "relates", + "relativy", "relativity", + "relavent", "relevant", + "relected", "reelected", + "relegato", "relegation", + "releived", "relieved", + "releiver", "reliever", + "relevent", "relevant", + "relfects", "reflects", + "relfexes", "reflexes", + "reliased", "realised", + "religous", "religious", + "relpased", "relapsed", + "remainds", "remains", + "remainig", "remaining", + "remannts", "remnants", + "remarkes", "remarks", + "remembed", "remembered", + "remembee", "remembered", + "rememebr", "remember", + "remenant", "remnant", + "reminent", "remnant", + "remmeber", "remember", + "remotley", "remotely", + "renderes", "renders", + "reneagde", "renegade", + "renetkon", "renekton", + "renewabe", "renewables", + "renketon", "renekton", + "renmants", "remnants", + "renoylds", "reynolds", + "renteris", "renters", + "renyolds", "reynolds", + "reowrked", "reworked", + "repaires", "repairs", + "repalces", "replaces", + "reparied", "repaired", + "repblics", "republics", + "repbulic", "republic", + "repeatae", "repeatable", + "repeates", "repeats", + "repetion", "repetition", + "repharse", "rephrase", + "repitles", "reptiles", + "replased", "relapsed", + "replayes", "replays", + "replicae", "replicated", + "replubic", "republic", + "reportes", "reporters", + "reposity", "repository", + "repostas", "reposts", + "repostes", "reposts", + "repostig", "reposting", + "repostus", "reposts", + "represet", "represents", + "represso", "repression", + "reprhase", "rephrase", + "repsects", "respects", + "repsonds", "responds", + "repsonse", "response", + "repsoted", "reposted", + "repubics", "republics", + "republis", "republics", + "repulics", "republics", + "repulsie", "repulsive", + "requiers", "requires", + "requieum", "requiem", + "requilme", "requiem", + "requried", "required", + "requries", "requires", + 
"rescuecd", "rescued", + "researce", "researcher", + "resembes", "resembles", + "reserach", "research", + "resevoir", "reservoir", + "resgined", "resigned", + "residude", "residue", + "residule", "residue", + "resinged", "resigned", + "resistas", "resists", + "resisten", "resistance", + "resistes", "resists", + "resloved", "resolved", + "resloves", "resolves", + "resmeble", "resemble", + "resotred", "restored", + "resourse", "resources", + "resovled", "resolved", + "resovles", "resolves", + "respecte", "respective", + "respesct", "respects", + "responce", "response", + "responed", "respond", + "respones", "response", + "responsd", "responds", + "respoted", "reposted", + "restanti", "restarting", + "restrait", "restraint", + "restrics", "restricts", + "resuable", "reusable", + "retailes", "retailers", + "retalier", "retailer", + "rethoric", "rhetoric", + "retirase", "retires", + "retireds", "retires", + "retireus", "retires", + "retireve", "retrieve", + "retreive", "retrieve", + "retrived", "retrieved", + "retunred", "returned", + "reuasble", "reusable", + "reveales", "reveals", + "reveiwed", "reviewed", + "reveiwer", "reviewer", + "revelaed", "revealed", + "revelant", "relevant", + "revelead", "revealed", + "reverals", "reversal", + "reviewes", "reviewers", + "revlover", "revolver", + "revloves", "revolves", + "revovler", "revolver", + "revovles", "revolves", + "rewatchd", "rewatched", + "rewitten", "rewritten", + "rewritte", "rewrite", + "rewtched", "wretched", + "reynlods", "reynolds", + "reyonlds", "reynolds", + "rhaposdy", "rhapsody", + "rhaspody", "rhapsody", + "rheotric", "rhetoric", + "righteos", "righteous", + "rigntone", "ringtone", + "ringotne", "ringtone", + "ritalian", "ritalin", + "rivalrly", "rivalry", + "roachers", "roaches", + "robberts", "robbers", + "robberys", "robbers", + "robocoop", "robocop", + "robocorp", "robocop", + "robocoup", "robocop", + "roelplay", "roleplay", + "roganism", "organism", + "rolepaly", "roleplay", + "romaanin", "romanian", + "romainan", "romanian", + "romanain", "romanian", + "romanica", "romania", + "rosettta", "rosetta", + "rostaing", "roasting", + "routeros", "routers", + "rutgerus", "rutgers", + "ryenolds", "reynolds", + "sacrifie", "sacrifice", + "saddends", "saddens", + "saddenes", "saddens", + "sadisitc", "sadistic", + "salaires", "salaries", + "sandales", "sandals", + "sandalls", "sandals", + "sandstom", "sandstorm", + "sanotrum", "santorum", + "santourm", "santorum", + "santroum", "santorum", + "santurom", "santorum", + "sapcebar", "spacebar", + "sapphrie", "sapphire", + "sarcasam", "sarcasm", + "sarcasim", "sarcasm", + "sarcastc", "sarcastic", + "sargeant", "sergeant", + "sasauges", "sausages", + "sasuages", "sausages", + "satelite", "satellite", + "satellie", "satellites", + "saterday", "saturday", + "satifies", "satisfies", + "satisfiy", "satisfy", + "satrical", "satirical", + "satruday", "saturday", + "saturdsy", "saturdays", + "sawstika", "swastika", + "scandlas", "scandals", + "scannign", "scanning", + "scarmble", "scramble", + "scepture", "scepter", + "schedual", "schedule", + "schoalrs", "scholars", + "scholary", "scholarly", + "schoodle", "schooled", + "scientic", "scientific", + "scientis", "scientist", + "scoprion", "scorpion", + "scorates", "socrates", + "scoripon", "scorpion", + "scorpoin", "scorpion", + "scostman", "scotsman", + "scratchs", "scratches", + "scriptue", "scriptures", + "scriptus", "scripts", + "scritped", "scripted", + "scroates", "socrates", + "scropion", "scorpion", + "scrpited", "scripted", + "scruitny", 
"scrutiny", + "scrunity", "scrutiny", + "sctosman", "scotsman", + "sculpter", "sculpture", + "scurtiny", "scrutiny", + "seahakws", "seahawks", + "seahwaks", "seahawks", + "seantors", "senators", + "sebastin", "sebastian", + "seceeded", "succeeded", + "secertly", "secretly", + "secrelty", "secretly", + "secretas", "secrets", + "secretos", "secrets", + "secruity", "security", + "secuirty", "security", + "sedereal", "sidereal", + "seldomly", "seldom", + "selectie", "selective", + "selfiers", "selfies", + "semestre", "semester", + "semseter", "semester", + "senarios", "scenarios", + "senerity", "serenity", + "seniores", "seniors", + "senisble", "sensible", + "sensibel", "sensible", + "sensores", "sensors", + "senstive", "sensitive", + "sentaors", "senators", + "sentiers", "sentries", + "sentinet", "sentient", + "sentinte", "sentient", + "sentires", "sentries", + "sentreis", "sentries", + "separato", "separation", + "separete", "seperate", + "sepearte", "seperate", + "seperate", "separate", + "seplling", "spelling", + "sepreate", "seperate", + "sepulcre", "sepulchre", + "serached", "searched", + "seraches", "searches", + "serentiy", "serenity", + "sergaent", "sergeant", + "settigns", "settings", + "settting", "setting", + "seventen", "seventeen", + "severeal", "several", + "severeid", "severed", + "severide", "severed", + "severley", "severely", + "sexaully", "sexually", + "seziures", "seizures", + "sezuires", "seizures", + "shadoloo", "shadaloo", + "shangahi", "shanghai", + "shanghia", "shanghai", + "sharplay", "sharply", + "sharpley", "sharply", + "shawshak", "shawshank", + "shcolars", "scholars", + "shcooled", "schooled", + "sheilded", "shielded", + "shelterd", "sheltered", + "shelvers", "shelves", + "shelveys", "shelves", + "sherlcok", "sherlock", + "shetlers", "shelters", + "shfiting", "shifting", + "shifitng", "shifting", + "shifteer", "shifter", + "shileded", "shielded", + "shineing", "shining", + "shitstom", "shitstorm", + "shittoon", "shitton", + "shittown", "shitton", + "shleters", "shelters", + "shnaghai", "shanghai", + "shortend", "shortened", + "shotuout", "shoutout", + "shoudlnt", "shouldnt", + "shouldes", "shoulders", + "shoulndt", "shouldnt", + "shrapenl", "shrapnel", + "shrelock", "sherlock", + "shrinked", "shrunk", + "shrpanel", "shrapnel", + "shtiless", "shitless", + "shuoldnt", "shouldnt", + "sideboad", "sideboard", + "sidleine", "sideline", + "siezable", "sizeable", + "siezures", "seizures", + "signatue", "signatures", + "signfies", "signifies", + "signifiy", "signify", + "signigns", "signings", + "signular", "singular", + "silbings", "siblings", + "silicoln", "silicon", + "silicoon", "silicon", + "silimiar", "similiar", + "simialir", "similiar", + "simiilar", "similiar", + "similair", "similar", + "similari", "similiar", + "similart", "similarity", + "similary", "similarly", + "similiar", "similar", + "simliiar", "similiar", + "simluate", "simulate", + "simmilar", "similar", + "simpelst", "simplest", + "simplets", "simplest", + "simplicy", "simplicity", + "simplier", "simpler", + "simulato", "simulation", + "singlers", "singles", + "singluar", "singular", + "sinistre", "sinister", + "sinsiter", "sinister", + "sitckers", "stickers", + "sitrring", "stirring", + "sizebale", "sizeable", + "skateing", "skating", + "skecthes", "sketches", + "skelatel", "skeletal", + "skeletos", "skeletons", + "sketchey", "sketchy", + "sketpics", "skeptics", + "skillsto", "skillshots", + "skimrish", "skirmish", + "skpetics", "skeptics", + "skrimish", "skirmish", + "skteches", "sketches", + 
"skywalkr", "skywalker", + "slaptoon", "splatoon", + "slaverly", "slavery", + "slienced", "silenced", + "sliently", "silently", + "slighlty", "slightly", + "sligthly", "slightly", + "smartare", "smarter", + "snetries", "sentries", + "snippent", "snippet", + "snippert", "snippet", + "snowbals", "snowballs", + "snugglie", "snuggle", + "snydrome", "syndrome", + "snyopsis", "synopsis", + "soberity", "sobriety", + "sobreity", "sobriety", + "socailly", "socially", + "socalism", "socialism", + "socartes", "socrates", + "socialim", "socialism", + "socities", "societies", + "socttish", "scottish", + "soemthin", "somethin", + "soilders", "soldiers", + "solatary", "solitary", + "soldeirs", "soldiers", + "soliders", "soldiers", + "soluable", "soluble", + "solutide", "solitude", + "somalija", "somalia", + "somehtin", "somethin", + "someoens", "someones", + "somethis", "somethings", + "sometihn", "somethin", + "sometinh", "somethin", + "somoenes", "someones", + "somtimes", "sometimes", + "somwhere", "somewhere", + "soparnos", "sopranos", + "sophmore", "sophomore", + "sorcercy", "sorcery", + "sorcerey", "sorcery", + "sorceror", "sorcerer", + "sorcerry", "sorcery", + "sorpanos", "sopranos", + "southren", "southern", + "soverein", "sovereign", + "soverign", "sovereign", + "sovietes", "soviets", + "spagheti", "spaghetti", + "spainish", "spanish", + "spaltoon", "splatoon", + "spammade", "spammed", + "spammare", "spammer", + "spammear", "spammer", + "spammend", "spammed", + "spammeur", "spammer", + "spanisch", "spanish", + "sparklie", "sparkle", + "spawnign", "spawning", + "specemin", "specimen", + "speciaal", "special", + "specialt", "specialist", + "specialy", "specially", + "specialz", "specialize", + "specifed", "specified", + "specifiy", "specify", + "speciman", "specimen", + "specrtal", "spectral", + "speicals", "specials", + "spellign", "spelling", + "spendour", "splendour", + "sphereos", "spheres", + "spilnter", "splinter", + "spiltter", "splitter", + "spindrel", "spindle", + "spirites", "spirits", + "spiritis", "spirits", + "spiritus", "spirits", + "spirtied", "spirited", + "spleling", "spelling", + "splitner", "splinter", + "spoilerd", "spoiled", + "spoliers", "spoilers", + "sponsord", "sponsored", + "sporanos", "sopranos", + "spotifiy", "spotify", + "spotifty", "spotify", + "sppeches", "speeches", + "sprayade", "sprayed", + "spreaded", "spread", + "springst", "sprints", + "sprinkel", "sprinkle", + "sprintas", "sprints", + "spritual", "spiritual", + "sproutes", "sprouts", + "spwaning", "spawning", + "sqaudron", "squadron", + "sqaurely", "squarely", + "sqiurtle", "squirtle", + "squardon", "squadron", + "squareds", "squares", + "squarley", "squarely", + "squeakey", "squeaky", + "squeakly", "squeaky", + "squirlte", "squirtle", + "squirrle", "squirrel", + "squirtel", "squirtle", + "squishey", "squishy", + "squishly", "squishy", + "squritle", "squirtle", + "squrriel", "squirrel", + "squrtile", "squirtle", + "sriarcha", "sriracha", + "srriacha", "sriracha", + "sryacuse", "syracuse", + "staduims", "stadiums", + "staidums", "stadiums", + "staklers", "stalkers", + "stalekrs", "stalkers", + "stalkear", "stalker", + "staminia", "stamina", + "stampade", "stamped", + "stampeed", "stamped", + "stancels", "stances", + "stancers", "stances", + "standars", "standards", + "standbay", "standby", + "standbuy", "standby", + "stangant", "stagnant", + "staright", "straight", + "starined", "strained", + "starlted", "startled", + "startegy", "strategy", + "starteld", "startled", + "startsup", "startups", + "stateman", 
"statesman", + "staticts", "statist", + "stationd", "stationed", + "stationy", "stationary", + "statiskt", "statist", + "statistc", "statistic", + "statment", "statement", + "stattues", "statutes", + "statuets", "statutes", + "statuser", "stature", + "staurday", "saturday", + "steadliy", "steadily", + "stealhty", "stealthy", + "steathly", "stealthy", + "stelathy", "stealthy", + "sterilze", "sterile", + "steriods", "steroids", + "stichted", "stitched", + "sticthed", "stitched", + "sticthes", "stitches", + "stimulai", "stimuli", + "stimulas", "stimulants", + "stimulat", "stimulants", + "stimulli", "stimuli", + "stingent", "stringent", + "stirkers", "strikers", + "stlakers", "stalkers", + "stomache", "stomach", + "stormade", "stormed", + "stormend", "stormed", + "stradegy", "strategy", + "stragety", "strategy", + "straignt", "straighten", + "straigth", "straight", + "straings", "strains", + "strangel", "strangle", + "stranget", "strangest", + "stratgey", "strategy", + "stratled", "startled", + "streames", "streams", + "streamos", "streams", + "streamus", "streams", + "streamys", "streams", + "stregnth", "strength", + "stremear", "streamer", + "strenght", "strength", + "strengts", "strengths", + "strenous", "strenuous", + "strentgh", "strength", + "stretchs", "stretches", + "striaght", "straight", + "striclty", "strictly", + "striekrs", "strikers", + "strikely", "strikingly", + "stringet", "stringent", + "stubbron", "stubborn", + "stubmled", "stumbled", + "stucture", "structure", + "studioes", "studios", + "stuipder", "stupider", + "stumbeld", "stumbled", + "stupdily", "stupidly", + "stupidiy", "stupidity", + "stylisch", "stylish", + "styrofom", "styrofoam", + "suasages", "sausages", + "subltety", "subtlety", + "submarie", "submarines", + "subruban", "suburban", + "subscrie", "subscriber", + "subsidie", "subsidized", + "subsidiy", "subsidy", + "substace", "substance", + "substans", "substances", + "substite", "substitute", + "subtelty", "subtlety", + "subtetly", "subtlety", + "subtilte", "subtitle", + "subtitel", "subtitle", + "subtitls", "subtitles", + "subtltey", "subtlety", + "succeded", "succeeded", + "succedes", "succeeds", + "succeeed", "succeed", + "succesed", "succeeds", + "successs", "successes", + "succsess", "success", + "suceeded", "succeeded", + "sucesful", "successful", + "sucesion", "succession", + "sucesses", "successes", + "sucessor", "successor", + "sucessot", "successor", + "sucidial", "suicidal", + "suddnely", "suddenly", + "sufficit", "sufficient", + "suggesst", "suggests", + "suggeste", "suggestive", + "summenor", "summoner", + "summones", "summoners", + "sunfiber", "sunfire", + "sunscren", "sunscreen", + "superham", "superhuman", + "superheo", "superhero", + "superios", "superiors", + "supirsed", "suprised", + "suposing", "supposing", + "supporre", "supporters", + "suppoted", "supported", + "suprised", "surprised", + "suprized", "surprised", + "suprsied", "suprised", + "supsects", "suspects", + "supsense", "suspense", + "surbuban", "suburban", + "surounds", "surrounds", + "surpases", "surpass", + "surpress", "suppress", + "surprize", "surprise", + "surrouns", "surrounds", + "surveill", "surveil", + "surveyer", "surveyor", + "surviver", "survivor", + "suspened", "suspend", + "suspenso", "suspension", + "swaering", "swearing", + "swansoon", "swanson", + "swasitka", "swastika", + "swaskita", "swastika", + "swatiska", "swastika", + "swatsika", "swastika", + "swedisch", "swedish", + "swiftley", "swiftly", + "swithced", "switched", + "swithces", "switches", + "swtiched", 
"switched", + "swtiches", "switches", + "syarcuse", "syracuse", + "sydnrome", "syndrome", + "sylablle", "syllable", + "syllabel", "syllable", + "symapthy", "sympathy", + "symboles", "symbols", + "symhpony", "symphony", + "symmerty", "symmetry", + "symmtery", "symmetry", + "symoblic", "symbolic", + "symphaty", "sympathy", + "symptoom", "symptom", + "symtpoms", "symptoms", + "synomyns", "synonyms", + "synonmys", "synonyms", + "synonomy", "synonym", + "synoynms", "synonyms", + "synphony", "symphony", + "synposis", "synopsis", + "sypmathy", "sympathy", + "sypmtoms", "symptoms", + "sypnosis", "synopsis", + "syraucse", "syracuse", + "syrcause", "syracuse", + "syringae", "syringe", + "syringue", "syringe", + "sysamdin", "sysadmin", + "sysdamin", "sysadmin", + "tacticas", "tactics", + "tacticts", "tactics", + "tacticus", "tactics", + "tagliate", "tailgate", + "tahnkyou", "thankyou", + "tailsman", "talisman", + "taiwanee", "taiwanese", + "taligate", "tailgate", + "taliored", "tailored", + "tallents", "tallest", + "talsiman", "talisman", + "tanturms", "tantrums", + "tapitude", "aptitude", + "tasliman", "talisman", + "tattooes", "tattoos", + "tattooos", "tattoos", + "taxanomy", "taxonomy", + "teamfigt", "teamfight", + "teamspek", "teamspeak", + "teancity", "tenacity", + "teapsoon", "teaspoon", + "techniqe", "technique", + "teenages", "teenagers", + "telegrah", "telegraph", + "telphony", "telephony", + "tempalrs", "templars", + "tempalte", "template", + "templats", "templates", + "templeos", "temples", + "templers", "temples", + "temporay", "temporary", + "temprary", "temporary", + "tenacles", "tentacles", + "tenactiy", "tenacity", + "tencaity", "tenacity", + "tendancy", "tendency", + "tendence", "tendencies", + "tentacel", "tentacle", + "tentacls", "tentacles", + "tentalce", "tentacle", + "tequilia", "tequila", + "terriory", "territory", + "territoy", "territory", + "terroist", "terrorist", + "tesitcle", "testicle", + "testicel", "testicle", + "testifiy", "testify", + "teusdays", "tuesdays", + "texutres", "textures", + "thaliand", "thailand", + "theather", "theater", + "theathre", "theater", + "theature", "theater", + "theisitc", "theistic", + "themslef", "themself", + "theorits", "theorist", + "theraphy", "therapy", + "thereian", "therein", + "theroies", "theories", + "theroist", "theorist", + "thesitic", "theistic", + "thialand", "thailand", + "thiestic", "theistic", + "thikning", "thinking", + "thirites", "thirties", + "thirstay", "thirsty", + "thnakyou", "thankyou", + "thoeries", "theories", + "thoerist", "theorist", + "thomspon", "thompson", + "thopmson", "thompson", + "thougths", "thoughts", + "thourogh", "thorough", + "threates", "threatens", + "threefor", "therefor", + "thriteen", "thirteen", + "thrities", "thirties", + "throaths", "throats", + "throners", "thrones", + "throough", "thorough", + "throught", "thought", + "thrusday", "thursday", + "thumbnal", "thumbnails", + "thurdsay", "thursday", + "thursdsy", "thursdays", + "tightare", "tighter", + "timestap", "timestamp", + "tirangle", "triangle", + "tirbunal", "tribunal", + "titainum", "titanium", + "titanuim", "titanium", + "tocuhpad", "touchpad", + "togehter", "together", + "togheter", "together", + "toiletts", "toilets", + "tolerabe", "tolerable", + "tommorow", "tomorrow", + "tonguers", "tongues", + "toriodal", "toroidal", + "toritlla", "tortilla", + "tornadoe", "tornado", + "torotise", "tortoise", + "torpedeo", "torpedo", + "torphies", "trophies", + "tortiose", "tortoise", + "toruisty", "touristy", + "toruneys", "tourneys", + 
"touchapd", "touchpad", + "tounreys", "tourneys", + "tourisim", "tourism", + "touritsy", "touristy", + "tournyes", "tourneys", + "toursits", "tourists", + "toursity", "touristy", + "toxiticy", "toxicity", + "trabajao", "trabajo", + "trabajdo", "trabajo", + "trackres", "trackers", + "trageted", "targeted", + "traingle", "triangle", + "traitour", "traitor", + "trakcers", "trackers", + "traliers", "trailers", + "tranform", "transform", + "transeat", "translates", + "transfom", "transform", + "transfos", "transforms", + "transiet", "transient", + "transito", "transition", + "transpot", "transport", + "trasnfer", "transfer", + "tratiors", "traitors", + "traveles", "travels", + "traveres", "traverse", + "treasurs", "treasures", + "treatmet", "treatments", + "treatsie", "treaties", + "treausre", "treasure", + "tredning", "trending", + "tremelos", "tremolos", + "tresuary", "treasury", + "trialers", "trailers", + "trianers", "trainers", + "triangel", "triangle", + "triangls", "triangles", + "trianing", "training", + "trianlge", "triangle", + "triators", "traitors", + "tribuanl", "tribunal", + "trickyer", "trickery", + "triggern", "triggering", + "trilogoy", "trilogy", + "trinagle", "triangle", + "trinekts", "trinkets", + "tringale", "triangle", + "trinitiy", "trinity", + "triology", "trilogy", + "triumpth", "triumph", + "trohpies", "trophies", + "trollade", "trolled", + "tropcial", "tropical", + "trotilla", "tortilla", + "trpoical", "tropical", + "trubinal", "tribunal", + "trubines", "turbines", + "tsunamai", "tsunami", + "tuesdsay", "tuesdays", + "tunnells", "tunnels", + "turkisch", "turkish", + "turntabe", "turntable", + "turretts", "turrets", + "tusedays", "tuesdays", + "tutorual", "tutorial", + "twilgiht", "twilight", + "tylenool", "tylenol", + "typicaly", "typically", + "tyranies", "tyrannies", + "tyrannia", "tyrannical", + "ublisher", "publisher", + "udnercut", "undercut", + "udnerdog", "underdog", + "ugpraded", "upgraded", + "ugprades", "upgrades", + "ukrainie", "ukraine", + "ukrainin", "ukrainian", + "ukranian", "ukrainian", + "ulitmate", "ultimate", + "ultamite", "ultimate", + "ultiamte", "ultimate", + "ultimely", "ultimately", + "ultrason", "ultrasound", + "umberlla", "umbrella", + "unabnned", "unbanned", + "unbanend", "unbanned", + "uncanney", "uncanny", + "uncannny", "uncanny", + "underbog", "undergo", + "underglo", "undergo", + "undersog", "undergo", + "undertoe", "undertones", + "underwar", "underwater", + "unfailry", "unfairly", + "unfarily", "unfairly", + "ungodley", "ungodly", + "unhapppy", "unhappy", + "unhealty", "unhealthy", + "unicrons", "unicorns", + "unifroms", "uniforms", + "uniquley", "uniquely", + "univeral", "universal", + "unlikley", "unlikely", + "unlockes", "unlocks", + "unluckly", "unlucky", + "unpoened", "unopened", + "unqiuely", "uniquely", + "unrakned", "unranked", + "unrnaked", "unranked", + "unrpoven", "unproven", + "unsuable", "unusable", + "untraind", "untrained", + "unusualy", "unusually", + "unvierse", "universe", + "unworhty", "unworthy", + "upgarded", "upgraded", + "upgardes", "upgrades", + "uploades", "uploads", + "upstaris", "upstairs", + "upstiars", "upstairs", + "urethrea", "urethra", + "uruguary", "uruguay", + "ususally", "usually", + "utilitiy", "utility", + "utlimate", "ultimate", + "vaccinae", "vaccinated", + "vaccinet", "vaccinated", + "vacinity", "vicinity", + "vaguelly", "vaguely", + "vaiation", "aviation", + "vaieties", "varieties", + "vailidty", "validity", + "vairable", "variable", + "vaklyrie", "valkyrie", + "valenica", "valencia", + 
"valentie", "valentines", + "valentis", "valentines", + "validade", "validated", + "valkirye", "valkyrie", + "valkiyre", "valkyrie", + "valkriye", "valkyrie", + "valkryie", "valkyrie", + "valkyire", "valkyrie", + "valnecia", "valencia", + "valubale", "valuable", + "valykrie", "valkyrie", + "vamipres", "vampires", + "vampiers", "vampires", + "vampries", "vampires", + "vangurad", "vanguard", + "vanillia", "vanilla", + "vanillla", "vanilla", + "vanugard", "vanguard", + "varaible", "variable", + "varaints", "variants", + "variabel", "variable", + "varibale", "variable", + "varities", "varieties", + "vassales", "vassals", + "vassalls", "vassals", + "vassalos", "vassals", + "vaticaan", "vatican", + "vaticina", "vatican", + "vaulable", "valuable", + "vaylkrie", "valkyrie", + "vechiles", "vehicles", + "vectores", "vectors", + "vegansim", "veganism", + "vegtable", "vegetable", + "vehciles", "vehicles", + "vehicels", "vehicles", + "vehicule", "vehicle", + "veichles", "vehicles", + "venelope", "envelope", + "venemous", "venomous", + "vengance", "vengeance", + "vengence", "vengeance", + "verablly", "verbally", + "verbaitm", "verbatim", + "verisons", "versions", + "versatel", "versatile", + "vertabim", "verbatim", + "vertigro", "vertigo", + "vesseles", "vessels", + "vessells", "vessels", + "viabiliy", "viability", + "viatmins", "vitamins", + "vibratie", "vibrate", + "vibratin", "vibration", + "vicintiy", "vicinity", + "vicseral", "visceral", + "victimas", "victims", + "victimes", "victims", + "victorin", "victorian", + "victoris", "victories", + "vieweres", "viewers", + "viewpoit", "viewpoints", + "vigilane", "vigilante", + "vigliant", "vigilant", + "vikingos", "vikings", + "viligant", "vigilant", + "villegas", "villages", + "vindicte", "vindictive", + "vinicity", "vicinity", + "violatin", "violation", + "violenty", "violently", + "violetas", "violates", + "virament", "vraiment", + "virbator", "vibrator", + "virginas", "virgins", + "virgines", "virgins", + "virgings", "virgins", + "virginis", "virgins", + "virginus", "virgins", + "virtualy", "virtually", + "virtuels", "virtues", + "virtuose", "virtues", + "viscreal", "visceral", + "visercal", "visceral", + "visibily", "visibility", + "visibley", "visibly", + "visiblly", "visibly", + "vitailty", "vitality", + "vitimans", "vitamins", + "vitmains", "vitamins", + "vitories", "victories", + "voicemal", "voicemail", + "voilates", "violates", + "volatily", "volatility", + "volcando", "volcano", + "volcanoe", "volcano", + "volcaron", "volcano", + "vriament", "vraiment", + "wahtever", "whatever", + "wallpapr", "wallpapers", + "warantee", "warranty", + "warcarft", "warcraft", + "warrante", "warranties", + "warriros", "warriors", + "watchemn", "watchmen", + "watchign", "watching", + "wathcing", "watching", + "wathcmen", "watchmen", + "wathever", "whatever", + "watkings", "watkins", + "wealthly", "wealthy", + "webistes", "websites", + "websties", "websites", + "wednesdy", "wednesdays", + "weigthed", "weighted", + "weridest", "weirdest", + "werstler", "wrestler", + "wesbites", "websites", + "westbrok", "westbrook", + "westerse", "westerners", + "wherease", "whereas", + "whipsers", "whispers", + "whislist", "wishlist", + "whisltes", "whistles", + "whisperd", "whispered", + "whistels", "whistles", + "whitsles", "whistles", + "whsipers", "whispers", + "widgetas", "widgets", + "wieghted", "weighted", + "willaims", "williams", + "willfuly", "willfully", + "willimas", "williams", + "windsoar", "windsor", + "wininpeg", "winnipeg", + "winnigns", "winnings", + "winnpieg", 
"winnipeg", + "wiredest", "weirdest", + "wishlsit", "wishlist", + "wishpers", "whispers", + "withdral", "withdrawal", + "witnesss", "witnesses", + "wonderes", "wonders", + "wonderus", "wonders", + "workfore", "workforce", + "wouldnot", "wouldnt", + "wranlger", "wrangler", + "wreckign", "wrecking", + "wrecthed", "wretched", + "wrekcing", "wrecking", + "wreslter", "wrestler", + "wresters", "wrestlers", + "writting", "writing", + "wrnagler", "wrangler", + "wrteched", "wretched", + "yeilding", "yielding", + "yoesmite", "yosemite", + "yorksher", "yorkshire", + "yorkshie", "yorkshire", + "yosemeti", "yosemite", + "yosimete", "yosemite", + "zealotes", "zealots", + "zealoths", "zealots", + "zealotus", "zealots", + "zealouts", "zealous", + "zepplein", "zeppelin", + "zepplien", "zeppelin", + "zimbabew", "zimbabwe", + "zimbawbe", "zimbabwe", + "zinoists", "zionists", + "zionisim", "zionism", + "zionistm", "zionism", + "zionsits", "zionists", + "zoinists", "zionists", + "abiltiy", "ability", + "abodmen", "abdomen", + "abondon", "abandon", + "aboslve", "absolve", + "abosrbs", "absorbs", + "abriter", "arbiter", + "abrupty", "abruptly", + "absense", "absence", + "absolue", "absolute", + "absovle", "absolve", + "absrobs", "absorbs", + "absuers", "abusers", + "absurdy", "absurdly", + "absymal", "abysmal", + "abymsal", "abysmal", + "acadamy", "academy", + "acadmic", "academic", + "accesss", "access", + "accpets", "accepts", + "accross", "across", + "accuray", "accuracy", + "acheive", "achieve", + "achived", "achieved", + "acident", "accident", + "ackward", "awkward", + "acrlyic", "acrylic", + "actauly", "actualy", + "activit", "activist", + "activly", "actively", + "actualy", "actually", + "actulay", "actualy", + "acuracy", "accuracy", + "acusing", "causing", + "acustom", "accustom", + "acutaly", "actualy", + "acyrlic", "acrylic", + "adaptes", "adapters", + "adatper", "adapter", + "adbomen", "abdomen", + "addcits", "addicts", + "adderss", "address", + "addtion", "addition", + "adequet", "adequate", + "adequit", "adequate", + "adivser", "adviser", + "adivsor", "advisor", + "admited", "admitted", + "admrial", "admiral", + "adpater", "adapter", + "adquire", "acquire", + "adultey", "adultery", + "adverst", "adverts", + "adviced", "advised", + "advocay", "advocacy", + "advsior", "advisor", + "aeriels", "aerials", + "affaris", "affairs", + "affiars", "affairs", + "afircan", "african", + "africas", "africans", + "afwully", "awfully", + "againts", "against", + "agaisnt", "against", + "aganist", "against", + "aggreed", "agreed", + "agianst", "against", + "agreing", "agreeing", + "agruing", "arguing", + "ahtiest", "athiest", + "aicraft", "aircraft", + "ailmony", "alimony", + "airbore", "airborne", + "aircaft", "aircraft", + "airlfow", "airflow", + "airosft", "airsoft", + "airpost", "airports", + "airsfot", "airsoft", + "airzona", "arizona", + "alchmey", "alchemy", + "alchool", "alcohol", + "alcohal", "alcohol", + "aledged", "alleged", + "aledges", "alleges", + "alegbra", "algebra", + "algerba", "algebra", + "alienet", "alienate", + "alledge", "allege", + "allegry", "allergy", + "alltime", "all-time", + "almighy", "almighty", + "alochol", "alcohol", + "alotted", "allotted", + "alowing", "allowing", + "alphabt", "alphabet", + "alreayd", "already", + "alrighy", "alrighty", + "altanta", "atlanta", + "alteast", "atleast", + "altough", "although", + "alusion", "allusion", + "amateus", "amateurs", + "amatuer", "amateur", + "amature", "armature", + "amensia", "amnesia", + "amensty", "amnesty", + "amercia", "america", + 
"americs", "americas", + "ammount", "amount", + "ammused", "amused", + "amneisa", "amnesia", + "amnsety", "amnesty", + "amognst", "amongst", + "amongts", "amongst", + "amonsgt", "amongst", + "ampilfy", "amplify", + "amrpits", "armpits", + "analoge", "analogue", + "analsyt", "analyst", + "analyes", "analyse", + "analyts", "analyst", + "analzye", "analyze", + "anaylse", "analyse", + "anaylst", "analyst", + "anaylze", "analyze", + "anceint", "ancient", + "andorid", "android", + "andriod", "android", + "androis", "androids", + "angirly", "angrily", + "angluar", "angular", + "angualr", "angular", + "anicent", "ancient", + "anitque", "antique", + "anixety", "anxiety", + "anmesia", "amnesia", + "anmesty", "amnesty", + "annoint", "anoint", + "annualy", "annually", + "annuled", "annulled", + "anohter", "another", + "anomoly", "anomaly", + "answerd", "answered", + "anuglar", "angular", + "anulled", "annulled", + "anwsers", "answers", + "anwyays", "anyways", + "anxeity", "anxiety", + "anyoens", "anyones", + "anyonse", "anyones", + "anywyas", "anyways", + "aparent", "apparent", + "appeard", "appeared", + "appluad", "applaud", + "aproval", "approval", + "apsects", "aspects", + "apshalt", "asphalt", + "apsirin", "aspirin", + "aqcuire", "acquire", + "aquarim", "aquarium", + "aquired", "acquired", + "aranged", "arranged", + "arbitre", "arbiter", + "arcahic", "archaic", + "archiac", "archaic", + "arcylic", "acrylic", + "aresnal", "arsenal", + "aretmis", "artemis", + "argubly", "arguably", + "aribter", "arbiter", + "ariflow", "airflow", + "arisoft", "airsoft", + "aritsts", "artists", + "armchar", "armchair", + "arogant", "arrogant", + "arogent", "arrogant", + "arresst", "arrests", + "arround", "around", + "arsneal", "arsenal", + "artcile", "article", + "artical", "article", + "articel", "article", + "artistc", "artistic", + "artmeis", "artemis", + "artsits", "artists", + "aruging", "arguing", + "aseuxal", "asexual", + "asexaul", "asexual", + "ashpalt", "asphalt", + "asiprin", "aspirin", + "asissts", "assists", + "asnwers", "answers", + "asorbed", "absorbed", + "aspahlt", "asphalt", + "asphlat", "asphalt", + "aspriin", "aspirin", + "assagne", "assange", + "assasin", "assassin", + "assembe", "assemble", + "assemby", "assembly", + "assisst", "assists", + "assnage", "assange", + "asssits", "assists", + "assualt", "assault", + "asterik", "asterisk", + "asutria", "austria", + "atcualy", "actualy", + "atelast", "atleast", + "athesim", "atheism", + "athiesm", "atheism", + "athiest", "atheist", + "athiets", "athiest", + "athlets", "athletes", + "atlantc", "atlantic", + "atleats", "atleast", + "atlesat", "atleast", + "atorney", "attorney", + "atremis", "artemis", + "attemps", "attempts", + "attemts", "attempts", + "attened", "attended", + "attracs", "attracts", + "audbile", "audible", + "audibel", "audible", + "austira", "austria", + "austrai", "austria", + "autistc", "autistic", + "avation", "aviation", + "avtaars", "avatars", + "awakend", "awakened", + "bablyon", "babylon", + "backdor", "backdoor", + "backsta", "backseat", + "baclony", "balcony", + "badnits", "bandits", + "baiscly", "basicly", + "bakcers", "backers", + "balanse", "balances", + "balcked", "blacked", + "banhsee", "banshee", + "bankgok", "bangkok", + "baoynet", "bayonet", + "baptims", "baptism", + "baptsim", "baptism", + "baragin", "bargain", + "bargani", "bargain", + "bargian", "bargain", + "bariner", "brainer", + "barlkey", "barkley", + "barracs", "barracks", + "barrles", "barrels", + "barsita", "barista", + "barvery", "bravery", + "bascily", 
"basicly", + "basicly", "basically", + "basilcy", "basicly", + "basiton", "bastion", + "basnhee", "banshee", + "bastane", "bastante", + "bastars", "bastards", + "bastino", "bastion", + "bathrom", "bathroom", + "batitsa", "batista", + "batsita", "batista", + "bayblon", "babylon", + "baynoet", "bayonet", + "bayoent", "bayonet", + "bceuase", "becuase", + "beacuse", "because", + "bealtes", "beatles", + "beaslty", "beastly", + "beatels", "beatles", + "beaucop", "beaucoup", + "becamae", "became", + "becames", "becomes", + "becasue", "because", + "becouse", "because", + "becuaes", "becuase", + "becuase", "because", + "becusae", "becuase", + "befried", "befriend", + "beggins", "begins", + "beglian", "belgian", + "beglium", "belgium", + "begnals", "bengals", + "bejiing", "beijing", + "beleifs", "beliefs", + "beleive", "believe", + "belgain", "belgian", + "belguim", "belgium", + "believr", "believer", + "believs", "believes", + "belifes", "beliefs", + "beligan", "belgian", + "beligum", "belgium", + "belived", "believed", + "belives", "believes", + "benagls", "bengals", + "benedit", "benedict", + "benghai", "benghazi", + "benglas", "bengals", + "benifit", "benefit", + "beoynce", "beyonce", + "beraded", "bearded", + "bersekr", "berserk", + "beseige", "besiege", + "betales", "beatles", + "bethesa", "bethesda", + "betrayd", "betrayed", + "beucase", "becuase", + "bewteen", "between", + "bicthes", "bitches", + "bidrman", "birdman", + "biejing", "beijing", + "bifgoot", "bigfoot", + "bigorty", "bigotry", + "bigtoed", "bigoted", + "bigtory", "bigotry", + "biogted", "bigoted", + "biogtry", "bigotry", + "bioplar", "bipolar", + "biploar", "bipolar", + "birdamn", "birdman", + "birdges", "bridges", + "birgade", "brigade", + "bitcion", "bitcoin", + "bithced", "bitched", + "bithces", "bitches", + "bitocin", "bitcoin", + "bizzare", "bizarre", + "blacony", "balcony", + "blaimed", "blamed", + "blankes", "blankets", + "blegian", "belgian", + "blegium", "belgium", + "blizzad", "blizzard", + "blockes", "blockers", + "bloster", "bolster", + "blulets", "bullets", + "bobmers", "bombers", + "bollocs", "bollocks", + "bondary", "boundary", + "bonnano", "bonanno", + "bonsues", "bonuses", + "boraden", "broaden", + "borader", "broader", + "boradly", "broadly", + "bordeom", "boredom", + "boslter", "bolster", + "boudler", "boulder", + "boundry", "boundary", + "bounses", "bonuses", + "boutiqe", "boutique", + "bouyant", "buoyant", + "braevry", "bravery", + "braista", "barista", + "brakley", "barkley", + "branier", "brainer", + "braoden", "broaden", + "braoder", "broader", + "braodly", "broadly", + "brednan", "brendan", + "breifly", "briefly", + "breserk", "berserk", + "brethen", "brethren", + "brewrey", "brewery", + "briagde", "brigade", + "brianer", "brainer", + "bridman", "birdman", + "brielfy", "briefly", + "brigdes", "bridges", + "brightn", "brighten", + "brisben", "brisbane", + "britian", "britain", + "britsol", "bristol", + "briused", "bruised", + "briuser", "bruiser", + "briuses", "bruises", + "brocoli", "broccoli", + "bronocs", "broncos", + "browine", "brownie", + "brownei", "brownie", + "brownis", "brownies", + "bruglar", "burglar", + "brunete", "brunette", + "bruning", "burning", + "brusied", "bruised", + "brusies", "bruises", + "brusses", "brussels", + "brutaly", "brutally", + "btiched", "bitched", + "btiches", "bitches", + "bubbels", "bubbles", + "buddhim", "buddhism", + "buddhit", "buddhist", + "buddist", "buddhist", + "budgest", "budgets", + "bugdets", "budgets", + "buildes", "builders", + "bulgara", "bulgaria", + 
"bullest", "bullets", + "buoancy", "buoyancy", + "burguny", "burgundy", + "buriser", "bruiser", + "burlgar", "burglar", + "burnign", "burning", + "burried", "buried", + "burrtio", "burrito", + "busines", "business", + "busness", "business", + "butthoe", "butthole", + "buttrey", "buttery", + "cababge", "cabbage", + "cabines", "cabinets", + "cabniet", "cabinet", + "caclium", "calcium", + "cacuses", "caucuses", + "caffeen", "caffeine", + "cahched", "cached", + "cahotic", "chaotic", + "cahsier", "cashier", + "cailbre", "calibre", + "calaber", "caliber", + "calagry", "calgary", + "calback", "callback", + "calbire", "calibre", + "calcuim", "calcium", + "calculs", "calculus", + "calicum", "calcium", + "calrify", "clarify", + "calrity", "clarity", + "caluses", "clauses", + "camboda", "cambodia", + "campain", "campaign", + "campuss", "campuses", + "cancles", "cancels", + "cancres", "cancers", + "cancuks", "canucks", + "canides", "candies", + "cannnot", "cannot", + "canrage", "carnage", + "capible", "capable", + "capitas", "capitals", + "capsuls", "capsules", + "captais", "captains", + "captial", "capital", + "captiol", "capitol", + "captued", "captured", + "capturd", "captured", + "capusle", "capsule", + "carange", "carnage", + "carbien", "carbine", + "cardaic", "cardiac", + "cardina", "cardigan", + "careing", "caring", + "caridac", "cardiac", + "carmtan", "cartman", + "carnege", "carnage", + "carnige", "carnage", + "carolan", "carolina", + "carreer", "career", + "carrers", "careers", + "cartles", "cartels", + "caryons", "crayons", + "casette", "cassette", + "casheir", "cashier", + "cashies", "cashiers", + "cashire", "cashier", + "casltes", "castles", + "caspule", "capsule", + "cassete", "cassette", + "castels", "castles", + "casuing", "causing", + "cathlic", "catholic", + "cauncks", "canucks", + "cavarly", "cavalry", + "cavlary", "cavalry", + "celcius", "celsius", + "celisus", "celsius", + "celitcs", "celtics", + "celsuis", "celsius", + "centruy", "century", + "centuty", "century", + "ceratin", "certain", + "cermaic", "ceramic", + "certian", "certain", + "cervial", "cervical", + "cesspol", "cesspool", + "cetlics", "celtics", + "chambre", "chamber", + "charcol", "charcoal", + "charisa", "charisma", + "chasiss", "chassis", + "chatoic", "chaotic", + "cheeots", "cheetos", + "cheesse", "cheeses", + "chekcer", "checker", + "chelsae", "chelsea", + "cheslea", "chelsea", + "chiense", "chinese", + "childen", "children", + "chimeny", "chimney", + "chinees", "chinese", + "chinmey", "chimney", + "chipest", "chipset", + "chispet", "chipset", + "chivaly", "chivalry", + "chlesea", "chelsea", + "chnages", "changes", + "choatic", "chaotic", + "chocies", "choices", + "choosen", "chosen", + "chtulhu", "cthulhu", + "churchs", "churches", + "cilanto", "cilantro", + "cilents", "clients", + "circels", "circles", + "circuis", "circuits", + "cirlces", "circles", + "clacium", "calcium", + "claerer", "clearer", + "claerly", "clearly", + "clagary", "calgary", + "claibre", "calibre", + "claimes", "claims", + "clairfy", "clarify", + "clairty", "clarity", + "clanand", "clannad", + "clarfiy", "clarify", + "classis", "classics", + "clasues", "clauses", + "claymer", "claymore", + "claymoe", "claymore", + "cleanes", "cleanse", + "cleasne", "cleanse", + "cleints", "clients", + "clenase", "cleanse", + "clesius", "celsius", + "cletics", "celtics", + "clevery", "cleverly", + "climats", "climates", + "climbes", "climbers", + "clincis", "clinics", + "clitors", "clitoris", + "cloesly", "closely", + "closley", "closely", + "cluases", 
"clauses", + "cluprit", "culprit", + "coalese", "coalesce", + "coctail", "cocktail", + "cohesie", "cohesive", + "colgone", "cologne", + "collape", "collapse", + "collest", "collects", + "collony", "colony", + "collumn", "column", + "cologen", "cologne", + "colomba", "colombia", + "colonge", "cologne", + "colorao", "colorado", + "colourd", "coloured", + "columsn", "columns", + "comando", "commando", + "comapny", "company", + "comapre", "compare", + "comarde", "comrade", + "comback", "comeback", + "combins", "combines", + "comdeic", "comedic", + "comited", "committed", + "commano", "commando", + "commans", "commands", + "commere", "commerce", + "comming", "coming", + "commitd", "commited", + "compase", "compares", + "compede", "competed", + "compilr", "compiler", + "compnay", "company", + "compots", "compost", + "comrads", "comrades", + "comtpon", "compton", + "conceed", "concede", + "conceps", "concepts", + "conclue", "conclude", + "concret", "concert", + "condenm", "condemn", + "condiut", "conduit", + "condmen", "condemn", + "confids", "confides", + "confins", "confines", + "confise", "confines", + "conflit", "conflict", + "conived", "connived", + "connecs", "connects", + "conqeur", "conquer", + "conqure", "conquer", + "consept", "concept", + "consern", "concern", + "consums", "consumes", + "contacs", "contacts", + "contais", "contains", + "contast", "contacts", + "contemt", "contempt", + "contens", "contents", + "contess", "contests", + "contian", "contain", + "contine", "continue", + "convers", "converts", + "conveyd", "conveyed", + "convine", "convince", + "coprses", "corpses", + "coputer", "computer", + "corasir", "corsair", + "coratia", "croatia", + "coridal", "cordial", + "corsari", "corsair", + "corsiar", "corsair", + "corspes", "corpses", + "corwbar", "crowbar", + "costums", "costumes", + "coudlnt", "couldnt", + "coulmns", "columns", + "coulndt", "couldnt", + "counsle", "counsel", + "countes", "counters", + "courtey", "courtesy", + "covenat", "covenant", + "coytoes", "coyotes", + "crabine", "carbine", + "cralwed", "crawled", + "craotia", "croatia", + "craweld", "crawled", + "creamic", "ceramic", + "createn", "creatine", + "creater", "creature", + "creatie", "creatine", + "creatue", "creature", + "creepes", "creepers", + "creepig", "creeping", + "creulty", "cruelty", + "cricles", "circles", + "critera", "criteria", + "cropses", "corpses", + "crosair", "corsair", + "crpytic", "cryptic", + "crsytal", "crystal", + "crtical", "critical", + "crucibe", "crucible", + "cruetly", "cruelty", + "cruical", "crucial", + "crulety", "cruelty", + "crusdae", "crusade", + "crusier", "cruiser", + "crusies", "cruises", + "crusive", "cursive", + "crutchs", "crutches", + "crypitc", "cryptic", + "crystas", "crystals", + "crystsl", "crystals", + "crytpic", "cryptic", + "crytsal", "crystal", + "cthluhu", "cthulhu", + "cthuhlu", "cthulhu", + "cthuluh", "cthulhu", + "ctuhlhu", "cthulhu", + "cuasing", "causing", + "cubcile", "cubicle", + "cubilce", "cubicle", + "cuddels", "cuddles", + "culrpit", "culprit", + "culturs", "cultures", + "cupboad", "cupboard", + "cuplrit", "culprit", + "curatin", "curtain", + "curcial", "crucial", + "curcuit", "circuit", + "curelty", "cruelty", + "curiser", "cruiser", + "curisve", "cursive", + "currate", "curate", + "currens", "currents", + "curreny", "currency", + "currest", "currents", + "cursade", "crusade", + "curtian", "curtain", + "cyandie", "cyanide", + "cyclits", "cyclist", + "cycloen", "cyclone", + "cycolps", "cyclops", + "cylcist", "cyclist", + "cylcone", "cyclone", + 
"cylcops", "cyclops", + "cynaide", "cyanide", + "cyrptic", "cryptic", + "cyrstal", "crystal", + "dagners", "dangers", + "daimond", "diamond", + "damenor", "demeanor", + "dammage", "damage", + "darcula", "dracula", + "dargons", "dragons", + "darkets", "darkest", + "datbase", "database", + "daulity", "duality", + "dawrves", "dwarves", + "ddogers", "dodgers", + "ddoging", "dodging", + "deadlit", "deadlift", + "deadpol", "deadpool", + "deafult", "default", + "deahtly", "deathly", + "deatils", "details", + "deatlhy", "deathly", + "decalre", "declare", + "decison", "decision", + "declars", "declares", + "declase", "declares", + "decress", "decrees", + "decribe", "describe", + "decsend", "descend", + "dectect", "detect", + "defaint", "defiant", + "defauls", "defaults", + "defelct", "deflect", + "defensd", "defends", + "deffine", "define", + "definat", "defiant", + "definet", "definite", + "definie", "definite", + "definig", "defining", + "definit", "definite", + "defualt", "default", + "degarde", "degrade", + "degrase", "degrasse", + "degrate", "degrade", + "deiners", "deniers", + "deisgns", "designs", + "deivant", "deviant", + "dekstop", "desktop", + "delcare", "declare", + "delfect", "deflect", + "demenor", "demeanor", + "dementa", "dementia", + "demsond", "desmond", + "deneirs", "deniers", + "denisty", "density", + "densley", "densely", + "depcits", "depicts", + "dependd", "depended", + "depitcs", "depicts", + "deployd", "deployed", + "depsise", "despise", + "descrie", "describe", + "descuss", "discuss", + "desgins", "designs", + "desings", "designs", + "desitny", "destiny", + "desnely", "densely", + "desnity", "density", + "desomnd", "desmond", + "despict", "depict", + "despide", "despised", + "despies", "despise", + "destkop", "desktop", + "destory", "destroy", + "destros", "destroys", + "detaild", "detailed", + "detials", "details", + "detorit", "detroit", + "detriot", "detroit", + "deuling", "dueling", + "devaint", "deviant", + "devaite", "deviate", + "devided", "divided", + "devlove", "devolve", + "devotin", "devotion", + "devovle", "devolve", + "diabets", "diabetes", + "dialecs", "dialects", + "dialoge", "dialogue", + "diamons", "diamonds", + "diasble", "disable", + "dicksih", "dickish", + "dicover", "discover", + "dictats", "dictates", + "dieties", "deities", + "dilpoma", "diploma", + "dimaond", "diamond", + "dingity", "dignity", + "dinosar", "dinosaur", + "diosese", "diocese", + "dipolma", "diploma", + "dirbble", "dribble", + "directy", "directly", + "diretcx", "directx", + "dirived", "derived", + "dirvers", "drivers", + "disbale", "disable", + "disguss", "disgusts", + "disliks", "dislikes", + "disover", "discover", + "dispair", "despair", + "dispath", "dispatch", + "dispite", "despite", + "dispuse", "disputes", + "disputs", "disputes", + "dissole", "dissolve", + "distase", "distaste", + "distint", "distinct", + "divison", "division", + "docuhes", "douches", + "docuhey", "douchey", + "dogders", "dodgers", + "dogding", "dodging", + "dolhpin", "dolphin", + "dolphis", "dolphins", + "dominae", "dominate", + "dominno", "dominion", + "doplhin", "dolphin", + "dortmud", "dortmund", + "draclua", "dracula", + "dracual", "dracula", + "drakest", "darkest", + "dramtic", "dramatic", + "dribbel", "dribble", + "driectx", "directx", + "driftig", "drifting", + "drinkes", "drinkers", + "druming", "drumming", + "duailty", "duality", + "dualtiy", "duality", + "dubsetp", "dubstep", + "dulaity", "duality", + "duleing", "dueling", + "dunegon", "dungeon", + "dungeos", "dungeons", + "dungoen", "dungeon", + 
"durring", "during", + "dusbtep", "dubstep", + "dyansty", "dynasty", + "dynamis", "dynamics", + "dynsaty", "dynasty", + "earlies", "earliest", + "earliet", "earliest", + "earplus", "earplugs", + "eastwod", "eastwood", + "ebcuase", "becuase", + "ecilpse", "eclipse", + "eclipes", "eclipse", + "eclispe", "eclipse", + "eclpise", "eclipse", + "ectsasy", "ecstasy", + "edbiles", "edibles", + "edibels", "edibles", + "effords", "efforts", + "ehtanol", "ethanol", + "eifnach", "einfach", + "eighten", "eighteen", + "einfahc", "einfach", + "elasped", "elapsed", + "elcipse", "eclipse", + "elction", "election", + "elecrto", "electro", + "electic", "electric", + "electon", "election", + "ellitot", "elliott", + "elloitt", "elliott", + "elphant", "elephant", + "emabrgo", "embargo", + "emabssy", "embassy", + "emapthy", "empathy", + "embeded", "embedded", + "embrago", "embargo", + "eminate", "emanate", + "emipres", "empires", + "emision", "emission", + "emiting", "emitting", + "emition", "emission", + "emmited", "emitted", + "empahty", "empathy", + "emphsis", "emphasis", + "empiers", "empires", + "empited", "emptied", + "emplore", "employer", + "emporer", "emperor", + "empries", "empires", + "emtpied", "emptied", + "enameld", "enameled", + "encahnt", "enchant", + "encalve", "enclave", + "encrpyt", "encrypt", + "encyrpt", "encrypt", + "endores", "endorse", + "endrose", "endorse", + "energis", "energies", + "enforse", "enforces", + "enginer", "engineer", + "englsih", "english", + "enhanse", "enhances", + "enlcave", "enclave", + "enlgish", "english", + "enlsave", "enslave", + "ensalve", "enslave", + "entbook", "netbook", + "entirey", "entirety", + "entorpy", "entropy", + "epiloge", "epilogue", + "episdoe", "episode", + "epsiode", "episode", + "epsorts", "esports", + "eptiome", "epitome", + "equiped", "equipped", + "erested", "arrested", + "escapse", "escapes", + "escpaes", "escapes", + "esctasy", "ecstasy", + "esporst", "esports", + "espreso", "espresso", + "esprots", "esports", + "essense", "essence", + "etherel", "ethereal", + "ethnaol", "ethanol", + "euphora", "euphoria", + "europen", "european", + "eurpean", "european", + "everets", "everest", + "everset", "everest", + "evloved", "evolved", + "evloves", "evolves", + "evovled", "evolved", + "evovles", "evolves", + "exaclty", "exactly", + "exahust", "exhaust", + "examind", "examined", + "exapnds", "expands", + "exatled", "exalted", + "excange", "exchange", + "excatly", "exactly", + "excells", "excels", + "exceprt", "excerpt", + "excluse", "excludes", + "excrept", "excerpt", + "exculde", "exclude", + "exelent", "excellent", + "exemple", "example", + "exerpts", "excerpts", + "exhasut", "exhaust", + "exhuast", "exhaust", + "exising", "existing", + "existet", "existent", + "exlated", "exalted", + "exlcude", "exclude", + "exliled", "exiled", + "exludes", "excludes", + "exmaple", "example", + "exoitcs", "exotics", + "expalin", "explain", + "expeced", "expected", + "expells", "expels", + "expiers", "expires", + "explict", "explicit", + "expliot", "exploit", + "explods", "explodes", + "explose", "explodes", + "expolde", "explode", + "expolit", "exploit", + "exposse", "exposes", + "expries", "expires", + "exracts", "extracts", + "exsited", "existed", + "extered", "exerted", + "exterme", "extreme", + "extoics", "exotics", + "extreem", "extreme", + "extrems", "extremes", + "eyebals", "eyeballs", + "eyebros", "eyebrows", + "fabulos", "fabulous", + "facebok", "facebook", + "facepam", "facepalm", + "faclons", "falcons", + "facsism", "fascism", + "facsist", "fascist", + 
"failurs", "failures", + "faincee", "fiancee", + "falesly", "falsely", + "falired", "flaired", + "falshed", "flashed", + "falshes", "flashes", + "falsley", "falsely", + "falvors", "flavors", + "familes", "families", + "famoust", "famous", + "famousy", "famously", + "fanatsy", "fantasy", + "fantaic", "fanatic", + "faoming", "foaming", + "fascits", "fascist", + "fasicsm", "fascism", + "fasicst", "fascist", + "faslely", "falsely", + "fatiuge", "fatigue", + "febuary", "february", + "fecthed", "fetched", + "fecthes", "fetches", + "feminen", "feminine", + "feminie", "feminine", + "feminim", "feminism", + "feodras", "fedoras", + "fertily", "fertility", + "fesitve", "festive", + "fethced", "fetched", + "fethces", "fetches", + "fetishs", "fetishes", + "fianite", "finite", + "fianlly", "finally", + "fiercly", "fiercely", + "filcker", "flicker", + "filpped", "flipped", + "filterd", "filtered", + "finacee", "fiancee", + "fineses", "finesse", + "fininsh", "finnish", + "finishs", "finishes", + "finisse", "finishes", + "finnsih", "finnish", + "firends", "friends", + "firggin", "friggin", + "firsbee", "frisbee", + "firslty", "firstly", + "firtsly", "firstly", + "fitlers", "filters", + "flacons", "falcons", + "flahsed", "flashed", + "flahses", "flashes", + "flaried", "flaired", + "flasely", "falsely", + "flashig", "flashing", + "flavord", "flavored", + "flavous", "flavours", + "flawess", "flawless", + "flciker", "flicker", + "fliters", "filters", + "flordia", "florida", + "florene", "florence", + "fnaatic", "fanatic", + "fomaing", "foaming", + "fonetic", "phonetic", + "forefit", "forfeit", + "foregin", "foreign", + "foreing", "foreign", + "forfiet", "forfeit", + "forhead", "forehead", + "foriegn", "foreign", + "formaly", "formally", + "formery", "formerly", + "formost", "foremost", + "formual", "formula", + "formuls", "formulas", + "forrset", "forrest", + "forsakn", "forsaken", + "forsane", "forsaken", + "forumla", "formula", + "fountan", "fountain", + "fourten", "fourteen", + "fracter", "fracture", + "fragmet", "fragment", + "freedos", "freedoms", + "freinds", "friends", + "frigign", "friggin", + "fristly", "firstly", + "frostig", "frosting", + "frsibee", "frisbee", + "fruitin", "fruition", + "fullets", "fullest", + "fullset", "fullest", + "funides", "fundies", + "funtion", "function", + "furance", "furnace", + "furncae", "furnace", + "futhroc", "futhark", + "gadgest", "gadgets", + "gagdets", "gadgets", + "galatic", "galactic", + "galcier", "glacier", + "galsgow", "glasgow", + "gameply", "gameplay", + "gamerga", "gamertag", + "gankign", "ganking", + "ganster", "gangster", + "garabge", "garbage", + "garfied", "garfield", + "garnola", "granola", + "generas", "generals", + "genersl", "generals", + "geniuss", "geniuses", + "geogria", "georgia", + "geomety", "geometry", + "georiga", "georgia", + "gernade", "grenade", + "gerogia", "georgia", + "gigabye", "gigabyte", + "giltchy", "glitchy", + "gimmics", "gimmicks", + "gimmicy", "gimmicky", + "girzzly", "grizzly", + "glagsow", "glasgow", + "glaicer", "glacier", + "glicthy", "glitchy", + "glimpes", "glimpse", + "glimspe", "glimpse", + "glipmse", "glimpse", + "glitchd", "glitched", + "glitchs", "glitches", + "glithcy", "glitchy", + "globaly", "globally", + "gloiath", "goliath", + "glorios", "glorious", + "gltichy", "glitchy", + "gnaking", "ganking", + "gnawwed", "gnawed", + "goddanm", "goddamn", + "goddman", "goddamn", + "godliek", "godlike", + "godlman", "goldman", + "godsped", "godspeed", + "goergia", "georgia", + "goilath", "goliath", + "golaith", "goliath", + 
"golbins", "goblins", + "goldamn", "goldman", + "goldbeg", "goldberg", + "goldike", "godlike", + "golitah", "goliath", + "goodluk", "goodluck", + "gorumet", "gourmet", + "gosepls", "gospels", + "gosples", "gospels", + "gpysies", "gypsies", + "grabage", "garbage", + "grahpic", "graphic", + "grainte", "granite", + "grammer", "grammar", + "graniet", "granite", + "grantie", "granite", + "graphie", "graphite", + "graphis", "graphics", + "grappel", "grapple", + "greande", "grenade", + "grenads", "grenades", + "greneer", "greener", + "griaffe", "giraffe", + "gridles", "griddles", + "grillig", "grilling", + "grpahic", "graphic", + "guardin", "guardian", + "guiness", "guinness", + "gullibe", "gullible", + "gutiars", "guitars", + "gypises", "gypsies", + "gyspies", "gypsies", + "habaeus", "habeas", + "haethen", "heathen", + "hailfax", "halifax", + "halfiax", "halifax", + "handbok", "handbook", + "handedy", "handedly", + "handeld", "handled", + "hanlder", "handler", + "hannibl", "hannibal", + "hanuted", "haunted", + "haorder", "hoarder", + "hapened", "happened", + "happend", "happened", + "happliy", "happily", + "harased", "harassed", + "harases", "harasses", + "hardend", "hardened", + "hardwod", "hardwood", + "haricut", "haircut", + "hatchig", "hatching", + "hauntig", "haunting", + "haviest", "heaviest", + "headest", "headset", + "headses", "headsets", + "heaveny", "heavenly", + "heigher", "higher", + "heigths", "heights", + "helemts", "helmets", + "hellfie", "hellfire", + "hellvua", "helluva", + "helment", "helmet", + "helpped", "helped", + "hemlets", "helmets", + "henious", "heinous", + "heorics", "heroics", + "heorine", "heroine", + "heriocs", "heroics", + "herione", "heroine", + "herocis", "heroics", + "heronie", "heroine", + "hesiman", "heisman", + "hieghts", "heights", + "hienous", "heinous", + "hiesman", "heisman", + "himselv", "himself", + "hiptser", "hipster", + "hismelf", "himself", + "hispter", "hipster", + "hitboxs", "hitboxes", + "hoilday", "holiday", + "hokpins", "hopkins", + "holdiay", "holiday", + "holdins", "holdings", + "homniem", "hominem", + "horader", "hoarder", + "hosited", "hoisted", + "hosthot", "hotshot", + "hostles", "hostels", + "hostpot", "hotspot", + "hothsot", "hotshot", + "hotpsot", "hotspot", + "hotsopt", "hotspot", + "hounour", "honour", + "hseldon", "sheldon", + "huanted", "haunted", + "humanit", "humanist", + "humants", "humanist", + "humidiy", "humidity", + "humoros", "humorous", + "hunagry", "hungary", + "hunderd", "hundred", + "hundres", "hundreds", + "hungray", "hungary", + "hurdels", "hurdles", + "hurldes", "hurdles", + "husbans", "husbands", + "hweaton", "wheaton", + "hybirds", "hybrids", + "hydogen", "hydrogen", + "hygeine", "hygiene", + "hypnoss", "hypnosis", + "hyrbids", "hybrids", + "hystera", "hysteria", + "iceforg", "icefrog", + "ierland", "ireland", + "ignitin", "ignition", + "ignorat", "ignorant", + "illegas", "illegals", + "illegsl", "illegals", + "illinos", "illinois", + "imanent", "eminent", + "imapcts", "impacts", + "iminent", "eminent", + "imminet", "imminent", + "implict", "implicit", + "imploed", "implode", + "imploys", "employs", + "impluse", "impulse", + "impolde", "implode", + "importd", "imported", + "imporve", "improve", + "impules", "impulse", + "impusle", "impulse", + "imrpove", "improve", + "incldue", "include", + "incluse", "includes", + "indains", "indians", + "indeces", "indices", + "indiaan", "indiana", + "indluge", "indulge", + "indugle", "indulge", + "infalte", "inflate", + "infenro", "inferno", + "infered", "inferred", + 
"inferir", "inferior", + "infinet", "infinite", + "infinie", "infinite", + "infinit", "infinite", + "infornt", "infront", + "infroms", "informs", + "infrotn", "infront", + "inheirt", "inherit", + "inidans", "indians", + "initals", "initials", + "initisl", "initials", + "inlcine", "incline", + "inovker", "invoker", + "inpeach", "impeach", + "inpsect", "inspect", + "inpsire", "inspire", + "inquier", "inquire", + "inquriy", "inquiry", + "insaney", "insanely", + "inscets", "insects", + "insepct", "inspect", + "insipre", "inspire", + "insluts", "insults", + "instade", "instead", + "instint", "instinct", + "intenst", "intents", + "intered", "interred", + "interet", "interest", + "internt", "internet", + "interro", "interior", + "intrest", "interest", + "intrige", "intrigue", + "invlove", "involve", + "invoekr", "invoker", + "invovle", "involve", + "iornman", "ironman", + "iranain", "iranian", + "iranias", "iranians", + "iranina", "iranian", + "irleand", "ireland", + "ironamn", "ironman", + "isalmic", "islamic", + "isareli", "israeli", + "islamit", "islamist", + "islmaic", "islamic", + "isloate", "isolate", + "isralei", "israeli", + "isreali", "israeli", + "italias", "italians", + "jagaurs", "jaguars", + "jaguras", "jaguars", + "jamacia", "jamaica", + "jamaina", "jamaican", + "jamiaca", "jamaica", + "jamsine", "jasmine", + "janaury", "january", + "januray", "january", + "japanes", "japanese", + "jasmien", "jasmine", + "jaugars", "jaguars", + "jaunary", "january", + "jeircho", "jericho", + "jennins", "jennings", + "jeopary", "jeopardy", + "jeresys", "jerseys", + "jericoh", "jericho", + "jersyes", "jerseys", + "jewerly", "jewelry", + "jorunal", "journal", + "jounral", "journal", + "joystik", "joystick", + "juadism", "judaism", + "judasim", "judaism", + "judical", "judicial", + "juipter", "jupiter", + "junglig", "jungling", + "juptier", "jupiter", + "jusitfy", "justify", + "justfiy", "justify", + "karakoe", "karaoke", + "karoake", "karaoke", + "kenendy", "kennedy", + "kenndey", "kennedy", + "kentucy", "kentucky", + "keyboad", "keyboard", + "keychan", "keychain", + "keynode", "keynote", + "kicthen", "kitchen", + "killins", "killings", + "kineitc", "kinetic", + "kinghts", "knights", + "kinteic", "kinetic", + "kitches", "kitchens", + "kitites", "kitties", + "knietic", "kinetic", + "knigths", "knights", + "knuckel", "knuckle", + "kroeans", "koreans", + "krudish", "kurdish", + "ktichen", "kitchen", + "kubirck", "kubrick", + "kunckle", "knuckle", + "kurbick", "kubrick", + "kuridsh", "kurdish", + "laguage", "language", + "landins", "landings", + "lantren", "lantern", + "laready", "already", + "laregly", "largely", + "largley", "largely", + "lasanga", "lasagna", + "lasgana", "lasagna", + "latitue", "latitude", + "latnern", "lantern", + "launhed", "launched", + "lavendr", "lavender", + "leathal", "lethal", + "lefitst", "leftist", + "leftits", "leftist", + "legnths", "lengths", + "legnthy", "lengthy", + "legoins", "legions", + "leigons", "legions", + "lenghts", "lengths", + "lenoard", "leonard", + "lepoard", "leopard", + "lesbain", "lesbian", + "lesiban", "lesbian", + "lesiure", "leisure", + "liasion", "liaison", + "liasons", "liaisons", + "liberae", "liberate", + "liberas", "liberals", + "lienups", "lineups", + "liesure", "leisure", + "liftime", "lifetime", + "lighlty", "lightly", + "lightes", "lighters", + "ligthly", "lightly", + "linclon", "lincoln", + "linueps", "lineups", + "liqiuds", "liquids", + "lisence", "license", + "lisense", "license", + "listend", "listened", + "litecon", "litecoin", + "literae", 
"literate", + "lithuim", "lithium", + "litihum", "lithium", + "loadous", "loadouts", + "loenard", "leonard", + "loepard", "leopard", + "logiteh", "logitech", + "loosley", "loosely", + "luandry", "laundry", + "luckliy", "luckily", + "luicfer", "lucifer", + "lunatis", "lunatics", + "maching", "machine", + "machins", "machines", + "maclolm", "malcolm", + "macthup", "matchup", + "madsion", "madison", + "magents", "magnets", + "magicin", "magician", + "magolia", "magnolia", + "maidson", "madison", + "maintan", "maintain", + "mairlyn", "marilyn", + "malaira", "malaria", + "malaysa", "malaysia", + "malclom", "malcolm", + "manauls", "manuals", + "mandase", "mandates", + "mandats", "mandates", + "mangeld", "mangled", + "mangets", "magnets", + "manualy", "manually", + "manuver", "maneuver", + "marbels", "marbles", + "margart", "margaret", + "mariage", "marriage", + "mariens", "marines", + "maritan", "martian", + "marixsm", "marxism", + "mariyln", "marilyn", + "markede", "marketed", + "marlbes", "marbles", + "marliyn", "marilyn", + "marnies", "marines", + "marrage", "marriage", + "martail", "martial", + "martain", "martian", + "masacra", "mascara", + "massace", "massacre", + "mathcup", "matchup", + "mathwes", "mathews", + "matrial", "martial", + "maunals", "manuals", + "mcalren", "mclaren", + "meanins", "meanings", + "medicad", "medicaid", + "medicae", "medicare", + "medioce", "mediocre", + "meixcan", "mexican", + "meldoic", "melodic", + "melieux", "milieux", + "melodis", "melodies", + "memeber", "member", + "memoery", "memory", + "memorie", "memory", + "menally", "mentally", + "mentaly", "mentally", + "meoldic", "melodic", + "meranda", "veranda", + "merchat", "merchant", + "merucry", "mercury", + "messagd", "messaged", + "messaih", "messiah", + "metagem", "metagame", + "metalic", "metallic", + "mexcian", "mexican", + "michina", "michigan", + "midfied", "midfield", + "midotwn", "midtown", + "midtwon", "midtown", + "migrans", "migrants", + "militat", "militant", + "militis", "militias", + "miltary", "military", + "mimimum", "minimum", + "mineras", "minerals", + "mininos", "minions", + "ministr", "minister", + "ministy", "ministry", + "minoins", "minions", + "minstry", "ministry", + "minumum", "minimum", + "mirrord", "mirrored", + "misandy", "misandry", + "misison", "mission", + "misouri", "missouri", + "mispell", "misspell", + "missils", "missiles", + "mistery", "mystery", + "mobiliy", "mobility", + "modualr", "modular", + "momento", "memento", + "momment", "moment", + "monarcy", "monarchy", + "monatge", "montage", + "monglos", "mongols", + "monitos", "monitors", + "monstre", "monster", + "montaeg", "montage", + "montrel", "montreal", + "monumet", "monument", + "morbidy", "morbidly", + "morgage", "mortgage", + "morphen", "morphine", + "morphie", "morphine", + "morroco", "morocco", + "mortage", "mortgage", + "mosnter", "monster", + "mosture", "moisture", + "motivet", "motivate", + "motnage", "montage", + "motoral", "motorola", + "mountan", "mountain", + "movment", "movement", + "mucuous", "mucous", + "muesums", "museums", + "muliple", "multiple", + "mulsims", "muslims", + "multipe", "multiple", + "multipy", "multiply", + "munbers", "numbers", + "munchis", "munchies", + "murderd", "murdered", + "muscial", "musical", + "mushrom", "mushroom", + "musilms", "muslims", + "muslces", "muscles", + "musuems", "museums", + "mutatin", "mutation", + "mypsace", "myspace", + "mysapce", "myspace", + "napolen", "napoleon", + "narhwal", "narwhal", + "natique", "antique", + "nativey", "natively", + "natrual", "natural", 
+ "naugthy", "naughty", + "nauseos", "nauseous", + "nautils", "nautilus", + "nautral", "natural", + "nautres", "natures", + "nectode", "netcode", + "needels", "needles", + "neruons", "neurons", + "neslave", "enslave", + "netocde", "netcode", + "netowrk", "network", + "netural", "neutral", + "neturon", "neutron", + "netwrok", "network", + "neurton", "neutron", + "neuterd", "neutered", + "nighlty", "nightly", + "nigthly", "nightly", + "nihilim", "nihilism", + "ninties", "1990s", + "niverse", "inverse", + "nocture", "nocturne", + "nominae", "nominate", + "nominet", "nominate", + "nonsene", "nonsense", + "noramls", "normals", + "norhern", "northern", + "normaly", "normally", + "normany", "normandy", + "northen", "northern", + "nostris", "nostrils", + "notario", "ontario", + "notebok", "notebook", + "nothern", "northern", + "nowdays", "nowadays", + "nrivana", "nirvana", + "nuaghty", "naughty", + "nubmers", "numbers", + "nucelar", "nuclear", + "nucelus", "nucleus", + "nuclean", "unclean", + "nuclues", "nucleus", + "nucular", "nuclear", + "nuerons", "neurons", + "nuetral", "neutral", + "nuetron", "neutron", + "nulcear", "nuclear", + "nullfiy", "nullify", + "nusance", "nuisance", + "nutriet", "nutrient", + "oarcles", "oracles", + "obivous", "obvious", + "obvoius", "obvious", + "ocarnia", "ocarina", + "ocasion", "occasion", + "occured", "occurred", + "ocotber", "october", + "ocotpus", "octopus", + "ocraina", "ocarina", + "ocuntry", "country", + "ocurred", "occurred", + "ofcoure", "ofcourse", + "offcers", "officers", + "offical", "official", + "offisde", "offside", + "oftenly", "often", + "ogrilla", "gorilla", + "olmypic", "olympic", + "olreans", "orleans", + "olympis", "olympics", + "olypmic", "olympic", + "omision", "omission", + "omiting", "omitting", + "omlette", "omelette", + "ommited", "omitted", + "onatrio", "ontario", + "onbaord", "onboard", + "onborad", "onboard", + "ontairo", "ontario", + "ontraio", "ontario", + "opartor", "operator", + "openess", "openness", + "opitcal", "optical", + "opitmal", "optimal", + "oponent", "opponent", + "oposite", "opposite", + "oppenly", "openly", + "opponet", "opponent", + "oprhans", "orphans", + "optimim", "optimism", + "oracels", "oracles", + "oragnes", "oranges", + "oragsms", "orgasms", + "oralces", "oracles", + "orbtial", "orbital", + "orcales", "oracles", + "orelans", "orleans", + "organes", "organise", + "organie", "organise", + "organim", "organism", + "orginal", "original", + "orhpans", "orphans", + "oribtal", "orbital", + "orlenas", "orleans", + "orpahns", "orphans", + "orthodx", "orthodox", + "outfied", "outfield", + "outsidr", "outsider", + "overhal", "overhaul", + "overpad", "overpaid", + "oversue", "overuse", + "overtun", "overturn", + "ownders", "wonders", + "owuldve", "wouldve", + "oylmpic", "olympic", + "pacakge", "package", + "pacifit", "pacifist", + "packade", "packaged", + "pacthes", "patches", + "pahntom", "phantom", + "paitent", "patient", + "palcebo", "placebo", + "pallete", "palette", + "palster", "plaster", + "palyboy", "playboy", + "pamflet", "pamphlet", + "pamplet", "pamphlet", + "pancaks", "pancakes", + "pandroa", "pandora", + "panthen", "pantheon", + "paradim", "paradigm", + "paradse", "parades", + "paralel", "parallel", + "paranoa", "paranoia", + "parises", "praises", + "parites", "parties", + "partice", "particle", + "partick", "patrick", + "partiel", "particle", + "partiot", "patriot", + "partols", "patrols", + "passabe", "passable", + "passivs", "passives", + "pasuing", "pausing", + "pateint", "patient", + "pathces", 
"patches", + "patiens", "patients", + "patirot", "patriot", + "patrcik", "patrick", + "patrios", "patriots", + "patroit", "patriot", + "peaples", "peoples", + "pebbels", "pebbles", + "peirced", "pierced", + "penatly", "penalty", + "pendulm", "pendulum", + "penguis", "penguins", + "penicls", "pencils", + "penison", "pension", + "penisse", "penises", + "penitum", "pentium", + "pensies", "penises", + "pensino", "pension", + "pentuim", "pentium", + "peopels", "peoples", + "percise", "precise", + "perdict", "predict", + "perfers", "prefers", + "perhasp", "perhaps", + "perhpas", "perhaps", + "perisan", "persian", + "perjery", "perjury", + "permade", "premade", + "permier", "premier", + "permise", "premise", + "permium", "premium", + "peroids", "periods", + "peronal", "personal", + "perpaid", "prepaid", + "perphas", "perhaps", + "persain", "persian", + "persets", "presets", + "persits", "persist", + "persued", "pursued", + "persuit", "pursuit", + "pervail", "prevail", + "perview", "preview", + "pharoah", "pharaoh", + "phatnom", "phantom", + "phsyics", "physics", + "phyiscs", "physics", + "physcis", "physics", + "physiqe", "physique", + "picthed", "pitched", + "picther", "pitcher", + "picthes", "pitches", + "piegons", "pigeons", + "piglrim", "pilgrim", + "pigoens", "pigeons", + "pilgirm", "pilgrim", + "pilrgim", "pilgrim", + "pinoeer", "pioneer", + "pinpoit", "pinpoint", + "pionere", "pioneer", + "pireced", "pierced", + "pithces", "pitches", + "plantes", "planets", + "plastis", "plastics", + "plastre", "plaster", + "plataeu", "plateau", + "plateua", "plateau", + "playabe", "playable", + "playofs", "playoffs", + "plesant", "pleasant", + "pligrim", "pilgrim", + "ploygon", "polygon", + "ploymer", "polymer", + "podemso", "podemos", + "podmeos", "podemos", + "poeples", "peoples", + "poignat", "poignant", + "poineer", "pioneer", + "pointes", "pointers", + "poisond", "poisoned", + "polgyon", "polygon", + "polical", "political", + "polishs", "polishes", + "polisse", "polishes", + "politey", "politely", + "poluted", "polluted", + "polutes", "pollutes", + "popluar", "popular", + "populer", "popular", + "populos", "populous", + "porpose", "propose", + "porshan", "portion", + "porshon", "portion", + "portait", "portrait", + "portary", "portray", + "portras", "portrays", + "portrat", "portrait", + "posions", "poisons", + "positon", "position", + "positve", "positive", + "possibe", "possible", + "possiby", "possibly", + "postdam", "potsdam", + "postion", "position", + "postive", "positive", + "potatos", "potatoes", + "potical", "optical", + "potrait", "portrait", + "powderd", "powdered", + "poweful", "powerful", + "poylgon", "polygon", + "poylmer", "polymer", + "practie", "practise", + "praisse", "praises", + "praries", "prairies", + "prasied", "praised", + "prasies", "praises", + "pratice", "practice", + "preamde", "premade", + "preceed", "precede", + "precice", "precise", + "preests", "presets", + "prehaps", "perhaps", + "preimer", "premier", + "preimum", "premium", + "preists", "priests", + "preivew", "preview", + "premeir", "premier", + "premiee", "premiere", + "premire", "premier", + "premits", "permits", + "premius", "premiums", + "premuim", "premium", + "prepair", "prepare", + "preriod", "period", + "presens", "presents", + "presest", "presets", + "presist", "persist", + "prestes", "presets", + "presude", "presumed", + "pretene", "pretense", + "pretens", "pretends", + "preveiw", "preview", + "prevert", "pervert", + "previal", "prevail", + "previes", "previews", + "previos", "previous", + "priased", 
"praised", + "priases", "praises", + "printes", "printers", + "pristen", "pristine", + "probabe", "probable", + "probaly", "probably", + "probelm", "problem", + "procede", "proceed", + "procees", "proceeds", + "procesd", "proceeds", + "proclam", "proclaim", + "produly", "proudly", + "produse", "produces", + "progidy", "prodigy", + "progrom", "pogrom", + "prohibt", "prohibit", + "prohpet", "prophet", + "prologe", "prologue", + "promose", "promotes", + "promots", "promotes", + "prompty", "promptly", + "promtps", "prompts", + "pronous", "pronouns", + "prooved", "proved", + "propeht", "prophet", + "prophey", "prophecy", + "propper", "proper", + "protals", "portals", + "protecs", "protects", + "protess", "protests", + "protocl", "protocol", + "protray", "portray", + "prouldy", "proudly", + "provded", "provided", + "provine", "province", + "prusuit", "pursuit", + "pryamid", "pyramid", + "pscyhed", "psyched", + "ptiched", "pitched", + "pticher", "pitcher", + "puasing", "pausing", + "publicy", "publicly", + "publsih", "publish", + "puhsups", "pushups", + "punishs", "punishes", + "punisse", "punishes", + "pursiut", "pursuit", + "pursude", "pursued", + "purused", "pursued", + "pushpus", "pushups", + "pyarmid", "pyramid", + "pyramis", "pyramids", + "pyrmaid", "pyramid", + "pysched", "psyched", + "qaulify", "qualify", + "qaulity", "quality", + "qauntum", "quantum", + "quailfy", "qualify", + "quailty", "quality", + "queires", "queries", + "queitly", "quietly", + "quereis", "queries", + "quicket", "quickest", + "quielty", "quietly", + "quitely", "quietly", + "qunatum", "quantum", + "qunetin", "quentin", + "racisst", "racists", + "racthet", "ratchet", + "radaint", "radiant", + "radiane", "radiance", + "radicas", "radicals", + "radiers", "raiders", + "raelism", "realism", + "raidant", "radiant", + "railrod", "railroad", + "rainbos", "rainbows", + "raoches", "roaches", + "raoming", "roaming", + "raptros", "raptors", + "raputre", "rapture", + "rathcet", "ratchet", + "ratpure", "rapture", + "reacing", "reaching", + "reagrds", "regards", + "realies", "realise", + "realsie", "realise", + "realsim", "realism", + "realtes", "relates", + "reamins", "remains", + "reapirs", "repairs", + "rebouns", "rebounds", + "rebulit", "rebuilt", + "recalim", "reclaim", + "receips", "receipts", + "recided", "resided", + "reciept", "receipt", + "recievd", "recieved", + "recieve", "receive", + "recitfy", "rectify", + "recived", "received", + "reclami", "reclaim", + "recliam", "reclaim", + "recorre", "recorder", + "recoves", "recovers", + "recpies", "recipes", + "redeemd", "redeemed", + "redners", "renders", + "refelct", "reflect", + "referal", "referral", + "refered", "referred", + "referig", "refering", + "referrs", "refers", + "reflexs", "reflexes", + "refrers", "refers", + "refroms", "reforms", + "refusla", "refusal", + "regerts", "regrets", + "regiems", "regimes", + "regimet", "regiment", + "registy", "registry", + "regluar", "regular", + "regrest", "regrets", + "regulae", "regulate", + "regulas", "regulars", + "regulsr", "regulars", + "reigmes", "regimes", + "reigons", "regions", + "reitres", "retires", + "reivews", "reviews", + "reknown", "renown", + "relaise", "realise", + "relapes", "relapse", + "relaspe", "relapse", + "relatie", "relative", + "relatin", "relation", + "relcaim", "reclaim", + "releive", "relieve", + "releses", "releases", + "relfect", "reflect", + "reliabe", "reliable", + "relient", "reliant", + "relized", "realised", + "relpase", "relapse", + "remaind", "remained", + "remaing", "remaining", + "remakrs", 
"remarks", + "remannt", "remnant", + "remeber", "remember", + "remians", "remains", + "remnans", "remnants", + "renderd", "rendered", + "renegae", "renegade", + "renmant", "remnant", + "rentors", "renters", + "rentres", "renters", + "renuion", "reunion", + "repaird", "repaired", + "repalys", "replays", + "repblic", "republic", + "repeast", "repeats", + "repects", "respects", + "repitle", "reptile", + "replase", "replaces", + "replayd", "replayed", + "reponse", "response", + "repostd", "reposted", + "repsawn", "respawn", + "repsond", "respond", + "repsots", "reposts", + "reptiel", "reptile", + "reptils", "reptiles", + "repubic", "republic", + "republi", "republic", + "repulic", "republic", + "reqiuem", "requiem", + "requeim", "requiem", + "requime", "requiem", + "requred", "required", + "resapwn", "respawn", + "rescuse", "rescues", + "resembe", "resemble", + "reslove", "resolve", + "resolvs", "resolves", + "resonet", "resonate", + "resouce", "resource", + "resovle", "resolve", + "respest", "respects", + "respone", "response", + "respwan", "respawn", + "ressits", "resists", + "restord", "restored", + "resuced", "rescued", + "resuces", "rescues", + "retrive", "retrieve", + "returnd", "returned", + "reuinon", "reunion", + "reveald", "revealed", + "reveiws", "reviews", + "revelas", "reveals", + "reveral", "reversal", + "reviere", "reviewer", + "reviewd", "reviewed", + "reviewr", "reviewer", + "revolvr", "revolver", + "revolvs", "revolves", + "rewirte", "rewrite", + "reworkd", "reworked", + "rewriet", "rewrite", + "reynols", "reynolds", + "rhapsoy", "rhapsody", + "rhythem", "rhythm", + "rhythim", "rhythm", + "rhytmic", "rhythmic", + "riaders", "raiders", + "ritlain", "ritalin", + "ritoers", "rioters", + "rivarly", "rivalry", + "rivlary", "rivalry", + "roahces", "roaches", + "robotis", "robotics", + "rococco", "rococo", + "roestta", "rosetta", + "roiters", "rioters", + "roleply", "roleplay", + "romaina", "romania", + "romaing", "roaming", + "romanin", "romanian", + "romanna", "romanian", + "roomate", "roommate", + "rotuers", "routers", + "rugters", "rutgers", + "rulebok", "rulebook", + "rumorus", "rumours", + "rumuors", "rumours", + "runnung", "running", + "ruslted", "rustled", + "russina", "russian", + "russion", "russian", + "rusteld", "rustled", + "rythmic", "rhythmic", + "rythyms", "rhythms", + "sacrasm", "sarcasm", + "saddnes", "saddens", + "sadistc", "sadistic", + "sadning", "sanding", + "salaris", "salaries", + "salavge", "salvage", + "salvery", "slavery", + "salying", "slaying", + "sampels", "samples", + "samruai", "samurai", + "samuari", "samurai", + "samuria", "samurai", + "sandlas", "sandals", + "sandnig", "sanding", + "sanlder", "sandler", + "santorm", "santorum", + "sapphie", "sapphire", + "sarcams", "sarcasm", + "sargant", "sergeant", + "sasuage", "sausage", + "satifsy", "satisfy", + "satsify", "satisfy", + "satsohi", "satoshi", + "savanha", "savannah", + "savannh", "savannah", + "saveing", "saving", + "sawnsea", "swansea", + "sawnson", "swanson", + "scandas", "scandals", + "scannig", "scanning", + "scartch", "scratch", + "scheems", "schemes", + "schoold", "schooled", + "sciense", "sciences", + "scinece", "science", + "scootes", "scooters", + "scorpin", "scorpion", + "scpeter", "scepter", + "scracth", "scratch", + "scrambe", "scramble", + "scritps", "scripts", + "scrolld", "scrolled", + "scrpits", "scripts", + "scyhter", "scyther", + "seached", "searched", + "seaches", "searches", + "seahaws", "seahawks", + "seantor", "senator", + "searchd", "searched", + "searchs", "searches", + 
"sebrian", "serbian", + "secerts", "secrets", + "secpter", "scepter", + "secrest", "secrets", + "secrety", "secretly", + "seflies", "selfies", + "seguoys", "segues", + "seinors", "seniors", + "selifes", "selfies", + "senoirs", "seniors", + "sensure", "censure", + "sentaor", "senator", + "sentris", "sentries", + "serbain", "serbian", + "sergeat", "sergeant", + "sergent", "sergeant", + "seriban", "serbian", + "servans", "servants", + "sesnors", "sensors", + "settins", "settings", + "severly", "severely", + "sexualy", "sexually", + "seziure", "seizure", + "shaddow", "shadow", + "shanghi", "shanghai", + "shaprie", "sharpie", + "shaprly", "sharply", + "sharipe", "sharpie", + "shcemes", "schemes", + "sheelpe", "sheeple", + "sheepel", "sheeple", + "shephed", "shepherd", + "sherlok", "sherlock", + "shetler", "shelter", + "shevles", "shelves", + "shfiter", "shifter", + "shieldd", "shielded", + "shiping", "shipping", + "shirely", "shirley", + "shitfer", "shifter", + "shledon", "sheldon", + "shleter", "shelter", + "shoudln", "should", + "shouldt", "shouldnt", + "shoutot", "shoutout", + "showede", "showered", + "showerd", "showered", + "shperes", "spheres", + "shriley", "shirley", + "siblins", "siblings", + "sidelen", "sideline", + "sideral", "sidereal", + "siezing", "seizing", + "siezure", "seizure", + "signfiy", "signify", + "signins", "signings", + "signles", "singles", + "silders", "sliders", + "silenty", "silently", + "similir", "similiar", + "simliar", "similar", + "simplet", "simplest", + "simpley", "simply", + "simplfy", "simplify", + "simpliy", "simplify", + "simposn", "simpson", + "simspon", "simpson", + "singals", "signals", + "singels", "singles", + "singify", "signify", + "singsog", "singsong", + "sitmuli", "stimuli", + "skecthy", "sketchy", + "skeletl", "skeletal", + "skeptis", "skeptics", + "sketchs", "sketches", + "sketpic", "skeptic", + "skpetic", "skeptic", + "sktechy", "sketchy", + "skwyard", "skyward", + "slavage", "salvage", + "slayign", "slaying", + "sldiers", "sliders", + "slefies", "selfies", + "slighly", "slightly", + "slighty", "slightly", + "slippes", "slippers", + "slippey", "slippery", + "smaples", "samples", + "smartre", "smarter", + "smaurai", "samurai", + "snadler", "sandler", + "snigles", "singles", + "snippes", "snippets", + "snodwen", "snowden", + "snwoden", "snowden", + "snycing", "syncing", + "snyergy", "synergy", + "socialy", "socially", + "sofware", "software", + "soildly", "solidly", + "soldies", "soldiers", + "soldily", "solidly", + "somaila", "somalia", + "someons", "someones", + "somethn", "somethin", + "southen", "southern", + "soveits", "soviets", + "spacebr", "spacebar", + "spainsh", "spanish", + "spansih", "spanish", + "spanwed", "spawned", + "sparkel", "sparkle", + "spartas", "spartans", + "spartsn", "spartans", + "sparyed", "sprayed", + "spawend", "spawned", + "spawnig", "spawning", + "specail", "special", + "specfic", "specific", + "specias", "specials", + "specisl", "specials", + "spectum", "spectrum", + "speechs", "speeches", + "spehres", "spheres", + "speical", "special", + "speices", "species", + "spellig", "spelling", + "spindel", "spindle", + "spiritd", "spirited", + "splaton", "splatoon", + "splittr", "splitter", + "spoiles", "spoilers", + "spoitfy", "spotify", + "spolied", "spoiled", + "sponser", "sponsor", + "sporles", "sproles", + "sporuts", "sprouts", + "spotfiy", "spotify", + "sprinke", "sprinkle", + "sproels", "sproles", + "spwaned", "spawned", + "sqaures", "squares", + "sqeuaky", "squeaky", + "sqiushy", "squishy", + "squarey", "squarely", 
+ "squirel", "squirtle", + "squirle", "squirrel", + "squirrl", "squirrel", + "squirte", "squirtle", + "squsihy", "squishy", + "sriraca", "sriracha", + "srpouts", "sprouts", + "sryians", "syrians", + "sryinge", "syringe", + "stadius", "stadiums", + "staduim", "stadium", + "stagnat", "stagnant", + "staidum", "stadium", + "stakler", "stalker", + "stalkes", "stalkers", + "stamnia", "stamina", + "staoshi", "satoshi", + "starins", "strains", + "startde", "startled", + "startus", "startups", + "statits", "statist", + "statsit", "statist", + "statuer", "stature", + "statuse", "statutes", + "statuts", "statutes", + "stautes", "statues", + "stealty", "stealthy", + "steeles", "steelers", + "steorid", "steroid", + "steriel", "sterile", + "sterlie", "sterile", + "stickes", "stickers", + "stiring", "stirring", + "stirker", "striker", + "stirrig", "stirring", + "stitchs", "stitches", + "stlaker", "stalker", + "stlyish", "stylish", + "storeis", "stories", + "storise", "stories", + "stormde", "stormed", + "straigt", "straight", + "straind", "strained", + "streamd", "streamed", + "stregth", "strength", + "strengh", "strength", + "streoid", "steroid", + "stresss", "stresses", + "strians", "strains", + "stricty", "strictly", + "striekr", "striker", + "stromed", "stormed", + "stubbon", "stubborn", + "studing", "studying", + "stuidos", "studios", + "stunami", "tsunami", + "stupidr", "stupider", + "stupidy", "stupidly", + "stupire", "stupider", + "suasage", "sausage", + "subisdy", "subsidy", + "subjest", "subjects", + "subtiel", "subtitle", + "succede", "succeed", + "succeds", "succeeds", + "succees", "succeeds", + "succesd", "succeeds", + "suceeds", "succeeds", + "suddeny", "suddenly", + "suefull", "usefull", + "sufferd", "suffered", + "summonr", "summoner", + "summore", "summoner", + "sunggle", "snuggle", + "sunifre", "sunfire", + "superme", "supreme", + "suposed", "supposed", + "suposes", "supposes", + "suppoed", "supposed", + "suppost", "supports", + "suprass", "surpass", + "supress", "suppress", + "suprisd", "suprised", + "suprise", "surprise", + "suprize", "surprise", + "supsend", "suspend", + "suround", "surround", + "surpeme", "supreme", + "surroud", "surround", + "sweidsh", "swedish", + "swiflty", "swiftly", + "swiming", "swimming", + "switchs", "switches", + "switfly", "swiftly", + "swnasea", "swansea", + "sycning", "syncing", + "sycther", "scyther", + "syirans", "syrians", + "sykward", "skyward", + "syllabe", "syllable", + "symetry", "symmetry", + "symmety", "symmetry", + "symobls", "symbols", + "sympaty", "sympathy", + "symtpom", "symptom", + "synegry", "synergy", + "synoynm", "synonym", + "sypmtom", "symptom", + "syracue", "syracuse", + "syrains", "syrians", + "sysadmn", "sysadmin", + "systemc", "systemic", + "sytlish", "stylish", + "tabacco", "tobacco", + "tailban", "taliban", + "tailord", "tailored", + "talbian", "taliban", + "tallets", "tallest", + "tangeld", "tangled", + "tanlged", "tangled", + "targetd", "targeted", + "taryvon", "trayvon", + "teached", "taught", + "teaspon", "teaspoon", + "techeis", "techies", + "tehcies", "techies", + "temepst", "tempest", + "tempels", "temples", + "tempets", "tempest", + "templas", "templars", + "tempset", "tempest", + "tenacle", "tentacle", + "tendacy", "tendency", + "tequlia", "tequila", + "tesitfy", "testify", + "testice", "testicle", + "teusday", "tuesday", + "thankyu", "thankyou", + "thearpy", "therapy", + "theistc", "theistic", + "theives", "thieves", + "themsef", "themself", + "therefo", "thereof", + "therien", "therein", + "theroem", "theorem", + 
"thesits", "theists", + "thiests", "theists", + "thirldy", "thirdly", + "thirten", "thirteen", + "thirtsy", "thirsty", + "thoerem", "theorem", + "thorats", "throats", + "thornes", "thrones", + "thoruim", "thorium", + "thoughs", "thoughts", + "threadd", "threaded", + "threeof", "thereof", + "thridly", "thirdly", + "thristy", "thirsty", + "throast", "throats", + "throium", "thorium", + "thryoid", "thyroid", + "thyorid", "thyroid", + "thyriod", "thyroid", + "tigther", "tighter", + "tiolets", "toilets", + "tirdent", "trident", + "titanim", "titanium", + "tlaking", "talking", + "tobbaco", "tobacco", + "toliets", "toilets", + "tolkein", "tolkien", + "tomatos", "tomatoes", + "tongiht", "tonight", + "tonuges", "tongues", + "toppins", "toppings", + "torando", "tornado", + "torndao", "tornado", + "torpdeo", "torpedo", + "torrest", "torrents", + "tortila", "tortilla", + "toruney", "tourney", + "toubles", "troubles", + "touchda", "touchpad", + "tounrey", "tourney", + "tourisy", "touristy", + "tourits", "tourist", + "tournes", "tourneys", + "toursim", "tourism", + "toursit", "tourist", + "towords", "towards", + "trackes", "trackers", + "trailes", "trailers", + "traines", "trainers", + "trainig", "training", + "tralier", "trailer", + "tratior", "traitor", + "traveld", "traveled", + "travere", "traverse", + "travesy", "travesty", + "travles", "travels", + "treasue", "treasure", + "treatis", "treaties", + "tremelo", "tremolo", + "trendig", "trending", + "trialer", "trailer", + "triange", "triangle", + "triator", "traitor", + "trickey", "trickery", + "tridnet", "trident", + "trimuph", "triumph", + "trinkes", "trinkets", + "trinkst", "trinkets", + "trintiy", "trinity", + "triolgy", "trilogy", + "troleld", "trolled", + "troling", "trolling", + "tronado", "tornado", + "tropedo", "torpedo", + "trudnle", "trundle", + "truimph", "triumph", + "trukish", "turkish", + "trundel", "trundle", + "trunlde", "trundle", + "tryahrd", "tryhard", + "tryavon", "trayvon", + "tsamina", "stamina", + "tsnuami", "tsunami", + "tsuanmi", "tsunami", + "tsunmai", "tsunami", + "tuesdsy", "tuesdays", + "tunnles", "tunnels", + "turbins", "turbines", + "turksih", "turkish", + "turltes", "turtles", + "turrest", "turrets", + "turtels", "turtles", + "tuseday", "tuesday", + "tusnami", "tsunami", + "tutrles", "turtles", + "twiligt", "twilight", + "tyelnol", "tylenol", + "typcial", "typical", + "tyrhard", "tryhard", + "tyrrany", "tyranny", + "udpated", "updated", + "uesfull", "usefull", + "ugprade", "upgrade", + "ukarine", "ukraine", + "ukranie", "ukraine", + "ukriane", "ukraine", + "ultimae", "ultimate", + "umbrela", "umbrella", + "unahppy", "unhappy", + "unbannd", "unbanned", + "underog", "undergo", + "unfairy", "unfairly", + "ungoldy", "ungodly", + "unicors", "unicorns", + "uniquey", "uniquely", + "unknwon", "unknown", + "unkonwn", "unknown", + "unlcean", "unclean", + "unlcoks", "unlocks", + "unlcuky", "unlucky", + "unlikey", "unlikely", + "unopend", "unopened", + "unprone", "unproven", + "unusabe", "unusable", + "unworty", "unworthy", + "upgarde", "upgrade", + "upgrads", "upgrades", + "uplaods", "uploads", + "upsteam", "upstream", + "urainum", "uranium", + "uranuim", "uranium", + "uretrha", "urethra", + "urkaine", "ukraine", + "urnaium", "uranium", + "urugauy", "uruguay", + "usefull", "useful", + "usefuly", "usefully", + "utiltiy", "utility", + "utopain", "utopian", + "utpoian", "utopian", + "vaccins", "vaccines", + "vaccume", "vacuum", + "vageuly", "vaguely", + "vaguley", "vaguely", + "vairant", "variant", + "valenca", "valencia", + 
"valetta", "valletta", + "valkyre", "valkyrie", + "valuabe", "valuable", + "valuble", "valuable", + "vampirs", "vampires", + "vanguad", "vanguard", + "varaint", "variant", + "vareity", "variety", + "varians", "variants", + "varient", "variant", + "varisty", "varsity", + "varitey", "variety", + "varstiy", "varsity", + "vasalls", "vassals", + "vasslas", "vassals", + "vaugely", "vaguely", + "vecotrs", "vectors", + "vectros", "vectors", + "veitnam", "vietnam", + "veiwers", "viewers", + "vendeta", "vendetta", + "verbaly", "verbally", + "verical", "vertical", + "verious", "various", + "verison", "version", + "veritgo", "vertigo", + "versoin", "version", + "vertgio", "vertigo", + "vessles", "vessels", + "vetween", "between", + "viatmin", "vitamin", + "vibratr", "vibrator", + "vicitms", "victims", + "vientam", "vietnam", + "vigrins", "virgins", + "vikigns", "vikings", + "villian", "villain", + "villify", "vilify", + "virbate", "vibrate", + "virigns", "virgins", + "virtiol", "vitriol", + "virutal", "virtual", + "virutes", "virtues", + "visable", "visible", + "visably", "visibly", + "visbily", "visibly", + "visting", "visiting", + "vistors", "visitors", + "vitaliy", "vitality", + "vitamis", "vitamins", + "vitenam", "vietnam", + "vitirol", "vitriol", + "vitmain", "vitamin", + "vitroil", "vitriol", + "vitrual", "virtual", + "vitrues", "virtues", + "volatge", "voltage", + "volumne", "volume", + "votlage", "voltage", + "vrigins", "virgins", + "waclott", "walcott", + "wacther", "watcher", + "waitres", "waiters", + "waktins", "watkins", + "warcrat", "warcraft", + "wardobe", "wardrobe", + "wariwck", "warwick", + "warrany", "warranty", + "warrent", "warrant", + "warrios", "warriors", + "warwcik", "warwick", + "wathcer", "watcher", + "watiers", "waiters", + "waviers", "waivers", + "wawrick", "warwick", + "wayword", "wayward", + "webapge", "webpage", + "webiste", "website", + "webstie", "website", + "weigths", "weights", + "weilded", "wielded", + "weirldy", "weirdly", + "weirods", "weirdos", + "welathy", "wealthy", + "wendsay", "wednesday", + "wensday", "wednesday", + "wepbage", "webpage", + "weridly", "weirdly", + "weridos", "weirdos", + "werstle", "wrestle", + "wesbite", "website", + "whaeton", "wheaton", + "whipser", "whisper", + "whislte", "whistle", + "whistel", "whistle", + "whitsle", "whistle", + "whsiper", "whisper", + "wiaters", "waiters", + "wiavers", "waivers", + "widgest", "widgets", + "wieghts", "weights", + "wigdets", "widgets", + "windosr", "windsor", + "winnins", "winnings", + "winsdor", "windsor", + "wintson", "winston", + "wirting", "writing", + "wisnton", "winston", + "withces", "witches", + "witheld", "withheld", + "withing", "within", + "withold", "withhold", + "wlacott", "walcott", + "wokring", "working", + "workins", "workings", + "woudlnt", "wouldnt", + "woudlve", "wouldve", + "woulndt", "wouldnt", + "wreslte", "wrestle", + "wroking", "working", + "wtiches", "witches", + "wupport", "support", + "yaching", "yachting", + "younget", "youngest", + "youseff", "yousef", + "youself", "yourself", + "zaelots", "zealots", + "zealtos", "zealots", + "zelaots", "zealots", + "zelaous", "zealous", + "zimbabe", "zimbabwe", + "zionsim", "zionism", + "zionsit", "zionist", + "zoinism", "zionism", + "zoinist", "zionist", + "abbout", "about", + "abilty", "ability", + "absail", "abseil", + "abutts", "abuts", + "achive", "achieve", + "acused", "accused", + "addopt", "adopt", + "addres", "address", + "adress", "address", + "aeriel", "aerial", + "affort", "afford", + "agains", "against", + "aginst", "against", 
+ "ahppen", "happen", + "aiport", "airport", + "aisian", "asian", + "albiet", "albeit", + "alchol", "alcohol", + "aledge", "allege", + "aleged", "alleged", + "allign", "align", + "almsot", "almost", + "alomst", "almost", + "alowed", "allowed", + "alwasy", "always", + "alwyas", "always", + "amking", "making", + "ammend", "amend", + "amoung", "among", + "aplied", "applied", + "appart", "apart", + "aquire", "acquire", + "aready", "already", + "arised", "arose", + "arival", "arrival", + "arrary", "array", + "artice", "article", + "asetic", "ascetic", + "asside", "aside", + "attemp", "attempt", + "attemt", "attempt", + "auther", "author", + "awared", "awarded", + "bedore", "before", + "beeing", "being", + "befoer", "before", + "beggin", "begin", + "beleif", "belief", + "belive", "believe", + "beteen", "between", + "betwen", "between", + "beween", "between", + "bianry", "binary", + "boyant", "buoyant", + "broady", "broadly", + "buddah", "buddha", + "buring", "burying", + "carcas", "carcass", + "casion", "caisson", + "casued", "caused", + "casues", "causes", + "ceasar", "caesar", + "cencus", "census", + "censur", "censor", + "cheifs", "chiefs", + "circut", "circuit", + "clasic", "classic", + "coform", "conform", + "comany", "company", + "coucil", "council", + "curent", "current", + "densly", "densely", + "deside", "decide", + "devels", "delves", + "devide", "divide", + "dieing", "dying", + "divice", "device", + "doulbe", "double", + "dreasm", "dreams", + "duting", "during", + "ealier", "earlier", + "eearly", "early", + "efford", "effort", + "emited", "emitted", + "emnity", "enmity", + "enduce", "induce", + "enlish", "english", + "erally", "orally", + "eratic", "erratic", + "ethose", "those", + "exampt", "exempt", + "excact", "exact", + "excell", "excel", + "exerpt", "excerpt", + "exinct", "extinct", + "expell", "expel", + "expoch", "epoch", + "extint", "extinct", + "facist", "fascist", + "faught", "fought", + "finaly", "finally", + "forsaw", "foresaw", + "fougth", "fought", + "fourty", "forty", + "foward", "forward", + "freind", "friend", + "fromed", "formed", + "fufill", "fulfill", + "futher", "further", + "gardai", "gardaí", + "geting", "getting", + "ghandi", "gandhi", + "glight", "flight", + "gloabl", "global", + "godess", "goddess", + "guilia", "giulia", + "guilio", "giulio", + "habeus", "habeas", + "harras", "harass", + "hatian", "haitian", + "heared", "heard", + "hertzs", "hertz", + "hieght", "height", + "higest", "highest", + "higway", "highway", + "honory", "honorary", + "howver", "however", + "hstory", "history", + "hunman", "human", + "husban", "husband", + "hvaing", "having", + "illess", "illness", + "ilness", "illness", + "imagin", "imagine", + "imense", "immense", + "includ", "include", + "inital", "initial", + "interm", "interim", + "intial", "initial", + "invlid", "invalid", + "iunior", "junior", + "jaques", "jacques", + "jospeh", "joseph", + "jouney", "journey", + "klenex", "kleenex", + "labled", "labelled", + "largst", "largest", + "larrry", "larry", + "lefted", "left", + "lenght", "length", + "lerans", "learns", + "liason", "liaison", + "libary", "library", + "lieing", "lying", + "lieved", "lived", + "littel", "little", + "livley", "lively", + "lonley", "lonely", + "mailny", "mainly", + "markes", "marks", + "mileau", "milieu", + "milion", "million", + "millon", "million", + "misile", "missile", + "missen", "mizzen", + "missle", "missile", + "mkaing", "making", + "moderm", "modem", + "moreso", "more", + "mounth", "month", + "myraid", "myriad", + "naieve", "naive", + "nestin", 
"nesting", + "nineth", "ninth", + "noveau", "nouveau", + "occour", "occur", + "occurr", "occur", + "offred", "offered", + "omited", "omitted", + "ouevre", "oeuvre", + "oxigen", "oxygen", + "p0enis", "penis", + "packge", "package", + "peaple", "people", + "pensle", "pencil", + "peopel", "people", + "peotry", "poetry", + "perade", "parade", + "persan", "person", + "persue", "pursue", + "plateu", "plateau", + "poenis", "penis", + "poisin", "poison", + "polute", "pollute", + "posess", "possess", + "posion", "poison", + "prairy", "prairie", + "prarie", "prairie", + "preiod", "period", + "privte", "private", + "proces", "process", + "proove", "prove", + "psuedo", "pseudo", + "psyhic", "psychic", + "pucini", "puccini", + "pumkin", "pumpkin", + "puting", "putting", + "pyscic", "psychic", + "quizes", "quizzes", + "quuery", "query", + "racaus", "raucous", + "radify", "ratify", + "raelly", "really", + "reacll", "recall", + "realyl", "really", + "reched", "reached", + "recide", "reside", + "recrod", "record", + "refect", "reflect", + "relaly", "really", + "renewl", "renewal", + "retuns", "returns", + "reveiw", "review", + "rhymme", "rhyme", + "rigeur", "rigueur", + "rocord", "record", + "rougly", "roughly", + "runing", "running", + "rythem", "rhythm", + "rythim", "rhythm", + "saftey", "safety", + "salery", "salary", + "satisy", "satisfy", + "satric", "satiric", + "saught", "sought", + "scince", "science", + "scirpt", "script", + "seceed", "succeed", + "seinor", "senior", + "sepina", "subpoena", + "sevice", "service", + "shamen", "shaman", + "sheild", "shield", + "shiped", "shipped", + "shorly", "shortly", + "shoudl", "should", + "shreak", "shriek", + "siezed", "seized", + "sixtin", "sistine", + "skiped", "skipped", + "sneeks", "sneaks", + "somene", "someone", + "soruce", "source", + "soudns", "sounds", + "sourth", "south", + "speach", "speech", + "spects", "aspects", + "spoace", "space", + "sqaure", "square", + "staion", "station", + "stange", "strange", + "stilus", "stylus", + "stirrs", "stirs", + "stopry", "story", + "strnad", "strand", + "studdy", "study", + "suceed", "succeed", + "sucess", "success", + "sucide", "suicide", + "sumary", "summary", + "suport", "support", + "supose", "suppose", + "surfce", "surface", + "surley", "surly", + "swaers", "swears", + "swepth", "swept", + "talekd", "talked", + "theese", "these", + "therby", "thereby", + "thigns", "things", + "thigsn", "things", + "thikns", "thinks", + "thiunk", "think", + "thnigs", "things", + "threee", "three", + "tkaing", "taking", + "tounge", "tongue", + "tourch", "torch", + "towrad", "toward", + "trafic", "traffic", + "troups", "troupes", + "truely", "truly", + "twelth", "twelfth", + "tyrany", "tyranny", + "unabel", "unable", + "unkown", "unknown", + "unmont", "unmount", + "unmout", "unmount", + "untill", "until", + "usally", "usually", + "useage", "usage", + "useing", "using", + "usualy", "usually", + "vaccum", "vacuum", + "variey", "variety", + "varing", "varying", + "varity", "variety", + "vasall", "vassal", + "vigeur", "vigueur", + "villin", "villain", + "vreity", "variety", + "vriety", "variety", + "whants", "wants", + "wheras", "whereas", + "wheter", "whether", + "wholey", "wholly", + "whther", "whether", + "wnated", "wanted", + "writen", "written", + "yaerly", "yearly", + "yotube", "youtube", + "zeebra", "zebra", + "abotu", "about", + "adres", "address", + "afair", "affair", + "agian", "again", + "agina", "again", + "agred", "agreed", + "alege", "allege", + "alsot", "also", + "altho", "although", + "amung", "among", + "anual", 
"annual", + "aroud", "around", + "arund", "around", + "asign", "assign", + "assit", "assist", + "asume", "assume", + "atain", "attain", + "autor", "author", + "baout", "about", + "blaim", "blame", + "boaut", "bout", + "boook", "book", + "borke", "broke", + "breif", "brief", + "caost", "coast", + "casue", "cause", + "chasr", "chaser", + "cheif", "chief", + "chuch", "church", + "claer", "clear", + "clera", "clear", + "coudl", "could", + "crowm", "crown", + "deram", "dram", + "diety", "deity", + "doens", "does", + "doign", "doing", + "donig", "doing", + "drnik", "drink", + "durig", "during", + "earnt", "earned", + "eigth", "eighth", + "eiter", "either", + "emtpy", "empty", + "endig", "ending", + "eveyr", "every", + "exept", "except", + "eyars", "years", + "eyasr", "years", + "fiels", "fields", + "firts", "flirts", + "fleed", "fled", + "fomed", "formed", + "foucs", "focus", + "foudn", "found", + "fouth", "fourth", + "frome", "from", + "ganes", "games", + "gaurd", "guard", + "gerat", "great", + "gogin", "going", + "goign", "going", + "gonig", "going", + "graet", "great", + "greif", "grief", + "gropu", "group", + "guage", "gauge", + "hapen", "happen", + "herad", "heard", + "heroe", "hero", + "higer", "higher", + "housr", "hours", + "htere", "there", + "htikn", "think", + "hting", "thing", + "htink", "think", + "hwihc", "which", + "hwile", "while", + "hwole", "whole", + "idaes", "ideas", + "idesa", "ideas", + "ihaca", "ithaca", + "knwos", "knows", + "konws", "knows", + "lastr", "last", + "lavae", "larvae", + "layed", "laid", + "leage", "league", + "leanr", "lean", + "leran", "learn", + "levle", "level", + "lible", "libel", + "liekd", "liked", + "liuke", "like", + "lmits", "limits", + "lonly", "lonely", + "lukid", "likud", + "lybia", "libya", + "maked", "marked", + "makse", "makes", + "mamal", "mammal", + "mileu", "milieu", + "mkaes", "makes", + "modle", "model", + "moent", "moment", + "moeny", "money", + "monts", "months", + "movei", "movie", + "muder", "murder", + "mysef", "myself", + "neice", "niece", + "ninty", "ninety", + "ocurr", "occur", + "oging", "going", + "opose", "oppose", + "orded", "ordered", + "orgin", "origin", + "otehr", "other", + "ouput", "output", + "owudl", "would", + "paide", "paid", + "palce", "place", + "pased", "passed", + "payed", "paid", + "peice", "piece", + "peoms", "poems", + "poety", "poetry", + "pwoer", "power", + "qtuie", "quite", + "qutie", "quite", + "realy", "really", + "repid", "rapid", + "rised", "raised", + "rulle", "rule", + "rwite", "write", + "rythm", "rhythm", + "safty", "safety", + "scoll", "scroll", + "seach", "search", + "seige", "siege", + "seing", "seeing", + "sence", "sense", + "sicne", "since", + "sieze", "seize", + "sinse", "sines", + "slowy", "slowly", + "snese", "sneeze", + "soley", "solely", + "sotry", "story", + "sotyr", "satyr", + "soudn", "sound", + "sould", "could", + "spred", "spread", + "stlye", "style", + "stong", "strong", + "stoyr", "story", + "strat", "start", + "stroy", "story", + "suppy", "supply", + "swaer", "swear", + "syrap", "syrup", + "sytem", "system", + "sytle", "style", + "tatoo", "tattoo", + "thast", "that", + "theif", "thief", + "theri", "their", + "thgat", "that", + "thier", "their", + "thign", "thing", + "thikn", "think", + "thnig", "thing", + "thrid", "third", + "thsoe", "those", + "thyat", "that", + "tihkn", "think", + "timne", "time", + "tiome", "time", + "tkaes", "takes", + "todya", "today", + "tyhat", "that", + "unsed", "used", + "weild", "wield", + "whant", "want", + "whcih", "which", + "whihc", "which", + "whith", 
"with", + "whlch", "which", + "wholy", "wholly", + "wierd", "weird", + "wille", "will", + "willk", "will", + "withh", "with", + "witht", "with", + "wiull", "will", + "wnats", "wants", + "wohle", "whole", + "worls", "world", + "woudl", "would", + "wriet", "write", + "wroet", "wrote", + "yaers", "years", + "yatch", "yacht", + "yearm", "year", + "yeasr", "years", + "yeild", "yield", + "yeras", "years", + "yersa", "years", + "agin", "again", + "agre", "agree", + "ahev", "have", + "ahve", "have", + "alse", "else", + "amke", "make", + "anbd", "and", + "andd", "and", + "apon", "upon", + "aslo", "also", + "awya", "away", + "bakc", "back", + "bcak", "back", + "clas", "class", + "cpoy", "coy", + "cxan", "cyan", + "daed", "dead", + "dael", "deal", + "diea", "idea", + "doub", "doubt", + "dyas", "dryas", + "eahc", "each", + "efel", "evil", + "eles", "eels", + "ened", "need", + "enxt", "next", + "esle", "else", + "eyar", "year", + "fatc", "fact", + "fidn", "find", + "fomr", "from", + "grwo", "grow", + "haev", "have", + "halp", "help", + "holf", "hold", + "hten", "then", + "htey", "they", + "htis", "this", + "hvae", "have", + "hvea", "have", + "inot", "into", + "iwll", "will", + "iwth", "with", + "jstu", "just", + "jsut", "just", + "knwo", "know", + "konw", "know", + "kwno", "know", + "liek", "like", + "loev", "love", + "lveo", "love", + "lvoe", "love", + "mkae", "make", + "mkea", "make", + "mroe", "more", + "nkow", "know", + "nkwo", "know", + "nmae", "name", + "noth", "north", + "nowe", "now", + "omre", "more", + "onot", "note", + "onyl", "only", + "owrk", "work", + "peom", "poem", + "pich", "pitch", + "rela", "real", + "sasy", "says", + "smae", "same", + "smoe", "some", + "soem", "some", + "sohw", "show", + "stpo", "stop", + "suop", "soup", + "syas", "says", + "tahn", "than", + "taht", "that", + "tast", "taste", + "tath", "that", + "tehy", "they", + "tghe", "the", + "ther", "there", + "thge", "the", + "thna", "than", + "thne", "then", + "thsi", "this", + "thta", "that", + "tiem", "time", + "tihs", "this", + "tjhe", "the", + "tkae", "take", + "tood", "todo", + "tust", "trust", + "twon", "town", + "twpo", "two", + "tyhe", "they", + "uise", "use", + "vell", "well", + "veyr", "very", + "vrey", "very", + "vyer", "very", + "vyre", "very", + "waht", "what", + "wass", "was", + "watn", "want", + "weas", "was", + "wehn", "when", + "whic", "which", + "whta", "what", + "wich", "which", + "wief", "wife", + "wiew", "view", + "wiht", "with", + "witn", "with", + "wnat", "want", + "wokr", "work", + "wrok", "work", + "wtih", "with", + "yaer", "year", + "yera", "year", + "yrea", "year", + "ytou", "you", + "adn", "and", + "ect", "etc", + "nto", "not", + "teh", "the", + "thn", "then", + "tje", "the", + "whn", "when", + "wih", "with", + "yuo", "you", +} + +// DictAmerican converts UK spellings to US spellings +var DictAmerican = []string{ + "institutionalisation", "institutionalization", + "internationalisation", "internationalization", + "professionalisation", "professionalization", + "compartmentalising", "compartmentalizing", + "institutionalising", "institutionalizing", + "internationalising", "internationalizing", + "compartmentalised", "compartmentalized", + "compartmentalises", "compartmentalizes", + "decriminalisation", "decriminalization", + "denationalisation", "denationalization", + "fictionalisations", "fictionalizations", + "institutionalised", "institutionalized", + "institutionalises", "institutionalizes", + "intellectualising", "intellectualizing", + "internationalised", "internationalized", + 
"internationalises", "internationalizes", + "pedestrianisation", "pedestrianization", + "professionalising", "professionalizing", + "archaeologically", "archeologically", + "compartmentalise", "compartmentalize", + "decentralisation", "decentralization", + "demilitarisation", "demilitarization", + "externalisations", "externalizations", + "fictionalisation", "fictionalization", + "institutionalise", "institutionalize", + "intellectualised", "intellectualized", + "intellectualises", "intellectualizes", + "internationalise", "internationalize", + "nationalisations", "nationalizations", + "palaeontologists", "paleontologists", + "professionalised", "professionalized", + "professionalises", "professionalizes", + "rationalisations", "rationalizations", + "sensationalising", "sensationalizing", + "sentimentalising", "sentimentalizing", + "acclimatisation", "acclimatization", + "bougainvillaeas", "bougainvilleas", + "commercialising", "commercializing", + "conceptualising", "conceptualizing", + "contextualising", "contextualizing", + "crystallisation", "crystallization", + "decriminalising", "decriminalizing", + "democratisation", "democratization", + "denationalising", "denationalizing", + "depersonalising", "depersonalizing", + "desensitisation", "desensitization", + "destabilisation", "destabilization", + "disorganisation", "disorganization", + "extemporisation", "extemporization", + "externalisation", "externalization", + "familiarisation", "familiarization", + "generalisations", "generalizations", + "hospitalisation", "hospitalization", + "individualising", "individualizing", + "industrialising", "industrializing", + "intellectualise", "intellectualize", + "internalisation", "internalization", + "manoeuvrability", "maneuverability", + "marginalisation", "marginalization", + "materialisation", "materialization", + "miniaturisation", "miniaturization", + "nationalisation", "nationalization", + "neighbourliness", "neighborliness", + "overemphasising", "overemphasizing", + "palaeontologist", "paleontologist", + "particularising", "particularizing", + "pedestrianising", "pedestrianizing", + "professionalise", "professionalize", + "psychoanalysing", "psychoanalyzing", + "rationalisation", "rationalization", + "reorganisations", "reorganizations", + "revolutionising", "revolutionizing", + "sensationalised", "sensationalized", + "sensationalises", "sensationalizes", + "sentimentalised", "sentimentalized", + "sentimentalises", "sentimentalizes", + "specialisations", "specializations", + "standardisation", "standardization", + "synchronisation", "synchronization", + "systematisation", "systematization", + "aggrandisement", "aggrandizement", + "anaesthetising", "anesthetizing", + "archaeological", "archeological", + "archaeologists", "archeologists", + "bougainvillaea", "bougainvillea", + "characterising", "characterizing", + "collectivising", "collectivizing", + "commercialised", "commercialized", + "commercialises", "commercializes", + "conceptualised", "conceptualized", + "conceptualises", "conceptualizes", + "contextualised", "contextualized", + "contextualises", "contextualizes", + "decentralising", "decentralizing", + "decriminalised", "decriminalized", + "decriminalises", "decriminalizes", + "dehumanisation", "dehumanization", + "demilitarising", "demilitarizing", + "demobilisation", "demobilization", + "demoralisation", "demoralization", + "denationalised", "denationalized", + "denationalises", "denationalizes", + "depersonalised", "depersonalized", + "depersonalises", "depersonalizes", + 
"disembowelling", "disemboweling", + "dramatisations", "dramatizations", + "editorialising", "editorializing", + "encyclopaedias", "encyclopedias", + "fictionalising", "fictionalizing", + "fraternisation", "fraternization", + "generalisation", "generalization", + "gynaecological", "gynecological", + "gynaecologists", "gynecologists", + "haematological", "hematological", + "haematologists", "hematologists", + "immobilisation", "immobilization", + "individualised", "individualized", + "individualises", "individualizes", + "industrialised", "industrialized", + "industrialises", "industrializes", + "liberalisation", "liberalization", + "monopolisation", "monopolization", + "naturalisation", "naturalization", + "neighbourhoods", "neighborhoods", + "neutralisation", "neutralization", + "organisational", "organizational", + "outmanoeuvring", "outmaneuvering", + "overemphasised", "overemphasized", + "overemphasises", "overemphasizes", + "paediatricians", "pediatricians", + "particularised", "particularized", + "particularises", "particularizes", + "pasteurisation", "pasteurization", + "pedestrianised", "pedestrianized", + "pedestrianises", "pedestrianizes", + "philosophising", "philosophizing", + "politicisation", "politicization", + "popularisation", "popularization", + "pressurisation", "pressurization", + "prioritisation", "prioritization", + "privatisations", "privatizations", + "propagandising", "propagandizing", + "psychoanalysed", "psychoanalyzed", + "psychoanalyses", "psychoanalyzes", + "regularisation", "regularization", + "reorganisation", "reorganization", + "revolutionised", "revolutionized", + "revolutionises", "revolutionizes", + "secularisation", "secularization", + "sensationalise", "sensationalize", + "sentimentalise", "sentimentalize", + "serialisations", "serializations", + "specialisation", "specialization", + "sterilisations", "sterilizations", + "stigmatisation", "stigmatization", + "transistorised", "transistorized", + "unrecognisable", "unrecognizable", + "visualisations", "visualizations", + "westernisation", "westernization", + "accessorising", "accessorizing", + "acclimatising", "acclimatizing", + "amortisations", "amortizations", + "amphitheatres", "amphitheaters", + "anaesthetised", "anesthetized", + "anaesthetises", "anesthetizes", + "anaesthetists", "anesthetists", + "archaeologist", "archeologist", + "backpedalling", "backpedaling", + "behaviourists", "behaviorists", + "breathalysers", "breathalyzers", + "breathalysing", "breathalyzing", + "callisthenics", "calisthenics", + "cannibalising", "cannibalizing", + "characterised", "characterized", + "characterises", "characterizes", + "circularising", "circularizing", + "clarinettists", "clarinetists", + "collectivised", "collectivized", + "collectivises", "collectivizes", + "commercialise", "commercialize", + "computerising", "computerizing", + "conceptualise", "conceptualize", + "contextualise", "contextualize", + "criminalising", "criminalizing", + "crystallising", "crystallizing", + "decentralised", "decentralized", + "decentralises", "decentralizes", + "decriminalise", "decriminalize", + "demilitarised", "demilitarized", + "demilitarises", "demilitarizes", + "democratising", "democratizing", + "denationalise", "denationalize", + "depersonalise", "depersonalize", + "desensitising", "desensitizing", + "destabilising", "destabilizing", + "disembowelled", "disemboweled", + "dishonourable", "dishonorable", + "dishonourably", "dishonorably", + "dramatisation", "dramatization", + "editorialised", "editorialized", + 
"editorialises", "editorializes", + "encyclopaedia", "encyclopedia", + "encyclopaedic", "encyclopedic", + "extemporising", "extemporizing", + "externalising", "externalizing", + "familiarising", "familiarizing", + "fertilisation", "fertilization", + "fictionalised", "fictionalized", + "fictionalises", "fictionalizes", + "formalisation", "formalization", + "fossilisation", "fossilization", + "globalisation", "globalization", + "gynaecologist", "gynecologist", + "haematologist", "hematologist", + "haemophiliacs", "hemophiliacs", + "haemorrhaging", "hemorrhaging", + "harmonisation", "harmonization", + "hospitalising", "hospitalizing", + "hypothesising", "hypothesizing", + "immortalising", "immortalizing", + "individualise", "individualize", + "industrialise", "industrialize", + "internalising", "internalizing", + "marginalising", "marginalizing", + "materialising", "materializing", + "mechanisation", "mechanization", + "memorialising", "memorializing", + "miniaturising", "miniaturizing", + "miscatalogued", "miscataloged", + "misdemeanours", "misdemeanors", + "multicoloured", "multicolored", + "nationalising", "nationalizing", + "neighbourhood", "neighborhood", + "normalisation", "normalization", + "organisations", "organizations", + "outmanoeuvred", "outmaneuvered", + "outmanoeuvres", "outmaneuvers", + "overemphasise", "overemphasize", + "paediatrician", "pediatrician", + "palaeontology", "paleontology", + "particularise", "particularize", + "passivisation", "passivization", + "patronisingly", "patronizingly", + "pedestrianise", "pedestrianize", + "personalising", "personalizing", + "philosophised", "philosophized", + "philosophises", "philosophizes", + "privatisation", "privatization", + "propagandised", "propagandized", + "propagandises", "propagandizes", + "proselytisers", "proselytizers", + "proselytising", "proselytizing", + "psychoanalyse", "psychoanalyze", + "pulverisation", "pulverization", + "rationalising", "rationalizing", + "reconnoitring", "reconnoitering", + "revolutionise", "revolutionize", + "romanticising", "romanticizing", + "serialisation", "serialization", + "socialisation", "socialization", + "stabilisation", "stabilization", + "standardising", "standardizing", + "sterilisation", "sterilization", + "subsidisation", "subsidization", + "synchronising", "synchronizing", + "systematising", "systematizing", + "tantalisingly", "tantalizingly", + "underutilised", "underutilized", + "victimisation", "victimization", + "visualisation", "visualization", + "vocalisations", "vocalizations", + "vulgarisation", "vulgarization", + "accessorised", "accessorized", + "accessorises", "accessorizes", + "acclimatised", "acclimatized", + "acclimatises", "acclimatizes", + "amortisation", "amortization", + "amphitheatre", "amphitheater", + "anaesthetics", "anesthetics", + "anaesthetise", "anesthetize", + "anaesthetist", "anesthetist", + "antagonising", "antagonizing", + "appetisingly", "appetizingly", + "backpedalled", "backpedaled", + "bastardising", "bastardizing", + "behaviourism", "behaviorism", + "behaviourist", "behaviorist", + "bowdlerising", "bowdlerizing", + "breathalysed", "breathalyzed", + "breathalyser", "breathalyzer", + "breathalyses", "breathalyzes", + "cannibalised", "cannibalized", + "cannibalises", "cannibalizes", + "capitalising", "capitalizing", + "caramelising", "caramelizing", + "categorising", "categorizing", + "centigrammes", "centigrams", + "centralising", "centralizing", + "centrepieces", "centerpieces", + "characterise", "characterize", + "circularised", 
"circularized", + "circularises", "circularizes", + "clarinettist", "clarinetist", + "collectivise", "collectivize", + "colonisation", "colonization", + "computerised", "computerized", + "computerises", "computerizes", + "criminalised", "criminalized", + "criminalises", "criminalizes", + "crystallised", "crystallized", + "crystallises", "crystallizes", + "decentralise", "decentralize", + "dehumanising", "dehumanizing", + "demilitarise", "demilitarize", + "demobilising", "demobilizing", + "democratised", "democratized", + "democratises", "democratizes", + "demoralising", "demoralizing", + "desensitised", "desensitized", + "desensitises", "desensitizes", + "destabilised", "destabilized", + "destabilises", "destabilizes", + "discolouring", "discoloring", + "dishonouring", "dishonoring", + "disorganised", "disorganized", + "editorialise", "editorialize", + "endeavouring", "endeavoring", + "equalisation", "equalization", + "evangelising", "evangelizing", + "extemporised", "extemporized", + "extemporises", "extemporizes", + "externalised", "externalized", + "externalises", "externalizes", + "familiarised", "familiarized", + "familiarises", "familiarizes", + "fictionalise", "fictionalize", + "finalisation", "finalization", + "fraternising", "fraternizing", + "generalising", "generalizing", + "haemophiliac", "hemophiliac", + "haemorrhaged", "hemorrhaged", + "haemorrhages", "hemorrhages", + "haemorrhoids", "hemorrhoids", + "homoeopathic", "homeopathic", + "homogenising", "homogenizing", + "hospitalised", "hospitalized", + "hospitalises", "hospitalizes", + "hypothesised", "hypothesized", + "hypothesises", "hypothesizes", + "idealisation", "idealization", + "immobilisers", "immobilizers", + "immobilising", "immobilizing", + "immortalised", "immortalized", + "immortalises", "immortalizes", + "immunisation", "immunization", + "initialising", "initializing", + "internalised", "internalized", + "internalises", "internalizes", + "jeopardising", "jeopardizing", + "legalisation", "legalization", + "legitimising", "legitimizing", + "liberalising", "liberalizing", + "manoeuvrable", "maneuverable", + "manoeuvrings", "maneuverings", + "marginalised", "marginalized", + "marginalises", "marginalizes", + "marvellously", "marvelously", + "materialised", "materialized", + "materialises", "materializes", + "maximisation", "maximization", + "memorialised", "memorialized", + "memorialises", "memorializes", + "metabolising", "metabolizing", + "militarising", "militarizing", + "milligrammes", "milligrams", + "miniaturised", "miniaturized", + "miniaturises", "miniaturizes", + "misbehaviour", "misbehavior", + "misdemeanour", "misdemeanor", + "mobilisation", "mobilization", + "moisturisers", "moisturizers", + "moisturising", "moisturizing", + "monopolising", "monopolizing", + "moustachioed", "mustachioed", + "nationalised", "nationalized", + "nationalises", "nationalizes", + "naturalising", "naturalizing", + "neighbouring", "neighboring", + "neutralising", "neutralizing", + "oesophaguses", "esophaguses", + "organisation", "organization", + "orthopaedics", "orthopedics", + "outmanoeuvre", "outmaneuver", + "palaeolithic", "paleolithic", + "pasteurising", "pasteurizing", + "personalised", "personalized", + "personalises", "personalizes", + "philosophise", "philosophize", + "plagiarising", "plagiarizing", + "ploughshares", "plowshares", + "polarisation", "polarization", + "politicising", "politicizing", + "popularising", "popularizing", + "pressurising", "pressurizing", + "prioritising", "prioritizing", + "propagandise", 
"propagandize", + "proselytised", "proselytized", + "proselytiser", "proselytizer", + "proselytises", "proselytizes", + "radicalising", "radicalizing", + "rationalised", "rationalized", + "rationalises", "rationalizes", + "realisations", "realizations", + "recognisable", "recognizable", + "recognisably", "recognizably", + "recognisance", "recognizance", + "reconnoitred", "reconnoitered", + "reconnoitres", "reconnoiters", + "regularising", "regularizing", + "reorganising", "reorganizing", + "revitalising", "revitalizing", + "rhapsodising", "rhapsodizing", + "romanticised", "romanticized", + "romanticises", "romanticizes", + "scandalising", "scandalizing", + "scrutinising", "scrutinizing", + "secularising", "secularizing", + "specialising", "specializing", + "squirrelling", "squirreling", + "standardised", "standardized", + "standardises", "standardizes", + "stigmatising", "stigmatizing", + "sympathisers", "sympathizers", + "sympathising", "sympathizing", + "synchronised", "synchronized", + "synchronises", "synchronizes", + "synthesisers", "synthesizers", + "synthesising", "synthesizing", + "systematised", "systematized", + "systematises", "systematizes", + "technicolour", "technicolor", + "theatregoers", "theatergoers", + "traumatising", "traumatizing", + "trivialising", "trivializing", + "unauthorised", "unauthorized", + "uncatalogued", "uncataloged", + "unfavourable", "unfavorable", + "unfavourably", "unfavorably", + "unionisation", "unionization", + "unrecognised", "unrecognized", + "untrammelled", "untrammeled", + "urbanisation", "urbanization", + "vaporisation", "vaporization", + "vocalisation", "vocalization", + "watercolours", "watercolors", + "westernising", "westernizing", + "accessorise", "accessorize", + "acclimatise", "acclimatize", + "agonisingly", "agonizingly", + "amortisable", "amortizable", + "anaesthesia", "anesthesia", + "anaesthetic", "anesthetic", + "anglicising", "anglicizing", + "antagonised", "antagonized", + "antagonises", "antagonizes", + "apologising", "apologizing", + "archaeology", "archeology", + "authorising", "authorizing", + "bastardised", "bastardized", + "bastardises", "bastardizes", + "bedevilling", "bedeviling", + "behavioural", "behavioral", + "belabouring", "belaboring", + "bowdlerised", "bowdlerized", + "bowdlerises", "bowdlerizes", + "breathalyse", "breathalyze", + "brutalising", "brutalizing", + "cannibalise", "cannibalize", + "capitalised", "capitalized", + "capitalises", "capitalizes", + "caramelised", "caramelized", + "caramelises", "caramelizes", + "carbonising", "carbonizing", + "cataloguing", "cataloging", + "categorised", "categorized", + "categorises", "categorizes", + "cauterising", "cauterizing", + "centigramme", "centigram", + "centilitres", "centiliters", + "centimetres", "centimeters", + "centralised", "centralized", + "centralises", "centralizes", + "centrefolds", "centerfolds", + "centrepiece", "centerpiece", + "channelling", "channeling", + "chequebooks", "checkbooks", + "circularise", "circularize", + "colourfully", "colorfully", + "colourizing", "colorizing", + "computerise", "computerize", + "councillors", "councilors", + "counselling", "counseling", + "counsellors", "counselors", + "criminalise", "criminalize", + "criticising", "criticizing", + "crystallise", "crystallize", + "customising", "customizing", + "defenceless", "defenseless", + "dehumanised", "dehumanized", + "dehumanises", "dehumanizes", + "demobilised", "demobilized", + "demobilises", "demobilizes", + "democratise", "democratize", + "demoralised", "demoralized", + 
"demoralises", "demoralizes", + "deodorising", "deodorizing", + "desensitise", "desensitize", + "destabilise", "destabilize", + "discoloured", "discolored", + "dishevelled", "disheveled", + "dishonoured", "dishonored", + "dramatising", "dramatizing", + "economising", "economizing", + "empathising", "empathizing", + "emphasising", "emphasizing", + "endeavoured", "endeavored", + "epitomising", "epitomizing", + "evangelised", "evangelized", + "evangelises", "evangelizes", + "extemporise", "extemporize", + "externalise", "externalize", + "factorising", "factorizing", + "familiarise", "familiarize", + "fantasising", "fantasizing", + "favouritism", "favoritism", + "fertilisers", "fertilizers", + "fertilising", "fertilizing", + "flavourings", "flavorings", + "flavourless", "flavorless", + "flavoursome", "flavorsome", + "formalising", "formalizing", + "fossilising", "fossilizing", + "fraternised", "fraternized", + "fraternises", "fraternizes", + "galvanising", "galvanizing", + "generalised", "generalized", + "generalises", "generalizes", + "ghettoising", "ghettoizing", + "globalising", "globalizing", + "gruellingly", "gruelingly", + "gynaecology", "gynecology", + "haematology", "hematology", + "haemoglobin", "hemoglobin", + "haemophilia", "hemophilia", + "haemorrhage", "hemorrhage", + "harmonising", "harmonizing", + "homoeopaths", "homeopaths", + "homoeopathy", "homeopathy", + "homogenised", "homogenized", + "homogenises", "homogenizes", + "hospitalise", "hospitalize", + "hybridising", "hybridizing", + "hypnotising", "hypnotizing", + "hypothesise", "hypothesize", + "immobilised", "immobilized", + "immobiliser", "immobilizer", + "immobilises", "immobilizes", + "immortalise", "immortalize", + "impanelling", "impaneling", + "imperilling", "imperiling", + "initialised", "initialized", + "initialises", "initializes", + "initialling", "initialing", + "instalments", "installments", + "internalise", "internalize", + "italicising", "italicizing", + "jeopardised", "jeopardized", + "jeopardises", "jeopardizes", + "kilogrammes", "kilograms", + "legitimised", "legitimized", + "legitimises", "legitimizes", + "liberalised", "liberalized", + "liberalises", "liberalizes", + "lionisation", "lionization", + "liquidisers", "liquidizers", + "liquidising", "liquidizing", + "magnetising", "magnetizing", + "manoeuvring", "maneuvering", + "marginalise", "marginalize", + "marshalling", "marshaling", + "materialise", "materialize", + "mechanising", "mechanizing", + "memorialise", "memorialize", + "mesmerising", "mesmerizing", + "metabolised", "metabolized", + "metabolises", "metabolizes", + "micrometres", "micrometers", + "militarised", "militarized", + "militarises", "militarizes", + "milligramme", "milligram", + "millilitres", "milliliters", + "millimetres", "millimeters", + "miniaturise", "miniaturize", + "modernising", "modernizing", + "moisturised", "moisturized", + "moisturiser", "moisturizer", + "moisturises", "moisturizes", + "monopolised", "monopolized", + "monopolises", "monopolizes", + "nationalise", "nationalize", + "naturalised", "naturalized", + "naturalises", "naturalizes", + "neighbourly", "neighborly", + "neutralised", "neutralized", + "neutralises", "neutralizes", + "normalising", "normalizing", + "orthopaedic", "orthopedic", + "ostracising", "ostracizing", + "oxidisation", "oxidization", + "paediatrics", "pediatrics", + "paedophiles", "pedophiles", + "paedophilia", "pedophilia", + "passivising", "passivizing", + "pasteurised", "pasteurized", + "pasteurises", "pasteurizes", + "patronising", "patronizing", 
+ "personalise", "personalize", + "plagiarised", "plagiarized", + "plagiarises", "plagiarizes", + "ploughshare", "plowshare", + "politicised", "politicized", + "politicises", "politicizes", + "popularised", "popularized", + "popularises", "popularizes", + "praesidiums", "presidiums", + "pressurised", "pressurized", + "pressurises", "pressurizes", + "prioritised", "prioritized", + "prioritises", "prioritizes", + "privatising", "privatizing", + "proselytise", "proselytize", + "publicising", "publicizing", + "pulverising", "pulverizing", + "quarrelling", "quarreling", + "radicalised", "radicalized", + "radicalises", "radicalizes", + "randomising", "randomizing", + "rationalise", "rationalize", + "realisation", "realization", + "recognising", "recognizing", + "reconnoitre", "reconnoiter", + "regularised", "regularized", + "regularises", "regularizes", + "remodelling", "remodeling", + "reorganised", "reorganized", + "reorganises", "reorganizes", + "revitalised", "revitalized", + "revitalises", "revitalizes", + "rhapsodised", "rhapsodized", + "rhapsodises", "rhapsodizes", + "romanticise", "romanticize", + "scandalised", "scandalized", + "scandalises", "scandalizes", + "sceptically", "skeptically", + "scrutinised", "scrutinized", + "scrutinises", "scrutinizes", + "secularised", "secularized", + "secularises", "secularizes", + "sensitising", "sensitizing", + "serialising", "serializing", + "sermonising", "sermonizing", + "shrivelling", "shriveling", + "signalising", "signalizing", + "snorkelling", "snorkeling", + "snowploughs", "snowplow", + "socialising", "socializing", + "solemnising", "solemnizing", + "specialised", "specialized", + "specialises", "specializes", + "squirrelled", "squirreled", + "stabilisers", "stabilizers", + "stabilising", "stabilizing", + "standardise", "standardize", + "stencilling", "stenciling", + "sterilisers", "sterilizers", + "sterilising", "sterilizing", + "stigmatised", "stigmatized", + "stigmatises", "stigmatizes", + "subsidisers", "subsidizers", + "subsidising", "subsidizing", + "summarising", "summarizing", + "symbolising", "symbolizing", + "sympathised", "sympathized", + "sympathiser", "sympathizer", + "sympathises", "sympathizes", + "synchronise", "synchronize", + "synthesised", "synthesized", + "synthesiser", "synthesizer", + "synthesises", "synthesizes", + "systematise", "systematize", + "tantalising", "tantalizing", + "temporising", "temporizing", + "tenderising", "tenderizing", + "terrorising", "terrorizing", + "theatregoer", "theatergoer", + "traumatised", "traumatized", + "traumatises", "traumatizes", + "trivialised", "trivialized", + "trivialises", "trivializes", + "tyrannising", "tyrannizing", + "uncivilised", "uncivilized", + "unorganised", "unorganized", + "unravelling", "unraveling", + "utilisation", "utilization", + "vandalising", "vandalizing", + "verbalising", "verbalizing", + "victimising", "victimizing", + "visualising", "visualizing", + "vulgarising", "vulgarizing", + "watercolour", "watercolor", + "westernised", "westernized", + "westernises", "westernizes", + "worshipping", "worshiping", + "aeroplanes", "airplanes", + "amortising", "amortizing", + "anglicised", "anglicized", + "anglicises", "anglicizes", + "annualised", "annualized", + "antagonise", "antagonize", + "apologised", "apologized", + "apologises", "apologizes", + "appetisers", "appetizers", + "appetising", "appetizing", + "authorised", "authorized", + "authorises", "authorizes", + "bannisters", "banisters", + "bastardise", "bastardize", + "bedevilled", "bedeviled", + "behaviours", 
"behaviors", + "bejewelled", "bejeweled", + "belaboured", "belabored", + "bowdlerise", "bowdlerize", + "brutalised", "brutalized", + "brutalises", "brutalizes", + "canalising", "canalizing", + "cancelling", "canceling", + "canonising", "canonizing", + "capitalise", "capitalize", + "caramelise", "caramelize", + "carbonised", "carbonized", + "carbonises", "carbonizes", + "catalogued", "cataloged", + "catalogues", "catalogs", + "catalysing", "catalyzing", + "categorise", "categorize", + "cauterised", "cauterized", + "cauterises", "cauterizes", + "centilitre", "centiliter", + "centimetre", "centimeter", + "centralise", "centralize", + "centrefold", "centerfold", + "channelled", "channeled", + "chequebook", "checkbook", + "chiselling", "chiseling", + "civilising", "civilizing", + "clamouring", "clamoring", + "colonisers", "colonizers", + "colonising", "colonizing", + "colourants", "colorants", + "colourized", "colorized", + "colourizes", "colorizes", + "colourless", "colorless", + "connexions", "connections", + "councillor", "councilor", + "counselled", "counseled", + "counsellor", "counselor", + "criticised", "criticized", + "criticises", "criticizes", + "cudgelling", "cudgeling", + "customised", "customized", + "customises", "customizes", + "dehumanise", "dehumanize", + "demobilise", "demobilize", + "demonising", "demonizing", + "demoralise", "demoralize", + "deodorised", "deodorized", + "deodorises", "deodorizes", + "deputising", "deputizing", + "digitising", "digitizing", + "discolours", "discolors", + "dishonours", "dishonors", + "dramatised", "dramatized", + "dramatises", "dramatizes", + "drivelling", "driveling", + "economised", "economized", + "economises", "economizes", + "empathised", "empathized", + "empathises", "empathizes", + "emphasised", "emphasized", + "emphasises", "emphasizes", + "enamelling", "enameling", + "endeavours", "endeavors", + "energising", "energizing", + "epaulettes", "epaulets", + "epicentres", "epicenters", + "epitomised", "epitomized", + "epitomises", "epitomizes", + "equalisers", "equalizers", + "equalising", "equalizing", + "eulogising", "eulogizing", + "evangelise", "evangelize", + "factorised", "factorized", + "factorises", "factorizes", + "fantasised", "fantasized", + "fantasises", "fantasizes", + "favourable", "favorable", + "favourably", "favorably", + "favourites", "favorites", + "feminising", "feminizing", + "fertilised", "fertilized", + "fertiliser", "fertilizer", + "fertilises", "fertilizes", + "fibreglass", "fiberglass", + "finalising", "finalizing", + "flavouring", "flavoring", + "formalised", "formalized", + "formalises", "formalizes", + "fossilised", "fossilized", + "fossilises", "fossilizes", + "fraternise", "fraternize", + "fulfilment", "fulfillment", + "funnelling", "funneling", + "galvanised", "galvanized", + "galvanises", "galvanizes", + "gambolling", "gamboling", + "gaolbreaks", "jailbreaks", + "generalise", "generalize", + "ghettoised", "ghettoized", + "ghettoises", "ghettoizes", + "globalised", "globalized", + "globalises", "globalizes", + "gonorrhoea", "gonorrhea", + "grovelling", "groveling", + "harbouring", "harboring", + "harmonised", "harmonized", + "harmonises", "harmonizes", + "homoeopath", "homeopath", + "homogenise", "homogenize", + "honourable", "honorable", + "honourably", "honorably", + "humanising", "humanizing", + "humourless", "humorless", + "hybridised", "hybridized", + "hybridises", "hybridizes", + "hypnotised", "hypnotized", + "hypnotises", "hypnotizes", + "idealising", "idealizing", + "immobilise", "immobilize", + 
"immunising", "immunizing", + "impanelled", "impaneled", + "imperilled", "imperiled", + "inflexions", "inflections", + "initialise", "initialize", + "initialled", "initialed", + "instalment", "installment", + "ionisation", "ionization", + "italicised", "italicized", + "italicises", "italicizes", + "jeopardise", "jeopardize", + "kilogramme", "kilogram", + "kilometres", "kilometers", + "lacklustre", "lackluster", + "legalising", "legalizing", + "legitimise", "legitimize", + "liberalise", "liberalize", + "liquidised", "liquidized", + "liquidiser", "liquidizer", + "liquidises", "liquidizes", + "localising", "localizing", + "magnetised", "magnetized", + "magnetises", "magnetizes", + "manoeuvred", "maneuvered", + "manoeuvres", "maneuvers", + "marshalled", "marshaled", + "marvelling", "marveling", + "marvellous", "marvelous", + "maximising", "maximizing", + "mechanised", "mechanized", + "mechanises", "mechanizes", + "memorising", "memorizing", + "mesmerised", "mesmerized", + "mesmerises", "mesmerizes", + "metabolise", "metabolize", + "micrometre", "micrometer", + "militarise", "militarize", + "millilitre", "milliliter", + "millimetre", "millimeter", + "minimising", "minimizing", + "mobilising", "mobilizing", + "modernised", "modernized", + "modernises", "modernizes", + "moisturise", "moisturize", + "monopolise", "monopolize", + "moralising", "moralizing", + "mouldering", "moldering", + "moustached", "mustached", + "moustaches", "mustaches", + "naturalise", "naturalize", + "neighbours", "neighbors", + "neutralise", "neutralize", + "normalised", "normalized", + "normalises", "normalizes", + "oesophagus", "esophagus", + "optimising", "optimizing", + "organisers", "organizers", + "organising", "organizing", + "ostracised", "ostracized", + "ostracises", "ostracizes", + "paederasts", "pederasts", + "paediatric", "pediatric", + "paedophile", "pedophile", + "panellists", "panelists", + "paralysing", "paralyzing", + "parcelling", "parceling", + "passivised", "passivized", + "passivises", "passivizes", + "pasteurise", "pasteurize", + "patronised", "patronized", + "patronises", "patronizes", + "penalising", "penalizing", + "pencilling", "penciling", + "plagiarise", "plagiarize", + "polarising", "polarizing", + "politicise", "politicize", + "popularise", "popularize", + "practising", "practicing", + "praesidium", "presidium", + "pressurise", "pressurize", + "prioritise", "prioritize", + "privatised", "privatized", + "privatises", "privatizes", + "programmes", "programs", + "publicised", "publicized", + "publicises", "publicizes", + "pulverised", "pulverized", + "pulverises", "pulverizes", + "pummelling", "pummeled", + "quarrelled", "quarreled", + "radicalise", "radicalize", + "randomised", "randomized", + "randomises", "randomizes", + "realisable", "realizable", + "recognised", "recognized", + "recognises", "recognizes", + "refuelling", "refueling", + "regularise", "regularize", + "remodelled", "remodeled", + "remoulding", "remolding", + "reorganise", "reorganize", + "revitalise", "revitalize", + "rhapsodise", "rhapsodize", + "ritualised", "ritualized", + "sanitising", "sanitizing", + "satirising", "satirizing", + "scandalise", "scandalize", + "scepticism", "skepticism", + "scrutinise", "scrutinize", + "secularise", "secularize", + "sensitised", "sensitized", + "sensitises", "sensitizes", + "sepulchres", "sepulchers", + "serialised", "serialized", + "serialises", "serializes", + "sermonised", "sermonized", + "sermonises", "sermonizes", + "shovelling", "shoveling", + "shrivelled", "shriveled", + "signalised", 
"signalized", + "signalises", "signalizes", + "signalling", "signaling", + "snivelling", "sniveling", + "snorkelled", "snorkeled", + "snowplough", "snowplow", + "socialised", "socialized", + "socialises", "socializes", + "sodomising", "sodomizing", + "solemnised", "solemnized", + "solemnises", "solemnizes", + "specialise", "specialize", + "spiralling", "spiraling", + "splendours", "splendors", + "stabilised", "stabilized", + "stabiliser", "stabilizer", + "stabilises", "stabilizes", + "stencilled", "stenciled", + "sterilised", "sterilized", + "steriliser", "sterilizer", + "sterilises", "sterilizes", + "stigmatise", "stigmatize", + "subsidised", "subsidized", + "subsidiser", "subsidizer", + "subsidises", "subsidizes", + "succouring", "succoring", + "sulphurous", "sulfurous", + "summarised", "summarized", + "summarises", "summarizes", + "swivelling", "swiveling", + "symbolised", "symbolized", + "symbolises", "symbolizes", + "sympathise", "sympathize", + "synthesise", "synthesize", + "tantalised", "tantalized", + "tantalises", "tantalizes", + "temporised", "temporized", + "temporises", "temporizes", + "tenderised", "tenderized", + "tenderises", "tenderizes", + "terrorised", "terrorized", + "terrorises", "terrorizes", + "theorising", "theorizing", + "traumatise", "traumatize", + "travellers", "travelers", + "travelling", "traveling", + "tricolours", "tricolors", + "trivialise", "trivialize", + "tunnelling", "tunneling", + "tyrannised", "tyrannized", + "tyrannises", "tyrannizes", + "unequalled", "unequaled", + "unionising", "unionizing", + "unravelled", "unraveled", + "unrivalled", "unrivaled", + "urbanising", "urbanizing", + "utilisable", "utilizable", + "vandalised", "vandalized", + "vandalises", "vandalizes", + "vaporising", "vaporizing", + "verbalised", "verbalized", + "verbalises", "verbalizes", + "victimised", "victimized", + "victimises", "victimizes", + "visualised", "visualized", + "visualises", "visualizes", + "vocalising", "vocalizing", + "vulcanised", "vulcanized", + "vulgarised", "vulgarized", + "vulgarises", "vulgarizes", + "weaselling", "weaseling", + "westernise", "westernize", + "womanisers", "womanizers", + "womanising", "womanizing", + "worshipped", "worshiped", + "worshipper", "worshiper", + "aeroplane", "airplane", + "aetiology", "etiology", + "agonising", "agonizing", + "almanacks", "almanacs", + "aluminium", "aluminum", + "amortised", "amortized", + "amortises", "amortizes", + "analogues", "analogs", + "analysing", "analyzing", + "anglicise", "anglicize", + "apologise", "apologize", + "appetiser", "appetizer", + "armourers", "armorers", + "armouries", "armories", + "artefacts", "artifacts", + "authorise", "authorize", + "baptising", "baptizing", + "behaviour", "behavior", + "belabours", "belabors", + "brutalise", "brutalize", + "callipers", "calipers", + "canalised", "canalized", + "canalises", "canalizes", + "cancelled", "canceled", + "canonised", "canonized", + "canonises", "canonizes", + "carbonise", "carbonize", + "carolling", "caroling", + "catalogue", "catalog", + "catalysed", "catalyzed", + "catalyses", "catalyzes", + "cauterise", "cauterize", + "cavilling", "caviling", + "chequered", "checkered", + "chiselled", "chiseled", + "civilised", "civilized", + "civilises", "civilizes", + "clamoured", "clamored", + "colonised", "colonized", + "coloniser", "colonizer", + "colonises", "colonizes", + "colourant", "colorant", + "coloureds", "coloreds", + "colourful", "colorful", + "colouring", "coloring", + "colourize", "colorize", + "connexion", "connection", + "criticise", 
"criticize", + "cruellest", "cruelest", + "cudgelled", "cudgeled", + "customise", "customize", + "demeanour", "demeanor", + "demonised", "demonized", + "demonises", "demonizes", + "deodorise", "deodorize", + "deputised", "deputized", + "deputises", "deputizes", + "dialogues", "dialogs", + "diarrhoea", "diarrhea", + "digitised", "digitized", + "digitises", "digitizes", + "discolour", "discolor", + "disfavour", "disfavor", + "dishonour", "dishonor", + "dramatise", "dramatize", + "drivelled", "driveled", + "economise", "economize", + "empathise", "empathize", + "emphasise", "emphasize", + "enamelled", "enameled", + "enamoured", "enamored", + "endeavour", "endeavor", + "energised", "energized", + "energises", "energizes", + "epaulette", "epaulet", + "epicentre", "epicenter", + "epitomise", "epitomize", + "equalised", "equalized", + "equaliser", "equalizer", + "equalises", "equalizes", + "eulogised", "eulogized", + "eulogises", "eulogizes", + "factorise", "factorize", + "fantasise", "fantasize", + "favouring", "favoring", + "favourite", "favorite", + "feminised", "feminized", + "feminises", "feminizes", + "fertilise", "fertilize", + "finalised", "finalized", + "finalises", "finalizes", + "flautists", "flutists", + "flavoured", "flavored", + "formalise", "formalize", + "fossilise", "fossilize", + "funnelled", "funneled", + "galvanise", "galvanize", + "gambolled", "gamboled", + "gaolbirds", "jailbirds", + "gaolbreak", "jailbreak", + "ghettoise", "ghettoize", + "globalise", "globalize", + "gravelled", "graveled", + "grovelled", "groveled", + "gruelling", "grueling", + "harboured", "harbored", + "harmonise", "harmonize", + "honouring", "honoring", + "humanised", "humanized", + "humanises", "humanizes", + "humouring", "humoring", + "hybridise", "hybridize", + "hypnotise", "hypnotize", + "idealised", "idealized", + "idealises", "idealizes", + "idolising", "idolizing", + "immunised", "immunized", + "immunises", "immunizes", + "inflexion", "inflection", + "italicise", "italicize", + "itemising", "itemizing", + "jewellers", "jewelers", + "jewellery", "jewelry", + "kilometre", "kilometer", + "labelling", "labeling", + "labourers", "laborers", + "labouring", "laboring", + "legalised", "legalized", + "legalises", "legalizes", + "leukaemia", "leukemia", + "levellers", "levelers", + "levelling", "leveling", + "libelling", "libeling", + "libellous", "libelous", + "licencing", "licensing", + "lionising", "lionizing", + "liquidise", "liquidize", + "localised", "localized", + "localises", "localizes", + "magnetise", "magnetize", + "manoeuvre", "maneuver", + "marvelled", "marveled", + "maximised", "maximized", + "maximises", "maximizes", + "mechanise", "mechanize", + "mediaeval", "medieval", + "memorised", "memorized", + "memorises", "memorizes", + "mesmerise", "mesmerize", + "minimised", "minimized", + "minimises", "minimizes", + "mobilised", "mobilized", + "mobilises", "mobilizes", + "modellers", "modelers", + "modelling", "modeling", + "modernise", "modernize", + "moralised", "moralized", + "moralises", "moralizes", + "motorised", "motorized", + "mouldered", "moldered", + "mouldiest", "moldiest", + "mouldings", "moldings", + "moustache", "mustache", + "neighbour", "neighbor", + "normalise", "normalize", + "odourless", "odorless", + "oestrogen", "estrogen", + "optimised", "optimized", + "optimises", "optimizes", + "organised", "organized", + "organiser", "organizer", + "organises", "organizes", + "ostracise", "ostracize", + "oxidising", "oxidizing", + "paederast", "pederast", + "panelling", "paneling", + 
"panellist", "panelist", + "paralysed", "paralyzed", + "paralyses", "paralyzes", + "parcelled", "parceled", + "passivise", "passivize", + "patronise", "patronize", + "pedalling", "pedaling", + "penalised", "penalized", + "penalises", "penalizes", + "pencilled", "penciled", + "ploughing", "plowing", + "ploughman", "plowman", + "ploughmen", "plowmen", + "polarised", "polarized", + "polarises", "polarizes", + "practised", "practiced", + "practises", "practices", + "pretences", "pretenses", + "primaeval", "primeval", + "privatise", "privatize", + "programme", "program", + "publicise", "publicize", + "pulverise", "pulverize", + "pummelled", "pummel", + "randomise", "randomize", + "ravelling", "raveling", + "realising", "realizing", + "recognise", "recognize", + "refuelled", "refueled", + "remoulded", "remolded", + "revellers", "revelers", + "revelling", "reveling", + "rivalling", "rivaling", + "saltpetre", "saltpeter", + "sanitised", "sanitized", + "sanitises", "sanitizes", + "satirised", "satirized", + "satirises", "satirizes", + "savouries", "savories", + "savouring", "savoring", + "sceptical", "skeptical", + "sensitise", "sensitize", + "sepulchre", "sepulcher", + "serialise", "serialize", + "sermonise", "sermonize", + "shovelled", "shoveled", + "signalise", "signalize", + "signalled", "signaled", + "snivelled", "sniveled", + "socialise", "socialize", + "sodomised", "sodomized", + "sodomises", "sodomizes", + "solemnise", "solemnize", + "spiralled", "spiraled", + "splendour", "splendor", + "stabilise", "stabilize", + "sterilise", "sterilize", + "subsidise", "subsidize", + "succoured", "succored", + "sulphates", "sulfates", + "sulphides", "sulfides", + "summarise", "summarize", + "swivelled", "swiveled", + "symbolise", "symbolize", + "syphoning", "siphoning", + "tantalise", "tantalize", + "tasselled", "tasseled", + "temporise", "temporize", + "tenderise", "tenderize", + "terrorise", "terrorize", + "theorised", "theorized", + "theorises", "theorizes", + "towelling", "toweling", + "travelled", "traveled", + "traveller", "traveler", + "trialling", "trialing", + "tricolour", "tricolor", + "tunnelled", "tunneled", + "tyrannise", "tyrannize", + "unionised", "unionized", + "unionises", "unionizes", + "unsavoury", "unsavory", + "urbanised", "urbanized", + "urbanises", "urbanizes", + "utilising", "utilizing", + "vandalise", "vandalize", + "vaporised", "vaporized", + "vaporises", "vaporizes", + "verbalise", "verbalize", + "victimise", "victimize", + "visualise", "visualize", + "vocalised", "vocalized", + "vocalises", "vocalizes", + "vulgarise", "vulgarize", + "weaselled", "weaseled", + "womanised", "womanized", + "womaniser", "womanizer", + "womanises", "womanizes", + "yodelling", "yodeling", + "yoghourts", "yogurts", + "agonised", "agonized", + "agonises", "agonizes", + "almanack", "almanac", + "amortise", "amortize", + "analogue", "analog", + "analysed", "analyzed", + "analyses", "analyzes", + "armoured", "armored", + "armourer", "armorer", + "artefact", "artifact", + "baptised", "baptized", + "baptises", "baptizes", + "baulking", "balking", + "belabour", "belabor", + "bevelled", "beveled", + "calibres", "calibers", + "calliper", "caliper", + "canalise", "canalize", + "canonise", "canonize", + "carolled", "caroled", + "catalyse", "catalyze", + "cavilled", "caviled", + "civilise", "civilize", + "clamours", "clamors", + "clangour", "clangor", + "colonise", "colonize", + "coloured", "colored", + "cosiness", "coziness", + "crueller", "crueler", + "defences", "defenses", + "demonise", "demonize", + 
"deputise", "deputize", + "dialling", "dialing", + "dialogue", "dialog", + "digitise", "digitize", + "draughty", "drafty", + "duelling", "dueling", + "energise", "energize", + "enthrals", "enthralls", + "equalise", "equalize", + "eulogise", "eulogize", + "favoured", "favored", + "feminise", "feminize", + "finalise", "finalize", + "flautist", "flutist", + "flavours", "flavors", + "foetuses", "fetuses", + "fuelling", "fueling", + "gaolbird", "jailbird", + "gryphons", "griffins", + "harbours", "harbors", + "honoured", "honored", + "humanise", "humanize", + "humoured", "humored", + "idealise", "idealize", + "idolised", "idolized", + "idolises", "idolizes", + "immunise", "immunize", + "ionisers", "ionizers", + "ionising", "ionizing", + "itemised", "itemized", + "itemises", "itemizes", + "jewelled", "jeweled", + "jeweller", "jeweler", + "labelled", "labeled", + "laboured", "labored", + "labourer", "laborer", + "legalise", "legalize", + "levelled", "leveled", + "leveller", "leveler", + "libelled", "libeled", + "licenced", "licensed", + "licences", "licenses", + "lionised", "lionized", + "lionises", "lionizes", + "localise", "localize", + "maximise", "maximize", + "memorise", "memorize", + "minimise", "minimize", + "misspelt", "misspelled", + "mobilise", "mobilize", + "modelled", "modeled", + "modeller", "modeler", + "moralise", "moralize", + "moulders", "molders", + "mouldier", "moldier", + "moulding", "molding", + "moulting", "molting", + "offences", "offenses", + "optimise", "optimize", + "organise", "organize", + "oxidised", "oxidized", + "oxidises", "oxidizes", + "panelled", "paneled", + "paralyse", "paralyze", + "parlours", "parlors", + "pedalled", "pedaled", + "penalise", "penalize", + "philtres", "filters", + "ploughed", "plowed", + "polarise", "polarize", + "practise", "practice", + "pretence", "pretense", + "ravelled", "raveled", + "realised", "realized", + "realises", "realizes", + "remoulds", "remolds", + "revelled", "reveled", + "reveller", "reveler", + "rivalled", "rivaled", + "rumoured", "rumored", + "sanitise", "sanitize", + "satirise", "satirize", + "saviours", "saviors", + "savoured", "savored", + "sceptics", "skeptics", + "sceptres", "scepters", + "sodomise", "sodomize", + "spectres", "specters", + "succours", "succors", + "sulphate", "sulfate", + "sulphide", "sulfide", + "syphoned", "siphoned", + "theatres", "theaters", + "theorise", "theorize", + "towelled", "toweled", + "toxaemia", "toxemia", + "trialled", "trialed", + "unionise", "unionize", + "urbanise", "urbanize", + "utilised", "utilized", + "utilises", "utilizes", + "vaporise", "vaporize", + "vocalise", "vocalize", + "womanise", "womanize", + "yodelled", "yodeled", + "yoghourt", "yogurt", + "yoghurts", "yogurts", + "agonise", "agonize", + "anaemia", "anemia", + "anaemic", "anemic", + "analyse", "analyze", + "arbours", "arbors", + "armoury", "armory", + "baptise", "baptize", + "baulked", "balked", + "behoved", "behooved", + "behoves", "behooves", + "calibre", "caliber", + "candour", "candor", + "centred", "centered", + "centres", "centers", + "cheques", "checks", + "clamour", "clamor", + "colours", "colors", + "cosiest", "coziest", + "defence", "defense", + "dialled", "dialed", + "distils", "distills", + "duelled", "dueled", + "enthral", "enthrall", + "favours", "favors", + "fervour", "fervor", + "flavour", "flavor", + "fuelled", "fueled", + "fulfils", "fulfills", + "gaolers", "jailers", + "gaoling", "jailing", + "gipsies", "gypsies", + "glueing", "gluing", + "goitres", "goiters", + "grammes", "grams", + "groynes", 
"groins", + "gryphon", "griffin", + "harbour", "harbor", + "honours", "honors", + "humours", "humors", + "idolise", "idolize", + "instals", "installs", + "instils", "instills", + "ionised", "ionized", + "ioniser", "ionizer", + "ionises", "ionizes", + "itemise", "itemize", + "labours", "labors", + "licence", "license", + "lionise", "lionize", + "louvred", "louvered", + "louvres", "louvers", + "moulded", "molded", + "moulder", "molder", + "moulted", "molted", + "offence", "offense", + "oxidise", "oxidize", + "parlour", "parlor", + "philtre", "filter", + "ploughs", "plows", + "pyjamas", "pajamas", + "rancour", "rancor", + "realise", "realize", + "remould", "remold", + "rigours", "rigors", + "rumours", "rumors", + "saviour", "savior", + "savours", "savors", + "savoury", "savory", + "sceptic", "skeptic", + "sceptre", "scepter", + "spectre", "specter", + "storeys", "stories", + "succour", "succor", + "sulphur", "sulfur", + "syphons", "siphons", + "theatre", "theater", + "tumours", "tumors", + "utilise", "utilize", + "vapours", "vapors", + "waggons", "wagons", + "yoghurt", "yogurt", + "ageing", "aging", + "appals", "appalls", + "arbour", "arbor", + "ardour", "ardor", + "baulks", "balks", + "behove", "behoove", + "centre", "center", + "cheque", "check", + "chilli", "chili", + "colour", "color", + "cosier", "cozier", + "cosies", "cozies", + "cosily", "cozily", + "distil", "distill", + "edoema", "edema", + "enrols", "enrolls", + "faecal", "fecal", + "faeces", "feces", + "favour", "favor", + "fibres", "fibers", + "foetal", "fetal", + "foetid", "fetid", + "foetus", "fetus", + "fulfil", "fulfill", + "gaoled", "jailed", + "gaoler", "jailer", + "goitre", "goiter", + "gramme", "gram", + "groyne", "groin", + "honour", "honor", + "humour", "humor", + "instal", "install", + "instil", "instill", + "ionise", "ionize", + "labour", "labor", + "litres", "liters", + "lustre", "luster", + "meagre", "meager", + "metres", "meters", + "mitres", "miters", + "moulds", "molds", + "mouldy", "moldy", + "moults", "molts", + "odours", "odors", + "plough", "plow", + "pyjama", "pajama", + "rigour", "rigor", + "rumour", "rumor", + "savour", "savor", + "storey", "story", + "syphon", "siphon", + "tumour", "tumor", + "valour", "valor", + "vapour", "vapor", + "vigour", "vigor", + "waggon", "wagon", + "appal", "appall", + "baulk", "balk", + "enrol", "enroll", + "fibre", "fiber", + "gaols", "jails", + "litre", "liter", + "metre", "meter", + "mitre", "miter", + "mould", "mold", + "moult", "molt", + "odour", "odor", + "tyres", "tires", + "cosy", "cozy", + "gaol", "jail", + "tyre", "tire", +} + +// DictBritish converts US spellings to UK spellings +var DictBritish = []string{ + "institutionalization", "institutionalisation", + "internationalization", "internationalisation", + "professionalization", "professionalisation", + "compartmentalizing", "compartmentalising", + "institutionalizing", "institutionalising", + "internationalizing", "internationalising", + "compartmentalized", "compartmentalised", + "compartmentalizes", "compartmentalises", + "decriminalization", "decriminalisation", + "denationalization", "denationalisation", + "fictionalizations", "fictionalisations", + "institutionalized", "institutionalised", + "institutionalizes", "institutionalises", + "intellectualizing", "intellectualising", + "internationalized", "internationalised", + "internationalizes", "internationalises", + "pedestrianization", "pedestrianisation", + "professionalizing", "professionalising", + "compartmentalize", "compartmentalise", + "decentralization", 
"decentralisation", + "demilitarization", "demilitarisation", + "externalizations", "externalisations", + "fictionalization", "fictionalisation", + "institutionalize", "institutionalise", + "intellectualized", "intellectualised", + "intellectualizes", "intellectualises", + "internationalize", "internationalise", + "nationalizations", "nationalisations", + "professionalized", "professionalised", + "professionalizes", "professionalises", + "rationalizations", "rationalisations", + "sensationalizing", "sensationalising", + "sentimentalizing", "sentimentalising", + "acclimatization", "acclimatisation", + "commercializing", "commercialising", + "conceptualizing", "conceptualising", + "contextualizing", "contextualising", + "crystallization", "crystallisation", + "decriminalizing", "decriminalising", + "democratization", "democratisation", + "denationalizing", "denationalising", + "depersonalizing", "depersonalising", + "desensitization", "desensitisation", + "disorganization", "disorganisation", + "extemporization", "extemporisation", + "externalization", "externalisation", + "familiarization", "familiarisation", + "generalizations", "generalisations", + "hospitalization", "hospitalisation", + "individualizing", "individualising", + "industrializing", "industrialising", + "intellectualize", "intellectualise", + "internalization", "internalisation", + "maneuverability", "manoeuvrability", + "materialization", "materialisation", + "miniaturization", "miniaturisation", + "nationalization", "nationalisation", + "overemphasizing", "overemphasising", + "paleontologists", "palaeontologists", + "particularizing", "particularising", + "pedestrianizing", "pedestrianising", + "professionalize", "professionalise", + "psychoanalyzing", "psychoanalysing", + "rationalization", "rationalisation", + "reorganizations", "reorganisations", + "revolutionizing", "revolutionising", + "sensationalized", "sensationalised", + "sensationalizes", "sensationalises", + "sentimentalized", "sentimentalised", + "sentimentalizes", "sentimentalises", + "specializations", "specialisations", + "standardization", "standardisation", + "synchronization", "synchronisation", + "systematization", "systematisation", + "aggrandizement", "aggrandisement", + "characterizing", "characterising", + "collectivizing", "collectivising", + "commercialized", "commercialised", + "commercializes", "commercialises", + "conceptualized", "conceptualised", + "conceptualizes", "conceptualises", + "contextualized", "contextualised", + "contextualizes", "contextualises", + "decentralizing", "decentralising", + "decriminalized", "decriminalised", + "decriminalizes", "decriminalises", + "dehumanization", "dehumanisation", + "demilitarizing", "demilitarising", + "demobilization", "demobilisation", + "demoralization", "demoralisation", + "denationalized", "denationalised", + "denationalizes", "denationalises", + "depersonalized", "depersonalised", + "depersonalizes", "depersonalises", + "dramatizations", "dramatisations", + "editorializing", "editorialising", + "fictionalizing", "fictionalising", + "fraternization", "fraternisation", + "generalization", "generalisation", + "immobilization", "immobilisation", + "individualized", "individualised", + "individualizes", "individualises", + "industrialized", "industrialised", + "industrializes", "industrialises", + "liberalization", "liberalisation", + "monopolization", "monopolisation", + "naturalization", "naturalisation", + "neighborliness", "neighbourliness", + "neutralization", "neutralisation", + 
"organizational", "organisational", + "outmaneuvering", "outmanoeuvring", + "overemphasized", "overemphasised", + "overemphasizes", "overemphasises", + "paleontologist", "palaeontologist", + "particularized", "particularised", + "particularizes", "particularises", + "pasteurization", "pasteurisation", + "pedestrianized", "pedestrianised", + "pedestrianizes", "pedestrianises", + "philosophizing", "philosophising", + "politicization", "politicisation", + "popularization", "popularisation", + "pressurization", "pressurisation", + "prioritization", "prioritisation", + "privatizations", "privatisations", + "propagandizing", "propagandising", + "psychoanalyzed", "psychoanalysed", + "psychoanalyzes", "psychoanalyses", + "reconnoitering", "reconnoitring", + "regularization", "regularisation", + "reorganization", "reorganisation", + "revolutionized", "revolutionised", + "revolutionizes", "revolutionises", + "secularization", "secularisation", + "sensationalize", "sensationalise", + "sentimentalize", "sentimentalise", + "serializations", "serialisations", + "specialization", "specialisation", + "sterilizations", "sterilisations", + "stigmatization", "stigmatisation", + "transistorized", "transistorised", + "unrecognizable", "unrecognisable", + "visualizations", "visualisations", + "westernization", "westernisation", + "accessorizing", "accessorising", + "acclimatizing", "acclimatising", + "amortizations", "amortisations", + "amphitheaters", "amphitheatres", + "anesthetizing", "anaesthetising", + "archeologists", "archaeologists", + "breathalyzers", "breathalysers", + "breathalyzing", "breathalysing", + "cannibalizing", "cannibalising", + "characterized", "characterised", + "characterizes", "characterises", + "circularizing", "circularising", + "collectivized", "collectivised", + "collectivizes", "collectivises", + "commercialize", "commercialise", + "computerizing", "computerising", + "conceptualize", "conceptualise", + "contextualize", "contextualise", + "criminalizing", "criminalising", + "crystallizing", "crystallising", + "decentralized", "decentralised", + "decentralizes", "decentralises", + "decriminalize", "decriminalise", + "demilitarized", "demilitarised", + "demilitarizes", "demilitarises", + "democratizing", "democratising", + "denationalize", "denationalise", + "depersonalize", "depersonalise", + "desensitizing", "desensitising", + "destabilizing", "destabilising", + "disemboweling", "disembowelling", + "dramatization", "dramatisation", + "editorialized", "editorialised", + "editorializes", "editorialises", + "extemporizing", "extemporising", + "externalizing", "externalising", + "familiarizing", "familiarising", + "fertilization", "fertilisation", + "fictionalized", "fictionalised", + "fictionalizes", "fictionalises", + "formalization", "formalisation", + "fossilization", "fossilisation", + "globalization", "globalisation", + "gynecological", "gynaecological", + "gynecologists", "gynaecologists", + "harmonization", "harmonisation", + "hematological", "haematological", + "hematologists", "haematologists", + "hospitalizing", "hospitalising", + "hypothesizing", "hypothesising", + "immortalizing", "immortalising", + "individualize", "individualise", + "industrialize", "industrialise", + "internalizing", "internalising", + "marginalizing", "marginalising", + "materializing", "materialising", + "mechanization", "mechanisation", + "memorializing", "memorialising", + "miniaturizing", "miniaturising", + "nationalizing", "nationalising", + "neighborhoods", "neighbourhoods", + "normalization", 
"normalisation", + "organizations", "organisations", + "outmaneuvered", "outmanoeuvred", + "overemphasize", "overemphasise", + "particularize", "particularise", + "passivization", "passivisation", + "patronizingly", "patronisingly", + "pedestrianize", "pedestrianise", + "pediatricians", "paediatricians", + "personalizing", "personalising", + "philosophized", "philosophised", + "philosophizes", "philosophises", + "privatization", "privatisation", + "propagandized", "propagandised", + "propagandizes", "propagandises", + "proselytizers", "proselytisers", + "proselytizing", "proselytising", + "psychoanalyze", "psychoanalyse", + "pulverization", "pulverisation", + "rationalizing", "rationalising", + "reconnoitered", "reconnoitred", + "revolutionize", "revolutionise", + "romanticizing", "romanticising", + "serialization", "serialisation", + "socialization", "socialisation", + "standardizing", "standardising", + "sterilization", "sterilisation", + "subsidization", "subsidisation", + "synchronizing", "synchronising", + "systematizing", "systematising", + "tantalizingly", "tantalisingly", + "underutilized", "underutilised", + "victimization", "victimisation", + "visualization", "visualisation", + "vocalizations", "vocalisations", + "vulgarization", "vulgarisation", + "accessorized", "accessorised", + "accessorizes", "accessorises", + "acclimatized", "acclimatised", + "acclimatizes", "acclimatises", + "amortization", "amortisation", + "amphitheater", "amphitheatre", + "anesthetists", "anaesthetists", + "anesthetized", "anaesthetised", + "anesthetizes", "anaesthetises", + "antagonizing", "antagonising", + "appetizingly", "appetisingly", + "archeologist", "archaeologist", + "backpedaling", "backpedalling", + "bastardizing", "bastardising", + "behaviorists", "behaviourists", + "bowdlerizing", "bowdlerising", + "breathalyzed", "breathalysed", + "breathalyzes", "breathalyses", + "cannibalized", "cannibalised", + "cannibalizes", "cannibalises", + "capitalizing", "capitalising", + "caramelizing", "caramelising", + "categorizing", "categorising", + "centerpieces", "centrepieces", + "centralizing", "centralising", + "characterize", "characterise", + "circularized", "circularised", + "circularizes", "circularises", + "clarinetists", "clarinettists", + "collectivize", "collectivise", + "colonization", "colonisation", + "computerized", "computerised", + "computerizes", "computerises", + "criminalized", "criminalised", + "criminalizes", "criminalises", + "crystallized", "crystallised", + "crystallizes", "crystallises", + "decentralize", "decentralise", + "dehumanizing", "dehumanising", + "demilitarize", "demilitarise", + "demobilizing", "demobilising", + "democratized", "democratised", + "democratizes", "democratises", + "demoralizing", "demoralising", + "desensitized", "desensitised", + "desensitizes", "desensitises", + "destabilized", "destabilised", + "destabilizes", "destabilises", + "disemboweled", "disembowelled", + "dishonorable", "dishonourable", + "dishonorably", "dishonourably", + "disorganized", "disorganised", + "editorialize", "editorialise", + "equalization", "equalisation", + "evangelizing", "evangelising", + "extemporized", "extemporised", + "extemporizes", "extemporises", + "externalized", "externalised", + "externalizes", "externalises", + "familiarized", "familiarised", + "familiarizes", "familiarises", + "fictionalize", "fictionalise", + "finalization", "finalisation", + "fraternizing", "fraternising", + "generalizing", "generalising", + "gynecologist", "gynaecologist", + "hematologist", 
"haematologist", + "hemophiliacs", "haemophiliacs", + "hemorrhaging", "haemorrhaging", + "homogenizing", "homogenising", + "hospitalized", "hospitalised", + "hospitalizes", "hospitalises", + "hypothesized", "hypothesised", + "hypothesizes", "hypothesises", + "idealization", "idealisation", + "immobilizers", "immobilisers", + "immobilizing", "immobilising", + "immortalized", "immortalised", + "immortalizes", "immortalises", + "immunization", "immunisation", + "initializing", "initialising", + "installments", "instalments", + "internalized", "internalised", + "internalizes", "internalises", + "jeopardizing", "jeopardising", + "legalization", "legalisation", + "legitimizing", "legitimising", + "liberalizing", "liberalising", + "maneuverable", "manoeuvrable", + "maneuverings", "manoeuvrings", + "marginalized", "marginalised", + "marginalizes", "marginalises", + "materialized", "materialised", + "materializes", "materialises", + "maximization", "maximisation", + "memorialized", "memorialised", + "memorializes", "memorialises", + "metabolizing", "metabolising", + "militarizing", "militarising", + "miniaturized", "miniaturised", + "miniaturizes", "miniaturises", + "miscataloged", "miscatalogued", + "misdemeanors", "misdemeanours", + "mobilization", "mobilisation", + "moisturizers", "moisturisers", + "moisturizing", "moisturising", + "monopolizing", "monopolising", + "multicolored", "multicoloured", + "nationalized", "nationalised", + "nationalizes", "nationalises", + "naturalizing", "naturalising", + "neighborhood", "neighbourhood", + "neutralizing", "neutralising", + "organization", "organisation", + "outmaneuvers", "outmanoeuvres", + "paleontology", "palaeontology", + "pasteurizing", "pasteurising", + "pediatrician", "paediatrician", + "personalized", "personalised", + "personalizes", "personalises", + "philosophize", "philosophise", + "plagiarizing", "plagiarising", + "polarization", "polarisation", + "politicizing", "politicising", + "popularizing", "popularising", + "pressurizing", "pressurising", + "prioritizing", "prioritising", + "propagandize", "propagandise", + "proselytized", "proselytised", + "proselytizer", "proselytiser", + "proselytizes", "proselytises", + "radicalizing", "radicalising", + "rationalized", "rationalised", + "rationalizes", "rationalises", + "realizations", "realisations", + "recognizable", "recognisable", + "recognizably", "recognisably", + "recognizance", "recognisance", + "reconnoiters", "reconnoitres", + "regularizing", "regularising", + "reorganizing", "reorganising", + "revitalizing", "revitalising", + "rhapsodizing", "rhapsodising", + "romanticized", "romanticised", + "romanticizes", "romanticises", + "scandalizing", "scandalising", + "scrutinizing", "scrutinising", + "secularizing", "secularising", + "standardized", "standardised", + "standardizes", "standardises", + "stigmatizing", "stigmatising", + "sympathizers", "sympathisers", + "sympathizing", "sympathising", + "synchronized", "synchronised", + "synchronizes", "synchronises", + "synthesizing", "synthesising", + "systematized", "systematised", + "systematizes", "systematises", + "theatergoers", "theatregoers", + "traumatizing", "traumatising", + "trivializing", "trivialising", + "unauthorized", "unauthorised", + "unionization", "unionisation", + "unrecognized", "unrecognised", + "urbanization", "urbanisation", + "vaporization", "vaporisation", + "vocalization", "vocalisation", + "westernizing", "westernising", + "accessorize", "accessorise", + "acclimatize", "acclimatise", + "agonizingly", "agonisingly", 
+ "amortizable", "amortisable", + "anesthetics", "anaesthetics", + "anesthetist", "anaesthetist", + "anesthetize", "anaesthetise", + "anglicizing", "anglicising", + "antagonized", "antagonised", + "antagonizes", "antagonises", + "apologizing", "apologising", + "backpedaled", "backpedalled", + "bastardized", "bastardised", + "bastardizes", "bastardises", + "behaviorism", "behaviourism", + "behaviorist", "behaviourist", + "bowdlerized", "bowdlerised", + "bowdlerizes", "bowdlerises", + "brutalizing", "brutalising", + "cannibalize", "cannibalise", + "capitalized", "capitalised", + "capitalizes", "capitalises", + "caramelized", "caramelised", + "caramelizes", "caramelises", + "carbonizing", "carbonising", + "categorized", "categorised", + "categorizes", "categorises", + "cauterizing", "cauterising", + "centerfolds", "centrefolds", + "centerpiece", "centrepiece", + "centiliters", "centilitres", + "centimeters", "centimetres", + "centralized", "centralised", + "centralizes", "centralises", + "circularize", "circularise", + "clarinetist", "clarinettist", + "computerize", "computerise", + "criminalize", "criminalise", + "criticizing", "criticising", + "crystallize", "crystallise", + "customizing", "customising", + "defenseless", "defenceless", + "dehumanized", "dehumanised", + "dehumanizes", "dehumanises", + "demobilized", "demobilised", + "demobilizes", "demobilises", + "democratize", "democratise", + "demoralized", "demoralised", + "demoralizes", "demoralises", + "deodorizing", "deodorising", + "desensitize", "desensitise", + "destabilize", "destabilise", + "discoloring", "discolouring", + "dishonoring", "dishonouring", + "dramatizing", "dramatising", + "economizing", "economising", + "empathizing", "empathising", + "emphasizing", "emphasising", + "endeavoring", "endeavouring", + "epitomizing", "epitomising", + "esophaguses", "oesophaguses", + "evangelized", "evangelised", + "evangelizes", "evangelises", + "extemporize", "extemporise", + "externalize", "externalise", + "factorizing", "factorising", + "familiarize", "familiarise", + "fantasizing", "fantasising", + "fertilizers", "fertilisers", + "fertilizing", "fertilising", + "formalizing", "formalising", + "fossilizing", "fossilising", + "fraternized", "fraternised", + "fraternizes", "fraternises", + "fulfillment", "fulfilment", + "galvanizing", "galvanising", + "generalized", "generalised", + "generalizes", "generalises", + "ghettoizing", "ghettoising", + "globalizing", "globalising", + "harmonizing", "harmonising", + "hemophiliac", "haemophiliac", + "hemorrhaged", "haemorrhaged", + "hemorrhages", "haemorrhages", + "hemorrhoids", "haemorrhoids", + "homogenized", "homogenised", + "homogenizes", "homogenises", + "hospitalize", "hospitalise", + "hybridizing", "hybridising", + "hypnotizing", "hypnotising", + "hypothesize", "hypothesise", + "immobilized", "immobilised", + "immobilizer", "immobiliser", + "immobilizes", "immobilises", + "immortalize", "immortalise", + "initialized", "initialised", + "initializes", "initialises", + "installment", "instalment", + "internalize", "internalise", + "italicizing", "italicising", + "jeopardized", "jeopardised", + "jeopardizes", "jeopardises", + "legitimized", "legitimised", + "legitimizes", "legitimises", + "liberalized", "liberalised", + "liberalizes", "liberalises", + "lionization", "lionisation", + "liquidizers", "liquidisers", + "liquidizing", "liquidising", + "magnetizing", "magnetising", + "maneuvering", "manoeuvring", + "marginalize", "marginalise", + "marvelously", "marvellously", + "materialize", 
"materialise", + "mechanizing", "mechanising", + "memorialize", "memorialise", + "mesmerizing", "mesmerising", + "metabolized", "metabolised", + "metabolizes", "metabolises", + "militarized", "militarised", + "militarizes", "militarises", + "milliliters", "millilitres", + "millimeters", "millimetres", + "miniaturize", "miniaturise", + "misbehavior", "misbehaviour", + "misdemeanor", "misdemeanour", + "modernizing", "modernising", + "moisturized", "moisturised", + "moisturizer", "moisturiser", + "moisturizes", "moisturises", + "monopolized", "monopolised", + "monopolizes", "monopolises", + "nationalize", "nationalise", + "naturalized", "naturalised", + "naturalizes", "naturalises", + "neighboring", "neighbouring", + "neutralized", "neutralised", + "neutralizes", "neutralises", + "normalizing", "normalising", + "orthopedics", "orthopaedics", + "ostracizing", "ostracising", + "outmaneuver", "outmanoeuvre", + "oxidization", "oxidisation", + "pasteurized", "pasteurised", + "pasteurizes", "pasteurises", + "patronizing", "patronising", + "personalize", "personalise", + "plagiarized", "plagiarised", + "plagiarizes", "plagiarises", + "politicized", "politicised", + "politicizes", "politicises", + "popularized", "popularised", + "popularizes", "popularises", + "pressurized", "pressurised", + "pressurizes", "pressurises", + "prioritized", "prioritised", + "prioritizes", "prioritises", + "privatizing", "privatising", + "proselytize", "proselytise", + "publicizing", "publicising", + "pulverizing", "pulverising", + "radicalized", "radicalised", + "radicalizes", "radicalises", + "randomizing", "randomising", + "rationalize", "rationalise", + "realization", "realisation", + "recognizing", "recognising", + "reconnoiter", "reconnoitre", + "regularized", "regularised", + "regularizes", "regularises", + "reorganized", "reorganised", + "reorganizes", "reorganises", + "revitalized", "revitalised", + "revitalizes", "revitalises", + "rhapsodized", "rhapsodised", + "rhapsodizes", "rhapsodises", + "romanticize", "romanticise", + "scandalized", "scandalised", + "scandalizes", "scandalises", + "scrutinized", "scrutinised", + "scrutinizes", "scrutinises", + "secularized", "secularised", + "secularizes", "secularises", + "sensitizing", "sensitising", + "serializing", "serialising", + "sermonizing", "sermonising", + "signalizing", "signalising", + "skeptically", "sceptically", + "socializing", "socialising", + "solemnizing", "solemnising", + "specialized", "specialised", + "specializes", "specialises", + "squirreling", "squirrelling", + "stabilizers", "stabilisers", + "stabilizing", "stabilising", + "standardize", "standardise", + "sterilizers", "sterilisers", + "sterilizing", "sterilising", + "stigmatized", "stigmatised", + "stigmatizes", "stigmatises", + "subsidizers", "subsidisers", + "subsidizing", "subsidising", + "summarizing", "summarising", + "symbolizing", "symbolising", + "sympathized", "sympathised", + "sympathizer", "sympathiser", + "sympathizes", "sympathises", + "synchronize", "synchronise", + "synthesized", "synthesised", + "synthesizes", "synthesises", + "systematize", "systematise", + "tantalizing", "tantalising", + "temporizing", "temporising", + "tenderizing", "tenderising", + "terrorizing", "terrorising", + "theatergoer", "theatregoer", + "traumatized", "traumatised", + "traumatizes", "traumatises", + "trivialized", "trivialised", + "trivializes", "trivialises", + "tyrannizing", "tyrannising", + "uncataloged", "uncatalogued", + "uncivilized", "uncivilised", + "unfavorable", "unfavourable", + 
"unfavorably", "unfavourably", + "unorganized", "unorganised", + "untrammeled", "untrammelled", + "utilization", "utilisation", + "vandalizing", "vandalising", + "verbalizing", "verbalising", + "victimizing", "victimising", + "visualizing", "visualising", + "vulgarizing", "vulgarising", + "watercolors", "watercolours", + "westernized", "westernised", + "westernizes", "westernises", + "amortizing", "amortising", + "anesthesia", "anaesthesia", + "anesthetic", "anaesthetic", + "anglicized", "anglicised", + "anglicizes", "anglicises", + "annualized", "annualised", + "antagonize", "antagonise", + "apologized", "apologised", + "apologizes", "apologises", + "appetizers", "appetisers", + "appetizing", "appetising", + "archeology", "archaeology", + "authorizes", "authorises", + "bastardize", "bastardise", + "bedeviling", "bedevilling", + "behavioral", "behavioural", + "belaboring", "belabouring", + "bowdlerize", "bowdlerise", + "brutalized", "brutalised", + "brutalizes", "brutalises", + "canalizing", "canalising", + "canonizing", "canonising", + "capitalize", "capitalise", + "caramelize", "caramelise", + "carbonized", "carbonised", + "carbonizes", "carbonises", + "cataloging", "cataloguing", + "catalyzing", "catalysing", + "categorize", "categorise", + "cauterized", "cauterised", + "cauterizes", "cauterises", + "centerfold", "centrefold", + "centiliter", "centilitre", + "centimeter", "centimetre", + "centralize", "centralise", + "channeling", "channelling", + "checkbooks", "chequebooks", + "civilizing", "civilising", + "colonizers", "colonisers", + "colonizing", "colonising", + "colorfully", "colourfully", + "colorizing", "colourizing", + "councilors", "councillors", + "counselors", "counsellors", + "criticized", "criticised", + "criticizes", "criticises", + "customized", "customised", + "customizes", "customises", + "dehumanize", "dehumanise", + "demobilize", "demobilise", + "demonizing", "demonising", + "demoralize", "demoralise", + "deodorized", "deodorised", + "deodorizes", "deodorises", + "deputizing", "deputising", + "digitizing", "digitising", + "discolored", "discoloured", + "disheveled", "dishevelled", + "dishonored", "dishonoured", + "dramatized", "dramatised", + "dramatizes", "dramatises", + "economized", "economised", + "economizes", "economises", + "empathized", "empathised", + "empathizes", "empathises", + "emphasized", "emphasised", + "emphasizes", "emphasises", + "endeavored", "endeavoured", + "energizing", "energising", + "epicenters", "epicentres", + "epitomized", "epitomised", + "epitomizes", "epitomises", + "equalizers", "equalisers", + "equalizing", "equalising", + "eulogizing", "eulogising", + "evangelize", "evangelise", + "factorized", "factorised", + "factorizes", "factorises", + "fantasized", "fantasised", + "fantasizes", "fantasises", + "favoritism", "favouritism", + "feminizing", "feminising", + "fertilized", "fertilised", + "fertilizer", "fertiliser", + "fertilizes", "fertilises", + "fiberglass", "fibreglass", + "finalizing", "finalising", + "flavorings", "flavourings", + "flavorless", "flavourless", + "flavorsome", "flavoursome", + "formalized", "formalised", + "formalizes", "formalises", + "fossilized", "fossilised", + "fossilizes", "fossilises", + "fraternize", "fraternise", + "galvanized", "galvanised", + "galvanizes", "galvanises", + "generalize", "generalise", + "ghettoized", "ghettoised", + "ghettoizes", "ghettoises", + "globalized", "globalised", + "globalizes", "globalises", + "gruelingly", "gruellingly", + "gynecology", "gynaecology", + "harmonized", 
"harmonised", + "harmonizes", "harmonises", + "hematology", "haematology", + "hemoglobin", "haemoglobin", + "hemophilia", "haemophilia", + "hemorrhage", "haemorrhage", + "homogenize", "homogenise", + "humanizing", "humanising", + "hybridized", "hybridised", + "hybridizes", "hybridises", + "hypnotized", "hypnotised", + "hypnotizes", "hypnotises", + "idealizing", "idealising", + "immobilize", "immobilise", + "immunizing", "immunising", + "impaneling", "impanelling", + "imperiling", "imperilling", + "initialing", "initialling", + "initialize", "initialise", + "ionization", "ionisation", + "italicized", "italicised", + "italicizes", "italicises", + "jeopardize", "jeopardise", + "kilometers", "kilometres", + "lackluster", "lacklustre", + "legalizing", "legalising", + "legitimize", "legitimise", + "liberalize", "liberalise", + "liquidized", "liquidised", + "liquidizer", "liquidiser", + "liquidizes", "liquidises", + "localizing", "localising", + "magnetized", "magnetised", + "magnetizes", "magnetises", + "maneuvered", "manoeuvred", + "marshaling", "marshalling", + "maximizing", "maximising", + "mechanized", "mechanised", + "mechanizes", "mechanises", + "memorizing", "memorising", + "mesmerized", "mesmerised", + "mesmerizes", "mesmerises", + "metabolize", "metabolise", + "militarize", "militarise", + "milliliter", "millilitre", + "millimeter", "millimetre", + "minimizing", "minimising", + "mobilizing", "mobilising", + "modernized", "modernised", + "modernizes", "modernises", + "moisturize", "moisturise", + "monopolize", "monopolise", + "moralizing", "moralising", + "naturalize", "naturalise", + "neighborly", "neighbourly", + "neutralize", "neutralise", + "normalized", "normalised", + "normalizes", "normalises", + "optimizing", "optimising", + "organizers", "organisers", + "organizing", "organising", + "orthopedic", "orthopaedic", + "ostracized", "ostracised", + "ostracizes", "ostracises", + "paralyzing", "paralysing", + "pasteurize", "pasteurise", + "patronized", "patronised", + "patronizes", "patronises", + "pedophiles", "paedophiles", + "pedophilia", "paedophilia", + "penalizing", "penalising", + "plagiarize", "plagiarise", + "plowshares", "ploughshares", + "polarizing", "polarising", + "politicize", "politicise", + "popularize", "popularise", + "prioritize", "prioritise", + "privatized", "privatised", + "privatizes", "privatises", + "publicized", "publicised", + "publicizes", "publicises", + "pulverized", "pulverised", + "pulverizes", "pulverises", + "quarreling", "quarrelling", + "radicalize", "radicalise", + "randomized", "randomised", + "randomizes", "randomises", + "realizable", "realisable", + "recognized", "recognised", + "recognizes", "recognises", + "regularize", "regularise", + "remodeling", "remodelling", + "reorganize", "reorganise", + "revitalize", "revitalise", + "rhapsodize", "rhapsodise", + "ritualized", "ritualised", + "sanitizing", "sanitising", + "satirizing", "satirising", + "scandalize", "scandalise", + "scrutinize", "scrutinise", + "secularize", "secularise", + "sensitized", "sensitised", + "sensitizes", "sensitises", + "sepulchers", "sepulchres", + "serialized", "serialised", + "serializes", "serialises", + "sermonized", "sermonised", + "sermonizes", "sermonises", + "shriveling", "shrivelling", + "signalized", "signalised", + "signalizes", "signalises", + "skepticism", "scepticism", + "socialized", "socialised", + "socializes", "socialises", + "sodomizing", "sodomising", + "solemnized", "solemnised", + "solemnizes", "solemnises", + "specialize", "specialise", + 
"squirreled", "squirrelled", + "stabilized", "stabilised", + "stabilizer", "stabiliser", + "stabilizes", "stabilises", + "stenciling", "stencilling", + "sterilized", "sterilised", + "sterilizer", "steriliser", + "sterilizes", "sterilises", + "stigmatize", "stigmatise", + "subsidized", "subsidised", + "subsidizer", "subsidiser", + "subsidizes", "subsidises", + "summarized", "summarised", + "summarizes", "summarises", + "symbolized", "symbolised", + "symbolizes", "symbolises", + "sympathize", "sympathise", + "tantalized", "tantalised", + "tantalizes", "tantalises", + "temporized", "temporised", + "temporizes", "temporises", + "tenderized", "tenderised", + "tenderizes", "tenderises", + "terrorized", "terrorised", + "terrorizes", "terrorises", + "theorizing", "theorising", + "traumatize", "traumatise", + "trivialize", "trivialise", + "tyrannized", "tyrannised", + "tyrannizes", "tyrannises", + "unionizing", "unionising", + "unraveling", "unravelling", + "urbanizing", "urbanising", + "utilizable", "utilisable", + "vandalized", "vandalised", + "vandalizes", "vandalises", + "vaporizing", "vaporising", + "verbalized", "verbalised", + "verbalizes", "verbalises", + "victimized", "victimised", + "victimizes", "victimises", + "visualized", "visualised", + "visualizes", "visualises", + "vocalizing", "vocalising", + "vulcanized", "vulcanised", + "vulgarized", "vulgarised", + "vulgarizes", "vulgarises", + "watercolor", "watercolour", + "westernize", "westernise", + "womanizers", "womanisers", + "womanizing", "womanising", + "worshiping", "worshipping", + "agonizing", "agonising", + "airplanes", "aeroplanes", + "amortized", "amortised", + "amortizes", "amortises", + "analyzing", "analysing", + "apologize", "apologise", + "appetizer", "appetiser", + "artifacts", "artefacts", + "baptizing", "baptising", + "bedeviled", "bedevilled", + "behaviors", "behaviours", + "bejeweled", "bejewelled", + "belabored", "belaboured", + "brutalize", "brutalise", + "canalized", "canalised", + "canalizes", "canalises", + "canonized", "canonised", + "canonizes", "canonises", + "carbonize", "carbonise", + "cataloged", "catalogued", + "catalyzed", "catalysed", + "catalyzes", "catalyses", + "cauterize", "cauterise", + "channeled", "channelled", + "checkbook", "chequebook", + "checkered", "chequered", + "chiseling", "chiselling", + "civilized", "civilised", + "civilizes", "civilises", + "clamoring", "clamouring", + "colonized", "colonised", + "colonizer", "coloniser", + "colonizes", "colonises", + "colorants", "colourants", + "colorized", "colourized", + "colorizes", "colourizes", + "colorless", "colourless", + "councilor", "councillor", + "counseled", "counselled", + "counselor", "counsellor", + "criticize", "criticise", + "cudgeling", "cudgelling", + "customize", "customise", + "demonized", "demonised", + "demonizes", "demonises", + "deodorize", "deodorise", + "deputized", "deputised", + "deputizes", "deputises", + "digitized", "digitised", + "digitizes", "digitises", + "discolors", "discolours", + "dishonors", "dishonours", + "dramatize", "dramatise", + "driveling", "drivelling", + "economize", "economise", + "empathize", "empathise", + "emphasize", "emphasise", + "enameling", "enamelling", + "endeavors", "endeavours", + "energized", "energised", + "energizes", "energises", + "enthralls", "enthrals", + "epicenter", "epicentre", + "epitomize", "epitomise", + "equalized", "equalised", + "equalizer", "equaliser", + "equalizes", "equalises", + "eulogized", "eulogised", + "eulogizes", "eulogises", + "factorize", "factorise", + 
"fantasize", "fantasise", + "favorable", "favourable", + "favorably", "favourably", + "favorites", "favourites", + "feminized", "feminised", + "feminizes", "feminises", + "fertilize", "fertilise", + "finalized", "finalised", + "finalizes", "finalises", + "flavoring", "flavouring", + "formalize", "formalise", + "fossilize", "fossilise", + "funneling", "funnelling", + "galvanize", "galvanise", + "gamboling", "gambolling", + "ghettoize", "ghettoise", + "globalize", "globalise", + "gonorrhea", "gonorrhoea", + "groveling", "grovelling", + "harboring", "harbouring", + "harmonize", "harmonise", + "honorably", "honourably", + "humanized", "humanised", + "humanizes", "humanises", + "hybridize", "hybridise", + "hypnotize", "hypnotise", + "idealized", "idealised", + "idealizes", "idealises", + "idolizing", "idolising", + "immunized", "immunised", + "immunizes", "immunises", + "impaneled", "impanelled", + "imperiled", "imperilled", + "initialed", "initialled", + "italicize", "italicise", + "itemizing", "itemising", + "kilometer", "kilometre", + "legalized", "legalised", + "legalizes", "legalises", + "lionizing", "lionising", + "liquidize", "liquidise", + "localized", "localised", + "localizes", "localises", + "magnetize", "magnetise", + "maneuvers", "manoeuvres", + "marshaled", "marshalled", + "marveling", "marvelling", + "marvelous", "marvellous", + "maximized", "maximised", + "maximizes", "maximises", + "mechanize", "mechanise", + "memorized", "memorised", + "memorizes", "memorises", + "mesmerize", "mesmerise", + "minimized", "minimised", + "minimizes", "minimises", + "mobilized", "mobilised", + "mobilizes", "mobilises", + "modernize", "modernise", + "moldering", "mouldering", + "moralized", "moralised", + "moralizes", "moralises", + "motorized", "motorised", + "mustached", "moustached", + "mustaches", "moustaches", + "neighbors", "neighbours", + "normalize", "normalise", + "optimized", "optimised", + "optimizes", "optimises", + "organized", "organised", + "organizer", "organiser", + "organizes", "organises", + "ostracize", "ostracise", + "oxidizing", "oxidising", + "panelists", "panellists", + "paralyzed", "paralysed", + "paralyzes", "paralyses", + "parceling", "parcelling", + "patronize", "patronise", + "pedophile", "paedophile", + "penalized", "penalised", + "penalizes", "penalises", + "penciling", "pencilling", + "plowshare", "ploughshare", + "polarized", "polarised", + "polarizes", "polarises", + "practiced", "practised", + "pretenses", "pretences", + "privatize", "privatise", + "publicize", "publicise", + "pulverize", "pulverise", + "quarreled", "quarrelled", + "randomize", "randomise", + "realizing", "realising", + "recognize", "recognise", + "refueling", "refuelling", + "remodeled", "remodelled", + "remolding", "remoulding", + "saltpeter", "saltpetre", + "sanitized", "sanitised", + "sanitizes", "sanitises", + "satirized", "satirised", + "satirizes", "satirises", + "sensitize", "sensitise", + "sepulcher", "sepulchre", + "serialize", "serialise", + "sermonize", "sermonise", + "shoveling", "shovelling", + "shriveled", "shrivelled", + "signaling", "signalling", + "signalize", "signalise", + "skeptical", "sceptical", + "sniveling", "snivelling", + "snorkeled", "snorkelled", + "socialize", "socialise", + "sodomized", "sodomised", + "sodomizes", "sodomises", + "solemnize", "solemnise", + "spiraling", "spiralling", + "splendors", "splendours", + "stabilize", "stabilise", + "stenciled", "stencilled", + "sterilize", "sterilise", + "subsidize", "subsidise", + "succoring", "succouring", + "sulfurous", 
"sulphurous", + "summarize", "summarise", + "swiveling", "swivelling", + "symbolize", "symbolise", + "tantalize", "tantalise", + "temporize", "temporise", + "tenderize", "tenderise", + "terrorize", "terrorise", + "theorized", "theorised", + "theorizes", "theorises", + "travelers", "travellers", + "traveling", "travelling", + "tricolors", "tricolours", + "tunneling", "tunnelling", + "tyrannize", "tyrannise", + "unequaled", "unequalled", + "unionized", "unionised", + "unionizes", "unionises", + "unraveled", "unravelled", + "unrivaled", "unrivalled", + "urbanized", "urbanised", + "urbanizes", "urbanises", + "utilizing", "utilising", + "vandalize", "vandalise", + "vaporized", "vaporised", + "vaporizes", "vaporises", + "verbalize", "verbalise", + "victimize", "victimise", + "visualize", "visualise", + "vocalized", "vocalised", + "vocalizes", "vocalises", + "vulgarize", "vulgarise", + "weaseling", "weaselling", + "womanized", "womanised", + "womanizer", "womaniser", + "womanizes", "womanises", + "worshiped", "worshipped", + "worshiper", "worshipper", + "agonized", "agonised", + "agonizes", "agonises", + "airplane", "aeroplane", + "aluminum", "aluminium", + "amortize", "amortise", + "analyzed", "analysed", + "analyzes", "analyses", + "armorers", "armourers", + "armories", "armouries", + "artifact", "artefact", + "baptized", "baptised", + "baptizes", "baptises", + "behavior", "behaviour", + "behooved", "behoved", + "behooves", "behoves", + "belabors", "belabours", + "calibers", "calibres", + "canalize", "canalise", + "canonize", "canonise", + "catalogs", "catalogues", + "catalyze", "catalyse", + "caviling", "cavilling", + "centered", "centred", + "chiseled", "chiselled", + "civilize", "civilise", + "clamored", "clamoured", + "colonize", "colonise", + "colorant", "colourant", + "coloreds", "coloureds", + "colorful", "colourful", + "coloring", "colouring", + "colorize", "colourize", + "coziness", "cosiness", + "cruelest", "cruellest", + "cudgeled", "cudgelled", + "defenses", "defences", + "demeanor", "demeanour", + "demonize", "demonise", + "deputize", "deputise", + "diarrhea", "diarrhoea", + "digitize", "digitise", + "disfavor", "disfavour", + "dishonor", "dishonour", + "distills", "distils", + "driveled", "drivelled", + "enameled", "enamelled", + "enamored", "enamoured", + "endeavor", "endeavour", + "energize", "energise", + "epaulets", "epaulettes", + "equalize", "equalise", + "estrogen", "oestrogen", + "etiology", "aetiology", + "eulogize", "eulogise", + "favoring", "favouring", + "favorite", "favourite", + "feminize", "feminise", + "finalize", "finalise", + "flavored", "flavoured", + "flutists", "flautists", + "fulfills", "fulfils", + "funneled", "funnelled", + "gamboled", "gambolled", + "graveled", "gravelled", + "groveled", "grovelled", + "grueling", "gruelling", + "harbored", "harboured", + "honoring", "honouring", + "humanize", "humanise", + "humoring", "humouring", + "idealize", "idealise", + "idolized", "idolised", + "idolizes", "idolises", + "immunize", "immunise", + "ionizing", "ionising", + "itemized", "itemised", + "itemizes", "itemises", + "jewelers", "jewellers", + "labeling", "labelling", + "laborers", "labourers", + "laboring", "labouring", + "legalize", "legalise", + "leukemia", "leukaemia", + "levelers", "levellers", + "leveling", "levelling", + "libeling", "libelling", + "libelous", "libellous", + "lionized", "lionised", + "lionizes", "lionises", + "localize", "localise", + "louvered", "louvred", + "maneuver", "manoeuvre", + "marveled", "marvelled", + "maximize", "maximise", + 
"memorize", "memorise", + "minimize", "minimise", + "mobilize", "mobilise", + "modelers", "modellers", + "modeling", "modelling", + "moldered", "mouldered", + "moldiest", "mouldiest", + "moldings", "mouldings", + "moralize", "moralise", + "mustache", "moustache", + "neighbor", "neighbour", + "odorless", "odourless", + "offenses", "offences", + "optimize", "optimise", + "organize", "organise", + "oxidized", "oxidised", + "oxidizes", "oxidises", + "paneling", "panelling", + "panelist", "panellist", + "paralyze", "paralyse", + "parceled", "parcelled", + "pedaling", "pedalling", + "penalize", "penalise", + "penciled", "pencilled", + "polarize", "polarise", + "pretense", "pretence", + "pummeled", "pummelling", + "raveling", "ravelling", + "realized", "realised", + "realizes", "realises", + "refueled", "refuelled", + "remolded", "remoulded", + "revelers", "revellers", + "reveling", "revelling", + "rivaling", "rivalling", + "sanitize", "sanitise", + "satirize", "satirise", + "savories", "savouries", + "savoring", "savouring", + "scepters", "sceptres", + "shoveled", "shovelled", + "signaled", "signalled", + "skeptics", "sceptics", + "sniveled", "snivelled", + "sodomize", "sodomise", + "specters", "spectres", + "spiraled", "spiralled", + "splendor", "splendour", + "succored", "succoured", + "sulfates", "sulphates", + "sulfides", "sulphides", + "swiveled", "swivelled", + "tasseled", "tasselled", + "theaters", "theatres", + "theorize", "theorise", + "toweling", "towelling", + "traveler", "traveller", + "trialing", "trialling", + "tricolor", "tricolour", + "tunneled", "tunnelled", + "unionize", "unionise", + "unsavory", "unsavoury", + "urbanize", "urbanise", + "utilized", "utilised", + "utilizes", "utilises", + "vaporize", "vaporise", + "vocalize", "vocalise", + "weaseled", "weaselled", + "womanize", "womanise", + "yodeling", "yodelling", + "agonize", "agonise", + "analyze", "analyse", + "appalls", "appals", + "armored", "armoured", + "armorer", "armourer", + "baptize", "baptise", + "behoove", "behove", + "belabor", "belabour", + "beveled", "bevelled", + "caliber", "calibre", + "caroled", "carolled", + "caviled", "cavilled", + "centers", "centres", + "clamors", "clamours", + "clangor", "clangour", + "colored", "coloured", + "coziest", "cosiest", + "crueler", "crueller", + "defense", "defence", + "dialing", "dialling", + "dialogs", "dialogues", + "distill", "distil", + "dueling", "duelling", + "enrolls", "enrols", + "epaulet", "epaulette", + "favored", "favoured", + "flavors", "flavours", + "flutist", "flautist", + "fueling", "fuelling", + "fulfill", "fulfil", + "goiters", "goitres", + "harbors", "harbours", + "honored", "honoured", + "humored", "humoured", + "idolize", "idolise", + "ionized", "ionised", + "ionizes", "ionises", + "itemize", "itemise", + "jeweled", "jewelled", + "jeweler", "jeweller", + "jewelry", "jewellery", + "labeled", "labelled", + "labored", "laboured", + "laborer", "labourer", + "leveled", "levelled", + "leveler", "leveller", + "libeled", "libelled", + "lionize", "lionise", + "louvers", "louvres", + "modeled", "modelled", + "modeler", "modeller", + "molders", "moulders", + "moldier", "mouldier", + "molding", "moulding", + "molting", "moulting", + "offense", "offence", + "oxidize", "oxidise", + "pajamas", "pyjamas", + "paneled", "panelled", + "parlors", "parlours", + "pedaled", "pedalled", + "plowing", "ploughing", + "plowman", "ploughman", + "plowmen", "ploughmen", + "realize", "realise", + "remolds", "remoulds", + "reveled", "revelled", + "reveler", "reveller", + "rivaled", 
"rivalled", + "rumored", "rumoured", + "saviors", "saviours", + "savored", "savoured", + "scepter", "sceptre", + "skeptic", "sceptic", + "specter", "spectre", + "succors", "succours", + "sulfate", "sulphate", + "sulfide", "sulphide", + "theater", "theatre", + "toweled", "towelled", + "toxemia", "toxaemia", + "trialed", "trialled", + "utilize", "utilise", + "yodeled", "yodelled", + "anemia", "anaemia", + "anemic", "anaemic", + "appall", "appal", + "arbors", "arbours", + "armory", "armoury", + "candor", "candour", + "center", "centre", + "clamor", "clamour", + "colors", "colours", + "cozier", "cosier", + "cozies", "cosies", + "cozily", "cosily", + "dialed", "dialled", + "drafty", "draughty", + "dueled", "duelled", + "favors", "favours", + "fervor", "fervour", + "fibers", "fibres", + "flavor", "flavour", + "fueled", "fuelled", + "goiter", "goitre", + "harbor", "harbour", + "honors", "honours", + "humors", "humours", + "labors", "labours", + "liters", "litres", + "louver", "louvre", + "luster", "lustre", + "meager", "meagre", + "miters", "mitres", + "molded", "moulded", + "molder", "moulder", + "molted", "moulted", + "pajama", "pyjama", + "parlor", "parlour", + "plowed", "ploughed", + "rancor", "rancour", + "remold", "remould", + "rigors", "rigours", + "rumors", "rumours", + "savors", "savours", + "savory", "savoury", + "succor", "succour", + "tumors", "tumours", + "vapors", "vapours", + "aging", "ageing", + "arbor", "arbour", + "ardor", "ardour", + "armor", "armour", + "chili", "chilli", + "color", "colour", + "edema", "edoema", + "favor", "favour", + "fecal", "faecal", + "feces", "faeces", + "fiber", "fibre", + "honor", "honour", + "humor", "humour", + "labor", "labour", + "liter", "litre", + "miter", "mitre", + "molds", "moulds", + "moldy", "mouldy", + "molts", "moults", + "odors", "odours", + "plows", "ploughs", + "rigor", "rigour", + "rumor", "rumour", + "savor", "savour", + "valor", "valour", + "vapor", "vapour", + "vigor", "vigour", + "cozy", "cosy", + "mold", "mould", + "molt", "moult", + "odor", "odour", + "plow", "plough", +} diff --git a/vendor/github.com/golangci/revgrep/.gitignore b/vendor/github.com/golangci/revgrep/.gitignore new file mode 100644 index 000000000..0540fe2ca --- /dev/null +++ b/vendor/github.com/golangci/revgrep/.gitignore @@ -0,0 +1 @@ +testdata/git diff --git a/vendor/github.com/golangci/revgrep/.travis.yml b/vendor/github.com/golangci/revgrep/.travis.yml new file mode 100644 index 000000000..d16d8b5bd --- /dev/null +++ b/vendor/github.com/golangci/revgrep/.travis.yml @@ -0,0 +1,7 @@ +language: go +before_install: + - go get github.com/mattn/goveralls + - go get golang.org/x/tools/cmd/cover +script: + - $HOME/gopath/bin/goveralls -service=travis-ci + diff --git a/vendor/github.com/golangci/revgrep/LICENSE b/vendor/github.com/golangci/revgrep/LICENSE new file mode 100644 index 000000000..8dada3eda --- /dev/null +++ b/vendor/github.com/golangci/revgrep/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright {yyyy} {name of copyright owner} + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/vendor/github.com/golangci/revgrep/README.md b/vendor/github.com/golangci/revgrep/README.md new file mode 100644 index 000000000..31faefee9 --- /dev/null +++ b/vendor/github.com/golangci/revgrep/README.md @@ -0,0 +1,58 @@ +# Overview + +[![Build Status](https://travis-ci.org/bradleyfalzon/revgrep.svg?branch=master)](https://travis-ci.org/bradleyfalzon/revgrep) [![Coverage +Status](https://coveralls.io/repos/github/bradleyfalzon/revgrep/badge.svg?branch=master)](https://coveralls.io/github/bradleyfalzon/revgrep?branch=master) [![GoDoc](https://godoc.org/github.com/bradleyfalzon/revgrep?status.svg)](https://godoc.org/github.com/bradleyfalzon/revgrep) + +`revgrep` is a CLI tool used to filter static analysis tools to only lines changed based on a commit reference. + +# Install + +```bash +go get -u github.com/bradleyfalzon/revgrep/... +``` + +# Usage + +In the scenario below, a change was made causing a warning in `go vet` on line 5, but `go vet` will show all warnings. +Using `revgrep`, you can show only warnings for lines of code that have been changed (in this case, hiding line 6). + +```bash +[user@host dir (master)]$ go vet +main.go:5: missing argument for Sprintf("%s"): format reads arg 1, have only 0 args +main.go:6: missing argument for Sprintf("%s"): format reads arg 1, have only 0 args +[user@host dir (master)]$ go vet |& revgrep +main.go:5: missing argument for Sprintf("%s"): format reads arg 1, have only 0 args +``` + +`|&` is shown above as many static analysis programs write to `stderr`, not `stdout`, `|&` combines both `stderr` and +`stdout`. It could also be achieved with `go vet 2>&1 | revgrep`. + +`revgrep` CLI tool will return an exit status of 1 if any issues match, else it will return 0. Consider using +`${PIPESTATUS[0]}` for the exit status of the `go vet` command in the above example. + +``` +Usage: revgrep [options] [from-rev] [to-rev] + +from-rev filters issues to lines changed since (and including) this revision + to-rev filters issues to lines changed since (and including) this revision, requires + + If no revisions are given, and there are unstaged changes or untracked files, only those changes are shown + If no revisions are given, and there are no unstaged changes or untracked files, only changes in HEAD~ are shown + If from-rev is given and to-rev is not, only changes between from-rev and HEAD are shown. 
+ + -d Show debug output + -regexp string + Regexp to match path, line number, optional column number, and message +``` + +# Other Examples + +Issues between branches: +```bash +[user@host dir (feature/branch)]$ go vet |& revgrep master +``` + +Issues since last push: +```bash +[user@host dir (master)]$ go vet |& revgrep origin/master +``` diff --git a/vendor/github.com/golangci/revgrep/go.mod b/vendor/github.com/golangci/revgrep/go.mod new file mode 100644 index 000000000..8bdbb1951 --- /dev/null +++ b/vendor/github.com/golangci/revgrep/go.mod @@ -0,0 +1,3 @@ +module github.com/golangci/revgrep + +go 1.13 diff --git a/vendor/github.com/golangci/revgrep/go.sum b/vendor/github.com/golangci/revgrep/go.sum new file mode 100644 index 000000000..e69de29bb diff --git a/vendor/github.com/golangci/revgrep/revgrep.go b/vendor/github.com/golangci/revgrep/revgrep.go new file mode 100644 index 000000000..d0940d300 --- /dev/null +++ b/vendor/github.com/golangci/revgrep/revgrep.go @@ -0,0 +1,410 @@ +package revgrep + +import ( + "bufio" + "bytes" + "errors" + "fmt" + "io" + "os" + "os/exec" + "path/filepath" + "regexp" + "strconv" + "strings" +) + +// Checker provides APIs to filter static analysis tools to specific commits, +// such as showing only issues since last commit. +type Checker struct { + // Patch file (unified) to read to detect lines being changed, if nil revgrep + // will attempt to detect the VCS and generate an appropriate patch. Auto + // detection will search for uncommitted changes first, if none found, will + // generate a patch from last committed change. File paths within patches + // must be relative to current working directory. + Patch io.Reader + // NewFiles is a list of file names (with absolute paths) where the entire + // contents of the file is new. + NewFiles []string + // Debug sets the debug writer for additional output. + Debug io.Writer + // RevisionFrom check revision starting at, leave blank for auto detection + // ignored if patch is set. + RevisionFrom string + // RevisionTo checks revision finishing at, leave blank for auto detection + // ignored if patch is set. + RevisionTo string + // Regexp to match path, line number, optional column number, and message. + Regexp string + // AbsPath is used to make an absolute path of an issue's filename to be + // relative in order to match patch file. If not set, current working + // directory is used. + AbsPath string + + // Calculated changes for next calls to IsNewIssue + changes map[string][]pos +} + +// Issue contains metadata about an issue found. +type Issue struct { + // File is the name of the file as it appeared from the patch. + File string + // LineNo is the line number of the file. + LineNo int + // ColNo is the column number or 0 if none could be parsed. + ColNo int + // HunkPos is position from file's first @@, for new files this will be the + // line number. + // + // See also: https://developer.github.com/v3/pulls/comments/#create-a-comment + HunkPos int + // Issue text as it appeared from the tool. + Issue string + // Message is the issue without file name, line number and column number. 
+ Message string +} + +func (c *Checker) preparePatch() error { + // Check if patch is supplied, if not, retrieve from VCS + if c.Patch == nil { + var err error + c.Patch, c.NewFiles, err = GitPatch(c.RevisionFrom, c.RevisionTo) + if err != nil { + return fmt.Errorf("could not read git repo: %s", err) + } + if c.Patch == nil { + return errors.New("no version control repository found") + } + } + + return nil +} + +// InputIssue represents issue found by some linter +type InputIssue interface { + FilePath() string + Line() int +} + +type simpleInputIssue struct { + filePath string + lineNumber int +} + +func (i simpleInputIssue) FilePath() string { + return i.filePath +} + +func (i simpleInputIssue) Line() int { + return i.lineNumber +} + +// Prepare extracts a patch and changed lines +func (c *Checker) Prepare() error { + returnErr := c.preparePatch() + c.changes = c.linesChanged() + return returnErr +} + +// IsNewIssue checks whether issue found by linter is new: it was found in changed lines +func (c Checker) IsNewIssue(i InputIssue) (hunkPos int, isNew bool) { + fchanges, ok := c.changes[i.FilePath()] + if !ok { // file wasn't changed + return 0, false + } + + var ( + fpos pos + changed bool + ) + // found file, see if lines matched + for _, pos := range fchanges { + if pos.lineNo == i.Line() { + fpos = pos + changed = true + break + } + } + + if changed || fchanges == nil { + // either file changed or it's a new file + hunkPos := fpos.lineNo + if changed { // existing file changed + hunkPos = fpos.hunkPos + } + + return hunkPos, true + } + + return 0, false +} + +// Check scans reader and writes any lines to writer that have been added in +// Checker.Patch. +// +// Returns issues written to writer when no error occurs. +// +// If no VCS could be found or other VCS errors occur, all issues are written +// to writer and an error is returned. +// +// File paths in reader must be relative to current working directory or +// absolute. 
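// Illustrative sketch (not part of the upstream file): one way a caller might
// use the Checker API documented above to filter linter output down to lines
// changed since origin/master. The function and variable names here are
// hypothetical.
func exampleFilterSinceMaster(out io.Reader) ([]Issue, error) {
	c := Checker{RevisionFrom: "origin/master"}
	var filtered bytes.Buffer
	// Check writes only the findings on changed lines to filtered and returns
	// them as Issues; if no VCS is found, everything is passed through and an
	// error is returned.
	issues, err := c.Check(out, &filtered)
	return issues, err
}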
+func (c Checker) Check(reader io.Reader, writer io.Writer) (issues []Issue, err error) { + returnErr := c.Prepare() + writeAll := returnErr != nil + + // file.go:lineNo:colNo:message + // colNo is optional, strip spaces before message + lineRE := regexp.MustCompile(`(.*?\.go):([0-9]+):([0-9]+)?:?\s*(.*)`) + if c.Regexp != "" { + lineRE, err = regexp.Compile(c.Regexp) + if err != nil { + return nil, fmt.Errorf("could not parse regexp: %v", err) + } + } + + // TODO consider lazy loading this, if there's nothing in stdin, no point + // checking for recent changes + c.debugf("lines changed: %+v", c.changes) + + absPath := c.AbsPath + if absPath == "" { + absPath, err = os.Getwd() + if err != nil { + returnErr = fmt.Errorf("could not get current working directory: %s", err) + } + } + + // Scan each line in reader and only write those lines if lines changed + scanner := bufio.NewScanner(reader) + for scanner.Scan() { + line := lineRE.FindSubmatch(scanner.Bytes()) + if line == nil { + c.debugf("cannot parse file+line number: %s", scanner.Text()) + continue + } + + if writeAll { + fmt.Fprintln(writer, scanner.Text()) + continue + } + + // Make absolute path names relative + path := string(line[1]) + if rel, err := filepath.Rel(absPath, path); err == nil { + c.debugf("rewrote path from %q to %q (absPath: %q)", path, rel, absPath) + path = rel + } + + // Parse line number + lno, err := strconv.ParseUint(string(line[2]), 10, 64) + if err != nil { + c.debugf("cannot parse line number: %q", scanner.Text()) + continue + } + + // Parse optional column number + var cno uint64 + if len(line[3]) > 0 { + cno, err = strconv.ParseUint(string(line[3]), 10, 64) + if err != nil { + c.debugf("cannot parse column number: %q", scanner.Text()) + // Ignore this error and continue + } + } + + // Extract message + msg := string(line[4]) + + c.debugf("path: %q, lineNo: %v, colNo: %v, msg: %q", path, lno, cno, msg) + i := simpleInputIssue{ + filePath: path, + lineNumber: int(lno), + } + hunkPos, changed := c.IsNewIssue(i) + if changed { + issue := Issue{ + File: path, + LineNo: int(lno), + ColNo: int(cno), + HunkPos: hunkPos, + Issue: scanner.Text(), + Message: msg, + } + issues = append(issues, issue) + fmt.Fprintln(writer, scanner.Text()) + } else { + c.debugf("unchanged: %s", scanner.Text()) + } + } + if err := scanner.Err(); err != nil { + returnErr = fmt.Errorf("error reading standard input: %s", err) + } + return issues, returnErr +} + +func (c Checker) debugf(format string, s ...interface{}) { + if c.Debug != nil { + fmt.Fprint(c.Debug, "DEBUG: ") + fmt.Fprintf(c.Debug, format+"\n", s...) + } +} + +type pos struct { + lineNo int // line number + hunkPos int // position relative to first @@ in file +} + +// linesChanges returns a map of file names to line numbers being changed. +// If key is nil, the file has been recently added, else it contains a slice +// of positions that have been added. 
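// Illustrative sketch (not part of the upstream file): what the default line
// regexp used in Check above extracts from a typical linter output line. The
// sample input string is hypothetical.
func exampleParseLinterLine() (path, lineNo, msg string) {
	re := regexp.MustCompile(`(.*?\.go):([0-9]+):([0-9]+)?:?\s*(.*)`)
	m := re.FindStringSubmatch("main.go:5:2: missing argument for Sprintf")
	// m[1]="main.go", m[2]="5", m[3]="2", m[4]="missing argument for Sprintf"
	return m[1], m[2], m[4]
}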
+func (c Checker) linesChanged() map[string][]pos { + type state struct { + file string + lineNo int // current line number within chunk + hunkPos int // current line count since first @@ in file + changes []pos // position of changes + } + + var ( + s state + changes = make(map[string][]pos) + ) + + for _, file := range c.NewFiles { + changes[file] = nil + } + + if c.Patch == nil { + return changes + } + + scanner := bufio.NewReader(c.Patch) + var scanErr error + for { + lineB, isPrefix, err := scanner.ReadLine() + if isPrefix { + // If a single line overflowed the buffer, don't bother processing it as + // it's likey part of a file and not relevant to the patch. + continue + } + if err != nil { + scanErr = err + break + } + line := strings.TrimRight(string(lineB), "\n") + + c.debugf(line) + s.lineNo++ + s.hunkPos++ + switch { + case strings.HasPrefix(line, "+++ ") && len(line) > 4: + if s.changes != nil { + // record the last state + changes[s.file] = s.changes + } + // 6 removes "+++ b/" + s = state{file: line[6:], hunkPos: -1, changes: []pos{}} + case strings.HasPrefix(line, "@@ "): + // @@ -1 +2,4 @@ + // chdr ^^^^^^^^^^^^^ + // ahdr ^^^^ + // cstart ^ + chdr := strings.Split(line, " ") + ahdr := strings.Split(chdr[2], ",") + // [1:] to remove leading plus + cstart, err := strconv.ParseUint(ahdr[0][1:], 10, 64) + if err != nil { + panic(err) + } + s.lineNo = int(cstart) - 1 // -1 as cstart is the next line number + case strings.HasPrefix(line, "-"): + s.lineNo-- + case strings.HasPrefix(line, "+"): + s.changes = append(s.changes, pos{lineNo: s.lineNo, hunkPos: s.hunkPos}) + } + + } + if scanErr != nil && scanErr != io.EOF { + fmt.Fprintln(os.Stderr, "reading standard input:", scanErr) + } + // record the last state + changes[s.file] = s.changes + + return changes +} + +// GitPatch returns a patch from a git repository, if no git repository was +// was found and no errors occurred, nil is returned, else an error is returned +// revisionFrom and revisionTo defines the git diff parameters, if left blank +// and there are unstaged changes or untracked files, only those will be returned +// else only check changes since HEAD~. If revisionFrom is set but revisionTo +// is not, untracked files will be included, to exclude untracked files set +// revisionTo to HEAD~. It's incorrect to specify revisionTo without a +// revisionFrom. 
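// Worked example (sketch, not part of the upstream file): linesChanged above
// reads the "+14,7" part of a hunk header such as "@@ -10,3 +14,7 @@" to find
// the line number where the added lines of that hunk start.
func exampleHunkStart() int {
	chdr := strings.Split("@@ -10,3 +14,7 @@", " ") // ["@@", "-10,3", "+14,7", "@@"]
	ahdr := strings.Split(chdr[2], ",")             // ["+14", "7"]
	cstart, _ := strconv.ParseUint(ahdr[0][1:], 10, 64)
	return int(cstart) // 14: first line number on the "+" side of the hunk
}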
+func GitPatch(revisionFrom, revisionTo string) (io.Reader, []string, error) { + var patch bytes.Buffer + + // check if git repo exists + if err := exec.Command("git", "status").Run(); err != nil { + // don't return an error, we assume the error is not repo exists + return nil, nil, nil + } + + // make a patch for untracked files + var newFiles []string + ls, err := exec.Command("git", "ls-files", "--others", "--exclude-standard").CombinedOutput() + if err != nil { + return nil, nil, fmt.Errorf("error executing git ls-files: %s", err) + } + for _, file := range bytes.Split(ls, []byte{'\n'}) { + if len(file) == 0 || bytes.HasSuffix(file, []byte{'/'}) { + // ls-files was sometimes showing directories when they were ignored + // I couldn't create a test case for this as I couldn't reproduce correctly + // for the moment, just exclude files with trailing / + continue + } + newFiles = append(newFiles, string(file)) + } + + if revisionFrom != "" { + cmd := exec.Command("git", "diff", "--relative", revisionFrom) + if revisionTo != "" { + cmd.Args = append(cmd.Args, revisionTo) + } + cmd.Stdout = &patch + if err := cmd.Run(); err != nil { + return nil, nil, fmt.Errorf("error executing git diff %q %q: %s", revisionFrom, revisionTo, err) + } + + if revisionTo == "" { + return &patch, newFiles, nil + } + return &patch, nil, nil + } + + // make a patch for unstaged changes + // use --no-prefix to remove b/ given: +++ b/main.go + cmd := exec.Command("git", "diff", "--relative") + cmd.Stdout = &patch + if err := cmd.Run(); err != nil { + return nil, nil, fmt.Errorf("error executing git diff: %s", err) + } + unstaged := patch.Len() > 0 + + // If there's unstaged changes OR untracked changes (or both), then this is + // a suitable patch + if unstaged || newFiles != nil { + return &patch, newFiles, nil + } + + // check for changes in recent commit + + cmd = exec.Command("git", "diff", "--relative", "HEAD~") + cmd.Stdout = &patch + if err := cmd.Run(); err != nil { + return nil, nil, fmt.Errorf("error executing git diff HEAD~: %s", err) + } + + return &patch, nil, nil +} diff --git a/vendor/github.com/golangci/unconvert/LICENSE b/vendor/github.com/golangci/unconvert/LICENSE new file mode 100644 index 000000000..744875676 --- /dev/null +++ b/vendor/github.com/golangci/unconvert/LICENSE @@ -0,0 +1,27 @@ +Copyright (c) 2012 The Go Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/github.com/golangci/unconvert/README b/vendor/github.com/golangci/unconvert/README new file mode 100644 index 000000000..dbaea4f57 --- /dev/null +++ b/vendor/github.com/golangci/unconvert/README @@ -0,0 +1,36 @@ +About: + +The unconvert program analyzes Go packages to identify unnecessary +type conversions; i.e., expressions T(x) where x already has type T. + +Install: + + $ go get github.com/mdempsky/unconvert + +Usage: + + $ unconvert -v bytes fmt + GOROOT/src/bytes/reader.go:117:14: unnecessary conversion + abs = int64(r.i) + offset + ^ + GOROOT/src/fmt/print.go:411:21: unnecessary conversion + p.fmt.integer(int64(v), 16, unsigned, udigits) + ^ + +Flags: + +Using the -v flag, unconvert will also print the source line and a +caret to indicate the unnecessary conversion's position therein. + +Using the -apply flag, unconvert will rewrite the Go source files +without the unnecessary type conversions. + +Using the -all flag, unconvert will analyze the Go packages under all +possible GOOS/GOARCH combinations, and only identify conversions that +are unnecessary in all cases. + +E.g., syscall.Timespec's Sec and Nsec fields are int64 under +linux/amd64 but int32 under linux/386. An int64(ts.Sec) conversion +that appears in a linux/amd64-only file will be identified as +unnecessary, but it will be preserved if it occurs in a file that's +compiled for both linux/amd64 and linux/386. diff --git a/vendor/github.com/golangci/unconvert/unconvert.go b/vendor/github.com/golangci/unconvert/unconvert.go new file mode 100644 index 000000000..38737d39f --- /dev/null +++ b/vendor/github.com/golangci/unconvert/unconvert.go @@ -0,0 +1,665 @@ +// Copyright 2015 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Unconvert removes redundant type conversions from Go packages. +package unconvert + +import ( + "bytes" + "flag" + "fmt" + "go/ast" + "go/build" + "go/format" + "go/parser" + "go/token" + "go/types" + "io/ioutil" + "log" + "os" + "reflect" + "runtime/pprof" + "sort" + "sync" + "unicode" + + "github.com/kisielk/gotool" + "golang.org/x/text/width" + "golang.org/x/tools/go/loader" +) + +// Unnecessary conversions are identified by the position +// of their left parenthesis within a source file. + +type editSet map[token.Position]struct{} + +type fileToEditSet map[string]editSet + +func apply(file string, edits editSet) { + if len(edits) == 0 { + return + } + + fset := token.NewFileSet() + f, err := parser.ParseFile(fset, file, nil, parser.ParseComments) + if err != nil { + log.Fatal(err) + } + + // Note: We modify edits during the walk. + v := editor{edits: edits, file: fset.File(f.Package)} + ast.Walk(&v, f) + if len(edits) != 0 { + log.Printf("%s: missing edits %s", file, edits) + } + + // TODO(mdempsky): Write to temporary file and rename. 
+ var buf bytes.Buffer + err = format.Node(&buf, fset, f) + if err != nil { + log.Fatal(err) + } + + err = ioutil.WriteFile(file, buf.Bytes(), 0) + if err != nil { + log.Fatal(err) + } +} + +type editor struct { + edits editSet + file *token.File +} + +func (e *editor) Visit(n ast.Node) ast.Visitor { + if n == nil { + return nil + } + v := reflect.ValueOf(n).Elem() + for i, n := 0, v.NumField(); i < n; i++ { + switch f := v.Field(i).Addr().Interface().(type) { + case *ast.Expr: + e.rewrite(f) + case *[]ast.Expr: + for i := range *f { + e.rewrite(&(*f)[i]) + } + } + } + return e +} + +func (e *editor) rewrite(f *ast.Expr) { + call, ok := (*f).(*ast.CallExpr) + if !ok { + return + } + + pos := e.file.Position(call.Lparen) + if _, ok := e.edits[pos]; !ok { + return + } + *f = call.Args[0] + delete(e.edits, pos) +} + +var ( + cr = []byte{'\r'} + nl = []byte{'\n'} +) + +func print(conversions []token.Position) { + var file string + var lines [][]byte + + for _, pos := range conversions { + fmt.Printf("%s:%d:%d: unnecessary conversion\n", pos.Filename, pos.Line, pos.Column) + if *flagV { + if pos.Filename != file { + buf, err := ioutil.ReadFile(pos.Filename) + if err != nil { + log.Fatal(err) + } + file = pos.Filename + lines = bytes.Split(buf, nl) + } + + line := bytes.TrimSuffix(lines[pos.Line-1], cr) + fmt.Printf("%s\n", line) + + // For files processed by cgo, Column is the + // column location after cgo processing, which + // may be different than the source column + // that we want here. In lieu of a better + // heuristic for detecting this case, at least + // avoid panicking if column is out of bounds. + if pos.Column <= len(line) { + fmt.Printf("%s^\n", rub(line[:pos.Column-1])) + } + } + } +} + +// Rub returns a copy of buf with all non-whitespace characters replaced +// by spaces (like rubbing them out with white out). +func rub(buf []byte) []byte { + // TODO(mdempsky): Handle combining characters? + var res bytes.Buffer + for _, r := range string(buf) { + if unicode.IsSpace(r) { + res.WriteRune(r) + continue + } + switch width.LookupRune(r).Kind() { + case width.EastAsianWide, width.EastAsianFullwidth: + res.WriteString(" ") + default: + res.WriteByte(' ') + } + } + return res.Bytes() +} + +var ( + flagAll = flag.Bool("unconvert.all", false, "type check all GOOS and GOARCH combinations") + flagApply = flag.Bool("unconvert.apply", false, "apply edits to source files") + flagCPUProfile = flag.String("unconvert.cpuprofile", "", "write CPU profile to file") + // TODO(mdempsky): Better description and maybe flag name. 
+ flagSafe = flag.Bool("unconvert.safe", false, "be more conservative (experimental)") + flagV = flag.Bool("unconvert.v", false, "verbose output") +) + +func usage() { + fmt.Fprintf(os.Stderr, "usage: unconvert [flags] [package ...]\n") + flag.PrintDefaults() +} + +func nomain() { + flag.Usage = usage + flag.Parse() + + if *flagCPUProfile != "" { + f, err := os.Create(*flagCPUProfile) + if err != nil { + log.Fatal(err) + } + pprof.StartCPUProfile(f) + defer pprof.StopCPUProfile() + } + + importPaths := gotool.ImportPaths(flag.Args()) + if len(importPaths) == 0 { + return + } + + var m fileToEditSet + if *flagAll { + m = mergeEdits(importPaths) + } else { + m = computeEdits(importPaths, build.Default.GOOS, build.Default.GOARCH, build.Default.CgoEnabled) + } + + if *flagApply { + var wg sync.WaitGroup + for f, e := range m { + wg.Add(1) + f, e := f, e + go func() { + defer wg.Done() + apply(f, e) + }() + } + wg.Wait() + } else { + var conversions []token.Position + for _, positions := range m { + for pos := range positions { + conversions = append(conversions, pos) + } + } + sort.Sort(byPosition(conversions)) + print(conversions) + if len(conversions) > 0 { + os.Exit(1) + } + } +} + +func Run(prog *loader.Program) []token.Position { + m := computeEditsFromProg(prog) + var conversions []token.Position + for _, positions := range m { + for pos := range positions { + conversions = append(conversions, pos) + } + } + return conversions +} + +var plats = [...]struct { + goos, goarch string +}{ + // TODO(mdempsky): buildall.bash also builds linux-386-387 and linux-arm-arm5. + {"android", "386"}, + {"android", "amd64"}, + {"android", "arm"}, + {"android", "arm64"}, + {"darwin", "386"}, + {"darwin", "amd64"}, + {"darwin", "arm"}, + {"darwin", "arm64"}, + {"dragonfly", "amd64"}, + {"freebsd", "386"}, + {"freebsd", "amd64"}, + {"freebsd", "arm"}, + {"linux", "386"}, + {"linux", "amd64"}, + {"linux", "arm"}, + {"linux", "arm64"}, + {"linux", "mips64"}, + {"linux", "mips64le"}, + {"linux", "ppc64"}, + {"linux", "ppc64le"}, + {"linux", "s390x"}, + {"nacl", "386"}, + {"nacl", "amd64p32"}, + {"nacl", "arm"}, + {"netbsd", "386"}, + {"netbsd", "amd64"}, + {"netbsd", "arm"}, + {"openbsd", "386"}, + {"openbsd", "amd64"}, + {"openbsd", "arm"}, + {"plan9", "386"}, + {"plan9", "amd64"}, + {"plan9", "arm"}, + {"solaris", "amd64"}, + {"windows", "386"}, + {"windows", "amd64"}, +} + +func mergeEdits(importPaths []string) fileToEditSet { + m := make(fileToEditSet) + for _, plat := range plats { + for f, e := range computeEdits(importPaths, plat.goos, plat.goarch, false) { + if e0, ok := m[f]; ok { + for k := range e0 { + if _, ok := e[k]; !ok { + delete(e0, k) + } + } + } else { + m[f] = e + } + } + } + return m +} + +type noImporter struct{} + +func (noImporter) Import(path string) (*types.Package, error) { + panic("golang.org/x/tools/go/loader said this wouldn't be called") +} + +func computeEdits(importPaths []string, os, arch string, cgoEnabled bool) fileToEditSet { + ctxt := build.Default + ctxt.GOOS = os + ctxt.GOARCH = arch + ctxt.CgoEnabled = cgoEnabled + + var conf loader.Config + conf.Build = &ctxt + conf.TypeChecker.Importer = noImporter{} + for _, importPath := range importPaths { + conf.Import(importPath) + } + prog, err := conf.Load() + if err != nil { + log.Fatal(err) + } + + return computeEditsFromProg(prog) +} + +func computeEditsFromProg(prog *loader.Program) fileToEditSet { + type res struct { + file string + edits editSet + } + ch := make(chan res) + var wg sync.WaitGroup + for _, pkg := range 
prog.InitialPackages() { + for _, file := range pkg.Files { + pkg, file := pkg, file + wg.Add(1) + go func() { + defer wg.Done() + v := visitor{pkg: pkg, file: prog.Fset.File(file.Package), edits: make(editSet)} + ast.Walk(&v, file) + ch <- res{v.file.Name(), v.edits} + }() + } + } + go func() { + wg.Wait() + close(ch) + }() + + m := make(fileToEditSet) + for r := range ch { + m[r.file] = r.edits + } + return m +} + +type step struct { + n ast.Node + i int +} + +type visitor struct { + pkg *loader.PackageInfo + file *token.File + edits editSet + path []step +} + +func (v *visitor) Visit(node ast.Node) ast.Visitor { + if node != nil { + v.path = append(v.path, step{n: node}) + } else { + n := len(v.path) + v.path = v.path[:n-1] + if n >= 2 { + v.path[n-2].i++ + } + } + + if call, ok := node.(*ast.CallExpr); ok { + v.unconvert(call) + } + return v +} + +func (v *visitor) unconvert(call *ast.CallExpr) { + // TODO(mdempsky): Handle useless multi-conversions. + + // Conversions have exactly one argument. + if len(call.Args) != 1 || call.Ellipsis != token.NoPos { + return + } + ft, ok := v.pkg.Types[call.Fun] + if !ok { + fmt.Println("Missing type for function") + return + } + if !ft.IsType() { + // Function call; not a conversion. + return + } + at, ok := v.pkg.Types[call.Args[0]] + if !ok { + fmt.Println("Missing type for argument") + return + } + if !types.Identical(ft.Type, at.Type) { + // A real conversion. + return + } + if isUntypedValue(call.Args[0], &v.pkg.Info) { + // Workaround golang.org/issue/13061. + return + } + if *flagSafe && !v.isSafeContext(at.Type) { + // TODO(mdempsky): Remove this message. + fmt.Println("Skipped a possible type conversion because of -safe at", v.file.Position(call.Pos())) + return + } + if v.isCgoCheckPointerContext() { + // cmd/cgo generates explicit type conversions that + // are often redundant when introducing + // _cgoCheckPointer calls (issue #16). Users can't do + // anything about these, so skip over them. + return + } + + v.edits[v.file.Position(call.Lparen)] = struct{}{} +} + +func (v *visitor) isCgoCheckPointerContext() bool { + ctxt := &v.path[len(v.path)-2] + if ctxt.i != 1 { + return false + } + call, ok := ctxt.n.(*ast.CallExpr) + if !ok { + return false + } + ident, ok := call.Fun.(*ast.Ident) + if !ok { + return false + } + return ident.Name == "_cgoCheckPointer" +} + +// isSafeContext reports whether the current context requires +// an expression of type t. +// +// TODO(mdempsky): That's a bad explanation. +func (v *visitor) isSafeContext(t types.Type) bool { + ctxt := &v.path[len(v.path)-2] + switch n := ctxt.n.(type) { + case *ast.AssignStmt: + pos := ctxt.i - len(n.Lhs) + if pos < 0 { + fmt.Println("Type conversion on LHS of assignment?") + return false + } + if n.Tok == token.DEFINE { + // Skip := assignments. + return true + } + // We're a conversion in the pos'th element of n.Rhs. + // Check that the corresponding element of n.Lhs is of type t. + lt, ok := v.pkg.Types[n.Lhs[pos]] + if !ok { + fmt.Println("Missing type for LHS expression") + return false + } + return types.Identical(t, lt.Type) + case *ast.BinaryExpr: + if n.Op == token.SHL || n.Op == token.SHR { + if ctxt.i == 1 { + // RHS of a shift is always safe. + return true + } + // For the LHS, we should inspect up another level. 
+ fmt.Println("TODO(mdempsky): Handle LHS of shift expressions") + return true + } + var other ast.Expr + if ctxt.i == 0 { + other = n.Y + } else { + other = n.X + } + ot, ok := v.pkg.Types[other] + if !ok { + fmt.Println("Missing type for other binop subexpr") + return false + } + return types.Identical(t, ot.Type) + case *ast.CallExpr: + pos := ctxt.i - 1 + if pos < 0 { + // Type conversion in the function subexpr is okay. + return true + } + ft, ok := v.pkg.Types[n.Fun] + if !ok { + fmt.Println("Missing type for function expression") + return false + } + sig, ok := ft.Type.(*types.Signature) + if !ok { + // "Function" is either a type conversion (ok) or a builtin (ok?). + return true + } + params := sig.Params() + var pt types.Type + if sig.Variadic() && n.Ellipsis == token.NoPos && pos >= params.Len()-1 { + pt = params.At(params.Len() - 1).Type().(*types.Slice).Elem() + } else { + pt = params.At(pos).Type() + } + return types.Identical(t, pt) + case *ast.CompositeLit, *ast.KeyValueExpr: + fmt.Println("TODO(mdempsky): Compare against value type of composite literal type at", v.file.Position(n.Pos())) + return true + case *ast.ReturnStmt: + // TODO(mdempsky): Is there a better way to get the corresponding + // return parameter type? + var funcType *ast.FuncType + for i := len(v.path) - 1; funcType == nil && i >= 0; i-- { + switch f := v.path[i].n.(type) { + case *ast.FuncDecl: + funcType = f.Type + case *ast.FuncLit: + funcType = f.Type + } + } + var typeExpr ast.Expr + for i, j := ctxt.i, 0; j < len(funcType.Results.List); j++ { + f := funcType.Results.List[j] + if len(f.Names) == 0 { + if i >= 1 { + i-- + continue + } + } else { + if i >= len(f.Names) { + i -= len(f.Names) + continue + } + } + typeExpr = f.Type + break + } + if typeExpr == nil { + fmt.Println(ctxt) + } + pt, ok := v.pkg.Types[typeExpr] + if !ok { + fmt.Println("Missing type for return parameter at", v.file.Position(n.Pos())) + return false + } + return types.Identical(t, pt.Type) + case *ast.StarExpr, *ast.UnaryExpr: + // TODO(mdempsky): I think these are always safe. + return true + case *ast.SwitchStmt: + // TODO(mdempsky): I think this is always safe? + return true + default: + // TODO(mdempsky): When can this happen? + fmt.Printf("... huh, %T at %v\n", n, v.file.Position(n.Pos())) + return true + } +} + +func isUntypedValue(n ast.Expr, info *types.Info) (res bool) { + switch n := n.(type) { + case *ast.BinaryExpr: + switch n.Op { + case token.SHL, token.SHR: + // Shifts yield an untyped value if their LHS is untyped. + return isUntypedValue(n.X, info) + case token.EQL, token.NEQ, token.LSS, token.GTR, token.LEQ, token.GEQ: + // Comparisons yield an untyped boolean value. + return true + case token.ADD, token.SUB, token.MUL, token.QUO, token.REM, + token.AND, token.OR, token.XOR, token.AND_NOT, + token.LAND, token.LOR: + return isUntypedValue(n.X, info) && isUntypedValue(n.Y, info) + } + case *ast.UnaryExpr: + switch n.Op { + case token.ADD, token.SUB, token.NOT, token.XOR: + return isUntypedValue(n.X, info) + } + case *ast.BasicLit: + // Basic literals are always untyped. + return true + case *ast.ParenExpr: + return isUntypedValue(n.X, info) + case *ast.SelectorExpr: + return isUntypedValue(n.Sel, info) + case *ast.Ident: + if obj, ok := info.Uses[n]; ok { + if obj.Pkg() == nil && obj.Name() == "nil" { + // The universal untyped zero value. + return true + } + if b, ok := obj.Type().(*types.Basic); ok && b.Info()&types.IsUntyped != 0 { + // Reference to an untyped constant. 
+ return true + } + } + case *ast.CallExpr: + if b, ok := asBuiltin(n.Fun, info); ok { + switch b.Name() { + case "real", "imag": + return isUntypedValue(n.Args[0], info) + case "complex": + return isUntypedValue(n.Args[0], info) && isUntypedValue(n.Args[1], info) + } + } + } + + return false +} + +func asBuiltin(n ast.Expr, info *types.Info) (*types.Builtin, bool) { + for { + paren, ok := n.(*ast.ParenExpr) + if !ok { + break + } + n = paren.X + } + + ident, ok := n.(*ast.Ident) + if !ok { + return nil, false + } + + obj, ok := info.Uses[ident] + if !ok { + return nil, false + } + + b, ok := obj.(*types.Builtin) + return b, ok +} + +type byPosition []token.Position + +func (p byPosition) Len() int { + return len(p) +} + +func (p byPosition) Less(i, j int) bool { + if p[i].Filename != p[j].Filename { + return p[i].Filename < p[j].Filename + } + if p[i].Line != p[j].Line { + return p[i].Line < p[j].Line + } + return p[i].Column < p[j].Column +} + +func (p byPosition) Swap(i, j int) { + p[i], p[j] = p[j], p[i] +} diff --git a/vendor/github.com/google/go-cmp/LICENSE b/vendor/github.com/google/go-cmp/LICENSE new file mode 100644 index 000000000..32017f8fa --- /dev/null +++ b/vendor/github.com/google/go-cmp/LICENSE @@ -0,0 +1,27 @@ +Copyright (c) 2017 The Go Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/github.com/google/go-cmp/cmp/compare.go b/vendor/github.com/google/go-cmp/cmp/compare.go new file mode 100644 index 000000000..86d0903b8 --- /dev/null +++ b/vendor/github.com/google/go-cmp/cmp/compare.go @@ -0,0 +1,682 @@ +// Copyright 2017, The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package cmp determines equality of values. +// +// This package is intended to be a more powerful and safer alternative to +// reflect.DeepEqual for comparing whether two values are semantically equal. +// It is intended to only be used in tests, as performance is not a goal and +// it may panic if it cannot compare the values. 
Its propensity towards +// panicking means that its unsuitable for production environments where a +// spurious panic may be fatal. +// +// The primary features of cmp are: +// +// • When the default behavior of equality does not suit the needs of the test, +// custom equality functions can override the equality operation. +// For example, an equality function may report floats as equal so long as they +// are within some tolerance of each other. +// +// • Types that have an Equal method may use that method to determine equality. +// This allows package authors to determine the equality operation for the types +// that they define. +// +// • If no custom equality functions are used and no Equal method is defined, +// equality is determined by recursively comparing the primitive kinds on both +// values, much like reflect.DeepEqual. Unlike reflect.DeepEqual, unexported +// fields are not compared by default; they result in panics unless suppressed +// by using an Ignore option (see cmpopts.IgnoreUnexported) or explicitly +// compared using the Exporter option. +package cmp + +import ( + "fmt" + "reflect" + "strings" + + "github.com/google/go-cmp/cmp/internal/diff" + "github.com/google/go-cmp/cmp/internal/flags" + "github.com/google/go-cmp/cmp/internal/function" + "github.com/google/go-cmp/cmp/internal/value" +) + +// Equal reports whether x and y are equal by recursively applying the +// following rules in the given order to x and y and all of their sub-values: +// +// • Let S be the set of all Ignore, Transformer, and Comparer options that +// remain after applying all path filters, value filters, and type filters. +// If at least one Ignore exists in S, then the comparison is ignored. +// If the number of Transformer and Comparer options in S is greater than one, +// then Equal panics because it is ambiguous which option to use. +// If S contains a single Transformer, then use that to transform the current +// values and recursively call Equal on the output values. +// If S contains a single Comparer, then use that to compare the current values. +// Otherwise, evaluation proceeds to the next rule. +// +// • If the values have an Equal method of the form "(T) Equal(T) bool" or +// "(T) Equal(I) bool" where T is assignable to I, then use the result of +// x.Equal(y) even if x or y is nil. Otherwise, no such method exists and +// evaluation proceeds to the next rule. +// +// • Lastly, try to compare x and y based on their basic kinds. +// Simple kinds like booleans, integers, floats, complex numbers, strings, and +// channels are compared using the equivalent of the == operator in Go. +// Functions are only equal if they are both nil, otherwise they are unequal. +// +// Structs are equal if recursively calling Equal on all fields report equal. +// If a struct contains unexported fields, Equal panics unless an Ignore option +// (e.g., cmpopts.IgnoreUnexported) ignores that field or the Exporter option +// explicitly permits comparing the unexported field. +// +// Slices are equal if they are both nil or both non-nil, where recursively +// calling Equal on all non-ignored slice or array elements report equal. +// Empty non-nil slices and nil slices are not equal; to equate empty slices, +// consider using cmpopts.EquateEmpty. +// +// Maps are equal if they are both nil or both non-nil, where recursively +// calling Equal on all non-ignored map entries report equal. +// Map keys are equal according to the == operator. +// To use custom comparisons for map keys, consider using cmpopts.SortMaps. 
+// Empty non-nil maps and nil maps are not equal; to equate empty maps, +// consider using cmpopts.EquateEmpty. +// +// Pointers and interfaces are equal if they are both nil or both non-nil, +// where they have the same underlying concrete type and recursively +// calling Equal on the underlying values reports equal. +// +// Before recursing into a pointer, slice element, or map, the current path +// is checked to detect whether the address has already been visited. +// If there is a cycle, then the pointed at values are considered equal +// only if both addresses were previously visited in the same path step. +func Equal(x, y interface{}, opts ...Option) bool { + s := newState(opts) + s.compareAny(rootStep(x, y)) + return s.result.Equal() +} + +// Diff returns a human-readable report of the differences between two values: +// y - x. It returns an empty string if and only if Equal returns true for the +// same input values and options. +// +// The output is displayed as a literal in pseudo-Go syntax. +// At the start of each line, a "-" prefix indicates an element removed from x, +// a "+" prefix to indicates an element added from y, and the lack of a prefix +// indicates an element common to both x and y. If possible, the output +// uses fmt.Stringer.String or error.Error methods to produce more humanly +// readable outputs. In such cases, the string is prefixed with either an +// 's' or 'e' character, respectively, to indicate that the method was called. +// +// Do not depend on this output being stable. If you need the ability to +// programmatically interpret the difference, consider using a custom Reporter. +func Diff(x, y interface{}, opts ...Option) string { + s := newState(opts) + + // Optimization: If there are no other reporters, we can optimize for the + // common case where the result is equal (and thus no reported difference). + // This avoids the expensive construction of a difference tree. + if len(s.reporters) == 0 { + s.compareAny(rootStep(x, y)) + if s.result.Equal() { + return "" + } + s.result = diff.Result{} // Reset results + } + + r := new(defaultReporter) + s.reporters = append(s.reporters, reporter{r}) + s.compareAny(rootStep(x, y)) + d := r.String() + if (d == "") != s.result.Equal() { + panic("inconsistent difference and equality results") + } + return d +} + +// rootStep constructs the first path step. If x and y have differing types, +// then they are stored within an empty interface type. +func rootStep(x, y interface{}) PathStep { + vx := reflect.ValueOf(x) + vy := reflect.ValueOf(y) + + // If the inputs are different types, auto-wrap them in an empty interface + // so that they have the same parent type. + var t reflect.Type + if !vx.IsValid() || !vy.IsValid() || vx.Type() != vy.Type() { + t = reflect.TypeOf((*interface{})(nil)).Elem() + if vx.IsValid() { + vvx := reflect.New(t).Elem() + vvx.Set(vx) + vx = vvx + } + if vy.IsValid() { + vvy := reflect.New(t).Elem() + vvy.Set(vy) + vy = vvy + } + } else { + t = vx.Type() + } + + return &pathStep{t, vx, vy} +} + +type state struct { + // These fields represent the "comparison state". + // Calling statelessCompare must not result in observable changes to these. + result diff.Result // The current result of comparison + curPath Path // The current path in the value tree + curPtrs pointerPath // The current set of visited pointers + reporters []reporter // Optional reporters + + // recChecker checks for infinite cycles applying the same set of + // transformers upon the output of itself. 
+ recChecker recChecker + + // dynChecker triggers pseudo-random checks for option correctness. + // It is safe for statelessCompare to mutate this value. + dynChecker dynChecker + + // These fields, once set by processOption, will not change. + exporters []exporter // List of exporters for structs with unexported fields + opts Options // List of all fundamental and filter options +} + +func newState(opts []Option) *state { + // Always ensure a validator option exists to validate the inputs. + s := &state{opts: Options{validator{}}} + s.curPtrs.Init() + s.processOption(Options(opts)) + return s +} + +func (s *state) processOption(opt Option) { + switch opt := opt.(type) { + case nil: + case Options: + for _, o := range opt { + s.processOption(o) + } + case coreOption: + type filtered interface { + isFiltered() bool + } + if fopt, ok := opt.(filtered); ok && !fopt.isFiltered() { + panic(fmt.Sprintf("cannot use an unfiltered option: %v", opt)) + } + s.opts = append(s.opts, opt) + case exporter: + s.exporters = append(s.exporters, opt) + case reporter: + s.reporters = append(s.reporters, opt) + default: + panic(fmt.Sprintf("unknown option %T", opt)) + } +} + +// statelessCompare compares two values and returns the result. +// This function is stateless in that it does not alter the current result, +// or output to any registered reporters. +func (s *state) statelessCompare(step PathStep) diff.Result { + // We do not save and restore curPath and curPtrs because all of the + // compareX methods should properly push and pop from them. + // It is an implementation bug if the contents of the paths differ from + // when calling this function to when returning from it. + + oldResult, oldReporters := s.result, s.reporters + s.result = diff.Result{} // Reset result + s.reporters = nil // Remove reporters to avoid spurious printouts + s.compareAny(step) + res := s.result + s.result, s.reporters = oldResult, oldReporters + return res +} + +func (s *state) compareAny(step PathStep) { + // Update the path stack. + s.curPath.push(step) + defer s.curPath.pop() + for _, r := range s.reporters { + r.PushStep(step) + defer r.PopStep() + } + s.recChecker.Check(s.curPath) + + // Cycle-detection for slice elements (see NOTE in compareSlice). + t := step.Type() + vx, vy := step.Values() + if si, ok := step.(SliceIndex); ok && si.isSlice && vx.IsValid() && vy.IsValid() { + px, py := vx.Addr(), vy.Addr() + if eq, visited := s.curPtrs.Push(px, py); visited { + s.report(eq, reportByCycle) + return + } + defer s.curPtrs.Pop(px, py) + } + + // Rule 1: Check whether an option applies on this node in the value tree. + if s.tryOptions(t, vx, vy) { + return + } + + // Rule 2: Check whether the type has a valid Equal method. + if s.tryMethod(t, vx, vy) { + return + } + + // Rule 3: Compare based on the underlying kind. 
+ switch t.Kind() { + case reflect.Bool: + s.report(vx.Bool() == vy.Bool(), 0) + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + s.report(vx.Int() == vy.Int(), 0) + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: + s.report(vx.Uint() == vy.Uint(), 0) + case reflect.Float32, reflect.Float64: + s.report(vx.Float() == vy.Float(), 0) + case reflect.Complex64, reflect.Complex128: + s.report(vx.Complex() == vy.Complex(), 0) + case reflect.String: + s.report(vx.String() == vy.String(), 0) + case reflect.Chan, reflect.UnsafePointer: + s.report(vx.Pointer() == vy.Pointer(), 0) + case reflect.Func: + s.report(vx.IsNil() && vy.IsNil(), 0) + case reflect.Struct: + s.compareStruct(t, vx, vy) + case reflect.Slice, reflect.Array: + s.compareSlice(t, vx, vy) + case reflect.Map: + s.compareMap(t, vx, vy) + case reflect.Ptr: + s.comparePtr(t, vx, vy) + case reflect.Interface: + s.compareInterface(t, vx, vy) + default: + panic(fmt.Sprintf("%v kind not handled", t.Kind())) + } +} + +func (s *state) tryOptions(t reflect.Type, vx, vy reflect.Value) bool { + // Evaluate all filters and apply the remaining options. + if opt := s.opts.filter(s, t, vx, vy); opt != nil { + opt.apply(s, vx, vy) + return true + } + return false +} + +func (s *state) tryMethod(t reflect.Type, vx, vy reflect.Value) bool { + // Check if this type even has an Equal method. + m, ok := t.MethodByName("Equal") + if !ok || !function.IsType(m.Type, function.EqualAssignable) { + return false + } + + eq := s.callTTBFunc(m.Func, vx, vy) + s.report(eq, reportByMethod) + return true +} + +func (s *state) callTRFunc(f, v reflect.Value, step Transform) reflect.Value { + v = sanitizeValue(v, f.Type().In(0)) + if !s.dynChecker.Next() { + return f.Call([]reflect.Value{v})[0] + } + + // Run the function twice and ensure that we get the same results back. + // We run in goroutines so that the race detector (if enabled) can detect + // unsafe mutations to the input. + c := make(chan reflect.Value) + go detectRaces(c, f, v) + got := <-c + want := f.Call([]reflect.Value{v})[0] + if step.vx, step.vy = got, want; !s.statelessCompare(step).Equal() { + // To avoid false-positives with non-reflexive equality operations, + // we sanity check whether a value is equal to itself. + if step.vx, step.vy = want, want; !s.statelessCompare(step).Equal() { + return want + } + panic(fmt.Sprintf("non-deterministic function detected: %s", function.NameOf(f))) + } + return want +} + +func (s *state) callTTBFunc(f, x, y reflect.Value) bool { + x = sanitizeValue(x, f.Type().In(0)) + y = sanitizeValue(y, f.Type().In(1)) + if !s.dynChecker.Next() { + return f.Call([]reflect.Value{x, y})[0].Bool() + } + + // Swapping the input arguments is sufficient to check that + // f is symmetric and deterministic. + // We run in goroutines so that the race detector (if enabled) can detect + // unsafe mutations to the input. 
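+	// For instance, a non-symmetric comparer such as the hypothetical
+	//	cmp.Comparer(func(x, y float64) bool { return x <= y })
+	// may be caught by this check, since f(x, y) and f(y, x) can disagree.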
+ c := make(chan reflect.Value) + go detectRaces(c, f, y, x) + got := <-c + want := f.Call([]reflect.Value{x, y})[0].Bool() + if !got.IsValid() || got.Bool() != want { + panic(fmt.Sprintf("non-deterministic or non-symmetric function detected: %s", function.NameOf(f))) + } + return want +} + +func detectRaces(c chan<- reflect.Value, f reflect.Value, vs ...reflect.Value) { + var ret reflect.Value + defer func() { + recover() // Ignore panics, let the other call to f panic instead + c <- ret + }() + ret = f.Call(vs)[0] +} + +// sanitizeValue converts nil interfaces of type T to those of type R, +// assuming that T is assignable to R. +// Otherwise, it returns the input value as is. +func sanitizeValue(v reflect.Value, t reflect.Type) reflect.Value { + // TODO(≥go1.10): Workaround for reflect bug (https://golang.org/issue/22143). + if !flags.AtLeastGo110 { + if v.Kind() == reflect.Interface && v.IsNil() && v.Type() != t { + return reflect.New(t).Elem() + } + } + return v +} + +func (s *state) compareStruct(t reflect.Type, vx, vy reflect.Value) { + var addr bool + var vax, vay reflect.Value // Addressable versions of vx and vy + + var mayForce, mayForceInit bool + step := StructField{&structField{}} + for i := 0; i < t.NumField(); i++ { + step.typ = t.Field(i).Type + step.vx = vx.Field(i) + step.vy = vy.Field(i) + step.name = t.Field(i).Name + step.idx = i + step.unexported = !isExported(step.name) + if step.unexported { + if step.name == "_" { + continue + } + // Defer checking of unexported fields until later to give an + // Ignore a chance to ignore the field. + if !vax.IsValid() || !vay.IsValid() { + // For retrieveUnexportedField to work, the parent struct must + // be addressable. Create a new copy of the values if + // necessary to make them addressable. + addr = vx.CanAddr() || vy.CanAddr() + vax = makeAddressable(vx) + vay = makeAddressable(vy) + } + if !mayForceInit { + for _, xf := range s.exporters { + mayForce = mayForce || xf(t) + } + mayForceInit = true + } + step.mayForce = mayForce + step.paddr = addr + step.pvx = vax + step.pvy = vay + step.field = t.Field(i) + } + s.compareAny(step) + } +} + +func (s *state) compareSlice(t reflect.Type, vx, vy reflect.Value) { + isSlice := t.Kind() == reflect.Slice + if isSlice && (vx.IsNil() || vy.IsNil()) { + s.report(vx.IsNil() && vy.IsNil(), 0) + return + } + + // NOTE: It is incorrect to call curPtrs.Push on the slice header pointer + // since slices represents a list of pointers, rather than a single pointer. + // The pointer checking logic must be handled on a per-element basis + // in compareAny. + // + // A slice header (see reflect.SliceHeader) in Go is a tuple of a starting + // pointer P, a length N, and a capacity C. Supposing each slice element has + // a memory size of M, then the slice is equivalent to the list of pointers: + // [P+i*M for i in range(N)] + // + // For example, v[:0] and v[:1] are slices with the same starting pointer, + // but they are clearly different values. Using the slice pointer alone + // violates the assumption that equal pointers implies equal values. + + step := SliceIndex{&sliceIndex{pathStep: pathStep{typ: t.Elem()}, isSlice: isSlice}} + withIndexes := func(ix, iy int) SliceIndex { + if ix >= 0 { + step.vx, step.xkey = vx.Index(ix), ix + } else { + step.vx, step.xkey = reflect.Value{}, -1 + } + if iy >= 0 { + step.vy, step.ykey = vy.Index(iy), iy + } else { + step.vy, step.ykey = reflect.Value{}, -1 + } + return step + } + + // Ignore options are able to ignore missing elements in a slice. 
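+	// For example, an option such as
+	//	cmpopts.IgnoreSliceElements(func(s string) bool { return s == "" })
+	// can discard empty strings even when they appear in only one of the slices.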
+ // However, detecting these reliably requires an optimal differencing + // algorithm, for which diff.Difference is not. + // + // Instead, we first iterate through both slices to detect which elements + // would be ignored if standing alone. The index of non-discarded elements + // are stored in a separate slice, which diffing is then performed on. + var indexesX, indexesY []int + var ignoredX, ignoredY []bool + for ix := 0; ix < vx.Len(); ix++ { + ignored := s.statelessCompare(withIndexes(ix, -1)).NumDiff == 0 + if !ignored { + indexesX = append(indexesX, ix) + } + ignoredX = append(ignoredX, ignored) + } + for iy := 0; iy < vy.Len(); iy++ { + ignored := s.statelessCompare(withIndexes(-1, iy)).NumDiff == 0 + if !ignored { + indexesY = append(indexesY, iy) + } + ignoredY = append(ignoredY, ignored) + } + + // Compute an edit-script for slices vx and vy (excluding ignored elements). + edits := diff.Difference(len(indexesX), len(indexesY), func(ix, iy int) diff.Result { + return s.statelessCompare(withIndexes(indexesX[ix], indexesY[iy])) + }) + + // Replay the ignore-scripts and the edit-script. + var ix, iy int + for ix < vx.Len() || iy < vy.Len() { + var e diff.EditType + switch { + case ix < len(ignoredX) && ignoredX[ix]: + e = diff.UniqueX + case iy < len(ignoredY) && ignoredY[iy]: + e = diff.UniqueY + default: + e, edits = edits[0], edits[1:] + } + switch e { + case diff.UniqueX: + s.compareAny(withIndexes(ix, -1)) + ix++ + case diff.UniqueY: + s.compareAny(withIndexes(-1, iy)) + iy++ + default: + s.compareAny(withIndexes(ix, iy)) + ix++ + iy++ + } + } +} + +func (s *state) compareMap(t reflect.Type, vx, vy reflect.Value) { + if vx.IsNil() || vy.IsNil() { + s.report(vx.IsNil() && vy.IsNil(), 0) + return + } + + // Cycle-detection for maps. + if eq, visited := s.curPtrs.Push(vx, vy); visited { + s.report(eq, reportByCycle) + return + } + defer s.curPtrs.Pop(vx, vy) + + // We combine and sort the two map keys so that we can perform the + // comparisons in a deterministic order. + step := MapIndex{&mapIndex{pathStep: pathStep{typ: t.Elem()}}} + for _, k := range value.SortKeys(append(vx.MapKeys(), vy.MapKeys()...)) { + step.vx = vx.MapIndex(k) + step.vy = vy.MapIndex(k) + step.key = k + if !step.vx.IsValid() && !step.vy.IsValid() { + // It is possible for both vx and vy to be invalid if the + // key contained a NaN value in it. + // + // Even with the ability to retrieve NaN keys in Go 1.12, + // there still isn't a sensible way to compare the values since + // a NaN key may map to multiple unordered values. + // The most reasonable way to compare NaNs would be to compare the + // set of values. However, this is impossible to do efficiently + // since set equality is provably an O(n^2) operation given only + // an Equal function. If we had a Less function or Hash function, + // this could be done in O(n*log(n)) or O(n), respectively. + // + // Rather than adding complex logic to deal with NaNs, make it + // the user's responsibility to compare such obscure maps. + const help = "consider providing a Comparer to compare the map" + panic(fmt.Sprintf("%#v has map key with NaNs\n%s", s.curPath, help)) + } + s.compareAny(step) + } +} + +func (s *state) comparePtr(t reflect.Type, vx, vy reflect.Value) { + if vx.IsNil() || vy.IsNil() { + s.report(vx.IsNil() && vy.IsNil(), 0) + return + } + + // Cycle-detection for pointers. 
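+	// For example, cycle detection lets Equal terminate on hypothetical
+	// self-referential values:
+	//
+	//	type node struct{ Next *node }
+	//	n1 := &node{}
+	//	n1.Next = n1
+	//	n2 := &node{}
+	//	n2.Next = n2
+	//
+	// cmp.Equal(n1, n2) reports true, with the cycle reported via reportByCycle.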
+ if eq, visited := s.curPtrs.Push(vx, vy); visited { + s.report(eq, reportByCycle) + return + } + defer s.curPtrs.Pop(vx, vy) + + vx, vy = vx.Elem(), vy.Elem() + s.compareAny(Indirect{&indirect{pathStep{t.Elem(), vx, vy}}}) +} + +func (s *state) compareInterface(t reflect.Type, vx, vy reflect.Value) { + if vx.IsNil() || vy.IsNil() { + s.report(vx.IsNil() && vy.IsNil(), 0) + return + } + vx, vy = vx.Elem(), vy.Elem() + if vx.Type() != vy.Type() { + s.report(false, 0) + return + } + s.compareAny(TypeAssertion{&typeAssertion{pathStep{vx.Type(), vx, vy}}}) +} + +func (s *state) report(eq bool, rf resultFlags) { + if rf&reportByIgnore == 0 { + if eq { + s.result.NumSame++ + rf |= reportEqual + } else { + s.result.NumDiff++ + rf |= reportUnequal + } + } + for _, r := range s.reporters { + r.Report(Result{flags: rf}) + } +} + +// recChecker tracks the state needed to periodically perform checks that +// user provided transformers are not stuck in an infinitely recursive cycle. +type recChecker struct{ next int } + +// Check scans the Path for any recursive transformers and panics when any +// recursive transformers are detected. Note that the presence of a +// recursive Transformer does not necessarily imply an infinite cycle. +// As such, this check only activates after some minimal number of path steps. +func (rc *recChecker) Check(p Path) { + const minLen = 1 << 16 + if rc.next == 0 { + rc.next = minLen + } + if len(p) < rc.next { + return + } + rc.next <<= 1 + + // Check whether the same transformer has appeared at least twice. + var ss []string + m := map[Option]int{} + for _, ps := range p { + if t, ok := ps.(Transform); ok { + t := t.Option() + if m[t] == 1 { // Transformer was used exactly once before + tf := t.(*transformer).fnc.Type() + ss = append(ss, fmt.Sprintf("%v: %v => %v", t, tf.In(0), tf.Out(0))) + } + m[t]++ + } + } + if len(ss) > 0 { + const warning = "recursive set of Transformers detected" + const help = "consider using cmpopts.AcyclicTransformer" + set := strings.Join(ss, "\n\t") + panic(fmt.Sprintf("%s:\n\t%s\n%s", warning, set, help)) + } +} + +// dynChecker tracks the state needed to periodically perform checks that +// user provided functions are symmetric and deterministic. +// The zero value is safe for immediate use. +type dynChecker struct{ curr, next int } + +// Next increments the state and reports whether a check should be performed. +// +// Checks occur every Nth function call, where N is a triangular number: +// 0 1 3 6 10 15 21 28 36 45 55 66 78 91 105 120 136 153 171 190 ... +// See https://en.wikipedia.org/wiki/Triangular_number +// +// This sequence ensures that the cost of checks drops significantly as +// the number of functions calls grows larger. +func (dc *dynChecker) Next() bool { + ok := dc.curr == dc.next + if ok { + dc.curr = 0 + dc.next++ + } + dc.curr++ + return ok +} + +// makeAddressable returns a value that is always addressable. +// It returns the input verbatim if it is already addressable, +// otherwise it creates a new value and returns an addressable copy. +func makeAddressable(v reflect.Value) reflect.Value { + if v.CanAddr() { + return v + } + vc := reflect.New(v.Type()).Elem() + vc.Set(v) + return vc +} diff --git a/vendor/github.com/google/go-cmp/cmp/export_panic.go b/vendor/github.com/google/go-cmp/cmp/export_panic.go new file mode 100644 index 000000000..5ff0b4218 --- /dev/null +++ b/vendor/github.com/google/go-cmp/cmp/export_panic.go @@ -0,0 +1,15 @@ +// Copyright 2017, The Go Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build purego + +package cmp + +import "reflect" + +const supportExporters = false + +func retrieveUnexportedField(reflect.Value, reflect.StructField, bool) reflect.Value { + panic("no support for forcibly accessing unexported fields") +} diff --git a/vendor/github.com/google/go-cmp/cmp/export_unsafe.go b/vendor/github.com/google/go-cmp/cmp/export_unsafe.go new file mode 100644 index 000000000..21eb54858 --- /dev/null +++ b/vendor/github.com/google/go-cmp/cmp/export_unsafe.go @@ -0,0 +1,35 @@ +// Copyright 2017, The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build !purego + +package cmp + +import ( + "reflect" + "unsafe" +) + +const supportExporters = true + +// retrieveUnexportedField uses unsafe to forcibly retrieve any field from +// a struct such that the value has read-write permissions. +// +// The parent struct, v, must be addressable, while f must be a StructField +// describing the field to retrieve. If addr is false, +// then the returned value will be shallowed copied to be non-addressable. +func retrieveUnexportedField(v reflect.Value, f reflect.StructField, addr bool) reflect.Value { + ve := reflect.NewAt(f.Type, unsafe.Pointer(uintptr(unsafe.Pointer(v.UnsafeAddr()))+f.Offset)).Elem() + if !addr { + // A field is addressable if and only if the struct is addressable. + // If the original parent value was not addressable, shallow copy the + // value to make it non-addressable to avoid leaking an implementation + // detail of how forcibly exporting a field works. + if ve.Kind() == reflect.Interface && ve.IsNil() { + return reflect.Zero(f.Type) + } + return reflect.ValueOf(ve.Interface()).Convert(f.Type) + } + return ve +} diff --git a/vendor/github.com/google/go-cmp/cmp/internal/diff/debug_disable.go b/vendor/github.com/google/go-cmp/cmp/internal/diff/debug_disable.go new file mode 100644 index 000000000..1daaaacc5 --- /dev/null +++ b/vendor/github.com/google/go-cmp/cmp/internal/diff/debug_disable.go @@ -0,0 +1,17 @@ +// Copyright 2017, The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build !cmp_debug + +package diff + +var debug debugger + +type debugger struct{} + +func (debugger) Begin(_, _ int, f EqualFunc, _, _ *EditScript) EqualFunc { + return f +} +func (debugger) Update() {} +func (debugger) Finish() {} diff --git a/vendor/github.com/google/go-cmp/cmp/internal/diff/debug_enable.go b/vendor/github.com/google/go-cmp/cmp/internal/diff/debug_enable.go new file mode 100644 index 000000000..4b91dbcac --- /dev/null +++ b/vendor/github.com/google/go-cmp/cmp/internal/diff/debug_enable.go @@ -0,0 +1,122 @@ +// Copyright 2017, The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +// +build cmp_debug + +package diff + +import ( + "fmt" + "strings" + "sync" + "time" +) + +// The algorithm can be seen running in real-time by enabling debugging: +// go test -tags=cmp_debug -v +// +// Example output: +// === RUN TestDifference/#34 +// ┌───────────────────────────────┐ +// │ \ · · · · · · · · · · · · · · │ +// │ · # · · · · · · · · · · · · · │ +// │ · \ · · · · · · · · · · · · · │ +// │ · · \ · · · · · · · · · · · · │ +// │ · · · X # · · · · · · · · · · │ +// │ · · · # \ · · · · · · · · · · │ +// │ · · · · · # # · · · · · · · · │ +// │ · · · · · # \ · · · · · · · · │ +// │ · · · · · · · \ · · · · · · · │ +// │ · · · · · · · · \ · · · · · · │ +// │ · · · · · · · · · \ · · · · · │ +// │ · · · · · · · · · · \ · · # · │ +// │ · · · · · · · · · · · \ # # · │ +// │ · · · · · · · · · · · # # # · │ +// │ · · · · · · · · · · # # # # · │ +// │ · · · · · · · · · # # # # # · │ +// │ · · · · · · · · · · · · · · \ │ +// └───────────────────────────────┘ +// [.Y..M.XY......YXYXY.|] +// +// The grid represents the edit-graph where the horizontal axis represents +// list X and the vertical axis represents list Y. The start of the two lists +// is the top-left, while the ends are the bottom-right. The '·' represents +// an unexplored node in the graph. The '\' indicates that the two symbols +// from list X and Y are equal. The 'X' indicates that two symbols are similar +// (but not exactly equal) to each other. The '#' indicates that the two symbols +// are different (and not similar). The algorithm traverses this graph trying to +// make the paths starting in the top-left and the bottom-right connect. +// +// The series of '.', 'X', 'Y', and 'M' characters at the bottom represents +// the currently established path from the forward and reverse searches, +// separated by a '|' character. + +const ( + updateDelay = 100 * time.Millisecond + finishDelay = 500 * time.Millisecond + ansiTerminal = true // ANSI escape codes used to move terminal cursor +) + +var debug debugger + +type debugger struct { + sync.Mutex + p1, p2 EditScript + fwdPath, revPath *EditScript + grid []byte + lines int +} + +func (dbg *debugger) Begin(nx, ny int, f EqualFunc, p1, p2 *EditScript) EqualFunc { + dbg.Lock() + dbg.fwdPath, dbg.revPath = p1, p2 + top := "┌─" + strings.Repeat("──", nx) + "┐\n" + row := "│ " + strings.Repeat("· ", nx) + "│\n" + btm := "└─" + strings.Repeat("──", nx) + "┘\n" + dbg.grid = []byte(top + strings.Repeat(row, ny) + btm) + dbg.lines = strings.Count(dbg.String(), "\n") + fmt.Print(dbg) + + // Wrap the EqualFunc so that we can intercept each result. 
+ return func(ix, iy int) (r Result) { + cell := dbg.grid[len(top)+iy*len(row):][len("│ ")+len("· ")*ix:][:len("·")] + for i := range cell { + cell[i] = 0 // Zero out the multiple bytes of UTF-8 middle-dot + } + switch r = f(ix, iy); { + case r.Equal(): + cell[0] = '\\' + case r.Similar(): + cell[0] = 'X' + default: + cell[0] = '#' + } + return + } +} + +func (dbg *debugger) Update() { + dbg.print(updateDelay) +} + +func (dbg *debugger) Finish() { + dbg.print(finishDelay) + dbg.Unlock() +} + +func (dbg *debugger) String() string { + dbg.p1, dbg.p2 = *dbg.fwdPath, dbg.p2[:0] + for i := len(*dbg.revPath) - 1; i >= 0; i-- { + dbg.p2 = append(dbg.p2, (*dbg.revPath)[i]) + } + return fmt.Sprintf("%s[%v|%v]\n\n", dbg.grid, dbg.p1, dbg.p2) +} + +func (dbg *debugger) print(d time.Duration) { + if ansiTerminal { + fmt.Printf("\x1b[%dA", dbg.lines) // Reset terminal cursor + } + fmt.Print(dbg) + time.Sleep(d) +} diff --git a/vendor/github.com/google/go-cmp/cmp/internal/diff/diff.go b/vendor/github.com/google/go-cmp/cmp/internal/diff/diff.go new file mode 100644 index 000000000..bc196b16c --- /dev/null +++ b/vendor/github.com/google/go-cmp/cmp/internal/diff/diff.go @@ -0,0 +1,398 @@ +// Copyright 2017, The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package diff implements an algorithm for producing edit-scripts. +// The edit-script is a sequence of operations needed to transform one list +// of symbols into another (or vice-versa). The edits allowed are insertions, +// deletions, and modifications. The summation of all edits is called the +// Levenshtein distance as this problem is well-known in computer science. +// +// This package prioritizes performance over accuracy. That is, the run time +// is more important than obtaining a minimal Levenshtein distance. +package diff + +import ( + "math/rand" + "time" + + "github.com/google/go-cmp/cmp/internal/flags" +) + +// EditType represents a single operation within an edit-script. +type EditType uint8 + +const ( + // Identity indicates that a symbol pair is identical in both list X and Y. + Identity EditType = iota + // UniqueX indicates that a symbol only exists in X and not Y. + UniqueX + // UniqueY indicates that a symbol only exists in Y and not X. + UniqueY + // Modified indicates that a symbol pair is a modification of each other. + Modified +) + +// EditScript represents the series of differences between two lists. +type EditScript []EditType + +// String returns a human-readable string representing the edit-script where +// Identity, UniqueX, UniqueY, and Modified are represented by the +// '.', 'X', 'Y', and 'M' characters, respectively. +func (es EditScript) String() string { + b := make([]byte, len(es)) + for i, e := range es { + switch e { + case Identity: + b[i] = '.' + case UniqueX: + b[i] = 'X' + case UniqueY: + b[i] = 'Y' + case Modified: + b[i] = 'M' + default: + panic("invalid edit-type") + } + } + return string(b) +} + +// stats returns a histogram of the number of each type of edit operation. +func (es EditScript) stats() (s struct{ NI, NX, NY, NM int }) { + for _, e := range es { + switch e { + case Identity: + s.NI++ + case UniqueX: + s.NX++ + case UniqueY: + s.NY++ + case Modified: + s.NM++ + default: + panic("invalid edit-type") + } + } + return +} + +// Dist is the Levenshtein distance and is guaranteed to be 0 if and only if +// lists X and Y are equal. 
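+//
+// For example, the edit-script "..XY.M" has a Dist of 3 (one UniqueX, one
+// UniqueY, and one Modified edit alongside three Identity edits), while its
+// LenX and LenY are both 5.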
+func (es EditScript) Dist() int { return len(es) - es.stats().NI } + +// LenX is the length of the X list. +func (es EditScript) LenX() int { return len(es) - es.stats().NY } + +// LenY is the length of the Y list. +func (es EditScript) LenY() int { return len(es) - es.stats().NX } + +// EqualFunc reports whether the symbols at indexes ix and iy are equal. +// When called by Difference, the index is guaranteed to be within nx and ny. +type EqualFunc func(ix int, iy int) Result + +// Result is the result of comparison. +// NumSame is the number of sub-elements that are equal. +// NumDiff is the number of sub-elements that are not equal. +type Result struct{ NumSame, NumDiff int } + +// BoolResult returns a Result that is either Equal or not Equal. +func BoolResult(b bool) Result { + if b { + return Result{NumSame: 1} // Equal, Similar + } else { + return Result{NumDiff: 2} // Not Equal, not Similar + } +} + +// Equal indicates whether the symbols are equal. Two symbols are equal +// if and only if NumDiff == 0. If Equal, then they are also Similar. +func (r Result) Equal() bool { return r.NumDiff == 0 } + +// Similar indicates whether two symbols are similar and may be represented +// by using the Modified type. As a special case, we consider binary comparisons +// (i.e., those that return Result{1, 0} or Result{0, 1}) to be similar. +// +// The exact ratio of NumSame to NumDiff to determine similarity may change. +func (r Result) Similar() bool { + // Use NumSame+1 to offset NumSame so that binary comparisons are similar. + return r.NumSame+1 >= r.NumDiff +} + +var randBool = rand.New(rand.NewSource(time.Now().Unix())).Intn(2) == 0 + +// Difference reports whether two lists of lengths nx and ny are equal +// given the definition of equality provided as f. +// +// This function returns an edit-script, which is a sequence of operations +// needed to convert one list into the other. The following invariants for +// the edit-script are maintained: +// • eq == (es.Dist()==0) +// • nx == es.LenX() +// • ny == es.LenY() +// +// This algorithm is not guaranteed to be an optimal solution (i.e., one that +// produces an edit-script with a minimal Levenshtein distance). This algorithm +// favors performance over optimality. The exact output is not guaranteed to +// be stable and may change over time. +func Difference(nx, ny int, f EqualFunc) (es EditScript) { + // This algorithm is based on traversing what is known as an "edit-graph". + // See Figure 1 from "An O(ND) Difference Algorithm and Its Variations" + // by Eugene W. Myers. Since D can be as large as N itself, this is + // effectively O(N^2). Unlike the algorithm from that paper, we are not + // interested in the optimal path, but at least some "decent" path. + // + // For example, let X and Y be lists of symbols: + // X = [A B C A B B A] + // Y = [C B A B A C] + // + // The edit-graph can be drawn as the following: + // A B C A B B A + // ┌─────────────┐ + // C │_|_|\|_|_|_|_│ 0 + // B │_|\|_|_|\|\|_│ 1 + // A │\|_|_|\|_|_|\│ 2 + // B │_|\|_|_|\|\|_│ 3 + // A │\|_|_|\|_|_|\│ 4 + // C │ | |\| | | | │ 5 + // └─────────────┘ 6 + // 0 1 2 3 4 5 6 7 + // + // List X is written along the horizontal axis, while list Y is written + // along the vertical axis. At any point on this grid, if the symbol in + // list X matches the corresponding symbol in list Y, then a '\' is drawn. 
+ // The goal of any minimal edit-script algorithm is to find a path from the + // top-left corner to the bottom-right corner, while traveling through the + // fewest horizontal or vertical edges. + // A horizontal edge is equivalent to inserting a symbol from list X. + // A vertical edge is equivalent to inserting a symbol from list Y. + // A diagonal edge is equivalent to a matching symbol between both X and Y. + + // Invariants: + // • 0 ≤ fwdPath.X ≤ (fwdFrontier.X, revFrontier.X) ≤ revPath.X ≤ nx + // • 0 ≤ fwdPath.Y ≤ (fwdFrontier.Y, revFrontier.Y) ≤ revPath.Y ≤ ny + // + // In general: + // • fwdFrontier.X < revFrontier.X + // • fwdFrontier.Y < revFrontier.Y + // Unless, it is time for the algorithm to terminate. + fwdPath := path{+1, point{0, 0}, make(EditScript, 0, (nx+ny)/2)} + revPath := path{-1, point{nx, ny}, make(EditScript, 0)} + fwdFrontier := fwdPath.point // Forward search frontier + revFrontier := revPath.point // Reverse search frontier + + // Search budget bounds the cost of searching for better paths. + // The longest sequence of non-matching symbols that can be tolerated is + // approximately the square-root of the search budget. + searchBudget := 4 * (nx + ny) // O(n) + + // Running the tests with the "cmp_debug" build tag prints a visualization + // of the algorithm running in real-time. This is educational for + // understanding how the algorithm works. See debug_enable.go. + f = debug.Begin(nx, ny, f, &fwdPath.es, &revPath.es) + + // The algorithm below is a greedy, meet-in-the-middle algorithm for + // computing sub-optimal edit-scripts between two lists. + // + // The algorithm is approximately as follows: + // • Searching for differences switches back-and-forth between + // a search that starts at the beginning (the top-left corner), and + // a search that starts at the end (the bottom-right corner). The goal of + // the search is connect with the search from the opposite corner. + // • As we search, we build a path in a greedy manner, where the first + // match seen is added to the path (this is sub-optimal, but provides a + // decent result in practice). When matches are found, we try the next pair + // of symbols in the lists and follow all matches as far as possible. + // • When searching for matches, we search along a diagonal going through + // through the "frontier" point. If no matches are found, we advance the + // frontier towards the opposite corner. + // • This algorithm terminates when either the X coordinates or the + // Y coordinates of the forward and reverse frontier points ever intersect. + + // This algorithm is correct even if searching only in the forward direction + // or in the reverse direction. We do both because it is commonly observed + // that two lists commonly differ because elements were added to the front + // or end of the other list. + // + // Non-deterministically start with either the forward or reverse direction + // to introduce some deliberate instability so that we have the flexibility + // to change this algorithm in the future. + if flags.Deterministic || randBool { + goto forwardSearch + } else { + goto reverseSearch + } + +forwardSearch: + { + // Forward search from the beginning. + if fwdFrontier.X >= revFrontier.X || fwdFrontier.Y >= revFrontier.Y || searchBudget == 0 { + goto finishSearch + } + for stop1, stop2, i := false, false, 0; !(stop1 && stop2) && searchBudget > 0; i++ { + // Search in a diagonal pattern for a match. 
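+			// zigzag(i) yields offsets 0, -1, +1, -2, +2, ..., so successive
+			// probe points fan outward from the frontier along its
+			// anti-diagonal, alternating between the two sides.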
+ z := zigzag(i) + p := point{fwdFrontier.X + z, fwdFrontier.Y - z} + switch { + case p.X >= revPath.X || p.Y < fwdPath.Y: + stop1 = true // Hit top-right corner + case p.Y >= revPath.Y || p.X < fwdPath.X: + stop2 = true // Hit bottom-left corner + case f(p.X, p.Y).Equal(): + // Match found, so connect the path to this point. + fwdPath.connect(p, f) + fwdPath.append(Identity) + // Follow sequence of matches as far as possible. + for fwdPath.X < revPath.X && fwdPath.Y < revPath.Y { + if !f(fwdPath.X, fwdPath.Y).Equal() { + break + } + fwdPath.append(Identity) + } + fwdFrontier = fwdPath.point + stop1, stop2 = true, true + default: + searchBudget-- // Match not found + } + debug.Update() + } + // Advance the frontier towards reverse point. + if revPath.X-fwdFrontier.X >= revPath.Y-fwdFrontier.Y { + fwdFrontier.X++ + } else { + fwdFrontier.Y++ + } + goto reverseSearch + } + +reverseSearch: + { + // Reverse search from the end. + if fwdFrontier.X >= revFrontier.X || fwdFrontier.Y >= revFrontier.Y || searchBudget == 0 { + goto finishSearch + } + for stop1, stop2, i := false, false, 0; !(stop1 && stop2) && searchBudget > 0; i++ { + // Search in a diagonal pattern for a match. + z := zigzag(i) + p := point{revFrontier.X - z, revFrontier.Y + z} + switch { + case fwdPath.X >= p.X || revPath.Y < p.Y: + stop1 = true // Hit bottom-left corner + case fwdPath.Y >= p.Y || revPath.X < p.X: + stop2 = true // Hit top-right corner + case f(p.X-1, p.Y-1).Equal(): + // Match found, so connect the path to this point. + revPath.connect(p, f) + revPath.append(Identity) + // Follow sequence of matches as far as possible. + for fwdPath.X < revPath.X && fwdPath.Y < revPath.Y { + if !f(revPath.X-1, revPath.Y-1).Equal() { + break + } + revPath.append(Identity) + } + revFrontier = revPath.point + stop1, stop2 = true, true + default: + searchBudget-- // Match not found + } + debug.Update() + } + // Advance the frontier towards forward point. + if revFrontier.X-fwdPath.X >= revFrontier.Y-fwdPath.Y { + revFrontier.X-- + } else { + revFrontier.Y-- + } + goto forwardSearch + } + +finishSearch: + // Join the forward and reverse paths and then append the reverse path. + fwdPath.connect(revPath.point, f) + for i := len(revPath.es) - 1; i >= 0; i-- { + t := revPath.es[i] + revPath.es = revPath.es[:i] + fwdPath.append(t) + } + debug.Finish() + return fwdPath.es +} + +type path struct { + dir int // +1 if forward, -1 if reverse + point // Leading point of the EditScript path + es EditScript +} + +// connect appends any necessary Identity, Modified, UniqueX, or UniqueY types +// to the edit-script to connect p.point to dst. +func (p *path) connect(dst point, f EqualFunc) { + if p.dir > 0 { + // Connect in forward direction. + for dst.X > p.X && dst.Y > p.Y { + switch r := f(p.X, p.Y); { + case r.Equal(): + p.append(Identity) + case r.Similar(): + p.append(Modified) + case dst.X-p.X >= dst.Y-p.Y: + p.append(UniqueX) + default: + p.append(UniqueY) + } + } + for dst.X > p.X { + p.append(UniqueX) + } + for dst.Y > p.Y { + p.append(UniqueY) + } + } else { + // Connect in reverse direction. 
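+		// Mirrors the forward case: step p back toward dst, preferring diagonal
+		// edits (Identity or Modified) when the symbols match or are similar,
+		// and otherwise emitting UniqueX or UniqueY edges.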
+ for p.X > dst.X && p.Y > dst.Y { + switch r := f(p.X-1, p.Y-1); { + case r.Equal(): + p.append(Identity) + case r.Similar(): + p.append(Modified) + case p.Y-dst.Y >= p.X-dst.X: + p.append(UniqueY) + default: + p.append(UniqueX) + } + } + for p.X > dst.X { + p.append(UniqueX) + } + for p.Y > dst.Y { + p.append(UniqueY) + } + } +} + +func (p *path) append(t EditType) { + p.es = append(p.es, t) + switch t { + case Identity, Modified: + p.add(p.dir, p.dir) + case UniqueX: + p.add(p.dir, 0) + case UniqueY: + p.add(0, p.dir) + } + debug.Update() +} + +type point struct{ X, Y int } + +func (p *point) add(dx, dy int) { p.X += dx; p.Y += dy } + +// zigzag maps a consecutive sequence of integers to a zig-zag sequence. +// [0 1 2 3 4 5 ...] => [0 -1 +1 -2 +2 ...] +func zigzag(x int) int { + if x&1 != 0 { + x = ^x + } + return x >> 1 +} diff --git a/vendor/github.com/google/go-cmp/cmp/internal/flags/flags.go b/vendor/github.com/google/go-cmp/cmp/internal/flags/flags.go new file mode 100644 index 000000000..d8e459c9b --- /dev/null +++ b/vendor/github.com/google/go-cmp/cmp/internal/flags/flags.go @@ -0,0 +1,9 @@ +// Copyright 2019, The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package flags + +// Deterministic controls whether the output of Diff should be deterministic. +// This is only used for testing. +var Deterministic bool diff --git a/vendor/github.com/google/go-cmp/cmp/internal/flags/toolchain_legacy.go b/vendor/github.com/google/go-cmp/cmp/internal/flags/toolchain_legacy.go new file mode 100644 index 000000000..82d1d7fbf --- /dev/null +++ b/vendor/github.com/google/go-cmp/cmp/internal/flags/toolchain_legacy.go @@ -0,0 +1,10 @@ +// Copyright 2019, The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build !go1.10 + +package flags + +// AtLeastGo110 reports whether the Go toolchain is at least Go 1.10. +const AtLeastGo110 = false diff --git a/vendor/github.com/google/go-cmp/cmp/internal/flags/toolchain_recent.go b/vendor/github.com/google/go-cmp/cmp/internal/flags/toolchain_recent.go new file mode 100644 index 000000000..8646f0529 --- /dev/null +++ b/vendor/github.com/google/go-cmp/cmp/internal/flags/toolchain_recent.go @@ -0,0 +1,10 @@ +// Copyright 2019, The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build go1.10 + +package flags + +// AtLeastGo110 reports whether the Go toolchain is at least Go 1.10. +const AtLeastGo110 = true diff --git a/vendor/github.com/google/go-cmp/cmp/internal/function/func.go b/vendor/github.com/google/go-cmp/cmp/internal/function/func.go new file mode 100644 index 000000000..d127d4362 --- /dev/null +++ b/vendor/github.com/google/go-cmp/cmp/internal/function/func.go @@ -0,0 +1,99 @@ +// Copyright 2017, The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package function provides functionality for identifying function types. 
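+//
+// For example:
+//
+//	function.IsType(reflect.TypeOf((func(int, int) bool)(nil)), function.Equal)    // true
+//	function.IsType(reflect.TypeOf((func(int) string)(nil)), function.Transformer) // true
+//	function.IsType(reflect.TypeOf((func(...int) bool)(nil)), function.Equal)      // false (variadic)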
+package function + +import ( + "reflect" + "regexp" + "runtime" + "strings" +) + +type funcType int + +const ( + _ funcType = iota + + tbFunc // func(T) bool + ttbFunc // func(T, T) bool + trbFunc // func(T, R) bool + tibFunc // func(T, I) bool + trFunc // func(T) R + + Equal = ttbFunc // func(T, T) bool + EqualAssignable = tibFunc // func(T, I) bool; encapsulates func(T, T) bool + Transformer = trFunc // func(T) R + ValueFilter = ttbFunc // func(T, T) bool + Less = ttbFunc // func(T, T) bool + ValuePredicate = tbFunc // func(T) bool + KeyValuePredicate = trbFunc // func(T, R) bool +) + +var boolType = reflect.TypeOf(true) + +// IsType reports whether the reflect.Type is of the specified function type. +func IsType(t reflect.Type, ft funcType) bool { + if t == nil || t.Kind() != reflect.Func || t.IsVariadic() { + return false + } + ni, no := t.NumIn(), t.NumOut() + switch ft { + case tbFunc: // func(T) bool + if ni == 1 && no == 1 && t.Out(0) == boolType { + return true + } + case ttbFunc: // func(T, T) bool + if ni == 2 && no == 1 && t.In(0) == t.In(1) && t.Out(0) == boolType { + return true + } + case trbFunc: // func(T, R) bool + if ni == 2 && no == 1 && t.Out(0) == boolType { + return true + } + case tibFunc: // func(T, I) bool + if ni == 2 && no == 1 && t.In(0).AssignableTo(t.In(1)) && t.Out(0) == boolType { + return true + } + case trFunc: // func(T) R + if ni == 1 && no == 1 { + return true + } + } + return false +} + +var lastIdentRx = regexp.MustCompile(`[_\p{L}][_\p{L}\p{N}]*$`) + +// NameOf returns the name of the function value. +func NameOf(v reflect.Value) string { + fnc := runtime.FuncForPC(v.Pointer()) + if fnc == nil { + return "" + } + fullName := fnc.Name() // e.g., "long/path/name/mypkg.(*MyType).(long/path/name/mypkg.myMethod)-fm" + + // Method closures have a "-fm" suffix. + fullName = strings.TrimSuffix(fullName, "-fm") + + var name string + for len(fullName) > 0 { + inParen := strings.HasSuffix(fullName, ")") + fullName = strings.TrimSuffix(fullName, ")") + + s := lastIdentRx.FindString(fullName) + if s == "" { + break + } + name = s + "." + name + fullName = strings.TrimSuffix(fullName, s) + + if i := strings.LastIndexByte(fullName, '('); inParen && i >= 0 { + fullName = fullName[:i] + } + fullName = strings.TrimSuffix(fullName, ".") + } + return strings.TrimSuffix(name, ".") +} diff --git a/vendor/github.com/google/go-cmp/cmp/internal/value/name.go b/vendor/github.com/google/go-cmp/cmp/internal/value/name.go new file mode 100644 index 000000000..b6c12cefb --- /dev/null +++ b/vendor/github.com/google/go-cmp/cmp/internal/value/name.go @@ -0,0 +1,157 @@ +// Copyright 2020, The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package value + +import ( + "reflect" + "strconv" +) + +// TypeString is nearly identical to reflect.Type.String, +// but has an additional option to specify that full type names be used. +func TypeString(t reflect.Type, qualified bool) string { + return string(appendTypeName(nil, t, qualified, false)) +} + +func appendTypeName(b []byte, t reflect.Type, qualified, elideFunc bool) []byte { + // BUG: Go reflection provides no way to disambiguate two named types + // of the same name and within the same package, + // but declared within the namespace of different functions. + + // Named type. + if t.Name() != "" { + if qualified && t.PkgPath() != "" { + b = append(b, '"') + b = append(b, t.PkgPath()...) 
+ b = append(b, '"') + b = append(b, '.') + b = append(b, t.Name()...) + } else { + b = append(b, t.String()...) + } + return b + } + + // Unnamed type. + switch k := t.Kind(); k { + case reflect.Bool, reflect.String, reflect.UnsafePointer, + reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, + reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr, + reflect.Float32, reflect.Float64, reflect.Complex64, reflect.Complex128: + b = append(b, k.String()...) + case reflect.Chan: + if t.ChanDir() == reflect.RecvDir { + b = append(b, "<-"...) + } + b = append(b, "chan"...) + if t.ChanDir() == reflect.SendDir { + b = append(b, "<-"...) + } + b = append(b, ' ') + b = appendTypeName(b, t.Elem(), qualified, false) + case reflect.Func: + if !elideFunc { + b = append(b, "func"...) + } + b = append(b, '(') + for i := 0; i < t.NumIn(); i++ { + if i > 0 { + b = append(b, ", "...) + } + if i == t.NumIn()-1 && t.IsVariadic() { + b = append(b, "..."...) + b = appendTypeName(b, t.In(i).Elem(), qualified, false) + } else { + b = appendTypeName(b, t.In(i), qualified, false) + } + } + b = append(b, ')') + switch t.NumOut() { + case 0: + // Do nothing + case 1: + b = append(b, ' ') + b = appendTypeName(b, t.Out(0), qualified, false) + default: + b = append(b, " ("...) + for i := 0; i < t.NumOut(); i++ { + if i > 0 { + b = append(b, ", "...) + } + b = appendTypeName(b, t.Out(i), qualified, false) + } + b = append(b, ')') + } + case reflect.Struct: + b = append(b, "struct{ "...) + for i := 0; i < t.NumField(); i++ { + if i > 0 { + b = append(b, "; "...) + } + sf := t.Field(i) + if !sf.Anonymous { + if qualified && sf.PkgPath != "" { + b = append(b, '"') + b = append(b, sf.PkgPath...) + b = append(b, '"') + b = append(b, '.') + } + b = append(b, sf.Name...) + b = append(b, ' ') + } + b = appendTypeName(b, sf.Type, qualified, false) + if sf.Tag != "" { + b = append(b, ' ') + b = strconv.AppendQuote(b, string(sf.Tag)) + } + } + if b[len(b)-1] == ' ' { + b = b[:len(b)-1] + } else { + b = append(b, ' ') + } + b = append(b, '}') + case reflect.Slice, reflect.Array: + b = append(b, '[') + if k == reflect.Array { + b = strconv.AppendUint(b, uint64(t.Len()), 10) + } + b = append(b, ']') + b = appendTypeName(b, t.Elem(), qualified, false) + case reflect.Map: + b = append(b, "map["...) + b = appendTypeName(b, t.Key(), qualified, false) + b = append(b, ']') + b = appendTypeName(b, t.Elem(), qualified, false) + case reflect.Ptr: + b = append(b, '*') + b = appendTypeName(b, t.Elem(), qualified, false) + case reflect.Interface: + b = append(b, "interface{ "...) + for i := 0; i < t.NumMethod(); i++ { + if i > 0 { + b = append(b, "; "...) + } + m := t.Method(i) + if qualified && m.PkgPath != "" { + b = append(b, '"') + b = append(b, m.PkgPath...) + b = append(b, '"') + b = append(b, '.') + } + b = append(b, m.Name...) + b = appendTypeName(b, m.Type, qualified, true) + } + if b[len(b)-1] == ' ' { + b = b[:len(b)-1] + } else { + b = append(b, ' ') + } + b = append(b, '}') + default: + panic("invalid kind: " + k.String()) + } + return b +} diff --git a/vendor/github.com/google/go-cmp/cmp/internal/value/pointer_purego.go b/vendor/github.com/google/go-cmp/cmp/internal/value/pointer_purego.go new file mode 100644 index 000000000..44f4a5afd --- /dev/null +++ b/vendor/github.com/google/go-cmp/cmp/internal/value/pointer_purego.go @@ -0,0 +1,33 @@ +// Copyright 2018, The Go Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build purego + +package value + +import "reflect" + +// Pointer is an opaque typed pointer and is guaranteed to be comparable. +type Pointer struct { + p uintptr + t reflect.Type +} + +// PointerOf returns a Pointer from v, which must be a +// reflect.Ptr, reflect.Slice, or reflect.Map. +func PointerOf(v reflect.Value) Pointer { + // NOTE: Storing a pointer as an uintptr is technically incorrect as it + // assumes that the GC implementation does not use a moving collector. + return Pointer{v.Pointer(), v.Type()} +} + +// IsNil reports whether the pointer is nil. +func (p Pointer) IsNil() bool { + return p.p == 0 +} + +// Uintptr returns the pointer as a uintptr. +func (p Pointer) Uintptr() uintptr { + return p.p +} diff --git a/vendor/github.com/google/go-cmp/cmp/internal/value/pointer_unsafe.go b/vendor/github.com/google/go-cmp/cmp/internal/value/pointer_unsafe.go new file mode 100644 index 000000000..a605953d4 --- /dev/null +++ b/vendor/github.com/google/go-cmp/cmp/internal/value/pointer_unsafe.go @@ -0,0 +1,36 @@ +// Copyright 2018, The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build !purego + +package value + +import ( + "reflect" + "unsafe" +) + +// Pointer is an opaque typed pointer and is guaranteed to be comparable. +type Pointer struct { + p unsafe.Pointer + t reflect.Type +} + +// PointerOf returns a Pointer from v, which must be a +// reflect.Ptr, reflect.Slice, or reflect.Map. +func PointerOf(v reflect.Value) Pointer { + // The proper representation of a pointer is unsafe.Pointer, + // which is necessary if the GC ever uses a moving collector. + return Pointer{unsafe.Pointer(v.Pointer()), v.Type()} +} + +// IsNil reports whether the pointer is nil. +func (p Pointer) IsNil() bool { + return p.p == nil +} + +// Uintptr returns the pointer as a uintptr. +func (p Pointer) Uintptr() uintptr { + return uintptr(p.p) +} diff --git a/vendor/github.com/google/go-cmp/cmp/internal/value/sort.go b/vendor/github.com/google/go-cmp/cmp/internal/value/sort.go new file mode 100644 index 000000000..98533b036 --- /dev/null +++ b/vendor/github.com/google/go-cmp/cmp/internal/value/sort.go @@ -0,0 +1,106 @@ +// Copyright 2017, The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package value + +import ( + "fmt" + "math" + "reflect" + "sort" +) + +// SortKeys sorts a list of map keys, deduplicating keys if necessary. +// The type of each value must be comparable. +func SortKeys(vs []reflect.Value) []reflect.Value { + if len(vs) == 0 { + return vs + } + + // Sort the map keys. + sort.SliceStable(vs, func(i, j int) bool { return isLess(vs[i], vs[j]) }) + + // Deduplicate keys (fails for NaNs). + vs2 := vs[:1] + for _, v := range vs[1:] { + if isLess(vs2[len(vs2)-1], v) { + vs2 = append(vs2, v) + } + } + return vs2 +} + +// isLess is a generic function for sorting arbitrary map keys. +// The inputs must be of the same type and must be comparable. 
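+//
+// For example, for a map with interface{} keys containing true, 1, and "one",
+// SortKeys orders them as true, 1, "one", since interface values with
+// different concrete kinds are ordered by their reflect.Kind.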
+func isLess(x, y reflect.Value) bool { + switch x.Type().Kind() { + case reflect.Bool: + return !x.Bool() && y.Bool() + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + return x.Int() < y.Int() + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: + return x.Uint() < y.Uint() + case reflect.Float32, reflect.Float64: + // NOTE: This does not sort -0 as less than +0 + // since Go maps treat -0 and +0 as equal keys. + fx, fy := x.Float(), y.Float() + return fx < fy || math.IsNaN(fx) && !math.IsNaN(fy) + case reflect.Complex64, reflect.Complex128: + cx, cy := x.Complex(), y.Complex() + rx, ix, ry, iy := real(cx), imag(cx), real(cy), imag(cy) + if rx == ry || (math.IsNaN(rx) && math.IsNaN(ry)) { + return ix < iy || math.IsNaN(ix) && !math.IsNaN(iy) + } + return rx < ry || math.IsNaN(rx) && !math.IsNaN(ry) + case reflect.Ptr, reflect.UnsafePointer, reflect.Chan: + return x.Pointer() < y.Pointer() + case reflect.String: + return x.String() < y.String() + case reflect.Array: + for i := 0; i < x.Len(); i++ { + if isLess(x.Index(i), y.Index(i)) { + return true + } + if isLess(y.Index(i), x.Index(i)) { + return false + } + } + return false + case reflect.Struct: + for i := 0; i < x.NumField(); i++ { + if isLess(x.Field(i), y.Field(i)) { + return true + } + if isLess(y.Field(i), x.Field(i)) { + return false + } + } + return false + case reflect.Interface: + vx, vy := x.Elem(), y.Elem() + if !vx.IsValid() || !vy.IsValid() { + return !vx.IsValid() && vy.IsValid() + } + tx, ty := vx.Type(), vy.Type() + if tx == ty { + return isLess(x.Elem(), y.Elem()) + } + if tx.Kind() != ty.Kind() { + return vx.Kind() < vy.Kind() + } + if tx.String() != ty.String() { + return tx.String() < ty.String() + } + if tx.PkgPath() != ty.PkgPath() { + return tx.PkgPath() < ty.PkgPath() + } + // This can happen in rare situations, so we fallback to just comparing + // the unique pointer for a reflect.Type. This guarantees deterministic + // ordering within a program, but it is obviously not stable. + return reflect.ValueOf(vx.Type()).Pointer() < reflect.ValueOf(vy.Type()).Pointer() + default: + // Must be Func, Map, or Slice; which are not comparable. + panic(fmt.Sprintf("%T is not comparable", x.Type())) + } +} diff --git a/vendor/github.com/google/go-cmp/cmp/internal/value/zero.go b/vendor/github.com/google/go-cmp/cmp/internal/value/zero.go new file mode 100644 index 000000000..9147a2997 --- /dev/null +++ b/vendor/github.com/google/go-cmp/cmp/internal/value/zero.go @@ -0,0 +1,48 @@ +// Copyright 2017, The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package value + +import ( + "math" + "reflect" +) + +// IsZero reports whether v is the zero value. +// This does not rely on Interface and so can be used on unexported fields. 
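+//
+// For example, IsZero reports true for a nil map, an empty string, and
+// [2]int{}, but false for an allocated empty map or for [2]int{0, 1}.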
+func IsZero(v reflect.Value) bool { + switch v.Kind() { + case reflect.Bool: + return v.Bool() == false + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + return v.Int() == 0 + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: + return v.Uint() == 0 + case reflect.Float32, reflect.Float64: + return math.Float64bits(v.Float()) == 0 + case reflect.Complex64, reflect.Complex128: + return math.Float64bits(real(v.Complex())) == 0 && math.Float64bits(imag(v.Complex())) == 0 + case reflect.String: + return v.String() == "" + case reflect.UnsafePointer: + return v.Pointer() == 0 + case reflect.Chan, reflect.Func, reflect.Interface, reflect.Ptr, reflect.Map, reflect.Slice: + return v.IsNil() + case reflect.Array: + for i := 0; i < v.Len(); i++ { + if !IsZero(v.Index(i)) { + return false + } + } + return true + case reflect.Struct: + for i := 0; i < v.NumField(); i++ { + if !IsZero(v.Field(i)) { + return false + } + } + return true + } + return false +} diff --git a/vendor/github.com/google/go-cmp/cmp/options.go b/vendor/github.com/google/go-cmp/cmp/options.go new file mode 100644 index 000000000..e57b9eb53 --- /dev/null +++ b/vendor/github.com/google/go-cmp/cmp/options.go @@ -0,0 +1,552 @@ +// Copyright 2017, The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package cmp + +import ( + "fmt" + "reflect" + "regexp" + "strings" + + "github.com/google/go-cmp/cmp/internal/function" +) + +// Option configures for specific behavior of Equal and Diff. In particular, +// the fundamental Option functions (Ignore, Transformer, and Comparer), +// configure how equality is determined. +// +// The fundamental options may be composed with filters (FilterPath and +// FilterValues) to control the scope over which they are applied. +// +// The cmp/cmpopts package provides helper functions for creating options that +// may be used with Equal and Diff. +type Option interface { + // filter applies all filters and returns the option that remains. + // Each option may only read s.curPath and call s.callTTBFunc. + // + // An Options is returned only if multiple comparers or transformers + // can apply simultaneously and will only contain values of those types + // or sub-Options containing values of those types. + filter(s *state, t reflect.Type, vx, vy reflect.Value) applicableOption +} + +// applicableOption represents the following types: +// Fundamental: ignore | validator | *comparer | *transformer +// Grouping: Options +type applicableOption interface { + Option + + // apply executes the option, which may mutate s or panic. + apply(s *state, vx, vy reflect.Value) +} + +// coreOption represents the following types: +// Fundamental: ignore | validator | *comparer | *transformer +// Filters: *pathFilter | *valuesFilter +type coreOption interface { + Option + isCore() +} + +type core struct{} + +func (core) isCore() {} + +// Options is a list of Option values that also satisfies the Option interface. +// Helper comparison packages may return an Options value when packing multiple +// Option values into a single Option. When this package processes an Options, +// it will be implicitly expanded into a flat list. +// +// Applying a filter on an Options is equivalent to applying that same filter +// on all individual options held within. 
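+//
+// For example, a hypothetical helper may bundle commonly used options:
+//
+//	func defaultOptions() cmp.Options {
+//		return cmp.Options{cmpopts.EquateEmpty(), cmpopts.EquateNaNs()}
+//	}
+//
+// and the result may be passed to Equal or Diff as a single Option.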
+type Options []Option + +func (opts Options) filter(s *state, t reflect.Type, vx, vy reflect.Value) (out applicableOption) { + for _, opt := range opts { + switch opt := opt.filter(s, t, vx, vy); opt.(type) { + case ignore: + return ignore{} // Only ignore can short-circuit evaluation + case validator: + out = validator{} // Takes precedence over comparer or transformer + case *comparer, *transformer, Options: + switch out.(type) { + case nil: + out = opt + case validator: + // Keep validator + case *comparer, *transformer, Options: + out = Options{out, opt} // Conflicting comparers or transformers + } + } + } + return out +} + +func (opts Options) apply(s *state, _, _ reflect.Value) { + const warning = "ambiguous set of applicable options" + const help = "consider using filters to ensure at most one Comparer or Transformer may apply" + var ss []string + for _, opt := range flattenOptions(nil, opts) { + ss = append(ss, fmt.Sprint(opt)) + } + set := strings.Join(ss, "\n\t") + panic(fmt.Sprintf("%s at %#v:\n\t%s\n%s", warning, s.curPath, set, help)) +} + +func (opts Options) String() string { + var ss []string + for _, opt := range opts { + ss = append(ss, fmt.Sprint(opt)) + } + return fmt.Sprintf("Options{%s}", strings.Join(ss, ", ")) +} + +// FilterPath returns a new Option where opt is only evaluated if filter f +// returns true for the current Path in the value tree. +// +// This filter is called even if a slice element or map entry is missing and +// provides an opportunity to ignore such cases. The filter function must be +// symmetric such that the filter result is identical regardless of whether the +// missing value is from x or y. +// +// The option passed in may be an Ignore, Transformer, Comparer, Options, or +// a previously filtered Option. +func FilterPath(f func(Path) bool, opt Option) Option { + if f == nil { + panic("invalid path filter function") + } + if opt := normalizeOption(opt); opt != nil { + return &pathFilter{fnc: f, opt: opt} + } + return nil +} + +type pathFilter struct { + core + fnc func(Path) bool + opt Option +} + +func (f pathFilter) filter(s *state, t reflect.Type, vx, vy reflect.Value) applicableOption { + if f.fnc(s.curPath) { + return f.opt.filter(s, t, vx, vy) + } + return nil +} + +func (f pathFilter) String() string { + return fmt.Sprintf("FilterPath(%s, %v)", function.NameOf(reflect.ValueOf(f.fnc)), f.opt) +} + +// FilterValues returns a new Option where opt is only evaluated if filter f, +// which is a function of the form "func(T, T) bool", returns true for the +// current pair of values being compared. If either value is invalid or +// the type of the values is not assignable to T, then this filter implicitly +// returns false. +// +// The filter function must be +// symmetric (i.e., agnostic to the order of the inputs) and +// deterministic (i.e., produces the same result when given the same inputs). +// If T is an interface, it is possible that f is called with two values with +// different concrete types that both implement T. +// +// The option passed in may be an Ignore, Transformer, Comparer, Options, or +// a previously filtered Option. 
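+//
+// For example, a filter restricted to NaN float64 values combined with a
+// comparer that always reports true (similar in spirit to cmpopts.EquateNaNs):
+//
+//	cmp.FilterValues(func(x, y float64) bool {
+//		return math.IsNaN(x) && math.IsNaN(y)
+//	}, cmp.Comparer(func(_, _ float64) bool { return true }))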
+func FilterValues(f interface{}, opt Option) Option { + v := reflect.ValueOf(f) + if !function.IsType(v.Type(), function.ValueFilter) || v.IsNil() { + panic(fmt.Sprintf("invalid values filter function: %T", f)) + } + if opt := normalizeOption(opt); opt != nil { + vf := &valuesFilter{fnc: v, opt: opt} + if ti := v.Type().In(0); ti.Kind() != reflect.Interface || ti.NumMethod() > 0 { + vf.typ = ti + } + return vf + } + return nil +} + +type valuesFilter struct { + core + typ reflect.Type // T + fnc reflect.Value // func(T, T) bool + opt Option +} + +func (f valuesFilter) filter(s *state, t reflect.Type, vx, vy reflect.Value) applicableOption { + if !vx.IsValid() || !vx.CanInterface() || !vy.IsValid() || !vy.CanInterface() { + return nil + } + if (f.typ == nil || t.AssignableTo(f.typ)) && s.callTTBFunc(f.fnc, vx, vy) { + return f.opt.filter(s, t, vx, vy) + } + return nil +} + +func (f valuesFilter) String() string { + return fmt.Sprintf("FilterValues(%s, %v)", function.NameOf(f.fnc), f.opt) +} + +// Ignore is an Option that causes all comparisons to be ignored. +// This value is intended to be combined with FilterPath or FilterValues. +// It is an error to pass an unfiltered Ignore option to Equal. +func Ignore() Option { return ignore{} } + +type ignore struct{ core } + +func (ignore) isFiltered() bool { return false } +func (ignore) filter(_ *state, _ reflect.Type, _, _ reflect.Value) applicableOption { return ignore{} } +func (ignore) apply(s *state, _, _ reflect.Value) { s.report(true, reportByIgnore) } +func (ignore) String() string { return "Ignore()" } + +// validator is a sentinel Option type to indicate that some options could not +// be evaluated due to unexported fields, missing slice elements, or +// missing map entries. Both values are validator only for unexported fields. +type validator struct{ core } + +func (validator) filter(_ *state, _ reflect.Type, vx, vy reflect.Value) applicableOption { + if !vx.IsValid() || !vy.IsValid() { + return validator{} + } + if !vx.CanInterface() || !vy.CanInterface() { + return validator{} + } + return nil +} +func (validator) apply(s *state, vx, vy reflect.Value) { + // Implies missing slice element or map entry. + if !vx.IsValid() || !vy.IsValid() { + s.report(vx.IsValid() == vy.IsValid(), 0) + return + } + + // Unable to Interface implies unexported field without visibility access. + if !vx.CanInterface() || !vy.CanInterface() { + help := "consider using a custom Comparer; if you control the implementation of type, you can also consider using an Exporter, AllowUnexported, or cmpopts.IgnoreUnexported" + var name string + if t := s.curPath.Index(-2).Type(); t.Name() != "" { + // Named type with unexported fields. + name = fmt.Sprintf("%q.%v", t.PkgPath(), t.Name()) // e.g., "path/to/package".MyType + if _, ok := reflect.New(t).Interface().(error); ok { + help = "consider using cmpopts.EquateErrors to compare error values" + } + } else { + // Unnamed type with unexported fields. Derive PkgPath from field. + var pkgPath string + for i := 0; i < t.NumField() && pkgPath == ""; i++ { + pkgPath = t.Field(i).PkgPath + } + name = fmt.Sprintf("%q.(%v)", pkgPath, t.String()) // e.g., "path/to/package".(struct { a int }) + } + panic(fmt.Sprintf("cannot handle unexported field at %#v:\n\t%v\n%s", s.curPath, name, help)) + } + + panic("not reachable") +} + +// identRx represents a valid identifier according to the Go specification. 
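+// For example, "Sort" and "mypkg.SortInts" match identsRx below,
+// while "sort slice" and "3way" do not.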
+const identRx = `[_\p{L}][_\p{L}\p{N}]*` + +var identsRx = regexp.MustCompile(`^` + identRx + `(\.` + identRx + `)*$`) + +// Transformer returns an Option that applies a transformation function that +// converts values of a certain type into that of another. +// +// The transformer f must be a function "func(T) R" that converts values of +// type T to those of type R and is implicitly filtered to input values +// assignable to T. The transformer must not mutate T in any way. +// +// To help prevent some cases of infinite recursive cycles applying the +// same transform to the output of itself (e.g., in the case where the +// input and output types are the same), an implicit filter is added such that +// a transformer is applicable only if that exact transformer is not already +// in the tail of the Path since the last non-Transform step. +// For situations where the implicit filter is still insufficient, +// consider using cmpopts.AcyclicTransformer, which adds a filter +// to prevent the transformer from being recursively applied upon itself. +// +// The name is a user provided label that is used as the Transform.Name in the +// transformation PathStep (and eventually shown in the Diff output). +// The name must be a valid identifier or qualified identifier in Go syntax. +// If empty, an arbitrary name is used. +func Transformer(name string, f interface{}) Option { + v := reflect.ValueOf(f) + if !function.IsType(v.Type(), function.Transformer) || v.IsNil() { + panic(fmt.Sprintf("invalid transformer function: %T", f)) + } + if name == "" { + name = function.NameOf(v) + if !identsRx.MatchString(name) { + name = "λ" // Lambda-symbol as placeholder name + } + } else if !identsRx.MatchString(name) { + panic(fmt.Sprintf("invalid name: %q", name)) + } + tr := &transformer{name: name, fnc: reflect.ValueOf(f)} + if ti := v.Type().In(0); ti.Kind() != reflect.Interface || ti.NumMethod() > 0 { + tr.typ = ti + } + return tr +} + +type transformer struct { + core + name string + typ reflect.Type // T + fnc reflect.Value // func(T) R +} + +func (tr *transformer) isFiltered() bool { return tr.typ != nil } + +func (tr *transformer) filter(s *state, t reflect.Type, _, _ reflect.Value) applicableOption { + for i := len(s.curPath) - 1; i >= 0; i-- { + if t, ok := s.curPath[i].(Transform); !ok { + break // Hit most recent non-Transform step + } else if tr == t.trans { + return nil // Cannot directly use same Transform + } + } + if tr.typ == nil || t.AssignableTo(tr.typ) { + return tr + } + return nil +} + +func (tr *transformer) apply(s *state, vx, vy reflect.Value) { + step := Transform{&transform{pathStep{typ: tr.fnc.Type().Out(0)}, tr}} + vvx := s.callTRFunc(tr.fnc, vx, step) + vvy := s.callTRFunc(tr.fnc, vy, step) + step.vx, step.vy = vvx, vvy + s.compareAny(step) +} + +func (tr transformer) String() string { + return fmt.Sprintf("Transformer(%s, %s)", tr.name, function.NameOf(tr.fnc)) +} + +// Comparer returns an Option that determines whether two values are equal +// to each other. +// +// The comparer f must be a function "func(T, T) bool" and is implicitly +// filtered to input values assignable to T. If T is an interface, it is +// possible that f is called with two values of different concrete types that +// both implement T. 
+// +// The equality function must be: +// • Symmetric: equal(x, y) == equal(y, x) +// • Deterministic: equal(x, y) == equal(x, y) +// • Pure: equal(x, y) does not modify x or y +func Comparer(f interface{}) Option { + v := reflect.ValueOf(f) + if !function.IsType(v.Type(), function.Equal) || v.IsNil() { + panic(fmt.Sprintf("invalid comparer function: %T", f)) + } + cm := &comparer{fnc: v} + if ti := v.Type().In(0); ti.Kind() != reflect.Interface || ti.NumMethod() > 0 { + cm.typ = ti + } + return cm +} + +type comparer struct { + core + typ reflect.Type // T + fnc reflect.Value // func(T, T) bool +} + +func (cm *comparer) isFiltered() bool { return cm.typ != nil } + +func (cm *comparer) filter(_ *state, t reflect.Type, _, _ reflect.Value) applicableOption { + if cm.typ == nil || t.AssignableTo(cm.typ) { + return cm + } + return nil +} + +func (cm *comparer) apply(s *state, vx, vy reflect.Value) { + eq := s.callTTBFunc(cm.fnc, vx, vy) + s.report(eq, reportByFunc) +} + +func (cm comparer) String() string { + return fmt.Sprintf("Comparer(%s)", function.NameOf(cm.fnc)) +} + +// Exporter returns an Option that specifies whether Equal is allowed to +// introspect into the unexported fields of certain struct types. +// +// Users of this option must understand that comparing on unexported fields +// from external packages is not safe since changes in the internal +// implementation of some external package may cause the result of Equal +// to unexpectedly change. However, it may be valid to use this option on types +// defined in an internal package where the semantic meaning of an unexported +// field is in the control of the user. +// +// In many cases, a custom Comparer should be used instead that defines +// equality as a function of the public API of a type rather than the underlying +// unexported implementation. +// +// For example, the reflect.Type documentation defines equality to be determined +// by the == operator on the interface (essentially performing a shallow pointer +// comparison) and most attempts to compare *regexp.Regexp types are interested +// in only checking that the regular expression strings are equal. +// Both of these are accomplished using Comparers: +// +// Comparer(func(x, y reflect.Type) bool { return x == y }) +// Comparer(func(x, y *regexp.Regexp) bool { return x.String() == y.String() }) +// +// In other cases, the cmpopts.IgnoreUnexported option can be used to ignore +// all unexported fields on specified struct types. +func Exporter(f func(reflect.Type) bool) Option { + if !supportExporters { + panic("Exporter is not supported on purego builds") + } + return exporter(f) +} + +type exporter func(reflect.Type) bool + +func (exporter) filter(_ *state, _ reflect.Type, _, _ reflect.Value) applicableOption { + panic("not implemented") +} + +// AllowUnexported returns an Options that allows Equal to forcibly introspect +// unexported fields of the specified struct types. +// +// See Exporter for the proper use of this option. +func AllowUnexported(types ...interface{}) Option { + m := make(map[reflect.Type]bool) + for _, typ := range types { + t := reflect.TypeOf(typ) + if t.Kind() != reflect.Struct { + panic(fmt.Sprintf("invalid struct type: %T", typ)) + } + m[t] = true + } + return exporter(func(t reflect.Type) bool { return m[t] }) +} + +// Result represents the comparison result for a single node and +// is provided by cmp when calling Result (see Reporter). 
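+//
+// A minimal sketch of a Reporter implementation (see Reporter) consuming
+// Result values; the diffCounter type is hypothetical and for illustration
+// only:
+//
+//	type diffCounter struct {
+//		path  Path
+//		diffs int
+//	}
+//
+//	func (c *diffCounter) PushStep(ps PathStep) { c.path = append(c.path, ps) }
+//	func (c *diffCounter) PopStep()             { c.path = c.path[:len(c.path)-1] }
+//	func (c *diffCounter) Report(r Result) {
+//		if !r.Equal() {
+//			c.diffs++ // c.path identifies where the difference occurred
+//		}
+//	}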
+type Result struct { + _ [0]func() // Make Result incomparable + flags resultFlags +} + +// Equal reports whether the node was determined to be equal or not. +// As a special case, ignored nodes are considered equal. +func (r Result) Equal() bool { + return r.flags&(reportEqual|reportByIgnore) != 0 +} + +// ByIgnore reports whether the node is equal because it was ignored. +// This never reports true if Equal reports false. +func (r Result) ByIgnore() bool { + return r.flags&reportByIgnore != 0 +} + +// ByMethod reports whether the Equal method determined equality. +func (r Result) ByMethod() bool { + return r.flags&reportByMethod != 0 +} + +// ByFunc reports whether a Comparer function determined equality. +func (r Result) ByFunc() bool { + return r.flags&reportByFunc != 0 +} + +// ByCycle reports whether a reference cycle was detected. +func (r Result) ByCycle() bool { + return r.flags&reportByCycle != 0 +} + +type resultFlags uint + +const ( + _ resultFlags = (1 << iota) / 2 + + reportEqual + reportUnequal + reportByIgnore + reportByMethod + reportByFunc + reportByCycle +) + +// Reporter is an Option that can be passed to Equal. When Equal traverses +// the value trees, it calls PushStep as it descends into each node in the +// tree and PopStep as it ascend out of the node. The leaves of the tree are +// either compared (determined to be equal or not equal) or ignored and reported +// as such by calling the Report method. +func Reporter(r interface { + // PushStep is called when a tree-traversal operation is performed. + // The PathStep itself is only valid until the step is popped. + // The PathStep.Values are valid for the duration of the entire traversal + // and must not be mutated. + // + // Equal always calls PushStep at the start to provide an operation-less + // PathStep used to report the root values. + // + // Within a slice, the exact set of inserted, removed, or modified elements + // is unspecified and may change in future implementations. + // The entries of a map are iterated through in an unspecified order. + PushStep(PathStep) + + // Report is called exactly once on leaf nodes to report whether the + // comparison identified the node as equal, unequal, or ignored. + // A leaf node is one that is immediately preceded by and followed by + // a pair of PushStep and PopStep calls. + Report(Result) + + // PopStep ascends back up the value tree. + // There is always a matching pop call for every push call. + PopStep() +}) Option { + return reporter{r} +} + +type reporter struct{ reporterIface } +type reporterIface interface { + PushStep(PathStep) + Report(Result) + PopStep() +} + +func (reporter) filter(_ *state, _ reflect.Type, _, _ reflect.Value) applicableOption { + panic("not implemented") +} + +// normalizeOption normalizes the input options such that all Options groups +// are flattened and groups with a single element are reduced to that element. +// Only coreOptions and Options containing coreOptions are allowed. +func normalizeOption(src Option) Option { + switch opts := flattenOptions(nil, Options{src}); len(opts) { + case 0: + return nil + case 1: + return opts[0] + default: + return opts + } +} + +// flattenOptions copies all options in src to dst as a flat list. +// Only coreOptions and Options containing coreOptions are allowed. 
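+// For example, flattenOptions(nil, Options{Options{opt1, nil}, opt2})
+// returns Options{opt1, opt2}, assuming opt1 and opt2 are coreOptions.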
+func flattenOptions(dst, src Options) Options { + for _, opt := range src { + switch opt := opt.(type) { + case nil: + continue + case Options: + dst = flattenOptions(dst, opt) + case coreOption: + dst = append(dst, opt) + default: + panic(fmt.Sprintf("invalid option type: %T", opt)) + } + } + return dst +} diff --git a/vendor/github.com/google/go-cmp/cmp/path.go b/vendor/github.com/google/go-cmp/cmp/path.go new file mode 100644 index 000000000..f01eff318 --- /dev/null +++ b/vendor/github.com/google/go-cmp/cmp/path.go @@ -0,0 +1,378 @@ +// Copyright 2017, The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package cmp + +import ( + "fmt" + "reflect" + "strings" + "unicode" + "unicode/utf8" + + "github.com/google/go-cmp/cmp/internal/value" +) + +// Path is a list of PathSteps describing the sequence of operations to get +// from some root type to the current position in the value tree. +// The first Path element is always an operation-less PathStep that exists +// simply to identify the initial type. +// +// When traversing structs with embedded structs, the embedded struct will +// always be accessed as a field before traversing the fields of the +// embedded struct themselves. That is, an exported field from the +// embedded struct will never be accessed directly from the parent struct. +type Path []PathStep + +// PathStep is a union-type for specific operations to traverse +// a value's tree structure. Users of this package never need to implement +// these types as values of this type will be returned by this package. +// +// Implementations of this interface are +// StructField, SliceIndex, MapIndex, Indirect, TypeAssertion, and Transform. +type PathStep interface { + String() string + + // Type is the resulting type after performing the path step. + Type() reflect.Type + + // Values is the resulting values after performing the path step. + // The type of each valid value is guaranteed to be identical to Type. + // + // In some cases, one or both may be invalid or have restrictions: + // • For StructField, both are not interface-able if the current field + // is unexported and the struct type is not explicitly permitted by + // an Exporter to traverse unexported fields. + // • For SliceIndex, one may be invalid if an element is missing from + // either the x or y slice. + // • For MapIndex, one may be invalid if an entry is missing from + // either the x or y map. + // + // The provided values must not be mutated. + Values() (vx, vy reflect.Value) +} + +var ( + _ PathStep = StructField{} + _ PathStep = SliceIndex{} + _ PathStep = MapIndex{} + _ PathStep = Indirect{} + _ PathStep = TypeAssertion{} + _ PathStep = Transform{} +) + +func (pa *Path) push(s PathStep) { + *pa = append(*pa, s) +} + +func (pa *Path) pop() { + *pa = (*pa)[:len(*pa)-1] +} + +// Last returns the last PathStep in the Path. +// If the path is empty, this returns a non-nil PathStep that reports a nil Type. +func (pa Path) Last() PathStep { + return pa.Index(-1) +} + +// Index returns the ith step in the Path and supports negative indexing. +// A negative index starts counting from the tail of the Path such that -1 +// refers to the last step, -2 refers to the second-to-last step, and so on. +// If index is invalid, this returns a non-nil PathStep that reports a nil Type. 
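+//
+// For example, pa.Index(-1) is equivalent to pa.Last(), while an
+// out-of-range index such as pa.Index(len(pa)) yields a PathStep with a
+// nil Type.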
+func (pa Path) Index(i int) PathStep { + if i < 0 { + i = len(pa) + i + } + if i < 0 || i >= len(pa) { + return pathStep{} + } + return pa[i] +} + +// String returns the simplified path to a node. +// The simplified path only contains struct field accesses. +// +// For example: +// MyMap.MySlices.MyField +func (pa Path) String() string { + var ss []string + for _, s := range pa { + if _, ok := s.(StructField); ok { + ss = append(ss, s.String()) + } + } + return strings.TrimPrefix(strings.Join(ss, ""), ".") +} + +// GoString returns the path to a specific node using Go syntax. +// +// For example: +// (*root.MyMap["key"].(*mypkg.MyStruct).MySlices)[2][3].MyField +func (pa Path) GoString() string { + var ssPre, ssPost []string + var numIndirect int + for i, s := range pa { + var nextStep PathStep + if i+1 < len(pa) { + nextStep = pa[i+1] + } + switch s := s.(type) { + case Indirect: + numIndirect++ + pPre, pPost := "(", ")" + switch nextStep.(type) { + case Indirect: + continue // Next step is indirection, so let them batch up + case StructField: + numIndirect-- // Automatic indirection on struct fields + case nil: + pPre, pPost = "", "" // Last step; no need for parenthesis + } + if numIndirect > 0 { + ssPre = append(ssPre, pPre+strings.Repeat("*", numIndirect)) + ssPost = append(ssPost, pPost) + } + numIndirect = 0 + continue + case Transform: + ssPre = append(ssPre, s.trans.name+"(") + ssPost = append(ssPost, ")") + continue + } + ssPost = append(ssPost, s.String()) + } + for i, j := 0, len(ssPre)-1; i < j; i, j = i+1, j-1 { + ssPre[i], ssPre[j] = ssPre[j], ssPre[i] + } + return strings.Join(ssPre, "") + strings.Join(ssPost, "") +} + +type pathStep struct { + typ reflect.Type + vx, vy reflect.Value +} + +func (ps pathStep) Type() reflect.Type { return ps.typ } +func (ps pathStep) Values() (vx, vy reflect.Value) { return ps.vx, ps.vy } +func (ps pathStep) String() string { + if ps.typ == nil { + return "" + } + s := ps.typ.String() + if s == "" || strings.ContainsAny(s, "{}\n") { + return "root" // Type too simple or complex to print + } + return fmt.Sprintf("{%s}", s) +} + +// StructField represents a struct field access on a field called Name. +type StructField struct{ *structField } +type structField struct { + pathStep + name string + idx int + + // These fields are used for forcibly accessing an unexported field. + // pvx, pvy, and field are only valid if unexported is true. + unexported bool + mayForce bool // Forcibly allow visibility + paddr bool // Was parent addressable? + pvx, pvy reflect.Value // Parent values (always addressible) + field reflect.StructField // Field information +} + +func (sf StructField) Type() reflect.Type { return sf.typ } +func (sf StructField) Values() (vx, vy reflect.Value) { + if !sf.unexported { + return sf.vx, sf.vy // CanInterface reports true + } + + // Forcibly obtain read-write access to an unexported struct field. + if sf.mayForce { + vx = retrieveUnexportedField(sf.pvx, sf.field, sf.paddr) + vy = retrieveUnexportedField(sf.pvy, sf.field, sf.paddr) + return vx, vy // CanInterface reports true + } + return sf.vx, sf.vy // CanInterface reports false +} +func (sf StructField) String() string { return fmt.Sprintf(".%s", sf.name) } + +// Name is the field name. +func (sf StructField) Name() string { return sf.name } + +// Index is the index of the field in the parent struct type. +// See reflect.Type.Field. +func (sf StructField) Index() int { return sf.idx } + +// SliceIndex is an index operation on a slice or array at some index Key. 
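+//
+// A sketch of how SliceIndex might be used in a path filter to ignore the
+// first element of any slice or array (for illustration only):
+//
+//	FilterPath(func(p Path) bool {
+//		si, ok := p.Last().(SliceIndex)
+//		return ok && si.Key() == 0
+//	}, Ignore())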
+type SliceIndex struct{ *sliceIndex } +type sliceIndex struct { + pathStep + xkey, ykey int + isSlice bool // False for reflect.Array +} + +func (si SliceIndex) Type() reflect.Type { return si.typ } +func (si SliceIndex) Values() (vx, vy reflect.Value) { return si.vx, si.vy } +func (si SliceIndex) String() string { + switch { + case si.xkey == si.ykey: + return fmt.Sprintf("[%d]", si.xkey) + case si.ykey == -1: + // [5->?] means "I don't know where X[5] went" + return fmt.Sprintf("[%d->?]", si.xkey) + case si.xkey == -1: + // [?->3] means "I don't know where Y[3] came from" + return fmt.Sprintf("[?->%d]", si.ykey) + default: + // [5->3] means "X[5] moved to Y[3]" + return fmt.Sprintf("[%d->%d]", si.xkey, si.ykey) + } +} + +// Key is the index key; it may return -1 if in a split state +func (si SliceIndex) Key() int { + if si.xkey != si.ykey { + return -1 + } + return si.xkey +} + +// SplitKeys are the indexes for indexing into slices in the +// x and y values, respectively. These indexes may differ due to the +// insertion or removal of an element in one of the slices, causing +// all of the indexes to be shifted. If an index is -1, then that +// indicates that the element does not exist in the associated slice. +// +// Key is guaranteed to return -1 if and only if the indexes returned +// by SplitKeys are not the same. SplitKeys will never return -1 for +// both indexes. +func (si SliceIndex) SplitKeys() (ix, iy int) { return si.xkey, si.ykey } + +// MapIndex is an index operation on a map at some index Key. +type MapIndex struct{ *mapIndex } +type mapIndex struct { + pathStep + key reflect.Value +} + +func (mi MapIndex) Type() reflect.Type { return mi.typ } +func (mi MapIndex) Values() (vx, vy reflect.Value) { return mi.vx, mi.vy } +func (mi MapIndex) String() string { return fmt.Sprintf("[%#v]", mi.key) } + +// Key is the value of the map key. +func (mi MapIndex) Key() reflect.Value { return mi.key } + +// Indirect represents pointer indirection on the parent type. +type Indirect struct{ *indirect } +type indirect struct { + pathStep +} + +func (in Indirect) Type() reflect.Type { return in.typ } +func (in Indirect) Values() (vx, vy reflect.Value) { return in.vx, in.vy } +func (in Indirect) String() string { return "*" } + +// TypeAssertion represents a type assertion on an interface. +type TypeAssertion struct{ *typeAssertion } +type typeAssertion struct { + pathStep +} + +func (ta TypeAssertion) Type() reflect.Type { return ta.typ } +func (ta TypeAssertion) Values() (vx, vy reflect.Value) { return ta.vx, ta.vy } +func (ta TypeAssertion) String() string { return fmt.Sprintf(".(%v)", ta.typ) } + +// Transform is a transformation from the parent type to the current type. +type Transform struct{ *transform } +type transform struct { + pathStep + trans *transformer +} + +func (tf Transform) Type() reflect.Type { return tf.typ } +func (tf Transform) Values() (vx, vy reflect.Value) { return tf.vx, tf.vy } +func (tf Transform) String() string { return fmt.Sprintf("%s()", tf.trans.name) } + +// Name is the name of the Transformer. +func (tf Transform) Name() string { return tf.trans.name } + +// Func is the function pointer to the transformer function. +func (tf Transform) Func() reflect.Value { return tf.trans.fnc } + +// Option returns the originally constructed Transformer option. +// The == operator can be used to detect the exact option used. 
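+//
+// For example (a sketch; trimSpace is assumed to be an Option previously
+// constructed with Transformer):
+//
+//	FilterPath(func(p Path) bool {
+//		tf, ok := p.Last().(Transform)
+//		return ok && tf.Option() == trimSpace
+//	}, Ignore())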
+func (tf Transform) Option() Option { return tf.trans } + +// pointerPath represents a dual-stack of pointers encountered when +// recursively traversing the x and y values. This data structure supports +// detection of cycles and determining whether the cycles are equal. +// In Go, cycles can occur via pointers, slices, and maps. +// +// The pointerPath uses a map to represent a stack; where descension into a +// pointer pushes the address onto the stack, and ascension from a pointer +// pops the address from the stack. Thus, when traversing into a pointer from +// reflect.Ptr, reflect.Slice element, or reflect.Map, we can detect cycles +// by checking whether the pointer has already been visited. The cycle detection +// uses a separate stack for the x and y values. +// +// If a cycle is detected we need to determine whether the two pointers +// should be considered equal. The definition of equality chosen by Equal +// requires two graphs to have the same structure. To determine this, both the +// x and y values must have a cycle where the previous pointers were also +// encountered together as a pair. +// +// Semantically, this is equivalent to augmenting Indirect, SliceIndex, and +// MapIndex with pointer information for the x and y values. +// Suppose px and py are two pointers to compare, we then search the +// Path for whether px was ever encountered in the Path history of x, and +// similarly so with py. If either side has a cycle, the comparison is only +// equal if both px and py have a cycle resulting from the same PathStep. +// +// Using a map as a stack is more performant as we can perform cycle detection +// in O(1) instead of O(N) where N is len(Path). +type pointerPath struct { + // mx is keyed by x pointers, where the value is the associated y pointer. + mx map[value.Pointer]value.Pointer + // my is keyed by y pointers, where the value is the associated x pointer. + my map[value.Pointer]value.Pointer +} + +func (p *pointerPath) Init() { + p.mx = make(map[value.Pointer]value.Pointer) + p.my = make(map[value.Pointer]value.Pointer) +} + +// Push indicates intent to descend into pointers vx and vy where +// visited reports whether either has been seen before. If visited before, +// equal reports whether both pointers were encountered together. +// Pop must be called if and only if the pointers were never visited. +// +// The pointers vx and vy must be a reflect.Ptr, reflect.Slice, or reflect.Map +// and be non-nil. +func (p pointerPath) Push(vx, vy reflect.Value) (equal, visited bool) { + px := value.PointerOf(vx) + py := value.PointerOf(vy) + _, ok1 := p.mx[px] + _, ok2 := p.my[py] + if ok1 || ok2 { + equal = p.mx[px] == py && p.my[py] == px // Pointers paired together + return equal, true + } + p.mx[px] = py + p.my[py] = px + return false, false +} + +// Pop ascends from pointers vx and vy. +func (p pointerPath) Pop(vx, vy reflect.Value) { + delete(p.mx, value.PointerOf(vx)) + delete(p.my, value.PointerOf(vy)) +} + +// isExported reports whether the identifier is exported. +func isExported(id string) bool { + r, _ := utf8.DecodeRuneInString(id) + return unicode.IsUpper(r) +} diff --git a/vendor/github.com/google/go-cmp/cmp/report.go b/vendor/github.com/google/go-cmp/cmp/report.go new file mode 100644 index 000000000..f43cd12eb --- /dev/null +++ b/vendor/github.com/google/go-cmp/cmp/report.go @@ -0,0 +1,54 @@ +// Copyright 2017, The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package cmp + +// defaultReporter implements the reporter interface. +// +// As Equal serially calls the PushStep, Report, and PopStep methods, the +// defaultReporter constructs a tree-based representation of the compared value +// and the result of each comparison (see valueNode). +// +// When the String method is called, the FormatDiff method transforms the +// valueNode tree into a textNode tree, which is a tree-based representation +// of the textual output (see textNode). +// +// Lastly, the textNode.String method produces the final report as a string. +type defaultReporter struct { + root *valueNode + curr *valueNode +} + +func (r *defaultReporter) PushStep(ps PathStep) { + r.curr = r.curr.PushStep(ps) + if r.root == nil { + r.root = r.curr + } +} +func (r *defaultReporter) Report(rs Result) { + r.curr.Report(rs) +} +func (r *defaultReporter) PopStep() { + r.curr = r.curr.PopStep() +} + +// String provides a full report of the differences detected as a structured +// literal in pseudo-Go syntax. String may only be called after the entire tree +// has been traversed. +func (r *defaultReporter) String() string { + assert(r.root != nil && r.curr == nil) + if r.root.NumDiff == 0 { + return "" + } + ptrs := new(pointerReferences) + text := formatOptions{}.FormatDiff(r.root, ptrs) + resolveReferences(text) + return text.String() +} + +func assert(ok bool) { + if !ok { + panic("assertion failure") + } +} diff --git a/vendor/github.com/google/go-cmp/cmp/report_compare.go b/vendor/github.com/google/go-cmp/cmp/report_compare.go new file mode 100644 index 000000000..104bb3053 --- /dev/null +++ b/vendor/github.com/google/go-cmp/cmp/report_compare.go @@ -0,0 +1,432 @@ +// Copyright 2019, The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package cmp + +import ( + "fmt" + "reflect" + + "github.com/google/go-cmp/cmp/internal/value" +) + +// numContextRecords is the number of surrounding equal records to print. +const numContextRecords = 2 + +type diffMode byte + +const ( + diffUnknown diffMode = 0 + diffIdentical diffMode = ' ' + diffRemoved diffMode = '-' + diffInserted diffMode = '+' +) + +type typeMode int + +const ( + // emitType always prints the type. + emitType typeMode = iota + // elideType never prints the type. + elideType + // autoType prints the type only for composite kinds + // (i.e., structs, slices, arrays, and maps). + autoType +) + +type formatOptions struct { + // DiffMode controls the output mode of FormatDiff. + // + // If diffUnknown, then produce a diff of the x and y values. + // If diffIdentical, then emit values as if they were equal. + // If diffRemoved, then only emit x values (ignoring y values). + // If diffInserted, then only emit y values (ignoring x values). + DiffMode diffMode + + // TypeMode controls whether to print the type for the current node. + // + // As a general rule of thumb, we always print the type of the next node + // after an interface, and always elide the type of the next node after + // a slice or map node. + TypeMode typeMode + + // formatValueOptions are options specific to printing reflect.Values. 
+ formatValueOptions +} + +func (opts formatOptions) WithDiffMode(d diffMode) formatOptions { + opts.DiffMode = d + return opts +} +func (opts formatOptions) WithTypeMode(t typeMode) formatOptions { + opts.TypeMode = t + return opts +} +func (opts formatOptions) WithVerbosity(level int) formatOptions { + opts.VerbosityLevel = level + opts.LimitVerbosity = true + return opts +} +func (opts formatOptions) verbosity() uint { + switch { + case opts.VerbosityLevel < 0: + return 0 + case opts.VerbosityLevel > 16: + return 16 // some reasonable maximum to avoid shift overflow + default: + return uint(opts.VerbosityLevel) + } +} + +const maxVerbosityPreset = 6 + +// verbosityPreset modifies the verbosity settings given an index +// between 0 and maxVerbosityPreset, inclusive. +func verbosityPreset(opts formatOptions, i int) formatOptions { + opts.VerbosityLevel = int(opts.verbosity()) + 2*i + if i > 0 { + opts.AvoidStringer = true + } + if i >= maxVerbosityPreset { + opts.PrintAddresses = true + opts.QualifiedNames = true + } + return opts +} + +// FormatDiff converts a valueNode tree into a textNode tree, where the later +// is a textual representation of the differences detected in the former. +func (opts formatOptions) FormatDiff(v *valueNode, ptrs *pointerReferences) (out textNode) { + if opts.DiffMode == diffIdentical { + opts = opts.WithVerbosity(1) + } else if opts.verbosity() < 3 { + opts = opts.WithVerbosity(3) + } + + // Check whether we have specialized formatting for this node. + // This is not necessary, but helpful for producing more readable outputs. + if opts.CanFormatDiffSlice(v) { + return opts.FormatDiffSlice(v) + } + + var parentKind reflect.Kind + if v.parent != nil && v.parent.TransformerName == "" { + parentKind = v.parent.Type.Kind() + } + + // For leaf nodes, format the value based on the reflect.Values alone. + if v.MaxDepth == 0 { + switch opts.DiffMode { + case diffUnknown, diffIdentical: + // Format Equal. + if v.NumDiff == 0 { + outx := opts.FormatValue(v.ValueX, parentKind, ptrs) + outy := opts.FormatValue(v.ValueY, parentKind, ptrs) + if v.NumIgnored > 0 && v.NumSame == 0 { + return textEllipsis + } else if outx.Len() < outy.Len() { + return outx + } else { + return outy + } + } + + // Format unequal. + assert(opts.DiffMode == diffUnknown) + var list textList + outx := opts.WithTypeMode(elideType).FormatValue(v.ValueX, parentKind, ptrs) + outy := opts.WithTypeMode(elideType).FormatValue(v.ValueY, parentKind, ptrs) + for i := 0; i <= maxVerbosityPreset && outx != nil && outy != nil && outx.Equal(outy); i++ { + opts2 := verbosityPreset(opts, i).WithTypeMode(elideType) + outx = opts2.FormatValue(v.ValueX, parentKind, ptrs) + outy = opts2.FormatValue(v.ValueY, parentKind, ptrs) + } + if outx != nil { + list = append(list, textRecord{Diff: '-', Value: outx}) + } + if outy != nil { + list = append(list, textRecord{Diff: '+', Value: outy}) + } + return opts.WithTypeMode(emitType).FormatType(v.Type, list) + case diffRemoved: + return opts.FormatValue(v.ValueX, parentKind, ptrs) + case diffInserted: + return opts.FormatValue(v.ValueY, parentKind, ptrs) + default: + panic("invalid diff mode") + } + } + + // Register slice element to support cycle detection. + if parentKind == reflect.Slice { + ptrRefs := ptrs.PushPair(v.ValueX, v.ValueY, opts.DiffMode, true) + defer ptrs.Pop() + defer func() { out = wrapTrunkReferences(ptrRefs, out) }() + } + + // Descend into the child value node. 
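+	// A transformed node is wrapped as Inverse(Name, ...) to make clear that
+	// the printed value is the transformer's output rather than its input.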
+ if v.TransformerName != "" { + out := opts.WithTypeMode(emitType).FormatDiff(v.Value, ptrs) + out = &textWrap{Prefix: "Inverse(" + v.TransformerName + ", ", Value: out, Suffix: ")"} + return opts.FormatType(v.Type, out) + } else { + switch k := v.Type.Kind(); k { + case reflect.Struct, reflect.Array, reflect.Slice: + out = opts.formatDiffList(v.Records, k, ptrs) + out = opts.FormatType(v.Type, out) + case reflect.Map: + // Register map to support cycle detection. + ptrRefs := ptrs.PushPair(v.ValueX, v.ValueY, opts.DiffMode, false) + defer ptrs.Pop() + + out = opts.formatDiffList(v.Records, k, ptrs) + out = wrapTrunkReferences(ptrRefs, out) + out = opts.FormatType(v.Type, out) + case reflect.Ptr: + // Register pointer to support cycle detection. + ptrRefs := ptrs.PushPair(v.ValueX, v.ValueY, opts.DiffMode, false) + defer ptrs.Pop() + + out = opts.FormatDiff(v.Value, ptrs) + out = wrapTrunkReferences(ptrRefs, out) + out = &textWrap{Prefix: "&", Value: out} + case reflect.Interface: + out = opts.WithTypeMode(emitType).FormatDiff(v.Value, ptrs) + default: + panic(fmt.Sprintf("%v cannot have children", k)) + } + return out + } +} + +func (opts formatOptions) formatDiffList(recs []reportRecord, k reflect.Kind, ptrs *pointerReferences) textNode { + // Derive record name based on the data structure kind. + var name string + var formatKey func(reflect.Value) string + switch k { + case reflect.Struct: + name = "field" + opts = opts.WithTypeMode(autoType) + formatKey = func(v reflect.Value) string { return v.String() } + case reflect.Slice, reflect.Array: + name = "element" + opts = opts.WithTypeMode(elideType) + formatKey = func(reflect.Value) string { return "" } + case reflect.Map: + name = "entry" + opts = opts.WithTypeMode(elideType) + formatKey = func(v reflect.Value) string { return formatMapKey(v, false, ptrs) } + } + + maxLen := -1 + if opts.LimitVerbosity { + if opts.DiffMode == diffIdentical { + maxLen = ((1 << opts.verbosity()) >> 1) << 2 // 0, 4, 8, 16, 32, etc... + } else { + maxLen = (1 << opts.verbosity()) << 1 // 2, 4, 8, 16, 32, 64, etc... + } + opts.VerbosityLevel-- + } + + // Handle unification. + switch opts.DiffMode { + case diffIdentical, diffRemoved, diffInserted: + var list textList + var deferredEllipsis bool // Add final "..." to indicate records were dropped + for _, r := range recs { + if len(list) == maxLen { + deferredEllipsis = true + break + } + + // Elide struct fields that are zero value. + if k == reflect.Struct { + var isZero bool + switch opts.DiffMode { + case diffIdentical: + isZero = value.IsZero(r.Value.ValueX) || value.IsZero(r.Value.ValueY) + case diffRemoved: + isZero = value.IsZero(r.Value.ValueX) + case diffInserted: + isZero = value.IsZero(r.Value.ValueY) + } + if isZero { + continue + } + } + // Elide ignored nodes. + if r.Value.NumIgnored > 0 && r.Value.NumSame+r.Value.NumDiff == 0 { + deferredEllipsis = !(k == reflect.Slice || k == reflect.Array) + if !deferredEllipsis { + list.AppendEllipsis(diffStats{}) + } + continue + } + if out := opts.FormatDiff(r.Value, ptrs); out != nil { + list = append(list, textRecord{Key: formatKey(r.Key), Value: out}) + } + } + if deferredEllipsis { + list.AppendEllipsis(diffStats{}) + } + return &textWrap{Prefix: "{", Value: list, Suffix: "}"} + case diffUnknown: + default: + panic("invalid diff mode") + } + + // Handle differencing. 
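+	// Adjacent records are coalesced into equal and unequal groups; up to
+	// numContextRecords equal records are kept as context around each
+	// difference, and longer equal runs are elided with an ellipsis.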
+ var numDiffs int + var list textList + var keys []reflect.Value // invariant: len(list) == len(keys) + groups := coalesceAdjacentRecords(name, recs) + maxGroup := diffStats{Name: name} + for i, ds := range groups { + if maxLen >= 0 && numDiffs >= maxLen { + maxGroup = maxGroup.Append(ds) + continue + } + + // Handle equal records. + if ds.NumDiff() == 0 { + // Compute the number of leading and trailing records to print. + var numLo, numHi int + numEqual := ds.NumIgnored + ds.NumIdentical + for numLo < numContextRecords && numLo+numHi < numEqual && i != 0 { + if r := recs[numLo].Value; r.NumIgnored > 0 && r.NumSame+r.NumDiff == 0 { + break + } + numLo++ + } + for numHi < numContextRecords && numLo+numHi < numEqual && i != len(groups)-1 { + if r := recs[numEqual-numHi-1].Value; r.NumIgnored > 0 && r.NumSame+r.NumDiff == 0 { + break + } + numHi++ + } + if numEqual-(numLo+numHi) == 1 && ds.NumIgnored == 0 { + numHi++ // Avoid pointless coalescing of a single equal record + } + + // Format the equal values. + for _, r := range recs[:numLo] { + out := opts.WithDiffMode(diffIdentical).FormatDiff(r.Value, ptrs) + list = append(list, textRecord{Key: formatKey(r.Key), Value: out}) + keys = append(keys, r.Key) + } + if numEqual > numLo+numHi { + ds.NumIdentical -= numLo + numHi + list.AppendEllipsis(ds) + for len(keys) < len(list) { + keys = append(keys, reflect.Value{}) + } + } + for _, r := range recs[numEqual-numHi : numEqual] { + out := opts.WithDiffMode(diffIdentical).FormatDiff(r.Value, ptrs) + list = append(list, textRecord{Key: formatKey(r.Key), Value: out}) + keys = append(keys, r.Key) + } + recs = recs[numEqual:] + continue + } + + // Handle unequal records. + for _, r := range recs[:ds.NumDiff()] { + switch { + case opts.CanFormatDiffSlice(r.Value): + out := opts.FormatDiffSlice(r.Value) + list = append(list, textRecord{Key: formatKey(r.Key), Value: out}) + keys = append(keys, r.Key) + case r.Value.NumChildren == r.Value.MaxDepth: + outx := opts.WithDiffMode(diffRemoved).FormatDiff(r.Value, ptrs) + outy := opts.WithDiffMode(diffInserted).FormatDiff(r.Value, ptrs) + for i := 0; i <= maxVerbosityPreset && outx != nil && outy != nil && outx.Equal(outy); i++ { + opts2 := verbosityPreset(opts, i) + outx = opts2.WithDiffMode(diffRemoved).FormatDiff(r.Value, ptrs) + outy = opts2.WithDiffMode(diffInserted).FormatDiff(r.Value, ptrs) + } + if outx != nil { + list = append(list, textRecord{Diff: diffRemoved, Key: formatKey(r.Key), Value: outx}) + keys = append(keys, r.Key) + } + if outy != nil { + list = append(list, textRecord{Diff: diffInserted, Key: formatKey(r.Key), Value: outy}) + keys = append(keys, r.Key) + } + default: + out := opts.FormatDiff(r.Value, ptrs) + list = append(list, textRecord{Key: formatKey(r.Key), Value: out}) + keys = append(keys, r.Key) + } + } + recs = recs[ds.NumDiff():] + numDiffs += ds.NumDiff() + } + if maxGroup.IsZero() { + assert(len(recs) == 0) + } else { + list.AppendEllipsis(maxGroup) + for len(keys) < len(list) { + keys = append(keys, reflect.Value{}) + } + } + assert(len(list) == len(keys)) + + // For maps, the default formatting logic uses fmt.Stringer which may + // produce ambiguous output. Avoid calling String to disambiguate. 
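+	// For example, two distinct keys whose String methods render identically
+	// would otherwise be indistinguishable in the output, so such keys are
+	// reformatted with disambiguation enabled.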
+ if k == reflect.Map { + var ambiguous bool + seenKeys := map[string]reflect.Value{} + for i, currKey := range keys { + if currKey.IsValid() { + strKey := list[i].Key + prevKey, seen := seenKeys[strKey] + if seen && prevKey.CanInterface() && currKey.CanInterface() { + ambiguous = prevKey.Interface() != currKey.Interface() + if ambiguous { + break + } + } + seenKeys[strKey] = currKey + } + } + if ambiguous { + for i, k := range keys { + if k.IsValid() { + list[i].Key = formatMapKey(k, true, ptrs) + } + } + } + } + + return &textWrap{Prefix: "{", Value: list, Suffix: "}"} +} + +// coalesceAdjacentRecords coalesces the list of records into groups of +// adjacent equal, or unequal counts. +func coalesceAdjacentRecords(name string, recs []reportRecord) (groups []diffStats) { + var prevCase int // Arbitrary index into which case last occurred + lastStats := func(i int) *diffStats { + if prevCase != i { + groups = append(groups, diffStats{Name: name}) + prevCase = i + } + return &groups[len(groups)-1] + } + for _, r := range recs { + switch rv := r.Value; { + case rv.NumIgnored > 0 && rv.NumSame+rv.NumDiff == 0: + lastStats(1).NumIgnored++ + case rv.NumDiff == 0: + lastStats(1).NumIdentical++ + case rv.NumDiff > 0 && !rv.ValueY.IsValid(): + lastStats(2).NumRemoved++ + case rv.NumDiff > 0 && !rv.ValueX.IsValid(): + lastStats(2).NumInserted++ + default: + lastStats(2).NumModified++ + } + } + return groups +} diff --git a/vendor/github.com/google/go-cmp/cmp/report_references.go b/vendor/github.com/google/go-cmp/cmp/report_references.go new file mode 100644 index 000000000..be31b33a9 --- /dev/null +++ b/vendor/github.com/google/go-cmp/cmp/report_references.go @@ -0,0 +1,264 @@ +// Copyright 2020, The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package cmp + +import ( + "fmt" + "reflect" + "strings" + + "github.com/google/go-cmp/cmp/internal/flags" + "github.com/google/go-cmp/cmp/internal/value" +) + +const ( + pointerDelimPrefix = "⟪" + pointerDelimSuffix = "⟫" +) + +// formatPointer prints the address of the pointer. +func formatPointer(p value.Pointer, withDelims bool) string { + v := p.Uintptr() + if flags.Deterministic { + v = 0xdeadf00f // Only used for stable testing purposes + } + if withDelims { + return pointerDelimPrefix + formatHex(uint64(v)) + pointerDelimSuffix + } + return formatHex(uint64(v)) +} + +// pointerReferences is a stack of pointers visited so far. +type pointerReferences [][2]value.Pointer + +func (ps *pointerReferences) PushPair(vx, vy reflect.Value, d diffMode, deref bool) (pp [2]value.Pointer) { + if deref && vx.IsValid() { + vx = vx.Addr() + } + if deref && vy.IsValid() { + vy = vy.Addr() + } + switch d { + case diffUnknown, diffIdentical: + pp = [2]value.Pointer{value.PointerOf(vx), value.PointerOf(vy)} + case diffRemoved: + pp = [2]value.Pointer{value.PointerOf(vx), value.Pointer{}} + case diffInserted: + pp = [2]value.Pointer{value.Pointer{}, value.PointerOf(vy)} + } + *ps = append(*ps, pp) + return pp +} + +func (ps *pointerReferences) Push(v reflect.Value) (p value.Pointer, seen bool) { + p = value.PointerOf(v) + for _, pp := range *ps { + if p == pp[0] || p == pp[1] { + return p, true + } + } + *ps = append(*ps, [2]value.Pointer{p, p}) + return p, false +} + +func (ps *pointerReferences) Pop() { + *ps = (*ps)[:len(*ps)-1] +} + +// trunkReferences is metadata for a textNode indicating that the sub-tree +// represents the value for either pointer in a pair of references. 
+type trunkReferences struct{ pp [2]value.Pointer } + +// trunkReference is metadata for a textNode indicating that the sub-tree +// represents the value for the given pointer reference. +type trunkReference struct{ p value.Pointer } + +// leafReference is metadata for a textNode indicating that the value is +// truncated as it refers to another part of the tree (i.e., a trunk). +type leafReference struct{ p value.Pointer } + +func wrapTrunkReferences(pp [2]value.Pointer, s textNode) textNode { + switch { + case pp[0].IsNil(): + return &textWrap{Value: s, Metadata: trunkReference{pp[1]}} + case pp[1].IsNil(): + return &textWrap{Value: s, Metadata: trunkReference{pp[0]}} + case pp[0] == pp[1]: + return &textWrap{Value: s, Metadata: trunkReference{pp[0]}} + default: + return &textWrap{Value: s, Metadata: trunkReferences{pp}} + } +} +func wrapTrunkReference(p value.Pointer, printAddress bool, s textNode) textNode { + var prefix string + if printAddress { + prefix = formatPointer(p, true) + } + return &textWrap{Prefix: prefix, Value: s, Metadata: trunkReference{p}} +} +func makeLeafReference(p value.Pointer, printAddress bool) textNode { + out := &textWrap{Prefix: "(", Value: textEllipsis, Suffix: ")"} + var prefix string + if printAddress { + prefix = formatPointer(p, true) + } + return &textWrap{Prefix: prefix, Value: out, Metadata: leafReference{p}} +} + +// resolveReferences walks the textNode tree searching for any leaf reference +// metadata and resolves each against the corresponding trunk references. +// Since pointer addresses in memory are not particularly readable to the user, +// it replaces each pointer value with an arbitrary and unique reference ID. +func resolveReferences(s textNode) { + var walkNodes func(textNode, func(textNode)) + walkNodes = func(s textNode, f func(textNode)) { + f(s) + switch s := s.(type) { + case *textWrap: + walkNodes(s.Value, f) + case textList: + for _, r := range s { + walkNodes(r.Value, f) + } + } + } + + // Collect all trunks and leaves with reference metadata. + var trunks, leaves []*textWrap + walkNodes(s, func(s textNode) { + if s, ok := s.(*textWrap); ok { + switch s.Metadata.(type) { + case leafReference: + leaves = append(leaves, s) + case trunkReference, trunkReferences: + trunks = append(trunks, s) + } + } + }) + + // No leaf references to resolve. + if len(leaves) == 0 { + return + } + + // Collect the set of all leaf references to resolve. + leafPtrs := make(map[value.Pointer]bool) + for _, leaf := range leaves { + leafPtrs[leaf.Metadata.(leafReference).p] = true + } + + // Collect the set of trunk pointers that are always paired together. + // This allows us to assign a single ID to both pointers for brevity. + // If a pointer in a pair ever occurs by itself or as a different pair, + // then the pair is broken. + pairedTrunkPtrs := make(map[value.Pointer]value.Pointer) + unpair := func(p value.Pointer) { + if !pairedTrunkPtrs[p].IsNil() { + pairedTrunkPtrs[pairedTrunkPtrs[p]] = value.Pointer{} // invalidate other half + } + pairedTrunkPtrs[p] = value.Pointer{} // invalidate this half + } + for _, trunk := range trunks { + switch p := trunk.Metadata.(type) { + case trunkReference: + unpair(p.p) // standalone pointer cannot be part of a pair + case trunkReferences: + p0, ok0 := pairedTrunkPtrs[p.pp[0]] + p1, ok1 := pairedTrunkPtrs[p.pp[1]] + switch { + case !ok0 && !ok1: + // Register the newly seen pair. 
+ pairedTrunkPtrs[p.pp[0]] = p.pp[1] + pairedTrunkPtrs[p.pp[1]] = p.pp[0] + case ok0 && ok1 && p0 == p.pp[1] && p1 == p.pp[0]: + // Exact pair already seen; do nothing. + default: + // Pair conflicts with some other pair; break all pairs. + unpair(p.pp[0]) + unpair(p.pp[1]) + } + } + } + + // Correlate each pointer referenced by leaves to a unique identifier, + // and print the IDs for each trunk that matches those pointers. + var nextID uint + ptrIDs := make(map[value.Pointer]uint) + newID := func() uint { + id := nextID + nextID++ + return id + } + for _, trunk := range trunks { + switch p := trunk.Metadata.(type) { + case trunkReference: + if print := leafPtrs[p.p]; print { + id, ok := ptrIDs[p.p] + if !ok { + id = newID() + ptrIDs[p.p] = id + } + trunk.Prefix = updateReferencePrefix(trunk.Prefix, formatReference(id)) + } + case trunkReferences: + print0 := leafPtrs[p.pp[0]] + print1 := leafPtrs[p.pp[1]] + if print0 || print1 { + id0, ok0 := ptrIDs[p.pp[0]] + id1, ok1 := ptrIDs[p.pp[1]] + isPair := pairedTrunkPtrs[p.pp[0]] == p.pp[1] && pairedTrunkPtrs[p.pp[1]] == p.pp[0] + if isPair { + var id uint + assert(ok0 == ok1) // must be seen together or not at all + if ok0 { + assert(id0 == id1) // must have the same ID + id = id0 + } else { + id = newID() + ptrIDs[p.pp[0]] = id + ptrIDs[p.pp[1]] = id + } + trunk.Prefix = updateReferencePrefix(trunk.Prefix, formatReference(id)) + } else { + if print0 && !ok0 { + id0 = newID() + ptrIDs[p.pp[0]] = id0 + } + if print1 && !ok1 { + id1 = newID() + ptrIDs[p.pp[1]] = id1 + } + switch { + case print0 && print1: + trunk.Prefix = updateReferencePrefix(trunk.Prefix, formatReference(id0)+","+formatReference(id1)) + case print0: + trunk.Prefix = updateReferencePrefix(trunk.Prefix, formatReference(id0)) + case print1: + trunk.Prefix = updateReferencePrefix(trunk.Prefix, formatReference(id1)) + } + } + } + } + } + + // Update all leaf references with the unique identifier. + for _, leaf := range leaves { + if id, ok := ptrIDs[leaf.Metadata.(leafReference).p]; ok { + leaf.Prefix = updateReferencePrefix(leaf.Prefix, formatReference(id)) + } + } +} + +func formatReference(id uint) string { + return fmt.Sprintf("ref#%d", id) +} + +func updateReferencePrefix(prefix, ref string) string { + if prefix == "" { + return pointerDelimPrefix + ref + pointerDelimSuffix + } + suffix := strings.TrimPrefix(prefix, pointerDelimPrefix) + return pointerDelimPrefix + ref + ": " + suffix +} diff --git a/vendor/github.com/google/go-cmp/cmp/report_reflect.go b/vendor/github.com/google/go-cmp/cmp/report_reflect.go new file mode 100644 index 000000000..33f03577f --- /dev/null +++ b/vendor/github.com/google/go-cmp/cmp/report_reflect.go @@ -0,0 +1,402 @@ +// Copyright 2019, The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package cmp + +import ( + "bytes" + "fmt" + "reflect" + "strconv" + "strings" + "unicode" + "unicode/utf8" + + "github.com/google/go-cmp/cmp/internal/value" +) + +type formatValueOptions struct { + // AvoidStringer controls whether to avoid calling custom stringer + // methods like error.Error or fmt.Stringer.String. + AvoidStringer bool + + // PrintAddresses controls whether to print the address of all pointers, + // slice elements, and maps. + PrintAddresses bool + + // QualifiedNames controls whether FormatType uses the fully qualified name + // (including the full package path as opposed to just the package name). 
+ QualifiedNames bool + + // VerbosityLevel controls the amount of output to produce. + // A higher value produces more output. A value of zero or lower produces + // no output (represented using an ellipsis). + // If LimitVerbosity is false, then the level is treated as infinite. + VerbosityLevel int + + // LimitVerbosity specifies that formatting should respect VerbosityLevel. + LimitVerbosity bool +} + +// FormatType prints the type as if it were wrapping s. +// This may return s as-is depending on the current type and TypeMode mode. +func (opts formatOptions) FormatType(t reflect.Type, s textNode) textNode { + // Check whether to emit the type or not. + switch opts.TypeMode { + case autoType: + switch t.Kind() { + case reflect.Struct, reflect.Slice, reflect.Array, reflect.Map: + if s.Equal(textNil) { + return s + } + default: + return s + } + if opts.DiffMode == diffIdentical { + return s // elide type for identical nodes + } + case elideType: + return s + } + + // Determine the type label, applying special handling for unnamed types. + typeName := value.TypeString(t, opts.QualifiedNames) + if t.Name() == "" { + // According to Go grammar, certain type literals contain symbols that + // do not strongly bind to the next lexicographical token (e.g., *T). + switch t.Kind() { + case reflect.Chan, reflect.Func, reflect.Ptr: + typeName = "(" + typeName + ")" + } + } + return &textWrap{Prefix: typeName, Value: wrapParens(s)} +} + +// wrapParens wraps s with a set of parenthesis, but avoids it if the +// wrapped node itself is already surrounded by a pair of parenthesis or braces. +// It handles unwrapping one level of pointer-reference nodes. +func wrapParens(s textNode) textNode { + var refNode *textWrap + if s2, ok := s.(*textWrap); ok { + // Unwrap a single pointer reference node. + switch s2.Metadata.(type) { + case leafReference, trunkReference, trunkReferences: + refNode = s2 + if s3, ok := refNode.Value.(*textWrap); ok { + s2 = s3 + } + } + + // Already has delimiters that make parenthesis unnecessary. + hasParens := strings.HasPrefix(s2.Prefix, "(") && strings.HasSuffix(s2.Suffix, ")") + hasBraces := strings.HasPrefix(s2.Prefix, "{") && strings.HasSuffix(s2.Suffix, "}") + if hasParens || hasBraces { + return s + } + } + if refNode != nil { + refNode.Value = &textWrap{Prefix: "(", Value: refNode.Value, Suffix: ")"} + return s + } + return &textWrap{Prefix: "(", Value: s, Suffix: ")"} +} + +// FormatValue prints the reflect.Value, taking extra care to avoid descending +// into pointers already in ptrs. As pointers are visited, ptrs is also updated. +func (opts formatOptions) FormatValue(v reflect.Value, parentKind reflect.Kind, ptrs *pointerReferences) (out textNode) { + if !v.IsValid() { + return nil + } + t := v.Type() + + // Check slice element for cycles. + if parentKind == reflect.Slice { + ptrRef, visited := ptrs.Push(v.Addr()) + if visited { + return makeLeafReference(ptrRef, false) + } + defer ptrs.Pop() + defer func() { out = wrapTrunkReference(ptrRef, false, out) }() + } + + // Check whether there is an Error or String method to call. + if !opts.AvoidStringer && v.CanInterface() { + // Avoid calling Error or String methods on nil receivers since many + // implementations crash when doing so. + if (t.Kind() != reflect.Ptr && t.Kind() != reflect.Interface) || !v.IsNil() { + var prefix, strVal string + func() { + // Swallow and ignore any panics from String or Error. 
+ defer func() { recover() }() + switch v := v.Interface().(type) { + case error: + strVal = v.Error() + prefix = "e" + case fmt.Stringer: + strVal = v.String() + prefix = "s" + } + }() + if prefix != "" { + return opts.formatString(prefix, strVal) + } + } + } + + // Check whether to explicitly wrap the result with the type. + var skipType bool + defer func() { + if !skipType { + out = opts.FormatType(t, out) + } + }() + + switch t.Kind() { + case reflect.Bool: + return textLine(fmt.Sprint(v.Bool())) + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + return textLine(fmt.Sprint(v.Int())) + case reflect.Uint, reflect.Uint16, reflect.Uint32, reflect.Uint64: + return textLine(fmt.Sprint(v.Uint())) + case reflect.Uint8: + if parentKind == reflect.Slice || parentKind == reflect.Array { + return textLine(formatHex(v.Uint())) + } + return textLine(fmt.Sprint(v.Uint())) + case reflect.Uintptr: + return textLine(formatHex(v.Uint())) + case reflect.Float32, reflect.Float64: + return textLine(fmt.Sprint(v.Float())) + case reflect.Complex64, reflect.Complex128: + return textLine(fmt.Sprint(v.Complex())) + case reflect.String: + return opts.formatString("", v.String()) + case reflect.UnsafePointer, reflect.Chan, reflect.Func: + return textLine(formatPointer(value.PointerOf(v), true)) + case reflect.Struct: + var list textList + v := makeAddressable(v) // needed for retrieveUnexportedField + maxLen := v.NumField() + if opts.LimitVerbosity { + maxLen = ((1 << opts.verbosity()) >> 1) << 2 // 0, 4, 8, 16, 32, etc... + opts.VerbosityLevel-- + } + for i := 0; i < v.NumField(); i++ { + vv := v.Field(i) + if value.IsZero(vv) { + continue // Elide fields with zero values + } + if len(list) == maxLen { + list.AppendEllipsis(diffStats{}) + break + } + sf := t.Field(i) + if supportExporters && !isExported(sf.Name) { + vv = retrieveUnexportedField(v, sf, true) + } + s := opts.WithTypeMode(autoType).FormatValue(vv, t.Kind(), ptrs) + list = append(list, textRecord{Key: sf.Name, Value: s}) + } + return &textWrap{Prefix: "{", Value: list, Suffix: "}"} + case reflect.Slice: + if v.IsNil() { + return textNil + } + + // Check whether this is a []byte of text data. + if t.Elem() == reflect.TypeOf(byte(0)) { + b := v.Bytes() + isPrintSpace := func(r rune) bool { return unicode.IsPrint(r) && unicode.IsSpace(r) } + if len(b) > 0 && utf8.Valid(b) && len(bytes.TrimFunc(b, isPrintSpace)) == 0 { + out = opts.formatString("", string(b)) + return opts.WithTypeMode(emitType).FormatType(t, out) + } + } + + fallthrough + case reflect.Array: + maxLen := v.Len() + if opts.LimitVerbosity { + maxLen = ((1 << opts.verbosity()) >> 1) << 2 // 0, 4, 8, 16, 32, etc... + opts.VerbosityLevel-- + } + var list textList + for i := 0; i < v.Len(); i++ { + if len(list) == maxLen { + list.AppendEllipsis(diffStats{}) + break + } + s := opts.WithTypeMode(elideType).FormatValue(v.Index(i), t.Kind(), ptrs) + list = append(list, textRecord{Value: s}) + } + + out = &textWrap{Prefix: "{", Value: list, Suffix: "}"} + if t.Kind() == reflect.Slice && opts.PrintAddresses { + header := fmt.Sprintf("ptr:%v, len:%d, cap:%d", formatPointer(value.PointerOf(v), false), v.Len(), v.Cap()) + out = &textWrap{Prefix: pointerDelimPrefix + header + pointerDelimSuffix, Value: out} + } + return out + case reflect.Map: + if v.IsNil() { + return textNil + } + + // Check pointer for cycles. 
+ ptrRef, visited := ptrs.Push(v) + if visited { + return makeLeafReference(ptrRef, opts.PrintAddresses) + } + defer ptrs.Pop() + + maxLen := v.Len() + if opts.LimitVerbosity { + maxLen = ((1 << opts.verbosity()) >> 1) << 2 // 0, 4, 8, 16, 32, etc... + opts.VerbosityLevel-- + } + var list textList + for _, k := range value.SortKeys(v.MapKeys()) { + if len(list) == maxLen { + list.AppendEllipsis(diffStats{}) + break + } + sk := formatMapKey(k, false, ptrs) + sv := opts.WithTypeMode(elideType).FormatValue(v.MapIndex(k), t.Kind(), ptrs) + list = append(list, textRecord{Key: sk, Value: sv}) + } + + out = &textWrap{Prefix: "{", Value: list, Suffix: "}"} + out = wrapTrunkReference(ptrRef, opts.PrintAddresses, out) + return out + case reflect.Ptr: + if v.IsNil() { + return textNil + } + + // Check pointer for cycles. + ptrRef, visited := ptrs.Push(v) + if visited { + out = makeLeafReference(ptrRef, opts.PrintAddresses) + return &textWrap{Prefix: "&", Value: out} + } + defer ptrs.Pop() + + skipType = true // Let the underlying value print the type instead + out = opts.FormatValue(v.Elem(), t.Kind(), ptrs) + out = wrapTrunkReference(ptrRef, opts.PrintAddresses, out) + out = &textWrap{Prefix: "&", Value: out} + return out + case reflect.Interface: + if v.IsNil() { + return textNil + } + // Interfaces accept different concrete types, + // so configure the underlying value to explicitly print the type. + skipType = true // Print the concrete type instead + return opts.WithTypeMode(emitType).FormatValue(v.Elem(), t.Kind(), ptrs) + default: + panic(fmt.Sprintf("%v kind not handled", v.Kind())) + } +} + +func (opts formatOptions) formatString(prefix, s string) textNode { + maxLen := len(s) + maxLines := strings.Count(s, "\n") + 1 + if opts.LimitVerbosity { + maxLen = (1 << opts.verbosity()) << 5 // 32, 64, 128, 256, etc... + maxLines = (1 << opts.verbosity()) << 2 // 4, 8, 16, 32, 64, etc... + } + + // For multiline strings, use the triple-quote syntax, + // but only use it when printing removed or inserted nodes since + // we only want the extra verbosity for those cases. + lines := strings.Split(strings.TrimSuffix(s, "\n"), "\n") + isTripleQuoted := len(lines) >= 4 && (opts.DiffMode == '-' || opts.DiffMode == '+') + for i := 0; i < len(lines) && isTripleQuoted; i++ { + lines[i] = strings.TrimPrefix(strings.TrimSuffix(lines[i], "\r"), "\r") // trim leading/trailing carriage returns for legacy Windows endline support + isPrintable := func(r rune) bool { + return unicode.IsPrint(r) || r == '\t' // specially treat tab as printable + } + line := lines[i] + isTripleQuoted = !strings.HasPrefix(strings.TrimPrefix(line, prefix), `"""`) && !strings.HasPrefix(line, "...") && strings.TrimFunc(line, isPrintable) == "" && len(line) <= maxLen + } + if isTripleQuoted { + var list textList + list = append(list, textRecord{Diff: opts.DiffMode, Value: textLine(prefix + `"""`), ElideComma: true}) + for i, line := range lines { + if numElided := len(lines) - i; i == maxLines-1 && numElided > 1 { + comment := commentString(fmt.Sprintf("%d elided lines", numElided)) + list = append(list, textRecord{Diff: opts.DiffMode, Value: textEllipsis, ElideComma: true, Comment: comment}) + break + } + list = append(list, textRecord{Diff: opts.DiffMode, Value: textLine(line), ElideComma: true}) + } + list = append(list, textRecord{Diff: opts.DiffMode, Value: textLine(prefix + `"""`), ElideComma: true}) + return &textWrap{Prefix: "(", Value: list, Suffix: ")"} + } + + // Format the string as a single-line quoted string. 
+ if len(s) > maxLen+len(textEllipsis) { + return textLine(prefix + formatString(s[:maxLen]) + string(textEllipsis)) + } + return textLine(prefix + formatString(s)) +} + +// formatMapKey formats v as if it were a map key. +// The result is guaranteed to be a single line. +func formatMapKey(v reflect.Value, disambiguate bool, ptrs *pointerReferences) string { + var opts formatOptions + opts.DiffMode = diffIdentical + opts.TypeMode = elideType + opts.PrintAddresses = disambiguate + opts.AvoidStringer = disambiguate + opts.QualifiedNames = disambiguate + opts.VerbosityLevel = maxVerbosityPreset + opts.LimitVerbosity = true + s := opts.FormatValue(v, reflect.Map, ptrs).String() + return strings.TrimSpace(s) +} + +// formatString prints s as a double-quoted or backtick-quoted string. +func formatString(s string) string { + // Use quoted string if it the same length as a raw string literal. + // Otherwise, attempt to use the raw string form. + qs := strconv.Quote(s) + if len(qs) == 1+len(s)+1 { + return qs + } + + // Disallow newlines to ensure output is a single line. + // Only allow printable runes for readability purposes. + rawInvalid := func(r rune) bool { + return r == '`' || r == '\n' || !(unicode.IsPrint(r) || r == '\t') + } + if utf8.ValidString(s) && strings.IndexFunc(s, rawInvalid) < 0 { + return "`" + s + "`" + } + return qs +} + +// formatHex prints u as a hexadecimal integer in Go notation. +func formatHex(u uint64) string { + var f string + switch { + case u <= 0xff: + f = "0x%02x" + case u <= 0xffff: + f = "0x%04x" + case u <= 0xffffff: + f = "0x%06x" + case u <= 0xffffffff: + f = "0x%08x" + case u <= 0xffffffffff: + f = "0x%010x" + case u <= 0xffffffffffff: + f = "0x%012x" + case u <= 0xffffffffffffff: + f = "0x%014x" + case u <= 0xffffffffffffffff: + f = "0x%016x" + } + return fmt.Sprintf(f, u) +} diff --git a/vendor/github.com/google/go-cmp/cmp/report_slices.go b/vendor/github.com/google/go-cmp/cmp/report_slices.go new file mode 100644 index 000000000..2ad3bc85b --- /dev/null +++ b/vendor/github.com/google/go-cmp/cmp/report_slices.go @@ -0,0 +1,613 @@ +// Copyright 2019, The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package cmp + +import ( + "bytes" + "fmt" + "math" + "reflect" + "strconv" + "strings" + "unicode" + "unicode/utf8" + + "github.com/google/go-cmp/cmp/internal/diff" +) + +// CanFormatDiffSlice reports whether we support custom formatting for nodes +// that are slices of primitive kinds or strings. +func (opts formatOptions) CanFormatDiffSlice(v *valueNode) bool { + switch { + case opts.DiffMode != diffUnknown: + return false // Must be formatting in diff mode + case v.NumDiff == 0: + return false // No differences detected + case !v.ValueX.IsValid() || !v.ValueY.IsValid(): + return false // Both values must be valid + case v.NumIgnored > 0: + return false // Some ignore option was used + case v.NumTransformed > 0: + return false // Some transform option was used + case v.NumCompared > 1: + return false // More than one comparison was used + case v.NumCompared == 1 && v.Type.Name() != "": + // The need for cmp to check applicability of options on every element + // in a slice is a significant performance detriment for large []byte. + // The workaround is to specify Comparer(bytes.Equal), + // which enables cmp to compare []byte more efficiently. + // If they differ, we still want to provide batched diffing. 
+ // The logic disallows named types since they tend to have their own + // String method, with nicer formatting than what this provides. + return false + } + + // Check whether this is an interface with the same concrete types. + t := v.Type + vx, vy := v.ValueX, v.ValueY + if t.Kind() == reflect.Interface && !vx.IsNil() && !vy.IsNil() && vx.Elem().Type() == vy.Elem().Type() { + vx, vy = vx.Elem(), vy.Elem() + t = vx.Type() + } + + // Check whether we provide specialized diffing for this type. + switch t.Kind() { + case reflect.String: + case reflect.Array, reflect.Slice: + // Only slices of primitive types have specialized handling. + switch t.Elem().Kind() { + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, + reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr, + reflect.Bool, reflect.Float32, reflect.Float64, reflect.Complex64, reflect.Complex128: + default: + return false + } + + // Both slice values have to be non-empty. + if t.Kind() == reflect.Slice && (vx.Len() == 0 || vy.Len() == 0) { + return false + } + + // If a sufficient number of elements already differ, + // use specialized formatting even if length requirement is not met. + if v.NumDiff > v.NumSame { + return true + } + default: + return false + } + + // Use specialized string diffing for longer slices or strings. + const minLength = 64 + return vx.Len() >= minLength && vy.Len() >= minLength +} + +// FormatDiffSlice prints a diff for the slices (or strings) represented by v. +// This provides custom-tailored logic to make printing of differences in +// textual strings and slices of primitive kinds more readable. +func (opts formatOptions) FormatDiffSlice(v *valueNode) textNode { + assert(opts.DiffMode == diffUnknown) + t, vx, vy := v.Type, v.ValueX, v.ValueY + if t.Kind() == reflect.Interface { + vx, vy = vx.Elem(), vy.Elem() + t = vx.Type() + opts = opts.WithTypeMode(emitType) + } + + // Auto-detect the type of the data. + var sx, sy string + var ssx, ssy []string + var isString, isMostlyText, isPureLinedText, isBinary bool + switch { + case t.Kind() == reflect.String: + sx, sy = vx.String(), vy.String() + isString = true + case t.Kind() == reflect.Slice && t.Elem() == reflect.TypeOf(byte(0)): + sx, sy = string(vx.Bytes()), string(vy.Bytes()) + isString = true + case t.Kind() == reflect.Array: + // Arrays need to be addressable for slice operations to work. + vx2, vy2 := reflect.New(t).Elem(), reflect.New(t).Elem() + vx2.Set(vx) + vy2.Set(vy) + vx, vy = vx2, vy2 + } + if isString { + var numTotalRunes, numValidRunes, numLines, lastLineIdx, maxLineLen int + for i, r := range sx + sy { + numTotalRunes++ + if (unicode.IsPrint(r) || unicode.IsSpace(r)) && r != utf8.RuneError { + numValidRunes++ + } + if r == '\n' { + if maxLineLen < i-lastLineIdx { + maxLineLen = i - lastLineIdx + } + lastLineIdx = i + 1 + numLines++ + } + } + isPureText := numValidRunes == numTotalRunes + isMostlyText = float64(numValidRunes) > math.Floor(0.90*float64(numTotalRunes)) + isPureLinedText = isPureText && numLines >= 4 && maxLineLen <= 1024 + isBinary = !isMostlyText + + // Avoid diffing by lines if it produces a significantly more complex + // edit script than diffing by bytes. 
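	// Editorial note (not in upstream go-cmp), a rough worked example of the
	// heuristic applied just below: if two 100-line strings differ by one
	// character on every line, the line-level edit script replaces every line
	// (efficiencyLines roughly 1.0), while the byte-level script only touches
	// about 100 bytes out of several thousand (efficiencyBytes well below
	// 0.25), so efficiencyLines < 4*efficiencyBytes fails and the line-based
	// formatting is abandoned in favour of the chunked byte output further down.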
+ if isPureLinedText { + ssx = strings.Split(sx, "\n") + ssy = strings.Split(sy, "\n") + esLines := diff.Difference(len(ssx), len(ssy), func(ix, iy int) diff.Result { + return diff.BoolResult(ssx[ix] == ssy[iy]) + }) + esBytes := diff.Difference(len(sx), len(sy), func(ix, iy int) diff.Result { + return diff.BoolResult(sx[ix] == sy[iy]) + }) + efficiencyLines := float64(esLines.Dist()) / float64(len(esLines)) + efficiencyBytes := float64(esBytes.Dist()) / float64(len(esBytes)) + isPureLinedText = efficiencyLines < 4*efficiencyBytes + } + } + + // Format the string into printable records. + var list textList + var delim string + switch { + // If the text appears to be multi-lined text, + // then perform differencing across individual lines. + case isPureLinedText: + list = opts.formatDiffSlice( + reflect.ValueOf(ssx), reflect.ValueOf(ssy), 1, "line", + func(v reflect.Value, d diffMode) textRecord { + s := formatString(v.Index(0).String()) + return textRecord{Diff: d, Value: textLine(s)} + }, + ) + delim = "\n" + + // If possible, use a custom triple-quote (""") syntax for printing + // differences in a string literal. This format is more readable, + // but has edge-cases where differences are visually indistinguishable. + // This format is avoided under the following conditions: + // • A line starts with `"""` + // • A line starts with "..." + // • A line contains non-printable characters + // • Adjacent different lines differ only by whitespace + // + // For example: + // """ + // ... // 3 identical lines + // foo + // bar + // - baz + // + BAZ + // """ + isTripleQuoted := true + prevRemoveLines := map[string]bool{} + prevInsertLines := map[string]bool{} + var list2 textList + list2 = append(list2, textRecord{Value: textLine(`"""`), ElideComma: true}) + for _, r := range list { + if !r.Value.Equal(textEllipsis) { + line, _ := strconv.Unquote(string(r.Value.(textLine))) + line = strings.TrimPrefix(strings.TrimSuffix(line, "\r"), "\r") // trim leading/trailing carriage returns for legacy Windows endline support + normLine := strings.Map(func(r rune) rune { + if unicode.IsSpace(r) { + return -1 // drop whitespace to avoid visually indistinguishable output + } + return r + }, line) + isPrintable := func(r rune) bool { + return unicode.IsPrint(r) || r == '\t' // specially treat tab as printable + } + isTripleQuoted = !strings.HasPrefix(line, `"""`) && !strings.HasPrefix(line, "...") && strings.TrimFunc(line, isPrintable) == "" + switch r.Diff { + case diffRemoved: + isTripleQuoted = isTripleQuoted && !prevInsertLines[normLine] + prevRemoveLines[normLine] = true + case diffInserted: + isTripleQuoted = isTripleQuoted && !prevRemoveLines[normLine] + prevInsertLines[normLine] = true + } + if !isTripleQuoted { + break + } + r.Value = textLine(line) + r.ElideComma = true + } + if !(r.Diff == diffRemoved || r.Diff == diffInserted) { // start a new non-adjacent difference group + prevRemoveLines = map[string]bool{} + prevInsertLines = map[string]bool{} + } + list2 = append(list2, r) + } + if r := list2[len(list2)-1]; r.Diff == diffIdentical && len(r.Value.(textLine)) == 0 { + list2 = list2[:len(list2)-1] // elide single empty line at the end + } + list2 = append(list2, textRecord{Value: textLine(`"""`), ElideComma: true}) + if isTripleQuoted { + var out textNode = &textWrap{Prefix: "(", Value: list2, Suffix: ")"} + switch t.Kind() { + case reflect.String: + if t != reflect.TypeOf(string("")) { + out = opts.FormatType(t, out) + } + case reflect.Slice: + // Always emit type for slices since the 
triple-quote syntax + // looks like a string (not a slice). + opts = opts.WithTypeMode(emitType) + out = opts.FormatType(t, out) + } + return out + } + + // If the text appears to be single-lined text, + // then perform differencing in approximately fixed-sized chunks. + // The output is printed as quoted strings. + case isMostlyText: + list = opts.formatDiffSlice( + reflect.ValueOf(sx), reflect.ValueOf(sy), 64, "byte", + func(v reflect.Value, d diffMode) textRecord { + s := formatString(v.String()) + return textRecord{Diff: d, Value: textLine(s)} + }, + ) + + // If the text appears to be binary data, + // then perform differencing in approximately fixed-sized chunks. + // The output is inspired by hexdump. + case isBinary: + list = opts.formatDiffSlice( + reflect.ValueOf(sx), reflect.ValueOf(sy), 16, "byte", + func(v reflect.Value, d diffMode) textRecord { + var ss []string + for i := 0; i < v.Len(); i++ { + ss = append(ss, formatHex(v.Index(i).Uint())) + } + s := strings.Join(ss, ", ") + comment := commentString(fmt.Sprintf("%c|%v|", d, formatASCII(v.String()))) + return textRecord{Diff: d, Value: textLine(s), Comment: comment} + }, + ) + + // For all other slices of primitive types, + // then perform differencing in approximately fixed-sized chunks. + // The size of each chunk depends on the width of the element kind. + default: + var chunkSize int + if t.Elem().Kind() == reflect.Bool { + chunkSize = 16 + } else { + switch t.Elem().Bits() { + case 8: + chunkSize = 16 + case 16: + chunkSize = 12 + case 32: + chunkSize = 8 + default: + chunkSize = 8 + } + } + list = opts.formatDiffSlice( + vx, vy, chunkSize, t.Elem().Kind().String(), + func(v reflect.Value, d diffMode) textRecord { + var ss []string + for i := 0; i < v.Len(); i++ { + switch t.Elem().Kind() { + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + ss = append(ss, fmt.Sprint(v.Index(i).Int())) + case reflect.Uint, reflect.Uint16, reflect.Uint32, reflect.Uint64: + ss = append(ss, fmt.Sprint(v.Index(i).Uint())) + case reflect.Uint8, reflect.Uintptr: + ss = append(ss, formatHex(v.Index(i).Uint())) + case reflect.Bool, reflect.Float32, reflect.Float64, reflect.Complex64, reflect.Complex128: + ss = append(ss, fmt.Sprint(v.Index(i).Interface())) + } + } + s := strings.Join(ss, ", ") + return textRecord{Diff: d, Value: textLine(s)} + }, + ) + } + + // Wrap the output with appropriate type information. + var out textNode = &textWrap{Prefix: "{", Value: list, Suffix: "}"} + if !isMostlyText { + // The "{...}" byte-sequence literal is not valid Go syntax for strings. + // Emit the type for extra clarity (e.g. "string{...}"). + if t.Kind() == reflect.String { + opts = opts.WithTypeMode(emitType) + } + return opts.FormatType(t, out) + } + switch t.Kind() { + case reflect.String: + out = &textWrap{Prefix: "strings.Join(", Value: out, Suffix: fmt.Sprintf(", %q)", delim)} + if t != reflect.TypeOf(string("")) { + out = opts.FormatType(t, out) + } + case reflect.Slice: + out = &textWrap{Prefix: "bytes.Join(", Value: out, Suffix: fmt.Sprintf(", %q)", delim)} + if t != reflect.TypeOf([]byte(nil)) { + out = opts.FormatType(t, out) + } + } + return out +} + +// formatASCII formats s as an ASCII string. +// This is useful for printing binary strings in a semi-legible way. 
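// Editorial example (not in upstream go-cmp): formatASCII("Hi\x00\xffGo")
// returns "Hi..Go"; every byte outside the printable ASCII range ' '..'~'
// is replaced by a '.'.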
+func formatASCII(s string) string { + b := bytes.Repeat([]byte{'.'}, len(s)) + for i := 0; i < len(s); i++ { + if ' ' <= s[i] && s[i] <= '~' { + b[i] = s[i] + } + } + return string(b) +} + +func (opts formatOptions) formatDiffSlice( + vx, vy reflect.Value, chunkSize int, name string, + makeRec func(reflect.Value, diffMode) textRecord, +) (list textList) { + eq := func(ix, iy int) bool { + return vx.Index(ix).Interface() == vy.Index(iy).Interface() + } + es := diff.Difference(vx.Len(), vy.Len(), func(ix, iy int) diff.Result { + return diff.BoolResult(eq(ix, iy)) + }) + + appendChunks := func(v reflect.Value, d diffMode) int { + n0 := v.Len() + for v.Len() > 0 { + n := chunkSize + if n > v.Len() { + n = v.Len() + } + list = append(list, makeRec(v.Slice(0, n), d)) + v = v.Slice(n, v.Len()) + } + return n0 - v.Len() + } + + var numDiffs int + maxLen := -1 + if opts.LimitVerbosity { + maxLen = (1 << opts.verbosity()) << 2 // 4, 8, 16, 32, 64, etc... + opts.VerbosityLevel-- + } + + groups := coalesceAdjacentEdits(name, es) + groups = coalesceInterveningIdentical(groups, chunkSize/4) + groups = cleanupSurroundingIdentical(groups, eq) + maxGroup := diffStats{Name: name} + for i, ds := range groups { + if maxLen >= 0 && numDiffs >= maxLen { + maxGroup = maxGroup.Append(ds) + continue + } + + // Print equal. + if ds.NumDiff() == 0 { + // Compute the number of leading and trailing equal bytes to print. + var numLo, numHi int + numEqual := ds.NumIgnored + ds.NumIdentical + for numLo < chunkSize*numContextRecords && numLo+numHi < numEqual && i != 0 { + numLo++ + } + for numHi < chunkSize*numContextRecords && numLo+numHi < numEqual && i != len(groups)-1 { + numHi++ + } + if numEqual-(numLo+numHi) <= chunkSize && ds.NumIgnored == 0 { + numHi = numEqual - numLo // Avoid pointless coalescing of single equal row + } + + // Print the equal bytes. + appendChunks(vx.Slice(0, numLo), diffIdentical) + if numEqual > numLo+numHi { + ds.NumIdentical -= numLo + numHi + list.AppendEllipsis(ds) + } + appendChunks(vx.Slice(numEqual-numHi, numEqual), diffIdentical) + vx = vx.Slice(numEqual, vx.Len()) + vy = vy.Slice(numEqual, vy.Len()) + continue + } + + // Print unequal. + len0 := len(list) + nx := appendChunks(vx.Slice(0, ds.NumIdentical+ds.NumRemoved+ds.NumModified), diffRemoved) + vx = vx.Slice(nx, vx.Len()) + ny := appendChunks(vy.Slice(0, ds.NumIdentical+ds.NumInserted+ds.NumModified), diffInserted) + vy = vy.Slice(ny, vy.Len()) + numDiffs += len(list) - len0 + } + if maxGroup.IsZero() { + assert(vx.Len() == 0 && vy.Len() == 0) + } else { + list.AppendEllipsis(maxGroup) + } + return list +} + +// coalesceAdjacentEdits coalesces the list of edits into groups of adjacent +// equal or unequal counts. 
+// +// Example: +// +// Input: "..XXY...Y" +// Output: [ +// {NumIdentical: 2}, +// {NumRemoved: 2, NumInserted 1}, +// {NumIdentical: 3}, +// {NumInserted: 1}, +// ] +// +func coalesceAdjacentEdits(name string, es diff.EditScript) (groups []diffStats) { + var prevMode byte + lastStats := func(mode byte) *diffStats { + if prevMode != mode { + groups = append(groups, diffStats{Name: name}) + prevMode = mode + } + return &groups[len(groups)-1] + } + for _, e := range es { + switch e { + case diff.Identity: + lastStats('=').NumIdentical++ + case diff.UniqueX: + lastStats('!').NumRemoved++ + case diff.UniqueY: + lastStats('!').NumInserted++ + case diff.Modified: + lastStats('!').NumModified++ + } + } + return groups +} + +// coalesceInterveningIdentical coalesces sufficiently short (<= windowSize) +// equal groups into adjacent unequal groups that currently result in a +// dual inserted/removed printout. This acts as a high-pass filter to smooth +// out high-frequency changes within the windowSize. +// +// Example: +// +// WindowSize: 16, +// Input: [ +// {NumIdentical: 61}, // group 0 +// {NumRemoved: 3, NumInserted: 1}, // group 1 +// {NumIdentical: 6}, // ├── coalesce +// {NumInserted: 2}, // ├── coalesce +// {NumIdentical: 1}, // ├── coalesce +// {NumRemoved: 9}, // └── coalesce +// {NumIdentical: 64}, // group 2 +// {NumRemoved: 3, NumInserted: 1}, // group 3 +// {NumIdentical: 6}, // ├── coalesce +// {NumInserted: 2}, // ├── coalesce +// {NumIdentical: 1}, // ├── coalesce +// {NumRemoved: 7}, // ├── coalesce +// {NumIdentical: 1}, // ├── coalesce +// {NumRemoved: 2}, // └── coalesce +// {NumIdentical: 63}, // group 4 +// ] +// Output: [ +// {NumIdentical: 61}, +// {NumIdentical: 7, NumRemoved: 12, NumInserted: 3}, +// {NumIdentical: 64}, +// {NumIdentical: 8, NumRemoved: 12, NumInserted: 3}, +// {NumIdentical: 63}, +// ] +// +func coalesceInterveningIdentical(groups []diffStats, windowSize int) []diffStats { + groups, groupsOrig := groups[:0], groups + for i, ds := range groupsOrig { + if len(groups) >= 2 && ds.NumDiff() > 0 { + prev := &groups[len(groups)-2] // Unequal group + curr := &groups[len(groups)-1] // Equal group + next := &groupsOrig[i] // Unequal group + hadX, hadY := prev.NumRemoved > 0, prev.NumInserted > 0 + hasX, hasY := next.NumRemoved > 0, next.NumInserted > 0 + if ((hadX || hasX) && (hadY || hasY)) && curr.NumIdentical <= windowSize { + *prev = prev.Append(*curr).Append(*next) + groups = groups[:len(groups)-1] // Truncate off equal group + continue + } + } + groups = append(groups, ds) + } + return groups +} + +// cleanupSurroundingIdentical scans through all unequal groups, and +// moves any leading sequence of equal elements to the preceding equal group and +// moves and trailing sequence of equal elements to the succeeding equal group. +// +// This is necessary since coalesceInterveningIdentical may coalesce edit groups +// together such that leading/trailing spans of equal elements becomes possible. +// Note that this can occur even with an optimal diffing algorithm. 
+// +// Example: +// +// Input: [ +// {NumIdentical: 61}, +// {NumIdentical: 1 , NumRemoved: 11, NumInserted: 2}, // assume 3 leading identical elements +// {NumIdentical: 67}, +// {NumIdentical: 7, NumRemoved: 12, NumInserted: 3}, // assume 10 trailing identical elements +// {NumIdentical: 54}, +// ] +// Output: [ +// {NumIdentical: 64}, // incremented by 3 +// {NumRemoved: 9}, +// {NumIdentical: 67}, +// {NumRemoved: 9}, +// {NumIdentical: 64}, // incremented by 10 +// ] +// +func cleanupSurroundingIdentical(groups []diffStats, eq func(i, j int) bool) []diffStats { + var ix, iy int // indexes into sequence x and y + for i, ds := range groups { + // Handle equal group. + if ds.NumDiff() == 0 { + ix += ds.NumIdentical + iy += ds.NumIdentical + continue + } + + // Handle unequal group. + nx := ds.NumIdentical + ds.NumRemoved + ds.NumModified + ny := ds.NumIdentical + ds.NumInserted + ds.NumModified + var numLeadingIdentical, numTrailingIdentical int + for i := 0; i < nx && i < ny && eq(ix+i, iy+i); i++ { + numLeadingIdentical++ + } + for i := 0; i < nx && i < ny && eq(ix+nx-1-i, iy+ny-1-i); i++ { + numTrailingIdentical++ + } + if numIdentical := numLeadingIdentical + numTrailingIdentical; numIdentical > 0 { + if numLeadingIdentical > 0 { + // Remove leading identical span from this group and + // insert it into the preceding group. + if i-1 >= 0 { + groups[i-1].NumIdentical += numLeadingIdentical + } else { + // No preceding group exists, so prepend a new group, + // but do so after we finish iterating over all groups. + defer func() { + groups = append([]diffStats{{Name: groups[0].Name, NumIdentical: numLeadingIdentical}}, groups...) + }() + } + // Increment indexes since the preceding group would have handled this. + ix += numLeadingIdentical + iy += numLeadingIdentical + } + if numTrailingIdentical > 0 { + // Remove trailing identical span from this group and + // insert it into the succeeding group. + if i+1 < len(groups) { + groups[i+1].NumIdentical += numTrailingIdentical + } else { + // No succeeding group exists, so append a new group, + // but do so after we finish iterating over all groups. + defer func() { + groups = append(groups, diffStats{Name: groups[len(groups)-1].Name, NumIdentical: numTrailingIdentical}) + }() + } + // Do not increment indexes since the succeeding group will handle this. + } + + // Update this group since some identical elements were removed. + nx -= numIdentical + ny -= numIdentical + groups[i] = diffStats{Name: ds.Name, NumRemoved: nx, NumInserted: ny} + } + ix += nx + iy += ny + } + return groups +} diff --git a/vendor/github.com/google/go-cmp/cmp/report_text.go b/vendor/github.com/google/go-cmp/cmp/report_text.go new file mode 100644 index 000000000..0fd46d7ff --- /dev/null +++ b/vendor/github.com/google/go-cmp/cmp/report_text.go @@ -0,0 +1,431 @@ +// Copyright 2019, The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package cmp + +import ( + "bytes" + "fmt" + "math/rand" + "strings" + "time" + "unicode/utf8" + + "github.com/google/go-cmp/cmp/internal/flags" +) + +var randBool = rand.New(rand.NewSource(time.Now().Unix())).Intn(2) == 0 + +const maxColumnLength = 80 + +type indentMode int + +func (n indentMode) appendIndent(b []byte, d diffMode) []byte { + // The output of Diff is documented as being unstable to provide future + // flexibility in changing the output for more humanly readable reports. 
+ // This logic intentionally introduces instability to the exact output + // so that users can detect accidental reliance on stability early on, + // rather than much later when an actual change to the format occurs. + if flags.Deterministic || randBool { + // Use regular spaces (U+0020). + switch d { + case diffUnknown, diffIdentical: + b = append(b, " "...) + case diffRemoved: + b = append(b, "- "...) + case diffInserted: + b = append(b, "+ "...) + } + } else { + // Use non-breaking spaces (U+00a0). + switch d { + case diffUnknown, diffIdentical: + b = append(b, "  "...) + case diffRemoved: + b = append(b, "- "...) + case diffInserted: + b = append(b, "+ "...) + } + } + return repeatCount(n).appendChar(b, '\t') +} + +type repeatCount int + +func (n repeatCount) appendChar(b []byte, c byte) []byte { + for ; n > 0; n-- { + b = append(b, c) + } + return b +} + +// textNode is a simplified tree-based representation of structured text. +// Possible node types are textWrap, textList, or textLine. +type textNode interface { + // Len reports the length in bytes of a single-line version of the tree. + // Nested textRecord.Diff and textRecord.Comment fields are ignored. + Len() int + // Equal reports whether the two trees are structurally identical. + // Nested textRecord.Diff and textRecord.Comment fields are compared. + Equal(textNode) bool + // String returns the string representation of the text tree. + // It is not guaranteed that len(x.String()) == x.Len(), + // nor that x.String() == y.String() implies that x.Equal(y). + String() string + + // formatCompactTo formats the contents of the tree as a single-line string + // to the provided buffer. Any nested textRecord.Diff and textRecord.Comment + // fields are ignored. + // + // However, not all nodes in the tree should be collapsed as a single-line. + // If a node can be collapsed as a single-line, it is replaced by a textLine + // node. Since the top-level node cannot replace itself, this also returns + // the current node itself. + // + // This does not mutate the receiver. + formatCompactTo([]byte, diffMode) ([]byte, textNode) + // formatExpandedTo formats the contents of the tree as a multi-line string + // to the provided buffer. In order for column alignment to operate well, + // formatCompactTo must be called before calling formatExpandedTo. + formatExpandedTo([]byte, diffMode, indentMode) []byte +} + +// textWrap is a wrapper that concatenates a prefix and/or a suffix +// to the underlying node. +type textWrap struct { + Prefix string // e.g., "bytes.Buffer{" + Value textNode // textWrap | textList | textLine + Suffix string // e.g., "}" + Metadata interface{} // arbitrary metadata; has no effect on formatting +} + +func (s *textWrap) Len() int { + return len(s.Prefix) + s.Value.Len() + len(s.Suffix) +} +func (s1 *textWrap) Equal(s2 textNode) bool { + if s2, ok := s2.(*textWrap); ok { + return s1.Prefix == s2.Prefix && s1.Value.Equal(s2.Value) && s1.Suffix == s2.Suffix + } + return false +} +func (s *textWrap) String() string { + var d diffMode + var n indentMode + _, s2 := s.formatCompactTo(nil, d) + b := n.appendIndent(nil, d) // Leading indent + b = s2.formatExpandedTo(b, d, n) // Main body + b = append(b, '\n') // Trailing newline + return string(b) +} +func (s *textWrap) formatCompactTo(b []byte, d diffMode) ([]byte, textNode) { + n0 := len(b) // Original buffer length + b = append(b, s.Prefix...) + b, s.Value = s.Value.formatCompactTo(b, d) + b = append(b, s.Suffix...) 
+ if _, ok := s.Value.(textLine); ok { + return b, textLine(b[n0:]) + } + return b, s +} +func (s *textWrap) formatExpandedTo(b []byte, d diffMode, n indentMode) []byte { + b = append(b, s.Prefix...) + b = s.Value.formatExpandedTo(b, d, n) + b = append(b, s.Suffix...) + return b +} + +// textList is a comma-separated list of textWrap or textLine nodes. +// The list may be formatted as multi-lines or single-line at the discretion +// of the textList.formatCompactTo method. +type textList []textRecord +type textRecord struct { + Diff diffMode // e.g., 0 or '-' or '+' + Key string // e.g., "MyField" + Value textNode // textWrap | textLine + ElideComma bool // avoid trailing comma + Comment fmt.Stringer // e.g., "6 identical fields" +} + +// AppendEllipsis appends a new ellipsis node to the list if none already +// exists at the end. If cs is non-zero it coalesces the statistics with the +// previous diffStats. +func (s *textList) AppendEllipsis(ds diffStats) { + hasStats := !ds.IsZero() + if len(*s) == 0 || !(*s)[len(*s)-1].Value.Equal(textEllipsis) { + if hasStats { + *s = append(*s, textRecord{Value: textEllipsis, ElideComma: true, Comment: ds}) + } else { + *s = append(*s, textRecord{Value: textEllipsis, ElideComma: true}) + } + return + } + if hasStats { + (*s)[len(*s)-1].Comment = (*s)[len(*s)-1].Comment.(diffStats).Append(ds) + } +} + +func (s textList) Len() (n int) { + for i, r := range s { + n += len(r.Key) + if r.Key != "" { + n += len(": ") + } + n += r.Value.Len() + if i < len(s)-1 { + n += len(", ") + } + } + return n +} + +func (s1 textList) Equal(s2 textNode) bool { + if s2, ok := s2.(textList); ok { + if len(s1) != len(s2) { + return false + } + for i := range s1 { + r1, r2 := s1[i], s2[i] + if !(r1.Diff == r2.Diff && r1.Key == r2.Key && r1.Value.Equal(r2.Value) && r1.Comment == r2.Comment) { + return false + } + } + return true + } + return false +} + +func (s textList) String() string { + return (&textWrap{Prefix: "{", Value: s, Suffix: "}"}).String() +} + +func (s textList) formatCompactTo(b []byte, d diffMode) ([]byte, textNode) { + s = append(textList(nil), s...) // Avoid mutating original + + // Determine whether we can collapse this list as a single line. + n0 := len(b) // Original buffer length + var multiLine bool + for i, r := range s { + if r.Diff == diffInserted || r.Diff == diffRemoved { + multiLine = true + } + b = append(b, r.Key...) + if r.Key != "" { + b = append(b, ": "...) + } + b, s[i].Value = r.Value.formatCompactTo(b, d|r.Diff) + if _, ok := s[i].Value.(textLine); !ok { + multiLine = true + } + if r.Comment != nil { + multiLine = true + } + if i < len(s)-1 { + b = append(b, ", "...) + } + } + // Force multi-lined output when printing a removed/inserted node that + // is sufficiently long. + if (d == diffInserted || d == diffRemoved) && len(b[n0:]) > maxColumnLength { + multiLine = true + } + if !multiLine { + return b, textLine(b[n0:]) + } + return b, s +} + +func (s textList) formatExpandedTo(b []byte, d diffMode, n indentMode) []byte { + alignKeyLens := s.alignLens( + func(r textRecord) bool { + _, isLine := r.Value.(textLine) + return r.Key == "" || !isLine + }, + func(r textRecord) int { return utf8.RuneCountInString(r.Key) }, + ) + alignValueLens := s.alignLens( + func(r textRecord) bool { + _, isLine := r.Value.(textLine) + return !isLine || r.Value.Equal(textEllipsis) || r.Comment == nil + }, + func(r textRecord) int { return utf8.RuneCount(r.Value.(textLine)) }, + ) + + // Format lists of simple lists in a batched form. 
+ // If the list is sequence of only textLine values, + // then batch multiple values on a single line. + var isSimple bool + for _, r := range s { + _, isLine := r.Value.(textLine) + isSimple = r.Diff == 0 && r.Key == "" && isLine && r.Comment == nil + if !isSimple { + break + } + } + if isSimple { + n++ + var batch []byte + emitBatch := func() { + if len(batch) > 0 { + b = n.appendIndent(append(b, '\n'), d) + b = append(b, bytes.TrimRight(batch, " ")...) + batch = batch[:0] + } + } + for _, r := range s { + line := r.Value.(textLine) + if len(batch)+len(line)+len(", ") > maxColumnLength { + emitBatch() + } + batch = append(batch, line...) + batch = append(batch, ", "...) + } + emitBatch() + n-- + return n.appendIndent(append(b, '\n'), d) + } + + // Format the list as a multi-lined output. + n++ + for i, r := range s { + b = n.appendIndent(append(b, '\n'), d|r.Diff) + if r.Key != "" { + b = append(b, r.Key+": "...) + } + b = alignKeyLens[i].appendChar(b, ' ') + + b = r.Value.formatExpandedTo(b, d|r.Diff, n) + if !r.ElideComma { + b = append(b, ',') + } + b = alignValueLens[i].appendChar(b, ' ') + + if r.Comment != nil { + b = append(b, " // "+r.Comment.String()...) + } + } + n-- + + return n.appendIndent(append(b, '\n'), d) +} + +func (s textList) alignLens( + skipFunc func(textRecord) bool, + lenFunc func(textRecord) int, +) []repeatCount { + var startIdx, endIdx, maxLen int + lens := make([]repeatCount, len(s)) + for i, r := range s { + if skipFunc(r) { + for j := startIdx; j < endIdx && j < len(s); j++ { + lens[j] = repeatCount(maxLen - lenFunc(s[j])) + } + startIdx, endIdx, maxLen = i+1, i+1, 0 + } else { + if maxLen < lenFunc(r) { + maxLen = lenFunc(r) + } + endIdx = i + 1 + } + } + for j := startIdx; j < endIdx && j < len(s); j++ { + lens[j] = repeatCount(maxLen - lenFunc(s[j])) + } + return lens +} + +// textLine is a single-line segment of text and is always a leaf node +// in the textNode tree. +type textLine []byte + +var ( + textNil = textLine("nil") + textEllipsis = textLine("...") +) + +func (s textLine) Len() int { + return len(s) +} +func (s1 textLine) Equal(s2 textNode) bool { + if s2, ok := s2.(textLine); ok { + return bytes.Equal([]byte(s1), []byte(s2)) + } + return false +} +func (s textLine) String() string { + return string(s) +} +func (s textLine) formatCompactTo(b []byte, d diffMode) ([]byte, textNode) { + return append(b, s...), s +} +func (s textLine) formatExpandedTo(b []byte, _ diffMode, _ indentMode) []byte { + return append(b, s...) +} + +type diffStats struct { + Name string + NumIgnored int + NumIdentical int + NumRemoved int + NumInserted int + NumModified int +} + +func (s diffStats) IsZero() bool { + s.Name = "" + return s == diffStats{} +} + +func (s diffStats) NumDiff() int { + return s.NumRemoved + s.NumInserted + s.NumModified +} + +func (s diffStats) Append(ds diffStats) diffStats { + assert(s.Name == ds.Name) + s.NumIgnored += ds.NumIgnored + s.NumIdentical += ds.NumIdentical + s.NumRemoved += ds.NumRemoved + s.NumInserted += ds.NumInserted + s.NumModified += ds.NumModified + return s +} + +// String prints a humanly-readable summary of coalesced records. 
+// +// Example: +// diffStats{Name: "Field", NumIgnored: 5}.String() => "5 ignored fields" +func (s diffStats) String() string { + var ss []string + var sum int + labels := [...]string{"ignored", "identical", "removed", "inserted", "modified"} + counts := [...]int{s.NumIgnored, s.NumIdentical, s.NumRemoved, s.NumInserted, s.NumModified} + for i, n := range counts { + if n > 0 { + ss = append(ss, fmt.Sprintf("%d %v", n, labels[i])) + } + sum += n + } + + // Pluralize the name (adjusting for some obscure English grammar rules). + name := s.Name + if sum > 1 { + name += "s" + if strings.HasSuffix(name, "ys") { + name = name[:len(name)-2] + "ies" // e.g., "entrys" => "entries" + } + } + + // Format the list according to English grammar (with Oxford comma). + switch n := len(ss); n { + case 0: + return "" + case 1, 2: + return strings.Join(ss, " and ") + " " + name + default: + return strings.Join(ss[:n-1], ", ") + ", and " + ss[n-1] + " " + name + } +} + +type commentString string + +func (s commentString) String() string { return string(s) } diff --git a/vendor/github.com/google/go-cmp/cmp/report_value.go b/vendor/github.com/google/go-cmp/cmp/report_value.go new file mode 100644 index 000000000..668d470fd --- /dev/null +++ b/vendor/github.com/google/go-cmp/cmp/report_value.go @@ -0,0 +1,121 @@ +// Copyright 2019, The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package cmp + +import "reflect" + +// valueNode represents a single node within a report, which is a +// structured representation of the value tree, containing information +// regarding which nodes are equal or not. +type valueNode struct { + parent *valueNode + + Type reflect.Type + ValueX reflect.Value + ValueY reflect.Value + + // NumSame is the number of leaf nodes that are equal. + // All descendants are equal only if NumDiff is 0. + NumSame int + // NumDiff is the number of leaf nodes that are not equal. + NumDiff int + // NumIgnored is the number of leaf nodes that are ignored. + NumIgnored int + // NumCompared is the number of leaf nodes that were compared + // using an Equal method or Comparer function. + NumCompared int + // NumTransformed is the number of non-leaf nodes that were transformed. + NumTransformed int + // NumChildren is the number of transitive descendants of this node. + // This counts from zero; thus, leaf nodes have no descendants. + NumChildren int + // MaxDepth is the maximum depth of the tree. This counts from zero; + // thus, leaf nodes have a depth of zero. + MaxDepth int + + // Records is a list of struct fields, slice elements, or map entries. + Records []reportRecord // If populated, implies Value is not populated + + // Value is the result of a transformation, pointer indirect, of + // type assertion. + Value *valueNode // If populated, implies Records is not populated + + // TransformerName is the name of the transformer. 
+ TransformerName string // If non-empty, implies Value is populated +} +type reportRecord struct { + Key reflect.Value // Invalid for slice element + Value *valueNode +} + +func (parent *valueNode) PushStep(ps PathStep) (child *valueNode) { + vx, vy := ps.Values() + child = &valueNode{parent: parent, Type: ps.Type(), ValueX: vx, ValueY: vy} + switch s := ps.(type) { + case StructField: + assert(parent.Value == nil) + parent.Records = append(parent.Records, reportRecord{Key: reflect.ValueOf(s.Name()), Value: child}) + case SliceIndex: + assert(parent.Value == nil) + parent.Records = append(parent.Records, reportRecord{Value: child}) + case MapIndex: + assert(parent.Value == nil) + parent.Records = append(parent.Records, reportRecord{Key: s.Key(), Value: child}) + case Indirect: + assert(parent.Value == nil && parent.Records == nil) + parent.Value = child + case TypeAssertion: + assert(parent.Value == nil && parent.Records == nil) + parent.Value = child + case Transform: + assert(parent.Value == nil && parent.Records == nil) + parent.Value = child + parent.TransformerName = s.Name() + parent.NumTransformed++ + default: + assert(parent == nil) // Must be the root step + } + return child +} + +func (r *valueNode) Report(rs Result) { + assert(r.MaxDepth == 0) // May only be called on leaf nodes + + if rs.ByIgnore() { + r.NumIgnored++ + } else { + if rs.Equal() { + r.NumSame++ + } else { + r.NumDiff++ + } + } + assert(r.NumSame+r.NumDiff+r.NumIgnored == 1) + + if rs.ByMethod() { + r.NumCompared++ + } + if rs.ByFunc() { + r.NumCompared++ + } + assert(r.NumCompared <= 1) +} + +func (child *valueNode) PopStep() (parent *valueNode) { + if child.parent == nil { + return nil + } + parent = child.parent + parent.NumSame += child.NumSame + parent.NumDiff += child.NumDiff + parent.NumIgnored += child.NumIgnored + parent.NumCompared += child.NumCompared + parent.NumTransformed += child.NumTransformed + parent.NumChildren += child.NumChildren + 1 + if parent.MaxDepth < child.MaxDepth+1 { + parent.MaxDepth = child.MaxDepth + 1 + } + return parent +} diff --git a/vendor/github.com/gordonklaus/ineffassign/LICENSE b/vendor/github.com/gordonklaus/ineffassign/LICENSE new file mode 100644 index 000000000..9e3d9bcc0 --- /dev/null +++ b/vendor/github.com/gordonklaus/ineffassign/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2016 Gordon Klaus and contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
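The file vendored next is the ineffassign analyzer itself, exposed as a standard golang.org/x/tools/go/analysis Analyzer. Purely as an illustrative sketch, and not part of this patch (golangci-lint wires the analyzer into its own runner), a minimal standalone driver built on the singlechecker helper would look roughly like this; whether singlechecker is vendored in this tree is not shown here:

// Hypothetical standalone driver, for orientation only.
package main

import (
	"github.com/gordonklaus/ineffassign/pkg/ineffassign"
	"golang.org/x/tools/go/analysis/singlechecker"
)

func main() {
	// singlechecker handles flag parsing, package loading and diagnostic
	// reporting for a single analysis.Analyzer.
	singlechecker.Main(ineffassign.Analyzer)
}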
diff --git a/vendor/github.com/gordonklaus/ineffassign/pkg/ineffassign/ineffassign.go b/vendor/github.com/gordonklaus/ineffassign/pkg/ineffassign/ineffassign.go new file mode 100644 index 000000000..606eb14aa --- /dev/null +++ b/vendor/github.com/gordonklaus/ineffassign/pkg/ineffassign/ineffassign.go @@ -0,0 +1,591 @@ +package ineffassign + +import ( + "fmt" + "go/ast" + "go/token" + "sort" + "strings" + + "golang.org/x/tools/go/analysis" +) + +// Analyzer is the ineffassign analysis.Analyzer instance. +var Analyzer = &analysis.Analyzer{ + Name: "ineffassign", + Doc: "detect ineffectual assignments in Go code", + Run: checkPath, +} + +func checkPath(pass *analysis.Pass) (interface{}, error) { + for _, file := range pass.Files { + if isGenerated(file) { + continue + } + + bld := &builder{vars: map[*ast.Object]*variable{}} + bld.walk(file) + + chk := &checker{vars: bld.vars, seen: map[*block]bool{}} + for _, b := range bld.roots { + chk.check(b) + } + sort.Sort(chk.ineff) + + for _, id := range chk.ineff { + pass.Report(analysis.Diagnostic{ + Pos: id.Pos(), + Message: fmt.Sprintf("ineffectual assignment to %s", id.Name), + }) + } + } + + return nil, nil +} + +func isGenerated(file *ast.File) bool { + for _, cg := range file.Comments { + for _, c := range cg.List { + if strings.HasPrefix(c.Text, "// Code generated ") && strings.HasSuffix(c.Text, " DO NOT EDIT.") { + return true + } + } + } + + return false +} + +type builder struct { + roots []*block + block *block + vars map[*ast.Object]*variable + results []*ast.FieldList + breaks branchStack + continues branchStack + gotos branchStack + labelStmt *ast.LabeledStmt +} + +type block struct { + children []*block + ops map[*ast.Object][]operation +} + +func (b *block) addChild(c *block) { + b.children = append(b.children, c) +} + +type operation struct { + id *ast.Ident + assign bool +} + +type variable struct { + fundept int + escapes bool +} + +func (bld *builder) walk(n ast.Node) { + if n != nil { + ast.Walk(bld, n) + } +} + +func (bld *builder) Visit(n ast.Node) ast.Visitor { + switch n := n.(type) { + case *ast.FuncDecl: + if n.Body != nil { + bld.fun(n.Type, n.Body) + } + case *ast.FuncLit: + bld.fun(n.Type, n.Body) + case *ast.IfStmt: + bld.walk(n.Init) + bld.walk(n.Cond) + b0 := bld.block + bld.newBlock(b0) + bld.walk(n.Body) + b1 := bld.block + if n.Else != nil { + bld.newBlock(b0) + bld.walk(n.Else) + b0 = bld.block + } + bld.newBlock(b0, b1) + case *ast.ForStmt: + lbl := bld.stmtLabel(n) + brek := bld.breaks.push(lbl) + continu := bld.continues.push(lbl) + bld.walk(n.Init) + start := bld.newBlock(bld.block) + bld.walk(n.Cond) + cond := bld.block + bld.newBlock(cond) + bld.walk(n.Body) + continu.setDestination(bld.newBlock(bld.block)) + bld.walk(n.Post) + bld.block.addChild(start) + brek.setDestination(bld.newBlock(cond)) + bld.breaks.pop() + bld.continues.pop() + case *ast.RangeStmt: + lbl := bld.stmtLabel(n) + brek := bld.breaks.push(lbl) + continu := bld.continues.push(lbl) + bld.walk(n.X) + pre := bld.newBlock(bld.block) + start := bld.newBlock(pre) + if n.Key != nil { + lhs := []ast.Expr{n.Key} + if n.Value != nil { + lhs = append(lhs, n.Value) + } + bld.walk(&ast.AssignStmt{Lhs: lhs, Tok: n.Tok, TokPos: n.TokPos, Rhs: []ast.Expr{&ast.Ident{NamePos: n.X.End()}}}) + } + bld.walk(n.Body) + bld.block.addChild(start) + continu.setDestination(pre) + brek.setDestination(bld.newBlock(pre, bld.block)) + bld.breaks.pop() + bld.continues.pop() + case *ast.SwitchStmt: + bld.walk(n.Init) + bld.walk(n.Tag) + bld.swtch(n, n.Body.List) + case 
*ast.TypeSwitchStmt: + bld.walk(n.Init) + bld.walk(n.Assign) + bld.swtch(n, n.Body.List) + case *ast.SelectStmt: + brek := bld.breaks.push(bld.stmtLabel(n)) + for _, c := range n.Body.List { + c := c.(*ast.CommClause).Comm + if s, ok := c.(*ast.AssignStmt); ok { + bld.walk(s.Rhs[0]) + } else { + bld.walk(c) + } + } + b0 := bld.block + exits := make([]*block, len(n.Body.List)) + dfault := false + for i, c := range n.Body.List { + c := c.(*ast.CommClause) + bld.newBlock(b0) + bld.walk(c) + exits[i] = bld.block + dfault = dfault || c.Comm == nil + } + if !dfault { + exits = append(exits, b0) + } + brek.setDestination(bld.newBlock(exits...)) + bld.breaks.pop() + case *ast.LabeledStmt: + bld.gotos.get(n.Label).setDestination(bld.newBlock(bld.block)) + bld.labelStmt = n + bld.walk(n.Stmt) + case *ast.BranchStmt: + switch n.Tok { + case token.BREAK: + bld.breaks.get(n.Label).addSource(bld.block) + bld.newBlock() + case token.CONTINUE: + bld.continues.get(n.Label).addSource(bld.block) + bld.newBlock() + case token.GOTO: + bld.gotos.get(n.Label).addSource(bld.block) + bld.newBlock() + } + + case *ast.AssignStmt: + if n.Tok == token.QUO_ASSIGN || n.Tok == token.REM_ASSIGN { + bld.maybePanic() + } + + for _, x := range n.Rhs { + bld.walk(x) + } + for i, x := range n.Lhs { + if id, ok := ident(x); ok { + if n.Tok >= token.ADD_ASSIGN && n.Tok <= token.AND_NOT_ASSIGN { + bld.use(id) + } + // Don't treat explicit initialization to zero as assignment; it is often used as shorthand for a bare declaration. + if n.Tok == token.DEFINE && i < len(n.Rhs) && isZeroInitializer(n.Rhs[i]) { + bld.use(id) + } else { + bld.assign(id) + } + } else { + bld.walk(x) + } + } + case *ast.GenDecl: + if n.Tok == token.VAR { + for _, s := range n.Specs { + s := s.(*ast.ValueSpec) + for _, x := range s.Values { + bld.walk(x) + } + for _, id := range s.Names { + if len(s.Values) > 0 { + bld.assign(id) + } else { + bld.use(id) + } + } + } + } + case *ast.IncDecStmt: + if id, ok := ident(n.X); ok { + bld.use(id) + bld.assign(id) + } else { + bld.walk(n.X) + } + case *ast.Ident: + bld.use(n) + case *ast.ReturnStmt: + for _, x := range n.Results { + bld.walk(x) + } + if res := bld.results[len(bld.results)-1]; res != nil { + for _, f := range res.List { + for _, id := range f.Names { + if n.Results != nil { + bld.assign(id) + } + bld.use(id) + } + } + } + bld.newBlock() + case *ast.SendStmt: + bld.maybePanic() + return bld + + case *ast.BinaryExpr: + if n.Op == token.EQL || n.Op == token.QUO || n.Op == token.REM { + bld.maybePanic() + } + return bld + case *ast.CallExpr: + bld.maybePanic() + return bld + case *ast.IndexExpr: + bld.maybePanic() + return bld + case *ast.UnaryExpr: + id, ok := ident(n.X) + if ix, isIx := n.X.(*ast.IndexExpr); isIx { + // We don't care about indexing into slices, but without type information we can do no better. + id, ok = ident(ix.X) + } + if ok && n.Op == token.AND { + if v, ok := bld.vars[id.Obj]; ok { + v.escapes = true + } + } + return bld + case *ast.SelectorExpr: + bld.maybePanic() + // A method call (possibly delayed via a method value) might implicitly take + // the address of its receiver, causing it to escape. + // We can't do any better here without knowing the variable's type. + if id, ok := ident(n.X); ok { + if v, ok := bld.vars[id.Obj]; ok { + v.escapes = true + } + } + return bld + case *ast.SliceExpr: + bld.maybePanic() + // We don't care about slicing into slices, but without type information we can do no better. 
+ if id, ok := ident(n.X); ok { + if v, ok := bld.vars[id.Obj]; ok { + v.escapes = true + } + } + return bld + case *ast.StarExpr: + bld.maybePanic() + return bld + case *ast.TypeAssertExpr: + bld.maybePanic() + return bld + + default: + return bld + } + return nil +} + +func isZeroInitializer(x ast.Expr) bool { + // Assume that a call expression of a single argument is a conversion expression. We can't do better without type information. + if c, ok := x.(*ast.CallExpr); ok { + switch c.Fun.(type) { + case *ast.Ident, *ast.SelectorExpr: + default: + return false + } + if len(c.Args) != 1 { + return false + } + x = c.Args[0] + } + + switch x := x.(type) { + case *ast.BasicLit: + switch x.Value { + case "0", "0.0", "0.", ".0", `""`: + return true + } + case *ast.Ident: + return x.Name == "false" && x.Obj == nil + } + + return false +} + +func (bld *builder) fun(typ *ast.FuncType, body *ast.BlockStmt) { + for _, v := range bld.vars { + v.fundept++ + } + bld.results = append(bld.results, typ.Results) + + b := bld.block + bld.newBlock() + bld.roots = append(bld.roots, bld.block) + bld.walk(typ) + bld.walk(body) + bld.block = b + + bld.results = bld.results[:len(bld.results)-1] + for _, v := range bld.vars { + v.fundept-- + } +} + +func (bld *builder) swtch(stmt ast.Stmt, cases []ast.Stmt) { + brek := bld.breaks.push(bld.stmtLabel(stmt)) + b0 := bld.block + list := b0 + exits := make([]*block, 0, len(cases)+1) + var dfault, fallthru *block + for _, c := range cases { + c := c.(*ast.CaseClause) + + if c.List != nil { + list = bld.newBlock(list) + for _, x := range c.List { + bld.walk(x) + } + } + + parents := []*block{} + if c.List != nil { + parents = append(parents, list) + } + if fallthru != nil { + parents = append(parents, fallthru) + fallthru = nil + } + bld.newBlock(parents...) + if c.List == nil { + dfault = bld.block + } + for _, s := range c.Body { + bld.walk(s) + if s, ok := s.(*ast.BranchStmt); ok && s.Tok == token.FALLTHROUGH { + fallthru = bld.block + } + } + + if fallthru == nil { + exits = append(exits, bld.block) + } + } + if dfault != nil { + list.addChild(dfault) + } else { + exits = append(exits, b0) + } + brek.setDestination(bld.newBlock(exits...)) + bld.breaks.pop() +} + +// An operation that might panic marks named function results as used. 
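// Editorial note (not part of upstream ineffassign): the rationale, as far as
// the surrounding code suggests, is that a panicking operation can end the
// function early, at which point the current values of named results become
// observable (for example via a deferred recover), so earlier assignments to
// them must not be reported as ineffectual.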
+func (bld *builder) maybePanic() { + if len(bld.results) == 0 { + return + } + res := bld.results[len(bld.results)-1] + if res == nil { + return + } + for _, f := range res.List { + for _, id := range f.Names { + bld.use(id) + } + } +} + +func (bld *builder) newBlock(parents ...*block) *block { + bld.block = &block{ops: map[*ast.Object][]operation{}} + for _, b := range parents { + b.addChild(bld.block) + } + return bld.block +} + +func (bld *builder) stmtLabel(s ast.Stmt) *ast.Object { + if ls := bld.labelStmt; ls != nil && ls.Stmt == s { + return ls.Label.Obj + } + return nil +} + +func (bld *builder) assign(id *ast.Ident) { + bld.newOp(id, true) +} + +func (bld *builder) use(id *ast.Ident) { + bld.newOp(id, false) +} + +func (bld *builder) newOp(id *ast.Ident, assign bool) { + if id.Name == "_" || id.Obj == nil { + return + } + + v, ok := bld.vars[id.Obj] + if !ok { + v = &variable{} + bld.vars[id.Obj] = v + } + v.escapes = v.escapes || v.fundept > 0 || bld.block == nil + + if b := bld.block; b != nil && !v.escapes { + b.ops[id.Obj] = append(b.ops[id.Obj], operation{id, assign}) + } +} + +type branchStack []*branch + +type branch struct { + label *ast.Object + srcs []*block + dst *block +} + +func (s *branchStack) push(lbl *ast.Object) *branch { + br := &branch{label: lbl} + *s = append(*s, br) + return br +} + +func (s *branchStack) get(lbl *ast.Ident) *branch { + for i := len(*s) - 1; i >= 0; i-- { + if br := (*s)[i]; lbl == nil || br.label == lbl.Obj { + return br + } + } + + // Guard against invalid code (break/continue outside of loop). + if lbl == nil { + return &branch{} + } + + return s.push(lbl.Obj) +} + +func (br *branch) addSource(src *block) { + br.srcs = append(br.srcs, src) + if br.dst != nil { + src.addChild(br.dst) + } +} + +func (br *branch) setDestination(dst *block) { + br.dst = dst + for _, src := range br.srcs { + src.addChild(dst) + } +} + +func (s *branchStack) pop() { + *s = (*s)[:len(*s)-1] +} + +func ident(x ast.Expr) (*ast.Ident, bool) { + if p, ok := x.(*ast.ParenExpr); ok { + return ident(p.X) + } + id, ok := x.(*ast.Ident) + return id, ok +} + +type checker struct { + vars map[*ast.Object]*variable + seen map[*block]bool + ineff idents +} + +func (chk *checker) check(b *block) { + if chk.seen[b] { + return + } + chk.seen[b] = true + + for obj, ops := range b.ops { + ops: + for i, op := range ops { + if !op.assign { + continue + } + if i+1 < len(ops) { + if ops[i+1].assign { + chk.ineff = append(chk.ineff, op.id) + } + continue + } + seen := map[*block]bool{} + for _, b := range b.children { + if used(obj, b, seen) { + continue ops + } + } + if !chk.vars[obj].escapes { + chk.ineff = append(chk.ineff, op.id) + } + } + } + + for _, b := range b.children { + chk.check(b) + } +} + +func used(obj *ast.Object, b *block, seen map[*block]bool) bool { + if seen[b] { + return false + } + seen[b] = true + + if ops := b.ops[obj]; len(ops) > 0 { + return !ops[0].assign + } + for _, b := range b.children { + if used(obj, b, seen) { + return true + } + } + return false +} + +type idents []*ast.Ident + +func (ids idents) Len() int { return len(ids) } +func (ids idents) Less(i, j int) bool { return ids[i].Pos() < ids[j].Pos() } +func (ids idents) Swap(i, j int) { ids[i], ids[j] = ids[j], ids[i] } diff --git a/vendor/github.com/gostaticanalysis/analysisutil/LICENSE b/vendor/github.com/gostaticanalysis/analysisutil/LICENSE new file mode 100644 index 000000000..bf7e33db8 --- /dev/null +++ b/vendor/github.com/gostaticanalysis/analysisutil/LICENSE @@ -0,0 +1,21 @@ +MIT License + 
+Copyright (c) 2019 GoStaticAnalysis + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/github.com/gostaticanalysis/analysisutil/README.md b/vendor/github.com/gostaticanalysis/analysisutil/README.md new file mode 100644 index 000000000..d8fd3d2a4 --- /dev/null +++ b/vendor/github.com/gostaticanalysis/analysisutil/README.md @@ -0,0 +1,5 @@ +# analysisutil + +[![PkgGoDev](https://pkg.go.dev/badge/github.com/gostaticanalysis/analysisutil)](https://pkg.go.dev/github.com/gostaticanalysis/analysisutil) + +Utilities for x/tools/go/analysis package. diff --git a/vendor/github.com/gostaticanalysis/analysisutil/call.go b/vendor/github.com/gostaticanalysis/analysisutil/call.go new file mode 100644 index 000000000..e3d98d1dc --- /dev/null +++ b/vendor/github.com/gostaticanalysis/analysisutil/call.go @@ -0,0 +1,405 @@ +package analysisutil + +import ( + "go/types" + + "golang.org/x/tools/go/ssa" +) + +// CalledChecker checks a function is called. +// See From and Func. +type CalledChecker struct { + Ignore func(instr ssa.Instruction) bool +} + +// NotIn checks whether receiver's method is called in a function. +// If there is no methods calling at a path from an instruction +// which type is receiver to all return instruction, NotIn returns these instructions. +func (c *CalledChecker) NotIn(f *ssa.Function, receiver types.Type, methods ...*types.Func) []ssa.Instruction { + done := map[ssa.Value]bool{} + var instrs []ssa.Instruction + for _, b := range f.Blocks { + for i, instr := range b.Instrs { + v, _ := instr.(ssa.Value) + if v == nil || done[v] { + continue + } + + if v, _ := v.(*ssa.UnOp); v != nil && done[v.X] { + continue + } + + called, ok := c.From(b, i, receiver, methods...) + if ok && !called { + instrs = append(instrs, instr) + done[v] = true + if v, _ := v.(*ssa.UnOp); v != nil { + done[v.X] = true + } + } + } + } + return instrs +} + +// Func returns true when f is called in the instr. +// If recv is not nil, Func also checks the receiver. 
+func (c *CalledChecker) Func(instr ssa.Instruction, recv ssa.Value, f *types.Func) bool { + + if c.Ignore != nil && c.Ignore(instr) { + return false + } + + call, ok := instr.(ssa.CallInstruction) + if !ok { + return false + } + + common := call.Common() + if common == nil { + return false + } + + callee := common.StaticCallee() + if callee == nil { + return false + } + + fn, ok := callee.Object().(*types.Func) + if !ok { + return false + } + + if recv != nil && + common.Signature().Recv() != nil && + (len(common.Args) == 0 && recv != nil || common.Args[0] != recv && + !referrer(recv, common.Args[0])) { + return false + } + + return fn == f +} + +func referrer(a, b ssa.Value) bool { + return isReferrerOf(a, b) || isReferrerOf(b, a) +} + +func isReferrerOf(a, b ssa.Value) bool { + if a == nil || b == nil { + return false + } + if b.Referrers() != nil { + brs := *b.Referrers() + + for _, br := range brs { + brv, ok := br.(ssa.Value) + if !ok { + continue + } + if brv == a { + return true + } + } + } + return false +} + +// From checks whether receiver's method is called in an instruction +// which belogns to after i-th instructions, or in succsor blocks of b. +// The first result is above value. +// The second result is whether type of i-th instruction does not much receiver +// or matches with ignore cases. +func (c *CalledChecker) From(b *ssa.BasicBlock, i int, receiver types.Type, methods ...*types.Func) (called, ok bool) { + if b == nil || i < 0 || i >= len(b.Instrs) || + receiver == nil || len(methods) == 0 { + return false, false + } + + v, ok := b.Instrs[i].(ssa.Value) + if !ok { + return false, false + } + + from := &calledFrom{recv: v, fs: methods, ignore: c.Ignore} + + if !from.isRecv(receiver, v.Type()) { + return false, false + } + + if from.ignored() { + return false, false + } + + if from.instrs(b.Instrs[i+1:]) || + from.succs(b) { + return true, true + } + + from.done = nil + if from.storedInInstrs(b.Instrs[i+1:]) || + from.storedInSuccs(b) { + return false, false + } + + return false, true +} + +type calledFrom struct { + recv ssa.Value + fs []*types.Func + done map[*ssa.BasicBlock]bool + ignore func(ssa.Instruction) bool +} + +func (c *calledFrom) ignored() bool { + + switch v := c.recv.(type) { + case *ssa.UnOp: + switch v.X.(type) { + case *ssa.FreeVar, *ssa.Global: + return true + } + } + + refs := c.recv.Referrers() + if refs == nil { + return false + } + + for _, ref := range *refs { + done := map[ssa.Instruction]bool{} + if !c.isOwn(ref) && + ((c.ignore != nil && c.ignore(ref)) || + c.isRet(ref, done) || c.isArg(ref)) { + return true + } + } + + return false +} + +func (c *calledFrom) isOwn(instr ssa.Instruction) bool { + v, ok := instr.(ssa.Value) + if !ok { + return false + } + return v == c.recv +} + +func (c *calledFrom) isRet(instr ssa.Instruction, done map[ssa.Instruction]bool) bool { + if done[instr] { + return false + } + done[instr] = true + + switch instr := instr.(type) { + case *ssa.Return: + return true + case *ssa.MapUpdate: + return c.isRetInRefs(instr.Map, done) + case *ssa.Store: + if instr, _ := instr.Addr.(ssa.Instruction); instr != nil { + return c.isRet(instr, done) + } + return c.isRetInRefs(instr.Addr, done) + case *ssa.FieldAddr: + return c.isRetInRefs(instr.X, done) + case ssa.Value: + return c.isRetInRefs(instr, done) + default: + return false + } +} + +func (c *calledFrom) isRetInRefs(v ssa.Value, done map[ssa.Instruction]bool) bool { + refs := v.Referrers() + if refs == nil { + return false + } + for _, ref := range *refs { + if 
c.isRet(ref, done) { + return true + } + } + return false +} + +func (c *calledFrom) isArg(instr ssa.Instruction) bool { + + call, ok := instr.(ssa.CallInstruction) + if !ok { + return false + } + + common := call.Common() + if common == nil { + return false + } + + args := common.Args + if common.Signature().Recv() != nil { + args = args[1:] + } + + for i := range args { + if args[i] == c.recv { + return true + } + } + + return false +} + +func (c *calledFrom) instrs(instrs []ssa.Instruction) bool { + for _, instr := range instrs { + for _, f := range c.fs { + if Called(instr, c.recv, f) { + return true + } + } + } + return false +} + +func (c *calledFrom) succs(b *ssa.BasicBlock) bool { + if c.done == nil { + c.done = map[*ssa.BasicBlock]bool{} + } + + if c.done[b] { + return true + } + c.done[b] = true + + if len(b.Succs) == 0 { + return false + } + + for _, s := range b.Succs { + if !c.instrs(s.Instrs) && !c.succs(s) { + return false + } + } + + return true +} + +func (c *calledFrom) storedInInstrs(instrs []ssa.Instruction) bool { + for _, instr := range instrs { + switch instr := instr.(type) { + case *ssa.Store: + if instr.Val == c.recv { + return true + } + } + } + return false +} + +func (c *calledFrom) storedInSuccs(b *ssa.BasicBlock) bool { + if c.done == nil { + c.done = map[*ssa.BasicBlock]bool{} + } + + if c.done[b] { + return true + } + c.done[b] = true + + if len(b.Succs) == 0 { + return false + } + + for _, s := range b.Succs { + if !c.storedInInstrs(s.Instrs) && !c.succs(s) { + return false + } + } + + return true +} + +func (c *calledFrom) isRecv(recv, typ types.Type) bool { + return recv == typ || identical(recv, typ) || + c.isRecvInTuple(recv, typ) || c.isRecvInEmbedded(recv, typ) +} + +func (c *calledFrom) isRecvInTuple(recv, typ types.Type) bool { + tuple, _ := typ.(*types.Tuple) + if tuple == nil { + return false + } + + for i := 0; i < tuple.Len(); i++ { + if c.isRecv(recv, tuple.At(i).Type()) { + return true + } + } + + return false +} + +func (c *calledFrom) isRecvInEmbedded(recv, typ types.Type) bool { + + var st *types.Struct + switch typ := typ.(type) { + case *types.Struct: + st = typ + case *types.Pointer: + return c.isRecvInEmbedded(recv, typ.Elem()) + case *types.Named: + return c.isRecvInEmbedded(recv, typ.Underlying()) + default: + return false + } + + for i := 0; i < st.NumFields(); i++ { + field := st.Field(i) + if !field.Embedded() { + continue + } + + ft := field.Type() + if c.isRecv(recv, ft) { + return true + } + + var ptrOrUnptr types.Type + switch ft := ft.(type) { + case *types.Pointer: + // struct { *T } -> T + ptrOrUnptr = ft.Elem() + default: + // struct { T } -> *T + ptrOrUnptr = types.NewPointer(ft) + } + + if c.isRecv(recv, ptrOrUnptr) { + return true + } + } + + return false +} + +// NotCalledIn checks whether receiver's method is called in a function. +// If there is no methods calling at a path from an instruction +// which type is receiver to all return instruction, NotCalledIn returns these instructions. +func NotCalledIn(f *ssa.Function, receiver types.Type, methods ...*types.Func) []ssa.Instruction { + return new(CalledChecker).NotIn(f, receiver, methods...) +} + +// CalledFrom checks whether receiver's method is called in an instruction +// which belogns to after i-th instructions, or in succsor blocks of b. +// The first result is above value. +// The second result is whether type of i-th instruction does not much receiver +// or matches with ignore cases. 
+func CalledFrom(b *ssa.BasicBlock, i int, receiver types.Type, methods ...*types.Func) (called, ok bool) { + return new(CalledChecker).From(b, i, receiver, methods...) +} + +// Called returns true when f is called in the instr. +// If recv is not nil, Called also checks the receiver. +func Called(instr ssa.Instruction, recv ssa.Value, f *types.Func) bool { + return new(CalledChecker).Func(instr, recv, f) +} diff --git a/vendor/github.com/gostaticanalysis/analysisutil/diagnostic.go b/vendor/github.com/gostaticanalysis/analysisutil/diagnostic.go new file mode 100644 index 000000000..a911db6f1 --- /dev/null +++ b/vendor/github.com/gostaticanalysis/analysisutil/diagnostic.go @@ -0,0 +1,45 @@ +package analysisutil + +import ( + "go/token" + + "github.com/gostaticanalysis/comment" + "github.com/gostaticanalysis/comment/passes/commentmap" + "golang.org/x/tools/go/analysis" +) + +// ReportWithoutIgnore returns a report function which can set to (analysis.Pass).Report. +// The report function ignores a diagnostic which annotated by ignore comment as the below. +// //lint:ignore Check1[,Check2,...,CheckN] reason +// names is a list of checker names. +// If names was omitted, the report function ignores by pass.Analyzer.Name. +func ReportWithoutIgnore(pass *analysis.Pass, names ...string) func(analysis.Diagnostic) { + cmaps, _ := pass.ResultOf[commentmap.Analyzer].(comment.Maps) + if cmaps == nil { + cmaps = comment.New(pass.Fset, pass.Files) + } + + if len(names) == 0 { + names = []string{pass.Analyzer.Name} + } + + report := pass.Report // original report func + + return func(d analysis.Diagnostic) { + start := pass.Fset.File(d.Pos).Line(d.Pos) + end := start + if d.End != token.NoPos { + end = pass.Fset.File(d.End).Line(d.End) + } + + for l := start; l <= end; l++ { + for _, n := range names { + if cmaps.IgnoreLine(pass.Fset, l, n) { + return + } + } + } + + report(d) + } +} diff --git a/vendor/github.com/gostaticanalysis/analysisutil/file.go b/vendor/github.com/gostaticanalysis/analysisutil/file.go new file mode 100644 index 000000000..2aeca1d9e --- /dev/null +++ b/vendor/github.com/gostaticanalysis/analysisutil/file.go @@ -0,0 +1,18 @@ +package analysisutil + +import ( + "go/ast" + "go/token" + + "golang.org/x/tools/go/analysis" +) + +// File finds *ast.File in pass.Files by pos. 
+func File(pass *analysis.Pass, pos token.Pos) *ast.File { + for _, f := range pass.Files { + if f.Pos() <= pos && pos <= f.End() { + return f + } + } + return nil +} diff --git a/vendor/github.com/gostaticanalysis/analysisutil/go.mod b/vendor/github.com/gostaticanalysis/analysisutil/go.mod new file mode 100644 index 000000000..5ca7c62b8 --- /dev/null +++ b/vendor/github.com/gostaticanalysis/analysisutil/go.mod @@ -0,0 +1,8 @@ +module github.com/gostaticanalysis/analysisutil + +go 1.12 + +require ( + github.com/gostaticanalysis/comment v1.4.1 + golang.org/x/tools v0.0.0-20200820010801-b793a1359eac +) diff --git a/vendor/github.com/gostaticanalysis/analysisutil/go.sum b/vendor/github.com/gostaticanalysis/analysisutil/go.sum new file mode 100644 index 000000000..134e67dbd --- /dev/null +++ b/vendor/github.com/gostaticanalysis/analysisutil/go.sum @@ -0,0 +1,37 @@ +github.com/google/go-cmp v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/gostaticanalysis/comment v1.3.0 h1:wTVgynbFu8/nz6SGgywA0TcyIoAVsYc7ai/Zp5xNGlw= +github.com/gostaticanalysis/comment v1.3.0/go.mod h1:xMicKDx7XRXYdVwY9f9wQpDJVnqWxw9wCauCMKp+IBI= +github.com/gostaticanalysis/comment v1.4.1 h1:xHopR5L2lRz6OsjH4R2HG5wRhW9ySl3FsHIvi5pcXwc= +github.com/gostaticanalysis/comment v1.4.1/go.mod h1:ih6ZxzTHLdadaiSnF5WY3dxUoXfXAlTaRzuaNDlSado= +github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/mod v0.2.0 h1:KU7oHjnv3XNWfa5COkzUifxZmxp1TyI7ImMXqFxLwvQ= +golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.3.0 h1:RM4zey1++hCTbCVQfnWeKs9/IEsaBLA8vTkd0WVtmH4= +golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200625001655-4c5254603344/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/tools v0.0.0-20190307163923-6a08e3108db3 h1:2oZsfYnKfYzL4I57uYiRFsUf0bqlLkiuw8nnj3+voUA= +golang.org/x/tools 
v0.0.0-20190307163923-6a08e3108db3/go.mod h1:25r3+/G6/xytQM8iWZKq3Hn0kr0rgFKPUNVEL/dr3z4= +golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20200624225443-88f3c62a19ff h1:foic6oVZ4MKltJC6MXzuFZFswE7NCjjtc0Hxbyblawc= +golang.org/x/tools v0.0.0-20200624225443-88f3c62a19ff/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200820010801-b793a1359eac h1:DugppSxw0LSF8lcjaODPJZoDzq0ElTGskTst3ZaBkHI= +golang.org/x/tools v0.0.0-20200820010801-b793a1359eac/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543 h1:E7g+9GITq07hpfrRu66IVDexMakfv52eLZ2CXBWiKr4= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= diff --git a/vendor/github.com/gostaticanalysis/analysisutil/pkg.go b/vendor/github.com/gostaticanalysis/analysisutil/pkg.go new file mode 100644 index 000000000..b64150d81 --- /dev/null +++ b/vendor/github.com/gostaticanalysis/analysisutil/pkg.go @@ -0,0 +1,49 @@ +package analysisutil + +import ( + "go/types" + "strconv" + "strings" + + "golang.org/x/tools/go/analysis" +) + +// RemoVendor removes vendoring information from import path. +func RemoveVendor(path string) string { + i := strings.Index(path, "vendor/") + if i >= 0 { + return path[i+len("vendor/"):] + } + return path +} + +// LookupFromImports finds an object from import paths. +func LookupFromImports(imports []*types.Package, path, name string) types.Object { + path = RemoveVendor(path) + for i := range imports { + if path == RemoveVendor(imports[i].Path()) { + return imports[i].Scope().Lookup(name) + } + } + return nil +} + +// Imported returns true when the given pass imports the pkg. +func Imported(pkgPath string, pass *analysis.Pass) bool { + fs := pass.Files + if len(fs) == 0 { + return false + } + for _, f := range fs { + for _, i := range f.Imports { + path, err := strconv.Unquote(i.Path.Value) + if err != nil { + continue + } + if RemoveVendor(path) == pkgPath { + return true + } + } + } + return false +} diff --git a/vendor/github.com/gostaticanalysis/analysisutil/ssa.go b/vendor/github.com/gostaticanalysis/analysisutil/ssa.go new file mode 100644 index 000000000..517f6b9b4 --- /dev/null +++ b/vendor/github.com/gostaticanalysis/analysisutil/ssa.go @@ -0,0 +1,146 @@ +package analysisutil + +import ( + "golang.org/x/tools/go/ssa" +) + +// IfInstr returns *ssa.If which is contained in the block b. +// If the block b has not any if instruction, IfInstr returns nil. +func IfInstr(b *ssa.BasicBlock) *ssa.If { + if len(b.Instrs) == 0 { + return nil + } + + ifinstr, ok := b.Instrs[len(b.Instrs)-1].(*ssa.If) + if !ok { + return nil + } + + return ifinstr +} + +// Phi returns phi values which are contained in the block b. +func Phi(b *ssa.BasicBlock) (phis []*ssa.Phi) { + for _, instr := range b.Instrs { + if phi, ok := instr.(*ssa.Phi); ok { + phis = append(phis, phi) + } else { + // no more phi + break + } + } + return +} + +// Returns returns a slice of *ssa.Return in the function. 
+func Returns(v ssa.Value) []*ssa.Return { + var fn *ssa.Function + switch v := v.(type) { + case *ssa.Function: + fn = v + case *ssa.MakeClosure: + return Returns(v.Fn) + default: + return nil + } + + var rets []*ssa.Return + done := map[*ssa.BasicBlock]bool{} + for _, b := range fn.Blocks { + rets = append(rets, returnsInBlock(b, done)...) + } + return rets +} + +func returnsInBlock(b *ssa.BasicBlock, done map[*ssa.BasicBlock]bool) (rets []*ssa.Return) { + if done[b] { + return + } + done[b] = true + + if len(b.Instrs) != 0 { + switch instr := b.Instrs[len(b.Instrs)-1].(type) { + case *ssa.Return: + rets = append(rets, instr) + } + } + + for _, s := range b.Succs { + rets = append(rets, returnsInBlock(s, done)...) + } + return +} + +// BinOp returns binary operator values which are contained in the block b. +func BinOp(b *ssa.BasicBlock) []*ssa.BinOp { + var binops []*ssa.BinOp + for _, instr := range b.Instrs { + if binop, ok := instr.(*ssa.BinOp); ok { + binops = append(binops, binop) + } + } + return binops +} + +// Used returns an instruction which uses the value in the instructions. +func Used(v ssa.Value, instrs []ssa.Instruction) ssa.Instruction { + if len(instrs) == 0 || v.Referrers() == nil { + return nil + } + + for _, instr := range instrs { + if used := usedInInstr(v, instr); used != nil { + return used + } + } + + return nil +} + +func usedInInstr(v ssa.Value, instr ssa.Instruction) ssa.Instruction { + switch instr := instr.(type) { + case *ssa.MakeClosure: + return usedInClosure(v, instr) + default: + operands := instr.Operands(nil) + for _, x := range operands { + if x != nil && *x == v { + return instr + } + } + } + + switch v := v.(type) { + case *ssa.UnOp: + return usedInInstr(v.X, instr) + } + + return nil +} + +func usedInClosure(v ssa.Value, instr *ssa.MakeClosure) ssa.Instruction { + fn, _ := instr.Fn.(*ssa.Function) + if fn == nil { + return nil + } + + var fv *ssa.FreeVar + for i := range instr.Bindings { + if instr.Bindings[i] == v { + fv = fn.FreeVars[i] + break + } + } + + if fv == nil { + return nil + } + + for _, b := range fn.Blocks { + if used := Used(fv, b.Instrs); used != nil { + return used + } + } + + return nil +} diff --git a/vendor/github.com/gostaticanalysis/analysisutil/ssainspect.go b/vendor/github.com/gostaticanalysis/analysisutil/ssainspect.go new file mode 100644 index 000000000..2f8a16576 --- /dev/null +++ b/vendor/github.com/gostaticanalysis/analysisutil/ssainspect.go @@ -0,0 +1,37 @@ +package analysisutil + +import "golang.org/x/tools/go/ssa" + +// InspectInstr inspects from i-th instruction of start block to succsessor blocks. 
+func InspectInstr(start *ssa.BasicBlock, i int, f func(i int, instr ssa.Instruction) bool) { + new(instrInspector).block(start, i, f) +} + +type instrInspector struct { + done map[*ssa.BasicBlock]bool +} + +func (ins *instrInspector) block(b *ssa.BasicBlock, i int, f func(i int, instr ssa.Instruction) bool) { + if ins.done == nil { + ins.done = map[*ssa.BasicBlock]bool{} + } + + if b == nil || ins.done[b] || len(b.Instrs) <= i { + return + } + + ins.done[b] = true + ins.instrs(i, b.Instrs[i:], f) + for _, s := range b.Succs { + ins.block(s, 0, f) + } + +} + +func (ins *instrInspector) instrs(offset int, instrs []ssa.Instruction, f func(i int, instr ssa.Instruction) bool) { + for i, instr := range instrs { + if !f(offset+i, instr) { + break + } + } +} diff --git a/vendor/github.com/gostaticanalysis/analysisutil/types.go b/vendor/github.com/gostaticanalysis/analysisutil/types.go new file mode 100644 index 000000000..46b970621 --- /dev/null +++ b/vendor/github.com/gostaticanalysis/analysisutil/types.go @@ -0,0 +1,208 @@ +package analysisutil + +import ( + "go/ast" + "go/types" + + "golang.org/x/tools/go/analysis" +) + +var errType = types.Universe.Lookup("error").Type().Underlying().(*types.Interface) + +// ImplementsError return whether t implements error interface. +func ImplementsError(t types.Type) bool { + return types.Implements(t, errType) +} + +// ObjectOf returns types.Object by given name in the package. +func ObjectOf(pass *analysis.Pass, pkg, name string) types.Object { + obj := LookupFromImports(pass.Pkg.Imports(), pkg, name) + if obj != nil { + return obj + } + if RemoveVendor(pass.Pkg.Name()) != RemoveVendor(pkg) { + return nil + } + return pass.Pkg.Scope().Lookup(name) +} + +// TypeOf returns types.Type by given name in the package. +// TypeOf accepts pointer types such as *T. +func TypeOf(pass *analysis.Pass, pkg, name string) types.Type { + if name == "" { + return nil + } + + if name[0] == '*' { + obj := TypeOf(pass, pkg, name[1:]) + if obj == nil { + return nil + } + return types.NewPointer(obj) + } + + obj := ObjectOf(pass, pkg, name) + if obj == nil { + return nil + } + + return obj.Type() +} + +// MethodOf returns a method which has given name in the type. +func MethodOf(typ types.Type, name string) *types.Func { + switch typ := typ.(type) { + case *types.Named: + for i := 0; i < typ.NumMethods(); i++ { + if f := typ.Method(i); f.Name() == name { + return f + } + } + case *types.Pointer: + return MethodOf(typ.Elem(), name) + } + return nil +} + +// see: https://github.com/golang/go/issues/19670 +func identical(x, y types.Type) (ret bool) { + defer func() { + r := recover() + switch r := r.(type) { + case string: + if r == "unreachable" { + ret = false + return + } + case nil: + return + } + panic(r) + }() + return types.Identical(x, y) +} + +// Interfaces returns a map of interfaces which are declared in the package. +func Interfaces(pkg *types.Package) map[string]*types.Interface { + ifs := map[string]*types.Interface{} + + for _, n := range pkg.Scope().Names() { + o := pkg.Scope().Lookup(n) + if o != nil { + i, ok := o.Type().Underlying().(*types.Interface) + if ok { + ifs[n] = i + } + } + } + + return ifs +} + +// Structs returns a map of structs which are declared in the package. 
+func Structs(pkg *types.Package) map[string]*types.Struct { + structs := map[string]*types.Struct{} + + for _, n := range pkg.Scope().Names() { + o := pkg.Scope().Lookup(n) + if o != nil { + s, ok := o.Type().Underlying().(*types.Struct) + if ok { + structs[n] = s + } + } + } + + return structs +} + +// HasField returns whether the struct has the field. +func HasField(s *types.Struct, f *types.Var) bool { + if s == nil || f == nil { + return false + } + + for i := 0; i < s.NumFields(); i++ { + if s.Field(i) == f { + return true + } + } + + return false +} + +func TypesInfo(info ...*types.Info) *types.Info { + if len(info) == 0 { + return nil + } + + var merged types.Info + for i := range info { + mergeTypesInfo(&merged, info[i]) + } + + return &merged +} + +func mergeTypesInfo(i1, i2 *types.Info) { + // Types + if i1.Types == nil && i2.Types != nil { + i1.Types = map[ast.Expr]types.TypeAndValue{} + } + for expr, tv := range i2.Types { + i1.Types[expr] = tv + } + + // Defs + if i1.Defs == nil && i2.Defs != nil { + i1.Defs = map[*ast.Ident]types.Object{} + } + for ident, obj := range i2.Defs { + i1.Defs[ident] = obj + } + + // Uses + if i1.Uses == nil && i2.Uses != nil { + i1.Uses = map[*ast.Ident]types.Object{} + } + for ident, obj := range i2.Uses { + i1.Uses[ident] = obj + } + + // Implicits + if i1.Implicits == nil && i2.Implicits != nil { + i1.Implicits = map[ast.Node]types.Object{} + } + for n, obj := range i2.Implicits { + i1.Implicits[n] = obj + } + + // Selections + if i1.Selections == nil && i2.Selections != nil { + i1.Selections = map[*ast.SelectorExpr]*types.Selection{} + } + for expr, sel := range i2.Selections { + i1.Selections[expr] = sel + } + + // Scopes + if i1.Scopes == nil && i2.Scopes != nil { + i1.Scopes = map[ast.Node]*types.Scope{} + } + for n, s := range i2.Scopes { + i1.Scopes[n] = s + } + + // InitOrder + i1.InitOrder = append(i1.InitOrder, i2.InitOrder...) +} + +// Under returns the most bottom underlying type. +func Under(t types.Type) types.Type { + switch t := t.(type) { + case *types.Named: + return Under(t.Underlying()) + default: + return t + } +} diff --git a/vendor/github.com/gostaticanalysis/comment/LICENSE b/vendor/github.com/gostaticanalysis/comment/LICENSE new file mode 100644 index 000000000..4f7eeff5b --- /dev/null +++ b/vendor/github.com/gostaticanalysis/comment/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2018 Takuya Ueda + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
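The analysisutil helpers above and the gostaticanalysis/comment package that follows are meant to be wired together from a custom analyzer: `ReportWithoutIgnore` wraps `pass.Report` and consults the comment maps produced by `commentmap.Analyzer`, so diagnostics on lines annotated with `//lint:ignore <name> reason` are suppressed. A minimal sketch of that wiring, with a hypothetical analyzer name and a deliberately trivial check (neither is part of these packages):

```go
package sketch

import (
	"go/ast"

	"github.com/gostaticanalysis/analysisutil"
	"github.com/gostaticanalysis/comment/passes/commentmap"
	"golang.org/x/tools/go/analysis"
)

// demoAnalyzer is hypothetical; only the wiring of Requires and
// ReportWithoutIgnore reflects how these packages are intended to be used.
var demoAnalyzer = &analysis.Analyzer{
	Name:     "demo",
	Doc:      "reports every return statement, honoring //lint:ignore demo",
	Requires: []*analysis.Analyzer{commentmap.Analyzer},
	Run:      runDemo,
}

func runDemo(pass *analysis.Pass) (interface{}, error) {
	// report behaves like pass.Report, except that diagnostics on lines
	// carrying a matching //lint:ignore comment are dropped.
	report := analysisutil.ReportWithoutIgnore(pass)

	for _, f := range pass.Files {
		ast.Inspect(f, func(n ast.Node) bool {
			if ret, ok := n.(*ast.ReturnStmt); ok {
				// Deliberately trivial finding, just to have something to report.
				report(analysis.Diagnostic{Pos: ret.Pos(), Message: "return statement"})
			}
			return true
		})
	}
	return nil, nil
}
```

Hooked up through `golang.org/x/tools/go/analysis/singlechecker` (or unitchecker) this runs like any other go/analysis analyzer; the only difference from calling `pass.Reportf` directly is that the ignore directives handled by the comment package are respected.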
diff --git a/vendor/github.com/gostaticanalysis/comment/README.md b/vendor/github.com/gostaticanalysis/comment/README.md new file mode 100644 index 000000000..533555313 --- /dev/null +++ b/vendor/github.com/gostaticanalysis/comment/README.md @@ -0,0 +1,10 @@ +# gostaticanalysis/comment + +[![godoc.org][godoc-badge]][godoc] + +`comment` provides utilities for [ast.CommentMap](https://golang.org/pkg/go/ast/#CommentMap). + + +[godoc]: https://godoc.org/github.com/gostaticanalysis/comment +[godoc-badge]: https://img.shields.io/badge/godoc-reference-4F73B3.svg?style=flat-square&label=%20godoc.org + diff --git a/vendor/github.com/gostaticanalysis/comment/comment.go b/vendor/github.com/gostaticanalysis/comment/comment.go new file mode 100644 index 000000000..2fe67fa96 --- /dev/null +++ b/vendor/github.com/gostaticanalysis/comment/comment.go @@ -0,0 +1,147 @@ +package comment + +import ( + "go/ast" + "go/token" + "strings" +) + +// Maps is slice of ast.CommentMap. +type Maps []ast.CommentMap + +// New creates new a CommentMap slice from specified files. +func New(fset *token.FileSet, files []*ast.File) Maps { + maps := make(Maps, len(files)) + for i := range files { + maps[i] = ast.NewCommentMap(fset, files[i], files[i].Comments) + } + return maps +} + +// Comments returns correspond a CommentGroup slice to specified AST node. +func (maps Maps) Comments(n ast.Node) []*ast.CommentGroup { + for i := range maps { + if maps[i][n] != nil { + return maps[i][n] + } + } + return nil +} + +// CommentsByPos returns correspond a CommentGroup slice to specified pos. +func (maps Maps) CommentsByPos(pos token.Pos) []*ast.CommentGroup { + for i := range maps { + for n, cgs := range maps[i] { + if n.Pos() == pos { + return cgs + } + } + } + return nil +} + +// Annotated checks either specified AST node is annotated or not. +func (maps Maps) Annotated(n ast.Node, annotation string) bool { + for _, cg := range maps.Comments(n) { + if strings.HasPrefix(strings.TrimSpace(cg.Text()), annotation) { + return true + } + } + return false +} + +// Ignore checks either specified AST node is ignored by the check. +// It follows staticcheck style as the below. +// //lint:ignore Check1[,Check2,...,CheckN] reason +func (maps Maps) Ignore(n ast.Node, check string) bool { + for _, cg := range maps.Comments(n) { + if hasIgnoreCheck(cg, check) { + return true + } + } + return false +} + +// IgnorePos checks either specified postion of AST node is ignored by the check. +// It follows staticcheck style as the below. +// //lint:ignore Check1[,Check2,...,CheckN] reason +func (maps Maps) IgnorePos(pos token.Pos, check string) bool { + for _, cg := range maps.CommentsByPos(pos) { + if hasIgnoreCheck(cg, check) { + return true + } + } + return false +} + +// Deprecated: This function does not work with multiple files. +// CommentsByPosLine can be used instead of CommentsByLine. +// +// CommentsByLine returns correspond a CommentGroup slice to specified line. +func (maps Maps) CommentsByLine(fset *token.FileSet, line int) []*ast.CommentGroup { + for i := range maps { + for n, cgs := range maps[i] { + l := fset.File(n.Pos()).Line(n.Pos()) + if l == line { + return cgs + } + } + } + return nil +} + +// CommentsByPosLine returns correspond a CommentGroup slice to specified line. 
+func (maps Maps) CommentsByPosLine(fset *token.FileSet, pos token.Pos) []*ast.CommentGroup { + f1 := fset.File(pos) + for i := range maps { + for n, cgs := range maps[i] { + f2 := fset.File(n.Pos()) + if f1 != f2 { + // different file + continue + } + + if f1.Line(pos) == f2.Line(n.Pos()) { + return cgs + } + } + } + return nil +} + +// IgnoreLine checks either specified lineof AST node is ignored by the check. +// It follows staticcheck style as the below. +// //lint:ignore Check1[,Check2,...,CheckN] reason +func (maps Maps) IgnoreLine(fset *token.FileSet, line int, check string) bool { + for _, cg := range maps.CommentsByLine(fset, line) { + if hasIgnoreCheck(cg, check) { + return true + } + } + return false +} + +// hasIgnoreCheck returns true if the provided CommentGroup starts with a comment +// of the form "//lint:ignore Check1[,Check2,...,CheckN] reason" and one of the +// checks matches the provided check. The *ast.CommentGroup is checked directly +// rather than using "cg.Text()" because, starting in Go 1.15, the "cg.Text()" call +// no longer returns directive-style comments (see https://github.com/golang/go/issues/37974). +func hasIgnoreCheck(cg *ast.CommentGroup, check string) bool { + if !strings.HasPrefix(cg.List[0].Text, "//") { + return false + } + + s := strings.TrimSpace(cg.List[0].Text[2:]) + txt := strings.Split(s, " ") + if len(txt) < 3 || txt[0] != "lint:ignore" { + return false + } + + checks := strings.Split(txt[1], ",") + for i := range checks { + if check == checks[i] { + return true + } + } + return false +} diff --git a/vendor/github.com/gostaticanalysis/comment/go.mod b/vendor/github.com/gostaticanalysis/comment/go.mod new file mode 100644 index 000000000..275568113 --- /dev/null +++ b/vendor/github.com/gostaticanalysis/comment/go.mod @@ -0,0 +1,8 @@ +module github.com/gostaticanalysis/comment + +go 1.12 + +require ( + github.com/google/go-cmp v0.5.1 + golang.org/x/tools v0.0.0-20200820010801-b793a1359eac +) diff --git a/vendor/github.com/gostaticanalysis/comment/go.sum b/vendor/github.com/gostaticanalysis/comment/go.sum new file mode 100644 index 000000000..425807ce1 --- /dev/null +++ b/vendor/github.com/gostaticanalysis/comment/go.sum @@ -0,0 +1,24 @@ +github.com/google/go-cmp v0.5.1 h1:JFrFEBb2xKufg6XkJsJr+WbKb4FQlURi5RUcBveYu9k= +github.com/google/go-cmp v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/mod v0.3.0 h1:RM4zey1++hCTbCVQfnWeKs9/IEsaBLA8vTkd0WVtmH4= +golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200625001655-4c5254603344/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sys 
v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20200820010801-b793a1359eac h1:DugppSxw0LSF8lcjaODPJZoDzq0ElTGskTst3ZaBkHI= +golang.org/x/tools v0.0.0-20200820010801-b793a1359eac/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543 h1:E7g+9GITq07hpfrRu66IVDexMakfv52eLZ2CXBWiKr4= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= diff --git a/vendor/github.com/gostaticanalysis/comment/passes/commentmap/commentmap.go b/vendor/github.com/gostaticanalysis/comment/passes/commentmap/commentmap.go new file mode 100644 index 000000000..9266d9895 --- /dev/null +++ b/vendor/github.com/gostaticanalysis/comment/passes/commentmap/commentmap.go @@ -0,0 +1,20 @@ +package commentmap + +import ( + "reflect" + + "github.com/gostaticanalysis/comment" + "golang.org/x/tools/go/analysis" +) + +var Analyzer = &analysis.Analyzer{ + Name: "commentmap", + Doc: "create comment map", + Run: run, + RunDespiteErrors: true, + ResultType: reflect.TypeOf(comment.Maps{}), +} + +func run(pass *analysis.Pass) (interface{}, error) { + return comment.New(pass.Fset, pass.Files), nil +} diff --git a/vendor/github.com/gostaticanalysis/forcetypeassert/.reviewdog.yml b/vendor/github.com/gostaticanalysis/forcetypeassert/.reviewdog.yml new file mode 100644 index 000000000..2e243ff73 --- /dev/null +++ b/vendor/github.com/gostaticanalysis/forcetypeassert/.reviewdog.yml @@ -0,0 +1,8 @@ +runner: + golint: + cmd: golint ./... + errorformat: + - "%f:%l:%c: %m" + level: warning + govet: + cmd: go vet -all . diff --git a/vendor/github.com/gostaticanalysis/forcetypeassert/LICENSE b/vendor/github.com/gostaticanalysis/forcetypeassert/LICENSE new file mode 100644 index 000000000..bf7e33db8 --- /dev/null +++ b/vendor/github.com/gostaticanalysis/forcetypeassert/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2019 GoStaticAnalysis + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/github.com/gostaticanalysis/forcetypeassert/README.md b/vendor/github.com/gostaticanalysis/forcetypeassert/README.md new file mode 100644 index 000000000..517f69400 --- /dev/null +++ b/vendor/github.com/gostaticanalysis/forcetypeassert/README.md @@ -0,0 +1,17 @@ +# forcetypeassert + +[![godoc.org][godoc-badge]][godoc] + +`forcetypeassert` finds type assertions which did forcely such as below. + +```go +func f() { + var a interface{} + _ = a.(int) // type assertion must be checked +} +``` + + +[godoc]: https://godoc.org/github.com/gostaticanalysis/forcetypeassert +[godoc-badge]: https://img.shields.io/badge/godoc-reference-4F73B3.svg?style=flat-square&label=%20godoc.org + diff --git a/vendor/github.com/gostaticanalysis/forcetypeassert/forcetypeassert.go b/vendor/github.com/gostaticanalysis/forcetypeassert/forcetypeassert.go new file mode 100644 index 000000000..cdc49e3b5 --- /dev/null +++ b/vendor/github.com/gostaticanalysis/forcetypeassert/forcetypeassert.go @@ -0,0 +1,67 @@ +package forcetypeassert + +import ( + "go/ast" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/ast/inspector" +) + +var Analyzer = &analysis.Analyzer{ + Name: "forcetypeassert", + Doc: Doc, + Run: run, + Requires: []*analysis.Analyzer{ + inspect.Analyzer, + }, +} + +const Doc = "forcetypeassert is finds type assertions which did forcely such as below." + +func run(pass *analysis.Pass) (interface{}, error) { + inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + + nodeFilter := []ast.Node{ + (*ast.AssignStmt)(nil), + } + + inspect.Preorder(nodeFilter, func(n ast.Node) { + switch n := n.(type) { + case *ast.AssignStmt: + if !hasTypeAssertion(n.Rhs) { + return + } + // if right hand has 2 or more values, assign statement can't assert boolean value which describes type assertion is succeeded + if len(n.Rhs) > 1 { + pass.Reportf(n.Pos(), "right hand must be only type assertion") + return + } + if len(n.Lhs) == 2 { + return + } + + tae, ok := n.Rhs[0].(*ast.TypeAssertExpr) + if !ok { + pass.Reportf(n.Pos(), "right hand is not TypeAssertion") + return + } + if tae.Type == nil { + return + } + pass.Reportf(n.Pos(), "type assertion must be checked") + } + }) + + return nil, nil +} + +func hasTypeAssertion(exprs []ast.Expr) bool { + for _, node := range exprs { + _, ok := node.(*ast.TypeAssertExpr) + if ok { + return true + } + } + return false +} diff --git a/vendor/github.com/gostaticanalysis/forcetypeassert/go.mod b/vendor/github.com/gostaticanalysis/forcetypeassert/go.mod new file mode 100644 index 000000000..32b9c14a8 --- /dev/null +++ b/vendor/github.com/gostaticanalysis/forcetypeassert/go.mod @@ -0,0 +1,5 @@ +module github.com/gostaticanalysis/forcetypeassert + +go 1.12 + +require golang.org/x/tools v0.0.0-20190321232350-e250d351ecad diff --git a/vendor/github.com/gostaticanalysis/forcetypeassert/go.sum b/vendor/github.com/gostaticanalysis/forcetypeassert/go.sum new file mode 100644 index 000000000..7a25fbce3 --- /dev/null +++ b/vendor/github.com/gostaticanalysis/forcetypeassert/go.sum @@ -0,0 +1,6 @@ +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/net 
v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/tools v0.0.0-20190321232350-e250d351ecad h1:tYrC3aF7wTeS1noni7wCGu94xeMVu0dxOdFufzx/VM8= +golang.org/x/tools v0.0.0-20190321232350-e250d351ecad/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= diff --git a/vendor/github.com/gostaticanalysis/nilerr/LICENSE b/vendor/github.com/gostaticanalysis/nilerr/LICENSE new file mode 100644 index 000000000..bf7e33db8 --- /dev/null +++ b/vendor/github.com/gostaticanalysis/nilerr/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2019 GoStaticAnalysis + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/github.com/gostaticanalysis/nilerr/README.md b/vendor/github.com/gostaticanalysis/nilerr/README.md new file mode 100644 index 000000000..d6b4acf8b --- /dev/null +++ b/vendor/github.com/gostaticanalysis/nilerr/README.md @@ -0,0 +1,41 @@ +# nilerr + +[![pkg.go.dev][gopkg-badge]][gopkg] + +`nilerr` finds code which returns nil even though it checks that error is not nil. + +```go +func f() error { + err := do() + if err != nil { + return nil // miss + } +} +``` + +`nilerr` also finds code which returns error even though it checks that error is nil. + +```go +func f() error { + err := do() + if err == nil { + return err // miss + } +} +``` + +`nilerr` ignores code which has a miss with ignore comment. 
+ +```go +func f() error { + err := do() + if err != nil { + //lint:ignore nilerr reason + return nil // ignore + } +} +``` + + +[gopkg]: https://pkg.go.dev/github.com/gostaticanalysis/nilerr +[gopkg-badge]: https://pkg.go.dev/badge/github.com/gostaticanalysis/nilerr?status.svg diff --git a/vendor/github.com/gostaticanalysis/nilerr/go.mod b/vendor/github.com/gostaticanalysis/nilerr/go.mod new file mode 100644 index 000000000..d4ed85c98 --- /dev/null +++ b/vendor/github.com/gostaticanalysis/nilerr/go.mod @@ -0,0 +1,8 @@ +module github.com/gostaticanalysis/nilerr + +go 1.15 + +require ( + github.com/gostaticanalysis/comment v1.4.1 + golang.org/x/tools v0.0.0-20201023174141-c8cfbd0f21e6 +) diff --git a/vendor/github.com/gostaticanalysis/nilerr/go.sum b/vendor/github.com/gostaticanalysis/nilerr/go.sum new file mode 100644 index 000000000..93690fa9e --- /dev/null +++ b/vendor/github.com/gostaticanalysis/nilerr/go.sum @@ -0,0 +1,34 @@ +github.com/google/go-cmp v0.5.1 h1:JFrFEBb2xKufg6XkJsJr+WbKb4FQlURi5RUcBveYu9k= +github.com/google/go-cmp v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/gostaticanalysis/comment v1.4.1 h1:xHopR5L2lRz6OsjH4R2HG5wRhW9ySl3FsHIvi5pcXwc= +github.com/gostaticanalysis/comment v1.4.1/go.mod h1:ih6ZxzTHLdadaiSnF5WY3dxUoXfXAlTaRzuaNDlSado= +github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/mod v0.3.0 h1:RM4zey1++hCTbCVQfnWeKs9/IEsaBLA8vTkd0WVtmH4= +golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200625001655-4c5254603344/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= 
+golang.org/x/tools v0.0.0-20200820010801-b793a1359eac/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20201023174141-c8cfbd0f21e6 h1:rbvTkL9AkFts1cgI78+gG6Yu1pwaqX6hjSJAatB78E4= +golang.org/x/tools v0.0.0-20201023174141-c8cfbd0f21e6/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= diff --git a/vendor/github.com/gostaticanalysis/nilerr/nilerr.go b/vendor/github.com/gostaticanalysis/nilerr/nilerr.go new file mode 100644 index 000000000..787a9e1e9 --- /dev/null +++ b/vendor/github.com/gostaticanalysis/nilerr/nilerr.go @@ -0,0 +1,291 @@ +package nilerr + +import ( + "fmt" + "go/token" + "go/types" + + "github.com/gostaticanalysis/comment" + "github.com/gostaticanalysis/comment/passes/commentmap" + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/buildssa" + "golang.org/x/tools/go/ssa" +) + +var Analyzer = &analysis.Analyzer{ + Name: "nilerr", + Doc: Doc, + Run: run, + Requires: []*analysis.Analyzer{ + buildssa.Analyzer, + commentmap.Analyzer, + }, +} + +const Doc = "nilerr checks returning nil when err is not nil" + +func run(pass *analysis.Pass) (interface{}, error) { + funcs := pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs + cmaps := pass.ResultOf[commentmap.Analyzer].(comment.Maps) + + reportFail := func(v ssa.Value, ret *ssa.Return, format string) { + pos := ret.Pos() + line := getNodeLineNumber(pass, ret) + errLines := getValueLineNumbers(pass, v) + if !cmaps.IgnoreLine(pass.Fset, line, "nilerr") { + var errLineText string + if len(errLines) == 1 { + errLineText = fmt.Sprintf("line %d", errLines[0]) + } else { + errLineText = fmt.Sprintf("lines %v", errLines) + } + pass.Reportf(pos, format, errLineText) + } + } + + for i := range funcs { + for _, b := range funcs[i].Blocks { + if v := binOpErrNil(b, token.NEQ); v != nil { + if ret := isReturnNil(b.Succs[0]); ret != nil { + if !usesErrorValue(b.Succs[0], v) { + reportFail(v, ret, "error is not nil (%s) but it returns nil") + } + } + } else if v := binOpErrNil(b, token.EQL); v != nil { + if len(b.Succs[0].Preds) == 1 { // if there are multiple conditions, this may be false positive + if ret := isReturnError(b.Succs[0], v); ret != nil { + reportFail(v, ret, "error is nil (%s) but it returns error") + } + } + } + + } + } + + return nil, nil +} + +func getValueLineNumbers(pass *analysis.Pass, v ssa.Value) []int { + if phi, ok := v.(*ssa.Phi); ok { + result := make([]int, 0, len(phi.Edges)) + for _, edge := range phi.Edges { + result = append(result, getValueLineNumbers(pass, edge)...) 
+ } + return result + } + + value := v + if extract, ok := value.(*ssa.Extract); ok { + value = extract.Tuple + } + + pos := value.Pos() + return []int{pass.Fset.File(pos).Line(pos)} +} + +func getNodeLineNumber(pass *analysis.Pass, node ssa.Node) int { + pos := node.Pos() + return pass.Fset.File(pos).Line(pos) +} + +var errType = types.Universe.Lookup("error").Type().Underlying().(*types.Interface) + +func binOpErrNil(b *ssa.BasicBlock, op token.Token) ssa.Value { + if len(b.Instrs) == 0 { + return nil + } + + ifinst, ok := b.Instrs[len(b.Instrs)-1].(*ssa.If) + if !ok { + return nil + } + + binop, ok := ifinst.Cond.(*ssa.BinOp) + if !ok { + return nil + } + + if binop.Op != op { + return nil + } + + if !types.Implements(binop.X.Type(), errType) { + return nil + } + + if !types.Implements(binop.Y.Type(), errType) { + return nil + } + + xIsConst, yIsConst := isConst(binop.X), isConst(binop.Y) + switch { + case !xIsConst && yIsConst: // err != nil or err == nil + return binop.X + case xIsConst && !yIsConst: // nil != err or nil == err + return binop.Y + } + + return nil +} + +func isConst(v ssa.Value) bool { + _, ok := v.(*ssa.Const) + return ok +} + +func isReturnNil(b *ssa.BasicBlock) *ssa.Return { + if len(b.Instrs) == 0 { + return nil + } + + ret, ok := b.Instrs[len(b.Instrs)-1].(*ssa.Return) + if !ok { + return nil + } + + errorReturnValues := 0 + for _, res := range ret.Results { + if !types.Implements(res.Type(), errType) { + continue + } + + errorReturnValues++ + v, ok := res.(*ssa.Const) + if !ok { + return nil + } + + if !v.IsNil() { + return nil + } + } + + if errorReturnValues == 0 { + return nil + } + + return ret +} + +func isReturnError(b *ssa.BasicBlock, errVal ssa.Value) *ssa.Return { + if len(b.Instrs) == 0 { + return nil + } + + ret, ok := b.Instrs[len(b.Instrs)-1].(*ssa.Return) + if !ok { + return nil + } + + for _, v := range ret.Results { + if v == errVal { + return ret + } + } + + return nil +} + +func usesErrorValue(b *ssa.BasicBlock, errVal ssa.Value) bool { + for _, instr := range b.Instrs { + if callInstr, ok := instr.(*ssa.Call); ok { + for _, arg := range callInstr.Call.Args { + if isUsedInValue(arg, errVal) { + return true + } + + sliceArg, ok := arg.(*ssa.Slice) + if ok { + if isUsedInSlice(sliceArg, errVal) { + return true + } + } + } + } + } + return false +} + +type ReferrersHolder interface { + Referrers() *[]ssa.Instruction +} + +var _ ReferrersHolder = (ssa.Node)(nil) +var _ ReferrersHolder = (ssa.Value)(nil) + +func isUsedInSlice(sliceArg *ssa.Slice, errVal ssa.Value) bool { + var valueBuf [10]*ssa.Value + operands := sliceArg.Operands(valueBuf[:0]) + + var valuesToInspect []ssa.Value + addValueForInspection := func(value ssa.Value) { + if value != nil { + valuesToInspect = append(valuesToInspect, value) + } + } + + var nodesToInspect []ssa.Node + visitedNodes := map[ssa.Node]bool{} + addNodeForInspection := func(node ssa.Node) { + if !visitedNodes[node] { + visitedNodes[node] = true + nodesToInspect = append(nodesToInspect, node) + } + } + addReferrersForInspection := func(h ReferrersHolder) { + if h == nil { + return + } + + referrers := h.Referrers() + if referrers == nil { + return + } + + for _, r := range *referrers { + if node, ok := r.(ssa.Node); ok { + addNodeForInspection(node) + } + } + } + + for _, operand := range operands { + addReferrersForInspection(*operand) + addValueForInspection(*operand) + } + + for i := 0; i < len(nodesToInspect); i++ { + switch node := nodesToInspect[i].(type) { + case *ssa.IndexAddr: + 
addReferrersForInspection(node) + case *ssa.Store: + addValueForInspection(node.Val) + } + } + + for _, value := range valuesToInspect { + if isUsedInValue(value, errVal) { + return true + } + } + return false +} + +func isUsedInValue(value, lookedFor ssa.Value) bool { + if value == lookedFor { + return true + } + + switch value := value.(type) { + case *ssa.ChangeInterface: + return isUsedInValue(value.X, lookedFor) + case *ssa.MakeInterface: + return isUsedInValue(value.X, lookedFor) + case *ssa.Call: + if value.Call.IsInvoke() { + return isUsedInValue(value.Call.Value, lookedFor) + } + } + + return false +} diff --git a/vendor/github.com/hashicorp/errwrap/LICENSE b/vendor/github.com/hashicorp/errwrap/LICENSE new file mode 100644 index 000000000..c33dcc7c9 --- /dev/null +++ b/vendor/github.com/hashicorp/errwrap/LICENSE @@ -0,0 +1,354 @@ +Mozilla Public License, version 2.0 + +1. Definitions + +1.1. “Contributor” + + means each individual or legal entity that creates, contributes to the + creation of, or owns Covered Software. + +1.2. “Contributor Version” + + means the combination of the Contributions of others (if any) used by a + Contributor and that particular Contributor’s Contribution. + +1.3. “Contribution” + + means Covered Software of a particular Contributor. + +1.4. “Covered Software” + + means Source Code Form to which the initial Contributor has attached the + notice in Exhibit A, the Executable Form of such Source Code Form, and + Modifications of such Source Code Form, in each case including portions + thereof. + +1.5. “Incompatible With Secondary Licenses” + means + + a. that the initial Contributor has attached the notice described in + Exhibit B to the Covered Software; or + + b. that the Covered Software was made available under the terms of version + 1.1 or earlier of the License, but not also under the terms of a + Secondary License. + +1.6. “Executable Form” + + means any form of the work other than Source Code Form. + +1.7. “Larger Work” + + means a work that combines Covered Software with other material, in a separate + file or files, that is not Covered Software. + +1.8. “License” + + means this document. + +1.9. “Licensable” + + means having the right to grant, to the maximum extent possible, whether at the + time of the initial grant or subsequently, any and all of the rights conveyed by + this License. + +1.10. “Modifications” + + means any of the following: + + a. any file in Source Code Form that results from an addition to, deletion + from, or modification of the contents of Covered Software; or + + b. any new file in Source Code Form that contains any Covered Software. + +1.11. “Patent Claims” of a Contributor + + means any patent claim(s), including without limitation, method, process, + and apparatus claims, in any patent Licensable by such Contributor that + would be infringed, but for the grant of the License, by the making, + using, selling, offering for sale, having made, import, or transfer of + either its Contributions or its Contributor Version. + +1.12. “Secondary License” + + means either the GNU General Public License, Version 2.0, the GNU Lesser + General Public License, Version 2.1, the GNU Affero General Public + License, Version 3.0, or any later versions of those licenses. + +1.13. “Source Code Form” + + means the form of the work preferred for making modifications. + +1.14. “You” (or “Your”) + + means an individual or a legal entity exercising rights under this + License. 
For legal entities, “You” includes any entity that controls, is + controlled by, or is under common control with You. For purposes of this + definition, “control” means (a) the power, direct or indirect, to cause + the direction or management of such entity, whether by contract or + otherwise, or (b) ownership of more than fifty percent (50%) of the + outstanding shares or beneficial ownership of such entity. + + +2. License Grants and Conditions + +2.1. Grants + + Each Contributor hereby grants You a world-wide, royalty-free, + non-exclusive license: + + a. under intellectual property rights (other than patent or trademark) + Licensable by such Contributor to use, reproduce, make available, + modify, display, perform, distribute, and otherwise exploit its + Contributions, either on an unmodified basis, with Modifications, or as + part of a Larger Work; and + + b. under Patent Claims of such Contributor to make, use, sell, offer for + sale, have made, import, and otherwise transfer either its Contributions + or its Contributor Version. + +2.2. Effective Date + + The licenses granted in Section 2.1 with respect to any Contribution become + effective for each Contribution on the date the Contributor first distributes + such Contribution. + +2.3. Limitations on Grant Scope + + The licenses granted in this Section 2 are the only rights granted under this + License. No additional rights or licenses will be implied from the distribution + or licensing of Covered Software under this License. Notwithstanding Section + 2.1(b) above, no patent license is granted by a Contributor: + + a. for any code that a Contributor has removed from Covered Software; or + + b. for infringements caused by: (i) Your and any other third party’s + modifications of Covered Software, or (ii) the combination of its + Contributions with other software (except as part of its Contributor + Version); or + + c. under Patent Claims infringed by Covered Software in the absence of its + Contributions. + + This License does not grant any rights in the trademarks, service marks, or + logos of any Contributor (except as may be necessary to comply with the + notice requirements in Section 3.4). + +2.4. Subsequent Licenses + + No Contributor makes additional grants as a result of Your choice to + distribute the Covered Software under a subsequent version of this License + (see Section 10.2) or under the terms of a Secondary License (if permitted + under the terms of Section 3.3). + +2.5. Representation + + Each Contributor represents that the Contributor believes its Contributions + are its original creation(s) or it has sufficient rights to grant the + rights to its Contributions conveyed by this License. + +2.6. Fair Use + + This License is not intended to limit any rights You have under applicable + copyright doctrines of fair use, fair dealing, or other equivalents. + +2.7. Conditions + + Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted in + Section 2.1. + + +3. Responsibilities + +3.1. Distribution of Source Form + + All distribution of Covered Software in Source Code Form, including any + Modifications that You create or to which You contribute, must be under the + terms of this License. You must inform recipients that the Source Code Form + of the Covered Software is governed by the terms of this License, and how + they can obtain a copy of this License. You may not attempt to alter or + restrict the recipients’ rights in the Source Code Form. + +3.2. 
Distribution of Executable Form + + If You distribute Covered Software in Executable Form then: + + a. such Covered Software must also be made available in Source Code Form, + as described in Section 3.1, and You must inform recipients of the + Executable Form how they can obtain a copy of such Source Code Form by + reasonable means in a timely manner, at a charge no more than the cost + of distribution to the recipient; and + + b. You may distribute such Executable Form under the terms of this License, + or sublicense it under different terms, provided that the license for + the Executable Form does not attempt to limit or alter the recipients’ + rights in the Source Code Form under this License. + +3.3. Distribution of a Larger Work + + You may create and distribute a Larger Work under terms of Your choice, + provided that You also comply with the requirements of this License for the + Covered Software. If the Larger Work is a combination of Covered Software + with a work governed by one or more Secondary Licenses, and the Covered + Software is not Incompatible With Secondary Licenses, this License permits + You to additionally distribute such Covered Software under the terms of + such Secondary License(s), so that the recipient of the Larger Work may, at + their option, further distribute the Covered Software under the terms of + either this License or such Secondary License(s). + +3.4. Notices + + You may not remove or alter the substance of any license notices (including + copyright notices, patent notices, disclaimers of warranty, or limitations + of liability) contained within the Source Code Form of the Covered + Software, except that You may alter any license notices to the extent + required to remedy known factual inaccuracies. + +3.5. Application of Additional Terms + + You may choose to offer, and to charge a fee for, warranty, support, + indemnity or liability obligations to one or more recipients of Covered + Software. However, You may do so only on Your own behalf, and not on behalf + of any Contributor. You must make it absolutely clear that any such + warranty, support, indemnity, or liability obligation is offered by You + alone, and You hereby agree to indemnify every Contributor for any + liability incurred by such Contributor as a result of warranty, support, + indemnity or liability terms You offer. You may include additional + disclaimers of warranty and limitations of liability specific to any + jurisdiction. + +4. Inability to Comply Due to Statute or Regulation + + If it is impossible for You to comply with any of the terms of this License + with respect to some or all of the Covered Software due to statute, judicial + order, or regulation then You must: (a) comply with the terms of this License + to the maximum extent possible; and (b) describe the limitations and the code + they affect. Such description must be placed in a text file included with all + distributions of the Covered Software under this License. Except to the + extent prohibited by statute or regulation, such description must be + sufficiently detailed for a recipient of ordinary skill to be able to + understand it. + +5. Termination + +5.1. The rights granted under this License will terminate automatically if You + fail to comply with any of its terms. 
However, if You become compliant, + then the rights granted under this License from a particular Contributor + are reinstated (a) provisionally, unless and until such Contributor + explicitly and finally terminates Your grants, and (b) on an ongoing basis, + if such Contributor fails to notify You of the non-compliance by some + reasonable means prior to 60 days after You have come back into compliance. + Moreover, Your grants from a particular Contributor are reinstated on an + ongoing basis if such Contributor notifies You of the non-compliance by + some reasonable means, this is the first time You have received notice of + non-compliance with this License from such Contributor, and You become + compliant prior to 30 days after Your receipt of the notice. + +5.2. If You initiate litigation against any entity by asserting a patent + infringement claim (excluding declaratory judgment actions, counter-claims, + and cross-claims) alleging that a Contributor Version directly or + indirectly infringes any patent, then the rights granted to You by any and + all Contributors for the Covered Software under Section 2.1 of this License + shall terminate. + +5.3. In the event of termination under Sections 5.1 or 5.2 above, all end user + license agreements (excluding distributors and resellers) which have been + validly granted by You or Your distributors under this License prior to + termination shall survive termination. + +6. Disclaimer of Warranty + + Covered Software is provided under this License on an “as is” basis, without + warranty of any kind, either expressed, implied, or statutory, including, + without limitation, warranties that the Covered Software is free of defects, + merchantable, fit for a particular purpose or non-infringing. The entire + risk as to the quality and performance of the Covered Software is with You. + Should any Covered Software prove defective in any respect, You (not any + Contributor) assume the cost of any necessary servicing, repair, or + correction. This disclaimer of warranty constitutes an essential part of this + License. No use of any Covered Software is authorized under this License + except under this disclaimer. + +7. Limitation of Liability + + Under no circumstances and under no legal theory, whether tort (including + negligence), contract, or otherwise, shall any Contributor, or anyone who + distributes Covered Software as permitted above, be liable to You for any + direct, indirect, special, incidental, or consequential damages of any + character including, without limitation, damages for lost profits, loss of + goodwill, work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses, even if such party shall have been + informed of the possibility of such damages. This limitation of liability + shall not apply to liability for death or personal injury resulting from such + party’s negligence to the extent applicable law prohibits such limitation. + Some jurisdictions do not allow the exclusion or limitation of incidental or + consequential damages, so this exclusion and limitation may not apply to You. + +8. Litigation + + Any litigation relating to this License may be brought only in the courts of + a jurisdiction where the defendant maintains its principal place of business + and such litigation shall be governed by laws of that jurisdiction, without + reference to its conflict-of-law provisions. Nothing in this Section shall + prevent a party’s ability to bring cross-claims or counter-claims. + +9. 
Miscellaneous + + This License represents the complete agreement concerning the subject matter + hereof. If any provision of this License is held to be unenforceable, such + provision shall be reformed only to the extent necessary to make it + enforceable. Any law or regulation which provides that the language of a + contract shall be construed against the drafter shall not be used to construe + this License against a Contributor. + + +10. Versions of the License + +10.1. New Versions + + Mozilla Foundation is the license steward. Except as provided in Section + 10.3, no one other than the license steward has the right to modify or + publish new versions of this License. Each version will be given a + distinguishing version number. + +10.2. Effect of New Versions + + You may distribute the Covered Software under the terms of the version of + the License under which You originally received the Covered Software, or + under the terms of any subsequent version published by the license + steward. + +10.3. Modified Versions + + If you create software not governed by this License, and you want to + create a new license for such software, you may create and use a modified + version of this License if you rename the license and remove any + references to the name of the license steward (except to note that such + modified license differs from this License). + +10.4. Distributing Source Code Form that is Incompatible With Secondary Licenses + If You choose to distribute Source Code Form that is Incompatible With + Secondary Licenses under the terms of this version of the License, the + notice described in Exhibit B of this License must be attached. + +Exhibit A - Source Code Form License Notice + + This Source Code Form is subject to the + terms of the Mozilla Public License, v. + 2.0. If a copy of the MPL was not + distributed with this file, You can + obtain one at + http://mozilla.org/MPL/2.0/. + +If it is not possible or desirable to put the notice in a particular file, then +You may include the notice in a location (such as a LICENSE file in a relevant +directory) where a recipient would be likely to look for such a notice. + +You may add additional accurate notices of copyright ownership. + +Exhibit B - “Incompatible With Secondary Licenses” Notice + + This Source Code Form is “Incompatible + With Secondary Licenses”, as defined by + the Mozilla Public License, v. 2.0. + diff --git a/vendor/github.com/hashicorp/errwrap/README.md b/vendor/github.com/hashicorp/errwrap/README.md new file mode 100644 index 000000000..444df08f8 --- /dev/null +++ b/vendor/github.com/hashicorp/errwrap/README.md @@ -0,0 +1,89 @@ +# errwrap + +`errwrap` is a package for Go that formalizes the pattern of wrapping errors +and checking if an error contains another error. + +There is a common pattern in Go of taking a returned `error` value and +then wrapping it (such as with `fmt.Errorf`) before returning it. The problem +with this pattern is that you completely lose the original `error` structure. + +Arguably the _correct_ approach is that you should make a custom structure +implementing the `error` interface, and have the original error as a field +on that structure, such [as this example](http://golang.org/pkg/os/#PathError). +This is a good approach, but you have to know the entire chain of possible +rewrapping that happens, when you might just care about one. 
+ +`errwrap` formalizes this pattern (it doesn't matter what approach you use +above) by giving a single interface for wrapping errors, checking if a specific +error is wrapped, and extracting that error. + +## Installation and Docs + +Install using `go get github.com/hashicorp/errwrap`. + +Full documentation is available at +http://godoc.org/github.com/hashicorp/errwrap + +## Usage + +#### Basic Usage + +Below is a very basic example of its usage: + +```go +// A function that always returns an error, but wraps it, like a real +// function might. +func tryOpen() error { + _, err := os.Open("/i/dont/exist") + if err != nil { + return errwrap.Wrapf("Doesn't exist: {{err}}", err) + } + + return nil +} + +func main() { + err := tryOpen() + + // We can use the Contains helpers to check if an error contains + // another error. It is safe to do this with a nil error, or with + // an error that doesn't even use the errwrap package. + if errwrap.Contains(err, "does not exist") { + // Do something + } + if errwrap.ContainsType(err, new(os.PathError)) { + // Do something + } + + // Or we can use the associated `Get` functions to just extract + // a specific error. This would return nil if that specific error doesn't + // exist. + perr := errwrap.GetType(err, new(os.PathError)) +} +``` + +#### Custom Types + +If you're already making custom types that properly wrap errors, then +you can get all the functionality of `errwraps.Contains` and such by +implementing the `Wrapper` interface with just one function. Example: + +```go +type AppError { + Code ErrorCode + Err error +} + +func (e *AppError) WrappedErrors() []error { + return []error{e.Err} +} +``` + +Now this works: + +```go +err := &AppError{Err: fmt.Errorf("an error")} +if errwrap.ContainsType(err, fmt.Errorf("")) { + // This will work! +} +``` diff --git a/vendor/github.com/hashicorp/errwrap/errwrap.go b/vendor/github.com/hashicorp/errwrap/errwrap.go new file mode 100644 index 000000000..a733bef18 --- /dev/null +++ b/vendor/github.com/hashicorp/errwrap/errwrap.go @@ -0,0 +1,169 @@ +// Package errwrap implements methods to formalize error wrapping in Go. +// +// All of the top-level functions that take an `error` are built to be able +// to take any error, not just wrapped errors. This allows you to use errwrap +// without having to type-check and type-cast everywhere. +package errwrap + +import ( + "errors" + "reflect" + "strings" +) + +// WalkFunc is the callback called for Walk. +type WalkFunc func(error) + +// Wrapper is an interface that can be implemented by custom types to +// have all the Contains, Get, etc. functions in errwrap work. +// +// When Walk reaches a Wrapper, it will call the callback for every +// wrapped error in addition to the wrapper itself. Since all the top-level +// functions in errwrap use Walk, this means that all those functions work +// with your custom type. +type Wrapper interface { + WrappedErrors() []error +} + +// Wrap defines that outer wraps inner, returning an error type that +// can be cleanly used with the other methods in this package, such as +// Contains, GetAll, etc. +// +// This function won't modify the error message at all (the outer message +// will be used). +func Wrap(outer, inner error) error { + return &wrappedError{ + Outer: outer, + Inner: inner, + } +} + +// Wrapf wraps an error with a formatting message. This is similar to using +// `fmt.Errorf` to wrap an error. If you're using `fmt.Errorf` to wrap +// errors, you should replace it with this. 
+// +// format is the format of the error message. The string '{{err}}' will +// be replaced with the original error message. +func Wrapf(format string, err error) error { + outerMsg := "" + if err != nil { + outerMsg = err.Error() + } + + outer := errors.New(strings.Replace( + format, "{{err}}", outerMsg, -1)) + + return Wrap(outer, err) +} + +// Contains checks if the given error contains an error with the +// message msg. If err is not a wrapped error, this will always return +// false unless the error itself happens to match this msg. +func Contains(err error, msg string) bool { + return len(GetAll(err, msg)) > 0 +} + +// ContainsType checks if the given error contains an error with +// the same concrete type as v. If err is not a wrapped error, this will +// check the err itself. +func ContainsType(err error, v interface{}) bool { + return len(GetAllType(err, v)) > 0 +} + +// Get is the same as GetAll but returns the deepest matching error. +func Get(err error, msg string) error { + es := GetAll(err, msg) + if len(es) > 0 { + return es[len(es)-1] + } + + return nil +} + +// GetType is the same as GetAllType but returns the deepest matching error. +func GetType(err error, v interface{}) error { + es := GetAllType(err, v) + if len(es) > 0 { + return es[len(es)-1] + } + + return nil +} + +// GetAll gets all the errors that might be wrapped in err with the +// given message. The order of the errors is such that the outermost +// matching error (the most recent wrap) is index zero, and so on. +func GetAll(err error, msg string) []error { + var result []error + + Walk(err, func(err error) { + if err.Error() == msg { + result = append(result, err) + } + }) + + return result +} + +// GetAllType gets all the errors that are the same type as v. +// +// The order of the return value is the same as described in GetAll. +func GetAllType(err error, v interface{}) []error { + var result []error + + var search string + if v != nil { + search = reflect.TypeOf(v).String() + } + Walk(err, func(err error) { + var needle string + if err != nil { + needle = reflect.TypeOf(err).String() + } + + if needle == search { + result = append(result, err) + } + }) + + return result +} + +// Walk walks all the wrapped errors in err and calls the callback. If +// err isn't a wrapped error, this will be called once for err. If err +// is a wrapped error, the callback will be called for both the wrapper +// that implements error as well as the wrapped error itself. +func Walk(err error, cb WalkFunc) { + if err == nil { + return + } + + switch e := err.(type) { + case *wrappedError: + cb(e.Outer) + Walk(e.Inner, cb) + case Wrapper: + cb(err) + + for _, err := range e.WrappedErrors() { + Walk(err, cb) + } + default: + cb(err) + } +} + +// wrappedError is an implementation of error that has both the +// outer and inner errors. 
+type wrappedError struct { + Outer error + Inner error +} + +func (w *wrappedError) Error() string { + return w.Outer.Error() +} + +func (w *wrappedError) WrappedErrors() []error { + return []error{w.Outer, w.Inner} +} diff --git a/vendor/github.com/hashicorp/errwrap/go.mod b/vendor/github.com/hashicorp/errwrap/go.mod new file mode 100644 index 000000000..c9b84022c --- /dev/null +++ b/vendor/github.com/hashicorp/errwrap/go.mod @@ -0,0 +1 @@ +module github.com/hashicorp/errwrap diff --git a/vendor/github.com/hashicorp/go-multierror/LICENSE b/vendor/github.com/hashicorp/go-multierror/LICENSE new file mode 100644 index 000000000..82b4de97c --- /dev/null +++ b/vendor/github.com/hashicorp/go-multierror/LICENSE @@ -0,0 +1,353 @@ +Mozilla Public License, version 2.0 + +1. Definitions + +1.1. “Contributor” + + means each individual or legal entity that creates, contributes to the + creation of, or owns Covered Software. + +1.2. “Contributor Version” + + means the combination of the Contributions of others (if any) used by a + Contributor and that particular Contributor’s Contribution. + +1.3. “Contribution” + + means Covered Software of a particular Contributor. + +1.4. “Covered Software” + + means Source Code Form to which the initial Contributor has attached the + notice in Exhibit A, the Executable Form of such Source Code Form, and + Modifications of such Source Code Form, in each case including portions + thereof. + +1.5. “Incompatible With Secondary Licenses” + means + + a. that the initial Contributor has attached the notice described in + Exhibit B to the Covered Software; or + + b. that the Covered Software was made available under the terms of version + 1.1 or earlier of the License, but not also under the terms of a + Secondary License. + +1.6. “Executable Form” + + means any form of the work other than Source Code Form. + +1.7. “Larger Work” + + means a work that combines Covered Software with other material, in a separate + file or files, that is not Covered Software. + +1.8. “License” + + means this document. + +1.9. “Licensable” + + means having the right to grant, to the maximum extent possible, whether at the + time of the initial grant or subsequently, any and all of the rights conveyed by + this License. + +1.10. “Modifications” + + means any of the following: + + a. any file in Source Code Form that results from an addition to, deletion + from, or modification of the contents of Covered Software; or + + b. any new file in Source Code Form that contains any Covered Software. + +1.11. “Patent Claims” of a Contributor + + means any patent claim(s), including without limitation, method, process, + and apparatus claims, in any patent Licensable by such Contributor that + would be infringed, but for the grant of the License, by the making, + using, selling, offering for sale, having made, import, or transfer of + either its Contributions or its Contributor Version. + +1.12. “Secondary License” + + means either the GNU General Public License, Version 2.0, the GNU Lesser + General Public License, Version 2.1, the GNU Affero General Public + License, Version 3.0, or any later versions of those licenses. + +1.13. “Source Code Form” + + means the form of the work preferred for making modifications. + +1.14. “You” (or “Your”) + + means an individual or a legal entity exercising rights under this + License. For legal entities, “You” includes any entity that controls, is + controlled by, or is under common control with You. 
For purposes of this + definition, “control” means (a) the power, direct or indirect, to cause + the direction or management of such entity, whether by contract or + otherwise, or (b) ownership of more than fifty percent (50%) of the + outstanding shares or beneficial ownership of such entity. + + +2. License Grants and Conditions + +2.1. Grants + + Each Contributor hereby grants You a world-wide, royalty-free, + non-exclusive license: + + a. under intellectual property rights (other than patent or trademark) + Licensable by such Contributor to use, reproduce, make available, + modify, display, perform, distribute, and otherwise exploit its + Contributions, either on an unmodified basis, with Modifications, or as + part of a Larger Work; and + + b. under Patent Claims of such Contributor to make, use, sell, offer for + sale, have made, import, and otherwise transfer either its Contributions + or its Contributor Version. + +2.2. Effective Date + + The licenses granted in Section 2.1 with respect to any Contribution become + effective for each Contribution on the date the Contributor first distributes + such Contribution. + +2.3. Limitations on Grant Scope + + The licenses granted in this Section 2 are the only rights granted under this + License. No additional rights or licenses will be implied from the distribution + or licensing of Covered Software under this License. Notwithstanding Section + 2.1(b) above, no patent license is granted by a Contributor: + + a. for any code that a Contributor has removed from Covered Software; or + + b. for infringements caused by: (i) Your and any other third party’s + modifications of Covered Software, or (ii) the combination of its + Contributions with other software (except as part of its Contributor + Version); or + + c. under Patent Claims infringed by Covered Software in the absence of its + Contributions. + + This License does not grant any rights in the trademarks, service marks, or + logos of any Contributor (except as may be necessary to comply with the + notice requirements in Section 3.4). + +2.4. Subsequent Licenses + + No Contributor makes additional grants as a result of Your choice to + distribute the Covered Software under a subsequent version of this License + (see Section 10.2) or under the terms of a Secondary License (if permitted + under the terms of Section 3.3). + +2.5. Representation + + Each Contributor represents that the Contributor believes its Contributions + are its original creation(s) or it has sufficient rights to grant the + rights to its Contributions conveyed by this License. + +2.6. Fair Use + + This License is not intended to limit any rights You have under applicable + copyright doctrines of fair use, fair dealing, or other equivalents. + +2.7. Conditions + + Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted in + Section 2.1. + + +3. Responsibilities + +3.1. Distribution of Source Form + + All distribution of Covered Software in Source Code Form, including any + Modifications that You create or to which You contribute, must be under the + terms of this License. You must inform recipients that the Source Code Form + of the Covered Software is governed by the terms of this License, and how + they can obtain a copy of this License. You may not attempt to alter or + restrict the recipients’ rights in the Source Code Form. + +3.2. Distribution of Executable Form + + If You distribute Covered Software in Executable Form then: + + a. 
such Covered Software must also be made available in Source Code Form, + as described in Section 3.1, and You must inform recipients of the + Executable Form how they can obtain a copy of such Source Code Form by + reasonable means in a timely manner, at a charge no more than the cost + of distribution to the recipient; and + + b. You may distribute such Executable Form under the terms of this License, + or sublicense it under different terms, provided that the license for + the Executable Form does not attempt to limit or alter the recipients’ + rights in the Source Code Form under this License. + +3.3. Distribution of a Larger Work + + You may create and distribute a Larger Work under terms of Your choice, + provided that You also comply with the requirements of this License for the + Covered Software. If the Larger Work is a combination of Covered Software + with a work governed by one or more Secondary Licenses, and the Covered + Software is not Incompatible With Secondary Licenses, this License permits + You to additionally distribute such Covered Software under the terms of + such Secondary License(s), so that the recipient of the Larger Work may, at + their option, further distribute the Covered Software under the terms of + either this License or such Secondary License(s). + +3.4. Notices + + You may not remove or alter the substance of any license notices (including + copyright notices, patent notices, disclaimers of warranty, or limitations + of liability) contained within the Source Code Form of the Covered + Software, except that You may alter any license notices to the extent + required to remedy known factual inaccuracies. + +3.5. Application of Additional Terms + + You may choose to offer, and to charge a fee for, warranty, support, + indemnity or liability obligations to one or more recipients of Covered + Software. However, You may do so only on Your own behalf, and not on behalf + of any Contributor. You must make it absolutely clear that any such + warranty, support, indemnity, or liability obligation is offered by You + alone, and You hereby agree to indemnify every Contributor for any + liability incurred by such Contributor as a result of warranty, support, + indemnity or liability terms You offer. You may include additional + disclaimers of warranty and limitations of liability specific to any + jurisdiction. + +4. Inability to Comply Due to Statute or Regulation + + If it is impossible for You to comply with any of the terms of this License + with respect to some or all of the Covered Software due to statute, judicial + order, or regulation then You must: (a) comply with the terms of this License + to the maximum extent possible; and (b) describe the limitations and the code + they affect. Such description must be placed in a text file included with all + distributions of the Covered Software under this License. Except to the + extent prohibited by statute or regulation, such description must be + sufficiently detailed for a recipient of ordinary skill to be able to + understand it. + +5. Termination + +5.1. The rights granted under this License will terminate automatically if You + fail to comply with any of its terms. 
However, if You become compliant, + then the rights granted under this License from a particular Contributor + are reinstated (a) provisionally, unless and until such Contributor + explicitly and finally terminates Your grants, and (b) on an ongoing basis, + if such Contributor fails to notify You of the non-compliance by some + reasonable means prior to 60 days after You have come back into compliance. + Moreover, Your grants from a particular Contributor are reinstated on an + ongoing basis if such Contributor notifies You of the non-compliance by + some reasonable means, this is the first time You have received notice of + non-compliance with this License from such Contributor, and You become + compliant prior to 30 days after Your receipt of the notice. + +5.2. If You initiate litigation against any entity by asserting a patent + infringement claim (excluding declaratory judgment actions, counter-claims, + and cross-claims) alleging that a Contributor Version directly or + indirectly infringes any patent, then the rights granted to You by any and + all Contributors for the Covered Software under Section 2.1 of this License + shall terminate. + +5.3. In the event of termination under Sections 5.1 or 5.2 above, all end user + license agreements (excluding distributors and resellers) which have been + validly granted by You or Your distributors under this License prior to + termination shall survive termination. + +6. Disclaimer of Warranty + + Covered Software is provided under this License on an “as is” basis, without + warranty of any kind, either expressed, implied, or statutory, including, + without limitation, warranties that the Covered Software is free of defects, + merchantable, fit for a particular purpose or non-infringing. The entire + risk as to the quality and performance of the Covered Software is with You. + Should any Covered Software prove defective in any respect, You (not any + Contributor) assume the cost of any necessary servicing, repair, or + correction. This disclaimer of warranty constitutes an essential part of this + License. No use of any Covered Software is authorized under this License + except under this disclaimer. + +7. Limitation of Liability + + Under no circumstances and under no legal theory, whether tort (including + negligence), contract, or otherwise, shall any Contributor, or anyone who + distributes Covered Software as permitted above, be liable to You for any + direct, indirect, special, incidental, or consequential damages of any + character including, without limitation, damages for lost profits, loss of + goodwill, work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses, even if such party shall have been + informed of the possibility of such damages. This limitation of liability + shall not apply to liability for death or personal injury resulting from such + party’s negligence to the extent applicable law prohibits such limitation. + Some jurisdictions do not allow the exclusion or limitation of incidental or + consequential damages, so this exclusion and limitation may not apply to You. + +8. Litigation + + Any litigation relating to this License may be brought only in the courts of + a jurisdiction where the defendant maintains its principal place of business + and such litigation shall be governed by laws of that jurisdiction, without + reference to its conflict-of-law provisions. Nothing in this Section shall + prevent a party’s ability to bring cross-claims or counter-claims. + +9. 
Miscellaneous + + This License represents the complete agreement concerning the subject matter + hereof. If any provision of this License is held to be unenforceable, such + provision shall be reformed only to the extent necessary to make it + enforceable. Any law or regulation which provides that the language of a + contract shall be construed against the drafter shall not be used to construe + this License against a Contributor. + + +10. Versions of the License + +10.1. New Versions + + Mozilla Foundation is the license steward. Except as provided in Section + 10.3, no one other than the license steward has the right to modify or + publish new versions of this License. Each version will be given a + distinguishing version number. + +10.2. Effect of New Versions + + You may distribute the Covered Software under the terms of the version of + the License under which You originally received the Covered Software, or + under the terms of any subsequent version published by the license + steward. + +10.3. Modified Versions + + If you create software not governed by this License, and you want to + create a new license for such software, you may create and use a modified + version of this License if you rename the license and remove any + references to the name of the license steward (except to note that such + modified license differs from this License). + +10.4. Distributing Source Code Form that is Incompatible With Secondary Licenses + If You choose to distribute Source Code Form that is Incompatible With + Secondary Licenses under the terms of this version of the License, the + notice described in Exhibit B of this License must be attached. + +Exhibit A - Source Code Form License Notice + + This Source Code Form is subject to the + terms of the Mozilla Public License, v. + 2.0. If a copy of the MPL was not + distributed with this file, You can + obtain one at + http://mozilla.org/MPL/2.0/. + +If it is not possible or desirable to put the notice in a particular file, then +You may include the notice in a location (such as a LICENSE file in a relevant +directory) where a recipient would be likely to look for such a notice. + +You may add additional accurate notices of copyright ownership. + +Exhibit B - “Incompatible With Secondary Licenses” Notice + + This Source Code Form is “Incompatible + With Secondary Licenses”, as defined by + the Mozilla Public License, v. 2.0. diff --git a/vendor/github.com/hashicorp/go-multierror/Makefile b/vendor/github.com/hashicorp/go-multierror/Makefile new file mode 100644 index 000000000..b97cd6ed0 --- /dev/null +++ b/vendor/github.com/hashicorp/go-multierror/Makefile @@ -0,0 +1,31 @@ +TEST?=./... + +default: test + +# test runs the test suite and vets the code. +test: generate + @echo "==> Running tests..." + @go list $(TEST) \ + | grep -v "/vendor/" \ + | xargs -n1 go test -timeout=60s -parallel=10 ${TESTARGS} + +# testrace runs the race checker +testrace: generate + @echo "==> Running tests (race)..." + @go list $(TEST) \ + | grep -v "/vendor/" \ + | xargs -n1 go test -timeout=60s -race ${TESTARGS} + +# updatedeps installs all the dependencies needed to run and build. +updatedeps: + @sh -c "'${CURDIR}/scripts/deps.sh' '${NAME}'" + +# generate runs `go generate` to build the dynamically generated source files. +generate: + @echo "==> Generating..." + @find . -type f -name '.DS_Store' -delete + @go list ./... 
\ + | grep -v "/vendor/" \ + | xargs -n1 go generate + +.PHONY: default test testrace updatedeps generate diff --git a/vendor/github.com/hashicorp/go-multierror/README.md b/vendor/github.com/hashicorp/go-multierror/README.md new file mode 100644 index 000000000..71dd308ed --- /dev/null +++ b/vendor/github.com/hashicorp/go-multierror/README.md @@ -0,0 +1,150 @@ +# go-multierror + +[![CircleCI](https://img.shields.io/circleci/build/github/hashicorp/go-multierror/master)](https://circleci.com/gh/hashicorp/go-multierror) +[![Go Reference](https://pkg.go.dev/badge/github.com/hashicorp/go-multierror.svg)](https://pkg.go.dev/github.com/hashicorp/go-multierror) +![GitHub go.mod Go version](https://img.shields.io/github/go-mod/go-version/hashicorp/go-multierror) + +[circleci]: https://app.circleci.com/pipelines/github/hashicorp/go-multierror +[godocs]: https://pkg.go.dev/github.com/hashicorp/go-multierror + +`go-multierror` is a package for Go that provides a mechanism for +representing a list of `error` values as a single `error`. + +This allows a function in Go to return an `error` that might actually +be a list of errors. If the caller knows this, they can unwrap the +list and access the errors. If the caller doesn't know, the error +formats to a nice human-readable format. + +`go-multierror` is fully compatible with the Go standard library +[errors](https://golang.org/pkg/errors/) package, including the +functions `As`, `Is`, and `Unwrap`. This provides a standardized approach +for introspecting on error values. + +## Installation and Docs + +Install using `go get github.com/hashicorp/go-multierror`. + +Full documentation is available at +https://pkg.go.dev/github.com/hashicorp/go-multierror + +### Requires go version 1.13 or newer + +`go-multierror` requires go version 1.13 or newer. Go 1.13 introduced +[error wrapping](https://golang.org/doc/go1.13#error_wrapping), which +this library takes advantage of. + +If you need to use an earlier version of go, you can use the +[v1.0.0](https://github.com/hashicorp/go-multierror/tree/v1.0.0) +tag, which doesn't rely on features in go 1.13. + +If you see compile errors that look like the below, it's likely that +you're on an older version of go: + +``` +/go/src/github.com/hashicorp/go-multierror/multierror.go:112:9: undefined: errors.As +/go/src/github.com/hashicorp/go-multierror/multierror.go:117:9: undefined: errors.Is +``` + +## Usage + +go-multierror is easy to use and purposely built to be unobtrusive in +existing Go applications/libraries that may not be aware of it. + +**Building a list of errors** + +The `Append` function is used to create a list of errors. This function +behaves a lot like the Go built-in `append` function: it doesn't matter +if the first argument is nil, a `multierror.Error`, or any other `error`, +the function behaves as you would expect. + +```go +var result error + +if err := step1(); err != nil { + result = multierror.Append(result, err) +} +if err := step2(); err != nil { + result = multierror.Append(result, err) +} + +return result +``` + +**Customizing the formatting of the errors** + +By specifying a custom `ErrorFormat`, you can customize the format +of the `Error() string` function: + +```go +var result *multierror.Error + +// ... accumulate errors here, maybe using Append + +if result != nil { + result.ErrorFormat = func([]error) string { + return "errors!" + } +} +``` + +**Accessing the list of errors** + +`multierror.Error` implements `error` so if the caller doesn't know about +multierror, it will work just fine. 
But if you're aware a multierror might +be returned, you can use type switches to access the list of errors: + +```go +if err := something(); err != nil { + if merr, ok := err.(*multierror.Error); ok { + // Use merr.Errors + } +} +``` + +You can also use the standard [`errors.Unwrap`](https://golang.org/pkg/errors/#Unwrap) +function. This will continue to unwrap into subsequent errors until none exist. + +**Extracting an error** + +The standard library [`errors.As`](https://golang.org/pkg/errors/#As) +function can be used directly with a multierror to extract a specific error: + +```go +// Assume err is a multierror value +err := somefunc() + +// We want to know if "err" has a "RichErrorType" in it and extract it. +var errRich RichErrorType +if errors.As(err, &errRich) { + // It has it, and now errRich is populated. +} +``` + +**Checking for an exact error value** + +Some errors are returned as exact errors such as the [`ErrNotExist`](https://golang.org/pkg/os/#pkg-variables) +error in the `os` package. You can check if this error is present by using +the standard [`errors.Is`](https://golang.org/pkg/errors/#Is) function. + +```go +// Assume err is a multierror value +err := somefunc() +if errors.Is(err, os.ErrNotExist) { + // err contains os.ErrNotExist +} +``` + +**Returning a multierror only if there are errors** + +If you build a `multierror.Error`, you can use the `ErrorOrNil` function +to return an `error` implementation only if there are errors to return: + +```go +var result *multierror.Error + +// ... accumulate errors here + +// Return the `error` only if errors were added to the multierror, otherwise +// return nil since there are no errors. +return result.ErrorOrNil() +``` diff --git a/vendor/github.com/hashicorp/go-multierror/append.go b/vendor/github.com/hashicorp/go-multierror/append.go new file mode 100644 index 000000000..3e2589bfd --- /dev/null +++ b/vendor/github.com/hashicorp/go-multierror/append.go @@ -0,0 +1,43 @@ +package multierror + +// Append is a helper function that will append more errors +// onto an Error in order to create a larger multi-error. +// +// If err is not a multierror.Error, then it will be turned into +// one. If any of the errs are multierr.Error, they will be flattened +// one level into err. +// Any nil errors within errs will be ignored. If err is nil, a new +// *Error will be returned. +func Append(err error, errs ...error) *Error { + switch err := err.(type) { + case *Error: + // Typed nils can reach here, so initialize if we are nil + if err == nil { + err = new(Error) + } + + // Go through each error and flatten + for _, e := range errs { + switch e := e.(type) { + case *Error: + if e != nil { + err.Errors = append(err.Errors, e.Errors...) + } + default: + if e != nil { + err.Errors = append(err.Errors, e) + } + } + } + + return err + default: + newErrs := make([]error, 0, len(errs)+1) + if err != nil { + newErrs = append(newErrs, err) + } + newErrs = append(newErrs, errs...) + + return Append(&Error{}, newErrs...) + } +} diff --git a/vendor/github.com/hashicorp/go-multierror/flatten.go b/vendor/github.com/hashicorp/go-multierror/flatten.go new file mode 100644 index 000000000..aab8e9abe --- /dev/null +++ b/vendor/github.com/hashicorp/go-multierror/flatten.go @@ -0,0 +1,26 @@ +package multierror + +// Flatten flattens the given error, merging any *Errors together into +// a single *Error. 
+func Flatten(err error) error { + // If it isn't an *Error, just return the error as-is + if _, ok := err.(*Error); !ok { + return err + } + + // Otherwise, make the result and flatten away! + flatErr := new(Error) + flatten(err, flatErr) + return flatErr +} + +func flatten(err error, flatErr *Error) { + switch err := err.(type) { + case *Error: + for _, e := range err.Errors { + flatten(e, flatErr) + } + default: + flatErr.Errors = append(flatErr.Errors, err) + } +} diff --git a/vendor/github.com/hashicorp/go-multierror/format.go b/vendor/github.com/hashicorp/go-multierror/format.go new file mode 100644 index 000000000..47f13c49a --- /dev/null +++ b/vendor/github.com/hashicorp/go-multierror/format.go @@ -0,0 +1,27 @@ +package multierror + +import ( + "fmt" + "strings" +) + +// ErrorFormatFunc is a function callback that is called by Error to +// turn the list of errors into a string. +type ErrorFormatFunc func([]error) string + +// ListFormatFunc is a basic formatter that outputs the number of errors +// that occurred along with a bullet point list of the errors. +func ListFormatFunc(es []error) string { + if len(es) == 1 { + return fmt.Sprintf("1 error occurred:\n\t* %s\n\n", es[0]) + } + + points := make([]string, len(es)) + for i, err := range es { + points[i] = fmt.Sprintf("* %s", err) + } + + return fmt.Sprintf( + "%d errors occurred:\n\t%s\n\n", + len(es), strings.Join(points, "\n\t")) +} diff --git a/vendor/github.com/hashicorp/go-multierror/go.mod b/vendor/github.com/hashicorp/go-multierror/go.mod new file mode 100644 index 000000000..141cc4ccb --- /dev/null +++ b/vendor/github.com/hashicorp/go-multierror/go.mod @@ -0,0 +1,5 @@ +module github.com/hashicorp/go-multierror + +go 1.13 + +require github.com/hashicorp/errwrap v1.0.0 diff --git a/vendor/github.com/hashicorp/go-multierror/go.sum b/vendor/github.com/hashicorp/go-multierror/go.sum new file mode 100644 index 000000000..e8238e9ec --- /dev/null +++ b/vendor/github.com/hashicorp/go-multierror/go.sum @@ -0,0 +1,2 @@ +github.com/hashicorp/errwrap v1.0.0 h1:hLrqtEDnRye3+sgx6z4qVLNuviH3MR5aQ0ykNJa/UYA= +github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= diff --git a/vendor/github.com/hashicorp/go-multierror/group.go b/vendor/github.com/hashicorp/go-multierror/group.go new file mode 100644 index 000000000..9c29efb7f --- /dev/null +++ b/vendor/github.com/hashicorp/go-multierror/group.go @@ -0,0 +1,38 @@ +package multierror + +import "sync" + +// Group is a collection of goroutines which return errors that need to be +// coalesced. +type Group struct { + mutex sync.Mutex + err *Error + wg sync.WaitGroup +} + +// Go calls the given function in a new goroutine. +// +// If the function returns an error it is added to the group multierror which +// is returned by Wait. +func (g *Group) Go(f func() error) { + g.wg.Add(1) + + go func() { + defer g.wg.Done() + + if err := f(); err != nil { + g.mutex.Lock() + g.err = Append(g.err, err) + g.mutex.Unlock() + } + }() +} + +// Wait blocks until all function calls from the Go method have returned, then +// returns the multierror. 
+func (g *Group) Wait() *Error { + g.wg.Wait() + g.mutex.Lock() + defer g.mutex.Unlock() + return g.err +} diff --git a/vendor/github.com/hashicorp/go-multierror/multierror.go b/vendor/github.com/hashicorp/go-multierror/multierror.go new file mode 100644 index 000000000..f54574326 --- /dev/null +++ b/vendor/github.com/hashicorp/go-multierror/multierror.go @@ -0,0 +1,121 @@ +package multierror + +import ( + "errors" + "fmt" +) + +// Error is an error type to track multiple errors. This is used to +// accumulate errors in cases and return them as a single "error". +type Error struct { + Errors []error + ErrorFormat ErrorFormatFunc +} + +func (e *Error) Error() string { + fn := e.ErrorFormat + if fn == nil { + fn = ListFormatFunc + } + + return fn(e.Errors) +} + +// ErrorOrNil returns an error interface if this Error represents +// a list of errors, or returns nil if the list of errors is empty. This +// function is useful at the end of accumulation to make sure that the value +// returned represents the existence of errors. +func (e *Error) ErrorOrNil() error { + if e == nil { + return nil + } + if len(e.Errors) == 0 { + return nil + } + + return e +} + +func (e *Error) GoString() string { + return fmt.Sprintf("*%#v", *e) +} + +// WrappedErrors returns the list of errors that this Error is wrapping. It is +// an implementation of the errwrap.Wrapper interface so that multierror.Error +// can be used with that library. +// +// This method is not safe to be called concurrently. Unlike accessing the +// Errors field directly, this function also checks if the multierror is nil to +// prevent a null-pointer panic. It satisfies the errwrap.Wrapper interface. +func (e *Error) WrappedErrors() []error { + if e == nil { + return nil + } + return e.Errors +} + +// Unwrap returns an error from Error (or nil if there are no errors). +// This error returned will further support Unwrap to get the next error, +// etc. The order will match the order of Errors in the multierror.Error +// at the time of calling. +// +// The resulting error supports errors.As/Is/Unwrap so you can continue +// to use the stdlib errors package to introspect further. +// +// This will perform a shallow copy of the errors slice. Any errors appended +// to this error after calling Unwrap will not be available until a new +// Unwrap is called on the multierror.Error. +func (e *Error) Unwrap() error { + // If we have no errors then we do nothing + if e == nil || len(e.Errors) == 0 { + return nil + } + + // If we have exactly one error, we can just return that directly. + if len(e.Errors) == 1 { + return e.Errors[0] + } + + // Shallow copy the slice + errs := make([]error, len(e.Errors)) + copy(errs, e.Errors) + return chain(errs) +} + +// chain implements the interfaces necessary for errors.Is/As/Unwrap to +// work in a deterministic way with multierror. A chain tracks a list of +// errors while accounting for the current represented error. This lets +// Is/As be meaningful. +// +// Unwrap returns the next error. In the cleanest form, Unwrap would return +// the wrapped error here but we can't do that if we want to properly +// get access to all the errors. Instead, users are recommended to use +// Is/As to get the correct error type out. +// +// Precondition: []error is non-empty (len > 0) +type chain []error + +// Error implements the error interface +func (e chain) Error() string { + return e[0].Error() +} + +// Unwrap implements errors.Unwrap by returning the next error in the +// chain or nil if there are no more errors. 
+func (e chain) Unwrap() error { + if len(e) == 1 { + return nil + } + + return e[1:] +} + +// As implements errors.As by attempting to map to the current value. +func (e chain) As(target interface{}) bool { + return errors.As(e[0], target) +} + +// Is implements errors.Is by comparing the current value directly. +func (e chain) Is(target error) bool { + return errors.Is(e[0], target) +} diff --git a/vendor/github.com/hashicorp/go-multierror/prefix.go b/vendor/github.com/hashicorp/go-multierror/prefix.go new file mode 100644 index 000000000..5c477abe4 --- /dev/null +++ b/vendor/github.com/hashicorp/go-multierror/prefix.go @@ -0,0 +1,37 @@ +package multierror + +import ( + "fmt" + + "github.com/hashicorp/errwrap" +) + +// Prefix is a helper function that will prefix some text +// to the given error. If the error is a multierror.Error, then +// it will be prefixed to each wrapped error. +// +// This is useful to use when appending multiple multierrors +// together in order to give better scoping. +func Prefix(err error, prefix string) error { + if err == nil { + return nil + } + + format := fmt.Sprintf("%s {{err}}", prefix) + switch err := err.(type) { + case *Error: + // Typed nils can reach here, so initialize if we are nil + if err == nil { + err = new(Error) + } + + // Wrap each of the errors + for i, e := range err.Errors { + err.Errors[i] = errwrap.Wrapf(format, e) + } + + return err + default: + return errwrap.Wrapf(format, err) + } +} diff --git a/vendor/github.com/hashicorp/go-multierror/sort.go b/vendor/github.com/hashicorp/go-multierror/sort.go new file mode 100644 index 000000000..fecb14e81 --- /dev/null +++ b/vendor/github.com/hashicorp/go-multierror/sort.go @@ -0,0 +1,16 @@ +package multierror + +// Len implements sort.Interface function for length +func (err Error) Len() int { + return len(err.Errors) +} + +// Swap implements sort.Interface function for swapping elements +func (err Error) Swap(i, j int) { + err.Errors[i], err.Errors[j] = err.Errors[j], err.Errors[i] +} + +// Less implements sort.Interface function for determining order +func (err Error) Less(i, j int) bool { + return err.Errors[i].Error() < err.Errors[j].Error() +} diff --git a/vendor/github.com/hashicorp/hcl/.gitignore b/vendor/github.com/hashicorp/hcl/.gitignore new file mode 100644 index 000000000..15586a2b5 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/.gitignore @@ -0,0 +1,9 @@ +y.output + +# ignore intellij files +.idea +*.iml +*.ipr +*.iws + +*.test diff --git a/vendor/github.com/hashicorp/hcl/.travis.yml b/vendor/github.com/hashicorp/hcl/.travis.yml new file mode 100644 index 000000000..cb63a3216 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/.travis.yml @@ -0,0 +1,13 @@ +sudo: false + +language: go + +go: + - 1.x + - tip + +branches: + only: + - master + +script: make test diff --git a/vendor/github.com/hashicorp/hcl/LICENSE b/vendor/github.com/hashicorp/hcl/LICENSE new file mode 100644 index 000000000..c33dcc7c9 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/LICENSE @@ -0,0 +1,354 @@ +Mozilla Public License, version 2.0 + +1. Definitions + +1.1. “Contributor” + + means each individual or legal entity that creates, contributes to the + creation of, or owns Covered Software. + +1.2. “Contributor Version” + + means the combination of the Contributions of others (if any) used by a + Contributor and that particular Contributor’s Contribution. + +1.3. “Contribution” + + means Covered Software of a particular Contributor. + +1.4. 
“Covered Software” + + means Source Code Form to which the initial Contributor has attached the + notice in Exhibit A, the Executable Form of such Source Code Form, and + Modifications of such Source Code Form, in each case including portions + thereof. + +1.5. “Incompatible With Secondary Licenses” + means + + a. that the initial Contributor has attached the notice described in + Exhibit B to the Covered Software; or + + b. that the Covered Software was made available under the terms of version + 1.1 or earlier of the License, but not also under the terms of a + Secondary License. + +1.6. “Executable Form” + + means any form of the work other than Source Code Form. + +1.7. “Larger Work” + + means a work that combines Covered Software with other material, in a separate + file or files, that is not Covered Software. + +1.8. “License” + + means this document. + +1.9. “Licensable” + + means having the right to grant, to the maximum extent possible, whether at the + time of the initial grant or subsequently, any and all of the rights conveyed by + this License. + +1.10. “Modifications” + + means any of the following: + + a. any file in Source Code Form that results from an addition to, deletion + from, or modification of the contents of Covered Software; or + + b. any new file in Source Code Form that contains any Covered Software. + +1.11. “Patent Claims” of a Contributor + + means any patent claim(s), including without limitation, method, process, + and apparatus claims, in any patent Licensable by such Contributor that + would be infringed, but for the grant of the License, by the making, + using, selling, offering for sale, having made, import, or transfer of + either its Contributions or its Contributor Version. + +1.12. “Secondary License” + + means either the GNU General Public License, Version 2.0, the GNU Lesser + General Public License, Version 2.1, the GNU Affero General Public + License, Version 3.0, or any later versions of those licenses. + +1.13. “Source Code Form” + + means the form of the work preferred for making modifications. + +1.14. “You” (or “Your”) + + means an individual or a legal entity exercising rights under this + License. For legal entities, “You” includes any entity that controls, is + controlled by, or is under common control with You. For purposes of this + definition, “control” means (a) the power, direct or indirect, to cause + the direction or management of such entity, whether by contract or + otherwise, or (b) ownership of more than fifty percent (50%) of the + outstanding shares or beneficial ownership of such entity. + + +2. License Grants and Conditions + +2.1. Grants + + Each Contributor hereby grants You a world-wide, royalty-free, + non-exclusive license: + + a. under intellectual property rights (other than patent or trademark) + Licensable by such Contributor to use, reproduce, make available, + modify, display, perform, distribute, and otherwise exploit its + Contributions, either on an unmodified basis, with Modifications, or as + part of a Larger Work; and + + b. under Patent Claims of such Contributor to make, use, sell, offer for + sale, have made, import, and otherwise transfer either its Contributions + or its Contributor Version. + +2.2. Effective Date + + The licenses granted in Section 2.1 with respect to any Contribution become + effective for each Contribution on the date the Contributor first distributes + such Contribution. + +2.3. 
Limitations on Grant Scope + + The licenses granted in this Section 2 are the only rights granted under this + License. No additional rights or licenses will be implied from the distribution + or licensing of Covered Software under this License. Notwithstanding Section + 2.1(b) above, no patent license is granted by a Contributor: + + a. for any code that a Contributor has removed from Covered Software; or + + b. for infringements caused by: (i) Your and any other third party’s + modifications of Covered Software, or (ii) the combination of its + Contributions with other software (except as part of its Contributor + Version); or + + c. under Patent Claims infringed by Covered Software in the absence of its + Contributions. + + This License does not grant any rights in the trademarks, service marks, or + logos of any Contributor (except as may be necessary to comply with the + notice requirements in Section 3.4). + +2.4. Subsequent Licenses + + No Contributor makes additional grants as a result of Your choice to + distribute the Covered Software under a subsequent version of this License + (see Section 10.2) or under the terms of a Secondary License (if permitted + under the terms of Section 3.3). + +2.5. Representation + + Each Contributor represents that the Contributor believes its Contributions + are its original creation(s) or it has sufficient rights to grant the + rights to its Contributions conveyed by this License. + +2.6. Fair Use + + This License is not intended to limit any rights You have under applicable + copyright doctrines of fair use, fair dealing, or other equivalents. + +2.7. Conditions + + Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted in + Section 2.1. + + +3. Responsibilities + +3.1. Distribution of Source Form + + All distribution of Covered Software in Source Code Form, including any + Modifications that You create or to which You contribute, must be under the + terms of this License. You must inform recipients that the Source Code Form + of the Covered Software is governed by the terms of this License, and how + they can obtain a copy of this License. You may not attempt to alter or + restrict the recipients’ rights in the Source Code Form. + +3.2. Distribution of Executable Form + + If You distribute Covered Software in Executable Form then: + + a. such Covered Software must also be made available in Source Code Form, + as described in Section 3.1, and You must inform recipients of the + Executable Form how they can obtain a copy of such Source Code Form by + reasonable means in a timely manner, at a charge no more than the cost + of distribution to the recipient; and + + b. You may distribute such Executable Form under the terms of this License, + or sublicense it under different terms, provided that the license for + the Executable Form does not attempt to limit or alter the recipients’ + rights in the Source Code Form under this License. + +3.3. Distribution of a Larger Work + + You may create and distribute a Larger Work under terms of Your choice, + provided that You also comply with the requirements of this License for the + Covered Software. 
If the Larger Work is a combination of Covered Software + with a work governed by one or more Secondary Licenses, and the Covered + Software is not Incompatible With Secondary Licenses, this License permits + You to additionally distribute such Covered Software under the terms of + such Secondary License(s), so that the recipient of the Larger Work may, at + their option, further distribute the Covered Software under the terms of + either this License or such Secondary License(s). + +3.4. Notices + + You may not remove or alter the substance of any license notices (including + copyright notices, patent notices, disclaimers of warranty, or limitations + of liability) contained within the Source Code Form of the Covered + Software, except that You may alter any license notices to the extent + required to remedy known factual inaccuracies. + +3.5. Application of Additional Terms + + You may choose to offer, and to charge a fee for, warranty, support, + indemnity or liability obligations to one or more recipients of Covered + Software. However, You may do so only on Your own behalf, and not on behalf + of any Contributor. You must make it absolutely clear that any such + warranty, support, indemnity, or liability obligation is offered by You + alone, and You hereby agree to indemnify every Contributor for any + liability incurred by such Contributor as a result of warranty, support, + indemnity or liability terms You offer. You may include additional + disclaimers of warranty and limitations of liability specific to any + jurisdiction. + +4. Inability to Comply Due to Statute or Regulation + + If it is impossible for You to comply with any of the terms of this License + with respect to some or all of the Covered Software due to statute, judicial + order, or regulation then You must: (a) comply with the terms of this License + to the maximum extent possible; and (b) describe the limitations and the code + they affect. Such description must be placed in a text file included with all + distributions of the Covered Software under this License. Except to the + extent prohibited by statute or regulation, such description must be + sufficiently detailed for a recipient of ordinary skill to be able to + understand it. + +5. Termination + +5.1. The rights granted under this License will terminate automatically if You + fail to comply with any of its terms. However, if You become compliant, + then the rights granted under this License from a particular Contributor + are reinstated (a) provisionally, unless and until such Contributor + explicitly and finally terminates Your grants, and (b) on an ongoing basis, + if such Contributor fails to notify You of the non-compliance by some + reasonable means prior to 60 days after You have come back into compliance. + Moreover, Your grants from a particular Contributor are reinstated on an + ongoing basis if such Contributor notifies You of the non-compliance by + some reasonable means, this is the first time You have received notice of + non-compliance with this License from such Contributor, and You become + compliant prior to 30 days after Your receipt of the notice. + +5.2. If You initiate litigation against any entity by asserting a patent + infringement claim (excluding declaratory judgment actions, counter-claims, + and cross-claims) alleging that a Contributor Version directly or + indirectly infringes any patent, then the rights granted to You by any and + all Contributors for the Covered Software under Section 2.1 of this License + shall terminate. 
+ +5.3. In the event of termination under Sections 5.1 or 5.2 above, all end user + license agreements (excluding distributors and resellers) which have been + validly granted by You or Your distributors under this License prior to + termination shall survive termination. + +6. Disclaimer of Warranty + + Covered Software is provided under this License on an “as is” basis, without + warranty of any kind, either expressed, implied, or statutory, including, + without limitation, warranties that the Covered Software is free of defects, + merchantable, fit for a particular purpose or non-infringing. The entire + risk as to the quality and performance of the Covered Software is with You. + Should any Covered Software prove defective in any respect, You (not any + Contributor) assume the cost of any necessary servicing, repair, or + correction. This disclaimer of warranty constitutes an essential part of this + License. No use of any Covered Software is authorized under this License + except under this disclaimer. + +7. Limitation of Liability + + Under no circumstances and under no legal theory, whether tort (including + negligence), contract, or otherwise, shall any Contributor, or anyone who + distributes Covered Software as permitted above, be liable to You for any + direct, indirect, special, incidental, or consequential damages of any + character including, without limitation, damages for lost profits, loss of + goodwill, work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses, even if such party shall have been + informed of the possibility of such damages. This limitation of liability + shall not apply to liability for death or personal injury resulting from such + party’s negligence to the extent applicable law prohibits such limitation. + Some jurisdictions do not allow the exclusion or limitation of incidental or + consequential damages, so this exclusion and limitation may not apply to You. + +8. Litigation + + Any litigation relating to this License may be brought only in the courts of + a jurisdiction where the defendant maintains its principal place of business + and such litigation shall be governed by laws of that jurisdiction, without + reference to its conflict-of-law provisions. Nothing in this Section shall + prevent a party’s ability to bring cross-claims or counter-claims. + +9. Miscellaneous + + This License represents the complete agreement concerning the subject matter + hereof. If any provision of this License is held to be unenforceable, such + provision shall be reformed only to the extent necessary to make it + enforceable. Any law or regulation which provides that the language of a + contract shall be construed against the drafter shall not be used to construe + this License against a Contributor. + + +10. Versions of the License + +10.1. New Versions + + Mozilla Foundation is the license steward. Except as provided in Section + 10.3, no one other than the license steward has the right to modify or + publish new versions of this License. Each version will be given a + distinguishing version number. + +10.2. Effect of New Versions + + You may distribute the Covered Software under the terms of the version of + the License under which You originally received the Covered Software, or + under the terms of any subsequent version published by the license + steward. + +10.3. 
Modified Versions + + If you create software not governed by this License, and you want to + create a new license for such software, you may create and use a modified + version of this License if you rename the license and remove any + references to the name of the license steward (except to note that such + modified license differs from this License). + +10.4. Distributing Source Code Form that is Incompatible With Secondary Licenses + If You choose to distribute Source Code Form that is Incompatible With + Secondary Licenses under the terms of this version of the License, the + notice described in Exhibit B of this License must be attached. + +Exhibit A - Source Code Form License Notice + + This Source Code Form is subject to the + terms of the Mozilla Public License, v. + 2.0. If a copy of the MPL was not + distributed with this file, You can + obtain one at + http://mozilla.org/MPL/2.0/. + +If it is not possible or desirable to put the notice in a particular file, then +You may include the notice in a location (such as a LICENSE file in a relevant +directory) where a recipient would be likely to look for such a notice. + +You may add additional accurate notices of copyright ownership. + +Exhibit B - “Incompatible With Secondary Licenses” Notice + + This Source Code Form is “Incompatible + With Secondary Licenses”, as defined by + the Mozilla Public License, v. 2.0. + diff --git a/vendor/github.com/hashicorp/hcl/Makefile b/vendor/github.com/hashicorp/hcl/Makefile new file mode 100644 index 000000000..84fd743f5 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/Makefile @@ -0,0 +1,18 @@ +TEST?=./... + +default: test + +fmt: generate + go fmt ./... + +test: generate + go get -t ./... + go test $(TEST) $(TESTARGS) + +generate: + go generate ./... + +updatedeps: + go get -u golang.org/x/tools/cmd/stringer + +.PHONY: default generate test updatedeps diff --git a/vendor/github.com/hashicorp/hcl/README.md b/vendor/github.com/hashicorp/hcl/README.md new file mode 100644 index 000000000..c8223326d --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/README.md @@ -0,0 +1,125 @@ +# HCL + +[![GoDoc](https://godoc.org/github.com/hashicorp/hcl?status.png)](https://godoc.org/github.com/hashicorp/hcl) [![Build Status](https://travis-ci.org/hashicorp/hcl.svg?branch=master)](https://travis-ci.org/hashicorp/hcl) + +HCL (HashiCorp Configuration Language) is a configuration language built +by HashiCorp. The goal of HCL is to build a structured configuration language +that is both human and machine friendly for use with command-line tools, but +specifically targeted towards DevOps tools, servers, etc. + +HCL is also fully JSON compatible. That is, JSON can be used as completely +valid input to a system expecting HCL. This helps makes systems +interoperable with other systems. + +HCL is heavily inspired by +[libucl](https://github.com/vstakhov/libucl), +nginx configuration, and others similar. + +## Why? + +A common question when viewing HCL is to ask the question: why not +JSON, YAML, etc.? + +Prior to HCL, the tools we built at [HashiCorp](http://www.hashicorp.com) +used a variety of configuration languages from full programming languages +such as Ruby to complete data structure languages such as JSON. What we +learned is that some people wanted human-friendly configuration languages +and some people wanted machine-friendly languages. + +JSON fits a nice balance in this, but is fairly verbose and most +importantly doesn't support comments. 
With YAML, we found that beginners +had a really hard time determining what the actual structure was, and +ended up guessing more often than not whether to use a hyphen, colon, etc. +in order to represent some configuration key. + +Full programming languages such as Ruby enable complex behavior +a configuration language shouldn't usually allow, and also forces +people to learn some set of Ruby. + +Because of this, we decided to create our own configuration language +that is JSON-compatible. Our configuration language (HCL) is designed +to be written and modified by humans. The API for HCL allows JSON +as an input so that it is also machine-friendly (machines can generate +JSON instead of trying to generate HCL). + +Our goal with HCL is not to alienate other configuration languages. +It is instead to provide HCL as a specialized language for our tools, +and JSON as the interoperability layer. + +## Syntax + +For a complete grammar, please see the parser itself. A high-level overview +of the syntax and grammar is listed here. + + * Single line comments start with `#` or `//` + + * Multi-line comments are wrapped in `/*` and `*/`. Nested block comments + are not allowed. A multi-line comment (also known as a block comment) + terminates at the first `*/` found. + + * Values are assigned with the syntax `key = value` (whitespace doesn't + matter). The value can be any primitive: a string, number, boolean, + object, or list. + + * Strings are double-quoted and can contain any UTF-8 characters. + Example: `"Hello, World"` + + * Multi-line strings start with `<- + echo %Path% + + go version + + go env + + go get -t ./... + +build_script: +- cmd: go test -v ./... diff --git a/vendor/github.com/hashicorp/hcl/decoder.go b/vendor/github.com/hashicorp/hcl/decoder.go new file mode 100644 index 000000000..bed9ebbe1 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/decoder.go @@ -0,0 +1,729 @@ +package hcl + +import ( + "errors" + "fmt" + "reflect" + "sort" + "strconv" + "strings" + + "github.com/hashicorp/hcl/hcl/ast" + "github.com/hashicorp/hcl/hcl/parser" + "github.com/hashicorp/hcl/hcl/token" +) + +// This is the tag to use with structures to have settings for HCL +const tagName = "hcl" + +var ( + // nodeType holds a reference to the type of ast.Node + nodeType reflect.Type = findNodeType() +) + +// Unmarshal accepts a byte slice as input and writes the +// data to the value pointed to by v. +func Unmarshal(bs []byte, v interface{}) error { + root, err := parse(bs) + if err != nil { + return err + } + + return DecodeObject(v, root) +} + +// Decode reads the given input and decodes it into the structure +// given by `out`. +func Decode(out interface{}, in string) error { + obj, err := Parse(in) + if err != nil { + return err + } + + return DecodeObject(out, obj) +} + +// DecodeObject is a lower-level version of Decode. It decodes a +// raw Object into the given output. +func DecodeObject(out interface{}, n ast.Node) error { + val := reflect.ValueOf(out) + if val.Kind() != reflect.Ptr { + return errors.New("result must be a pointer") + } + + // If we have the file, we really decode the root node + if f, ok := n.(*ast.File); ok { + n = f.Node + } + + var d decoder + return d.decode("root", n, val.Elem()) +} + +type decoder struct { + stack []reflect.Kind +} + +func (d *decoder) decode(name string, node ast.Node, result reflect.Value) error { + k := result + + // If we have an interface with a valid value, we use that + // for the check. 
+ if result.Kind() == reflect.Interface { + elem := result.Elem() + if elem.IsValid() { + k = elem + } + } + + // Push current onto stack unless it is an interface. + if k.Kind() != reflect.Interface { + d.stack = append(d.stack, k.Kind()) + + // Schedule a pop + defer func() { + d.stack = d.stack[:len(d.stack)-1] + }() + } + + switch k.Kind() { + case reflect.Bool: + return d.decodeBool(name, node, result) + case reflect.Float32, reflect.Float64: + return d.decodeFloat(name, node, result) + case reflect.Int, reflect.Int32, reflect.Int64: + return d.decodeInt(name, node, result) + case reflect.Interface: + // When we see an interface, we make our own thing + return d.decodeInterface(name, node, result) + case reflect.Map: + return d.decodeMap(name, node, result) + case reflect.Ptr: + return d.decodePtr(name, node, result) + case reflect.Slice: + return d.decodeSlice(name, node, result) + case reflect.String: + return d.decodeString(name, node, result) + case reflect.Struct: + return d.decodeStruct(name, node, result) + default: + return &parser.PosError{ + Pos: node.Pos(), + Err: fmt.Errorf("%s: unknown kind to decode into: %s", name, k.Kind()), + } + } +} + +func (d *decoder) decodeBool(name string, node ast.Node, result reflect.Value) error { + switch n := node.(type) { + case *ast.LiteralType: + if n.Token.Type == token.BOOL { + v, err := strconv.ParseBool(n.Token.Text) + if err != nil { + return err + } + + result.Set(reflect.ValueOf(v)) + return nil + } + } + + return &parser.PosError{ + Pos: node.Pos(), + Err: fmt.Errorf("%s: unknown type %T", name, node), + } +} + +func (d *decoder) decodeFloat(name string, node ast.Node, result reflect.Value) error { + switch n := node.(type) { + case *ast.LiteralType: + if n.Token.Type == token.FLOAT || n.Token.Type == token.NUMBER { + v, err := strconv.ParseFloat(n.Token.Text, 64) + if err != nil { + return err + } + + result.Set(reflect.ValueOf(v).Convert(result.Type())) + return nil + } + } + + return &parser.PosError{ + Pos: node.Pos(), + Err: fmt.Errorf("%s: unknown type %T", name, node), + } +} + +func (d *decoder) decodeInt(name string, node ast.Node, result reflect.Value) error { + switch n := node.(type) { + case *ast.LiteralType: + switch n.Token.Type { + case token.NUMBER: + v, err := strconv.ParseInt(n.Token.Text, 0, 0) + if err != nil { + return err + } + + if result.Kind() == reflect.Interface { + result.Set(reflect.ValueOf(int(v))) + } else { + result.SetInt(v) + } + return nil + case token.STRING: + v, err := strconv.ParseInt(n.Token.Value().(string), 0, 0) + if err != nil { + return err + } + + if result.Kind() == reflect.Interface { + result.Set(reflect.ValueOf(int(v))) + } else { + result.SetInt(v) + } + return nil + } + } + + return &parser.PosError{ + Pos: node.Pos(), + Err: fmt.Errorf("%s: unknown type %T", name, node), + } +} + +func (d *decoder) decodeInterface(name string, node ast.Node, result reflect.Value) error { + // When we see an ast.Node, we retain the value to enable deferred decoding. + // Very useful in situations where we want to preserve ast.Node information + // like Pos + if result.Type() == nodeType && result.CanSet() { + result.Set(reflect.ValueOf(node)) + return nil + } + + var set reflect.Value + redecode := true + + // For testing types, ObjectType should just be treated as a list. We + // set this to a temporary var because we want to pass in the real node. 
+ testNode := node + if ot, ok := node.(*ast.ObjectType); ok { + testNode = ot.List + } + + switch n := testNode.(type) { + case *ast.ObjectList: + // If we're at the root or we're directly within a slice, then we + // decode objects into map[string]interface{}, otherwise we decode + // them into lists. + if len(d.stack) == 0 || d.stack[len(d.stack)-1] == reflect.Slice { + var temp map[string]interface{} + tempVal := reflect.ValueOf(temp) + result := reflect.MakeMap( + reflect.MapOf( + reflect.TypeOf(""), + tempVal.Type().Elem())) + + set = result + } else { + var temp []map[string]interface{} + tempVal := reflect.ValueOf(temp) + result := reflect.MakeSlice( + reflect.SliceOf(tempVal.Type().Elem()), 0, len(n.Items)) + set = result + } + case *ast.ObjectType: + // If we're at the root or we're directly within a slice, then we + // decode objects into map[string]interface{}, otherwise we decode + // them into lists. + if len(d.stack) == 0 || d.stack[len(d.stack)-1] == reflect.Slice { + var temp map[string]interface{} + tempVal := reflect.ValueOf(temp) + result := reflect.MakeMap( + reflect.MapOf( + reflect.TypeOf(""), + tempVal.Type().Elem())) + + set = result + } else { + var temp []map[string]interface{} + tempVal := reflect.ValueOf(temp) + result := reflect.MakeSlice( + reflect.SliceOf(tempVal.Type().Elem()), 0, 1) + set = result + } + case *ast.ListType: + var temp []interface{} + tempVal := reflect.ValueOf(temp) + result := reflect.MakeSlice( + reflect.SliceOf(tempVal.Type().Elem()), 0, 0) + set = result + case *ast.LiteralType: + switch n.Token.Type { + case token.BOOL: + var result bool + set = reflect.Indirect(reflect.New(reflect.TypeOf(result))) + case token.FLOAT: + var result float64 + set = reflect.Indirect(reflect.New(reflect.TypeOf(result))) + case token.NUMBER: + var result int + set = reflect.Indirect(reflect.New(reflect.TypeOf(result))) + case token.STRING, token.HEREDOC: + set = reflect.Indirect(reflect.New(reflect.TypeOf(""))) + default: + return &parser.PosError{ + Pos: node.Pos(), + Err: fmt.Errorf("%s: cannot decode into interface: %T", name, node), + } + } + default: + return fmt.Errorf( + "%s: cannot decode into interface: %T", + name, node) + } + + // Set the result to what its supposed to be, then reset + // result so we don't reflect into this method anymore. + result.Set(set) + + if redecode { + // Revisit the node so that we can use the newly instantiated + // thing and populate it. 
+ if err := d.decode(name, node, result); err != nil { + return err + } + } + + return nil +} + +func (d *decoder) decodeMap(name string, node ast.Node, result reflect.Value) error { + if item, ok := node.(*ast.ObjectItem); ok { + node = &ast.ObjectList{Items: []*ast.ObjectItem{item}} + } + + if ot, ok := node.(*ast.ObjectType); ok { + node = ot.List + } + + n, ok := node.(*ast.ObjectList) + if !ok { + return &parser.PosError{ + Pos: node.Pos(), + Err: fmt.Errorf("%s: not an object type for map (%T)", name, node), + } + } + + // If we have an interface, then we can address the interface, + // but not the slice itself, so get the element but set the interface + set := result + if result.Kind() == reflect.Interface { + result = result.Elem() + } + + resultType := result.Type() + resultElemType := resultType.Elem() + resultKeyType := resultType.Key() + if resultKeyType.Kind() != reflect.String { + return &parser.PosError{ + Pos: node.Pos(), + Err: fmt.Errorf("%s: map must have string keys", name), + } + } + + // Make a map if it is nil + resultMap := result + if result.IsNil() { + resultMap = reflect.MakeMap( + reflect.MapOf(resultKeyType, resultElemType)) + } + + // Go through each element and decode it. + done := make(map[string]struct{}) + for _, item := range n.Items { + if item.Val == nil { + continue + } + + // github.com/hashicorp/terraform/issue/5740 + if len(item.Keys) == 0 { + return &parser.PosError{ + Pos: node.Pos(), + Err: fmt.Errorf("%s: map must have string keys", name), + } + } + + // Get the key we're dealing with, which is the first item + keyStr := item.Keys[0].Token.Value().(string) + + // If we've already processed this key, then ignore it + if _, ok := done[keyStr]; ok { + continue + } + + // Determine the value. If we have more than one key, then we + // get the objectlist of only these keys. + itemVal := item.Val + if len(item.Keys) > 1 { + itemVal = n.Filter(keyStr) + done[keyStr] = struct{}{} + } + + // Make the field name + fieldName := fmt.Sprintf("%s.%s", name, keyStr) + + // Get the key/value as reflection values + key := reflect.ValueOf(keyStr) + val := reflect.Indirect(reflect.New(resultElemType)) + + // If we have a pre-existing value in the map, use that + oldVal := resultMap.MapIndex(key) + if oldVal.IsValid() { + val.Set(oldVal) + } + + // Decode! + if err := d.decode(fieldName, itemVal, val); err != nil { + return err + } + + // Set the value on the map + resultMap.SetMapIndex(key, val) + } + + // Set the final map if we can + set.Set(resultMap) + return nil +} + +func (d *decoder) decodePtr(name string, node ast.Node, result reflect.Value) error { + // Create an element of the concrete (non pointer) type and decode + // into that. Then set the value of the pointer to this type. 
+ resultType := result.Type() + resultElemType := resultType.Elem() + val := reflect.New(resultElemType) + if err := d.decode(name, node, reflect.Indirect(val)); err != nil { + return err + } + + result.Set(val) + return nil +} + +func (d *decoder) decodeSlice(name string, node ast.Node, result reflect.Value) error { + // If we have an interface, then we can address the interface, + // but not the slice itself, so get the element but set the interface + set := result + if result.Kind() == reflect.Interface { + result = result.Elem() + } + // Create the slice if it isn't nil + resultType := result.Type() + resultElemType := resultType.Elem() + if result.IsNil() { + resultSliceType := reflect.SliceOf(resultElemType) + result = reflect.MakeSlice( + resultSliceType, 0, 0) + } + + // Figure out the items we'll be copying into the slice + var items []ast.Node + switch n := node.(type) { + case *ast.ObjectList: + items = make([]ast.Node, len(n.Items)) + for i, item := range n.Items { + items[i] = item + } + case *ast.ObjectType: + items = []ast.Node{n} + case *ast.ListType: + items = n.List + default: + return &parser.PosError{ + Pos: node.Pos(), + Err: fmt.Errorf("unknown slice type: %T", node), + } + } + + for i, item := range items { + fieldName := fmt.Sprintf("%s[%d]", name, i) + + // Decode + val := reflect.Indirect(reflect.New(resultElemType)) + + // if item is an object that was decoded from ambiguous JSON and + // flattened, make sure it's expanded if it needs to decode into a + // defined structure. + item := expandObject(item, val) + + if err := d.decode(fieldName, item, val); err != nil { + return err + } + + // Append it onto the slice + result = reflect.Append(result, val) + } + + set.Set(result) + return nil +} + +// expandObject detects if an ambiguous JSON object was flattened to a List which +// should be decoded into a struct, and expands the ast to properly deocode. +func expandObject(node ast.Node, result reflect.Value) ast.Node { + item, ok := node.(*ast.ObjectItem) + if !ok { + return node + } + + elemType := result.Type() + + // our target type must be a struct + switch elemType.Kind() { + case reflect.Ptr: + switch elemType.Elem().Kind() { + case reflect.Struct: + //OK + default: + return node + } + case reflect.Struct: + //OK + default: + return node + } + + // A list value will have a key and field name. If it had more fields, + // it wouldn't have been flattened. 
+ if len(item.Keys) != 2 { + return node + } + + keyToken := item.Keys[0].Token + item.Keys = item.Keys[1:] + + // we need to un-flatten the ast enough to decode + newNode := &ast.ObjectItem{ + Keys: []*ast.ObjectKey{ + &ast.ObjectKey{ + Token: keyToken, + }, + }, + Val: &ast.ObjectType{ + List: &ast.ObjectList{ + Items: []*ast.ObjectItem{item}, + }, + }, + } + + return newNode +} + +func (d *decoder) decodeString(name string, node ast.Node, result reflect.Value) error { + switch n := node.(type) { + case *ast.LiteralType: + switch n.Token.Type { + case token.NUMBER: + result.Set(reflect.ValueOf(n.Token.Text).Convert(result.Type())) + return nil + case token.STRING, token.HEREDOC: + result.Set(reflect.ValueOf(n.Token.Value()).Convert(result.Type())) + return nil + } + } + + return &parser.PosError{ + Pos: node.Pos(), + Err: fmt.Errorf("%s: unknown type for string %T", name, node), + } +} + +func (d *decoder) decodeStruct(name string, node ast.Node, result reflect.Value) error { + var item *ast.ObjectItem + if it, ok := node.(*ast.ObjectItem); ok { + item = it + node = it.Val + } + + if ot, ok := node.(*ast.ObjectType); ok { + node = ot.List + } + + // Handle the special case where the object itself is a literal. Previously + // the yacc parser would always ensure top-level elements were arrays. The new + // parser does not make the same guarantees, thus we need to convert any + // top-level literal elements into a list. + if _, ok := node.(*ast.LiteralType); ok && item != nil { + node = &ast.ObjectList{Items: []*ast.ObjectItem{item}} + } + + list, ok := node.(*ast.ObjectList) + if !ok { + return &parser.PosError{ + Pos: node.Pos(), + Err: fmt.Errorf("%s: not an object type for struct (%T)", name, node), + } + } + + // This slice will keep track of all the structs we'll be decoding. + // There can be more than one struct if there are embedded structs + // that are squashed. + structs := make([]reflect.Value, 1, 5) + structs[0] = result + + // Compile the list of all the fields that we're going to be decoding + // from all the structs. + type field struct { + field reflect.StructField + val reflect.Value + } + fields := []field{} + for len(structs) > 0 { + structVal := structs[0] + structs = structs[1:] + + structType := structVal.Type() + for i := 0; i < structType.NumField(); i++ { + fieldType := structType.Field(i) + tagParts := strings.Split(fieldType.Tag.Get(tagName), ",") + + // Ignore fields with tag name "-" + if tagParts[0] == "-" { + continue + } + + if fieldType.Anonymous { + fieldKind := fieldType.Type.Kind() + if fieldKind != reflect.Struct { + return &parser.PosError{ + Pos: node.Pos(), + Err: fmt.Errorf("%s: unsupported type to struct: %s", + fieldType.Name, fieldKind), + } + } + + // We have an embedded field. We "squash" the fields down + // if specified in the tag. 
+ squash := false + for _, tag := range tagParts[1:] { + if tag == "squash" { + squash = true + break + } + } + + if squash { + structs = append( + structs, result.FieldByName(fieldType.Name)) + continue + } + } + + // Normal struct field, store it away + fields = append(fields, field{fieldType, structVal.Field(i)}) + } + } + + usedKeys := make(map[string]struct{}) + decodedFields := make([]string, 0, len(fields)) + decodedFieldsVal := make([]reflect.Value, 0) + unusedKeysVal := make([]reflect.Value, 0) + for _, f := range fields { + field, fieldValue := f.field, f.val + if !fieldValue.IsValid() { + // This should never happen + panic("field is not valid") + } + + // If we can't set the field, then it is unexported or something, + // and we just continue onwards. + if !fieldValue.CanSet() { + continue + } + + fieldName := field.Name + + tagValue := field.Tag.Get(tagName) + tagParts := strings.SplitN(tagValue, ",", 2) + if len(tagParts) >= 2 { + switch tagParts[1] { + case "decodedFields": + decodedFieldsVal = append(decodedFieldsVal, fieldValue) + continue + case "key": + if item == nil { + return &parser.PosError{ + Pos: node.Pos(), + Err: fmt.Errorf("%s: %s asked for 'key', impossible", + name, fieldName), + } + } + + fieldValue.SetString(item.Keys[0].Token.Value().(string)) + continue + case "unusedKeys": + unusedKeysVal = append(unusedKeysVal, fieldValue) + continue + } + } + + if tagParts[0] != "" { + fieldName = tagParts[0] + } + + // Determine the element we'll use to decode. If it is a single + // match (only object with the field), then we decode it exactly. + // If it is a prefix match, then we decode the matches. + filter := list.Filter(fieldName) + + prefixMatches := filter.Children() + matches := filter.Elem() + if len(matches.Items) == 0 && len(prefixMatches.Items) == 0 { + continue + } + + // Track the used key + usedKeys[fieldName] = struct{}{} + + // Create the field name and decode. We range over the elements + // because we actually want the value. 
+ fieldName = fmt.Sprintf("%s.%s", name, fieldName) + if len(prefixMatches.Items) > 0 { + if err := d.decode(fieldName, prefixMatches, fieldValue); err != nil { + return err + } + } + for _, match := range matches.Items { + var decodeNode ast.Node = match.Val + if ot, ok := decodeNode.(*ast.ObjectType); ok { + decodeNode = &ast.ObjectList{Items: ot.List.Items} + } + + if err := d.decode(fieldName, decodeNode, fieldValue); err != nil { + return err + } + } + + decodedFields = append(decodedFields, field.Name) + } + + if len(decodedFieldsVal) > 0 { + // Sort it so that it is deterministic + sort.Strings(decodedFields) + + for _, v := range decodedFieldsVal { + v.Set(reflect.ValueOf(decodedFields)) + } + } + + return nil +} + +// findNodeType returns the type of ast.Node +func findNodeType() reflect.Type { + var nodeContainer struct { + Node ast.Node + } + value := reflect.ValueOf(nodeContainer).FieldByName("Node") + return value.Type() +} diff --git a/vendor/github.com/hashicorp/hcl/go.mod b/vendor/github.com/hashicorp/hcl/go.mod new file mode 100644 index 000000000..4debbbe35 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/go.mod @@ -0,0 +1,3 @@ +module github.com/hashicorp/hcl + +require github.com/davecgh/go-spew v1.1.1 diff --git a/vendor/github.com/hashicorp/hcl/go.sum b/vendor/github.com/hashicorp/hcl/go.sum new file mode 100644 index 000000000..b5e2922e8 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/go.sum @@ -0,0 +1,2 @@ +github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= diff --git a/vendor/github.com/hashicorp/hcl/hcl.go b/vendor/github.com/hashicorp/hcl/hcl.go new file mode 100644 index 000000000..575a20b50 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/hcl.go @@ -0,0 +1,11 @@ +// Package hcl decodes HCL into usable Go structures. +// +// hcl input can come in either pure HCL format or JSON format. +// It can be parsed into an AST, and then decoded into a structure, +// or it can be decoded directly from a string into a structure. +// +// If you choose to parse HCL into a raw AST, the benefit is that you +// can write custom visitor implementations to implement custom +// semantic checks. By default, HCL does not perform any semantic +// checks. +package hcl diff --git a/vendor/github.com/hashicorp/hcl/hcl/ast/ast.go b/vendor/github.com/hashicorp/hcl/hcl/ast/ast.go new file mode 100644 index 000000000..6e5ef654b --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/hcl/ast/ast.go @@ -0,0 +1,219 @@ +// Package ast declares the types used to represent syntax trees for HCL +// (HashiCorp Configuration Language) +package ast + +import ( + "fmt" + "strings" + + "github.com/hashicorp/hcl/hcl/token" +) + +// Node is an element in the abstract syntax tree. +type Node interface { + node() + Pos() token.Pos +} + +func (File) node() {} +func (ObjectList) node() {} +func (ObjectKey) node() {} +func (ObjectItem) node() {} +func (Comment) node() {} +func (CommentGroup) node() {} +func (ObjectType) node() {} +func (LiteralType) node() {} +func (ListType) node() {} + +// File represents a single HCL file +type File struct { + Node Node // usually a *ObjectList + Comments []*CommentGroup // list of all comments in the source +} + +func (f *File) Pos() token.Pos { + return f.Node.Pos() +} + +// ObjectList represents a list of ObjectItems. An HCL file itself is an +// ObjectList. 
+type ObjectList struct { + Items []*ObjectItem +} + +func (o *ObjectList) Add(item *ObjectItem) { + o.Items = append(o.Items, item) +} + +// Filter filters out the objects with the given key list as a prefix. +// +// The returned list of objects contain ObjectItems where the keys have +// this prefix already stripped off. This might result in objects with +// zero-length key lists if they have no children. +// +// If no matches are found, an empty ObjectList (non-nil) is returned. +func (o *ObjectList) Filter(keys ...string) *ObjectList { + var result ObjectList + for _, item := range o.Items { + // If there aren't enough keys, then ignore this + if len(item.Keys) < len(keys) { + continue + } + + match := true + for i, key := range item.Keys[:len(keys)] { + key := key.Token.Value().(string) + if key != keys[i] && !strings.EqualFold(key, keys[i]) { + match = false + break + } + } + if !match { + continue + } + + // Strip off the prefix from the children + newItem := *item + newItem.Keys = newItem.Keys[len(keys):] + result.Add(&newItem) + } + + return &result +} + +// Children returns further nested objects (key length > 0) within this +// ObjectList. This should be used with Filter to get at child items. +func (o *ObjectList) Children() *ObjectList { + var result ObjectList + for _, item := range o.Items { + if len(item.Keys) > 0 { + result.Add(item) + } + } + + return &result +} + +// Elem returns items in the list that are direct element assignments +// (key length == 0). This should be used with Filter to get at elements. +func (o *ObjectList) Elem() *ObjectList { + var result ObjectList + for _, item := range o.Items { + if len(item.Keys) == 0 { + result.Add(item) + } + } + + return &result +} + +func (o *ObjectList) Pos() token.Pos { + // always returns the uninitiliazed position + return o.Items[0].Pos() +} + +// ObjectItem represents a HCL Object Item. An item is represented with a key +// (or keys). It can be an assignment or an object (both normal and nested) +type ObjectItem struct { + // keys is only one length long if it's of type assignment. If it's a + // nested object it can be larger than one. In that case "assign" is + // invalid as there is no assignments for a nested object. + Keys []*ObjectKey + + // assign contains the position of "=", if any + Assign token.Pos + + // val is the item itself. It can be an object,list, number, bool or a + // string. If key length is larger than one, val can be only of type + // Object. + Val Node + + LeadComment *CommentGroup // associated lead comment + LineComment *CommentGroup // associated line comment +} + +func (o *ObjectItem) Pos() token.Pos { + // I'm not entirely sure what causes this, but removing this causes + // a test failure. We should investigate at some point. + if len(o.Keys) == 0 { + return token.Pos{} + } + + return o.Keys[0].Pos() +} + +// ObjectKeys are either an identifier or of type string. +type ObjectKey struct { + Token token.Token +} + +func (o *ObjectKey) Pos() token.Pos { + return o.Token.Pos +} + +// LiteralType represents a literal of basic type. 
Valid types are: +// token.NUMBER, token.FLOAT, token.BOOL and token.STRING +type LiteralType struct { + Token token.Token + + // comment types, only used when in a list + LeadComment *CommentGroup + LineComment *CommentGroup +} + +func (l *LiteralType) Pos() token.Pos { + return l.Token.Pos +} + +// ListStatement represents a HCL List type +type ListType struct { + Lbrack token.Pos // position of "[" + Rbrack token.Pos // position of "]" + List []Node // the elements in lexical order +} + +func (l *ListType) Pos() token.Pos { + return l.Lbrack +} + +func (l *ListType) Add(node Node) { + l.List = append(l.List, node) +} + +// ObjectType represents a HCL Object Type +type ObjectType struct { + Lbrace token.Pos // position of "{" + Rbrace token.Pos // position of "}" + List *ObjectList // the nodes in lexical order +} + +func (o *ObjectType) Pos() token.Pos { + return o.Lbrace +} + +// Comment node represents a single //, # style or /*- style commment +type Comment struct { + Start token.Pos // position of / or # + Text string +} + +func (c *Comment) Pos() token.Pos { + return c.Start +} + +// CommentGroup node represents a sequence of comments with no other tokens and +// no empty lines between. +type CommentGroup struct { + List []*Comment // len(List) > 0 +} + +func (c *CommentGroup) Pos() token.Pos { + return c.List[0].Pos() +} + +//------------------------------------------------------------------- +// GoStringer +//------------------------------------------------------------------- + +func (o *ObjectKey) GoString() string { return fmt.Sprintf("*%#v", *o) } +func (o *ObjectList) GoString() string { return fmt.Sprintf("*%#v", *o) } diff --git a/vendor/github.com/hashicorp/hcl/hcl/ast/walk.go b/vendor/github.com/hashicorp/hcl/hcl/ast/walk.go new file mode 100644 index 000000000..ba07ad42b --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/hcl/ast/walk.go @@ -0,0 +1,52 @@ +package ast + +import "fmt" + +// WalkFunc describes a function to be called for each node during a Walk. The +// returned node can be used to rewrite the AST. Walking stops the returned +// bool is false. +type WalkFunc func(Node) (Node, bool) + +// Walk traverses an AST in depth-first order: It starts by calling fn(node); +// node must not be nil. If fn returns true, Walk invokes fn recursively for +// each of the non-nil children of node, followed by a call of fn(nil). The +// returned node of fn can be used to rewrite the passed node to fn. +func Walk(node Node, fn WalkFunc) Node { + rewritten, ok := fn(node) + if !ok { + return rewritten + } + + switch n := node.(type) { + case *File: + n.Node = Walk(n.Node, fn) + case *ObjectList: + for i, item := range n.Items { + n.Items[i] = Walk(item, fn).(*ObjectItem) + } + case *ObjectKey: + // nothing to do + case *ObjectItem: + for i, k := range n.Keys { + n.Keys[i] = Walk(k, fn).(*ObjectKey) + } + + if n.Val != nil { + n.Val = Walk(n.Val, fn) + } + case *LiteralType: + // nothing to do + case *ListType: + for i, l := range n.List { + n.List[i] = Walk(l, fn) + } + case *ObjectType: + n.List = Walk(n.List, fn).(*ObjectList) + default: + // should we panic here? 
+ fmt.Printf("unknown type: %T\n", n) + } + + fn(nil) + return rewritten +} diff --git a/vendor/github.com/hashicorp/hcl/hcl/parser/error.go b/vendor/github.com/hashicorp/hcl/hcl/parser/error.go new file mode 100644 index 000000000..5c99381df --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/hcl/parser/error.go @@ -0,0 +1,17 @@ +package parser + +import ( + "fmt" + + "github.com/hashicorp/hcl/hcl/token" +) + +// PosError is a parse error that contains a position. +type PosError struct { + Pos token.Pos + Err error +} + +func (e *PosError) Error() string { + return fmt.Sprintf("At %s: %s", e.Pos, e.Err) +} diff --git a/vendor/github.com/hashicorp/hcl/hcl/parser/parser.go b/vendor/github.com/hashicorp/hcl/hcl/parser/parser.go new file mode 100644 index 000000000..64c83bcfb --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/hcl/parser/parser.go @@ -0,0 +1,532 @@ +// Package parser implements a parser for HCL (HashiCorp Configuration +// Language) +package parser + +import ( + "bytes" + "errors" + "fmt" + "strings" + + "github.com/hashicorp/hcl/hcl/ast" + "github.com/hashicorp/hcl/hcl/scanner" + "github.com/hashicorp/hcl/hcl/token" +) + +type Parser struct { + sc *scanner.Scanner + + // Last read token + tok token.Token + commaPrev token.Token + + comments []*ast.CommentGroup + leadComment *ast.CommentGroup // last lead comment + lineComment *ast.CommentGroup // last line comment + + enableTrace bool + indent int + n int // buffer size (max = 1) +} + +func newParser(src []byte) *Parser { + return &Parser{ + sc: scanner.New(src), + } +} + +// Parse returns the fully parsed source and returns the abstract syntax tree. +func Parse(src []byte) (*ast.File, error) { + // normalize all line endings + // since the scanner and output only work with "\n" line endings, we may + // end up with dangling "\r" characters in the parsed data. + src = bytes.Replace(src, []byte("\r\n"), []byte("\n"), -1) + + p := newParser(src) + return p.Parse() +} + +var errEofToken = errors.New("EOF token found") + +// Parse returns the fully parsed source and returns the abstract syntax tree. +func (p *Parser) Parse() (*ast.File, error) { + f := &ast.File{} + var err, scerr error + p.sc.Error = func(pos token.Pos, msg string) { + scerr = &PosError{Pos: pos, Err: errors.New(msg)} + } + + f.Node, err = p.objectList(false) + if scerr != nil { + return nil, scerr + } + if err != nil { + return nil, err + } + + f.Comments = p.comments + return f, nil +} + +// objectList parses a list of items within an object (generally k/v pairs). +// The parameter" obj" tells this whether to we are within an object (braces: +// '{', '}') or just at the top level. If we're within an object, we end +// at an RBRACE. +func (p *Parser) objectList(obj bool) (*ast.ObjectList, error) { + defer un(trace(p, "ParseObjectList")) + node := &ast.ObjectList{} + + for { + if obj { + tok := p.scan() + p.unscan() + if tok.Type == token.RBRACE { + break + } + } + + n, err := p.objectItem() + if err == errEofToken { + break // we are finished + } + + // we don't return a nil node, because might want to use already + // collected items. + if err != nil { + return node, err + } + + node.Add(n) + + // object lists can be optionally comma-delimited e.g. 
when a list of maps + // is being expressed, so a comma is allowed here - it's simply consumed + tok := p.scan() + if tok.Type != token.COMMA { + p.unscan() + } + } + return node, nil +} + +func (p *Parser) consumeComment() (comment *ast.Comment, endline int) { + endline = p.tok.Pos.Line + + // count the endline if it's multiline comment, ie starting with /* + if len(p.tok.Text) > 1 && p.tok.Text[1] == '*' { + // don't use range here - no need to decode Unicode code points + for i := 0; i < len(p.tok.Text); i++ { + if p.tok.Text[i] == '\n' { + endline++ + } + } + } + + comment = &ast.Comment{Start: p.tok.Pos, Text: p.tok.Text} + p.tok = p.sc.Scan() + return +} + +func (p *Parser) consumeCommentGroup(n int) (comments *ast.CommentGroup, endline int) { + var list []*ast.Comment + endline = p.tok.Pos.Line + + for p.tok.Type == token.COMMENT && p.tok.Pos.Line <= endline+n { + var comment *ast.Comment + comment, endline = p.consumeComment() + list = append(list, comment) + } + + // add comment group to the comments list + comments = &ast.CommentGroup{List: list} + p.comments = append(p.comments, comments) + + return +} + +// objectItem parses a single object item +func (p *Parser) objectItem() (*ast.ObjectItem, error) { + defer un(trace(p, "ParseObjectItem")) + + keys, err := p.objectKey() + if len(keys) > 0 && err == errEofToken { + // We ignore eof token here since it is an error if we didn't + // receive a value (but we did receive a key) for the item. + err = nil + } + if len(keys) > 0 && err != nil && p.tok.Type == token.RBRACE { + // This is a strange boolean statement, but what it means is: + // We have keys with no value, and we're likely in an object + // (since RBrace ends an object). For this, we set err to nil so + // we continue and get the error below of having the wrong value + // type. + err = nil + + // Reset the token type so we don't think it completed fine. See + // objectType which uses p.tok.Type to check if we're done with + // the object. + p.tok.Type = token.EOF + } + if err != nil { + return nil, err + } + + o := &ast.ObjectItem{ + Keys: keys, + } + + if p.leadComment != nil { + o.LeadComment = p.leadComment + p.leadComment = nil + } + + switch p.tok.Type { + case token.ASSIGN: + o.Assign = p.tok.Pos + o.Val, err = p.object() + if err != nil { + return nil, err + } + case token.LBRACE: + o.Val, err = p.objectType() + if err != nil { + return nil, err + } + default: + keyStr := make([]string, 0, len(keys)) + for _, k := range keys { + keyStr = append(keyStr, k.Token.Text) + } + + return nil, &PosError{ + Pos: p.tok.Pos, + Err: fmt.Errorf( + "key '%s' expected start of object ('{') or assignment ('=')", + strings.Join(keyStr, " ")), + } + } + + // key=#comment + // val + if p.lineComment != nil { + o.LineComment, p.lineComment = p.lineComment, nil + } + + // do a look-ahead for line comment + p.scan() + if len(keys) > 0 && o.Val.Pos().Line == keys[0].Pos().Line && p.lineComment != nil { + o.LineComment = p.lineComment + p.lineComment = nil + } + p.unscan() + return o, nil +} + +// objectKey parses an object key and returns a ObjectKey AST +func (p *Parser) objectKey() ([]*ast.ObjectKey, error) { + keyCount := 0 + keys := make([]*ast.ObjectKey, 0) + + for { + tok := p.scan() + switch tok.Type { + case token.EOF: + // It is very important to also return the keys here as well as + // the error. This is because we need to be able to tell if we + // did parse keys prior to finding the EOF, or if we just found + // a bare EOF. 
+ return keys, errEofToken + case token.ASSIGN: + // assignment or object only, but not nested objects. this is not + // allowed: `foo bar = {}` + if keyCount > 1 { + return nil, &PosError{ + Pos: p.tok.Pos, + Err: fmt.Errorf("nested object expected: LBRACE got: %s", p.tok.Type), + } + } + + if keyCount == 0 { + return nil, &PosError{ + Pos: p.tok.Pos, + Err: errors.New("no object keys found!"), + } + } + + return keys, nil + case token.LBRACE: + var err error + + // If we have no keys, then it is a syntax error. i.e. {{}} is not + // allowed. + if len(keys) == 0 { + err = &PosError{ + Pos: p.tok.Pos, + Err: fmt.Errorf("expected: IDENT | STRING got: %s", p.tok.Type), + } + } + + // object + return keys, err + case token.IDENT, token.STRING: + keyCount++ + keys = append(keys, &ast.ObjectKey{Token: p.tok}) + case token.ILLEGAL: + return keys, &PosError{ + Pos: p.tok.Pos, + Err: fmt.Errorf("illegal character"), + } + default: + return keys, &PosError{ + Pos: p.tok.Pos, + Err: fmt.Errorf("expected: IDENT | STRING | ASSIGN | LBRACE got: %s", p.tok.Type), + } + } + } +} + +// object parses any type of object, such as number, bool, string, object or +// list. +func (p *Parser) object() (ast.Node, error) { + defer un(trace(p, "ParseType")) + tok := p.scan() + + switch tok.Type { + case token.NUMBER, token.FLOAT, token.BOOL, token.STRING, token.HEREDOC: + return p.literalType() + case token.LBRACE: + return p.objectType() + case token.LBRACK: + return p.listType() + case token.COMMENT: + // implement comment + case token.EOF: + return nil, errEofToken + } + + return nil, &PosError{ + Pos: tok.Pos, + Err: fmt.Errorf("Unknown token: %+v", tok), + } +} + +// objectType parses an object type and returns a ObjectType AST +func (p *Parser) objectType() (*ast.ObjectType, error) { + defer un(trace(p, "ParseObjectType")) + + // we assume that the currently scanned token is a LBRACE + o := &ast.ObjectType{ + Lbrace: p.tok.Pos, + } + + l, err := p.objectList(true) + + // if we hit RBRACE, we are good to go (means we parsed all Items), if it's + // not a RBRACE, it's an syntax error and we just return it. 
+ if err != nil && p.tok.Type != token.RBRACE { + return nil, err + } + + // No error, scan and expect the ending to be a brace + if tok := p.scan(); tok.Type != token.RBRACE { + return nil, &PosError{ + Pos: tok.Pos, + Err: fmt.Errorf("object expected closing RBRACE got: %s", tok.Type), + } + } + + o.List = l + o.Rbrace = p.tok.Pos // advanced via parseObjectList + return o, nil +} + +// listType parses a list type and returns a ListType AST +func (p *Parser) listType() (*ast.ListType, error) { + defer un(trace(p, "ParseListType")) + + // we assume that the currently scanned token is a LBRACK + l := &ast.ListType{ + Lbrack: p.tok.Pos, + } + + needComma := false + for { + tok := p.scan() + if needComma { + switch tok.Type { + case token.COMMA, token.RBRACK: + default: + return nil, &PosError{ + Pos: tok.Pos, + Err: fmt.Errorf( + "error parsing list, expected comma or list end, got: %s", + tok.Type), + } + } + } + switch tok.Type { + case token.BOOL, token.NUMBER, token.FLOAT, token.STRING, token.HEREDOC: + node, err := p.literalType() + if err != nil { + return nil, err + } + + // If there is a lead comment, apply it + if p.leadComment != nil { + node.LeadComment = p.leadComment + p.leadComment = nil + } + + l.Add(node) + needComma = true + case token.COMMA: + // get next list item or we are at the end + // do a look-ahead for line comment + p.scan() + if p.lineComment != nil && len(l.List) > 0 { + lit, ok := l.List[len(l.List)-1].(*ast.LiteralType) + if ok { + lit.LineComment = p.lineComment + l.List[len(l.List)-1] = lit + p.lineComment = nil + } + } + p.unscan() + + needComma = false + continue + case token.LBRACE: + // Looks like a nested object, so parse it out + node, err := p.objectType() + if err != nil { + return nil, &PosError{ + Pos: tok.Pos, + Err: fmt.Errorf( + "error while trying to parse object within list: %s", err), + } + } + l.Add(node) + needComma = true + case token.LBRACK: + node, err := p.listType() + if err != nil { + return nil, &PosError{ + Pos: tok.Pos, + Err: fmt.Errorf( + "error while trying to parse list within list: %s", err), + } + } + l.Add(node) + case token.RBRACK: + // finished + l.Rbrack = p.tok.Pos + return l, nil + default: + return nil, &PosError{ + Pos: tok.Pos, + Err: fmt.Errorf("unexpected token while parsing list: %s", tok.Type), + } + } + } +} + +// literalType parses a literal type and returns a LiteralType AST +func (p *Parser) literalType() (*ast.LiteralType, error) { + defer un(trace(p, "ParseLiteral")) + + return &ast.LiteralType{ + Token: p.tok, + }, nil +} + +// scan returns the next token from the underlying scanner. If a token has +// been unscanned then read that instead. In the process, it collects any +// comment groups encountered, and remembers the last lead and line comments. +func (p *Parser) scan() token.Token { + // If we have a token on the buffer, then return it. + if p.n != 0 { + p.n = 0 + return p.tok + } + + // Otherwise read the next token from the scanner and Save it to the buffer + // in case we unscan later. + prev := p.tok + p.tok = p.sc.Scan() + + if p.tok.Type == token.COMMENT { + var comment *ast.CommentGroup + var endline int + + // fmt.Printf("p.tok.Pos.Line = %+v prev: %d endline %d \n", + // p.tok.Pos.Line, prev.Pos.Line, endline) + if p.tok.Pos.Line == prev.Pos.Line { + // The comment is on same line as the previous token; it + // cannot be a lead comment but may be a line comment. 
+ comment, endline = p.consumeCommentGroup(0) + if p.tok.Pos.Line != endline { + // The next token is on a different line, thus + // the last comment group is a line comment. + p.lineComment = comment + } + } + + // consume successor comments, if any + endline = -1 + for p.tok.Type == token.COMMENT { + comment, endline = p.consumeCommentGroup(1) + } + + if endline+1 == p.tok.Pos.Line && p.tok.Type != token.RBRACE { + switch p.tok.Type { + case token.RBRACE, token.RBRACK: + // Do not count for these cases + default: + // The next token is following on the line immediately after the + // comment group, thus the last comment group is a lead comment. + p.leadComment = comment + } + } + + } + + return p.tok +} + +// unscan pushes the previously read token back onto the buffer. +func (p *Parser) unscan() { + p.n = 1 +} + +// ---------------------------------------------------------------------------- +// Parsing support + +func (p *Parser) printTrace(a ...interface{}) { + if !p.enableTrace { + return + } + + const dots = ". . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . " + const n = len(dots) + fmt.Printf("%5d:%3d: ", p.tok.Pos.Line, p.tok.Pos.Column) + + i := 2 * p.indent + for i > n { + fmt.Print(dots) + i -= n + } + // i <= n + fmt.Print(dots[0:i]) + fmt.Println(a...) +} + +func trace(p *Parser, msg string) *Parser { + p.printTrace(msg, "(") + p.indent++ + return p +} + +// Usage pattern: defer un(trace(p, "...")) +func un(p *Parser) { + p.indent-- + p.printTrace(")") +} diff --git a/vendor/github.com/hashicorp/hcl/hcl/printer/nodes.go b/vendor/github.com/hashicorp/hcl/hcl/printer/nodes.go new file mode 100644 index 000000000..7c038d12a --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/hcl/printer/nodes.go @@ -0,0 +1,789 @@ +package printer + +import ( + "bytes" + "fmt" + "sort" + + "github.com/hashicorp/hcl/hcl/ast" + "github.com/hashicorp/hcl/hcl/token" +) + +const ( + blank = byte(' ') + newline = byte('\n') + tab = byte('\t') + infinity = 1 << 30 // offset or line +) + +var ( + unindent = []byte("\uE123") // in the private use space +) + +type printer struct { + cfg Config + prev token.Pos + + comments []*ast.CommentGroup // may be nil, contains all comments + standaloneComments []*ast.CommentGroup // contains all standalone comments (not assigned to any node) + + enableTrace bool + indentTrace int +} + +type ByPosition []*ast.CommentGroup + +func (b ByPosition) Len() int { return len(b) } +func (b ByPosition) Swap(i, j int) { b[i], b[j] = b[j], b[i] } +func (b ByPosition) Less(i, j int) bool { return b[i].Pos().Before(b[j].Pos()) } + +// collectComments comments all standalone comments which are not lead or line +// comment +func (p *printer) collectComments(node ast.Node) { + // first collect all comments. This is already stored in + // ast.File.(comments) + ast.Walk(node, func(nn ast.Node) (ast.Node, bool) { + switch t := nn.(type) { + case *ast.File: + p.comments = t.Comments + return nn, false + } + return nn, true + }) + + standaloneComments := make(map[token.Pos]*ast.CommentGroup, 0) + for _, c := range p.comments { + standaloneComments[c.Pos()] = c + } + + // next remove all lead and line comments from the overall comment map. + // This will give us comments which are standalone, comments which are not + // assigned to any kind of node. 
+ ast.Walk(node, func(nn ast.Node) (ast.Node, bool) { + switch t := nn.(type) { + case *ast.LiteralType: + if t.LeadComment != nil { + for _, comment := range t.LeadComment.List { + if _, ok := standaloneComments[comment.Pos()]; ok { + delete(standaloneComments, comment.Pos()) + } + } + } + + if t.LineComment != nil { + for _, comment := range t.LineComment.List { + if _, ok := standaloneComments[comment.Pos()]; ok { + delete(standaloneComments, comment.Pos()) + } + } + } + case *ast.ObjectItem: + if t.LeadComment != nil { + for _, comment := range t.LeadComment.List { + if _, ok := standaloneComments[comment.Pos()]; ok { + delete(standaloneComments, comment.Pos()) + } + } + } + + if t.LineComment != nil { + for _, comment := range t.LineComment.List { + if _, ok := standaloneComments[comment.Pos()]; ok { + delete(standaloneComments, comment.Pos()) + } + } + } + } + + return nn, true + }) + + for _, c := range standaloneComments { + p.standaloneComments = append(p.standaloneComments, c) + } + + sort.Sort(ByPosition(p.standaloneComments)) +} + +// output prints creates b printable HCL output and returns it. +func (p *printer) output(n interface{}) []byte { + var buf bytes.Buffer + + switch t := n.(type) { + case *ast.File: + // File doesn't trace so we add the tracing here + defer un(trace(p, "File")) + return p.output(t.Node) + case *ast.ObjectList: + defer un(trace(p, "ObjectList")) + + var index int + for { + // Determine the location of the next actual non-comment + // item. If we're at the end, the next item is at "infinity" + var nextItem token.Pos + if index != len(t.Items) { + nextItem = t.Items[index].Pos() + } else { + nextItem = token.Pos{Offset: infinity, Line: infinity} + } + + // Go through the standalone comments in the file and print out + // the comments that we should be for this object item. + for _, c := range p.standaloneComments { + // Go through all the comments in the group. The group + // should be printed together, not separated by double newlines. + printed := false + newlinePrinted := false + for _, comment := range c.List { + // We only care about comments after the previous item + // we've printed so that comments are printed in the + // correct locations (between two objects for example). + // And before the next item. + if comment.Pos().After(p.prev) && comment.Pos().Before(nextItem) { + // if we hit the end add newlines so we can print the comment + // we don't do this if prev is invalid which means the + // beginning of the file since the first comment should + // be at the first line. + if !newlinePrinted && p.prev.IsValid() && index == len(t.Items) { + buf.Write([]byte{newline, newline}) + newlinePrinted = true + } + + // Write the actual comment. + buf.WriteString(comment.Text) + buf.WriteByte(newline) + + // Set printed to true to note that we printed something + printed = true + } + } + + // If we're not at the last item, write a new line so + // that there is a newline separating this comment from + // the next object. + if printed && index != len(t.Items) { + buf.WriteByte(newline) + } + } + + if index == len(t.Items) { + break + } + + buf.Write(p.output(t.Items[index])) + if index != len(t.Items)-1 { + // Always write a newline to separate us from the next item + buf.WriteByte(newline) + + // Need to determine if we're going to separate the next item + // with a blank line. The logic here is simple, though there + // are a few conditions: + // + // 1. The next object is more than one line away anyways, + // so we need an empty line. + // + // 2. 
The next object is not a "single line" object, so + // we need an empty line. + // + // 3. This current object is not a single line object, + // so we need an empty line. + current := t.Items[index] + next := t.Items[index+1] + if next.Pos().Line != t.Items[index].Pos().Line+1 || + !p.isSingleLineObject(next) || + !p.isSingleLineObject(current) { + buf.WriteByte(newline) + } + } + index++ + } + case *ast.ObjectKey: + buf.WriteString(t.Token.Text) + case *ast.ObjectItem: + p.prev = t.Pos() + buf.Write(p.objectItem(t)) + case *ast.LiteralType: + buf.Write(p.literalType(t)) + case *ast.ListType: + buf.Write(p.list(t)) + case *ast.ObjectType: + buf.Write(p.objectType(t)) + default: + fmt.Printf(" unknown type: %T\n", n) + } + + return buf.Bytes() +} + +func (p *printer) literalType(lit *ast.LiteralType) []byte { + result := []byte(lit.Token.Text) + switch lit.Token.Type { + case token.HEREDOC: + // Clear the trailing newline from heredocs + if result[len(result)-1] == '\n' { + result = result[:len(result)-1] + } + + // Poison lines 2+ so that we don't indent them + result = p.heredocIndent(result) + case token.STRING: + // If this is a multiline string, poison lines 2+ so we don't + // indent them. + if bytes.IndexRune(result, '\n') >= 0 { + result = p.heredocIndent(result) + } + } + + return result +} + +// objectItem returns the printable HCL form of an object item. An object type +// starts with one/multiple keys and has a value. The value might be of any +// type. +func (p *printer) objectItem(o *ast.ObjectItem) []byte { + defer un(trace(p, fmt.Sprintf("ObjectItem: %s", o.Keys[0].Token.Text))) + var buf bytes.Buffer + + if o.LeadComment != nil { + for _, comment := range o.LeadComment.List { + buf.WriteString(comment.Text) + buf.WriteByte(newline) + } + } + + // If key and val are on different lines, treat line comments like lead comments. + if o.LineComment != nil && o.Val.Pos().Line != o.Keys[0].Pos().Line { + for _, comment := range o.LineComment.List { + buf.WriteString(comment.Text) + buf.WriteByte(newline) + } + } + + for i, k := range o.Keys { + buf.WriteString(k.Token.Text) + buf.WriteByte(blank) + + // reach end of key + if o.Assign.IsValid() && i == len(o.Keys)-1 && len(o.Keys) == 1 { + buf.WriteString("=") + buf.WriteByte(blank) + } + } + + buf.Write(p.output(o.Val)) + + if o.LineComment != nil && o.Val.Pos().Line == o.Keys[0].Pos().Line { + buf.WriteByte(blank) + for _, comment := range o.LineComment.List { + buf.WriteString(comment.Text) + } + } + + return buf.Bytes() +} + +// objectType returns the printable HCL form of an object type. An object type +// begins with a brace and ends with a brace. +func (p *printer) objectType(o *ast.ObjectType) []byte { + defer un(trace(p, "ObjectType")) + var buf bytes.Buffer + buf.WriteString("{") + + var index int + var nextItem token.Pos + var commented, newlinePrinted bool + for { + // Determine the location of the next actual non-comment + // item. If we're at the end, the next item is the closing brace + if index != len(o.List.Items) { + nextItem = o.List.Items[index].Pos() + } else { + nextItem = o.Rbrace + } + + // Go through the standalone comments in the file and print out + // the comments that we should be for this object item. + for _, c := range p.standaloneComments { + printed := false + var lastCommentPos token.Pos + for _, comment := range c.List { + // We only care about comments after the previous item + // we've printed so that comments are printed in the + // correct locations (between two objects for example). 
+ // And before the next item. + if comment.Pos().After(p.prev) && comment.Pos().Before(nextItem) { + // If there are standalone comments and the initial newline has not + // been printed yet, do it now. + if !newlinePrinted { + newlinePrinted = true + buf.WriteByte(newline) + } + + // add newline if it's between other printed nodes + if index > 0 { + commented = true + buf.WriteByte(newline) + } + + // Store this position + lastCommentPos = comment.Pos() + + // output the comment itself + buf.Write(p.indent(p.heredocIndent([]byte(comment.Text)))) + + // Set printed to true to note that we printed something + printed = true + + /* + if index != len(o.List.Items) { + buf.WriteByte(newline) // do not print on the end + } + */ + } + } + + // Stuff to do if we had comments + if printed { + // Always write a newline + buf.WriteByte(newline) + + // If there is another item in the object and our comment + // didn't hug it directly, then make sure there is a blank + // line separating them. + if nextItem != o.Rbrace && nextItem.Line != lastCommentPos.Line+1 { + buf.WriteByte(newline) + } + } + } + + if index == len(o.List.Items) { + p.prev = o.Rbrace + break + } + + // At this point we are sure that it's not a totally empty block: print + // the initial newline if it hasn't been printed yet by the previous + // block about standalone comments. + if !newlinePrinted { + buf.WriteByte(newline) + newlinePrinted = true + } + + // check if we have adjacent one liner items. If yes we'll going to align + // the comments. + var aligned []*ast.ObjectItem + for _, item := range o.List.Items[index:] { + // we don't group one line lists + if len(o.List.Items) == 1 { + break + } + + // one means a oneliner with out any lead comment + // two means a oneliner with lead comment + // anything else might be something else + cur := lines(string(p.objectItem(item))) + if cur > 2 { + break + } + + curPos := item.Pos() + + nextPos := token.Pos{} + if index != len(o.List.Items)-1 { + nextPos = o.List.Items[index+1].Pos() + } + + prevPos := token.Pos{} + if index != 0 { + prevPos = o.List.Items[index-1].Pos() + } + + // fmt.Println("DEBUG ----------------") + // fmt.Printf("prev = %+v prevPos: %s\n", prev, prevPos) + // fmt.Printf("cur = %+v curPos: %s\n", cur, curPos) + // fmt.Printf("next = %+v nextPos: %s\n", next, nextPos) + + if curPos.Line+1 == nextPos.Line { + aligned = append(aligned, item) + index++ + continue + } + + if curPos.Line-1 == prevPos.Line { + aligned = append(aligned, item) + index++ + + // finish if we have a new line or comment next. This happens + // if the next item is not adjacent + if curPos.Line+1 != nextPos.Line { + break + } + continue + } + + break + } + + // put newlines if the items are between other non aligned items. 
+ // newlines are also added if there is a standalone comment already, so + // check it too + if !commented && index != len(aligned) { + buf.WriteByte(newline) + } + + if len(aligned) >= 1 { + p.prev = aligned[len(aligned)-1].Pos() + + items := p.alignedItems(aligned) + buf.Write(p.indent(items)) + } else { + p.prev = o.List.Items[index].Pos() + + buf.Write(p.indent(p.objectItem(o.List.Items[index]))) + index++ + } + + buf.WriteByte(newline) + } + + buf.WriteString("}") + return buf.Bytes() +} + +func (p *printer) alignedItems(items []*ast.ObjectItem) []byte { + var buf bytes.Buffer + + // find the longest key and value length, needed for alignment + var longestKeyLen int // longest key length + var longestValLen int // longest value length + for _, item := range items { + key := len(item.Keys[0].Token.Text) + val := len(p.output(item.Val)) + + if key > longestKeyLen { + longestKeyLen = key + } + + if val > longestValLen { + longestValLen = val + } + } + + for i, item := range items { + if item.LeadComment != nil { + for _, comment := range item.LeadComment.List { + buf.WriteString(comment.Text) + buf.WriteByte(newline) + } + } + + for i, k := range item.Keys { + keyLen := len(k.Token.Text) + buf.WriteString(k.Token.Text) + for i := 0; i < longestKeyLen-keyLen+1; i++ { + buf.WriteByte(blank) + } + + // reach end of key + if i == len(item.Keys)-1 && len(item.Keys) == 1 { + buf.WriteString("=") + buf.WriteByte(blank) + } + } + + val := p.output(item.Val) + valLen := len(val) + buf.Write(val) + + if item.Val.Pos().Line == item.Keys[0].Pos().Line && item.LineComment != nil { + for i := 0; i < longestValLen-valLen+1; i++ { + buf.WriteByte(blank) + } + + for _, comment := range item.LineComment.List { + buf.WriteString(comment.Text) + } + } + + // do not print for the last item + if i != len(items)-1 { + buf.WriteByte(newline) + } + } + + return buf.Bytes() +} + +// list returns the printable HCL form of an list type. +func (p *printer) list(l *ast.ListType) []byte { + if p.isSingleLineList(l) { + return p.singleLineList(l) + } + + var buf bytes.Buffer + buf.WriteString("[") + buf.WriteByte(newline) + + var longestLine int + for _, item := range l.List { + // for now we assume that the list only contains literal types + if lit, ok := item.(*ast.LiteralType); ok { + lineLen := len(lit.Token.Text) + if lineLen > longestLine { + longestLine = lineLen + } + } + } + + haveEmptyLine := false + for i, item := range l.List { + // If we have a lead comment, then we want to write that first + leadComment := false + if lit, ok := item.(*ast.LiteralType); ok && lit.LeadComment != nil { + leadComment = true + + // Ensure an empty line before every element with a + // lead comment (except the first item in a list). + if !haveEmptyLine && i != 0 { + buf.WriteByte(newline) + } + + for _, comment := range lit.LeadComment.List { + buf.Write(p.indent([]byte(comment.Text))) + buf.WriteByte(newline) + } + } + + // also indent each line + val := p.output(item) + curLen := len(val) + buf.Write(p.indent(val)) + + // if this item is a heredoc, then we output the comma on + // the next line. This is the only case this happens. 
+ comma := []byte{','} + if lit, ok := item.(*ast.LiteralType); ok && lit.Token.Type == token.HEREDOC { + buf.WriteByte(newline) + comma = p.indent(comma) + } + + buf.Write(comma) + + if lit, ok := item.(*ast.LiteralType); ok && lit.LineComment != nil { + // if the next item doesn't have any comments, do not align + buf.WriteByte(blank) // align one space + for i := 0; i < longestLine-curLen; i++ { + buf.WriteByte(blank) + } + + for _, comment := range lit.LineComment.List { + buf.WriteString(comment.Text) + } + } + + buf.WriteByte(newline) + + // Ensure an empty line after every element with a + // lead comment (except the first item in a list). + haveEmptyLine = leadComment && i != len(l.List)-1 + if haveEmptyLine { + buf.WriteByte(newline) + } + } + + buf.WriteString("]") + return buf.Bytes() +} + +// isSingleLineList returns true if: +// * they were previously formatted entirely on one line +// * they consist entirely of literals +// * there are either no heredoc strings or the list has exactly one element +// * there are no line comments +func (printer) isSingleLineList(l *ast.ListType) bool { + for _, item := range l.List { + if item.Pos().Line != l.Lbrack.Line { + return false + } + + lit, ok := item.(*ast.LiteralType) + if !ok { + return false + } + + if lit.Token.Type == token.HEREDOC && len(l.List) != 1 { + return false + } + + if lit.LineComment != nil { + return false + } + } + + return true +} + +// singleLineList prints a simple single line list. +// For a definition of "simple", see isSingleLineList above. +func (p *printer) singleLineList(l *ast.ListType) []byte { + buf := &bytes.Buffer{} + + buf.WriteString("[") + for i, item := range l.List { + if i != 0 { + buf.WriteString(", ") + } + + // Output the item itself + buf.Write(p.output(item)) + + // The heredoc marker needs to be at the end of line. + if lit, ok := item.(*ast.LiteralType); ok && lit.Token.Type == token.HEREDOC { + buf.WriteByte(newline) + } + } + + buf.WriteString("]") + return buf.Bytes() +} + +// indent indents the lines of the given buffer for each non-empty line +func (p *printer) indent(buf []byte) []byte { + var prefix []byte + if p.cfg.SpacesWidth != 0 { + for i := 0; i < p.cfg.SpacesWidth; i++ { + prefix = append(prefix, blank) + } + } else { + prefix = []byte{tab} + } + + var res []byte + bol := true + for _, c := range buf { + if bol && c != '\n' { + res = append(res, prefix...) + } + + res = append(res, c) + bol = c == '\n' + } + return res +} + +// unindent removes all the indentation from the tombstoned lines +func (p *printer) unindent(buf []byte) []byte { + var res []byte + for i := 0; i < len(buf); i++ { + skip := len(buf)-i <= len(unindent) + if !skip { + skip = !bytes.Equal(unindent, buf[i:i+len(unindent)]) + } + if skip { + res = append(res, buf[i]) + continue + } + + // We have a marker. we have to backtrace here and clean out + // any whitespace ahead of our tombstone up to a \n + for j := len(res) - 1; j >= 0; j-- { + if res[j] == '\n' { + break + } + + res = res[:j] + } + + // Skip the entire unindent marker + i += len(unindent) - 1 + } + + return res +} + +// heredocIndent marks all the 2nd and further lines as unindentable +func (p *printer) heredocIndent(buf []byte) []byte { + var res []byte + bol := false + for _, c := range buf { + if bol && c != '\n' { + res = append(res, unindent...) + } + res = append(res, c) + bol = c == '\n' + } + return res +} + +// isSingleLineObject tells whether the given object item is a single +// line object such as "obj {}". 
+// +// A single line object: +// +// * has no lead comments (hence multi-line) +// * has no assignment +// * has no values in the stanza (within {}) +// +func (p *printer) isSingleLineObject(val *ast.ObjectItem) bool { + // If there is a lead comment, can't be one line + if val.LeadComment != nil { + return false + } + + // If there is assignment, we always break by line + if val.Assign.IsValid() { + return false + } + + // If it isn't an object type, then its not a single line object + ot, ok := val.Val.(*ast.ObjectType) + if !ok { + return false + } + + // If the object has no items, it is single line! + return len(ot.List.Items) == 0 +} + +func lines(txt string) int { + endline := 1 + for i := 0; i < len(txt); i++ { + if txt[i] == '\n' { + endline++ + } + } + return endline +} + +// ---------------------------------------------------------------------------- +// Tracing support + +func (p *printer) printTrace(a ...interface{}) { + if !p.enableTrace { + return + } + + const dots = ". . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . " + const n = len(dots) + i := 2 * p.indentTrace + for i > n { + fmt.Print(dots) + i -= n + } + // i <= n + fmt.Print(dots[0:i]) + fmt.Println(a...) +} + +func trace(p *printer, msg string) *printer { + p.printTrace(msg, "(") + p.indentTrace++ + return p +} + +// Usage pattern: defer un(trace(p, "...")) +func un(p *printer) { + p.indentTrace-- + p.printTrace(")") +} diff --git a/vendor/github.com/hashicorp/hcl/hcl/printer/printer.go b/vendor/github.com/hashicorp/hcl/hcl/printer/printer.go new file mode 100644 index 000000000..6617ab8e7 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/hcl/printer/printer.go @@ -0,0 +1,66 @@ +// Package printer implements printing of AST nodes to HCL format. +package printer + +import ( + "bytes" + "io" + "text/tabwriter" + + "github.com/hashicorp/hcl/hcl/ast" + "github.com/hashicorp/hcl/hcl/parser" +) + +var DefaultConfig = Config{ + SpacesWidth: 2, +} + +// A Config node controls the output of Fprint. +type Config struct { + SpacesWidth int // if set, it will use spaces instead of tabs for alignment +} + +func (c *Config) Fprint(output io.Writer, node ast.Node) error { + p := &printer{ + cfg: *c, + comments: make([]*ast.CommentGroup, 0), + standaloneComments: make([]*ast.CommentGroup, 0), + // enableTrace: true, + } + + p.collectComments(node) + + if _, err := output.Write(p.unindent(p.output(node))); err != nil { + return err + } + + // flush tabwriter, if any + var err error + if tw, _ := output.(*tabwriter.Writer); tw != nil { + err = tw.Flush() + } + + return err +} + +// Fprint "pretty-prints" an HCL node to output +// It calls Config.Fprint with default settings. +func Fprint(output io.Writer, node ast.Node) error { + return DefaultConfig.Fprint(output, node) +} + +// Format formats src HCL and returns the result. +func Format(src []byte) ([]byte, error) { + node, err := parser.Parse(src) + if err != nil { + return nil, err + } + + var buf bytes.Buffer + if err := DefaultConfig.Fprint(&buf, node); err != nil { + return nil, err + } + + // Add trailing newline to result + buf.WriteString("\n") + return buf.Bytes(), nil +} diff --git a/vendor/github.com/hashicorp/hcl/hcl/scanner/scanner.go b/vendor/github.com/hashicorp/hcl/hcl/scanner/scanner.go new file mode 100644 index 000000000..624a18fe3 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/hcl/scanner/scanner.go @@ -0,0 +1,652 @@ +// Package scanner implements a scanner for HCL (HashiCorp Configuration +// Language) source text. 
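The printer package vendored above exposes Fprint plus a convenience Format wrapper that parses raw HCL and pretty-prints it with DefaultConfig. A minimal round-trip sketch, assuming the vendored import path from this diff; the input snippet is illustrative:

```go
package main

import (
	"fmt"
	"log"

	"github.com/hashicorp/hcl/hcl/printer"
)

func main() {
	// Unaligned input; Format parses it, re-prints it using DefaultConfig
	// (SpacesWidth: 2) and appends a trailing newline to the result.
	src := []byte("foo    =  \"bar\"\nnumber =     42")

	out, err := printer.Format(src)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Print(string(out))
}
```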
+package scanner + +import ( + "bytes" + "fmt" + "os" + "regexp" + "unicode" + "unicode/utf8" + + "github.com/hashicorp/hcl/hcl/token" +) + +// eof represents a marker rune for the end of the reader. +const eof = rune(0) + +// Scanner defines a lexical scanner +type Scanner struct { + buf *bytes.Buffer // Source buffer for advancing and scanning + src []byte // Source buffer for immutable access + + // Source Position + srcPos token.Pos // current position + prevPos token.Pos // previous position, used for peek() method + + lastCharLen int // length of last character in bytes + lastLineLen int // length of last line in characters (for correct column reporting) + + tokStart int // token text start position + tokEnd int // token text end position + + // Error is called for each error encountered. If no Error + // function is set, the error is reported to os.Stderr. + Error func(pos token.Pos, msg string) + + // ErrorCount is incremented by one for each error encountered. + ErrorCount int + + // tokPos is the start position of most recently scanned token; set by + // Scan. The Filename field is always left untouched by the Scanner. If + // an error is reported (via Error) and Position is invalid, the scanner is + // not inside a token. + tokPos token.Pos +} + +// New creates and initializes a new instance of Scanner using src as +// its source content. +func New(src []byte) *Scanner { + // even though we accept a src, we read from a io.Reader compatible type + // (*bytes.Buffer). So in the future we might easily change it to streaming + // read. + b := bytes.NewBuffer(src) + s := &Scanner{ + buf: b, + src: src, + } + + // srcPosition always starts with 1 + s.srcPos.Line = 1 + return s +} + +// next reads the next rune from the bufferred reader. Returns the rune(0) if +// an error occurs (or io.EOF is returned). +func (s *Scanner) next() rune { + ch, size, err := s.buf.ReadRune() + if err != nil { + // advance for error reporting + s.srcPos.Column++ + s.srcPos.Offset += size + s.lastCharLen = size + return eof + } + + // remember last position + s.prevPos = s.srcPos + + s.srcPos.Column++ + s.lastCharLen = size + s.srcPos.Offset += size + + if ch == utf8.RuneError && size == 1 { + s.err("illegal UTF-8 encoding") + return ch + } + + if ch == '\n' { + s.srcPos.Line++ + s.lastLineLen = s.srcPos.Column + s.srcPos.Column = 0 + } + + if ch == '\x00' { + s.err("unexpected null character (0x00)") + return eof + } + + if ch == '\uE123' { + s.err("unicode code point U+E123 reserved for internal use") + return utf8.RuneError + } + + // debug + // fmt.Printf("ch: %q, offset:column: %d:%d\n", ch, s.srcPos.Offset, s.srcPos.Column) + return ch +} + +// unread unreads the previous read Rune and updates the source position +func (s *Scanner) unread() { + if err := s.buf.UnreadRune(); err != nil { + panic(err) // this is user fault, we should catch it + } + s.srcPos = s.prevPos // put back last position +} + +// peek returns the next rune without advancing the reader. +func (s *Scanner) peek() rune { + peek, _, err := s.buf.ReadRune() + if err != nil { + return eof + } + + s.buf.UnreadRune() + return peek +} + +// Scan scans the next token and returns the token. 
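A short usage sketch for the vendored HCL scanner: construct it with New and call Scan until EOF. The input snippet is illustrative; the import paths are the ones added in this diff.

```go
package main

import (
	"fmt"

	"github.com/hashicorp/hcl/hcl/scanner"
	"github.com/hashicorp/hcl/hcl/token"
)

func main() {
	s := scanner.New([]byte(`count = 3 // how many`))

	// Scan until EOF, printing each token's position, type and literal text.
	for {
		tok := s.Scan()
		if tok.Type == token.EOF {
			break
		}
		fmt.Printf("%s\t%s\t%q\n", tok.Pos, tok.Type, tok.Text)
	}
}
```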
+func (s *Scanner) Scan() token.Token { + ch := s.next() + + // skip white space + for isWhitespace(ch) { + ch = s.next() + } + + var tok token.Type + + // token text markings + s.tokStart = s.srcPos.Offset - s.lastCharLen + + // token position, initial next() is moving the offset by one(size of rune + // actually), though we are interested with the starting point + s.tokPos.Offset = s.srcPos.Offset - s.lastCharLen + if s.srcPos.Column > 0 { + // common case: last character was not a '\n' + s.tokPos.Line = s.srcPos.Line + s.tokPos.Column = s.srcPos.Column + } else { + // last character was a '\n' + // (we cannot be at the beginning of the source + // since we have called next() at least once) + s.tokPos.Line = s.srcPos.Line - 1 + s.tokPos.Column = s.lastLineLen + } + + switch { + case isLetter(ch): + tok = token.IDENT + lit := s.scanIdentifier() + if lit == "true" || lit == "false" { + tok = token.BOOL + } + case isDecimal(ch): + tok = s.scanNumber(ch) + default: + switch ch { + case eof: + tok = token.EOF + case '"': + tok = token.STRING + s.scanString() + case '#', '/': + tok = token.COMMENT + s.scanComment(ch) + case '.': + tok = token.PERIOD + ch = s.peek() + if isDecimal(ch) { + tok = token.FLOAT + ch = s.scanMantissa(ch) + ch = s.scanExponent(ch) + } + case '<': + tok = token.HEREDOC + s.scanHeredoc() + case '[': + tok = token.LBRACK + case ']': + tok = token.RBRACK + case '{': + tok = token.LBRACE + case '}': + tok = token.RBRACE + case ',': + tok = token.COMMA + case '=': + tok = token.ASSIGN + case '+': + tok = token.ADD + case '-': + if isDecimal(s.peek()) { + ch := s.next() + tok = s.scanNumber(ch) + } else { + tok = token.SUB + } + default: + s.err("illegal char") + } + } + + // finish token ending + s.tokEnd = s.srcPos.Offset + + // create token literal + var tokenText string + if s.tokStart >= 0 { + tokenText = string(s.src[s.tokStart:s.tokEnd]) + } + s.tokStart = s.tokEnd // ensure idempotency of tokenText() call + + return token.Token{ + Type: tok, + Pos: s.tokPos, + Text: tokenText, + } +} + +func (s *Scanner) scanComment(ch rune) { + // single line comments + if ch == '#' || (ch == '/' && s.peek() != '*') { + if ch == '/' && s.peek() != '/' { + s.err("expected '/' for comment") + return + } + + ch = s.next() + for ch != '\n' && ch >= 0 && ch != eof { + ch = s.next() + } + if ch != eof && ch >= 0 { + s.unread() + } + return + } + + // be sure we get the character after /* This allows us to find comment's + // that are not erminated + if ch == '/' { + s.next() + ch = s.next() // read character after "/*" + } + + // look for /* - style comments + for { + if ch < 0 || ch == eof { + s.err("comment not terminated") + break + } + + ch0 := ch + ch = s.next() + if ch0 == '*' && ch == '/' { + break + } + } +} + +// scanNumber scans a HCL number definition starting with the given rune +func (s *Scanner) scanNumber(ch rune) token.Type { + if ch == '0' { + // check for hexadecimal, octal or float + ch = s.next() + if ch == 'x' || ch == 'X' { + // hexadecimal + ch = s.next() + found := false + for isHexadecimal(ch) { + ch = s.next() + found = true + } + + if !found { + s.err("illegal hexadecimal number") + } + + if ch != eof { + s.unread() + } + + return token.NUMBER + } + + // now it's either something like: 0421(octal) or 0.1231(float) + illegalOctal := false + for isDecimal(ch) { + ch = s.next() + if ch == '8' || ch == '9' { + // this is just a possibility. For example 0159 is illegal, but + // 0159.23 is valid. So we mark a possible illegal octal. 
If + // the next character is not a period, we'll print the error. + illegalOctal = true + } + } + + if ch == 'e' || ch == 'E' { + ch = s.scanExponent(ch) + return token.FLOAT + } + + if ch == '.' { + ch = s.scanFraction(ch) + + if ch == 'e' || ch == 'E' { + ch = s.next() + ch = s.scanExponent(ch) + } + return token.FLOAT + } + + if illegalOctal { + s.err("illegal octal number") + } + + if ch != eof { + s.unread() + } + return token.NUMBER + } + + s.scanMantissa(ch) + ch = s.next() // seek forward + if ch == 'e' || ch == 'E' { + ch = s.scanExponent(ch) + return token.FLOAT + } + + if ch == '.' { + ch = s.scanFraction(ch) + if ch == 'e' || ch == 'E' { + ch = s.next() + ch = s.scanExponent(ch) + } + return token.FLOAT + } + + if ch != eof { + s.unread() + } + return token.NUMBER +} + +// scanMantissa scans the mantissa beginning from the rune. It returns the next +// non decimal rune. It's used to determine wheter it's a fraction or exponent. +func (s *Scanner) scanMantissa(ch rune) rune { + scanned := false + for isDecimal(ch) { + ch = s.next() + scanned = true + } + + if scanned && ch != eof { + s.unread() + } + return ch +} + +// scanFraction scans the fraction after the '.' rune +func (s *Scanner) scanFraction(ch rune) rune { + if ch == '.' { + ch = s.peek() // we peek just to see if we can move forward + ch = s.scanMantissa(ch) + } + return ch +} + +// scanExponent scans the remaining parts of an exponent after the 'e' or 'E' +// rune. +func (s *Scanner) scanExponent(ch rune) rune { + if ch == 'e' || ch == 'E' { + ch = s.next() + if ch == '-' || ch == '+' { + ch = s.next() + } + ch = s.scanMantissa(ch) + } + return ch +} + +// scanHeredoc scans a heredoc string +func (s *Scanner) scanHeredoc() { + // Scan the second '<' in example: '<= len(identBytes) && identRegexp.Match(s.src[lineStart:s.srcPos.Offset-s.lastCharLen]) { + break + } + + // Not an anchor match, record the start of a new line + lineStart = s.srcPos.Offset + } + + if ch == eof { + s.err("heredoc not terminated") + return + } + } + + return +} + +// scanString scans a quoted string +func (s *Scanner) scanString() { + braces := 0 + for { + // '"' opening already consumed + // read character after quote + ch := s.next() + + if (ch == '\n' && braces == 0) || ch < 0 || ch == eof { + s.err("literal not terminated") + return + } + + if ch == '"' && braces == 0 { + break + } + + // If we're going into a ${} then we can ignore quotes for awhile + if braces == 0 && ch == '$' && s.peek() == '{' { + braces++ + s.next() + } else if braces > 0 && ch == '{' { + braces++ + } + if braces > 0 && ch == '}' { + braces-- + } + + if ch == '\\' { + s.scanEscape() + } + } + + return +} + +// scanEscape scans an escape sequence +func (s *Scanner) scanEscape() rune { + // http://en.cppreference.com/w/cpp/language/escape + ch := s.next() // read character after '/' + switch ch { + case 'a', 'b', 'f', 'n', 'r', 't', 'v', '\\', '"': + // nothing to do + case '0', '1', '2', '3', '4', '5', '6', '7': + // octal notation + ch = s.scanDigits(ch, 8, 3) + case 'x': + // hexademical notation + ch = s.scanDigits(s.next(), 16, 2) + case 'u': + // universal character name + ch = s.scanDigits(s.next(), 16, 4) + case 'U': + // universal character name + ch = s.scanDigits(s.next(), 16, 8) + default: + s.err("illegal char escape") + } + return ch +} + +// scanDigits scans a rune with the given base for n times. 
For example an +// octal notation \184 would yield in scanDigits(ch, 8, 3) +func (s *Scanner) scanDigits(ch rune, base, n int) rune { + start := n + for n > 0 && digitVal(ch) < base { + ch = s.next() + if ch == eof { + // If we see an EOF, we halt any more scanning of digits + // immediately. + break + } + + n-- + } + if n > 0 { + s.err("illegal char escape") + } + + if n != start && ch != eof { + // we scanned all digits, put the last non digit char back, + // only if we read anything at all + s.unread() + } + + return ch +} + +// scanIdentifier scans an identifier and returns the literal string +func (s *Scanner) scanIdentifier() string { + offs := s.srcPos.Offset - s.lastCharLen + ch := s.next() + for isLetter(ch) || isDigit(ch) || ch == '-' || ch == '.' { + ch = s.next() + } + + if ch != eof { + s.unread() // we got identifier, put back latest char + } + + return string(s.src[offs:s.srcPos.Offset]) +} + +// recentPosition returns the position of the character immediately after the +// character or token returned by the last call to Scan. +func (s *Scanner) recentPosition() (pos token.Pos) { + pos.Offset = s.srcPos.Offset - s.lastCharLen + switch { + case s.srcPos.Column > 0: + // common case: last character was not a '\n' + pos.Line = s.srcPos.Line + pos.Column = s.srcPos.Column + case s.lastLineLen > 0: + // last character was a '\n' + // (we cannot be at the beginning of the source + // since we have called next() at least once) + pos.Line = s.srcPos.Line - 1 + pos.Column = s.lastLineLen + default: + // at the beginning of the source + pos.Line = 1 + pos.Column = 1 + } + return +} + +// err prints the error of any scanning to s.Error function. If the function is +// not defined, by default it prints them to os.Stderr +func (s *Scanner) err(msg string) { + s.ErrorCount++ + pos := s.recentPosition() + + if s.Error != nil { + s.Error(pos, msg) + return + } + + fmt.Fprintf(os.Stderr, "%s: %s\n", pos, msg) +} + +// isHexadecimal returns true if the given rune is a letter +func isLetter(ch rune) bool { + return 'a' <= ch && ch <= 'z' || 'A' <= ch && ch <= 'Z' || ch == '_' || ch >= 0x80 && unicode.IsLetter(ch) +} + +// isDigit returns true if the given rune is a decimal digit +func isDigit(ch rune) bool { + return '0' <= ch && ch <= '9' || ch >= 0x80 && unicode.IsDigit(ch) +} + +// isDecimal returns true if the given rune is a decimal number +func isDecimal(ch rune) bool { + return '0' <= ch && ch <= '9' +} + +// isHexadecimal returns true if the given rune is an hexadecimal number +func isHexadecimal(ch rune) bool { + return '0' <= ch && ch <= '9' || 'a' <= ch && ch <= 'f' || 'A' <= ch && ch <= 'F' +} + +// isWhitespace returns true if the rune is a space, tab, newline or carriage return +func isWhitespace(ch rune) bool { + return ch == ' ' || ch == '\t' || ch == '\n' || ch == '\r' +} + +// digitVal returns the integer value of a given octal,decimal or hexadecimal rune +func digitVal(ch rune) int { + switch { + case '0' <= ch && ch <= '9': + return int(ch - '0') + case 'a' <= ch && ch <= 'f': + return int(ch - 'a' + 10) + case 'A' <= ch && ch <= 'F': + return int(ch - 'A' + 10) + } + return 16 // larger than any legal digit val +} diff --git a/vendor/github.com/hashicorp/hcl/hcl/strconv/quote.go b/vendor/github.com/hashicorp/hcl/hcl/strconv/quote.go new file mode 100644 index 000000000..5f981eaa2 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/hcl/strconv/quote.go @@ -0,0 +1,241 @@ +package strconv + +import ( + "errors" + "unicode/utf8" +) + +// ErrSyntax indicates that a value does 
not have the right syntax for the target type. +var ErrSyntax = errors.New("invalid syntax") + +// Unquote interprets s as a single-quoted, double-quoted, +// or backquoted Go string literal, returning the string value +// that s quotes. (If s is single-quoted, it would be a Go +// character literal; Unquote returns the corresponding +// one-character string.) +func Unquote(s string) (t string, err error) { + n := len(s) + if n < 2 { + return "", ErrSyntax + } + quote := s[0] + if quote != s[n-1] { + return "", ErrSyntax + } + s = s[1 : n-1] + + if quote != '"' { + return "", ErrSyntax + } + if !contains(s, '$') && !contains(s, '{') && contains(s, '\n') { + return "", ErrSyntax + } + + // Is it trivial? Avoid allocation. + if !contains(s, '\\') && !contains(s, quote) && !contains(s, '$') { + switch quote { + case '"': + return s, nil + case '\'': + r, size := utf8.DecodeRuneInString(s) + if size == len(s) && (r != utf8.RuneError || size != 1) { + return s, nil + } + } + } + + var runeTmp [utf8.UTFMax]byte + buf := make([]byte, 0, 3*len(s)/2) // Try to avoid more allocations. + for len(s) > 0 { + // If we're starting a '${}' then let it through un-unquoted. + // Specifically: we don't unquote any characters within the `${}` + // section. + if s[0] == '$' && len(s) > 1 && s[1] == '{' { + buf = append(buf, '$', '{') + s = s[2:] + + // Continue reading until we find the closing brace, copying as-is + braces := 1 + for len(s) > 0 && braces > 0 { + r, size := utf8.DecodeRuneInString(s) + if r == utf8.RuneError { + return "", ErrSyntax + } + + s = s[size:] + + n := utf8.EncodeRune(runeTmp[:], r) + buf = append(buf, runeTmp[:n]...) + + switch r { + case '{': + braces++ + case '}': + braces-- + } + } + if braces != 0 { + return "", ErrSyntax + } + if len(s) == 0 { + // If there's no string left, we're done! + break + } else { + // If there's more left, we need to pop back up to the top of the loop + // in case there's another interpolation in this string. + continue + } + } + + if s[0] == '\n' { + return "", ErrSyntax + } + + c, multibyte, ss, err := unquoteChar(s, quote) + if err != nil { + return "", err + } + s = ss + if c < utf8.RuneSelf || !multibyte { + buf = append(buf, byte(c)) + } else { + n := utf8.EncodeRune(runeTmp[:], c) + buf = append(buf, runeTmp[:n]...) + } + if quote == '\'' && len(s) != 0 { + // single-quoted must be single character + return "", ErrSyntax + } + } + return string(buf), nil +} + +// contains reports whether the string contains the byte c. 
+func contains(s string, c byte) bool { + for i := 0; i < len(s); i++ { + if s[i] == c { + return true + } + } + return false +} + +func unhex(b byte) (v rune, ok bool) { + c := rune(b) + switch { + case '0' <= c && c <= '9': + return c - '0', true + case 'a' <= c && c <= 'f': + return c - 'a' + 10, true + case 'A' <= c && c <= 'F': + return c - 'A' + 10, true + } + return +} + +func unquoteChar(s string, quote byte) (value rune, multibyte bool, tail string, err error) { + // easy cases + switch c := s[0]; { + case c == quote && (quote == '\'' || quote == '"'): + err = ErrSyntax + return + case c >= utf8.RuneSelf: + r, size := utf8.DecodeRuneInString(s) + return r, true, s[size:], nil + case c != '\\': + return rune(s[0]), false, s[1:], nil + } + + // hard case: c is backslash + if len(s) <= 1 { + err = ErrSyntax + return + } + c := s[1] + s = s[2:] + + switch c { + case 'a': + value = '\a' + case 'b': + value = '\b' + case 'f': + value = '\f' + case 'n': + value = '\n' + case 'r': + value = '\r' + case 't': + value = '\t' + case 'v': + value = '\v' + case 'x', 'u', 'U': + n := 0 + switch c { + case 'x': + n = 2 + case 'u': + n = 4 + case 'U': + n = 8 + } + var v rune + if len(s) < n { + err = ErrSyntax + return + } + for j := 0; j < n; j++ { + x, ok := unhex(s[j]) + if !ok { + err = ErrSyntax + return + } + v = v<<4 | x + } + s = s[n:] + if c == 'x' { + // single-byte string, possibly not UTF-8 + value = v + break + } + if v > utf8.MaxRune { + err = ErrSyntax + return + } + value = v + multibyte = true + case '0', '1', '2', '3', '4', '5', '6', '7': + v := rune(c) - '0' + if len(s) < 2 { + err = ErrSyntax + return + } + for j := 0; j < 2; j++ { // one digit already; two more + x := rune(s[j]) - '0' + if x < 0 || x > 7 { + err = ErrSyntax + return + } + v = (v << 3) | x + } + s = s[2:] + if v > 255 { + err = ErrSyntax + return + } + value = v + case '\\': + value = '\\' + case '\'', '"': + if c != quote { + err = ErrSyntax + return + } + value = rune(c) + default: + err = ErrSyntax + return + } + tail = s + return +} diff --git a/vendor/github.com/hashicorp/hcl/hcl/token/position.go b/vendor/github.com/hashicorp/hcl/hcl/token/position.go new file mode 100644 index 000000000..59c1bb72d --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/hcl/token/position.go @@ -0,0 +1,46 @@ +package token + +import "fmt" + +// Pos describes an arbitrary source position +// including the file, line, and column location. +// A Position is valid if the line number is > 0. +type Pos struct { + Filename string // filename, if any + Offset int // offset, starting at 0 + Line int // line number, starting at 1 + Column int // column number, starting at 1 (character count) +} + +// IsValid returns true if the position is valid. +func (p *Pos) IsValid() bool { return p.Line > 0 } + +// String returns a string in one of several forms: +// +// file:line:column valid position with file name +// line:column valid position without file name +// file invalid position with file name +// - invalid position without file name +func (p Pos) String() string { + s := p.Filename + if p.IsValid() { + if s != "" { + s += ":" + } + s += fmt.Sprintf("%d:%d", p.Line, p.Column) + } + if s == "" { + s = "-" + } + return s +} + +// Before reports whether the position p is before u. +func (p Pos) Before(u Pos) bool { + return u.Offset > p.Offset || u.Line > p.Line +} + +// After reports whether the position p is after u. 
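A small sketch exercising the vendored strconv.Unquote and the token.Pos ordering helpers above; the string literal and positions are illustrative values, not taken from the patch.

```go
package main

import (
	"fmt"

	hclstrconv "github.com/hashicorp/hcl/hcl/strconv"
	"github.com/hashicorp/hcl/hcl/token"
)

func main() {
	// Unquote strips the surrounding double quotes and resolves escapes,
	// while leaving a ${...} interpolation section untouched.
	s, err := hclstrconv.Unquote(`"hello\n${var.name}"`)
	fmt.Printf("%q %v\n", s, err)

	// Before/After compare positions by offset (or line), as defined above.
	a := token.Pos{Line: 1, Column: 1, Offset: 0}
	b := token.Pos{Line: 2, Column: 1, Offset: 12}
	fmt.Println(a.Before(b), b.After(a)) // true true
}
```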
+func (p Pos) After(u Pos) bool { + return u.Offset < p.Offset || u.Line < p.Line +} diff --git a/vendor/github.com/hashicorp/hcl/hcl/token/token.go b/vendor/github.com/hashicorp/hcl/hcl/token/token.go new file mode 100644 index 000000000..e37c0664e --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/hcl/token/token.go @@ -0,0 +1,219 @@ +// Package token defines constants representing the lexical tokens for HCL +// (HashiCorp Configuration Language) +package token + +import ( + "fmt" + "strconv" + "strings" + + hclstrconv "github.com/hashicorp/hcl/hcl/strconv" +) + +// Token defines a single HCL token which can be obtained via the Scanner +type Token struct { + Type Type + Pos Pos + Text string + JSON bool +} + +// Type is the set of lexical tokens of the HCL (HashiCorp Configuration Language) +type Type int + +const ( + // Special tokens + ILLEGAL Type = iota + EOF + COMMENT + + identifier_beg + IDENT // literals + literal_beg + NUMBER // 12345 + FLOAT // 123.45 + BOOL // true,false + STRING // "abc" + HEREDOC // < 0 { + // Pop the current item + n := len(frontier) + item := frontier[n-1] + frontier = frontier[:n-1] + + switch v := item.Val.(type) { + case *ast.ObjectType: + items, frontier = flattenObjectType(v, item, items, frontier) + case *ast.ListType: + items, frontier = flattenListType(v, item, items, frontier) + default: + items = append(items, item) + } + } + + // Reverse the list since the frontier model runs things backwards + for i := len(items)/2 - 1; i >= 0; i-- { + opp := len(items) - 1 - i + items[i], items[opp] = items[opp], items[i] + } + + // Done! Set the original items + list.Items = items + return n, true + }) +} + +func flattenListType( + ot *ast.ListType, + item *ast.ObjectItem, + items []*ast.ObjectItem, + frontier []*ast.ObjectItem) ([]*ast.ObjectItem, []*ast.ObjectItem) { + // If the list is empty, keep the original list + if len(ot.List) == 0 { + items = append(items, item) + return items, frontier + } + + // All the elements of this object must also be objects! + for _, subitem := range ot.List { + if _, ok := subitem.(*ast.ObjectType); !ok { + items = append(items, item) + return items, frontier + } + } + + // Great! We have a match go through all the items and flatten + for _, elem := range ot.List { + // Add it to the frontier so that we can recurse + frontier = append(frontier, &ast.ObjectItem{ + Keys: item.Keys, + Assign: item.Assign, + Val: elem, + LeadComment: item.LeadComment, + LineComment: item.LineComment, + }) + } + + return items, frontier +} + +func flattenObjectType( + ot *ast.ObjectType, + item *ast.ObjectItem, + items []*ast.ObjectItem, + frontier []*ast.ObjectItem) ([]*ast.ObjectItem, []*ast.ObjectItem) { + // If the list has no items we do not have to flatten anything + if ot.List.Items == nil { + items = append(items, item) + return items, frontier + } + + // All the elements of this object must also be objects! + for _, subitem := range ot.List.Items { + if _, ok := subitem.Val.(*ast.ObjectType); !ok { + items = append(items, item) + return items, frontier + } + } + + // Great! 
We have a match go through all the items and flatten + for _, subitem := range ot.List.Items { + // Copy the new key + keys := make([]*ast.ObjectKey, len(item.Keys)+len(subitem.Keys)) + copy(keys, item.Keys) + copy(keys[len(item.Keys):], subitem.Keys) + + // Add it to the frontier so that we can recurse + frontier = append(frontier, &ast.ObjectItem{ + Keys: keys, + Assign: item.Assign, + Val: subitem.Val, + LeadComment: item.LeadComment, + LineComment: item.LineComment, + }) + } + + return items, frontier +} diff --git a/vendor/github.com/hashicorp/hcl/json/parser/parser.go b/vendor/github.com/hashicorp/hcl/json/parser/parser.go new file mode 100644 index 000000000..125a5f072 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/json/parser/parser.go @@ -0,0 +1,313 @@ +package parser + +import ( + "errors" + "fmt" + + "github.com/hashicorp/hcl/hcl/ast" + hcltoken "github.com/hashicorp/hcl/hcl/token" + "github.com/hashicorp/hcl/json/scanner" + "github.com/hashicorp/hcl/json/token" +) + +type Parser struct { + sc *scanner.Scanner + + // Last read token + tok token.Token + commaPrev token.Token + + enableTrace bool + indent int + n int // buffer size (max = 1) +} + +func newParser(src []byte) *Parser { + return &Parser{ + sc: scanner.New(src), + } +} + +// Parse returns the fully parsed source and returns the abstract syntax tree. +func Parse(src []byte) (*ast.File, error) { + p := newParser(src) + return p.Parse() +} + +var errEofToken = errors.New("EOF token found") + +// Parse returns the fully parsed source and returns the abstract syntax tree. +func (p *Parser) Parse() (*ast.File, error) { + f := &ast.File{} + var err, scerr error + p.sc.Error = func(pos token.Pos, msg string) { + scerr = fmt.Errorf("%s: %s", pos, msg) + } + + // The root must be an object in JSON + object, err := p.object() + if scerr != nil { + return nil, scerr + } + if err != nil { + return nil, err + } + + // We make our final node an object list so it is more HCL compatible + f.Node = object.List + + // Flatten it, which finds patterns and turns them into more HCL-like + // AST trees. + flattenObjects(f.Node) + + return f, nil +} + +func (p *Parser) objectList() (*ast.ObjectList, error) { + defer un(trace(p, "ParseObjectList")) + node := &ast.ObjectList{} + + for { + n, err := p.objectItem() + if err == errEofToken { + break // we are finished + } + + // we don't return a nil node, because might want to use already + // collected items. + if err != nil { + return node, err + } + + node.Add(n) + + // Check for a followup comma. 
If it isn't a comma, then we're done + if tok := p.scan(); tok.Type != token.COMMA { + break + } + } + + return node, nil +} + +// objectItem parses a single object item +func (p *Parser) objectItem() (*ast.ObjectItem, error) { + defer un(trace(p, "ParseObjectItem")) + + keys, err := p.objectKey() + if err != nil { + return nil, err + } + + o := &ast.ObjectItem{ + Keys: keys, + } + + switch p.tok.Type { + case token.COLON: + pos := p.tok.Pos + o.Assign = hcltoken.Pos{ + Filename: pos.Filename, + Offset: pos.Offset, + Line: pos.Line, + Column: pos.Column, + } + + o.Val, err = p.objectValue() + if err != nil { + return nil, err + } + } + + return o, nil +} + +// objectKey parses an object key and returns a ObjectKey AST +func (p *Parser) objectKey() ([]*ast.ObjectKey, error) { + keyCount := 0 + keys := make([]*ast.ObjectKey, 0) + + for { + tok := p.scan() + switch tok.Type { + case token.EOF: + return nil, errEofToken + case token.STRING: + keyCount++ + keys = append(keys, &ast.ObjectKey{ + Token: p.tok.HCLToken(), + }) + case token.COLON: + // If we have a zero keycount it means that we never got + // an object key, i.e. `{ :`. This is a syntax error. + if keyCount == 0 { + return nil, fmt.Errorf("expected: STRING got: %s", p.tok.Type) + } + + // Done + return keys, nil + case token.ILLEGAL: + return nil, errors.New("illegal") + default: + return nil, fmt.Errorf("expected: STRING got: %s", p.tok.Type) + } + } +} + +// object parses any type of object, such as number, bool, string, object or +// list. +func (p *Parser) objectValue() (ast.Node, error) { + defer un(trace(p, "ParseObjectValue")) + tok := p.scan() + + switch tok.Type { + case token.NUMBER, token.FLOAT, token.BOOL, token.NULL, token.STRING: + return p.literalType() + case token.LBRACE: + return p.objectType() + case token.LBRACK: + return p.listType() + case token.EOF: + return nil, errEofToken + } + + return nil, fmt.Errorf("Expected object value, got unknown token: %+v", tok) +} + +// object parses any type of object, such as number, bool, string, object or +// list. +func (p *Parser) object() (*ast.ObjectType, error) { + defer un(trace(p, "ParseType")) + tok := p.scan() + + switch tok.Type { + case token.LBRACE: + return p.objectType() + case token.EOF: + return nil, errEofToken + } + + return nil, fmt.Errorf("Expected object, got unknown token: %+v", tok) +} + +// objectType parses an object type and returns a ObjectType AST +func (p *Parser) objectType() (*ast.ObjectType, error) { + defer un(trace(p, "ParseObjectType")) + + // we assume that the currently scanned token is a LBRACE + o := &ast.ObjectType{} + + l, err := p.objectList() + + // if we hit RBRACE, we are good to go (means we parsed all Items), if it's + // not a RBRACE, it's an syntax error and we just return it. + if err != nil && p.tok.Type != token.RBRACE { + return nil, err + } + + o.List = l + return o, nil +} + +// listType parses a list type and returns a ListType AST +func (p *Parser) listType() (*ast.ListType, error) { + defer un(trace(p, "ParseListType")) + + // we assume that the currently scanned token is a LBRACK + l := &ast.ListType{} + + for { + tok := p.scan() + switch tok.Type { + case token.NUMBER, token.FLOAT, token.STRING: + node, err := p.literalType() + if err != nil { + return nil, err + } + + l.Add(node) + case token.COMMA: + continue + case token.LBRACE: + node, err := p.objectType() + if err != nil { + return nil, err + } + + l.Add(node) + case token.BOOL: + // TODO(arslan) should we support? 
not supported by HCL yet + case token.LBRACK: + // TODO(arslan) should we support nested lists? Even though it's + // written in README of HCL, it's not a part of the grammar + // (not defined in parse.y) + case token.RBRACK: + // finished + return l, nil + default: + return nil, fmt.Errorf("unexpected token while parsing list: %s", tok.Type) + } + + } +} + +// literalType parses a literal type and returns a LiteralType AST +func (p *Parser) literalType() (*ast.LiteralType, error) { + defer un(trace(p, "ParseLiteral")) + + return &ast.LiteralType{ + Token: p.tok.HCLToken(), + }, nil +} + +// scan returns the next token from the underlying scanner. If a token has +// been unscanned then read that instead. +func (p *Parser) scan() token.Token { + // If we have a token on the buffer, then return it. + if p.n != 0 { + p.n = 0 + return p.tok + } + + p.tok = p.sc.Scan() + return p.tok +} + +// unscan pushes the previously read token back onto the buffer. +func (p *Parser) unscan() { + p.n = 1 +} + +// ---------------------------------------------------------------------------- +// Parsing support + +func (p *Parser) printTrace(a ...interface{}) { + if !p.enableTrace { + return + } + + const dots = ". . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . " + const n = len(dots) + fmt.Printf("%5d:%3d: ", p.tok.Pos.Line, p.tok.Pos.Column) + + i := 2 * p.indent + for i > n { + fmt.Print(dots) + i -= n + } + // i <= n + fmt.Print(dots[0:i]) + fmt.Println(a...) +} + +func trace(p *Parser, msg string) *Parser { + p.printTrace(msg, "(") + p.indent++ + return p +} + +// Usage pattern: defer un(trace(p, "...")) +func un(p *Parser) { + p.indent-- + p.printTrace(")") +} diff --git a/vendor/github.com/hashicorp/hcl/json/scanner/scanner.go b/vendor/github.com/hashicorp/hcl/json/scanner/scanner.go new file mode 100644 index 000000000..fe3f0f095 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/json/scanner/scanner.go @@ -0,0 +1,451 @@ +package scanner + +import ( + "bytes" + "fmt" + "os" + "unicode" + "unicode/utf8" + + "github.com/hashicorp/hcl/json/token" +) + +// eof represents a marker rune for the end of the reader. +const eof = rune(0) + +// Scanner defines a lexical scanner +type Scanner struct { + buf *bytes.Buffer // Source buffer for advancing and scanning + src []byte // Source buffer for immutable access + + // Source Position + srcPos token.Pos // current position + prevPos token.Pos // previous position, used for peek() method + + lastCharLen int // length of last character in bytes + lastLineLen int // length of last line in characters (for correct column reporting) + + tokStart int // token text start position + tokEnd int // token text end position + + // Error is called for each error encountered. If no Error + // function is set, the error is reported to os.Stderr. + Error func(pos token.Pos, msg string) + + // ErrorCount is incremented by one for each error encountered. + ErrorCount int + + // tokPos is the start position of most recently scanned token; set by + // Scan. The Filename field is always left untouched by the Scanner. If + // an error is reported (via Error) and Position is invalid, the scanner is + // not inside a token. + tokPos token.Pos +} + +// New creates and initializes a new instance of Scanner using src as +// its source content. +func New(src []byte) *Scanner { + // even though we accept a src, we read from a io.Reader compatible type + // (*bytes.Buffer). So in the future we might easily change it to streaming + // read. 
+ b := bytes.NewBuffer(src) + s := &Scanner{ + buf: b, + src: src, + } + + // srcPosition always starts with 1 + s.srcPos.Line = 1 + return s +} + +// next reads the next rune from the bufferred reader. Returns the rune(0) if +// an error occurs (or io.EOF is returned). +func (s *Scanner) next() rune { + ch, size, err := s.buf.ReadRune() + if err != nil { + // advance for error reporting + s.srcPos.Column++ + s.srcPos.Offset += size + s.lastCharLen = size + return eof + } + + if ch == utf8.RuneError && size == 1 { + s.srcPos.Column++ + s.srcPos.Offset += size + s.lastCharLen = size + s.err("illegal UTF-8 encoding") + return ch + } + + // remember last position + s.prevPos = s.srcPos + + s.srcPos.Column++ + s.lastCharLen = size + s.srcPos.Offset += size + + if ch == '\n' { + s.srcPos.Line++ + s.lastLineLen = s.srcPos.Column + s.srcPos.Column = 0 + } + + // debug + // fmt.Printf("ch: %q, offset:column: %d:%d\n", ch, s.srcPos.Offset, s.srcPos.Column) + return ch +} + +// unread unreads the previous read Rune and updates the source position +func (s *Scanner) unread() { + if err := s.buf.UnreadRune(); err != nil { + panic(err) // this is user fault, we should catch it + } + s.srcPos = s.prevPos // put back last position +} + +// peek returns the next rune without advancing the reader. +func (s *Scanner) peek() rune { + peek, _, err := s.buf.ReadRune() + if err != nil { + return eof + } + + s.buf.UnreadRune() + return peek +} + +// Scan scans the next token and returns the token. +func (s *Scanner) Scan() token.Token { + ch := s.next() + + // skip white space + for isWhitespace(ch) { + ch = s.next() + } + + var tok token.Type + + // token text markings + s.tokStart = s.srcPos.Offset - s.lastCharLen + + // token position, initial next() is moving the offset by one(size of rune + // actually), though we are interested with the starting point + s.tokPos.Offset = s.srcPos.Offset - s.lastCharLen + if s.srcPos.Column > 0 { + // common case: last character was not a '\n' + s.tokPos.Line = s.srcPos.Line + s.tokPos.Column = s.srcPos.Column + } else { + // last character was a '\n' + // (we cannot be at the beginning of the source + // since we have called next() at least once) + s.tokPos.Line = s.srcPos.Line - 1 + s.tokPos.Column = s.lastLineLen + } + + switch { + case isLetter(ch): + lit := s.scanIdentifier() + if lit == "true" || lit == "false" { + tok = token.BOOL + } else if lit == "null" { + tok = token.NULL + } else { + s.err("illegal char") + } + case isDecimal(ch): + tok = s.scanNumber(ch) + default: + switch ch { + case eof: + tok = token.EOF + case '"': + tok = token.STRING + s.scanString() + case '.': + tok = token.PERIOD + ch = s.peek() + if isDecimal(ch) { + tok = token.FLOAT + ch = s.scanMantissa(ch) + ch = s.scanExponent(ch) + } + case '[': + tok = token.LBRACK + case ']': + tok = token.RBRACK + case '{': + tok = token.LBRACE + case '}': + tok = token.RBRACE + case ',': + tok = token.COMMA + case ':': + tok = token.COLON + case '-': + if isDecimal(s.peek()) { + ch := s.next() + tok = s.scanNumber(ch) + } else { + s.err("illegal char") + } + default: + s.err("illegal char: " + string(ch)) + } + } + + // finish token ending + s.tokEnd = s.srcPos.Offset + + // create token literal + var tokenText string + if s.tokStart >= 0 { + tokenText = string(s.src[s.tokStart:s.tokEnd]) + } + s.tokStart = s.tokEnd // ensure idempotency of tokenText() call + + return token.Token{ + Type: tok, + Pos: s.tokPos, + Text: tokenText, + } +} + +// scanNumber scans a HCL number definition starting with the given 
rune +func (s *Scanner) scanNumber(ch rune) token.Type { + zero := ch == '0' + pos := s.srcPos + + s.scanMantissa(ch) + ch = s.next() // seek forward + if ch == 'e' || ch == 'E' { + ch = s.scanExponent(ch) + return token.FLOAT + } + + if ch == '.' { + ch = s.scanFraction(ch) + if ch == 'e' || ch == 'E' { + ch = s.next() + ch = s.scanExponent(ch) + } + return token.FLOAT + } + + if ch != eof { + s.unread() + } + + // If we have a larger number and this is zero, error + if zero && pos != s.srcPos { + s.err("numbers cannot start with 0") + } + + return token.NUMBER +} + +// scanMantissa scans the mantissa beginning from the rune. It returns the next +// non decimal rune. It's used to determine wheter it's a fraction or exponent. +func (s *Scanner) scanMantissa(ch rune) rune { + scanned := false + for isDecimal(ch) { + ch = s.next() + scanned = true + } + + if scanned && ch != eof { + s.unread() + } + return ch +} + +// scanFraction scans the fraction after the '.' rune +func (s *Scanner) scanFraction(ch rune) rune { + if ch == '.' { + ch = s.peek() // we peek just to see if we can move forward + ch = s.scanMantissa(ch) + } + return ch +} + +// scanExponent scans the remaining parts of an exponent after the 'e' or 'E' +// rune. +func (s *Scanner) scanExponent(ch rune) rune { + if ch == 'e' || ch == 'E' { + ch = s.next() + if ch == '-' || ch == '+' { + ch = s.next() + } + ch = s.scanMantissa(ch) + } + return ch +} + +// scanString scans a quoted string +func (s *Scanner) scanString() { + braces := 0 + for { + // '"' opening already consumed + // read character after quote + ch := s.next() + + if ch == '\n' || ch < 0 || ch == eof { + s.err("literal not terminated") + return + } + + if ch == '"' { + break + } + + // If we're going into a ${} then we can ignore quotes for awhile + if braces == 0 && ch == '$' && s.peek() == '{' { + braces++ + s.next() + } else if braces > 0 && ch == '{' { + braces++ + } + if braces > 0 && ch == '}' { + braces-- + } + + if ch == '\\' { + s.scanEscape() + } + } + + return +} + +// scanEscape scans an escape sequence +func (s *Scanner) scanEscape() rune { + // http://en.cppreference.com/w/cpp/language/escape + ch := s.next() // read character after '/' + switch ch { + case 'a', 'b', 'f', 'n', 'r', 't', 'v', '\\', '"': + // nothing to do + case '0', '1', '2', '3', '4', '5', '6', '7': + // octal notation + ch = s.scanDigits(ch, 8, 3) + case 'x': + // hexademical notation + ch = s.scanDigits(s.next(), 16, 2) + case 'u': + // universal character name + ch = s.scanDigits(s.next(), 16, 4) + case 'U': + // universal character name + ch = s.scanDigits(s.next(), 16, 8) + default: + s.err("illegal char escape") + } + return ch +} + +// scanDigits scans a rune with the given base for n times. 
For example an +// octal notation \184 would yield in scanDigits(ch, 8, 3) +func (s *Scanner) scanDigits(ch rune, base, n int) rune { + for n > 0 && digitVal(ch) < base { + ch = s.next() + n-- + } + if n > 0 { + s.err("illegal char escape") + } + + // we scanned all digits, put the last non digit char back + s.unread() + return ch +} + +// scanIdentifier scans an identifier and returns the literal string +func (s *Scanner) scanIdentifier() string { + offs := s.srcPos.Offset - s.lastCharLen + ch := s.next() + for isLetter(ch) || isDigit(ch) || ch == '-' { + ch = s.next() + } + + if ch != eof { + s.unread() // we got identifier, put back latest char + } + + return string(s.src[offs:s.srcPos.Offset]) +} + +// recentPosition returns the position of the character immediately after the +// character or token returned by the last call to Scan. +func (s *Scanner) recentPosition() (pos token.Pos) { + pos.Offset = s.srcPos.Offset - s.lastCharLen + switch { + case s.srcPos.Column > 0: + // common case: last character was not a '\n' + pos.Line = s.srcPos.Line + pos.Column = s.srcPos.Column + case s.lastLineLen > 0: + // last character was a '\n' + // (we cannot be at the beginning of the source + // since we have called next() at least once) + pos.Line = s.srcPos.Line - 1 + pos.Column = s.lastLineLen + default: + // at the beginning of the source + pos.Line = 1 + pos.Column = 1 + } + return +} + +// err prints the error of any scanning to s.Error function. If the function is +// not defined, by default it prints them to os.Stderr +func (s *Scanner) err(msg string) { + s.ErrorCount++ + pos := s.recentPosition() + + if s.Error != nil { + s.Error(pos, msg) + return + } + + fmt.Fprintf(os.Stderr, "%s: %s\n", pos, msg) +} + +// isHexadecimal returns true if the given rune is a letter +func isLetter(ch rune) bool { + return 'a' <= ch && ch <= 'z' || 'A' <= ch && ch <= 'Z' || ch == '_' || ch >= 0x80 && unicode.IsLetter(ch) +} + +// isHexadecimal returns true if the given rune is a decimal digit +func isDigit(ch rune) bool { + return '0' <= ch && ch <= '9' || ch >= 0x80 && unicode.IsDigit(ch) +} + +// isHexadecimal returns true if the given rune is a decimal number +func isDecimal(ch rune) bool { + return '0' <= ch && ch <= '9' +} + +// isHexadecimal returns true if the given rune is an hexadecimal number +func isHexadecimal(ch rune) bool { + return '0' <= ch && ch <= '9' || 'a' <= ch && ch <= 'f' || 'A' <= ch && ch <= 'F' +} + +// isWhitespace returns true if the rune is a space, tab, newline or carriage return +func isWhitespace(ch rune) bool { + return ch == ' ' || ch == '\t' || ch == '\n' || ch == '\r' +} + +// digitVal returns the integer value of a given octal,decimal or hexadecimal rune +func digitVal(ch rune) int { + switch { + case '0' <= ch && ch <= '9': + return int(ch - '0') + case 'a' <= ch && ch <= 'f': + return int(ch - 'a' + 10) + case 'A' <= ch && ch <= 'F': + return int(ch - 'A' + 10) + } + return 16 // larger than any legal digit val +} diff --git a/vendor/github.com/hashicorp/hcl/json/token/position.go b/vendor/github.com/hashicorp/hcl/json/token/position.go new file mode 100644 index 000000000..59c1bb72d --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/json/token/position.go @@ -0,0 +1,46 @@ +package token + +import "fmt" + +// Pos describes an arbitrary source position +// including the file, line, and column location. +// A Position is valid if the line number is > 0. 
+type Pos struct { + Filename string // filename, if any + Offset int // offset, starting at 0 + Line int // line number, starting at 1 + Column int // column number, starting at 1 (character count) +} + +// IsValid returns true if the position is valid. +func (p *Pos) IsValid() bool { return p.Line > 0 } + +// String returns a string in one of several forms: +// +// file:line:column valid position with file name +// line:column valid position without file name +// file invalid position with file name +// - invalid position without file name +func (p Pos) String() string { + s := p.Filename + if p.IsValid() { + if s != "" { + s += ":" + } + s += fmt.Sprintf("%d:%d", p.Line, p.Column) + } + if s == "" { + s = "-" + } + return s +} + +// Before reports whether the position p is before u. +func (p Pos) Before(u Pos) bool { + return u.Offset > p.Offset || u.Line > p.Line +} + +// After reports whether the position p is after u. +func (p Pos) After(u Pos) bool { + return u.Offset < p.Offset || u.Line < p.Line +} diff --git a/vendor/github.com/hashicorp/hcl/json/token/token.go b/vendor/github.com/hashicorp/hcl/json/token/token.go new file mode 100644 index 000000000..95a0c3eee --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/json/token/token.go @@ -0,0 +1,118 @@ +package token + +import ( + "fmt" + "strconv" + + hcltoken "github.com/hashicorp/hcl/hcl/token" +) + +// Token defines a single HCL token which can be obtained via the Scanner +type Token struct { + Type Type + Pos Pos + Text string +} + +// Type is the set of lexical tokens of the HCL (HashiCorp Configuration Language) +type Type int + +const ( + // Special tokens + ILLEGAL Type = iota + EOF + + identifier_beg + literal_beg + NUMBER // 12345 + FLOAT // 123.45 + BOOL // true,false + STRING // "abc" + NULL // null + literal_end + identifier_end + + operator_beg + LBRACK // [ + LBRACE // { + COMMA // , + PERIOD // . + COLON // : + + RBRACK // ] + RBRACE // } + + operator_end +) + +var tokens = [...]string{ + ILLEGAL: "ILLEGAL", + + EOF: "EOF", + + NUMBER: "NUMBER", + FLOAT: "FLOAT", + BOOL: "BOOL", + STRING: "STRING", + NULL: "NULL", + + LBRACK: "LBRACK", + LBRACE: "LBRACE", + COMMA: "COMMA", + PERIOD: "PERIOD", + COLON: "COLON", + + RBRACK: "RBRACK", + RBRACE: "RBRACE", +} + +// String returns the string corresponding to the token tok. +func (t Type) String() string { + s := "" + if 0 <= t && t < Type(len(tokens)) { + s = tokens[t] + } + if s == "" { + s = "token(" + strconv.Itoa(int(t)) + ")" + } + return s +} + +// IsIdentifier returns true for tokens corresponding to identifiers and basic +// type literals; it returns false otherwise. +func (t Type) IsIdentifier() bool { return identifier_beg < t && t < identifier_end } + +// IsLiteral returns true for tokens corresponding to basic type literals; it +// returns false otherwise. +func (t Type) IsLiteral() bool { return literal_beg < t && t < literal_end } + +// IsOperator returns true for tokens corresponding to operators and +// delimiters; it returns false otherwise. +func (t Type) IsOperator() bool { return operator_beg < t && t < operator_end } + +// String returns the token's literal text. Note that this is only +// applicable for certain token types, such as token.IDENT, +// token.STRING, etc.. +func (t Token) String() string { + return fmt.Sprintf("%s %s %s", t.Pos.String(), t.Type.String(), t.Text) +} + +// HCLToken converts this token to an HCL token. +// +// The token type must be a literal type or this will panic. 
+func (t Token) HCLToken() hcltoken.Token { + switch t.Type { + case BOOL: + return hcltoken.Token{Type: hcltoken.BOOL, Text: t.Text} + case FLOAT: + return hcltoken.Token{Type: hcltoken.FLOAT, Text: t.Text} + case NULL: + return hcltoken.Token{Type: hcltoken.STRING, Text: ""} + case NUMBER: + return hcltoken.Token{Type: hcltoken.NUMBER, Text: t.Text} + case STRING: + return hcltoken.Token{Type: hcltoken.STRING, Text: t.Text, JSON: true} + default: + panic(fmt.Sprintf("unimplemented HCLToken for type: %s", t.Type)) + } +} diff --git a/vendor/github.com/hashicorp/hcl/lex.go b/vendor/github.com/hashicorp/hcl/lex.go new file mode 100644 index 000000000..d9993c292 --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/lex.go @@ -0,0 +1,38 @@ +package hcl + +import ( + "unicode" + "unicode/utf8" +) + +type lexModeValue byte + +const ( + lexModeUnknown lexModeValue = iota + lexModeHcl + lexModeJson +) + +// lexMode returns whether we're going to be parsing in JSON +// mode or HCL mode. +func lexMode(v []byte) lexModeValue { + var ( + r rune + w int + offset int + ) + + for { + r, w = utf8.DecodeRune(v[offset:]) + offset += w + if unicode.IsSpace(r) { + continue + } + if r == '{' { + return lexModeJson + } + break + } + + return lexModeHcl +} diff --git a/vendor/github.com/hashicorp/hcl/parse.go b/vendor/github.com/hashicorp/hcl/parse.go new file mode 100644 index 000000000..1fca53c4c --- /dev/null +++ b/vendor/github.com/hashicorp/hcl/parse.go @@ -0,0 +1,39 @@ +package hcl + +import ( + "fmt" + + "github.com/hashicorp/hcl/hcl/ast" + hclParser "github.com/hashicorp/hcl/hcl/parser" + jsonParser "github.com/hashicorp/hcl/json/parser" +) + +// ParseBytes accepts as input byte slice and returns ast tree. +// +// Input can be either JSON or HCL +func ParseBytes(in []byte) (*ast.File, error) { + return parse(in) +} + +// ParseString accepts input as a string and returns ast tree. +func ParseString(input string) (*ast.File, error) { + return parse([]byte(input)) +} + +func parse(in []byte) (*ast.File, error) { + switch lexMode(in) { + case lexModeHcl: + return hclParser.Parse(in) + case lexModeJson: + return jsonParser.Parse(in) + } + + return nil, fmt.Errorf("unknown config format") +} + +// Parse parses the given input and returns the root object. +// +// The input format can be either HCL or JSON. +func Parse(input string) (*ast.File, error) { + return parse([]byte(input)) +} diff --git a/vendor/github.com/inconshreveable/mousetrap/LICENSE b/vendor/github.com/inconshreveable/mousetrap/LICENSE new file mode 100644 index 000000000..5f0d1fb6a --- /dev/null +++ b/vendor/github.com/inconshreveable/mousetrap/LICENSE @@ -0,0 +1,13 @@ +Copyright 2014 Alan Shreve + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
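The hashicorp/hcl files vendored above expose a small public entry point: Parse and ParseBytes hand the input to either the native HCL parser or the JSON parser, with lexMode choosing JSON whenever the first non-space rune is `{`. The following is a minimal usage sketch rather than part of the vendored code; it relies only on the exported hcl.Parse signature shown in parse.go.

```go
package main

import (
	"fmt"
	"log"

	"github.com/hashicorp/hcl"
)

func main() {
	// lexMode routes input starting with '{' to the JSON parser and
	// everything else to the HCL parser, so both inputs parse here.
	inputs := []string{
		`name = "demo"`,    // native HCL syntax
		`{"name": "demo"}`, // JSON syntax
	}

	for _, src := range inputs {
		file, err := hcl.Parse(src)
		if err != nil {
			log.Fatalf("parse %q: %v", src, err)
		}
		fmt.Printf("parsed %q into %T\n", src, file)
	}
}
```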
diff --git a/vendor/github.com/inconshreveable/mousetrap/README.md b/vendor/github.com/inconshreveable/mousetrap/README.md new file mode 100644 index 000000000..7a950d177 --- /dev/null +++ b/vendor/github.com/inconshreveable/mousetrap/README.md @@ -0,0 +1,23 @@ +# mousetrap + +mousetrap is a tiny library that answers a single question. + +On a Windows machine, was the process invoked by someone double clicking on +the executable file while browsing in explorer? + +### Motivation + +Windows developers unfamiliar with command line tools will often "double-click" +the executable for a tool. Because most CLI tools print the help and then exit +when invoked without arguments, this is often very frustrating for those users. + +mousetrap provides a way to detect these invocations so that you can provide +more helpful behavior and instructions on how to run the CLI tool. To see what +this looks like, both from an organizational and a technical perspective, see +https://inconshreveable.com/09-09-2014/sweat-the-small-stuff/ + +### The interface + +The library exposes a single interface: + + func StartedByExplorer() (bool) diff --git a/vendor/github.com/inconshreveable/mousetrap/trap_others.go b/vendor/github.com/inconshreveable/mousetrap/trap_others.go new file mode 100644 index 000000000..9d2d8a4ba --- /dev/null +++ b/vendor/github.com/inconshreveable/mousetrap/trap_others.go @@ -0,0 +1,15 @@ +// +build !windows + +package mousetrap + +// StartedByExplorer returns true if the program was invoked by the user +// double-clicking on the executable from explorer.exe +// +// It is conservative and returns false if any of the internal calls fail. +// It does not guarantee that the program was run from a terminal. It only can tell you +// whether it was launched from explorer.exe +// +// On non-Windows platforms, it always returns false. 
+func StartedByExplorer() bool { + return false +} diff --git a/vendor/github.com/inconshreveable/mousetrap/trap_windows.go b/vendor/github.com/inconshreveable/mousetrap/trap_windows.go new file mode 100644 index 000000000..336142a5e --- /dev/null +++ b/vendor/github.com/inconshreveable/mousetrap/trap_windows.go @@ -0,0 +1,98 @@ +// +build windows +// +build !go1.4 + +package mousetrap + +import ( + "fmt" + "os" + "syscall" + "unsafe" +) + +const ( + // defined by the Win32 API + th32cs_snapprocess uintptr = 0x2 +) + +var ( + kernel = syscall.MustLoadDLL("kernel32.dll") + CreateToolhelp32Snapshot = kernel.MustFindProc("CreateToolhelp32Snapshot") + Process32First = kernel.MustFindProc("Process32FirstW") + Process32Next = kernel.MustFindProc("Process32NextW") +) + +// ProcessEntry32 structure defined by the Win32 API +type processEntry32 struct { + dwSize uint32 + cntUsage uint32 + th32ProcessID uint32 + th32DefaultHeapID int + th32ModuleID uint32 + cntThreads uint32 + th32ParentProcessID uint32 + pcPriClassBase int32 + dwFlags uint32 + szExeFile [syscall.MAX_PATH]uint16 +} + +func getProcessEntry(pid int) (pe *processEntry32, err error) { + snapshot, _, e1 := CreateToolhelp32Snapshot.Call(th32cs_snapprocess, uintptr(0)) + if snapshot == uintptr(syscall.InvalidHandle) { + err = fmt.Errorf("CreateToolhelp32Snapshot: %v", e1) + return + } + defer syscall.CloseHandle(syscall.Handle(snapshot)) + + var processEntry processEntry32 + processEntry.dwSize = uint32(unsafe.Sizeof(processEntry)) + ok, _, e1 := Process32First.Call(snapshot, uintptr(unsafe.Pointer(&processEntry))) + if ok == 0 { + err = fmt.Errorf("Process32First: %v", e1) + return + } + + for { + if processEntry.th32ProcessID == uint32(pid) { + pe = &processEntry + return + } + + ok, _, e1 = Process32Next.Call(snapshot, uintptr(unsafe.Pointer(&processEntry))) + if ok == 0 { + err = fmt.Errorf("Process32Next: %v", e1) + return + } + } +} + +func getppid() (pid int, err error) { + pe, err := getProcessEntry(os.Getpid()) + if err != nil { + return + } + + pid = int(pe.th32ParentProcessID) + return +} + +// StartedByExplorer returns true if the program was invoked by the user double-clicking +// on the executable from explorer.exe +// +// It is conservative and returns false if any of the internal calls fail. +// It does not guarantee that the program was run from a terminal. 
It only can tell you +// whether it was launched from explorer.exe +func StartedByExplorer() bool { + ppid, err := getppid() + if err != nil { + return false + } + + pe, err := getProcessEntry(ppid) + if err != nil { + return false + } + + name := syscall.UTF16ToString(pe.szExeFile[:]) + return name == "explorer.exe" +} diff --git a/vendor/github.com/inconshreveable/mousetrap/trap_windows_1.4.go b/vendor/github.com/inconshreveable/mousetrap/trap_windows_1.4.go new file mode 100644 index 000000000..9a28e57c3 --- /dev/null +++ b/vendor/github.com/inconshreveable/mousetrap/trap_windows_1.4.go @@ -0,0 +1,46 @@ +// +build windows +// +build go1.4 + +package mousetrap + +import ( + "os" + "syscall" + "unsafe" +) + +func getProcessEntry(pid int) (*syscall.ProcessEntry32, error) { + snapshot, err := syscall.CreateToolhelp32Snapshot(syscall.TH32CS_SNAPPROCESS, 0) + if err != nil { + return nil, err + } + defer syscall.CloseHandle(snapshot) + var procEntry syscall.ProcessEntry32 + procEntry.Size = uint32(unsafe.Sizeof(procEntry)) + if err = syscall.Process32First(snapshot, &procEntry); err != nil { + return nil, err + } + for { + if procEntry.ProcessID == uint32(pid) { + return &procEntry, nil + } + err = syscall.Process32Next(snapshot, &procEntry) + if err != nil { + return nil, err + } + } +} + +// StartedByExplorer returns true if the program was invoked by the user double-clicking +// on the executable from explorer.exe +// +// It is conservative and returns false if any of the internal calls fail. +// It does not guarantee that the program was run from a terminal. It only can tell you +// whether it was launched from explorer.exe +func StartedByExplorer() bool { + pe, err := getProcessEntry(os.Getppid()) + if err != nil { + return false + } + return "explorer.exe" == syscall.UTF16ToString(pe.ExeFile[:]) +} diff --git a/vendor/github.com/jgautheron/goconst/LICENSE b/vendor/github.com/jgautheron/goconst/LICENSE new file mode 100644 index 000000000..e92649543 --- /dev/null +++ b/vendor/github.com/jgautheron/goconst/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2015 Jonathan Gautheron + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/github.com/jgautheron/goconst/README.md b/vendor/github.com/jgautheron/goconst/README.md new file mode 100644 index 000000000..8dd093baf --- /dev/null +++ b/vendor/github.com/jgautheron/goconst/README.md @@ -0,0 +1,50 @@ +# goconst + +Find repeated strings that could be replaced by a constant. 
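+(Illustration added in this patch for context; it is not part of the upstream goconst README.) The sketch below is the kind of file goconst reports: the literal "failed" appears three times, which clears the default thresholds of 2 occurrences and 3 characters.
+
+```go
+package demo
+
+func hasFailed(state string) bool {
+	return state == "failed" // occurrence 1 (binary expression)
+}
+
+func markFailed(states map[string]string, id string) {
+	states[id] = "failed" // occurrence 2 (assignment)
+}
+
+func failedCount(states map[string]string) int {
+	n := 0
+	for _, s := range states {
+		if s == "failed" { // occurrence 3 (binary expression)
+			n++
+		}
+	}
+	return n
+}
+```
+
+Extracting a single constant (for example `const statusFailed = "failed"`) removes the duplication; with `-match-constant`, goconst also points to an existing constant that already holds the value.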
+ +### Motivation + +There are obvious benefits to using constants instead of repeating strings, mostly to ease maintenance. Cannot argue against changing a single constant versus many strings. + +While this could be considered a beginner mistake, across time, multiple packages and large codebases, some repetition could have slipped in. + +### Get Started + + $ go get github.com/jgautheron/goconst/cmd/goconst + $ goconst ./... + +### Usage + +``` +Usage: + + goconst ARGS + +Flags: + + -ignore exclude files matching the given regular expression + -ignore-tests exclude tests from the search (default: true) + -min-occurrences report from how many occurrences (default: 2) + -min-length only report strings with the minimum given length (default: 3) + -match-constant look for existing constants matching the values + -numbers search also for duplicated numbers + -min minimum value, only works with -numbers + -max maximum value, only works with -numbers + -output output formatting (text or json) + -set-exit-status Set exit status to 2 if any issues are found + +Examples: + + goconst ./... + goconst -ignore "yacc|\.pb\." $GOPATH/src/github.com/cockroachdb/cockroach/... + goconst -min-occurrences 3 -output json $GOPATH/src/github.com/cockroachdb/cockroach + goconst -numbers -min 60 -max 512 . +``` + +### Other static analysis tools + +- [gogetimports](https://github.com/jgautheron/gogetimports): Get a JSON-formatted list of imports. +- [usedexports](https://github.com/jgautheron/usedexports): Find exported variables that could be unexported. + +### License +MIT diff --git a/vendor/github.com/jgautheron/goconst/api.go b/vendor/github.com/jgautheron/goconst/api.go new file mode 100644 index 000000000..d56fcd6c2 --- /dev/null +++ b/vendor/github.com/jgautheron/goconst/api.go @@ -0,0 +1,74 @@ +package goconst + +import ( + "go/ast" + "go/token" + "strings" +) + +type Issue struct { + Pos token.Position + OccurrencesCount int + Str string + MatchingConst string +} + +type Config struct { + IgnoreTests bool + MatchWithConstants bool + MinStringLength int + MinOccurrences int + ParseNumbers bool + NumberMin int + NumberMax int + ExcludeTypes map[Type]bool +} + +func Run(files []*ast.File, fset *token.FileSet, cfg *Config) ([]Issue, error) { + p := New( + "", + "", + cfg.IgnoreTests, + cfg.MatchWithConstants, + cfg.ParseNumbers, + cfg.NumberMin, + cfg.NumberMax, + cfg.MinStringLength, + cfg.MinOccurrences, + cfg.ExcludeTypes, + ) + var issues []Issue + for _, f := range files { + if p.ignoreTests { + if filename := fset.Position(f.Pos()).Filename; strings.HasSuffix(filename, testSuffix) { + continue + } + } + ast.Walk(&treeVisitor{ + fileSet: fset, + packageName: "", + fileName: "", + p: p, + }, f) + } + p.ProcessResults() + + for str, item := range p.strs { + fi := item[0] + i := Issue{ + Pos: fi.Position, + OccurrencesCount: len(item), + Str: str, + } + + if len(p.consts) != 0 { + if cst, ok := p.consts[str]; ok { + // const should be in the same package and exported + i.MatchingConst = cst.Name + } + } + issues = append(issues, i) + } + + return issues, nil +} diff --git a/vendor/github.com/jgautheron/goconst/go.mod b/vendor/github.com/jgautheron/goconst/go.mod new file mode 100644 index 000000000..53dbfbbb9 --- /dev/null +++ b/vendor/github.com/jgautheron/goconst/go.mod @@ -0,0 +1,3 @@ +module github.com/jgautheron/goconst + +go 1.13 diff --git a/vendor/github.com/jgautheron/goconst/parser.go b/vendor/github.com/jgautheron/goconst/parser.go new file mode 100644 index 000000000..2ed7a9a90 --- /dev/null 
+++ b/vendor/github.com/jgautheron/goconst/parser.go @@ -0,0 +1,176 @@ +// Package goconst finds repeated strings that could be replaced by a constant. +// +// There are obvious benefits to using constants instead of repeating strings, +// mostly to ease maintenance. Cannot argue against changing a single constant versus many strings. +// While this could be considered a beginner mistake, across time, +// multiple packages and large codebases, some repetition could have slipped in. +package goconst + +import ( + "go/ast" + "go/parser" + "go/token" + "log" + "os" + "path/filepath" + "regexp" + "strconv" + "strings" +) + +const ( + testSuffix = "_test.go" +) + +type Parser struct { + // Meant to be passed via New() + path, ignore string + ignoreTests, matchConstant bool + minLength, minOccurrences int + numberMin, numberMax int + excludeTypes map[Type]bool + + supportedTokens []token.Token + + // Internals + strs Strings + consts Constants +} + +// New creates a new instance of the parser. +// This is your entry point if you'd like to use goconst as an API. +func New(path, ignore string, ignoreTests, matchConstant, numbers bool, numberMin, numberMax, minLength, minOccurrences int, excludeTypes map[Type]bool) *Parser { + supportedTokens := []token.Token{token.STRING} + if numbers { + supportedTokens = append(supportedTokens, token.INT, token.FLOAT) + } + + return &Parser{ + path: path, + ignore: ignore, + ignoreTests: ignoreTests, + matchConstant: matchConstant, + minLength: minLength, + minOccurrences: minOccurrences, + numberMin: numberMin, + numberMax: numberMax, + supportedTokens: supportedTokens, + excludeTypes: excludeTypes, + + // Initialize the maps + strs: Strings{}, + consts: Constants{}, + } +} + +// ParseTree will search the given path for occurrences that could be moved into constants. +// If "..." is appended, the search will be recursive. +func (p *Parser) ParseTree() (Strings, Constants, error) { + pathLen := len(p.path) + // Parse recursively the given path if the recursive notation is found + if pathLen >= 5 && p.path[pathLen-3:] == "..." { + filepath.Walk(p.path[:pathLen-3], func(path string, f os.FileInfo, err error) error { + if err != nil { + log.Println(err) + // resume walking + return nil + } + + if f.IsDir() { + p.parseDir(path) + } + return nil + }) + } else { + p.parseDir(p.path) + } + + p.ProcessResults() + + return p.strs, p.consts, nil +} + +// ProcessResults post-processes the raw results. 
+func (p *Parser) ProcessResults() { + for str, item := range p.strs { + // Filter out items whose occurrences don't match the min value + if len(item) < p.minOccurrences { + delete(p.strs, str) + } + + // If the value is a number + if i, err := strconv.Atoi(str); err == nil { + if p.numberMin != 0 && i < p.numberMin { + delete(p.strs, str) + } + if p.numberMax != 0 && i > p.numberMax { + delete(p.strs, str) + } + } + } +} + +func (p *Parser) parseDir(dir string) error { + fset := token.NewFileSet() + pkgs, err := parser.ParseDir(fset, dir, func(info os.FileInfo) bool { + valid, name := true, info.Name() + + if p.ignoreTests { + if strings.HasSuffix(name, testSuffix) { + valid = false + } + } + + if len(p.ignore) != 0 { + match, err := regexp.MatchString(p.ignore, dir+name) + if err != nil { + log.Fatal(err) + return true + } + if match { + valid = false + } + } + + return valid + }, 0) + if err != nil { + return err + } + + for _, pkg := range pkgs { + for fn, f := range pkg.Files { + ast.Walk(&treeVisitor{ + fileSet: fset, + packageName: pkg.Name, + fileName: fn, + p: p, + }, f) + } + } + + return nil +} + +type Strings map[string][]ExtendedPos +type Constants map[string]ConstType + +type ConstType struct { + token.Position + Name, packageName string +} + +type ExtendedPos struct { + token.Position + packageName string +} + +type Type int + +const ( + Assignment Type = iota + Binary + Case + Return + Call +) diff --git a/vendor/github.com/jgautheron/goconst/visitor.go b/vendor/github.com/jgautheron/goconst/visitor.go new file mode 100644 index 000000000..c0974da8f --- /dev/null +++ b/vendor/github.com/jgautheron/goconst/visitor.go @@ -0,0 +1,160 @@ +package goconst + +import ( + "go/ast" + "go/token" + "strconv" + "strings" +) + +// treeVisitor carries the package name and file name +// for passing it to the imports map, and the fileSet for +// retrieving the token.Position. +type treeVisitor struct { + p *Parser + fileSet *token.FileSet + packageName, fileName string +} + +// Visit browses the AST tree for strings that could be potentially +// replaced by constants. +// A map of existing constants is built as well (-match-constant). +func (v *treeVisitor) Visit(node ast.Node) ast.Visitor { + if node == nil { + return v + } + + // A single case with "ast.BasicLit" would be much easier + // but then we wouldn't be able to tell in which context + // the string is defined (could be a constant definition). 
+ switch t := node.(type) { + // Scan for constants in an attempt to match strings with existing constants + case *ast.GenDecl: + if !v.p.matchConstant { + return v + } + if t.Tok != token.CONST { + return v + } + + for _, spec := range t.Specs { + val := spec.(*ast.ValueSpec) + for i, str := range val.Values { + lit, ok := str.(*ast.BasicLit) + if !ok || !v.isSupported(lit.Kind) { + continue + } + + v.addConst(val.Names[i].Name, lit.Value, val.Names[i].Pos()) + } + } + + // foo := "moo" + case *ast.AssignStmt: + for _, rhs := range t.Rhs { + lit, ok := rhs.(*ast.BasicLit) + if !ok || !v.isSupported(lit.Kind) { + continue + } + + v.addString(lit.Value, rhs.(*ast.BasicLit).Pos(), Assignment) + } + + // if foo == "moo" + case *ast.BinaryExpr: + if t.Op != token.EQL && t.Op != token.NEQ { + return v + } + + var lit *ast.BasicLit + var ok bool + + lit, ok = t.X.(*ast.BasicLit) + if ok && v.isSupported(lit.Kind) { + v.addString(lit.Value, lit.Pos(), Binary) + } + + lit, ok = t.Y.(*ast.BasicLit) + if ok && v.isSupported(lit.Kind) { + v.addString(lit.Value, lit.Pos(), Binary) + } + + // case "foo": + case *ast.CaseClause: + for _, item := range t.List { + lit, ok := item.(*ast.BasicLit) + if ok && v.isSupported(lit.Kind) { + v.addString(lit.Value, lit.Pos(), Case) + } + } + + // return "boo" + case *ast.ReturnStmt: + for _, item := range t.Results { + lit, ok := item.(*ast.BasicLit) + if ok && v.isSupported(lit.Kind) { + v.addString(lit.Value, lit.Pos(), Return) + } + } + + // fn("http://") + case *ast.CallExpr: + for _, item := range t.Args { + lit, ok := item.(*ast.BasicLit) + if ok && v.isSupported(lit.Kind) { + v.addString(lit.Value, lit.Pos(), Call) + } + } + } + + return v +} + +// addString adds a string in the map along with its position in the tree. +func (v *treeVisitor) addString(str string, pos token.Pos, typ Type) { + ok, excluded := v.p.excludeTypes[typ] + if ok && excluded { + return + } + // Drop quotes if any + if strings.HasPrefix(str, `"`) || strings.HasPrefix(str, "`") { + str, _ = strconv.Unquote(str) + } + + // Ignore empty strings + if len(str) == 0 { + return + } + + if len(str) < v.p.minLength { + return + } + + _, ok = v.p.strs[str] + if !ok { + v.p.strs[str] = make([]ExtendedPos, 0) + } + v.p.strs[str] = append(v.p.strs[str], ExtendedPos{ + packageName: v.packageName, + Position: v.fileSet.Position(pos), + }) +} + +// addConst adds a const in the map along with its position in the tree. 
+func (v *treeVisitor) addConst(name string, val string, pos token.Pos) { + val = strings.Replace(val, `"`, "", 2) + v.p.consts[val] = ConstType{ + Name: name, + packageName: v.packageName, + Position: v.fileSet.Position(pos), + } +} + +func (v *treeVisitor) isSupported(tk token.Token) bool { + for _, s := range v.p.supportedTokens { + if tk == s { + return true + } + } + return false +} diff --git a/vendor/github.com/jingyugao/rowserrcheck/LICENSE b/vendor/github.com/jingyugao/rowserrcheck/LICENSE new file mode 100644 index 000000000..6957f1889 --- /dev/null +++ b/vendor/github.com/jingyugao/rowserrcheck/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2019 Seiji Takahashi + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/github.com/jingyugao/rowserrcheck/passes/rowserr/rowserr.go b/vendor/github.com/jingyugao/rowserrcheck/passes/rowserr/rowserr.go new file mode 100644 index 000000000..ac0177f6e --- /dev/null +++ b/vendor/github.com/jingyugao/rowserrcheck/passes/rowserr/rowserr.go @@ -0,0 +1,331 @@ +package rowserr + +import ( + "go/ast" + "go/types" + "strconv" + + "github.com/gostaticanalysis/analysisutil" + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/buildssa" + "golang.org/x/tools/go/ssa" +) + +func NewAnalyzer(sqlPkgs ...string) *analysis.Analyzer { + return &analysis.Analyzer{ + Name: "rowserrcheck", + Doc: Doc, + Run: NewRun(sqlPkgs...), + Requires: []*analysis.Analyzer{ + buildssa.Analyzer, + }, + } +} + +const ( + Doc = "rowserrcheck checks whether Rows.Err is checked" + errMethod = "Err" + rowsName = "Rows" +) + +type runner struct { + pass *analysis.Pass + rowsTyp *types.Pointer + rowsObj types.Object + skipFile map[*ast.File]bool + sqlPkgs []string +} + +func NewRun(pkgs ...string) func(pass *analysis.Pass) (interface{}, error) { + return func(pass *analysis.Pass) (interface{}, error) { + sqlPkgs := append(pkgs, "database/sql") + for _, pkg := range sqlPkgs { + r := new(runner) + r.sqlPkgs = sqlPkgs + r.run(pass, pkg) + } + return nil, nil + } +} + +// run executes an analysis for the pass. The receiver is passed +// by value because this func is called in parallel for different passes. 
+func (r runner) run(pass *analysis.Pass, pkgPath string) { + r.pass = pass + pssa := pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA) + funcs := pssa.SrcFuncs + + pkg := pssa.Pkg.Prog.ImportedPackage(pkgPath) + if pkg == nil { + // skip + return + } + + rowsType := pkg.Type(rowsName) + if rowsType == nil { + // skip checking + return + } + + r.rowsObj = rowsType.Object() + if r.rowsObj == nil { + // skip checking + return + } + + resNamed, ok := r.rowsObj.Type().(*types.Named) + if !ok { + return + } + + r.rowsTyp = types.NewPointer(resNamed) + r.skipFile = map[*ast.File]bool{} + + for _, f := range funcs { + if r.noImportedDBSQL(f) { + // skip this + continue + } + + // skip if the function is just referenced + var isRefFunc bool + + for i := 0; i < f.Signature.Results().Len(); i++ { + if types.Identical(f.Signature.Results().At(i).Type(), r.rowsTyp) { + isRefFunc = true + } + } + + if isRefFunc { + continue + } + + for _, b := range f.Blocks { + for i := range b.Instrs { + if r.errCallMissing(b, i) { + pass.Reportf(b.Instrs[i].Pos(), "rows.Err must be checked") + } + } + } + } +} + +func (r *runner) errCallMissing(b *ssa.BasicBlock, i int) (ret bool) { + call, ok := r.getCallReturnsRow(b.Instrs[i]) + if !ok { + return false + } + + for _, cRef := range *call.Referrers() { + val, ok := r.getRowsVal(cRef) + if !ok { + continue + } + if len(*val.Referrers()) == 0 { + continue + } + resRefs := *val.Referrers() + var errCalled func(resRef ssa.Instruction) bool + errCalled = func(resRef ssa.Instruction) bool { + switch resRef := resRef.(type) { + case *ssa.Phi: + for _, rf := range *resRef.Referrers() { + if errCalled(rf) { + return true + } + } + case *ssa.Store: // Call in Closure function + for _, aref := range *resRef.Addr.Referrers() { + switch c := aref.(type) { + case *ssa.MakeClosure: + f := c.Fn.(*ssa.Function) + if r.noImportedDBSQL(f) { + // skip this + continue + } + called := r.isClosureCalled(c) + if r.calledInFunc(f, called) { + return true + } + case *ssa.UnOp: + for _, rf := range *c.Referrers() { + if errCalled(rf) { + return true + } + } + } + } + case *ssa.Call: // Indirect function call + if r.isErrCall(resRef) { + return true + } + if f, ok := resRef.Call.Value.(*ssa.Function); ok { + for _, b := range f.Blocks { + for i := range b.Instrs { + if !r.errCallMissing(b, i) { + return true + } + } + } + } + case *ssa.FieldAddr: + for _, bRef := range *resRef.Referrers() { + bOp, ok := r.getBodyOp(bRef) + if !ok { + continue + } + + for _, ccall := range *bOp.Referrers() { + if r.isErrCall(ccall) { + return true + } + } + } + } + + return false + } + + for _, resRef := range resRefs { + if errCalled(resRef) { + return false + } + } + } + + return true +} + +func (r *runner) getCallReturnsRow(instr ssa.Instruction) (*ssa.Call, bool) { + call, ok := instr.(*ssa.Call) + if !ok { + return nil, false + } + + res := call.Call.Signature().Results() + flag := false + + for i := 0; i < res.Len(); i++ { + flag = flag || types.Identical(res.At(i).Type(), r.rowsTyp) + } + + if !flag { + return nil, false + } + + return call, true +} + +func (r *runner) getRowsVal(instr ssa.Instruction) (ssa.Value, bool) { + switch instr := instr.(type) { + case *ssa.Call: + if len(instr.Call.Args) == 1 && types.Identical(instr.Call.Args[0].Type(), r.rowsTyp) { + return instr.Call.Args[0], true + } + case ssa.Value: + if types.Identical(instr.Type(), r.rowsTyp) { + return instr, true + } + default: + } + + return nil, false +} + +func (r *runner) getBodyOp(instr ssa.Instruction) (*ssa.UnOp, bool) { + op, 
ok := instr.(*ssa.UnOp) + if !ok { + return nil, false + } + // fix: try to check type + // if op.Type() != r.rowsObj.Type() { + // return nil, false + // } + return op, true +} + +func (r *runner) isErrCall(ccall ssa.Instruction) bool { + switch ccall := ccall.(type) { + case *ssa.Defer: + if ccall.Call.Value != nil && ccall.Call.Value.Name() == errMethod { + return true + } + case *ssa.Call: + if ccall.Call.Value != nil && ccall.Call.Value.Name() == errMethod { + return true + } + } + + return false +} + +func (r *runner) isClosureCalled(c *ssa.MakeClosure) bool { + for _, ref := range *c.Referrers() { + switch ref.(type) { + case *ssa.Call, *ssa.Defer: + return true + } + } + + return false +} + +func (r *runner) noImportedDBSQL(f *ssa.Function) (ret bool) { + obj := f.Object() + if obj == nil { + return false + } + + file := analysisutil.File(r.pass, obj.Pos()) + if file == nil { + return false + } + + if skip, has := r.skipFile[file]; has { + return skip + } + defer func() { + r.skipFile[file] = ret + }() + + for _, impt := range file.Imports { + path, err := strconv.Unquote(impt.Path.Value) + if err != nil { + continue + } + path = analysisutil.RemoveVendor(path) + for _, pkg := range r.sqlPkgs { + if pkg == path { + return false + } + } + } + + return true +} + +func (r *runner) calledInFunc(f *ssa.Function, called bool) bool { + for _, b := range f.Blocks { + for i, instr := range b.Instrs { + switch instr := instr.(type) { + case *ssa.UnOp: + for _, ref := range *instr.Referrers() { + if v, ok := ref.(ssa.Value); ok { + if vCall, ok := v.(*ssa.Call); ok { + if vCall.Call.Value != nil && vCall.Call.Value.Name() == errMethod { + if called { + return true + } + } + } + } + } + default: + if r.errCallMissing(b, i) || !called { + return false + } + } + } + } + return false +} diff --git a/vendor/github.com/jirfag/go-printf-func-name/LICENSE b/vendor/github.com/jirfag/go-printf-func-name/LICENSE new file mode 100644 index 000000000..d06a809c2 --- /dev/null +++ b/vendor/github.com/jirfag/go-printf-func-name/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2020 Isaev Denis + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
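For context on the rowserrcheck analyzer vendored above: it reports result sets whose Rows.Err method is never consulted. The sketch below shows the pattern it accepts, using only the standard database/sql API; removing the rows.Err check is what would trigger the "rows.Err must be checked" report.

```go
package demo

import "database/sql"

// listNames drains the result set and then consults rows.Err, which is the
// call rowserrcheck looks for after iteration.
func listNames(db *sql.DB) ([]string, error) {
	rows, err := db.Query("SELECT name FROM users")
	if err != nil {
		return nil, err
	}
	defer rows.Close()

	var names []string
	for rows.Next() {
		var name string
		if err := rows.Scan(&name); err != nil {
			return nil, err
		}
		names = append(names, name)
	}
	if err := rows.Err(); err != nil {
		return nil, err
	}
	return names, nil
}
```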
diff --git a/vendor/github.com/jirfag/go-printf-func-name/pkg/analyzer/analyzer.go b/vendor/github.com/jirfag/go-printf-func-name/pkg/analyzer/analyzer.go new file mode 100644 index 000000000..7937dd433 --- /dev/null +++ b/vendor/github.com/jirfag/go-printf-func-name/pkg/analyzer/analyzer.go @@ -0,0 +1,74 @@ +package analyzer + +import ( + "go/ast" + "strings" + + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/ast/inspector" + + "golang.org/x/tools/go/analysis" +) + +var Analyzer = &analysis.Analyzer{ + Name: "goprintffuncname", + Doc: "Checks that printf-like functions are named with `f` at the end.", + Run: run, + Requires: []*analysis.Analyzer{inspect.Analyzer}, +} + +func run(pass *analysis.Pass) (interface{}, error) { + inspector := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + nodeFilter := []ast.Node{ + (*ast.FuncDecl)(nil), + } + + inspector.Preorder(nodeFilter, func(node ast.Node) { + funcDecl := node.(*ast.FuncDecl) + + if res := funcDecl.Type.Results; res != nil && len(res.List) != 0 { + return + } + + params := funcDecl.Type.Params.List + if len(params) < 2 { // [0] must be format (string), [1] must be args (...interface{}) + return + } + + formatParamType, ok := params[len(params)-2].Type.(*ast.Ident) + if !ok { // first param type isn't identificator so it can't be of type "string" + return + } + + if formatParamType.Name != "string" { // first param (format) type is not string + return + } + + if formatParamNames := params[len(params)-2].Names; len(formatParamNames) == 0 || formatParamNames[len(formatParamNames)-1].Name != "format" { + return + } + + argsParamType, ok := params[len(params)-1].Type.(*ast.Ellipsis) + if !ok { // args are not ellipsis (...args) + return + } + + elementType, ok := argsParamType.Elt.(*ast.InterfaceType) + if !ok { // args are not of interface type, but we need interface{} + return + } + + if elementType.Methods != nil && len(elementType.Methods.List) != 0 { + return // has >= 1 method in interface, but we need an empty interface "interface{}" + } + + if strings.HasSuffix(funcDecl.Name.Name, "f") { + return + } + + pass.Reportf(node.Pos(), "printf-like formatting function '%s' should be named '%sf'", + funcDecl.Name.Name, funcDecl.Name.Name) + }) + + return nil, nil +} diff --git a/vendor/github.com/julz/importas/.gitignore b/vendor/github.com/julz/importas/.gitignore new file mode 100644 index 000000000..c264e642f --- /dev/null +++ b/vendor/github.com/julz/importas/.gitignore @@ -0,0 +1,2 @@ +.idea/ +importas diff --git a/vendor/github.com/julz/importas/LICENSE b/vendor/github.com/julz/importas/LICENSE new file mode 100644 index 000000000..261eeb9e9 --- /dev/null +++ b/vendor/github.com/julz/importas/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
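Looking back at the goprintffuncname analyzer vendored a few files above: it flags printf-style functions whose name lacks the trailing `f`. Here is a small sketch of what it accepts and what it reports; the function names are illustrative, not taken from any real code.

```go
package demo

import "fmt"

// Logf follows the convention the analyzer checks: a name ending in "f" on a
// function whose final parameters are (format string, args ...interface{}).
func Logf(format string, args ...interface{}) {
	fmt.Printf("demo: "+format+"\n", args...)
}

// Log has the same printf-like signature but no trailing "f"; the analyzer
// reports: printf-like formatting function 'Log' should be named 'Logf'.
func Log(format string, args ...interface{}) {
	fmt.Printf("demo: "+format+"\n", args...)
}
```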
diff --git a/vendor/github.com/julz/importas/README.md b/vendor/github.com/julz/importas/README.md new file mode 100644 index 000000000..9489fe7d8 --- /dev/null +++ b/vendor/github.com/julz/importas/README.md @@ -0,0 +1,47 @@ +A linter to enforce importing certain packages consistently. + +## What is this for? + +Ideally, go imports should avoid aliasing. Sometimes though, especially with +Kubernetes API code, it becomes unavoidable, because many packages are imported +as e.g. "[package]/v1alpha1" and you end up with lots of collisions if you use +"v1alpha1". + +This linter lets you enforce that whenever (for example) +"pkg/apis/serving/v1alpha1" is aliased, it is aliased as "servingv1alpha1". + +## Usage + +~~~~ +importas \ + -alias knative.dev/serving/pkg/apis/autoscaling/v1alpha1:autoscalingv1alpha1 \ + -alias knative.dev/serving/pkg/apis/serving/v1:servingv1 \ + ./... +~~~~ + +### `-no-unaliased` option + +By default, importas allows non-aliased imports, even when the package is specified by `-alias` flag. +With `-no-unaliased` option, importas does not allow this. + +~~~~ +importas -no-unaliased \ + -alias knative.dev/serving/pkg/apis/autoscaling/v1alpha1:autoscalingv1alpha1 \ + -alias knative.dev/serving/pkg/apis/serving/v1:servingv1 \ + ./... +~~~~ + +### Use regular expression + +You can specify the package path by regular expression, and alias by regular expression replacement syntax like following snippet. + +~~~~ +importas -alias 'knative.dev/serving/pkg/apis/(\w+)/(v[\w\d]+):$1$2' +~~~~ + +`$1` represents the text of the first submatch. See [detail](https://golang.org/pkg/regexp/#Regexp.Expand). + +So it will enforce that + +"knative.dev/serving/pkg/apis/autoscaling/v1alpha1" is aliased by "autoscalingv1alpha1", and +"knative.dev/serving/pkg/apis/serving/v1" is aliased by "servingv1" diff --git a/vendor/github.com/julz/importas/analyzer.go b/vendor/github.com/julz/importas/analyzer.go new file mode 100644 index 000000000..4fbe104e5 --- /dev/null +++ b/vendor/github.com/julz/importas/analyzer.go @@ -0,0 +1,116 @@ +package importas + +import ( + "fmt" + "go/ast" + "go/types" + "strconv" + "strings" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/ast/inspector" +) + +var config = &Config{ + RequiredAlias: make(map[string]string), +} + +var Analyzer = &analysis.Analyzer{ + Name: "importas", + Doc: "Enforces consistent import aliases", + Run: run, + + Flags: flags(config), + + Requires: []*analysis.Analyzer{inspect.Analyzer}, +} + +func run(pass *analysis.Pass) (interface{}, error) { + return runWithConfig(config, pass) +} + +func runWithConfig(config *Config, pass *analysis.Pass) (interface{}, error) { + if err := config.CompileRegexp(); err != nil { + return nil, err + } + + inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + inspect.Preorder([]ast.Node{(*ast.ImportSpec)(nil)}, func(n ast.Node) { + visitImportSpecNode(config, n.(*ast.ImportSpec), pass) + }) + + return nil, nil +} + +func visitImportSpecNode(config *Config, node *ast.ImportSpec, pass *analysis.Pass) { + if !config.DisallowUnaliased && node.Name == nil { + return + } + + alias := "" + if node.Name != nil { + alias = node.Name.String() + } + + if alias == "." { + return // Dot aliases are generally used in tests, so ignore. + } + + if strings.HasPrefix(alias, "_") { + return // Used by go test and for auto-includes, not a conflict. 
+ } + + path, err := strconv.Unquote(node.Path.Value) + if err != nil { + pass.Reportf(node.Pos(), "import not quoted") + } + + if required, exists := config.AliasFor(path); exists && required != alias { + message := fmt.Sprintf("import %q imported as %q but must be %q according to config", path, alias, required) + if alias == "" { + message = fmt.Sprintf("import %q imported without alias but must be with alias %q according to config", path, required) + } + + pass.Report(analysis.Diagnostic{ + Pos: node.Pos(), + End: node.End(), + Message: message, + SuggestedFixes: []analysis.SuggestedFix{{ + Message: "Use correct alias", + TextEdits: findEdits(node, pass.TypesInfo.Uses, path, alias, required), + }}, + }) + } +} + +func findEdits(node ast.Node, uses map[*ast.Ident]types.Object, importPath, original, required string) []analysis.TextEdit { + // Edit the actual import line. + result := []analysis.TextEdit{{ + Pos: node.Pos(), + End: node.End(), + NewText: []byte(required + " " + strconv.Quote(importPath)), + }} + + // Edit all the uses of the alias in the code. + for use, pkg := range uses { + pkgName, ok := pkg.(*types.PkgName) + if !ok { + // skip identifiers that aren't pointing at a PkgName. + continue + } + + if pkgName.Pos() != node.Pos() { + // skip identifiers pointing to a different import statement. + continue + } + + result = append(result, analysis.TextEdit{ + Pos: use.Pos(), + End: use.End(), + NewText: []byte(required), + }) + } + + return result +} diff --git a/vendor/github.com/julz/importas/config.go b/vendor/github.com/julz/importas/config.go new file mode 100644 index 000000000..2e1c1d887 --- /dev/null +++ b/vendor/github.com/julz/importas/config.go @@ -0,0 +1,69 @@ +package importas + +import ( + "errors" + "fmt" + "regexp" +) + +type Config struct { + RequiredAlias map[string]string + Rules []*Rule + DisallowUnaliased bool +} + +func (c *Config) CompileRegexp() error { + rules := make([]*Rule, 0, len(c.RequiredAlias)) + for path, alias := range c.RequiredAlias { + reg, err := regexp.Compile(fmt.Sprintf("^%s$", path)) + if err != nil { + return err + } + + rules = append(rules, &Rule{ + Regexp: reg, + Alias: alias, + }) + } + + c.Rules = rules + return nil +} + +func (c *Config) findRule(path string) *Rule { + for _, rule := range c.Rules { + if rule.Regexp.MatchString(path) { + return rule + } + } + + return nil +} + +func (c *Config) AliasFor(path string) (string, bool) { + rule := c.findRule(path) + if rule == nil { + return "", false + } + + alias, err := rule.aliasFor(path) + if err != nil { + return "", false + } + + return alias, true +} + +type Rule struct { + Alias string + Regexp *regexp.Regexp +} + +func (r *Rule) aliasFor(path string) (string, error) { + str := r.Regexp.FindString(path) + if len(str) > 0 { + return r.Regexp.ReplaceAllString(str, r.Alias), nil + } + + return "", errors.New("mismatch rule") +} diff --git a/vendor/github.com/julz/importas/flags.go b/vendor/github.com/julz/importas/flags.go new file mode 100644 index 000000000..22be4af3e --- /dev/null +++ b/vendor/github.com/julz/importas/flags.go @@ -0,0 +1,31 @@ +package importas + +import ( + "errors" + "flag" + "fmt" + "strings" +) + +func flags(config *Config) flag.FlagSet { + fs := flag.FlagSet{} + fs.Var(stringMap(config.RequiredAlias), "alias", "required import alias in form path:alias") + fs.BoolVar(&config.DisallowUnaliased, "no-unaliased", false, "do not allow unaliased imports of aliased packages") + return fs +} + +type stringMap map[string]string + +func (v stringMap) Set(val string) 
error { + spl := strings.SplitN(val, ":", 2) + if len(spl) != 2 { + return errors.New("import flag must be of form path:alias") + } + + v[spl[0]] = spl[1] + return nil +} + +func (v stringMap) String() string { + return fmt.Sprintf("%v", (map[string]string)(v)) +} diff --git a/vendor/github.com/julz/importas/go.mod b/vendor/github.com/julz/importas/go.mod new file mode 100644 index 000000000..97d8c438b --- /dev/null +++ b/vendor/github.com/julz/importas/go.mod @@ -0,0 +1,5 @@ +module github.com/julz/importas + +go 1.15 + +require golang.org/x/tools v0.1.0 diff --git a/vendor/github.com/julz/importas/go.sum b/vendor/github.com/julz/importas/go.sum new file mode 100644 index 000000000..21d696a65 --- /dev/null +++ b/vendor/github.com/julz/importas/go.sum @@ -0,0 +1,26 @@ +github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/mod v0.3.0 h1:RM4zey1++hCTbCVQfnWeKs9/IEsaBLA8vTkd0WVtmH4= +golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4 h1:myAQVi0cGEoqQVR5POX+8RR2mrocKqNN1hmeMqhX27k= +golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.1.0 h1:po9/4sTYwZU9lPhi1tOrb4hCv3qrhiQ77LZfGa2OjwY= +golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0= +golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= diff --git a/vendor/github.com/kisielk/errcheck/LICENSE b/vendor/github.com/kisielk/errcheck/LICENSE new file mode 100644 index 
000000000..a2b16b5bd --- /dev/null +++ b/vendor/github.com/kisielk/errcheck/LICENSE @@ -0,0 +1,22 @@ +Copyright (c) 2013 Kamil Kisiel + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without +restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. diff --git a/vendor/github.com/kisielk/errcheck/errcheck/embedded_walker.go b/vendor/github.com/kisielk/errcheck/errcheck/embedded_walker.go new file mode 100644 index 000000000..3b3192580 --- /dev/null +++ b/vendor/github.com/kisielk/errcheck/errcheck/embedded_walker.go @@ -0,0 +1,144 @@ +package errcheck + +import ( + "fmt" + "go/types" +) + +// walkThroughEmbeddedInterfaces returns a slice of Interfaces that +// we need to walk through in order to reach the actual definition, +// in an Interface, of the method selected by the given selection. +// +// false will be returned in the second return value if: +// - the right side of the selection is not a function +// - the actual definition of the function is not in an Interface +// +// The returned slice will contain all the interface types that need +// to be walked through to reach the actual definition. +// +// For example, say we have: +// +// type Inner interface {Method()} +// type Middle interface {Inner} +// type Outer interface {Middle} +// type T struct {Outer} +// type U struct {T} +// type V struct {U} +// +// And then the selector: +// +// V.Method +// +// We'll return [Outer, Middle, Inner] by first walking through the embedded structs +// until we reach the Outer interface, then descending through the embedded interfaces +// until we find the one that actually explicitly defines Method. +func walkThroughEmbeddedInterfaces(sel *types.Selection) ([]types.Type, bool) { + fn, ok := sel.Obj().(*types.Func) + if !ok { + return nil, false + } + + // Start off at the receiver. + currentT := sel.Recv() + + // First, we can walk through any Struct fields provided + // by the selection Index() method. We ignore the last + // index because it would give the method itself. + indexes := sel.Index() + for _, fieldIndex := range indexes[:len(indexes)-1] { + currentT = getTypeAtFieldIndex(currentT, fieldIndex) + } + + // Now currentT is either a type implementing the actual function, + // an Invalid type (if the receiver is a package), or an interface. + // + // If it's not an Interface, then we're done, as this function + // only cares about Interface-defined functions. + // + // If it is an Interface, we potentially need to continue digging until + // we find the Interface that actually explicitly defines the function. 
+ interfaceT, ok := maybeUnname(currentT).(*types.Interface) + if !ok { + return nil, false + } + + // The first interface we pass through is this one we've found. We return the possibly + // wrapping types.Named because it is more useful to work with for callers. + result := []types.Type{currentT} + + // If this interface itself explicitly defines the given method + // then we're done digging. + for !explicitlyDefinesMethod(interfaceT, fn) { + // Otherwise, we find which of the embedded interfaces _does_ + // define the method, add it to our list, and loop. + namedInterfaceT, ok := getEmbeddedInterfaceDefiningMethod(interfaceT, fn) + if !ok { + // This should be impossible as long as we type-checked: either the + // interface or one of its embedded ones must implement the method... + panic(fmt.Sprintf("either %v or one of its embedded interfaces must implement %v", currentT, fn)) + } + result = append(result, namedInterfaceT) + interfaceT = namedInterfaceT.Underlying().(*types.Interface) + } + + return result, true +} + +func getTypeAtFieldIndex(startingAt types.Type, fieldIndex int) types.Type { + t := maybeUnname(maybeDereference(startingAt)) + s, ok := t.(*types.Struct) + if !ok { + panic(fmt.Sprintf("cannot get Field of a type that is not a struct, got a %T", t)) + } + + return s.Field(fieldIndex).Type() +} + +// getEmbeddedInterfaceDefiningMethod searches through any embedded interfaces of the +// passed interface searching for one that defines the given function. If found, the +// types.Named wrapping that interface will be returned along with true in the second value. +// +// If no such embedded interface is found, nil and false are returned. +func getEmbeddedInterfaceDefiningMethod(interfaceT *types.Interface, fn *types.Func) (*types.Named, bool) { + for i := 0; i < interfaceT.NumEmbeddeds(); i++ { + embedded := interfaceT.Embedded(i) + if definesMethod(embedded.Underlying().(*types.Interface), fn) { + return embedded, true + } + } + return nil, false +} + +func explicitlyDefinesMethod(interfaceT *types.Interface, fn *types.Func) bool { + for i := 0; i < interfaceT.NumExplicitMethods(); i++ { + if interfaceT.ExplicitMethod(i) == fn { + return true + } + } + return false +} + +func definesMethod(interfaceT *types.Interface, fn *types.Func) bool { + for i := 0; i < interfaceT.NumMethods(); i++ { + if interfaceT.Method(i) == fn { + return true + } + } + return false +} + +func maybeDereference(t types.Type) types.Type { + p, ok := t.(*types.Pointer) + if ok { + return p.Elem() + } + return t +} + +func maybeUnname(t types.Type) types.Type { + n, ok := t.(*types.Named) + if ok { + return n.Underlying() + } + return t +} diff --git a/vendor/github.com/kisielk/errcheck/errcheck/errcheck.go b/vendor/github.com/kisielk/errcheck/errcheck/errcheck.go new file mode 100644 index 000000000..724e3e88f --- /dev/null +++ b/vendor/github.com/kisielk/errcheck/errcheck/errcheck.go @@ -0,0 +1,676 @@ +// Package errcheck is the library used to implement the errcheck command-line tool. 
+package errcheck + +import ( + "bufio" + "errors" + "fmt" + "go/ast" + "go/token" + "go/types" + "os" + "regexp" + "sort" + "strings" + + "golang.org/x/tools/go/packages" +) + +var errorType *types.Interface + +func init() { + errorType = types.Universe.Lookup("error").Type().Underlying().(*types.Interface) +} + +var ( + // ErrNoGoFiles is returned when CheckPackage is run on a package with no Go source files + ErrNoGoFiles = errors.New("package contains no go source files") + + // DefaultExcludedSymbols is a list of symbol names that are usually excluded from checks by default. + // + // Note, that they still need to be explicitly copied to Checker.Exclusions.Symbols + DefaultExcludedSymbols = []string{ + // bytes + "(*bytes.Buffer).Write", + "(*bytes.Buffer).WriteByte", + "(*bytes.Buffer).WriteRune", + "(*bytes.Buffer).WriteString", + + // fmt + "fmt.Errorf", + "fmt.Print", + "fmt.Printf", + "fmt.Println", + "fmt.Fprint(*bytes.Buffer)", + "fmt.Fprintf(*bytes.Buffer)", + "fmt.Fprintln(*bytes.Buffer)", + "fmt.Fprint(*strings.Builder)", + "fmt.Fprintf(*strings.Builder)", + "fmt.Fprintln(*strings.Builder)", + "fmt.Fprint(os.Stderr)", + "fmt.Fprintf(os.Stderr)", + "fmt.Fprintln(os.Stderr)", + + // math/rand + "math/rand.Read", + "(*math/rand.Rand).Read", + + // strings + "(*strings.Builder).Write", + "(*strings.Builder).WriteByte", + "(*strings.Builder).WriteRune", + "(*strings.Builder).WriteString", + + // hash + "(hash.Hash).Write", + } +) + +// UncheckedError indicates the position of an unchecked error return. +type UncheckedError struct { + Pos token.Position + Line string + FuncName string + SelectorName string +} + +// Result is returned from the CheckPackage function, and holds all the errors +// that were found to be unchecked in a package. +// +// Aggregation can be done using the Append method for users that want to +// combine results from multiple packages. +type Result struct { + // UncheckedErrors is a list of all the unchecked errors in the package. + // Printing an error reports its position within the file and the contents of the line. + UncheckedErrors []UncheckedError +} + +type byName []UncheckedError + +// Less reports whether the element with index i should sort before the element with index j. +func (b byName) Less(i, j int) bool { + ei, ej := b[i], b[j] + + pi, pj := ei.Pos, ej.Pos + + if pi.Filename != pj.Filename { + return pi.Filename < pj.Filename + } + if pi.Line != pj.Line { + return pi.Line < pj.Line + } + if pi.Column != pj.Column { + return pi.Column < pj.Column + } + + return ei.Line < ej.Line +} + +func (b byName) Swap(i, j int) { + b[i], b[j] = b[j], b[i] +} + +func (b byName) Len() int { + return len(b) +} + +// Append appends errors to e. Append does not do any duplicate checking. +func (r *Result) Append(other Result) { + r.UncheckedErrors = append(r.UncheckedErrors, other.UncheckedErrors...) +} + +// Returns the unique errors that have been accumulated. Duplicates may occur +// when a file containing an unchecked error belongs to > 1 package. +// +// The method receiver remains unmodified after the call to Unique. 
+func (r Result) Unique() Result { + result := make([]UncheckedError, len(r.UncheckedErrors)) + copy(result, r.UncheckedErrors) + sort.Sort((byName)(result)) + uniq := result[:0] // compact in-place + for i, err := range result { + if i == 0 || err != result[i-1] { + uniq = append(uniq, err) + } + } + return Result{UncheckedErrors: uniq} +} + +// Exclusions define symbols and language elements that will be not checked +type Exclusions struct { + + // Packages lists paths of excluded packages. + Packages []string + + // SymbolRegexpsByPackage maps individual package paths to regular + // expressions that match symbols to be excluded. + // + // Packages whose paths appear both here and in Packages list will + // be excluded entirely. + // + // This is a legacy input that will be deprecated in errcheck version 2 and + // should not be used. + SymbolRegexpsByPackage map[string]*regexp.Regexp + + // Symbols lists patterns that exclude individual package symbols. + // + // For example: + // + // "fmt.Errorf" // function + // "fmt.Fprintf(os.Stderr)" // function with set argument value + // "(hash.Hash).Write" // method + // + Symbols []string + + // TestFiles excludes _test.go files. + TestFiles bool + + // GeneratedFiles excludes generated source files. + // + // Source file is assumed to be generated if its contents + // match the following regular expression: + // + // ^// Code generated .* DO NOT EDIT\\.$ + // + GeneratedFiles bool + + // BlankAssignments ignores assignments to blank identifier. + BlankAssignments bool + + // TypeAssertions ignores unchecked type assertions. + TypeAssertions bool +} + +// Checker checks that you checked errors. +type Checker struct { + // Exclusions defines code packages, symbols, and other elements that will not be checked. + Exclusions Exclusions + + // Tags are a list of build tags to use. + Tags []string + + // The mod flag for go build. + Mod string +} + +// loadPackages is used for testing. +var loadPackages = func(cfg *packages.Config, paths ...string) ([]*packages.Package, error) { + return packages.Load(cfg, paths...) +} + +// LoadPackages loads all the packages in all the paths provided. It uses the +// exclusions and build tags provided to by the user when loading the packages. +func (c *Checker) LoadPackages(paths ...string) ([]*packages.Package, error) { + buildFlags := []string{fmtTags(c.Tags)} + if c.Mod != "" { + buildFlags = append(buildFlags, fmt.Sprintf("-mod=%s", c.Mod)) + } + cfg := &packages.Config{ + Mode: packages.LoadAllSyntax, + Tests: !c.Exclusions.TestFiles, + BuildFlags: buildFlags, + } + return loadPackages(cfg, paths...) +} + +var generatedCodeRegexp = regexp.MustCompile("^// Code generated .* DO NOT EDIT\\.$") +var dotStar = regexp.MustCompile(".*") + +func (c *Checker) shouldSkipFile(file *ast.File) bool { + if !c.Exclusions.GeneratedFiles { + return false + } + + for _, cg := range file.Comments { + for _, comment := range cg.List { + if generatedCodeRegexp.MatchString(comment.Text) { + return true + } + } + } + + return false +} + +// CheckPackage checks packages for errors that have not been checked. +// +// It will exclude specific errors from analysis if the user has configured +// exclusions. 
+func (c *Checker) CheckPackage(pkg *packages.Package) Result { + excludedSymbols := map[string]bool{} + for _, sym := range c.Exclusions.Symbols { + excludedSymbols[sym] = true + } + + ignore := map[string]*regexp.Regexp{} + // Apply SymbolRegexpsByPackage first so that if the same path appears in + // Packages, a more narrow regexp will be superceded by dotStar below. + if regexps := c.Exclusions.SymbolRegexpsByPackage; regexps != nil { + for pkg, re := range regexps { + // TODO warn if previous entry overwritten? + ignore[nonVendoredPkgPath(pkg)] = re + } + } + for _, pkg := range c.Exclusions.Packages { + // TODO warn if previous entry overwritten? + ignore[nonVendoredPkgPath(pkg)] = dotStar + } + + v := &visitor{ + pkg: pkg, + ignore: ignore, + blank: !c.Exclusions.BlankAssignments, + asserts: !c.Exclusions.TypeAssertions, + lines: make(map[string][]string), + exclude: excludedSymbols, + errors: []UncheckedError{}, + } + + for _, astFile := range v.pkg.Syntax { + if c.shouldSkipFile(astFile) { + continue + } + ast.Walk(v, astFile) + } + return Result{UncheckedErrors: v.errors} +} + +// visitor implements the errcheck algorithm +type visitor struct { + pkg *packages.Package + ignore map[string]*regexp.Regexp + blank bool + asserts bool + lines map[string][]string + exclude map[string]bool + + errors []UncheckedError +} + +// selectorAndFunc tries to get the selector and function from call expression. +// For example, given the call expression representing "a.b()", the selector +// is "a.b" and the function is "b" itself. +// +// The final return value will be true if it is able to do extract a selector +// from the call and look up the function object it refers to. +// +// If the call does not include a selector (like if it is a plain "f()" function call) +// then the final return value will be false. +func (v *visitor) selectorAndFunc(call *ast.CallExpr) (*ast.SelectorExpr, *types.Func, bool) { + sel, ok := call.Fun.(*ast.SelectorExpr) + if !ok { + return nil, nil, false + } + + fn, ok := v.pkg.TypesInfo.ObjectOf(sel.Sel).(*types.Func) + if !ok { + // Shouldn't happen, but be paranoid + return nil, nil, false + } + + return sel, fn, true + +} + +// fullName will return a package / receiver-type qualified name for a called function +// if the function is the result of a selector. Otherwise it will return +// the empty string. +// +// The name is fully qualified by the import path, possible type, +// function/method name and pointer receiver. +// +// For example, +// - for "fmt.Printf(...)" it will return "fmt.Printf" +// - for "base64.StdEncoding.Decode(...)" it will return "(*encoding/base64.Encoding).Decode" +// - for "myFunc()" it will return "" +func (v *visitor) fullName(call *ast.CallExpr) string { + _, fn, ok := v.selectorAndFunc(call) + if !ok { + return "" + } + + // TODO(dh): vendored packages will have /vendor/ in their name, + // thus not matching vendored standard library packages. If we + // want to support vendored stdlib packages, we need to implement + // FullName with our own logic. + return fn.FullName() +} + +func getSelectorName(sel *ast.SelectorExpr) string { + if ident, ok := sel.X.(*ast.Ident); ok { + return fmt.Sprintf("%s.%s", ident.Name, sel.Sel.Name) + } + if s, ok := sel.X.(*ast.SelectorExpr); ok { + return fmt.Sprintf("%s.%s", getSelectorName(s), sel.Sel.Name) + } + + return "" +} + +// selectorName will return a name for a called function +// if the function is the result of a selector. Otherwise it will return +// the empty string. 
+// +// The name is fully qualified by the import path, possible type, +// function/method name and pointer receiver. +// +// For example, +// - for "fmt.Printf(...)" it will return "fmt.Printf" +// - for "base64.StdEncoding.Decode(...)" it will return "base64.StdEncoding.Decode" +// - for "myFunc()" it will return "" +func (v *visitor) selectorName(call *ast.CallExpr) string { + sel, _, ok := v.selectorAndFunc(call) + if !ok { + return "" + } + + return getSelectorName(sel) +} + +// namesForExcludeCheck will return a list of fully-qualified function names +// from a function call that can be used to check against the exclusion list. +// +// If a function call is against a local function (like "myFunc()") then no +// names are returned. If the function is package-qualified (like "fmt.Printf()") +// then just that function's fullName is returned. +// +// Otherwise, we walk through all the potentially embeddded interfaces of the receiver +// the collect a list of type-qualified function names that we will check. +func (v *visitor) namesForExcludeCheck(call *ast.CallExpr) []string { + sel, fn, ok := v.selectorAndFunc(call) + if !ok { + return nil + } + + name := v.fullName(call) + if name == "" { + return nil + } + + // This will be missing for functions without a receiver (like fmt.Printf), + // so just fall back to the the function's fullName in that case. + selection, ok := v.pkg.TypesInfo.Selections[sel] + if !ok { + return []string{name} + } + + // This will return with ok false if the function isn't defined + // on an interface, so just fall back to the fullName. + ts, ok := walkThroughEmbeddedInterfaces(selection) + if !ok { + return []string{name} + } + + result := make([]string, len(ts)) + for i, t := range ts { + // Like in fullName, vendored packages will have /vendor/ in their name, + // thus not matching vendored standard library packages. If we + // want to support vendored stdlib packages, we need to implement + // additional logic here. + result[i] = fmt.Sprintf("(%s).%s", t.String(), fn.Name()) + } + return result +} + +// isBufferType checks if the expression type is a known in-memory buffer type. +func (v *visitor) argName(expr ast.Expr) string { + // Special-case literal "os.Stdout" and "os.Stderr" + if sel, ok := expr.(*ast.SelectorExpr); ok { + if obj := v.pkg.TypesInfo.ObjectOf(sel.Sel); obj != nil { + vr, ok := obj.(*types.Var) + if ok && vr.Pkg() != nil && vr.Pkg().Name() == "os" && (vr.Name() == "Stderr" || vr.Name() == "Stdout") { + return "os." + vr.Name() + } + } + } + t := v.pkg.TypesInfo.TypeOf(expr) + if t == nil { + return "" + } + return t.String() +} + +func (v *visitor) excludeCall(call *ast.CallExpr) bool { + var arg0 string + if len(call.Args) > 0 { + arg0 = v.argName(call.Args[0]) + } + for _, name := range v.namesForExcludeCheck(call) { + if v.exclude[name] { + return true + } + if arg0 != "" && v.exclude[name+"("+arg0+")"] { + return true + } + } + return false +} + +func (v *visitor) ignoreCall(call *ast.CallExpr) bool { + if v.excludeCall(call) { + return true + } + + // Try to get an identifier. + // Currently only supports simple expressions: + // 1. f() + // 2. 
x.y.f() + var id *ast.Ident + switch exp := call.Fun.(type) { + case (*ast.Ident): + id = exp + case (*ast.SelectorExpr): + id = exp.Sel + default: + // eg: *ast.SliceExpr, *ast.IndexExpr + } + + if id == nil { + return false + } + + // If we got an identifier for the function, see if it is ignored + if re, ok := v.ignore[""]; ok && re.MatchString(id.Name) { + return true + } + + if obj := v.pkg.TypesInfo.Uses[id]; obj != nil { + if pkg := obj.Pkg(); pkg != nil { + if re, ok := v.ignore[nonVendoredPkgPath(pkg.Path())]; ok { + return re.MatchString(id.Name) + } + } + } + + return false +} + +// nonVendoredPkgPath returns the unvendored version of the provided package +// path (or returns the provided path if it does not represent a vendored +// path). +func nonVendoredPkgPath(pkgPath string) string { + lastVendorIndex := strings.LastIndex(pkgPath, "/vendor/") + if lastVendorIndex == -1 { + return pkgPath + } + return pkgPath[lastVendorIndex+len("/vendor/"):] +} + +// errorsByArg returns a slice s such that +// len(s) == number of return types of call +// s[i] == true iff return type at position i from left is an error type +func (v *visitor) errorsByArg(call *ast.CallExpr) []bool { + switch t := v.pkg.TypesInfo.Types[call].Type.(type) { + case *types.Named: + // Single return + return []bool{isErrorType(t)} + case *types.Pointer: + // Single return via pointer + return []bool{isErrorType(t)} + case *types.Tuple: + // Multiple returns + s := make([]bool, t.Len()) + for i := 0; i < t.Len(); i++ { + switch et := t.At(i).Type().(type) { + case *types.Named: + // Single return + s[i] = isErrorType(et) + case *types.Pointer: + // Single return via pointer + s[i] = isErrorType(et) + default: + s[i] = false + } + } + return s + } + return []bool{false} +} + +func (v *visitor) callReturnsError(call *ast.CallExpr) bool { + if v.isRecover(call) { + return true + } + for _, isError := range v.errorsByArg(call) { + if isError { + return true + } + } + return false +} + +// isRecover returns true if the given CallExpr is a call to the built-in recover() function. +func (v *visitor) isRecover(call *ast.CallExpr) bool { + if fun, ok := call.Fun.(*ast.Ident); ok { + if _, ok := v.pkg.TypesInfo.Uses[fun].(*types.Builtin); ok { + return fun.Name == "recover" + } + } + return false +} + +func (v *visitor) addErrorAtPosition(position token.Pos, call *ast.CallExpr) { + pos := v.pkg.Fset.Position(position) + lines, ok := v.lines[pos.Filename] + if !ok { + lines = readfile(pos.Filename) + v.lines[pos.Filename] = lines + } + + line := "??" 
+ if pos.Line-1 < len(lines) { + line = strings.TrimSpace(lines[pos.Line-1]) + } + + var name string + var sel string + if call != nil { + name = v.fullName(call) + sel = v.selectorName(call) + } + + v.errors = append(v.errors, UncheckedError{pos, line, name, sel}) +} + +func readfile(filename string) []string { + var f, err = os.Open(filename) + if err != nil { + return nil + } + + var lines []string + var scanner = bufio.NewScanner(f) + for scanner.Scan() { + lines = append(lines, scanner.Text()) + } + return lines +} + +func (v *visitor) Visit(node ast.Node) ast.Visitor { + switch stmt := node.(type) { + case *ast.ExprStmt: + if call, ok := stmt.X.(*ast.CallExpr); ok { + if !v.ignoreCall(call) && v.callReturnsError(call) { + v.addErrorAtPosition(call.Lparen, call) + } + } + case *ast.GoStmt: + if !v.ignoreCall(stmt.Call) && v.callReturnsError(stmt.Call) { + v.addErrorAtPosition(stmt.Call.Lparen, stmt.Call) + } + case *ast.DeferStmt: + if !v.ignoreCall(stmt.Call) && v.callReturnsError(stmt.Call) { + v.addErrorAtPosition(stmt.Call.Lparen, stmt.Call) + } + case *ast.AssignStmt: + if len(stmt.Rhs) == 1 { + // single value on rhs; check against lhs identifiers + if call, ok := stmt.Rhs[0].(*ast.CallExpr); ok { + if !v.blank { + break + } + if v.ignoreCall(call) { + break + } + isError := v.errorsByArg(call) + for i := 0; i < len(stmt.Lhs); i++ { + if id, ok := stmt.Lhs[i].(*ast.Ident); ok { + // We shortcut calls to recover() because errorsByArg can't + // check its return types for errors since it returns interface{}. + if id.Name == "_" && (v.isRecover(call) || isError[i]) { + v.addErrorAtPosition(id.NamePos, call) + } + } + } + } else if assert, ok := stmt.Rhs[0].(*ast.TypeAssertExpr); ok { + if !v.asserts { + break + } + if assert.Type == nil { + // type switch + break + } + if len(stmt.Lhs) < 2 { + // assertion result not read + v.addErrorAtPosition(stmt.Rhs[0].Pos(), nil) + } else if id, ok := stmt.Lhs[1].(*ast.Ident); ok && v.blank && id.Name == "_" { + // assertion result ignored + v.addErrorAtPosition(id.NamePos, nil) + } + } + } else { + // multiple value on rhs; in this case a call can't return + // multiple values. 
Assume len(stmt.Lhs) == len(stmt.Rhs) + for i := 0; i < len(stmt.Lhs); i++ { + if id, ok := stmt.Lhs[i].(*ast.Ident); ok { + if call, ok := stmt.Rhs[i].(*ast.CallExpr); ok { + if !v.blank { + continue + } + if v.ignoreCall(call) { + continue + } + if id.Name == "_" && v.callReturnsError(call) { + v.addErrorAtPosition(id.NamePos, call) + } + } else if assert, ok := stmt.Rhs[i].(*ast.TypeAssertExpr); ok { + if !v.asserts { + continue + } + if assert.Type == nil { + // Shouldn't happen anyway, no multi assignment in type switches + continue + } + v.addErrorAtPosition(id.NamePos, nil) + } + } + } + } + default: + } + return v +} + +func isErrorType(t types.Type) bool { + return types.Implements(t, errorType) +} diff --git a/vendor/github.com/kisielk/errcheck/errcheck/tags.go b/vendor/github.com/kisielk/errcheck/errcheck/tags.go new file mode 100644 index 000000000..7b423ca69 --- /dev/null +++ b/vendor/github.com/kisielk/errcheck/errcheck/tags.go @@ -0,0 +1,12 @@ +// +build go1.13 + +package errcheck + +import ( + "fmt" + "strings" +) + +func fmtTags(tags []string) string { + return fmt.Sprintf("-tags=%s", strings.Join(tags, ",")) +} diff --git a/vendor/github.com/kisielk/errcheck/errcheck/tags_compat.go b/vendor/github.com/kisielk/errcheck/errcheck/tags_compat.go new file mode 100644 index 000000000..2f534f40a --- /dev/null +++ b/vendor/github.com/kisielk/errcheck/errcheck/tags_compat.go @@ -0,0 +1,13 @@ +// +build go1.11 +// +build !go1.13 + +package errcheck + +import ( + "fmt" + "strings" +) + +func fmtTags(tags []string) string { + return fmt.Sprintf("-tags=%s", strings.Join(tags, " ")) +} diff --git a/vendor/github.com/kisielk/gotool/.travis.yml b/vendor/github.com/kisielk/gotool/.travis.yml new file mode 100644 index 000000000..d1784e1e2 --- /dev/null +++ b/vendor/github.com/kisielk/gotool/.travis.yml @@ -0,0 +1,23 @@ +sudo: false +language: go +go: + - 1.2 + - 1.3 + - 1.4 + - 1.5 + - 1.6 + - 1.7 + - 1.8 + - 1.9 + - master +matrix: + allow_failures: + - go: master + fast_finish: true +install: + - # Skip. +script: + - go get -t -v ./... + - diff -u <(echo -n) <(gofmt -d .) + - go tool vet . + - go test -v -race ./... diff --git a/vendor/github.com/kisielk/gotool/LEGAL b/vendor/github.com/kisielk/gotool/LEGAL new file mode 100644 index 000000000..72b859cd6 --- /dev/null +++ b/vendor/github.com/kisielk/gotool/LEGAL @@ -0,0 +1,32 @@ +All the files in this distribution are covered under either the MIT +license (see the file LICENSE) except some files mentioned below. + +match.go, match_test.go: + + Copyright (c) 2009 The Go Authors. All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are + met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following disclaimer + in the documentation and/or other materials provided with the + distribution. + * Neither the name of Google Inc. nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT + OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/github.com/kisielk/gotool/LICENSE b/vendor/github.com/kisielk/gotool/LICENSE new file mode 100644 index 000000000..1cbf651e2 --- /dev/null +++ b/vendor/github.com/kisielk/gotool/LICENSE @@ -0,0 +1,20 @@ +Copyright (c) 2013 Kamil Kisiel + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/vendor/github.com/kisielk/gotool/README.md b/vendor/github.com/kisielk/gotool/README.md new file mode 100644 index 000000000..6e4e92b2f --- /dev/null +++ b/vendor/github.com/kisielk/gotool/README.md @@ -0,0 +1,6 @@ +gotool +====== +[![GoDoc](https://godoc.org/github.com/kisielk/gotool?status.svg)](https://godoc.org/github.com/kisielk/gotool) +[![Build Status](https://travis-ci.org/kisielk/gotool.svg?branch=master)](https://travis-ci.org/kisielk/gotool) + +Package gotool contains utility functions used to implement the standard "cmd/go" tool, provided as a convenience to developers who want to write tools with similar semantics. 
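The package's exported surface is small: the ImportPaths helpers defined in tool.go further below expand package patterns ("all", "std", "cmd", and "..." wildcards) much like the go command does, and relative patterns such as ./... stay relative in the result. A minimal usage sketch, relying only on the exported gotool.ImportPaths shown later in this patch (illustrative, not part of the vendored files):

package main

import (
	"fmt"

	"github.com/kisielk/gotool"
)

func main() {
	// Expand a local wildcard and the "std" meta-package into concrete
	// import paths, using the default build context (gotool.DefaultContext).
	for _, p := range gotool.ImportPaths([]string{"./...", "std"}) {
		fmt.Println(p)
	}
}
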
diff --git a/vendor/github.com/kisielk/gotool/go.mod b/vendor/github.com/kisielk/gotool/go.mod new file mode 100644 index 000000000..503b37c6f --- /dev/null +++ b/vendor/github.com/kisielk/gotool/go.mod @@ -0,0 +1 @@ +module "github.com/kisielk/gotool" diff --git a/vendor/github.com/kisielk/gotool/go13.go b/vendor/github.com/kisielk/gotool/go13.go new file mode 100644 index 000000000..2dd9b3fdf --- /dev/null +++ b/vendor/github.com/kisielk/gotool/go13.go @@ -0,0 +1,15 @@ +// +build !go1.4 + +package gotool + +import ( + "go/build" + "path/filepath" + "runtime" +) + +var gorootSrc = filepath.Join(runtime.GOROOT(), "src", "pkg") + +func shouldIgnoreImport(p *build.Package) bool { + return true +} diff --git a/vendor/github.com/kisielk/gotool/go14-15.go b/vendor/github.com/kisielk/gotool/go14-15.go new file mode 100644 index 000000000..aa99a3227 --- /dev/null +++ b/vendor/github.com/kisielk/gotool/go14-15.go @@ -0,0 +1,15 @@ +// +build go1.4,!go1.6 + +package gotool + +import ( + "go/build" + "path/filepath" + "runtime" +) + +var gorootSrc = filepath.Join(runtime.GOROOT(), "src") + +func shouldIgnoreImport(p *build.Package) bool { + return true +} diff --git a/vendor/github.com/kisielk/gotool/go16-18.go b/vendor/github.com/kisielk/gotool/go16-18.go new file mode 100644 index 000000000..f25cec14a --- /dev/null +++ b/vendor/github.com/kisielk/gotool/go16-18.go @@ -0,0 +1,15 @@ +// +build go1.6,!go1.9 + +package gotool + +import ( + "go/build" + "path/filepath" + "runtime" +) + +var gorootSrc = filepath.Join(runtime.GOROOT(), "src") + +func shouldIgnoreImport(p *build.Package) bool { + return p == nil || len(p.InvalidGoFiles) == 0 +} diff --git a/vendor/github.com/kisielk/gotool/internal/load/path.go b/vendor/github.com/kisielk/gotool/internal/load/path.go new file mode 100644 index 000000000..74e15b9d3 --- /dev/null +++ b/vendor/github.com/kisielk/gotool/internal/load/path.go @@ -0,0 +1,27 @@ +// Copyright 2017 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build go1.9 + +package load + +import ( + "strings" +) + +// hasPathPrefix reports whether the path s begins with the +// elements in prefix. +func hasPathPrefix(s, prefix string) bool { + switch { + default: + return false + case len(s) == len(prefix): + return s == prefix + case len(s) > len(prefix): + if prefix != "" && prefix[len(prefix)-1] == '/' { + return strings.HasPrefix(s, prefix) + } + return s[len(prefix)] == '/' && s[:len(prefix)] == prefix + } +} diff --git a/vendor/github.com/kisielk/gotool/internal/load/pkg.go b/vendor/github.com/kisielk/gotool/internal/load/pkg.go new file mode 100644 index 000000000..b937ede75 --- /dev/null +++ b/vendor/github.com/kisielk/gotool/internal/load/pkg.go @@ -0,0 +1,25 @@ +// Copyright 2011 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build go1.9 + +// Package load loads packages. +package load + +import ( + "strings" +) + +// isStandardImportPath reports whether $GOROOT/src/path should be considered +// part of the standard distribution. For historical reasons we allow people to add +// their own code to $GOROOT instead of using $GOPATH, but we assume that +// code will start with a domain name (dot in the first element). 
+func isStandardImportPath(path string) bool { + i := strings.Index(path, "/") + if i < 0 { + i = len(path) + } + elem := path[:i] + return !strings.Contains(elem, ".") +} diff --git a/vendor/github.com/kisielk/gotool/internal/load/search.go b/vendor/github.com/kisielk/gotool/internal/load/search.go new file mode 100644 index 000000000..17ed62dda --- /dev/null +++ b/vendor/github.com/kisielk/gotool/internal/load/search.go @@ -0,0 +1,354 @@ +// Copyright 2017 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build go1.9 + +package load + +import ( + "fmt" + "go/build" + "log" + "os" + "path" + "path/filepath" + "regexp" + "strings" +) + +// Context specifies values for operation of ImportPaths that would +// otherwise come from cmd/go/internal/cfg package. +// +// This is a construct added for gotool purposes and doesn't have +// an equivalent upstream in cmd/go. +type Context struct { + // BuildContext is the build context to use. + BuildContext build.Context + + // GOROOTsrc is the location of the src directory in GOROOT. + // At this time, it's used only in MatchPackages to skip + // GOOROOT/src entry from BuildContext.SrcDirs output. + GOROOTsrc string +} + +// allPackages returns all the packages that can be found +// under the $GOPATH directories and $GOROOT matching pattern. +// The pattern is either "all" (all packages), "std" (standard packages), +// "cmd" (standard commands), or a path including "...". +func (c *Context) allPackages(pattern string) []string { + pkgs := c.MatchPackages(pattern) + if len(pkgs) == 0 { + fmt.Fprintf(os.Stderr, "warning: %q matched no packages\n", pattern) + } + return pkgs +} + +// allPackagesInFS is like allPackages but is passed a pattern +// beginning ./ or ../, meaning it should scan the tree rooted +// at the given directory. There are ... in the pattern too. +func (c *Context) allPackagesInFS(pattern string) []string { + pkgs := c.MatchPackagesInFS(pattern) + if len(pkgs) == 0 { + fmt.Fprintf(os.Stderr, "warning: %q matched no packages\n", pattern) + } + return pkgs +} + +// MatchPackages returns a list of package paths matching pattern +// (see go help packages for pattern syntax). +func (c *Context) MatchPackages(pattern string) []string { + match := func(string) bool { return true } + treeCanMatch := func(string) bool { return true } + if !IsMetaPackage(pattern) { + match = matchPattern(pattern) + treeCanMatch = treeCanMatchPattern(pattern) + } + + have := map[string]bool{ + "builtin": true, // ignore pseudo-package that exists only for documentation + } + if !c.BuildContext.CgoEnabled { + have["runtime/cgo"] = true // ignore during walk + } + var pkgs []string + + for _, src := range c.BuildContext.SrcDirs() { + if (pattern == "std" || pattern == "cmd") && src != c.GOROOTsrc { + continue + } + src = filepath.Clean(src) + string(filepath.Separator) + root := src + if pattern == "cmd" { + root += "cmd" + string(filepath.Separator) + } + filepath.Walk(root, func(path string, fi os.FileInfo, err error) error { + if err != nil || path == src { + return nil + } + + want := true + // Avoid .foo, _foo, and testdata directory trees. + _, elem := filepath.Split(path) + if strings.HasPrefix(elem, ".") || strings.HasPrefix(elem, "_") || elem == "testdata" { + want = false + } + + name := filepath.ToSlash(path[len(src):]) + if pattern == "std" && (!isStandardImportPath(name) || name == "cmd") { + // The name "std" is only the standard library. 
+ // If the name is cmd, it's the root of the command tree. + want = false + } + if !treeCanMatch(name) { + want = false + } + + if !fi.IsDir() { + if fi.Mode()&os.ModeSymlink != 0 && want { + if target, err := os.Stat(path); err == nil && target.IsDir() { + fmt.Fprintf(os.Stderr, "warning: ignoring symlink %s\n", path) + } + } + return nil + } + if !want { + return filepath.SkipDir + } + + if have[name] { + return nil + } + have[name] = true + if !match(name) { + return nil + } + pkg, err := c.BuildContext.ImportDir(path, 0) + if err != nil { + if _, noGo := err.(*build.NoGoError); noGo { + return nil + } + } + + // If we are expanding "cmd", skip main + // packages under cmd/vendor. At least as of + // March, 2017, there is one there for the + // vendored pprof tool. + if pattern == "cmd" && strings.HasPrefix(pkg.ImportPath, "cmd/vendor") && pkg.Name == "main" { + return nil + } + + pkgs = append(pkgs, name) + return nil + }) + } + return pkgs +} + +// MatchPackagesInFS returns a list of package paths matching pattern, +// which must begin with ./ or ../ +// (see go help packages for pattern syntax). +func (c *Context) MatchPackagesInFS(pattern string) []string { + // Find directory to begin the scan. + // Could be smarter but this one optimization + // is enough for now, since ... is usually at the + // end of a path. + i := strings.Index(pattern, "...") + dir, _ := path.Split(pattern[:i]) + + // pattern begins with ./ or ../. + // path.Clean will discard the ./ but not the ../. + // We need to preserve the ./ for pattern matching + // and in the returned import paths. + prefix := "" + if strings.HasPrefix(pattern, "./") { + prefix = "./" + } + match := matchPattern(pattern) + + var pkgs []string + filepath.Walk(dir, func(path string, fi os.FileInfo, err error) error { + if err != nil || !fi.IsDir() { + return nil + } + if path == dir { + // filepath.Walk starts at dir and recurses. For the recursive case, + // the path is the result of filepath.Join, which calls filepath.Clean. + // The initial case is not Cleaned, though, so we do this explicitly. + // + // This converts a path like "./io/" to "io". Without this step, running + // "cd $GOROOT/src; go list ./io/..." would incorrectly skip the io + // package, because prepending the prefix "./" to the unclean path would + // result in "././io", and match("././io") returns false. + path = filepath.Clean(path) + } + + // Avoid .foo, _foo, and testdata directory trees, but do not avoid "." or "..". + _, elem := filepath.Split(path) + dot := strings.HasPrefix(elem, ".") && elem != "." && elem != ".." + if dot || strings.HasPrefix(elem, "_") || elem == "testdata" { + return filepath.SkipDir + } + + name := prefix + filepath.ToSlash(path) + if !match(name) { + return nil + } + + // We keep the directory if we can import it, or if we can't import it + // due to invalid Go source files. This means that directories containing + // parse errors will be built (and fail) instead of being silently skipped + // as not matching the pattern. Go 1.5 and earlier skipped, but that + // behavior means people miss serious mistakes. + // See golang.org/issue/11407. + if p, err := c.BuildContext.ImportDir(path, 0); err != nil && (p == nil || len(p.InvalidGoFiles) == 0) { + if _, noGo := err.(*build.NoGoError); !noGo { + log.Print(err) + } + return nil + } + pkgs = append(pkgs, name) + return nil + }) + return pkgs +} + +// treeCanMatchPattern(pattern)(name) reports whether +// name or children of name can possibly match pattern. 
+// Pattern is the same limited glob accepted by matchPattern. +func treeCanMatchPattern(pattern string) func(name string) bool { + wildCard := false + if i := strings.Index(pattern, "..."); i >= 0 { + wildCard = true + pattern = pattern[:i] + } + return func(name string) bool { + return len(name) <= len(pattern) && hasPathPrefix(pattern, name) || + wildCard && strings.HasPrefix(name, pattern) + } +} + +// matchPattern(pattern)(name) reports whether +// name matches pattern. Pattern is a limited glob +// pattern in which '...' means 'any string' and there +// is no other special syntax. +// Unfortunately, there are two special cases. Quoting "go help packages": +// +// First, /... at the end of the pattern can match an empty string, +// so that net/... matches both net and packages in its subdirectories, like net/http. +// Second, any slash-separted pattern element containing a wildcard never +// participates in a match of the "vendor" element in the path of a vendored +// package, so that ./... does not match packages in subdirectories of +// ./vendor or ./mycode/vendor, but ./vendor/... and ./mycode/vendor/... do. +// Note, however, that a directory named vendor that itself contains code +// is not a vendored package: cmd/vendor would be a command named vendor, +// and the pattern cmd/... matches it. +func matchPattern(pattern string) func(name string) bool { + // Convert pattern to regular expression. + // The strategy for the trailing /... is to nest it in an explicit ? expression. + // The strategy for the vendor exclusion is to change the unmatchable + // vendor strings to a disallowed code point (vendorChar) and to use + // "(anything but that codepoint)*" as the implementation of the ... wildcard. + // This is a bit complicated but the obvious alternative, + // namely a hand-written search like in most shell glob matchers, + // is too easy to make accidentally exponential. + // Using package regexp guarantees linear-time matching. + + const vendorChar = "\x00" + + if strings.Contains(pattern, vendorChar) { + return func(name string) bool { return false } + } + + re := regexp.QuoteMeta(pattern) + re = replaceVendor(re, vendorChar) + switch { + case strings.HasSuffix(re, `/`+vendorChar+`/\.\.\.`): + re = strings.TrimSuffix(re, `/`+vendorChar+`/\.\.\.`) + `(/vendor|/` + vendorChar + `/\.\.\.)` + case re == vendorChar+`/\.\.\.`: + re = `(/vendor|/` + vendorChar + `/\.\.\.)` + case strings.HasSuffix(re, `/\.\.\.`): + re = strings.TrimSuffix(re, `/\.\.\.`) + `(/\.\.\.)?` + } + re = strings.Replace(re, `\.\.\.`, `[^`+vendorChar+`]*`, -1) + + reg := regexp.MustCompile(`^` + re + `$`) + + return func(name string) bool { + if strings.Contains(name, vendorChar) { + return false + } + return reg.MatchString(replaceVendor(name, vendorChar)) + } +} + +// replaceVendor returns the result of replacing +// non-trailing vendor path elements in x with repl. +func replaceVendor(x, repl string) string { + if !strings.Contains(x, "vendor") { + return x + } + elem := strings.Split(x, "/") + for i := 0; i < len(elem)-1; i++ { + if elem[i] == "vendor" { + elem[i] = repl + } + } + return strings.Join(elem, "/") +} + +// ImportPaths returns the import paths to use for the given command line. +func (c *Context) ImportPaths(args []string) []string { + args = c.ImportPathsNoDotExpansion(args) + var out []string + for _, a := range args { + if strings.Contains(a, "...") { + if build.IsLocalImport(a) { + out = append(out, c.allPackagesInFS(a)...) + } else { + out = append(out, c.allPackages(a)...) 
+ } + continue + } + out = append(out, a) + } + return out +} + +// ImportPathsNoDotExpansion returns the import paths to use for the given +// command line, but it does no ... expansion. +func (c *Context) ImportPathsNoDotExpansion(args []string) []string { + if len(args) == 0 { + return []string{"."} + } + var out []string + for _, a := range args { + // Arguments are supposed to be import paths, but + // as a courtesy to Windows developers, rewrite \ to / + // in command-line arguments. Handles .\... and so on. + if filepath.Separator == '\\' { + a = strings.Replace(a, `\`, `/`, -1) + } + + // Put argument in canonical form, but preserve leading ./. + if strings.HasPrefix(a, "./") { + a = "./" + path.Clean(a) + if a == "./." { + a = "." + } + } else { + a = path.Clean(a) + } + if IsMetaPackage(a) { + out = append(out, c.allPackages(a)...) + continue + } + out = append(out, a) + } + return out +} + +// IsMetaPackage checks if name is a reserved package name that expands to multiple packages. +func IsMetaPackage(name string) bool { + return name == "std" || name == "cmd" || name == "all" +} diff --git a/vendor/github.com/kisielk/gotool/match.go b/vendor/github.com/kisielk/gotool/match.go new file mode 100644 index 000000000..4dbdbff47 --- /dev/null +++ b/vendor/github.com/kisielk/gotool/match.go @@ -0,0 +1,56 @@ +// Copyright (c) 2009 The Go Authors. All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// +build go1.9 + +package gotool + +import ( + "path/filepath" + + "github.com/kisielk/gotool/internal/load" +) + +// importPaths returns the import paths to use for the given command line. +func (c *Context) importPaths(args []string) []string { + lctx := load.Context{ + BuildContext: c.BuildContext, + GOROOTsrc: c.joinPath(c.BuildContext.GOROOT, "src"), + } + return lctx.ImportPaths(args) +} + +// joinPath calls c.BuildContext.JoinPath (if not nil) or else filepath.Join. +// +// It's a copy of the unexported build.Context.joinPath helper. 
+func (c *Context) joinPath(elem ...string) string { + if f := c.BuildContext.JoinPath; f != nil { + return f(elem...) + } + return filepath.Join(elem...) +} diff --git a/vendor/github.com/kisielk/gotool/match18.go b/vendor/github.com/kisielk/gotool/match18.go new file mode 100644 index 000000000..6d6b1368c --- /dev/null +++ b/vendor/github.com/kisielk/gotool/match18.go @@ -0,0 +1,317 @@ +// Copyright (c) 2009 The Go Authors. All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// +build !go1.9 + +package gotool + +import ( + "fmt" + "go/build" + "log" + "os" + "path" + "path/filepath" + "regexp" + "strings" +) + +// This file contains code from the Go distribution. + +// matchPattern(pattern)(name) reports whether +// name matches pattern. Pattern is a limited glob +// pattern in which '...' means 'any string' and there +// is no other special syntax. +func matchPattern(pattern string) func(name string) bool { + re := regexp.QuoteMeta(pattern) + re = strings.Replace(re, `\.\.\.`, `.*`, -1) + // Special case: foo/... matches foo too. + if strings.HasSuffix(re, `/.*`) { + re = re[:len(re)-len(`/.*`)] + `(/.*)?` + } + reg := regexp.MustCompile(`^` + re + `$`) + return reg.MatchString +} + +// matchPackages returns a list of package paths matching pattern +// (see go help packages for pattern syntax). 
+func (c *Context) matchPackages(pattern string) []string { + match := func(string) bool { return true } + treeCanMatch := func(string) bool { return true } + if !isMetaPackage(pattern) { + match = matchPattern(pattern) + treeCanMatch = treeCanMatchPattern(pattern) + } + + have := map[string]bool{ + "builtin": true, // ignore pseudo-package that exists only for documentation + } + if !c.BuildContext.CgoEnabled { + have["runtime/cgo"] = true // ignore during walk + } + var pkgs []string + + for _, src := range c.BuildContext.SrcDirs() { + if (pattern == "std" || pattern == "cmd") && src != gorootSrc { + continue + } + src = filepath.Clean(src) + string(filepath.Separator) + root := src + if pattern == "cmd" { + root += "cmd" + string(filepath.Separator) + } + filepath.Walk(root, func(path string, fi os.FileInfo, err error) error { + if err != nil || !fi.IsDir() || path == src { + return nil + } + + // Avoid .foo, _foo, and testdata directory trees. + _, elem := filepath.Split(path) + if strings.HasPrefix(elem, ".") || strings.HasPrefix(elem, "_") || elem == "testdata" { + return filepath.SkipDir + } + + name := filepath.ToSlash(path[len(src):]) + if pattern == "std" && (!isStandardImportPath(name) || name == "cmd") { + // The name "std" is only the standard library. + // If the name is cmd, it's the root of the command tree. + return filepath.SkipDir + } + if !treeCanMatch(name) { + return filepath.SkipDir + } + if have[name] { + return nil + } + have[name] = true + if !match(name) { + return nil + } + _, err = c.BuildContext.ImportDir(path, 0) + if err != nil { + if _, noGo := err.(*build.NoGoError); noGo { + return nil + } + } + pkgs = append(pkgs, name) + return nil + }) + } + return pkgs +} + +// importPathsNoDotExpansion returns the import paths to use for the given +// command line, but it does no ... expansion. +func (c *Context) importPathsNoDotExpansion(args []string) []string { + if len(args) == 0 { + return []string{"."} + } + var out []string + for _, a := range args { + // Arguments are supposed to be import paths, but + // as a courtesy to Windows developers, rewrite \ to / + // in command-line arguments. Handles .\... and so on. + if filepath.Separator == '\\' { + a = strings.Replace(a, `\`, `/`, -1) + } + + // Put argument in canonical form, but preserve leading ./. + if strings.HasPrefix(a, "./") { + a = "./" + path.Clean(a) + if a == "./." { + a = "." + } + } else { + a = path.Clean(a) + } + if isMetaPackage(a) { + out = append(out, c.allPackages(a)...) + continue + } + out = append(out, a) + } + return out +} + +// importPaths returns the import paths to use for the given command line. +func (c *Context) importPaths(args []string) []string { + args = c.importPathsNoDotExpansion(args) + var out []string + for _, a := range args { + if strings.Contains(a, "...") { + if build.IsLocalImport(a) { + out = append(out, c.allPackagesInFS(a)...) + } else { + out = append(out, c.allPackages(a)...) + } + continue + } + out = append(out, a) + } + return out +} + +// allPackages returns all the packages that can be found +// under the $GOPATH directories and $GOROOT matching pattern. +// The pattern is either "all" (all packages), "std" (standard packages), +// "cmd" (standard commands), or a path including "...". 
+func (c *Context) allPackages(pattern string) []string { + pkgs := c.matchPackages(pattern) + if len(pkgs) == 0 { + fmt.Fprintf(os.Stderr, "warning: %q matched no packages\n", pattern) + } + return pkgs +} + +// allPackagesInFS is like allPackages but is passed a pattern +// beginning ./ or ../, meaning it should scan the tree rooted +// at the given directory. There are ... in the pattern too. +func (c *Context) allPackagesInFS(pattern string) []string { + pkgs := c.matchPackagesInFS(pattern) + if len(pkgs) == 0 { + fmt.Fprintf(os.Stderr, "warning: %q matched no packages\n", pattern) + } + return pkgs +} + +// matchPackagesInFS returns a list of package paths matching pattern, +// which must begin with ./ or ../ +// (see go help packages for pattern syntax). +func (c *Context) matchPackagesInFS(pattern string) []string { + // Find directory to begin the scan. + // Could be smarter but this one optimization + // is enough for now, since ... is usually at the + // end of a path. + i := strings.Index(pattern, "...") + dir, _ := path.Split(pattern[:i]) + + // pattern begins with ./ or ../. + // path.Clean will discard the ./ but not the ../. + // We need to preserve the ./ for pattern matching + // and in the returned import paths. + prefix := "" + if strings.HasPrefix(pattern, "./") { + prefix = "./" + } + match := matchPattern(pattern) + + var pkgs []string + filepath.Walk(dir, func(path string, fi os.FileInfo, err error) error { + if err != nil || !fi.IsDir() { + return nil + } + if path == dir { + // filepath.Walk starts at dir and recurses. For the recursive case, + // the path is the result of filepath.Join, which calls filepath.Clean. + // The initial case is not Cleaned, though, so we do this explicitly. + // + // This converts a path like "./io/" to "io". Without this step, running + // "cd $GOROOT/src; go list ./io/..." would incorrectly skip the io + // package, because prepending the prefix "./" to the unclean path would + // result in "././io", and match("././io") returns false. + path = filepath.Clean(path) + } + + // Avoid .foo, _foo, and testdata directory trees, but do not avoid "." or "..". + _, elem := filepath.Split(path) + dot := strings.HasPrefix(elem, ".") && elem != "." && elem != ".." + if dot || strings.HasPrefix(elem, "_") || elem == "testdata" { + return filepath.SkipDir + } + + name := prefix + filepath.ToSlash(path) + if !match(name) { + return nil + } + + // We keep the directory if we can import it, or if we can't import it + // due to invalid Go source files. This means that directories containing + // parse errors will be built (and fail) instead of being silently skipped + // as not matching the pattern. Go 1.5 and earlier skipped, but that + // behavior means people miss serious mistakes. + // See golang.org/issue/11407. + if p, err := c.BuildContext.ImportDir(path, 0); err != nil && shouldIgnoreImport(p) { + if _, noGo := err.(*build.NoGoError); !noGo { + log.Print(err) + } + return nil + } + pkgs = append(pkgs, name) + return nil + }) + return pkgs +} + +// isMetaPackage checks if name is a reserved package name that expands to multiple packages. +func isMetaPackage(name string) bool { + return name == "std" || name == "cmd" || name == "all" +} + +// isStandardImportPath reports whether $GOROOT/src/path should be considered +// part of the standard distribution. For historical reasons we allow people to add +// their own code to $GOROOT instead of using $GOPATH, but we assume that +// code will start with a domain name (dot in the first element). 
+func isStandardImportPath(path string) bool { + i := strings.Index(path, "/") + if i < 0 { + i = len(path) + } + elem := path[:i] + return !strings.Contains(elem, ".") +} + +// hasPathPrefix reports whether the path s begins with the +// elements in prefix. +func hasPathPrefix(s, prefix string) bool { + switch { + default: + return false + case len(s) == len(prefix): + return s == prefix + case len(s) > len(prefix): + if prefix != "" && prefix[len(prefix)-1] == '/' { + return strings.HasPrefix(s, prefix) + } + return s[len(prefix)] == '/' && s[:len(prefix)] == prefix + } +} + +// treeCanMatchPattern(pattern)(name) reports whether +// name or children of name can possibly match pattern. +// Pattern is the same limited glob accepted by matchPattern. +func treeCanMatchPattern(pattern string) func(name string) bool { + wildCard := false + if i := strings.Index(pattern, "..."); i >= 0 { + wildCard = true + pattern = pattern[:i] + } + return func(name string) bool { + return len(name) <= len(pattern) && hasPathPrefix(pattern, name) || + wildCard && strings.HasPrefix(name, pattern) + } +} diff --git a/vendor/github.com/kisielk/gotool/tool.go b/vendor/github.com/kisielk/gotool/tool.go new file mode 100644 index 000000000..c7409e11e --- /dev/null +++ b/vendor/github.com/kisielk/gotool/tool.go @@ -0,0 +1,48 @@ +// Package gotool contains utility functions used to implement the standard +// "cmd/go" tool, provided as a convenience to developers who want to write +// tools with similar semantics. +package gotool + +import "go/build" + +// Export functions here to make it easier to keep the implementations up to date with upstream. + +// DefaultContext is the default context that uses build.Default. +var DefaultContext = Context{ + BuildContext: build.Default, +} + +// A Context specifies the supporting context. +type Context struct { + // BuildContext is the build.Context that is used when computing import paths. + BuildContext build.Context +} + +// ImportPaths returns the import paths to use for the given command line. +// +// The path "all" is expanded to all packages in $GOPATH and $GOROOT. +// The path "std" is expanded to all packages in the Go standard library. +// The path "cmd" is expanded to all Go standard commands. +// The string "..." is treated as a wildcard within a path. +// When matching recursively, directories are ignored if they are prefixed with +// a dot or an underscore (such as ".foo" or "_foo"), or are named "testdata". +// Relative import paths are not converted to full import paths. +// If args is empty, a single element "." is returned. +func (c *Context) ImportPaths(args []string) []string { + return c.importPaths(args) +} + +// ImportPaths returns the import paths to use for the given command line +// using default context. +// +// The path "all" is expanded to all packages in $GOPATH and $GOROOT. +// The path "std" is expanded to all packages in the Go standard library. +// The path "cmd" is expanded to all Go standard commands. +// The string "..." is treated as a wildcard within a path. +// When matching recursively, directories are ignored if they are prefixed with +// a dot or an underscore (such as ".foo" or "_foo"), or are named "testdata". +// Relative import paths are not converted to full import paths. +// If args is empty, a single element "." is returned. 
+func ImportPaths(args []string) []string { + return DefaultContext.importPaths(args) +} diff --git a/vendor/github.com/kulti/thelper/LICENSE b/vendor/github.com/kulti/thelper/LICENSE new file mode 100644 index 000000000..e070215fe --- /dev/null +++ b/vendor/github.com/kulti/thelper/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2020 Aleksey Bakin + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/github.com/kulti/thelper/pkg/analyzer/analyzer.go b/vendor/github.com/kulti/thelper/pkg/analyzer/analyzer.go new file mode 100644 index 000000000..2f8dba957 --- /dev/null +++ b/vendor/github.com/kulti/thelper/pkg/analyzer/analyzer.go @@ -0,0 +1,416 @@ +package analyzer + +import ( + "flag" + "fmt" + "go/ast" + "go/token" + "go/types" + "sort" + "strings" + + "github.com/gostaticanalysis/analysisutil" + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/ast/inspector" +) + +const ( + doc = "thelper detects tests helpers which is not start with t.Helper() method." 
+ checksDoc = `coma separated list of enabled checks + +Available checks + +` + checkTBegin + ` - check t.Helper() begins helper function +` + checkTFirst + ` - check *testing.T is first param of helper function +` + checkTName + ` - check *testing.T param has t name + +Also available similar checks for benchmark and TB helpers: ` + + checkBBegin + `, ` + checkBFirst + `, ` + checkBName + `,` + + checkTBBegin + `, ` + checkTBFirst + `, ` + checkTBName + ` + +` +) + +type enabledChecksValue map[string]struct{} + +func (m enabledChecksValue) Enabled(c string) bool { + _, ok := m[c] + return ok +} + +func (m enabledChecksValue) String() string { + ss := make([]string, 0, len(m)) + for s := range m { + ss = append(ss, s) + } + sort.Strings(ss) + return strings.Join(ss, ",") +} + +func (m enabledChecksValue) Set(s string) error { + ss := strings.FieldsFunc(s, func(c rune) bool { return c == ',' }) + if len(ss) == 0 { + return nil + } + + for k := range m { + delete(m, k) + } + for _, v := range ss { + switch v { + case checkTBegin, checkTFirst, checkTName, + checkBBegin, checkBFirst, checkBName, + checkTBBegin, checkTBFirst, checkTBName: + m[v] = struct{}{} + default: + return fmt.Errorf("unknown check name %q (see help for full list)", v) + } + } + return nil +} + +const ( + checkTBegin = "t_begin" + checkTFirst = "t_first" + checkTName = "t_name" + checkBBegin = "b_begin" + checkBFirst = "b_first" + checkBName = "b_name" + checkTBBegin = "tb_begin" + checkTBFirst = "tb_first" + checkTBName = "tb_name" +) + +type thelper struct { + enabledChecks enabledChecksValue +} + +// NewAnalyzer return a new thelper analyzer. +// thelper analyzes Go test codes how they use t.Helper() method. +func NewAnalyzer() *analysis.Analyzer { + thelper := thelper{} + thelper.enabledChecks = enabledChecksValue{ + checkTBegin: struct{}{}, + checkTFirst: struct{}{}, + checkTName: struct{}{}, + checkBBegin: struct{}{}, + checkBFirst: struct{}{}, + checkBName: struct{}{}, + checkTBBegin: struct{}{}, + checkTBFirst: struct{}{}, + checkTBName: struct{}{}, + } + + a := &analysis.Analyzer{ + Name: "thelper", + Doc: doc, + Run: thelper.run, + Requires: []*analysis.Analyzer{ + inspect.Analyzer, + }, + } + + a.Flags.Init("thelper", flag.ExitOnError) + a.Flags.Var(&thelper.enabledChecks, "checks", checksDoc) + + return a +} + +func (t thelper) run(pass *analysis.Pass) (interface{}, error) { + tCheckOpts, bCheckOpts, tbCheckOpts, ok := t.buildCheckFuncOpts(pass) + if !ok { + return nil, nil + } + + var reports reports + inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + nodeFilter := []ast.Node{ + (*ast.FuncDecl)(nil), + (*ast.FuncLit)(nil), + (*ast.CallExpr)(nil), + } + inspect.Preorder(nodeFilter, func(node ast.Node) { + var fd funcDecl + switch n := node.(type) { + case *ast.FuncLit: + fd.Pos = n.Pos() + fd.Type = n.Type + fd.Body = n.Body + fd.Name = ast.NewIdent("") + case *ast.FuncDecl: + fd.Pos = n.Name.NamePos + fd.Type = n.Type + fd.Body = n.Body + fd.Name = n.Name + case *ast.CallExpr: + tbRunSubtestExpr := extractSubtestExp(pass, n, tCheckOpts.tbRun) + if tbRunSubtestExpr == nil { + tbRunSubtestExpr = extractSubtestExp(pass, n, bCheckOpts.tbRun) + } + + if tbRunSubtestExpr != nil { + reports.Filter(funcDefPosition(pass, tbRunSubtestExpr)) + } else { + reports.NoFilter(funcDefPosition(pass, n.Fun)) + } + return + default: + return + } + + checkFunc(pass, &reports, fd, tCheckOpts) + checkFunc(pass, &reports, fd, bCheckOpts) + checkFunc(pass, &reports, fd, tbCheckOpts) + }) + + reports.Flush(pass) + + 
return nil, nil +} + +type checkFuncOpts struct { + skipPrefix string + varName string + tbHelper types.Object + tbRun types.Object + tbType types.Type + ctxType types.Type + checkBegin bool + checkFirst bool + checkName bool +} + +func (t thelper) buildCheckFuncOpts(pass *analysis.Pass) (checkFuncOpts, checkFuncOpts, checkFuncOpts, bool) { + var ctxType types.Type + ctxObj := analysisutil.ObjectOf(pass, "context", "Context") + if ctxObj != nil { + ctxType = ctxObj.Type() + } + + tCheckOpts, ok := t.buildTestCheckFuncOpts(pass, ctxType) + if !ok { + return checkFuncOpts{}, checkFuncOpts{}, checkFuncOpts{}, false + } + + bCheckOpts, ok := t.buildBenchmarkCheckFuncOpts(pass, ctxType) + if !ok { + return checkFuncOpts{}, checkFuncOpts{}, checkFuncOpts{}, false + } + + tbCheckOpts, ok := t.buildTBCheckFuncOpts(pass, ctxType) + if !ok { + return checkFuncOpts{}, checkFuncOpts{}, checkFuncOpts{}, false + } + + return tCheckOpts, bCheckOpts, tbCheckOpts, true +} + +func (t thelper) buildTestCheckFuncOpts(pass *analysis.Pass, ctxType types.Type) (checkFuncOpts, bool) { + tObj := analysisutil.ObjectOf(pass, "testing", "T") + if tObj == nil { + return checkFuncOpts{}, false + } + + tHelper, _, _ := types.LookupFieldOrMethod(tObj.Type(), true, tObj.Pkg(), "Helper") + if tHelper == nil { + return checkFuncOpts{}, false + } + + tRun, _, _ := types.LookupFieldOrMethod(tObj.Type(), true, tObj.Pkg(), "Run") + if tRun == nil { + return checkFuncOpts{}, false + } + + return checkFuncOpts{ + skipPrefix: "Test", + varName: "t", + tbHelper: tHelper, + tbRun: tRun, + tbType: types.NewPointer(tObj.Type()), + ctxType: ctxType, + checkBegin: t.enabledChecks.Enabled(checkTBegin), + checkFirst: t.enabledChecks.Enabled(checkTFirst), + checkName: t.enabledChecks.Enabled(checkTName), + }, true +} + +func (t thelper) buildBenchmarkCheckFuncOpts(pass *analysis.Pass, ctxType types.Type) (checkFuncOpts, bool) { + bObj := analysisutil.ObjectOf(pass, "testing", "B") + if bObj == nil { + return checkFuncOpts{}, false + } + + bHelper, _, _ := types.LookupFieldOrMethod(bObj.Type(), true, bObj.Pkg(), "Helper") + if bHelper == nil { + return checkFuncOpts{}, false + } + + bRun, _, _ := types.LookupFieldOrMethod(bObj.Type(), true, bObj.Pkg(), "Run") + if bRun == nil { + return checkFuncOpts{}, false + } + + return checkFuncOpts{ + skipPrefix: "Benchmark", + varName: "b", + tbHelper: bHelper, + tbRun: bRun, + tbType: types.NewPointer(bObj.Type()), + ctxType: ctxType, + checkBegin: t.enabledChecks.Enabled(checkBBegin), + checkFirst: t.enabledChecks.Enabled(checkBFirst), + checkName: t.enabledChecks.Enabled(checkBName), + }, true +} + +func (t thelper) buildTBCheckFuncOpts(pass *analysis.Pass, ctxType types.Type) (checkFuncOpts, bool) { + tbObj := analysisutil.ObjectOf(pass, "testing", "TB") + if tbObj == nil { + return checkFuncOpts{}, false + } + + tbHelper, _, _ := types.LookupFieldOrMethod(tbObj.Type(), true, tbObj.Pkg(), "Helper") + if tbHelper == nil { + return checkFuncOpts{}, false + } + + return checkFuncOpts{ + skipPrefix: "", + varName: "tb", + tbHelper: tbHelper, + tbType: tbObj.Type(), + ctxType: ctxType, + checkBegin: t.enabledChecks.Enabled(checkTBBegin), + checkFirst: t.enabledChecks.Enabled(checkTBFirst), + checkName: t.enabledChecks.Enabled(checkTBName), + }, true +} + +type funcDecl struct { + Pos token.Pos + Name *ast.Ident + Type *ast.FuncType + Body *ast.BlockStmt +} + +func checkFunc(pass *analysis.Pass, reports *reports, funcDecl funcDecl, opts checkFuncOpts) { + if opts.skipPrefix != "" && 
strings.HasPrefix(funcDecl.Name.Name, opts.skipPrefix) { + return + } + + p, pos, ok := searchFuncParam(pass, funcDecl, opts.tbType) + if !ok { + return + } + + if opts.checkFirst { + if pos != 0 { + checkFirstPassed := false + if pos == 1 && opts.ctxType != nil { + _, pos, ok := searchFuncParam(pass, funcDecl, opts.ctxType) + checkFirstPassed = ok && (pos == 0) + } + + if !checkFirstPassed { + reports.Reportf(funcDecl.Pos, "parameter %s should be the first or after context.Context", opts.tbType) + } + } + } + + if len(p.Names) > 0 && p.Names[0].Name != "_" { + if opts.checkName { + if p.Names[0].Name != opts.varName { + reports.Reportf(funcDecl.Pos, "parameter %s should have name %s", opts.tbType, opts.varName) + } + } + + if opts.checkBegin { + if len(funcDecl.Body.List) == 0 || !isTHelperCall(pass, funcDecl.Body.List[0], opts.tbHelper) { + reports.Reportf(funcDecl.Pos, "test helper function should start from %s.Helper()", opts.varName) + } + } + } +} + +func searchFuncParam(pass *analysis.Pass, f funcDecl, p types.Type) (*ast.Field, int, bool) { + for i, f := range f.Type.Params.List { + typeInfo, ok := pass.TypesInfo.Types[f.Type] + if !ok { + continue + } + + if types.Identical(typeInfo.Type, p) { + return f, i, true + } + } + return nil, 0, false +} + +func isTHelperCall(pass *analysis.Pass, s ast.Stmt, tHelper types.Object) bool { + exprStmt, ok := s.(*ast.ExprStmt) + if !ok { + return false + } + + callExpr, ok := exprStmt.X.(*ast.CallExpr) + if !ok { + return false + } + + selExpr, ok := callExpr.Fun.(*ast.SelectorExpr) + if !ok { + return false + } + + return isSelectorCall(pass, selExpr, tHelper) +} + +func extractSubtestExp(pass *analysis.Pass, e *ast.CallExpr, tbRun types.Object) ast.Expr { + selExpr, ok := e.Fun.(*ast.SelectorExpr) + if !ok { + return nil + } + + if !isSelectorCall(pass, selExpr, tbRun) { + return nil + } + + if len(e.Args) != 2 { + return nil + } + + return e.Args[1] +} + +func funcDefPosition(pass *analysis.Pass, e ast.Expr) token.Pos { + anonFunLit, ok := e.(*ast.FuncLit) + if ok { + return anonFunLit.Pos() + } + + funIdent, ok := e.(*ast.Ident) + if !ok { + selExpr, ok := e.(*ast.SelectorExpr) + if !ok { + return token.NoPos + } + funIdent = selExpr.Sel + } + + funDef, ok := pass.TypesInfo.Uses[funIdent] + if !ok { + return token.NoPos + } + + return funDef.Pos() +} + +func isSelectorCall(pass *analysis.Pass, selExpr *ast.SelectorExpr, callObj types.Object) bool { + sel, ok := pass.TypesInfo.Selections[selExpr] + if !ok { + return false + } + + return sel.Obj() == callObj +} diff --git a/vendor/github.com/kulti/thelper/pkg/analyzer/report.go b/vendor/github.com/kulti/thelper/pkg/analyzer/report.go new file mode 100644 index 000000000..4a23e36d5 --- /dev/null +++ b/vendor/github.com/kulti/thelper/pkg/analyzer/report.go @@ -0,0 +1,56 @@ +package analyzer + +import ( + "go/token" + + "golang.org/x/tools/go/analysis" +) + +type reports struct { + reports []report + filter map[token.Pos]struct{} + nofilter map[token.Pos]struct{} +} + +type report struct { + pos token.Pos + format string + args []interface{} +} + +func (rr *reports) Reportf(pos token.Pos, format string, args ...interface{}) { + rr.reports = append(rr.reports, report{ + pos: pos, + format: format, + args: args, + }) +} + +func (rr *reports) Filter(pos token.Pos) { + if pos.IsValid() { + if rr.filter == nil { + rr.filter = make(map[token.Pos]struct{}) + } + rr.filter[pos] = struct{}{} + } +} + +func (rr *reports) NoFilter(pos token.Pos) { + if pos.IsValid() { + if rr.nofilter == nil { + 
rr.nofilter = make(map[token.Pos]struct{}) + } + rr.nofilter[pos] = struct{}{} + } +} + +func (rr reports) Flush(pass *analysis.Pass) { + for _, r := range rr.reports { + if _, ok := rr.filter[r.pos]; ok { + if _, ok := rr.nofilter[r.pos]; !ok { + continue + } + } + pass.Reportf(r.pos, r.format, r.args...) + } +} diff --git a/vendor/github.com/kunwardeep/paralleltest/LICENSE b/vendor/github.com/kunwardeep/paralleltest/LICENSE new file mode 100644 index 000000000..d06a809c2 --- /dev/null +++ b/vendor/github.com/kunwardeep/paralleltest/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2020 Isaev Denis + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/github.com/kunwardeep/paralleltest/pkg/paralleltest/paralleltest.go b/vendor/github.com/kunwardeep/paralleltest/pkg/paralleltest/paralleltest.go new file mode 100644 index 000000000..31f6f2946 --- /dev/null +++ b/vendor/github.com/kunwardeep/paralleltest/pkg/paralleltest/paralleltest.go @@ -0,0 +1,256 @@ +package paralleltest + +import ( + "go/ast" + "strings" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/ast/inspector" +) + +const Doc = `check that tests use t.Parallel() method +It also checks that the t.Parallel is used if multiple tests cases are run as part of single test. 
+As part of ensuring parallel tests works as expected it checks for reinitialising of the range value +over the test cases.(https://tinyurl.com/y6555cy6)` + +func NewAnalyzer() *analysis.Analyzer { + return &analysis.Analyzer{ + Name: "paralleltest", + Doc: Doc, + Run: run, + Requires: []*analysis.Analyzer{inspect.Analyzer}, + } +} + +func run(pass *analysis.Pass) (interface{}, error) { + inspector := inspector.New(pass.Files) + + nodeFilter := []ast.Node{ + (*ast.FuncDecl)(nil), + } + + inspector.Preorder(nodeFilter, func(node ast.Node) { + funcDecl := node.(*ast.FuncDecl) + var funcHasParallelMethod, + rangeStatementOverTestCasesExists, + rangeStatementHasParallelMethod, + testLoopVariableReinitialised bool + var testRunLoopIdentifier string + var numberOfTestRun int + var positionOfTestRunNode []ast.Node + var rangeNode ast.Node + + // Check runs for test functions only + if !isTestFunction(funcDecl) { + return + } + + for _, l := range funcDecl.Body.List { + switch v := l.(type) { + + case *ast.ExprStmt: + ast.Inspect(v, func(n ast.Node) bool { + // Check if the test method is calling t.parallel + if !funcHasParallelMethod { + funcHasParallelMethod = methodParallelIsCalledInTestFunction(n) + } + + // Check if the t.Run within the test function is calling t.parallel + if methodRunIsCalledInTestFunction(n) { + hasParallel := false + numberOfTestRun++ + ast.Inspect(v, func(p ast.Node) bool { + if !hasParallel { + hasParallel = methodParallelIsCalledInTestFunction(p) + } + return true + }) + if !hasParallel { + positionOfTestRunNode = append(positionOfTestRunNode, n) + } + } + return true + }) + + // Check if the range over testcases is calling t.parallel + case *ast.RangeStmt: + rangeNode = v + + ast.Inspect(v, func(n ast.Node) bool { + // nolint: gocritic + switch r := n.(type) { + case *ast.ExprStmt: + if methodRunIsCalledInRangeStatement(r.X) { + rangeStatementOverTestCasesExists = true + testRunLoopIdentifier = methodRunFirstArgumentObjectName(r.X) + + if !rangeStatementHasParallelMethod { + rangeStatementHasParallelMethod = methodParallelIsCalledInMethodRun(r.X) + } + } + } + return true + }) + + // Check for the range loop value identifier re assignment + // More info here https://gist.github.com/kunwardeep/80c2e9f3d3256c894898bae82d9f75d0 + if rangeStatementOverTestCasesExists { + var rangeValueIdentifier string + if i, ok := v.Value.(*ast.Ident); ok { + rangeValueIdentifier = i.Name + } + + testLoopVariableReinitialised = testCaseLoopVariableReinitialised(v.Body.List, rangeValueIdentifier, testRunLoopIdentifier) + } + } + } + + if !funcHasParallelMethod { + pass.Reportf(node.Pos(), "Function %s missing the call to method parallel\n", funcDecl.Name.Name) + } + + if rangeStatementOverTestCasesExists && rangeNode != nil { + if !rangeStatementHasParallelMethod { + pass.Reportf(rangeNode.Pos(), "Range statement for test %s missing the call to method parallel in test Run\n", funcDecl.Name.Name) + } else { + if testRunLoopIdentifier == "" { + pass.Reportf(rangeNode.Pos(), "Range statement for test %s does not use range value in test Run\n", funcDecl.Name.Name) + } else if !testLoopVariableReinitialised { + pass.Reportf(rangeNode.Pos(), "Range statement for test %s does not reinitialise the variable %s\n", funcDecl.Name.Name, testRunLoopIdentifier) + } + } + } + + // Check if the t.Run is more than one as there is no point making one test parallel + if numberOfTestRun > 1 && len(positionOfTestRunNode) > 0 { + for _, n := range positionOfTestRunNode { + pass.Reportf(n.Pos(), "Function %s 
has missing the call to method parallel in the test run\n", funcDecl.Name.Name) + } + } + }) + + return nil, nil +} + +func testCaseLoopVariableReinitialised(statements []ast.Stmt, rangeValueIdentifier string, testRunLoopIdentifier string) bool { + if len(statements) > 1 { + for _, s := range statements { + leftIdentifier, rightIdentifier := getLeftAndRightIdentifier(s) + if leftIdentifier == testRunLoopIdentifier && rightIdentifier == rangeValueIdentifier { + return true + } + } + } + return false +} + +// Return the left hand side and the right hand side identifiers name +func getLeftAndRightIdentifier(s ast.Stmt) (string, string) { + var leftIdentifier, rightIdentifier string + // nolint: gocritic + switch v := s.(type) { + case *ast.AssignStmt: + if len(v.Rhs) == 1 { + if i, ok := v.Rhs[0].(*ast.Ident); ok { + rightIdentifier = i.Name + } + } + if len(v.Lhs) == 1 { + if i, ok := v.Lhs[0].(*ast.Ident); ok { + leftIdentifier = i.Name + } + } + } + return leftIdentifier, rightIdentifier +} + +func methodParallelIsCalledInMethodRun(node ast.Node) bool { + var methodParallelCalled bool + // nolint: gocritic + switch callExp := node.(type) { + case *ast.CallExpr: + for _, arg := range callExp.Args { + if !methodParallelCalled { + ast.Inspect(arg, func(n ast.Node) bool { + if !methodParallelCalled { + methodParallelCalled = methodParallelIsCalledInRunMethod(n) + return true + } + return false + }) + } + } + } + return methodParallelCalled +} + +func methodParallelIsCalledInRunMethod(node ast.Node) bool { + return exprCallHasMethod(node, "Parallel") +} + +func methodParallelIsCalledInTestFunction(node ast.Node) bool { + return exprCallHasMethod(node, "Parallel") +} + +func methodRunIsCalledInRangeStatement(node ast.Node) bool { + return exprCallHasMethod(node, "Run") +} + +func methodRunIsCalledInTestFunction(node ast.Node) bool { + return exprCallHasMethod(node, "Run") +} +func exprCallHasMethod(node ast.Node, methodName string) bool { + // nolint: gocritic + switch n := node.(type) { + case *ast.CallExpr: + if fun, ok := n.Fun.(*ast.SelectorExpr); ok { + return fun.Sel.Name == methodName + } + } + return false +} + +// Gets the object name `tc` from method t.Run(tc.Foo, func(t *testing.T) +func methodRunFirstArgumentObjectName(node ast.Node) string { + // nolint: gocritic + switch n := node.(type) { + case *ast.CallExpr: + for _, arg := range n.Args { + if s, ok := arg.(*ast.SelectorExpr); ok { + if i, ok := s.X.(*ast.Ident); ok { + return i.Name + } + } + } + } + return "" +} + +// Checks if the function has the param type *testing.T) +func isTestFunction(funcDecl *ast.FuncDecl) bool { + testMethodPackageType := "testing" + testMethodStruct := "T" + testPrefix := "Test" + + if !strings.HasPrefix(funcDecl.Name.Name, testPrefix) { + return false + } + + if funcDecl.Type.Params != nil && len(funcDecl.Type.Params.List) != 1 { + return false + } + + param := funcDecl.Type.Params.List[0] + if starExp, ok := param.Type.(*ast.StarExpr); ok { + if selectExpr, ok := starExp.X.(*ast.SelectorExpr); ok { + if selectExpr.Sel.Name == testMethodStruct { + if s, ok := selectExpr.X.(*ast.Ident); ok { + return s.Name == testMethodPackageType + } + } + } + } + + return false +} diff --git a/vendor/github.com/kyoh86/exportloopref/.golangci.yml b/vendor/github.com/kyoh86/exportloopref/.golangci.yml new file mode 100644 index 000000000..e876057f3 --- /dev/null +++ b/vendor/github.com/kyoh86/exportloopref/.golangci.yml @@ -0,0 +1,4 @@ +linters: + enable: + - unparam + - exportloopref diff --git 
a/vendor/github.com/kyoh86/exportloopref/.goreleaser.yml b/vendor/github.com/kyoh86/exportloopref/.goreleaser.yml new file mode 100644 index 000000000..22ff44040 --- /dev/null +++ b/vendor/github.com/kyoh86/exportloopref/.goreleaser.yml @@ -0,0 +1,43 @@ +project_name: exportloopref +release: + github: + owner: kyoh86 + name: exportloopref +brews: +- install: | + bin.install "exportloopref" + github: + owner: kyoh86 + name: homebrew-tap + folder: Formula + homepage: https://github.com/kyoh86/exportloopref + description: An analyzer that finds exporting pointers for loop variables. +builds: +- goos: + - linux + - darwin + - windows + goarch: + - amd64 + - "386" + main: ./cmd/exportloopref + ldflags: -s -w -X main.version={{.Version}} -X main.commit={{.Commit}} -X main.date={{.Date}} + binary: exportloopref +archives: +- id: gzip + format: tar.gz + format_overrides: + - goos: windows + format: zip + name_template: "{{ .Binary }}_{{ .Version }}_{{ .Os }}_{{ .Arch }}{{ if .Arm }}v{{ .Arm }}{{ end }}" + files: + - licence* + - LICENCE* + - license* + - LICENSE* + - readme* + - README* + - changelog* + - CHANGELOG* +snapshot: + name_template: SNAPSHOT-{{ .Commit }} diff --git a/vendor/github.com/kyoh86/exportloopref/LICENSE b/vendor/github.com/kyoh86/exportloopref/LICENSE new file mode 100644 index 000000000..7ac9dba4a --- /dev/null +++ b/vendor/github.com/kyoh86/exportloopref/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2020 kyoh86 + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE +OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/vendor/github.com/kyoh86/exportloopref/Makefile b/vendor/github.com/kyoh86/exportloopref/Makefile new file mode 100644 index 000000000..4d3ef22f7 --- /dev/null +++ b/vendor/github.com/kyoh86/exportloopref/Makefile @@ -0,0 +1,16 @@ +.PHONY: gen lint test install man + +VERSION := `git vertag get` +COMMIT := `git rev-parse HEAD` + +gen: + go generate ./... + +lint: gen + golangci-lint run + +test: lint + go test -v --race ./... + +install: test + go install -a -ldflags "-X=main.version=$(VERSION) -X=main.commit=$(COMMIT)" ./... diff --git a/vendor/github.com/kyoh86/exportloopref/README.md b/vendor/github.com/kyoh86/exportloopref/README.md new file mode 100644 index 000000000..5c019c738 --- /dev/null +++ b/vendor/github.com/kyoh86/exportloopref/README.md @@ -0,0 +1,221 @@ +# exportloopref + +An analyzer that finds exporting pointers for loop variables. 
+ +[![PkgGoDev](https://pkg.go.dev/badge/kyoh86/exportloopref)](https://pkg.go.dev/kyoh86/exportloopref) +[![Go Report Card](https://goreportcard.com/badge/github.com/kyoh86/exportloopref)](https://goreportcard.com/report/github.com/kyoh86/exportloopref) +[![Coverage Status](https://img.shields.io/codecov/c/github/kyoh86/exportloopref.svg)](https://codecov.io/gh/kyoh86/exportloopref) +[![Release](https://github.com/kyoh86/exportloopref/workflows/Release/badge.svg)](https://github.com/kyoh86/exportloopref/releases) + +## What's this? + +Sample problem code from: https://github.com/kyoh86/exportloopref/blob/master/testdata/src/simple/simple.go + +```go +package main + +func main() { + var intArray [4]*int + var intSlice []*int + var intRef *int + var intStr struct{ x *int } + + println("loop expecting 10, 11, 12, 13") + for i, p := range []int{10, 11, 12, 13} { + printp(&p) // not a diagnostic + intSlice = append(intSlice, &p) // want "exporting a pointer for the loop variable p" + intArray[i] = &p // want "exporting a pointer for the loop variable p" + if i%2 == 0 { + intRef = &p // want "exporting a pointer for the loop variable p" + intStr.x = &p // want "exporting a pointer for the loop variable p" + } + var vStr struct{ x *int } + var vArray [4]*int + var v *int + if i%2 == 0 { + v = &p // not a diagnostic (x is local variable) + vArray[1] = &p // not a diagnostic (x is local variable) + vStr.x = &p + } + _ = v + } + + println(`slice expecting "10, 11, 12, 13" but "13, 13, 13, 13"`) + for _, p := range intSlice { + printp(p) + } + println(`array expecting "10, 11, 12, 13" but "13, 13, 13, 13"`) + for _, p := range intArray { + printp(p) + } + println(`captured value expecting "12" but "13"`) + printp(intRef) +} + +func printp(p *int) { + println(*p) +} +``` + +In Go, the `p` variable in the above loops is actually a single variable. +So in many case (like the above), using it makes for us annoying bugs. + +You can find them with `exportloopref`, and fix it. + +```go +package main + +func main() { + var intArray [4]*int + var intSlice []*int + var intRef *int + var intStr struct{ x *int } + + println("loop expecting 10, 11, 12, 13") + for i, p := range []int{10, 11, 12, 13} { + p := p // FIX variable into the local variable + printp(&p) + intSlice = append(intSlice, &p) + intArray[i] = &p + if i%2 == 0 { + intRef = &p + intStr.x = &p + } + var vStr struct{ x *int } + var vArray [4]*int + var v *int + if i%2 == 0 { + v = &p + vArray[1] = &p + vStr.x = &p + } + _ = v + } + + println(`slice expecting "10, 11, 12, 13"`) + for _, p := range intSlice { + printp(p) + } + println(`array expecting "10, 11, 12, 13"`) + for _, p := range intArray { + printp(p) + } + println(`captured value expecting "12"`) + printp(intRef) +} + +func printp(p *int) { + println(*p) +} +``` + +ref: https://github.com/kyoh86/exportloopref/blob/master/testdata/src/fixed/fixed.go + +## Sensing policy + +I want to make exportloopref as accurately as possible. +So some cases of lints will be false-negative. + +e.g. + +```go +var s Foo +for _, p := []int{10, 11, 12, 13} { + s.Bar(&p) // If s stores the pointer, it will be bug. +} +``` + +If you want to report all of lints (with some false-positives), +you should use [looppointer](https://github.com/kyoh86/looppointer). + +### Known false negatives + +Case 1: pass the pointer to function to export. + +Case 2: pass the pointer to local variable, and export it. 
+ +```go +package main + +type List []*int + +func (l *List) AppendP(p *int) { + *l = append(*l, p) +} + +func main() { + var slice []*int + list := List{} + + println("loop expect exporting 10, 11, 12, 13") + for _, v := range []int{10, 11, 12, 13} { + list.AppendP(&v) // Case 1: wanted "exporting a pointer for the loop variable v", but cannot be found + + p := &v // p is the local variable + slice = append(slice, p) // Case 2: wanted "exporting a pointer for the loop variable v", but cannot be found + } + + println(`slice expecting "10, 11, 12, 13" but "13, 13, 13, 13"`) + for _, p := range slice { + printp(p) + } + println(`array expecting "10, 11, 12, 13" but "13, 13, 13, 13"`) + for _, p := range ([]*int)(list) { + printp(p) + } +} + +func printp(p *int) { + println(*p) +} +``` + +## Install + +go: + +```console +$ go get github.com/kyoh86/exportloopref/cmd/exportloopref +``` + +[homebrew](https://brew.sh/): + +```console +$ brew install kyoh86/tap/exportloopref +``` + +[gordon](https://github.com/kyoh86/gordon): + +```console +$ gordon install kyoh86/exportloopref +``` + +## Usage + +``` +exportloopref [-flag] [package] +``` + +### Flags + +| Flag | Description | +| --- | --- | +| -V | print version and exit | +| -all | no effect (deprecated) | +| -c int | display offending line with this many lines of context (default -1) | +| -cpuprofile string | write CPU profile to this file | +| -debug string | debug flags, any subset of "fpstv" | +| -fix | apply all suggested fixes | +| -flags | print analyzer flags in JSON | +| -json | emit JSON output | +| -memprofile string | write memory profile to this file | +| -source | no effect (deprecated) | +| -tags string | no effect (deprecated) | +| -trace string | write trace log to this file | +| -v | no effect (deprecated) | + +# LICENSE + +[![MIT License](http://img.shields.io/badge/license-MIT-blue.svg)](http://www.opensource.org/licenses/MIT) + +This is distributed under the [MIT License](http://www.opensource.org/licenses/MIT). 
diff --git a/vendor/github.com/kyoh86/exportloopref/exportloopref.go b/vendor/github.com/kyoh86/exportloopref/exportloopref.go new file mode 100644 index 000000000..4d1671a06 --- /dev/null +++ b/vendor/github.com/kyoh86/exportloopref/exportloopref.go @@ -0,0 +1,305 @@ +package exportloopref + +import ( + "fmt" + "go/ast" + "go/token" + "go/types" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/ast/inspector" +) + +var Analyzer = &analysis.Analyzer{ + Name: "exportloopref", + Doc: "checks for pointers to enclosing loop variables", + Run: run, + RunDespiteErrors: true, + Requires: []*analysis.Analyzer{inspect.Analyzer}, + // ResultType reflect.Type + // FactTypes []Fact +} + +func init() { + // Analyzer.Flags.StringVar(&v, "name", "default", "description") +} + +func run(pass *analysis.Pass) (interface{}, error) { + inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + + search := &Searcher{ + Stats: map[token.Pos]struct{}{}, + Vars: map[token.Pos]map[token.Pos]struct{}{}, + Types: pass.TypesInfo.Types, + } + + nodeFilter := []ast.Node{ + (*ast.RangeStmt)(nil), + (*ast.ForStmt)(nil), + (*ast.DeclStmt)(nil), + (*ast.AssignStmt)(nil), + (*ast.UnaryExpr)(nil), + } + + inspect.WithStack(nodeFilter, func(n ast.Node, push bool, stack []ast.Node) bool { + id, insert, digg := search.Check(n, stack) + if id != nil { + dMsg := fmt.Sprintf("exporting a pointer for the loop variable %s", id.Name) + fMsg := fmt.Sprintf("loop variable %s should be pinned", id.Name) + var suggest []analysis.SuggestedFix + if insert != token.NoPos { + suggest = []analysis.SuggestedFix{{ + Message: fMsg, + TextEdits: []analysis.TextEdit{{ + Pos: insert, + End: insert, + NewText: []byte(fmt.Sprintf("%[1]s := %[1]s\n", id.Name)), + }}, + }} + } + d := analysis.Diagnostic{Pos: id.Pos(), + End: id.End(), + Message: dMsg, + Category: "exportloopref", + SuggestedFixes: suggest, + } + pass.Report(d) + } + return digg + }) + + return nil, nil +} + +type Searcher struct { + // Statement variables : map to collect positions that + // variables are declared like below. + // - for , := range ... + // - var int + // - D := ... + Stats map[token.Pos]struct{} + // Local variables maps loop-position, decl-location to ignore + // safe pointers for variable which declared in the loop. + Vars map[token.Pos]map[token.Pos]struct{} + Types map[ast.Expr]types.TypeAndValue +} + +func (s *Searcher) Check(n ast.Node, stack []ast.Node) (*ast.Ident, token.Pos, bool) { + switch typed := n.(type) { + case *ast.RangeStmt: + s.parseRangeStmt(typed) + case *ast.ForStmt: + s.parseForStmt(typed) + case *ast.DeclStmt: + s.parseDeclStmt(typed, stack) + case *ast.AssignStmt: + s.parseAssignStmt(typed, stack) + + case *ast.UnaryExpr: + return s.checkUnaryExpr(typed, stack) + } + return nil, token.NoPos, true +} + +func (s *Searcher) parseRangeStmt(n *ast.RangeStmt) { + s.addStat(n.Key) + s.addStat(n.Value) +} + +func (s *Searcher) parseForStmt(n *ast.ForStmt) { + switch post := n.Post.(type) { + case *ast.AssignStmt: + // e.g. for p = head; p != nil; p = p.next + for _, lhs := range post.Lhs { + s.addStat(lhs) + } + case *ast.IncDecStmt: + // e.g. 
for i := 0; i < n; i++ + s.addStat(post.X) + } +} + +func (s *Searcher) addStat(expr ast.Expr) { + if id, ok := expr.(*ast.Ident); ok { + s.Stats[id.Pos()] = struct{}{} + } +} + +func (s *Searcher) parseDeclStmt(n *ast.DeclStmt, stack []ast.Node) { + loop, _ := s.innermostLoop(stack) + if loop == nil { + return + } + + // Register declaring variables + if genDecl, ok := n.Decl.(*ast.GenDecl); ok && genDecl.Tok == token.VAR { + for _, spec := range genDecl.Specs { + for _, name := range spec.(*ast.ValueSpec).Names { + s.addVar(loop, name) + } + } + } +} + +func (s *Searcher) parseAssignStmt(n *ast.AssignStmt, stack []ast.Node) { + loop, _ := s.innermostLoop(stack) + if loop == nil { + return + } + + // Find statements declaring local variable + if n.Tok == token.DEFINE { + for _, h := range n.Lhs { + s.addVar(loop, h) + } + } +} + +func (s *Searcher) addVar(loop ast.Node, expr ast.Expr) { + loopPos := loop.Pos() + id, ok := expr.(*ast.Ident) + if !ok { + return + } + vars, ok := s.Vars[loopPos] + if !ok { + vars = map[token.Pos]struct{}{} + } + vars[id.Obj.Pos()] = struct{}{} + s.Vars[loopPos] = vars +} + +func insertionPosition(block *ast.BlockStmt) token.Pos { + if len(block.List) > 0 { + return block.List[0].Pos() + } + return token.NoPos +} + +func (s *Searcher) innermostLoop(stack []ast.Node) (ast.Node, token.Pos) { + for i := len(stack) - 1; i >= 0; i-- { + switch typed := stack[i].(type) { + case *ast.RangeStmt: + return typed, insertionPosition(typed.Body) + case *ast.ForStmt: + return typed, insertionPosition(typed.Body) + } + } + return nil, token.NoPos +} + +func (s *Searcher) checkUnaryExpr(n *ast.UnaryExpr, stack []ast.Node) (*ast.Ident, token.Pos, bool) { + loop, insert := s.innermostLoop(stack) + if loop == nil { + return nil, token.NoPos, true + } + + if n.Op != token.AND { + return nil, token.NoPos, true + } + + // Get identity of the referred item + id := s.getIdentity(n.X) + if id == nil { + return nil, token.NoPos, true + } + + // If the identity is not the loop statement variable, + // it will not be reported. + if _, isStat := s.Stats[id.Obj.Pos()]; !isStat { + return nil, token.NoPos, true + } + + // check stack append(), []X{}, map[Type]X{}, Struct{}, &Struct{}, X.(Type), (X) + // in the = + var mayRHPos token.Pos + for i := len(stack) - 2; i >= 0; i-- { + switch typed := stack[i].(type) { + case (*ast.UnaryExpr): + // noop + case (*ast.CompositeLit): + // noop + case (*ast.KeyValueExpr): + // noop + case (*ast.CallExpr): + fun, ok := typed.Fun.(*ast.Ident) + if !ok { + return nil, token.NoPos, false // it's calling a function other of `append`. It cannot be checked + } + + if fun.Name != "append" { + return nil, token.NoPos, false // it's calling a function other of `append`. It cannot be checked + } + + case (*ast.AssignStmt): + if len(typed.Rhs) != len(typed.Lhs) { + return nil, token.NoPos, false // dead logic + } + + // search x where Rhs[x].Pos() == mayRHPos + var index int + for ri, rh := range typed.Rhs { + if rh.Pos() == mayRHPos { + index = ri + break + } + } + + // check Lhs[x] is not local variable + lh := typed.Lhs[index] + isVar := s.isVar(loop, lh) + if !isVar { + return id, insert, false + } + + return nil, token.NoPos, true + default: + // Other statement is not able to be checked. 
+ return nil, token.NoPos, false + } + + // memory an expr that may be right-hand in the AssignStmt + mayRHPos = stack[i].Pos() + } + return nil, token.NoPos, true +} + +func (s *Searcher) isVar(loop ast.Node, expr ast.Expr) bool { + vars := s.Vars[loop.Pos()] // map[token.Pos]struct{} + if vars == nil { + return false + } + switch typed := expr.(type) { + case (*ast.Ident): + _, isVar := vars[typed.Obj.Pos()] + return isVar + case (*ast.IndexExpr): // like X[Y], check X + return s.isVar(loop, typed.X) + case (*ast.SelectorExpr): // like X.Y, check X + return s.isVar(loop, typed.X) + } + return false +} + +// Get variable identity +func (s *Searcher) getIdentity(expr ast.Expr) *ast.Ident { + switch typed := expr.(type) { + case *ast.SelectorExpr: + // Ignore if the parent is pointer ref (fix for #2) + if _, ok := s.Types[typed.X].Type.(*types.Pointer); ok { + return nil + } + + // Get parent identity; i.e. `a.b` of the `a.b.c`. + return s.getIdentity(typed.X) + + case *ast.Ident: + // Get simple identity; i.e. `a` of the `a`. + if typed.Obj == nil { + return nil + } + return typed + } + return nil +} diff --git a/vendor/github.com/kyoh86/exportloopref/go.mod b/vendor/github.com/kyoh86/exportloopref/go.mod new file mode 100644 index 000000000..34a53987a --- /dev/null +++ b/vendor/github.com/kyoh86/exportloopref/go.mod @@ -0,0 +1,5 @@ +module github.com/kyoh86/exportloopref + +go 1.14 + +require golang.org/x/tools v0.0.0-20200622203043-20e05c1c8ffa diff --git a/vendor/github.com/kyoh86/exportloopref/go.sum b/vendor/github.com/kyoh86/exportloopref/go.sum new file mode 100644 index 000000000..3b199f006 --- /dev/null +++ b/vendor/github.com/kyoh86/exportloopref/go.sum @@ -0,0 +1,20 @@ +github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/mod v0.2.0 h1:KU7oHjnv3XNWfa5COkzUifxZmxp1TyI7ImMXqFxLwvQ= +golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20200622203043-20e05c1c8ffa h1:mMXQKlWCw9mIWgVLLfiycDZjMHMMYqiuakI4E/l2xcA= +golang.org/x/tools v0.0.0-20200622203043-20e05c1c8ffa/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod 
h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543 h1:E7g+9GITq07hpfrRu66IVDexMakfv52eLZ2CXBWiKr4= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= diff --git a/vendor/github.com/ldez/gomoddirectives/.gitignore b/vendor/github.com/ldez/gomoddirectives/.gitignore new file mode 100644 index 000000000..9da3e0da9 --- /dev/null +++ b/vendor/github.com/ldez/gomoddirectives/.gitignore @@ -0,0 +1,2 @@ +.idea/ +/gomoddirectives diff --git a/vendor/github.com/ldez/gomoddirectives/.golangci.yml b/vendor/github.com/ldez/gomoddirectives/.golangci.yml new file mode 100644 index 000000000..cc0a1fcae --- /dev/null +++ b/vendor/github.com/ldez/gomoddirectives/.golangci.yml @@ -0,0 +1,87 @@ +run: + deadline: 2m + skip-files: [] + skip-dirs: [] + +linters-settings: + govet: + enable-all: true + gocyclo: + min-complexity: 12 + goconst: + min-len: 3 + min-occurrences: 3 + misspell: + locale: US + gofumpt: + extra-rules: true + depguard: + list-type: blacklist + include-go-root: false + packages: + - github.com/pkg/errors + godox: + keywords: + - FIXME + gocritic: + enabled-tags: + - diagnostic + - style + - performance + disabled-checks: + - sloppyReassign + - rangeValCopy + - octalLiteral + - paramTypeCombine # already handle by gofumpt.extra-rules + settings: + hugeParam: + sizeThreshold: 100 + forbidigo: + forbid: + - '^print(ln)?$' + - '^fmt\.Print(f|ln)?$' + - '^panic$' + - '^spew\.Print(f|ln)?$' + - '^spew\.Dump$' + tagliatelle: + case: + rules: + json: pascal + +linters: + enable-all: true + disable: + - maligned # deprecated + - interfacer # deprecated + - golint # deprecated + - scopelint # deprecated + - sqlclosecheck # not relevant (SQL) + - rowserrcheck # not relevant (SQL) + - cyclop # duplicate of gocyclo + - lll + - dupl + - prealloc + - bodyclose + - wsl + - nlreturn + - gomnd + - testpackage + - paralleltest + - tparallel + - goerr113 + - wrapcheck + - exhaustive + - exhaustivestruct + +issues: + exclude-use-default: false + max-per-linter: 0 + max-same-issues: 0 + exclude: [] + exclude-rules: + - path: "(.+)_test.go" + linters: + - funlen + - goconst + - path: cmd/gomoddirectives/gomoddirectives.go + text: 'use of `fmt.Println` forbidden' diff --git a/vendor/github.com/ldez/gomoddirectives/LICENSE b/vendor/github.com/ldez/gomoddirectives/LICENSE new file mode 100644 index 000000000..caed523b4 --- /dev/null +++ b/vendor/github.com/ldez/gomoddirectives/LICENSE @@ -0,0 +1,190 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. 
+ + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + Copyright 2021 Fernandez Ludovic + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/vendor/github.com/ldez/gomoddirectives/Makefile b/vendor/github.com/ldez/gomoddirectives/Makefile new file mode 100644 index 000000000..dd3b335c7 --- /dev/null +++ b/vendor/github.com/ldez/gomoddirectives/Makefile @@ -0,0 +1,15 @@ +.PHONY: clean check test build + +default: clean check test build + +clean: + rm -rf dist/ cover.out + +test: clean + go test -v -cover ./... 
+ +check: + golangci-lint run + +build: + go build -v -ldflags "-s -w" -trimpath ./cmd/gomoddirectives/ diff --git a/vendor/github.com/ldez/gomoddirectives/go.mod b/vendor/github.com/ldez/gomoddirectives/go.mod new file mode 100644 index 000000000..fb65d2ddc --- /dev/null +++ b/vendor/github.com/ldez/gomoddirectives/go.mod @@ -0,0 +1,8 @@ +module github.com/ldez/gomoddirectives + +go 1.16 + +require ( + github.com/stretchr/testify v1.7.0 + golang.org/x/mod v0.4.2 +) diff --git a/vendor/github.com/ldez/gomoddirectives/go.sum b/vendor/github.com/ldez/gomoddirectives/go.sum new file mode 100644 index 000000000..4e4ac3ecc --- /dev/null +++ b/vendor/github.com/ldez/gomoddirectives/go.sum @@ -0,0 +1,25 @@ +github.com/davecgh/go-spew v1.1.0 h1:ZDRjVQ15GmhC3fiQ8ni8+OwkZQO4DARzQgrnXU1Liz8= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY= +github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/mod v0.4.2 h1:Gz96sIWK3OalVv/I/qNygP42zyoKp3xptRVCWRFEBvo= +golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898 h1:/atklqdjdhuosWIl6AIbOeHJjicWYPqR9bpxqxYG2pA= +golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c h1:dUUwHk2QECo/6vqA44rthZ8ie2QXMNeKRTHCNY2nXvo= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/vendor/github.com/ldez/gomoddirectives/gomoddirectives.go b/vendor/github.com/ldez/gomoddirectives/gomoddirectives.go new file mode 100644 index 000000000..2a4c90474 --- /dev/null +++ b/vendor/github.com/ldez/gomoddirectives/gomoddirectives.go @@ -0,0 +1,125 @@ +// Package gomoddirectives a linter that handle `replace`, `retract`, `exclude` directives into 
`go.mod`. +package gomoddirectives + +import ( + "fmt" + "go/token" + "strings" + + "golang.org/x/mod/modfile" +) + +const ( + reasonRetract = "a comment is mandatory to explain why the version has been retracted" + reasonExclude = "exclude directive is not allowed" + reasonReplaceLocal = "local replacement are not allowed" + reasonReplace = "replacement are not allowed" + reasonReplaceIdentical = "the original module and the replacement are identical" + reasonReplaceDuplicate = "multiple replacement of the same module" +) + +// Result the analysis result. +type Result struct { + Reason string + Start token.Position + End token.Position +} + +// NewResult creates a new Result. +func NewResult(file *modfile.File, line *modfile.Line, reason string) Result { + return Result{ + Start: token.Position{Filename: file.Syntax.Name, Line: line.Start.Line, Column: line.Start.LineRune}, + End: token.Position{Filename: file.Syntax.Name, Line: line.End.Line, Column: line.End.LineRune}, + Reason: reason, + } +} + +func (r Result) String() string { + return fmt.Sprintf("%s: %s", r.Start, r.Reason) +} + +// Options the analyzer options. +type Options struct { + ReplaceAllowList []string + ReplaceAllowLocal bool + ExcludeForbidden bool + RetractAllowNoExplanation bool +} + +// Analyze analyzes a project. +func Analyze(opts Options) ([]Result, error) { + f, err := GetModuleFile() + if err != nil { + return nil, fmt.Errorf("failed to get module file: %w", err) + } + + return AnalyzeFile(f, opts), nil +} + +// AnalyzeFile analyzes a mod file. +func AnalyzeFile(file *modfile.File, opts Options) []Result { + var results []Result + + if !opts.RetractAllowNoExplanation { + for _, r := range file.Retract { + if r.Rationale != "" { + continue + } + + results = append(results, NewResult(file, r.Syntax, reasonRetract)) + } + } + + if opts.ExcludeForbidden { + for _, e := range file.Exclude { + results = append(results, NewResult(file, e.Syntax, reasonExclude)) + } + } + + uniqReplace := map[string]struct{}{} + + for _, r := range file.Replace { + reason := check(opts, r) + if reason != "" { + results = append(results, NewResult(file, r.Syntax, reason)) + continue + } + + if r.Old.Path == r.New.Path && r.Old.Version == r.New.Version { + results = append(results, NewResult(file, r.Syntax, reasonReplaceIdentical)) + continue + } + + if _, ok := uniqReplace[r.Old.Path+r.Old.Version]; ok { + results = append(results, NewResult(file, r.Syntax, reasonReplaceDuplicate)) + } + + uniqReplace[r.Old.Path+r.Old.Version] = struct{}{} + } + + return results +} + +func check(o Options, r *modfile.Replace) string { + if isLocal(r) { + if o.ReplaceAllowLocal { + return "" + } + + return fmt.Sprintf("%s: %s", reasonReplaceLocal, r.Old.Path) + } + + for _, v := range o.ReplaceAllowList { + if r.Old.Path == v { + return "" + } + } + + return fmt.Sprintf("%s: %s", reasonReplace, r.Old.Path) +} + +// Filesystem paths found in "replace" directives are represented by a path with an empty version. 
+// https://github.com/golang/mod/blob/bc388b264a244501debfb9caea700c6dcaff10e2/module/module.go#L122-L124 +func isLocal(r *modfile.Replace) bool { + return strings.TrimSpace(r.New.Version) == "" +} diff --git a/vendor/github.com/ldez/gomoddirectives/module.go b/vendor/github.com/ldez/gomoddirectives/module.go new file mode 100644 index 000000000..379ae07fa --- /dev/null +++ b/vendor/github.com/ldez/gomoddirectives/module.go @@ -0,0 +1,47 @@ +package gomoddirectives + +import ( + "encoding/json" + "errors" + "fmt" + "io/ioutil" + "os/exec" + + "golang.org/x/mod/modfile" +) + +type modInfo struct { + Path string `json:"Path"` + Dir string `json:"Dir"` + GoMod string `json:"GoMod"` + GoVersion string `json:"GoVersion"` + Main bool `json:"Main"` +} + +// GetModuleFile gets module file. +func GetModuleFile() (*modfile.File, error) { + // https://github.com/golang/go/issues/44753#issuecomment-790089020 + cmd := exec.Command("go", "list", "-m", "-json") + + raw, err := cmd.CombinedOutput() + if err != nil { + return nil, fmt.Errorf("command go list: %w: %s", err, string(raw)) + } + + var v modInfo + err = json.Unmarshal(raw, &v) + if err != nil { + return nil, fmt.Errorf("unmarshaling error: %w: %s", err, string(raw)) + } + + if v.GoMod == "" { + return nil, errors.New("working directory is not part of a module") + } + + raw, err = ioutil.ReadFile(v.GoMod) + if err != nil { + return nil, fmt.Errorf("reading go.mod file: %w", err) + } + + return modfile.Parse("go.mod", raw, nil) +} diff --git a/vendor/github.com/ldez/gomoddirectives/readme.md b/vendor/github.com/ldez/gomoddirectives/readme.md new file mode 100644 index 000000000..510c8502e --- /dev/null +++ b/vendor/github.com/ldez/gomoddirectives/readme.md @@ -0,0 +1,16 @@ +# gomoddirectives + +[![Sponsor](https://img.shields.io/badge/Sponsor%20me-%E2%9D%A4%EF%B8%8F-pink)](https://github.com/sponsors/ldez) +[![Build Status](https://github.com/ldez/gomoddirectives/workflows/Main/badge.svg?branch=master)](https://github.com/ldez/gomoddirectives/actions) + +A linter that handle [`replace`](https://golang.org/ref/mod#go-mod-file-replace), [`retract`](https://golang.org/ref/mod#go-mod-file-retract), [`exclude`](https://golang.org/ref/mod#go-mod-file-exclude) directives into `go.mod`. 
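A minimal usage sketch of the exported API vendored above (`Options`, `Analyze` and `AnalyzeFile` from `gomoddirectives.go`). It is not part of the vendored README; the option values are illustrative only, and the feature list below is driven by these same fields. `Analyze` shells out to `go list -m -json` (see `module.go` above) to locate `go.mod`, so the sketch assumes it runs inside a module.

```go
package main

import (
	"fmt"
	"log"

	"github.com/ldez/gomoddirectives"
)

func main() {
	// Illustrative options: forbid `exclude`, require a comment on `retract`,
	// and allow only local (filesystem) `replace` directives.
	opts := gomoddirectives.Options{
		ReplaceAllowLocal:         true,
		ExcludeForbidden:          true,
		RetractAllowNoExplanation: false,
	}

	// Analyze locates go.mod via `go list -m -json` and returns one Result
	// per offending directive.
	results, err := gomoddirectives.Analyze(opts)
	if err != nil {
		log.Fatal(err)
	}

	for _, res := range results {
		fmt.Println(res) // e.g. "go.mod:12:1: exclude directive is not allowed"
	}
}
```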
+ +Features: + +- ban all [`replace`](https://golang.org/ref/mod#go-mod-file-replace) directives +- allow only local [`replace`](https://golang.org/ref/mod#go-mod-file-replace) directives +- allow only some [`replace`](https://golang.org/ref/mod#go-mod-file-replace) directives +- force explanation for [`retract`](https://golang.org/ref/mod#go-mod-file-retract) directives +- ban all [`exclude`](https://golang.org/ref/mod#go-mod-file-exclude) directives +- detect duplicated [`replace`](https://golang.org/ref/mod#go-mod-file-replace) directives +- detect identical [`replace`](https://golang.org/ref/mod#go-mod-file-replace) directives diff --git a/vendor/github.com/ldez/tagliatelle/.gitignore b/vendor/github.com/ldez/tagliatelle/.gitignore new file mode 100644 index 000000000..74c84ce62 --- /dev/null +++ b/vendor/github.com/ldez/tagliatelle/.gitignore @@ -0,0 +1,3 @@ +.idea/ +/tagliatelle +notes.md diff --git a/vendor/github.com/ldez/tagliatelle/.golangci.yml b/vendor/github.com/ldez/tagliatelle/.golangci.yml new file mode 100644 index 000000000..b897103e9 --- /dev/null +++ b/vendor/github.com/ldez/tagliatelle/.golangci.yml @@ -0,0 +1,77 @@ +run: + timeout: 5m + skip-files: [ ] + skip-dirs: [ ] + +linters-settings: + govet: + enable-all: true + disable: + - fieldalignment + gocyclo: + min-complexity: 15 + maligned: + suggest-new: true + goconst: + min-len: 5 + min-occurrences: 3 + misspell: + locale: US + funlen: + lines: -1 + statements: 40 + godox: + keywords: + - FIXME + gofumpt: + extra-rules: true + depguard: + list-type: blacklist + include-go-root: false + packages: + - github.com/sirupsen/logrus + - github.com/pkg/errors + gocritic: + enabled-tags: + - diagnostic + - style + - performance + disabled-checks: + - sloppyReassign + - rangeValCopy + - octalLiteral + - paramTypeCombine # already handle by gofumpt.extra-rules + settings: + hugeParam: + sizeThreshold: 100 + +linters: + enable-all: true + disable: + - maligned # deprecated + - interfacer # deprecated + - scopelint # deprecated + - sqlclosecheck # not relevant (SQL) + - rowserrcheck # not relevant (SQL) + - cyclop # duplicate of gocyclo + - lll + - dupl + - wsl + - nlreturn + - gomnd + - goerr113 + - wrapcheck + - exhaustive + - exhaustivestruct + - testpackage + - tparallel + - paralleltest + - prealloc + - ifshort + - forcetypeassert + +issues: + exclude-use-default: false + max-per-linter: 0 + max-same-issues: 0 + exclude: [] diff --git a/vendor/github.com/ldez/tagliatelle/LICENSE b/vendor/github.com/ldez/tagliatelle/LICENSE new file mode 100644 index 000000000..caed523b4 --- /dev/null +++ b/vendor/github.com/ldez/tagliatelle/LICENSE @@ -0,0 +1,190 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + Copyright 2021 Fernandez Ludovic + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/vendor/github.com/ldez/tagliatelle/Makefile b/vendor/github.com/ldez/tagliatelle/Makefile new file mode 100644 index 000000000..f66a39804 --- /dev/null +++ b/vendor/github.com/ldez/tagliatelle/Makefile @@ -0,0 +1,15 @@ +.PHONY: clean check test build + +default: clean check test build + +clean: + rm -rf dist/ cover.out + +test: clean + go test -v -cover ./... 
+ +check: + golangci-lint run + +build: + go build -v -ldflags "-s -w" -trimpath ./cmd/tagliatelle/ diff --git a/vendor/github.com/ldez/tagliatelle/go.mod b/vendor/github.com/ldez/tagliatelle/go.mod new file mode 100644 index 000000000..159e907fb --- /dev/null +++ b/vendor/github.com/ldez/tagliatelle/go.mod @@ -0,0 +1,8 @@ +module github.com/ldez/tagliatelle + +go 1.16 + +require ( + github.com/ettle/strcase v0.1.1 + golang.org/x/tools v0.1.0 +) diff --git a/vendor/github.com/ldez/tagliatelle/go.sum b/vendor/github.com/ldez/tagliatelle/go.sum new file mode 100644 index 000000000..ae1c76f51 --- /dev/null +++ b/vendor/github.com/ldez/tagliatelle/go.sum @@ -0,0 +1,38 @@ +github.com/davecgh/go-spew v1.1.0 h1:ZDRjVQ15GmhC3fiQ8ni8+OwkZQO4DARzQgrnXU1Liz8= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/ettle/strcase v0.1.1 h1:htFueZyVeE1XNnMEfbqp5r67qAN/4r6ya1ysq8Q+Zcw= +github.com/ettle/strcase v0.1.1/go.mod h1:hzDLsPC7/lwKyBOywSHEP89nt2pDgdy+No1NBA9o9VY= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/testify v1.5.1 h1:nOGnQDM7FYENwehXlg/kFVnos3rEvtKTjRvOWSzb6H4= +github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= +github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/mod v0.3.0 h1:RM4zey1++hCTbCVQfnWeKs9/IEsaBLA8vTkd0WVtmH4= +golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4 h1:myAQVi0cGEoqQVR5POX+8RR2mrocKqNN1hmeMqhX27k= +golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.1.0 
h1:po9/4sTYwZU9lPhi1tOrb4hCv3qrhiQ77LZfGa2OjwY= +golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0= +golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/yaml.v2 v2.2.2 h1:ZCJp+EgiOT7lHqUV2J862kp8Qj64Jo6az82+3Td9dZw= +gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= diff --git a/vendor/github.com/ldez/tagliatelle/readme.md b/vendor/github.com/ldez/tagliatelle/readme.md new file mode 100644 index 000000000..846767b2c --- /dev/null +++ b/vendor/github.com/ldez/tagliatelle/readme.md @@ -0,0 +1,31 @@ +# Tagliatelle + +[![Sponsor](https://img.shields.io/badge/Sponsor%20me-%E2%9D%A4%EF%B8%8F-pink)](https://github.com/sponsors/ldez) +[![Build Status](https://github.com/ldez/tagliatelle/workflows/Main/badge.svg?branch=master)](https://github.com/ldez/tagliatelle/actions) + +A linter that handles struct tags. + +Supported string casing: + +- `camel` +- `pascal` +- `kebab` +- `smake` +- `goCamel` +- `goPascal` +- `goKebab` +- `goSmake` +- `upper` +- `lower` + +## Examples + +```go +// json and camel case +type Foo struct { + ID string `json:"ID"` // must be "id" + UserID string `json:"UserID"`// must be "userId" + Name string `json:"name"` + Value string `json:"val,omitempty"`// must be "value" +} +``` diff --git a/vendor/github.com/ldez/tagliatelle/tagliatelle.go b/vendor/github.com/ldez/tagliatelle/tagliatelle.go new file mode 100644 index 000000000..dfb302b12 --- /dev/null +++ b/vendor/github.com/ldez/tagliatelle/tagliatelle.go @@ -0,0 +1,192 @@ +// Package tagliatelle a linter that handle struct tags. +package tagliatelle + +import ( + "encoding/json" + "errors" + "fmt" + "go/ast" + "reflect" + "strings" + + "github.com/ettle/strcase" + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/ast/inspector" +) + +// Config the tagliatelle configuration. +type Config struct { + Rules map[string]string + UseFieldName bool +} + +// New creates an analyzer. 
+func New(config Config) *analysis.Analyzer { + return &analysis.Analyzer{ + Name: "tagliatelle", + Doc: "Checks the struct tags.", + Run: func(pass *analysis.Pass) (interface{}, error) { + if len(config.Rules) == 0 { + return nil, nil + } + + return run(pass, config) + }, + Requires: []*analysis.Analyzer{ + inspect.Analyzer, + }, + } +} + +func run(pass *analysis.Pass, config Config) (interface{}, error) { + isp, ok := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + if !ok { + return nil, errors.New("missing inspect analyser") + } + + nodeFilter := []ast.Node{ + (*ast.StructType)(nil), + } + + isp.Preorder(nodeFilter, func(n ast.Node) { + node, ok := n.(*ast.StructType) + if !ok { + return + } + + for _, field := range node.Fields.List { + analyze(pass, config, node, field) + } + }) + + return nil, nil +} + +func analyze(pass *analysis.Pass, config Config, n *ast.StructType, field *ast.Field) { + if n.Fields == nil || n.Fields.NumFields() < 1 { + // skip empty structs + return + } + + if field.Tag == nil { + // skip when no struct tag + return + } + + fieldName, err := getFieldName(field) + if err != nil { + pass.Reportf(n.Pos(), "unable to get field name: %v", err) + return + } + + for key, convName := range config.Rules { + if convName == "" { + continue + } + + value, ok := lookupTagValue(field.Tag, key) + if !ok { + // skip when no struct tag for the key + continue + } + + if value == "-" { + // skip when skipped :) + continue + } + + if value == "" { + // skip empty value, it can change in the future + continue + } + + converter, err := getConverter(convName) + if err != nil { + pass.Reportf(n.Pos(), "%s(%s): %v", key, convName, err) + continue + } + + expected := value + if config.UseFieldName { + expected = fieldName + } + + if value != converter(expected) { + pass.Reportf(field.Tag.Pos(), "%s(%s): got '%s' want '%s'", key, convName, value, converter(expected)) + } + } +} + +func getFieldName(field *ast.Field) (string, error) { + var name string + for _, n := range field.Names { + if n.Name != "" { + name = n.Name + } + } + + if name != "" { + return name, nil + } + + return getTypeName(field.Type) +} + +func getTypeName(exp ast.Expr) (string, error) { + switch typ := exp.(type) { + case *ast.Ident: + return typ.Name, nil + case *ast.StarExpr: + return getTypeName(typ.X) + case *ast.SelectorExpr: + return getTypeName(typ.Sel) + default: + bytes, _ := json.Marshal(exp) + return "", fmt.Errorf("unexpected eror: type %T: %s", typ, string(bytes)) + } +} + +func lookupTagValue(tag *ast.BasicLit, key string) (string, bool) { + raw := strings.Trim(tag.Value, "`") + + value, ok := reflect.StructTag(raw).Lookup(key) + if !ok { + return value, ok + } + + values := strings.Split(value, ",") + + if len(values) < 1 { + return "", true + } + + return values[0], true +} + +func getConverter(c string) (func(s string) string, error) { + switch c { + case "camel": + return strcase.ToCamel, nil + case "pascal": + return strcase.ToPascal, nil + case "kebab": + return strcase.ToKebab, nil + case "snake": + return strcase.ToSnake, nil + case "goCamel": + return strcase.ToGoCamel, nil + case "goPascal": + return strcase.ToGoPascal, nil + case "goKebab": + return strcase.ToGoKebab, nil + case "goSnake": + return strcase.ToGoSnake, nil + case "upper": + return strings.ToUpper, nil + case "lower": + return strings.ToLower, nil + default: + return nil, fmt.Errorf("unsupported case: %s", c) + } +} diff --git a/vendor/github.com/magiconair/properties/.gitignore 
b/vendor/github.com/magiconair/properties/.gitignore new file mode 100644 index 000000000..e7081ff52 --- /dev/null +++ b/vendor/github.com/magiconair/properties/.gitignore @@ -0,0 +1,6 @@ +*.sublime-project +*.sublime-workspace +*.un~ +*.swp +.idea/ +*.iml diff --git a/vendor/github.com/magiconair/properties/.travis.yml b/vendor/github.com/magiconair/properties/.travis.yml new file mode 100644 index 000000000..baf9031df --- /dev/null +++ b/vendor/github.com/magiconair/properties/.travis.yml @@ -0,0 +1,17 @@ +language: go +go: + - 1.3.x + - 1.4.x + - 1.5.x + - 1.6.x + - 1.7.x + - 1.8.x + - 1.9.x + - "1.10.x" + - "1.11.x" + - "1.12.x" + - "1.13.x" + - "1.14.x" + - "1.15.x" + - "1.16.x" + - tip diff --git a/vendor/github.com/magiconair/properties/CHANGELOG.md b/vendor/github.com/magiconair/properties/CHANGELOG.md new file mode 100644 index 000000000..ff8d02535 --- /dev/null +++ b/vendor/github.com/magiconair/properties/CHANGELOG.md @@ -0,0 +1,160 @@ +## Changelog + +### [1.8.2](https://github.com/magiconair/properties/tree/v1.8.2) - 25 Aug 2020 + + * [PR #36](https://github.com/magiconair/properties/pull/36): Escape backslash on write + + This patch ensures that backslashes are escaped on write. Existing applications which + rely on the old behavior may need to be updated. + + Thanks to [@apesternikov](https://github.com/apesternikov) for the patch. + + * [PR #42](https://github.com/magiconair/properties/pull/42): Made Content-Type check whitespace agnostic in LoadURL() + + Thanks to [@aliras1](https://github.com/aliras1) for the patch. + + * [PR #41](https://github.com/magiconair/properties/pull/41): Make key/value separator configurable on Write() + + Thanks to [@mkjor](https://github.com/mkjor) for the patch. + + * [PR #40](https://github.com/magiconair/properties/pull/40): Add method to return a sorted list of keys + + Thanks to [@mkjor](https://github.com/mkjor) for the patch. + +### [1.8.1](https://github.com/magiconair/properties/tree/v1.8.1) - 10 May 2019 + + * [PR #35](https://github.com/magiconair/properties/pull/35): Close body always after request + + This patch ensures that in `LoadURL` the response body is always closed. + + Thanks to [@liubog2008](https://github.com/liubog2008) for the patch. + +### [1.8](https://github.com/magiconair/properties/tree/v1.8) - 15 May 2018 + + * [PR #26](https://github.com/magiconair/properties/pull/26): Disable expansion during loading + + This adds the option to disable property expansion during loading. + + Thanks to [@kmala](https://github.com/kmala) for the patch. + +### [1.7.6](https://github.com/magiconair/properties/tree/v1.7.6) - 14 Feb 2018 + + * [PR #29](https://github.com/magiconair/properties/pull/29): Reworked expansion logic to handle more complex cases. + + See PR for an example. + + Thanks to [@yobert](https://github.com/yobert) for the fix. + +### [1.7.5](https://github.com/magiconair/properties/tree/v1.7.5) - 13 Feb 2018 + + * [PR #28](https://github.com/magiconair/properties/pull/28): Support duplicate expansions in the same value + + Values which expand the same key multiple times (e.g. `key=${a} ${a}`) will no longer fail + with a `circular reference error`. + + Thanks to [@yobert](https://github.com/yobert) for the fix. 
+ +### [1.7.4](https://github.com/magiconair/properties/tree/v1.7.4) - 31 Oct 2017 + + * [Issue #23](https://github.com/magiconair/properties/issues/23): Ignore blank lines with whitespaces + + * [PR #24](https://github.com/magiconair/properties/pull/24): Update keys when DisableExpansion is enabled + + Thanks to [@mgurov](https://github.com/mgurov) for the fix. + +### [1.7.3](https://github.com/magiconair/properties/tree/v1.7.3) - 10 Jul 2017 + + * [Issue #17](https://github.com/magiconair/properties/issues/17): Add [SetValue()](http://godoc.org/github.com/magiconair/properties#Properties.SetValue) method to set values generically + * [Issue #22](https://github.com/magiconair/properties/issues/22): Add [LoadMap()](http://godoc.org/github.com/magiconair/properties#LoadMap) function to load properties from a string map + +### [1.7.2](https://github.com/magiconair/properties/tree/v1.7.2) - 20 Mar 2017 + + * [Issue #15](https://github.com/magiconair/properties/issues/15): Drop gocheck dependency + * [PR #21](https://github.com/magiconair/properties/pull/21): Add [Map()](http://godoc.org/github.com/magiconair/properties#Properties.Map) and [FilterFunc()](http://godoc.org/github.com/magiconair/properties#Properties.FilterFunc) + +### [1.7.1](https://github.com/magiconair/properties/tree/v1.7.1) - 13 Jan 2017 + + * [Issue #14](https://github.com/magiconair/properties/issues/14): Decouple TestLoadExpandedFile from `$USER` + * [PR #12](https://github.com/magiconair/properties/pull/12): Load from files and URLs + * [PR #16](https://github.com/magiconair/properties/pull/16): Keep gofmt happy + * [PR #18](https://github.com/magiconair/properties/pull/18): Fix Delete() function + +### [1.7.0](https://github.com/magiconair/properties/tree/v1.7.0) - 20 Mar 2016 + + * [Issue #10](https://github.com/magiconair/properties/issues/10): Add [LoadURL,LoadURLs,MustLoadURL,MustLoadURLs](http://godoc.org/github.com/magiconair/properties#LoadURL) method to load properties from a URL. + * [Issue #11](https://github.com/magiconair/properties/issues/11): Add [LoadString,MustLoadString](http://godoc.org/github.com/magiconair/properties#LoadString) method to load properties from an UTF8 string. + * [PR #8](https://github.com/magiconair/properties/pull/8): Add [MustFlag](http://godoc.org/github.com/magiconair/properties#Properties.MustFlag) method to provide overrides via command line flags. (@pascaldekloe) + +### [1.6.0](https://github.com/magiconair/properties/tree/v1.6.0) - 11 Dec 2015 + + * Add [Decode](http://godoc.org/github.com/magiconair/properties#Properties.Decode) method to populate struct from properties via tags. + +### [1.5.6](https://github.com/magiconair/properties/tree/v1.5.6) - 18 Oct 2015 + + * Vendored in gopkg.in/check.v1 + +### [1.5.5](https://github.com/magiconair/properties/tree/v1.5.5) - 31 Jul 2015 + + * [PR #6](https://github.com/magiconair/properties/pull/6): Add [Delete](http://godoc.org/github.com/magiconair/properties#Properties.Delete) method to remove keys including comments. (@gerbenjacobs) + +### [1.5.4](https://github.com/magiconair/properties/tree/v1.5.4) - 23 Jun 2015 + + * [Issue #5](https://github.com/magiconair/properties/issues/5): Allow disabling of property expansion [DisableExpansion](http://godoc.org/github.com/magiconair/properties#Properties.DisableExpansion). When property expansion is disabled Properties become a simple key/value store and don't check for circular references. 
+ +### [1.5.3](https://github.com/magiconair/properties/tree/v1.5.3) - 02 Jun 2015 + + * [Issue #4](https://github.com/magiconair/properties/issues/4): Maintain key order in [Filter()](http://godoc.org/github.com/magiconair/properties#Properties.Filter), [FilterPrefix()](http://godoc.org/github.com/magiconair/properties#Properties.FilterPrefix) and [FilterRegexp()](http://godoc.org/github.com/magiconair/properties#Properties.FilterRegexp) + +### [1.5.2](https://github.com/magiconair/properties/tree/v1.5.2) - 10 Apr 2015 + + * [Issue #3](https://github.com/magiconair/properties/issues/3): Don't print comments in [WriteComment()](http://godoc.org/github.com/magiconair/properties#Properties.WriteComment) if they are all empty + * Add clickable links to README + +### [1.5.1](https://github.com/magiconair/properties/tree/v1.5.1) - 08 Dec 2014 + + * Added [GetParsedDuration()](http://godoc.org/github.com/magiconair/properties#Properties.GetParsedDuration) and [MustGetParsedDuration()](http://godoc.org/github.com/magiconair/properties#Properties.MustGetParsedDuration) for values specified compatible with + [time.ParseDuration()](http://golang.org/pkg/time/#ParseDuration). + +### [1.5.0](https://github.com/magiconair/properties/tree/v1.5.0) - 18 Nov 2014 + + * Added support for single and multi-line comments (reading, writing and updating) + * The order of keys is now preserved + * Calling [Set()](http://godoc.org/github.com/magiconair/properties#Properties.Set) with an empty key now silently ignores the call and does not create a new entry + * Added a [MustSet()](http://godoc.org/github.com/magiconair/properties#Properties.MustSet) method + * Migrated test library from launchpad.net/gocheck to [gopkg.in/check.v1](http://gopkg.in/check.v1) + +### [1.4.2](https://github.com/magiconair/properties/tree/v1.4.2) - 15 Nov 2014 + + * [Issue #2](https://github.com/magiconair/properties/issues/2): Fixed goroutine leak in parser which created two lexers but cleaned up only one + +### [1.4.1](https://github.com/magiconair/properties/tree/v1.4.1) - 13 Nov 2014 + + * [Issue #1](https://github.com/magiconair/properties/issues/1): Fixed bug in Keys() method which returned an empty string + +### [1.4.0](https://github.com/magiconair/properties/tree/v1.4.0) - 23 Sep 2014 + + * Added [Keys()](http://godoc.org/github.com/magiconair/properties#Properties.Keys) to get the keys + * Added [Filter()](http://godoc.org/github.com/magiconair/properties#Properties.Filter), [FilterRegexp()](http://godoc.org/github.com/magiconair/properties#Properties.FilterRegexp) and [FilterPrefix()](http://godoc.org/github.com/magiconair/properties#Properties.FilterPrefix) to get a subset of the properties + +### [1.3.0](https://github.com/magiconair/properties/tree/v1.3.0) - 18 Mar 2014 + +* Added support for time.Duration +* Made MustXXX() failure beha[ior configurable (log.Fatal, panic](https://github.com/magiconair/properties/tree/vior configurable (log.Fatal, panic) - custom) +* Changed default of MustXXX() failure from panic to log.Fatal + +### [1.2.0](https://github.com/magiconair/properties/tree/v1.2.0) - 05 Mar 2014 + +* Added MustGet... 
functions +* Added support for int and uint with range checks on 32 bit platforms + +### [1.1.0](https://github.com/magiconair/properties/tree/v1.1.0) - 20 Jan 2014 + +* Renamed from goproperties to properties +* Added support for expansion of environment vars in + filenames and value expressions +* Fixed bug where value expressions were not at the + start of the string + +### [1.0.0](https://github.com/magiconair/properties/tree/v1.0.0) - 7 Jan 2014 + +* Initial release diff --git a/vendor/github.com/magiconair/properties/LICENSE.md b/vendor/github.com/magiconair/properties/LICENSE.md new file mode 100644 index 000000000..79c87e3e6 --- /dev/null +++ b/vendor/github.com/magiconair/properties/LICENSE.md @@ -0,0 +1,24 @@ +Copyright (c) 2013-2020, Frank Schroeder + +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/github.com/magiconair/properties/README.md b/vendor/github.com/magiconair/properties/README.md new file mode 100644 index 000000000..e2edda025 --- /dev/null +++ b/vendor/github.com/magiconair/properties/README.md @@ -0,0 +1,128 @@ +[![](https://img.shields.io/github/tag/magiconair/properties.svg?style=flat-square&label=release)](https://github.com/magiconair/properties/releases) +[![Travis CI Status](https://img.shields.io/travis/magiconair/properties.svg?branch=master&style=flat-square&label=travis)](https://travis-ci.org/magiconair/properties) +[![License](https://img.shields.io/badge/License-BSD%202--Clause-orange.svg?style=flat-square)](https://raw.githubusercontent.com/magiconair/properties/master/LICENSE) +[![GoDoc](http://img.shields.io/badge/godoc-reference-5272B4.svg?style=flat-square)](http://godoc.org/github.com/magiconair/properties) + +# Overview + +#### Please run `git pull --tags` to update the tags. See [below](#updated-git-tags) why. + +properties is a Go library for reading and writing properties files. + +It supports reading from multiple files or URLs and Spring style recursive +property expansion of expressions like `${key}` to their corresponding value. +Value expressions can refer to other keys like in `${key}` or to environment +variables like in `${USER}`. Filenames can also contain environment variables +like in `/home/${USER}/myapp.properties`. 
+ +Properties can be decoded into structs, maps, arrays and values through +struct tags. + +Comments and the order of keys are preserved. Comments can be modified +and can be written to the output. + +The properties library supports both ISO-8859-1 and UTF-8 encoded data. + +Starting from version 1.3.0 the behavior of the MustXXX() functions is +configurable by providing a custom `ErrorHandler` function. The default has +changed from `panic` to `log.Fatal` but this is configurable and custom +error handling functions can be provided. See the package documentation for +details. + +Read the full documentation on [![GoDoc](http://img.shields.io/badge/godoc-reference-5272B4.svg?style=flat-square)](http://godoc.org/github.com/magiconair/properties) + +## Getting Started + +```go +import ( + "flag" + "github.com/magiconair/properties" +) + +func main() { + // init from a file + p := properties.MustLoadFile("${HOME}/config.properties", properties.UTF8) + + // or multiple files + p = properties.MustLoadFiles([]string{ + "${HOME}/config.properties", + "${HOME}/config-${USER}.properties", + }, properties.UTF8, true) + + // or from a map + p = properties.LoadMap(map[string]string{"key": "value", "abc": "def"}) + + // or from a string + p = properties.MustLoadString("key=value\nabc=def") + + // or from a URL + p = properties.MustLoadURL("http://host/path") + + // or from multiple URLs + p = properties.MustLoadURL([]string{ + "http://host/config", + "http://host/config-${USER}", + }, true) + + // or from flags + p.MustFlag(flag.CommandLine) + + // get values through getters + host := p.MustGetString("host") + port := p.GetInt("port", 8080) + + // or through Decode + type Config struct { + Host string `properties:"host"` + Port int `properties:"port,default=9000"` + Accept []string `properties:"accept,default=image/png;image;gif"` + Timeout time.Duration `properties:"timeout,default=5s"` + } + var cfg Config + if err := p.Decode(&cfg); err != nil { + log.Fatal(err) + } +} + +``` + +## Installation and Upgrade + +``` +$ go get -u github.com/magiconair/properties +``` + +## License + +2 clause BSD license. See [LICENSE](https://github.com/magiconair/properties/blob/master/LICENSE) file for details. + +## ToDo + +* Dump contents with passwords and secrets obscured + +## Updated Git tags + +#### 13 Feb 2018 + +I realized that all of the git tags I had pushed before v1.7.5 were lightweight tags +and I've only recently learned that this doesn't play well with `git describe` 😞 + +I have replaced all lightweight tags with signed tags using this script which should +retain the commit date, name and email address. Please run `git pull --tags` to update them. + +Worst case you have to reclone the repo. + +```shell +#!/bin/bash +tag=$1 +echo "Updating $tag" +date=$(git show ${tag}^0 --format=%aD | head -1) +email=$(git show ${tag}^0 --format=%aE | head -1) +name=$(git show ${tag}^0 --format=%aN | head -1) +GIT_COMMITTER_DATE="$date" GIT_COMMITTER_NAME="$name" GIT_COMMITTER_EMAIL="$email" git tag -s -f ${tag} ${tag}^0 -m ${tag} +``` + +I apologize for the inconvenience. + +Frank + diff --git a/vendor/github.com/magiconair/properties/decode.go b/vendor/github.com/magiconair/properties/decode.go new file mode 100644 index 000000000..3ebf8049c --- /dev/null +++ b/vendor/github.com/magiconair/properties/decode.go @@ -0,0 +1,289 @@ +// Copyright 2018 Frank Schroeder. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package properties + +import ( + "fmt" + "reflect" + "strconv" + "strings" + "time" +) + +// Decode assigns property values to exported fields of a struct. +// +// Decode traverses v recursively and returns an error if a value cannot be +// converted to the field type or a required value is missing for a field. +// +// The following type dependent decodings are used: +// +// String, boolean, numeric fields have the value of the property key assigned. +// The property key name is the name of the field. A different key and a default +// value can be set in the field's tag. Fields without default value are +// required. If the value cannot be converted to the field type an error is +// returned. +// +// time.Duration fields have the result of time.ParseDuration() assigned. +// +// time.Time fields have the vaule of time.Parse() assigned. The default layout +// is time.RFC3339 but can be set in the field's tag. +// +// Arrays and slices of string, boolean, numeric, time.Duration and time.Time +// fields have the value interpreted as a comma separated list of values. The +// individual values are trimmed of whitespace and empty values are ignored. A +// default value can be provided as a semicolon separated list in the field's +// tag. +// +// Struct fields are decoded recursively using the field name plus "." as +// prefix. The prefix (without dot) can be overridden in the field's tag. +// Default values are not supported in the field's tag. Specify them on the +// fields of the inner struct instead. +// +// Map fields must have a key of type string and are decoded recursively by +// using the field's name plus ".' as prefix and the next element of the key +// name as map key. The prefix (without dot) can be overridden in the field's +// tag. Default values are not supported. +// +// Examples: +// +// // Field is ignored. +// Field int `properties:"-"` +// +// // Field is assigned value of 'Field'. +// Field int +// +// // Field is assigned value of 'myName'. +// Field int `properties:"myName"` +// +// // Field is assigned value of key 'myName' and has a default +// // value 15 if the key does not exist. +// Field int `properties:"myName,default=15"` +// +// // Field is assigned value of key 'Field' and has a default +// // value 15 if the key does not exist. +// Field int `properties:",default=15"` +// +// // Field is assigned value of key 'date' and the date +// // is in format 2006-01-02 +// Field time.Time `properties:"date,layout=2006-01-02"` +// +// // Field is assigned the non-empty and whitespace trimmed +// // values of key 'Field' split by commas. +// Field []string +// +// // Field is assigned the non-empty and whitespace trimmed +// // values of key 'Field' split by commas and has a default +// // value ["a", "b", "c"] if the key does not exist. +// Field []string `properties:",default=a;b;c"` +// +// // Field is decoded recursively with "Field." as key prefix. +// Field SomeStruct +// +// // Field is decoded recursively with "myName." as key prefix. +// Field SomeStruct `properties:"myName"` +// +// // Field is decoded recursively with "Field." as key prefix +// // and the next dotted element of the key as map key. +// Field map[string]string +// +// // Field is decoded recursively with "myName." as key prefix +// // and the next dotted element of the key as map key. 
+// Field map[string]string `properties:"myName"` +func (p *Properties) Decode(x interface{}) error { + t, v := reflect.TypeOf(x), reflect.ValueOf(x) + if t.Kind() != reflect.Ptr || v.Elem().Type().Kind() != reflect.Struct { + return fmt.Errorf("not a pointer to struct: %s", t) + } + if err := dec(p, "", nil, nil, v); err != nil { + return err + } + return nil +} + +func dec(p *Properties, key string, def *string, opts map[string]string, v reflect.Value) error { + t := v.Type() + + // value returns the property value for key or the default if provided. + value := func() (string, error) { + if val, ok := p.Get(key); ok { + return val, nil + } + if def != nil { + return *def, nil + } + return "", fmt.Errorf("missing required key %s", key) + } + + // conv converts a string to a value of the given type. + conv := func(s string, t reflect.Type) (val reflect.Value, err error) { + var v interface{} + + switch { + case isDuration(t): + v, err = time.ParseDuration(s) + + case isTime(t): + layout := opts["layout"] + if layout == "" { + layout = time.RFC3339 + } + v, err = time.Parse(layout, s) + + case isBool(t): + v, err = boolVal(s), nil + + case isString(t): + v, err = s, nil + + case isFloat(t): + v, err = strconv.ParseFloat(s, 64) + + case isInt(t): + v, err = strconv.ParseInt(s, 10, 64) + + case isUint(t): + v, err = strconv.ParseUint(s, 10, 64) + + default: + return reflect.Zero(t), fmt.Errorf("unsupported type %s", t) + } + if err != nil { + return reflect.Zero(t), err + } + return reflect.ValueOf(v).Convert(t), nil + } + + // keydef returns the property key and the default value based on the + // name of the struct field and the options in the tag. + keydef := func(f reflect.StructField) (string, *string, map[string]string) { + _key, _opts := parseTag(f.Tag.Get("properties")) + + var _def *string + if d, ok := _opts["default"]; ok { + _def = &d + } + if _key != "" { + return _key, _def, _opts + } + return f.Name, _def, _opts + } + + switch { + case isDuration(t) || isTime(t) || isBool(t) || isString(t) || isFloat(t) || isInt(t) || isUint(t): + s, err := value() + if err != nil { + return err + } + val, err := conv(s, t) + if err != nil { + return err + } + v.Set(val) + + case isPtr(t): + return dec(p, key, def, opts, v.Elem()) + + case isStruct(t): + for i := 0; i < v.NumField(); i++ { + fv := v.Field(i) + fk, def, opts := keydef(t.Field(i)) + if !fv.CanSet() { + return fmt.Errorf("cannot set %s", t.Field(i).Name) + } + if fk == "-" { + continue + } + if key != "" { + fk = key + "." 
+ fk + } + if err := dec(p, fk, def, opts, fv); err != nil { + return err + } + } + return nil + + case isArray(t): + val, err := value() + if err != nil { + return err + } + vals := split(val, ";") + a := reflect.MakeSlice(t, 0, len(vals)) + for _, s := range vals { + val, err := conv(s, t.Elem()) + if err != nil { + return err + } + a = reflect.Append(a, val) + } + v.Set(a) + + case isMap(t): + valT := t.Elem() + m := reflect.MakeMap(t) + for postfix := range p.FilterStripPrefix(key + ".").m { + pp := strings.SplitN(postfix, ".", 2) + mk, mv := pp[0], reflect.New(valT) + if err := dec(p, key+"."+mk, nil, nil, mv); err != nil { + return err + } + m.SetMapIndex(reflect.ValueOf(mk), mv.Elem()) + } + v.Set(m) + + default: + return fmt.Errorf("unsupported type %s", t) + } + return nil +} + +// split splits a string on sep, trims whitespace of elements +// and omits empty elements +func split(s string, sep string) []string { + var a []string + for _, v := range strings.Split(s, sep) { + if v = strings.TrimSpace(v); v != "" { + a = append(a, v) + } + } + return a +} + +// parseTag parses a "key,k=v,k=v,..." +func parseTag(tag string) (key string, opts map[string]string) { + opts = map[string]string{} + for i, s := range strings.Split(tag, ",") { + if i == 0 { + key = s + continue + } + + pp := strings.SplitN(s, "=", 2) + if len(pp) == 1 { + opts[pp[0]] = "" + } else { + opts[pp[0]] = pp[1] + } + } + return key, opts +} + +func isArray(t reflect.Type) bool { return t.Kind() == reflect.Array || t.Kind() == reflect.Slice } +func isBool(t reflect.Type) bool { return t.Kind() == reflect.Bool } +func isDuration(t reflect.Type) bool { return t == reflect.TypeOf(time.Second) } +func isMap(t reflect.Type) bool { return t.Kind() == reflect.Map } +func isPtr(t reflect.Type) bool { return t.Kind() == reflect.Ptr } +func isString(t reflect.Type) bool { return t.Kind() == reflect.String } +func isStruct(t reflect.Type) bool { return t.Kind() == reflect.Struct } +func isTime(t reflect.Type) bool { return t == reflect.TypeOf(time.Time{}) } +func isFloat(t reflect.Type) bool { + return t.Kind() == reflect.Float32 || t.Kind() == reflect.Float64 +} +func isInt(t reflect.Type) bool { + return t.Kind() == reflect.Int || t.Kind() == reflect.Int8 || t.Kind() == reflect.Int16 || t.Kind() == reflect.Int32 || t.Kind() == reflect.Int64 +} +func isUint(t reflect.Type) bool { + return t.Kind() == reflect.Uint || t.Kind() == reflect.Uint8 || t.Kind() == reflect.Uint16 || t.Kind() == reflect.Uint32 || t.Kind() == reflect.Uint64 +} diff --git a/vendor/github.com/magiconair/properties/doc.go b/vendor/github.com/magiconair/properties/doc.go new file mode 100644 index 000000000..f8822da2b --- /dev/null +++ b/vendor/github.com/magiconair/properties/doc.go @@ -0,0 +1,156 @@ +// Copyright 2018 Frank Schroeder. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package properties provides functions for reading and writing +// ISO-8859-1 and UTF-8 encoded .properties files and has +// support for recursive property expansion. +// +// Java properties files are ISO-8859-1 encoded and use Unicode +// literals for characters outside the ISO character set. Unicode +// literals can be used in UTF-8 encoded properties files but +// aren't necessary. 
+// +// To load a single properties file use MustLoadFile(): +// +// p := properties.MustLoadFile(filename, properties.UTF8) +// +// To load multiple properties files use MustLoadFiles() +// which loads the files in the given order and merges the +// result. Missing properties files can be ignored if the +// 'ignoreMissing' flag is set to true. +// +// Filenames can contain environment variables which are expanded +// before loading. +// +// f1 := "/etc/myapp/myapp.conf" +// f2 := "/home/${USER}/myapp.conf" +// p := MustLoadFiles([]string{f1, f2}, properties.UTF8, true) +// +// All of the different key/value delimiters ' ', ':' and '=' are +// supported as well as the comment characters '!' and '#' and +// multi-line values. +// +// ! this is a comment +// # and so is this +// +// # the following expressions are equal +// key value +// key=value +// key:value +// key = value +// key : value +// key = val\ +// ue +// +// Properties stores all comments preceding a key and provides +// GetComments() and SetComments() methods to retrieve and +// update them. The convenience functions GetComment() and +// SetComment() allow access to the last comment. The +// WriteComment() method writes properties files including +// the comments and with the keys in the original order. +// This can be used for sanitizing properties files. +// +// Property expansion is recursive and circular references +// and malformed expressions are not allowed and cause an +// error. Expansion of environment variables is supported. +// +// # standard property +// key = value +// +// # property expansion: key2 = value +// key2 = ${key} +// +// # recursive expansion: key3 = value +// key3 = ${key2} +// +// # circular reference (error) +// key = ${key} +// +// # malformed expression (error) +// key = ${ke +// +// # refers to the users' home dir +// home = ${HOME} +// +// # local key takes precedence over env var: u = foo +// USER = foo +// u = ${USER} +// +// The default property expansion format is ${key} but can be +// changed by setting different pre- and postfix values on the +// Properties object. +// +// p := properties.NewProperties() +// p.Prefix = "#[" +// p.Postfix = "]#" +// +// Properties provides convenience functions for getting typed +// values with default values if the key does not exist or the +// type conversion failed. +// +// # Returns true if the value is either "1", "on", "yes" or "true" +// # Returns false for every other value and the default value if +// # the key does not exist. +// v = p.GetBool("key", false) +// +// # Returns the value if the key exists and the format conversion +// # was successful. Otherwise, the default value is returned. +// v = p.GetInt64("key", 999) +// v = p.GetUint64("key", 999) +// v = p.GetFloat64("key", 123.0) +// v = p.GetString("key", "def") +// v = p.GetDuration("key", 999) +// +// As an alternative properties may be applied with the standard +// library's flag implementation at any time. +// +// # Standard configuration +// v = flag.Int("key", 999, "help message") +// flag.Parse() +// +// # Merge p into the flag set +// p.MustFlag(flag.CommandLine) +// +// Properties provides several MustXXX() convenience functions +// which will terminate the app if an error occurs. The behavior +// of the failure is configurable and the default is to call +// log.Fatal(err). To have the MustXXX() functions panic instead +// of logging the error set a different ErrorHandler before +// you use the Properties package. 
+// +// properties.ErrorHandler = properties.PanicHandler +// +// # Will panic instead of logging an error +// p := properties.MustLoadFile("config.properties") +// +// You can also provide your own ErrorHandler function. The only requirement +// is that the error handler function must exit after handling the error. +// +// properties.ErrorHandler = func(err error) { +// fmt.Println(err) +// os.Exit(1) +// } +// +// # Will write to stdout and then exit +// p := properties.MustLoadFile("config.properties") +// +// Properties can also be loaded into a struct via the `Decode` +// method, e.g. +// +// type S struct { +// A string `properties:"a,default=foo"` +// D time.Duration `properties:"timeout,default=5s"` +// E time.Time `properties:"expires,layout=2006-01-02,default=2015-01-01"` +// } +// +// See `Decode()` method for the full documentation. +// +// The following documents provide a description of the properties +// file format. +// +// http://en.wikipedia.org/wiki/.properties +// +// http://docs.oracle.com/javase/7/docs/api/java/util/Properties.html#load%28java.io.Reader%29 +// +package properties diff --git a/vendor/github.com/magiconair/properties/go.mod b/vendor/github.com/magiconair/properties/go.mod new file mode 100644 index 000000000..4ff090bdc --- /dev/null +++ b/vendor/github.com/magiconair/properties/go.mod @@ -0,0 +1,3 @@ +module github.com/magiconair/properties + +go 1.13 diff --git a/vendor/github.com/magiconair/properties/integrate.go b/vendor/github.com/magiconair/properties/integrate.go new file mode 100644 index 000000000..74d38dc67 --- /dev/null +++ b/vendor/github.com/magiconair/properties/integrate.go @@ -0,0 +1,34 @@ +// Copyright 2018 Frank Schroeder. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package properties + +import "flag" + +// MustFlag sets flags that are skipped by dst.Parse when p contains +// the respective key for flag.Flag.Name. +// +// It's use is recommended with command line arguments as in: +// flag.Parse() +// p.MustFlag(flag.CommandLine) +func (p *Properties) MustFlag(dst *flag.FlagSet) { + m := make(map[string]*flag.Flag) + dst.VisitAll(func(f *flag.Flag) { + m[f.Name] = f + }) + dst.Visit(func(f *flag.Flag) { + delete(m, f.Name) // overridden + }) + + for name, f := range m { + v, ok := p.Get(name) + if !ok { + continue + } + + if err := f.Value.Set(v); err != nil { + ErrorHandler(err) + } + } +} diff --git a/vendor/github.com/magiconair/properties/lex.go b/vendor/github.com/magiconair/properties/lex.go new file mode 100644 index 000000000..367166d58 --- /dev/null +++ b/vendor/github.com/magiconair/properties/lex.go @@ -0,0 +1,407 @@ +// Copyright 2018 Frank Schroeder. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. +// +// Parts of the lexer are from the template/text/parser package +// For these parts the following applies: +// +// Copyright 2011 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file of the go 1.2 +// distribution. + +package properties + +import ( + "fmt" + "strconv" + "strings" + "unicode/utf8" +) + +// item represents a token or text string returned from the scanner. +type item struct { + typ itemType // The type of this item. + pos int // The starting position, in bytes, of this item in the input string. + val string // The value of this item. 
+} + +func (i item) String() string { + switch { + case i.typ == itemEOF: + return "EOF" + case i.typ == itemError: + return i.val + case len(i.val) > 10: + return fmt.Sprintf("%.10q...", i.val) + } + return fmt.Sprintf("%q", i.val) +} + +// itemType identifies the type of lex items. +type itemType int + +const ( + itemError itemType = iota // error occurred; value is text of error + itemEOF + itemKey // a key + itemValue // a value + itemComment // a comment +) + +// defines a constant for EOF +const eof = -1 + +// permitted whitespace characters space, FF and TAB +const whitespace = " \f\t" + +// stateFn represents the state of the scanner as a function that returns the next state. +type stateFn func(*lexer) stateFn + +// lexer holds the state of the scanner. +type lexer struct { + input string // the string being scanned + state stateFn // the next lexing function to enter + pos int // current position in the input + start int // start position of this item + width int // width of last rune read from input + lastPos int // position of most recent item returned by nextItem + runes []rune // scanned runes for this item + items chan item // channel of scanned items +} + +// next returns the next rune in the input. +func (l *lexer) next() rune { + if l.pos >= len(l.input) { + l.width = 0 + return eof + } + r, w := utf8.DecodeRuneInString(l.input[l.pos:]) + l.width = w + l.pos += l.width + return r +} + +// peek returns but does not consume the next rune in the input. +func (l *lexer) peek() rune { + r := l.next() + l.backup() + return r +} + +// backup steps back one rune. Can only be called once per call of next. +func (l *lexer) backup() { + l.pos -= l.width +} + +// emit passes an item back to the client. +func (l *lexer) emit(t itemType) { + i := item{t, l.start, string(l.runes)} + l.items <- i + l.start = l.pos + l.runes = l.runes[:0] +} + +// ignore skips over the pending input before this point. +func (l *lexer) ignore() { + l.start = l.pos +} + +// appends the rune to the current value +func (l *lexer) appendRune(r rune) { + l.runes = append(l.runes, r) +} + +// accept consumes the next rune if it's from the valid set. +func (l *lexer) accept(valid string) bool { + if strings.ContainsRune(valid, l.next()) { + return true + } + l.backup() + return false +} + +// acceptRun consumes a run of runes from the valid set. +func (l *lexer) acceptRun(valid string) { + for strings.ContainsRune(valid, l.next()) { + } + l.backup() +} + +// acceptRunUntil consumes a run of runes up to a terminator. +func (l *lexer) acceptRunUntil(term rune) { + for term != l.next() { + } + l.backup() +} + +// hasText returns true if the current parsed text is not empty. +func (l *lexer) isNotEmpty() bool { + return l.pos > l.start +} + +// lineNumber reports which line we're on, based on the position of +// the previous item returned by nextItem. Doing it this way +// means we don't have to worry about peek double counting. +func (l *lexer) lineNumber() int { + return 1 + strings.Count(l.input[:l.lastPos], "\n") +} + +// errorf returns an error token and terminates the scan by passing +// back a nil pointer that will be the next state, terminating l.nextItem. +func (l *lexer) errorf(format string, args ...interface{}) stateFn { + l.items <- item{itemError, l.start, fmt.Sprintf(format, args...)} + return nil +} + +// nextItem returns the next item from the input. +func (l *lexer) nextItem() item { + i := <-l.items + l.lastPos = i.pos + return i +} + +// lex creates a new scanner for the input string. 
+func lex(input string) *lexer { + l := &lexer{ + input: input, + items: make(chan item), + runes: make([]rune, 0, 32), + } + go l.run() + return l +} + +// run runs the state machine for the lexer. +func (l *lexer) run() { + for l.state = lexBeforeKey(l); l.state != nil; { + l.state = l.state(l) + } +} + +// state functions + +// lexBeforeKey scans until a key begins. +func lexBeforeKey(l *lexer) stateFn { + switch r := l.next(); { + case isEOF(r): + l.emit(itemEOF) + return nil + + case isEOL(r): + l.ignore() + return lexBeforeKey + + case isComment(r): + return lexComment + + case isWhitespace(r): + l.ignore() + return lexBeforeKey + + default: + l.backup() + return lexKey + } +} + +// lexComment scans a comment line. The comment character has already been scanned. +func lexComment(l *lexer) stateFn { + l.acceptRun(whitespace) + l.ignore() + for { + switch r := l.next(); { + case isEOF(r): + l.ignore() + l.emit(itemEOF) + return nil + case isEOL(r): + l.emit(itemComment) + return lexBeforeKey + default: + l.appendRune(r) + } + } +} + +// lexKey scans the key up to a delimiter +func lexKey(l *lexer) stateFn { + var r rune + +Loop: + for { + switch r = l.next(); { + + case isEscape(r): + err := l.scanEscapeSequence() + if err != nil { + return l.errorf(err.Error()) + } + + case isEndOfKey(r): + l.backup() + break Loop + + case isEOF(r): + break Loop + + default: + l.appendRune(r) + } + } + + if len(l.runes) > 0 { + l.emit(itemKey) + } + + if isEOF(r) { + l.emit(itemEOF) + return nil + } + + return lexBeforeValue +} + +// lexBeforeValue scans the delimiter between key and value. +// Leading and trailing whitespace is ignored. +// We expect to be just after the key. +func lexBeforeValue(l *lexer) stateFn { + l.acceptRun(whitespace) + l.accept(":=") + l.acceptRun(whitespace) + l.ignore() + return lexValue +} + +// lexValue scans text until the end of the line. We expect to be just after the delimiter. +func lexValue(l *lexer) stateFn { + for { + switch r := l.next(); { + case isEscape(r): + if isEOL(l.peek()) { + l.next() + l.acceptRun(whitespace) + } else { + err := l.scanEscapeSequence() + if err != nil { + return l.errorf(err.Error()) + } + } + + case isEOL(r): + l.emit(itemValue) + l.ignore() + return lexBeforeKey + + case isEOF(r): + l.emit(itemValue) + l.emit(itemEOF) + return nil + + default: + l.appendRune(r) + } + } +} + +// scanEscapeSequence scans either one of the escaped characters +// or a unicode literal. We expect to be after the escape character. +func (l *lexer) scanEscapeSequence() error { + switch r := l.next(); { + + case isEscapedCharacter(r): + l.appendRune(decodeEscapedCharacter(r)) + return nil + + case atUnicodeLiteral(r): + return l.scanUnicodeLiteral() + + case isEOF(r): + return fmt.Errorf("premature EOF") + + // silently drop the escape character and append the rune as is + default: + l.appendRune(r) + return nil + } +} + +// scans a unicode literal in the form \uXXXX. We expect to be after the \u. +func (l *lexer) scanUnicodeLiteral() error { + // scan the digits + d := make([]rune, 4) + for i := 0; i < 4; i++ { + d[i] = l.next() + if d[i] == eof || !strings.ContainsRune("0123456789abcdefABCDEF", d[i]) { + return fmt.Errorf("invalid unicode literal") + } + } + + // decode the digits into a rune + r, err := strconv.ParseInt(string(d), 16, 0) + if err != nil { + return err + } + + l.appendRune(rune(r)) + return nil +} + +// decodeEscapedCharacter returns the unescaped rune. We expect to be after the escape character. 
+func decodeEscapedCharacter(r rune) rune { + switch r { + case 'f': + return '\f' + case 'n': + return '\n' + case 'r': + return '\r' + case 't': + return '\t' + default: + return r + } +} + +// atUnicodeLiteral reports whether we are at a unicode literal. +// The escape character has already been consumed. +func atUnicodeLiteral(r rune) bool { + return r == 'u' +} + +// isComment reports whether we are at the start of a comment. +func isComment(r rune) bool { + return r == '#' || r == '!' +} + +// isEndOfKey reports whether the rune terminates the current key. +func isEndOfKey(r rune) bool { + return strings.ContainsRune(" \f\t\r\n:=", r) +} + +// isEOF reports whether we are at EOF. +func isEOF(r rune) bool { + return r == eof +} + +// isEOL reports whether we are at a new line character. +func isEOL(r rune) bool { + return r == '\n' || r == '\r' +} + +// isEscape reports whether the rune is the escape character which +// prefixes unicode literals and other escaped characters. +func isEscape(r rune) bool { + return r == '\\' +} + +// isEscapedCharacter reports whether we are at one of the characters that need escaping. +// The escape character has already been consumed. +func isEscapedCharacter(r rune) bool { + return strings.ContainsRune(" :=fnrt", r) +} + +// isWhitespace reports whether the rune is a whitespace character. +func isWhitespace(r rune) bool { + return strings.ContainsRune(whitespace, r) +} diff --git a/vendor/github.com/magiconair/properties/load.go b/vendor/github.com/magiconair/properties/load.go new file mode 100644 index 000000000..c83c2dadd --- /dev/null +++ b/vendor/github.com/magiconair/properties/load.go @@ -0,0 +1,293 @@ +// Copyright 2018 Frank Schroeder. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package properties + +import ( + "fmt" + "io/ioutil" + "net/http" + "os" + "strings" +) + +// Encoding specifies encoding of the input data. +type Encoding uint + +const ( + // utf8Default is a private placeholder for the zero value of Encoding to + // ensure that it has the correct meaning. UTF8 is the default encoding but + // was assigned a non-zero value which cannot be changed without breaking + // existing code. Clients should continue to use the public constants. + utf8Default Encoding = iota + + // UTF8 interprets the input data as UTF-8. + UTF8 + + // ISO_8859_1 interprets the input data as ISO-8859-1. + ISO_8859_1 +) + +type Loader struct { + // Encoding determines how the data from files and byte buffers + // is interpreted. For URLs the Content-Type header is used + // to determine the encoding of the data. + Encoding Encoding + + // DisableExpansion configures the property expansion of the + // returned property object. When set to true, the property values + // will not be expanded and the Property object will not be checked + // for invalid expansion expressions. + DisableExpansion bool + + // IgnoreMissing configures whether missing files or URLs which return + // 404 are reported as errors. When set to true, missing files and 404 + // status codes are not reported as errors. + IgnoreMissing bool +} + +// Load reads a buffer into a Properties struct. +func (l *Loader) LoadBytes(buf []byte) (*Properties, error) { + return l.loadBytes(buf, l.Encoding) +} + +// LoadAll reads the content of multiple URLs or files in the given order into +// a Properties struct. If IgnoreMissing is true then a 404 status code or +// missing file will not be reported as error. 
Encoding sets the encoding for +// files. For the URLs see LoadURL for the Content-Type header and the +// encoding. +func (l *Loader) LoadAll(names []string) (*Properties, error) { + all := NewProperties() + for _, name := range names { + n, err := expandName(name) + if err != nil { + return nil, err + } + + var p *Properties + switch { + case strings.HasPrefix(n, "http://"): + p, err = l.LoadURL(n) + case strings.HasPrefix(n, "https://"): + p, err = l.LoadURL(n) + default: + p, err = l.LoadFile(n) + } + if err != nil { + return nil, err + } + all.Merge(p) + } + + all.DisableExpansion = l.DisableExpansion + if all.DisableExpansion { + return all, nil + } + return all, all.check() +} + +// LoadFile reads a file into a Properties struct. +// If IgnoreMissing is true then a missing file will not be +// reported as error. +func (l *Loader) LoadFile(filename string) (*Properties, error) { + data, err := ioutil.ReadFile(filename) + if err != nil { + if l.IgnoreMissing && os.IsNotExist(err) { + LogPrintf("properties: %s not found. skipping", filename) + return NewProperties(), nil + } + return nil, err + } + return l.loadBytes(data, l.Encoding) +} + +// LoadURL reads the content of the URL into a Properties struct. +// +// The encoding is determined via the Content-Type header which +// should be set to 'text/plain'. If the 'charset' parameter is +// missing, 'iso-8859-1' or 'latin1' the encoding is set to +// ISO-8859-1. If the 'charset' parameter is set to 'utf-8' the +// encoding is set to UTF-8. A missing content type header is +// interpreted as 'text/plain; charset=utf-8'. +func (l *Loader) LoadURL(url string) (*Properties, error) { + resp, err := http.Get(url) + if err != nil { + return nil, fmt.Errorf("properties: error fetching %q. %s", url, err) + } + defer resp.Body.Close() + + if resp.StatusCode == 404 && l.IgnoreMissing { + LogPrintf("properties: %s returned %d. skipping", url, resp.StatusCode) + return NewProperties(), nil + } + + if resp.StatusCode != 200 { + return nil, fmt.Errorf("properties: %s returned %d", url, resp.StatusCode) + } + + body, err := ioutil.ReadAll(resp.Body) + if err != nil { + return nil, fmt.Errorf("properties: %s error reading response. %s", url, err) + } + + ct := resp.Header.Get("Content-Type") + ct = strings.Join(strings.Fields(ct), "") + var enc Encoding + switch strings.ToLower(ct) { + case "text/plain", "text/plain;charset=iso-8859-1", "text/plain;charset=latin1": + enc = ISO_8859_1 + case "", "text/plain;charset=utf-8": + enc = UTF8 + default: + return nil, fmt.Errorf("properties: invalid content type %s", ct) + } + + return l.loadBytes(body, enc) +} + +func (l *Loader) loadBytes(buf []byte, enc Encoding) (*Properties, error) { + p, err := parse(convert(buf, enc)) + if err != nil { + return nil, err + } + p.DisableExpansion = l.DisableExpansion + if p.DisableExpansion { + return p, nil + } + return p, p.check() +} + +// Load reads a buffer into a Properties struct. +func Load(buf []byte, enc Encoding) (*Properties, error) { + l := &Loader{Encoding: enc} + return l.LoadBytes(buf) +} + +// LoadString reads an UTF8 string into a properties struct. +func LoadString(s string) (*Properties, error) { + l := &Loader{Encoding: UTF8} + return l.LoadBytes([]byte(s)) +} + +// LoadMap creates a new Properties struct from a string map. +func LoadMap(m map[string]string) *Properties { + p := NewProperties() + for k, v := range m { + p.Set(k, v) + } + return p +} + +// LoadFile reads a file into a Properties struct. 
+func LoadFile(filename string, enc Encoding) (*Properties, error) { + l := &Loader{Encoding: enc} + return l.LoadAll([]string{filename}) +} + +// LoadFiles reads multiple files in the given order into +// a Properties struct. If 'ignoreMissing' is true then +// non-existent files will not be reported as error. +func LoadFiles(filenames []string, enc Encoding, ignoreMissing bool) (*Properties, error) { + l := &Loader{Encoding: enc, IgnoreMissing: ignoreMissing} + return l.LoadAll(filenames) +} + +// LoadURL reads the content of the URL into a Properties struct. +// See Loader#LoadURL for details. +func LoadURL(url string) (*Properties, error) { + l := &Loader{Encoding: UTF8} + return l.LoadAll([]string{url}) +} + +// LoadURLs reads the content of multiple URLs in the given order into a +// Properties struct. If IgnoreMissing is true then a 404 status code will +// not be reported as error. See Loader#LoadURL for the Content-Type header +// and the encoding. +func LoadURLs(urls []string, ignoreMissing bool) (*Properties, error) { + l := &Loader{Encoding: UTF8, IgnoreMissing: ignoreMissing} + return l.LoadAll(urls) +} + +// LoadAll reads the content of multiple URLs or files in the given order into a +// Properties struct. If 'ignoreMissing' is true then a 404 status code or missing file will +// not be reported as error. Encoding sets the encoding for files. For the URLs please see +// LoadURL for the Content-Type header and the encoding. +func LoadAll(names []string, enc Encoding, ignoreMissing bool) (*Properties, error) { + l := &Loader{Encoding: enc, IgnoreMissing: ignoreMissing} + return l.LoadAll(names) +} + +// MustLoadString reads an UTF8 string into a Properties struct and +// panics on error. +func MustLoadString(s string) *Properties { + return must(LoadString(s)) +} + +// MustLoadFile reads a file into a Properties struct and +// panics on error. +func MustLoadFile(filename string, enc Encoding) *Properties { + return must(LoadFile(filename, enc)) +} + +// MustLoadFiles reads multiple files in the given order into +// a Properties struct and panics on error. If 'ignoreMissing' +// is true then non-existent files will not be reported as error. +func MustLoadFiles(filenames []string, enc Encoding, ignoreMissing bool) *Properties { + return must(LoadFiles(filenames, enc, ignoreMissing)) +} + +// MustLoadURL reads the content of a URL into a Properties struct and +// panics on error. +func MustLoadURL(url string) *Properties { + return must(LoadURL(url)) +} + +// MustLoadURLs reads the content of multiple URLs in the given order into a +// Properties struct and panics on error. If 'ignoreMissing' is true then a 404 +// status code will not be reported as error. +func MustLoadURLs(urls []string, ignoreMissing bool) *Properties { + return must(LoadURLs(urls, ignoreMissing)) +} + +// MustLoadAll reads the content of multiple URLs or files in the given order into a +// Properties struct. If 'ignoreMissing' is true then a 404 status code or missing file will +// not be reported as error. Encoding sets the encoding for files. For the URLs please see +// LoadURL for the Content-Type header and the encoding. It panics on error. +func MustLoadAll(names []string, enc Encoding, ignoreMissing bool) *Properties { + return must(LoadAll(names, enc, ignoreMissing)) +} + +func must(p *Properties, err error) *Properties { + if err != nil { + ErrorHandler(err) + } + return p +} + +// expandName expands ${ENV_VAR} expressions in a name. 
+// If the environment variable does not exist then it will be replaced +// with an empty string. Malformed expressions like "${ENV_VAR" will +// be reported as error. +func expandName(name string) (string, error) { + return expand(name, []string{}, "${", "}", make(map[string]string)) +} + +// Interprets a byte buffer either as an ISO-8859-1 or UTF-8 encoded string. +// For ISO-8859-1 we can convert each byte straight into a rune since the +// first 256 unicode code points cover ISO-8859-1. +func convert(buf []byte, enc Encoding) string { + switch enc { + case utf8Default, UTF8: + return string(buf) + case ISO_8859_1: + runes := make([]rune, len(buf)) + for i, b := range buf { + runes[i] = rune(b) + } + return string(runes) + default: + ErrorHandler(fmt.Errorf("unsupported encoding %v", enc)) + } + panic("ErrorHandler should exit") +} diff --git a/vendor/github.com/magiconair/properties/parser.go b/vendor/github.com/magiconair/properties/parser.go new file mode 100644 index 000000000..cdc4a8034 --- /dev/null +++ b/vendor/github.com/magiconair/properties/parser.go @@ -0,0 +1,95 @@ +// Copyright 2018 Frank Schroeder. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package properties + +import ( + "fmt" + "runtime" +) + +type parser struct { + lex *lexer +} + +func parse(input string) (properties *Properties, err error) { + p := &parser{lex: lex(input)} + defer p.recover(&err) + + properties = NewProperties() + key := "" + comments := []string{} + + for { + token := p.expectOneOf(itemComment, itemKey, itemEOF) + switch token.typ { + case itemEOF: + goto done + case itemComment: + comments = append(comments, token.val) + continue + case itemKey: + key = token.val + if _, ok := properties.m[key]; !ok { + properties.k = append(properties.k, key) + } + } + + token = p.expectOneOf(itemValue, itemEOF) + if len(comments) > 0 { + properties.c[key] = comments + comments = []string{} + } + switch token.typ { + case itemEOF: + properties.m[key] = "" + goto done + case itemValue: + properties.m[key] = token.val + } + } + +done: + return properties, nil +} + +func (p *parser) errorf(format string, args ...interface{}) { + format = fmt.Sprintf("properties: Line %d: %s", p.lex.lineNumber(), format) + panic(fmt.Errorf(format, args...)) +} + +func (p *parser) expect(expected itemType) (token item) { + token = p.lex.nextItem() + if token.typ != expected { + p.unexpected(token) + } + return token +} + +func (p *parser) expectOneOf(expected ...itemType) (token item) { + token = p.lex.nextItem() + for _, v := range expected { + if token.typ == v { + return token + } + } + p.unexpected(token) + panic("unexpected token") +} + +func (p *parser) unexpected(token item) { + p.errorf(token.String()) +} + +// recover is the handler that turns panics into returns from the top level of Parse. +func (p *parser) recover(errp *error) { + e := recover() + if e != nil { + if _, ok := e.(runtime.Error); ok { + panic(e) + } + *errp = e.(error) + } + return +} diff --git a/vendor/github.com/magiconair/properties/properties.go b/vendor/github.com/magiconair/properties/properties.go new file mode 100644 index 000000000..1529e7223 --- /dev/null +++ b/vendor/github.com/magiconair/properties/properties.go @@ -0,0 +1,854 @@ +// Copyright 2018 Frank Schroeder. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package properties + +// BUG(frank): Set() does not check for invalid unicode literals since this is currently handled by the lexer. +// BUG(frank): Write() does not allow to configure the newline character. Therefore, on Windows LF is used. + +import ( + "bytes" + "fmt" + "io" + "log" + "os" + "regexp" + "sort" + "strconv" + "strings" + "time" + "unicode/utf8" +) + +const maxExpansionDepth = 64 + +// ErrorHandlerFunc defines the type of function which handles failures +// of the MustXXX() functions. An error handler function must exit +// the application after handling the error. +type ErrorHandlerFunc func(error) + +// ErrorHandler is the function which handles failures of the MustXXX() +// functions. The default is LogFatalHandler. +var ErrorHandler ErrorHandlerFunc = LogFatalHandler + +// LogHandlerFunc defines the function prototype for logging errors. +type LogHandlerFunc func(fmt string, args ...interface{}) + +// LogPrintf defines a log handler which uses log.Printf. +var LogPrintf LogHandlerFunc = log.Printf + +// LogFatalHandler handles the error by logging a fatal error and exiting. +func LogFatalHandler(err error) { + log.Fatal(err) +} + +// PanicHandler handles the error by panicking. +func PanicHandler(err error) { + panic(err) +} + +// ----------------------------------------------------------------------------- + +// A Properties contains the key/value pairs from the properties input. +// All values are stored in unexpanded form and are expanded at runtime +type Properties struct { + // Pre-/Postfix for property expansion. + Prefix string + Postfix string + + // DisableExpansion controls the expansion of properties on Get() + // and the check for circular references on Set(). When set to + // true Properties behaves like a simple key/value store and does + // not check for circular references on Get() or on Set(). + DisableExpansion bool + + // Stores the key/value pairs + m map[string]string + + // Stores the comments per key. + c map[string][]string + + // Stores the keys in order of appearance. + k []string + + // WriteSeparator specifies the separator of key and value while writing the properties. + WriteSeparator string +} + +// NewProperties creates a new Properties struct with the default +// configuration for "${key}" expressions. +func NewProperties() *Properties { + return &Properties{ + Prefix: "${", + Postfix: "}", + m: map[string]string{}, + c: map[string][]string{}, + k: []string{}, + } +} + +// Load reads a buffer into the given Properties struct. +func (p *Properties) Load(buf []byte, enc Encoding) error { + l := &Loader{Encoding: enc, DisableExpansion: p.DisableExpansion} + newProperties, err := l.LoadBytes(buf) + if err != nil { + return err + } + p.Merge(newProperties) + return nil +} + +// Get returns the expanded value for the given key if exists. +// Otherwise, ok is false. +func (p *Properties) Get(key string) (value string, ok bool) { + v, ok := p.m[key] + if p.DisableExpansion { + return v, ok + } + if !ok { + return "", false + } + + expanded, err := p.expand(key, v) + + // we guarantee that the expanded value is free of + // circular references and malformed expressions + // so we panic if we still get an error here. + if err != nil { + ErrorHandler(err) + } + + return expanded, true +} + +// MustGet returns the expanded value for the given key if exists. +// Otherwise, it panics. 
+func (p *Properties) MustGet(key string) string { + if v, ok := p.Get(key); ok { + return v + } + ErrorHandler(invalidKeyError(key)) + panic("ErrorHandler should exit") +} + +// ---------------------------------------------------------------------------- + +// ClearComments removes the comments for all keys. +func (p *Properties) ClearComments() { + p.c = map[string][]string{} +} + +// ---------------------------------------------------------------------------- + +// GetComment returns the last comment before the given key or an empty string. +func (p *Properties) GetComment(key string) string { + comments, ok := p.c[key] + if !ok || len(comments) == 0 { + return "" + } + return comments[len(comments)-1] +} + +// ---------------------------------------------------------------------------- + +// GetComments returns all comments that appeared before the given key or nil. +func (p *Properties) GetComments(key string) []string { + if comments, ok := p.c[key]; ok { + return comments + } + return nil +} + +// ---------------------------------------------------------------------------- + +// SetComment sets the comment for the key. +func (p *Properties) SetComment(key, comment string) { + p.c[key] = []string{comment} +} + +// ---------------------------------------------------------------------------- + +// SetComments sets the comments for the key. If the comments are nil then +// all comments for this key are deleted. +func (p *Properties) SetComments(key string, comments []string) { + if comments == nil { + delete(p.c, key) + return + } + p.c[key] = comments +} + +// ---------------------------------------------------------------------------- + +// GetBool checks if the expanded value is one of '1', 'yes', +// 'true' or 'on' if the key exists. The comparison is case-insensitive. +// If the key does not exist the default value is returned. +func (p *Properties) GetBool(key string, def bool) bool { + v, err := p.getBool(key) + if err != nil { + return def + } + return v +} + +// MustGetBool checks if the expanded value is one of '1', 'yes', +// 'true' or 'on' if the key exists. The comparison is case-insensitive. +// If the key does not exist the function panics. +func (p *Properties) MustGetBool(key string) bool { + v, err := p.getBool(key) + if err != nil { + ErrorHandler(err) + } + return v +} + +func (p *Properties) getBool(key string) (value bool, err error) { + if v, ok := p.Get(key); ok { + return boolVal(v), nil + } + return false, invalidKeyError(key) +} + +func boolVal(v string) bool { + v = strings.ToLower(v) + return v == "1" || v == "true" || v == "yes" || v == "on" +} + +// ---------------------------------------------------------------------------- + +// GetDuration parses the expanded value as an time.Duration (in ns) if the +// key exists. If key does not exist or the value cannot be parsed the default +// value is returned. In almost all cases you want to use GetParsedDuration(). +func (p *Properties) GetDuration(key string, def time.Duration) time.Duration { + v, err := p.getInt64(key) + if err != nil { + return def + } + return time.Duration(v) +} + +// MustGetDuration parses the expanded value as an time.Duration (in ns) if +// the key exists. If key does not exist or the value cannot be parsed the +// function panics. In almost all cases you want to use MustGetParsedDuration(). 
+func (p *Properties) MustGetDuration(key string) time.Duration { + v, err := p.getInt64(key) + if err != nil { + ErrorHandler(err) + } + return time.Duration(v) +} + +// ---------------------------------------------------------------------------- + +// GetParsedDuration parses the expanded value with time.ParseDuration() if the key exists. +// If key does not exist or the value cannot be parsed the default +// value is returned. +func (p *Properties) GetParsedDuration(key string, def time.Duration) time.Duration { + s, ok := p.Get(key) + if !ok { + return def + } + v, err := time.ParseDuration(s) + if err != nil { + return def + } + return v +} + +// MustGetParsedDuration parses the expanded value with time.ParseDuration() if the key exists. +// If key does not exist or the value cannot be parsed the function panics. +func (p *Properties) MustGetParsedDuration(key string) time.Duration { + s, ok := p.Get(key) + if !ok { + ErrorHandler(invalidKeyError(key)) + } + v, err := time.ParseDuration(s) + if err != nil { + ErrorHandler(err) + } + return v +} + +// ---------------------------------------------------------------------------- + +// GetFloat64 parses the expanded value as a float64 if the key exists. +// If key does not exist or the value cannot be parsed the default +// value is returned. +func (p *Properties) GetFloat64(key string, def float64) float64 { + v, err := p.getFloat64(key) + if err != nil { + return def + } + return v +} + +// MustGetFloat64 parses the expanded value as a float64 if the key exists. +// If key does not exist or the value cannot be parsed the function panics. +func (p *Properties) MustGetFloat64(key string) float64 { + v, err := p.getFloat64(key) + if err != nil { + ErrorHandler(err) + } + return v +} + +func (p *Properties) getFloat64(key string) (value float64, err error) { + if v, ok := p.Get(key); ok { + value, err = strconv.ParseFloat(v, 64) + if err != nil { + return 0, err + } + return value, nil + } + return 0, invalidKeyError(key) +} + +// ---------------------------------------------------------------------------- + +// GetInt parses the expanded value as an int if the key exists. +// If key does not exist or the value cannot be parsed the default +// value is returned. If the value does not fit into an int the +// function panics with an out of range error. +func (p *Properties) GetInt(key string, def int) int { + v, err := p.getInt64(key) + if err != nil { + return def + } + return intRangeCheck(key, v) +} + +// MustGetInt parses the expanded value as an int if the key exists. +// If key does not exist or the value cannot be parsed the function panics. +// If the value does not fit into an int the function panics with +// an out of range error. +func (p *Properties) MustGetInt(key string) int { + v, err := p.getInt64(key) + if err != nil { + ErrorHandler(err) + } + return intRangeCheck(key, v) +} + +// ---------------------------------------------------------------------------- + +// GetInt64 parses the expanded value as an int64 if the key exists. +// If key does not exist or the value cannot be parsed the default +// value is returned. +func (p *Properties) GetInt64(key string, def int64) int64 { + v, err := p.getInt64(key) + if err != nil { + return def + } + return v +} + +// MustGetInt64 parses the expanded value as an int if the key exists. +// If key does not exist or the value cannot be parsed the function panics. 
+func (p *Properties) MustGetInt64(key string) int64 { + v, err := p.getInt64(key) + if err != nil { + ErrorHandler(err) + } + return v +} + +func (p *Properties) getInt64(key string) (value int64, err error) { + if v, ok := p.Get(key); ok { + value, err = strconv.ParseInt(v, 10, 64) + if err != nil { + return 0, err + } + return value, nil + } + return 0, invalidKeyError(key) +} + +// ---------------------------------------------------------------------------- + +// GetUint parses the expanded value as an uint if the key exists. +// If key does not exist or the value cannot be parsed the default +// value is returned. If the value does not fit into an int the +// function panics with an out of range error. +func (p *Properties) GetUint(key string, def uint) uint { + v, err := p.getUint64(key) + if err != nil { + return def + } + return uintRangeCheck(key, v) +} + +// MustGetUint parses the expanded value as an int if the key exists. +// If key does not exist or the value cannot be parsed the function panics. +// If the value does not fit into an int the function panics with +// an out of range error. +func (p *Properties) MustGetUint(key string) uint { + v, err := p.getUint64(key) + if err != nil { + ErrorHandler(err) + } + return uintRangeCheck(key, v) +} + +// ---------------------------------------------------------------------------- + +// GetUint64 parses the expanded value as an uint64 if the key exists. +// If key does not exist or the value cannot be parsed the default +// value is returned. +func (p *Properties) GetUint64(key string, def uint64) uint64 { + v, err := p.getUint64(key) + if err != nil { + return def + } + return v +} + +// MustGetUint64 parses the expanded value as an int if the key exists. +// If key does not exist or the value cannot be parsed the function panics. +func (p *Properties) MustGetUint64(key string) uint64 { + v, err := p.getUint64(key) + if err != nil { + ErrorHandler(err) + } + return v +} + +func (p *Properties) getUint64(key string) (value uint64, err error) { + if v, ok := p.Get(key); ok { + value, err = strconv.ParseUint(v, 10, 64) + if err != nil { + return 0, err + } + return value, nil + } + return 0, invalidKeyError(key) +} + +// ---------------------------------------------------------------------------- + +// GetString returns the expanded value for the given key if exists or +// the default value otherwise. +func (p *Properties) GetString(key, def string) string { + if v, ok := p.Get(key); ok { + return v + } + return def +} + +// MustGetString returns the expanded value for the given key if exists or +// panics otherwise. +func (p *Properties) MustGetString(key string) string { + if v, ok := p.Get(key); ok { + return v + } + ErrorHandler(invalidKeyError(key)) + panic("ErrorHandler should exit") +} + +// ---------------------------------------------------------------------------- + +// Filter returns a new properties object which contains all properties +// for which the key matches the pattern. +func (p *Properties) Filter(pattern string) (*Properties, error) { + re, err := regexp.Compile(pattern) + if err != nil { + return nil, err + } + + return p.FilterRegexp(re), nil +} + +// FilterRegexp returns a new properties object which contains all properties +// for which the key matches the regular expression. +func (p *Properties) FilterRegexp(re *regexp.Regexp) *Properties { + pp := NewProperties() + for _, k := range p.k { + if re.MatchString(k) { + // TODO(fs): we are ignoring the error which flags a circular reference. 
+ // TODO(fs): since we are just copying a subset of keys this cannot happen (fingers crossed) + pp.Set(k, p.m[k]) + } + } + return pp +} + +// FilterPrefix returns a new properties object with a subset of all keys +// with the given prefix. +func (p *Properties) FilterPrefix(prefix string) *Properties { + pp := NewProperties() + for _, k := range p.k { + if strings.HasPrefix(k, prefix) { + // TODO(fs): we are ignoring the error which flags a circular reference. + // TODO(fs): since we are just copying a subset of keys this cannot happen (fingers crossed) + pp.Set(k, p.m[k]) + } + } + return pp +} + +// FilterStripPrefix returns a new properties object with a subset of all keys +// with the given prefix and the prefix removed from the keys. +func (p *Properties) FilterStripPrefix(prefix string) *Properties { + pp := NewProperties() + n := len(prefix) + for _, k := range p.k { + if len(k) > len(prefix) && strings.HasPrefix(k, prefix) { + // TODO(fs): we are ignoring the error which flags a circular reference. + // TODO(fs): since we are modifying keys I am not entirely sure whether we can create a circular reference + // TODO(fs): this function should probably return an error but the signature is fixed + pp.Set(k[n:], p.m[k]) + } + } + return pp +} + +// Len returns the number of keys. +func (p *Properties) Len() int { + return len(p.m) +} + +// Keys returns all keys in the same order as in the input. +func (p *Properties) Keys() []string { + keys := make([]string, len(p.k)) + copy(keys, p.k) + return keys +} + +// Set sets the property key to the corresponding value. +// If a value for key existed before then ok is true and prev +// contains the previous value. If the value contains a +// circular reference or a malformed expression then +// an error is returned. +// An empty key is silently ignored. +func (p *Properties) Set(key, value string) (prev string, ok bool, err error) { + if key == "" { + return "", false, nil + } + + // if expansion is disabled we allow circular references + if p.DisableExpansion { + prev, ok = p.Get(key) + p.m[key] = value + if !ok { + p.k = append(p.k, key) + } + return prev, ok, nil + } + + // to check for a circular reference we temporarily need + // to set the new value. If there is an error then revert + // to the previous state. Only if all tests are successful + // then we add the key to the p.k list. + prev, ok = p.Get(key) + p.m[key] = value + + // now check for a circular reference + _, err = p.expand(key, value) + if err != nil { + + // revert to the previous state + if ok { + p.m[key] = prev + } else { + delete(p.m, key) + } + + return "", false, err + } + + if !ok { + p.k = append(p.k, key) + } + + return prev, ok, nil +} + +// SetValue sets property key to the default string value +// as defined by fmt.Sprintf("%v"). +func (p *Properties) SetValue(key string, value interface{}) error { + _, _, err := p.Set(key, fmt.Sprintf("%v", value)) + return err +} + +// MustSet sets the property key to the corresponding value. +// If a value for key existed before then ok is true and prev +// contains the previous value. An empty key is silently ignored. +func (p *Properties) MustSet(key, value string) (prev string, ok bool) { + prev, ok, err := p.Set(key, value) + if err != nil { + ErrorHandler(err) + } + return prev, ok +} + +// String returns a string of all expanded 'key = value' pairs. 
+func (p *Properties) String() string { + var s string + for _, key := range p.k { + value, _ := p.Get(key) + s = fmt.Sprintf("%s%s = %s\n", s, key, value) + } + return s +} + +// Sort sorts the properties keys in alphabetical order. +// This is helpfully before writing the properties. +func (p *Properties) Sort() { + sort.Strings(p.k) +} + +// Write writes all unexpanded 'key = value' pairs to the given writer. +// Write returns the number of bytes written and any write error encountered. +func (p *Properties) Write(w io.Writer, enc Encoding) (n int, err error) { + return p.WriteComment(w, "", enc) +} + +// WriteComment writes all unexpanced 'key = value' pairs to the given writer. +// If prefix is not empty then comments are written with a blank line and the +// given prefix. The prefix should be either "# " or "! " to be compatible with +// the properties file format. Otherwise, the properties parser will not be +// able to read the file back in. It returns the number of bytes written and +// any write error encountered. +func (p *Properties) WriteComment(w io.Writer, prefix string, enc Encoding) (n int, err error) { + var x int + + for _, key := range p.k { + value := p.m[key] + + if prefix != "" { + if comments, ok := p.c[key]; ok { + // don't print comments if they are all empty + allEmpty := true + for _, c := range comments { + if c != "" { + allEmpty = false + break + } + } + + if !allEmpty { + // add a blank line between entries but not at the top + if len(comments) > 0 && n > 0 { + x, err = fmt.Fprintln(w) + if err != nil { + return + } + n += x + } + + for _, c := range comments { + x, err = fmt.Fprintf(w, "%s%s\n", prefix, c) + if err != nil { + return + } + n += x + } + } + } + } + sep := " = " + if p.WriteSeparator != "" { + sep = p.WriteSeparator + } + x, err = fmt.Fprintf(w, "%s%s%s\n", encode(key, " :", enc), sep, encode(value, "", enc)) + if err != nil { + return + } + n += x + } + return +} + +// Map returns a copy of the properties as a map. +func (p *Properties) Map() map[string]string { + m := make(map[string]string) + for k, v := range p.m { + m[k] = v + } + return m +} + +// FilterFunc returns a copy of the properties which includes the values which passed all filters. +func (p *Properties) FilterFunc(filters ...func(k, v string) bool) *Properties { + pp := NewProperties() +outer: + for k, v := range p.m { + for _, f := range filters { + if !f(k, v) { + continue outer + } + pp.Set(k, v) + } + } + return pp +} + +// ---------------------------------------------------------------------------- + +// Delete removes the key and its comments. +func (p *Properties) Delete(key string) { + delete(p.m, key) + delete(p.c, key) + newKeys := []string{} + for _, k := range p.k { + if k != key { + newKeys = append(newKeys, k) + } + } + p.k = newKeys +} + +// Merge merges properties, comments and keys from other *Properties into p +func (p *Properties) Merge(other *Properties) { + for k, v := range other.m { + p.m[k] = v + } + for k, v := range other.c { + p.c[k] = v + } + +outer: + for _, otherKey := range other.k { + for _, key := range p.k { + if otherKey == key { + continue outer + } + } + p.k = append(p.k, otherKey) + } +} + +// ---------------------------------------------------------------------------- + +// check expands all values and returns an error if a circular reference or +// a malformed expression was found. 
+func (p *Properties) check() error { + for key, value := range p.m { + if _, err := p.expand(key, value); err != nil { + return err + } + } + return nil +} + +func (p *Properties) expand(key, input string) (string, error) { + // no pre/postfix -> nothing to expand + if p.Prefix == "" && p.Postfix == "" { + return input, nil + } + + return expand(input, []string{key}, p.Prefix, p.Postfix, p.m) +} + +// expand recursively expands expressions of '(prefix)key(postfix)' to their corresponding values. +// The function keeps track of the keys that were already expanded and stops if it +// detects a circular reference or a malformed expression of the form '(prefix)key'. +func expand(s string, keys []string, prefix, postfix string, values map[string]string) (string, error) { + if len(keys) > maxExpansionDepth { + return "", fmt.Errorf("expansion too deep") + } + + for { + start := strings.Index(s, prefix) + if start == -1 { + return s, nil + } + + keyStart := start + len(prefix) + keyLen := strings.Index(s[keyStart:], postfix) + if keyLen == -1 { + return "", fmt.Errorf("malformed expression") + } + + end := keyStart + keyLen + len(postfix) - 1 + key := s[keyStart : keyStart+keyLen] + + // fmt.Printf("s:%q pp:%q start:%d end:%d keyStart:%d keyLen:%d key:%q\n", s, prefix + "..." + postfix, start, end, keyStart, keyLen, key) + + for _, k := range keys { + if key == k { + var b bytes.Buffer + b.WriteString("circular reference in:\n") + for _, k1 := range keys { + fmt.Fprintf(&b, "%s=%s\n", k1, values[k1]) + } + return "", fmt.Errorf(b.String()) + } + } + + val, ok := values[key] + if !ok { + val = os.Getenv(key) + } + new_val, err := expand(val, append(keys, key), prefix, postfix, values) + if err != nil { + return "", err + } + s = s[:start] + new_val + s[end+1:] + } + return s, nil +} + +// encode encodes a UTF-8 string to ISO-8859-1 and escapes some characters. +func encode(s string, special string, enc Encoding) string { + switch enc { + case UTF8: + return encodeUtf8(s, special) + case ISO_8859_1: + return encodeIso(s, special) + default: + panic(fmt.Sprintf("unsupported encoding %v", enc)) + } +} + +func encodeUtf8(s string, special string) string { + v := "" + for pos := 0; pos < len(s); { + r, w := utf8.DecodeRuneInString(s[pos:]) + pos += w + v += escape(r, special) + } + return v +} + +func encodeIso(s string, special string) string { + var r rune + var w int + var v string + for pos := 0; pos < len(s); { + switch r, w = utf8.DecodeRuneInString(s[pos:]); { + case r < 1<<8: // single byte rune -> escape special chars only + v += escape(r, special) + case r < 1<<16: // two byte rune -> unicode literal + v += fmt.Sprintf("\\u%04x", r) + default: // more than two bytes per rune -> can't encode + v += "?" + } + pos += w + } + return v +} + +func escape(r rune, special string) string { + switch r { + case '\f': + return "\\f" + case '\n': + return "\\n" + case '\r': + return "\\r" + case '\t': + return "\\t" + case '\\': + return "\\\\" + default: + if strings.ContainsRune(special, r) { + return "\\" + string(r) + } + return string(r) + } +} + +func invalidKeyError(key string) error { + return fmt.Errorf("unknown property: %s", key) +} diff --git a/vendor/github.com/magiconair/properties/rangecheck.go b/vendor/github.com/magiconair/properties/rangecheck.go new file mode 100644 index 000000000..b013a2e5e --- /dev/null +++ b/vendor/github.com/magiconair/properties/rangecheck.go @@ -0,0 +1,31 @@ +// Copyright 2018 Frank Schroeder. All rights reserved. 
+// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package properties + +import ( + "fmt" + "math" +) + +// make this a var to overwrite it in a test +var is32Bit = ^uint(0) == math.MaxUint32 + +// intRangeCheck checks if the value fits into the int type and +// panics if it does not. +func intRangeCheck(key string, v int64) int { + if is32Bit && (v < math.MinInt32 || v > math.MaxInt32) { + panic(fmt.Sprintf("Value %d for key %s out of range", v, key)) + } + return int(v) +} + +// uintRangeCheck checks if the value fits into the uint type and +// panics if it does not. +func uintRangeCheck(key string, v uint64) uint { + if is32Bit && v > math.MaxUint32 { + panic(fmt.Sprintf("Value %d for key %s out of range", v, key)) + } + return uint(v) +} diff --git a/vendor/github.com/maratori/testpackage/LICENSE b/vendor/github.com/maratori/testpackage/LICENSE new file mode 100644 index 000000000..644d0b1c8 --- /dev/null +++ b/vendor/github.com/maratori/testpackage/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2019 Marat Reymers + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/github.com/maratori/testpackage/pkg/testpackage/testpackage.go b/vendor/github.com/maratori/testpackage/pkg/testpackage/testpackage.go new file mode 100644 index 000000000..cad24e1a5 --- /dev/null +++ b/vendor/github.com/maratori/testpackage/pkg/testpackage/testpackage.go @@ -0,0 +1,53 @@ +package testpackage + +import ( + "flag" + "regexp" + "strings" + + "golang.org/x/tools/go/analysis" +) + +const ( + SkipRegexpFlagName = "skip-regexp" + SkipRegexpFlagUsage = `regexp pattern to skip file by name. 
To not skip files use -skip-regexp="^$"` + SkipRegexpFlagDefault = `(export|internal)_test\.go` +) + +// NewAnalyzer returns Analyzer that makes you use a separate _test package +func NewAnalyzer() *analysis.Analyzer { + var ( + skipFileRegexp = SkipRegexpFlagDefault + fs flag.FlagSet + ) + + fs.StringVar(&skipFileRegexp, SkipRegexpFlagName, skipFileRegexp, SkipRegexpFlagUsage) + + return &analysis.Analyzer{ + Name: "testpackage", + Doc: "linter that makes you use a separate _test package", + Flags: fs, + Run: func(pass *analysis.Pass) (interface{}, error) { + skipFile, err := regexp.Compile(skipFileRegexp) + if err != nil { + return nil, err + } + + for _, f := range pass.Files { + fileName := pass.Fset.Position(f.Pos()).Filename + if skipFile.MatchString(fileName) { + continue + } + + if strings.HasSuffix(fileName, "_test.go") { + packageName := f.Name.Name + if !strings.HasSuffix(packageName, "_test") { + pass.Reportf(f.Name.Pos(), "package should be `%s_test` instead of `%s`", packageName, packageName) + } + } + } + + return nil, nil + }, + } +} diff --git a/vendor/github.com/matoous/godox/.gitignore b/vendor/github.com/matoous/godox/.gitignore new file mode 100644 index 000000000..30d94b102 --- /dev/null +++ b/vendor/github.com/matoous/godox/.gitignore @@ -0,0 +1,19 @@ +# Binaries for programs and plugins +*.exe +*.dll +*.so +*.dylib +.idea + +# Test binary, build with `go test -c` +*.test + +# Output of the go coverage tool, specifically when used with LiteIDE +*.out + +# Project-local glide cache, RE: https://github.com/Masterminds/glide/issues/736 +.glide/ + +.vscode/ +debug +debug.test diff --git a/vendor/github.com/matoous/godox/.golangci.yml b/vendor/github.com/matoous/godox/.golangci.yml new file mode 100644 index 000000000..3f0fcdb19 --- /dev/null +++ b/vendor/github.com/matoous/godox/.golangci.yml @@ -0,0 +1,71 @@ +linters-settings: + depguard: + list-type: blacklist + include-go-root: true + packages: + # we are using "github.com/json-iterator/go" instead of json encoder from stdlib + - "encoding/json" + dupl: + threshold: 100 + gocritic: + # Enable multiple checks by tags, run `GL_DEBUG=gocritic golangci-lint` run to see all tags and checks. + # Empty list by default. See https://github.com/go-critic/go-critic#usage -> section "Tags". + enabled-tags: + - performance + - diagnostic + - style + disabled-checks: + - emptyStringTest + - unnamedResult # it is experimental currently and doesn't handle typed channels correctly + gocyclo: + min-complexity: 14 # TODO go lower + golint: + min-confidence: 0 + govet: + check-shadowing: true + goconst: + min-len: 2 + min-occurrences: 3 + goimports: + local-prefixes: gitlab.skypicker.com/search-team/gonuts/conveyance-store + lll: + line-length: 140 + maligned: + suggest-new: true + misspell: + locale: US + +linters: + enable-all: true + disable: + # prealloc is not recommended by `golangci-lint` developers. 
+ - prealloc + - gochecknoglobals + +issues: + exclude-rules: + - path: _test\.go + linters: + - goconst + - dupl + + - path: fixtures + linters: + - gocritic + - varcheck + - deadcode + - unused + +run: + modules-download-mode: readonly + +# output configuration options +output: + # colored-line-number|line-number|json|tab|checkstyle|code-climate, default is "colored-line-number" + format: tab + + # print lines of code with issue, default is true + print-issued-lines: true + + # print linter name in the end of issue text, default is true + print-linter-name: true diff --git a/vendor/github.com/matoous/godox/.revive.toml b/vendor/github.com/matoous/godox/.revive.toml new file mode 100644 index 000000000..db0e4edb6 --- /dev/null +++ b/vendor/github.com/matoous/godox/.revive.toml @@ -0,0 +1,135 @@ +ignoreGeneratedHeader = false +severity = "warning" + +# confidence <= 0.2 generate a lot of errors from package-comments rule. It marks files that do not contain +# package-level comments as a warning irrespective of existing package-level coment in one file. +confidence = 0.25 +errorCode = 1 +warningCode = 1 + +# Rules block. +# ⚠ Make sure to sort rules alpabetically for readability! ⚠ + +# argument-limit rule is setting up a maximum number of parameters that can be passed to the functions/methods. +[rule.argument-limit] + arguments = [5] + +# atomic rule checks for commonly mistaken usages of the sync/atomic package. +[rule.atomic] + +# blank-imports rule disallows blank imports. +[rule.blank-imports] + +# bool-literal-in-expr suggests removing boolean literals from logic expressions like `bar == true`, `arg == false`, +# `r != true`, `false && boolExpr` and `boolExpr || true`. +[rule.bool-literal-in-expr] + +# constant-logical-expr rule warns on constant logical expressions, like `name == name`. +[rule.constant-logical-expr] + +# context-as-argument rule makes sure that context.Context is the first argument of a function. +[rule.context-as-argument] + +# context-keys-type rule disallows the usage of basic types in context.WithValue +[rule.context-keys-type] + +# confusing-naming rule warns on methods with names that differ only by capitalization. +[rule.confusing-naming] + +# confusing-results rule suggests to name potentially confusing function results. +[rule.confusing-results] + +# cyclomatic rule sets restriction for maximum Cyclomatic complexity. +[rule.cyclomatic] + arguments = [15] + +# deep-exit rule looks for program exits in funcs other than `main()` or `init()`. +[rule.deep-exit] + +# dot-imports rule forbids `.` imports. +[rule.dot-imports] + +# empty-block warns on empty code blocks. +[rule.empty-block] + +# error-return rule ensure that the error return parameter is the last. +[rule.error-return] + +# error-strings rule ensure conventions around error strings. +[rule.error-strings] + +# error-naming rule ensure naming of error variables (has `Err` or `err` prefix). +[rule.error-naming] + +# errorf rule warns on usage errors.New(fmt.Sprintf()) instead of fmt.Errorf() +[rule.errorf] + +# exported rule ensure naming and commenting conventions on exported symbols. +[rule.exported] + +# flag-parameter rule warns on boolean parameters that create a control coupling. +[rule.flag-parameter] + +# get-return rule warns on getters that do not yield any result. +[rule.get-return] + +# if-return rule warns redundant if when returning an error. +[rule.if-return] + +# increment-decrement rule forces to use `i++` and `i--` instead of `i += 1` and `i -= 1`. 
+[rule.increment-decrement] + +# indent-error-flow rule prevents redundant else statements. +[rule.indent-error-flow] + +# modifies-value-receiver warns on assignments to value-passed method receivers. +[rule.modifies-value-receiver] + +# package-comments rule ensures package commenting conventions. +[rule.package-comments] + +# range rule prevents redundant variables when iterating over a collection. +[rule.range] + +# range-val-in-closure warns if range value is used in a closure dispatched as goroutine. +[rule.range-val-in-closure] + +# receiver-naming ensures conventions around the naming of receivers. +[rule.receiver-naming] + +# redefines-builtin-id warns on redefinitions of built-in (constants, variables, function and types) identifiers, +# like `true := "false"` etc. +[rule.redefines-builtin-id] + +# rule.superfluous-else prevents redundant else statements (extends indent-error-flow). Checks for `if-then-else`where +# the then block ends with branching statement like `continue`, `break`, or `goto`. +[rule.superfluous-else] + +# rule.struct-tag checks common struct tags like `json`, `xml`, `yaml`. +[rule.struct-tag] + +# time-naming rule conventions around the naming of time variables. Like not to use unit suffixes (sec, min etc.) in +# naming variables of type `time.Time` or `time.Duration`. +[rule.time-naming] + +# unexported-return rule warns when a public return is from unexported type. +[rule.unexported-return] + +# unnecessary-stmt suggests removing or simplifying unnecessary statements like breaks at the end of cases or return at +# the end of bodies of functions returning nothing. +[rule.unnecessary-stmt] + +# unreachable-code rule warns on the unreachable code. +[rule.unreachable-code] + +# unused-parameter rule suggests to rename or remove unused function parameters. +[rule.unused-parameter] + +# var-declaration rule reduces redundancies around variable declaration. +[rule.var-declaration] + +# var-naming checks naming rules. +[rule.var-naming] + +# waitgroup-by-value rule warns on functions taking `sync.WaitGroup` as a by-value parameter. +[rule.waitgroup-by-value] diff --git a/vendor/github.com/matoous/godox/LICENSE b/vendor/github.com/matoous/godox/LICENSE new file mode 100644 index 000000000..49e1b1e3a --- /dev/null +++ b/vendor/github.com/matoous/godox/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2018 Matous Dzivjak + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
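For context on the testpackage analyzer vendored at the top of this hunk: it is a standard `golang.org/x/tools/go/analysis` analyzer, so it can also be run outside golangci-lint as a vet-style binary. The sketch below is not part of the patch; the import path and package name are assumptions based on the vendor directory layout and may differ.

```go
// Sketch only (not part of the patch): wiring the NewAnalyzer() shown
// earlier in this hunk into a standalone checker via singlechecker.
// The import path below is an assumption, not taken from this patch.
package main

import (
	"golang.org/x/tools/go/analysis/singlechecker"

	"github.com/maratori/testpackage/pkg/testpackage" // assumed path
)

func main() {
	// Registers the analyzer's -skip-regexp flag and runs it over the
	// packages named on the command line.
	singlechecker.Main(testpackage.NewAnalyzer())
}
```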
diff --git a/vendor/github.com/matoous/godox/README.md b/vendor/github.com/matoous/godox/README.md new file mode 100644 index 000000000..9c58e28ce --- /dev/null +++ b/vendor/github.com/matoous/godox/README.md @@ -0,0 +1,23 @@ +GoDoX +=== + +[![Tests](https://github.com/matoous/godox/actions/workflows/test.yml/badge.svg)](https://github.com/matoous/godox/actions/workflows/test.yml) +[![Lint](https://github.com/matoous/godox/actions/workflows/lint.yml/badge.svg)](https://github.com/matoous/godox/actions/workflows/lint.yml) +[![GoDoc](https://godoc.org/github.com/matoous/godox?status.svg)](https://godoc.org/github.com/matoous/godox) +[![Go Report Card](https://goreportcard.com/badge/github.com/matoous/godox)](https://goreportcard.com/report/github.com/matoous/godox) +[![GitHub issues](https://img.shields.io/github/issues/matoous/godox.svg)](https://github.com/matoous/godox/issues) +[![License](https://img.shields.io/badge/license-MIT%20License-blue.svg)](https://github.com/matoous/godox/LICENSE) + +GoDoX extracts comments from Go code based on keywords. This repository is fork of https://github.com/766b/godox +but a lot of code has changed, this repository is updated and the code was adjusted for better integration with +https://github.com/golangci/golangci-lint. + +Installation +--- + + go get github.com/matoous/godox + +The main idea +--- + +The main idea of godox is the keywords like TODO, FIX, OPTIMIZE is temporary and for development purpose only. You should create tasks if some TODOs cannot be fixed in the current merge request. diff --git a/vendor/github.com/matoous/godox/go.mod b/vendor/github.com/matoous/godox/go.mod new file mode 100644 index 000000000..69b34f0e9 --- /dev/null +++ b/vendor/github.com/matoous/godox/go.mod @@ -0,0 +1,5 @@ +module github.com/matoous/godox + +go 1.13 + +require golang.org/x/tools v0.0.0-20190910044552-dd2b5c81c578 diff --git a/vendor/github.com/matoous/godox/go.sum b/vendor/github.com/matoous/godox/go.sum new file mode 100644 index 000000000..970cb25dd --- /dev/null +++ b/vendor/github.com/matoous/godox/go.sum @@ -0,0 +1,8 @@ +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/tools v0.0.0-20190910044552-dd2b5c81c578 h1:f0Gfd654rnnfXT1+BK1YHPTS1qQdKrPIaGQwWxNE44k= +golang.org/x/tools v0.0.0-20190910044552-dd2b5c81c578/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= diff --git a/vendor/github.com/matoous/godox/godox.go b/vendor/github.com/matoous/godox/godox.go new file mode 100644 index 000000000..6d7104b09 --- /dev/null +++ b/vendor/github.com/matoous/godox/godox.go @@ -0,0 +1,84 @@ +package godox + +import ( + "bufio" + "bytes" + "fmt" + "go/ast" + "go/token" + "path/filepath" + "strings" +) + +var ( + defaultKeywords = []string{"TODO", "BUG", "FIXME"} +) + +// Message contains a message and position +type Message struct { + Pos token.Position + Message string +} + +func getMessages(c *ast.Comment, fset *token.FileSet, keywords []string) []Message { + commentText 
:= c.Text + switch commentText[1] { + case '/': + commentText = commentText[2:] + if len(commentText) > 0 && commentText[0] == ' ' { + commentText = commentText[1:] + } + case '*': + commentText = commentText[2 : len(commentText)-2] + } + + b := bufio.NewReader(bytes.NewBufferString(commentText)) + var comments []Message + + for lineNum := 0; ; lineNum++ { + line, _, err := b.ReadLine() + if err != nil { + break + } + sComment := bytes.TrimSpace(line) + if len(sComment) < 4 { + continue + } + for _, kw := range keywords { + if bytes.EqualFold([]byte(kw), sComment[0:len(kw)]) { + pos := fset.Position(c.Pos()) + // trim the comment + if len(sComment) > 40 { + sComment = []byte(fmt.Sprintf("%.40s...", sComment)) + } + comments = append(comments, Message{ + Pos: pos, + Message: fmt.Sprintf( + "%s:%d: Line contains %s: \"%s\"", + filepath.Join(pos.Filename), + pos.Line+lineNum, + strings.Join(keywords, "/"), + sComment, + ), + }) + break + } + } + } + return comments +} + +// Run runs the godox linter on given file. +// Godox searches for comments starting with given keywords and reports them. +func Run(file *ast.File, fset *token.FileSet, keywords ...string) []Message { + if len(keywords) == 0 { + keywords = defaultKeywords + } + var messages []Message + for _, c := range file.Comments { + for _, ci := range c.List { + messages = append(messages, getMessages(ci, fset, keywords)...) + } + } + return messages +} diff --git a/vendor/github.com/mattn/go-colorable/.travis.yml b/vendor/github.com/mattn/go-colorable/.travis.yml new file mode 100644 index 000000000..7942c565c --- /dev/null +++ b/vendor/github.com/mattn/go-colorable/.travis.yml @@ -0,0 +1,15 @@ +language: go +sudo: false +go: + - 1.13.x + - tip + +before_install: + - go get -t -v ./... + +script: + - ./go.test.sh + +after_success: + - bash <(curl -s https://codecov.io/bash) + diff --git a/vendor/github.com/mattn/go-colorable/LICENSE b/vendor/github.com/mattn/go-colorable/LICENSE new file mode 100644 index 000000000..91b5cef30 --- /dev/null +++ b/vendor/github.com/mattn/go-colorable/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2016 Yasuhiro Matsumoto + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
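The godox package vendored above is normally driven by golangci-lint, but its exported `Run` function can be called directly. A minimal sketch (not part of the patch); the file name `main.go` and the keyword list are illustrative only:

```go
// Sketch only: invoking the vendored godox API on a single parsed file.
package main

import (
	"fmt"
	"go/parser"
	"go/token"
	"log"

	"github.com/matoous/godox"
)

func main() {
	fset := token.NewFileSet()
	// ParseComments is required; godox only inspects comment groups.
	f, err := parser.ParseFile(fset, "main.go", nil, parser.ParseComments)
	if err != nil {
		log.Fatal(err)
	}
	// With no keywords, godox.Run falls back to TODO/BUG/FIXME;
	// here two keywords are passed explicitly.
	for _, msg := range godox.Run(f, fset, "TODO", "FIXME") {
		fmt.Println(msg.Message)
	}
}
```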
diff --git a/vendor/github.com/mattn/go-colorable/README.md b/vendor/github.com/mattn/go-colorable/README.md new file mode 100644 index 000000000..e055952b6 --- /dev/null +++ b/vendor/github.com/mattn/go-colorable/README.md @@ -0,0 +1,48 @@ +# go-colorable + +[![Build Status](https://travis-ci.org/mattn/go-colorable.svg?branch=master)](https://travis-ci.org/mattn/go-colorable) +[![Codecov](https://codecov.io/gh/mattn/go-colorable/branch/master/graph/badge.svg)](https://codecov.io/gh/mattn/go-colorable) +[![GoDoc](https://godoc.org/github.com/mattn/go-colorable?status.svg)](http://godoc.org/github.com/mattn/go-colorable) +[![Go Report Card](https://goreportcard.com/badge/mattn/go-colorable)](https://goreportcard.com/report/mattn/go-colorable) + +Colorable writer for windows. + +For example, most of logger packages doesn't show colors on windows. (I know we can do it with ansicon. But I don't want.) +This package is possible to handle escape sequence for ansi color on windows. + +## Too Bad! + +![](https://raw.githubusercontent.com/mattn/go-colorable/gh-pages/bad.png) + + +## So Good! + +![](https://raw.githubusercontent.com/mattn/go-colorable/gh-pages/good.png) + +## Usage + +```go +logrus.SetFormatter(&logrus.TextFormatter{ForceColors: true}) +logrus.SetOutput(colorable.NewColorableStdout()) + +logrus.Info("succeeded") +logrus.Warn("not correct") +logrus.Error("something error") +logrus.Fatal("panic") +``` + +You can compile above code on non-windows OSs. + +## Installation + +``` +$ go get github.com/mattn/go-colorable +``` + +# License + +MIT + +# Author + +Yasuhiro Matsumoto (a.k.a mattn) diff --git a/vendor/github.com/mattn/go-colorable/colorable_appengine.go b/vendor/github.com/mattn/go-colorable/colorable_appengine.go new file mode 100644 index 000000000..1f7806fe1 --- /dev/null +++ b/vendor/github.com/mattn/go-colorable/colorable_appengine.go @@ -0,0 +1,37 @@ +// +build appengine + +package colorable + +import ( + "io" + "os" + + _ "github.com/mattn/go-isatty" +) + +// NewColorable returns new instance of Writer which handles escape sequence. +func NewColorable(file *os.File) io.Writer { + if file == nil { + panic("nil passed instead of *os.File to NewColorable()") + } + + return file +} + +// NewColorableStdout returns new instance of Writer which handles escape sequence for stdout. +func NewColorableStdout() io.Writer { + return os.Stdout +} + +// NewColorableStderr returns new instance of Writer which handles escape sequence for stderr. +func NewColorableStderr() io.Writer { + return os.Stderr +} + +// EnableColorsStdout enable colors if possible. +func EnableColorsStdout(enabled *bool) func() { + if enabled != nil { + *enabled = true + } + return func() {} +} diff --git a/vendor/github.com/mattn/go-colorable/colorable_others.go b/vendor/github.com/mattn/go-colorable/colorable_others.go new file mode 100644 index 000000000..08cbd1e0f --- /dev/null +++ b/vendor/github.com/mattn/go-colorable/colorable_others.go @@ -0,0 +1,38 @@ +// +build !windows +// +build !appengine + +package colorable + +import ( + "io" + "os" + + _ "github.com/mattn/go-isatty" +) + +// NewColorable returns new instance of Writer which handles escape sequence. +func NewColorable(file *os.File) io.Writer { + if file == nil { + panic("nil passed instead of *os.File to NewColorable()") + } + + return file +} + +// NewColorableStdout returns new instance of Writer which handles escape sequence for stdout. 
+func NewColorableStdout() io.Writer { + return os.Stdout +} + +// NewColorableStderr returns new instance of Writer which handles escape sequence for stderr. +func NewColorableStderr() io.Writer { + return os.Stderr +} + +// EnableColorsStdout enable colors if possible. +func EnableColorsStdout(enabled *bool) func() { + if enabled != nil { + *enabled = true + } + return func() {} +} diff --git a/vendor/github.com/mattn/go-colorable/colorable_windows.go b/vendor/github.com/mattn/go-colorable/colorable_windows.go new file mode 100644 index 000000000..41215d7fc --- /dev/null +++ b/vendor/github.com/mattn/go-colorable/colorable_windows.go @@ -0,0 +1,1043 @@ +// +build windows +// +build !appengine + +package colorable + +import ( + "bytes" + "io" + "math" + "os" + "strconv" + "strings" + "sync" + "syscall" + "unsafe" + + "github.com/mattn/go-isatty" +) + +const ( + foregroundBlue = 0x1 + foregroundGreen = 0x2 + foregroundRed = 0x4 + foregroundIntensity = 0x8 + foregroundMask = (foregroundRed | foregroundBlue | foregroundGreen | foregroundIntensity) + backgroundBlue = 0x10 + backgroundGreen = 0x20 + backgroundRed = 0x40 + backgroundIntensity = 0x80 + backgroundMask = (backgroundRed | backgroundBlue | backgroundGreen | backgroundIntensity) + commonLvbUnderscore = 0x8000 + + cENABLE_VIRTUAL_TERMINAL_PROCESSING = 0x4 +) + +const ( + genericRead = 0x80000000 + genericWrite = 0x40000000 +) + +const ( + consoleTextmodeBuffer = 0x1 +) + +type wchar uint16 +type short int16 +type dword uint32 +type word uint16 + +type coord struct { + x short + y short +} + +type smallRect struct { + left short + top short + right short + bottom short +} + +type consoleScreenBufferInfo struct { + size coord + cursorPosition coord + attributes word + window smallRect + maximumWindowSize coord +} + +type consoleCursorInfo struct { + size dword + visible int32 +} + +var ( + kernel32 = syscall.NewLazyDLL("kernel32.dll") + procGetConsoleScreenBufferInfo = kernel32.NewProc("GetConsoleScreenBufferInfo") + procSetConsoleTextAttribute = kernel32.NewProc("SetConsoleTextAttribute") + procSetConsoleCursorPosition = kernel32.NewProc("SetConsoleCursorPosition") + procFillConsoleOutputCharacter = kernel32.NewProc("FillConsoleOutputCharacterW") + procFillConsoleOutputAttribute = kernel32.NewProc("FillConsoleOutputAttribute") + procGetConsoleCursorInfo = kernel32.NewProc("GetConsoleCursorInfo") + procSetConsoleCursorInfo = kernel32.NewProc("SetConsoleCursorInfo") + procSetConsoleTitle = kernel32.NewProc("SetConsoleTitleW") + procGetConsoleMode = kernel32.NewProc("GetConsoleMode") + procSetConsoleMode = kernel32.NewProc("SetConsoleMode") + procCreateConsoleScreenBuffer = kernel32.NewProc("CreateConsoleScreenBuffer") +) + +// Writer provides colorable Writer to the console +type Writer struct { + out io.Writer + handle syscall.Handle + althandle syscall.Handle + oldattr word + oldpos coord + rest bytes.Buffer + mutex sync.Mutex +} + +// NewColorable returns new instance of Writer which handles escape sequence from File. 
+func NewColorable(file *os.File) io.Writer { + if file == nil { + panic("nil passed instead of *os.File to NewColorable()") + } + + if isatty.IsTerminal(file.Fd()) { + var mode uint32 + if r, _, _ := procGetConsoleMode.Call(file.Fd(), uintptr(unsafe.Pointer(&mode))); r != 0 && mode&cENABLE_VIRTUAL_TERMINAL_PROCESSING != 0 { + return file + } + var csbi consoleScreenBufferInfo + handle := syscall.Handle(file.Fd()) + procGetConsoleScreenBufferInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&csbi))) + return &Writer{out: file, handle: handle, oldattr: csbi.attributes, oldpos: coord{0, 0}} + } + return file +} + +// NewColorableStdout returns new instance of Writer which handles escape sequence for stdout. +func NewColorableStdout() io.Writer { + return NewColorable(os.Stdout) +} + +// NewColorableStderr returns new instance of Writer which handles escape sequence for stderr. +func NewColorableStderr() io.Writer { + return NewColorable(os.Stderr) +} + +var color256 = map[int]int{ + 0: 0x000000, + 1: 0x800000, + 2: 0x008000, + 3: 0x808000, + 4: 0x000080, + 5: 0x800080, + 6: 0x008080, + 7: 0xc0c0c0, + 8: 0x808080, + 9: 0xff0000, + 10: 0x00ff00, + 11: 0xffff00, + 12: 0x0000ff, + 13: 0xff00ff, + 14: 0x00ffff, + 15: 0xffffff, + 16: 0x000000, + 17: 0x00005f, + 18: 0x000087, + 19: 0x0000af, + 20: 0x0000d7, + 21: 0x0000ff, + 22: 0x005f00, + 23: 0x005f5f, + 24: 0x005f87, + 25: 0x005faf, + 26: 0x005fd7, + 27: 0x005fff, + 28: 0x008700, + 29: 0x00875f, + 30: 0x008787, + 31: 0x0087af, + 32: 0x0087d7, + 33: 0x0087ff, + 34: 0x00af00, + 35: 0x00af5f, + 36: 0x00af87, + 37: 0x00afaf, + 38: 0x00afd7, + 39: 0x00afff, + 40: 0x00d700, + 41: 0x00d75f, + 42: 0x00d787, + 43: 0x00d7af, + 44: 0x00d7d7, + 45: 0x00d7ff, + 46: 0x00ff00, + 47: 0x00ff5f, + 48: 0x00ff87, + 49: 0x00ffaf, + 50: 0x00ffd7, + 51: 0x00ffff, + 52: 0x5f0000, + 53: 0x5f005f, + 54: 0x5f0087, + 55: 0x5f00af, + 56: 0x5f00d7, + 57: 0x5f00ff, + 58: 0x5f5f00, + 59: 0x5f5f5f, + 60: 0x5f5f87, + 61: 0x5f5faf, + 62: 0x5f5fd7, + 63: 0x5f5fff, + 64: 0x5f8700, + 65: 0x5f875f, + 66: 0x5f8787, + 67: 0x5f87af, + 68: 0x5f87d7, + 69: 0x5f87ff, + 70: 0x5faf00, + 71: 0x5faf5f, + 72: 0x5faf87, + 73: 0x5fafaf, + 74: 0x5fafd7, + 75: 0x5fafff, + 76: 0x5fd700, + 77: 0x5fd75f, + 78: 0x5fd787, + 79: 0x5fd7af, + 80: 0x5fd7d7, + 81: 0x5fd7ff, + 82: 0x5fff00, + 83: 0x5fff5f, + 84: 0x5fff87, + 85: 0x5fffaf, + 86: 0x5fffd7, + 87: 0x5fffff, + 88: 0x870000, + 89: 0x87005f, + 90: 0x870087, + 91: 0x8700af, + 92: 0x8700d7, + 93: 0x8700ff, + 94: 0x875f00, + 95: 0x875f5f, + 96: 0x875f87, + 97: 0x875faf, + 98: 0x875fd7, + 99: 0x875fff, + 100: 0x878700, + 101: 0x87875f, + 102: 0x878787, + 103: 0x8787af, + 104: 0x8787d7, + 105: 0x8787ff, + 106: 0x87af00, + 107: 0x87af5f, + 108: 0x87af87, + 109: 0x87afaf, + 110: 0x87afd7, + 111: 0x87afff, + 112: 0x87d700, + 113: 0x87d75f, + 114: 0x87d787, + 115: 0x87d7af, + 116: 0x87d7d7, + 117: 0x87d7ff, + 118: 0x87ff00, + 119: 0x87ff5f, + 120: 0x87ff87, + 121: 0x87ffaf, + 122: 0x87ffd7, + 123: 0x87ffff, + 124: 0xaf0000, + 125: 0xaf005f, + 126: 0xaf0087, + 127: 0xaf00af, + 128: 0xaf00d7, + 129: 0xaf00ff, + 130: 0xaf5f00, + 131: 0xaf5f5f, + 132: 0xaf5f87, + 133: 0xaf5faf, + 134: 0xaf5fd7, + 135: 0xaf5fff, + 136: 0xaf8700, + 137: 0xaf875f, + 138: 0xaf8787, + 139: 0xaf87af, + 140: 0xaf87d7, + 141: 0xaf87ff, + 142: 0xafaf00, + 143: 0xafaf5f, + 144: 0xafaf87, + 145: 0xafafaf, + 146: 0xafafd7, + 147: 0xafafff, + 148: 0xafd700, + 149: 0xafd75f, + 150: 0xafd787, + 151: 0xafd7af, + 152: 0xafd7d7, + 153: 0xafd7ff, + 154: 0xafff00, + 155: 0xafff5f, + 156: 0xafff87, + 
157: 0xafffaf, + 158: 0xafffd7, + 159: 0xafffff, + 160: 0xd70000, + 161: 0xd7005f, + 162: 0xd70087, + 163: 0xd700af, + 164: 0xd700d7, + 165: 0xd700ff, + 166: 0xd75f00, + 167: 0xd75f5f, + 168: 0xd75f87, + 169: 0xd75faf, + 170: 0xd75fd7, + 171: 0xd75fff, + 172: 0xd78700, + 173: 0xd7875f, + 174: 0xd78787, + 175: 0xd787af, + 176: 0xd787d7, + 177: 0xd787ff, + 178: 0xd7af00, + 179: 0xd7af5f, + 180: 0xd7af87, + 181: 0xd7afaf, + 182: 0xd7afd7, + 183: 0xd7afff, + 184: 0xd7d700, + 185: 0xd7d75f, + 186: 0xd7d787, + 187: 0xd7d7af, + 188: 0xd7d7d7, + 189: 0xd7d7ff, + 190: 0xd7ff00, + 191: 0xd7ff5f, + 192: 0xd7ff87, + 193: 0xd7ffaf, + 194: 0xd7ffd7, + 195: 0xd7ffff, + 196: 0xff0000, + 197: 0xff005f, + 198: 0xff0087, + 199: 0xff00af, + 200: 0xff00d7, + 201: 0xff00ff, + 202: 0xff5f00, + 203: 0xff5f5f, + 204: 0xff5f87, + 205: 0xff5faf, + 206: 0xff5fd7, + 207: 0xff5fff, + 208: 0xff8700, + 209: 0xff875f, + 210: 0xff8787, + 211: 0xff87af, + 212: 0xff87d7, + 213: 0xff87ff, + 214: 0xffaf00, + 215: 0xffaf5f, + 216: 0xffaf87, + 217: 0xffafaf, + 218: 0xffafd7, + 219: 0xffafff, + 220: 0xffd700, + 221: 0xffd75f, + 222: 0xffd787, + 223: 0xffd7af, + 224: 0xffd7d7, + 225: 0xffd7ff, + 226: 0xffff00, + 227: 0xffff5f, + 228: 0xffff87, + 229: 0xffffaf, + 230: 0xffffd7, + 231: 0xffffff, + 232: 0x080808, + 233: 0x121212, + 234: 0x1c1c1c, + 235: 0x262626, + 236: 0x303030, + 237: 0x3a3a3a, + 238: 0x444444, + 239: 0x4e4e4e, + 240: 0x585858, + 241: 0x626262, + 242: 0x6c6c6c, + 243: 0x767676, + 244: 0x808080, + 245: 0x8a8a8a, + 246: 0x949494, + 247: 0x9e9e9e, + 248: 0xa8a8a8, + 249: 0xb2b2b2, + 250: 0xbcbcbc, + 251: 0xc6c6c6, + 252: 0xd0d0d0, + 253: 0xdadada, + 254: 0xe4e4e4, + 255: 0xeeeeee, +} + +// `\033]0;TITLESTR\007` +func doTitleSequence(er *bytes.Reader) error { + var c byte + var err error + + c, err = er.ReadByte() + if err != nil { + return err + } + if c != '0' && c != '2' { + return nil + } + c, err = er.ReadByte() + if err != nil { + return err + } + if c != ';' { + return nil + } + title := make([]byte, 0, 80) + for { + c, err = er.ReadByte() + if err != nil { + return err + } + if c == 0x07 || c == '\n' { + break + } + title = append(title, c) + } + if len(title) > 0 { + title8, err := syscall.UTF16PtrFromString(string(title)) + if err == nil { + procSetConsoleTitle.Call(uintptr(unsafe.Pointer(title8))) + } + } + return nil +} + +// returns Atoi(s) unless s == "" in which case it returns def +func atoiWithDefault(s string, def int) (int, error) { + if s == "" { + return def, nil + } + return strconv.Atoi(s) +} + +// Write writes data on console +func (w *Writer) Write(data []byte) (n int, err error) { + w.mutex.Lock() + defer w.mutex.Unlock() + var csbi consoleScreenBufferInfo + procGetConsoleScreenBufferInfo.Call(uintptr(w.handle), uintptr(unsafe.Pointer(&csbi))) + + handle := w.handle + + var er *bytes.Reader + if w.rest.Len() > 0 { + var rest bytes.Buffer + w.rest.WriteTo(&rest) + w.rest.Reset() + rest.Write(data) + er = bytes.NewReader(rest.Bytes()) + } else { + er = bytes.NewReader(data) + } + var bw [1]byte +loop: + for { + c1, err := er.ReadByte() + if err != nil { + break loop + } + if c1 != 0x1b { + bw[0] = c1 + w.out.Write(bw[:]) + continue + } + c2, err := er.ReadByte() + if err != nil { + break loop + } + + switch c2 { + case '>': + continue + case ']': + w.rest.WriteByte(c1) + w.rest.WriteByte(c2) + er.WriteTo(&w.rest) + if bytes.IndexByte(w.rest.Bytes(), 0x07) == -1 { + break loop + } + er = bytes.NewReader(w.rest.Bytes()[2:]) + err := doTitleSequence(er) + if err != nil { + break loop + } + 
w.rest.Reset() + continue + // https://github.com/mattn/go-colorable/issues/27 + case '7': + procGetConsoleScreenBufferInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&csbi))) + w.oldpos = csbi.cursorPosition + continue + case '8': + procSetConsoleCursorPosition.Call(uintptr(handle), *(*uintptr)(unsafe.Pointer(&w.oldpos))) + continue + case 0x5b: + // execute part after switch + default: + continue + } + + w.rest.WriteByte(c1) + w.rest.WriteByte(c2) + er.WriteTo(&w.rest) + + var buf bytes.Buffer + var m byte + for i, c := range w.rest.Bytes()[2:] { + if ('a' <= c && c <= 'z') || ('A' <= c && c <= 'Z') || c == '@' { + m = c + er = bytes.NewReader(w.rest.Bytes()[2+i+1:]) + w.rest.Reset() + break + } + buf.Write([]byte(string(c))) + } + if m == 0 { + break loop + } + + switch m { + case 'A': + n, err = atoiWithDefault(buf.String(), 1) + if err != nil { + continue + } + procGetConsoleScreenBufferInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&csbi))) + csbi.cursorPosition.y -= short(n) + procSetConsoleCursorPosition.Call(uintptr(handle), *(*uintptr)(unsafe.Pointer(&csbi.cursorPosition))) + case 'B': + n, err = atoiWithDefault(buf.String(), 1) + if err != nil { + continue + } + procGetConsoleScreenBufferInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&csbi))) + csbi.cursorPosition.y += short(n) + procSetConsoleCursorPosition.Call(uintptr(handle), *(*uintptr)(unsafe.Pointer(&csbi.cursorPosition))) + case 'C': + n, err = atoiWithDefault(buf.String(), 1) + if err != nil { + continue + } + procGetConsoleScreenBufferInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&csbi))) + csbi.cursorPosition.x += short(n) + procSetConsoleCursorPosition.Call(uintptr(handle), *(*uintptr)(unsafe.Pointer(&csbi.cursorPosition))) + case 'D': + n, err = atoiWithDefault(buf.String(), 1) + if err != nil { + continue + } + procGetConsoleScreenBufferInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&csbi))) + csbi.cursorPosition.x -= short(n) + if csbi.cursorPosition.x < 0 { + csbi.cursorPosition.x = 0 + } + procSetConsoleCursorPosition.Call(uintptr(handle), *(*uintptr)(unsafe.Pointer(&csbi.cursorPosition))) + case 'E': + n, err = strconv.Atoi(buf.String()) + if err != nil { + continue + } + procGetConsoleScreenBufferInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&csbi))) + csbi.cursorPosition.x = 0 + csbi.cursorPosition.y += short(n) + procSetConsoleCursorPosition.Call(uintptr(handle), *(*uintptr)(unsafe.Pointer(&csbi.cursorPosition))) + case 'F': + n, err = strconv.Atoi(buf.String()) + if err != nil { + continue + } + procGetConsoleScreenBufferInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&csbi))) + csbi.cursorPosition.x = 0 + csbi.cursorPosition.y -= short(n) + procSetConsoleCursorPosition.Call(uintptr(handle), *(*uintptr)(unsafe.Pointer(&csbi.cursorPosition))) + case 'G': + n, err = strconv.Atoi(buf.String()) + if err != nil { + continue + } + if n < 1 { + n = 1 + } + procGetConsoleScreenBufferInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&csbi))) + csbi.cursorPosition.x = short(n - 1) + procSetConsoleCursorPosition.Call(uintptr(handle), *(*uintptr)(unsafe.Pointer(&csbi.cursorPosition))) + case 'H', 'f': + procGetConsoleScreenBufferInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&csbi))) + if buf.Len() > 0 { + token := strings.Split(buf.String(), ";") + switch len(token) { + case 1: + n1, err := strconv.Atoi(token[0]) + if err != nil { + continue + } + csbi.cursorPosition.y = short(n1 - 1) + case 2: + n1, err := strconv.Atoi(token[0]) + if err != nil { + continue + } + n2, err := 
strconv.Atoi(token[1]) + if err != nil { + continue + } + csbi.cursorPosition.x = short(n2 - 1) + csbi.cursorPosition.y = short(n1 - 1) + } + } else { + csbi.cursorPosition.y = 0 + } + procSetConsoleCursorPosition.Call(uintptr(handle), *(*uintptr)(unsafe.Pointer(&csbi.cursorPosition))) + case 'J': + n := 0 + if buf.Len() > 0 { + n, err = strconv.Atoi(buf.String()) + if err != nil { + continue + } + } + var count, written dword + var cursor coord + procGetConsoleScreenBufferInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&csbi))) + switch n { + case 0: + cursor = coord{x: csbi.cursorPosition.x, y: csbi.cursorPosition.y} + count = dword(csbi.size.x) - dword(csbi.cursorPosition.x) + dword(csbi.size.y-csbi.cursorPosition.y)*dword(csbi.size.x) + case 1: + cursor = coord{x: csbi.window.left, y: csbi.window.top} + count = dword(csbi.size.x) - dword(csbi.cursorPosition.x) + dword(csbi.window.top-csbi.cursorPosition.y)*dword(csbi.size.x) + case 2: + cursor = coord{x: csbi.window.left, y: csbi.window.top} + count = dword(csbi.size.x) - dword(csbi.cursorPosition.x) + dword(csbi.size.y-csbi.cursorPosition.y)*dword(csbi.size.x) + } + procFillConsoleOutputCharacter.Call(uintptr(handle), uintptr(' '), uintptr(count), *(*uintptr)(unsafe.Pointer(&cursor)), uintptr(unsafe.Pointer(&written))) + procFillConsoleOutputAttribute.Call(uintptr(handle), uintptr(csbi.attributes), uintptr(count), *(*uintptr)(unsafe.Pointer(&cursor)), uintptr(unsafe.Pointer(&written))) + case 'K': + n := 0 + if buf.Len() > 0 { + n, err = strconv.Atoi(buf.String()) + if err != nil { + continue + } + } + procGetConsoleScreenBufferInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&csbi))) + var cursor coord + var count, written dword + switch n { + case 0: + cursor = coord{x: csbi.cursorPosition.x, y: csbi.cursorPosition.y} + count = dword(csbi.size.x - csbi.cursorPosition.x) + case 1: + cursor = coord{x: csbi.window.left, y: csbi.cursorPosition.y} + count = dword(csbi.size.x - csbi.cursorPosition.x) + case 2: + cursor = coord{x: csbi.window.left, y: csbi.cursorPosition.y} + count = dword(csbi.size.x) + } + procFillConsoleOutputCharacter.Call(uintptr(handle), uintptr(' '), uintptr(count), *(*uintptr)(unsafe.Pointer(&cursor)), uintptr(unsafe.Pointer(&written))) + procFillConsoleOutputAttribute.Call(uintptr(handle), uintptr(csbi.attributes), uintptr(count), *(*uintptr)(unsafe.Pointer(&cursor)), uintptr(unsafe.Pointer(&written))) + case 'X': + n := 0 + if buf.Len() > 0 { + n, err = strconv.Atoi(buf.String()) + if err != nil { + continue + } + } + procGetConsoleScreenBufferInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&csbi))) + var cursor coord + var written dword + cursor = coord{x: csbi.cursorPosition.x, y: csbi.cursorPosition.y} + procFillConsoleOutputCharacter.Call(uintptr(handle), uintptr(' '), uintptr(n), *(*uintptr)(unsafe.Pointer(&cursor)), uintptr(unsafe.Pointer(&written))) + procFillConsoleOutputAttribute.Call(uintptr(handle), uintptr(csbi.attributes), uintptr(n), *(*uintptr)(unsafe.Pointer(&cursor)), uintptr(unsafe.Pointer(&written))) + case 'm': + procGetConsoleScreenBufferInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&csbi))) + attr := csbi.attributes + cs := buf.String() + if cs == "" { + procSetConsoleTextAttribute.Call(uintptr(handle), uintptr(w.oldattr)) + continue + } + token := strings.Split(cs, ";") + for i := 0; i < len(token); i++ { + ns := token[i] + if n, err = strconv.Atoi(ns); err == nil { + switch { + case n == 0 || n == 100: + attr = w.oldattr + case n == 4: + attr |= commonLvbUnderscore + case (1 <= n 
&& n <= 3) || n == 5: + attr |= foregroundIntensity + case n == 7 || n == 27: + attr = + (attr &^ (foregroundMask | backgroundMask)) | + ((attr & foregroundMask) << 4) | + ((attr & backgroundMask) >> 4) + case n == 22: + attr &^= foregroundIntensity + case n == 24: + attr &^= commonLvbUnderscore + case 30 <= n && n <= 37: + attr &= backgroundMask + if (n-30)&1 != 0 { + attr |= foregroundRed + } + if (n-30)&2 != 0 { + attr |= foregroundGreen + } + if (n-30)&4 != 0 { + attr |= foregroundBlue + } + case n == 38: // set foreground color. + if i < len(token)-2 && (token[i+1] == "5" || token[i+1] == "05") { + if n256, err := strconv.Atoi(token[i+2]); err == nil { + if n256foreAttr == nil { + n256setup() + } + attr &= backgroundMask + attr |= n256foreAttr[n256%len(n256foreAttr)] + i += 2 + } + } else if len(token) == 5 && token[i+1] == "2" { + var r, g, b int + r, _ = strconv.Atoi(token[i+2]) + g, _ = strconv.Atoi(token[i+3]) + b, _ = strconv.Atoi(token[i+4]) + i += 4 + if r > 127 { + attr |= foregroundRed + } + if g > 127 { + attr |= foregroundGreen + } + if b > 127 { + attr |= foregroundBlue + } + } else { + attr = attr & (w.oldattr & backgroundMask) + } + case n == 39: // reset foreground color. + attr &= backgroundMask + attr |= w.oldattr & foregroundMask + case 40 <= n && n <= 47: + attr &= foregroundMask + if (n-40)&1 != 0 { + attr |= backgroundRed + } + if (n-40)&2 != 0 { + attr |= backgroundGreen + } + if (n-40)&4 != 0 { + attr |= backgroundBlue + } + case n == 48: // set background color. + if i < len(token)-2 && token[i+1] == "5" { + if n256, err := strconv.Atoi(token[i+2]); err == nil { + if n256backAttr == nil { + n256setup() + } + attr &= foregroundMask + attr |= n256backAttr[n256%len(n256backAttr)] + i += 2 + } + } else if len(token) == 5 && token[i+1] == "2" { + var r, g, b int + r, _ = strconv.Atoi(token[i+2]) + g, _ = strconv.Atoi(token[i+3]) + b, _ = strconv.Atoi(token[i+4]) + i += 4 + if r > 127 { + attr |= backgroundRed + } + if g > 127 { + attr |= backgroundGreen + } + if b > 127 { + attr |= backgroundBlue + } + } else { + attr = attr & (w.oldattr & foregroundMask) + } + case n == 49: // reset foreground color. 
+ attr &= foregroundMask + attr |= w.oldattr & backgroundMask + case 90 <= n && n <= 97: + attr = (attr & backgroundMask) + attr |= foregroundIntensity + if (n-90)&1 != 0 { + attr |= foregroundRed + } + if (n-90)&2 != 0 { + attr |= foregroundGreen + } + if (n-90)&4 != 0 { + attr |= foregroundBlue + } + case 100 <= n && n <= 107: + attr = (attr & foregroundMask) + attr |= backgroundIntensity + if (n-100)&1 != 0 { + attr |= backgroundRed + } + if (n-100)&2 != 0 { + attr |= backgroundGreen + } + if (n-100)&4 != 0 { + attr |= backgroundBlue + } + } + procSetConsoleTextAttribute.Call(uintptr(handle), uintptr(attr)) + } + } + case 'h': + var ci consoleCursorInfo + cs := buf.String() + if cs == "5>" { + procGetConsoleCursorInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&ci))) + ci.visible = 0 + procSetConsoleCursorInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&ci))) + } else if cs == "?25" { + procGetConsoleCursorInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&ci))) + ci.visible = 1 + procSetConsoleCursorInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&ci))) + } else if cs == "?1049" { + if w.althandle == 0 { + h, _, _ := procCreateConsoleScreenBuffer.Call(uintptr(genericRead|genericWrite), 0, 0, uintptr(consoleTextmodeBuffer), 0, 0) + w.althandle = syscall.Handle(h) + if w.althandle != 0 { + handle = w.althandle + } + } + } + case 'l': + var ci consoleCursorInfo + cs := buf.String() + if cs == "5>" { + procGetConsoleCursorInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&ci))) + ci.visible = 1 + procSetConsoleCursorInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&ci))) + } else if cs == "?25" { + procGetConsoleCursorInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&ci))) + ci.visible = 0 + procSetConsoleCursorInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&ci))) + } else if cs == "?1049" { + if w.althandle != 0 { + syscall.CloseHandle(w.althandle) + w.althandle = 0 + handle = w.handle + } + } + case 's': + procGetConsoleScreenBufferInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&csbi))) + w.oldpos = csbi.cursorPosition + case 'u': + procSetConsoleCursorPosition.Call(uintptr(handle), *(*uintptr)(unsafe.Pointer(&w.oldpos))) + } + } + + return len(data), nil +} + +type consoleColor struct { + rgb int + red bool + green bool + blue bool + intensity bool +} + +func (c consoleColor) foregroundAttr() (attr word) { + if c.red { + attr |= foregroundRed + } + if c.green { + attr |= foregroundGreen + } + if c.blue { + attr |= foregroundBlue + } + if c.intensity { + attr |= foregroundIntensity + } + return +} + +func (c consoleColor) backgroundAttr() (attr word) { + if c.red { + attr |= backgroundRed + } + if c.green { + attr |= backgroundGreen + } + if c.blue { + attr |= backgroundBlue + } + if c.intensity { + attr |= backgroundIntensity + } + return +} + +var color16 = []consoleColor{ + {0x000000, false, false, false, false}, + {0x000080, false, false, true, false}, + {0x008000, false, true, false, false}, + {0x008080, false, true, true, false}, + {0x800000, true, false, false, false}, + {0x800080, true, false, true, false}, + {0x808000, true, true, false, false}, + {0xc0c0c0, true, true, true, false}, + {0x808080, false, false, false, true}, + {0x0000ff, false, false, true, true}, + {0x00ff00, false, true, false, true}, + {0x00ffff, false, true, true, true}, + {0xff0000, true, false, false, true}, + {0xff00ff, true, false, true, true}, + {0xffff00, true, true, false, true}, + {0xffffff, true, true, true, true}, +} + +type hsv struct { + h, s, v float32 +} + +func (a hsv) dist(b hsv) 
float32 { + dh := a.h - b.h + switch { + case dh > 0.5: + dh = 1 - dh + case dh < -0.5: + dh = -1 - dh + } + ds := a.s - b.s + dv := a.v - b.v + return float32(math.Sqrt(float64(dh*dh + ds*ds + dv*dv))) +} + +func toHSV(rgb int) hsv { + r, g, b := float32((rgb&0xFF0000)>>16)/256.0, + float32((rgb&0x00FF00)>>8)/256.0, + float32(rgb&0x0000FF)/256.0 + min, max := minmax3f(r, g, b) + h := max - min + if h > 0 { + if max == r { + h = (g - b) / h + if h < 0 { + h += 6 + } + } else if max == g { + h = 2 + (b-r)/h + } else { + h = 4 + (r-g)/h + } + } + h /= 6.0 + s := max - min + if max != 0 { + s /= max + } + v := max + return hsv{h: h, s: s, v: v} +} + +type hsvTable []hsv + +func toHSVTable(rgbTable []consoleColor) hsvTable { + t := make(hsvTable, len(rgbTable)) + for i, c := range rgbTable { + t[i] = toHSV(c.rgb) + } + return t +} + +func (t hsvTable) find(rgb int) consoleColor { + hsv := toHSV(rgb) + n := 7 + l := float32(5.0) + for i, p := range t { + d := hsv.dist(p) + if d < l { + l, n = d, i + } + } + return color16[n] +} + +func minmax3f(a, b, c float32) (min, max float32) { + if a < b { + if b < c { + return a, c + } else if a < c { + return a, b + } else { + return c, b + } + } else { + if a < c { + return b, c + } else if b < c { + return b, a + } else { + return c, a + } + } +} + +var n256foreAttr []word +var n256backAttr []word + +func n256setup() { + n256foreAttr = make([]word, 256) + n256backAttr = make([]word, 256) + t := toHSVTable(color16) + for i, rgb := range color256 { + c := t.find(rgb) + n256foreAttr[i] = c.foregroundAttr() + n256backAttr[i] = c.backgroundAttr() + } +} + +// EnableColorsStdout enable colors if possible. +func EnableColorsStdout(enabled *bool) func() { + var mode uint32 + h := os.Stdout.Fd() + if r, _, _ := procGetConsoleMode.Call(h, uintptr(unsafe.Pointer(&mode))); r != 0 { + if r, _, _ = procSetConsoleMode.Call(h, uintptr(mode|cENABLE_VIRTUAL_TERMINAL_PROCESSING)); r != 0 { + if enabled != nil { + *enabled = true + } + return func() { + procSetConsoleMode.Call(h, uintptr(mode)) + } + } + } + if enabled != nil { + *enabled = true + } + return func() {} +} diff --git a/vendor/github.com/mattn/go-colorable/go.mod b/vendor/github.com/mattn/go-colorable/go.mod new file mode 100644 index 000000000..1e590b819 --- /dev/null +++ b/vendor/github.com/mattn/go-colorable/go.mod @@ -0,0 +1,8 @@ +module github.com/mattn/go-colorable + +require ( + github.com/mattn/go-isatty v0.0.12 + golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae // indirect +) + +go 1.13 diff --git a/vendor/github.com/mattn/go-colorable/go.sum b/vendor/github.com/mattn/go-colorable/go.sum new file mode 100644 index 000000000..cf5b95d97 --- /dev/null +++ b/vendor/github.com/mattn/go-colorable/go.sum @@ -0,0 +1,5 @@ +github.com/mattn/go-isatty v0.0.12 h1:wuysRhFDzyxgEmMf5xjvJ2M9dZoWAXNNr5LSBS7uHXY= +github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= +golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae h1:/WDfKMnPU+m5M4xB+6x4kaepxRw6jWvR5iDRdvjHgy8= +golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= diff --git a/vendor/github.com/mattn/go-colorable/go.test.sh b/vendor/github.com/mattn/go-colorable/go.test.sh new file mode 100644 index 000000000..012162b07 --- /dev/null +++ b/vendor/github.com/mattn/go-colorable/go.test.sh @@ -0,0 +1,12 @@ +#!/usr/bin/env bash + +set -e +echo "" > coverage.txt + +for 
d in $(go list ./... | grep -v vendor); do + go test -race -coverprofile=profile.out -covermode=atomic "$d" + if [ -f profile.out ]; then + cat profile.out >> coverage.txt + rm profile.out + fi +done diff --git a/vendor/github.com/mattn/go-colorable/noncolorable.go b/vendor/github.com/mattn/go-colorable/noncolorable.go new file mode 100644 index 000000000..95f2c6be2 --- /dev/null +++ b/vendor/github.com/mattn/go-colorable/noncolorable.go @@ -0,0 +1,55 @@ +package colorable + +import ( + "bytes" + "io" +) + +// NonColorable holds writer but removes escape sequence. +type NonColorable struct { + out io.Writer +} + +// NewNonColorable returns new instance of Writer which removes escape sequence from Writer. +func NewNonColorable(w io.Writer) io.Writer { + return &NonColorable{out: w} +} + +// Write writes data on console +func (w *NonColorable) Write(data []byte) (n int, err error) { + er := bytes.NewReader(data) + var bw [1]byte +loop: + for { + c1, err := er.ReadByte() + if err != nil { + break loop + } + if c1 != 0x1b { + bw[0] = c1 + w.out.Write(bw[:]) + continue + } + c2, err := er.ReadByte() + if err != nil { + break loop + } + if c2 != 0x5b { + continue + } + + var buf bytes.Buffer + for { + c, err := er.ReadByte() + if err != nil { + break loop + } + if ('a' <= c && c <= 'z') || ('A' <= c && c <= 'Z') || c == '@' { + break + } + buf.Write([]byte(string(c))) + } + } + + return len(data), nil +} diff --git a/vendor/github.com/mattn/go-runewidth/.travis.yml b/vendor/github.com/mattn/go-runewidth/.travis.yml new file mode 100644 index 000000000..6a21813a3 --- /dev/null +++ b/vendor/github.com/mattn/go-runewidth/.travis.yml @@ -0,0 +1,16 @@ +language: go +sudo: false +go: + - 1.13.x + - tip + +before_install: + - go get -t -v ./... + +script: + - go generate + - git diff --cached --exit-code + - ./go.test.sh + +after_success: + - bash <(curl -s https://codecov.io/bash) diff --git a/vendor/github.com/mattn/go-runewidth/LICENSE b/vendor/github.com/mattn/go-runewidth/LICENSE new file mode 100644 index 000000000..91b5cef30 --- /dev/null +++ b/vendor/github.com/mattn/go-runewidth/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2016 Yasuhiro Matsumoto + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
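The go-colorable writers vendored above (including the Windows translation in colorable_windows.go and the escape-stripping NonColorable) can be exercised with any code that emits ANSI sequences. A minimal sketch, not part of the patch:

```go
// Sketch only: writing ANSI escape sequences through the vendored
// go-colorable writers.
package main

import (
	"fmt"
	"os"

	"github.com/mattn/go-colorable"
)

func main() {
	// On Windows the returned writer translates ANSI sequences into
	// console API calls; elsewhere it is effectively os.Stdout.
	out := colorable.NewColorableStdout()
	fmt.Fprintf(out, "\x1b[31merror:\x1b[0m something failed\n")

	// NewNonColorable strips escape sequences instead, e.g. for log files.
	plain := colorable.NewNonColorable(os.Stderr)
	fmt.Fprintf(plain, "\x1b[32mok\x1b[0m done\n") // writes "ok done"
}
```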
diff --git a/vendor/github.com/mattn/go-runewidth/README.md b/vendor/github.com/mattn/go-runewidth/README.md new file mode 100644 index 000000000..aa56ab96c --- /dev/null +++ b/vendor/github.com/mattn/go-runewidth/README.md @@ -0,0 +1,27 @@ +go-runewidth +============ + +[![Build Status](https://travis-ci.org/mattn/go-runewidth.png?branch=master)](https://travis-ci.org/mattn/go-runewidth) +[![Codecov](https://codecov.io/gh/mattn/go-runewidth/branch/master/graph/badge.svg)](https://codecov.io/gh/mattn/go-runewidth) +[![GoDoc](https://godoc.org/github.com/mattn/go-runewidth?status.svg)](http://godoc.org/github.com/mattn/go-runewidth) +[![Go Report Card](https://goreportcard.com/badge/github.com/mattn/go-runewidth)](https://goreportcard.com/report/github.com/mattn/go-runewidth) + +Provides functions to get fixed width of the character or string. + +Usage +----- + +```go +runewidth.StringWidth("つのだ☆HIRO") == 12 +``` + + +Author +------ + +Yasuhiro Matsumoto + +License +------- + +under the MIT License: http://mattn.mit-license.org/2013 diff --git a/vendor/github.com/mattn/go-runewidth/go.mod b/vendor/github.com/mattn/go-runewidth/go.mod new file mode 100644 index 000000000..fa7f4d864 --- /dev/null +++ b/vendor/github.com/mattn/go-runewidth/go.mod @@ -0,0 +1,3 @@ +module github.com/mattn/go-runewidth + +go 1.9 diff --git a/vendor/github.com/mattn/go-runewidth/go.test.sh b/vendor/github.com/mattn/go-runewidth/go.test.sh new file mode 100644 index 000000000..012162b07 --- /dev/null +++ b/vendor/github.com/mattn/go-runewidth/go.test.sh @@ -0,0 +1,12 @@ +#!/usr/bin/env bash + +set -e +echo "" > coverage.txt + +for d in $(go list ./... | grep -v vendor); do + go test -race -coverprofile=profile.out -covermode=atomic "$d" + if [ -f profile.out ]; then + cat profile.out >> coverage.txt + rm profile.out + fi +done diff --git a/vendor/github.com/mattn/go-runewidth/runewidth.go b/vendor/github.com/mattn/go-runewidth/runewidth.go new file mode 100644 index 000000000..19f8e0449 --- /dev/null +++ b/vendor/github.com/mattn/go-runewidth/runewidth.go @@ -0,0 +1,257 @@ +package runewidth + +import ( + "os" +) + +//go:generate go run script/generate.go + +var ( + // EastAsianWidth will be set true if the current locale is CJK + EastAsianWidth bool + + // ZeroWidthJoiner is flag to set to use UTR#51 ZWJ + ZeroWidthJoiner bool + + // DefaultCondition is a condition in current locale + DefaultCondition = &Condition{} +) + +func init() { + handleEnv() +} + +func handleEnv() { + env := os.Getenv("RUNEWIDTH_EASTASIAN") + if env == "" { + EastAsianWidth = IsEastAsian() + } else { + EastAsianWidth = env == "1" + } + // update DefaultCondition + DefaultCondition.EastAsianWidth = EastAsianWidth + DefaultCondition.ZeroWidthJoiner = ZeroWidthJoiner +} + +type interval struct { + first rune + last rune +} + +type table []interval + +func inTables(r rune, ts ...table) bool { + for _, t := range ts { + if inTable(r, t) { + return true + } + } + return false +} + +func inTable(r rune, t table) bool { + if r < t[0].first { + return false + } + + bot := 0 + top := len(t) - 1 + for top >= bot { + mid := (bot + top) >> 1 + + switch { + case t[mid].last < r: + bot = mid + 1 + case t[mid].first > r: + top = mid - 1 + default: + return true + } + } + + return false +} + +var private = table{ + {0x00E000, 0x00F8FF}, {0x0F0000, 0x0FFFFD}, {0x100000, 0x10FFFD}, +} + +var nonprint = table{ + {0x0000, 0x001F}, {0x007F, 0x009F}, {0x00AD, 0x00AD}, + {0x070F, 0x070F}, {0x180B, 0x180E}, {0x200B, 0x200F}, + {0x2028, 0x202E}, {0x206A, 0x206F}, 
{0xD800, 0xDFFF}, + {0xFEFF, 0xFEFF}, {0xFFF9, 0xFFFB}, {0xFFFE, 0xFFFF}, +} + +// Condition have flag EastAsianWidth whether the current locale is CJK or not. +type Condition struct { + EastAsianWidth bool + ZeroWidthJoiner bool +} + +// NewCondition return new instance of Condition which is current locale. +func NewCondition() *Condition { + return &Condition{ + EastAsianWidth: EastAsianWidth, + ZeroWidthJoiner: ZeroWidthJoiner, + } +} + +// RuneWidth returns the number of cells in r. +// See http://www.unicode.org/reports/tr11/ +func (c *Condition) RuneWidth(r rune) int { + switch { + case r < 0 || r > 0x10FFFF || inTables(r, nonprint, combining, notassigned): + return 0 + case (c.EastAsianWidth && IsAmbiguousWidth(r)) || inTables(r, doublewidth): + return 2 + default: + return 1 + } +} + +func (c *Condition) stringWidth(s string) (width int) { + for _, r := range []rune(s) { + width += c.RuneWidth(r) + } + return width +} + +func (c *Condition) stringWidthZeroJoiner(s string) (width int) { + r1, r2 := rune(0), rune(0) + for _, r := range []rune(s) { + if r == 0xFE0E || r == 0xFE0F { + continue + } + w := c.RuneWidth(r) + if r2 == 0x200D && inTables(r, emoji) && inTables(r1, emoji) { + if width < w { + width = w + } + } else { + width += w + } + r1, r2 = r2, r + } + return width +} + +// StringWidth return width as you can see +func (c *Condition) StringWidth(s string) (width int) { + if c.ZeroWidthJoiner { + return c.stringWidthZeroJoiner(s) + } + return c.stringWidth(s) +} + +// Truncate return string truncated with w cells +func (c *Condition) Truncate(s string, w int, tail string) string { + if c.StringWidth(s) <= w { + return s + } + r := []rune(s) + tw := c.StringWidth(tail) + w -= tw + width := 0 + i := 0 + for ; i < len(r); i++ { + cw := c.RuneWidth(r[i]) + if width+cw > w { + break + } + width += cw + } + return string(r[0:i]) + tail +} + +// Wrap return string wrapped with w cells +func (c *Condition) Wrap(s string, w int) string { + width := 0 + out := "" + for _, r := range []rune(s) { + cw := RuneWidth(r) + if r == '\n' { + out += string(r) + width = 0 + continue + } else if width+cw > w { + out += "\n" + width = 0 + out += string(r) + width += cw + continue + } + out += string(r) + width += cw + } + return out +} + +// FillLeft return string filled in left by spaces in w cells +func (c *Condition) FillLeft(s string, w int) string { + width := c.StringWidth(s) + count := w - width + if count > 0 { + b := make([]byte, count) + for i := range b { + b[i] = ' ' + } + return string(b) + s + } + return s +} + +// FillRight return string filled in left by spaces in w cells +func (c *Condition) FillRight(s string, w int) string { + width := c.StringWidth(s) + count := w - width + if count > 0 { + b := make([]byte, count) + for i := range b { + b[i] = ' ' + } + return s + string(b) + } + return s +} + +// RuneWidth returns the number of cells in r. +// See http://www.unicode.org/reports/tr11/ +func RuneWidth(r rune) int { + return DefaultCondition.RuneWidth(r) +} + +// IsAmbiguousWidth returns whether is ambiguous width or not. +func IsAmbiguousWidth(r rune) bool { + return inTables(r, private, ambiguous) +} + +// IsNeutralWidth returns whether is neutral width or not. 
+func IsNeutralWidth(r rune) bool { + return inTable(r, neutral) +} + +// StringWidth return width as you can see +func StringWidth(s string) (width int) { + return DefaultCondition.StringWidth(s) +} + +// Truncate return string truncated with w cells +func Truncate(s string, w int, tail string) string { + return DefaultCondition.Truncate(s, w, tail) +} + +// Wrap return string wrapped with w cells +func Wrap(s string, w int) string { + return DefaultCondition.Wrap(s, w) +} + +// FillLeft return string filled in left by spaces in w cells +func FillLeft(s string, w int) string { + return DefaultCondition.FillLeft(s, w) +} + +// FillRight return string filled in left by spaces in w cells +func FillRight(s string, w int) string { + return DefaultCondition.FillRight(s, w) +} diff --git a/vendor/github.com/mattn/go-runewidth/runewidth_appengine.go b/vendor/github.com/mattn/go-runewidth/runewidth_appengine.go new file mode 100644 index 000000000..7d99f6e52 --- /dev/null +++ b/vendor/github.com/mattn/go-runewidth/runewidth_appengine.go @@ -0,0 +1,8 @@ +// +build appengine + +package runewidth + +// IsEastAsian return true if the current locale is CJK +func IsEastAsian() bool { + return false +} diff --git a/vendor/github.com/mattn/go-runewidth/runewidth_js.go b/vendor/github.com/mattn/go-runewidth/runewidth_js.go new file mode 100644 index 000000000..c5fdf40ba --- /dev/null +++ b/vendor/github.com/mattn/go-runewidth/runewidth_js.go @@ -0,0 +1,9 @@ +// +build js +// +build !appengine + +package runewidth + +func IsEastAsian() bool { + // TODO: Implement this for the web. Detect east asian in a compatible way, and return true. + return false +} diff --git a/vendor/github.com/mattn/go-runewidth/runewidth_posix.go b/vendor/github.com/mattn/go-runewidth/runewidth_posix.go new file mode 100644 index 000000000..480ad7485 --- /dev/null +++ b/vendor/github.com/mattn/go-runewidth/runewidth_posix.go @@ -0,0 +1,82 @@ +// +build !windows +// +build !js +// +build !appengine + +package runewidth + +import ( + "os" + "regexp" + "strings" +) + +var reLoc = regexp.MustCompile(`^[a-z][a-z][a-z]?(?:_[A-Z][A-Z])?\.(.+)`) + +var mblenTable = map[string]int{ + "utf-8": 6, + "utf8": 6, + "jis": 8, + "eucjp": 3, + "euckr": 2, + "euccn": 2, + "sjis": 2, + "cp932": 2, + "cp51932": 2, + "cp936": 2, + "cp949": 2, + "cp950": 2, + "big5": 2, + "gbk": 2, + "gb2312": 2, +} + +func isEastAsian(locale string) bool { + charset := strings.ToLower(locale) + r := reLoc.FindStringSubmatch(locale) + if len(r) == 2 { + charset = strings.ToLower(r[1]) + } + + if strings.HasSuffix(charset, "@cjk_narrow") { + return false + } + + for pos, b := range []byte(charset) { + if b == '@' { + charset = charset[:pos] + break + } + } + max := 1 + if m, ok := mblenTable[charset]; ok { + max = m + } + if max > 1 && (charset[0] != 'u' || + strings.HasPrefix(locale, "ja") || + strings.HasPrefix(locale, "ko") || + strings.HasPrefix(locale, "zh")) { + return true + } + return false +} + +// IsEastAsian return true if the current locale is CJK +func IsEastAsian() bool { + locale := os.Getenv("LC_ALL") + if locale == "" { + locale = os.Getenv("LC_CTYPE") + } + if locale == "" { + locale = os.Getenv("LANG") + } + + // ignore C locale + if locale == "POSIX" || locale == "C" { + return false + } + if len(locale) > 1 && locale[0] == 'C' && (locale[1] == '.' 
|| locale[1] == '-') { + return false + } + + return isEastAsian(locale) +} diff --git a/vendor/github.com/mattn/go-runewidth/runewidth_table.go b/vendor/github.com/mattn/go-runewidth/runewidth_table.go new file mode 100644 index 000000000..b27d77d89 --- /dev/null +++ b/vendor/github.com/mattn/go-runewidth/runewidth_table.go @@ -0,0 +1,437 @@ +// Code generated by script/generate.go. DO NOT EDIT. + +package runewidth + +var combining = table{ + {0x0300, 0x036F}, {0x0483, 0x0489}, {0x07EB, 0x07F3}, + {0x0C00, 0x0C00}, {0x0C04, 0x0C04}, {0x0D00, 0x0D01}, + {0x135D, 0x135F}, {0x1A7F, 0x1A7F}, {0x1AB0, 0x1AC0}, + {0x1B6B, 0x1B73}, {0x1DC0, 0x1DF9}, {0x1DFB, 0x1DFF}, + {0x20D0, 0x20F0}, {0x2CEF, 0x2CF1}, {0x2DE0, 0x2DFF}, + {0x3099, 0x309A}, {0xA66F, 0xA672}, {0xA674, 0xA67D}, + {0xA69E, 0xA69F}, {0xA6F0, 0xA6F1}, {0xA8E0, 0xA8F1}, + {0xFE20, 0xFE2F}, {0x101FD, 0x101FD}, {0x10376, 0x1037A}, + {0x10EAB, 0x10EAC}, {0x10F46, 0x10F50}, {0x11300, 0x11301}, + {0x1133B, 0x1133C}, {0x11366, 0x1136C}, {0x11370, 0x11374}, + {0x16AF0, 0x16AF4}, {0x1D165, 0x1D169}, {0x1D16D, 0x1D172}, + {0x1D17B, 0x1D182}, {0x1D185, 0x1D18B}, {0x1D1AA, 0x1D1AD}, + {0x1D242, 0x1D244}, {0x1E000, 0x1E006}, {0x1E008, 0x1E018}, + {0x1E01B, 0x1E021}, {0x1E023, 0x1E024}, {0x1E026, 0x1E02A}, + {0x1E8D0, 0x1E8D6}, +} + +var doublewidth = table{ + {0x1100, 0x115F}, {0x231A, 0x231B}, {0x2329, 0x232A}, + {0x23E9, 0x23EC}, {0x23F0, 0x23F0}, {0x23F3, 0x23F3}, + {0x25FD, 0x25FE}, {0x2614, 0x2615}, {0x2648, 0x2653}, + {0x267F, 0x267F}, {0x2693, 0x2693}, {0x26A1, 0x26A1}, + {0x26AA, 0x26AB}, {0x26BD, 0x26BE}, {0x26C4, 0x26C5}, + {0x26CE, 0x26CE}, {0x26D4, 0x26D4}, {0x26EA, 0x26EA}, + {0x26F2, 0x26F3}, {0x26F5, 0x26F5}, {0x26FA, 0x26FA}, + {0x26FD, 0x26FD}, {0x2705, 0x2705}, {0x270A, 0x270B}, + {0x2728, 0x2728}, {0x274C, 0x274C}, {0x274E, 0x274E}, + {0x2753, 0x2755}, {0x2757, 0x2757}, {0x2795, 0x2797}, + {0x27B0, 0x27B0}, {0x27BF, 0x27BF}, {0x2B1B, 0x2B1C}, + {0x2B50, 0x2B50}, {0x2B55, 0x2B55}, {0x2E80, 0x2E99}, + {0x2E9B, 0x2EF3}, {0x2F00, 0x2FD5}, {0x2FF0, 0x2FFB}, + {0x3000, 0x303E}, {0x3041, 0x3096}, {0x3099, 0x30FF}, + {0x3105, 0x312F}, {0x3131, 0x318E}, {0x3190, 0x31E3}, + {0x31F0, 0x321E}, {0x3220, 0x3247}, {0x3250, 0x4DBF}, + {0x4E00, 0xA48C}, {0xA490, 0xA4C6}, {0xA960, 0xA97C}, + {0xAC00, 0xD7A3}, {0xF900, 0xFAFF}, {0xFE10, 0xFE19}, + {0xFE30, 0xFE52}, {0xFE54, 0xFE66}, {0xFE68, 0xFE6B}, + {0xFF01, 0xFF60}, {0xFFE0, 0xFFE6}, {0x16FE0, 0x16FE4}, + {0x16FF0, 0x16FF1}, {0x17000, 0x187F7}, {0x18800, 0x18CD5}, + {0x18D00, 0x18D08}, {0x1B000, 0x1B11E}, {0x1B150, 0x1B152}, + {0x1B164, 0x1B167}, {0x1B170, 0x1B2FB}, {0x1F004, 0x1F004}, + {0x1F0CF, 0x1F0CF}, {0x1F18E, 0x1F18E}, {0x1F191, 0x1F19A}, + {0x1F200, 0x1F202}, {0x1F210, 0x1F23B}, {0x1F240, 0x1F248}, + {0x1F250, 0x1F251}, {0x1F260, 0x1F265}, {0x1F300, 0x1F320}, + {0x1F32D, 0x1F335}, {0x1F337, 0x1F37C}, {0x1F37E, 0x1F393}, + {0x1F3A0, 0x1F3CA}, {0x1F3CF, 0x1F3D3}, {0x1F3E0, 0x1F3F0}, + {0x1F3F4, 0x1F3F4}, {0x1F3F8, 0x1F43E}, {0x1F440, 0x1F440}, + {0x1F442, 0x1F4FC}, {0x1F4FF, 0x1F53D}, {0x1F54B, 0x1F54E}, + {0x1F550, 0x1F567}, {0x1F57A, 0x1F57A}, {0x1F595, 0x1F596}, + {0x1F5A4, 0x1F5A4}, {0x1F5FB, 0x1F64F}, {0x1F680, 0x1F6C5}, + {0x1F6CC, 0x1F6CC}, {0x1F6D0, 0x1F6D2}, {0x1F6D5, 0x1F6D7}, + {0x1F6EB, 0x1F6EC}, {0x1F6F4, 0x1F6FC}, {0x1F7E0, 0x1F7EB}, + {0x1F90C, 0x1F93A}, {0x1F93C, 0x1F945}, {0x1F947, 0x1F978}, + {0x1F97A, 0x1F9CB}, {0x1F9CD, 0x1F9FF}, {0x1FA70, 0x1FA74}, + {0x1FA78, 0x1FA7A}, {0x1FA80, 0x1FA86}, {0x1FA90, 0x1FAA8}, + {0x1FAB0, 0x1FAB6}, {0x1FAC0, 0x1FAC2}, {0x1FAD0, 
0x1FAD6}, + {0x20000, 0x2FFFD}, {0x30000, 0x3FFFD}, +} + +var ambiguous = table{ + {0x00A1, 0x00A1}, {0x00A4, 0x00A4}, {0x00A7, 0x00A8}, + {0x00AA, 0x00AA}, {0x00AD, 0x00AE}, {0x00B0, 0x00B4}, + {0x00B6, 0x00BA}, {0x00BC, 0x00BF}, {0x00C6, 0x00C6}, + {0x00D0, 0x00D0}, {0x00D7, 0x00D8}, {0x00DE, 0x00E1}, + {0x00E6, 0x00E6}, {0x00E8, 0x00EA}, {0x00EC, 0x00ED}, + {0x00F0, 0x00F0}, {0x00F2, 0x00F3}, {0x00F7, 0x00FA}, + {0x00FC, 0x00FC}, {0x00FE, 0x00FE}, {0x0101, 0x0101}, + {0x0111, 0x0111}, {0x0113, 0x0113}, {0x011B, 0x011B}, + {0x0126, 0x0127}, {0x012B, 0x012B}, {0x0131, 0x0133}, + {0x0138, 0x0138}, {0x013F, 0x0142}, {0x0144, 0x0144}, + {0x0148, 0x014B}, {0x014D, 0x014D}, {0x0152, 0x0153}, + {0x0166, 0x0167}, {0x016B, 0x016B}, {0x01CE, 0x01CE}, + {0x01D0, 0x01D0}, {0x01D2, 0x01D2}, {0x01D4, 0x01D4}, + {0x01D6, 0x01D6}, {0x01D8, 0x01D8}, {0x01DA, 0x01DA}, + {0x01DC, 0x01DC}, {0x0251, 0x0251}, {0x0261, 0x0261}, + {0x02C4, 0x02C4}, {0x02C7, 0x02C7}, {0x02C9, 0x02CB}, + {0x02CD, 0x02CD}, {0x02D0, 0x02D0}, {0x02D8, 0x02DB}, + {0x02DD, 0x02DD}, {0x02DF, 0x02DF}, {0x0300, 0x036F}, + {0x0391, 0x03A1}, {0x03A3, 0x03A9}, {0x03B1, 0x03C1}, + {0x03C3, 0x03C9}, {0x0401, 0x0401}, {0x0410, 0x044F}, + {0x0451, 0x0451}, {0x2010, 0x2010}, {0x2013, 0x2016}, + {0x2018, 0x2019}, {0x201C, 0x201D}, {0x2020, 0x2022}, + {0x2024, 0x2027}, {0x2030, 0x2030}, {0x2032, 0x2033}, + {0x2035, 0x2035}, {0x203B, 0x203B}, {0x203E, 0x203E}, + {0x2074, 0x2074}, {0x207F, 0x207F}, {0x2081, 0x2084}, + {0x20AC, 0x20AC}, {0x2103, 0x2103}, {0x2105, 0x2105}, + {0x2109, 0x2109}, {0x2113, 0x2113}, {0x2116, 0x2116}, + {0x2121, 0x2122}, {0x2126, 0x2126}, {0x212B, 0x212B}, + {0x2153, 0x2154}, {0x215B, 0x215E}, {0x2160, 0x216B}, + {0x2170, 0x2179}, {0x2189, 0x2189}, {0x2190, 0x2199}, + {0x21B8, 0x21B9}, {0x21D2, 0x21D2}, {0x21D4, 0x21D4}, + {0x21E7, 0x21E7}, {0x2200, 0x2200}, {0x2202, 0x2203}, + {0x2207, 0x2208}, {0x220B, 0x220B}, {0x220F, 0x220F}, + {0x2211, 0x2211}, {0x2215, 0x2215}, {0x221A, 0x221A}, + {0x221D, 0x2220}, {0x2223, 0x2223}, {0x2225, 0x2225}, + {0x2227, 0x222C}, {0x222E, 0x222E}, {0x2234, 0x2237}, + {0x223C, 0x223D}, {0x2248, 0x2248}, {0x224C, 0x224C}, + {0x2252, 0x2252}, {0x2260, 0x2261}, {0x2264, 0x2267}, + {0x226A, 0x226B}, {0x226E, 0x226F}, {0x2282, 0x2283}, + {0x2286, 0x2287}, {0x2295, 0x2295}, {0x2299, 0x2299}, + {0x22A5, 0x22A5}, {0x22BF, 0x22BF}, {0x2312, 0x2312}, + {0x2460, 0x24E9}, {0x24EB, 0x254B}, {0x2550, 0x2573}, + {0x2580, 0x258F}, {0x2592, 0x2595}, {0x25A0, 0x25A1}, + {0x25A3, 0x25A9}, {0x25B2, 0x25B3}, {0x25B6, 0x25B7}, + {0x25BC, 0x25BD}, {0x25C0, 0x25C1}, {0x25C6, 0x25C8}, + {0x25CB, 0x25CB}, {0x25CE, 0x25D1}, {0x25E2, 0x25E5}, + {0x25EF, 0x25EF}, {0x2605, 0x2606}, {0x2609, 0x2609}, + {0x260E, 0x260F}, {0x261C, 0x261C}, {0x261E, 0x261E}, + {0x2640, 0x2640}, {0x2642, 0x2642}, {0x2660, 0x2661}, + {0x2663, 0x2665}, {0x2667, 0x266A}, {0x266C, 0x266D}, + {0x266F, 0x266F}, {0x269E, 0x269F}, {0x26BF, 0x26BF}, + {0x26C6, 0x26CD}, {0x26CF, 0x26D3}, {0x26D5, 0x26E1}, + {0x26E3, 0x26E3}, {0x26E8, 0x26E9}, {0x26EB, 0x26F1}, + {0x26F4, 0x26F4}, {0x26F6, 0x26F9}, {0x26FB, 0x26FC}, + {0x26FE, 0x26FF}, {0x273D, 0x273D}, {0x2776, 0x277F}, + {0x2B56, 0x2B59}, {0x3248, 0x324F}, {0xE000, 0xF8FF}, + {0xFE00, 0xFE0F}, {0xFFFD, 0xFFFD}, {0x1F100, 0x1F10A}, + {0x1F110, 0x1F12D}, {0x1F130, 0x1F169}, {0x1F170, 0x1F18D}, + {0x1F18F, 0x1F190}, {0x1F19B, 0x1F1AC}, {0xE0100, 0xE01EF}, + {0xF0000, 0xFFFFD}, {0x100000, 0x10FFFD}, +} +var notassigned = table{ + {0x27E6, 0x27ED}, {0x2985, 0x2986}, +} + +var neutral = table{ + {0x0000, 
0x001F}, {0x007F, 0x00A0}, {0x00A9, 0x00A9}, + {0x00AB, 0x00AB}, {0x00B5, 0x00B5}, {0x00BB, 0x00BB}, + {0x00C0, 0x00C5}, {0x00C7, 0x00CF}, {0x00D1, 0x00D6}, + {0x00D9, 0x00DD}, {0x00E2, 0x00E5}, {0x00E7, 0x00E7}, + {0x00EB, 0x00EB}, {0x00EE, 0x00EF}, {0x00F1, 0x00F1}, + {0x00F4, 0x00F6}, {0x00FB, 0x00FB}, {0x00FD, 0x00FD}, + {0x00FF, 0x0100}, {0x0102, 0x0110}, {0x0112, 0x0112}, + {0x0114, 0x011A}, {0x011C, 0x0125}, {0x0128, 0x012A}, + {0x012C, 0x0130}, {0x0134, 0x0137}, {0x0139, 0x013E}, + {0x0143, 0x0143}, {0x0145, 0x0147}, {0x014C, 0x014C}, + {0x014E, 0x0151}, {0x0154, 0x0165}, {0x0168, 0x016A}, + {0x016C, 0x01CD}, {0x01CF, 0x01CF}, {0x01D1, 0x01D1}, + {0x01D3, 0x01D3}, {0x01D5, 0x01D5}, {0x01D7, 0x01D7}, + {0x01D9, 0x01D9}, {0x01DB, 0x01DB}, {0x01DD, 0x0250}, + {0x0252, 0x0260}, {0x0262, 0x02C3}, {0x02C5, 0x02C6}, + {0x02C8, 0x02C8}, {0x02CC, 0x02CC}, {0x02CE, 0x02CF}, + {0x02D1, 0x02D7}, {0x02DC, 0x02DC}, {0x02DE, 0x02DE}, + {0x02E0, 0x02FF}, {0x0370, 0x0377}, {0x037A, 0x037F}, + {0x0384, 0x038A}, {0x038C, 0x038C}, {0x038E, 0x0390}, + {0x03AA, 0x03B0}, {0x03C2, 0x03C2}, {0x03CA, 0x0400}, + {0x0402, 0x040F}, {0x0450, 0x0450}, {0x0452, 0x052F}, + {0x0531, 0x0556}, {0x0559, 0x058A}, {0x058D, 0x058F}, + {0x0591, 0x05C7}, {0x05D0, 0x05EA}, {0x05EF, 0x05F4}, + {0x0600, 0x061C}, {0x061E, 0x070D}, {0x070F, 0x074A}, + {0x074D, 0x07B1}, {0x07C0, 0x07FA}, {0x07FD, 0x082D}, + {0x0830, 0x083E}, {0x0840, 0x085B}, {0x085E, 0x085E}, + {0x0860, 0x086A}, {0x08A0, 0x08B4}, {0x08B6, 0x08C7}, + {0x08D3, 0x0983}, {0x0985, 0x098C}, {0x098F, 0x0990}, + {0x0993, 0x09A8}, {0x09AA, 0x09B0}, {0x09B2, 0x09B2}, + {0x09B6, 0x09B9}, {0x09BC, 0x09C4}, {0x09C7, 0x09C8}, + {0x09CB, 0x09CE}, {0x09D7, 0x09D7}, {0x09DC, 0x09DD}, + {0x09DF, 0x09E3}, {0x09E6, 0x09FE}, {0x0A01, 0x0A03}, + {0x0A05, 0x0A0A}, {0x0A0F, 0x0A10}, {0x0A13, 0x0A28}, + {0x0A2A, 0x0A30}, {0x0A32, 0x0A33}, {0x0A35, 0x0A36}, + {0x0A38, 0x0A39}, {0x0A3C, 0x0A3C}, {0x0A3E, 0x0A42}, + {0x0A47, 0x0A48}, {0x0A4B, 0x0A4D}, {0x0A51, 0x0A51}, + {0x0A59, 0x0A5C}, {0x0A5E, 0x0A5E}, {0x0A66, 0x0A76}, + {0x0A81, 0x0A83}, {0x0A85, 0x0A8D}, {0x0A8F, 0x0A91}, + {0x0A93, 0x0AA8}, {0x0AAA, 0x0AB0}, {0x0AB2, 0x0AB3}, + {0x0AB5, 0x0AB9}, {0x0ABC, 0x0AC5}, {0x0AC7, 0x0AC9}, + {0x0ACB, 0x0ACD}, {0x0AD0, 0x0AD0}, {0x0AE0, 0x0AE3}, + {0x0AE6, 0x0AF1}, {0x0AF9, 0x0AFF}, {0x0B01, 0x0B03}, + {0x0B05, 0x0B0C}, {0x0B0F, 0x0B10}, {0x0B13, 0x0B28}, + {0x0B2A, 0x0B30}, {0x0B32, 0x0B33}, {0x0B35, 0x0B39}, + {0x0B3C, 0x0B44}, {0x0B47, 0x0B48}, {0x0B4B, 0x0B4D}, + {0x0B55, 0x0B57}, {0x0B5C, 0x0B5D}, {0x0B5F, 0x0B63}, + {0x0B66, 0x0B77}, {0x0B82, 0x0B83}, {0x0B85, 0x0B8A}, + {0x0B8E, 0x0B90}, {0x0B92, 0x0B95}, {0x0B99, 0x0B9A}, + {0x0B9C, 0x0B9C}, {0x0B9E, 0x0B9F}, {0x0BA3, 0x0BA4}, + {0x0BA8, 0x0BAA}, {0x0BAE, 0x0BB9}, {0x0BBE, 0x0BC2}, + {0x0BC6, 0x0BC8}, {0x0BCA, 0x0BCD}, {0x0BD0, 0x0BD0}, + {0x0BD7, 0x0BD7}, {0x0BE6, 0x0BFA}, {0x0C00, 0x0C0C}, + {0x0C0E, 0x0C10}, {0x0C12, 0x0C28}, {0x0C2A, 0x0C39}, + {0x0C3D, 0x0C44}, {0x0C46, 0x0C48}, {0x0C4A, 0x0C4D}, + {0x0C55, 0x0C56}, {0x0C58, 0x0C5A}, {0x0C60, 0x0C63}, + {0x0C66, 0x0C6F}, {0x0C77, 0x0C8C}, {0x0C8E, 0x0C90}, + {0x0C92, 0x0CA8}, {0x0CAA, 0x0CB3}, {0x0CB5, 0x0CB9}, + {0x0CBC, 0x0CC4}, {0x0CC6, 0x0CC8}, {0x0CCA, 0x0CCD}, + {0x0CD5, 0x0CD6}, {0x0CDE, 0x0CDE}, {0x0CE0, 0x0CE3}, + {0x0CE6, 0x0CEF}, {0x0CF1, 0x0CF2}, {0x0D00, 0x0D0C}, + {0x0D0E, 0x0D10}, {0x0D12, 0x0D44}, {0x0D46, 0x0D48}, + {0x0D4A, 0x0D4F}, {0x0D54, 0x0D63}, {0x0D66, 0x0D7F}, + {0x0D81, 0x0D83}, {0x0D85, 0x0D96}, {0x0D9A, 0x0DB1}, + {0x0DB3, 0x0DBB}, {0x0DBD, 0x0DBD}, 
{0x0DC0, 0x0DC6}, + {0x0DCA, 0x0DCA}, {0x0DCF, 0x0DD4}, {0x0DD6, 0x0DD6}, + {0x0DD8, 0x0DDF}, {0x0DE6, 0x0DEF}, {0x0DF2, 0x0DF4}, + {0x0E01, 0x0E3A}, {0x0E3F, 0x0E5B}, {0x0E81, 0x0E82}, + {0x0E84, 0x0E84}, {0x0E86, 0x0E8A}, {0x0E8C, 0x0EA3}, + {0x0EA5, 0x0EA5}, {0x0EA7, 0x0EBD}, {0x0EC0, 0x0EC4}, + {0x0EC6, 0x0EC6}, {0x0EC8, 0x0ECD}, {0x0ED0, 0x0ED9}, + {0x0EDC, 0x0EDF}, {0x0F00, 0x0F47}, {0x0F49, 0x0F6C}, + {0x0F71, 0x0F97}, {0x0F99, 0x0FBC}, {0x0FBE, 0x0FCC}, + {0x0FCE, 0x0FDA}, {0x1000, 0x10C5}, {0x10C7, 0x10C7}, + {0x10CD, 0x10CD}, {0x10D0, 0x10FF}, {0x1160, 0x1248}, + {0x124A, 0x124D}, {0x1250, 0x1256}, {0x1258, 0x1258}, + {0x125A, 0x125D}, {0x1260, 0x1288}, {0x128A, 0x128D}, + {0x1290, 0x12B0}, {0x12B2, 0x12B5}, {0x12B8, 0x12BE}, + {0x12C0, 0x12C0}, {0x12C2, 0x12C5}, {0x12C8, 0x12D6}, + {0x12D8, 0x1310}, {0x1312, 0x1315}, {0x1318, 0x135A}, + {0x135D, 0x137C}, {0x1380, 0x1399}, {0x13A0, 0x13F5}, + {0x13F8, 0x13FD}, {0x1400, 0x169C}, {0x16A0, 0x16F8}, + {0x1700, 0x170C}, {0x170E, 0x1714}, {0x1720, 0x1736}, + {0x1740, 0x1753}, {0x1760, 0x176C}, {0x176E, 0x1770}, + {0x1772, 0x1773}, {0x1780, 0x17DD}, {0x17E0, 0x17E9}, + {0x17F0, 0x17F9}, {0x1800, 0x180E}, {0x1810, 0x1819}, + {0x1820, 0x1878}, {0x1880, 0x18AA}, {0x18B0, 0x18F5}, + {0x1900, 0x191E}, {0x1920, 0x192B}, {0x1930, 0x193B}, + {0x1940, 0x1940}, {0x1944, 0x196D}, {0x1970, 0x1974}, + {0x1980, 0x19AB}, {0x19B0, 0x19C9}, {0x19D0, 0x19DA}, + {0x19DE, 0x1A1B}, {0x1A1E, 0x1A5E}, {0x1A60, 0x1A7C}, + {0x1A7F, 0x1A89}, {0x1A90, 0x1A99}, {0x1AA0, 0x1AAD}, + {0x1AB0, 0x1AC0}, {0x1B00, 0x1B4B}, {0x1B50, 0x1B7C}, + {0x1B80, 0x1BF3}, {0x1BFC, 0x1C37}, {0x1C3B, 0x1C49}, + {0x1C4D, 0x1C88}, {0x1C90, 0x1CBA}, {0x1CBD, 0x1CC7}, + {0x1CD0, 0x1CFA}, {0x1D00, 0x1DF9}, {0x1DFB, 0x1F15}, + {0x1F18, 0x1F1D}, {0x1F20, 0x1F45}, {0x1F48, 0x1F4D}, + {0x1F50, 0x1F57}, {0x1F59, 0x1F59}, {0x1F5B, 0x1F5B}, + {0x1F5D, 0x1F5D}, {0x1F5F, 0x1F7D}, {0x1F80, 0x1FB4}, + {0x1FB6, 0x1FC4}, {0x1FC6, 0x1FD3}, {0x1FD6, 0x1FDB}, + {0x1FDD, 0x1FEF}, {0x1FF2, 0x1FF4}, {0x1FF6, 0x1FFE}, + {0x2000, 0x200F}, {0x2011, 0x2012}, {0x2017, 0x2017}, + {0x201A, 0x201B}, {0x201E, 0x201F}, {0x2023, 0x2023}, + {0x2028, 0x202F}, {0x2031, 0x2031}, {0x2034, 0x2034}, + {0x2036, 0x203A}, {0x203C, 0x203D}, {0x203F, 0x2064}, + {0x2066, 0x2071}, {0x2075, 0x207E}, {0x2080, 0x2080}, + {0x2085, 0x208E}, {0x2090, 0x209C}, {0x20A0, 0x20A8}, + {0x20AA, 0x20AB}, {0x20AD, 0x20BF}, {0x20D0, 0x20F0}, + {0x2100, 0x2102}, {0x2104, 0x2104}, {0x2106, 0x2108}, + {0x210A, 0x2112}, {0x2114, 0x2115}, {0x2117, 0x2120}, + {0x2123, 0x2125}, {0x2127, 0x212A}, {0x212C, 0x2152}, + {0x2155, 0x215A}, {0x215F, 0x215F}, {0x216C, 0x216F}, + {0x217A, 0x2188}, {0x218A, 0x218B}, {0x219A, 0x21B7}, + {0x21BA, 0x21D1}, {0x21D3, 0x21D3}, {0x21D5, 0x21E6}, + {0x21E8, 0x21FF}, {0x2201, 0x2201}, {0x2204, 0x2206}, + {0x2209, 0x220A}, {0x220C, 0x220E}, {0x2210, 0x2210}, + {0x2212, 0x2214}, {0x2216, 0x2219}, {0x221B, 0x221C}, + {0x2221, 0x2222}, {0x2224, 0x2224}, {0x2226, 0x2226}, + {0x222D, 0x222D}, {0x222F, 0x2233}, {0x2238, 0x223B}, + {0x223E, 0x2247}, {0x2249, 0x224B}, {0x224D, 0x2251}, + {0x2253, 0x225F}, {0x2262, 0x2263}, {0x2268, 0x2269}, + {0x226C, 0x226D}, {0x2270, 0x2281}, {0x2284, 0x2285}, + {0x2288, 0x2294}, {0x2296, 0x2298}, {0x229A, 0x22A4}, + {0x22A6, 0x22BE}, {0x22C0, 0x2311}, {0x2313, 0x2319}, + {0x231C, 0x2328}, {0x232B, 0x23E8}, {0x23ED, 0x23EF}, + {0x23F1, 0x23F2}, {0x23F4, 0x2426}, {0x2440, 0x244A}, + {0x24EA, 0x24EA}, {0x254C, 0x254F}, {0x2574, 0x257F}, + {0x2590, 0x2591}, {0x2596, 0x259F}, {0x25A2, 0x25A2}, + 
{0x25AA, 0x25B1}, {0x25B4, 0x25B5}, {0x25B8, 0x25BB}, + {0x25BE, 0x25BF}, {0x25C2, 0x25C5}, {0x25C9, 0x25CA}, + {0x25CC, 0x25CD}, {0x25D2, 0x25E1}, {0x25E6, 0x25EE}, + {0x25F0, 0x25FC}, {0x25FF, 0x2604}, {0x2607, 0x2608}, + {0x260A, 0x260D}, {0x2610, 0x2613}, {0x2616, 0x261B}, + {0x261D, 0x261D}, {0x261F, 0x263F}, {0x2641, 0x2641}, + {0x2643, 0x2647}, {0x2654, 0x265F}, {0x2662, 0x2662}, + {0x2666, 0x2666}, {0x266B, 0x266B}, {0x266E, 0x266E}, + {0x2670, 0x267E}, {0x2680, 0x2692}, {0x2694, 0x269D}, + {0x26A0, 0x26A0}, {0x26A2, 0x26A9}, {0x26AC, 0x26BC}, + {0x26C0, 0x26C3}, {0x26E2, 0x26E2}, {0x26E4, 0x26E7}, + {0x2700, 0x2704}, {0x2706, 0x2709}, {0x270C, 0x2727}, + {0x2729, 0x273C}, {0x273E, 0x274B}, {0x274D, 0x274D}, + {0x274F, 0x2752}, {0x2756, 0x2756}, {0x2758, 0x2775}, + {0x2780, 0x2794}, {0x2798, 0x27AF}, {0x27B1, 0x27BE}, + {0x27C0, 0x27E5}, {0x27EE, 0x2984}, {0x2987, 0x2B1A}, + {0x2B1D, 0x2B4F}, {0x2B51, 0x2B54}, {0x2B5A, 0x2B73}, + {0x2B76, 0x2B95}, {0x2B97, 0x2C2E}, {0x2C30, 0x2C5E}, + {0x2C60, 0x2CF3}, {0x2CF9, 0x2D25}, {0x2D27, 0x2D27}, + {0x2D2D, 0x2D2D}, {0x2D30, 0x2D67}, {0x2D6F, 0x2D70}, + {0x2D7F, 0x2D96}, {0x2DA0, 0x2DA6}, {0x2DA8, 0x2DAE}, + {0x2DB0, 0x2DB6}, {0x2DB8, 0x2DBE}, {0x2DC0, 0x2DC6}, + {0x2DC8, 0x2DCE}, {0x2DD0, 0x2DD6}, {0x2DD8, 0x2DDE}, + {0x2DE0, 0x2E52}, {0x303F, 0x303F}, {0x4DC0, 0x4DFF}, + {0xA4D0, 0xA62B}, {0xA640, 0xA6F7}, {0xA700, 0xA7BF}, + {0xA7C2, 0xA7CA}, {0xA7F5, 0xA82C}, {0xA830, 0xA839}, + {0xA840, 0xA877}, {0xA880, 0xA8C5}, {0xA8CE, 0xA8D9}, + {0xA8E0, 0xA953}, {0xA95F, 0xA95F}, {0xA980, 0xA9CD}, + {0xA9CF, 0xA9D9}, {0xA9DE, 0xA9FE}, {0xAA00, 0xAA36}, + {0xAA40, 0xAA4D}, {0xAA50, 0xAA59}, {0xAA5C, 0xAAC2}, + {0xAADB, 0xAAF6}, {0xAB01, 0xAB06}, {0xAB09, 0xAB0E}, + {0xAB11, 0xAB16}, {0xAB20, 0xAB26}, {0xAB28, 0xAB2E}, + {0xAB30, 0xAB6B}, {0xAB70, 0xABED}, {0xABF0, 0xABF9}, + {0xD7B0, 0xD7C6}, {0xD7CB, 0xD7FB}, {0xD800, 0xDFFF}, + {0xFB00, 0xFB06}, {0xFB13, 0xFB17}, {0xFB1D, 0xFB36}, + {0xFB38, 0xFB3C}, {0xFB3E, 0xFB3E}, {0xFB40, 0xFB41}, + {0xFB43, 0xFB44}, {0xFB46, 0xFBC1}, {0xFBD3, 0xFD3F}, + {0xFD50, 0xFD8F}, {0xFD92, 0xFDC7}, {0xFDF0, 0xFDFD}, + {0xFE20, 0xFE2F}, {0xFE70, 0xFE74}, {0xFE76, 0xFEFC}, + {0xFEFF, 0xFEFF}, {0xFFF9, 0xFFFC}, {0x10000, 0x1000B}, + {0x1000D, 0x10026}, {0x10028, 0x1003A}, {0x1003C, 0x1003D}, + {0x1003F, 0x1004D}, {0x10050, 0x1005D}, {0x10080, 0x100FA}, + {0x10100, 0x10102}, {0x10107, 0x10133}, {0x10137, 0x1018E}, + {0x10190, 0x1019C}, {0x101A0, 0x101A0}, {0x101D0, 0x101FD}, + {0x10280, 0x1029C}, {0x102A0, 0x102D0}, {0x102E0, 0x102FB}, + {0x10300, 0x10323}, {0x1032D, 0x1034A}, {0x10350, 0x1037A}, + {0x10380, 0x1039D}, {0x1039F, 0x103C3}, {0x103C8, 0x103D5}, + {0x10400, 0x1049D}, {0x104A0, 0x104A9}, {0x104B0, 0x104D3}, + {0x104D8, 0x104FB}, {0x10500, 0x10527}, {0x10530, 0x10563}, + {0x1056F, 0x1056F}, {0x10600, 0x10736}, {0x10740, 0x10755}, + {0x10760, 0x10767}, {0x10800, 0x10805}, {0x10808, 0x10808}, + {0x1080A, 0x10835}, {0x10837, 0x10838}, {0x1083C, 0x1083C}, + {0x1083F, 0x10855}, {0x10857, 0x1089E}, {0x108A7, 0x108AF}, + {0x108E0, 0x108F2}, {0x108F4, 0x108F5}, {0x108FB, 0x1091B}, + {0x1091F, 0x10939}, {0x1093F, 0x1093F}, {0x10980, 0x109B7}, + {0x109BC, 0x109CF}, {0x109D2, 0x10A03}, {0x10A05, 0x10A06}, + {0x10A0C, 0x10A13}, {0x10A15, 0x10A17}, {0x10A19, 0x10A35}, + {0x10A38, 0x10A3A}, {0x10A3F, 0x10A48}, {0x10A50, 0x10A58}, + {0x10A60, 0x10A9F}, {0x10AC0, 0x10AE6}, {0x10AEB, 0x10AF6}, + {0x10B00, 0x10B35}, {0x10B39, 0x10B55}, {0x10B58, 0x10B72}, + {0x10B78, 0x10B91}, {0x10B99, 0x10B9C}, {0x10BA9, 0x10BAF}, + {0x10C00, 
0x10C48}, {0x10C80, 0x10CB2}, {0x10CC0, 0x10CF2}, + {0x10CFA, 0x10D27}, {0x10D30, 0x10D39}, {0x10E60, 0x10E7E}, + {0x10E80, 0x10EA9}, {0x10EAB, 0x10EAD}, {0x10EB0, 0x10EB1}, + {0x10F00, 0x10F27}, {0x10F30, 0x10F59}, {0x10FB0, 0x10FCB}, + {0x10FE0, 0x10FF6}, {0x11000, 0x1104D}, {0x11052, 0x1106F}, + {0x1107F, 0x110C1}, {0x110CD, 0x110CD}, {0x110D0, 0x110E8}, + {0x110F0, 0x110F9}, {0x11100, 0x11134}, {0x11136, 0x11147}, + {0x11150, 0x11176}, {0x11180, 0x111DF}, {0x111E1, 0x111F4}, + {0x11200, 0x11211}, {0x11213, 0x1123E}, {0x11280, 0x11286}, + {0x11288, 0x11288}, {0x1128A, 0x1128D}, {0x1128F, 0x1129D}, + {0x1129F, 0x112A9}, {0x112B0, 0x112EA}, {0x112F0, 0x112F9}, + {0x11300, 0x11303}, {0x11305, 0x1130C}, {0x1130F, 0x11310}, + {0x11313, 0x11328}, {0x1132A, 0x11330}, {0x11332, 0x11333}, + {0x11335, 0x11339}, {0x1133B, 0x11344}, {0x11347, 0x11348}, + {0x1134B, 0x1134D}, {0x11350, 0x11350}, {0x11357, 0x11357}, + {0x1135D, 0x11363}, {0x11366, 0x1136C}, {0x11370, 0x11374}, + {0x11400, 0x1145B}, {0x1145D, 0x11461}, {0x11480, 0x114C7}, + {0x114D0, 0x114D9}, {0x11580, 0x115B5}, {0x115B8, 0x115DD}, + {0x11600, 0x11644}, {0x11650, 0x11659}, {0x11660, 0x1166C}, + {0x11680, 0x116B8}, {0x116C0, 0x116C9}, {0x11700, 0x1171A}, + {0x1171D, 0x1172B}, {0x11730, 0x1173F}, {0x11800, 0x1183B}, + {0x118A0, 0x118F2}, {0x118FF, 0x11906}, {0x11909, 0x11909}, + {0x1190C, 0x11913}, {0x11915, 0x11916}, {0x11918, 0x11935}, + {0x11937, 0x11938}, {0x1193B, 0x11946}, {0x11950, 0x11959}, + {0x119A0, 0x119A7}, {0x119AA, 0x119D7}, {0x119DA, 0x119E4}, + {0x11A00, 0x11A47}, {0x11A50, 0x11AA2}, {0x11AC0, 0x11AF8}, + {0x11C00, 0x11C08}, {0x11C0A, 0x11C36}, {0x11C38, 0x11C45}, + {0x11C50, 0x11C6C}, {0x11C70, 0x11C8F}, {0x11C92, 0x11CA7}, + {0x11CA9, 0x11CB6}, {0x11D00, 0x11D06}, {0x11D08, 0x11D09}, + {0x11D0B, 0x11D36}, {0x11D3A, 0x11D3A}, {0x11D3C, 0x11D3D}, + {0x11D3F, 0x11D47}, {0x11D50, 0x11D59}, {0x11D60, 0x11D65}, + {0x11D67, 0x11D68}, {0x11D6A, 0x11D8E}, {0x11D90, 0x11D91}, + {0x11D93, 0x11D98}, {0x11DA0, 0x11DA9}, {0x11EE0, 0x11EF8}, + {0x11FB0, 0x11FB0}, {0x11FC0, 0x11FF1}, {0x11FFF, 0x12399}, + {0x12400, 0x1246E}, {0x12470, 0x12474}, {0x12480, 0x12543}, + {0x13000, 0x1342E}, {0x13430, 0x13438}, {0x14400, 0x14646}, + {0x16800, 0x16A38}, {0x16A40, 0x16A5E}, {0x16A60, 0x16A69}, + {0x16A6E, 0x16A6F}, {0x16AD0, 0x16AED}, {0x16AF0, 0x16AF5}, + {0x16B00, 0x16B45}, {0x16B50, 0x16B59}, {0x16B5B, 0x16B61}, + {0x16B63, 0x16B77}, {0x16B7D, 0x16B8F}, {0x16E40, 0x16E9A}, + {0x16F00, 0x16F4A}, {0x16F4F, 0x16F87}, {0x16F8F, 0x16F9F}, + {0x1BC00, 0x1BC6A}, {0x1BC70, 0x1BC7C}, {0x1BC80, 0x1BC88}, + {0x1BC90, 0x1BC99}, {0x1BC9C, 0x1BCA3}, {0x1D000, 0x1D0F5}, + {0x1D100, 0x1D126}, {0x1D129, 0x1D1E8}, {0x1D200, 0x1D245}, + {0x1D2E0, 0x1D2F3}, {0x1D300, 0x1D356}, {0x1D360, 0x1D378}, + {0x1D400, 0x1D454}, {0x1D456, 0x1D49C}, {0x1D49E, 0x1D49F}, + {0x1D4A2, 0x1D4A2}, {0x1D4A5, 0x1D4A6}, {0x1D4A9, 0x1D4AC}, + {0x1D4AE, 0x1D4B9}, {0x1D4BB, 0x1D4BB}, {0x1D4BD, 0x1D4C3}, + {0x1D4C5, 0x1D505}, {0x1D507, 0x1D50A}, {0x1D50D, 0x1D514}, + {0x1D516, 0x1D51C}, {0x1D51E, 0x1D539}, {0x1D53B, 0x1D53E}, + {0x1D540, 0x1D544}, {0x1D546, 0x1D546}, {0x1D54A, 0x1D550}, + {0x1D552, 0x1D6A5}, {0x1D6A8, 0x1D7CB}, {0x1D7CE, 0x1DA8B}, + {0x1DA9B, 0x1DA9F}, {0x1DAA1, 0x1DAAF}, {0x1E000, 0x1E006}, + {0x1E008, 0x1E018}, {0x1E01B, 0x1E021}, {0x1E023, 0x1E024}, + {0x1E026, 0x1E02A}, {0x1E100, 0x1E12C}, {0x1E130, 0x1E13D}, + {0x1E140, 0x1E149}, {0x1E14E, 0x1E14F}, {0x1E2C0, 0x1E2F9}, + {0x1E2FF, 0x1E2FF}, {0x1E800, 0x1E8C4}, {0x1E8C7, 0x1E8D6}, + {0x1E900, 0x1E94B}, {0x1E950, 
0x1E959}, {0x1E95E, 0x1E95F}, + {0x1EC71, 0x1ECB4}, {0x1ED01, 0x1ED3D}, {0x1EE00, 0x1EE03}, + {0x1EE05, 0x1EE1F}, {0x1EE21, 0x1EE22}, {0x1EE24, 0x1EE24}, + {0x1EE27, 0x1EE27}, {0x1EE29, 0x1EE32}, {0x1EE34, 0x1EE37}, + {0x1EE39, 0x1EE39}, {0x1EE3B, 0x1EE3B}, {0x1EE42, 0x1EE42}, + {0x1EE47, 0x1EE47}, {0x1EE49, 0x1EE49}, {0x1EE4B, 0x1EE4B}, + {0x1EE4D, 0x1EE4F}, {0x1EE51, 0x1EE52}, {0x1EE54, 0x1EE54}, + {0x1EE57, 0x1EE57}, {0x1EE59, 0x1EE59}, {0x1EE5B, 0x1EE5B}, + {0x1EE5D, 0x1EE5D}, {0x1EE5F, 0x1EE5F}, {0x1EE61, 0x1EE62}, + {0x1EE64, 0x1EE64}, {0x1EE67, 0x1EE6A}, {0x1EE6C, 0x1EE72}, + {0x1EE74, 0x1EE77}, {0x1EE79, 0x1EE7C}, {0x1EE7E, 0x1EE7E}, + {0x1EE80, 0x1EE89}, {0x1EE8B, 0x1EE9B}, {0x1EEA1, 0x1EEA3}, + {0x1EEA5, 0x1EEA9}, {0x1EEAB, 0x1EEBB}, {0x1EEF0, 0x1EEF1}, + {0x1F000, 0x1F003}, {0x1F005, 0x1F02B}, {0x1F030, 0x1F093}, + {0x1F0A0, 0x1F0AE}, {0x1F0B1, 0x1F0BF}, {0x1F0C1, 0x1F0CE}, + {0x1F0D1, 0x1F0F5}, {0x1F10B, 0x1F10F}, {0x1F12E, 0x1F12F}, + {0x1F16A, 0x1F16F}, {0x1F1AD, 0x1F1AD}, {0x1F1E6, 0x1F1FF}, + {0x1F321, 0x1F32C}, {0x1F336, 0x1F336}, {0x1F37D, 0x1F37D}, + {0x1F394, 0x1F39F}, {0x1F3CB, 0x1F3CE}, {0x1F3D4, 0x1F3DF}, + {0x1F3F1, 0x1F3F3}, {0x1F3F5, 0x1F3F7}, {0x1F43F, 0x1F43F}, + {0x1F441, 0x1F441}, {0x1F4FD, 0x1F4FE}, {0x1F53E, 0x1F54A}, + {0x1F54F, 0x1F54F}, {0x1F568, 0x1F579}, {0x1F57B, 0x1F594}, + {0x1F597, 0x1F5A3}, {0x1F5A5, 0x1F5FA}, {0x1F650, 0x1F67F}, + {0x1F6C6, 0x1F6CB}, {0x1F6CD, 0x1F6CF}, {0x1F6D3, 0x1F6D4}, + {0x1F6E0, 0x1F6EA}, {0x1F6F0, 0x1F6F3}, {0x1F700, 0x1F773}, + {0x1F780, 0x1F7D8}, {0x1F800, 0x1F80B}, {0x1F810, 0x1F847}, + {0x1F850, 0x1F859}, {0x1F860, 0x1F887}, {0x1F890, 0x1F8AD}, + {0x1F8B0, 0x1F8B1}, {0x1F900, 0x1F90B}, {0x1F93B, 0x1F93B}, + {0x1F946, 0x1F946}, {0x1FA00, 0x1FA53}, {0x1FA60, 0x1FA6D}, + {0x1FB00, 0x1FB92}, {0x1FB94, 0x1FBCA}, {0x1FBF0, 0x1FBF9}, + {0xE0001, 0xE0001}, {0xE0020, 0xE007F}, +} + +var emoji = table{ + {0x203C, 0x203C}, {0x2049, 0x2049}, {0x2122, 0x2122}, + {0x2139, 0x2139}, {0x2194, 0x2199}, {0x21A9, 0x21AA}, + {0x231A, 0x231B}, {0x2328, 0x2328}, {0x2388, 0x2388}, + {0x23CF, 0x23CF}, {0x23E9, 0x23F3}, {0x23F8, 0x23FA}, + {0x24C2, 0x24C2}, {0x25AA, 0x25AB}, {0x25B6, 0x25B6}, + {0x25C0, 0x25C0}, {0x25FB, 0x25FE}, {0x2600, 0x2605}, + {0x2607, 0x2612}, {0x2614, 0x2685}, {0x2690, 0x2705}, + {0x2708, 0x2712}, {0x2714, 0x2714}, {0x2716, 0x2716}, + {0x271D, 0x271D}, {0x2721, 0x2721}, {0x2728, 0x2728}, + {0x2733, 0x2734}, {0x2744, 0x2744}, {0x2747, 0x2747}, + {0x274C, 0x274C}, {0x274E, 0x274E}, {0x2753, 0x2755}, + {0x2757, 0x2757}, {0x2763, 0x2767}, {0x2795, 0x2797}, + {0x27A1, 0x27A1}, {0x27B0, 0x27B0}, {0x27BF, 0x27BF}, + {0x2934, 0x2935}, {0x2B05, 0x2B07}, {0x2B1B, 0x2B1C}, + {0x2B50, 0x2B50}, {0x2B55, 0x2B55}, {0x3030, 0x3030}, + {0x303D, 0x303D}, {0x3297, 0x3297}, {0x3299, 0x3299}, + {0x1F000, 0x1F0FF}, {0x1F10D, 0x1F10F}, {0x1F12F, 0x1F12F}, + {0x1F16C, 0x1F171}, {0x1F17E, 0x1F17F}, {0x1F18E, 0x1F18E}, + {0x1F191, 0x1F19A}, {0x1F1AD, 0x1F1E5}, {0x1F201, 0x1F20F}, + {0x1F21A, 0x1F21A}, {0x1F22F, 0x1F22F}, {0x1F232, 0x1F23A}, + {0x1F23C, 0x1F23F}, {0x1F249, 0x1F3FA}, {0x1F400, 0x1F53D}, + {0x1F546, 0x1F64F}, {0x1F680, 0x1F6FF}, {0x1F774, 0x1F77F}, + {0x1F7D5, 0x1F7FF}, {0x1F80C, 0x1F80F}, {0x1F848, 0x1F84F}, + {0x1F85A, 0x1F85F}, {0x1F888, 0x1F88F}, {0x1F8AE, 0x1F8FF}, + {0x1F90C, 0x1F93A}, {0x1F93C, 0x1F945}, {0x1F947, 0x1FAFF}, + {0x1FC00, 0x1FFFD}, +} diff --git a/vendor/github.com/mattn/go-runewidth/runewidth_windows.go b/vendor/github.com/mattn/go-runewidth/runewidth_windows.go new file mode 100644 index 000000000..d6a61777d 
--- /dev/null +++ b/vendor/github.com/mattn/go-runewidth/runewidth_windows.go @@ -0,0 +1,28 @@ +// +build windows +// +build !appengine + +package runewidth + +import ( + "syscall" +) + +var ( + kernel32 = syscall.NewLazyDLL("kernel32") + procGetConsoleOutputCP = kernel32.NewProc("GetConsoleOutputCP") +) + +// IsEastAsian return true if the current locale is CJK +func IsEastAsian() bool { + r1, _, _ := procGetConsoleOutputCP.Call() + if r1 == 0 { + return false + } + + switch int(r1) { + case 932, 51932, 936, 949, 950: + return true + } + + return false +} diff --git a/vendor/github.com/mbilski/exhaustivestruct/LICENSE b/vendor/github.com/mbilski/exhaustivestruct/LICENSE new file mode 100644 index 000000000..893eb73b9 --- /dev/null +++ b/vendor/github.com/mbilski/exhaustivestruct/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2020 Mateusz Bilski + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/github.com/mbilski/exhaustivestruct/pkg/analyzer/analyzer.go b/vendor/github.com/mbilski/exhaustivestruct/pkg/analyzer/analyzer.go new file mode 100644 index 000000000..0dfb713c5 --- /dev/null +++ b/vendor/github.com/mbilski/exhaustivestruct/pkg/analyzer/analyzer.go @@ -0,0 +1,187 @@ +package analyzer + +import ( + "flag" + "fmt" + "go/ast" + "go/types" + "path" + "strings" + + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/ast/inspector" + + "golang.org/x/tools/go/analysis" +) + +// Analyzer that checks if all struct's fields are initialized +var Analyzer = &analysis.Analyzer{ + Name: "exhaustivestruct", + Doc: "Checks if all struct's fields are initialized", + Run: run, + Requires: []*analysis.Analyzer{inspect.Analyzer}, + Flags: newFlagSet(), +} + +// StructPatternList is a comma separated list of expressions to match struct packages and names +// The struct packages have the form example.com/package.ExampleStruct +// The matching patterns can use matching syntax from https://pkg.go.dev/path#Match +// If this list is empty, all structs are tested. 
+var StructPatternList string + +func newFlagSet() flag.FlagSet { + fs := flag.NewFlagSet("", flag.PanicOnError) + fs.StringVar(&StructPatternList, "struct_patterns", "", "This is a comma separated list of expressions to match struct packages and names") + return *fs +} + +func run(pass *analysis.Pass) (interface{}, error) { + splitFn := func(c rune) bool { return c == ',' } + inspector := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + structPatterns := strings.FieldsFunc(StructPatternList, splitFn) + // validate the pattern syntax + for _, pattern := range structPatterns { + _, err := path.Match(pattern, "") + if err != nil { + return nil, fmt.Errorf("invalid struct pattern %s: %w", pattern, err) + } + } + + nodeFilter := []ast.Node{ + (*ast.CompositeLit)(nil), + (*ast.ReturnStmt)(nil), + } + + var returnStmt *ast.ReturnStmt + + inspector.Preorder(nodeFilter, func(node ast.Node) { + var name string + + compositeLit, ok := node.(*ast.CompositeLit) + if !ok { + // Keep track of the last return statement while iterating + retLit, ok := node.(*ast.ReturnStmt) + if ok { + returnStmt = retLit + } + return + } + + i, ok := compositeLit.Type.(*ast.Ident) + + if ok { + name = i.Name + } else { + s, ok := compositeLit.Type.(*ast.SelectorExpr) + + if !ok { + return + } + + name = s.Sel.Name + } + + if compositeLit.Type == nil { + return + } + + t := pass.TypesInfo.TypeOf(compositeLit.Type) + + if t == nil { + return + } + + if len(structPatterns) > 0 { + shouldLint := false + for _, pattern := range structPatterns { + // We check the patterns for validity ahead of time, so we don't need to check the error here + if match, _ := path.Match(pattern, t.String()); match { + shouldLint = true + break + } + } + if !shouldLint { + return + } + } + + str, ok := t.Underlying().(*types.Struct) + + if !ok { + return + } + + // Don't report an error if: + // 1. This composite literal contains no fields and + // 2. It's in a return statement and + // 3. 
The return statement contains a non-nil error + if len(compositeLit.Elts) == 0 { + // Check if this composite is one of the results the last return statement + isInResults := false + if returnStmt != nil { + for _, result := range returnStmt.Results { + compareComposite, ok := result.(*ast.CompositeLit) + if ok { + if compareComposite == compositeLit { + isInResults = true + } + } + } + } + nonNilError := false + if isInResults { + // Check if any of the results has an error type and if that error is set to non-nil (if it's set to nil, the type would be "untyped nil") + for _, result := range returnStmt.Results { + if pass.TypesInfo.TypeOf(result).String() == "error" { + nonNilError = true + } + } + } + + if nonNilError { + return + } + } + + samePackage := strings.HasPrefix(t.String(), pass.Pkg.Path()+".") + + missing := []string{} + + for i := 0; i < str.NumFields(); i++ { + fieldName := str.Field(i).Name() + exists := false + + if !samePackage && !str.Field(i).Exported() { + continue + } + + for eIndex, e := range compositeLit.Elts { + if k, ok := e.(*ast.KeyValueExpr); ok { + if i, ok := k.Key.(*ast.Ident); ok { + if i.Name == fieldName { + exists = true + break + } + } + } else { + if eIndex == i { + exists = true + break + } + } + } + + if !exists { + missing = append(missing, fieldName) + } + } + + if len(missing) == 1 { + pass.Reportf(node.Pos(), "%s is missing in %s", missing[0], name) + } else if len(missing) > 1 { + pass.Reportf(node.Pos(), "%s are missing in %s", strings.Join(missing, ", "), name) + } + }) + + return nil, nil +} diff --git a/vendor/github.com/mgechev/dots/.travis.yml b/vendor/github.com/mgechev/dots/.travis.yml new file mode 100644 index 000000000..f4a4a7363 --- /dev/null +++ b/vendor/github.com/mgechev/dots/.travis.yml @@ -0,0 +1,2 @@ +language: go +go: master diff --git a/vendor/github.com/mgechev/dots/LICENSE b/vendor/github.com/mgechev/dots/LICENSE new file mode 100644 index 000000000..c617c7e01 --- /dev/null +++ b/vendor/github.com/mgechev/dots/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2018 Minko Gechev + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/github.com/mgechev/dots/README.md b/vendor/github.com/mgechev/dots/README.md new file mode 100644 index 000000000..1203aef5f --- /dev/null +++ b/vendor/github.com/mgechev/dots/README.md @@ -0,0 +1,100 @@ +[![Build Status](https://travis-ci.org/mgechev/dots.svg?branch=master)](https://travis-ci.org/mgechev/dots) + +# Dots + +Implements the wildcard file matching in Go used by golint, go test etc. 
+ +## Usage + +```go +import "github.com/mgechev/dots" + +func main() { + result, err := dots.Resolve([]string{"./fixtures/..."}, []string{"./fixtures/foo"}) + for _, f := range result { + fmt.Println(f); + } +} +``` + +If we suppose that we have the following directory structure: + +```text +├── README.md +├── fixtures +│   ├── bar +│   │   ├── bar1.go +│   │   └── bar2.go +│   ├── baz +│   │   ├── baz1.go +│   │   ├── baz2.go +│   │   └── baz3.go +│   └── foo +│   ├── foo1.go +│   ├── foo2.go +│   └── foo3.go +└── main.go +``` + +The result will be: + +```text +fixtures/bar/bar1.go +fixtures/bar/bar2.go +fixtures/baz/baz1.go +fixtures/baz/baz2.go +fixtures/baz/baz3.go +``` + +`dots` supports wildcard in both - the first and the last argument of `Resolve`, which means that you can ignore files based on a wildcard: + +```go +dots.Resolve([]string{"github.com/mgechev/dots"}, []string{"./..."}) // empty list +dots.Resolve([]string{"./fixtures/bar/..."}, []string{"./fixture/foo/...", "./fixtures/baz/..."}) // bar1.go, bar2.go +``` + +## Preserve package structure + +`dots` allow you to receive a slice of slices where each nested slice represents an individual package: + +```go +dots.ResolvePackages([]string{"github.com/mgechev/dots/..."}, []string{}) +``` + +So we will get the result: + +```text +[ + [ + "$GOROOT/src/github.com/mgechev/dots/fixtures/dummy/bar/bar1.go", + "$GOROOT/src/github.com/mgechev/dots/fixtures/dummy/bar/bar2.go" + ], + [ + "$GOROOT/src/github.com/mgechev/dots/fixtures/dummy/baz/baz1.go", + "$GOROOT/src/github.com/mgechev/dots/fixtures/dummy/baz/baz2.go", + "$GOROOT/src/github.com/mgechev/dots/fixtures/dummy/baz/baz3.go" + ], + [ + "$GOROOT/src/github.com/mgechev/dots/fixtures/dummy/foo/foo1.go", + "$GOROOT/src/github.com/mgechev/dots/fixtures/dummy/foo/foo2.go", + "$GOROOT/src/github.com/mgechev/dots/fixtures/dummy/foo/foo3.go" + ], + [ + "$GOROOT/src/github.com/mgechev/dots/fixtures/pkg/baz/baz1.go", + "$GOROOT/src/github.com/mgechev/dots/fixtures/pkg/baz/baz2.go" + ], + [ + "$GOROOT/src/github.com/mgechev/dots/fixtures/pkg/foo/foo1.go", + "$GOROOT/src/github.com/mgechev/dots/fixtures/pkg/foo/foo2.go" + ], + [ + "$GOROOT/src/github.com/mgechev/dots/fixtures/pkg/foo/bar/bar1.go" + ] +] +``` + +This method is especially useful, when you want to perform type checking over given package from the result. + +## License + +MIT diff --git a/vendor/github.com/mgechev/dots/resolve.go b/vendor/github.com/mgechev/dots/resolve.go new file mode 100644 index 000000000..309ba18ad --- /dev/null +++ b/vendor/github.com/mgechev/dots/resolve.go @@ -0,0 +1,456 @@ +package dots + +import ( + "go/build" + "log" + "os" + "path" + "path/filepath" + "regexp" + "runtime" + "strings" +) + +var ( + buildContext = build.Default + goroot = filepath.Clean(runtime.GOROOT()) + gorootSrc = filepath.Join(goroot, "src") +) + +func flatten(arr [][]string) []string { + var res []string + for _, e := range arr { + res = append(res, e...) + } + return res +} + +// Resolve accepts a slice of paths with optional "..." placeholder and a slice with paths to be skipped. +// The final result is the set of all files from the selected directories subtracted with +// the files in the skip slice. 
+func Resolve(includePatterns, skipPatterns []string) ([]string, error) { + skip, err := resolvePatterns(skipPatterns) + filter := newPathFilter(flatten(skip)) + if err != nil { + return nil, err + } + + pathSet := map[string]bool{} + includePackages, err := resolvePatterns(includePatterns) + include := flatten(includePackages) + if err != nil { + return nil, err + } + + var result []string + for _, i := range include { + if _, ok := pathSet[i]; !ok && !filter(i) { + pathSet[i] = true + result = append(result, i) + } + } + return result, err +} + +// ResolvePackages accepts a slice of paths with optional "..." placeholder and a slice with paths to be skipped. +// The final result is the set of all files from the selected directories subtracted with +// the files in the skip slice. The difference between `Resolve` and `ResolvePackages` +// is that `ResolvePackages` preserves the package structure in the nested slices. +func ResolvePackages(includePatterns, skipPatterns []string) ([][]string, error) { + skip, err := resolvePatterns(skipPatterns) + filter := newPathFilter(flatten(skip)) + if err != nil { + return nil, err + } + + pathSet := map[string]bool{} + include, err := resolvePatterns(includePatterns) + if err != nil { + return nil, err + } + + var result [][]string + for _, p := range include { + var packageFiles []string + for _, f := range p { + if _, ok := pathSet[f]; !ok && !filter(f) { + pathSet[f] = true + packageFiles = append(packageFiles, f) + } + } + result = append(result, packageFiles) + } + return result, err +} + +func isDir(filename string) bool { + fi, err := os.Stat(filename) + return err == nil && fi.IsDir() +} + +func exists(filename string) bool { + _, err := os.Stat(filename) + return err == nil +} + +func resolveDir(dirname string) ([]string, error) { + pkg, err := build.ImportDir(dirname, 0) + return resolveImportedPackage(pkg, err) +} + +func resolvePackage(pkgname string) ([]string, error) { + pkg, err := build.Import(pkgname, ".", 0) + return resolveImportedPackage(pkg, err) +} + +func resolveImportedPackage(pkg *build.Package, err error) ([]string, error) { + if err != nil { + if _, nogo := err.(*build.NoGoError); nogo { + // Don't complain if the failure is due to no Go source files. + return nil, nil + } + return nil, err + } + + var files []string + files = append(files, pkg.GoFiles...) + files = append(files, pkg.CgoFiles...) + files = append(files, pkg.TestGoFiles...) + if pkg.Dir != "." { + for i, f := range files { + files[i] = filepath.Join(pkg.Dir, f) + } + } + return files, nil +} + +func resolvePatterns(patterns []string) ([][]string, error) { + var files [][]string + for _, pattern := range patterns { + f, err := resolvePattern(pattern) + if err != nil { + return nil, err + } + files = append(files, f...) + } + return files, nil +} + +func resolvePattern(pattern string) ([][]string, error) { + // dirsRun, filesRun, and pkgsRun indicate whether golint is applied to + // directory, file or package targets. The distinction affects which + // checks are run. It is no valid to mix target types. 
+ var dirsRun, filesRun, pkgsRun int + var matches []string + + if strings.HasSuffix(pattern, "/...") && isDir(pattern[:len(pattern)-len("/...")]) { + dirsRun = 1 + for _, dirname := range matchPackagesInFS(pattern) { + matches = append(matches, dirname) + } + } else if isDir(pattern) { + dirsRun = 1 + matches = append(matches, pattern) + } else if exists(pattern) { + filesRun = 1 + matches = append(matches, pattern) + } else { + pkgsRun = 1 + matches = append(matches, pattern) + } + + result := [][]string{} + switch { + case dirsRun == 1: + for _, dir := range matches { + res, err := resolveDir(dir) + if err != nil { + return nil, err + } + result = append(result, res) + } + case filesRun == 1: + return [][]string{matches}, nil + case pkgsRun == 1: + for _, pkg := range importPaths(matches) { + res, err := resolvePackage(pkg) + if err != nil { + return nil, err + } + result = append(result, res) + } + } + return result, nil +} + +func newPathFilter(skip []string) func(string) bool { + filter := map[string]bool{} + for _, name := range skip { + filter[name] = true + } + + return func(path string) bool { + base := filepath.Base(path) + if filter[base] || filter[path] { + return true + } + return base != "." && base != ".." && strings.ContainsAny(base[0:1], "_.") + } +} + +// importPathsNoDotExpansion returns the import paths to use for the given +// command line, but it does no ... expansion. +func importPathsNoDotExpansion(args []string) []string { + if len(args) == 0 { + return []string{"."} + } + var out []string + for _, a := range args { + // Arguments are supposed to be import paths, but + // as a courtesy to Windows developers, rewrite \ to / + // in command-line arguments. Handles .\... and so on. + if filepath.Separator == '\\' { + a = strings.Replace(a, `\`, `/`, -1) + } + + // Put argument in canonical form, but preserve leading ./. + if strings.HasPrefix(a, "./") { + a = "./" + path.Clean(a) + if a == "./." { + a = "." + } + } else { + a = path.Clean(a) + } + if a == "all" || a == "std" { + out = append(out, matchPackages(a)...) + continue + } + out = append(out, a) + } + return out +} + +// importPaths returns the import paths to use for the given command line. +func importPaths(args []string) []string { + args = importPathsNoDotExpansion(args) + var out []string + for _, a := range args { + if strings.Contains(a, "...") { + if build.IsLocalImport(a) { + out = append(out, matchPackagesInFS(a)...) + } else { + out = append(out, matchPackages(a)...) + } + continue + } + out = append(out, a) + } + return out +} + +// matchPattern(pattern)(name) reports whether +// name matches pattern. Pattern is a limited glob +// pattern in which '...' means 'any string' and there +// is no other special syntax. +func matchPattern(pattern string) func(name string) bool { + re := regexp.QuoteMeta(pattern) + re = strings.Replace(re, `\.\.\.`, `.*`, -1) + // Special case: foo/... matches foo too. + if strings.HasSuffix(re, `/.*`) { + re = re[:len(re)-len(`/.*`)] + `(/.*)?` + } + reg := regexp.MustCompile(`^` + re + `$`) + return func(name string) bool { + return reg.MatchString(name) + } +} + +// hasPathPrefix reports whether the path s begins with the +// elements in prefix. 
+func hasPathPrefix(s, prefix string) bool { + switch { + default: + return false + case len(s) == len(prefix): + return s == prefix + case len(s) > len(prefix): + if prefix != "" && prefix[len(prefix)-1] == '/' { + return strings.HasPrefix(s, prefix) + } + return s[len(prefix)] == '/' && s[:len(prefix)] == prefix + } +} + +// treeCanMatchPattern(pattern)(name) reports whether +// name or children of name can possibly match pattern. +// Pattern is the same limited glob accepted by matchPattern. +func treeCanMatchPattern(pattern string) func(name string) bool { + wildCard := false + if i := strings.Index(pattern, "..."); i >= 0 { + wildCard = true + pattern = pattern[:i] + } + return func(name string) bool { + return len(name) <= len(pattern) && hasPathPrefix(pattern, name) || + wildCard && strings.HasPrefix(name, pattern) + } +} + +func matchPackages(pattern string) []string { + match := func(string) bool { return true } + treeCanMatch := func(string) bool { return true } + if pattern != "all" && pattern != "std" { + match = matchPattern(pattern) + treeCanMatch = treeCanMatchPattern(pattern) + } + + have := map[string]bool{ + "builtin": true, // ignore pseudo-package that exists only for documentation + } + if !buildContext.CgoEnabled { + have["runtime/cgo"] = true // ignore during walk + } + var pkgs []string + + // Commands + cmd := filepath.Join(goroot, "src/cmd") + string(filepath.Separator) + filepath.Walk(cmd, func(path string, fi os.FileInfo, err error) error { + if err != nil || !fi.IsDir() || path == cmd { + return nil + } + name := path[len(cmd):] + if !treeCanMatch(name) { + return filepath.SkipDir + } + // Commands are all in cmd/, not in subdirectories. + if strings.Contains(name, string(filepath.Separator)) { + return filepath.SkipDir + } + + // We use, e.g., cmd/gofmt as the pseudo import path for gofmt. + name = "cmd/" + name + if have[name] { + return nil + } + have[name] = true + if !match(name) { + return nil + } + _, err = buildContext.ImportDir(path, 0) + if err != nil { + if _, noGo := err.(*build.NoGoError); !noGo { + log.Print(err) + } + return nil + } + pkgs = append(pkgs, name) + return nil + }) + + for _, src := range buildContext.SrcDirs() { + if (pattern == "std" || pattern == "cmd") && src != gorootSrc { + continue + } + src = filepath.Clean(src) + string(filepath.Separator) + root := src + if pattern == "cmd" { + root += "cmd" + string(filepath.Separator) + } + filepath.Walk(root, func(path string, fi os.FileInfo, err error) error { + if err != nil || !fi.IsDir() || path == src { + return nil + } + + // Avoid .foo, _foo, and testdata directory trees. + _, elem := filepath.Split(path) + if strings.HasPrefix(elem, ".") || strings.HasPrefix(elem, "_") || elem == "testdata" { + return filepath.SkipDir + } + + name := filepath.ToSlash(path[len(src):]) + if pattern == "std" && (strings.Contains(name, ".") || name == "cmd") { + // The name "std" is only the standard library. + // If the name is cmd, it's the root of the command tree. + return filepath.SkipDir + } + if !treeCanMatch(name) { + return filepath.SkipDir + } + if have[name] { + return nil + } + have[name] = true + if !match(name) { + return nil + } + _, err = buildContext.ImportDir(path, 0) + if err != nil { + if _, noGo := err.(*build.NoGoError); noGo { + return nil + } + } + pkgs = append(pkgs, name) + return nil + }) + } + return pkgs +} + +func matchPackagesInFS(pattern string) []string { + // Find directory to begin the scan. 
+ // Could be smarter but this one optimization + // is enough for now, since ... is usually at the + // end of a path. + i := strings.Index(pattern, "...") + dir, _ := path.Split(pattern[:i]) + + // pattern begins with ./ or ../. + // path.Clean will discard the ./ but not the ../. + // We need to preserve the ./ for pattern matching + // and in the returned import paths. + prefix := "" + if strings.HasPrefix(pattern, "./") { + prefix = "./" + } + match := matchPattern(pattern) + + var pkgs []string + filepath.Walk(dir, func(path string, fi os.FileInfo, err error) error { + if err != nil || !fi.IsDir() { + return nil + } + if path == dir { + // filepath.Walk starts at dir and recurses. For the recursive case, + // the path is the result of filepath.Join, which calls filepath.Clean. + // The initial case is not Cleaned, though, so we do this explicitly. + // + // This converts a path like "./io/" to "io". Without this step, running + // "cd $GOROOT/src/pkg; go list ./io/..." would incorrectly skip the io + // package, because prepending the prefix "./" to the unclean path would + // result in "././io", and match("././io") returns false. + path = filepath.Clean(path) + } + + // Avoid .foo, _foo, and testdata directory trees, but do not avoid "." or "..". + _, elem := filepath.Split(path) + dot := strings.HasPrefix(elem, ".") && elem != "." && elem != ".." + if dot || strings.HasPrefix(elem, "_") || elem == "testdata" { + return filepath.SkipDir + } + + name := prefix + filepath.ToSlash(path) + if !match(name) { + return nil + } + if _, err = build.ImportDir(path, 0); err != nil { + if _, noGo := err.(*build.NoGoError); !noGo { + log.Print(err) + } + return nil + } + pkgs = append(pkgs, name) + return nil + }) + return pkgs +} diff --git a/vendor/github.com/mgechev/revive/LICENSE b/vendor/github.com/mgechev/revive/LICENSE new file mode 100644 index 000000000..c617c7e01 --- /dev/null +++ b/vendor/github.com/mgechev/revive/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2018 Minko Gechev + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
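As a side note on the `...` handling above: the `matchPattern` helper in `dots/resolve.go` turns the limited glob into a regular expression (quote the pattern, replace `...` with `.*`, and let a trailing `/...` also match the bare prefix). A minimal standalone sketch of that behaviour, assuming only the standard library and not part of the vendored files themselves, looks roughly like this:

```go
package main

import (
	"fmt"
	"regexp"
	"strings"
)

// globToRegexp mirrors the matchPattern logic from dots/resolve.go:
// "..." means "any string", and "foo/..." also matches "foo" itself.
func globToRegexp(pattern string) *regexp.Regexp {
	re := regexp.QuoteMeta(pattern)
	re = strings.Replace(re, `\.\.\.`, `.*`, -1)
	// Special case: foo/... matches foo too.
	if strings.HasSuffix(re, `/.*`) {
		re = re[:len(re)-len(`/.*`)] + `(/.*)?`
	}
	return regexp.MustCompile(`^` + re + `$`)
}

func main() {
	re := globToRegexp("github.com/mgechev/dots/...")
	fmt.Println(re.MatchString("github.com/mgechev/dots"))          // true: "/..." matches the prefix itself
	fmt.Println(re.MatchString("github.com/mgechev/dots/fixtures")) // true
	fmt.Println(re.MatchString("github.com/mgechev/other"))         // false
}
```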
diff --git a/vendor/github.com/mgechev/revive/config/config.go b/vendor/github.com/mgechev/revive/config/config.go new file mode 100644 index 000000000..298c0bdf5 --- /dev/null +++ b/vendor/github.com/mgechev/revive/config/config.go @@ -0,0 +1,226 @@ +package config + +import ( + "errors" + "fmt" + "io/ioutil" + + "github.com/mgechev/revive/formatter" + + "github.com/BurntSushi/toml" + "github.com/mgechev/revive/lint" + "github.com/mgechev/revive/rule" +) + +var defaultRules = []lint.Rule{ + &rule.VarDeclarationsRule{}, + &rule.PackageCommentsRule{}, + &rule.DotImportsRule{}, + &rule.BlankImportsRule{}, + &rule.ExportedRule{}, + &rule.VarNamingRule{}, + &rule.IndentErrorFlowRule{}, + &rule.RangeRule{}, + &rule.ErrorfRule{}, + &rule.ErrorNamingRule{}, + &rule.ErrorStringsRule{}, + &rule.ReceiverNamingRule{}, + &rule.IncrementDecrementRule{}, + &rule.ErrorReturnRule{}, + &rule.UnexportedReturnRule{}, + &rule.TimeNamingRule{}, + &rule.ContextKeysType{}, + &rule.ContextAsArgumentRule{}, +} + +var allRules = append([]lint.Rule{ + &rule.ArgumentsLimitRule{}, + &rule.CyclomaticRule{}, + &rule.FileHeaderRule{}, + &rule.EmptyBlockRule{}, + &rule.SuperfluousElseRule{}, + &rule.ConfusingNamingRule{}, + &rule.GetReturnRule{}, + &rule.ModifiesParamRule{}, + &rule.ConfusingResultsRule{}, + &rule.DeepExitRule{}, + &rule.UnusedParamRule{}, + &rule.UnreachableCodeRule{}, + &rule.AddConstantRule{}, + &rule.FlagParamRule{}, + &rule.UnnecessaryStmtRule{}, + &rule.StructTagRule{}, + &rule.ModifiesValRecRule{}, + &rule.ConstantLogicalExprRule{}, + &rule.BoolLiteralRule{}, + &rule.RedefinesBuiltinIDRule{}, + &rule.ImportsBlacklistRule{}, + &rule.FunctionResultsLimitRule{}, + &rule.MaxPublicStructsRule{}, + &rule.RangeValInClosureRule{}, + &rule.RangeValAddress{}, + &rule.WaitGroupByValueRule{}, + &rule.AtomicRule{}, + &rule.EmptyLinesRule{}, + &rule.LineLengthLimitRule{}, + &rule.CallToGCRule{}, + &rule.DuplicatedImportsRule{}, + &rule.ImportShadowingRule{}, + &rule.BareReturnRule{}, + &rule.UnusedReceiverRule{}, + &rule.UnhandledErrorRule{}, + &rule.CognitiveComplexityRule{}, + &rule.StringOfIntRule{}, + &rule.StringFormatRule{}, + &rule.EarlyReturnRule{}, + &rule.UnconditionalRecursionRule{}, + &rule.IdenticalBranchesRule{}, + &rule.DeferRule{}, + &rule.UnexportedNamingRule{}, + &rule.FunctionLength{}, + &rule.NestedStructs{}, + &rule.IfReturnRule{}, + &rule.UselessBreak{}, +}, defaultRules...) 
+ +var allFormatters = []lint.Formatter{ + &formatter.Stylish{}, + &formatter.Friendly{}, + &formatter.JSON{}, + &formatter.NDJSON{}, + &formatter.Default{}, + &formatter.Unix{}, + &formatter.Checkstyle{}, + &formatter.Plain{}, + &formatter.Sarif{}, +} + +func getFormatters() map[string]lint.Formatter { + result := map[string]lint.Formatter{} + for _, f := range allFormatters { + result[f.Name()] = f + } + return result +} + +// GetLintingRules yields the linting rules that must be applied by the linter +func GetLintingRules(config *lint.Config) ([]lint.Rule, error) { + if config.EnableAllRules { + return getAllRules(config) + } + + return getEnabledRules(config) +} + +// getAllRules yields the list of all available rules except those disabled by configuration +func getAllRules(config *lint.Config) ([]lint.Rule, error) { + lintingRules := []lint.Rule{} + for _, r := range allRules { + ruleConf := config.Rules[r.Name()] + if ruleConf.Disabled { + continue // skip disabled rules + } + + lintingRules = append(lintingRules, r) + } + + return lintingRules, nil +} + +// getEnabledRules yields the list of rules that are enabled by configuration +func getEnabledRules(config *lint.Config) ([]lint.Rule, error) { + rulesMap := map[string]lint.Rule{} + for _, r := range allRules { + rulesMap[r.Name()] = r + } + + lintingRules := []lint.Rule{} + for name, ruleConfig := range config.Rules { + rule, ok := rulesMap[name] + if !ok { + return nil, fmt.Errorf("cannot find rule: %s", name) + } + + if ruleConfig.Disabled { + continue // skip disabled rules + } + + lintingRules = append(lintingRules, rule) + } + + return lintingRules, nil +} + +func parseConfig(path string) (*lint.Config, error) { + config := &lint.Config{} + file, err := ioutil.ReadFile(path) + if err != nil { + return nil, errors.New("cannot read the config file") + } + _, err = toml.Decode(string(file), config) + if err != nil { + return nil, fmt.Errorf("cannot parse the config file: %v", err) + } + return config, nil +} + +func normalizeConfig(config *lint.Config) { + if config.Confidence == 0 { + config.Confidence = 0.8 + } + severity := config.Severity + if severity != "" { + for k, v := range config.Rules { + if v.Severity == "" { + v.Severity = severity + } + config.Rules[k] = v + } + for k, v := range config.Directives { + if v.Severity == "" { + v.Severity = severity + } + config.Directives[k] = v + } + } +} + +// GetConfig yields the configuration +func GetConfig(configPath string) (*lint.Config, error) { + config := defaultConfig() + if configPath != "" { + var err error + config, err = parseConfig(configPath) + if err != nil { + return nil, err + } + } + normalizeConfig(config) + return config, nil +} + +// GetFormatter yields the formatter for lint failures +func GetFormatter(formatterName string) (lint.Formatter, error) { + formatters := getFormatters() + formatter := formatters["default"] + if formatterName != "" { + f, ok := formatters[formatterName] + if !ok { + return nil, fmt.Errorf("unknown formatter %v", formatterName) + } + formatter = f + } + return formatter, nil +} + +func defaultConfig() *lint.Config { + defaultConfig := lint.Config{ + Confidence: 0.0, + Severity: lint.SeverityWarning, + Rules: map[string]lint.RuleConfig{}, + } + for _, r := range defaultRules { + defaultConfig.Rules[r.Name()] = lint.RuleConfig{} + } + return &defaultConfig +} diff --git a/vendor/github.com/mgechev/revive/formatter/checkstyle.go b/vendor/github.com/mgechev/revive/formatter/checkstyle.go new file mode 100644 index 000000000..bd20da888 
--- /dev/null +++ b/vendor/github.com/mgechev/revive/formatter/checkstyle.go @@ -0,0 +1,76 @@ +package formatter + +import ( + "bytes" + "encoding/xml" + "github.com/mgechev/revive/lint" + plainTemplate "text/template" +) + +// Checkstyle is an implementation of the Formatter interface +// which formats the errors to Checkstyle-like format. +type Checkstyle struct { + Metadata lint.FormatterMetadata +} + +// Name returns the name of the formatter +func (f *Checkstyle) Name() string { + return "checkstyle" +} + +type issue struct { + Line int + Col int + What string + Confidence float64 + Severity lint.Severity + RuleName string +} + +// Format formats the failures gotten from the lint. +func (f *Checkstyle) Format(failures <-chan lint.Failure, config lint.Config) (string, error) { + var issues = map[string][]issue{} + for failure := range failures { + buf := new(bytes.Buffer) + xml.Escape(buf, []byte(failure.Failure)) + what := buf.String() + iss := issue{ + Line: failure.Position.Start.Line, + Col: failure.Position.Start.Column, + What: what, + Confidence: failure.Confidence, + Severity: severity(config, failure), + RuleName: failure.RuleName, + } + fn := failure.GetFilename() + if issues[fn] == nil { + issues[fn] = make([]issue, 0) + } + issues[fn] = append(issues[fn], iss) + } + + t, err := plainTemplate.New("revive").Parse(checkstyleTemplate) + if err != nil { + return "", err + } + + buf := new(bytes.Buffer) + + err = t.Execute(buf, issues) + if err != nil { + return "", err + } + + return buf.String(), nil +} + +const checkstyleTemplate = ` + +{{- range $k, $v := . }} + + {{- range $i, $issue := $v }} + + {{- end }} + +{{- end }} +` diff --git a/vendor/github.com/mgechev/revive/formatter/default.go b/vendor/github.com/mgechev/revive/formatter/default.go new file mode 100644 index 000000000..145e6d548 --- /dev/null +++ b/vendor/github.com/mgechev/revive/formatter/default.go @@ -0,0 +1,26 @@ +package formatter + +import ( + "fmt" + + "github.com/mgechev/revive/lint" +) + +// Default is an implementation of the Formatter interface +// which formats the errors to text. +type Default struct { + Metadata lint.FormatterMetadata +} + +// Name returns the name of the formatter +func (f *Default) Name() string { + return "default" +} + +// Format formats the failures gotten from the lint. +func (f *Default) Format(failures <-chan lint.Failure, _ lint.Config) (string, error) { + for failure := range failures { + fmt.Printf("%v: %s\n", failure.Position.Start, failure.Failure) + } + return "", nil +} diff --git a/vendor/github.com/mgechev/revive/formatter/friendly.go b/vendor/github.com/mgechev/revive/formatter/friendly.go new file mode 100644 index 000000000..d0a3099f8 --- /dev/null +++ b/vendor/github.com/mgechev/revive/formatter/friendly.go @@ -0,0 +1,149 @@ +package formatter + +import ( + "bytes" + "fmt" + "sort" + + "github.com/fatih/color" + "github.com/mgechev/revive/lint" + "github.com/olekukonko/tablewriter" +) + +var newLines = map[rune]bool{ + 0x000A: true, + 0x000B: true, + 0x000C: true, + 0x000D: true, + 0x0085: true, + 0x2028: true, + 0x2029: true, +} + +func getErrorEmoji() string { + return color.RedString("✘") +} + +func getWarningEmoji() string { + return color.YellowString("⚠") +} + +// Friendly is an implementation of the Formatter interface +// which formats the errors to JSON. 
+type Friendly struct { + Metadata lint.FormatterMetadata +} + +// Name returns the name of the formatter +func (f *Friendly) Name() string { + return "friendly" +} + +// Format formats the failures gotten from the lint. +func (f *Friendly) Format(failures <-chan lint.Failure, config lint.Config) (string, error) { + errorMap := map[string]int{} + warningMap := map[string]int{} + totalErrors := 0 + totalWarnings := 0 + for failure := range failures { + sev := severity(config, failure) + f.printFriendlyFailure(failure, sev) + if sev == lint.SeverityWarning { + warningMap[failure.RuleName] = warningMap[failure.RuleName] + 1 + totalWarnings++ + } + if sev == lint.SeverityError { + errorMap[failure.RuleName] = errorMap[failure.RuleName] + 1 + totalErrors++ + } + } + f.printSummary(totalErrors, totalWarnings) + f.printStatistics(color.RedString("Errors:"), errorMap) + f.printStatistics(color.YellowString("Warnings:"), warningMap) + return "", nil +} + +func (f *Friendly) printFriendlyFailure(failure lint.Failure, severity lint.Severity) { + f.printHeaderRow(failure, severity) + f.printFilePosition(failure) + fmt.Println() + fmt.Println() +} + +func (f *Friendly) printHeaderRow(failure lint.Failure, severity lint.Severity) { + emoji := getWarningEmoji() + if severity == lint.SeverityError { + emoji = getErrorEmoji() + } + fmt.Print(f.table([][]string{{emoji, "https://revive.run/r#" + failure.RuleName, color.GreenString(failure.Failure)}})) +} + +func (f *Friendly) printFilePosition(failure lint.Failure) { + fmt.Printf(" %s:%d:%d", failure.GetFilename(), failure.Position.Start.Line, failure.Position.Start.Column) +} + +type statEntry struct { + name string + failures int +} + +func (f *Friendly) printSummary(errors, warnings int) { + emoji := getWarningEmoji() + if errors > 0 { + emoji = getErrorEmoji() + } + problemsLabel := "problems" + if errors+warnings == 1 { + problemsLabel = "problem" + } + warningsLabel := "warnings" + if warnings == 1 { + warningsLabel = "warning" + } + errorsLabel := "errors" + if errors == 1 { + errorsLabel = "error" + } + str := fmt.Sprintf("%d %s (%d %s, %d %s)", errors+warnings, problemsLabel, errors, errorsLabel, warnings, warningsLabel) + if errors > 0 { + fmt.Printf("%s %s\n", emoji, color.RedString(str)) + fmt.Println() + return + } + if warnings > 0 { + fmt.Printf("%s %s\n", emoji, color.YellowString(str)) + fmt.Println() + return + } +} + +func (f *Friendly) printStatistics(header string, stats map[string]int) { + if len(stats) == 0 { + return + } + var data []statEntry + for name, total := range stats { + data = append(data, statEntry{name, total}) + } + sort.Slice(data, func(i, j int) bool { + return data[i].failures > data[j].failures + }) + formatted := [][]string{} + for _, entry := range data { + formatted = append(formatted, []string{color.GreenString(fmt.Sprintf("%d", entry.failures)), entry.name}) + } + fmt.Println(header) + fmt.Println(f.table(formatted)) +} + +func (f *Friendly) table(rows [][]string) string { + buf := new(bytes.Buffer) + table := tablewriter.NewWriter(buf) + table.SetBorder(false) + table.SetColumnSeparator("") + table.SetRowSeparator("") + table.SetAutoWrapText(false) + table.AppendBulk(rows) + table.Render() + return buf.String() +} diff --git a/vendor/github.com/mgechev/revive/formatter/json.go b/vendor/github.com/mgechev/revive/formatter/json.go new file mode 100644 index 000000000..9c939face --- /dev/null +++ b/vendor/github.com/mgechev/revive/formatter/json.go @@ -0,0 +1,40 @@ +package formatter + +import ( + "encoding/json" + 
+	"github.com/mgechev/revive/lint"
+)
+
+// JSON is an implementation of the Formatter interface
+// which formats the errors to JSON.
+type JSON struct {
+	Metadata lint.FormatterMetadata
+}
+
+// Name returns the name of the formatter
+func (f *JSON) Name() string {
+	return "json"
+}
+
+// jsonObject defines a JSON object of a failure
+type jsonObject struct {
+	Severity lint.Severity
+	lint.Failure `json:",inline"`
+}
+
+// Format formats the failures gotten from the lint.
+func (f *JSON) Format(failures <-chan lint.Failure, config lint.Config) (string, error) {
+	var slice []jsonObject
+	for failure := range failures {
+		obj := jsonObject{}
+		obj.Severity = severity(config, failure)
+		obj.Failure = failure
+		slice = append(slice, obj)
+	}
+	result, err := json.Marshal(slice)
+	if err != nil {
+		return "", err
+	}
+	return string(result), err
+}
diff --git a/vendor/github.com/mgechev/revive/formatter/ndjson.go b/vendor/github.com/mgechev/revive/formatter/ndjson.go
new file mode 100644
index 000000000..aa2b1d636
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/formatter/ndjson.go
@@ -0,0 +1,34 @@
+package formatter
+
+import (
+	"encoding/json"
+	"os"
+
+	"github.com/mgechev/revive/lint"
+)
+
+// NDJSON is an implementation of the Formatter interface
+// which formats the errors to an NDJSON stream.
+type NDJSON struct {
+	Metadata lint.FormatterMetadata
+}
+
+// Name returns the name of the formatter
+func (f *NDJSON) Name() string {
+	return "ndjson"
+}
+
+// Format formats the failures gotten from the lint.
+func (f *NDJSON) Format(failures <-chan lint.Failure, config lint.Config) (string, error) {
+	enc := json.NewEncoder(os.Stdout)
+	for failure := range failures {
+		obj := jsonObject{}
+		obj.Severity = severity(config, failure)
+		obj.Failure = failure
+		err := enc.Encode(obj)
+		if err != nil {
+			return "", err
+		}
+	}
+	return "", nil
+}
diff --git a/vendor/github.com/mgechev/revive/formatter/plain.go b/vendor/github.com/mgechev/revive/formatter/plain.go
new file mode 100644
index 000000000..a854d2562
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/formatter/plain.go
@@ -0,0 +1,26 @@
+package formatter
+
+import (
+	"fmt"
+
+	"github.com/mgechev/revive/lint"
+)
+
+// Plain is an implementation of the Formatter interface
+// which formats the errors to plain text.
+type Plain struct {
+	Metadata lint.FormatterMetadata
+}
+
+// Name returns the name of the formatter
+func (f *Plain) Name() string {
+	return "plain"
+}
+
+// Format formats the failures gotten from the lint.
+func (f *Plain) Format(failures <-chan lint.Failure, _ lint.Config) (string, error) {
+	for failure := range failures {
+		fmt.Printf("%v: %s %s\n", failure.Position.Start, failure.Failure, "https://revive.run/r#"+failure.RuleName)
+	}
+	return "", nil
+}
diff --git a/vendor/github.com/mgechev/revive/formatter/sarif.go b/vendor/github.com/mgechev/revive/formatter/sarif.go
new file mode 100644
index 000000000..8968c3ffb
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/formatter/sarif.go
@@ -0,0 +1,107 @@
+package formatter
+
+import (
+	"bytes"
+	"fmt"
+	"strings"
+
+	"github.com/chavacava/garif"
+	"github.com/mgechev/revive/lint"
+)
+
+// Sarif is an implementation of the Formatter interface
+// which formats revive failures into SARIF format.
+type Sarif struct {
+	Metadata lint.FormatterMetadata
+}
+
+// Name returns the name of the formatter
+func (f *Sarif) Name() string {
+	return "sarif"
+}
+
+const reviveSite = "https://revive.run"
+
+// Format formats the failures gotten from the lint.
+func (f *Sarif) Format(failures <-chan lint.Failure, cfg lint.Config) (string, error) { + sarifLog := newReviveRunLog(cfg) + + for failure := range failures { + sarifLog.AddResult(failure) + } + + buf := new(bytes.Buffer) + sarifLog.PrettyWrite(buf) + + return buf.String(), nil +} + +type reviveRunLog struct { + *garif.LogFile + run *garif.Run + rules map[string]lint.RuleConfig +} + +func newReviveRunLog(cfg lint.Config) *reviveRunLog { + run := garif.NewRun(garif.NewTool(garif.NewDriver("revive").WithInformationUri(reviveSite))) + log := garif.NewLogFile([]*garif.Run{run}, garif.Version210) + + reviveLog := &reviveRunLog{ + log, + run, + cfg.Rules, + } + + reviveLog.addRules(cfg.Rules) + + return reviveLog +} + +func (l *reviveRunLog) addRules(cfg map[string]lint.RuleConfig) { + for name, ruleCfg := range cfg { + rule := garif.NewRule(name).WithHelpUri(reviveSite + "/r#" + name) + setRuleProperties(rule, ruleCfg) + driver := l.run.Tool.Driver + + if driver.Rules == nil { + driver.Rules = []*garif.ReportingDescriptor{rule} + return + } + + driver.Rules = append(driver.Rules, rule) + } +} + +func (l *reviveRunLog) AddResult(failure lint.Failure) { + positiveOrZero := func(x int) int { + if x > 0 { + return x + } + return 0 + } + position := failure.Position + filename := position.Start.Filename + line := positiveOrZero(position.Start.Line - 1) // https://docs.oasis-open.org/sarif/sarif/v2.1.0/csprd01/sarif-v2.1.0-csprd01.html#def_line + column := positiveOrZero(position.Start.Column - 1) // https://docs.oasis-open.org/sarif/sarif/v2.1.0/csprd01/sarif-v2.1.0-csprd01.html#def_column + + result := garif.NewResult(garif.NewMessageFromText(failure.Failure)) + location := garif.NewLocation().WithURI(filename).WithLineColumn(line, column) + result.Locations = append(result.Locations, location) + result.RuleId = failure.RuleName + result.Level = l.rules[failure.RuleName].Severity + + l.run.Results = append(l.run.Results, result) +} + +func setRuleProperties(sarifRule *garif.ReportingDescriptor, lintRule lint.RuleConfig) { + arguments := make([]string, len(lintRule.Arguments)) + for i, arg := range lintRule.Arguments { + arguments[i] = fmt.Sprintf("%+v", arg) + } + + if len(arguments) > 0 { + sarifRule.WithProperties("arguments", strings.Join(arguments, ",")) + } + + sarifRule.WithProperties("severity", string(lintRule.Severity)) +} diff --git a/vendor/github.com/mgechev/revive/formatter/severity.go b/vendor/github.com/mgechev/revive/formatter/severity.go new file mode 100644 index 000000000..a43bf3192 --- /dev/null +++ b/vendor/github.com/mgechev/revive/formatter/severity.go @@ -0,0 +1,13 @@ +package formatter + +import "github.com/mgechev/revive/lint" + +func severity(config lint.Config, failure lint.Failure) lint.Severity { + if config, ok := config.Rules[failure.RuleName]; ok && config.Severity == lint.SeverityError { + return lint.SeverityError + } + if config, ok := config.Directives[failure.RuleName]; ok && config.Severity == lint.SeverityError { + return lint.SeverityError + } + return lint.SeverityWarning +} diff --git a/vendor/github.com/mgechev/revive/formatter/stylish.go b/vendor/github.com/mgechev/revive/formatter/stylish.go new file mode 100644 index 000000000..cd81fdae7 --- /dev/null +++ b/vendor/github.com/mgechev/revive/formatter/stylish.go @@ -0,0 +1,89 @@ +package formatter + +import ( + "bytes" + "fmt" + + "github.com/fatih/color" + "github.com/mgechev/revive/lint" + "github.com/olekukonko/tablewriter" +) + +// Stylish is an implementation of the Formatter interface +// which 
formats the failures in a stylish, human-readable table.
+type Stylish struct {
+	Metadata lint.FormatterMetadata
+}
+
+// Name returns the name of the formatter
+func (f *Stylish) Name() string {
+	return "stylish"
+}
+
+func formatFailure(failure lint.Failure, severity lint.Severity) []string {
+	fString := color.CyanString(failure.Failure)
+	fName := color.RedString("https://revive.run/r#" + failure.RuleName)
+	lineColumn := failure.Position
+	pos := fmt.Sprintf("(%d, %d)", lineColumn.Start.Line, lineColumn.Start.Column)
+	if severity == lint.SeverityWarning {
+		fName = color.YellowString("https://revive.run/r#" + failure.RuleName)
+	}
+	return []string{failure.GetFilename(), pos, fName, fString}
+}
+
+// Format formats the failures gotten from the lint.
+func (f *Stylish) Format(failures <-chan lint.Failure, config lint.Config) (string, error) {
+	var result [][]string
+	var totalErrors = 0
+	var total = 0
+
+	for f := range failures {
+		total++
+		currentType := severity(config, f)
+		if currentType == lint.SeverityError {
+			totalErrors++
+		}
+		result = append(result, formatFailure(f, lint.Severity(currentType)))
+	}
+	ps := "problems"
+	if total == 1 {
+		ps = "problem"
+	}
+
+	fileReport := make(map[string][][]string)
+
+	for _, row := range result {
+		if _, ok := fileReport[row[0]]; !ok {
+			fileReport[row[0]] = [][]string{}
+		}
+
+		fileReport[row[0]] = append(fileReport[row[0]], []string{row[1], row[2], row[3]})
+	}
+
+	output := ""
+	for filename, val := range fileReport {
+		buf := new(bytes.Buffer)
+		table := tablewriter.NewWriter(buf)
+		table.SetBorder(false)
+		table.SetColumnSeparator("")
+		table.SetRowSeparator("")
+		table.SetAutoWrapText(false)
+		table.AppendBulk(val)
+		table.Render()
+		c := color.New(color.Underline)
+		output += c.SprintfFunc()(filename + "\n")
+		output += buf.String() + "\n"
+	}
+
+	suffix := fmt.Sprintf(" %d %s (%d errors) (%d warnings)", total, ps, totalErrors, total-totalErrors)
+
+	if total > 0 && totalErrors > 0 {
+		suffix = color.RedString("\n ✖" + suffix)
+	} else if total > 0 && totalErrors == 0 {
+		suffix = color.YellowString("\n ✖" + suffix)
+	} else {
+		suffix, output = "", ""
+	}
+
+	return output + suffix, nil
+}
diff --git a/vendor/github.com/mgechev/revive/formatter/unix.go b/vendor/github.com/mgechev/revive/formatter/unix.go
new file mode 100644
index 000000000..b9ae62d38
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/formatter/unix.go
@@ -0,0 +1,27 @@
+package formatter
+
+import (
+	"fmt"
+
+	"github.com/mgechev/revive/lint"
+)
+
+// Unix is an implementation of the Formatter interface
+// which formats the errors to a simple line-based error format
+// main.go:24:9: [errorf] should replace errors.New(fmt.Sprintf(...)) with fmt.Errorf(...)
+type Unix struct {
+	Metadata lint.FormatterMetadata
+}
+
+// Name returns the name of the formatter
+func (f *Unix) Name() string {
+	return "unix"
+}
+
+// Format formats the failures gotten from the lint.
+func (f *Unix) Format(failures <-chan lint.Failure, _ lint.Config) (string, error) {
+	for failure := range failures {
+		fmt.Printf("%v: [%s] %s\n", failure.Position.Start, failure.RuleName, failure.Failure)
+	}
+	return "", nil
+}
diff --git a/vendor/github.com/mgechev/revive/lint/config.go b/vendor/github.com/mgechev/revive/lint/config.go
new file mode 100644
index 000000000..276305804
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/lint/config.go
@@ -0,0 +1,35 @@
+package lint
+
+// Arguments is the type used for the arguments of a rule.
+type Arguments = []interface{} + +// RuleConfig is type used for the rule configuration. +type RuleConfig struct { + Arguments Arguments + Severity Severity + Disabled bool +} + +// RulesConfig defines the config for all rules. +type RulesConfig = map[string]RuleConfig + +// DirectiveConfig is type used for the linter directive configuration. +type DirectiveConfig struct { + Severity Severity +} + +// DirectivesConfig defines the config for all directives. +type DirectivesConfig = map[string]DirectiveConfig + +// Config defines the config of the linter. +type Config struct { + IgnoreGeneratedHeader bool `toml:"ignoreGeneratedHeader"` + Confidence float64 + Severity Severity + EnableAllRules bool `toml:"enableAllRules"` + Rules RulesConfig `toml:"rule"` + ErrorCode int `toml:"errorCode"` + WarningCode int `toml:"warningCode"` + Directives DirectivesConfig `toml:"directive"` + Exclude []string `toml:"exclude"` +} diff --git a/vendor/github.com/mgechev/revive/lint/failure.go b/vendor/github.com/mgechev/revive/lint/failure.go new file mode 100644 index 000000000..479b0cb48 --- /dev/null +++ b/vendor/github.com/mgechev/revive/lint/failure.go @@ -0,0 +1,39 @@ +package lint + +import ( + "go/ast" + "go/token" +) + +const ( + // SeverityWarning declares failures of type warning + SeverityWarning = "warning" + // SeverityError declares failures of type error. + SeverityError = "error" +) + +// Severity is the type for the failure types. +type Severity string + +// FailurePosition returns the failure position +type FailurePosition struct { + Start token.Position + End token.Position +} + +// Failure defines a struct for a linting failure. +type Failure struct { + Failure string + RuleName string + Category string + Position FailurePosition + Node ast.Node `json:"-"` + Confidence float64 + // For future use + ReplacementLine string +} + +// GetFilename returns the filename. +func (f *Failure) GetFilename() string { + return f.Position.Start.Filename +} diff --git a/vendor/github.com/mgechev/revive/lint/file.go b/vendor/github.com/mgechev/revive/lint/file.go new file mode 100644 index 000000000..ee29c1dae --- /dev/null +++ b/vendor/github.com/mgechev/revive/lint/file.go @@ -0,0 +1,278 @@ +package lint + +import ( + "bytes" + "go/ast" + "go/parser" + "go/printer" + "go/token" + "go/types" + "math" + "regexp" + "strings" +) + +// File abstraction used for representing files. +type File struct { + Name string + Pkg *Package + content []byte + AST *ast.File +} + +// IsTest returns if the file contains tests. +func (f *File) IsTest() bool { return strings.HasSuffix(f.Name, "_test.go") } + +// Content returns the file's content. +func (f *File) Content() []byte { + return f.content +} + +// NewFile creates a new file +func NewFile(name string, content []byte, pkg *Package) (*File, error) { + f, err := parser.ParseFile(pkg.fset, name, content, parser.ParseComments) + if err != nil { + return nil, err + } + return &File{ + Name: name, + content: content, + Pkg: pkg, + AST: f, + }, nil +} + +// ToPosition returns line and column for given position. +func (f *File) ToPosition(pos token.Pos) token.Position { + return f.Pkg.fset.Position(pos) +} + +// Render renters a node. +func (f *File) Render(x interface{}) string { + var buf bytes.Buffer + if err := printer.Fprint(&buf, f.Pkg.fset, x); err != nil { + panic(err) + } + return buf.String() +} + +// CommentMap builds a comment map for the file. 
+func (f *File) CommentMap() ast.CommentMap { + return ast.NewCommentMap(f.Pkg.fset, f.AST, f.AST.Comments) +} + +var basicTypeKinds = map[types.BasicKind]string{ + types.UntypedBool: "bool", + types.UntypedInt: "int", + types.UntypedRune: "rune", + types.UntypedFloat: "float64", + types.UntypedComplex: "complex128", + types.UntypedString: "string", +} + +// IsUntypedConst reports whether expr is an untyped constant, +// and indicates what its default type is. +// scope may be nil. +func (f *File) IsUntypedConst(expr ast.Expr) (defType string, ok bool) { + // Re-evaluate expr outside of its context to see if it's untyped. + // (An expr evaluated within, for example, an assignment context will get the type of the LHS.) + exprStr := f.Render(expr) + tv, err := types.Eval(f.Pkg.fset, f.Pkg.TypesPkg, expr.Pos(), exprStr) + if err != nil { + return "", false + } + if b, ok := tv.Type.(*types.Basic); ok { + if dt, ok := basicTypeKinds[b.Kind()]; ok { + return dt, true + } + } + + return "", false +} + +func (f *File) isMain() bool { + if f.AST.Name.Name == "main" { + return true + } + return false +} + +const directiveSpecifyDisableReason = "specify-disable-reason" + +func (f *File) lint(rules []Rule, config Config, failures chan Failure) { + rulesConfig := config.Rules + _, mustSpecifyDisableReason := config.Directives[directiveSpecifyDisableReason] + disabledIntervals := f.disabledIntervals(rules, mustSpecifyDisableReason, failures) + for _, currentRule := range rules { + ruleConfig := rulesConfig[currentRule.Name()] + currentFailures := currentRule.Apply(f, ruleConfig.Arguments) + for idx, failure := range currentFailures { + if failure.RuleName == "" { + failure.RuleName = currentRule.Name() + } + if failure.Node != nil { + failure.Position = ToFailurePosition(failure.Node.Pos(), failure.Node.End(), f) + } + currentFailures[idx] = failure + } + currentFailures = f.filterFailures(currentFailures, disabledIntervals) + for _, failure := range currentFailures { + if failure.Confidence >= config.Confidence { + failures <- failure + } + } + } +} + +type enableDisableConfig struct { + enabled bool + position int +} + +const directiveRE = `^//[\s]*revive:(enable|disable)(?:-(line|next-line))?(?::([^\s]+))?[\s]*(?: (.+))?$` +const directivePos = 1 +const modifierPos = 2 +const rulesPos = 3 +const reasonPos = 4 + +var re = regexp.MustCompile(directiveRE) + +func (f *File) disabledIntervals(rules []Rule, mustSpecifyDisableReason bool, failures chan Failure) disabledIntervalsMap { + enabledDisabledRulesMap := make(map[string][]enableDisableConfig) + + getEnabledDisabledIntervals := func() disabledIntervalsMap { + result := make(disabledIntervalsMap) + + for ruleName, disabledArr := range enabledDisabledRulesMap { + ruleResult := []DisabledInterval{} + for i := 0; i < len(disabledArr); i++ { + interval := DisabledInterval{ + RuleName: ruleName, + From: token.Position{ + Filename: f.Name, + Line: disabledArr[i].position, + }, + To: token.Position{ + Filename: f.Name, + Line: math.MaxInt32, + }, + } + if i%2 == 0 { + ruleResult = append(ruleResult, interval) + } else { + ruleResult[len(ruleResult)-1].To.Line = disabledArr[i].position + } + } + result[ruleName] = ruleResult + } + + return result + } + + handleConfig := func(isEnabled bool, line int, name string) { + existing, ok := enabledDisabledRulesMap[name] + if !ok { + existing = []enableDisableConfig{} + enabledDisabledRulesMap[name] = existing + } + if (len(existing) > 1 && existing[len(existing)-1].enabled == isEnabled) || + (len(existing) == 0 && 
isEnabled) { + return + } + existing = append(existing, enableDisableConfig{ + enabled: isEnabled, + position: line, + }) + enabledDisabledRulesMap[name] = existing + } + + handleRules := func(filename, modifier string, isEnabled bool, line int, ruleNames []string) []DisabledInterval { + var result []DisabledInterval + for _, name := range ruleNames { + if modifier == "line" { + handleConfig(isEnabled, line, name) + handleConfig(!isEnabled, line, name) + } else if modifier == "next-line" { + handleConfig(isEnabled, line+1, name) + handleConfig(!isEnabled, line+1, name) + } else { + handleConfig(isEnabled, line, name) + } + } + return result + } + + handleComment := func(filename string, c *ast.CommentGroup, line int) { + comments := c.List + for _, c := range comments { + match := re.FindStringSubmatch(c.Text) + if len(match) == 0 { + continue + } + + ruleNames := []string{} + tempNames := strings.Split(match[rulesPos], ",") + for _, name := range tempNames { + name = strings.Trim(name, "\n") + if len(name) > 0 { + ruleNames = append(ruleNames, name) + } + } + + mustCheckDisablingReason := mustSpecifyDisableReason && match[directivePos] == "disable" + if mustCheckDisablingReason && strings.Trim(match[reasonPos], " ") == "" { + failures <- Failure{ + Confidence: 1, + RuleName: directiveSpecifyDisableReason, + Failure: "reason of lint disabling not found", + Position: ToFailurePosition(c.Pos(), c.End(), f), + Node: c, + } + continue // skip this linter disabling directive + } + + // TODO: optimize + if len(ruleNames) == 0 { + for _, rule := range rules { + ruleNames = append(ruleNames, rule.Name()) + } + } + + handleRules(filename, match[modifierPos], match[directivePos] == "enable", line, ruleNames) + } + } + + comments := f.AST.Comments + for _, c := range comments { + handleComment(f.Name, c, f.ToPosition(c.End()).Line) + } + + return getEnabledDisabledIntervals() +} + +func (f *File) filterFailures(failures []Failure, disabledIntervals disabledIntervalsMap) []Failure { + result := []Failure{} + for _, failure := range failures { + fStart := failure.Position.Start.Line + fEnd := failure.Position.End.Line + intervals, ok := disabledIntervals[failure.RuleName] + if !ok { + result = append(result, failure) + } else { + include := true + for _, interval := range intervals { + intStart := interval.From.Line + intEnd := interval.To.Line + if (fStart >= intStart && fStart <= intEnd) || + (fEnd >= intStart && fEnd <= intEnd) { + include = false + break + } + } + if include { + result = append(result, failure) + } + } + } + return result +} diff --git a/vendor/github.com/mgechev/revive/lint/formatter.go b/vendor/github.com/mgechev/revive/lint/formatter.go new file mode 100644 index 000000000..7c19af278 --- /dev/null +++ b/vendor/github.com/mgechev/revive/lint/formatter.go @@ -0,0 +1,14 @@ +package lint + +// FormatterMetadata configuration of a formatter +type FormatterMetadata struct { + Name string + Description string + Sample string +} + +// Formatter defines an interface for failure formatters +type Formatter interface { + Format(<-chan Failure, Config) (string, error) + Name() string +} diff --git a/vendor/github.com/mgechev/revive/lint/linter.go b/vendor/github.com/mgechev/revive/lint/linter.go new file mode 100644 index 000000000..cdca84fb5 --- /dev/null +++ b/vendor/github.com/mgechev/revive/lint/linter.go @@ -0,0 +1,99 @@ +package lint + +import ( + "bufio" + "bytes" + "fmt" + "go/token" + "os" + "sync" +) + +// ReadFile defines an abstraction for reading files. 
+type ReadFile func(path string) (result []byte, err error) + +type disabledIntervalsMap = map[string][]DisabledInterval + +// Linter is used for linting set of files. +type Linter struct { + reader ReadFile +} + +// New creates a new Linter +func New(reader ReadFile) Linter { + return Linter{reader: reader} +} + +var ( + genHdr = []byte("// Code generated ") + genFtr = []byte(" DO NOT EDIT.") +) + +// Lint lints a set of files with the specified rule. +func (l *Linter) Lint(packages [][]string, ruleSet []Rule, config Config) (<-chan Failure, error) { + failures := make(chan Failure) + + var wg sync.WaitGroup + for _, pkg := range packages { + wg.Add(1) + go func(pkg []string) { + if err := l.lintPackage(pkg, ruleSet, config, failures); err != nil { + fmt.Fprintln(os.Stderr, err) + os.Exit(1) + } + defer wg.Done() + }(pkg) + } + + go func() { + wg.Wait() + close(failures) + }() + + return failures, nil +} + +func (l *Linter) lintPackage(filenames []string, ruleSet []Rule, config Config, failures chan Failure) error { + pkg := &Package{ + fset: token.NewFileSet(), + files: map[string]*File{}, + mu: sync.Mutex{}, + } + for _, filename := range filenames { + content, err := l.reader(filename) + if err != nil { + return err + } + if isGenerated(content) && !config.IgnoreGeneratedHeader { + continue + } + + file, err := NewFile(filename, content, pkg) + if err != nil { + return err + } + pkg.files[filename] = file + } + + if len(pkg.files) == 0 { + return nil + } + + pkg.lint(ruleSet, config, failures) + + return nil +} + +// isGenerated reports whether the source file is generated code +// according the rules from https://golang.org/s/generatedcode. +// This is inherited from the original go lint. +func isGenerated(src []byte) bool { + sc := bufio.NewScanner(bytes.NewReader(src)) + for sc.Scan() { + b := sc.Bytes() + if bytes.HasPrefix(b, genHdr) && bytes.HasSuffix(b, genFtr) && len(b) >= len(genHdr)+len(genFtr) { + return true + } + } + return false +} diff --git a/vendor/github.com/mgechev/revive/lint/package.go b/vendor/github.com/mgechev/revive/lint/package.go new file mode 100644 index 000000000..7b6046fd7 --- /dev/null +++ b/vendor/github.com/mgechev/revive/lint/package.go @@ -0,0 +1,178 @@ +package lint + +import ( + "go/ast" + "go/token" + "go/types" + "sync" + + "golang.org/x/tools/go/gcexportdata" +) + +// Package represents a package in the project. +type Package struct { + fset *token.FileSet + files map[string]*File + + TypesPkg *types.Package + TypesInfo *types.Info + + // sortable is the set of types in the package that implement sort.Interface. + Sortable map[string]bool + // main is whether this is a "main" package. + main int + mu sync.Mutex +} + +var newImporter = func(fset *token.FileSet) types.ImporterFrom { + return gcexportdata.NewImporter(fset, make(map[string]*types.Package)) +} + +var ( + trueValue = 1 + falseValue = 2 + notSet = 3 +) + +// IsMain returns if that's the main package. +func (p *Package) IsMain() bool { + if p.main == trueValue { + return true + } else if p.main == falseValue { + return false + } + for _, f := range p.files { + if f.isMain() { + p.main = trueValue + return true + } + } + p.main = falseValue + return false +} + +// TypeCheck performs type checking for given package. +func (p *Package) TypeCheck() error { + p.mu.Lock() + // If type checking has already been performed + // skip it. 
+ if p.TypesInfo != nil || p.TypesPkg != nil { + p.mu.Unlock() + return nil + } + config := &types.Config{ + // By setting a no-op error reporter, the type checker does as much work as possible. + Error: func(error) {}, + Importer: newImporter(p.fset), + } + info := &types.Info{ + Types: make(map[ast.Expr]types.TypeAndValue), + Defs: make(map[*ast.Ident]types.Object), + Uses: make(map[*ast.Ident]types.Object), + Scopes: make(map[ast.Node]*types.Scope), + } + var anyFile *File + var astFiles []*ast.File + for _, f := range p.files { + anyFile = f + astFiles = append(astFiles, f.AST) + } + + typesPkg, err := check(config, anyFile.AST.Name.Name, p.fset, astFiles, info) + + // Remember the typechecking info, even if config.Check failed, + // since we will get partial information. + p.TypesPkg = typesPkg + p.TypesInfo = info + p.mu.Unlock() + return err +} + +// check function encapsulates the call to go/types.Config.Check method and +// recovers if the called method panics (see issue #59) +func check(config *types.Config, n string, fset *token.FileSet, astFiles []*ast.File, info *types.Info) (p *types.Package, err error) { + defer func() { + if r := recover(); r != nil { + err, _ = r.(error) + p = nil + return + } + }() + + return config.Check(n, fset, astFiles, info) +} + +// TypeOf returns the type of an expression. +func (p *Package) TypeOf(expr ast.Expr) types.Type { + if p.TypesInfo == nil { + return nil + } + return p.TypesInfo.TypeOf(expr) +} + +type walker struct { + nmap map[string]int + has map[string]int +} + +func (w *walker) Visit(n ast.Node) ast.Visitor { + fn, ok := n.(*ast.FuncDecl) + if !ok || fn.Recv == nil || len(fn.Recv.List) == 0 { + return w + } + // TODO(dsymonds): We could check the signature to be more precise. + recv := receiverType(fn) + if i, ok := w.nmap[fn.Name.Name]; ok { + w.has[recv] |= i + } + return w +} + +func (p *Package) scanSortable() { + p.Sortable = make(map[string]bool) + + // bitfield for which methods exist on each type. + const ( + Len = 1 << iota + Less + Swap + ) + nmap := map[string]int{"Len": Len, "Less": Less, "Swap": Swap} + has := make(map[string]int) + for _, f := range p.files { + ast.Walk(&walker{nmap, has}, f.AST) + } + for typ, ms := range has { + if ms == Len|Less|Swap { + p.Sortable[typ] = true + } + } +} + +// receiverType returns the named type of the method receiver, sans "*", +// or "invalid-type" if fn.Recv is ill formed. +func receiverType(fn *ast.FuncDecl) string { + switch e := fn.Recv.List[0].Type.(type) { + case *ast.Ident: + return e.Name + case *ast.StarExpr: + if id, ok := e.X.(*ast.Ident); ok { + return id.Name + } + } + // The parser accepts much more than just the legal forms. + return "invalid-type" +} + +func (p *Package) lint(rules []Rule, config Config, failures chan Failure) { + p.scanSortable() + var wg sync.WaitGroup + for _, file := range p.files { + wg.Add(1) + go (func(file *File) { + file.lint(rules, config, failures) + defer wg.Done() + })(file) + } + wg.Wait() +} diff --git a/vendor/github.com/mgechev/revive/lint/rule.go b/vendor/github.com/mgechev/revive/lint/rule.go new file mode 100644 index 000000000..815abfdd8 --- /dev/null +++ b/vendor/github.com/mgechev/revive/lint/rule.go @@ -0,0 +1,31 @@ +package lint + +import ( + "go/token" +) + +// DisabledInterval contains a single disabled interval and the associated rule name. 
+type DisabledInterval struct { + From token.Position + To token.Position + RuleName string +} + +// Rule defines an abstract rule interaface +type Rule interface { + Name() string + Apply(*File, Arguments) []Failure +} + +// AbstractRule defines an abstract rule. +type AbstractRule struct { + Failures []Failure +} + +// ToFailurePosition returns the failure position. +func ToFailurePosition(start token.Pos, end token.Pos, file *File) FailurePosition { + return FailurePosition{ + Start: file.ToPosition(start), + End: file.ToPosition(end), + } +} diff --git a/vendor/github.com/mgechev/revive/lint/utils.go b/vendor/github.com/mgechev/revive/lint/utils.go new file mode 100644 index 000000000..28657c6df --- /dev/null +++ b/vendor/github.com/mgechev/revive/lint/utils.go @@ -0,0 +1,128 @@ +package lint + +import ( + "strings" + "unicode" +) + +// Name returns a different name if it should be different. +func Name(name string, whitelist, blacklist []string) (should string) { + // Fast path for simple cases: "_" and all lowercase. + if name == "_" { + return name + } + allLower := true + for _, r := range name { + if !unicode.IsLower(r) { + allLower = false + break + } + } + if allLower { + return name + } + + // Split camelCase at any lower->upper transition, and split on underscores. + // Check each word for common initialisms. + runes := []rune(name) + w, i := 0, 0 // index of start of word, scan + for i+1 <= len(runes) { + eow := false // whether we hit the end of a word + if i+1 == len(runes) { + eow = true + } else if runes[i+1] == '_' { + // underscore; shift the remainder forward over any run of underscores + eow = true + n := 1 + for i+n+1 < len(runes) && runes[i+n+1] == '_' { + n++ + } + + // Leave at most one underscore if the underscore is between two digits + if i+n+1 < len(runes) && unicode.IsDigit(runes[i]) && unicode.IsDigit(runes[i+n+1]) { + n-- + } + + copy(runes[i+1:], runes[i+n+1:]) + runes = runes[:len(runes)-n] + } else if unicode.IsLower(runes[i]) && !unicode.IsLower(runes[i+1]) { + // lower->non-lower + eow = true + } + i++ + if !eow { + continue + } + + // [w,i) is a word. + word := string(runes[w:i]) + ignoreInitWarnings := map[string]bool{} + for _, i := range whitelist { + ignoreInitWarnings[i] = true + } + + extraInits := map[string]bool{} + for _, i := range blacklist { + extraInits[i] = true + } + + if u := strings.ToUpper(word); (commonInitialisms[u] || extraInits[u]) && !ignoreInitWarnings[u] { + // Keep consistent case, which is lowercase only at the start. + if w == 0 && unicode.IsLower(runes[w]) { + u = strings.ToLower(u) + } + // All the common initialisms are ASCII, + // so we can replace the bytes exactly. + copy(runes[w:], []rune(u)) + } else if w > 0 && strings.ToLower(word) == word { + // already all lowercase, and not the first word, so uppercase the first character. + runes[w] = unicode.ToUpper(runes[w]) + } + w = i + } + return string(runes) +} + +// commonInitialisms is a set of common initialisms. +// Only add entries that are highly unlikely to be non-initialisms. +// For instance, "ID" is fine (Freudian code is rare), but "AND" is not. 
+var commonInitialisms = map[string]bool{ + "ACL": true, + "API": true, + "ASCII": true, + "CPU": true, + "CSS": true, + "DNS": true, + "EOF": true, + "GUID": true, + "HTML": true, + "HTTP": true, + "HTTPS": true, + "ID": true, + "IP": true, + "JSON": true, + "LHS": true, + "QPS": true, + "RAM": true, + "RHS": true, + "RPC": true, + "SLA": true, + "SMTP": true, + "SQL": true, + "SSH": true, + "TCP": true, + "TLS": true, + "TTL": true, + "UDP": true, + "UI": true, + "UID": true, + "UUID": true, + "URI": true, + "URL": true, + "UTF8": true, + "VM": true, + "XML": true, + "XMPP": true, + "XSRF": true, + "XSS": true, +} diff --git a/vendor/github.com/mgechev/revive/rule/add-constant.go b/vendor/github.com/mgechev/revive/rule/add-constant.go new file mode 100644 index 000000000..bc6268ee1 --- /dev/null +++ b/vendor/github.com/mgechev/revive/rule/add-constant.go @@ -0,0 +1,152 @@ +package rule + +import ( + "fmt" + "go/ast" + "strconv" + "strings" + + "github.com/mgechev/revive/lint" +) + +const ( + defaultStrLitLimit = 2 + kindFLOAT = "FLOAT" + kindINT = "INT" + kindSTRING = "STRING" +) + +type whiteList map[string]map[string]bool + +func newWhiteList() whiteList { + return map[string]map[string]bool{kindINT: {}, kindFLOAT: {}, kindSTRING: {}} +} + +func (wl whiteList) add(kind string, list string) { + elems := strings.Split(list, ",") + for _, e := range elems { + wl[kind][e] = true + } +} + +// AddConstantRule lints unused params in functions. +type AddConstantRule struct{} + +// Apply applies the rule to given file. +func (r *AddConstantRule) Apply(file *lint.File, arguments lint.Arguments) []lint.Failure { + strLitLimit := defaultStrLitLimit + var whiteList = newWhiteList() + if len(arguments) > 0 { + args, ok := arguments[0].(map[string]interface{}) + if !ok { + panic(fmt.Sprintf("Invalid argument to the add-constant rule. Expecting a k,v map, got %T", arguments[0])) + } + for k, v := range args { + kind := "" + switch k { + case "allowFloats": + kind = kindFLOAT + fallthrough + case "allowInts": + if kind == "" { + kind = kindINT + } + fallthrough + case "allowStrs": + if kind == "" { + kind = kindSTRING + } + list, ok := v.(string) + if !ok { + panic(fmt.Sprintf("Invalid argument to the add-constant rule, string expected. Got '%v' (%T)", v, v)) + } + whiteList.add(kind, list) + case "maxLitCount": + sl, ok := v.(string) + if !ok { + panic(fmt.Sprintf("Invalid argument to the add-constant rule, expecting string representation of an integer. Got '%v' (%T)", v, v)) + } + + limit, err := strconv.Atoi(sl) + if err != nil { + panic(fmt.Sprintf("Invalid argument to the add-constant rule, expecting string representation of an integer. Got '%v'", v)) + } + strLitLimit = limit + } + } + } + + var failures []lint.Failure + + onFailure := func(failure lint.Failure) { + failures = append(failures, failure) + } + + w := lintAddConstantRule{onFailure: onFailure, strLits: make(map[string]int, 0), strLitLimit: strLitLimit, whiteLst: whiteList} + + ast.Walk(w, file.AST) + + return failures +} + +// Name returns the rule name. 
+func (r *AddConstantRule) Name() string { + return "add-constant" +} + +type lintAddConstantRule struct { + onFailure func(lint.Failure) + strLits map[string]int + strLitLimit int + whiteLst whiteList +} + +func (w lintAddConstantRule) Visit(node ast.Node) ast.Visitor { + switch n := node.(type) { + case *ast.GenDecl: + return nil // skip declarations + case *ast.BasicLit: + switch kind := n.Kind.String(); kind { + case kindFLOAT, kindINT: + w.checkNumLit(kind, n) + case kindSTRING: + w.checkStrLit(n) + } + } + + return w + +} + +func (w lintAddConstantRule) checkStrLit(n *ast.BasicLit) { + if w.whiteLst[kindSTRING][n.Value] { + return + } + + count := w.strLits[n.Value] + if count >= 0 { + w.strLits[n.Value] = count + 1 + if w.strLits[n.Value] > w.strLitLimit { + w.onFailure(lint.Failure{ + Confidence: 1, + Node: n, + Category: "style", + Failure: fmt.Sprintf("string literal %s appears, at least, %d times, create a named constant for it", n.Value, w.strLits[n.Value]), + }) + w.strLits[n.Value] = -1 // mark it to avoid failing again on the same literal + } + } +} + +func (w lintAddConstantRule) checkNumLit(kind string, n *ast.BasicLit) { + if w.whiteLst[kind][n.Value] { + return + } + + w.onFailure(lint.Failure{ + Confidence: 1, + Node: n, + Category: "style", + Failure: fmt.Sprintf("avoid magic numbers like '%s', create a named constant for it", n.Value), + }) +} diff --git a/vendor/github.com/mgechev/revive/rule/argument-limit.go b/vendor/github.com/mgechev/revive/rule/argument-limit.go new file mode 100644 index 000000000..03bfa7f3d --- /dev/null +++ b/vendor/github.com/mgechev/revive/rule/argument-limit.go @@ -0,0 +1,65 @@ +package rule + +import ( + "fmt" + "go/ast" + + "github.com/mgechev/revive/lint" +) + +// ArgumentsLimitRule lints given else constructs. +type ArgumentsLimitRule struct{} + +// Apply applies the rule to given file. +func (r *ArgumentsLimitRule) Apply(file *lint.File, arguments lint.Arguments) []lint.Failure { + checkNumberOfArguments(1, arguments, r.Name()) + + total, ok := arguments[0].(int64) // Alt. non panicking version + if !ok { + panic(`invalid value passed as argument number to the "argument-list" rule`) + } + + var failures []lint.Failure + + walker := lintArgsNum{ + total: int(total), + onFailure: func(failure lint.Failure) { + failures = append(failures, failure) + }, + } + + ast.Walk(walker, file.AST) + + return failures +} + +// Name returns the rule name. +func (r *ArgumentsLimitRule) Name() string { + return "argument-limit" +} + +type lintArgsNum struct { + total int + onFailure func(lint.Failure) +} + +func (w lintArgsNum) Visit(n ast.Node) ast.Visitor { + node, ok := n.(*ast.FuncDecl) + if ok { + num := 0 + for _, l := range node.Type.Params.List { + for range l.Names { + num++ + } + } + if num > w.total { + w.onFailure(lint.Failure{ + Confidence: 1, + Failure: fmt.Sprintf("maximum number of arguments per function exceeded; max %d but got %d", w.total, num), + Node: node.Type, + }) + return w + } + } + return w +} diff --git a/vendor/github.com/mgechev/revive/rule/atomic.go b/vendor/github.com/mgechev/revive/rule/atomic.go new file mode 100644 index 000000000..572e141da --- /dev/null +++ b/vendor/github.com/mgechev/revive/rule/atomic.go @@ -0,0 +1,94 @@ +package rule + +import ( + "go/ast" + "go/token" + "go/types" + + "github.com/mgechev/revive/lint" +) + +// AtomicRule lints given else constructs. +type AtomicRule struct{} + +// Apply applies the rule to given file. 
+func (r *AtomicRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { + var failures []lint.Failure + walker := atomic{ + pkgTypesInfo: file.Pkg.TypesInfo, + onFailure: func(failure lint.Failure) { + failures = append(failures, failure) + }, + } + + ast.Walk(walker, file.AST) + + return failures +} + +// Name returns the rule name. +func (r *AtomicRule) Name() string { + return "atomic" +} + +type atomic struct { + pkgTypesInfo *types.Info + onFailure func(lint.Failure) +} + +func (w atomic) Visit(node ast.Node) ast.Visitor { + n, ok := node.(*ast.AssignStmt) + if !ok { + return w + } + + if len(n.Lhs) != len(n.Rhs) { + return nil // skip assignment sub-tree + } + if len(n.Lhs) == 1 && n.Tok == token.DEFINE { + return nil // skip assignment sub-tree + } + + for i, right := range n.Rhs { + call, ok := right.(*ast.CallExpr) + if !ok { + continue + } + sel, ok := call.Fun.(*ast.SelectorExpr) + if !ok { + continue + } + pkgIdent, _ := sel.X.(*ast.Ident) + if w.pkgTypesInfo != nil { + pkgName, ok := w.pkgTypesInfo.Uses[pkgIdent].(*types.PkgName) + if !ok || pkgName.Imported().Path() != "sync/atomic" { + continue + } + } + + switch sel.Sel.Name { + case "AddInt32", "AddInt64", "AddUint32", "AddUint64", "AddUintptr": + left := n.Lhs[i] + if len(call.Args) != 2 { + continue + } + arg := call.Args[0] + broken := false + + if uarg, ok := arg.(*ast.UnaryExpr); ok && uarg.Op == token.AND { + broken = gofmt(left) == gofmt(uarg.X) + } else if star, ok := left.(*ast.StarExpr); ok { + broken = gofmt(star.X) == gofmt(arg) + } + + if broken { + w.onFailure(lint.Failure{ + Confidence: 1, + Failure: "direct assignment to atomic value", + Node: n, + }) + } + } + } + return w +} diff --git a/vendor/github.com/mgechev/revive/rule/bare-return.go b/vendor/github.com/mgechev/revive/rule/bare-return.go new file mode 100644 index 000000000..3ee4c4adc --- /dev/null +++ b/vendor/github.com/mgechev/revive/rule/bare-return.go @@ -0,0 +1,84 @@ +package rule + +import ( + "go/ast" + + "github.com/mgechev/revive/lint" +) + +// BareReturnRule lints given else constructs. +type BareReturnRule struct{} + +// Apply applies the rule to given file. +func (r *BareReturnRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { + var failures []lint.Failure + + onFailure := func(failure lint.Failure) { + failures = append(failures, failure) + } + + w := lintBareReturnRule{onFailure: onFailure} + ast.Walk(w, file.AST) + return failures +} + +// Name returns the rule name. 
+func (r *BareReturnRule) Name() string { + return "bare-return" +} + +type lintBareReturnRule struct { + onFailure func(lint.Failure) +} + +func (w lintBareReturnRule) Visit(node ast.Node) ast.Visitor { + switch n := node.(type) { + case *ast.FuncDecl: + w.checkFunc(n.Type.Results, n.Body) + case *ast.FuncLit: // to cope with deferred functions and go-routines + w.checkFunc(n.Type.Results, n.Body) + } + + return w +} + +// checkFunc will verify if the given function has named result and bare returns +func (w lintBareReturnRule) checkFunc(results *ast.FieldList, body *ast.BlockStmt) { + hasNamedResults := results != nil && len(results.List) > 0 && results.List[0].Names != nil + if !hasNamedResults || body == nil { + return // nothing to do + } + + brf := bareReturnFinder{w.onFailure} + ast.Walk(brf, body) +} + +type bareReturnFinder struct { + onFailure func(lint.Failure) +} + +func (w bareReturnFinder) Visit(node ast.Node) ast.Visitor { + _, ok := node.(*ast.FuncLit) + if ok { + // skip analysing function literals + // they will analyzed by the lintBareReturnRule.Visit method + return nil + } + + rs, ok := node.(*ast.ReturnStmt) + if !ok { + return w + } + + if len(rs.Results) > 0 { + return w + } + + w.onFailure(lint.Failure{ + Confidence: 1, + Node: rs, + Failure: "avoid using bare returns, please add return expressions", + }) + + return w +} diff --git a/vendor/github.com/mgechev/revive/rule/blank-imports.go b/vendor/github.com/mgechev/revive/rule/blank-imports.go new file mode 100644 index 000000000..9e8b8fc00 --- /dev/null +++ b/vendor/github.com/mgechev/revive/rule/blank-imports.go @@ -0,0 +1,75 @@ +package rule + +import ( + "go/ast" + "strings" + + "github.com/mgechev/revive/lint" +) + +// BlankImportsRule lints given else constructs. +type BlankImportsRule struct{} + +// Name returns the rule name. +func (r *BlankImportsRule) Name() string { + return "blank-imports" +} + +// Apply applies the rule to given file. +func (r *BlankImportsRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { + if file.Pkg.IsMain() || file.IsTest() { + return nil + } + + const ( + message = "a blank import should be only in a main or test package, or have a comment justifying it" + category = "imports" + + embedImportPath = `"embed"` + ) + + var failures []lint.Failure + + // The first element of each contiguous group of blank imports should have + // an explanatory comment of some kind. + for i, imp := range file.AST.Imports { + pos := file.ToPosition(imp.Pos()) + + if !isBlank(imp.Name) { + continue // Ignore non-blank imports. + } + + if i > 0 { + prev := file.AST.Imports[i-1] + prevPos := file.ToPosition(prev.Pos()) + + isSubsequentBlancInAGroup := isBlank(prev.Name) && prevPos.Line+1 == pos.Line && prev.Path.Value != embedImportPath + if isSubsequentBlancInAGroup { + continue + } + } + + if imp.Path.Value == embedImportPath && r.fileHasValidEmbedComment(file.AST) { + continue + } + + // This is the first blank import of a group. 
+ if imp.Doc == nil && imp.Comment == nil { + failures = append(failures, lint.Failure{Failure: message, Category: category, Node: imp, Confidence: 1}) + } + } + + return failures +} + +func (r *BlankImportsRule) fileHasValidEmbedComment(fileAst *ast.File) bool { + for _, commentGroup := range fileAst.Comments { + for _, comment := range commentGroup.List { + if strings.HasPrefix(comment.Text, "//go:embed ") { + return true + } + } + } + + return false +} diff --git a/vendor/github.com/mgechev/revive/rule/bool-literal-in-expr.go b/vendor/github.com/mgechev/revive/rule/bool-literal-in-expr.go new file mode 100644 index 000000000..0a4e696c6 --- /dev/null +++ b/vendor/github.com/mgechev/revive/rule/bool-literal-in-expr.go @@ -0,0 +1,73 @@ +package rule + +import ( + "go/ast" + "go/token" + + "github.com/mgechev/revive/lint" +) + +// BoolLiteralRule warns when logic expressions contains Boolean literals. +type BoolLiteralRule struct{} + +// Apply applies the rule to given file. +func (r *BoolLiteralRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { + var failures []lint.Failure + + onFailure := func(failure lint.Failure) { + failures = append(failures, failure) + } + + astFile := file.AST + w := &lintBoolLiteral{astFile, onFailure} + ast.Walk(w, astFile) + + return failures +} + +// Name returns the rule name. +func (r *BoolLiteralRule) Name() string { + return "bool-literal-in-expr" +} + +type lintBoolLiteral struct { + file *ast.File + onFailure func(lint.Failure) +} + +func (w *lintBoolLiteral) Visit(node ast.Node) ast.Visitor { + switch n := node.(type) { + case *ast.BinaryExpr: + if !isBoolOp(n.Op) { + return w + } + + lexeme, ok := isExprABooleanLit(n.X) + if !ok { + lexeme, ok = isExprABooleanLit(n.Y) + + if !ok { + return w + } + } + + isConstant := (n.Op == token.LAND && lexeme == "false") || (n.Op == token.LOR && lexeme == "true") + + if isConstant { + w.addFailure(n, "Boolean expression seems to always evaluate to "+lexeme, "logic") + } else { + w.addFailure(n, "omit Boolean literal in expression", "style") + } + } + + return w +} + +func (w lintBoolLiteral) addFailure(node ast.Node, msg string, cat string) { + w.onFailure(lint.Failure{ + Confidence: 1, + Node: node, + Category: cat, + Failure: msg, + }) +} diff --git a/vendor/github.com/mgechev/revive/rule/call-to-gc.go b/vendor/github.com/mgechev/revive/rule/call-to-gc.go new file mode 100644 index 000000000..e05fa6924 --- /dev/null +++ b/vendor/github.com/mgechev/revive/rule/call-to-gc.go @@ -0,0 +1,70 @@ +package rule + +import ( + "go/ast" + + "github.com/mgechev/revive/lint" +) + +// CallToGCRule lints calls to the garbage collector. +type CallToGCRule struct{} + +// Apply applies the rule to given file. +func (r *CallToGCRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { + var failures []lint.Failure + onFailure := func(failure lint.Failure) { + failures = append(failures, failure) + } + + var gcTriggeringFunctions = map[string]map[string]bool{ + "runtime": {"GC": true}, + } + + w := lintCallToGC{onFailure, gcTriggeringFunctions} + ast.Walk(w, file.AST) + + return failures +} + +// Name returns the rule name. 
+func (r *CallToGCRule) Name() string { + return "call-to-gc" +} + +type lintCallToGC struct { + onFailure func(lint.Failure) + gcTriggeringFunctions map[string]map[string]bool +} + +func (w lintCallToGC) Visit(node ast.Node) ast.Visitor { + ce, ok := node.(*ast.CallExpr) + if !ok { + return w // nothing to do, the node is not a call + } + + fc, ok := ce.Fun.(*ast.SelectorExpr) + if !ok { + return nil // nothing to do, the call is not of the form pkg.func(...) + } + + id, ok := fc.X.(*ast.Ident) + + if !ok { + return nil // in case X is not an id (it should be!) + } + + fn := fc.Sel.Name + pkg := id.Name + if !w.gcTriggeringFunctions[pkg][fn] { + return nil // it isn't a call to a GC triggering function + } + + w.onFailure(lint.Failure{ + Confidence: 1, + Node: node, + Category: "bad practice", + Failure: "explicit call to the garbage collector", + }) + + return w +} diff --git a/vendor/github.com/mgechev/revive/rule/cognitive-complexity.go b/vendor/github.com/mgechev/revive/rule/cognitive-complexity.go new file mode 100644 index 000000000..7176c9957 --- /dev/null +++ b/vendor/github.com/mgechev/revive/rule/cognitive-complexity.go @@ -0,0 +1,193 @@ +package rule + +import ( + "fmt" + "go/ast" + "go/token" + + "github.com/mgechev/revive/lint" + "golang.org/x/tools/go/ast/astutil" +) + +// CognitiveComplexityRule lints given else constructs. +type CognitiveComplexityRule struct{} + +// Apply applies the rule to given file. +func (r *CognitiveComplexityRule) Apply(file *lint.File, arguments lint.Arguments) []lint.Failure { + checkNumberOfArguments(1, arguments, r.Name()) + + complexity, ok := arguments[0].(int64) + if !ok { + panic(fmt.Sprintf("invalid argument type for cognitive-complexity, expected int64, got %T", arguments[0])) + } + + var failures []lint.Failure + + linter := cognitiveComplexityLinter{ + file: file, + maxComplexity: int(complexity), + onFailure: func(failure lint.Failure) { + failures = append(failures, failure) + }, + } + + linter.lint() + + return failures +} + +// Name returns the rule name. +func (r *CognitiveComplexityRule) Name() string { + return "cognitive-complexity" +} + +type cognitiveComplexityLinter struct { + file *lint.File + maxComplexity int + onFailure func(lint.Failure) +} + +func (w cognitiveComplexityLinter) lint() { + f := w.file + for _, decl := range f.AST.Decls { + if fn, ok := decl.(*ast.FuncDecl); ok && fn.Body != nil { + v := cognitiveComplexityVisitor{} + c := v.subTreeComplexity(fn.Body) + if c > w.maxComplexity { + w.onFailure(lint.Failure{ + Confidence: 1, + Category: "maintenance", + Failure: fmt.Sprintf("function %s has cognitive complexity %d (> max enabled %d)", funcName(fn), c, w.maxComplexity), + Node: fn, + }) + } + } + } +} + +type cognitiveComplexityVisitor struct { + complexity int + nestingLevel int +} + +// subTreeComplexity calculates the cognitive complexity of an AST-subtree. +func (v cognitiveComplexityVisitor) subTreeComplexity(n ast.Node) int { + ast.Walk(&v, n) + return v.complexity +} + +// Visit implements the ast.Visitor interface. +func (v *cognitiveComplexityVisitor) Visit(n ast.Node) ast.Visitor { + switch n := n.(type) { + case *ast.IfStmt: + targets := []ast.Node{n.Cond, n.Body, n.Else} + v.walk(1, targets...) + return nil + case *ast.ForStmt: + targets := []ast.Node{n.Cond, n.Body} + v.walk(1, targets...) 
+ return nil + case *ast.RangeStmt: + v.walk(1, n.Body) + return nil + case *ast.SelectStmt: + v.walk(1, n.Body) + return nil + case *ast.SwitchStmt: + v.walk(1, n.Body) + return nil + case *ast.TypeSwitchStmt: + v.walk(1, n.Body) + return nil + case *ast.FuncLit: + v.walk(0, n.Body) // do not increment the complexity, just do the nesting + return nil + case *ast.BinaryExpr: + v.complexity += v.binExpComplexity(n) + return nil // skip visiting binexp sub-tree (already visited by binExpComplexity) + case *ast.BranchStmt: + if n.Label != nil { + v.complexity++ + } + } + // TODO handle (at least) direct recursion + + return v +} + +func (v *cognitiveComplexityVisitor) walk(complexityIncrement int, targets ...ast.Node) { + v.complexity += complexityIncrement + v.nestingLevel + nesting := v.nestingLevel + v.nestingLevel++ + + for _, t := range targets { + if t == nil { + continue + } + + ast.Walk(v, t) + } + + v.nestingLevel = nesting +} + +func (cognitiveComplexityVisitor) binExpComplexity(n *ast.BinaryExpr) int { + calculator := binExprComplexityCalculator{opsStack: []token.Token{}} + + astutil.Apply(n, calculator.pre, calculator.post) + + return calculator.complexity +} + +type binExprComplexityCalculator struct { + complexity int + opsStack []token.Token // stack of bool operators + subexpStarted bool +} + +func (becc *binExprComplexityCalculator) pre(c *astutil.Cursor) bool { + switch n := c.Node().(type) { + case *ast.BinaryExpr: + isBoolOp := n.Op == token.LAND || n.Op == token.LOR + if !isBoolOp { + break + } + + ops := len(becc.opsStack) + // if + // is the first boolop in the expression OR + // is the first boolop inside a subexpression (...) OR + // is not the same to the previous one + // then + // increment complexity + if ops == 0 || becc.subexpStarted || n.Op != becc.opsStack[ops-1] { + becc.complexity++ + becc.subexpStarted = false + } + + becc.opsStack = append(becc.opsStack, n.Op) + case *ast.ParenExpr: + becc.subexpStarted = true + } + + return true +} + +func (becc *binExprComplexityCalculator) post(c *astutil.Cursor) bool { + switch n := c.Node().(type) { + case *ast.BinaryExpr: + isBoolOp := n.Op == token.LAND || n.Op == token.LOR + if !isBoolOp { + break + } + + ops := len(becc.opsStack) + if ops > 0 { + becc.opsStack = becc.opsStack[:ops-1] + } + case *ast.ParenExpr: + becc.subexpStarted = false + } + + return true +} diff --git a/vendor/github.com/mgechev/revive/rule/confusing-naming.go b/vendor/github.com/mgechev/revive/rule/confusing-naming.go new file mode 100644 index 000000000..143bb18c3 --- /dev/null +++ b/vendor/github.com/mgechev/revive/rule/confusing-naming.go @@ -0,0 +1,190 @@ +package rule + +import ( + "fmt" + "go/ast" + + "strings" + "sync" + + "github.com/mgechev/revive/lint" +) + +type referenceMethod struct { + fileName string + id *ast.Ident +} + +type pkgMethods struct { + pkg *lint.Package + methods map[string]map[string]*referenceMethod + mu *sync.Mutex +} + +type packages struct { + pkgs []pkgMethods + mu sync.Mutex +} + +func (ps *packages) methodNames(lp *lint.Package) pkgMethods { + ps.mu.Lock() + + for _, pkg := range ps.pkgs { + if pkg.pkg == lp { + ps.mu.Unlock() + return pkg + } + } + + pkgm := pkgMethods{pkg: lp, methods: make(map[string]map[string]*referenceMethod), mu: &sync.Mutex{}} + ps.pkgs = append(ps.pkgs, pkgm) + + ps.mu.Unlock() + return pkgm +} + +var allPkgs = packages{pkgs: make([]pkgMethods, 1)} + +// ConfusingNamingRule lints method names that differ only by capitalization +type ConfusingNamingRule struct{} + +// Apply applies 
the rule to given file. +func (r *ConfusingNamingRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { + var failures []lint.Failure + fileAst := file.AST + pkgm := allPkgs.methodNames(file.Pkg) + walker := lintConfusingNames{ + fileName: file.Name, + pkgm: pkgm, + onFailure: func(failure lint.Failure) { + failures = append(failures, failure) + }, + } + + ast.Walk(&walker, fileAst) + + return failures +} + +// Name returns the rule name. +func (r *ConfusingNamingRule) Name() string { + return "confusing-naming" +} + +//checkMethodName checks if a given method/function name is similar (just case differences) to other method/function of the same struct/file. +func checkMethodName(holder string, id *ast.Ident, w *lintConfusingNames) { + if id.Name == "init" && holder == defaultStructName { + // ignore init functions + return + } + + pkgm := w.pkgm + name := strings.ToUpper(id.Name) + + pkgm.mu.Lock() + defer pkgm.mu.Unlock() + + if pkgm.methods[holder] != nil { + if pkgm.methods[holder][name] != nil { + refMethod := pkgm.methods[holder][name] + // confusing names + var kind string + if holder == defaultStructName { + kind = "function" + } else { + kind = "method" + } + var fileName string + if w.fileName == refMethod.fileName { + fileName = "the same source file" + } else { + fileName = refMethod.fileName + } + w.onFailure(lint.Failure{ + Failure: fmt.Sprintf("Method '%s' differs only by capitalization to %s '%s' in %s", id.Name, kind, refMethod.id.Name, fileName), + Confidence: 1, + Node: id, + Category: "naming", + }) + + return + } + } else { + pkgm.methods[holder] = make(map[string]*referenceMethod, 1) + } + + // update the black list + if pkgm.methods[holder] == nil { + println("no entry for '", holder, "'") + } + pkgm.methods[holder][name] = &referenceMethod{fileName: w.fileName, id: id} +} + +type lintConfusingNames struct { + fileName string + pkgm pkgMethods + onFailure func(lint.Failure) +} + +const defaultStructName = "_" // used to map functions + +//getStructName of a function receiver. Defaults to defaultStructName +func getStructName(r *ast.FieldList) string { + result := defaultStructName + + if r == nil || len(r.List) < 1 { + return result + } + + t := r.List[0].Type + + if p, _ := t.(*ast.StarExpr); p != nil { // if a pointer receiver => dereference pointer receiver types + t = p.X + } + + if p, _ := t.(*ast.Ident); p != nil { + result = p.Name + } + + return result +} + +func checkStructFields(fields *ast.FieldList, structName string, w *lintConfusingNames) { + bl := make(map[string]bool, len(fields.List)) + for _, f := range fields.List { + for _, id := range f.Names { + normName := strings.ToUpper(id.Name) + if bl[normName] { + w.onFailure(lint.Failure{ + Failure: fmt.Sprintf("Field '%s' differs only by capitalization to other field in the struct type %s", id.Name, structName), + Confidence: 1, + Node: id, + Category: "naming", + }) + } else { + bl[normName] = true + } + } + } +} + +func (w *lintConfusingNames) Visit(n ast.Node) ast.Visitor { + switch v := n.(type) { + case *ast.FuncDecl: + // Exclude naming warnings for functions that are exported to C but + // not exported in the Go API. + // See https://github.com/golang/lint/issues/144. + if ast.IsExported(v.Name.Name) || !isCgoExported(v) { + checkMethodName(getStructName(v.Recv), v.Name, w) + } + case *ast.TypeSpec: + if s, ok := v.Type.(*ast.StructType); ok { + checkStructFields(s.Fields, v.Name.Name, w) + } + + default: + // will add other checks like field names, struct names, etc. 
+ } + + return w +} diff --git a/vendor/github.com/mgechev/revive/rule/confusing-results.go b/vendor/github.com/mgechev/revive/rule/confusing-results.go new file mode 100644 index 000000000..1d386b3db --- /dev/null +++ b/vendor/github.com/mgechev/revive/rule/confusing-results.go @@ -0,0 +1,67 @@ +package rule + +import ( + "go/ast" + + "github.com/mgechev/revive/lint" +) + +// ConfusingResultsRule lints given function declarations +type ConfusingResultsRule struct{} + +// Apply applies the rule to given file. +func (r *ConfusingResultsRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { + var failures []lint.Failure + + fileAst := file.AST + walker := lintConfusingResults{ + onFailure: func(failure lint.Failure) { + failures = append(failures, failure) + }, + } + + ast.Walk(walker, fileAst) + + return failures +} + +// Name returns the rule name. +func (r *ConfusingResultsRule) Name() string { + return "confusing-results" +} + +type lintConfusingResults struct { + onFailure func(lint.Failure) +} + +func (w lintConfusingResults) Visit(n ast.Node) ast.Visitor { + fn, ok := n.(*ast.FuncDecl) + if !ok || fn.Type.Results == nil || len(fn.Type.Results.List) < 2 { + return w + } + lastType := "" + for _, result := range fn.Type.Results.List { + if len(result.Names) > 0 { + return w + } + + t, ok := result.Type.(*ast.Ident) + if !ok { + return w + } + + if t.Name == lastType { + w.onFailure(lint.Failure{ + Node: n, + Confidence: 1, + Category: "naming", + Failure: "unnamed results of the same type may be confusing, consider using named results", + }) + break + } + lastType = t.Name + + } + + return w +} diff --git a/vendor/github.com/mgechev/revive/rule/constant-logical-expr.go b/vendor/github.com/mgechev/revive/rule/constant-logical-expr.go new file mode 100644 index 000000000..6a9156111 --- /dev/null +++ b/vendor/github.com/mgechev/revive/rule/constant-logical-expr.go @@ -0,0 +1,88 @@ +package rule + +import ( + "github.com/mgechev/revive/lint" + "go/ast" + "go/token" +) + +// ConstantLogicalExprRule warns on constant logical expressions. +type ConstantLogicalExprRule struct{} + +// Apply applies the rule to given file. +func (r *ConstantLogicalExprRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { + var failures []lint.Failure + + onFailure := func(failure lint.Failure) { + failures = append(failures, failure) + } + + astFile := file.AST + w := &lintConstantLogicalExpr{astFile, onFailure} + ast.Walk(w, astFile) + return failures +} + +// Name returns the rule name. 
+func (r *ConstantLogicalExprRule) Name() string { + return "constant-logical-expr" +} + +type lintConstantLogicalExpr struct { + file *ast.File + onFailure func(lint.Failure) +} + +func (w *lintConstantLogicalExpr) Visit(node ast.Node) ast.Visitor { + switch n := node.(type) { + case *ast.BinaryExpr: + if !w.isOperatorWithLogicalResult(n.Op) { + return w + } + + if gofmt(n.X) != gofmt(n.Y) { // check if subexpressions are the same + return w + } + + if n.Op == token.EQL { + w.newFailure(n, "expression always evaluates to true") + return w + } + + if w.isInequalityOperator(n.Op) { + w.newFailure(n, "expression always evaluates to false") + return w + } + + w.newFailure(n, "left and right hand-side sub-expressions are the same") + } + + return w +} + +func (w *lintConstantLogicalExpr) isOperatorWithLogicalResult(t token.Token) bool { + switch t { + case token.LAND, token.LOR, token.EQL, token.LSS, token.GTR, token.NEQ, token.LEQ, token.GEQ: + return true + } + + return false +} + +func (w *lintConstantLogicalExpr) isInequalityOperator(t token.Token) bool { + switch t { + case token.LSS, token.GTR, token.NEQ, token.LEQ, token.GEQ: + return true + } + + return false +} + +func (w lintConstantLogicalExpr) newFailure(node ast.Node, msg string) { + w.onFailure(lint.Failure{ + Confidence: 1, + Node: node, + Category: "logic", + Failure: msg, + }) +} diff --git a/vendor/github.com/mgechev/revive/rule/context-as-argument.go b/vendor/github.com/mgechev/revive/rule/context-as-argument.go new file mode 100644 index 000000000..6502a07be --- /dev/null +++ b/vendor/github.com/mgechev/revive/rule/context-as-argument.go @@ -0,0 +1,63 @@ +package rule + +import ( + "go/ast" + + "github.com/mgechev/revive/lint" +) + +// ContextAsArgumentRule lints given else constructs. +type ContextAsArgumentRule struct{} + +// Apply applies the rule to given file. +func (r *ContextAsArgumentRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { + var failures []lint.Failure + + fileAst := file.AST + walker := lintContextArguments{ + file: file, + fileAst: fileAst, + onFailure: func(failure lint.Failure) { + failures = append(failures, failure) + }, + } + + ast.Walk(walker, fileAst) + + return failures +} + +// Name returns the rule name. +func (r *ContextAsArgumentRule) Name() string { + return "context-as-argument" +} + +type lintContextArguments struct { + file *lint.File + fileAst *ast.File + onFailure func(lint.Failure) +} + +func (w lintContextArguments) Visit(n ast.Node) ast.Visitor { + fn, ok := n.(*ast.FuncDecl) + if !ok || len(fn.Type.Params.List) <= 1 { + return w + } + // A context.Context should be the first parameter of a function. + // Flag any that show up after the first. 
+ previousArgIsCtx := isPkgDot(fn.Type.Params.List[0].Type, "context", "Context") + for _, arg := range fn.Type.Params.List[1:] { + argIsCtx := isPkgDot(arg.Type, "context", "Context") + if argIsCtx && !previousArgIsCtx { + w.onFailure(lint.Failure{ + Node: arg, + Category: "arg-order", + Failure: "context.Context should be the first parameter of a function", + Confidence: 0.9, + }) + break // only flag one + } + previousArgIsCtx = argIsCtx + } + return w +} diff --git a/vendor/github.com/mgechev/revive/rule/context-keys-type.go b/vendor/github.com/mgechev/revive/rule/context-keys-type.go new file mode 100644 index 000000000..9c2f0bbd7 --- /dev/null +++ b/vendor/github.com/mgechev/revive/rule/context-keys-type.go @@ -0,0 +1,81 @@ +package rule + +import ( + "fmt" + "go/ast" + "go/types" + + "github.com/mgechev/revive/lint" +) + +// ContextKeysType lints given else constructs. +type ContextKeysType struct{} + +// Apply applies the rule to given file. +func (r *ContextKeysType) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { + var failures []lint.Failure + + fileAst := file.AST + walker := lintContextKeyTypes{ + file: file, + fileAst: fileAst, + onFailure: func(failure lint.Failure) { + failures = append(failures, failure) + }, + } + + file.Pkg.TypeCheck() + ast.Walk(walker, fileAst) + + return failures +} + +// Name returns the rule name. +func (r *ContextKeysType) Name() string { + return "context-keys-type" +} + +type lintContextKeyTypes struct { + file *lint.File + fileAst *ast.File + onFailure func(lint.Failure) +} + +func (w lintContextKeyTypes) Visit(n ast.Node) ast.Visitor { + switch n := n.(type) { + case *ast.CallExpr: + checkContextKeyType(w, n) + } + + return w +} + +func checkContextKeyType(w lintContextKeyTypes, x *ast.CallExpr) { + f := w.file + sel, ok := x.Fun.(*ast.SelectorExpr) + if !ok { + return + } + pkg, ok := sel.X.(*ast.Ident) + if !ok || pkg.Name != "context" { + return + } + if sel.Sel.Name != "WithValue" { + return + } + + // key is second argument to context.WithValue + if len(x.Args) != 3 { + return + } + key := f.Pkg.TypesInfo.Types[x.Args[1]] + + if ktyp, ok := key.Type.(*types.Basic); ok && ktyp.Kind() != types.Invalid { + w.onFailure(lint.Failure{ + Confidence: 1, + Node: x, + Category: "content", + Failure: fmt.Sprintf("should not use basic type %s as key in context.WithValue", key.Type), + }) + } +} diff --git a/vendor/github.com/mgechev/revive/rule/cyclomatic.go b/vendor/github.com/mgechev/revive/rule/cyclomatic.go new file mode 100644 index 000000000..f597909c7 --- /dev/null +++ b/vendor/github.com/mgechev/revive/rule/cyclomatic.go @@ -0,0 +1,117 @@ +package rule + +import ( + "fmt" + "go/ast" + "go/token" + + "github.com/mgechev/revive/lint" +) + +// Based on https://github.com/fzipp/gocyclo + +// CyclomaticRule lints given else constructs. +type CyclomaticRule struct{} + +// Apply applies the rule to given file. +func (r *CyclomaticRule) Apply(file *lint.File, arguments lint.Arguments) []lint.Failure { + checkNumberOfArguments(1, arguments, r.Name()) + + complexity, ok := arguments[0].(int64) // Alt. non panicking version + if !ok { + panic("invalid argument for cyclomatic complexity") + } + + var failures []lint.Failure + + fileAst := file.AST + walker := lintCyclomatic{ + file: file, + complexity: int(complexity), + onFailure: func(failure lint.Failure) { + failures = append(failures, failure) + }, + } + + ast.Walk(walker, fileAst) + + return failures +} + +// Name returns the rule name. 
+func (r *CyclomaticRule) Name() string { + return "cyclomatic" +} + +type lintCyclomatic struct { + file *lint.File + complexity int + onFailure func(lint.Failure) +} + +func (w lintCyclomatic) Visit(_ ast.Node) ast.Visitor { + f := w.file + for _, decl := range f.AST.Decls { + if fn, ok := decl.(*ast.FuncDecl); ok { + c := complexity(fn) + if c > w.complexity { + w.onFailure(lint.Failure{ + Confidence: 1, + Category: "maintenance", + Failure: fmt.Sprintf("function %s has cyclomatic complexity %d", funcName(fn), c), + Node: fn, + }) + } + } + } + return nil +} + +// funcName returns the name representation of a function or method: +// "(Type).Name" for methods or simply "Name" for functions. +func funcName(fn *ast.FuncDecl) string { + if fn.Recv != nil { + if fn.Recv.NumFields() > 0 { + typ := fn.Recv.List[0].Type + return fmt.Sprintf("(%s).%s", recvString(typ), fn.Name) + } + } + return fn.Name.Name +} + +// recvString returns a string representation of recv of the +// form "T", "*T", or "BADRECV" (if not a proper receiver type). +func recvString(recv ast.Expr) string { + switch t := recv.(type) { + case *ast.Ident: + return t.Name + case *ast.StarExpr: + return "*" + recvString(t.X) + } + return "BADRECV" +} + +// complexity calculates the cyclomatic complexity of a function. +func complexity(fn *ast.FuncDecl) int { + v := complexityVisitor{} + ast.Walk(&v, fn) + return v.Complexity +} + +type complexityVisitor struct { + // Complexity is the cyclomatic complexity + Complexity int +} + +// Visit implements the ast.Visitor interface. +func (v *complexityVisitor) Visit(n ast.Node) ast.Visitor { + switch n := n.(type) { + case *ast.FuncDecl, *ast.IfStmt, *ast.ForStmt, *ast.RangeStmt, *ast.CaseClause, *ast.CommClause: + v.Complexity++ + case *ast.BinaryExpr: + if n.Op == token.LAND || n.Op == token.LOR { + v.Complexity++ + } + } + return v +} diff --git a/vendor/github.com/mgechev/revive/rule/deep-exit.go b/vendor/github.com/mgechev/revive/rule/deep-exit.go new file mode 100644 index 000000000..0cdec005a --- /dev/null +++ b/vendor/github.com/mgechev/revive/rule/deep-exit.go @@ -0,0 +1,94 @@ +package rule + +import ( + "fmt" + "go/ast" + + "github.com/mgechev/revive/lint" +) + +// DeepExitRule lints program exit at functions other than main or init. +type DeepExitRule struct{} + +// Apply applies the rule to given file. +func (r *DeepExitRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { + var failures []lint.Failure + onFailure := func(failure lint.Failure) { + failures = append(failures, failure) + } + + var exitFunctions = map[string]map[string]bool{ + "os": {"Exit": true}, + "syscall": {"Exit": true}, + "log": { + "Fatal": true, + "Fatalf": true, + "Fatalln": true, + "Panic": true, + "Panicf": true, + "Panicln": true, + }, + } + + w := lintDeepExit{onFailure, exitFunctions, file.IsTest()} + ast.Walk(w, file.AST) + return failures +} + +// Name returns the rule name. 
+func (r *DeepExitRule) Name() string { + return "deep-exit" +} + +type lintDeepExit struct { + onFailure func(lint.Failure) + exitFunctions map[string]map[string]bool + isTestFile bool +} + +func (w lintDeepExit) Visit(node ast.Node) ast.Visitor { + if fd, ok := node.(*ast.FuncDecl); ok { + if w.mustIgnore(fd) { + return nil // skip analysis of this function + } + + return w + } + + se, ok := node.(*ast.ExprStmt) + if !ok { + return w + } + ce, ok := se.X.(*ast.CallExpr) + if !ok { + return w + } + + fc, ok := ce.Fun.(*ast.SelectorExpr) + if !ok { + return w + } + id, ok := fc.X.(*ast.Ident) + if !ok { + return w + } + + fn := fc.Sel.Name + pkg := id.Name + if w.exitFunctions[pkg] != nil && w.exitFunctions[pkg][fn] { // it's a call to an exit function + w.onFailure(lint.Failure{ + Confidence: 1, + Node: ce, + Category: "bad practice", + Failure: fmt.Sprintf("calls to %s.%s only in main() or init() functions", pkg, fn), + }) + } + + return w +} + +func (w *lintDeepExit) mustIgnore(fd *ast.FuncDecl) bool { + fn := fd.Name.Name + + return fn == "init" || fn == "main" || (w.isTestFile && fn == "TestMain") +} diff --git a/vendor/github.com/mgechev/revive/rule/defer.go b/vendor/github.com/mgechev/revive/rule/defer.go new file mode 100644 index 000000000..2ec7ef47c --- /dev/null +++ b/vendor/github.com/mgechev/revive/rule/defer.go @@ -0,0 +1,137 @@ +package rule + +import ( + "fmt" + "go/ast" + + "github.com/mgechev/revive/lint" +) + +// DeferRule lints unused params in functions. +type DeferRule struct{} + +// Apply applies the rule to given file. +func (r *DeferRule) Apply(file *lint.File, arguments lint.Arguments) []lint.Failure { + allow := r.allowFromArgs(arguments) + + var failures []lint.Failure + onFailure := func(failure lint.Failure) { + failures = append(failures, failure) + } + + w := lintDeferRule{onFailure: onFailure, allow: allow} + + ast.Walk(w, file.AST) + + return failures +} + +// Name returns the rule name. +func (r *DeferRule) Name() string { + return "defer" +} + +func (r *DeferRule) allowFromArgs(args lint.Arguments) map[string]bool { + if len(args) < 1 { + allow := map[string]bool{ + "loop": true, + "call-chain": true, + "method-call": true, + "return": true, + "recover": true, + } + + return allow + } + + aa, ok := args[0].([]interface{}) + if !ok { + panic(fmt.Sprintf("Invalid argument '%v' for 'defer' rule. Expecting []string, got %T", args[0], args[0])) + } + + allow := make(map[string]bool, len(aa)) + for _, subcase := range aa { + sc, ok := subcase.(string) + if !ok { + panic(fmt.Sprintf("Invalid argument '%v' for 'defer' rule. 
Expecting string, got %T", subcase, subcase)) + } + allow[sc] = true + } + + return allow +} + +type lintDeferRule struct { + onFailure func(lint.Failure) + inALoop bool + inADefer bool + inAFuncLit bool + allow map[string]bool +} + +func (w lintDeferRule) Visit(node ast.Node) ast.Visitor { + switch n := node.(type) { + case *ast.ForStmt: + w.visitSubtree(n.Body, w.inADefer, true, w.inAFuncLit) + return nil + case *ast.RangeStmt: + w.visitSubtree(n.Body, w.inADefer, true, w.inAFuncLit) + return nil + case *ast.FuncLit: + w.visitSubtree(n.Body, w.inADefer, false, true) + return nil + case *ast.ReturnStmt: + if len(n.Results) != 0 && w.inADefer && w.inAFuncLit { + w.newFailure("return in a defer function has no effect", n, 1.0, "logic", "return") + } + case *ast.CallExpr: + if isIdent(n.Fun, "recover") && !w.inADefer { + // confidence is not 1 because recover can be in a function that is deferred elsewhere + w.newFailure("recover must be called inside a deferred function", n, 0.8, "logic", "recover") + } + case *ast.DeferStmt: + w.visitSubtree(n.Call.Fun, true, false, false) + + if w.inALoop { + w.newFailure("prefer not to defer inside loops", n, 1.0, "bad practice", "loop") + } + + switch fn := n.Call.Fun.(type) { + case *ast.CallExpr: + w.newFailure("prefer not to defer chains of function calls", fn, 1.0, "bad practice", "call-chain") + case *ast.SelectorExpr: + if id, ok := fn.X.(*ast.Ident); ok { + isMethodCall := id != nil && id.Obj != nil && id.Obj.Kind == ast.Typ + if isMethodCall { + w.newFailure("be careful when deferring calls to methods without pointer receiver", fn, 0.8, "bad practice", "method-call") + } + } + } + return nil + } + + return w +} + +func (w lintDeferRule) visitSubtree(n ast.Node, inADefer, inALoop, inAFuncLit bool) { + nw := &lintDeferRule{ + onFailure: w.onFailure, + inADefer: inADefer, + inALoop: inALoop, + inAFuncLit: inAFuncLit, + allow: w.allow} + ast.Walk(nw, n) +} + +func (w lintDeferRule) newFailure(msg string, node ast.Node, confidence float64, cat string, subcase string) { + if !w.allow[subcase] { + return + } + + w.onFailure(lint.Failure{ + Confidence: confidence, + Node: node, + Category: cat, + Failure: msg, + }) +} diff --git a/vendor/github.com/mgechev/revive/rule/dot-imports.go b/vendor/github.com/mgechev/revive/rule/dot-imports.go new file mode 100644 index 000000000..78419d7d6 --- /dev/null +++ b/vendor/github.com/mgechev/revive/rule/dot-imports.go @@ -0,0 +1,54 @@ +package rule + +import ( + "go/ast" + + "github.com/mgechev/revive/lint" +) + +// DotImportsRule lints given else constructs. +type DotImportsRule struct{} + +// Apply applies the rule to given file. +func (r *DotImportsRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { + var failures []lint.Failure + + fileAst := file.AST + walker := lintImports{ + file: file, + fileAst: fileAst, + onFailure: func(failure lint.Failure) { + failures = append(failures, failure) + }, + } + + ast.Walk(walker, fileAst) + + return failures +} + +// Name returns the rule name. +func (r *DotImportsRule) Name() string { + return "dot-imports" +} + +type lintImports struct { + file *lint.File + fileAst *ast.File + onFailure func(lint.Failure) +} + +func (w lintImports) Visit(_ ast.Node) ast.Visitor { + for i, is := range w.fileAst.Imports { + _ = i + if is.Name != nil && is.Name.Name == "." 
&& !w.file.IsTest() { + w.onFailure(lint.Failure{ + Confidence: 1, + Failure: "should not use dot imports", + Node: is, + Category: "imports", + }) + } + } + return nil +} diff --git a/vendor/github.com/mgechev/revive/rule/duplicated-imports.go b/vendor/github.com/mgechev/revive/rule/duplicated-imports.go new file mode 100644 index 000000000..485b6a2ea --- /dev/null +++ b/vendor/github.com/mgechev/revive/rule/duplicated-imports.go @@ -0,0 +1,39 @@ +package rule + +import ( + "fmt" + + "github.com/mgechev/revive/lint" +) + +// DuplicatedImportsRule lints given else constructs. +type DuplicatedImportsRule struct{} + +// Apply applies the rule to given file. +func (r *DuplicatedImportsRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { + var failures []lint.Failure + + impPaths := map[string]struct{}{} + for _, imp := range file.AST.Imports { + path := imp.Path.Value + _, ok := impPaths[path] + if ok { + failures = append(failures, lint.Failure{ + Confidence: 1, + Failure: fmt.Sprintf("Package %s already imported", path), + Node: imp, + Category: "imports", + }) + continue + } + + impPaths[path] = struct{}{} + } + + return failures +} + +// Name returns the rule name. +func (r *DuplicatedImportsRule) Name() string { + return "duplicated-imports" +} diff --git a/vendor/github.com/mgechev/revive/rule/early-return.go b/vendor/github.com/mgechev/revive/rule/early-return.go new file mode 100644 index 000000000..ffb568a86 --- /dev/null +++ b/vendor/github.com/mgechev/revive/rule/early-return.go @@ -0,0 +1,78 @@ +package rule + +import ( + "go/ast" + + "github.com/mgechev/revive/lint" +) + +// EarlyReturnRule lints given else constructs. +type EarlyReturnRule struct{} + +// Apply applies the rule to given file. +func (r *EarlyReturnRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { + var failures []lint.Failure + + onFailure := func(failure lint.Failure) { + failures = append(failures, failure) + } + + w := lintEarlyReturnRule{onFailure: onFailure} + ast.Walk(w, file.AST) + return failures +} + +// Name returns the rule name. +func (r *EarlyReturnRule) Name() string { + return "early-return" +} + +type lintEarlyReturnRule struct { + onFailure func(lint.Failure) +} + +func (w lintEarlyReturnRule) Visit(node ast.Node) ast.Visitor { + switch n := node.(type) { + case *ast.IfStmt: + if n.Else == nil { + // no else branch + return w + } + + elseBlock, ok := n.Else.(*ast.BlockStmt) + if !ok { + // is if-else-if + return w + } + + lenElseBlock := len(elseBlock.List) + if lenElseBlock < 1 { + // empty else block, continue (there is another rule that warns on empty blocks) + return w + } + + lenThenBlock := len(n.Body.List) + if lenThenBlock < 1 { + // then block is empty thus the stmt can be simplified + w.onFailure(lint.Failure{ + Confidence: 1, + Node: n, + Failure: "if c { } else {... return} can be simplified to if !c { ... return }", + }) + + return w + } + + _, lastThenStmtIsReturn := n.Body.List[lenThenBlock-1].(*ast.ReturnStmt) + _, lastElseStmtIsReturn := elseBlock.List[lenElseBlock-1].(*ast.ReturnStmt) + if lastElseStmtIsReturn && !lastThenStmtIsReturn { + w.onFailure(lint.Failure{ + Confidence: 1, + Node: n, + Failure: "if c {...} else {... return } can be simplified to if !c { ... 
return } ...", + }) + } + } + + return w +} diff --git a/vendor/github.com/mgechev/revive/rule/empty-block.go b/vendor/github.com/mgechev/revive/rule/empty-block.go new file mode 100644 index 000000000..fbec4d93c --- /dev/null +++ b/vendor/github.com/mgechev/revive/rule/empty-block.go @@ -0,0 +1,65 @@ +package rule + +import ( + "go/ast" + + "github.com/mgechev/revive/lint" +) + +// EmptyBlockRule lints given else constructs. +type EmptyBlockRule struct{} + +// Apply applies the rule to given file. +func (r *EmptyBlockRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { + var failures []lint.Failure + + onFailure := func(failure lint.Failure) { + failures = append(failures, failure) + } + + w := lintEmptyBlock{make(map[*ast.BlockStmt]bool, 0), onFailure} + ast.Walk(w, file.AST) + return failures +} + +// Name returns the rule name. +func (r *EmptyBlockRule) Name() string { + return "empty-block" +} + +type lintEmptyBlock struct { + ignore map[*ast.BlockStmt]bool + onFailure func(lint.Failure) +} + +func (w lintEmptyBlock) Visit(node ast.Node) ast.Visitor { + switch n := node.(type) { + case *ast.FuncDecl: + w.ignore[n.Body] = true + return w + case *ast.FuncLit: + w.ignore[n.Body] = true + return w + case *ast.RangeStmt: + if len(n.Body.List) == 0 { + w.onFailure(lint.Failure{ + Confidence: 0.9, + Node: n, + Category: "logic", + Failure: "this block is empty, you can remove it", + }) + return nil // skip visiting the range subtree (it will produce a duplicated failure) + } + case *ast.BlockStmt: + if !w.ignore[n] && len(n.List) == 0 { + w.onFailure(lint.Failure{ + Confidence: 1, + Node: n, + Category: "logic", + Failure: "this block is empty, you can remove it", + }) + } + } + + return w +} diff --git a/vendor/github.com/mgechev/revive/rule/empty-lines.go b/vendor/github.com/mgechev/revive/rule/empty-lines.go new file mode 100644 index 000000000..61d9281bf --- /dev/null +++ b/vendor/github.com/mgechev/revive/rule/empty-lines.go @@ -0,0 +1,113 @@ +package rule + +import ( + "go/ast" + "go/token" + + "github.com/mgechev/revive/lint" +) + +// EmptyLinesRule lints empty lines in blocks. +type EmptyLinesRule struct{} + +// Apply applies the rule to given file. +func (r *EmptyLinesRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { + var failures []lint.Failure + + onFailure := func(failure lint.Failure) { + failures = append(failures, failure) + } + + w := lintEmptyLines{file, file.CommentMap(), onFailure} + ast.Walk(w, file.AST) + return failures +} + +// Name returns the rule name. 
+func (r *EmptyLinesRule) Name() string { + return "empty-lines" +} + +type lintEmptyLines struct { + file *lint.File + cmap ast.CommentMap + onFailure func(lint.Failure) +} + +func (w lintEmptyLines) Visit(node ast.Node) ast.Visitor { + block, ok := node.(*ast.BlockStmt) + if !ok { + return w + } + + w.checkStart(block) + w.checkEnd(block) + + return w +} + +func (w lintEmptyLines) checkStart(block *ast.BlockStmt) { + if len(block.List) == 0 { + return + } + + start := w.position(block.Lbrace) + firstNode := block.List[0] + + if w.commentBetween(start, firstNode) { + return + } + + first := w.position(firstNode.Pos()) + if first.Line-start.Line > 1 { + w.onFailure(lint.Failure{ + Confidence: 1, + Node: block, + Category: "style", + Failure: "extra empty line at the start of a block", + }) + } +} + +func (w lintEmptyLines) checkEnd(block *ast.BlockStmt) { + if len(block.List) < 1 { + return + } + + end := w.position(block.Rbrace) + lastNode := block.List[len(block.List)-1] + + if w.commentBetween(end, lastNode) { + return + } + + last := w.position(lastNode.End()) + if end.Line-last.Line > 1 { + w.onFailure(lint.Failure{ + Confidence: 1, + Node: lastNode, + Category: "style", + Failure: "extra empty line at the end of a block", + }) + } +} + +func (w lintEmptyLines) commentBetween(position token.Position, node ast.Node) bool { + comments := w.cmap.Filter(node).Comments() + if len(comments) == 0 { + return false + } + + for _, comment := range comments { + start, end := w.position(comment.Pos()), w.position(comment.End()) + if start.Line-position.Line == 1 || position.Line-end.Line == 1 { + return true + } + } + + return false +} + +func (w lintEmptyLines) position(pos token.Pos) token.Position { + return w.file.ToPosition(pos) +} diff --git a/vendor/github.com/mgechev/revive/rule/error-naming.go b/vendor/github.com/mgechev/revive/rule/error-naming.go new file mode 100644 index 000000000..3a1080625 --- /dev/null +++ b/vendor/github.com/mgechev/revive/rule/error-naming.go @@ -0,0 +1,79 @@ +package rule + +import ( + "fmt" + "go/ast" + "go/token" + "strings" + + "github.com/mgechev/revive/lint" +) + +// ErrorNamingRule lints given else constructs. +type ErrorNamingRule struct{} + +// Apply applies the rule to given file. +func (r *ErrorNamingRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { + var failures []lint.Failure + + fileAst := file.AST + walker := lintErrors{ + file: file, + fileAst: fileAst, + onFailure: func(failure lint.Failure) { + failures = append(failures, failure) + }, + } + + ast.Walk(walker, fileAst) + + return failures +} + +// Name returns the rule name. 
+func (r *ErrorNamingRule) Name() string { + return "error-naming" +} + +type lintErrors struct { + file *lint.File + fileAst *ast.File + onFailure func(lint.Failure) +} + +func (w lintErrors) Visit(_ ast.Node) ast.Visitor { + for _, decl := range w.fileAst.Decls { + gd, ok := decl.(*ast.GenDecl) + if !ok || gd.Tok != token.VAR { + continue + } + for _, spec := range gd.Specs { + spec := spec.(*ast.ValueSpec) + if len(spec.Names) != 1 || len(spec.Values) != 1 { + continue + } + ce, ok := spec.Values[0].(*ast.CallExpr) + if !ok { + continue + } + if !isPkgDot(ce.Fun, "errors", "New") && !isPkgDot(ce.Fun, "fmt", "Errorf") { + continue + } + + id := spec.Names[0] + prefix := "err" + if id.IsExported() { + prefix = "Err" + } + if !strings.HasPrefix(id.Name, prefix) { + w.onFailure(lint.Failure{ + Node: id, + Confidence: 0.9, + Category: "naming", + Failure: fmt.Sprintf("error var %s should have name of the form %sFoo", id.Name, prefix), + }) + } + } + } + return nil +} diff --git a/vendor/github.com/mgechev/revive/rule/error-return.go b/vendor/github.com/mgechev/revive/rule/error-return.go new file mode 100644 index 000000000..737d8c66f --- /dev/null +++ b/vendor/github.com/mgechev/revive/rule/error-return.go @@ -0,0 +1,67 @@ +package rule + +import ( + "go/ast" + + "github.com/mgechev/revive/lint" +) + +// ErrorReturnRule lints given else constructs. +type ErrorReturnRule struct{} + +// Apply applies the rule to given file. +func (r *ErrorReturnRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { + var failures []lint.Failure + + fileAst := file.AST + walker := lintErrorReturn{ + file: file, + fileAst: fileAst, + onFailure: func(failure lint.Failure) { + failures = append(failures, failure) + }, + } + + ast.Walk(walker, fileAst) + + return failures +} + +// Name returns the rule name. +func (r *ErrorReturnRule) Name() string { + return "error-return" +} + +type lintErrorReturn struct { + file *lint.File + fileAst *ast.File + onFailure func(lint.Failure) +} + +func (w lintErrorReturn) Visit(n ast.Node) ast.Visitor { + fn, ok := n.(*ast.FuncDecl) + if !ok || fn.Type.Results == nil { + return w + } + ret := fn.Type.Results.List + if len(ret) <= 1 { + return w + } + if isIdent(ret[len(ret)-1].Type, "error") { + return nil + } + // An error return parameter should be the last parameter. + // Flag any error parameters found before the last. + for _, r := range ret[:len(ret)-1] { + if isIdent(r.Type, "error") { + w.onFailure(lint.Failure{ + Category: "arg-order", + Confidence: 0.9, + Node: fn, + Failure: "error should be the last type when returning multiple items", + }) + break // only flag one + } + } + return w +} diff --git a/vendor/github.com/mgechev/revive/rule/error-strings.go b/vendor/github.com/mgechev/revive/rule/error-strings.go new file mode 100644 index 000000000..b8a5b7ed7 --- /dev/null +++ b/vendor/github.com/mgechev/revive/rule/error-strings.go @@ -0,0 +1,98 @@ +package rule + +import ( + "go/ast" + "go/token" + "strconv" + "unicode" + "unicode/utf8" + + "github.com/mgechev/revive/lint" +) + +// ErrorStringsRule lints given else constructs. +type ErrorStringsRule struct{} + +// Apply applies the rule to given file. 
+func (r *ErrorStringsRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { + var failures []lint.Failure + + fileAst := file.AST + walker := lintErrorStrings{ + file: file, + fileAst: fileAst, + onFailure: func(failure lint.Failure) { + failures = append(failures, failure) + }, + } + + ast.Walk(walker, fileAst) + + return failures +} + +// Name returns the rule name. +func (r *ErrorStringsRule) Name() string { + return "error-strings" +} + +type lintErrorStrings struct { + file *lint.File + fileAst *ast.File + onFailure func(lint.Failure) +} + +func (w lintErrorStrings) Visit(n ast.Node) ast.Visitor { + ce, ok := n.(*ast.CallExpr) + if !ok { + return w + } + if !isPkgDot(ce.Fun, "errors", "New") && !isPkgDot(ce.Fun, "fmt", "Errorf") { + return w + } + if len(ce.Args) < 1 { + return w + } + str, ok := ce.Args[0].(*ast.BasicLit) + if !ok || str.Kind != token.STRING { + return w + } + s, _ := strconv.Unquote(str.Value) // can assume well-formed Go + if s == "" { + return w + } + clean, conf := lintErrorString(s) + if clean { + return w + } + + w.onFailure(lint.Failure{ + Node: str, + Confidence: conf, + Category: "errors", + Failure: "error strings should not be capitalized or end with punctuation or a newline", + }) + return w +} + +func lintErrorString(s string) (isClean bool, conf float64) { + const basicConfidence = 0.8 + const capConfidence = basicConfidence - 0.2 + first, firstN := utf8.DecodeRuneInString(s) + last, _ := utf8.DecodeLastRuneInString(s) + if last == '.' || last == ':' || last == '!' || last == '\n' { + return false, basicConfidence + } + if unicode.IsUpper(first) { + // People use proper nouns and exported Go identifiers in error strings, + // so decrease the confidence of warnings for capitalization. + if len(s) <= firstN { + return false, capConfidence + } + // Flag strings starting with something that doesn't look like an initialism. + if second, _ := utf8.DecodeRuneInString(s[firstN:]); !unicode.IsUpper(second) { + return false, capConfidence + } + } + return true, 0 +} diff --git a/vendor/github.com/mgechev/revive/rule/errorf.go b/vendor/github.com/mgechev/revive/rule/errorf.go new file mode 100644 index 000000000..1bffbab5b --- /dev/null +++ b/vendor/github.com/mgechev/revive/rule/errorf.go @@ -0,0 +1,93 @@ +package rule + +import ( + "fmt" + "go/ast" + "regexp" + "strings" + + "github.com/mgechev/revive/lint" +) + +// ErrorfRule lints given else constructs. +type ErrorfRule struct{} + +// Apply applies the rule to given file. +func (r *ErrorfRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { + var failures []lint.Failure + + fileAst := file.AST + walker := lintErrorf{ + file: file, + fileAst: fileAst, + onFailure: func(failure lint.Failure) { + failures = append(failures, failure) + }, + } + + file.Pkg.TypeCheck() + ast.Walk(walker, fileAst) + + return failures +} + +// Name returns the rule name. 
+func (r *ErrorfRule) Name() string { + return "errorf" +} + +type lintErrorf struct { + file *lint.File + fileAst *ast.File + onFailure func(lint.Failure) +} + +func (w lintErrorf) Visit(n ast.Node) ast.Visitor { + ce, ok := n.(*ast.CallExpr) + if !ok || len(ce.Args) != 1 { + return w + } + isErrorsNew := isPkgDot(ce.Fun, "errors", "New") + var isTestingError bool + se, ok := ce.Fun.(*ast.SelectorExpr) + if ok && se.Sel.Name == "Error" { + if typ := w.file.Pkg.TypeOf(se.X); typ != nil { + isTestingError = typ.String() == "*testing.T" + } + } + if !isErrorsNew && !isTestingError { + return w + } + arg := ce.Args[0] + ce, ok = arg.(*ast.CallExpr) + if !ok || !isPkgDot(ce.Fun, "fmt", "Sprintf") { + return w + } + errorfPrefix := "fmt" + if isTestingError { + errorfPrefix = w.file.Render(se.X) + } + + failure := lint.Failure{ + Category: "errors", + Node: n, + Confidence: 1, + Failure: fmt.Sprintf("should replace %s(fmt.Sprintf(...)) with %s.Errorf(...)", w.file.Render(se), errorfPrefix), + } + + m := srcLineWithMatch(w.file, ce, `^(.*)`+w.file.Render(se)+`\(fmt\.Sprintf\((.*)\)\)(.*)$`) + if m != nil { + failure.ReplacementLine = m[1] + errorfPrefix + ".Errorf(" + m[2] + ")" + m[3] + } + + w.onFailure(failure) + + return w +} + +func srcLineWithMatch(file *lint.File, node ast.Node, pattern string) (m []string) { + line := srcLine(file.Content(), file.ToPosition(node.Pos())) + line = strings.TrimSuffix(line, "\n") + rx := regexp.MustCompile(pattern) + return rx.FindStringSubmatch(line) +} diff --git a/vendor/github.com/mgechev/revive/rule/exported.go b/vendor/github.com/mgechev/revive/rule/exported.go new file mode 100644 index 000000000..3dab1bbaf --- /dev/null +++ b/vendor/github.com/mgechev/revive/rule/exported.go @@ -0,0 +1,315 @@ +package rule + +import ( + "fmt" + "go/ast" + "go/token" + "strings" + "unicode" + "unicode/utf8" + + "github.com/mgechev/revive/lint" +) + +// ExportedRule lints given else constructs. +type ExportedRule struct{} + +// Apply applies the rule to given file. +func (r *ExportedRule) Apply(file *lint.File, args lint.Arguments) []lint.Failure { + var failures []lint.Failure + + if isTest(file) { + return failures + } + + checkPrivateReceivers, disableStutteringCheck, sayRepetitiveInsteadOfStutters := r.getConf(args) + + stuttersMsg := "stutters" + if sayRepetitiveInsteadOfStutters { + stuttersMsg = "is repetitive" + } + + fileAst := file.AST + walker := lintExported{ + file: file, + fileAst: fileAst, + onFailure: func(failure lint.Failure) { + failures = append(failures, failure) + }, + genDeclMissingComments: make(map[*ast.GenDecl]bool), + checkPrivateReceivers: checkPrivateReceivers, + disableStutteringCheck: disableStutteringCheck, + stuttersMsg: stuttersMsg, + } + + ast.Walk(&walker, fileAst) + + return failures +} + +// Name returns the rule name. 
+func (r *ExportedRule) Name() string { + return "exported" +} + +func (r *ExportedRule) getConf(args lint.Arguments) (checkPrivateReceivers bool, disableStutteringCheck bool, sayRepetitiveInsteadOfStutters bool) { + // if any, we expect a slice of strings as configuration + if len(args) < 1 { + return + } + for _, flag := range args { + flagStr, ok := flag.(string) + if !ok { + panic(fmt.Sprintf("Invalid argument for the %s rule: expecting a string, got %T", r.Name(), flag)) + } + + switch flagStr { + case "checkPrivateReceivers": + checkPrivateReceivers = true + case "disableStutteringCheck": + disableStutteringCheck = true + case "sayRepetitiveInsteadOfStutters": + sayRepetitiveInsteadOfStutters = true + default: + panic(fmt.Sprintf("Unknown configuration flag %s for %s rule", flagStr, r.Name())) + } + } + + return +} + +type lintExported struct { + file *lint.File + fileAst *ast.File + lastGen *ast.GenDecl + genDeclMissingComments map[*ast.GenDecl]bool + onFailure func(lint.Failure) + checkPrivateReceivers bool + disableStutteringCheck bool + stuttersMsg string +} + +func (w *lintExported) lintFuncDoc(fn *ast.FuncDecl) { + if !ast.IsExported(fn.Name.Name) { + // func is unexported + return + } + kind := "function" + name := fn.Name.Name + if fn.Recv != nil && len(fn.Recv.List) > 0 { + // method + kind = "method" + recv := receiverType(fn) + if !ast.IsExported(recv) && !w.checkPrivateReceivers { + // receiver is unexported + return + } + if commonMethods[name] { + return + } + switch name { + case "Len", "Less", "Swap": + if w.file.Pkg.Sortable[recv] { + return + } + } + name = recv + "." + name + } + if fn.Doc == nil { + w.onFailure(lint.Failure{ + Node: fn, + Confidence: 1, + Category: "comments", + Failure: fmt.Sprintf("exported %s %s should have comment or be unexported", kind, name), + }) + return + } + s := normalizeText(fn.Doc.Text()) + prefix := fn.Name.Name + " " + if !strings.HasPrefix(s, prefix) { + w.onFailure(lint.Failure{ + Node: fn.Doc, + Confidence: 0.8, + Category: "comments", + Failure: fmt.Sprintf(`comment on exported %s %s should be of the form "%s..."`, kind, name, prefix), + }) + } +} + +func (w *lintExported) checkStutter(id *ast.Ident, thing string) { + if w.disableStutteringCheck { + return + } + + pkg, name := w.fileAst.Name.Name, id.Name + if !ast.IsExported(name) { + // unexported name + return + } + // A name stutters if the package name is a strict prefix + // and the next character of the name starts a new word. + if len(name) <= len(pkg) { + // name is too short to stutter. + // This permits the name to be the same as the package name. + return + } + if !strings.EqualFold(pkg, name[:len(pkg)]) { + return + } + // We can assume the name is well-formed UTF-8. + // If the next rune after the package name is uppercase or an underscore + // the it's starting a new word and thus this name stutters. 
+ rem := name[len(pkg):] + if next, _ := utf8.DecodeRuneInString(rem); next == '_' || unicode.IsUpper(next) { + w.onFailure(lint.Failure{ + Node: id, + Confidence: 0.8, + Category: "naming", + Failure: fmt.Sprintf("%s name will be used as %s.%s by other packages, and that %s; consider calling this %s", thing, pkg, name, w.stuttersMsg, rem), + }) + } +} + +func (w *lintExported) lintTypeDoc(t *ast.TypeSpec, doc *ast.CommentGroup) { + if !ast.IsExported(t.Name.Name) { + return + } + if doc == nil { + w.onFailure(lint.Failure{ + Node: t, + Confidence: 1, + Category: "comments", + Failure: fmt.Sprintf("exported type %v should have comment or be unexported", t.Name), + }) + return + } + + s := normalizeText(doc.Text()) + articles := [...]string{"A", "An", "The", "This"} + for _, a := range articles { + if t.Name.Name == a { + continue + } + if strings.HasPrefix(s, a+" ") { + s = s[len(a)+1:] + break + } + } + if !strings.HasPrefix(s, t.Name.Name+" ") { + w.onFailure(lint.Failure{ + Node: doc, + Confidence: 1, + Category: "comments", + Failure: fmt.Sprintf(`comment on exported type %v should be of the form "%v ..." (with optional leading article)`, t.Name, t.Name), + }) + } +} + +func (w *lintExported) lintValueSpecDoc(vs *ast.ValueSpec, gd *ast.GenDecl, genDeclMissingComments map[*ast.GenDecl]bool) { + kind := "var" + if gd.Tok == token.CONST { + kind = "const" + } + + if len(vs.Names) > 1 { + // Check that none are exported except for the first. + for _, n := range vs.Names[1:] { + if ast.IsExported(n.Name) { + w.onFailure(lint.Failure{ + Category: "comments", + Confidence: 1, + Failure: fmt.Sprintf("exported %s %s should have its own declaration", kind, n.Name), + Node: vs, + }) + return + } + } + } + + // Only one name. + name := vs.Names[0].Name + if !ast.IsExported(name) { + return + } + + if vs.Doc == nil && gd.Doc == nil { + if genDeclMissingComments[gd] { + return + } + block := "" + if kind == "const" && gd.Lparen.IsValid() { + block = " (or a comment on this block)" + } + w.onFailure(lint.Failure{ + Confidence: 1, + Node: vs, + Category: "comments", + Failure: fmt.Sprintf("exported %s %s should have comment%s or be unexported", kind, name, block), + }) + genDeclMissingComments[gd] = true + return + } + // If this GenDecl has parens and a comment, we don't check its comment form. + if gd.Lparen.IsValid() && gd.Doc != nil { + return + } + // The relevant text to check will be on either vs.Doc or gd.Doc. + // Use vs.Doc preferentially. + doc := vs.Doc + if doc == nil { + doc = gd.Doc + } + prefix := name + " " + s := normalizeText(doc.Text()) + if !strings.HasPrefix(s, prefix) { + w.onFailure(lint.Failure{ + Confidence: 1, + Node: doc, + Category: "comments", + Failure: fmt.Sprintf(`comment on exported %s %s should be of the form "%s..."`, kind, name, prefix), + }) + } +} + +// normalizeText is a helper function that normalizes comment strings by: +// * removing one leading space +// +// This function is needed because ast.CommentGroup.Text() does not handle //-style and /*-style comments uniformly +func normalizeText(t string) string { + return strings.TrimPrefix(t, " ") +} + +func (w *lintExported) Visit(n ast.Node) ast.Visitor { + switch v := n.(type) { + case *ast.GenDecl: + if v.Tok == token.IMPORT { + return nil + } + // token.CONST, token.TYPE or token.VAR + w.lastGen = v + return w + case *ast.FuncDecl: + w.lintFuncDoc(v) + if v.Recv == nil { + // Only check for stutter on functions, not methods. + // Method names are not used package-qualified. 
+ w.checkStutter(v.Name, "func") + } + // Don't proceed inside funcs. + return nil + case *ast.TypeSpec: + // inside a GenDecl, which usually has the doc + doc := v.Doc + if doc == nil { + doc = w.lastGen.Doc + } + w.lintTypeDoc(v, doc) + w.checkStutter(v.Name, "type") + // Don't proceed inside types. + return nil + case *ast.ValueSpec: + w.lintValueSpecDoc(v, w.lastGen, w.genDeclMissingComments) + return nil + } + return w +} diff --git a/vendor/github.com/mgechev/revive/rule/file-header.go b/vendor/github.com/mgechev/revive/rule/file-header.go new file mode 100644 index 000000000..7855c85ad --- /dev/null +++ b/vendor/github.com/mgechev/revive/rule/file-header.go @@ -0,0 +1,67 @@ +package rule + +import ( + "regexp" + + "github.com/mgechev/revive/lint" +) + +// FileHeaderRule lints given else constructs. +type FileHeaderRule struct{} + +var ( + multiRegexp = regexp.MustCompile("^/\\*") + singleRegexp = regexp.MustCompile("^//") +) + +// Apply applies the rule to given file. +func (r *FileHeaderRule) Apply(file *lint.File, arguments lint.Arguments) []lint.Failure { + checkNumberOfArguments(1, arguments, r.Name()) + + header, ok := arguments[0].(string) + if !ok { + panic(`invalid argument for "file-header" rule: first argument should be a string`) + } + + failure := []lint.Failure{ + { + Node: file.AST, + Confidence: 1, + Failure: "the file doesn't have an appropriate header", + }, + } + + if len(file.AST.Comments) == 0 { + return failure + } + + g := file.AST.Comments[0] + if g == nil { + return failure + } + comment := "" + for _, c := range g.List { + text := c.Text + if multiRegexp.Match([]byte(text)) { + text = text[2 : len(text)-2] + } else if singleRegexp.Match([]byte(text)) { + text = text[2:] + } + comment += text + } + + regex, err := regexp.Compile(header) + if err != nil { + panic(err.Error()) + } + + if !regex.Match([]byte(comment)) { + return failure + } + return nil +} + +// Name returns the rule name. +func (r *FileHeaderRule) Name() string { + return "file-header" +} diff --git a/vendor/github.com/mgechev/revive/rule/flag-param.go b/vendor/github.com/mgechev/revive/rule/flag-param.go new file mode 100644 index 000000000..6cb6daea9 --- /dev/null +++ b/vendor/github.com/mgechev/revive/rule/flag-param.go @@ -0,0 +1,104 @@ +package rule + +import ( + "fmt" + "github.com/mgechev/revive/lint" + "go/ast" +) + +// FlagParamRule lints given else constructs. +type FlagParamRule struct{} + +// Apply applies the rule to given file. +func (r *FlagParamRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { + var failures []lint.Failure + + onFailure := func(failure lint.Failure) { + failures = append(failures, failure) + } + + w := lintFlagParamRule{onFailure: onFailure} + ast.Walk(w, file.AST) + return failures +} + +// Name returns the rule name. 
+func (r *FlagParamRule) Name() string { + return "flag-parameter" +} + +type lintFlagParamRule struct { + onFailure func(lint.Failure) +} + +func (w lintFlagParamRule) Visit(node ast.Node) ast.Visitor { + fd, ok := node.(*ast.FuncDecl) + if !ok { + return w + } + + if fd.Body == nil { + return nil // skip whole function declaration + } + + for _, p := range fd.Type.Params.List { + t := p.Type + + id, ok := t.(*ast.Ident) + if !ok { + continue + } + + if id.Name != "bool" { + continue + } + + cv := conditionVisitor{p.Names, fd, w} + ast.Walk(cv, fd.Body) + } + + return w +} + +type conditionVisitor struct { + ids []*ast.Ident + fd *ast.FuncDecl + linter lintFlagParamRule +} + +func (w conditionVisitor) Visit(node ast.Node) ast.Visitor { + ifStmt, ok := node.(*ast.IfStmt) + if !ok { + return w + } + + fselect := func(n ast.Node) bool { + ident, ok := n.(*ast.Ident) + if !ok { + return false + } + + for _, id := range w.ids { + if ident.Name == id.Name { + return true + } + } + + return false + } + + uses := pick(ifStmt.Cond, fselect, nil) + + if len(uses) < 1 { + return w + } + + w.linter.onFailure(lint.Failure{ + Confidence: 1, + Node: w.fd.Type.Params, + Category: "bad practice", + Failure: fmt.Sprintf("parameter '%s' seems to be a control flag, avoid control coupling", uses[0]), + }) + + return nil +} diff --git a/vendor/github.com/mgechev/revive/rule/function-length.go b/vendor/github.com/mgechev/revive/rule/function-length.go new file mode 100644 index 000000000..e1cee21cf --- /dev/null +++ b/vendor/github.com/mgechev/revive/rule/function-length.go @@ -0,0 +1,153 @@ +package rule + +import ( + "fmt" + "go/ast" + "reflect" + + "github.com/mgechev/revive/lint" +) + +// FunctionLength lint. +type FunctionLength struct{} + +// Apply applies the rule to given file. +func (r *FunctionLength) Apply(file *lint.File, arguments lint.Arguments) []lint.Failure { + maxStmt, maxLines := r.parseArguments(arguments) + + var failures []lint.Failure + + walker := lintFuncLength{ + file: file, + maxStmt: int(maxStmt), + maxLines: int(maxLines), + onFailure: func(failure lint.Failure) { + failures = append(failures, failure) + }, + } + + ast.Walk(walker, file.AST) + + return failures +} + +// Name returns the rule name. 
+func (r *FunctionLength) Name() string { + return "function-length" +} + +func (r *FunctionLength) parseArguments(arguments lint.Arguments) (maxStmt int64, maxLines int64) { + if len(arguments) != 2 { + panic(fmt.Sprintf(`invalid configuration for "function-length" rule, expected 2 arguments but got %d`, len(arguments))) + } + + maxStmt, maxStmtOk := arguments[0].(int64) + if !maxStmtOk { + panic(fmt.Sprintf(`invalid configuration value for max statements in "function-length" rule; need int64 but got %T`, arguments[0])) + } + if maxStmt < 0 { + panic(fmt.Sprintf(`the configuration value for max statements in "function-length" rule cannot be negative, got %d`, maxStmt)) + } + + maxLines, maxLinesOk := arguments[1].(int64) + if !maxLinesOk { + panic(fmt.Sprintf(`invalid configuration value for max lines in "function-length" rule; need int64 but got %T`, arguments[1])) + } + if maxLines < 0 { + panic(fmt.Sprintf(`the configuration value for max statements in "function-length" rule cannot be negative, got %d`, maxLines)) + } + + return +} + +type lintFuncLength struct { + file *lint.File + maxStmt int + maxLines int + onFailure func(lint.Failure) +} + +func (w lintFuncLength) Visit(n ast.Node) ast.Visitor { + node, ok := n.(*ast.FuncDecl) + if !ok { + return w + } + + body := node.Body + if body == nil || len(node.Body.List) == 0 { + return nil + } + + if w.maxStmt > 0 { + stmtCount := w.countStmts(node.Body.List) + if stmtCount > w.maxStmt { + w.onFailure(lint.Failure{ + Confidence: 1, + Failure: fmt.Sprintf("maximum number of statements per function exceeded; max %d but got %d", w.maxStmt, stmtCount), + Node: node, + }) + } + } + + if w.maxLines > 0 { + lineCount := w.countLines(node.Body) + if lineCount > w.maxLines { + w.onFailure(lint.Failure{ + Confidence: 1, + Failure: fmt.Sprintf("maximum number of lines per function exceeded; max %d but got %d", w.maxLines, lineCount), + Node: node, + }) + } + } + + return nil +} + +func (w lintFuncLength) countLines(b *ast.BlockStmt) int { + return w.file.ToPosition(b.End()).Line - w.file.ToPosition(b.Pos()).Line - 1 +} + +func (w lintFuncLength) countStmts(b []ast.Stmt) int { + count := 0 + for _, s := range b { + switch stmt := s.(type) { + case *ast.BlockStmt: + count += w.countStmts(stmt.List) + case *ast.IfStmt: + count += 1 + w.countBodyListStmts(stmt) + if stmt.Else != nil { + elseBody, ok := stmt.Else.(*ast.BlockStmt) + if ok { + count += w.countStmts(elseBody.List) + } + } + case *ast.ForStmt, *ast.RangeStmt, + *ast.SwitchStmt, *ast.TypeSwitchStmt, *ast.SelectStmt: + count += 1 + w.countBodyListStmts(stmt) + case *ast.CaseClause: + count += w.countStmts(stmt.Body) + case *ast.AssignStmt: + count += 1 + w.countFuncLitStmts(stmt.Rhs[0]) + case *ast.GoStmt: + count += 1 + w.countFuncLitStmts(stmt.Call.Fun) + case *ast.DeferStmt: + count += 1 + w.countFuncLitStmts(stmt.Call.Fun) + default: + count++ + } + } + + return count +} + +func (w lintFuncLength) countFuncLitStmts(stmt ast.Expr) int { + if block, ok := stmt.(*ast.FuncLit); ok { + return w.countStmts(block.Body.List) + } + return 0 +} + +func (w lintFuncLength) countBodyListStmts(t interface{}) int { + i := reflect.ValueOf(t).Elem().FieldByName(`Body`).Elem().FieldByName(`List`).Interface() + return w.countStmts(i.([]ast.Stmt)) +} diff --git a/vendor/github.com/mgechev/revive/rule/function-result-limit.go b/vendor/github.com/mgechev/revive/rule/function-result-limit.go new file mode 100644 index 000000000..51a4713f0 --- /dev/null +++ 
b/vendor/github.com/mgechev/revive/rule/function-result-limit.go @@ -0,0 +1,66 @@ +package rule + +import ( + "fmt" + "go/ast" + + "github.com/mgechev/revive/lint" +) + +// FunctionResultsLimitRule lints given else constructs. +type FunctionResultsLimitRule struct{} + +// Apply applies the rule to given file. +func (r *FunctionResultsLimitRule) Apply(file *lint.File, arguments lint.Arguments) []lint.Failure { + checkNumberOfArguments(1, arguments, r.Name()) + + max, ok := arguments[0].(int64) // Alt. non panicking version + if !ok { + panic(fmt.Sprintf(`invalid value passed as return results number to the "function-result-limit" rule; need int64 but got %T`, arguments[0])) + } + if max < 0 { + panic(`the value passed as return results number to the "function-result-limit" rule cannot be negative`) + } + + var failures []lint.Failure + + walker := lintFunctionResultsNum{ + max: int(max), + onFailure: func(failure lint.Failure) { + failures = append(failures, failure) + }, + } + + ast.Walk(walker, file.AST) + + return failures +} + +// Name returns the rule name. +func (r *FunctionResultsLimitRule) Name() string { + return "function-result-limit" +} + +type lintFunctionResultsNum struct { + max int + onFailure func(lint.Failure) +} + +func (w lintFunctionResultsNum) Visit(n ast.Node) ast.Visitor { + node, ok := n.(*ast.FuncDecl) + if ok { + num := 0 + if node.Type.Results != nil { + num = node.Type.Results.NumFields() + } + if num > w.max { + w.onFailure(lint.Failure{ + Confidence: 1, + Failure: fmt.Sprintf("maximum number of return results per function exceeded; max %d but got %d", w.max, num), + Node: node.Type, + }) + return w + } + } + return w +} diff --git a/vendor/github.com/mgechev/revive/rule/get-return.go b/vendor/github.com/mgechev/revive/rule/get-return.go new file mode 100644 index 000000000..494ab6669 --- /dev/null +++ b/vendor/github.com/mgechev/revive/rule/get-return.go @@ -0,0 +1,70 @@ +package rule + +import ( + "fmt" + "go/ast" + "strings" + + "github.com/mgechev/revive/lint" +) + +// GetReturnRule lints given else constructs. +type GetReturnRule struct{} + +// Apply applies the rule to given file. +func (r *GetReturnRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { + var failures []lint.Failure + + onFailure := func(failure lint.Failure) { + failures = append(failures, failure) + } + + w := lintReturnRule{onFailure} + ast.Walk(w, file.AST) + return failures +} + +// Name returns the rule name. 
+func (r *GetReturnRule) Name() string { + return "get-return" +} + +type lintReturnRule struct { + onFailure func(lint.Failure) +} + +func isGetter(name string) bool { + if strings.HasPrefix(strings.ToUpper(name), "GET") { + if len(name) > 3 { + c := name[3] + return !(c >= 'a' && c <= 'z') + } + } + + return false +} + +func hasResults(rs *ast.FieldList) bool { + return rs != nil && len(rs.List) > 0 +} + +func (w lintReturnRule) Visit(node ast.Node) ast.Visitor { + fd, ok := node.(*ast.FuncDecl) + if !ok { + return w + } + + if !isGetter(fd.Name.Name) { + return w + } + if !hasResults(fd.Type.Results) { + w.onFailure(lint.Failure{ + Confidence: 0.8, + Node: fd, + Category: "logic", + Failure: fmt.Sprintf("function '%s' seems to be a getter but it does not return any result", fd.Name.Name), + }) + } + + return w +} diff --git a/vendor/github.com/mgechev/revive/rule/identical-branches.go b/vendor/github.com/mgechev/revive/rule/identical-branches.go new file mode 100644 index 000000000..094a79147 --- /dev/null +++ b/vendor/github.com/mgechev/revive/rule/identical-branches.go @@ -0,0 +1,82 @@ +package rule + +import ( + "go/ast" + + "github.com/mgechev/revive/lint" +) + +// IdenticalBranchesRule warns on constant logical expressions. +type IdenticalBranchesRule struct{} + +// Apply applies the rule to given file. +func (r *IdenticalBranchesRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { + var failures []lint.Failure + + onFailure := func(failure lint.Failure) { + failures = append(failures, failure) + } + + astFile := file.AST + w := &lintIdenticalBranches{astFile, onFailure} + ast.Walk(w, astFile) + return failures +} + +// Name returns the rule name. +func (r *IdenticalBranchesRule) Name() string { + return "identical-branches" +} + +type lintIdenticalBranches struct { + file *ast.File + onFailure func(lint.Failure) +} + +func (w *lintIdenticalBranches) Visit(node ast.Node) ast.Visitor { + n, ok := node.(*ast.IfStmt) + if !ok { + return w + } + + if n.Else == nil { + return w + } + branches := []*ast.BlockStmt{n.Body} + + elseBranch, ok := n.Else.(*ast.BlockStmt) + if !ok { // if-else-if construction + return w + } + branches = append(branches, elseBranch) + + if w.identicalBranches(branches) { + w.newFailure(n, "both branches of the if are identical") + } + + return w +} + +func (w *lintIdenticalBranches) identicalBranches(branches []*ast.BlockStmt) bool { + if len(branches) < 2 { + return false + } + + ref := gofmt(branches[0]) + for i := 1; i < len(branches); i++ { + if gofmt(branches[i]) != ref { + return false + } + } + + return true +} + +func (w lintIdenticalBranches) newFailure(node ast.Node, msg string) { + w.onFailure(lint.Failure{ + Confidence: 1, + Node: node, + Category: "logic", + Failure: msg, + }) +} diff --git a/vendor/github.com/mgechev/revive/rule/if-return.go b/vendor/github.com/mgechev/revive/rule/if-return.go new file mode 100644 index 000000000..c275d2766 --- /dev/null +++ b/vendor/github.com/mgechev/revive/rule/if-return.go @@ -0,0 +1,115 @@ +package rule + +import ( + "go/ast" + "go/token" + "strings" + + "github.com/mgechev/revive/lint" +) + +// IfReturnRule lints given else constructs. +type IfReturnRule struct{} + +// Apply applies the rule to given file. 
+func (r *IfReturnRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { + var failures []lint.Failure + + onFailure := func(failure lint.Failure) { + failures = append(failures, failure) + } + + astFile := file.AST + w := &lintElseError{astFile, onFailure} + ast.Walk(w, astFile) + return failures +} + +// Name returns the rule name. +func (r *IfReturnRule) Name() string { + return "if-return" +} + +type lintElseError struct { + file *ast.File + onFailure func(lint.Failure) +} + +func (w *lintElseError) Visit(node ast.Node) ast.Visitor { + switch v := node.(type) { + case *ast.BlockStmt: + for i := 0; i < len(v.List)-1; i++ { + // if var := whatever; var != nil { return var } + s, ok := v.List[i].(*ast.IfStmt) + if !ok || s.Body == nil || len(s.Body.List) != 1 || s.Else != nil { + continue + } + assign, ok := s.Init.(*ast.AssignStmt) + if !ok || len(assign.Lhs) != 1 || !(assign.Tok == token.DEFINE || assign.Tok == token.ASSIGN) { + continue + } + id, ok := assign.Lhs[0].(*ast.Ident) + if !ok { + continue + } + expr, ok := s.Cond.(*ast.BinaryExpr) + if !ok || expr.Op != token.NEQ { + continue + } + if lhs, ok := expr.X.(*ast.Ident); !ok || lhs.Name != id.Name { + continue + } + if rhs, ok := expr.Y.(*ast.Ident); !ok || rhs.Name != "nil" { + continue + } + r, ok := s.Body.List[0].(*ast.ReturnStmt) + if !ok || len(r.Results) != 1 { + continue + } + if r, ok := r.Results[0].(*ast.Ident); !ok || r.Name != id.Name { + continue + } + + // return nil + r, ok = v.List[i+1].(*ast.ReturnStmt) + if !ok || len(r.Results) != 1 { + continue + } + if r, ok := r.Results[0].(*ast.Ident); !ok || r.Name != "nil" { + continue + } + + // check if there are any comments explaining the construct, don't emit an error if there are some. + if containsComments(s.Pos(), r.Pos(), w.file) { + continue + } + + w.onFailure(lint.Failure{ + Confidence: .9, + Node: v.List[i], + Failure: "redundant if ...; err != nil check, just return error instead.", + }) + } + } + return w +} + +func containsComments(start, end token.Pos, f *ast.File) bool { + for _, cgroup := range f.Comments { + comments := cgroup.List + if comments[0].Slash >= end { + // All comments starting with this group are after end pos. + return false + } + if comments[len(comments)-1].Slash < start { + // Comments group ends before start pos. + continue + } + for _, c := range comments { + if start <= c.Slash && c.Slash < end && !strings.HasPrefix(c.Text, "// MATCH ") { + return true + } + } + } + return false +} diff --git a/vendor/github.com/mgechev/revive/rule/import-shadowing.go b/vendor/github.com/mgechev/revive/rule/import-shadowing.go new file mode 100644 index 000000000..7b34c90f3 --- /dev/null +++ b/vendor/github.com/mgechev/revive/rule/import-shadowing.go @@ -0,0 +1,108 @@ +package rule + +import ( + "fmt" + "go/ast" + "go/token" + "strings" + + "github.com/mgechev/revive/lint" +) + +// ImportShadowingRule lints given else constructs. +type ImportShadowingRule struct{} + +// Apply applies the rule to given file. 
+func (r *ImportShadowingRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { + var failures []lint.Failure + + importNames := map[string]struct{}{} + for _, imp := range file.AST.Imports { + importNames[getName(imp)] = struct{}{} + } + + fileAst := file.AST + walker := importShadowing{ + packageNameIdent: fileAst.Name, + importNames: importNames, + onFailure: func(failure lint.Failure) { + failures = append(failures, failure) + }, + alreadySeen: map[*ast.Object]struct{}{}, + } + + ast.Walk(walker, fileAst) + + return failures +} + +// Name returns the rule name. +func (r *ImportShadowingRule) Name() string { + return "import-shadowing" +} + +func getName(imp *ast.ImportSpec) string { + const pathSep = "/" + const strDelim = `"` + if imp.Name != nil { + return imp.Name.Name + } + + path := imp.Path.Value + i := strings.LastIndex(path, pathSep) + if i == -1 { + return strings.Trim(path, strDelim) + } + + return strings.Trim(path[i+1:], strDelim) +} + +type importShadowing struct { + packageNameIdent *ast.Ident + importNames map[string]struct{} + onFailure func(lint.Failure) + alreadySeen map[*ast.Object]struct{} +} + +// Visit visits AST nodes and checks if id nodes (ast.Ident) shadow an import name +func (w importShadowing) Visit(n ast.Node) ast.Visitor { + switch n := n.(type) { + case *ast.AssignStmt: + if n.Tok == token.DEFINE { + return w // analyze variable declarations of the form id := expr + } + + return nil // skip assigns of the form id = expr (not an id declaration) + case *ast.CallExpr, // skip call expressions (not an id declaration) + *ast.ImportSpec, // skip import section subtree because we already have the list of imports + *ast.KeyValueExpr, // skip analysis of key-val expressions ({key:value}): ids of such expressions, even the same of an import name, do not shadow the import name + *ast.ReturnStmt, // skip skipping analysis of returns, ids in expression were already analyzed + *ast.SelectorExpr, // skip analysis of selector expressions (anId.otherId): because if anId shadows an import name, it was already detected, and otherId does not shadows the import name + *ast.StructType: // skip analysis of struct type because struct fields can not shadow an import name + return nil + case *ast.Ident: + if n == w.packageNameIdent { + return nil // skip the ident corresponding to the package name of this file + } + + id := n.Name + if id == "_" { + return w // skip _ id + } + + _, isImportName := w.importNames[id] + _, alreadySeen := w.alreadySeen[n.Obj] + if isImportName && !alreadySeen { + w.onFailure(lint.Failure{ + Confidence: 1, + Node: n, + Category: "namming", + Failure: fmt.Sprintf("The name '%s' shadows an import name", id), + }) + + w.alreadySeen[n.Obj] = struct{}{} + } + } + + return w +} diff --git a/vendor/github.com/mgechev/revive/rule/imports-blacklist.go b/vendor/github.com/mgechev/revive/rule/imports-blacklist.go new file mode 100644 index 000000000..31ef901e5 --- /dev/null +++ b/vendor/github.com/mgechev/revive/rule/imports-blacklist.go @@ -0,0 +1,52 @@ +package rule + +import ( + "fmt" + + "github.com/mgechev/revive/lint" +) + +// ImportsBlacklistRule lints given else constructs. +type ImportsBlacklistRule struct{} + +// Apply applies the rule to given file. 
+func (r *ImportsBlacklistRule) Apply(file *lint.File, arguments lint.Arguments) []lint.Failure { + var failures []lint.Failure + + if file.IsTest() { + return failures // skip, test file + } + + blacklist := make(map[string]bool, len(arguments)) + + for _, arg := range arguments { + argStr, ok := arg.(string) + if !ok { + panic(fmt.Sprintf("Invalid argument to the imports-blacklist rule. Expecting a string, got %T", arg)) + } + // we add quotes if not present, because when parsed, the value of the AST node, will be quoted + if len(argStr) > 2 && argStr[0] != '"' && argStr[len(argStr)-1] != '"' { + argStr = fmt.Sprintf(`"%s"`, argStr) + } + blacklist[argStr] = true + } + + for _, is := range file.AST.Imports { + path := is.Path + if path != nil && blacklist[path.Value] { + failures = append(failures, lint.Failure{ + Confidence: 1, + Failure: "should not use the following blacklisted import: " + path.Value, + Node: is, + Category: "imports", + }) + } + } + + return failures +} + +// Name returns the rule name. +func (r *ImportsBlacklistRule) Name() string { + return "imports-blacklist" +} diff --git a/vendor/github.com/mgechev/revive/rule/increment-decrement.go b/vendor/github.com/mgechev/revive/rule/increment-decrement.go new file mode 100644 index 000000000..5d6b17671 --- /dev/null +++ b/vendor/github.com/mgechev/revive/rule/increment-decrement.go @@ -0,0 +1,74 @@ +package rule + +import ( + "fmt" + "go/ast" + "go/token" + + "github.com/mgechev/revive/lint" +) + +// IncrementDecrementRule lints given else constructs. +type IncrementDecrementRule struct{} + +// Apply applies the rule to given file. +func (r *IncrementDecrementRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { + var failures []lint.Failure + + fileAst := file.AST + walker := lintIncrementDecrement{ + file: file, + onFailure: func(failure lint.Failure) { + failures = append(failures, failure) + }, + } + + ast.Walk(walker, fileAst) + + return failures +} + +// Name returns the rule name. +func (r *IncrementDecrementRule) Name() string { + return "increment-decrement" +} + +type lintIncrementDecrement struct { + file *lint.File + fileAst *ast.File + onFailure func(lint.Failure) +} + +func (w lintIncrementDecrement) Visit(n ast.Node) ast.Visitor { + as, ok := n.(*ast.AssignStmt) + if !ok { + return w + } + if len(as.Lhs) != 1 { + return w + } + if !isOne(as.Rhs[0]) { + return w + } + var suffix string + switch as.Tok { + case token.ADD_ASSIGN: + suffix = "++" + case token.SUB_ASSIGN: + suffix = "--" + default: + return w + } + w.onFailure(lint.Failure{ + Confidence: 0.8, + Node: as, + Category: "unary-op", + Failure: fmt.Sprintf("should replace %s with %s%s", w.file.Render(as), w.file.Render(as.Lhs[0]), suffix), + }) + return w +} + +func isOne(expr ast.Expr) bool { + lit, ok := expr.(*ast.BasicLit) + return ok && lit.Kind == token.INT && lit.Value == "1" +} diff --git a/vendor/github.com/mgechev/revive/rule/indent-error-flow.go b/vendor/github.com/mgechev/revive/rule/indent-error-flow.go new file mode 100644 index 000000000..4c9799b2a --- /dev/null +++ b/vendor/github.com/mgechev/revive/rule/indent-error-flow.go @@ -0,0 +1,78 @@ +package rule + +import ( + "go/ast" + "go/token" + + "github.com/mgechev/revive/lint" +) + +// IndentErrorFlowRule lints given else constructs. +type IndentErrorFlowRule struct{} + +// Apply applies the rule to given file. 
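The increment-decrement walker shown above reports assignments that add or subtract the literal 1 and suggests the ++ or -- form instead. A minimal hypothetical sketch:

package sample

func countPositive(xs []int) int {
	n := 0
	for _, x := range xs {
		if x > 0 {
			n += 1 // reported: should replace n += 1 with n++
		}
	}
	return n
}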
+func (r *IndentErrorFlowRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { + var failures []lint.Failure + + onFailure := func(failure lint.Failure) { + failures = append(failures, failure) + } + + w := lintElse{make(map[*ast.IfStmt]bool), onFailure} + ast.Walk(w, file.AST) + return failures +} + +// Name returns the rule name. +func (r *IndentErrorFlowRule) Name() string { + return "indent-error-flow" +} + +type lintElse struct { + ignore map[*ast.IfStmt]bool + onFailure func(lint.Failure) +} + +func (w lintElse) Visit(node ast.Node) ast.Visitor { + ifStmt, ok := node.(*ast.IfStmt) + if !ok || ifStmt.Else == nil { + return w + } + if w.ignore[ifStmt] { + if elseif, ok := ifStmt.Else.(*ast.IfStmt); ok { + w.ignore[elseif] = true + } + return w + } + if elseif, ok := ifStmt.Else.(*ast.IfStmt); ok { + w.ignore[elseif] = true + return w + } + if _, ok := ifStmt.Else.(*ast.BlockStmt); !ok { + // only care about elses without conditions + return w + } + if len(ifStmt.Body.List) == 0 { + return w + } + shortDecl := false // does the if statement have a ":=" initialization statement? + if ifStmt.Init != nil { + if as, ok := ifStmt.Init.(*ast.AssignStmt); ok && as.Tok == token.DEFINE { + shortDecl = true + } + } + lastStmt := ifStmt.Body.List[len(ifStmt.Body.List)-1] + if _, ok := lastStmt.(*ast.ReturnStmt); ok { + extra := "" + if shortDecl { + extra = " (move short variable declaration to its own line if necessary)" + } + w.onFailure(lint.Failure{ + Confidence: 1, + Node: ifStmt.Else, + Category: "indent", + Failure: "if block ends with a return statement, so drop this else and outdent its block" + extra, + }) + } + return w +} diff --git a/vendor/github.com/mgechev/revive/rule/line-length-limit.go b/vendor/github.com/mgechev/revive/rule/line-length-limit.go new file mode 100644 index 000000000..939ef227f --- /dev/null +++ b/vendor/github.com/mgechev/revive/rule/line-length-limit.go @@ -0,0 +1,82 @@ +package rule + +import ( + "bufio" + "bytes" + "fmt" + "go/token" + "strings" + "unicode/utf8" + + "github.com/mgechev/revive/lint" +) + +// LineLengthLimitRule lints given else constructs. +type LineLengthLimitRule struct{} + +// Apply applies the rule to given file. +func (r *LineLengthLimitRule) Apply(file *lint.File, arguments lint.Arguments) []lint.Failure { + checkNumberOfArguments(1, arguments, r.Name()) + + max, ok := arguments[0].(int64) // Alt. non panicking version + if !ok || max < 0 { + panic(`invalid value passed as argument number to the "line-length-limit" rule`) + } + + var failures []lint.Failure + checker := lintLineLengthNum{ + max: int(max), + file: file, + onFailure: func(failure lint.Failure) { + failures = append(failures, failure) + }, + } + + checker.check() + + return failures +} + +// Name returns the rule name. +func (r *LineLengthLimitRule) Name() string { + return "line-length-limit" +} + +type lintLineLengthNum struct { + max int + file *lint.File + onFailure func(lint.Failure) +} + +func (r lintLineLengthNum) check() { + f := bytes.NewReader(r.file.Content()) + spaces := strings.Repeat(" ", 4) // tab width = 4 + l := 1 + s := bufio.NewScanner(f) + for s.Scan() { + t := s.Text() + t = strings.Replace(t, "\t", spaces, -1) + c := utf8.RuneCountInString(t) + if c > r.max { + r.onFailure(lint.Failure{ + Category: "code-style", + Position: lint.FailurePosition{ + // Offset not set; it is non-trivial, and doesn't appear to be needed. 
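The indent-error-flow walker above reports an else block whose matching if body ends in a return, since the else can be dropped and its block outdented. A small illustrative sketch with hypothetical names:

package sample

import "strconv"

func describe(s string) string {
	if _, err := strconv.Atoi(s); err != nil {
		return "not a number"
	} else { // reported: drop this else and outdent its block
		return "a number"
	}
}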
+ Start: token.Position{ + Filename: r.file.Name, + Line: l, + Column: 0, + }, + End: token.Position{ + Filename: r.file.Name, + Line: l, + Column: c, + }, + }, + Confidence: 1, + Failure: fmt.Sprintf("line is %d characters, out of limit %d", c, r.max), + }) + } + l++ + } +} diff --git a/vendor/github.com/mgechev/revive/rule/max-public-structs.go b/vendor/github.com/mgechev/revive/rule/max-public-structs.go new file mode 100644 index 000000000..aa15628f0 --- /dev/null +++ b/vendor/github.com/mgechev/revive/rule/max-public-structs.go @@ -0,0 +1,69 @@ +package rule + +import ( + "go/ast" + + "strings" + + "github.com/mgechev/revive/lint" +) + +// MaxPublicStructsRule lints given else constructs. +type MaxPublicStructsRule struct{} + +// Apply applies the rule to given file. +func (r *MaxPublicStructsRule) Apply(file *lint.File, arguments lint.Arguments) []lint.Failure { + checkNumberOfArguments(1, arguments, r.Name()) + + max, ok := arguments[0].(int64) // Alt. non panicking version + if !ok { + panic(`invalid value passed as argument number to the "max-public-structs" rule`) + } + + var failures []lint.Failure + + fileAst := file.AST + walker := &lintMaxPublicStructs{ + fileAst: fileAst, + onFailure: func(failure lint.Failure) { + failures = append(failures, failure) + }, + } + + ast.Walk(walker, fileAst) + + if walker.current > max { + walker.onFailure(lint.Failure{ + Failure: "you have exceeded the maximum number of public struct declarations", + Confidence: 1, + Node: fileAst, + Category: "style", + }) + } + + return failures +} + +// Name returns the rule name. +func (r *MaxPublicStructsRule) Name() string { + return "max-public-structs" +} + +type lintMaxPublicStructs struct { + current int64 + fileAst *ast.File + onFailure func(lint.Failure) +} + +func (w *lintMaxPublicStructs) Visit(n ast.Node) ast.Visitor { + switch v := n.(type) { + case *ast.TypeSpec: + name := v.Name.Name + first := string(name[0]) + if strings.ToUpper(first) == first { + w.current++ + } + break + } + return w +} diff --git a/vendor/github.com/mgechev/revive/rule/modifies-param.go b/vendor/github.com/mgechev/revive/rule/modifies-param.go new file mode 100644 index 000000000..55136e6c8 --- /dev/null +++ b/vendor/github.com/mgechev/revive/rule/modifies-param.go @@ -0,0 +1,80 @@ +package rule + +import ( + "fmt" + "go/ast" + + "github.com/mgechev/revive/lint" +) + +// ModifiesParamRule lints given else constructs. +type ModifiesParamRule struct{} + +// Apply applies the rule to given file. +func (r *ModifiesParamRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { + var failures []lint.Failure + + onFailure := func(failure lint.Failure) { + failures = append(failures, failure) + } + + w := lintModifiesParamRule{onFailure: onFailure} + ast.Walk(w, file.AST) + return failures +} + +// Name returns the rule name. 
+func (r *ModifiesParamRule) Name() string { + return "modifies-parameter" +} + +type lintModifiesParamRule struct { + params map[string]bool + onFailure func(lint.Failure) +} + +func retrieveParamNames(pl []*ast.Field) map[string]bool { + result := make(map[string]bool, len(pl)) + for _, p := range pl { + for _, n := range p.Names { + if n.Name == "_" { + continue + } + + result[n.Name] = true + } + } + return result +} + +func (w lintModifiesParamRule) Visit(node ast.Node) ast.Visitor { + switch v := node.(type) { + case *ast.FuncDecl: + w.params = retrieveParamNames(v.Type.Params.List) + case *ast.IncDecStmt: + if id, ok := v.X.(*ast.Ident); ok { + checkParam(id, &w) + } + case *ast.AssignStmt: + lhs := v.Lhs + for _, e := range lhs { + id, ok := e.(*ast.Ident) + if ok { + checkParam(id, &w) + } + } + } + + return w +} + +func checkParam(id *ast.Ident, w *lintModifiesParamRule) { + if w.params[id.Name] { + w.onFailure(lint.Failure{ + Confidence: 0.5, // confidence is low because of shadow variables + Node: id, + Category: "bad practice", + Failure: fmt.Sprintf("parameter '%s' seems to be modified", id), + }) + } +} diff --git a/vendor/github.com/mgechev/revive/rule/modifies-value-receiver.go b/vendor/github.com/mgechev/revive/rule/modifies-value-receiver.go new file mode 100644 index 000000000..4fe22ddf3 --- /dev/null +++ b/vendor/github.com/mgechev/revive/rule/modifies-value-receiver.go @@ -0,0 +1,134 @@ +package rule + +import ( + "go/ast" + "strings" + + "github.com/mgechev/revive/lint" +) + +// ModifiesValRecRule lints assignments to value method-receivers. +type ModifiesValRecRule struct{} + +// Apply applies the rule to given file. +func (r *ModifiesValRecRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { + var failures []lint.Failure + + onFailure := func(failure lint.Failure) { + failures = append(failures, failure) + } + + w := lintModifiesValRecRule{file: file, onFailure: onFailure} + file.Pkg.TypeCheck() + ast.Walk(w, file.AST) + + return failures +} + +// Name returns the rule name. +func (r *ModifiesValRecRule) Name() string { + return "modifies-value-receiver" +} + +type lintModifiesValRecRule struct { + file *lint.File + onFailure func(lint.Failure) +} + +func (w lintModifiesValRecRule) Visit(node ast.Node) ast.Visitor { + switch n := node.(type) { + case *ast.FuncDecl: + if n.Recv == nil { + return nil // skip, not a method + } + + receiver := n.Recv.List[0] + if _, ok := receiver.Type.(*ast.StarExpr); ok { + return nil // skip, method with pointer receiver + } + + if w.skipType(receiver.Type) { + return nil // skip, receiver is a map or array + } + + if len(receiver.Names) < 1 { + return nil // skip, anonymous receiver + } + + receiverName := receiver.Names[0].Name + if receiverName == "_" { + return nil // skip, anonymous receiver + } + + fselect := func(n ast.Node) bool { + // look for assignments with the receiver in the right hand + asgmt, ok := n.(*ast.AssignStmt) + if !ok { + return false + } + + for _, exp := range asgmt.Lhs { + switch e := exp.(type) { + case *ast.IndexExpr: // receiver...[] = ... + continue + case *ast.StarExpr: // *receiver = ... + continue + case *ast.SelectorExpr: // receiver.field = ... + name := w.getNameFromExpr(e.X) + if name == "" || name != receiverName { + continue + } + + if w.skipType(ast.Expr(e.Sel)) { + continue + } + + case *ast.Ident: // receiver := ... 
+ if e.Name != receiverName { + continue + } + default: + continue + } + + return true + } + + return false + } + + assignmentsToReceiver := pick(n.Body, fselect, nil) + + for _, assignment := range assignmentsToReceiver { + w.onFailure(lint.Failure{ + Node: assignment, + Confidence: 1, + Failure: "suspicious assignment to a by-value method receiver", + }) + } + } + + return w +} + +func (w lintModifiesValRecRule) skipType(t ast.Expr) bool { + rt := w.file.Pkg.TypeOf(t) + if rt == nil { + return false + } + + rt = rt.Underlying() + rtName := rt.String() + + // skip when receiver is a map or array + return strings.HasPrefix(rtName, "[]") || strings.HasPrefix(rtName, "map[") +} + +func (lintModifiesValRecRule) getNameFromExpr(ie ast.Expr) string { + ident, ok := ie.(*ast.Ident) + if !ok { + return "" + } + + return ident.Name +} diff --git a/vendor/github.com/mgechev/revive/rule/nested-structs.go b/vendor/github.com/mgechev/revive/rule/nested-structs.go new file mode 100644 index 000000000..cfe9648b2 --- /dev/null +++ b/vendor/github.com/mgechev/revive/rule/nested-structs.go @@ -0,0 +1,61 @@ +package rule + +import ( + "go/ast" + + "github.com/mgechev/revive/lint" +) + +// NestedStructs lints nested structs. +type NestedStructs struct{} + +// Apply applies the rule to given file. +func (r *NestedStructs) Apply(file *lint.File, arguments lint.Arguments) []lint.Failure { + var failures []lint.Failure + + if len(arguments) > 0 { + panic(r.Name() + " doesn't take any arguments") + } + + walker := &lintNestedStructs{ + fileAST: file.AST, + onFailure: func(failure lint.Failure) { + failures = append(failures, failure) + }, + } + + ast.Walk(walker, file.AST) + + return failures +} + +// Name returns the rule name. +func (r *NestedStructs) Name() string { + return "nested-structs" +} + +type lintNestedStructs struct { + fileAST *ast.File + onFailure func(lint.Failure) +} + +func (l *lintNestedStructs) Visit(n ast.Node) ast.Visitor { + switch v := n.(type) { + case *ast.FuncDecl: + if v.Body != nil { + ast.Walk(l, v.Body) + } + return nil + case *ast.Field: + if _, ok := v.Type.(*ast.StructType); ok { + l.onFailure(lint.Failure{ + Failure: "no nested structs are allowed", + Category: "style", + Node: v, + Confidence: 1, + }) + break + } + } + return l +} diff --git a/vendor/github.com/mgechev/revive/rule/package-comments.go b/vendor/github.com/mgechev/revive/rule/package-comments.go new file mode 100644 index 000000000..00fc5bb91 --- /dev/null +++ b/vendor/github.com/mgechev/revive/rule/package-comments.go @@ -0,0 +1,121 @@ +package rule + +import ( + "fmt" + "go/ast" + "go/token" + "strings" + + "github.com/mgechev/revive/lint" +) + +// PackageCommentsRule lints the package comments. It complains if +// there is no package comment, or if it is not of the right form. +// This has a notable false positive in that a package comment +// could rightfully appear in a different file of the same package, +// but that's not easy to fix since this linter is file-oriented. +type PackageCommentsRule struct{} + +// Apply applies the rule to given file. +func (r *PackageCommentsRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { + var failures []lint.Failure + + if isTest(file) { + return failures + } + + onFailure := func(failure lint.Failure) { + failures = append(failures, failure) + } + + fileAst := file.AST + w := &lintPackageComments{fileAst, file, onFailure} + ast.Walk(w, fileAst) + return failures +} + +// Name returns the rule name. 
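The nested-structs rule above reports struct fields declared with an inline struct type; the usual fix is to give the nested type its own name. A minimal hypothetical sketch:

package sample

type config struct {
	// reported: no nested structs are allowed
	server struct {
		host string
		port int
	}
}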
+func (r *PackageCommentsRule) Name() string { + return "package-comments" +} + +type lintPackageComments struct { + fileAst *ast.File + file *lint.File + onFailure func(lint.Failure) +} + +func (l *lintPackageComments) Visit(_ ast.Node) ast.Visitor { + if l.file.IsTest() { + return nil + } + + const ref = styleGuideBase + "#package-comments" + prefix := "Package " + l.fileAst.Name.Name + " " + + // Look for a detached package comment. + // First, scan for the last comment that occurs before the "package" keyword. + var lastCG *ast.CommentGroup + for _, cg := range l.fileAst.Comments { + if cg.Pos() > l.fileAst.Package { + // Gone past "package" keyword. + break + } + lastCG = cg + } + if lastCG != nil && strings.HasPrefix(lastCG.Text(), prefix) { + endPos := l.file.ToPosition(lastCG.End()) + pkgPos := l.file.ToPosition(l.fileAst.Package) + if endPos.Line+1 < pkgPos.Line { + // There isn't a great place to anchor this error; + // the start of the blank lines between the doc and the package statement + // is at least pointing at the location of the problem. + pos := token.Position{ + Filename: endPos.Filename, + // Offset not set; it is non-trivial, and doesn't appear to be needed. + Line: endPos.Line + 1, + Column: 1, + } + l.onFailure(lint.Failure{ + Category: "comments", + Position: lint.FailurePosition{ + Start: pos, + End: pos, + }, + Confidence: 0.9, + Failure: "package comment is detached; there should be no blank lines between it and the package statement", + }) + return nil + } + } + + if l.fileAst.Doc == nil { + l.onFailure(lint.Failure{ + Category: "comments", + Node: l.fileAst, + Confidence: 0.2, + Failure: "should have a package comment, unless it's in another file for this package", + }) + return nil + } + s := l.fileAst.Doc.Text() + if ts := strings.TrimLeft(s, " \t"); ts != s { + l.onFailure(lint.Failure{ + Category: "comments", + Node: l.fileAst.Doc, + Confidence: 1, + Failure: "package comment should not have leading space", + }) + s = ts + } + // Only non-main packages need to keep to this form. + if !l.file.Pkg.IsMain() && !strings.HasPrefix(s, prefix) { + l.onFailure(lint.Failure{ + Category: "comments", + Node: l.fileAst.Doc, + Confidence: 1, + Failure: fmt.Sprintf(`package comment should be of the form "%s..."`, prefix), + }) + } + return nil +} diff --git a/vendor/github.com/mgechev/revive/rule/range-val-address.go b/vendor/github.com/mgechev/revive/rule/range-val-address.go new file mode 100644 index 000000000..bad26e5e9 --- /dev/null +++ b/vendor/github.com/mgechev/revive/rule/range-val-address.go @@ -0,0 +1,144 @@ +package rule + +import ( + "fmt" + "go/ast" + "go/token" + "strings" + + "github.com/mgechev/revive/lint" +) + +// RangeValAddress lints +type RangeValAddress struct{} + +// Apply applies the rule to given file. +func (r *RangeValAddress) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { + var failures []lint.Failure + + walker := rangeValAddress{ + file: file, + onFailure: func(failure lint.Failure) { + failures = append(failures, failure) + }, + } + + file.Pkg.TypeCheck() + ast.Walk(walker, file.AST) + + return failures +} + +// Name returns the rule name. 
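As the logic above shows, the package-comments rule expects a package comment of the form "Package <name> ..." attached directly to the package clause; a blank line in between is reported as a detached comment. A minimal hypothetical example of the accepted form:

// Package cache provides a small in-memory key/value store.
package cache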
+func (r *RangeValAddress) Name() string { + return "range-val-address" +} + +type rangeValAddress struct { + file *lint.File + onFailure func(lint.Failure) +} + +func (w rangeValAddress) Visit(node ast.Node) ast.Visitor { + n, ok := node.(*ast.RangeStmt) + if !ok { + return w + } + + value, ok := n.Value.(*ast.Ident) + if !ok { + return w + } + + valueIsStarExpr := false + if t := w.file.Pkg.TypeOf(value); t != nil { + valueIsStarExpr = strings.HasPrefix(t.String(), "*") + } + + ast.Walk(rangeBodyVisitor{ + valueIsStarExpr: valueIsStarExpr, + valueID: value.Obj, + onFailure: w.onFailure, + }, n.Body) + + return w +} + +type rangeBodyVisitor struct { + valueIsStarExpr bool + valueID *ast.Object + onFailure func(lint.Failure) +} + +func (bw rangeBodyVisitor) Visit(node ast.Node) ast.Visitor { + asgmt, ok := node.(*ast.AssignStmt) + if !ok { + return bw + } + + for _, exp := range asgmt.Lhs { + e, ok := exp.(*ast.IndexExpr) + if !ok { + continue + } + if bw.isAccessingRangeValueAddress(e.Index) { // e.g. a[&value]... + bw.onFailure(bw.newFailure(e.Index)) + } + } + + for _, exp := range asgmt.Rhs { + switch e := exp.(type) { + case *ast.UnaryExpr: // e.g. ...&value, ...&value.id + if bw.isAccessingRangeValueAddress(e) { + bw.onFailure(bw.newFailure(e)) + } + case *ast.CallExpr: + if fun, ok := e.Fun.(*ast.Ident); ok && fun.Name == "append" { // e.g. ...append(arr, &value) + for _, v := range e.Args { + if bw.isAccessingRangeValueAddress(v) { + bw.onFailure(bw.newFailure(e)) + } + } + } + } + } + return bw +} + +func (bw rangeBodyVisitor) isAccessingRangeValueAddress(exp ast.Expr) bool { + u, ok := exp.(*ast.UnaryExpr) + if !ok { + return false + } + + if u.Op != token.AND { + return false + } + + v, ok := u.X.(*ast.Ident) + if !ok { + var s *ast.SelectorExpr + s, ok = u.X.(*ast.SelectorExpr) + if !ok { + return false + } + v, ok = s.X.(*ast.Ident) + if !ok { + return false + } + + if bw.valueIsStarExpr { // check type of value + return false + } + } + + return ok && v.Obj == bw.valueID +} + +func (bw rangeBodyVisitor) newFailure(node ast.Node) lint.Failure { + return lint.Failure{ + Node: node, + Confidence: 1, + Failure: fmt.Sprintf("suspicious assignment of '%s'. range-loop variables always have the same address", bw.valueID.Name), + } +} diff --git a/vendor/github.com/mgechev/revive/rule/range-val-in-closure.go b/vendor/github.com/mgechev/revive/rule/range-val-in-closure.go new file mode 100644 index 000000000..857787be3 --- /dev/null +++ b/vendor/github.com/mgechev/revive/rule/range-val-in-closure.go @@ -0,0 +1,111 @@ +package rule + +import ( + "fmt" + "go/ast" + + "github.com/mgechev/revive/lint" +) + +// RangeValInClosureRule lints given else constructs. +type RangeValInClosureRule struct{} + +// Apply applies the rule to given file. +func (r *RangeValInClosureRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { + var failures []lint.Failure + + walker := rangeValInClosure{ + onFailure: func(failure lint.Failure) { + failures = append(failures, failure) + }, + } + + ast.Walk(walker, file.AST) + + return failures +} + +// Name returns the rule name. +func (r *RangeValInClosureRule) Name() string { + return "range-val-in-closure" +} + +type rangeValInClosure struct { + onFailure func(lint.Failure) +} + +func (w rangeValInClosure) Visit(node ast.Node) ast.Visitor { + + // Find the variables updated by the loop statement. 
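The range-val-address rule above reports taking the address of a range value, for example when appending it to a slice of pointers. A minimal illustrative sketch (hypothetical names):

package sample

func pointers(xs []int) []*int {
	var out []*int
	for _, x := range xs {
		out = append(out, &x) // reported: range-loop variables always have the same address
	}
	return out
}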
+ var vars []*ast.Ident + addVar := func(expr ast.Expr) { + if id, ok := expr.(*ast.Ident); ok { + vars = append(vars, id) + } + } + var body *ast.BlockStmt + switch n := node.(type) { + case *ast.RangeStmt: + body = n.Body + addVar(n.Key) + addVar(n.Value) + case *ast.ForStmt: + body = n.Body + switch post := n.Post.(type) { + case *ast.AssignStmt: + // e.g. for p = head; p != nil; p = p.next + for _, lhs := range post.Lhs { + addVar(lhs) + } + case *ast.IncDecStmt: + // e.g. for i := 0; i < n; i++ + addVar(post.X) + } + } + if vars == nil { + return w + } + + // Inspect a go or defer statement + // if it's the last one in the loop body. + // (We give up if there are following statements, + // because it's hard to prove go isn't followed by wait, + // or defer by return.) + if len(body.List) == 0 { + return w + } + var last *ast.CallExpr + switch s := body.List[len(body.List)-1].(type) { + case *ast.GoStmt: + last = s.Call + case *ast.DeferStmt: + last = s.Call + default: + return w + } + lit, ok := last.Fun.(*ast.FuncLit) + if !ok { + return w + } + if lit.Type == nil { + // Not referring to a variable (e.g. struct field name) + return w + } + ast.Inspect(lit.Body, func(n ast.Node) bool { + id, ok := n.(*ast.Ident) + if !ok || id.Obj == nil { + return true + } + for _, v := range vars { + if v.Obj == id.Obj { + w.onFailure(lint.Failure{ + Confidence: 1, + Failure: fmt.Sprintf("loop variable %v captured by func literal", id.Name), + Node: n, + }) + } + } + return true + }) + return w +} diff --git a/vendor/github.com/mgechev/revive/rule/range.go b/vendor/github.com/mgechev/revive/rule/range.go new file mode 100644 index 000000000..d18492c71 --- /dev/null +++ b/vendor/github.com/mgechev/revive/rule/range.go @@ -0,0 +1,82 @@ +package rule + +import ( + "fmt" + "go/ast" + "strings" + + "github.com/mgechev/revive/lint" +) + +// RangeRule lints given else constructs. +type RangeRule struct{} + +// Apply applies the rule to given file. +func (r *RangeRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { + var failures []lint.Failure + + onFailure := func(failure lint.Failure) { + failures = append(failures, failure) + } + + w := &lintRanges{file, onFailure} + ast.Walk(w, file.AST) + return failures +} + +// Name returns the rule name. +func (r *RangeRule) Name() string { + return "range" +} + +type lintRanges struct { + file *lint.File + onFailure func(lint.Failure) +} + +func (w *lintRanges) Visit(node ast.Node) ast.Visitor { + rs, ok := node.(*ast.RangeStmt) + if !ok { + return w + } + if rs.Value == nil { + // for x = range m { ... } + return w // single var form + } + if !isIdent(rs.Value, "_") { + // for ?, y = range m { ... 
} + return w + } + + newRS := *rs // shallow copy + newRS.Value = nil + + w.onFailure(lint.Failure{ + Failure: fmt.Sprintf("should omit 2nd value from range; this loop is equivalent to `for %s %s range ...`", w.file.Render(rs.Key), rs.Tok), + Confidence: 1, + Node: rs.Value, + ReplacementLine: firstLineOf(w.file, &newRS, rs), + }) + + return w +} + +func firstLineOf(f *lint.File, node, match ast.Node) string { + line := f.Render(node) + if i := strings.Index(line, "\n"); i >= 0 { + line = line[:i] + } + return indentOf(f, match) + line +} + +func indentOf(f *lint.File, node ast.Node) string { + line := srcLine(f.Content(), f.ToPosition(node.Pos())) + for i, r := range line { + switch r { + case ' ', '\t': + default: + return line[:i] + } + } + return line // unusual or empty line +} diff --git a/vendor/github.com/mgechev/revive/rule/receiver-naming.go b/vendor/github.com/mgechev/revive/rule/receiver-naming.go new file mode 100644 index 000000000..589d5f0ef --- /dev/null +++ b/vendor/github.com/mgechev/revive/rule/receiver-naming.go @@ -0,0 +1,81 @@ +package rule + +import ( + "fmt" + "go/ast" + + "github.com/mgechev/revive/lint" +) + +// ReceiverNamingRule lints given else constructs. +type ReceiverNamingRule struct{} + +// Apply applies the rule to given file. +func (r *ReceiverNamingRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { + var failures []lint.Failure + + fileAst := file.AST + walker := lintReceiverName{ + onFailure: func(failure lint.Failure) { + failures = append(failures, failure) + }, + typeReceiver: map[string]string{}, + } + + ast.Walk(walker, fileAst) + + return failures +} + +// Name returns the rule name. +func (r *ReceiverNamingRule) Name() string { + return "receiver-naming" +} + +type lintReceiverName struct { + onFailure func(lint.Failure) + typeReceiver map[string]string +} + +func (w lintReceiverName) Visit(n ast.Node) ast.Visitor { + fn, ok := n.(*ast.FuncDecl) + if !ok || fn.Recv == nil || len(fn.Recv.List) == 0 { + return w + } + names := fn.Recv.List[0].Names + if len(names) < 1 { + return w + } + name := names[0].Name + const ref = styleGuideBase + "#receiver-names" + if name == "_" { + w.onFailure(lint.Failure{ + Node: n, + Confidence: 1, + Category: "naming", + Failure: "receiver name should not be an underscore, omit the name if it is unused", + }) + return w + } + if name == "this" || name == "self" { + w.onFailure(lint.Failure{ + Node: n, + Confidence: 1, + Category: "naming", + Failure: `receiver name should be a reflection of its identity; don't use generic names such as "this" or "self"`, + }) + return w + } + recv := receiverType(fn) + if prev, ok := w.typeReceiver[recv]; ok && prev != name { + w.onFailure(lint.Failure{ + Node: n, + Confidence: 1, + Category: "naming", + Failure: fmt.Sprintf("receiver name %s should be consistent with previous receiver name %s for %s", name, prev, recv), + }) + return w + } + w.typeReceiver[recv] = name + return w +} diff --git a/vendor/github.com/mgechev/revive/rule/redefines-builtin-id.go b/vendor/github.com/mgechev/revive/rule/redefines-builtin-id.go new file mode 100644 index 000000000..947b8aac7 --- /dev/null +++ b/vendor/github.com/mgechev/revive/rule/redefines-builtin-id.go @@ -0,0 +1,145 @@ +package rule + +import ( + "fmt" + "github.com/mgechev/revive/lint" + "go/ast" + "go/token" +) + +// RedefinesBuiltinIDRule warns when a builtin identifier is shadowed. +type RedefinesBuiltinIDRule struct{} + +// Apply applies the rule to given file. 
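Relatedly, the range-val-in-closure walker shown a little earlier reports loop variables captured by a func literal started with go or defer as the last statement of the loop body. A minimal hypothetical sketch:

package sample

import "fmt"

func printAll(xs []string) {
	for i, x := range xs {
		go func() {
			fmt.Println(i, x) // reported: loop variables i and x captured by func literal
		}()
	}
}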
+func (r *RedefinesBuiltinIDRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { + var failures []lint.Failure + + var builtInConstAndVars = map[string]bool{ + "true": true, + "false": true, + "iota": true, + "nil": true, + } + + var builtFunctions = map[string]bool{ + "append": true, + "cap": true, + "close": true, + "complex": true, + "copy": true, + "delete": true, + "imag": true, + "len": true, + "make": true, + "new": true, + "panic": true, + "print": true, + "println": true, + "real": true, + "recover": true, + } + + var builtInTypes = map[string]bool{ + "ComplexType": true, + "FloatType": true, + "IntegerType": true, + "Type": true, + "Type1": true, + "bool": true, + "byte": true, + "complex128": true, + "complex64": true, + "error": true, + "float32": true, + "float64": true, + "int": true, + "int16": true, + "int32": true, + "int64": true, + "int8": true, + "rune": true, + "string": true, + "uint": true, + "uint16": true, + "uint32": true, + "uint64": true, + "uint8": true, + "uintptr": true, + } + + onFailure := func(failure lint.Failure) { + failures = append(failures, failure) + } + + astFile := file.AST + w := &lintRedefinesBuiltinID{builtInConstAndVars, builtFunctions, builtInTypes, onFailure} + ast.Walk(w, astFile) + + return failures +} + +// Name returns the rule name. +func (r *RedefinesBuiltinIDRule) Name() string { + return "redefines-builtin-id" +} + +type lintRedefinesBuiltinID struct { + constsAndVars map[string]bool + funcs map[string]bool + types map[string]bool + onFailure func(lint.Failure) +} + +func (w *lintRedefinesBuiltinID) Visit(node ast.Node) ast.Visitor { + switch n := node.(type) { + case *ast.GenDecl: + if n.Tok != token.TYPE { + return nil // skip if not type declaration + } + typeSpec, ok := n.Specs[0].(*ast.TypeSpec) + if !ok { + return nil + } + id := typeSpec.Name.Name + if w.types[id] { + w.addFailure(n, fmt.Sprintf("redefinition of the built-in type %s", id)) + } + case *ast.FuncDecl: + if n.Recv != nil { + return w // skip methods + } + + id := n.Name.Name + if w.funcs[id] { + w.addFailure(n, fmt.Sprintf("redefinition of the built-in function %s", id)) + } + case *ast.AssignStmt: + for _, e := range n.Lhs { + id, ok := e.(*ast.Ident) + if !ok { + continue + } + + if w.constsAndVars[id.Name] { + var msg string + if n.Tok == token.DEFINE { + msg = fmt.Sprintf("assignment creates a shadow of built-in identifier %s", id.Name) + } else { + msg = fmt.Sprintf("assignment modifies built-in identifier %s", id.Name) + } + w.addFailure(n, msg) + } + } + } + + return w +} + +func (w lintRedefinesBuiltinID) addFailure(node ast.Node, msg string) { + w.onFailure(lint.Failure{ + Confidence: 1, + Node: node, + Category: "logic", + Failure: msg, + }) +} diff --git a/vendor/github.com/mgechev/revive/rule/string-format.go b/vendor/github.com/mgechev/revive/rule/string-format.go new file mode 100644 index 000000000..6017c4180 --- /dev/null +++ b/vendor/github.com/mgechev/revive/rule/string-format.go @@ -0,0 +1,282 @@ +package rule + +import ( + "fmt" + "go/ast" + "go/token" + "regexp" + "strconv" + + "github.com/mgechev/revive/lint" +) + +// #region Revive API + +// StringFormatRule lints strings and/or comments according to a set of regular expressions given as Arguments +type StringFormatRule struct{} + +// Apply applies the rule to the given file. 
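The redefines-builtin-id rule above reports declarations that shadow one of Go's predeclared identifiers. A minimal illustrative sketch (names chosen only to trigger the rule):

package sample

// reported: redefinition of the built-in type error
type error struct {
	msg string
}

// reported: redefinition of the built-in function print
func print(msg string) {
	_ = msg
}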
+func (r *StringFormatRule) Apply(file *lint.File, arguments lint.Arguments) []lint.Failure { + var failures []lint.Failure + + onFailure := func(failure lint.Failure) { + failures = append(failures, failure) + } + + w := lintStringFormatRule{onFailure: onFailure} + w.parseArguments(arguments) + ast.Walk(w, file.AST) + + return failures +} + +// Name returns the rule name. +func (r *StringFormatRule) Name() string { + return "string-format" +} + +// ParseArgumentsTest is a public wrapper around w.parseArguments used for testing. Returns the error message provided to panic, or nil if no error was encountered +func (r *StringFormatRule) ParseArgumentsTest(arguments lint.Arguments) *string { + w := lintStringFormatRule{} + c := make(chan interface{}) + // Parse the arguments in a goroutine, defer a recover() call, return the error encountered (or nil if there was no error) + go func() { + defer func() { + err := recover() + c <- err + }() + w.parseArguments(arguments) + }() + err := <-c + if err != nil { + e := fmt.Sprintf("%s", err) + return &e + } + return nil +} + +// #endregion + +// #region Internal structure + +type lintStringFormatRule struct { + onFailure func(lint.Failure) + + rules []stringFormatSubrule + stringDeclarations map[string]string +} + +type stringFormatSubrule struct { + parent *lintStringFormatRule + scope stringFormatSubruleScope + regexp *regexp.Regexp + errorMessage string +} + +type stringFormatSubruleScope struct { + funcName string // Function name the rule is scoped to + argument int // (optional) Which argument in calls to the function is checked against the rule (the first argument is checked by default) + field string // (optional) If the argument to be checked is a struct, which member of the struct is checked against the rule (top level members only) +} + +// Regex inserted to match valid function/struct field identifiers +const identRegex = "[_A-Za-z][_A-Za-z0-9]*" + +var parseStringFormatScope = regexp.MustCompile( + fmt.Sprintf("^(%s(?:\\.%s)?)(?:\\[([0-9]+)\\](?:\\.(%s))?)?$", identRegex, identRegex, identRegex)) + +// #endregion + +// #region Argument parsing + +func (w *lintStringFormatRule) parseArguments(arguments lint.Arguments) { + for i, argument := range arguments { + scope, regex, errorMessage := w.parseArgument(argument, i) + w.rules = append(w.rules, stringFormatSubrule{ + parent: w, + scope: scope, + regexp: regex, + errorMessage: errorMessage, + }) + } +} + +func (w lintStringFormatRule) parseArgument(argument interface{}, ruleNum int) (scope stringFormatSubruleScope, regex *regexp.Regexp, errorMessage string) { + g, ok := argument.([]interface{}) // Cast to generic slice first + if !ok { + w.configError("argument is not a slice", ruleNum, 0) + } + if len(g) < 2 { + w.configError("less than two slices found in argument, scope and regex are required", ruleNum, len(g)-1) + } + rule := make([]string, len(g)) + for i, obj := range g { + val, ok := obj.(string) + if !ok { + w.configError("unexpected value, string was expected", ruleNum, i) + } + rule[i] = val + } + + // Validate scope and regex length + if len(rule[0]) == 0 { + w.configError("empty scope provided", ruleNum, 0) + } else if len(rule[1]) < 2 { + w.configError("regex is too small (regexes should begin and end with '/')", ruleNum, 1) + } + + // Parse rule scope + scope = stringFormatSubruleScope{} + matches := parseStringFormatScope.FindStringSubmatch(rule[0]) + if matches == nil { + // The rule's scope didn't match the parsing regex at all, probably a configuration error + 
w.parseError("unable to parse rule scope", ruleNum, 0) + } else if len(matches) != 4 { + // The rule's scope matched the parsing regex, but an unexpected number of submatches was returned, probably a bug + w.parseError(fmt.Sprintf("unexpected number of submatches when parsing scope: %d, expected 4", len(matches)), ruleNum, 0) + } + scope.funcName = matches[1] + if len(matches[2]) > 0 { + var err error + scope.argument, err = strconv.Atoi(matches[2]) + if err != nil { + w.parseError("unable to parse argument number in rule scope", ruleNum, 0) + } + } + if len(matches[3]) > 0 { + scope.field = matches[3] + } + + // Strip / characters from the beginning and end of rule[1] before compiling + regex, err := regexp.Compile(rule[1][1 : len(rule[1])-1]) + if err != nil { + w.parseError(fmt.Sprintf("unable to compile %s as regexp", rule[1]), ruleNum, 1) + } + + // Use custom error message if provided + if len(rule) == 3 { + errorMessage = rule[2] + } + return scope, regex, errorMessage +} + +// Report an invalid config, this is specifically the user's fault +func (w lintStringFormatRule) configError(msg string, ruleNum, option int) { + panic(fmt.Sprintf("invalid configuration for string-format: %s [argument %d, option %d]", msg, ruleNum, option)) +} + +// Report a general config parsing failure, this may be the user's fault, but it isn't known for certain +func (w lintStringFormatRule) parseError(msg string, ruleNum, option int) { + panic(fmt.Sprintf("failed to parse configuration for string-format: %s [argument %d, option %d]", msg, ruleNum, option)) +} + +// #endregion + +// #region Node traversal + +func (w lintStringFormatRule) Visit(node ast.Node) ast.Visitor { + // First, check if node is a call expression + call, ok := node.(*ast.CallExpr) + if !ok { + return w + } + + // Get the name of the call expression to check against rule scope + callName, ok := w.getCallName(call) + if !ok { + return w + } + + for _, rule := range w.rules { + if rule.scope.funcName == callName { + rule.Apply(call) + } + } + + return w +} + +// Return the name of a call expression in the form of package.Func or Func +func (w lintStringFormatRule) getCallName(call *ast.CallExpr) (callName string, ok bool) { + if ident, ok := call.Fun.(*ast.Ident); ok { + // Local function call + return ident.Name, true + } + + if selector, ok := call.Fun.(*ast.SelectorExpr); ok { + // Scoped function call + scope, ok := selector.X.(*ast.Ident) + if !ok { + return "", false + } + return scope.Name + "." 
+ selector.Sel.Name, true + } + + return "", false +} + +// #endregion + +// #region Linting logic + +// Apply a single format rule to a call expression (should be done after verifying the that the call expression matches the rule's scope) +func (rule stringFormatSubrule) Apply(call *ast.CallExpr) { + if len(call.Args) <= rule.scope.argument { + return + } + + arg := call.Args[rule.scope.argument] + var lit *ast.BasicLit + if len(rule.scope.field) > 0 { + // Try finding the scope's Field, treating arg as a composite literal + composite, ok := arg.(*ast.CompositeLit) + if !ok { + return + } + for _, el := range composite.Elts { + kv, ok := el.(*ast.KeyValueExpr) + if !ok { + continue + } + key, ok := kv.Key.(*ast.Ident) + if !ok || key.Name != rule.scope.field { + continue + } + + // We're now dealing with the exact field in the rule's scope, so if anything fails, we can safely return instead of continuing the loop + lit, ok = kv.Value.(*ast.BasicLit) + if !ok || lit.Kind != token.STRING { + return + } + } + } else { + var ok bool + // Treat arg as a string literal + lit, ok = arg.(*ast.BasicLit) + if !ok || lit.Kind != token.STRING { + return + } + } + // Unquote the string literal before linting + unquoted := lit.Value[1 : len(lit.Value)-1] + rule.lintMessage(unquoted, lit) +} + +func (rule stringFormatSubrule) lintMessage(s string, node ast.Node) { + // Fail if the string doesn't match the user's regex + if rule.regexp.MatchString(s) { + return + } + var failure string + if len(rule.errorMessage) > 0 { + failure = rule.errorMessage + } else { + failure = fmt.Sprintf("string literal doesn't match user defined regex /%s/", rule.regexp.String()) + } + rule.parent.onFailure(lint.Failure{ + Confidence: 1, + Failure: failure, + Node: node}) +} + +// #endregion diff --git a/vendor/github.com/mgechev/revive/rule/string-of-int.go b/vendor/github.com/mgechev/revive/rule/string-of-int.go new file mode 100644 index 000000000..38f453a4a --- /dev/null +++ b/vendor/github.com/mgechev/revive/rule/string-of-int.go @@ -0,0 +1,95 @@ +package rule + +import ( + "go/ast" + "go/types" + + "github.com/mgechev/revive/lint" +) + +// StringOfIntRule warns when logic expressions contains Boolean literals. +type StringOfIntRule struct{} + +// Apply applies the rule to given file. +func (r *StringOfIntRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { + var failures []lint.Failure + + onFailure := func(failure lint.Failure) { + failures = append(failures, failure) + } + + astFile := file.AST + file.Pkg.TypeCheck() + + w := &lintStringInt{file, onFailure} + ast.Walk(w, astFile) + + return failures +} + +// Name returns the rule name. 
+func (r *StringOfIntRule) Name() string { + return "string-of-int" +} + +type lintStringInt struct { + file *lint.File + onFailure func(lint.Failure) +} + +func (w *lintStringInt) Visit(node ast.Node) ast.Visitor { + ce, ok := node.(*ast.CallExpr) + if !ok { + return w + } + + if !w.isCallStringCast(ce.Fun) { + return w + } + + if !w.isIntExpression(ce.Args) { + return w + } + + w.onFailure(lint.Failure{ + Confidence: 1, + Node: ce, + Failure: "dubious convertion of an integer into a string, use strconv.Itoa", + }) + + return w +} + +func (w *lintStringInt) isCallStringCast(e ast.Expr) bool { + t := w.file.Pkg.TypeOf(e) + if t == nil { + return false + } + + tb, _ := t.Underlying().(*types.Basic) + + return tb != nil && tb.Kind() == types.String +} + +func (w *lintStringInt) isIntExpression(es []ast.Expr) bool { + if len(es) != 1 { + return false + } + + t := w.file.Pkg.TypeOf(es[0]) + if t == nil { + return false + } + + ut, _ := t.Underlying().(*types.Basic) + if ut == nil || ut.Info()&types.IsInteger == 0 { + return false + } + + switch ut.Kind() { + case types.Byte, types.Rune, types.UntypedRune: + return false + } + + return true +} diff --git a/vendor/github.com/mgechev/revive/rule/struct-tag.go b/vendor/github.com/mgechev/revive/rule/struct-tag.go new file mode 100644 index 000000000..cb3818e92 --- /dev/null +++ b/vendor/github.com/mgechev/revive/rule/struct-tag.go @@ -0,0 +1,236 @@ +package rule + +import ( + "fmt" + "go/ast" + "strconv" + "strings" + + "github.com/fatih/structtag" + "github.com/mgechev/revive/lint" +) + +// StructTagRule lints struct tags. +type StructTagRule struct{} + +// Apply applies the rule to given file. +func (r *StructTagRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { + var failures []lint.Failure + + onFailure := func(failure lint.Failure) { + failures = append(failures, failure) + } + + w := lintStructTagRule{onFailure: onFailure} + + ast.Walk(w, file.AST) + + return failures +} + +// Name returns the rule name. +func (r *StructTagRule) Name() string { + return "struct-tag" +} + +type lintStructTagRule struct { + onFailure func(lint.Failure) + usedTagNbr map[string]bool // list of used tag numbers +} + +func (w lintStructTagRule) Visit(node ast.Node) ast.Visitor { + switch n := node.(type) { + case *ast.StructType: + if n.Fields == nil || n.Fields.NumFields() < 1 { + return nil // skip empty structs + } + w.usedTagNbr = map[string]bool{} // init + for _, f := range n.Fields.List { + if f.Tag != nil { + w.checkTaggedField(f) + } + } + } + + return w + +} + +// checkTaggedField checks the tag of the given field. 
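The string-of-int rule above reports a direct string(...) conversion of an integer, which yields the rune with that code point rather than its decimal representation. A minimal hypothetical sketch:

package sample

import "strconv"

func label(id int) string {
	return string(id) // reported: use strconv.Itoa instead
}

func labelFixed(id int) string {
	return strconv.Itoa(id)
}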
+// precondition: the field has a tag +func (w lintStructTagRule) checkTaggedField(f *ast.Field) { + if len(f.Names) > 0 && !f.Names[0].IsExported() { + w.addFailure(f, "tag on not-exported field "+f.Names[0].Name) + } + + tags, err := structtag.Parse(strings.Trim(f.Tag.Value, "`")) + if err != nil || tags == nil { + w.addFailure(f.Tag, "malformed tag") + return + } + + for _, tag := range tags.Tags() { + switch key := tag.Key; key { + case "asn1": + msg, ok := w.checkASN1Tag(f.Type, tag) + if !ok { + w.addFailure(f.Tag, msg) + } + case "bson": + msg, ok := w.checkBSONTag(tag.Options) + if !ok { + w.addFailure(f.Tag, msg) + } + case "default": + if !w.typeValueMatch(f.Type, tag.Name) { + w.addFailure(f.Tag, "field's type and default value's type mismatch") + } + case "json": + msg, ok := w.checkJSONTag(tag.Name, tag.Options) + if !ok { + w.addFailure(f.Tag, msg) + } + case "protobuf": + // Not implemented yet + case "required": + if tag.Name != "true" && tag.Name != "false" { + w.addFailure(f.Tag, "required should be 'true' or 'false'") + } + case "xml": + msg, ok := w.checkXMLTag(tag.Options) + if !ok { + w.addFailure(f.Tag, msg) + } + case "yaml": + msg, ok := w.checkYAMLTag(tag.Options) + if !ok { + w.addFailure(f.Tag, msg) + } + default: + // unknown key + } + } +} + +func (w lintStructTagRule) checkASN1Tag(t ast.Expr, tag *structtag.Tag) (string, bool) { + checkList := append(tag.Options, tag.Name) + for _, opt := range checkList { + switch opt { + case "application", "explicit", "generalized", "ia5", "omitempty", "optional", "set", "utf8": + + default: + if strings.HasPrefix(opt, "tag:") { + parts := strings.Split(opt, ":") + tagNumber := parts[1] + if w.usedTagNbr[tagNumber] { + return fmt.Sprintf("duplicated tag number %s", tagNumber), false + } + w.usedTagNbr[tagNumber] = true + + continue + } + + if strings.HasPrefix(opt, "default:") { + parts := strings.Split(opt, ":") + if len(parts) < 2 { + return "malformed default for ASN1 tag", false + } + if !w.typeValueMatch(t, parts[1]) { + return "field's type and default value's type mismatch", false + } + + continue + } + + return fmt.Sprintf("unknown option '%s' in ASN1 tag", opt), false + } + } + + return "", true +} + +func (w lintStructTagRule) checkBSONTag(options []string) (string, bool) { + for _, opt := range options { + switch opt { + case "inline", "minsize", "omitempty": + default: + return fmt.Sprintf("unknown option '%s' in BSON tag", opt), false + } + } + + return "", true +} + +func (w lintStructTagRule) checkJSONTag(name string, options []string) (string, bool) { + for _, opt := range options { + switch opt { + case "omitempty", "string": + case "": + // special case for JSON key "-" + if name != "-" { + return "option can not be empty in JSON tag", false + } + default: + return fmt.Sprintf("unknown option '%s' in JSON tag", opt), false + } + } + + return "", true +} + +func (w lintStructTagRule) checkXMLTag(options []string) (string, bool) { + for _, opt := range options { + switch opt { + case "any", "attr", "cdata", "chardata", "comment", "innerxml", "omitempty", "typeattr": + default: + return fmt.Sprintf("unknown option '%s' in XML tag", opt), false + } + } + + return "", true +} + +func (w lintStructTagRule) checkYAMLTag(options []string) (string, bool) { + for _, opt := range options { + switch opt { + case "flow", "inline", "omitempty": + default: + return fmt.Sprintf("unknown option '%s' in YAML tag", opt), false + } + } + + return "", true +} + +func (w lintStructTagRule) typeValueMatch(t ast.Expr, val 
string) bool { + tID, ok := t.(*ast.Ident) + if !ok { + return true + } + + typeMatches := true + switch tID.Name { + case "bool": + typeMatches = val == "true" || val == "false" + case "float64": + _, err := strconv.ParseFloat(val, 64) + typeMatches = err == nil + case "int": + _, err := strconv.ParseInt(val, 10, 64) + typeMatches = err == nil + case "string": + case "nil": + default: + // unchecked type + } + + return typeMatches +} + +func (w lintStructTagRule) addFailure(n ast.Node, msg string) { + w.onFailure(lint.Failure{ + Node: n, + Failure: msg, + Confidence: 1, + }) +} diff --git a/vendor/github.com/mgechev/revive/rule/superfluous-else.go b/vendor/github.com/mgechev/revive/rule/superfluous-else.go new file mode 100644 index 000000000..b9ce39606 --- /dev/null +++ b/vendor/github.com/mgechev/revive/rule/superfluous-else.go @@ -0,0 +1,114 @@ +package rule + +import ( + "fmt" + "go/ast" + "go/token" + + "github.com/mgechev/revive/lint" +) + +// SuperfluousElseRule lints given else constructs. +type SuperfluousElseRule struct{} + +// Apply applies the rule to given file. +func (r *SuperfluousElseRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { + var failures []lint.Failure + onFailure := func(failure lint.Failure) { + failures = append(failures, failure) + } + + var branchingFunctions = map[string]map[string]bool{ + "os": {"Exit": true}, + "log": { + "Fatal": true, + "Fatalf": true, + "Fatalln": true, + "Panic": true, + "Panicf": true, + "Panicln": true, + }, + } + + w := lintSuperfluousElse{make(map[*ast.IfStmt]bool), onFailure, branchingFunctions} + ast.Walk(w, file.AST) + return failures +} + +// Name returns the rule name. +func (r *SuperfluousElseRule) Name() string { + return "superfluous-else" +} + +type lintSuperfluousElse struct { + ignore map[*ast.IfStmt]bool + onFailure func(lint.Failure) + branchingFunctions map[string]map[string]bool +} + +func (w lintSuperfluousElse) Visit(node ast.Node) ast.Visitor { + ifStmt, ok := node.(*ast.IfStmt) + if !ok || ifStmt.Else == nil { + return w + } + if w.ignore[ifStmt] { + if elseif, ok := ifStmt.Else.(*ast.IfStmt); ok { + w.ignore[elseif] = true + } + return w + } + if elseif, ok := ifStmt.Else.(*ast.IfStmt); ok { + w.ignore[elseif] = true + return w + } + if _, ok := ifStmt.Else.(*ast.BlockStmt); !ok { + // only care about elses without conditions + return w + } + if len(ifStmt.Body.List) == 0 { + return w + } + shortDecl := false // does the if statement have a ":=" initialization statement? 
+ if ifStmt.Init != nil { + if as, ok := ifStmt.Init.(*ast.AssignStmt); ok && as.Tok == token.DEFINE { + shortDecl = true + } + } + extra := "" + if shortDecl { + extra = " (move short variable declaration to its own line if necessary)" + } + + lastStmt := ifStmt.Body.List[len(ifStmt.Body.List)-1] + switch stmt := lastStmt.(type) { + case *ast.BranchStmt: + token := stmt.Tok.String() + if token != "fallthrough" { + w.onFailure(newFailure(ifStmt.Else, "if block ends with a "+token+" statement, so drop this else and outdent its block"+extra)) + } + case *ast.ExprStmt: + if ce, ok := stmt.X.(*ast.CallExpr); ok { // it's a function call + if fc, ok := ce.Fun.(*ast.SelectorExpr); ok { + if id, ok := fc.X.(*ast.Ident); ok { + fn := fc.Sel.Name + pkg := id.Name + if w.branchingFunctions[pkg][fn] { // it's a call to a branching function + w.onFailure( + newFailure(ifStmt.Else, fmt.Sprintf("if block ends with call to %s.%s function, so drop this else and outdent its block%s", pkg, fn, extra))) + } + } + } + } + } + + return w +} + +func newFailure(node ast.Node, msg string) lint.Failure { + return lint.Failure{ + Confidence: 1, + Node: node, + Category: "indent", + Failure: msg, + } +} diff --git a/vendor/github.com/mgechev/revive/rule/time-naming.go b/vendor/github.com/mgechev/revive/rule/time-naming.go new file mode 100644 index 000000000..a93f4b5ae --- /dev/null +++ b/vendor/github.com/mgechev/revive/rule/time-naming.go @@ -0,0 +1,93 @@ +package rule + +import ( + "fmt" + "go/ast" + "go/types" + "strings" + + "github.com/mgechev/revive/lint" +) + +// TimeNamingRule lints given else constructs. +type TimeNamingRule struct{} + +// Apply applies the rule to given file. +func (r *TimeNamingRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { + var failures []lint.Failure + + onFailure := func(failure lint.Failure) { + failures = append(failures, failure) + } + + w := &lintTimeNames{file, onFailure} + + file.Pkg.TypeCheck() + ast.Walk(w, file.AST) + return failures +} + +// Name returns the rule name. +func (r *TimeNamingRule) Name() string { + return "time-naming" +} + +type lintTimeNames struct { + file *lint.File + onFailure func(lint.Failure) +} + +func (w *lintTimeNames) Visit(node ast.Node) ast.Visitor { + v, ok := node.(*ast.ValueSpec) + if !ok { + return w + } + for _, name := range v.Names { + origTyp := w.file.Pkg.TypeOf(name) + // Look for time.Duration or *time.Duration; + // the latter is common when using flag.Duration. + typ := origTyp + if pt, ok := typ.(*types.Pointer); ok { + typ = pt.Elem() + } + if !isNamedType(typ, "time", "Duration") { + continue + } + suffix := "" + for _, suf := range timeSuffixes { + if strings.HasSuffix(name.Name, suf) { + suffix = suf + break + } + } + if suffix == "" { + continue + } + w.onFailure(lint.Failure{ + Category: "time", + Confidence: 0.9, + Node: v, + Failure: fmt.Sprintf("var %s is of type %v; don't use unit-specific suffix %q", name.Name, origTyp, suffix), + }) + } + return w +} + +// timeSuffixes is a list of name suffixes that imply a time unit. +// This is not an exhaustive list. 
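The time-naming rule above reports time.Duration variables whose names repeat a time unit, since the type already carries it. A minimal illustrative sketch (hypothetical names):

package sample

import "time"

// reported: pollIntervalSecs is of type time.Duration; don't use the unit-specific suffix "Secs"
var pollIntervalSecs = 30 * time.Second

// not reported: no unit suffix in the name
var pollInterval = 30 * time.Second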
+var timeSuffixes = []string{ + "Sec", "Secs", "Seconds", + "Msec", "Msecs", + "Milli", "Millis", "Milliseconds", + "Usec", "Usecs", "Microseconds", + "MS", "Ms", +} + +func isNamedType(typ types.Type, importPath, name string) bool { + n, ok := typ.(*types.Named) + if !ok { + return false + } + tn := n.Obj() + return tn != nil && tn.Pkg() != nil && tn.Pkg().Path() == importPath && tn.Name() == name +} diff --git a/vendor/github.com/mgechev/revive/rule/unconditional-recursion.go b/vendor/github.com/mgechev/revive/rule/unconditional-recursion.go new file mode 100644 index 000000000..d4da01574 --- /dev/null +++ b/vendor/github.com/mgechev/revive/rule/unconditional-recursion.go @@ -0,0 +1,183 @@ +package rule + +import ( + "go/ast" + + "github.com/mgechev/revive/lint" +) + +// UnconditionalRecursionRule lints given else constructs. +type UnconditionalRecursionRule struct{} + +// Apply applies the rule to given file. +func (r *UnconditionalRecursionRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { + var failures []lint.Failure + + onFailure := func(failure lint.Failure) { + failures = append(failures, failure) + } + + w := lintUnconditionalRecursionRule{onFailure: onFailure} + ast.Walk(w, file.AST) + return failures +} + +// Name returns the rule name. +func (r *UnconditionalRecursionRule) Name() string { + return "unconditional-recursion" +} + +type funcDesc struct { + reciverID *ast.Ident + id *ast.Ident +} + +func (fd *funcDesc) equal(other *funcDesc) bool { + receiversAreEqual := (fd.reciverID == nil && other.reciverID == nil) || fd.reciverID != nil && other.reciverID != nil && fd.reciverID.Name == other.reciverID.Name + idsAreEqual := (fd.id == nil && other.id == nil) || fd.id.Name == other.id.Name + + return receiversAreEqual && idsAreEqual +} + +type funcStatus struct { + funcDesc *funcDesc + seenConditionalExit bool +} + +type lintUnconditionalRecursionRule struct { + onFailure func(lint.Failure) + currentFunc *funcStatus +} + +// Visit will traverse the file AST. +// The rule is based in the following algorithm: inside each function body we search for calls to the function itself. +// We do not search inside conditional control structures (if, for, switch, ...) because any recursive call inside them is conditioned +// We do search inside conditional control structures are statements that will take the control out of the function (return, exit, panic) +// If we find conditional control exits, it means the function is NOT unconditionally-recursive +// If we find a recursive call before finding any conditional exit, a failure is generated +// In resume: if we found a recursive call control-dependant from the entry point of the function then we raise a failure. 
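Following the algorithm described above, a recursive call that is not preceded by any conditional exit is reported. A minimal hypothetical sketch:

package sample

// reported: no conditional exit guards the recursive call
func sum(xs []int) int {
	return xs[0] + sum(xs[1:])
}

// not reported: the recursive call only happens after a conditional return
func sumChecked(xs []int) int {
	if len(xs) == 0 {
		return 0
	}
	return xs[0] + sumChecked(xs[1:])
}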
+func (w lintUnconditionalRecursionRule) Visit(node ast.Node) ast.Visitor { + switch n := node.(type) { + case *ast.FuncDecl: + var rec *ast.Ident + switch { + case n.Recv == nil || n.Recv.NumFields() < 1 || len(n.Recv.List[0].Names) < 1: + rec = nil + default: + rec = n.Recv.List[0].Names[0] + } + + w.currentFunc = &funcStatus{&funcDesc{rec, n.Name}, false} + case *ast.CallExpr: + var funcID *ast.Ident + var selector *ast.Ident + switch c := n.Fun.(type) { + case *ast.Ident: + selector = nil + funcID = c + case *ast.SelectorExpr: + var ok bool + selector, ok = c.X.(*ast.Ident) + if !ok { // a.b....Foo() + return nil + } + funcID = c.Sel + default: + return w + } + + if w.currentFunc != nil && // not in a func body + !w.currentFunc.seenConditionalExit && // there is a conditional exit in the function + w.currentFunc.funcDesc.equal(&funcDesc{selector, funcID}) { + w.onFailure(lint.Failure{ + Category: "logic", + Confidence: 1, + Node: n, + Failure: "unconditional recursive call", + }) + } + case *ast.IfStmt: + w.updateFuncStatus(n.Body) + w.updateFuncStatus(n.Else) + return nil + case *ast.SelectStmt: + w.updateFuncStatus(n.Body) + return nil + case *ast.RangeStmt: + w.updateFuncStatus(n.Body) + return nil + case *ast.TypeSwitchStmt: + w.updateFuncStatus(n.Body) + return nil + case *ast.SwitchStmt: + w.updateFuncStatus(n.Body) + return nil + case *ast.GoStmt: + for _, a := range n.Call.Args { + ast.Walk(w, a) // check if arguments have a recursive call + } + return nil // recursive async call is not an issue + case *ast.ForStmt: + if n.Cond != nil { + return nil + } + // unconditional loop + return w + } + + return w +} + +func (w *lintUnconditionalRecursionRule) updateFuncStatus(node ast.Node) { + if node == nil || w.currentFunc == nil || w.currentFunc.seenConditionalExit { + return + } + + w.currentFunc.seenConditionalExit = w.hasControlExit(node) +} + +var exitFunctions = map[string]map[string]bool{ + "os": {"Exit": true}, + "syscall": {"Exit": true}, + "log": { + "Fatal": true, + "Fatalf": true, + "Fatalln": true, + "Panic": true, + "Panicf": true, + "Panicln": true, + }, +} + +func (w *lintUnconditionalRecursionRule) hasControlExit(node ast.Node) bool { + // isExit returns true if the given node makes control exit the function + isExit := func(node ast.Node) bool { + switch n := node.(type) { + case *ast.ReturnStmt: + return true + case *ast.CallExpr: + if isIdent(n.Fun, "panic") { + return true + } + se, ok := n.Fun.(*ast.SelectorExpr) + if !ok { + return false + } + + id, ok := se.X.(*ast.Ident) + if !ok { + return false + } + + fn := se.Sel.Name + pkg := id.Name + if exitFunctions[pkg] != nil && exitFunctions[pkg][fn] { // it's a call to an exit function + return true + } + } + + return false + } + + return len(pick(node, isExit, nil)) != 0 +} diff --git a/vendor/github.com/mgechev/revive/rule/unexported-naming.go b/vendor/github.com/mgechev/revive/rule/unexported-naming.go new file mode 100644 index 000000000..96cec3e46 --- /dev/null +++ b/vendor/github.com/mgechev/revive/rule/unexported-naming.go @@ -0,0 +1,115 @@ +package rule + +import ( + "fmt" + "go/ast" + "go/token" + + "github.com/mgechev/revive/lint" +) + +// UnexportedNamingRule lints wrongly named unexported symbols. +type UnexportedNamingRule struct{} + +// Apply applies the rule to given file. 
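The unexported-naming rule introduced above reports local symbols (parameters, results, and short variable declarations) whose names start with an upper-case letter. A minimal illustrative sketch with hypothetical names:

package sample

func shout(s string) string {
	Result := s + "!" // reported: the symbol Result is local, its name should start with a lowercase letter
	return Result
}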
+func (r *UnexportedNamingRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { + var failures []lint.Failure + onFailure := func(failure lint.Failure) { + failures = append(failures, failure) + } + + ba := &unexportablenamingLinter{onFailure} + ast.Walk(ba, file.AST) + + return failures +} + +// Name returns the rule name. +func (r *UnexportedNamingRule) Name() string { + return "unexported-naming" +} + +type unexportablenamingLinter struct { + onFailure func(lint.Failure) +} + +func (unl unexportablenamingLinter) Visit(node ast.Node) ast.Visitor { + switch n := node.(type) { + case *ast.FuncDecl: + unl.lintFunction(n.Type, n.Body) + return nil + case *ast.FuncLit: + unl.lintFunction(n.Type, n.Body) + + return nil + case *ast.AssignStmt: + if n.Tok != token.DEFINE { + return nil + } + + ids := []*ast.Ident{} + for _, e := range n.Lhs { + id, ok := e.(*ast.Ident) + if !ok { + continue + } + ids = append(ids, id) + } + + unl.lintIDs(ids) + + case *ast.DeclStmt: + gd, ok := n.Decl.(*ast.GenDecl) + if !ok { + return nil + } + + if len(gd.Specs) < 1 { + return nil + } + + vs, ok := gd.Specs[0].(*ast.ValueSpec) + if !ok { + return nil + } + + unl.lintIDs(vs.Names) + } + + return unl +} + +func (unl unexportablenamingLinter) lintFunction(ft *ast.FuncType, body *ast.BlockStmt) { + unl.lintFields(ft.Params) + unl.lintFields(ft.Results) + + if body != nil { + ast.Walk(unl, body) + } +} + +func (unl unexportablenamingLinter) lintFields(fields *ast.FieldList) { + if fields == nil { + return + } + + ids := []*ast.Ident{} + for _, field := range fields.List { + ids = append(ids, field.Names...) + } + + unl.lintIDs(ids) +} + +func (unl unexportablenamingLinter) lintIDs(ids []*ast.Ident) { + for _, id := range ids { + if id.IsExported() { + unl.onFailure(lint.Failure{ + Node: id, + Confidence: 1, + Category: "naming", + Failure: fmt.Sprintf("the symbol %s is local, its name should start with a lowercase letter", id.String()), + }) + } + } +} diff --git a/vendor/github.com/mgechev/revive/rule/unexported-return.go b/vendor/github.com/mgechev/revive/rule/unexported-return.go new file mode 100644 index 000000000..c9c8a41d3 --- /dev/null +++ b/vendor/github.com/mgechev/revive/rule/unexported-return.go @@ -0,0 +1,106 @@ +package rule + +import ( + "fmt" + "go/ast" + "go/types" + + "github.com/mgechev/revive/lint" +) + +// UnexportedReturnRule lints given else constructs. +type UnexportedReturnRule struct{} + +// Apply applies the rule to given file. +func (r *UnexportedReturnRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { + var failures []lint.Failure + + fileAst := file.AST + walker := lintUnexportedReturn{ + file: file, + fileAst: fileAst, + onFailure: func(failure lint.Failure) { + failures = append(failures, failure) + }, + } + + file.Pkg.TypeCheck() + ast.Walk(walker, fileAst) + + return failures +} + +// Name returns the rule name. +func (r *UnexportedReturnRule) Name() string { + return "unexported-return" +} + +type lintUnexportedReturn struct { + file *lint.File + fileAst *ast.File + onFailure func(lint.Failure) +} + +func (w lintUnexportedReturn) Visit(n ast.Node) ast.Visitor { + fn, ok := n.(*ast.FuncDecl) + if !ok { + return w + } + if fn.Type.Results == nil { + return nil + } + if !fn.Name.IsExported() { + return nil + } + thing := "func" + if fn.Recv != nil && len(fn.Recv.List) > 0 { + thing = "method" + if !ast.IsExported(receiverType(fn)) { + // Don't report exported methods of unexported types, + // such as private implementations of sort.Interface. 
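+			// (illustrative example: `func (c cache) Entries() []entry` on the
+			// unexported type `cache` is skipped here, even though it returns
+			// the unexported type `entry`)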
+ return nil + } + } + for _, ret := range fn.Type.Results.List { + typ := w.file.Pkg.TypeOf(ret.Type) + if exportedType(typ) { + continue + } + w.onFailure(lint.Failure{ + Category: "unexported-type-in-api", + Node: ret.Type, + Confidence: 0.8, + Failure: fmt.Sprintf("exported %s %s returns unexported type %s, which can be annoying to use", + thing, fn.Name.Name, typ), + }) + break // only flag one + } + return nil +} + +// exportedType reports whether typ is an exported type. +// It is imprecise, and will err on the side of returning true, +// such as for composite types. +func exportedType(typ types.Type) bool { + switch T := typ.(type) { + case *types.Named: + obj := T.Obj() + switch { + // Builtin types have no package. + case obj.Pkg() == nil: + case obj.Exported(): + default: + _, ok := T.Underlying().(*types.Interface) + return ok + } + return true + case *types.Map: + return exportedType(T.Key()) && exportedType(T.Elem()) + case interface { + Elem() types.Type + }: // array, slice, pointer, chan + return exportedType(T.Elem()) + } + // Be conservative about other types, such as struct, interface, etc. + return true +} diff --git a/vendor/github.com/mgechev/revive/rule/unhandled-error.go b/vendor/github.com/mgechev/revive/rule/unhandled-error.go new file mode 100644 index 000000000..0e2f62875 --- /dev/null +++ b/vendor/github.com/mgechev/revive/rule/unhandled-error.go @@ -0,0 +1,120 @@ +package rule + +import ( + "fmt" + "go/ast" + "go/types" + + "github.com/mgechev/revive/lint" +) + +// UnhandledErrorRule lints given else constructs. +type UnhandledErrorRule struct{} + +type ignoreListType map[string]struct{} + +// Apply applies the rule to given file. +func (r *UnhandledErrorRule) Apply(file *lint.File, args lint.Arguments) []lint.Failure { + var failures []lint.Failure + + ignoreList := make(ignoreListType, len(args)) + + for _, arg := range args { + argStr, ok := arg.(string) + if !ok { + panic(fmt.Sprintf("Invalid argument to the unhandled-error rule. Expecting a string, got %T", arg)) + } + + ignoreList[argStr] = struct{}{} + } + + walker := &lintUnhandledErrors{ + ignoreList: ignoreList, + pkg: file.Pkg, + onFailure: func(failure lint.Failure) { + failures = append(failures, failure) + }, + } + + file.Pkg.TypeCheck() + ast.Walk(walker, file.AST) + + return failures +} + +// Name returns the rule name. +func (r *UnhandledErrorRule) Name() string { + return "unhandled-error" +} + +type lintUnhandledErrors struct { + ignoreList ignoreListType + pkg *lint.Package + onFailure func(lint.Failure) +} + +// Visit looks for statements that are function calls. +// If the called function returns a value of type error a failure will be created. 
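+// For example (illustrative, not from the upstream docs; `path` is hypothetical):
+//
+//	os.Remove(path)        // flagged: the returned error is dropped
+//	err := os.Remove(path) // not flagged: the result is assigned, not ignored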
+func (w *lintUnhandledErrors) Visit(node ast.Node) ast.Visitor { + switch n := node.(type) { + case *ast.ExprStmt: + fCall, ok := n.X.(*ast.CallExpr) + if !ok { + return nil // not a function call + } + + funcType := w.pkg.TypeOf(fCall) + if funcType == nil { + return nil // skip, type info not available + } + + switch t := funcType.(type) { + case *types.Named: + if !w.isTypeError(t) { + return nil // func call does not return an error + } + + w.addFailure(fCall) + default: + retTypes, ok := funcType.Underlying().(*types.Tuple) + if !ok { + return nil // skip, unable to retrieve return type of the called function + } + + if w.returnsAnError(retTypes) { + w.addFailure(fCall) + } + } + } + return w +} + +func (w *lintUnhandledErrors) addFailure(n *ast.CallExpr) { + funcName := gofmt(n.Fun) + if _, mustIgnore := w.ignoreList[funcName]; mustIgnore { + return + } + + w.onFailure(lint.Failure{ + Category: "bad practice", + Confidence: 1, + Node: n, + Failure: fmt.Sprintf("Unhandled error in call to function %v", funcName), + }) +} + +func (*lintUnhandledErrors) isTypeError(t *types.Named) bool { + const errorTypeName = "_.error" + + return t.Obj().Id() == errorTypeName +} + +func (w *lintUnhandledErrors) returnsAnError(tt *types.Tuple) bool { + for i := 0; i < tt.Len(); i++ { + nt, ok := tt.At(i).Type().(*types.Named) + if ok && w.isTypeError(nt) { + return true + } + } + return false +} diff --git a/vendor/github.com/mgechev/revive/rule/unnecessary-stmt.go b/vendor/github.com/mgechev/revive/rule/unnecessary-stmt.go new file mode 100644 index 000000000..732d8a8bb --- /dev/null +++ b/vendor/github.com/mgechev/revive/rule/unnecessary-stmt.go @@ -0,0 +1,107 @@ +package rule + +import ( + "go/ast" + "go/token" + + "github.com/mgechev/revive/lint" +) + +// UnnecessaryStmtRule warns on unnecessary statements. +type UnnecessaryStmtRule struct{} + +// Apply applies the rule to given file. +func (r *UnnecessaryStmtRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { + var failures []lint.Failure + onFailure := func(failure lint.Failure) { + failures = append(failures, failure) + } + + w := lintUnnecessaryStmtRule{onFailure} + ast.Walk(w, file.AST) + return failures +} + +// Name returns the rule name. 
+func (r *UnnecessaryStmtRule) Name() string { + return "unnecessary-stmt" +} + +type lintUnnecessaryStmtRule struct { + onFailure func(lint.Failure) +} + +func (w lintUnnecessaryStmtRule) Visit(node ast.Node) ast.Visitor { + switch n := node.(type) { + case *ast.FuncDecl: + if n.Body == nil || n.Type.Results != nil { + return w + } + stmts := n.Body.List + if len(stmts) == 0 { + return w + } + + lastStmt := stmts[len(stmts)-1] + rs, ok := lastStmt.(*ast.ReturnStmt) + if !ok { + return w + } + + if len(rs.Results) == 0 { + w.newFailure(lastStmt, "omit unnecessary return statement") + } + + case *ast.SwitchStmt: + w.checkSwitchBody(n.Body) + case *ast.TypeSwitchStmt: + w.checkSwitchBody(n.Body) + case *ast.CaseClause: + if n.Body == nil { + return w + } + stmts := n.Body + if len(stmts) == 0 { + return w + } + + lastStmt := stmts[len(stmts)-1] + rs, ok := lastStmt.(*ast.BranchStmt) + if !ok { + return w + } + + if rs.Tok == token.BREAK && rs.Label == nil { + w.newFailure(lastStmt, "omit unnecessary break at the end of case clause") + } + } + + return w +} + +func (w lintUnnecessaryStmtRule) checkSwitchBody(b *ast.BlockStmt) { + cases := b.List + if len(cases) != 1 { + return + } + + cc, ok := cases[0].(*ast.CaseClause) + if !ok { + return + } + + if len(cc.List) > 1 { // skip cases with multiple expressions + return + } + + w.newFailure(b, "switch with only one case can be replaced by an if-then") +} + +func (w lintUnnecessaryStmtRule) newFailure(node ast.Node, msg string) { + w.onFailure(lint.Failure{ + Confidence: 1, + Node: node, + Category: "style", + Failure: msg, + }) +} diff --git a/vendor/github.com/mgechev/revive/rule/unreachable-code.go b/vendor/github.com/mgechev/revive/rule/unreachable-code.go new file mode 100644 index 000000000..5472feaa9 --- /dev/null +++ b/vendor/github.com/mgechev/revive/rule/unreachable-code.go @@ -0,0 +1,114 @@ +package rule + +import ( + "go/ast" + + "github.com/mgechev/revive/lint" +) + +// UnreachableCodeRule lints unreachable code. +type UnreachableCodeRule struct{} + +// Apply applies the rule to given file. +func (r *UnreachableCodeRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { + var failures []lint.Failure + onFailure := func(failure lint.Failure) { + failures = append(failures, failure) + } + + var branchingFunctions = map[string]map[string]bool{ + "os": {"Exit": true}, + "log": { + "Fatal": true, + "Fatalf": true, + "Fatalln": true, + "Panic": true, + "Panicf": true, + "Panicln": true, + }, + } + + w := lintUnreachableCode{onFailure, branchingFunctions} + ast.Walk(w, file.AST) + return failures +} + +// Name returns the rule name. 
+func (r *UnreachableCodeRule) Name() string { + return "unreachable-code" +} + +type lintUnreachableCode struct { + onFailure func(lint.Failure) + branchingFunctions map[string]map[string]bool +} + +func (w lintUnreachableCode) Visit(node ast.Node) ast.Visitor { + blk, ok := node.(*ast.BlockStmt) + if !ok { + return w + } + + if len(blk.List) < 2 { + return w + } +loop: + for i, stmt := range blk.List[:len(blk.List)-1] { + // println("iterating ", len(blk.List)) + next := blk.List[i+1] + if _, ok := next.(*ast.LabeledStmt); ok { + continue // skip if next statement is labeled + } + + switch s := stmt.(type) { + case *ast.ReturnStmt: + w.onFailure(newUnreachableCodeFailure(s)) + break loop + case *ast.BranchStmt: + token := s.Tok.String() + if token != "fallthrough" { + w.onFailure(newUnreachableCodeFailure(s)) + break loop + } + case *ast.ExprStmt: + ce, ok := s.X.(*ast.CallExpr) + if !ok { + continue + } + // it's a function call + fc, ok := ce.Fun.(*ast.SelectorExpr) + if !ok { + continue + } + + id, ok := fc.X.(*ast.Ident) + + if !ok { + continue + } + fn := fc.Sel.Name + pkg := id.Name + if !w.branchingFunctions[pkg][fn] { // it isn't a call to a branching function + continue + } + + if _, ok := next.(*ast.ReturnStmt); ok { // return statement needed to satisfy function signature + continue + } + + w.onFailure(newUnreachableCodeFailure(s)) + break loop + } + } + + return w +} + +func newUnreachableCodeFailure(node ast.Node) lint.Failure { + return lint.Failure{ + Confidence: 1, + Node: node, + Category: "logic", + Failure: "unreachable code after this statement", + } +} diff --git a/vendor/github.com/mgechev/revive/rule/unused-param.go b/vendor/github.com/mgechev/revive/rule/unused-param.go new file mode 100644 index 000000000..60df908d3 --- /dev/null +++ b/vendor/github.com/mgechev/revive/rule/unused-param.go @@ -0,0 +1,102 @@ +package rule + +import ( + "fmt" + "go/ast" + + "github.com/mgechev/revive/lint" +) + +// UnusedParamRule lints unused params in functions. +type UnusedParamRule struct{} + +// Apply applies the rule to given file. +func (r *UnusedParamRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { + var failures []lint.Failure + + onFailure := func(failure lint.Failure) { + failures = append(failures, failure) + } + + w := lintUnusedParamRule{onFailure: onFailure} + + ast.Walk(w, file.AST) + + return failures +} + +// Name returns the rule name. 
+func (r *UnusedParamRule) Name() string { + return "unused-parameter" +} + +type lintUnusedParamRule struct { + onFailure func(lint.Failure) +} + +func (w lintUnusedParamRule) Visit(node ast.Node) ast.Visitor { + switch n := node.(type) { + case *ast.FuncDecl: + params := retrieveNamedParams(n.Type.Params) + if len(params) < 1 { + return nil // skip, func without parameters + } + + if n.Body == nil { + return nil // skip, is a function prototype + } + + // inspect the func body looking for references to parameters + fselect := func(n ast.Node) bool { + ident, isAnID := n.(*ast.Ident) + + if !isAnID { + return false + } + + _, isAParam := params[ident.Obj] + if isAParam { + params[ident.Obj] = false // mark as used + } + + return false + } + _ = pick(n.Body, fselect, nil) + + for _, p := range n.Type.Params.List { + for _, n := range p.Names { + if params[n.Obj] { + w.onFailure(lint.Failure{ + Confidence: 1, + Node: n, + Category: "bad practice", + Failure: fmt.Sprintf("parameter '%s' seems to be unused, consider removing or renaming it as _", n.Name), + }) + } + } + } + + return nil // full method body already inspected + } + + return w +} + +func retrieveNamedParams(params *ast.FieldList) map[*ast.Object]bool { + result := map[*ast.Object]bool{} + if params.List == nil { + return result + } + + for _, p := range params.List { + for _, n := range p.Names { + if n.Name == "_" { + continue + } + + result[n.Obj] = true + } + } + + return result +} diff --git a/vendor/github.com/mgechev/revive/rule/unused-receiver.go b/vendor/github.com/mgechev/revive/rule/unused-receiver.go new file mode 100644 index 000000000..2289a517e --- /dev/null +++ b/vendor/github.com/mgechev/revive/rule/unused-receiver.go @@ -0,0 +1,77 @@ +package rule + +import ( + "fmt" + "go/ast" + + "github.com/mgechev/revive/lint" +) + +// UnusedReceiverRule lints unused params in functions. +type UnusedReceiverRule struct{} + +// Apply applies the rule to given file. +func (*UnusedReceiverRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { + var failures []lint.Failure + + onFailure := func(failure lint.Failure) { + failures = append(failures, failure) + } + + w := lintUnusedReceiverRule{onFailure: onFailure} + + ast.Walk(w, file.AST) + + return failures +} + +// Name returns the rule name. +func (*UnusedReceiverRule) Name() string { + return "unused-receiver" +} + +type lintUnusedReceiverRule struct { + onFailure func(lint.Failure) +} + +func (w lintUnusedReceiverRule) Visit(node ast.Node) ast.Visitor { + switch n := node.(type) { + case *ast.FuncDecl: + if n.Recv == nil { + return nil // skip this func decl, not a method + } + + rec := n.Recv.List[0] // safe to access only the first (unique) element of the list + if len(rec.Names) < 1 { + return nil // the receiver is anonymous: func (aType) Foo(...) ... 
+ } + + recID := rec.Names[0] + if recID.Name == "_" { + return nil // the receiver is already named _ + } + + // inspect the func body looking for references to the receiver id + fselect := func(n ast.Node) bool { + ident, isAnID := n.(*ast.Ident) + + return isAnID && ident.Obj == recID.Obj + } + refs2recID := pick(n.Body, fselect, nil) + + if len(refs2recID) > 0 { + return nil // the receiver is referenced in the func body + } + + w.onFailure(lint.Failure{ + Confidence: 1, + Node: recID, + Category: "bad practice", + Failure: fmt.Sprintf("method receiver '%s' is not referenced in method's body, consider removing or renaming it as _", recID.Name), + }) + + return nil // full method body already inspected + } + + return w +} diff --git a/vendor/github.com/mgechev/revive/rule/useless-break.go b/vendor/github.com/mgechev/revive/rule/useless-break.go new file mode 100644 index 000000000..a631bd1e1 --- /dev/null +++ b/vendor/github.com/mgechev/revive/rule/useless-break.go @@ -0,0 +1,82 @@ +package rule + +import ( + "go/ast" + "go/token" + + "github.com/mgechev/revive/lint" +) + +// UselessBreak lint rule. +type UselessBreak struct{} + +// Apply applies the rule to given file. +func (r *UselessBreak) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { + var failures []lint.Failure + + onFailure := func(failure lint.Failure) { + failures = append(failures, failure) + } + + astFile := file.AST + w := &lintUselessBreak{onFailure, false} + ast.Walk(w, astFile) + return failures +} + +// Name returns the rule name. +func (r *UselessBreak) Name() string { + return "useless-break" +} + +type lintUselessBreak struct { + onFailure func(lint.Failure) + inLoopBody bool +} + +func (w *lintUselessBreak) Visit(node ast.Node) ast.Visitor { + switch v := node.(type) { + case *ast.ForStmt: + w.inLoopBody = true + ast.Walk(w, v.Body) + w.inLoopBody = false + return nil + case *ast.RangeStmt: + w.inLoopBody = true + ast.Walk(w, v.Body) + w.inLoopBody = false + return nil + case *ast.CommClause: + for _, n := range v.Body { + w.inspectCaseStatement(n) + } + return nil + case *ast.CaseClause: + for _, n := range v.Body { + w.inspectCaseStatement(n) + } + return nil + } + return w +} + +func (w *lintUselessBreak) inspectCaseStatement(n ast.Stmt) { + switch s := n.(type) { + case *ast.BranchStmt: + if s.Tok != token.BREAK { + return // not a break statement + } + if s.Label != nil { + return // labeled break statement, usually affects a nesting loop + } + msg := "useless break in case clause" + if w.inLoopBody { + msg += " (WARN: this break statement affects this switch or select statement and not the loop enclosing it)" + } + w.onFailure(lint.Failure{ + Confidence: 1, + Node: s, + Failure: msg, + }) + } +} diff --git a/vendor/github.com/mgechev/revive/rule/utils.go b/vendor/github.com/mgechev/revive/rule/utils.go new file mode 100644 index 000000000..0d5744846 --- /dev/null +++ b/vendor/github.com/mgechev/revive/rule/utils.go @@ -0,0 +1,199 @@ +package rule + +import ( + "bytes" + "fmt" + "go/ast" + "go/printer" + "go/token" + "go/types" + "regexp" + "strings" + + "github.com/mgechev/revive/lint" +) + +const styleGuideBase = "https://golang.org/wiki/CodeReviewComments" + +// isBlank returns whether id is the blank identifier "_". +// If id == nil, the answer is false. 
+func isBlank(id *ast.Ident) bool { return id != nil && id.Name == "_" } + +func isTest(f *lint.File) bool { + return strings.HasSuffix(f.Name, "_test.go") +} + +var commonMethods = map[string]bool{ + "Error": true, + "Read": true, + "ServeHTTP": true, + "String": true, + "Write": true, + "Unwrap": true, +} + +func receiverType(fn *ast.FuncDecl) string { + switch e := fn.Recv.List[0].Type.(type) { + case *ast.Ident: + return e.Name + case *ast.StarExpr: + if id, ok := e.X.(*ast.Ident); ok { + return id.Name + } + } + // The parser accepts much more than just the legal forms. + return "invalid-type" +} + +var knownNameExceptions = map[string]bool{ + "LastInsertId": true, // must match database/sql + "kWh": true, +} + +func isCgoExported(f *ast.FuncDecl) bool { + if f.Recv != nil || f.Doc == nil { + return false + } + + cgoExport := regexp.MustCompile(fmt.Sprintf("(?m)^//export %s$", regexp.QuoteMeta(f.Name.Name))) + for _, c := range f.Doc.List { + if cgoExport.MatchString(c.Text) { + return true + } + } + return false +} + +var allCapsRE = regexp.MustCompile(`^[A-Z0-9_]+$`) + +func isIdent(expr ast.Expr, ident string) bool { + id, ok := expr.(*ast.Ident) + return ok && id.Name == ident +} + +var zeroLiteral = map[string]bool{ + "false": true, // bool + // runes + `'\x00'`: true, + `'\000'`: true, + // strings + `""`: true, + "``": true, + // numerics + "0": true, + "0.": true, + "0.0": true, + "0i": true, +} + +func validType(T types.Type) bool { + return T != nil && + T != types.Typ[types.Invalid] && + !strings.Contains(T.String(), "invalid type") // good but not foolproof +} + +func isPkgDot(expr ast.Expr, pkg, name string) bool { + sel, ok := expr.(*ast.SelectorExpr) + return ok && isIdent(sel.X, pkg) && isIdent(sel.Sel, name) +} + +func srcLine(src []byte, p token.Position) string { + // Run to end of line in both directions if not at line start/end. + lo, hi := p.Offset, p.Offset+1 + for lo > 0 && src[lo-1] != '\n' { + lo-- + } + for hi < len(src) && src[hi-1] != '\n' { + hi++ + } + return string(src[lo:hi]) +} + +// pick yields a list of nodes by picking them from a sub-ast with root node n. +// Nodes are selected by applying the fselect function +// f function is applied to each selected node before inseting it in the final result. +// If f==nil then it defaults to the identity function (ie it returns the node itself) +func pick(n ast.Node, fselect func(n ast.Node) bool, f func(n ast.Node) []ast.Node) []ast.Node { + var result []ast.Node + + if n == nil { + return result + } + + if f == nil { + f = func(n ast.Node) []ast.Node { return []ast.Node{n} } + } + + onSelect := func(n ast.Node) { + result = append(result, f(n)...) + } + p := picker{fselect: fselect, onSelect: onSelect} + ast.Walk(p, n) + return result +} + +func pickFromExpList(l []ast.Expr, fselect func(n ast.Node) bool, f func(n ast.Node) []ast.Node) []ast.Node { + result := make([]ast.Node, 0) + for _, e := range l { + result = append(result, pick(e, fselect, f)...) 
+ } + return result +} + +type picker struct { + fselect func(n ast.Node) bool + onSelect func(n ast.Node) +} + +func (p picker) Visit(node ast.Node) ast.Visitor { + if p.fselect == nil { + return nil + } + + if p.fselect(node) { + p.onSelect(node) + } + + return p +} + +// isBoolOp returns true if the given token corresponds to +// a bool operator +func isBoolOp(t token.Token) bool { + switch t { + case token.LAND, token.LOR, token.EQL, token.NEQ: + return true + } + + return false +} + +const ( + trueName = "true" + falseName = "false" +) + +func isExprABooleanLit(n ast.Node) (lexeme string, ok bool) { + oper, ok := n.(*ast.Ident) + + if !ok { + return "", false + } + + return oper.Name, (oper.Name == trueName || oper.Name == falseName) +} + +// gofmt returns a string representation of an AST subtree. +func gofmt(x interface{}) string { + buf := bytes.Buffer{} + fs := token.NewFileSet() + printer.Fprint(&buf, fs, x) + return buf.String() +} + +// checkNumberOfArguments fails if the given number of arguments is not, at least, the expected one +func checkNumberOfArguments(expected int, args lint.Arguments, ruleName string) { + if len(args) < expected { + panic(fmt.Sprintf("not enough arguments for %s rule, expected %d, got %d. Please check the rule's documentation", ruleName, expected, len(args))) + } +} diff --git a/vendor/github.com/mgechev/revive/rule/var-declarations.go b/vendor/github.com/mgechev/revive/rule/var-declarations.go new file mode 100644 index 000000000..441132115 --- /dev/null +++ b/vendor/github.com/mgechev/revive/rule/var-declarations.go @@ -0,0 +1,120 @@ +package rule + +import ( + "fmt" + "go/ast" + "go/token" + "go/types" + + "github.com/mgechev/revive/lint" +) + +// VarDeclarationsRule lints given else constructs. +type VarDeclarationsRule struct{} + +// Apply applies the rule to given file. +func (r *VarDeclarationsRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { + var failures []lint.Failure + + fileAst := file.AST + walker := &lintVarDeclarations{ + file: file, + fileAst: fileAst, + onFailure: func(failure lint.Failure) { + failures = append(failures, failure) + }, + } + + file.Pkg.TypeCheck() + ast.Walk(walker, fileAst) + + return failures +} + +// Name returns the rule name. +func (r *VarDeclarationsRule) Name() string { + return "var-declaration" +} + +type lintVarDeclarations struct { + fileAst *ast.File + file *lint.File + lastGen *ast.GenDecl + onFailure func(lint.Failure) +} + +func (w *lintVarDeclarations) Visit(node ast.Node) ast.Visitor { + switch v := node.(type) { + case *ast.GenDecl: + if v.Tok != token.CONST && v.Tok != token.VAR { + return nil + } + w.lastGen = v + return w + case *ast.ValueSpec: + if w.lastGen.Tok == token.CONST { + return nil + } + if len(v.Names) > 1 || v.Type == nil || len(v.Values) == 0 { + return nil + } + rhs := v.Values[0] + // An underscore var appears in a common idiom for compile-time interface satisfaction, + // as in "var _ Interface = (*Concrete)(nil)". + if isIdent(v.Names[0], "_") { + return nil + } + // If the RHS is a zero value, suggest dropping it. 
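+		// (e.g. `var s string = ""` or `var p *T = nil` would be reported,
+		// suggesting `var s string` and `var p *T` instead)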
+ zero := false + if lit, ok := rhs.(*ast.BasicLit); ok { + zero = zeroLiteral[lit.Value] + } else if isIdent(rhs, "nil") { + zero = true + } + if zero { + w.onFailure(lint.Failure{ + Confidence: 0.9, + Node: rhs, + Category: "zero-value", + Failure: fmt.Sprintf("should drop = %s from declaration of var %s; it is the zero value", w.file.Render(rhs), v.Names[0]), + }) + return nil + } + lhsTyp := w.file.Pkg.TypeOf(v.Type) + rhsTyp := w.file.Pkg.TypeOf(rhs) + + if !validType(lhsTyp) || !validType(rhsTyp) { + // Type checking failed (often due to missing imports). + return nil + } + + if !types.Identical(lhsTyp, rhsTyp) { + // Assignment to a different type is not redundant. + return nil + } + + // The next three conditions are for suppressing the warning in situations + // where we were unable to typecheck. + + // If the LHS type is an interface, don't warn, since it is probably a + // concrete type on the RHS. Note that our feeble lexical check here + // will only pick up interface{} and other literal interface types; + // that covers most of the cases we care to exclude right now. + if _, ok := v.Type.(*ast.InterfaceType); ok { + return nil + } + // If the RHS is an untyped const, only warn if the LHS type is its default type. + if defType, ok := w.file.IsUntypedConst(rhs); ok && !isIdent(v.Type, defType) { + return nil + } + + w.onFailure(lint.Failure{ + Category: "type-inference", + Confidence: 0.8, + Node: v.Type, + Failure: fmt.Sprintf("should omit type %s from declaration of var %s; it will be inferred from the right-hand side", w.file.Render(v.Type), v.Names[0]), + }) + return nil + } + return w +} diff --git a/vendor/github.com/mgechev/revive/rule/var-naming.go b/vendor/github.com/mgechev/revive/rule/var-naming.go new file mode 100644 index 000000000..768f65b96 --- /dev/null +++ b/vendor/github.com/mgechev/revive/rule/var-naming.go @@ -0,0 +1,230 @@ +package rule + +import ( + "fmt" + "go/ast" + "go/token" + "strings" + + "github.com/mgechev/revive/lint" +) + +// VarNamingRule lints given else constructs. +type VarNamingRule struct{} + +// Apply applies the rule to given file. +func (r *VarNamingRule) Apply(file *lint.File, arguments lint.Arguments) []lint.Failure { + var failures []lint.Failure + + var whitelist []string + var blacklist []string + + if len(arguments) >= 1 { + whitelist = getList(arguments[0], "whitelist") + } + + if len(arguments) >= 2 { + blacklist = getList(arguments[1], "blacklist") + } + + fileAst := file.AST + walker := lintNames{ + file: file, + fileAst: fileAst, + whitelist: whitelist, + blacklist: blacklist, + onFailure: func(failure lint.Failure) { + failures = append(failures, failure) + }, + } + + // Package names need slightly different handling than other names. + if strings.Contains(walker.fileAst.Name.Name, "_") && !strings.HasSuffix(walker.fileAst.Name.Name, "_test") { + walker.onFailure(lint.Failure{ + Failure: "don't use an underscore in package name", + Confidence: 1, + Node: walker.fileAst, + Category: "naming", + }) + } + + ast.Walk(&walker, fileAst) + + return failures +} + +// Name returns the rule name. 
+func (r *VarNamingRule) Name() string { + return "var-naming" +} + +func checkList(fl *ast.FieldList, thing string, w *lintNames) { + if fl == nil { + return + } + for _, f := range fl.List { + for _, id := range f.Names { + check(id, thing, w) + } + } +} + +func check(id *ast.Ident, thing string, w *lintNames) { + if id.Name == "_" { + return + } + if knownNameExceptions[id.Name] { + return + } + + // Handle two common styles from other languages that don't belong in Go. + if len(id.Name) >= 5 && allCapsRE.MatchString(id.Name) && strings.Contains(id.Name, "_") { + w.onFailure(lint.Failure{ + Failure: "don't use ALL_CAPS in Go names; use CamelCase", + Confidence: 0.8, + Node: id, + Category: "naming", + }) + return + } + if len(id.Name) > 2 && id.Name[0] == 'k' && id.Name[1] >= 'A' && id.Name[1] <= 'Z' { + should := string(id.Name[1]+'a'-'A') + id.Name[2:] + w.onFailure(lint.Failure{ + Failure: fmt.Sprintf("don't use leading k in Go names; %s %s should be %s", thing, id.Name, should), + Confidence: 0.8, + Node: id, + Category: "naming", + }) + } + + should := lint.Name(id.Name, w.whitelist, w.blacklist) + if id.Name == should { + return + } + + if len(id.Name) > 2 && strings.Contains(id.Name[1:], "_") { + w.onFailure(lint.Failure{ + Failure: fmt.Sprintf("don't use underscores in Go names; %s %s should be %s", thing, id.Name, should), + Confidence: 0.9, + Node: id, + Category: "naming", + }) + return + } + w.onFailure(lint.Failure{ + Failure: fmt.Sprintf("%s %s should be %s", thing, id.Name, should), + Confidence: 0.8, + Node: id, + Category: "naming", + }) +} + +type lintNames struct { + file *lint.File + fileAst *ast.File + lastGen *ast.GenDecl + genDeclMissingComments map[*ast.GenDecl]bool + onFailure func(lint.Failure) + whitelist []string + blacklist []string +} + +func (w *lintNames) Visit(n ast.Node) ast.Visitor { + switch v := n.(type) { + case *ast.AssignStmt: + if v.Tok == token.ASSIGN { + return w + } + for _, exp := range v.Lhs { + if id, ok := exp.(*ast.Ident); ok { + check(id, "var", w) + } + } + case *ast.FuncDecl: + if w.file.IsTest() && (strings.HasPrefix(v.Name.Name, "Example") || strings.HasPrefix(v.Name.Name, "Test") || strings.HasPrefix(v.Name.Name, "Benchmark")) { + return w + } + + thing := "func" + if v.Recv != nil { + thing = "method" + } + + // Exclude naming warnings for functions that are exported to C but + // not exported in the Go API. + // See https://github.com/golang/lint/issues/144. + if ast.IsExported(v.Name.Name) || !isCgoExported(v) { + check(v.Name, thing, w) + } + + checkList(v.Type.Params, thing+" parameter", w) + checkList(v.Type.Results, thing+" result", w) + case *ast.GenDecl: + if v.Tok == token.IMPORT { + return w + } + var thing string + switch v.Tok { + case token.CONST: + thing = "const" + case token.TYPE: + thing = "type" + case token.VAR: + thing = "var" + } + for _, spec := range v.Specs { + switch s := spec.(type) { + case *ast.TypeSpec: + check(s.Name, thing, w) + case *ast.ValueSpec: + for _, id := range s.Names { + check(id, thing, w) + } + } + } + case *ast.InterfaceType: + // Do not check interface method names. + // They are often constrainted by the method names of concrete types. 
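+		// (illustrative: a method such as `Fetch_all(ctx context.Context) error`
+		// declared in an interface keeps its name; only its parameter and result
+		// names are checked below)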
+ for _, x := range v.Methods.List { + ft, ok := x.Type.(*ast.FuncType) + if !ok { // might be an embedded interface name + continue + } + checkList(ft.Params, "interface method parameter", w) + checkList(ft.Results, "interface method result", w) + } + case *ast.RangeStmt: + if v.Tok == token.ASSIGN { + return w + } + if id, ok := v.Key.(*ast.Ident); ok { + check(id, "range var", w) + } + if id, ok := v.Value.(*ast.Ident); ok { + check(id, "range var", w) + } + case *ast.StructType: + for _, f := range v.Fields.List { + for _, id := range f.Names { + check(id, "struct field", w) + } + } + } + return w +} + +func getList(arg interface{}, argName string) []string { + temp, ok := arg.([]interface{}) + if !ok { + panic(fmt.Sprintf("Invalid argument to the var-naming rule. Expecting a %s of type slice with initialisms, got %T", argName, arg)) + } + var list []string + for _, v := range temp { + if val, ok := v.(string); ok { + list = append(list, val) + } else { + panic(fmt.Sprintf("Invalid %s values of the var-naming rule. Expecting slice of strings but got element of type %T", val, arg)) + } + } + return list +} diff --git a/vendor/github.com/mgechev/revive/rule/waitgroup-by-value.go b/vendor/github.com/mgechev/revive/rule/waitgroup-by-value.go new file mode 100644 index 000000000..b86929136 --- /dev/null +++ b/vendor/github.com/mgechev/revive/rule/waitgroup-by-value.go @@ -0,0 +1,66 @@ +package rule + +import ( + "go/ast" + + "github.com/mgechev/revive/lint" +) + +// WaitGroupByValueRule lints sync.WaitGroup passed by copy in functions. +type WaitGroupByValueRule struct{} + +// Apply applies the rule to given file. +func (r *WaitGroupByValueRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { + var failures []lint.Failure + + onFailure := func(failure lint.Failure) { + failures = append(failures, failure) + } + + w := lintWaitGroupByValueRule{onFailure: onFailure} + ast.Walk(w, file.AST) + return failures +} + +// Name returns the rule name. 
+func (r *WaitGroupByValueRule) Name() string { + return "waitgroup-by-value" +} + +type lintWaitGroupByValueRule struct { + onFailure func(lint.Failure) +} + +func (w lintWaitGroupByValueRule) Visit(node ast.Node) ast.Visitor { + // look for function declarations + fd, ok := node.(*ast.FuncDecl) + if !ok { + return w + } + + // Check all function's parameters + for _, field := range fd.Type.Params.List { + if !w.isWaitGroup(field.Type) { + continue + } + + w.onFailure(lint.Failure{ + Confidence: 1, + Node: field, + Failure: "sync.WaitGroup passed by value, the function will get a copy of the original one", + }) + } + + return nil +} + +func (lintWaitGroupByValueRule) isWaitGroup(ft ast.Expr) bool { + se, ok := ft.(*ast.SelectorExpr) + if !ok { + return false + } + + x, _ := se.X.(*ast.Ident) + sel := se.Sel.Name + return x.Name == "sync" && sel == "WaitGroup" +} diff --git a/vendor/github.com/mitchellh/go-homedir/LICENSE b/vendor/github.com/mitchellh/go-homedir/LICENSE new file mode 100644 index 000000000..f9c841a51 --- /dev/null +++ b/vendor/github.com/mitchellh/go-homedir/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2013 Mitchell Hashimoto + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/vendor/github.com/mitchellh/go-homedir/README.md b/vendor/github.com/mitchellh/go-homedir/README.md new file mode 100644 index 000000000..d70706d5b --- /dev/null +++ b/vendor/github.com/mitchellh/go-homedir/README.md @@ -0,0 +1,14 @@ +# go-homedir + +This is a Go library for detecting the user's home directory without +the use of cgo, so the library can be used in cross-compilation environments. + +Usage is incredibly simple, just call `homedir.Dir()` to get the home directory +for a user, and `homedir.Expand()` to expand the `~` in a path to the home +directory. + +**Why not just use `os/user`?** The built-in `os/user` package requires +cgo on Darwin systems. This means that any Go code that uses that package +cannot cross compile. But 99% of the time the use for `os/user` is just to +retrieve the home directory, which we can do for the current user without +cgo. This library does that, enabling cross-compilation. 
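+
+A minimal usage sketch (not part of the upstream README; the path and error
+handling are illustrative):
+
+```go
+package main
+
+import (
+	"fmt"
+	"log"
+
+	"github.com/mitchellh/go-homedir"
+)
+
+func main() {
+	// Detect the current user's home directory without cgo.
+	dir, err := homedir.Dir()
+	if err != nil {
+		log.Fatal(err)
+	}
+	fmt.Println("home:", dir)
+
+	// Expand a leading "~" into the home directory.
+	expanded, err := homedir.Expand("~/config.yml")
+	if err != nil {
+		log.Fatal(err)
+	}
+	fmt.Println("expanded:", expanded)
+}
+```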
diff --git a/vendor/github.com/mitchellh/go-homedir/go.mod b/vendor/github.com/mitchellh/go-homedir/go.mod new file mode 100644 index 000000000..7efa09a04 --- /dev/null +++ b/vendor/github.com/mitchellh/go-homedir/go.mod @@ -0,0 +1 @@ +module github.com/mitchellh/go-homedir diff --git a/vendor/github.com/mitchellh/go-homedir/homedir.go b/vendor/github.com/mitchellh/go-homedir/homedir.go new file mode 100644 index 000000000..25378537e --- /dev/null +++ b/vendor/github.com/mitchellh/go-homedir/homedir.go @@ -0,0 +1,167 @@ +package homedir + +import ( + "bytes" + "errors" + "os" + "os/exec" + "path/filepath" + "runtime" + "strconv" + "strings" + "sync" +) + +// DisableCache will disable caching of the home directory. Caching is enabled +// by default. +var DisableCache bool + +var homedirCache string +var cacheLock sync.RWMutex + +// Dir returns the home directory for the executing user. +// +// This uses an OS-specific method for discovering the home directory. +// An error is returned if a home directory cannot be detected. +func Dir() (string, error) { + if !DisableCache { + cacheLock.RLock() + cached := homedirCache + cacheLock.RUnlock() + if cached != "" { + return cached, nil + } + } + + cacheLock.Lock() + defer cacheLock.Unlock() + + var result string + var err error + if runtime.GOOS == "windows" { + result, err = dirWindows() + } else { + // Unix-like system, so just assume Unix + result, err = dirUnix() + } + + if err != nil { + return "", err + } + homedirCache = result + return result, nil +} + +// Expand expands the path to include the home directory if the path +// is prefixed with `~`. If it isn't prefixed with `~`, the path is +// returned as-is. +func Expand(path string) (string, error) { + if len(path) == 0 { + return path, nil + } + + if path[0] != '~' { + return path, nil + } + + if len(path) > 1 && path[1] != '/' && path[1] != '\\' { + return "", errors.New("cannot expand user-specific home dir") + } + + dir, err := Dir() + if err != nil { + return "", err + } + + return filepath.Join(dir, path[1:]), nil +} + +// Reset clears the cache, forcing the next call to Dir to re-detect +// the home directory. This generally never has to be called, but can be +// useful in tests if you're modifying the home directory via the HOME +// env var or something. +func Reset() { + cacheLock.Lock() + defer cacheLock.Unlock() + homedirCache = "" +} + +func dirUnix() (string, error) { + homeEnv := "HOME" + if runtime.GOOS == "plan9" { + // On plan9, env vars are lowercase. + homeEnv = "home" + } + + // First prefer the HOME environmental variable + if home := os.Getenv(homeEnv); home != "" { + return home, nil + } + + var stdout bytes.Buffer + + // If that fails, try OS specific commands + if runtime.GOOS == "darwin" { + cmd := exec.Command("sh", "-c", `dscl -q . -read /Users/"$(whoami)" NFSHomeDirectory | sed 's/^[^ ]*: //'`) + cmd.Stdout = &stdout + if err := cmd.Run(); err == nil { + result := strings.TrimSpace(stdout.String()) + if result != "" { + return result, nil + } + } + } else { + cmd := exec.Command("getent", "passwd", strconv.Itoa(os.Getuid())) + cmd.Stdout = &stdout + if err := cmd.Run(); err != nil { + // If the error is ErrNotFound, we ignore it. Otherwise, return it. 
+ if err != exec.ErrNotFound { + return "", err + } + } else { + if passwd := strings.TrimSpace(stdout.String()); passwd != "" { + // username:password:uid:gid:gecos:home:shell + passwdParts := strings.SplitN(passwd, ":", 7) + if len(passwdParts) > 5 { + return passwdParts[5], nil + } + } + } + } + + // If all else fails, try the shell + stdout.Reset() + cmd := exec.Command("sh", "-c", "cd && pwd") + cmd.Stdout = &stdout + if err := cmd.Run(); err != nil { + return "", err + } + + result := strings.TrimSpace(stdout.String()) + if result == "" { + return "", errors.New("blank output when reading home directory") + } + + return result, nil +} + +func dirWindows() (string, error) { + // First prefer the HOME environmental variable + if home := os.Getenv("HOME"); home != "" { + return home, nil + } + + // Prefer standard environment variable USERPROFILE + if home := os.Getenv("USERPROFILE"); home != "" { + return home, nil + } + + drive := os.Getenv("HOMEDRIVE") + path := os.Getenv("HOMEPATH") + home := drive + path + if drive == "" || path == "" { + return "", errors.New("HOMEDRIVE, HOMEPATH, or USERPROFILE are blank") + } + + return home, nil +} diff --git a/vendor/github.com/mitchellh/mapstructure/CHANGELOG.md b/vendor/github.com/mitchellh/mapstructure/CHANGELOG.md new file mode 100644 index 000000000..1955f2878 --- /dev/null +++ b/vendor/github.com/mitchellh/mapstructure/CHANGELOG.md @@ -0,0 +1,73 @@ +## unreleased + +* Fix regression where `*time.Time` value would be set to empty and not be sent + to decode hooks properly [GH-232] + +## 1.4.0 + +* A new decode hook type `DecodeHookFuncValue` has been added that has + access to the full values. [GH-183] +* Squash is now supported with embedded fields that are struct pointers [GH-205] +* Empty strings will convert to 0 for all numeric types when weakly decoding [GH-206] + +## 1.3.3 + +* Decoding maps from maps creates a settable value for decode hooks [GH-203] + +## 1.3.2 + +* Decode into interface type with a struct value is supported [GH-187] + +## 1.3.1 + +* Squash should only squash embedded structs. [GH-194] + +## 1.3.0 + +* Added `",omitempty"` support. This will ignore zero values in the source + structure when encoding. [GH-145] + +## 1.2.3 + +* Fix duplicate entries in Keys list with pointer values. [GH-185] + +## 1.2.2 + +* Do not add unsettable (unexported) values to the unused metadata key + or "remain" value. [GH-150] + +## 1.2.1 + +* Go modules checksum mismatch fix + +## 1.2.0 + +* Added support to capture unused values in a field using the `",remain"` value + in the mapstructure tag. There is an example to showcase usage. +* Added `DecoderConfig` option to always squash embedded structs +* `json.Number` can decode into `uint` types +* Empty slices are preserved and not replaced with nil slices +* Fix panic that can occur in when decoding a map into a nil slice of structs +* Improved package documentation for godoc + +## 1.1.2 + +* Fix error when decode hook decodes interface implementation into interface + type. 
[GH-140] + +## 1.1.1 + +* Fix panic that can happen in `decodePtr` + +## 1.1.0 + +* Added `StringToIPHookFunc` to convert `string` to `net.IP` and `net.IPNet` [GH-133] +* Support struct to struct decoding [GH-137] +* If source map value is nil, then destination map value is nil (instead of empty) +* If source slice value is nil, then destination slice value is nil (instead of empty) +* If source pointer is nil, then destination pointer is set to nil (instead of + allocated zero value of type) + +## 1.0.0 + +* Initial tagged stable release. diff --git a/vendor/github.com/mitchellh/mapstructure/LICENSE b/vendor/github.com/mitchellh/mapstructure/LICENSE new file mode 100644 index 000000000..f9c841a51 --- /dev/null +++ b/vendor/github.com/mitchellh/mapstructure/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2013 Mitchell Hashimoto + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/vendor/github.com/mitchellh/mapstructure/README.md b/vendor/github.com/mitchellh/mapstructure/README.md new file mode 100644 index 000000000..0018dc7d9 --- /dev/null +++ b/vendor/github.com/mitchellh/mapstructure/README.md @@ -0,0 +1,46 @@ +# mapstructure [![Godoc](https://godoc.org/github.com/mitchellh/mapstructure?status.svg)](https://godoc.org/github.com/mitchellh/mapstructure) + +mapstructure is a Go library for decoding generic map values to structures +and vice versa, while providing helpful error handling. + +This library is most useful when decoding values from some data stream (JSON, +Gob, etc.) where you don't _quite_ know the structure of the underlying data +until you read a part of it. You can therefore read a `map[string]interface{}` +and use this library to decode it into the proper underlying native Go +structure. + +## Installation + +Standard `go get`: + +``` +$ go get github.com/mitchellh/mapstructure +``` + +## Usage & Example + +For usage and examples see the [Godoc](http://godoc.org/github.com/mitchellh/mapstructure). + +The `Decode` function has examples associated with it there. + +## But Why?! + +Go offers fantastic standard libraries for decoding formats such as JSON. +The standard method is to have a struct pre-created, and populate that struct +from the bytes of the encoded format. This is great, but the problem is if +you have configuration or an encoding that changes slightly depending on +specific fields. 
For example, consider this JSON: + +```json +{ + "type": "person", + "name": "Mitchell" +} +``` + +Perhaps we can't populate a specific structure without first reading +the "type" field from the JSON. We could always do two passes over the +decoding of the JSON (reading the "type" first, and the rest later). +However, it is much simpler to just decode this into a `map[string]interface{}` +structure, read the "type" key, then use something like this library +to decode it into the proper structure. diff --git a/vendor/github.com/mitchellh/mapstructure/decode_hooks.go b/vendor/github.com/mitchellh/mapstructure/decode_hooks.go new file mode 100644 index 000000000..92e6f76ff --- /dev/null +++ b/vendor/github.com/mitchellh/mapstructure/decode_hooks.go @@ -0,0 +1,256 @@ +package mapstructure + +import ( + "encoding" + "errors" + "fmt" + "net" + "reflect" + "strconv" + "strings" + "time" +) + +// typedDecodeHook takes a raw DecodeHookFunc (an interface{}) and turns +// it into the proper DecodeHookFunc type, such as DecodeHookFuncType. +func typedDecodeHook(h DecodeHookFunc) DecodeHookFunc { + // Create variables here so we can reference them with the reflect pkg + var f1 DecodeHookFuncType + var f2 DecodeHookFuncKind + var f3 DecodeHookFuncValue + + // Fill in the variables into this interface and the rest is done + // automatically using the reflect package. + potential := []interface{}{f1, f2, f3} + + v := reflect.ValueOf(h) + vt := v.Type() + for _, raw := range potential { + pt := reflect.ValueOf(raw).Type() + if vt.ConvertibleTo(pt) { + return v.Convert(pt).Interface() + } + } + + return nil +} + +// DecodeHookExec executes the given decode hook. This should be used +// since it'll naturally degrade to the older backwards compatible DecodeHookFunc +// that took reflect.Kind instead of reflect.Type. +func DecodeHookExec( + raw DecodeHookFunc, + from reflect.Value, to reflect.Value) (interface{}, error) { + + switch f := typedDecodeHook(raw).(type) { + case DecodeHookFuncType: + return f(from.Type(), to.Type(), from.Interface()) + case DecodeHookFuncKind: + return f(from.Kind(), to.Kind(), from.Interface()) + case DecodeHookFuncValue: + return f(from, to) + default: + return nil, errors.New("invalid decode hook signature") + } +} + +// ComposeDecodeHookFunc creates a single DecodeHookFunc that +// automatically composes multiple DecodeHookFuncs. +// +// The composed funcs are called in order, with the result of the +// previous transformation. +func ComposeDecodeHookFunc(fs ...DecodeHookFunc) DecodeHookFunc { + return func(f reflect.Value, t reflect.Value) (interface{}, error) { + var err error + var data interface{} + newFrom := f + for _, f1 := range fs { + data, err = DecodeHookExec(f1, newFrom, t) + if err != nil { + return nil, err + } + newFrom = reflect.ValueOf(data) + } + + return data, nil + } +} + +// StringToSliceHookFunc returns a DecodeHookFunc that converts +// string to []string by splitting on the given sep. +func StringToSliceHookFunc(sep string) DecodeHookFunc { + return func( + f reflect.Kind, + t reflect.Kind, + data interface{}) (interface{}, error) { + if f != reflect.String || t != reflect.Slice { + return data, nil + } + + raw := data.(string) + if raw == "" { + return []string{}, nil + } + + return strings.Split(raw, sep), nil + } +} + +// StringToTimeDurationHookFunc returns a DecodeHookFunc that converts +// strings to time.Duration. 
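+// For example (an illustrative sketch, not from the upstream docs; `target` is
+// assumed to be a pointer to a struct with a time.Duration field):
+//
+//	config := &DecoderConfig{
+//		DecodeHook: StringToTimeDurationHookFunc(),
+//		Result:     target,
+//	}
+//	decoder, _ := NewDecoder(config)
+//	_ = decoder.Decode(map[string]interface{}{"timeout": "5s"})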
+func StringToTimeDurationHookFunc() DecodeHookFunc { + return func( + f reflect.Type, + t reflect.Type, + data interface{}) (interface{}, error) { + if f.Kind() != reflect.String { + return data, nil + } + if t != reflect.TypeOf(time.Duration(5)) { + return data, nil + } + + // Convert it by parsing + return time.ParseDuration(data.(string)) + } +} + +// StringToIPHookFunc returns a DecodeHookFunc that converts +// strings to net.IP +func StringToIPHookFunc() DecodeHookFunc { + return func( + f reflect.Type, + t reflect.Type, + data interface{}) (interface{}, error) { + if f.Kind() != reflect.String { + return data, nil + } + if t != reflect.TypeOf(net.IP{}) { + return data, nil + } + + // Convert it by parsing + ip := net.ParseIP(data.(string)) + if ip == nil { + return net.IP{}, fmt.Errorf("failed parsing ip %v", data) + } + + return ip, nil + } +} + +// StringToIPNetHookFunc returns a DecodeHookFunc that converts +// strings to net.IPNet +func StringToIPNetHookFunc() DecodeHookFunc { + return func( + f reflect.Type, + t reflect.Type, + data interface{}) (interface{}, error) { + if f.Kind() != reflect.String { + return data, nil + } + if t != reflect.TypeOf(net.IPNet{}) { + return data, nil + } + + // Convert it by parsing + _, net, err := net.ParseCIDR(data.(string)) + return net, err + } +} + +// StringToTimeHookFunc returns a DecodeHookFunc that converts +// strings to time.Time. +func StringToTimeHookFunc(layout string) DecodeHookFunc { + return func( + f reflect.Type, + t reflect.Type, + data interface{}) (interface{}, error) { + if f.Kind() != reflect.String { + return data, nil + } + if t != reflect.TypeOf(time.Time{}) { + return data, nil + } + + // Convert it by parsing + return time.Parse(layout, data.(string)) + } +} + +// WeaklyTypedHook is a DecodeHookFunc which adds support for weak typing to +// the decoder. +// +// Note that this is significantly different from the WeaklyTypedInput option +// of the DecoderConfig. 
+func WeaklyTypedHook( + f reflect.Kind, + t reflect.Kind, + data interface{}) (interface{}, error) { + dataVal := reflect.ValueOf(data) + switch t { + case reflect.String: + switch f { + case reflect.Bool: + if dataVal.Bool() { + return "1", nil + } + return "0", nil + case reflect.Float32: + return strconv.FormatFloat(dataVal.Float(), 'f', -1, 64), nil + case reflect.Int: + return strconv.FormatInt(dataVal.Int(), 10), nil + case reflect.Slice: + dataType := dataVal.Type() + elemKind := dataType.Elem().Kind() + if elemKind == reflect.Uint8 { + return string(dataVal.Interface().([]uint8)), nil + } + case reflect.Uint: + return strconv.FormatUint(dataVal.Uint(), 10), nil + } + } + + return data, nil +} + +func RecursiveStructToMapHookFunc() DecodeHookFunc { + return func(f reflect.Value, t reflect.Value) (interface{}, error) { + if f.Kind() != reflect.Struct { + return f.Interface(), nil + } + + var i interface{} = struct{}{} + if t.Type() != reflect.TypeOf(&i).Elem() { + return f.Interface(), nil + } + + m := make(map[string]interface{}) + t.Set(reflect.ValueOf(m)) + + return f.Interface(), nil + } +} + +// TextUnmarshallerHookFunc returns a DecodeHookFunc that applies +// strings to the UnmarshalText function, when the target type +// implements the encoding.TextUnmarshaler interface +func TextUnmarshallerHookFunc() DecodeHookFuncType { + return func( + f reflect.Type, + t reflect.Type, + data interface{}) (interface{}, error) { + if f.Kind() != reflect.String { + return data, nil + } + result := reflect.New(t).Interface() + unmarshaller, ok := result.(encoding.TextUnmarshaler) + if !ok { + return data, nil + } + if err := unmarshaller.UnmarshalText([]byte(data.(string))); err != nil { + return nil, err + } + return result, nil + } +} diff --git a/vendor/github.com/mitchellh/mapstructure/error.go b/vendor/github.com/mitchellh/mapstructure/error.go new file mode 100644 index 000000000..47a99e5af --- /dev/null +++ b/vendor/github.com/mitchellh/mapstructure/error.go @@ -0,0 +1,50 @@ +package mapstructure + +import ( + "errors" + "fmt" + "sort" + "strings" +) + +// Error implements the error interface and can represents multiple +// errors that occur in the course of a single decode. +type Error struct { + Errors []string +} + +func (e *Error) Error() string { + points := make([]string, len(e.Errors)) + for i, err := range e.Errors { + points[i] = fmt.Sprintf("* %s", err) + } + + sort.Strings(points) + return fmt.Sprintf( + "%d error(s) decoding:\n\n%s", + len(e.Errors), strings.Join(points, "\n")) +} + +// WrappedErrors implements the errwrap.Wrapper interface to make this +// return value more useful with the errwrap and go-multierror libraries. +func (e *Error) WrappedErrors() []error { + if e == nil { + return nil + } + + result := make([]error, len(e.Errors)) + for i, e := range e.Errors { + result[i] = errors.New(e) + } + + return result +} + +func appendErrors(errors []string, err error) []string { + switch e := err.(type) { + case *Error: + return append(errors, e.Errors...) 
+ default: + return append(errors, e.Error()) + } +} diff --git a/vendor/github.com/mitchellh/mapstructure/go.mod b/vendor/github.com/mitchellh/mapstructure/go.mod new file mode 100644 index 000000000..a03ae9730 --- /dev/null +++ b/vendor/github.com/mitchellh/mapstructure/go.mod @@ -0,0 +1,3 @@ +module github.com/mitchellh/mapstructure + +go 1.14 diff --git a/vendor/github.com/mitchellh/mapstructure/mapstructure.go b/vendor/github.com/mitchellh/mapstructure/mapstructure.go new file mode 100644 index 000000000..3643901f5 --- /dev/null +++ b/vendor/github.com/mitchellh/mapstructure/mapstructure.go @@ -0,0 +1,1462 @@ +// Package mapstructure exposes functionality to convert one arbitrary +// Go type into another, typically to convert a map[string]interface{} +// into a native Go structure. +// +// The Go structure can be arbitrarily complex, containing slices, +// other structs, etc. and the decoder will properly decode nested +// maps and so on into the proper structures in the native Go struct. +// See the examples to see what the decoder is capable of. +// +// The simplest function to start with is Decode. +// +// Field Tags +// +// When decoding to a struct, mapstructure will use the field name by +// default to perform the mapping. For example, if a struct has a field +// "Username" then mapstructure will look for a key in the source value +// of "username" (case insensitive). +// +// type User struct { +// Username string +// } +// +// You can change the behavior of mapstructure by using struct tags. +// The default struct tag that mapstructure looks for is "mapstructure" +// but you can customize it using DecoderConfig. +// +// Renaming Fields +// +// To rename the key that mapstructure looks for, use the "mapstructure" +// tag and set a value directly. For example, to change the "username" example +// above to "user": +// +// type User struct { +// Username string `mapstructure:"user"` +// } +// +// Embedded Structs and Squashing +// +// Embedded structs are treated as if they're another field with that name. +// By default, the two structs below are equivalent when decoding with +// mapstructure: +// +// type Person struct { +// Name string +// } +// +// type Friend struct { +// Person +// } +// +// type Friend struct { +// Person Person +// } +// +// This would require an input that looks like below: +// +// map[string]interface{}{ +// "person": map[string]interface{}{"name": "alice"}, +// } +// +// If your "person" value is NOT nested, then you can append ",squash" to +// your tag value and mapstructure will treat it as if the embedded struct +// were part of the struct directly. Example: +// +// type Friend struct { +// Person `mapstructure:",squash"` +// } +// +// Now the following input would be accepted: +// +// map[string]interface{}{ +// "name": "alice", +// } +// +// When decoding from a struct to a map, the squash tag squashes the struct +// fields into a single map. Using the example structs from above: +// +// Friend{Person: Person{Name: "alice"}} +// +// Will be decoded into a map: +// +// map[string]interface{}{ +// "name": "alice", +// } +// +// DecoderConfig has a field that changes the behavior of mapstructure +// to always squash embedded structs. +// +// Remainder Values +// +// If there are any unmapped keys in the source value, mapstructure by +// default will silently ignore them. You can error by setting ErrorUnused +// in DecoderConfig. If you're using Metadata you can also maintain a slice +// of the unused keys. 
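+//
+// For example (an illustrative sketch, not from the upstream documentation):
+//
+//	var md Metadata
+//	config := &DecoderConfig{
+//		Metadata: &md,
+//		Result:   &result,
+//	}
+//	// after decoding, md.Unused lists the source keys with no matching field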
+// +// You can also use the ",remain" suffix on your tag to collect all unused +// values in a map. The field with this tag MUST be a map type and should +// probably be a "map[string]interface{}" or "map[interface{}]interface{}". +// See example below: +// +// type Friend struct { +// Name string +// Other map[string]interface{} `mapstructure:",remain"` +// } +// +// Given the input below, Other would be populated with the other +// values that weren't used (everything but "name"): +// +// map[string]interface{}{ +// "name": "bob", +// "address": "123 Maple St.", +// } +// +// Omit Empty Values +// +// When decoding from a struct to any other value, you may use the +// ",omitempty" suffix on your tag to omit that value if it equates to +// the zero value. The zero value of all types is specified in the Go +// specification. +// +// For example, the zero type of a numeric type is zero ("0"). If the struct +// field value is zero and a numeric type, the field is empty, and it won't +// be encoded into the destination type. +// +// type Source { +// Age int `mapstructure:",omitempty"` +// } +// +// Unexported fields +// +// Since unexported (private) struct fields cannot be set outside the package +// where they are defined, the decoder will simply skip them. +// +// For this output type definition: +// +// type Exported struct { +// private string // this unexported field will be skipped +// Public string +// } +// +// Using this map as input: +// +// map[string]interface{}{ +// "private": "I will be ignored", +// "Public": "I made it through!", +// } +// +// The following struct will be decoded: +// +// type Exported struct { +// private: "" // field is left with an empty string (zero value) +// Public: "I made it through!" +// } +// +// Other Configuration +// +// mapstructure is highly configurable. See the DecoderConfig struct +// for other features and options that are supported. +package mapstructure + +import ( + "encoding/json" + "errors" + "fmt" + "reflect" + "sort" + "strconv" + "strings" +) + +// DecodeHookFunc is the callback function that can be used for +// data transformations. See "DecodeHook" in the DecoderConfig +// struct. +// +// The type must be one of DecodeHookFuncType, DecodeHookFuncKind, or +// DecodeHookFuncValue. +// Values are a superset of Types (Values can return types), and Types are a +// superset of Kinds (Types can return Kinds) and are generally a richer thing +// to use, but Kinds are simpler if you only need those. +// +// The reason DecodeHookFunc is multi-typed is for backwards compatibility: +// we started with Kinds and then realized Types were the better solution, +// but have a promise to not break backwards compat so we now support +// both. +type DecodeHookFunc interface{} + +// DecodeHookFuncType is a DecodeHookFunc which has complete information about +// the source and target types. +type DecodeHookFuncType func(reflect.Type, reflect.Type, interface{}) (interface{}, error) + +// DecodeHookFuncKind is a DecodeHookFunc which knows only the Kinds of the +// source and target types. +type DecodeHookFuncKind func(reflect.Kind, reflect.Kind, interface{}) (interface{}, error) + +// DecodeHookFuncRaw is a DecodeHookFunc which has complete access to both the source and target +// values. +type DecodeHookFuncValue func(from reflect.Value, to reflect.Value) (interface{}, error) + +// DecoderConfig is the configuration that is used to create a new decoder +// and allows customization of various aspects of decoding. 
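+//
+// A minimal illustrative sketch of building a decoder from a config (the
+// concrete field values and the input variable are examples only):
+//
+//     var result map[string]interface{}
+//     config := &DecoderConfig{
+//         WeaklyTypedInput: true,
+//         Result:           &result,
+//     }
+//     decoder, err := NewDecoder(config)
+//     if err != nil {
+//         // handle the configuration error
+//     }
+//     err = decoder.Decode(input)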
+type DecoderConfig struct { + // DecodeHook, if set, will be called before any decoding and any + // type conversion (if WeaklyTypedInput is on). This lets you modify + // the values before they're set down onto the resulting struct. The + // DecodeHook is called for every map and value in the input. This means + // that if a struct has embedded fields with squash tags the decode hook + // is called only once with all of the input data, not once for each + // embedded struct. + // + // If an error is returned, the entire decode will fail with that error. + DecodeHook DecodeHookFunc + + // If ErrorUnused is true, then it is an error for there to exist + // keys in the original map that were unused in the decoding process + // (extra keys). + ErrorUnused bool + + // ZeroFields, if set to true, will zero fields before writing them. + // For example, a map will be emptied before decoded values are put in + // it. If this is false, a map will be merged. + ZeroFields bool + + // If WeaklyTypedInput is true, the decoder will make the following + // "weak" conversions: + // + // - bools to string (true = "1", false = "0") + // - numbers to string (base 10) + // - bools to int/uint (true = 1, false = 0) + // - strings to int/uint (base implied by prefix) + // - int to bool (true if value != 0) + // - string to bool (accepts: 1, t, T, TRUE, true, True, 0, f, F, + // FALSE, false, False. Anything else is an error) + // - empty array = empty map and vice versa + // - negative numbers to overflowed uint values (base 10) + // - slice of maps to a merged map + // - single values are converted to slices if required. Each + // element is weakly decoded. For example: "4" can become []int{4} + // if the target type is an int slice. + // + WeaklyTypedInput bool + + // Squash will squash embedded structs. A squash tag may also be + // added to an individual struct field using a tag. For example: + // + // type Parent struct { + // Child `mapstructure:",squash"` + // } + Squash bool + + // Metadata is the struct that will contain extra metadata about + // the decoding. If this is nil, then no metadata will be tracked. + Metadata *Metadata + + // Result is a pointer to the struct that will contain the decoded + // value. + Result interface{} + + // The tag name that mapstructure reads for field names. This + // defaults to "mapstructure" + TagName string +} + +// A Decoder takes a raw interface value and turns it into structured +// data, keeping track of rich error information along the way in case +// anything goes wrong. Unlike the basic top-level Decode method, you can +// more finely control how the Decoder behaves using the DecoderConfig +// structure. The top-level Decode method is just a convenience that sets +// up the most basic Decoder. +type Decoder struct { + config *DecoderConfig +} + +// Metadata contains information about decoding a structure that +// is tedious or difficult to get otherwise. +type Metadata struct { + // Keys are the keys of the structure which were successfully decoded + Keys []string + + // Unused is a slice of keys that were found in the raw value but + // weren't decoded since there was no matching field in the result interface + Unused []string +} + +// Decode takes an input structure and uses reflection to translate it to +// the output structure. output must be a pointer to a map or struct. 
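+//
+// A short illustrative sketch (the Person type and the map contents are
+// examples only):
+//
+//     type Person struct {
+//         Name string
+//         Age  int
+//     }
+//     input := map[string]interface{}{"name": "alice", "age": 30}
+//     var person Person
+//     if err := Decode(input, &person); err != nil {
+//         // handle the decode error
+//     }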
+func Decode(input interface{}, output interface{}) error { + config := &DecoderConfig{ + Metadata: nil, + Result: output, + } + + decoder, err := NewDecoder(config) + if err != nil { + return err + } + + return decoder.Decode(input) +} + +// WeakDecode is the same as Decode but is shorthand to enable +// WeaklyTypedInput. See DecoderConfig for more info. +func WeakDecode(input, output interface{}) error { + config := &DecoderConfig{ + Metadata: nil, + Result: output, + WeaklyTypedInput: true, + } + + decoder, err := NewDecoder(config) + if err != nil { + return err + } + + return decoder.Decode(input) +} + +// DecodeMetadata is the same as Decode, but is shorthand to +// enable metadata collection. See DecoderConfig for more info. +func DecodeMetadata(input interface{}, output interface{}, metadata *Metadata) error { + config := &DecoderConfig{ + Metadata: metadata, + Result: output, + } + + decoder, err := NewDecoder(config) + if err != nil { + return err + } + + return decoder.Decode(input) +} + +// WeakDecodeMetadata is the same as Decode, but is shorthand to +// enable both WeaklyTypedInput and metadata collection. See +// DecoderConfig for more info. +func WeakDecodeMetadata(input interface{}, output interface{}, metadata *Metadata) error { + config := &DecoderConfig{ + Metadata: metadata, + Result: output, + WeaklyTypedInput: true, + } + + decoder, err := NewDecoder(config) + if err != nil { + return err + } + + return decoder.Decode(input) +} + +// NewDecoder returns a new decoder for the given configuration. Once +// a decoder has been returned, the same configuration must not be used +// again. +func NewDecoder(config *DecoderConfig) (*Decoder, error) { + val := reflect.ValueOf(config.Result) + if val.Kind() != reflect.Ptr { + return nil, errors.New("result must be a pointer") + } + + val = val.Elem() + if !val.CanAddr() { + return nil, errors.New("result must be addressable (a pointer)") + } + + if config.Metadata != nil { + if config.Metadata.Keys == nil { + config.Metadata.Keys = make([]string, 0) + } + + if config.Metadata.Unused == nil { + config.Metadata.Unused = make([]string, 0) + } + } + + if config.TagName == "" { + config.TagName = "mapstructure" + } + + result := &Decoder{ + config: config, + } + + return result, nil +} + +// Decode decodes the given raw interface to the target pointer specified +// by the configuration. +func (d *Decoder) Decode(input interface{}) error { + return d.decode("", input, reflect.ValueOf(d.config.Result).Elem()) +} + +// Decodes an unknown data type into a specific reflection value. +func (d *Decoder) decode(name string, input interface{}, outVal reflect.Value) error { + var inputVal reflect.Value + if input != nil { + inputVal = reflect.ValueOf(input) + + // We need to check here if input is a typed nil. Typed nils won't + // match the "input == nil" below so we check that here. + if inputVal.Kind() == reflect.Ptr && inputVal.IsNil() { + input = nil + } + } + + if input == nil { + // If the data is nil, then we don't set anything, unless ZeroFields is set + // to true. + if d.config.ZeroFields { + outVal.Set(reflect.Zero(outVal.Type())) + + if d.config.Metadata != nil && name != "" { + d.config.Metadata.Keys = append(d.config.Metadata.Keys, name) + } + } + return nil + } + + if !inputVal.IsValid() { + // If the input value is invalid, then we just set the value + // to be the zero value. 
+ outVal.Set(reflect.Zero(outVal.Type())) + if d.config.Metadata != nil && name != "" { + d.config.Metadata.Keys = append(d.config.Metadata.Keys, name) + } + return nil + } + + if d.config.DecodeHook != nil { + // We have a DecodeHook, so let's pre-process the input. + var err error + input, err = DecodeHookExec(d.config.DecodeHook, inputVal, outVal) + if err != nil { + return fmt.Errorf("error decoding '%s': %s", name, err) + } + } + + var err error + outputKind := getKind(outVal) + addMetaKey := true + switch outputKind { + case reflect.Bool: + err = d.decodeBool(name, input, outVal) + case reflect.Interface: + err = d.decodeBasic(name, input, outVal) + case reflect.String: + err = d.decodeString(name, input, outVal) + case reflect.Int: + err = d.decodeInt(name, input, outVal) + case reflect.Uint: + err = d.decodeUint(name, input, outVal) + case reflect.Float32: + err = d.decodeFloat(name, input, outVal) + case reflect.Struct: + err = d.decodeStruct(name, input, outVal) + case reflect.Map: + err = d.decodeMap(name, input, outVal) + case reflect.Ptr: + addMetaKey, err = d.decodePtr(name, input, outVal) + case reflect.Slice: + err = d.decodeSlice(name, input, outVal) + case reflect.Array: + err = d.decodeArray(name, input, outVal) + case reflect.Func: + err = d.decodeFunc(name, input, outVal) + default: + // If we reached this point then we weren't able to decode it + return fmt.Errorf("%s: unsupported type: %s", name, outputKind) + } + + // If we reached here, then we successfully decoded SOMETHING, so + // mark the key as used if we're tracking metainput. + if addMetaKey && d.config.Metadata != nil && name != "" { + d.config.Metadata.Keys = append(d.config.Metadata.Keys, name) + } + + return err +} + +// This decodes a basic type (bool, int, string, etc.) and sets the +// value to "data" of that type. +func (d *Decoder) decodeBasic(name string, data interface{}, val reflect.Value) error { + if val.IsValid() && val.Elem().IsValid() { + elem := val.Elem() + + // If we can't address this element, then its not writable. Instead, + // we make a copy of the value (which is a pointer and therefore + // writable), decode into that, and replace the whole value. + copied := false + if !elem.CanAddr() { + copied = true + + // Make *T + copy := reflect.New(elem.Type()) + + // *T = elem + copy.Elem().Set(elem) + + // Set elem so we decode into it + elem = copy + } + + // Decode. If we have an error then return. We also return right + // away if we're not a copy because that means we decoded directly. + if err := d.decode(name, data, elem); err != nil || !copied { + return err + } + + // If we're a copy, we need to set te final result + val.Set(elem.Elem()) + return nil + } + + dataVal := reflect.ValueOf(data) + + // If the input data is a pointer, and the assigned type is the dereference + // of that exact pointer, then indirect it so that we can assign it. 
+ // Example: *string to string + if dataVal.Kind() == reflect.Ptr && dataVal.Type().Elem() == val.Type() { + dataVal = reflect.Indirect(dataVal) + } + + if !dataVal.IsValid() { + dataVal = reflect.Zero(val.Type()) + } + + dataValType := dataVal.Type() + if !dataValType.AssignableTo(val.Type()) { + return fmt.Errorf( + "'%s' expected type '%s', got '%s'", + name, val.Type(), dataValType) + } + + val.Set(dataVal) + return nil +} + +func (d *Decoder) decodeString(name string, data interface{}, val reflect.Value) error { + dataVal := reflect.Indirect(reflect.ValueOf(data)) + dataKind := getKind(dataVal) + + converted := true + switch { + case dataKind == reflect.String: + val.SetString(dataVal.String()) + case dataKind == reflect.Bool && d.config.WeaklyTypedInput: + if dataVal.Bool() { + val.SetString("1") + } else { + val.SetString("0") + } + case dataKind == reflect.Int && d.config.WeaklyTypedInput: + val.SetString(strconv.FormatInt(dataVal.Int(), 10)) + case dataKind == reflect.Uint && d.config.WeaklyTypedInput: + val.SetString(strconv.FormatUint(dataVal.Uint(), 10)) + case dataKind == reflect.Float32 && d.config.WeaklyTypedInput: + val.SetString(strconv.FormatFloat(dataVal.Float(), 'f', -1, 64)) + case dataKind == reflect.Slice && d.config.WeaklyTypedInput, + dataKind == reflect.Array && d.config.WeaklyTypedInput: + dataType := dataVal.Type() + elemKind := dataType.Elem().Kind() + switch elemKind { + case reflect.Uint8: + var uints []uint8 + if dataKind == reflect.Array { + uints = make([]uint8, dataVal.Len(), dataVal.Len()) + for i := range uints { + uints[i] = dataVal.Index(i).Interface().(uint8) + } + } else { + uints = dataVal.Interface().([]uint8) + } + val.SetString(string(uints)) + default: + converted = false + } + default: + converted = false + } + + if !converted { + return fmt.Errorf( + "'%s' expected type '%s', got unconvertible type '%s', value: '%v'", + name, val.Type(), dataVal.Type(), data) + } + + return nil +} + +func (d *Decoder) decodeInt(name string, data interface{}, val reflect.Value) error { + dataVal := reflect.Indirect(reflect.ValueOf(data)) + dataKind := getKind(dataVal) + dataType := dataVal.Type() + + switch { + case dataKind == reflect.Int: + val.SetInt(dataVal.Int()) + case dataKind == reflect.Uint: + val.SetInt(int64(dataVal.Uint())) + case dataKind == reflect.Float32: + val.SetInt(int64(dataVal.Float())) + case dataKind == reflect.Bool && d.config.WeaklyTypedInput: + if dataVal.Bool() { + val.SetInt(1) + } else { + val.SetInt(0) + } + case dataKind == reflect.String && d.config.WeaklyTypedInput: + str := dataVal.String() + if str == "" { + str = "0" + } + + i, err := strconv.ParseInt(str, 0, val.Type().Bits()) + if err == nil { + val.SetInt(i) + } else { + return fmt.Errorf("cannot parse '%s' as int: %s", name, err) + } + case dataType.PkgPath() == "encoding/json" && dataType.Name() == "Number": + jn := data.(json.Number) + i, err := jn.Int64() + if err != nil { + return fmt.Errorf( + "error decoding json.Number into %s: %s", name, err) + } + val.SetInt(i) + default: + return fmt.Errorf( + "'%s' expected type '%s', got unconvertible type '%s', value: '%v'", + name, val.Type(), dataVal.Type(), data) + } + + return nil +} + +func (d *Decoder) decodeUint(name string, data interface{}, val reflect.Value) error { + dataVal := reflect.Indirect(reflect.ValueOf(data)) + dataKind := getKind(dataVal) + dataType := dataVal.Type() + + switch { + case dataKind == reflect.Int: + i := dataVal.Int() + if i < 0 && !d.config.WeaklyTypedInput { + return fmt.Errorf("cannot 
parse '%s', %d overflows uint", + name, i) + } + val.SetUint(uint64(i)) + case dataKind == reflect.Uint: + val.SetUint(dataVal.Uint()) + case dataKind == reflect.Float32: + f := dataVal.Float() + if f < 0 && !d.config.WeaklyTypedInput { + return fmt.Errorf("cannot parse '%s', %f overflows uint", + name, f) + } + val.SetUint(uint64(f)) + case dataKind == reflect.Bool && d.config.WeaklyTypedInput: + if dataVal.Bool() { + val.SetUint(1) + } else { + val.SetUint(0) + } + case dataKind == reflect.String && d.config.WeaklyTypedInput: + str := dataVal.String() + if str == "" { + str = "0" + } + + i, err := strconv.ParseUint(str, 0, val.Type().Bits()) + if err == nil { + val.SetUint(i) + } else { + return fmt.Errorf("cannot parse '%s' as uint: %s", name, err) + } + case dataType.PkgPath() == "encoding/json" && dataType.Name() == "Number": + jn := data.(json.Number) + i, err := jn.Int64() + if err != nil { + return fmt.Errorf( + "error decoding json.Number into %s: %s", name, err) + } + if i < 0 && !d.config.WeaklyTypedInput { + return fmt.Errorf("cannot parse '%s', %d overflows uint", + name, i) + } + val.SetUint(uint64(i)) + default: + return fmt.Errorf( + "'%s' expected type '%s', got unconvertible type '%s', value: '%v'", + name, val.Type(), dataVal.Type(), data) + } + + return nil +} + +func (d *Decoder) decodeBool(name string, data interface{}, val reflect.Value) error { + dataVal := reflect.Indirect(reflect.ValueOf(data)) + dataKind := getKind(dataVal) + + switch { + case dataKind == reflect.Bool: + val.SetBool(dataVal.Bool()) + case dataKind == reflect.Int && d.config.WeaklyTypedInput: + val.SetBool(dataVal.Int() != 0) + case dataKind == reflect.Uint && d.config.WeaklyTypedInput: + val.SetBool(dataVal.Uint() != 0) + case dataKind == reflect.Float32 && d.config.WeaklyTypedInput: + val.SetBool(dataVal.Float() != 0) + case dataKind == reflect.String && d.config.WeaklyTypedInput: + b, err := strconv.ParseBool(dataVal.String()) + if err == nil { + val.SetBool(b) + } else if dataVal.String() == "" { + val.SetBool(false) + } else { + return fmt.Errorf("cannot parse '%s' as bool: %s", name, err) + } + default: + return fmt.Errorf( + "'%s' expected type '%s', got unconvertible type '%s', value: '%v'", + name, val.Type(), dataVal.Type(), data) + } + + return nil +} + +func (d *Decoder) decodeFloat(name string, data interface{}, val reflect.Value) error { + dataVal := reflect.Indirect(reflect.ValueOf(data)) + dataKind := getKind(dataVal) + dataType := dataVal.Type() + + switch { + case dataKind == reflect.Int: + val.SetFloat(float64(dataVal.Int())) + case dataKind == reflect.Uint: + val.SetFloat(float64(dataVal.Uint())) + case dataKind == reflect.Float32: + val.SetFloat(dataVal.Float()) + case dataKind == reflect.Bool && d.config.WeaklyTypedInput: + if dataVal.Bool() { + val.SetFloat(1) + } else { + val.SetFloat(0) + } + case dataKind == reflect.String && d.config.WeaklyTypedInput: + str := dataVal.String() + if str == "" { + str = "0" + } + + f, err := strconv.ParseFloat(str, val.Type().Bits()) + if err == nil { + val.SetFloat(f) + } else { + return fmt.Errorf("cannot parse '%s' as float: %s", name, err) + } + case dataType.PkgPath() == "encoding/json" && dataType.Name() == "Number": + jn := data.(json.Number) + i, err := jn.Float64() + if err != nil { + return fmt.Errorf( + "error decoding json.Number into %s: %s", name, err) + } + val.SetFloat(i) + default: + return fmt.Errorf( + "'%s' expected type '%s', got unconvertible type '%s', value: '%v'", + name, val.Type(), dataVal.Type(), data) + } + + 
return nil +} + +func (d *Decoder) decodeMap(name string, data interface{}, val reflect.Value) error { + valType := val.Type() + valKeyType := valType.Key() + valElemType := valType.Elem() + + // By default we overwrite keys in the current map + valMap := val + + // If the map is nil or we're purposely zeroing fields, make a new map + if valMap.IsNil() || d.config.ZeroFields { + // Make a new map to hold our result + mapType := reflect.MapOf(valKeyType, valElemType) + valMap = reflect.MakeMap(mapType) + } + + // Check input type and based on the input type jump to the proper func + dataVal := reflect.Indirect(reflect.ValueOf(data)) + switch dataVal.Kind() { + case reflect.Map: + return d.decodeMapFromMap(name, dataVal, val, valMap) + + case reflect.Struct: + return d.decodeMapFromStruct(name, dataVal, val, valMap) + + case reflect.Array, reflect.Slice: + if d.config.WeaklyTypedInput { + return d.decodeMapFromSlice(name, dataVal, val, valMap) + } + + fallthrough + + default: + return fmt.Errorf("'%s' expected a map, got '%s'", name, dataVal.Kind()) + } +} + +func (d *Decoder) decodeMapFromSlice(name string, dataVal reflect.Value, val reflect.Value, valMap reflect.Value) error { + // Special case for BC reasons (covered by tests) + if dataVal.Len() == 0 { + val.Set(valMap) + return nil + } + + for i := 0; i < dataVal.Len(); i++ { + err := d.decode( + name+"["+strconv.Itoa(i)+"]", + dataVal.Index(i).Interface(), val) + if err != nil { + return err + } + } + + return nil +} + +func (d *Decoder) decodeMapFromMap(name string, dataVal reflect.Value, val reflect.Value, valMap reflect.Value) error { + valType := val.Type() + valKeyType := valType.Key() + valElemType := valType.Elem() + + // Accumulate errors + errors := make([]string, 0) + + // If the input data is empty, then we just match what the input data is. + if dataVal.Len() == 0 { + if dataVal.IsNil() { + if !val.IsNil() { + val.Set(dataVal) + } + } else { + // Set to empty allocated value + val.Set(valMap) + } + + return nil + } + + for _, k := range dataVal.MapKeys() { + fieldName := name + "[" + k.String() + "]" + + // First decode the key into the proper type + currentKey := reflect.Indirect(reflect.New(valKeyType)) + if err := d.decode(fieldName, k.Interface(), currentKey); err != nil { + errors = appendErrors(errors, err) + continue + } + + // Next decode the data into the proper type + v := dataVal.MapIndex(k).Interface() + currentVal := reflect.Indirect(reflect.New(valElemType)) + if err := d.decode(fieldName, v, currentVal); err != nil { + errors = appendErrors(errors, err) + continue + } + + valMap.SetMapIndex(currentKey, currentVal) + } + + // Set the built up map to the value + val.Set(valMap) + + // If we had errors, return those + if len(errors) > 0 { + return &Error{errors} + } + + return nil +} + +func (d *Decoder) decodeMapFromStruct(name string, dataVal reflect.Value, val reflect.Value, valMap reflect.Value) error { + typ := dataVal.Type() + for i := 0; i < typ.NumField(); i++ { + // Get the StructField first since this is a cheap operation. If the + // field is unexported, then ignore it. + f := typ.Field(i) + if f.PkgPath != "" { + continue + } + + // Next get the actual value of this field and verify it is assignable + // to the map value. 
+ v := dataVal.Field(i) + if !v.Type().AssignableTo(valMap.Type().Elem()) { + return fmt.Errorf("cannot assign type '%s' to map value field of type '%s'", v.Type(), valMap.Type().Elem()) + } + + tagValue := f.Tag.Get(d.config.TagName) + keyName := f.Name + + // If Squash is set in the config, we squash the field down. + squash := d.config.Squash && v.Kind() == reflect.Struct && f.Anonymous + + // Determine the name of the key in the map + if index := strings.Index(tagValue, ","); index != -1 { + if tagValue[:index] == "-" { + continue + } + // If "omitempty" is specified in the tag, it ignores empty values. + if strings.Index(tagValue[index+1:], "omitempty") != -1 && isEmptyValue(v) { + continue + } + + // If "squash" is specified in the tag, we squash the field down. + squash = !squash && strings.Index(tagValue[index+1:], "squash") != -1 + if squash { + // When squashing, the embedded type can be a pointer to a struct. + if v.Kind() == reflect.Ptr && v.Elem().Kind() == reflect.Struct { + v = v.Elem() + } + + // The final type must be a struct + if v.Kind() != reflect.Struct { + return fmt.Errorf("cannot squash non-struct type '%s'", v.Type()) + } + } + keyName = tagValue[:index] + } else if len(tagValue) > 0 { + if tagValue == "-" { + continue + } + keyName = tagValue + } + + switch v.Kind() { + // this is an embedded struct, so handle it differently + case reflect.Struct: + x := reflect.New(v.Type()) + x.Elem().Set(v) + + vType := valMap.Type() + vKeyType := vType.Key() + vElemType := vType.Elem() + mType := reflect.MapOf(vKeyType, vElemType) + vMap := reflect.MakeMap(mType) + + // Creating a pointer to a map so that other methods can completely + // overwrite the map if need be (looking at you decodeMapFromMap). The + // indirection allows the underlying map to be settable (CanSet() == true) + // where as reflect.MakeMap returns an unsettable map. + addrVal := reflect.New(vMap.Type()) + reflect.Indirect(addrVal).Set(vMap) + + err := d.decode(keyName, x.Interface(), reflect.Indirect(addrVal)) + if err != nil { + return err + } + + // the underlying map may have been completely overwritten so pull + // it indirectly out of the enclosing value. + vMap = reflect.Indirect(addrVal) + + if squash { + for _, k := range vMap.MapKeys() { + valMap.SetMapIndex(k, vMap.MapIndex(k)) + } + } else { + valMap.SetMapIndex(reflect.ValueOf(keyName), vMap) + } + + default: + valMap.SetMapIndex(reflect.ValueOf(keyName), v) + } + } + + if val.CanAddr() { + val.Set(valMap) + } + + return nil +} + +func (d *Decoder) decodePtr(name string, data interface{}, val reflect.Value) (bool, error) { + // If the input data is nil, then we want to just set the output + // pointer to be nil as well. + isNil := data == nil + if !isNil { + switch v := reflect.Indirect(reflect.ValueOf(data)); v.Kind() { + case reflect.Chan, + reflect.Func, + reflect.Interface, + reflect.Map, + reflect.Ptr, + reflect.Slice: + isNil = v.IsNil() + } + } + if isNil { + if !val.IsNil() && val.CanSet() { + nilValue := reflect.New(val.Type()).Elem() + val.Set(nilValue) + } + + return true, nil + } + + // Create an element of the concrete (non pointer) type and decode + // into that. Then set the value of the pointer to this type. 
+ valType := val.Type() + valElemType := valType.Elem() + if val.CanSet() { + realVal := val + if realVal.IsNil() || d.config.ZeroFields { + realVal = reflect.New(valElemType) + } + + if err := d.decode(name, data, reflect.Indirect(realVal)); err != nil { + return false, err + } + + val.Set(realVal) + } else { + if err := d.decode(name, data, reflect.Indirect(val)); err != nil { + return false, err + } + } + return false, nil +} + +func (d *Decoder) decodeFunc(name string, data interface{}, val reflect.Value) error { + // Create an element of the concrete (non pointer) type and decode + // into that. Then set the value of the pointer to this type. + dataVal := reflect.Indirect(reflect.ValueOf(data)) + if val.Type() != dataVal.Type() { + return fmt.Errorf( + "'%s' expected type '%s', got unconvertible type '%s', value: '%v'", + name, val.Type(), dataVal.Type(), data) + } + val.Set(dataVal) + return nil +} + +func (d *Decoder) decodeSlice(name string, data interface{}, val reflect.Value) error { + dataVal := reflect.Indirect(reflect.ValueOf(data)) + dataValKind := dataVal.Kind() + valType := val.Type() + valElemType := valType.Elem() + sliceType := reflect.SliceOf(valElemType) + + // If we have a non array/slice type then we first attempt to convert. + if dataValKind != reflect.Array && dataValKind != reflect.Slice { + if d.config.WeaklyTypedInput { + switch { + // Slice and array we use the normal logic + case dataValKind == reflect.Slice, dataValKind == reflect.Array: + break + + // Empty maps turn into empty slices + case dataValKind == reflect.Map: + if dataVal.Len() == 0 { + val.Set(reflect.MakeSlice(sliceType, 0, 0)) + return nil + } + // Create slice of maps of other sizes + return d.decodeSlice(name, []interface{}{data}, val) + + case dataValKind == reflect.String && valElemType.Kind() == reflect.Uint8: + return d.decodeSlice(name, []byte(dataVal.String()), val) + + // All other types we try to convert to the slice type + // and "lift" it into it. i.e. a string becomes a string slice. + default: + // Just re-try this function with data as a slice. + return d.decodeSlice(name, []interface{}{data}, val) + } + } + + return fmt.Errorf( + "'%s': source data must be an array or slice, got %s", name, dataValKind) + } + + // If the input value is nil, then don't allocate since empty != nil + if dataVal.IsNil() { + return nil + } + + valSlice := val + if valSlice.IsNil() || d.config.ZeroFields { + // Make a new slice to hold our result, same size as the original data. 
+ valSlice = reflect.MakeSlice(sliceType, dataVal.Len(), dataVal.Len()) + } + + // Accumulate any errors + errors := make([]string, 0) + + for i := 0; i < dataVal.Len(); i++ { + currentData := dataVal.Index(i).Interface() + for valSlice.Len() <= i { + valSlice = reflect.Append(valSlice, reflect.Zero(valElemType)) + } + currentField := valSlice.Index(i) + + fieldName := name + "[" + strconv.Itoa(i) + "]" + if err := d.decode(fieldName, currentData, currentField); err != nil { + errors = appendErrors(errors, err) + } + } + + // Finally, set the value to the slice we built up + val.Set(valSlice) + + // If there were errors, we return those + if len(errors) > 0 { + return &Error{errors} + } + + return nil +} + +func (d *Decoder) decodeArray(name string, data interface{}, val reflect.Value) error { + dataVal := reflect.Indirect(reflect.ValueOf(data)) + dataValKind := dataVal.Kind() + valType := val.Type() + valElemType := valType.Elem() + arrayType := reflect.ArrayOf(valType.Len(), valElemType) + + valArray := val + + if valArray.Interface() == reflect.Zero(valArray.Type()).Interface() || d.config.ZeroFields { + // Check input type + if dataValKind != reflect.Array && dataValKind != reflect.Slice { + if d.config.WeaklyTypedInput { + switch { + // Empty maps turn into empty arrays + case dataValKind == reflect.Map: + if dataVal.Len() == 0 { + val.Set(reflect.Zero(arrayType)) + return nil + } + + // All other types we try to convert to the array type + // and "lift" it into it. i.e. a string becomes a string array. + default: + // Just re-try this function with data as a slice. + return d.decodeArray(name, []interface{}{data}, val) + } + } + + return fmt.Errorf( + "'%s': source data must be an array or slice, got %s", name, dataValKind) + + } + if dataVal.Len() > arrayType.Len() { + return fmt.Errorf( + "'%s': expected source data to have length less or equal to %d, got %d", name, arrayType.Len(), dataVal.Len()) + + } + + // Make a new array to hold our result, same size as the original data. + valArray = reflect.New(arrayType).Elem() + } + + // Accumulate any errors + errors := make([]string, 0) + + for i := 0; i < dataVal.Len(); i++ { + currentData := dataVal.Index(i).Interface() + currentField := valArray.Index(i) + + fieldName := name + "[" + strconv.Itoa(i) + "]" + if err := d.decode(fieldName, currentData, currentField); err != nil { + errors = appendErrors(errors, err) + } + } + + // Finally, set the value to the array we built up + val.Set(valArray) + + // If there were errors, we return those + if len(errors) > 0 { + return &Error{errors} + } + + return nil +} + +func (d *Decoder) decodeStruct(name string, data interface{}, val reflect.Value) error { + dataVal := reflect.Indirect(reflect.ValueOf(data)) + + // If the type of the value to write to and the data match directly, + // then we just set it directly instead of recursing into the structure. + if dataVal.Type() == val.Type() { + val.Set(dataVal) + return nil + } + + dataValKind := dataVal.Kind() + switch dataValKind { + case reflect.Map: + return d.decodeStructFromMap(name, dataVal, val) + + case reflect.Struct: + // Not the most efficient way to do this but we can optimize later if + // we want to. To convert from struct to struct we go to map first + // as an intermediary. 
+ + // Make a new map to hold our result + mapType := reflect.TypeOf((map[string]interface{})(nil)) + mval := reflect.MakeMap(mapType) + + // Creating a pointer to a map so that other methods can completely + // overwrite the map if need be (looking at you decodeMapFromMap). The + // indirection allows the underlying map to be settable (CanSet() == true) + // where as reflect.MakeMap returns an unsettable map. + addrVal := reflect.New(mval.Type()) + + reflect.Indirect(addrVal).Set(mval) + if err := d.decodeMapFromStruct(name, dataVal, reflect.Indirect(addrVal), mval); err != nil { + return err + } + + result := d.decodeStructFromMap(name, reflect.Indirect(addrVal), val) + return result + + default: + return fmt.Errorf("'%s' expected a map, got '%s'", name, dataVal.Kind()) + } +} + +func (d *Decoder) decodeStructFromMap(name string, dataVal, val reflect.Value) error { + dataValType := dataVal.Type() + if kind := dataValType.Key().Kind(); kind != reflect.String && kind != reflect.Interface { + return fmt.Errorf( + "'%s' needs a map with string keys, has '%s' keys", + name, dataValType.Key().Kind()) + } + + dataValKeys := make(map[reflect.Value]struct{}) + dataValKeysUnused := make(map[interface{}]struct{}) + for _, dataValKey := range dataVal.MapKeys() { + dataValKeys[dataValKey] = struct{}{} + dataValKeysUnused[dataValKey.Interface()] = struct{}{} + } + + errors := make([]string, 0) + + // This slice will keep track of all the structs we'll be decoding. + // There can be more than one struct if there are embedded structs + // that are squashed. + structs := make([]reflect.Value, 1, 5) + structs[0] = val + + // Compile the list of all the fields that we're going to be decoding + // from all the structs. + type field struct { + field reflect.StructField + val reflect.Value + } + + // remainField is set to a valid field set with the "remain" tag if + // we are keeping track of remaining values. + var remainField *field + + fields := []field{} + for len(structs) > 0 { + structVal := structs[0] + structs = structs[1:] + + structType := structVal.Type() + + for i := 0; i < structType.NumField(); i++ { + fieldType := structType.Field(i) + fieldVal := structVal.Field(i) + if fieldVal.Kind() == reflect.Ptr && fieldVal.Elem().Kind() == reflect.Struct { + // Handle embedded struct pointers as embedded structs. + fieldVal = fieldVal.Elem() + } + + // If "squash" is specified in the tag, we squash the field down. 
+ squash := d.config.Squash && fieldVal.Kind() == reflect.Struct && fieldType.Anonymous + remain := false + + // We always parse the tags cause we're looking for other tags too + tagParts := strings.Split(fieldType.Tag.Get(d.config.TagName), ",") + for _, tag := range tagParts[1:] { + if tag == "squash" { + squash = true + break + } + + if tag == "remain" { + remain = true + break + } + } + + if squash { + if fieldVal.Kind() != reflect.Struct { + errors = appendErrors(errors, + fmt.Errorf("%s: unsupported type for squash: %s", fieldType.Name, fieldVal.Kind())) + } else { + structs = append(structs, fieldVal) + } + continue + } + + // Build our field + if remain { + remainField = &field{fieldType, fieldVal} + } else { + // Normal struct field, store it away + fields = append(fields, field{fieldType, fieldVal}) + } + } + } + + // for fieldType, field := range fields { + for _, f := range fields { + field, fieldValue := f.field, f.val + fieldName := field.Name + + tagValue := field.Tag.Get(d.config.TagName) + tagValue = strings.SplitN(tagValue, ",", 2)[0] + if tagValue != "" { + fieldName = tagValue + } + + rawMapKey := reflect.ValueOf(fieldName) + rawMapVal := dataVal.MapIndex(rawMapKey) + if !rawMapVal.IsValid() { + // Do a slower search by iterating over each key and + // doing case-insensitive search. + for dataValKey := range dataValKeys { + mK, ok := dataValKey.Interface().(string) + if !ok { + // Not a string key + continue + } + + if strings.EqualFold(mK, fieldName) { + rawMapKey = dataValKey + rawMapVal = dataVal.MapIndex(dataValKey) + break + } + } + + if !rawMapVal.IsValid() { + // There was no matching key in the map for the value in + // the struct. Just ignore. + continue + } + } + + if !fieldValue.IsValid() { + // This should never happen + panic("field is not valid") + } + + // If we can't set the field, then it is unexported or something, + // and we just continue onwards. + if !fieldValue.CanSet() { + continue + } + + // Delete the key we're using from the unused map so we stop tracking + delete(dataValKeysUnused, rawMapKey.Interface()) + + // If the name is empty string, then we're at the root, and we + // don't dot-join the fields. + if name != "" { + fieldName = name + "." + fieldName + } + + if err := d.decode(fieldName, rawMapVal.Interface(), fieldValue); err != nil { + errors = appendErrors(errors, err) + } + } + + // If we have a "remain"-tagged field and we have unused keys then + // we put the unused keys directly into the remain field. + if remainField != nil && len(dataValKeysUnused) > 0 { + // Build a map of only the unused values + remain := map[interface{}]interface{}{} + for key := range dataValKeysUnused { + remain[key] = dataVal.MapIndex(reflect.ValueOf(key)).Interface() + } + + // Decode it as-if we were just decoding this map onto our map. 
+ if err := d.decodeMap(name, remain, remainField.val); err != nil { + errors = appendErrors(errors, err) + } + + // Set the map to nil so we have none so that the next check will + // not error (ErrorUnused) + dataValKeysUnused = nil + } + + if d.config.ErrorUnused && len(dataValKeysUnused) > 0 { + keys := make([]string, 0, len(dataValKeysUnused)) + for rawKey := range dataValKeysUnused { + keys = append(keys, rawKey.(string)) + } + sort.Strings(keys) + + err := fmt.Errorf("'%s' has invalid keys: %s", name, strings.Join(keys, ", ")) + errors = appendErrors(errors, err) + } + + if len(errors) > 0 { + return &Error{errors} + } + + // Add the unused keys to the list of unused keys if we're tracking metadata + if d.config.Metadata != nil { + for rawKey := range dataValKeysUnused { + key := rawKey.(string) + if name != "" { + key = name + "." + key + } + + d.config.Metadata.Unused = append(d.config.Metadata.Unused, key) + } + } + + return nil +} + +func isEmptyValue(v reflect.Value) bool { + switch getKind(v) { + case reflect.Array, reflect.Map, reflect.Slice, reflect.String: + return v.Len() == 0 + case reflect.Bool: + return !v.Bool() + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + return v.Int() == 0 + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: + return v.Uint() == 0 + case reflect.Float32, reflect.Float64: + return v.Float() == 0 + case reflect.Interface, reflect.Ptr: + return v.IsNil() + } + return false +} + +func getKind(val reflect.Value) reflect.Kind { + kind := val.Kind() + + switch { + case kind >= reflect.Int && kind <= reflect.Int64: + return reflect.Int + case kind >= reflect.Uint && kind <= reflect.Uint64: + return reflect.Uint + case kind >= reflect.Float32 && kind <= reflect.Float64: + return reflect.Float32 + default: + return kind + } +} diff --git a/vendor/github.com/moricho/tparallel/.gitignore b/vendor/github.com/moricho/tparallel/.gitignore new file mode 100644 index 000000000..71342280e --- /dev/null +++ b/vendor/github.com/moricho/tparallel/.gitignore @@ -0,0 +1,3 @@ +/tparallel +.envrc +/dist diff --git a/vendor/github.com/moricho/tparallel/.goreleaser.yml b/vendor/github.com/moricho/tparallel/.goreleaser.yml new file mode 100644 index 000000000..e9f6d727e --- /dev/null +++ b/vendor/github.com/moricho/tparallel/.goreleaser.yml @@ -0,0 +1,38 @@ +project_name: tparallel +env: + - GO111MODULE=on +before: + hooks: + - go mod tidy +builds: + - main: ./cmd/tparallel + binary: tparallel + ldflags: + - -s -w + - -X main.Version={{.Version}} + - -X main.Revision={{.ShortCommit}} + env: + - CGO_ENABLED=0 +archives: + - name_template: '{{ .ProjectName }}_{{ .Os }}_{{ .Arch }}{{ if .Arm }}v{{ .Arm }}{{ end }}' + replacements: + darwin: darwin + linux: linux + windows: windows + 386: i386 + amd64: x86_64 + format_overrides: + - goos: windows + format: zip +release: + prerelease: auto +brews: + - tap: + owner: moricho + name: homebrew-tparallel + homepage: https://github.com/moricho/tparallel + description: tparallel detects inappropriate usage of t.Parallel() method in your Go test codes + install: | + bin.install "tparallel" + test: | + system "#{bin}/goreleaser -v" diff --git a/vendor/github.com/moricho/tparallel/LICENSE b/vendor/github.com/moricho/tparallel/LICENSE new file mode 100644 index 000000000..4f029982f --- /dev/null +++ b/vendor/github.com/moricho/tparallel/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2020 moricho + +Permission is hereby granted, free of charge, to any 
person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/github.com/moricho/tparallel/Makefile b/vendor/github.com/moricho/tparallel/Makefile new file mode 100644 index 000000000..fb3588069 --- /dev/null +++ b/vendor/github.com/moricho/tparallel/Makefile @@ -0,0 +1,13 @@ +all: build + +.PHONY: build +build: + go build -o tparallel ./cmd/tparallel + +.PHONY: build_race +build_race: + go build -race -o tparallel ./cmd/tparallel + +.PHONY: test +test: build_race + go test -v ./... diff --git a/vendor/github.com/moricho/tparallel/README.md b/vendor/github.com/moricho/tparallel/README.md new file mode 100644 index 000000000..cd358d155 --- /dev/null +++ b/vendor/github.com/moricho/tparallel/README.md @@ -0,0 +1,100 @@ +# tparallel +[![tparallel](https://github.com/moricho/tparallel/workflows/tparallel/badge.svg?branch=master)](https://github.com/moricho/tparallel/actions) +[![Go Report Card](https://goreportcard.com/badge/github.com/moricho/tparallel)](https://goreportcard.com/report/github.com/moricho/tparallel) +[![MIT License](http://img.shields.io/badge/license-MIT-blue.svg?style=flat)](LICENSE) + +`tparallel` finds inappropriate usage of `t.Parallel()` method in your Go test codes. +It detects the following: +- `t.Parallel()` is called in either a top-level test function or a sub-test function only +- Although `t.Parallel()` is called in the sub-test function, it is post-processed by `defer` instead of `t.Cleanup()` + +This tool was inspired by this blog: [Go言語でのテストの並列化 〜t.Parallel()メソッドを理解する〜](https://engineering.mercari.com/blog/entry/how_to_use_t_parallel/) + +## Installation + +### From GitHub Releases +Please see [GitHub Releases](https://github.com/moricho/tparallel/releases). 
+Available binaries are: +- macOS +- Linux +- Windows + +### macOS +``` sh +$ brew tap moricho/tparallel +$ brew install tparallel +``` + +### go get +```sh +$ go get -u github.com/moricho/tparallel/cmd/tparallel +``` + +## Usage + +```sh +$ go vet -vettool=`which tparallel` +``` + +## Example + +```go +package sample + +import ( + "testing" +) + +func Test_Table1(t *testing.T) { + teardown := setup("Test_Table1") + defer teardown() + + tests := []struct { + name string + }{ + { + name: "Table1_Sub1", + }, + { + name: "Table1_Sub2", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + call(tt.name) + }) + } +} + +func Test_Table2(t *testing.T) { + teardown := setup("Test_Table2") + t.Cleanup(teardown) + t.Parallel() + + tests := []struct { + name string + }{ + { + name: "Table2_Sub1", + }, + { + name: "Table2_Sub2", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + call(tt.name) + }) + } +} +``` + +```console +# github.com/moricho/tparallel/testdata/src/sample +testdata/src/sample/table_test.go:7:6: Test_Table1 should use t.Cleanup +testdata/src/sample/table_test.go:7:6: Test_Table1 should call t.Parallel on the top level as well as its subtests +testdata/src/sample/table_test.go:30:6: Test_Table2's subtests should call t.Parallel +``` diff --git a/vendor/github.com/moricho/tparallel/go.mod b/vendor/github.com/moricho/tparallel/go.mod new file mode 100644 index 000000000..9947ccb60 --- /dev/null +++ b/vendor/github.com/moricho/tparallel/go.mod @@ -0,0 +1,8 @@ +module github.com/moricho/tparallel + +go 1.15 + +require ( + github.com/gostaticanalysis/analysisutil v0.1.0 + golang.org/x/tools v0.0.0-20200831203904-5a2aa26beb65 +) diff --git a/vendor/github.com/moricho/tparallel/go.sum b/vendor/github.com/moricho/tparallel/go.sum new file mode 100644 index 000000000..bcc4158da --- /dev/null +++ b/vendor/github.com/moricho/tparallel/go.sum @@ -0,0 +1,34 @@ +github.com/gostaticanalysis/analysisutil v0.1.0 h1:E4c8Y1EQURbBEAHoXc/jBTK7Np14ArT8NPUiSFOl9yc= +github.com/gostaticanalysis/analysisutil v0.1.0/go.mod h1:dMhHRU9KTiDcuLGdy87/2gTR8WruwYZrKdRq9m1O6uw= +github.com/gostaticanalysis/comment v1.3.0 h1:wTVgynbFu8/nz6SGgywA0TcyIoAVsYc7ai/Zp5xNGlw= +github.com/gostaticanalysis/comment v1.3.0/go.mod h1:xMicKDx7XRXYdVwY9f9wQpDJVnqWxw9wCauCMKp+IBI= +github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.3.0 h1:RM4zey1++hCTbCVQfnWeKs9/IEsaBLA8vTkd0WVtmH4= +golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net 
v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/tools v0.0.0-20190307163923-6a08e3108db3/go.mod h1:25r3+/G6/xytQM8iWZKq3Hn0kr0rgFKPUNVEL/dr3z4= +golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20200624225443-88f3c62a19ff/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200831203904-5a2aa26beb65 h1:DajXNh69ob79PCQz1N7OHxmqq6ASZC5xAnJJWIQGR6I= +golang.org/x/tools v0.0.0-20200831203904-5a2aa26beb65/go.mod h1:Cj7w3i3Rnn0Xh82ur9kSqwfTHTeVxaDqrfMjpcNT6bE= +golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= diff --git a/vendor/github.com/moricho/tparallel/pkg/ssafunc/ssafunc.go b/vendor/github.com/moricho/tparallel/pkg/ssafunc/ssafunc.go new file mode 100644 index 000000000..5a8e637bd --- /dev/null +++ b/vendor/github.com/moricho/tparallel/pkg/ssafunc/ssafunc.go @@ -0,0 +1,34 @@ +package ssafunc + +import ( + "go/types" + + "github.com/gostaticanalysis/analysisutil" + "github.com/moricho/tparallel/pkg/ssainstr" + "golang.org/x/tools/go/ssa" +) + +// IsDeferCalled returns whether the given ssa.Function calls `defer` +func IsDeferCalled(f *ssa.Function) bool { + for _, block := range f.Blocks { + for _, instr := range block.Instrs { + switch instr.(type) { + case *ssa.Defer: + return true + } + } + } + return false +} + +// IsCalled returns whether the given ssa.Function calls `fn` func +func IsCalled(f *ssa.Function, fn *types.Func) bool { + block := f.Blocks[0] + for _, instr := range block.Instrs { + called := analysisutil.Called(instr, nil, fn) + if _, ok := ssainstr.LookupCalled(instr, fn); ok || called { + return true + } + } + return false +} diff --git a/vendor/github.com/moricho/tparallel/pkg/ssainstr/ssainstr.go b/vendor/github.com/moricho/tparallel/pkg/ssainstr/ssainstr.go new file mode 100644 index 000000000..374553f5e --- /dev/null +++ b/vendor/github.com/moricho/tparallel/pkg/ssainstr/ssainstr.go @@ -0,0 +1,63 @@ +package ssainstr + +import ( + "go/types" + + "github.com/gostaticanalysis/analysisutil" + "golang.org/x/tools/go/ssa" +) + +// LookupCalled looks up ssa.Instruction that call the `fn` func in the given instr +func LookupCalled(instr ssa.Instruction, fn *types.Func) ([]ssa.Instruction, 
bool) { + instrs := []ssa.Instruction{} + + call, ok := instr.(ssa.CallInstruction) + if !ok { + return instrs, false + } + + ssaCall := call.Value() + if ssaCall == nil { + return instrs, false + } + common := ssaCall.Common() + if common == nil { + return instrs, false + } + val := common.Value + + called := false + switch fnval := val.(type) { + case *ssa.Function: + for _, block := range fnval.Blocks { + for _, instr := range block.Instrs { + if analysisutil.Called(instr, nil, fn) { + called = true + instrs = append(instrs, instr) + } + } + } + } + + return instrs, called +} + +// HasArgs returns whether the given ssa.Instruction has `typ` type args +func HasArgs(instr ssa.Instruction, typ types.Type) bool { + call, ok := instr.(ssa.CallInstruction) + if !ok { + return false + } + + ssaCall := call.Value() + if ssaCall == nil { + return false + } + + for _, arg := range ssaCall.Call.Args { + if types.Identical(arg.Type(), typ) { + return true + } + } + return false +} diff --git a/vendor/github.com/moricho/tparallel/testmap.go b/vendor/github.com/moricho/tparallel/testmap.go new file mode 100644 index 000000000..fa9bed708 --- /dev/null +++ b/vendor/github.com/moricho/tparallel/testmap.go @@ -0,0 +1,63 @@ +package tparallel + +import ( + "go/types" + "strings" + + "github.com/gostaticanalysis/analysisutil" + "golang.org/x/tools/go/analysis/passes/buildssa" + "golang.org/x/tools/go/ssa" + + "github.com/moricho/tparallel/pkg/ssainstr" +) + +// getTestMap gets a set of a top-level test and its sub-tests +func getTestMap(ssaanalyzer *buildssa.SSA, testTyp types.Type) map[*ssa.Function][]*ssa.Function { + testMap := map[*ssa.Function][]*ssa.Function{} + + trun := analysisutil.MethodOf(testTyp, "Run") + for _, f := range ssaanalyzer.SrcFuncs { + if !strings.HasPrefix(f.Name(), "Test") || !(f.Parent() == (*ssa.Function)(nil)) { + continue + } + testMap[f] = []*ssa.Function{} + for _, block := range f.Blocks { + for _, instr := range block.Instrs { + called := analysisutil.Called(instr, nil, trun) + + if !called && ssainstr.HasArgs(instr, types.NewPointer(testTyp)) { + if instrs, ok := ssainstr.LookupCalled(instr, trun); ok { + for _, v := range instrs { + testMap[f] = appendTestMap(testMap[f], v) + } + } + } else if called { + testMap[f] = appendTestMap(testMap[f], instr) + } + } + } + } + + return testMap +} + +// appendTestMap converts ssa.Instruction to ssa.Function and append it to a given sub-test slice +func appendTestMap(subtests []*ssa.Function, instr ssa.Instruction) []*ssa.Function { + call, ok := instr.(ssa.CallInstruction) + if !ok { + return subtests + } + + ssaCall := call.Value() + for _, arg := range ssaCall.Call.Args { + switch arg := arg.(type) { + case *ssa.Function: + subtests = append(subtests, arg) + case *ssa.MakeClosure: + fn, _ := arg.Fn.(*ssa.Function) + subtests = append(subtests, fn) + } + } + + return subtests +} diff --git a/vendor/github.com/moricho/tparallel/tparallel.go b/vendor/github.com/moricho/tparallel/tparallel.go new file mode 100644 index 000000000..3139e0425 --- /dev/null +++ b/vendor/github.com/moricho/tparallel/tparallel.go @@ -0,0 +1,72 @@ +package tparallel + +import ( + "go/types" + + "github.com/gostaticanalysis/analysisutil" + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/buildssa" + + "github.com/moricho/tparallel/pkg/ssafunc" +) + +const doc = "tparallel detects inappropriate usage of t.Parallel() method in your Go test codes." 
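+
+// A minimal illustrative sketch of running this analyzer as a standalone
+// vet-style tool (the main package shown here is an example, not part of
+// this module):
+//
+//     package main
+//
+//     import (
+//         "golang.org/x/tools/go/analysis/singlechecker"
+//
+//         "github.com/moricho/tparallel"
+//     )
+//
+//     func main() { singlechecker.Main(tparallel.Analyzer) }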
+ +// Analyzer analyzes Go test codes whether they use t.Parallel() appropriately +// by using SSA (Single Static Assignment) +var Analyzer = &analysis.Analyzer{ + Name: "tparallel", + Doc: doc, + Run: run, + Requires: []*analysis.Analyzer{ + buildssa.Analyzer, + }, +} + +func run(pass *analysis.Pass) (interface{}, error) { + ssaanalyzer := pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA) + + obj := analysisutil.ObjectOf(pass, "testing", "T") + if obj == nil { + // skip checking + return nil, nil + } + testTyp, testPkg := obj.Type(), obj.Pkg() + + p, _, _ := types.LookupFieldOrMethod(testTyp, true, testPkg, "Parallel") + parallel, _ := p.(*types.Func) + c, _, _ := types.LookupFieldOrMethod(testTyp, true, testPkg, "Cleanup") + cleanup, _ := c.(*types.Func) + + testMap := getTestMap(ssaanalyzer, testTyp) // ex. {Test1: [TestSub1, TestSub2], Test2: [TestSub1, TestSub2, TestSub3], ...} + for top, subs := range testMap { + if len(subs) == 0 { + continue + } + isParallelTop := ssafunc.IsCalled(top, parallel) + isPararellSub := false + for _, sub := range subs { + isPararellSub = ssafunc.IsCalled(sub, parallel) + if isPararellSub { + break + } + } + + if ssafunc.IsDeferCalled(top) { + useCleanup := ssafunc.IsCalled(top, cleanup) + if isPararellSub && !useCleanup { + pass.Reportf(top.Pos(), "%s should use t.Cleanup instead of defer", top.Name()) + } + } + + if isParallelTop == isPararellSub { + continue + } else if isPararellSub { + pass.Reportf(top.Pos(), "%s should call t.Parallel on the top level as well as its subtests", top.Name()) + } else if isParallelTop { + pass.Reportf(top.Pos(), "%s's subtests should call t.Parallel", top.Name()) + } + } + + return nil, nil +} diff --git a/vendor/github.com/nakabonne/nestif/.gitignore b/vendor/github.com/nakabonne/nestif/.gitignore new file mode 100644 index 000000000..df71a2ac7 --- /dev/null +++ b/vendor/github.com/nakabonne/nestif/.gitignore @@ -0,0 +1,16 @@ +# Binaries for programs and plugins +*.exe +*.exe~ +*.dll +*.so +*.dylib +/nestif + +# Test binary, built with `go test -c` +*.test + +# Output of the go coverage tool, specifically when used with LiteIDE +*.out + +# Dependency directories (remove the comment below to include it) +# vendor/ diff --git a/vendor/github.com/nakabonne/nestif/LICENSE b/vendor/github.com/nakabonne/nestif/LICENSE new file mode 100644 index 000000000..ddf4d71ed --- /dev/null +++ b/vendor/github.com/nakabonne/nestif/LICENSE @@ -0,0 +1,25 @@ +BSD 2-Clause License + +Copyright (c) 2020, Ryo Nakao +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/github.com/nakabonne/nestif/README.md b/vendor/github.com/nakabonne/nestif/README.md new file mode 100644 index 000000000..ede411f73 --- /dev/null +++ b/vendor/github.com/nakabonne/nestif/README.md @@ -0,0 +1,122 @@ +# nestif + +[![Go Doc](https://img.shields.io/badge/godoc-reference-blue.svg?style=flat-square)](http://godoc.org/github.com/nakabonne/nestif) + +Reports deeply nested if statements in Go code, by calculating its complexities based on the rules defined by the [Cognitive Complexity white paper by G. Ann Campbell](https://www.sonarsource.com/docs/CognitiveComplexity.pdf). + +It helps you find if statements that make your code hard to read, and clarifies which parts to refactor. + +## Installation + +``` +go get github.com/nakabonne/nestif/cmd/nestif +``` + +## Usage + +### Quick Start + +```bash +nestif +``` + +The `...` glob operator is supported, and the above is an equivalent of: + +```bash +nestif ./... +``` + +One or more files and directories can be specified in a single command: + +```bash +nestif dir/foo.go dir2 dir3/... +``` + +Packages can be specified as well: + +```bash +nestif github.com/foo/bar example.com/bar/baz +``` + +### Options + +``` +usage: nestif [ ...] ... + -e, --exclude-dirs strings regexps of directories to be excluded for checking; comma-separated list + --json emit json format + --min int minimum complexity to show (default 1) + --top int show only the top N most complex if statements (default 10) + -v, --verbose verbose output +``` + +### Example + +Let's say you write: + +```go +package main + +func _() { + if foo { + if bar { + } + } + + if baz == "baz" { + if qux { + if quux { + } + } + } +} +``` + +And give it to nestif: + +```console +$ nestif foo.go +foo.go:9:2: `if baz == "baz"` is nested (complexity: 3) +foo.go:4:2: `if foo` is nested (complexity: 1) +``` + +Note that the results are sorted in descending order of complexity. In addition, it shows only the top 10 most complex if statements by default, and you can specify how many to show with `-top` flag. + +### Rules + +It calculates the complexities of if statements according to the nesting rules of Cognitive Complexity. 
+Since the more deeply-nested your code gets, the harder it can be to reason about, it assesses a nesting increment for it: + +```go +if condition1 { + if condition2 { // +1 + if condition3 { // +2 + if condition4 { // +3 + } + } + } +} +``` + +`else` and `else if` increase complexity by one wherever they are because the mental cost has already been paid when reading the if: + +```go +if condition1 { + if condition2 { // +1 + if condition3 { // +2 + } else if condition4 { // +1 + } else { // +1 + if condition5 { // +3 + } + } + } +} +``` + +## Inspired by + +- [uudashr/gocognit](https://github.com/uudashr/gocognit) +- [fzipp/gocyclo](https://github.com/fzipp/gocyclo) + +## Further reading + +Please see the [Cognitive Complexity: A new way of measuring understandability](https://www.sonarsource.com/docs/CognitiveComplexity.pdf) white paper by G. Ann Campbell for more detail on Cognitive Complexity. diff --git a/vendor/github.com/nakabonne/nestif/go.mod b/vendor/github.com/nakabonne/nestif/go.mod new file mode 100644 index 000000000..325901d59 --- /dev/null +++ b/vendor/github.com/nakabonne/nestif/go.mod @@ -0,0 +1,8 @@ +module github.com/nakabonne/nestif + +go 1.13 + +require ( + github.com/spf13/pflag v1.0.5 + github.com/stretchr/testify v1.4.0 +) diff --git a/vendor/github.com/nakabonne/nestif/go.sum b/vendor/github.com/nakabonne/nestif/go.sum new file mode 100644 index 000000000..6d790ef35 --- /dev/null +++ b/vendor/github.com/nakabonne/nestif/go.sum @@ -0,0 +1,12 @@ +github.com/davecgh/go-spew v1.1.0 h1:ZDRjVQ15GmhC3fiQ8ni8+OwkZQO4DARzQgrnXU1Liz8= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= +github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/testify v1.4.0 h1:2E4SXV/wtOkTonXsotYi4li6zVWxYlZuYNCXe9XRJyk= +github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/yaml.v2 v2.2.2 h1:ZCJp+EgiOT7lHqUV2J862kp8Qj64Jo6az82+3Td9dZw= +gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= diff --git a/vendor/github.com/nakabonne/nestif/nestif.go b/vendor/github.com/nakabonne/nestif/nestif.go new file mode 100644 index 000000000..d458022fb --- /dev/null +++ b/vendor/github.com/nakabonne/nestif/nestif.go @@ -0,0 +1,148 @@ +// Copyright 2020 Ryo Nakao . +// +// All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package nestif provides an API to detect deeply nested if statements. +package nestif + +import ( + "bytes" + "fmt" + "go/ast" + "go/printer" + "go/token" + "io" +) + +// Issue represents an issue of root if statement that has nested ifs. +type Issue struct { + Pos token.Position + Complexity int + Message string +} + +// Checker represents a checker that finds nested if statements. +type Checker struct { + // Minimum complexity to report. + MinComplexity int + + // For debug mode. + debugWriter io.Writer + issues []Issue +} + +// Check inspects a single file and returns found issues. 
+func (c *Checker) Check(f *ast.File, fset *token.FileSet) []Issue { + c.issues = []Issue{} // refresh + ast.Inspect(f, func(n ast.Node) bool { + fn, ok := n.(*ast.FuncDecl) + if !ok || fn.Body == nil { + return true + } + for _, stmt := range fn.Body.List { + c.checkFunc(&stmt, fset) + } + return true + }) + + return c.issues +} + +// checkFunc inspects a function and sets a list of issues if there are. +func (c *Checker) checkFunc(stmt *ast.Stmt, fset *token.FileSet) { + ast.Inspect(*stmt, func(n ast.Node) bool { + ifStmt, ok := n.(*ast.IfStmt) + if !ok { + return true + } + + c.checkIf(ifStmt, fset) + return false + }) +} + +// checkIf inspects a if statement and sets an issue if there is. +func (c *Checker) checkIf(stmt *ast.IfStmt, fset *token.FileSet) { + v := newVisitor() + ast.Walk(v, stmt) + if v.complexity < c.MinComplexity { + return + } + pos := fset.Position(stmt.Pos()) + c.issues = append(c.issues, Issue{ + Pos: pos, + Complexity: v.complexity, + Message: c.makeMessage(v.complexity, stmt.Cond, fset), + }) +} + +type visitor struct { + complexity int + nesting int + // To avoid adding complexity including nesting level to `else if`. + elseifs map[*ast.IfStmt]bool +} + +func newVisitor() *visitor { + return &visitor{ + elseifs: make(map[*ast.IfStmt]bool), + } +} + +// Visit traverses an AST in depth-first order by calling itself +// recursively, and calculates the complexities of if statements. +func (v *visitor) Visit(n ast.Node) ast.Visitor { + ifStmt, ok := n.(*ast.IfStmt) + if !ok { + return v + } + + v.incComplexity(ifStmt) + v.nesting++ + ast.Walk(v, ifStmt.Body) + v.nesting-- + + switch t := ifStmt.Else.(type) { + case *ast.BlockStmt: + v.complexity++ + v.nesting++ + ast.Walk(v, t) + v.nesting-- + case *ast.IfStmt: + v.elseifs[t] = true + ast.Walk(v, t) + } + + return nil +} + +func (v *visitor) incComplexity(n *ast.IfStmt) { + // In case of `else if`, increase by 1. + if v.elseifs[n] { + v.complexity++ + } else { + v.complexity += v.nesting + } +} + +func (c *Checker) makeMessage(complexity int, cond ast.Expr, fset *token.FileSet) string { + p := &printer.Config{} + b := new(bytes.Buffer) + if err := p.Fprint(b, fset, cond); err != nil { + c.debug("failed to convert condition into string: %v", err) + } + return fmt.Sprintf("`if %s` is deeply nested (complexity: %d)", b.String(), complexity) +} + +// DebugMode makes it possible to emit debug logs. +func (c *Checker) DebugMode(w io.Writer) { + c.debugWriter = w +} + +func (c *Checker) debug(format string, a ...interface{}) { + if c.debugWriter != nil { + fmt.Fprintf(c.debugWriter, format, a...) 
+ } +} diff --git a/vendor/github.com/nbutton23/zxcvbn-go/.gitignore b/vendor/github.com/nbutton23/zxcvbn-go/.gitignore new file mode 100644 index 000000000..4bff1a28e --- /dev/null +++ b/vendor/github.com/nbutton23/zxcvbn-go/.gitignore @@ -0,0 +1,2 @@ +zxcvbn +debug.test diff --git a/vendor/github.com/nbutton23/zxcvbn-go/LICENSE.txt b/vendor/github.com/nbutton23/zxcvbn-go/LICENSE.txt new file mode 100644 index 000000000..e8f59e06d --- /dev/null +++ b/vendor/github.com/nbutton23/zxcvbn-go/LICENSE.txt @@ -0,0 +1,20 @@ +Copyright (c) Nathan Button + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/vendor/github.com/nbutton23/zxcvbn-go/Makefile b/vendor/github.com/nbutton23/zxcvbn-go/Makefile new file mode 100644 index 000000000..6aa13e006 --- /dev/null +++ b/vendor/github.com/nbutton23/zxcvbn-go/Makefile @@ -0,0 +1,15 @@ +PKG_LIST = $$( go list ./... | grep -v /vendor/ | grep -v "zxcvbn-go/data" ) + +.DEFAULT_GOAL := help + +.PHONY: help +help: + @grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | sort | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}' + +.PHONY: test +test: ## Run `go test {Package list}` on the packages + go test $(PKG_LIST) + +.PHONY: lint +lint: ## Run `golint {Package list}` + golint $(PKG_LIST) \ No newline at end of file diff --git a/vendor/github.com/nbutton23/zxcvbn-go/README.md b/vendor/github.com/nbutton23/zxcvbn-go/README.md new file mode 100644 index 000000000..3f742a9da --- /dev/null +++ b/vendor/github.com/nbutton23/zxcvbn-go/README.md @@ -0,0 +1,78 @@ +This is a goLang port of python-zxcvbn and [zxcvbn](https://github.com/dropbox/zxcvbn), which are python and JavaScript password strength +generators. zxcvbn attempts to give sound password advice through pattern +matching and conservative entropy calculations. It finds 10k common passwords, +common American names and surnames, common English words, and common patterns +like dates, repeats (aaa), sequences (abcd), and QWERTY patterns. + +Please refer to https://dropbox.tech/security/zxcvbn-realistic-password-strength-estimation for the full details and +motivation behind zxcbvn. 
The source code for the original JavaScript (well, +actually CoffeeScript) implementation can be found at: + +https://github.com/lowe/zxcvbn + +Python at: + +https://github.com/dropbox/python-zxcvbn + +For full motivation, see: + +https://dropbox.tech/security/zxcvbn-realistic-password-strength-estimation + +------------------------------------------------------------------------ +Use +------------------------------------------------------------------------ + +The zxcvbn module exposes the public PasswordStrength() function. Import zxcvbn, and +call PasswordStrength(password string, userInputs []string). The function will return a +result dictionary with the following keys: + +Entropy # bits + +CrackTime # estimation of actual crack time, in seconds. + +CrackTimeDisplay # same crack time, as a friendlier string: + # "instant", "6 minutes", "centuries", etc. + +Score # [0,1,2,3,4] if crack time is less than + # [10^2, 10^4, 10^6, 10^8, Infinity]. + # (useful for implementing a strength bar.) + +MatchSequence # the list of patterns that zxcvbn based the + # entropy calculation on. + +CalcTime # how long it took to calculate an answer, + # in milliseconds. usually only a few ms. + +The userInputs argument is a slice of strings that zxcvbn +will add to its internal dictionary. This can be whatever list of +strings you like, but is meant for user inputs from other fields of the +form, like name and email. That way a password that includes the user's +personal info can be heavily penalized. This list is also good for +site-specific vocabulary. + +Bug reports and pull requests welcome! + +------------------------------------------------------------------------ +Project Status +------------------------------------------------------------------------ + +Use zxcvbn_test.go to check how close to feature parity the project is. + +------------------------------------------------------------------------ +Acknowledgment +------------------------------------------------------------------------ + +Thanks to Dan Wheeler (https://github.com/lowe) for the CoffeeScript implementation +(see above.)
To repeat his outside acknowledgements (which remain useful, as always): + +Many thanks to Mark Burnett for releasing his 10k top passwords list: +https://xato.net/passwords/more-top-worst-passwords +and for his 2006 book, +"Perfect Passwords: Selection, Protection, Authentication" + +Huge thanks to Wiktionary contributors for building a frequency list +of English as used in television and movies: +https://en.wiktionary.org/wiki/Wiktionary:Frequency_lists + +Last but not least, big thanks to xkcd :) +https://xkcd.com/936/ diff --git a/vendor/github.com/nbutton23/zxcvbn-go/adjacency/adjcmartix.go b/vendor/github.com/nbutton23/zxcvbn-go/adjacency/adjcmartix.go new file mode 100644 index 000000000..66ad30b82 --- /dev/null +++ b/vendor/github.com/nbutton23/zxcvbn-go/adjacency/adjcmartix.go @@ -0,0 +1,108 @@ +package adjacency + +import ( + "encoding/json" + "log" + + "github.com/nbutton23/zxcvbn-go/data" +) + +// Graph holds information about different graphs +type Graph struct { + Graph map[string][]string + averageDegree float64 + Name string +} + +// GraphMap is a map of all graphs +var GraphMap = make(map[string]Graph) + +func init() { + GraphMap["qwerty"] = BuildQwerty() + GraphMap["dvorak"] = BuildDvorak() + GraphMap["keypad"] = BuildKeypad() + GraphMap["macKeypad"] = BuildMacKeypad() + GraphMap["l33t"] = BuildLeet() +} + +//BuildQwerty builds the Qwerty Graph +func BuildQwerty() Graph { + data, err := data.Asset("data/Qwerty.json") + if err != nil { + panic("Can't find asset") + } + return getAdjancencyGraphFromFile(data, "qwerty") +} + +//BuildDvorak builds the Dvorak Graph +func BuildDvorak() Graph { + data, err := data.Asset("data/Dvorak.json") + if err != nil { + panic("Can't find asset") + } + return getAdjancencyGraphFromFile(data, "dvorak") +} + +//BuildKeypad builds the Keypad Graph +func BuildKeypad() Graph { + data, err := data.Asset("data/Keypad.json") + if err != nil { + panic("Can't find asset") + } + return getAdjancencyGraphFromFile(data, "keypad") +} + +//BuildMacKeypad builds the Mac Keypad Graph +func BuildMacKeypad() Graph { + data, err := data.Asset("data/MacKeypad.json") + if err != nil { + panic("Can't find asset") + } + return getAdjancencyGraphFromFile(data, "mac_keypad") +} + +//BuildLeet builds the L33T Graph +func BuildLeet() Graph { + data, err := data.Asset("data/L33t.json") + if err != nil { + panic("Can't find asset") + } + return getAdjancencyGraphFromFile(data, "keypad") +} + +func getAdjancencyGraphFromFile(data []byte, name string) Graph { + + var graph Graph + err := json.Unmarshal(data, &graph) + if err != nil { + log.Fatal(err) + } + graph.Name = name + return graph +} + +// CalculateAvgDegree calclates the average degree between nodes in the graph +//on qwerty, 'g' has degree 6, being adjacent to 'ftyhbv'. '\' has degree 1. +//this calculates the average over all keys. 
+//TODO double check that i ported this correctly scoring.coffee ln 5 +func (adjGrp Graph) CalculateAvgDegree() float64 { + if adjGrp.averageDegree != float64(0) { + return adjGrp.averageDegree + } + var avg float64 + var count float64 + for _, value := range adjGrp.Graph { + + for _, char := range value { + if len(char) != 0 || char != " " { + avg += float64(len(char)) + count++ + } + } + + } + + adjGrp.averageDegree = avg / count + + return adjGrp.averageDegree +} diff --git a/vendor/github.com/nbutton23/zxcvbn-go/data/bindata.go b/vendor/github.com/nbutton23/zxcvbn-go/data/bindata.go new file mode 100644 index 000000000..f3a0c010c --- /dev/null +++ b/vendor/github.com/nbutton23/zxcvbn-go/data/bindata.go @@ -0,0 +1,444 @@ +// Code generated by go-bindata. +// sources: +// data/Dvorak.json +// data/English.json +// data/FemaleNames.json +// data/Keypad.json +// data/L33t.json +// data/MacKeypad.json +// data/MaleNames.json +// data/Passwords.json +// data/Qwerty.json +// data/Surnames.json +// DO NOT EDIT! + +package data + +import ( + "bytes" + "compress/gzip" + "fmt" + "io" + "io/ioutil" + "os" + "path/filepath" + "strings" + "time" +) + +func bindataRead(data []byte, name string) ([]byte, error) { + gz, err := gzip.NewReader(bytes.NewBuffer(data)) + if err != nil { + return nil, fmt.Errorf("Read %q: %v", name, err) + } + + var buf bytes.Buffer + _, err = io.Copy(&buf, gz) + clErr := gz.Close() + + if err != nil { + return nil, fmt.Errorf("Read %q: %v", name, err) + } + if clErr != nil { + return nil, err + } + + return buf.Bytes(), nil +} + +type asset struct { + bytes []byte + info os.FileInfo +} + +type bindataFileInfo struct { + name string + size int64 + mode os.FileMode + modTime time.Time +} + +func (fi bindataFileInfo) Name() string { + return fi.name +} +func (fi bindataFileInfo) Size() int64 { + return fi.size +} +func (fi bindataFileInfo) Mode() os.FileMode { + return fi.mode +} +func (fi bindataFileInfo) ModTime() time.Time { + return fi.modTime +} +func (fi bindataFileInfo) IsDir() bool { + return false +} +func (fi bindataFileInfo) Sys() interface{} { + return nil +} + +var _dataDvorakJson = 
[]byte("\x1f\x8b\x08\x00\x00\x09\x6e\x88\x00\xff\xb4\x98\x57\x57\x1b\x41\x0c\x85\xdf\xf9\x15\xb0\x74\x30\xbd\xf7\xde\x7b\x6f\xa6\x77\x30\xbd\x17\xf3\xdb\x33\x26\x39\x99\xef\x9e\x78\x96\x7d\x88\x5e\x72\xc6\x61\xf9\xa4\x95\xae\x34\xd7\x7c\x14\x14\x16\x46\x63\xf7\xfb\xb7\x67\x51\x67\x61\xee\x83\xfb\x58\xef\x8e\x5b\xdf\x47\xf7\xa1\xa3\x22\x4a\xfd\x39\x5f\x3f\x65\x32\xf9\xce\xd1\xd6\xc7\xdf\x67\xa2\xcc\xb4\x3f\xdf\x2f\x46\xdf\xc7\xed\xdf\xff\x13\x35\x10\xbc\xf7\xf5\x33\xb8\xb1\xdf\xc3\xca\xd3\x91\xfc\x82\x90\x1b\x49\x6e\x28\xfa\x99\xdc\x54\xec\xc9\xa9\x6e\x8d\x22\xe4\x26\x92\x91\x4f\x90\xdc\x5c\xe2\x69\xb5\xbd\x12\x45\xc0\xcd\x04\x23\x9d\x20\xb8\xa5\xd4\xc3\x6e\xe7\x25\x88\x80\x5b\x08\x46\x36\x41\x70\xeb\x8e\x87\xbd\x6d\x48\x10\x01\xb7\x12\x8c\x6c\x82\xe0\xb6\x32\x0f\x3b\x19\x95\x20\x02\x6e\x23\x18\xd9\x04\xc1\xed\x55\x1e\x76\x3a\x26\x41\x04\xdc\x4e\x30\xb2\x09\x82\xa1\xf6\xe8\x70\x48\x82\x08\xb8\x83\x60\x64\x13\x04\xd7\x57\xca\x58\x30\x88\x80\x8b\xcc\x46\xc4\xfd\xcc\xa3\x05\x81\x79\x11\x1c\xe7\x62\x7f\x20\x4c\x2e\xb6\x1a\x91\x12\xab\x11\x29\xb5\x1a\x91\x32\x2b\x25\x97\x5b\x35\xaf\xc2\x4a\xc9\x95\x56\xb7\x48\x95\xd5\x50\x57\x13\x5c\xd7\xe7\x1f\xdc\xce\xe6\x0d\x12\xa5\xd3\x9f\xf9\x7f\x50\xb3\xab\xe4\x14\xc9\x9c\x52\x69\x19\xef\x24\x8e\xc5\xcd\x9c\xb4\x52\xc8\x35\x24\x3f\x2c\xf9\x07\x99\x7f\x4f\xf5\xcf\x45\x7a\xdf\x54\x70\x2d\xc1\x14\x13\xb3\xe4\x20\x73\xde\x8e\x47\x24\x7b\x01\xd7\x11\xcc\x3e\xb3\xff\xa8\x38\xb3\xcf\x15\x36\x85\xb7\x15\x70\x67\x68\x44\x20\x7f\xa9\xe5\xdd\x42\xb0\x2c\x02\xee\xb2\x02\x77\x9b\xc9\xa2\xc7\x4c\xca\xbd\x56\xba\xe8\xb3\xd2\x45\xbf\x99\x37\x1c\x08\x09\x43\x7a\x49\x04\x7b\xd6\xd5\x19\xde\xca\x83\xcc\xf9\x75\xdd\xff\xd2\xd1\xb0\x3f\x9f\x8d\xfb\xf3\xd5\x4c\x32\xc9\x0d\x11\xcc\x0b\x87\x17\x11\x17\x26\x57\xfc\xe3\xb2\x04\x17\xf0\x30\xc1\xe7\x13\xf9\x8d\x1f\x03\x32\xfb\x83\x41\x7f\x76\x6f\x2b\xe0\x11\x82\x59\x3f\xce\x02\x45\xf6\xb4\xe2\xcf\x17\x93\x32\x95\x02\x1e\x25\x98\x97\x3a\x2f\x7b\x5a\x58\x66\xcf\x3e\xb8\xb7\x15\xf0\x18\xc1\x7c\x7d\xc2\x58\x6f\x5e\x4a\x2c\x8b\x0b\x22\xe0\x71\x82\x99\x01\x33\x23\x8c\x0d\x83\x42\x72\xf5\x16\xf0\x04\xc1\xac\x1f\xcb\xc2\x37\x61\x70\xca\xf3\x72\x4a\xc1\x93\x04\x63\x2d\xca\x26\x60\x40\x07\x48\xa4\xe3\x29\x82\xd9\x65\xc2\x28\x43\x64\x19\x0b\x9e\x26\x98\xe2\xa7\xef\xe1\x4a\xe2\x76\xe5\x05\x7c\x3d\xab\xa5\x98\x21\x98\x82\x67\xc7\xd9\xb0\x97\xb5\x64\x19\xcf\x12\x4c\x00\xb3\xe7\x42\x65\x96\xcf\xab\x12\x50\xc0\x73\x04\xf3\xae\xe3\x46\xe3\x14\xb2\xa9\x6c\xb6\xdb\x74\x02\x9e\x27\x98\x00\xde\x1b\xf4\xf2\x94\x21\x1b\xec\x02\x0a\x78\x81\x60\xec\x57\xd9\x1b\xcc\x12\xca\x89\xad\xf1\x22\xc1\x9c\x30\xae\x4a\x2a\x84\xf5\x76\x4a\x60\x83\x05\xbc\x44\x30\x1f\x24\x80\x12\xe3\x4d\xe7\x3c\x1b\x1b\x29\xe0\x65\x82\x29\x31\x66\x4f\x85\x30\x38\xa4\x97\xdb\x1b\x02\x5e\x21\x98\xb5\xe4\x0e\x66\xc3\x38\x85\x18\xef\x5c\xed\x05\xbc\x4a\x30\x33\x60\x66\xd4\x2e\x5e\x3f\xb6\x79\x6b\x04\x73\x0d\x72\x58\x18\x04\x43\x11\x0b\x5e\x27\x98\xaf\xc6\x57\xe6\xaa\xc4\xd8\xc7\x82\x37\x08\x66\x5d\x39\x14\xbc\xa6\xb8\x9b\x19\xdc\x0d\x8b\x80\x37\x09\xe6\xec\xb3\xae\x90\x58\xc8\x23\xfd\x93\xf1\x16\xc1\x18\x84\xa0\xc9\xa2\x93\xa3\xbe\x9d\xee\x05\x9c\x4e\x93\x9c\xe0\x9b\x4c\xe2\x94\xb7\x09\xc6\x46\x4f\x02\x13\xf3\xe9\xd2\x17\xf0\x8e\xd5\xdf\xc9\x76\xad\xbe\xec\xed\x05\x8c\x6c\x10\x10\x63\xa3\x05\xbc\x6f\xe6\x90\x0f\xac\x1c\xf2\xa1\x95\x43\x3e\xb2\x72\xc8\xc7\x56\x0e\xf9\xc4\xca\x21\x9f\x5a\x39\xe4\x33\x2b\x87\x7c\x6e\xe5\x90\x2f\xac\x1c\xf2\xa5\x95\x43\xce\x58\x39\xe4\x2b\x2b\x87\x7c\x6d\xe5\x90\x6f\xac\x1c\xf2\xad\x95\x43\xbe\xb3\x72\xc8\xf7\x56\x0e\xf9\xc1\xca\x21\x3f\x5a\x39\xe4\x27\x2b\x87\xf
c\x6c\xe5\x90\x5f\xac\x1c\xf2\xab\x95\x43\x7e\xb3\x72\xc8\xef\x56\x0e\xf9\xc3\xca\x21\x7f\x5a\x19\xe4\xac\x95\x41\xfe\xfa\xef\x76\xd3\xfd\x9b\x2d\xc8\x16\xfc\x0a\x00\x00\xff\xff\xd5\xc4\xca\x21\xce\x20\x00\x00") + +func dataDvorakJsonBytes() ([]byte, error) { + return bindataRead( + _dataDvorakJson, + "data/Dvorak.json", + ) +} + +func dataDvorakJson() (*asset, error) { + bytes, err := dataDvorakJsonBytes() + if err != nil { + return nil, err + } + + info := bindataFileInfo{name: "data/Dvorak.json", size: 8398, mode: os.FileMode(420), modTime: time.Unix(1452717629, 0)} + a := &asset{bytes: bytes, info: info} + return a, nil +} + +var _dataEnglishJson = []byte("\x1f\x8b\x08\x00\x00\x09\x6e\x88\x00\xff\x5c\xbd\x4b\x9a\xe3\xca\x0e\x34\xb6\x15\x7f\x9a\xf4\xe4\xae\xc0\x6b\xf0\x0e\x3c\x4a\x92\x29\x31\x4b\x24\x93\x87\x0f\xa9\xd4\xde\xbc\x81\x88\x40\x52\xfd\x0f\xee\xed\xea\x3e\x55\x2a\x32\x1f\x78\x04\x02\x81\xff\xef\xf6\xff\x94\xfd\xb8\xfd\xdf\xff\xef\xff\x75\xfb\xd4\xf3\xf6\xbf\x5b\xb1\xff\x1d\xd5\xff\x6f\xcc\xf6\xff\xc9\xff\xb7\x0c\xf8\x7b\x3a\xfc\xbf\xfb\xff\xd5\xbb\xfd\xdf\xec\xff\xfd\xad\x7f\xdd\xfd\xff\x16\x7c\x1b\xbe\x7e\x2e\xf5\xed\xff\xf4\x67\xb6\xff\xbf\xd7\xcd\xfe\x7f\xf1\x4f\x1d\xd3\xcb\x7f\x6c\xfe\xd8\xff\x0d\x75\xf9\xe3\x3f\xfc\x73\xee\x07\xfe\xfb\x81\x7f\xb4\xff\xeb\xfc\x7b\xaa\x7f\x9c\x3d\x94\xff\xec\x3b\xf9\x87\xbe\x33\x1e\xe0\x0f\xbe\x2e\xc7\x68\x7f\xec\xf8\xf6\xd3\x7f\x32\x4d\x13\xbe\x07\x7f\xa4\xcd\xbf\x17\xaf\x50\xfd\xfb\x52\x57\xf1\x4d\x5b\x79\x8c\x07\x3f\xf7\x0f\xbe\xe5\x91\x0f\x7c\x23\xfe\xc2\xef\x79\xd4\xb2\x3c\xec\xcf\xa9\x3c\xfd\x1f\x3f\x39\xf9\x27\x94\x3b\xbf\xcf\x7f\x29\x3e\xb7\x4f\xfe\x80\xe7\x8a\xc7\x5b\x0e\xbe\xfb\xf2\xd4\x52\xe1\x21\xb9\x08\x0f\xbc\x78\xf1\x95\xc0\x62\x8d\xfa\x57\xfe\x88\x7e\xf3\xe2\xff\x3f\x94\x01\x6b\xea\xab\xb3\x67\xfc\x92\x8a\x65\x7e\xd4\xca\x3d\xc8\xfe\x9f\xb6\x6c\xef\xea\x5f\x70\x55\xea\x39\xf9\x7f\x9c\x6a\x7d\xe2\xa7\x33\xf6\xa1\x70\x7f\x0a\x96\xa3\x3e\x93\x7f\x7f\x97\xfa\x27\x9f\x1c\x0b\x3f\x67\xbc\xc2\xc1\x25\x2b\x7f\xf0\xc7\x7d\xab\x33\xde\xf4\x83\xd5\xd4\x3a\xe2\x75\x7a\xfd\x26\x7b\x4c\x7e\xc0\x27\xef\x78\xb5\x1d\x7b\x86\xdf\x8b\xad\xde\xed\xa9\x7d\x31\x1e\x78\x1e\xee\x68\x9f\xce\x3d\xeb\xbf\xe1\x20\xe8\x85\xfc\x87\x76\x3c\x1d\x1e\xff\x48\x58\x74\x3c\xd8\x1b\xff\x7c\xee\xd8\x8b\xe3\x98\x70\x72\xf8\xdf\x97\x9c\x07\xac\xcb\xb2\x24\xfc\xf5\x85\x9d\x79\x67\xee\xea\x51\x2b\xf7\x09\xcf\x5d\xfe\xe0\xd0\xed\x27\xff\xdb\x98\xfd\x05\xe7\xca\x85\xe7\x0f\xf2\x98\xed\x75\xdb\x3e\x5c\xc3\x2d\x4e\x37\x3e\x61\xca\xb1\xbf\xfe\x4a\x09\x3f\x9f\x3e\x38\xa6\x43\x7d\x2f\xf8\xeb\x82\xb7\xf2\xef\x3e\xfd\xbc\xd8\xe7\x62\xc9\x3f\xb1\xcb\xf8\xa0\x7d\xd4\x1a\xa6\xe5\x13\x1f\xb7\x27\x6c\xfb\x7c\xf6\x23\xff\x03\xde\xf7\xee\x1f\x6e\xaf\x85\x45\xbd\xdf\xf1\x9b\xf8\xfd\x76\xbc\x70\xcc\x1e\xe5\xc5\xa3\x33\xf1\x77\xd4\x93\x67\x7b\xcc\x93\x1f\xca\xe3\x8d\x6b\x9c\x26\xff\xde\x35\xd7\x75\xe2\x49\xf2\xdf\xb5\x1f\x3c\x17\xef\x84\xfb\x5c\x16\x5c\xf9\x7b\xc1\x4d\xb7\x8b\x18\x2f\xfa\x48\x71\xa9\x97\x87\xd6\x01\xaf\x31\xd4\xbc\xf3\x08\xf4\xbc\x76\x47\xc5\x5b\x3d\xec\x68\x1e\xd8\xee\x3b\x97\xb7\xcb\xc7\x81\x05\xd6\x06\x2d\xba\x7f\xfe\x0e\xfe\x0b\xb8\xc5\xf7\xb2\xed\xfc\xb1\xa9\x64\xbe\xd4\xc1\xab\x76\xcf\x79\xd2\x4f\xb7\xf5\x7a\xd7\xcd\x5f\xc9\xaf\x30\xbe\xd7\x1e\x1b\x27\x8a\x07\x2b\xdd\xf9\x0b\xa7\xb4\xeb\x82\xe1\xd8\x0d\xf8\x45\xcf\x9c\x57\x3d\xbe\x2f\x0a\xed\xc6\x56\xcf\x85\x8b\x52\x57\x5d\x32\x9e\xa2\xf2\x07\xef\x74\xfa\x4f\x16\xbd\x6f\x9a\xec\x91\x79\x22\xf7\x23\xf3\x94\x2e\x07\x0e\xe3\xbc\xf1\xbb\x71\x27\x70\x06\x6c\x4f\xf0\xcb\xbb\x02\x7b\x82\x1b\x3f\xa6\x75\xcd\x4b\x1e\x62\x
23\x79\x85\xf9\xe1\xc7\xf6\xe1\x2b\x3e\xb9\x13\xef\xad\x6a\xc7\x37\xdf\x5c\x6d\x27\xbf\x65\x4e\x03\x2f\xc2\x1b\x0b\xc7\x7f\x7c\x9c\x79\xe7\x95\xc4\xd6\x70\x0b\x70\xd3\x66\xdc\xea\x2d\xcf\x79\xee\xb0\x3e\x66\xf8\x0e\xad\x67\xfe\xa3\x3d\xb4\x7f\xe3\xb2\x0f\xf8\x99\x29\xd3\x5a\xaf\x53\xea\xfd\x4f\x5b\xa6\x6c\x1b\x45\x7f\x00\xb3\xa3\x35\xe9\x8f\x53\x16\x69\xcc\x69\xc3\xef\xc4\xb9\x5f\x0a\x7e\xee\x9e\xf4\xb1\x79\xa2\x09\x38\x92\xec\x3f\x2e\x59\xd2\xd2\xda\x37\x15\xff\x2e\x33\x33\x1b\xbe\x6f\xd6\x61\x99\xb9\x1a\x59\x57\x2a\x2f\xb1\x18\x1b\x7f\x25\x9e\xbe\x4f\x73\x98\x17\x7e\xae\x9f\x0f\x1a\x3f\x5c\xd0\x7b\x9a\xcb\xc4\xcb\x5d\x27\x9a\x98\x38\x64\x76\x73\x76\xfd\x84\xfd\xe2\xc9\xef\x5a\x19\x32\x5c\xdf\xfe\xc4\xea\xe2\x28\xcd\xf4\x53\x66\x92\xb9\x6e\xb5\x99\xdd\x38\x97\x34\x02\xef\xb1\xe0\x26\x9b\xe3\xd8\x7c\x33\xb6\x8a\xb5\x9f\xf2\xdd\x7f\xfc\xc9\x1d\x3b\x6a\x5c\x05\xb7\xe6\x78\x70\x18\x90\xba\x62\x57\xf5\x32\xfc\xe3\x9d\xe1\x5e\x4e\xd8\xe5\x79\xd6\x21\xf2\x77\x59\x37\xdb\x45\x78\x8b\x04\x9f\x52\x36\x7c\x54\xa1\x97\x82\x9f\xb9\x6f\x25\x63\xf9\xd2\x64\xbf\x69\xe0\x37\xeb\x98\x2d\xf9\x97\x0b\xbf\xc1\xd9\xfc\xd4\x0e\x9f\x59\xbb\x89\x26\xb2\x2c\xe7\x81\x0d\xd4\x0d\xb1\x9b\x37\xf1\xe0\x9a\xa3\xe3\x47\xb8\x43\xe7\xf6\xf9\xd6\x0f\x78\x85\x05\xbe\x63\x4e\xba\xfa\xf3\x47\x8b\xda\x5f\x47\x26\xff\xda\xa1\xe1\x89\x49\x2f\x19\x9b\x51\xbf\x3c\x75\xfa\x0f\x7e\x51\x78\x3e\x71\x0c\xc7\x73\x83\x91\xa8\xb0\xab\x15\xfe\xdf\xd6\x1a\x9b\x69\xdb\x45\x7f\x70\x1c\xd8\xb7\x89\x67\x60\x2f\x0b\x4e\x60\xfe\xed\xcf\x38\x7a\xf8\x90\x27\xed\xdf\xc8\x47\xf6\xa3\x82\xf7\xaa\x34\x13\x3d\x0e\x71\x2c\xd7\xb9\x98\xb5\xbc\x31\xe4\x60\x84\xe0\x57\x1e\xd7\x8c\x47\xc0\xfe\x0a\x2f\xf4\x81\x97\xd8\xe9\x08\x07\x6e\xeb\x41\xaf\x5c\xf9\x1c\x0f\x3e\x96\x1b\x4d\x9a\x8c\xba\x2d\x72\x02\xe7\xba\x9a\xb1\x18\xda\x8e\xed\x5c\x52\x1a\xa6\x13\x0e\x60\xae\x38\x80\x27\xbf\xed\x2d\x67\xb5\xe7\xbe\x62\x77\x56\xbe\xda\x44\xb7\x70\x6c\x27\x23\xa4\x7e\xac\x15\x8e\x9d\x97\xd8\x0c\x33\x63\x1e\xfb\x06\x98\x87\xd3\x16\x89\x56\x23\x27\x9c\xdb\xcb\x53\xe3\x48\x7a\x28\xe7\xff\xf5\xce\x73\x6b\xb7\x4d\xc7\x9d\x2e\xca\xae\x13\x5c\x02\x7f\xa6\x37\x93\x86\x5f\xb3\x9d\xf0\x8a\xf5\xc5\xab\xf6\xe1\x8b\xaf\x66\x3f\xf0\xa1\xdd\x27\x2e\x6b\x57\xb1\xc8\xc3\xa6\x23\x84\x25\xa4\x2b\x9c\x0b\x1e\x6b\x4e\x9b\xad\x08\xde\xd0\x3c\x20\x5e\x31\xe9\x64\xa5\x30\x86\x19\xaf\x64\xbf\x87\x0e\x65\xa6\x17\x5d\x47\xfe\x5a\xdb\x49\xfe\xda\x21\xcd\x0b\xee\x2c\x16\xd2\xaf\x2e\x42\x97\x8d\x9f\xd2\x27\xbc\xc3\x71\x6e\x38\x76\x65\x87\x91\xa9\xb6\x49\x1b\xae\xd2\xb3\x60\x57\x6c\xe1\x68\x08\xc6\xcc\x70\xca\xde\xf9\x81\x53\x86\x6d\xf0\x87\xe3\xb3\x28\xc4\xb8\x73\x43\xcc\x9a\xd2\x23\xd1\x1e\x99\x71\xd6\xbd\x49\xdc\x4c\xbd\xca\x87\x16\xd4\xe3\x1c\xfc\xb2\xeb\x15\xd3\xc2\x87\x0d\xb3\x65\x66\x99\xf7\x84\xbf\x0f\x61\xe6\x4e\xe7\xd4\xe5\x74\x1e\xe5\x7e\xfa\xbe\x3f\x68\xc4\x6d\xfb\xf1\xd0\x5b\xfa\xfb\xd1\x61\xc1\xe9\x61\x40\xcf\x9b\x96\xee\x1b\x63\x11\x73\xdb\x6f\x1e\xe3\x32\xaf\x75\x3b\x18\xdf\x6e\xb4\x83\x77\x6c\xed\xb3\xe8\x10\xf2\x06\x1c\xd8\xb1\xc7\x94\x9a\xa5\x56\x48\xcb\x07\xda\xdd\x5b\xe2\xbd\x60\x53\xf6\xf8\x26\xed\x7e\x57\x68\x59\x4f\x06\x29\x76\xa6\x7c\x51\x72\xd1\x9b\xce\x1b\x0f\x60\xd6\xd1\x1b\x92\x87\x3a\x8c\xf8\x18\x4c\x67\xdc\x47\x33\xd7\x38\x27\x70\x54\x58\xab\x9c\x10\xfd\x58\x4c\x5b\x18\x06\x54\x06\xa8\x38\x2e\xfd\x44\xf7\x3c\xd2\xa3\xd9\xfe\xf8\xb3\xfc\x67\x3e\xf4\x28\x38\x2e\x87\xce\xdd\x3d\x31\x09\x41\x28\xe5\xa1\xe7\xce\x37\x61\x38\x19\xde\xc8\x22\x6e\xc4\x46\xe5\x7e\xb7\x70\x6f\x09\x33\x93\x69\x14\xf7\xb5\x98\xf3\xe6\xc1\xd0\x5b\x9b\x45\x96\x0f\x78\xe7\x61\x90\x1d\x18\x19\x94\x74\x34\x6b\
x45\xb7\xe6\x9e\xb1\x7b\xe1\x7c\x3d\x2d\xa0\x9f\xe7\xbf\x56\x58\x82\x8f\x2f\x1a\xfc\x83\xc7\x75\x5a\xaa\xb2\x2c\x58\x0b\xc4\x72\xb1\x7f\xb8\x1d\xe3\xb9\x77\x49\xae\x41\xa6\x65\xc5\x89\xb2\x83\xf6\x66\xf4\xc5\xb0\xd8\xc2\x4f\x3a\xf2\xcc\x80\xdf\x3c\x18\xd2\x01\xb3\xbd\x30\x4f\xe3\x65\xd7\xc7\x04\x4b\xbf\x97\xa1\x39\xd4\x1b\x82\x0f\xae\xc7\x3e\x31\x00\xdb\x68\xd8\xde\x57\x5c\xec\x76\x99\x26\x91\x19\x81\x7b\x1c\xfc\x87\xca\x87\x5d\x0b\xee\x1a\x32\x0b\x2d\x34\x7c\x54\x97\xf9\x02\x48\x04\x07\x5e\x85\x89\x3b\x63\xf7\x83\x2e\x95\x0f\xf1\xb5\xd9\xf8\xe0\xbc\x95\x7a\xf2\xb2\x8c\x0c\x30\x6c\x7d\xf8\xdc\x63\xc1\x8e\x26\xd9\x5f\xbb\x0a\xc8\xac\xaa\x1f\x2b\x5e\xaa\xbb\x36\x6e\xe5\x9d\xee\xb1\x69\xff\x9d\x05\xbf\x7f\x39\x15\x63\x8d\x8a\xac\x76\x3e\xbc\xa5\xae\x13\x6f\x24\xdc\x3a\x43\xe5\xa5\xea\x34\x3c\x18\x94\xba\xeb\x62\x56\xc0\xa4\x25\xe7\x19\x87\xbf\x2c\x0a\xb1\xdc\xbe\x33\x68\x0c\xb3\x78\xae\x3a\xcf\x69\xc3\x77\xcc\x99\x7e\x12\x9e\xa7\xdb\x22\x2f\xf0\x8f\xc2\x12\xd2\xc6\xa6\x3b\xd7\x4a\xee\x17\xbb\xb9\xc9\xe1\x86\x6d\xf2\x08\x90\xff\xd9\xd2\x42\xee\xe0\x58\x69\x3a\x5f\x5c\xf8\x5d\xe6\xc9\xec\xbd\x3c\x59\x3f\xd1\x82\xd9\x12\x31\xe1\xe2\x4f\xdc\xf3\x84\x9f\x2f\xbf\x58\x36\xbf\x20\xb8\x2d\x1b\x73\xe6\xd4\xed\x75\x32\xcb\x40\xd7\x5f\xdf\x4c\x33\xd2\xa0\x8b\x52\xb4\x18\x0b\x03\xc3\x4c\x83\xc4\x10\x01\xbb\xb9\xe6\xbe\xe0\x7a\x75\x7c\xf9\x89\x19\xb3\x9d\xf9\x3f\xbc\x88\x71\xc1\x26\x5c\x29\xdb\xc5\xe9\x46\xbf\xa1\x7b\xc2\x87\xb6\x35\xd2\x3d\x54\xc8\x73\x4f\xe7\x44\x97\x4f\x7b\xa3\x98\xbf\xf2\x98\x76\xc8\x0d\x66\x5b\x9a\x71\xd7\x51\xe3\xc3\xc0\xec\x4c\x8c\x1f\x7e\x72\xe4\x50\x4f\x5e\x9b\xcd\x8c\xe1\x8c\x4c\x51\x5b\x6f\x46\x54\x59\xe8\x11\x27\x30\xe1\x29\x2c\x24\xd8\x0b\x4d\x81\x6d\xf7\x2e\x44\x60\xa5\x77\x33\x93\xa6\x6c\xb9\x02\xe3\xf0\x93\x8d\x6f\xb5\xdf\x4f\xc8\xa6\x1e\x3a\x33\x8f\xb6\xcb\xf6\xa8\x1b\xc2\x81\x76\xf5\xe0\x5e\xd3\x43\x7e\x72\x2a\x7f\x71\x77\xcc\xd6\xdb\x93\x30\xee\x5c\x26\xc6\x07\x7b\xfe\xe5\x26\x20\x4b\x34\x33\xc8\x2c\xc6\x9e\x01\xbe\x95\x87\x71\xf7\xb4\x63\xe0\x8d\x3d\x94\x71\x9b\xe1\xd4\x69\x61\x6e\xb4\x0c\xfc\x16\x7a\x4e\x19\x34\x33\xfd\xda\xfe\xfc\x6b\x57\x17\x89\xa7\xbb\x38\x3d\x78\x36\x2f\x54\x98\xea\xee\xe5\x81\xc5\xaf\x70\x2d\x5b\x36\x8f\x6b\xf6\x7a\x1f\xcb\xaa\x93\x89\xdf\x92\x10\x05\x4f\xf1\xf3\x63\xa5\x0d\xf3\x18\x41\xe9\xfd\xfd\xd4\x03\xbe\x2d\xf1\x80\xeb\x3e\x69\x64\xe2\xde\x7a\x24\x80\xc7\xf3\x18\x08\x9b\x58\x19\x99\x3c\x19\x99\x98\x33\x85\xa5\x09\x97\xe1\xff\x56\xbb\x97\xdb\x15\xa6\x1a\x55\x8e\x68\xe5\xeb\x78\xdc\x17\xc7\x6b\xce\xf1\x18\x9e\xdd\xb9\x13\xdb\x0f\xf3\xbe\xbc\xaa\x3d\x03\xcb\x3b\xad\x8b\x3b\x59\xd8\x00\x9e\xe9\x45\x9e\x75\xe3\xa5\x90\x5b\x44\xb0\xe0\xa9\x2b\x13\x7b\x8b\x27\x68\x4e\x13\xa3\xc4\x49\x9f\x3c\x6c\x48\x72\x7b\x0b\x59\x2b\x8d\x02\xf6\xb8\x2f\x07\xa3\xb0\x0c\x70\xc3\x1c\xe6\x41\x8f\x73\x78\xf2\x81\x98\x78\xe2\xee\xed\xe7\xb6\x6e\x74\xa6\x05\x37\xd8\x7c\x85\xa7\x8d\x8c\xdc\x56\xfa\x57\x8f\xac\xf4\xeb\xe1\x59\x7f\xfb\xbc\x02\x5a\x43\xe8\xf0\xf9\x23\x78\x70\x50\xb0\x78\x28\xd8\xb1\xeb\xbc\xf9\x3d\x86\x15\xcd\xba\xd3\xda\x72\x4b\x31\xe5\x95\x3b\x1c\x4d\x26\xf0\x8c\xe2\xcb\xac\x0c\x63\x8c\x5c\x92\xc1\x8e\xad\x02\x63\x60\x5b\xe9\x32\x10\x71\x98\xd3\x5f\xc1\x28\x58\x08\x07\x51\x16\x9e\x39\xbe\xf0\x16\xc9\x6a\x59\x5e\x75\xa2\x95\xb3\x57\x84\x75\x58\x4b\xee\x15\x33\x63\x4d\x91\x74\x0d\xf6\x94\xbc\x09\x34\x47\x0a\xd7\xea\xab\xf0\x68\x21\xb3\xe8\x15\x20\x59\x88\xb3\x30\x6d\xd5\x9d\x32\xdf\x6e\x3f\x46\x17\x08\xd7\x78\x17\x8e\xe9\x3f\x20\x30\x80\x6b\x37\x98\x75\x97\xad\xaa\x8c\xbf\xf1\x3e\x85\x19\xec\x82\x75\x67\x7c\x9d\x26\x3d\x00\xdf\x93\xa9\x4e\x1a\x66\x04\x0b\x4a
\xef\x70\xf3\x4f\xd9\x9d\x91\x0f\x99\x5f\xc8\x73\x86\x6c\xce\xa6\xe8\x2e\xfa\xe9\x22\xb8\x6b\x89\xc3\x1c\x5b\xe0\x71\x35\x8e\x66\x46\x1c\x6f\x3e\xe6\xc9\x84\x7d\x67\x6e\xe2\x3e\x8c\x40\x42\x9d\x26\x66\xc1\x47\xe1\x9d\x77\x48\x4b\xa9\xe3\xc9\x7c\x19\xbe\x5f\x5e\x84\xdf\xb2\xf8\x37\xdf\x10\x7f\xe1\xb0\x4e\xa7\x27\xa6\x8b\xa5\x7a\x3c\x81\x48\x95\x78\x9f\xfc\xb4\x96\x39\x3d\xe8\xf2\xef\x5c\x8e\x3e\xc9\xe5\x75\x93\x12\xe9\x63\x63\x86\x60\xe7\x48\xbf\xea\x9e\x5e\x58\xb2\xe4\x91\xb0\xc2\x13\x47\x1e\x70\xe8\xb3\x99\x44\x9a\x5d\xf7\x66\x48\x19\xec\x04\x2c\xba\x58\xb8\x8b\x5b\xda\x4b\x66\x9a\x38\xa5\x5f\xbc\xbc\x5d\x6e\xac\x4e\xea\xfb\xb0\x9d\x6f\x1a\x68\x5b\x74\x3a\xb0\x99\xd7\xc4\x02\x98\x9d\x26\xd7\x97\x4c\x0f\x64\x89\x59\x15\x1a\x97\x3d\xd4\x96\x75\xe5\x73\x2c\xdd\xbe\xe2\xec\x0b\x11\x9e\x2b\xd3\x3b\xbb\x30\x8f\x85\xde\x70\x36\x1f\x87\xef\x6d\xe9\xcc\x96\x19\xad\xec\xa3\x50\x3a\xde\xee\x8d\x3e\x86\x0e\x9a\x01\x4e\x12\x62\x40\x9b\x70\x30\x5e\xb9\xdb\xe6\xe1\x9e\x9a\x95\xc6\x99\xf5\x00\x52\x21\xd1\xc6\x50\xc7\x31\xb3\x95\x9b\xa6\xe0\x63\x13\x74\xbb\xdd\x14\xae\x5e\x6f\x47\x7b\xf5\x93\x98\x4d\xdb\x0f\x5c\x60\x97\x47\xae\xd3\xb9\xf0\x8e\xb8\x7f\x2c\xf8\x98\x2c\xab\xf5\xb0\xed\x33\x2f\xc4\x14\xfa\xa0\x0f\x1e\xeb\xca\x9f\xb7\x2c\x2f\x77\x11\x8a\xef\x8c\x68\xed\x15\xb1\xbc\x6b\x5a\xe9\xb6\x1b\x7e\xb6\x0a\x06\x38\x72\x5c\xc7\x1b\x5c\x49\xc3\x49\x6c\xaf\x4e\x38\xab\x07\x51\x91\xae\x98\x11\x21\x2e\xb0\x0b\x8f\x85\x9d\x25\x26\x51\x22\xf8\xb6\x35\x26\xa0\xec\xd1\x3f\x4e\x37\x0d\xef\x9d\x07\x8f\xae\x20\x62\x8d\xbe\xec\xb2\x65\xa7\x45\x5d\x82\x93\x96\xac\x40\x8f\x89\xe7\x4b\x0e\xcb\xac\x91\xac\x1b\x20\x37\xfe\x5e\x47\x1f\x75\x8e\xf0\x81\x16\xcd\xbe\x08\x18\xfb\xc9\xc3\xcf\xf5\xc4\x6e\xcc\xa4\xef\xfb\xd7\xa9\xd2\xdd\x33\xfb\xee\xbb\x60\x21\x2b\x43\x47\x07\x28\xe4\xec\xb6\x2a\xd4\x21\x7c\x6f\xc7\x48\x15\x86\xbe\x30\xfe\x5a\x64\x08\x2c\x85\x72\x84\xb5\x3d\x37\xd3\x25\x22\x72\xcb\x95\xbf\x21\x29\xb9\x23\xf4\xdb\x60\xd7\xf1\x79\x2b\xef\x88\xbb\x13\xde\x1b\xdf\xd9\x66\x61\x62\x5d\xdc\x69\x32\xeb\xe0\x99\x92\xff\x41\x9a\x8a\x7f\x71\x50\x57\xf0\x67\xb9\x33\x2c\x54\x36\xb2\x9e\x71\xdb\xcd\x78\xc2\xb3\x73\x21\x2b\x2a\x49\xa7\x2e\x5e\x2c\xf3\xa4\x4f\x59\xdc\xd0\xc1\x64\x9e\x42\x1b\xde\x9f\xdc\x4c\xc5\xad\xc5\x22\x37\xa6\x18\x3b\xce\xf4\xef\x8d\xf0\x5b\x3b\x46\xf7\x89\xae\xd0\xbc\x7c\xa0\x9f\x0d\xc4\x46\xdc\xe5\x21\x16\x17\xc0\x7d\xe7\x88\x53\xf8\x8b\xad\x20\x54\xfe\x73\xf2\x08\x5b\x74\x6f\xbf\x0d\x36\x74\xa7\x35\xb1\x74\x0a\x05\x34\x45\x87\x8c\x34\x88\xa0\xfb\xc1\x0e\x23\xa4\x57\x37\x6b\x44\x07\xd2\x4d\x2d\xcc\x20\x5e\xe1\x29\x06\x83\xaf\xc5\x7e\xe5\x1e\x3f\xe9\x2f\x85\x5f\x63\xbe\xf2\x86\x22\x8f\x9d\x1c\x5e\x80\x89\x31\x9e\x9d\x9a\x9e\x01\xae\xbc\xc4\x7e\x30\x72\xda\xf2\xca\xec\x22\x92\x60\x62\x41\x45\xf0\x3a\x2e\xc8\x2e\x6f\xd3\x25\x9c\xad\xc8\x4e\x86\x7a\x76\xc7\x2d\x70\x75\x41\x4c\x0d\xed\xee\xeb\xca\x83\x4e\xfc\xde\x5f\x60\x53\xf6\x76\xde\xef\x78\x84\xa9\x1d\x83\x7e\x9c\x2a\x7e\x2f\x53\x9c\x7d\x1c\xf1\x68\x3d\xb1\x8a\x89\x7f\xcc\xca\x60\x7e\xaa\x42\x33\xfc\xab\xd9\x1e\x16\xdc\x6c\x67\xcc\x4b\x32\xd8\x2f\x17\xe4\xb5\x0c\x58\xc3\x2d\x07\x48\x14\xa1\xed\xc3\x2e\xf2\xc1\x25\x7a\xea\x3e\xd8\x7b\x45\x0e\xee\x09\x34\xf3\x08\xcb\x2b\x26\xfc\xd6\x27\xfd\x95\x39\x21\x5a\x52\x33\x7e\x93\x5c\xc5\xac\xe3\xbd\x9e\xdd\x54\xfa\x1b\xc1\x4e\xc6\x24\xfb\x44\xfb\xa6\xec\x15\x8b\xdf\xf3\xba\x4d\xb4\xcc\x1e\x5a\xe9\xda\xba\x57\xc6\x51\x18\x5e\x3c\xe3\x1e\x75\x12\x15\x5e\x89\xf3\x58\x4e\x2c\x1b\x7a\xc2\xe6\xf8\x25\x60\xc0\xdb\xde\xf9\xdc\x03\xdb\x77\x0b\x4f\x97\xd7\xd5\x9d\x89\xb3\xb9\xdd\xe5\x2a\x96\x32\x87\x5e\x6a\xcf\x63\x3d\x54\x7f\xf
6\xed\xa4\xd7\xee\x11\x6e\xc1\x8d\xaa\xf8\x51\x88\x40\xd9\x65\xc6\x59\x6a\xf5\x08\xa4\x4f\xb2\x89\x40\x1e\x36\x9e\x21\x5f\x42\xee\x39\x22\x00\x9c\x33\x54\x7b\x16\x02\x95\x5d\xc5\x96\xa4\xb5\x4e\xf5\xc1\x34\x66\x57\x8e\x6d\xd6\x91\x3f\x99\xfa\x8d\xcf\xbe\x57\x1d\x95\xb4\xc9\x51\x98\x69\x93\x2f\x88\x2a\x2d\x8e\xe9\x48\x67\x39\x64\x0f\x88\x4b\xd4\x43\xe2\xb5\x17\xd5\x9e\xce\x07\xe3\x20\x7b\x72\x05\xed\x63\x6d\x31\x04\x3d\x7d\x7a\xd3\x19\xaf\x0e\x04\x32\xef\xf0\x10\x3e\x60\xf4\xe5\xa1\x9d\xb0\xab\xc5\xa0\x93\xdb\xea\xa0\x02\xde\x8a\x39\x33\x76\x62\x08\xfc\xbe\x1e\xdc\x58\xbb\xc1\x0f\x0b\x5c\x4f\x65\x42\x78\xb1\x92\x61\xea\x16\x65\x79\x9e\xa3\xda\x9f\xaf\xb2\x07\xd6\x16\xbe\xa4\xff\x33\xd7\x45\x3b\xc0\xa0\x0d\x11\xad\xee\x24\x33\xca\x44\xcf\x48\x57\x62\xbe\xfc\xcd\xeb\x72\x27\xb4\xec\xa5\x06\x3e\xf3\xcc\x87\x76\xa8\x81\xff\xb0\x66\x25\xda\x33\xdd\xa4\xb2\x42\x6c\x81\xec\x11\xf0\x6b\x25\x86\x8c\x33\x6c\x0b\x36\xe2\xbd\xf6\xb0\xf2\x07\x2d\x5c\xd8\xea\x5f\x1c\x50\x8b\x8f\x33\x2b\xaf\x7f\x92\x58\x06\x04\xb8\xb9\x44\xe7\x16\xa7\x89\xb8\x37\x51\x2a\xbe\xb1\xf2\x5c\xa6\x26\x16\x20\x14\x19\xaa\x27\xcd\x1a\x6e\x9b\x3f\x90\xad\x69\x87\xb5\xf3\x7f\x20\x22\x51\xf6\xfe\xdc\x77\x9d\x5f\x45\xf6\xb8\xed\x77\x1e\x66\xf3\x6e\xb6\x01\xda\xf8\x7b\x52\x81\x47\xae\xcc\xce\x70\x65\xdd\x7c\xc5\x29\x1e\xcc\x57\xb3\x90\xff\xd0\x95\x31\x5b\xbb\xb1\xbe\x61\x06\x95\x60\x73\x52\x4e\x4a\x78\x68\xb7\xcb\x45\x4c\x90\xaf\x95\xb6\x99\x86\x51\x7b\x88\x25\x9d\xca\x8b\xd9\x47\x5f\x01\x69\x31\x05\x5d\xf9\x52\xb6\x31\xc5\x7c\x2a\xee\x6a\x6f\xf1\x5f\xe0\x0b\x85\x19\xca\xb0\x9d\x04\x64\x0e\xb3\xd7\xac\xe4\x38\xe6\x2f\x52\x07\x9e\x45\x15\xd0\x87\x80\xa8\x21\x7f\x85\xd0\x0b\x23\xf3\xfd\xf9\x09\x28\x1d\x3f\x69\xb6\x19\x7e\xd2\x16\x9e\x17\xd1\x7e\x0d\xf3\xe0\x4a\x78\xd2\x83\xd5\x9d\xb7\x83\xf9\x66\xfe\xb5\x57\xc7\x2f\x27\x3e\xae\x00\x81\xab\xf1\x88\x9a\xed\x5b\x15\x56\x8b\x86\x51\xf8\x71\xa8\x8e\xfe\x97\xe5\x27\x00\x65\x9e\xcc\xfa\x27\x7d\x56\x6d\x4a\xec\xe6\xa3\xc2\xea\x55\x58\x93\x73\x61\x0e\xec\x88\x77\x43\x5d\x3a\x95\x08\xde\x76\xa9\x85\x8b\x1c\xc2\x3f\x12\x4f\xf8\xcf\x39\xfb\x66\x9a\xa1\xd1\xfd\xdd\x6a\x65\xb1\x8b\x88\x06\x0e\x3a\x1d\xb0\xbf\x23\x03\x89\x4d\x50\xba\x1b\x0c\x12\x6d\x84\xf1\xd2\x99\xdb\xa6\xde\x98\xdd\xd2\x54\xbe\x05\xbf\x46\xca\xf1\xa0\xcf\x1a\xe2\x77\xee\xb4\x11\x9b\x17\x20\x0f\x78\x0c\x5f\x5e\x5a\xdd\xf9\xee\xf8\xbd\xce\x63\xe1\x45\x43\xe5\x82\xa1\x5a\xa1\xb7\x7e\xe9\xf2\xc0\xa4\x63\x3d\x3d\xe8\xe4\xfe\x02\x26\x87\x0d\x3e\x99\x72\x4e\x0d\xf3\xde\x67\x96\xdf\xa6\x14\x15\x59\x1a\xdc\x47\x1d\xfe\x10\xb2\x27\x72\xee\x3c\x0a\xd5\xd2\xf1\xef\xb8\x50\x6f\xa2\x08\xf9\xb7\x2f\x82\x7e\x99\x19\x98\xc5\x0d\xa7\xaf\x98\x86\x05\x5b\xbf\xc8\xa8\x2a\xc3\xea\xf1\xe9\x95\x30\xfe\xba\x0f\xe4\x01\x74\xa8\x10\xe1\x85\xdf\x7f\x6d\x09\x5c\x8f\x39\xd1\x03\x85\xd8\x85\xc7\x81\xd7\xcf\xff\x43\x72\x04\x06\xff\xe9\x1e\x30\x09\x13\x5c\x41\x0f\x4a\x60\x70\x75\x1d\xbb\x25\xea\xa4\xaa\x74\x47\x32\x06\x1d\x89\x98\x49\xaa\xa7\xf9\x72\xeb\x09\xbc\x82\xce\xeb\x9c\x59\x15\x21\x32\x6a\x66\x4e\xe4\x8d\x24\x6c\x38\xc2\x74\xbc\xb7\x2a\xc3\x85\xfe\xd7\xb1\x3b\xe2\x1a\x87\x2a\x75\x66\x14\x56\x7c\xfa\xc9\xbd\xf6\xaa\x27\x39\x3c\x99\xb5\xab\xad\xe8\xa2\xbf\xb4\xc4\x5b\xc5\x6f\xd9\xcc\x22\x55\xbc\x05\x3f\x68\xcc\x48\xbc\x2c\xff\xe3\x96\xcc\xd9\x5e\x66\xe9\x09\x09\x45\x86\x07\x63\x81\x6a\x11\x21\x2e\xfc\x13\x42\x9e\xaa\xd3\x34\x05\xe6\x40\xc3\xfd\x22\x18\xce\xd2\x8c\x67\x53\x74\x22\x27\xd3\xb8\x3a\xb0\x08\xe8\xd6\x3f\xac\xfc\xe3\x94\x5f\x31\xeb\x87\xb3\xf6\x20\x34\xd6\x47\xe0\x0b\x00\x6c\x48\x3c\x5d\xc0\x37\xf1\x14\x69\xe5\xc3\x44\xbe\x22\x0c\x9d\x05\x32\xb7\x4e\x
34\x32\xb6\x1e\x3d\xeb\x41\xb6\x41\x74\xc1\x15\xc1\x21\x2f\x1c\x52\x25\xbe\x2a\x68\x5d\x16\xed\x30\xc2\x64\x21\x8e\x16\x60\xc9\x2a\x44\xa8\xbc\x1a\x54\x27\x12\xbe\xc4\x5c\xd8\x12\xcf\xb6\x65\x47\x3b\x2f\x3e\x16\xca\xcd\x16\xf8\x62\x66\xcc\x19\xe4\x03\xfe\x0e\x5a\x8f\x25\x95\x83\x8c\x82\xfe\xed\xae\xeb\x25\xbb\xcb\x60\x03\xf7\x31\x92\xf0\x70\x72\x55\xe5\x1b\xfd\xfd\x38\x09\x9c\xdb\x6b\x92\x15\x13\x95\x17\xb3\x10\x5c\xbb\x33\x2a\xcf\x53\x7e\x60\x15\x5f\xe5\x59\xb0\x78\x83\x2e\x03\xbc\x3a\x49\x3b\xcf\x3a\x25\xbe\x88\x9d\x8c\xc4\x07\x9b\x6e\x51\x58\x55\x4c\x70\xb0\x92\xf7\x20\xf2\xb3\x90\xc9\x40\x0b\x77\x43\x34\x68\x0e\x05\x26\x5e\xd5\xf3\xc9\xd6\x20\x90\x2a\x2c\xf4\x23\xd3\xa3\xc1\xe2\xdf\xeb\x04\xcb\x4b\xc3\xe2\x67\xbf\x2c\x38\x39\xf7\xda\x9f\xbc\x67\xe4\x22\x58\x80\x89\x73\x9a\x11\x46\x7a\x2e\xa3\xa2\x7f\x3f\x29\xa2\x73\x7e\xc3\x9b\x57\x15\x11\xdd\xb9\x1d\xaa\x19\xc4\xfd\x4c\x02\x3a\xcc\xe1\xb3\x20\x86\x0a\x18\xbe\xc9\xc3\x54\xde\x6f\x40\x4b\xf8\x4e\x5d\x25\x3e\xda\x40\x63\x6c\x21\x0f\x5d\x8a\x6d\xec\x22\x67\x18\xb9\x4d\x9e\x57\x46\x07\xf2\x5a\x24\x4d\xbe\x69\x40\x7c\x99\x71\x43\xde\x34\x0a\x4e\x1c\xd0\x2d\xa2\xb3\xf3\x6a\x47\x20\x66\x01\x94\x09\xec\xde\xcf\xee\x87\xc9\xb4\x33\x7f\x80\x08\xd4\x99\xd9\x25\xf6\x74\x16\xec\x48\x87\xc3\x88\x2c\x91\xdc\x61\x0b\x75\x17\x93\xc0\xae\xc2\x43\xd4\x1a\xa0\xa0\xad\x22\xd2\xe3\x52\x9e\x0e\xfd\x99\x7b\x8c\x64\xcf\x8c\x7b\xd4\xd1\x11\x1e\xad\x49\xb7\xc6\x2d\x2c\xbd\x66\xfe\x54\xb9\xe7\x1c\xd4\xbc\x95\xa1\xa2\x83\x36\x11\xd1\x4d\x42\xfe\x19\xba\x00\x06\x64\x90\xa0\x5c\xdd\x0c\x0e\xfe\xe1\xde\x60\x73\xaf\x07\x64\x56\x1f\x7a\x87\xd0\x18\x2d\xda\xba\xf1\x08\xa4\xa5\xd7\x42\x65\x77\x52\xbe\xf5\xf6\xda\xca\x3e\x68\x42\x3d\xe9\xbf\x29\x84\xc7\x27\x0d\xf8\x8c\x44\x52\xe0\xf1\x76\xa7\xe7\xbf\xdc\xa2\x00\x5a\x66\x95\x3e\x68\x8e\xea\x9d\xd8\x93\x87\x7b\xac\x8f\xdb\x21\xca\x58\xe2\x45\x8e\xa1\xd8\x95\x83\x61\x71\x6c\x15\x36\x01\xc4\x16\xd4\xef\x70\x63\x76\x4b\x04\xf9\x48\x03\xd7\x0a\x9c\xb7\x56\x04\xb1\xd8\x8c\x1b\x75\xce\x64\x6c\x59\x0e\xb3\xf1\x37\xd4\xa8\x18\xe4\xbd\x27\x8e\x85\xdb\xb6\x47\x6d\x3e\x9d\x1b\x91\x40\xcf\x05\x04\xfd\x21\x7d\xf3\xef\x64\xbe\x9a\x86\xc0\x83\x9c\xcd\xc2\x83\x6b\x16\x2c\x18\x5e\x7b\xcf\x9b\x02\xa8\xa2\xd5\x27\x36\x82\x1d\x76\xe7\xf9\x44\x66\xca\xe9\xe4\xeb\x20\x4f\xa7\x52\x14\xa3\xc1\xc3\xdc\x38\x5d\xa4\x62\xe1\x32\x31\xb2\xda\xcf\x80\x7e\x7a\x0f\x2d\x89\x3c\x6f\x28\xdc\x9e\x73\x77\x13\xee\x7a\x47\x00\x73\xb7\xa3\xce\x9c\xf7\x41\x96\xeb\x08\x4a\xa7\xb8\x5b\x48\x27\x68\x34\xea\x9f\x5e\xe6\x91\x70\x25\x0f\x80\xdd\x6c\x9e\x13\x54\x57\x69\xf1\xbc\x4c\x71\x63\x14\xa3\xdc\x64\x62\x6a\x55\x36\x65\x93\x5d\x80\x9a\x79\xe9\x68\xe6\xeb\xc4\xe8\x79\x2a\x81\x90\xbe\x5b\x98\xa7\x9b\xe7\xc8\x81\x40\x38\x0b\xe2\x88\x2b\x3a\xc7\xb4\xc5\x4f\xab\x87\x78\x0a\xcb\xd2\xcc\x64\xd0\x56\xe6\xc9\xca\x91\x79\x24\x86\x19\xa2\xeb\xcd\x69\xc3\x81\x49\xf7\x3b\x81\x8a\x97\xc2\x95\x1c\x0c\x58\x62\x64\x3c\xbc\xa7\xd0\x4c\xde\x0a\xcf\xb5\x6f\x0a\xef\x02\xfc\x27\x4a\xb9\xa9\x50\x1c\xb9\x14\xc3\x40\x82\x7e\xfe\xe2\x13\xb6\xdd\xae\xb8\x1d\x21\x40\x5f\x53\x9f\xfe\xe2\x95\xcd\xf0\xcd\x6b\x7a\x2c\x72\xd5\x8b\xdb\x07\x92\x23\x3c\x51\x25\x0d\xca\x92\xd3\xe0\x10\x79\x95\x00\x56\xe6\x15\xe0\x6e\xe0\xdb\xa3\xce\x75\xd4\x28\xf8\xd6\x67\x1c\x60\xf3\xc9\xab\x2a\x87\x00\x45\x6f\xf0\x4c\xbe\x05\x24\xa7\x82\x8c\x88\x75\xae\xef\xa5\x21\x62\x7c\x88\x1b\x6a\xeb\x84\xcb\x65\x72\x27\x85\x32\x76\x60\x08\x50\x25\x50\xa8\x76\xf8\x96\xe1\x3c\x14\x50\xc1\x57\x10\x8e\xd8\x62\x6b\x87\x7c\x57\x9d\xf9\xd9\xea\x19\xf9\x41\xf4\xc0\x31\x24\xb9\x4d\xa2\xc4\xeb\xe6\x40\x1b\x2f\x8f\xaf\xde\x20\xa2\x75\xc7\xd3\
x69\x81\x45\x23\x14\xe3\x01\xcd\xc8\x47\x85\xa8\xd8\x8f\x72\xbf\x3c\x3c\x90\xc3\x8f\x7a\x65\x37\xd1\x3b\x9c\x0f\x3e\xbf\x45\x7a\xeb\xb5\x8e\xbb\xee\x6d\x64\xf7\xbb\xc2\xd6\x4e\xd0\x7e\x4f\x0c\xc3\x9f\x97\x18\x26\x00\x6f\x06\xf3\x01\xd4\x5b\xc8\x9c\x3b\x9d\x57\xfb\x39\x33\xb1\x49\x25\x8b\x25\x90\xc3\x81\x39\x3e\x9d\x07\xaf\x01\xd2\x38\x73\xc0\x07\x2b\xd7\x0e\xf5\x90\x13\xef\x96\x43\xbb\x17\x41\x08\x31\x98\xa3\x06\x13\x17\xee\x90\xa5\xae\x53\xf5\x11\x07\x05\xb9\x52\x1e\x6c\xc8\x53\x6c\xf5\x6e\x87\xa9\xd4\xb0\x98\x49\xf6\x03\x48\x3c\x76\xe3\x17\xc7\xda\x32\xbc\xe2\x9f\x84\x47\x7d\xd9\x63\xf3\x6e\xc1\x89\x37\xce\xe9\x9e\xe6\x82\xd5\xc2\x91\xaa\xc1\xbd\xc5\x37\x01\xdf\xec\xb2\x80\xa0\xe5\xd1\xd6\x95\xbb\x68\xdf\x83\x0f\x57\x46\x27\xe6\x95\xd6\x9b\x1f\x93\x10\x3e\xce\xd7\x06\xae\x1e\x33\x89\x78\xb5\xb1\x68\xcc\x5f\xd5\x18\xcd\x2a\xd9\xb7\x4a\x21\x7d\x6e\xab\x8d\x35\x2a\xad\xd3\x4d\x09\x57\x38\x19\x75\xbf\x29\x53\x8a\xd4\xc8\xd6\x4a\xdc\x24\xcb\xc1\x68\x9d\xba\x0a\x2b\xaa\x56\x07\xf3\xd6\xf9\x15\x75\x06\xcb\x34\xcc\xcb\xf2\xd0\x4f\x55\x5c\xbb\x17\xed\xce\x98\x07\xda\x0d\x42\xa4\x8c\xf3\x56\xa5\x8c\x3a\xdf\x7a\x26\xbb\x2d\xd3\xd9\xb2\x33\x3b\x17\xb4\x80\x93\x02\xb3\xad\x15\x67\xca\xec\x30\x43\x44\x0c\x66\x1f\x9c\xb9\x17\x24\xa0\x2a\xaa\x06\xcc\x0d\x43\x1e\xd4\x2f\x08\xab\x04\x85\xdc\x19\x1c\xcc\x2e\x16\x7e\x0a\x3c\x2a\x2d\xf7\xc8\xb2\x9e\x19\x7e\x7c\xd0\x4f\x92\x15\x18\xc9\x79\x11\x8c\xd2\xf8\x08\x7c\xf4\x97\x57\x3f\x1e\x0d\x1f\xef\x58\x92\x63\x6e\x74\x53\xf5\x2b\x36\x91\x4c\x4a\x3b\x85\xac\x72\x6c\x65\x96\xff\x7e\x34\x33\x01\x9a\x24\x0c\x74\xb1\x15\x9a\xaf\x83\x5b\x49\x9d\x6a\x81\xe2\xfd\x34\x5f\xc7\x17\xae\x57\xaa\x10\xd0\x8c\xae\x7b\x0b\xc7\xbd\x54\xb9\x3c\x78\xce\x09\x55\x5a\x74\x4c\x72\x6f\xc2\xc9\xdb\x05\x6b\x34\xfe\x1c\x2d\x80\xff\x15\xbf\xc4\xa1\xdc\x96\x1e\x1c\x39\x5c\xfc\xf5\x9d\x23\xdd\xa5\x7f\x5f\x8b\x96\x18\xe2\x56\x3b\x1c\x34\x08\x20\xf2\x2b\xa2\xb4\x54\x66\xd3\x69\xb0\x3d\xb7\x7d\x1c\x2b\xe2\x1a\xb7\x39\xb3\x38\xe6\x9b\x18\x08\x7f\x93\x18\x0f\x9e\x0f\xdb\xc9\x21\x5c\x9c\x45\xce\xb6\xb3\xca\xbc\xd1\x2e\x21\xec\xa2\x65\x6e\xfb\x78\x0b\xda\x28\xdf\x23\x62\x72\x3f\x8d\x8f\x85\x36\x84\xf1\x1c\xa1\x41\xad\xd8\x3d\xab\x56\xfc\x48\x8f\xf0\x2b\x02\x9e\xdd\xbf\xd1\x6c\x97\x25\xb2\xee\x70\x3e\xe7\xa6\xda\xf1\x73\x21\x7f\x15\x14\x3c\x22\x52\x89\xbc\x2b\xb2\xc5\x89\x0c\x30\x3d\x1f\x59\x8e\x76\x16\x07\xd3\xac\x6d\x68\xd5\x3b\x9c\xd5\xec\x2c\x66\xec\x3b\x92\x33\x9a\x51\x78\xd6\x87\xd3\x6d\xf9\xd2\x76\x4f\x11\xd7\x9e\xdb\x4b\xcc\x0d\x07\x5d\x64\x95\xb6\x38\x97\xa8\x1f\xea\xd5\x18\x57\x10\x60\x12\xa3\x30\x18\xfc\x7a\xbf\x7c\xda\x0e\xab\xfb\x48\xd6\x8a\x5d\x05\x8d\xf1\xa3\xe0\x6d\x73\xdb\x42\xa4\x7b\xfc\xec\x02\x00\xd3\xcb\x42\xb5\x76\xd1\xab\x19\x2a\x1e\xe3\xcc\x12\xa6\xb3\xf8\x79\x64\x56\x6e\xb3\x19\x03\xc4\xd9\x65\x77\xa6\x83\xbc\x75\x12\x65\x09\x2e\xa6\x5d\x71\xd2\xf4\x6e\xc8\x5e\x0e\x2e\xa8\x67\xd0\x2c\x67\x20\x27\x94\x1b\x75\x87\x2d\x74\xf4\x50\x79\xee\x60\x6e\x73\x30\xdb\xb7\x23\xc6\xcb\x6d\x1e\x9c\x39\x98\x08\x7d\x43\x5e\xb5\x44\xb3\xaa\x71\x64\xad\xf0\xd3\x7e\x4e\x5c\xfb\x05\xeb\x8a\xb7\x35\x67\xc7\xda\xa9\x20\x7f\xb7\x1a\x7f\x5a\x69\x54\xb7\xcc\xe2\xf4\x87\x70\xd2\xb3\xd9\x1b\x10\x92\x96\x56\x1f\xdb\x6a\x2f\xf2\x46\x89\x38\xab\x4b\x4a\xa5\x8e\x0f\x57\xd3\x99\x22\x00\x0f\x26\x46\x73\xbb\x5f\xdc\x24\x9f\xb7\x05\x3d\x22\x91\xf4\xce\x20\xcd\x89\x89\x2c\xee\x9f\x0b\xb3\x02\x6d\x8d\xf7\x0a\x71\x31\x97\x70\xde\x66\x01\xe1\x46\x82\x69\x4b\x54\x2d\x3d\x87\x14\x11\x05\x83\xe6\xbc\xc9\xa9\x46\x81\xd8\x41\xc5\x30\x72\x85\x79\x38\x00\xe1\xe0\x4f\xf4\x07\xe1\x4e\xe1\x45\x8e\xd6\xe2\xa2\x96\xad\x3f\x67\x7f
\x03\x5d\x53\xf9\x5a\x1e\x05\xff\xa4\x20\x12\xa0\x89\xcb\x19\xd2\x3a\xa1\xbd\x62\x29\xda\xbe\x1b\x13\xbe\xc0\x5a\x0f\xdd\xff\xab\xb3\x44\x46\x70\x22\xf1\x03\xf9\x25\xa8\x78\x3c\x6a\xa2\x53\x88\x21\x3f\xc6\x39\x99\x89\x62\xd9\x6f\x67\x80\x4b\x27\x22\x4f\x3f\x7f\xdc\x62\x7e\x6e\xaa\x26\xd1\xf5\x58\x12\x73\x4e\x64\xf9\x17\x2e\xd0\x2b\x33\xd1\xd0\xa3\xf0\xec\x81\x2e\x97\xc4\x0b\x22\x40\x9a\xf6\xe6\x0d\xf7\xc0\xcc\xef\xe6\xe1\x19\xc5\x23\xfb\x13\xc4\x9a\x88\x21\xdd\xe3\x26\x58\xc8\xf3\x09\x2b\x22\xd7\x9f\x64\x32\x08\xef\x34\xb6\x43\xda\x3a\x2e\xef\x7b\x24\x24\x74\xf0\x3b\xd5\x50\x32\xa0\x9d\x68\x6f\x4c\xfc\xbd\x95\xce\x2c\x66\xcc\x17\xb3\x2e\xc2\xb0\xf8\xca\xbe\x29\xf0\x30\xaf\xa5\x25\x02\x50\x41\x62\xe2\xcd\x4d\xaf\xca\x6e\xb4\x80\xc2\xee\x41\x61\x06\xf7\x4d\x2e\x95\x95\x5c\x36\x95\x45\x5d\x84\x26\xe1\xa6\xde\x0e\x1c\x6b\x95\x0d\xa2\xa2\x35\x12\x66\xdb\xf0\xbb\xc8\x2e\x02\xb0\xf0\x05\xac\x37\x9b\xb5\xab\x9a\xb8\x7b\x4e\xab\x2e\xa2\x06\x48\xbe\x74\xc6\x19\x6f\x02\xed\x6b\x1e\x89\x2e\x8e\xe9\xb1\x45\x3a\x79\x4a\x45\xf7\x93\xbb\xa1\xa8\x53\x20\xde\xb2\xd0\xc2\xdd\xed\x1c\x46\x5a\xa3\xf6\x1e\xc7\x22\xbd\xb5\x4b\x19\xd5\x1e\xf1\x35\x37\x86\xe9\x9b\x1d\x4d\xe2\xe6\x2a\x1f\x32\x82\xc9\x2c\x75\x99\xa7\x7a\x28\x6a\xe8\x82\x48\x08\x5a\x17\x17\xa3\x55\x85\xdc\xea\x91\xf2\x51\xc9\xbf\xda\xc4\x14\x35\x73\x37\x85\x6b\x2b\x44\x3e\x09\xdf\xfa\x15\x72\xdf\x0e\xdb\x64\x51\x5e\x3a\x18\x73\x6c\x39\xff\xe5\x5d\x0b\x68\x9d\x34\x91\xbd\x8f\x0a\xd7\x4a\x0b\x9a\xf5\x16\x29\xf8\x20\x7b\x12\xb1\x21\x2b\x4c\x5c\x71\xc6\xce\xe5\xdf\x02\xc2\x9a\xb9\x32\x8b\x6c\xc0\x76\xc1\x94\x30\x76\xec\x98\x7d\xe7\x08\xbe\x1d\x14\x51\xf9\x31\xd0\xf4\x63\x6b\xc4\x37\x70\x9f\x18\x8c\x6d\x9b\x60\x7b\xe4\x61\xb0\xb2\x28\xc2\x04\x74\x60\x67\x6a\x5e\x55\x91\x51\x37\x86\x6d\x6d\xe6\xae\x32\x64\x71\x58\x9f\x89\x04\xd2\xc5\x62\xe6\x9e\x15\xc7\xcd\xdd\x2b\x3e\x65\xc9\x1e\xfe\xa9\x1d\xa2\x8b\x6a\x32\xa2\xee\x1b\x7b\x03\x58\x62\x73\x16\x24\xdf\xec\x37\xe2\x19\x33\x21\x20\xd6\x3b\x50\xbf\xc9\xaf\x01\x6a\xcf\xc1\x91\x42\xd9\x99\xf4\xf9\x87\xda\xca\x96\xfc\x99\x59\x9b\x7e\x93\x95\xcc\x02\x87\x5d\xc0\x67\x14\x70\xbf\xfd\x56\x25\xca\xf2\xfb\x15\xd7\xcb\x96\x78\x1c\xc3\x33\x6f\xb9\x38\xd9\xaf\x73\x62\xe7\x31\x0e\x0e\x4a\x24\x44\x21\x97\x86\xbc\xa3\x7e\xd0\xd8\xaf\x3c\xff\xf9\xf8\xaa\x0a\xdd\xd4\x44\x24\x72\xc3\x08\xd4\x77\xad\x8a\xa5\x67\x5b\x56\xb4\xee\x4e\x66\x52\x2e\x58\x59\xa7\x9c\xe5\x54\x92\x12\x1e\x8b\x1a\x5d\x9c\x3e\x07\xa4\xca\x2d\x99\x6d\xd4\x48\x2b\x7a\xef\xd0\x55\x9e\x54\xa1\x39\x48\x22\x31\x2b\x17\x3e\x24\xb7\x12\x66\x04\x3c\xbc\x2c\xa8\xf1\xaa\x0a\x5a\x96\x20\x9a\x33\xc3\x55\x5d\x74\xae\x03\x40\x64\x52\x24\xc2\x7c\x0c\x30\x44\x0f\x82\x59\x76\xa6\x93\x90\xae\x3b\x49\x53\x9e\xe1\x92\xe6\xe1\x79\xca\xad\x01\x1b\xa2\x3e\xa8\x48\xc6\xb4\x75\x7f\x06\xab\x83\x16\x4b\x09\xf3\x29\xee\xfd\x78\x8a\x6a\xd1\x8a\x8a\x40\x55\xdc\x6d\xa8\x60\x88\x55\x15\xe1\x0f\xc7\x8d\x9f\xeb\x2e\xf0\xc6\xd8\x63\x51\x75\x05\xc7\x08\xb9\x0d\xac\xac\x2a\x0b\xd8\x8a\x45\xa9\xad\x02\x4e\x7a\xba\x5d\xe7\xb2\xc5\x30\xdb\x29\x96\x2d\x0d\x91\x99\xc5\x53\x4e\xfa\x90\x11\x75\xd4\xb7\xf1\x72\x10\x16\x47\xc4\x36\x56\xb9\xb7\x16\xc3\x79\x6a\x2f\xab\xa1\xb2\xc6\xc3\x39\xc0\x3c\x92\xa9\x2b\xc4\x86\xdd\x7d\xb2\x6d\xaa\xbe\x85\x84\x6d\x16\x98\x25\xa6\x0e\x76\x57\xd9\xa5\x43\xe0\xfb\xf0\x10\x8c\xc0\x4d\x8a\x02\x42\x1c\xfc\x3a\x5c\xf1\xfc\x1c\xc0\xbd\xc5\xc5\x2b\xd9\xb2\x51\xcd\xb5\x53\xd2\x22\xba\x8d\x86\x68\x4e\x0f\x27\xf5\x22\xa6\x33\x0b\x78\xbb\x18\x76\xcc\xe7\x4f\x72\x86\xd8\x33\xf4\x47\x59\x66\xb8\x35\xcf\x46\xf3\xf1\x69\xab\xcc\x28\x8e\x71\x86\x9d\x3e\xf6\xdd\x31\x37\xf5\x92\xb4\x3d\x1
a\xc9\x30\xe0\xa8\x32\x51\x47\x77\x0b\xbf\xc1\xdb\x11\x78\xfb\x80\xbc\x04\x58\xcf\x32\xd0\xb3\x0c\x4b\x52\x31\x6d\xff\xef\x64\x4c\x6d\x3e\xe8\x4f\x00\x33\x4b\x03\xf7\x71\x50\x57\x25\x63\x65\x5e\x5b\x65\x0a\x0c\x0b\x5d\x09\x7d\xd4\xcc\x98\xef\x60\xf3\x41\x12\xd1\x19\xc5\xfa\x88\x05\x36\x99\xb3\x74\x20\x9c\xe1\x13\x5a\x18\x7c\x72\x77\x19\xf3\x89\x3f\x6f\xe6\xeb\xe4\x69\xb3\x44\xc3\x0c\x73\xa0\xfb\x04\x20\xe1\x8e\xec\xea\xf1\x3b\x26\xd2\x88\xec\xc1\xed\x21\x60\x7c\x1b\xbf\xd6\x0e\x9d\xcc\x73\x77\x06\x35\x8d\x98\x5d\x9a\xa2\xbe\x8d\x05\x58\x68\x14\x17\x05\xa8\x8d\x94\xc0\x4a\x38\x8b\x40\x03\x8b\x30\xf6\x11\xc8\x85\x98\x04\x63\xd9\x40\x88\x55\x78\x43\xd6\x2e\x6e\x29\x02\x44\xb1\x96\xf6\xa2\xea\xc0\xbc\x88\xf1\xd5\xe1\x5c\x9c\x9e\x69\xb5\x87\x7d\x9c\xcd\x17\x1e\x0d\x6b\x33\xdb\xa2\xf0\x70\x8b\xa2\xb4\x53\xa4\xe0\x9c\xf3\xa0\x84\xdb\x63\x1f\xe1\x63\x9d\x02\x34\xb4\x83\x82\x6a\x44\x5c\x56\x6c\x53\x44\xa5\xda\x3b\x6c\x79\x02\x6e\x60\xf1\x64\xb9\x7b\x27\x68\xe2\xc5\xc0\x2b\xe7\x07\x62\x9a\x7c\x57\xeb\x97\x39\x81\x9e\xe2\x12\x8c\x6d\x5b\xe1\x8f\x1c\xaf\xe2\x7d\x1d\xc3\x79\x5d\xf2\x70\xad\x6d\x13\x23\x51\xc1\xc7\xea\x48\xa9\xea\xca\xa7\xf2\xce\xc3\x3f\xba\xbf\xe7\xc5\x23\x04\xe7\x82\xe1\xf7\xcc\xb2\xaa\x53\xcc\x26\x85\x10\x62\x31\xd2\x04\xe1\xf7\x9d\x1b\x2b\x7f\x3d\xab\x5c\x5c\x66\x1d\xb7\xb8\xd7\xe0\xf5\x71\x89\xde\xb0\xa2\xde\x0e\xc1\xb6\xfe\x7b\x14\xcd\x7a\xcb\x2a\xfa\x8b\xcd\xea\x06\x0c\x39\x2b\xe8\x8c\x5c\x56\x1c\xd8\x55\x98\xe7\x5e\x59\xba\xf1\x16\x6a\x6f\xef\x21\x43\x52\x6d\x64\x6b\x11\xa5\x91\x17\x6a\x8a\xa2\xe7\xc3\x92\x04\x11\x09\xc9\x11\x97\x05\xc7\x79\x37\x3f\xcd\xef\xaf\xe7\x41\xa2\xc4\xac\xf6\x48\x96\x34\x57\x11\x96\xe7\xf4\x89\xf8\x2b\x3d\x15\x93\x3e\x1e\x0d\xc5\xa0\xa5\xcb\xcb\x4f\x0d\x6c\x17\x3e\x03\xed\xe2\xc1\x04\x8d\x04\x5b\x25\x30\x4b\xdc\x32\x4b\xff\x8f\x45\x67\x6c\x6b\x11\x02\xf0\xe9\x41\xff\x95\x05\x86\xb2\x10\x72\xfc\x7c\xf3\x36\x36\xda\x60\x0b\xd2\x76\x6e\xde\x29\xa3\x2a\x32\x86\x97\x63\x2c\x1f\xe2\xdb\xdf\x27\x46\x80\xad\x9b\xbd\x3b\x8f\xa3\x8a\xca\xad\x7a\x49\xf9\x3d\x18\x1a\x1e\x2d\x8d\xa0\x33\xb0\xdf\x50\xb8\x70\x4c\xc4\x18\xa0\x7d\x5a\x82\x32\xa9\xe0\x90\xa2\x5d\x9b\xed\xa6\x7e\x4e\xdb\x06\xd7\xeb\x36\xde\xc5\xf8\x2d\x2a\x96\xdc\x65\x2f\x10\xef\xd1\x00\x2d\x00\x12\x82\xa6\x85\xfc\x07\xbf\x57\x38\xe6\x9d\xdd\xc8\x4e\xda\x73\xd3\xa7\x80\x7b\x9a\xc8\xfa\x71\x3c\x76\x4d\x37\x61\x1e\x0c\x47\x1d\xec\x2a\x77\xbe\x57\xdc\x15\x0f\xc0\x0e\x06\x91\x6c\x1f\x42\x7e\x08\xc4\xf7\xf1\xc1\xc5\xdd\xce\xcc\xce\x70\x26\x20\x5d\x55\x2c\xe3\xe7\x3c\xce\x97\xf3\x48\x93\x12\x5f\x98\x85\x46\x4a\x46\xdc\x87\xd5\x47\xf5\x16\x5f\x94\x8d\x0f\x09\x5e\x1a\xb1\xa5\x25\xf3\xf0\xdd\xc5\x21\xf1\x36\xe5\x1d\xab\x0a\x6b\x16\xd1\xd2\x61\xd9\x83\x1a\xd0\xbd\x70\xca\xe7\xde\x17\x66\xe4\x33\xb8\xd7\xf7\xb2\x51\x3d\x00\xd8\x24\xb9\xa1\xce\xcb\x98\x1b\x11\xfb\x95\x98\x63\xa9\x5a\xe0\x3c\x47\x50\x83\xf9\x61\x73\x1c\x0e\xb2\x5f\x70\xf6\x40\x3c\xe8\x1b\xc0\x76\xd8\x5a\xe0\x36\xbd\x83\x9a\xd4\x3b\x58\xa2\x3e\x84\xe8\xc3\x66\x88\x7d\xbb\x50\x50\xb9\xe2\xa9\x10\x0e\x6b\x2c\x46\x24\x3a\x57\x40\x42\x8c\x8d\xdc\x03\x17\x93\xa0\xe7\xdc\x48\x5e\x4d\xb3\x85\xf4\x8a\x73\xde\xde\x61\x4f\x6a\xd3\x7e\x61\xe5\x01\x00\xb0\xb2\x0b\xb7\xa3\x56\x26\x10\x61\x5a\x3e\x45\xd1\x95\x55\xf7\x6d\x4a\xde\xb0\xa6\x74\x75\xa6\x2d\x0b\xfa\x88\x97\xde\xf0\xba\xcf\xc2\xe6\x9d\x22\x1b\x15\x4d\xfa\xaa\xc7\xf8\x92\x07\xfe\x56\x57\xe5\xde\x20\xc3\x36\x07\xe4\xbc\xc2\x20\xd5\x17\x9e\x6f\x47\x9d\xc0\x1f\xf8\x6d\x3d\xe6\x5d\x36\xe7\xf1\x11\xc3\x09\x1d\xdf\x47\x03\xa1\x14\x8c\xf1\x7a\x12\x26\xc4\x39\x11\x8d\xc3\x02\x72\xc5\x0b\x8b\x7e\xd1\x5e\xd1\x78\x81\x
\xf5\xff\x5f\x62\x3c\x07\x14\xf3\x04\x65\xac\xfc\x16\x8e\xc8\x60\x7c\x55\x59\xee\x3f\x2f\xa5\x0a\xf6\xf3\x47\x9c\x55\x63\xaa\xa1\x8c\x47\x27\x62\xe8\xac\x0a\xc7\xfb\x94\xc6\xdb\x74\x67\x6d\xf6\xed\xcd\x35\x38\xbc\x4a\x42\xc4\x99\xe1\x46\x1f\x55\xfd\xf8\xee\x67\xbf\x9a\x10\x30\xbe\x47\xa9\x66\xa1\x40\xa0\xd3\xa4\x46\x26\xbe\x76\xb9\xc0\xb5\x38\x00\x47\xd3\x74\x4a\x01\xcd\xc2\x60\x4a\x21\x7a\xf3\x10\xfe\x44\x45\x09\xa7\x7c\xff\xef\xc4\x25\x40\xb9\x00\x57\x9c\x1c\xdc\x31\xc6\xee\x7b\x52\x1e\x39\xf9\x49\x8b\xbe\xa8\x15\x6d\x66\x1d\x4d\x89\x1b\xb3\x87\x39\x7d\x04\x06\x16\x4b\x7a\x6e\x51\x5e\x6b\x94\xab\x73\xd2\x4e\x43\x3e\xe8\x19\xa3\xf1\x1f\x90\xf7\x86\xef\x0c\x95\x87\x7b\x65\xa3\x2e\x2e\x5a\x4c\x61\x24\x85\x5b\xda\x2f\x2f\xa1\x49\x31\x22\x36\x48\x94\xba\xe7\x6f\x0d\x8f\x3c\xc6\x53\xdd\x0c\x5b\x70\xfe\x87\xa0\x2e\x3a\x22\x21\xbb\xe7\x47\x40\xc8\x38\xe0\xeb\xe6\x3c\xe0\x4a\xf8\x5f\x1c\x81\x27\x37\x99\x97\x6d\xb1\x77\x1c\xd8\xcb\xb1\xe4\xf7\x2c\x11\xe0\xb9\x4d\x8a\xb2\xcc\x39\x58\x27\x49\x81\x94\x7b\x33\x76\x74\xea\x2f\xfe\x07\x0b\x16\xee\x20\x19\x0b\xdf\xaf\xe6\xc7\x3b\x07\x27\xdd\x1b\x4a\x32\xa8\x34\xe7\xca\x4a\x92\x39\x96\xbc\x04\x5d\xb0\x23\xd5\x58\x86\xe5\x9f\xe9\x15\x8d\x3c\xea\xb7\xb1\xfd\x45\x7f\x50\xfa\x54\xc5\xaa\xa8\xdf\xbf\x8a\x3c\xb5\xc7\x64\xb5\x49\x12\xd7\x4b\x61\xcd\xcd\x1c\xad\x87\x74\x44\xf6\x0a\xea\xb5\x1a\x2a\x72\x6e\x4a\xd2\x1b\x5a\xcc\x68\x65\xa1\x52\x02\x51\xa1\x55\x0d\xc8\x5e\xbe\x8e\xb4\x40\x54\x63\x69\xdb\x62\x93\xcc\x40\x7c\x0d\x0d\xb0\xc3\x87\x43\x98\x5d\x3a\x86\x91\x1c\x7a\x70\xa6\x90\xd3\x23\x22\x80\xaf\x52\x14\xf1\xa4\x8e\xfb\x50\xdb\xe7\x7d\x6a\xb3\x4c\xcb\x74\x50\xfe\xad\x4d\xad\xa2\x00\x9f\x1a\xaa\xf2\x10\xe3\xf8\x1d\x49\xa7\x8d\x18\xe8\xae\x9d\x1d\xac\xd5\xbf\x5f\xa3\x9b\x5d\x29\x89\x72\x71\xb9\x29\xe5\x11\xaf\xa2\x22\x59\x1c\xd2\xa6\xcb\x74\x7c\x75\x88\x73\x5a\x77\x6c\x44\xee\xca\xee\xa9\xc4\x5b\x8d\xda\xec\x5e\x51\x06\x4c\x00\xe8\x95\x22\x0f\x5e\x5b\xd8\x8c\xde\xbf\x36\x13\x2b\xaf\xad\x3d\x99\x62\x0b\xea\x04\x51\x61\x94\xc9\x88\x4e\x8a\x44\xd5\xc5\xb2\xdf\xf6\x10\xf7\xf9\x2d\x57\x72\xb1\x45\x3d\x85\x23\x6a\x82\x60\x44\x56\x51\x33\x66\x43\xb1\x2f\xaf\xd9\x6e\x3e\x9f\x1f\x35\xd1\x4d\x02\x1f\x88\x80\x62\xd2\x5c\xef\xd3\x8c\xb8\x94\x0e\x2a\x06\x2a\xac\xdb\xda\x9d\x0b\x8b\x08\x41\x1f\x65\xc4\xd3\x84\xff\x10\x0e\x54\xc2\x0c\x20\xe2\xd1\x1a\x26\x49\xc2\x7b\xca\x10\xc0\x47\xbd\xda\x87\xd1\x98\xe5\x5f\xb1\x1e\xc4\x09\x8c\x58\x99\x10\xe4\x71\x9d\x7f\x02\x0f\x2b\x75\x83\xfc\xd2\xab\xd0\xae\xd4\x7b\x81\xae\xf1\x92\x1f\x2d\x70\x9b\xff\x3c\x13\x5b\x63\x17\xef\x4c\x51\x13\x3d\x86\xd2\x42\xbf\x75\xb1\xf5\x13\xbf\x87\xda\x9e\x38\x9e\x76\xa1\xcf\x17\xb2\x09\x1f\xf4\xc2\x53\xe5\xfd\x45\x1a\xf9\x03\x0a\xad\x26\x6f\x6e\x30\x01\x5b\x1b\x03\xa4\xd6\x14\x9d\xaa\x2c\xe5\x84\x34\xe9\xe4\xa7\x61\x68\x2a\x21\x0e\xdb\x85\x5a\xa4\xe6\xd6\x9c\x18\xab\xb0\xd1\xa8\xed\x8e\xca\x60\xc5\x2e\x5a\xe2\x2a\x9f\xb4\x4f\x2a\xe9\x6c\xe3\x47\xda\xd7\x93\x88\xe7\x12\x2f\x26\xb2\x14\xf6\x51\xbd\x8c\x51\x83\x57\x1f\x8f\xd9\xcf\x10\x4e\x59\x05\xae\x79\xbd\x62\x00\xe0\x08\x89\x44\xe5\x33\x0b\x42\x25\x55\xe9\x1b\xbd\xe3\xe1\xcd\x45\xdb\x9d\x72\x6f\x1e\x70\x86\x9a\x7c\x54\x69\x39\xa2\x70\x55\xb4\x7e\x7c\x0f\xa0\xc7\x64\x1d\x40\xa6\xe8\xb9\xc5\xb2\x57\x96\x96\xee\x2e\xf2\x47\x57\x91\x3f\xb9\x13\x59\x6e\x10\x71\xd8\x51\x85\xdb\xff\xbe\xa4\x74\x95\x27\x07\x13\xa2\x44\x90\xd0\x37\xce\xd0\x16\x28\x19\xf4\x0e\x7e\xc9\xfd\x06\x56\xd1\x6b\x3a\x61\x9f\x84\x31\xf5\x0e\x27\x23\x14\xee\xec\x96\x1e\xa8\x4a\xa4\x97\xa5\xca\xd8\xc5\x4d\x79\xa9\x86\x5a\x7b\x08\xcf\x06\x53\x37\x55\x92\x21\xd1\x74\x44\xd5\x84\xb9\xab\x4e\x30\x8
d\xb6\x8b\xfb\x9d\xa1\xdc\x5a\x86\x18\x08\xc4\x6c\x75\xcb\x8a\x8b\xb6\xc8\xd5\xd7\x73\x1f\xd5\x26\x16\x89\xc1\x8a\x7a\x12\xfe\x51\x52\x40\xfe\xaf\x75\x64\xe5\xdd\x75\x67\x10\x8c\x91\xb6\x3f\x17\x4d\x60\x6c\x23\x3d\x91\x32\xad\x59\x89\xe9\xd8\x46\x40\xb0\x09\x47\x32\x89\x1b\x11\x4c\x76\xd6\xee\x02\x17\x2c\x26\x5b\xb5\xcc\xf4\x7a\x96\xa2\x35\x19\x85\x26\x3d\xc9\x53\x1f\x03\x6e\xc1\xa5\xb8\x6b\x31\xf9\x07\x4d\x76\x0a\x64\xda\x8c\x02\x9a\x64\xde\xb2\x97\xef\x82\x3a\x2f\x13\xa4\x5c\x62\xca\xd2\xab\x16\xe0\x18\x47\xb1\x1c\x72\x94\x5d\xe1\xb5\xdc\xdf\x57\x69\xf8\xad\xb1\x00\x5e\x6b\xa0\x66\x81\x06\x88\x30\x45\x77\xaf\xb7\xf2\x86\x38\x1a\x88\xbd\x51\x97\x7d\x17\xfa\xa0\x3e\x9b\x19\x21\x6e\xf3\x8f\xa1\xe1\xc2\xd1\x1b\x25\x50\x46\x3b\x69\xb4\xc0\x54\xa3\x0a\x03\x5c\x64\x7f\xa3\x49\x80\x0c\x6d\x9e\x6a\xb1\xfd\x1c\x5e\x0c\x7d\xb4\xbe\x6e\x5f\x92\xc4\x93\x26\xff\x39\x01\x57\x4b\x79\x32\x95\xf1\xcc\x33\x6d\x8c\x23\xb2\x5b\x9b\x59\xab\x1a\x7e\x6a\x27\xfe\x6a\x81\x11\x9a\x9a\xbc\x17\x41\x8b\x72\x76\x6b\xcd\x20\xdf\xee\x9c\x41\x2b\xe7\x1f\x86\x17\xf2\x6f\x4c\xd2\x35\x6a\x91\x68\x0d\x79\x8d\x33\x59\xc4\xcf\x1c\xd6\xb2\x3c\x64\xdb\x30\x33\x8a\x63\xfc\xbd\x9d\x11\xde\x5c\xeb\x9b\x97\x6b\xd2\xec\x00\xa3\xef\xef\x4c\xfe\x56\x09\xdf\x85\x1e\x69\xce\x2d\xc1\x71\x2a\x9b\x1a\x1d\x7d\x42\x0b\x4f\x55\xa7\x0c\xde\x3c\x36\x3b\xb1\x3a\xe8\x80\xce\xec\x6b\x75\x22\x57\xb4\x6a\xf4\xde\xf0\xcd\x72\xd0\xba\x46\x9d\x20\xb9\x16\x40\x4c\xa6\xa4\xc7\x52\xa1\x44\xb9\xf8\x07\x7d\x58\xef\x2d\xc7\x5e\xc3\xfe\xd2\xea\xe1\x6f\x5f\x65\x29\xe1\xcb\x3b\xeb\xb3\xfb\xec\x3d\x83\x21\x01\xd2\x24\xb1\x38\x89\x9a\xfe\x0b\x94\x4a\x84\xfb\x17\x15\x70\x6a\x4a\x52\x6e\xd2\x77\x67\x8f\x22\xde\xca\xdb\x89\x35\xff\xb0\x84\x82\xfc\xac\x2f\x2b\x97\x26\x06\x86\xfb\x97\xeb\x06\xde\xd3\x42\x5d\xca\xd9\xd5\x6f\x61\x71\x2b\x63\x75\xfb\x13\x7a\x86\x0a\xb7\x44\xb3\x25\xa2\xe7\x8d\x17\xac\x7b\x7a\xdb\x50\x53\x6e\x85\x7e\x0c\xbd\xe8\x18\x39\xfc\x88\x0e\x8a\x1b\xb3\x00\x6c\xac\xfb\xc0\x98\x61\x82\x3a\x56\xb9\x1a\x87\xb6\x36\xa1\x45\x9d\x14\xbd\x05\x80\xd2\x17\xf3\xe6\x56\x39\x55\x17\xe9\x52\x8f\xa6\x0b\xd5\x68\x8a\xa5\x03\x93\x8a\x1f\xa0\xe5\x8a\x8d\x7b\x85\x30\x33\xc0\x7b\x5c\xe4\xc3\x12\xfd\xa6\x36\x26\x8d\x2a\x1a\xcd\xef\x4e\x05\x4d\x28\xbc\x98\x55\x1a\xc0\xcb\xb6\xb6\xe8\x31\xda\x34\x82\x97\xb0\x09\xde\xb8\x2e\xd1\xb8\x31\x47\xde\x7e\x89\xee\x7b\xdc\x40\xe0\x29\x6d\xad\x1b\x7e\xbf\x7d\x75\x60\x05\x27\xa9\x69\x15\x78\x7f\xf7\x50\x14\xce\x8d\xe7\xcc\x8b\xfd\xd8\x58\xe4\x7d\xb8\xc9\x60\x45\x86\x57\xff\x6b\x00\x90\x84\xa6\x88\x6d\xe6\xc1\x0c\xab\x46\xe1\x6c\x51\x2f\xb1\x5c\x29\x75\x8c\x17\xd4\xfb\x2d\x56\x93\xb6\x84\xa9\x2c\x1a\x9d\xf3\x15\xef\xf2\x1e\xa1\xda\xda\xf1\x29\xd2\x6f\x40\x59\xba\x37\x58\x8e\x1b\x78\xf1\xa1\x7e\x73\x8c\x96\xb4\xe1\xea\xa0\x01\xfa\xd6\x26\x8d\x45\x77\x25\xe6\x96\x2b\xfd\xa0\x84\x29\x01\x43\xf2\xe6\xb8\x75\x79\xe9\xb2\xd4\xc0\xf7\x35\x0a\x15\x6b\x3d\xa2\x9e\xb8\x8b\xa9\x2f\xf2\x5c\x14\xaf\xc9\xa4\x8b\x5c\x30\xa2\x70\x12\x0c\x6f\xa8\x9a\xe1\xff\x7b\x8d\x98\xd9\xea\x4b\xa9\x72\x59\x03\xa5\x0f\x36\x90\x3b\x4d\x42\x1b\x67\xc1\x35\x22\x72\xc1\x3a\xb5\xf7\x5a\xb0\x3b\x01\x32\xbc\xa4\x59\xe9\x0d\x7f\xd0\x7d\x03\xa5\x72\xfe\x49\x41\x1f\xdf\xd4\x4b\x98\x75\xac\xca\x04\xef\xf4\x56\x16\x0c\x08\xc7\xe0\x31\x71\x6a\x31\x99\xbb\x30\x85\x54\xb3\x12\x27\xa3\x63\x49\x97\xb3\x5e\x19\xb8\xa4\xf7\x74\x85\xe4\x57\xf7\xbd\xd9\xcb\x25\x90\xe2\x66\x17\x11\x64\x7b\x21\x94\xa5\x19\x8b\x2e\xb1\x71\xee\x39\xdd\x40\xbc\xdb\x0c\x77\x4a\xb8\x9c\x07\xcc\x07\xe5\x09\xb5\x6b\xf5\xf9\x07\x8a\xed\xfb\xd5\x2b\xea\x66\x2c\xbe\xac\xd2\xd6\x80\xd0\x4e\xe0\x75\x97\x7e\x63\xef\x
c5\x4b\x5d\xcc\x74\x1c\x52\x59\xab\x77\x95\x4c\x5c\x3e\x9c\x2d\x8b\x11\x12\xce\x8b\x5a\x6a\xaf\x21\x3b\x37\x10\x09\xf0\x2a\x66\xc0\xae\x4b\xc7\x5f\x3c\x95\x19\xbc\x21\xff\xe4\x67\xea\xe9\x87\x5e\x0d\x73\x6d\x77\x90\x2e\xf8\xd4\x5c\xd6\x98\x42\x89\x2f\x6b\xe8\x25\x51\xec\x45\xd7\x88\xf3\x26\xed\x5e\xbd\x85\x35\x65\x47\xc6\x53\xa3\x79\x90\xdc\x21\xd3\x66\x5e\x52\xa9\xa5\xd7\x45\xbf\xc6\x06\xcc\x59\xb1\x83\xb3\xf9\x99\xb1\x78\x17\x2b\xf7\xf2\xd2\x03\xd2\x5f\xd3\xaa\xb4\xca\x7b\xd8\x82\x6f\xa8\x09\x6c\x9d\x0b\x9f\x47\x36\x1a\xba\x8c\xa9\x3b\x79\x54\x3e\xf5\xc1\x09\x93\x39\x56\xf2\x1d\x03\xca\xee\x77\x4d\x8c\x2d\xfe\x53\xb4\x9c\x3e\xd3\xc4\xff\xcd\xe2\x32\x97\x10\xc5\x47\x9c\xcb\xd7\xcc\x21\xe2\xfd\x89\x07\x97\x3c\x84\x7c\xfb\x5f\xd3\x45\xe1\x31\x48\xd2\x2b\xdd\x31\x00\x9e\xf7\xaf\xae\xba\x93\xd0\xf5\x0a\xa6\x6b\xcb\x5d\x31\x4a\x23\xb8\xe2\x31\xe0\x35\x22\x0c\x0b\x59\x7b\x9a\x59\x8c\x8a\x65\x69\xc9\x45\x82\x2e\x5d\x51\x73\xd9\x55\x97\xf6\x8f\x45\xc4\xd4\x7a\xcd\x6f\xfd\x29\x66\x88\x39\xbe\x55\x60\xd2\x14\x74\x70\xcc\xbf\x6c\xc1\xd4\x14\x00\xae\x67\x82\xa1\x70\xc3\x76\x7f\x8e\x89\xc6\xed\xf6\x76\x60\x81\xf4\x3f\x28\x23\x89\x4d\xe9\x87\x0a\xe0\x3f\x51\x21\xd4\xf0\x24\xb9\x5f\x6c\x11\x63\x14\xab\x34\x6d\x27\x21\xcb\x0b\x49\x72\x0f\xcb\x29\x9c\xbb\x13\x8e\xf3\x6a\x70\xb6\xc8\xfa\x04\x92\x96\x82\xb2\x4d\xb6\x72\xeb\x91\xae\x94\x16\x76\xa8\xe6\x12\x56\x6f\x43\x8e\xd9\x31\xd2\x8e\x5e\x6b\xa0\xb9\x02\x7a\x8f\xc4\xeb\x42\x69\x52\x1c\x2d\x7b\xec\x2a\x0c\x8e\x99\x8b\xd0\x0d\x18\x8b\xfd\x33\x21\x38\x4b\x8c\x42\x3c\x72\x8a\x59\x9f\xca\x27\x3e\xc8\x73\x43\xdf\xf8\x1d\x6a\x86\x8a\x0d\xbd\xa3\x9e\x9d\x93\xcd\x34\x40\x3b\x21\xc0\xac\xc3\x91\xc3\x98\xb5\x99\x37\xa7\x00\xe1\x54\x95\x2d\x62\xd3\xb5\x68\x74\x7a\xdd\x42\xc2\xd8\xdb\x82\x17\x8a\xc0\xba\xe6\xa8\x78\x3b\xab\x3a\x9a\xbd\x5b\x8e\x36\x66\xc9\xa7\x37\xe9\x40\xa6\x45\x5c\xd8\xfa\x0e\xa1\xc4\x8d\x41\x9f\x67\x43\x22\xaf\xc0\xe6\xfc\xa8\x05\x6d\xff\xca\x88\x1a\xad\xc1\x67\xb7\x48\xc1\x81\xaa\xc2\x96\xd0\x9d\x1a\xfb\x11\xed\x0a\xb7\x0b\x72\x61\xdb\xb1\xab\x87\x90\xc7\xb8\x29\xfb\x76\x91\x6c\x1c\x02\x4e\xce\x6a\x3a\xd9\x2c\xee\x92\x96\xcb\x96\x06\x8c\x71\x0a\x1e\x8c\x93\xc4\x98\x1b\x65\x3d\x86\x87\xcc\x9d\x90\x45\x8b\x63\x24\x82\x9a\xa5\x17\x45\xd1\x44\xfa\x0d\x2c\x61\x8f\x86\x7e\xd4\x24\x43\xcf\xce\xd9\x59\x64\x0e\x79\x19\x47\x43\xaa\xfc\x22\xf7\xa2\xa3\x25\x97\x9c\xae\x21\xa5\x3d\x42\xbc\x17\xe7\xc1\xeb\xe9\xcb\xdc\xa2\x81\x73\x0a\xe6\x47\x30\x42\xd0\x5f\x14\xb3\xda\x5c\xb2\xec\x86\x12\x10\xed\x1f\x3b\xdf\x37\x56\xd1\xbc\xfe\xc3\xc3\x70\x86\x4e\x4a\xd3\x65\x3a\xbc\xcc\x4a\x3f\xee\xca\x8b\xd1\x8c\x72\x88\x50\xe9\x75\xfc\x99\xda\x5a\xab\x72\xa7\xbd\xf5\x0a\xee\x44\x55\xd5\x96\xa6\x7f\x4b\x12\x93\x54\x56\x13\x59\x34\xd5\x8f\xa4\xab\xb9\xeb\x8c\x88\x04\x71\x21\x9a\x49\x62\x56\xe0\x5e\x32\xcf\x5b\xec\xf1\x80\x28\x79\x86\x57\xc3\x62\xf8\x7f\xf9\x29\xd2\xb6\x08\x09\xf5\x8b\x8a\xdf\xf4\x08\x6f\x6d\x18\xac\x70\xa3\x36\xe3\x7d\x8a\xd1\x65\x68\xff\x45\x76\xe8\xbd\x20\xbe\xfb\xbb\xc5\xc9\x81\x68\x44\x5a\x82\x54\x49\x49\x13\x32\x46\xd1\x0c\x8b\x67\x09\x28\xfc\xb8\x1b\x66\x24\xb8\x2d\x92\x0f\xb9\x6c\x7a\x34\x0f\x2b\xa3\x04\xed\xb7\xab\x4d\x99\xdc\x75\xc0\x29\x96\x62\x8e\x88\x96\xc6\xb1\x6a\xa5\x61\x1f\x0d\xcc\x7e\x87\xba\x98\x1d\x39\x0b\x45\xe5\xdf\x25\x5a\x4e\xd5\xd9\xd1\xee\x28\xf1\xaa\x36\x4b\x6e\xd1\xf4\xb6\x9e\x75\x35\x9f\xc5\x4f\x74\xdf\x32\x58\xc1\xfc\xc7\xd6\x70\xab\xa9\xe9\x23\x7b\x80\x7c\xf5\x14\x09\xf9\x71\x58\xda\xb1\x9c\x20\x28\xb7\xce\xbe\xc5\xb5\x67\x79\x76\x43\x25\x5d\xc5\xd0\x9c\x07\x4e\x2f\xc1\x0c\xcd\x1b\x1b\xfb\x14\x39\xcc\x7c\xdf\x14\x23\xe2\xbd\xae\x34\
xb2\x87\x71\xce\x4e\x6b\x54\x25\x74\xfa\x0c\x59\x84\x1e\x2f\x65\xdd\x98\xf4\x53\xa5\x57\x01\x20\xc0\x02\x95\x5a\x7b\xc2\x47\x0f\xa7\xdf\xb0\xda\x45\xfc\xea\x2e\x09\xf6\xfc\xf8\x60\x61\x21\x12\x15\xb1\xc6\x57\x2b\x87\x33\xf0\xef\xb8\xe1\xef\x98\xfc\x23\x89\x1b\xc6\xf7\x8e\x4d\x52\xe5\xac\x2b\xa1\x7e\x3f\x62\x22\x25\x0b\xb2\xd1\xb8\xe3\x5a\x20\xbc\x50\x48\xf6\x26\x91\xef\xce\x45\x8d\x84\xde\xe6\x99\x18\x16\xa0\xda\x96\x44\xda\xb5\xef\x93\x8c\x51\x14\x3f\xa4\xcb\xce\x60\xde\xc2\xa5\xce\x9b\x77\xb1\xcf\x35\x44\xc8\x96\x90\x14\xee\x47\x22\x38\xc4\x66\x40\x22\xd1\x5e\x2a\x32\x7c\x45\x9b\x96\x6d\x60\x00\xdf\xfe\x5f\xff\x28\x37\xdb\xcf\x14\x7a\xd1\x9f\x3d\xc4\xc4\x67\xea\xf2\xb3\x88\xb3\x71\x1f\xe7\x60\x48\xce\x31\xfb\x79\x76\xc9\xd4\x99\xd5\x37\x9f\x41\x25\xfa\x3e\x0b\x55\x48\x36\x62\x26\x89\x2b\xee\x06\x71\xe3\xf9\xf0\x10\xff\x87\x70\xe1\x78\x4d\x97\x01\x51\xb5\x04\x7f\x24\x3a\xee\x1a\x63\x51\x5f\xa7\xe5\x9a\xf8\x5e\xcc\x4c\x69\x3a\x40\x3e\xc2\xad\xf4\xa7\x3a\xf4\xce\xb5\x44\x24\x5f\xa7\x6b\xfc\xf9\x1c\x4d\xbc\x50\xe0\x54\xa1\xaa\x3b\x27\xb0\x77\xec\x86\x1f\xc1\x09\xf5\xe4\x2d\x82\x42\x17\x7c\xa1\x0b\xdf\x53\xc8\xf1\x7b\x33\x2a\x08\xf7\xc9\x3f\x3f\xf4\x24\x49\x44\xe0\x37\x74\x9c\x85\xf1\xa1\xf4\x93\xcb\x70\xdd\xa8\x59\xce\x9c\xdb\x15\x6c\x59\x04\x1d\xda\xc4\xc6\x60\x81\xfa\xc2\xde\xfe\x47\x79\x35\xc5\x86\xb1\xe3\x47\x0a\x29\xf7\xd6\xc9\x0e\xa9\x50\x81\x9b\xcf\x8f\x42\x48\x0e\x70\x24\x64\x22\x12\x60\x3e\x14\x3b\xef\x5e\x46\x54\x7b\x4c\x93\x83\xdc\x30\x09\x67\xc3\x52\xde\xc4\x09\xcc\xa1\x95\xc7\x19\x8e\x5f\x3d\x54\x57\xdb\xec\x5a\x9d\x65\xb6\xf6\x2b\x0b\x82\x4b\xbe\xca\xd7\xce\x22\x69\x67\x25\x07\xb8\x30\x27\xde\xed\xa4\xbe\xa0\x91\x69\x9b\x39\x98\x67\x6b\x57\xc0\xa9\xc0\xbf\x22\x85\x7e\xa4\x05\x33\x3e\x08\x24\xdc\xf3\x14\xc3\xa2\xee\x12\xc2\xf2\x3e\x3d\x19\xc1\x26\x79\x15\xfd\x37\x29\xe2\x9f\xc1\x02\xdc\x81\x53\x3a\x37\xf2\x81\x06\x62\x35\x43\x5d\x4e\xda\x88\x87\x50\x33\xfe\x7f\x2f\xf1\x28\xca\xfd\x07\x5f\x9a\x20\xa2\xb2\x7f\x28\x2e\x07\xe4\x62\x87\x3a\xa8\xe3\x9d\x8c\xa8\x65\x19\x39\x2e\x70\xeb\x75\x23\xaf\xa8\x7f\xde\x55\x8e\x88\xaf\xfd\xd4\xb8\x2a\xc4\x8d\x4c\x5a\x42\x52\x17\xbf\x45\xa7\xb1\xbf\x7a\xf7\xa4\xe3\xf2\x72\x8a\xec\x8d\x6d\x14\x33\x29\xc7\xaf\xc2\x4c\xf0\xdc\xd8\x7b\xda\xb7\x16\xff\x73\x9e\xe1\x3e\x18\x20\xd0\xf4\x1c\x47\xad\x02\x12\xb6\x2f\x45\x6f\x9f\xc1\xc2\x6e\x3c\xb3\xc4\x3c\x49\x50\x46\xc6\x57\x5d\x1b\x48\xc3\xe0\x55\x82\xb0\xc2\x8c\xda\xf4\x02\xf9\x9a\xa5\x9d\x32\x32\xa1\x15\x25\xdc\xfe\xc7\x51\x4c\xcc\x57\xb2\x05\x58\x6c\x96\xf4\xc3\x26\xee\xc6\x4a\x39\x98\x43\xca\xc5\xf5\x26\x19\x8c\x1c\xe6\xa6\xc3\x01\x74\x8a\x5a\x04\xfe\x1e\x5b\x76\x85\x4c\x2e\x2a\xcc\xf2\x76\x3c\x17\x29\xa9\x3f\x43\xe0\x8c\xa3\xdc\x5a\x37\xb5\xb4\xd4\x34\xc9\x00\x62\xd5\x83\x97\xec\x24\x92\x15\xa3\xc8\x74\x46\xe4\xe8\x55\xf0\x5b\xaa\x19\x07\x65\xb6\x18\x59\xcd\x33\x97\xf6\x51\xcc\x97\x7e\x0b\xfb\xdd\x18\x1b\x6d\x10\xdc\x11\x05\x69\x9f\xa0\x06\x04\x81\xed\x42\xef\xb4\xad\x4a\x26\xbf\x06\x57\x93\x33\x76\xe1\xe3\x87\xf8\x14\x52\xdc\x14\xd9\xc4\xde\x5b\xb1\x2d\x74\xa7\xb4\x5b\x87\x4a\xf7\x6e\x3f\xc2\x26\xbb\x1a\x60\x69\x3d\x0c\x1a\x7d\x94\x09\x54\x51\xa0\x2c\xcb\x5a\x78\x55\x11\x5b\x15\x4d\x52\xeb\xd5\xc2\x1b\x49\x83\x45\x0d\xac\x15\xb0\x99\xe6\x41\xa7\xe1\x2a\x94\xbb\x27\x2e\xb4\x06\x66\x24\xde\xd1\xef\xc5\x14\x30\x26\xe5\x68\x7d\x4b\xc0\x9b\x4e\x2b\x9e\x38\x74\xd4\x1b\x8b\xba\x26\xe9\x70\xf6\x57\x48\xc0\xe9\xa7\x51\xc2\xd2\xc8\x34\x33\x0a\x92\xd2\x5d\x7e\xea\x27\xae\xb4\xc7\x20\xe2\x17\x7b\xb6\x76\xf2\x82\xef\x5f\xb3\x64\x3d\xea\x17\x46\xe1\xb5\x41\x0d\xc9\x5e\xa5\xfc\x13\xe3\x9f\xbf\x66\x74\xf9\x5c\xba\x20
\x3f\xf4\x89\x39\x96\x43\xc1\x16\x94\x90\x0e\x3a\xa9\x1c\xf3\xe0\xf0\x4f\x91\x08\x5b\x93\xde\x39\x34\x25\x84\x36\xb8\x83\x20\x2d\x4c\xc2\x4c\x8f\x64\x07\xfc\x68\xf6\xe0\x85\x21\x91\x0c\x15\xcb\x31\x0e\x54\x1a\x79\xdb\x95\x22\xde\xf0\x66\xe8\xf4\xca\x63\x88\xc6\x52\x9b\xed\xc6\xa6\x5b\xbb\x5e\x41\x67\x58\x34\x53\xb0\xd1\x7c\xc9\xb9\x23\x1a\xcf\xaf\x1a\xf3\x69\x8a\xfa\xbd\x1b\x82\xb4\x8a\x96\x98\x04\xc4\x59\xb8\x69\xd9\x8b\x54\x4b\xbe\x7a\x4f\x27\x19\x01\x99\xc9\xf5\x54\x1b\x92\xb8\xc1\xf8\xd9\x4a\x25\x4e\x57\x26\x6f\x10\xc4\x9b\x11\xdc\xe2\xf5\x05\x89\xd3\xd6\xc2\xc3\xe4\x43\xc5\x05\x1b\x4f\x3c\x9a\x96\x31\x4a\xee\x7b\xe5\xb5\xd7\x0b\x3e\x31\x3d\x5b\x4f\xe5\xf9\x44\x23\x9d\x46\x85\x34\xd0\x64\x10\x6a\xd4\xab\x39\x86\x42\x17\x05\xf2\x1e\xe2\xbd\xab\x56\x07\x0a\x05\x72\x1c\x0d\xcb\x4a\x3b\xce\x62\xd0\x27\xd6\x51\xc4\x12\x33\x12\x13\xa7\x08\x8b\x10\x70\x23\xc4\xa0\x8e\x27\x65\x17\x38\x57\x9a\xe1\x10\x1a\xe9\x51\xa4\x12\x93\xdf\x5b\x8a\x62\x6c\xca\x21\x7e\x53\x17\x78\x50\x97\x46\x09\x5a\x9d\x4b\x7a\x99\x45\x8c\xbd\x7d\xbb\xd9\x65\x74\xe9\x79\x31\x00\x66\xe2\xfc\x64\x17\x97\x4b\xef\xb6\xcd\x44\x01\x94\x1c\xb5\x2c\x0b\x49\xf5\xcf\x87\xc7\x53\xb2\x16\x4f\xa9\x4c\xe7\x98\x0e\x00\x0d\x83\xd6\xe6\xe3\x73\xeb\xb9\xdf\x77\x85\x99\x2e\xb5\x96\x8f\x08\x1d\xee\x79\x53\xbf\xf7\xaa\xdc\xc2\x22\x00\x8e\xce\x61\x3f\x6e\xfe\x92\xdc\x5a\x4e\x16\x5a\x1d\x1a\xe7\x49\xc1\x71\x00\x13\x1c\x2d\x52\x38\x16\xa7\xc6\xd0\xd1\xdb\x79\xd8\xc1\xc3\xc1\xbc\x62\x9f\x65\x67\x98\x4a\xba\x8f\x91\xf1\x1f\x85\x83\xba\xee\x5a\x09\x95\x74\xca\xea\x99\xd1\x13\x7d\xe1\x51\xe9\x55\xf2\x6f\x2f\x70\x30\x4b\xd6\x6e\x38\xe7\xce\xfd\x11\x84\x72\x22\xb7\x98\x4a\x6b\x0c\x4c\xf3\x35\x07\xa7\x2b\xfd\xd4\xf0\xa3\x98\x27\x59\x67\x76\x26\x4d\x51\x07\xb4\x2f\x73\xf4\xa8\x8a\x56\x07\x11\xc8\xa0\x3e\xb2\x38\xa1\x79\xb1\xaa\xa0\x6b\x92\x2f\x0f\x87\x6d\x08\x45\x62\x6b\x53\xe7\x1a\x44\xe4\xf3\xa6\x97\x77\x89\x31\xf2\xe3\x4d\xec\xff\xda\xb1\xe2\x90\x5c\x7a\x48\x88\xc4\xa4\xb6\xd8\xd4\xed\x8e\x8a\x30\x9c\x95\x1e\x24\x88\xdd\x31\x14\xdc\xf5\x6c\x62\x2e\xc8\x30\xa0\x6a\xaf\x20\x24\x8e\x59\x2b\x73\x5c\x73\xc6\xd5\xeb\xbf\x40\xa6\x53\xf2\x3f\x2d\x48\xb3\x0b\x83\xef\xd6\xfd\x06\x14\x2a\xf6\x4a\xa2\x38\x06\x85\x11\xd5\xdc\xcd\x68\xde\xcb\xca\x9b\x4a\xf7\x68\xb1\x63\x7b\xc1\x56\xdb\x20\x8b\xb5\x8d\x56\x3f\x48\x3f\xd5\x01\x84\x52\xdb\x8d\xa9\x50\x63\x90\xaf\x8e\xe4\xac\xd7\xe4\xa5\x1e\x3a\x79\xf8\x71\x61\xa8\xd0\xfe\xb2\xcf\x16\x4c\xfa\x18\x3b\x3b\x7b\x23\xfd\xfc\xd2\x12\x07\xc7\x25\xa5\x6f\x33\xe5\x18\xb6\x04\x51\x17\x79\x3e\x9f\x5b\x5c\x32\x55\x5f\xbc\x08\xb9\x14\x21\x62\x1d\xe7\xd9\x50\xed\xe2\x47\xad\x95\x0e\xe4\x35\xc0\xdc\x87\x52\xaa\x2e\x31\x92\x00\x36\xd6\x4f\xc4\xcc\xfe\x74\x8f\x74\xe2\x91\xef\x16\xfc\x35\x65\x0b\x4b\x96\x21\xc3\xc9\x0e\xdf\xb8\x64\x50\x20\xbd\x45\x0b\x26\x4d\x13\x8c\x66\x73\x88\x64\x56\x80\x8a\xbf\xdf\x82\x75\xd3\x66\x2a\x07\x46\xaf\x98\x78\xfe\x9e\x01\x36\xd5\x4d\x63\x2a\x39\x7f\xb7\xd7\x68\x73\x73\xd2\xfd\x18\x73\xc9\xbc\x5d\x82\x35\xbb\xce\x96\x81\xf4\xbd\xed\xd1\xba\xcb\xd3\x1b\x07\x14\x82\xf0\xd4\x61\xcf\x53\xe3\xa2\x63\x70\x18\xbf\xf4\xde\x7f\xc6\x70\x2f\xb6\x3b\x46\xc5\x2c\xca\x2c\xe5\xd9\xa2\x20\x5a\x8e\x9d\x04\x8e\x7d\xe6\xa4\xed\x9d\x41\x38\x64\xba\x14\x3a\x21\x55\x52\x3f\x63\x6b\x10\xe5\x2c\x16\x02\x2a\x7e\x2d\x94\x6a\x5c\x33\x32\x55\x23\xfb\xff\xb9\xba\xb2\x34\xc5\x75\x25\xbd\x17\x5e\xea\xe5\x6e\x4a\x1e\xc0\x4e\x6c\xcb\xc7\x03\x14\xb9\xfa\xd6\x3f\x84\x4c\xf5\xed\xfe\x0e\x82\x24\x29\x12\xac\x50\x0c\xff\xf0\xc5\x01\x1a\x7d\x65\x85\x4c\x00\xe9\xcd\xaf\x80\x00\x0d\xfa\xf7\x65\xb9\xc6\x05\x30\x5b\xf2\xf6\xad\x3d\x96\xb8\x96\xe2\x6
c\xe2\x3c\xda\x4e\x74\xfa\xc4\x07\x7e\xc4\x93\x68\x04\xcf\xd1\x3d\xf9\x1f\xc0\xb5\xdc\xdb\x10\xa1\x15\xa2\x90\x0a\x7b\x53\xb4\x54\xb0\xa9\x25\xfb\x45\x53\xe0\x80\xf2\xb7\x17\xb0\x94\x99\xf2\x54\x4d\x7a\x5f\x29\x6a\xf4\x0e\x26\x17\xee\x97\xad\x55\x75\x14\xe2\x60\x5c\x5a\x7d\x9d\x61\xe7\xb3\x0a\x88\x48\xcb\x21\x7e\x94\x4d\x4e\xb5\x4f\xd0\xd4\x19\x70\x6a\x95\xdb\xa4\xb6\x1d\xab\x7f\x11\x0e\x00\xc9\x8b\xdd\x84\x21\x26\x71\x70\xaa\x34\x15\xc6\x1b\x7c\xe2\x2e\xa2\xb9\x99\x80\x7e\x55\x62\x73\xcd\x59\xc6\x2d\xac\x3e\xfb\x52\x40\x5a\xe1\x00\x5f\x2e\x23\xcf\xc2\x92\x73\x9f\xd1\x74\x55\x83\x2d\x2a\x1d\xe0\x88\x79\x7d\xb8\xb7\xb4\xc5\xa4\x79\x1b\x0d\x74\x51\x65\xa2\x1a\xa8\x24\xa8\x86\xa4\x0a\x59\xaf\xc6\xc4\x7a\x5a\x13\xc0\xb8\x7a\x5d\x27\x6d\x78\x4d\x80\xdc\xef\xd6\xfc\xaa\x4b\x34\x82\x91\x9a\x34\xe3\x86\x61\x46\xf8\x05\xe2\x0a\xd0\x59\xcf\x02\x5b\xbb\x99\x89\x71\x17\x56\xea\xc1\x32\xc7\xb0\xf5\xd4\x04\xd6\xfd\xe8\x69\x6c\xb6\x68\x1b\xff\xf8\xb8\x00\x5a\xb9\x02\xdd\xa1\xa1\x27\xe2\x08\x7f\x34\xcf\x76\xff\x85\xa7\x98\xb4\x29\x1d\x46\xd9\x97\x37\xb2\xa9\x7c\x20\xb3\x7a\x73\x49\x1d\x79\xb5\x9f\x19\x7d\xfa\x40\xe6\x44\xea\x7a\x4f\x7f\x75\xa0\xfe\x3d\x2a\x00\xb6\x64\xed\x15\xb2\x57\xc9\x02\xf2\xd1\xc9\xef\x25\x38\xb8\xdd\x68\xb4\xbe\xfd\xba\xb6\x00\x9f\x43\x79\x3c\x0a\x6e\x34\x09\xe2\x34\xdd\x04\xb5\x4c\xfb\x99\x02\xb4\xd6\x7c\x1e\x39\x1a\x40\xbf\xbf\xd1\xaf\xdd\x02\xa2\xea\x4e\xac\x9c\x5b\xa2\x0d\xaf\x73\xd3\xcd\xe8\x77\xaf\xc0\x70\x61\x12\x38\x19\xb5\x5d\x31\xa9\x99\x1e\x55\x1f\xd1\x65\x86\x5c\xab\xcc\x0c\x21\x02\x15\x13\xde\xa3\x5f\x65\x6e\xae\xd8\xf4\xa5\x3f\xaf\xcb\xad\x3e\x02\x2d\xe2\xcd\x6c\xea\x52\x63\xc6\x20\x38\x5a\x7c\x67\x24\x5c\x59\x57\x0e\xda\x42\x65\xfb\xde\x42\xfa\x47\x5c\x71\x68\xe7\xc9\x01\x7a\xac\xf9\x46\x49\xb8\xfd\xca\x73\xbf\x05\x16\x09\x76\x32\xea\x90\xcc\xb0\x70\x9b\x54\x22\x94\xf4\x15\xcd\xdb\x53\x47\x93\xc3\x0a\xb5\x11\x74\x1a\x95\x5c\x2a\xc4\x83\xfa\xc5\x6d\xc1\xa1\xfe\x09\x8f\xbe\x67\x09\x4e\xb0\xb7\xa6\x73\xc6\x76\xf9\x8b\xf7\x88\xa6\x14\xfd\x1e\xcc\x6c\x2a\xd1\x14\x9a\x41\x24\xba\xa9\xc4\xbe\x49\xe0\x23\xb2\x55\x1e\x42\x02\x1e\x4f\x7d\x53\xfd\xb5\x49\xfe\xd4\x35\x92\x6c\xdb\x17\xa2\xe5\xfe\xea\x3b\xdb\x0d\x96\xf4\xc7\x85\x16\x34\x3d\x2f\x67\xe7\xb4\x3c\x5d\x41\x55\x46\xc7\xf1\xa9\x03\xb8\xb1\x01\xbe\x66\x64\xa4\xa9\x02\x0d\x80\x92\x3a\xc4\xb0\xa9\x51\xe1\xc8\x4b\x50\xb1\x4b\xb0\x35\x2f\xa7\xff\x1b\xce\xd5\x25\x32\x18\x49\xb1\xb7\x97\x26\x64\xa9\xa9\x30\x18\xf5\x64\xee\x6d\x2f\xbf\x12\x52\x50\x3d\x05\x71\x47\xa7\x1e\x29\xb5\xbe\x00\xda\xfa\x51\x02\xb5\xcf\xc4\x7a\x43\x5d\x72\x23\x9f\x84\xfb\xb8\xd4\x51\x75\xc0\x52\xd6\x5f\xf6\x45\x8c\x2c\xc2\x40\x19\x07\xed\x41\x70\xa9\x87\x3d\xb4\x6b\x43\x8c\x71\xea\x3d\x01\x0e\xde\xc1\x94\xaa\x23\xcb\x94\x42\x77\xb2\x1c\x50\x25\x4a\x58\xb9\xf4\x0c\xb6\xd5\x0f\xf7\x87\xcb\xfd\x64\x60\x30\x44\xfe\xcf\xf5\x8a\x41\xd6\x57\xc5\xa1\xf4\x70\x24\xa0\x08\x42\x1b\xa0\x89\xc7\xe4\xe6\xe8\x03\x2f\xa7\x3c\xc7\x20\x62\xbb\x1b\xc4\xdc\xd7\xad\x1a\x1c\xac\x67\xc4\x91\x7d\x0d\xdb\xb3\x1d\xe3\x08\x2b\xe0\x3d\x94\xc0\x0f\x51\x2b\x74\x1a\x66\x97\x3a\xad\xb7\xfa\x57\x49\x0b\xce\x2a\x22\x08\x5b\x38\xf5\xce\x61\xbc\x11\xd8\xe4\x50\x02\xe3\xc5\x57\xb2\xa4\x29\x62\x0c\xfe\xea\xfb\xe4\x28\xd4\x3b\x48\x2b\x2f\xdf\x28\x43\x78\xa3\x7e\xe4\x1e\x3d\x4c\x52\xbf\xaf\x0b\x72\xd2\xcc\x23\x8a\x8b\x5f\x0c\x97\xd4\x0a\xe0\x25\xc8\x93\xa2\x54\xf9\xda\xcf\x07\x1b\xf8\xd0\xd2\xbd\x5a\x40\x21\x93\x0d\x62\x9b\x35\xb5\xd6\xb2\x0d\x2b\xd2\x68\xaf\x72\x0b\xbb\x58\x35\xe5\x3b\xbe\xd2\xc5\x4b\x47\xfb\xd2\x79\xd8\x6b\x7f\x6f\x8e\x5e\x12\xca\xca\x9c\x62\xe4\x17\xec\x9e\xb0\xbf\xf5\xb9\x55\x
51\x90\x4c\x92\xca\x21\x90\xef\x12\xd4\x5c\x95\x10\xbd\xbf\x21\x45\xf2\x96\x73\xbe\x44\x58\xc3\xa6\x16\x72\xee\xd4\x04\x80\xcb\x8f\x0a\x42\x31\x5e\xf7\xdb\xb7\xb7\xfb\x35\x4f\x8e\x91\xf9\x38\x7f\xd1\x2c\x70\xa7\x0d\x81\xbb\xb1\xc5\x7c\xb7\x24\xa8\x36\x59\x1f\xac\x33\xd1\x73\xc6\x02\x3c\x49\xc8\x4d\xc8\xa5\xd9\xc1\x6d\x0b\xf0\xc9\x23\x84\x2e\xa0\xcb\x8f\xf9\x4c\x5c\x7a\x8f\x73\x0f\xd9\x0d\x57\x89\xbd\x26\xdc\xb7\x6f\x8c\xe6\x78\x79\x99\xc7\xa9\xf6\x3b\x86\x33\xe5\x12\xd4\x4c\x39\xbf\xfb\x84\x53\x11\x40\x77\x93\xbe\xea\x64\x96\x38\xa2\x7a\x8f\xa6\x08\xce\xbb\x11\xa8\x5c\xab\xe0\xa4\x1c\xa3\x3a\x9c\x6c\x30\x72\xf1\x5a\x79\x95\x49\xbf\x6f\x59\x4c\x6d\x00\x7e\x46\x07\x23\x87\xd2\xbc\x54\x3b\x28\xf0\x79\x77\x8b\x95\xb9\xc5\xca\x9f\x67\x00\xf1\x95\xd4\xa9\x83\x7d\xfb\x9f\x9d\x4a\xa4\xef\x25\x48\x11\x28\xe7\x25\xf6\x24\x23\xde\x26\x5f\xcc\xe5\x63\x5d\xdd\x54\x39\xc2\x6f\x1c\xc4\xc5\xf0\x05\x01\x2f\x62\xab\x1d\x2c\x5e\xe5\x5b\xb6\x32\xfa\x91\xb3\xfa\xa1\x25\x1a\x59\xa7\x49\x43\x70\xf2\x7a\x14\x6f\xcf\x29\x5c\x92\x09\x5b\xc6\x6d\x8c\x94\x30\x12\xd9\x2a\xf2\xa6\xe4\x37\xad\xc1\xc6\x22\x8d\xb9\x97\x61\xa5\x17\x5e\xe9\x52\x09\x88\x51\x5b\x77\xfb\x9e\x98\xf0\x27\xc0\xd1\x8b\xfa\x37\x7d\xb5\x3c\x17\xe4\x36\xed\x44\x10\x3c\x91\x5c\x92\x4f\x80\xf3\xd8\x10\xc4\xaa\xc0\x6d\x00\x2e\xe9\x90\x4c\x8c\x9d\x99\xe5\xe3\x9e\x00\x62\x70\x7f\x83\x1a\xc8\x2e\x17\x47\xda\x5f\x9d\x84\x83\x0d\xa7\x8d\x9c\x6c\xec\x7d\x1f\x0d\x5d\xb8\x13\x61\xcf\xab\x32\x6b\xda\x61\xae\xb0\xc8\x66\x1e\x8e\x87\xad\xe8\x1c\x96\x17\xba\xc2\xcb\xe5\xac\x76\xf6\x76\x5e\x80\x55\x2b\x32\x34\xb5\xf7\x31\x55\x8c\x1d\xa2\x61\x04\xca\x68\x8c\x0f\x94\xfe\xad\x40\xae\xf8\x71\xdf\x3e\x6d\x2c\x59\x12\xb8\xc1\x92\x68\x38\xcf\x55\x42\x4c\x9a\xc4\x69\xbe\xa2\x4b\x2f\x34\x9c\x61\xa9\x1b\x2d\x87\x34\x29\x6c\x9a\x05\xa9\x0f\xe9\x93\x34\x9d\x33\x86\x0c\xee\x9b\x0a\xa0\xc7\x39\xd6\x06\x99\xf7\xba\xcc\x9c\xb5\xaf\x8f\x5c\x2e\x0f\x8f\x4f\xda\x18\x9c\x96\x65\x76\x3f\x63\xdc\xcc\x53\xdb\x57\xf9\x5d\xd1\xc1\x53\xb1\x16\xfa\x72\x92\x2f\x8c\x6c\x40\xd3\x35\x37\x35\xca\xb7\x13\x57\x1b\x8d\xdc\x74\x05\xf1\x50\xf5\xe1\x7e\x84\xcd\x99\x98\xa5\x96\xf2\xc3\xdb\x5a\x4a\xb9\xeb\x77\xb8\x94\xc3\x3c\x6d\x82\x03\x62\xcb\xdf\xcf\x90\x49\x9a\x47\x7b\x71\xcd\xde\x67\xcf\xcf\x92\x76\x35\xe8\x9e\x8b\xf6\x4f\x39\xcc\xd5\x08\x86\xd1\x69\xb5\x7f\x8e\x6a\x31\x8e\xf5\x52\xd7\x4c\x1e\x7b\x97\xb2\x91\x87\x71\xee\x6a\x2a\x7b\x87\x54\x9c\x4e\x54\x28\xaa\xda\xe4\x55\xbd\xb6\x9b\x9c\x87\x97\x36\xde\xff\x7d\x0c\xb3\xd5\x3b\x00\xcf\x46\xea\x1a\x31\xd7\x7d\xf6\x28\x46\x93\x06\x78\xda\xd3\xd2\xde\x7a\xd4\x1a\x9f\xb2\x50\x56\x2b\xbb\xf7\x15\xb9\x71\x86\x03\x4d\x6b\xfa\x91\xc1\x1c\xbf\xba\x2a\xbf\xf2\x8d\xe8\xbf\xe6\x0a\x3f\xd4\xe1\xd0\x5a\x6e\x99\xbe\x5a\x35\x9f\x23\xc7\x4d\xc7\xfb\x64\x36\x49\xb3\x55\xbc\x0d\x26\x64\xee\xd0\x8e\xdb\x31\x98\x28\xd9\x90\x75\xdd\x04\xa0\xa5\x41\x8b\x42\x90\x60\x36\xf5\x0c\x86\xbf\xe6\x7f\x37\x07\x5f\x5e\xb9\x54\xc7\x0d\x6d\xfd\x81\x43\xe2\x72\x53\x2e\x8c\x97\x9d\xd7\xf0\xd6\x8c\x42\x60\x1f\xa5\x6a\xc3\x97\x4f\xcf\xa9\xbe\x79\xcf\x5a\x52\x83\x22\xcc\x0a\x76\xa8\xfb\xf0\x0a\xc5\xeb\x45\x8b\x2d\x94\xd8\xff\x9f\x23\xf9\x7f\x92\x92\xd3\xe0\x9a\xaa\x9a\xda\x96\x40\xdd\x87\xbe\xe6\x16\xa0\x01\x78\x6a\x54\xb1\x23\x66\x52\xca\x46\xaf\x95\x12\x4f\xaa\xf0\xb4\x82\x08\x95\xd0\x28\xb8\x09\x8b\x96\x85\xd1\x5e\xa5\x4a\xed\x87\xe0\x5e\xda\xda\x90\x3e\x54\xe5\x0e\xcd\xb7\xb2\x35\xb3\x12\xd9\xae\x9b\x62\x04\x0d\x5d\xb2\x5d\xb0\xd7\xfc\x34\xfe\x55\x7d\x36\x95\x75\xd7\x0c\xca\xd0\x43\xd2\x2f\xbc\xe6\x00\x56\x66\xbf\x20\xe0\x5a\x30\x62\x48\x9f\xda\xfc\x78\xc0\x12\x51\xfd\x29\x5a\xc2\xba\x90\
x99\x5e\xa6\x4d\xee\xe5\x80\x69\xbc\x37\x41\xa7\xc2\x09\xe5\x0c\x60\x8d\x0a\xb3\xe3\x97\xeb\xe6\xf0\xb6\xd5\x52\x1c\xc8\x73\x3f\x3a\x35\x51\xd3\xf6\xbd\x5b\x6b\xa4\xad\xa8\xad\x82\x84\xc9\x69\x65\x15\xe4\x6b\xd2\x6e\x69\x48\x9b\xdd\xa8\xca\x39\x04\x46\xd4\x64\x03\x9c\x76\xee\x0c\xb6\xd3\xf0\xf3\x73\xdd\x15\xdf\xce\xe5\xd2\xf0\x38\x46\xe6\xa6\xfb\x1b\x2d\x6b\x56\x34\x6f\x6e\x51\x5e\x4c\x6f\x9d\x9b\x1c\x15\xb8\x73\x12\x79\xe3\x8d\x35\xcd\xb4\xb2\xb9\xbe\x55\x84\xc1\x91\x4b\x25\xe5\x66\xc8\x1a\x67\x26\xdc\x54\x9d\x0e\xc2\x23\x56\x07\xa6\x3b\xd3\x20\x58\xf2\x1a\xe2\x80\x4f\xa3\x02\xd1\xe9\x1b\x8b\xf3\x39\xfd\x1b\x82\xb4\x33\x9f\xd3\x5b\xda\xff\xc6\x22\x5d\x57\xc3\x97\x55\xe9\xe4\x73\x7a\xd2\x1e\x9d\xaa\x25\xcd\x53\xf8\xf9\x67\x8f\xff\xfe\x28\xa2\xcb\x31\xd6\xc5\x34\xcd\x26\x16\x22\xd2\x3c\xae\x9c\x23\xd3\x44\x45\xaa\xe8\x51\x2e\x8c\x6e\x18\x6f\x55\x21\xd6\x60\xb4\x90\x80\x05\x7a\xa1\x9a\xc0\x7d\x9b\xd4\x54\x49\x1f\x27\x8f\x54\x23\xaf\x26\x0d\xf6\xf9\x8e\xc1\xe3\xf6\x31\x91\x87\xc4\x5c\x23\x99\x60\xea\xea\x0b\xc7\x72\xf9\x21\xf2\x55\x63\x58\xe7\x32\xb9\x4e\x43\x1d\xc2\xf8\x0d\x36\x3d\x1c\x51\x5e\x42\xac\x05\x6b\x4f\x2d\x11\x5b\xb6\x71\x6c\xa0\x39\xa5\xba\x63\x36\x86\xe6\xe1\x5a\x5e\x91\x20\x98\xf7\x07\x80\x7a\x5b\x88\x2b\x74\xbf\x72\xc9\xb5\xd1\xed\x62\x07\xa5\x65\x97\xe4\x9c\xa8\xee\x53\x52\xc9\xeb\x60\xd5\x25\x75\x97\x16\xfe\x24\x2a\x3b\xf9\xe3\x01\x60\x5d\x46\x33\x1f\xfa\x47\x35\xd5\x7a\xf5\xd3\x97\xee\x5c\x39\x7a\x0d\xc7\x2a\x5f\xf9\xea\xcb\xed\x16\x44\xbf\x68\x9e\xb0\x64\x8e\x6c\x2d\xd4\x99\xd7\x3c\x7d\x38\x9a\xba\xb1\x13\xc7\xdf\xea\x17\xef\x31\x81\x3f\x0c\x00\x83\x8e\x6c\x95\xc3\x84\xae\xae\xd0\x8d\xbd\x10\x07\xec\x9d\x85\xa3\xe1\xf1\x09\xae\x97\x82\xfd\x34\xda\x61\x6e\xea\xef\x47\xb6\x16\xc3\x4f\xb6\xf3\xc4\xcf\x38\x7b\x02\x3c\x7e\x95\xca\x8c\x4a\x73\x15\xb0\x18\xaa\x82\x51\x09\x4f\x4c\xd9\x06\xff\xfa\x00\xb4\x4c\x5e\x83\xf4\xd9\x7b\xac\x99\x2a\x8b\x79\x3b\x9a\xcd\x9a\xd4\xe5\x38\x47\x77\x85\xd3\xaf\x20\xdf\x6a\x88\x30\x85\xed\xde\x1d\xa8\xc3\x41\x73\xf5\xfb\x18\x0e\x1a\x6b\xed\xf8\x83\xe9\xec\x5e\xf0\x3d\x3c\xba\x5a\x20\x96\x63\xe4\x85\xe6\x4b\x5c\x9c\x55\x2d\x13\x50\x8a\xda\xc8\x09\x3d\x03\x55\x30\x53\xf8\x90\x0c\x62\x61\x35\x25\x3d\x72\xda\x97\x3a\xcf\x12\x42\x46\xa1\x01\xe9\x89\xe3\xbe\xb4\x92\x4a\xba\x94\xf8\x64\xbf\x96\x98\x4c\xa7\x80\xb0\x38\xbd\x7e\x4b\xba\xe9\x1d\x22\x0d\xef\x5e\xe4\xb7\x12\xd8\x9e\xb5\x4b\x54\xc5\x57\xa2\xcd\x97\xab\x06\xab\xfc\x1b\x7c\xc6\xfe\x63\xfc\x74\x78\xa6\x7b\x94\x78\x26\x76\x36\x2a\x04\x71\x45\x89\xb4\x71\x4d\xba\x9f\x0d\x74\x99\x16\xaf\x47\x0f\x2b\x92\x89\xdc\xa1\x50\x42\x61\x8b\x9b\x81\xba\x77\x6b\x71\x2d\x34\x7a\xd9\xab\xc4\xdf\xee\x1e\x77\xab\x0e\x3f\x28\xdf\xfa\xe2\xb9\xa7\x5d\xb7\x8c\x1c\x2e\xdc\xe8\x80\x8f\x67\xff\x77\xff\x3b\xff\x48\xcc\x65\xb5\xff\x1b\xe6\x65\x51\xa2\x57\x60\xec\xca\x3a\xa1\xd3\x1e\x68\x05\x74\xa2\x48\xe8\x47\x43\xb6\x45\x06\x76\x33\x4e\xf3\x08\xb3\xcf\x3e\xbf\x4c\x58\x46\x02\xab\x68\x9b\x03\x9b\x53\x2a\xa5\xcf\xe2\x2a\x12\x34\x31\xb3\x80\x5a\xf4\x63\x71\x81\xef\x41\x0f\x90\x36\xe0\xcd\x0a\xcc\xba\x74\x2d\x43\x7d\x13\x83\xce\xff\xcc\xc3\x46\x3f\x48\xe2\x6c\x86\x60\x7f\x0d\x20\xac\x84\x6f\x06\x5a\xc2\x0d\x17\x57\xe4\x2e\x63\xb6\xa0\xff\xa4\x98\x58\xf9\xca\x03\xfa\xac\x8d\xb6\x49\xd9\x2e\xa9\xfb\x72\x2d\xdb\xd8\x23\x69\x28\x37\xe2\x87\x92\xcd\x10\x9c\xb1\x09\xd1\x33\xf9\x54\x14\xd3\x56\xe8\xd0\x7f\x04\xa5\xa0\x65\xad\xbc\x6c\xea\xc3\x05\x61\xfc\xeb\x46\xe6\x3e\xa8\xca\xd9\xad\x36\x40\xf7\xf3\xc4\xaf\x73\x37\x45\xe4\x20\x7a\xba\x7c\xa9\x2a\x0e\xca\x39\x6c\xa7\x8d\x89\xd9\x6e\xf2\x3c\xc1\x8c\x3b\xe4\x64\x91\xf9\xad\xf1\x85\xe5\xf3
\x90\x6b\x49\x89\x22\x5e\x34\x55\x33\xbb\x7c\x3a\xaa\x39\xdd\x1e\xd4\x89\xf9\x8e\x17\xf9\x21\xfd\xbd\xf6\x5c\x10\x45\x53\xa5\x9f\x1f\x1a\x63\xdc\xbe\xd9\x7a\x0c\x35\x25\x63\xe1\x5c\xe2\x31\x29\xd5\xba\xc6\x7e\x3e\x2c\x7d\x67\xea\xd5\x4e\xc1\x48\xdf\xdf\xd7\x4b\x29\x93\x28\x1b\xfa\x02\xaf\xba\xb3\xbc\xe9\xe8\xec\xb8\x57\x17\x33\xf7\x4b\xdd\x1a\x87\x6b\x05\xc2\x03\xc5\x63\x23\x90\x4c\x11\xd5\x8b\xfe\xaa\x6f\xdd\x69\x09\xf4\x8b\xe3\x6b\x3e\x86\x8b\x1c\xcc\x41\xfb\x8d\x70\xd1\xad\x37\x76\x20\xc9\x2b\x90\x23\xad\xdd\x22\xa0\x89\x3b\xdf\x7a\x98\x72\xca\xe4\x53\x4b\x26\x31\x31\x72\x97\xeb\xf6\x29\xc6\x8c\x54\xa6\x83\x3a\xb6\x55\x22\xa8\x6d\xb3\xae\xcd\x55\x76\x11\x0f\x4a\xfb\x8f\x84\xf8\x85\x40\x38\xae\x48\xa7\xe7\x37\xf0\xab\xb6\xfd\xda\x3e\x44\xc1\x4b\x14\x61\x30\x79\x3a\xa7\xda\x47\x9f\xab\x61\x1d\x81\x19\xba\xae\xb4\xf0\xc1\xb3\xe8\x7e\x34\xfc\x9c\xec\xd3\x7f\x40\x41\x64\x8e\x31\xf1\x8a\x0b\x26\x52\x3c\xb4\x97\x4e\xa7\x76\x5b\xff\x73\xda\xa3\x05\xea\x49\x39\x86\xee\x9c\x9d\x74\x8e\x30\x71\x04\xcf\xda\x03\xf3\x39\x9b\x98\xa4\x6a\x48\x45\xad\xf0\x3f\x63\x9d\x58\xcd\x3e\x01\xe7\x74\xd9\xff\x1f\x76\x2d\xdf\xcc\x66\x72\xee\x0e\x6c\xb6\x22\x92\xd0\x82\x37\x9a\x44\xf4\x31\xf9\xc5\x77\x58\x52\x4d\x09\xe0\x9d\xc6\x89\x80\x9c\xa8\x19\x23\xa5\x40\x70\xe9\x9e\x71\x5e\xe0\x57\x55\x02\x4f\xca\x65\xee\xd1\xcc\x80\x3f\xde\x7e\x0b\x9f\xbc\xa8\x11\xc6\xdf\x54\x25\xff\x89\x58\xd7\x58\x0e\x00\x93\xea\xc9\x32\xbc\x93\x69\x15\x12\x10\xf3\x29\xcb\x7c\x39\xc0\x27\xb5\x5c\xad\xa6\xf7\x3e\x7b\x79\xe5\xc7\x66\x78\x8d\x17\xc0\xc8\xde\xc8\x51\x58\x5b\x22\x32\x76\x4a\x95\x2b\xc2\x1d\x7c\x9b\x7b\x1d\x93\x3d\x9c\xba\x35\xa7\xf4\xb7\x44\x77\x6e\x00\xa2\x7c\x07\xd6\xa6\xb1\x16\x6f\x09\xac\xd2\x1f\x98\x68\xd1\x93\xee\x76\x8e\x7a\xa9\x35\x08\x2e\x97\x61\xe0\x09\xa0\x19\x95\xb7\x62\xcd\xdf\xe4\xb7\x59\x9b\x87\xe7\x22\xd6\xd5\xcd\xec\xa7\x8a\x67\x2a\x1f\xb9\x91\x6c\x30\x4d\xb4\xd5\x86\x6b\xa5\xc0\xb5\x6a\x53\x98\xa4\xb9\x97\x4b\x4f\xe7\x69\x0d\x32\x72\x90\x68\xed\x53\x1e\x03\x8d\x2b\xc1\x17\x1e\x0e\x2a\xef\xda\x18\x9e\xf5\xa9\x5c\xd6\xb9\x9b\x36\x6b\xd1\x56\xfd\x3d\x16\x3e\xb8\x2d\x5f\xb8\xde\x4d\x9a\xfa\xa0\x63\x58\x87\xda\x12\x09\x15\xaa\x2f\x8b\x16\xd7\x3e\x57\xdb\x90\x9a\xd1\x71\x6f\x4a\xd6\x64\x80\xac\xa4\x7f\x13\xa3\x70\x95\xd7\xa0\x65\x79\xdb\x06\xdb\xe6\x66\x77\x2c\x27\xa6\x3a\x83\xe7\x5e\x1a\x9c\xd0\xe1\xeb\x05\xc2\x9b\xdb\xf9\xdc\x04\xc6\xa2\x1a\xb3\x3b\x8e\x5f\xae\x1f\x73\x00\x77\xca\x77\xd5\x9f\x3e\xdf\x25\xe1\x09\x85\x21\x6e\xfe\x72\x30\x7c\x42\x06\x7c\x03\xb4\xfc\x2a\xaf\xa3\x97\xce\xb0\xbf\x89\xfa\x9e\x77\x15\x87\x0f\x0d\xa7\x09\x36\x08\xaf\x6f\xc0\x9d\xa6\x00\x7c\xb7\x97\x6c\x3c\xf8\x5d\x9e\xfd\xf7\xa8\x39\x25\x8c\x75\x59\x17\xf6\x2d\x87\xa9\x6d\x2c\xe5\xf8\x75\x4e\xee\x68\xe6\xcd\x1f\x1b\x27\x42\x97\x34\x74\x07\xd5\x6a\xe1\x5e\x1a\x19\x00\xd6\xce\x57\x7b\xf9\x37\xa2\x51\x1b\xc0\x85\x56\x4e\x35\xed\x64\x7c\xb1\x8c\x21\x3d\x10\x22\x40\x4b\x7b\xe6\x2b\x65\x08\x51\xa2\x26\x06\x23\x65\xaf\x94\x42\xdb\xed\xfa\x3a\x69\x4c\xe4\xb3\xd9\xbd\x3e\xea\x2e\x1d\x1d\x98\x7d\xd8\xd9\x9d\x26\x84\xdc\x68\x9d\x21\xd8\x12\x41\xe4\x43\xc4\x70\xdd\xa8\x70\xab\x9d\x75\x12\x53\x77\xae\x4e\x45\x4b\x12\xe2\x1d\x51\xbe\x6b\xde\xe4\x92\x31\xec\x9c\x19\x96\xf3\xad\x6e\x8e\x63\x10\x8f\x20\x5c\x00\xd8\x05\x85\x79\x0d\x37\x58\xf5\x39\xa4\x64\x8e\x6a\xe3\x92\x9d\xaa\xa5\xbe\x60\xcc\x84\x05\xe8\xfe\x15\x37\x1e\x29\x6f\x2f\x42\x74\x9b\xfd\xd7\xef\x61\x63\x9a\xe4\x6b\x65\xb2\x95\xb4\x24\x6e\xff\xbb\x6c\x2f\xab\x40\x21\x55\x1c\x32\x58\x01\x6e\x3c\x51\xbf\x8a\xff\x24\xa2\xb4\x39\x65\x6a\x85\x6e\x4a\xb0\x67\xa8\x1b\xbb\x83\x2e\x71\x26\x00\x23\x2a\x9f\x1
5\xf3\x4d\x7e\xcc\xcf\xde\x34\xeb\x9f\xfc\xc9\xb6\x53\x7a\xc1\x3a\xb6\xb9\x5c\x61\xb1\x61\x6f\xd6\xd8\x39\xeb\xe4\x59\x92\x3b\x5a\x41\x79\x48\x6a\x10\xf7\x34\xfb\x65\x18\x5d\xf5\xcc\xe9\x62\x9d\x96\x2f\xc5\x43\x4d\x0f\xd1\x4b\x26\xfb\x98\x2f\xa3\x55\x69\x21\xe9\xa0\x00\x39\x86\x7d\xac\x90\x85\xe3\x89\xb0\x19\xac\x6a\xe5\xcd\x9e\x95\xb3\xab\x31\xf7\xe0\xcc\x80\x52\x27\x34\x09\x8f\xd2\xae\x1e\x2f\xad\xee\x38\x95\x5c\x34\xec\x5a\xf3\xf2\x63\x8f\x2b\x1c\x0f\x4e\x92\x39\x66\x8f\x16\x6b\xda\xba\x90\x74\x68\x2a\x91\xa3\x99\x84\xe9\x9a\xc2\x12\x4a\xc2\xf2\x80\x23\xc9\xf3\x3b\x91\xd5\xa0\xb2\xcc\x06\x51\xae\x9b\x4b\x46\x02\xbe\xf4\xe6\x6b\x99\x3e\xb9\xd5\x52\xe5\x8d\xfa\x81\x68\xd6\x1b\x31\xcf\xea\x54\x22\xc1\xb6\xef\x55\x89\x63\xfc\x46\x5e\xc0\x0f\xf1\x22\xc3\x40\x4c\xc7\xc7\xe5\xa6\x54\x2a\xb7\x53\xa3\xf1\x63\xb3\x11\x6c\xb8\x92\x2a\x3b\x3a\x34\x68\xc2\xc6\x2c\xf1\x8e\xd7\x7b\xcd\xfa\x39\x50\x55\x66\x65\x8e\x96\xd3\xf4\xa3\xd4\x8d\xce\xb8\x94\xcb\x3f\x45\x00\xa2\xda\x68\xb4\xfb\xc9\x38\x7e\x79\xdc\xd4\xbf\xa5\xc0\x78\x13\xdc\x11\x6e\x97\x3c\x1c\x72\x18\x4c\x46\xed\x66\x0c\x7e\x00\x80\x25\xda\xe5\x4b\xfe\x5c\x43\x47\x9a\xa4\xaf\x98\x14\xf0\x44\xf3\x5a\x4a\xeb\x90\xe9\xb8\xac\x46\xdf\x3e\xb8\x78\xb6\xf0\x36\x1b\xa7\x54\xb1\x61\x6b\x8f\xba\x25\x49\xbf\x72\x72\x53\xad\xeb\xd4\x52\xdb\x62\x1a\x35\x86\x6e\x3f\x99\x7d\xc1\x40\xa4\x1b\x83\x76\x9b\xba\xff\xe2\x87\x07\x84\xf1\xa7\xb7\xaf\x4b\x10\xfc\x58\xe1\x7d\x1e\xa5\x38\xf0\xc4\x61\x85\x5c\x71\x0c\x5a\xd7\xfe\xbe\x9d\xa3\xc0\x45\xbf\x7d\x23\xcb\x25\x34\x63\x2c\x18\x66\x6c\x10\x1b\x6b\x3e\x01\xca\xc6\xc3\xc5\x60\x6d\x61\xff\x23\xec\x59\x70\x10\x65\xba\x4f\xd6\xa6\x48\x6d\x30\x00\xcf\x2a\x2c\x28\x9c\x63\x72\x6e\x44\x85\x88\x90\x1c\xbc\xb9\x9a\xb8\x7a\x9c\x25\xee\x73\x08\x4b\xfc\x1d\x9c\xbf\x04\x15\xb0\x76\x58\x9b\x20\xfa\xaf\xa9\x7c\x09\xc5\x2f\x49\xc0\xa6\xa9\xfd\x92\x5d\x85\x88\xa6\x60\x71\x99\x23\xba\x06\x6e\xb1\x1b\x79\xd7\xc9\xdf\xea\x2f\x72\x4b\x5e\xef\x0b\xca\x0f\xaa\x84\x18\x54\xf9\x06\x97\x58\xa1\x42\x7f\xb5\xfb\x70\x90\x5b\x8f\xe6\x16\x67\xa9\xaa\x54\x78\xd9\xc7\x75\x75\x80\x6b\xe6\x2c\x06\x0a\xab\xbe\xb8\x73\x40\xe5\x92\x65\xde\x25\x28\x17\x3a\xc4\x10\xe9\xae\x56\x41\x7c\x62\xae\x13\x1d\x8c\xfe\xa2\x13\x88\xa4\x49\xd6\x65\xb2\xc1\xb1\x81\x7e\xd9\x6e\x76\x15\xaa\x82\x80\xeb\xd9\x84\xe4\x16\x81\xf0\x36\xc8\xe9\x75\x20\xa1\x18\x68\x07\xbd\x93\x0c\x05\xc4\x87\x57\x74\x26\xbd\xd5\x9e\x70\xa7\xa5\xf4\xf9\x10\x1b\xe3\x7b\x9f\xf3\x05\xa4\x0c\xa7\xe7\x98\xac\x4d\x01\xd7\xaa\x42\x6a\x8c\xce\x95\xe1\x52\x3e\xf0\x3a\x14\xc4\x68\x07\x45\x62\x95\x41\xc0\x5a\xd1\x64\x38\xe7\x48\x57\xe8\x2e\x68\x6c\x00\x84\xfc\x8d\xc2\xdd\xe6\x40\x94\x57\xe5\x57\xb4\x7d\x55\x65\xab\x87\xa4\xeb\xf5\xe1\x63\x26\xe4\x67\x19\xf6\xa7\x50\x8e\xed\xb6\x52\x8b\xd8\x55\xa7\xcb\xbb\x3e\x0b\x58\xd8\x8a\x4b\x2c\x37\x9a\x48\x71\xaa\xc2\x79\xbf\x45\x77\xa2\xe4\x37\x15\xb5\x5b\x22\x21\x87\x21\x2d\x2d\x38\x3c\x37\xc3\xec\x34\x8e\x00\xe3\xf2\x03\xe3\x8b\x73\xd5\x75\x33\x46\x62\xf3\x17\xf1\xa8\x7c\x5c\xf3\xd2\xeb\x2a\xc6\xee\x54\x4a\x33\x55\x01\xe2\x66\x84\x66\x85\x2d\xcf\xa7\xc7\x68\x55\x04\x8d\x95\xed\x1b\xc1\xb8\x0f\xfc\x40\xe0\x41\xa9\xdd\x77\xfb\x5f\xb8\x30\x45\x4b\x65\xb2\x96\xf1\x05\xa6\x43\xab\x6e\xb6\x40\x87\x8a\x54\xc8\x1a\x31\x9c\xa3\x80\xae\xf5\x42\x8a\xd9\x4a\xc9\x2d\x59\xfd\x42\x62\x2a\x9a\x68\xa8\xbb\x44\x7c\x4b\x9f\x60\x56\x8e\x9e\x99\x8d\x92\x97\x41\x8e\x63\x34\x01\x60\xab\x8a\xeb\x90\x2e\x30\xdc\x1d\x9b\xc0\x54\x18\x61\xae\xb7\xb1\xeb\xaa\xfb\xc9\x3b\x20\x05\xc8\x6a\x34\xdf\x0d\xd8\x67\x25\x5e\xde\x54\x0a\xfc\x4a\xed\x2b\x76\x83\xa2\x75\x74\x74\x74\xc7\x9c\xb9\x52\x00\x3d\x2f\x
b8\x4c\xe2\xb0\x82\x46\xb9\xd7\xb0\xcc\x35\x09\x02\xf6\x9c\x94\xec\x3f\xcb\x76\x56\x79\x86\xb4\x1e\xc8\x5b\x2e\xb7\x34\xb8\x34\x7e\x39\x72\x5b\x9a\xa6\x7e\x24\xe3\x9e\xa7\xaf\x06\xcf\x76\xfa\x4d\x13\x55\x03\xc5\xd4\x80\xaa\x4b\xf3\xca\xc3\xb3\x2c\x00\x8d\xbd\x5c\x19\xd9\xdf\x35\xd5\xf1\x65\x01\xb5\x6c\xba\x87\xf1\xce\xa4\xb6\x22\x94\x60\x79\x9f\x5c\xaf\xc8\xb2\xee\xc1\xed\xbd\xf7\x25\x19\xe6\xb6\x81\x95\x83\xd5\x9d\x08\x66\x5c\x9c\x16\x76\x23\x8a\x6c\x67\x45\x55\xeb\x27\x94\x67\x6e\xb5\x7e\xd6\xb5\x5f\x2f\x28\x61\x6f\x2c\xf9\xbf\xad\x6e\x69\xe7\x50\x30\xcc\x48\x06\xbb\xca\x30\x26\x34\xc8\xb3\xf4\x52\x14\xf3\x22\x87\x5d\xa5\xbe\xc4\xa6\x3a\x20\x43\xb5\x8f\xb8\x59\x6d\x8e\xe4\x1e\x24\xe5\x01\xc2\xf2\xa9\x41\x02\xcc\x8d\x54\x2e\x5c\x37\x84\xd3\x0a\xfc\xee\x47\x12\xcf\x04\xde\xe8\x1f\xfe\x15\xbd\xfe\x66\x83\x40\xb7\x96\xe1\xaa\xb9\x5a\x05\xe7\x35\x56\xce\x97\x1b\xad\x25\xb3\x09\x4c\x0d\x32\x43\x35\x31\x6c\x3d\x6f\x2f\x23\x38\x95\x5a\xf8\xcf\xc2\x11\x9a\xf1\x91\xad\xa0\x95\x12\x2b\xc8\x2f\xb6\x01\x5f\xcc\x1d\xbf\x29\xc0\x1d\x8d\x78\x28\x9f\x66\xb4\xaa\x78\x39\x70\xb7\xf4\xf6\x91\xbb\xf0\xb3\x5b\x0f\x9b\xa6\x0a\xbb\xf1\x61\x4b\x20\x84\xb0\x49\x5b\xcc\xb3\xe5\x64\x1e\xbf\x05\x90\xf4\x59\x4b\x63\x52\xce\xe2\x47\xc4\x8b\x42\xa9\x0c\x1d\xeb\xf1\x9a\x0a\xe9\x81\xfe\x56\x65\xcc\x76\x0f\xb2\x5f\x31\x9e\xec\x6b\xad\x87\x5a\xfb\xb8\x76\x16\xcd\x8b\xb0\x66\xe1\x91\xd5\x54\xce\x7f\x16\x99\x97\x2d\x25\xfc\x78\xb2\xbb\x64\x41\x27\x90\xf2\x5f\x70\x1e\xb2\x22\x5d\x7a\x94\x2d\xeb\xb4\xc9\x6d\xc6\x99\xa3\xe8\x09\x1a\xe4\xfd\x18\xd0\xc9\x6e\x12\x26\xf0\x1b\x79\x7d\xd3\xd0\xc7\xd3\x9f\x12\xf1\x46\x51\xea\x9d\x40\x97\x13\xed\x3a\x98\x20\x35\xa7\x07\x3d\x51\xf3\xd8\xa7\x27\xf8\x65\x48\xd5\xd6\xee\x61\x3e\xf5\x7d\xb3\x35\xdc\x08\x64\x0f\xb7\xe2\x8d\xf5\xb6\x5d\xa0\x4a\xe9\x61\xee\x72\xff\x77\x48\x95\x9b\x5c\x0e\xb4\x21\x0e\x17\x1c\x6e\xeb\x45\x31\x01\x9e\xd0\xc6\x6c\x64\x68\x57\xb7\x1b\x5a\xae\xdb\xd8\xbd\x4b\xf1\x9c\x7a\xde\x44\xcf\xaa\x56\x7c\x49\xad\xf9\x0a\x0d\x7a\x6d\x76\x97\x4c\x04\x3b\x73\x80\x79\xf6\x4b\x8e\x43\xa7\x8b\x43\x47\xe2\xdd\x4d\xaf\x4e\x1f\xc4\x1c\x6b\x1b\xb7\x7b\x54\x83\x98\xc0\x5d\x98\x9a\xd9\x61\x4c\xc9\xad\xf4\x4b\xf5\x61\x1d\x36\xd8\x61\x1c\xa2\xde\x38\x27\x3f\xea\xf4\x0b\x72\xfc\x97\xdb\x02\x1f\x50\x54\x3f\x97\x81\x32\x41\xc1\xf8\x4f\xae\xb7\xe7\x6c\xc3\xbb\x33\x94\xa2\x0e\x9c\x43\x2b\xe2\x01\xee\xf0\xd8\xbe\xd1\x9f\x0c\x88\x55\xa7\x5d\x65\xd3\xa5\x23\x7d\xac\x23\xd6\x5f\x2e\xba\xd4\x77\x7c\x68\x68\xba\x9e\x3a\xa6\x20\x87\x2c\xb5\x74\xd0\x3e\x7c\x9a\x81\xf9\x16\xd2\xa8\x83\x2e\xae\xfd\xab\x40\x04\xf8\x4d\xf9\x94\x8e\xaa\xd8\x4c\x77\x71\x8a\x4a\x2a\xce\x43\x4b\xf2\x88\xfa\x87\x30\x53\x4d\x06\x83\xe4\xcb\xe9\x1d\xca\x01\x74\x94\xab\xdd\x2c\x8b\xba\x52\xa6\x39\x26\x4c\xc1\x33\xc4\xdf\xcd\x9f\x06\x29\xc6\x9b\xee\xb1\x09\x13\x42\x91\x22\x6e\x2c\xbb\x2f\x1a\x0c\x62\x12\xc4\xf1\xd2\x06\x7b\x84\x12\x39\xa0\x73\xa6\x84\x47\x57\x77\xb4\x38\x46\x39\x4d\x1e\xea\xbc\xff\xe4\x20\x6c\xfd\x08\x57\x80\x8c\x4f\xd5\xfa\x3e\x2e\xe7\xc5\xb8\xda\x9d\x81\x83\x92\xdc\x7a\x02\x3b\x8f\x8f\x8a\xd7\x8c\x7a\xc5\xb9\x5f\xde\x66\x5d\x8f\x50\xa7\x49\x81\x83\xdf\xc2\x24\x62\xe9\xd6\xe4\x63\x4f\xac\xd2\x98\xb8\x1e\x43\x25\x37\xdd\x4d\xd1\xbf\x83\x60\xa8\x9e\x17\x07\x06\x87\x8e\xb7\x25\x58\x8b\xe2\xe3\x68\xdf\x75\x31\x71\xe8\xa4\x3d\x13\xfe\x84\x15\x52\x90\xac\x20\xd7\x8d\x8d\xce\x42\x44\xd7\x8f\x97\xab\xa7\x49\xf2\x2f\xec\x6e\x41\x67\x0f\xd1\x36\x00\x76\x03\xf4\xd7\x06\x4b\x9a\xd4\x68\xd7\x84\x80\xff\x99\x17\xd6\x9d\x15\x48\x45\xc9\xda\x3d\x1c\xb3\x19\x76\xae\xbc\xd1\xb2\x49\x16\x95\x83\x34\x47\xb9\x5c\xa4\x92\x60\x06\xf5\x58\
x27\xbd\x56\x59\x64\x03\x72\x0f\x78\xf4\x76\x30\x77\x95\x18\x69\xa7\x55\x8a\xfa\x35\x91\x31\xa5\xee\xd8\xd7\x74\x20\x68\xd3\x49\xd4\xa2\x10\x44\x7f\xe5\xc0\xa1\xa2\x62\x55\x1a\xf9\x18\x11\xff\xd5\x58\x40\xec\x95\xa4\x42\xa7\x2f\xf7\xd8\x3c\x30\xa6\x51\x99\x91\xb9\x28\xcc\xdc\x37\x3a\x44\xc7\x3b\x06\xc9\x15\x97\xd4\x80\xbb\x41\xbf\xcb\x6d\x76\xd3\xa1\x7b\x64\x69\x7e\xd8\xb8\xe3\x0c\xb4\x60\x59\xb4\x29\x78\x92\x79\xd6\xf9\x3b\x5a\x22\x67\xff\x22\x94\xac\x75\x30\xb5\x2f\x55\x4f\x17\xd2\xa9\xba\x9a\xc5\x46\x51\x4c\x48\x73\xec\xf4\xaa\xa2\xea\x3b\xfd\xe5\x92\x8e\x29\x96\x4c\x0c\x27\x3b\xcc\x3c\x2e\x08\xf8\xfd\x74\xe3\xba\xd4\xbc\xe3\x1c\x31\x41\xa5\x5c\xd6\x98\x59\x93\x8f\xf5\xfc\xfd\x9d\xdc\xa3\xd8\x00\x25\x34\x0d\x2a\x40\xb2\xf6\x44\x64\xac\x98\x9c\x0b\xc3\x5c\xc8\x53\x0b\x69\x66\x5f\x4c\x06\x31\xc7\x1b\x05\x0b\x89\x44\xb0\xec\xbb\xdf\xf7\xd5\x4f\x5b\x3e\xbc\x20\xad\x62\x13\xb0\x54\x4a\x12\xad\xe6\x8a\x2f\x16\x24\x2f\x19\x2f\x34\x27\x16\x67\xbf\xad\xf0\x92\x93\x38\xe4\x4f\x98\xd6\x6e\x26\x8e\x3f\x7b\xe9\x4c\x7c\xf3\x13\xb8\xa3\x73\xc2\xc3\x0f\xa8\xb3\x6d\xc2\xfa\x70\x2b\xdd\x61\xa5\x24\x04\x78\x1c\x84\xdc\xa8\x0b\xed\x61\xd4\x81\x68\xa4\x53\xd0\x9d\xe2\x9b\xb0\x7a\x43\x97\x63\xaf\xbb\xae\xd2\x31\xa5\xa1\x6e\x35\x92\xc9\xdc\x17\x56\x89\x2d\xf1\xc2\x44\xff\x48\x99\xa0\x1c\xdd\xe6\x2a\x8c\xc6\xc5\xa1\xa9\x47\x41\x67\xcb\x73\xc7\x6a\x39\xbe\xc6\x3e\x10\xbc\xbc\x19\xa3\x5b\x9b\x83\x12\x05\xe6\x0a\x8a\x59\x7a\xee\x64\x01\xb8\xb6\x52\xd5\xaa\x9a\x8e\x4a\xbd\x4f\x1f\x69\xaf\xc7\x40\x18\xa4\x06\xda\x31\xa0\x9a\x69\x0b\x68\x21\x6c\x22\xa4\x79\x62\x21\x6b\xf2\x8f\x55\xe1\x65\x92\x96\xb8\x0d\x21\x9a\xc4\x8d\x07\xf5\x70\x95\x71\xda\x2a\x98\x03\xed\x92\x86\x11\x05\xf0\x6b\x76\x50\x9b\xd5\x3c\x6f\x71\xd1\xbb\x33\x75\xb0\xea\x69\x2b\x8c\xe8\xd3\x05\x30\x0e\x52\x99\x89\xe7\xa5\x54\x70\xb9\xda\xbe\xe4\x38\x4f\xbb\x07\x49\x11\xc7\x33\x9e\x61\x3e\x85\x3b\x2f\xaf\x48\x2d\x7b\x41\x2f\x6e\x91\xc8\xde\xfe\x67\xc3\x64\x67\xaf\xe8\xce\xd4\x2d\x68\x25\x11\xad\xc7\x50\x7c\x64\x33\xc7\xe7\x6d\xb4\x4b\x46\xfe\xe5\x9c\xb0\x8e\xd0\x5f\x0a\x2d\x69\x2a\x96\xcb\xb5\x15\x7d\x70\x03\x1d\xd6\xe9\xd4\x0b\x94\x45\x9d\xfa\x47\x64\x10\xa5\x42\x3c\x8a\x3b\x3d\x72\x9b\x5a\x14\x95\x93\x36\x1b\xd6\x95\xff\x90\x09\xc9\x3e\x0a\xcb\x62\x41\x01\xb8\x14\x75\xcc\xe8\x3a\x1b\x57\x76\x26\xe3\xcb\x08\x00\x38\xf5\xaa\x06\xb4\x9c\xea\xb7\x4c\x53\x6a\x74\x28\xeb\x1c\x9b\xa0\xe4\xcd\x1a\x33\x7d\x14\xcb\x9e\xc2\xb5\x3c\x47\x8b\x75\x3e\x91\x09\xa8\x03\x04\xf2\xe6\xc3\x5c\x6a\x40\xa1\xa6\x51\x60\x10\x1d\xcd\xc6\x13\x68\x29\x35\xb4\xbb\xcd\xbf\x28\xa4\x6c\xd8\x7b\x1c\xaf\xbd\x09\x43\x50\xfc\xf3\xf1\xb9\x6a\x5e\x54\x72\x54\x43\x32\x6d\x8a\xdd\x8d\xd2\x06\x7c\xc0\x4b\x43\x1b\x90\x22\xb9\x3a\x01\xc3\x46\xbe\x3d\x89\x5d\xd6\x96\xda\x73\xc8\xe0\xb7\xa1\xf9\x80\x48\x6e\x81\xa9\x72\xd0\x10\xbc\x59\xa2\xea\xb2\x5c\xfc\x1f\x03\x47\x74\x11\x00\xf9\x5b\xfd\x8c\x12\x19\x79\x7f\xe2\x24\xf3\x74\x67\xd3\x57\x98\x1a\x2b\x6f\x03\x88\x10\x94\xdd\xb2\x7d\x9e\x90\xfb\xe5\x12\xe9\x6b\x34\xfb\xde\x3a\x82\x4b\x6a\x67\x07\x73\xcc\x58\xff\xb0\x61\xf0\x4e\x3a\xfe\xb2\xfd\x7d\x80\x46\x10\x0e\xa7\x5a\x01\x9a\xd2\x08\xc1\x32\x1f\x51\x7b\xa8\x52\x9d\x15\x1a\x5b\x4e\x26\x4d\x04\xf7\x6a\xb2\x34\x5d\x33\x51\x3a\xea\x0a\x73\x9c\xd7\xcb\xd7\x74\x1f\xfc\x87\xcb\x70\x49\xbf\x56\x5b\x11\x68\x9a\x6f\x81\x35\xb0\x7b\xe4\x7f\xe7\xf8\x3b\x87\x7f\x06\xad\x64\x0d\x97\xb7\x7f\x2e\xf2\xf1\x0a\xf1\x10\x23\x3b\x76\xc9\x45\x60\x5d\x87\x52\x30\x94\xeb\xd3\xcc\xec\x29\xc2\x7e\x9e\xca\xb9\x1c\xae\xe5\x7d\x5c\xc8\x9c\x20\xd8\xad\xfc\x3c\x8e\x5b\x00\xec\xcd\xa9\x91\x56\x5a\xc0\x02\x99\x1f\x5a\x3b\x75\xfc\xfd
\x1d\x43\xf4\x72\x12\xf2\xf8\x2e\xe5\xf0\xbe\x9c\x2f\x33\xbb\xbc\x01\x01\xe4\x6e\x50\x6f\xeb\xc9\x56\x04\x2e\xf7\x8e\xc8\x49\x58\x89\x2b\x99\x9c\xa6\x41\x97\xd8\xf0\x87\x4a\x12\x8f\x72\xf1\xf3\xd2\x47\x03\x5c\xfb\x81\x5d\x47\x5c\xea\x67\x25\xdd\x6e\x63\x24\x83\x40\xe0\x4b\xbf\xd7\x82\x0f\xae\xf0\x8c\x44\xc5\x51\x57\x43\xc2\x65\x0b\xc3\x27\x22\x5f\x72\x46\x5a\xd2\x2f\x7d\x3f\xdd\xfc\xba\x59\x34\x47\x9e\x05\xcb\x68\x27\xad\xf6\xb4\x05\xef\x65\x86\x44\x8c\x4e\xf5\x6e\x25\x9a\x59\x47\x89\x4c\x88\x13\x91\x88\x00\x25\x13\x86\x0a\x9f\x6a\x6e\x90\x7e\x11\x8a\xa6\x49\x17\x92\xb9\x0e\x39\xd0\x95\x00\x2b\x49\x8d\x12\x2b\x2b\xd0\x9f\xd6\x8d\xfd\xf4\x08\xb4\xc3\xed\x7f\x5f\xae\x71\xda\x49\xa7\x5b\xb1\x32\xcd\x77\x33\x65\x74\xd8\x7f\x0f\xf5\x04\x00\xb1\xc3\x6d\xf5\xf2\x2f\xa7\x67\xb5\xf4\xa3\x26\xae\x9b\xec\x21\x54\xa9\x83\x07\x33\xb9\xa4\x42\xaf\x1c\x50\xc0\x64\xa8\xed\x32\x90\xe1\xb5\x68\x57\xfd\x3d\x4c\x72\x51\x5f\xe6\x76\xe1\x79\xfa\x58\xa3\x0a\x0e\xe3\x5b\x70\x43\x04\xdb\x74\x0d\xd8\x07\xbe\xc1\xe7\x58\xd9\xaf\x97\x55\x3c\x34\xa0\xdc\xba\x91\x8c\x82\x9a\x36\xe5\x0b\x2a\x71\xdc\xed\x7b\x3b\xc6\x4c\xee\xb9\x97\x83\xda\xe0\xf5\x30\x30\x52\x69\x78\xd2\x3b\xb7\x94\x11\x39\xc4\xa4\xb6\xfc\xf7\x13\x3b\x74\x0b\x18\x81\x18\x03\x7b\x9c\x77\x57\xe7\xe5\xa7\x76\xa5\xcb\xe6\x85\x6d\xde\x2d\xd8\xf0\x4e\xf3\xb2\xf1\xb5\x6b\xdf\xbb\x9f\xb9\x32\x87\x5e\x53\xe6\xb6\x71\xe3\xdf\x07\x15\xd0\x69\xe4\x0a\xfe\xe1\x31\xca\x20\x36\x07\x04\x60\xce\x29\x0c\xba\xd2\x89\x78\x7c\x4a\x0c\x66\x72\xc3\xdf\xc8\xcd\x67\x2e\x07\x00\xed\xf3\xa7\xf3\x00\xd8\x6f\xdc\xd2\x7f\xb7\x6f\xf2\x60\x9d\x00\x94\x20\xea\x33\x10\x77\x42\x5e\x96\x16\x15\xda\xa4\x70\x43\xaa\xce\xc0\x56\x0b\x56\xef\xf3\xa3\x46\x1c\x75\xac\x7c\x65\x0c\xe4\x50\x99\xd4\xea\x37\x03\x71\x23\xb7\x63\xa0\x6d\x2c\x2d\xfb\x5e\x5e\x05\x8f\xfe\x49\xd9\x83\xfb\xa6\x54\x39\x70\x8e\x5c\x03\xd9\xa2\xc9\xc0\xdd\xde\x16\xc8\x49\xbd\xb5\xef\x81\xd5\x45\xba\x1d\x1e\xd0\x04\x21\x89\x22\x2b\x57\x54\x0d\x1b\x4c\x36\xe8\x4a\x54\xf2\x84\x6c\x97\x6c\x74\xcd\x46\xc3\x30\xee\xf2\x26\xd8\x3e\xbb\x4d\xfc\x49\xc6\xdf\x72\x30\x66\xcb\x53\x2a\x5f\xf1\xdc\x6c\xb3\xed\x23\x91\xcb\xbb\x85\x01\xa0\x9f\x47\x87\xa3\x1b\x1b\x3c\xc6\xd1\xb7\x43\xe8\xa4\xb7\xc9\x9a\x7a\x17\xc1\xac\x29\xa5\x46\x79\xe7\x9e\x3b\xcf\x01\xd0\xb3\xde\x56\xef\x33\x55\xb3\x5d\x82\x83\xc3\xb9\x13\x91\x2c\x31\xd5\xa0\x5a\x66\x85\xe3\x39\x0a\x93\x70\x3b\xc6\xa3\xa3\x4f\xad\x12\x22\x64\xf0\x8a\xc3\xf6\x3a\x61\xc3\xd6\x15\x15\xe5\x50\xd9\x4d\x6f\x83\xc2\x44\xbc\xd5\xe2\x19\x53\xba\x38\xef\x4f\xe1\xdd\xb3\xf8\xf8\x07\x9a\x76\x3e\x61\xa1\xd0\xbc\xab\xe3\x7a\xbf\xe7\xf6\xda\xc6\x96\x2e\x0c\x17\xde\x7b\x8e\x31\x39\x90\x6b\xd6\x88\xec\xd3\x5c\x41\xc3\x31\x96\x47\xf3\xe2\xad\xda\x84\xd0\x0b\x0d\xa9\xd1\x1d\x6a\x55\x3e\x06\x91\x80\x22\xf3\xdc\xd4\xd1\x94\x2e\xab\x90\x23\x2f\x71\x4c\x70\x0d\x23\xff\xea\x54\xba\xbf\xb0\x62\xee\xa7\xfe\x3a\x8d\x65\x4f\x9d\xa7\x73\x64\xad\x1e\x73\xbc\xcc\xfa\x5b\x7b\xf4\x7c\x31\x3d\xd2\x4f\x7a\x27\xaa\xaa\xf1\x30\x36\x9a\xac\xc7\x96\xa5\x8a\xb7\xc8\x61\xb1\x7c\x88\xb9\x44\xa5\x27\x71\x1d\x27\x34\x0a\x6f\x6a\xf9\x3c\xca\x09\x1c\x7a\xa4\xe3\xf1\xfb\x52\x39\x88\x5c\x32\xdc\xbe\xe7\x51\x04\x78\x71\xff\xc9\x71\x9f\xab\xd1\xb6\x1c\xa1\xb4\x6a\x4d\x5f\x2f\x81\x99\x19\xab\x8d\x65\x9f\x27\xc8\x86\xe4\xc9\x2d\x1c\x48\xff\xf4\x74\x2e\x20\xc4\xa9\xb2\x8b\x43\xd6\xfc\x56\xb9\x98\xfc\x65\x70\xda\x6d\x01\x52\xde\xa8\x95\xa2\x6b\xdf\x34\xb8\xed\x5c\x32\xdc\x11\xda\xbc\xd8\xd1\xe5\x91\xd3\x9f\x53\x67\xe1\x63\x54\xc7\xe5\x61\x16\x1f\x10\x70\xe7\xee\x33\x9e\x5d\x03\xcd\x4d\x2a\x8e\xa0\x44\x05\x62\xa3\xc7\xda\x37\xa2\x3a\x87\x68\x79\xf
4\xb2\x44\xf3\x28\xdb\xc2\xbb\xdb\x38\xb1\xc3\x2a\xe7\xcd\x9d\xa4\xf6\xf2\x61\x28\x27\xb3\x82\x4c\x17\xca\x72\x6d\x68\x9b\x70\xd3\x7b\x4f\x87\x42\xe6\x4d\xe5\x24\x67\x8c\x25\x8c\x2f\x96\x6f\xb7\xed\x6b\xb6\xfd\x99\x1d\x00\x30\x74\x60\xc1\x9a\xca\xd3\x42\x3c\x86\x98\x22\x3a\xc7\x31\x14\x86\x6c\x5e\x85\xd7\x6a\x6b\xcf\x16\xe9\x4a\x30\xf5\x37\x7f\x6e\x7a\x00\xaa\xc2\x55\xff\x77\xe4\x96\xae\x42\x33\x43\x6c\x64\xa5\xc1\x52\xc8\x7b\x07\xc6\x18\x4a\x0f\x91\x25\x1e\x24\x9f\x1c\x25\x5f\x75\x17\x36\x2f\xee\x20\x42\xb9\xc9\x90\xc3\xc1\xd0\xbe\xfd\x53\xfe\x56\x99\xa9\x9c\x84\xe5\xa9\xef\xca\x92\x7d\xcf\xf7\x68\xc4\xf4\x53\xa9\xf8\x9f\x82\x41\x41\x6c\xd3\xb9\xef\x64\x7e\x28\xf7\xc0\x76\xea\xee\x36\xfa\x60\x55\x41\xbc\x95\x7c\xf5\x1d\xe5\x24\x38\x8e\x95\x82\x0f\x5a\x73\xa4\xc9\x6f\x95\x6e\xc8\x97\xd5\xce\xa2\x64\xb2\x07\x1c\xfb\x5e\x21\xb9\x82\x4b\x5b\xd1\x75\x2b\x61\x65\x22\x6c\x8a\x6c\x14\xe3\x2a\xec\x8c\xe3\x96\x6c\xe3\xaa\x56\x27\xb0\x3b\x2f\xe5\x15\x25\xcd\x06\x2c\xbc\xe6\xed\xe3\x52\xe1\xb8\x9d\x25\xdc\xca\x45\xcf\x1d\x09\x70\x88\x9a\xde\xa4\x0c\x18\x51\xf8\x18\xab\x4d\x11\x2a\x37\x8f\x04\x61\xae\x73\xba\xe6\xbb\xdc\x74\x42\x2a\x3e\x4e\x67\xe2\x4a\x8d\xb4\x12\xc4\x29\xce\xe1\x50\xdc\xbc\x99\xd1\xaa\x82\x61\x88\xb2\x78\xc8\x4b\xae\xfe\x22\x08\xe1\xdc\x7e\xae\x29\x73\x57\xa2\xec\x9e\x94\x6c\xdd\x3d\x5d\x26\x35\x66\x73\xcb\xb6\x9f\xb3\xd8\x31\x39\x4f\x65\xa3\x73\x2e\x04\xa2\xab\x90\x29\x58\xf0\x80\x96\x89\xa5\xd8\xae\xa8\x18\x0f\x1b\x5f\x8a\x08\xeb\x8e\x6f\xb9\x36\x6c\x6a\x46\x7c\x23\xdb\x17\xc2\x38\x02\xef\xa5\xb7\xd4\xb7\x94\x3b\x91\xb3\xc6\x56\x47\x99\xb9\x73\x23\x3b\x9c\xf4\x63\xc2\xf2\x08\x4f\x1a\x18\xc3\x97\x28\x10\x21\xaa\x3d\xb7\xcd\xde\x63\x44\x81\xfb\x6c\xb6\xdf\x64\xcc\x5e\x5a\x08\x62\x6a\xd8\x39\x87\x7d\x26\xb7\xeb\x19\x27\xf4\xb2\xe6\x67\x16\x2a\x17\xc6\x43\x56\xd7\x82\xb6\xfa\x91\x05\x82\x14\xca\x45\xe7\xf3\x19\x86\xb1\x9d\x6a\xa4\x7a\x1c\xab\xf9\x13\xa2\xd2\xa9\x95\x0b\x78\x05\x34\xee\xcc\x42\x94\xac\x77\xc4\x74\xe0\xb5\x25\xcf\xfa\x5b\x76\xb2\x86\x33\x5b\x30\x97\xdf\x8c\xe8\x37\x14\xb3\x2f\xfe\xb7\xad\x76\x1d\xfd\x72\x1a\x27\xa6\x72\x11\x6d\x04\x66\x86\x80\xcd\x6c\x2e\xd2\xce\xf5\x61\xb5\xe8\x73\xad\x1d\x79\x38\x30\x9d\x4a\xf7\x5b\xd7\x69\xc7\x9b\xd3\xff\xa3\xd4\x05\x77\x59\x4c\x6f\xb0\xab\x0e\xd4\x3e\x9b\x53\x0a\x13\x94\xf5\x62\x58\x81\x47\x98\xd8\x19\x18\x78\xa9\x39\x05\xc8\xbb\x9e\xd7\x07\x24\xa7\x0e\xe6\x0e\x90\x6d\x8d\xc9\x4c\x2a\xbf\x27\x77\x80\xcf\xf2\x31\x35\x2e\x09\x4a\x65\xaa\x24\x1d\xe4\x32\x2e\xa6\x88\x74\xbb\x0f\xe4\x5d\x21\x5f\x78\x05\xeb\xfd\x4b\xf5\x55\x87\x7f\x0a\x71\xac\xf4\x7a\x09\x85\x79\xca\xce\x0c\x6c\x2b\xb1\x3a\x53\xbb\x91\xa9\xac\x10\x73\x36\x8d\xc2\xff\x16\x82\x09\x63\x58\x13\x6e\x6e\x96\x63\xc4\x1b\xa9\x81\x11\x5c\x80\xb1\xe9\xd9\x16\x16\x99\x22\x2b\x40\xf1\xd3\xba\x26\xb8\xf3\x3b\x8f\x18\x75\x8c\x61\x99\xb9\xf2\x14\x87\x7e\xbc\xa6\xb6\xfd\x26\x74\x50\xaf\xde\x03\x5d\x15\x35\x7a\x55\xd0\x62\x26\x05\x08\x28\x07\xb4\xf0\xbb\x52\xb0\xb1\xe3\xdf\x1c\xa4\xbb\x59\xb4\x29\x8c\x09\x4e\xc7\x2c\x7c\x2f\x0a\x56\x0a\x44\x73\xd9\x3f\x65\xcb\x85\xb9\x67\x89\x5c\x31\xc0\x7d\x2c\x66\xa9\x52\x67\x38\x1a\x59\xd5\x4c\xf2\x67\x54\x8c\x7a\x05\x15\x40\xd8\xd1\x2c\x4a\x69\xd9\x2b\x25\x30\x07\xdb\x6b\xd4\x39\x3a\xc0\xb5\xc9\x50\xb8\xcd\x4e\xbd\x40\xe1\x86\x95\xd3\x90\xce\x2a\xe0\x36\x3a\x3f\x38\x17\xca\x4b\xdc\xe4\xbe\xbd\xbc\x55\xd7\x3c\x4c\xba\x85\x92\x36\xfd\x74\xb7\xda\x03\xa3\xac\x9b\xab\x06\x8c\xe4\x95\x29\x30\x01\xb9\x13\x63\x3e\x29\x44\xf1\xbf\x66\x93\xdd\x53\x30\x57\xab\xf2\x0b\x9a\x9b\x12\x13\x2c\x39\x50\x27\x5c\x5a\x77\xb9\x05\x42\x24\x8e\xca\x4d\xf9\x4b\xc2\xbd\x3f\xaa\x
\xf1\xb7\xdb\xbe\xa7\x53\xe2\x4e\x82\xa5\xdd\xd9\xb8\x10\x84\x46\x1a\x1f\x61\x50\xe0\x73\x73\xd5\xb3\x1f\x77\xdb\xe6\x44\x4e\x4f\xfc\x44\x20\x89\xcb\x7d\x0a\x4a\x68\x59\x39\xf7\x1a\xde\x07\x2d\xa2\x95\x50\x53\xb5\x7b\xe9\x40\xe8\x8f\x01\xbe\x1f\xa1\xbe\x21\xf3\x63\xa8\x23\x44\x72\xbd\xc1\x5a\x53\x49\xf9\x18\xfa\x49\xa7\x08\x3a\xea\x1f\x99\x7c\xb8\xb9\xdd\x14\x34\x08\xc3\xa9\x91\xa6\x3b\x5d\xe7\x7f\x9d\xd4\x87\x2e\x7e\xa4\xea\x16\x58\x08\x30\x62\x0a\xce\x0f\x58\xdc\xa7\x47\xfe\x38\x0f\xc2\x1c\x17\x19\xfb\x5e\x3f\x02\x41\x0c\x5d\x30\x94\x0f\xea\xae\x0c\x3e\xa9\x5b\x3d\x86\xa6\x30\x75\xdb\x97\x00\x1d\x96\xfa\xa3\x65\xb7\x99\x00\x44\xe5\xc0\x25\x94\xb3\xad\x58\xd2\xf0\x3f\x4a\xcd\xe7\x35\x68\x93\x53\xab\xfc\xb4\x39\x75\x01\x41\x4b\xd1\x87\xa7\x52\x73\x25\xe4\xd9\x6d\x7f\xb0\x27\xd4\x45\x21\x65\xd2\x8f\xa5\x51\xd9\xf9\x5f\x67\xe9\xa7\xc6\x97\x50\x4f\x68\xc8\x90\x55\xb6\x7d\x22\x3d\x74\xc2\xcd\x06\xf8\x61\x5f\x82\xa6\xbc\x28\x33\xc8\x86\x17\x50\x83\xf1\x94\x12\xed\x0a\xc9\x81\x3d\x98\x3a\xdc\xd4\xe7\x65\xe2\x1b\xd2\x50\x7c\xf8\xe0\x3c\xf5\x6b\x19\x78\x48\x18\xf8\xf0\xfd\x95\x5c\x5d\x22\x8a\x1b\xf1\xfc\x14\x84\x5a\x2c\xde\x16\x32\x07\x4a\xbb\xbb\xb0\x16\x9b\xc9\x4f\xc1\x6a\xaa\x51\xa0\xfc\xb1\xae\xee\xd3\x20\xc1\x29\x9e\x48\xbf\x8c\x57\x6f\x22\xe8\xb0\x18\x15\x90\xdf\x63\x67\xd0\x21\x16\xf6\x15\x2b\xd9\xeb\x7b\x0c\x6c\x01\xf1\x7f\x6f\xd6\x38\x81\x3c\x14\x6a\xa5\xdc\xfa\x3e\x3b\xff\xaf\x91\xb6\x6e\x9b\xf2\x64\xbc\xe9\x17\xe7\x13\x49\x89\x30\x30\x89\x01\x44\xcc\xfc\x6f\x97\xe3\x7e\x03\x99\x40\xdf\x69\x25\x29\x05\x1b\xd9\xcd\x40\x77\x5e\xb5\x8a\x83\xe5\x13\xe5\x8c\x2a\x04\xa3\xd8\x45\x88\x55\x5f\xb9\x12\x4a\x72\x4b\xf1\xa7\x14\x56\x1e\x18\x3b\x26\x7b\xf6\x01\x78\xf7\x78\x99\x6a\x06\x03\xc6\xbf\xc0\xb8\x7a\x26\xe4\xa4\x66\x96\x29\x4e\x61\xe9\x0b\x63\x6a\x05\xe7\xff\x4e\x5e\x2b\x46\x84\x53\xdd\x80\xae\x96\x3b\x77\x4b\xee\xc3\x32\x33\x94\xa6\x4a\xe9\x64\x53\xb2\x2c\xb9\xce\x1d\xbd\x46\xfe\xc2\x4c\x71\x32\xbd\x12\xf7\x4b\xf4\xa9\xf3\xdb\x22\x8d\x65\x79\x46\x66\xdb\x83\x7c\xd6\x39\x91\x9d\x6a\x96\x2b\xeb\xd5\xbd\x27\x78\x3d\x54\x98\xa5\x9b\x68\x8d\x08\x81\x20\x87\x10\x43\xd5\x52\xb2\x64\x25\xb7\xe5\x3b\xb1\xca\x56\x02\x64\xa0\x7c\x1a\x34\x4a\xfb\x33\x9b\x2a\xb1\x39\xd7\xa2\x7e\xda\xf8\x2f\x4d\x43\xeb\xb9\xda\x60\x97\x34\x14\xc3\x2c\x2d\xc1\x50\xdf\xa2\x4d\xdd\x87\xe1\xd8\x3d\x70\x85\xa0\x15\x05\x87\x98\xda\x0c\x4e\xe0\x7a\xc8\x32\x2f\x4e\xd6\xcc\x58\xda\x7c\xaf\xcb\x97\x5c\xe3\xba\x19\xd7\x88\x3b\x4d\x1d\x78\x62\x50\x1f\x3d\x5f\x93\x85\x73\xc8\xe8\xc9\xa9\xb4\x64\x6b\xe3\x23\xab\xbd\x1b\xda\x8e\x79\x7b\xf7\x0f\x75\xef\x96\x6a\x2d\xb4\x8c\x7e\x53\x50\x5c\x2a\x7f\xe1\x31\x24\x71\x2f\xe0\x49\x77\x64\xf3\x2a\x69\x25\xee\x29\xfd\x18\xbd\xe9\x39\xf3\x93\x9d\x85\x8a\x98\xc7\xe9\x29\x2e\xe5\x2e\xaf\x05\xfa\xd3\xdc\x02\x75\xb8\xdb\xaf\x19\x3a\xd9\x65\x3f\x29\xc3\x2a\x57\x09\x12\x6e\xad\xc1\x05\xeb\xd5\x12\x1e\x63\x8a\x6f\xa3\x54\x4e\xeb\x53\xe7\xc1\xfd\x78\x4d\xe7\xf3\x53\x02\xcb\x19\x87\x0c\x1f\x2a\x67\x96\x25\x1d\xee\x3d\x40\x23\x7c\xb0\xb7\xd3\x59\x5a\x3c\x98\x7f\x12\x02\xff\x44\x55\x7f\x43\x3e\x65\xee\x10\x08\x18\xe6\xfe\x8d\x0c\xd5\x21\xc9\x8e\x7b\x31\x24\x81\xb2\x32\x9b\x91\xfc\x0d\x86\x08\x65\x97\x30\x33\xdb\x3a\xdb\xc5\xd3\x63\x93\x59\x49\x16\x77\x7a\xc8\x73\x23\x62\x65\x7e\x54\xc4\xa1\x92\x15\x96\x38\x82\x1c\x6a\x26\x5f\xce\x0d\xdd\x45\xe4\x8c\x3c\x66\x94\x08\xc7\x90\xee\x96\x76\x8a\x13\xfd\x61\x6c\xe0\xe3\x84\xf0\x44\x0a\x27\xe9\x1d\xa6\xc3\x4a\x10\xe8\x2b\x7d\x21\x0c\xb1\x28\x7f\x57\x24\x86\xf0\x1c\xa8\x7c\xe8\x92\xd9\xec\x9e\xaa\xdc\xcb\xe7\x4b\x24\x24\x66\xf0\xe0\x33\x18\x5c\x71\xaf\x4d\xcb\x92\x14\xb
c\x55\x40\xdc\xe1\x6d\xe4\xae\x66\x1f\xdf\xcf\xbd\x8f\x79\xc8\xbd\x37\x2e\xef\x5e\x4a\x39\x4b\x38\xf0\x3c\xb5\x0a\x94\xf2\x9d\x73\x25\x03\x95\x2a\x50\xc0\x67\x6d\x61\xdf\x43\x24\x85\x90\x87\xec\x61\xa2\xa3\x25\xfe\xc4\x04\x0e\xb1\xcc\x7f\xa6\x98\x86\x9f\x34\xee\x87\xf8\x0b\x5b\xd8\x65\xf5\x54\x8a\xd9\x59\x6f\x1f\xbc\x57\x79\x59\xa3\x02\x64\xba\x92\xcb\x45\xab\x9f\x59\x03\xbd\x24\x3f\xd1\x33\x00\xe1\x3f\x0c\xac\xc7\x92\x10\x0d\x72\x3c\x50\x2f\x93\x2f\x03\xb3\x21\x3f\x17\xa2\x73\x7a\xec\x2e\x55\x36\xa5\x43\x7f\x4a\x96\xaf\xcc\x03\x47\xba\xd3\x0c\x34\x49\xea\x54\x7e\x5c\xce\x2f\x17\xec\xaa\x15\x62\x79\xa8\xe8\xda\x10\x9f\xb8\xbd\x02\xaa\x08\xbd\xca\xe8\xfe\x0b\xa0\x58\x5b\x92\x53\xff\xcf\xec\xfe\xff\xe5\x39\xf1\xef\x4e\x8e\x3b\xed\x3f\xb2\x51\xca\x6f\xc0\x76\x33\xd7\xd4\xb3\x38\x74\x26\x83\x7f\x59\xd6\x73\x9e\x43\x10\x22\xe0\x00\xec\x30\x9e\xfb\xed\x6a\x36\x8a\x4d\xb1\xb1\xb5\x75\x53\x57\x31\xe0\x87\xf9\xd3\x96\xd2\xe7\xf6\x3f\xe9\x44\xeb\x31\x34\x04\xa3\x37\xf8\x04\xf0\xe7\xa6\x1c\x24\xf8\x9f\x53\x9d\xb1\x41\x6f\xb2\x4e\x2e\x41\xbf\x38\xa7\xca\x01\x2d\x47\x19\x6f\x9b\xf1\xa8\x79\x77\x9d\xc1\xdf\xd8\xe1\xe3\x11\x9a\x66\xe9\x3a\x9a\x21\x1a\xfd\xbe\xd1\x78\x9a\x80\x19\x81\x58\xd1\x43\xcf\xd0\x94\x8b\x2a\x0e\x5d\xd5\x21\x2e\x39\xe7\x84\x03\xc4\xa4\xc8\xbf\x25\xce\x03\x77\x4a\xaa\x28\x53\x8a\xbc\x05\xdb\x80\xb4\x08\x86\x23\xd9\x71\x7b\x1c\xdf\x63\x4b\xd9\x4f\x8d\xb8\x17\xc2\xb4\x5f\xe8\x95\x6b\x67\x9f\x8a\xae\x22\x83\xf2\x81\xe5\xdd\x4f\xad\x8a\x16\x8c\xdf\x29\x4a\x61\x5f\xd4\x05\x96\x6c\xee\xd8\xad\xd2\x76\xe3\xd4\x63\x0e\xb9\x5c\x8c\xdc\x23\xe7\xf8\x94\x7f\x81\xdd\x9d\xe3\xec\xf7\x2e\xcc\x8b\x50\x64\x88\xcb\x40\xda\x68\xf5\x89\xfe\x92\x66\x3c\x0c\xf8\x38\x7a\xc1\x1e\x01\x01\x32\x91\xa2\x0e\x6f\xdd\x4b\x8b\x61\x7a\xb9\xf2\x71\x08\xba\xa3\x76\x97\xf2\xaa\x59\x0e\xc7\xa9\x14\xa0\x64\x27\x34\x2d\x0d\xe7\x09\x2c\x74\x13\x90\xa5\xb2\x2f\xee\x77\xfd\x4e\xf2\xa1\x54\x56\xe3\xa6\x62\x71\xdf\x77\x59\x46\x84\x70\xd5\x45\x09\x30\x60\xd1\x77\x96\x53\x37\x7d\x1f\xd2\x54\x73\x9f\xc2\xf0\x88\x14\x8a\xdd\xfe\x25\xfb\x24\x13\xbc\xa7\xe8\x17\x68\xd0\x01\x05\x59\xd5\x39\xd5\xb1\xb3\xf1\xb8\x52\x18\xa7\x36\x76\x3a\xdc\x7b\xff\x2d\xfd\x5b\xc1\xad\x44\x99\xa3\x64\xdf\x4c\x51\x5a\xf6\x23\xbc\x9c\x67\xa7\x32\x22\xc1\x26\x0c\x1f\x90\x15\x6b\xb3\xef\x69\x1e\x65\x9a\xe4\x2e\xef\x16\x0d\x5b\x68\x81\x3a\xa7\x99\x2d\xe6\xb8\x5d\x5c\x95\xad\x1e\xfc\x50\x21\x54\xee\xb9\x41\x14\xb6\x6a\x5f\xf6\xb0\x5e\xf1\x8c\x7e\x0f\x56\xf1\xd6\x97\x7c\x43\x19\x0f\xfb\xe4\x5c\x2d\xfe\x5b\x28\x67\x21\x88\x64\x39\xb9\x63\x56\x6f\x60\xcd\x66\x6f\x37\x61\x1f\x0f\x53\x3f\x2c\x58\x01\x55\xec\xba\x71\x80\x7a\x4c\x9e\x0a\xaf\x46\x63\x4a\x4c\x6b\xeb\x07\x4e\xa1\x64\x38\x51\x72\xaa\x96\x66\xa4\x6a\x7d\x49\xfe\x2a\xd4\xba\x69\x73\x22\xa2\x6c\x56\x27\x69\x35\xe6\x7b\x2d\xb1\x2e\x1a\xf1\x65\xcd\xd6\xce\x1a\x98\x79\xa0\xb6\xb4\x65\xd6\xaa\x5a\x00\x8f\xa7\x94\xed\xf4\xfa\xe2\xcd\x62\x57\xb6\xb5\x9f\x5e\x76\x38\xf7\x08\x9f\xde\xe6\x5e\xee\xec\xf9\xa5\x90\xd0\x20\xc2\xb2\xf3\xd3\x97\xde\x66\x81\x95\x17\x44\xba\xad\xdf\x45\x8a\x81\x3b\x3e\xcf\x52\xfc\x2b\xf5\x2b\x11\x31\x77\x8c\x23\x76\xca\x50\xa3\x6e\xe9\xa5\x40\x9f\xcb\xe9\xf4\x1a\x45\x38\x81\x87\xd4\xc5\xb4\xc8\xe9\xd9\xa5\x29\x74\x3b\xf9\x0c\x9b\x8e\x20\xeb\x43\x8b\x8d\x0f\x61\x8a\x08\xe4\x4b\xcf\xcf\x66\x49\xa1\x1b\x8b\x3a\x81\x37\xbb\xb2\x6a\x2e\x92\x57\x1a\xde\x02\x8e\x69\x01\x07\xe4\x7f\x9e\xd6\x8f\xe5\x90\x83\xff\x98\xb9\x2a\x87\x45\xd7\x6e\xd2\xf7\x84\xf9\xab\x5a\x75\xb8\xf4\x2d\xa9\x51\xd6\x00\x0e\x45\xfb\x68\xae\x96\xbf\x33\x2a\xc0\x51\x79\x64\x3b\x28\xc0\x4c\x9f\x8e\xa0\x26\x25\x75\x20\x12\xe1\x36\x
a3\x6f\xac\x4c\x0c\xb3\xa6\xe4\x94\xef\x63\x69\xef\x97\x20\x7a\x93\x69\xbb\x4b\xb6\x64\xcc\x73\x28\x7b\xa8\xbc\x32\xbe\xe1\x67\xf9\x76\xb2\x9b\x6e\x36\xcb\xf8\x21\x3e\x92\x8b\x87\x31\x6c\xc4\x62\x55\x27\xe6\x1f\x9d\xeb\x3f\x65\x33\x08\x97\xf4\x93\x1a\x6a\x10\xea\x6b\xed\xaf\xc1\xfd\xd1\x73\x1e\x78\x5e\x8d\x37\x50\x1c\xcc\x82\x01\xa4\x73\x0e\x32\x2f\x10\xc3\xa3\xa7\xf7\x50\x0f\x0d\x64\x28\x32\x1c\x1a\x2e\x56\x9e\x6f\xdf\x84\xae\x87\x46\xf9\xbe\x53\x85\xc5\x29\x6a\x53\x93\xd3\xea\x54\x6a\x44\x68\x39\xf3\xf8\xb3\x73\x0b\xa7\xdb\xb2\x94\x49\x2f\x40\xe9\x41\x90\x05\x90\x4a\x4d\x50\xe8\xbc\xe9\x73\x1c\xfa\xf2\x7f\x37\x75\xe4\xec\xe6\x08\x2c\x6c\x68\x89\x59\xb8\x69\x00\x5d\x7b\x72\x2e\x1b\xd9\x0e\xe0\x7d\x6a\x90\x60\xd5\x24\x8d\xf4\x4b\x8a\x82\x82\x00\x6b\x73\x80\x27\x36\x48\x1e\x96\xe3\x7c\x4c\xd1\xde\xc6\x80\x3e\x29\xc4\x97\xa2\x99\x53\xd5\x07\x0a\x2a\x26\x9f\x27\xcc\x61\xa4\x1f\x72\x56\xeb\xae\xfb\x56\x5b\xec\xf0\xa4\x2f\x27\xb4\x1f\x76\x97\x4e\x90\x1f\x7b\x6e\x45\xdf\x2f\x1c\xef\x35\xf9\xd7\xa3\xe5\x3d\x3c\x75\x3b\xcf\x17\x30\xe0\xa9\x24\x19\x7d\xbf\xfe\x4a\x86\x75\x79\xdc\x53\x55\xe7\xc6\xb8\x7f\xd1\xdc\xff\xa1\x1f\xb5\x98\xcd\xea\x9b\xea\xff\xf6\x82\x18\xc1\xd1\x72\xfd\xe2\x1e\x22\x38\x4b\xb7\x6c\xd7\x91\x8c\x92\x89\xfb\x07\xbd\x42\x90\xb5\xb0\x81\x6f\xa6\xea\x54\x18\x40\x65\xb7\xa3\x15\x56\x22\x89\x80\xaf\x73\xd3\xa3\x99\x1e\x4d\x45\x05\xa3\xee\xa4\x1c\x7f\x77\x1a\x22\x80\xc2\xfc\x9e\xdc\x40\x3c\xc2\xfb\x92\xd5\x41\xa7\x11\x0e\x21\x02\xc1\x3c\xb0\x80\xaa\x56\x40\x0d\xa7\x2d\xbe\x29\x3c\xd0\xbd\x80\x78\x50\xba\x3d\x4a\x44\x2e\x74\xd9\xb9\x4c\xeb\x50\xf2\x7c\xa9\xac\x9c\x61\x46\xc6\xb7\xec\x7e\x62\xf5\x15\x7b\x8d\x5d\x28\x6b\x85\xc9\x19\xaf\x90\xf6\xdc\xaa\x22\x5a\x6f\xad\x75\xe4\x93\x91\x16\x5f\xbd\xc4\xd6\x79\x33\xb0\x45\x91\x28\xa3\xc3\xab\xe7\x61\x24\xa0\x07\x23\x43\x2d\xb9\xb0\xc7\xf7\x69\xfb\xd6\xff\x6f\x47\xf3\x8b\x4f\x8b\xb3\xa1\x17\xa8\x99\xff\x10\xc4\xc1\xca\x6e\x0c\x39\x55\x67\xc8\x90\x4b\xd5\x70\x3e\xea\x0b\x48\xad\x18\x85\x90\x56\x76\xab\xa1\x46\x2c\x90\xeb\xb2\xc4\x4f\x40\xb8\xf7\x1f\x9f\x9c\x1f\xb6\xe9\xe1\x09\xfe\x7e\xb8\x89\x58\xae\x41\x4f\xf7\x83\xbb\x2c\xf7\x10\xad\xf8\xa3\x9c\xe7\x01\x7a\x2a\x37\x0d\xe9\xf5\x33\x44\x4c\x40\x32\x2b\x18\xf6\x46\xee\x10\xf2\x73\x8a\xb7\x8c\xdb\x17\x47\x3a\xc4\x0c\x1a\xa8\x0c\x6f\x75\xfa\x3e\x51\x53\x97\xaf\x5b\xd3\x3e\xf4\x0a\xc3\x92\x15\xee\x6c\xf2\x3b\xdb\x4b\xac\xaf\x68\x58\x04\x5f\x93\x8c\x12\x1a\xec\x7c\x6a\xf9\xb3\x3f\x56\xda\x46\x97\x40\xda\xad\x63\x45\xc0\xa0\x55\xc8\x1b\xa0\x88\x75\x71\xa5\xe8\x17\x71\xeb\x95\x24\xd0\xf4\xe9\x06\x5c\x07\x26\xb8\x54\x79\xf1\xae\xe5\x65\xd9\xc6\x46\x28\xf9\xfd\x16\xd9\x3b\xaf\xa6\xde\x6c\xe8\x4e\x4a\x0b\x25\x23\xbf\xe3\xdf\x7b\x8f\xd2\x4b\xa2\xaa\xab\x50\xb3\x9c\x3e\x88\xa3\x94\xf6\xc1\x30\x08\x63\x65\xd9\x31\xcc\x93\xaf\xfe\x17\x61\x59\x4b\x70\x9a\x01\xeb\xd0\xb2\xa5\x75\x1a\x93\x7a\xe0\x61\xd8\xc2\x3d\xd7\x77\x80\x66\xc3\xa8\xd1\x42\x7d\x20\x28\x7d\x07\xfb\x73\x90\xca\x9b\xe7\x72\xc7\xfb\x22\x7f\x81\xcd\xc1\x9b\x1e\x25\xcb\x24\xbe\x73\x49\xe2\x53\x9d\xec\x1c\xd9\xd2\x56\xc7\x78\xbf\xa7\x25\x34\xdb\xfa\xba\x00\x82\xf6\x76\xa9\xb6\x38\xdf\xef\x09\x12\x94\x72\x9b\xbe\xd2\x23\xd4\x90\xca\x89\xf1\x8c\xba\x68\x7f\x4b\x3e\xa9\xd2\x96\x22\xfb\x67\x7b\x37\x60\xb5\xf8\xe3\xda\x0a\x98\xf5\x6c\x7d\x14\x97\x75\x57\xfb\x53\x4b\x25\x36\xe2\x22\xfd\x67\xc4\x3d\x66\x19\xe1\x48\x0a\x14\x6d\xa4\xf7\x4a\xd3\xd7\x6a\x48\x0b\x35\x7c\x8f\x26\xf6\xea\x2a\xba\xdb\xe1\x6e\x5f\x64\xa5\xc9\x0a\xc0\x69\xbf\xf9\x49\x4f\xeb\x5e\xee\xb1\x8b\x4a\x5a\x2a\x58\xee\x00\x60\x35\x17\xe5\x97\xa3\x29\x89\xb5\x3d\x8c\xa2\x9b\xf9\xc7\x3d\xcc\
x4d\x5b\x9a\x92\x72\x4a\xec\xcf\x69\x0d\x7c\x02\x06\xf0\xf1\x89\x24\x82\x1a\x4a\xe0\xb4\x99\x69\x49\x76\x04\x72\x2d\x95\x9f\x3c\xe7\x4e\x3a\x46\x73\xd9\x29\xe1\xd9\x80\x1b\xbe\x23\xbf\x10\x53\xe9\x35\x06\xcc\x36\x3c\x77\x91\x72\xe7\x4a\xcd\xde\xbf\xb0\x65\x5b\x3f\x06\x7a\xa4\xec\x0f\xfe\x0b\x64\x35\xc5\x1c\xfd\xf4\x84\xde\x5d\x7e\xa8\x5d\x8d\xb6\xa0\x31\xd5\x1c\xb7\xc9\x48\x01\xf3\xba\x23\xa5\xa7\xc4\xad\x49\x4c\x1b\xc1\xd8\x5a\x52\x0e\xbf\xba\x6f\xa3\xce\xf0\xc6\x2c\x35\xda\x24\xc5\x8e\x12\xb2\x3f\x87\x0e\xe8\x72\x1c\xf8\x1c\x42\x88\x79\x07\x49\x09\x80\x5a\xdd\x3e\x0e\x9b\xff\xac\x63\xb2\x94\xfd\x4a\x41\xeb\x95\x03\x18\x8a\x0a\x42\xa9\x20\xd8\xde\xdd\x54\x87\xe0\x6e\x19\xe7\xf7\x1b\x97\x8c\x60\x26\x95\x8f\x9a\xb7\x46\xb7\x16\xdf\x90\x52\x1d\x53\xed\x3f\x28\x0f\x6e\xd2\xba\x6d\x23\xac\x5c\x12\xb7\xcc\xac\xcb\x85\xed\x2c\xf6\xb3\x1d\x4e\xc4\xe7\xf2\x35\x8a\xf2\x39\xcb\x91\x10\x52\x75\xd3\x28\xb2\xb7\xdf\x21\xf2\x84\xfb\x28\xf4\x4f\xc9\x9f\xcb\x11\x69\xf6\x77\xa9\xb4\x46\xaf\x34\x6d\x9a\xa1\x55\xe3\x41\xf1\xac\x6b\x28\xd2\xe6\x12\x4b\x55\xad\x80\xc2\x76\xdf\xa4\x5f\x01\x8c\xa7\x0a\xb2\xb2\x72\xa2\x45\x7d\x1c\x65\x26\x53\x39\xc9\x79\x93\xa2\x0b\x33\xd5\xca\x70\xea\x75\x05\x97\x1a\x20\xe4\x72\x2e\xca\x93\xb6\xe0\x94\x9c\x8b\x03\x71\xca\x56\x02\xbd\x93\x94\x88\x77\x1f\x6b\xc9\x12\x48\xa7\x6a\xfd\x67\x54\xfd\xfe\x13\xd3\xf0\x9f\xf4\x64\xc9\x3a\x2e\x46\x8b\xfc\xab\xde\xc7\x7b\xcb\x95\x40\x27\x3f\x07\xa8\xca\xd1\x39\x31\xe9\x50\xa3\x85\x7a\xc0\x15\x67\xcf\xab\xba\xc8\xd6\xec\x07\x16\x17\x52\x10\x18\x94\xff\x95\x0f\xb9\xda\x26\x0f\x99\xc2\x55\x43\x9e\x15\xbc\x5d\x80\x2a\x95\xae\x6c\xa3\x01\xea\xbb\x31\x99\xa6\xf9\x6c\x34\x76\xa7\xa9\xd1\xbf\x52\x3e\x98\x4e\x3f\x8f\x89\xce\x90\x6a\x6e\x3c\x87\xaf\x35\xba\xbb\x16\x42\x7b\xc0\xd8\xeb\x6d\xa9\x7d\x08\xf5\x87\xd1\xea\x03\x13\x78\x94\x43\x37\x72\xb1\x56\x8f\xb2\x2f\x86\x57\x6d\x4a\x4b\x85\x57\x21\xe9\x91\xb5\x47\x1f\x19\x20\xb1\x49\xf9\xf3\x23\x93\x6d\x12\xa2\x2f\x4c\xaf\x7b\x5b\xcb\x9e\xbf\xc0\x4f\xdc\x43\x33\xfb\x5e\x12\x25\xf6\x9d\xb9\xf0\x68\xc4\xd0\xd8\x1b\xfd\xfb\x8e\xa0\x9d\x4f\x0a\x6c\x77\x08\x4c\x19\x02\x4b\xf8\x94\xbe\xf5\x7b\x4f\xa7\x01\x63\x5c\xf7\x2f\xdd\x9f\xfe\xaf\x14\x0e\x7d\x87\xb3\x5b\x46\x80\xb2\x6e\x4f\x11\x50\x71\x8f\xa1\x84\x79\xeb\xd1\xb2\x1f\x76\xbb\x2c\xdf\x99\xdd\x42\x76\x4b\xfc\xf6\x68\x35\xe0\xd4\x89\x68\x4f\x91\xbf\xcd\xf8\x26\xb2\x0f\xac\x2c\x58\x75\x49\xcb\x53\xd1\x24\x63\xb6\x19\x62\xe3\x5f\x68\x58\x67\xb4\x50\xe2\xa9\xa3\x74\xe6\x14\xfa\xc1\xec\xdc\xf5\x95\x22\xcc\x75\xfd\xa1\xce\x13\x5c\x86\xf6\xd1\xec\xae\x10\xee\xd2\x3d\x1c\xe4\x76\x1c\x0a\xe6\x58\xda\x9e\x56\x8b\xe5\xbd\x90\xf4\xfb\x2c\xa3\x2d\x12\xda\x4f\xb2\x9d\x5a\x5b\xb1\x60\xa5\xe4\x5e\xa2\x0d\xfc\xca\xd1\x46\x1e\x97\xb3\x36\x87\xd9\x61\xfe\x27\x05\xae\x1d\xe6\x7b\xb4\x6d\x38\x17\x5f\x6a\xf8\x13\xad\xbd\x32\xd3\x78\x77\xb9\x04\x7b\x60\x2e\x7d\x98\xe6\xde\xc4\x0b\xdc\x75\x5a\x91\x15\xcb\x9f\x51\x56\x4b\xb9\xb1\x25\x0b\x9d\x16\xf7\xb5\x93\x5c\x8e\x9d\x26\xf4\x0f\x01\xd8\xd8\x91\xe8\x09\xc0\xda\x24\x29\x1e\x60\x3e\xae\x06\xf2\x59\x52\xaf\xc5\x79\x6f\xd7\x0d\x8a\xf6\xcd\x76\xf6\xa6\x37\x80\x20\x3f\x33\x71\xb8\x29\xfd\xbd\x79\x68\xee\xa6\xf3\xaf\x44\x0b\xd1\x64\x6f\x45\x89\x9f\xca\x11\x67\x0b\x83\xc9\x2c\xf6\xb2\xa8\x00\xa4\x06\x2c\x4f\x91\xe8\x2b\xe2\x01\x72\x6d\x36\x08\xee\xd3\x09\x3b\xe3\x9b\xf8\xf2\xfe\x31\x84\xfc\x7b\x3d\xc6\x11\x53\xa3\x7d\xdb\x88\xe9\xc6\xcb\x2e\xfd\x65\x73\x5b\x0e\xf9\x37\xf6\xb3\xb7\x64\x9c\x6a\x0a\xfb\xbf\xb4\x4b\x26\x5d\xa2\x07\x32\x42\x70\x3f\x84\xa5\x16\x3f\x58\x94\x50\x19\x40\x8e\x24\x53\x25\x8d\xd4\xab\x9e\x21\xdc\xed\x23\x17\xf6\x54\x1d
\xb0\x4a\x35\x3a\x90\x0b\x56\xe4\x5d\x6a\x79\x48\x7c\xca\xdf\xcd\x8c\x76\xb3\x10\x35\x86\x3c\xeb\xc5\x23\x83\x77\x0a\xaf\x2a\x0c\xd3\xf3\xdb\xa4\x32\xf1\x2d\xd0\xd6\x5e\x63\xe0\xce\xd1\xbb\xc6\xed\x7d\x5f\x3b\xdc\xbd\x9e\x58\xfe\x7e\x3d\x90\x94\xf4\xbd\xca\xc1\xaf\x53\xa1\x42\x53\xf1\xab\x50\x42\x8c\xe4\xa0\x6c\xad\x20\x12\x92\xe2\x7f\x54\x8e\x99\x74\x79\x94\x30\x4f\x81\xf2\x05\xcb\x2c\xe4\xbe\xce\xc5\x41\x8b\x59\xeb\x5b\x7f\xfc\x41\xf6\xdb\x8d\x6d\xee\xfb\x3d\x6c\x13\xc7\xc7\xe3\xc2\x9f\x06\xd1\x11\x22\x6c\xfc\xd5\x72\xa9\x13\xe5\x89\x35\xa5\x4f\xf9\xcc\xb1\xe3\x77\x7a\x5c\x74\x5a\xc8\x20\x3a\x8b\x1e\xb7\x50\x40\xde\xc4\xa5\xe0\x78\xa7\xad\x52\x41\x01\x59\x5d\x2c\x8a\x58\x4e\xda\xb0\x29\x2f\xd7\x28\x74\xcb\x3d\xd5\x3b\xe8\x3a\xcf\xc5\x24\xcb\xa9\xd4\xda\x7c\x52\x20\x80\x74\x5c\xf4\xb5\x37\x8f\x14\x25\xd9\x63\x6b\xc7\x17\x18\x4b\x55\xf9\x80\xa6\xfc\x01\x41\x55\x2b\xbb\x4a\x8d\xf4\xad\xba\x54\x90\xfa\x58\x76\x93\x21\xb2\xc7\x55\x58\x40\xd0\x6c\x0b\x78\x80\xa4\x74\x58\xa2\x70\x17\x80\xb9\xc3\x0f\x83\x82\x01\x4f\xbf\x96\xf5\x02\x6a\xa7\xbc\xfc\x96\x6d\x2f\x20\xdd\x58\xd3\xf6\xf0\x9f\xc2\x09\x06\xb8\x15\xd6\x8b\xc9\x79\x0b\xcf\x91\x7d\x09\x5e\xfd\xae\xf2\x1f\x72\x8e\x55\xd8\x99\x1e\x6f\x6d\x40\xd8\x4b\xb2\x7e\xa1\x0c\xde\x2e\xce\xb0\xb4\x22\x21\x2d\xfe\x27\x67\xee\xc7\xe3\xb4\x2b\x74\x86\x34\x9a\x3e\x97\xa1\x7c\x42\x9a\xd7\x94\xe5\xdd\x7c\xbb\xb2\xf7\x6d\xc2\xa9\xbc\x7e\xb7\x6c\x41\xf9\xb7\xf8\x84\x5e\xff\x64\x5f\x9d\x2e\x40\xf0\x52\x11\xd3\x02\xfb\xa1\xc6\x3f\x8a\xfc\x47\x36\x38\xe1\x83\xca\x46\x79\xff\x61\x94\x4b\xa9\x00\x3a\x3b\x4e\x9c\xc1\xd8\x9b\x2b\x7b\xaf\xfc\x2d\x6a\x4f\x81\x9a\x76\x63\x13\x3f\x42\x1c\x7b\xe7\xf2\x7e\x90\x95\x65\x64\xfd\xff\x60\xac\x00\xb9\x3d\x03\x1c\x5b\xca\xf5\x00\x3d\x54\x99\x50\x62\x68\x29\x11\x1f\x77\x87\x64\x69\xa9\x70\x19\x0f\xe8\x2f\x88\x79\x7c\x12\xa5\x0e\xf6\x9b\x0c\xa9\xcd\xd8\x5c\x99\xee\xf1\xed\x88\x6a\xf7\xe7\x2b\xeb\x0f\xe2\x5c\x79\x57\x51\x33\xd8\xdf\xa6\x1c\x93\x7b\x2d\x11\x72\xae\x30\xc6\x15\x2a\xf1\x50\xe0\x56\x96\x4f\xc2\xa4\xd5\x2c\xca\xe1\xad\x13\x7d\x75\xdf\x86\x1a\x09\x21\xa1\x43\x22\x85\xfe\xbd\x29\x3d\x4e\x43\x6a\x03\x28\x3b\xc6\x24\x74\x1d\xdb\x45\x78\x69\x7c\xb1\x6a\xe5\xbf\xa3\x5a\xd8\x21\xea\xab\x84\x8f\xb8\x8d\xda\xe1\xbf\xa1\x82\x70\xef\x9e\xbc\x79\x66\x11\xd8\xc4\x3d\x25\xd9\xb2\xe0\x27\x20\xc0\xf1\x24\x09\x5f\x0c\x55\x10\x2d\x61\x41\x04\xcc\x8e\x28\xe0\xb9\xb0\x6e\xc2\x5f\xf5\xed\x4d\xf5\x09\x7b\x6b\x35\xe4\x4f\x01\x64\xcd\xc9\x28\xd9\xa4\xfc\x37\x4b\x6e\x23\x5b\x93\x79\xfc\x2b\xfc\x04\xe6\x0f\x51\x5b\xf4\xd0\xca\x61\x5d\xd0\x36\xda\xa8\x94\x48\xeb\x55\x1e\xa0\x92\xf3\xca\xee\x9a\x65\xdb\x79\xa8\xe5\x22\x63\x38\x5d\x56\x98\x8c\xc2\x9a\xc2\x11\x0a\x90\x8d\xaa\x93\x90\xee\x21\x7e\x7d\xbc\x84\xa5\x62\x5e\x33\x84\x99\x29\x30\x1a\x04\x2d\xdc\x84\x8d\x55\x9d\x70\x3e\x10\x65\x82\x77\xfd\xcc\x1d\x55\x47\x9f\x1a\xc0\xb1\xcd\x6e\x89\x62\xc0\x5e\xc3\x73\xb3\xbc\x3c\xfe\xf6\x51\x73\xea\x71\x19\xc6\x66\xb4\xee\x25\xe4\xd4\xaa\x48\x76\x95\x3b\x21\x7a\x75\xf9\xc7\x58\x63\xcd\x97\x0f\xd7\x3f\xf8\x40\x98\x6c\x5f\x26\x1d\x01\x0c\x61\xe3\x7e\x6e\xfa\xd6\x85\x07\xd4\x64\x88\x85\x18\xce\xd8\xb8\xbc\x66\xbf\x70\xac\x6a\xaa\x33\xe1\x2f\x85\x62\xc4\x64\x60\x60\x6f\xb5\xc4\x88\x4d\x35\x8c\x18\xe6\xf0\xe7\x25\x44\xa1\x0a\xbb\xa9\xeb\x7e\x33\x62\xc4\x48\x11\x87\x01\x08\x3c\x3b\x2d\x7a\x9c\x41\x55\xe7\x4a\x75\x42\x34\x1b\x1f\xb9\xeb\xd2\xcc\x4b\xec\x21\x7b\x02\x90\xb2\xd9\x5b\x37\x17\x2e\xa4\x30\x33\xcc\xd4\xb4\xe8\x4e\x75\xba\xd9\xc0\x73\x27\x5c\x31\x09\xfe\x14\x2c\xf9\xfe\x8f\xaa\x2b\xcb\x52\x5d\x47\xb6\x73\xe1\xe7\xfc\xbc\x49\xc9\x0d\xc6\x89\x6d\xb9\xdc\x40\x9
2\xa3\x7f\xda\x4d\xc8\xdc\x5b\xb5\x16\x32\x09\x1c\x1a\x5b\x52\xc4\xee\xa0\x87\x0a\x6f\x07\xfe\x60\xf7\x51\x0b\x66\x30\x43\xd4\x18\x17\x05\xe6\x8e\xf5\xef\xc5\x13\x5b\xd5\x82\x31\xad\x52\x5c\x83\x91\xe5\x1a\x21\x08\xb5\xb5\x50\xed\x7f\x99\xf7\x2f\xde\x48\xae\x86\x95\xe8\x98\xf7\xe1\xfb\x3d\xb2\x61\x07\x1f\x83\x66\xaa\xbe\x3f\xec\x57\xc5\xd2\xd3\x2f\x6c\xee\xf3\xe1\xa5\x58\xfc\xb8\xb4\xef\xe7\xce\x36\xe2\x34\xba\x8a\x46\x7a\xa9\xff\xbb\xa0\x9e\x8c\x8e\xde\x2d\x5b\x99\x6f\x5f\xab\xcf\x82\x8e\x2e\xb7\xee\xa7\x15\x8f\x65\x25\xdc\x6c\x1a\xf1\xb6\x36\x2e\xa3\x65\x26\x2a\x2e\x9e\xbc\x58\x3c\x47\x0b\x1a\x57\x16\x12\x5d\x54\x5e\x2e\x33\x4f\xe2\x40\xe9\xe3\xf5\x28\x70\x8a\x6e\x6c\x1a\x3d\x1b\x4e\x60\xf6\x2a\xaf\x7e\x48\x1d\x49\x0b\xea\xa3\xf3\x14\x0e\xf7\xad\x7b\xe0\xff\x61\xdf\x77\x85\x93\x5f\xee\x14\xe9\x78\x34\x6e\xc2\x27\x8b\x13\x25\xc7\xe3\x7d\xae\x4a\x4e\x7b\xaa\xb5\xee\x64\x81\xdf\x12\xc5\x04\xe4\x1d\xbc\x75\x93\xb0\xdd\x02\xd5\x6e\xcb\xf4\x67\x3e\xcb\x3e\xc3\xfc\xb2\x6c\xf1\xa4\x9d\xdb\x0e\xe0\x3d\x87\xe9\x29\xeb\x47\xec\xbd\x6a\xd8\xe5\x67\x2f\x90\x7c\x5c\x21\x27\x58\x80\xda\xa0\x02\xd7\x28\xe0\x36\xdb\x2b\x37\x9e\x84\x54\x51\xf3\x89\xcb\x69\x34\x2f\x5f\x35\xcf\x78\x15\x3c\x67\xf5\x3d\x07\x71\x26\x02\xd8\xd2\x21\xae\x4f\x9b\x2f\x95\x69\x9b\x1f\x14\x37\xde\x54\xee\xd8\xbd\xe2\x8a\xce\xa5\xef\xc5\x7c\x51\x26\xda\xc7\xb9\xae\x4a\x76\x75\x38\x6a\xb9\x7d\x96\xb9\xa4\x63\x63\x8f\x9d\xa7\x88\x72\x2b\xdb\x00\xb8\x37\x39\x83\xaf\xec\x15\x68\xbd\xb7\x8b\x56\x8c\x6c\xc0\xa1\x3a\x9b\x36\x6a\x24\x97\x52\x27\xc9\x7f\xb9\x81\x88\x7f\x8e\xea\x25\x47\x00\xac\x28\x99\x94\xeb\x55\x33\x0c\xcc\x4c\x15\x23\x2e\x77\x3c\x5d\xfd\x64\x01\x00\x76\x04\xdb\x80\x9c\x6b\xa4\xd7\xe8\x1f\xe9\x35\xba\x7c\x6e\xca\xee\x21\x99\x9d\xb3\xb9\xfa\x59\x9e\x30\x9e\xd2\x50\x15\x47\xd9\x0e\xb3\xdc\xb0\x4d\xaa\xfd\xd9\xb7\xdd\x0a\x24\x9a\xa6\x57\x17\x52\x34\x07\x1d\xbc\xfe\x14\x1e\x29\xab\x76\x1a\x8d\xdd\x27\x6d\x9a\x52\x57\x66\x7d\xf7\xef\x5b\xc1\x51\x2a\x68\xa8\xe5\x74\xff\x07\x56\xa7\x2e\x83\x1a\x7d\xc8\xbf\x48\x07\x2e\x53\x0e\xdf\xd2\x5f\xf0\x50\xfe\xcc\xa9\xf8\x9c\xf6\x8b\xfc\x60\xaf\x16\xce\x43\x9f\xbc\x79\x13\x07\x4d\x13\x6e\x40\x60\xd7\x40\x22\xba\xb7\x3b\x76\xef\xc7\x67\xf9\xc7\x3f\x00\x26\xe0\xbf\x51\xaa\xa0\xa5\x72\x6a\x4c\x28\xd6\xb0\x14\x58\xb0\xe9\xbf\x5d\xa6\x65\x37\x74\xff\x69\xf4\x7f\x63\x65\x03\xd7\xbd\xcb\xa7\xcc\xa6\x14\xae\x69\xc2\x04\x6a\x09\x03\xd4\x55\x94\x70\x40\x9f\x59\xcf\xb7\x8b\xdd\x52\x5f\xa2\x8d\xf8\xf6\xb2\xe9\x10\x6d\xe7\x9c\x73\xd4\x3b\x3c\x75\x3d\x5e\xf6\x0a\x15\x1f\xe8\x9b\x38\x27\x1f\x78\xbf\x12\xe4\xc6\x59\x38\xc9\x31\x72\x71\x2a\xc5\xcf\x36\x88\x0e\xc8\xee\xe1\x2e\x7e\x02\xc6\xaa\x6f\xca\x35\x7e\xaa\xa8\x71\x50\xee\x4d\x42\xbd\xa3\xee\x2e\x60\x80\x06\x8e\xd4\x8d\x88\x42\xb8\xab\x95\xf3\xdd\x7f\x96\x73\xd7\x7e\x2d\xa9\x65\x77\x77\x8f\x94\xb9\x93\x51\xa6\x2e\x56\xce\x80\xa0\x1d\x46\xbd\xd5\x83\xf1\xf8\x89\x94\x54\xc0\x66\xbe\xfd\x26\xee\x54\xe6\x3a\x31\x87\xaf\x8d\x2e\xe0\xae\xe3\xf2\xf1\xd8\x02\x8b\xa8\xb1\xb9\x6b\xdf\x5e\xa6\x80\x7b\xd9\xc9\xb6\x61\x4a\x3f\x4d\xf9\xef\x8f\xa4\xe3\x0c\x20\x4e\x95\x44\x96\xd3\xd9\x02\xd1\x26\xff\xbd\x19\x3e\x5a\xa0\xbe\xf3\x7d\xb8\x17\x55\x7e\x06\x93\x8e\xf8\x5e\xcb\x43\x93\x7d\x22\xb0\x2d\x88\xbc\x5e\xa8\x48\x6a\x82\xbe\xb9\xb8\xc4\x23\x44\x41\x77\x35\xb3\x56\x1b\xb5\x47\x5f\x9e\xa1\x37\xd7\xbf\x22\xa7\x1e\x96\x89\xe6\x29\x4d\x55\xef\x01\x8a\x52\x74\xaf\x41\x8a\x34\x3f\x19\xc1\xfc\x4d\x13\xc3\x9c\x5d\xd5\x6c\xfd\x5b\x65\x20\xf3\xfa\x54\xf4\x80\x8d\xed\xef\xa4\x4d\x76\x0e\xd9\xc1\xd8\xf2\x15\xb1\x13\x45\x53\xac\xff\xd2\xe1\x7a\x70\x05\x54\x0e\x48\x7c\xd9\xcb\x14\xb5\x
cf\x0e\xf5\x0b\x4a\x3d\x09\x4e\xad\xd1\x0d\x48\xe2\xa3\xce\x79\x29\xc1\x44\x54\xa6\x5a\xfc\x84\x37\x2f\xeb\xa0\x64\x47\x92\x52\xa2\xf6\x2e\x87\x18\xac\x15\xd4\x19\x64\x8a\x6e\xd5\xca\x0a\x05\x8f\xe6\x23\x44\xef\xc8\x5c\xce\x8f\xd5\x59\x0c\x45\x11\x7f\x11\x44\x01\xaa\x28\x08\xa1\x8b\x9c\xde\x9a\x64\xe2\x2d\x73\x83\xbf\xdd\x46\xd8\xa2\xa8\xd9\xc1\x53\x1f\xad\x2f\x6c\x20\x22\x08\xde\x05\xd3\xbd\x5a\x44\x48\x44\xc8\x7f\x27\x5f\xdb\x9c\xb5\x02\xd7\xab\xfc\xf1\x95\xee\xe3\xe2\xa7\x4c\xff\xb6\x3d\x99\xbc\x7e\x21\x48\x66\x62\x7b\xca\x99\x3f\xaa\x7a\x7c\x32\xda\x84\x84\xf7\xa5\x1a\x04\xb8\x7b\x2b\xc7\x50\xc0\xca\x01\xc8\xe7\xd1\x8d\x31\xa8\xd8\xc9\x18\x0e\x6e\x5a\xbc\xb5\x08\xe6\x8d\x1b\x16\xe8\x0c\x17\x9f\x8b\xa0\x2d\x6d\xb1\x48\x2e\x68\xaf\x8a\xc2\xe5\xc0\x00\x0e\xcb\xa0\x72\x94\x61\xd3\x1d\x46\xf6\x70\x90\x5b\x24\x06\x3c\x8f\x47\x99\xb7\x55\xb5\x94\x5f\x41\xbc\xa2\x1c\x8b\x31\x60\x16\xc3\x2e\xb0\x24\x17\xc9\xbd\x7e\x27\x73\x88\xf6\x09\x16\x88\xbe\x2e\xdc\xa5\x5e\xfc\xb3\x58\x60\x33\xf3\x13\x6e\x02\x63\x54\x50\x85\x9e\x00\x29\xc6\xa4\x14\x6b\x4c\x74\xda\x4f\x4d\xfb\x74\x55\x5f\xf6\xe2\x35\xe1\x16\x0a\x2d\xbd\x1d\xc4\xa1\xbf\x22\xbf\x40\x63\xbf\xf7\xb2\xa3\x73\xf7\x02\x2c\xa8\x48\x1b\x98\x82\xef\xc1\x8e\x97\xcc\xda\xe0\x8e\x67\x0e\xd4\x3d\x1a\x98\x93\x03\xbb\xc3\x98\x6b\x42\x6a\xd8\x56\xdd\x4f\xfa\x90\xb8\x4d\xe5\x2a\xe2\x6b\xf4\x83\x27\x61\xf0\x5d\xc4\x98\xb7\x4a\xe0\x59\x7e\x8f\x56\xf8\xcf\xb3\x2c\xa4\x24\x9e\x3c\x47\xd4\x7c\x18\xa4\x95\x78\xea\x0f\xf8\x08\xf7\x58\xcc\x7e\x4a\x99\xc1\x09\xac\x7c\xb3\xa7\x8b\xaf\x32\xa5\x89\x04\x31\x06\xbb\xd9\xe3\xae\x5a\x99\x2c\x27\x1a\x44\x1a\x3a\x08\xa1\x14\x58\x63\xf8\x93\x8d\xf2\xfa\xbe\xc8\x54\x1d\x10\xf1\x10\x26\xf6\x6d\x60\x3c\xf4\x34\x71\x19\x37\xd1\x33\xe9\x06\xe6\xd3\x2c\x0f\x93\xe8\x90\x51\xf6\x28\xdc\x67\xd7\x4e\xeb\xcf\x05\x1c\x52\xfe\x1f\x7a\x92\x0c\x24\x1f\x59\x6f\xcb\xee\xc1\x5a\x28\xd0\x12\x52\x82\x52\x99\x28\x1e\x26\x71\x3e\xd2\xe7\x3d\x3a\x4b\x69\x8e\x6d\xc4\xa3\xbc\x76\x63\xf2\xd3\xb8\x31\x1e\xef\x26\x32\xbf\x63\x15\x4f\xee\xe2\x91\x77\xa0\xf9\x63\xd8\x46\x1b\x9d\x40\x54\x6d\xe9\x17\xdd\x67\xcb\x76\x9e\xdf\xd5\x30\x41\x3b\x46\xe6\xd4\x14\xb9\xed\xc3\x48\xcd\x17\x5f\x7c\x40\xb4\xa3\x5e\xa2\xff\x0a\x6e\xa2\x61\xca\x76\x33\x0c\x54\xed\x54\x84\x09\x69\x43\x30\xf4\xbd\x52\x1f\x85\x49\xdf\x4f\x43\x44\x0f\xe5\x6d\x7c\x19\xc4\xdd\x41\x6c\x1e\xe9\x83\x70\xe7\x64\x7f\x63\x8d\x88\x96\xab\x7f\x0a\xa4\x58\xee\xba\x8d\xb2\xd0\xb6\x5f\x40\x8e\xe6\x6a\xc0\x82\x0e\x81\x0b\x40\x07\x48\x33\x2f\x78\x57\x01\x18\x66\x7b\xbd\x75\x95\xdb\x95\x20\x00\x5d\x25\xe1\x9c\x90\x2e\x81\x91\xb9\x49\xfc\xc7\xa3\xf2\x29\x6a\x53\x3c\xf2\x1f\x2b\x5f\xca\x85\x5b\x3f\xf0\x9a\x86\xdf\x70\x50\xfe\x43\xb5\xe3\x96\x1a\x2f\xf0\x6b\x4b\xc0\x78\x69\x0b\x26\x3a\x2b\xcc\x15\x03\x19\xb5\xda\x0e\x3a\xb7\xc7\x53\x4d\xf8\x57\x24\xe4\x9a\x44\xf7\x60\x7c\x83\x2f\xfd\x4e\x9d\x8e\xae\x5c\x8c\xbc\xbc\x48\xfa\x0b\x06\x17\x02\x1b\x09\x89\x97\x82\x2e\xd2\x91\x3a\x7e\x2a\x05\x46\xf4\xed\x18\xe1\x55\x09\x11\x25\x1a\xf1\x77\x21\x80\x84\xdb\xed\x72\x6a\x68\x33\x68\x26\xa3\x41\xa1\xb7\xab\x9a\x57\x1f\xa2\xcb\x37\x13\x16\x75\xf0\xe4\xda\xce\x71\x7e\xba\xe3\x86\x42\xea\x4a\xa1\x91\xf5\x71\x77\x85\x3d\xd4\x28\x2b\xbd\xc4\x74\xce\x86\x8e\x22\xe9\x3c\xc9\x71\xa5\x79\x2b\x7a\xae\x9d\xb2\x03\xbc\x18\x5e\xed\xf7\x38\x55\x0b\x89\x52\x5b\xed\x34\x51\x19\xe1\x5e\xb9\x18\x59\x6c\x71\x65\x06\x07\xbd\xfc\xf6\xd2\x6b\x3e\x1c\x8e\xf9\x9f\x7c\x96\x16\x57\x82\xb9\x53\xb1\x6a\xb6\xd0\x91\xf1\x76\xd2\xb5\x83\xfd\x47\x59\x93\x09\xee\x9c\xd3\xd4\xe5\xa1\x4a\x30\x1f\x61\x8d\x92\xfe\xfa\x8a\x2b\xbd\x2f\x6b\xc0\x0d\x28\x37\x06\xd1\
x0c\x84\x61\x8a\x65\x98\xe5\x1b\x73\xdb\xb9\xf1\xf7\x25\x31\xe6\x19\x9f\xae\x99\xaa\xc6\xa8\x0c\x53\xf0\x8a\x3c\xe6\x13\xfa\xc9\x02\xae\xb2\xef\x5a\x3d\x88\x56\x80\xdc\x9a\x07\x13\xb0\xb0\x9f\xd2\x45\x9c\xde\xa2\x36\x24\xa5\x01\x90\x7c\x65\xe6\xd5\x5e\xab\xc8\x04\xae\x4c\x1a\x94\x7b\x59\x76\x19\xbc\x5d\xe5\x6b\x23\x42\x56\xdf\x76\xca\x87\x82\x7f\x1e\x7f\x37\x32\xae\x2e\x07\x15\xed\x24\xd2\x44\x12\x33\x46\xc3\x80\xc8\xef\x80\x96\xee\x12\xeb\xc2\x69\x81\xb1\xfd\x41\xff\x6a\x05\x24\x56\xf1\x85\xb3\x29\xf8\x4f\x7d\xd2\xd3\xdf\x0d\xa1\x28\x41\xf1\x65\x69\xc2\x0f\x1f\xb9\x13\xe5\xdf\xeb\xe5\x1c\x5d\x2e\x48\x07\x70\x5a\x3e\xfb\x1e\x87\xf0\x36\xcc\x79\x77\x46\xf7\xfe\x34\x71\x0b\x36\x23\x50\x3f\xf2\xe0\xb8\x92\xb8\x60\x04\xa2\x91\x14\x52\xac\xd3\xf2\xe4\x80\x2e\xb9\x69\xc3\x48\xc5\xa8\xf3\x6b\x34\x0d\xbe\xd4\x72\x46\xa6\x94\xe1\xc3\x82\x6c\xd7\x6f\x78\x46\x57\xed\x24\x2d\xe5\xa6\x04\x8b\x5d\x05\x59\xad\xa3\x4a\x79\x07\xaa\xa9\x86\x22\x2d\xfa\x62\x2e\x85\xdd\xb9\x84\xec\xea\x5c\xa0\x9a\x8a\x6e\x3f\x2b\xbc\x97\x7c\x70\xf9\xb3\x1c\xa7\xe2\xe8\xbe\xbc\x9f\xc1\x00\xdb\x1d\x92\x14\x32\x0e\x95\x72\xbf\x63\x1b\x96\x7a\x47\x2e\x97\xa3\x0a\xc3\x52\x35\x64\xbd\x3b\x07\x7b\xfa\xee\x7c\x3c\x92\xe3\x2f\xa2\xc0\xae\x91\x73\xc8\x5d\x1e\x47\x71\xd0\xca\x9e\x42\xfd\x1c\x94\x84\x55\x4a\x82\x9e\x07\xfd\x75\x51\xff\x41\x4f\xa8\x8a\xaf\x1a\xb5\xfc\xfb\x3a\xae\xe6\xbf\x40\xc3\xd0\xd9\xd2\xdf\x3a\x3f\xa6\x7d\x2c\x5c\xc9\xc2\xdc\xc5\x15\x41\x19\x4d\xff\x84\x2f\x80\x87\xa6\x2a\x35\x79\x65\x25\x88\xa8\xba\xe2\x33\x37\xa3\x4e\xe6\xdd\x39\x20\xd0\xa0\x64\xe7\xac\x81\x93\xd6\xff\xef\xfc\xd2\xa3\xac\xce\xe9\xde\xa3\x42\xe6\x38\x3a\x95\x65\x69\x6f\x03\xe6\x22\x51\x0f\xf1\xcf\x81\x8e\xc1\x89\xdb\xf8\xd8\xb8\x54\xc1\xac\x48\x99\xc2\xbc\x22\x20\x95\x16\xe4\xbb\x9a\xcb\x3c\x74\x34\x2c\x9c\x2d\x2d\x68\x59\xf2\x1b\xdd\x55\xbe\xde\x04\x1c\xd5\x56\x44\x3b\xf7\xf9\x8f\xe4\xc5\x7f\x37\x78\x8c\x5d\x96\xca\xc1\x43\x39\xc8\xe6\xa2\xe5\xf7\xe0\x4f\x4d\xfc\xcb\x6f\xe4\x51\x76\xc8\xab\xaa\xc6\xb4\x3d\x11\x74\xa3\xb1\x7a\x19\x3b\x14\xb0\x29\x40\xac\x74\x19\x5f\x7e\x59\xc9\x24\x1d\xab\x84\x71\xe5\x57\x56\xd2\xf8\xd6\xdb\x70\x9b\x91\x8f\xdc\x47\x43\xc4\xcf\xf1\xf3\x26\x27\x76\xef\xe0\xcf\x87\x1d\xef\x66\x44\x67\x33\x0b\xed\x94\xe3\xd1\x96\x8f\xb0\x7f\xf9\x3c\x7f\x4a\x4d\x26\xda\x9a\xd3\x97\xe1\xf1\x8d\xb2\xce\x60\xd8\x75\x19\x61\x37\xa2\x77\xa4\x3f\x7d\x87\x6f\x5f\x16\x34\x2a\x12\xe7\x64\xb7\x9a\x49\x21\xdd\xaa\x17\x7f\xf2\x68\xc8\xec\x21\x67\xad\x8b\x45\x17\xf5\xa3\x0e\x9e\xc1\x70\x44\x0e\x8a\x19\x74\xcd\x39\x4e\x1e\x01\x2b\xe4\xa8\x72\x61\xe3\x28\x28\xb0\x1b\x8b\xee\x7d\xf1\xdb\x08\x9f\x73\xb4\xa6\x49\x02\x29\x95\x26\x2d\x69\xb6\xfc\xd6\x59\x85\xf2\xf1\x26\x19\x8d\xea\xb7\xcd\x3e\x0d\x2b\xfc\x35\xbf\xf2\x60\x53\x35\x15\x2f\xe5\x89\x46\x8a\x1e\x58\x83\x93\xba\x56\x67\xc2\x15\xa0\x84\x98\x75\xa8\xfd\x84\xb9\x31\x5c\x3e\x38\x22\x6b\xaf\x69\x67\xed\x9f\x9c\xfe\xd6\x8a\xf4\xaf\x0c\x6b\xbc\xa9\x92\x74\x58\x3d\xdc\xce\x76\x87\xac\x84\x1e\xa0\x8c\xd2\xbf\x28\x33\x05\xad\x89\xf5\x57\x36\x7e\xe1\x00\xab\x72\x53\xbf\x62\x76\xec\x2e\xe4\x93\xd4\x2f\xa0\x4c\x74\xe2\xca\xfd\x4e\x28\xae\x81\x48\x52\x5f\xfb\x92\xd5\x8c\xe2\x90\xff\x71\xb4\x44\xce\xc5\x4d\x46\x97\xfc\x2c\x0b\xca\x47\x3e\xb2\x17\x32\xd7\x0f\x53\x8d\xdf\x5f\xd2\x89\x4e\x88\x2b\xb2\xd8\x72\x97\xc1\x85\xc6\xa9\xab\x34\x9f\x4b\xf0\xd4\xe7\x6b\xab\x35\xc7\x96\x80\x85\xaa\x40\x9b\x19\x16\x31\xab\xc8\xa7\x40\xef\xca\x94\x76\xaa\x7c\x9d\xed\xc1\x39\xbb\x80\x04\x50\x70\x0a\xb5\xeb\x7b\xd7\x9f\xe9\x77\x9c\x6d\xa3\x5e\xce\x9b\xde\xf1\xf8\xf0\xaf\xae\xe6\x98\x60\x7a\xf1\xbd\xd1\x3b\x71\xbc\xa9\x94\x74\x01\xaa
\x30\x9b\x29\xbf\x51\x2a\xb2\xfe\x83\x01\xc5\xac\xe0\x12\xa4\xd8\x08\xa3\x7b\x29\x39\x6f\x54\xb5\x5b\x36\x63\x60\xcd\xd5\xd2\xb1\x5a\xa1\x97\x22\x83\x95\x62\xd4\xa1\xe5\xd6\x5c\xe6\xa9\xaf\x19\xb7\xb0\x27\xc6\x87\x20\xca\x17\xce\xea\x61\x54\x26\xe7\x74\xdd\xf5\x65\x58\xf9\x04\x09\x93\x83\xa4\x45\xe5\x59\x96\x29\x2a\x72\xf2\x16\x8e\x10\x3f\xb0\x34\xa8\xa0\x60\x60\x82\xb3\xb3\xb7\xc6\x8d\x86\xfe\xb5\x8d\x0a\x8b\xf5\x26\xa0\xbe\x2f\x6f\x61\xe5\xef\x6f\x75\x24\xcc\xb0\x7a\x18\xfe\xe7\xa8\x0d\x60\x50\x56\x0e\x84\x08\x43\x89\x03\x38\x83\x65\x1f\x7e\x7d\x62\x79\x19\x1b\x7f\xfd\x7e\x90\x3d\x6a\x76\x7c\xc0\xcd\xf5\x46\xc6\xdf\x93\x37\x50\x84\x1e\xc1\x02\x2c\xbb\xfe\x80\xbc\x87\x2f\x8b\x5e\x8d\x99\xc5\x58\xef\xc8\xb3\x8b\xc2\x71\x75\x71\x68\xd9\xf1\x00\x59\x73\x8d\xe6\x4f\x72\x7d\xa0\xc1\x8d\xaa\xbb\x49\x5c\x9a\x3b\xed\x5f\x54\x74\x9d\x9e\x67\xee\xdb\x49\xf5\x57\xa9\xf1\x9a\xa8\xb7\xee\x5b\x7f\x76\xda\x7f\xde\x69\xe6\xa6\x99\x9b\x05\xdf\xfe\x70\x0f\xba\x2c\x50\xde\x75\xdd\xc7\x70\x63\xbb\xf7\xb3\xd4\x39\xc0\xe9\xf8\x4a\x97\xa4\x06\xa3\x90\xd2\x94\x0a\x31\xeb\x76\xd4\x85\x20\xf5\x8c\x4d\xaa\xee\xb4\x4a\x74\x6d\x68\x17\x4d\xdd\x3f\x5a\x50\x48\x23\xf8\x18\xb6\x95\xb0\x89\x88\x60\x41\x19\xa5\x80\xc4\x9e\x74\xd0\xef\xcd\x23\x6c\x46\x98\x04\x91\xa6\xb8\x4b\xed\x2a\x4b\x11\xb1\xd7\x72\xdb\xa0\x47\x7a\x88\xcb\x42\x83\x07\x14\xdd\x84\x02\x5d\x68\xa2\x79\x8c\x0b\x6c\xcc\x49\x9d\xfb\x2f\x66\xa8\x82\x93\xde\x9c\x7c\x52\x17\x16\x77\xa5\x72\xb8\x5f\x16\x93\x9c\xec\x54\x6f\x46\x86\x02\x81\x43\x90\x5b\x6e\x36\xe0\x74\x1d\xb9\x56\x07\x24\x98\xce\x27\xc7\x01\xa2\x7f\xa9\xd2\x12\x29\x18\x9e\x7f\x83\x9d\x18\x10\x1f\x20\xc9\xab\x25\x20\x36\xa2\xc5\x0f\x5d\x4f\xd5\x14\x47\xcd\x89\xb6\x8b\x5e\xa0\x94\x16\x61\xc8\x29\x16\x54\x87\x2f\x9d\x58\xde\x29\xfb\x40\x10\xdb\x65\xc0\x09\x0b\x7a\xc1\x99\x48\x5b\x58\x3d\x02\xd9\x45\x65\xe0\xb9\x1d\x51\xc3\x29\xaf\x27\xd4\xf0\x8e\x8e\x12\x6b\xb1\xd6\x8f\x08\x92\x6a\x2f\x42\x23\xb5\xa3\xef\x3c\xa7\xb0\xe4\x1c\xab\x0c\x94\xd9\x82\xbc\x4a\xf7\x38\xea\xfa\x2b\x9e\xb0\xcd\xdf\xde\x9d\xe5\x6b\x0d\x2b\x22\xf4\x01\x96\xce\x90\xdf\x17\xe8\x6f\xff\x7b\xb1\x2f\x31\x39\xf3\x76\x12\x89\xd4\xc5\x6d\xef\x3e\x3d\x44\x43\x21\x1f\x1a\x2f\x9f\xad\xf6\x41\x95\xb1\x6d\x83\xdc\x53\xa2\x59\x50\xd8\x7b\x66\x73\x1e\xcb\xa7\x02\xe1\xe5\xc6\x22\xd5\xd6\x9c\x2d\xce\xb1\x2b\xf4\xa1\xbc\xa8\xb3\xd6\x9b\x0d\xff\x1a\xa1\x36\x78\xe6\x6e\xc9\x65\x67\xd7\xb3\x65\xa3\x12\x93\x83\x84\x68\x6f\xb9\xfd\x4c\xe1\x1b\xdf\x8c\xd2\xc1\x37\xea\x39\x34\xb2\xed\x6b\x30\xc7\x23\xe0\xc7\x32\x1f\x86\x34\x0b\xff\x4b\x16\x1a\xf5\x76\xa9\xa3\x11\x67\x1d\x6c\x66\x81\x20\x1b\x71\xad\xfe\x14\xf0\xe3\x8c\x9f\x02\xa4\x99\x5d\x57\x04\x9c\x14\x3e\xc7\x63\x16\x3e\xe8\xb3\x95\x31\x8d\x7e\x9a\xd5\xfb\xc2\x01\xcb\x16\xec\xf6\xed\xdb\x69\x85\x0e\xa4\x65\xf0\xc9\x67\x61\x5a\x3d\xd6\xc9\x70\x79\xf4\xfa\x3e\x59\xa6\xd6\x8e\x7b\xc2\x02\xa6\x7f\x7f\xe0\x09\xa4\x97\x67\x8a\xa2\xf5\x46\x5d\x5e\x6b\x31\xca\xef\x58\x90\x62\x58\x07\xa0\xa3\xc0\x66\x5f\x4a\x49\x90\xe4\xfb\x9a\x8d\xd1\x37\x25\xfc\x81\xc1\x18\xa2\x7f\x69\x5f\x6e\x12\x12\x99\x2e\x69\xb3\x47\x82\x86\x7e\x6e\xf5\xeb\x78\x83\x23\x3e\xc7\x40\x36\x2a\xef\xf4\xeb\x32\xb3\xa9\x8b\x2e\x6d\x01\x38\x55\xf1\x1a\x30\x28\xce\xaa\xb4\xfa\x0d\x96\x0b\x9e\xbb\xc0\x52\x96\x66\x97\xa7\x63\xe7\x40\x69\xbb\x6a\x94\xa2\x54\xae\xcd\x52\x1e\x55\xb7\x10\x06\xeb\x6b\xb0\xb7\xe1\x05\x8b\x22\xf5\xae\x9a\xb4\x1a\x66\x23\x07\x09\xf3\x68\x64\x94\x92\x6c\x39\x44\x34\x34\xd6\x2e\x3b\x15\xb4\xec\x7c\x82\x76\xe9\x98\xdc\xa3\x76\x3f\xb0\x24\xdf\x2a\x13\x53\x23\x9f\xa5\x48\x44\x98\xc2\x84\xa0\x9c\x82\x29\x52\x1a\x55\xd5\xb2\x36\x45\x5
5\x2b\x5a\x0b\x86\xe5\x6c\x71\x2c\x01\xd0\xca\x52\x4a\x65\xb1\x14\x71\xb5\x66\xde\xce\x8a\x57\x2a\x2b\xa6\xab\x58\x85\x83\xb2\x16\x37\x5e\x49\x52\xb4\x2a\x57\x86\x0a\x70\x5f\x74\xc8\xe7\xd7\x75\xe7\xea\xc4\x6a\xa5\xac\x62\x92\xdc\x9c\x2f\x19\x76\xc5\xa8\x43\x45\xc7\x0c\x8b\xff\xf7\x55\x42\x6a\x6b\x85\x40\x48\xde\x98\xb1\x54\xea\xcf\xea\x22\xb9\x1f\x1f\xa3\xfe\xca\x72\x0a\x62\x66\x28\x98\x8e\xf1\x12\x47\xd5\x0c\x13\x56\xa1\x99\x97\xb2\x1c\x9b\xf8\x4a\xa4\xe4\xb9\xb4\x34\x31\xf3\x2a\x46\xc1\xe8\x74\xd5\xd9\x22\x6a\x49\x7f\x01\x13\x5d\x40\xa5\x89\xfb\xa8\x40\x5d\x89\xaa\xc9\x50\xea\x93\x9a\xc7\x3d\xd3\xfe\x49\xc3\x70\xb3\x66\x99\x2a\x44\xf1\xb1\xc9\x66\x01\x98\x64\x50\x26\x73\x6f\x7c\xcf\xe8\x1f\x62\xeb\x1f\x1e\xf0\x26\x85\x6a\x62\x87\xd9\xbf\x4d\x4e\xfb\xae\x1a\xd0\x33\xf9\xb2\xd2\x52\x5b\xca\x77\x84\x48\x42\x36\xc8\x51\x3a\x3e\x7c\x4f\x29\x7c\x63\xf7\xf4\x60\x54\x4d\x59\xde\x10\x7d\xfe\xb1\x82\xca\xc0\xdd\xa6\xf9\xb2\xda\x99\x62\x8c\x7e\x6e\x23\xe9\x2f\x63\x34\xf9\x80\x72\x7a\xb4\xb5\x92\x04\x97\xd1\x7c\xcc\xb2\xce\x4d\x53\xd8\x9a\x46\x97\x08\xe1\x51\xbb\x74\x4d\xb0\x31\x75\xcb\x6e\xeb\xef\xa7\x04\x9b\x2e\x1f\x3d\xac\x0b\x20\x8a\xc7\x29\xfe\x0d\x50\xe0\x93\xc1\x49\x68\x2d\x38\x71\x96\x33\xfd\x4f\x94\xf5\xff\x9d\x32\x63\x5c\x1c\x65\xf7\x3f\x40\xe0\x4a\xf4\x3e\xbd\x81\x5b\xad\xbc\x11\x40\x69\xbe\x16\x66\xd4\x73\xda\x2b\x20\x69\xcb\xd2\x8c\xfc\x5b\x8d\x1c\xd3\x0b\xfe\xc9\xcc\x7b\xbc\xf1\x42\xfc\xc1\x18\x19\x9c\x5d\x80\x06\xdf\xe1\x9d\xa6\x27\x04\xeb\x72\xe9\x2d\x1c\x1b\x07\xb9\x75\xae\x8f\xcf\x0e\x55\x43\x08\xb6\xf4\x57\xac\x3c\x6e\x95\xac\xcc\xb8\xd9\x22\x97\xf3\xc3\x05\x53\xb5\xe2\x51\x1d\x43\xe0\xbe\x60\xfe\x24\xba\x98\x5d\x94\x9f\x7c\x35\x54\x8e\x29\xa0\xec\xfc\x0a\xdc\x0b\xa1\x38\xdb\x14\xe3\x64\x32\x76\xd9\x4c\x70\xb2\xcd\xb6\x5a\x95\xde\xcb\xf5\xdf\x29\x94\x6b\xe9\xe1\x0a\xc5\x7f\x6d\xc1\x0e\xbe\xc9\x92\x77\x23\x7c\x99\xdb\xe9\x52\x28\xc6\x95\xb8\x24\x9b\x10\xcd\xce\x18\x97\xd8\x4b\x71\xf3\x3c\x6c\xc6\x88\xc2\x52\xcd\x60\x93\x05\xff\x79\x9c\x9e\xb0\x0e\xd1\x1f\xca\xef\x65\x35\x59\x7f\x6c\x5f\xa4\xa9\xb9\xf7\x42\x0d\x43\x05\x15\x7e\x9f\xe4\x0a\x51\x95\x29\xd4\x08\xc7\xea\x0c\xa5\x99\x04\x6f\xdd\x3f\x9c\xac\xd9\xce\x45\x1a\x14\x38\x94\xc9\x06\xa9\x6c\x35\x16\x74\x36\x0d\x25\x22\xd3\x2e\x9b\xc1\x79\x54\x52\xa7\x73\x14\xda\xec\x04\xd2\x52\x10\xea\x1f\x9e\x70\xb6\x4f\x61\xc9\x52\x76\x1e\x7e\x4a\x99\xa3\x9b\x93\x17\xc1\xb3\x9c\x3a\x42\x89\x9e\xe8\x0a\xf2\x6c\x79\xe6\xcc\xa9\x52\x41\xeb\x18\xa8\x5d\x82\x50\xf5\x8f\x93\xd4\xc7\xf0\x42\xd5\x0c\xf2\x53\x26\xf6\xab\xdd\xf6\x23\x5d\xbd\x1c\xef\x82\xe6\xf9\x9c\x62\xf4\x48\x5f\xa1\x59\x0c\x14\x6d\xed\xb1\x70\x9f\xaa\xb9\xa9\xa4\xfd\x26\xca\xd9\x05\xd5\xcf\x6f\x2d\x17\x1b\x97\xe6\xaa\xfa\x78\x6e\xe9\xc7\xa6\x49\x18\x1a\x23\xae\x32\x25\x6d\x83\x44\x4c\x6c\x69\x1c\xfc\xd3\xde\xf1\xf1\x59\x69\x83\x21\xfe\xe7\x69\x44\x99\xf5\xa2\x71\x47\x53\x56\xaf\xd8\x75\x2d\x67\xcc\x8c\x13\xd1\xef\x91\x9f\xbd\xee\x52\xf7\xe6\x81\xa6\xcd\x23\xb8\xf4\x17\x3c\xa9\x52\x33\x75\x0f\xfa\x6c\xe2\x20\xed\x76\xf4\x78\xd8\x28\x10\xe4\x51\x6f\x58\x6e\x02\x26\xdb\xd3\x7f\x40\x86\x0a\x76\xb5\xa5\x1e\x43\x64\x87\xd4\x67\x3d\x1a\x78\xd2\x92\xa5\x3f\x25\xb2\x71\x2c\xa8\x65\xd0\x1f\x96\x3f\x07\x37\xd8\xa5\x1c\x09\xda\xd8\x82\x3a\x86\xdd\x05\xde\x29\x9c\x9e\xe2\x0c\xe7\x32\xe4\xe3\xdd\xab\x2e\xcd\xda\x3a\x43\x98\xae\xd2\xb2\xff\xbd\x02\x87\xf0\xa3\x1d\xa6\xbc\xde\xd1\x44\xe0\x75\xc5\xc7\x45\x82\x43\xb9\x8a\x59\x0b\x62\x8f\xe5\xba\xf1\xa2\x81\x22\xbd\xfd\xac\x05\x19\x2d\x5a\x4d\x2d\x05\x9d\xf4\x46\x12\x69\x64\xe4\xf7\x8b\xc2\x60\x02\x57\xb4\xda\xb4\x94\x1b\x61\x4c\x66\x67\x05\x31\x45\x33\x
bb\xc7\x37\xc2\x89\x0a\x26\xb9\xd1\xae\xd5\xfd\x88\x4e\x06\x47\xe0\xe2\xf3\x16\x7e\x2b\x1f\x0d\xc4\xd1\x56\x96\xea\x3f\x0f\xef\x7a\x54\x56\x8c\x22\x62\xc8\x04\xd3\x23\xd1\xc5\xfe\xab\x91\xc2\xd5\x8d\xbf\xf1\xbc\xd0\xad\x45\x61\xc8\x09\xcf\x7f\x01\x84\x1a\x06\x0d\x8c\x51\xe7\x70\x90\xc2\xb9\xa3\x97\x81\x5e\xa4\x3f\x52\xfb\x98\x6b\x46\x43\x1f\xe1\x09\x1d\x88\x33\xaf\x18\xcf\xb2\x03\x75\xaa\xc3\x74\xc5\xbd\x97\x1a\xb2\xa6\xb9\x76\x97\x6f\x09\x79\xa6\xa7\xc8\xaa\xf1\xc6\xd3\xa7\xfe\x7c\x65\xcc\xfe\x06\x87\xfb\x83\x84\x45\x8e\x41\x28\xe1\x60\xbe\xc8\xb7\xff\x00\xe3\x4e\xd9\x64\xd4\x2a\xec\x95\xd1\x6c\x8c\xc7\x48\x38\x28\xc5\xe7\x5f\x24\xa5\xa1\x0c\xbd\xa9\xf2\x6c\xc7\x29\xaa\xc8\x32\x5f\xbb\x32\x34\xe3\xb4\xb2\x4c\x91\x33\x2e\xbf\x09\x32\x4e\xaf\xca\x8f\xd3\x8d\x46\x94\x4b\xa4\xe3\x2b\xd3\x3e\x58\xa3\x90\xec\x57\x24\x45\x87\x55\x8c\xf7\x48\x49\xa7\x59\x3b\x9d\x55\x63\xd3\x4e\x61\x97\x50\x46\x51\x4e\xce\x86\x49\xed\xb9\x42\x96\xa0\xdd\x26\xb0\x70\x3b\x04\xab\x2c\x92\xff\x3b\x43\x64\x87\xe8\x0b\xb1\xdc\x60\xe3\xa6\x3b\xb1\x37\x8a\x2f\x2a\xb2\x03\x5b\x06\x5d\x9b\xc3\x8a\x3e\xb4\x5d\xdc\xb6\x70\xf3\x03\x4e\xba\x33\xbc\xb2\x4d\xa6\x50\x82\x96\xba\xeb\x43\xa0\x12\x9b\x90\x77\x7a\xab\xc5\x6a\xc0\xab\xb0\x6a\x52\xe5\x8d\xab\x0f\xfd\x21\x1f\x34\x59\x7b\x18\x00\xae\x56\x5f\x20\xe0\x2d\x9d\x48\xec\x10\xce\x5a\x4e\x83\x7b\xda\x9d\x4e\x51\xf3\x25\xcb\xf0\xdf\xc1\x8b\x0a\xa8\xaa\xb9\xab\x27\xce\x49\x45\xd9\x96\x4d\x67\x37\x56\x3d\x9f\xff\x9e\x4c\x56\x6b\x46\x4d\xa8\x1c\x4a\x52\x38\x1a\xe1\x05\x80\x60\x66\x66\x35\xba\x50\x14\x1c\x5b\xd9\xa1\xed\x8b\x02\xd5\x70\x43\x7a\xa9\x5f\x0d\xaf\x8b\x66\xd4\xb5\xc0\x42\xd2\x32\xb1\xb4\x79\xe3\x99\xca\x46\x41\x02\x3e\xd7\xc2\xe5\xb7\x3e\xfa\x1a\x1e\xc0\xe2\x56\x33\x17\x0a\x8b\xf2\xd3\x39\xd6\x5f\xac\x49\xb4\x70\xf7\xde\xbd\xef\x54\xb6\x3b\x9f\x58\x78\x10\x05\xa8\xfd\x54\x9a\xd6\x47\x6a\xfa\xfa\x07\x88\xc0\xc3\xe3\x62\xaa\xe6\x1a\x53\x3f\x3e\x4f\xa5\x63\x74\x2f\xcc\x0c\x9f\x6b\xb8\xc7\xf0\xf2\xc6\x38\xa7\xb0\x89\x67\xa0\xef\xe5\x94\x11\xec\xaf\x3f\xc8\x3f\xcd\xae\x2d\x5b\x17\x56\xb0\x98\xce\x35\x1d\xbe\xe1\x39\x29\x69\xe0\xf1\xb0\x2f\x36\xf5\x80\x86\x66\xcb\x57\x2b\xdb\xf2\xf7\x65\x69\xf9\x06\x9e\x25\x3c\x36\x6c\x57\xdf\xa5\x90\xd2\x45\xaa\xa9\x58\xdf\x0d\xc3\x2d\x8c\xc3\x52\xba\xfa\x4f\xd5\xef\xa0\xb5\xe7\x45\xfa\x0f\x4b\xd4\x5d\xa7\x1d\xd2\x86\x66\x17\xa9\x63\xb9\x4a\x0c\xbc\xfe\x87\x28\x04\xcd\xe0\x32\x5e\x84\xd9\x20\x39\xb8\x93\x83\x99\x25\xc4\x09\xe7\xd2\x6c\xa3\xd5\x1e\x65\xd9\xea\xff\x8c\x35\xbe\xe6\xf2\x16\xf6\x48\x05\x8e\x3c\xde\x03\xa1\x24\xeb\xc3\x49\x76\x5b\xd5\xad\x61\xa6\xee\xa3\xbc\xed\x2a\xe5\x96\x04\x5c\xdf\xef\x9a\x36\x5e\x3f\xc0\x04\x67\xe6\x1e\x65\x5b\x69\x1f\xae\xe3\xa1\x76\xf2\x21\xfe\x43\x29\x96\x89\x78\xec\x67\x18\xe4\x39\x28\x38\xf8\xb3\x6d\xcb\x8b\x45\x07\xcd\x15\x3b\x45\x35\xaa\x6a\xcb\x72\x1a\xbf\xc5\x70\x29\xe3\xe1\x2b\xd5\xe2\x0c\x95\x32\xed\xab\x5c\x88\x2a\xd2\x0e\xbe\xae\xac\xf1\xd6\xc0\xd0\x11\x8d\x59\xad\x3a\xfa\xf0\xc3\xab\x29\x4c\x70\xe8\x18\xeb\x48\xe5\x20\x74\x1a\x9b\xef\x83\x7b\xea\x26\x02\xed\xd4\xcf\x8b\xeb\xd2\x7b\xec\x00\x1c\xa5\xb1\xdb\xcf\x43\x61\x7c\xcb\xb8\xae\xae\x52\xf5\xd9\x59\xa2\xfa\xc3\xc2\xba\x9f\xb7\x65\x5e\x36\x81\x67\x87\x4d\x98\x95\x84\xf2\xff\x00\xdd\x48\x4d\xf2\x7d\xac\xd1\x29\x28\x59\x34\x00\x98\x1f\xb6\x1e\x11\x47\x0d\xf8\xb4\x7f\xc0\x4b\x27\xec\x8b\xcb\x12\x67\x15\x62\xd9\x41\xd6\xd5\x61\xef\xcb\xf6\xdb\xdf\x5a\x1b\xfe\x5b\x20\xd5\xc2\x31\xd8\x2e\x7f\x28\x0a\x67\x7d\x51\x65\x56\x34\x62\x7a\xd8\xa2\x54\xc3\x60\xca\x5e\x41\xc6\xf8\x4a\xd9\x83\xda\xea\x6a\x56\x4a\x5d\x5d\x2f\x5b\x6e\x18\xee\xff\x7b\x53\x89\xbb\x4b\
x47\x5e\x76\x76\xae\x2d\x80\x9d\x2a\xd2\xe2\x26\x06\x2d\x53\x92\xa2\xca\x3d\xf2\x97\x6d\x48\x3c\xb7\xec\x0b\x52\xd4\xdd\x40\x58\xe5\xb0\x1d\x72\xc3\xde\x63\x25\x24\x19\xc4\xec\x45\xd5\xfa\x76\x85\x81\x8b\x60\x17\xab\x12\x50\xd7\xa1\x42\xb1\xb1\xb0\x56\x40\x75\x2c\x2f\x6d\xb0\x5e\x69\x1e\x7c\xb3\x65\x41\x49\x53\x18\xa2\x90\x31\x99\xaa\x07\x61\x2b\x6c\x19\x6e\x31\xe4\xbb\x82\x94\x6b\x95\xdf\x9e\xec\x45\x28\x80\x0b\x2f\xfd\x95\xb5\x65\x52\xae\x35\x8b\xb9\xf9\x67\xaa\x2e\x73\x5f\x55\xfa\x02\x43\x3a\xf5\x1a\x60\xfe\x05\x1a\xdb\x9b\xc9\x7a\xfb\x46\x60\x75\xd0\xf6\x9d\x75\x8d\xa7\x73\x9d\x8f\x10\x9d\x40\xf9\xeb\xfa\x95\xb1\xcc\x16\x07\xac\xbd\xce\xe4\x15\xbd\xbe\x87\x06\xc1\xf5\x85\x52\x4f\xb5\xaf\x76\x8f\xa8\x78\xa7\xf8\xec\x04\x4e\x73\x55\x26\x56\x6a\x5f\x39\x3f\x5d\xfc\x36\xbd\x1b\x9f\xcb\x67\x96\xa7\xfb\x12\x7d\x9c\x32\xe8\xb1\xc2\xea\x4e\x32\xbe\x96\xf1\x99\xa7\x84\xef\x70\xa9\xce\x0a\x18\x09\x53\x3c\x2b\xb6\x80\x76\x39\xd2\x46\xcc\xb3\xc5\x56\xda\xff\x4c\xc4\x39\xab\x94\x1d\x27\xfb\x3a\x8f\xbd\xca\x5c\xb8\xe6\xec\xa6\xd3\x96\x12\x26\x2a\xde\x09\xf2\x28\x8f\xbb\x6a\x7e\x34\xc3\x5f\x4e\x95\x08\xf6\xd9\x62\x3d\xcf\xb0\x8f\x77\x91\x04\xef\x93\x6a\x77\x3f\x97\x22\xc8\x90\xda\xfc\xcf\xff\xee\x74\xfe\x9e\xc1\x25\x9e\x62\x37\x55\x06\x69\xb7\x88\xf1\x10\xac\x69\x6e\xc4\x45\x4b\x9b\xca\xea\x31\x3a\xfb\xf9\x5e\xbe\x25\x9e\x45\x53\xef\xd9\x53\xf0\xa7\xee\x4a\xdb\xe2\xc8\x0f\xf0\xa5\x26\x8e\xf6\xbf\x49\x91\x83\xdb\xcb\xd1\x1f\x3c\x6b\xa0\x72\xc4\x2d\x92\x3d\x5c\x90\xbe\x32\xec\x09\x92\xeb\x51\xf4\x75\x4d\x72\x45\xeb\x77\xbe\x29\x18\xba\xf9\xcf\x66\xb5\x94\xba\x17\x60\x39\x54\x51\x23\x0a\xa6\x74\x39\x07\xae\xb2\x85\x37\x7d\xb6\xec\x35\xc2\xeb\x7a\x11\x15\x76\xac\x62\x63\x54\xab\xc3\xf4\x69\x7b\xf5\xea\x1f\x4c\x77\xe4\x00\x09\x2c\x2a\x3e\x4f\xd0\xaa\x3d\x2e\x9b\x05\x49\xb3\x91\x24\xe1\x76\x53\x19\x46\x79\x09\xd1\xb9\x9d\x00\x55\xa4\xda\x62\xa5\x9f\xec\x94\xad\x13\xcb\xc6\x28\xa0\x0d\xdd\x1c\xf3\x11\xf5\xe9\x32\x24\xdd\x37\xdd\x0d\x6c\xb1\xa2\xe1\xed\x50\xb6\xd2\x8f\x7e\x14\x89\x76\x93\xfb\xb5\x43\x4d\x01\x8c\x56\x0b\x39\x1e\xcc\x22\x6b\x0f\x99\xa5\x2d\xef\xce\x11\x30\x02\x8e\x4c\x10\x68\xf3\xa9\x6a\xb7\x5c\x90\x0f\xf5\x26\x08\x8e\xa9\xf6\x2c\x1b\x00\x27\x98\xd9\x41\x50\xb5\xea\x69\xf6\xc3\xbd\x6c\x8b\xcd\xc4\xc5\xde\x77\x15\xe7\x02\x60\x95\x8a\x5b\x1a\xcb\xeb\xe9\x7d\xfd\x8c\xf7\xe0\x4e\xdc\x23\x07\x1b\x01\x81\xf7\xd3\x05\x71\xb5\xde\xbd\xa7\xb2\xff\xe0\x3c\x56\x2a\xd8\x0b\xaf\xc2\xc1\xad\xba\x6b\xeb\xcf\xda\xac\x22\x49\xf0\xe4\xbc\x4d\xad\x64\x04\x0a\xf6\x1b\x3c\x1e\x84\x54\xc2\x03\x5d\x77\xef\xab\x6e\x20\xcb\x73\x9d\x2c\x08\x54\x70\x27\xf2\x84\xac\xaf\xb4\xdc\xab\x7c\xf5\x5f\xb6\x6a\x20\x70\xbf\x5d\x32\x13\x0d\x0d\x92\x2d\xca\xe6\x00\x42\xcb\xa4\x14\x05\x26\x7e\x04\xd7\xbc\x12\x4a\xbe\x97\xc0\x2d\xcb\x10\x53\x9a\x8b\xc9\x2c\xe1\x12\x34\x95\x6f\x9b\xa6\x64\xda\xb9\x08\xa3\x1c\x5f\xe7\x64\xc3\xc1\x9d\xfb\x82\x1a\x5c\xb2\x47\x01\x0c\x51\x95\xf3\x0f\x41\xbd\x85\xfa\xb5\x9a\xba\x6c\x87\x3c\x29\x19\x50\x38\xe6\x8a\xb6\x5c\xc7\x2a\x62\xb1\x02\x04\x03\x57\x5b\x2a\xab\x28\x97\xc8\x42\x59\x46\x15\xb9\xf7\xfe\x7a\x8d\xb2\xc0\xff\x69\x00\x87\x5c\x43\xad\x76\xe1\x10\x9c\xfa\xe9\x30\x5f\xb1\xe4\xfa\x53\xa8\xe1\xb9\x4d\xaa\x6f\xa7\x55\xda\xb3\xd6\xe5\xdd\xf6\x69\x64\x10\x5d\x5e\x15\x24\x69\xfe\x0d\x32\xa0\xff\xb1\xbb\x02\xfd\xc8\x5a\x2b\x62\xf3\x46\x4a\x71\xb5\x5c\xe9\x20\x6d\xc0\xa9\xe7\xea\x5c\xee\x0c\x7d\x19\x07\x51\xe5\xe5\xe5\x65\x91\xaa\x94\x97\x31\xc4\x67\xaa\x78\xeb\xe1\xfe\x8d\xbc\xc7\x2b\x0e\x4b\x46\x70\x8c\xe6\x5c\x31\x58\x18\xc6\x7c\x15\xca\xa5\xc6\x0b\x25\x78\x3b\x55\xaf\xc4\x71\xf3\xe7\x78\x9c\x11\x6b\xdd
\x3e\xd4\x87\x2c\x17\xca\x2b\x4d\x9b\x90\xd5\x5e\x31\x13\x69\xd5\x61\xa9\xb3\xb0\xa0\xfe\x8b\x62\x17\xc5\x68\x8e\x22\x16\x65\xea\xe0\xaf\x0f\x29\x43\x0f\xbb\x60\x32\xac\xc5\x58\xeb\x96\x7d\x97\x56\x2a\x0e\x81\xad\xaa\xfe\xe3\x85\xad\x52\x32\xaa\x55\x15\x65\x4d\xd9\x78\xc9\xab\x66\xfa\x49\x8b\x00\x98\xe6\x6c\x82\x01\x24\x2b\x0d\x95\xb2\x58\x9e\x45\x20\x1e\xe7\xb0\x13\x21\x99\xb8\xaa\x36\xcb\xa2\x91\x35\x30\x7d\x38\x08\xc5\x9d\x63\x1b\x49\x73\x35\x8a\xdb\x2a\x86\xb9\xe9\xc7\x1f\x3f\x4c\xc4\xc0\x26\x75\xa4\x1b\xf9\xce\xa4\x2b\xbc\x14\xae\xac\x21\x4b\xbd\xba\x96\x35\x93\x55\xde\x11\x7e\xde\xbb\x8e\x84\x21\x94\xdd\xc2\xac\x3b\x1e\x36\x5c\x48\x7b\x2b\x52\xfd\x4d\x60\xae\xcc\x20\x05\xc6\x96\x1d\x09\xb8\x22\xa2\x19\xc3\x88\x19\xb7\x8a\x63\x81\x64\xd5\xc7\xdc\x46\xc0\x0c\x5c\x05\xab\x53\x28\x19\x6e\xd9\xe8\x2e\xf9\x7e\xa4\xa9\x7c\x1c\xfe\x93\xa3\xbe\xed\x34\xba\x20\x45\xfe\x55\x2d\x42\x63\x1a\x2e\xd7\x81\x5c\x1e\x9b\xdd\xe9\xa8\xa5\xf2\x71\x95\x96\x52\x22\xd3\x26\xf1\x3f\x8c\x90\xbd\xc8\xef\xe2\x93\x5d\x7f\x7c\x42\xfa\x86\x0c\x75\xd5\xab\x9f\xa5\x7b\x70\xe3\x40\xff\xf6\xd8\x45\x61\xa2\xdc\xcb\x4b\xe3\xd7\x79\x8f\xb1\x1d\x78\x3f\x04\xfb\x22\xf7\xc6\x8e\xb5\x6f\x4a\xc8\xb8\xe7\x29\xe5\xd3\xf1\xe7\x82\x76\xb2\xc4\x97\xee\xee\xc6\xa2\x5f\x60\x5b\x39\x96\xb1\xa9\x5b\x6c\x86\x3c\xbb\x9e\x1d\x9c\x3c\x63\xd7\x9b\x5b\x05\x72\x75\x50\xca\x5a\x50\xd7\xf6\x60\xad\x9c\x4b\xdd\x01\x62\x23\x56\xce\xa6\xfd\x76\x99\x49\x46\xed\xaa\x99\x29\x68\xc4\x30\x6a\xc2\x08\x95\x96\x57\xc3\xe3\xc3\x4d\xd9\x71\x79\xc9\x94\x2a\x72\x25\xb6\xdc\xeb\x60\xb1\xf5\xf7\x81\x98\x3c\xac\x7e\xba\xdf\x49\xd0\x44\x6e\x86\x6a\xac\x93\x6c\xb6\xd3\xd7\xec\x09\x9e\x75\xb8\xcd\xa3\xe0\xd7\xae\x11\x88\x3b\x86\xb1\x4d\xa9\x67\x75\x63\xa3\x2d\xb2\xa2\x20\x11\x27\x1a\x8b\x33\x5b\x34\x5f\x39\x54\xea\x4c\x04\x89\xd8\x24\xf5\xa3\xef\xe3\xf4\x3f\xc0\x92\xe3\x7d\xe4\x06\xb3\x6e\xfa\x48\x37\xe4\x92\xf7\x03\xbf\x7d\xe3\x8d\x6f\xb3\x51\xe9\xa7\x19\x3d\xc8\xfd\x0d\xa1\x1c\x47\x27\xc8\x28\x35\x85\x58\x51\x3b\x39\x64\x94\x32\xe1\x0f\x34\xb7\xb2\x89\x97\xfe\x3f\x77\xf8\xa0\x45\xf5\x7f\x63\xc5\x5d\x56\x66\x95\xd9\x65\xaa\x08\x84\x17\x06\x01\xc6\x4b\x0f\xe4\x08\x06\xe7\x18\xdd\x01\x39\x4d\x1e\xfd\x3d\x3c\x63\xca\x46\x71\x7f\x86\x54\x55\x85\x23\x84\xb4\x49\xfe\x96\xe8\xde\x84\x70\x96\x2e\x3d\x1e\x67\x27\x98\x97\x2b\x67\x44\x9b\xf5\x88\xe2\x5a\xb5\x3e\xb6\xfe\x82\xb5\x4b\x45\x7d\x11\x86\xdd\xcb\xd9\xe7\x51\x69\x1a\xe0\x1e\x4d\xab\xf0\xdc\x49\x7e\xc1\xbb\x9c\x64\x11\x5c\x1f\x9f\x67\x0c\xf4\x09\x9a\x55\x31\x7f\x47\xee\x85\x19\x71\xb9\xf7\x7d\x3c\x0e\x70\x81\x40\xe0\xd3\x31\x30\x0e\x34\x71\x55\x2d\xdc\x18\xb3\xb6\xe0\x65\x34\x35\x55\x64\xcf\x07\x0d\x63\x77\x46\x86\x8b\xbc\x8c\x1e\x4e\x18\x73\x52\xc2\x1a\x6a\x56\xf5\x60\x09\x23\x2f\xab\x32\x9d\xca\x7e\x40\x81\x43\xb8\x6c\x6e\xc2\x92\x3b\x15\x19\x28\xb9\xcb\x59\x9c\x5d\x74\xdf\xfb\x27\xe1\x6d\xbe\xe1\x90\xc8\x6d\x88\xcc\xbe\x85\x5d\x0f\x47\x39\x7c\x4a\xca\xc8\xa4\x39\xca\x58\xed\xd3\x1f\xd1\x1b\xe5\x07\xeb\xa2\x06\x4f\x2e\x9c\x0f\x47\x50\xa0\x14\x2f\xa5\x75\xa5\x3d\x53\xc9\xfc\xe5\xf7\x53\x33\x96\x15\xce\xaf\x90\x45\xda\xf6\xf3\x76\x3d\x8f\x2f\x6e\xf3\xd3\xb1\xf9\xa0\x33\xeb\xef\xf7\xf2\xb4\x68\xf4\xe3\xab\x28\x57\x65\x3d\x42\x27\x5a\xa5\x36\x64\x0f\xca\xcb\x8c\x28\xcd\xc5\x9f\x19\xf3\xfb\xe2\x2a\x9c\xe9\x00\x29\x44\x18\x5b\xaa\x99\x99\x9e\xcd\x24\x65\x08\x25\x2b\xba\x69\x66\x5c\xac\xa7\x95\xa7\xb0\x0d\x5d\xda\xa8\x9d\xb3\xce\x68\x85\xf0\xc7\x03\xea\xf5\x85\x70\xcd\xb1\x7f\xbb\xe6\x5e\xdd\x5b\x5d\xd1\xfa\xc0\x8f\xb3\xf3\x14\x5e\x29\x03\xd5\x1f\x10\x03\xa0\x57\x99\xe8\x1f\xc0\x20\x80\xb2\x33\xe4\xbf\xd1\x1f\xe7\x14\x1e\xf
f\x07\xa9\xf8\x86\xa4\x37\xaa\xa4\x83\x3a\x4d\x63\xf7\x9a\x26\xe0\xe8\x01\x61\xdd\x7a\x78\xa4\x39\xc9\xd4\x66\x85\x07\xb1\x8d\xff\x71\x41\x5b\x54\xbb\xd9\xbf\x92\x11\x01\xf6\x59\x5d\x6d\x38\xbb\xc6\x36\x1b\xf3\xeb\x5f\x9f\x6c\xa0\x8f\xa3\x32\x15\xb2\xce\x0d\xc1\x58\x46\xb6\x86\x56\xf6\xf2\x55\x71\xc7\x98\x61\xdf\xb8\x54\x21\x6e\xd9\x2c\xc9\xb4\x74\x8d\xce\x30\x62\x64\x28\xe9\x05\xc0\xbd\xbb\x11\x60\x73\x79\xf5\x0a\xc8\x41\xc1\xd5\xfe\xe1\x11\x24\x04\x5b\xed\x18\x74\x2e\x6e\x3d\x94\x70\xb7\xfc\x68\x1e\x70\xc6\x35\x1f\x74\x21\x89\x4b\xc0\xb9\x6c\xe1\x16\x6d\xb3\xe6\x37\x57\xe2\xf9\x74\x70\xe7\x69\x44\xbc\xfa\xa5\x03\x2b\x0f\x98\x9c\xbd\x5a\x33\xa2\x29\xb7\x95\xf8\x37\x37\xc1\x8e\x4e\x35\x7c\x00\xe3\x7e\x31\x6d\xba\xac\xb5\xbd\x74\xe3\x73\x99\x65\xd4\x71\x68\xcb\xd9\x23\xd6\xe1\x1c\xf6\x4c\x88\x03\x0d\xca\xc1\x0c\x23\xc1\xa4\xd7\x4f\x7f\x56\xe6\x9e\xbb\x49\xd3\xd0\x44\x37\xd5\x22\x69\x7f\x98\x69\xbd\x9c\xa8\xc9\x20\xb1\xd1\xf1\x23\x0f\x49\x06\xa8\x9f\x5d\x51\x31\xec\x22\x70\xcf\xb6\x84\xb5\xeb\x34\x3e\x83\x03\x8d\xb9\xef\xdd\xdb\xd7\xd6\xc4\x6a\xfe\xa1\x7f\xb9\x51\x3e\xf5\x87\xbb\x88\x53\x6f\x1f\xd6\x52\x3c\xb9\x89\xf0\x59\x7e\x35\xe8\x92\x14\xbf\x98\x1b\xf8\xcf\x07\x19\xb7\x4c\xc4\x7c\xab\x4f\xe6\x24\x70\x20\x32\xf5\xd8\x2d\x97\xeb\xc3\xb3\x17\x49\xe9\x09\x83\x05\xec\x11\x7f\x54\xd3\x00\x77\x17\xbb\xba\x6f\xcc\xa9\xde\x53\x63\xbe\xe6\x18\x1d\x86\x2d\xfd\x0f\xb3\xc3\x98\xf9\x61\xc7\xb2\x53\x55\x23\x61\x29\xf5\xd8\x52\xb5\xeb\x68\x62\xec\xb5\x89\x01\x57\x63\x05\x6e\x21\xc4\xb1\xad\xac\xec\xb2\x38\x74\xe9\x2b\x3a\x61\x53\x22\x92\xbb\x15\x7b\xe8\x2b\x31\x9c\xbf\x3b\x1f\x58\xe2\xf4\x12\xb5\x9d\xf1\xf3\x85\xe1\x53\x42\x1c\x50\x3f\x1d\x79\xe2\x5f\x57\x80\xae\xb5\xc5\xe1\x06\x55\xdd\xa1\xa6\xaf\x7f\x63\x5e\xcf\x48\x36\x4d\x9a\xea\xc7\x59\xc1\x44\x1c\x4f\x64\x46\xf8\x1f\xff\xc7\xd9\x70\xfc\x37\xd2\xbd\xa9\xd4\x71\x6a\x86\x9c\xfa\x87\x1e\x20\xef\x4a\x14\x43\xd5\x8c\x62\x8b\xd0\x08\x71\x7b\x64\xf2\x72\x4f\x37\xf1\x63\xcb\xee\x7f\xc0\x38\x56\x29\x2c\x0f\xa5\x82\x3e\xd8\xcb\x60\x59\x8f\xe1\x68\xf9\xf6\xf0\x99\x97\x52\xcc\x9f\x56\x14\xbf\xa7\xa0\x82\xc3\x1f\xd6\x12\x5f\x8e\x39\xd8\xa4\xf5\x55\x9f\x6f\x18\x37\x79\x4c\x0c\x68\x4a\x08\xf5\x47\x08\xf6\x43\x7b\xf2\x21\x12\xdc\xcb\xc0\x66\xd4\x64\x35\xf1\x76\x52\x8e\xdb\x1d\x9b\x52\x26\x28\x6c\x16\x6e\xdd\xc1\x47\x37\xde\x4f\xce\xb8\xe1\xfd\x9a\x0c\x0f\x8b\xa9\xe8\x80\x94\x13\x95\x75\x28\x52\xc6\xe4\x23\x7e\xef\xcb\x95\xeb\x5e\xdd\x5d\xc1\x8d\xee\x8d\x1c\x91\xd5\x80\x74\x28\xab\x86\x9f\xf5\xf6\x4b\x2b\x2c\x21\x55\x39\xd0\x59\x02\x87\xd9\x9b\xfa\x1f\xff\x4a\x65\xc5\xbf\x71\x7f\xcc\x36\xc5\xbe\x6b\x55\xd5\xc1\xd3\x0a\x88\xb2\x77\xab\x4b\x69\xef\x1b\x71\x7a\x22\xdd\x10\x12\xea\xc1\xec\x53\x74\x3e\xd0\xe3\xa4\x59\xed\x3c\x8f\xbc\x41\x2f\x53\x0e\xb7\x73\x93\xcd\xaa\xa2\xc9\x97\x5c\x81\x20\x27\xe3\xb2\xd5\x29\x32\x9d\xcd\x04\x20\xd7\xb8\x75\x52\x61\x57\x1b\x91\x5d\x6e\x4f\xca\xe5\x02\x12\xcf\xbe\xd0\x28\x43\x36\xb6\xbf\x87\x3c\x01\x22\xe4\xf1\x19\xe0\x3f\x6d\x26\x0f\x2b\x90\xa5\x90\xbe\xb9\x25\x02\xab\xdc\xe8\x88\xac\x53\xb6\x77\x95\x86\x36\xac\x7d\x8d\x29\x98\x01\x6d\xa4\x53\x4a\xa0\xec\xe1\x7a\xc5\xc2\xf2\x93\x77\x97\xbb\x60\x19\x4e\x02\x19\xe8\x46\xe5\x36\xca\x5d\x5e\x59\xfd\xbd\x0d\x0f\x2a\x37\x46\xc4\x1c\x17\x28\xd3\x6e\x17\x69\xde\xe9\xb1\x1a\x22\x71\x2f\xee\xde\x9c\x1f\x63\xaf\xa8\x4b\xfe\x46\x57\xa8\xa0\x52\xfb\x0e\x12\x81\xb5\x4c\xe0\x88\x18\x77\x45\xec\xdd\x77\xc8\x13\x7a\x16\xc9\x30\x7d\x13\x7e\xb7\x53\xd6\xcd\x68\xe8\xb1\x9d\xc2\x2d\x90\x90\xbc\xae\xbd\x72\x0a\xcc\xc6\xe3\xc9\xfe\x76\x67\x42\xbb\x96\x72\x6b\x56\x35\x8c\xd3\xd5\x1e\xe9\x81\x86\x98\xdc\x81\x
\xf7\x46\x56\xa6\x3f\xa9\xfd\x1f\x43\xaa\x6d\x5a\x58\x2e\x1c\x76\x6f\x46\x84\xe0\x54\x35\x22\x3c\xef\x08\xf1\x39\x9b\x67\xd7\x10\x5c\xd7\x2b\x5f\x47\x89\xe3\x69\xf2\xc1\x56\x49\x2f\x11\x5a\x51\xdd\x0e\xf3\x54\x3d\x09\x77\x9c\xe0\x4e\xeb\xd9\xb1\xea\xaf\x11\x53\x5e\xd6\x2d\xc7\x97\x3f\x52\x85\x6e\x99\x7d\xa1\x7f\xfd\x0e\xca\x8d\xdd\xa4\x75\x47\xbf\x1f\xd7\x3f\x72\x77\x58\x30\x68\xba\xaf\xb1\x3b\x63\x12\x64\xf8\xcf\x52\xf9\x0c\xe3\x12\x79\x18\x56\x3a\x2e\xb0\x16\x73\x16\x12\xc3\xcd\xf7\xea\x17\x87\x43\x74\x9d\x1f\xdc\xde\x5a\x65\x99\xce\xc1\xf1\x8c\x23\x96\xfc\x32\x73\xb7\x01\x09\x6b\xab\xe0\xa3\x6c\x78\x61\x84\x6b\xe6\x72\x25\x0a\x85\xb1\xa2\x3f\x21\x60\x10\x61\xc5\x43\xbc\x09\xe5\xc9\x8e\xff\xc8\x3f\x51\x48\x90\xb8\xa5\x88\x4f\x67\x71\x1f\xfe\x8b\x1b\xb1\x71\xb7\xc6\x78\x07\xfd\xd5\xf9\x8d\xf0\xb0\xfe\x24\x0f\x58\x28\x54\x04\xb9\x73\x48\x50\xa4\x08\xc5\x1f\xb4\xd5\x7a\x64\xcd\xe5\x8f\x4c\xa5\x21\xff\x34\x56\xca\xe1\x63\x5c\xc2\x79\xd1\xad\xcb\x0b\x5a\xfe\x51\x04\x3b\x66\x2c\x36\x55\x68\xd5\x68\x6a\xcc\xa3\x6c\x1a\xe1\x81\x38\x0a\x67\x26\xaf\x00\xa6\xd6\xf4\x9d\x7d\xf4\xe3\xe2\xe0\xa1\x25\x2b\x74\x9d\xef\x70\x90\x29\xc7\x70\x02\xa3\x29\x33\x28\xef\xdb\xb2\x2c\x28\xca\x80\x80\x2d\x5a\x33\xd6\x48\x20\xe2\x25\x2c\x24\xcb\xa9\xb3\xab\xbe\x24\x62\xac\xe4\xf4\x5b\xb5\x71\x14\xdb\x92\x0f\x2c\x2b\xb5\x1e\x97\xa1\x8e\xe3\x5d\x53\xcf\x7e\x53\x55\xe3\x0d\x68\x8a\x6b\x17\x08\xa7\x75\xe2\xd7\xcc\xbe\x05\xef\x97\x63\x33\xdd\x86\x52\x69\xea\x46\x47\x47\xff\xec\x4d\xfb\x1b\xd2\x36\xe4\xcf\x14\xe8\x72\x07\x2f\x35\x8f\x85\x17\x0e\x48\x1b\xe0\xed\x14\xe1\xee\xc9\x19\xc4\x65\x60\x13\x6c\x8c\x0c\xcf\x9e\xa2\x07\xa3\x45\x28\x48\x1a\xdf\x7c\xc8\x3a\x61\x39\xb1\x54\x87\xc8\x4e\x9b\xff\xbb\x8d\x71\x35\x2e\xc5\xf3\x78\x68\x49\x02\x03\xe2\xd4\xf7\xcb\x20\x46\xe9\x35\xb3\xbb\xea\x77\xd8\x0b\x4b\xd2\x99\xd8\x02\xbe\x4f\x39\x3c\x46\xee\x98\x01\x75\x6b\x9d\xe6\x68\xc6\xe2\xee\x83\xfb\x58\xbf\xea\xbb\xff\xe9\x71\x3a\xaa\xaa\x13\xb8\xa1\xa4\xa5\xb1\xb5\xbf\xd7\x24\x0a\x8e\x2a\x1a\x7d\x58\xcb\xca\xaf\x87\x03\x62\x60\xd6\x81\x4a\xb6\x59\x91\xe9\xc4\x32\xc2\x62\x4f\x43\xd2\xd3\xe7\x1a\x1b\xbb\xa5\x70\xd3\x12\xca\x97\x45\x95\x48\x2c\xa8\x49\xef\x67\x3b\x71\x8f\x5c\x76\xfb\x49\x3c\x76\x06\xb7\x0a\x5e\x7e\x19\x09\x87\x47\x9a\xef\x4a\xe1\x55\x82\x5c\x3b\xde\x05\x25\xe5\x33\xa0\x66\x21\x4c\x82\x96\x07\x4a\xbe\x75\x02\xf5\xaa\x13\xba\x53\x2c\x29\x02\x93\xe7\xf2\x76\xa6\x7e\x77\x86\xab\x7f\xb7\x89\xaf\x03\x6b\xc7\x48\x92\x0f\xd8\x19\x5a\x4f\xce\xa6\x1d\xd2\x4f\x22\xad\x5d\x46\x4f\xe5\x66\x5b\x74\x55\x76\x5e\x1c\xbb\xf8\x79\xbb\xb2\x4a\xea\xa1\x7a\xe9\xf8\x47\xcb\xcf\x52\xf6\x40\x01\x9b\x51\x77\x39\x7e\xcd\x5d\x44\x7d\x19\x97\xa3\x3f\xfb\x55\x9d\x3c\x85\x40\xa2\x50\x75\xf6\x8e\x28\x6a\x8f\x2b\x9b\x1e\x72\x1b\xce\xc8\x97\xe0\xd2\x78\xae\x9f\x2f\x5f\xe0\xe8\x7f\x7b\x96\xf6\x2c\x5a\xca\x9f\x2d\xbc\x81\xcb\x78\xaa\x51\xfa\x7d\x82\x96\x59\xdf\x54\xd9\x59\x2a\xbd\x09\x00\xc1\xc9\xe5\xac\xfd\x2c\xc7\xc3\x59\x70\xad\xe6\x16\x04\xce\x93\x1f\xd7\x6e\x70\xe3\x94\xbf\x63\x86\x44\x30\x02\xe9\x47\x51\xc0\xf0\xe1\x95\x37\xdf\x77\x52\x5e\xa2\xf5\x33\x69\x74\xe1\x5e\xf9\x93\xc3\xfa\x10\x18\x6e\x78\x16\x9e\x6b\x7d\xc0\x89\xc5\x12\xe6\xdd\xdb\x75\x78\xef\xc7\xcb\x05\x12\xb1\x57\xd7\x41\xad\x9d\xdb\xbc\x5e\xde\x90\x15\x77\x8d\xac\x10\xe2\xb1\x9f\x1c\xfe\x8f\xe7\xa5\x9c\xf4\x81\xc6\xca\x12\x8f\x87\xd1\x3c\xdb\x2f\xb4\x20\x71\x5f\x59\x6b\xe5\xe8\xa7\x8f\x85\x16\xea\x4d\xb7\x6b\x84\xb5\xc6\xdd\x33\x5b\x02\x55\xad\xd9\x87\x93\x63\x05\xfb\x80\xad\x86\x78\x55\xe0\xea\x05\xb5\x0a\xa7\x46\x0b\xcc\xd8\xad\x20\x56\x0e\xbb\xce\x38\x6d\xfb\xdc\x6b\xa2\x74\x9b\x89\x8
5\x41\xb2\x79\xd8\x46\xae\x9d\xbc\x62\x91\x90\x64\x79\x66\xbf\xb9\x23\xdb\x96\x89\x09\x6d\x8f\x52\x54\x36\xa3\x11\xd8\x7d\x8f\x36\x08\x6c\x20\x63\x45\x84\xfd\x23\x17\xd8\xf6\xa1\x4e\x69\xb9\x1d\x06\x61\xb0\x28\xf9\x77\xe3\x15\x12\x74\x5a\x6d\xde\xd2\x99\x29\x94\x9d\x63\x38\x29\xb6\x0f\x2b\xf2\xbe\x7d\x22\xf5\xf8\x1a\x49\x55\xf6\x37\xcf\x9a\xe8\xaf\xf2\xcb\x3f\x2c\x0d\x2d\xf8\x8a\x8c\x40\xf5\x9d\xe5\x37\x33\x22\xbb\xfb\x22\x83\x7e\xb3\xac\xfb\x3a\x2f\xd3\x36\x85\x9d\x32\xe0\x5b\x0f\x10\x08\x2b\xa4\x16\x65\x09\x7f\x3d\x88\xe2\x26\xc3\xc0\x1c\x6a\x14\xb2\xce\xb2\x89\x2e\x9b\x10\x8f\xcb\x5a\xad\x3f\x83\x59\xde\xb3\x5b\xea\x04\xa7\xfd\x1a\x1a\xb2\xdd\x1b\x69\xed\xe0\x22\x99\xce\x72\x35\x58\xf8\xb9\x94\x85\x4c\xa2\xc7\xb3\x94\x33\x82\x54\xcf\xee\xdd\x8f\xbb\x05\xa1\xa1\x1d\x68\x4e\x39\x95\xc0\x56\x72\x89\xae\x16\xd3\xff\x27\xf9\x48\x52\x65\x20\x43\xc8\x13\x17\x29\x5f\x3f\x1f\xec\xa8\x22\x16\xc9\x00\x28\x21\x5f\xbd\xc1\xdc\xb1\xe1\xde\x4c\x67\x85\x9e\xa7\xb0\x08\x91\x2f\xa4\x92\xa6\xa6\x60\xa8\x52\xfc\x59\x93\x98\xb0\xdf\x11\xd0\x0a\xac\x37\x22\x4e\x1b\x90\xf8\x0e\x8d\x42\x80\x19\x31\x8f\x52\x7b\x6a\x04\x69\x8b\xc1\xde\xc0\xa0\x7b\x4c\x46\x22\x3d\x34\x80\xd6\xaa\x46\x74\xa0\xc7\x77\xc5\x80\x05\xe0\xf6\x83\x2c\x9d\x01\xee\x5a\xc1\xa9\x9c\xfb\x26\x31\xf1\x5b\x78\x2e\x59\x9c\x65\x71\x41\x27\xfd\x3f\x68\xef\xae\x03\x9f\x14\xb4\xa0\x6c\xcf\xfe\x7b\xdc\x79\xec\x07\x40\x9b\x37\xe4\x4b\x1e\xaa\x7b\xcb\x32\x5b\x47\x72\x9a\x72\xa2\x54\xe0\xcd\x50\x0f\x13\xff\x2c\x45\x42\x4d\x5c\xa2\xf0\x82\x30\xea\xc9\x30\x1c\xa1\xc4\x74\x29\x57\x16\x14\xa8\x25\x01\x1d\xdb\xaf\x2a\x1d\x0e\x68\xa9\xea\x50\xe3\xc0\xc7\x58\xae\x33\x01\xd0\x5b\x39\x39\xec\x1e\x82\xcf\xf9\xd2\x69\x9b\xb6\x6c\xa9\xe8\x9c\x1d\x2f\x05\x4c\xc4\x7a\x48\xb4\xc4\x30\x03\xd4\x03\x63\xd6\xe8\xee\xf0\xc1\x65\x37\xd4\x4a\xe7\x0a\x96\x0b\x6f\x05\xfb\x2e\x8b\x40\xea\x00\x9f\x8d\x06\x03\x6a\x7e\x1b\x6b\xee\x48\xf5\x0c\xe0\x19\xe6\x0c\x5e\x59\x52\xd8\x02\x25\x93\x38\xd3\x5c\x6d\x5b\xe0\x80\xd1\xd5\x68\x2d\x5c\x5a\xfc\xa0\x50\x94\xfa\x33\xe3\x5b\x8f\x18\xac\x3b\xfe\xcf\x51\x9b\x1f\x01\x12\x19\x66\xad\x3c\x40\xb4\x37\x17\xdb\x84\xd2\x45\x33\x68\x55\x80\xab\xbd\xb7\x29\xcb\x22\x2d\x7c\x52\x77\x49\xb5\xca\x2e\x67\x3d\x97\x98\xe6\x53\x2b\x32\x79\x6a\xe4\xcf\xe4\x17\x64\x70\x96\xa2\xb3\x6a\x75\xfd\x37\x96\xf5\x87\x7f\xe6\x48\xf7\x95\x25\x8d\x6f\xfa\x73\x4e\xfd\xa1\x99\xfc\x53\xd6\xc2\xee\xe3\x24\xad\x05\x4c\x4b\x0d\x9d\xc9\xf5\x2b\x55\x02\x34\xa5\x4f\x63\xdc\xd1\xa1\x7d\xe7\xa8\x3c\xde\x57\x40\x3c\x5c\x3a\x77\xdd\x76\xb3\x9d\x76\xdf\x63\x67\x3c\xe5\xfd\xa8\xab\x22\xe0\x6b\xbd\x60\x6f\xd5\x0b\x24\x02\xc6\xc9\xfb\xb2\x21\xb7\x1d\x35\xe3\xb5\x60\x03\x5a\x0f\xee\xe2\x81\x73\xdc\x18\x85\x46\xfb\x92\xe7\xc2\x3b\x2d\x11\xc0\x55\x5e\xde\xaf\x30\xea\x2f\xb1\x1b\x7e\xa7\xa4\x3b\x12\xdb\x96\x2f\xc3\x18\x2f\x5b\xa2\x44\x3b\x03\xcc\xa9\xbe\x4c\x6b\x1e\x03\x8c\xf0\xfd\x4b\xa0\x9e\x02\x9d\x25\x1a\x7c\xd5\xa4\x27\x85\x69\x09\xdb\x95\x32\xfb\xfc\x72\xdb\x2f\xfb\x06\xf9\x53\x9d\x6b\x17\xd7\x10\x90\x64\xd8\x33\x06\xd8\xcc\xf4\xf2\xa7\xd1\x61\xb0\x37\xbf\x80\xe8\xa7\xb0\x5f\x7b\xc2\xf8\x20\x4f\xdd\x85\x1f\xef\xe3\xf6\xad\x13\x8d\x7f\x0e\x63\xcb\x92\xca\x19\xa5\xbd\x16\x8d\x83\x3d\x15\x50\x25\xda\xa7\xaa\x4b\x4d\x4d\x9e\x15\x7b\x7f\xde\xd9\xad\x3a\x3e\x46\x5d\x11\xaa\xc2\xd7\x81\xa9\x68\xc8\x42\x7f\x7b\xf1\x2a\x0e\xf4\xe8\x09\xc4\x82\x16\x7c\x68\x20\x51\xe8\x39\x2f\xf2\xea\x14\xe5\x16\x68\xa4\xb1\x15\x62\xd2\xa9\x93\x23\x3d\x6d\x74\x5b\x0e\x46\x4b\x18\x14\xf2\xe1\xbc\xaf\x3e\x59\x63\x9a\x64\x8d\xdd\xc5\x81\x8d\xd0\x38\xfe\x8a\x6c\x47\x14\xd8\x92\xe2\x7a\x3e\xa2\xe7\x5b\x06\xed\x03\x21\x
4f\xba\x57\x02\x9f\x23\x4f\x65\xa6\x16\xd8\x8c\x7f\x48\x23\x55\x44\xb4\x27\x0d\x4f\x52\x45\x97\xa1\xe4\x6d\x4c\x1a\x85\x35\xc8\xeb\x4a\xdf\x04\xf8\xac\x5b\x75\x4f\x0f\xcc\xa4\x83\x92\xb2\x0f\xb0\x6f\x78\x6d\x1c\x69\xfb\x13\x5b\xeb\x28\x7b\x00\x3d\xb1\xb2\xfe\x40\x65\x4b\xc8\xf6\x15\x86\x8e\x1f\xb5\x45\xc0\x35\x1e\xb5\x7f\xca\x24\xf8\x99\x8d\x67\x7e\x16\xec\x82\x16\x83\xb8\x9f\x79\x85\x7a\x29\x74\xa7\xb0\xda\x8b\x0d\xd7\xfe\x61\xc8\x93\x00\x68\x64\x8b\x45\x43\x6f\x87\x38\x1b\x4e\x96\x18\x97\x5f\x2e\xa8\x7b\x10\x9c\xc6\x3e\x88\x8a\xd3\xf5\x52\x99\x2e\x46\x19\xaa\xf8\x54\x63\x2e\xdd\x1c\x96\xeb\x19\x56\x7f\x1c\x77\x36\x37\x25\x70\x6d\xe4\xf9\x5c\x1d\xb8\x4c\x64\x7a\x35\x38\x8c\xfc\x54\xf9\x97\x05\x36\xdd\x5e\x49\x65\x40\x8e\x25\x43\x44\xd6\x9b\x04\x91\x47\xb5\xff\xe5\x2a\xab\xfb\x46\x2b\x73\x20\xc1\xd5\x3d\x64\xc2\x5b\x9f\x3a\x09\x74\x46\x1a\x59\xb5\x73\xa7\xbd\xa9\x46\xbb\xc3\xbd\x90\x76\x18\x88\x28\xc7\x81\x3a\x67\x6b\x73\x57\x8b\x45\x99\x4a\x85\x6e\xb1\x03\xd9\xd6\xc8\xf2\x81\xf5\xa9\xa6\xa6\x3d\x97\xe9\x8a\x0f\xce\xe1\x25\x5b\x96\xd1\x2a\x6b\xc4\x92\x6a\x03\xcf\x05\x5b\xad\xce\x23\xde\x54\xf1\xc2\xbe\x8c\xc0\x2f\xf9\x8c\x25\x29\xcd\xa4\x0c\x9e\xbd\xa3\x8f\x77\x88\x93\xfc\xc1\x67\x60\x0b\x65\xdf\xa7\xdf\x8e\xa9\x6a\x7e\x91\xa9\x9a\x8a\x97\xa1\xfa\x9a\xfb\x94\xe5\x60\xb4\xd3\x17\xe8\xc1\x82\x14\xd8\x76\x40\xdb\xc2\x66\x10\xb0\x36\xc7\x48\x7f\x22\xd7\xca\x71\x8a\x3b\x14\x9e\x81\x4e\x77\x6c\x61\x6a\xa8\xf8\xc9\xdb\xff\x39\x4e\xcd\x0a\xd5\x71\x7d\x6f\xc2\x01\xe9\x96\xca\x0f\xf3\x60\x04\x8a\x1c\x56\xfb\xc6\xfa\xda\x07\x8c\x4f\x96\x80\xb7\xcb\x9d\x8f\x64\xe5\xef\x23\x88\xf3\x40\xb4\x23\x18\xed\xdc\x5e\x02\xa9\x21\xd2\x53\x05\x0b\x47\x3c\xa5\x8a\xa3\xaf\x6f\x41\x2a\x75\x46\x02\xb9\xd3\x6f\x8e\xa8\xa8\x3d\xbd\x4b\x71\xc1\x77\x95\x5e\xa3\xe6\xa4\x1d\x2d\x45\xbe\x8e\x94\xaa\x7a\x81\xc4\x93\xc2\x62\xd5\xa5\x6b\x92\xf1\xe8\xc8\x3d\xdb\xff\xad\xda\xf9\x6c\xf9\x93\xa2\x0b\xe2\x5c\x34\x09\x3a\x19\xf5\xd1\xa8\x49\x40\xf9\xea\xe2\x81\x70\xe2\xb2\x09\x08\x74\x7a\xac\xf7\x71\x49\xd9\xc6\x7b\x4d\x3a\x7b\x55\x8f\x2b\xa5\xa6\xd5\xdc\x32\x04\x45\x6d\x6a\x98\x10\x5e\x1e\xdb\x8a\x0e\xef\x6e\x42\x21\x40\x6d\x7a\xc5\x2b\xed\x55\xc0\xba\xb7\xe1\xb2\x8d\x40\x85\x80\x1e\xca\x18\x51\x5f\x01\x03\x0b\xbc\xfa\xd3\xeb\x2f\x56\x96\x55\xba\x50\x19\xe4\xd7\xa5\x90\x55\x17\x1a\x02\x04\x5f\xff\x5b\x4f\xa5\x95\xb1\x6b\x89\x88\x3c\x9c\x2d\x43\x85\xd9\xa0\x1e\x0a\xd3\xc4\x4d\x6f\xa7\x3d\x2d\xcc\x15\xbb\x75\xaf\xe3\x7f\x31\x5c\xda\xb1\x7e\x39\xce\x60\x0b\xd9\x2d\x0f\x77\x7f\x30\xc4\x0b\x5c\xe8\xf6\xe5\x06\x86\x83\x3e\x1c\x68\x9b\x78\x87\xe8\xca\xa7\xe8\xac\x7e\x65\xcb\xc1\x6d\x51\x9f\x3c\xad\x26\x2a\x42\xfd\x3a\x09\xbe\x2e\x4f\x80\x9a\x80\x5f\x6b\x79\xfe\x83\xfd\xd0\xf5\x33\xcc\x01\x69\x57\x8d\x83\x36\x7a\xb1\xab\x93\xdb\xab\x2c\x4f\x78\x06\xaf\x20\x3a\xdc\x88\x48\x5f\xfc\x49\xba\xba\xee\x15\xee\xf6\x2d\xa0\x91\xda\xdb\x0f\xc1\x6c\x3d\x52\xd9\x42\x55\x8f\x45\xb9\x9b\x30\x09\x2a\xb5\x7e\xa5\xb8\x05\xf7\x5e\xd6\x22\xc4\xb7\xfd\x94\x24\xc0\x1b\x2d\x6b\x03\xd5\xfb\x51\xe7\x4f\xb4\xd3\x83\x02\xb0\xa2\x97\xb0\xc4\x40\xf7\xa4\xa8\x4c\x6d\xff\xea\x51\x13\x77\x1a\xc7\xc6\x76\x9e\xcf\x1c\xc3\xfc\xac\x8c\x82\x2c\xb9\x8e\xff\x53\x61\x04\xa3\xfd\xd6\xfc\xbb\x75\x0c\xa3\x43\x06\x39\x0c\x82\xb4\x07\x14\x22\x7e\xac\x6e\x20\xc2\x95\x01\xc5\x8a\xcd\xc1\x1c\x1c\xa5\xc0\xa9\xf5\x97\xed\xc7\x78\x3f\xf0\x6a\xdf\xb2\x85\x6f\xe0\xfa\x40\xaa\x90\x95\xb7\x0a\xa1\xc2\xd4\xe4\xdf\xa0\xaf\xfa\x5b\x29\xff\xa0\x2d\xab\xa4\x81\xaa\xa6\x2d\x5f\x98\x3e\xcd\x65\x5a\xb4\x42\xc6\xc7\xa9\x06\xfe\x05\x92\xf1\x84\xb5\xbb\x1e\x0a\x8b\xf0\x88\x1f\x93\x0e\x97\x83\x69\xd7\x43\
xa3\x9b\x0b\x59\x6e\xc5\xa8\x89\x69\x7b\x8c\x64\x58\x0f\xfd\x65\x96\xab\xd3\xf5\x1e\x18\xd9\x89\x1e\xec\xb0\x94\xcd\x51\xe7\x80\x81\x69\xf6\x40\x19\xea\x8b\xcd\xe7\x48\x3e\x72\x46\x5c\x30\xbf\x87\xec\x20\x50\xe4\xeb\xf1\x57\x65\xdf\x4c\x77\xb5\xa5\x7e\xf1\xaf\x08\x98\x1a\xd0\x50\x60\xd6\x97\x71\x58\x0e\x6f\x0a\x1d\xf4\xf5\x09\x2a\x46\xf2\x3f\xd0\x78\x84\x67\x2f\xf2\xbf\xc9\xff\x1e\x4c\x8a\xc4\x13\x96\xcf\x14\x38\x75\xbe\xd2\x6e\x17\x68\xce\x26\x67\x5b\xb0\x5a\xd2\xb6\x0c\xbd\xd2\xee\x76\x81\xd9\x42\xb3\xf5\x3f\x0d\xd1\xfa\xd1\x06\x69\x61\x1c\xbc\x12\xf9\xf8\x0b\x32\x7a\x4f\xe2\x63\x87\xc4\x2e\xfd\x79\xf9\xd3\x3e\x1f\x79\x73\xcc\x06\x7d\x6b\x19\x00\xa1\xf7\x96\xaa\x30\xdd\xf9\x79\x1c\xe9\x9f\x4c\x3c\x53\x96\x7f\x3a\x99\xe7\x4f\x39\x2b\x9f\x31\x30\xdf\x20\x90\x67\x13\xf9\x88\x91\x2f\x5f\xfe\xb5\xa7\xdb\x71\xc2\xc9\x2d\xc6\x99\x4d\xdd\x9b\xf3\x72\xce\x97\x30\x82\x98\x79\xf2\xce\x11\xb1\x3f\xa5\x60\xb8\x49\x82\x6c\xa4\xdb\x2a\xdc\x71\xcf\xc3\x67\x89\x8b\xe4\x5b\x81\x1c\xc8\x3a\x50\x1a\x95\xd3\x08\x5b\xf3\xb6\x6f\x1e\x67\x3f\xa1\x3b\x14\xa1\x4a\x01\x32\x57\x57\x3e\xb4\xd2\x21\xe7\x7e\x48\xb1\xe8\xce\x6e\xae\x0b\xfc\x8e\x7f\xa1\x4f\x91\xb2\xc6\xa1\xc0\xec\xe8\x80\x96\x45\xe2\xd4\x4d\xd9\x05\x4c\x46\xad\x4b\xa1\x58\x7e\xa3\xd9\xe3\x49\xee\x28\x90\xf5\x23\x82\x52\x43\x20\x55\xa3\x6c\x1a\xc3\x43\xd7\xff\x1c\x66\x77\x23\x2f\xd0\x23\xfb\x25\xd1\x24\xfc\x49\x52\x1a\x93\x22\x33\x9d\x93\x1a\xfc\xd3\xf9\x04\xeb\x91\x40\xb9\x74\xc6\x86\xbf\x5f\xbd\xf7\x72\x10\x2d\xfb\xe7\x98\x72\xf8\x2c\x4f\x65\x3d\xda\xd8\x9f\x9e\xca\x7c\x64\x28\x82\xd9\x5c\x1a\x04\x53\x20\xdc\xcf\x70\x1b\x1d\xb7\xf2\x6f\x9c\xb4\x83\xb1\x06\x79\x16\x0f\x7d\xea\xc7\x3d\xb8\x79\x53\x3a\x59\x2e\xdd\x1c\xc0\xa7\x5b\x7b\x6e\xfa\x1b\x99\x52\x14\xe2\x53\xd2\xa9\xf3\x2c\x2b\x0f\x31\xec\xb2\x87\x98\xe0\xa6\xb3\xbf\xa5\xe6\x7e\xe6\xfd\x23\x6e\xeb\xf3\x01\x87\xd1\x1b\x85\xc8\xfa\xee\x9f\x96\x55\x3d\xfd\xd0\xb2\x23\x9f\x38\xa1\xfc\xd0\x69\x58\xf0\xf6\x89\x08\x35\xae\xdd\x65\x38\x3a\x03\xf9\x87\x42\x79\x41\xe1\x79\x72\xec\xd3\x8f\x34\xab\x3f\xc0\xeb\xc7\x56\xa0\xf8\xa6\x9e\x17\xdc\xc4\xb4\xca\xfe\x54\xd5\x0a\x80\x6f\xed\xa4\x81\x7b\x7f\x87\xe5\x05\xd4\x6d\x84\x5a\xd4\x59\x3f\x72\x1f\xd7\x51\x23\x6d\x4f\x0c\x87\xd3\xf8\x54\xf7\x0b\x6b\xb6\xcd\xef\x9d\xc0\x47\xbc\x6e\xff\x0b\xff\x83\x2b\xbd\x0f\x8e\xa8\x61\x08\xdc\x7d\xd1\x50\x05\x4b\x97\xc3\x80\xa4\x9d\xed\xe7\x7f\x21\x8e\x6e\x55\xc1\xac\xa9\x14\xc0\xfb\x12\xbc\xde\x72\x90\x84\x9b\x37\x5b\x48\x95\x19\x9a\x1c\xfa\xe6\x2f\x03\x92\x4b\xd4\x1c\xe9\x81\x61\x81\x3a\xfa\x0a\x85\x9c\x80\xec\x82\x51\xfa\x86\x87\xe2\xb8\xa1\x6b\x2e\x2f\xb3\xd8\x27\x58\xdd\x1b\x38\xff\xfe\xf3\x60\xd5\x0f\x01\x25\x73\x40\xd0\xb9\x33\x06\x1d\x66\x45\x0f\x1a\xf5\xda\x35\xb8\x9c\xc7\x65\x4f\x72\x24\x3b\x0e\x33\xfd\xf0\x26\x04\x5a\x30\xf2\x21\xd3\x05\xf6\xce\x85\x30\xef\xce\x5d\x05\x88\x8f\x42\x52\x43\x8b\x5f\x01\x40\xc7\x35\xfd\xe8\xcd\x58\x63\x7c\xa0\x2f\x89\x47\x2a\x2b\xc0\x5d\x0a\x65\xed\x44\x1e\xc9\xb6\xc1\x5b\xac\x4d\x65\x88\x56\xb6\x86\x4b\x37\x38\x10\x11\x3d\xbd\x36\x68\x06\x38\x1a\xdf\x21\xf6\x79\x80\x19\x4e\x80\xbc\x36\xf1\x10\xda\x50\x87\xdd\x4f\xb9\x29\xf3\x63\xea\xdb\x8b\x83\x36\x20\x90\x81\x80\x16\x48\xd4\x44\xbe\xd1\x56\x17\xb4\x7c\x26\xea\x26\x6c\x7c\x3a\xa8\xc4\xbe\x90\x67\x1e\x1b\x15\x1f\x97\xfa\xe9\xca\xc1\x9c\x47\x3d\x62\x84\x7c\xb2\xf5\xbd\x9e\xf2\x07\x10\xf5\x47\x81\xe6\xfd\xe7\x7a\x56\xd9\x0e\x4c\x55\x7f\x1d\x26\xc6\x59\xca\xe2\x01\x5a\x65\xaa\xb7\x27\x41\xe8\xe5\xc5\x3e\xec\xfd\x53\x09\xd1\x87\x20\x65\xd0\xde\x63\x20\x83\x54\xe8\xf7\x9f\x5d\x1a\x87\xca\xd6\x1e\xd2\x19\x77\x1d\x01\x84\xbb\xa5\x74\x77\xde\xe4\xfd
\x7c\xda\xad\x38\xcf\xb4\x47\xb8\x6f\x97\x9e\xfd\x5e\xbe\x15\x4e\xfd\xf7\x0c\x1d\x25\x17\xda\x7b\x39\xdb\xb4\xe4\x43\x8b\x1d\x6b\xf7\x1d\x7b\x0e\x09\xa9\xcb\x5c\xfa\x16\x5f\x11\xc3\x47\xc4\x22\x2e\x7f\xbc\x19\xf5\x18\x25\x2e\x32\xfe\x90\xc0\xf8\x34\x2e\x15\x10\x1f\xc3\x29\xe5\x3e\x56\x20\xbb\xfc\x86\xf7\x90\x86\x8f\xd8\xf8\x6b\x9b\x04\x24\xbc\xee\xa6\xef\xf2\x30\xd2\xfd\x5d\x24\x95\x72\x0d\x71\xb7\x1b\xfe\x10\x30\xba\xaa\xcf\x00\xd7\x51\x9c\xb0\x7b\x0f\xe7\x09\xa3\xf9\x1c\xeb\x6b\xe9\x0d\xc4\xa7\x17\x66\x9a\x48\x55\xb4\xdf\xdc\x3d\xc1\xf0\xa0\xd7\x28\x39\xb4\x15\x26\x60\x7a\x4d\x44\x2a\x2e\xb1\x93\xea\x7f\x1d\xee\xfc\x75\x20\x50\xfb\xb7\x54\x4d\x75\xd8\xaf\x81\xc1\x43\x97\xe6\xbd\x5a\xff\xab\x1b\x2b\xbe\x8d\xac\xb3\xd8\x44\xec\x7d\x10\xe3\xfa\xe5\xef\xc3\xdf\xa8\x2f\xb3\xaf\x45\xdf\x6f\x6f\x39\xd1\x0e\x0d\xf1\x72\x79\x53\xb4\x8e\xd5\xfd\xd0\xad\x54\xc1\xf7\x80\x13\x53\xff\xd8\x12\x24\xa8\x7e\x1e\x05\xc4\x43\xe3\xa3\x3b\x90\xc6\x15\xcf\x99\x43\xf2\x5d\xe5\xfe\xd6\x7a\xeb\x93\x96\x45\x8c\xf4\xf9\xfe\xb1\xfd\x3f\x57\xdf\xb6\xe5\xaa\xce\x33\xfb\x2e\xb9\xe9\x9b\xfd\x52\x06\x1c\xa0\x03\x98\x8f\x43\x32\xe9\xa7\xdf\x2a\x55\xc9\x64\xfd\x73\xad\x31\x90\x09\x49\x68\x02\xb6\x25\xd7\xc1\x7e\xd9\x41\x57\xc6\x13\x13\x3d\x04\xdd\x35\xe9\x2e\xea\x5c\x6f\x8d\xb4\xe3\x33\x64\xf8\xba\x28\xb9\x74\xee\xa3\xa2\x9d\x10\x3d\xc2\xb6\x50\xfd\xd7\x66\x33\x10\xb7\xa0\xe9\x54\x17\x66\xaa\x5d\x89\xed\x66\xf3\x4d\xbe\xb4\xfa\x75\xf5\x70\x76\x5d\xb9\x58\x38\x97\x88\x0d\x96\xf5\x4b\xf5\x70\x74\x0f\xa6\x4a\xf9\x3e\xc8\x7b\x08\xdf\x46\xbe\xcf\xfe\xec\x0f\xd7\xe4\x77\x37\x4e\x7e\x88\xe7\x0d\x54\x64\x65\x87\x27\x99\x12\x20\x5c\x9d\x2a\xae\x8f\xa4\x0d\x4a\x65\x7e\x43\x0e\xc4\xa3\xf0\xd5\xee\xf4\xfb\x75\x23\x68\x8f\xfa\x74\x5f\x9e\x74\xce\xf3\x98\x62\xf1\x13\x90\x00\x4a\xd1\x8b\xbe\xad\xd5\x7d\x0d\xb0\x8c\xc7\x5e\x1d\x6a\x57\x61\xea\x50\x61\x9e\x58\x1d\x47\x48\xf8\x30\x69\xe9\xd9\x2b\x14\xfc\x98\xa7\x8d\x6e\x21\xbc\xdc\x96\xd8\xdb\xa4\x5b\xab\x39\xb1\x57\xe8\x30\xe7\xf3\xed\xca\xde\xb3\x4b\xb3\x2a\xaa\x16\x05\xd0\x20\x8d\xfb\x25\xf2\x76\x47\x3a\x5c\xf7\x53\xb2\xbc\x0e\x5a\xa7\xca\x08\x65\xb0\xe9\x86\x80\xf1\x3e\x5f\xe4\xdd\xd0\xd2\x0d\x14\xaa\xcc\xb7\xb9\xe0\x66\x23\x19\x83\xbc\x0a\x7e\x60\x93\x13\x2d\x7b\x39\x56\x60\xbb\xd8\xb5\x79\x43\xab\xe9\x5b\xe0\xf0\xd9\x5a\x6e\xdb\xc8\xb5\x9a\x45\x5a\xfa\x5a\x0e\x45\x42\x11\x10\x21\x53\xa1\x06\xa5\x42\x01\x9c\x0c\xc7\x90\x62\xaf\x15\x2f\x00\xf4\x40\x1b\x4e\x94\x0e\x1f\xe8\x6e\xc9\x67\xfb\x75\x26\x31\xe9\x49\xde\xfe\x72\xa7\xb4\xce\xef\x56\x61\x83\x06\xf4\xd9\x86\x07\x65\x17\x00\x34\x97\x86\x9e\xbf\x98\xe2\x33\x7d\x35\xf4\x75\xf3\x2c\x1f\x18\xb7\x8b\xac\x3b\x6f\xa7\xca\xb0\xc4\xb3\xa8\x39\x83\x12\x1e\x4e\x91\x32\x7f\x2c\xc2\x0b\x30\x57\x6f\x03\xb7\xde\x86\x72\x14\x70\x01\x5a\xa6\x1f\x1d\xd3\xcd\x45\x7f\x9b\x43\x48\x5e\xda\xa6\x28\xbb\x82\xb9\xae\xf9\x8f\xcc\x9d\x2c\x00\xb9\x9a\xcb\xff\x23\x96\x8b\x3c\x42\xb7\xc7\x40\x3f\xd8\x90\x33\xb9\xde\x39\xf4\xbc\x3c\x8c\x17\x63\x39\x02\x58\x02\x51\xc6\x87\x2a\xd8\xec\x02\xcf\x12\x74\x46\x77\xac\x9c\x94\x0d\xb1\x6d\x5c\xf2\x79\x92\x9b\x24\xca\x0a\xda\xbb\x55\xb8\x47\x72\x7f\x8e\xda\x50\xca\x82\x3a\x66\x3f\xbe\x05\x3b\x80\xdb\xa4\x64\xc5\xe0\x38\xe9\x9b\xf5\x88\xfc\xbd\xf5\xf9\x06\x61\x09\x98\x4e\xa1\x27\x7a\xed\x6a\x06\xbf\xdc\xa2\x2e\xd4\xa8\x7d\x89\xd4\x23\xac\xb9\x27\xd9\x5f\xda\x44\xde\x7f\xa8\x84\xc5\x7b\x5f\xd6\x86\x24\xa5\xd6\xc3\xb1\x56\xc0\xed\x22\xb9\x68\xae\x51\x9f\xb1\x8a\xd1\x54\xab\x5b\xd0\xc4\xb1\x40\xc8\xa5\x71\x14\x9e\xb9\x74\xed\xac\x34\x5f\x1c\x0f\xe4\x47\x03\x8a\x4d\xe0\xe7\x9b\x2d\x5c\x7f\x6c\x1e\xfc\xd1\x56\x7c\x6e\x24\x3a\xa4\x8
4\x40\x64\x5a\x48\x06\xdc\xfe\x02\x1b\x9c\x01\x43\x38\xb5\x30\xdb\x94\x9d\x5e\x76\xf6\xb7\x08\xd3\xd6\x4c\x67\x96\x2f\x82\xb3\xc9\xb5\xc0\xd0\xd8\x7d\x47\x90\x83\x3d\xb7\x49\x35\x55\x98\x73\xb1\x0b\x07\x51\xd1\xf7\x10\x02\xdc\xe4\xac\x85\x6b\xeb\x39\x9a\xb3\x67\xd4\x9e\x54\x6e\x6f\xc4\xa7\x69\x00\x41\x14\xab\xfb\xaf\x94\x17\x89\xdd\xd7\x47\x45\xe4\x26\xbd\x6f\x09\xe8\x5d\x03\xbc\xac\x2a\xab\xf8\xf3\xe6\x00\x17\x72\xc8\xb7\x99\x0b\x7a\x16\xf5\x81\x1d\x08\xdc\x40\x22\x74\xbe\x49\xac\x11\x34\x20\x55\x7b\xc1\xa1\x41\x29\xda\x45\xb7\xd3\x8b\x78\x85\x97\xde\x0b\xea\xb4\x9c\x36\x53\x5b\x91\x09\xed\x6b\x95\xbc\x25\xad\x2f\x43\xc3\x13\x69\xe6\x34\x92\xb0\x7d\x48\x4c\x82\x8d\xad\xb4\x9a\xe5\x20\xbf\x22\xa2\x00\xec\x60\x5f\xc3\xde\x03\xe2\x01\x32\x3a\x79\xfb\x96\xd2\xd7\xa5\x75\x74\x5f\xa3\x64\xa7\xd7\xe1\xfa\x77\xab\x2d\xef\x43\xa0\x37\xec\x09\x22\x04\xc7\xeb\x67\x7c\x72\xd2\x26\x54\x00\x5d\x52\x7d\x00\xfc\x17\xd6\x55\xae\x7f\x3b\x54\xdc\x2d\x9a\x36\x38\xf8\x75\x81\x56\x35\x94\xb3\xf9\xed\xcb\x15\x3a\xd4\xb4\x24\x96\x62\xb5\x5e\xc4\x9a\xb9\x0f\x69\x4a\xbf\x5d\xc1\x3a\x56\xd0\x20\x49\x2d\x04\xbf\x5d\xe0\xed\x8c\xc2\x49\x02\x56\x80\x00\x01\xd7\x26\xd1\x29\x58\x2f\xbc\xdc\x0d\xc7\xa9\x2a\x4c\xfc\x88\x2a\xdc\x91\xc6\x0d\xa5\x55\x9e\xd7\x28\xfa\xbb\x4d\xfb\xe9\x3b\xc0\x3f\x02\x2a\xb7\xa8\xd5\x71\xf6\x9a\x9e\x36\xd3\x5e\x14\x8d\x5e\x9f\x48\x36\x54\x0a\xb1\xf0\x55\x64\xb6\x5f\x19\x89\x3a\x25\xa7\x15\x7b\xd8\x9e\x73\x48\x50\x42\xb0\x1a\x88\xd4\x9a\x5d\xa7\xe6\x82\x4f\xad\x47\xee\x41\x8c\xa3\xfe\x46\x97\x14\xbe\x38\xe7\xb8\x4a\x19\xbc\xb4\x76\xcd\x0e\x70\xba\xc2\xa9\xf5\xb2\x5b\x6e\x1f\x2c\xef\xb6\xf8\x73\x72\xfd\xea\xb3\x8d\xf2\x4c\x07\x59\x9e\x03\x1f\x26\xf7\xb3\x2c\x40\xad\x7b\xe3\x93\xe0\x4c\x79\x59\x33\x01\x60\xb0\x8e\x3e\x3d\x73\x3f\xd0\x7f\x0a\xb8\x06\x03\xad\x6a\xad\xfe\xfb\x7c\xfd\x21\xb3\xd0\xc0\x1e\x8c\xa1\xcd\xee\xf2\xd5\x01\x43\x60\xe6\xf6\x51\x25\x0a\x5b\xbd\x94\x34\xb7\x70\xcc\x01\xe9\xf0\x07\x1d\x77\x3f\xb7\xff\xf6\x27\x91\x3f\x00\x71\x7b\x5f\xa6\xf5\x38\x7c\x1b\x81\x36\x70\x9e\xba\xa5\xb7\xce\x65\xc7\x72\x9b\xff\x1e\xef\x32\xbd\xf6\x4f\xea\xfd\xaf\x7d\x8f\x7f\x0b\xf5\x43\xde\x23\x34\x56\x76\x46\x63\xa0\xf2\xdc\x7d\x54\x13\x59\xa2\x0f\xe0\x4f\xce\xe4\x13\x56\x74\x2a\xeb\xbd\x69\x2a\xa2\x08\x6c\x4b\xc6\xc4\xe4\xbd\xf3\x30\xfa\x5a\x25\xe2\x74\x0d\xa7\xf0\x0b\x17\x37\x1c\x18\xde\xf1\x2d\xc9\x2e\x7a\x61\xd4\x8d\x4c\xea\xcf\xd3\x97\x47\x4f\x18\x5d\xf6\xc4\x32\x08\x6c\x72\x42\xbf\x92\x43\xa9\xa5\xd8\x81\x36\x40\x65\x30\x60\x05\xa3\x63\x29\x26\x21\x17\xc6\x50\xa1\xa9\x7b\x64\x5d\xce\x37\x52\x17\xa8\x36\xdc\xcc\xf4\x94\x48\x37\x99\xf2\x4b\xc4\xad\x9e\x8c\x73\x69\xc3\x2b\xc0\x19\xf2\x9b\xe8\xf2\x96\x2f\xee\x0f\x42\x16\x1c\x14\x70\xee\xac\xae\x40\xd9\x5a\x65\x8c\x03\x00\x7c\xe1\x11\x2e\xae\xc9\x1d\xee\xd9\xc6\xd5\x78\x87\x25\xf0\x55\x16\xf7\x6d\xeb\x43\x9b\xf6\xb5\x5a\x11\xc5\x02\xb6\x3e\x25\x2b\x6f\x26\x68\x41\xb9\xb9\xf5\x62\x76\x87\xc8\x75\xd5\x0d\x55\x29\x28\x48\x0c\x83\xb4\xf9\xbd\x81\x62\x50\xc5\x34\x70\xfa\x72\x58\xa7\x26\xbf\x61\x54\x57\x77\xf8\xff\x3f\x82\x8a\xcf\x1f\xff\xc0\x53\xe4\xa0\x08\xa4\xc6\xab\x86\xfc\xc3\x26\x5c\xdc\x42\xbb\x46\xaa\x01\x70\x0c\xf5\x7e\x0e\xd6\xf8\x1a\xf7\x8e\x71\xb5\xfd\xfa\x9b\x85\x7c\x70\x05\xee\x86\xd3\x95\x83\x86\xdb\x07\xcc\xa8\xfc\x34\x98\x11\x57\x07\xd8\xb1\xbb\xe2\xcd\xc4\x74\x1e\xc3\x39\x0b\x0e\x7f\x0c\xdb\x19\xf0\x3f\x8b\xab\x55\xe3\x81\x89\x5a\x64\x76\xe0\x00\x49\xdc\x94\xa2\xdc\xba\x55\x49\x09\x89\xe9\x0e\x50\x9b\xd6\x9b\xf8\xef\x60\x37\xbe\x74\xba\x97\x14\x17\xa7\x26\x79\xc0\x65\x00\x01\xc1\xbd\xc2\x6d\x84\xe5\xeb\xb6\x09\x53\x07\x15\x1d\x3a\xe1\xee\x2e\x
a5\xd0\x88\x4c\xe5\x72\x3e\x5c\xe8\x46\xde\xb8\x3f\x2a\x54\x62\xe4\xaf\xb6\x7b\x5f\x90\xef\x90\x41\x38\x82\x9e\x7f\x90\x86\x10\xcb\x7f\x0b\xca\xf2\xb9\x62\x1d\x94\x50\x01\xf4\xad\x84\x46\xcc\xeb\x0f\x83\x9e\xca\x01\x2e\x05\xf0\xa8\xa0\x09\x82\x3b\xce\x46\x29\x81\xb7\x28\xf3\xb0\x33\xa4\x5b\xa2\x5b\x4f\xf7\x2a\x89\xec\x94\x5a\xf7\x48\x42\x9c\x1e\xe7\x8f\x54\xd4\x80\x95\x0d\x03\x45\x36\xd3\x8c\x64\xdd\x63\x22\x40\x8e\x10\x24\x75\xf8\x9c\x76\xc9\x60\x6d\x77\x17\xcb\x10\x94\x06\x7e\x5a\x9c\x7d\x94\x31\x85\xed\xb0\xf1\xb5\x4a\x7c\x83\x22\x7f\xd5\x48\x57\x03\xe0\x09\x99\xf0\x52\xed\x8c\xc4\xfc\x6d\x0b\x68\xc4\x06\x3a\x12\x57\xf9\x89\x94\x47\x99\xe8\x13\x2a\x10\x7b\xe0\x94\xb0\x92\x2d\x6d\x05\x00\xa2\x42\x67\x9a\xd5\xb3\x7d\x71\x64\x04\x61\x15\xe5\x43\x71\x1c\x35\x50\xe9\xe1\xb1\x80\x57\xe8\x98\x24\xd0\xa8\x45\xfe\x84\x07\xb4\x42\xb1\xf0\xfc\xfb\x3c\x06\x71\x60\x9f\x73\xf5\xbe\x8d\x1a\xd1\x3e\x65\x62\x3e\x6c\xbb\x16\x19\xca\xa3\xe1\xf0\x82\x29\x05\x72\xda\x86\x0e\xbb\xb9\xa9\x18\x30\x86\x1a\x38\xd6\x46\xf4\x79\xa3\x2a\x6a\xa2\x66\x09\x61\xe1\x86\xb6\x71\x44\x57\xa5\xe3\xdd\xb8\x56\xf0\x08\xa7\xbb\x33\x74\x55\x00\xc5\x70\x08\x4b\x5a\xe0\xb7\xdf\xe1\x60\x37\x65\x11\x4f\x21\x90\x19\xf1\xba\x34\x06\x30\xd5\xf1\x53\xb3\xe4\x08\x93\x3c\x22\x37\xd2\x07\x90\x0c\xc5\xf3\xc9\xed\x0f\x35\xba\xb1\xec\x28\xb8\x17\x44\xc4\xf4\x41\x19\x65\x75\x55\x66\xf7\x2c\xde\x2e\xf4\xc6\x5b\x16\x5c\x76\x32\x7b\x76\x7b\xf2\x57\x76\x87\xd4\x18\xe7\xc2\xec\xde\xd2\x6b\xd5\xdf\xd4\x06\x7a\xdd\xa2\x2d\x57\xa4\x92\x35\x2a\xda\xa5\xf5\x7a\xbd\x1f\x1d\x5e\xcf\xbb\x93\x8d\xe4\x91\xec\x5a\x02\x02\x74\x4c\x55\x61\xc3\x42\xed\x4a\x73\xb0\x3d\xb6\xb3\x17\x70\x9a\x32\x02\x77\xe4\x90\x80\xb3\x2b\xd5\xfa\x15\xac\xfd\xed\x21\x55\x81\x4a\xa7\x47\x43\xbb\xc1\x74\x11\xd8\x63\x17\x2e\xc4\xce\x5a\x6f\xa6\x58\x1a\xb2\x21\xa2\x44\x22\x8b\x05\x89\x3d\x8e\x69\xb8\xb1\x41\x99\xb0\x10\x69\xec\x6c\x55\x35\x12\x33\xca\x25\x09\x14\x11\x2e\x66\x81\xd6\x20\x52\x8d\xaf\x79\x02\x2f\x14\x84\xd0\x82\x08\x82\xb0\xb1\xe5\xea\x2e\xa1\x96\xcd\xfc\x53\x05\x79\xec\x1a\xbe\xa0\x8e\x1e\x46\xbb\x2e\xa5\x7d\xe3\x47\xa6\x00\x7c\xf8\x8a\x0b\x3f\x7f\x8e\x35\xac\xcd\xe6\x4f\x21\x61\xbe\x41\x75\xab\xda\xf5\x82\xf0\xc4\x23\x74\xdf\x5a\xf0\xd4\x8e\x00\x7b\x74\x00\xd0\x84\xcd\xd8\xe6\xaa\x87\xbd\x34\x16\xda\xf0\xf6\x05\x30\x5d\xc4\x22\x6b\xe4\x6a\x28\x0c\xb3\x9e\x7e\x99\x03\x76\x92\xaa\xae\x7a\x5a\x03\x56\x92\x26\x9d\x01\xc0\x89\x7e\x58\xc5\x88\xd4\xc3\x2d\xb1\x8a\xc7\x1d\x55\x77\xb8\xcf\x51\xda\xc0\x26\xce\xbc\xc4\xff\x3b\x9d\x30\x07\x52\xe7\xe4\x5b\x5f\x09\x50\xd1\xcd\x5a\x1c\x60\xd6\x73\xdb\x4f\x51\xef\xcf\x4d\x7b\xfa\x8a\x0d\x09\xb0\x86\x3d\x13\xa4\xef\x9f\x90\x3b\x0b\x48\x05\x1a\xf7\xf2\x13\xc1\x23\x5c\x96\xa8\xed\x5a\xc1\x42\x69\x70\xd4\x23\x01\x99\xf5\x2a\xb2\x5e\x5a\xa2\xe7\x56\x01\xb6\x57\xc7\x65\x87\xd4\x3a\x16\xe9\xc9\xb0\xc7\x14\xf1\x10\xe4\xcb\x46\xb2\x77\x92\x2e\x2a\x46\x35\x2f\x54\xd0\x61\x18\x8e\x96\xf1\xd3\xb8\x79\xf0\xbe\x96\x30\x1c\xce\xff\x29\x86\xac\x31\x4a\x01\x2e\x72\x86\x5b\xf0\x44\x81\x75\xfb\x89\x5d\x55\x40\x92\xb9\xd0\x31\x38\x42\x7c\x00\x03\x5a\x42\x97\xb3\x8e\xff\xc6\xe0\x6a\xaf\xe3\x9b\x6a\xe5\x63\xcc\xdd\x50\x12\x5b\x88\xd9\x5c\xc7\xdc\xea\xa8\x4e\x04\x7b\x4c\x91\xf4\x07\xc4\x60\x4f\x95\x83\x2c\x13\x61\x2c\x83\x0d\x21\x54\xb6\x0e\x52\x9f\x5d\x9f\xcf\x27\x75\xd9\x8f\x1c\x62\x06\xba\xdc\xd9\x9d\x33\x43\xeb\x40\x58\x13\xc6\x85\x07\x6c\xe7\x9b\x09\x1f\x6e\x35\xe7\xfb\x54\x6b\x62\x99\x08\xdb\x0d\xd2\x12\xd0\x66\x53\x09\x79\x15\x63\xa5\xcb\xcb\x8c\x2b\xa9\xdf\x55\x08\x01\xde\x95\x8c\xec\xc7\x79\xd1\x45\xd8\x45\x8c\x09\x0e\x59\x38\x71\x85\x21\x5f\x72\x4f\
xf9\x35\xcd\x12\x4b\x98\x32\x2c\xfb\xbf\x54\x14\x46\x97\x5d\xd4\xab\x10\xd7\xf1\xfd\xe5\xef\x8f\xf2\xeb\x81\x2a\xd9\x1f\xa1\xfc\x1e\xd5\x49\x6f\xc5\xf2\x96\x2b\xc1\x73\xfe\x8f\x70\x4a\x21\x0f\xff\x64\x91\x06\x61\xc7\x97\xcf\xc3\x09\xfa\xc2\xa1\x4c\x5c\x90\xa5\x50\xbc\x7f\x09\x12\xdb\x0e\x24\x36\x9e\x08\x6b\x77\x65\x93\x6d\x0f\xfc\xda\xb8\xed\x64\xf5\x1a\xf3\x23\xdf\xeb\xff\x06\x45\x44\x75\x5a\x1f\xea\x9e\xcb\xfe\x3a\xcc\x69\xc6\xd0\x4e\x38\x84\x75\x91\xbd\x40\x49\xaf\x0f\x6f\xf2\x45\x52\xe3\x55\x3a\xc1\xef\xf0\xc5\x86\x9e\x60\x07\x2c\x05\xc6\xec\x1e\xbc\x0b\x4a\x66\x8c\x8f\x6a\x9a\x5c\xca\x7f\xe5\x13\x1e\x04\x91\xf0\x6a\x2d\x82\x8b\x2d\x70\x0c\xf6\xad\x25\xf2\x5e\xad\xf3\x46\x0b\x61\x7e\x77\xd6\x72\xbc\x89\x66\xc0\x8b\x25\xa5\x9a\x32\x2f\x2e\x7a\xfa\xa0\x5e\x7d\xe8\x39\xa4\x2a\x8a\xef\xbc\x0c\x7e\x01\x78\xac\x61\x95\x2c\xe7\x64\x97\x6c\x00\xf9\x3a\xc7\xa7\x25\x0d\x1b\x8b\x0d\xe7\x84\xaf\xbc\x92\xe5\x0f\x8e\x9f\xb0\x74\xc9\x71\x1a\x96\xbe\x26\x3a\x41\xb8\x50\x9b\x2c\xdd\xe6\x72\xfc\x25\x05\xd6\x35\x3d\x6f\xb7\x20\x78\x28\x50\x9a\x1e\x9a\xd8\x74\x5d\xfe\x52\x6f\xb6\x06\xed\x95\xcb\x16\x0a\x6a\x20\x69\xea\x27\x75\xf0\xea\x83\xd8\x15\xad\x59\xcc\x76\x19\xb4\x4f\xb4\xcb\xb9\xa2\xe0\xe7\x32\x38\xe7\x65\x2e\x9d\x50\x2e\x9d\x65\x11\x0e\x0a\x81\xd4\xba\x66\xc1\x3e\x05\x64\xb6\x20\x51\xb3\xc8\x7d\xad\x09\xb7\xc8\xa4\xb4\xe2\x4b\x0b\xa2\xea\xec\x97\xaa\x7e\x0e\xd1\x07\x45\x59\x44\xd1\x39\x7b\x61\x8e\xfb\x30\x4c\x9c\x84\xb8\xd8\x05\xaf\x23\xd1\x4c\x39\x94\x2e\xfc\x5e\x35\x0c\x49\x07\x22\xf6\xfd\x50\xf0\xe1\x82\x7b\x31\xc1\x66\x92\x82\xa8\x5a\x10\xa8\x12\xa9\x52\xee\xba\xfb\x0a\xfc\x78\x01\x5e\xa0\xe8\x12\xf0\x98\x90\xdb\x03\x87\x2d\xcc\xb0\x51\x37\x2f\x4d\x52\x98\xda\x38\x20\x9f\xef\x1c\xda\xfe\x04\xe1\x24\x57\xa7\x6a\xd5\xb3\xcd\xa9\xab\x35\x95\xe9\x0a\x88\xb9\xf5\xdf\x3e\xc7\x70\x59\x7e\x87\x9c\x08\x1a\xe3\xa1\x80\x33\x85\x1c\x2e\x08\x44\xd4\x4f\x28\xd6\x09\x10\xe2\xf2\x0e\x9d\xfe\xd1\x0b\x45\x94\x80\x10\x38\x66\x72\x57\x0e\x79\x52\x6b\x1f\x62\x46\x2d\x6a\x7c\xfc\xb8\x0c\x89\x37\x56\x33\xfd\x71\x0f\x0d\x09\x8a\x44\x08\x28\x13\xc0\x41\xcb\x30\xdf\x89\x48\x19\x51\x47\xb1\x6e\xa8\x51\xdd\x3d\xf8\x26\x81\x4e\x26\x8d\xd6\x96\x1f\x5e\x55\x52\x29\xc0\x77\xaf\x73\xfb\xf3\x6f\xb4\xe0\xf8\xfb\x50\xb6\xf0\x45\xdb\x99\x97\x25\x1a\x6e\x15\xa0\x2e\xc1\xb2\x06\x21\x83\x60\xfc\xf2\xf3\xe6\xbe\x99\x38\x34\xeb\x0e\x7f\x5d\x8c\xc2\x3a\x18\x01\x6d\x16\xdf\xa6\xbf\x93\x6a\xff\x57\x58\x83\xff\xc2\x9f\xc6\xef\xd3\x5f\x20\xac\xb4\x93\xc3\xc4\xef\x28\x45\xd3\xdf\xb1\x42\x17\x7e\xb3\x25\xb2\x9c\x35\x58\x88\x2c\xc8\x81\x36\xf9\xf9\xdc\x34\x8c\xfd\xa6\xb9\x46\xed\xff\x96\xe2\x13\x61\xac\x45\xec\xbc\x61\x7e\x93\xcf\x3d\xa1\x67\x2d\x03\x80\x9d\x5a\x0c\xdb\x76\x63\x68\x6d\x48\xfb\x9f\xd4\x1c\x3c\x17\xf7\xaa\x23\xdb\x96\x08\xcf\x2c\x29\x0a\x1c\x73\x4b\x5e\xcb\x15\x20\xc8\x1a\x6e\xb9\x2d\x51\x50\x8b\x2b\x58\xc2\x62\x2a\x52\x3c\x27\xf7\x42\x16\xce\x07\xfa\x12\x59\xd8\x99\xee\x9c\x7a\xa1\x6d\xba\x8c\xf4\xa1\xa9\x5f\x27\xc6\xae\x60\x3b\x6d\x19\x42\xe9\x75\x5c\xbe\xf4\x31\x50\x04\x0d\x8c\x8e\x0b\x47\x48\x44\xd8\xa1\x36\x6f\xd5\xb7\xbd\x81\x99\x1c\xdf\x3f\xdf\x9e\x2d\x7c\xd5\xa6\x11\xbe\x8c\x04\x29\x7f\x75\xdb\x8e\xf6\x09\x8d\xb7\x91\x26\x46\xc3\x35\x13\x90\x22\x93\xeb\x73\xd3\xd4\x64\x38\x41\x39\x71\x30\xcb\x09\x37\x60\x0f\xe8\xd2\x30\x58\xe6\x43\x6f\x00\x40\x73\x78\x34\xba\xd6\x2d\x75\xcc\xe2\x06\x0e\x36\xe0\x87\xd3\xf5\x7c\xc0\x92\x1e\x05\x4a\xc0\xbe\x12\xff\x71\x28\xbd\x3a\xef\xa1\x04\x85\x73\xa8\xae\xbe\x00\x0d\xcd\x36\x1c\x9e\x1e\xdb\x87\x6f\x94\x99\x1c\xc6\x4e\x2a\x11\xb7\xc7\x9a\x4d\x0b\x56\x88\xce\x20\x74\xed\x06\x3f\xd0\xc9\x7e
\x7a\x92\x86\x58\x3d\x09\x7b\x6f\xfe\x01\x01\xbc\xe1\x21\x2f\x96\x7b\x86\x44\x8c\x31\xb0\x3a\x3e\x1c\x32\x96\x97\x36\x13\xa8\x88\xfc\x0f\xe9\x37\x5a\x5f\xf4\x30\xfa\x11\x60\x86\xef\xda\xea\x40\xeb\x16\x05\x0e\xca\x7d\x54\x95\x89\x00\xdd\x24\x9a\x26\xf7\x50\x4a\xb6\x4e\xce\x7f\xdf\x7e\xfc\xfb\x63\x4a\xd9\x43\xad\xcf\x75\x75\xfa\xb1\x1a\x75\xf6\x10\x47\x93\x2e\xc4\x5e\x3d\x09\xc4\x03\x73\x89\x88\xea\x58\x30\x25\xc9\xcc\xf7\x69\xdc\x3e\x17\xdf\x23\x8c\xe1\xd3\xd7\xa2\x85\x11\x21\x59\xcc\xb1\x2f\xe7\x0c\xbd\x17\x42\x68\x60\x07\xae\x0a\x9d\xb8\x5f\xd1\xc8\x45\x30\x1a\x77\x66\x0b\xad\x05\x00\x31\xb8\xad\xb7\xe3\x53\xfe\x2a\xc0\xd4\xa8\x72\xca\xd0\x03\x1b\x51\x7a\x02\x66\xce\x7f\xbe\x29\x42\x26\x3e\xbd\x83\xa5\xa4\x44\x66\xf9\xe6\x69\xf3\xb9\x5f\x2d\x0a\x3d\xe1\x68\xc6\x03\x39\xdf\xe6\x4e\x64\x26\x15\xb8\xe3\x7a\x57\x02\xd9\xb4\x82\xbf\xdc\x12\xbd\x4f\xa8\x35\x00\x92\xca\x83\x73\x77\x83\xd7\x9f\xe9\xb8\x25\x7d\x9e\x69\x6b\x74\x91\x68\x17\x2e\xe7\x70\x7e\x39\xd7\x35\x1e\x5f\x32\x13\x16\xe7\xab\x1f\xaa\x49\x78\xdd\xf9\x8f\xac\xb7\xdd\xe3\xa4\x2e\x12\x29\xb2\x88\x10\x90\xe7\x13\x3f\xc2\x26\xec\x36\xe1\x27\xf7\x23\x3b\xe9\x9f\x1f\xb7\x30\x81\x7e\x10\xf4\x42\x3c\x8c\xa3\x63\x63\x3c\x07\xa4\xcf\x9f\x6d\x88\xa5\x6f\xb1\x0f\xa5\xb5\x88\x29\x3c\x9a\x6d\x66\x99\x6e\x74\x01\x6e\xa8\xc0\xbc\x34\x2e\x27\xb4\xdf\x5e\x06\x90\x23\x64\xd5\x25\xd0\x2f\xf7\xfb\xb8\x43\xbe\xe4\x4f\x21\x29\xf8\x92\x3d\xa9\x90\xea\x76\x24\x4f\x6b\xfd\xab\x2f\xf6\x0a\xb6\xc6\x31\xdb\x57\x38\x1e\xc0\xc6\xd8\x38\x48\x98\x48\xe8\x24\x76\x9b\xa5\x5d\xb1\x6e\xdc\x6d\x51\x15\x73\xe2\x9c\xfb\x04\xaa\x21\x7c\xcb\x34\x3d\x93\x6c\x08\xde\xf2\x15\x70\x44\x4b\x48\x4a\x40\x08\xeb\x88\xa8\xfd\xde\x7d\xbb\x1c\xf8\x65\x0e\x9f\x83\xb5\xdc\xfe\xe2\x6e\x50\x55\x5b\x18\x7d\x96\x2f\xb5\x8a\x3d\x34\x8d\xe2\x73\x7c\xdd\x4a\xe7\x50\x75\xab\xa9\x70\x21\x68\xc9\xbb\xdc\x66\x05\xe3\xed\x26\x7e\x10\x8e\xce\x3f\x14\xcb\x7d\x22\x32\xf9\xea\x5f\x7b\x1b\x06\xb8\xa1\x7c\x40\x53\x26\x99\x0a\xf0\xd9\xed\xc2\x9c\x2d\xbc\x06\xf8\x8b\x84\x1f\x3a\x8c\xd9\x02\xe7\x32\x47\x56\x0c\xc1\x40\x00\x7e\xf4\xbd\x4d\x33\xc6\x29\xa4\x63\x90\xc2\x46\x22\x40\xa5\x73\x86\xb6\x60\x2e\xcb\x8b\x0b\x6a\xed\x5f\xb6\x19\xf9\x6e\xf3\x2d\x9b\x1b\xf8\x8e\x0b\x0b\x51\xf4\x4b\x68\x2f\x10\x2d\x56\xfe\x38\xed\x79\x34\x7a\x7c\xed\x71\x74\x9c\xc3\x39\xad\xa1\xbd\xef\x68\x72\xc1\x5f\xd6\x70\x30\x07\x3d\x6f\x67\x28\xb2\x03\x23\x79\x95\x9f\x39\xd0\x31\x62\x54\x5a\x70\x46\x09\xae\x75\x8b\x68\x06\x94\xe7\x18\xbf\x2a\xf9\x2e\xd2\x41\xab\x01\x48\xb9\x05\xce\xe4\x13\xdd\x03\x4a\x7c\x92\xa2\xa8\xb4\xf4\x56\x00\x8f\xb2\xbd\x42\x5a\x03\xe6\x3a\x2e\x16\xe2\xd8\x98\xfd\x46\xc7\x3b\xce\xe5\x16\xc5\xb3\x26\x84\x40\xf4\x2e\x80\x5c\x42\xba\x63\x71\xf3\x43\x09\x1a\x84\x76\xc6\x97\xb9\x39\xe0\xbc\xd1\x37\xb7\x45\x32\x1c\x13\x3d\x60\x5b\xa8\x9f\x09\xa6\x52\x97\x8c\xdc\x64\x40\xe2\x19\xee\x49\xee\x03\x37\xe0\x2b\x0d\xcb\x6c\x50\xb5\x08\xa8\xa0\xbf\x57\x60\xae\x76\x54\x3d\x1e\x40\xe1\x60\x72\xb6\x03\x46\x26\x19\x99\xd7\x11\x0d\xd7\x58\x62\x29\x80\xb8\xa4\xe5\x2f\xf3\x08\xd5\x22\x1c\xb5\xe2\xd6\x66\x6a\x25\x2a\xf5\x43\x0a\x7c\xd9\x99\x62\xb7\xde\x47\x65\x45\xd5\xcd\x03\x86\x86\x47\x38\x86\xd8\x0c\x18\x6c\x68\xe9\xa6\xe4\x0c\xf8\x6b\x9b\xde\x15\xfa\x63\x1d\xf5\x7a\x4e\xc4\x89\xec\x7b\xe4\xd7\x10\xc1\xa8\x16\x0d\x09\x9e\xca\x7d\x4f\x63\xf4\xa4\x4a\x2a\xf0\x2a\x5f\x96\xf9\x29\x94\x7d\xa1\x75\x21\x8a\x3e\xc2\x2a\xc2\x6f\x1d\x16\x12\x9a\x50\xdb\x98\xc6\x86\x27\x8e\x04\xed\x08\xf0\x8a\x0d\x0f\xfe\x2b\xc1\xb3\x40\x30\x95\x63\x0a\x59\x8c\xfd\x6a\x0a\xa1\x1b\x67\xcc\x72\xa8\x8a\xa1\x23\x65\xa6\x66\x41\x27\x21\xff\x73\x12\xa7\xc1\xa2\x3
e\x51\x40\x1f\xf8\x15\xbd\x79\x3b\xb5\x2d\x1f\x48\x72\x33\xb4\x2c\x41\xf2\x15\x88\x17\x29\xb5\x7b\x3b\x4b\x0a\x17\xe8\x95\x57\x30\x90\xed\xda\x9e\x42\x5c\xa0\x8a\xd8\xc4\x55\x6b\xca\xa5\x4a\x5b\x53\x8e\xb4\xa8\xc3\x6e\xca\xa2\x8b\xd3\xc4\x1a\x01\x2c\x11\xc4\x91\x03\x03\xd7\x37\x63\xd5\xda\x70\xe5\x8c\x12\x2f\xa3\x41\xa1\x6d\xd7\xce\xd8\x19\x28\x8d\x75\x93\x04\x49\x6c\x50\xe7\xac\x19\xed\xa3\x6e\x57\xfc\x06\xc2\x40\xd8\x0e\x69\xf6\x3c\xbb\xc9\xd6\x3f\xe5\x21\x53\x39\x43\x57\xbc\x8a\x76\xe4\x29\x48\x88\x0d\xca\x14\x0f\x2a\x65\x84\x4b\xfa\x87\xe0\x94\x2f\x9f\x09\x79\xe1\x37\x60\x57\x8e\x3a\xde\xd2\x45\xd9\x7e\x3a\x00\xc6\x13\xbb\x1b\x0f\xd3\x0a\x41\x08\x3d\x8d\x80\x2a\xc2\x09\xbf\xba\xb2\x8b\x83\xe6\x22\x19\xb4\x7d\x48\xe1\xf9\x60\x6f\x5d\x90\xa8\xe9\x4d\xed\x6b\x20\x60\x34\xfd\x1d\x6e\xfa\x91\xfe\x51\x13\xe2\xdc\xdb\xe1\x33\x1e\x00\x18\x27\x0a\x56\x5c\xbb\x80\x13\x58\xa7\xdd\x5b\x41\x39\xb6\x9e\x64\x9b\x10\x70\xb0\xb6\x56\xa0\x6d\xf6\xc2\xfb\x09\x12\x62\x5b\x3e\x28\xa6\x61\x5d\x5e\x22\xe8\xc2\x45\x31\xb0\xb8\xeb\x69\x93\xfb\xa6\x07\x6b\xcf\x2e\xbc\x8d\xed\xa3\xec\x15\xfe\x55\x44\x19\xb4\x31\xfa\x52\x39\x3a\x69\x29\x50\x85\xa0\xb0\x44\x2c\x3b\xda\x4c\xa7\x50\x01\x64\xc9\x6d\x27\xe1\xa3\xb4\xe8\xaf\xb4\x34\xa7\xb8\x4d\x0a\x60\x93\x25\x4c\x19\x72\x9d\x47\x02\x35\x94\xc3\x8a\xa1\x30\xed\x02\x06\xd4\xad\xdd\xa7\x6a\xa4\x4a\xf5\x74\x7f\x73\x48\x8f\xd8\xdd\xaf\x29\x6a\x7a\x3a\x44\x2c\xcc\xd3\xa5\x7b\x21\xbf\x74\x49\x5f\x14\x17\x97\x49\x1d\x88\x23\x21\xc3\x61\x43\x48\xe3\xbf\xbf\x7b\xa6\xcf\xa5\xab\x78\x99\x16\xbd\x51\x95\x39\x74\x2b\x87\x26\x62\x61\xbe\x2c\xd8\x2a\x22\xa9\xea\x65\xe8\x8f\x4a\x1f\xfb\xf7\xa0\x74\x86\xdd\x40\xf8\xd7\xf7\xbd\x34\x34\xce\x17\x27\xa2\x7f\x45\xb4\x82\x3f\xfc\x2e\xd8\xc2\xd4\xcf\x57\xfe\xaf\x33\x64\x7b\x2f\x98\x34\x62\xfb\x33\xd0\x58\xed\x73\xf9\xb5\xf9\x6c\xb1\x10\xf1\x01\xc8\x49\x68\x13\x76\xad\xb7\x35\x44\x14\xc7\x2c\x7c\xfa\xc6\xbe\xc0\xd9\x58\x1f\x17\xc0\x64\x30\xb8\x37\x8b\x3e\x61\x74\xc8\x66\x2b\x91\x0d\x94\x81\xe6\x1d\x93\x3b\x36\x03\x25\xf6\x41\xba\x7f\x45\x10\x6f\xcd\x14\xce\x18\x28\xae\xc9\x4c\xef\xc3\xf3\x79\xd0\x12\x82\x32\x96\x88\x76\xbb\x2d\xb5\x6a\x8b\xa6\x8d\x16\x33\x0a\x43\x93\x2f\x3d\x7c\x20\x4a\xe0\x7c\x01\x8b\xba\xf0\x91\xa8\x65\x18\x78\x88\xc7\x97\xa6\x7f\xc4\xcb\x84\x05\x8f\xa5\x9a\xf4\xc0\x48\xdb\x50\x08\x9b\xd9\x9e\x69\x93\x66\xc7\x8a\x7b\xd9\x23\xeb\xfc\xc3\x4c\x42\x1a\x1f\x1d\x51\x88\x1f\xbb\x61\x1f\x40\xcd\x6c\x4c\x5a\xdf\xa5\x0f\xb3\xc9\xb7\x65\x22\x44\x66\xd8\xe4\x9c\x88\x17\xeb\xdd\xce\x56\x6e\xf0\xdb\xe8\x2a\x93\x88\x33\x9d\xe3\xf3\x41\x3d\x14\xbf\x15\xdb\xea\x24\xb1\x05\x88\x40\xfa\x1e\x01\x95\x09\x21\x47\x7f\x8b\x9d\x73\xcb\x55\x92\x77\x68\x03\xbe\x41\x87\x9b\xa4\x25\x45\xc3\x89\x1c\x18\x9a\x12\xdb\x78\xcf\xa4\x57\x5a\xf2\x4f\xf8\x16\xae\xe0\x08\xbd\x22\x59\x22\xf7\xa5\x77\x40\x4d\xdc\x38\xe7\xfa\x22\x38\x95\xe2\x73\x01\x40\xf3\x3c\x39\x3c\x25\xf6\x9c\xc2\xae\xc2\x29\xb5\x8a\x87\xb1\xaa\x7d\xa8\x9f\x3b\x17\x58\xdd\x59\xbf\x11\x92\x21\x58\xa6\x7d\x57\x37\xcc\x13\xc6\x97\xc7\xa0\xf7\xf8\x88\x75\xbb\xca\x2f\xe3\x1c\x3d\x91\xb5\x3a\x98\x7c\x49\x92\x44\x58\xaa\xe3\x42\xff\xb2\xe1\x5c\x0f\xbb\x75\x89\xcc\x38\x99\x85\x1e\x27\xfc\x91\x3c\xc0\x93\xec\x01\xd7\x5b\x8e\x33\xe0\x22\x7b\xa2\x03\xfc\x76\x2a\xc7\x97\x79\x45\xb5\xcb\x73\x43\x0c\x17\xad\x80\x7b\x85\x50\x37\x40\xab\x10\x34\x33\x06\x58\x04\x72\xe8\x21\x1f\xf2\x62\x09\xc2\x11\x36\x7b\xfe\xc7\xa4\x91\xe0\x9b\x3a\x67\xf4\xe6\xb3\x0a\xb4\x61\x4d\x4f\xd4\x6e\x07\xd5\xd8\x90\x2f\xdb\xe2\x70\x4a\x3a\xa8\x80\x74\xb8\x30\x1a\x0f\x1c\x8f\x50\x37\xa1\x8f\xcb\x11\xdc\xf1\x63\xa4\x15\xac\x6b\x3a\x13\xf1\x
7d\x7c\xd5\x33\xe5\x84\x51\x91\x31\x1b\x0d\x63\xdd\x12\xa3\xe5\x05\x1a\xc6\xfd\x87\x87\xe6\x10\x1e\x43\xb9\x3d\xc1\x50\x38\x30\x34\x85\x0e\x8e\x87\x3d\xa1\x1d\x51\x9a\x87\x25\xbb\x7e\x68\x5e\x54\x53\x03\xf8\x4a\x2a\x25\x4b\xf8\x52\x00\x2f\x73\x0c\xf2\xc6\x9f\x32\x68\xe5\xd2\xb4\x73\x3f\x8d\xd1\xdd\xb8\x1e\xc4\xd2\xac\x45\xc7\xd1\x1f\x9f\x61\xeb\x3d\xc3\x81\x0c\x85\x7f\x4f\x52\x5f\x76\xb8\x56\xa9\x50\x46\x36\xf0\xbd\xa4\x7b\xe2\x4c\x5a\xdb\x8e\xa5\x0d\x25\x94\x0f\x5d\xa4\x31\xe1\x71\x20\x9d\xbb\x53\x71\xcf\xa4\x9f\x04\x13\x3e\xee\xa1\xdb\x7f\x58\xb0\xdf\x5a\x28\xbe\x7e\xff\x09\x2f\x04\xeb\x0a\x7d\x93\x02\x31\xf1\x49\x2a\x26\xc1\x96\xe3\x96\x8b\xb9\x4d\x3a\xf4\xda\x6d\x69\xbc\x9f\x21\x7e\x61\x41\xda\x02\x91\x33\xbb\x14\x99\x87\xd3\x21\x4b\x8e\xe9\x49\xdd\x08\x4a\x7a\xed\x67\xe3\xf2\xff\x6f\x41\x78\xce\x8d\xd0\x28\xb7\xf5\x48\x4b\xf5\xe5\xbf\xb6\x02\xf9\x4e\xc6\x15\x43\x02\x8f\x7e\x05\x45\x10\xa4\x63\xe3\x80\xe9\x29\xb8\xb4\x4c\xa8\x95\x42\x68\xc8\x51\x6c\xd2\x2f\x80\x8b\x3b\x73\x74\xe1\xf6\xe8\x65\x97\xe8\xa4\x6a\x83\x71\x9a\x04\x15\x82\xdb\x0f\x5f\xb7\x07\x2d\x7c\xe7\x0f\x2f\x29\xd3\xa8\x43\x01\xef\x04\x44\x8d\xb4\x4b\x56\x57\x8a\x12\x1c\x08\xc8\x00\x2c\x34\x30\x9d\xf0\x55\x3e\xff\x24\x40\x7c\x0f\xca\x69\xb8\x70\x8a\x1e\x2d\x37\xf3\xa8\x3a\x19\x6e\xe4\x1f\x78\x1b\x28\xc3\x6e\xa3\x97\x9d\xf7\x12\x36\x11\x65\xbb\x57\x61\x30\x03\xb0\xe9\x89\x9f\x43\x01\xcc\xa0\xfe\x26\xc5\x39\x93\x1e\x3d\xe9\x7d\x61\x5b\xdd\x85\x34\x02\x89\x8f\xc3\x97\x4b\x46\xc5\xc1\x42\xda\xbf\xa4\xaa\xa9\x67\xf1\x76\xef\xe6\x4a\x98\x05\x6d\x45\x00\x5d\xf6\xe0\xf1\x9e\x02\x18\xc8\x83\x29\x6c\x62\x9d\xda\xb8\x4b\x57\xe5\xd6\x33\x77\x68\x50\x05\x06\xa5\xbf\x2c\x19\x91\x49\x52\x0a\x16\x50\x07\x57\x87\xbf\x2e\x4e\x9d\xf7\xd7\xd8\x09\xc0\xf3\xaa\x9e\x22\x6f\x97\x4d\x73\x8c\x75\xa2\xf3\xd6\x3e\x56\x9e\x8b\x85\x2d\xf1\xf5\x3b\x32\x28\x5e\x37\xe8\xaf\xfa\x76\xaa\x6e\x78\x3b\xd7\xae\x19\x76\x59\x5a\x3c\x63\xdb\xea\x4b\xf8\xb9\xc3\x15\x3a\x3f\x83\x16\x2a\xf7\x61\x3b\x25\x57\x0a\xb4\x51\xd6\xbe\x2c\x61\x5a\xe0\x7e\xf9\xa1\x43\x39\xa7\xee\x27\x3d\x84\x44\xca\x4b\x44\x2d\xd3\x62\xaf\x72\xc5\xb0\x06\xa4\x5d\x5c\x5f\x70\x41\x05\x72\xb2\x21\x29\x8c\x5b\x86\x14\x9f\x8b\xc5\x4f\x7e\x95\x73\x2a\x22\x5e\x85\x5f\x1b\x50\xa9\xf6\xd3\xcb\xff\x42\x46\xc8\xad\x0d\xc3\x60\x26\xaf\x9c\x95\xc3\x15\x45\xf2\x49\x79\x42\xd5\x73\x8f\x4b\x93\x89\xcd\xab\x76\xf4\x36\xce\x36\xee\x1d\xb8\x4b\x52\xcb\xad\x4f\x44\x02\x45\x1c\x48\xac\xb6\x6a\x14\xb5\x5a\xcc\x84\x3c\x4d\xfc\x09\x40\x6b\x84\x76\x10\x3a\xd9\xe3\x8f\x9d\x96\xdd\x78\xea\x71\x6c\x40\x0e\xbc\xd2\x15\x1d\x03\xb3\xf7\x1d\x53\xef\x99\x2a\x36\x0b\xef\xf0\xb4\xbc\x12\xed\x87\xa2\xfe\xb0\x27\xe7\x06\xb3\xce\x62\x93\xbb\x30\xe0\xb1\x19\x89\xcc\x57\xa6\x3f\xcd\x23\x21\x60\xf3\x11\x67\x12\x9a\x23\xfc\x9c\x09\x4e\xa4\x1b\xa9\xbb\x7b\x7a\x9d\x53\xe1\x77\x3f\xab\xa3\xed\x9e\x42\xad\x25\x8c\xd9\xad\xdf\x12\x1c\xea\x5c\x04\x96\xfa\xc2\xca\x28\xf6\x70\xfc\x7d\x67\xa2\xa1\xfe\xa9\xe8\x75\xd3\x61\x5d\xdc\x95\x1f\x0c\xea\x0e\x47\x2e\xd7\x51\xfe\xaf\xcb\x8a\xef\xce\xef\xb1\xaa\xd9\x1c\x20\xcc\x56\x0c\x53\xd9\x02\x57\xb4\xe4\x3e\xf6\x7e\x7b\xac\x83\xcd\x50\x75\x62\x26\x27\xc2\x56\xd4\x10\x5d\x45\xf9\xf6\xe9\x16\x73\x80\x56\x8d\x6b\x31\xf3\x15\x00\xf2\xf4\x51\x3d\xa0\x3b\x47\x80\x9d\xfa\xea\x23\x00\xa9\xe5\x49\x3a\x39\x4f\xac\x3b\xb5\xf1\x8e\x90\x0e\x01\xa6\xa9\x0d\x38\xd3\x96\xeb\xfb\x54\x9c\x7a\xdc\x88\xa6\x25\x3c\x60\xda\x54\x55\x69\x54\xd6\x46\xf5\x5f\x7d\xd6\x86\x59\xbb\x5e\xa5\xbc\x95\xdc\xcd\x5d\x9b\xc6\xbb\x12\x40\x90\xf8\xe5\x90\x4a\xf7\xe1\x04\x56\xda\x01\x10\xaa\xa5\x6e\xe2\x81\x76\xc9\xa9\x60\xb9\x05\x73\x26\x41\
x8a\x96\x73\x69\xc7\x2f\xd1\x19\xfa\x32\xc8\x93\x05\x4c\xe0\xc0\x1e\xd9\xd5\x12\x6d\x01\x62\x34\x98\xcc\x3d\x28\x47\xd3\x8e\xab\x70\x2e\x9b\xc0\xdc\x36\x2c\x7f\xfb\xde\xac\x71\xcd\x02\x52\x64\x1f\x15\xc7\x25\xc1\xe9\x57\x71\x44\x28\x4d\x43\x4b\x13\x48\x5c\xb6\xc2\xfa\x54\x5b\x0d\x00\x8c\xae\xc4\x8c\x76\x2d\xd5\x64\xca\xc3\x4a\x39\x06\x2a\xa2\xf5\x1c\x34\xac\x58\xf6\x88\xf5\x87\x6b\x4c\x5b\x0b\x68\x8f\x04\x6f\x15\x4b\x07\x08\xf3\x81\xd2\x97\x16\x12\x56\x14\x0a\x03\xb5\x74\xf6\x35\xcc\x39\x27\xf9\xb3\x20\xf0\xeb\x35\x55\xa5\xb1\x15\xe3\x1b\x91\x49\x21\x55\x05\x6a\x60\xab\x67\x68\x1d\xa0\x58\xba\xf3\x54\x87\x52\x7c\xa2\xe8\xb0\x51\xc9\xd1\xdc\x8b\x2c\xd0\x2d\x08\x73\xf8\x15\x17\x51\xc2\xc8\x2e\x6f\xb3\x56\xe0\x18\x4a\x17\xf2\x6c\x84\xb6\x99\xef\x4b\xc7\x3d\x9f\x82\x4e\xcd\xe8\x62\x7a\x42\x6e\x61\x98\xa9\xc2\x35\x69\xba\x7c\x2e\xb1\x12\xb0\x12\x07\xb4\xc3\x29\x4b\x1e\xbb\x2d\x1d\x14\xef\xb0\xaa\xa4\x1f\x22\xc9\xbc\x69\x4d\x76\xe7\xd3\xa4\xe5\x1f\xca\x3d\xb4\x5e\xf9\xc8\xa3\x88\x88\x22\xbf\xe3\x11\x42\x62\x7e\x51\xdc\x71\x31\x0b\x21\xea\xcf\xbc\x27\xbc\x85\x22\xee\xb2\x7f\x23\x94\xda\x50\xc8\x84\xec\xa8\xcd\xa7\xf9\x67\x01\x7d\x92\xa4\x9b\x43\x47\x96\x7c\x87\x71\x53\x7f\x01\xaf\x5d\xfd\x46\x9f\x03\xb1\xbd\x1c\xce\x3c\xc5\xee\x3f\xf6\x79\x6e\xde\xc7\xc2\x41\x59\xda\x28\x76\xab\x6c\x55\x66\xaf\xd0\xc1\x93\xc8\x9b\x2f\x0d\xf6\x96\x8f\xe2\x21\x78\x48\x3a\x47\x10\xc9\xd2\xec\xb7\xd9\x4b\x71\x9c\xcf\xe9\x37\xf9\x72\xda\x3d\x21\x8f\x17\xfb\x0d\x13\x51\x49\xcb\x90\xdd\xbc\x03\x58\x19\xc9\xd5\xa0\x84\xee\xe9\x32\x88\x6e\xab\x14\x70\x8e\xdb\xe2\xc5\xfe\x32\xc7\x04\x41\xe0\xc6\x86\xdd\x36\x33\x23\x5f\xc6\xd7\x48\x24\x10\xba\xdb\x39\xdc\x71\xbd\x05\x91\x29\x8f\x31\xf5\x73\xb9\x1c\xd7\xc2\xf1\x9f\x54\x0e\x30\xe7\x26\x18\xd1\xfb\x52\xf0\x45\xf5\x58\x00\x61\x3b\x1f\xd4\xc1\xd9\xa1\xda\x45\xac\x92\xd4\x0c\x30\x95\x22\x98\xe6\xb4\xdf\x57\xc4\xcb\x19\x80\x8d\x97\x14\x5a\x20\x82\xb3\x4e\x21\x90\x03\xd7\x18\xcc\x61\x08\xd0\x39\x27\x30\xd6\x09\x54\x9a\x08\x13\x3a\x5b\x49\xde\x00\xb9\xe4\x01\xc6\x9a\x87\xf0\x4b\x02\x21\x51\x7a\x9f\x8e\x69\x73\x68\x20\x02\x93\xe4\x97\xc3\xd2\x36\x70\x47\xc9\x3d\x9b\xdd\x99\x41\x68\x10\x6f\x34\x42\xc9\x80\x22\xeb\xdb\x99\x2c\x86\xb9\xd0\x10\x6c\x1e\xff\x79\x6a\x34\xbb\xfd\xb7\xea\xd9\xf3\xb8\xf7\x3e\x64\xa9\x11\xa6\x25\x4e\xcc\x97\x55\x0b\xe4\x20\xfc\x12\xdb\x5f\x58\xf4\x2a\x64\x8a\x24\x53\x67\x59\xa4\xf5\x91\xf2\x8e\xd9\xe7\x2a\x67\x3d\xc7\x9a\x32\xee\x32\x87\xf7\xd8\x28\x26\xb8\x15\x0c\x76\x14\x25\x42\xa0\x9a\xcf\x49\x44\x11\xba\x8c\xa4\xa4\xa0\xb6\x42\x05\xc8\x39\xf5\x57\x8d\xe6\x50\x89\x63\xb3\x65\x81\x19\xb9\x51\xf9\x0f\xe2\x88\x90\xa7\xb4\x9e\x03\x3f\x75\x39\x89\xe8\xe6\x31\x61\x06\x35\xa7\xc9\x9e\x50\x9e\x85\x25\x50\x4b\x7b\x57\x43\x20\xc3\x73\xcc\x49\x11\x01\x4a\x0e\x7f\xf7\xa8\x5d\xd2\xcc\x09\xcf\x9c\x5c\x5b\x45\xbf\x7d\xf2\xd5\x91\xa3\x02\x9d\x40\x9e\x5d\x74\x18\x8c\x61\x3d\x09\x9b\xae\xbd\x10\xb8\xf4\x2f\xcf\xb8\x69\x1d\x26\x74\x52\x72\x16\xb3\x77\xef\xdd\xdc\x12\x84\x77\xd0\x44\xeb\x38\xc7\x35\x51\x90\x77\xc2\x08\xd3\x09\xed\xd4\x4a\x84\x65\x2a\x4d\xa3\x5c\xd3\xe6\xfb\xb2\xd2\x82\xe1\xb8\xd3\xcb\x30\xdb\x26\xb8\x09\x35\x14\x09\x06\xa2\x21\xe4\xd3\xac\x1d\xd5\x97\xc1\xee\xf4\x16\x35\x02\xbb\xce\x94\xfc\x19\x73\x03\xe4\x90\x87\x1d\x8f\x68\xbc\x14\x07\x1d\x52\x02\x9c\xb2\x9d\x61\x95\x41\x23\x0a\x4a\x3c\xf5\x29\x57\x0a\x2f\x2c\x65\xb9\x88\x32\xc9\x5b\x7d\x8a\x84\xc4\xa6\x3d\xba\x76\x53\x7e\x1e\xb1\xcd\x1a\xe8\x6c\xc0\xda\x42\x3d\x62\x72\xb7\xce\x9d\xe1\x95\x28\x60\xe6\x99\x0b\x05\x81\xa6\xb4\x63\xb1\x4b\x17\xd0\xc6\x0e\x1b\x0c\x59\xe8\x9e\xa0\x9a\xb7\x2a\x62\x95\x67\x4a\x85\xed
\xa5\x73\x3f\x76\x04\x4f\xfa\xc3\x4e\xf4\x29\xd7\x61\x94\x62\x98\x80\x43\x93\xac\x51\x13\xfe\x4e\xaf\xab\xf8\x43\x07\x24\x96\x1f\x05\x18\x16\xe1\x58\x12\x3a\x5a\x5c\xdb\x5c\xd7\x03\xe8\xac\xac\xe0\xb6\xfd\xf1\x3f\x0d\xfd\x8d\xbf\x61\x9c\xfc\x73\x46\xac\xa4\xc8\x3f\xf4\x05\xda\xa9\x90\x79\x98\x72\x1f\xbe\x6d\xf9\x68\xff\x9e\x28\xe7\x73\x9c\xb4\x38\x11\x6a\xe9\xca\x47\xf7\x13\x05\xf5\xa3\xe4\xdb\x97\x90\x5a\x33\x27\xd9\xbf\x18\xef\x6b\xc8\x0f\xcc\x5c\x08\xfd\x85\x55\x9a\xef\x48\x81\xd3\xe2\xc9\xff\x02\x5f\xc3\x1d\xc0\xf4\x6d\x11\xb6\x59\x87\x2d\x3d\x0d\x86\xb0\xe6\x40\xa1\x26\xcb\xe1\xbd\xfe\xf2\x9b\xc6\xc9\xcb\x7b\x0f\x62\xbd\x84\x10\x83\xec\x92\x2a\x07\xbf\x3f\x9e\x29\xda\xaf\x48\xb4\xd5\x3b\x51\x59\xc0\x1b\xce\x4b\xab\xfa\x46\xb8\x65\x81\xfa\x4a\x5b\x34\xff\xd3\xf5\xfb\x1e\x9f\x75\x08\xb8\xe5\xcd\x4e\x9f\xc4\x35\xfc\x07\xc1\x60\x50\x8c\xe3\xdf\xe1\xbe\x44\xe7\x97\xc2\x92\xf3\x5f\x08\x03\x63\xa7\x06\x38\xd8\x78\xab\x3e\x49\xb7\x45\xa7\x44\x59\x16\x7e\x6c\x7e\x3e\xf3\x0d\xf4\xd2\xb2\x97\x07\x0f\x82\xb8\x8e\xf3\x0b\xde\x35\x06\x76\x05\xcb\xab\xbc\xb9\x20\x82\xba\xed\x3c\x3a\xba\x48\x6f\x34\xb9\x95\x27\xc5\x38\xfb\x24\x8a\xd6\x42\xdc\xd3\x61\xb2\xd6\x7a\x54\xa2\x02\x63\x3f\x0d\x4d\x47\xc7\x1f\x61\x00\x21\xcc\xd4\x02\xaa\x05\xcd\x32\xb6\x07\xbf\x81\x87\xab\xdb\x60\x1a\xef\x3b\xed\xae\x0e\x29\x22\x28\x38\x11\xc9\x85\xd1\x51\xfd\xcc\x40\xb0\xbc\x6d\x46\xbe\x44\x34\x3d\x4c\x13\x88\xd5\x3a\xc3\xac\x66\x70\xc7\x63\x85\xf8\xd5\x1e\x04\x92\xe9\x3a\x0e\xd0\x02\x23\x4e\x2c\xf0\x64\x73\xa0\xcd\xe6\x2c\x57\x01\x17\xf3\x06\x56\xfa\x27\x5a\xc8\x00\x39\x89\x1a\x0a\x3b\xa3\x61\xfc\x95\x38\xf8\x90\xbd\x9b\x42\x26\x71\x68\x39\x68\x80\xdd\x9e\xd4\xa0\x36\x20\x63\x3c\x97\xb6\xe4\x77\x7d\x10\x57\x56\x73\xb1\x21\xe7\x8b\x5b\x68\x9b\x11\x82\xa6\xf5\x03\x3c\xca\x0d\xab\xda\x96\xff\xbf\xb9\x01\x82\x46\x95\x4d\x14\x05\x74\xea\x90\xa2\x0f\x25\x9e\x01\xf5\xfb\x73\xd3\x11\x0d\xef\xb6\x01\x99\x01\x67\x05\x43\x9a\xa1\x9d\xca\xab\x91\x6e\x2d\xaa\x14\xff\x79\x03\x33\x7d\x89\x66\x25\xc1\xcf\xce\x85\x14\xe6\x9e\x95\x81\xfe\x2b\xe3\x85\xaa\x53\xe0\x9c\x2d\x7e\x4a\xbf\x29\x0b\xaf\x66\x7f\x84\xa4\xfc\xa2\x29\x40\xdb\x97\x2d\x52\xa0\xd6\x02\x10\x97\xd8\x41\xf7\x98\xe4\x6a\xd0\xec\x31\xaf\xcb\xea\x9e\xfa\x52\xaf\x6b\x5f\xc0\x0e\xd6\xde\xae\x96\x06\xfb\xe9\x1e\x94\x7b\xeb\x6e\x23\x09\xea\xf3\xc5\xc4\xa4\xb7\x3f\xf1\x8a\xe5\xa6\x3e\x7f\x01\x88\xa4\x1b\x95\xe3\x0d\x12\x4c\xec\x51\x04\xe6\xec\xd0\x42\x5f\x74\xef\xd3\x1e\x7e\x4c\xee\xaf\xa4\xbf\x51\x31\xdf\x8e\x7e\x7f\xf2\x13\x4d\x53\x48\x4a\xd9\x13\x4c\xd7\x24\xad\x7b\x3f\xcf\xd7\x68\xef\xf0\x5b\xec\x0b\x66\xfe\xb4\xc9\x8f\xb3\xca\xdd\x5d\x69\xa9\x18\x39\x38\x2d\x09\x6d\xb6\xe5\xc6\x37\x49\x6c\xf9\xa7\x93\x71\x62\xc1\xe3\xb9\x55\xb5\xa9\x7f\x4d\x29\x84\xb8\x61\x29\x91\xd2\x59\x70\xc9\x24\xde\xac\xb8\x21\x4c\xc4\x4d\xb5\x82\x85\x20\x95\xf6\x8e\xe2\x66\x3e\x27\xa8\x61\x28\x24\x6e\xc4\xb6\xb9\x25\xdc\x2e\x1d\x8d\x70\x76\x36\x7f\x64\xa9\x14\xa4\x0b\x41\xea\xe4\xcd\xe4\x31\xea\x20\x4e\xd0\x60\x4b\x47\xd8\xb0\x7f\x4a\x11\xe2\xe9\x05\x0c\x59\x30\x0d\x53\x08\x0a\x03\x60\xc7\x5b\xeb\x89\xe4\x59\x68\xa6\x67\x9e\x79\x2a\xee\x42\xc8\xf7\x58\xf7\xda\x9f\x01\xb6\x9b\xa3\xb2\xf3\xb4\xf9\x1a\x13\x33\x20\x6c\xd3\xfe\x75\xb5\x08\xc4\xa3\x71\x94\x8d\xab\xcf\x2c\x04\xde\x39\xc5\xca\x50\xbe\x50\x7b\x21\xee\xee\xb8\x55\xf9\xa0\x5b\x45\x8b\x37\xc1\xe8\xfe\xad\x10\x06\x26\xf4\xed\x9f\x97\x85\x17\x02\xd5\xfe\xa9\x47\xa5\x9e\x15\x41\x73\xff\xda\x29\x34\x1b\xb2\xd6\x72\x41\x69\x6b\x2b\x7b\xcc\x21\x7c\x5c\xda\x81\x57\xd4\xc3\x5d\xa2\x2c\x3d\x5d\x59\x77\xb6\x1f\x73\x09\x1e\x75\x76\x07\x58\xa9\x5b\xfd\xef\xcc\xd5\x0
5\x03\xc6\x71\xfa\xbe\x75\x8c\x9a\x45\x5e\x36\x4a\x7c\x02\xdf\x00\xc1\x2e\x7f\x1b\xcd\xa5\xfc\x8f\x9c\x1b\x94\x09\x84\x57\x77\x10\x5f\xd2\xe2\x55\x34\x14\x4e\x4a\x44\xf3\x2c\xaf\x9c\x3c\x91\x29\x99\xa7\x7d\xe5\xdd\x26\x54\x9f\xb4\x69\xd9\x0a\x4d\x50\x4b\xb1\x78\xcc\x90\x7d\xd3\xf7\x5e\x1e\x45\xc8\x1e\x32\xfb\xe4\xb7\xf5\x59\x66\x16\x67\x29\x7b\x17\xe8\x37\x5e\x86\xe2\xac\x4f\x64\x81\xfa\x73\x5e\x5d\x07\x7d\xd1\x23\xb4\xb1\xe6\x06\xcc\x50\x85\x31\x09\xe8\x4e\x62\x4b\xba\x73\xdc\x45\x7d\xe9\xb6\x58\x16\xe9\x7c\xe9\x8d\x6f\xc7\x82\x8a\x43\xd6\x60\xf7\x4b\x24\x4c\xb7\xb9\x44\xba\xf0\x83\xdb\x4c\x1b\xac\x29\x96\xba\xbb\x32\xca\x95\xea\xfd\xd6\xb6\x6a\x60\x74\x44\x5e\x00\x28\x78\x26\x69\x56\xed\x5b\xf6\xe5\x83\x1b\x1e\x08\x18\xfe\x5c\xf1\x85\xe1\x08\xe3\x50\x41\x0a\x55\x0b\x18\x38\x43\xce\x25\x1a\xbd\x4b\x55\xf6\xfa\xea\xea\x9f\xa5\x77\xb6\x94\x4f\xaf\x92\x5a\xf1\x65\x0b\x47\xab\x6e\x9c\x3f\xbc\x46\x94\xce\xd2\x17\xa2\xf4\x17\x16\x5b\xdd\x2f\xc5\xb6\xe8\x60\xd5\x8d\xac\xdd\x31\xac\x30\xc4\x40\x88\x5a\x1c\xd8\xb4\x2e\xcb\x89\x1e\xf2\x5a\x98\x06\xc5\x11\xa8\xc3\x26\xe7\x28\x74\x10\xef\x4e\xff\xd1\xdd\x0a\x08\xe3\x26\x63\x86\x2e\xdf\xe8\xc2\xd5\xe5\x04\x5a\x02\x09\x9d\x4f\xa5\x43\x66\x37\x42\xd5\x50\x0c\xe6\x8b\x80\xd3\x50\x70\x57\xc9\x58\x3a\x5d\x1e\xd9\x03\xf5\x62\x60\x49\x8a\xff\x71\x0e\x67\xac\xbe\x5f\xa8\x5f\x7a\xd0\xd8\xc4\xe4\xa9\x2f\xb7\xa1\x59\x62\xa1\x5d\xfa\xe3\xe6\x88\x9e\xa8\xb3\x59\x74\x17\x8e\x60\xf3\x12\x90\xca\x34\xd5\x82\xac\x4d\x26\x2c\xc5\xdd\xd0\x05\x75\x3f\xa9\x2f\x92\x29\x3a\xdf\xea\xec\xda\x0a\xe5\xaa\xa0\xfd\x76\xbb\xfc\x32\x63\x48\x13\x5e\x31\x4b\xcf\xc0\x6b\x50\xa7\x8a\x19\x60\x13\xb3\xb2\xee\xc0\x8a\xec\xaa\x4b\x0f\x21\x10\x7d\x5b\x45\xd8\x1d\x89\x18\x62\x5d\xce\xc8\x14\x71\xc8\x77\x1c\xfb\x27\x24\xac\x6c\xce\x04\x2b\x18\x07\xba\x15\xf7\x3d\x05\x2a\x31\x38\x97\x44\x28\x1e\x31\x3a\xfc\x17\xb0\x38\x4a\x14\x4b\xaa\x5e\x55\xc6\x4b\x9e\x60\xd1\xac\x8e\x00\x94\xf8\x22\x6a\x30\x80\x8e\xb7\x93\x58\xda\x14\xb9\x20\x8c\xde\xfc\x74\xeb\x39\x61\x21\x9f\x72\x1a\x57\xb8\xa0\x8a\x34\xc5\x6b\x0e\xc5\x6e\xd5\x6a\x39\x9f\x23\x30\x52\xb7\xba\xcb\x7c\x55\x6d\x2f\x5d\x8d\xb8\x42\xb3\x97\x33\xf4\x6b\xb8\x0a\x58\x1b\xe7\x3c\xcf\xb1\x13\x63\xbd\x0e\x7f\x86\x1f\xa7\xc5\xf9\x5c\xd7\x54\xbf\xa3\x49\xd5\x5f\x6d\x4e\xc1\xd9\x41\x4d\x0d\x5a\xc0\x33\xe3\xea\x92\x66\x93\xcf\xa6\x7c\xe9\x98\xd1\x24\xab\xd8\x38\xa2\x4b\x9b\xfe\x05\x3c\x73\x7c\x3e\xc3\xfd\x0d\xac\xd2\x70\xe8\x9a\xc2\x6c\xba\x0d\x1f\xba\xe1\xbc\xb8\xd1\x0f\x32\x9c\x22\xc7\x57\xe9\x31\x1d\xa7\x72\xb0\x77\xbd\x2c\xbe\x58\x38\x75\x32\xaa\xf5\x38\x6c\x91\xc1\xc8\x7d\xc5\xac\xc4\xe9\xb9\x4e\xfd\x81\x23\x7e\xf9\x70\xd9\x1b\x95\x2a\x4a\x2e\x40\x5d\xac\x9a\x75\x38\x7c\x3d\x1c\xcc\xe6\x55\xe6\xee\x3a\x0c\xec\x46\xfe\xe5\x24\x46\x5a\x56\x46\xac\x4e\x6b\x73\xe2\xb1\x7a\x93\xb9\x2a\x91\x10\x95\xeb\x5d\x67\xb1\xc6\xce\xd2\x8f\x45\x2b\xc4\x51\x89\xd6\x5c\x47\xba\x40\xb9\x2b\xd9\xa2\xf5\x2d\x34\x46\x8b\x19\x76\x13\x98\x45\x6c\xd0\xbf\x2d\xdd\x98\xcd\x2f\xf6\x1c\x16\xf9\x3e\xc9\x65\x4b\x5a\x1b\x18\xfc\x0f\x4a\xd6\x91\x48\x8c\x0f\x70\x05\x07\xfa\x9d\xff\xe4\x27\x76\x4f\x9b\x6d\x5a\xf1\xe2\x2e\xe2\x30\x89\xd3\xec\x25\x28\x86\xd4\x46\x10\x4b\xe0\x45\xa8\x28\x76\x2e\x81\x45\x85\xce\x98\x33\x3c\x6c\x80\x3e\x7c\xd6\x6e\x01\x5f\xb0\x9b\xdb\x7d\xba\xec\xb7\x24\x74\x04\xf0\x4b\xeb\xf1\xe8\x6f\x12\x2a\x63\xbd\xe0\x99\x96\x24\x93\x0c\xee\xf6\x67\x8e\xec\x64\x43\x25\xa0\xc6\x89\x5e\x0c\xb4\x95\xba\x4e\xe3\x8e\x05\xfa\x5b\x6c\x72\x59\x09\x4d\xcd\x14\x53\x38\x8b\x42\x94\x8c\x5f\x47\x80\x08\x95\xc9\x46\x09\x92\xd9\x73\x16\xa6\x8b\xcd\x94\x54\xed\x6a\x46\x96\x2b\x1b\x96\x
[... hex-escaped binary data (fragment of an embedded blob in the vendored content), not human-readable; elided ...]
\xce\x8e\x33\xef\x77\x6b\xc5\x24\xab\x19\x29\x80\x05\x01\xbd\x88\x98\x1c\x36\x61\x60\x85\xf8\x49\x6b\xbe\x46\x10\x7c\x68\x72\xe4\x4d\x44\x69\x0b\xdc\x49\xd9\xdf\x94\x5d\x68\xde\xa3\xe5\x17\xc3\x2c\xf7\x2e\xc0\x4f\x6a\x32\xd3\x54\xb2\x60\x03\xf0\x20\xef\x92\x3c\xd9\x4d\xf2\x27\x34\x29\xd2\x87\xab\xfa\x5c\x59\x0b\x7d\xb1\x22\x1d\x30\x84\x12\x1c\x8c\x20\xfd\x82\xd9\x2d\xfd\x5b\x6c\xbc\xe1\xce\x90\x64\x6f\xd2\xbf\x10\x38\x05\xe3\x59\x84\xe7\x3d\x07\x11\x3d\x8a\x05\x16\x3c\x6b\xd4\xd0\x25\x72\x91\xa4\x99\x45\xaf\xea\x1b\x64\x8d\x4c\x6e\xf2\x34\xed\x97\x02\x9f\xdb\x32\x7e\xc9\x31\x34\x4d\x6d\xbd\xd3\x92\xca\xa9\x0d\x6a\x74\x64\x43\x3f\x9f\x41\xad\x0f\x63\x1f\x04\x7c\x4d\x9b\xf6\x08\xf6\x75\xfb\xea\x13\xc6\x43\x6f\xa4\x97\x73\x4a\xff\x8e\x0d\x0b\x78\x33\xe3\x4c\xf6\x32\x6f\x27\xcb\xf5\x9c\x5c\x7a\x1e\xa5\x19\xff\x53\x5e\xb3\x5d\x81\x83\x85\x9d\x33\x08\xba\x89\x74\x6a\xac\x1d\x28\xf1\x27\xb7\xfa\x87\x61\x17\x2b\x1f\x08\x65\xc4\x48\xbe\x75\x25\x59\xdb\x04\x46\x2a\x8d\x22\x57\x8f\xe2\x5c\x83\x56\xad\x17\x9c\xf4\x5c\x28\x83\x46\xb3\x05\x9e\x29\x96\xa3\x1f\xff\xef\xcb\xbf\x92\xe7\xb6\xd1\x08\x72\xd3\x4a\x92\xd3\x9e\x8f\x22\xde\xb3\x38\xd0\x08\x2d\x79\x91\xc3\x24\xa1\x80\x8a\x48\xa8\x5d\xd7\x4d\xca\x04\x08\x41\xfa\x14\x95\xc6\xc6\x31\xa4\x13\x69\xb9\x64\xfa\x93\xdc\x71\x83\x13\x15\x5f\xd5\xf6\xf1\x93\x0d\x2c\xc8\xfb\x9b\x48\x98\x6e\x19\xf2\xe7\xb6\x3b\xc4\xd3\x73\xdc\x29\xc3\x58\xc9\xc3\xa0\xe1\xb1\x43\xd7\x5a\xa8\x76\xf7\x63\x71\xe8\xda\x15\xad\x87\x13\xa9\x49\x0b\x9e\xf1\x03\x4c\x14\xa1\x4a\x20\xb1\x51\x77\xd2\x3f\x65\x16\xd1\x5d\xc6\x98\xfc\xe8\xb9\x19\x7b\xcd\x23\x11\xcb\xfb\xd2\x59\x47\xf4\xfe\x9c\xae\x5d\x39\x76\x9a\xbc\x02\x28\xf7\xc7\xa9\xe5\x42\x77\x82\x8a\x05\xcb\xc8\x90\x56\x9e\x75\x68\x6a\x02\x68\xed\x9e\x99\x12\x36\x4c\xc3\x41\xfe\xf5\x66\xaf\xfb\x2d\xf3\xb4\x99\x25\x5c\xce\xfd\x5d\x1d\x56\x0e\xdd\xfb\xdf\x42\x29\x0f\x5b\x84\x6e\x82\x7f\x5e\x07\x90\x42\x5c\x09\x35\x3c\x8c\xbc\x26\x85\xfd\xb2\x5b\xc1\x74\xbc\xae\x2d\xe8\xc7\x53\xee\xaa\xdc\x1a\x68\xda\x62\x59\x8b\xa2\xcd\xd0\x9e\x7b\xbe\x01\xd2\x22\x5f\xf4\x6a\x2f\xe1\x26\x18\xfd\xca\xcd\xb4\xb1\x59\x61\xa7\xab\xd6\xec\x01\xef\x4e\x80\xac\x89\xab\x0d\x10\x31\x0f\x05\x50\x8e\x57\xaa\xa1\x11\x1f\x3d\x34\xab\xb5\x66\x57\x0d\xb5\x53\xd3\xc8\x39\x55\x38\x38\xdb\x36\x3e\xa7\x4a\xd4\x2b\xa1\xe3\xa6\xd3\xba\x55\xd8\xfc\x83\x1d\x8e\xf7\x7d\x7f\x58\x31\x84\xe8\x86\xc7\x33\xde\x99\x3d\xac\x2e\x5b\x7f\x79\x1d\x2e\xef\x41\xfe\xd0\x5f\xb6\x8c\x08\x0e\xbc\xca\xe9\x50\xa2\x0b\x58\x1f\xee\x78\x71\x3e\x7d\x51\xc0\xe2\x42\x06\xa6\x99\xc8\x15\x22\xbd\xd7\x0f\x9d\x07\x0f\x9e\xde\xe7\x5a\x00\x15\x40\xb4\x51\x5d\xe4\xe3\xc2\x1c\x4e\x49\xf6\x01\x1c\x1f\xf8\xd1\x70\x15\xce\x9c\x62\x79\xa3\x2a\xad\xfd\xc1\xbb\x47\x14\xbb\x5e\x54\x8a\xfc\x10\x1e\xf1\x29\x85\x89\xce\x87\x12\xfe\xee\xe4\x79\x45\x10\x1c\xf2\xd2\xb9\x95\xb5\xc7\xbf\xa9\x3b\xad\x07\x8d\xc5\xf2\xcf\xd7\x58\xf7\x01\xf3\x28\x42\x30\x3e\xfc\x1b\x6c\x28\x3a\x12\xbf\xec\xe6\x5b\x7d\x22\x25\xfa\xe0\xf1\x68\xc8\x0b\x87\x9e\xbc\xf6\x0e\xa7\x00\x65\x64\x85\x2b\xf0\xe9\xfd\x07\x58\x96\x3d\x4d\x72\x04\x1d\x6b\xa6\xeb\xd6\xa1\xdc\x89\xd9\xa8\x73\x10\xc9\x12\xe7\xab\xfe\x9c\x7c\xb2\x03\x11\x7d\x8f\x8d\x8c\x1f\xf9\x7a\xe6\xe5\xe4\x2e\x1b\x77\xc5\x76\xcf\x3a\xa8\xe1\x26\xbd\xab\x91\xa8\x0c\x62\x3e\xb9\x9e\x6e\x3a\x4e\xd7\x61\x71\xf6\xb8\x1f\x94\x5c\x4e\xe1\x47\xf1\x73\xcc\x53\x10\xc9\x57\xba\x1e\x03\x49\x11\x29\xbf\xc5\xfb\xd1\xe8\xb7\x4f\xfc\x99\xed\x7e\xf2\x0e\xf4\x4d\x2a\xaa\xb3\xb7\xcb\xe1\xdd\xc3\xbb\x60\x09\xe0\x87\xaf\xca\x81\xef\x3d\xbe\x99\x0b\x5b\x90\x23\xe0\x43\x86\x20\x06\x93\xf7\xc8\x7c\x53\x4e\x48\x8c\x58\xf
7\x00\x45\x7c\x64\x3f\x09\x34\x20\xa7\x0a\x96\x66\x2e\xe4\x97\x2f\xad\xf8\xda\x58\x3f\x63\xd0\xeb\xa3\xba\x5c\xf4\x52\xcb\xfa\xe8\xdb\x55\x78\xf8\x6a\xd6\x28\x8f\xce\xc7\x67\x94\x3b\x1b\x4b\x18\x9d\x86\x43\xf3\xdb\x8b\x87\x53\x01\x39\x62\x66\x5b\x93\xa1\xb7\xab\xa9\x71\xb4\x7b\x5b\xa6\x40\xe4\xcc\xdb\xb9\xe8\x47\x8d\x5a\xbe\x4c\xea\x79\xc7\x78\xe4\x76\x71\x1e\x24\xc3\x4e\xd4\xf9\xca\x39\x78\x43\x41\xa2\x96\x3c\xec\xe9\x75\xa8\xdb\x59\x57\x12\xce\xbd\xde\xe0\xe7\xde\x91\x97\x1e\x76\xa4\xdb\x49\x7a\xfa\x6d\xe4\x2c\x8e\x3a\x3f\x0b\x31\x89\xe5\xab\x3d\xda\x5e\x37\x38\xc1\xfe\x0b\x3e\x7a\xf0\x4c\xd5\x5a\x63\x1a\x0f\x69\x21\xcf\x06\x3c\xb0\x8b\xa0\xfc\xde\x9b\xf4\x29\x45\x6d\x60\x0f\xc7\x8b\x13\xcf\x95\x38\xe6\xd3\xf8\xaa\x66\xa9\x53\xc8\x04\x5b\x1e\xef\x25\x4b\x01\x94\xd4\xcc\xd1\x40\x6e\x5e\x39\xed\x20\xbf\x44\xc3\x2d\x44\x65\xb3\x5a\x39\xd9\xe7\x32\x04\xac\xd7\x3a\x27\xbf\x58\xcb\xf3\xe4\x7d\x62\xd1\x96\xf3\x5f\x9c\x96\x56\x82\x6a\xd3\x86\x30\x3d\xac\xe7\x22\x8d\x51\x7d\x4e\x5d\xa4\x41\x48\x99\xb9\x78\x25\x71\xbe\x48\xfa\x7c\x79\xd7\x17\x6a\x89\xb3\xf2\xe9\xdd\x79\x65\x89\x86\x04\xdf\x19\x83\x74\x17\x8d\x58\x6f\x89\x46\x8e\x98\x98\x5d\x1e\xe7\x95\xa2\x98\x65\xc2\x1b\xe9\x3d\x06\x14\x02\x08\x84\xff\x63\xc5\x0f\x09\xcc\x8c\x65\x01\xc4\xbd\x94\x6a\x4f\xfa\x17\x1f\xb2\x63\x39\xdc\xe9\x3e\xc2\x46\x26\xa9\xf8\xed\x26\xed\xa4\xd9\xc4\xe1\x66\xa9\x99\x91\x58\xd9\x1b\x8c\x39\x6e\x32\x38\xf8\x8b\x5c\x64\x85\x75\xac\xdc\x48\x37\xa9\x31\xba\xa0\x74\x70\xe6\x63\x5e\xeb\x44\xfa\xf0\x60\xc5\xe3\xd2\x4b\xd4\xaf\xda\xcc\x8a\x12\x1f\xad\x07\x9d\x66\x33\xdd\xac\x2c\x3c\xe7\xc4\x23\xec\x27\x95\x4c\xab\x2b\xb7\x06\xc5\x7e\xd9\x9d\xd3\x51\xf9\xf9\x09\x34\xa7\xe9\x21\xcf\xd9\x7a\x1a\x90\x46\x8d\xb7\xa8\x0a\x7a\xac\x3f\x64\xb3\x97\x7f\x3e\x23\x9b\xe5\xbe\x7d\x00\xb8\xe1\xdf\x0e\x27\x35\x75\x3b\x16\xeb\x37\xb2\xc8\xc6\x88\xa2\xb7\xd2\x8e\x06\x24\x4e\xfe\xe9\x65\x5d\xa4\x6a\x75\x88\x93\x79\xa0\x48\x47\x5a\x3e\xaa\x8c\xdc\x17\xa7\x0b\xac\xcc\xc0\x60\x84\xba\x2c\xaf\xf3\x38\xe9\x27\x41\x87\x5b\x83\x70\xae\xbd\xed\x47\x8f\xb1\x6b\x24\xa8\x30\x56\x02\xc6\x01\x3e\x9b\x6f\x37\x55\x11\x2c\x82\xaa\xa1\xb3\xef\xab\x0f\x2e\xcd\x65\x21\x69\x3c\x04\x50\x18\x2d\x2e\x95\x43\xec\x77\xe6\xf1\x18\x2c\x26\xee\xfb\xd6\x20\xc2\xfa\x14\x37\xd4\x14\xc8\x54\x45\x85\x1e\x00\x5d\x18\xbf\x8d\x74\xfd\x95\x7c\x0c\xb5\x4a\xe1\xda\x00\xaf\xf2\xf6\x70\xdb\xd2\x97\x67\xb0\xcb\x03\xf0\x32\x64\x76\xa4\x58\xcb\x28\x8b\x8c\xa1\xad\xd1\x10\x92\x09\x43\xdd\x7f\xda\xaa\x64\xef\x21\xd8\xf7\xee\xa2\x8b\xc5\xd6\x26\x56\x6d\x20\x30\xd0\xba\x7a\xa1\x45\xbc\x4e\x92\x91\xe0\xa2\x71\xa7\x30\xf5\x82\x51\x1e\xe0\x33\x68\xf7\x22\x98\xc2\x81\xda\x96\xd7\xbe\xf0\xc4\x87\x70\x05\x00\x23\xab\x64\x0a\x5a\xbe\x95\xca\x86\xbe\x7c\x5e\x03\xa7\xa7\x58\x63\x07\x62\x49\xf4\xd4\x8b\x37\xf4\x7e\x85\xf1\xa6\xb4\x0b\x76\x8f\x6d\x3e\x5f\xf8\x9c\xef\x1f\x17\xa9\xa1\x2e\xc1\xf8\xf7\x47\x1e\xff\x67\xbc\xf7\xc5\x9a\xfb\xfe\x19\x63\xe1\x74\xff\x54\x9d\x0f\xb7\xfa\x05\x4b\xd8\x3f\xf8\x93\xae\xbf\x24\x7e\xfe\x27\xd5\x42\xe6\xfe\xf9\x36\x70\xd8\x6d\x08\x7d\xd1\xb1\x62\x3f\xff\x24\x09\x65\x1d\xf6\x9a\xe4\xc2\xbb\xc6\xc2\xa2\x4b\x20\x50\xc2\xcb\x43\x1d\x39\x57\xa3\xf1\x9d\x1a\x33\xc0\x25\xea\xb5\x9e\xe3\x93\x07\x75\x97\x93\x49\x4f\x79\x0e\x37\x6f\xa2\x44\x2c\x3a\x1d\x9f\xe1\x5e\xbf\x48\xcb\x44\xbb\x76\x81\x04\x1d\x0b\x71\x37\xa5\xa5\x3b\xac\xe9\xe6\x74\x2b\x24\xfc\x82\x96\x20\xd1\x85\xc3\xed\xa1\x6c\x33\x8f\xa1\x89\xb0\x1c\x62\xac\x5a\xfa\xda\x85\x28\xc2\x85\xd4\x52\x8a\x06\x9b\x2b\xdd\x52\xc2\x01\x16\xc1\xf1\x37\xb9\x5e\xc2\xcd\xf9\x47\x9f\x04\xad\xce\x68\xa0\x2b\x1c\xc8\x25\x95\x54\xb4\x1e\x
5f\x27\x47\x74\xb7\x13\x71\x92\x23\x9a\xcd\x15\x3e\x90\x54\xe0\x31\xea\x43\x7c\x02\xe1\xba\x2b\xb2\xf7\xad\x1d\x2b\x10\x8b\xc2\xda\x22\xec\x82\x93\x0b\x22\xa1\xfc\x61\xf1\x00\xf8\x75\x3f\xdc\x97\x85\xd1\x71\xea\xd2\x1f\x10\xe2\xd2\xb9\xa7\xe5\x4f\x07\x2e\xc1\xf9\xb0\xb8\x5f\xc2\xa5\xd8\x3d\xa1\xeb\x55\x70\x1d\x05\x04\x96\x77\x3b\x23\x7a\x3d\xf9\x53\xf9\xb2\x3e\xaf\xf4\xea\x62\xee\x3c\x7e\x55\x9d\x8f\x7e\xbe\x98\xc7\xb2\x01\xb1\x51\xfa\xfc\x4e\xc1\x74\xf4\x70\x7f\xab\x02\x88\x29\x01\x59\xbb\x54\x36\xb0\xbb\x5d\xdb\xee\x5b\x3f\x41\xfa\x1a\x6b\x6e\xef\xb5\x25\xb4\x46\x94\x6a\xd9\x48\x62\x4b\x62\x90\xd1\x13\xb5\x26\x7e\x40\x92\x4b\xd9\x5e\xae\x46\xa4\xf5\x72\xbe\x27\x71\x9b\x91\x4e\x91\xcf\x8d\x28\x05\x53\x0d\x0e\xc2\xeb\xc9\xdf\xb3\x9c\xab\x74\x17\xc4\xc1\x77\x1e\xea\xa1\x1f\xa1\xac\x43\x3c\x3b\x25\xe8\x69\xa8\xe8\x01\x42\xe0\x94\x9d\xdd\xa5\x2e\xa5\x5c\x50\x9e\x61\x33\x5d\x6c\xea\x5c\xbb\x32\xca\xb7\x69\x1a\xa0\x06\x29\xf4\xcb\x15\x4e\xc8\x96\x22\x53\xbf\x2f\x5a\xe8\x6a\xa5\xd3\x90\xaa\x34\xc3\x7c\xf6\x71\x1f\xce\x3c\x1f\xff\xb6\x39\x06\x54\xb7\x25\xd6\xd6\x37\x45\xd9\xd8\x3e\x65\x01\x80\xf6\xd7\xe5\x94\x6b\xc6\x7a\x90\x5e\x59\x94\x73\x3b\x49\xcd\x03\xf7\x11\x90\x0e\x5a\xe3\x8e\x6b\xb1\x3c\xcb\xe9\xf0\xb8\x8d\x42\x24\x61\xfe\x52\xd6\x09\x49\x05\xc5\xc0\x76\xf0\x77\x1d\x2d\x3b\xdf\xee\x28\x34\xb6\x20\x3a\x76\xd0\x77\x38\x63\xdd\xd4\xa3\x2e\x7f\xc2\x76\x63\x1f\xae\xf8\x4b\x87\xf3\x9d\x88\xf9\x87\xe2\x82\xfa\x34\x98\x04\xdf\xae\x69\xe8\x1b\xe9\x0e\xc2\x46\xb8\xde\x43\x3f\xa1\x1a\x67\x63\x32\x17\x26\xe4\x96\xed\xda\x54\x68\xd5\xfe\x10\x36\xb6\xf0\x29\x57\x61\x36\xdd\x84\xe1\x47\xf1\x24\x4b\xe2\x3c\x35\x57\xbd\xc7\x07\x58\x08\xbd\xa4\xe9\x40\x21\x90\xc1\x17\x2c\xf8\xa6\xf4\xbf\xb3\x1e\xe9\x6c\x23\x88\x2d\xd8\x5f\xa4\x68\x0c\x47\x62\xf5\xac\xec\xf1\x15\xae\x23\xb5\x6e\x9c\x91\xc1\x3f\xd3\x79\x6d\x60\x16\x84\x7d\xb1\x5c\x5a\xf6\x2c\x50\x97\x05\x14\x69\xf0\xb5\x40\xbe\x07\xa5\xa0\xd0\x28\x0f\x45\x05\x8f\x7d\xe1\x54\xe2\x04\x36\xdb\x6a\xa6\x2a\x87\xb0\xc6\xaf\x1a\x20\x57\xe8\x21\xa4\xf3\x2f\xfc\x88\xe7\x50\x3f\xf8\xa3\xbe\xfa\xde\x42\x12\x5a\x8e\xd4\x2d\xe5\x37\xdc\x61\x95\xf7\x00\xca\x8e\xfe\x29\x76\xbb\xf0\xa7\x06\x28\x8a\x7b\x96\xfd\x45\x15\x67\xac\xfe\xc0\xb6\x50\xbb\xab\x42\x02\x72\x2f\x0f\xa6\x50\x90\xb0\x2c\xbe\xf2\x2f\x76\x47\x69\xf8\x91\x3f\x2d\x7f\xe8\xad\x02\x3b\x2d\x72\x58\x15\x29\xf8\xe7\xc2\x7b\x67\xc3\xe4\xd9\xc7\x72\x10\x12\x49\x2f\x07\x82\x8d\xbd\xbf\x4d\x92\x29\x0d\x80\x99\x16\x96\xd7\xa4\x4d\x10\x7e\x46\x8c\x3a\x8f\xa0\x47\xc4\x59\xb8\xc5\xe8\xe7\x9c\x66\x5e\xc5\x4f\xb7\x60\xba\x57\x3b\x62\xed\x1e\xae\x95\x76\x03\x5b\x51\x71\x34\x24\x0f\x74\xbc\xd4\x4a\xe1\x53\xac\x8f\x1a\xa3\x90\xb0\x01\xe2\xe9\xfa\x01\x63\xf5\x3d\xf1\x50\xc7\xa9\x38\xb0\x29\xcf\xc6\x16\x0c\x92\xe2\x31\x32\x90\x8e\xa1\x16\x3b\x6f\x4d\x84\x9d\xad\xe9\xff\x73\xf5\x6e\x59\xb2\xea\x3e\x13\xe7\x7b\x0f\xa3\x5e\x6a\x54\xfd\x60\xc0\x09\xae\x04\xcc\x9f\x4b\xe5\xce\x5a\xab\xe7\xde\x0a\x45\xc8\xe4\xf9\xea\x9c\xbd\x90\x49\xee\x17\x63\xcb\xd2\x2f\x5a\x7a\xfe\xaf\xa7\xf8\xab\x40\x30\x96\xa8\x00\x68\x84\x47\x70\x21\x44\x0a\xc3\xcb\x04\x9f\xcd\x5c\x1a\x4a\xc0\x95\x71\x94\xbe\xef\xe8\x11\x5a\xad\x59\x03\x7b\x48\x32\x9c\x92\x19\x0b\xf7\x97\x40\x0c\x3b\xe3\xf6\xf6\x2c\xfd\xa5\x38\xcc\x2d\x58\x0a\xd2\x97\x95\xed\x22\x7f\x1f\x18\x02\x7c\xb2\x74\x02\x76\x81\xe4\xae\x34\xdb\xa5\x47\xa5\x4b\x2c\x1d\xd2\x4b\x8b\xdd\x8a\xe0\x3b\x84\x57\x52\x83\x6d\xa2\xff\x20\x41\x61\xeb\x8b\x68\xab\x18\xd1\x0e\xac\x02\x18\x6e\x11\x0a\xea\x41\x2a\x5c\x44\xa9\x15\x0d\xa8\xf0\x1b\x64\x65\x8f\x4a\xe4\x35\x99\x28\x90\x15\x30\x08\x78\x05\x76\x22\x07\x02\x0a\x1f\
x5a\xc6\x8f\x2b\x90\x3d\x3b\xc8\xb0\x63\xde\x83\x4b\x86\xb4\xf3\x35\x7e\xca\x8a\x13\xb4\x57\x72\x0f\x51\x64\xd7\x67\xd6\x5c\x0c\x5c\x87\x59\x90\x2e\x9f\xa8\xb8\x8c\x0a\x21\x85\x56\x72\x88\x26\xbf\x28\x9e\x1d\x1f\x99\x1d\xd0\x6c\xce\x09\x95\xdf\x3d\xfd\x64\xe9\x7d\x27\xb9\x5f\x9c\x28\xac\x0d\xd8\x71\x49\xfc\x78\xb4\x9e\x52\xa6\xa5\x6d\x0d\x6a\x42\xa2\x92\x63\xa3\xcd\x07\x9f\x3c\xc4\xee\x2b\x00\x08\x84\x2b\x38\xe6\x00\xb9\x65\x92\xd9\x7d\x7b\x5d\xb2\xbd\xc7\x25\xcd\xa1\x7b\x7c\xe8\x21\xda\x04\x49\xb3\xc6\x0d\x32\x56\x68\xfd\x57\x22\x79\x61\xab\x7f\xbb\xb4\x83\x0d\x6f\xbe\x82\x1b\x1c\x5e\x50\x90\x97\xd7\xb0\x00\x57\x88\x0f\xd7\x5f\xc4\x1b\x64\xda\xf8\xe0\x47\x52\xb6\x8b\x23\xef\xc1\x34\xd8\x4b\x66\x1a\xbf\x87\x9e\x0d\xb4\x9a\x10\x72\x23\x16\x68\x3b\x9f\x9c\x00\x2f\x44\x06\x8c\x4b\x20\xb3\x1e\xb5\xe7\xfb\x1d\xf8\x81\xdf\x26\x30\xbe\xe1\x7d\x6c\xc2\xc6\x91\xe0\x0f\x16\x1f\xe9\x1c\xe0\x11\x30\x02\x83\x04\x82\x40\x0a\xb4\xfc\x7e\x85\x70\x62\xaa\xa6\x9e\x4c\x6e\x13\xbc\x28\x5e\x93\xba\x7d\x7c\xf0\xad\x01\x28\x74\x06\xb8\x02\x4a\x3e\xaf\xf5\x41\x72\x81\xbd\x2a\xbc\xca\x08\x0c\x29\x54\x4d\x76\x91\xe3\x5b\xb6\xd3\x66\xcc\x55\xbf\x40\xf2\xf1\x4b\xf0\x81\x80\x0e\x08\xdf\x30\x7b\xe8\x4e\x30\x07\x44\x7c\x03\x47\x80\xd1\xf6\x2a\x71\xfc\x2a\x0a\xe8\x8f\x86\x9d\xd3\x25\x74\x41\xd2\x38\x16\x35\x90\xdb\x79\xcc\x29\xe8\xe4\x5b\x39\xe3\xdd\xdf\xca\x1d\x99\x00\xe0\xc0\x3b\xac\x10\x21\xdc\x90\xb9\x63\xed\x05\x9e\x4a\xd1\x00\x10\x0c\xd1\x0a\xc6\xf1\x1d\xe9\xf4\x05\x78\x24\x6d\x20\x87\x72\x35\xd2\x4d\x3f\x3a\xe4\x4c\x3f\x8d\xcf\xb4\xf5\x27\x0f\xc4\x6c\xfa\xa1\x20\x87\x98\x18\x44\xd7\x55\xde\x8e\x30\x3b\x32\x0c\x24\x0a\xb3\xc1\xbb\x54\xdd\xc7\x6f\x55\xda\x7b\xe7\x94\x12\xcb\x2c\x1c\xd3\x59\xa8\x87\x6d\xad\x05\x56\x9a\x9e\x6e\x73\x4b\x73\x6c\x2e\xe2\x85\xd6\xe1\x9b\xa5\x2d\x47\xb2\x3c\x39\x79\xf1\x0d\xd8\xa0\xc7\xee\xd3\x35\x7c\x37\x4e\x41\xe0\x94\xc8\xa6\x2d\x2f\x08\xf3\x24\x21\x21\x0b\x82\x30\x04\xbb\xc9\xea\x99\x3e\x2f\xa2\xbc\x42\xb0\x19\x6d\x38\x37\xe3\xeb\xbd\x39\xaf\x85\x57\x48\x3c\x04\x80\x9d\x2f\x1f\x2e\xda\xf4\x89\xf4\x1e\x41\x96\xc5\xf8\x32\x28\x33\xeb\x55\x42\xe3\xe1\x5b\x56\x62\x48\xc3\x96\xe6\x93\x6a\x90\x1b\x50\xef\x52\x77\x8e\x08\x3c\xb3\x44\x3c\xda\xee\x6e\x9e\x83\x0c\x66\xb6\xac\x60\x4f\xed\x53\x88\xd2\xa0\xd8\x49\x4f\x1e\xb2\x7b\xe0\x1d\x07\x2f\x10\xc9\xe3\xba\xcc\x33\x9d\x0d\xd6\x0b\x39\xbc\xe5\x6d\x06\xaa\xc0\xf8\x5c\x50\x86\x59\xf4\x00\xe8\x3c\xd0\x6b\x0e\xb2\xd7\x1e\x14\x83\xe4\x29\x29\xf8\x24\x89\x8e\x05\xd3\x27\xd1\x9d\xfa\xcc\x7e\xab\xb5\x52\x70\xf9\x3f\x4a\xcb\x21\xb4\x6c\xaf\xa2\x42\xcf\xeb\x4c\x79\x27\xb2\x0a\xdc\x0f\xef\x4b\x71\xb9\x51\x54\x87\xe1\x7d\x1c\xf9\xa2\x59\x43\x3d\xea\x96\x5a\xbf\x4b\xab\x0f\x1f\x06\xa1\x80\x36\xd2\x14\x23\xcc\x7a\x7d\x57\x42\x01\xec\x00\xa6\x9b\x54\xb3\xda\x75\xe1\xcb\xb2\x5e\x3e\xb6\x6e\xf5\xc7\x1f\xc3\x94\xd6\x8a\x51\x49\xbf\xb1\xab\x30\xf3\x2b\x3d\x31\x2b\xc9\x6f\x6b\xdd\x5f\x79\x54\xeb\xcd\x79\x05\x07\xc3\x11\x20\xe3\x4c\xbd\x65\x0c\x54\x32\xd5\x68\xad\x90\x19\xf3\x25\xd1\x79\xc0\x40\x2b\x37\xe2\xfd\x5d\x11\x03\x5c\xfd\x88\x80\x75\xb3\x19\x1d\x62\x6f\xc5\xc5\x89\xbd\x04\x5c\xaa\x7f\xae\x82\x12\x99\x9d\x9b\x9a\xf3\x4b\x9a\x1f\x6b\x76\xce\x05\x15\xa5\xf3\x78\x87\x2e\x22\x53\xd5\xb3\x60\xad\x51\xbf\x73\x46\x04\x72\xac\x11\xe1\x6d\x8d\x6d\x6b\x12\x2d\xb4\xbc\x49\x8c\x88\x23\x96\x57\x6f\x33\xad\x50\xcb\xa7\x3c\xb4\x18\x6d\x2b\x6b\xa6\x15\x03\x9d\x98\x7e\x1f\xef\xd5\x55\x7f\xdf\xf7\x7b\xe4\xc5\xe3\x0c\x75\x86\xe5\x96\x6a\x42\xd2\x9f\xe4\x9d\x01\xd2\x7d\x14\x5a\x07\x87\x21\xcd\x22\x59\x02\x46\xe0\x8e\x96\x6b\x3f\x13\x6f\x89\x18\x08\x92\x9f\x6e\x4f\xa4\x99\x52\x09
\xbe\x5a\x36\xf7\x62\x9f\x25\x4f\x63\xbf\x82\x2a\x60\xa7\x1a\xfd\xa9\xa5\x4a\x28\x61\xe1\xcd\x05\xc4\xa0\x49\x74\x2f\xc8\x6a\x59\xc1\xd6\x7f\x84\x4a\x74\x95\x62\x35\x52\x4a\x69\x58\x73\x97\x7d\x92\x05\x8c\x9b\x95\x86\x67\xfd\x8b\x80\xe0\xec\x35\xae\xdd\x3b\xf2\x9e\xf6\x37\x8f\xbd\xfc\x0b\x19\xe7\x63\x67\x35\x66\x56\x04\x3d\x40\x39\x3a\x73\xec\x0f\x66\x0a\xaf\xdd\xe2\x79\xa0\x74\xb7\x2c\x1e\x5b\x2d\xd0\x15\xb7\xb4\x96\x0f\xbc\x01\xc2\x0a\x38\xe8\x03\x7a\x41\x28\x2c\x9b\xa9\x0c\xde\xa5\x8c\x7b\x58\x56\x63\x21\x6c\x30\xcf\x1c\x29\x5b\xb2\xe2\x61\x98\x78\x45\x51\x18\xb6\xe8\x40\x33\xb0\x57\x53\xa1\x83\xc0\xf5\xec\xb1\x4a\xae\xc2\x16\x1c\xd5\x7a\x6b\x12\x95\xae\x3b\xb5\xb1\x33\xb6\xce\x4d\x06\x57\x6e\xf1\x64\x00\x69\x62\xe7\x20\xd5\x21\x06\x37\xe4\xc5\xfb\x35\x6b\xc8\x6f\xe9\x47\x74\x9d\xdc\x4a\xfb\x4a\x87\x99\x3d\x97\x7f\x5e\xdf\x2c\x11\xab\x83\x27\x35\x2f\xe9\xd6\x3b\x43\x80\xe0\x7e\xfb\x44\x22\xc1\x98\xe9\x83\xf8\x48\x0a\x4e\x60\xc5\x5e\x7c\x5b\x48\xab\x24\x06\x57\x42\xd4\x5a\x9c\x81\xfd\x37\x9a\xc3\xd0\xb6\x0e\xd0\xc0\x6e\xcf\x50\xa6\x55\x95\x74\x0c\xb3\x01\x07\xac\x11\x39\x5c\x4f\x2d\x11\x88\x85\xbd\x6f\x72\xd7\xeb\xc7\xad\x05\xbb\x3b\x50\x09\x20\x1f\x84\xe9\x9c\x12\xf2\x1a\x56\x7a\xd8\x17\x97\xdc\xa7\xf1\x21\xf2\x04\x36\x82\xb5\x61\xd4\xb0\xb0\x52\x9a\xd8\x46\xc3\xc7\xef\x9c\x64\xc0\xf5\x70\xca\xae\xbf\x41\x88\x28\x4b\x9c\xdc\x98\xfe\xfe\x03\x4c\x38\xbc\x0b\xbe\x24\xb4\x89\x3d\xd9\xde\xc5\xad\x99\x77\x8f\xf4\xf3\xb8\x6c\xb3\x0f\x8a\x71\x2b\xf3\x65\x3d\x14\xdf\xc4\x7c\xf9\x57\x4a\xe8\x84\xe0\x1a\xb8\x64\xee\x48\x8b\xfc\x03\xeb\xca\xd6\x4b\x74\x84\x4a\x29\x34\x34\x7d\x09\x31\x00\x28\x84\xcd\x15\xe1\x35\xe7\x3a\x04\xc8\xc4\x3e\xa7\xd5\xa9\x24\x5c\x85\x5d\xea\xb9\x86\xfc\xc5\x5c\x02\x64\xa0\xc1\x02\xf7\xc4\x7a\xa2\xa9\x7e\xb7\xcf\x1d\x5b\x9d\x80\x26\xa0\xd3\xc0\x1d\x95\x87\xa4\xb7\x35\xf1\xce\xec\x5c\xa4\xd9\x34\xe7\x97\xf0\x08\x60\x9c\xc7\x69\x91\x4f\x59\xdd\x31\xe6\x04\x05\x3f\x26\xb4\x9f\x29\x7b\x9d\x05\xeb\x98\xf3\x72\x91\xa9\xa0\x8d\x8d\xe5\x50\x22\x8b\xd9\x37\x27\x7d\x46\xc8\x35\x52\x81\x09\x39\x78\x2d\x32\xac\x35\xba\x87\x2c\xf6\x71\x3c\xeb\xf6\x70\xd3\xb3\x16\xbf\x04\x4e\x40\xaf\xe6\x50\xe1\xc1\xc1\x14\x88\x48\x5a\x23\x95\x16\x42\x73\xdc\x0a\xaf\xc6\x0c\x46\x09\xb7\xdf\xd3\xf5\x39\x03\x76\xa7\x2d\x76\xd6\x06\x97\x5e\x37\xb2\xc8\x49\x2b\x78\x05\xc4\x80\x7e\x95\x27\x82\x7d\xac\x07\xe2\x24\x84\xed\x1f\x1e\x8a\xa7\x14\x4b\x9f\x90\xce\xf5\x19\xf8\x9a\xd9\xe7\x8c\xd1\x28\x4f\x7b\x98\x1c\xba\xf8\xb4\x1a\x7a\x25\x0f\xcc\x4c\xfb\xee\x9c\xee\xb9\xb2\x8b\x8d\x10\x7e\x5a\x12\x42\x83\x45\x5d\x21\x90\x06\xbf\xa9\xc9\x8d\x47\x4f\x6a\x9b\x00\xb5\x2e\x12\x5c\x7b\x4e\xfb\x75\x58\xdd\xf6\xcb\x5f\x26\x3a\x6b\x9f\xf9\xdd\x12\x0d\x9e\xf9\x45\x70\x03\xfd\xe8\x78\x3d\xb2\x00\x0e\x5c\x07\x94\x93\x2a\x0b\x31\x5f\xbe\x3f\x7b\x2b\x17\x8e\xa7\x41\x97\xd9\x61\x4b\x66\xf0\xf8\x7e\x1a\x97\xe7\xe7\xfa\xb1\x85\x2e\xb7\x86\x60\x74\xfc\xb0\x97\xf7\x03\xa8\x25\xa7\xd6\xf4\x62\x67\xeb\xa7\x9e\x21\xda\xbd\x22\x08\x96\x47\xf0\xc3\xbc\xc4\x9f\xdb\x1f\xfa\x53\x90\x65\x4a\x13\xe3\x9f\xde\x93\x32\x4b\x81\xc2\x3f\x1e\x0e\x49\x17\x35\xec\x98\x9b\x37\x4e\x11\x7e\xc6\x87\xfc\x27\xfd\xfa\xf9\xfc\xd8\x2b\x80\x61\x85\x35\x6c\xfd\xba\xe0\xae\xe7\x2f\xca\x79\xfb\xbc\x22\x66\xae\xdb\x48\xec\x83\xe0\x71\x68\x6a\x87\x24\x66\x09\xa8\x00\x34\x81\xcb\xf8\x29\x49\x1d\x73\xc4\x2c\xf0\x28\x07\x9a\x88\xb9\x6a\xf0\x87\xbd\x22\x58\x8a\x6d\xc5\xc6\x82\x90\x9a\x35\xc6\x01\xaf\xad\xa9\x70\xef\xdb\x9e\x1b\x6d\x01\x45\xb0\x5b\x45\x22\xf3\x19\xca\xeb\x27\x36\x62\x4f\x62\xdf\x48\x8d\xbb\x01\x16\x58\xd2\x66\x0e\xe5\x90\xf1\x68\xe8\x10\x12\x0d\x42\xcd\x2c\xa
4\xef\x4a\xc8\x7a\x45\x62\x6e\x83\x3c\xcc\x9e\x7a\x44\xee\x43\x6c\x0e\x39\x15\xf3\x0d\x6a\xb8\x15\xf5\xca\xea\xd9\x8c\x4d\x82\x7b\xb8\x98\x28\x19\x47\x8c\xde\x79\x6a\xec\x0a\x7d\xe9\x65\xaf\xb1\x7b\x04\xb0\xcb\x00\xe9\xf9\x8c\x43\xe9\xa5\xe0\x7d\x2c\x0c\x2e\x36\xb3\xf7\x64\x1b\x1d\x07\xb1\x5b\x24\x3a\x28\xf0\xd1\xb5\xba\x55\x33\x7b\xff\xf2\xd3\x8c\xfc\xc1\xb2\xd8\x47\xf4\x92\x63\x46\x49\xe6\x68\x53\x85\xac\x00\x24\xbd\x7d\xde\x00\x20\x9c\x67\xbf\x51\x69\xf5\x51\xe2\x49\x19\xbe\xfd\x2c\xfa\x48\x07\x2a\x1d\xc2\x4e\x1e\x5e\xbb\x95\xef\xf2\x4d\x69\xef\x85\x70\x81\xf7\xb0\x5b\xdb\xeb\xa0\x0d\x21\x4d\xac\x31\x21\xd4\x04\x38\xd7\x6f\x95\xd8\xbb\x9b\x14\x8f\x60\x53\xde\x58\x33\xec\x78\xf4\x93\x67\x92\xc9\xee\x42\xee\xdb\x7a\x01\xc0\x46\x0f\x6e\x2b\x0f\x6f\x6a\x69\xc6\xee\xd2\x9f\x44\xa3\xf2\x82\x1d\x72\x6a\x8a\xe0\xcd\xe5\x6f\x66\x2f\x45\x66\x40\x99\x09\x5c\x00\x1f\x43\x59\x42\x28\xf4\x1c\x20\xf2\xfc\x8a\xd1\xfb\x4c\x84\x35\xc4\xc6\x18\x87\xed\x9c\x86\xa5\xd6\x9b\xcd\x50\x1d\x1e\xd8\xb3\x50\x04\x73\x58\xf2\x3d\xea\x33\xd5\x47\xe3\xc9\x4c\xab\xd7\xe1\x13\xbc\x0a\x1f\x5c\x06\x8d\xc0\x4e\xc5\xc9\xe0\xd2\x2a\x9f\xe7\x35\x44\xaf\xa7\xfc\x2f\x51\xf5\xc2\x0b\xaf\x97\x13\x1f\x72\xd3\x4e\x81\x2f\xc7\x3b\xb5\x66\x58\xeb\x8f\x90\x48\x0c\x5a\x48\x32\x1c\xf9\xaa\x5c\xd5\xea\x04\xce\xd2\x89\x03\x84\x33\xb2\xcb\x64\x76\xbb\x5e\xd9\x9a\xc7\x6f\xe9\xa1\x6b\x94\x00\x89\xac\x9b\x3b\x61\xa7\xac\xaf\xef\x94\xbb\xdd\xba\x39\xb4\x42\x7a\x3d\x64\xae\xfc\x23\xae\xf7\xd1\x6c\x6a\x98\xb7\xfe\xb5\xc7\xd8\xc9\x2f\x82\x20\xbb\x60\x17\x99\xbd\x87\x10\xd6\x94\x3c\xdc\x47\xf4\x06\x6f\x93\x68\x99\x6d\x7b\xcb\xa2\xc2\x5b\x14\xc6\x68\x31\xa1\xdd\x56\x34\x6d\x72\x2c\x2c\x8c\xfc\x1d\x7a\x5b\x39\x37\x88\x44\x6c\x62\x7e\xec\xf9\x29\xab\x1d\xde\xc8\x08\xe6\x09\x19\x4f\xb8\x3c\xe3\x9b\x4e\x4b\x97\x3f\x27\xb9\x41\x41\x3a\xe3\xf5\x21\xb6\x34\x5e\x2d\x6b\x79\xbc\x42\xe0\xc0\xda\xc9\x18\xab\x56\x2c\xcd\x28\x94\xb8\xc3\x23\xbe\xc3\xea\xa9\x5d\x5e\x91\x4e\x11\x96\x16\xaf\x74\x2b\xd9\x11\x49\xab\x9c\x6e\xd4\x11\x74\xc3\x4a\xe3\xf1\xd0\x86\xb2\x84\xbe\xbe\xa4\xa1\x7e\x30\xaf\x1b\xdd\x0a\x85\x1a\x33\x9a\x3c\xe6\xae\x94\x24\xdf\xbd\x49\x72\x4e\x4d\x56\x1d\x57\xad\xd3\xe2\x03\xbd\xbb\xe3\x1e\xd4\xfa\x51\xb0\x97\x11\x47\xbb\xd1\xaa\xec\x6e\x9b\xd1\xcd\x61\x68\x6b\xf1\xe2\x58\xef\xcc\x3e\xbe\x6f\x72\x2d\xf8\x34\x49\x36\x9d\xd6\x80\xfe\xa4\xae\xd1\x2a\x1d\xf8\x59\xee\x39\x33\x9c\xf4\x60\xcd\x49\xdd\xfa\x71\x96\x80\xf5\x58\xfe\x48\xce\x1d\x8b\x9e\x04\xab\x32\xfe\xa8\xb4\xbe\xb0\xc6\x82\xe1\x5f\x4d\xd7\x53\x0f\x78\x04\x7c\x56\xbb\xb4\xd4\xad\x9e\x9e\x42\x56\xdd\x2f\x76\x3a\x59\x94\x2a\x7c\x10\x33\x20\xb5\xae\x23\xb2\x6b\x34\x95\x1f\xbf\x32\x69\xc6\x28\x65\xac\xe9\xb9\xbc\xdc\x05\x84\x06\xe7\xc8\x0f\x18\x93\xd5\x95\x8a\xdc\x1a\x13\xf5\x99\xec\x4b\xa5\x86\x32\xac\x16\x36\xf8\x00\x7f\x86\x34\x89\xa5\xb4\x20\x88\xc7\x35\x0c\x3e\xe9\x05\x8b\xb0\x0a\x0a\xd9\x86\x54\xa3\x80\x20\x7b\x27\x7a\xc0\xde\x74\xdb\xa3\xf5\xa4\x20\x3b\x59\xe2\x67\xc3\x44\xa6\x95\x96\xb0\x6a\x77\x56\x94\xd3\x03\x12\x8c\x4a\x54\x79\x20\xaf\x87\x6c\x03\xef\x91\x17\x11\x22\x76\x56\x54\x4e\x95\x60\x0e\x5b\xa8\xb4\x93\x04\x60\xa5\x2e\x0d\xb2\x28\x36\x6f\x75\x6e\x93\xa1\x83\x34\x0a\x5e\x71\xfe\x10\x5a\xb2\x8f\x4a\xe4\x3f\xd4\xdb\x51\x29\x34\x53\xfb\x02\x51\x8f\x60\x8a\xf9\xe2\xe4\x56\xc3\x36\xbb\xb5\xfc\xcd\x8e\xc4\xd0\x07\x12\x6d\xa2\x85\x63\x05\xb9\x4c\x60\xf9\xa4\x08\xce\xfa\x98\x1b\x53\xdb\xcc\x44\x49\x33\x6b\x35\xf0\x2b\xf0\x28\xff\xd4\xbf\x01\x8f\x42\x70\x0d\x6b\x16\x30\x10\x02\xf8\x89\x10\xc0\x23\x8a\x22\xed\x77\x69\x0e\x00\x87\x7f\xee\xba\x24\x96\xc5\x7a\xf7\x2b\x1e\x91\x39\xf0\x80\x97\x
d7\xa3\xab\x1e\xec\xe0\xa0\xa9\x22\x14\x04\x95\xdf\xc9\x79\x68\x8c\x05\xb3\x1c\x3c\xe7\x66\x19\x89\x94\xf8\x6d\x90\xa1\x07\x69\x02\x0f\xeb\xa1\x78\x5d\x04\xe3\x4b\xc0\x0a\xfe\x10\x6a\x44\x8f\xb4\x3f\xc5\xa9\xa0\xbb\x04\x7d\x14\xb5\x20\x1c\x52\x61\x67\xba\x06\xa7\xe2\x96\xa7\x36\x5b\xc3\x57\x5c\x87\x2b\x58\xfb\x99\x17\xc6\x89\x12\xe8\x69\x93\x60\x43\xc0\x84\xb0\x11\x01\x96\x10\x06\x22\xe4\x65\x1a\x5e\x42\x78\x05\x07\x4d\x90\x4b\x70\x2b\xcb\x7b\xc9\x9a\x0a\x5a\x18\x96\x1b\xe9\xd8\x9a\xc3\x31\xff\x7a\x0e\xbb\xd0\x03\xbf\xcc\xa3\xe5\x0f\x1e\xe0\xeb\x6c\x09\x08\x7a\x73\x7a\xc7\x1e\xe5\x63\x41\xaa\xf9\x90\x64\x7f\x89\xfb\xa0\x41\x5c\x28\xd1\x89\x4c\x41\xfa\x23\x46\x28\xa3\xd1\x6d\x6d\x7a\x8f\x71\xcb\x5b\xf5\xfe\x16\x0e\x57\x7b\x75\x06\x44\x98\xf6\xac\x91\x97\xb1\x1e\x97\xe6\x4d\x49\x79\xbe\x19\xb1\x96\x0f\xcd\xe5\x47\x81\xc7\xb5\xa2\x52\x0f\xdb\x5a\x51\x50\x6a\xe6\x0a\x89\x84\x6e\x87\x66\x90\x1f\x01\x69\x78\xb9\xfd\xf2\xb2\x78\x8f\x24\x2b\xf5\x96\xe6\x1a\x10\xb6\xbc\x04\x15\xc3\x3a\x6d\x88\x50\x13\x03\x22\xed\x2d\xdc\x96\x25\xca\xac\xc6\x3a\x48\x1b\xe6\x6f\xe2\x43\x68\xcb\x0d\xcf\x11\x14\x10\x07\x44\xc4\x8f\x2e\x68\xad\x5b\x12\x09\x37\xb9\xe8\x3a\xda\x97\x45\xc1\x82\xc0\x46\x44\xf3\x3f\x87\xa0\x58\xce\x3e\x9a\x07\x65\x5c\xbf\xc4\x43\x21\x56\xfc\x60\xc1\xc9\x00\xb9\xff\x93\x0a\xbe\x0b\xaa\xc7\x15\x00\x56\x47\x92\xfe\x88\x0e\x7e\xd3\x08\xef\x44\x96\x6f\x6a\x78\x67\x65\xcf\xbf\xd4\x04\x1a\x7e\xbd\xa1\x39\x5c\xbf\x99\x28\x0a\x6b\xc5\x13\x1d\x70\xcd\xbd\x66\x35\x64\xcf\x10\x28\x36\x47\x4e\x84\x1c\x3d\x1b\xdf\x03\x92\xcc\xd3\xd5\x7f\x09\x35\x21\xd5\xee\xfa\x3a\x82\x0b\x60\xbd\xbd\x5d\x42\x70\xb0\xf1\xe5\x65\xf2\x7f\xc4\x0f\x0e\x9e\x30\xf8\x25\xc4\x44\x89\x03\xf9\x04\xee\x5a\xc1\xb3\x56\xf9\xad\x1f\xaa\x64\x27\x90\xa1\x40\xe0\x43\xd5\xf6\x15\xf0\x31\x94\xdf\x6b\x0e\x98\xc3\x6f\x69\x23\x8f\x0d\x58\xd1\x94\xdd\x4f\x02\x07\xbe\x1a\x97\x22\x68\x12\x47\x43\x83\x40\x68\xbe\x49\x19\xa0\x80\x2e\x91\xe4\xea\xbd\xd0\xc4\xd5\x91\x53\x50\x6e\xb5\xfb\x72\x58\x0b\xdb\xbe\x36\xb3\xb6\x72\x8b\x26\xa1\x80\xfe\x56\x5b\x10\x35\x68\x04\xf2\x01\x72\x91\x43\x7d\xcd\x0a\x8e\xfb\x69\x4b\xf6\xd6\x90\x3f\xf2\x57\x23\x5e\x58\x87\x36\x16\x64\x4b\x90\x85\xb5\x21\x2e\x22\xf9\xdc\x85\xe8\xaf\x33\x0e\xe0\x83\x56\x68\x05\x90\x95\x56\x51\x38\x6e\xb1\x78\x6b\xb4\xeb\x8a\x7b\xf3\x9d\x07\x07\x16\xec\xc1\x91\x16\x6b\x07\x6e\xd3\x9e\xc6\x85\xbf\xb8\x54\x3d\xaf\x1f\x55\xf4\x24\x4f\x2f\x49\xfa\xdf\x24\x19\x06\x09\xc5\xb3\x62\x2c\x8d\x9a\x91\x1a\x96\x0f\x6c\x8c\x5e\x6f\x13\x45\xea\xb9\xd1\xed\x56\xc2\x07\xfd\x95\x46\xbd\xda\x1d\xcc\x41\x69\x0e\x0c\xc6\x5d\x4e\x5a\x1a\x8e\xd8\x2f\x21\x34\x78\x18\x0d\x45\x42\xd5\x44\x0a\xd8\x87\x62\x3d\x78\x1d\x7e\x41\xb3\x13\xcd\xe8\x6a\x1c\xfe\x1b\xc1\x30\x64\x67\x5a\xd1\x8a\x4f\x3a\x84\xed\x3b\x47\xac\xdd\x42\xf7\xd1\xc3\x76\xad\x7b\xe9\xd4\x43\xea\xbe\x8f\xac\xcf\x01\xc3\xda\x71\x34\x83\xbb\x31\x55\x70\xfc\xc2\x7e\xf5\x1f\xbc\x8e\xa5\xe1\x41\xa8\x7b\xdf\x16\x05\xa7\x83\x5b\xf3\x8e\xbc\x86\x68\x86\xfc\xe1\xc0\x07\xc3\x63\xbb\xfd\x89\x43\xee\x2e\xbf\xff\xfc\xad\xb3\xcb\x55\x78\xd0\xd6\xcb\x29\xa2\x76\xbc\x62\xec\x10\xd9\x75\xfe\xbe\x27\x60\x99\x75\xc3\xd3\x8e\x4c\x26\x1f\x81\x19\x90\xc3\x7e\xd2\x20\xd9\xc6\xda\x1a\x99\x53\x6b\x28\x22\xa3\x9d\xf1\x14\x94\xdf\xf7\xcb\xe0\xda\xaa\x4e\x00\x78\x9f\xf5\x1f\xd9\x18\xc8\x26\x1f\x42\x64\xff\xac\x1e\x1b\x6d\x86\x90\x32\xfd\x25\xaa\x87\x07\xbb\x7d\xd3\xde\x7f\xa3\x9b\xd0\x7f\x40\xcc\xcc\x7e\xb0\x73\xd7\x83\xfb\xbf\x77\x84\xaa\xf7\x56\xc3\x55\xba\xd2\xfa\xdd\x61\x8a\x32\xfb\x82\x56\x51\x15\xc2\xe3\x52\x52\xb0\x99\xae\x81\x00\x85\x7e\xc8\x99\xf3\x67\xab\x01\xb8\x56\x8b\x57\
xea\x71\xd7\xc9\xf3\x28\x1d\x67\xe4\xd7\x12\xb8\x0f\x88\x7f\x29\xb9\xdf\x43\xe0\x35\x9b\x39\x56\x50\xf5\xcf\x6c\xe4\x63\xf0\xc5\x73\xf4\xa1\xe8\xf5\x0e\x43\x8b\xbb\x8a\x1f\x57\x08\x8d\x6f\x0f\x65\xd9\x69\xf5\xcf\x18\xd2\xef\x2b\x1a\xbb\x62\x7f\x38\x36\x64\x4b\xa2\x76\xe0\xad\x01\xed\x58\x05\x4e\x3c\x7d\xee\x5b\x52\xff\x82\x4f\x30\x18\xf7\x5b\x05\xc7\x04\xaa\x80\x5b\x2e\x8b\x43\xc0\x3d\x52\x12\x8e\xcd\x99\x1d\x5a\x1b\x19\x16\x83\x0e\x60\xbb\xef\x08\x41\x24\x5c\x04\x20\x12\x3d\xa9\x31\xa3\x78\x5d\x18\x8b\x9e\x5e\x3b\xca\x9d\x84\x72\xfa\x84\xb1\x54\xb0\x56\x65\x78\x38\xf6\x5a\x37\x29\xf6\x5b\x25\xb8\xdd\xe4\x90\x70\x46\x9a\xf9\xe4\xc4\x23\xb1\xb5\x28\xfd\x4e\xb4\x95\xfe\xd2\x50\x22\xf5\x83\x92\xd2\x0b\x64\x45\x25\xca\x33\xb6\xa8\xe9\xc2\x84\xde\x46\x28\x41\xbe\x47\x6a\x6c\x11\x07\x8d\xf4\xc1\x20\xb9\x2b\x5a\x67\x8d\x30\x7e\x2b\x60\x35\x4e\x0a\x19\x50\x79\x70\x67\xd6\x26\x89\xf3\x87\x29\xd0\x0d\xc2\x76\xe5\xe1\x36\xbb\x2b\x71\x49\x97\x6f\x6d\x77\x6e\xb2\xc4\xb2\xef\x5d\xba\x40\xa0\x7e\x99\x3f\x74\x7d\x9c\x31\xb2\x77\x42\x5f\x34\xe0\x48\x9c\x51\x03\x90\x68\x55\x35\x6c\xfa\x40\x10\xf4\xd5\xba\xed\x97\x8e\x3a\x46\x35\xfb\xaa\x07\xbc\x0e\x08\x47\x1b\xe2\x4a\xdb\xa1\x37\xf5\x0d\x94\x98\x64\x02\x4b\xef\xd2\x7c\x9d\xf1\xfa\x59\xc7\xe8\x6c\xdc\x12\x8c\x43\xe7\xf4\x60\xe1\x50\x9e\x89\x99\xa5\x8d\x8a\x22\x1c\xac\xad\x1a\x72\x96\x3d\x08\xd4\x9c\xee\xa3\x70\x0a\xf0\xb3\xf1\x61\x05\xa1\x2f\x06\x66\xfb\x39\x02\x49\xfb\x72\x47\xd6\x58\xe5\xbc\x5a\x9d\xb7\xd3\xf4\x58\xd6\x7e\xf2\xc4\x25\x5a\x74\x0a\x7a\x8e\x28\x0d\xcf\x1f\xf7\xc3\x04\xf0\x74\xf5\x71\x1b\xba\x18\x6d\x46\xb1\xab\x4c\x86\x49\xf5\x38\x14\x9e\xfc\x34\xa7\xe5\x4d\x0f\x4b\x8f\xe0\x18\x9e\xf2\x54\x90\x06\xe2\xcf\xed\x94\x33\xbd\x29\x6e\xf1\x3a\x4d\xb9\x9f\xd6\x37\xd7\x49\xd7\xaf\x77\xa8\xc8\x23\x99\x92\x57\xca\x08\xe3\x38\xf7\x7c\x69\xdf\xa8\xa2\x45\x5c\xb1\x0f\x81\x78\x29\x5b\xe0\x3e\x26\x88\x2e\x0e\xb4\x06\x22\x28\x81\x46\xc9\x7f\x82\xa4\x5c\xb3\xf0\x45\x79\x97\xb7\x95\x8b\xcc\x47\x61\xd5\x66\xad\xcc\x6b\xa6\x37\x06\x36\xb7\x95\x1d\xad\xc3\x3d\xc0\x07\x28\x6f\x82\xdb\x3d\x7b\xd4\xbd\xe4\xfc\xf1\x01\x16\xb0\xe4\x04\x76\x53\xf7\x24\x11\x2f\x93\x28\x77\x31\xd0\xa6\x88\xa7\x19\x53\xb8\x26\x61\xab\xbe\xb7\xcf\x52\xf2\x51\x77\xb3\x5c\xcb\x94\x7b\x21\x64\x45\xb6\xdd\xc9\x3a\x66\x31\x5d\x76\xa4\xfe\x7f\x87\xbd\x86\xb9\x5d\x22\xc5\xa4\xbb\x9e\x72\xb3\x1d\xc7\x76\xba\xaf\xb9\x87\x40\x95\xde\xb8\x90\x29\xee\x7d\x9c\x99\xc6\xe0\xa4\x24\x6d\x15\x5f\xfe\x3b\xfc\xac\x4f\xcb\x31\x91\xd4\x8b\xd8\x63\xd6\xf5\x69\xd6\x78\x9d\x5b\x3b\x0d\xd4\x0c\xc1\x8e\x99\x3d\xc7\xa0\xea\x6a\x15\x2e\x61\xef\x5d\xeb\x72\xda\xa5\x4d\xbf\x71\x35\x7b\xf8\x78\x89\x93\x4a\x62\xe2\x76\xef\x8d\xa3\x20\xb0\x5f\x89\xaa\xf8\xd7\x1f\xa3\xb0\xbb\xeb\xcd\x0f\x2f\xa9\x17\x45\x9c\x96\x43\x20\x1b\xe0\x5b\xe6\xe0\xc6\x20\x2d\x46\x53\x6e\xa3\x6a\x5b\x6b\x09\x8e\x0b\xdc\x3f\xf5\x25\x7b\x98\xb5\x5a\x7c\x7a\xbb\x6b\x9e\xa7\xc8\xd4\xec\x02\x4a\x72\xf5\xef\x27\xa7\xd1\x1f\xe9\x38\xd6\x46\x99\xd7\x6e\xaf\xaf\xae\x45\x2f\x81\x0b\x43\x42\xde\xa1\x12\x21\x1c\x7b\x5d\x66\x19\xfc\x88\x76\x08\x3f\x1a\xc4\xa5\xb1\x13\x78\xc8\x0b\x02\xbf\xff\x18\xd9\x5b\xee\x13\x56\xe4\xb3\x78\x31\xf3\xfb\x2f\x07\x3e\xe6\x79\x6d\xf1\xcb\x40\x92\x87\x3d\x96\xaf\x8b\x80\xe0\xce\x47\x80\x64\x44\xa0\x08\x58\x32\x43\x33\xb5\xc3\x80\xf2\x81\x2a\xf3\x16\xc6\xaf\xab\x7f\x7c\xf4\x80\x8b\x09\xdc\x8a\x9b\x6d\x6e\x5c\x8c\x7a\x52\x6a\xbd\xab\xcd\x89\x6e\xe6\xe1\xbb\x46\x20\x15\xa9\x0b\x5d\xc3\xcc\x3c\x11\x2d\xff\xa0\x3d\x16\x32\x3d\xec\xf3\x77\x04\x5c\x66\x46\xef\xa3\xf0\xfa\x59\xa5\xce\xb3\x75\x30\x2f\x67\xa5\x3d\xe6\xe9\x19\xec\x66\xe8\x66\x29
\x28\x01\x61\x61\xa7\xc4\x07\x1d\x1f\x23\xa2\x0c\x1d\xaa\xdd\x2c\x91\x61\xe8\x97\x08\x62\x32\x23\x82\xb3\x04\x0d\xc6\xea\xc8\x53\x49\xc4\x5c\x2e\x0d\xed\x37\xb0\xe2\x5d\x3a\xe4\xcb\xf9\x30\x88\x6f\x78\xd2\x54\x1e\x99\x59\x7d\x7a\x13\x4e\x02\x6e\x0c\xf9\xf9\x30\x7b\x26\xbf\x74\xe0\x83\xf4\x34\xe8\x31\xe6\x49\xfd\x97\x46\xd2\x15\x09\x83\x59\x87\xa4\xfe\x73\xe6\x4a\xb0\xec\xbb\xc2\x9b\xe9\x99\xeb\x1d\x52\xba\x79\x09\xac\x5d\x27\x35\x43\x38\xad\x34\xcf\xda\x10\x47\x90\x62\xc8\xb7\xb1\x2e\x86\x7e\x9b\x35\x88\x0c\xab\xfe\x4f\x46\x40\x64\xe6\x86\xb3\x01\x5d\xba\xc1\x67\x46\x51\x81\xf2\xa3\xc9\x7a\x77\x19\x51\x32\xbf\xf2\xfe\x77\x9e\x2f\x22\x6b\xfe\xcb\x64\x80\x9b\x4d\xa7\x4b\x97\xd1\x6d\xd7\x13\x04\x9b\x97\xcf\xac\x02\xb6\x05\x6d\x8f\x91\x77\xb3\x7f\xea\xb5\xc5\xf8\xd7\x19\x07\x9e\xae\x33\x8e\x3c\xde\xd4\x74\x09\xdd\x49\xeb\x8b\x48\x9b\x39\x2b\x68\x8d\x05\xc6\xb6\xb6\x22\x37\x91\x90\x12\xda\x69\x5d\x8c\x6d\x38\xbd\x86\xa5\xc6\x11\x85\x8b\x86\x09\x96\x66\xad\xa9\xd7\x1e\x24\xc4\xdc\x85\x38\x60\x17\x84\x64\x33\xa2\x6e\x48\x91\x06\x43\x8b\x47\xb3\x46\xfa\x56\x97\x16\x39\x2d\x21\x0b\xae\x63\x9d\x67\x8a\x7b\x71\x2d\xff\xb6\x74\x69\xda\x93\x76\x26\x39\x49\x07\xe5\x68\xcb\x2e\x7e\x40\x9d\x4b\x14\xda\xb1\x22\xd4\x29\xee\x5d\xea\xde\x07\xb1\x56\x66\xa1\x21\xa6\xad\x74\x42\x5c\xd9\x6d\xfe\xf5\x49\xfe\xe3\x84\x7d\xfc\x04\xa1\x36\x06\xd2\x3b\x19\xe4\x1a\x97\xc8\x06\xf8\x12\x05\x87\x63\xd7\x32\x65\xd5\x93\x91\xe6\x22\xe2\x30\xb4\x2d\x29\xc9\x02\x29\xa5\x4a\xbe\x81\x19\x52\x95\x18\xc5\x72\x30\x08\x6a\x11\xe9\xe5\x79\xf1\x00\x66\xc2\xcf\x22\x44\x0c\x7d\x6b\xfb\x35\x32\x89\x32\x61\x90\x28\x62\xca\x71\xab\xfc\x0c\x5c\xbe\xa1\xd2\x7a\xf8\x44\x8d\x3e\x24\x45\x24\xe2\x6a\xa0\x04\xf0\xcf\xdd\x93\x3c\x9d\xcd\x47\x88\x09\x2e\x21\x96\x33\x0a\xf0\x6d\x51\xf9\x16\x0d\xfc\x09\x6e\x9b\x06\xa1\x59\x41\x1e\x39\xc9\x7c\x00\x60\xfd\x1d\xee\x46\x28\xee\xcf\x61\xe0\xf5\xd2\x59\xac\xa3\xfc\x6e\x8c\x43\xbb\x9a\xa5\x9f\xed\xb9\x45\xc6\x6b\x44\x9e\x81\xe0\x56\xb5\x6f\x4a\x5a\xcb\xab\x06\x62\x1a\xc2\x96\x08\x58\x59\xc1\xb5\xc9\xb4\x1c\x7a\xc7\xce\xb0\x1d\xa7\x3c\xa8\x69\x21\xdd\xc7\x9a\xe9\x6d\x00\x34\x21\x04\x8c\xb7\x62\xf1\xa7\x96\x80\x9c\x6b\x61\x08\x51\xf0\x71\x74\x19\x54\xca\xed\xa7\x76\xe5\xe6\xea\xcd\x06\x3b\xb0\x80\x23\x62\xb8\x46\x40\x1e\xb0\x9e\xfd\x24\xc3\x5f\xef\x43\x83\x83\x7e\xa4\x64\xa9\x90\x3a\x6e\x71\xa4\xc2\x1e\xbc\x78\x04\xda\xab\x9b\x46\x0a\x9d\x5b\x65\xaa\xbd\x40\xee\x77\x9c\x7d\x8c\x39\x3d\xf6\xf0\xdd\xd9\xcb\xb1\x57\xba\x93\xd2\x43\x9a\x0c\xa1\xf7\x90\xf2\xae\x41\x66\x12\x79\x76\x59\x33\xa7\xf0\x9e\x11\x73\x33\xfc\x26\x17\x4a\x5a\x45\xf2\x81\x20\x28\x6f\xe7\xb0\x40\xb9\xfa\x3b\xec\xd5\x53\x4b\xd4\x04\x4a\xc3\xcf\x25\x32\x73\x1a\xa6\x16\x00\x61\x55\xd5\xff\xae\x76\x08\xc3\xd0\x24\x06\x69\xeb\x25\x1c\xd0\xa4\xe3\x89\xc1\x8c\x1b\xd7\x7b\xc6\x67\xeb\x3a\x21\x6a\xe9\xb7\xdd\x14\xa0\x3f\x49\x00\x12\x44\x08\xb1\x0f\x47\x98\x08\x8a\xbe\xcf\xa1\x3b\x5a\x0e\x9b\xbd\xf9\xbf\xd9\x3f\xa6\x2c\xf8\x0d\xea\x82\x11\xe2\xdc\x1e\x9e\xa0\x7d\xf4\x58\xdf\xa6\x44\x19\x9e\xe4\xcc\x1e\x33\xfe\xae\xbf\xcb\x7f\xf9\xbb\x5e\x85\x42\x55\x7f\x95\xde\xfd\x3f\xc6\x0c\xfd\x95\x6d\x76\x04\xe9\x5f\x7a\x22\xb6\xd6\xfb\x0f\xef\xcb\xd1\xbe\xef\x6b\xc1\xf3\x69\xcd\x3b\xef\x11\xbd\xab\xa3\x0c\xdf\x20\x02\x78\x19\xa0\x96\xc3\x8d\x41\x54\xf1\xb7\xf7\x5e\x06\xe6\x53\xbc\x33\xd5\x03\xde\x39\x11\x87\xe8\x86\x1a\x51\x6e\xfb\x89\xbc\x1b\xfe\xf5\x8d\x2a\xd6\xa7\xd3\xf9\xe7\xa3\x96\x6f\xc0\xb3\xfc\x04\xde\xdf\xe2\xa1\xbe\xbf\x29\xdb\xf3\x4f\x08\x98\xeb\x83\xa9\x83\x06\x17\x16\x06\xe5\xe7\x8b\x80\x1f\xb1\x4c\xcd\xb4\x7e\x56\xf5\x01\x0d\xf8\x3b\x8e\x56\x43\xa2\x5
4\x8a\x36\x50\xa1\xe8\x7c\x12\x81\xc1\x45\xdd\x89\xfc\x52\x38\x39\xcd\x41\x5b\x5c\x9f\x84\xff\x2c\x77\xb6\xd4\xcb\x6a\x04\x6b\x77\x1f\xfc\x65\x7d\xbe\x7c\xe2\xb5\xfa\xab\x88\x56\x8f\xc9\xb0\x27\x6f\xe2\xbe\xec\xb2\xbd\x35\xe5\x42\xfa\x38\xbb\xe1\xd3\xb5\xb2\xac\x98\x78\xa4\x8a\x3a\x00\x21\xab\x00\x31\x5e\xed\xdc\x7d\x61\x0b\xbf\xcd\xaf\x12\xcd\x31\x5a\x83\x2c\xaf\xdf\x5f\x93\xdf\xa0\xd7\xf4\x62\xf2\xe6\x6b\x72\x9e\xc3\x6b\xe2\xbe\xa6\x50\x42\x30\x6b\xb8\xe8\xa8\x33\x93\x3a\x12\xa0\x08\x45\xc3\xd2\x89\x42\xc7\xca\x71\x6c\x2b\xec\x6d\xbe\x1d\x84\xf7\x69\xcd\x0a\xc2\x89\x9b\x5c\x30\x9d\x91\xb1\x0d\x7b\xf8\x0e\xfc\xd0\x99\xc2\x22\xc8\xc1\x0c\xc5\x38\xbe\xd8\xae\x01\x86\x28\x04\x0f\x31\x4e\xe1\x7e\x3c\x18\xf9\x60\x58\xb5\xd9\x3d\x63\xdb\x88\x27\x0a\x13\x71\xee\x22\x1f\xe5\xcc\x9a\xe4\x95\x87\xb1\x08\x5f\x14\xf1\x1c\xaf\x9c\xe8\x52\x32\x63\x53\x4b\xf9\xc5\xe7\x0e\x59\xee\x8c\x60\x7d\xa5\x7f\xe2\x16\xc1\x03\xbf\x8e\x22\x4c\x79\x20\x92\x1b\xba\xa7\x36\xdd\x45\x3f\x7c\xa1\x6b\xa9\xcd\x39\x2f\xcb\x0f\xc0\xae\x9b\x8b\x8b\x03\x6f\x94\xdc\xf3\xce\xcd\x0f\x03\x2f\x03\x5c\xa7\x7e\x5f\x92\xc6\xcc\xad\x43\xba\x7a\xe6\xc0\x6f\xf5\x5e\x1c\x88\x46\x3c\x70\x64\x0b\x34\x1f\x36\x0a\x8a\xdb\xfa\x2d\xff\xb2\xe0\x44\xc7\xb4\xd6\xe0\x19\xb9\xa7\xcf\x4d\x74\xd4\xc3\x3c\xaf\xea\x20\xb1\x5f\x0a\xf8\xdc\xae\x26\x10\x8d\x84\xba\x74\xf3\x9b\xd6\x2c\xc7\xef\x6f\xf1\x5c\xa7\xdf\x32\xe6\x20\x27\x21\xc2\xc0\x3f\x75\xbf\xc5\xd6\x74\x17\x06\xc3\xed\x38\x80\x09\xa6\x91\x03\xd3\x89\x3d\x0a\xe7\xa1\x57\xed\xea\xcc\xb6\x84\x41\x27\x1a\x65\xad\x36\x73\x32\xe6\x26\x55\xfb\x9b\xe3\xf5\x87\xa5\x35\xc8\x55\xfc\x4d\xce\x7c\x76\x6b\xae\x7c\x29\x5d\xd6\x16\x7d\x52\xb7\x47\x69\x5a\x9b\xa5\x69\x7f\x47\x08\x5e\xd7\xe2\xfc\x20\xe4\x7a\x7a\x77\xd5\xc9\x44\x1c\x32\xba\x36\x74\x6f\xb9\xdf\x6b\xf3\x01\x2b\x5f\x65\x7d\xdd\x58\x66\x2b\xa0\x86\x24\xea\xe7\x24\xb3\x9e\x76\xf8\x8d\x02\x4d\x54\xfe\x53\x6a\x99\x29\xd7\xea\x5a\xf2\x53\x5b\xd1\xc9\x45\x5c\x74\xa1\xb4\xd4\x8d\x2f\xaa\xbf\xcd\x06\x79\x6e\xe5\xe1\x90\x5d\x24\x7b\x7d\x04\xdb\xcc\x0a\xcb\x27\x51\x08\x41\x46\x45\xd8\x24\x8f\xde\xd1\xfc\xfc\x6f\x83\x88\x05\x0b\x43\x0e\x8e\x86\xdb\x63\x0a\xec\xbe\x17\x63\x20\x27\x96\x5d\x4b\x03\x31\xb9\xcf\xd3\x5a\x45\x47\x5c\x97\x1e\x3c\xcd\x49\xa6\xb5\x26\xb4\xd2\x7f\x49\x41\x6a\x42\x5d\x2b\xfd\x0a\x5a\x46\xbd\x42\x95\xfc\xa6\x5d\xcb\xe6\x77\x67\xbe\x7c\x14\xed\x9a\xa6\xc9\x51\x2b\x2f\xd5\x37\x27\xd2\x6c\x3c\x8d\xe8\xbc\x8e\x27\x27\xfe\x86\xda\x14\x60\x5d\xbf\xb7\xf6\xf2\xd2\xd7\x61\xc6\x38\xf9\x1b\x0f\xf0\x50\xbc\x1d\x84\x10\xb1\xf3\x70\x5e\x8f\x07\x57\x57\x9c\xcf\x79\xb8\x73\x1c\x83\x2f\xa4\xe8\x5b\x5b\xff\x1f\x99\x3f\xfe\xd1\xb6\xc9\x86\x76\x0f\x67\x81\xf2\x7a\xee\xb2\x37\xd6\xcc\x50\x42\x5c\x82\x2a\x64\x6f\x8c\x40\x46\xa5\x23\x9e\xac\x91\x86\xb8\x2c\x6b\x49\x27\x0e\x85\xcc\x0c\x99\x43\x67\x7c\x87\x1c\x31\xe4\x2f\xdc\x79\x23\x87\x12\x02\xd4\xe8\xf2\x43\xf9\x5b\x3f\xd8\x17\x26\x36\x31\x10\x08\xb3\xb7\x16\x3f\xe2\x70\x59\x47\x9e\xf5\xda\xb3\x88\x64\x27\x32\x92\x02\xec\x03\x8f\x99\x1f\x4e\x95\xf6\xb9\x1b\x3a\xad\xaa\xe4\x34\x87\x12\x89\x13\x64\xfd\x70\xe6\xbc\x9d\xca\xb2\x47\x67\xa2\x97\x31\x72\xba\xf0\x3e\xd5\xa5\x22\x40\x8e\xf3\x82\xba\x7d\x56\xc1\x47\xce\xea\xbe\x24\x3b\xe9\x3f\xe2\x86\xa0\xfc\xeb\x21\xce\x80\x16\xe9\x04\x0a\x35\x8d\x6c\xda\x11\x30\x35\xbd\x9c\x2a\x8c\x80\x04\xd1\x84\xce\x89\x3f\xec\x17\xfd\x98\xb0\x0e\x81\x81\xf6\xd0\xad\xa4\x39\xc8\xb2\xf3\x6f\xcb\x96\x9e\xab\x5b\x3b\x80\xb1\xca\x88\x84\x7c\x06\xe7\xde\x0a\x7e\x75\x10\x9f\x58\xd7\x0b\x9e\xe4\x5d\xe5\xa4\xb8\x3e\x50\x89\x5e\x42\x0c\xe1\x1b\x18\x5b\xe6\x8f\x08\x16\xe1\xe9\xe6\x7f\x6c\x3a\x23\x
e8\x58\x6b\x22\xce\xfa\xd1\x5e\x15\xd0\xfd\x78\xdc\x0e\x1f\xba\xad\x2f\x52\x86\xc0\x35\x10\x65\x88\xf4\xa1\x68\x03\xdd\x08\xa2\xb6\xa5\x19\xda\xf9\xf9\x0c\x28\x51\x03\x93\x9d\xf6\x85\xdc\x6a\x6c\x3c\x2d\x81\x8d\x06\xae\x85\xfb\xb6\x66\xb6\x43\x98\xe0\x1b\x3d\xaf\x39\xb8\x44\x9a\x5a\x55\x4f\x9f\x97\x63\x89\x06\x1a\xc4\xfa\xb8\x34\x88\xf6\xe2\x44\x71\x51\xbf\xac\x59\xaa\xe7\x0e\x61\x39\x5a\xa7\x28\xd9\xd1\x2c\xad\x6c\x6d\x11\x76\x99\xad\x8e\xd6\xac\x5e\x31\x4a\x67\x8a\xd1\x82\x53\x62\xe4\xde\x70\xf7\x13\x89\x82\xe7\x22\x1c\x6f\x66\x08\x1d\xf4\x10\x21\x95\xcc\xfb\x69\x8a\x2a\xf0\x32\xa7\xe8\x06\xf4\x61\xb2\x86\x05\xef\x48\x77\xfd\x70\xbc\xa3\x58\x0d\xaf\xb2\x80\x22\x74\xc8\x5e\x02\x03\x63\x6d\x95\x76\x59\x9d\xa9\x10\xae\x6f\x2b\x64\x61\x68\x5e\x0d\xef\x71\xfd\x22\xb3\xcb\x2d\xb1\x8c\xf0\x71\x73\x63\x5f\x99\x73\x89\xa6\x20\x61\x14\x17\x32\xa1\x09\x35\x35\x3b\x84\xe6\x55\x82\x47\x5e\xcb\x6f\x88\x66\x9f\xe1\x52\x3b\xa2\xdc\xd0\x1f\x28\x3c\xe6\x46\x43\x02\xe8\xc2\xd3\x68\x0f\xa7\xef\xf1\x74\x40\xb2\x1b\xd2\x1c\x9c\xa5\x55\x42\x41\xb0\x74\x5e\x00\xeb\xe9\x78\xd7\x48\xca\x3f\xc8\x9e\x3f\x2e\xe2\x6a\x40\xf1\xf6\x9e\xc5\x71\xc5\x86\xfa\x27\x6f\xc3\x65\xdd\xa3\x93\x8d\x20\xa2\x94\xda\x69\x58\x3f\x69\x2f\x6d\xb0\x1a\x38\x25\xe0\x90\xb3\x0a\x18\xc3\x99\x69\x86\x9b\x08\x25\xc8\xa5\xf9\xf5\x39\xe1\x01\x11\x12\x09\x26\x77\x8b\xfa\xb8\x04\xe2\x28\x94\xf4\x1b\x3f\xa9\xd1\x94\xd2\x12\x1c\xbd\xc3\xab\xcf\x23\xac\x86\x55\xa2\xfb\xdc\xa1\x2f\xaf\x06\xb4\x41\x69\xca\xab\x30\x47\x18\xce\xf3\xfb\x6a\x75\xd5\x8d\x90\x41\x8f\x31\x8e\x40\x68\x19\xae\x5b\x9a\xe0\x0b\x6c\x78\x01\x68\xe7\x97\x72\x47\xcc\xfc\x65\xb4\x33\x64\x54\xaa\xa6\x9c\x2c\x8b\xb6\x92\xf3\xfa\x7a\xbb\x47\xef\x70\xb0\xc7\xa2\x25\xcc\x9e\xe8\x54\x86\x3f\x6e\x6c\x47\x83\xdc\x1d\x09\x51\xc2\x9d\xb2\xb4\xa7\xf5\x4c\xc8\x06\xf2\x6b\xfa\xbf\x2b\x1c\x9f\x32\x33\xcd\x26\xc1\x02\x3b\xd6\xdb\x42\x01\x12\xac\x25\xd6\x17\x07\xf2\xf3\x81\x4f\x25\x9b\x67\x03\x00\xcf\x0d\x77\x0c\x73\x09\xe8\xf9\xd3\xa8\xcf\xd8\x92\xab\x4b\x80\x68\xe3\x25\x38\x8d\x19\x35\x78\x6c\x37\x97\x09\xad\x5b\x66\xc9\x98\xed\xcd\xbf\x06\x5c\xd2\x73\x83\x4c\x8b\x3d\x89\xb2\xc4\xd7\x0b\x9e\x24\x6b\x9b\xf2\x67\xb1\xb1\x5c\x0e\xfe\x22\x44\xe9\x25\x96\xd2\x39\x45\x4f\xc4\x0e\x64\xe7\xa4\x17\xf4\x07\x21\xf3\x42\x22\xf1\x5e\x58\x9f\x7e\xca\x9e\xc9\x8a\x4e\xe8\x07\xa0\x8e\x45\x19\x9b\xa0\x4e\x0b\x5c\xe5\xf1\xbb\x93\x96\x2a\x77\xf0\x20\x3e\xa6\x0e\x4c\x73\x3b\xf8\x05\x3c\xaa\x67\x1c\x85\xc5\x0b\x5c\x53\x33\x88\x7a\x02\x6b\x89\xf7\xdb\x39\x4b\x3e\xc0\x1d\xc8\x25\x5d\xd4\x35\x50\x7a\x07\x1a\x8e\xf7\xec\xd2\x90\xdf\xde\xd9\x0b\xf2\xcf\x7a\x53\x68\x16\xca\xe6\x3b\x9f\x89\xcf\xf5\x82\x41\x12\x5f\x6c\x69\xcd\x09\x37\x45\xa0\x3b\x66\x6b\x5e\x8b\x05\x36\x5f\x6d\x43\x60\xc7\x7b\x2b\xeb\x98\x4b\x40\xdd\xe6\x9b\x78\x84\x7c\x74\x49\x2a\x98\x9d\xfe\x22\x96\xe3\x78\x56\x66\xea\xda\xb1\xed\xde\x40\x3c\x58\xd8\x1a\xdb\xed\x99\xa3\xca\x2e\xe7\x93\x0c\xa6\x13\xb4\x6e\xc1\x9b\x0e\xe2\xc4\x8f\x50\x67\x3f\x4a\xf4\xd3\x0e\xa4\xa2\x40\xbf\xe7\x50\x81\x13\xbd\xe4\xa0\x1e\x73\x07\x65\x7e\x6a\x68\x06\x66\xcc\xe2\x35\x28\x10\x9b\x14\x02\xea\xee\x8b\xa2\x43\x19\x0e\x9e\x63\xba\xdc\x21\xee\xe6\x7e\x5f\x10\xb4\x36\xf8\x86\x4f\xd5\x23\xa6\x0e\x38\xf3\x55\xa7\x4c\x1f\x38\x63\xb4\xaa\x9c\x79\xcd\x4b\x6d\x9f\xc3\x50\x85\xc3\x5b\xf2\x8e\xbd\xa0\x4b\xa2\x95\xcd\xaa\x8d\xf3\x94\x79\x3a\x93\x35\x2a\x57\x1a\xe0\x29\x88\x15\x65\x4f\x0b\x01\x68\x53\xc8\xcf\x1c\x4c\x2a\x38\x64\x6a\xb1\xa1\xfd\xa8\x91\x43\xb3\xac\x6f\x3d\xbf\xc3\xe4\xf3\x8b\x01\x04\xbe\x60\x9e\xdb\x1b\xce\xaf\x23\x6f\xdb\xf5\xf4\xef\xb0\x35\x9f\x02\xa0\x66\xed\x7d\xbe\x98\x66\x80\
x0b\xbf\x88\xb4\x8e\x6a\xbd\xe9\x6e\x1e\x79\xfe\x5b\x99\xd3\x8a\xb8\x30\x24\x5d\x09\x9e\x86\xe8\x87\x42\xa2\x14\x4f\x01\xf5\xb7\x2e\x7f\x7f\xb1\xe9\x78\x20\xe8\x47\x86\x5f\x9a\x7e\xbf\xac\x0e\x09\x76\x19\x43\x7f\xf8\x78\xf9\x57\x47\xa2\x3a\x28\x74\xca\xaf\x43\x97\xfa\x0c\xab\x91\xcb\x10\xf2\xa0\x99\x45\xea\x35\x07\x44\x4d\xd6\x86\x94\x2a\x83\x20\x58\xfd\xc4\x40\xff\x03\xf2\x21\xed\x2b\xd0\x27\x81\xde\xfa\x06\xf9\x87\x69\xa7\x58\x5f\x5a\x4f\xbc\x3b\x34\xdd\x1a\xbf\x8a\x41\xee\x36\x65\xa2\xe3\x21\x49\xfb\x23\x12\xed\x61\xa8\x7a\x48\x7b\x5f\xad\xe6\x14\xbe\xd1\xd3\x1f\xb8\x44\x08\x5c\x1d\xf0\x23\xcb\xa0\x97\xef\x48\xf4\x64\x81\x51\x0c\xdd\x30\xae\x39\x82\x23\xb2\x73\x74\xdf\x4a\xe2\x54\x3d\xb2\xf7\x27\x39\xb3\x17\x06\xab\x47\x4f\x4f\xac\x42\xeb\x71\x5b\x93\xc3\x6f\x50\xea\x30\xc7\x9e\x8c\x89\xc1\x9e\x20\xba\x0a\x33\x74\x2d\xae\xee\x0f\x6f\x05\xc6\x27\xdd\xec\xba\xa0\x54\x59\x73\x9c\x83\x77\xbb\x7f\x34\x06\x92\x91\x2a\xf1\x4f\xd5\x85\xe5\x19\x66\xbb\x57\x7b\x1e\xf8\x3a\xed\xa1\x80\x6d\x46\x34\x62\x61\xa2\xe5\x20\x7a\x12\xee\xd3\x3b\x4c\xef\xf2\x61\xaa\x95\xa8\x21\xcf\x60\x44\x94\xb4\xdc\x5c\x07\xf7\xe1\x9b\xf5\x88\x59\x21\xfd\xba\xd7\xe1\xde\x60\x27\x4a\xa6\x33\xb0\x38\xaf\x28\xd1\xdd\x23\x42\x76\x05\x5b\xee\x45\xf1\xb4\x3b\xb8\x9a\xfe\x04\x20\x1d\xa6\x59\xa3\x7e\xf4\x04\x3d\xb7\x02\x64\xb2\xe7\xd7\x8d\xcd\x42\x44\xb8\xd6\xc9\xbf\x37\x4c\x49\x0b\x3a\xe7\xea\xa0\xc9\x0f\x57\x40\xad\x62\xc1\x43\x01\xe7\x9e\xf1\x70\x41\x49\xbf\xfd\x82\x1e\xab\x20\x57\x1e\xe0\x1c\x88\xa9\x23\xc0\x51\x60\x39\x22\x1a\xe3\x22\x7a\xe9\xb8\x65\x9d\x44\xc8\x92\x99\xcf\x7b\x77\x56\x25\x44\xb5\x20\x32\x16\xcd\x3d\x5e\xb3\x06\xc1\x5a\x62\x1d\x68\x26\xb0\x0b\x1a\x36\xcd\xea\xa2\x0d\xb4\x91\x9b\xc0\xa3\x5b\xff\x2a\xa7\xce\x1b\xff\x22\xd6\x6a\xe7\x6f\x4b\xb9\x8f\x64\xc9\x9d\x90\x51\x76\x39\xda\x30\x9b\x95\xd2\xbf\x14\xc7\xf7\x73\xfd\xe6\x35\x30\x56\x3f\x77\x2a\xe7\x9e\x7d\x54\xc5\x1a\xce\x70\x75\xc5\x9a\xe2\x5c\xf1\x9e\x5b\xa9\xae\x71\xd4\xa3\xb5\xcb\x65\x31\x7a\x54\x67\x3a\x2a\x24\x6c\xcf\x8f\xda\x0b\xd9\x85\xf8\xd0\x58\x8f\x0d\x3e\xa4\xb4\xd8\xbe\xb5\x8e\xfc\x31\x6a\xfa\x5a\x8d\xf1\x56\x82\x23\x2a\x8f\xf2\x21\x5c\xb6\x67\xc6\x85\xa5\x58\x70\xbe\x81\x5a\x7d\xba\x19\x5e\x7d\x6b\x27\xe0\x61\x39\x99\x20\x8d\xb4\x2b\x28\xd6\xb5\x1f\x66\x29\x28\x00\xba\xd4\x10\x5d\x89\xc3\x3b\xde\x36\xda\x3b\xaf\x69\xf6\xa6\x98\x62\xd6\xe8\xbd\xb4\x5d\xb9\x74\x3b\x47\x86\x57\xe5\x30\xaa\xf8\x45\x16\x17\x39\x63\x09\x3e\x14\x7d\xfa\x1d\xe7\x27\xe4\xd6\xc6\x36\xa0\x7b\x3b\xfc\x63\xb0\x7b\x42\xe5\xc8\x5f\x7f\xfc\x0b\x0f\x44\x57\x71\xcd\x00\xce\xb5\x4f\x9f\x57\x17\xc9\xa3\xbb\xd7\x30\x83\xfa\x35\xb4\x4e\x31\xec\x80\xa4\xe1\xab\xa6\xa3\xef\xbc\xa6\xfb\xdf\x45\x3c\xcb\xff\x2e\x74\x44\xbe\x48\xf6\xf2\xc7\x03\x2c\xc4\xdb\xe0\x85\x32\x33\x73\xad\xec\xb9\x3c\x68\x22\xef\xfa\xa8\x4b\xd5\x90\x0b\x02\xf3\x85\x7d\x83\x56\xaa\x29\x54\x45\x0f\x9a\x48\x1c\x1d\xdc\x8c\x1c\xc1\xcd\xbe\x54\xe7\x75\xd3\xbe\xec\xbb\x05\x8d\x21\xb7\x47\x6f\x61\x6f\x60\xcd\xc6\x29\x39\xf0\x2b\x04\x3c\x59\xd8\xa4\x98\xc0\x92\x8f\x59\x7a\x1d\xb7\xed\x21\x70\x86\x21\xd8\x74\x94\x27\x4d\xab\x0d\x86\x36\xdf\x53\x5c\xfc\x2c\x50\x10\x21\x09\xb8\xb0\xf6\x4a\x44\x69\xfd\x0a\x74\x58\xcf\xc6\x2d\x0a\x19\x41\xbc\xc2\x4f\xed\x2d\x8b\xc9\xf5\x51\xf6\x26\xa4\x8f\xe2\xf4\x6e\x60\x00\x14\x23\x7e\x0c\xbc\xf8\x47\x58\x4b\x0c\xd4\xd1\xce\xda\xc9\x23\xbc\x9e\x8e\x23\xcb\x37\x8a\x6c\x2e\xbf\x91\x17\x6d\xb5\x07\xf4\x65\xc2\x9b\x86\xbe\x8a\xae\x01\xab\x59\x1d\x61\xa4\x4f\xb9\x27\x95\xcd\x75\x87\xf1\x89\x8b\x96\xbd\x7d\xa0\x0d\x66\x7b\x71\x49\xb3\x42\x75\x31\x00\x7a\xa5\xe5\x1f\xfe\xbd\xd8\xf0\x1a\xba\x3e\x5d\x6c
\xb2\xb7\xce\x68\x03\x98\x2d\xf4\xed\x6e\xe2\x57\xed\x69\x56\x67\x77\x43\x6c\x7d\xd2\x80\xe8\x76\x67\x84\xb9\xc9\x4f\x39\x7a\x52\xfe\xd1\x52\x97\x2a\xa8\x33\xe8\xc4\xc7\x7c\xd7\x89\xa0\xa9\xba\x86\xd5\x26\x8d\x40\xb1\xc3\x91\xa0\x79\xfb\xf9\xcc\x6f\x59\xe0\x42\x30\x6a\xd0\x4a\x2b\xe3\x64\x82\xb7\xd6\x0e\xc7\x9e\xd0\x5e\xb4\x29\x90\xe1\xce\x30\xb4\x5c\xdd\xe2\x58\x16\x89\x3e\x6c\x95\xdd\x8c\xad\x16\x31\x1e\xb6\xaa\x9d\x0f\x60\xd4\xf9\xde\x67\xf6\x3b\xac\x99\x40\x5f\xaf\xb3\xd2\xf4\xc2\x23\x48\x43\x8d\x05\xc7\xa0\x89\x1c\xec\x49\xf9\xdf\x11\x09\xe2\x01\xb7\x5d\x3d\xc2\xd4\x6d\xfc\xe1\xf9\x96\x7f\x8c\x93\xdf\x02\x53\xb5\xa1\x9f\x60\x6d\x41\xae\x50\x3c\x84\xca\xba\x99\x29\x7e\xdd\x1b\x6e\xad\xac\xd5\x9a\x74\x99\x26\x08\x83\xba\xc0\x56\xd1\x3e\xf8\x9d\x71\x7e\x9a\x66\xaa\x0b\x0d\x43\x57\xb8\xc4\x76\x1a\x90\x0b\x64\xd4\x95\xd3\xe7\xe6\x30\x5b\x2e\x60\xad\x29\x7c\x65\xfe\xc3\x56\xe3\xd6\x9c\xae\xe6\x86\xbd\x8c\xdc\x5c\xa4\x6f\x83\xa4\x56\x09\xfa\x3d\x3e\x8b\xba\x1f\x48\xc8\x56\x8d\x32\x15\x62\x16\x74\x10\x13\x32\x26\xf2\xda\x76\xe1\x90\x17\x1e\xc9\x94\x8e\x58\x08\xe2\x72\x84\xd1\xe5\x97\x5e\x3d\x7c\xdd\xe5\x1f\x07\x5f\xad\x12\x6e\x1b\xb0\x36\xad\x98\x77\x78\xda\x79\x31\x33\xb5\x0c\xf0\x88\x28\x82\xc2\x4d\xd5\x7e\x4e\x69\xbb\x6e\x32\xdb\xee\x28\x27\x9a\xa0\x14\xe9\x85\xb1\x02\x4e\xea\x2d\xdb\x61\x1c\x5a\xff\xa1\x26\x8d\x33\xde\x98\xd1\xe4\x9d\xc5\x8b\xbf\xe3\xd9\x94\xb4\x9e\x78\x6f\xb1\xa6\x67\x2f\x72\xeb\x73\xdb\xff\x3c\x6b\x0b\xf6\x35\xe4\xe8\x1c\x40\x05\xbd\xba\x0a\x66\x53\x78\x5a\x07\x1f\x3d\xe2\x2d\x2b\x1e\x62\x4b\x0e\xce\xfb\x96\xad\x69\x8c\x8c\x6f\xc0\x61\x1d\xa2\xb6\x9d\xe7\xbb\xe7\x90\xeb\x06\xd5\xaa\x99\xbd\x7a\xda\xaa\x07\xd3\x59\x79\x75\x01\x93\xa3\x21\xc5\xcd\x26\x58\x80\xa6\xd0\xea\x23\xdd\x1b\x17\xd8\xc9\xa1\xc3\xc5\xbb\xcf\xf5\x2e\xb2\xc4\xc8\x62\xda\xf6\x1e\x5c\x0d\x5a\xb7\xf9\x18\xcc\x06\xfe\xbc\xd5\xa4\xdc\x94\x9d\x47\xe2\x56\xaa\x22\x67\xb6\x04\x54\xbc\xce\x03\xd2\xfe\xf2\x24\x6f\x01\x1f\x72\x7b\x89\x5a\x21\xcd\xdb\x47\x64\xf5\x26\x1d\x1b\x10\xec\x42\x56\x7d\x83\x36\xf8\xed\xc1\xd8\x92\xbb\x28\xeb\x8b\x18\x38\x80\xa2\x24\xd0\xe1\xb9\xf2\xa7\x1c\x90\x5e\xc0\x3d\x55\x5b\x15\xe5\x35\xdc\x77\xae\x9a\xf1\x2f\xe8\xd6\x28\xa5\xf5\x9f\xbc\x85\x5e\x42\x57\x90\xc9\x6a\xd6\x16\x7d\x35\x80\xdd\x75\xfe\xd6\x66\xae\x97\x04\xa5\xcd\x9e\x55\xa7\xbb\xb9\x36\x93\xf1\x7b\x6e\x29\xfd\xc1\x79\x78\x6c\x8c\x9a\xc9\x1c\x0b\xb8\xbe\x6c\x81\x20\xe3\xd9\xb3\xfc\x76\x43\xcd\xcd\xea\xbe\xdf\x95\x29\xa6\xcc\x2c\x6b\xe3\xa1\x08\xb1\xcc\xd4\x00\x56\x59\x89\x46\x75\xf3\x2f\x96\xce\x7d\x23\x4c\x95\x3b\x00\xc6\x70\x97\xe5\xb4\xbd\xea\x6e\xb3\x3a\x37\x12\x2f\x7a\xb4\x98\x28\xe6\xb5\x3e\xbd\x3d\x2d\x7a\x9e\x00\x7a\x9f\x76\xb8\xf4\xab\xfb\x90\x32\xad\x8e\x08\x58\x7b\xeb\xd2\x1a\x71\x95\x5f\x4e\xd0\xfb\x90\x35\xaf\x9d\x58\x34\x55\x4d\x92\xfa\x8d\xd1\x13\xbe\x1b\xf5\x7b\xe2\x80\x5a\xfd\x86\xda\x9d\x3f\x52\x34\xd9\xf4\xb1\x1b\xb0\xf9\xd7\x73\x55\xd7\x7e\xbd\xf8\xb1\x5e\x49\xee\x5a\xaf\x88\x01\x73\xec\x45\x38\x2b\x57\x34\x86\xb2\x46\x2e\xad\xd0\x2c\x6b\x33\xf0\x7a\x39\x6b\x2f\x3b\xd2\x60\x65\xb1\x73\x44\x5e\x5d\x04\xb7\xf1\x84\x32\x2e\xb9\x88\x4d\x57\xbd\x73\xba\x16\x6a\x38\xac\xc5\x73\xef\x5d\x77\x8d\xdf\x6d\x3e\x7b\x62\x61\xad\xf9\xb5\xf0\xf2\xac\xe8\xb4\xf9\x74\xe1\xeb\xb3\x22\x90\xa1\xab\x3b\x9c\xb5\x9a\x31\xa2\xc9\xcf\x51\x8e\xd5\xe5\xde\xf9\x84\xd3\x76\xa3\xf7\xbe\x6a\xef\x4f\x18\x98\x7b\x5c\xb1\xf7\x0b\x4e\xb4\x4f\xa4\xe6\xaf\xd6\xbe\xbe\x48\xd9\xdb\xc0\x02\x9c\x09\x80\xb0\x9e\x01\xda\x6c\x53\xd3\x68\xc5\x8c\xfe\xcd\xf6\xb3\xd9\x74\xcf\xac\xc9\xae\x8f\x2c\x12\x05\xd6\xef\x37\x49\x18\xce\xde\x6b\x38\xbb\xf7\x64\x4
7\xed\xf7\xc8\x6b\x6c\xcd\xbd\x8e\x27\x45\x3d\xcd\x6a\xb3\xf2\xc5\x48\xd4\x05\xa3\x16\x27\xad\x45\x81\xa4\x4b\x08\x9e\x2f\xd7\x48\xfc\x9b\xf5\x11\xbd\xc7\xbf\x5c\xbd\x6d\xeb\x29\x58\x5f\x3f\xe1\x70\x7c\x8d\xdd\xaf\xe6\x12\xd9\x49\x8b\xe3\x8e\x84\xd4\xc3\xa8\x3c\xba\xd5\x64\x94\x01\xb7\xb9\xca\xf0\x7d\x80\x3a\x1f\x04\x48\x16\x1e\x14\xf9\x72\x74\x5f\xa1\x3b\x7e\xf1\xf7\x93\xcb\xcb\x8b\xb8\xd8\x9d\xf2\xe1\x30\xc0\xfa\xe8\xa3\x00\x0d\x06\xdf\x88\x9d\x36\xaf\xd5\x52\x3d\x52\x90\x5b\xb1\x13\x73\x4c\x21\x52\x55\xf0\x6c\xf2\x4c\x6a\x21\xeb\x6f\x8a\x10\x3a\x50\x98\xe3\x95\x5e\x2a\xc3\xd9\x17\x7a\x84\x17\x72\x9a\x16\x0d\xff\x2e\xe5\x63\x40\x03\x29\xa6\x5b\x0e\x6a\x5f\xc3\x16\x45\x9f\x01\xce\x53\x77\x8f\x02\xe9\x97\xa8\x58\xea\xe2\x26\x5a\x19\x2f\xc5\xb5\xcb\x4c\x5c\xc3\xea\x2f\x3f\x33\xeb\x56\x46\xaa\x8e\x63\xfe\x08\x64\x67\x89\x6f\xdf\xe2\x19\xf3\x6c\x2f\x90\xf9\x17\x7b\x85\x56\xcc\xf7\x19\x66\x11\x79\x30\xef\x7f\x2f\xe6\xb4\x2c\x65\x78\x29\x20\xcf\xf6\xe2\x2a\x31\x24\x38\xa2\xd0\xeb\x2b\xe6\x85\x2e\x16\x9a\xb2\x46\xa6\x96\xc9\x03\xd9\x96\xfc\x87\xd1\x47\x7f\xa8\x17\xab\x5b\x22\x5c\x69\xc1\x38\xe7\x35\x37\x74\x9d\xd3\x03\xf9\x54\x2f\xd0\x63\x83\xd8\x24\xc3\xbe\x17\xcf\x8f\x8f\x7e\x01\x46\xec\xa9\x2c\xb4\xe4\x42\x1e\x21\x07\xf0\x96\x8c\xb7\xae\x73\xcb\x73\x34\xb9\xa9\xc1\x15\x64\x6d\x1a\x94\x3d\x5c\xac\x55\x50\xbf\x7e\xcc\x2f\x5e\xce\x7e\xb8\xd6\x80\x7e\x2e\x3d\xfd\x32\xae\x92\x88\x69\x7a\x9f\x4e\x67\x30\x43\x84\xc3\x74\xfe\x25\x71\x07\xa1\x5b\xdb\xcb\x26\xf3\xb0\x21\x08\xd5\xf2\x00\x8b\xc5\x1a\x58\x37\x79\xf0\xc8\x7a\xd2\xd3\xfe\x4f\x80\x46\x6b\x18\xbc\xb5\xe9\xfd\x0c\x28\xab\xdb\x29\xe8\x80\xc0\xef\xc5\xec\xd9\x2a\xa4\x2a\xfb\xe9\x6a\x22\x1c\x94\x21\x65\x94\x8f\x52\xda\x23\xa8\xc6\x87\xe9\x74\x41\x40\x8d\x8d\x99\x88\x98\x77\x6b\x2d\x8f\x7c\xdc\x1f\x7d\xcc\xe8\xa5\xb8\xb5\x78\xe8\x14\x37\xb8\x2c\x81\x3d\x5c\xb8\xad\x59\xf5\xb8\x96\x9c\x89\x37\x9c\xc9\x85\x58\xd2\x4f\x25\xea\x66\x49\xf0\x3e\x71\xcd\xa9\xf9\xf7\x16\xe4\x25\xaf\x32\x30\xa2\x1e\xa9\x7d\x4b\xc2\x17\x54\x20\xca\xa6\x3c\xeb\x2c\x42\x8f\xbe\xe0\x88\xb3\x95\x07\xbb\x61\x7c\x44\xe0\xdd\x2c\x0c\x6c\x36\x9b\x71\x7e\x30\x56\x3e\x29\xdf\x07\x53\x2b\xe6\xf7\xfa\x8f\x13\xd1\x04\xdf\x31\xf2\x3a\xbf\x07\xa5\xe0\xcf\x8a\x43\x98\xd5\x8a\x9c\xaf\x80\xf6\x5d\xf6\x32\x8c\x32\x7b\x3e\xaa\xc2\x1a\xae\x02\x17\x5e\x45\x9a\x65\x73\xa8\x09\xcd\x51\x29\x01\x66\x88\x8f\xd5\x79\xdb\xfc\x7d\xf1\xd0\xf2\xb9\xaa\xd9\x3b\x7b\x1c\xd9\x9f\xd6\xe9\xe9\xe5\x77\x9c\x21\xa6\x73\x7d\xf3\x23\x14\xa4\x5b\x37\xad\x32\x0c\x76\x1a\x0a\xd9\xdd\xa6\xb3\xd4\xb6\xf0\xbe\x5d\x91\x76\x36\x17\xa6\xc2\xcd\xc5\x2f\xa3\x36\x25\xa4\xe1\x87\x74\x14\x8e\x74\xe6\xb4\xb3\x57\x9a\x3f\x87\x46\xdf\x8c\x0c\x6a\x19\x0d\x7d\x38\x25\x8f\x1e\x9c\xf3\xab\xb0\xab\x8e\xe1\xe0\x5d\x91\x5b\x73\xf6\xb7\x41\xf3\xad\x32\xa3\x73\x7a\x06\xc3\xaa\x99\xd4\x68\x85\x91\x22\x88\x70\xce\xee\x69\xa4\xd9\x5c\xba\x40\x8d\x01\x9f\xe1\x5d\x66\xd2\xb8\x60\xa4\x7f\x29\xa2\x87\xe7\xf4\x1b\x4f\xa5\x75\x42\x9f\xe4\x1c\xc6\xb0\xc5\x0c\x18\x14\x71\x88\xab\x46\x68\x66\xbe\xa8\x80\xaf\xbc\xbf\x6e\x7e\x22\x1f\x3c\x94\x22\xaa\x7d\x46\xc3\x4f\x97\xc8\x51\x9a\xec\x55\x02\xba\x53\xb4\x40\xff\xc4\x13\x24\x6a\x62\xdf\x1c\x8a\x24\x28\x36\x0b\x9f\x10\xad\xf1\x9d\x16\x0e\x10\x3c\xdf\xfb\xf8\xfe\x93\x2a\xdc\x73\x8f\x21\x3f\xfb\xae\x12\x91\x58\x7f\x8b\x20\x85\xf8\x88\x42\x15\xcb\x59\x84\x35\x34\xa0\x9f\x10\xaa\xc7\x26\x11\x8b\xf6\x4d\x03\xe6\x97\xa8\x8b\x74\x3c\x3b\x7b\x51\xe6\xc2\xac\xe3\xe7\xac\xc4\xac\x67\x79\x49\xff\xe4\x69\x4d\xec\xc0\x29\x9e\xf2\xd0\x3c\xf5\x65\x7b\x42\x42\x9f\x5d\xbd\x67\xd9\xbb\x58\x6c\x1d\x07\x76\x3f\x9f\x45\xa1\x6a\x
a7\xbe\xf4\xcf\x62\x9f\xef\xe0\x34\xea\xf3\xf0\x2c\xb3\x10\x8d\x65\x58\xbd\x17\x67\x06\x83\x37\xcc\xa0\x47\xe0\xa9\xf0\x69\x4c\x23\x9f\x36\x6c\x99\x8c\x8d\x73\xcc\xe3\x04\xce\xa3\x17\xac\xcd\x44\xc9\xb0\x67\x7e\x31\x6c\xfa\x69\x75\x0b\x77\x4b\xb7\xed\x13\x21\x3a\x3e\x9d\x43\x50\xf6\xd9\x0f\xf8\x5e\x3d\x51\xa3\xeb\x8d\x87\x4d\xdc\xe3\x9e\x99\x48\xf2\x4c\x85\x33\x1e\x79\x5e\xa5\x4b\xf6\xb4\xfb\xd9\xf9\xd4\x57\xfa\xb9\xd6\x1f\x1f\xb5\xfe\xd1\xf8\xc9\xcf\x35\x3b\xcf\xfb\x07\xd0\xd3\xec\xc6\xea\x01\x8a\x3f\x75\x66\xf7\xf2\x07\xe9\xab\xb8\xb1\x3f\xf9\x95\xe3\x78\x7e\xb2\x32\x69\x7e\xb2\xd4\x44\x7e\xb2\xe7\xed\xaa\x52\xff\x49\xef\x80\x38\xbe\x94\x15\xfc\x83\x76\x61\xd5\x20\x1c\x0a\xde\xe6\x85\x21\x2e\x63\xc8\x84\x14\x74\x81\x51\x1f\x94\xd5\x55\x32\x1b\xea\xd0\x29\xbf\x34\xe1\x63\x6c\xa6\x7f\x6b\x60\x60\x9c\x2c\xbe\xd9\x0d\xc5\x28\xda\x23\x4a\x75\x4c\x37\x53\xd1\x15\xdb\xfa\xe0\x2d\x06\xaa\x51\xac\x43\xb4\xe5\xe5\x28\xf7\x82\xa7\x4f\x9f\xf9\x3f\xc5\x1b\xe2\x08\x07\x7f\xd8\x0d\xc6\xe0\x25\x04\xdc\xad\x5e\x68\xf0\x7e\x33\x11\x41\xa1\xa3\xf7\xec\xf3\xb4\xe6\x80\x52\xc2\xb1\xd1\xf8\x8b\xd1\x23\x15\xcd\x70\xf5\x51\xc8\xc0\x14\xae\x50\x45\xe1\x4a\xab\xea\x29\x00\x20\x0b\xb7\xfb\x13\x5e\xd4\x02\xc9\xb9\x56\xf7\x00\xdc\xef\x13\x6f\x68\x41\x2b\xe0\xeb\x03\x12\xc9\x25\x1e\x3b\xd9\x8c\x88\x0d\xbd\x43\x88\x01\x8d\x2c\x7d\x43\x3c\x3e\xbc\x4d\xa6\x13\x1a\xae\xfb\x87\x81\xed\xb0\x28\xe0\x2b\xce\xcd\x81\x9b\xc2\xda\x7e\x6a\xa7\xe0\x4d\xa1\x2a\xc6\xe6\x10\x75\xbe\x8f\xb3\x47\x23\x09\xfa\x02\xdd\xbd\xd0\x7f\x47\x2a\x50\x6e\x83\xc5\xc8\xad\x06\x21\xa0\xe4\xb5\xdd\x02\xe4\xf7\x5b\x6f\x39\xf6\xa7\x0f\xa3\xfb\x6c\x43\xd6\xcd\x19\x95\x9a\xee\x9f\x5b\x4e\x7b\x03\x26\x95\xb5\xdb\x73\x03\x4e\x02\x55\x89\x5e\xa9\xb6\xba\x6c\xd7\x10\xb7\x8c\x31\xb4\xa5\x06\x9b\xca\x1d\x7a\x4b\x04\x2e\x03\x55\x19\x29\x1f\x66\x7b\x1a\xf2\xda\x4a\xbd\xd6\x28\xe8\x72\xe3\x6d\x2f\xe1\xe7\x2b\x43\xa9\x88\x34\x83\x86\x33\x6f\x52\x2f\xf2\x97\xb5\x4b\x3c\xa9\x94\xf4\xd1\x1e\x23\x53\xfe\x41\xb2\x26\x1a\x7e\x9e\xde\x00\x97\x3a\x09\xf0\xed\x8e\x5c\x75\xf1\x1d\x5b\xd9\x4f\x73\xdd\xd9\xbd\x99\x28\xdb\x3a\x5d\xfa\x2e\x4d\x17\xf9\x9b\x48\x4e\xe7\x0f\x8b\xb7\x97\xa7\x6b\x56\x83\x67\xba\xba\x3e\x89\x66\xd9\x71\x70\x79\x3a\x4f\xd4\x24\x08\x4a\x48\x9c\x76\x4c\x03\x36\x8b\x55\xa6\x3b\x85\x47\x97\x35\x89\x92\xab\xc2\xad\x5e\x3a\x5f\x4c\xb1\x30\x2b\xc2\xc9\xc0\xb0\x8c\x8c\x41\x28\x5d\xa1\x51\x7e\x5e\x5a\x0a\xaf\xaf\xac\x41\xd3\xa4\x48\x74\xa4\x76\x86\xe3\x64\x82\x08\x07\xa7\xac\xb2\x1a\xc3\x92\x4b\x8a\x60\xc9\x43\x5e\xf2\x6b\xcf\xca\xfb\xb7\x52\xb7\x0b\x9a\x89\xc8\x86\xef\x30\x83\x7d\x09\x4b\x5b\x7c\x02\xfa\xcb\xbd\xb8\x93\x63\xaa\x23\x22\xfc\xb9\x5c\x48\x2c\xda\x6e\xaa\xb6\xd7\x35\xc2\x27\x4c\x1e\x7d\x62\xfa\xde\x54\xf8\x9d\x9f\xca\x22\xbc\x09\x5c\xaf\xbd\x44\x46\xa6\xf2\xd3\x1a\x9d\x93\xbd\x83\x8c\x84\x11\x47\x13\xd2\x03\x6e\x74\xf9\x7e\x92\xa7\xcc\xe4\x04\xeb\xa1\x5e\xdf\x17\xb9\x95\xf6\x81\x4e\x9a\xa7\x87\x7a\xca\xf0\x47\xf3\x13\x37\x65\x51\x1c\x79\xc6\xd0\x76\xe2\x25\xcf\x81\xde\x9e\x32\xd5\x3b\xdd\x6c\xff\xdd\x25\xb7\x86\x31\x36\xdd\x92\xb0\xdd\xe4\xdd\xcc\xe9\xf7\x4d\x00\xc8\xe4\xa2\x8e\x71\xb7\x90\x30\x9d\x05\xe4\xb4\x2b\xe8\xb9\x9e\x2c\x0c\x9b\x38\x3c\xce\xd1\x94\x6e\xcf\x94\xbf\xfd\x94\xd3\x9f\x7d\xf3\xf8\xb4\xa5\x3f\x55\x87\x53\x7a\x2b\x9b\x17\x31\x2c\x7e\x77\x24\x22\x30\x41\x5c\x43\xa7\x02\x87\xa8\x40\xad\x49\x11\x72\xa0\x70\x96\x20\x69\x9a\x39\x24\xcd\x8d\x38\x57\x33\x83\x0e\x38\x79\x92\xa2\xac\x5e\xa2\x97\x13\x58\x18\x8d\x9d\x0a\x30\x27\xe9\xe1\x30\x75\x8d\x81\x4c\xd4\x29\x03\x9c\xc0\xe7\xc8\x1a\xd4\x13\xa7\x9d\x2b\xf0\xfa\x3b\x97\xca\xbe\xe9\x67\x08\
x00\x9f\xb2\x3c\x76\x32\x0e\x92\x4d\x91\x89\x49\x21\x53\x62\x62\xce\xf4\xfd\xf6\xf2\xf8\xf6\x96\xcf\x78\x51\x69\x6e\x54\xb0\xfb\x78\x89\xfe\x32\x5e\x37\xc9\xd1\x53\x0a\xf8\x6b\x51\x94\x87\x5b\x7e\xae\xe3\x7e\x65\xe2\x34\xaf\xb8\xb9\xe3\xee\x7f\x5f\x24\x77\xfa\x03\xfc\x01\x69\x23\xba\x93\xab\x22\xf1\x85\x8e\xe9\xb1\x45\x55\x8e\x3b\xc8\xd6\xea\x39\x8e\x08\x27\xdd\xf9\xe9\xb0\xcf\x93\xb0\x91\x4e\xee\x3c\x69\x35\xe0\xe2\x18\x8d\x02\xf7\x14\x6a\xb9\x95\xe2\x9d\x76\xad\x67\x06\x88\x8e\xb5\x7a\x27\x83\x44\xce\x43\x56\xe2\x74\x88\xce\x15\xec\x99\x8b\xcd\x43\x70\x0b\xc7\xea\x4f\xd8\xc1\x05\x86\xf4\xa2\x43\x0d\xe6\xe2\xc7\x47\xaf\x8a\x4d\xb8\x8d\xf9\xdd\xab\x2e\x18\x11\x25\xc3\x79\x8c\xce\x1a\x89\x1d\x03\xae\x13\x83\x1c\x11\xc3\x38\x7a\x6f\x87\x27\x31\xe7\x35\x86\xe0\xc6\x39\x5d\x7d\x25\x89\xb4\xec\xb3\x07\x11\xd1\x46\x52\xa4\xaf\xc8\x78\xb3\xb1\xd8\x33\xf2\xa6\xc1\x63\xf0\x50\x4b\xc9\x39\x8e\x13\x93\x18\xc6\x7c\x34\x8f\xda\x98\x3f\x72\xe4\xad\xd0\xb2\xd7\x79\xf6\x20\xd0\x39\xe7\x33\x3f\xc5\x32\x1f\x33\xbe\xd5\xb4\xf2\x53\xb3\xd2\x1e\x6e\x49\xd8\x9c\x99\x42\xa3\x70\x04\x72\x70\xa0\x31\xdf\x2d\x84\x11\x63\xd9\x27\xf7\x6b\x6f\x86\x1f\x58\xe2\x33\x49\x9e\xc1\x08\xaf\x6a\xff\xc7\x48\xd4\xf1\x7b\xa2\xda\x19\xc8\x9f\xe5\xeb\x06\x7f\xfa\xb6\x1e\x37\x1d\xd9\xee\x49\xe4\x50\x3d\x40\x13\xe0\x3a\xfd\xf3\x95\xc2\x62\xbd\x65\xd6\x44\xd7\xd0\x03\xd2\x5f\x82\x62\xee\xf9\x62\x8f\xed\xb1\xb7\x30\x13\x32\x42\x09\xdf\x44\x16\xa8\xc6\xc2\x1c\x18\x2a\x1a\x26\x94\xc4\x98\x37\x6c\xed\xa4\xbf\x12\x73\xdd\x77\xdc\x27\x6d\x30\x15\xa1\x3c\xff\xc5\x30\xea\xa3\x6a\xbc\xfe\x81\x97\x23\xfa\xd0\xd6\xc4\x12\xf3\x15\x8d\xad\x4b\x89\x4e\x66\x3f\x32\xc3\xc3\x31\x30\xf8\x62\x4c\x64\x00\x45\x63\x45\x26\x40\x01\x27\xaa\x57\x11\x44\x51\xb5\x0f\x3c\x28\x24\x96\xa4\xa3\xea\x81\x8f\xbd\x80\x99\xad\xdf\x0f\x1c\xe8\x8e\x30\xc2\xb0\x23\xf6\xf0\xe1\x20\x1c\x6d\x77\xce\xec\xdd\x3c\x66\x8f\x16\x93\x89\x5c\x97\x93\xe6\xf9\x62\x06\x26\x4c\xe4\xc7\xf4\x51\x88\xd1\x8d\x07\xde\xc8\x21\xb5\x1f\x0a\x8f\xa8\x45\x8b\x3e\x42\x0c\xf8\x21\xe8\xdf\xa3\x90\xbc\xff\xb0\x27\xb0\x91\x39\x1e\x68\x2e\xeb\x67\x84\x6b\x14\x59\xf3\x2c\x2b\x8d\xda\xc8\xfc\x24\x2e\x74\x46\x45\x43\xd3\x71\x78\xbc\x38\xb0\x23\x28\xe9\x51\x1e\x5c\x19\x99\xab\xd2\xe4\x7c\x40\x14\x90\xbb\x49\x91\xdb\xf6\xc8\x0c\x12\x7b\x64\xaf\x43\x11\x6b\x53\x7c\x50\xfa\x91\x97\x08\x77\x7f\x84\x8b\xfd\x61\x5f\xfc\x9d\x8b\x79\xc7\x00\xba\x17\x8d\x2c\x2a\x86\x0c\x11\xc3\x31\xd3\xfa\x26\xd7\xf4\xa4\x29\xe8\xab\x5e\xae\x47\x8a\x18\xfa\x47\xfa\xa1\xb2\x22\x84\xe0\xdf\x8d\x59\x5a\x76\x57\x12\xec\xa3\x74\xeb\x81\x3d\x92\x26\xd6\x15\xf0\x6e\x01\xf4\xcf\x4e\x4d\xd5\x26\x71\x48\x29\x7e\xcb\xef\xcc\x51\xb3\x2f\xe7\x8f\x32\xbc\x82\xfe\xbf\xfc\x8f\x90\x79\x0e\xc2\x90\x41\xca\x03\x47\x36\x18\x82\x91\xe3\x07\xc4\x5c\x47\x2c\x24\x8a\x43\x69\x04\xd3\xc4\x63\xca\xff\xea\xde\x53\xcf\x36\xff\xf3\xbe\xce\xd9\x90\xf2\xf9\x9f\x7d\x9f\x8f\x60\x76\xfe\xcb\x3d\x57\xb0\x36\x1f\x86\x2f\x77\xee\x03\x90\x2b\xc8\xe9\x37\x36\x2a\xdc\x18\x5d\x70\x4d\x31\x42\xac\x43\x43\xba\x60\x13\xdd\x05\xe5\x94\x43\x6a\x19\x5f\x41\xbf\x58\xf9\xa2\x1f\x57\x26\xb2\xc8\xbc\x4e\xca\x37\x0d\xf5\xeb\x86\xa1\xc6\xbc\x44\xc0\xb2\xa3\x07\xe7\x08\x0a\x0a\x32\x1f\xea\x71\xeb\xc6\x72\xfc\x4d\x27\x65\xfd\x52\x28\x32\xae\xf9\x52\xdf\x17\xa7\xcc\xb0\xbe\x0c\x7a\xbf\x3c\x2d\xd0\xa2\x59\x74\xb0\xeb\x28\x59\xc0\xbc\x3e\xe4\xbe\xb3\xf5\xfd\x00\x56\x0d\x3a\x85\x22\x25\x62\x33\x7a\xcf\xb2\x63\x8b\x2e\x53\x8c\x96\x97\x60\xe1\xaa\xe0\xe6\xbf\x25\xd5\x68\x05\x0c\xca\xb3\xfa\x01\xec\xf4\x86\x9a\x96\xa6\x20\x0e\xa0\xe9\xa1\xaa\xc4\x7a\x62\x2e\x58\x99\xfd\x46\x7f\x09\x5b\xba\x83\x8f
\xee\xc7\xa1\xe0\xb6\x2f\xc7\x94\xd6\xa6\x2c\x2b\x66\x69\x4b\x18\xcd\xc3\x2f\x6b\x96\x6c\x9d\xc5\xe8\xce\xe5\x01\x83\x20\xaa\xc2\x73\x7f\x25\xc2\xf0\xd1\x75\x65\x5a\x52\xee\xcb\xee\xd3\x74\xe8\xe1\xb3\x36\xd8\x24\x3d\xf8\x0c\xfd\x1a\xc2\x02\x65\xf2\xe0\xad\x6d\xe6\x1b\x1c\xde\x87\xcb\x3f\x62\xbd\xc1\xa5\xc5\x79\x59\xe0\x28\x55\x0e\xb9\xa3\x4d\x89\x2a\xbc\x98\x0d\x3a\x58\xfd\xc6\x24\xd4\xe1\x52\x3a\xfa\x70\xad\x1c\x3b\x37\x43\x2d\x6f\x58\xfa\x6d\x19\x3c\x6a\x64\xb8\x40\x5a\xe2\xac\x61\xa6\xe8\x85\x9d\x29\x2e\xd1\xb0\x5f\x41\x13\x1f\x3c\x8c\xdb\x9f\x0b\x98\x3c\x06\xb4\x91\x6a\x58\xd5\xa3\x2b\xbe\x04\x4b\xe5\x3e\xc0\xb2\x94\xa4\xf6\xb0\xcb\x71\xea\xdc\xd4\x20\x40\x22\x77\xfc\x0c\x96\xe3\x9e\x62\x5a\xd6\x8d\x7a\x79\x66\x8f\x6a\xb9\x0e\x55\x43\x15\x78\xff\xb9\x7f\xfa\xf7\xe0\xae\x56\x93\x78\xa8\x82\x3a\xe2\xeb\x34\xd3\x78\x4a\x3b\x71\xb0\x8f\x04\xe4\x52\xf8\x7b\xf5\xc4\x36\x3f\x9e\x0a\xa0\x99\x2e\xbc\xd9\x88\xae\xe1\x79\x60\x40\x4b\x86\x7e\x25\xd9\x38\xec\x9e\xc9\x6c\x43\xa5\x90\xc9\x80\x84\x05\x29\x90\x0f\x3f\xee\x7e\x1e\x90\xcf\xa7\xa3\x2e\xbf\x94\xba\x02\xa1\x75\x10\x17\x68\x00\x4a\xfd\x4b\x64\xd6\x43\x40\xd2\xe3\xae\x15\x07\x68\x58\x8d\x91\xc3\x1a\x30\x56\xd9\x4b\xd2\xaa\x50\xed\x89\x99\x6e\x7f\x09\xb2\x8a\x6a\xfa\x8c\x42\x73\x8a\x70\x06\x02\xcb\xb5\xa0\x67\x50\xc7\x51\x1e\x7d\xd9\x22\x20\x0b\x90\x55\xa7\x94\x68\xeb\x7b\xc4\x29\x0e\xd0\x8a\xf1\xe9\x12\x48\xd5\x2b\x70\xaa\x57\xaf\xc6\xc4\x50\xc6\x7c\xb4\xed\x0a\xcc\x34\xdc\x31\x40\xc8\xcb\xf6\xf7\x49\x03\x5e\xad\xfc\x45\xba\x6a\x83\xd8\x3a\x68\x95\xe3\x3e\x5f\x4e\x56\xd5\xa7\x7c\x40\xf6\x7b\x51\x1c\xe5\x40\x4d\x5b\x5a\xf5\x9f\x76\x91\xcf\xb6\x2c\x2a\x6c\x3f\x44\x45\xfd\x72\xae\xc8\xab\x03\x0b\x7d\xdc\x17\x7c\x24\x69\x78\xd2\xb0\x2c\x5d\xb1\x6c\xaf\xb8\x56\xdf\xca\xcd\x28\x85\xad\x0d\xad\x11\x61\x20\x60\x2b\x57\x5b\x2e\x6d\x69\x71\x8e\x60\xb0\x49\x05\x83\x18\xb2\x34\x9d\x86\xf6\x34\x66\x05\xa9\xc3\xe8\x3e\x09\xac\x73\xb7\x73\xc8\x16\x63\x48\x52\xa1\x1f\x72\x84\xfc\x0d\xf6\xc8\x5c\x6c\x94\x0f\xf9\xd1\x20\xac\xbc\x36\x8f\x56\x59\x03\xdf\x1a\xd0\x59\x34\x41\x6e\x5a\xeb\xaa\xe1\x16\xd8\x7d\x00\x5d\xe1\x2a\xd3\xe9\xf5\xfb\x7b\x6b\xf6\x0d\xa8\x0c\x56\xeb\xb3\x9d\xbc\xb0\xab\x59\x05\x49\xda\x0e\xea\x05\x73\x21\xe8\x2b\xd0\x40\x9e\xa4\xe6\x2d\x1e\xbc\x41\xd3\x07\xc5\x06\x97\x75\xe4\xc3\x97\x9a\x0f\xc2\x4d\x1e\xc8\x37\x02\xbf\xdd\xc0\x50\x1c\x31\xc9\xdf\x31\xec\x6f\xd6\xb8\xa2\x25\x63\x76\xff\x97\x39\x38\xed\x46\xf5\xb4\xfa\x67\x11\x83\x15\x30\x53\x82\x04\xdf\x40\x63\xbb\x61\x57\x7e\x60\x0e\x25\x42\x3a\x89\x69\x33\x4b\x7e\xac\x5e\x02\x12\xa0\xb1\xde\xb0\x53\xe2\x58\x79\x9d\xc9\x63\xd5\x42\x70\x02\xa9\xdf\xd2\x93\x64\xdc\x5f\x4c\x6c\x80\xb8\x2f\xdf\x6d\x67\xb2\xd2\xb7\xd2\x37\x4d\x8d\x3e\x72\x76\xc1\x68\x55\x38\x6a\xdf\x84\x69\xcd\x92\x2b\xad\xf7\x88\xfb\x2e\xd8\x86\xee\x74\xe4\xe6\xf3\x11\xf0\x54\x8a\xe4\x12\x64\xb9\xc7\xbc\xb4\x68\x8d\x14\x0a\xbc\x50\x17\x67\x9d\x05\x7d\x9f\x17\x33\x88\x7a\xc4\x0d\x11\xd6\x07\xa9\x25\xe6\x9a\x82\xd2\x7c\x06\x20\x42\xe8\x55\xd2\x96\x5a\x79\x55\x1b\x1a\xa5\xa3\x91\xa2\x7a\xeb\x55\xaa\xc5\xd0\xd7\xe3\x86\x0d\x59\x81\xa3\x9d\x66\xf8\x01\x20\x79\x9a\x60\x4e\xf1\x3c\x03\xbd\x7a\xfc\xa7\xc4\x42\x84\x7c\xc1\xe4\x04\xba\x41\x4f\xed\x66\xef\x1a\x59\xb3\x2a\xd4\xb8\xf7\x71\xc5\xad\x9e\xcd\x56\x47\xce\x0a\x68\x79\xf2\x91\xb0\x0f\xc8\x70\x30\x5a\x0e\xfe\xda\x6b\x96\xaf\x80\x4c\xd7\x21\x5e\x7f\x21\x5e\x1b\x2b\x15\x78\xd7\xfb\xb8\x81\x6f\x15\x2c\xd6\xcd\x9b\x87\xe8\xcc\xe3\x54\x02\x25\xbb\xfe\xa8\x39\x6a\xe6\x98\x25\xaf\x64\x36\x24\x13\xe2\x52\xae\x6c\xcd\x8f\x51\x18\xf2\xc7\x15\x5a\x09\x18\xf7\x8f\xbc\x63\x5d\x45\xd2\xf6\xd8\x5c\xc
5\x9f\x01\x67\x60\xe7\xc1\x26\xaa\x33\x5b\xad\x6d\x7b\x36\xf4\x2a\x22\xe3\x3e\x48\xa9\xee\xea\x15\x43\x77\x59\xea\xf0\xb1\xe4\x92\xf7\xde\x9b\xc3\xdc\x9b\xd5\x3e\x69\xbc\x02\x40\x3b\xb7\xb1\x39\x2f\x3e\x85\x3d\x2d\x22\x65\xd6\x11\xa4\x56\x31\x52\x1f\x51\xfb\xf5\xb5\x7b\xe5\xce\x0d\x96\xd7\xce\xef\x02\x86\x94\x39\x63\x76\x79\x22\x99\xcb\xc1\xd4\x2c\x33\x1b\x62\x75\x16\xc8\xdd\x8d\x95\x06\xc3\xaf\xdc\xc8\x34\x36\x4e\x06\xcd\xbf\xd7\xd6\x10\xa9\x19\x4b\x17\xfb\xc9\x84\xb0\xe6\xb4\x47\xcc\x15\x08\xb9\x51\x17\xcc\xc0\x91\x09\x1e\x0c\x09\x39\x35\x73\x60\x4f\xb1\xd5\xb4\x3e\x6f\x73\x6c\xe6\x22\xc7\x52\x5f\x7e\x7f\x75\x55\xad\xfa\x78\xd1\xcb\x6d\x66\xfc\xba\x0b\x80\xf9\x8e\x92\x3a\x2a\x6a\x6f\x82\xcf\x2a\xe8\x28\xa9\x5c\x41\x16\x9d\x40\xad\xe0\x12\x95\x0d\x31\x24\x6f\xf1\x27\x68\x6e\xba\x61\xaf\x65\x6d\x60\x0d\xe0\x58\xa3\x89\x0b\x7b\x26\x36\x79\x12\x3f\x55\x83\x88\x88\xa2\x3c\xb5\x1d\xf0\x59\xb9\x26\xe8\x6b\xd6\x25\xff\xc7\xc2\xae\x70\x32\x87\xae\xee\x1a\x38\xe8\xa1\x01\x74\x31\x39\xda\x6c\x57\x9a\x4b\x25\x76\x67\xdd\xc1\xc6\x37\xb5\x82\x3f\x55\x50\xed\x8a\xa7\xdb\xda\xfb\x60\x9c\xf1\xe1\xf6\x9e\xca\x20\xeb\x43\x7e\x92\xc5\x07\x45\x2d\x1d\x3e\xca\x6b\x91\xed\x77\xbf\xac\xd6\x99\xe4\xd9\xa0\x6f\x1b\x97\x38\xfd\x66\x7a\xfb\x1c\xd5\x5a\x82\x25\x7a\x21\xc1\x3d\x50\xa7\x57\xf0\x66\xed\x43\x3b\x44\xcd\xd8\x74\x8f\xcd\x9a\x82\xf6\x7a\x46\xee\x3f\x38\xae\x51\xeb\xf9\xf8\xc9\x96\x49\xbd\x4d\x07\x70\xb0\xfa\x61\x0f\xda\xea\x7e\x02\x32\x34\xcb\x76\x4c\xa7\x7e\xd8\x82\xff\x9b\xe8\x60\xb6\xe9\x7c\x74\x49\xb3\x72\x78\x30\x61\xe3\xf9\xe3\x5b\x91\x90\x59\xd3\x7e\x00\x03\x76\xe1\xce\x11\xfa\xaf\x00\x04\x7e\xad\xe7\xc6\xd2\x05\x2a\xb7\xe1\x4e\x43\xf7\xb0\xf7\xd8\xd6\xef\x30\x8b\x30\xc8\x69\x4d\xfc\x84\x82\xec\x06\x2d\x1a\x22\x7b\x01\x77\x13\x49\x76\x6e\x41\x49\xb0\x8b\xfb\xf0\x54\x7a\x68\x48\x1e\xa1\xc0\xda\x72\x0c\xfc\xf7\x09\x98\x67\xd1\x18\x81\x69\x25\xa9\xf0\x3a\x5b\x98\x7f\xe7\x8c\x74\x2d\x70\xbc\x25\x77\xd3\x5d\x72\xdf\x76\xd7\xbe\x69\x3a\xd6\x25\x17\xf9\x61\xba\xcb\x41\x3e\x5d\xcb\xf3\x37\xeb\x19\xa1\xff\x20\xb7\xf6\xf4\x2d\x75\xd7\xfc\xd4\xb7\xa0\xbb\xca\x3c\xc4\xfe\x03\xaf\x78\x59\x6f\x91\xd8\x46\x58\x5c\xac\x7f\x42\xb5\x94\x5b\x45\xf2\x25\xc1\x9f\xfb\x25\x61\x5c\x20\x5c\x0f\x51\x55\xd1\xc5\x10\x52\x1a\xd0\x56\x61\x29\x11\xfc\xfc\xa6\x21\x49\x57\xb3\x62\xf8\x4d\x94\xd6\xe3\x36\x65\x89\xaa\x9a\x45\xf9\xc0\xe8\x9a\xe6\x24\x74\x22\x57\xed\x52\xc8\x5b\x07\xa3\xf0\xc8\xab\x07\xd8\x75\xde\x0b\xa0\xb7\xb8\x6b\xda\xf2\x1d\xf5\x06\xdc\x3a\xf8\x7d\xb7\xaa\x7c\x54\xe4\x59\xd7\x12\x5b\xba\x5a\x41\xbd\xe4\xef\x35\xe6\x11\xa8\x89\x51\x2b\xf6\x30\xbb\x8a\x7e\xbb\x70\x94\x35\x02\xce\x3b\x84\xe4\x12\x1e\x8b\x6f\xc2\xbe\xa9\x59\xda\x55\x77\xb8\x76\xf6\xc1\xf3\x15\x66\x24\x78\x13\xe2\x89\x67\x89\xb7\x17\xb2\x40\x80\x2c\xe8\xa4\xbd\xd8\x88\x8f\x1d\x24\x4e\x05\xdf\x9c\x19\xaf\x6f\xed\x2a\x26\xa9\x77\xb3\x54\xa2\x88\x61\xd5\x42\xa0\xc9\x53\x38\xda\x6d\xf5\x1d\xdd\xa6\x77\x95\x66\x6c\xde\xed\xb0\xba\xe0\x23\x00\xde\xaa\xcd\x75\x4d\xbb\x15\x5c\xd6\x2e\xc0\x9a\x05\x21\xeb\xd6\xbb\x8a\x00\xb3\xae\x54\xd7\x5c\x18\x34\xe0\x0b\x86\x97\xd5\x89\xf4\xe7\x74\x56\x0f\x2b\x9f\xaf\xf3\x00\x23\x0e\x12\x75\x11\x88\xd8\x95\xf1\xfb\xe2\xa5\x2e\x5e\x19\x7d\xcb\xce\xa4\xaa\xc2\x21\xc2\x59\xf9\x08\x29\xcc\x2e\x1f\xfa\x95\x99\x09\x18\x77\x53\x56\x7f\xe7\x1c\x57\xad\x20\xf2\x2b\xe4\xaf\x35\xc3\xba\x75\x7e\x57\x00\x79\x9d\xf8\xeb\x7c\x92\xf3\x82\x1a\xb9\xab\x6f\x2e\x38\xdf\x6c\x35\x14\x42\xc7\xbd\xcb\x53\x11\x14\x37\x43\xad\x4e\x7b\x1f\xab\x70\xb2\x79\x0e\xb0\xac\x2d\xf8\x4b\x8b\x48\x05\xae\x03\x4c\x6b\x6c\x35\xb1\xd3\x09\xa3\xe1\x68\xad\x96\xe5\xbb\x
83\xf1\x6a\xe7\x32\x7a\x81\xfb\x4e\x97\xdf\x59\x38\x30\x59\x26\x23\xb0\x43\xd7\x80\x97\x3a\xe9\xcd\x94\x02\x1b\x97\xda\xa3\x47\x63\x66\xf7\x69\xd2\x51\xd0\xb1\xcf\x0f\x0e\x01\xde\x5c\x1d\x1d\x08\x30\x40\x69\x4d\x2c\x0c\x8a\xd9\x72\x0a\xeb\x17\xd1\xab\x7a\xb7\xd3\x28\x6f\x47\x97\x24\x1b\xd8\x25\x41\x10\xf0\xbc\x53\xe1\xaa\x0b\x7c\x1c\x90\xaa\xdc\x45\xfa\x7b\x2a\x63\x2f\xbd\xad\x49\x00\xf0\x25\x6c\x0e\x9a\xa6\x17\xfe\xdc\x48\xcf\x1c\xd4\x55\x86\x7e\x00\xb8\x2a\xca\x22\xba\x12\x6a\x18\x98\x0d\xaf\x22\x49\xd8\xc9\x83\xad\x9c\xcf\x69\xd5\x82\xaf\x74\x61\x18\x2e\xde\x14\x7c\xeb\x7a\x1a\x3b\x75\xe5\x81\x29\xaf\xae\xb3\xad\x92\xd7\x21\xc3\xd7\xcd\x58\xfd\x22\x53\x75\x0f\xb0\x24\x6d\x9e\xca\x81\x04\x6c\x79\x0c\xad\xb1\xc4\x67\x05\xb1\x99\x1c\xce\x4f\x22\x7f\x25\x0c\xd5\x25\x87\x67\xca\xdb\x8c\x18\xcc\x38\x52\xab\xe1\x05\x01\xdd\x07\xc5\x48\x78\x9a\xc7\x1e\xd6\x50\x6a\x4f\x96\x9b\x73\x59\x73\x83\x9e\xee\x5d\x41\x1e\xab\x40\xa9\x7b\x62\xfc\xb6\x3b\x3f\x36\x0f\x30\x19\x54\x4c\xc1\xf0\xdc\xe0\x3f\x0e\x64\x26\x72\x53\xd4\x4b\x48\xeb\x1b\x3a\x4c\xdf\xbe\xc2\xfa\xaf\x84\xac\xbc\x75\xa0\xe4\x17\xf7\x64\xeb\x19\x81\x76\xb1\x06\x61\xbc\x10\x6e\x0b\x66\x6b\x59\xc8\x77\x1d\xe7\x7a\xc8\xd0\x10\xb7\x80\xad\x81\x0d\x05\xad\xf5\xb0\x16\x34\x53\xa7\xbd\x68\xad\x37\xae\x3c\x54\x0e\x49\xc0\x9a\xe9\xf3\xc7\x73\xc8\x43\x5e\xb9\xb5\x85\x6f\x6e\x5a\xe8\x4f\xb0\x66\xe8\x54\xba\xa2\xdf\x56\x5c\x2e\xe8\xa4\xf8\xd7\xc6\xca\x88\x8f\xf1\x43\xb6\xd6\xbf\x7a\x16\xd0\x40\x42\xa8\xa6\xdf\x00\x7c\xfb\x31\x9d\xb5\xd5\xf9\x97\x8f\x24\xdb\x69\x69\x3e\xd1\xc7\x95\x89\x81\xfe\x0d\x59\x33\x7f\x5c\x65\x9b\x00\x04\x65\xf7\xca\x4a\x89\x84\xdb\x19\x4a\xdd\xbe\xcb\x78\x5d\x40\x7d\x8d\xa9\x9e\xaf\x79\xae\xfd\x0d\x87\x9d\x4b\x43\xe2\xc2\x45\x2a\xaf\x25\xec\x51\x73\x41\x9f\x1a\x18\x7c\x65\x85\x5e\x9b\x4d\xe5\xa9\xc3\xb4\xb7\xca\x0d\xc4\xd5\xf9\x79\x15\xbe\x36\x76\x84\x5c\x16\xb1\x5d\x2d\xf6\x22\x8d\x02\x6f\xd8\xdb\xbb\x7b\x0c\x45\x1a\x91\x8b\xc0\x45\x01\xb1\x08\xdf\x5e\x7a\x60\x60\xa7\xf5\xf8\x12\xd2\xa7\x31\x1d\xea\x76\x43\x59\x7f\x22\xcc\x11\x6d\xa0\xfe\x03\xd6\xea\x59\xfa\x3c\x42\x17\xc5\xfa\x6a\x6c\x56\x92\x90\x69\xf3\x48\xfa\xfd\x4d\x3f\x06\xa0\xac\x50\xe5\x90\xbb\xdf\x8a\x0d\x52\x67\xdf\xa8\x4a\x84\xbd\x5d\xf8\x7d\x17\x45\xb6\xf3\x14\x90\xc4\x14\x2a\x0c\xc7\x7f\x70\x58\xdb\x1f\xca\xdf\xc2\xeb\xfe\x5d\x08\x12\xf6\xa7\xeb\x8f\x1d\x0a\x9b\x3c\x83\xf5\xf4\x67\xdd\x92\x4a\x4a\xd6\x5f\x23\x99\xfd\x41\x6a\xcd\xdb\xb6\x7f\xd9\x1a\x81\x7c\x2d\xfe\xbc\x7d\xfe\x76\xab\x9c\x18\xc1\xf5\xad\xa7\x95\x4f\xe6\x1f\x48\xc2\xbe\x19\x5c\x5d\x9b\xbe\x67\xba\x8b\xdf\x8d\x02\xf2\xce\x87\x4b\xdc\xbe\x31\x88\xe1\xd3\xd1\x17\xc8\x76\x80\x53\x18\x98\xa6\x77\xf5\xc9\x6b\xe3\x84\x44\xd7\xa4\x28\x9b\x77\x9a\x86\xc4\xe5\x18\x95\xf0\xfe\x1e\xdd\x33\xf0\xcf\xaa\x19\xa6\xbd\xbc\x5e\xb5\x7a\xe5\xfa\x82\xb7\xe5\x95\x18\x3f\xfa\xda\x9b\xc2\x0f\xa8\xad\x83\x9f\xc0\xab\xfa\xc0\x8e\x80\x9d\x04\xed\xbf\xe8\xf4\x64\xd5\x47\xb2\x2b\x0d\xcf\xee\xc1\xf4\xe4\x40\x23\xcc\x68\xe7\xda\x2e\x49\xd0\xb4\x16\x8e\x83\x5e\xb5\x36\x3d\xb1\x2f\x24\x32\x72\xb1\xd9\x1d\x34\x5a\xc7\x3e\x61\xa7\xb6\x3e\x07\x5b\xb5\x42\xd5\x8f\x68\xd0\x3b\x9c\xc7\x93\xbf\x78\x27\x5e\xe5\x70\x94\x67\x11\xa5\x35\x08\xaa\x08\x54\xe4\x9c\x75\x1c\x62\x2d\x60\x95\x88\x66\x81\x5d\x5f\x80\xbc\x79\x01\x01\xde\x34\xce\x58\x56\x61\x19\x2f\x77\xb2\x3d\x65\x41\xbc\x56\x47\x63\xed\x6b\x2a\x4a\x1d\x2a\x59\x1f\x8f\xad\x2e\x2b\xb8\xfc\x08\xb0\xaf\x17\x77\x26\x60\x2f\x7a\xc8\xe2\xe2\x4e\xef\xb7\x6f\x66\x6a\xb9\xb8\x4e\x7a\xf5\x69\xf9\x6b\xe8\x56\xe6\x93\x90\xf7\x7a\x86\xf5\xe0\xf7\xe9\x05\xc0\xb5\x6e\xe1\x54\xe2\x67\x4a\xee\x3a\x09\xf6\
xda\x48\xda\x85\xdd\x69\x6e\x56\xf7\xf6\x85\x10\xa0\x30\xd3\xdf\x1f\x81\xa8\xd6\x17\xd6\xbf\x24\x40\x2d\xa3\x15\x1c\x17\xdb\x68\xb1\xdf\xcd\xdc\xbf\x04\x8b\xf5\xed\xe4\x7f\xd1\xb9\x79\x65\xf5\x33\x61\x7c\x11\x23\x7b\x91\x67\x9b\x43\xae\x17\x4c\x59\x89\x39\x60\x61\x92\x70\x73\x1b\x34\x7a\x49\xab\xfc\x95\xbb\x8d\x09\x3c\x56\x4f\xb7\x94\xbd\x17\x44\x09\x7d\xea\x6a\xfb\x82\xd2\xbd\xf8\x69\xa6\xe5\x55\xc4\x2b\x35\x97\xfd\x4b\x34\x64\x4c\xb3\xfc\xd0\x66\x77\x12\x84\x79\xa5\x59\x9e\x4a\xa0\xc3\xba\x24\x99\xe8\x57\x2a\xe2\x9e\x83\x16\x8b\x7f\x5f\x00\xbd\xca\xd1\xf9\xeb\xba\x25\x44\xc2\xee\x0a\xd1\xfd\x65\xa7\xce\x26\xf0\x31\xb8\x35\xc3\x8d\xbe\x93\xf6\xfa\xeb\x6d\xe3\xdf\x12\x70\x05\x07\xc4\x5e\x6e\x6c\x2c\xe3\xb3\x57\xc9\x83\x1d\x72\x1d\xd3\xa2\xe5\x82\xeb\xf0\x0b\x6f\xc6\xb7\xac\xe9\x7d\x1c\xb5\x04\xf2\x35\xed\xea\x1e\xff\xaa\x1b\x6a\x3d\xf4\xeb\xb7\x68\xd6\x2e\x70\xeb\x3e\x5e\x92\xb2\xf2\xd1\xdb\x86\x71\x85\xdd\x80\xb0\xf9\xef\x42\xb8\x3d\x0b\x9e\x20\xf4\x9b\x67\x64\x9a\x65\xce\x9b\x39\xa0\x05\xa3\x2f\xf6\x85\x61\xc5\x1e\x88\x58\x7f\xf0\x7e\xad\xa9\x9b\x23\xe4\x06\x12\x73\x81\xb5\xb5\x06\x6b\x0f\x5c\x3b\x4f\x2c\x6d\x3a\x2f\xbf\xb9\x93\xf3\x8b\x7f\xed\x63\xca\xe6\x30\xdc\x1a\x83\x3e\x15\xb0\x43\xa9\x0d\xa0\x76\x85\x31\xfd\xda\x37\xe6\x5a\xb8\x40\xdf\xab\x3a\xb9\xfe\xba\xfc\x2c\x8a\xa4\xbf\x8e\xcb\xbf\x11\xd6\x11\xdb\x57\x7f\x27\x2f\x81\x03\x70\xf6\xc4\x79\x5c\xab\xd7\xce\xbc\x18\x60\xc8\x7a\xeb\x58\x87\x7c\xad\xbf\x60\x68\x37\x00\xea\x19\xc3\x96\x66\x3a\x89\x53\xd9\x71\x48\x67\x8b\x7a\xd5\xe5\xc9\xaf\xa0\xb8\x1e\xfd\x1e\x34\x81\x6b\xdd\xf3\xc7\x06\x80\x0c\xdc\x9f\xe9\xfe\x6d\xbc\xa5\x92\xae\xd5\xb9\x11\x6d\xbf\x88\x9e\x76\x02\xed\xc5\xbd\x55\x40\x06\xf5\xdb\xea\xc1\x6c\x2b\x0b\x0b\x55\x7b\xae\x75\x2e\x4d\x27\x1e\x48\xe9\x60\xd2\x06\x6b\xd7\xfa\x27\x76\xf0\xf2\xcb\x39\x9e\x16\x7d\x91\x76\x30\x4e\x05\xde\xd3\xcc\x42\xef\x1e\xc5\x6b\x7d\x04\x5e\xc9\xcc\xe4\x79\xca\x6d\x85\xc1\x73\xe7\x3a\xfd\x6a\xf7\x13\x2c\x9c\xb8\xa6\x2a\xe6\x28\x34\xe2\x49\x94\x9a\xad\x78\x00\x1d\x2d\x5c\xc5\x9b\xa3\x19\x52\x1c\x28\x9a\x18\x42\x02\xc3\x5b\x7c\x8a\x6a\xcb\x77\x0f\x06\x9a\x82\x67\x5b\x00\x6d\xc0\xa0\xe2\xb2\x8f\x02\x57\xc1\x6e\x6d\x93\xbe\xc4\xc5\x81\xb6\x12\xb8\xd2\x51\x24\xe4\x76\xfd\x94\xd7\xb1\xcf\xc4\x3d\xc6\x78\xad\xd1\x9e\x64\xc1\x03\x92\xbe\x9c\x8b\xab\xb7\xe4\x52\x02\xdc\x35\xbd\xdc\x7d\x74\xa9\x38\x92\xfb\x6d\x53\x9f\x3c\xe8\x7f\x01\xf5\xcb\x2f\x0f\x01\x9b\x8e\xb2\xcc\x1f\xe6\x40\x93\x9d\x2d\x33\xd6\x88\xf7\x74\x5b\xd3\x31\xd6\x68\x43\x97\xb4\x71\xcd\x4f\x50\x3e\xb8\x19\xeb\x8c\xce\xac\x55\xcf\x96\x8c\x09\x2b\x29\x98\x58\xe1\xbf\xa7\xdd\x74\x85\x99\x9f\xd7\x82\x01\x21\xc1\x64\x2f\xe5\xab\x9c\x8c\x13\x83\xb5\x33\x69\x18\xe3\x95\x4f\xcd\xb9\x3a\xe4\xe9\xf1\xe4\x30\x24\x34\xd3\x80\xbb\x2b\x11\xd0\x0b\x5f\x8a\x92\xfa\xad\x20\xdd\x0e\x00\x78\x4b\xa2\x90\x0b\x6c\x4e\x12\x24\x6a\x1b\x39\xd5\xe3\xc6\x1a\xf6\xd7\xb5\x2f\xb8\x77\xf1\x78\xb9\x45\xeb\xec\x02\x3b\x71\xc6\x6f\xeb\x71\x33\x08\xbd\x18\x22\x1d\x2c\x28\xc4\xdd\x0b\x0f\xba\xd7\x68\x5b\xa3\xa1\xfd\x00\xb8\x88\xae\xac\x17\xf3\x1a\x1b\xff\x1f\xa7\xcb\xa6\x2d\x96\xad\x91\x83\xcb\x1a\x88\x5f\xc5\x11\x9a\xf5\x48\xf3\x48\x28\x2d\x72\xdb\xb3\xd8\xc2\x9e\x05\xed\x7b\xf6\xbc\x3a\xef\xce\x3a\xf8\xb7\x70\xe6\x19\xc1\x36\x34\x79\x20\x15\x38\x32\xb6\x6b\x4f\x66\xae\xcb\x2a\x73\x98\x81\x13\x86\x87\xcd\xab\xea\x93\x60\xfd\x53\x8d\xb2\x13\xef\x08\xa8\xbf\x6e\xc3\x37\x42\x77\x87\xf7\xd5\x79\xc1\x2a\xe3\xed\xce\x3a\xb2\x48\x37\xcb\x59\x1f\xd7\xce\x61\x12\x47\x04\xc7\x21\x85\x57\xf4\xac\xdd\x7e\x11\x2b\x0c\xd7\x85\xe6\x25\x34\xc5\xaa\x3f\x10\x95\x19\xf7\x8a\x48\x72\x15\x52\x2e
\x24\xb9\x0b\x50\x53\xdf\x9a\x9e\x51\xd1\x9e\x1e\x1a\xed\x8d\x9e\xb3\xc8\x6d\x76\x16\x88\xfd\xf0\x58\x27\x6a\xe4\x9f\x71\xe7\xa6\x6b\x1c\xdd\x8f\x4e\xca\xb0\x0e\x12\x03\x0d\x1d\x9f\x68\x33\x73\x18\xaa\x65\x00\x19\x7e\xf0\x5a\x4f\xd5\x69\x67\x9d\x5e\x1f\x08\x01\x4c\x8a\x3c\x47\x8b\x59\xd4\x49\x12\x88\x69\xd9\x91\xf0\x57\xfb\x02\xcf\x89\x38\x5f\x04\xa8\x70\x03\x5e\xbf\x55\x0d\x94\x9d\x1e\xe6\x74\x49\xdd\xfc\x74\x46\x42\x68\x16\x11\x35\x4c\x23\x1f\xfc\x3a\x03\xb5\x86\xcb\x7c\x0a\x22\xbc\x5d\x7e\xe3\xf3\x7a\xc5\xd6\xed\x2b\xd4\x0b\x5e\x1c\x63\x35\xb0\xf2\xae\x81\x4d\x14\xb4\xa9\x96\xfc\x04\x50\x0b\xf5\x78\xa3\xf4\x20\x44\x13\xa6\x4f\xda\x1d\x8d\x81\xf6\x33\xfd\x83\x56\xb8\xf7\x04\x60\x73\x3b\x9e\x84\x27\xc0\xaf\x55\xc1\xe2\xbe\x9d\x10\x11\xd2\x9e\x12\x02\x98\xce\x30\x13\xc4\xc5\x68\x0f\x9c\x7c\x73\x8a\xc1\x7a\x01\xf3\xe3\xb6\x5a\x25\xf0\xd6\x34\xc0\xc2\x84\x55\xda\xbb\xb0\x5c\x5c\x0d\x09\x6c\xda\x4f\x47\x1d\x1c\x7f\x6a\x4e\x0a\x51\x41\xcb\x36\xe0\xd0\xc9\x23\xf8\x79\xa5\x0e\x8c\xb0\x33\x13\xf8\x78\xab\x61\x71\xbc\x81\xcd\x18\x1a\x94\x18\xb0\x9c\x43\xe6\xca\x70\xef\xe3\xc5\xfc\x0c\x9b\x02\x2b\x00\xcb\xda\x4b\x3f\xea\x25\xc0\x1e\x13\x67\x5f\xbf\xbe\x9c\x80\x9d\xd7\xfe\x0b\xad\x19\xc5\x8a\x5a\x71\x61\x23\x0e\x2c\x61\x0f\x0f\xd5\xec\xfc\x60\xba\x06\xd0\xc2\xb1\x04\x21\xc3\x6d\x55\x72\x86\xe3\x0d\x17\x5c\xf8\x60\x9b\xca\x4b\x87\xa4\x2a\xc9\x19\x26\x66\xf2\x72\xfa\xca\xd6\x4c\x3b\xf6\x20\x0e\xe3\xe1\xa1\xd1\x45\x23\xe5\x70\x71\xd6\x7e\x2e\xde\xc2\xb5\x07\x5a\x3c\x68\x34\xea\x83\x1d\x3c\x06\xf8\xf4\x7a\x3c\x4a\x5f\x84\x33\x3a\x40\x07\xe0\xce\x03\x4d\x79\xb5\xa0\x0b\x07\x0e\x73\x6a\x5f\x62\xd5\xaa\xc7\xd5\xe1\xe9\x88\xb3\xe9\xa8\x1c\x1a\x44\x5e\x2b\x17\x64\x85\xdd\x3f\x07\x49\x14\xed\x24\xc4\x76\x88\x3f\x3c\x5d\xc2\x40\x9f\x04\xc2\xe2\x03\x14\x73\x90\x0d\x33\xc8\x1a\x93\xda\xed\x88\xed\x0a\x5e\x2a\x3e\x2f\x23\x32\x24\x74\x48\x56\xb6\x7e\x8b\x9c\x08\x18\xf9\x7e\xb7\x1f\x92\x52\x24\x82\x44\xfc\xdf\x42\xfc\x66\x5f\x99\xf3\x06\xbb\x9e\x20\x86\xc6\xc6\xaa\x5d\x03\x6f\xab\x59\x95\x80\x54\x3a\x1e\x40\x11\x9c\xdb\xab\xaa\xb0\x96\x4f\x62\xf1\x23\x5e\x5c\xd0\x88\x63\xc3\xf6\x56\x05\x5a\x14\x15\x46\xb5\x36\x45\xfb\x89\x47\x92\xb7\xa5\x0a\xba\x69\xf6\x94\x82\x59\x9c\x85\x5f\x25\xa0\x58\x17\xea\x86\x09\x9f\xde\x91\xe2\x8b\x70\x46\x16\x2e\xe2\x34\xdb\xf6\xd3\xa2\x4e\x15\x86\x34\xf6\x50\x33\x0f\xed\xe8\x46\x90\x86\xd6\x0f\x91\x53\x30\x6f\xc0\xd1\xe1\xfc\xb5\x41\x56\xeb\xea\x7b\x01\x53\x30\xc0\xa2\xc1\xe3\x05\x5d\x15\xc7\x18\xf3\xdc\xac\xbb\xeb\xb5\x83\x1b\x99\x19\x99\x66\x3f\xf3\xe1\x62\x31\xad\xa4\x57\x00\xa1\x63\xd1\xce\x76\x7a\x31\x1f\x4f\x67\x4e\x71\x7b\x92\xd1\x3f\x36\x4a\x56\x1e\x1e\x79\xcb\x75\x19\x72\x8c\xa9\x52\x6d\xcc\x84\x9f\xf6\x52\x5a\x13\xb0\xc6\x82\xb0\x83\x6c\xcc\x37\x77\x4b\x7b\xe3\x28\x27\xb1\x4c\x37\xe7\x60\xf9\x61\xd7\x77\x97\x05\x3e\x46\x8b\xf6\xfc\x40\x1e\x0b\x90\xe1\xb6\x3a\xcd\x6e\xab\x2f\x7d\x54\xc9\xfc\x1f\x08\x15\x50\x3a\xee\x51\x39\xe6\x71\x38\x2a\x8c\x86\xbd\x04\x1c\x35\x23\x34\x59\x3c\x56\x7d\xea\xed\xd1\x19\xef\x06\x28\x70\xc8\xdf\x02\x23\xcf\x18\x66\xe5\x01\xcc\x65\xae\xff\xbb\x78\xbc\xac\xa4\x40\x45\x0e\x64\x6b\xd5\x61\xf6\x45\x59\x10\x5e\xea\xec\xcd\xe2\x91\x44\x42\x35\x00\xc8\x87\xa6\x01\xe8\x85\xe9\xed\xb9\x63\x45\x7b\x37\x16\x6c\x6d\xdf\x63\x6d\x88\x9d\x63\x4d\xd7\xa1\x58\x1c\x00\x0b\x35\x17\x14\x64\x2d\x6b\x66\xe4\x15\x98\x6d\xcd\x7a\x57\x36\x38\x88\x50\xa0\x35\x07\x56\x75\x56\xaa\x8e\x19\x11\xa1\x02\x4a\x32\x9f\x98\xb9\x2a\x4c\xfd\xb0\x76\x97\xdc\xf3\xc7\x1c\x70\xd0\x63\xce\x57\xc4\x42\x03\x92\xfc\x1b\x7a\xe1\xc7\x1c\xe3\xe4\x18\x2f\x5e\x1b\x0c\xb9\x34\x2e\xfb\xd3\xda\x66\xe1\xd
b\x82\xbe\x1e\x88\x81\x0e\x4d\x3b\x9e\x69\x8c\xc0\x7b\xe7\x23\xfb\xb4\x5e\xff\x7c\xda\x08\xef\x65\xfd\xe1\x21\x14\x6f\x18\x0b\x44\x6d\xaf\xe8\x4b\x06\xbc\xa7\xda\x2d\x6a\x8e\x3b\xff\xf5\x28\x70\xbb\xcd\x4c\x89\x40\x21\x40\xca\xe3\x45\xde\xb7\x63\x52\x04\xf0\x36\x1b\x0a\x3b\x39\x16\x59\x6f\x6c\x75\xa1\x62\xaf\x3d\x9b\xfc\x6d\x40\x80\xb0\x9b\xd3\x3b\xea\x9b\x29\x6a\xca\xe9\x66\xd3\x4f\x27\x69\xbf\x4d\xa6\xc7\xac\x92\x49\x6e\x9e\x5a\x6c\x83\x9b\x3e\x20\xcc\xc2\xf6\x6c\x12\x8a\xc7\xa4\x21\x00\x17\x96\xe5\x77\xc6\xbd\x66\x27\xad\x33\x44\xb6\x61\x7b\x18\xbe\xec\x2e\x71\xcb\x65\x8b\xce\xa5\xa7\xd9\xc4\x2d\x99\xa0\xb4\x42\x23\xa4\xe7\x61\x86\x68\x92\xd9\x85\x7b\xcd\x03\x85\xd5\x0e\xa4\xda\x71\xf7\x99\x78\x06\x80\x9c\x15\x8b\x73\x30\xb2\x44\xa6\xb5\xe2\xb8\x69\xab\x32\xe9\x7a\x26\xca\xf9\x94\xf9\x88\x0b\x96\xbe\xed\xe1\x26\x68\xf9\x9f\xe4\xd2\x88\x6b\xe6\x96\x72\xd4\x4e\x99\x0f\x08\xe3\x6f\xd5\x67\x38\xe0\x1b\xd7\x97\x32\xef\x1a\x1b\x39\x32\xe4\x6c\x90\x00\xc0\x47\x24\x47\x2b\x06\xc1\xf0\x59\x96\xea\x35\x84\xe7\x73\x6a\x15\x11\xdf\xd9\xbb\x85\x08\x82\x46\x8b\xd2\x38\x3c\x42\xfd\x68\x56\x44\x5e\x1d\xb9\xac\x0f\xf9\x3b\x8f\x1c\x07\xdd\xb6\x40\xa7\xde\xa1\x38\xf9\xc3\x23\x35\xa3\x1a\xca\x14\xfa\xa6\x63\x3d\x4a\x6e\x22\x40\x4c\x2b\xf4\x76\x71\xc4\xb0\x4e\x71\x25\x52\x40\x18\xcc\x9c\xe3\x92\x21\x8c\x4b\x9f\xc2\xfe\x6a\xc3\xbb\x66\x7b\x90\xde\x81\x7c\x87\x78\xd2\xa4\x31\x3f\xd0\x2e\xcb\xc2\xae\x31\xe2\x8e\xf3\xca\x1a\xd1\x8b\x76\xb3\x54\x2b\xf4\xb6\x3f\xce\xb3\x8b\x13\x10\x67\xb2\xa6\x27\xe5\x60\x1c\xfd\xb4\x8b\x61\x0b\xf6\x34\xb8\x63\xe4\x67\xf7\xf7\x43\x8b\x28\x26\x1d\x42\x6a\xd5\x88\x87\xaa\xac\x89\x51\x06\x07\xf4\x86\xb9\x39\x3c\x2d\xfa\x48\x27\x09\x51\x99\x61\x87\xca\xeb\xd5\x46\x22\x8f\xb4\x6b\xa3\x70\x59\xf9\x46\x3c\xfb\x7d\x24\x62\xde\xda\xd4\x55\x46\xc7\x1f\x67\xe1\xab\x9d\x2c\x4e\xa3\x88\x0c\xfd\xc8\x3d\x55\x69\xbc\x34\x70\x40\xcc\x0c\x09\x04\x03\x2d\x5d\xc0\xe2\x50\xc5\x92\x3a\xe5\xeb\x98\xa5\xf7\x01\xc4\x47\x36\x75\x76\x04\xc2\xe4\xc2\xf6\xba\x75\xc2\x0e\x45\x9f\x01\x32\x40\xf6\xb1\xa3\xa5\x71\x32\xf0\x11\x04\x00\xb7\x3a\x53\x7c\x75\xf3\xa0\xaf\x01\xc8\x52\x2e\x76\x04\xff\x59\x42\xbc\x60\x6e\x39\x06\xb6\x81\xa2\x8f\x25\x4a\xcc\x27\xc3\xa8\xf7\xe9\x0d\x5f\xa0\xa2\xf9\x8c\xef\x35\x40\xd1\xf6\x35\xe7\x17\x75\x77\xd0\xa3\x58\xb5\xc1\xf4\x47\x8a\xc0\x1e\xe3\xf4\x76\xa6\x5b\xa5\x6a\xf5\xee\x7d\x56\x35\x43\xec\xf9\xf1\x95\x4a\xb4\x72\xed\xcd\xd7\x68\x46\x53\x61\xdc\x4b\x27\xd9\x5b\x9c\x9a\xce\x61\x7a\x9f\x13\x47\xb7\xed\x19\x22\xe4\xf7\x25\x8a\x1e\x48\xd1\xd5\x07\xcd\xb2\x4a\xec\x39\x82\xa5\xc0\x2d\x13\xa5\xe4\xae\x4d\x40\xa3\x0f\x97\x46\xd7\xba\xd6\x7a\x8f\xa5\x08\x8d\xbe\x4b\x8a\x52\x3d\xa2\x48\x78\x6f\x46\xee\x9c\x02\x37\x01\x56\xdd\xf1\xb8\x69\x99\xe6\x6a\x41\x77\x94\xd8\xec\xcc\xc4\xa5\xc8\xb2\xbf\x09\xd1\x77\xd1\x5e\xf3\xd8\xe9\x07\x03\xcf\x0a\x73\xe9\x03\x80\x0c\x38\xd6\x6d\xcf\x73\xb3\x99\x94\x01\xa3\xdc\x58\xe4\xba\x8f\xe9\x43\x34\x7f\x57\xf0\x3a\xf7\x08\xd2\xd9\xf9\x41\xd1\xb6\x66\xe6\x1f\x8d\x14\x07\xe5\xe2\x94\x7d\x50\x9a\x17\xb2\x88\xb3\xdc\xa6\x80\x4e\x0b\x8a\x8c\x18\x0f\x9c\x9f\xb8\xc4\xd9\x3d\xd0\xb4\x20\xa5\xf8\x75\x33\xa9\xdd\x7c\xde\x19\x30\x7b\x06\x00\x20\xf0\xd4\xe3\x1d\x0a\x49\xe0\x74\x40\xac\x47\xf5\x56\xad\xab\x77\xe5\xb8\xb4\x0f\x7b\x69\xee\xf8\x58\x94\xb3\x37\xc5\x09\x44\xce\x48\x16\x64\x17\x1c\x1d\xc4\x35\xcb\xb8\x41\xd1\xf9\x5f\x5a\x62\x7e\x3e\x34\x1c\x6f\xcf\xd2\x1a\x44\xee\xc1\x9e\xac\x0f\x66\xb5\x2d\xd1\xe9\x66\x0e\x7a\x8c\xf3\xd0\xd7\x80\x93\xf7\x77\x87\x1b\xbc\x5c\x1f\x2c\x72\x70\x2e\x94\xae\x1a\x18\xbc\x2f\xe0\xf9\xa6\x8f\x62\x69\x67\xd9\x07\xfe\x97\x40\x6b\xce\xec\x
5a\xd8\x85\xf3\xac\x5b\x38\x20\xb2\x2e\xdf\x9a\x16\x31\x68\xc8\xb8\xbe\xaf\x88\x82\x82\x77\x4a\x54\xbb\xf5\xfd\x64\x57\x7b\x4f\xaf\x39\x20\xe8\x5e\x41\xfa\xfa\xa9\x7d\x43\xf7\xe4\x9f\x80\x9d\x3d\x77\xb8\x96\x31\x04\xc1\x03\xb5\xda\x71\xf7\x78\x80\x0f\xc2\x79\xb2\xa7\x82\x17\xd0\x1e\xc0\x61\xaf\x5a\x6d\x62\xbc\x0c\xa2\xa9\xf9\x6e\x0a\x34\x0e\x5c\x35\x4f\xb0\x75\xd2\x61\x71\x03\x9d\x35\x40\xe1\xfa\x04\xb7\xda\x7b\x07\xce\xab\xbe\x64\xc9\xe1\x6b\xdf\x6b\x4d\xd4\xcf\xa5\x1e\x1d\x8d\xf9\xab\x71\xab\x8f\xdb\xe4\xa1\x5a\xa1\x68\x7c\xc4\xcc\x71\xa1\xef\xc0\x4c\xe4\x2b\x11\xe5\x0a\x89\x63\x4c\xaf\xf3\x5a\x38\xb6\x6c\xe6\xb7\x3f\xfe\x80\x5c\x3f\x84\xfc\xa5\x39\xd0\xcc\x32\x56\x85\xff\x6d\x17\x02\x24\x49\xb5\x16\x2c\xf8\x1a\xbc\xeb\x65\xd3\x86\x01\xf6\xae\x10\x8f\xcb\xec\x9e\x1f\x06\x00\xa1\xeb\x60\x97\xeb\xed\xbc\x5f\x87\x5d\x0f\xb8\xc2\x5e\xca\xd7\x00\xc7\x8a\xdb\x49\x38\x67\xd7\x21\x26\xb3\xb9\x62\x44\x6c\xd7\xec\xf3\x13\x57\x26\xbc\xb5\x7b\xf8\x88\xb7\x66\xc5\x09\x3b\x35\x5e\x75\xfd\xa8\xa5\x36\xc8\xcd\x3f\xe2\xa7\x71\xff\x00\x60\x82\x56\x7d\x7b\x30\xc4\xae\xbe\x77\x34\x94\xf8\xde\x35\x7c\xb5\x96\xeb\x62\xcb\x1d\x89\xf5\x41\xb5\x16\x84\x15\x4a\xa8\x6b\x98\x18\x37\x3a\xc3\xb6\x57\x25\xc0\xbe\xf6\xe2\xfb\x44\xbe\x38\xd9\x29\x54\xd8\x36\xaf\xce\x37\x7d\x09\xb6\x9b\x4e\x40\x02\x76\x18\xeb\xe7\xa5\xc9\x1f\x42\x54\x9b\xc3\xf8\xa3\x26\xda\xf0\xe4\xc0\x69\xb2\xc7\xa1\x5a\x4d\xb0\x9d\x61\x15\xed\x03\x09\x49\x08\x61\xd2\xe9\x91\x98\xbd\x7d\xec\x00\xaf\xfa\x19\x48\x6f\x6b\xdc\xf8\xdb\xe2\x28\xec\x8d\xd8\x4f\xb7\xa3\xb7\x43\x30\xf6\x21\x2b\x48\xd4\xe7\x26\x10\xf5\x41\xc1\x02\xbc\x92\xcd\xd1\xe2\xc9\xb0\x3e\xaa\x0e\x0b\x61\x06\x32\x1f\x76\x17\x49\xda\xad\x7c\x2f\x31\x8d\x75\x88\xaa\xad\x9e\xc9\xbf\x55\xd5\xed\x5b\x9d\xdf\x5a\x72\x7e\x2f\x2d\x4e\x7a\xf3\x3c\xa7\x3e\xac\xc8\xa8\x44\x41\x00\x07\xe4\x97\x08\xa4\x1d\x14\x61\x6f\x24\x3c\x81\xce\xf3\x92\x5d\x73\x7f\x8e\x67\x6b\xf7\xb1\xc9\x02\x16\x36\x52\x85\x37\xda\x2e\xb9\x17\x77\x1a\x45\x3e\x0d\xf3\x4d\x09\x0a\x62\x36\xcd\xa3\x09\x95\xa1\xb0\x38\xdf\x5b\x11\x4e\x36\x43\xd5\xc4\x06\x1c\x91\x8c\x1e\xd4\xc1\x66\x73\xeb\xd6\x31\x4a\x1e\x46\x00\x4c\x36\x1f\xe9\xb2\xe5\x06\x32\x2f\x9b\x16\x5b\x01\x98\xac\x34\x5d\x93\x2b\xc0\xd9\xae\xf7\x2d\xfb\x29\x24\x71\x11\xd9\x74\xf3\x2f\x19\xd7\x9a\x67\x0d\xc9\x99\x49\x85\xd5\xad\x84\xc3\x6a\x2b\xa3\x5a\xf3\xb4\xdc\xc8\x42\x54\x97\x9c\xf9\xe6\x96\xde\xf3\x9a\x19\x7b\xbc\xd9\x25\x0c\x7a\xf6\x7e\x23\xae\xcd\xe4\x4e\x00\xd7\xfe\x01\x06\x2d\x92\x80\x7d\x0e\xb4\xf6\x84\xd6\xb6\x6a\xc0\xfa\x05\x97\x0a\x02\xde\xb2\x90\xbb\x58\xc6\xf7\x44\x22\x6c\x5c\x11\x94\xd0\xbf\x8f\x9f\xdc\x4b\xc5\x23\xb5\xeb\x5f\x17\x3d\x57\x53\x8a\x57\x0f\xfa\x99\x95\xba\x81\xdb\x83\xcf\xfb\xc3\x41\x5d\xeb\x85\xf7\x13\x65\x34\xe0\x7e\x69\xb4\x74\xba\xcd\x5f\x2b\x7d\xfe\xed\x6a\xfb\x55\xb4\xc6\x16\x8b\x3b\x74\xb4\xb2\x4c\x7b\x6e\x9c\xe8\x07\x1b\xe3\xf8\xf1\x03\x72\x60\x78\x35\x9c\xcb\xbd\x7f\x70\xb9\x75\x5b\x1d\x21\xa6\x67\x39\xef\xc8\xd3\x21\x23\x7b\x2f\x75\xa8\xa1\xe8\xed\x54\xee\x35\xf4\x4f\x51\x42\x02\x26\x4d\xe9\x7b\xb9\xbf\x98\x9f\xc1\x0d\xd1\x03\x6c\x1d\x7a\x9b\x6d\x89\xfb\x88\x42\x84\x35\x6e\xf7\x20\xc4\xd6\x88\x48\x01\xf4\xa6\xb9\xbe\x3b\x25\x82\xa1\xd0\xed\x95\x80\x35\x2b\x4c\x4c\xdd\xdc\xb2\x64\x56\x90\x61\x47\x87\x88\x5b\x7b\x3c\x54\x0c\xe2\xb6\x49\x43\x7f\x67\xed\x27\x6f\x51\x49\x78\x4b\x89\x1f\x64\x78\xeb\xb4\xbd\xf4\x7c\x6b\x7a\x73\xf9\xed\x73\xc8\x8f\x5a\xfa\xd7\x7b\x47\x68\x4b\x72\xab\x81\x03\x1e\xd1\xa4\x00\x81\x6b\xe6\x21\x6d\x15\x87\x7b\xfb\xd4\x91\xa8\x69\xa6\x1d\x7d\x09\x77\xfb\x59\x17\x95\x4f\x86\x47\xe8\x72\x43\xfb\xd0\x00\xec\xf6\x04\x6d\xe8\
x48\xf5\x51\xf8\x78\xe4\x51\x64\x8d\x80\x94\x94\x58\xb9\x7d\xe2\xec\x2d\x81\x02\x27\xd7\xd4\xb7\x37\xad\xb9\xa1\xb9\x15\x32\xe3\x46\x0b\x85\xdc\xec\xae\xcd\x49\x27\x14\x81\x8d\x66\x09\xcd\x6e\x86\x82\x2f\x37\x6f\x76\xd1\xc7\x0e\x12\x38\x5e\x13\xd5\x6d\x56\x42\x55\x39\x69\xd7\xf6\xaa\x07\xfd\x3d\x31\xf2\x42\x2c\xf4\x31\x47\xe3\xc3\xea\xcf\xa8\x22\xac\x45\xc4\x27\x1a\x06\x5b\x1a\xf5\x4d\xb4\x75\x44\x03\x3b\x47\x1c\xfa\x9f\x6b\x14\xbe\x82\x21\xce\x87\x1e\xf6\x5e\x14\xb2\xa1\x02\x37\xf1\x0b\x4f\xce\xfb\xfe\x01\xc9\xdb\xb1\x15\x04\x19\x66\xc8\xb2\xe6\x7b\x46\x5b\x0b\x19\x40\x3c\x42\x7b\x16\x4e\xe5\xe1\x99\x69\xaf\x69\xbe\x1a\x7d\x7c\x8e\x4d\x3b\x5a\xbc\xd9\x0f\x85\xdc\x79\xb2\x61\x18\x9c\xf8\xbe\x2e\x31\xc6\x1b\x73\x57\xb4\x59\xe5\x77\x55\x07\x35\xa4\x88\xee\xa8\xf0\x99\xf9\xb4\x47\xd5\xaa\x0f\xb8\x7f\xfe\x00\x22\xe0\x96\x3f\x7a\x41\x69\xfe\x3f\x33\xfc\xee\xd6\xdd\xbb\xbd\x15\x51\xc3\x2c\x7f\x0f\x59\x99\x3b\x35\x3c\x20\x95\xdc\xe9\xe5\x76\xda\x12\x4f\x4e\x3a\x38\x4c\x60\x72\x60\x57\x86\x5c\x4b\x98\xdd\x0f\x7e\x55\xb0\xab\x23\xe9\xfc\x3c\x16\xde\x79\xbb\xac\x1a\xe6\xab\x4f\xa4\x79\xee\x0f\xf6\xf3\x91\x54\x37\xf0\x5d\xaf\x0f\x9d\xfa\x83\xdf\xd8\xda\x61\x0c\x63\x67\xb8\x76\x2b\xf0\xcc\x3a\xe6\xfb\xdb\xd4\x7b\xc7\xf5\xdb\xbe\xab\x3b\xb7\xf2\xbd\xe7\x32\x37\x40\x79\x5e\xbd\x1a\x5b\xdf\x50\x7e\x71\x23\xb5\xff\xb1\xe6\x7a\x49\xa9\xd0\x0c\x4f\x69\xb7\xe9\x1e\x5d\x15\xb3\x95\x38\xbe\x5e\x3b\x18\xd6\xc5\xcd\xa5\x3b\x9e\x97\x66\xcf\x68\x38\xe6\xe3\xb6\x5b\x40\xef\xda\xf2\xf9\xd6\x0b\xda\xe6\x30\x4e\xff\xea\x7d\x52\x72\xd7\xea\xa3\x75\xde\x6a\x74\xac\xb9\xea\x09\x20\x89\xad\x8a\x52\xab\xd4\x4a\x8f\x96\xb1\xbd\x2a\x4d\x05\x53\xef\x08\xac\x75\xaa\xa4\x7d\xd7\x8e\x3d\x21\x6b\x9c\xbf\x78\x8b\xcd\x22\x70\x63\x65\x48\xd9\x5a\x3a\x4d\x34\x78\xbe\xe6\x57\x24\xcb\x99\x49\x4d\xc8\xb0\x13\xb5\x48\x3f\x8b\x6d\xc1\x3e\xc9\xad\xbb\xe6\xdf\x36\xfb\x8a\x59\xd6\x78\x40\x6c\xb9\x9f\x95\x0f\x75\xbf\xea\xee\x0f\xfc\x2a\x14\x9d\x35\x1d\x1c\x01\xcf\x8f\xbd\xa3\xd2\xf9\x40\xac\x72\x9e\xae\xd6\x5b\x23\x20\xc2\x7d\xce\xe9\x63\xdc\xc8\x66\x70\x7f\x48\x08\x3b\x42\x98\x62\x0d\x45\xef\x35\x11\xe6\xb9\x92\x74\xb4\xa6\x87\xbb\xcd\x97\xb7\xf5\x47\xf7\x81\x35\xf2\xf2\x9e\x95\x5b\xb2\xbc\x9d\x93\xfa\xe5\x28\xf4\x32\xdf\x30\xf4\x00\x8c\x8b\x61\x7e\x3a\x52\x81\xb8\x74\x61\x91\xaf\x53\xdf\x1e\x5a\x6e\x1c\x02\x68\x5f\xc7\x33\x68\xe9\x7b\x90\xb5\xcc\x24\x99\x75\xb9\xe6\xd7\x3d\x73\xb6\x56\x40\x3a\x02\x6b\x6c\xf5\x42\x0d\x4e\xf3\xf5\x93\x3d\x93\x74\x89\x81\xd3\xe5\x1a\xe0\xf6\x1a\x63\x59\x87\xac\xab\xb0\x7b\x47\x76\x61\x0d\xb9\x20\x8b\x3b\xcb\xc0\x47\x72\xa3\xbd\x8b\x5a\x6e\x06\xc7\x7e\x96\xea\xd1\xd7\xda\x06\x11\xeb\x7c\x02\x85\x58\x77\x0b\x83\xa3\x34\xd8\x81\x72\xca\x7a\xac\x63\xa6\xce\xbe\x42\x5f\x99\x3b\xd8\xef\x7c\xbc\xa5\x2a\xd2\xc8\xc7\x61\xde\x61\xc4\xea\x78\x03\x56\xd6\x72\x28\xec\x49\x8c\xf5\xb5\xa2\xbd\xfd\x97\x55\x18\x29\xbb\x03\x93\x2d\x09\x8c\xe4\x4c\x14\x46\x35\x33\xed\x9a\xea\xd8\x1f\xfe\x91\x04\x9e\x5d\x79\x35\xa0\xb3\x5f\xbc\x0a\x3d\x2f\x94\x8f\x45\xea\x28\xca\xf9\xf7\x9b\x88\x7e\x2f\xcc\xc7\x27\xaf\xbd\x5d\x19\x7b\xce\x4e\xc7\xb6\x2e\x40\x5b\x7c\xf6\xab\x38\xef\xe7\x22\xbf\x62\xf9\xbf\xc8\x0a\x8c\xe5\xef\x11\xa9\x62\x85\x0f\x67\x8b\xe8\xee\x45\x5e\x09\x10\xde\x35\x82\xee\x0e\xa9\x67\xd9\x4f\xd9\xcb\x7d\xa0\xeb\xf0\x42\x2a\x25\xd7\x97\x93\x6b\x29\x33\xc4\xb8\x2a\xe5\x2c\x1c\xfb\xde\x44\x56\x96\x32\x5e\xc8\x69\xaa\xb4\xf7\xfb\x8c\x00\x0b\xe3\xa5\xb1\x2e\xc3\x25\xa1\x4f\xd8\xae\xaf\x4b\x7b\xe7\x28\xff\x82\xf1\x1c\x6d\x7c\x48\xdc\xac\x53\xde\x61\xa1\x39\xeb\x5d\x23\x16\x26\x7b\xe0\x98\xd1\x63\x5f\xd2\x41\x11\xab\x0b\x50\x5d\xa4\xb7
\xef\xfd\xa5\x24\x1e\x6b\xfa\x5f\xfe\xa6\xc2\x88\x39\x0a\x6c\x35\x8b\x41\x99\x0b\xc2\x65\xed\xa5\xd5\xef\xcb\x42\x7e\x3b\xc6\x64\x38\x67\x6e\x3c\xdb\x25\x8f\x89\xae\x1e\x58\x7b\x92\x31\xbb\xc3\xd7\xe3\x18\x16\x75\x2e\xac\x65\x73\x31\x1a\x0a\x5c\xd9\x5f\xfd\x16\xc0\xd9\xc5\x19\xd2\xf0\x71\x1d\x6a\x2d\x2f\xfd\x8a\x98\x21\xbe\xd0\x3d\x12\x5b\xa7\x37\xbd\xc9\x4b\x7a\x6f\x6c\x5c\x9a\xc5\xe0\xa5\x25\x5d\x9a\xb6\x78\xda\x25\xb5\x20\x75\x0c\x36\xbd\xc3\xd0\x9c\xbb\x6b\xb7\x24\x75\x30\xc9\x82\xe7\x1e\xa0\x15\xbd\x04\x24\x7d\x77\x61\x4f\x99\x59\xcd\x55\x04\x8d\xea\x09\x43\x93\x32\xc8\xf2\x30\xd5\x0c\x5b\x3c\xa6\x6d\x0f\xd3\xbe\xfa\xf4\x52\x7a\x81\x53\x57\x7f\x89\xc3\x58\xfb\x00\xae\xaf\x4c\xbc\x5c\xd2\x22\xce\xfa\xec\x9f\xc8\x78\xbc\x31\x50\xb0\x24\xb1\xea\x67\x40\xa9\xfb\x49\x6b\xce\x1e\x79\xa9\xdd\xcc\xa9\x68\x47\x4f\xeb\x0f\x6a\x89\x9f\x1c\x51\x60\x0b\x94\x1f\xf4\xea\xdb\x64\x11\x69\x1f\xc7\xc4\x1b\x09\x98\xca\x4c\xe2\x92\x00\xf1\x3a\xc9\xd1\x29\xcf\x4b\xca\x33\x6a\x29\x5e\xb1\xa1\x67\x85\x17\x6a\x3e\x80\xc5\x5f\x9c\x3e\xd2\xee\xe2\x53\x9a\x9d\xf8\x00\x26\x4f\xc0\x58\xbe\x39\xfa\xbe\x7c\xe3\x5b\xc2\x4b\xeb\xcd\x3b\x5f\x1a\x1c\xef\x5d\xc8\x73\xf8\xc1\x56\x37\x14\x65\x39\x5f\x8b\xc0\xee\xae\x3a\x13\x20\xf1\x10\xc8\x98\x2f\x0a\x77\xcf\x8a\x30\x17\x28\x5e\x2c\x70\xa4\xfb\x53\x6e\x64\x8e\xc1\x7b\x33\x14\x75\x3e\x57\x0e\x4c\xcc\x80\x45\xf8\x94\xa4\x33\x9b\x72\x68\x67\x6e\xb0\xc5\x19\x48\xa0\xf0\x21\xcf\x55\x6d\xfa\xb9\x16\xad\x88\x71\x60\x0e\xef\xb3\x6d\xa7\xd6\xd4\x6c\x2d\x2f\x2e\xa8\x64\x6d\x37\x76\x5a\x9d\x42\x71\xe6\x9a\xd8\xdd\x77\x19\x1a\x1e\xa3\xb5\xf7\x2f\x49\xc9\x3b\x68\xfe\xe2\xef\xeb\x93\xe3\x19\x4d\x4c\xdd\xf1\xf2\x52\x79\xd4\x8c\xd1\x61\x9d\x5f\xc2\xcc\x07\x8a\x1d\xba\x03\x49\xa3\xad\x28\x34\xdd\x52\x14\x3c\x47\xf6\x8b\xbc\xf9\x90\xa0\x71\xbe\x8c\xb0\xf7\x09\x2e\x35\xce\xfc\x74\xb9\x59\xb3\x93\x2e\x4d\x7b\x41\x30\x0a\xca\xb7\x6a\xce\x91\x40\x0d\x8b\xdc\xf3\xe0\xe8\xcd\xd9\x23\x84\xf9\xbc\x5b\x47\xec\xef\xab\x41\xe6\xf9\x3d\x9a\x51\xab\x04\x94\x9f\x76\x03\xdf\xbb\xf6\x19\x0c\x40\x51\x8f\x30\xfc\xf3\x39\x5b\xa7\x92\x18\x96\x39\xbd\xde\xf1\xca\x9b\x1d\xae\x40\x33\x3d\xcf\x4d\x05\xeb\x10\x88\x1b\x7f\x59\x67\xdb\xbb\x6a\x33\x2a\x12\xfd\x7c\xee\x8d\x42\x1f\x4d\x12\x8f\xd0\xa6\xa1\xcf\xde\x6c\x8d\x36\xea\x18\x24\x17\x2c\xd5\xba\xb1\xbf\x67\x8e\x21\xac\x99\xe9\x0c\x33\x34\xfc\x7d\x0a\xb4\xd8\x65\x4d\x2d\xd2\xf2\xfb\x69\x7f\xdb\xe7\xdf\xcf\xde\xd1\xf5\x7e\x60\xdf\x9e\x77\xcf\x37\xe1\xdb\x5f\xde\xe7\x2b\xd1\xb9\xfb\xbc\xfe\x20\xff\xe1\x56\x38\xb7\x9e\xfb\xd5\xff\xbd\x35\x98\x6c\x4d\x17\x86\x4c\xd3\x7e\xda\x42\xe4\xd7\x5b\x7b\xd4\x19\xf5\x15\x55\x3c\x81\xef\xf5\x7a\xa4\x7f\x6e\x58\x9d\x27\xcc\xba\x75\x45\x9c\xfa\xbe\xd6\x7f\xd1\x6b\x07\xdd\x3e\xd9\x06\xb4\x83\xb5\x48\x82\xde\x19\xf7\x7e\x00\xb3\x47\x4e\x58\x5d\x29\x60\x83\x7d\x63\xc5\x98\xb7\x35\x39\x63\x05\x81\x93\x74\xfa\x96\xd8\xf8\x04\x9c\xba\x0c\xb2\xe7\x3b\xe0\x12\xb4\xfa\xd4\xf4\xad\x41\xa0\x67\xcf\x9e\x58\x7a\x52\xeb\x27\x5d\x6d\xeb\xe9\x78\x52\x15\xcd\x25\xda\xcf\xcf\xfc\x2b\x26\xfd\xaf\xa2\xeb\x9f\x79\x7d\xf3\x7e\x3e\xf3\xa8\x17\x11\x01\x0f\x87\xba\xa0\x4f\x08\x01\x69\xdd\x44\x72\x3f\x38\x10\x4f\xa2\xe8\xd1\x3d\x1d\x2e\x9a\xb1\xe7\x04\xe1\x4d\x3f\x5e\x5c\x73\x62\xf1\xad\x3e\x9f\xf5\xeb\xe8\x0d\xe5\x9f\xeb\xb7\xf8\x64\x47\x2b\xa5\x6f\xdd\xc8\x9f\xeb\xe7\xed\x18\x4f\xb7\x07\xc7\xfa\xfe\xd4\x48\x37\xfa\xa9\x52\x27\xf9\x71\xbd\x75\x36\x3f\x7f\xc4\x4a\xf9\xa9\x6a\x9c\xff\xc0\x6d\xa8\xc5\xa6\x35\x3e\xd8\x3f\xd6\x26\xf3\xad\xd9\xf7\x8d\xd5\xf9\x8f\x23\x55\x65\x71\x19\x75\x18\x6c\xea\xfd\xb3\x9f\xbc\xfe\xf9\x03\xf7\x83\x54\x06\xb1\xef\xf3\xca\xed\x5
8\x9d\xad\xd7\xf6\x87\x5a\xf6\x3f\x79\xc0\x67\x9d\x30\x7d\x0c\x20\x69\x7c\xfe\x27\xbd\x85\xe7\x8f\x34\xe1\x9f\xb4\x2f\x59\xa0\x7c\x06\x27\xfe\xa4\x51\xa0\x7d\xeb\xf1\xda\x4b\xe4\x0f\xd5\x8f\xc0\xbf\x98\xba\x02\x29\x0f\x2e\x45\x35\xf9\xe3\x99\xf3\x3f\xae\x33\x02\x67\xab\x53\xea\xaf\xc5\x79\xee\xe0\xa2\x07\xc1\x7d\x87\x04\xf0\x2e\xde\xbd\x28\x40\xe0\x9b\xbf\x55\xe6\xc8\xa8\x78\xe7\xfb\x7f\xd4\x6b\x51\xfc\x18\xa5\x03\xa5\xdf\xaa\xf2\xb1\x09\x88\x59\x33\xf2\xd7\x63\x53\x30\x98\x54\x3e\xf0\xfb\x57\x97\x6e\x9a\x3f\x65\x3f\x1b\x7d\xdf\xf5\x8c\xc3\x1d\xed\x64\x7f\x4e\x5b\xe6\x85\xc7\xc7\x51\x28\xc3\xcd\xe3\xd6\x13\x25\xd9\x3f\xb6\x9c\x15\x27\x4a\x6c\x7f\x59\xdb\x61\x5a\x11\x41\x0b\x7b\x14\x7a\x7b\x86\xee\xed\x01\xab\x32\xc6\x82\x63\x2c\x05\xd2\x4c\xac\x7f\x9c\xef\x66\xed\xd7\xf2\xd1\xfe\x76\xc0\xff\x18\xe0\xf8\x0f\xc0\xbf\xce\xc6\xea\x4e\xb6\xc1\x59\xe2\x31\xff\x38\x0b\x45\xe7\xa6\xbb\xc2\xfc\x17\xce\x1a\x35\x91\xcf\xc9\x1b\x84\xd2\x96\xf7\xd4\x8a\xb6\xef\x9b\x16\x07\xac\xff\xd1\x78\xfd\x21\xa2\x05\xbe\x7f\xeb\xb3\xa9\xc0\x23\xf1\xa1\xdb\x9b\xef\x6f\x7d\x75\xaf\xe4\xcd\x4e\x92\x37\x20\xee\x53\x0a\x68\x56\x84\xce\x95\x80\xf4\xf0\xea\xdd\x17\x67\x40\xe3\xe4\xad\xfb\x0f\x0c\x62\x58\x76\x3e\x6d\x75\x80\xf8\x3e\xe4\x0c\xd8\x15\xa1\x49\xa1\xd4\x7b\x49\x8c\xc0\x0f\x0d\xf3\x3f\xfc\xe7\x62\x13\xbb\xd6\x08\xff\xd7\xde\xce\xd9\x11\x52\x94\xfc\x6b\x3f\xdb\x33\xd2\x82\x8e\x34\xc7\xb5\xfc\x75\x84\x7d\x9d\xfc\x3d\xd0\x8e\x7b\xc2\xfb\xbf\xa4\x1d\x10\x17\xad\x2f\x7e\x08\x33\xd7\x49\xe4\xd2\xc7\x0d\x4f\x83\xed\x20\xf5\xba\x2e\xcb\x86\x20\x12\x6e\x62\xf1\x71\xc2\x36\x52\xec\xe5\x6d\x6f\xb9\xed\x28\x97\xa3\xfd\x54\x6e\xd1\x80\xcf\x21\x40\x2b\xd6\x23\xe4\x10\x16\x0c\x34\x49\x16\x1c\x52\x03\x42\x74\x22\x57\x2a\xf2\xed\xa0\x2c\x20\x9d\x02\x64\xfa\xb5\x96\x02\x8a\x8a\x6e\x81\xd9\x15\x2d\xd4\x5d\xfa\xb9\xc3\x78\x7b\x98\x88\xc2\x76\x49\x8e\xd9\xdb\x9d\xf1\x16\xce\xde\xde\xa0\x59\x3c\x1c\xce\xbe\x40\xee\x72\x80\x64\x81\x3f\x3d\xbd\x88\xe9\x1e\x77\xe7\xdb\x90\x14\xc1\x25\x85\x02\xab\xcd\x56\xcd\xa7\xfb\x0c\x2a\x04\xd0\x2f\xe6\x55\x99\xae\x95\x44\x7d\x6b\x81\xa6\x90\xcb\x84\x2e\x81\x20\x26\x66\xaa\x43\x0f\x91\x82\x30\x3c\x41\x5c\xeb\x5b\x27\x31\x06\x8f\x27\x29\xc3\x79\xce\x05\xd1\xe9\x68\x02\xf3\xd3\x37\x9d\x0b\x45\x0a\x3c\xfe\xd3\x26\xb3\xe0\xfe\x27\x89\xfe\x27\x07\x51\xa7\xaa\x37\xc9\x8c\x50\x02\xb0\xfa\x07\xac\xa5\x2f\x4a\x08\xc4\x4b\x0a\x1b\x14\x83\x30\x19\xd6\x08\x01\x98\x5b\xe0\x70\xaa\x4d\x08\x40\xf9\xaf\x53\x2d\xf1\x06\xbb\x8a\xc0\x4c\xb6\xd6\x54\xa3\x29\x3d\x49\xd9\x0d\xd2\x01\x45\x12\x03\xc2\x55\x4c\xcb\x22\xa1\x86\x22\x70\xcf\x54\x10\x84\x2f\x23\x24\x04\xd0\x8b\x61\xe3\x12\xb6\xcb\x43\x14\x72\xeb\xe0\xc2\x70\x3a\x8a\x56\x6e\xd5\x3c\xb4\x06\xd4\x84\x00\xda\xe0\x60\xa4\xdb\x94\xdf\x83\xc7\x24\x4c\xd6\xda\x26\xf5\xdf\x7a\x84\x0a\x4a\xa4\xcc\x00\x1f\xe2\x09\xae\xbb\x6d\xc2\x10\xb5\x3f\xac\x56\x76\x42\xbd\xb5\x76\xad\x2d\x4d\x90\xff\x12\x03\x7f\x32\x69\x31\xec\x6a\xca\xf3\xf6\xd1\x03\xb6\xe2\x72\xcc\x59\xbf\xcc\x13\xa2\x35\xbe\x3e\xb4\x08\xdc\xd6\xcf\x31\x1d\x06\xee\x51\x9d\x30\x57\x17\xe8\x24\x72\x6f\x85\x4d\xc7\x99\x42\xde\x01\x81\x9a\x27\x9b\xe2\x6e\xe7\x58\x72\x50\x64\x9c\x64\x08\x74\xab\xd2\x2b\x06\x55\xa6\xf4\xeb\xcd\x54\x9b\x46\x04\xd5\x94\x24\x1f\x48\x89\x01\xee\x2a\x1d\x9c\xb3\xff\x66\x31\x19\x5d\x61\x80\xd3\x81\xda\x41\x93\x34\x36\xd2\xaa\x4d\xae\x47\xa5\x28\xc0\x3a\xae\xec\xa0\x7a\x54\xdf\x19\xbb\xc6\xad\x6d\xb3\xd9\x60\x98\x62\x9c\x25\x9a\x7e\x13\xc8\xb1\xa7\xcc\x19\x72\x61\xd0\xa3\x5c\x55\xac\x5c\xf4\x6e\x9d\xb8\xec\xa3\x4f\x27\xc9\x0b\x50\xe9\x02\x39\x2a\x83\x7b\x08\x70\x81\xc7\xb7\x1c\x9e\xe3\xa5\xb3\x19\x
2f\x6f\x7a\x8f\xe8\x82\xb2\x3c\x7b\x0b\x7c\xe4\xe0\xf0\x88\x18\x92\x99\x81\xe8\xd6\xb3\xfa\xa5\x34\x81\x3d\x2f\xe8\x9d\xaf\x2c\xec\x71\x36\x66\x5b\x57\xc4\x9a\x8d\xa4\xda\xef\x17\xd2\x5f\x76\x99\xcc\x82\x87\x85\x70\xa2\x10\x13\x88\x0c\xf8\xd1\x75\xc3\xbf\x24\x41\xa0\x95\x2a\xbb\x45\x23\xf9\xa4\x63\x0c\x29\x9b\xc1\x10\x49\x33\xec\x30\x86\xa2\x6d\x95\x47\x50\xfb\x19\x32\x27\x73\x98\xab\x76\x9d\xc9\x2e\x30\xc3\x1a\x52\x6f\x5a\xa9\x89\x20\xa4\x5f\x47\x37\x71\x2d\xbb\x1b\x4f\xcf\x26\x18\x11\xb4\xc3\x65\x15\x08\x3c\xba\xec\xac\xcc\x2b\x14\x16\x5c\x99\x89\x07\x52\x01\x81\xa2\x31\x86\x66\xc1\x43\x47\x56\xe7\x61\x61\x48\x03\xcc\xbc\xee\x95\xdb\x29\xc4\xd2\x8d\x4d\xe5\xc0\x61\x9f\xef\x91\x21\xdd\x2a\xe6\x56\x4c\xdf\x97\xba\xc5\xe3\xac\x1b\x05\x72\x7c\x34\x5f\xa0\x55\x30\x4a\x04\x72\x04\x9c\x4f\xe8\x77\xb3\x17\x7f\x72\xcc\xe8\x45\x7b\x1b\x5d\xc1\xc8\xcf\x01\x4d\x6a\xd6\x5a\x66\x32\x88\x60\x2c\x4a\xd5\x1d\x43\xeb\x7a\x54\x23\x7b\x2c\xe0\xd8\x71\x54\x75\x9c\xa0\xbc\x49\x39\x87\xc9\xe6\x79\x5d\x36\xe2\x91\xe6\xaf\x8c\xab\x1a\x73\x3c\x0e\x7a\xae\xc7\x08\xc1\x71\x1b\xec\x19\x8d\xdc\x8c\x08\x3e\x9e\x65\x45\x20\xc2\x98\x5d\x5e\x07\xa8\x3f\x7e\x34\x47\x04\xf1\x15\xaa\x21\x54\xc5\x5f\x24\x7f\x0e\xf1\xdd\xe2\xd4\xeb\x31\x20\x25\xcf\xf8\xc5\x75\xe4\xa8\x55\x81\xf4\x9a\x7b\xa4\x4b\xc5\x2c\x7b\x40\xe3\xde\xcd\xb9\x63\xff\x60\x4c\x7f\xa8\x74\xad\xbd\xe2\x27\x9a\xde\x92\xbc\x48\xed\x21\x48\x72\x7a\x87\xb0\x02\x4d\x46\x8d\x43\xcc\x4c\x57\xdc\x15\x16\x69\x24\xcd\x90\x3f\xd7\xbe\x1e\x1e\xde\x35\x2a\x51\x0d\xd3\xb9\x19\xac\x3e\x46\xa5\xac\x8d\x2d\x92\x1e\xde\x84\xd9\x9d\x94\x66\x39\x6a\x87\x15\xda\x98\x34\x4a\x34\x26\xd6\xab\x23\xe3\x37\x1e\x97\xeb\x93\xd2\xd5\xfe\x60\x18\xd1\xe3\xda\x83\xba\xfa\x68\xa0\x38\x58\xd2\x2a\xb8\xd6\xe7\x5b\x3d\x35\xb3\x07\xc2\x81\xf4\xcb\x90\x3e\x59\xea\x0f\xab\x41\x76\x6f\x94\x3e\xec\xc2\x2c\xf5\x74\x3f\xf2\xe3\xd2\xcd\x40\x94\x22\x0f\xfb\xd1\xf4\x49\x30\x66\xe2\x8e\x76\x1f\x3c\xf1\x7a\xec\x81\x70\x5f\x1e\xc4\xfe\xd1\x98\x79\xec\xd6\xaf\xb7\x7f\x34\x2b\x35\x20\x10\x87\x10\x12\x0f\x15\xae\x34\x6f\x2b\x9b\x3d\x6a\x03\x88\xed\xd7\xea\x25\x22\x86\xd8\xa6\x96\x65\xf7\x2a\xa2\x8b\x50\x92\xc2\x03\x06\xf5\x86\x50\x5c\x84\xbc\xc4\x5c\x93\x46\x8b\xbd\x94\x42\xc2\x0a\x7a\x13\x22\xbf\x3c\x20\xf4\x94\xfe\x84\xf2\xc7\x61\xef\x1f\x61\xa6\x90\xa0\x78\xda\xa5\x96\x40\xc2\xee\x5b\x90\x2e\x01\x0a\xd6\x22\xd0\x21\x25\xc1\xa5\x1e\xb5\x25\xf3\xd3\xbb\xa2\xa5\xeb\xbe\xde\xe3\x86\xad\xc4\xa3\x91\x4a\xc5\x5d\x6a\xce\x1e\x2f\xac\xb1\x85\x31\xe2\x03\x21\x54\x71\x4e\x3e\xb0\xfc\x25\xb5\x0a\xbf\x00\xd0\xaa\x50\x9a\xed\xe3\x6e\xf0\x50\xb7\xa2\xd9\xcf\xb0\x12\xf5\x22\xa4\xac\xf2\xf0\x6c\x98\x30\x05\x0c\x7c\xcc\x1f\xe3\x98\x2e\x76\x21\x54\x9a\xd9\xbe\x47\xe4\x54\xb9\xc2\x03\x67\xe6\xc2\x70\x77\x2d\x93\x87\x90\xc0\x81\x8d\xd6\xca\xc5\x1f\xd2\x5b\x5b\xf4\x76\x8e\x4d\x5e\x2d\xd1\xe0\x31\xb7\x34\x0a\x17\xb8\xe8\x92\x36\x90\x9a\xbc\x46\xc3\x94\x3e\x66\xbd\x5b\x8f\x72\xa8\x83\x02\x58\x8a\xce\xa2\xec\xf1\xa3\xc7\x24\x3b\x68\x02\x76\x3b\x3e\xb3\xbb\x6b\x0c\xab\x50\xa3\x03\x4f\xd9\xfe\x11\x7a\xfc\xf0\x70\x9a\xac\xdf\x24\x45\x51\x80\x0c\x56\xd8\x27\x0a\xca\xd1\x46\x48\x2e\x5b\x34\x8f\x0f\x97\x27\x9a\xc9\xbd\x1e\xca\x92\x1f\x83\xb7\x35\x1e\xa5\x8b\x44\x82\x07\x1c\x9c\x5e\xed\xb3\x20\x37\x03\xc4\x31\x78\x8b\x20\xab\xe6\xbf\x21\x32\x7b\x97\xc1\xee\xcf\xc3\x83\xa8\xdf\x5c\xcc\xba\x8b\xbc\x06\x94\x9a\xfa\x92\x66\x46\x3f\x79\x3e\xf6\xc3\x73\xd7\xb8\x93\xfc\xa9\x93\x01\xf5\x99\xea\x75\xe7\x23\x11\x8c\x63\xd3\x5e\xe7\x61\xb5\xaa\xea\x31\x33\x73\xa4\xf9\x41\x40\x21\x1d\xef\xf6\xcb\x9a\x5a\xec\xfd\xc3\xf9\x59\xbc\x5e\x56\xe3\xa9\x06\x43\x8a\
x20\xef\x75\x62\x67\xe1\x91\xe2\xbd\xcf\xff\x90\xd0\xcf\x95\xa5\x91\xe1\xc7\x45\xdb\x37\x64\x26\xfd\x0c\xed\xa7\xe9\xba\xe5\x23\xfe\x79\x8b\xfa\xa3\x98\xa4\xe7\x9a\xff\x21\x5c\x50\x2a\x16\xff\x10\xe9\xa8\xd7\x2a\xff\xeb\x43\xa6\x19\xe6\x4c\x03\x32\x3b\xfb\xbd\x8b\x14\x81\xf3\xf9\x57\xf9\xa3\x66\xf4\x21\x71\x02\xd7\x8e\xfc\x50\x6e\x36\xb0\x5a\xfe\x4d\x11\x3c\x0a\x7d\x8c\xd1\x45\xf7\x5c\xa1\x22\xc4\x2d\x22\x16\x3c\x1f\xde\x07\x95\xf2\x45\x8d\x9e\x1c\xd2\x31\xbd\x79\x96\xa5\x01\xe5\x49\x3e\xcd\x71\x93\xbd\x99\x35\xca\xf2\x43\xd9\xd7\xe3\x55\x18\x15\x90\x37\x30\xf3\xb7\x24\xc5\xdc\xbc\x79\x63\x9f\xfb\xdf\xfa\xda\xb4\x33\x22\xff\x05\x83\x71\x4b\xa0\xe6\x5a\xa1\x89\x6c\x4c\x7b\x70\x23\xf3\x7a\x5c\xda\xed\xea\xf9\xea\xc9\x4d\x3b\xe4\x90\x06\x59\x55\x0b\xe5\x75\x6e\x55\x0f\x42\xbb\x14\x39\x28\xb3\x9d\xfc\x0a\x21\x84\x58\x0a\xa9\x64\x6e\xf4\x7b\xc4\xd2\xb8\xde\xa0\x6b\x85\xac\x49\x83\xa9\xd4\xdc\xd0\x3a\xe8\x8d\x4b\xf0\x62\x41\x1c\x02\xaf\xdd\x62\xf7\xa1\x68\xe1\x2e\x4b\xff\x06\xca\x1b\xa1\x63\x81\xc1\x2e\x56\x78\xb9\x11\x84\x33\x52\x1c\x35\x30\x60\x76\xf1\xc9\x22\x47\x10\xd2\x78\xb7\x68\xa9\x60\xc0\x68\x95\x0f\x2e\xd3\x8d\xa1\xf9\x90\xef\x68\xf2\xcf\xcc\x90\xe2\x71\x5a\xbd\x25\x31\x10\x55\x5a\xd6\x2a\xe4\x60\x25\xd7\x1c\xbd\x21\x99\x43\xb4\x87\x12\x1f\x5c\xe1\xd1\x64\x2f\x73\xf6\x08\x29\x48\xbf\x1c\x9a\xc6\x51\xc4\x37\xd9\xf5\x3d\x10\x35\xea\x36\xc9\x89\xdc\x03\xc9\x62\x19\x92\x5f\x43\xa3\xb2\x63\xac\xb7\x53\x0f\xcf\x2e\xfe\xcb\xc3\x90\xf3\xb7\x64\x3b\xc2\x93\x36\x5c\x6b\x27\x34\xe0\x70\x2d\xcb\xb3\x6e\x0f\x37\x47\x06\xcd\x0f\x40\xe1\x94\x83\x1c\xfb\x26\x0b\x33\xd8\x3d\x44\x1f\x84\xab\x59\x07\xe2\x29\x0d\x8f\x5d\x03\xe8\x03\x20\xf2\xf7\x02\xd5\xbb\xe2\x36\x65\x58\x2b\xd4\x38\x62\xca\x63\x35\x2b\x42\x59\x86\x5d\xb8\xfa\x41\x34\x61\xda\xe9\x2f\x4b\x88\xe3\xe5\x8d\x7f\x4c\x85\x43\x87\xb9\x85\x1a\xc7\x2b\xd1\xed\x08\x31\x0e\x9f\x44\x56\xa9\x3d\x8c\x35\xc0\xc8\x43\x85\x6a\xdf\x2e\x2b\xcf\x63\xc0\xa1\xac\xa8\x85\xef\xcf\xb2\xe4\x3a\xb8\x53\xff\xc4\x20\xcc\xb2\x89\x73\xd4\xdf\x26\xd0\xb1\xe6\x71\xe2\x62\x4b\xe1\x44\x1b\xfb\x71\xf1\x89\xda\xd3\xdf\xa9\x6f\x30\xa4\x38\x56\x5d\x4f\x37\xfd\x26\x43\xa7\x86\x42\x27\xe5\xf2\x1c\xa0\xe3\xc3\xfe\xa2\x58\x07\xde\x20\x06\x7e\xa0\x44\xc9\x21\x2e\x76\x1c\x45\x5e\x33\x88\x77\x5c\xdb\x79\xdc\xe6\xbd\x90\x47\xce\xfb\x5a\x29\x54\x38\x3c\x7e\x8e\x92\x2f\x05\xa9\xcc\x71\x5c\xc7\x82\xb7\x6c\x5f\xda\x46\x59\xfe\x0a\x59\x0f\xba\x1e\x61\xc3\xc3\x55\xea\x7a\x2f\xe9\x9e\x82\xdc\x4e\xeb\x08\x09\x52\xde\x70\x17\xfe\xb0\xdf\xd9\x71\xf0\xa2\x7d\xb0\x18\x27\xec\xa5\x59\xee\x29\xc8\x80\x1c\x77\x92\x12\x8a\x4d\x80\xa1\xec\x67\x89\xab\x85\x80\x37\xf4\x58\xbe\x28\x10\xc2\xad\xc2\x47\xc9\xb5\x46\xae\xf0\x68\xbe\x5d\xd8\x52\x97\x29\xc4\x9f\x0d\x76\x97\x5f\x59\xcb\x87\x1c\x1a\xac\x18\xd9\x1b\xbc\x3f\xd4\xe5\xb0\x27\x45\x6e\x9b\xdd\xb5\x5b\x95\x5f\xa9\xe9\x83\x28\x4a\x12\xea\x20\xa5\xd5\x24\x90\x05\x69\x42\x2e\xf0\xa0\x9e\xb7\x15\x2f\x65\x3e\x53\x7b\x20\xa8\x2d\xa0\xcb\x2a\xc9\x90\x50\xbb\xc0\x45\x8d\xd4\x01\x24\xde\xc7\xb9\xe5\xbd\xe5\x17\x0c\x79\xbb\x4e\x39\x2f\x07\x8e\x48\x48\xe5\x24\xbb\xd3\x44\x32\x22\xb1\x6c\x91\x8a\x09\x3c\xc7\x49\xfb\x5c\x43\x9b\x0a\x82\x22\x7e\x0f\x63\x0b\x0b\xdf\x3a\x0c\xb5\x7a\x8f\xa0\xcd\xb7\xf7\x42\x9f\x9c\x21\xcf\x3a\x41\x10\x16\xa5\xc6\xe1\x29\x58\x4a\xc7\x80\x8a\x48\xaf\xd7\x97\xa2\x20\x3c\x54\x77\xa5\x87\xfb\x16\xc5\xee\x2b\x44\x41\x62\x09\xf5\xff\x87\x9c\xa5\x51\x63\xd5\xe2\xc7\x51\x74\xd4\x9f\xc9\xe9\x7f\xbc\x2a\x29\x44\x70\x12\xa3\x05\x87\x74\xe7\x4a\x0f\xa8\x21\x15\x07\x3b\xb8\x4c\xe5\x18\xf3\x9f\x1f\x62\xa1\x83\x2b\xe7\xc7\xab\x14\x05\xda\xfd\x7f\x05\x44
\x06\x9a\x21\x34\xb2\x78\x74\x58\x8d\x82\x4f\xe6\xdd\x9f\x50\xb7\xc7\xda\xb9\x9b\x68\xa0\x7f\x76\xf8\xb6\x9a\xc8\xc3\x1d\x86\x6f\x04\x1a\xef\x6f\xdf\xcb\xb7\x13\x77\x5c\xc9\xe0\x1d\xa4\xf4\xf7\xca\x47\xcf\xc5\x42\xd6\xd0\x8b\x70\xb5\x10\x1a\xd9\xc7\xdc\xcc\x48\x6c\xa1\x59\x67\xe1\xf4\xb4\x4a\x29\x81\xec\x67\x43\xdf\x63\x18\xc0\x9f\x26\xb3\x16\x54\xe6\x04\xde\x5f\xfb\x20\xbd\x91\x35\x97\x07\x89\x8b\x90\x13\x71\xe6\xe7\xc0\xc2\xc6\xf1\x7a\x6e\xd2\xe5\x76\x7a\x64\x11\x6a\x58\xbe\xbf\x22\x67\xd2\xf5\x5b\x8e\x30\xbc\x57\x66\x16\x3c\x69\x3c\xf0\xdd\x47\x77\x3f\x32\x78\xd1\x77\xed\xe6\x50\x05\xb9\x06\x09\x42\x40\xff\xa9\x87\x5f\x92\x05\x1e\xe8\x0e\x19\x73\xf7\x17\xa3\xba\x11\xcf\x1d\x1f\x17\xb6\x5c\x7a\x64\xde\x30\xb4\x1b\x42\x24\x6a\x00\x20\x2b\xd8\xdf\x05\x08\x91\xb0\xd1\x48\x4b\x3f\x32\xb4\x1c\x32\x24\xcf\x43\x06\x27\x8f\x53\xe8\x09\xb7\xe3\xf4\x52\x67\x1f\x4f\x4a\x2f\x40\xcb\x37\x44\x3e\x7c\x60\x78\xa6\x7d\xd9\x5d\xff\xa6\xb4\x08\xb0\x65\xa2\x1d\xf6\xe8\xef\xd1\x38\x9b\xf6\x41\x3d\x5a\xeb\xcd\xec\xd9\xc7\x24\x6f\xc9\x11\x1e\x62\xdd\xb7\x10\x36\x84\xea\x48\xa0\x4c\x21\x3d\x72\x21\x7d\x42\x22\x26\x6b\x6e\x72\x1d\xd4\xff\xb2\xe9\x10\x82\x08\xb5\x86\xf8\x49\x9d\xa5\xb0\x52\x23\x4f\x1e\x66\x48\x64\xd4\x27\xa8\x06\x9c\x0b\xa5\xa1\x10\x91\xc0\xa0\x91\x52\x2c\x69\xfb\xab\x4e\xb3\xbb\xce\x38\xc3\x10\x2b\x09\xd1\x0f\x0d\x52\xc2\x90\x58\x92\xe2\x8d\xda\x96\x5a\x57\xc7\x0b\xcb\x16\x35\x84\x6b\xff\xdc\x42\x27\x4c\x84\x95\xc8\xc7\xea\xe9\x5f\x4d\xc3\xa1\x7e\x0c\xf5\x41\xaa\x04\xfe\x89\x4f\xe9\x93\x87\x9a\x49\x6e\xee\x92\x2c\x79\x94\x33\x8c\xa3\xf5\xe4\xa5\x7a\x12\x07\x70\x5f\x3d\x28\xf8\x95\x5b\xfa\x65\xf5\x0e\x79\xdb\x81\x8b\x44\x48\xc7\x05\xe1\x7e\xb1\x31\x49\xd6\xf8\x08\x96\x36\xb4\x04\x16\xc8\x4d\x49\xda\xc3\x8e\x61\x13\x57\xf2\x8b\x4b\x10\xd2\x27\xa1\x91\xc2\x01\xd5\xa6\x92\xf2\xd1\xdc\xf0\xe2\x52\xf7\xfb\x4c\x96\xc5\x3d\xdc\xba\x44\x4b\xdb\xba\x1d\x1e\x8f\x14\x24\xad\x18\x03\x54\x49\xbb\x45\x7c\x54\x6d\x56\x88\x8e\x54\xf8\x12\xd8\x30\x81\xbd\xaa\xfb\xdc\x13\x4b\xde\xde\x36\x44\x19\x66\x09\xc3\xcc\x91\xc0\xda\xd7\x1f\xf9\x18\xfa\x5a\xd8\x19\xc6\xa0\x62\xd4\x46\x6e\xea\xfe\x85\xf0\x6d\x6f\x15\x5c\x7b\x40\x07\x08\xa2\xb9\xd5\x5b\x97\x26\x16\x68\xfa\x91\x2e\xdc\x72\x28\x03\xa2\x9f\xaf\xb3\x05\xeb\x49\xc8\x25\x2c\x4e\x97\x23\x52\x6c\x50\xd8\x68\x04\xbd\x9e\xf2\x2d\x87\xc6\x5a\x7a\x97\xfc\xe4\x91\xd3\xe4\xb6\xb2\xfb\xa4\xb5\x0d\x25\x7b\xf6\x33\x05\x8b\x21\xe1\xc2\xf3\x9a\xc1\x87\xd1\xca\xe9\xbd\x08\xdf\xd0\x63\xfc\x48\xd5\x14\xcd\x26\xeb\xa2\xb1\xba\xbe\xb8\xfa\x58\x53\x24\x29\x17\x55\x7c\x89\x40\x94\xeb\x20\xf4\x5a\x72\xfb\xa5\xe8\x75\x81\xf6\xfd\x12\x35\x4c\xe1\xb3\x3f\x21\xb0\x99\x73\x30\xce\xa7\xab\xe3\x3a\x2e\xd1\x33\xb5\x12\x42\x41\x42\xcc\x08\x25\xad\x50\xd9\x72\x33\x63\xd9\x42\xf3\xa5\xf6\xd4\x75\xb1\xbb\x3d\xc7\xd6\xca\x2e\xf5\x97\xe2\xf8\x4e\x74\x2d\x59\x97\xa0\x8f\x99\xfa\x49\xfe\x74\x2b\x0a\x46\xef\x52\x2f\xf4\x7c\xbb\x99\xad\xa9\xff\x2f\x0a\x69\xa0\x92\xca\x24\xfd\x09\xdc\x87\x23\xb4\x58\x71\x27\xae\x8e\xc6\x22\x15\x9f\x29\xc1\xef\x75\xed\xad\xd4\x6b\x27\xe0\xac\x9e\x9a\xb9\x8f\xda\x58\x82\x90\x12\xf2\x09\xe3\xe8\x31\x64\xcc\xf1\xd7\x7e\xe2\xb7\x3f\xce\x35\x2d\x7c\x4f\xa7\x34\x5f\x4e\x1c\x31\x0b\xa3\x22\xbc\xf4\x53\xe2\xa7\x38\x33\x27\x19\x53\xe6\xca\xd0\xba\xdd\x86\x28\xbb\x63\x2a\x6b\x39\x64\xc1\xd3\xca\xdd\x2e\xbd\x19\x98\x9c\x89\x86\x99\x5e\x00\xd4\x98\x6b\xe8\x34\xa1\x2b\x1f\x9a\x34\x40\xec\xb1\xae\x45\x3f\xd7\x1b\xcb\x7d\x6e\xb9\x4f\x56\x17\xa5\x90\x85\xf1\xdc\x61\xd7\x8c\x79\xf9\xb8\xdb\x18\x45\x2a\xb1\x1c\x2a\x8e\xf5\x56\xa2\x39\x83\x6f\x6f\x66\xb2\x77\xec\x90\xca\x0f\x24\x66\x86\x10\xa
1\x39\x0a\x13\xa3\x61\x29\xff\xaf\x8f\x8f\x2b\x46\xf1\x42\x66\x86\xc3\xbb\xb1\xbd\x5b\x11\xc7\x1a\x8d\xb3\xe6\xb5\x30\x6b\xd8\xf4\x28\xc0\xd2\x36\x6f\xfd\x03\xb7\xad\xe9\xd1\xe6\xb7\xad\xae\x92\x86\xd9\x25\x8b\xb3\x0f\xa5\x5a\x93\x00\xef\xc9\xde\x66\x28\x4c\xbd\x07\x4c\x78\x95\xba\x52\xda\x85\xd7\xe8\xd3\x76\xc9\x07\x68\x66\x08\x27\xa5\x88\xf6\x72\x6e\xa3\x8e\xd1\x9a\xd0\xd7\x1c\x76\xda\xb2\x8c\xa1\x64\x2d\xba\x6c\xa9\x8c\x7a\x48\x92\x7b\x36\x42\xc7\xc6\x9a\x72\x4d\xd9\x67\x46\xbb\xf6\x3c\x69\xa7\xa5\x9c\xa1\x98\xc3\xa7\xca\xb6\x07\x2f\x1c\x67\xd9\x0d\xe1\x82\x5d\xfe\xe3\xd1\x76\x2e\x84\xe9\xc2\x0e\xef\xbf\x46\x71\x87\x9c\xcd\x93\x81\xf7\x30\x43\xd5\xc3\xcc\x6f\x1a\xef\x26\x6b\x63\x4f\xa4\x4f\x8f\xa6\xa8\x71\xb1\x7a\x74\xec\xb7\x97\x91\x8a\x43\x43\x81\x5f\x9d\xd5\x9a\x4f\x0e\xf8\x75\x1e\x95\x4e\x0b\xce\xd3\x98\x3b\xcf\x4d\x59\x67\x9e\x87\xea\x5d\x79\x33\x3b\x9e\x5c\x27\x99\x4f\x28\xdb\xbc\x3b\x77\x6b\x75\x68\x8f\x4a\x40\xe4\xe2\xf0\x49\x77\x75\x17\x7f\xea\x3a\xbe\x33\x1d\x86\x33\x29\x4a\x23\xe7\x1a\x14\x6f\x56\xe6\x01\x76\x90\x0b\x4e\x13\x25\x68\xea\xaa\xf7\x5a\x42\x38\xfe\xea\x98\x2d\x3f\x1b\x44\x6f\x86\xe8\xbe\x76\x56\xd3\x3d\xa5\x8a\x53\xf2\xa3\x57\x2b\x11\xe1\xbf\x52\x41\x81\xd4\x75\x2c\x9b\xf3\xa4\x4d\x78\x7a\xa2\xcb\x27\xb1\x98\x9e\xa1\x94\xee\x85\xd0\x5b\xd9\x7d\xd0\x9c\xe3\xb1\x1d\x86\x3d\x29\x57\xef\x26\xb7\x6e\x2f\x14\x2f\x07\xc6\x41\xe3\x4c\x51\x05\x95\x30\x9b\x40\x8f\xd5\x1f\xaf\xf4\x1b\x8b\x94\x55\x3d\x64\xb3\xc7\x91\x6d\x71\x33\x07\x8e\x4e\x41\x2f\x85\x62\xdf\x6e\xf1\x71\x80\x35\xd0\xe0\x88\x66\xe7\x9a\x7f\xab\x22\xcf\xed\x66\x9c\x8a\xa8\x6c\x32\xb8\x2e\xcf\xf3\xa6\xa1\x4e\x14\x34\xd7\xa4\xc0\x53\x0f\x49\xfd\x40\x80\xe7\x1f\xad\xa7\x23\x99\xb2\xe6\x3f\x11\xe6\x2e\x93\x95\x3d\x2c\xe9\x61\x76\x68\xf7\x59\xdb\x58\xbb\x5c\xc7\x2a\x23\xd0\x5c\x30\xe3\xc7\x38\x62\x38\x04\xd5\xcc\x52\x41\x07\x4a\x25\xd0\xae\x16\x49\xd1\xba\xc5\xa9\x6a\x89\xae\xe2\x3a\x69\x3b\x5d\x57\x35\xa5\xfe\x09\x06\x77\x4f\x19\x9d\x7c\xe5\xd0\x06\xfa\xa9\x12\xdd\x81\x04\xd0\xc4\xe7\x15\xfa\x3f\xa5\x09\x00\xf5\x4d\xca\x09\xb6\x37\xa6\xa4\x1a\x04\xbc\x15\x1f\xae\x99\x1a\x38\x73\x96\x46\x90\x86\x67\x3b\x26\x8e\x73\x69\x44\x36\x72\xe1\x34\x0c\x6d\x26\x95\x83\x5a\xa1\xcb\xd2\x3c\xfa\xa9\x8e\x34\x71\x95\x62\x9f\x52\x7e\xad\x63\x8e\xbd\x4d\x26\x44\x36\x0f\xb4\x35\x18\xab\x5f\x7a\xd7\x76\x26\x75\xff\x2e\x7a\x01\x15\xc1\xc5\x17\xa6\xcc\x7c\xb8\xcb\xb8\x88\xda\x6c\xe6\x14\x7b\x45\x20\xe8\x11\x01\xb3\x5d\xb1\x56\xf1\x56\xa4\xbf\x73\x1d\xd2\xbb\x02\x58\x15\xc2\x6c\x6e\xdb\x17\x64\xef\xa9\xca\x73\x94\x2c\xd5\x1a\xb4\xa4\x28\x85\x64\xfd\x8d\x68\xd5\x9b\x9d\xa9\x96\xe6\xc7\x04\xd7\xf6\x1a\x31\xc4\x28\xd1\x5f\x6c\xd6\x4f\xc9\x61\x68\xc5\x35\xbb\xb8\xf7\xae\x52\x3f\x31\x26\xc6\x4d\x3d\x33\x70\x73\xf9\xd4\x5a\x3e\xe9\x9e\x8b\xa0\x19\x2a\x7a\xe1\xe3\xcf\xa9\x8b\x0d\xee\x3a\xd2\xf0\x1b\x74\x79\xda\x75\x28\xa3\x23\xde\xc2\x8c\x2d\xe5\x55\xeb\xe7\x07\x97\x07\xad\x94\x2d\xe4\x2e\x87\x62\xd2\xc0\xd1\xab\x0e\x90\x2c\x9f\x42\x3c\x07\xa3\x1f\xac\x77\x73\x0a\xe5\xa9\x9c\xfa\x50\x4d\x8a\x31\x71\xb7\xaf\xa1\x5e\x12\xee\xd1\x13\x4d\x54\x6b\x2b\xec\x31\x86\x0c\x9b\x11\xd1\xfa\x45\x4d\x3a\x58\xaa\x8e\xf0\xd1\x7c\xeb\xd7\x2e\xa6\x3a\x80\x04\x31\x71\xd6\xdc\x69\x3b\x15\x30\xec\xa2\x44\x97\xd4\x5a\x51\x68\xf2\x45\x0b\x77\x69\x7d\x07\xcd\xb0\xd7\x32\x8e\xdf\xe3\x99\xb8\xeb\x67\x8c\x87\x76\xf0\x67\x31\xec\x1e\x01\x6c\xd6\xf8\x1e\xb8\x44\xc7\x31\x99\x2e\x25\xbe\x5b\xdf\x0f\x82\xc3\xa0\x40\xe4\x9a\x32\x52\x80\x76\x21\x22\x37\xd0\x0f\x1f\xc3\x5c\x15\xcf\x96\x7e\xd3\xc9\x26\x8e\x44\x89\x24\xe0\x73\x61\xbc\x44\x0e\x36\x2f\x28\x37\x49\xb6\x54\x88\x20\xb3\x
b6\x7c\x35\x11\xa3\x33\xdd\x6b\x4c\x88\x99\x8c\x1c\x91\xcf\xb2\x17\x39\xbc\x0b\x01\xa9\x16\xf4\x90\xae\xce\x5d\x3d\xd0\x38\xe2\x43\x05\x2d\x53\x65\xeb\x26\x64\x69\x4d\x45\xb3\xed\xbe\xe8\xe9\xf7\x8a\x22\x5c\x06\xe9\x78\x1f\x27\xd3\x99\x3c\xfc\xc1\x1a\x89\x0f\xb7\x8f\xb2\x94\xe8\x0e\xdc\xa5\xaf\x26\x86\xd4\x54\xc7\xe5\x70\x95\x76\xd0\x01\x07\x6c\x09\x6d\x23\xdc\xdb\x2c\xab\x0f\xc9\x20\xd8\x53\xd5\x05\x46\x54\x9f\x76\xef\x8d\x34\xce\x44\xea\xee\xac\xd1\x13\x6f\x86\x69\x4d\x45\x65\xb8\x54\x92\x9d\xcf\x3b\xec\x81\x46\x40\x17\x13\x30\xa3\x14\x95\xd9\xfc\x05\x0c\x3d\x24\x9a\xc8\xae\xe0\x2a\x9b\x7b\x2e\x75\x83\xad\xa0\x96\x66\xda\x3c\xd8\x9d\x96\x3d\x6c\xdc\xe8\x7a\x82\x68\x42\x6b\x96\x5c\xd1\x59\x10\x9f\xfb\x96\xbd\x6a\x2a\xd7\xa9\x74\x6a\x6c\x86\x22\x14\x13\x86\xd0\x10\xeb\xe2\x63\x6a\xda\xe8\xd4\xa4\x9d\x10\x6f\x45\x3d\x24\x07\x29\xe8\x31\x51\x14\x3d\x60\xe6\x89\x09\xe3\x50\xa3\x54\x27\x2b\x11\xdf\x9d\xec\x3b\x37\xd3\x90\xe6\x12\x9f\xb5\x55\xfa\x4a\x7d\xe4\x37\xe3\xd3\x5f\xd5\x99\x45\x2b\x73\x7a\xcf\xb7\xb8\x53\x92\xac\x5a\x44\xd4\xa5\xe5\xdc\x13\xb5\x96\xa0\x5f\xe0\xfb\x5b\xb6\x2b\xd0\x62\xc4\x33\xcb\xf4\xbc\x0e\xae\x94\x43\x4c\xcb\x5e\x58\xe8\x34\x70\x24\x0d\xc9\x07\x45\xd2\x4b\x54\x5a\x3a\x19\x65\x01\xc7\x66\x89\x1b\x7b\x6b\x2b\xf1\x27\xbb\x3f\x14\x57\xaa\x57\xe6\x47\xc9\x31\x75\xa1\x66\x85\x48\x3a\xfe\x0e\x68\xcb\xfe\xa1\xac\x94\x57\xd5\x8f\xae\xa6\xa4\x13\x76\xf3\x96\x5c\x2a\xe3\x1a\x3f\x78\xfc\x12\x1f\xb7\x59\x1d\xaf\x34\xdf\x1f\x31\xb3\x33\x33\x7a\xd2\xec\xe2\x98\x5f\x2e\xb5\xf4\x52\x46\x2f\x24\x40\xa9\x72\xa4\x06\x63\x9a\x3c\xbb\xcc\xde\xc4\xa6\x13\x4e\x5b\xc7\xf7\xdf\x88\x8d\xf4\xa0\xb8\xd2\x7f\x6e\x1d\xc0\x82\x1b\x35\xfd\x60\x0f\xef\x15\x94\x07\xff\x69\xf0\x70\x6b\xc9\x8c\x0d\x22\x23\x98\xe1\xcf\x8d\x06\x49\x12\x64\xff\x23\x65\xcf\x0a\x48\x2f\x3d\x3f\xc2\xa5\xac\x1f\x60\x55\xf5\xa0\x2c\x19\x34\x5e\x35\xde\x96\xe0\x0a\x3f\x58\xd3\xa6\xfe\xda\x00\x06\xf1\x70\x23\x96\xf9\x94\xd9\xac\x24\x08\x5d\x13\x76\x3a\x54\x68\x72\x10\xa9\x97\x4f\xd5\x76\xb2\xd6\x17\xc2\x3f\x78\xc8\xfd\x24\x59\xa7\xbe\x41\xcd\x20\x5d\xac\xea\xa5\xef\x2f\x39\xaf\x60\x2e\x37\xf5\x1f\x02\x51\x6a\x99\x41\x1c\xca\xf3\x14\xfe\x53\x80\xdd\x1d\x70\x3f\xbf\x69\x42\x40\x59\x5a\x51\x47\x9d\x7f\x6b\x58\x8a\x71\x31\x3b\xc7\xc3\x60\xcd\xdc\x49\xba\xd2\x48\x76\x2a\xa3\x58\xd2\xc9\xc5\x17\xdd\xc0\x10\xdd\x6a\x4f\xa6\xdf\xfc\xf8\xfb\xaf\xe0\x14\x0a\xdf\xac\xa5\xfe\xdf\xff\xef\xff\xf9\xff\x03\x00\x00\xff\xff\x71\x1d\x08\x34\x38\x36\x05\x00")
+
+func dataEnglishJsonBytes() ([]byte, error) {
+	return bindataRead(
+		_dataEnglishJson,
+		"data/English.json",
+	)
+}
+
+func dataEnglishJson() (*asset, error) {
+	bytes, err := dataEnglishJsonBytes()
+	if err != nil {
+		return nil, err
+	}
+
+	info := bindataFileInfo{name: "data/English.json", size: 341560, mode: os.FileMode(420), modTime: time.Unix(1452717629, 0)}
+	a := &asset{bytes: bytes, info: info}
+	return a, nil
+}
+
+var _dataFemalenamesJson = 
[]byte("\x1f\x8b\x08\x00\x00\x09\x6e\x88\x00\xff\x6c\xbd\x4b\xb2\xab\xbc\xd3\xe5\xdd\x7f\x47\xf1\xc6\xbf\xfd\x8d\xe0\x1b\x43\xcd\xa0\xa2\x1a\x32\xc8\x20\x5b\x80\x0f\x17\xef\x83\x2b\x6a\xee\xa5\x14\xde\xe4\x6f\x71\x2a\xe2\x69\x1c\x1e\x6f\x63\x90\x52\x79\x5d\xb9\xf2\x7f\xff\xd7\x7f\xff\xf7\x7f\xfe\x47\x5a\xd6\xff\xfc\xff\xff\xfd\x3f\xcb\xbf\xcb\xd5\x10\xe6\xfd\x3f\xff\xdf\xf1\xef\x57\x58\xe7\xd4\xa4\xf0\x7b\x9d\xd3\xd8\x9e\x17\xb7\x30\x97\xff\xce\xcb\x98\xd3\x27\xdc\xe2\xda\xff\xfe\x8f\x47\x1c\xc7\x74\x8f\xf3\xef\x75\xb9\xb1\xdf\x69\xd9\x96\x30\xe2\x93\x2e\xcc\x71\xfd\xbd\x6e\xa7\x79\x5a\xfb\xdd\x7f\x75\x39\xbf\x37\x86\xb1\x39\x3f\x78\x96\x2f\x9d\x37\x29\x3f\xbd\x9e\x9f\xf4\x31\xfb\x27\xe5\x97\x5a\x7f\xce\x76\x1a\xc7\xf3\xa2\x09\xf3\x94\x7f\x2f\xe6\xcd\x1f\x7e\xe9\xcb\x27\xfe\x80\xa9\x29\x77\xcc\xf1\x7c\xa2\xb0\xf9\x0d\x97\xb2\x08\xe7\xf7\x9e\x69\xb8\xc5\x39\x9f\x0f\xd2\xc6\xdb\x84\x8f\x1f\x71\x59\x52\xe3\x5f\xed\xd3\x9c\xe3\xf9\xc7\xcd\x3e\xae\xbd\xaf\x51\x18\xbb\x98\xcf\xab\xa1\x2c\xf0\xe2\x0b\x71\x2b\xaf\xee\x7b\x11\x86\xdd\xbf\xe5\xaf\x37\xc7\x5b\x6c\xfc\xe7\xde\x69\xee\xd2\xe8\x3f\xf0\x0c\x6b\x9f\xa3\x2f\xd4\x2b\x0c\xfc\xc1\x30\xaf\x7d\xc0\x8b\xcc\xf8\xb9\x80\x1f\x5f\xd6\xf8\xea\xc3\x98\xa2\xac\xea\x7e\xde\xb6\xe9\xe7\x22\x61\x69\x8c\x94\x84\xf3\xe2\x11\x46\xdf\xf9\xa6\x3c\x51\x9c\xf1\xa7\xf7\xb9\x6c\x78\x5c\xf0\x72\xe7\xf7\xa6\xbd\x39\xff\xac\x4d\xc1\xbf\x13\x72\xf2\x4f\x1e\x5b\xf6\xdf\xea\x63\xbd\xff\xef\xe5\x1a\xe7\xb8\x40\x30\xca\x63\xfe\x5e\x74\x79\x82\xb4\xc6\x77\xc4\xfb\x3c\x62\xc0\xbb\xc5\x79\xcf\x2e\x27\xb9\x9d\x63\xcb\xf5\x95\xb7\x79\x4c\xfe\xcd\xb0\xf4\xd8\xfa\xc7\xd6\x26\x97\xbe\x79\x5a\xb8\x3e\x78\x9d\x67\x11\xc3\xf3\x4b\xe5\x83\x29\xe3\x4d\xdb\xfd\xba\xe4\xb2\xd5\xe7\xa7\xf6\x54\x78\xf1\x5b\x79\x3d\x11\xd9\x31\xf9\xcf\xaf\x61\x70\xd9\x4a\x45\xe8\x64\xe3\xce\x03\x51\x56\xeb\x7c\xf8\x60\x87\x85\x7b\x8d\xb5\xb3\xa3\x18\xe5\xa9\x66\xff\x30\x4f\x1b\x7e\x79\x81\x72\x29\xfb\x8e\xdf\x6d\xfe\x6c\xe5\x30\xf8\xff\xf9\xa1\x34\xde\xca\xf9\x4e\xb2\xfd\x7e\x1c\xb6\x9b\x2b\x95\xc9\x37\x9b\x0b\xf5\xea\xf7\x9c\xfd\xa3\x71\x9a\x07\xff\x2c\x6c\x7e\x3c\x4c\xe4\xf8\x7c\xfe\x9b\x39\x95\x3b\xf8\x46\xc7\x22\x15\xbb\xef\xec\x2d\xf9\x79\x8b\x5d\xe7\x5b\x36\xef\xcb\x1a\xb2\x0b\x60\x68\xf7\xf3\x31\xe6\xb4\xfa\x0f\x87\x1f\x17\x3f\x79\xd9\x7b\xd9\x86\x38\xba\xb0\xac\x65\x2b\xce\xfb\xc7\xd6\x1f\x77\x4d\xf7\x7b\x18\x77\x9c\xd7\xc1\xb5\x40\x11\x3e\xd7\x8f\x45\xe1\x9f\x7f\xd6\x95\xdb\xf9\x9a\x47\x7c\xf2\x4e\xcd\x2a\xe7\x85\xc2\x5c\x54\xa2\xeb\xbb\x38\xbb\x75\x59\xf6\xfc\xf6\xef\x3c\x8a\xcc\xbf\x7a\xec\x6a\x91\xd2\x3c\x40\x55\x96\x35\x76\x8d\x5c\xee\x94\x7c\x2f\xe2\x0a\x89\x33\x45\xed\x8b\x9f\x83\x2a\x9e\xc7\x34\x8b\xa2\xe2\x55\xd1\xf9\x79\x5a\x57\xff\xf3\x69\x84\xb2\x8e\x0b\x75\x87\xc9\x02\xee\x1c\x07\x7f\xd4\xc7\x56\x8e\xec\x0a\xd9\xc0\xc5\xdc\x4f\x90\xd5\x3e\x7c\xfc\xb9\x83\x59\x0e\xd7\x38\xd4\xbc\x37\x7f\xc8\xf0\x2a\xe7\xe9\x94\xb4\xb8\x40\xbb\x35\x19\x27\x26\x6f\x4d\x82\xc5\x7a\x84\x21\x89\x1a\xc2\xa3\xe7\xa2\xcf\xa6\xf3\x97\xdf\x21\x47\x2c\x4a\x6b\x8a\x1d\x37\x1a\x62\x07\x1d\x96\xe9\x1d\x2c\xdb\x87\xf7\x3d\x8c\xe6\x79\xd9\x05\x7f\xee\xf2\xa2\x34\x2d\x65\xd9\xa1\x57\x8a\x32\x92\x75\x7f\x94\xf7\x38\x9f\x75\xf6\xd3\xd3\xc5\x39\xe4\x16\x7b\x60\x56\xd9\x77\xbe\xa1\xc6\xab\x6f\x0c\x65\x35\x53\x2a\x8a\x7a\x82\xaf\x00\xfd\x3a\xc7\x0e\x9a\xa1\xfc\x76\x03\x9d\x59\x5d\x23\x5f\xa6\xc9\xce\xde\x82\x17\xa4\xd6\x0e\x5b\xd1\x7a\xe7\x6d\xf7\xf7\x84\x65\xb2\x25\x83\xcc\x3d\x36\xb5\x93\x2e\xf2\x2d\xf4\x4d\xd1\x14\x0d\x2c\x3e\x0c\xfe\x18\xcf\xaf\xa7\x16\x96\xff\x0d\x8d\x54\x94\x50\xd9\x00\x97\xc2\x09\xef\x7c\x2b\xaa\x66\x8
5\x6e\x30\x8f\x80\xba\xad\xbc\xe5\xea\x5f\xdd\xa7\x4c\xdd\x6b\x96\x91\x2f\x63\x3b\xe2\xdf\x2d\xda\x1e\x17\x66\xa0\x7c\xb7\xde\xe5\x8b\xf0\x6e\x02\x4f\xfe\x06\x51\x5d\xfc\x0e\xea\x6b\x52\xf2\x8a\x3a\x7a\x26\x9c\x71\x57\x14\x2b\x4e\x88\x19\x59\x3a\xbb\x45\xa1\xc0\x17\x2a\xbe\x81\x9b\xb4\x22\x6a\xeb\xbc\xb5\x11\x87\xcb\x8d\xe9\x34\xee\x70\x82\xb3\xec\x91\x6f\xfa\x4f\xc2\x2b\x51\xac\x9e\x87\xa5\xf6\x37\x59\xf0\x8e\x63\x28\x26\x01\x87\xbf\x1b\x5d\xc6\xca\x39\x09\xa2\xba\x70\x86\x6e\x72\x97\x36\x8a\x70\x9a\x27\x89\x4d\x7b\xc5\xf2\xdd\xf3\x17\xe4\x36\x83\x9d\x28\x1c\xa9\x22\x28\xb8\x0c\x66\x28\x5d\x3e\x8b\xa7\x80\xd5\x2d\xee\x99\xaf\xde\x54\xfc\xce\x00\x9b\x55\x96\x06\x66\xaa\xda\x4b\x6a\xb1\xe2\x0b\xf9\x7d\x42\xd9\x4b\x9e\x08\x28\x1c\x93\x53\x8f\x0c\x22\xef\x32\x24\x1a\xc6\x41\x5f\xab\xd8\x8e\xd6\xf7\x36\xd3\x91\xce\x3b\x7e\xfb\x9d\xa6\x8c\xc7\xde\xe6\x75\xf4\x0d\xad\xe1\x8c\xeb\x8d\x95\x5b\x5f\x1d\x60\x68\x98\xe2\x5a\x62\x61\x44\x4c\xf1\x8c\xeb\xca\xab\xbf\xf8\x7e\x82\x13\x32\x94\x20\x8b\xde\xd5\xe2\x8a\x74\xd8\xe7\x15\x41\x4a\x44\x9c\x53\xfd\x41\xb8\x78\x8c\x12\x4a\x94\xb7\x78\xe4\x54\x5c\x58\x97\x53\xb3\xf2\x93\xb8\xbe\x5c\xd6\x11\xaf\x65\x31\x1c\x3e\x49\x8c\xb1\x16\x8d\xc0\x8a\xbd\xe9\x5d\xf8\xc6\x11\xf1\x9c\x47\x9e\xe5\x79\xcd\x80\x21\xf0\x9b\x86\x24\xeb\x7b\xfe\xd8\x94\x3b\xd7\xcb\x22\x4b\xe6\xa3\x8d\xe2\x0e\xf9\xfe\xe5\x38\x8d\x81\xef\x75\x7e\x72\x8f\x6a\xd6\x26\x84\x4c\x43\x2a\xbb\x3e\xf8\x11\xc4\xa1\x2e\xa1\xd6\xd3\xd5\xe8\x44\xa3\x6d\x82\xe4\x41\xce\xf3\xe2\x9b\x17\x0d\x92\xfc\xfe\xd8\xa6\x21\xb8\x33\xdc\xd3\xf1\x6f\x43\xf2\x1d\x9b\xc3\x80\x37\xa9\x2e\x56\x82\xcd\x7a\xd2\xdb\x9e\xb9\x2f\xc5\x35\x9a\x71\x08\xc7\x76\xc1\xea\xd8\x65\xc0\x11\x1e\xe1\x91\x74\x5b\x68\x43\xde\x5e\x50\x37\xa2\x51\x8e\x78\x1e\x5e\xcf\x22\x51\x52\x03\xb1\xb9\x87\xdd\xb5\x5b\x1b\xa8\xf7\x20\xd8\x4b\xb8\xf1\xc9\xd3\x72\x39\x03\xdd\x56\x5e\xd4\xcd\x4e\x2f\x47\xa9\xaf\x4e\xde\xb9\xfa\x03\xad\x5d\x13\x8b\x90\xa5\x8b\x2e\x3e\xdf\xaa\xc4\x9e\xae\x27\x6e\xc5\xd2\x15\x87\xc6\x1f\x09\xfe\x6f\x31\x25\x17\xef\xca\x9f\x15\x41\x65\xdc\xc4\x29\x18\x71\x60\xca\x61\xff\xb8\x0e\x92\xb5\x6c\x25\xd6\xb1\x10\x1d\x29\x82\x2c\x96\xbc\x6e\x53\x2a\xe1\x9c\xbf\xec\x98\xfe\x6c\x78\x34\x7f\x9f\x07\xcd\x7a\x39\x79\x99\x02\xbf\x43\xff\x8e\x17\xb9\x85\xf7\x5c\x9e\xd3\xdf\xe7\x1e\x2e\x2a\x77\x87\xcf\x35\xbd\x3c\xb4\x09\x62\x36\xf6\x37\x3d\x86\xf2\x76\x93\xaf\x6b\xde\xce\x45\x59\x36\x2c\x71\x51\xae\x88\x1c\x2c\xec\x09\x72\x50\x5c\x8e\x9f\xb3\x1e\x28\x78\xc1\x19\xc1\x5c\x86\xa2\x2f\x76\x75\xc4\xa1\x2e\x7f\xd5\xfb\x96\xad\x53\x12\x0f\xc7\xd6\x40\xb2\x0d\x61\x14\x6f\x48\xfe\xf8\xe5\x9a\xc3\xe2\xf8\x73\x2b\x7e\xfa\x44\xcb\x52\x64\xbd\xed\xa0\x2e\xe8\xc0\x34\xdc\xfd\x1c\xd6\x69\xa7\x1a\xf7\x37\xb5\x65\xf0\x3f\xec\xca\xf6\xd2\x1b\xc0\xcb\x26\x28\x2d\x73\x84\xa9\x71\x5c\x8f\x1c\xc6\x6e\x84\xbc\xcc\xb4\xd0\x16\x8f\xbb\x91\x88\xb0\x56\xf5\xd4\xd0\x40\xc1\x0a\xac\xd4\xb2\xe5\xe9\x65\xcf\xe0\xa6\xfd\x0d\xe2\x2e\x77\xa6\x9e\xf1\xf1\x0d\xd9\x85\xb6\x9c\x74\xfc\xb1\x85\xb2\x74\x9f\x72\xfc\x2b\x96\xe8\x36\x4f\xd3\x53\x1c\x4a\x78\xbe\x4b\x68\xe9\x6c\x16\xa1\x68\xb9\x99\x66\x4d\x12\x0e\x96\x8b\x5c\xe4\x4f\x94\x13\xdf\xc3\x56\x59\x72\x24\x50\x77\xdd\xfc\xb3\xf2\x13\x51\xf3\x64\xe6\x3d\x53\xcf\x35\xdc\xba\x48\x03\xff\xd4\xec\xc1\xb1\x4a\x23\xf4\x29\x03\xa5\xb2\x7b\x6e\x21\xaa\xe7\xa2\x16\x53\x12\x1e\xe2\xcf\xd3\xe0\xc3\xe8\x14\xff\x26\x4a\x02\x21\x4a\x8a\xaf\x8d\x4b\xb1\x34\x91\xa7\x68\xa4\x87\x37\x26\x04\x4e\xf0\x45\xbe\x59\x2c\x24\x03\x23\x14\x63\x9c\x25\xcd\x50\x04\x50\xb6\x07\xd2\xdb\x5c\x0c\x6e\xe4\xc9\x8f\xa9\xeb\xfd\x9e\x5d\x0f\x9f\x6e\x7a\x21\x1b\x1b\x2d\x
63\x04\xc9\xd2\x8c\x70\x57\xc2\xa2\xf2\x7f\x46\x18\x16\x1e\xc0\x39\xd8\x11\x39\x2d\x52\x1c\xcd\x22\x21\x45\x90\x77\x04\x3f\x0f\xe8\x10\xf3\xc0\x5c\x9f\x60\x6b\x2e\x89\xf9\x60\x5a\x4b\xf4\x23\x32\x78\xb4\xa4\x89\x39\x97\x58\x2e\x90\x19\xe7\x13\x17\x67\x21\x52\xb3\x24\xc8\x00\xe5\x4e\xbc\xee\xa2\xd6\x1f\xfe\x4c\x48\xd0\x64\xbc\x52\x4b\x7d\x36\x30\xc1\x24\x41\xcb\x40\x03\x5b\x7f\x94\x61\xa0\xa4\x8d\xee\xe9\xa2\x74\x19\x07\xcc\x0c\x50\xdb\xf4\x97\x06\x00\x2b\x68\xd9\x6e\x78\x25\x14\x2c\xe4\xc4\x1f\x53\xcf\xf3\x61\x55\x02\x38\xa5\x03\x33\x2e\x25\x1c\x4a\xf8\xbb\x1f\x88\x1f\x0b\x1a\xf1\x36\x49\xb0\x7d\x3b\x3f\xc1\x59\x33\xc9\x4e\xb8\xf3\x2d\x21\x7d\x32\x71\x07\x7e\xd2\x98\xee\x4c\x41\xeb\x09\xb2\x63\x89\xac\x65\x79\x10\xbf\x4f\x48\x83\x9f\x51\xee\x71\xd1\xda\x23\xd7\x89\xe1\xf4\x40\x83\xaa\x4e\x5b\x79\x31\xb8\x80\x2f\xcd\x97\xcc\x4c\x83\x1d\x3e\x2b\xfc\xc0\x96\x67\xbe\x18\x10\x86\x52\x16\xcb\xba\xbe\xb0\x20\x9b\xdf\x9c\x99\x95\x6d\x68\xc2\xca\xa2\x31\x05\x55\x8e\xa7\xa9\x3e\x17\x9b\x1f\x3f\x9f\x0d\x33\xcf\x2d\x72\x58\x56\x4f\x79\xba\x3b\x60\x02\x44\xe1\x9a\x66\xa4\x9c\xe2\xfd\x8e\x2d\xe3\xb2\x97\xbb\xc0\xb3\xe5\xee\x95\x70\x7b\xa2\x85\x64\x80\x45\x7d\x61\x59\x4a\x38\x6f\xf4\xf8\xcb\x2f\x61\x0f\xc6\xf4\x7c\x5e\x54\x7d\xd0\x95\x5d\x1a\x1e\xa5\xa4\xe6\x4d\xe4\x06\x1a\xca\x0c\x88\x17\xde\x2c\x63\x04\xf7\xc5\xea\x02\xac\x95\x95\xb8\xb9\xf5\xe4\x43\x4d\xd2\x41\x13\x32\x2e\xbf\x17\x1d\x06\xd5\x84\xd0\xbb\xfc\x1c\xcc\x57\x39\xff\xfe\x73\x89\xce\x1a\x97\xa5\x1e\x33\x26\x89\x02\x12\x62\x4d\x39\xe1\xd9\xdf\x21\x8d\x5d\x71\xba\xce\xab\x37\x4c\x45\xf1\xa9\xd3\x25\x8e\x76\xed\x3c\xe5\x96\x8b\x84\x14\x51\x51\xd6\x2c\x7e\x1e\x25\x17\x48\xf1\x88\xcd\xc4\xf1\x7a\x5c\x13\x68\x92\x5e\x6a\xa8\x13\x2d\x8f\x52\x8c\xca\x44\x85\xf3\x84\x5f\xb4\x50\x62\x8e\x3a\xa4\x1c\x0c\x57\xef\x8c\x88\x5a\xd1\xa5\xe5\x50\x64\x57\x8a\xbe\xc5\x5a\xe1\x38\xea\xb5\x11\xea\xca\xbf\x65\x79\x33\x28\xf8\x57\x2c\x52\xf7\xb9\x68\x05\x97\xab\xa3\x7a\x1a\xe1\xc6\x52\xdd\xf6\xe2\xf2\x34\x25\x82\x77\x35\x10\xef\x8c\x67\xcc\xf1\x9b\xd3\xe4\x2a\x52\xbc\x0d\x78\x58\xa2\x3c\x19\xd2\x06\x2a\xe9\x22\xc3\x3c\x09\x08\x3e\x6b\x1d\x9b\x79\x37\x6c\x5e\xfd\x4c\xb4\xbe\x87\x0d\x16\x77\x7a\xf0\x57\xe3\x59\x66\x6e\x16\x96\x06\xa7\x86\xde\xf5\x2b\xa4\x0e\x01\xdd\x78\xa9\xbb\xf1\x2e\x78\xed\x36\xbc\x7a\x56\x48\x5d\x32\x4c\x2b\x78\x55\x23\xae\x2e\x25\x5d\x55\x01\x7e\x8f\x34\x4c\x1d\xeb\x7f\x13\xcf\xee\x33\x52\xef\x64\x66\x5a\x2c\xc3\x06\x8f\xc6\x62\x79\xdc\xf5\x09\x1d\xbd\xcd\x0b\x5d\xb3\xf4\xf9\x40\xcb\xa5\x99\x09\x63\x93\x0c\x51\xd2\x52\xe6\xd9\x19\x9e\xec\x52\x3b\xc4\x52\x76\xa2\xe3\x8a\xf3\x02\xbf\xbd\xd6\xc4\xa4\x18\x0e\x79\x38\xe2\x8a\xb1\xf5\xf3\x64\xe9\x0e\xa6\x84\x13\x94\x92\xc9\xf8\x47\x6e\xe4\x98\x03\xab\xd9\x30\x7e\x8c\x22\x9e\xc5\x30\x46\x16\x19\x3b\x79\xcd\x7b\x59\xc7\x96\xee\x16\x8f\xcd\x06\x93\xfd\x6f\x7e\x55\x43\x58\x45\x01\x1c\x3f\x73\xdd\xaf\x0c\x23\xb5\x6e\x83\x27\x05\xca\xb7\x83\xd6\xb8\xa1\x12\xab\x79\x76\x81\xbc\xe8\x51\x0f\x9f\x36\x89\x7b\x98\x55\xcb\x1b\x6d\x96\xe5\x7a\x04\x28\x52\xfc\x47\x26\x38\x96\xa9\xd8\x8a\x79\xc2\x9f\xd3\xef\xb7\x6f\xe3\xb3\xb7\xbb\x11\x8d\x98\xe1\x9a\xbc\xe3\x86\xad\x1f\x3c\x0f\x4f\x6e\x96\xba\xc3\xe1\x7b\xd2\x57\xa1\x8c\x4a\x79\x21\xb3\x0c\x99\x83\x80\x52\x7e\x92\xbc\xf1\xb8\x41\x0e\xbb\xe0\x3f\xbe\x70\x9d\x5a\x2a\xf1\x65\xbf\xb9\xa7\x13\x6e\x5e\xf5\x4e\x6f\xe4\x80\xe9\x34\x6a\xe6\xa6\x41\x06\x06\x62\xff\x90\x8c\x40\xd7\x06\xaa\xe3\xe9\x4e\xa9\x19\x62\x60\x9d\x70\x6b\x99\x29\x5e\x99\x63\xce\x2c\xe4\x96\xdd\x1b\x79\x9b\x5b\x62\xfa\x60\x29\x1a\x48\x75\xed\x8e\xc0\x1a\xb9\xba\x0a\x74\xa1\x11\xa5\x6e\x32\xdc\x12\x94\x5c\
x2d\x4e\x88\x9a\xc6\x66\x47\xa4\xb0\xbe\x69\x19\x3f\x38\x73\x3f\xb1\x94\x0b\x95\x6d\xfa\xc8\xad\x21\xd4\xf0\x91\x46\xe0\x19\xce\x13\x83\x1e\x2c\x85\xc9\xb9\x2b\x9c\x6d\xed\x91\x5d\x8f\x58\xbf\x72\x02\xe2\xfc\xa6\x1a\x70\x39\x58\xc5\xaf\xce\x11\x51\x43\x5f\x7e\x0a\xb5\x2f\x73\x0d\xcf\xcf\xde\x21\x33\x7c\xbf\xcd\x92\x14\xa8\xca\x0f\xd9\x9e\x12\x10\x48\x52\xd0\x2a\x56\x10\xfb\x37\x9e\x34\x88\xda\x83\x43\x99\x53\xcb\x02\xd1\x4c\x85\xf2\x9b\xb5\x91\x0a\x52\x13\x5f\x0d\x6a\xa3\x6b\x92\x98\x0c\xd0\xad\xd9\x73\xe8\xed\xa4\xc5\xdc\x27\x6c\x84\xc7\x6a\x31\x8f\xe2\x29\x5d\x4a\x17\xe5\x43\xec\xc3\x9b\xba\x65\xa7\x9b\x20\x41\x5e\x89\xd8\x02\xd6\x7e\x89\x0c\xaf\xa8\x87\x82\x46\xd3\xf7\x24\x71\x2d\x12\xa0\x2d\xab\xa5\xa3\x84\xbc\xdf\x7c\xf7\x0a\x55\xc3\x6a\xf7\xab\x18\xed\x3c\xbd\x10\x21\xe7\xd0\x95\x3d\x3d\xdf\x7f\x60\x46\xfc\x16\x05\xca\x50\x64\x0b\xa9\xdb\x0a\x61\x42\x58\x0a\xab\x17\x97\xa1\x96\xe7\x35\x64\xf0\x8f\xdf\x78\x1e\x13\xa0\xf3\xf2\x43\xa5\x36\x04\xa9\x2b\x8c\x92\x66\x2a\xfe\x6e\x7c\x22\x90\xda\xb8\x51\xbd\xe9\x50\x97\x6f\xe6\x7d\x8b\xf3\x03\x5f\xf5\xd7\x94\xf3\x00\x31\xbf\x1e\xa4\x60\x32\x53\x2a\x1f\x22\x1a\x66\x9a\x1d\x45\x61\x18\x03\x94\x04\x18\xa1\xb5\xd4\xaf\xe5\xe5\x2f\x49\xbf\xc3\xf1\xa0\xfa\x88\x03\x8d\x51\x43\xa0\xd1\x07\xce\xc1\x78\x71\x22\x27\xaa\xee\x78\x2d\xec\xe6\xf2\x84\x04\x00\xf1\x53\x6e\xf9\x22\x3e\xb7\xed\x31\x54\x69\x31\xfe\x40\xb0\x65\x49\x11\xe7\xf0\xe4\x9f\xc6\xf6\x47\x2d\x71\xf1\x7a\xd4\xae\xc7\x99\x29\xd4\x5d\x2c\x1b\x3d\xfb\xbf\xe2\xe6\x43\x13\x55\x34\x19\x56\x72\xd4\x58\xca\x9c\xb4\x41\x70\x1f\x41\x80\x18\x89\x4b\x50\x33\x1b\xe7\x1a\x6c\x1f\xb9\xd1\x34\x4b\x32\x75\xc7\x4f\x68\x1a\x96\x8a\xb0\x15\x57\xa1\x4a\xd9\xa8\x6b\x07\xa9\xe3\x53\x6e\xb0\x41\xcf\xb0\x48\x05\x7a\x17\x20\xcf\xfb\xea\x21\x1a\x3c\x50\x8b\x2b\x30\x14\x41\x60\x73\x7b\x89\x4e\x71\x7e\xcd\x4a\x61\x77\xda\x2e\x72\x95\xa1\x88\x5e\x52\x68\x38\xc0\x43\x90\x5d\x7a\xa1\xeb\x47\x9c\xaa\xd3\xd0\x99\x05\x96\xf5\xad\x08\x00\xc5\x80\x62\x69\x16\xf1\xc3\x69\xa1\x8b\xc8\x49\x14\x72\x04\x67\xfe\xd7\x02\xaa\x4a\xed\x4c\xd1\x58\x92\x14\x4f\x90\xae\x8e\xcc\x98\x16\xcb\x07\xd0\x4f\x4c\x33\xee\xd2\xe1\xa0\xdc\x24\x11\x15\xac\xf2\xec\xf5\x1e\xcb\xad\x42\x6e\x4d\x05\x61\x1b\xad\xe6\xee\xe9\x1c\xea\x79\xa9\xf5\x95\x25\x7c\x23\x34\x13\x97\x20\x68\x7a\xb9\x6a\x0d\x37\x61\x13\xf4\x94\x1a\xef\x85\xf9\xbb\x5a\x79\x44\xe9\x9f\x4b\x59\x51\xb5\xd8\xb2\x56\xe2\x14\xcb\xf0\x28\xb0\xef\x8e\x0f\xbb\x0d\x19\x94\xe3\x1c\xc0\xbe\xd2\x1b\x0a\xa6\xff\xa1\xe1\xdf\xf6\x08\x28\xc6\x85\x72\xa3\x85\x70\x0d\x98\x8e\x03\x39\x23\x92\x02\xa5\x5a\x2b\x5d\xd7\x23\x8d\x4d\x96\x4a\x43\x16\xb4\x44\x18\x5e\xc1\x23\x0c\x3b\x61\x62\x92\x90\x39\xa4\x28\xfe\x24\xb5\x85\xbe\xae\x25\x7a\x89\x40\xc2\x6f\x74\x94\xd3\x3f\x47\x3b\xa8\xd0\xd0\xfd\x84\x92\x64\x05\xb3\x9c\xea\x96\x82\xc1\xd5\x06\x88\xec\xd8\x51\xdf\xec\xe6\x89\xbd\x2f\xcb\x91\x2e\x25\xe9\xa8\xb0\x4a\xc4\xcd\x0a\xe9\xa5\x24\x34\x61\xb9\x94\xdf\x0e\x8f\xda\x77\xd4\xde\x71\x0d\x14\x39\x82\x39\x2e\xb0\x39\xa9\x31\x19\xb8\x8c\x99\x40\x45\x65\x1b\x22\x7c\x5c\xa9\xc2\x22\x1e\xb2\x16\xbd\x60\x0b\xa9\x62\x4c\xc9\x0a\xa2\x34\x7b\x2c\x5f\xe1\x6d\xe7\x0e\x44\xc0\x50\x3a\x6e\xf7\x0c\x77\xa3\xdc\xee\xcf\x96\x14\x7b\x87\xa4\x2b\x45\xc6\xfc\x1b\x22\xa0\x6d\x6d\x10\xea\x7e\x91\xf1\x48\x9e\x69\x4a\x33\x4f\x5c\x4a\xab\x91\x27\x16\x46\x7f\x22\x15\xe3\x0e\x13\xff\x83\x20\xc4\x6a\xfa\x97\xc4\xee\xa9\x72\x00\x45\xab\x0e\x39\x14\x8e\x29\x2a\x3a\x5c\x37\x6e\x79\x97\x70\xf0\x82\x38\x94\x6f\x56\x05\x5e\x61\xf4\x52\x20\x0b\x23\xc5\x23\x92\x34\xd7\x9a\xe0\xfe\xb7\x1a\xb2\x59\x54\x48\x97\x48\x94\x00\x02\xac\x12\xbc\xc6\x46\x83\x08
\xba\x03\xf9\x1f\xb8\x3c\x12\xd0\x47\xf6\xce\xb7\x34\xce\x03\xbb\x2c\x2c\x08\xf3\x23\x20\x87\x78\x05\x6c\x7e\x6a\xd6\x40\x38\x74\x68\x99\x2d\x46\x8e\xa8\x8b\xea\xc6\x7c\x9b\x53\xa0\xca\x98\x8d\x1f\x05\x37\x64\xe8\xcd\x5e\x8d\xa0\x8b\xe6\x34\x22\x7f\x9c\x8b\x29\xe0\x6e\x58\x83\xc2\xf9\x13\xbd\x64\xe0\xc3\xb8\x96\xa3\x29\x8e\x86\x25\x5a\xfc\x58\x5b\x34\x9c\x5a\xfa\x65\xbe\xae\xdc\xae\x28\xf5\xa8\x56\x6c\x41\x4e\x48\x63\x54\x1f\xe4\x5c\x0f\x62\x57\xac\x06\x05\xdf\x71\xa0\xcf\x69\xa5\x4a\xd4\x30\xbe\xb9\x66\xe4\x06\x97\xc1\x8f\x8f\x45\x68\x0c\x4a\x5a\x38\xfa\x5c\x15\x29\x2c\x17\xd3\x86\x47\xae\x91\x14\xfc\xfa\x81\x79\x9d\x40\x7c\xd8\x3c\x35\x9e\xac\xae\xe7\x18\xf1\x75\xbb\xaf\x0e\x3b\xaa\x21\x17\x2a\xd6\x8b\xc8\x93\x39\x40\x58\xc1\x8e\xe5\xaf\x4c\x79\x7e\x99\x69\x0d\x94\xa0\x10\x99\x2c\xde\xa5\x45\x4a\x5d\x15\x9a\xe4\x0d\xee\x41\xf3\x47\x10\xf1\xb5\xf7\x48\xd4\x28\x6b\x31\x15\xaf\x80\x0c\x38\x53\xd5\x52\xdc\xba\x69\x95\x3d\x2d\xb2\x15\xdf\xe0\x89\x41\x18\x9a\xa2\x5a\x05\xad\xb4\x71\x88\xeb\x4c\x55\x01\x7f\xe1\x11\x2e\x4d\x25\xa6\x00\x80\x4f\x4e\x92\x25\x92\x63\x74\x2f\xa6\xc7\x6f\xb4\x4a\xb6\xf5\x0b\xae\x67\x74\x87\x52\x1d\x1c\xd2\xa2\x75\xad\xdf\xe1\x62\xe1\x21\x9e\xe6\x1a\x48\xf0\xb2\x6a\xe2\x98\x41\x1a\x76\x8c\xf6\xd7\x3a\x09\x10\x3d\x6c\x6d\xd2\xd4\x25\x57\x26\x01\xf6\xd4\x28\x70\x50\x15\x43\x5f\xf4\x4d\x1c\x89\xca\xed\xe3\x44\xc0\x6b\xed\xb2\x62\x0b\x84\x65\x57\x89\x6a\x57\x47\x32\x4b\x13\xc8\x4d\xb7\x70\xa4\x5d\x6d\xf9\xc8\x56\x89\x75\xac\x5e\x51\x1d\x2e\x5a\xcc\xac\x32\x22\x79\x43\x56\x0c\xc1\x27\x0e\xb3\xa6\xbc\xac\xb4\x82\xde\x9c\xcd\x0a\xfd\xf3\x20\x82\x37\x41\xb7\x1c\xc5\x65\x0d\xea\xe9\xe4\x49\x10\x93\x27\x56\xac\x8b\x32\x45\x75\xa9\x57\x50\x73\xa8\xf2\x0f\x03\x86\x65\x0e\x9d\xa4\x98\x8a\x5f\xda\x06\x77\xfc\x92\x65\x6b\x7d\x8f\xd2\x5b\x23\x63\x05\xe3\x57\x43\x7f\xae\x52\xa4\x49\x16\x1c\x82\x28\xe8\x35\x0a\x18\x38\xb1\xab\x22\x6f\x4d\x39\x31\x04\x97\x30\x3b\x62\x25\xc7\x1e\x9b\xca\xea\x50\x10\xb3\x7c\xd4\x79\x7c\x03\xb5\xcb\x62\xe9\xf9\xcd\x57\x91\x4b\xe6\xa3\xd1\x78\x64\xb6\x0d\x4d\x54\x2d\xdc\xd7\xd5\xa2\xe2\xf3\x6a\x1a\xf8\x5b\x2d\x9f\x6a\xa4\x71\x00\x76\x92\x69\xe8\x39\xdc\x83\x24\x39\x76\x58\x1f\xba\xcc\xe5\x3b\xbc\xf9\x9b\x87\xcd\x9a\x6a\xfc\x0d\xc5\xd6\xd1\xcd\xe6\xc6\x2c\x45\xbb\x13\x20\x32\x73\xeb\x8f\xf4\x3b\xf4\x88\xb8\xbb\xef\x48\xe8\x9d\x24\xff\xec\x80\x51\x43\xa0\x81\xce\x8a\x71\x90\x12\x4b\xbe\xf9\x87\x35\x15\xe6\x1a\x5b\x92\x6d\xcd\x25\x65\x63\xb9\xf9\x73\x5d\x27\x37\x0f\x87\x8b\x85\x23\xed\x26\x41\x4e\xc4\x01\x16\xe4\x8a\x0f\x1e\x21\x48\x3a\x4a\xce\x55\xf5\x0c\xa4\x13\x14\x10\x13\x39\x38\xe2\xe9\x1d\xf0\xce\xe0\x0f\x0c\xcf\x59\xb2\xf7\x51\x6c\xf4\x26\x2d\x34\xcf\x9d\x4e\xc2\x81\x2f\x38\xdf\xf4\x65\x59\x7a\x28\x87\x99\xf0\x3f\x3e\xf2\xd1\xd1\xe7\xa6\x25\x28\x88\x8f\xbf\x1f\x27\x38\xea\xb3\xe8\xfd\x07\xd7\xeb\x48\x44\xf2\xac\xb1\xac\x5f\x8e\x06\x83\x24\x48\x7d\x2d\xca\x75\x3c\x11\x2b\x7d\x30\x96\x07\x33\xa2\xdd\x9a\xe6\xf7\xe7\x97\xe2\x6b\x9a\x89\x38\xa3\xee\x78\xc7\x71\x3b\x77\xea\x00\xee\xfb\xaf\x35\x02\xfe\x3d\xde\x27\x49\x94\x52\x74\x0d\x01\xd9\xa3\xe2\x16\x8b\xfe\xd0\x76\x21\x6d\x6f\x5b\x80\x54\x64\xff\xab\xc9\xa0\xaf\xbe\xe1\xf4\x20\x33\x4f\xdd\xe0\xc7\x36\x76\xbe\xa3\xfb\x82\x2c\xfe\x8e\x0a\x5d\x90\xda\x4d\xc5\xb9\x68\x59\x13\xe7\x41\xa0\x19\xf7\x94\xa7\x01\xbe\xc1\xa7\xfc\x3b\xb1\x6a\x75\xd4\x0a\xc4\x3f\x81\xdd\x78\x03\x93\xbf\x31\x9f\xc5\xc7\x79\x5d\x4a\x40\x47\x97\x25\xd3\x94\x4c\xf5\x5a\x24\x7a\xbe\xb1\x98\xa1\xa8\xfd\xbf\x52\x46\x6a\x43\x35\x7e\xd4\x1b\x52\x75\xbe\x31\xb5\x39\x86\xa7\x00\xfb\x98\xc1\xae\xe9\x54\x44\x84\x9a\xca\x45\xe1\xa8\x15\xc8\x61\x2b\x5
d\x9c\xd6\x7c\x4f\x7f\x6b\x95\xac\xca\xc4\x8c\xed\x37\x2d\x0e\x9d\xc1\x4a\x96\xa9\x4a\xd9\x9f\x5a\x5e\xe6\x59\x65\x31\xaa\x9d\xde\x8c\x19\x06\xc9\xae\x0e\x40\xf6\x7f\x1b\xa4\x4f\x99\x40\x8a\x7a\x25\x44\x73\x60\x11\xde\x92\xa0\x2b\xb3\xa0\x56\xbb\xbf\x68\x56\xff\x62\x43\xcb\x56\x02\x47\x7a\x4f\xd1\x0b\xb1\x97\xb4\x64\xf6\x96\xe6\xc7\xd4\x8f\x92\x33\x84\x73\xc2\x44\x8f\x99\xbc\xc8\x96\xf7\x27\xb2\xc8\xd2\xcd\xdb\x5f\x8c\xcb\x1e\x5a\x58\xd9\x57\x9c\xa5\xb8\xda\xb1\x61\xb9\x42\x92\x05\xbb\xd3\x13\xa0\x3f\x4c\x1f\xe9\x81\x35\xf7\xd0\x7b\x0f\x25\x1d\x51\x34\x41\x7b\xcd\x7e\x8a\xab\x95\x24\xff\xb3\x0a\x9e\x2c\x07\xf9\xf0\xdb\x38\x37\xaa\xa7\xe0\x8f\x18\xd0\x10\x6d\x7d\x5b\xb1\x0b\x33\x8e\x6e\x11\x7b\xaa\x89\x2f\x4d\x03\xce\x65\x27\x29\x49\x07\xa5\xfc\x42\x3e\x60\x5d\xd8\x63\x0a\xaf\x68\xbe\x10\x63\x4c\xcc\xa9\x1c\x72\x78\xee\xaa\xf4\x1d\x04\xeb\xcb\x56\x27\x07\x66\xa8\xc8\x39\x96\x68\x22\xa0\x31\x5e\x52\x91\x33\x0b\x40\x70\x8e\xca\xaf\x11\x16\xbc\xc6\xcb\x42\xc4\x91\xfe\xd2\x4d\xb0\xf2\x45\xb7\xa8\xd7\xdd\x48\x10\x2a\xc9\xad\x56\xe3\x44\x96\x16\x0f\x0e\x11\xc2\x5c\xc5\xb9\xbf\x91\xa3\xa4\xd8\xb1\xb8\xfc\x03\x30\x72\x17\x0e\x39\x86\x38\x4e\x6f\x04\x8e\x4d\x2e\x61\xed\xb8\xa6\x4b\xd7\xc3\x8d\x5e\x1c\xb0\xdc\xf5\xea\x1f\x09\x1b\xa9\x9e\x99\x2e\x6a\x82\xd7\xa6\x25\x77\xbc\x02\x62\x55\x5e\x5a\x8b\x0f\xde\xe7\x62\xea\xef\xfc\x77\x24\xf7\xc9\x9b\xd0\xd4\x37\x0f\x67\xcd\xd5\xfb\xae\x5a\xd5\x2a\x09\x42\x52\x4e\x6a\x09\xcb\x47\xd2\xad\x80\x6d\xa5\x91\x22\xac\x96\x0a\x44\x52\xad\x71\x12\x0e\xeb\x85\xf5\xa6\xb6\xbd\x5e\x0a\x89\x0a\x98\x12\x0f\xba\xb6\x5a\xfb\xda\xfc\x40\x28\xc7\x48\x2c\x41\x10\x9c\x58\x76\xa3\xdd\x4b\xc2\xf9\x5e\xa4\x73\x12\xa7\x33\x88\x97\x62\x41\x51\xa6\xcc\x91\x79\xc5\x9c\xaf\x4c\x35\xed\xdb\xb4\x9d\xaa\x22\x27\xa9\x6f\x66\x62\xf2\x61\x49\x25\xb7\xf4\x88\x02\xe9\x17\xc8\x6f\x5d\x20\x46\xbf\xbd\x04\x94\x41\xc1\x4f\x89\x7e\x37\xdc\x8a\x46\x3d\x25\xb3\x85\xc8\xc7\xa6\x0c\x77\xb7\x93\xfa\x7a\x74\xf0\x96\x94\x1b\xec\xf9\x11\x02\xbb\x16\xb3\x7e\x10\x3c\x46\x0c\x84\x44\xb6\x5b\x6e\x90\xba\x5c\xd3\x9b\x0e\x52\x16\xe8\xd4\x13\x38\x92\xe3\x68\xc1\xb1\x92\xfa\x84\x35\x1e\xb9\x2e\xf4\xd3\x53\xa1\x58\x04\xf0\x2f\xe2\xf4\x1b\x0e\x88\x66\x38\x06\x36\xac\xfc\x62\xb2\xb0\xb6\x8b\x58\x9f\x6f\x26\x16\xd1\xde\x28\xb6\x88\x4a\xb2\x52\x13\x11\x8a\x28\x70\x0b\x83\x49\x45\x26\xf4\xda\x24\x61\xea\xb8\x8a\x9c\x43\x70\x9a\x83\x64\x83\x6f\xac\x8d\x94\x8a\x12\x60\x79\x37\x12\x8b\x26\xcc\x27\x2d\xfb\x2b\x1a\xa9\x50\x97\xcd\x04\xb4\x3b\x6b\x47\xbb\x65\x3a\xfc\xaa\x16\x63\xfc\xd8\xfd\x20\xfb\xd7\x3c\x23\x51\x55\xd4\xd5\x87\x5a\xf7\x1d\xba\xed\x17\xd3\x91\x34\xbf\xd3\x06\xb9\x91\xaf\x70\x45\x61\x78\x35\xac\x7c\x79\x94\x6e\xc5\x0d\xdb\x2a\x94\x11\x06\x35\x07\xd6\x3c\x56\x94\x12\x0e\x95\x36\x29\x70\xcb\x2d\x26\x0a\xa0\xd0\x9a\x15\xed\xb3\x86\x0a\x91\x65\x8c\x86\x8f\x5b\x4d\x92\x7d\x0c\x67\x04\xb0\x8e\x91\x13\xb9\x10\xec\xd2\x5e\x5c\x51\xd8\x7e\x27\x40\x2d\x56\x8a\xbb\x91\xf1\xb8\x5e\x7e\xf5\xdb\xe4\xd1\xd2\x73\x57\xfa\x03\x13\xf5\xf3\x45\x6a\xaf\xee\x79\xcf\x30\x5e\x28\x59\xe6\x89\xe6\x7e\x6c\x04\x97\x20\x39\x57\xe9\x1e\xb2\xe2\x95\x38\x89\xc7\x8d\x45\x5e\x11\xd8\x5f\xfb\xde\x2b\x63\x15\xab\xaa\x45\x34\x5e\x78\x07\x65\xb7\x98\x60\xd6\x4a\x00\x1c\x77\x04\x18\xe5\xa0\x3a\xf2\xe7\xb9\x63\x55\x52\x8e\x17\x7f\x31\x66\xe1\x5d\x13\xe4\xbf\x45\x1b\xd0\xa9\x9a\x52\x3e\x8a\x09\x00\x02\x33\x03\x16\xd9\x1b\x19\x06\x81\xcb\xee\xac\x42\x1a\xc6\xc6\xcf\x97\xa5\x8a\x7d\x9b\x3a\x03\x3f\x9d\xf2\x1e\xdf\xec\x3e\xb9\x9c\xb0\x9d\x09\x14\xba\x03\xcf\x1d\x11\xf4\x97\x7f\x44\xde\x76\x4e\x6a\x1d\x2f\xc2\xe5\xac\x18\x89\x29\xb4\x30\xb2\xc5\xbc\xc8\x
fd\xc2\x04\xf7\xe6\xba\x40\x51\x34\xd6\xfb\xcd\xfc\xc1\x5d\xba\x12\x6a\xde\x71\x54\x4c\x1c\x5e\xd3\xba\x02\x8b\xc3\x4c\xa6\x27\x56\x2b\x0e\x22\x1e\x78\x25\xd6\x14\x43\x87\x9a\x57\xb7\x5b\x84\x52\xfd\x1b\xc4\x3b\x8c\x9a\x23\xeb\xc6\x89\x4d\xc5\x89\xaf\xa3\x64\x4a\x33\x7f\x3d\xb0\x5f\x4a\x7c\x60\x92\x8b\x19\xc7\x15\x19\x23\xde\x82\x6d\x4b\x92\x1d\x95\x5a\xcf\x23\xaa\x4f\x21\xf8\xc1\x72\x1f\x84\x9b\x59\x6d\xc6\x1f\x65\x03\xf8\x65\x17\x84\xc2\xfc\x52\x4d\xf9\xca\x4b\xc4\xd9\x24\xa5\xe1\xcb\x37\x29\xd9\xfa\x41\x2c\x9b\x8f\x5a\x63\x3d\x5a\xe2\x53\xc2\x0f\xbe\x92\x23\xb2\x19\xe4\x19\xa3\x97\x68\x2b\xc3\xc6\x79\x40\x2c\xb9\x49\xbf\xca\x60\x0d\xa8\x50\xb1\xff\xd5\xae\x16\x0a\x6c\x7f\xd1\xd4\x34\xe5\xcc\x07\x5a\x6b\x95\xc6\x81\xaa\xa2\x58\x92\x9d\x14\xc6\xe8\x7e\x96\xd4\xfb\xe7\x01\x32\xf4\x81\x15\xf8\x9b\x26\x62\x45\x8a\xd7\x0f\xd9\x30\x0f\x03\x05\xe8\x31\x8a\x8e\xbe\xd4\x97\xd9\xc6\xc9\xac\x48\xd5\x00\x6e\x65\xd2\x40\x8a\x05\xcf\x92\xdd\xc3\x8c\x38\x21\x4a\x30\xda\x68\xfd\xf5\x37\xa9\xb8\x70\x17\x32\xb3\x8a\x8e\x34\xd9\xa7\x3c\x49\x02\x82\x15\xe5\x95\x18\xa9\x97\xd1\x09\x81\x03\xe7\xa5\xc4\x2a\xbb\xa0\xef\x8a\xf3\x31\xa2\x13\x66\x9a\x5f\x44\x69\xcd\xef\x08\xc0\xe1\x47\xa4\xec\xc8\x35\x63\x75\x66\xcd\x97\x5c\xa8\x80\x0e\x5e\x15\xe8\x2f\x42\x17\x2a\xf5\x09\x5c\x4f\xcd\xea\x82\xc5\x05\x42\x62\xd2\x2c\x0f\x60\x45\x70\x28\x3c\x44\x35\xbd\x2f\x62\xb1\x3a\x4f\x6f\xed\x3a\x40\x7c\xb0\xe6\x02\x83\xea\x93\xf0\xc2\x3d\x04\x8f\xd5\x5c\x18\x2b\x8a\xfe\x5d\x01\x5f\xae\xa8\x3c\x74\xed\x08\x9e\xb8\x0f\xb2\x97\xb5\x95\xc4\xd7\xb5\xa1\xd3\xcf\xee\x32\xff\x46\xca\xcc\xe6\xa6\x1c\xaf\x1c\x5b\xc9\x8f\x08\xfc\xee\x1d\x8c\x00\x07\x5f\x1e\xe2\xb7\x56\xf3\x9a\xc0\x0c\x08\x0d\xc5\x53\xf2\xf1\xf1\xe6\x6a\x61\x97\x94\xde\x48\x8d\xca\x74\xe7\x10\x81\x51\xb6\x83\x73\x2b\xc1\x8c\x6b\xa5\xe9\x4d\x0b\xd2\x91\xfc\xee\x5b\x4b\x74\xd3\xb9\xb5\x9e\x0a\xfe\x20\x99\xf9\xbe\x64\xc1\x7c\x83\x6b\x9e\x4a\xba\xa0\x12\x71\x1a\x66\x43\xa1\x27\xaa\xf0\x31\x5b\x83\x93\xf2\x08\x9f\x41\x9a\x3c\xcf\x6f\xf5\x93\xff\x53\x6a\x67\x95\x55\x0e\xa7\x9f\x32\x10\x4b\x28\x21\x49\xac\x56\x4b\x7f\x54\xff\xb4\x56\x86\x03\x51\x48\x59\xe2\x29\xb5\x2e\x52\x31\x45\xe7\xbf\x8d\xd6\x10\xa5\xfc\x31\xb6\xe9\xd2\xdd\xbc\x48\x90\x0d\xcd\x8a\x74\x78\xf5\x51\x20\x61\x40\x16\x95\x33\xbd\x8a\x3b\x82\xa5\x33\x33\x88\x78\x8f\xd1\x3e\x1d\xb0\x46\x94\x84\xe1\x53\x04\xf9\x25\x4c\x7f\xab\xb1\xbc\x31\x99\xc7\xa4\x2a\x0b\xf8\x59\x83\x96\xe2\xc2\x02\xf8\xa6\x64\x00\x07\xaa\xc3\x1f\x8e\x29\x96\x86\x05\x9c\x6f\x6d\x3d\x53\x1b\xa0\x65\x6a\xfb\x30\x0a\x0d\x2c\x27\x4c\x42\xba\xb2\x0b\x32\x14\x6f\x57\x31\x2d\x70\x54\x8f\x52\x0f\xb0\xdb\xda\x04\x20\x04\x61\x82\xeb\x97\xae\xf2\xe6\xd2\x1c\x2f\x48\xd2\x90\x87\x48\x10\x83\xb0\xf1\x65\x85\x56\x58\xf7\x16\x9b\x1f\xcd\xb7\xf0\xda\xcb\x9b\x52\xdf\xec\xd8\x8b\x86\x41\x5f\xc3\xce\xef\x4f\xd2\xc2\x12\xa4\x4f\x48\xda\x70\x5a\xbf\x91\x1a\x3e\x95\xd2\xf4\xe1\xa9\xc1\x61\x16\x44\xa0\xb1\x14\x45\x28\x39\xeb\x5b\x64\xe3\xa1\xd0\x2e\xe1\x79\x12\x21\x16\x49\xea\xb9\x35\xa3\xbe\xf0\x8a\xcc\x74\x15\x3f\x48\xe2\x92\xb4\xb2\x60\x09\x2f\x67\x49\x6c\x20\x3f\x88\x79\x15\x10\xb7\x3b\xf1\x5b\x56\x0e\xa1\xde\x7a\x23\x3a\x55\xa0\xda\x0e\x10\xff\x62\x3d\x67\xa5\xc9\xb0\x56\x20\x24\x5b\xb6\x57\x22\xba\x1a\xa9\xde\x1c\xf0\xa6\x51\xf6\xc8\x4e\xf4\x55\xe2\xce\x2b\x6b\x55\xde\x71\x66\x60\x32\xbf\x4d\x27\xe7\x65\xa7\x70\xb8\x1f\x09\xba\xab\x1a\x97\x33\x86\x9a\x4f\xbf\x51\x07\x3a\xac\x6c\x67\x38\x57\x99\x41\x11\x86\x59\x0d\x9a\x55\x84\x74\xdb\x19\xea\x8d\x23\xdd\x95\x4b\x73\xf4\xc6\xb8\x57\x5a\xbc\x60\x98\x9f\x2a\x6f\x48\x54\x3d\x26\xe2\x5b\xee\x24\x5f\x31\x46\x0f\x5f\x3d\xd1\
x55\x86\x76\xf5\x55\xff\x6c\xa4\xb7\x83\xab\x31\xa1\x6c\x55\x13\x27\x81\x44\x28\x82\x1e\x78\x08\x03\xc7\x37\xc0\x16\xe2\x27\x56\xed\x03\x6b\x8f\x54\xe2\xef\x8a\x8f\xe6\x99\x25\xf5\x78\xb5\xc8\x6c\xac\x91\xaa\xe9\x28\x98\xda\x0b\x9a\x9f\xeb\x57\x01\x97\x90\x1e\x85\xfe\x77\x6c\x48\xba\x0b\xc7\x53\xd4\xc6\xf3\x98\x6b\xe7\xb3\x5f\xb7\x91\xdd\x88\xab\xe4\x1a\xc9\xcf\x77\xfe\x73\x92\xb4\x87\x64\x70\x93\x32\x5b\x89\x56\xac\x99\x4b\x3f\x96\xda\x16\x3d\xad\xc2\x24\x67\xdd\xea\x09\xba\xbf\x61\x43\x7f\x2f\x59\x8a\x12\x65\xf3\xea\x4a\x6b\xfb\x08\x1b\x8f\x51\x14\x2e\x1d\xcb\x98\xa3\xad\x1a\x99\xb4\x30\x84\x56\x3c\x95\xc4\x73\x24\x79\x5f\xab\x27\xf8\x21\x9a\x6e\x48\x29\x9b\xf4\xba\x7d\x29\xa7\x1f\x7a\x62\xda\x24\xdd\x07\x52\x9e\x81\xf0\xfb\x83\x82\x4d\x0a\xe2\x92\xe3\x20\xd7\xef\x81\x47\xf1\xf3\xb2\x81\x2c\xbc\xc8\x64\x66\x71\x2d\xb2\xcd\x73\x6a\x55\xdd\xd2\x60\x97\x23\x98\x23\xfa\x5f\x90\x9d\x78\xb2\x8e\x5f\x0d\x0a\x5c\xbf\xa4\xc0\xbc\x39\x2b\xb9\xc3\x47\x42\xd7\xea\x5a\x9c\xd7\x01\x1d\xc6\xf3\x51\x8e\x3f\x9f\x8d\xe9\xba\x21\xcd\x1a\x61\x8c\x9a\x9b\x93\x7e\x3a\x82\x31\xbe\xb0\x02\x17\x0a\xb2\xc1\x1e\x20\xca\xf0\x8f\x45\x91\x02\x8d\x1a\xa8\x86\xce\x09\xbb\x80\xaa\x26\x40\x46\x36\x4c\x4c\xdf\x8c\x77\x1c\x76\x7a\x1e\x0e\xf8\x92\x39\x01\xc4\x28\x2d\xbd\xf4\x13\xbc\x82\x34\x7e\x5e\xd4\xb5\xb0\xcf\x10\x12\xf8\xa5\x90\x87\x96\x96\xb2\x85\xad\x13\xa9\xb5\xc4\xd7\x3f\x26\x54\x20\x73\xde\x6f\x54\xdc\x42\x96\x19\xaa\x73\x85\x3e\xb4\xa8\x48\x5d\x28\xd6\x5a\x4d\x6c\xc4\xf6\x43\x0d\x66\x6e\x44\x6d\x74\xe4\xe1\x17\x4c\x6e\x00\x8e\xdf\xb0\xc1\x6c\x36\x25\xc2\xb6\xa5\x14\x37\xc2\xac\xd0\x10\x29\xff\x91\xda\x8f\x14\x50\xde\xe0\xbc\xa9\xea\x81\xf9\xc2\x90\xaf\xc3\x1b\xa0\xf0\x8d\x44\x4b\x55\x3e\xfa\xdc\xd0\xb9\x57\x55\x3a\xbc\x76\x16\x5e\x02\xe3\x9a\x9f\x9d\x20\xdd\x55\x18\x6e\xed\xd4\x4f\x52\x67\x12\x07\xe3\x22\x5d\x2d\x0f\x21\x01\x8a\x69\x7c\x4a\x37\xbd\x22\x0a\x12\xfa\xb8\xb3\xf4\x16\x6c\x34\xe6\x59\xf8\xff\xc8\xab\x19\x85\xf9\x77\x12\xaa\x73\x4b\xe2\x46\x5e\x8a\x84\xed\xd2\x87\x04\x65\x64\x96\x53\xc8\x43\x77\x49\xf0\x8d\xf6\x12\x91\xd8\xa9\x48\xbb\x60\xed\x4e\xf4\xb9\x07\xe4\x57\xe6\x8d\x38\x09\x63\x8e\x83\x23\xf2\xc1\x01\xad\x72\xbc\x5f\xd8\x40\x83\x74\xdf\xee\x33\x3b\x55\xb5\xf8\xaa\x64\xa1\xc6\x71\x87\x45\x1a\x22\xad\x80\xe1\xd4\x67\xae\x99\x52\xd0\x1f\x45\x35\x7f\xf9\x07\x83\x90\xe9\xa6\x5d\x3c\xc2\x18\x83\xae\xce\x5a\x45\x80\x1a\xd9\x9c\x9a\x93\xc9\x1d\x6b\x84\x93\xb6\x00\xa9\x41\xdd\x95\xca\x4d\xa8\x59\xbf\xc8\x7f\x3f\x97\x3b\x93\xca\x8d\xb2\x7e\x57\x3e\x04\x7a\xd2\xb5\xb7\xb1\x11\x5f\xd1\xb5\xe3\x47\x56\x76\x9f\x7c\x33\xd7\x38\xfe\xdb\x00\xcb\xc3\xc3\x08\xd0\xca\x2a\xf8\xb7\x14\x32\x0c\xe4\xc3\xa4\xfa\x2a\xbc\xd0\x65\x41\x50\xcc\x7b\x46\x88\x93\x55\x40\x3c\xd6\x22\x36\xb7\x13\xc7\xf5\xb7\xeb\x17\x4e\x7f\x47\x4c\x4e\xe5\x8c\x84\x88\x44\x72\x4f\x5a\xf5\xe1\x83\x44\x5e\x25\x94\x64\x32\x11\x11\xa7\xb1\xa9\xc4\x0b\x09\x74\xc4\x51\x60\x04\x3f\x5e\xe0\x4c\xc3\x85\xa1\x9b\xb3\x0f\xf2\x26\x9d\x94\x54\x44\x4f\xa9\xe4\xc5\x56\x75\xf7\x8d\x54\x98\x77\xe4\x23\xd7\x4b\xdf\x04\x49\x0b\xca\xde\xb1\x93\xa9\xc3\x4e\xf4\xf8\xd9\x44\x21\xc6\x01\xda\xf5\xfc\x88\xe9\x3f\x92\x34\xcc\x92\xb6\xfc\x6b\x2b\x62\x25\xb0\xa2\x26\xc2\x0e\x8f\x22\xef\x22\x0b\x38\x21\x5f\x89\xf8\x46\xe0\xd7\x03\x6b\xac\x8c\x76\x84\xe1\xe1\x99\x3e\x9e\x94\xb9\x1b\x65\x01\x90\x2d\x47\xcb\x82\x87\xea\xd3\x0d\x85\xf7\x72\xb6\x1b\xd6\x48\x4b\x40\xe3\x5a\x62\xde\xa4\xf5\xca\x55\x70\x6d\xc5\x4d\xff\x16\x8e\xfc\x79\x6a\xcb\xd8\xf9\xf7\x69\x1e\x88\xb8\x4b\x04\xe2\xf4\x6c\x46\xae\xd1\x36\xca\x4d\x08\xd8\xad\x49\x2c\xd2\x2c\x72\xf8\x4e\x4b\x4a\x8d\x86\xe9\xb9\x1f\xf7\xb0
\x84\x98\x8a\xa0\xaa\x97\x39\x94\xa4\xb8\xdc\x50\xa5\x5d\x82\x9f\xd9\xbc\xbd\xd9\xfb\x5f\x37\xc1\x2f\x7c\x7d\x8a\x89\x41\xfa\xa8\x73\xa8\xc4\x31\x77\x86\xdd\x01\x82\x96\x35\x4a\x1a\x97\xa7\x89\x55\x9e\xca\xa0\xb9\xd2\x4c\x43\x96\x9e\x20\xce\x1d\xe3\x44\xf7\xf0\x20\x18\xc7\x56\xb5\x8c\xd4\x2a\x31\x23\xbd\x51\xe0\xaf\x03\x01\x69\x8a\x24\x3b\x60\x8f\x91\xbb\xb6\xcb\x9c\x14\xd1\xe7\xbf\xb8\x3b\x57\x3b\x5f\x4a\x10\x66\xc5\xb6\xf2\x57\xee\x58\x25\x75\xba\x2d\x8b\xce\x3a\x77\xf1\x64\x11\x9b\x88\xba\x7c\x09\x53\x72\x62\xe9\xa8\xb6\xc3\xf2\x04\x42\x65\x3d\x26\xba\xd8\xc5\xed\x27\x97\xa0\x98\xab\x9b\xcc\xd0\x61\x0f\xe5\x01\xce\xf4\x0d\x73\x8e\x1b\x22\x95\x89\x71\x3f\xd8\x68\xa1\x0e\xa3\x72\x86\x48\xba\xdd\xcc\xc0\x18\xf5\xa4\x44\x9c\x14\x3c\x7f\xbc\x94\xb7\x0c\x6a\xc7\x10\xe9\x2d\x4d\xf0\xc6\x3c\x48\x26\xb2\x8a\x7e\x85\xc4\x74\x21\x03\x0b\xdb\x26\x71\xca\xa5\x43\xa3\xc4\x46\x0a\x19\xc2\x87\x47\xb7\xaf\x9f\xd8\x79\x13\xb8\x7f\x1d\xde\x73\x2e\xf9\xb7\x2d\x0c\x7b\xc0\xf6\x64\x2b\x0b\x8d\xe8\x61\xe3\x46\x06\x36\x9b\x7d\x70\x62\x76\xe4\xca\x6a\xfa\x37\xea\xea\x9c\x57\x96\xa2\x5a\x59\x72\x41\x43\xef\x32\x0d\x03\x49\x00\xcd\x42\x40\xdc\x28\xd1\x42\x57\x55\xd5\x3f\x29\x67\x40\x8e\x91\x39\x8f\x2f\x0b\x7d\xde\x2f\x67\x9c\x87\x3d\x8a\xf8\x3c\xa4\x96\x61\x1a\x9b\x7f\x28\x4c\x36\xc9\x43\x0a\x03\xcc\x9c\x1f\x6a\x89\xeb\x7d\xf7\x8a\x59\x6d\x44\xf1\x6d\x92\x8c\xd6\x6d\xde\xa4\xd1\xb0\x36\xad\x9f\x6a\xbe\x96\x38\xce\x8b\x59\x6a\x40\x2b\xbb\xc7\x56\x3f\x2a\x2b\x67\x04\x96\xe7\x47\x91\x67\x89\xc2\x0d\x4b\xa4\x4b\x9d\xec\x80\xbd\x46\xfd\xaa\x0e\x3d\xa0\x06\xac\x2d\x76\x8c\x8b\x2f\xe3\x4a\x8c\x1f\xc1\x17\x54\xb8\x5e\x1a\xc1\xa0\x17\x15\x41\x95\x06\xb7\x9c\x49\x95\xb7\xe0\x1e\x8d\x87\x34\x93\x00\x7c\x50\xf2\x6b\x69\x50\x2f\x51\xce\xf0\xe2\x44\x8d\x86\xe5\xde\x2a\x1b\xac\xe1\x3e\xa6\xcc\x88\xbd\x88\x1f\x4f\x22\xb4\x67\x2f\x64\x9b\x31\x2c\x4c\x6e\x64\xa1\x27\x30\x93\x03\x97\xa6\x67\xfd\x35\x58\x43\x86\xbf\xc9\x62\x7b\xd0\xba\x4e\x26\xc9\xcb\x4c\xd8\xc0\xd8\x4d\x8d\x1b\x2b\xc9\x3f\x5d\xcc\x2b\xb1\x44\x59\x4f\x97\x51\xd5\x09\xad\xb7\xce\x0b\x7b\x08\x0e\x1a\x9d\x10\x11\x43\xfa\x5a\xe9\xa9\x6f\x74\x60\x53\xb8\x68\x81\x5e\xda\xb9\x7b\x99\x35\x38\xb2\xc4\x4f\xc7\x62\x60\x13\x46\x66\xd9\x45\xd0\x67\x06\x77\xda\x58\xbe\x49\xc2\x6a\xba\x33\xc8\xfa\xb3\xe1\xfc\xa7\x6e\x64\xec\xd3\x43\xc7\xf5\x69\x46\xb8\xf3\x4b\xa4\x0e\x1b\x4d\xb6\xcf\x56\xd1\x0d\x6d\xe8\xb3\xb0\xcc\xc9\x68\xa3\x4a\xa6\x8b\x4f\xc7\x22\x96\xa8\x7b\x89\x59\xba\x05\x21\x7a\xb5\x23\xc3\xe8\xe3\xff\xe5\x5d\x73\x91\x7f\x46\xad\x66\x03\x76\xf0\x67\x4b\xd2\xba\x27\x07\x09\xb5\xec\x81\x25\x9a\x21\x5e\xc8\xbd\xe6\x2e\xc1\x69\x15\xeb\x9b\x22\x63\xf2\x5d\x0b\x68\xa8\x75\x3d\x64\x0a\xa4\xfe\x40\x64\xcd\xc1\xca\x69\xa4\x0e\x48\x88\x0c\x9b\x20\xf3\xba\xe8\x6e\x84\x9b\x02\x81\xf0\x87\x75\x16\x18\xd6\x8b\xdc\x1f\x75\xf5\x98\x75\x7e\x32\xc6\x50\x17\xf1\xd2\x21\x07\xde\xd0\xea\x2e\x4a\x33\x3d\xf5\xf6\x33\x09\x6a\x73\x93\x72\xcb\x67\x90\xd4\xfc\x88\xf0\xa0\xaf\x69\x18\x34\xfb\xd7\xd1\x52\x13\xb9\x26\xeb\x18\xb1\xf3\xf2\x38\x8c\xa7\x71\x2a\xf2\x08\xe3\x35\x97\xb8\x91\xdc\xc8\x75\xca\x09\xdc\xbe\x7b\x26\x5d\x48\xa3\xf5\xe4\x2f\x47\x89\x4b\xe1\x78\x21\x96\x7e\x13\x74\xbe\x0a\x1b\x84\xa9\x43\x11\x57\x57\x8d\xe2\x08\x8c\xd2\x88\xa3\xa9\x85\x41\x11\x81\x79\x93\xa7\xcb\xd7\xac\x3b\xc7\xfa\x3d\x38\x0b\xcd\xdd\xa1\xde\x73\x01\x8a\x49\xae\x83\x32\xcf\x45\xa1\x9d\x8d\x64\xde\x8c\xa4\x4a\x2d\x7b\x88\xce\xa4\xd6\x30\x17\xf0\xd8\x84\xf3\x2f\xb2\x1f\xaf\xb1\xe9\x8b\x8d\x94\xf3\x7a\xba\xff\xcc\xf0\xbc\xc9\xc8\x50\xd9\xbf\x59\x13\xff\xa1\xf9\x09\xec\x06\x5f\x64\x8c\xda\x12\xc4\x3a\x1e\xa6\x34\x8a\x7e\x1
0\x16\xbd\x51\xa0\xcc\x32\x3c\xe4\x42\x40\x00\xd7\xbd\xa3\x71\xd2\x06\x95\x59\x8c\x51\xb8\x14\xf3\x74\x0f\x89\x74\x88\x02\x68\x8c\xc8\xb1\x57\x41\xec\x7d\x5b\x19\x18\x9b\xd0\x4b\x8f\x39\x1e\xd3\x5a\x7b\x26\xf4\xf6\xa4\x7f\xc8\x91\x9e\xb8\x6e\x99\x64\xc9\x48\xf1\xec\xe4\xf4\x78\x07\x24\x5f\x66\x06\x5f\xab\x20\x66\xb6\xa7\xff\x93\x31\xc5\xe4\x28\xb5\x62\xb3\xfc\xff\x5f\xaa\x03\x40\x74\xce\x25\x1e\xe0\x48\xe6\x5a\x2b\x90\xba\x15\x90\x4d\x4c\xd7\xe7\x89\xac\x39\x45\x5e\xe7\x0b\x2e\x02\xa4\x30\x65\x7d\x77\x8e\x43\x05\xd3\x15\x07\x01\x35\x52\x3b\x3e\x68\xfc\x3d\xfc\x48\x34\x22\x41\x13\x18\x61\xd9\x46\xd2\xaf\x5e\xe8\x32\x8a\x3c\xbb\xc8\xbe\x25\x6f\xbd\x4a\xde\x7a\x9d\x04\x2d\x2d\x1f\x69\xc2\xc8\x6a\x47\x82\xa5\x5e\xd8\x36\x72\xcc\x6d\x81\x7c\x0b\x70\x8f\x58\xbd\xc8\x54\x84\x5d\xc1\xe2\x09\x99\xa4\x81\xde\x66\xbf\x60\x27\x69\x27\x5e\x45\x65\x37\x4f\xe8\x41\x2f\x76\x87\x41\x48\x50\x4c\x44\x14\xb5\xa9\x93\xe6\x5a\xe7\x4c\x6a\xcd\x07\xc2\x41\x90\x01\x8d\xd2\x3d\x93\x05\x6b\x33\x0c\x42\x66\x71\x63\x9f\x76\x2d\x99\x4a\x95\x72\xbe\xc8\x25\x1c\x8f\x4d\x7e\x66\xd4\x1e\x1e\xcb\xe6\x91\x2d\x85\x72\xf4\x8c\x17\x8a\xd0\xcf\x46\xef\x5e\x08\x92\xec\x65\x30\x46\x20\x08\xca\xa0\x38\x68\xc4\xe7\x37\x79\x12\xe2\x8a\x6f\xbd\x04\x26\x4f\x5d\xdb\x83\x68\x90\x42\xbb\xb5\x52\x4d\xc6\xa6\xfd\x92\xa7\x9e\x4a\x82\x7a\xa1\xfc\xe1\x65\xac\x0b\x18\xa9\x65\x7e\x45\xe5\xe4\xf3\x73\x1f\x65\x74\xc4\x9f\xa4\x9d\x22\xba\x82\x60\xdc\x8e\x4a\x14\xaa\xb4\x3f\xcf\xdd\x5b\x2b\x4b\xf0\x13\x2f\x73\x94\xae\x9d\x9d\xe0\xc9\x60\x66\x33\xb2\x00\x64\xed\x95\xb0\x85\x3a\x2c\x3f\x74\xc0\x06\x34\x32\x3b\x79\x62\xc8\x77\x80\x11\x31\xc0\x5c\x28\xca\x5e\x13\x8b\x78\xaf\x2b\x19\x6d\xcd\xba\x27\x41\x9f\xaa\xef\xc6\xdf\x3d\xb3\xb2\x50\x8d\xb3\xb8\xad\x28\xd6\x28\x49\xcf\xaf\x4f\xc5\x36\xb6\x27\x53\x37\x8b\xa4\x6e\x8a\xbf\x85\x79\x2e\x2d\x59\x47\x1a\x61\xc5\x3d\xf2\x03\x6a\x8d\x22\x39\x9d\x63\x27\x69\xfd\x08\xa7\x75\xd1\x99\xff\x0b\xfa\x02\x8b\x3a\x7a\x0b\x83\x25\x54\xdc\x2c\xc8\xc3\x39\x48\xcf\x99\x35\x91\x08\x99\x51\x59\x30\xa7\x83\x26\x85\x4c\x11\xa7\x1b\xd1\xec\x4f\x0a\xe9\x01\xb7\x63\x81\x4f\xc8\x76\x39\x33\xfc\x21\x39\xc8\x24\x54\xcf\x83\x92\xa7\xc1\xa7\x6b\x09\xdc\xb4\x7c\xea\x40\x26\x14\xc3\x90\x30\x75\x29\x08\x0b\xab\x6b\x83\x81\x4e\xfb\xd6\x66\x76\xcb\x76\x28\x7c\x1c\xb3\xe3\x7c\x63\xc0\xb7\xf1\x21\xd9\xf6\x2f\xb4\x2c\x8a\x16\x60\x31\x66\xce\xc2\x16\xab\x23\x4b\x00\x7e\x99\xcb\xeb\x17\xc9\xf1\x1e\xb6\x28\xf1\xbe\x65\x6b\x91\xe4\x1e\xd8\xcc\x54\x5c\x03\x7c\x32\x09\xcd\xd0\xc1\x31\xe1\x4a\x42\x67\x6f\x90\x69\xa9\x12\x12\x9f\xdb\x64\xd8\x9b\xcb\xc0\x02\x99\xe8\x5f\xe2\x70\x09\x8c\x92\x80\xf4\xea\x38\xd8\x20\xda\x1b\x1a\xa0\x78\x25\x96\x2b\xd4\x90\x24\x42\xfb\xb2\xbb\xaa\x5a\x7e\x99\x80\x86\x68\xd3\xb8\x96\x94\x6b\x86\x25\xce\x3e\x60\xe4\x8d\xbc\x79\xd9\x09\xa9\x2d\x84\xb7\xe2\x9e\x25\x17\x68\xad\xa6\x38\x40\xcc\x99\x4c\x2d\xa1\x5c\x23\xf5\x76\xd2\xce\x29\xf8\x64\xbb\xe4\x2c\x9f\x44\x9e\x95\xd3\x81\xbb\x9b\xd7\xe9\x47\x45\x52\x9b\x8c\x3c\xfa\x1d\x6a\xcc\xd5\x78\x31\x62\x9e\x0d\x31\x83\x06\xe8\xfd\x2c\x94\x34\xd2\x96\xfa\x25\xbf\x81\x35\x9c\xdf\x29\x5f\xbb\xca\xe1\x32\x13\x4f\x54\xdc\xd9\xc4\xae\x96\x11\xce\xd0\x5b\x1a\x5a\xc5\x0d\x5c\xbd\xfd\xe9\x0b\xfd\xc1\x4e\x25\x19\x91\x88\x9a\xa1\xb5\x63\x42\xec\xc7\xc5\xe0\xe5\xc2\x61\x2d\xe3\x51\x2d\x5a\x03\x3a\x52\xda\xcb\x2a\x60\xc3\x77\x04\x64\xb7\xbf\x23\x2b\x3d\xaf\xb4\xbb\x37\xdb\x09\x66\x3d\x66\x1d\x29\x76\xd4\x1d\xaf\xff\x83\x96\x30\x08\x2c\x3f\xac\xf1\x02\x24\xca\x4c\x90\x14\x2f\x16\x01\xa5\x50\xd5\x88\x5b\x55\xcb\xd8\x5a\x3b\x22\x84\x6f\xdb\x3d\xbb\xb1\xec\x25\xf8\x41\xe9\xc8\x4e\xbc\x13\xa8\xec\x
12\x2f\xee\xbe\x47\xf5\x0a\xb2\xcd\xf0\x5d\x48\x54\x31\x9c\x85\x63\x58\xb3\xc0\xb9\x7e\x6b\xa5\x11\xba\x07\x75\xce\x49\x7c\xb5\x72\xfb\x56\xe0\x4c\xca\x03\x1a\x08\x94\xff\x81\x15\xce\xc2\x01\xd4\x68\xb7\x4c\x09\x17\xc7\x1f\xcc\x06\xcd\x32\xe0\x02\x6c\xf3\xab\x90\x74\x6c\x3c\xab\x4b\xf1\x01\xd8\xe1\x37\xc9\xd8\x21\x4b\x4d\x05\xea\x7d\xea\xc3\x2f\xbc\x0d\xd9\x54\xe5\x15\x2e\x97\x6c\xdf\x1b\x2e\x4c\x7f\x34\xe8\xaf\x2c\x65\x2b\x24\xb2\x8c\x3e\xfb\xfc\x77\x89\x0a\x2f\xdc\x41\x44\xb9\x4a\xd3\x67\x05\xd6\x5f\xc6\x47\xc2\x66\x48\x07\xcf\x53\x4c\x6d\xbd\xf2\x73\x04\xb5\x06\x2b\x6f\x60\x11\x70\x18\x9c\xdc\x34\xe2\x8d\xba\x5b\xb6\x89\x3b\xfa\x26\x75\xd1\x3f\x2d\x6a\x42\x67\x0b\x6b\xd9\x4e\x32\x17\xb0\x92\x45\xc3\x8d\x05\xda\xa8\x06\x0a\x32\x87\x59\x80\x55\x39\xfd\x85\x1d\x92\xae\x86\xb9\x15\xc8\xe7\x8f\xa6\xc9\x7e\x44\x32\xc2\x26\x23\x53\xc4\x7a\x6b\x57\xc1\xcc\x87\xfb\x4e\xe3\x50\xab\xe2\xe7\x10\x8a\x25\x4f\xcc\x0e\xa4\x45\xd8\x55\x55\x14\x1f\x02\x20\x7e\xb0\x19\x62\xf1\x25\xeb\xf7\xda\xf2\xe3\xa9\x99\xe8\x01\xb6\x59\x7e\x6d\x4b\x62\x3a\x58\x5b\x59\x26\x99\xdd\x70\x99\x40\xdd\x5e\x22\xde\x4b\x63\x67\x93\xd8\x91\x77\xd3\xee\xfb\x4c\x22\xdc\x63\x58\xaf\x9f\xee\x85\xd0\x5d\x13\x75\x66\x3f\x33\x1f\x70\xdf\x98\x9c\x60\xf1\xae\xfc\x5c\xe7\x4a\xe1\x47\xa9\x1a\x2e\x28\x44\xfc\xda\x68\xcd\x3b\x38\xd9\xa9\xdb\xb4\x25\x59\xc6\x26\xed\x37\xc5\xd4\x65\x0d\x0b\x8f\x51\xe6\xf8\x3f\x2d\x71\x0d\x80\xc8\x9b\xb8\x0b\x39\xc6\x44\x9e\xd9\x9a\x0f\x92\xd1\x08\x9b\xf6\xf6\xcb\x0b\x65\x25\x2a\x7a\x2a\x11\xc0\x93\x59\xfc\x27\xe7\x1c\x3d\x82\x75\xae\xb8\x7f\xa8\x58\xfa\xce\x3c\xc4\xeb\x20\x07\x02\x33\xe2\x2b\x89\x57\xd0\xee\x32\x53\x90\x0c\xa1\x11\x73\x16\x0c\x3b\xca\x51\x8a\x95\xd5\x97\xd0\x52\x99\x12\xa8\x03\x50\x1b\xe1\x4c\xb5\x61\x45\x17\xe3\x8c\x8d\x34\x32\x4a\xf1\x82\xd8\xf3\xba\xce\xe8\xde\x30\x22\x0f\xa0\xa0\x16\x2b\xc4\x77\x0c\x17\xc8\xa6\x7b\x94\x7f\x99\x2d\x24\xac\xe4\x3b\x58\xd8\xf7\xbf\x22\xef\xfe\x89\x7e\x21\x20\x6c\x4d\x23\x2c\x2f\x6b\xdb\x5f\xe6\x48\xb8\xa7\x8c\xf0\x8d\x92\x8d\x67\x6a\x9d\xae\x56\xa7\xd5\x22\x65\x65\x32\x37\x0c\x9c\x23\x61\xdc\x14\xe2\xff\xd4\x0d\x61\xe1\x8d\x5a\xf5\x68\x9b\x42\x4c\x28\xe8\x20\x73\x62\xdd\x4d\xbb\xcd\xf0\xdf\xc2\x67\x6b\x64\x04\xc5\xa8\xc9\x9e\x0e\x59\xf3\xb7\xf8\x25\x6f\xad\x6d\x59\x78\x7e\x99\x97\x4c\xbd\x1e\x6f\xc2\x84\x1c\x08\xec\xe3\xcc\x88\x59\x17\xeb\xc5\xae\x7e\xdb\xee\xde\x2f\x50\xb3\x4a\x73\x64\xae\xa9\x0e\x91\x5a\x92\xfa\x6b\x59\x8e\xb1\x8c\x76\x3a\x7c\x2e\x98\x6a\x76\x61\x04\x65\x3a\xf4\x34\x92\x36\x0d\xd6\x09\xc7\x7e\x15\x0d\x42\xea\xc7\x5d\x9a\x27\x4e\x2f\xda\xc3\xc5\xbc\x59\xbe\x45\x1c\x01\x22\xe3\x7b\xb4\x79\x77\xa2\x2b\x0e\xd5\x80\xe2\x96\x96\xad\xe3\x5f\xe2\x29\x51\xd2\x9d\x59\xd7\xb1\xb6\x07\x3f\xbd\xf1\xcd\x83\xec\x59\xac\xb0\x2c\xdb\xc8\x6c\x81\xa4\x57\xcd\x72\x40\x07\xd8\x85\x3f\x7f\x31\x1d\xf0\x54\x0e\xbc\xcd\x79\xf9\x43\x15\xfa\xe3\x0d\x27\x3f\x21\x17\xcb\xe2\x65\xbf\x37\xb5\x48\x64\xcf\xb9\xc9\x9e\x60\xe3\xd2\xc5\xa3\x14\x58\xdd\x2b\x91\xe1\x59\x3d\x0c\xeb\xeb\x15\x8e\x80\xe4\xec\x41\x45\x4a\x88\xd7\xdd\xa4\x21\xe1\x21\xd9\x7a\x43\x69\x6b\xd0\x94\x04\x0b\x4d\xcd\x74\x0f\x50\x0d\xe0\x06\x8e\x4d\x3f\xf9\x0e\xbd\x29\xfb\x25\x54\xda\xfc\x34\xd3\xcb\xb3\x31\xf0\x4c\xef\x0c\xfc\xf0\x3b\x4d\xc7\x13\x6c\x2f\xc8\xf9\xe1\x50\x70\xfb\xa4\x26\x87\x8a\xe0\x26\x85\x08\xe1\x1c\x30\x0a\x73\x4e\xb9\xe6\x3d\x2c\xe7\x70\x49\xaa\x7b\x53\x63\x94\xb0\x66\x8e\x52\xba\xb3\x69\x9e\xe8\x9e\xba\xb5\x9b\xd4\xf6\xa5\xa5\x5d\x06\x3c\xc4\xae\x0b\x5a\x5a\xe5\x55\xc7\xba\xf6\xdc\x5c\x26\xa1\x4a\xba\xa2\x0d\x32\xc8\xa4\xf8\x57\xec\x65\xfe\x70\x14\x6e\xd0\xb7\xce\x57\x38\x3f\x43\x9b\x72\
x44\xf0\x11\x4e\x4f\x1a\xa7\x46\x80\xa1\xdd\x0f\x87\xca\x94\x23\x3f\x09\xc8\x40\xe6\x66\x58\xd8\xec\xee\x40\x1a\x10\x05\x1d\x72\x44\x8b\xc2\x1c\x63\xe5\xb4\xe2\x25\xa9\x49\x28\xec\x87\x59\x10\xb4\x13\x06\xaa\xb5\xe5\x7b\x04\x45\x4b\x62\xec\xef\x64\xf0\x10\x4f\x51\x89\xa9\xad\x10\x57\x99\xcb\x2c\x85\xad\x0b\x42\x1f\xd0\xea\xf3\x34\x54\x9e\xb7\xf3\x82\xf8\x24\xeb\xbd\xf4\x7b\x8f\x97\x6d\x1a\xa4\x57\xbc\x4e\x7b\xd7\xd6\x96\xe7\x24\x92\x33\x73\x4a\x20\x18\x10\xd3\x47\x19\x01\xa3\x96\x74\x12\x27\xe2\xcb\x90\x9e\xca\x30\x82\x91\xe2\x92\x5d\x1c\x47\x4e\x18\x1a\x39\x7a\x43\x5a\xde\x7b\xce\xb9\x8a\x3a\x73\xb1\x55\xc3\xd0\xea\x51\xf9\x92\x24\x9d\x92\x91\x25\xed\x78\xf0\x6e\xb8\xda\x30\xb6\x16\xf4\x91\x68\x2a\x3c\x08\x84\x6c\xc7\xa4\x9f\xdd\xcb\xc1\x3f\x7e\x9c\x7e\x02\xf1\x52\x9b\x2f\x9e\x49\x83\x6b\x0b\xf6\xd9\x2d\x02\xff\x3c\x46\x54\xf9\xa5\x0d\xe5\xe1\x14\xa1\xd0\xa1\x8b\xb0\x92\x57\x35\x02\x0a\x86\x9d\x9d\x72\x22\xc4\x6e\x6a\x15\x45\xb4\xa2\x75\x51\x34\x6b\x09\x5d\xc0\xa5\xc2\x42\xd7\x10\x59\x6e\x2d\x82\xf4\x26\xb6\x80\x13\x71\x77\x51\x81\x96\x59\x90\x79\xe6\x1b\x87\x43\x05\x9d\x56\x6f\x23\x03\xaf\x60\x25\xfa\x8a\x4f\x4c\x74\x7c\x4c\x8b\x54\x0b\x1f\xa4\x2d\x7a\x68\x8b\x34\x91\x4c\x87\x15\x73\xd5\xf5\xae\x3e\x96\xc7\x2c\xe5\x2e\x7e\x0e\x94\x1a\xf5\xdb\x4c\x4d\x61\xdb\x21\x6c\x26\x7b\xf2\xa9\x50\x88\xce\x0c\x48\x2d\x53\x75\xfe\x9b\xa6\x26\xe8\xb8\x8b\x4a\xe3\x44\xf7\xc3\xe3\xd3\x5a\xf8\x8d\x48\xd9\x5a\x5d\xa1\x77\x03\x26\x8d\x24\x2e\x71\x65\x37\xb2\x66\xb0\x66\xf1\x38\xe2\x85\x03\x67\x93\x9e\xa0\x45\x8c\xcb\x2c\x5c\xcc\xca\xf4\x2c\x63\xd5\x6b\xbc\x83\x5d\x5e\x05\xf1\xf8\x67\x53\xd2\x0a\xe6\x6f\xb5\x1d\xe7\x99\x83\x36\x94\x25\x61\x93\x86\xd6\x09\x3a\x1c\xec\x81\x74\x5a\x57\x5c\x73\x19\xa6\xb4\x62\x6a\x7f\x1c\x24\xfa\xae\xad\xfc\x88\x1d\x15\x5f\x5c\x2c\xc0\xd8\x71\x4b\x49\x0a\xc5\xa1\xbf\xb3\xcc\xa4\xb2\x89\xbf\xd3\xe9\x7e\xd7\x89\xc0\x73\x2b\xd8\x2d\x2c\x6b\x2d\xf7\x03\x63\x34\xcb\x6c\xee\x20\x6c\x14\x04\x85\x8b\x4f\x8f\xdc\xf6\x7e\x29\x1a\xaf\xd3\x0d\x43\x03\x16\xd6\x25\x59\x16\xae\xae\x28\x2f\xa2\xd4\xbf\xb2\xa4\x41\x95\x65\xc5\x10\x4c\xf0\x88\xec\xed\x5c\x6b\xcd\xd3\x3b\xb5\xe2\x21\x54\xe4\x81\xb0\xec\xf7\xf4\x91\x08\x6f\xba\x76\xa5\xba\xb6\xb2\x8d\x52\x02\xd2\x5d\x5c\x19\xa6\xa8\x9e\x51\x3b\xf5\x59\xe2\x7a\x46\xba\x7a\xdf\xf9\xa2\x10\x2a\xe4\xe7\xbb\x3a\xd0\x14\xd1\xb2\x72\x7c\x44\x1d\xc9\xc3\xc0\x59\xc7\xb8\x1f\x4a\x85\x52\x45\xb5\xa1\x29\xe5\x9b\x8c\x51\x29\xaa\x15\x76\xf2\x97\x26\x18\x02\x22\x9c\xc5\x32\xed\x94\xd3\xb9\x0e\xd2\x32\xd4\x19\x96\x7e\xe5\xc8\xa2\x70\x1d\x3a\x11\xa5\x12\xba\x2b\xdb\xa2\xf6\xc8\x07\x48\x85\xfb\x3a\x01\x1e\xe3\x48\x26\xe9\xb4\x2e\xc8\xd5\x59\x74\x2c\x21\x8e\x52\xcb\xef\x4a\x6a\x58\xcc\xa5\x00\x3e\x6f\x17\xf6\x0f\xc5\x0f\xc8\x7c\x8c\xf5\x52\x4d\x95\x0e\xe4\xac\x5b\x90\x83\x30\x80\x3e\xa5\x04\x67\x69\x58\x5e\x44\x09\xac\x17\x28\xde\x47\x54\xaf\x49\x7f\xe4\x21\x21\xe9\x23\x08\x8b\x68\x22\xc1\x5e\x44\x8e\x47\x26\xbd\x15\x1f\x64\x25\x4c\xb7\x0e\xce\x47\xc7\xad\x10\xe0\xcd\x34\x49\xb5\x8c\xeb\x17\xff\x20\xc6\x48\x0d\x63\x15\x56\x17\xc0\xa4\xe0\x48\xb6\x7b\x7b\x0c\x58\xc4\x8d\xf9\xd8\x90\x39\x7f\x83\x7d\xe0\x49\xee\x87\xae\x76\xab\xaf\x09\x70\x8f\x16\xcc\xba\x15\x85\xf9\x50\x14\xe3\x22\xda\x5e\x89\x75\xe6\x78\xdf\x3a\xba\x54\x17\x84\xf5\x85\xed\x51\x5c\x39\xeb\x69\xa4\x47\x04\x50\xa3\xb5\xa6\x41\xf0\x24\x1e\xa9\x49\x0f\x08\xe2\xa5\x44\x33\x29\xb4\x5e\x80\xd0\x99\x23\x2f\x9e\x6c\x09\xae\x08\x00\x12\xa6\xda\x75\xa4\xbe\x23\xc3\x77\x46\x7b\x45\x94\x6e\x80\xb4\x4c\x68\x50\x3e\x89\xe3\xfd\x7f\x80\xaf\xf6\xe0\x4c\x44\x76\x22\x8a\xba\x39\x86\x07\x21\xe9\x67\xe3\xdc\x57\xe0\x09\x1b
\x69\xf6\xbd\xb4\x2d\x37\x75\x0f\xe7\x4b\xb8\xa7\xed\xbf\x2e\x0f\xcc\xd2\x12\x32\x7a\x40\x72\x91\xb3\x3d\xfa\xc3\x12\x45\x07\x46\x6d\x57\x7c\xb4\x52\xcc\xd2\xf8\xc9\x00\x87\x2a\x6f\xa7\x40\x4d\x0b\xe1\x61\xe3\xd4\x63\xb0\xf7\x08\xef\x61\x98\x38\x5e\x42\x67\xd6\x0c\x4a\x56\x10\xc9\x56\xf4\x6d\xd0\x67\x6e\x40\xac\x67\xd0\x5e\x89\x41\x79\xd8\x8f\xa9\xae\xe4\x1f\x69\x13\x33\xbc\x59\xb0\xc0\x93\xf0\x4a\x66\x75\x6f\xaf\x83\xc0\xb5\xfd\xa5\x02\xda\x5a\x11\xe5\x09\x2d\xbf\xec\xb6\xf5\x76\xe6\x28\x2d\x9c\xab\xd4\xee\x9f\x01\x3c\x0c\x8c\x08\x84\x0a\x74\x67\xeb\x8c\x70\x16\xf5\x5e\x9e\xea\xa5\xf4\x7f\x8c\x58\x65\xca\x3b\x49\x28\x1a\x97\x97\x0c\xf8\x66\xd7\x77\x1b\x85\x88\xae\x36\x8b\x36\x82\xd7\x9f\x50\xe6\xbc\x6d\x28\x10\xfc\xd2\x43\xba\xa2\x75\x4c\xd8\xcd\x48\x29\x11\x74\xdc\xd2\x0d\x1d\x83\x41\x6e\x13\x31\xe8\xc1\x82\x74\x5c\x15\x6f\xe5\x25\xf4\x4f\x33\xca\xfd\x6f\xe1\x90\x5f\xb5\x0e\x36\x0d\x03\x7b\x79\xe1\x3c\xce\x8c\x38\x30\xe5\x30\x0c\x83\x1e\x11\xa2\xdf\x42\xeb\x77\xfb\x92\xdb\x23\x20\x6e\xd0\x92\x5a\x42\x4e\x38\x8e\x40\x25\x8d\x92\x15\x35\x86\x7e\x69\x34\x9f\x95\xd1\xdb\xfa\xfe\x84\xe0\xfb\x8d\xb4\x87\xf6\x6e\x66\x16\x46\x6a\x20\x8b\x2b\x75\x30\xc9\x20\x15\x81\xb4\x4f\x34\x7b\x47\xaf\xeb\x45\x67\x42\xd8\x88\x0b\xfe\x81\x49\xbf\x17\x3d\xe1\x97\x03\x5c\x6f\xc0\x0f\xea\x98\x50\x17\x3e\xa9\x7e\xc7\x20\x90\x4e\x01\x99\x34\xca\xc2\xd3\x28\x92\xbf\x09\x25\x56\xa1\xb2\xb5\xff\x51\x8b\x2b\x1e\xdd\x1c\x01\x8c\x5e\xbb\x6e\x96\xe2\x7d\xb8\xf8\xa4\x6f\x19\xa5\x72\x0c\x8f\x77\xd9\x9a\x29\x44\x2b\xdb\x12\x75\xee\xf7\xa2\x44\x98\x07\xcd\x05\x50\xce\x0c\x61\x37\x94\x90\x8b\x42\x06\x1d\x1d\xfa\x74\x67\xe6\xa6\xfe\x6c\x68\x19\xa8\xd0\xd5\xf3\x4b\x46\x59\x84\xfa\x8f\xf6\x6a\x52\x0a\x47\x49\x6b\x8c\xa4\xcd\x02\xfe\x46\x8a\xca\x43\xd8\x27\x45\xea\xe8\x74\x2c\x63\x8b\xea\x71\xd1\x89\x7b\x72\x49\x8b\x09\x93\xad\xf9\x15\x42\x34\xc5\xce\xfa\x1c\x55\x53\x8b\x5a\x97\x42\x65\x16\x0c\xf8\x33\x89\xcf\x11\xd7\x0d\x79\xfe\xaa\xb0\x93\xb4\x87\x10\x3a\xca\xf4\xe1\x41\x8d\xc6\x72\x04\x5b\xd2\xa4\xdd\xaa\x0e\x2d\x71\x25\x5e\x82\x28\xcc\xe2\x4e\x8e\xeb\xed\x05\x73\xde\xa5\x6d\x89\x2f\x2a\xbf\x7a\x2c\x71\x2a\xef\xc6\xe3\xe0\xc7\xf2\xc0\xf3\xc3\x06\x88\x3e\x34\x3b\x32\xb0\xf2\xe1\xd3\xc1\xdb\x0d\x3c\x4c\x36\xd4\xd6\xd3\x03\x3b\x92\x69\x35\x35\x84\x24\x43\xed\x11\xc7\x01\xea\xae\x24\x03\x6f\x66\x47\xff\xd2\xb4\xfe\x04\x90\x38\x69\x9b\xfa\x2a\xa3\x70\x6a\xd2\x98\x40\x3d\x3c\x4f\xd1\xd7\x37\x41\x2b\x8b\x8b\x24\x2d\x18\x7d\x92\xb8\x7f\x13\xb7\x88\xdc\x4f\xb5\xb1\xf4\xcf\xc6\x4f\xfd\x2e\x51\xa2\xbc\xe2\x92\x87\xbb\x94\x58\x05\x2b\x33\x2d\xaa\x41\x5e\xe5\xe4\xb3\x43\x6b\x12\x7d\x33\x86\x96\xc7\x62\x9b\xd8\xc2\xda\x5f\x41\x1a\x4d\x6c\xe3\x87\x27\xce\x12\x5e\xb8\x6e\xe4\x88\x21\x91\xde\x81\x53\x62\x08\x77\x66\x6d\x73\xd0\xf9\x30\x3b\x61\x15\xd5\x05\xc0\xf5\x40\x98\xc0\x53\x9a\xf2\x6c\xd0\xb4\x5f\xc8\xf7\x2a\x8b\xd9\x05\xc4\xcb\xad\x4f\x32\x96\xe3\x34\x42\xea\xe5\xf4\x9c\x24\x5f\x89\x8c\xe8\xf6\xc0\x91\x8a\x3d\xa0\x61\x51\x20\xbe\x36\x29\x47\xfa\x42\xa3\x40\xf5\x78\x0a\x6b\x5e\x03\x26\x48\x46\xfb\xcd\xd3\x9e\x49\x83\x35\x5c\x10\x1a\x9c\xe0\x68\x1e\x03\x52\x20\x3b\x58\x4d\x24\x23\x64\x58\x6f\x06\xa3\x97\xd1\x1c\x45\x97\x61\xe9\x8f\xd2\xcf\xef\x95\xa8\x99\x7d\xba\xa4\xd6\xa4\x80\xb4\x87\x9e\x6c\x36\x06\x97\x66\x5a\x7a\x4d\x92\x2c\x50\x60\xe8\xaa\x2d\x82\xc6\x2f\x48\x8b\xa6\x33\x9e\xde\x97\xb9\x62\xdb\xc8\xd1\x75\x34\xc7\xd5\xdf\xc2\xb9\x64\x63\xda\xce\x86\x82\x0d\xf6\x0f\x1c\x60\x73\xec\x58\x94\xb3\x06\x44\x8c\x27\x35\x23\x44\x0d\xf4\x0a\x03\x02\xae\x41\x28\xe6\x86\x09\xe5\x9f\xef\xc8\x71\xfa\x65\xc4\x39\x64\xe9\x82\x63\xfa\x25\xe9\xc6\xe5\x2
8\x6d\x62\x6c\x27\xb2\xf9\x25\x52\xb2\x3c\x02\xe1\xf3\x52\xe7\xe4\x1b\xd7\x10\x69\x40\x49\x52\xf2\x24\x38\xd2\xf2\x83\x8e\xaa\x88\x97\xfe\x09\xcd\x7a\x3c\xb6\x0b\x17\x21\x0d\x1d\x5d\xdd\x47\x00\x23\x52\x6f\xa9\x3a\x87\xde\xa5\x16\x15\xde\xee\x9f\x7e\x49\xe9\x67\xb9\x93\xab\xf9\x98\x4b\xee\x5d\x81\xc5\xf0\xdd\xc8\x4f\xf6\xc1\xbf\x15\x3d\x76\xd0\x8a\x72\x0e\x1c\x9b\xd4\x24\x78\x6a\x93\xc0\x59\x2a\x4f\xd7\xc2\x63\x2f\xa3\x42\xbf\x54\x94\x24\x9f\xe8\x95\xe7\x0e\xec\x6a\xb7\x98\x9f\xa8\xf2\x73\x7f\xcc\x28\xe0\xf4\xa2\xac\x69\x1a\x1f\xa7\x73\xf6\xa3\xf9\x90\xa3\x19\x10\xee\xe0\x71\xb6\x3c\xc3\x75\xa8\xb0\x00\x3f\xa1\x86\x56\xc3\x78\xcc\x28\x97\x7d\xfa\x6c\xf4\x41\x47\x65\x39\x52\x0a\xf6\x5e\xc7\x6c\xd4\x4c\xa9\x5e\xff\xd9\x14\x4e\x5a\x6c\x2b\x9c\xd5\x8f\xc6\x49\xd6\x86\x86\xc4\x15\x19\xba\x74\xc0\xef\x1c\x24\xc6\x7f\xf5\xa8\x28\x59\x6b\x8a\x20\x88\xad\xb8\xdc\xb0\xe9\x6f\x04\x89\xad\x75\xa7\xf8\x61\xa6\xf3\x37\x88\xb3\x5b\x71\xce\x51\x5c\xd5\xf7\xa5\x67\xd9\x52\x26\xd2\xae\xd4\xcb\xd5\x95\x09\xf4\x23\x63\x54\x6e\x37\x16\x9a\x7a\xba\xb2\xa1\xc5\x08\xaf\x67\xba\xb4\x9c\x5f\x5a\x9b\x64\x6e\xde\xf3\x42\x57\x23\xdd\xb5\x86\xa6\xe5\x99\x4e\x0c\xe9\xcc\x81\x5d\x94\xc2\x33\xa0\x62\x90\x06\x29\x7a\x3f\x8f\x39\xad\x7e\xf2\x13\x69\xb9\xac\x7b\xcd\xfd\x5b\xf5\xe8\x0d\x48\xc1\x11\x02\x2c\x5a\x15\x57\x81\x03\x7d\xe2\x70\x29\x38\x00\x5d\xdd\x12\x32\x3d\x29\x16\xf7\x7b\x8d\x93\x1e\x67\x45\x61\x82\x9e\xaf\x02\x4f\x78\xea\x39\x0d\x3f\xd9\x69\x95\xd9\x45\xfd\x45\x01\xc4\x45\x60\x7f\x4a\x0a\x56\xee\xbc\x48\x00\x4a\x7e\x89\x85\x04\x9a\x38\x05\xc6\xe1\xe3\x7e\xc2\x91\x51\x89\x8a\x2c\xf6\xbf\x4d\x98\xfb\x76\x73\xcf\xee\x77\x2a\x2c\xe2\xd7\x4e\x5a\xcb\xa2\x60\x8e\x2f\xc0\x31\x38\x4d\xc5\x11\xc0\x91\x34\xb7\x40\xf0\x87\x0a\x91\x3e\x58\xc1\xe0\xb9\x43\xc2\xd7\x38\xbc\x48\xbb\x33\xc8\xa0\x14\x72\x8d\x2d\xc5\x36\x3f\x51\x85\x9b\x40\xf0\x54\xb4\x93\x30\x6a\xcc\x32\x44\x9f\xb3\x1a\x8f\x29\x28\x50\x34\x69\x12\xad\x03\xc7\x20\x02\x2a\x46\x54\xde\xcc\x2c\xca\x24\x63\x6c\xe2\x65\x48\xbd\xef\xde\x04\x1f\x1e\xd8\xa7\x31\xc8\x0c\xca\x21\x65\x52\x78\xcd\x3a\x40\xca\x82\xe2\x59\xe7\xe9\x69\xf3\xc4\x20\xb3\xb9\xb6\x59\x27\x27\x66\x99\x89\x55\x04\xd4\xa6\x16\x4a\x52\x53\xca\x40\x97\x5a\xb9\x39\x1f\xca\x7a\xb6\x37\xa4\x92\x17\xb6\xab\x1d\x9e\xfd\x25\xfa\x0d\x54\x25\x1c\x56\xce\x44\x55\x57\x01\x50\xe7\x55\x0e\x7b\xeb\x10\x86\xf2\x52\x28\xe4\x76\x88\x29\xcc\x7b\x68\xaf\x3e\x80\x1f\xef\x9b\x30\x4a\x07\x01\xd4\x1e\xec\xc8\xd2\xab\xdd\x12\xed\x50\xdc\x51\xe4\xd6\x9b\x49\x43\x81\x78\xe9\xe4\x56\xeb\x56\x29\x37\x50\xe4\xac\x63\xe9\xe0\x06\x08\xb2\x5f\x1a\xc8\xcd\xef\xc5\x40\xac\xf1\x21\x2c\xf2\xa4\x80\x0c\x59\xa9\x26\x82\x22\x9c\x3e\xc0\xc5\xed\x1b\x2b\x48\x06\xbb\x98\x5a\xfc\xe6\x5b\x60\xac\xd2\x70\xb8\xee\x52\xb1\xc2\xc8\xb2\x75\xd2\x2a\x97\xfc\x1d\xeb\x9a\x2b\xe2\x90\x25\xe9\x7c\xce\x9a\xd9\xe4\xb1\xf5\x6c\xc3\xa2\x53\xde\x17\xa5\x09\xb1\x5c\xd7\x8e\x4e\x62\xe6\xc7\x22\x01\xa6\x45\x4d\xb8\xab\x10\x36\xd6\x21\xc6\xe9\x86\xa0\x7d\xec\xd8\x7f\x66\x54\x8d\x08\xb4\x9f\x9c\x14\x7e\x54\x2c\x60\xde\xff\xa6\xe1\x32\xca\xde\xff\x9d\x60\x49\x2d\xe1\x1f\xe4\x74\x5e\xce\x1e\x4f\x1e\xca\xfb\x39\xa0\x93\xa9\x92\xf2\xe3\x14\xae\xbd\x06\xe0\x72\x60\x67\xed\x76\x4e\xec\xe7\x04\x71\x88\x71\xd7\xdd\x2f\xe1\x38\x82\xb5\x03\x6f\x84\xab\x27\x8d\x46\xbf\xbb\xb5\x27\xb9\xe6\x1c\x3e\xa8\x57\xdc\xb3\x64\x6f\xee\x52\x8a\x89\xd7\x21\xf1\x3f\x3a\x1f\x40\x46\x62\x93\x25\xec\x81\x0f\x5a\x58\x72\x70\x87\x49\xf3\x70\x39\xb7\xa4\xcf\xca\x12\xb4\x97\xa7\x77\x71\x68\xb4\x3f\xa6\x39\xc2\x57\x89\xf7\x8d\x1e\x80\xd7\xb0\xef\xa0\x32\xbb\xf1\xa4\x2b\xad\xd6\x2c\x24\xb5\x96\x65\x01\x
fd\x4b\x71\x41\xfd\xa3\x41\x1a\x81\x18\x1d\xbc\x27\xe9\x0a\x72\x80\x9f\xb9\xfd\xfe\x49\x14\x08\xdb\x0e\x81\xfa\x36\x65\x9e\x77\x2c\x37\x6c\x12\x1b\x0d\x34\x07\xc7\x5a\xa4\x8e\x9b\x5f\x13\xf1\xbf\x6b\xd4\x2b\x30\xc9\xae\x51\xb1\xa4\xe4\xb7\x5e\xc9\x6c\x5c\xeb\xc3\x88\xfb\xc3\xba\xcd\xe4\x48\xb6\xe4\x08\xd2\x04\x2f\x2f\x31\xbd\xc2\xe2\xa9\x4a\x26\x67\xc7\xed\x26\xbd\x08\xe9\x3a\x13\xff\xe2\xd7\x5b\x69\x5b\xfe\x47\x2b\xbd\x6c\xd2\x9c\xc2\x7a\xb0\xb9\x51\x3a\x2b\x71\x6c\xdd\xe7\x3b\x2c\xf3\x45\x17\x60\xd0\x62\x44\x22\xe7\xe8\x82\xf2\x2b\x49\x60\x3f\x2f\x8d\xbb\xcf\xbe\x32\x99\x81\x4e\x39\x66\xf6\x3d\x2a\x29\xd8\x28\x94\xd7\xe5\x88\x13\xdb\xa1\x23\x08\x50\xa2\x7d\xc8\x88\xc3\x1e\x88\xdf\x4a\x55\x01\x93\x5e\xcd\xf6\xdb\xd3\x6e\x68\xb4\xad\x4c\xd9\x70\xef\x87\xa0\xd9\xba\x3b\x1b\x51\x24\xde\x6b\x55\xea\xca\x7e\x34\x52\xb1\x0c\xd2\x33\x39\x23\xdc\x6b\xb6\x46\x4e\x3e\x61\x70\x49\x66\x6c\x31\x93\xad\xb3\x05\xad\xb2\xc4\x32\xfe\x92\x58\x3d\xf9\x76\xa8\x53\x11\x44\xfa\x60\x15\x45\xe7\x57\x93\x30\xf0\x9d\x28\x3a\xff\x03\xd8\xae\x5b\xf9\x29\x41\xcd\xb1\x67\xc5\xd2\xac\xae\x1f\x58\x26\xfe\x10\x7f\xf1\x61\x9a\xcb\xd0\xc0\xe8\x03\x09\xd0\x7d\x3f\xd2\xb1\x79\x90\x8f\xbb\x7e\xb0\xc9\x0a\xf4\x02\x70\xae\x67\xc9\x00\xac\x02\x08\x32\xa2\x6b\x39\xf6\x3f\x4c\xde\x6b\x52\x91\x48\x9e\x0a\x6a\x60\xc6\x9e\x44\x97\xc2\x49\x28\x45\xbe\x9a\xcb\x40\x60\xd1\xc7\xcb\x28\xf0\x78\x89\x18\x88\x21\x9d\x85\x21\xd6\xa2\x04\x2a\x1d\xc4\x05\x88\x1d\x18\x8f\xcc\x2c\x8a\x5a\x0a\x22\xef\xb8\x1c\x9b\x4b\x2d\x17\x00\x54\xc2\xc1\xb4\xfb\xe5\x79\xe1\x43\x49\xcd\xbc\x31\xd3\xdf\x44\x45\xf7\x48\x6b\x5b\xde\x6e\xd0\x53\x8a\x1c\x23\x2f\x42\x8e\xcd\xa5\x76\x06\x9f\xa4\xaa\x63\x7c\x4a\xb7\xa3\x0f\x6d\x7a\x68\xb5\x4c\x7b\xb5\x16\xd0\xba\xe7\x7c\x49\x3e\xc8\xe8\xa0\x65\x5b\x92\x5c\xaf\xe2\x99\xb0\x99\xec\x11\xef\x77\x49\x54\xec\xc4\xfa\x04\xe5\x2d\xfc\x82\x7d\xdc\x05\x49\x02\x29\x8f\x6f\xc9\x3f\x60\x4e\x09\x01\x09\xf1\x48\x99\x41\x35\x2d\xac\x64\x0b\x1d\x6f\xcb\xe6\x4f\x38\x2a\x23\xc1\xda\xc7\x50\x80\x05\x97\xc2\x44\xb9\x23\x37\x60\x9e\x0c\x3c\x10\x63\xf7\x14\x57\xe6\x9a\x7e\xc8\x52\xac\xb8\x94\xcc\x6b\x31\xd6\x53\x96\x46\x74\x2c\xad\xc8\x30\x1b\xd6\x97\x1c\xe9\x98\x20\x6b\xd9\x6d\x10\xd9\xd0\x8d\xdb\xe2\xfa\x45\xa8\x1e\x12\x0b\xcb\xef\xd4\xac\x02\x58\x5c\x77\x8d\x1f\x24\xe0\x5e\xa7\xa6\x21\x23\x18\x38\x9a\x0e\x92\x1b\xfe\x2d\xc7\x1b\x2c\x93\x14\xf7\xe2\x25\x7b\x39\xe8\x80\x4e\x0b\xe0\xe4\x12\xdc\xd2\xf1\xb5\x46\x92\x05\x2e\x52\x19\xb4\x0e\x46\xbf\x10\x7a\x16\xae\xc1\xac\xdc\xd6\xb5\x2a\xe1\xd9\x82\x95\xc2\x58\x82\x4d\x18\xcb\x63\x22\x01\x69\x18\x1a\xb2\xca\x33\x4c\x32\x2e\x3e\x5c\x68\xa6\x61\x5d\x3c\x43\x33\x70\x32\xe6\x10\x95\xc1\x6f\xee\x90\x92\xe8\x75\xbc\xa1\xd6\xc4\x8a\x32\xd1\xc9\x07\x64\xf8\x93\xa2\xc6\x34\x8a\x9e\x19\x05\x7b\x42\x2f\x36\xd7\xf9\x7c\x59\xf4\x8e\x0e\xb7\xb3\x7e\x7c\x37\x2c\xd6\x31\x4a\x48\xfd\x41\xc5\xc9\x20\x88\xed\xa4\x62\x84\x1e\x13\xe7\xae\x17\x6f\xc5\x3f\x09\x47\x5b\xe8\xef\x75\xef\x60\x96\x8e\x53\x11\xba\x3e\x2d\x9a\xd4\x3c\xf8\x51\x41\xa6\x57\x24\x65\x49\xa8\x4c\xde\x95\xd0\x4b\xa6\x39\x69\x1d\xd9\x7c\x51\x22\xae\xa1\x45\x84\x6b\xa4\x66\xa1\xa2\x5c\xd3\x78\x14\x8d\xb3\x53\xe1\x48\x42\x94\xcf\xde\xec\x25\xb6\xc4\x48\xf4\x9a\x20\x71\x8d\xb1\x5f\x00\xb5\x6c\x6a\xa6\xdf\x77\xdb\x38\xb1\x6e\xe3\x07\x24\x1c\x2b\xa7\x86\xfc\xb9\xe1\x8d\x54\x40\x90\x35\xfd\xe0\x4c\x15\x6f\xc4\x30\x87\xf8\x1f\x0a\x12\x38\x52\x91\x4f\x5e\x67\x85\x82\x17\x37\xea\x09\x84\x8e\x50\xd0\x10\x84\x1b\xa7\x7b\x62\x03\xe5\x4e\x07\x65\x1b\x39\x42\x9d\xe2\xb9\x86\x67\x60\x42\x13\x4d\xd3\x8b\x10\x5e\x2d\xe9\xaf\x62\x0d\x58\x1e\x8d\xcc\x3e\x9a\xff\xc1\x16\
x96\x9b\xf0\x77\xef\x46\xee\xed\x39\x8e\x88\xd9\x19\xdc\x15\x99\x83\xa4\xbc\xb7\xc2\x2e\x50\xab\x9a\x50\x0c\x3a\xf5\x77\x53\xc0\xf0\x8c\x2a\xb7\x5d\x5d\x7c\x94\x46\x58\x00\x68\x7f\x86\xf0\xe4\xac\xb5\xe9\xad\x60\xf6\x6d\x26\x59\xcf\x3c\x0b\x0e\x48\xc0\x70\xc7\x40\xab\x20\xda\x82\x94\xfd\xd2\x5c\x9e\x75\x7a\x52\x33\x4b\x7a\xd3\x62\x29\x34\xea\x18\xf0\x94\x0c\x03\x6f\xfc\x3b\x51\xe1\xcc\x13\x6b\xa2\xf0\x47\x12\xe0\xc4\x8f\x18\xb4\x14\xf1\x08\x2c\x8c\x24\xf0\x94\xa4\x4a\xb7\x0d\x8d\xe2\x7f\x78\x0f\x36\x7f\x03\x3e\x8b\x80\x48\xa3\x82\x7a\x12\xfe\x8d\x25\x52\x48\x6c\x5b\x8c\x36\x70\x3c\x32\x67\xa8\x81\x6b\x6a\x05\x10\x71\x27\x8a\x1f\xd6\x92\xa8\x4b\x2e\x95\x7c\xa3\xb9\xea\x64\xab\x79\x44\xed\x34\xca\x54\x2d\xe2\xa8\x28\x74\x2f\x93\xe7\xff\xa6\x1d\x13\xb5\x44\x12\x47\xa4\x5e\xa8\x7c\x80\xe7\x34\x1a\x4d\xa1\x75\x63\x8f\xc9\x2c\x6e\x61\xe5\xc3\x16\xfa\x3d\x9c\xa9\xc0\xa1\xbc\xa1\x92\xd4\x96\x7f\xff\xaf\xff\xfa\x3f\xff\xf5\x7f\x03\x00\x00\xff\xff\x1c\xf7\x13\x7e\x95\xd2\x00\x00")
+
+func dataFemalenamesJsonBytes() ([]byte, error) {
+	return bindataRead(
+		_dataFemalenamesJson,
+		"data/FemaleNames.json",
+	)
+}
+
+func dataFemalenamesJson() (*asset, error) {
+	bytes, err := dataFemalenamesJsonBytes()
+	if err != nil {
+		return nil, err
+	}
+
+	info := bindataFileInfo{name: "data/FemaleNames.json", size: 53909, mode: os.FileMode(420), modTime: time.Unix(1452717629, 0)}
+	a := &asset{bytes: bytes, info: info}
+	return a, nil
+}
+
+var _dataKeypadJson = []byte("\x1f\x8b\x08\x00\x00\x09\x6e\x88\x00\xff\x9c\x94\xcd\x6a\x85\x30\x14\x84\xf7\x3e\x45\xc8\x52\xeb\xff\x5f\xec\x0b\xf4\x21\x4a\x17\xdd\x75\x21\xa5\x14\xba\x2a\xbe\xfb\xcd\xd5\x8b\x99\x91\xe3\x3d\xea\x46\x26\x42\xe6\x3b\x33\x1c\xf2\x1f\x19\x63\xdf\x7e\x3f\x7f\xbe\xec\xab\xb9\x1f\xfc\xb1\xf0\xf2\x7d\x96\xc6\x7c\xff\x8d\xe3\xcb\x43\xdb\xd2\xae\xb2\x0a\xb2\x0e\x32\x5b\x25\xde\xdb\xea\x59\x7e\x2c\x7f\xbc\xa7\x0c\x23\x70\x13\x10\xad\x38\x43\xb1\x0b\x26\x58\x05\x30\x4c\x23\xfb\x77\x4a\xc6\x0d\x95\x48\x35\x92\x2a\xc5\xfe\x59\x59\x32\x97\x58\xcd\x91\x0a\xfb\x70\xdd\x69\x6d\x96\xfb\xb9\x5a\xcc\x05\xb5\xc9\xf6\x83\x56\x26\x41\x09\xd4\x21\xa8\xd5\xdc\x13\xb5\x4b\xa6\x12\xaa\x3f\xd0\x1f\x59\xe5\x5a\x97\xcd\x7e\x81\x0e\x73\xf5\xe2\xd8\xe8\x1f\x6b\x65\x12\x95\x48\x03\x92\x60\x4e\xd9\x3d\xd5\xca\x64\x28\x91\x62\x24\xe5\xe7\xd6\x5a\xe4\x62\x56\xc7\xac\x04\x59\x83\x12\xe5\xc8\x04\x9b\x8c\x04\x4b\x11\x16\x5f\xb6\x25\x04\xa5\x24\x5a\x86\xb4\x42\x79\x6b\xaf\xcc\x40\xb4\xfc\xe4\xd6\x53\x08\x79\x2d\x1d\xbe\x07\x0b\xcc\x7f\xa7\x68\x8a\x6e\x01\x00\x00\xff\xff\x2d\x9a\xa0\x40\x67\x06\x00\x00")
+
+func dataKeypadJsonBytes() ([]byte, error) {
+	return bindataRead(
+		_dataKeypadJson,
+		"data/Keypad.json",
+	)
+}
+
+func dataKeypadJson() (*asset, error) {
+	bytes, err := dataKeypadJsonBytes()
+	if err != nil {
+		return nil, err
+	}
+
+	info := bindataFileInfo{name: "data/Keypad.json", size: 1639, mode: os.FileMode(420), modTime: time.Unix(1452717629, 0)}
+	a := &asset{bytes: bytes, info: info}
+	return a, nil
+}
+
+var _dataL33tJson = []byte("\x1f\x8b\x08\x00\x00\x09\x6e\x88\x00\xff\xaa\xe6\x52\x50\x50\x4a\x2f\x4a\x2c\xc8\x50\xb2\x52\x00\x71\x80\xdc\x44\x20\x33\x1a\xcc\x04\x72\x4c\x94\x74\x60\x4c\x07\x25\x30\x2b\x16\x22\xa0\x94\x84\xac\xcc\x02\x55\x2e\x19\x59\x4e\x03\x61\x44\x35\x82\x19\x8d\x60\xda\xa0\x6a\x4e\x45\xd6\x6c\x8c\x2a\x97\x8e\x2c\x67\x86\x30\xc2\x12\x55\x59\x26\xb2\x32\x43\x84\x32\x45\x04\xb3\x06\x55\x47\x0e\x0e\x1d\x35\x08\xa6\x39\xaa\x8e\x7c\x64\x1d\x06\xa8\x72\xc5\xc8\x72\x2a\x08\x23\x4c\x51\x95\x95\x20\x2b\xd3\xc6\x69\x53\x05\xb2\x32\x55\x54\xb9\x2a\x64\x39\x23\xa8\x1c\x90\xac\xe5\xaa\xe5\x02\x04\x00\x00\xff\xff\xd5\xd6\x71\x46\xdd\x01\x00\x00")
+
+func dataL33tJsonBytes() ([]byte, error) {
+	return bindataRead(
+		_dataL33tJson,
+		"data/L33t.json",
+	)
+}
+
+func dataL33tJson() (*asset, error) {
+	bytes, err := dataL33tJsonBytes()
+	if err != nil {
+		return nil, err
+	}
+
+	info := bindataFileInfo{name: "data/L33t.json", size: 477, mode: os.FileMode(420), modTime: time.Unix(1452717629, 0)}
+	a := &asset{bytes: bytes, info: info}
+	return a, nil
+}
+
+var _dataMackeypadJson = []byte("\x1f\x8b\x08\x00\x00\x09\x6e\x88\x00\xff\x9c\xd4\xcb\x4a\xc6\x30\x10\x05\xe0\x7d\x9f\x22\x64\xd9\xda\xfb\x35\x42\xd7\x3e\x84\xb8\x70\xe7\xa2\x88\x08\xae\xa4\xef\x6e\x6c\xa5\x39\xa7\x4c\x99\xd4\xcd\xcf\xf4\x87\xcc\x97\x39\x24\xf9\x4e\x8c\xb1\x4f\x9f\xaf\x1f\x6f\xf6\xd1\xfc\x7e\xf8\xcf\xca\x97\xcf\x5b\x69\xcc\xfb\xd7\xb2\x3c\xfc\xd5\xb6\xb6\x47\xd9\x84\xb2\x0d\x65\x71\x94\xb8\xee\x5c\x6f\xe5\xcb\xfe\x8f\xef\x29\x63\x04\x77\x81\xe8\xc5\x3d\x54\x97\x30\x61\x0d\x60\x38\x8d\xdc\x7f\x50\x66\x3c\xa9\x24\xb5\x28\x35\x5a\xfb\x4c\xcd\x8d\x59\xa2\xba\x98\x04\xc7\xb0\x7c\xd2\xc2\xac\xaf\xc7\xea\x71\x2c\x48\x4d\x6e\xef\xb4\x2c\x09\x25\x68\x40\xa8\xd7\xba\xe7\x5a\x94\x8c\x92\x34\x46\xc4\x47\xad\x66\x2d\xca\xee\x3a\xbf\x09\xc7\x1a\xe5\xad\x42\xff\x52\xcb\x92\x54\x92\x1c\x4a\xb0\x4f\xb9\x7b\xaa\x64\xc9\x26\x41\x29\x42\x65\xd4\x03\xa0\x46\x9c\xe3\xdc\xa4\x65\xa8\x0d\xca\x61\xb8\xed\xb6\x8c\xe5\x88\x39\x25\xb8\x28\x80\x12\x25\xab\x40\xab\x52\x1e\xda\xff\x84\x4b\x5a\x89\xda\x7c\x6f\x06\xf9\xac\x38\xbc\x15\x64\xcd\x37\x2f\x18\x61\xf2\x0d\x98\xf0\xe5\xd9\x31\xff\xbb\x26\x6b\xf2\x13\x00\x00\xff\xff\xa3\x67\xe0\x02\xd0\x06\x00\x00")
+
+func dataMackeypadJsonBytes() ([]byte, error) {
+	return bindataRead(
+		_dataMackeypadJson,
+		"data/MacKeypad.json",
+	)
+}
+
+func dataMackeypadJson() (*asset, error) {
+	bytes, err := dataMackeypadJsonBytes()
+	if err != nil {
+		return nil, err
+	}
+
+	info := bindataFileInfo{name: "data/MacKeypad.json", size: 1744, mode: os.FileMode(420), modTime: time.Unix(1452717629, 0)}
+	a := &asset{bytes: bytes, info: info}
+	return a, nil
+}
+
+var _dataMalenamesJson = 
[]byte("\x1f\x8b\x08\x00\x00\x09\x6e\x88\x00\xff\x5c\x5a\x4b\xb6\xeb\x38\x6c\x9c\xf7\x2a\xfa\xbc\x71\x56\x90\x35\x64\x07\x39\x19\x50\x22\x2d\xd1\xa6\x48\x3f\x52\xb2\x5b\x37\x27\x7b\x0f\x28\xb1\x0a\x78\x3d\xd3\xf5\x95\xf8\x01\x0a\x85\x02\xc8\xff\xfd\xf5\x5f\xb1\xed\xbf\xfe\xf3\xbf\xff\xfa\xfb\xef\x5f\x4f\xb7\x85\xf6\xeb\x3f\xae\xc7\xb2\xe6\xfb\xa9\x96\x29\xd4\xfd\x7e\xde\xe2\xbc\xba\x90\xee\x3f\xbe\x31\xa5\xe8\xb6\xfb\x0f\xef\x3e\xd1\x8f\x0f\xfa\x4b\x75\xfc\xd1\x1f\x93\x0e\xda\xc2\x7b\xbd\x9f\xf7\xb5\x6c\xae\xe1\xa5\x2a\x8b\x28\xef\x35\x54\x8c\x96\x23\xa6\x79\xbb\x63\x3c\x6d\xae\xbe\xc6\xff\x4b\x76\x69\xcc\xb0\x84\x52\x97\x70\x3f\xbf\x42\xce\x61\x1f\x33\xb4\x3d\x7c\xc2\xd8\x44\xf0\x5f\x2e\x69\xaa\xd1\x71\x6f\x3a\x8e\xcb\xb2\xa4\x7c\x62\xa0\x4f\x1c\xef\x3c\x5d\x2b\x19\x0b\xd8\xf7\x35\x7c\xc7\xbc\xae\x8e\x97\xf7\xb8\x95\x7d\x3d\x75\x8f\xf7\x53\x72\x15\x6f\x3c\xc3\xe3\x51\xc3\xf8\xe3\x51\x5d\x1e\xfb\x68\x73\xd9\x87\x69\x83\x98\x8d\xeb\x16\x4b\x64\xac\xca\x57\xcc\x58\xdd\xb9\x95\x8c\x6d\xd7\xb0\x94\xaa\x93\xae\x87\xc3\x5c\x9c\xd6\x8b\x39\xe2\xb0\xf1\xd7\xa5\x1d\xe6\x7d\xbb\x5d\x66\x1b\x6b\x78\x07\xfe\x2e\xbe\x2a\x30\x87\x2f\xc7\x92\xe0\x20\x59\x0e\xc6\x9c\xc5\x9f\x63\x69\x75\x5f\x8f\xf1\x65\x3d\xd5\xa4\x0b\x86\x7b\x96\x61\x89\xe7\xe1\x68\x4c\x4c\xeb\x92\xc2\xea\x29\x6e\xd8\x57\xbe\x73\xb4\x1d\xc6\xdf\x75\x33\x32\x2c\x7d\xf5\x0a\x91\x5e\x76\xdb\xf1\x07\x22\x03\x8c\x95\x00\xb5\xe4\xbe\x35\xe4\x79\xfc\x23\x0b\x3c\x0b\x77\x56\xcb\x18\x7e\x0a\x59\xe0\x8f\x79\xa7\x7a\xe0\xfd\x49\xdc\xe5\x81\x00\xe7\x01\xf8\x55\xbd\x2b\xae\xf5\x30\xf2\x99\xf1\x99\xac\xe5\x34\x40\x1c\x4b\x29\x07\x1c\x22\x7e\x0a\xdb\x78\xc3\x89\xe1\x61\x3f\x99\x6d\xfc\x1a\x8e\x25\x60\xb8\x6e\xf6\x82\x35\x1f\xad\x85\x34\xf6\x3c\x95\x69\x1a\xef\x7f\xe2\xbc\x97\x0a\x38\xe5\xd0\x86\x79\xdf\x6b\xb7\xcb\x7b\x58\xb4\x78\x8f\x05\x34\x40\x75\xae\x2e\x2e\xf0\x0b\xfc\xd0\x56\xf7\x1d\x8f\xb3\x60\x59\x2d\xd8\x02\x5e\xe9\x23\x63\xe0\x2b\x88\x95\x3c\x10\x49\x81\x78\x79\xc6\x8d\xfb\xcd\x7b\xc9\xb1\x30\xd8\xf1\xf2\xa4\x38\xda\x19\x8b\x89\x26\xdb\xe2\x0b\x4b\xd8\x5d\x4e\x08\xa8\x14\x04\x3f\x88\x6e\x8b\x24\xef\x52\x40\xe4\x66\xa2\xa4\x16\x9f\xf1\xe9\x7c\xd4\x1d\xa3\xe7\x52\x37\xc7\x48\xaf\x24\x00\x79\x98\x43\x1e\xb6\x5c\x92\xc4\x94\x06\x75\x20\x07\x54\x21\xc0\xa6\xff\x20\xfb\x79\x02\xbf\x4c\x63\xec\x90\x38\xb6\x4b\x0a\x9e\xd7\x89\xd5\x76\x86\x98\x31\x9a\xe0\xcf\x73\xab\xe2\xb3\x83\x31\x59\x35\x82\xfa\x28\x41\x63\x5a\x98\x82\x21\x98\xc8\x80\x98\x94\x68\x0e\xde\x33\x5c\xe4\x5b\x6e\x05\x31\xd1\xb1\xe8\x08\x34\x05\xbc\xcc\xab\x06\x17\x53\xc1\x93\x29\xf0\xd3\xb1\x85\xb9\xf0\xa5\xf9\xa0\x17\xe7\x35\xb8\x31\xaa\xf0\x69\xf1\xa5\x02\x87\x29\x3e\x1e\x85\x23\xc7\x85\x4e\x2b\x4d\x22\x00\xa6\x3c\x09\x28\x60\x65\x43\x94\x58\xd3\x86\x7f\x48\x2f\xf0\xbc\xf0\xa1\x89\xcf\xb1\xea\x54\x4e\xcf\x81\x36\x03\x2a\xb0\x68\x0d\xc8\x3b\x62\x03\x46\x9e\x04\x70\xd9\xe0\x31\x1d\x43\x3e\xc4\x02\xb8\x96\x1d\x2b\xfd\x86\x46\x5f\x7a\x86\x51\xe7\x72\x2c\x95\xb9\xcc\x1b\x1a\x7c\x07\x5f\x0b\x57\xa3\x7e\xf6\x18\xe1\xc7\xf5\x44\x0b\x50\x17\xe6\x1a\x01\x89\xc1\xf4\x21\x5f\x8e\xd1\x3f\xe2\xc2\xf2\x47\x86\x2f\x70\xc1\xe9\x83\x62\x1d\xc3\x28\xb3\x08\x2d\x64\x05\x8d\xc0\xa0\x90\x88\x15\x4a\x30\x45\x63\x6e\x99\x2a\x63\xa8\xba\x0d\x53\xef\x82\xf9\xf1\xc2\x12\x4d\x46\x50\xe2\xeb\xc8\x19\x9f\x85\x25\x6a\xba\xae\xc7\x14\x48\xd6\x01\x59\xd4\xe5\x05\x88\xb9\x89\x80\x32\xa2\xba\x87\xd3\x58\x58\x00\xa6\x2d\xa6\x5d\xa9\x17\x4a\x83\x43\xcf\x61\x8e\x09\xfe\xe0\xbe\xaf\xa4\x3c\x86\x4a\x9b\xee\x10\xc1\xbe\x38\xd1\x18\x4a\x37\x34\xb3\x50\x2b\xe9\x80\x61\xef\xbc\xea\x91\x17\xc9\x72\x66\x6e\x17\xee\x3d\xe0\x11\x09\x7
1\x83\xc4\x13\x3b\x0d\x58\xe3\x2d\xa4\xa2\x66\xdb\x4f\xc4\xea\x1e\x3d\x66\xb3\xa7\x97\x63\xe6\xbe\xf7\xc0\x50\x56\x79\xb3\x9f\xb2\xf0\x60\x60\x3f\x5e\x4e\x26\x86\x64\x3f\xc0\xd3\x5c\x90\xb5\x9e\x47\x02\x1f\xbc\x0e\xf8\x53\x58\xc4\x69\x2e\x39\x39\xf5\x7a\x2c\x2b\xe6\x1e\xd1\xea\x4d\x12\x15\xce\x5a\x56\x0c\xd1\x91\x8c\xf5\x3f\x42\x8a\xff\x90\x05\x36\x2c\x48\xf2\xac\x62\x41\xad\x4a\xa0\x80\x82\x9f\x2e\x6e\x4c\xac\x0d\xc1\x72\x93\x31\xa3\xd7\x04\x85\xa8\xdb\xf1\x7a\xfc\xd0\xb8\x3d\xc9\x61\xe2\x16\x35\xa7\x4c\x27\x5d\xde\x6d\x81\xf7\x63\x73\x6e\x56\x9a\xa3\x09\xba\x68\x21\x99\x8a\xa5\xe1\xd5\x4f\xac\x0b\xfc\xaa\x9a\xa0\x09\xab\x38\x8f\x58\x7c\x45\x28\xe2\x16\xe4\x6d\x44\x22\x25\xf0\x8b\x31\xd7\x85\x94\x7a\xab\x32\xbc\x82\xc0\x9a\x21\xdc\x5f\xd2\x34\x2f\x9a\x2f\xfe\x3e\x98\x92\x4c\xb2\x68\xbb\x7c\x64\x32\x8f\x46\xbc\xfc\x7c\x54\x5a\x30\x3c\x7b\xa4\x14\xd8\x0b\x06\xca\x4a\x10\xc7\x4e\xcd\xff\x0d\xd9\x5b\x6e\x0d\x5b\x74\x2b\xdc\xea\x8c\x41\x91\x47\xbc\x41\x62\x61\x16\xdf\x6b\xf8\xc0\x3c\xe9\x80\x5e\x90\x62\x83\x3c\x23\x3a\xb2\x1a\x91\x4b\x4b\x41\x6f\xac\x87\xf2\xd0\xc5\x27\x85\xc4\x77\x20\x08\x4e\x15\x16\xa4\x9e\xf4\x28\xb9\x8d\x77\x4b\x4d\x0a\xd6\x8a\x44\xd4\x45\x1c\xfd\x7e\x2b\xb4\xf1\xca\xdb\x4d\x98\x26\x09\x7b\xe7\x1f\x8c\xb3\x81\xa8\x04\x26\x13\xd4\xf6\x94\x9c\x6e\xac\x3a\x7a\xb5\x58\x0d\xf0\x52\xe9\x2c\x74\xe4\x56\x4b\xcf\xf0\x44\xac\xce\x30\x1a\xaa\xb1\xd0\x98\x6a\x05\xe3\x46\x4f\x6d\x2e\xcd\x25\x61\x9c\xc3\x17\x4a\x6c\xaf\x94\xfe\x32\xf2\x46\xde\x19\x33\xbd\x6b\xdf\x2c\x53\xb3\x3e\xd6\x79\x8d\x86\xea\xc7\xbe\xbf\x63\x92\x5e\x9c\x80\x9c\xb3\x99\x6f\x71\xa6\x2e\x28\xa6\xb8\xba\x4b\x09\x8c\xde\x69\xe2\x8d\x84\x1f\x14\x2e\x37\x06\x0a\x42\x00\x60\x10\xf9\x8f\x22\xec\xf2\xc2\xf8\x3d\x98\xc4\x24\xfb\xc7\xe6\x96\x43\x22\x57\xf2\x2b\x86\x71\x46\x72\x4f\x0a\xa7\x5c\x8c\xf0\x2c\x02\x92\xb1\x59\x65\x89\xe0\xb7\x03\x89\xe1\x8a\xef\x5d\x05\xd1\x15\x5c\x58\xd2\x97\xf1\xd1\x15\xc3\x02\x58\x9a\x1c\xef\xb5\x6e\x6d\x67\xfa\x98\xfc\x5b\x8b\x14\xa6\x9f\x90\x40\x8e\xad\x32\x1d\x3e\xfb\x00\x11\x8c\x20\xfa\xab\x72\x1f\x5f\x9b\x94\x53\xd0\x04\xd6\x22\x1d\x1e\xbb\x54\x1e\x8b\x91\x25\xd2\xf5\x0f\x04\xea\xb7\x14\x61\x81\x91\x57\x5e\xff\x2a\xf5\x93\xac\xd8\xb8\xbf\xc1\xb6\xa2\xf0\x3f\x88\x88\x28\x99\x48\x7b\x00\x3b\x1c\x1f\xd3\x1f\xba\x25\x82\xeb\xc4\x89\x5a\xe0\x49\xf6\x34\x9c\x16\xdb\xc6\x6d\x17\x59\x77\x42\x1c\x6b\x95\x1d\x54\xf9\xfb\xf0\xd5\xdc\x40\x83\xa8\x62\xef\x65\x37\x38\x63\x19\xeb\x88\x4b\x76\x33\x98\x58\xf4\x68\x18\x29\x67\xd7\x76\x87\x78\x2c\xa9\x06\xd7\x88\xbd\x1a\x03\xd1\xe6\x5c\x02\x47\x58\x28\x2a\x49\xeb\x7e\x9c\xa4\x63\x70\x85\xbc\x82\x19\xd4\x67\x16\xbb\xa2\xbb\x22\xd8\x78\x2e\x99\x72\x25\x17\x10\xed\x22\xbf\x21\x60\xd7\xa8\x52\x44\xcc\x4f\xda\x4d\x8e\x9c\x74\x2b\x1a\x3a\x00\xba\xfc\x1d\xea\x4c\x8d\xfb\xcf\x6e\xf5\x91\x11\xf7\xa1\x26\xdd\x2a\x45\xfb\x06\x04\x5c\xb9\x08\x70\x97\x5f\xc1\x4e\x51\xdd\xd3\xa8\xd7\x44\x88\x6b\xcd\xd7\x57\x9b\x94\x34\x58\x69\x14\xd5\x09\xbb\x96\xa2\xdd\xce\x5a\xf5\x4b\x0e\xd2\x02\x94\x75\x35\x7e\x72\x13\x3c\x32\xb8\x62\x38\x4e\x52\x15\xdd\x23\x96\x7a\xc2\x9c\x33\x25\xf9\xf5\x3e\x20\xe2\x8e\x89\x7c\x15\xb6\x8d\x82\x65\xeb\x35\x3e\x60\x61\x2b\xd6\xa7\x69\x7d\x08\x59\xb0\x07\xd4\xe7\x25\x52\x55\x35\x95\x7d\x27\x56\x3c\xfc\x29\x33\x76\xf1\x5c\xb8\x53\xf4\x5e\x42\xd3\xcc\x89\x08\x5a\x8f\xcd\x84\x96\xac\xd1\x2c\xe6\x6a\x4f\x39\xaa\xc1\x03\x36\xfd\xc4\x5e\x25\x53\x19\xca\x2a\x77\x72\xb5\xf2\x86\x14\x7f\xcc\xea\x82\xf7\xc8\x02\xc3\x41\xb3\x08\x4b\xb2\x83\xe4\x83\x58\xa7\x12\x7a\x41\x53\x91\x89\xa0\x5a\x58\xba\xbe\x63\x47\x0d\x1d\x81\xbd\x9e\x0c\x17\x11\xbd\x24\x3a\x29\x7b\xa6\xa8\x
8d\x43\xad\x74\xa5\x88\x65\xab\x43\x60\x98\xc8\x6e\x4b\x2d\x1f\x2c\x2d\x68\x0e\x9b\x53\x27\x56\x6e\xd1\x9f\xaa\x75\xd7\x43\x4b\xb5\x4d\xdb\x8e\x41\x8b\x97\xde\xc9\x00\x13\x5e\x6d\x0c\x88\x86\xab\xc9\x03\x8e\x60\xed\x2e\xdf\x45\xf8\x45\x8c\x81\x92\xba\xd0\xf2\xa2\x0d\xb8\x43\x91\x18\xd8\xe0\x4c\x29\x2e\x92\x2e\xb3\xb0\x74\x8b\xa9\x32\x9f\x12\xd9\xc4\xd3\x47\x5b\x5e\x9a\xb4\xf6\xb3\x19\xd9\x6a\xd4\xf2\x44\x3d\x28\x95\x8d\x78\x90\x81\x03\x43\xfa\xc0\x4e\x64\xb7\x44\xa0\x88\x76\xbf\x0f\x82\xe0\x02\x0a\xde\x6f\x06\xe7\x49\xc2\x6b\x67\xbc\xd2\x18\x77\x0f\x94\xd5\x86\xa4\xb1\x08\xaa\x2b\x6d\x46\xf7\xb1\x27\xc1\x89\x21\xac\x65\x4f\x2b\xa9\x30\x89\xe5\x52\x2d\x69\x92\x63\x72\x51\x67\x3a\x46\x2d\x77\xd8\x17\xaf\xc5\x0c\xac\x37\x29\xb0\x2b\x4a\x8e\x90\x7a\x1a\x54\xb4\xd9\x22\x5e\x11\xbe\x95\x48\xa6\xeb\x74\x80\x61\x1e\xd1\x07\x76\x4c\x9c\xf7\x01\x1f\x5c\x1d\x13\xbe\x3f\xe3\x25\x07\xf0\x57\xf7\xd6\x8e\xbd\x44\x82\xea\xae\x8d\x80\xed\x92\x83\x2a\xbd\x77\xaa\x49\x8d\x00\x92\x68\xf7\xa6\x8d\xfb\x43\xf5\x46\x0e\xd4\x2d\xca\xae\x02\x5d\xe4\xe8\xab\xd7\x93\x14\xa4\xb4\x1f\x7b\xa7\x6e\x32\x61\xb1\x2c\x0c\x48\x36\x13\xa5\x6e\x62\x21\xb0\x60\xdb\x52\x8f\x61\x04\xed\xbc\x48\x7d\x42\x8a\x35\x5a\x71\xb3\x05\x96\x90\x05\x34\xe6\x22\xa9\xd3\x25\x6d\xab\x68\xf7\x4c\xe4\x4a\xc4\x48\x8b\xca\xe8\xfe\xa3\xa1\xc5\xde\x60\x56\xc5\xcf\xad\xc9\x30\xd8\x44\x2e\xa6\xdb\xa2\xd0\xd2\x24\x6d\xe2\xfd\xee\xce\x00\xcb\xab\x28\x73\x9c\xac\x94\x5c\x98\x81\x26\xd7\x40\xf9\x3d\xfd\xa8\xcc\xe9\x27\x09\xe0\x61\x06\x5d\x47\x27\x32\xb1\x66\x82\x26\xd1\x60\x9a\x04\x8f\xbb\x73\x38\x26\x3e\xd2\xd9\x88\x40\x11\x69\xd1\x56\xc1\x52\x70\x31\xb5\x7a\x55\xb8\x8b\x63\xb8\x08\x5f\xd3\x5a\x52\x72\x01\x54\x4e\x79\xc3\x47\x3e\x39\x92\x96\x30\xe8\xc6\xe4\x33\xa3\x87\x58\x4c\x3d\xd8\xfb\xb1\x6c\x88\x1e\x19\x62\xab\x35\xd4\xd4\xfe\xd0\x2e\x69\x73\x59\x85\x86\x68\xc1\x49\xcf\x2e\x36\x96\x73\x57\xf5\x85\x83\x8f\x22\x0c\x03\x4b\xee\xe1\xe1\xb4\x53\x8a\x4d\x92\xe0\x24\x26\x35\x92\xa7\xe0\x8e\xc1\x87\x26\x23\x2d\x8e\x9d\xb3\xd8\x22\x2b\xe0\xbd\x1e\x46\xa8\xa7\xed\x5f\x55\x16\x79\x29\xd2\xab\x68\xeb\x8e\xb1\x94\xf9\xb6\xde\x05\x4c\xec\xfa\xf4\x33\xb6\xd3\xb2\x40\xcc\x54\xa0\x35\x71\xfa\xa0\x5c\xf5\x32\x8d\xef\xae\x90\xa9\x5f\x64\x0a\x15\xb8\x1f\xe6\x9a\xb9\x5b\x88\x6b\xcf\x1f\x95\xef\xaa\xcd\xc3\xc6\xb0\x7e\xbb\xf6\xfb\xd0\xee\x7b\x91\xea\x73\x73\xa6\x61\xec\xb2\x36\xff\x23\x68\x33\x99\x83\x16\xcf\xce\xc6\x75\xfe\x12\xf4\xfc\x05\xcc\x73\x49\x14\xb2\xa0\x8b\x50\x99\x87\xba\x7e\x31\x95\xd1\x64\x92\xa1\xe4\x2e\xb6\x03\x3a\x62\x09\xce\xde\x46\x25\x80\x9f\x6e\x96\x5c\x09\xe9\x21\x59\x0d\x58\x2c\xed\x60\xf7\xe4\xab\x11\xdc\x33\x22\xa7\xee\x7d\x0d\xed\x2b\x9e\xbd\xc0\xe3\x8e\x59\x59\xee\xbd\x16\xd2\x83\x33\x1e\xe4\x88\xa5\xc5\xde\x9a\x95\x59\xb5\x66\xf2\x74\x99\xa5\x3e\x62\x99\x51\x8c\xe6\x5d\x0f\xc8\x95\x9e\x89\x6d\xda\xd3\x9e\x70\x53\x56\xee\xe4\x3f\x86\x8f\x9d\x67\x72\x04\x86\x2b\x14\xd9\x7c\x56\xa6\x9b\xb3\x46\xed\x6b\x66\x73\xc0\xc5\x9e\xa0\xf6\x01\x64\xac\x5d\x95\xf0\x46\x59\xe2\x8e\x5b\x75\x68\x53\x73\x5e\x89\x64\xff\x25\x0f\xdf\x2d\x0f\xd6\x93\xd0\xa9\xdb\x69\x0e\x94\xb3\xe3\xa9\xc2\x6e\x52\x8d\x29\xe2\x7e\x34\x8b\x89\x25\x09\xb5\x1f\x57\xff\xe4\x71\xd3\x67\xd0\xba\xc3\x3d\x8b\x09\xd2\xc8\x81\xaa\x69\x39\x6b\xab\xd7\x4d\xfe\x60\xb7\x98\x39\x30\x1d\xb3\xe2\xbd\x85\x73\x2b\xe8\xe3\x5c\xc7\xbf\xec\x5d\xf6\xd3\x3e\x78\xf4\x8e\x18\x86\x61\xe7\x13\xd5\x6c\x35\x1f\x54\x64\x7d\xf0\x40\x26\xaf\x3e\x66\xa6\xf5\x9d\x87\x4f\xe1\x87\x7d\x9e\xa4\xfa\x72\x52\x8d\x72\x22\x04\x2f\x2b\x8e\x8f\x1c\x0b\x34\xa9\x7b\x18\xba\xbf\x79\x1a\x97\xf4\xdc\xee\x25\x98\x0e\x1b\xad\x0c\x62\x9b\
x8a\xc9\xe2\xda\xcd\x93\x84\xac\xdd\x73\x38\x81\x50\x15\x46\x79\x17\x2e\x73\x74\x74\xa9\x43\x5a\x78\xe0\xd3\xc2\xda\xca\x1e\xe8\x1b\x61\x55\x43\x34\x25\x4f\xb7\x14\x45\x8e\x6a\xd4\xaf\xe9\x4b\xf5\xc8\x20\x86\x24\x25\x1d\x0a\x17\xcd\xe0\x13\x95\xf4\xf3\x20\x10\xd9\xa9\x15\x22\x6d\x3c\x00\x5a\xc9\x79\x39\xf2\x5c\x42\xca\x1e\x3d\xdc\xdf\xbf\x14\xa6\x89\x24\x27\xf6\x93\xe8\x23\xf5\xd6\xf0\x38\xd8\xdb\xf5\x64\xf6\xd2\x3e\x66\x73\x58\x48\x3f\x54\xe2\xe1\xc4\x7d\xa2\x09\xd1\x19\xd9\x16\x8e\x9f\xa2\xe7\x96\xec\xca\xde\x0a\x18\xd9\xd1\x89\x13\x78\x04\xd6\x8f\xba\x50\x80\x3e\x64\x65\x3f\xb0\x76\x0b\xe4\xb5\xcd\xd2\x9f\x64\xe0\x05\x0b\x62\x99\x34\x6e\x79\x80\x79\xaf\x86\x29\x9a\x17\xe9\xdc\xb4\xb1\xae\x58\xe9\xe4\xaf\xad\x28\x3d\xa9\xa1\x5e\xe9\x68\xd3\xa2\xe5\x1b\x11\x95\x93\x4d\x76\xa2\x7a\xd1\x9f\x52\x45\xed\x2e\x0e\xd2\xe6\xc5\xe1\x09\x8f\xc3\x33\x23\x2c\x7c\xe5\x3a\x34\xd2\xd3\x81\x30\x1b\x2e\x60\x07\x31\x2b\xb3\xe7\xe8\xe1\xfd\x1d\x0f\xef\x52\x1f\x91\x05\x4b\xf1\x0c\xa5\x90\xed\x8d\x99\x43\x14\x1c\xfc\x53\xd8\x2f\xd1\x0e\xd8\x4c\x11\xf3\xa2\x23\x6f\x52\x05\x4f\xbf\xf4\xea\xcb\x2a\x72\x82\x53\x4e\x87\xf6\x19\xc7\x22\xea\x87\xc1\xef\xd8\xbb\xef\x67\xf2\x20\xa6\xfb\x6c\x91\x2e\x29\x49\xe1\x1b\x27\x25\x8e\xc4\xe0\x52\xfe\xc9\xe3\x1b\x71\x92\x22\x95\xac\xb1\x17\x55\xd6\xda\x05\xbe\x3a\xda\x5c\xaf\x9e\x58\x5e\x94\x48\x4b\x9b\x73\x37\xaf\xdb\xdb\xa0\x06\x7b\x62\x58\x4d\xb0\xf3\xd4\xb0\x4c\x68\x71\xe8\x25\x1c\x26\xcc\x7a\xbc\xb5\x59\x56\x92\x96\xf4\x2f\x03\xa6\x5e\xbe\x2a\xa4\x23\x23\x99\xa7\x65\x5f\x0d\xcb\x71\x38\x42\x2c\x7b\x5c\x09\x79\xe1\xd6\x84\xb0\x09\xea\xba\x53\x38\x14\x4b\xee\x37\x0b\xd8\x7a\x5c\xb9\xed\xaf\xbd\x54\x62\x45\xae\x76\xab\xb7\x22\x14\x06\x9f\xbc\x48\x3a\xb9\xcc\xab\x46\x06\x65\xd6\x55\xbf\x9a\xfa\x3f\x12\xf8\x8f\xab\x03\x3e\xeb\x25\x0d\x73\xec\xbc\x97\x29\x6a\xde\x85\x93\x57\x27\x6a\x9c\x61\x73\xb4\xf0\x7e\xb3\xf8\xb7\xe7\xff\x22\xf6\x89\xd3\xbe\x69\x9e\xc3\x9f\xba\xae\x57\x90\x88\x40\xa7\xa1\x9f\x94\x54\x60\x2a\x68\x5d\xa6\x76\x16\xb5\x24\x72\x90\x13\xac\x81\x6d\xf8\xba\xb3\x90\xea\xd7\xc7\x78\x79\xe2\xe0\x09\xb9\xa1\x15\x51\x73\xe6\x8e\x04\x92\xce\xae\x9d\x7a\x3d\x45\xba\xaa\x52\x0c\x16\x85\x2b\x49\xd7\x8f\xde\x86\xd0\xfe\x0f\xc5\x98\xff\xe3\x44\xb1\x6b\x11\x84\xc1\x75\x03\x02\x08\x96\xea\x31\x1b\x31\xff\x4f\x44\x86\x6c\x85\x4d\xcd\x14\xe2\xa2\x77\x9b\xfe\xe8\x07\x4f\xdc\x6e\x75\x4d\x05\x80\x14\xe7\xbb\x36\x50\x14\xbe\x24\xc6\x7e\x8d\x8a\x94\x20\x91\x0b\xe7\xfe\x66\x0e\x0c\xe2\xd1\x29\x6a\x29\x08\x7e\x17\x9b\xf0\xd4\xc4\x1b\xc9\x79\x1f\x1d\x8c\xb5\x88\x37\xd9\xb5\x2a\x27\x43\xf0\xd4\x4a\x2a\xfc\x84\x17\x85\x94\x10\x4e\xb3\x30\x54\xa2\x0e\x89\x57\x4c\x46\xd1\x88\x9d\x57\xe6\xcf\x1c\x4c\x63\x3f\x78\xc5\xb4\x0f\xf6\x4e\x93\x53\x49\x2f\x3f\xb3\x7b\x3a\xee\x0d\x38\xaa\xfd\xea\x4d\xc5\x4a\x9a\xbb\x32\xc6\xfd\x48\x02\xed\x47\x31\xba\xa3\xfb\xba\x1e\x9f\x4d\x45\x7a\xe5\x41\x52\xa7\xdb\xa6\xca\x4b\x83\x7b\x64\x6c\xac\x9a\x12\x1f\xa6\xdb\x28\x86\x92\xa8\xd5\xd6\xa0\x6b\x1b\x4f\x0e\x49\xad\x5a\x7f\xbe\x48\x06\x6c\x64\x69\x75\x28\xc9\x0b\xab\xed\xd7\x89\xc0\x65\x22\x20\x78\x3f\x6e\xa5\xd0\x7d\xf2\x1a\xc2\xb8\x1b\x88\x70\x93\x8d\x31\x88\x4b\x33\xf4\x27\xbf\xb6\x6f\x61\x1d\x71\x9f\x68\xf3\x1c\xa3\xf0\xc4\xfc\x36\x39\xdb\x54\x6c\x75\x4d\x21\x8b\xfb\xe6\x9d\xcc\xc6\x14\x01\xf4\xc5\x56\x79\x95\x68\xb9\x54\xf9\x30\x58\xcf\xee\xc8\xd5\x0d\xe2\xb7\xd2\x2c\x3f\xbc\x87\xb8\x15\xaf\xc7\xb0\xd7\xad\xa5\x60\x6e\x2d\x65\x06\xc5\xd3\xa1\x0a\x73\x67\xd2\x0a\xd3\xa9\x7a\x7c\xc8\xde\x6c\xb7\x24\xd8\xc3\xe3\xeb\xe6\x2c\x30\xb9\xc8\x34\x06\xa2\x5f\xed\xf8\x25\xdc\xef\xe8\x0b\xd4\x0b\xa6\xbc\x3f\x99\xf5\x9a\xd6
\xfc\xaf\x83\x34\x49\x41\xb1\xe8\xad\x99\x34\xe3\x52\xd4\xab\x98\xd3\xd1\xf6\x0e\x2b\x39\x53\xef\x20\xf4\x3d\x8d\x67\x36\x7e\x57\xb1\xb7\x63\x7c\xa8\xb0\xa3\x2e\xed\x07\xc0\x43\x8d\x57\x7d\x62\x8f\x34\x18\xf1\x7d\x8b\x96\xc4\x33\x24\xd1\xe2\x94\x2a\xd7\x3e\x1e\xe6\x0e\xaa\xe6\x5d\x29\x75\x8a\xa9\x58\x02\xbb\xe6\x29\x98\x9b\x05\x2f\xbd\xbd\xf8\x0c\xff\xbe\xa0\x05\x9a\x37\xb7\x98\xe7\x3f\x6f\x2c\x24\x6d\x0b\x98\x8c\x2a\x30\x4e\x7a\x01\x71\x19\x79\x5e\x84\x00\x55\xf4\x53\x64\x0a\x8d\xb1\xe8\x1a\xae\x63\x3c\x49\x66\xe0\x1f\x53\xfb\xb7\xc8\x4b\x62\xf1\xc1\x45\x3a\x2c\xf2\xbe\x51\x0d\x3f\x5c\xd7\x0e\x7f\x78\xf9\x80\x97\x90\x2f\x24\x8d\x61\x4e\xd3\x8b\xea\x77\x26\x60\xc3\x88\x00\x24\x29\xf6\xb6\x03\xaf\xf2\xfd\x79\x53\xa0\x1f\x22\xb6\xf2\xeb\xaf\xff\xf9\xbf\xbf\xfe\x3f\x00\x00\xff\xff\x45\xf8\xc0\x95\x0f\x2e\x00\x00") + +func dataMalenamesJsonBytes() ([]byte, error) { + return bindataRead( + _dataMalenamesJson, + "data/MaleNames.json", + ) +} + +func dataMalenamesJson() (*asset, error) { + bytes, err := dataMalenamesJsonBytes() + if err != nil { + return nil, err + } + + info := bindataFileInfo{name: "data/MaleNames.json", size: 11791, mode: os.FileMode(420), modTime: time.Unix(1452717629, 0)} + a := &asset{bytes: bytes, info: info} + return a, nil +} + +var _dataPasswordsJson = []byte("\x1f\x8b\x08\x00\x00\x09\x6e\x88\x00\xff\x4c\xfd\xeb\x72\xf3\x3a\xd0\x2c\x8c\xdd\x0b\x7f\x24\x55\x7b\xbf\x3b\x65\xc9\xe7\xe4\x16\x72\x07\xf9\x91\x02\x49\x88\x84\x44\x12\x7c\x78\xd0\xe9\xbb\xf9\x6f\xa6\xbb\x21\x2f\xfb\x59\x65\x2d\xd9\x92\x48\x60\x30\xc7\x9e\x9e\xff\xab\xfa\xff\xa6\x75\xab\xfe\xdf\xff\xbf\x6a\x0e\xeb\x7a\xcb\x4b\x5b\xfd\x4f\x75\x38\xbe\x7f\x7c\x7e\xbd\x1e\x7c\xff\xe8\xa1\xfd\xf8\x77\x8b\xcb\xf6\x28\xbf\xb2\x9f\xed\x12\xba\x3c\xd9\x83\x79\x5f\x57\xff\x45\x1d\xd6\x58\x87\x61\xb0\x87\xa7\x9c\x37\x3d\x1c\xe2\x36\xc6\xe4\x7f\x37\xe6\xe9\x12\xfd\x0f\xbf\x7e\xfd\xdb\x1e\x84\xba\xb1\x77\xf3\x5f\xed\xeb\x16\xa6\xce\x1e\xad\x7d\x68\xf3\xcd\x9f\x0a\xeb\x16\x17\xff\x3c\x7c\xd9\x83\xe3\xdb\xdb\x9b\xfd\x38\xdb\xa5\x06\x7f\xbf\x75\x9f\xe3\x32\xe2\x61\x1f\x96\x21\x3e\xfe\xae\xdb\x2f\x61\x6f\x2e\x63\xf4\xdf\xed\x13\xdf\xc8\x9f\x79\xe4\xdd\x1e\x6d\x8b\x7d\xde\x94\xfd\x4d\x17\xfb\x58\xfc\xb6\xde\xf5\x79\x5b\xea\xf8\xcc\x9a\x9b\xe6\xf5\x42\xdc\xdf\xc6\x4f\xdb\xa2\x2d\xdc\xff\x60\xdd\xec\xc7\x25\x0d\x03\xfe\xac\xcf\x0d\xef\xaf\xf1\xcb\x49\xfe\xd9\x43\xbe\x46\x5c\xe9\xb4\xf6\x69\xf2\x87\xf6\x9a\x3e\x0f\x51\xab\xe0\xef\x12\xe7\x19\x2f\x0f\xf6\x69\x78\xc3\xb2\xf6\xfe\xdb\xaf\xcf\x8f\xf7\xe3\x01\xcb\xd1\x75\x78\x4b\x5b\xa8\xe5\x16\x16\xff\xc3\x35\x0d\x57\xbc\xb4\xb5\xa5\x0e\xfe\xcc\x23\xd8\x12\x47\xbd\x09\x97\xf6\x0b\x5f\x7e\x79\x71\x18\xb2\xfd\xcc\xb8\x65\xbf\x9f\xb4\x45\xac\xd0\x69\x89\xb1\xcd\xa3\x5f\x79\x1e\xe7\x9d\xcb\xb0\xc6\xbb\xdf\xcb\x66\xcb\xd7\xe2\x89\x2e\x69\xa5\xfa\x30\x8e\xfc\x93\x5d\x0f\x9a\xbc\x5c\xe3\xb6\x45\xad\x15\x6f\xc7\xd6\x13\xdb\xae\xdd\xb3\xbf\x1c\xf8\xff\x47\xff\xf6\x37\xcc\xc3\x89\x2f\xef\xed\x9a\xfd\xc5\xf3\x92\x26\x2d\x82\x3d\x37\xac\x31\xf8\xcd\xa5\x60\x92\xe3\xd2\xf9\xf0\x3b\xb8\xe1\xca\xbb\x36\x43\x5a\x62\xb3\xc4\x0d\xcb\xda\x9e\xba\xde\x9f\x99\xc3\x72\xc1\x26\xe4\x5b\x9d\xf1\x20\x8c\x61\xc9\x58\xc2\x6d\x49\x77\xbf\xc6\x30\x34\x90\xdc\xe4\xfb\x43\x91\xe8\xf6\x64\x0b\x0b\x69\x5e\x66\xec\xcf\xda\x98\x10\xe3\xfa\xe6\x3e\xc7\x09\xaf\x0c\xf8\xa2\x90\x44\xec\xc1\x9c\x97\xd5\xae\xd5\xdf\x3e\x69\xfb\xc7\x60\xbb\x92\x20\x32\xf6\x16\x17\xec\xda\x14\xd6\x86\x6f\x1f\xc3\xb4\xfb\x15\x1f\xde\xfd\x9b\xc7\x02\x2f\xeb\xf3\x32\xf9\xcf\xd5\xae\x77\xda\xfa\x00\x09\x9b\xb6\x84\xed\x34\xd9\x8c\x03\x3f\x71\x9d\x72\x
9e\x71\xde\x72\xe6\xf2\xdf\xfa\xb0\x45\x4a\x42\x6a\x22\x65\x74\x1d\x33\xaf\xa6\xb3\xdf\xdd\xc2\x03\x52\x72\xc9\x5b\x78\x2d\x8d\xbf\x57\x0c\xdd\x10\xb9\xdc\x7e\xf5\x13\x96\x9b\xc2\x3e\x04\xfc\x7c\xde\x9b\x6b\xed\xcf\xdb\x56\x2d\x61\x49\x2e\xf0\x53\xea\xfa\x8d\xa7\xae\xb5\xad\x8f\x12\x9c\xf0\x0f\x0f\x4e\xa7\x18\x79\x75\xf6\x8f\x62\xd6\xf4\x38\x60\xc3\xc0\x5d\xbb\xe3\xcb\x2f\x3c\xda\x46\x40\x04\xe7\x21\x3c\x70\x03\x53\xd3\x1c\xbe\xdf\x5c\x60\x6e\xe9\x19\xa0\x94\x7c\x23\x6a\xbf\x81\xf3\x3e\xa5\x8c\xbb\xf4\x13\x3d\x61\xe3\x5d\x14\x74\xc5\x26\xd7\x2d\x44\x36\x4e\x53\x5a\x71\x0f\xf9\x76\xce\x35\x8e\xee\x64\xdf\x5c\x6c\x1d\xf3\x75\x4e\x94\xeb\x21\x5c\xb8\xb0\x4b\xa8\xed\x5a\x7d\x55\xa4\x30\x6c\x75\x9b\xd8\x62\x7d\x4e\x51\xa7\xe0\x61\xbb\xd3\x4b\x2c\x6b\x9c\xa7\x3a\xaf\x1b\xa4\x09\x32\x81\xfd\x5f\x78\xd6\x7d\x51\x4d\x47\xbe\xb4\xcc\x8a\xed\x98\xec\xb4\x9e\x70\x7f\xfa\x18\xbb\x85\x2d\x6d\xb8\xe0\xb0\x50\x16\x96\xe0\x17\x27\xf9\x9a\xf0\x9b\x96\xd2\xec\xf7\x84\x15\x33\x41\x9f\x20\xe8\xbe\x02\xa1\x6d\xa9\x78\xa2\xee\x6e\xb0\x13\x83\x8b\x1a\x53\x5b\xb6\xab\x1e\x76\x9c\xd0\xe4\xba\xc8\xf7\xe1\x0d\x5f\xd8\xc8\x69\x0a\x38\x3d\x65\x1b\x0e\x87\x97\xda\x35\x45\x60\xff\xfc\xa2\x62\xbb\xe6\x3b\x74\xc2\xfd\x70\x78\xff\xd1\xb1\xe3\x1d\x0f\x75\xc6\x21\x73\x91\xed\xf1\x16\x94\x1c\xd7\x2a\xa6\xad\xe2\x14\xdc\x18\xfc\x0b\xcf\xdb\x7a\xc7\x3e\xe8\xaf\xbe\xf9\xe5\xbb\x1b\x56\x2a\x42\x5b\x98\x89\x1a\x27\x0f\x2d\x64\xb2\xde\x37\xfb\xf3\xe0\xc2\x70\x4d\x17\x5e\x7c\xba\xd9\x27\xf9\xc9\xf5\xa5\x1d\xd6\x97\xf6\xe0\x89\x8f\x0b\xee\xbe\x4b\x0b\x7e\x35\x86\x36\xad\x78\xa6\xc7\xa1\xe6\x91\x0a\xcb\xb6\x44\x1c\xd3\x30\x6f\x81\x36\xca\x56\x12\x32\x6a\x57\x33\x72\x17\xfd\xc3\xb9\x4f\x39\x77\xe5\xba\xfc\xae\x17\xd7\xf7\x1b\xd6\x43\x87\x6e\x48\x76\x0a\x67\x48\xfd\xc9\xf6\x89\xc6\xc3\x0e\x08\xcf\xe6\xb8\x9f\x4e\xf8\x90\x6d\x5f\x36\xaa\x98\x3c\xf7\x50\x0f\xbe\xb4\x76\x5f\x7e\x59\x5b\x7e\xf0\x94\xae\xc9\x4f\x1c\xd7\x43\x12\xd6\x25\xbb\x67\xdc\x6a\x68\x24\xb4\x53\xbc\x3d\xf2\xc2\x9b\xd0\xfa\xd5\x7b\x5d\x07\xec\xe1\xf1\xf8\xfe\x5e\xd1\x0a\xe1\x8f\xc7\x6c\xb6\x90\x37\x6a\x47\x69\x8b\xbe\xa0\xed\x92\x78\x79\xa6\x70\xe7\xb1\xa8\x70\xae\xf1\x5c\xd4\xd7\xe0\xa6\x13\xcb\x73\xb7\x33\xeb\x3b\xf9\xf3\xf5\xfd\xf9\xfe\xe6\x26\xaa\x8e\x81\xc6\xc0\x5e\xb5\x51\xe1\x64\x7c\xda\xe7\xe1\xd3\x85\xeb\x88\x2f\xff\x8b\x3e\x95\x5d\xf4\x8f\x30\x45\x81\x75\xee\xf6\x40\xeb\xb9\x71\xe5\x75\x16\xba\x68\xeb\x9f\xca\xd5\xbf\xb4\x06\xf4\xc6\x4b\x66\x1a\x3b\xd5\x6d\xe0\x72\x2f\x09\x8f\x7e\xf4\x85\x0d\x32\x87\x64\x6f\x71\x92\x37\x33\x91\xa9\xf1\xfb\xcd\xcd\x06\x25\xb2\x2d\x79\xaf\xb9\x0d\x7b\x31\xbd\xeb\xb6\x9b\x5a\xc0\x2e\x8c\x4d\xf0\xbb\x31\x53\xbb\x50\xe9\x98\xe6\xf6\xf3\xe5\x12\x37\xcf\xd4\x9a\x7e\x94\xaa\xe2\x1a\xed\x09\x0b\xd4\xe1\x53\xec\x63\x71\x22\xdb\x3c\xd8\x16\x4f\x54\x2f\x69\xaa\x61\xbe\xba\x5d\x02\xfe\xfb\xf3\x6d\x06\xbe\x82\x05\xe6\xdf\x87\x21\xde\xa9\xbb\x16\xb3\x22\xd0\xcc\x19\xda\xed\x78\x80\xcd\x94\xe9\xb4\x27\x43\x93\x07\xbf\xdf\x7b\xb8\x26\x3a\x01\xfc\xa8\x8a\xfe\x09\x37\xb0\x36\x35\xf0\xf6\xf6\x8d\x15\x18\x6b\xfc\xd9\x35\xe7\x16\x12\xaa\x25\x34\x73\xc3\x93\x11\xe6\x4c\x07\xe1\x94\xa0\xd2\xb7\xa2\x4b\x6c\x83\xaf\xb8\xa6\x6b\x7e\x04\x9e\x00\xd7\x4a\xd0\x6b\xfb\xda\xeb\xca\xea\x48\xb3\x3f\xbf\xac\xe7\x32\x27\xff\x1b\x13\xec\x19\xf6\x6a\x7d\xb4\xd2\x6e\xb1\xa5\x6f\x32\xeb\x8c\xf8\x27\x50\x29\xd3\x06\x9f\x4d\xbc\xe9\x5c\x9d\x86\x07\x45\xd7\xcd\x89\xff\x3c\xbe\xfb\xb7\xab\x17\x7c\xfd\x7d\x12\x6c\x57\x96\x93\x34\xc4\x2e\xc2\x65\xc8\xfb\x0f\xae\x8e\x8e\x51\x05\xbb\xf6\xc4\x47\x2d\x65\x13\xea\xb4\xb4\x49\xfe\x90\x36\xf8\x13\x5f\x10\x83\xd9\x36\xab\xa2\xa2\x93\
xb2\xeb\xfd\x62\x27\xa8\x22\x39\x6e\x53\x1a\xc3\x20\x49\xc7\xfb\x74\xd4\xd5\x1f\xf6\x55\x41\x2f\x5d\x03\x9c\xa3\x2e\xb7\x4f\xf3\x16\x03\x54\xc5\x29\xf6\x34\xb1\xb3\xa9\xdf\x0d\xee\xd7\xc2\xbd\x0e\x7b\xb7\xc3\xc3\xb4\xbd\x19\xe9\xcb\x64\x7f\x7f\x33\x92\xb6\xaf\xfb\x88\x15\xba\xe0\x92\x4d\xcd\x35\x19\x17\x75\x09\x5f\x37\x8a\x86\xfc\xc9\x66\x1f\xcd\xd5\xe4\x89\x5c\x4e\x29\x0e\x2d\x74\x89\x5c\xa8\xc6\x34\x81\x7f\xc2\x00\x67\x84\x1e\x39\xfc\x51\x3b\xc4\x5b\x7c\xb9\xd7\x55\xf1\xb7\xb1\xe6\x70\x17\x60\x13\x4c\x59\xc0\x24\xbb\xa6\x35\x23\x57\xc9\x62\x50\x94\x23\x04\xf5\x9d\x8e\xab\x2d\xe9\x15\x66\xca\xcc\xbf\x6b\x80\xd7\x55\xaf\x15\x7d\x1e\x88\xc4\xba\x2f\xf4\x05\xcd\x72\xaf\xd0\xa5\xbf\xf8\xc2\x22\x98\xbe\xc4\x1e\xa4\xa5\xf6\xbf\xf7\x0b\x19\x22\x56\x4d\x5a\xc5\x7f\xd9\x6c\x94\x01\xd3\xf4\x2d\x7c\x61\x9c\xca\x8a\x8e\x96\xbf\xe4\x3e\x9b\x8e\x90\xf4\x9a\x2a\xa0\x81\x5e\x92\xbc\xd4\x36\x36\xe5\x84\xdc\xa8\xde\xd7\x5b\x2c\xda\x5d\xfb\x92\xa7\x81\x26\xc1\x3d\x0e\x69\x32\xbb\x97\x7c\x19\x1e\xfe\x16\x8d\x3b\x7d\x78\xc9\x12\x1a\x29\xd0\x48\x4f\x44\x76\xd6\x9c\x38\xaa\x82\xd8\x9a\x52\xef\xfc\x61\xbb\x44\x3b\x80\x15\xdc\xc7\x3e\x75\x8c\x66\xa2\x6b\x69\x6a\x13\x6e\x37\xf4\x04\xd7\xb3\x2d\x51\x94\x39\xb3\xa6\xc3\xa1\x64\x52\x67\xce\xab\x4b\xc8\x3b\xbe\xfc\x5a\xcc\xb6\xd1\x26\x99\x63\x6b\x82\x5e\x02\x0b\x8b\xb8\xfc\xd6\x66\x73\xf8\xb9\x29\x27\x73\x86\xb3\xb6\xdd\x9f\x99\xd3\x38\x63\x41\xb0\xd7\xfe\xd3\xad\x0f\x03\xb8\x29\x2d\x57\xba\x51\xd7\x60\x7f\x05\x2d\x21\x8d\xec\xfe\x1b\x85\x7e\xde\xc7\xf9\x82\x3f\x37\x47\xf5\xa6\x20\xd0\x75\x09\x8f\xfc\x5a\xe2\x83\xd1\x14\x1d\x44\xd7\x0e\xff\x26\x75\x6a\x3b\x1a\x10\x4a\x36\x71\xd8\xa0\x47\x4d\x7a\x96\x85\x5e\xfc\xba\xe2\x94\xbe\xb4\xfb\x3a\xe9\xfc\xd9\x51\x8f\x6b\xa2\x38\x99\x52\x6f\xb9\xc0\x34\x26\xae\x4f\xcd\xe3\xa2\x8b\x3a\xb8\x5a\x70\x89\x2e\x9a\x0d\x91\xef\x1b\x9c\x4c\x3b\xb4\xd2\xb4\x16\x49\x5d\x63\x07\x11\x5a\x87\x34\x5f\x26\x9c\x21\xad\x6b\x6b\x17\xeb\x2a\xff\xf0\xef\x78\x7b\xc7\xf2\x34\xf6\x37\x08\x5e\xf8\xd4\x07\x54\xf1\x52\x22\x23\xf3\x37\x37\x79\x9e\xfb\x8a\x3b\x52\x00\x61\x1b\x64\x52\x38\xd1\x1d\x37\xc1\xb4\xa8\x17\x22\x64\xef\x9f\x10\x33\x22\x5c\x3b\x40\x69\x64\xca\xe5\xc9\x2d\x1c\x1d\x7f\xfb\x55\x73\xc1\x15\x36\x99\xfb\x66\x1b\xdb\xc6\x53\x45\xeb\xcf\xe5\xec\xc3\x2d\x24\x17\xa3\xd3\x60\xdb\xac\x10\x63\xc5\xaf\x2c\xaa\x1b\xb9\x78\x52\x74\x1e\xc3\x33\xd2\x68\x2d\x7e\x92\xbc\x30\xa8\x3a\xc3\xb9\xb6\x8b\xe0\xef\xdd\xb2\xc1\x01\xb6\x63\xda\x52\xc4\x26\x53\x58\xfd\xc6\x0f\x98\x60\x84\x5d\xc7\xe2\x09\xc5\x9e\xa6\x88\xf8\xff\x08\x76\x0e\xdc\x75\x44\x05\x6e\xdd\xe1\x75\x48\xbc\x57\x93\x3f\xc8\x9d\xf9\x1e\xbd\x5c\x8b\x8f\x0f\x69\x52\xf3\x82\x69\x16\x5a\x58\x38\x7f\x43\xfa\x83\xfe\xdc\x50\xe2\x05\xfb\x13\x5d\x97\x7b\xd8\x13\x16\xd6\x84\xcf\xf4\x81\x3f\xe7\x9f\x57\xff\x39\x14\x7e\xac\xcd\x21\xe0\x33\x83\xef\x2e\x8d\x42\x70\x53\x84\x94\xc8\x9b\x7f\x43\x81\x98\xb7\xc8\x60\x45\x09\x93\x83\x16\x0e\x37\x64\xf1\x92\xf9\x1b\x90\x04\x13\xc3\x31\xa4\x01\x07\xcd\xb4\x03\xf5\xa4\x59\x80\x1d\xd6\xc2\xe4\xeb\x12\x78\x33\x32\xa6\x61\x71\xf7\x14\x31\x64\x58\x1e\xe7\x30\xe9\xfe\x67\x2c\x98\xdd\xd5\x86\x1b\x6d\x63\xc6\xd1\xeb\x28\x36\x16\xc0\x6c\x32\xd5\x7e\x4a\x32\xed\x5b\x93\xf7\x8e\x8b\x76\x7c\xb3\x7f\x65\xd3\xfd\xfa\x2d\x0e\x18\x43\x55\x82\x8b\x0a\x3e\xc6\x13\x97\x59\x7b\xb8\x10\x25\x0d\xc8\xed\xec\xf8\xdf\x76\xc9\x37\x53\xc7\xb3\xf6\x69\x40\x86\xc5\x7c\xf8\x23\x9d\x78\x13\x0a\x6c\xaa\x9d\xc4\x97\x3b\x9a\x8a\x6f\x64\x36\x75\x8d\xfe\xe6\x8b\x44\xd5\xfd\xb9\xe8\xca\xec\x66\x9b\xb7\xa6\x96\x9e\xd7\x73\xbf\x24\x2d\x29\xf5\x77\x5f\x8e\xce\x10\x5a\x4a\x5c\x30\xbf\x26\x48\x75\x60\xfd\x0f
\x1f\xdf\xf6\x0f\xca\x6a\xa1\x61\x5d\xa3\x8b\xf3\xc9\xdf\xe9\x6c\x1e\x1c\x4f\xb7\x29\x81\x05\xda\xd3\x4d\xd2\xbe\x52\xb7\xbb\x1f\x40\x4f\x23\x31\x7c\x6a\x77\x73\xf4\x78\x09\x0b\x3d\xaf\x50\x3f\x3c\x6e\xa8\xe0\x79\xb7\xb6\x02\x5c\xec\x25\x31\xe9\xe3\x51\x9b\x42\x49\x1e\xc7\x39\x14\xbf\xa1\x5e\xf6\x6d\xc7\x9f\x8f\xb6\x34\x2e\xde\x66\x00\x5c\x01\x73\x6b\x99\xa2\xf2\x80\x00\xd6\xdd\x0f\xbb\x5f\x1e\xf2\x21\x30\x9d\x26\x1a\x3d\xfe\xf4\x9e\xc6\xfd\x6f\x39\xb1\xfb\x5a\x9e\xd9\xf6\x79\xc4\xa3\xa6\x77\x25\x8c\x87\x2e\x06\x14\x79\xbb\xe0\xc4\xf8\x3f\x9f\x4a\x06\x6e\x36\x03\xcd\x40\xfa\x61\x7a\x8f\x69\x9e\x1a\xb7\x10\xa7\x33\x74\x36\xdc\xfb\x03\x77\x94\x0e\x14\x63\x58\xdc\x4a\x6c\xf6\xa5\xd8\x19\x1e\xe2\xd0\x8e\xb8\x25\x73\x26\x5a\x5b\x3c\x04\x24\x76\x6c\xa1\x3c\xa6\x46\x39\xa2\x85\xf1\x27\xf2\x41\xad\x14\x94\x52\x89\xd4\x55\x08\xe2\xcc\x13\xe0\x85\x9b\xd1\xf5\xac\xc9\xa4\xed\x52\x6e\xcd\xb4\xf2\x82\x57\xaf\xff\xf6\x84\x63\x70\xf6\xb4\x52\xc3\x53\x56\xd7\xb8\x42\x73\xcc\x97\xe1\x8c\xa5\xf4\x88\x7d\x5f\xe8\x78\xdb\x35\xfa\x79\x91\x17\xb1\x78\x02\xc6\x57\xc3\xdc\x05\x7e\xe2\x68\xa6\x7f\xa1\xa2\xab\xf1\x85\xb7\x32\x3f\x8a\x69\x8b\x29\xf0\xba\x4d\xdf\x0d\x0a\xa0\xcd\xeb\x68\xcc\xea\xc2\x82\x9a\xdf\x54\x82\xc8\x4e\x32\x68\xfe\x3d\xa3\x78\x17\x0c\x65\xd8\x3a\x7e\xd6\x33\x8f\x35\xb3\x77\xb7\x34\x8e\xf4\x0d\xda\x1d\x2e\x1c\x83\x87\x03\x05\x2f\x16\x9d\x3e\x04\xb8\xaf\xa6\x16\xe4\x32\xe7\x2c\x9b\x35\x96\x98\x0c\x2a\xe8\x44\xbb\x67\xce\x9c\x1c\x7e\xb3\x29\x97\xc3\x8f\x3b\x82\xc5\x4c\x9a\x7b\xb1\xf3\x77\x1e\x17\x22\xb3\xd2\x33\x69\xe3\x86\x73\xc1\xa2\xf8\x72\x9b\x4f\x0f\x47\x2a\xbc\x92\x5c\xa6\x54\x68\x85\x87\x54\x4b\x9f\x9b\x58\x3c\x33\xd4\x8f\x6f\x62\x85\xf8\x53\x2f\x84\x9f\xcf\x83\x67\xf6\x69\xe3\x01\x77\x11\xa4\x2d\xb2\x18\x00\xd6\x2c\x0c\xca\x18\xc1\xf7\x82\x3a\x08\x54\x14\xc7\x0f\xff\x86\x2c\x8c\x35\x82\xaf\xb6\xa4\x62\x9b\xc1\xf5\x03\x03\x2b\x64\x91\x98\x1a\xc9\x8c\xb5\x43\xd3\x30\x7d\x3d\xe7\xb9\x61\xbe\xc4\x5c\x46\x7a\x49\xb5\xb9\xf9\x34\x3c\x3b\xb4\xa0\x45\x43\x0c\x88\x2c\xcc\x34\xc7\x9d\x77\xc7\xa8\xdd\xaf\x01\xee\x18\x3e\xd2\x83\xf6\x4d\x07\xcd\x1e\x0f\x34\x4a\xeb\xa3\xe9\x21\x1d\x66\x09\xe8\x73\xcc\x66\xe1\x53\x80\xaf\x92\xa7\x09\x91\xa2\x59\x11\xe8\x29\xf3\x32\xa9\xb0\xcc\x15\xd7\xa6\xdb\x41\xd5\xd6\x32\xff\x34\x32\xcd\x67\x52\x95\x19\xf5\x66\x65\x69\xda\x78\x4d\x03\xd5\xa6\x3c\x2d\x58\xe3\x0a\x9e\x43\x9b\x98\x4e\x59\xfb\xd0\x75\xd0\x29\x4b\xe6\x83\x7f\x0c\xa8\x2e\x66\xfa\xd7\x00\xcd\x7a\xc9\xa6\x11\x18\x66\xcc\x90\xf1\xe2\x48\xda\x8d\x68\xbf\x7a\x13\x29\x25\x12\x1f\xf4\x76\x4c\x85\xd7\xd2\x23\x13\x23\x0d\x77\x5d\xab\x57\x7a\xce\x6f\xe0\x1f\xbe\x2a\xf8\xe5\xe6\xfe\x31\x57\x62\x9a\x82\x02\x1b\xae\xb6\xd1\x0c\x23\x3a\xe5\x07\x4c\x36\xb1\x1a\x52\x1b\x38\x94\x8c\xa4\x3c\x37\x7a\x0a\x0d\x3d\x7b\x33\xe3\x35\xbc\x68\xd3\x16\x8c\x8e\xb7\x65\xa7\x09\x71\xcf\x7c\x56\xd0\x14\x97\x2b\xfd\x7b\x53\x10\xf2\xc5\x15\x8c\x30\x5f\x66\x22\x04\x63\x63\xe1\xee\x83\x4b\x75\xa5\x26\x31\xaf\x54\x69\x7c\xcf\xd7\x71\x25\x12\xdf\x7f\x09\x33\x73\x03\x07\xc6\x42\x08\xa3\x97\xc0\xd7\x97\x6c\xf0\x69\x09\x4c\x33\xf9\xd9\x57\x72\xc9\x34\x36\xb6\xd4\x4e\x82\x3f\xf3\xfd\xf3\xcb\x32\x8a\x7b\x33\x16\x5d\xd3\x3d\x31\x25\xc2\x54\xab\x29\x3d\x08\x5e\x1f\xfa\xa0\x3c\xaf\xc5\xbb\x90\x60\x73\xfd\x5b\x26\xa9\xcd\x5e\x5d\xa9\xe8\x56\x4f\x6c\x21\xdb\xed\x1e\xff\x35\xc8\x1e\x61\x5b\x8e\x9f\xfe\x5d\x54\x0d\x34\x13\x8b\x15\xab\xbd\x44\x7e\xd4\x1c\x1f\xd8\xc1\xb0\x6d\x38\x2e\x9b\xb9\xc4\xab\x8e\x12\x23\x9c\xb0\x2f\x34\x5b\xe6\xb5\x64\x85\x40\xcc\x3e\x28\x9d\xe7\x0f\xcf\x61\xa6\x44\x98\xb0\xe2\xb3\xe1\xfa\x70\x23\x71\x98\xe1\x05\x79\xb2\x46\x99\xca\x9
2\xf0\x77\x85\x0d\x53\x6b\xeb\xd5\x47\xe6\xea\x16\xf7\x37\x83\xb6\xb4\x63\xe9\xc7\x83\x04\x95\x56\x46\x8f\xf9\x98\xca\x33\xdb\x8a\xa8\xfa\xbc\x73\x35\x3c\x4e\x0a\x71\xf0\x0f\x9b\x8a\x7a\x38\x7c\xfe\x7e\x7f\xba\x7b\x0e\x57\xe1\x00\x91\x31\xe5\x20\x4b\x74\xbb\x70\x01\xcc\xae\x04\x9a\xfc\xdc\x96\xec\xbd\x9d\x11\xaf\x3c\xe1\x23\x1b\x95\x68\xe6\xc7\x12\x46\x64\x81\x2c\x0e\x88\xc8\xc5\x5d\x42\xf1\xe2\x4d\x0f\x63\x31\x93\xfc\x88\xc6\xd3\x75\xfe\x4c\x9a\x4e\x25\x1e\x9b\xe3\xbc\x22\x73\xc5\xd2\x17\x75\x3b\x6e\xb1\x77\xfb\x44\xff\xc8\x0c\x56\xc4\x92\xf9\xea\x60\xa7\xf2\x6e\x6a\x1f\xe5\x0d\x73\xbb\x26\x9a\xbf\x61\xb0\xb0\x0a\x96\xa9\xfc\x15\x42\x29\xed\x33\x6f\xc1\x14\x45\x2a\x1e\xc1\xd4\xd1\x45\x6d\x42\x5c\x59\x6f\x30\x85\x23\x67\xce\x1c\x7b\xb9\x5d\x75\xbe\x0d\x14\xa9\xdb\x5e\xea\x6e\xfb\xb4\xb2\x92\x32\x04\x3b\x39\xc8\x70\x97\xd2\xd8\xd3\x4b\x54\xdc\xcb\x19\x5f\x15\x8a\x71\x70\x7f\x65\xcb\xfc\xbd\x99\xbb\xb1\xd3\x60\x7a\x30\x50\x17\x31\xe6\x33\xd9\xcc\x4c\x88\x87\x95\x46\x59\x15\x32\xd4\x85\xf0\x55\x31\xa9\xdf\x31\xe1\xe9\xe9\xf2\x41\xe9\xa4\xc0\xea\xca\xe6\xd1\x78\xd1\x70\x66\x2f\x91\x68\xc9\x75\x38\xc5\x0d\x22\x6c\x2f\x59\xe9\x4d\x31\x3d\xb4\x9a\x12\x56\x92\x15\x5f\xbe\xbe\xe6\x8c\xed\x33\x85\xba\xcd\xf8\x24\x88\xf0\x5a\x54\x0c\x2b\x28\xe6\x41\x21\x67\xbf\x9a\x2a\x80\x4f\x11\xcc\xfc\x79\xa2\xd9\xae\x80\xb1\xe5\x35\x45\x6c\x93\x39\x8a\x6d\x86\x8b\xd5\x4a\xfd\xb6\x69\xa8\xa9\x95\x66\x0f\x21\x71\xa8\xce\x52\xae\x26\xca\xca\x17\x9e\x86\x62\x29\x4f\x25\x69\xda\x46\x93\xd8\xc4\xa3\x64\xd2\x3a\x49\x9b\xee\x4c\x1d\xa2\x98\x53\xc9\x2b\x43\x8a\x2e\x6e\x7c\xab\xfb\x29\x0d\x2a\x10\x95\xf2\xdc\x9c\x9e\x4f\xc6\x17\x35\xb5\x54\x0c\xc3\x46\xb7\x72\xda\x37\xbf\x83\x6e\x37\x07\x05\x7e\xa0\xb9\x3f\xd8\x2a\x53\xd5\x5d\x4f\xcb\x69\x02\x63\x92\x83\xcc\xdd\x38\xd5\xd7\xe6\xee\x39\xb6\x5b\x92\x17\xa7\x42\xad\x6f\x9c\x69\x27\x5b\x21\xbf\x6b\xcf\x00\x2a\x0b\x18\xcc\xb9\x67\x45\xc7\x23\x29\x8a\xc1\xe1\x76\x8c\xef\x8b\x2b\xf2\xde\x42\x5e\xfa\xcc\x97\xa9\xa8\xdc\xd8\x4e\x11\x1a\xcb\x9d\x43\x39\x88\x5d\x7a\x8e\xd8\x02\x3b\x2c\x08\x3a\x58\x6b\x46\xfa\xc3\xed\xa2\x12\xc5\x4b\xb2\x28\x16\xa5\x25\xb3\x99\x9e\xc6\xab\xa0\xa7\x37\xa6\xec\xcc\x97\xbb\x21\x24\x42\x81\x80\x25\x48\x66\x81\x43\x25\xa5\x0d\x8f\x79\x9c\x03\x33\x72\xb6\xea\x38\x46\x76\xfd\x1d\x3c\x1c\x64\x85\x0f\x7e\x41\xa8\x08\xcd\xa8\x7e\x99\x60\x2d\x88\x5c\xd7\x51\x29\x13\xb3\x6b\x4f\x85\x9e\xbb\xdd\x96\x1c\x10\xf9\x8c\x6e\xb5\x75\x57\x76\xbd\xd2\x57\x90\xb6\x0a\x41\xf0\xbe\xb1\xca\x6a\x9a\x97\x7e\xc1\xab\xfa\xfb\x06\x99\x69\x3d\x5f\xd8\xf0\xec\x50\x9e\x4d\xea\xa9\x74\x91\x63\xa6\x2a\xa0\x8f\xe3\x55\x02\x46\x4c\x2b\x8d\xb7\x97\x2e\x2a\x2a\x65\x56\x39\x3d\xa9\xa0\x98\xd1\xd6\x89\x65\x1d\x86\x0c\x2a\x37\x3e\x4b\xf5\x28\x2a\x0b\x66\x97\x6a\xa1\x15\x04\xd3\xd3\x14\x2c\xab\xbd\x4a\xf3\xc7\x57\x31\xd7\xdf\xb5\xc4\x1b\x4b\x6c\x52\xa6\xec\xef\xb2\x42\xc1\x3d\x5a\xa8\x52\x2f\xea\xe0\xdd\x16\xfa\x91\x73\x1e\x14\x78\x6d\x49\x81\xb9\x49\x0e\xf5\x85\xbf\xa1\x2a\xc4\xe5\xc4\xac\x97\x47\x49\xc2\x41\x26\x56\x9e\x3e\x5a\x58\x1d\x61\x8b\xb9\xec\x4f\xe8\x08\xe5\x92\x87\x6b\x18\xe9\x5a\xf0\x1d\x37\x94\x47\x90\x7b\xc4\x3a\x86\x85\xce\xf9\x2d\x6e\x05\xcb\xc0\x45\x91\x80\x52\x50\x4e\xd9\xd4\x0d\x77\xcf\x5c\x09\x7f\xb3\x31\x2f\x73\x1f\x71\xa3\x1e\x06\xd0\x69\xf9\xf0\x6f\xad\xef\xb6\xa3\xc6\x31\xdb\x63\xa4\x3b\x3d\x14\xc6\x8e\xf9\xb9\xff\xff\xdb\xd9\xa6\x3a\x76\x25\xc2\x77\xde\xcd\x76\x42\xc8\xd7\xd4\xb1\x5a\xb5\x6d\xb8\xb4\xe3\x9b\x7f\x57\x25\x54\xf7\x37\xb3\xc3\xf2\x03\xb1\xb5\x13\x09\x49\xbb\x06\x6a\xe0\x2e\x5b\x88\x99\xe8\x5f\x4d\xac\xeb\x87\xba\xf3\x0a\x85\xef\x08\xbe\x20\x4f\x0a\xd5\x78\x
3e\x29\xd0\x8c\x50\xe3\x7d\x56\x60\x74\x4d\x4b\x07\xbd\xff\x2c\xeb\x7f\x33\x0d\x74\x61\x12\xd8\xf5\x1f\x7d\x3c\xf3\x7c\xd7\xfd\x95\xf0\xba\x30\xc3\x63\x0e\x4c\x94\x41\x34\xcd\xbc\x2f\xc8\x68\xfe\xfe\xbe\xf2\xba\x48\x93\x33\xa9\x6f\x7b\x12\x37\x25\x9b\xa3\xfd\xab\x0a\xfc\xc3\xdf\xa7\x44\x23\xe6\xaf\x7a\xc9\xe3\xe5\x6e\x6c\xf0\x1f\xec\xaf\x3c\x13\x9c\xb0\x07\xd8\x6b\x3f\x40\x72\x83\x7f\x95\x37\xc5\xd5\xdc\xf7\x80\x4d\xf3\xe0\xda\x5f\xf9\xfd\x57\x7a\xac\x5d\xf9\xc3\x47\xe8\xed\xd8\x16\x07\xdd\x13\x16\x84\x6a\x98\xe3\x30\x97\xb4\x50\xc9\x2d\xed\x1d\x37\xdc\x62\xe7\xc4\xd4\xb5\xf9\xc1\xb6\xf0\x8d\xca\xfd\xb8\xf1\x9b\x99\xb9\x52\xbf\x51\xf6\xb6\xed\x5e\xd1\x0f\x51\x33\xfb\x22\xb8\xc9\x14\x58\x28\x37\x9b\xea\xaf\x90\x57\xe4\x15\xc9\x44\x1c\xcb\xaa\x24\xcb\x9c\x3a\x3a\x6e\x10\xac\xf2\xae\x4c\x25\x79\xd6\x84\xbe\x80\x45\x7b\x8f\xbf\x7b\x63\x10\x36\xf2\x75\x0a\xc0\x7b\xee\x82\x83\x75\xa8\x45\xa8\x9b\xed\x06\x59\x11\xf2\xa3\x2a\x93\x65\x3e\x3c\x4b\xb1\xf6\xba\xa2\x65\xcc\x1f\x2b\x29\x0c\x48\x65\x52\x59\x7a\xa1\x43\xe5\x36\x5e\x77\xe6\x96\x94\x05\x7c\xf3\xf7\x99\x21\xb1\x38\x30\x00\x60\x12\xbd\x7e\xe3\x9f\x8f\x7a\x71\x85\x54\xb2\xce\xa5\x69\x75\x66\x24\xaf\x09\xb9\x36\x05\x1f\x48\x01\xfd\x22\xaf\xe8\x25\x46\x95\x19\x1b\x8f\x38\x58\x09\xeb\xcc\xf9\xa7\xfc\x17\xa9\xcd\xa9\x51\x21\x68\x72\x91\x85\x8e\xb4\x3b\xba\x06\x54\x4e\x6f\x7d\x5a\x15\x2d\x2a\x6d\xef\x1a\x8e\xd9\x5a\x06\x5a\x12\x75\x77\xb4\x87\xcc\x4c\xd2\x62\xf1\x0a\x21\x1d\x76\x8e\x19\xfd\x08\x7d\x43\x53\x33\xd2\x11\xeb\x7d\x31\x07\xfa\x3c\x29\x9e\x90\xe6\x36\x95\x72\x62\x5a\x03\x2e\x69\xc5\xf8\x0d\xe7\x64\x18\xcb\x81\x72\x60\x4b\xd4\xb2\x54\x88\x5f\xc6\x52\x1e\x82\x2e\x4d\x0c\x88\x96\x56\xa9\x92\x25\x8f\x23\xb4\x92\xd2\x95\x76\x84\xdb\xc0\x68\x66\xf4\xe0\xf9\x2a\x77\x9a\x7e\xfe\x38\xe2\x8c\x7a\x0e\x44\x5b\xc4\x3a\x69\x58\xcc\x6e\xe3\x12\x16\x93\x1f\xd6\x15\xb9\x26\x66\x08\xa6\x8b\x72\xaf\xb4\x20\x9e\xdb\xb6\xeb\x1f\x2a\x66\x65\x58\x3b\x2e\xa6\x21\x2d\x14\x9c\x03\x0f\xfb\x39\x7a\xfc\x0e\xd7\xdc\x7c\xf4\x52\x45\x01\xb0\xa7\xd4\x47\xfc\xa2\xd7\x5b\xf1\x62\xec\x7e\x19\x3b\xb8\xc5\x66\xe6\x2b\x6d\x1e\xb1\x54\x2f\xbf\xfa\x88\x1b\xf8\x0f\x6a\x0c\xb1\xdc\x90\xec\xa6\xb1\x38\x84\x22\xd1\xd7\x2e\xe5\x66\xc8\xd4\xe5\x51\xc4\x9d\x81\x9c\xe9\x29\xb3\xf8\x2d\xad\xa2\x39\x1c\x37\x84\x49\x87\x4f\xff\xae\x18\xa2\x08\x54\x63\x07\x82\x22\x26\xf7\xb9\xcb\x6d\x4b\x4d\x69\x7f\xa0\xda\xe8\x5a\x3c\x55\x64\x18\x54\xd7\xf0\x3f\x3e\x15\x88\xcd\x64\xf7\x4f\x4b\x8a\x5c\x7d\x55\x92\xfd\x70\x5c\x93\x85\x0a\xf1\x59\xde\x67\x8e\x54\x72\xb9\x94\xe4\x51\xfe\xf4\x3b\xb7\xbb\x9c\x54\x34\xc8\x9e\xe6\xc6\xb9\x76\x93\x48\xf7\x63\xc9\x67\x9d\x42\x57\x95\x48\x25\x23\x11\x2b\x13\xe1\xf8\x88\x34\x12\xb0\xa1\xc4\x58\xb3\xa4\xb1\x9c\xfb\x4c\xab\x7d\x32\x8d\x14\x8a\x07\x21\x2c\x9b\x17\x0e\xe5\x70\x8e\x28\xeb\x9e\xca\x96\xdd\x4a\x70\x87\xc2\x28\x73\xe2\x2c\xae\x75\x4b\x8c\x52\x2e\xbf\xff\x29\x65\x41\x7b\xcc\x54\x6e\xc9\x74\x99\x5f\xa4\x69\xc1\x97\x6f\x6c\x42\x0e\xa1\x6a\xe9\x2d\xd9\x3d\x8d\x35\x1d\x83\xd0\xee\x03\x7d\x12\x4f\x86\x61\xfd\xf7\x81\x49\x6d\xcf\x2e\xf7\x50\x4a\xf8\xf4\x0c\xe1\x8b\xb8\x7f\xcf\x36\xb0\xe6\x66\x0e\x1b\xc3\xcc\x56\xa5\x0e\xcf\x84\xc1\x83\xbf\x28\x2d\xfc\x6e\x66\xfc\x13\x39\x80\x81\xf0\x22\xa0\x3d\x4a\xc5\x44\x69\x72\x13\xdf\x4e\x01\x21\x4a\x6a\xfe\xde\xf8\xe2\x51\xa3\x9b\x6a\x0e\x3d\x65\xa5\xcf\x88\x6d\xbb\xec\xdf\xd8\x41\xaf\xb4\x22\x01\xe0\x29\x56\x7a\x4c\x44\x9b\x9c\x06\x1e\x4e\x25\x7d\x6d\x2b\xf7\xb0\x28\x8d\x01\x95\xb5\xc5\xa6\xc7\xbd\x0d\xc5\x8c\x0f\x61\xa0\x1f\xeb\xd9\x8c\xde\xed\xa1\x1d\x03\xe2\x92\x96\x74\x4f\xcc\x29\x0d\x70\xc1\xcc\x83\x64\x50\x59\x67\x79\x5c\
x17\x99\xce\xdf\x2f\xa4\x0c\xe7\x9d\x8b\x30\xa4\x7f\x3b\x02\xe1\xba\x54\x03\x2d\x40\xa3\xaf\x69\x2a\xfa\x46\xdc\xc9\x05\x5f\x78\xf5\xb7\xa2\x3d\xe6\x90\x6b\x93\x5f\x85\xac\x1e\xc4\x2d\x14\x5a\x13\x56\xe4\xec\xa6\xc0\x2a\x61\x67\x4e\xc2\x5e\x80\x0e\x54\xf7\xb8\x96\x4b\x28\xb9\x7d\xcf\x40\x31\x31\x3f\x75\x97\x7c\xa9\x5e\x48\xce\x23\xc4\xd7\xcc\x2f\x81\x1d\xe1\xb1\xd1\xa5\xb4\x50\xdc\x14\x24\xd7\xeb\xf2\xb8\x85\xe1\xa2\xfc\xac\x4e\xad\xb9\x22\xe6\x4b\xe3\xd3\xaf\xc1\x36\x60\x88\x2c\x3c\x04\xa5\x50\x4d\x5b\x24\x66\x4c\x96\xe7\x2b\xe5\xd1\x4a\x2d\xc5\x62\xd1\x3b\xba\x96\xa6\x10\xb6\x86\xe1\x99\xc9\x64\x1a\x75\xd1\x3d\xdc\x1d\x4f\xdd\x6c\xca\x17\xa0\x1c\xb9\x6c\x4c\x7a\x36\xfc\xbd\xfd\xa0\xbf\x77\x85\x77\x3a\x3f\xa4\x7c\x4d\x43\x9c\xfd\x4e\x1e\xa6\x60\x6f\x9e\x63\x43\x31\xfe\x84\x58\xe8\x85\x0d\xdd\xe8\xe4\x9f\xf9\xd6\xa6\x61\xf7\xb5\x14\xe0\xcd\xeb\x83\xb7\xb1\xbc\xd0\xa5\xe6\x3d\x21\x00\xc8\xc2\xb8\xd9\x8d\xe0\x83\x2c\x22\x1b\xf1\xc4\xf8\xd0\x01\xaf\x93\x85\x5d\x78\xea\x11\x7a\x68\x42\xbb\x99\x95\x90\xa4\x78\xff\x20\xf8\x6c\x54\x50\x3b\xc7\x52\x6d\x9c\x53\x83\x1f\x7d\x62\xc8\xb6\x71\x79\xec\xc6\xa9\x03\x94\x52\xc4\x4e\x87\x86\x4e\x80\xa7\xd2\x2e\xc8\x81\xe0\x0b\x97\x89\xd8\xcd\x64\x00\x2e\x9d\xd9\xd0\x25\x0c\x8a\xeb\x88\x7b\x63\xcd\x87\xfe\x7e\x28\xa0\x0f\xfb\x71\x4f\x58\xca\x3b\x9f\xdc\xc2\x53\x49\xbe\x52\xf0\x31\x0f\xbc\x4f\x35\x23\x10\xf3\xb3\x58\x08\xa3\xa4\xf1\x7c\x9e\x03\xe0\x24\x2e\xbe\xbb\xdf\xc0\x63\xe7\x1b\xe4\x91\xe0\x0c\xdf\x2b\x5c\x74\xb6\x63\x89\xc2\x96\x79\x00\x8b\x47\x50\xd0\x63\x2c\xcc\xc2\x7d\x47\xa9\x57\x65\x0d\x0b\x16\x97\x70\xa2\x3b\x43\x13\xb3\x16\x41\x5b\x79\xaf\xd2\x2f\xe7\xa4\xaa\x84\x97\x11\x33\x5e\xb1\x70\x09\x4f\xd2\x1f\x71\x53\x68\xfd\xcf\x3c\x59\x3a\xd6\x76\xe4\xe0\xbf\xf2\x2f\x6e\xf8\xf2\x4b\xcf\xfe\x6d\x0f\x9e\xbd\x3e\x8a\x7f\xe0\x55\x19\xfa\xfc\xe1\x79\x5f\xfd\x2f\xff\xf1\x53\x1b\x7e\x52\xd3\xf3\x9e\x5b\xfe\xf9\xa4\x03\x91\x63\xd2\x42\xc5\xf9\xa5\x63\x23\xec\x8c\x69\x0c\x56\xf5\x2d\x34\xda\x25\x56\xd8\xf6\x7e\xef\x1c\xe2\x82\xd3\xcf\xe5\x4d\xfc\x8c\x91\xfb\x34\x16\xef\x76\x62\xf2\x9c\x05\x68\x3f\x22\x87\xfa\xd8\x00\xe0\x43\x89\x7d\x96\xe5\xba\xcb\xd4\xbb\xfb\xd9\x64\x56\x57\x1f\x82\x70\xa6\xa0\xcf\xd0\xbb\xc6\xd1\xe4\x10\xa2\x16\xf5\x31\x5d\x6a\x98\x9f\xec\x92\xeb\xce\x72\xb3\x5c\x1c\xf3\x87\x74\x68\x61\x11\xb3\xb0\x82\x9b\x02\xfc\x0b\x57\xc5\x7e\x24\xde\x00\x71\x15\x42\x9e\x8f\xf2\x78\x3a\xfe\xda\x71\xb2\x70\x5f\xbb\xa8\x32\x0f\xfe\x6e\x2e\xf7\x9b\xe8\x47\xff\xe3\xd3\x66\x56\xaf\xf8\xeb\x89\xab\x33\x69\xe5\x4b\x91\xc7\xcd\x41\x56\x2e\x90\x21\xbd\xad\x02\x51\xb5\x81\x1b\xcc\xd7\x3d\xf9\xba\x27\x45\xea\xc1\xa5\x7a\xf2\x77\x77\x7e\xd4\x93\x57\xf0\xf6\xff\xb2\x6f\x3a\x5b\x7c\xde\xb6\x2c\x51\x1a\xf9\xc3\x53\x79\x7c\x50\xa4\x14\x61\x3a\x3f\x60\xe6\x55\xcc\xfc\xf4\x41\x7b\x19\xcb\xce\x8e\xfe\x16\x8b\xe4\x55\xcb\xdb\xf0\xc7\x22\xd1\xd2\x2b\xf9\x7f\xff\x52\x91\x3b\x6a\xc3\x1d\xfe\xcc\xae\x08\xab\x14\xf1\x99\x39\xe5\x0f\x6d\x85\x67\xfb\x68\xf6\xd7\x66\x60\xec\x4e\x0b\xa6\xda\x7c\x45\x57\x6f\xcc\xd8\xc8\x27\xdf\xff\xc9\x5b\xb9\x4b\x4a\x9e\x7d\x7c\xfd\xe4\x0f\xfe\x51\x28\x27\x07\x27\x69\x64\x32\xf4\x49\x85\xfd\xe4\x25\x3e\xa5\x1b\x98\xd0\xc6\x2a\x65\x8a\x3f\x7e\xbd\xe9\x26\xf9\xfe\x97\xfd\x25\x9d\xf0\x2b\xdf\x3f\xbf\x11\xff\x22\x15\xc1\x33\x98\xf1\x83\xdb\xf5\x4f\x97\xe7\xf2\x99\xb0\x00\x5a\x07\xfc\xd5\xa4\xdf\xea\x97\xc7\xe3\x0b\x5e\xf9\xd4\xda\x3c\xf5\xab\xb5\x2f\xfb\xab\xff\x77\x40\x03\x71\x65\xe7\x82\xfe\xf8\xfa\x7a\xb5\x07\x20\x25\x9b\xa1\x84\x91\x62\x85\x2e\x54\xa4\x77\x33\x8d\x6b\xbf\xf4\xcf\x3f\xbe\xbd\x01\x87\x08\x6c\x29\x40\x19\xe6\x13\x4b\xd5\x3a\xc2\x11\xfe\x9f
\xbd\x9c\x39\xd4\xe3\xc7\xd7\xcf\x81\x69\x0b\x33\xaa\xc8\x0a\x98\x39\x26\x02\x0d\xe9\x06\xd5\x50\xcd\x39\x24\x74\x83\x70\x1d\xa1\x22\x16\xc4\xa5\xaa\xdc\x9b\xaa\x43\x45\x65\x5f\x4e\x0a\xd2\x6c\xbd\xfe\x83\x39\x44\x68\xb1\xb6\xf6\xcf\x1f\xec\x35\x31\x63\x8e\x6c\x36\xad\x8f\x9e\x14\x77\x3d\x92\xc2\xe4\x89\xbb\x2a\x00\x3a\xd2\x99\x6a\x60\x70\xb1\x11\x5c\x35\x8e\x01\xb2\x75\xf0\xbb\x45\xc2\xd9\x9b\x30\x70\x8b\xdd\x92\x33\x60\x6d\xdd\x90\xf9\xf6\x87\x1f\xff\xe6\x85\xfe\x56\x52\xf7\x2c\xd5\x98\x09\x52\x75\xf6\xa1\x8e\x0b\x7b\x91\xb4\x46\x1b\x6e\x34\x3e\xf6\x57\x6c\x82\xb0\xd7\xbf\xe1\xc7\xd7\x47\xc5\xa2\x68\x85\x2c\x19\x5d\xfd\x86\xfe\x42\x6c\x23\x8b\x21\x8c\x5b\xed\x35\xd8\x95\x6f\xff\xc6\x03\x64\xe7\xdd\x6f\x71\xcf\xf7\x6c\xe1\x06\x9b\x01\xdc\x11\x87\xeb\x1d\x66\x41\xc6\xe6\x02\x6f\xbe\x06\xb9\x38\xd1\xdc\x64\x58\x15\x3b\x64\x7d\xa4\xd7\xe7\xf0\x77\x24\xba\x95\xb6\xed\x53\x4b\x3d\x86\x16\x8a\x1d\x97\xf0\xf3\x06\x99\x6f\x9f\xf0\xb6\x0f\x1f\xdf\xc7\x4f\x5f\x8f\x53\x59\xc6\x7a\xef\x4a\xab\xcd\x49\x69\xfb\x31\x0f\x54\xb5\xca\x6e\xdd\x5e\xc5\x6d\xaf\x79\x6d\xbc\xf4\xc4\x92\xeb\xc1\xfe\xe4\xdd\xff\x88\x60\x7a\x56\x79\x62\x8d\xb3\x6e\xab\x31\x6f\xfd\x92\x99\x17\x35\xb9\x50\x45\x70\xf4\x70\x8e\x81\xee\xc2\x92\xfc\x98\x5b\x06\x29\x76\xbd\x1f\x92\x50\x9e\xdf\x9b\xa2\x0d\x40\x0d\x16\xdc\xad\x29\xbd\x2d\x01\x42\xe2\x18\x4f\xac\x84\x42\xea\x39\xdf\xb8\x5a\xdb\xad\x84\xd0\x5e\xfe\x8d\x0d\x74\xb9\x5d\xcc\x4e\x9c\x22\x32\x91\x15\x4a\xc2\x02\x2a\x2a\x83\xce\xe8\x3d\x2b\x81\xdb\x67\xe6\xac\x57\x7b\xa5\xc3\x93\x2a\xe2\x9f\xa0\x34\xc6\x5a\x48\x5b\x25\x75\xd7\x50\xf3\x3d\xaf\xf9\x41\x48\x91\x4a\x80\xef\x5f\xfe\x5d\x11\xff\xee\x97\xdf\xa1\x3d\x0a\xfb\x9e\x08\x96\x54\xc5\x76\xda\x5b\xba\xa1\x0a\xa7\xd6\xc1\xd6\x8f\xad\x13\x76\xf3\x88\x22\x4e\x9e\xbf\x66\xee\xa4\x76\xa8\xbd\x5f\x79\xb8\x78\xe2\x33\x41\x56\x1c\x34\x93\xe9\xe1\x0c\x6a\x68\xf9\xfd\xf6\x05\x35\xb1\x7f\xd8\x4e\xe3\x32\x26\x39\xe4\x4d\xe3\x95\x2c\x5c\x3b\x73\x43\xde\x7f\x85\x3b\x97\xba\x86\x48\xe6\x61\x1f\x6b\x24\xf5\x2e\x16\xf9\x8c\xdc\x2d\x8f\x18\x06\xa2\xe0\x07\x33\x37\x70\x6b\xed\x68\xb8\x7c\x16\xfc\xef\x25\x28\x87\x31\xee\xaa\x1d\x6e\x7d\x44\x32\xe0\xea\xf9\xe3\x02\xa0\x45\x49\xa3\x0d\x45\x06\xb3\xd0\xbf\x4c\x29\x78\x32\xf8\x05\xfc\x67\xa5\x41\xd0\x1d\xe6\x80\xcd\x25\xe6\x41\x48\x26\xba\xfe\x8e\x3f\xe7\x8f\x47\x7c\xdf\x9f\xb8\x4f\xf3\x74\x18\x76\x03\x93\xe3\xaf\x28\x90\x70\xc4\xb4\x5e\x16\x56\x52\xd1\x6e\x72\x43\xf7\xdc\x1f\x98\xfd\xee\x18\x76\x74\xa5\x05\xd3\x45\xc2\x06\x32\xe5\xde\xe1\x0b\xeb\x76\x62\x4e\xc1\x13\x7c\xc8\x59\xf8\xe9\xc5\x5a\x99\xdf\xbd\xed\x4a\x0b\x3a\x48\x12\xf6\xc7\xbd\x02\xa4\xa5\x1d\x17\xb5\xaa\x0e\xc1\xfc\x82\xd9\xb1\x85\x30\xa4\xfb\x46\x05\x6b\xa2\x4d\xd4\x64\x9f\x37\x96\x6e\xdf\x3f\xfc\xbb\x7a\xb5\x81\x54\x7f\x58\x40\xc8\x95\x62\x5e\xaf\xf5\x0e\x44\x5e\xac\xdb\xab\x91\xee\xe3\xd3\xbf\x2b\xba\x53\xaa\x00\xe7\xac\x8c\xc7\x9a\x9b\xe2\xbe\x17\x00\x04\x6b\x35\x5e\x9b\xc2\xb1\x7c\x7f\x35\x46\xb9\x21\xe0\x3e\x9e\xd2\xb0\xf5\x7e\x61\xb9\x7f\x44\x34\xaa\x84\x93\x60\xc7\xc9\x73\x50\xc4\xec\xb3\x44\xca\xfa\xc7\x2a\xf8\xf8\xa2\x32\xcc\x9c\xb7\x80\xb8\xb3\xc7\x17\x64\x6c\xea\x98\x1c\xbf\xc5\xc8\xb2\x07\xf0\x27\x08\x2a\xbd\x80\x0b\x05\x53\x74\xca\x50\x20\x12\xf3\x4e\x30\xbb\x39\x66\xac\x02\x79\x96\x06\x9e\x08\x52\x48\x28\xb4\x79\xd2\x90\xb5\x95\xe9\x31\xf2\xb7\xed\x2b\x2f\xe0\x69\x6f\x81\x62\x72\xb8\x48\x2c\x32\xbe\x28\x37\xb4\x60\x66\xfd\x59\x34\x6f\x8a\x1d\x54\x3e\x95\xd8\xba\xb5\x51\x57\x44\x5b\x34\x66\xe3\x1d\x3f\x8b\x44\xb9\x94\x85\x16\xc2\x0a\xd7\xd2\x51\x73\xce\xe7\xac\x6b\x55\xda\x70\xf4\x90\x14\xb1\x96\xfd\xbf\x1a\x1d\x4f\xf8\xa2\xa
e\xb2\x05\xc8\x04\x9c\x9b\xeb\x8a\x15\xff\xcb\xdf\xbc\xba\x30\xf0\x55\xa9\x6e\x87\x9c\x94\x89\x83\xda\x2c\xc6\x87\x3e\xc9\x65\x52\xe5\x18\x53\x15\xc0\x2c\x98\xa9\x55\x23\x80\xa7\x3a\x14\x38\xd8\x19\x40\x1c\xeb\xf0\x3d\x22\xb0\x54\xb3\xf9\xd3\x20\xf4\x58\x5a\x47\x94\xc0\x27\x76\x3c\xec\x5d\x7b\xfa\xf7\x11\xe8\x19\x95\x73\x74\xfc\xf6\xef\x0a\x21\x2c\xa0\x8b\xc4\x7b\x75\x9e\x09\x1a\x1e\x52\x44\xc4\x1c\xc5\x93\x94\xa5\x57\xc9\x99\x1d\xf9\xc3\x37\x8e\x79\x4a\xbc\x2f\xd3\xfb\x14\x9f\x75\x08\x14\x09\xbb\x74\x82\x48\xb7\x95\x4d\x5f\x8e\x2a\x45\xad\x1e\x05\x7d\xec\xd2\x1b\xf2\x5f\x15\x01\xd3\x32\x38\x40\x86\x4c\xb8\x13\x93\xd1\x11\xc9\x8c\x2e\x5f\x5c\x25\x0c\xf8\x82\x5c\x84\xf5\x89\x58\x33\x5b\xec\x4d\x94\xd9\x0a\x5f\x80\xdd\x78\x78\xc6\x44\x67\xd3\x05\x09\xf9\xff\xd7\xd9\x6b\xde\x0f\x4b\x12\xad\x29\xd9\xb2\xf5\x5e\x76\x2a\x49\xab\x0a\x30\x0a\xf3\x4d\x2b\x04\x3e\x43\xa4\x85\x08\x25\x2e\xf3\xa6\x27\x15\xfb\x81\x05\xae\x98\x15\x4c\x84\x70\x2d\xa5\x5e\x88\xdc\x1f\xed\xeb\x17\xf7\x38\xd4\x03\xd1\x9b\x0b\x50\xf5\xaf\xe6\x22\x77\x47\x59\xf7\xeb\x4d\x1c\xb9\x12\xb1\xa3\x51\x31\xbb\x4e\x2b\xbd\x86\x25\xf4\xbe\x84\xa6\x40\x94\x9f\x18\xf2\x3e\xb0\x8b\x97\xc5\x0f\xb8\x96\x4c\x53\xbe\x09\xda\x65\x8a\xb4\x66\x9b\x12\x7b\x78\xfd\x0d\xce\xb1\x45\xf4\x66\x47\x34\x13\x2b\x96\x99\x7a\xf4\x22\x14\xf6\x48\xcb\x07\x24\xbc\x2a\xac\x91\xb9\x8c\x39\x5d\x82\x1d\xc8\xea\xd5\x0e\x4b\xdd\xe0\x0d\x78\xb8\x12\xc2\xd5\xa9\x70\x72\xe9\x1e\x46\xd7\x22\x0e\x83\x59\xec\x9a\xb0\x71\x57\xff\x8b\xa0\x52\xbb\xdc\x66\x34\xbb\xf8\xb5\x96\x86\x16\x4f\x08\x94\xf4\xf2\xdc\x0b\xe2\x6e\xc1\x09\x72\x9a\xee\x91\xab\x51\x75\x11\x48\x74\x2e\x50\x17\xc7\xe2\x09\x37\xb9\x04\xe6\x28\x7a\x22\x9a\xfd\x7a\x59\x3f\xaa\x00\x63\x1c\x99\x2f\xb6\xf0\x68\x64\x8f\x66\x8c\xa7\x86\x40\xaf\xe7\xf3\xaf\xb3\xa6\x57\x00\xec\xe9\x1e\xc8\xa7\x6a\x87\x0e\x0a\xca\x54\x32\xa6\xb7\x6b\xc1\x66\x9b\x0b\xd0\x15\x5d\x5e\xd4\xe3\xe2\xea\x44\xb0\x2e\x73\xf9\x4b\x0a\xcd\x93\x8b\xaa\x64\xd8\x5a\x9c\x94\xbc\xf3\xd4\x24\xde\x6f\x0c\x4f\x02\x47\xed\xa0\x13\xeb\x8d\xe2\x16\x32\xba\x2d\x74\x36\xa3\xa4\x7f\x2f\xfc\x19\x77\x8e\x67\x82\x28\x3a\xc0\xd0\xa1\x02\xdd\xc6\x60\x07\xa5\xbe\xdd\x20\xfa\x7d\xe4\x7e\xf2\xff\x2a\xf8\xa9\x3f\x9f\xc7\xea\xd5\x99\xcd\x2a\x5d\xf7\x82\x18\x48\x0b\x9b\xc7\x52\xfc\xe3\x87\xe9\xb6\x5d\x49\x5f\x13\x01\x75\x63\xc6\x50\xfa\x1a\xec\x46\xe9\xa9\xd9\xbd\xb4\x2c\x0f\xb3\x3f\x1d\xa6\x0c\xbd\xc7\x10\x7b\x40\x14\x51\xbb\xba\xc4\xc9\x7d\x41\x3c\x7a\xd4\x99\x87\x12\x9e\x35\xcd\x88\x29\x33\x3a\xa9\xe1\x89\x75\xb0\x07\x03\x74\x0f\x44\x05\x6e\xb2\xbb\x89\x75\x45\xec\x25\xd3\xe7\xa5\x5b\xdd\x71\xfa\x29\x76\x4c\xba\x74\x93\x72\xad\x16\xb5\x11\x16\xd4\x94\xfe\xba\xd1\x4e\x35\x11\x63\xee\x87\x69\xb5\xee\xd4\xc5\x1b\xd1\x91\x5b\x41\x0c\x12\x1e\xcc\xae\x59\x3f\xaa\x15\xbd\x20\x87\x49\xf0\x38\xd0\x54\xe4\xfb\x89\xda\x27\xaf\x89\x35\x99\xbc\x74\x61\x1d\x25\x05\x44\x14\xb1\x81\x1a\x09\xcd\x3d\xae\xea\x70\x58\xdc\x26\x63\x47\x4b\xc1\xd5\x53\xd4\x61\xc9\xcc\x79\x9b\xc9\x0a\x34\xf4\xe6\xc3\x40\x4f\xd8\x06\xb1\xc7\x41\x9e\x17\x65\x9f\x70\xbc\x73\x69\x58\x3f\x87\x6b\x69\x8c\x84\xa0\xe6\x56\x2a\x58\xdd\xe6\x4a\x9f\x16\x37\xe2\x34\xa8\xa7\xc5\x34\x5e\x8b\xa5\x99\xa2\x9f\x2c\xc2\xf7\x4d\xd6\xa0\x3f\x2e\x05\x76\x70\x8b\xf5\x8b\xb4\xe0\xba\x0f\x04\x22\x1e\x7e\xfd\xbb\xa2\xb1\x53\x1d\xd6\xac\x27\x0d\xe8\x29\x2c\x82\x00\x2c\x7b\x57\x0b\x62\x1d\x65\x38\x01\x45\x20\x45\x80\xb7\xc9\xe1\x08\xdb\x92\xf7\x6c\x16\x37\xcf\x29\xca\x20\x65\x0b\x0e\x59\xbc\x5c\xa4\x52\x3c\x25\xbb\xd2\x76\xba\x72\x3c\x13\x18\x98\x77\x57\x05\x74\xb7\x1c\xce\xc2\xa0\xbd\xb7\xcd\x2e\xd5\x32\xd3\x66\xb0\xbc\xf8\xaa\x4a\x2e\x
ae\xfd\x80\xc4\xb0\xe1\x2c\xf7\xdc\xda\x38\x5c\x89\xf8\xc3\x27\x40\x49\x78\xb0\x34\x31\x1b\xe2\x29\x7b\xbc\x93\x83\xf5\xd8\x28\xe1\x65\x79\x61\x86\xbd\xe3\x53\x47\xa4\xf1\x9e\x6d\xe1\x2b\x52\xe9\x5a\x74\x38\x28\x0b\xd5\xad\x63\x7d\x2a\x04\xec\x0c\x02\x47\x17\x66\x66\xc0\x00\x5a\x87\xfb\x91\x85\x72\x18\x77\xfb\x2b\x4a\x95\x90\x73\x2c\xf4\xf9\xee\x33\xf7\x20\x67\xf5\x92\xf4\x41\x4c\xce\x9b\xc3\x2f\xfb\xcd\x4c\x95\x93\x57\x28\xb3\x00\x47\x92\xa8\x74\xf9\x44\x27\x47\xb8\x52\x31\xed\xee\xf3\x32\x63\xb8\x32\x98\x76\x0c\xe1\x40\xe1\xf1\x18\x46\xc9\x13\x74\x90\x42\x53\x0f\x8f\xd1\x77\x81\x47\x49\x1d\x4a\x2e\x31\x40\x7c\xf6\x81\xfd\x97\x08\xdf\x36\xa0\x20\xa0\x41\xe9\x94\x45\x7c\xf1\xa2\x7b\x79\xdd\xb6\x9f\xb2\x14\x5e\xda\x52\x60\x63\xdb\x61\xf2\xca\x9c\x7f\xbc\x2e\x44\x30\x6c\x05\xb4\x6a\xfe\x83\xad\x02\x2e\xa1\x77\x48\x34\xd5\xe5\xc8\xb3\xec\x62\x21\xc8\x86\xbf\x2f\x1b\xc1\x1c\xd0\x69\x07\x8e\x10\xbd\x66\xaf\x77\x59\x77\x6e\x3e\x99\x19\x9e\x61\xa6\x55\x35\x93\x31\x64\xf2\x30\x30\x28\x19\x93\x5a\x83\xa0\x1e\xa0\x7b\x5c\x19\xf2\x54\x09\x1d\x0f\xc0\x15\x42\xae\xa5\xf4\xc4\x83\xed\x82\xd5\xe6\x50\xe0\xbd\xe8\xcf\x64\x5a\xc3\xac\x20\xdb\x96\x4c\xe0\x3c\xfb\x53\xd6\x02\xf8\x3c\xb4\x65\xaf\x65\x21\x99\xb2\x5f\xc6\x1d\x26\x29\x9b\xee\x1e\x0a\x78\x55\x96\x61\xb9\x34\xac\x3d\x02\x16\x51\x01\x6d\x3e\x35\x40\x42\x3c\x04\x03\x48\xd3\x29\xb2\xe3\x15\xe2\x2e\xf2\x09\x27\x5d\x60\x45\xfb\x84\xba\x82\x43\xa5\x10\x5a\x0d\x81\x87\x73\x96\x4e\x9a\x97\x0c\xac\x83\xc5\xa3\xbe\xc9\xfe\xc7\xe6\x9b\xb3\x74\x6e\x47\x92\xde\x94\x6d\x20\x53\x14\x0f\x7c\xd9\x03\x74\x1e\x11\x65\x69\xb7\x0c\x25\x6b\xb6\x1b\x7e\x2b\xc3\x42\x0f\x2c\xf7\x8e\x92\x57\x68\x59\xcc\x09\xe0\xfb\x09\x56\x79\x60\xae\x1f\x19\xc2\xbc\xa0\x47\xff\xb6\xa4\x52\xf5\x7f\x21\x24\x9c\xba\xa5\x40\x6a\xe6\x85\x0b\xee\xbd\x04\x0c\x6d\x28\xdd\x8e\xf2\xb3\x1f\x9f\x5f\xfe\x0d\xe9\x9c\x4e\xfb\x5f\xb5\x19\x4b\xfe\xa0\xe5\xe8\xf7\xa5\x40\xb1\xd1\xf2\x05\x99\xb9\x1f\x9f\x97\xee\xc7\x73\x7f\x4f\xa1\xa6\xcc\xd7\xdf\x98\x48\x66\xef\x26\x91\x0d\xbe\xe2\x66\x6e\x19\x1a\x00\xd9\x59\xbd\xfa\x12\x8a\xb8\xe2\x73\xfb\x87\x5d\x3c\x5f\x7a\x89\x57\x21\x6d\x7d\x83\x80\x8f\xfa\x78\xf5\x8d\xad\xee\xd3\x95\x1e\xc3\x85\x2d\xb1\x53\x27\x4c\xee\x69\x27\xac\xcb\xeb\x83\x35\xa3\xbd\x9c\x86\x52\xa0\x2f\x28\x24\xf4\xae\x03\xf6\x90\xc6\xd1\x96\x1d\x0b\x02\x64\x33\x43\xef\x41\xb8\xf9\xfb\xc8\x02\xed\x39\x2e\x82\x1c\x78\x4f\x57\x89\xfe\xfb\x58\x58\x1d\xd6\x15\x79\xa5\xa6\x64\xdf\x46\x8b\x2b\xa1\xf2\x4a\x5b\x07\x0f\x91\x5a\x2b\x04\x18\x87\xde\xa6\x33\xeb\x91\xa9\x84\x9d\x4b\xc7\xf8\xbc\x62\x24\x53\xda\xab\x4d\x66\xfd\xe3\x04\x9c\x29\x66\xeb\x51\x0a\xcd\xb2\x16\xd9\x7c\xa1\x9d\x39\xd6\x2f\x24\x6b\x23\x81\xed\xef\x5f\xc7\x0f\x24\x94\x04\xea\xc1\x96\x25\x35\xec\xd8\x05\x44\xc7\x7a\xe2\x0e\xef\x04\xc0\x0b\xfa\x56\xba\x69\x70\xfe\x14\xbb\xc6\x9b\xa2\x08\x28\xcc\xd3\xae\xca\x36\x9a\x66\x2a\x28\x7a\x0b\x83\x82\xaf\x97\xd9\x33\x05\x8c\xbb\x23\x1e\x29\x4d\x93\x6e\xfc\x66\xfb\x40\xa4\x8e\xfb\x7c\xfe\x8c\x99\xd4\xc7\x5f\x9e\xb5\x42\x1a\x88\x18\x2b\xc7\x51\x9d\x86\xac\x16\x2d\xef\xb0\xc0\x0e\xd9\xdf\xed\x38\x78\xf0\x0a\xfe\xa3\x1e\xa0\x5a\x2c\xde\xe1\x29\x14\xfd\x0b\x62\x0f\x7b\x2d\xa5\x27\xbb\x69\x52\xb2\x2a\x2b\x59\xd5\x2c\xca\xf9\xc4\xbb\xda\x65\x92\x97\x3e\x4a\xc7\x9a\xf2\x1e\x43\x40\xf0\x62\x6a\x5f\x88\x0a\xd7\xc7\x5c\x1c\xf3\xca\xbc\x29\xa4\x52\x45\x11\x6f\xe9\x96\xac\x62\x3f\x85\x44\xc1\x71\x6e\x15\xaa\xbf\x6a\xa4\x35\x0d\x4d\xd4\x67\x2c\x46\xc0\x85\xa1\xa7\xf3\xe8\x19\x13\x75\x2b\x98\x63\xca\x48\xea\xb2\x2f\x6a\x36\x50\xd2\x25\xa3\x39\x95\x65\xa7\x9b\xaa\x75\x38\x5c\x7e\x87\x17\x2f\x96\x2b\xc3\xb7\xa9\x7c\
x7f\x43\x3e\x8d\xa5\x71\x46\x93\x66\x1b\x6d\x69\x89\x51\x31\xa1\xe9\x91\x6e\x06\x59\x8d\x9e\xaa\x69\xbd\xe6\x2c\x03\x1e\xdc\xb5\x64\x5b\xeb\xf4\xa4\xb5\xf3\x83\x95\x8a\x33\x40\xdc\x89\x79\x41\x2c\x76\x7b\x3f\x0c\x53\x48\x4d\x66\xc2\xa5\x15\x5d\xc5\xbe\x8e\x0d\x85\x44\x08\x90\x8b\x43\xcb\xe8\x8a\xd8\x71\xbf\x28\x80\xf3\x15\x01\x42\x2d\x35\x0f\x6a\x7e\x73\x72\x96\x91\x91\xfb\xb6\xb7\xc8\x0a\xbe\xbf\xbf\x1a\xa7\xd7\x59\x4d\x78\xe6\x51\x13\x23\xe5\x8a\xb5\x34\xa7\x9a\xad\xe9\x05\x7e\x32\x6d\x31\x6a\xd3\xc0\xd5\x82\xb8\x41\x81\x74\xbc\x37\x05\x34\xee\x72\x8d\xf4\x4a\xf2\x92\x4b\x82\xda\xb0\xcd\x56\xb6\xc3\x8e\xcd\x1d\x77\xb9\xf5\x0a\xfb\x87\x53\xa1\xd0\x0a\x0d\xff\x1a\x75\x0b\xe6\xa5\xc6\xd0\x14\xaa\x17\x77\x73\x85\x2b\xbe\x7a\x57\xda\x7f\x78\x7d\x08\xad\x09\x4f\x5c\xbd\x57\xd2\xe5\x08\x85\x7f\xe6\x77\xa5\xbf\xc2\x3b\x4c\x15\x9d\x1a\xf7\x72\x70\x4e\x08\xee\x9f\xe7\x17\xce\xdf\x4c\x57\x9b\x80\x8c\x99\x8a\x13\x6c\x21\x65\x81\xc9\x5e\x4d\x3f\x21\x42\xb9\xfd\xd5\x38\x92\x52\xee\x6b\xf2\x30\xf3\x85\x09\xe0\x26\x75\x4b\xde\x67\x7a\x01\xf1\x8a\x3f\x2b\xa9\xa3\xee\x95\xec\x6c\xf3\xde\xf5\x72\xbc\x3d\xd4\x82\xbd\x9c\x76\xb5\x2e\x0f\x2c\x81\x0c\x16\xe7\xa4\x19\x66\x7b\xb7\x43\xc5\x93\xe8\x9d\x35\x2c\x93\xe4\x41\x11\xf1\x95\xfc\x20\xb7\x54\x17\xae\x8f\x91\x78\x07\x33\x66\x9f\x90\xb9\x99\x39\xf0\x93\x19\xd2\x27\xfb\x40\xd9\xc3\x4c\xae\x1a\x78\xd1\x9e\xf4\xe3\xf5\x9b\x1a\x61\x53\x98\xe7\x71\xfc\x03\xf0\xe5\x5b\x93\x0b\x8f\x93\xe7\x82\x3b\x1c\x6c\x4f\x89\x88\xb3\x6b\x6f\x21\xce\x26\x8b\x8e\xf7\xb4\x47\x3f\x87\xd3\x7e\xb9\xf8\x9d\x7f\x7f\x1c\x18\x80\x36\x25\x62\xf3\x46\x14\x38\x14\x28\x37\x75\x92\xfe\x8d\xaa\x28\xf2\x50\xda\xfe\xb3\xa1\x19\x3b\x5d\x87\x9a\x7b\x9d\x88\x74\x6f\xbc\x95\x44\x59\xb7\x41\x71\x42\xe9\xe3\x01\x3a\xe1\xc0\xd3\xbc\xc8\x99\x43\xd6\x07\x3c\x44\x08\x87\x5e\x9d\xce\xab\x85\x95\x38\xdc\xab\x30\x86\xa5\x2c\x6d\x87\xb6\xe8\x1c\xd1\x45\x8c\x21\x7a\x30\x56\x09\xc5\xec\x1f\xf0\x42\xfa\x3a\x7d\x0f\xfd\x50\x25\x5f\x1f\x19\x4b\xec\x35\x66\x26\xb1\x7b\x6a\x39\xbb\x46\x75\x86\x21\x27\x46\xb2\xaf\x42\xc9\xd3\x79\x73\x3c\x37\x4b\xa4\x56\x70\xbe\x6a\xfb\x58\xdf\x89\x30\x17\xbe\x12\x3b\x66\x74\x05\x72\x47\x6b\xdf\x27\x19\x12\x6f\x3d\x13\xfe\xdd\xd3\x2e\xca\xf4\x1f\x7e\x7f\xd4\x92\xce\x2c\xa9\xc0\x13\xa8\x94\xbf\x6a\xe5\x82\xb4\xad\x68\x02\xf5\x07\x8e\x00\x56\x01\x73\xd8\x45\x9e\x40\xa2\x30\x44\xcc\x0e\xd0\x62\x4e\x68\x63\x11\x1a\xf0\x33\x04\x45\xe1\xc1\x9e\x32\x3b\xbf\x44\xf2\x1c\xde\x90\xd0\x0c\x43\x4d\xab\x88\x32\x1d\x8e\x4b\x74\x4e\x9f\x85\xa4\x19\xbb\x7b\x7f\x44\x0b\xfa\x77\xe5\x2a\xaf\x94\xb7\x98\xd7\x3d\xfc\xfe\xfe\x60\x23\x2f\xd4\x39\xb7\x9c\x4f\xfe\x5f\x45\xa7\xb2\x62\x9e\x1e\x2a\x3a\x31\x09\xdf\x98\xd2\x5c\xa8\x66\xfe\xd7\xff\x72\x45\x3d\x3d\xce\x74\x9e\x2d\x40\x4f\x6b\x2f\x55\x6b\xce\x2f\x33\x74\x57\xdb\x30\x3a\xb0\x2c\x7c\xb1\x43\xd6\x9b\x47\xd4\xa6\x1c\xd8\xc2\xe3\xbd\xbc\x5b\xa0\xab\x33\x09\x0c\x89\xb7\x1d\x42\x6d\x4b\xe3\xeb\x1f\x27\x66\x90\x18\x33\xd7\xc4\x4d\x8d\xe1\xd1\xa3\x45\x79\x89\xad\x4a\x33\x5d\xbe\xe6\x81\xba\x23\x8e\x34\x06\x9f\x6f\xfe\xed\x4f\x95\x43\xb7\x14\x8a\x23\x81\x4a\x59\x1d\x74\xac\x05\xf1\xce\x93\x99\x78\xdc\x66\x1b\xd7\xcb\x99\xfd\x0c\x7b\xdb\x0e\x6a\xdb\x2e\xfe\xf5\xe8\x5d\x18\xe8\xb2\xef\x05\x2b\x3e\x7b\x89\x1f\xde\xb0\x49\xc9\x91\xa7\x44\xad\x26\x75\x7a\x32\xf9\xb2\xa6\x7b\xb1\xf2\x05\x26\x66\x96\x4f\x1d\x84\xce\xbd\xb2\xaa\x15\x3c\xda\x5a\xab\x6c\x66\xae\x2f\xcd\x8b\xa7\xa1\xde\x0f\x2c\xdb\x99\x0b\x44\x6d\x77\x1a\xe4\x58\x9a\x1d\x07\x3b\x88\x19\x99\x61\xc9\x81\x25\xa4\x15\xc0\xcd\xc6\x42\xf7\x01\x49\xc9\x7a\x79\x6b\xdf\x17\x4a\xf0\x52\xe8\x98\x6e\xb1\x2d\x4e\x64\xb8
\x11\x81\x82\x6c\x96\x52\xeb\x70\xa9\x27\x6f\x65\x61\x5d\x38\x33\xb9\xea\xc1\x26\x37\x5d\x05\x09\x27\x5d\x51\xca\xa8\x5e\x82\x10\xb6\x33\x23\xb3\x81\x08\xe5\xb6\x30\x27\x22\x73\x4a\x71\x34\x91\xe1\x03\xef\x06\x60\x0e\xe6\x29\x44\x96\xf2\x21\xc7\x2f\xff\xc6\x95\xa6\xa9\xa8\xb9\x6d\x61\x9a\x01\x80\x57\x62\x03\x4a\xb1\xdf\xdc\xe3\x89\xf2\xe3\x5c\x19\x2c\x66\x38\x31\x0d\xd6\xd0\xae\xa0\x65\xfb\xd4\x68\x3a\xa2\x58\x50\x14\x9e\x09\x48\x50\x4e\xe5\x45\xe9\xd7\xc6\x17\xa3\xdb\xc4\x6e\x46\x74\x47\x33\x89\x38\xc9\x69\xbb\x04\x73\x3c\x55\x7b\x54\x66\xe5\xe3\x20\x96\x0a\x19\x52\xef\xbd\xa0\x6b\x9e\xe2\x22\xd2\x17\xfb\x84\x85\x0c\x53\x76\x8a\xe9\x4c\x0e\x51\x47\xb7\xc0\xbb\x70\xa5\xde\xaf\x87\x17\x2f\xb9\xce\x42\x25\x79\x75\x8c\xa1\x42\x20\xee\x61\xf5\x5a\x92\xf8\x0b\xc1\x30\x51\x11\x3c\xb4\x21\x7e\xd8\x56\xbb\x42\xa0\x6e\xe0\x18\xd0\xf4\xef\xac\xfa\x28\x1d\x0c\x33\xb5\x13\x8b\xe0\x38\x63\x55\xd0\x4c\xe9\xfb\x36\x3c\xc3\xbf\x83\xe8\x1c\xd8\xd8\x89\xcf\x9c\x4b\xe7\xa9\xbb\x16\xf8\xa0\xd5\x62\x74\x14\x27\x91\x30\xaa\x4a\x23\x09\xd2\xc1\x63\xe1\x65\xf4\x9e\x92\x9d\x9e\xdc\x22\xad\xb7\xda\x7e\x51\x49\x06\xc7\x07\x55\x85\x7f\x12\xde\xf4\x65\x08\x3d\x4b\x9e\x03\x81\x29\xad\xf3\xc4\x25\x5d\x70\x55\x7c\x0e\x9e\x51\xd6\x31\x5f\x6d\xb8\xe3\xf8\xea\x1c\xbc\xd9\xa7\xf5\x99\xd8\xcd\x3d\xaa\x81\xfa\x69\xbe\x9a\x1a\x4c\x41\x2c\x40\x28\x69\x3a\xa9\x88\xf1\xd7\x06\x33\x90\x86\xc6\x61\x93\xb1\x68\x92\x0a\xde\xb1\xea\xfa\x8e\xf8\x47\x66\xdb\x24\x63\x0d\x84\x25\x26\xea\x5d\x17\x6e\x99\xa4\x94\xc0\xb5\x62\xcb\xea\x6e\xc5\xe3\x59\x4a\x9b\x66\xd2\x6a\xee\x9a\xa9\x18\xb3\x9e\xb0\x95\x5e\xf0\x90\xf3\x48\x64\x65\x5d\xbf\x38\x08\xfa\x30\xfc\xd2\xe8\x74\xa6\xba\x79\x3a\x95\xf2\xea\x72\x69\xc7\xf0\x42\x1b\x3d\xd2\x91\x6d\x8f\x6b\xb3\xc3\x52\xe7\x69\x78\x40\x53\x6f\xdb\x1f\xa8\xd2\xe2\x7e\x41\xc5\x6d\x9f\xb0\x3f\x83\x77\x3a\x23\xf8\xdb\x59\x8b\x3c\x9b\xe2\x0d\x37\x74\x43\x10\x43\x08\x20\x58\x89\x2f\x28\x15\xeb\xab\x95\x72\x2e\x34\x52\x76\x77\xe6\x23\x40\xb1\xbf\x00\x03\xc3\xe5\xdc\x77\x94\xf0\xc1\xd9\x9c\xa8\xe4\xed\x74\xec\x2b\xd2\xae\x7b\x52\x02\xad\x7b\x8c\x13\x6b\x2e\x8d\x09\x3b\xe9\x01\x72\x18\x14\x28\xb6\xeb\xc8\x56\xef\x81\xd8\x17\x71\x28\x22\x1f\x92\x59\x71\x33\x87\x5e\x90\xf5\xe6\xd5\x52\xd9\xc6\xf8\x54\x2d\xb9\x2f\xe8\x0f\x65\xce\xc0\x14\xc3\xdb\x7e\x64\x6f\xef\x61\xf6\xaf\x30\x54\xac\xf1\xae\xd0\x2a\xde\x95\x78\x4d\xcd\xff\x53\x79\x8a\x51\x4e\xf4\xcb\x5a\xa9\x42\xb3\xc4\xed\x79\xa1\x6b\x67\xd7\xc4\xec\xf5\x69\x49\xac\xcd\x65\x53\xaf\xc8\x49\x51\xe6\xb9\x10\x13\x58\x1a\x2b\xb7\x4a\x71\x69\xd5\x56\x1f\x0f\x1f\xf8\xc4\x3a\x0b\xe6\x78\x21\xa0\x00\xf9\xb0\x26\xef\xa4\x70\x39\x7c\xf9\x37\xa4\xec\xc7\xa5\xec\x99\xd8\x76\x0a\xef\x44\x10\xdd\x4d\x16\x31\xbc\x7a\xa8\xbd\x79\x45\x2d\xd9\x0d\x78\xc0\xb0\xf6\x79\x48\xac\x49\x89\x85\xcc\x03\x47\x95\x65\x2c\xe2\x60\xdd\xca\x3e\x18\xf6\xcd\x7c\x4c\x2e\xb8\xf9\xc4\xf6\xaf\x62\x7d\x57\x3c\xaf\x8d\x08\x8b\xec\x8f\xae\x82\xf3\xd9\x72\x85\x49\xbd\x09\x02\xdd\x7a\xf4\xa5\xa6\x3e\x57\xb4\xa4\x1a\xb3\x55\xe4\xa2\xcc\x11\x70\x85\xeb\x1a\x3b\x38\x86\xe8\x1e\x39\x68\x2b\x0e\x54\x3f\x2c\xc5\xad\xff\x76\x47\x97\xe1\x73\x3c\x6b\x04\xd9\xcd\xa5\xcf\x23\xbf\x0a\xdc\xa1\x50\x0d\x5a\x34\xa7\xd2\x8b\xc7\x1e\x0c\x42\x3d\x0f\xb3\x05\x82\x40\x03\x0a\x42\x88\x38\x51\x43\x16\x41\x27\xc1\x29\xba\x47\x2f\x91\xe0\x8a\xb7\xc7\xa0\xe2\xdf\xc3\x7c\x3d\x1a\xf2\x7d\x6a\x97\x82\xaa\x97\x52\x98\x4a\x8a\xf5\xea\xce\x52\x47\xb0\x8e\xa0\x28\x1e\x14\xbc\x43\xf5\x8a\x9f\x07\xbd\x2f\x28\x4d\x3b\x37\x48\x69\x72\x6c\xb9\xa8\x00\x5a\xe3\xc4\xc5\xbd\x8b\x99\x50\x06\x5b\x2e\xa0\xf3\x2d\x14\x20\xed\xaf\x53\x20\x64\x22\x86\x65\xa7\xb
[binary data omitted: hex-escaped byte string from an embedded vendored asset]
x96\x45\xc7\xe7\x70\xaa\x6a\xbc\xcd\xbf\xcb\x31\xdf\x9e\x1c\x87\xa4\x06\x0b\x85\x7a\xbe\x53\x5b\x64\x6f\x45\x86\x97\xcc\x0d\x27\x28\x40\x72\xfd\x6c\x6d\xbf\xde\x76\xde\xd0\x65\x72\x63\x9f\xeb\x89\x86\x5b\x2e\xbd\x48\xfa\xb0\xb7\x73\xb9\xd2\xba\xc4\x95\xa8\xfa\x62\xf2\x64\x01\xd0\x64\xc8\x75\x22\x91\xed\xee\xba\x61\x40\xdf\x33\x57\x87\xe8\x5c\x6f\x27\xf1\x59\x99\xca\x15\x06\xe6\x27\x1b\xb5\x90\x5a\xe2\xcf\x3f\x31\x57\xa4\x0d\x1f\x19\x59\xb4\x5f\xfa\x8d\x63\x0b\xa5\xea\xed\x54\xcf\x23\x14\xe9\x69\xb4\x85\x86\x35\x3a\xd2\xbd\xe7\x2a\x28\x46\x2c\x80\x45\x1c\xa1\xf4\x16\x98\x5a\xbd\x4c\xc3\x12\x0d\x31\x22\xa9\x60\x64\x68\x5e\xe2\xa0\x27\x76\xf6\x55\xaa\x9f\x68\xf1\xe2\xfa\xa3\x18\xb1\x12\x8d\xa2\x71\xaf\x3a\xc0\x8b\x95\x5a\x41\x57\x3b\x27\x37\xc4\x58\x94\x17\x3b\x21\xfd\x3b\xb9\x60\xee\xe4\x91\xb8\xa3\xaa\x4c\x54\x9c\x28\x96\xfc\xa9\x0a\x05\x5d\x34\xa7\xb4\x96\x89\x73\x4d\x64\x06\xcd\xf8\x60\xeb\xdf\x9b\x99\x65\xf3\x9b\xf3\x8e\x2a\x2a\x1a\x81\xd4\xb0\xa1\xc5\x8c\xde\x69\xd3\x10\xa6\x90\x8f\x2b\xfc\x31\x24\xd5\xd4\x53\x04\xde\xe7\x41\x74\x2a\x42\xf2\xf4\x23\xb4\x37\xb3\xe4\x4f\x95\xdd\xec\xb2\x01\xc0\x2b\xb6\x88\x24\x3a\xb6\x6f\xe7\xb6\xc8\x28\xa4\x86\x7e\x99\x74\xcd\xaf\xae\x68\xa9\x14\x02\x0e\x12\x3f\xef\xc1\x03\xd3\x7d\xb4\xf0\x48\x6a\x7e\xf4\x8c\x4a\xb2\x66\x3c\x90\x9e\x95\x58\xa9\xc1\x5e\x9b\xa5\x4a\x73\xc3\xae\x58\xe2\x81\x39\xeb\x91\xc1\x30\x3f\x80\x1b\x2f\xcb\xaf\xf7\xa1\x7e\xe1\xe7\xf2\x4b\x7f\x7b\x11\xb7\xd9\x40\x87\x91\x77\xa0\x95\xaa\x59\x4c\x18\xdc\x89\x31\xc3\x40\xd7\xe8\x4f\xe2\x8b\x97\xa3\x6c\xb9\xdb\x6f\x78\xb1\xd1\xd7\x6e\x2e\x8b\xd6\x61\x3c\x91\x7b\xaf\x8f\xa2\xb4\x68\x4d\x44\x51\x33\xbd\xae\x74\xea\x9e\x2c\xaa\x88\x68\x44\x6d\x51\xdb\x33\xa1\xa1\x41\x8d\xfd\x46\x4a\x33\x86\xd5\x6e\x25\x71\x48\xd8\xf6\x08\x76\xad\x7b\x32\x39\xea\xee\x67\x02\x09\x66\xd5\x43\x51\x05\x74\xdd\x2e\xf6\xbf\x15\x5d\x62\xaf\xc3\x52\x72\x2d\x3f\x1c\x03\x70\x3d\xe7\xa5\x67\x85\x20\x83\x88\x5d\xbd\x60\x3a\x9a\xb9\x58\x4f\x40\x7b\xa9\x06\x69\x6e\xda\xc7\xfa\x3c\xb2\x79\x0e\x79\x06\x99\x72\xa1\x52\xa5\x50\x43\x03\x11\x73\xc2\x5c\xfb\x38\xdf\x7f\x90\xeb\x8d\x15\xf0\xfe\xf6\x52\x2a\x75\xf8\x30\x70\xe7\xe3\x37\x94\x47\xa5\x0a\xc0\x37\x79\x7f\x46\x04\xf7\xcf\xa0\xb1\x84\x7f\xa2\x50\xb4\xa2\x14\x09\x51\x29\xf8\x04\xe5\x22\x39\x41\x93\x77\x8e\xc7\x5c\x04\xc8\xb5\xd9\x8e\x57\xf2\xfa\x4e\xa7\xe8\x87\x72\xbc\xdc\xc2\xd3\x62\xf9\x4e\xc9\x89\x1a\x88\x9d\x3a\xf3\x5d\xe5\xa0\xa8\x3d\x92\x4c\xb1\x8f\x38\x08\x4f\x73\x9d\x62\x25\x57\xaf\x7a\x6a\x3f\x62\xdd\x5a\x81\xec\x66\x6a\xe1\x23\x8d\x00\x78\x96\x6b\xe5\xdf\x96\xf3\x91\x1a\x36\xfd\xbd\x3e\x3c\xd9\xf9\x81\x7c\xbf\x86\xbc\xed\x8a\xbe\x01\xa2\xd8\xf2\xb9\x79\xa0\xe6\xf6\x2e\x2a\x45\xb8\x31\x75\x1c\x0e\xd8\x9f\xae\x87\xf3\xcd\xf9\x8a\x64\xd9\x08\xd8\xdc\x3b\xbe\xf9\x77\xd7\x30\x36\xc9\x9b\x68\xe1\x8e\x02\xce\xa0\xbc\xa1\xb8\x24\x0f\x0f\x8e\x15\xc5\x51\x1f\x97\xfc\x4f\x13\xe8\x60\x05\x35\x4f\x9a\xb4\xe7\xd2\x8d\xac\x76\x4b\x7a\x92\x54\x59\x43\xd9\xbb\x6e\x26\x62\x96\xe6\x70\xe0\x7c\xe8\xa4\x6f\xb9\xbd\x30\x90\x5c\x2f\x97\xa1\x6c\xde\x24\xbb\x9f\xd9\x56\xbe\x9e\x8f\x6a\x1e\xdf\xf2\xea\x91\xe1\x5d\x5c\xee\x9a\x0c\xda\x91\x44\x79\xbd\x46\xcf\x57\xce\xf2\xd1\xee\xca\x1f\xa8\x7d\x8a\xc8\x0b\x16\x5b\x7a\x3e\xdb\x59\xfc\xe0\x71\x20\x22\xb2\xc8\xef\xf8\x9e\xf8\xf6\x60\x66\x2e\xea\x14\x6d\x99\xf7\x7e\x91\x46\x29\xda\x25\xec\x8f\x13\x45\xc0\x2c\xe1\x70\xc5\x2a\xb2\xd2\x84\x7c\x5b\x78\x9b\x34\xfa\x9a\xd9\x56\x94\xc9\xa6\xd5\x4d\x02\x6d\x5b\xaf\x84\xe0\x41\xe3\x4e\x16\xcc\x88\x98\xf8\xe2\xd2\xfb\xab\xfd\xe0\x53\x23\x30\x11\x71\x28\xb3\x75\xb9\xd4\x77\x77\x39\x1e\xd8\x77\xee\x81\x7a\xf0
\x68\xfe\x49\x69\x20\x92\x9c\x1c\xef\xb1\x07\xe7\x4e\x0c\x9d\x25\x34\xfd\x3f\x41\x50\x7c\xc8\x6a\xa5\xcc\xc1\x8d\xe5\xc0\x54\xb3\x33\x81\x39\x6a\x9a\x08\xdb\xa1\x6f\x11\x4b\x4b\xbb\x23\x6a\x60\x11\xd3\xc0\x4b\xbd\xac\xdd\xe0\x64\xd0\x59\x5c\xab\x3c\x3c\x5e\x9b\xc8\xf6\xcb\x36\xca\x80\xa3\x7b\xbe\x33\x16\x2d\x9d\xd5\x57\x3e\x86\x12\x7d\xa0\x49\x2e\xee\x57\x7e\x9b\x25\x7a\x9d\x83\xbe\x78\x54\xd2\x6c\xe9\x1c\x0b\x28\xe0\xbc\x62\xfe\x54\x51\x6a\xce\x1b\xe8\xe0\x0c\xb8\x77\xea\x06\x7e\xc8\x4e\xc3\x51\x3e\x56\x8e\x4b\x09\x1b\x81\xfa\x16\xc8\xb3\x7b\x7c\xbf\xe1\x7e\xb8\x23\xee\xd2\x31\x4e\x5d\x5f\xcb\x39\x1f\xbc\x51\x18\x1a\xd3\x2b\x81\x2d\x2b\xb4\x1d\x70\x79\x7e\x15\xac\xa7\x42\xce\x6e\x49\xb0\xcf\xb5\x69\xbd\xed\xc3\xe9\x8a\x59\xf0\x4e\x28\x74\xf6\x58\x3d\xf6\x95\x65\x04\x38\x70\xb8\x61\x35\xda\xab\x3c\x27\x08\x0a\xab\x61\x3c\x5b\x5c\xdd\x4b\x07\xc9\x93\x6f\x3b\xda\x85\xb4\x23\x06\xa3\x37\xbb\x93\xb0\xba\x0f\xec\xac\x7f\x7f\x13\xcd\xee\x99\xb2\x49\x21\x32\x19\x9e\xb2\x8a\x82\x9c\x27\xf3\xcf\x2e\xdf\x44\x1c\xd8\x4b\x5f\x96\x29\xc7\x85\x64\xf9\xfd\xf5\x6f\x8f\xaf\x1d\x87\x12\xb4\x79\x0e\x17\x26\xa8\xb6\xe4\x1b\x17\xa9\xe8\xa4\x9c\xb6\x55\x93\x62\xd2\x3f\xce\xde\x6f\xe4\x60\x72\x05\xef\x89\xba\xcf\x0c\xe4\x53\xf2\xb5\xff\xf9\x24\x88\xe6\x2a\x0a\x08\xff\x6b\xec\x1b\x24\x9a\x56\x0f\xa2\xb5\xd4\x3b\xa1\x6f\xf0\x31\xc4\xe3\x7b\x79\xb3\x0c\x3a\xaa\xaf\x61\xa1\x4a\xad\x97\xd1\xf2\x34\xc5\xeb\xaa\x7d\xb0\x91\xc2\xae\x11\xcb\xc7\xf1\x35\x7d\x4e\x92\x93\x07\x26\x27\xec\x78\x18\x75\xfa\xbe\xa2\xa4\xf2\xab\x3b\x3f\x97\x55\x3b\xc6\xe1\x53\xfa\xfc\x7b\x8c\xf6\x3d\x87\xb6\x3c\xd3\xdb\x49\x9d\xd9\xe9\xf1\xab\x4f\xd4\x87\xe6\x26\x1d\xa2\x09\xf5\xfe\xe0\x7a\x3b\xba\xbf\xa7\xab\x43\xca\x04\x17\xe1\xb4\x9a\xf0\xf4\x85\xaa\xe2\x20\x82\x13\xe8\x41\x8e\x23\xa5\x0a\x53\xc9\x37\xd1\xfe\x84\xa4\x81\x4e\xba\xcb\xdc\xe4\x0b\xfe\xa9\xdb\x71\xbb\xbc\x00\xd3\xdf\xf4\x1f\x6e\xc9\xd4\x2c\x5c\x80\x18\x3b\x3f\xab\x51\x3c\xd8\x2b\x23\xc0\x03\xf1\xb0\x91\xdf\x26\xd7\x3b\x05\xbd\x21\x38\xda\x8e\x4b\x05\x1f\x5a\xb6\x78\x27\xfb\x97\xdf\xf5\x39\x19\x65\x3b\x22\xc1\xee\x94\x57\x61\x89\x91\xe6\x69\x99\x1e\x4a\x21\x21\x58\x0f\xfd\x68\xb5\xc8\xc9\xd4\x3e\xfe\x8d\xe1\x46\xc4\x74\xef\x4e\x79\x5d\x30\xcc\xb9\xcf\xd6\x22\x42\xaa\xcd\x8d\x41\xb8\x44\xf5\x5f\x52\xfe\x31\x51\x2c\x93\xbb\xcb\x0a\x8e\xb5\x36\x6c\xdf\x35\xce\xa0\xea\xa7\xc8\xbd\x72\x5f\xc7\x36\x29\xaa\x1f\xcb\x4a\xcd\x68\xe8\x3f\x15\x4e\x4e\x74\x75\xb7\xf7\xfb\x71\x3f\x70\x7e\x28\x6b\x8b\x5c\xea\xf0\x83\x1d\x49\x83\x23\xc7\xca\xc1\x5e\x53\x9a\x3f\xb0\x32\xa6\x79\x98\x94\x11\x45\xc1\x7d\xad\x09\xbb\xa5\x05\x3e\xdf\xb9\x0c\xcf\x85\x12\x69\x29\x0f\x9a\xfb\x7e\x3f\x81\x86\x88\x47\x32\xf2\x52\xd0\xfe\xf2\x49\x57\x3b\x91\x1f\xc2\x98\x1a\xc1\x1c\x56\x52\xba\xdf\x6a\x02\x29\x2b\x4a\x36\xf4\x98\x3a\x96\x08\x3f\xb6\x37\x27\x8d\xfa\x21\x49\x45\x46\x4a\x7b\xd7\x6c\x4d\x71\x1a\x69\xf4\x1b\x96\x64\x3e\x4b\xdf\xcb\x7a\xd4\x87\xea\xb1\xe9\x2f\x87\x32\x7d\xef\x9c\xda\xc8\xa0\xf6\xaa\x0f\x6f\xbc\xb0\xf0\x5a\x9e\x29\x5b\x9f\x5d\xba\x48\x00\xd7\x3b\xba\xed\xdc\xf9\xda\x5f\xab\x4c\x3a\xfc\x07\x79\x54\xa6\x96\xc9\x6d\xf2\x64\x4b\xc1\x75\x56\x07\x6a\x74\x3c\xd5\xf6\x50\xa7\x93\x2d\x85\x56\xa6\x46\x42\xed\x84\x97\x7a\xc5\xb6\xd8\x27\x76\xe6\x54\xdc\xd6\xb1\x44\xdf\xc4\xb0\xfc\x36\xdf\x62\xe0\x28\x6a\xdf\x2c\x5c\x27\x56\x1e\x3a\x49\xf4\xbf\xa2\x3f\x1d\xea\xb9\x52\xbd\x4a\xcc\x2b\xea\x4c\xfd\x3c\xde\x9c\x5b\xa7\x34\x89\x84\x1b\x2d\x97\x39\x71\x43\x1e\xeb\xea\x74\x20\xac\x84\xd8\xad\x40\xf1\x88\x5e\x1c\x63\x14\x11\x45\xef\xd8\xfb\xc7\x23\x65\xfa\xc5\xfb\xe5\x70\x02\x7f\x05\xf2\xba\x4a\xbd\x2a\x14\xc0\x6a\x2b\x7
0\x6c\x89\x17\x56\xe9\xac\x7c\xfa\x15\x29\xc1\x13\x4b\x1e\x1f\x8f\x9a\x7c\xde\x83\x48\xd2\xfe\x57\x27\x12\x23\x6f\x5d\xc3\x08\x91\x43\xf8\x44\x58\xdf\xfc\xab\x8a\x22\x4d\xfb\xbd\x8b\x2e\xc9\x47\x7b\x2f\x26\xb0\x4a\x38\xca\x9e\x54\xaa\x0c\x21\xe3\xa5\x31\x36\x8c\xc8\xab\x32\x8c\x52\x49\xe0\x07\x8b\x5e\x58\xce\x03\x0e\xab\xeb\x21\x3f\x4f\x78\x29\x97\x3f\xfc\x41\xfc\x58\xb3\xf7\xd3\x19\x29\x3e\x4b\x0f\xfd\x49\xfe\x50\xf1\xb3\xe7\x2e\x03\x2b\xea\x32\x7d\x64\x05\x59\x26\x96\xe6\x78\xf2\x53\xcc\xc9\xfb\x4b\x48\x38\x89\x5e\xb6\x80\x8b\xec\xc3\x5c\xf8\x06\xa7\xc4\xc1\xbe\xa8\xa6\x37\x26\xac\x70\x46\xe2\xb5\x52\x26\x0d\x4b\x43\x28\x37\x77\xc7\xc9\x7c\x70\x45\x6f\x28\xfc\xfb\x60\x6c\x98\x33\xe7\xe0\x93\xe1\x87\x28\x90\x4d\xae\x28\x60\xf1\x20\x59\xc5\x31\x19\xa9\x81\xe0\x11\x60\x88\x2e\x51\xe2\x5b\xbf\xef\x73\x4b\x8e\xa0\x65\x41\xee\xd8\x43\x80\x48\x9e\x48\x22\x2a\x63\xd5\xcc\xe1\x56\xbc\x3c\x02\xe8\xb4\xb3\xa2\xf0\x48\x85\x5f\x2b\x74\x40\x4a\xd5\x1c\x4c\xf2\x8d\xc2\xf9\xf3\x54\xb9\x4d\xe4\xaf\x23\x94\x87\x67\xb2\xd0\xdc\x60\xfe\x58\xf8\xc6\x9e\x4e\x97\x2a\x9d\xb8\xb9\x6b\x97\xd4\xac\x35\x19\x83\xe6\x98\x0a\x3a\x8b\x24\xca\x6a\xe2\xa1\xa4\xb2\x4f\x5d\xee\xa6\x59\x5d\xd2\x62\x06\x7b\x69\x2d\xe3\xde\x96\xd8\x2a\x4a\x88\x46\xc3\x23\x74\x7e\x67\xfe\x48\x61\x3d\x6e\xf9\x62\x1b\x38\xb1\xda\x42\xee\x60\x94\x81\x6f\x5f\xae\x8c\x28\xed\xd8\x7d\x38\x1c\xde\xae\xef\xcb\xbb\x61\x17\x01\xec\x66\xde\x7a\xbb\x8f\x9b\xf3\x87\x16\xfb\x4b\x03\xba\x32\x30\x60\x35\x71\xbe\xcb\xcc\x61\x7f\x91\x68\x98\xc5\xc1\x44\xd9\x84\x2c\x8e\x30\xdb\x23\x75\xc1\x34\xe4\x0d\x6b\x16\x7b\x4c\xb8\x02\x70\x31\x7c\xef\xc7\xca\x23\xd2\xf0\xfe\x9b\xff\x61\x7d\xcc\xb6\x51\x79\xb2\xcf\x9d\xd3\x8a\xc6\xb9\x0e\x52\x1b\x5a\x2b\xc9\x9b\x1d\x20\x25\x27\x60\x28\x7c\xb4\xcd\x07\xcd\x4f\x82\xd1\xa6\xc3\x6b\xac\x43\x98\xa2\x8e\x6b\x9c\xb3\x66\x57\xdd\x1b\xd2\x8a\xb8\x56\xca\xd8\xd6\x76\x77\x9f\x46\x6c\x0b\xdb\xec\xb9\xb7\x99\x1e\xb4\x1d\x60\x86\x0c\xfc\x88\xb3\xa7\xed\xf3\xd3\x09\x45\xa1\xa9\xc0\x44\x52\x74\x73\x28\xaf\xf5\x71\xf7\x43\x93\xc5\xe9\x2c\x0a\x7c\xed\x04\x29\xb9\xf3\x4f\x1b\x3b\xea\xa0\x77\x32\xc4\x8a\x93\x82\x05\x5a\xbd\x26\xe5\xab\x54\xf9\x1e\x15\x01\x14\x08\x34\xa3\x7e\x3a\xdc\xce\x1e\x18\xc0\xfb\xdb\xeb\x29\x6a\x22\x0d\x68\xc8\xaa\x5e\x6b\x3a\xef\x72\xa3\xfb\x37\xfb\x3a\x02\xd7\x04\xef\x98\xa6\x4f\xac\x9a\xed\x84\x11\x79\x1e\xe1\x44\x85\xda\x38\xea\xa1\x47\x66\x59\x2a\x33\x63\x34\x60\x3c\x1a\x0c\x89\xc3\x84\x76\x15\x09\xa5\xa8\xe3\x94\x36\xb4\x22\xd4\xdf\x96\xe7\x97\xb7\xf3\x7a\x23\xe7\xd7\xfe\xd2\x59\x78\x9b\xab\xe4\x8b\x73\xd3\xf5\xc6\x4f\xd7\xd3\x33\x5f\x78\x77\xce\xe7\xfd\x98\x29\x49\x3a\xff\xc5\x21\x33\x96\x76\x3c\x3b\x1f\x91\xb9\x8e\xee\xbc\x50\x5f\xe4\x30\xb2\xf3\x42\x35\x37\x49\x34\x81\xd6\xbe\xd5\xcf\xbd\xc2\x92\x0c\x25\x23\x7d\xe9\xbc\x6b\x08\xa7\x88\xd2\x98\x55\x16\x83\x35\xc3\x86\x98\x13\x3d\xa7\xfb\x5c\x27\x7f\x6f\xcb\x54\xa1\x85\xbd\x6c\xee\xa4\x7e\xb0\xbc\x52\x71\xe7\xd1\x6e\xa5\xfb\xcc\x57\xce\x87\x79\x25\x00\x2f\x5c\x3a\xa2\xfb\xc7\x5a\xec\x00\x89\xa8\xbb\xfd\x48\x5c\x98\xa0\x03\x2a\xc4\xd1\xe7\x15\x7f\x70\x51\xce\xcb\xc6\xa1\x93\xcf\xe7\x18\x8a\x49\x20\x57\x54\x7c\x85\x6d\x82\x08\x6f\xf3\x50\xef\x6f\xe4\x5f\xe7\xcd\x67\x26\xc8\x35\x97\x10\x58\xdd\x91\x88\x53\xa9\xe6\x98\xd7\xa1\xb9\xb2\xa1\x0d\xa7\x1a\xcf\x44\xda\xa2\x7b\x71\xb6\x23\x64\x95\x68\x6e\x96\x8f\xa5\x95\x6b\x79\xea\x5c\xcf\x6d\x2f\x24\xd2\xbb\x05\x82\xe1\xd3\xc2\x5d\x35\xba\x3f\x93\x14\x7f\xf8\xef\xc1\x14\x05\x0a\xe6\x7c\xd4\xde\xbc\x4f\x26\x01\x27\x8b\x42\xb0\xa5\x0e\xd9\x2a\xfd\x74\x23\x79\x37\xff\xaa\xac\xbe\x11\xf0\x1a\xf2\x36\x62\xb1\x00\x49\x3b\xde\xb4\x
7c\x71\x57\x13\xd8\xed\xe1\x65\xd5\xdd\x2c\x9a\x0e\x51\xee\xc2\x49\x73\xbe\x52\xb1\x20\xb4\x9e\xe5\x2d\xea\xcf\x56\xf2\x7d\x6e\xfb\xa7\xb0\x36\x0a\x14\x38\x05\x61\x70\xc5\x8c\xa5\x4e\x3e\x99\xd0\x29\x3b\x53\x5c\x0a\xa6\x6f\x1c\x90\x59\x8a\xfc\x51\x61\x75\xda\x92\x1a\xa3\x97\xcb\xf2\xfa\x3a\xb0\x9e\xfb\x20\xd2\xca\x09\x6a\x62\x55\xd5\xfb\xfb\x9d\x24\x04\x09\xd5\x16\x37\x85\x16\xc0\x7b\xbf\x9e\xf0\x85\x4b\xa9\x1a\xe1\xea\x7d\x58\x8f\xe1\x34\x9c\xc9\x01\x3e\xb3\xc0\x3e\x1c\xce\x6e\x81\x61\xd5\xec\xaa\xab\xd4\x99\x28\xcc\xac\x32\xaa\x20\x7f\x29\xc2\x8c\x5f\x3f\x65\x09\x01\xf5\xb5\x77\xc6\xcc\x47\xc7\xc2\xe7\xf9\xed\xde\xcc\xbc\x82\x5c\x46\x70\xf9\x58\x3b\x26\x82\x56\xbb\xc9\x22\xfb\x78\xc7\x1f\xfc\xde\xc2\xd6\x2c\x11\x0a\x5b\xfa\x0e\x25\x28\x3d\xe2\xd5\x03\x72\x7e\xea\xc9\x85\xc4\x6b\xea\xb1\xea\x64\x25\x36\x5a\x56\x22\x65\xad\xb1\xad\xaf\x47\xa6\x3a\xb0\x26\xf4\xb6\xcc\xc8\xe1\x5a\xa5\x56\xd4\x4f\xe2\xa5\xd5\xb7\xb2\xf0\xfe\xf5\x59\x5f\x70\x93\x45\x3b\xf1\x95\x57\xe2\x7f\x47\xb1\x4b\x91\x86\x07\xf1\x45\x30\xbe\x22\x44\xbf\x7f\xb1\xc8\x9c\x7a\x1f\xf6\x44\xeb\xf2\xa1\xf3\x53\xfa\xdc\xad\x06\x26\x67\x17\x9f\xb6\xf2\x12\xe6\x3f\xbc\x2c\xbb\xf9\x2e\x85\xa5\x2e\x24\x6c\x90\x62\x14\x94\x24\xa1\xbe\xd4\xa0\xe8\xd0\xb7\x73\xe3\x76\x89\xb9\x65\x8c\x85\xf3\x86\x48\x27\x3e\xb9\x64\x21\xe8\xaf\x8a\xba\xf4\x00\xe4\xa2\x4b\x1a\x4b\xa1\xc4\x83\x4c\x9a\x7b\xc2\xf9\xfe\xa4\x6f\xa2\x85\xbb\x8f\xb6\xbc\xc0\xbf\xe3\xb7\x0f\xa3\x12\x39\xe7\x37\xa6\x69\x2a\x74\x3e\xf9\x44\x94\x1a\x0a\xbb\xdc\x38\xc9\x0a\x2c\xce\xa9\xc7\x79\x11\x9c\x69\xb8\xd6\x22\xf3\x00\xc7\x64\xb5\x05\xcf\x00\x46\xb2\xdb\xf9\xe4\xb2\xcd\xf4\x95\x21\x73\xab\x2b\x2f\x07\xbc\x97\xf3\xf9\x34\x7f\x5f\xfc\xa8\xcf\x27\x9e\x25\xb0\x56\x66\xd4\x3e\x1f\x98\x71\x82\xf7\xdf\xbb\x34\xde\xe0\x59\x62\x85\xbc\x97\xa5\xe0\xaf\xb6\xec\xbf\xc2\x38\x8a\x46\x78\x71\xf1\xf5\x61\x42\xba\x74\xf8\x88\x25\x28\xcf\xca\x00\x55\xfa\x0e\x73\x1c\xb8\x69\x47\xd7\x33\x50\xe3\x1f\x17\x24\xc7\x08\x09\xa3\x36\xa8\x9e\x9d\x20\x63\x87\x9c\x2a\xef\xdf\xcb\xfb\xc8\xa7\x37\x57\xcf\x7e\x89\xd1\xed\xa4\x81\xc4\xf3\xa8\x1e\x8c\x04\x97\x9a\xea\xd0\xf0\x62\x28\xef\x36\xa9\x69\x62\x21\xcc\x61\xa2\x89\xdb\x3b\x2b\x5a\x19\xed\x6e\xc5\x8d\x25\x15\x2c\xdf\xe3\x14\xbe\xa5\x8b\xe2\x33\xb0\xa3\xc8\x5f\x42\x11\x2a\xcd\xf1\x0e\xd5\xa2\xb9\x5e\xcf\xe7\xed\x81\x73\xc2\x0f\x3b\x5f\x74\x9a\xdd\x6f\x04\x57\x93\x72\x5b\xfa\x81\xa1\xb9\x89\x12\xbe\x59\xef\x16\x30\x59\x3f\xfb\x5c\x96\xfc\xf5\xdd\xa2\x6a\x55\x30\xcc\xb6\x6f\xcd\xfa\x94\x52\x16\x1a\xba\xce\x0f\x96\x21\x31\xa8\x27\xf0\x02\xa1\xee\x76\x1e\xae\x2d\x4d\xdd\xbb\xe4\x5b\xce\x12\x8e\xb4\x64\x22\xea\x5c\x7c\x05\x9a\x4d\xfa\x95\x60\x49\x7a\xd9\x43\xa7\x03\x11\x12\x86\xce\x4f\x7c\x2b\x32\x6b\xe5\x2a\xa1\x2b\x81\x3d\xb2\xfb\xd8\xb8\xeb\x44\x4d\xed\x32\x75\x0c\x52\xa5\x5f\xa0\x35\x6b\x58\xcc\x9d\xb5\x2c\xd6\xf9\xe6\x44\x74\x16\xbb\xcb\x12\x34\x69\xe7\xad\x71\xfe\xd9\xee\x89\x3a\xbd\x6b\x22\x9f\x48\x82\x5a\x3a\x62\xab\x2e\x12\x58\x9f\xd1\xb0\x6a\x2f\x9d\x80\xbd\x9d\x55\x1a\x1f\xdf\x8b\xf7\xb2\x1f\xa2\xf5\xd8\xe1\x6f\xff\xb4\xa4\x13\x1f\xa7\xc5\x55\x7e\xf3\x9b\xb1\xc2\xa2\xa9\x33\x8f\x11\x57\x63\x2f\x53\x18\x2f\x4e\x05\x71\xcd\x2d\x4f\xad\x98\x5c\xac\xcb\xf3\xdc\x91\x75\x3d\x9f\x02\xf5\xbf\x25\x60\xc0\xc3\x00\x45\x4a\x13\x48\x9f\x42\x67\x61\xbf\x8b\x22\xad\xc4\xc7\xec\x86\xb8\xa6\xc7\x1f\xdd\xdf\xd9\x4f\xf8\x93\x99\xeb\x2f\xd9\x9d\x9e\x49\x2a\xc0\x0c\xa7\x08\x21\xaf\xa9\xed\x02\xe9\x57\x35\xba\x8a\xdc\x20\x75\x68\xfc\x8a\x26\xb0\x12\x9f\x7e\xf9\xac\xb5\x3d\x96\xb9\x1b\x2c\xfc\x47\x24\x83\xd0\x34\x24\x41\x52\x84\xae\xeb\x45\x48\xa1\x79\x25\xff\x1e\x7d\x6c\xa9\
x57\xc6\x16\x29\xc8\x16\x4f\x7d\xfe\xb2\x9e\xcb\xf6\x8a\x25\xd6\x04\x2b\xea\xc8\x76\xc7\x7c\x12\xbe\x76\x1c\x37\x8f\xb5\x18\x04\x60\x5b\xb5\x5d\x9f\xbd\xfe\xf0\xb2\x53\x45\xd8\x56\xf9\x0f\xb1\xde\x0a\xf5\x90\x41\x75\x73\x32\xe6\xb7\xe3\x41\x8b\x6a\xd8\x7d\xbc\x06\x00\x21\xdc\x66\x56\x4b\x7a\x10\x4e\x5c\xdc\x81\xfe\x0a\xd2\x68\xda\x9f\xf6\xde\xd0\xc0\x78\xc7\x8e\x6d\x54\x82\x6d\x03\x72\x45\xf2\x37\x31\xc4\xe4\x60\x85\x6d\x34\xaf\x8f\xb1\x8f\xf1\x24\x7a\x5e\x7a\x5d\xd4\x17\x89\xbd\xd3\xaf\x33\xab\xe7\x8c\xb8\x0c\xb5\x8d\xd9\x5e\x4b\x7e\x5a\x34\xd7\x3a\x0d\x3c\x37\x5c\x6e\xf4\x92\x3e\x17\xaa\xf9\x9c\x9b\x2d\xe4\x54\x88\x92\x71\xea\xf9\x70\x57\xc6\xca\xd9\x74\xc6\xd2\x6b\x33\xac\xac\x0c\xf1\xc2\x46\x3d\x39\xe9\xf0\xf9\x7f\x83\x56\x83\x05\x31\xa1\x63\x45\xf5\xc8\x0b\x44\x3b\xb9\x32\x89\xa3\x5d\xf6\xae\x48\x5e\x26\x41\xe2\x44\xc5\x3a\x26\xb9\x4e\x84\xdc\x0b\xf1\x26\x6f\x49\xe0\x2c\x3e\xc7\x69\x96\xbe\x7b\xaa\x52\xd3\x29\x1d\xfc\x07\xa2\x90\xcb\xf0\x69\xa7\x9a\x19\xe7\xcc\x1d\x03\xab\x6b\x96\x6b\xf9\x1c\x27\x3f\x40\xd7\x26\x61\xbb\xff\x36\x56\x29\xae\x78\xb6\x5f\x8e\xc9\x93\x82\x1a\x7d\xf6\x53\x9a\xac\x93\xcd\xb9\x9c\x59\xb9\x2d\x8e\x98\xda\xb6\x1a\x32\x17\x36\x91\x23\xd0\xf2\x0c\x25\xde\xf7\xfb\x61\xf5\x27\x8f\xfc\x21\x34\xb3\x28\x8b\x13\x10\x70\x86\xd5\x19\x8c\x80\x9d\xfa\xb7\xa3\xab\x60\xcd\x6e\x4a\x25\x94\x4e\xad\x8b\x44\x32\x94\x49\x18\x7a\xec\x81\x1d\xa1\x39\xed\x89\xac\xc1\x98\x85\xc8\x9d\xbf\x59\x91\x58\xaa\xe4\xa1\x01\x61\x84\x71\x76\x7b\xa9\x08\xcc\x21\xc2\xaa\x9c\x78\x91\x54\x4a\x3f\x77\xee\x72\xdb\x85\xcf\x3c\x72\x8e\xac\x77\x2d\x0f\x5b\xc0\x45\xcf\x31\x10\x50\xbb\x26\xc5\x58\x47\xe4\xb0\x92\x25\x16\xc5\xbe\x88\xf3\xd4\x63\xa5\xc5\x57\xb3\x93\x1b\xb1\x5a\x9e\xb6\x9f\x5d\x4c\x06\x4d\x88\x4e\x63\xca\xb3\x76\x1f\x87\xf8\xf3\x20\xcd\x5f\x84\x05\x85\xec\x5e\xd4\x92\xb0\x74\x2d\x32\xcb\xc7\x5c\xf6\xf8\xda\xd1\x94\x50\x59\xf5\x42\x60\x84\xe3\x29\x3f\xfa\x08\x48\xab\xf9\xee\x7f\xfe\xf7\x3f\xfe\x2f\x00\x00\xff\xff\x9c\x84\xb0\xba\xce\x07\x01\x00")
+
+func dataPasswordsJsonBytes() ([]byte, error) {
+	return bindataRead(
+		_dataPasswordsJson,
+		"data/Passwords.json",
+	)
+}
+
+func dataPasswordsJson() (*asset, error) {
+	bytes, err := dataPasswordsJsonBytes()
+	if err != nil {
+		return nil, err
+	}
+
+	info := bindataFileInfo{name: "data/Passwords.json", size: 67534, mode: os.FileMode(420), modTime: time.Unix(1452717629, 0)}
+	a := &asset{bytes: bytes, info: info}
+	return a, nil
+}
+
+var _dataQwertyJson = 
[]byte("\x1f\x8b\x08\x00\x00\x09\x6e\x88\x00\xff\xb4\x98\xd7\x52\x23\x3d\x10\x85\xef\x79\x0a\x18\x72\xce\x39\xe7\x9c\x33\x98\x9c\xc1\xe4\x8c\x31\xcf\xfe\x6b\xf8\xb7\x56\xdf\xa9\xb2\xc6\xbe\xd8\xbe\xd9\x6a\x2f\x33\x5f\xb7\x5a\xad\xa3\x63\x67\x8a\x8a\x8b\xa3\xc9\xa7\xa3\x87\xcb\xa8\xa7\x38\xfe\xe0\x3e\x96\xb8\x70\xf7\x37\x74\x1f\x0e\x7f\xa2\xba\x3f\xf1\xdd\x6b\x3a\x9d\x2b\x8e\x5a\x86\xfe\x3e\x13\x3d\x2e\xcb\xf3\xbf\xe1\xde\xff\xff\x13\xa5\x22\x92\x7b\x7b\xfc\x5b\xbb\x19\x1f\xef\x65\xf3\x67\x6c\x1c\x8c\x84\x5c\x4a\x30\xca\x09\x02\xda\xca\x7c\xc2\xb3\x71\x1f\xbf\x6f\x2a\xb8\x8c\xe0\xd6\xd2\xfc\xe0\xf6\x72\x0f\x7b\x5a\x91\x24\x02\x2e\x27\x18\xd5\x04\xc1\x1d\xfb\x1e\xf6\xb2\x26\x49\x04\x5c\x41\x30\x5e\x0a\x82\xbb\x6a\x3c\xec\x75\xdd\xc7\x9f\xdb\x0a\xae\xb4\xda\xbc\x2a\x82\x51\x4d\x10\xd0\x54\xed\x13\xde\x2f\xfa\xf8\x6a\x5a\xc1\xd5\x04\x77\x57\xe5\x07\xd7\x1f\x78\xd8\xc3\x92\x24\x11\x70\x0d\xc1\x9d\x15\xf9\xc1\x48\x1e\x57\xc9\x7e\x0b\xb8\x96\x60\x54\x13\x02\x4b\x12\xf4\x3e\xde\x13\x01\xd7\x11\x7c\x3b\xef\x1f\xbc\x99\xf5\x71\x7a\xce\xc7\x0d\x03\xc1\xe4\x02\xae\x27\x18\x1b\x13\xac\xb2\xbf\x36\xf7\xe4\xb8\x7e\x0b\xb8\x81\xe0\xba\xbe\xdc\x55\x72\x0c\xdd\x58\x15\x54\x71\x23\xc1\x58\xa6\xc0\x2a\x9d\x56\x15\xd2\x7c\x21\x37\x59\xcd\x5b\xb3\x95\x22\xb7\x10\xdc\x5c\x92\x1f\x0c\x09\x8c\xb5\x92\x49\x04\xdc\x6a\x25\xc8\x6d\x56\x82\xdc\x6e\x25\xc8\x1d\x04\xa3\x9a\x20\x18\x82\x12\x8b\x30\x93\x08\xb8\xd3\x4a\xe9\xbb\xac\xe4\xad\xdb\x4a\xe9\x7b\x08\xa6\x40\xf0\x54\x51\x6d\x78\xbe\xa9\x1c\xb1\x20\x08\xb9\xd7\x8c\xdc\x67\x25\xc9\xfd\x56\x97\xc8\x80\x95\x24\x0f\x9a\x49\xf2\x90\x95\xc0\x0d\x03\x2c\x00\xc8\xad\x00\x9e\x57\x7d\xfc\xb5\x13\x96\xe4\x11\x56\xfc\xb6\xe1\x5f\xba\x98\xf4\xf1\xe5\x94\x8f\xef\x16\x0a\x6b\xf2\x28\xc1\x1f\x5b\x1e\x70\x3a\xe6\xe3\xf3\x09\x1f\x23\x79\x22\x78\x8c\x60\x2e\x93\x3a\x4e\xe9\x65\x92\x93\x51\x1f\xbb\xa2\x04\x3c\x4e\x30\x7b\xc9\x4d\xe2\xc5\xc1\x24\x5c\x95\x2b\x4a\xc0\x13\x04\xf3\x41\x02\xa8\xe9\xec\x3d\xf7\xc4\x55\x2f\xe0\x49\x82\xb9\x4c\xc2\xa8\xe9\xdc\xc8\xe3\x11\x49\x22\xe0\x29\x82\x59\x0d\x61\xd4\xf4\xeb\x99\x9c\x13\x12\x27\x11\xf0\x34\xc1\x04\xf0\xb2\xa0\xbe\x53\x86\xa9\x54\x2e\xa1\x80\x67\x08\xe6\x32\x99\x84\x97\x05\x61\x94\x43\x57\xbd\x80\x67\x09\xe6\x32\x09\x63\x95\x54\x27\xaa\x96\x4b\x22\xe0\x39\x82\x59\x0d\x61\x94\x7d\xaa\x13\x55\xcb\x25\x11\xf0\x3c\xc1\xdc\x0c\x56\xcf\x84\xa8\x32\xf1\xe4\x2d\x10\xcc\xf1\x61\xbf\x99\x04\x7d\x4d\x04\x2f\x12\xcc\xbe\x72\x12\x78\x39\xb3\x2d\xec\xb7\x5b\x95\x80\x97\x08\x66\x5f\x09\xa3\x2d\xe6\xd5\xca\x7e\xbb\x24\x02\x5e\x0e\xe9\x31\x44\x5f\x6c\x31\xf5\xe4\x68\x38\xac\xc7\x2b\xac\x98\x8a\x46\xe1\xa1\xc9\xe4\x51\xa7\x04\x38\x9d\x11\xf0\x2a\xc1\xa8\x40\x2a\x63\x42\x0a\x15\xf5\xdb\xdd\x26\x02\x5e\x23\x98\x8a\xc6\x2a\xe9\x5e\xa9\x21\xd4\x16\x57\xbd\x80\xd7\x09\xe6\x4b\x74\xac\xd4\x0d\x4e\x0e\xc7\xd0\x8d\xa7\x80\x37\x08\xe6\x8d\xc0\xfe\xb1\x32\xcc\x7a\xe2\x1c\x6f\x12\xcc\x9b\x99\x93\xc0\xdb\x84\xfd\xe6\x55\xe6\xf6\x47\xc0\x5b\x04\xe3\x3a\x97\x97\xb8\x61\x58\x55\x62\xc5\xdb\x04\x73\x94\xb8\x61\xec\x37\xe5\x94\xc7\xde\xb5\x4b\xc0\x3b\xa1\x03\xc2\xd1\x63\xf5\x18\xb1\xc4\x8a\x77\x59\x31\x75\x80\xc7\x98\xdf\xfa\x69\x31\xc5\x2d\xbb\xf3\x2d\xe4\x54\x8a\xe8\x02\x7e\x5a\x2a\xd8\x16\xee\x11\x4c\x89\x41\x9d\xd2\xa4\x54\xea\x3b\xf7\x1f\xe2\x05\x08\x7a\xdf\xea\xeb\xde\x81\xd5\x2f\x2d\x87\x81\xc9\x08\x82\x13\x7c\xb4\x80\x8f\xac\x3c\xf2\xb1\x95\x47\x3e\xb1\xf2\xc8\xa7\x56\x1e\xf9\xcc\xca\x23\x9f\x5b\x79\xe4\x0b\x2b\x8f\x7c\x69\xe5\x91\xaf\xac\x3c\xf2\xb5\x95\x47\xbe\xb1\xf2\xc8\x69\x2b\x8f\x7c\x6b\xe5\x91\xef\xac\x3c\xf2\xbd\x95\x47\x7e\xb0\xf2\xc8\x8f\x56\x1e\xf9\xc9\xc
a\x23\x3f\x5b\x79\xe4\x17\x2b\x8f\xfc\x6a\xe5\x91\xdf\xac\x3c\xf2\xbb\x95\x47\xfe\xb0\xf2\xc8\x9f\x56\x1e\xf9\xcb\xca\x23\x67\xcc\x3c\xf2\xb7\x95\x45\xce\xda\x59\xe4\x9f\x7f\x6e\x38\xdd\xbf\xd9\xa2\x6c\xd1\x7f\x01\x00\x00\xff\xff\x4c\xae\x50\xc0\xce\x20\x00\x00") + +func dataQwertyJsonBytes() ([]byte, error) { + return bindataRead( + _dataQwertyJson, + "data/Qwerty.json", + ) +} + +func dataQwertyJson() (*asset, error) { + bytes, err := dataQwertyJsonBytes() + if err != nil { + return nil, err + } + + info := bindataFileInfo{name: "data/Qwerty.json", size: 8398, mode: os.FileMode(420), modTime: time.Unix(1452717629, 0)} + a := &asset{bytes: bytes, info: info} + return a, nil +} + +var _dataSurnamesJson = []byte("\x1f\x8b\x08\x00\x00\x09\x6e\x88\x00\xff\x54\x7d\x5b\x62\xf3\x3a\xcc\xdc\x5e\xfc\xdc\x15\x74\x0d\xdd\x41\x9f\x28\x89\x96\x18\x51\xa2\x0e\x29\xda\x9f\xd3\xcd\x17\xc0\x0c\xe8\xfc\x4f\x27\x27\x5f\x12\x4b\xbc\xe0\x32\x18\x0c\xfe\xdf\xe3\xff\xa4\x76\x3f\xfe\xf7\xff\x7d\xb4\x23\xdd\xdb\xe3\x7f\x3d\x7e\xca\x76\xb6\x72\xca\x57\xef\x94\x73\x0a\x47\xb3\x6f\x9e\x51\xff\x3b\xd5\xf2\xd6\x7f\x5a\xc2\x2b\xe9\xff\x1f\xf2\x23\xb1\xe2\x67\xf1\x4b\x47\x29\x35\xca\x7f\xef\xf0\xc9\x45\xff\x25\x9c\x4b\xac\xf8\xb7\x9f\x30\xef\xfc\xd3\x5b\xba\xf5\xa7\xb6\x50\x2b\xfe\x50\xa8\x77\xd2\x7f\xb9\xb7\x72\x5c\xf8\xa1\x35\xd4\x39\x85\xf1\x8f\xf1\x57\xbe\xac\x65\x4a\x7c\xbc\x39\x87\xba\xdb\xb7\x96\x9a\xd6\x6e\xff\x9c\xe3\xdb\xfe\x5c\x8e\xfa\xd7\xdf\x21\xef\xf6\x74\x5b\xc8\x59\x1f\x45\x1e\x56\x7f\xf3\x53\xfa\xb9\xea\xb7\x63\x3d\xf5\xf1\xf4\x37\xf7\x64\xdf\x7a\xcb\x9f\xda\x6e\xfd\x0b\xe5\xb2\xef\x6f\xc9\x7e\x75\xad\xd1\x7e\x35\x2c\x58\x90\x29\xe0\x2f\xaf\xe5\xfc\x0d\xd9\x7e\xf2\x8c\x5c\x82\x59\x1e\xd7\xfe\x51\x56\x74\xde\xa2\xfd\xfe\x15\xab\x3f\x7f\xac\xb7\xfe\x85\xbb\xd7\xd3\x7e\xea\xd2\x8f\x48\x57\xb3\xdf\x3c\xae\x89\xbf\x20\x2f\x67\xff\x1c\x5f\xe1\xd4\x7f\x8b\xcb\x3b\xd4\xc5\x7e\xaa\xc8\xcf\xdb\xf7\xda\x1d\xe5\x9b\xfa\xb8\x2d\x9c\xf2\x51\xbf\xb6\x01\x5c\xd2\x5a\x56\x59\x78\xfd\x22\xc6\xc5\x7e\xad\xec\xf8\xf7\x35\xe8\x63\xf2\x83\x8e\x5e\xaf\xed\x63\xaf\x94\x72\xd4\x2f\x6a\x7a\xc5\x1a\xf0\x1b\x97\x3d\x43\x4d\xf3\xa6\x1f\x8e\xd7\x2b\xff\x74\x5d\x8a\x3e\x8e\x2d\xb2\xfd\xe7\x96\x8f\xb5\x23\x72\xc5\xdb\xf7\x7b\xad\xc1\xfe\x5e\x38\x12\xde\xfe\x1d\x6e\xfc\x8b\x9c\xa3\xb2\x37\x3c\xf7\x82\xa7\xbc\xe4\x43\xa2\x3d\xd6\x79\xc6\x5b\xdf\xe9\x5d\xca\x62\xcf\x55\x71\xf8\x6a\x69\xcd\x76\xed\x7b\xa2\x64\x29\xe2\x61\x6f\xf3\x13\xcf\x1d\x8b\x22\x8f\x5c\xf5\x63\xaf\xf2\xc6\x1b\xe6\x62\x5b\x7b\x85\x7b\x3c\xd9\xd6\xd7\xcd\xfe\xe6\x53\x0e\xa9\x7d\xf1\x0e\x6d\x93\x23\x70\xe3\xf1\xfa\x8d\x63\xdd\xd2\x71\x14\xfb\xab\xcf\xd2\xee\xbf\x3b\x8e\xdb\xf0\x09\xe7\x6d\x07\x2b\xfe\xb3\x17\xb1\x97\x4e\xcf\xa7\x1d\xe5\x25\x05\x3b\x41\xe1\x63\x3f\x7d\x7c\xf0\xa2\xcf\x62\x0b\xb6\xc9\xaa\xe4\xdb\xd7\x49\xfe\x4f\x3f\xae\xcb\xce\xbe\xec\x7d\xe4\xec\xe6\x60\x0b\xa2\xcb\xc0\x6d\xb7\xff\x8d\x4d\x3f\x52\x5e\xee\xc4\xe6\x7e\xce\x92\xed\x07\x9e\xa9\x6d\x38\x32\xf2\x57\xda\xb8\x5d\xf8\x88\x34\xf1\x7e\xce\x4b\x39\x43\xb6\x03\x51\xfb\x2f\x6e\x57\xe3\x05\x29\x72\xcf\x7e\xed\x1d\x0f\x9c\xa5\x5e\xb1\x85\x4f\x39\x43\x58\x67\x5d\x53\x5b\xaf\x38\x4d\x58\x20\xde\x56\x59\x9e\x17\x1e\xe9\xee\x33\x8e\xee\x55\x78\x11\xb6\x34\xdb\x6e\xcf\x35\xbc\xf9\xfa\x53\xf9\x2c\xf6\xe1\x6e\x36\x2a\x17\x75\xd7\xfd\x5f\x3e\x38\x59\x35\x62\x21\xff\xd9\x0f\xc9\x41\x2a\x7c\x67\xac\xbf\x5c\xa1\x66\x8f\x5b\x17\x3c\xc3\x16\xde\x76\x36\xf3\x81\x13\x83\x23\xc5\x4b\xc7\x9d\xb7\x1d\x5b\xc2\x99\xa2\xbd\xc8\x15\xe4\x87\x71\x5b\xf1\x66\xa7\x1c\xf6\x62\x5f\xc9\xbe\xd8\x0f\x3f\x63\x5d\xbb\xbf\xa5\x58\x42\x5b\
xda\x37\x8f\xdb\xd2\xcf\x13\xa7\x8e\xdf\xd8\x3a\x2f\x4a\xbb\xe2\x39\xe3\x4c\xc8\x1d\xc1\x65\x97\x55\xba\x36\x2c\xd3\x15\x3e\xf6\xa7\xae\x14\x2b\x4f\x3e\x0e\xee\x21\x07\x75\x8b\x6f\xfd\x19\x39\xfa\x25\xe3\x96\xad\xa7\x1b\x5a\x6c\xae\xdc\x25\x7e\x60\xf9\xda\x9d\x5a\x6c\x1f\xe5\x99\x66\xdb\xad\x76\x7e\x16\x5a\x40\x33\x13\xb3\x3c\xac\x9c\x72\x9c\xb6\x1c\xec\xf3\xe5\xe0\x56\x7c\x5a\xed\x09\x07\xb6\xe2\xda\x3f\xed\xa6\x57\x9a\x85\x50\x8f\x76\x57\xdc\xa5\x59\x7f\xe4\xc4\xe6\xbe\x63\x78\xf1\xe4\x8b\x8d\x8c\x3c\x7f\xc5\xae\xb0\x98\x8d\x97\x9d\x24\x39\x27\x5f\xe3\x60\xbb\x9c\xf1\x57\x9f\xb2\xc6\x7b\xb6\xfb\x92\xc3\x1b\x2f\xf2\x94\xad\xb1\x13\xbd\xf6\x5b\x16\x07\xb6\xa3\xcd\xdb\x91\x96\x9b\xef\x29\xff\x79\x85\xf6\x1f\xac\xfe\x1c\xda\x2d\xcb\x52\xf4\x61\xb6\x18\x71\x75\xe5\xa3\x2f\x1c\xd9\x92\x13\x1e\x50\xae\xf2\xad\x47\xbb\xc2\xca\xc1\xa6\x3d\xbe\x8e\x0e\x4e\x4a\xfd\x1f\xcd\x80\x3c\x9a\xdd\x5c\x39\x6f\xfa\xfb\x93\x5c\xb0\x72\xd9\x25\x9a\xcb\xc7\x0e\x1a\x4d\x4c\xc8\xaf\x50\xff\x98\x5f\x1c\x35\x31\xdc\x76\x7e\x9f\x7f\xfc\x8c\x1c\x85\xdf\x80\xb3\x8b\x0f\x39\xd7\xfe\x89\xf4\x8e\x65\xc2\xf1\x4e\xba\xe5\xcf\x4e\xdf\x9a\x3f\x62\xdb\xf1\xab\x15\x96\xb1\xea\x4b\x14\xdc\x46\xfb\x27\x71\x85\x5c\xba\x1a\x7e\x13\x1e\xb6\x8b\x03\xe1\x53\xe0\x8a\x89\xdd\x2c\xf6\xd9\xf2\x8c\xf1\x2c\x76\x09\xdf\x58\xa2\x23\x2e\xe9\x0c\xb6\xe5\x6f\x7c\xe8\x24\x67\x02\x27\xa7\x3c\x9f\xf8\x21\x59\x77\x9e\xb4\x96\xe4\x7d\x6d\x3b\xfd\x73\xe5\xc6\xc8\x71\xb2\xc7\x16\x8b\x0c\x5f\x2a\x26\x19\xef\x2f\x6b\xb3\x06\xbb\xb0\x1f\xbb\xfb\x1a\x3d\x2c\xfe\x7b\x17\x4f\xf1\x61\x86\x66\xd3\xed\x36\xe7\xf3\x0e\x8b\x3e\x7e\x2b\xb7\xbd\x68\xc8\x3c\x39\x67\x0c\xd9\x1e\x26\x2f\xc3\xbe\xbf\x23\x3e\x4e\xcf\x36\xbc\x73\xa5\x0f\x91\x2b\xfc\x32\x53\xf0\x93\xe4\xfd\xe1\xd1\x0b\x57\x5e\xac\x65\xf6\x8d\xf6\x95\x2d\x53\x4d\xf1\xe4\x99\xb2\x25\x6e\xfd\xbe\x69\x39\xe5\x49\x4f\x3b\xb4\xb9\xcf\x81\xe1\x0f\x3d\x93\x84\x20\x7e\x18\xe5\x72\x4d\x78\x54\x59\x14\x0b\x25\xec\x1b\x37\xd6\x46\x23\x82\x8a\xeb\x7b\xe3\x51\xef\x08\x5b\x6d\xd6\xbd\x6e\x65\xa1\x1b\xb5\xfd\x98\xe2\xbc\xdb\x4b\x73\xa3\x36\x35\x1a\xf6\xd1\x33\x4c\x18\x37\xd6\x3e\x7a\xea\x6d\xb3\xe5\x16\xef\x66\x36\x49\x62\x08\x33\x51\x7e\xb1\xc4\xdb\xde\x29\xac\xfa\x5a\xa7\x87\x09\x7a\x09\xf4\x9d\xc4\x32\x45\x7b\x84\xb9\x7f\xfd\xa7\xbd\x86\xdc\xbc\x9e\xef\x5f\xac\x13\xc3\x9e\xfe\x8b\xe7\xb9\xc2\xaa\xbf\x73\xf4\xb3\xe0\x07\x6c\x47\xec\x76\xf3\x12\x9e\x0b\x23\xc4\x38\xf1\xc5\xb3\x3d\x65\xfe\xc0\xb1\x8a\x51\x6f\xb6\xa8\xef\x92\x9f\x11\x1f\x77\xca\x0d\xc0\x8d\xed\x1e\xea\x48\x58\xd0\xdc\xc6\xd7\x0b\x67\x37\x72\xef\xf0\x87\xe7\x7e\x1c\xdc\x7f\xf1\xe4\x70\x10\x7a\x48\x12\x5c\xac\x7a\xe5\x1b\xcb\x93\xb1\x5c\x5b\x9f\x26\x04\x2f\x2d\xe4\xf0\x1b\x2c\xd2\x89\xf1\xc5\x78\xa0\x0e\x8b\xfd\xf2\xcf\x96\x3b\xb5\xc6\xc6\xb8\xe5\x36\x5f\x74\x07\x0b\x64\x67\x06\x27\xab\xff\x9a\x2c\x8f\x1d\x9a\x30\x23\xe8\xb2\x7f\x95\xe0\xb6\x9e\x7e\x9a\x56\x8b\x51\x9f\x62\x56\xed\x17\x96\x98\xd7\xb0\xe8\xd6\x84\xb5\xa7\x6c\x4f\xf3\x8a\xab\x1e\x82\x35\x97\xd7\xf8\xa3\xb8\x5b\x73\xd9\x22\xce\x43\x3d\xe0\x1b\xcb\xe2\xe1\x5e\x99\x26\xac\x99\x9c\x1a\x9e\xef\x1c\x12\xfc\xf0\xba\xe2\x9f\xe4\xaf\x54\x73\x02\xf2\xc2\xcd\x5e\x40\xfe\x34\x3d\x9c\x58\xc6\x9d\x97\xf9\x96\x70\x5f\xee\xf0\x62\xc7\xe1\xf6\x00\xa8\x70\x51\xf5\x52\xf2\xc3\xdf\x30\x35\xc7\x85\x6f\xa8\xe3\xb7\x47\xd7\xa0\x0b\x7f\xf6\xed\x76\x48\x7f\x9f\x56\x47\x76\x42\x82\x11\x7b\xe9\x0f\xef\x81\x1e\x78\x04\x31\xfa\x18\x6e\x24\xe4\xdd\xe0\xcf\x0b\x22\xa1\x13\x8b\x1c\x16\x1a\x0f\x39\x5c\x0c\xcf\xe4\x6f\xba\x87\xd6\xd0\x0d\x16\xc9\x4f\xb2\x18\xd8\xc9\x76\xa3\xd8\x91\xff\xaf\x27\xf8\x6e\x09\x73\xf5\xa1\xdb\x24\xdb
\x63\xc7\x79\xd6\x18\xde\xa2\x62\x49\x2c\xd4\x22\xe9\x23\x4a\xe0\x55\x25\xfe\xc7\xee\x2f\xe5\x65\x76\x48\x02\xab\x09\xb1\x02\xb6\x58\x0e\xd9\xaf\x6c\x04\x22\x2c\xf1\x23\xbb\x3d\xf8\x52\x3e\x76\xb7\x5a\xe8\x1e\xef\x4a\xa2\x62\xeb\x5e\x32\x03\xb4\xaf\x23\xc1\x8a\xa8\x3b\xb5\x28\x2a\x2c\xf2\x6f\x30\x89\x6e\x03\x3b\x4c\x99\x5e\x37\x9e\xe0\x79\xd3\x4c\xc0\x3c\xd0\x2c\x36\xcc\x6c\xe7\x56\x3a\x3d\xd9\xbc\xc9\x65\xbf\xdd\x1e\xef\x39\xda\xfe\x5d\x35\x98\xd1\x93\x9f\x5f\x57\x84\x24\x6a\xce\x61\x5e\xb2\x5a\x11\x38\xe6\x5f\x89\x82\x63\xc5\x9e\x49\x24\x08\x6f\x34\x75\xbd\xdc\xc1\x83\x37\x44\xe3\x57\xe4\xc1\xb9\xd2\x7d\xe3\x17\x26\x79\x9d\x0f\xd6\xb4\xd4\x23\xd9\x75\x50\x6f\x21\xf9\x83\x65\x1d\xb5\xd8\xb7\xae\x82\xf0\x36\x17\x66\x27\xa1\x31\x2f\x34\x6b\xb1\xd4\xb0\x23\x18\xa6\x6b\x79\x3e\x3d\x96\xbf\x90\x06\x1e\x6e\xd6\xc3\x34\x21\xf8\x68\x6e\xff\x4e\xff\xa7\xad\x3f\x9f\xf6\x47\x1b\x4c\xce\x53\xf3\xc6\x5a\x2c\xd7\x71\x4f\x36\xf9\x61\xb1\xa8\xb1\xb9\x7f\xc0\x1f\xb8\xf1\xa2\x6a\xd1\x1f\x8c\xaf\x47\xce\x50\x70\xee\x66\x3d\xfb\xe6\x08\x34\x3f\xb5\xd3\x14\xee\xcd\x4c\x6d\x93\x1f\x16\x0f\xe0\xc9\x35\xcf\xee\x1c\x6c\x91\xed\x60\x30\xe5\xd9\xb8\x08\xe2\x72\xe2\x29\xfe\xfc\xc1\xa4\x0f\xe1\x40\x37\xcf\x20\x61\x64\xbe\xb0\x55\x62\x2f\x2c\xe7\xd6\x7d\xc7\x45\xd1\xbf\xee\x0f\x7f\x06\x5b\xc1\xd6\x75\x13\x2d\xa8\xf4\xc0\x51\x7e\x0a\xc9\x9c\xee\x16\x2f\x16\xa2\x32\x59\x76\xba\x33\x8d\xb1\xe1\x1a\xe7\x22\xa7\x08\xfb\xbc\x05\x3b\xbb\xe2\xc7\xe3\xa9\x41\xbe\x3d\xb2\x3d\xf9\xca\x6b\x1c\x3e\x21\xe3\xa6\xbc\x11\x3b\xbd\xc2\x2f\xff\xe2\x92\xe6\xf1\x6c\x76\xb3\xf5\xc7\x67\x49\xaf\x2c\xfe\x90\x60\xe7\xb4\xbb\x76\xa5\x13\xd1\x0a\xe3\x71\xa6\x09\x67\x2d\x11\xbb\x2d\x46\xe7\x01\x24\x81\xb9\x78\x51\xbb\x60\x98\x43\x33\x4f\x0d\x6f\x53\xf1\xe3\x19\xf1\xcd\x9e\xea\xf4\xe1\x49\xc0\xf9\x94\x8b\x77\x00\xb2\x58\x8a\xfa\x0f\x3c\x64\x61\x98\x21\xc7\x19\xe6\xf6\xb0\x5b\xbe\x8b\xd1\x64\x3c\x7d\xce\x38\xb8\xab\xde\xc1\x75\x63\x9c\x4f\xc7\x23\x4e\x4a\x0f\xa8\x7d\x85\xf5\x79\x0f\xd3\xab\xc7\x01\xf1\xb6\x5c\x89\x8c\xcd\x97\xd4\x6d\xc4\x05\x66\x07\xda\x81\xff\x1e\x1e\xa2\x68\xd2\x80\xbd\x99\x60\xd5\x96\x8f\xa7\x9b\xc8\x75\xe4\xe5\x6f\x0d\x9d\x6c\x63\x3f\xbc\xd8\xe1\x1f\xac\x62\x3b\x6d\xb1\xc4\x0d\xe1\x6f\x48\xe8\x23\xe7\xc7\x2c\x87\x86\x8e\x70\x25\x85\x5e\x66\x8a\x81\x3f\x86\x70\x3d\x0e\xc4\x66\x8b\xc1\x92\xb6\xc8\x54\x76\xfd\x26\x98\x92\x3d\x33\x68\x57\x1f\x67\x3b\xff\x42\x6c\x20\x7b\x67\x7f\x41\xde\x50\xf6\xee\x9b\x8e\x30\x9f\x94\x73\x03\x13\x76\x74\x8f\x1a\xf6\x78\xa8\x87\x17\x8f\x18\xfd\x6a\x99\xe7\x16\x67\x12\x19\xfb\x89\x69\xe3\x2d\xb2\x07\x8d\x81\x76\x4e\xb6\x39\xda\x99\x7f\x69\x32\xc4\x3b\x8a\x30\x71\xeb\xc7\xb5\x55\xbc\x97\x3d\xb7\xd9\x95\x8e\x23\xbd\x75\x09\xcc\x1c\x59\xda\xe0\xe3\x97\x52\x61\x26\x26\x89\x42\xb0\xd9\x62\x13\x98\xfa\xc7\xc0\x50\x96\xfb\x23\x21\xad\x1d\x78\x4d\x2b\x1e\xe6\x71\xe7\x9d\xf1\xe9\xa7\x98\x81\xb2\xc4\x02\xff\x74\xca\xb5\x96\x05\xbb\xcc\xa2\xc4\x57\xb2\x60\x5c\x82\x10\x39\xa3\x0e\x03\x15\x60\x22\x3d\xe1\xee\xe9\x21\xdb\x68\x6c\x25\x82\xf8\x04\xbc\xda\x82\xe0\x46\x7d\x94\x05\xf6\x92\xdc\xf3\x43\xe5\x3a\x3d\x99\x6a\xeb\x09\x8d\x6b\x21\x76\xd2\xfe\x9a\x1f\x5f\x5e\x5c\x20\x39\xda\x15\x9f\x07\x13\x41\x57\x8e\xe8\x84\x6e\x79\x2b\xf0\x77\x77\xe0\x0d\xdb\x4f\x98\x90\x20\x07\x78\xb6\xf7\x60\x8a\x0b\xb7\x96\x61\xc7\x6b\xf9\xb1\x57\x6b\xfa\xa4\xf6\xba\x07\x8d\xb3\x62\x03\x38\x0a\x51\xac\x23\x76\xa5\xbd\x35\x93\x44\x54\xca\x8c\x53\xef\x4a\xaf\xb0\x84\x27\x5c\x47\xc5\xa9\x90\xc4\xe1\xc4\x66\x3c\xc7\x5e\x9b\xc7\xc7\x3d\xaf\x06\x97\x9c\x84\x10\xde\x31\x21\x12\xab\xfd\x87\x29\xa3\xa1\x25\x38\xc3\xb9\x00\x4b\xea\x4c\xe6\x24\xcc\xa
1\xc7\x15\x7f\x18\xe1\x97\x0a\xed\x99\xfa\x64\xf9\x38\xb3\x12\x1a\x2f\x00\x26\x94\xa4\xd8\x3e\x57\x73\x54\xfe\xfd\xc0\xcb\x22\x3f\x6e\x16\x31\xfe\xbb\xe9\xb7\x2c\xeb\x97\xd8\xda\x9e\x38\xea\xb9\x98\x1f\xc4\x58\x11\xe5\xbd\xc4\x9d\x7a\x92\x2b\x9f\x6f\x27\x4b\xbd\xee\x6a\x7f\x47\x6f\x0b\x36\xd5\xdc\x90\xbe\xa5\x1e\x4f\x5d\x88\x42\x53\x21\xd7\x2c\xc3\x7a\xcb\xde\xce\x88\x2b\xd4\x43\xdb\x33\xca\xd9\x07\x10\xa0\x36\x87\x69\x93\x1d\x03\xd9\x6a\x73\x27\xa1\x13\xfd\x7b\xe1\x83\xd4\x95\x13\x62\xb9\x24\x38\xc0\x03\x02\xd6\x44\xe6\x21\x11\x12\xc3\x9d\x65\xc0\x3a\xe6\x29\xd4\xde\x5c\xc4\x0b\x63\xdc\x79\xc8\xe5\x85\x0a\xee\x92\x27\x61\xf6\x4b\x92\xae\x6b\x1c\xcf\x38\x39\x2d\x8b\xfb\xcc\x66\xf9\x26\x1e\x84\x41\x48\xd6\xdb\x63\x7f\x0e\xf1\xb0\x1c\x52\xbc\x82\xc5\x88\xb8\xf7\xe1\x36\x40\x61\xdc\x18\x5b\xbd\x02\x14\xe5\x29\x61\x9b\xe5\x89\xcc\xe4\xec\xde\xaa\xcf\x7b\x58\x0a\x76\x0e\xf7\xae\x76\x8a\x50\xe7\x33\xb9\x97\x9c\x93\x66\x01\xfa\x20\x7b\xb1\xe7\x91\xcf\x55\x5b\x81\xc5\xb1\x0d\xdd\xf0\x3b\xb2\x86\xc8\x80\x25\x7c\x62\xd0\x14\x7f\x0c\xdc\xde\x81\xc2\x48\x7e\xe3\xd8\xa3\x44\xdf\xf6\xe7\x25\xc0\x94\x6f\xc2\xa0\x1d\x61\x59\x80\xc6\xf4\xd6\x0a\x36\xe9\x24\x54\xad\xbf\x61\x6b\xcd\xc4\x42\x81\x2c\xf8\x69\x79\x39\xf3\x76\x12\xc8\xac\xb6\x2d\x04\xc1\xe4\xca\xe0\x52\xfa\x91\xea\x75\x9c\xad\x07\x6c\x18\x63\xe7\x43\x2c\x7e\x31\x67\xf5\x0c\x3d\xef\xb8\xf2\xe2\x1c\x11\xad\xee\x67\xb8\xd4\x88\xed\x34\xbb\xb2\x08\x16\x4c\x01\xca\x20\x88\x8f\xb4\x5e\xce\x1c\x41\xc0\x8d\x41\xb6\x9e\x09\x2c\xce\x16\x99\x1f\xcb\xad\x59\x22\x6c\xa0\xad\xed\xa7\x12\xc6\xcf\xb4\x3d\x19\x96\x02\x10\xa6\xa3\x5e\xe7\xdd\x71\xd4\x92\x07\xd2\x1a\x3d\xd8\x3d\xe8\x0a\x52\xd9\x7d\x97\xef\x32\x1e\x95\x6f\xb9\x77\xb3\x75\xdb\x61\x68\x24\x5e\xc1\x9a\x78\xc6\xb9\x4a\x90\x67\xb7\x69\x19\x89\x88\x24\xeb\x37\xe3\xf8\x53\x0c\x20\xe2\xc6\x89\x80\xc5\x1c\xeb\x2b\xf0\xe3\x36\x26\x44\xe9\xf4\xc0\xb3\xd2\xf1\x1a\x26\x28\x5f\xfc\x06\x8d\x9a\x1f\x9a\xf8\xd0\xbe\xe9\xa2\xea\x4f\xb8\xf1\x11\xe7\x7b\x5b\x06\x52\xba\x1d\xde\x63\x16\xfb\xc6\xf0\xba\x05\x87\x5d\x65\xc9\xe8\xb5\x81\xd4\xca\xa5\x1a\x00\xaf\x5c\xa1\x6e\xb1\x59\xf4\xe4\x48\xae\x2b\xb1\x02\xcd\x29\x80\x18\x05\x3e\xda\x56\xae\x8b\x51\xcc\x88\xce\x35\xe9\x33\xf4\xa4\x7c\x46\x36\x6c\xab\xac\x39\x6a\xad\xdf\xac\x89\x16\x86\xf1\x03\xa1\x70\xc5\xf6\xcc\x25\x88\x45\xf3\x07\xff\x20\x21\xba\x34\x08\x44\x94\xce\x3b\x2d\x67\xb2\x98\x51\x3a\x73\xb8\xec\xae\x7e\x96\x48\x57\x45\x30\x60\xde\xed\xa1\xe6\x22\xb1\x15\x53\x0a\x31\x70\xe6\xff\xdf\x9e\x16\x17\xbf\x3b\x72\x07\x69\xb0\xe9\xa8\xe6\xc2\x54\x54\x8e\xd1\xc2\x97\x41\xea\xf0\x8c\xee\x37\xf4\x30\xf1\xcb\x67\xa7\x67\xcf\xc5\x5d\xb0\xfc\x16\x2c\x77\xf1\xb4\x6b\xf3\x0a\xc9\xc4\x1b\xa4\x80\x05\x30\x3f\x82\x8c\x6a\xbd\xf4\x8f\xc4\x97\xfe\x7f\x59\x86\x59\x9a\xe8\x6a\x17\x7d\xd2\x88\xd7\xd0\x7b\x0a\x30\x20\xf9\x27\x84\x4c\x43\x28\x7f\xd0\x00\x22\xa6\x6a\x7a\x55\x23\xff\xe4\x61\xeb\xf6\x7c\x22\xa5\xb9\x7f\x25\x4e\xaa\x48\xb4\xcc\x84\xfc\x4a\x60\x85\x32\x1a\x5c\x57\xc8\x0c\x0c\x8e\x59\x83\x40\x9c\x23\x71\x46\xaf\x68\xfe\xc1\xc0\x30\xd6\x7f\x0c\xdb\xa8\x7a\xdb\xb1\xf0\x9e\xdf\xab\xad\xe1\x16\xac\x72\x54\x60\x6c\xc2\xd3\xd6\x29\x47\x96\x4c\xde\x08\xe5\x0d\x68\x27\x54\x93\x90\xac\xde\x81\xf7\xa2\x9d\x04\x7a\xc4\x52\x8d\xa2\x0a\xcd\xda\x1b\xeb\xb3\xc1\x56\xd9\xa6\x99\x71\xd2\xda\x0c\xb2\x86\x81\xc7\xe9\x67\xca\xb5\x86\x5b\xd1\xbf\x1e\x91\xa6\xe7\xc0\xfa\x43\xd8\x08\xdf\x00\x0f\xd3\x2c\x95\x81\xa0\xbd\xa1\x1a\xea\xc0\x3b\x62\xf8\x92\x99\x1d\x89\x78\xdd\x71\xdc\x21\x7b\x62\x34\xa7\xc0\xda\x03\xcb\x6c\xe1\xce\x09\x69\xe5\x3c\xfb\x1d\x5c\xf0\xac\x7b\xe8\x9e\x65\x14\xfb\x91\x19\xb9\xd8\x
54\xb2\xa3\x40\x4f\xff\x59\xf3\x64\xe6\x09\x18\x2f\x48\xaa\xe2\x30\xcc\x2e\xc7\x48\x9f\x48\x02\xc9\x01\xc5\xfa\xfa\x4b\x8c\x8c\x93\x98\x35\x49\x79\x20\x8c\x22\x14\x9b\xe0\x25\x9b\x2c\xae\xad\xc2\xf9\x85\xdc\x01\x62\x55\x0d\x58\xb0\x47\x2f\x7a\xee\x37\x51\xb9\x76\x88\xa7\xc1\xb6\x25\x5a\x69\xee\x88\x9c\x1e\x16\x83\xec\x78\xa1\x46\x5b\x68\xbe\x74\x6d\x59\x58\x7a\x79\x08\xb9\xc4\x9f\xd8\xba\xed\xe2\x67\x41\x7c\x52\x27\xfb\xb8\x15\xc8\xfb\xc3\xa0\xfe\xad\x26\xd6\xbb\x6e\x24\x24\x1d\xc0\x9e\x1c\xc8\x6c\xe9\xcb\x44\x14\x45\x5c\x5c\xd8\x86\xb3\x3c\x79\x80\xe4\x46\xe8\x8e\x76\x37\x2c\x9a\xba\x61\xd3\x24\x44\x7d\x67\x26\x49\xc5\x52\x72\x0d\xac\x3c\x1f\xd5\x7d\x76\xa3\x81\xd8\x40\xd2\x1c\xfb\x8b\xbd\xb1\x46\x7d\xfb\xe6\x7f\xfc\x40\x44\x9a\x10\x5d\xca\x04\x20\xee\x84\x3f\xe0\x75\xc8\x7d\x06\x6a\xa4\x51\x8d\x59\xb6\x66\x88\x8a\x1f\x48\x3b\x44\xe5\x1b\xa5\x48\xea\x69\xa9\xeb\x54\x3a\x33\xc1\x50\xf9\xc5\x54\x70\xe5\x97\xf2\xb5\x14\x28\x9b\xda\x32\x9d\x91\x21\xff\x4c\x18\xee\x8d\xd7\x78\xca\x72\x12\x68\x9b\xdc\xfa\xbe\x91\x6e\x2a\xfe\x42\x17\x18\x50\x7b\x92\xe4\x07\xd9\xdb\x27\x54\x09\x40\x11\xc0\xac\x8a\x46\x01\x28\x2a\xf3\x5d\xe0\x41\x10\xbd\x65\xc9\xcd\x11\xc6\xc3\x8b\xc9\x3a\xac\x96\x9b\x4d\xe2\xfa\xed\x68\x58\x10\xd2\xa3\xd5\x09\x24\xf5\x0b\xf1\x39\x2e\xba\x99\x99\x2d\x66\x54\xd2\xcb\xc7\x9e\x60\x05\x7e\xd6\x6e\x3f\x35\x19\x88\x4f\x52\x7b\x64\xc7\x97\x29\x9a\x2d\xf2\x6b\x04\x66\xfa\xd6\x74\x60\xc1\xc1\x75\x45\x5d\x99\xb9\xe1\x06\xbc\x0b\x81\x58\x31\xdd\xbb\x2d\xa5\x5d\x28\x7b\xeb\x8a\x6c\xcf\xbe\x26\x4c\x18\x2c\xff\x7c\x11\x76\x5b\xcc\xf0\xb6\x4b\x83\x6c\xfe\xf9\x8d\xe6\x6f\xf1\x9a\xf9\xa4\x85\x18\xc0\x33\x92\x29\xb2\xda\xb6\x0e\x63\x4d\x74\x43\xe3\x56\x2b\x61\xbd\x03\xad\x99\x64\xe4\xf6\x5b\x77\xb8\xfb\x01\x1b\x02\xe0\x58\x31\x0d\x5b\xe8\x6e\xa0\xde\x55\x3f\xb6\xfe\xa5\x96\xdf\x19\xf1\x5f\xd0\xbc\xe5\x26\x24\x64\x98\xbe\xd6\x55\x08\x3f\x14\xb9\xa1\x48\xd8\xc4\x5e\xca\x83\xd1\xe2\xcb\x59\x69\xe5\x40\x40\x09\xfc\x99\xe5\x0a\xe2\x10\x7a\x50\x68\x99\xd3\x61\xdb\xb3\x96\xe5\x89\xa4\xf9\x29\xff\x16\x56\xda\x7a\x7b\xd8\x59\xeb\xae\xb6\xc1\xb1\x49\xc0\x67\x90\xf5\x32\x92\x97\x26\xd6\x87\xb8\xb3\x46\x24\x5b\x37\x93\x5d\x7b\xe4\x22\xce\xab\x98\xef\x84\xcf\xa8\x84\xc2\x61\x52\xb5\xb6\x4c\x97\x2d\x76\x02\x09\xa9\xc4\x8f\x2c\x08\xdd\xc8\x52\xb4\x7e\xa0\xe7\xb5\x38\xa2\xa0\x27\x67\x25\x20\x26\x46\xd9\x0c\xdf\x25\xa9\x8b\x59\x80\x1a\xc6\x65\xc6\xc2\x8b\xa9\x9e\xc4\xd5\xc3\x8d\xd4\xfd\xeb\x32\xff\x62\x7a\xea\xa2\xfd\x61\x37\xe4\x44\xee\x86\x80\x33\x5b\x7c\x16\x56\x20\xe7\x12\xba\x60\x0d\xae\x82\x18\xef\x4a\xc4\xff\x3e\x8e\x19\x20\xa2\x3b\x34\x4d\xb3\xc7\x4a\x07\x2b\x26\x53\x47\xa0\xa5\xd9\xbd\xfd\xb0\x18\x18\x43\xf3\x70\x6a\x3a\x4c\xcc\x61\x96\x32\x4d\x13\x41\x5a\xe6\xa2\x72\xc9\x79\x03\x22\x53\xa8\x2b\x1b\xda\x67\x7f\x87\xe8\xd6\xec\xd0\x55\x1d\xe5\xf9\xe4\xce\xff\xa9\x9f\x7a\x16\x33\xee\x34\x70\xb5\x4f\xc9\x50\x68\x25\x17\x30\xe8\x8b\xf9\x0b\x1c\xa2\x54\x2e\xbb\x6f\x78\x6e\xe2\x7d\x13\x0f\x85\x27\x91\x2d\x85\xe9\xd9\xc3\xed\xa0\xdf\xc0\x1b\xd4\xb2\x36\x66\x83\x92\x56\x55\xa0\xb2\x33\x00\x3d\x8b\x8b\x08\x5c\x62\x19\x73\xdc\x68\xc6\x46\xe9\xfa\xb7\x4b\xf2\x63\x90\xb8\xd6\xcd\x80\x68\x85\x27\xcf\x8c\x23\x7b\x60\xc7\x18\x4a\x23\xe6\xbe\xc2\x09\x9e\xe9\x9f\xdd\x83\x98\x59\x6a\x0f\xbc\x38\x08\x91\xe4\xfa\x4d\x76\x26\xc4\x28\x30\xe9\x48\xe7\x4e\xe2\x05\x0a\x57\x17\x2f\x81\xae\x0c\x13\xab\xb5\xd0\x2e\x55\xb5\xde\xfa\x85\x1a\x3e\xfd\x8e\x96\x18\xca\xa8\xd8\xc5\x7a\x20\x98\x1c\xf9\x10\xcb\x72\x8a\x7d\x21\x32\xf1\x2c\xd0\x22\x41\xda\x41\x7d\x63\x1e\x64\xb5\x2e\x38\xc7\x8e\x34\x7a\x96\x5b\x1a\x37\x67\xfa\x20\xbf\x0a\x35\x31\
xa3\x21\x2c\x3d\xe1\x8a\xc7\x7c\x0f\x18\x57\x22\x7e\x10\x06\xe4\xd6\xc2\xa0\x79\x00\x7d\x77\xa6\x3a\x33\x5f\xe9\x0a\x1f\x8f\x59\x62\xae\x56\xfe\x9e\x15\xbb\xf7\x3a\x6b\x41\x00\xa1\x11\xf9\x20\x51\x2d\x7e\x2e\xf4\xb2\x93\x99\x30\x15\x10\xbe\x64\x07\xf5\x60\xdc\x25\x33\xf9\x5b\xa2\xe4\x34\x08\xe7\x92\xbc\x55\x2d\xc0\x8b\xbc\xc8\x2b\x37\xda\x16\xe2\x95\x56\x0b\x34\xee\x70\x25\x78\x9e\x84\x47\x17\xff\x49\xf8\x5c\x41\xa3\xee\xd9\xab\x91\xd1\x12\xe3\xd6\xbd\x5b\x20\x68\x8c\xab\x40\x1e\xca\x49\x33\x7f\x47\x49\x6a\xcc\x13\x81\xd1\x61\x74\x06\xfe\xc5\x59\xd9\x4d\xf6\x69\x31\xe2\x6a\x17\x5c\x70\x46\x9a\xd7\x96\xae\x0b\x78\x02\x2e\x76\x48\xb0\xeb\x01\xe0\xac\xfc\x49\x46\x97\xca\x62\x00\x20\xa3\xcf\x80\x93\x04\xb0\xdb\x8e\xe7\x54\xde\xb0\x2f\x2f\xa0\xe4\xb6\xe5\x6f\xd4\x63\xab\xd2\xc9\xf0\xd4\xea\x50\x22\xe0\xa5\x2b\x5d\x34\x51\x12\x0a\x32\xf1\xd7\x7a\x21\x50\x40\xb1\x5a\x1b\x0b\xc7\xb7\x6d\xe3\xe2\x89\xb9\x16\x23\x8c\xef\x17\x9f\x1a\x29\x30\xc6\xce\x6a\x43\xe5\x16\xc3\x7c\x2a\x4f\x00\x41\xc6\x11\x0d\x8b\x97\x88\x12\xee\xd3\xc2\x16\x78\x58\xcf\x9d\xee\x72\xe4\xc1\xcd\x1b\xbb\x3a\x25\xfc\xcd\x87\x59\x53\x71\xb2\x76\x7a\x1d\xd5\x6e\xe6\x9c\xbc\x66\x14\xcb\x95\x79\x55\x24\x36\xb3\xcf\x55\x3a\x60\x70\x0a\x13\xf9\x45\x38\x76\x06\xc2\x15\x04\x1d\xca\x0a\x6c\x6e\x83\x71\x4c\x66\x62\x9a\xcd\xcb\x62\x72\x10\x81\xc2\x91\x89\xa3\x20\x87\x07\x63\xd5\x8e\xaf\xee\xc4\xa2\xb8\x87\x43\x4c\x0c\xfb\x42\x1e\x39\xc9\xd1\xc5\xe3\x99\x9b\x98\x4a\x7b\x3b\x81\x8e\xf0\x84\xde\xf5\x1b\x41\xdf\x15\xc4\x74\xdb\x36\x1b\x9d\x09\xa1\x02\x73\xa8\xf3\x2c\x58\xba\x74\xc2\x4e\xd9\xef\x01\x4d\xc8\xfd\x74\xca\x1b\x36\x5a\x63\x90\x08\xf2\x84\x78\x64\x98\x7d\x2e\xd7\xd6\xbd\xa2\x70\x27\xa7\x42\xb4\x3f\x6c\x4b\x45\xad\x89\xac\xeb\xd1\x38\x17\x2e\x90\xb2\x13\x1f\x56\x51\xe3\xc1\x09\x5e\xe0\x5c\x58\xab\x09\x53\x05\xcb\x51\x52\xcf\xd7\xa8\xf9\x9a\x8b\x96\xbc\x00\x56\x96\x94\xa1\x49\xa9\x2d\x86\x7c\x7a\x51\x58\x5e\x90\x7c\x1e\x85\x34\x02\x63\x42\x04\x09\x17\xc9\x2f\x6a\xea\x17\xc6\xb6\xef\x41\x37\x71\x90\x62\x76\x78\xc9\x99\x9d\xf6\xd2\xa4\x7d\x14\x16\x98\xaa\x1b\x40\x35\xd9\xfc\xb8\x99\x69\x6d\x3a\x1d\x2e\x32\x9e\xa1\x9d\x82\x64\xc9\xe9\xd2\x77\xb7\x52\x12\x3c\x99\x7d\x0b\x5f\x47\xab\x51\x63\x75\xdf\x41\x78\xfa\x8b\x5a\xc9\x39\x5e\x40\x86\x8b\x83\x3a\x91\x6a\x1b\x5c\x92\xa7\xbb\x4e\x4d\x3c\x90\xe5\xc8\xad\xc7\xb3\x2d\x08\x6e\x35\x98\xb1\xd4\x2e\x5c\xd9\xf3\xb8\x43\x79\x48\x08\x9d\x73\xb1\x54\x5e\xfd\x61\x72\x00\xd2\x8e\x50\x05\x90\xc1\x24\xa6\xa5\xdf\x48\xa7\xae\x25\x46\x7c\x40\xbf\x10\xcc\x34\x59\x27\x5b\x96\x78\x4b\x84\xf8\x7c\x78\xdc\xec\x95\x0d\x46\x0a\x4f\x87\x7a\xe7\xd0\x3d\x7a\x76\x02\xe5\xda\xb5\x78\xc0\x8b\xac\x0c\x40\x8b\x47\xcd\xf7\xbd\x35\x5b\xc5\xde\x88\x65\x03\xd0\x8d\xfd\x50\x4c\x04\x37\x03\x3c\x0c\x3c\x43\xc3\x69\xd9\xbc\xb2\x83\x83\xf1\xec\x66\x45\xe6\xe2\xa5\xc5\x2b\x48\xd2\x88\xda\xfe\xaa\x55\x10\x4b\x10\x66\x24\x18\x9b\x1a\x7e\xa0\x51\xcd\x03\xf0\x0b\x77\x3c\x9e\x83\x75\xd3\x82\xb3\x27\xd4\xa3\xc5\x3b\xb1\x74\x9e\x9c\xa4\x26\xf6\xd8\x73\xa1\xd2\xc9\xea\xcb\x49\x72\x57\xfb\x90\x5b\x7c\x4f\xf6\x92\x39\x10\x98\xe2\xa9\xd6\xc1\xa2\x97\x46\x95\x6f\x58\x14\x6c\x1d\x2c\xbe\xa1\xbc\xe5\x41\xe8\x18\x64\x0c\xb5\x5a\x4e\xb7\x94\x4f\xed\x04\x0c\xe8\x2b\x2f\x42\xce\x72\xa4\x59\x62\xb9\x87\xb3\x64\x7a\x23\xa7\x82\x06\x0e\xd5\x93\x6e\xe4\x54\xbb\x12\x62\x8f\x19\x75\x6c\x1d\xc1\x4d\x3f\x80\x48\xc4\x33\xa1\xf2\x1c\xaa\x39\x02\x56\x59\x70\x2b\x43\x5f\x12\x71\xa7\xb0\x5a\x09\x3d\x28\x2f\x17\x3e\x44\x62\xf3\x6a\x81\xd0\x9a\xae\x61\xc0\x5f\x08\x17\x98\xc2\xbc\x80\x21\xc9\xb9\x25\x1a\xd6\x5a\xb2\xba\xbe\x98\xfd\x41\xe1\x9a\xb5\x4a\x6d\xa9\xc1\x52
\x70\xc0\x17\x86\x67\x9a\x31\x68\x4e\x25\x8e\x3d\x02\xd1\xc8\x8c\xfa\xdd\xf4\x6a\x21\xd3\x7e\x53\xa9\xe5\x8c\x13\x3b\xdd\x89\x12\x15\xdc\x56\xcd\xf8\xfb\x8e\x96\x28\x27\xe3\x76\x16\x4a\x43\xf9\x6c\x2b\x49\x1f\xac\x26\x03\xa7\x73\x5c\xf4\xde\x31\x98\x13\x3b\x4e\x93\x23\xfb\xcf\xf4\x38\x62\x27\x3f\x12\x00\x90\xb7\x8a\x38\xa4\xc9\x53\x7b\x02\x77\xe1\xc3\x3a\xaa\x5f\xea\x3c\x80\xbe\x89\x7d\x74\x6a\x80\x13\x64\x24\x53\x64\x24\xa1\x06\x09\xa6\x75\xea\x9e\x9b\x2a\x55\xd1\xd9\xab\x28\xca\x19\xc1\x97\xc8\x24\xd9\x43\xca\x7e\xe0\x61\xe9\x92\xa0\xb3\x8e\xe1\x11\x5e\xe0\x7f\x7f\x53\x5c\xc9\x53\xbe\xe4\x1a\xb3\xb4\xb1\x85\x85\x31\x78\xe5\xd3\xb6\x7e\x90\x94\xd4\xbd\x28\xf5\x1b\x5e\x2c\xd0\x6f\xb0\x90\x7a\x79\x19\xec\x88\x3d\x6c\x33\x89\xa4\x61\xcb\x38\xcf\xc0\xc4\x2a\x1c\xa4\x3d\xd7\x7d\x06\x64\xac\x7f\xfa\x04\x0e\xc9\x32\xc0\xb8\x3d\x70\xfe\x7e\x0a\x8d\x71\x0b\xe4\x81\x6d\x71\x61\xa6\xb5\x71\xb9\xf5\xbf\x66\x16\x12\x42\x14\x89\x38\x33\x56\x52\x6e\x3e\x3d\xbd\xfc\xb9\xf6\xd7\xbe\xd2\xec\xae\x3d\xa1\x96\x65\x10\x6e\x22\x6a\xc9\x1c\x44\x5d\x2d\x8e\x45\x4b\xf6\x6b\x0e\x0a\x04\x9a\x8c\x78\x82\x2f\x23\x2f\x12\x58\x3f\x1d\xf8\xad\x51\x2f\x4e\x2f\xaa\x9e\x44\x44\x32\xe2\x9f\xe8\x60\x87\x96\xea\xf0\xbe\xf0\xa5\x57\x0c\xa4\x05\xc4\xee\xec\x51\xc9\x6f\x9c\x89\xa4\xdc\x2a\x37\x61\x16\x0d\xb9\xf9\x63\x21\xee\x00\x4b\x36\x0f\x52\x3c\xbe\xad\x3c\x7a\x06\x6d\xb3\xa7\x39\x11\xa1\xe3\x3d\x08\xea\x65\xc3\x3b\xec\x91\xc4\xb0\x83\xc6\x16\x1c\x27\xa3\xa5\x34\xbe\xae\x25\xe5\xa7\xc4\xc6\x01\x95\x93\x16\x09\x70\xa9\x2f\xb2\xc8\xd9\xec\x8c\xfe\xd5\x00\x87\xb9\x77\x44\x51\x24\xde\x3c\x50\x92\x76\x0e\x8f\xbb\xf9\x7e\x27\x00\x1d\x6f\x7c\x68\xe3\xf5\x51\xcf\x4e\xaf\x7a\x79\x31\x2f\x7f\x70\x0d\x32\x41\x61\xcd\x7c\x71\x66\x96\xc1\xad\xde\xfe\xa4\x5a\xce\x26\xfa\x0c\x4a\x4e\x40\x49\x59\x83\x5d\x56\x12\xd6\x8e\x6d\x8c\xf6\xac\xb9\xfc\x85\x02\x9d\x76\xef\x21\xaa\x18\x5f\x52\x10\x36\xef\x45\x90\x9c\x3e\x7b\x0a\x21\xb9\x51\xb2\x9b\x6b\xa8\x9a\x5d\x06\xcb\x35\x4f\x5a\x16\x96\x55\x65\xbd\xf8\x57\x2c\x8e\xc0\xf9\x0d\x6c\x1a\xb1\x3e\x13\xf5\xea\xfa\x17\x3f\x8e\x63\x35\x05\x93\x19\x12\x6b\x7c\x6e\x11\x81\xf6\x68\x30\x7d\xfd\x86\xd6\x62\xf4\xf8\xb7\x2d\xcc\x02\x21\xed\x6d\xf9\x28\x7c\x5a\xc6\xe6\x1c\x86\x5d\x63\xc1\xbe\x09\x59\xe0\x49\x5b\x9d\x31\x8e\xdc\x03\x47\xdf\x2c\xe3\x19\x2e\xb8\x99\x56\xba\xc5\x65\x59\x29\xa5\x88\xf7\x90\xb7\xbc\xa3\x1d\x95\xc2\xb0\x56\x4c\xa9\xd3\xf7\x25\x2e\xd0\x3b\x61\x86\x2c\x4c\x3c\xf4\x12\xa0\xdf\xd8\x82\x26\xa6\x29\x22\xe1\x23\x9f\x4b\x7e\xb7\x59\xd2\x14\x6e\xae\xf1\x1b\x68\xf9\x6b\x30\x2d\x2f\x43\x6c\x2d\x02\x3d\xc3\xc7\xd7\xca\x01\x9c\x27\xb0\xe7\x5e\xf9\x8c\x53\xcc\xfe\x8a\x4a\x1d\x7d\x3e\xc8\xaf\x04\x86\x7e\x9a\x9b\x20\x7b\x09\xc8\xcd\x07\xa1\x52\x7b\x3b\x39\x41\x6c\x2b\xda\x98\x2e\x92\x83\x90\x2a\x2d\xfd\x15\x68\xfd\x40\x16\xfa\x97\xc0\x41\x50\x38\x9f\xd8\xc2\x16\x11\xce\x2b\xd8\xce\xf2\x6d\x49\xb7\xbd\x4d\x46\x85\x50\x31\x13\xbe\x56\x9a\xbd\xb2\xf0\x72\xd0\x98\x55\x21\x09\xf7\x91\x75\xbf\xed\x2c\xd1\x55\xe7\x17\x72\x27\x79\x7b\x22\x21\xd1\x5e\x6f\x8f\x0e\xfb\x2f\x0e\x63\xc9\xfd\xe4\xd5\x95\xf3\x93\x90\x33\x5b\x1d\x42\xcd\x97\x9d\x5e\x65\x9a\x58\xdc\x32\x31\x3e\x13\x4b\xb5\x74\xd4\x35\x79\xac\x25\x21\x25\x58\x78\x97\x89\xcc\xce\xb6\xb1\x0a\xbe\x15\x66\xc5\x8a\xd4\xe0\x80\x0f\x7e\x90\xc4\x12\x2f\xbb\xb1\x5a\x29\x1f\x3c\xa0\x8b\x5b\x16\xd1\x9d\xf1\x53\xb4\xca\xd0\x1c\x75\x4b\x03\x1d\xce\x65\xc2\xf3\x86\x5e\x09\x9d\x9e\x12\x14\x24\xb7\x62\xf0\xa0\xa4\xab\x68\x9e\xd8\xae\x52\xbe\x66\xc4\x36\x4e\x36\xd7\x96\x5d\xb6\x56\xd2\x80\x05\x0f\xc7\x1a\xb7\xc6\xf5\xfa\x0d\x6f\xe7\xc8\xbd\xed\xf8\x6d\x73\x6a\x1a\xda\x5a\x3
6\x7a\x6f\x2c\x8c\x35\xe7\x39\xab\x8d\xc4\x1d\x94\x70\x9a\xd8\x02\xb9\x26\x4e\xf4\x5a\x71\xce\x02\x69\x68\x4a\x38\x7a\x18\x1a\xee\x0c\x48\x83\x88\xe7\x4e\xb8\x27\xd8\x5a\x2e\x8e\x07\x07\x40\x3b\xb6\xf4\x28\x72\xc6\x76\x8f\x64\xe5\x2e\x3c\x80\x12\x47\xf0\xaf\x1d\x7d\xb0\xf8\xc9\x02\xf4\x0a\x9c\x62\x88\x76\x98\x7e\x98\x56\x6e\xa3\xf4\x5e\x16\x52\xab\xe6\x9e\x5f\x5e\xa6\x84\xf3\x89\xdc\xc1\x33\xae\x95\x01\x9f\x57\x22\xf4\xd3\xcd\x2d\x77\xad\x5d\xd2\xd1\xea\x79\x60\x99\xfc\x26\x8e\x6f\x70\xa1\xa4\x28\x83\x9e\x51\x24\xa0\x35\x67\x7c\xfe\x61\xf8\x1d\x1f\x7a\xfb\x9f\x6e\x90\x3b\xa8\x01\x5e\x0e\xb4\x3f\xbd\xc0\x1f\x69\xca\xc5\x94\x2e\x80\x5b\x3d\x75\xd0\x17\xb4\x4f\x6f\xf1\x96\x8a\x81\xb7\xe7\x70\xe0\xee\xdc\x5f\xaa\x8b\x7e\xf5\x09\xbf\xa0\x4c\x0e\xc3\x7d\x39\xb7\x73\x4d\xce\x89\x9a\xdd\xcf\x83\x19\x9b\xbc\x55\x43\xee\xb1\x36\x27\x90\x3d\x5e\x4f\x22\xf9\x62\x86\x26\x7b\xdf\x83\x39\x8e\x5e\x61\xec\x11\x48\x87\xa8\x64\x00\x3a\xb0\x5d\xc5\x21\x61\x5b\xcf\x4a\x33\x2e\x31\x8a\xbc\x07\x48\xde\xcb\xc2\x1b\x93\xfb\x0f\x49\x0d\x6f\x33\x06\x3f\xf2\xb6\x04\x86\xb6\xfe\x39\x51\x2f\xec\x8e\xfc\x2c\xde\x8e\x50\xb7\x12\xf0\x9c\xc5\x88\x73\xb0\x72\x7b\xf2\x8c\xeb\x0f\x8c\xbd\x81\x04\x89\x04\x85\x5c\x1b\x23\x42\xcb\x1b\xd4\x08\xb4\xd7\x36\x47\xa3\x48\xa6\x24\x2b\x2b\x98\x45\xb2\x0a\x83\x38\x63\x5a\x69\x76\x41\xe9\xb8\xe8\xce\x94\xe0\x95\x1c\x48\x79\x5b\xe8\x23\xf7\x54\xbe\x65\xce\x73\x63\xf3\xc6\x82\x53\x56\x95\x04\x7e\x3c\x10\xd1\xe3\x2a\xc9\x8b\x79\xc5\xc8\x96\x56\x21\x0c\xc0\x96\x28\xce\x5f\xcc\x54\xc3\xc7\xfb\xa3\x0c\x98\x63\x0e\x34\x3b\xd3\x49\x72\x05\xb8\x02\x59\x7c\xa6\xf0\x7b\x60\xf4\x21\x26\x09\xdf\xf2\x03\xba\x8d\x45\x2c\x87\x65\x49\x12\x93\x22\xf6\xc8\xc1\x16\x62\xaf\x28\x17\x48\xc8\x1d\x2d\xe7\xd3\x16\x08\x4b\xad\xb2\x64\xce\x96\x31\x86\x09\x05\xb1\xb2\x0d\xce\x04\x36\xfb\xe8\x0b\xf9\x1b\x1e\x91\x4e\x19\xa4\xb3\x1a\xd9\x8c\xf2\x14\xff\x0f\x4a\xed\x41\xff\x55\x78\x9a\xce\xa0\x68\xd0\xc3\x38\x7d\x2f\x04\x74\x92\x70\x8f\xa6\xab\xca\xbc\x76\xa4\x8a\x56\x51\x7a\x80\xdf\xc2\x9c\x65\x50\x4f\x73\xd2\x95\xde\xcf\x4e\xd6\xcf\xd3\xc1\xe5\xc5\xf9\xda\x9b\x86\x7f\xaf\xb2\xc2\x71\x81\x0b\x5f\xb5\x96\x03\x84\xd2\xd9\xa9\x6a\x83\x12\x13\xd3\x99\xbf\xfb\x1b\x2e\x60\xe7\x1b\xb9\xa6\x9b\xa4\xd0\x8c\x7b\x8c\x2a\x64\x9f\xf3\x82\xe7\xed\x8b\xbc\x7a\xff\x67\x57\x20\xac\xc3\xf4\xd1\x21\xdd\x35\xfe\x60\x49\x07\x27\x44\x0d\x26\xd3\x86\xbc\x00\x1a\xdb\xbc\x19\xcb\xda\x70\x0a\x0f\xe7\xc6\x6e\xa5\xa2\x9d\x19\x40\x96\x77\x76\x07\xae\x23\xad\x7a\x16\x10\x19\xc4\xdd\x36\xe7\x44\x6a\x59\x83\x1d\xcf\x4f\x46\x68\x12\x59\x35\x14\x9a\x1b\xb2\x19\x79\x68\xe7\x61\xcf\x19\x88\xf2\xc6\x44\x20\x36\xd9\xa7\x57\xc4\x96\xb1\xd3\x59\xe1\x1b\x5f\x26\x6f\x6b\xc8\x7d\x79\xdb\xfd\x91\x8d\x72\xc0\xd0\xb8\xa3\xc8\x78\xf8\xf7\xe3\x2f\xfe\x85\xf0\x23\x0c\xf2\x07\x38\x5b\x39\x6d\x0f\x4e\x80\xd2\x46\xae\xb0\xe3\xa1\x89\x1c\xaf\x03\x0e\x85\x42\xeb\x76\x68\x25\x23\xe6\x13\x55\x76\xff\x2d\xa3\x25\x64\x66\x09\x3d\x5c\x5a\xf2\xf1\xfb\x82\x4c\xdc\xe2\x5f\x5c\xbd\xb7\xf7\x67\xe4\x42\x36\xa3\x96\xf0\xf0\x1a\xff\x9c\xef\xce\x97\x00\xaf\x36\x39\xae\x26\xef\x09\xce\x0f\xb9\xf6\xd1\xfa\x21\xec\xe7\x1c\x50\x95\x38\xa7\xfc\x00\xef\x7e\x3a\x85\x43\xdc\x35\x69\xc8\xac\x2d\x69\xf4\x60\xb0\x48\xf9\xd3\xd0\x46\x73\x3c\x95\x77\x63\xbe\x55\xc5\x21\xa1\x84\x93\x10\x89\x99\xf5\x1d\xa6\x0f\x10\x36\xab\x2c\x7d\x06\xc3\x45\xb9\x02\xcc\x66\x99\x33\x3f\xfb\x0a\x26\x88\xa6\xc5\x48\xca\x03\x3a\xd4\x15\x8d\xb2\x87\x6f\x6e\x4b\x01\x96\xbd\x47\xac\xa9\x05\x54\xf0\xb2\x99\xd1\x4c\x85\x5d\x61\xcc\xd1\xe4\x3a\xd8\x9b\xcc\xf4\xef\xb2\xff\x16\x9b\x06\x92\x7d\x64\x39\x16\x42\x5f\x0a\xd8\x9a\xc5\x
4c\x60\x11\x4e\x1f\xfe\xcf\xa8\x7c\x19\x0f\x17\xac\xbf\x66\xc1\xd8\x6f\xbc\x50\x83\x30\xd4\x7e\xbc\xb8\x96\xd1\x9d\xd7\x65\x1b\x21\x8f\xcc\xb4\xd8\x5a\x86\x82\x5d\xf9\x0a\x54\xa7\x46\x96\xf7\xcf\xf8\x26\x8b\x44\x33\x30\x84\x9a\x7d\xc2\xb2\x06\xee\x9e\xdc\xe0\xb6\x04\x46\xbd\xba\x49\x91\x75\xa5\x99\x2c\xe4\x0f\xf0\x2d\x6d\x63\x75\xc3\x43\xb6\x84\x2c\x7b\x44\xe6\x8f\xb8\xd7\x9a\x66\x48\x90\x91\xe4\x71\xc3\x46\x5b\x31\x99\x5c\x24\x2d\xfa\xf2\xaf\xd3\x75\x79\x05\x6d\x10\xfa\x9e\x39\x38\x54\xb7\x7c\xbc\xdd\x06\x21\xb8\xd8\x17\x1e\x83\x5b\x23\x30\x64\x8b\x4f\xf2\x06\x4a\x20\x61\xe8\x15\x7e\x0b\xeb\xb3\x4e\x74\x55\x4b\x0b\x06\x76\x20\x51\x5c\x92\x64\xfa\x2b\xd9\x22\x27\x5c\x94\x81\xe4\xb0\xea\xf0\x1e\x40\xa7\x93\xa0\xa6\x18\x6d\xfd\xac\xa9\x0b\xb4\xaa\x4b\xe3\x1c\x42\x3a\x84\x9b\xe7\xd2\x6f\xb0\x9b\xc3\xd7\x8e\x77\x25\xc8\x22\xd8\x8a\xe6\x95\x16\x72\xaf\x27\x60\x6c\x93\x86\xd0\xa0\x7e\xc6\xaa\x7c\x30\x5d\x97\xc8\x9a\xfa\x12\x47\x29\xe6\x76\x4a\x9b\x3a\x8b\x93\xd9\x33\xa9\x31\x5a\xf8\x64\xbf\x1b\x2c\x6f\xe9\x0d\xe4\x17\xb9\x8d\x12\x18\x03\x86\x77\xb6\x43\x42\x62\x6d\x0c\x76\x7b\x91\xff\x3a\xe3\x8e\x51\x7d\x42\x8c\x62\x27\xce\xd8\xcb\x33\xf2\xc6\xd3\x7e\xd1\x10\xa9\x07\xb3\xc7\x37\x57\x5a\x6d\xb7\x7c\xd5\x33\xb3\x8b\xb6\x15\x67\xc0\xea\xee\x23\xe6\x60\xa1\x9b\x2c\xee\xec\xb5\x48\xb9\xfc\xde\x96\x6e\xf1\x04\x8b\x16\x4c\xbc\xfe\xd3\xbe\x18\x8f\x00\x34\xe1\xbd\xd4\x4c\x19\x20\x90\xae\xc6\x83\x3e\x7b\xaf\xff\x1c\x26\x23\x9a\x1a\x3c\x96\x58\x3e\xd7\xbb\x07\x9f\x74\x1b\x95\x9c\xa7\xa9\x54\x4f\xba\xd8\x7f\xa0\xd4\xc9\xd1\x9f\x08\x26\xb0\x15\xf8\x18\x6d\xad\xac\x06\x99\x4a\x82\x61\x13\xc1\x5e\x7f\x4b\x15\xb7\x78\x01\x0f\xeb\x2a\xc9\x5d\xbc\xd2\xbd\x07\x54\x1f\x58\x73\x89\x60\x3d\x07\x64\x23\x77\xf5\xd8\xdf\xe0\xff\x07\x4a\x55\xf2\x33\x50\x84\x20\xa4\x9d\x03\x5b\x7a\x67\x39\xca\x97\x9d\x88\x13\xf5\xdb\xb7\x6c\x27\x5c\x58\x6c\x13\xab\x92\x6c\xfe\xfc\xd1\xc2\xa5\x77\x0a\x28\x55\xe7\x41\x01\x05\xbb\x82\x4e\x6b\x3d\x5c\xcf\x41\x93\x10\x67\x0d\x19\x5b\xdc\x9d\x14\xb0\xa7\x5a\x9e\x78\x7b\xb4\x71\x74\x27\x7d\x9d\xdd\xf9\x67\xf1\x6d\xa6\x2e\x27\xad\x3a\x6b\xa7\x84\x1d\x37\x76\x43\x9c\x4e\x0d\x8c\x23\xf7\x67\x02\xc5\x8e\x2a\x44\x75\xd5\x3a\xd2\x2d\x7d\xbc\x6a\x27\xed\x7d\xde\xf0\x54\x47\x98\xec\x87\x8c\xfd\x8e\xeb\x53\x09\x04\xcb\x76\x8e\x44\x29\xac\xcc\xde\xf9\x0c\x6d\x76\x59\x82\xb2\x79\x73\x8a\x98\x66\x43\xd0\xb7\x32\x84\x4a\x4e\x9c\x10\x74\xa6\xae\x88\xa4\x7e\x63\x22\x9a\x61\xcc\xa1\x4c\x5f\xe1\x7a\x12\x92\xf4\x18\xe5\xc8\x36\xdf\xdb\x57\x36\xea\xaa\xcc\x5f\xcd\x8b\xb7\x73\xcf\x3c\x5c\xda\xdd\x94\xc9\x51\xf9\x0d\xf5\xdb\x6e\x30\x7c\x9e\xd6\xae\xc8\x16\xb8\xd0\xfc\x3c\x1a\x23\x66\xed\x87\xc6\x99\xb4\x45\x68\x17\x43\x3c\x5b\xf8\x81\xb0\x62\xdf\xbc\x63\xc6\xdc\xb1\x5e\x2f\xd6\xc9\x66\x4d\x38\x02\x83\xf7\xc6\x12\xd7\x44\x2c\xa3\x5d\x68\x5b\x94\xc8\x99\x45\x5e\xbd\x86\xd6\x1d\x7c\x21\xf6\xd0\x8e\x31\xc2\xd0\x12\x85\x90\xdb\x5b\x9c\x6e\xab\x46\x8f\xb1\x53\xea\x76\xb4\xfc\x1a\x6d\x1e\xc8\x6e\x0a\x9d\x12\x72\x92\xac\x29\x93\xfb\xcb\x10\x4d\x8c\xcb\xc2\x54\xe2\x62\x4c\x1b\x07\x59\xce\x58\x96\xf6\x5f\xbf\x92\xe2\xd0\xc9\x12\x19\x60\xb3\xda\x39\xef\xe3\x91\xb8\x35\xa7\xd8\x71\xe6\x76\x77\x7c\xef\xb0\x8f\xc5\xd2\xde\x2a\x3a\x29\x03\xce\x11\xf2\xbe\x13\x1d\x24\x35\x08\x56\x32\x17\x25\x79\x59\x11\x2d\x7e\xfb\x2b\x79\xbf\xf4\xc1\xd9\x6b\x15\xdd\xd9\x6a\xd3\x54\x4c\x53\xf4\xac\xdd\x1b\xb3\x9c\x2a\x15\x14\xb2\xb3\xc4\x04\x74\xae\x5e\x27\x24\x3f\x6f\xbc\xba\x3c\xdc\xb2\x56\xf4\xe0\xa8\x4b\x61\x8d\x55\x0c\xab\x73\x80\x8b\x99\x72\x75\xd6\x7a\x48\x11\xdc\xe0\x18\x4b\xf2\x82\x4b\xb1\xb1\x19\xe7\xa3\x84\x1c\x33\x1c\x1a\xc4\xb3\x8a\x4e\
x0b\x98\xc5\x9a\x32\x78\xc9\x86\x2a\x6f\x7d\x59\x9d\x48\xdf\x86\x56\x86\x16\x19\x60\xbc\x66\xd2\x52\x08\x33\xbd\x3d\x81\xb9\xd8\xfe\xaa\x26\x61\x08\x62\xac\x9e\x03\x1c\x5e\x9f\x8f\xa0\xc5\xbc\xd5\xb2\xe3\xca\x66\xf2\xc3\x14\x2a\xd5\x15\xff\x41\x58\x94\xe9\xe0\x16\x67\x24\xc9\xee\x1a\xfe\xd3\xc9\x09\xa9\x31\x50\x3e\x84\xa5\x58\x24\x0f\x70\x75\xee\xcf\xf5\xba\xd0\x96\xc3\xbf\x53\xbc\x63\x53\x9e\x15\x7c\x6b\xf4\xa6\xe4\x03\xd8\xa5\x49\x05\x21\x99\x56\x7f\x1f\xf0\x10\x1e\x3a\xc8\x61\x65\x91\xd2\x98\xc7\x56\xaf\x33\x3f\x4c\x1d\xa2\xe8\xe1\x4d\xf3\x60\x5e\x92\x0a\x4a\x3a\x1c\xe8\xff\x32\xc4\x3c\x1b\x00\x27\x97\xe4\xcb\x60\x5b\x3a\xa9\x3e\x8d\xe2\x43\x92\xeb\xb0\x90\x10\x2a\x8e\xff\x93\xf5\x68\x09\x08\x36\x0f\x46\x25\xa0\x95\x4d\xb4\x74\x17\xe5\x50\xbf\x8f\xde\x5e\xba\x39\x6b\xb9\x7a\x77\xbe\xd2\xfa\x3c\x41\xd2\xe5\x9d\x19\xde\xfe\xa1\xa8\x9a\x3d\x12\xc7\x85\x73\x27\xe6\xdb\x70\x01\xe3\x84\x93\x49\x2e\xd9\xaa\x83\xf4\x3b\x0f\xa5\x98\xb8\x9b\xc5\xac\xc8\x26\xcb\x1b\x57\x45\x99\x5c\xe6\x44\x1f\xa8\x40\xb0\x32\x1c\x16\x9a\x83\x77\x71\x4b\xd7\x89\x0d\x9b\x62\x14\xa3\x14\xb9\xa9\xc6\x57\xfb\xe1\xe3\x28\x6a\x30\xfa\x1a\xec\xea\xe9\xc5\x42\x80\xa9\x95\x3d\x3c\xf9\x83\x8d\x55\xcd\xd7\xfc\x64\x7f\xaf\x0a\x03\x10\x14\xa9\x86\xca\x5b\x3d\xb4\x72\x4d\xfe\xeb\xda\x59\x01\x78\x59\x0d\x0b\x5e\x08\xb6\xc8\x5e\xfd\xc7\x41\x27\xd3\x99\xe1\x0e\xa5\x0c\x44\xea\x61\xfe\xbe\x5d\x86\x15\xa5\xdd\x3b\x28\x49\xf3\x30\x3e\x2d\xb3\x7c\xcd\x70\xd0\x20\xf6\x79\xb1\x2d\xd0\xb2\xd7\x85\x95\x92\x77\x20\xfe\x2f\x21\x45\x03\x9f\x4e\xbc\x0c\xab\xb8\x92\xf5\x07\x76\x2b\x51\x90\xe2\xed\xc9\x69\x93\x34\x05\xed\x40\x65\xb4\x32\x6a\xb5\x1d\x2d\xf0\x97\xe7\x81\x9a\x20\x91\x6e\x81\x3e\x24\x25\x13\x06\x14\x67\xad\xbb\x92\x8d\x3d\xbd\x01\xf6\xa2\x4b\x54\x39\x0e\x74\x73\x1e\xe9\xd7\x9e\xea\x43\xbe\x99\x83\x34\x73\xf0\xce\x04\x67\xf2\x24\x09\x57\xf4\xab\xd4\x42\x98\x11\x3e\x29\x05\x66\x2a\x5c\xbc\x8d\xca\x49\xb7\x5b\xdd\x97\xe6\x19\x3b\x59\xa3\x07\x4a\x74\x2d\x01\x05\xde\x7a\x76\x0b\x46\x77\x61\xb4\x72\xe2\x44\x2f\x4d\xf0\xe1\xb1\x09\x15\x49\xb8\x30\xfa\x25\x2d\x1f\xc3\x65\x77\x0a\x43\xd9\x63\x34\xf9\x0a\xf9\xfe\x44\x8c\x98\xc1\xf6\x53\xfd\xb5\x9b\xbd\x49\x97\x9b\x37\x3b\x29\x2a\xeb\xb7\x5c\x8b\xc7\xee\x52\x0c\xc1\xb7\x45\xd8\x8b\xea\x65\xec\xe9\x01\x36\x31\xf0\x50\xf1\x21\x33\x18\x12\x72\x81\x0d\xb2\xad\x68\xf4\x91\x33\x16\x99\x22\x3b\x43\x3e\x7b\xac\x66\x14\x1e\x0b\x32\xbc\x17\x69\xe2\xce\xbf\x3f\x1e\x43\x7b\xc5\x42\xc5\x04\xa6\x07\x61\x2f\x5e\x75\x75\x38\x8d\xa1\x08\x3b\x6b\x00\x49\xdc\x9b\xf8\x17\xd6\xb6\x95\x4f\x6e\x5b\xb0\xba\x83\x90\x5b\x80\x7c\xfe\xea\x95\x3a\x00\xef\xcd\x75\xa1\x0e\x54\xad\x71\xee\x29\x28\xa2\x36\x1f\x5f\x82\xac\xac\x3f\x16\xc9\x86\x82\x10\x1c\xc4\x11\x82\xa6\x18\xce\x2e\x73\x85\xa2\x9b\x7d\xf6\x83\x92\xa0\x4d\xb5\x4d\x5c\xcc\xe7\x31\x78\x56\xa4\xfb\x59\x59\xcc\x1b\x25\x2a\xcb\x4c\xcf\x32\x52\x75\x00\x80\x12\xff\x49\x82\xe2\xa1\xed\xb9\xe2\xb3\x65\x25\xec\xc1\x9e\x5e\x2f\xf0\x0e\x05\x6d\x6a\x65\x29\x57\xcb\x0f\x6b\x00\x75\xf1\x3d\x64\x66\x7c\x87\x15\xa5\x60\x8f\xa7\xd7\x74\x20\xe1\x00\xf9\x2d\x74\x13\x92\x9b\x6f\xe1\x80\xed\x72\xa7\x92\x99\x04\x4b\x08\x1c\x7a\xbd\xe1\x2d\x5b\xcc\x38\x1a\x21\x11\x57\x15\x53\x7a\xb3\xf1\xa9\xe7\x2b\xb2\xb7\x6b\x2e\x23\xea\x50\x9b\xe5\x07\x1a\x00\x1d\x1e\x71\xf7\xd3\x41\xb6\xd1\x1d\x2d\x77\xd2\xe6\xbb\x87\xf5\xfd\xe2\x59\xb1\xf2\x3c\x1a\xb0\x0e\x3b\xda\x6c\x36\x0f\x67\xd7\x70\x50\x9f\xa9\x37\x92\x4a\xeb\x45\xcf\xe4\x2d\x28\xaa\x29\x40\x6f\x02\x6c\x20\x61\x69\xad\x55\xf3\xab\x6b\x15\xce\x5f\x36\xcb\x51\x0c\xaa\x16\x67\xbf\xc9\x9f\x28\x5f\xd6\xf2\xf9\x21\xd0\x7c\xcf\xde\x96\x33\x18\x8a\x47
\x71\x77\x4e\x15\x00\x94\x80\x00\x2e\xfd\xbd\x7f\x1f\x8b\xa5\xea\xe8\xbd\xba\xbc\x5a\x70\x8d\xea\xb1\x2e\x0b\x9e\x6d\xfa\x2a\x82\x21\x30\xc9\xdf\xde\x84\xd0\xf4\xb4\xd9\x53\xca\xf9\x67\x00\xdf\x12\xcb\xb6\xc5\xfb\x4a\xac\x8f\x8d\xe5\x9c\xda\x57\x1a\xb9\xc9\x99\xbd\x1d\x60\x4b\x37\x3c\x1c\x34\x4b\x1c\xe7\xd5\x8b\x38\x9d\x72\x33\x46\xfc\x44\x5f\xf1\x33\x7a\x41\x67\xd6\x0d\xa1\x9b\x57\x6a\xd7\x11\x00\xfd\x89\x59\x59\x08\xfd\x6d\x49\x02\x5b\x3b\xdc\x4f\x0f\x93\xf4\xd4\x3c\x40\xd0\x23\x90\x71\x2b\xd1\xa9\x9b\x75\x31\x94\x1a\x50\x9a\x25\xf4\x20\xc7\x4a\xc6\xeb\xe2\x37\x15\xb6\x5d\x2c\x55\x23\x32\xe2\x52\x49\x68\xfc\x63\x30\x04\x2e\xab\x2c\xd1\x2f\xe4\xa8\xda\x9d\xac\x05\x5a\x7b\x3f\x50\x7f\x33\x4a\x3b\x62\x02\x6e\xed\x4a\xf0\x28\x0e\xde\x70\x66\x1f\xeb\x57\x0d\xc5\x3b\x97\xcb\xe4\x12\x69\x49\xc1\xae\xc6\x7b\xcf\x70\x60\xd6\xc6\xc8\xb3\x03\x51\xec\x15\x06\xff\xa2\xef\x1a\x0a\x95\x4e\x08\x8b\xaf\x41\xa4\x77\x24\x6d\x0b\x97\x31\x87\x83\xb3\x5d\x25\x7f\x17\x6f\xf8\xcf\xbb\x2d\xe5\x6e\xda\x17\xb3\x22\x22\x09\xbd\x00\xe2\x4e\xfe\x3d\xd8\x91\x80\x73\xee\x3a\x20\x5b\x01\x06\xf9\x86\xea\x5d\xf3\x36\xb7\x16\x4e\xb6\x58\xee\xee\x6b\x57\xf7\x91\xf2\x7d\x47\x95\xc5\x5a\x64\x04\x43\x13\x0d\x8a\xd3\xdf\xad\x54\x64\x1f\x36\xfa\x30\xd5\x3a\x78\x19\x9d\x21\xd0\x8b\x84\x4a\xe7\x38\x99\x53\x61\x04\x00\xbe\x80\x76\x37\xc3\x88\xcb\x9a\x77\xe3\x74\x6a\xdd\xf2\x6f\xd1\x33\x90\xa3\xbb\xf6\xf0\xc3\x74\x00\x1b\xa9\x6a\x28\x2c\xa8\x42\xd7\xc2\x79\xc9\xb1\xfe\x16\x2b\x30\x29\xa2\x0d\x8d\xcd\xb6\x79\x33\xe1\x9b\x2e\x6a\x04\xd7\x72\xc4\x00\x83\x66\x9c\x0a\xb4\x2e\x3b\xb3\x8d\x45\x86\xa9\xe0\x86\xb8\x5b\x92\x98\x9a\x01\xf0\x9d\xaa\xb3\x04\x4d\x93\x0b\xd5\xf5\x82\xcf\x3d\x8b\xb7\xdb\x9d\x08\x01\xa3\x27\xeb\xba\xcc\x80\x01\x87\x90\x8e\xc7\x04\xef\x33\xb3\x59\x5e\xdc\x3e\x52\x43\x2d\x76\x19\xda\x84\x20\xab\x64\xca\x69\x32\x45\x82\x2f\xef\x1e\x24\x3c\x9f\xd8\x2d\x49\x48\x47\xb5\x75\xd2\xea\xe2\x03\x2d\x60\x5d\xf1\x0d\x3b\x0e\xb0\x33\x92\x05\xb2\x0f\xef\x7a\xc6\xc4\x20\x50\x83\x10\x06\xa8\x91\xa4\x25\x57\x2a\xdd\xa2\x6b\x46\xad\xd6\xfd\x4e\x28\xa8\x1c\x9e\x89\x6c\xaa\x6a\xe7\x09\x68\x26\xea\xf3\xa2\xca\xa4\x3e\xd3\xf4\xb5\x51\x88\x38\xbb\xb9\x57\x5c\x6f\x4b\x07\x8b\x97\x55\x25\xe0\xa2\xfc\xc7\xed\xff\x8d\x66\x08\x24\x1f\xf5\x9f\x69\xb7\xb7\x05\x69\xa7\xa6\x9b\xdc\x62\xa2\x17\x38\x10\x95\xd6\x6c\xf3\x08\xca\x24\x4a\x24\x56\x80\xf6\x1d\xb4\x06\xb4\x01\xca\xfb\x9f\xb6\x04\xf4\x72\xed\x5f\xfa\xb3\x7c\x5a\xfb\xdc\x1b\xc2\xd0\x15\x48\xbc\x1c\x5b\xe7\x67\x68\x7b\x79\x75\x80\x6b\x27\xd7\x65\x80\xd9\xbd\x26\x43\x6d\x6b\xfa\xfd\x85\x5a\x1a\x35\x3c\xbf\x44\xea\x75\x84\x51\x0a\x8d\x90\xf4\xa4\x38\x33\x02\x15\x12\x14\xd6\x4e\xa4\x72\x4d\xf2\xf6\xa8\x31\x5a\x23\xf1\x68\xa0\x5c\x9c\x3e\x4b\x1a\xe9\x25\x77\x0d\xbc\x26\xad\x2f\x11\x25\x27\x0d\x8a\x22\x66\x96\xf8\xd7\xa8\xa5\x8a\x07\x42\x09\x42\x8f\x9e\x5d\x56\x7f\x48\x57\x1e\x04\xe2\x44\xae\x82\xd7\xb8\x0d\x36\xf8\xc0\x2a\xca\x5a\xce\x11\x8c\x1b\x12\x35\x8f\x81\x44\x47\x92\x86\x4c\x06\xce\x42\x5c\xcf\xd5\xad\x62\xc1\x00\x7f\xec\xa4\x92\x71\xb0\xd3\x65\xfc\x8d\x63\x3e\x3b\xf1\xb8\x53\xfe\x08\xbe\xb5\x6a\x8a\x85\x7d\xae\x8b\xcb\xdb\x31\x75\xd5\x36\xca\xea\x0a\x92\x0a\x76\xa0\xf8\x04\xfa\x9d\x69\x84\x51\xe0\x66\x75\x05\x46\x55\xa7\xc4\xe2\x2b\x8b\x08\xf7\x5f\x2c\x42\x7f\x50\x23\xd5\xd2\x8f\xbd\x46\xb3\x6d\x3f\xc4\x7e\xd0\x2b\xc4\xa3\x1c\xc7\x2b\xa8\x75\xa4\xcb\x3d\x59\x5e\x2e\x68\x9b\x6b\x1a\xfa\xd9\xb9\xae\x41\x33\x7f\xd2\x0b\x6e\x87\xc6\x0e\x38\x5f\xb9\xea\x41\x2d\x91\x6a\x21\x46\xb3\x49\xda\xde\x88\x0f\xd5\xf6\x5b\x5b\xd0\x27\x41\xa1\xd9\xf1\x75\x40\xa8\x0c\x39\xa8\x13\x60\x25\x79\xa0\xae\xfd\x9c\xb
1\x70\x9a\xab\x77\xa4\x5c\x5f\x79\xbc\xa0\x14\x01\xdb\x3a\x92\x65\x74\x03\x5a\xa9\xd6\xc5\x65\x3a\x7a\xe4\x14\xb0\x02\xf8\x2c\x60\x4f\x68\x76\x05\x57\x32\xef\x5e\x27\x88\x5e\x1d\x38\xe8\xc1\xc4\x91\x02\x8f\xc7\xa7\x69\xce\x27\x11\x00\x68\xcc\xaa\x51\x61\x7f\x51\x92\xa6\x3a\x0a\x74\xe3\xee\xbe\x3f\xd9\x3c\x9f\x84\x75\xd4\x9f\xba\xba\xd7\x3f\xcc\x0b\xd9\xc5\xec\x6e\xa4\x26\x63\xc2\xe1\x6e\x58\x9f\x8f\xea\x5c\x21\x33\x20\xd4\x10\xea\x3f\x7b\x5c\xd8\x25\xed\x34\xe9\xe4\xc7\x33\x20\x9d\xbe\x4d\xf7\x72\xf2\x0f\x0b\xf6\x8d\x6e\xf1\x00\x53\x89\xbe\x1b\x77\xa1\x53\x09\x33\x7a\xc5\xe7\x9d\x66\xe7\x42\x65\x17\x0e\xbd\xb7\x34\x95\x85\xde\xe5\x24\x46\x9c\xc3\xab\x40\x65\x02\x19\xe5\xcc\xb4\xdd\x4a\x1a\x8d\xcd\x42\x48\x79\x02\xc4\x01\xb2\x15\x36\x2c\x43\xda\xe5\xb1\xcd\x70\x1d\x44\xb6\xb7\xea\x61\x96\x24\xdb\xa8\x4b\xbf\x63\x3c\x90\x91\x6f\xe8\xe5\xf1\xb4\xa9\x19\x52\xe9\xb7\x4c\x15\xfc\x6c\x8b\xa1\x27\x17\xeb\x82\xda\x9e\xe2\x61\xbf\x7f\x1f\x5e\x7f\x33\x88\x83\x5e\xd0\xb1\xab\x27\x96\x42\x3a\x31\x79\xeb\x85\x78\x0b\x42\x6e\x3f\x85\xaa\x35\x5a\x59\xb2\xa3\xb8\x7b\x5c\xaf\x64\x5b\x70\x13\xd0\x78\x8f\x9a\x94\x37\xe5\x04\x05\xe4\x58\x4e\xbd\x02\x94\xb1\x23\xfd\xef\x51\x7e\x92\xc5\x0d\x7b\x97\xbc\xdc\x29\x34\xf2\x6f\xe6\x24\xa2\x64\x5b\x87\x37\xdd\xda\x55\xf8\xe0\x12\x3a\x51\x56\x8f\x9f\x27\xd2\x2f\x0d\x59\x1f\x0a\xe3\x0d\xf9\x12\x79\xd3\xf5\x42\xec\x74\x6b\x78\x85\xb8\xbb\x76\x84\x07\x6d\x08\xbb\x0d\x29\xae\xea\x7b\xad\xf8\x15\x14\xbe\xb6\xd1\x6c\x6e\x5d\x18\xa3\x8c\x2c\xf1\xac\x9e\x13\x0b\x24\xdb\x03\xc4\xa9\x88\xd2\x97\x17\x83\xc8\x63\xc9\x65\x6d\x0b\xcb\x54\x1a\x0f\xc2\x92\xa4\x35\xf2\xef\xb2\x6f\x4b\x23\xa7\x27\xc5\xbe\x34\xe4\xfa\xa2\xef\x92\x44\xef\xbc\x64\xde\xd6\xc9\x16\xf3\xf7\x30\xe4\x5f\xd9\x39\x0a\x93\x4f\x09\x6b\x60\x6a\x19\x0f\x00\x5a\xe6\x6d\xe3\xa5\xe4\xaf\x05\xe7\x31\x29\xa1\xc9\x3a\x90\xaf\x5a\xd4\x97\xa0\x13\xed\xf4\x80\xb5\xb5\xc4\xf0\xe1\x04\x43\xe8\x08\x89\x55\x0f\xca\xea\xaa\x2a\x4a\xc1\x06\x9d\x05\x3d\xbd\x53\x60\x8c\xaf\x4d\x0a\x66\xbc\x5d\xdd\x7b\x2e\x68\x7b\x36\x1a\xbc\x9d\x92\xe4\x4a\x43\x33\x41\x10\xc9\xd8\x89\xa9\xdf\xc1\x43\x74\x48\xfd\x3e\x00\x9c\x21\x84\x5d\xbb\xeb\x05\x4b\x44\x16\x67\xe7\xc5\xb4\xa1\xc3\x39\x17\xba\xbc\x29\x7c\x49\xc8\x68\xdd\x79\x4b\xf0\xfb\x1e\x04\x3e\xcd\x4c\x2c\x2b\xb7\x8a\x07\x16\x71\x08\xf6\x06\x23\x03\xa8\xda\x1c\x2a\x44\xfd\xf0\x4d\x11\xdb\xfc\x19\x02\x15\x86\x61\xaa\xa8\xb9\xb7\xab\x7d\x65\x3d\x72\xf4\xe8\x46\x79\x01\xf0\x43\x2b\xe1\x83\xe7\x90\xef\x9c\x3c\x90\x97\x98\xe1\x2f\x24\xf0\xa7\xf3\x8e\x9d\x29\x57\x90\x70\x6b\x22\x20\xae\x9d\xca\xf6\x12\xe7\x6a\xd5\x9b\x0d\x5d\x05\x5e\xe8\x3d\xac\xe2\xfd\x60\x7b\x93\x2d\x46\x64\x7b\x31\x1e\x49\x52\x39\xb6\x9f\xdf\x24\x0c\xbf\x83\xa7\xf1\x97\x07\xfb\xe5\xa6\x14\xe3\xe1\x44\xfa\x83\xfb\x22\x97\xbd\x83\x61\x13\x4f\xa7\x74\xf0\x88\x2e\x29\x80\x31\x7b\x24\x34\xc3\xc8\x46\xf7\x8b\xe8\x6f\x21\x24\xe1\x0a\x36\xb0\x9e\xd6\xed\xe1\x8c\xa8\xb9\x8e\x06\x09\x79\x78\xab\xc4\xb2\x07\x08\x58\xf2\x4d\x37\x7f\x23\x2f\x68\x89\xe0\x1d\xe0\x1a\xcd\xc8\x1a\xff\x4b\x66\xb5\x96\x7a\xb1\x4d\xf3\x68\xf6\x9d\xc3\x33\x02\xb4\xcf\x8d\x3d\x4f\x2a\x1f\xc1\x48\x06\x42\xd5\x6a\x83\x32\x13\x64\x05\x9b\xd7\xd1\xb2\x96\x9c\xc3\x20\xef\xf0\x0d\xd3\x49\xd0\x70\xb2\x7f\xbb\xa1\xc1\x60\x81\x24\x39\x6a\x62\xd9\x9f\x15\xcd\x38\x6d\x28\xd0\xab\x06\x65\xc2\x42\x9c\x8e\x5e\x6a\xc9\x8c\xf6\x90\x50\xfe\xb3\x30\xc5\x75\xbf\xaa\x25\x22\xf2\x99\x6b\xf8\xc2\x37\x64\x3d\x07\x04\x4f\xc1\x45\xf1\xed\x38\x19\xf0\x42\x95\xd2\x76\x80\xb6\xd9\x32\xe5\x10\x54\x6c\x94\xfd\xb8\x38\xbe\x1d\xa2\x64\xdf\x40\x44\x8b\x9b\x16\xb6\x74\xaf\x9f\xb8\xf2\xdb\x97\x3a\x89\xc7\x
50\x0a\xd6\xc3\xb2\x2e\x7a\xa6\x50\x71\xdc\x54\x61\x3d\x2d\x83\xfb\x3e\x0e\xf9\xe6\x15\x1b\xf1\x8f\x20\x97\x02\xfa\x38\x25\x20\x62\x59\x21\x7b\xa7\x9b\xe9\x41\xa1\x0e\x21\xa1\x15\x4a\x4e\x12\xc5\x82\x86\xfb\x53\x26\x40\x1e\x15\xac\x31\xb1\xab\x68\x30\x63\x23\x9d\xb5\x44\x23\x6a\xf6\xcc\xea\x2a\x0b\xba\x12\x0d\x19\x46\xa9\xb5\xa3\xb4\x74\x01\x88\x92\x9c\xc8\x23\xca\x2c\x8e\xf2\x54\x1d\x2c\x03\x62\xf7\xa0\xfd\x6d\xf6\xa5\xc4\x2d\xc1\x75\x4c\xbb\x13\xf2\x3c\x38\x0d\xca\x63\xb5\xfd\x4f\x2e\x0e\x96\x31\x78\x42\x25\x52\x8a\x35\xa2\xef\xf2\xaa\x16\x0c\x1a\x61\x93\x6d\x31\xb2\xc7\xcd\x8e\xfd\xf3\xdb\x09\x32\x4b\xda\xe3\x7a\xa1\xa0\x56\x62\xf1\xd0\x27\x0c\xa8\x04\xac\x0c\x89\x4f\xc8\xbc\xbf\x3a\x8a\xc9\x72\x01\xe1\xfe\x67\x6f\x51\x9a\xc3\xc4\x3a\x77\x67\x53\x4c\xa3\xd0\x18\x34\xfb\x87\x2b\xf2\xee\xc0\xf3\x03\x8b\xe0\x3e\x63\x3f\xbb\xf3\xb5\x94\x0f\x83\x27\x5f\xcb\x93\x5b\x2b\x9f\x5d\x2c\x2f\x98\x32\x9a\xed\x40\x94\x26\xe3\xe3\x64\xc2\xde\xa8\x7d\x57\xd1\x5c\xa8\xd4\x1c\x4e\xef\x80\x18\xf8\x6e\x2d\x32\xe8\x04\x29\xf5\x05\x4d\x07\x71\xbb\x24\x01\x3c\x25\xe2\xf5\x52\xba\xdf\xbf\xb9\xe7\xaf\xd6\xbf\xdc\x64\xef\x89\xd7\x49\x01\x10\xc5\x35\x15\x86\xc4\x83\xea\x47\x31\xfc\xf9\x2d\x89\x12\x6c\x47\x4c\x42\x6a\x50\xfa\xc1\x50\x24\x04\x87\x42\xc7\x9d\x7e\xb4\xaa\x13\x78\x72\x1f\xd4\xcb\x74\x6d\x63\x79\x61\x72\x39\x0f\x2a\x56\x30\xee\xdf\x0a\x04\x5a\x96\xa8\x81\x21\x3a\x5e\x92\x37\x07\xb3\x6a\x0c\xd2\x15\x75\xcc\xc9\xbf\x72\xd9\xf2\x3b\xf9\x84\x8c\x26\x41\x17\xca\xf8\x62\x28\x33\x59\x6e\xf4\x2c\x97\x02\xff\xf6\xe1\xe7\x8c\xda\x4b\x4e\x16\x0b\xab\x88\xa7\xb3\x56\x3c\xdf\x9d\xb5\x27\xd2\xd6\x4f\x3b\x4a\x07\xec\xea\xe5\xbd\xa9\x00\x92\x52\xe7\x19\x56\x2f\x6e\xea\x63\xfd\x82\xd2\x66\xa4\xbb\x41\xbf\x8b\xae\xba\x28\xfe\x06\x04\x93\xe6\xd5\x22\xa3\xdc\xc1\xf4\x98\x36\xdb\x03\x64\x30\x6f\x41\x5d\x57\x32\xb9\xa2\xab\x92\x2d\xf1\x05\x57\x66\xbc\x56\x36\x48\x98\xd5\x0d\x33\x84\x70\x5b\x61\xaf\x63\x1b\x7c\x0b\x45\x14\xd9\xeb\xa0\xab\xe1\x05\xf0\xa2\x61\x21\x97\xe7\x27\xc0\x22\x40\xa3\x2c\x7d\x7d\x90\x1f\x7d\x17\x46\x21\x56\xde\x7d\xe2\xcb\x14\x2e\x0d\x30\xec\x5d\x35\x9a\xe1\x6b\x25\x04\x85\xde\x48\x65\xba\xe5\x0f\x30\xb3\x48\xd8\x4f\xa4\x4b\xb3\xba\xa1\x11\x15\x6b\x9c\x69\x5d\x08\x2a\x6b\x23\x0a\x40\x99\x70\xb1\x11\x63\xdc\x45\xc9\x47\x40\x8a\x93\x80\x2e\x11\xc4\x8a\x2c\x4b\xca\x9d\xb7\x8f\xd5\xd2\x1b\xda\x35\x0e\x86\x99\xcc\x37\x7e\x02\x65\x57\x36\x70\x57\xb4\x7e\xf2\xb4\x3b\x64\x47\x53\xe5\x10\x8d\xa1\xc1\xe9\x3d\x26\x29\x7f\x72\x2a\x88\x69\x41\x57\x9e\x64\xb1\x4c\x01\x45\x7d\x58\x10\xeb\x6a\x32\x0f\x69\x14\x64\x58\xa2\x13\x66\xf3\x42\xd7\x9b\xb7\x73\x53\x1c\xd1\x6d\x61\xa9\xd0\xd5\xc8\xda\x93\xf9\x84\x5f\xa2\xc6\x0f\x44\xdc\xb5\xa3\x78\xc0\x02\xe7\xd0\x64\xf5\xf3\x3a\x5b\x7a\x38\x21\xe2\xf9\x4d\xba\x9a\x30\xca\xb7\x23\x33\x77\x95\xe7\x18\x7a\xa8\x97\x8d\x92\x78\x80\x1f\x93\x5d\x06\xf2\xf0\x4c\xf9\x0d\xcd\xbc\x63\xd8\xe1\x68\xde\x7a\xe4\x94\x3e\xdc\xc4\x25\x59\xb5\x8b\x17\x99\x3a\x39\x38\xb2\xf6\x9a\x2e\x96\x3f\x78\x33\x07\x42\xb0\xcd\x2e\x2e\x9c\x73\x81\x7a\xad\xa9\x52\x31\xa1\x12\x0b\xe6\x3d\x8b\x70\xd4\xd6\xb1\x0d\x50\x4e\x83\x46\xe2\x5c\x26\x42\x49\x93\xa5\xc7\xc1\x68\x01\x5a\x64\xa0\xa2\x7f\x82\xe8\x3a\xea\x01\xf6\xd9\x30\xdb\xc9\x73\x64\x53\x1c\xf6\x5b\x9d\x1d\xc6\xd9\xfc\x2e\x64\x73\xed\x04\x78\x47\x8d\x93\xb3\x52\xbc\x4d\xe4\xd6\x35\x96\x74\xcf\x15\x0e\xa6\xc8\x50\x12\x14\xbc\x81\xf2\xb1\xd5\x67\x87\xf1\x63\x4f\x84\x9d\x2a\xd3\x67\x70\x2a\xca\x4c\xcd\xd2\xd9\x65\x33\x8d\x0f\x01\x4e\x45\xc9\x4b\xb7\x7a\x80\x29\x3b\x71\x9e\x94\x83\xb2\x60\x90\x1b\x5d\x41\x76\xd6\xe5\x62\x03\x28\x6a\x2d\x4d\x03\xbd\x7a\xb2\x9b\x6a\
x36\x41\x31\x70\xe3\x32\xa9\xc0\xc6\x7b\x71\xbd\xa9\x73\x58\x21\x26\x1d\x8a\xfa\xd8\xa9\x33\x5f\xbd\x44\x86\xaa\x64\xed\xcf\x23\xcb\x19\x03\x58\x9c\xe2\xfd\x1b\x2a\x12\xe0\xf6\xf6\xa1\x3c\x0d\xdd\x5d\x95\x94\xef\xcb\xf2\x6f\xec\xc9\xcb\x19\x5e\xe2\xec\x10\xd8\x3c\x73\x34\x80\x47\xad\x93\x41\x45\x96\x71\x43\x1f\x71\x5d\x47\xa3\x43\xa4\x4f\x1b\x95\x61\x6d\x60\xe5\xc8\x1e\xa8\xe6\x9c\x0f\xb6\xdc\x80\x77\x2d\xb9\xa1\x77\x0c\xb8\x04\xcd\x03\x1e\xa1\xba\x0b\x0f\x43\x39\x0e\x9b\x5f\x51\x3c\x9d\xb4\x8b\x0b\x21\x94\x9c\xcd\x09\x97\xc5\xdc\x66\xe3\xc1\x9c\x9d\xcd\x8f\x62\x41\x2b\x9e\xcd\x39\x6c\x70\x78\x01\x3d\x7f\x35\xda\x24\x0f\x62\x94\xbc\x17\x48\x89\x46\xe6\x64\x5b\xeb\x78\x34\x87\x1a\xeb\x8b\x5f\x2c\x0b\x68\x91\xa4\x9d\xd5\xaf\xcd\x16\x7f\xf3\x4d\xa1\x6e\xbe\x68\xbb\x03\xd9\x60\xc5\x89\xa1\xa1\xd3\x28\xbb\xe8\xa0\xbc\xfd\xac\xc7\xfa\x92\x70\xc5\xbb\xf4\x83\xa7\x74\x34\x3d\x79\x58\x50\xfb\x78\x05\x4d\xcc\x12\x3e\x51\x94\x15\xfb\xf3\xcb\xde\x1e\x6b\x20\x7c\x00\x94\x5b\x0a\x01\xe5\x09\x91\x42\x1d\x29\xed\xe5\x23\xcf\x8e\x32\xa8\x61\x6f\xde\x51\xf5\xdd\xf6\xd0\x6a\xf2\x0d\x75\x22\x9c\x8d\x17\x77\x59\xbd\x17\x69\xf7\x7a\xac\xf9\xb6\xef\xe0\x00\xa5\x4f\x97\xb2\x12\xb6\x57\xb0\xe5\x5c\xd9\xd3\xa6\xca\xfe\xf9\xd5\xc7\x67\x2c\xf1\x3b\x5b\xa2\x82\x56\x30\xbb\x4c\x9c\x5e\xc9\x1c\x9d\x8e\x35\x19\xe5\xce\xbe\xf0\x49\x62\xa4\x83\xbb\x26\xc7\xed\x83\x97\xc2\xc7\xb5\x74\x51\xe6\xa2\xc1\xb4\x30\xb5\xbb\x7e\xc7\xe1\x85\x1f\xf1\x55\x1b\xe2\x3e\x92\xaa\x36\xf6\xcb\x3d\x3b\x67\x34\xcd\x4e\x4e\x99\x46\x27\xea\x6d\xd7\xd5\x6e\xb5\xb2\x0f\x58\xb0\x8a\xab\xdb\xcc\x8b\x83\x7e\x1e\x24\x31\xa0\x52\x5d\x28\xd1\xa5\xd0\x0a\xc2\x6f\x2a\xe7\xaf\x1a\x96\xb0\xc9\x8f\x6f\xf1\x1c\x40\xcc\x42\x39\x94\x59\x36\xdf\xf3\x23\x13\x27\xc7\xdb\x53\x3d\x45\xbc\x4c\x31\x0c\xeb\x95\x6e\x2c\x8c\x45\x90\x48\x50\x29\xda\xd9\x56\x2a\xfd\x7b\xdc\x36\x8d\x72\x82\xa7\x21\x6f\x6f\xb0\xa0\x61\xb5\x08\xf1\xe5\x4a\x06\x2a\x18\xb3\x23\x7f\x91\x8d\x3e\x59\x05\xfa\x3a\x1d\xda\x83\xe2\x8a\x34\x47\xb4\x02\x03\xbd\xa9\xf7\x6b\x61\x55\x57\x9a\xe0\x35\xbb\x18\xe5\x4a\x6c\x37\x51\x8b\x0e\x63\x20\xac\x79\x07\xbe\x9e\x32\x91\xb7\xab\xcf\xb6\x61\x96\x6b\xf9\x70\xe5\xcb\x10\x0e\x3d\x7a\x22\x89\xc4\x25\xe4\xb3\x9b\xff\xfd\x4b\xdf\xd8\xd5\xc9\x60\xdd\x09\x28\xad\x9c\x06\x00\xf6\x1d\x52\x5b\xe0\x05\x1f\x8a\x78\x68\x35\xfb\x03\x9c\x64\xa4\xd6\x2e\x11\xa3\x03\x7f\xa8\x76\x6b\x9d\xa5\x78\xf5\x35\x8d\x2a\x30\x2e\x91\x0d\xc5\x41\xea\x16\xc5\x45\xa3\x67\xa9\xf5\xe3\x1a\x8a\x4d\xb1\x12\x6c\x30\x0b\x65\xe9\x1c\xfc\x88\xfe\xcb\x92\x00\x0f\xc0\x65\xcd\xa0\x0d\x6b\xde\xea\xb5\xc3\x00\x55\x8a\x49\x2e\xbe\x37\x4f\x7f\xe7\x0e\x39\x66\x97\x13\xff\x4d\xcd\x3a\x99\xe9\xb7\x49\x45\xdb\xc9\xee\x2e\x2d\x71\x23\x63\x6a\x8a\xd4\xf9\xa4\x0e\xe5\xc1\x75\x0e\xf4\x5a\xb3\x37\x41\x45\x0c\xe2\x61\x0f\xca\x62\xfa\x8c\xc8\x35\x50\x46\x90\x00\x9e\x9d\x9f\x19\x06\x7b\x1e\xda\x7f\x96\x4c\x79\x4f\xa7\xc4\x34\x88\x5c\x15\x26\x44\x73\xac\xeb\xf6\x19\x98\xc6\x99\x06\x13\xb0\x0c\xee\xea\x8f\x5b\x0e\x08\x2c\x9a\xdd\xda\xe2\x13\xc7\x54\x35\xe1\xd9\xb1\xe4\x7e\x40\x52\x21\x97\x18\x2f\xa3\x47\x31\x07\x52\x5c\x67\x04\x7d\xda\x73\x6b\xee\x7e\x0c\xf4\xba\x71\x41\x9b\x87\xdd\x67\xf8\x41\xa9\xd5\xb4\x4b\x1d\x72\xa2\x7e\xd8\xe5\xb0\x8e\x62\x3d\x60\x41\x8e\x1a\xf5\x52\xd8\x86\xf1\x61\xd7\x31\x7b\x98\xe7\x31\x93\x4c\xb6\x1c\x96\xd3\xa8\xe7\x16\xfd\xcb\xd9\xb4\xff\xca\x4d\x1e\xf1\xe6\x31\xa1\x06\xdf\x6e\x08\x68\xb4\xcb\xab\x6e\x36\x8d\xe4\x70\xf6\xcc\x3b\x3a\x99\xad\x05\x3e\x2a\x08\xfb\xb8\x42\x1c\x33\x15\xc1\x33\x8f\xd6\xc8\xf7\x74\x41\x37\x31\xcb\xa3\x76\xab\x08\xcc\x98\xb3\x32\x66\x4b\x99\xf7\xe6\x56\xf4\x8b\xfd\xdf
\xad\xfc\x15\xdb\x6a\x83\xb5\xd3\x38\xd1\xef\xe0\x95\xb2\x66\xb7\xb6\x3b\xf2\x27\x3e\x8a\x00\x6d\xee\xa3\x93\xc4\x78\x26\x74\x8b\xf2\xe5\x8f\xb9\x44\x1d\x5f\x40\xa0\x30\xdc\x89\xf2\x83\xc7\xc4\xea\x12\xf1\xf5\x69\xc8\xb5\x80\xed\x41\xd9\x00\x66\x13\x77\x5f\x51\x5c\xb9\x0b\x86\x6f\xdd\xde\xc3\xa9\x49\xa2\x6d\x1a\x14\x28\x28\x58\x36\xba\x82\x50\x1a\x45\x78\xe6\xcd\x21\x7a\x09\x71\xa4\x28\x36\x75\x7f\x47\x7f\x49\x8e\x40\x45\xba\x76\x39\xd7\xab\x52\x4e\xd1\xe4\x09\x11\xb3\x2b\xe8\x77\xa2\x3f\xe5\x62\x1f\xbc\xb5\x9c\xb1\xf9\x3c\xba\x5e\x59\xbc\x7f\xb3\x43\xe8\xd4\x11\x34\x3c\x13\x71\xcf\x0c\x66\x80\x56\xc4\xbd\xa1\xf9\xf9\x74\x51\xea\x5a\x59\x98\x53\x29\x28\x43\x19\x5e\xf8\x1c\x3f\xae\x57\xfc\x8c\x44\x23\x2e\x6c\x90\x9e\x77\x67\x80\x91\x94\xa7\x93\x54\xc6\xd4\x4d\x6d\x3e\x44\x72\x4b\xb1\xe2\x38\x9f\x54\x67\x65\x0b\x47\x50\xba\x1d\x4c\xb9\xd7\x38\x6f\x30\x33\x6c\x69\xf8\xe0\x31\x7a\x79\xe4\x3b\x2f\x31\x27\x74\x20\xee\x3e\xa2\x61\x27\xa6\x7a\x1b\xdc\x20\x59\xdb\xca\x0e\xd6\x42\x55\x13\x9d\x4b\xc9\x56\x2d\x4d\xc1\xa8\x95\x74\x5a\x51\x07\xeb\x0e\x99\xaf\x07\xe0\x01\xf8\x47\xc6\x0d\xc5\xc5\x91\xeb\xc1\xf1\x2d\xae\x52\x63\x7c\x4b\x78\xb1\xbe\xba\xe4\xc9\x31\x54\x39\xf2\x30\x28\xe9\x1c\x4d\xcf\xfd\x58\x83\xb3\xe2\x54\xbb\x18\xc5\x51\x2d\x08\x80\x55\x76\xeb\xd9\xb2\xf3\xa1\xcd\x57\x08\xe0\xb5\xfe\x6a\x27\x61\xc8\x1a\x90\x48\x6a\xd3\x6c\xf5\x3d\x3b\xe5\xc8\x23\xbb\xc7\xe4\x04\x2e\xcb\x00\xe2\x5f\x3a\xac\x04\xaf\xfc\x21\x01\x6c\x52\x61\x5c\xb4\x19\x4e\x46\x06\x08\x1e\x43\x7f\xe9\xff\x88\xe1\x64\x11\xa6\xee\x58\x19\x09\x92\x65\x94\x6b\x4a\x4b\x5e\xbd\xea\xcc\xa4\xd9\x17\xfb\x9b\x1d\xac\xfc\x29\x63\x0c\xe1\x68\x42\xc1\x8c\x04\x59\x99\xe2\x1a\x92\xe9\x1a\xe4\x2c\x7e\x4a\x22\xc5\xe3\x52\xa7\xf0\x63\x84\x7a\x49\x6d\x21\x3d\x5b\xd0\x72\x60\x8d\x45\x66\x03\x76\x8d\x9a\x11\xb0\xf9\x48\x35\x31\xb2\x44\xce\xe3\xe9\xcd\xfc\xae\xaa\xb4\x71\x3e\x93\x58\x76\x07\x32\x54\x5e\x11\xfc\x0e\x2d\xa6\x74\x10\x27\x15\x24\x28\x00\xc4\xa9\xbf\xb7\x7a\xdf\xc3\x93\xb1\xa8\x26\xe9\x9e\x98\x7e\x7c\xbc\xd1\xe6\xcc\x9e\x77\x32\x08\x47\x7f\xb0\xdf\x43\x80\x7f\x30\xeb\xb5\xff\xd7\x27\x7f\xb0\xa8\x0b\x55\x1e\xd4\xba\x29\x4a\x08\x81\x89\xf2\x67\x36\x6b\x09\x18\x2b\xe5\x94\x86\x57\x20\x43\xa6\x92\x96\x25\x31\xc9\x66\xae\x6a\x3f\xe1\x06\x7e\xe2\xdb\xbb\x4d\x5c\xa4\xd2\x7a\x6a\x6d\x81\x94\xa6\xf4\x80\xf0\x2c\xc0\xb5\xdf\x38\xa6\x2a\x23\x97\xb1\x26\xdd\xe4\xe0\x45\x7f\xba\x94\x72\x3f\x29\x61\xab\xf7\x86\xc9\xe4\x1c\x9e\xcf\x3a\x8c\x37\xf6\xa9\xa2\x31\x58\xe3\x03\xea\x28\x74\xea\x52\x5a\x6b\x86\xae\x67\xf0\x2c\xe0\xc5\xce\x7d\x40\x66\x72\x8f\x00\xe2\x68\x5f\xc2\x48\xb3\xe4\x6b\xdc\x0e\x4c\x41\x81\xe1\xb8\x7d\x44\xc0\x51\x46\x2b\x5e\x1c\xbd\x60\x9c\x38\x79\x0d\x56\xe2\x11\xfa\x45\x38\x64\xc4\xd2\xca\x88\x77\x54\xb9\x35\x2f\x86\xf8\x7e\x6d\xc1\x25\xce\xb5\x42\x87\xc8\xfc\xfe\xfd\x33\xa2\x37\x6c\xd4\x1a\x0b\xe7\x98\x32\x9b\xd9\xed\xe4\xfc\x28\xfd\x4a\xf6\x3a\xa1\x63\xb6\x7c\xc5\xff\x24\x81\x6a\x18\xf1\x43\x7f\xac\xed\xbb\x19\x6c\xb1\x63\x8e\x6f\x8e\x17\x01\xc9\xfb\xd9\xff\xeb\x76\xb6\xe5\xb0\x0c\x7a\xca\x8c\x0c\x7d\x92\x20\x82\xdd\x89\xda\x6a\xd6\x6c\x99\x10\x96\xc8\xf1\x39\xa2\xc7\xef\x13\x29\xd3\x62\x2d\x60\xcb\x9b\x35\xfa\xb0\x38\xfb\x42\x13\xed\xa5\x4d\x6d\x88\x72\x24\xea\xb9\xd8\xe4\xa9\x85\x65\x30\x54\x36\x8a\x7d\x43\xfb\x9a\x47\x0c\x93\x08\xab\xd3\xb9\x75\x78\xef\xe2\x7d\xa6\x94\x96\x51\xce\x1c\xdd\xf0\x27\x70\x1e\x1d\x06\x01\x8a\x33\x76\xc1\xc9\x37\x0a\x0a\x6d\x50\x74\x6d\x24\x1a\x07\x3e\xb2\xa9\x40\xc7\xae\x0d\x91\x50\xb1\x43\x9c\x56\x52\x39\x94\x73\xd7\xb2\x34\x07\x05\xa1\x95\x03\xe4\x7d\x9c\x42\x55\x31\x65\xf7\xe5\x66\xaa\x37\xde\x37\xf
8\x27\x05\x6b\x6f\x4c\xc0\x7d\x3a\x05\x70\xf1\x36\x29\xcc\x58\xc2\x6d\x0a\xce\x9f\xc0\xc0\x31\x7c\xf5\x2a\x08\x0a\xa7\xc2\x19\x3e\xae\x7f\xae\xfe\x13\x20\x49\x61\x21\xc9\x47\x56\x64\x96\x22\xc2\x64\x01\xed\x8f\xd8\xb7\x7e\x3d\x7c\x18\xab\x2b\x8e\x93\x6a\x61\xf2\xd0\x27\x59\xf5\x1a\x4f\xe1\x18\x44\x13\x53\xb6\x03\xd1\x41\x44\xd1\x16\xc6\x05\x89\x74\xce\xfe\x51\x6a\x20\x91\x90\x69\xff\x32\xcc\xd5\xc9\xab\x2b\x09\xf4\xe4\x15\x1a\x96\x9d\x9b\x73\xa9\x2a\xdd\x84\xc4\xb8\xae\x9d\x77\x68\x44\x3e\x0a\x86\x26\x8e\xf1\x6d\x0b\x64\x7a\x79\x92\x6f\x8e\xfa\xf8\x80\xea\x75\xf1\x8c\x35\xe7\x35\x19\x85\x71\x9d\x46\x16\x28\xfd\x9b\xae\x8b\x82\x94\x95\x38\xb0\x2a\x3b\x0e\x35\xf4\x53\x32\x02\x53\x58\x4b\xdf\xa4\xce\x5b\xea\x6c\xce\x97\x59\x98\x8c\xfd\x97\x70\x2c\x2c\xa8\x99\x49\xde\x03\x5e\x7b\xf4\xef\x18\x6b\xd0\x9f\xe4\x0a\x2e\x69\x57\x4d\xd2\xc2\x0a\x03\x57\xba\x2b\x98\x25\xec\x04\xb0\xd9\xcf\x48\xf2\xc4\xd3\x17\x16\xa8\x65\xad\x36\xa2\x35\xde\x5f\x9e\x25\xf4\xa4\xe0\x83\x87\x2e\x73\x27\x04\x33\xfb\x34\x73\x9d\x4a\xe4\x1a\x79\x13\x18\x24\xda\xe6\x40\x5e\x14\xe6\xb9\x4e\xce\x85\x08\x1d\xc3\x9e\xc4\x2e\x41\x4c\x4f\x89\x99\xb6\xb2\xec\x77\x96\xc5\x5c\x06\x35\x82\x6c\xbe\xb3\xfb\xf4\x20\xed\xf0\x65\xaf\x80\x3e\x06\x2b\x5f\x3b\x98\x4b\xcf\x38\xbd\x40\xcd\xe6\xc0\x57\x59\x99\xf2\x4e\x2c\x20\x5d\x17\x02\x4a\x33\x88\xa0\x38\x22\x19\x59\xe0\xdb\x39\xab\x0f\x75\x62\x95\x77\x22\x1b\x80\x6d\xae\x0a\x4c\xa9\x79\xfb\x4e\x6e\x07\x1a\xd2\xee\x0a\x69\xce\x46\x19\x26\x68\xce\x96\x11\x12\xb1\x13\x67\x1e\xe4\xfb\x32\x66\x71\xea\xfc\x1d\x4e\x63\x91\x70\xc0\x9e\x33\x21\x57\x07\x4b\x61\x28\x1c\xba\x2d\xa7\x03\xe2\xfc\xd2\x27\x47\x10\x4e\x48\xa0\xc3\x57\xb3\xf4\x9d\x24\x3e\x89\x0c\x67\xc4\x82\xbe\x5d\x90\x5b\x23\xd5\xc3\x6f\x06\xa8\xb1\x6e\xbb\x8f\x82\xb0\xb1\x7f\xe1\xc5\x0e\xd0\xbd\x6c\xc8\x8b\x03\x0b\xc7\xa6\x4d\x45\xe7\xc3\x95\x7e\xda\x7d\x20\xdd\xfe\xc9\xe9\xf9\x0b\x0a\x25\xea\xe2\x1d\xfd\x1d\xd7\x6f\xf2\x36\x68\xa3\xbf\x52\x84\x43\x25\x40\xa1\x16\xe7\xcd\x6f\xed\x00\x80\x07\x11\x45\x3b\xba\xd6\xf7\x87\xd9\x2c\x44\x19\x25\x35\x18\xa4\x91\x1c\xc1\x17\x1c\x57\x9b\x34\x41\x8d\xb6\xec\xfc\x79\x67\x8b\xb6\xe8\x05\x78\x95\xec\xc3\x2f\x35\xdb\x73\xd8\x8b\xc3\xd0\x0e\x8e\xda\x01\x38\xa1\xcb\xa0\x15\x03\x32\x68\x5d\x8c\x3c\x7a\xd3\xc3\xc2\x6e\x13\x0c\xa6\xe5\x9d\x58\x3c\xb9\x5a\x82\x0f\x81\x96\x13\xc0\x1a\xf8\x1c\x48\xa9\x9d\x6a\x77\x76\x52\x58\xa6\x01\x20\x9b\x92\xcd\xc3\x8a\x29\x2c\xcb\xca\xa6\x53\xf0\x52\x76\x49\x99\x22\x07\x4e\xa7\x85\xdc\x61\x72\xdd\x3d\x32\x00\xef\xf8\xc3\xe6\x33\x67\x03\x5d\xb0\x01\x67\xa9\x62\x5e\xd0\xa9\x47\x61\x36\xad\x54\x3d\xd0\x01\x0d\xda\xd2\x1d\xe9\x2a\x54\xed\x7a\x4c\xcb\x8e\xdd\x27\xa5\x9c\x44\x4a\x66\xa0\x3c\xea\x29\xf9\x4e\x81\xf5\xc5\xa9\xc4\x0d\x15\x19\x18\xf0\x4f\xf1\xfe\x3a\xae\xd8\xed\xe4\xc2\xe6\xf1\xb4\xf1\x51\x90\x21\x2e\xae\xd5\x20\x11\x5a\xdb\x47\x45\xec\xcf\x14\x07\xab\x06\x53\x4e\x21\xf8\xbc\x39\x6d\x5f\x27\x09\xec\xa4\xf2\x7b\xa5\x79\x5e\x4d\xcb\xda\x30\xf1\x94\x1d\x12\xa6\x90\x5c\xb3\x1a\x4b\x60\x1a\x6f\x98\xbf\xb8\x85\x5e\xe1\x01\xd8\x51\x6a\xd1\x9b\x1c\x52\xfc\x0f\xa3\xb8\xef\x14\x8d\xa6\x33\x7c\xb3\x53\x0f\x59\xdf\x86\x64\xda\xe9\x10\x30\x06\x2d\x92\x08\xd7\x35\x03\x03\xbc\x07\xaa\xfb\x83\x45\x7d\x84\x79\x87\xa3\x43\x3b\xc7\xd4\xef\xe5\x05\x00\xcc\xe4\x03\xd8\x43\x34\xb4\xac\x35\x86\xdc\xc6\x44\x24\x71\xc0\x7c\xac\x48\x31\xa4\xd3\x39\x41\xda\xd4\x82\x14\xc3\x09\xfc\x93\xea\xdd\xa0\x38\x35\xfa\x32\xdf\x0e\x88\xbc\x51\xab\x95\x68\x03\xad\x67\xa8\x5b\xda\xae\x21\xbf\xd1\x09\x40\x24\x45\xc4\x0f\xe9\x27\xd0\xdd\x45\xcf\x7c\x85\xe4\x13\xa5\xfd\x7f\x71\x0c\x1d\x30\x3b\xc5\x2f\x91\xe0\x93\x
bd\x33\x1a\xb9\xad\xe6\x4a\x5c\x93\x48\xc5\xc3\x1c\xa7\xca\x8b\x48\xba\xe2\x3e\xaa\x6e\x9b\x1e\x2b\x80\xa5\x9b\x0f\x40\x5b\xa9\xbf\xd1\x89\x29\x85\xbc\xb2\xb6\x54\x00\xbc\x58\x4b\x00\x60\x6f\x1f\x05\x71\x72\xce\xca\x81\x6e\x3d\x6d\x72\x1c\x42\xdb\x43\x9c\x5c\x7d\xb4\x7d\xbc\xd2\x3a\x98\x21\x7c\xb9\x17\x9d\xc7\x68\x2d\x71\xb2\x34\xef\x99\xa1\x21\x3c\x77\xcf\xec\xe7\x9c\x98\xf0\x7a\x33\xf3\x83\x68\xbd\xdd\x6e\x4b\xbc\x7d\x22\xab\xc2\x5c\x08\x7d\x13\x48\x1b\xd5\x15\xeb\x4e\xa0\x96\xc7\x6c\x8c\x6d\x0c\xa4\xb1\xe9\xb9\x18\x09\x17\xa8\xf7\x89\x60\x7a\x0f\x8d\x89\xb8\x5b\xf0\xa7\xf6\xc2\x5a\xc3\x59\xa1\xf8\x4a\x57\xb1\x28\x2a\xe7\x4c\x75\x94\x2b\x46\x8b\xe1\x5b\x87\xbf\xde\xc4\x60\xd9\xf1\x83\x7e\x46\x5b\xd5\x78\xd1\x84\x36\x1d\x48\xce\xce\xdb\x9e\x7f\x71\x22\x54\x99\xea\x41\x0d\x42\xf3\x85\x53\xfd\x90\xfb\x61\xbe\x4a\xa3\x4c\xc8\xdd\x7b\x37\xbf\x4e\xe7\xc7\x69\x4e\xec\x25\xda\x5c\x22\x7b\xea\x56\x56\xd5\x19\x65\x18\x8b\xe3\x44\xf3\x49\x62\x15\x4f\xbc\x46\x2a\x64\xd2\x9f\x0f\xab\x46\x93\xd9\x7b\x43\x3d\x59\xe2\x8c\xe7\x93\x8a\x25\x15\x65\x67\xea\x32\x98\x1c\xc9\xce\xd9\x25\x3e\x92\x32\x62\xc6\xce\x0e\xa7\xbb\x21\x48\xf4\x29\x69\xb5\x8d\xa9\x22\x4e\x92\xef\x11\xf1\x9f\x0e\x23\x43\x6a\x91\x7e\x7f\x07\x97\x1b\xee\x4a\xbd\x0b\x55\x95\xd2\x93\x71\xb2\xd7\x45\xb5\x14\x15\x7e\x02\x82\xa9\x5f\x57\xe4\x2e\xa9\x31\x2d\x51\x62\x63\xa6\x64\x1c\xb5\x56\x59\x4d\xb4\x5e\x59\xd8\xd4\xa3\x05\x60\x44\x68\xbb\x28\x90\xae\x3e\x0a\x4d\xea\xae\x23\xdc\x71\xbe\xd7\x88\xa9\x2d\x72\xa9\x4c\x48\xdf\xec\x0b\x61\xa3\x5d\x47\xf1\x83\x45\xa9\xfe\x91\x64\xb7\x5a\x30\x8c\x08\x39\xd4\xb3\xc3\x22\xd8\xf0\x0c\x16\xbb\x7d\x10\xfe\x0d\x38\xcc\xa4\x83\xad\x01\x21\x91\x71\x21\x6e\xc1\xac\x0c\x49\xe8\xed\x62\x30\x2e\x67\xc8\x49\x8b\xe7\x87\x6d\x55\x79\x88\x48\x4f\x78\x25\xb6\xa1\xfb\xd9\x97\x97\x29\x6f\x6c\x19\xfb\x03\x33\x5b\xdb\x54\x87\x3b\x8d\xee\xbd\xf3\xdb\x3f\xe1\x56\x51\x7c\xdb\x9f\x11\x23\xfa\x7f\xd1\xb5\xec\x31\x6f\xc2\x6e\x2d\x10\x66\x1d\x7e\x3d\x76\xe8\x98\xdc\xac\xd6\x7d\x8c\xca\x53\xdf\x49\x47\x2e\x3f\xe8\x04\x79\x82\x89\x16\xb5\xd3\xb9\x98\x4c\x08\xa1\x1c\x09\x7f\x6f\xea\x98\xe2\x70\x6a\x25\x76\x1b\x9d\xb5\x1b\x21\xf2\x00\x51\x9f\x8a\xd3\x52\xd1\x0b\xa8\xd6\x95\xbc\xff\xa4\xe1\xce\xb7\x1c\x01\x2e\xac\x8b\xb7\x9d\xe4\xa8\x28\xaa\x0c\xe8\x79\xa4\x0a\x3f\xe2\x17\x3a\x8a\x3d\x30\x85\x6b\xe4\xe5\x73\x3d\xaa\x65\x60\xc5\x8b\x9c\x22\x8a\x4f\x6b\x3c\x0b\x98\xdd\x9b\xfb\x4b\xf8\x8e\x34\xb1\x8e\xe3\x42\x4a\xdb\x42\x56\x94\xf6\xb2\x83\x02\x33\xb9\xc4\xbf\xd9\x6e\xe2\x4c\xd9\xe8\xb6\x96\xbb\x29\xbf\xc7\x3a\xc0\xe4\x93\xe1\x77\xe5\xf5\x9c\x84\x6a\x96\xc4\xab\xc0\xda\x4b\xc9\x17\xf3\xd8\x39\x90\xb6\xba\x8a\x75\xf3\x42\xd8\x53\x4f\x9d\x11\xeb\xac\xd9\x1e\x72\x32\x16\xb6\xa3\x90\xe1\xad\x8a\x3e\x3c\x60\x62\x05\x74\x72\xcd\xe9\xc9\x45\xc0\xb4\x6f\x81\x70\x14\xcf\x95\xf6\x21\xc0\xd1\x70\x14\xe9\xc3\x62\x30\x0e\xd7\xbb\x7c\xba\x55\x0b\x98\xd3\xa9\xc5\xf4\x4a\x94\x27\xbb\x02\x3f\x2d\xba\x26\x8e\x4e\x8c\xbb\x3d\x19\x90\x07\xa7\x2e\xa3\x2a\xa0\x31\xfa\xdb\x3e\x74\xe2\xba\x6b\x56\xfa\x56\x2e\x18\x5e\x10\xb8\xc4\xe2\x0d\x8e\xd6\x39\x4a\x9d\xb0\xd4\xd8\xc8\x11\xfe\xd1\xdf\x6f\x1f\xcf\xc5\x56\x02\xbe\x27\xcb\xb7\xf2\x95\x9c\x56\x1e\xdd\xe4\x76\xdd\xaa\xbc\xac\xf8\x79\x48\x20\xaf\x68\x17\xf3\x52\xca\x87\x61\x05\x97\x58\x06\xa7\xa4\xca\x82\x81\x28\x1d\xce\x5f\x90\x49\x4c\x7b\x91\xb5\x90\x65\xc5\x47\x28\x9b\x43\x91\x69\x33\x2f\x5a\xca\xe5\x05\x78\xc2\xdd\x68\xea\x28\xc1\x4f\x63\x09\xd9\xb6\xed\x23\x56\x45\xfc\x12\x94\xe0\xc0\xaa\x50\xa1\x71\xab\xb8\x6e\x1c\x29\x43\x32\xff\xcd\x45\x46\x01\x58\xf9\xad\x3f\xc4\x68\x31\x0b\x40\xa1\xfb\x05\x0f\x49\x55\x36\
x6f\xf0\xb6\xae\x14\x2c\xb5\xc4\xce\x17\x8d\x22\x4b\x89\xaa\xde\x42\xef\xaf\x31\x82\x17\x19\x46\x7d\xd8\x87\xee\xcc\x92\x8a\xa2\xd8\x3d\x69\x1a\xb9\x22\x4e\x76\x4e\xf9\xc4\x34\xcb\xed\x41\x26\xae\x7d\x79\xa0\x76\x05\x4b\x64\xcf\xee\x93\x54\xa8\x5b\xa4\x97\x8d\x21\x92\x6b\x3c\x6f\x63\x24\xa6\xd6\x0f\x57\x3b\x96\x51\x67\x06\x02\xbe\xd3\x59\x10\x1c\x3a\x14\x39\xea\x69\x72\x70\xf3\xfd\xe5\x62\xdf\xce\x62\x36\xe6\x97\x3b\x85\x8b\x65\x40\xa2\xb5\x0a\x02\x61\x8d\xe4\x35\x54\xce\xe3\xe0\xe7\xe3\x41\x46\x07\x60\x0e\xbf\xb8\x64\xb7\x0f\xc6\x57\x46\x10\xd7\xe5\xe4\xee\x2a\x9d\xcc\xba\xfe\xa6\xd3\x2d\x29\x91\xe8\x37\x87\x0a\xd8\xa8\x2e\x42\x80\x56\x97\x6a\xfc\x6a\x51\xbc\x19\x06\x94\x49\xdd\x35\x6a\x33\x62\xc2\x07\x8e\x7d\x45\xbe\x3a\x2a\x57\xd6\x35\x05\x36\xcd\xd9\xef\x6f\xac\xc7\xb0\xf4\xf7\x17\x09\xe7\x7a\xf6\x41\x7c\x22\xf3\x63\x2f\xbf\xd0\x47\x09\xe7\xee\x05\x04\xc9\x3e\x7c\xee\xb1\x44\x04\xce\xd9\xaa\x6f\xd7\xaa\xd6\xe1\xf7\x2c\xaf\x7c\x48\x5b\x60\xb3\x79\x27\xf3\xc9\x5c\x02\x68\xe7\x61\x60\x73\x13\xe4\x9f\x78\x50\x32\x14\x6d\x72\x8a\x43\x31\x49\x51\x6b\xc8\x34\x66\x4b\x04\x01\x78\xa0\x03\x66\x03\xa7\x4c\x62\x4a\x0c\x64\x29\xab\xcf\x81\x1d\x18\xa2\x58\x43\x4e\x46\xd1\xa9\xfe\x9c\x12\xa2\xcd\x84\xe0\x70\xa1\x39\xa3\x01\xf8\xf4\x84\xe7\x95\x58\xea\x56\xdd\x10\x2f\x31\x47\xe7\x8c\xc8\x26\x38\xfc\xb1\x0d\x6c\xf5\x28\xde\x2f\x9f\x93\x8f\x46\xc9\x66\xc0\x73\x78\xb3\x05\x56\x5b\xfa\xf5\x2c\x55\x04\xa5\x62\xe0\xc6\x1c\xda\xac\x58\x1d\x39\x58\x84\xd6\x36\x2d\x99\x5a\xd4\xb2\xca\x49\xa1\xe0\x87\x78\xdf\x21\x93\x01\x1f\x56\x32\xe7\xaa\xf9\xd4\x16\x9b\x23\xeb\xc3\xc7\x27\x96\x84\x13\x02\x5f\x1f\x52\x98\xb2\x37\x82\xbd\x3f\x38\x59\xf2\x92\x2b\x5a\xb9\x6c\xc4\x29\x4e\xdb\x3b\x06\x9b\x53\x80\x13\x29\x87\xca\x22\x30\x09\xe3\x26\xd7\xf4\x48\x43\x0f\xe1\x76\xa6\xe1\x19\x38\x97\x50\x33\x33\x84\xeb\xff\x86\x9e\x09\x00\x2e\x7f\x6b\x74\x3f\x66\x6d\x5b\x28\x48\x4f\xe5\xde\x10\x11\xa8\xc5\xa2\x55\xd6\xea\x16\x57\xe7\x91\x58\xd6\x04\x49\x92\x2b\x94\x7e\xa6\x00\x6d\x42\x09\x6f\x59\x04\x31\x0e\x27\x2a\x32\x56\x0e\x7e\x7c\x8b\x49\x4c\xb6\xf3\xa8\xcf\x7c\x29\x7e\xc8\x64\xb0\xc2\xf8\x4a\x9b\xa4\x6d\x6d\x65\xfd\x9d\xd6\xac\xf2\xa7\xa8\x8b\x0c\x05\xa7\x51\xa1\x98\x86\xda\xba\xcd\x53\x7b\xd8\x70\xe7\x89\x82\xc2\x35\xf6\xdb\x13\x32\x0b\x57\x95\xb6\x73\x70\x4e\x81\xfc\x51\x5e\x9e\x63\x28\xfa\x3a\xfd\x8b\x80\x7a\xf6\x27\xae\x94\x9f\x43\x1f\xfa\x4f\x5f\x7c\x5a\x4f\x52\xcd\x3f\x70\xe3\x15\xac\x82\x6f\x51\x59\x30\xaa\x8f\x8a\x2f\xb6\xa0\xcf\x7a\x6c\x71\x23\x96\x05\x97\x20\x62\xe0\x9e\xfa\x16\x02\x9c\x2a\x22\xc5\x40\xc7\xf2\x7a\xcb\x87\x87\xdc\xa2\x56\xe1\x69\x48\x9b\xc6\x31\x10\x0a\xd7\xb0\x05\x00\x50\x59\xcc\x79\x9c\x5a\xf5\x24\x0f\x49\x2c\x3f\x20\x03\xf3\x48\x4a\xd6\x41\x99\x7c\x4c\xea\x07\xa9\x52\xa7\x73\x11\xaf\x75\x94\x63\x21\xaa\x64\x93\x55\x79\xf8\xbf\x2c\xfd\x27\x78\xcc\xf2\x81\x68\x34\x58\x06\x3d\xed\x0b\x4e\x4e\xfd\xdf\x98\xa9\xce\x51\xaa\x0a\x3e\x9f\xd6\x9a\x3b\x45\x46\x21\x72\xfc\xbb\xf9\x4c\x6b\x5c\x64\xe5\x46\xb3\x26\xfe\xf3\xda\xc1\x30\x0a\xae\x4a\xf9\x21\xe9\x5e\x2c\x5d\xfb\x83\xee\xd2\xb0\x19\xf5\xc2\xbe\x9a\x39\xfb\x7a\x02\x6c\xbf\x11\xb7\xbf\xbc\x00\x50\xd0\xbe\x60\xaa\x0a\xdc\xe4\x85\x4a\x01\x84\xde\x4c\x88\x40\x7f\xe6\x67\x28\x77\x2b\x61\x8d\xda\x74\x36\xb7\x2c\x60\x78\x0e\x3d\xac\x0a\x4a\xa3\x4b\x75\xb6\x6e\x32\xaf\x34\x7a\x93\x4a\x91\xad\xf2\xa5\x88\x75\x34\x03\x9a\xde\x69\x64\x27\xdd\x2f\x9a\xac\x35\xf0\xd8\x5c\x63\xcd\x07\x51\xd8\x20\x98\x07\xab\x04\xf4\x3e\x43\x7a\x40\x23\xfa\xe2\x31\x96\x36\x3f\xa0\xa0\xea\x66\xf1\x4a\x8e\xe0\x5c\x1b\x41\x0a\xf9\xe1\x72\x9f\x78\xf5\x31\xc8\x58\xc3\x6a\x1c\x01\xc8\x55\xad\xde
\x79\x25\x57\x79\x76\xe5\x8e\xac\xb2\x1c\x48\x8f\x5f\xa3\x03\xfa\x20\x7b\xc9\xc1\xca\xdb\x59\x48\xe0\xb0\xa0\x09\x14\x62\x2a\xed\xa4\xd6\x3e\xa9\x8b\xd7\x70\x02\x9a\x6e\x78\x38\xae\xb3\xd7\xaa\x43\x8c\x6f\x67\x83\x1e\xe0\x33\x69\x61\xd4\xd5\xac\x95\x22\x8f\xb6\xb6\x3e\xe2\x3e\x2d\x92\xc2\x5a\x49\x30\x25\x3e\x1a\x24\xa8\xf4\x8f\x35\xae\x18\xbc\x59\xfa\xa9\xca\x30\x80\x5a\x3d\xca\x5b\xbc\xfc\xb3\x44\x1b\x22\x62\x5b\x5b\x4f\x74\xa1\xcc\x3e\xae\xc6\xec\xa6\x9c\x3d\xa8\xbb\x97\x8f\xb7\x8a\x68\x59\x00\xf1\x82\x92\x81\x39\x1b\x45\xa3\x27\xd4\x64\x51\x5c\x97\x0b\x4c\xba\x8b\xe6\xcb\x0f\x83\x0b\xa0\x45\x2a\xfe\xb6\x78\x6b\x86\x55\x34\x57\x9f\x98\xa4\x24\x37\xea\x87\x84\xf7\x0c\x94\x21\x42\x5a\x44\x29\x1c\x2b\xee\x80\xd8\xca\x3c\xe2\x73\x72\xc4\x25\xb6\x77\x7a\xa3\x0f\x4c\x99\xc2\x97\x62\x3e\xfd\x99\xc2\xf6\x2a\xa6\xb4\xae\xe9\x80\x73\xb8\xef\x82\x89\x6a\xda\xe2\x44\xf2\x7a\x55\xe3\x03\x71\x13\xc4\xcc\x97\xa4\xbb\x85\xa1\xcd\x0d\xc0\xe8\x27\x1c\xc9\x73\x27\xe5\x77\xfa\x1e\x18\x07\x04\x0f\x2f\x01\x8d\xf7\xfd\xed\xfe\x00\x72\x4d\x09\x6a\x6d\xa6\x40\x39\x7b\x5b\xab\xc2\x38\xd4\x4d\x1b\x62\x77\x9d\x3a\x8f\x3a\x4c\x70\x88\x7b\xfb\x78\x0c\x56\x5b\xb4\x99\x51\xff\x90\xb3\x58\x1b\x67\xf4\x54\xe5\x5d\x42\x91\x3a\x91\x9b\x59\x29\xb6\x70\x25\x8f\xd3\x5c\xc7\xee\xe0\x54\x6c\xe5\xcb\xfa\xc4\x38\x05\x57\x3b\x80\x9c\xc5\xc0\x17\x1d\xbb\x80\x14\xb1\x42\xf5\xf9\x29\x79\x1f\xd4\x86\x9e\x98\x7c\x6c\x83\xb8\x9c\x1f\xc5\x4c\x64\x72\x04\xcb\x00\x61\x9d\xfd\x62\x1b\x60\x1a\xb8\x3e\x0e\x0f\xc9\x99\xb5\xdb\x56\x98\x35\x30\x75\xc4\x9a\x41\x89\xc5\x28\x77\xd9\x73\xbc\x3f\x93\xe5\x73\x14\xb3\xd3\x21\x41\x5d\x49\x18\xd1\x44\x22\xa0\xaf\x17\x0c\x10\xd5\x63\x77\x29\xee\x3c\x86\x47\xa0\x61\x56\xb7\xa4\x3b\x3b\x5f\x45\x83\x5c\x7d\xd9\x27\x94\xc6\x1f\x94\xdf\x39\xe7\xc5\x57\x3f\xe2\xd0\xd8\xf8\x46\x72\xce\x5e\x25\xad\x28\x39\x96\xf7\xe4\xfb\xdd\x8a\xc4\x47\x68\x8f\x67\xa7\x68\x2d\xdb\x82\x33\x25\x77\x26\x50\xe4\x01\x9a\xf6\x64\x97\x9f\x3e\x65\x04\x1d\xce\x18\x1c\xe5\xcd\x5e\xaa\xfc\xb7\x20\xd9\x5a\x86\xea\x82\xb5\x16\x2b\xe7\xac\x43\x89\xd1\x89\x32\xcf\x78\x32\xc7\x43\x20\xb8\x18\x79\x01\x12\x99\x63\xec\xdb\xec\x43\xaa\x59\x8d\x9a\xc3\x2f\xa9\x76\x4a\x2f\x3d\x31\x84\x79\x72\x39\x12\x39\x28\x2e\xd4\x34\xa1\x57\x9c\x97\x8c\x94\x83\x3b\x9c\xcd\xbd\xa5\x09\x70\x1d\x18\x3b\xc8\x5c\xa1\x5a\x1b\x8b\xbe\xab\x98\xf0\x93\x62\x9c\xb3\x4e\x34\xb5\x17\xee\x83\xcd\x7f\x47\x1b\x1f\xf2\x30\xb4\x82\x3a\x1f\xde\xeb\x7c\x70\x3c\x84\x1c\x05\x6b\xe9\x47\x48\x8f\x06\x25\x94\xde\x94\xa8\xfe\x00\xce\x08\x2e\xed\x3d\xda\x11\x5c\x0c\x6e\x95\xc0\xc3\xab\xb7\xae\x17\xe1\x52\xd4\x24\x33\x2d\xb1\xfd\x98\xae\x0d\x91\x01\x90\x0c\x4d\x06\x15\xd5\xe6\x34\xe0\x5f\xb7\x5d\x93\x0b\x1b\x5a\x75\x3c\x43\x57\xec\x8f\xca\x1b\x7a\xa2\xf5\x94\x94\xc6\x21\x84\xd6\xd6\x5b\x99\x76\xdb\x4e\x5c\x60\x95\xa3\x02\xcd\x15\x51\x5e\x30\xa8\x94\x3a\x52\xcc\x9c\x98\x92\x82\x20\xd3\x9f\xc1\xe2\xf6\xe6\x2f\x14\x3b\xe2\x50\xb0\x51\x85\x21\x32\x39\x2a\x1b\x6b\x34\xf0\x78\x80\xea\x64\xa5\x37\xbd\x47\xe6\x07\xe5\x8a\x36\x56\xbf\xc5\x12\x21\x73\x37\x4d\x6e\xfa\xc1\x8e\x99\x68\xb7\x62\x3e\x0f\xeb\x99\x2e\x9c\xa6\xa4\x2b\xec\x99\xa6\x93\x3a\x1a\x4b\x64\x72\xa0\xd9\x58\xd9\xd2\x7a\xb0\x75\x40\xab\x4e\x76\xef\x17\x17\x6e\x8c\x72\xa4\x41\x28\xa7\xae\x4c\xe6\x64\x02\x54\x45\x72\xd8\xc8\x00\x58\xbd\x3a\xf9\xe3\xac\x7c\x65\x1d\x3d\x90\xf7\x24\x1f\xbd\xf5\x1c\x5a\x3f\xde\xb8\xb1\x99\x58\xb1\x43\x5e\x12\x48\xb3\xfc\xc9\x0a\x43\x5c\x0e\x12\xe4\x97\xe0\x73\xd5\xe7\xaf\x8e\xbc\x36\xe3\x0c\x31\xe5\x8b\xad\x83\x43\xfe\x78\xc2\x9c\x93\xb7\x43\xb1\xaa\xf2\x6f\x86\x0f\xf8\x16\x66\x41\x7e\xd5\x38\x39\x6a\xbc\x95\x4
c\x1d\x95\x5a\xe2\xd4\x39\x48\xfe\xed\x2c\x4d\x34\x1a\xde\xb0\xc4\x57\xa0\x56\xf6\xca\x01\x8c\x88\xa6\x72\xdf\x59\xf1\x19\x3a\xe5\x3f\x62\xa8\x92\x83\xb6\x04\xd1\x55\x5d\x81\x8f\xba\x48\xe2\x00\x7f\x3c\xbb\xc2\xff\xcc\x2c\x77\x0e\x57\xbf\x01\x7b\xc5\xf7\x57\x09\x22\x2e\xae\x04\x96\x91\x82\x5a\x40\xfb\x66\x59\x43\x92\xf8\x93\x1a\xa9\xc1\x1c\xac\x22\x60\x20\x80\x52\xe3\x05\x50\xdd\x95\x2c\xb0\xbe\xc2\xf7\x54\xef\x43\xac\x35\x4a\xbe\x42\x3c\x61\x0c\x1b\x46\x5b\x9f\x97\x91\x50\x26\xc9\x54\x2a\xcf\xe1\xe5\xc5\xf9\xbd\x86\x08\xf7\xb7\x33\xe9\xdf\x13\x3b\x7e\xb6\x82\x63\x21\xf1\xf0\x1f\x3e\xd0\x40\x0c\x4d\x32\x39\x71\x58\x64\x98\x1c\x98\x30\x45\x1d\xd6\xca\x03\x0b\xda\xcf\xe0\x7d\x4b\x2a\x22\xd3\x60\x1f\x82\x55\x56\xaa\xa7\x11\x06\xd7\x2e\x92\xe1\xea\x22\x28\x1a\xd3\x88\x17\x6a\xe3\x2a\x3d\xe2\x5b\xac\x91\xe9\xc8\xeb\x3a\x69\x85\x86\xf7\xc9\xbe\x71\x47\x9c\xba\x7b\x94\x63\x9a\xac\xe1\x48\x54\x13\x10\x1b\x89\xd3\x39\x87\x8e\x54\x6b\x24\x05\x3a\x83\x02\x02\xfb\xf9\x09\xe8\xc9\xb9\xa2\x87\x85\xa0\x18\xa9\xc8\xc6\x0b\x50\x0a\xed\x72\xbd\x90\x7e\x6e\xe1\x97\x0d\x1a\xf9\xe2\xab\x13\x47\x7e\x62\xd8\xbc\xca\xd4\xe1\xbf\x19\x47\x56\x92\x0b\x40\x9c\x9c\x9f\x81\x80\x1f\xe3\x7c\x27\x09\x62\x3a\x63\x7e\x03\xca\x82\xb6\x95\x60\xde\x43\xf0\xf1\xc8\xaf\x70\x2a\x9e\xfb\x00\x1d\x35\x8c\x40\x6c\x60\x21\x9c\xc0\xdf\xd8\xed\x0e\xa1\xab\x87\xc1\x04\x07\xd1\x53\x0a\x73\x9e\x81\x0d\x31\xc7\xbc\x17\x72\x26\x51\xdf\xd8\x1d\xd8\xd8\x8a\xac\xc2\x9e\xc8\x85\x68\x1c\xda\xef\xfa\x7a\x2b\x07\x0b\x4a\x92\xce\xe9\x14\xd1\xe7\xf8\x1a\x17\x9e\x36\x20\x90\x21\xab\x56\x7d\x05\xee\x69\xfd\x0f\x56\xa7\x99\x81\x16\x7c\xd8\x07\xa1\x27\x60\x54\x88\xbd\x03\x3c\xfc\x1d\x37\x66\x9d\x8c\x05\xdb\x3b\xba\x13\x0c\x7f\xa3\xb5\xc4\x68\x4c\x76\x03\xea\x90\x1f\xc8\xed\xd6\x72\x81\xc3\xa7\xfd\x54\xf6\xfa\xd1\xeb\xf4\x18\x33\x89\x6a\x66\x47\x51\x46\xef\xac\x0e\xfe\xb3\xd5\xe8\x07\x55\x82\xbc\x68\x8d\xba\xf0\xe9\x55\x8f\xdd\xe7\x06\xec\x71\x38\x90\x93\xec\x9c\x6d\x4c\x13\x05\x72\x8b\x12\x5e\x3e\xd8\xe0\xa7\xf7\xca\xac\x88\x7c\x95\x26\x7a\x18\x3b\x31\x6c\xd8\xd4\x88\x82\x65\x54\x57\x0e\x9e\x8a\x4f\x1b\x0f\x1f\xb6\xf9\x05\x42\x93\xc6\x05\x45\xe7\x8a\x77\x36\x6b\x8f\x0a\xc8\x5c\x31\x21\x49\x6b\xbf\xfa\x4f\xb8\xb6\x88\x3e\x3a\x4b\x62\x6b\x79\x8d\x29\xee\xef\xe0\x1d\x5b\x3e\x64\x3d\xd5\x9b\xf8\x2c\xe7\x47\x89\x35\x9d\x19\x68\x88\x51\xfa\xd3\x37\x89\xd2\x8f\xac\x33\xa8\xf6\xbb\x26\x4b\x98\x53\xb5\x67\xa0\x6f\x1a\x04\xb5\x42\x66\x01\x46\x24\xac\xea\x8e\xed\x9d\xe3\xc8\x5d\x3e\xbb\xde\x61\x3b\x59\x2e\xd9\xbc\xc4\xd9\x05\x14\x16\x9f\x2e\xb7\x00\x22\xd3\x45\x1a\xe2\xa3\x86\xf5\xb2\xc4\x5c\x86\xae\xf6\x06\xa6\xd8\x8c\x3e\x40\x33\xc9\x13\x48\x63\x07\xb3\xac\x4f\xf0\x29\x86\x9c\xe6\xf0\x1e\xd3\x27\x22\xdb\x28\x5e\xd6\xb6\x7e\x81\x0d\xad\x2b\x88\xe6\x67\xad\xc1\x90\x0c\x55\x15\xe6\xb4\x2f\xe2\x31\x5c\xc2\xf5\x44\x97\xf7\x15\x3d\x97\x47\xeb\x0c\x86\x64\xd6\x00\xed\x5a\xc8\x2f\x07\xad\xf5\x5c\x38\x74\xa8\x06\xcb\x3f\x38\x52\xa9\x32\xe3\x75\x54\xa1\xc8\x64\xd9\x86\xe9\x5a\x6c\x3c\xd9\x01\xbb\xb3\x71\xc2\x80\x6a\x3e\xbb\xaf\x13\x47\x05\xea\x4c\xf9\x0a\x21\x83\xe0\x3a\x79\x87\x82\x17\x83\x95\x57\x04\xdb\x84\xd2\x41\x22\x2e\x46\x1a\xa8\x42\x7c\x9b\x33\x68\x0d\x12\xec\xd3\x30\xbd\xef\xd3\x2f\x29\x69\x11\xa9\x3a\xe5\x3c\x0f\x5a\x6f\x8a\xf0\x51\x15\x12\x90\xf6\x15\x97\xf9\xfa\x8c\xce\x0e\x98\x29\x05\x65\x69\x0d\x74\x8d\xe2\x6d\x52\x2b\x76\x14\xb4\xa8\x87\x96\x84\x85\xfa\xd7\x1c\x4e\xb2\xf9\xbc\xcd\x8d\x48\x9a\xfc\xf2\xc4\x10\xf7\x3b\xb0\x7a\x0b\x9f\xd1\x02\x78\x24\x3a\x7f\xb2\x2b\x2a\xb9\x70\x98\x21\xab\x8b\x4a\xe5\xa7\x79\x34\xed\xc8\x7a\x3f\x8c\xf9\xb2\x91\x12\x36\x6f\x1b\x
f2\xe6\x09\x72\x3b\xde\x63\x43\x56\x5d\x18\x3a\xa1\x81\xfa\xb2\x4a\xf3\xf9\x32\x63\x2e\x8c\x7a\x79\x69\xc7\x1e\x04\xeb\xb4\x55\x9f\x82\x80\xf7\x2f\x6d\xde\x56\x89\x8a\xd7\x0f\x28\x5a\x97\x5a\x7b\xbb\xa9\xaa\xb4\xb1\x7a\x66\x40\x94\x5f\x73\x00\xfc\xf7\x5c\x31\x73\x43\x12\xa2\xc5\xfe\xba\x1e\x21\x92\x61\x02\xa3\x81\xbe\x81\xee\x5d\xbd\x0d\xfb\x38\xfc\x9c\x1d\xcc\xbb\x9f\x5e\x7a\x5e\x0a\x78\x8e\x4b\x1c\x23\x5d\x30\x7a\xca\xa0\x69\x06\xc6\x83\x86\x3e\x51\x68\x6f\x4a\xa6\x15\x6c\xc2\xd1\xca\x2b\xb2\x0b\xa9\xd2\x3d\x00\x4d\xdf\x1f\xf1\x4a\x9c\x5c\x53\x8d\x2e\xe1\xa4\xbe\xa6\x61\x33\xb8\x21\x62\x32\x01\xb5\x49\xa4\x32\x7a\x16\xac\x91\x89\xd4\x2f\x09\x66\x0e\xb0\x2a\x74\x78\x74\x42\x26\x55\xbd\x8d\xac\x86\x99\x3a\x2b\x72\x4c\x2e\x8c\x50\x0c\x2a\xe7\x0e\x84\x3a\xe6\x2d\x9c\xc3\xaa\x79\x63\x7a\x5c\x59\xd9\xcd\x61\x88\x18\xec\x18\xd1\xb5\x57\x06\x55\x3f\xd4\xd7\xdd\xd2\xf0\x95\x1c\x9a\x63\x93\x46\x32\x58\x2e\x36\x2f\xcd\x8e\xd6\x3a\xd4\x73\x5b\xb5\x3e\x12\x44\xd4\xb9\x7e\x38\x36\x66\xf6\xea\xfe\x1c\x06\xc9\x47\x87\x31\x01\x78\x92\x34\x94\x25\xbe\x32\xc4\x1b\x95\x06\x0b\x9b\x02\x1d\xe8\xb7\x8f\x7e\xbc\xad\xb8\x41\x3a\x87\x16\xf3\x9b\xe3\x3a\x96\x8f\xe0\xea\x89\x47\x80\x54\xc5\xa5\x85\x01\xa4\xdf\xdf\xe6\x7c\xad\xd0\x41\xd7\x42\x63\x23\x0a\x48\xc6\x17\xe9\xdb\xa7\xf7\x18\xca\x21\x02\x8f\xa9\xd0\x03\xb2\xed\x62\xd5\x4b\x6a\xff\xa2\xcb\xe5\xfa\x0e\xc8\x5f\xc4\x94\x6b\x6c\xc0\xc9\x6d\xed\x00\xa4\x60\xa0\x0d\x88\x38\x55\xfd\x29\xca\x99\x6e\xd5\x23\x06\xbd\x29\x79\xd5\x9a\xc3\x94\xb7\x42\xc1\x2e\xb9\x66\x54\xeb\xbd\x5b\x81\xc4\x93\xce\x0b\x32\x52\x8f\x1c\x0e\xd4\x52\x34\xd2\xc0\x77\x34\x15\x4d\xa8\x9b\x56\xd8\x83\x5a\x0c\xe5\xab\xdb\x57\xcc\x5d\xdc\x9a\x51\x87\xd5\x74\x72\xa4\xe8\xc1\x89\x48\xdd\x3e\xd9\x6e\x5d\x1d\xe1\xeb\x31\x5b\x1f\x07\x20\x08\xc7\xe0\x72\xa1\x74\xbf\x6a\x2a\xe1\x10\xee\xdd\x4d\xbd\xff\xd0\x8a\x92\xd1\xfa\x2d\xb6\x3a\x59\x70\xf5\x2c\xce\x44\x3f\xa9\x31\x1e\x9d\x4b\xa6\xcd\x89\x38\x41\xa4\xa6\x29\xd3\x18\x23\x62\xd8\xcc\x68\x49\x98\x93\x42\x25\x8a\xf1\x01\x6b\x3a\x36\x8d\x3e\xf3\x43\xb0\x30\xfd\xe3\xd0\x8d\x91\xbb\xc7\xd1\xd0\xa1\xb7\x9c\x3e\x56\x2c\x85\x19\x95\x5f\x7a\x00\xb9\xbf\x5b\xa4\x3d\x23\x00\xfa\x4a\xcc\xaa\xb5\x1f\xf3\xfe\x7d\x5a\xb0\x6b\xec\x88\x05\xfb\xd1\x51\x82\xa5\x70\x56\xed\x93\x77\x01\xc9\xd9\xf7\xd5\x54\xb8\xfd\x2b\x4e\x8f\xa4\x40\x02\x38\x4d\x63\x39\x9c\x75\x00\x1c\xe5\x90\x83\xf4\x30\xe5\x22\xc3\x38\xf4\x9c\x90\x4e\x97\x5d\x72\xd4\xcb\xc8\x7f\x86\xa4\xca\x6a\xc2\xbb\x99\xcf\x25\x6c\x6f\x35\x2c\x56\xdb\xc9\x18\x28\xb8\x17\x16\x56\xb1\x0e\xb3\xa4\xa1\xa7\xa5\x3e\xb8\x7a\xea\x3b\xe8\x63\x12\x9e\x03\xa2\xd4\x56\x51\x28\x10\x2a\xeb\xd5\x87\x12\xce\xd8\x83\x3e\xc8\x94\xa4\xae\x8b\x2b\x42\xab\xda\x87\x90\x1d\x35\x38\x48\xe8\x0b\xd5\xa7\xa1\x18\x4a\xf9\xeb\x58\xe1\xdf\xf0\xd9\xce\xec\x0b\xb0\xa3\x6b\xca\x59\x4c\xa8\xff\xd5\x4e\x46\xb0\x2d\x4d\xea\xe7\x41\xb0\x81\x23\x40\x15\x89\xf2\xc4\xb5\x76\x0e\x4f\x91\x8f\x64\xcd\xcb\xa4\x13\x16\x48\x9d\xe8\xe4\x2a\x1e\x7f\x30\x4d\x4e\x9d\x28\x8a\x73\xfe\xd2\x1a\x9d\x5d\xd0\xdd\x8f\xb1\xce\x75\xf2\xcc\xce\x74\xe0\xb8\x27\x18\x79\xeb\xf0\xe9\xf6\x0d\xe5\x4c\x92\xd6\xfb\x52\x70\xe9\x56\xd4\xe0\x56\x76\xcc\xac\xda\x54\x6b\xd1\xf8\x1a\x07\x51\x53\xb3\x12\x5c\x8c\x4c\x55\xa7\xe8\x6d\x15\xa8\x63\x3c\x20\x28\x12\x79\x69\xec\xa6\xd4\x21\x31\xe7\xba\x65\xbc\x08\x7d\xd4\x1a\x21\x00\x87\xbd\x63\x7d\x7d\x02\x14\x31\xc5\xbf\x78\xba\x09\xb3\xf1\x96\xc0\xdf\x6b\x96\x83\x86\xe5\xe4\x16\x4a\x71\x10\xfb\xcf\xea\xdc\x4c\x38\x91\x5b\x89\xdc\x0f\x4b\x72\xbc\x4b\xf9\x18\xa2\xb2\xea\x43\x01\x5f\xcb\xd3\xb3\xbf\xde\xc6\xe4\xe9\xc1\x2f\x63\xa2\x45\x99\x2a\x9e\xe0\
x00\xe6\x72\x30\xe1\xd3\x7e\x31\xe0\xa4\xe2\x1d\x21\x4b\x75\x2e\xce\xf5\xc8\x3a\x0f\xca\xbe\xb2\xf2\xea\x03\xc5\x08\x0b\xe3\x87\x3f\xde\x21\x87\xfa\x13\xd9\x2a\xa6\xa3\xca\xfc\x1e\x95\x0e\x00\xd1\x67\x5e\xaf\xbd\xdd\x54\xaa\xf6\x06\x37\x4a\x53\x89\x37\x39\xc9\x26\x22\xd1\xbb\x9f\x0b\x8a\xc6\xb2\x5f\x99\x3d\x85\xaa\xd7\x06\x8e\xbb\xcf\xd9\x56\x6e\x10\x81\xd7\x8b\xd1\x45\xf7\x32\xb0\xab\x89\x82\xbb\x39\x06\x8f\xa7\x7a\x7c\x49\xa8\xec\xc9\x9b\x02\x88\x45\xbf\xe1\x63\x4f\x21\xd7\xc4\x2c\xd1\xed\xb9\xcf\xed\x0d\xd3\xda\x48\xf0\x00\x0f\x21\xfc\xb2\x2f\xcb\x87\x66\x2a\x8f\x1c\xa5\x37\xa5\x47\x82\xbb\xd0\x06\xc1\x2e\x9e\x88\x38\x0e\xe4\xec\xa7\x57\x64\x21\x43\xe5\x4f\xb4\x33\xe6\xdd\x06\x70\xa6\x58\xb5\xeb\xb9\x47\x62\xd4\x3c\xd7\xe2\x44\xff\x0e\x65\x60\x55\xb1\xbb\xe8\xcf\x64\x58\x91\x1e\x36\x85\xed\x2c\x12\x90\x55\x57\xc1\x9c\x29\x39\xaf\x09\xb3\x2a\x63\x5a\xfc\xc8\x91\x27\xa9\x7c\xec\xfe\x6b\x26\x41\xd5\x8d\xd9\x76\xa8\xef\xba\xb8\x79\xa8\xc5\x27\x04\x87\x09\xfd\x29\xa0\x65\x3b\x47\xfc\x52\x22\x31\x08\x51\x72\x6a\xd0\x48\x3c\xae\xc4\xa9\x90\x79\x72\x8a\xc2\x20\x09\x1e\xb3\x69\x3b\xdb\x57\x4c\x1e\x25\x23\x32\x9b\x61\x89\x65\x25\x0b\xef\xa2\x02\xa6\x3c\x26\xee\xdf\xce\xd1\x23\x03\xb6\x1c\x57\x5e\x07\xb2\x62\x50\x48\x69\x2e\x74\xac\xb6\x2b\x7e\xbf\x7a\x50\xd2\x07\x09\xe0\x13\x3b\x15\xb7\x9a\x51\xd9\x45\x5f\x7f\x41\x0d\x05\x06\x01\xb7\x01\x5a\xbc\x19\xa6\x6c\x0e\xde\x64\xd1\x4f\x12\x64\x26\x4b\xbc\x27\x9d\x44\x3b\x84\x4b\x83\x73\xe1\xc5\xb7\xbc\x50\x6e\xd2\xb9\x31\x9c\x4d\x15\xa8\x20\x26\xef\xfe\xb9\x31\xbd\x53\x1b\x8d\x91\x71\xfd\x9e\x29\x00\x31\x94\xec\xdc\x89\x01\x86\x85\xa3\x98\x0a\xa3\xe0\x13\x6c\x8e\xee\x38\x61\xed\x28\xbd\x68\x08\xee\x33\x84\x64\xa3\x9e\x5f\x7d\x5b\xe3\x6b\xbc\xa9\xf4\x55\x24\x15\xb6\x3c\xa1\x9f\xa8\x26\x14\x9f\x9c\x4c\xb8\x94\x1e\x31\x8f\x59\x09\xbb\x6b\x68\x5b\x0b\x81\x7d\x27\xfd\x92\x1f\xa1\xc1\xa0\x35\x88\xfb\x0c\xbd\x1f\xdd\x6d\xdd\xa1\x3e\x8d\x91\x1a\x6e\xe6\x3b\x55\x8f\x0a\xb1\xd3\xe8\xc1\xec\x22\x09\x6f\xc0\x80\x39\x24\xa2\x73\xaf\xd1\x5b\x8d\x9f\xb4\xca\x63\xfc\x70\xf2\x55\xd0\xaa\x3a\x4b\xea\xa0\x84\x86\x69\xe9\x1a\x2f\xd9\x2a\x56\x56\x7a\x34\x89\xb5\x12\x5b\x1f\x1d\x1f\xd6\x85\x85\xa0\x18\x10\x4f\x63\x67\x4e\x0b\xa0\x09\x48\xde\xd8\xd8\xfd\xe2\xe2\xb1\x98\xea\x02\x87\xd8\xe6\x1e\xf2\x63\x64\x13\xf8\xb2\xee\xae\xc4\x57\x7e\x21\x96\x57\x9a\x0f\xe2\x32\xfa\xad\x9d\xe9\x4d\x5c\x24\x72\x3e\xc2\xb5\xb0\xb7\x24\xa9\xfc\xc8\x09\x29\xc4\x6e\x64\xbd\x80\xbe\xb9\x58\x8d\x0d\xd1\x71\x6d\x7d\xce\x54\x01\x4f\x52\xdf\x8b\x79\x18\x85\x16\xc4\x4c\x8c\x86\x0d\x2f\x71\x28\x98\xee\x24\xf0\x02\x6f\xa6\x8a\xe3\x08\xa4\x2b\xae\x05\x86\xe4\x93\x73\xd9\xa9\x68\xd0\x7e\xc1\x20\x6b\x61\xb4\x47\x9a\xbd\xa9\xfd\xa6\xe9\xaa\x28\x91\x5e\xe4\x8a\x5d\x91\xdc\xc2\x22\xae\xc0\x4c\x83\x4d\xa9\x27\xef\xb0\x71\x02\x14\x67\xc2\x1e\x9e\xf1\xab\x4a\x2b\x75\x07\xda\xe8\x63\x4d\x3e\x7a\x6b\x8f\x23\x6e\x30\x99\xce\xcd\xf2\xb9\x15\xa1\xe7\x5a\x96\x02\x98\xcd\xa7\x50\x8f\x61\x3d\x72\x35\x59\x4d\x91\x5f\xda\x7d\x54\x96\x13\x10\x16\xd2\xd7\x97\x31\x35\xd4\x9a\x8c\x33\xc7\x75\x54\x5c\x1e\x03\xf9\x75\x33\xed\xe6\x7c\x5d\xa0\xde\x29\x12\xe8\x17\xa2\x97\xe4\x02\x24\xe2\x65\x6d\xf3\x5a\x96\xc2\x22\x11\x62\xa1\x95\x64\x0d\x14\x9f\xa3\xb7\x7b\x66\x17\xd7\xa9\x63\xf0\xa0\x4a\x31\xe0\x92\x7e\xd5\x7c\x0e\x3d\x72\xe6\xad\x69\x1b\x0b\xdf\x65\x1f\xca\x73\xaa\xcd\xd8\xb0\x77\x5b\x27\xce\xb5\x75\xcf\x55\x37\x39\x07\x81\x1a\x65\xff\xec\xa0\xbd\x5c\x5c\xf4\xa6\x6f\xb1\x81\x40\xcc\x40\x5c\xb2\x42\x47\x62\x51\xcd\x10\xc5\xc4\xb5\xd0\xa4\x3c\x95\x9f\x82\x23\xf3\xfc\x2a\x5c\x5d\x11\xa9\x89\x9a\x08\xc3\xf8\x20\x62\xa9\x9c\x2e\x5c\x6c
\x6d\x66\xea\x0e\x49\xf5\x8a\xee\x6d\x8d\x92\x0d\xb1\xab\x14\xf3\x52\x97\x3c\x06\x4c\xdd\x43\xbe\x52\xce\x95\xcd\x1c\xb2\xd4\x15\x7d\x86\x7a\x3d\x43\xa7\xda\x97\xf6\x05\x1b\x74\x74\x29\xc1\xe2\xc1\x01\x57\xfc\xc2\xbd\xf6\x19\x94\x14\x79\xb3\x78\xd4\x25\xa5\xde\x00\x07\x64\x30\x7d\x76\x88\xc1\x6c\x85\x74\x80\xed\xcf\x84\x87\xcd\x0e\xe6\x49\x9e\x50\x80\xeb\xb2\xc1\x06\x62\x47\x35\xb2\x59\x59\x43\x7b\xf6\x8d\xf6\xea\xe9\x7a\x52\xa6\x2e\xa3\x4b\x32\xc4\x3a\x16\x50\xd2\xd8\xf2\x9c\x6e\x8a\xdf\x01\x9a\x37\xd1\x20\x06\xab\x63\x34\x87\x36\xff\xd0\x33\x35\xd4\xa7\xfa\x0a\x8c\x80\x73\xe0\x19\xb6\x20\xb3\x34\x48\xd3\xc7\xf8\x66\x82\x61\x26\x87\xc1\x93\x2c\xdf\x61\xb7\x4b\x70\x4d\x46\x8d\x8a\xc0\xe8\x71\x16\xed\x6d\xc3\xa1\x40\x76\x39\xfe\x50\x7a\xe5\x6a\xf8\xcc\xf3\x56\x76\xe0\xa6\xd1\x07\xf2\x69\x17\xcf\x98\x69\xc1\xac\x45\xd3\x0f\xf2\x61\x8e\x91\xfd\xd6\x70\x79\x64\xd1\xf3\x3f\x9c\x7f\x67\x7f\xfb\x14\x5d\x53\x4d\x87\xb5\x80\xc2\x92\x49\x6d\x0d\x95\xe7\xcd\xa5\x17\x55\x6f\xc5\x76\x35\x77\xff\x85\x9f\x88\x9a\xed\x26\x0e\xfc\xc3\xc4\xde\x5a\x38\x12\x4e\x75\x3a\x78\xed\x9e\x20\x1f\x98\x4f\xf7\xde\x1a\x37\x21\xe7\x68\x9c\x8c\x99\xae\x6f\x51\xef\xe8\x43\x7f\xad\x26\x07\xc6\xa2\x67\x6f\xb5\x3b\x1b\xc7\x7d\x9a\x95\xc7\xf0\x5f\x07\xdb\x24\xee\x76\x52\x66\xe2\x6c\x12\x71\x6a\x3e\xcb\x57\x05\x64\x08\xf6\x64\xad\xb7\x60\xee\x9b\xea\x54\x44\xff\x8a\x22\xe1\x6f\x47\x91\x55\x71\xee\xe3\xeb\xba\x01\xd5\x71\x85\xdc\xc4\x02\x28\xc7\x26\xa3\xf8\xb5\x47\x97\xd2\x6d\x63\x36\x2c\x6a\x59\x4a\xb3\xf2\x01\x14\x79\xc8\xf9\xe7\xe0\x83\x69\x76\xea\x26\xee\x85\x92\xcd\xc9\xe7\xb2\xfd\xa4\x83\xf4\xce\x4d\x19\x36\x6e\x5a\x2a\x53\xee\xf0\x4b\x18\x67\xb5\xf9\x1e\xc8\xe5\x4e\x56\x1f\xac\x76\x02\x0b\x2e\x36\x89\xf3\xd8\x95\x87\x00\x24\x4d\xa7\xed\xc3\x13\x1e\x18\xaf\xa9\x53\x92\x4e\xd4\xa7\x81\x55\x83\x9e\xc7\x1d\xa0\x56\x8d\x0e\xf3\x46\xfa\xa6\xad\x2c\x5e\x87\xb2\xb7\xb4\x3e\x85\x46\xd1\x77\xa5\xde\xb1\x13\x5b\x47\xb4\x3c\x1f\x44\xba\xc7\xc8\x17\x8c\xfb\x11\x23\x74\x03\x7d\x1c\x7c\xc5\xcb\x37\xe0\x4c\x72\xb4\x61\x6e\xe8\x24\x25\x6e\xa4\x74\x03\xe7\xfb\xb1\xfd\x54\xa2\x2f\xab\x4e\x80\xcc\x20\xcb\x3b\x06\x95\xe5\xfe\x95\x86\xf2\x99\xe3\xb9\xc7\x17\x8f\xb6\x4e\x59\xb4\xff\x26\xe4\x68\xcf\xa7\xce\x59\x7f\x68\xfd\xe5\xc9\x68\x77\x4b\xa7\x03\x49\x18\x43\x2f\x81\xc3\xc3\xe0\x38\x14\xf4\x9a\x1e\x2c\x0e\xeb\xf0\x02\xff\x12\xa8\x9c\x34\xa3\x56\xac\x53\xae\x77\x45\xd3\xc1\xe0\xd1\x5a\x02\x43\x2b\xd5\xa9\xf4\x23\xbb\xa5\xc7\x90\x76\x7e\xa0\x69\x2e\xa1\xba\xeb\x94\x0d\xb1\xf7\xe6\x2d\x6f\x56\x6d\x25\x33\xff\x7c\xcd\x82\x7c\x25\x56\xd9\x15\x7d\x0f\x3a\x22\x6a\xad\x25\xb5\x7f\xf6\xf6\x24\xdf\xaa\x81\xae\x2b\xc5\xed\xb3\x49\x79\x44\x1e\xbd\x46\xe5\x20\xb6\xca\x1b\x06\x01\x8e\x20\xc6\x0b\x79\x4e\x69\x53\x0e\x88\xc3\x89\xd5\xf6\x3e\xdf\xa0\xda\x61\x0f\x6a\x18\xf0\xd3\x74\xd0\x9f\x3d\xa1\xc2\x99\x5e\x93\x12\xa3\x6c\x57\x72\x66\x4a\xa3\x41\xd8\x48\x3d\xd5\x37\x21\xe6\x6a\x9b\xf3\x7d\xa1\x2c\x32\x79\x55\xcb\xc0\x3b\x18\xe2\xb7\xd7\xb7\x5e\xe2\xe7\xb7\x48\x59\xc7\x6e\xd4\xd9\xb6\x55\x50\x06\x6d\x54\xbb\x57\x58\x02\x2c\xaa\x52\xe7\xa9\x4f\x23\x9e\x94\x43\xe2\x7e\x13\x49\xa1\x6d\xd0\x74\xaf\xc4\x9e\x93\x8b\xf3\x5e\x4e\x2f\x65\x9e\xf0\x74\x12\x7f\x5e\x1b\x8f\x9a\x58\x04\x2f\xfb\x8b\x0d\xf8\x33\x43\xd9\xe4\x9b\x01\x65\xb2\x10\xa6\xeb\x4d\x72\x09\xf8\x05\x3a\xa2\xda\x9b\x57\x2c\xb8\x90\xb8\xfa\x85\x03\x26\x5f\x2d\x94\x21\xb0\x59\xfb\x76\xb3\xfb\x8d\x00\xe2\xfe\xf5\x82\xe4\x82\x54\x0f\xcb\xb8\xc4\xc3\xcb\x82\xd4\x92\x9f\xf5\x05\x38\xad\x80\x3a\x1f\x62\xf3\x87\x36\x3f\x27\x5c\xea\x30\xc7\x93\xe4\x5c\x5d\xe3\x2f\xd5\x20\x79\x88\xf3\x56\x48\xcc\xb6\x5f\x6d\x26\x4
c\x41\x42\x7c\x76\xab\x50\x01\xff\x26\x3b\x44\x5b\x67\x5c\xd3\x4c\xaf\xc0\x75\xdd\x6e\x6f\x2e\x6e\x91\xd8\x8d\x64\xbd\x3f\x16\xd1\x5c\x9c\xe7\x6d\x5f\xae\x28\x02\xa2\x3e\x24\x77\x9c\x2a\xc6\xc6\xdf\x5f\x88\xf9\x1d\x81\xb1\x7b\xee\x36\x9b\x0b\xe3\x05\x51\x8a\x90\xab\x8e\x76\xdc\x38\x8b\x01\x54\x95\xde\x9d\x5d\xb7\x0a\xd0\x00\xe2\x44\x4f\xb3\x24\x1a\x1e\xe6\x45\x6a\x08\x4a\x22\x71\x39\xdc\x4c\x3b\xb8\x8e\xae\x78\xe0\x58\xcd\xfb\xf2\xb0\x03\x92\x50\xd2\xa6\xbe\x1d\x35\x8b\x43\x81\x40\xe9\x3f\x4e\x39\x95\x1d\x4d\x15\x4c\xfd\x53\x49\xd9\x08\xf0\x00\x2b\xc1\x03\x82\x4a\x93\xaa\x2b\xc3\x1f\x83\x85\x05\x8a\x44\xf6\xe1\xc0\xe1\x4d\x71\x02\x6c\x8a\x31\x71\x41\x93\x8c\x95\x7d\x22\xda\x80\xe6\x3a\xb7\x1b\x6a\x5c\x4d\xeb\xa8\xce\xca\x6d\xf1\xc5\x81\x36\x12\x4b\x42\x5e\x68\x09\x95\xba\x6e\x0b\x95\xb3\x4f\x5c\x13\xad\x4c\xe0\x8b\xf9\x20\xff\x23\x50\x23\xfb\x40\x17\x6c\x0e\x3f\x18\x39\xb4\x17\xdf\xe5\x3d\x0d\x51\xbe\x9d\x14\x91\x8f\xcf\xe3\x55\x55\x49\xa2\x00\x56\x8a\x07\xaf\xe6\xf7\x7f\x98\x18\x08\xf9\x3e\x61\xc8\x11\x40\x80\x04\x5b\xa3\xa7\x23\x72\xcb\x5c\xab\x1d\xe3\xe3\x19\x00\x7e\x58\x8f\x9e\x94\xec\x43\xbe\x85\xab\x85\xd8\x5c\x72\x80\x08\x55\x65\x8d\xed\xab\x9f\x39\x81\x59\x7e\x7e\x25\x8c\x95\x8e\x81\x03\xff\x1d\x9b\xf9\x32\x2d\x20\x8c\x37\xb0\x36\x03\x8b\x4c\xad\x11\x05\x18\xf2\x98\xdc\xaf\xe8\x10\xb1\x88\x3e\x68\xe8\x9d\xf2\xbf\x65\x30\x35\xb8\x41\x99\x92\x6e\x1b\x35\x22\xda\x57\xbd\x94\x3a\x30\x92\x61\xbb\x9d\xa9\x7a\x66\x20\x02\x58\x32\x6b\x77\x0d\x24\x5d\xd5\x6e\x06\xf9\x55\x87\xc3\x93\x1e\x9e\xa9\x81\x20\x51\x21\xc2\xc3\x9c\x89\x78\xb2\x66\x9a\x3d\x9c\xca\xa3\x41\x21\x8b\xdd\x48\xa0\x77\x14\xc0\x19\x11\x9e\x7d\xeb\x3e\x73\x35\x3b\xd8\xc2\x72\x03\x37\xf1\x73\xb2\xb3\xbc\xee\x2e\x1c\xa2\xa2\x05\xe6\x25\x94\x17\x9b\xa2\xe9\x64\x55\x60\x09\x2a\xba\x0f\x36\x94\x38\xf9\xdd\x51\x68\x4f\x2f\xb5\x32\x76\x78\x3f\x7a\xa8\x4e\xb1\xd7\xd4\x17\xc6\xec\x83\x83\x6d\x52\xcb\x1c\x23\x8a\xce\xcf\xe9\x6f\x86\x4e\xaf\x51\xff\x6c\xe3\xb9\x90\xa5\xaf\xb3\xbc\xf2\x77\x7c\xa8\x2a\x4b\x90\x3b\xe3\x53\x3c\xdb\x97\x47\x21\x81\x5d\xc5\x84\x80\x8a\x33\x7b\xe9\x08\x9c\x87\x12\x2d\x19\x23\x9f\x72\xcb\x3d\xe8\x95\x58\x8b\xe3\x5d\x94\x91\xc9\x59\xc9\xc7\x2c\x79\x7f\x42\x40\xb9\xe8\xb0\x3b\x58\xb6\x1a\x38\xa4\x57\x7b\x34\x70\xaf\x4e\x5e\x3e\x55\xdc\x81\xe0\x4e\x56\x02\x26\x65\xb6\x20\x91\xb8\x93\x7d\xeb\xb2\xb4\x12\xae\xb7\x41\xb6\x41\x2b\xc6\x50\xd6\x26\x71\xb3\xfa\x98\xe2\xf4\x13\x60\xa4\x56\x15\x43\xa6\x81\xe3\x96\x46\xde\xbe\xd9\xf1\x87\x79\x04\xe6\xb3\xab\xcb\xcf\xe2\xee\x67\x8e\xc2\xd3\xe4\x74\xd4\xb4\xfb\x9f\x0b\x33\xb1\x2f\x4e\x25\x53\x4f\xfc\xf7\x2b\x36\xab\x24\x28\x0c\xcf\x4d\x36\x18\x0f\xe3\xe5\x7d\xf4\xb1\x46\x40\x50\xfa\xb1\x06\x7b\xce\x83\x90\x7d\xf1\x86\x96\xcd\x95\x7a\x55\x00\x88\xf3\xc2\x4c\x11\xc9\xa2\xcd\x7e\x20\xb6\xc1\x8c\x1b\x33\x6b\x2a\x8a\xe3\x2c\xbc\xcb\xb9\x55\x47\xa1\x4f\x38\x6c\x38\xdb\xe4\x6c\xa9\xbd\x13\xc6\xe3\x87\xac\x15\x85\xea\xd5\xda\xe9\x2c\x43\x95\x90\x1c\xf4\x57\xe4\xe8\xe2\xd5\x9d\x9d\x1f\xb5\x29\xc3\x4b\x5f\x87\x93\x3d\xe3\xf2\xa5\x00\x54\x94\xdf\x95\xaf\xcf\xbe\xb2\xae\x51\x31\xb2\x7a\x54\xa4\xb5\xf6\x45\x35\x6b\x50\xaa\xe4\x40\xee\xae\xb4\x24\xbe\x20\x68\x30\x0c\x40\x2a\x65\x6a\x7d\xbf\x55\x68\x9f\x72\xdb\x4c\x8c\x25\x36\x8a\x63\x92\x95\xb6\x61\xca\x4e\x18\x4d\x80\xed\x3f\x77\x5c\x9c\x22\x18\x9d\x26\xdf\xc4\x51\x25\xe6\xa3\x8c\xc8\x74\x62\xad\xbc\x8c\x41\x06\x5a\x77\x64\x57\xbd\xde\x00\x57\xbc\xf9\x0d\xe0\x03\x2b\x15\xc0\xa2\x55\x95\xfc\xb2\x28\xb6\xb0\x26\x28\x61\xc6\x4c\xfa\x0a\x15\x7b\x21\x9c\x7b\x90\xb2\x92\xdd\x9b\xc0\xf6\xa8\x54\x3e\xce\xf2\xfb\x4f\x91\xf2\xcb\x4c\x76\x99\x4d\xf2\x
01\xb6\xb0\x8e\xd2\xd6\xca\x79\xd9\x4a\xb5\xcf\x4e\x2e\xd4\x2b\xcd\xa6\xb9\x43\x9b\xcb\x38\x2a\x27\x0d\x2b\xb3\x84\x0f\x2d\x0c\x5d\xa7\xdc\x9c\xce\x4e\x09\x0c\xf2\xed\x37\x27\x8b\x46\x12\x4a\xef\xcf\xe8\xe0\xb9\xb5\xa3\x8a\x0d\x99\x80\x51\x55\xce\x40\xfd\x87\x7e\xa9\x9e\x19\x3c\xc4\xf2\xd5\xc0\xad\x11\x91\x92\xfc\xd3\xc6\x71\xcd\x92\xc8\x2b\x63\xca\x6c\x37\x18\xae\xb6\x40\x1f\xba\x5b\xbf\x66\x47\xe0\xe4\xbe\xa8\xb2\x1a\x66\xc6\xbf\x0e\x37\x5b\xef\x11\x39\x1a\x38\x19\x3a\xf0\x11\x56\x24\xfe\xc4\xce\x64\xe0\x17\xa9\x91\xcb\x5c\x7b\x05\x58\xf3\x01\x26\xa0\x45\x3e\x9f\x57\x66\x1b\x21\xd2\xea\xa0\xb2\xb6\x8d\xb3\xdb\x1f\x0b\x18\x3b\x26\x20\x9f\xdc\x09\x28\xb6\x3b\x45\x68\xe1\x9a\xca\xc7\xa3\x9c\xbe\x7c\x6b\xbb\xf8\xd6\xfd\x71\x1e\x91\x6a\x44\xa1\x04\xfe\xf6\xa6\x43\x65\x65\x4c\x63\x3e\x86\x18\x9c\xa1\xa8\x9f\xda\x49\xbc\xb1\xf9\xec\xc3\x16\x5c\xf6\x5b\x35\xef\xb5\xf1\xd4\x0e\x2e\xd5\x8f\x90\x40\x17\xb6\x2b\x89\xa5\xfe\xaf\x83\xfd\x0a\x84\xc5\x8e\x26\xa7\xdd\xe9\xf4\xbc\x03\xbb\x94\x7d\x26\x51\xf6\xb6\x8d\x1c\xc9\x2d\x7f\x51\xf4\x16\x34\xff\xad\x70\xf9\x38\xdd\x6a\x53\x66\x80\x2d\x53\xf7\x2d\x8a\xff\xcc\x75\xc5\xd7\x40\xb2\x3a\x57\xdf\xa4\x48\x7c\xe2\x04\xdb\x59\x51\x53\xb5\x07\xb3\x1a\x0a\xa2\x3f\x1d\x46\x4d\xe5\x2d\x9f\x30\xa3\xa9\x84\x1b\x5f\x0d\xe6\x69\x6b\xc5\xf9\x77\x9f\xcd\xe1\x74\xf2\xd6\x8f\xea\x27\xf7\x4b\x38\x6c\xe4\x36\xb5\x21\x2e\x1e\x28\x9f\xa9\x41\x15\x96\x4d\xb1\x11\x24\x46\xa8\x8a\xcc\xab\xb2\x78\x38\x43\x24\x51\x77\x50\x4c\x55\x40\x2b\xc1\xa8\x58\x27\x79\x8d\x2d\x71\x88\xfa\xcb\x55\x34\xcf\xca\xce\x29\x45\xda\xf0\x40\x5a\xa5\x46\x03\x9a\x97\x97\x9f\xe8\x35\x93\x1b\x1f\x51\x0b\xa3\xb0\xb6\xf1\x23\x2d\x56\x28\x95\x63\x4c\x75\x01\x08\x14\xea\x35\x7b\x52\xf2\xa7\xc8\xee\xd9\x28\x05\x93\x40\xf7\xa8\xda\xf3\x4d\xf1\x17\xbe\xe8\x53\x59\x48\x4c\xd0\x84\x90\x9a\x7f\x41\xf3\x0b\x54\xab\x01\x2a\x71\xae\xee\x18\x96\xa7\x14\x04\x42\x23\xcb\xe0\x40\x28\xc4\xed\xfd\x89\x9e\x14\x29\x53\xeb\x4b\xcf\xca\xa5\xb3\xab\x3e\x79\x15\x5e\x75\x59\xfc\x94\xab\x6d\xeb\xac\x2d\xd6\xc3\x3a\x21\x1f\xd0\x7b\x27\x41\x50\x13\x27\xb3\x23\x27\x4d\xb3\xa6\x7e\xc8\xdd\xb5\x8d\xc2\x92\x47\xfd\x54\xa5\x9f\x91\x41\xe8\x74\x19\x33\x34\x0c\xef\x95\x8b\x8d\x3c\xea\x5d\x87\x4c\x44\xf4\x7e\xf7\x77\x66\x52\x4b\xfd\x82\x11\xee\x99\x40\xc4\x8c\x7e\xca\xad\x80\x5d\x2c\x26\xfa\x4f\x70\x6f\x0d\xfc\x34\xd3\xef\xdd\x7b\xe4\xef\xf1\x03\xde\x63\x01\x25\x2a\x09\x6f\xc4\x47\x83\xbb\x30\xa0\xd5\x67\x47\xe0\xff\xe4\x90\x5d\x8b\x19\x2d\xca\x93\xff\xda\x2d\xa9\xbc\x66\xb3\xea\xa7\xbb\x44\xa2\x4b\xc6\x2b\x0b\xbc\x23\x59\x72\x88\x52\x5b\x52\xf8\xa5\x75\xac\xe8\x2f\xfc\x92\x1a\x96\x08\x0e\x8a\xdb\xf4\xb3\x77\x43\xdc\x9d\x44\x63\x09\x44\xfe\x79\x45\xfb\x8a\xbc\x2b\x01\x2a\xd0\x36\xe9\xcc\x35\x53\x19\xca\xeb\x00\x2c\xd6\x21\xcf\xdb\x44\x1c\x51\x1f\xa6\xfa\x91\xfb\xd5\x7f\x66\x9e\xb8\x56\x55\xc7\xc7\x99\x53\xad\xd1\xf5\x56\x7d\x96\xcb\x59\xa8\x9d\xf7\xcf\xd5\x1a\xfd\x92\x9b\x80\x9b\x43\x94\x62\x80\xe8\x31\xbe\x53\x79\x73\x84\x38\x42\x76\x82\x86\x49\x0d\xa1\xd8\x3e\xd2\xb0\xc8\x29\xf3\x26\x61\x5a\x39\xd9\xac\x78\xc3\xeb\xef\x2f\x06\x32\x8c\x2e\xdb\xaf\x6e\xb5\x69\x48\xd9\x46\xe9\x8c\x18\xbb\x9c\x2c\x30\x99\xac\xa0\x43\x41\xec\x85\x96\x2c\x06\x93\xf0\x38\x49\x40\x8b\x1a\xe4\xf3\xa8\xd2\x04\xb7\x8e\xd3\x2e\xcb\xbc\xff\x09\x29\xb3\x03\x6a\xa4\x94\xa4\x3f\x83\x56\xa6\xf4\x37\xfa\xe4\xf4\xb2\x29\x6c\xa0\x07\xca\x36\x4d\xd5\xc3\x46\x87\xc2\x3f\x65\x63\x10\xc5\xbb\x72\x3b\x8b\xe1\x86\x4a\x33\xea\x5a\x5a\x5b\x09\x0f\x0a\xb3\x80\x3c\xde\xef\x6b\x73\x75\x0c\x3a\xfc\xb4\x17\x1c\x0d\x26\xe2\x26\xcb\x35\xc4\xf5\x87\xaf\xb6\xd1\x0e\xe4\x66\x51\x9c\x44\xb6\x7a\
x4e\x9c\x01\x2f\x07\x9d\x02\x9c\xce\xef\xcc\xdd\xa8\xbd\x85\xd0\x46\xfa\x65\x75\x96\xca\x08\xa6\x2b\x3a\x5a\xb8\x00\x71\x9a\xbe\x76\x19\xa5\x98\x85\xd8\x72\xbd\xdb\xf7\xf6\x71\xd6\x7b\xa9\x08\x85\x9e\x92\xaa\xd9\x07\x68\x58\xe2\x51\xd3\xd2\xe5\x82\xdf\x63\xee\x24\xac\x69\x2d\x5e\x65\x91\x10\x1e\x49\x80\x91\x9b\x89\xe8\x99\x8a\x45\x44\x2c\x55\x6f\x09\x18\x80\x36\x87\xad\xb0\xed\xdd\xc5\xb9\x8c\xfc\x45\x54\x23\xbe\xb9\x6d\x9a\x24\x0c\x77\xf7\x1d\x7e\xed\x8a\xda\xdf\xd2\x85\xe6\x1d\xa4\x6d\xdc\x2e\x0d\x14\xd1\x0e\xf8\x0a\x33\x65\x8f\x6f\xda\xfd\xd6\x97\xa5\xdf\x23\x05\x77\xb9\x80\xbb\x94\x8b\x14\xc8\xb7\x23\x2c\xf6\xe1\x08\x1e\x4e\x70\x7b\x4c\xaa\x7f\xf0\x05\x26\x40\xad\xd5\x24\xcd\x1e\x9e\x64\xa8\x95\x50\x31\x05\x63\x0e\x98\xe4\x1a\x50\xd8\xa7\x42\xc0\x00\x62\x2d\x7d\x84\xf9\x2c\x20\x07\xb4\xdb\xe5\x13\xb2\xcf\x99\xda\x23\x8b\xd8\xb2\x17\xbc\x5b\x3a\x1c\xed\x2b\x5c\x0b\x5f\xa8\x9a\x67\xe4\x10\x95\x17\xef\xe2\x9d\x7d\xa2\x7e\x1a\xf5\x19\x17\xe6\xe8\x3e\x97\xf9\x99\x03\x7b\x8c\xc5\x2b\x20\x62\x95\x68\x36\x11\xe1\x93\xcb\x9a\xb8\xd0\xca\x47\xf7\xc4\x6b\xe9\x4f\x79\xb3\x93\x08\x4c\x23\xa3\x64\x19\xba\xec\x33\x04\xcd\x8c\x10\xf6\x60\x49\x8d\xc2\x64\x95\x65\x35\xb6\x75\x7d\x4a\xdb\xc0\x91\x33\x9f\x0a\x34\x25\x22\x9c\x35\x9d\xd2\x7e\x33\x35\x9f\x37\xc2\x8a\xd9\xf1\x45\x5e\x25\xd5\xb3\xfc\x78\xcd\x81\x79\x4e\x63\x74\xd2\x36\xa4\xe2\xd5\x26\x36\xd8\x26\x6e\x8c\xac\xa3\x2c\x99\x6d\xce\x89\x64\x84\xa9\x76\x27\xb7\xe3\x14\x47\x12\x4d\x9c\x43\xee\xa9\x9b\xd4\x6b\x63\xbb\x24\x0a\x11\xf6\x65\x34\xaf\x0a\x3b\x3b\x67\x24\x42\x4a\x4d\x29\x64\xe7\xd0\x39\xc6\xf4\x03\xaa\x88\x04\x90\x17\x6e\x6b\x78\xaa\x34\x7a\xa0\x15\x9e\x20\x40\xf1\x13\x9c\x7a\x1f\x03\xef\x65\xf3\x76\x70\x58\xdb\x30\x04\xb3\x13\xaa\x0d\x5a\x14\x45\x89\x22\x0d\x1b\xbb\x7a\xd1\x72\xf9\xa0\xe1\x4a\x2d\x2a\x22\xa2\xbe\x31\xc4\x84\xc3\x9f\xcb\x84\xd1\xc4\x8a\x37\x53\x8e\x69\xfa\x20\xae\x32\x63\xeb\xa2\x0c\x23\x47\x9a\x12\xef\x81\xce\x54\x43\xa5\x74\xed\xde\x79\x73\xe0\x9e\x85\x19\x1f\xff\xe9\x40\xd5\x3c\x6c\x7f\xa7\x95\xa3\x4e\xde\x31\xb2\xf2\xf3\x4a\x4b\x85\x9f\x79\xe9\x38\x76\xd6\x4b\x49\x73\x93\x9b\x68\xfd\x1e\x6d\x4b\xae\xd7\xa4\xca\x57\x0c\x83\x7c\x6b\x54\x80\x5c\x47\x28\x06\x12\xab\xd4\x00\xda\xba\x96\x86\xda\xd6\x99\xc8\xc3\x3b\xc7\xb0\x7f\x4f\x70\xb4\x32\x7a\x8e\xaf\x48\xb1\xd0\x38\x75\xe8\xc2\xcc\x3a\x2b\xe2\x74\xb0\x79\x4c\x39\x52\x84\x86\x15\xf0\xc0\x53\xa5\xa4\x65\x40\x66\x9a\xd8\xd8\x76\x9a\xe5\x57\x3e\x3f\x3d\x6a\xad\xac\x88\x93\x86\x27\xa1\x10\x02\x21\x6b\x00\x5a\xdf\xbc\xc4\x34\xc6\xfa\xd2\xec\xc0\x56\x6d\x3c\x58\xde\xa7\x05\xa0\x0f\x0a\xbb\x47\x0a\xbb\x9b\x2c\x83\xdd\xc3\x72\x96\x67\x25\x1c\xd1\x2c\xc3\xe4\xc4\x07\xaf\x70\xcc\x5e\xa4\x33\x45\x00\x90\x7a\x6d\x13\x41\x06\x52\x9e\xe4\x9c\x10\x08\x1d\x7d\x49\x20\x8c\x8b\x1d\x78\xb1\x54\xda\xc6\x00\x51\xef\xc6\xbe\x7d\x2c\x89\x7a\x44\x90\x33\x39\x89\xa2\x79\x3e\xa4\x60\x33\x89\x71\x2c\x0a\x57\x6c\xad\x24\x15\x07\xb3\x0b\x50\x13\x74\xdf\x98\xc6\x1b\x7f\x03\x88\x4c\x86\xd0\xf0\x21\xee\x7f\x9a\x86\x92\xbb\xe9\x7f\xda\xd7\x14\x52\x97\x2f\x96\x82\x4d\xf9\x14\xa0\x46\x2a\x2a\xe6\x5d\xef\x3a\x40\x16\x8a\x93\x62\x82\x90\xd2\x2d\x85\xb9\xb0\x29\x73\x3e\x4c\xf9\x95\xc8\x9f\x26\xbf\xb6\x50\x3f\xe9\x0f\x7d\x53\xfb\x3f\xbf\x0d\xd1\x53\x60\x3f\xf8\x77\x40\x28\x8a\x3d\x0a\xb0\x2d\x70\xe2\x9c\x76\xf2\x80\x4a\xc5\x37\xbf\x41\x84\x94\x5c\x7a\x3c\xba\x18\xca\x13\xc0\x8e\xca\x43\xa1\xeb\xad\x5f\x95\x1d\xe5\x57\x8a\xa0\xbd\x2e\x3e\x18\x71\xce\x3e\xbf\x4c\x8b\x01\x2e\x2b\xe0\x75\x55\xe5\x40\xc3\x2b\xda\xc9\x75\xba\x33\x1d\xc1\x4b\xbb\x44\x2d\xae\x1d\xfa\xe1\x37\xe9\x2a\xb7\x37\xaf\xdf\x61\x0f\x6a\x85\xcc\x6f
\xea\x9f\x21\xbf\x0e\x1b\xd3\x4e\x6a\x5f\x6e\xde\x75\x53\xa8\x40\x65\x38\x86\xdd\x82\xab\x38\xd1\x1b\xda\xec\xd1\xbf\x7a\x18\x15\x0c\x74\x8c\x4b\x7b\xb3\xf8\xc5\x0e\xf0\x83\x1a\xea\x59\x92\x1d\xfb\xd9\xf3\x6f\xa5\xf0\x48\xcb\x4e\xfd\xa4\xe8\x12\xe5\x3a\x20\x1f\x2a\xac\x12\xfe\x5f\xf8\x20\x94\x83\x16\x24\x32\x4e\x99\xc8\xac\x62\xef\x27\x07\xfa\x25\x96\xe5\x3c\x3f\xc1\xff\xaf\xfd\xb2\x2b\xa1\xc2\x0f\x43\x8c\x40\x2e\xe5\x90\x32\xea\xd9\x00\x42\x55\x6b\x86\x7d\xb1\x59\x88\xf8\x8a\xd3\x5d\x6d\x30\x9b\xf7\xc8\x13\xcf\x14\xaf\xf9\xe1\x5d\x05\xd6\x34\x40\x96\x25\x7a\xcb\x91\x76\xd2\xbb\x82\x3f\xca\xa4\x5d\x05\x8b\x51\x29\xb2\x9b\x04\x9e\x25\xda\x16\x4e\x2c\xa2\xa2\xbb\x0c\x9c\xd2\x60\x63\x87\x4a\xa6\x64\xe1\xdb\xab\x64\x22\x44\x73\x6c\xac\xd3\xa8\x40\x90\x66\xd5\xd4\x5c\x8d\xce\x59\x34\x6c\x55\x88\xec\x7f\x5b\xd8\x55\x65\x99\x5c\xab\x00\x63\x71\x41\x4d\xe9\x74\xf9\x2a\xcc\x5c\x60\x47\x64\xce\x2d\x5c\xf6\x95\x37\xad\x81\x60\xaa\xd7\x99\xbc\x56\x67\x16\xf0\x96\xc2\xda\xa2\xa5\x55\xec\x2a\x65\x8a\xa1\xb7\x46\x95\x54\xd5\xd7\xc1\x7f\x41\xe8\x0a\xdd\x1b\xf5\x77\x1c\x4a\xed\x28\xa5\xdd\x45\xde\x68\x7d\x81\xe8\x0a\x58\xb3\x1f\x4a\x31\xad\x63\xcc\xcf\x53\xb5\x1f\xe0\x4d\xd9\xc1\xb1\xa4\x2f\x70\xb0\x04\x17\x8d\xa9\xe2\xbe\xfe\x94\x23\x28\x24\x69\xe3\x7f\x3d\xd4\x1d\xd9\x27\x6a\xfd\x5f\x92\xaf\x62\xb7\x43\xfd\x03\x3c\x6b\x10\xc3\x26\xf8\x7a\x08\x1d\xe9\x4f\xfe\x2a\xb7\x8d\x43\x40\x16\x96\xba\x65\xf3\x86\xec\x1d\x94\x81\x1f\x94\xda\xc5\x2e\x27\x2f\x7e\x1a\x57\xd9\xd2\x89\x5e\xff\xeb\x6c\xd2\xb9\xd1\x68\xae\xb2\x29\x64\x69\xb3\xc6\xf1\x0e\xb6\x0f\xad\x8f\x0d\x66\x41\x4a\x1c\xe5\x49\x14\x2c\xdc\x10\xaa\xf5\x51\x55\xb9\xc0\xdb\x25\x4a\xbd\x67\xe7\x78\xed\xf6\xaa\x66\x0c\xbb\xfb\xe8\x2d\x99\x31\xb6\x89\x9e\x63\x12\x52\xf0\xde\x39\x4a\xf7\xea\x05\xb5\xd6\x9e\x6f\xca\xa1\x81\x27\x9e\x08\x2d\xa9\x0b\x7a\x3e\xb4\xb0\x6d\x5e\xad\x57\xe8\xac\x9c\x4a\x72\xe3\x7c\x70\xd5\x50\xe3\x10\xc4\x1c\xd0\xe0\x13\x5a\x1a\x7a\xcf\x15\xa1\xbf\x7c\x95\x01\xa5\x4a\xa0\x73\x6d\x08\x73\x34\xa9\x01\x9d\x88\x2a\x1d\x71\x10\x4e\xa6\xc8\x82\x92\x82\x43\xd8\x49\x53\xd8\x7d\x18\x24\x44\x01\x3e\xa3\xed\xc1\x16\xe8\x04\xf6\x07\xa7\x50\xa3\x22\xcb\x72\x2c\xeb\x94\x36\x38\x09\xb3\x61\x9b\xa3\x9b\xe6\x21\xb5\xc1\x87\x56\xb6\x16\x26\x29\x3a\xd9\xc6\x54\x54\x1f\x26\x57\xb5\x22\x2c\xbb\xba\x1c\x63\x84\xae\x97\xb6\xff\x1a\x8b\x76\x83\xe1\x3c\xb5\x33\x13\x3a\xbc\x87\x16\xc5\x92\xdf\x45\x70\x2f\xbf\xc3\xff\x24\xb7\x47\xae\xc8\xc5\x27\xa3\xe9\xc7\xa7\x1e\x6c\xa4\x0c\x6d\x92\x5f\xd9\x76\x95\x6f\x0f\x87\x09\xbe\xa3\x5d\x59\xff\xb0\x8b\xa1\x35\x6f\xc3\x3d\x41\x12\x1a\xf5\x6a\xb1\xac\xd0\x48\x48\xa6\xee\xe2\x72\x33\x49\xad\x8f\x5d\xbf\x95\xda\xdd\xd0\x23\x71\x47\xab\x37\x9e\x12\x9a\x9e\x64\x68\x97\x33\xf6\x1f\xa1\xde\x5c\xc3\xd3\xa9\xcf\x63\xf6\x2d\x3f\x55\x4e\xc1\x10\x59\xfb\x16\xce\xbd\x31\x58\xf6\xe0\x43\xf4\xe0\xa4\x4a\x97\xcb\xd5\xaa\x3a\xe7\x97\xae\x67\xf3\xe1\x8d\xc9\x7a\x12\x26\x57\x9b\xfa\xb4\x5b\x07\xda\xb9\xd6\xca\x9c\x0e\xe8\xf3\xf6\x3a\xdd\x2a\xff\x1b\x51\xc0\x11\xaf\xe7\x23\x92\xe4\xe0\xba\xa8\x2b\xe7\xf2\x7d\x67\x00\x8f\x3a\x91\x21\x46\x73\xed\x66\x8c\x63\x82\xf5\xbc\x74\x8a\x0f\xe8\x28\xf4\x90\x54\x0a\x3b\xca\xdb\x8d\x70\xc8\x6c\xa9\x70\x89\xa6\xe4\x92\xd2\xc7\x57\x62\x5c\xfb\xfb\x9c\xbc\xf6\x15\x23\xcc\xe0\x5b\xa7\x3f\x03\xc7\x69\x86\xbd\x71\x2f\xc7\x0b\x7d\x1b\x39\xa8\x04\x91\x59\x65\x49\xda\x2a\x32\x56\x89\xe7\x03\xb2\x58\xba\xa0\xf4\x42\x41\xb8\xc7\xc9\x73\xd9\x65\x65\xc1\x79\x0b\x43\x2c\x42\xc2\x4f\xb3\xc0\xa6\x93\x89\x22\x4d\x67\xbf\xbb\x84\xf3\x34\xcb\x3e\xd0\x6c\x19\xf0\xd3\x32\xba\x6f\x96\xe8\x5b\xa6\x2d\xf0\xec\x23\x92\x6b\xe8\xea\x8
8\x4e\x7c\x51\x9f\xca\x48\x58\x85\x4c\x31\x8a\xa4\x74\xad\x00\x80\x5c\x08\x61\xa7\x09\xad\x54\x3a\xb5\x79\x1e\xd1\xf0\xa8\x0e\xbe\x03\x86\xb2\xbe\xd0\xd0\x75\x7f\x50\x5d\x23\x74\xe8\x7d\xd6\xed\x3d\xae\x74\xf7\xa1\x39\xe1\x7c\x52\xb5\x53\xb5\x5b\x48\xbc\x6c\xd9\x25\x49\x5b\xba\xd0\xe5\xda\x60\x9a\x95\x48\xc2\x6a\x27\xd4\x12\x15\x45\xfa\x77\x86\x8f\x4b\x5b\xd8\x46\x2b\x77\x03\x85\x0d\xf6\x9b\xf4\xfc\x62\x81\x1a\x44\x8e\x2b\x9e\x3e\xaa\xef\x4c\x2a\x1c\x68\x47\x41\xdb\x8b\x11\x39\x3d\x43\x1d\xc0\x61\x77\xc9\x14\x94\xe8\x02\xd9\x02\x61\xad\xdd\x5d\xc0\xe6\xc8\x30\x78\x46\x39\xf8\xb4\x97\x1f\xf1\x28\x6c\xda\x01\x19\x38\xff\x3a\xe5\x83\x8d\xe6\x09\xa6\xc1\xbb\x6b\xe5\x8f\x02\x07\x5d\x13\x69\xf0\xf2\x28\x6c\x66\x95\x50\x98\x02\x10\x0e\xe6\xca\x17\x63\x06\xc8\x02\x2d\x84\x37\xd2\x1c\x37\xfd\x9a\xa4\xbf\x10\xed\x8b\x7b\x70\x45\xcc\x62\x9a\x30\x8f\x01\x4c\x01\x67\xf4\x0e\x65\x2d\xb4\xb2\xe3\xb3\x57\xf2\xea\x6d\xc6\xe7\x90\xad\x19\xbd\x00\xda\x8e\x49\xaa\x54\xe8\x9e\x9b\xe9\x25\x20\x2e\xe1\xf9\xd8\xad\xcf\x13\x90\x14\x49\xe8\xb1\xa2\xde\x92\x39\x73\xa3\xf5\xe4\x53\x53\x83\x47\xa7\xcd\xb8\x48\xcc\x75\x73\xe7\x64\x96\x4b\x03\x40\x83\xed\x93\x49\x79\xeb\xe3\x88\x95\xa2\x3a\xbb\xa1\x7e\xd6\x31\x60\xb3\x1d\xe0\xb9\x9f\x4f\x44\x46\x47\xbc\x25\x20\x7e\x22\xf0\x8a\xfb\x98\xf6\x11\x07\x8b\xd5\x87\xd8\x9a\xd6\x89\x13\x5a\x55\x10\x17\xed\xb9\x95\xa3\x2f\xb3\x26\x44\x68\xc2\xd0\xa6\x62\x34\x64\xec\x20\x92\xee\xcc\x2e\xe4\xb2\x6b\xec\xf5\xa7\xfd\x73\x2b\xef\x5f\x94\x05\x94\x0f\xe9\x71\xb4\x2a\x0f\xb8\x54\x88\xf6\x2f\x02\x31\xc1\x24\x35\x8b\x6f\xd7\xde\x46\x69\x31\x7a\x2b\xc7\x00\xfc\x57\x5b\xf4\x07\x5a\x7d\x31\xfe\x4a\x32\x28\xc4\x8b\x18\x7e\xab\x5f\x7c\x2b\x60\xca\xcb\x67\x33\x9f\x0e\x72\xa5\x1e\xe7\x41\xe0\x42\xe2\xee\xd0\x90\x58\x4d\x63\x5e\x84\x26\xf1\x0b\xec\xdb\x34\xd2\x1a\x7d\x4e\x16\x0e\xe6\xbd\x71\x02\xb6\xbe\x28\x63\x98\xc9\xe7\x22\x4e\x61\x8c\xc4\x27\x81\x53\x75\x35\x18\xc4\x95\x7a\xbb\x9f\xb8\xc7\x4c\x60\x63\x61\x65\xb9\x96\x96\x2a\xaa\xe9\x28\xb7\xcf\xc9\x5e\x8d\xf8\xd7\xbe\xb3\x82\x8c\x43\x37\xbe\x0e\x07\xdb\x26\xa0\xf3\xd2\xa2\x73\x57\x66\x6b\x2c\x80\x90\x89\xcd\x97\x83\xa9\x40\x27\x71\xb9\x37\xc7\xf4\x34\x7c\xb0\xe5\xbc\x86\xe0\xb9\x29\xbd\x3b\xc2\x12\xcf\xa1\x86\x5d\x09\x5a\x5d\xda\x24\x66\x87\x8f\x1d\xa0\xaa\x60\xe0\x32\xb7\xab\x33\xca\x0b\x59\xd1\x12\xf4\x92\x15\xcd\xb1\xe6\x8a\xa5\xdb\x79\xa9\xb0\x2e\x7a\x2c\x99\x75\x57\x76\x46\xfe\x48\x06\x74\x71\xfa\x61\x3a\xa8\x8a\x60\x11\x24\xc2\x0c\x17\x54\x52\x28\x2c\xb1\x2e\x64\x03\x05\xa0\x33\xeb\xd9\xf0\xb3\x62\x64\xa1\x16\xa6\xb9\x23\x51\xfc\xba\x89\x73\x6b\xf9\x0f\xc4\xe8\x48\xe9\xa5\xa5\x7c\x70\x60\x34\x94\x23\x77\x77\xf6\x3f\x65\xb3\x32\x39\x1b\x42\xfb\x92\x11\xc5\xbb\xbc\xcf\x24\x2e\xc2\x92\x6e\x76\xe1\xbc\x3f\x4d\xcb\x15\x0f\x42\xd1\xc8\xb8\x73\x82\x70\xa1\xd3\x4f\x9a\x41\xf3\xa8\x53\x73\x41\x15\x23\x71\xea\x90\x0e\x72\x61\x21\x88\x38\x95\x32\xbb\xbc\xd5\xe8\x7a\x7a\x9f\x8f\x76\x71\x78\x54\x76\x11\x99\x2b\x7d\x42\x28\x58\x94\x78\x6c\x9c\x23\x9d\xc7\x36\xa8\xab\x0b\x88\x41\x90\x05\x34\xd6\xac\xd6\x8d\x28\xf3\xaa\x50\x8e\x15\x1a\xc4\x88\x9e\xec\x7c\xd8\x29\x80\xab\x85\xad\xdd\xf6\xec\xa0\x32\x6e\xce\xa4\xfa\x99\xd7\x27\x78\xad\x8d\xf7\xd8\x3b\x3a\x02\x00\x24\xc9\x3b\x5f\x37\x94\x53\xd6\x5e\x21\x78\x2a\x1e\x60\x62\x71\x09\xd7\x72\xa5\xed\x5b\x03\x1a\xb5\x56\x27\xc3\xad\x26\xfc\xeb\x02\x8c\xde\xb3\x35\x08\x43\xe2\x1c\x78\x70\xf5\x62\x57\x30\x6d\x0a\xfb\x14\x1d\x39\x36\x65\x7e\x0c\x54\xcb\x71\xa5\x0e\xb6\x76\x31\x72\xbe\xc5\xfc\x9d\xb8\xa7\x7f\x23\x71\x4a\x5e\xb6\xe1\x0e\xd3\x87\x30\xa0\x4e\x6c\x59\xbc\xbc\xa8\x23\xe2\x46\x09\x83\x85\x5d\x89\x22\x0a\x
4d\x04\x07\x9c\x6d\xc8\x1d\xb4\x9e\x4f\x99\x13\xea\x1e\xe9\x35\xe6\x48\xb6\x51\x1d\xba\x3d\xd8\xc3\x0c\xc9\x87\x57\x16\xed\x5b\xf3\x98\xba\xd3\x4c\xaa\x07\xb4\x07\x1f\x58\xdd\x42\x00\x45\x67\x89\xe2\xd6\x1e\x06\xc9\x8c\xe6\x9f\x1a\x50\x18\xf6\x79\xf7\x12\xc1\xcd\x1a\xdf\x04\xde\xfc\x31\xa8\x3c\xb7\x5f\x1f\x6c\x26\xa9\x20\x2e\xbc\x8f\xcf\x3d\x53\x6e\xde\x1d\xf4\x22\x3d\xc5\x06\x86\xd9\x59\x93\x5c\x1a\x9d\x10\x07\xa5\x51\x29\x0a\xea\x3a\x6d\x63\x30\xf7\xac\x2c\x18\x4a\x8a\xcc\x1e\x07\x1a\xd4\xfa\xf6\x21\x0c\x1a\x6f\x1c\xc9\x52\x3f\x39\x7b\x83\xa0\xad\x7d\x15\xde\xea\x2a\x16\xca\xc6\x99\x32\x16\xff\x29\x0c\x33\x6a\xfc\xd3\x9d\x15\xc7\x98\xce\x0b\xb0\xbd\xae\x60\x5a\x98\xb7\x3c\x79\xb8\x3a\x25\x6e\xd8\x0d\x1b\xd3\x6d\xca\x98\x2a\x2e\x4f\x93\xab\xcd\x04\x26\xc8\x01\xea\x7f\x19\x3a\x4c\xc9\xcf\x9b\x24\x4d\x6c\x16\xc8\x97\xfd\x67\xed\x94\xa8\xd1\x84\x90\x1a\x08\x3a\xdb\x0f\xbd\x4a\x85\x83\x82\x24\xe8\x22\x17\x75\x0e\x90\xcf\x0b\x8b\xbf\xed\xe4\xba\x96\x56\x52\x21\x3b\xfe\xdf\x04\xd6\x07\xe9\x43\x93\x86\xcd\xde\x72\x10\x07\xab\x26\x54\xd7\xb0\x9c\x02\x41\xcd\x49\x89\x63\xbf\xa8\xac\x05\xd8\xe4\xf0\x61\x2d\x51\x49\x58\x48\xe6\xac\x54\x63\x49\xcf\xc4\xea\xe4\x77\xf6\xf2\xad\x31\x58\x70\x8e\x0e\x53\x2c\xf9\x0a\x33\x38\x4e\xea\x11\xda\x60\x00\x10\x1b\xb6\x94\x99\x00\x4a\x8c\xbb\x60\x68\x51\xe0\xc4\xc2\xa6\x21\x0a\x4e\x32\xa5\x79\x2a\xd4\x76\xea\xa0\x7c\x69\x81\x9c\xa5\xa4\xeb\xe9\xce\x55\x29\x12\xce\x62\x70\x56\xfd\xd9\x4f\x87\x93\xe4\x29\x48\x2f\x39\xbd\x1b\xec\x28\xcb\xb7\x7b\x67\x70\x7c\x7c\x98\xd9\x11\xd4\x69\xf2\xa5\x73\x4c\x93\xf3\xec\x54\xbf\xf6\x17\x13\x9e\xe5\xc1\x58\x60\x2d\x93\x36\xc7\x1b\x38\xba\xf9\xd0\x44\x89\x70\x0f\x6f\xfc\xd9\xc6\x6c\x2f\x2b\xb7\xb2\x59\x62\x4c\xce\x34\x63\x6c\xcf\xf9\x1c\x05\x07\x39\x55\x2e\xe0\x55\x02\x18\x97\x4a\x1d\x87\x65\xd3\xd3\x35\xda\xa1\x74\x9b\xbd\x33\xf5\xa7\x54\x96\x8e\x26\x25\xcd\x62\x30\x2e\xcb\x67\x92\xa6\x7a\xc2\xaa\x2d\x4f\xae\xa8\x44\x05\x93\xfb\xed\xfd\x6a\x3e\x2f\x55\x63\x57\x57\xbf\xf9\xe3\x05\x4d\x58\xf7\x03\xd6\x09\x05\x02\x53\x1c\x04\xe4\xc8\x65\xbe\x18\x53\x03\xac\x45\x76\x9a\xa0\xc0\x7a\x26\x27\x36\x1e\xdd\x85\xd1\x74\x72\x14\xcb\x6a\x26\xd8\x67\x5f\x59\xf9\x73\xec\x51\x3a\x7f\xa1\xce\xee\x23\xcf\x54\x98\xc9\x1f\x2a\xa7\x8b\x9d\x2b\x67\x72\xf2\xda\x19\xb9\xd3\x3b\x45\xaa\x76\xdc\xe4\x1f\xce\xf2\xda\x94\xaf\x5a\xd8\x55\xcd\xa1\x05\x6a\x48\x57\x34\xbe\x52\x8b\x38\xce\x6c\x85\x54\x90\xdc\x02\xc0\x87\xf3\xfe\x01\xde\xca\x7e\x5b\x05\x43\x12\x90\x08\x02\x83\xce\x3a\xfe\x78\x11\x34\x74\x54\x3e\xb4\xeb\xc4\x29\xc9\x1e\xf6\x9b\x9e\xfd\x20\x2a\xe8\xdb\x7a\xbd\x5b\x1c\xed\x50\x1f\x4b\xd6\x90\x35\xa9\x30\x1b\x75\x19\x26\x42\xe1\xa1\x4f\x6c\xd8\x38\xd4\x36\x21\xf5\xd0\xfa\x95\x5d\x54\x26\x90\x2a\xdf\x6b\xb7\xf9\x17\xe3\x92\x6d\xaa\x07\xc6\x23\x71\x2a\xce\x3b\xb0\x0c\x75\x73\xac\xa4\xa9\x91\x3f\xc0\x70\x61\x21\xa7\xde\x8e\x10\x7a\xef\x77\x85\x30\xa3\x05\xbf\xac\x62\xa8\x19\x70\x6d\x3f\x65\x3a\x92\xef\xc0\x22\x5e\x08\xf6\xd0\xd5\x37\xc8\x48\x77\x68\x85\x0e\x3b\xa9\x48\xc3\xe0\x5c\xde\x81\xa4\xc4\x75\x97\x2b\xcd\x61\xe1\x24\x55\x9d\x0f\xc9\xb9\x04\xb7\x8e\x6a\xa4\xce\x6b\x3e\xbd\x74\xb7\x51\xec\x78\x0d\xde\x2b\x42\x42\xb7\x7e\x35\x0a\xeb\xe0\x57\xdf\x37\xba\x75\xb4\xeb\x91\x34\x4f\x09\x7f\x87\x92\x5a\x7a\x21\xa5\x55\xe7\xc3\x17\xd5\xb2\xad\x55\x6d\x0d\xeb\x50\x9a\xc0\xfc\xfb\xd9\x71\xfc\xc6\x84\xe8\xe2\xf8\xcd\x8f\x0a\xc6\xc2\x77\xa6\x05\xd0\x07\xf2\x9d\x4e\x4a\xce\x36\x7a\xb7\xb6\x91\x59\xae\x9d\xac\x96\x55\xdc\x16\xf3\x61\x15\x8e\xd3\xa3\xb6\x4a\xf0\x80\x98\x29\x4e\x84\x74\xd7\x6f\xf6\xf9\x2c\xff\xfc\xec\x28\x6c\x69\x73\x89\x0c\x8f\xc4\x69\x45\xd0\x30\xa7\
xc3\x65\x79\x5d\xf5\x5e\xa9\xad\x8e\x8a\xb9\x02\xba\xc3\x21\x9a\xe6\x30\xd8\x9c\x54\x59\x07\x9e\x83\xc7\x26\x7c\x48\x9f\x59\x48\xd8\x79\x7b\x90\xfa\xe2\x74\x00\xed\x3e\x91\x33\x6b\x67\xea\xcd\xca\x52\x43\x37\xdb\x8b\x0a\x8e\xfa\x6e\x1b\x25\x4a\xc5\xdc\xe2\x34\x61\x92\x86\x23\xcd\x98\x3a\x8d\x33\x85\x92\xbe\x86\x3e\xf5\xc9\x2f\x6c\x69\x6d\x42\xff\x7f\x9d\x33\xe2\x47\x2c\x5c\x87\xce\x8a\x7c\x0a\xfd\x84\xb1\x17\xa8\xf7\x8f\x0c\x59\xbb\x0e\x03\xe3\x18\xa6\xc7\x73\xca\x3e\x89\x59\xce\xd2\x86\x43\x35\x53\x36\xd2\x4a\x4d\x93\x9d\x1d\x1b\x32\x1c\x9c\x60\x05\xed\x0a\xb9\x3b\xce\xaa\x0a\xe8\xee\xf9\xb1\x61\xb6\x1d\x7e\xa2\xa1\x11\x61\xeb\xcb\x19\xbc\x6f\xc2\x49\x28\x2a\x8b\x47\xcc\xb4\x82\x7b\xa8\x34\xf3\x38\x06\xa6\x85\x0e\x92\xda\x33\x34\x27\x39\xda\xcd\xd7\x3d\x40\x8d\x62\xcb\x14\xf7\x97\x88\xb7\xa3\x74\xe4\xe0\x38\x5b\x8e\x48\x42\x2f\x59\xbc\x14\x9a\xf7\x8b\xd6\x96\xec\x5f\xb3\xaa\x67\xda\xab\xcd\x81\xc3\x27\x27\xa7\xa3\x4e\x94\xc9\x9a\xca\x1c\x54\x55\xdb\xd0\x91\x21\xa9\xaf\x43\x00\xf5\x24\x5a\xaa\x5b\x43\xc2\xdc\xf8\x93\x17\x37\x64\x58\xac\x5f\x57\xf5\xe1\xd4\xf8\xe8\x82\xa8\x6f\x2f\xcd\xbf\x82\x0f\xf7\x96\x8b\x60\xc5\x10\x33\x4c\x6f\xd8\x25\x67\xb8\x89\x6d\x76\x85\xb6\x4e\x8d\x75\x2d\xd4\x3c\x39\x61\xfb\x9e\x3f\x76\x5e\xb4\x4f\x03\xd1\xb3\x96\x33\xed\xc7\x13\x3a\x85\x08\xa5\x81\x4a\x60\x02\xe3\x66\xaa\x1a\xce\x4f\x74\xf6\xdd\x62\x1a\x5b\xf8\x5a\x4f\xfd\x8d\xaf\x86\x72\xbc\x76\x4c\xe0\x95\x2a\x44\x69\x6b\x08\x13\xcc\xd8\xe2\x72\x04\x56\x2d\x7d\xd8\x18\x68\xd8\xb8\xa3\x7c\xce\xc4\xa1\x2a\x8a\x40\xdc\x5e\xda\x1e\x0c\x04\x39\x72\x7f\xd4\x75\x31\x1c\x31\x74\x97\xdd\xab\xb5\x3f\x71\xce\x7c\xbc\x56\x66\x24\x27\xf6\xc9\x7b\xf0\x51\x69\xdb\x03\xba\x17\x20\x40\x8e\xa3\xb8\x21\xa3\xf8\x09\xca\xfe\xb0\xe3\x37\xe2\x63\x85\x41\x60\x99\x82\xc7\x16\x6b\x92\x13\x58\x68\x7c\xd8\xd8\x5d\x7c\xc4\x89\x31\xc2\xec\xbf\xe6\xf7\xd1\x12\x9b\x38\x6a\xc6\xa8\x4d\x88\x2c\x17\xce\x0c\xc0\x61\x85\xe8\x2f\x96\x1c\x9f\x38\x44\x71\x95\x57\x46\x51\x60\x53\x76\x65\x83\xc9\x4c\x32\xd5\x64\x1c\x33\x78\x47\xf6\x48\x44\x65\x59\x9c\x5e\x40\xb3\xab\xae\x9c\x03\x92\xfc\x64\xab\x2b\xb2\x1f\x6b\xd8\x32\x8b\x13\x66\xf6\x32\x6a\xb2\xe6\x3d\x8e\xfa\x8f\x24\x1b\x9d\x24\x85\x72\x44\xd4\x46\x4a\x7f\x78\x37\xd2\x8a\x8a\x2b\x91\x35\x8d\xb6\x59\xf0\x97\xe0\xce\x85\xa8\x6c\xc8\xc8\x77\x12\xf5\x56\xfc\x94\xd1\xae\x59\xa1\xf0\x64\xbd\x3c\xfa\xdc\x7a\x1d\x4f\x01\x0c\xd7\x73\xb4\x8d\x72\xa9\x61\x54\x9f\xeb\xb7\x41\xa9\x3a\xb1\xeb\xbf\x0e\x0b\xa8\xbd\x48\x3c\x72\xfa\xca\x70\xa6\x92\xe4\x1b\xd1\xc7\x39\xa2\xe5\xcb\xde\x38\xbf\xf3\x7c\xb4\x8d\x15\xc8\xcd\xa1\x41\x0c\xf0\xc1\x0a\x6a\xd6\x81\x32\xab\x0d\xf9\xa3\x4a\x40\x63\x63\x92\x55\x6a\x99\xed\x49\x3e\x04\x24\xb8\xdf\x94\x5c\x5f\xd8\xee\x9c\xcb\x18\x0b\x29\x67\x99\x3d\xd8\x8d\x89\x32\xfc\x26\x68\xe2\x7a\xfc\x69\xb6\xf7\x74\x78\x43\xf2\xf7\x9b\x4a\xb3\x01\x9f\x06\xdd\x97\x54\xf6\x30\x5c\x11\x3d\x21\xac\xab\x6f\xcc\xaf\x56\x1d\x42\x9b\x51\xc2\xc7\x0c\x1a\xf9\xe0\xe6\xa1\x5f\x1a\x8d\xcc\x21\x53\x68\x02\x3d\x15\x94\x5e\x33\x41\x5a\x27\x3e\xd6\x81\x3a\x13\x1d\xa2\x85\x17\x8f\x30\x51\xd8\x71\x76\x62\xa0\x61\xa8\x96\xd5\xb1\x01\x11\x75\x62\xad\xc2\xbd\x87\x34\xea\x54\xd0\x2f\xa5\x8a\x03\x48\x01\xb9\x22\x73\x18\x6d\x55\xa4\x85\x4e\x9f\x2f\x8f\xb5\x33\x06\x9e\x38\xa9\x47\xa9\x1e\x34\x89\xe1\x76\xf4\xc1\xda\xde\x0f\x57\x01\x0d\x8c\xb5\x43\x6e\x06\x94\x4a\x12\x20\x3e\x8b\xd2\xc8\x64\xbe\xbd\x0a\x35\x6a\x94\x50\x14\x59\x2a\xb6\xbb\x7e\x0f\xe9\x88\xf6\x8e\x43\x6a\x4d\x05\x8f\xc8\xeb\x51\xb4\xc7\x35\x9c\x25\x0b\x30\x13\x50\xe3\x81\x73\xf8\x3f\x84\x44\x29\x7b\xf5\x82\x55\xf4\xda\xd5\x85\x5b\x0a\x08\xf1\xc5\x32\x52\x79
\x3e\xd9\x46\xa7\xc7\x23\xb9\x87\x55\x55\x00\xce\x74\xde\x49\xbe\xfa\x10\xe3\xee\x1c\x78\x43\xb3\x6a\x1a\x0d\x85\x26\x34\xd4\xa1\x0d\x20\x7f\x07\x7e\xfb\x9f\xab\x5f\xd4\x3f\x67\x98\xf5\x2d\x67\x3d\xca\xf9\xb0\x00\x4a\xcf\x61\x60\x5b\x87\xdc\x0e\x6f\xdd\xe6\x50\x08\xad\xb4\x40\xad\xba\x8f\x3a\x25\x1b\x57\xed\xbb\x35\x70\x1c\xe5\xea\x2d\xfd\x2a\x01\x83\x9e\x6e\x13\x71\x04\x87\xc4\x87\x9f\x12\xd4\xf2\x87\x1a\xa3\xd6\x0c\xd5\xce\x43\x48\x4c\x03\x3a\xd0\x31\x7d\x5c\x8f\xd5\x0a\x4d\x3c\x46\x52\xaa\x8c\xb6\x79\xad\xf5\x31\xa4\x53\x3d\xa4\xc4\xb6\x86\xba\xff\xa1\xf3\x0d\x0d\x38\xf3\xe4\x77\x18\xe2\xb3\xb9\x73\x16\x05\xfd\xbf\x43\xee\x92\x87\x74\x92\x91\xf5\x88\x22\x13\xc9\x24\x49\x58\x07\xb2\x4f\x00\xbe\x1d\xe6\xd6\xa6\x6a\xce\x47\x7b\x59\xb8\x6d\xca\x97\x01\x67\xf8\xd6\x1f\x44\x6e\xc1\x9b\xfd\x60\xea\xc9\x2e\xfd\xc6\xe5\x12\xcb\x0b\x7c\x5b\xb6\x11\xdc\xde\x16\xc6\x7c\xc4\x32\xa4\xa7\x1a\x9e\x84\x58\xc6\x59\x14\xdb\xb2\xfc\xd3\x74\x6b\x1e\x46\x43\xa9\x30\x71\xbb\xd3\x51\x16\x4e\xd9\x10\x7f\x4c\x60\xf6\x88\x3f\x9c\xe9\x3c\x1f\xfd\xdb\x78\x8e\xea\x91\xe6\xa2\x0e\x0c\x1c\x6c\x23\x3e\xa0\x50\xca\x48\x50\xeb\x68\x9c\xb7\xc4\x9c\x43\x72\xbb\x6b\x10\x25\x20\x01\xe2\xa3\xdb\x0c\xfc\xc6\x5f\x96\x2f\x47\xc7\xd5\x14\xd8\x92\x5e\xc9\x06\x75\x2e\x84\x72\x60\xbe\xd3\x74\x9b\x6b\x10\xe7\x81\x74\xe1\x10\xba\xca\xbd\x99\x3d\x62\x5b\x1c\x93\xb6\x06\xce\xd3\x59\x6d\x1a\x36\x07\xbe\xa8\x88\x03\xe8\x86\x09\x75\x6a\xa5\xa2\x6d\x78\xfc\xa8\xf5\x0e\x54\x0b\xac\x7b\xce\xc0\x2d\x37\x79\x4b\x94\x5f\x65\x21\xbe\xfb\xf0\xec\x59\x65\x9d\x0c\x36\x57\xa9\xf3\xe1\xcc\xc5\xe3\x26\xbe\xa2\x9a\xa9\xaf\xf9\xf3\x49\x18\x28\xa4\xfc\x1d\xae\xa7\x33\x31\x50\x87\x09\xd0\x53\x55\x58\x6c\xe4\xcf\x5a\xcb\x27\x4b\x95\x7a\xdd\xf5\x0b\x7c\xfd\xb2\x5c\x07\x40\x67\x82\xab\x1c\x95\xb8\x60\x1a\x44\x0f\x74\x4b\xe9\x0f\xfc\x6a\x03\xa9\x99\xcd\x77\xaf\x3e\x75\x1b\x1d\xe1\x8b\x8b\xc5\x84\xd5\x6f\x89\xca\x2f\xdd\xe8\xfc\x56\x83\xa8\x42\x63\x2c\xfa\x79\x0f\xb1\x0b\x89\xc8\x6f\x40\xb7\xab\x65\x88\x8d\x68\x71\x12\xb1\xe6\xba\x92\x39\x4e\x08\xb5\x46\xcc\x6d\x79\x98\x01\x75\xdf\x76\xa1\xfc\x31\xd4\x2e\x2f\x12\x01\x2f\xa2\x5b\x9a\xf8\x55\xb3\xef\x27\x47\x28\x1f\xca\x39\x02\xee\x1a\x31\x8a\xfb\xf0\x41\x0c\x7a\x54\x69\xfe\x56\xae\x7f\x76\xc2\x5f\x1e\xfd\xea\xec\xd0\x16\xc7\xb0\x23\xda\x9c\x5c\x56\x5c\xcb\x26\xf6\x4b\x9a\x0e\x5b\x65\x20\xb9\xa8\xd3\xae\xcf\x65\x69\x70\xaf\x20\x3d\x6a\x76\xf3\xff\xb9\x7a\xb3\x05\xd5\x75\xdf\x69\xf4\x5d\xb8\xfe\x5e\xca\x49\x4c\xe2\xce\xe0\xfc\x32\xc0\x86\xa7\x3f\x2a\x55\xc9\xac\xff\xb9\xd8\x1b\x77\xaf\x9e\x80\xc4\x96\x4a\x35\xa8\x25\x99\x9a\xd7\xd6\xf4\xc3\xcd\xf0\xa8\x24\xd9\xe9\x27\x78\x1b\xdd\x25\x9b\xc7\xb3\xae\xc9\x9a\x27\x99\x0f\x3e\x7f\x69\xa9\x6e\x5e\xfa\x70\x46\x00\xeb\xf4\xe1\xa0\x13\xc8\x50\x23\x86\x27\x13\x80\x5d\x12\x1b\x24\xbe\xee\x03\xb5\xd7\x3d\x88\xcd\x62\x03\x08\x89\xdd\x04\xe8\xf7\x61\x4b\xea\x61\x66\x72\x95\x76\xca\x6b\x43\x68\x6e\x97\x67\xd1\xa1\xab\x03\xa3\x43\x69\x5a\x0b\x27\x2b\x56\xa3\x7e\x7c\x88\x82\x1b\x99\x71\xaf\xb3\x9c\xd1\x9c\x8b\x28\x1c\x15\x83\x06\xdf\x57\x21\xcd\xa2\xb4\xcc\xbf\x2a\xb9\x1a\xe5\xeb\xd8\xb2\x57\xd9\x9f\xb0\x09\x7b\xd7\xe5\xa9\xa1\x61\x95\xdc\xef\x55\x5b\xe6\xf5\x41\x4f\x0b\x7b\x1f\xfe\x0b\x05\xe5\x41\x5f\x15\x4f\xca\xe4\xc6\xb9\x97\x9f\xc4\x17\xac\x04\x72\x8f\xec\xfc\x1e\x3d\xec\xce\xd5\x2a\x2d\x6b\x38\xb9\x35\x97\x75\x1d\xad\x6a\xe5\x71\x0b\x09\x84\xcc\x13\x4b\xdf\x83\xab\xec\xa4\xce\x7c\x04\xd7\x60\x64\x91\x0f\xee\xf0\x75\x7b\xc3\x5e\x9b\x64\xac\x0e\xb0\x95\xf6\x8b\xd7\x5f\x24\x9c\xd8\x41\xc5\xb6\x4d\x58\x17\xea\x51\xa3\xeb\xb1\xbf\xa8\x57\x5c\x53\x5a\x6e\x47\x11\x30\x9c\x62\xd2\xb
a\x55\x5a\x8d\xa4\x92\xce\x76\xbc\xcd\x6f\xeb\xf0\x7f\x1f\xd9\xe5\x47\x84\x96\x15\x11\x72\x63\xc3\xe0\xfe\x13\xfb\xe9\xab\xa9\x2b\xe2\x68\x99\x12\xcf\x4d\xdb\x56\x9b\xed\x71\xf6\x8b\xce\x37\x56\xb5\x49\x8e\xc8\x78\xb5\xf8\xb4\x83\x36\xa4\xb2\x4f\xee\x66\x31\xf5\xc8\xc0\x99\x49\x5c\xda\x26\xfa\x82\xd0\x37\x00\x0a\xce\x5f\x22\x5b\xf8\xe0\x77\xe1\xd1\xe1\x6e\x2c\x24\x6d\xea\x2c\x84\xfd\x8a\xb6\x23\x8e\x97\xed\xef\x16\xcb\xf8\x62\xe9\x74\xb5\xfc\xee\xf3\x2d\xc7\xf6\xf3\x65\x1d\xdf\xf0\xcb\x59\xd2\x8c\x09\xfc\x74\xa5\x36\x34\xed\xe3\xd9\x6c\xcf\x4f\x72\x34\x74\xb6\xb4\x00\xb5\x33\x48\x76\x3e\xc1\x54\xf2\xda\x5a\x20\x06\xf0\x2b\xe3\xa3\x22\xca\xae\x15\x62\x56\x47\xab\xd3\xad\xc1\x73\xea\xc0\x7e\x2f\x99\x2a\x27\x70\x8c\xf8\xf3\x37\x5d\x44\x80\x88\xc3\x67\xbe\x0c\x43\x0a\xdc\x6e\x71\xac\xd5\xbe\x8f\xfb\xcf\xc5\x7a\x6f\xbe\xff\xd2\x9b\xc7\x27\xf4\xdc\x7c\xaf\xc1\xc3\xf7\x31\xaf\x9d\x38\x8b\xfa\xed\x09\x51\xe6\x82\xdc\x32\x1d\xb5\xa6\xbc\x47\x9e\xcf\xba\xc5\x62\x0d\xe2\x5a\x33\xb7\x60\xa2\xb9\x37\x68\xa3\x77\x24\xdc\x8a\x10\x87\x54\x42\x98\xca\xe4\x78\x7f\xdf\xfd\xe5\xf5\xad\x2a\xb0\x9b\xb4\xcc\xfc\xf1\x56\x6d\xa8\x51\x1b\x6e\x9e\x79\x3e\x7e\xd6\x41\x9a\xc2\x5a\x11\x4f\x44\xad\xf2\x94\x86\x5f\xa4\x2b\x8c\xf7\x5a\x2f\x40\x34\x99\x2a\x6e\x28\xa8\xe8\x81\x76\x04\x27\xb3\xab\xef\x58\x2c\x83\x34\x38\x3a\x52\x6d\x2f\x57\x2e\xa9\x2d\x8b\x7c\x18\xbe\x31\x59\xee\x9a\xe8\xd2\xd3\x07\xa2\xb2\xa3\x49\x15\xcd\x7c\xf8\xfc\x61\x93\xcb\x0b\x02\xfa\x0d\x9e\x25\x57\x64\x67\x5b\x3b\x55\x3b\xd6\x2e\x9e\x3f\xf0\x1b\x6a\x8e\xc1\x7a\x51\x6f\x87\xfd\xc8\x8f\xc7\x3d\xf7\x24\x33\x9f\xb5\xc5\x41\xae\x59\x58\x8a\x03\x81\x7e\xa5\xf6\xb5\xb9\xe8\x25\xc2\xdb\x87\x33\xaf\xdc\xb4\xac\x8d\x8d\x4e\x5e\x7f\x4b\x98\xa4\x54\x7e\x62\x94\xdf\xab\x58\x3f\xf0\x25\x7b\x38\x94\xe3\x9b\x56\xb4\x58\x8c\x6d\x88\x21\x53\xd8\xbd\xee\x59\x50\x33\x8d\x55\x1e\x9c\x3b\x11\x34\xc4\x36\xd7\x93\xc5\x6b\x4d\xdf\x6f\x32\xaa\x91\xc7\x40\x61\xd8\x1a\xd5\xcf\x8a\x19\x06\xbb\x17\xf7\x05\xa1\xc8\x3c\x2f\xa3\x46\x52\xd0\x67\xd3\xc7\x0f\x54\x2c\x81\x02\xab\xac\x79\x96\x0f\x4f\x29\x1c\xd7\x38\x61\xfd\xa6\xf8\x74\x61\x9c\x9c\x5f\x52\x16\x8d\xcd\xa4\xfa\x99\x22\x7d\xd3\x2e\x9b\x90\xf7\xcb\x76\x6d\xf6\xb3\x71\x06\x67\xf5\x9c\xf1\xe4\xff\x52\xe0\x7a\xb0\xc2\xfc\x39\x60\xfa\xdd\xf4\x16\x06\x33\x31\xe7\xd5\xbf\xaa\x08\x5f\x03\x69\x5e\xcd\x77\x91\x41\x32\xa6\x5c\x34\x91\x18\xb1\x3d\xd0\xf1\xd2\xba\xcc\xb8\x31\xc2\xc5\xed\x19\x3c\x9f\x67\x8a\x30\x5c\x94\x42\x1a\xcd\x87\x64\x26\x93\x60\x34\x88\x1a\xd8\xc6\x31\x40\x48\x02\x26\xc1\x9c\x97\x69\x11\x0c\x0c\xeb\xad\xf2\xde\xa6\xc6\x0c\xf3\x31\x4c\xb3\xa4\xaf\xfe\x7a\x91\x24\xb0\xd4\x0f\x19\x01\xa9\x00\xd6\xe3\xc8\xff\x5e\xfe\x89\xcf\x91\x19\x9d\x1d\x41\x8a\x2a\xe8\x23\x4f\xdf\x09\x00\x72\xe3\xc4\xed\x1c\xa3\x39\xc5\x6f\xd6\x61\x93\x6a\x5f\xdb\x1a\x16\xbd\xdf\x7e\x8b\xec\xec\xa7\xc8\xa4\xb7\xfa\x74\xaa\x92\xc8\xc9\x68\xe0\x1d\x29\x3a\x90\x9a\x57\x9e\x04\x52\x71\xa0\x96\x50\xe4\xe5\x75\xb4\xb1\x5e\x26\xd1\xfd\xb2\x56\x29\x45\x3e\xdf\xf9\x19\xe2\x44\x08\x57\x08\x87\xc2\x25\xad\x5a\xaf\xc2\xad\x1f\x1b\x03\xff\x15\x4b\xd1\xdc\x62\xfd\x60\xff\x2f\x01\x48\x96\x37\xb9\x7b\x53\xd2\xa6\xf5\xd4\x20\xe4\xa0\x17\xd2\x71\x2b\x66\xba\xec\x3b\x41\x02\x69\xd8\x6c\xe1\x35\xed\x01\x2e\x5d\x74\x53\xff\xbb\xa9\xff\x03\xe5\x88\x88\x15\x47\x25\x5b\x24\x90\x6d\x1b\x37\x1e\xe4\x73\xbd\x5b\x83\xcf\xb7\x16\x3a\x82\x81\x7b\x3e\x66\x43\xa1\x1a\xb8\x83\xb3\x76\x29\xac\xdd\xae\xbe\x4d\x63\x89\xf9\x90\x06\x44\xdb\xdd\x9c\x78\x31\xfe\xe5\x3d\x62\x11\xc8\x5a\xb5\x22\x42\x8f\xfc\x58\x41\x39\x1c\x28\xea\x66\x74\x30\x24\x1c\x32\xbf\x55\x59\x89\x73\x18\xc1\xae\x30\x
bc\x7d\xb8\xe9\xb6\x2e\x6a\x6b\xd1\xd4\x9f\x8f\xcb\x1d\xb0\x14\x3d\x3b\x1d\x9f\x6a\x81\x53\xa3\xbd\x54\x7e\x47\x3f\x23\xa6\xef\x29\xe0\x30\x7f\xe8\xb7\x71\xb6\xee\x30\xa7\xc8\x5b\xb7\x7b\xac\x84\x2b\xfa\x02\xc9\xbe\x84\x61\x88\xff\x0a\x68\xf5\x12\xfa\xd5\xb7\x88\x7d\xbb\xb6\x9b\xe3\x66\xf8\x23\x20\x5d\xcb\xcb\x51\x14\x2a\x5b\x14\x2a\x57\x0e\x4b\x04\xe9\x0b\x61\x90\xe6\xbf\x5a\xe0\x27\xb3\xfa\xed\x56\xeb\x63\x2b\x7d\x37\xfb\x4d\x51\xa2\x70\x61\xc7\x5f\x6e\x4d\xd6\xcf\x9b\x53\xaa\x0a\xa2\x4e\x2e\x1a\x04\x81\x83\xf2\xc1\xa4\xcd\xe9\x8e\x3e\xf8\xba\x35\xf3\xbd\x1a\xd2\x7f\x95\x65\xd4\xc8\xe7\x8a\x67\x64\xd7\xb0\x48\x96\x84\x64\x4e\xca\x49\x44\xa1\xa1\x07\x5a\xb4\x3f\x07\x19\x24\x47\xe5\xa8\xbb\x93\x67\x1b\xae\x56\x0d\xbd\x77\x58\xc3\x3d\x5c\xa5\x2d\x7d\x13\xfb\xbe\x1a\xec\x2d\x09\xf9\xb6\x4f\x7c\x62\xb3\xd2\x1c\xb9\xf1\x58\xa6\x36\xf6\x05\xbe\x80\x07\xb7\x31\xa4\x5b\x31\xc8\x33\x7a\xd4\xef\x5d\x7f\xcc\x99\xd2\x34\x8b\x7d\x63\x70\x06\x25\x17\x15\x33\x2d\x74\xd6\x34\xff\x0c\x09\xff\x72\x5b\xb6\x69\xd2\x72\xcb\x1a\x53\x30\xb0\x95\xad\xcd\xa3\x01\xcd\x9e\x7f\x4d\x92\x5b\x69\x14\x49\x2e\x51\xe5\xb9\x80\x3b\x51\x86\xb0\x9e\x00\x1c\xe8\x03\x70\x0e\xff\x0a\x79\xfb\xc0\x48\x40\x1c\xef\xa5\x58\xd5\xa2\xf3\x01\xb4\x06\xc9\xe3\xc4\x27\x47\x39\xbd\x0b\xa7\xf8\xb9\x0a\xd8\xee\x46\x24\x0d\x05\x96\x9f\x1d\xc0\x5c\x45\xf3\xa1\xc1\xcf\x15\x57\x3d\x2e\x6e\x04\x8f\xfa\xd1\x50\xd5\x80\xda\x13\xfe\x1d\x0c\x32\x30\xb0\x3f\xfc\x94\x07\x39\x06\x11\x11\x37\x0c\x2f\xcf\xed\x2b\x9e\xd8\xa2\x01\xb1\x9d\x0a\xed\x75\xe9\x3d\x68\xde\x57\x53\x49\xba\x5d\x80\x5e\x2a\x2e\xce\x7a\xc5\xdd\x7a\x6a\xf6\x8a\x2c\xaa\x8e\x3a\x27\x85\xd8\x23\x5a\x4a\x07\x04\xb7\x7c\x64\xca\xa9\x67\x2c\x2d\x3f\xc6\xd3\xf2\x89\xd8\xca\xdc\xc3\x4a\x1c\xa5\xf7\x47\x38\xd2\x37\xfd\xa5\xfe\x21\x09\x42\x15\x42\x21\xd9\x64\xcb\x23\xb0\x97\x2d\x0a\x31\xcc\xdc\xbd\x15\x0c\xc8\xe9\x72\xb7\x66\x3a\x13\xdc\xc1\x41\x8e\xf9\x2f\xaa\x30\x1a\xe9\xd7\xff\x63\x58\x8c\x39\x21\x8b\x7f\xc4\x43\x7b\x6d\x56\xc6\xe8\x22\x43\xb4\xe0\xe7\xac\x08\xea\x6d\xbe\xb6\xd2\xbc\xb4\xb1\x47\x4f\xf0\x7e\x1e\x4d\x44\x9f\x44\xf0\x21\xef\x0f\x16\x37\x85\x1e\x26\x4e\x81\x80\x5f\x57\xe1\x94\x65\x6f\x11\x10\xdb\x27\xac\xa4\x1c\xfa\x70\x2a\xc6\x86\xee\x8d\xfc\x00\x2b\xb7\xaa\x14\x2f\x56\xee\x84\x73\x2f\xd5\x0d\xd4\xf5\x76\x61\xd1\x0d\x0c\x18\x90\xa2\xdf\x0b\x78\xc3\x97\x90\xca\x92\xec\xf1\x64\xb4\x24\x7c\xc8\xe5\xbe\x76\x11\x6a\xb0\xea\xea\x16\x51\x28\xf5\xfc\xd4\x2c\xd4\xc8\xdb\x0f\x62\x7c\x24\xf8\xcc\x29\xc8\x93\x78\x0f\x1f\x60\x48\xab\x0d\x81\x78\x9f\xa6\xe3\xb2\x0c\xb4\xb2\x4a\x06\x94\x42\x42\x56\xa2\x73\x56\x71\x17\x11\x59\x2f\xfb\xbd\x9c\x03\xcb\x93\x02\x6c\x14\x41\xeb\xc3\x1d\xde\x7d\x43\xa5\x66\xaa\x8c\x05\xbe\x00\x9b\x7b\x5c\xe5\x68\xab\x06\x14\x7f\xf4\x1e\x3b\x8a\xbc\x7f\x8f\xdd\x25\x26\xb6\xb0\xa3\x9e\x95\x93\x2c\xcd\x40\xd1\xde\x74\xfc\xf7\x22\x40\xb8\xdb\x5e\xc0\xc7\x41\xe8\xf5\x45\xa3\x0c\x1c\x63\x50\xa1\x3a\xbb\x62\xf8\xb9\x1f\x63\xa1\x0b\x6b\xfc\xce\x67\x1d\x74\x7a\x05\x46\xe2\x67\x4e\xd2\x88\xef\x37\xfe\x4a\x11\xb3\xf0\xbd\x55\x0d\xbe\x9b\xfe\xc3\xb9\x4d\x67\x9c\x0f\x11\xc4\x17\x21\x4b\xf6\xc3\x5d\x15\xe6\xf7\xc2\xbd\xee\xba\xdc\x05\x8b\xe2\x58\x20\xc0\x5c\xe2\x82\x5d\x1a\x8b\xdc\xf1\x0f\x6f\x3a\xec\xee\x20\xea\x71\x51\x87\x73\xaa\x15\x38\x42\x95\x0f\x76\x34\x09\xd0\xa2\x52\x85\xcc\xbc\x92\x62\x88\x58\x60\x4d\x80\x7d\xeb\x70\x23\xe9\x5e\x83\x60\x4d\x54\x56\x98\xf6\x08\x64\xde\xe2\xc2\x4d\xbe\x0d\xb2\xf3\xff\x2d\x7c\x23\xcf\x9c\x54\xec\x52\x2f\x07\x03\x63\xc6\x64\xc7\xc7\xbc\x39\x6d\xe1\x2d\xd4\x86\x9a\x2e\x5d\xce\x82\x8d\xdf\x01\x24\xcb\x16\xa4\x53\x13\x1c\x68\xcf\xf8\xdb\x2b\xc7\
x5b\x36\x30\x76\x35\x3f\x5c\xcf\xcc\xc9\x19\xd8\x97\xa4\xb1\xe1\x30\x6f\x38\xf1\x27\xfc\xbf\x97\xaa\x8b\x34\x54\x95\xde\x47\x0f\xcd\x81\xa8\xbf\xb5\x15\xd7\xa8\x4b\xec\x8e\x6e\x04\x7a\xf8\x3c\x96\x2b\xb6\xd7\xd2\xa6\x63\xea\xe9\xbb\x5b\x9b\x69\xb6\xfe\x35\x29\x64\xe4\xc6\x50\x89\x99\x75\x50\x5c\xaa\x62\x21\x1e\x8d\x47\x22\x6f\xb6\xb5\x7d\x71\x45\xbb\xe9\x6a\x96\x86\x63\xa0\x04\x14\x8d\x2f\xef\xc0\xe4\x5d\xf3\xe5\x1a\xc1\x77\xa8\xca\xde\xda\x14\xad\x8d\x71\x3a\x87\x73\xa7\x84\xca\xb9\x43\x8c\x9e\xc9\x25\x12\xf3\xf5\x8f\x70\xfe\x74\xb3\x76\x5f\xa4\xf0\xac\x80\x8b\x99\xae\xbe\x7a\xcf\xcc\x60\x03\x3f\x91\x7d\xaf\x06\x64\xf0\x12\x12\x47\x3c\x38\xdd\x27\x70\x44\x0e\x54\xf0\xea\x49\x4f\xe2\xc8\x1c\xdb\xaf\x23\x7b\x81\xbb\x6b\x98\x82\x9c\x37\x46\xb5\xfd\x93\x9a\x6c\xc5\x88\x04\x0c\xd0\x8c\x75\xec\xa8\xed\xfb\xbf\xff\x0a\x78\x4f\x2d\x25\xb9\xb7\x8a\x92\x3a\x68\x90\xa9\x08\xe8\xac\xa0\x08\x56\x7d\x72\x88\x09\x5b\xbf\x51\xeb\x03\xbd\x67\x11\xfb\xd4\xae\x48\x32\x07\x96\xe6\x6c\x0b\x63\x6e\x31\xf8\xdc\xfe\xd9\xaf\xe2\xc0\x2b\xe7\x72\xf0\x52\xf3\x4c\x25\xd2\x17\xd4\xc1\xfc\xd1\xaf\xc9\xce\x43\x31\x20\x6d\x67\x55\x88\x43\x73\x0a\x4c\xa2\xf8\xda\x5f\x1f\x23\xe2\x3d\x90\xe8\xdc\x54\x8a\x87\x22\xce\x80\xef\x04\xbc\x93\x35\x25\xe1\x56\xf4\x94\xf6\xea\x59\x82\x81\xfb\x94\x92\xfd\x9f\xe1\x31\xae\x78\x3d\x13\x4c\x4a\x24\x62\xcb\xd2\x81\x0e\x08\x36\xe3\x5d\x40\xc8\x62\x28\xe7\x7a\x47\x30\xb2\x4f\x53\x7d\x09\x3b\x02\x96\x22\x83\xb5\xe5\x61\x5d\xb6\x3d\x5b\x0f\x4b\x72\xcf\xa2\x43\xa3\xe7\xec\xaf\x87\x2a\x5a\x2d\xaa\x24\xe8\x7d\xe4\x8a\xf5\xe9\x7d\xc9\xfe\x23\x29\xe7\xc2\xfd\x27\x49\x9e\x58\x6e\xc6\x9f\x3a\xa0\xad\xa0\x98\xe6\x27\xc0\xb0\x6e\xec\x18\x4a\xea\xba\x69\x1b\xb1\x4a\x07\xb9\x3c\xed\x14\x4f\x1a\x4d\xbe\x43\xc0\x85\x4b\x54\x43\x44\x0c\x55\x04\x13\xb9\xcc\xa5\x0b\x31\xec\x41\xec\xd2\x4e\x6b\x3b\xe2\x1d\x9f\xa1\xa7\x8d\x6d\x08\x7a\x0b\xae\x8f\xd7\x4c\xd7\x3b\xb2\x7b\x55\x98\xd3\x26\xc4\x17\x9b\x72\xe8\x81\x6a\x86\xc8\xc6\x37\xa1\x8e\xcd\x24\x7c\x41\x55\x4a\x9d\xe9\x4c\xba\x6f\xf6\xbd\xcd\xd5\x4f\xf7\x81\x9d\xa3\x3d\x9d\x78\x1d\xb0\x53\xc5\x77\xd9\x21\xf6\xc7\xb7\x60\xf7\xf7\x6c\xb7\xd7\xb6\xd1\xe5\x6d\xcf\xd3\xb8\x66\x0a\xbf\x28\x4f\xe1\x63\x19\x0e\xa2\x79\x8e\xb0\x1b\xb5\x49\xf5\x3f\x4c\x1c\xfd\x38\x84\x23\x04\xef\xc7\x7a\x34\xf6\x32\xef\xbe\x83\x36\xb1\xf0\x92\x3c\x64\x81\xdd\x9c\x6a\xfa\x59\xaf\x3d\x49\x69\x4a\x2e\x95\xc8\x06\x3d\x8e\x46\x92\x42\x8b\x97\x9b\x9b\xfd\x22\xe2\x28\xec\x2e\x54\xe7\x37\xd3\x48\x34\x87\xac\x65\x3a\x3f\x03\x1c\x4e\x85\x18\x27\x1c\xe1\xc9\xc8\xc0\x79\xd2\xa5\xfd\xc2\x6d\x8b\x9b\xed\x6b\x45\x50\x61\xe0\x27\x82\x96\xea\x29\xf1\xc6\x84\xd1\xac\x22\xba\x30\xa4\x54\xc7\x6b\xc7\x8a\x4e\x95\xa2\x3d\x0d\x18\x98\x9f\x1d\x20\xb7\xd0\x33\xe1\x54\x4d\x83\x3e\x9d\xa5\x3c\xad\x51\xd1\xb8\xfa\xc7\x0e\x1b\x06\x2a\x03\xce\x9b\x6a\x41\x2b\xeb\x27\x7f\xb5\x78\x99\x2b\x09\xb1\x77\x80\xdb\xf9\x16\x56\x57\x46\x27\x0a\xd5\x41\x34\xc1\x77\xe8\x7b\xee\x16\x44\x11\xd6\x7e\x38\x72\xc8\x47\xd2\x6b\xd9\x05\x4f\xba\x93\x9d\x14\x8c\x72\x26\x1f\xf3\xd4\x31\xfd\x42\xc6\xfe\x99\xa5\x47\x67\x9d\x0e\x1d\xed\xee\xd3\xa0\x09\x7f\x1a\xe6\x00\x63\xbe\xde\x8c\x12\x63\xa5\xb2\xb8\x1e\xff\x68\x8d\x83\xf4\xec\xdd\x31\x1d\x75\xfc\xc9\xc4\x0b\xe1\x74\x54\xef\xb8\x91\x2a\x98\x34\x0c\xca\x9a\x06\xdc\xd9\xe9\xc5\xbe\x4e\xef\x08\x2b\x79\x87\x98\xf9\xaa\x7e\x3f\xf8\xaa\xe3\xf7\xe6\x18\x28\xdb\x2f\x79\xe5\x16\xed\x90\x9c\xc7\x79\xc6\xdf\x70\x4a\xc5\x9a\xc3\x93\x98\xe9\x7b\x4a\xd2\x3a\xa1\x1e\xa4\xd1\xdd\xfd\x9b\xc4\x40\xe9\xb3\x45\x99\x60\xbd\x1c\xe1\x13\x18\xf9\x48\x10\xb1\x43\xc4\x2d\xdc\x96\xce\xb2\x41\x24\xe9\x04\xd4\xec\x59\x25
\xd7\xb5\x91\x90\x5c\xff\xa1\x8d\x57\x57\x2b\xf0\x6e\x3a\x7f\xba\x94\x07\xdb\x01\x2e\x64\x45\x81\xb0\xdb\x68\x04\xe0\xf5\x33\x8a\x40\x62\xbd\x19\x8f\x0a\x59\x1e\x46\x00\xe3\x22\xce\x32\x82\x49\xdd\xd5\xd2\xaa\x75\x15\xf6\x5e\x56\xfe\x55\xb9\xfc\x3a\xbf\xce\x27\x9c\x3e\x42\x60\x85\xdf\x6a\x23\x08\x20\x64\x84\x10\x16\x5a\xb8\x4d\xc8\x05\x5e\xa4\x0e\xc8\x0c\x59\xb1\xe7\xc5\x79\x7e\x1d\x48\xe9\xa4\x41\x82\xb7\xcc\xb9\xe8\x26\x1a\x22\x26\x80\x7e\x99\x48\x3e\xe2\xdd\x87\x13\x8a\x6e\x7c\x8d\x93\x88\xbb\x41\xb3\x88\xe6\x77\x6a\xdb\x19\x26\x48\x3d\xcb\x2b\x54\xbf\x2a\xde\x46\xee\xea\x79\x40\xf5\xaa\x31\x56\xba\x56\x49\x14\xdc\x6b\x12\xa6\x43\x4c\xbd\xc0\xd4\x75\x69\x7c\x94\xdf\x11\x66\x2d\x08\xa4\xc1\x5a\x47\x7f\x92\x5b\xa7\x35\xe4\x3f\x96\xc4\x76\x6b\xeb\xa2\xeb\x01\x18\x8a\x50\xb5\x4b\x68\xde\xd7\xf1\x8c\x49\x08\xd4\x5b\x7e\xd3\x16\xbb\xbd\xbd\x4f\x27\x03\x93\xae\xb2\xec\x2f\x50\xa3\x8b\x5d\x0c\x30\xab\x53\x71\x58\x9b\x40\x23\x74\x9c\x6c\x4d\x22\x2f\xbf\x13\x8f\xa9\xcb\x2f\x9d\x84\xd7\xa4\x19\x4b\x96\x69\x83\xd5\x90\x2c\x25\xcf\x5f\xa0\xe5\x42\xa2\x7e\xf8\x60\xe8\x18\x85\xa3\x25\x6d\xb1\x86\x39\x60\xb7\x4f\x4b\xbc\xb1\xc3\xd2\x73\x5e\xc9\xca\x9a\x6e\xd1\xb2\x4a\x98\x37\x67\x45\x4f\xbc\xaa\x10\x8f\x57\xf8\xd2\x7a\x6a\x56\x11\xe8\x15\x5c\xb6\xf3\xa6\x95\xef\x99\x2f\xb5\x36\xbd\xc8\x2b\xe8\xc6\x27\x3e\x1e\xf9\x47\x48\x14\xf6\xb5\xd2\xac\xf0\x50\x92\x5c\x1b\x20\x31\x77\xdc\xc7\x26\xe9\x37\xa4\xdb\xeb\x6e\xc7\x35\x71\x5a\x9f\x7e\x3c\x5c\x79\x9c\x74\x5f\x05\x5e\x80\x9b\xe8\x33\x70\x6a\x31\x82\xfa\x49\xf2\x5f\x95\x55\x6e\x0d\xca\xfb\xd2\x4c\x29\x86\xbb\x7f\xc8\xe7\x85\xf9\xa8\x89\x74\xc1\x35\x3a\xef\x3e\x1c\xa1\x0f\x36\x24\x73\x55\x84\x89\x2c\x20\xff\xd2\x2f\xaf\x6b\x8c\xc9\x04\xfd\xcc\xa6\x50\xbe\x4f\x39\x45\x7e\x37\xf5\xc7\x23\x1b\x42\x90\xb6\x94\xa2\x6a\x77\x1a\x5b\x96\xf1\x97\x39\x39\xfe\xc0\xa9\x71\x89\x59\xf7\x13\xfb\xcc\x49\x46\xb6\x75\xfe\xe2\xc7\xc2\xcf\x8d\x96\xe0\xf0\xc0\xf1\xbb\x67\x39\xb5\xe5\xe6\x85\x42\xc0\x1c\xa5\x51\x16\x7d\x65\x38\xea\x2d\x04\x2b\xec\xdb\xca\x57\x86\x06\xf6\x4f\x1a\x0f\x12\x79\x67\xe0\x0c\xdb\x7d\xb4\x44\xc1\x45\x54\x51\x78\x87\x39\x54\x7d\x2b\x1d\x03\x6c\x6e\xfe\x9d\xe4\x75\xa7\x70\x3e\x19\x58\xf5\x38\x38\xd5\x71\xff\xee\x8e\x08\xf7\xc7\x5e\xab\xf2\x10\x52\xc1\x9e\xdc\x9b\xba\x28\x22\xa9\xab\xcd\xc8\x66\xfb\x51\x14\x73\x1a\x24\x87\xbd\x75\x5b\xd0\x69\xd8\x9b\xab\xef\xed\x6f\x24\xa8\x89\xba\x3b\x0e\xdb\x80\xe1\x55\xce\x3b\xa5\xf0\x78\xec\x35\x35\xfb\xe4\xdd\x4b\xfa\x8f\xcb\x1f\xde\x6d\x03\x84\xa7\xfa\xcf\xdd\x18\x80\xa8\x8a\x4b\xec\x28\x0f\x72\x20\xfc\xfe\xf4\x44\x86\x07\x01\x30\x21\x60\xe9\x08\x73\xb2\xab\x04\x52\x75\xe9\x8e\xd8\xc1\xe7\x0e\xc1\xb4\x7d\x10\x3b\x15\x52\x66\x97\x36\xa8\xe4\xf7\x54\x2f\xd4\xad\x23\x73\x15\x97\x00\xaf\xf2\x7c\x8a\x10\x71\x0f\x81\x0e\x14\x9a\xf0\x82\x21\xe1\x0b\xbc\x5a\x78\x11\x30\xea\x80\x22\xfe\xf6\x89\xe1\x7e\xbb\xb8\x6c\x5f\x1a\x51\x92\xd6\xe9\x76\x83\x89\xd8\xc6\xf8\xc0\xb5\x89\x5b\xad\xd6\xfa\x91\xd0\x72\x52\x61\xb8\x35\xe1\x0b\x62\x90\x78\xa3\x7d\xab\x8f\x36\x31\x13\x09\xef\xef\x2a\x27\x12\xb0\x25\xd8\x73\xb9\xa8\xcd\x21\x2b\xf0\xdf\xfd\x9e\xb2\x6b\x98\x37\x19\x52\x77\x83\x44\x51\xb7\x23\x91\xf3\x23\x98\xf1\x2f\xc2\xfa\xfe\xea\xef\xe4\xf3\x3b\x90\x6c\xdd\x63\x8c\xd1\xbb\xdf\x86\x25\xe8\xde\xd6\x83\x69\xde\x2e\xa7\xce\x49\x54\x77\x18\x04\x47\x49\x04\x5f\xe0\x16\x73\xdc\x35\xbb\xf1\xbb\x44\x6d\xa2\xb9\x4a\x79\xd0\x81\x56\xb7\x2a\xee\x50\xbf\x70\x9f\x07\x0f\xcf\x67\x8d\x2d\xd2\xca\xcb\xfe\xe6\xbc\xd1\xeb\x71\x67\x59\xca\x91\x84\x61\xe1\x6c\x16\xb3\x60\x96\x01\xe9\x48\x0f\x5a\x08\x1c\xe4\x1a\x65\xd8\x92\xd1\x75\x82\xaa\xf2\x87\x33\x39\x4
8\xb9\x3c\x40\x17\x21\x8c\xcf\x90\xd4\x60\x11\x85\xf3\x5d\xdf\xa6\xf9\x04\xd0\xb4\xb4\x3f\x65\x8b\xbb\xae\x59\x16\xa7\x6f\xcb\x54\x14\x43\x00\xe7\x18\x07\x8a\x2e\x4f\xea\x60\x5d\xfe\x10\xec\x76\xb2\x4c\x42\x30\xf6\xe6\x77\x4c\x97\x36\x99\x16\xc9\x7e\x03\x4d\x2a\x91\x34\x76\xa3\xf0\x49\xd6\x63\x14\x78\x08\x83\x38\x1c\x38\xff\x64\xf6\x62\x6f\x8c\xf6\x38\x7f\x43\xc1\x19\x2d\x9d\x67\x80\x39\x69\xb8\x2e\x73\x98\x5b\x43\xae\xa0\x3d\xc7\x7e\xd5\xa2\x44\x7d\xf2\x97\xce\x4f\x88\x94\xdf\x0d\xed\x3e\xaf\xdb\x76\xf0\x97\xf2\xf1\xec\x36\x9d\x49\x4f\x29\x41\x10\x18\x23\x2a\x68\x14\xb4\x6b\x77\x5c\x0b\x7a\xb5\x9a\x52\xb3\xfc\x66\x9b\x2c\x77\xc1\x12\x36\x74\x6b\x98\x77\xd8\xed\x06\xec\x5b\xe3\x97\xfd\x3e\xca\xd3\x07\x6d\x7b\xb3\x81\xb1\xed\xbd\x51\x91\x77\x7b\xc6\x4a\x7e\xdb\x53\x4b\x96\xa4\x3f\x51\x3d\x23\x3f\x3a\xc4\xb4\xdb\x67\x4c\xdc\x13\xb7\xfc\xee\x65\x40\x64\x77\x2a\x33\x63\x63\x20\x93\xe1\x36\xc8\x46\xce\xeb\x0f\xf1\x93\xc5\x82\x5a\xfb\xf5\x27\xfa\x48\xc1\xd7\xe3\x63\x58\xfc\x8e\xe2\x74\x2f\xb9\x45\x7d\xda\x55\xc7\x56\xed\xe0\x86\xe4\x86\x0a\xfc\xa7\x5a\x95\x43\xf1\x97\xd9\x51\xd9\xbd\x08\x16\x0e\x4b\x51\xa2\x7f\x2e\x45\xe5\x4d\x78\x36\x8f\xe0\x30\x1d\x95\x97\x3d\x44\xef\x07\x41\x85\xf1\x10\xd4\x89\x05\x6f\xff\x71\x49\x43\x54\x50\xf6\x1e\xd5\x67\x5a\x78\xe3\xb5\xb1\x8a\xbd\xd5\x9d\x5f\x65\x79\xf8\xd7\x91\x43\x02\x61\x7c\xea\x66\x39\xf8\x4e\xac\x05\x4f\xc9\xec\xad\x76\xe9\x25\x16\xb1\x3b\x53\x54\xd2\x2b\x2b\x30\xc9\x2e\x52\x87\x11\xfa\xda\xe4\xbe\xfd\x72\x33\x1f\x3b\x9d\xfa\xc6\xb4\xdf\xe1\xfc\xb5\x09\x05\xea\x13\x0c\x57\x65\xf6\x71\xea\x28\x3b\x82\x19\xdb\xd5\xe3\x4f\xca\xa6\xf0\x50\xdc\xf8\xe4\xe4\x74\xca\xfb\xa6\xe7\x2f\xfa\x26\x77\x4a\xd9\x65\xd3\x16\x3e\xa7\x91\x1c\x5d\x42\x71\xee\x37\x06\x1b\xb1\x49\x26\x78\x57\x8e\x3c\x10\x78\x6f\xe8\x50\xf9\x88\x1c\xe7\x59\x07\x7c\xe4\xc9\xe4\xb2\x6b\x9e\x55\x35\x6e\x89\x2b\x0f\x0d\x4f\x94\x31\xa3\xed\xdc\xee\x82\xf1\xf0\x21\x7d\x9c\x59\x82\x57\xc1\x96\xa6\xcb\x50\x84\xc7\xc2\x03\x24\xd2\xf9\xa8\x70\x55\xcb\x7c\xd0\xec\xd6\x9a\x03\xe5\x31\xef\x20\xac\x8a\x89\x7a\x2f\xa5\x72\xc5\x40\xb2\x3d\x32\xb5\x41\x4a\x55\xfb\xef\x36\xac\xb8\x48\xc8\x08\x4c\xda\x9a\x80\x90\x13\xa9\xaa\x34\xcb\x80\x56\x3b\x87\x5b\x55\xec\x32\xec\xd6\x38\xf5\xbc\xb7\x60\x6b\xff\x94\x20\xd0\x4a\xf1\xb4\xeb\xef\xae\xd3\x08\x14\x58\x96\x68\xfb\x87\x8e\xb6\x97\x4c\x20\x31\x91\xba\xc8\x05\x38\xe0\x42\xa5\x62\x72\xd5\x6c\xf3\xd6\xa6\x61\xb7\x10\x3b\x3a\x85\x1b\xce\x59\xd8\x2d\xd2\x1c\x48\x05\xb4\x67\xea\x0e\x38\x53\xd6\x50\x7a\x8a\x50\x84\x29\x8d\x3c\xbb\x06\x3d\x89\x91\xee\x6d\xc8\x3a\x72\xa7\x86\x48\x71\x3e\x5b\x64\xde\xa9\xc1\xa6\xef\x6c\x3c\xcc\x52\x0b\x2b\x7e\x3a\xd0\xf4\x2c\x9c\x27\x3d\x19\xdf\xcb\xdb\x09\xf3\xd4\x80\xd8\x6d\xa3\xdc\x28\x57\xc1\xd6\x27\x02\xc0\x7a\x44\xad\xb9\x84\x87\x05\x68\x11\x9c\x09\x15\x69\x59\x40\xce\xa4\x5c\x20\x0b\x8f\x94\x7d\xf7\xd1\xca\xa9\xbe\x2a\xf5\xbf\xce\x41\x92\x25\x6b\x76\x89\xb4\x0f\x4f\x63\xe2\xf9\x37\x25\x19\x56\x61\xa5\x00\x18\xd1\xd5\x7c\x75\xec\x25\xac\x33\xfc\xd6\x0b\x6b\x40\x0a\xd9\x7f\xce\x19\xf8\x88\x8a\xc2\x23\x3a\xa9\xae\x46\x4c\x4e\x07\x97\x78\x2a\x0a\x69\x16\x82\x0c\x45\x39\xd7\x76\xb0\xe4\xe5\x02\x3f\xf7\xe3\xbe\x59\x10\xbf\x07\x60\xbf\xaa\xd4\x02\x74\x8f\x69\x91\x96\x4c\xa5\x7c\xc7\xfd\x8e\x72\x52\x70\x12\x18\x07\xb4\x95\x78\x15\x1e\x61\x6e\x4b\xed\x37\xca\xbd\xcd\x1b\xed\xfb\x2a\xd1\xaf\x2b\xa9\xde\xba\x30\x2b\x73\xc6\x8c\x1d\x67\xbd\xb4\xad\x8b\xfc\x73\x4f\xf7\x9d\x52\x30\xc0\xba\x87\x91\xc7\xfd\x69\x64\x5b\x44\xaa\x86\x37\xcc\x27\xa8\x09\xdf\xa8\x2a\xd1\x65\xa6\x17\x29\x09\x22\xb8\xe5\x2d\x34\xf8\x2e\x11\x79\x38\x95\x2d\x30\xa1\x43\x41\x
be\xbb\x84\x32\x7b\xdb\xb7\xf7\x2a\xa4\x65\x21\xdc\xbf\xff\x3c\x01\x77\x2b\x0e\xfd\x4b\xe0\x5d\xcf\x13\x07\x76\xe6\x1f\x3f\x68\x9c\xdd\xa0\x62\x10\x0e\x20\x74\x56\x2e\x79\x95\xfa\xca\x8e\x3d\x22\x85\x6b\xa3\x52\xc0\x51\x3b\x29\x55\xa5\xcf\xd4\x60\x64\x99\x63\xe3\xd8\x3b\xb5\xfd\xc1\x91\x21\x4b\xc1\x93\xae\xf3\x16\x2d\x7c\x8d\x3b\x1b\xc0\x26\xe7\x5d\x1e\x31\x44\x5f\x1f\x1f\xd2\x02\x6e\xd2\x18\x21\xbf\x23\x2b\x65\x69\xa2\x34\xb7\x98\xf4\x53\x12\x61\xec\x0f\x2a\x2e\x82\x03\xb7\x0b\x4b\x99\x33\x09\x45\x33\x55\xa0\xb3\x93\x5f\xd8\x55\xe6\xa7\x35\x7a\x3e\xf2\x9a\x3e\x67\x84\x9d\xdd\x67\x06\x50\xcc\xbb\xdf\x6a\x82\x00\x71\x06\x99\x54\xa0\x74\xd5\x00\x2d\x98\x77\xae\x0b\x92\x31\xfb\x92\xbe\x01\x84\xf2\x67\x3c\x8f\xea\x93\xde\xe7\x72\xc7\x5c\xef\x19\xac\xaf\xa7\x70\x9b\xc0\x8a\x20\xf9\xa5\x47\xa2\x95\xce\x32\xc9\x23\xc7\x7e\x80\xea\x86\x75\xa6\xbb\xa5\x52\xe0\x7b\xe6\x9f\x00\x78\xd4\xbd\x2d\x46\x4f\x2f\x8b\x66\xec\x9f\xf6\xb2\x8a\x9f\x6c\x87\xf7\x2d\x4f\x3a\x3b\x2b\x23\x0f\xc5\x27\x49\x9d\xdd\x00\x9a\x10\x2c\x6a\x59\xad\xc3\x3e\x97\x36\x63\x93\x2d\x4a\x07\xdd\x0d\x9d\x0b\xe6\x59\xca\xf1\x60\x3c\x7a\xf0\x3d\xcb\x4d\x8c\x5f\x56\x8a\x31\xac\x05\x1a\xc2\xf0\x54\x3d\x26\xe7\x6d\xbc\x41\x8f\x1c\x34\xa1\x26\x0d\x7d\xc7\x04\x11\x23\x5f\xea\x1f\x5e\xc1\xd8\x45\x51\x99\x5a\x4e\xad\x86\x75\xd5\xee\x3e\xc2\x9d\xaa\xd2\xe0\x20\x5f\x62\x54\x97\x1d\x67\xb7\x73\x15\xdd\x11\x2d\x8f\xe3\x4d\xb4\x13\x98\xc1\x0e\x57\x0c\xe5\xec\x88\x9c\x82\xfa\x06\xf7\x62\x21\xdc\xa0\x12\x78\x9d\x0a\x99\x4f\x58\x67\xc4\x93\x38\x93\xfc\x1c\xe1\x5a\xec\xdd\x20\xf2\x46\x39\x86\x3f\xe4\xbe\xf0\x33\xd5\x3f\xe8\x10\xab\x94\xf9\xa3\xa9\x9f\x8f\xb4\x33\x7f\x05\x93\x67\x19\xa8\xd8\x81\xfb\x24\x6d\xc4\xfe\x0c\xab\x33\xe9\xa4\x4d\xa0\x63\x53\x63\xb6\x95\xb8\xc3\xf5\x79\x14\xfa\x2c\x31\x3f\x93\x22\xa3\xf5\x7a\x5a\x21\xfc\x57\x5e\x62\xa2\x8e\x8b\xbe\xca\x0e\x59\xd9\x10\xad\xe9\xbf\xb8\x25\xfb\x12\x46\xe8\x9b\xd7\x71\x88\x8e\xd1\x30\x9f\x08\x20\x6f\x46\x78\x22\xea\x76\x8c\xf1\xf8\x52\x22\x11\x11\x34\xd6\x34\x2d\xba\x57\x45\xa9\x70\x22\x45\x84\x74\x58\x5d\xfb\x6e\x66\x0b\x1c\x43\xcf\x9b\x68\x2a\x08\x54\xcd\x91\xbe\x35\x67\xb2\x89\x6c\x2b\x79\x09\x98\x95\x3f\xf2\x1f\x36\x66\xd6\xc1\x1f\x3e\x80\x67\xba\x09\x99\x0d\x53\xef\xe5\x27\x38\xc5\x69\x4f\x7b\xef\x85\x79\xd7\xb6\x18\x73\x34\xa6\xa1\xc0\x9f\x42\x28\xe2\x9e\xb6\xfc\x37\x59\x5e\xfd\x54\x4a\x63\x25\x08\x0b\x33\x69\xc4\xbd\x3f\xdc\xba\x61\x54\xde\x51\xba\x11\xf7\x7e\xfb\x92\xd9\x5a\x5e\x09\xc8\xdf\x8c\x5d\xf1\xb3\x7a\xab\xfa\x6c\xee\x02\x3e\x7a\xf4\xaf\x08\xda\xa1\xd5\x00\x14\x5c\xe0\x5d\xd5\xa1\x6e\x7f\xee\xfd\x65\xaf\x6a\x15\x09\x55\xa7\xf7\x15\x0d\x14\xf4\x2f\x9c\xbc\xab\xae\xef\x2b\x77\x07\x58\x7c\xe1\x4a\xd6\x29\x9f\xdf\x41\x0e\xc4\x5c\x47\x83\x91\xab\x9f\xa4\xd6\x73\xd7\x23\x7a\xe1\xdd\x8b\xe0\xd0\x3b\x6f\x47\x0d\x57\x3d\x82\xaf\x8c\x81\x38\x92\x8c\x57\xef\xe5\x4d\xb3\x5a\xec\x28\x64\x71\x75\x25\x9a\xe0\x72\xd0\xae\x08\xe2\xee\x10\x2b\xba\x13\xa3\x6f\x1a\xb7\xb5\x0c\x97\x94\x87\x0b\xaf\x48\xa8\x14\xfc\x33\x9f\x9a\x66\xa2\x44\x65\x81\x5f\x9f\x6f\x0f\x53\xd1\x3d\xf5\x96\x76\x88\x1f\x84\x8e\xf7\x95\x4e\xf1\xda\xa8\x4f\x1c\xc4\x1a\x51\x1c\x1d\xb4\x34\xe2\xd5\x56\xbe\xb5\x57\x1c\xa3\x57\xf1\xf2\x01\x14\x24\xb7\xca\x68\x36\x48\x32\x7a\xdd\x73\xb0\xab\x5c\x00\xad\x3e\x37\xbc\xb1\xad\x72\x27\xd6\x44\x18\xe7\x0c\x9f\xd6\x73\x0b\x67\xa0\x53\x9b\x4d\x78\xec\x63\x90\x76\x35\x4f\x1e\x38\xf0\xe2\x31\xbf\x4a\x10\x04\xc2\xab\xe6\xb4\xf7\xf4\xb5\x45\x01\x71\x45\x05\x51\xcf\x9f\x09\xa3\x27\x3c\x70\x26\x84\x29\xa3\x2c\x0d\x78\x05\x1c\xe1\x59\x77\x20\x5e\x50\x69\x69\xa4\x86\xd8\xe6\x13\x41\x60\x3b\xf9\x24\x56\xda\
xfd\x8a\x82\x3d\x38\x6d\xfe\xd4\x59\x63\xef\xc8\xde\xf3\xaf\x04\xe9\x57\x1d\x76\xe2\x1e\x24\x58\x64\xbd\xc9\x07\x5c\x1b\x98\xbd\xc2\x7a\x87\x75\x41\x38\xe7\xdb\x76\xa3\xf7\x09\xb6\x94\x94\x3b\xd8\x13\x73\x2a\xae\xc4\x98\x55\x93\x4f\x2f\x06\xca\xbe\x8b\xda\x38\x6b\x17\x21\x9b\xca\x6a\x37\x2f\x6d\xbd\x0e\xf0\x32\x6e\xce\xad\xea\x87\xe7\x11\xbb\x64\x30\x53\xed\x81\x05\x52\x51\x55\x8d\x6e\x87\xfc\x82\x03\x72\x36\x72\x0c\x28\xf0\x9a\x6a\x9e\x4f\x9a\xd5\x4e\xf9\x56\x5f\xbd\x48\x9c\x3c\xe5\x5f\x7c\x04\xc2\x20\x63\x1f\x21\xdf\x7f\x62\x35\xe3\x15\x02\x6b\x4e\xbb\x97\x7d\x47\xa8\x43\x27\xfb\x4b\xdc\xde\xe4\xd1\xc3\x56\x85\xab\x8e\x5b\x89\x5c\x9a\xc7\xb0\x1d\x1a\x3d\x28\xfb\x11\x9e\xe6\xfe\xe2\xc0\x57\xb9\xd0\xe4\x48\x4c\xca\x24\xad\x6c\x8e\x14\x9e\x2c\x28\x2f\xb7\x14\x65\xa6\x98\x73\xc4\x36\xdc\x3d\x4d\x48\x07\xbd\x8c\x83\x1d\x28\x7c\x51\x3c\x10\xa8\x17\xb1\xac\x05\x62\x82\xc8\xa7\x71\xcd\x7d\x8a\x8e\x70\x34\x8b\x9d\xfe\x28\x1a\x2e\xf5\x07\x3b\xf3\x6a\x07\x95\x3c\x17\x91\xd1\x25\x9b\xb4\x25\xfe\x16\xb4\x51\xca\x01\xee\xe3\x64\xef\x93\x20\xcd\x3e\xb9\x7f\x0d\x3b\x07\x3b\xdd\x7f\xfa\xd0\xf2\x4f\x70\x00\x44\x32\x67\x18\x68\x89\x5b\x4f\x98\x4d\x56\x38\x2d\x8e\x28\x85\xb1\x9f\xed\x53\x21\x53\x61\x5a\x4a\x57\x83\xdb\x8f\x12\x4f\x13\xa2\x2c\x8f\x26\xc9\x67\x7c\x40\x8e\x5d\x89\xc6\x1c\x48\x2a\x0a\x36\xd1\x81\x58\x1d\x02\x84\x80\x11\xa9\x50\x85\x4f\x38\x6f\x9f\x2f\x5a\x36\xa6\x61\xbf\x6d\xab\xd2\xfe\x85\x3e\xa5\xa8\x51\xb1\xba\xad\x70\x4c\x74\x5e\x71\x2b\xdb\xed\x11\x34\x69\x8c\xf9\x14\x8c\x5e\x44\x70\x7b\x01\xbd\x77\xb4\xcb\x56\x4a\xeb\x77\xad\x7d\x63\x30\x79\x4d\x44\x5a\x98\xe0\x6f\xfb\xec\x16\x38\x78\xfc\xc0\x73\x97\x4d\x31\xd8\xc8\xda\x8e\x68\x4f\x9c\x89\x75\x3e\xfe\x9f\x6c\x09\x7d\x91\x0a\x8d\x2c\x6e\x9f\x18\x03\x6c\x38\x78\x4a\x1c\x35\x24\x28\x47\xe1\x18\xd5\x76\x34\x9d\x25\x47\xb4\xac\x18\x39\x04\x17\x3b\x0d\xb4\xbc\x2e\x4a\x1a\xdc\xc3\x6a\x74\xff\xff\x65\xbf\x50\x18\xb8\x67\x6b\x5d\x19\x46\xb3\x63\x97\x20\xf4\xb0\xb9\x76\x0c\xa1\x43\xf4\x23\x73\xb6\x68\x48\xb6\xef\x63\xbc\x0b\xad\xa5\x6a\x78\xdf\xab\x0d\x94\x71\xa1\x32\x7f\xec\x28\x91\x29\xd6\xda\xfb\x8b\xa4\xd9\x71\xf8\x70\xa0\x48\x22\xad\xf4\x66\xc4\xc8\x61\xf5\x11\xa9\xd7\x47\x1f\xf1\x23\xdb\xcf\xa4\x7a\x4d\x1c\xc0\x02\x56\xf5\x9a\xe9\x1e\xb8\x23\x2d\x95\xfa\xdb\x42\x47\x58\x17\xe6\xaa\x3a\x5b\xec\xfe\xd4\xb7\xdb\xd6\xa6\x6c\x7f\xbd\xb6\x0b\x94\x63\x51\x18\xdd\x0b\x03\x2e\x62\xca\x3f\x33\xc8\x4c\x9c\xee\xf3\xfa\xe5\x9a\x7c\x2a\x39\xdb\xcb\x53\xbc\x1b\xf8\x4c\xb2\x67\x51\xd4\xd0\x58\x5b\x1b\x53\x6f\xfb\xfd\xbe\x5d\x2d\x81\x50\x8f\x0a\xcd\xc8\x92\xca\xc2\x1e\x91\xe4\x58\x7b\x73\x7e\x0c\x6e\xb0\x2e\x7c\xf5\x17\xa2\xdf\xe7\xf2\xf1\x3b\x1a\xde\x3d\x92\xcc\x05\x0b\xe2\xc9\x49\x61\x3e\x9b\xeb\x5a\xf6\x9a\xe4\xd0\xc0\xba\x9c\x94\xfe\x4a\x4d\xd7\xfc\x06\xc0\xad\x51\x33\xb4\x84\xfe\xe1\xa5\x46\x76\x48\x41\xef\x8e\xda\x65\x51\x02\x0d\xb9\x54\x42\x35\x5e\x6d\x75\x34\xef\x3d\x37\xf2\x51\x7e\xd9\x19\x29\x80\xae\x0e\x1e\x89\x26\x8a\xc1\x7a\x47\x56\xa7\xbd\x60\x82\x2f\x22\x86\xc7\x97\xd2\x89\x5a\x89\xb3\x09\x8d\x86\x64\xfd\x4c\xb2\x74\x0c\x46\x6b\x67\xad\xe2\x25\x7a\xec\x36\x6b\xa4\x90\xe8\xa5\xe9\xbd\xa9\x26\x68\x18\x2b\x0b\x77\x44\xd5\x47\xac\x23\x66\xab\x88\xbb\xe6\x03\xdf\x4d\xbf\xea\xb6\x7c\x3b\x15\xf6\x85\xa9\x11\x77\x85\x13\xbf\x9e\x5c\xac\x2d\xaa\x9e\x57\x1a\xb6\x44\x4b\xc0\x77\xe4\xcd\xc0\x89\xe9\xe0\x22\x70\x8c\x0b\x73\x7a\x61\x9b\x99\xf4\x8d\x2b\x2d\xbf\x9c\x3e\x50\xcd\xc1\x0c\x24\x7c\xf9\x96\x36\xfe\xdd\xa4\x7f\x17\x1c\x36\x7d\x11\x3e\x0f\x57\xea\x62\xda\x76\xb7\x04\xcc\x88\xd4\x25\xa6\x77\x96\x10\xa1\xc3\xf5\x78\x67\x8d\x54\x9e\x31\x06\x87\xae\x5f\x44
\xf3\x60\xb7\x27\xaa\x16\x09\xa3\x70\xf8\x75\x84\x2b\xda\x71\xc7\xeb\x75\x54\x46\x71\x1d\x31\x20\xa3\xdb\x93\x0f\x1b\xf2\x44\x9d\xa0\x02\x7f\xed\x84\xf0\x9f\x86\x82\xe4\x57\x53\xed\xcd\x5e\x6a\x5f\x04\xea\x60\xa6\xa7\x32\x48\x67\x40\xfd\xbf\xbe\x29\xf5\x9f\x64\x12\x2b\x4c\xa8\x4d\x23\x47\x33\x6b\x28\xb0\x05\x5c\x82\xdd\xa9\x85\x53\x55\x34\x78\x8a\xbf\xa9\x03\x8d\xf3\xd7\xf2\x49\x01\x9c\x04\x30\xbb\xe6\x2e\xcc\x15\x41\xc1\x94\x9f\x22\x86\xb1\xdc\xa2\xec\x56\xa2\xfd\xc4\x72\x33\xf6\x6f\xa9\xaf\x48\x82\x6f\xa4\x1a\x05\xc0\x63\x1f\x3a\xc3\x29\x65\x18\xb4\x09\xbd\xb4\x2f\xfd\xb3\x45\xf5\xe1\xf6\xe9\xf6\x13\x41\xf2\xdc\x6b\x51\x72\x39\x50\xde\xd0\x66\x69\x3c\xe7\xa3\xfc\xbb\x2b\x89\xf6\xc2\x48\x3d\xf3\x5d\x2b\x6c\x8e\xe7\xd4\x7f\x23\xbb\x09\x00\x4c\x58\xb1\x48\xaa\x88\x6c\xa8\xb0\xcc\x08\x71\xef\xd8\x58\x35\x57\x88\xae\x50\x62\x4d\x51\x62\x3d\xc3\x93\xe5\xe5\xe6\x42\x4f\x84\x4a\xf9\x23\xcd\x24\xc0\x4a\x73\xfb\x03\x6b\xbd\x65\x5d\xfd\xd4\x3b\xf5\x84\x51\x5f\x6c\x4c\x4f\x7b\xae\xcd\xc2\x93\x43\x6e\x87\x61\xdd\x3a\x63\x95\xa0\x23\x0f\x63\xbb\x2b\x72\xe3\x46\xfe\x42\x7f\x6e\x04\xf8\x90\x09\xfa\xe1\xa0\x71\x79\xe5\xde\xba\x2c\x16\x53\x7e\x3c\x80\xcf\x11\x29\x34\x8d\x0b\x53\xaf\xeb\x88\x41\xff\x99\x5e\x51\x22\xb5\x64\x1a\xdb\xa1\xc2\xc0\x78\x8f\xaa\x69\x0d\x7a\xe8\xf2\x54\x60\x1f\xbd\xea\x90\xbf\x79\xcb\x17\x9f\xd1\xd9\x00\x80\xec\x1d\xf4\xa3\x0b\xf8\xec\xbb\x95\x48\xe9\xd2\x46\x77\xcc\x0d\x0a\xf3\x58\xe2\xc6\xaf\xf9\x63\x5a\x11\xc5\x62\x44\x7e\x76\xdf\xbf\x90\x8c\xe3\xcf\xe3\xa3\xc8\xa0\x8f\x6f\xcc\x4e\xe3\x8f\x17\xd2\xae\x41\xc1\x40\xf6\x94\xac\x5e\xf1\x69\x39\xf3\x74\xdf\xd0\x8c\xcb\xde\x00\x45\xe8\xc6\x6d\x47\x2a\x30\x46\xa6\xf0\xc6\xb0\xdd\xe4\x08\x2a\xff\x1e\x5e\xb4\x4b\x93\xb9\x03\x10\x61\x88\xe7\x14\x57\xd9\x99\xed\x96\x2e\x22\xc4\x9c\xc9\xdb\xee\x63\xfa\xac\x82\x5b\xbd\x9b\xc6\x6c\x90\x33\x90\x85\xc2\x5d\x6b\x6c\x0e\x82\xa6\xe9\x8a\x19\x08\x08\x99\x3a\x60\xf7\x44\x3b\x49\x20\xa6\x21\x0c\x16\x6f\xc6\xae\x2b\x6a\x74\x37\x36\x2d\x6b\x30\xeb\x3c\x88\x2e\x29\xac\xa3\xf7\xa1\xce\xea\x94\x54\x61\xa5\xe1\x4f\x88\xab\x44\xa4\xec\x2e\x07\x25\x5b\x9d\xf2\x2a\x9b\xc5\x55\xa7\xf9\x9a\xac\x6d\x91\xd0\x40\x57\xe5\x72\xef\xd1\x1b\x09\x40\xf1\x27\x2a\x78\x79\xba\xb7\x33\xac\x02\xbc\x3e\x0c\xef\x1f\x40\x26\xdf\x9e\xad\xca\xc4\x6e\x1d\xfc\x57\x5e\x7b\xc8\x35\x8f\x69\x22\xf7\xef\x09\xb6\xfb\x4a\x88\xc9\x63\xf8\x2e\x64\x65\x83\x30\x22\xa6\x0d\xf7\x2f\x8c\xe6\x1d\x06\xb1\xa2\x91\x45\x88\xd5\x50\x72\x05\x07\xe8\xc5\x9b\x6e\x0c\x6b\x1f\xbb\xba\x07\x71\x73\x14\xb1\x37\x36\x44\x62\x94\x73\xe9\xf3\xf8\x24\xc2\xac\xb9\xf5\x77\x4f\x18\x15\xea\xa6\xfe\xef\x3f\x7f\x38\x27\x91\xe5\x90\x3f\xfe\x20\x57\xce\xef\x5d\x18\x2b\x33\x17\xf3\x45\xcc\x28\xab\xb8\xce\xcb\xdc\x6c\x71\x04\x25\x0f\xf7\xa1\xd2\x44\xcc\x64\xfb\x8d\xe7\xcc\x3b\x74\x88\x81\xc4\xe0\x48\xb5\x2f\x14\x54\x31\xe4\x75\xe7\x24\xd4\xfa\x57\x6d\x00\xb6\x47\xf2\x6b\xe8\x95\x30\xb0\xd1\x18\x00\x7f\xbb\x7c\xd9\xe5\xd9\x5d\xe2\xae\x05\x3b\xb2\xb8\xe2\xac\xa7\xda\x22\x09\x47\x7e\xa3\x56\xf5\x08\xfc\x9d\x02\xaf\xc5\x30\x21\x0a\x19\xd6\x34\xdb\x6f\x4d\x56\x9d\x13\x8e\x84\xf2\x6c\x41\x8f\x3d\x22\x44\xbb\xab\xb0\x76\x52\x7b\xb4\xdc\x61\x64\xd9\x2d\xce\x8c\x13\x57\x21\x2b\x11\x32\x7d\xa4\xbb\x00\x51\xfb\xe1\x54\xb9\xfb\x4f\x69\xda\x33\x0f\x8b\xa4\x77\x52\x49\x2f\xdf\xbb\x91\x46\x29\xfa\xe1\x35\xf2\x65\x65\xf8\x3e\x6a\xd7\x98\x29\x60\x05\x34\xb2\xec\x70\xd2\x72\xf5\xed\x6c\xc3\x1f\x5b\x36\x0c\xa9\xce\x6b\xa4\x3b\xee\x5b\xe4\x6a\x40\x41\x8a\x45\x3b\x54\x80\xdb\xdb\x23\x8d\x6a\x2a\xc2\x6f\xaf\x36\xde\x53\x2a\x93\x76\x2a\xab\x5d\xa2\xa1\x97\xd3\x01\x19\x46\xc0\x80\x7d\xe6\xa4\x4a\xc
6\x57\x25\x98\x20\xe7\xde\xd4\xa7\x7b\xe2\x9d\x7b\xda\xc9\xed\xe1\x85\xbe\xf6\x5f\xb4\x48\xda\x79\x4e\xc7\xa7\x45\xb2\xc3\xa9\x49\xbf\xcf\x4e\x36\xfd\x3a\x68\x78\xe5\x4c\x08\xc3\xcf\xf3\xd2\xb1\x7c\x04\x60\x73\x4c\x6c\xe4\x31\xb3\x23\x17\xa9\x6c\xf2\x1a\x62\x60\x9f\x1f\x90\x7b\x18\x49\x38\x35\xc2\x6f\xa7\xbd\x31\xdb\x27\x39\xe5\xc2\xa8\xb7\xca\x09\x01\x90\x87\xd7\xc5\x56\x35\x70\x5c\x5c\x23\xcd\x76\x63\x25\x42\x81\xc9\xc3\x85\x79\x8d\xbe\x44\xd6\x8f\xb7\x59\xb4\x55\xb5\x3d\xb7\x4b\xac\x57\x2e\xc1\x5d\xb6\x9a\xd8\x87\xae\xde\xe1\xf2\x73\x8b\x78\x60\x60\xef\xc2\x58\xc6\x83\x00\x6d\x1f\xfc\x64\x1a\xb5\x42\xe7\xa7\xe6\x0a\x83\x12\x6f\x9f\xee\x75\x9f\xa3\xa6\x79\x65\x52\x01\x96\xfa\x94\x35\x11\xe9\x7e\x4b\x89\x7e\xd0\x36\xbf\xbd\x79\x3b\x2e\xf9\x39\xff\xe3\x03\x2e\x89\x6c\xba\x49\xcf\x5b\x22\xc5\x15\x83\xcf\x48\xa7\xbb\x0f\x4f\xb5\xf0\xd2\x87\xcd\x56\xcb\xf4\x0d\x5a\xc4\x8c\x9e\x12\x8b\x3f\xa6\xcc\x72\xb7\x75\x84\x76\x3a\x8b\x4b\xcd\x26\xbd\x0a\xb6\xf1\xea\xd7\x4f\x9c\xc1\x4c\x35\x3f\xb9\xcf\x82\x1e\xf5\x70\x38\x7a\x56\x7a\x68\x0b\x33\x1a\xeb\x02\x98\x77\x21\x78\x64\x87\xb8\x9c\x94\xee\x45\x47\xa3\xb5\x7a\x97\x8c\xbe\x8e\xac\x19\x54\x20\xa2\x4f\x7b\xef\xc9\xc6\xf7\x8b\x65\xe3\x34\xba\x1e\x6d\x90\xf4\xb4\x77\xda\xb7\xbf\xe8\x2d\x7d\x02\xad\x0d\x30\x8b\xf1\x2c\x19\xf1\x70\x6f\x8c\x58\x1b\xea\x96\xc4\x14\x2e\xda\xd0\xca\xbf\xce\xb1\x03\xe0\x4b\xdd\xcf\xbd\xf0\xb6\xde\xaa\x5c\x56\x73\x7d\x8d\x44\x86\xfa\x0f\x93\xa4\x87\x14\xd9\x1f\x1d\xe1\xc0\xbb\xe0\x58\x72\x0f\x4f\xaa\xd8\xc1\x8a\xe6\xd1\xcb\x8f\x0e\xac\x3c\x37\xbc\x5e\x02\x7a\x5a\xe7\x15\x2a\xd9\x8e\x25\x7f\x97\x74\x0b\x76\x1c\xa2\x76\xbf\x3d\x31\xdd\x76\x20\x4e\x9a\x64\x7d\x93\x06\x25\x40\x73\x64\xde\xd4\x71\x17\xf2\x57\x05\x5d\x54\xef\xa3\x61\x41\x37\x3e\x64\x06\x36\x49\xd5\xe2\x47\x50\xd6\xeb\xa8\x21\x96\xd9\x7c\x93\x75\xac\xe6\xcf\x3b\x74\x10\xf9\x03\x16\x71\x6f\x53\xb2\xfb\xc5\x31\x72\x07\xbd\x96\x91\x2c\xa4\x1a\x43\x0e\x6e\x28\x20\x60\xf9\x75\x76\xc2\x0e\x90\xbd\x56\xf0\xdc\x6d\x47\xcc\xf5\x20\xac\x0c\xcd\xb2\x6f\x40\xc7\xef\xfd\x3e\x11\x20\xc9\xef\x69\x1c\x6f\xa4\x52\x65\xec\x84\xbe\x9e\xc3\xed\x1a\x3e\xac\xaf\x98\x67\xdf\xff\x3a\xa1\x1e\xb1\xa8\x6d\xcb\x52\x5f\xe8\xfe\xa2\x04\xa4\xce\xd4\x6a\x3b\x80\x86\x5e\x75\x45\x4a\x6c\x3d\x75\x2e\x21\x0d\xe0\x4a\x02\x41\x8e\x48\xaf\x3c\xf2\x28\x07\x4a\x8d\x60\xa0\x1d\xce\x9b\x26\x99\xb4\x74\x11\x99\x0b\xb9\x4e\xba\xbf\x3d\xf1\x82\x0c\x2f\x28\x9c\x15\xdc\x2d\x39\x4e\x7d\xff\x84\xf4\x5b\xea\xb9\x65\x05\xc3\x7c\x6d\x15\xea\xda\x03\x78\xf1\xc5\xf4\x9b\x96\x41\x52\xb6\x68\x1f\x0a\xc7\x35\xe9\x92\x45\xfb\x45\x13\xa0\xf6\xea\xcb\x71\x36\x3c\x19\x7c\xa3\xa9\x5b\x40\x05\x60\x76\x3c\xe8\x1c\xf8\xe6\xc9\x37\x97\x28\x6a\x67\x64\xc9\x3c\x98\xbc\x04\xfb\x16\xfc\x9d\x1e\xf9\xae\xba\x76\x02\xc9\x89\x44\x2e\x77\x9a\x4f\x6a\xa8\xa7\x7a\xfe\x58\x96\xb5\xa9\xec\x6d\x19\xfc\xcc\xda\xc9\xf0\x6a\x2a\x0b\xcb\xbb\xac\xfe\x6a\x64\x12\xde\x58\x47\xb6\xc0\x90\xc9\x09\xd0\x1b\xd3\xd0\xc9\xed\x05\x9b\x39\x6f\x2d\x1f\x5a\xad\xea\x9a\x7e\x8c\x4e\x34\x4d\xa2\xac\xf8\xae\x96\x8b\x80\x9f\xe1\x43\x8a\xa6\xed\x1b\xba\x25\x90\xf8\x16\x7a\x1f\x58\xfb\xb0\x80\x82\x99\x9e\x3f\xf9\xc1\x36\x1e\xcd\xbb\x72\xa0\x39\xcd\x9a\x99\xd0\xa4\xa7\x30\x93\x02\x3d\x45\x11\xfe\x4b\x1e\x81\x4d\xff\x3f\x36\x2f\x42\x9e\x53\x23\x4a\x87\xce\xdd\x89\xd0\x2d\x02\xb8\xab\x11\x72\x7e\xb4\x68\x8e\xa0\x63\x7a\xb7\x24\x08\x9b\xd9\x81\x3e\xe4\xb6\x3f\xc5\x6d\x51\xbe\x38\x2e\x0e\xca\xf0\xbe\x89\xdb\xe4\x27\xc9\x78\x9a\xb2\x7e\x0e\xc0\x1b\xd5\xe8\x9d\xc3\x23\xcc\xca\xc5\x2b\x52\xd2\x87\x7c\xf3\x7d\xbe\xee\x27\xa7\xd7\xee\xfb\xc6\x05\xf9\x59\x57\x84\xa2\xb9\xdc\x87\xff\x
56\xbd\x2a\x01\x34\x63\xbb\xac\xdf\xde\x0d\xe2\x39\xaf\x1c\x42\x35\x0c\xaa\xc2\xa0\xc9\x7e\xbb\xd8\x66\x30\x00\xe2\x2e\x82\x2c\x3d\x3e\x0e\xf9\x9f\x15\x77\x0b\x09\x5b\x3b\xfd\xcb\x56\x79\xc1\x7b\xea\x17\xbf\xa6\x44\xa2\xdd\xc5\x61\x56\x4d\x4c\xfd\x0e\x43\x78\xdb\x3c\x68\x94\x60\xdb\x04\x77\x6b\xaf\x6b\x36\xde\xff\x67\xcc\xab\x16\xe5\xf1\x77\x65\x10\x0f\x14\x09\x69\xdc\x06\x94\x4e\x5b\xfe\x0b\x0a\xe7\x7b\x69\x73\x2a\x5b\xa7\xb0\x72\x72\x5e\xe7\x0b\xdb\x35\x47\xe8\xd9\xdf\xc6\x0a\x87\xb9\x83\xe1\x37\x43\x6b\x3e\xb7\xdb\x07\x0a\x0f\x9a\x78\x28\x72\x91\xdb\x41\x4b\xc8\x05\x4e\xe3\xfb\x43\x58\x75\x94\x05\xc2\xee\x07\xd1\xe4\xe6\xe5\x01\x93\xa7\x1c\x54\xb6\x5f\x18\x89\xcc\x98\x81\x75\xf2\xbb\xf1\xbd\xe2\x82\x36\x52\x4d\x0a\x69\xcb\x9a\xc2\x43\x3c\x61\xef\xcf\x5a\x22\xed\xc0\xd1\x95\xf0\x80\xc5\x60\xbe\x70\x2c\x7f\x37\x16\x5c\x69\x9b\x86\x03\xcc\x82\x9b\x16\xcf\xd0\xe0\xd6\x53\xf7\xaa\xf4\xc7\x74\xde\x02\x74\x5c\xec\xf9\x20\x58\xa3\xfc\x83\x53\x46\x07\x1b\xef\x65\xeb\xd5\x1b\x8c\x85\x24\x6d\xb1\x26\x66\x78\xfb\x71\xb7\x72\x8e\x79\x12\xdf\x76\x8a\x42\xe7\xc6\x50\x99\xdb\x53\xe4\xa9\x84\x3e\xaa\xaa\x3b\xc1\xa2\x71\xc8\xcb\x15\x2b\x9f\x84\x4d\x8d\xb3\x3f\x42\x73\xba\xc9\x94\x2a\x4a\xa3\x65\x10\xfb\xa6\x8a\x49\x64\x57\x77\xf2\x23\x96\x87\x1f\xf7\xad\x4b\x68\x2d\x7c\x50\x45\xb9\x13\xd6\xe7\x76\x93\x21\x9d\xda\xc9\x56\x81\x41\x86\xff\x50\xbb\x52\xc4\x93\xcb\x2e\xe6\x91\xba\x26\x8c\x7a\x90\x14\x46\x8d\xd3\xad\x30\x28\xf8\xa7\x86\x8d\xd5\x1b\xe1\x76\xc4\x7e\x16\xb4\x5f\x25\xa2\xe9\x86\x28\x83\x8e\x86\x73\xf4\xf5\x15\xc0\x8f\x55\xec\xaa\x11\xad\xcb\x2b\xa4\xc3\x4e\x32\xf4\xb9\xe4\x37\x04\xee\x1d\x8e\x71\x61\x41\x5b\x0a\x86\x8f\xb3\x74\x34\xdb\xeb\xbc\xf2\xa3\x6d\x16\x9a\x37\x4d\xe1\xa1\xf8\x5d\x42\x45\xef\x0f\xe1\x63\x95\x0e\x71\x53\x15\x1d\xe5\xbb\xdb\x72\x29\xa7\x0d\x81\x54\x43\x7d\xa9\xb7\x53\x35\x54\x64\xb8\xfb\xc5\xc9\x40\xa2\x0f\x78\xe7\x57\x43\xaf\xe9\x9c\xc5\x31\x3d\xde\x04\x69\xa4\x70\x68\xd3\xe7\xe2\x6d\x1b\x64\x7c\xc1\x3f\x25\xe0\x8b\xe2\xb1\x4b\x89\x37\x90\x08\x87\x52\xe8\xfa\xd1\x69\xf0\x13\x79\x4b\x5c\x22\x44\x83\x66\x1b\x25\xeb\x09\x25\x2e\x91\x20\x3b\x92\x03\xc0\x72\x61\xbe\xe6\x59\xb6\x9d\x7a\x55\x24\xc8\xc0\x8b\x9f\x23\x42\xf8\x30\x02\x26\xf4\x5c\x56\x9a\x0a\xb8\x4b\x96\xec\x86\xa6\xd4\xc0\x25\xbb\xfb\xdd\xdc\xc4\x1d\xb3\xb8\xbb\x25\x58\xe0\x88\x33\xa4\x46\x0d\x42\xdd\x26\x90\xac\xdf\xe0\xe8\xaa\x0a\x9a\xe0\xde\xc7\x46\xee\x9f\xac\x0b\x57\x18\x78\xd1\x13\x4a\x20\x1f\x93\x11\xd9\xdd\xd3\x27\x11\x88\xf2\xdb\xd3\x0a\x42\x96\x1d\xb6\x70\xdb\x85\x87\x3b\x02\xd4\x30\xbd\x81\xcb\x96\xb6\xf0\x2d\xbf\x3b\xee\x00\xb0\xdd\x1e\x69\xb4\xb8\x1e\x49\xc1\xb2\x97\x06\x50\x2b\x72\x4a\x64\x5f\x84\xf9\x74\xf3\xf2\x0c\xc3\x00\x28\x8f\x88\x16\xdf\xde\xbb\xd8\xd6\x73\x25\x46\x7a\xf9\x9f\x84\x49\x96\x88\xbb\xd6\x4d\x2b\x97\x1f\xa4\x34\xa5\x80\x21\x63\x94\xfe\xb3\x1d\xee\xde\x07\xd9\x7c\x78\x5d\xca\xd7\xde\xae\x4c\xd1\x02\x70\x3a\xf2\xf3\xd4\x10\x39\xb5\xe7\xc1\x96\x49\x34\x9d\x4f\x98\xbf\xd9\x2d\x7a\x36\x23\x65\x21\xa8\x58\xb5\x08\x0e\x2f\x76\x94\xfa\x01\xfa\x8c\xf2\xe7\x8e\xa6\xff\x77\x30\x19\x63\xd1\x07\x9d\x8c\xf4\xef\x69\x6b\xb1\x5d\x23\x8a\x68\x59\x48\xfa\x21\xa2\xe0\xe3\x3a\x52\xd3\xe5\x46\x5f\x9a\x31\x8d\x9c\xd6\x3c\x6f\x6d\x3b\xe4\x29\x41\x4a\xab\x7d\x19\x28\x14\xad\x97\x3f\xf4\x5a\x1c\x3e\x49\x0d\x56\xb3\x15\xb0\x43\x5e\x06\x48\x8b\x32\xb3\x7d\x43\x0b\x05\xe5\xa9\x7a\x27\xe8\x51\x7d\xc4\x8c\xf7\x61\x90\x87\xa9\xbd\xea\x9f\xbd\x4d\xde\xfa\x14\x8c\xc9\xee\x73\x28\x15\x33\xba\xa2\x23\x47\x80\x8c\x8f\xea\x1e\x31\x48\x8f\x9a\x48\xd3\x8f\x9f\x83\x8c\xad\x08\x76\x3f\x34\x45\xd7\x3c\x9d\x24\xdb\xae\x6a\x6c\x16\
b\xa5\xc4\xea\x43\xb1\x6d\xe7\x43\x7e\x88\x47\xcd\x31\x87\xbe\xb3\xda\xea\x59\x20\x82\xeb\x1f\xe2\xb4\x9a\xcb\x12\x75\xf7\x2c\x67\x87\xf9\x67\x41\x0c\x96\x1f\x2b\xca\x10\x9a\x3b\xf8\x47\x4a\xe8\x2c\x27\x7a\x8c\x12\xfc\xef\x26\x05\x62\xfa\xf1\xe8\x6c\xd5\x71\xbc\x7c\x84\x25\xdf\x74\xc7\x9c\xf9\x1f\x77\x62\xdb\xb8\x5b\x3b\x3e\xa9\xb1\x06\xfb\xee\x97\x3d\x13\x33\x6f\x2b\x1e\x17\xd9\xee\x0b\xf4\x87\xeb\x68\xe5\x3c\x3a\x25\xc6\x7d\x5d\x71\x5f\xd0\xf5\xc1\x4b\xb8\xd6\xb8\x03\xc6\x13\x33\xee\xa0\x37\xac\xea\xc2\x98\xe4\x8f\x4b\xf8\x35\x5a\x71\x97\xc8\x78\x83\x43\x17\x89\xc5\xcf\xc3\xa3\xb9\x34\xfd\x05\x27\xe6\x41\xd9\x26\x2b\x94\x27\x90\x5f\xff\x94\xa8\x80\x4f\x95\xd4\x59\xba\x0a\x6c\x0f\x4f\x79\x38\xc8\x67\x38\x23\x0f\x45\xb3\x5c\x84\x75\x3c\x5c\x52\x50\xac\xae\x65\xb8\x2d\x7c\x93\x56\x6c\x6b\x72\x4e\xe2\x16\x8c\x32\xeb\x95\x47\x55\x59\x25\xb2\x2b\xdd\x7e\xc6\x47\xbc\x24\x0b\x0d\x76\xea\x32\xfc\xae\xbf\x77\x99\xc5\x42\xfc\x57\x7d\x56\x88\x98\x60\x15\x4b\x04\x74\x5d\x90\xf9\xf3\x20\xe6\x35\xd2\x07\x56\xec\x8e\x0e\x17\xbf\xb1\x36\x3a\x0b\xe1\x74\x8e\x58\xeb\x38\x2d\x4d\x50\x50\x54\xbe\x29\xdd\xbd\x36\xeb\xb0\xde\xef\x66\xfe\xde\xb2\xd7\x30\x72\xb2\xfd\xa1\x9d\xc2\x80\xf3\x07\xcd\x79\x71\x39\x5e\x21\x3c\xd8\x77\x29\x75\x29\x42\x70\x61\x42\x0e\xd7\xfe\xb3\x79\x0d\xf6\x71\xc1\x7a\xba\x3c\xf1\xbd\xa4\x49\x7f\xef\x06\x93\xa2\xd9\x15\x69\x3f\x8f\x5f\xfe\x71\x8d\x62\xa9\x57\x6c\xae\x3d\x22\xfc\xd2\x57\x6b\x47\x7d\xe5\x27\x48\x78\xe0\x88\x45\x0c\xbc\x62\xe4\xef\x9e\x25\x61\x3f\xe3\x5d\x67\xb1\xc6\x8a\xb5\x3b\x3e\x1c\x16\x97\xc6\x19\x44\x90\x32\xd3\x73\x8e\x9f\x8f\x44\xfd\x85\x21\x57\x77\xec\x17\x0a\xb8\x85\xf1\x63\x11\x9d\xdb\x6a\x41\xa6\x11\x00\x07\xf4\xdf\x9a\xc3\x3a\xd2\xb6\xd1\x78\x8c\x38\x4d\xaf\xfd\x58\xbe\xc1\x7b\x4a\xd3\xdd\x41\x04\xec\x46\x24\x4e\x88\x07\xd6\xcb\x81\x17\xc6\x5f\x04\x7b\x2d\x6e\xca\xc2\xd2\xd0\xc4\x99\x49\xba\x8a\x4f\x84\x98\x7d\xfc\x6a\x47\x64\x32\xa9\x76\x42\xe2\x31\xa6\x7d\xc5\xbb\x88\x0f\xec\x15\x18\x27\x0e\x6c\x9d\x65\x2b\xed\x02\xad\x3d\xa4\x10\xf4\xaa\x6e\xa1\x22\x14\x35\x08\xab\x5f\x3b\x73\xbc\xfc\x03\xce\xf9\x52\x69\x17\x86\x57\x6f\xf9\x84\xbd\xf1\x0e\xfb\xa1\xff\xaa\xf5\x10\x20\xf0\x72\xd7\xde\x07\x85\xa2\xc2\x63\x62\x5b\x7d\x65\x8a\x41\x5e\x09\xde\x60\x27\xe9\x7a\xdb\xfb\xa3\x60\x03\x4f\x7b\x96\xc3\xaf\x17\x75\x9c\x4d\xde\x6b\x58\x9e\x5f\xef\x90\x8e\x22\x19\x97\xb9\x3f\x79\xa4\xbc\x74\x8d\x60\xa0\x56\xd7\x4d\x35\x58\x7e\xdc\xb9\xae\x14\x16\xcf\x91\xb0\x43\x73\x7b\xbc\x0d\x81\xfa\x85\x76\x00\x1e\x8a\x2d\x9b\xc2\x9a\x99\x59\x9a\x86\xcb\x0f\x75\x40\x7f\xaa\xee\xf6\xa0\xe2\x58\x51\xd7\x10\x3a\xeb\xa8\x13\xb3\xe1\xe6\x9f\x5f\xb5\xab\x20\x54\x20\xce\xcd\xfa\x79\x8e\x81\xe8\x59\x22\xb6\x5f\x96\x7c\xf6\x0c\xad\x9f\x68\xda\x8c\x12\x91\xff\x82\x24\xa7\xb8\xcf\xec\x5a\x15\x17\xe9\x6c\xf6\x9e\x30\xb0\xf3\x3f\x20\x17\xa1\xfd\xa7\x9c\x12\x00\x0c\x36\x6b\x4a\xce\xd0\x81\x06\x36\x0a\xa1\x7b\x6e\x11\x0f\xbc\x5b\xe8\xdb\xe9\x1b\x34\x60\x41\x5d\xae\xb6\x94\x83\xa5\x95\x92\xd0\x22\x3d\x58\x4d\xf2\x71\xa3\xaa\x82\x9c\x9c\x03\x1b\x92\x30\x41\x57\xff\x41\xec\x42\x17\x3d\x41\xf2\xfb\x91\x6f\x19\x59\x58\x13\x42\xa1\xab\x27\xd4\x71\xb5\xd4\x9b\x1c\xa2\xdd\x36\x91\x9b\x8f\xb0\x54\x15\xf3\x17\x66\x17\x1f\x29\xe7\x77\x51\x12\x77\x3b\x52\x16\xe2\x8a\xf0\x9b\xcc\x84\x48\x76\x68\xd4\xb9\xf0\xd2\xef\xc1\x7a\x4f\xf7\x58\x1d\xed\xcc\xc5\xa3\xbb\xf6\xd1\xe4\x02\xde\xa0\x7e\x99\x6d\x75\x10\xc6\x27\xc4\x66\x0b\x6b\x3d\x5b\xb8\x5b\x84\xd3\x00\x0f\x4d\x6a\xf9\xc6\x58\x79\xe7\x90\x1c\xa4\x80\x2c\xe8\x36\xff\xa5\xee\xa9\x15\xfa\xd2\x2b\x6e\x97\x35\xc7\x76\x89\xda\xec\x8a\x29\xea\x8c\x99\x5e\x83\xf0\xb2\x6a\xb3\x
9b\x11\x7a\x3d\xa9\x48\x18\xc1\xca\x26\xc8\x7d\x2b\xf5\x49\x98\x48\x46\x36\x76\x55\xa6\xb0\x67\x3b\xfc\x72\x85\x3e\xe2\x63\xb8\x14\x22\xb0\xba\x4b\xe1\x66\x20\x07\x56\x65\xa2\x06\x41\xd7\xb5\xaa\x81\x0a\x2e\x9a\xd0\xb2\xd0\x4d\xe5\x6a\x6a\x89\xc9\x5f\x13\xeb\x24\x11\x09\xcd\x42\xec\x23\x91\x2a\xcf\xd5\x05\x45\x6d\xcb\x27\xaa\x27\x4b\x3e\x24\xcd\xb2\x22\x07\xd9\x30\xf5\x72\xbc\x0e\xbb\x8e\xf0\xff\x4a\xe5\x4b\xff\x2f\x99\xac\x2e\xd0\x95\x12\x00\xf4\x44\x7b\x0a\x5b\x9f\x32\xd4\x5b\x12\x0f\x0f\xf1\x0e\xef\xa3\x28\x2d\x42\x85\x9a\xe7\xff\xb1\xc2\x4c\x47\x4b\x9b\xd8\xce\x66\x1a\xb6\xf5\x8b\x0a\xc6\x95\x49\x82\xc9\x89\x84\xd6\x70\x34\x1e\xe5\xec\x17\xb7\x7f\xfa\xc7\x7b\xf0\x88\x56\x61\x85\x4b\x10\xf1\x67\x04\x22\x68\x59\x9a\xc4\xa2\x84\xe2\x13\x2b\x89\x2d\x72\xd8\xcf\xda\xdb\xce\xf9\xc8\x1c\x71\xe8\x33\x47\x65\x7f\xd5\xdf\x81\xbf\xb2\x6a\xef\x28\xfa\xc9\x98\xcc\x92\x71\x78\x58\x99\xe2\x52\x07\xda\x3b\x4c\x7e\x03\x4d\xf5\x54\xf0\xf5\xa4\x0e\x70\x52\xc6\xd9\x04\xe7\x50\xfd\x0c\xa7\x4f\x72\x01\xe7\x2e\xaf\x16\x8b\x0a\x45\x3c\x99\x70\x1a\xa1\x2c\x13\x87\xe7\xb7\xb2\x66\xdc\x84\x44\x6e\x7a\x02\x93\xc6\x1f\x53\x1e\x89\x43\x9e\x11\x4b\x7e\xec\x2c\x29\x0f\xc9\x31\x08\x58\x8e\x76\x21\x04\x43\x71\x15\x4d\x74\x14\xaa\x6d\xdd\xa9\x2c\x43\xde\xe1\xc6\x5d\x9b\x59\x77\x38\x75\xe7\x98\x13\x5b\x91\x29\x39\x6c\xa3\x3a\x16\xcf\xdb\xf5\xef\x00\xa5\x84\xb0\xe1\xb1\x05\x9b\x31\x78\xa2\x10\x5b\xb8\x69\xc8\x1d\xc4\xfa\x27\xda\x49\x87\x1e\x90\x31\x2c\x53\x24\xe4\x61\x88\x47\x78\x7b\x94\x08\x63\x2e\x60\xa7\xe5\xc5\x67\x8b\x09\xce\x07\x99\xec\x59\x29\x15\xf6\xe2\x94\xd1\xff\x81\x15\x86\xe3\x86\xfa\x52\x08\xcb\xbd\xc1\x84\x55\x04\x87\xc5\x4a\xed\xc9\xf2\x54\x1a\xac\x71\x66\xb0\xd3\xbd\xf5\x01\x4f\x0d\xc0\x31\x18\x95\x78\xf7\xf4\xf8\xae\x88\x5a\xe2\xd5\x38\xe8\x36\x02\x17\x31\x86\xc2\x3e\xac\x0f\x79\xc6\xaf\xe0\x85\x48\xe7\x64\x36\xd2\x90\xff\xd8\xdd\x01\x41\xa1\xa5\xdb\x90\x7b\xcd\x85\x73\x0a\xbf\x5e\x98\x9c\x88\xea\x98\x5a\xf5\x1b\xf6\xd9\xf0\x43\x08\xed\xec\xbb\x99\x96\x6d\x4d\xcc\x71\x74\xac\x89\x35\x36\xb5\xa2\x57\xc1\xe9\xd0\x45\xf0\x4b\xa4\x3e\xe9\xb3\xb8\x22\x3d\x0c\x5c\x06\xc1\x8b\xf0\xa3\x50\xd9\x28\xcf\xed\x96\xe7\xd3\x35\xf9\xb5\x4b\x62\x05\x18\xda\x5b\xfa\x8f\xcd\x58\xe0\x84\x35\x1c\xc4\xf8\xc3\xbc\x4e\xa4\x2d\x78\xbd\xce\x36\x48\x3e\x24\x88\xfd\x31\x0d\x23\x5a\xcd\x0e\x5a\x89\x66\xa3\xc9\xf2\x8c\x05\xc1\x23\x56\xa6\x87\xa8\xb6\x84\x2b\x49\x3e\x94\xc4\x7e\xfc\x32\xaa\x8e\xa8\x2a\x2f\x3d\x0e\x81\x27\x0e\x24\x39\x62\x57\x67\xd2\x64\x38\xbf\x83\xdb\xe2\xdf\x0c\xa4\x5f\x30\x63\x5c\x9f\x9e\x6d\x55\xe8\x88\x06\x6d\x11\x1f\xc1\x15\xf6\x55\xe3\x28\x16\x4d\x4c\x52\x40\x3e\x68\x07\xe4\xc1\x68\x5f\x3f\x53\xe4\x61\xaf\x3a\xac\x09\x89\xda\x82\x11\xf3\xfd\x45\xad\x26\x79\x38\xe3\x71\xe0\x48\x2c\x0d\xcc\x7e\x4a\x1d\x63\xe5\xbf\x99\x26\x2c\x30\x47\x8d\xb1\xf7\xea\x2f\xb8\x2d\x3f\xc0\x3a\x69\x99\x3a\x2a\x77\x9e\xf4\x40\x17\x8d\x9c\x3f\x00\x92\x7b\x07\xb1\x48\x8a\x85\xdf\xf9\xa3\x29\x75\xf6\xf0\xc5\x4a\x93\x33\xc5\x0d\xbf\xb9\x0b\x13\xaa\xcc\x6d\xf8\x9d\x39\xeb\x78\x67\x96\x6f\x72\x43\x09\x7a\x22\x88\x9e\x5e\x86\xf2\xaa\xf0\x92\x7d\xf1\xa7\xfd\x2a\x7a\xb9\x5f\xbe\x3f\xbc\xec\xd3\xa0\xe1\xe0\x9f\x3c\x64\xc4\x51\x15\x58\x9d\xd3\xf3\xec\xa8\x7f\x3f\xf5\xa8\x55\x95\x3b\xdb\xc4\xab\xde\x1a\x6c\xdb\x16\xe8\x5f\x58\x57\x0a\xa8\xac\xe2\xb0\x7b\xc8\x17\xc1\x53\x14\x69\xe9\xca\xe2\x63\x5f\x9c\x0e\x5c\x34\x4d\xb8\x1c\x49\xf7\xaa\x0c\x66\x6a\x78\x74\xd7\x34\x95\x95\x10\xfd\xca\xb6\x8c\x00\x32\xd4\x2c\x21\xf3\x2d\x0a\xbc\xcc\x41\x49\xcc\x63\xd8\xac\x25\xe1\xa8\xe7\xbe\xa4\xb7\x17\xa6\x56\xea\xb2\x32\xf4\x29\xf6\xcf\x3e\xfd\xe3\x9f\x5c\x15\x09\
xb6\xf8\xbe\x61\xbf\x7c\x0f\x58\xc1\x2a\xda\xaa\x69\xf6\xda\x38\x8b\x65\x70\x4b\x10\xe7\x22\x06\xb2\xe9\x94\x79\x01\xac\x13\x0c\xb4\x94\x62\x79\xc8\xcd\xd0\x6a\xd8\x68\xf8\x4f\xeb\x68\x8b\xe4\x2f\xef\x44\x2c\xc8\x45\x2f\xed\xa7\x41\xfd\xab\x2f\xc8\x51\xf1\x82\x82\xe5\x1c\xcb\xb4\xf2\x2a\x3a\xee\x41\x83\xc5\x03\x89\xd9\xfe\x78\xff\x13\x30\x76\xa9\xd6\x81\x97\x1b\x49\xa8\x47\x5d\x83\xd9\x72\x48\xbf\x11\x51\xc1\x18\x3a\xa0\xcd\xc3\x72\xaa\x52\x09\x4f\x9e\x48\x75\xb8\xa2\xce\x27\xe7\xd6\x12\xfa\xc3\xdd\x84\xc4\x69\x67\x39\x0c\x5d\x70\x94\x66\x47\xe2\x65\x89\x12\x99\x67\xfe\x0e\x75\x3f\xcb\x60\xd6\xae\x7b\xd9\xa8\x9a\xf7\x90\x4b\x2e\xc0\xd9\xd3\x14\x1b\xfa\x65\x89\x62\xe0\x3c\xeb\x78\x6b\x06\x45\x98\xfe\x2b\x12\xd7\x40\x48\xa3\x34\xcd\x9f\x0d\x2b\x48\x90\x2c\x45\xf7\xc4\x70\x18\x54\xcc\x4f\xbf\x97\xeb\x25\xfb\xa0\x7a\x5e\x52\xde\x70\xf0\xa8\x54\xee\x0a\xf7\x2a\xbe\x7c\x35\xfa\xad\xad\xaa\x9e\xc6\x88\xdc\x17\xf9\xad\xc2\x13\x60\x6a\xd1\x4a\xae\x68\x9b\x93\x12\x2f\x95\xd9\x5d\x4b\x92\x3e\x84\x52\xde\x34\x75\x43\xb8\x99\x8a\xd7\xaa\xa8\x95\xfa\x4f\xbf\x0f\xbf\x77\xee\x64\xb8\x78\x04\x8c\xda\x6f\xf0\x6f\x28\xa7\x0c\xea\xd7\x12\x5a\xe0\xdc\x24\xa0\xc0\x48\x8b\x6c\x8c\xd6\x5f\x01\x6b\x55\xf7\x41\xef\x1f\xf7\x9d\x53\x5f\x6d\x15\xe1\x3f\xd5\x34\xfc\x89\xf8\xd9\xfb\xe4\xa3\x8f\xc9\x63\x3e\xee\x1a\xe3\xac\xda\xba\xfd\x09\x84\x21\xad\x8c\x8e\xa3\x62\x75\xda\x8d\x2f\x86\xe3\x1e\x59\x2e\x93\x94\x87\xb2\x78\x0b\xdd\xd4\x52\x5b\x60\x1a\xfa\x79\xcd\xc7\x59\xef\x2d\xd8\x45\x6e\x55\xc7\xb6\xb7\xb7\x5e\x76\xc9\xaf\x43\x03\x74\x7b\xbf\xe3\x16\x05\x33\xc9\x1f\x9e\xcc\xb5\x5d\x32\x0f\x57\x38\xd2\x8b\xdb\x48\x65\x8c\xd4\xd9\xf3\xed\x66\xcb\xaa\x7d\xeb\x1a\x63\xf3\xa4\xa9\xf9\x06\x9b\x4a\xc2\x9d\x95\xf9\xd5\xb3\x95\x1f\x2c\x70\x65\x11\x39\x17\x82\xc4\x1e\xd6\xc6\x7f\x82\x47\xbc\x3f\x96\x6b\xe2\x9b\x6d\xcb\xa7\xc6\xe8\x83\x4b\x5a\xfe\xc0\x88\xf1\x8d\xfb\x0f\xb7\x94\x24\x3a\x56\x01\x69\x90\x7e\x6f\x57\x73\x94\x9a\x64\xca\x72\xb3\x53\x9b\xdc\x37\x90\x55\xee\xd9\xd4\x36\xc8\x75\x25\x9b\xca\x6b\x60\xff\x21\x55\x41\x63\xb6\x83\x9c\x01\xa6\xc1\xe0\x3e\x0a\x63\x7b\x3d\xe5\x43\x53\x3a\x7d\x2e\x7c\x10\x26\x52\xdd\x29\xd0\x09\xef\xdd\x1c\xf1\x70\x01\xa5\xda\x4f\x0a\x83\xdd\x08\x1c\xc0\xb5\xac\x8a\x66\xe4\x6b\xec\xe3\xf7\x44\x11\x6c\x78\xec\x79\x05\x0c\x8f\x80\x77\x62\xb4\x8d\x57\x59\xbe\x80\x80\x90\xb2\xe5\x83\xd0\xcd\x08\x2a\x9c\x72\x6f\xb6\xa1\x89\x8d\x37\x11\x48\x46\x7f\x25\xfd\x0b\xe9\xf1\xcc\x29\x29\x3e\xd0\x4a\x16\x7e\xc1\xe2\x1c\xed\x5d\x02\x5c\x46\xf6\x64\x5c\xbe\xb0\x71\xcd\xbb\xff\xd9\xa0\x31\x86\x74\x99\x75\x08\xe6\xf1\x11\xac\xf9\xbc\x35\x7a\x78\x1e\xaa\x44\x6d\x91\x17\x9e\x40\x1e\x32\x47\xe4\xb7\x56\xa2\xbe\x6b\x47\x81\x0f\x19\xbc\xcf\xe5\x73\x51\x03\x54\xfe\x87\x3d\x8c\x1a\x1f\x0d\xeb\x1c\x97\xf5\x47\x78\x22\x70\xa6\xcf\x8c\x39\xb7\xe9\xc5\x02\x09\xf9\x3c\x05\x9f\xe1\xf2\xfe\x14\xb5\xd7\xd9\x54\xf8\xa6\xac\xed\x01\x1c\x2a\xa7\x6a\xda\xdf\xa6\xe4\xb9\x70\xf8\xd3\x86\x36\x04\xff\xdd\x4b\xee\x93\x35\x37\xfc\x77\x55\x68\xdb\x0b\xdb\xcb\xd3\x66\x09\x8a\xe6\x70\x70\x9f\x43\x7c\xab\x1c\x3b\xe1\x55\xe3\x0f\xcb\x33\x62\x5c\x97\xbe\x90\x9b\x89\xf8\xb9\xac\x15\x77\xb4\x21\x67\xd9\x03\x2f\x2e\xb6\xec\x63\xa2\x04\xd2\x75\x20\xc3\xb7\x6d\x19\x6c\x94\xa0\x9f\x16\xf3\xc9\x96\x00\x2e\x20\x02\x7b\xb0\x32\xff\xd9\xde\x90\xa3\x69\xbb\x83\x78\x9b\x76\x52\xb1\x15\xea\xed\x94\xd7\x63\x44\xfa\x10\x92\xfe\xc9\xc4\xfb\x96\x62\x0f\x39\x91\x44\x1c\x3d\xf3\x9e\x91\x25\xd9\x09\xc0\xb3\xb5\x86\x39\x7d\x6b\x6f\xad\xc5\x79\x5b\x09\x70\x04\x9f\xa0\xea\x4f\xb5\x3b\x49\x90\x35\x88\x79\x2c\xf0\xc3\x3c\x14\x4c\x02\xc9\x92\x52\xb8\xec\x00\x38\xcd\xb2\x0e
\x8c\xe4\xd6\x23\x6c\x05\xeb\xff\x47\xd6\x9b\xa5\x3b\xcb\xf3\x4c\xa3\x73\xc9\xf1\x3f\x29\x03\x0e\xb0\x68\xcc\x0b\x38\x79\x92\xd1\x6f\x95\xaa\xe4\xdc\xdf\xb5\x8f\xf0\x4a\xb7\xd2\x80\x6d\x95\xaa\xf9\x2f\x6e\xb1\xaa\x9c\x8e\xff\x30\x14\x8e\xcd\xbf\x1c\x09\x61\x1a\xcc\x01\xa4\x44\x2d\x64\x1d\x84\xc9\xb0\xc1\xd9\x25\x30\x22\xd5\xdf\x77\xfa\x0a\x09\x0a\x42\x98\xed\xfa\x03\x83\x5e\xa5\x31\xa2\x12\xad\x5b\x45\x23\xee\x20\x8e\x75\xc3\x97\x2e\xf3\xbd\xa0\xe6\x73\x0a\x01\xb4\xe9\xa7\x60\xb2\x8e\xed\x49\xb7\xd0\x59\x69\xc1\x06\x69\x90\x5d\x0f\x31\xf2\xa3\xfb\x03\x77\x69\x95\x76\x68\xf1\xd3\x3e\x31\x23\x39\xdd\xbb\x5f\x0f\xb6\x3f\xf4\xcf\xfb\xa0\x6e\xc8\x0f\x9d\xc8\xaa\x88\x85\x21\x15\xd5\xa1\x7e\x0c\xd6\x21\x89\x39\xc0\x44\xe6\x7f\xb2\xa0\x52\x97\xb8\xf9\xf8\x7e\x70\x1b\x3e\xd7\x97\xf3\xd6\xa7\x68\x3e\x79\x73\xff\xf4\xae\xbb\x28\xa7\xc1\x16\x79\x2b\x34\xec\x05\x5f\x4a\xba\x0a\xfb\x6e\x9a\xbe\x82\xf3\x40\x86\xdd\x2b\xbd\x28\xef\x4e\x57\x17\x8b\xf2\x2b\xed\x30\xe6\x10\x4c\x3c\xb0\x26\xae\xa3\x2c\x27\xdd\x7a\xb8\x61\x59\x77\xf5\xb3\xc0\xde\xef\x38\xc8\x1b\xa7\x52\x0a\x77\xcb\x1e\xce\x63\xf9\x7e\x18\x31\x27\xb9\x5b\xe6\xb3\xa1\x77\xb3\xf2\x42\x89\x07\x9c\xac\xef\xa4\x6e\x2c\xa2\xfc\xf8\x72\x8a\x22\x42\x93\xc0\xb1\xd1\xb7\xac\xd5\xaf\x7a\xf7\x00\x62\x39\xde\x9b\xa9\x05\xbe\xc3\xd6\xdb\xc7\xf7\x29\x9f\x40\x18\xf9\x3c\xc4\x34\x75\x19\x0e\xf7\xdd\xf0\xa5\xd3\x80\xdc\x52\xd0\xde\x7d\xe0\xdb\x63\xa1\xd0\x63\xbc\x36\x3a\xdb\x18\x80\xfa\xa8\x30\x86\xa3\x91\x4d\x25\x1b\x2a\xff\xc7\xe4\x87\x5d\x6c\xc0\xa7\xfe\xef\xe7\xf0\x20\xb4\xf5\x91\x8b\x97\x8d\x08\x34\xcb\x9f\x47\x26\x54\x61\xd4\xd3\xb4\xe3\x53\x18\xfd\x30\x68\xee\xca\x49\x0b\x1e\x29\xa7\x3f\xa7\x64\x6d\xc8\xd3\xfa\x25\x76\x7c\x29\x62\x5e\xdb\x53\xe0\xca\x34\xcc\x40\xd4\x7c\x83\x9c\x69\x94\x81\x7d\x37\xb7\xd0\x11\x78\x79\x7e\x3a\x29\x6d\xc1\x72\x98\x83\xe6\x80\x82\x2d\xa8\x08\xe0\x38\x04\x16\x73\x16\x5b\x30\x44\x14\x3d\x7f\x3d\xb4\xb3\x0c\x84\x13\xdd\xb3\xda\x8f\xc3\x2a\x63\x9f\xc0\xab\xfb\x5e\xb2\xd7\x33\xcf\x61\xd1\xac\xfa\xe8\xcc\x0a\x94\xf8\x70\xe7\xee\x50\xb5\xcd\xbc\x2d\x2a\xf4\x7f\xd5\x5f\xcc\xb7\x92\x0f\xa7\x3c\x44\x3e\xf3\xe1\xbb\xc0\x07\x71\xec\x33\xb2\x21\x62\xc3\x9d\x43\x2a\x61\x85\xa2\x6d\xa8\xb8\xb5\x76\x5d\x89\x53\x10\x3e\xc1\x8e\xdd\xcb\x0f\xa5\x56\x38\x62\x52\xe3\x71\x4f\x12\xa6\xc3\x55\x6f\x16\xaf\x00\xf3\x6e\xf8\x9a\x39\x05\x61\xe7\x80\xd6\xcb\x1e\x0d\xe5\xfc\x72\x1f\x12\x3d\xc2\x50\x02\xa3\xc0\xc7\x00\xf7\x50\xff\x43\xd7\xc4\x87\x6f\xa1\x1b\x41\x35\x5e\xc3\x65\x2c\x0e\x81\xff\x08\x3f\xa2\x28\x70\x2f\x6c\x97\xc9\x1a\x69\x4c\xc2\x76\xb6\x4c\x2d\x4f\x96\xa3\xe2\x29\xbd\xd0\xe1\x2d\x3a\x3e\xcd\xb9\x0a\x3e\xb0\x2a\x80\xb7\x78\xe8\xbe\xff\xab\x7e\x88\x06\x2a\x36\xe8\xa5\x05\x35\xaa\xe6\x80\xf2\xa8\x92\x3d\xe2\x22\xa4\xb3\x59\x06\x6d\x47\x17\x5b\x71\x90\x5e\xe7\x06\x7f\xd7\x2f\x01\xee\xaa\x40\x27\xc5\x2c\x93\xb8\x48\xf6\x84\xbd\xb5\xa4\xbd\xba\x2b\xe9\xf9\xf6\x12\x8d\x47\xd7\xda\xab\x2b\x0c\x26\x1d\x2f\xb3\xb5\x10\x27\x5e\x35\xf1\xac\xb3\x58\x69\xee\xc1\x4b\x98\x59\xb6\x59\xab\x67\x61\x3e\x88\x46\xc7\xa6\x76\x0d\x49\xfd\xb3\xc8\x66\x74\x05\xbc\x42\xc0\x65\x61\x2c\xe6\xb2\x57\xe5\x3f\xe9\x22\x07\x11\x38\x16\x6d\xdb\x77\xf7\x12\x56\xda\x42\xc8\x1d\xcf\x32\xff\x92\x6b\x6c\xdc\x8c\x54\x96\xdc\x3c\x80\x8e\xd8\x90\x6f\x81\x45\xf3\xfa\x5c\xb4\x77\xa7\x93\x93\x93\x15\x14\xa7\xbc\xbb\xda\x81\xfa\xa5\x8d\xb6\x41\x6a\x07\xfe\xa1\x25\x44\x27\x21\x5b\x71\x98\x95\xa4\x2e\xbd\x6d\xc2\x47\xf6\x3a\x27\x9a\x57\x4e\xb3\x90\x6c\x86\xe4\x4f\x2d\x69\x71\xca\x32\x8f\xb6\x19\xe6\xdb\x76\xda\x7b\x78\x57\xda\xa6\x5b\xf3\x85\x47\x4a\xf9\x31\x94\x4b\xb2\xf2\x1c\x2b\xb1\x1b\x1
9\x56\xe7\x9d\x7e\x89\x67\x73\x9c\x2e\x59\x99\xb8\xe4\xad\xfa\x86\xdb\xdd\x6f\x39\x52\x14\xc4\x88\x22\x20\xa0\xe5\x9e\x3f\xc4\xf8\x4f\x00\x85\xad\x57\xdf\xe0\xa9\xd2\xfc\x3a\xad\xcd\xe4\x47\xbe\xd9\x29\x9f\xfa\xa6\x9f\x55\x0b\x10\xa2\x2e\xb8\x21\x3e\xa9\x14\x7e\xce\x25\xec\x53\x9f\x6a\x83\x3e\x49\x21\xdf\xc2\x28\xe1\x99\xe5\xa0\xff\x04\x37\xd4\x8f\x9d\x16\x36\xe4\x6b\xfb\x26\xb7\x8a\x0e\x01\x84\xd0\x07\xb0\xde\x26\x2f\x82\xe9\xd9\xa2\x44\x02\x7b\x8e\x90\xe5\x33\x44\xd1\x43\x95\x47\xf2\x10\x26\xec\x03\x88\xc1\xf4\x78\xcc\x6f\x65\x52\xb1\x56\x1d\xca\xe5\x33\x3b\xc7\x91\xff\x39\xcc\xa7\xbc\xb5\x87\xf9\x29\xab\x0b\xdb\x5a\x9c\x0b\xef\xc4\xbe\x5a\x7b\xec\x30\x0e\xda\x23\x0c\x2b\x83\x4f\xa4\x3d\xf1\xa6\x4c\x3a\xec\x98\x1d\xb3\x7f\x38\x17\x63\x8d\x38\x4b\xf6\xc3\x07\x5d\x88\xbd\x9d\xcf\x14\x2e\x60\xaf\xfc\xd1\x31\x18\x18\x95\x5e\xc6\x8e\x58\x6b\x9b\xda\xa6\x8f\x1e\xeb\xf5\x49\xaa\xa6\xbb\x39\x93\x6b\xf1\x12\x52\x5d\x95\x7f\x61\x9b\x62\xca\xef\x4b\xb0\x3a\x40\x58\xe6\xb6\x19\x00\xfd\x1e\xbc\x0d\x6e\x5a\x01\xb6\xfb\xd1\x49\x17\x1d\xb3\x2e\xfa\x19\xd6\x49\x3e\x9a\x6a\x38\x13\xf6\x48\x7e\xf9\x6d\x93\x3d\x28\x97\x9f\x1c\x05\x38\x36\xa6\xdc\xac\x7b\x07\xc3\xf7\xd5\xdc\xcf\x71\x93\xbc\x1d\x24\xdc\x01\x23\x9f\x15\x63\xee\x61\x1c\x1f\x29\xa9\x7a\x99\x70\x75\x9e\x41\x18\x2e\x91\x57\x20\xe0\xb6\x97\xca\x1a\x69\xbb\x0b\xd5\x39\x31\xf6\x53\xc2\xd4\xae\xbc\xb9\xff\xa5\xe7\x91\x00\xe5\xb2\xcb\x5c\x72\x8d\xc4\x4c\xce\xc2\xdd\xda\x78\xb1\xb8\x8e\xe4\x24\x29\x69\x2f\x93\x24\x7c\x40\x05\x63\x17\xf6\x66\x5d\x22\x5e\xd7\xb9\x0f\x39\xd1\xef\x73\x4f\x54\xf6\xa3\x0b\xa1\xa8\xaf\xb5\xab\x59\x36\x93\x11\xfd\xdf\xa5\x9f\x60\x2a\xd5\x4f\xa6\x31\x75\x0a\x57\x49\xd7\x38\x68\x6f\xcc\x9d\x9d\x4d\xb6\xb4\x03\x4e\xc1\x77\x48\x08\x3e\x91\x44\x14\x3e\xec\xfe\x3d\xd9\xb4\x3f\x3b\x71\x28\x59\xa1\xbe\xcb\x5a\x2a\x21\x91\x09\xa7\xc8\x17\x6d\x46\xdf\x26\xbb\x86\x1a\x83\x14\x06\xaf\x9f\x7a\xfb\xc2\xf2\x09\xcb\xc3\xf7\x67\xe4\xbf\x7e\xd7\xd3\x35\x5c\x95\x33\xfb\x7b\xf6\xec\x1a\x42\xd2\x90\x6d\x89\x3a\x6b\xcb\x38\x69\x1f\x13\x04\xb9\x82\xaf\xe7\xeb\xab\x63\xb3\x02\x07\x0d\x2e\x25\x0e\x7d\x0f\xf9\x4e\xff\x05\x27\xf7\x5f\x7e\xbe\xfd\x25\xe7\xef\x71\x94\x63\x85\x82\xc7\x1e\xdc\xac\x13\x8e\xb2\x3a\x59\xd3\x00\x33\xc7\x9c\xd9\x01\x8b\x91\x4f\xe9\x39\x1c\x3e\x61\xc1\x94\x77\x6f\xd1\xf2\xa2\xab\xc2\x38\xea\x4e\xb9\x5f\x75\xf6\xe9\x5d\x37\xed\xa8\x49\x2f\xb8\x4f\x8a\x60\x99\x0e\xea\x83\x00\x6e\x6f\x39\x0e\xdf\xf3\x11\x7e\x97\x6b\x23\xea\xd2\xcd\xc9\xc7\x68\x9b\x09\x77\xcf\x77\x10\x7e\xee\xcc\xa8\xcb\xdb\x49\xfd\x3d\x87\xae\x8d\x79\x00\x32\x5f\xb4\xf5\x7c\xff\xe8\xbb\x6f\x24\x94\x11\xde\xae\x8d\x5b\x51\xb5\x0b\x0f\xff\x4a\x7b\x9b\x1d\x8f\xe9\xa3\x76\x1d\xb6\xf4\x5b\x90\x7e\x5f\xaa\x35\xae\x3b\x24\xd5\xbe\x23\x6d\x00\xf6\x21\x78\xfa\x08\x77\x89\xeb\x20\x7e\x03\x76\x15\xb9\xc1\xfe\x15\x62\x1b\xaf\x6c\x1d\x1b\x32\xe7\xcb\x4e\x85\x21\x9e\xb5\x0a\x21\xbb\xb4\x88\x13\x69\xf7\xd2\xdd\xce\x84\x59\x06\x04\x6e\x40\xe5\x83\x71\x6f\xbd\x2b\xfb\x63\x6c\x9b\xff\x88\x41\x9d\x90\x6a\x1d\x9b\x70\xa7\x83\xb0\x18\x98\x3d\x2f\xcb\x36\xfd\x47\x0b\x45\x3d\xb1\xe1\xe4\x93\x52\xe0\x58\x57\x13\xa7\x45\x68\xee\x95\x37\x39\x0b\x6c\x9b\x82\x53\x15\xac\x71\xe5\xf8\xf7\x40\x08\xb8\x53\xba\xfa\x19\x4a\xfa\x5d\xd0\xbd\xed\x02\x03\xce\x0f\x7f\x85\xe6\x60\xa5\x3f\x1a\x9b\x25\xcf\xa1\x72\xcb\x59\x89\xab\x77\x18\x5a\x21\x71\x75\xe6\xe8\x24\x2f\xf1\x42\x42\x8f\x2c\x3c\xaf\x5f\x10\xdb\x95\x9a\x5c\xff\x92\x50\xfd\x94\xf7\xe6\x59\x23\xa8\x85\xec\x6a\xdf\xf9\x57\x0a\xa3\xdd\x83\x57\x78\x7e\xe9\xa2\x0b\x30\x33\x91\xef\xcc\xff\xc1\x6d\xd7\x47\x11\xe7\x9a\x77\xd6\x13\xfb\xed\xd9\xed\xbc\x2d\x
be\x44\x48\xe5\x34\x48\xcd\x11\x61\x63\xa5\x11\x9e\x59\xe1\xd6\x7c\x7c\x04\xbc\x1c\xa7\xac\xb3\xd0\x6e\x65\x0f\x00\xa3\x08\xb2\x63\x59\x62\x0b\x02\xbf\xe7\x83\x79\xe8\xf0\xd4\x4a\xea\x11\x7c\xc5\xa7\x19\x4e\x12\x79\x8e\xf4\x33\x3e\x98\x05\xbc\x1c\x49\xf5\xb6\x9d\x44\x6e\xac\x75\x76\x2a\xa8\xa8\x96\xf3\x27\x20\xe5\x8b\x26\x59\x36\xd5\xb7\x82\xc5\xdd\xd9\x3f\x3e\x6a\x6b\xef\x9e\xde\x60\x5c\x79\x7d\x20\x9a\xed\x56\x37\x06\xac\x96\xf3\x98\x58\x12\xb8\xca\x3a\x52\xbf\xb7\xf9\x93\x62\x63\xb9\xe1\x6c\xa6\x99\x80\x5b\xd2\x06\xce\x8f\xb0\x69\x92\x52\xe4\xf7\xb3\x29\x36\x0b\xd8\x7e\x4e\xf2\x02\xdd\x23\x78\xc6\xca\x8a\x39\xd2\x65\xd6\xbc\x36\x07\x51\xb7\xd9\x3c\x03\xfc\xcf\xf9\x15\x05\xc5\x1e\x49\x33\xf7\x1d\x22\xb8\xe6\x5c\x20\xa5\x07\xea\x01\xf9\x37\x42\xcc\xa9\xf2\x29\xa1\x56\x92\x52\xcf\x16\xab\x70\x6e\xde\x1a\x53\x27\x31\x5c\x7b\xfd\xec\xe4\x21\x7f\xd8\xa5\x74\xda\x0c\xcb\x87\x65\x11\x0b\x46\x47\xdb\xc3\x7b\x5d\x30\xaf\xc1\x8c\x06\xf8\x0a\xa3\x95\x87\xa3\xfd\x22\xc6\x8c\x51\x65\xe4\x81\x39\x38\x20\x46\xb3\x73\xb3\x7a\xd9\x23\x02\x8c\x6c\xd5\xbd\xf6\xe0\x72\x03\xdf\xc1\x3d\x58\x31\xb6\x3e\x3c\x89\x23\xc0\x64\x54\x7b\x3a\x30\x68\x95\x39\x8f\xbd\x98\x5a\x23\x56\xa9\xa4\x60\x53\x0f\x36\x73\xe5\xca\xd7\xb0\x9d\xc2\x4e\xb9\x5e\xc2\x1e\xa0\xe7\x48\xba\xea\xe5\xe3\xaf\xb3\x9c\x64\x23\x2d\xa7\x0e\x3a\xed\x97\x52\x9d\x6b\x66\x45\x0c\x69\x20\x70\x4c\x50\xd9\xb2\x05\x3b\x3b\x3e\xeb\x92\xc9\xf9\x59\x52\x44\x20\x2c\x11\x3d\xb9\x24\x01\x97\x7f\xde\x93\x54\xcc\x4e\xab\xbd\x6d\xb5\x67\x51\x62\xe5\xc9\x4b\xbf\xec\xc4\x96\x8e\x1d\xd4\x69\xb8\x44\xba\xde\x20\x12\xe6\xf0\xb9\xa7\x51\x41\x3c\x3b\xf9\xd3\xb6\xff\x61\xf9\x52\x82\xc6\x34\x51\x84\x3c\x15\xba\xed\x4f\x0e\x76\xad\x9a\x11\x11\x03\x91\x3b\x6d\x44\xa6\xbc\x89\x5a\xb3\x86\x13\xd8\x8e\x85\xa9\x39\xd6\xb7\xde\x41\x56\x10\xe4\x94\xec\x6e\x19\x9a\x56\x76\x15\x2e\x9c\x64\xa1\xf9\x8b\x34\x1f\x09\xef\x30\x83\x5f\xc1\xef\x8e\x44\xb0\x09\xb6\xb6\x61\xa6\xea\xcb\xf5\x58\xef\xa9\x45\xe2\xd7\xb5\x3b\x83\x70\x32\x0a\xdc\xa0\x2b\x2a\x4d\xc0\x4e\x36\x5d\x50\xcb\xda\x2e\xc1\xb1\x82\x30\x39\x1c\xcf\xc8\x06\x64\xcd\x88\xbe\x44\x94\xb5\x63\x91\xb3\xcd\x38\xdb\xd9\x91\x1e\xcd\xc5\x41\xf1\xfa\xab\xba\x17\x88\x34\xf1\xb5\x90\x85\xd8\x83\xb2\x41\xde\x09\x22\xa0\x3a\x16\xf4\x7a\xf8\x75\x23\x54\x1e\x55\x75\x78\x40\xe0\xf1\xb6\x80\xad\x05\x4a\xe4\xff\x4f\x0d\xed\xe7\x3f\xe2\x01\x28\x06\xc3\x3e\x35\xd4\x36\xcf\xf6\x51\x9f\xaa\x80\x9f\xc0\xe5\x6f\x95\x5b\xf6\xe6\xed\xe4\x73\xe8\x22\x8b\x00\x9f\x2f\x6f\x8f\xd0\x8b\xd2\xdd\xe7\xa2\xc0\x52\x25\x96\xae\x9b\x60\xe4\x50\x9d\x00\xed\x95\x4b\xf5\x95\xc3\x7e\x61\xd5\x0e\x43\x79\xb3\xcb\x10\x05\xd8\x3f\x61\xb3\xc3\xec\xde\x34\x3e\x7a\x06\xaa\x0d\x75\xb0\xce\x0f\x7b\xe7\x3c\xed\x59\x42\xb1\x34\x12\x29\x12\xfd\x85\x93\x09\xad\x18\xee\xac\xb9\xfa\x6c\x0b\x25\xef\x6e\x4e\x07\x03\x08\xfb\x72\x75\x85\x16\x81\x44\xcd\x21\xf1\x7d\x25\x2d\x1a\x43\x3a\x43\x79\xb8\x6a\x13\xe5\x55\x18\x2b\x2d\xda\xf1\x80\x23\xef\x75\x54\xb5\x0b\xce\x5f\xa5\xa7\xfb\x8e\x8b\xd6\x55\xbc\x9c\x11\x38\xec\x49\xe4\xe1\x25\x51\x69\x3a\x0d\xf3\x4b\x16\x5c\x9b\xf3\xf1\xfb\xf5\x13\x9d\x06\xc5\x9a\xf5\x39\xde\x88\x0b\x13\x67\xc9\x0f\xe9\xd7\xe5\x48\x0d\x49\x31\xb6\x43\x8d\xbc\xe2\xf0\xc2\xb1\x65\xea\x90\xea\x70\x15\x38\xd5\xd5\xf9\x3e\xc5\x23\x1a\x68\x0f\xdb\xad\x9c\x3d\x3b\x99\x61\x80\x73\x1e\xc5\x92\xf7\x8f\x83\x77\x5e\x14\x62\x54\x4a\x10\xc4\xc5\x90\xb5\x12\xea\xc7\x3f\x3f\xa1\x21\xe6\xbd\x60\xd9\x6c\x1a\x0d\x72\xb2\x88\x67\xda\xb2\xdf\xc8\xeb\x9f\xa8\xac\x7e\x2e\x0b\x1d\xdc\x25\xfd\xe8\x62\xfb\x87\x97\x59\x55\xfe\x5b\x5e\x71\x45\xf5\x35\x7b\x9d\xcd\xf2\x2b\xaf\xca\xe8\xec\x24\xc1\x75\
xa3\x33\x02\x1e\x36\x6c\xbc\xf6\x95\x65\x2f\xce\x88\xb2\x89\x05\xcf\xb5\xb2\x93\x6f\x73\x87\x28\x33\xd6\x67\x39\x28\x58\x9e\x24\x61\x05\xdc\x35\x45\xe1\x76\xb2\x9b\x22\x6f\x8f\x54\xff\x23\x0a\x02\x9d\x20\x7f\xbc\x24\xde\x51\xa1\xc1\x76\x02\xda\x4a\xe6\xb8\x4d\x3d\x07\x67\x75\x80\x28\x44\x31\x51\x92\xd1\x13\xcd\xe5\x2f\xfe\x54\x7e\x1d\x29\x84\x9e\xa9\x1b\xd0\x5f\xc5\xf3\xbe\xa2\x95\x7c\x23\x38\xf8\xeb\xb8\xde\xdb\x47\x7f\x73\x9f\xd9\xd5\x58\xfc\x27\xf8\x04\xf3\xec\x0d\xc9\xae\xbc\x32\x6c\x57\xd8\xdc\xea\xdf\x10\xf6\x68\x20\x03\x0e\xab\xce\xc2\x8a\x23\xd4\xdc\x1e\xba\x24\xee\xbc\x0d\xc3\xec\x36\x47\xe0\x21\x7e\x52\xc5\x9a\xbe\x73\x0b\xec\x79\x4b\xd2\x6d\xe5\x59\x78\xac\x59\xd9\x66\x67\x17\x26\xf8\xd7\x29\xdf\xd6\x17\x34\x01\x5e\x8a\x15\xbd\x15\x08\x44\x75\x15\xbd\x50\x5a\x53\xdb\xec\x74\xfc\x9e\x5c\xab\xd7\x1c\x62\x2b\xe4\x3f\x4a\x3a\xb9\xc7\x6e\x19\x32\xca\x7f\x54\x94\x36\x78\xb1\xf7\xe7\x7f\xa0\xd3\xac\x3f\x92\x15\xf7\x9f\x9f\xe9\x6e\x2f\x74\xaa\x5e\xbc\xa2\xec\x3e\xf2\x97\x70\x3d\x71\x20\xac\xd3\x26\x26\xed\x83\xef\xf2\xde\x9b\xe2\x52\x71\x55\x50\x26\xdb\x47\xf1\xd3\xcf\x4b\x3a\x9e\xf0\xb0\x71\x63\x94\x54\x8a\x4e\x1f\x4a\x3a\x36\x67\x4e\x7a\xe6\xbe\x4b\x47\xa6\xf6\xf5\x9e\xa5\xf6\xb9\x60\x39\xc4\x5a\xfc\x12\x13\xc0\x8a\x38\x15\x0d\xb7\x3a\xa4\x97\x67\x63\xb3\xb7\xd2\x74\xe9\xd0\xe8\x04\x19\x6a\xde\xa3\x93\x76\x15\xc2\x64\xb6\xb1\x66\x12\x14\x24\x68\x0f\xef\xab\xb0\xe8\x00\x13\xd8\x9f\x6e\x5b\x83\x44\x62\xd2\x14\x14\xa1\x6b\x2a\x6f\x15\x99\x53\xd6\xa3\x72\x64\x75\x5c\xf4\xaa\x45\x80\x94\x5c\x81\x5b\x7c\xab\x73\xf5\xef\x89\x10\x39\x43\xb2\x95\x91\x1d\xb9\x44\x50\x81\xea\x8a\xbb\x40\x45\xa7\x7b\x6f\x0d\x99\x02\xa9\xfb\x52\x13\x83\x07\x35\x37\x87\x5f\xee\x34\xaf\x14\xce\x21\xa8\x8b\x54\xff\x28\x89\xdb\x4a\x22\xef\x82\x7c\xe2\x72\x82\x0b\xb0\xf7\x4c\xb8\xb3\x38\x29\x27\xd1\x52\x8e\x37\xef\xbb\x5d\x26\x72\xcb\xe3\x6d\x97\x0d\xf0\x47\xa5\x4e\x25\x6a\x88\x85\x58\xb5\x4f\x13\xd7\xda\xf8\xdf\x3f\x3a\x9d\xcb\x1e\x4f\xcf\xda\xe7\x79\x73\xc7\x65\xd7\x00\xbd\x90\xce\x94\x88\xd4\x82\x2f\x85\x54\x2b\xaf\x64\xd0\x31\xf2\x8a\xa8\xfe\x32\x51\x0f\x04\xdd\xed\x7d\x52\x53\x26\x5c\x0d\x50\x2a\x85\xbb\xdf\xd1\x64\x53\x47\xa1\x1c\xb5\xa1\x58\x68\xdc\xab\xec\x82\x87\xda\xaf\x19\x76\xcc\x0a\x77\x3e\xb0\xec\xd2\xce\xc5\xd5\x08\xfe\xd8\xbc\xa7\xe7\x2a\xcf\x91\x95\xfe\x91\x18\xc8\x8e\x24\x49\x92\x6c\x03\xee\x91\x61\x3c\xc7\xc2\xcb\x63\xb9\xc3\x61\xa4\x5c\x61\x5e\x54\xce\x6f\x46\x03\x9d\x1b\xf4\x66\x74\xec\x6f\x00\x5e\x81\xf4\x9d\xf3\xb0\x5e\xee\x36\x19\xd7\xa9\xbc\xce\x33\x42\xa1\x11\x9c\xb5\x84\x8e\x7f\xe7\x7c\xbf\xcf\x5c\x59\xf7\x39\x98\x57\x7b\xae\x08\x45\xf2\x91\xae\xda\x3d\x89\x94\xb6\x7d\xe8\x75\xb8\x55\x5a\x32\x6d\xee\xec\x7e\x8a\x8d\x45\x28\x0e\x80\x25\x82\x24\x63\x38\xf2\xd6\xf9\xab\x6d\x2d\xba\x45\xfe\x5a\xce\xab\x61\x57\x06\xe7\xaa\x6e\x53\xd1\xb7\xc6\x67\xdf\xe6\x85\xf3\xdd\x06\x63\x57\x0e\x82\xd5\x89\xad\x3c\x83\x11\xb6\x1c\x6d\xa1\x3c\x8f\xea\x2b\x85\xca\xf5\x4c\x11\xd5\xd5\x2e\x75\xd4\x7a\xbd\x34\x75\x88\x2d\xcf\x11\x3b\xea\xdb\x3d\x0e\x87\xa2\xb4\x09\x10\x12\xe2\xb5\xac\x50\x60\x00\xf8\x96\xbe\x73\x3a\x59\xd8\x55\x49\x2a\x22\xe8\x1c\xff\xe4\x8a\xaa\x30\xd8\x60\x67\x78\xfe\x63\x19\x66\x07\x89\xe0\x14\x0a\x43\x3d\x66\xdc\x9b\x6a\x38\xfd\xfb\x0e\x13\x57\xa9\xb5\xf2\xdb\x5f\xab\x17\x51\x04\x62\xd7\xe2\xfd\x45\xaf\xb5\xca\x2e\x7a\xd8\x25\x7d\x33\xc2\x48\x22\x62\x62\xa7\x79\x0a\x1c\x84\x59\xc4\x01\xb2\xfc\xaa\xf3\x74\x93\xb7\xb8\xfe\x43\x21\xc0\x52\xdf\x52\x25\xdc\x75\x40\xc3\x9d\x7e\x04\x6e\x91\x73\x4a\x72\x31\x46\x93\x73\x11\x22\xee\x16\xcd\xf2\x6a\x5b\x38\xbd\x59\xc9\x76\xa8\xfd\xe4\x00\xdd\x32\xab\x6c\x3b
\xa5\x9b\x84\x64\x36\x85\xaf\x8a\x7d\xe5\x4e\x7f\x41\x33\x49\x35\x1c\x55\xb3\x7f\xf9\x08\x1a\xd8\x19\xdd\x9f\x58\x5e\x66\x49\x47\x27\xf7\xd5\x98\x00\x40\xb2\xb9\x84\xdc\x1b\x3a\xa8\xd8\xef\x37\xf2\x27\x06\x41\x4c\x75\xe6\x74\xd2\xd2\x03\xab\x8f\x6a\xac\x22\x68\x7b\x0a\xb3\x21\x54\x70\xa2\x86\x9d\x2c\x35\x3c\x75\x95\xcd\x2a\xb1\xa9\xa6\xac\x84\xe9\xf0\xd0\x63\x01\x96\x23\xc2\x12\xa5\xdd\x5b\x4a\xdb\x88\x29\x4b\x57\x96\x14\x69\x92\x4e\x6d\xfa\xa7\xaa\xb6\xba\xfe\x29\x61\xcc\x64\x93\x0a\x1b\x4f\x75\x8e\x7a\x6c\x97\x6e\xa2\xee\x3f\x86\x18\x89\x53\xe3\x29\x9b\x95\x33\x54\x24\xce\x0b\x63\xd1\x96\x44\xc2\x1c\x11\xdb\x4c\x4b\xbe\x12\xd0\xd1\x58\x5a\x3f\x2c\x5c\x78\x6d\xe4\x1b\x33\x18\xbb\xc8\xa1\xa5\x74\xf2\x72\xd9\xd5\x9f\x1b\xb1\x33\xba\x38\x18\x64\xe8\x12\x2a\x60\xd8\x49\x91\x47\x96\xc3\xb7\x1b\x96\xd2\xd1\x0c\x0b\xe1\xc0\xa8\xb2\xd6\x4a\xb8\xaa\x7f\x88\x5d\x5c\xb4\xd5\xd6\xc1\xce\x00\x6a\x34\xa2\x2a\x7c\x42\xdc\x13\x66\xd1\xef\x55\xb5\x9d\x26\x27\x9c\x55\x51\xd8\x31\x34\x55\x36\xd0\xe9\x11\x75\x5d\x69\x23\xda\xc1\xec\x73\xbc\x70\x56\x3b\xc1\x06\xb2\xf9\xa3\xff\xd3\x33\x35\x27\xe9\x14\x7a\x97\x27\x5d\xff\x9f\x20\x3c\x68\x34\x9f\x22\x98\x3f\x94\x48\xaf\xec\xb4\x5f\x7c\xf4\xf0\x81\x03\x09\x06\xa8\x8e\xee\x34\xb0\x0c\xdc\xf7\x28\x0d\x97\x30\xa0\x66\xbb\xad\x46\xec\xcf\x50\x3b\xbf\xfe\x87\xf2\x9f\x02\xd5\xf6\x4d\xa8\xe9\x50\x86\x51\xbd\xb6\x4b\x22\xdb\xa1\x39\xa5\x0f\x33\xf0\x2b\x9a\x0d\xda\x4f\xc4\x5c\x8f\xf9\x1f\xdb\xbf\xd9\xf1\x48\xba\xce\xfc\xd4\x1c\x43\xf6\xde\x32\x4a\xc9\x8b\x24\x36\xb7\xd6\xf0\x8c\x61\x52\xd7\x52\xb4\xfa\x32\x25\xc9\x43\x7a\x29\xe3\x63\x1e\xd5\xa8\x1b\x86\xc4\xbd\x5c\xff\x45\xc4\x85\x90\x26\x77\x0f\xa5\x9a\xb9\x76\x1d\x1b\xdd\x36\x5b\x47\x76\x01\x38\x50\x2c\x5e\x91\x58\xcb\xc9\x13\x52\xe8\x39\xe2\x6a\x3f\x3e\x55\xf9\xf3\xdd\xc3\x1a\x4b\x34\x81\xc8\x1e\x34\x5c\xca\x9f\xa5\x5d\x45\x99\x52\x98\xa0\xd4\xc3\x3f\xeb\x0e\x62\xd9\x27\x78\x65\x25\x22\xd7\x6c\x03\x3a\xb2\x01\x98\x90\xa3\x1d\xcd\xb5\x30\xd7\x44\xb4\xa3\xba\x7e\x14\x42\x76\x1f\x76\x9b\x90\x21\xe0\xb5\x9d\x0b\x02\x23\x09\x7f\xc9\xea\xc2\x11\x50\xe9\x20\x1a\x18\xa3\x1b\x37\x45\xe0\x9a\x9c\x4b\x70\x1c\xa5\x68\xf3\x94\x5c\x49\x4d\xd0\xae\xcb\x54\x3b\x75\x44\xa3\x7d\x54\xd2\x10\xa6\x9c\x98\x30\xb4\xa0\x78\x1d\xc9\xc7\x96\xba\xec\x9a\x48\xba\x9f\xd8\xd2\x87\x5b\x84\xbf\x5d\x49\x16\x87\xa5\x15\x95\x9b\x5b\xd3\x44\x85\xa9\x94\xdc\x35\xd2\xf1\xd7\xcc\x32\xad\x9b\xc5\xac\xb4\xc1\xd2\x0a\xc3\x1b\xef\x42\x32\x97\xa8\x23\xa3\x8c\xf4\x44\x68\x1f\xa6\x08\xd8\x17\x75\xcf\x21\xde\x87\x68\x70\xe4\xc8\x85\x88\x65\x9f\xe4\xb3\x6e\x63\xba\xee\x59\x25\xb9\xbe\x33\xad\xb5\xe7\x2b\xda\x7e\x3c\x09\xbb\xc4\xd6\x17\x08\x6c\xe4\x47\x41\x09\x48\xfe\x5d\x6a\x25\x2a\xba\x68\x4a\x90\xab\x5f\x54\xe2\x0f\x6f\x11\x7a\xe9\x78\x51\x06\x97\x04\x83\x00\xd5\xb4\xb9\x15\xa3\x41\x55\x26\xe6\x21\x96\x94\x65\x6d\x92\xc0\xaf\x1c\x2a\xbe\xe9\x5b\x69\xf7\xf1\x4d\xee\xf3\x86\x8f\x81\x28\x15\x99\xe2\x80\x9a\xc1\x34\x3a\x4d\x4e\xb8\x1c\x78\xa9\xbd\x5d\xca\xec\x2a\x95\xb9\xb9\xe9\x58\x79\xa9\x62\x90\xc2\x3c\xd6\x8f\x2e\xdc\x66\x51\x29\x7e\x1e\x06\xe2\x79\xa0\xfb\xa7\x44\xdf\x51\x48\xc2\xab\xd0\xde\xe4\x05\xa3\x24\xaa\x68\x5e\x2d\x22\xec\x25\x0e\x93\x12\x56\xd4\x07\x74\x97\x36\x4a\x54\xaa\xb6\x7f\x6e\xdd\xc8\xde\x5d\x5d\x59\x0b\xd8\x28\x64\x2b\x5a\x43\xd1\x7b\xed\x56\x6a\x5c\xea\x8f\xaf\x87\xc8\x44\xef\x99\xde\xf6\x31\xc8\x6e\xbc\xe7\x9f\x88\x7b\x24\x76\x78\x43\xc1\x38\x71\x90\x25\x7d\xc6\xee\x9c\x3e\xe0\xf9\xed\xba\x18\xc4\xff\x8b\x9b\x47\x8f\x9f\x14\xf6\xd4\x9f\x57\xac\x39\xd0\xc6\xe0\xab\x21\x3b\x0f\x5b\x88\x08\xb3\x43\xe7\xbb\xf9\x3f\x2a\xdd\x0f\xdf\x61\x44\x0b\x9f\x2
9\x40\x13\xe8\xc0\x1b\x03\x6f\xde\xf5\xed\xca\xdc\x3b\x58\x71\xc7\x1c\x12\x1a\x4f\x98\xf5\x42\x6a\x07\xac\xe5\xcf\x73\x58\x88\x8d\xb8\x3d\xca\x3e\x0f\xb4\xa6\xe2\x7b\x2b\x2c\xd0\x82\x14\x74\xcd\x6a\x1d\xca\x37\x14\x1a\x99\x25\x64\xe0\x4d\x4d\xde\x4c\xc7\x00\x27\xdd\x6a\xc8\x89\x75\x07\x6b\x3f\x8a\xd8\x6d\x83\x91\xd4\x40\x3b\x25\xe9\x07\x29\x7a\x0b\xed\x0c\x04\x8c\x91\x2d\x33\x51\x94\x60\x95\x64\x11\x1b\xcf\x2b\x49\xe9\xc2\x63\xe0\x67\xd0\x09\x29\xbe\x1f\x37\xf1\x07\xcf\x3a\x05\x55\xaf\x7c\x7d\x9b\x73\xa2\x74\x89\x66\x87\xad\xe9\x82\xd9\x4f\x45\x7f\xda\x91\x5f\xe5\x59\xd8\xfe\x08\x19\xee\x39\x2b\xc2\x19\x9d\x34\xaf\xf3\x26\x91\xfa\x00\xad\x88\x74\x97\xc3\x88\xea\x74\xcb\x3b\x1f\xd9\x2c\xee\xef\x8d\xee\x1c\x27\x98\xe9\xbe\x53\xb0\xe2\xe4\x9a\x5b\xa5\x7a\xb8\xed\x96\x55\x84\x7d\xa1\x98\xc8\x0b\x42\x0d\x28\x62\x3d\xce\x30\x87\x3d\x4e\x32\xd3\x8e\x93\xb1\x80\x07\x64\x8a\xfe\x2a\x2b\x63\x1c\x8f\x95\x6c\xbd\x03\x84\xdf\x3b\x7b\x25\x39\xef\xa4\xfe\x80\xc8\x35\x2b\xae\x0f\x8a\x43\x7f\x1e\xec\x8c\x5e\x5c\x60\xe1\x4e\xe8\x87\xe9\x0c\xd7\x4a\x0a\x00\xdd\xa3\xdc\x8f\xf4\x57\xb7\xc2\xb0\x6f\xbe\x8c\x47\x52\xef\x2e\x81\x8d\x47\xfb\xa3\xf0\x20\xd0\x0e\xc0\xdb\x17\x82\x22\xe1\xdd\xc9\xca\xb0\xea\x78\x02\x71\xc6\x60\x40\x72\x0f\x19\x1a\x85\xae\x35\x0f\x54\x84\x57\xa2\x6c\x87\x4e\xa2\x3b\x9c\xd1\x9a\xe8\xfd\x24\xd5\xf0\x9e\xa2\x1b\xb0\x55\xc2\x04\x5b\xf9\xb2\xd7\x66\x6b\x1e\x45\xec\xe5\xfe\x2c\xad\x24\x64\xad\x67\x85\x60\x93\xe7\x38\x2e\xf5\x88\x3e\xdf\xc3\x85\x3a\x6a\xa2\x29\xcd\x7f\x9b\x7f\x11\x9f\x1b\x3a\x11\xd2\xf8\xb8\x2b\x2f\x1b\x77\xee\x81\x19\x3c\x42\x39\x98\xcf\xc3\xcf\xff\x28\xed\xd1\xf4\x53\x48\x0e\x62\xc2\xfd\x2e\xab\x02\x97\xf0\x3c\xd2\x44\x80\x1a\x8f\x25\x59\xfd\x97\x0e\x38\xac\xe1\x7e\xf9\x95\xd5\xa5\xf6\x4f\x36\xb0\x39\x85\x7e\x3d\xb6\x2d\x22\x44\xec\x9e\x47\xac\x54\x93\x4e\x11\x1b\xcc\x5f\x3d\x63\xda\x94\xdd\x93\x5a\x98\xfe\x06\x23\x75\x14\x2a\x48\x7e\x9b\xb2\x6f\xaf\xe0\x7a\xa4\xee\x1b\xfc\x20\x58\x39\xd9\xce\x83\x9d\x3b\xa4\x37\xb7\x56\x1f\xfa\x8c\xec\xd8\x65\x7d\x6f\xab\x2c\xd6\x91\xbb\xa3\x1b\xdc\x65\xd5\xbb\x62\xf4\x5c\x70\x19\xbb\x1f\xc3\xb8\xf2\xa4\x2f\xff\x52\xc2\x9f\x88\xe0\xdf\x52\xa6\x75\x93\x90\x7b\x29\x21\xb3\x5b\x5a\x7c\x84\x6d\x2c\x9c\x06\x6c\xcb\x89\x5e\x79\x0d\xfa\x86\x8b\xda\x59\xe1\xc9\xcb\x68\x26\xef\x74\x99\x57\x87\x6f\x96\x38\xe5\x96\xdc\x08\xba\x0b\x1d\x57\x96\xe4\xcd\x9c\xbf\x22\x5f\x90\x3f\x44\x7a\xfb\xc8\x6a\x1d\xab\x39\x7c\x37\x35\x2b\x67\x61\xaa\x57\x78\x22\x39\x63\x52\xba\x9b\x4a\xfb\xdb\x49\xbd\x7a\xdb\x3f\x50\xa0\x8e\x7d\xc4\x02\xa9\x86\xdf\x6b\xd5\x01\xef\x1d\xb5\xfc\x4e\xd8\xd6\xf9\x4d\xde\x77\x7b\x38\xe7\xf0\x9a\xa3\x84\x13\xbd\x64\x82\x21\x25\xc5\xf4\xf6\x6b\xab\x86\x83\xda\x9f\x0f\x03\x4d\x4e\xd0\x93\x87\x8d\xad\xf1\xd8\xd4\xfa\x6d\xdf\x36\xc8\xab\xc4\x32\x53\xc4\xd6\x21\xda\x90\x37\xbc\x17\x1e\xaf\xd6\xc4\x7b\xeb\x43\x87\x11\x88\x47\xb5\x84\xe9\x67\xcc\x03\x53\x1a\x76\x2f\xa6\xbd\x38\xa7\x7e\x88\x09\x0a\xae\xa4\xf7\x02\xad\x34\x57\x9e\xb1\x28\x4f\x6b\x8c\x5c\x43\x20\xc5\x7c\xd4\xa4\xec\x1f\x30\x06\x58\xe1\xb5\x16\xc2\xc8\x84\xfe\x31\x5c\x38\xe3\x53\x7a\x03\x78\xd7\xda\x35\xc2\x44\x42\x75\xdd\x18\x64\xc7\x73\x54\x3f\xf5\x59\xd1\x76\xf4\x13\xe7\x59\xff\xe6\x37\x2d\x01\x30\x64\x60\xcf\x29\x06\xf6\x53\x11\x60\x30\x61\x97\x2d\x54\x51\xa1\xf9\x8c\x55\xf3\x09\x97\xe8\x55\x05\x5e\xa7\x9b\xec\x1d\x11\xe1\x7a\x12\x9f\x91\xcd\x07\x95\x40\x33\x94\xbb\xcc\x47\xd4\x2e\x11\x11\x82\xde\xb6\xbb\xa6\x70\x76\xcf\xe4\x91\xc2\x46\x98\x7c\xc9\x33\xcc\x26\xf0\x5f\x1e\x0c\xc9\xf6\xe7\x12\x69\xfb\x25\x0b\x01\x7f\x89\x27\x4d\xfa\xce\x50\x3f\xc6\xf3\xad\xe4\x03\x54\xc9\x36\x5b\x5d\x
dd\x80\x71\xa8\x30\x04\xc5\xe0\x94\x51\xd6\x50\xae\x50\x15\xf5\xa4\x4d\xbe\x66\xd5\x74\x98\xa8\x28\x20\x3a\x9b\x96\x1c\x56\xa1\xbc\x8d\x08\x14\x8b\x35\x65\xb6\x0f\xe1\x75\xeb\x46\xef\x7e\xf4\xce\x21\x96\x01\xfe\xc5\x64\xa5\xc1\x76\x5a\x61\x4a\xda\xb1\x14\xed\x3f\x92\xb2\xf6\x08\x81\xf2\x0a\xa8\x5e\x08\xa3\x67\x6b\xef\xe2\xc7\xe8\xcf\xc2\x0d\x94\x9b\x5f\x15\xde\x86\x3d\xdf\x2e\x72\x8f\xdb\x5d\x91\xf8\x58\x4e\x01\x43\xb6\x0e\x4f\x7a\xd2\x1e\x06\x57\x5e\xf7\xd2\x1f\x6b\x94\x56\xb7\x0f\x9c\x05\x46\x55\xe8\x5a\x4a\x3b\x84\x25\xe2\x8f\xd6\xa5\x90\xc2\xcb\xf7\xbb\x47\x2d\xa1\x4a\x0e\x64\xac\x4b\x71\x44\x87\x2a\x50\x6f\x1f\xb1\xbc\x5b\x95\xa0\xd5\x7d\x42\x2d\x61\x67\xac\xfc\x01\x6c\x1b\xaa\x3b\xc1\x9e\x8f\x9e\xde\xbb\xb9\x07\x74\x51\x76\xc5\x2d\xf3\x10\x65\xd9\x1c\xe9\x99\x4e\x9f\x94\x5a\x29\x69\x8d\xeb\xc4\x47\xea\x3c\x16\x38\x42\x8e\x9a\x43\x29\xfe\x83\x1f\xb9\xbd\xb6\x6d\x54\x25\x68\x63\x23\x61\x2f\x60\x81\xee\x4f\x42\xdd\x5d\xd1\x4d\xcd\x10\xbf\x34\xa8\xbf\x13\xd6\x8d\x3e\x20\x4a\xf4\xc9\x99\x02\x9d\x32\x0a\xed\xa8\x2a\xee\x96\x5e\xbc\x9b\x2f\x17\xb4\xb2\x8f\x29\x9f\x8f\x6e\x8e\x78\x4b\xd8\x49\x0d\x51\xd9\x31\x6b\x12\x33\x6e\xf4\x08\xcf\x06\xf7\x74\xed\x43\xa3\x83\xc8\xed\x3f\xda\x83\x11\x94\x94\x68\xc4\x6c\x83\x41\x9e\x5a\xdc\x48\x79\x64\x92\xcf\x6c\xc9\xea\x25\xac\x02\xe8\x8e\x7b\x0d\x56\xb7\xb8\xd6\x5d\x44\x42\xfa\x26\xe5\x3e\x76\x59\xbd\x79\x3e\xa4\x73\x13\xa2\x0f\x1f\xc2\x9b\x47\x5d\x86\xce\xd6\xf7\xbb\x76\x95\x77\xee\x94\xc1\x53\x33\x79\xd4\x9d\x0f\x9c\x35\x6a\x97\x73\xd4\x9c\x29\x9e\x3f\x52\xdf\xf1\x9d\x73\xc4\xc5\xda\xec\x9d\x49\x68\xfd\xa6\xe5\xfc\x86\xee\xe1\x53\x27\x3f\x25\x3f\x9e\xfe\x8b\x01\x5a\x1d\xb4\x36\x28\x9b\x2e\x81\xf7\x1c\xa9\xf5\xef\x86\xcf\xd0\xf8\x20\x47\xd1\x07\xe8\xd8\x75\x55\xb6\x34\x3c\x68\xd4\xcf\xce\x64\xb2\x2f\x7f\x27\x8c\xfd\xe2\x16\xee\x55\xb0\xf1\x60\x0c\xdd\xab\xd8\x05\xee\x9f\xf0\x35\xbf\x44\x0d\xb0\x9f\xe8\x96\xfa\x6a\x75\xe2\x1d\xef\x0e\x83\x55\x16\x83\xbb\x1a\x7b\xbb\x15\x8d\x65\x8f\x66\xa0\xfd\x91\xf7\x30\xe8\x5a\x73\x7d\xc5\x38\x55\xf2\x91\xec\x6b\x12\x09\xbb\x12\x55\xf5\x1a\x51\xdc\xad\x5b\x17\x0e\x0c\xf8\x07\x5d\x6b\x77\x04\x8c\xdf\xe5\x60\x61\xb8\xf5\xec\x9f\xdc\x65\x6b\x7b\x51\x1b\xa7\x6b\x96\x75\xc2\xaa\xce\xc4\x1d\x8e\xbe\x56\x42\x36\x13\xc3\x7b\x0e\x61\xcc\x2d\x52\xd7\x1d\xc9\x0c\x48\x5e\x6a\x56\x0c\x36\x5f\x55\x5a\x15\xb8\xdb\x03\x37\x3d\x17\xac\x82\xd8\x2a\x0c\x17\xda\x0b\xf1\xa1\x57\x0c\xfc\xc8\x8d\x8e\xc7\xa0\xab\xd0\xb1\x79\x65\xfb\xc7\x2e\x4c\xd8\x20\x54\x7d\x8a\x09\xbd\x7f\x54\x17\x1b\x4f\xaa\x14\x77\x31\x40\xa3\x71\x77\xc4\x72\x65\x23\xb2\xab\xed\x0a\x0a\x2f\xb1\xbd\x55\x79\x5b\x63\x26\x5f\x76\x7d\xcc\x4c\x1a\x2d\x30\xe7\xf4\x11\xd9\x1c\xb6\xbd\x20\x63\x73\x65\xa6\xd7\xc5\x66\xf2\x35\x33\x99\xd2\x8e\xa1\x8a\xba\x40\x41\x92\xa9\xac\xcd\x09\x61\xc0\xf0\x0f\x9d\x74\x0a\x8b\x50\xd4\x95\x7a\x87\x53\x6c\x56\x9d\xe8\x29\x7f\x86\x57\xe8\x98\x20\xc0\x6a\x5a\xa8\x4b\x61\x00\xb6\x3a\x86\x6d\x84\xeb\xca\xd5\xcc\x04\xdd\x49\xdd\x4c\x60\xeb\x2a\x4c\x73\x84\x9f\x82\xce\xf9\x93\x90\x31\xca\x81\x99\x5d\xde\xd5\x0c\xdb\x10\x11\x14\xed\xf0\x57\x59\xc5\x02\x16\xe0\xbb\x4e\x6a\x23\x5d\xe8\x99\x53\x24\x97\x00\x30\xb2\xf8\xfc\x24\x39\x40\xdc\x42\x60\x3c\x3a\xa1\x63\xb0\xff\x19\x7b\x4b\xcc\xe2\x59\x81\x3d\x67\x04\xe0\xdb\x49\xd2\xa2\xdf\xe7\x95\x8c\x4e\x85\x21\x79\xbb\xb2\x65\x57\xd1\xfa\x61\x4f\xdc\x66\xb1\x4b\xe9\x15\x1f\xe9\xe8\x87\xa8\x82\x56\x8b\x32\xb0\xca\xe6\x22\x68\xc2\xbc\x7b\xc8\xd0\x6d\xba\xa4\xb1\xc7\x08\x1a\x29\x5b\x9c\x00\xb9\xbd\x98\x2c\x68\x5c\x3f\xd8\x88\x64\xe2\xbb\x36\x32\x68\x48\x72\xdf\x70\xb8\xba\xc2\xdf\xf7\xc1\x8a\x0b\x30\xfc\x29\x8b\x88\
x9b\x97\xe3\x0f\xe1\x3c\x94\x39\x6b\x47\x45\xd2\xc3\xc1\xf4\x08\x6c\x05\x7f\xd9\xdc\x17\x7f\x04\x17\xed\x48\x9c\x8a\x8e\x5f\x10\x37\x12\xe7\x07\x99\xf3\x8a\x8d\x9a\x02\x6f\x3d\x12\xbd\xaf\xed\xa8\x58\xfa\x1b\x8e\x6c\x7e\x31\x16\x69\x22\xc1\x2c\xf5\x4f\x6b\x15\x88\xb8\xa6\xc8\x05\x18\x29\x53\x2c\xbd\xbb\x5a\x78\x19\x1b\x97\xd8\x5e\x82\x46\x64\x0f\xd3\x9d\x7b\x7e\xff\x43\x45\x75\x2c\xd9\x87\xb3\x60\x9c\xbd\xa5\x0b\xed\x99\x32\x3a\xdb\xae\xf3\xd3\x6d\x95\x1e\x4d\x9b\xe3\xd3\x94\xb8\xb9\x3f\xf0\x2f\xd4\xe1\x0e\x03\x0a\x15\x8b\x76\x65\xc5\x16\x62\xc3\x62\x12\x37\x87\xc5\xdb\x4c\x34\x1e\xc5\xac\x5a\x89\x73\x2f\xc3\x5d\x58\x2c\x2b\xef\x61\x78\x39\x8c\xb0\xd9\x62\xa9\x0b\x1e\x6d\x4c\x16\xb2\x57\xb4\x28\xff\x57\x93\x03\x88\xae\x8e\xa3\x76\xce\x76\xef\x03\x7b\x95\xe0\xc8\xb0\x3f\x69\x43\x45\x6a\xf5\x03\x3d\x77\xb7\xf4\xf9\x35\x25\xa8\x6e\xf3\xee\xb5\xfc\x29\x1a\x8f\x55\x9d\x03\xfb\xe9\xe6\xcb\x77\x4b\xfe\xc7\xfc\x65\xc9\x3a\x6d\x91\xc7\x35\x0c\x2d\x4d\x9f\xdd\x04\xd0\x5a\x59\xec\xfa\x0c\xb6\xd6\xa0\xae\x7a\x07\x61\x0d\xef\x6b\xf7\x6f\x22\x75\x95\x55\xad\xcb\x1e\x34\xb3\xc2\xc8\xa2\xd1\xbd\xad\xee\xf5\x34\x87\x1c\xc0\x27\xed\x05\x58\xf3\x6e\xea\x27\xaf\x0c\x86\x59\x73\x5f\x3f\x7a\x50\xd7\xaa\xe1\x70\x7e\x3b\x75\xba\xfa\xb6\x7b\xac\xec\x5f\xee\xc8\xf8\x24\xaa\xb5\x2a\x73\xdd\x96\xc7\x1c\xd9\xb5\x5d\x92\xdc\x4e\x5a\x68\xab\x94\xc7\x48\xf7\x9a\x58\x33\xef\xcc\xd8\x87\xf0\x8e\x76\x19\xab\x4c\x2f\xe0\xcf\x11\x61\x5f\x30\x95\x50\xea\xe0\x42\x46\x27\x47\x4d\x27\xb4\xcc\xd1\x9d\x04\xa7\x7f\xe4\x2c\x0d\xbf\xe0\x10\xe3\x29\xd5\x67\x49\x84\x75\x97\x74\x70\xb7\xfe\x57\xa6\x66\x85\xf1\x97\x7f\x91\xaf\x7f\xd1\xaf\x9f\x61\x57\xe6\xe7\x6a\x6b\x04\x4f\x1f\x5b\x3b\x9c\x32\x8a\xf2\x98\xc6\xb8\x6e\x85\x71\x8a\xae\x9a\xfd\x23\x4d\xd0\x0a\xf2\x68\xeb\xff\x92\x7c\x6d\x99\xc2\xca\x6a\x9a\xc7\xb0\xcc\x08\x9b\x2d\xfb\x81\x0a\xbb\xae\x59\x90\xa5\x4d\x07\x4f\x35\x43\xe7\x65\x69\x15\x32\xfe\x6b\x52\x9a\x45\x50\x61\xa6\xfc\x4f\xb0\xed\xe7\x97\x75\x1b\xd5\x6d\x0d\x9f\x63\xd4\xbe\xe4\xbb\xda\xd2\x4e\x07\x0e\xd7\xef\x28\x82\x4c\x4e\x62\x48\xb9\x90\x09\x08\x02\xad\x6b\x23\xc2\xe6\x67\xfc\x93\x41\xbe\x04\xb0\x37\xce\x8d\xdf\xfa\xd9\xd5\x57\xfd\x8a\xb1\xaa\x38\x7f\x9b\x9e\xdd\x39\x43\xce\x27\xa3\xcc\x75\x47\x45\x9f\x8f\xf2\x96\xc6\xb1\xa7\x34\xf0\x25\xb2\xaa\xc8\x19\xd8\x1c\xb3\x14\x96\xb8\x60\x84\x30\x81\x53\xf0\x98\xba\xa2\xea\x58\x3e\x21\xb0\x9e\xf3\xea\xf4\x84\x96\xc2\x9d\x2d\x4e\x72\xe4\x9e\x45\x41\x15\x4f\x60\x61\xfb\x8f\xa3\x0a\x00\xfd\xc1\x36\x25\xeb\xeb\xf4\x52\x82\x06\xea\x5f\xe6\xea\x82\x68\xf0\x80\x69\x32\xbd\xea\x22\xbe\x09\x57\x97\x40\x18\x86\x49\x91\xbe\x7a\xbf\x67\xb5\x42\x68\x04\xdb\x76\x5c\x5e\xde\xc6\x2e\x38\x0f\x71\xa2\x67\xad\x91\xd8\x0c\xc9\xc4\x0f\xed\xcc\x2c\x9d\xd9\x80\x75\xd7\x8f\x1e\x30\x1b\xa4\x56\x76\x0a\xe7\x6e\x66\x2c\x30\x72\x31\xa6\x1c\x91\xb9\xe7\x16\x76\x75\x23\x7c\x5f\x59\xc0\x0d\x50\x82\x89\x68\x4a\xee\x2c\xf8\xf6\xfa\xb0\xc8\xb3\x96\x74\x11\x36\x59\xf4\xdb\x48\xd4\xa1\xd8\x36\xf3\xdc\xc5\x7c\xb4\xb9\x62\x8d\x5a\xf6\xe6\xe9\x63\xfb\xa9\xad\xd9\xa2\xf4\xd0\x3b\x66\x0d\xa8\xf9\xab\xd2\x7f\xd2\x73\xe3\xe1\x94\x55\x6d\xdd\xfb\x33\x87\x43\x46\xfd\x59\x35\x9f\xa1\x1a\x3c\xef\xe8\xe6\xf7\xb0\x1d\x96\x5f\x07\xbe\x07\xe5\x6f\xec\x10\x7d\xb2\xbd\xb9\x0f\x72\x5d\x2e\x6b\xb9\x28\x38\xb3\x21\x82\xbf\x7d\xf7\xd2\x17\x82\x20\x7d\xb0\x49\xb0\x8a\xa8\xbf\xd9\x33\x47\x38\x7b\x36\x3e\xab\x62\x99\x96\x62\xb0\x47\x9d\x2c\x42\x0e\xca\xe4\xa0\xd9\xe2\xaa\xfa\x7e\x45\x99\x3d\x09\xc5\xa0\x60\xe6\x71\x53\x3c\x48\xa4\x65\xf7\x29\xdc\x47\xa0\xa7\xe1\xba\xde\x7d\x76\x4a\xfa\x6a\x70\x77\xc1\xaa\x25\xe3\xf6\x24\x8e\xe0\x91\x40\x6c
\x8a\x36\xfa\x6a\x8d\xa6\x66\xcd\x72\xc3\xb0\x72\x37\x3a\x9e\x5b\xab\xba\x61\x59\xa0\xe2\x79\xce\xb2\xed\xcb\xb3\xaa\xe8\x25\x8b\xcc\xd0\x85\x1f\x51\x57\xb4\x85\xef\xf0\xad\x45\xd1\x6c\x9f\x51\x83\xf0\xca\x73\xa3\x7b\x5a\xf9\xc5\xa3\x56\x6d\x70\x6c\x60\x25\x39\x87\xff\x98\x4a\xcf\x61\xbf\xdd\x4d\xaa\xe2\xed\xc8\x14\x5f\xbb\x1e\xd5\xd3\xb4\x4f\x37\xf1\x98\x9b\x8f\x60\xc0\x10\x1d\x35\x07\x0d\x6e\xf0\x2b\xe1\x2e\x4a\x15\x9e\xe2\x15\x7e\xa3\x40\x0b\x51\x85\x37\xef\x99\x4e\x56\x36\x1d\xe6\xf9\x30\x2d\x81\x45\x31\xab\xe9\xb5\xb0\x37\xea\xaf\x4d\xc2\x66\xfa\x2f\x2e\xd3\x74\x90\x8a\x92\x8a\x2c\xfe\xec\x92\x17\x9b\x36\xff\xd1\x55\x04\x92\x47\x16\xdb\xab\xa8\xab\x09\x25\x0b\xeb\xe6\x5e\x2d\xca\x2f\x16\x0c\x86\x64\x7d\x03\x5c\xfa\x12\x54\xfb\x26\x30\xaf\x59\x05\x97\x0a\x1a\x90\xaf\xe6\x9f\x42\xd8\xe0\x63\x35\x85\x6f\x3a\xde\x1f\x5e\x48\x3e\xb1\x44\x30\x49\x58\xda\xa3\x51\xba\xa9\x30\xc6\xfe\xe0\x17\x1e\x02\x95\xe3\xf9\xeb\x97\x46\x17\xd5\x7e\x4a\x2b\x7d\x2e\x8d\xf5\x4c\x8a\x05\x1f\xb2\x27\x69\x97\xb7\x4b\x2a\x99\xcf\xfb\xb6\x05\x32\x2c\x06\xb3\xf4\x6d\x0c\xd5\xd3\xdb\x7e\xbb\xe8\x92\x6d\xd7\xb1\x45\x9c\xfc\x0c\x36\x10\x73\x12\x0a\xcc\x74\xd2\x4f\xeb\x0d\x8e\xd7\xa6\x73\x09\x81\xc7\x22\xfb\x32\xe8\xca\xea\x75\x2f\xdb\xe7\xaf\x5f\x79\xdc\x6c\xbe\xe0\xca\x4d\xb2\xee\xba\x72\x03\xfe\x92\x08\xc8\x96\xc6\x4a\x4e\x6f\xe6\x14\x54\x57\x32\x6e\xeb\x44\xdb\xe7\xbb\x9e\x36\x21\x78\x6f\xd4\x56\x8c\x43\xe5\xb5\x62\x6a\x6e\xc0\x6d\xac\x9f\x27\x38\x9a\x2d\x73\xa3\xd7\x2a\xc1\xff\x86\x40\x6f\x8d\x60\x64\xd9\x0e\xd9\x74\xcf\x02\xfd\x42\x6a\x05\xc5\x74\x77\xbd\x55\xde\xba\xd9\x36\x87\x27\xed\x6f\x30\xa0\x55\x00\x58\xb6\x39\x24\x90\x67\x6e\xd5\xee\x5d\xe4\x13\x79\xb9\x0b\x8b\x9a\xc0\x94\x4e\x72\x70\xa8\xd0\x6e\x95\xef\x9d\x2a\x35\x7e\x28\xe5\xe3\xa6\x4d\x71\x75\x77\x04\xf4\xf5\x8b\xb2\x0a\x90\xc1\x17\xfd\xd2\x23\xf7\x73\xd2\x50\x05\x78\x79\x87\xf7\x09\xc3\xa1\xaf\x02\x46\x9d\x0f\x86\x96\xa1\x08\x49\x78\xc4\x38\xff\x15\x51\xc5\xae\x19\x5f\x26\x05\x8b\xb3\xad\x9f\x8a\x58\xee\x45\x4d\xbc\x58\x5e\x5d\x93\x9b\xd5\xf8\x08\x19\xbd\xac\xa5\xe7\x97\x3a\xb9\x92\x8a\x8a\x6b\x4a\x65\x25\x8b\xec\xac\xcd\xd7\x25\x34\xcd\xe6\x1b\x16\xd5\xa3\x2a\x11\x6f\xe6\x92\x04\x78\x41\x3b\xc1\x77\xdd\x4f\xf5\xd7\xc1\xad\xb2\xbf\x44\x55\x2d\x37\x5a\x28\x0d\x53\x0b\x71\x0e\x8e\x1c\x86\xf0\x55\xec\x72\x50\x8d\x69\x2e\x84\x82\x9a\x75\x74\x47\x8b\x8b\xb3\x32\x90\xc4\x8e\x76\x8b\x0f\xb2\xb7\x57\xcb\xbb\x85\x01\x16\x56\xd2\x3e\x3b\x88\x04\x7c\x51\xa1\x0c\x3d\x07\x1f\x65\x57\x87\xa0\xf8\x73\xd6\x6f\x7d\xfe\x5a\x75\xa7\x73\x19\x5a\xf0\xcc\xe6\xa7\xc0\xa9\x84\x49\x52\x7f\x1f\x4a\x99\xe1\x0c\xe4\xc5\xb4\xd7\x92\x6a\x91\xd6\x93\x12\xb7\x43\xd1\x30\xb0\xae\x53\x64\x20\x2b\x0f\x74\x77\xfb\xf9\x48\x2a\xb5\x95\x98\x86\xbc\x46\xb1\x7d\xd1\x48\x63\x55\xcd\xc2\x19\x7c\x48\x1c\xc1\x8d\x3d\xd5\xd8\xdd\xa9\x06\x38\x9e\xdc\xf2\xdb\x11\x1b\x63\x38\xb0\xb2\x93\x6b\x67\x47\x14\xbb\x75\x0d\x4b\x17\x77\x52\x7e\xb0\x8d\x3b\xc9\x59\xf1\x5c\x94\x64\xc3\x9f\xb6\x54\x3b\x31\xbc\xfc\x0d\x38\xc2\x6a\x62\x9d\x8b\xe5\xdc\x66\xf5\x7f\x4a\x0f\xaf\xf6\x99\xdc\xfe\xbd\x84\xb6\xdc\x6d\x61\x9c\xc7\x5b\xb8\xcd\xb2\x9a\x4d\x39\x9b\xfb\xac\xc8\x02\xd8\x30\x22\x50\x8c\x45\xae\x7f\xb2\xbd\xb1\x63\x3f\x11\xf5\xbb\x55\x92\x83\xe0\xb7\xc8\x96\x65\xb9\xdb\xc4\xe6\x96\x8b\xa2\xf4\x46\xdc\x20\xda\xfa\xba\x13\x6a\x9c\xca\x32\x79\xf8\xe6\x7f\x2c\x60\xd4\x22\xb5\x92\x8d\x74\xe1\x19\xc2\x37\xd5\xc6\xb3\x2b\xcb\xfc\xf2\xa6\x82\xd3\x81\xad\x0d\x04\xf1\x97\x0a\x63\x74\x3c\xd8\xda\x5d\xb9\x29\x41\x3f\xf7\x62\xc7\x36\xaf\x53\xd4\x4f\xee\xff\x22\xad\xe6\x49\x0e\x1a\xac\x5d\x9a\x6f\x79\x5d\xa3\x7b\x2b\xaa\x32\x8
a\x60\x71\x70\xef\x24\x41\x67\x2f\x76\x18\x98\xba\xfb\xcf\x11\x7d\xac\x52\x6f\xee\xef\x14\x01\xdb\x29\x9a\x3b\x68\xf9\x46\xf6\xe1\xe8\x08\xf2\xaa\x8c\xba\xb5\xf2\xbc\x5d\x09\x23\x5b\x29\x6c\xab\x04\x39\xbc\xb3\xd7\xbc\x75\x60\xc0\xfe\x5a\x85\x7e\xba\x43\x7a\xf4\x76\x1b\xfb\x0e\x51\x6c\xd1\x00\xc5\xd8\xbf\x7a\xc0\xd5\xcd\xc9\x11\x32\x43\x66\x5d\xd3\xfb\x61\xe5\x17\x84\xe0\xc4\x45\x14\x60\x56\xbe\xb6\x16\xde\xf4\x64\x27\x6c\xe7\x05\x76\x13\x0c\xaf\x59\xd6\xce\x36\xf0\x2a\xc1\xc1\x6d\xda\xd1\x00\x34\x60\x8a\x4f\xaa\x4f\x15\xc7\xbb\xfb\xf4\x3c\x54\x46\x47\x11\xbd\xfd\x6e\xf5\xc6\x33\xbf\x8b\xf4\xd4\x31\xb2\x76\xc0\x2e\x7c\x40\xfc\xf9\x8d\x16\xb4\xb7\xa4\xf5\xa6\xc2\x24\x69\x41\xd8\x14\x2f\x2f\x38\x23\xd9\xb5\xc9\xb3\x61\x39\xa5\xf3\x2c\x84\x89\x16\xbb\x60\x16\xf9\xe0\x0c\x11\xb1\xbd\x27\x5e\x0f\x0b\x5d\xe3\xbd\x5a\x5f\xd3\xa4\x8a\x5c\xfd\x19\x2b\xbc\xbf\xb2\xb9\x09\x80\xc0\xb3\x1d\x23\xe0\x3b\xe7\x96\xcf\x23\x5c\x1a\xe6\xbe\xe8\x22\xdb\xd0\x2e\x5b\xbc\x9b\xb9\xcf\x60\xa8\x79\x55\xf9\x89\x30\xc7\xa1\x88\x44\x5c\x0f\x76\x96\x57\x65\x1f\xd8\x28\x72\x1a\x0b\x1a\x30\x1c\x0d\xe9\x50\x79\x2d\x47\x4a\x64\x5b\xab\x87\x6c\xef\x85\xcc\xe2\x5d\xe5\x9a\x4d\xaf\xaf\xe8\x3a\xb3\xcf\x0d\xf3\xc9\x10\x89\x0e\xcd\xde\x32\x0f\xaa\xcb\xbb\x10\x8b\x7e\x5a\xb6\x8f\x3a\xca\x03\xa9\x42\x13\x84\x3d\x99\x25\xf4\xa4\x7e\xf4\x18\x26\x0b\x63\xfd\xce\x0c\x00\xa2\x89\xce\x59\xf2\xa5\xad\xd3\x08\x78\x9f\x95\xb3\x62\x39\xc6\x13\xb4\xf8\x5d\x2e\x94\xd7\x1d\xb4\x63\xe5\x7a\x57\x59\xb9\x5f\x57\x92\x87\x7b\x98\xe2\x8e\x45\xfa\x18\x8f\xfa\xf6\xda\x5a\xa7\xcb\x38\x97\x73\xa4\x2a\xd4\x7b\x36\xb4\x72\xc7\xef\xc5\x57\xc7\xc8\xcf\xf8\x51\x76\x99\xf9\xd7\x8d\xae\x5c\x12\xc7\x96\x5b\x32\xa6\x96\x14\x9e\x24\xc1\x1d\xed\x57\x18\x55\x92\x8f\x9a\xd1\x9e\xcd\x1f\xd1\x2a\x70\xac\xb6\xbd\x3c\x6c\xd0\x33\xb5\x5f\xf2\xe1\x5d\x68\xda\x34\xb1\x25\xed\x5e\xf0\xfe\xd4\xb5\x2a\xb2\x08\xb2\x2e\x94\xd8\xfe\x00\x78\xf9\xf8\xd7\xf2\x9c\xf7\xa6\x59\xb4\x1d\x64\xbf\xf0\xee\x3e\x5a\xda\xbd\x62\xe4\x9e\xbe\x3d\x7b\x44\xdf\xda\x07\x81\x96\x64\x61\x08\x19\xdc\xa0\x9e\x69\x45\x7e\x02\x7b\x75\x7f\xea\x41\xe4\x07\xe6\xb0\x21\x77\x57\x8a\xe8\x45\xaf\x17\xb3\x10\x9c\x7e\x21\x6a\x32\x3e\xb1\x1e\x2a\xc0\x6b\xa8\xa2\xde\xc3\x21\xc0\x2b\x6d\xcc\x91\x2c\x44\x31\x14\xdf\x14\x21\xe4\x2a\xe7\x21\x11\x93\x7c\xf5\x97\xad\x34\xdf\xee\xca\xce\xe1\x4d\x16\xce\x30\x4b\xb8\x31\xcc\x48\xcb\xd4\xbd\x0b\x21\x80\x57\x78\x20\x23\xb2\x9c\x5d\x69\xec\x52\x36\xf2\x90\xcf\x1f\xb1\x1e\x6d\x8c\xad\xc8\xc7\x7e\xbe\xc5\x5d\x06\x9b\x4d\xb8\x40\x66\x66\xd3\xb4\x85\x1e\xb5\x67\xf1\x3f\xfc\x15\x92\x8d\xc3\x31\xb2\x07\x7c\x1f\x99\xe6\x3a\x5f\xb1\x96\xa8\xaa\x7f\xb7\x9e\x74\xb3\x71\xee\xdd\xb6\x82\xa2\xd4\x93\xbe\x8e\x18\xf8\xe1\x08\xbf\xcd\xb2\xbf\xa2\xc6\x9f\x18\xa5\xbe\xd6\x10\xae\x46\xa4\x8d\xed\x4c\xce\x08\x93\x82\xa3\x94\x28\x69\xb6\xa3\x63\x1a\x27\x0a\x17\x5f\x2a\xb0\x97\x10\x54\x31\x01\x09\x6c\x80\x0c\xff\xec\xd4\x74\xc7\x17\x15\xe6\x9a\x69\xa3\xea\xb7\x47\xc8\xc2\x47\x65\x7f\xcb\xe9\x87\xdd\x5d\xf3\x0e\x42\xe2\x1f\x47\xfb\x3f\x69\xec\xf6\x05\xa8\xa7\xda\x7d\xa2\x96\xad\xd8\x15\xb1\x90\x17\x25\x03\x2d\xf2\x20\x35\xc3\x2a\x86\xaf\xd9\x9d\xdf\xf2\x8d\xfe\x5b\x77\x7e\xa2\x51\x5e\xb7\xb9\x29\x64\x05\xd8\xe1\xba\x6e\x9d\xf5\x66\xa1\x7f\xce\xff\xa9\x8d\x3e\xef\xaa\xfe\x47\x25\xb7\x5a\xfd\xbf\xb5\xc0\xcf\xb7\xfa\xea\xbb\x2d\xe2\x5b\x30\xa0\x43\x1a\x02\x5f\x7f\x11\x49\xbb\xf2\xcd\x6c\xaa\xd7\x55\x4e\xff\x4d\x14\x0b\xd0\xbe\xd0\x34\xb3\x73\xa9\xb5\x0f\xa2\x0c\x2f\x91\xcf\x59\xdc\x99\x22\x88\xd0\x8a\x10\x98\x5f\x85\x26\x44\xa8\x41\x5d\xc4\xdc\xcd\x5d\x14\xf0\xfb\x33\x06\x36\x93\x88\xce\x9d\xd7\xca\x0e\x
f8\xba\xa6\x96\x04\xb0\x36\xac\x40\x2e\xff\x3f\xbe\x08\x6a\x7b\x89\xab\xbb\xd6\x0b\xec\x90\x23\xca\x36\x12\xc8\xcf\x12\x07\x3b\x47\x8a\x0d\x7b\x1b\x8a\x5c\xcd\x99\xd4\xf6\x3f\xd1\x1f\xbf\xb8\x1b\x4c\x17\x59\xd6\xe0\x8b\x24\xea\x6d\x91\x66\xa6\x4a\x10\xaa\x41\xfa\x21\xf1\x9a\x6d\x05\x2d\x1a\xe4\x95\x09\xf7\x56\x7f\xea\x25\x57\x96\x56\x0a\x66\x4b\x02\x19\x70\xce\x2a\x8a\x25\x41\xd6\x1c\x1e\x14\x68\xc7\xf9\xc3\xd3\xa5\xbc\xd2\x49\x7c\xbf\x0f\xf7\xec\x1f\xa1\xfb\x1f\x10\xb5\xe9\x88\xf4\xd1\x67\xc4\x87\x70\x8f\x8d\xac\x3f\xa4\xb4\x2d\x32\x4e\xda\xc3\x87\x16\x38\xc0\x25\x39\x2f\x02\xe2\xf8\xfd\xbe\x83\xa8\xe1\xf6\x88\x81\x0d\xa0\x96\x92\x8f\x69\x92\x70\xd7\xca\x84\x7f\xac\x95\xf6\x26\xd2\x05\x43\xff\x6e\x99\xfa\x21\xe9\x4d\x08\xc7\x3f\x1e\x24\x5d\xf3\x96\xe0\x64\x59\x69\xef\xa1\xf4\x4c\xd1\x77\x33\xd4\x17\x53\x41\xfc\x0f\xcc\x24\x2c\xfc\x47\xa9\x75\xe1\x13\xf9\xe1\x88\xab\x06\x68\x7f\xf3\x16\x0c\x17\xe7\x69\xdb\xa7\x64\x57\xe0\x65\xaf\x56\x78\x4c\x5f\x41\x5b\xf5\xe6\xe7\xf6\x84\x02\xc6\x66\xd7\x3d\x78\xe9\x4e\xd0\x0e\x6a\xf7\x69\xb3\xed\x97\x94\xee\x1c\x26\x64\x18\x8a\x63\x73\x9f\x6e\x02\xe0\x0f\x28\x74\x7d\xbc\x35\xdb\xdf\x73\xe3\xee\x59\x11\x74\xff\xb1\xfb\x0f\x0f\x0e\x82\xe6\x37\x50\x7b\x3f\x6a\xf7\x78\x83\xc2\xc6\x9f\xf0\x46\x5b\x99\x9d\xe1\x0f\xa5\x0a\xd7\x7b\x16\xec\xa0\x49\x17\x28\x82\x68\xde\x34\x49\x6d\x1e\x40\xce\xc5\x16\xc3\x00\x7f\x84\xd7\x92\xfd\x54\x12\x41\x79\x23\xfe\xd7\xeb\xf2\x7e\x83\x0a\x65\x5b\xbd\xd5\x71\x87\x03\x6f\x74\xe4\xad\x12\x7c\xca\x17\x6a\x9a\x9f\x94\xaa\xda\xa8\x63\xf2\x85\x4d\x1f\x2f\xbd\x93\xa9\x6d\x14\x2f\xdb\x3a\x57\xbe\x65\x2b\x58\xb8\x01\xbe\x9a\x4f\x96\x9d\x9c\xcd\x0e\x29\x2b\x22\xc1\x76\xc6\x4f\x6e\x7c\x61\x9d\x1a\xd2\x15\xf0\x70\xb3\x3a\xe3\x6f\x69\x9d\xdc\x08\x69\xe3\x60\x83\x99\xff\xbf\x4d\x72\xff\xb8\x89\x76\x28\x57\x00\x51\x3e\x3e\xb5\x75\xb2\x4a\x3e\x3c\xb7\x5d\xfa\xcb\x81\x4b\xf7\x88\x53\xd0\xfe\xc8\x47\xdd\xcc\x52\xff\xba\xc5\xea\x16\x79\xea\xac\xe1\x7d\xa4\x44\xc8\x33\xf8\xa1\xee\xb6\xfa\x45\xf0\xaf\xff\x41\xef\xd4\x12\xf1\xff\x7a\xcc\xcc\x3a\x13\x36\xdf\x7e\x18\x74\x3f\xca\x64\x39\x1d\xb9\x11\x0b\x47\xba\x0f\xb1\xde\x0f\xb6\xd6\xc3\x7e\x75\x88\x86\xba\x2d\x34\xfe\x4a\xa9\x09\x2e\x20\x62\xc3\x3b\xb5\x53\x5e\xad\xa3\x03\xfd\x3e\x0a\x7d\x23\x38\xcf\x4d\x92\x58\xed\xc7\x5e\x1e\x29\xb4\x3d\xab\x7d\x89\x1a\x31\xe0\x44\x78\x34\x87\x6b\x9b\x06\xc9\xb5\x8e\x66\xe5\xe1\x60\xe3\x87\x23\xc9\xf9\x8e\xe8\x87\x1d\x53\x78\x93\x53\x04\xcc\x16\xfb\x89\x3e\x38\x6f\xc4\xe4\x36\x9e\x91\x44\x7b\x27\xf6\xc2\x3d\x88\x83\xda\x61\x58\x5d\x87\x4f\x97\xfd\xb1\xf0\x01\xe7\xa5\xcc\x83\x43\x21\xa6\x76\x6c\x64\x70\x2b\xfd\x3a\xf5\xd4\xd5\x6f\xb6\x25\xde\x49\x01\x58\x55\x57\x96\x20\x9e\x70\xcb\xed\xba\xff\x80\x6a\xa8\x07\x67\x86\x6a\x7b\xff\x80\xe5\x50\xb0\xc3\x22\x9a\x64\xe9\x04\x51\xec\x65\xa4\x2b\x15\x76\x74\xd7\xcc\xee\xfa\xed\x45\xe7\x0e\x7f\x00\xbc\xe2\x6e\x1f\xca\xdf\x3c\x6c\x68\xf5\x36\x76\x9b\x09\xfd\x41\x0a\xb7\xb0\x45\x67\x4e\x1c\x9c\x72\x47\xc7\x28\xd1\x92\x56\x3e\xd2\x5b\xb8\x28\x9d\x5a\xad\x21\x6e\x23\xad\xfc\x7d\x0a\x6f\x38\xe5\x13\x75\x26\x0d\xca\x19\x84\x2d\x44\xe4\xf6\xc2\x27\x06\x51\xcf\x7b\xba\xf6\x6e\x73\xcc\x22\x7c\x19\xfb\xc9\x3c\xa8\xd8\x06\x61\x2a\x05\x45\x32\x39\xdf\x33\x8c\xdc\xf9\x2c\x3b\x49\x92\x14\xc8\x54\x25\x6c\x50\xb5\x5f\xa1\x3b\x9e\xfb\x35\xe0\x0a\xd9\xca\xfa\xfc\xc4\x95\x7b\x6b\xf1\x3d\xb0\x9a\x0a\x94\xc2\xce\x04\x0d\xe0\x1b\xc4\xaf\x04\xf4\x62\x72\x71\xb7\x44\x4a\x25\x61\x8a\x70\x9e\xc2\x46\xb7\xf0\xe6\x06\x54\xf4\x44\x47\x30\xd0\x8b\x90\x59\x83\x4d\x8d\xb8\x3c\x9b\xad\x4b\x7a\xd5\x15\x12\x72\x1f\xcd\xa1\x4b\xda\x58\x11\xc3\xc0\x96\x6f\xd2\xc9\
[hex-escaped binary payload embedded in a vendored Go source string; unreadable byte data elided]
9\xd3\x29\x68\x3c\x16\x45\x3c\x9c\xbc\x45\x40\x07\xfc\xb0\x94\xd1\x00\x33\x15\x45\x9c\x16\xce\xe7\x9e\x70\x2a\x60\xc0\xc6\x1d\x0f\x91\x2d\x7a\x4a\x93\x72\xa6\x3f\xe1\xc4\xa3\x4d\x31\x91\xe8\x70\xd3\x26\x2b\x12\x67\x50\x45\xdb\x66\xee\x66\x4f\x7f\x2c\x83\x7a\x61\xab\x9d\x6c\xdc\x4f\xd8\xd0\x0b\x2a\x48\x1b\xd5\xe7\x1f\x95\x70\x3b\xce\x27\x21\xf6\x71\x6e\x4c\x65\xd8\x64\x85\x36\xa6\xb6\xe6\x98\x6e\x40\x47\x84\x3a\x19\x59\xb4\x61\x89\x88\x8e\x1c\x41\x0b\x2b\x78\x5a\xb3\xcb\xbb\xec\x60\x2d\xb3\x77\x75\xba\x8c\x41\x72\x17\x14\x5c\x33\x87\x23\xb1\xc6\x67\xc4\x6b\xa2\xf7\x30\xf3\x28\x42\xc8\xb3\xf0\x3c\xb7\x23\x70\x54\x1a\x50\x15\x7e\xd4\xe7\xea\xe7\xfc\x53\x88\x16\x2c\xb4\x6e\x76\xb4\x6c\xea\xf5\x7b\xb8\xcd\x71\x97\xac\x9d\x83\x9e\x45\x30\x45\x32\xda\x05\x3f\xc3\xca\xed\x99\x2b\x17\x8b\x67\xa4\x08\x3e\x33\x12\x23\xbd\x8a\x78\xe6\x34\x4a\x30\x93\x42\x15\x53\xe1\xef\x1a\xa5\xad\x6d\xfe\x5a\xb0\xc9\x33\xc2\x25\x9f\xda\x2b\xd9\xae\xfa\x25\xa1\x8c\x44\x6f\x4f\x59\x91\x64\xed\x9e\x7d\x65\xa3\xf3\x95\xdc\x76\xe0\x4d\xe8\xc7\x43\x18\x5b\x06\xb9\x96\x83\xe9\x64\x15\x65\x4b\x5e\x24\x56\xa8\xb7\x93\xff\x8d\x47\x70\x1b\x73\x5d\xc2\xb9\xc1\x76\x0a\x81\xc9\x83\x08\x66\x43\x3d\x63\xa0\xc6\x3a\xe5\x34\x3e\xd8\x56\x2f\xf3\x87\xaa\xa9\x62\x08\x4f\xd0\xa1\x86\x1b\xe6\xa0\x36\xf5\x50\xaa\x42\x2d\xce\xc0\x6b\xd1\x30\x9b\x0a\x1b\x66\x5b\x6c\xf1\x07\xba\x11\xfb\x48\xed\xe4\x61\x3e\xd0\xa2\xf0\xf6\x96\x95\x70\x6c\x94\x65\x66\x4e\xcc\x60\x7a\xb6\xf6\xd8\x51\xa4\x87\xb9\x40\xd3\xe5\xf2\x0b\x08\x8d\xb1\x16\x39\xfa\xc0\x68\x85\x85\xe6\xe6\x9c\x23\x3c\x76\x2d\xee\xcf\xc5\x3b\x18\x37\xcc\x8e\x9a\xad\x68\x61\xaa\x44\x7b\xae\x5e\x96\xe3\xf6\xd7\x0f\xfc\x1f\x32\x1d\xcb\xe1\xdb\xb5\x2b\x9c\x36\xb1\x69\x69\xf3\x1b\x8f\xe7\xc8\x6f\x87\x3b\x02\xbe\x7c\x62\xc6\xc0\xa0\x68\x12\x25\x99\xf7\x27\x59\x45\x1e\x2a\xab\x2e\x99\xfd\x46\xb2\x8f\x3a\x15\x03\xde\x23\x51\x58\x1d\x33\x25\xd4\xda\xd7\x2b\x1d\x0e\xf8\xc3\x6c\xb5\x15\x39\xb6\xb9\xe7\x16\x3b\x46\x85\xe2\x99\x12\x41\xc0\x9e\x8c\x41\xf1\xba\x0d\xc3\x7b\xeb\x64\x9b\x2c\xfe\x09\x4e\x9e\xa4\x76\xd9\x76\x90\xb1\x64\xc3\x4c\xcb\xb0\xb9\xaa\xeb\x05\xa2\xb0\xbf\x77\x6f\xbd\x3f\x42\xd8\xc3\xb7\x30\x95\xc4\xe6\xda\xec\x34\x45\x34\x47\xe4\x2b\x36\xa5\x17\x7b\x41\x7d\xba\xd5\xcd\xba\xd3\xfa\x8c\x4c\x8c\xeb\xd7\x64\xee\x9d\xf8\xad\x2b\xb4\xf7\x70\x51\x35\xce\xec\x97\x4e\xea\x9c\xad\x52\xfc\xc0\x41\x34\xee\xd7\x46\xc4\xb6\xea\x6a\x82\x85\x1d\x57\x18\x8f\x81\xe5\x42\x0a\x5a\xf7\x61\xd4\xeb\x87\x8d\x9a\x7a\xb7\x3e\x18\x55\x30\xf0\x38\xd8\xd5\x94\xf3\x00\x06\x79\x6e\xb5\xce\x89\xcd\xbd\x6c\x77\x54\x36\x55\x6a\x84\xab\xd9\xa8\xf0\x59\x72\x22\xed\xe8\x98\x16\x4d\xb3\x68\x6e\xd5\xfc\xff\x8b\xbc\xf5\x3f\xe6\x3b\xfa\x64\x8a\x99\xb0\x3d\x69\x4b\xc8\x98\xe1\x55\xe2\x50\x0f\xfa\x64\x6b\xeb\x8e\x45\x2f\x2c\xfa\xd8\x36\xd4\x01\xe8\x6a\xf0\x81\xba\xf2\x65\xcf\xab\xc4\xcc\xd6\x45\x08\x75\x67\x13\x6a\x24\xcc\xfe\x10\x10\xc8\x7d\x66\x1e\xc3\x55\xeb\x97\x75\xdf\x49\xaf\x0f\xa7\x2d\x7a\x89\xb5\x4e\x1a\xfe\xa1\x37\xb4\x3c\xf8\x42\x1d\xa9\x33\x52\x33\xf6\xa4\xce\x54\x17\xf6\xb3\x1d\xbe\xeb\x52\x9e\x1c\x22\x57\xff\x6a\x21\x1a\x76\x11\xd6\x96\x82\x3b\xec\xe1\xa4\xdd\x65\x9f\x82\xfe\xed\x64\xe5\x9f\x10\x28\x3c\xbb\xe0\xff\x46\x3d\x0f\x12\x25\x69\xa3\x75\x7d\xd4\xc7\x8a\xe4\x5f\x60\xd1\x6b\x69\x2e\x5b\x77\xe3\xf7\xc1\x86\x4b\xee\xa6\x1d\x38\x6f\x52\x0d\x59\x15\xf0\x95\x5f\xd7\xc6\x97\xfa\xb9\x85\x74\xb6\x99\x08\xbd\xd1\xba\x86\x49\x97\xc2\xcb\xba\x34\xb0\xb9\xd3\x14\xbe\x20\x15\x9f\x3c\xe6\x5f\xd6\xed\x7f\xea\x4a\xfd\x97\x57\x2a\x54\xdc\x24\x90\x92\xa2\x78\x22\x52\xfa\x71\x3c\xe6\x50\xc6\xd9\x8e\x58\x5d\xb7\xb4\x2f\xc4\x95\x
d1\x32\x7b\xc5\x6d\xc3\x99\x65\xcb\x09\xcb\x00\x3f\x34\xef\x05\xd8\x23\xc5\x68\x53\x5c\xc8\xea\xaa\x57\x7f\x77\x6b\x96\x29\x05\xf3\x40\x38\x71\xa4\xf1\x54\x30\x08\xcd\x90\x93\xad\x7c\x89\x86\x32\xee\x6b\xa2\x0f\x61\x27\xea\xcb\x8f\x83\x7e\x81\x84\x6a\x96\xbf\xef\xf7\xb3\xfb\xff\xfa\x56\xce\xf2\xdf\x59\x06\x5c\x37\xb5\x9b\x5f\x02\x17\x5f\xf8\x91\x29\xc8\x17\xeb\x0c\xcf\xd3\x4f\xd1\x8a\xfe\xb1\x89\x58\x5f\xfa\x07\x5a\x34\x57\x09\xd9\x22\x68\xab\x96\xf7\x92\xca\xb4\xc2\xb6\xca\x87\x79\x65\x58\xf0\x1b\xc5\x3c\x8f\xd1\xe4\xda\xbf\x31\x58\x34\x68\x6c\xa3\x37\xd0\x53\xde\x06\xc2\x97\x63\x30\x6f\x70\xd1\xbc\x4f\x06\xc5\x91\x4f\x4b\x6f\x29\xb8\xdf\x3c\x27\xdf\x79\xee\x14\xdb\x2b\x1b\x28\xd7\x17\x71\x94\xde\xa7\x7d\x7a\xb6\x1c\xde\x52\x01\xbd\x71\xfd\xee\x11\x31\xc2\xe9\xfa\x9d\xb8\x0b\x7a\xd3\xa1\x94\xef\x82\x94\x5b\x98\x67\xa9\x2f\x66\xf3\xae\x3c\xe4\x5e\x25\xec\xb1\x61\x93\xc5\xfc\xdd\x59\xd1\x24\xab\x57\xa1\x2c\x88\xac\x9e\xfa\x9c\xf2\xc6\xc2\x4c\xfd\xb2\xf7\x1e\x42\x25\xd1\x22\x5e\x10\x3f\xb1\x79\x55\xaf\xd6\xdc\xb2\x33\x70\xfd\xa7\xed\x15\xfa\xc5\x97\x4d\xba\x73\xeb\x70\xf1\x18\x61\x9c\x2f\x21\x93\xf5\xfb\xf5\xef\x03\x1b\x0b\xff\x3d\x42\x4b\x59\xa3\x13\x6e\x5b\x5a\x52\xf6\x6e\x25\x0e\xde\x1e\x28\x43\xf1\x52\x73\x89\xbd\x91\x7f\xc0\xc7\x17\xcd\xba\x77\x21\xb6\x72\xcf\xfc\x09\xec\xe8\x93\xd3\x1d\x93\xd3\x3d\xcd\x08\x65\xf1\x3d\xfd\x8d\x70\x3e\x99\x03\xde\x61\xba\x7a\x2b\xb3\x37\x6c\xe1\xd0\x72\xef\xbf\x1f\xef\x53\xbc\x21\x32\x50\x6a\x6b\xe5\xb7\x89\xdd\x77\x6b\x41\x81\x73\xf4\x60\x83\x6a\x4f\xe1\x83\x55\x23\x0d\xd7\x8a\xf6\x2d\x9e\x84\x98\xa2\x48\xbb\xcd\xf4\x39\xb3\xc1\xdf\xb5\x24\x29\x85\x22\x1f\x24\x4b\xc2\x94\xb5\x41\xb5\x51\xb3\x7c\xb2\xe1\xd2\xdc\xb1\xf6\x60\xed\x60\x2c\x3a\xcd\x75\x2a\xfa\x16\x09\xb9\x7e\xc3\x91\x95\x74\x5b\xac\xc4\x27\x0e\xeb\x4d\x27\xbd\xe2\x2e\xdf\xdd\xab\xb9\x4c\x5c\x00\x4b\x9a\x60\x28\x52\xd8\xd1\x79\x8a\xde\x12\xdf\x31\xbc\x7e\x67\x0e\xc2\xf3\x6a\x68\xc7\x3d\x46\xc3\xfc\x3f\x36\x9b\xec\x7b\x08\xdd\x10\x85\xf2\xa8\x6f\x4b\x28\x89\x86\xa1\xfe\x7f\x54\x5d\xe9\x92\xb3\x3c\xaf\xbc\x97\xf9\x7d\x6e\xca\x80\x03\x0c\x8b\x79\x01\x27\x4f\x72\xf5\x47\xad\x6e\x39\xf3\x55\x4d\x15\x0a\x01\x86\xb0\x78\x91\x7a\x91\x0c\xd5\x9b\x47\x47\xca\x64\x55\xa4\x9e\xd3\xa2\x45\x15\xa7\x80\x53\x41\xd9\x4a\xd5\xa6\xac\x46\xfd\xca\x6d\x40\x7e\x61\x18\xc5\x32\x5f\x3f\xa1\xea\x7c\xc5\x1d\x00\xa8\xe5\xbc\x3f\xdf\x8f\x9c\xcf\x51\xd5\x8a\x41\xb8\x96\x40\xdd\x4a\x60\x64\xaf\x48\xdd\x31\x6f\x47\x4d\x6a\x0e\x05\x33\x4a\x5d\x29\x4c\xb1\x27\x49\x3d\xd6\x6d\x3f\x2b\x91\x38\xc8\x33\x1f\x2c\x41\x61\xf4\xd7\xd4\xae\x46\x45\x83\x0d\x95\x48\x32\x72\x1f\x5f\x46\x1b\x0b\x54\x84\x1c\xda\xe8\xfd\xd3\x5c\x44\xde\x89\xae\x1d\x5e\xc6\xf2\xe5\x3c\xb0\x66\xc5\xe4\xde\x95\x12\x39\x44\xfb\x4c\xae\x17\x26\xa7\xdf\xf2\x13\x2d\x44\xb2\xc7\x77\xdd\x77\xd1\x85\x64\x06\xbc\xed\xf2\xfa\x9d\xc2\x3f\x58\xfa\x57\xfd\x22\x6a\x34\xa0\x3d\x1b\xeb\xce\xe7\x2c\x4a\x1b\xca\x4d\x64\x0f\x39\x8c\xe5\xc1\x32\xd2\xbf\x93\xa3\xc3\x53\x0a\xd0\x67\x56\xa1\x2c\x53\x2d\x06\x2c\xc3\x07\x4b\x45\x48\x28\x90\x55\x94\x25\x6d\x6a\x01\xa6\xe7\x1e\x85\xfd\xc7\x2b\x7c\x45\x40\x12\xee\xcf\xd4\x6c\x44\xc8\x79\x82\x3b\xb6\x2f\xf7\xc1\xaf\x27\x34\xaf\xe6\x6f\x4d\x8a\x6e\xc6\xc9\x1e\xc4\x39\x02\x5f\xca\xcd\xef\xd4\x2c\xcd\x9a\x1d\x66\xf5\x30\xc7\x13\x73\xeb\x3f\x38\x39\x06\xa1\xe9\xa8\xbf\x85\x0a\x59\x20\x58\xa8\x98\x73\xba\x3a\x2e\x23\x9b\x35\xa9\x56\x35\x4a\x21\xab\x91\xa1\x0e\xbb\x16\x2c\x4a\x9d\xb7\x26\xfe\xd0\xc1\xe2\xaa\x03\x6a\x5c\x8c\x58\xd9\xb1\xe5\xaf\x1b\xd2\x1e\x65\x3b\xbc\x8a\x72\xac\x75\x50\x89\x2a\xaa\x56\x36\xcf\xaf\xd4\xad\x9a\x3f\x5a\x13\xe6\x67\
x47\x60\x7d\x51\xa7\x3a\x25\x84\x35\x89\x61\xa4\xc4\x8d\x4d\xc6\x2e\x71\x26\x8f\x89\x3e\x47\xc8\xe4\xb3\x67\x3e\xb2\x9c\x54\xe0\xad\x28\x7c\x1e\xe5\xb1\x58\x8c\x3a\x1f\x51\x38\xca\x2e\xe6\xe5\xd1\x3e\x15\x8d\xc4\xed\x2d\x95\xa2\xd6\xba\x86\xc1\xc8\x93\x82\x06\x30\x4b\x66\x19\x0a\xcc\xcb\x59\xb6\xc9\xa7\x32\x33\xc7\x17\x3f\x07\x66\xd1\x2f\xcb\x50\xed\x5f\x84\x0d\xfa\x91\x7e\xfd\x60\xe5\x55\x2f\x74\x6a\x61\xb8\x80\xdc\x1c\x2b\x4c\x8e\xf6\x42\x10\xe8\x55\x97\x27\xd6\x64\xb6\x64\xcd\x42\xfc\xc3\xf0\x92\x74\x57\x0f\x31\x08\x92\x8b\x00\xa7\x8a\x6b\xb8\x7f\x05\xe8\xf6\xba\x1e\xae\xa5\x85\x51\xa4\xdf\x62\x2f\x55\xf9\x6b\xb7\x97\xb3\xa9\x75\x0d\xde\xaa\xed\x24\x75\x51\x9e\xeb\x27\xc4\xb9\x7c\x0b\x37\x2c\xf5\x40\x6e\x28\x98\x18\xb3\xf6\x85\x49\x91\x97\xb2\x00\xca\x20\xf3\x6a\x77\x1b\xe4\x81\xc9\xf6\xad\x5e\x3e\xd1\xda\x50\x41\xe5\x32\x95\x85\xd1\xc6\xfc\xed\x56\x6d\x3c\x29\x0e\xd2\x7e\xff\x2a\x94\x08\xb6\xdb\xa4\xb0\xcc\x60\x11\x6b\x60\x60\xa6\xa9\x78\x65\x03\xfa\x21\x97\x3e\xd6\xa3\x10\x34\x9e\x51\xfd\x4a\xfb\xfb\x1b\x0e\x3a\xc8\xcc\x89\xed\x46\xfc\xd1\xe6\x15\x56\x16\xb3\x6e\x39\xa3\x68\x14\x68\xc1\xaf\x44\x97\xa1\x01\x56\xe4\xa1\xee\x7c\xa5\xaf\x4d\x33\x6a\xe9\x3c\xd1\x2c\x6a\xc2\xd6\xc4\x3c\x36\x38\x25\xb0\x52\x85\x7c\x0f\x7f\x7f\x5e\x47\x3c\x2a\x52\x04\x6b\xbc\x18\x54\xb5\x5a\xdc\x3b\x1a\x8f\xa5\x28\x7b\xfe\x55\x7c\x82\x4a\x8f\xea\x48\x76\x63\x54\x82\xeb\x47\x78\x74\xa8\x94\xd2\x8f\x73\xd7\x85\x64\x98\xb5\xd6\xb2\x52\x41\x82\x44\x3b\xda\xb4\xfa\x8c\xca\x56\x8f\x24\x67\x14\xbf\x80\x23\x53\x0d\xa7\x87\xb7\x63\xf3\x50\x01\x4c\x53\x3c\xa4\xb7\x0a\x36\xff\x82\xb2\xf4\x8a\x42\x93\xfd\x03\x55\xbd\x5c\xcf\x3a\x56\x5b\xac\x62\xcf\x7d\x55\xf9\x34\x6d\xe9\x62\xbd\xd6\xcd\xa0\xb9\x24\x31\x03\x81\xf8\x51\x71\x8e\x98\xde\x68\x9b\x7e\x66\x07\xcb\xd0\x83\xbd\x95\x3a\xad\x9b\x7a\x87\x63\x74\x78\xb3\xec\x32\x5e\xf1\xec\xb0\x6b\x39\xfe\xb0\xec\x94\x54\x0a\x5b\x51\x9c\x66\x24\x38\x38\x6a\x51\x34\xe6\xd8\x52\x0f\x35\x38\x86\xcc\x4c\xac\xe1\xc2\xbb\x72\x32\xbe\x56\xb4\x26\x3e\x38\x5d\xab\xf2\x58\xa8\x36\x51\x11\x0c\x6d\xa4\x17\xa7\x00\xf2\x94\x42\x99\x3d\xf7\xd1\xfb\x43\xa4\x4c\xd6\x23\x45\xbe\xcd\x6b\xf9\x22\xa4\x31\x16\x3a\x09\xf1\x5b\x9b\xe2\x13\x74\x26\x58\x81\x77\x7a\xa1\xd7\x76\x9c\xcd\x75\xf0\xe5\x5e\x23\xab\xb8\xce\x9a\xc3\xc3\x6b\xe5\x12\xed\x2b\x77\x41\xbb\x0a\x55\x35\x14\xb1\x52\x44\x2a\x77\xe5\xa7\x9c\xb4\x91\xd8\xe2\x32\x8a\x8d\x2b\x11\x9d\xfe\xff\x9d\x49\x2d\xd2\xd7\xe5\xb0\x0f\x3b\x8b\xf0\xbd\x16\x45\x7c\x95\x00\xaf\xbd\x91\xb4\xde\x5a\xe9\x5d\x1b\x76\x2c\x4e\xfe\xe1\x86\x8f\x1c\xbf\x27\x4b\x6c\x77\xcd\x29\x4a\x68\xd1\x9a\x59\xb4\x85\x2d\xf6\x47\x95\xad\xe7\x5c\xa2\x92\xbf\xe2\x21\x54\x0d\x2d\xc4\xd8\x6c\x12\xc0\xc2\x98\x2e\x42\x0a\x77\x98\x10\x77\x5c\xc3\x76\x6a\xa5\x26\xb4\xb7\xbd\x78\x27\x58\x23\x93\x25\x30\xea\x66\x57\x69\x15\xb4\x1f\x71\xc7\xe2\xbb\x50\x75\x4b\x23\x9d\x71\xec\x21\xaa\xbb\x20\xaf\xb0\x89\x41\xeb\x73\x33\x96\x27\x4e\xea\x08\x22\x59\x53\x5a\x3c\x31\x0e\x85\x26\x5f\x64\xc1\x5e\x60\x18\xc3\x82\xd7\xe9\xbe\x17\x0b\xec\x85\xf9\xb9\xb0\x78\xf6\xad\xac\xe5\xca\x0e\x75\x29\x4f\x82\x32\x97\x72\x7c\xcb\x57\x45\xea\x8c\xa0\xa2\x91\x40\xb9\x48\x30\x64\x89\x14\x1b\x1c\x61\xd4\x13\x2d\xf2\xe6\x5b\xbc\x4e\xf1\xe3\x22\x6d\x7d\xb0\xc7\xce\x2b\xd4\x63\x16\xc7\x55\xb0\xaa\x26\xf9\xa4\x25\xcc\x7e\x97\x1c\xf5\xb6\xed\x58\x65\x14\x83\xbe\x29\x34\x18\xed\x8d\x6e\xac\xb6\x9c\x48\x30\x8b\x0d\x31\x55\x67\x21\x6e\x23\xba\xfe\xb7\xde\xac\x03\x39\xc3\xec\x24\x55\x2c\xb4\x5e\x7e\x53\xc3\xb8\xfd\x26\x9a\x2e\xcf\xd7\xe2\xad\x38\x50\x88\x27\xa1\xdd\xf3\x42\xf1\x5a\x1b\xc7\x7a\xe5\xe4\xdd\x48\x56\x6f\x56\xa5\xa7\x7a
\x25\x39\x87\x07\xcd\x02\x23\xed\x18\x8a\x4e\x35\x46\x05\x93\x84\x32\x26\xcf\x7b\x90\x0b\xf6\x26\xc9\x0c\xf9\xd2\x39\xaa\x6c\x61\x51\x8b\x2a\x9b\x2f\xc4\x38\x44\x01\xed\xe2\xb2\xa9\xac\x95\xb5\x15\xef\xd6\x21\xb7\xb0\x93\x34\x5c\xbc\x23\x10\xe3\xf5\x13\x83\x34\x65\xf3\x74\x19\x44\x00\x5b\x55\xac\xfb\x3a\xc5\x64\xa1\x8e\x5c\xc2\x36\x28\x58\x79\x18\x8f\xe0\x9c\x59\xeb\x2e\x65\xe2\x49\x32\xe3\x28\x8d\xbd\xa8\x1c\x3f\x49\x49\x74\x6a\xa4\xc4\x09\x98\x68\x1a\xb9\x5c\x61\x49\x8a\x82\x98\x0d\x62\x1f\xaa\x84\x85\x4c\xdc\x79\x8a\x21\x76\x1e\x5a\x0e\x73\xe8\xc5\xad\x34\x28\x5f\x57\x49\xbd\x01\xf1\xa0\xba\x56\x98\x48\x4d\xae\x0a\x19\x94\xb6\x14\x6c\xba\x94\x0e\x9a\x36\x8d\x55\x86\x56\xa3\x9b\x07\x8b\x77\x55\x49\xa0\x1f\x6b\x64\xcc\xc6\x3a\x48\xac\xed\x94\x71\xf3\x78\x42\xbd\x47\x91\x7e\x33\xa8\x62\x45\x72\x70\x59\xcf\xed\x78\x46\xb1\xc9\xba\x7c\x2f\x60\x95\x53\x2c\xb1\x73\xe4\x80\x75\x2c\xe5\x0a\xe6\x18\xd1\x33\xa3\xa4\x56\x00\xa6\x69\x59\x37\x74\xcd\x90\x37\xf3\x33\x03\x6a\x4b\x1d\x88\xc5\xc3\x4b\xa6\x31\xe8\x39\xfd\x90\xf3\x6a\xf7\x43\x6f\xe1\x88\x5b\x4b\xa2\x98\x3a\x07\x97\x8b\x63\x95\x4b\x39\x77\x15\xc7\xa8\x25\x77\x4b\xb4\x66\x4c\x41\x6f\x01\x77\xac\x44\xa5\x2c\x82\xf0\x92\xb1\x57\x49\xa4\xcc\x31\xb2\xd1\x16\xb8\x90\xf6\x08\x86\x46\x94\xa1\xea\x1f\xd9\xfb\x50\xed\x94\x76\x8e\x2b\xc9\xb1\x86\x75\xfe\x29\xa5\xb1\x44\xa1\x72\x05\x2b\x69\x2f\x55\xd4\xaa\xda\x91\x07\xf0\x87\xe7\x38\xab\x72\x76\xaa\x24\x6d\xed\xe4\xa0\x60\xbe\xc8\x3d\xe3\x5c\xfe\x61\x43\x16\x96\xb3\xc0\xb4\x56\x8d\x13\x5e\x34\x28\x0d\xfb\xfa\xf9\x0a\xf9\xe1\x47\xe4\xa9\x1e\x73\xd8\xf0\xa2\xb0\xe6\x17\xec\x91\xd9\x79\x3e\x9c\x30\xa9\x3c\xcb\x03\x4f\x78\x50\xc1\x1f\x3e\x34\x61\x85\x0e\x30\x97\x29\xae\xa1\x97\xc8\x9c\x28\x76\x35\x73\xb9\x46\x24\x42\x81\x5a\xba\x39\x20\x68\xfa\x0d\xcc\x0b\x7f\x54\x5e\x06\x52\x0d\x72\x48\x45\x5a\x67\x18\xdc\x30\xa6\x8a\xb9\x5d\x22\xab\x22\x27\x9a\xb4\x0d\x95\xf2\x3c\x43\xdd\x91\x32\x24\xbd\x0c\x30\x45\x96\xb9\x64\x69\x3d\x54\xde\x01\x5b\xb2\xb2\x84\xe4\x3f\x8b\x64\xa0\xab\x88\x4e\xe6\x4d\x97\x8d\x7e\xd7\xc6\x17\x13\x72\x69\x28\x9b\x37\xa3\x43\x81\xa4\x57\xd1\x14\xd1\xcf\xc9\x57\xc3\x96\x9c\x3b\x07\xbb\x78\x98\xd7\xba\xae\x51\xf8\x8a\x4a\x97\x8f\x10\x3c\xba\x38\xa0\xb7\x60\x2f\x72\x7b\x85\xff\x4c\xbd\x59\xaa\x05\x87\xac\x4a\x82\xee\xec\xde\x9e\x76\xf1\x0f\x3b\xdb\x07\x6b\xff\x74\x0e\x39\xcc\xe9\x50\x2c\x0b\x26\xda\x2a\x99\xa8\x21\x10\x61\x43\xfe\xad\x32\x83\xf7\x9c\x26\xd7\x11\x93\x32\x40\x67\x88\x5f\xc9\xc9\x67\x80\x90\x07\xbf\xfa\xaa\xe2\xf5\x75\xe6\x77\x21\xeb\x62\x33\xe0\xeb\xad\x25\x37\x96\xbb\xea\x90\x5e\xbc\x0f\x78\x5a\x06\x71\xba\x86\xc6\x0c\x19\x42\x79\x78\x80\x06\x16\x03\x96\xf0\xfc\x94\x6d\x64\x21\x06\x55\x25\xc8\xce\x96\x60\x7c\x65\x86\x49\x7a\x28\xfd\x59\xe7\xe5\x9a\xe8\xc5\xe3\x55\xb8\xf0\x9c\x39\xa3\xc5\xe9\x31\x12\x78\x33\x78\x84\x7f\xce\x4c\x51\x91\xfe\xcc\xd6\xc7\x73\x96\xda\x17\x8d\xb3\x51\x88\xe3\xa4\xd2\x46\xb9\x36\xdf\x29\x32\xa7\x07\x1f\xc5\x9d\x01\xdc\xd1\x7e\xe7\xa6\xd1\x95\x48\x49\x4f\xc7\x2f\x5d\xd7\x3c\x72\xc8\xe8\xe8\x57\x11\x41\x7a\x52\xb1\x7a\xf8\x41\xb3\x32\x87\x4e\xcc\xbf\x41\xfd\xcd\x8f\x3f\x41\x96\x99\xc5\x39\x74\x32\x2c\x15\x42\x2b\x85\x92\x78\x13\x9b\x25\xd0\xdd\x7c\x09\xa3\x68\x52\xec\x6c\x30\xae\x72\x1b\xd2\x22\xfe\x0b\xf3\x10\x5c\xb2\x77\xbe\x23\x7c\x41\x9a\x51\x45\xba\x27\xac\x6c\x64\x4b\x6f\x5d\x01\x01\xb0\x20\xbc\xad\x73\x78\xe5\x9c\x7f\xab\x73\x5f\x42\xdf\xb7\xac\x62\xe1\x18\x72\x7d\xf8\x7f\x1f\x71\xda\x82\x8d\x97\xd6\x10\xc2\xf5\xe4\x8f\xbc\x7c\x6c\x1a\x91\x46\x9d\x11\x8d\x91\xc2\x97\xaa\x7b\x4f\xad\xa0\x17\x52\x7e\x6d\x49\x8f\x3b\xf8\xe9\xcc\x61\x94\xcf\xf1\xb2\x1
7\xf8\xa2\xfe\xd7\x26\x95\x80\x52\xc3\x82\x04\xe1\xf9\x6e\xb5\xba\xad\x15\xf0\xec\x14\xa8\x0c\x67\x61\x54\xef\xca\x15\x06\x86\x16\xeb\xd4\x41\x87\x53\x16\x17\x8a\x63\xd1\x95\xba\x8d\x7e\x44\xfb\xf5\x35\xcc\x91\x7f\x9e\x33\xe1\x32\x83\xf1\x8f\xb4\x5f\x14\xf8\x36\x11\x91\x3a\x38\x37\x35\x1b\x9e\x02\x99\x76\x0f\xae\x28\xfa\x9d\x55\xe5\xa7\xa6\x53\x23\x01\xa0\x8b\xe1\x18\x76\x3e\x45\xa6\xc5\x9d\x30\xbf\xce\x7e\xd3\x6f\x28\xaa\x6d\x58\x20\xf9\xbf\xf2\xf6\x7f\xa8\x6a\x57\xa3\xa0\xcd\x50\x90\xfd\x61\x59\x90\xdf\xc0\xad\x59\xba\x7e\x1f\x5e\xef\x4c\x51\x99\xae\x65\x8e\x9c\xa6\xc6\xaf\xc0\x43\x5c\xf5\x6d\xaf\xf3\x09\x9d\x2d\x0b\x9a\x7b\xcf\xba\x3b\x42\xb0\x73\xd0\x03\x2b\x82\x84\xb6\x41\x11\x90\xd5\x01\xd6\x0b\x95\xc4\xef\xb2\xa6\xc1\xac\xf7\x69\x1f\x8f\x19\x9c\x5f\xa3\x9d\x2e\xab\x6a\x68\xbd\x97\xa2\xfd\x29\x43\xfd\x0e\x25\x26\xd5\xf5\x00\x3b\x0c\x91\xc0\x60\xb8\x5d\x04\xcd\x27\xcc\xc8\xfd\x19\xb5\x48\xd2\x42\x4e\x73\xdb\xed\x76\xea\x26\xda\xb1\x59\x0e\x3c\xb7\xb0\x14\x77\xf3\x2f\xb6\xc3\xee\x58\x18\x08\x13\x7d\xf0\x4a\x9e\x2b\xe2\x7b\xef\x15\x80\x06\x2f\xee\x09\x77\xee\x56\x9a\x7e\xd4\x0d\x49\x3a\xb2\x56\x41\x8f\xe3\x41\x21\x81\x8c\xe1\x35\xeb\x7a\xf2\xf9\x59\xd9\x3f\x24\x70\xc4\x3c\x6d\x9b\x5c\xca\x17\x41\xf6\x04\xb7\x4c\x7a\xfa\x5e\xd7\x28\x75\x83\xe0\x00\x89\xa5\x8f\xcf\x8b\xed\xef\xa7\x62\x1c\x86\x60\x3e\x9b\xa4\xe0\xc8\x8b\xf8\x99\xdd\x34\xc3\xa5\x08\x91\xd8\xf7\x13\xfa\x10\xc7\xfe\xc9\xec\x22\x3f\xe9\x9d\x7a\x5f\x9e\x01\xc5\x26\x2b\x8e\x02\x3c\x28\xf5\x71\x96\xf5\x92\xe9\x3c\xac\x74\x82\xf2\xc6\x5f\x83\xa1\xf5\xc9\x1a\xe0\x3a\x90\xd0\xc6\xd9\x17\x96\xe2\xa1\x79\xb3\x44\x0d\xc1\x79\x97\x1d\x8a\x45\xb1\x2a\xa0\x6f\xa2\xbb\x79\x38\xcd\x41\x51\xa5\x02\x21\xc9\x75\x6e\xce\xe1\x49\xec\xd7\x14\xf3\xf7\x97\xad\x08\xbe\xdb\xfe\x91\xec\x20\xeb\x80\xae\x28\x28\xae\x1c\x35\x9d\x5f\x8d\x75\xf3\x92\x97\x95\x93\xd9\x54\xf2\x1b\x64\x3b\x53\xf6\x53\x43\x10\xba\xe2\x78\x35\xae\x50\x64\x08\x39\x4e\x1b\xe8\xb4\x12\xdf\x2a\xcb\x1b\x0b\xfb\x59\xe4\xb6\xbb\xaa\x5f\x50\xcc\xa8\x69\x0c\xae\xac\xc0\x58\x34\x15\xc9\x99\xbb\xd3\xcd\xa7\x85\x52\x57\x64\xcd\x8f\x26\x4b\xaa\xff\x45\x96\xe3\x09\x73\xc9\xbc\x44\xd4\x75\xda\x01\xa5\xbd\x9e\xd5\xc3\x55\xcc\x3a\x62\x90\xaa\xbd\x5c\x7e\xc3\xea\x71\x4d\x5e\xae\xaa\xdb\x63\x25\xa0\xf0\xbe\xea\x2f\x59\x6e\x6f\xd2\xa0\x6e\x78\x4b\x15\x06\x4d\xb7\xe7\xb6\xbe\xca\x5f\x2c\xe8\x13\x0a\x86\x7c\x17\x15\x10\x61\xa8\x3e\xee\xe2\xc5\x9d\xb1\x4c\x92\x28\x6c\x8d\xde\x5d\x36\x19\xee\x5c\xa1\xd3\x7a\xcf\xfb\xd2\xa2\x41\x41\xbf\xec\x85\x51\x27\xdc\x12\x22\xea\x18\xbb\x0f\x5e\x7b\x55\xd1\x45\x6e\x51\x9a\xbd\xa7\x50\xf1\xf5\x88\x23\x39\x7b\x08\x2a\x59\xa1\x37\xa6\xee\xfc\xc5\x79\x94\xcd\xcb\x0d\x8c\x99\x5a\xe2\x3b\x55\x65\x93\xee\x3f\x6e\x53\x77\x6a\xbc\x3b\x4f\xb2\xf5\x5c\xd9\x15\x56\xee\xaf\xf7\xa1\x7a\xd4\x5b\xff\xfb\x7a\x69\x90\x78\xd9\x93\x1f\x25\x0a\x1b\xf1\x0c\x51\xfa\x0a\x12\xd0\x55\x4f\xa7\xcc\x50\x02\xb1\x79\xe0\x54\xca\x82\x5d\x75\x1c\xb4\x42\x35\xbe\x2a\xeb\x5c\xf0\xe3\x5b\x09\xb4\x1c\x8a\x40\x95\x09\x4d\x28\x62\x45\xc9\xbc\x73\x09\x49\xed\x96\x42\xbc\xfa\xba\x09\xe5\xf5\xd3\xbc\xed\x26\x53\x52\x0f\x51\xcf\xe0\x11\x47\xb2\x2b\x25\x01\x3d\xb2\xfb\x82\xd0\x37\xad\xe1\x1e\xe4\xd4\x3e\x11\x08\xb3\x38\x58\xfc\xe4\x16\xa8\x2a\x9d\xae\xc1\xf6\x3a\xce\x34\xba\x5e\xc8\x75\xb0\x9d\xb9\x8e\xf9\xab\xba\xc8\x3a\xdb\x91\x50\x32\xe6\x19\x1c\xaa\x71\x5b\x80\x41\x80\x7f\x5f\x9a\xd3\x8f\x18\x5b\x2e\xc1\xc8\x13\xda\xf3\x3a\x9e\x52\x5c\xdc\xb3\xc6\x2a\xd7\x46\x08\xe4\xb5\x8a\xe8\x8c\x1c\xf1\x22\x6b\xa6\xf9\xf4\x6a\xb4\xbd\x18\x71\xfd\xec\x15\xb9\x55\xb3\xd0\xeb\x82\xc7\x81\x68\x07\x9b\x8c\xbc\x36\x51\x13\x
a1\x14\xc3\x65\x38\x28\xb9\x22\x0c\x9e\xbb\x6b\x3c\x45\xdb\x7b\xd0\x07\xe2\xca\x87\x26\xfb\x57\x16\xf8\x1a\x81\x7e\x5c\x4e\x8d\xba\x67\xc3\x65\xf1\x31\xad\x49\x71\x2a\x21\x4b\xa9\x3f\xdf\x2a\xaa\x7e\x3c\x3e\x09\x00\xe8\x7a\x8c\x51\x0d\x2d\x50\xcb\x0f\x99\x46\xe4\x0e\xd4\xae\x82\xc8\xd7\xd4\x22\xfb\xf6\x62\xb8\x24\xe3\xac\xa3\x20\xb5\x32\x86\x52\x62\xd5\xab\x61\x91\xf8\x06\x57\x9b\x8b\x5f\x40\xea\xdb\x88\x5a\x64\x3e\xbc\xbc\x0f\x5e\x2a\x60\x7c\x19\x70\xde\x77\x25\xe5\x30\xaf\x44\x52\xc4\x95\xbe\x44\xc5\x44\x9d\xe6\xf3\xed\xed\x96\x35\x41\x89\x54\x3f\x4d\x74\xa1\x72\x20\x22\x1f\xe6\x83\x51\x05\xd5\x15\x38\x69\x2b\x75\x16\xa9\x97\x82\xdf\xd7\x58\x7f\x5c\x6e\x7a\xed\xce\xf2\x2b\x66\x5c\xe9\x27\x6d\xdc\x79\x69\xf0\x47\xba\x8b\x34\xed\x29\xcc\x1f\x41\x30\x81\x5c\xbe\x9d\x7e\x4a\x08\x34\xed\x05\x64\x83\x15\xd8\xe8\xbb\xce\xb9\x4b\x2c\xae\x3e\xc3\x3b\xc8\xcd\xe2\x7e\x58\x5d\x95\x9c\xc3\x99\xc3\x0f\x28\x13\xb5\x0a\x87\x50\x99\x01\xdd\x4c\xfe\x41\xbe\xe3\xc3\x7a\x6a\x28\x7a\x9d\xcd\x30\xe0\x4c\xa4\xa1\x9c\x32\x65\x12\xdb\xe6\x4c\x54\x93\x4c\xfd\xc2\x01\xf9\x29\xc7\xa8\x13\xe2\xe4\xe1\xa6\x94\x3a\x07\xe1\xfd\x57\x9d\xc6\xef\x35\x3d\xe0\x3f\xbc\xca\x57\xf7\x41\x8c\xbf\x5b\x37\xf7\x80\xf2\x35\xe1\xcd\x70\x16\xaa\x77\x33\x19\x1a\x51\xf2\x65\xe9\xf4\x3d\x30\x8b\xe9\x9a\x8c\x2a\x95\xa2\xb2\x9a\x6e\x99\x08\xe9\xd9\x3c\xe0\xc1\xc1\x52\x28\xa0\x91\x94\x56\x64\x75\x1a\x96\x42\x49\x93\x37\x6b\x9c\xe7\x10\x65\x94\x1f\xbb\x3d\x95\x7d\x39\x58\x5f\x45\x77\xc2\x28\x7e\x13\x2a\xac\x89\x81\x3c\x57\xa0\xdd\xc8\xfd\xfa\x9e\xf3\x05\x94\x53\x43\xb2\xf1\xce\x8d\x11\x48\x12\xc9\x01\x2f\x32\x7e\x79\x42\x7b\x49\xc5\xd4\x18\x0c\x1d\xe8\x1a\x28\x4e\x99\x99\x03\x80\xb6\x6e\x33\x55\xca\x5f\x41\x9c\xa3\xbd\x15\x87\x3b\x47\xb3\xda\xba\x6b\x00\x77\x04\x3a\xcd\x02\x32\x20\xd9\xa2\x1e\x61\x24\x77\xa4\x41\x8f\x3b\xdc\x0b\x59\x84\xd3\xd0\xa0\x9c\xf1\xf2\xba\xda\x80\x73\xfe\xd0\x3d\x73\x29\x26\x1f\xb7\x5c\x60\xde\x8e\x60\x4a\x3e\xfd\x0f\xda\x26\x96\x34\xe6\xf0\xf2\x69\x79\x7d\x75\x19\x7d\x08\xe1\xdc\x7a\x91\xff\x9c\x6a\xba\x97\x51\x36\x47\x50\x0b\xf1\xd2\xe8\xdc\xb3\x7d\xb3\xa0\x57\xd1\xf4\xeb\x92\xb8\x43\x1e\x23\x92\xb4\xf6\x61\xcc\xd1\x78\xef\x9e\xf9\x8f\xff\x97\x45\x48\x01\x41\xf0\xd2\x92\x0b\x55\x76\x33\x67\x65\x40\xb1\x52\xb2\xb0\xba\x0d\x1d\x23\x5d\x60\x8b\x84\xc5\xdd\xc2\xa1\xd4\x02\x89\x3c\xd6\xfd\xad\x35\xd0\x47\x22\x89\xf0\x8e\x42\xab\x3d\x4a\x62\x07\xa2\x99\x6b\xd4\x43\xd7\x2d\xc2\xe5\x5f\x45\x24\x04\x16\xc1\x03\x91\xf6\xfc\x31\x4e\x7b\xab\xcd\x6e\xf5\x6c\x85\xda\x14\xdc\x43\x25\x1a\xb7\xd2\xea\xd4\xdb\x9a\xfb\x0f\x8b\x7b\xf3\x9b\xfd\xc5\x36\xdf\x91\xe5\x6f\x9a\x67\x5b\x7b\x9c\x11\x91\xd8\x88\x02\x60\xe8\x48\x4e\x59\x0c\x5a\x67\x66\x6f\x2c\xe6\x92\x51\x97\x75\x4b\x95\xc7\x72\x79\x49\x11\x02\x33\x69\xf5\x5b\xfe\x52\xd3\x32\xec\x68\x83\x8f\x98\x59\xe2\xdb\x90\xf5\xe6\x21\x42\x6e\x65\x8b\x77\x75\xcb\x83\x4d\xfe\x9d\xac\x99\x99\x1e\x01\x23\x51\x1b\x61\xf0\x10\x25\x62\x88\x51\x8a\xc3\xb7\x85\x90\x8a\x85\x59\xd5\xdb\x2d\x11\x25\xb4\xd9\x23\x24\x41\xcb\xb1\xbc\x9a\xe0\xa4\x3d\x1d\x8a\xe1\xfc\x4c\x9e\x65\x0f\xde\xa9\x8a\xc6\x48\xf4\x30\x08\xb3\xc5\xad\xef\xc4\x45\x14\x58\x6d\x4b\x6f\xbb\x69\x64\xea\xbd\x73\x15\x01\xf0\xaa\x2c\xac\x5e\x95\x95\xd3\xf6\x00\xa5\xf3\x13\x5c\xd3\xe4\xc8\xe3\xd0\xac\x54\x41\x04\xf5\x59\x6b\xe5\x1e\x3f\xdf\x0a\x6d\x23\x10\xda\x60\x18\x73\x52\x56\x50\xad\x2d\x50\x51\x96\x86\x25\x9b\x63\xc2\x3c\x08\x33\x65\x2f\x9f\xa9\xd0\x0c\xfd\x68\x56\x66\xa5\x69\x8d\x80\x2b\x6c\xa2\x16\x46\x50\xf6\xe1\x7f\x08\x97\xa9\x1f\x4a\xe3\x21\x72\x36\x03\x22\x60\xd0\xf7\xca\xf3\x29\x1a\x5c\x79\x4a\x87\x6d\x75\x70\xc1\x8f\x97\x4e\x3f\
x3f\xaa\xa0\xb2\x3a\x69\x4f\xd8\xb7\xee\xea\x68\xe5\xe1\xfe\x09\x3e\x60\x29\x54\xcb\xdc\x9a\xaf\x54\xee\x58\x05\xb5\xbb\x7a\xe7\x08\xa7\xaf\x79\xe9\xea\x8a\xeb\x08\xf2\xeb\x5b\x6b\x95\x08\xbb\x47\xbe\x14\xa3\x0d\x0a\x9a\x2a\xbf\x7e\xfd\x6c\x90\x58\x3b\xf9\xed\xac\x3a\x6f\x34\x07\x00\x3d\x84\xa3\x54\x58\x01\xad\x10\xd4\xad\x1e\xa4\x8b\x75\xd8\xd4\xb4\x35\x85\xd2\x5a\x61\x4c\xab\x92\xa8\xea\xbd\x09\x4e\x75\x51\x1a\xfd\x51\x61\x94\xe2\xa0\x10\x6f\x65\x89\xd7\x27\xf6\xb4\xec\x4a\x22\x14\xad\x5f\x47\xab\x72\xaa\x20\x3b\x9f\x97\x34\x37\x67\xa9\xbe\xac\x09\x25\x60\x0f\x6c\xe6\xa2\x62\x68\x9f\x3e\xdc\xbe\xab\x03\xff\x73\x47\x9c\x03\x5c\xb1\xf8\x4a\x42\x1c\xdb\xeb\x8e\x6f\x55\x47\xdf\xa2\xfe\xd5\x06\xc1\x45\xe8\x35\xc5\x3a\x49\x4b\x63\xf1\x8a\x83\x17\x31\x6b\x37\x13\xeb\xb9\x20\xeb\x14\x58\x47\x7c\x98\x88\xc1\x43\x3d\x55\x45\xd4\x5b\x5e\x59\xe7\x2e\xdd\xc8\xa5\x9c\x92\xf7\x2c\x92\xbe\x5b\x98\x0a\x5a\x58\x2d\x5b\x8a\x88\xab\xd0\x03\x65\xcd\xf5\x51\x32\xf7\x6c\x86\x38\x8b\x50\x03\x8b\x9c\x01\x97\xb5\x1c\x98\x0d\xa8\x54\xb8\x90\xfd\xf3\x13\x6c\x46\x0f\xf8\x92\xd9\x52\x7b\xb0\x13\x58\xe4\x70\xb1\x34\xf8\x23\x68\xc9\x11\x8c\x57\x8c\x17\xe1\xbd\x55\xb8\x76\x5d\x8f\xa4\xf2\x2e\xa0\x39\x7e\xf4\xf9\xb7\x95\x88\xc9\x95\x59\x20\xb0\x73\x53\xc1\x7a\x71\x6c\x7e\x14\x69\x81\xc4\xd3\x7f\xca\xee\xe1\xc1\x7e\xc2\x3e\x04\xc1\x02\xa6\x5d\x2a\x0c\x5b\xe3\xc0\xe5\x2a\x8a\xe5\xa5\x65\x92\xf0\xff\x22\x08\xf1\x92\xa2\xe8\x4b\x54\xf5\x6f\x25\x81\xee\xb7\xee\x10\xff\x67\x28\x83\x3c\xa8\x89\x2a\x60\x8e\xee\xb7\x78\x17\xf4\x3b\xbb\xea\x24\xcb\xc2\x02\xdb\x58\x60\xcf\x8d\xf3\x33\x71\xb6\x7c\xd9\x7e\x93\x77\x68\xbf\xc9\xdf\x73\x1b\x34\xf9\xa5\x85\x86\x32\x34\xa4\x70\x15\xa0\x1e\x2a\x20\xfa\xf4\x26\x53\x67\xaa\x42\xfb\x21\x33\xe5\x55\x4e\xd8\x7a\xed\x1e\x9c\xcf\xc4\x35\xe7\x75\xb7\x5a\xee\x41\x71\x3e\x0c\x21\x16\xb1\xfb\xca\x7a\xdd\xda\x65\xca\x7f\x2a\xc3\xf9\xa1\x4d\xe5\x1c\x3b\x05\x55\x71\x76\x02\xa3\x93\x10\xb3\xac\x18\x41\x90\x54\x05\xf7\x4b\x3d\xb4\x66\x21\x48\x91\xe7\xf0\xad\x08\xc7\x76\x9d\xf6\x5c\x1b\xdb\x72\x8d\x55\x73\x3b\xb9\x3c\xbb\xef\xc4\xa5\x58\xee\x84\x13\xb8\x66\x71\x44\x58\x1d\x91\x42\x99\x9e\xa7\x38\x9a\xa9\xd9\x77\xbd\xbf\x0e\x5d\xf5\xcf\xaf\x73\x56\xbd\x78\x8a\x54\x05\x71\x87\xaf\xfd\x6b\xe2\xb5\x2f\xda\x72\xd3\x2c\x13\xaa\xa1\xfb\xa5\x80\xa5\xd9\xa6\x35\x3d\xba\xa3\x67\x04\x5e\xfc\x3c\x83\xd5\x3b\x9e\xe2\x80\x8c\x85\x88\x8b\xb1\xe8\x22\xa0\x14\xab\x1a\x6d\x70\x3e\x47\x37\x66\x3e\xc8\x37\x5c\x81\x71\x62\xe9\x95\xff\x70\x0a\xd2\x9f\xab\x72\xaa\x8c\xfa\x07\xa2\x35\x52\x6f\x5a\xf1\x90\xb5\x6d\x48\x92\xa6\x7f\xf2\x19\xb7\x29\x6b\xf3\xdb\x1e\xad\xdf\x0d\x57\x69\x8b\xfb\xa9\x29\x7b\x8e\x33\x91\x85\x08\x7f\x68\xe1\xa5\x62\xee\xa0\x67\xf6\xf1\x96\xa9\x55\xb5\xc1\x35\xb9\x87\x95\x22\x5d\x8f\x1a\xfd\xd4\xe3\x9c\x3f\x11\xc1\x92\x4f\x51\x0e\xeb\x2d\xeb\x16\x16\x07\x5e\x9c\xfa\xc4\x14\xd5\xa3\xd4\x75\x54\xb5\xf5\xdc\x78\xcc\xa2\x59\xfb\x43\xcc\xef\x47\xb8\x74\x82\xe9\xa8\x61\xc0\x03\xfd\x04\xff\x9d\x7b\x7c\xfb\xd7\x33\x2d\xc0\xfc\xb8\xe0\x21\x78\xe7\xcc\x88\x75\x57\x3a\xcd\xf9\xb1\x81\x05\x92\xc7\x57\x9e\xa4\xf1\xf9\xf9\x88\x15\x69\xfd\x40\x63\x28\x3f\xa2\xce\xfd\x40\x59\xe6\x97\x41\xfc\x68\x0c\x40\xb8\xd1\xa9\x8e\xfe\x61\xf3\x85\x68\xcf\x1e\x89\x43\xb6\x66\x7a\x67\x43\xeb\xd6\x68\xe5\x3f\x8c\xae\xa8\x57\x66\x29\xfe\xe5\xf3\x1f\xb2\xd9\xbe\xd9\xee\x5d\x7f\xde\xc2\xe0\x6b\x6d\x75\x5f\x8c\x38\x83\xcc\xb8\x3e\x50\x16\x41\xb4\x4c\x44\x20\xa3\x60\xef\xcf\x43\x53\x5d\xcf\xae\x75\xbe\xd2\x4b\x31\x8f\xcc\xd7\x21\x71\x60\xbd\x1c\xc9\x9d\x6e\x23\xe6\xcb\x60\x4e\x92\xa0\x21\xf5\x5e\x7b\xdf\x82\x03\xf9\x62\xfd\x97\x93\x25\x5b\x9e\x59
\x0e\xdd\x03\xe0\xd2\xa2\xcd\x0e\x55\xb3\x07\xaf\x1e\x13\x85\x3f\x54\x61\xfd\x87\xba\xe8\xdd\x1e\xea\x43\x95\xcd\xaa\xad\x75\x28\x7b\x7c\x58\xfa\x2c\x9f\xa4\x1a\x72\x00\xed\x86\xf9\xd6\xf4\x0b\x1c\x67\x19\x95\x6d\xf5\x23\x1d\xd1\x55\xe2\x27\xc3\x0c\xe7\x31\x0e\xc6\x87\xf9\x5b\x5b\x9d\x07\x15\x1a\x87\x29\x34\xba\x6c\x3c\x48\x80\xa3\xe7\x72\xf3\x9f\x92\xb2\x07\xd4\x7a\xb5\x65\x8a\xff\x9b\x4f\x91\xc1\xa1\x03\xb1\xa9\xb2\x7b\x8a\x59\xb9\x5f\xab\x58\x97\xaa\x19\x33\xef\xc2\xb0\xd3\x57\x14\xc1\x02\xfb\x21\xea\xd6\x2b\x2b\x80\x10\xaf\x7c\x0d\x3a\xe4\x9a\xda\xba\x09\xf0\x42\x8f\x7f\x0b\xfb\xe6\xe1\x0f\x5a\x73\xc8\xc3\x2c\xfe\x26\x4a\x81\xe4\x93\xa2\x0e\x2b\x1e\x69\x66\xaa\xd8\x96\x67\x7d\xf3\xc4\x3b\xb4\x51\x1e\xc9\x35\xad\xa2\x52\x5c\x55\x67\xd6\xa8\x69\xc0\xa8\xd9\xb7\x4a\x61\xc8\x96\x24\x4e\xdb\x98\x83\x9f\xe4\x98\xe3\xfe\xdd\x31\xdb\xd8\xd7\x5b\x29\x1b\xd4\x99\xff\x48\xa4\xa2\xea\xc1\x0a\x71\xc3\x61\xf4\x78\x74\x39\x2c\xeb\x4f\x49\x15\xf4\xb0\x67\x66\x71\x39\x35\x8f\x06\x20\xd8\x55\xc7\xeb\xcb\x4b\x3b\xf3\x99\x98\x55\x1f\xae\xc8\xde\xeb\x0b\xf5\x55\x7d\xf9\x8a\xa1\xaa\x1e\x04\x7a\x68\x94\xb8\x30\x59\x71\xdc\x85\x8e\xb0\x13\x9b\x86\xba\x73\x21\x95\x34\xc4\x84\xfb\x36\x2c\xee\x5d\x32\x43\x97\xbd\x97\x13\x30\x96\xbc\x2c\x3d\x74\x15\x58\xbf\xb5\x6e\x42\xac\xd0\x9e\xf2\xca\xc0\x75\xb0\x55\x60\x84\x60\x7a\xdb\x0c\x9f\x78\xa8\x7e\x3a\x3f\x5f\x30\x19\x99\xa3\x73\x50\x47\x6d\xc8\x4e\x3e\xe9\x7c\xe8\xb2\xdb\xd3\x2b\xbd\x9f\x3e\x84\x0d\x20\xe1\x2a\xb7\x72\x97\xde\x94\x97\x6c\xef\x9a\x65\x8a\xc6\x22\xae\xea\x94\xba\x08\x91\xc3\x88\x5a\xf7\x2d\xc1\x54\x19\xc4\x55\xb2\x46\x21\x17\x96\x7f\x82\x7f\xaa\x80\x6f\x28\x44\x5d\x45\x5e\x85\x34\xd8\x8f\x0a\xdd\xb9\x15\xba\x07\x28\x44\x29\x16\x1a\x00\xf6\x5f\xfb\xce\x64\x1e\x3f\x68\x3f\x1f\x6c\xb3\x7a\x7d\x94\xcf\xa7\xe8\x78\x47\x73\xb8\xdb\xf7\x24\xd5\xd8\xed\x00\x31\x47\x52\xb2\xdb\x11\x07\xc3\xb4\x59\x41\x66\xda\xa8\x4f\x61\x57\x6c\x11\x3b\x6c\x0b\x36\x29\xce\xae\xc1\x7f\x5d\xd7\xb6\xd1\xac\x1f\xd6\xf7\x73\x89\xeb\x61\x71\xba\xa5\xf3\xe9\x89\xe7\x26\xee\x69\xbd\x5c\x54\xca\xd3\xa6\x82\xba\x04\x5b\x2b\xe7\x23\x5d\xdd\xba\x14\x3a\xd8\x5d\xd4\xda\xf3\x5f\x1b\xbc\x24\xa2\x32\x68\xb0\x5c\x16\x79\xdf\x95\x14\x12\x5f\x4e\x80\x55\xb1\x3b\x32\x97\x16\x0d\xaa\x74\xab\x0e\x6c\x13\xd1\xae\x90\x1d\x9b\xae\x28\x5a\xbf\x69\x33\x57\xfe\xb9\x4f\x1e\x13\x2b\xb6\x60\xc9\xb9\x9c\x54\x64\x47\x8b\xc0\xcd\x60\xf6\xf0\x94\xb4\x0b\x2a\xdd\xe7\x2f\x07\x88\x5d\x69\xd2\x65\xdd\x4a\xae\xa8\x4d\x25\x62\xc8\x85\x38\xc7\x84\xa5\xc3\x3c\xd6\x97\x8d\x91\xe5\xec\x59\xed\xcc\x9f\xf1\x0b\xa0\x22\x7f\xb7\x0d\x71\x30\xf7\x66\x79\x3c\xb4\x22\xba\xd9\x1a\x4f\x2a\xc8\x42\x82\x4f\x22\xaf\x6e\xe1\xf6\xc3\x1a\xb9\x96\x8d\xb3\xd6\xfd\x49\xf9\x75\xe8\xf4\xbf\x2c\x58\xd4\x79\xc9\xbc\x5d\xbf\x14\x5a\x59\xed\xd9\xe8\x48\xe7\x98\xb3\x63\x5a\x3c\x4c\x75\x38\x25\xae\x9b\xde\xbb\x2e\x78\xaa\x9c\x9c\x59\xa0\xb2\x7f\x8a\x25\x1e\x7b\x1d\x1a\x3a\x1d\x19\xe6\x59\xfc\x20\x4e\x6c\x30\x93\x3a\x57\x4c\x3a\x09\x11\xe9\xd2\x1c\x42\xb3\x76\x05\x64\xa3\xe7\x82\x85\xf6\x1f\x6e\x95\xce\x47\x0b\xbd\xe1\x4e\x8e\x5b\x65\x8d\x1a\x0a\x68\xd0\x78\xeb\x3a\xa7\xae\x9e\xbf\xaa\x6e\x9f\xb9\x23\xcf\xdb\x69\xdf\xba\x32\xae\x11\x7b\x2b\x58\x05\xa4\xc3\x24\xfa\xcd\xa5\x8d\x37\x79\xee\xe8\xe5\x64\xbf\x07\x2f\xdd\x9e\x45\x72\x55\x82\xf1\x2e\xf1\xd0\x16\xb0\x0c\xbe\xd6\xa6\x83\x08\x9f\xed\x18\x5f\xdb\x2f\x19\x45\xd9\x73\x34\x80\xbf\x09\xc9\x85\xd0\x3d\x5a\xe6\x37\xf5\x3d\xd3\xb4\x06\xcc\x35\x0d\x65\x3d\x26\x9d\xc6\x30\x28\x35\x91\x7a\x0e\x6b\x52\xbb\x73\x09\xea\xbb\x64\xc7\x02\x72\xfd\xe3\x35\x75\x20\x88\x11\x65\xb6\x1a\x1f\x8c\x9b\x7a\x4f\xe0\x7e\xd
2\x6b\xbe\xc8\xa5\xf9\x40\x3c\x9a\xeb\x22\xd5\xf6\xf6\x27\x43\xbd\x14\x34\x65\xd9\xd1\xbd\xf5\xfc\xbf\x4e\xde\x0c\x3b\x84\x40\xf4\x2f\xd4\x43\xc3\x5d\xe0\x35\x13\xc5\x0d\x15\xd8\xfd\x87\xba\xb1\xa5\x36\xe1\xd8\x39\x0a\xe8\x68\x5e\x6e\x45\xa9\x85\x2c\xc2\xdb\x93\x72\x44\x7d\xdc\x21\x01\x94\x7e\x05\x4b\x30\x0c\xff\x5c\x37\x5e\xe5\x76\x96\xca\xc3\xbe\x2f\x2d\xa1\x75\x0b\xe8\x6c\x51\xa4\xc9\x06\x39\x1c\x62\xcd\x6e\x71\xb0\x10\xef\x44\xa1\x83\x65\xfd\x94\x5c\x94\xb5\x9c\x07\x6b\x26\xcf\xb2\x2e\xfa\x77\xcf\x59\xb8\x30\x7b\x16\xe6\x1b\xc3\x5a\xae\xb5\x91\x71\x8c\xc8\x9f\xa0\xb4\x85\xab\x1f\x3b\xb3\xe7\x3c\x48\x4d\x96\xbf\xf5\x89\x32\x23\x2b\xee\xe7\x40\xe6\x09\x5c\xa5\x78\x2c\x8c\x73\x1c\xed\x8e\x64\x94\xca\xed\xa7\x2a\xf8\xe4\xd5\x7e\xed\x01\xf7\xc1\x13\x41\x4f\x60\xad\x83\x79\x2b\xf8\x7b\xfd\xba\x23\x03\xba\x68\xcd\xd2\xe8\xbe\x7d\xf5\x68\xaf\xcc\xfd\xe2\x24\xe6\xae\x5b\x93\xd7\x42\x92\x97\x53\x72\x18\xfc\x69\x78\x0f\x8b\x3f\xbf\xd9\x08\xac\x0d\x8b\x66\xc4\x3e\xfe\x8a\xb2\x0b\x87\x84\x42\x5b\x3e\x1b\xa8\x70\x14\x78\x63\x64\x23\xd7\x3e\x58\x34\xa9\x37\x03\x6d\xcb\xaf\xc6\x6d\x2d\x39\x29\xb5\xc5\x7b\xf9\xbb\x84\xf1\x06\xac\x00\x7d\xa4\xe5\x15\xf3\x2b\xc4\x66\xd9\x4d\xdd\x50\xa5\xf4\x03\xe5\x7f\x2c\x64\xdf\x00\xcf\xfa\xf3\x79\x03\x04\xaa\x8a\x39\x6a\x21\x0c\x56\x49\xda\xa0\x8a\xae\x75\x1b\x92\x90\x2c\xd4\x07\x90\xf7\xb6\x17\xc3\xbf\x54\x13\x76\x27\xfd\xbf\xb4\x6a\x43\x98\x26\xfa\x70\xc6\xe5\x6a\x1b\x29\xf6\x95\x73\x68\xd6\x5d\xaf\xe0\x75\xd6\x28\x24\x7a\x71\xdc\xeb\x8c\x95\x3e\x87\x60\x04\x8f\x62\xab\xda\xa5\x3f\xe4\x2f\x58\x31\xb1\xb7\x66\x96\x6a\xb4\xf5\xeb\x46\x38\x35\x0a\xa9\x5d\xe4\x4e\x24\x5f\x2f\x8d\x47\x49\xdd\xfa\xa3\x2e\x6c\x03\x1b\x39\x05\x72\x0e\x1f\x2e\xd7\x28\x88\xdb\x46\x51\x4e\x45\xac\xde\x09\xe4\x61\x95\xf5\xed\x32\x86\x87\x1e\xae\xa8\xb5\x13\x21\x59\x7b\x47\x6d\xc0\xa6\x68\x9f\x59\x15\xe9\x42\x67\xb7\x2b\xdc\x05\xbb\x72\x7b\x1b\x73\x6d\x44\x95\x5e\x84\x95\x63\x36\x27\x00\xd7\xf5\x1b\xc6\xe9\xb0\x8d\x98\x19\x60\x48\xc4\x3a\x02\x62\x5f\x60\x94\x32\xff\x09\x13\x43\x92\xb5\x51\x4a\x0d\x9e\xb1\xcd\xb1\x5b\xfe\xf9\xb2\x47\xa4\x70\xa9\xfa\x77\x09\x6a\x31\xd2\x4a\xbe\xcc\x32\x89\xb0\x48\x96\x29\x57\xfe\x27\x08\x40\x0e\xcb\x45\xa0\xa1\x7d\xb9\xba\x68\xe6\xe5\x69\x5d\x16\xc2\xf3\xd8\x88\x7d\xd0\x63\xe5\xef\xcb\xc9\x86\x39\x2a\x80\x97\x51\x04\x70\x1b\x42\xbf\x9b\x59\xa0\xb5\x2e\x1d\xa3\xda\x48\xc4\x6b\x4b\x46\xfb\x27\x49\xde\x9e\x02\x34\x80\xaa\xaf\x25\x0f\x37\xed\x55\x15\xef\x3d\xf0\xb6\x4e\x31\x1e\xee\x45\x3b\xb4\x21\x85\x3b\x16\x46\xe1\x1a\xa3\xdf\xa5\xf9\x17\xe6\xb6\x67\xb2\xc9\xab\x88\x08\x97\x3b\x2a\x7f\x14\xe1\x61\xd0\xea\xf4\xf0\x43\xa7\x77\xe6\x33\x65\x7d\xe0\x20\x0b\x43\xd5\xcb\x4f\x49\x01\xbb\x76\xee\x57\x3c\x77\x91\x71\xe1\xd9\x87\x50\xee\x58\x39\xdc\xf7\x8a\xba\xbc\x11\x63\xea\x70\x25\x3d\x80\xe7\x9b\x4f\xcc\x59\x9f\xf2\x23\x8c\x5e\xff\xac\x4c\xe5\x9d\x35\x3c\x35\x4e\x3a\xfa\x63\xbc\xc6\x2d\xc9\x50\xb5\x65\x50\x54\xcf\xd0\x61\xb3\x80\x2c\xe3\xd0\xc7\x80\x17\x62\x57\x64\x8b\xf8\x96\xa9\xaf\x45\x81\x41\x02\x2b\x17\x93\x0a\x39\x21\x92\x46\x5c\xae\x8c\x59\x98\x17\xcb\x8b\x37\xd1\x60\x37\x47\xb1\xbf\x74\x22\x9e\x53\x73\xd6\xbf\x9e\x65\x72\x7e\x36\xfd\xdb\xb9\x39\x1a\xce\xbd\x88\x11\x67\x54\x80\xcf\xfc\x71\x08\x0b\x50\x89\x82\x02\xe4\xf9\x6e\xc6\x86\xfb\xc0\x55\x2e\x09\x06\x1b\xc3\xb9\x57\x10\x1b\x87\xa8\x6f\x4e\xaa\x9b\x5f\x2a\xc7\xdf\x93\x17\xd0\xc1\x6b\xd6\x4f\x4d\x9b\xd0\x03\x09\x63\x0c\x0f\x46\x55\xe2\xf1\x18\x40\xc6\xe3\xd6\x94\x41\xb1\x22\x76\xa5\xae\xdf\xe9\xf7\x03\x95\x78\x49\x10\x1f\x00\x8f\xa9\xa8\x5c\xcf\x81\xaa\xbc\x75\xdb\x58\x31\x3a\x6a\x54\x8c\xab\x28\xbd\x77\x08\xd1\x
9e\x9f\x77\x47\xb6\xa0\xd3\xa4\xfc\xb0\x80\x82\xbe\xa4\xbf\x6b\x27\xe5\x69\x99\xa3\x54\x32\x3a\x8f\x36\x67\x3e\x7c\x76\xeb\xbb\x16\xaf\x3c\xb0\x94\xcf\x77\xc8\x35\x7a\xfd\xcc\x66\xcd\xaf\x0e\xf8\x16\x73\x14\x72\xcc\x1a\x3f\x1d\x73\xa4\x64\x8f\x39\x0c\xef\x2d\x62\x71\x9e\x08\xbb\x63\x1e\xa9\xa7\x64\xbd\x92\x18\x94\xa8\xdb\x13\x40\x2e\xd6\x73\x54\xec\x27\x78\x60\x23\x7e\xac\x55\xb6\x9d\xc7\xc3\x06\x19\x2a\xcb\xb9\xba\x6f\x27\x9a\x75\xd0\xa1\x51\xd3\x17\x42\xc0\x9e\x0f\x9d\x8c\x5b\xdb\x69\x5d\x69\x6c\x69\xe1\x37\x0e\x0e\x97\x6d\x41\x52\x22\xc8\x55\xc1\x99\xde\xf7\xf7\xab\x71\xaa\x77\xda\x32\xa2\xc5\x90\x1e\xca\xd1\x20\x02\x83\x34\x67\x9d\x53\x7d\x29\x10\x85\xe5\x48\x02\x40\x48\x78\xf4\xb0\x71\xfb\xc6\x8d\x8b\x80\xa7\x30\x65\xb4\x4e\xa2\x13\x78\x60\x67\xe5\xee\xb0\x31\xb0\xdf\xb6\xf2\x49\xde\x75\x94\xd7\x4e\xfc\xbd\x8b\xff\x12\xf5\x69\x37\x92\xcf\x75\x51\x26\xc1\x75\xaa\x1c\x21\xa0\x9e\xb4\x2c\xac\x3d\xc1\xff\xc9\x7f\x42\xc9\xd4\xee\xdf\x09\x26\xe0\xfd\x44\xfd\xdf\xbb\x04\x04\x1c\xad\xed\x25\x5a\xb5\xbd\x74\x9c\xd2\x58\x47\xf6\x2a\x6a\x6a\xdc\xa6\x91\x7c\x67\xbd\xaf\x7b\x56\xf2\x66\xcf\x71\xd4\xa4\x67\x0a\x22\x1d\xd2\x0a\x4e\x2c\x97\xd7\x4b\x15\xed\x2a\x9d\x0f\x57\xf8\x3c\x19\xec\xc1\x90\xb6\x7e\x44\x25\x7a\x87\xb5\x4f\x94\x53\x2d\x52\xef\xb0\xa0\x0b\x2a\x35\x73\x2f\x5e\xc2\xcf\xdf\x7a\x7c\x55\x8d\x3e\xbf\x8f\xf0\x32\x2c\x54\xc0\xd9\x8a\x44\xb7\xb7\xa2\x1b\xb4\xd1\xc5\x66\x43\xeb\x1b\xe9\x3e\xfb\xf0\x2d\xca\xce\x67\xab\xda\x41\x29\x58\x72\xc0\x3b\x0d\x0d\xec\xe5\xf4\xf7\x74\x03\x0a\x99\x95\xfc\x25\x26\x16\x20\x50\x8b\x77\xb6\xe1\x2e\x90\x38\x6d\x4f\xa5\x34\x8e\x6d\x60\x55\xb4\x4e\xc5\xef\xbc\xee\x6f\xd6\xee\x07\x11\xa0\xa5\xc8\xbe\xf5\x4f\x52\xc5\x31\x5b\x3d\xa9\xc3\xef\x71\x68\xfa\xd6\x3e\x45\xa1\xbd\x95\xca\x97\x39\xe8\x57\xce\xb5\xde\xc5\xa3\x76\x27\x48\x8f\xc6\xb3\x10\x3d\xd0\xbb\x5d\x43\x93\xff\x5d\x53\xc0\x0a\xfa\x71\x7e\x46\xa9\x7f\xb4\x91\x44\x28\x01\xf7\x35\x0a\x65\x1e\x2b\x38\xdf\x00\xfb\x31\x2e\x5f\x45\x5f\xeb\x31\x1b\x43\xdb\x7e\x56\xec\x96\x36\x6d\x60\x17\x4e\xf7\x04\xae\x89\x73\x96\xa3\xa6\xcd\x92\x55\x5a\xb7\x97\x97\xd3\xb0\x2d\x51\xfa\x45\x9e\x9c\xe9\x0a\x17\x49\x2f\xeb\x33\x6a\x0c\x70\x8c\xfa\x57\x56\x02\x11\x0b\xb5\xbc\x01\xa3\x24\x0d\x72\xc0\x13\x78\xad\xd2\x2c\xbc\xce\x06\xe7\x0a\x21\x0b\xb2\xd8\xdc\x83\x1a\x35\x28\xf5\xca\x41\x06\xda\x4d\xc4\xee\xa3\x80\x1f\xec\x70\x66\xda\xd7\x37\xe7\xf4\x28\xd5\x3b\x09\xcf\x63\xa7\x63\xaf\x75\x91\x69\x65\x15\x95\xd8\xda\xf2\x25\xc7\x00\x75\xad\x6e\xab\xa1\x61\xe5\x5a\x9d\x9e\x63\x0b\x15\xa4\xad\x77\x22\x95\x98\x72\xbf\xe4\x5a\xbf\x72\xd4\xfd\x43\xe0\x57\xa6\x8e\x6b\x71\xc7\x5b\x8f\x68\xe7\x0c\x14\x26\x39\xd6\x4f\x95\xb1\x67\xf9\xc2\xac\x52\xf1\x41\x02\x40\x45\x6c\xf8\x65\xbe\x7e\x1a\x3e\x40\xf0\x00\xd1\x95\x67\x9c\xa7\x28\xd4\xfe\x73\xf2\xb5\xcf\xe1\x22\x02\x99\x71\x12\xa8\xf1\xd2\x91\x04\x4d\x02\xdc\x1a\x45\xd5\x15\x9c\xa6\x9d\xc1\xb6\xd4\x89\xf4\xe9\x38\x56\x5e\xb8\x53\xe0\x84\x6d\x58\x7f\xfd\xf1\xd0\x64\x21\x1d\x52\x33\xd2\x81\xe1\xe7\x71\x8a\x82\xc6\xea\x88\x45\x95\x21\xfd\xc3\x7e\x85\x5e\xcc\xea\xd9\x47\x15\xf6\xcf\x22\x75\x65\xb4\xcd\x39\xa2\xfe\x2b\x20\x7c\xe4\xf7\x29\x92\x2f\xa4\xfa\xce\xa0\x4c\x8b\xdb\x2d\x54\x9a\x1d\x5b\x87\x79\x00\x73\x58\xe6\x20\x4a\x27\x51\xb2\x21\x2b\x48\xe8\x81\xeb\x47\x79\xb3\xb2\xbc\xa5\xff\x0b\x15\x61\x31\x8f\xc1\xf7\xf6\x5a\x73\xcd\x7c\x87\x9d\x24\xdd\x74\x80\x91\xc3\x0d\xc8\x0a\x3f\x10\x00\x20\xec\xc7\xe2\xb5\x44\xdf\x9f\x12\x70\xb6\xe8\x3a\x7e\x93\xe5\xd9\xb4\x9c\xe2\x11\x2c\x25\x30\x31\x16\xa5\xe5\x4b\xae\xbe\xe8\x9d\x59\x0e\x0e\x64\x96\xb2\x2f\x85\xd0\x80\x7c\x08\x60\xb9\x34\x89\x1e\x44\x3f\xce\xbf\
x76\x1c\x07\x14\x8c\x1d\x88\xeb\x44\x6c\xdf\x1d\x19\x64\xd5\xfe\x77\x7e\x03\x39\x3e\x0f\x12\x7d\x42\xe7\x56\x43\x5c\xc8\xa1\xc0\x38\x38\x4a\xfa\xe7\x12\x35\x4d\x8b\xfb\x49\xf3\x92\x25\x06\x54\x78\x31\xf9\x8b\x71\x0d\xf4\xe2\x2f\x21\x16\x60\x81\xd0\x0d\xa8\xcd\xc8\xea\x34\x2c\x5f\x17\x48\xc8\x24\x05\x3e\xb2\x70\x88\x00\x4b\xfd\x67\xe6\xb8\x65\xc9\x8d\xf0\x1d\x2f\x06\x1a\x52\xcc\x5b\x5e\x3f\xc2\x0b\x08\x2e\xb0\x46\x4a\x76\xc9\x1a\x34\x81\x0b\x2e\x11\xe6\xca\xe1\xcd\x22\x09\x80\x25\xd8\x45\x8b\x94\x1d\x17\x66\xea\xac\x71\xf8\x94\x3f\x3a\xcd\xfd\x67\x0b\x19\x3e\x00\x07\xfc\xc6\xfd\x96\x33\xca\xa9\x36\x15\xf4\x44\xd7\xaf\x4d\x28\x73\x6c\x97\x01\x9f\x3e\x29\xdd\x7c\x4a\x4d\xee\xd7\xc6\xa6\xc3\x27\x70\x25\xb0\x06\x72\x77\x51\xf9\x7f\xd9\x3c\xd6\x7d\xa4\x9c\x5a\xbe\x39\x59\xcb\x09\xe7\x12\x39\x95\x46\xf0\xbb\x09\xfa\xbe\xd5\xd0\x4c\xef\xe0\xd8\x4f\x50\xd0\xf5\xe5\x1e\x92\xce\x91\xfa\x98\xaa\x2b\x74\x6b\x8f\x02\x00\x9f\x9c\x4b\x6f\xc1\x05\xc4\xb2\x26\xfc\x61\x2a\x02\x1e\x4c\x2b\xc1\x08\x83\xe4\x9b\x6d\x1c\xc1\x3c\xd8\x34\xff\xfb\x8a\x85\x4e\x36\xc3\x76\xd7\x16\x0b\xbc\xee\x03\x59\x37\x02\x8d\x26\xd8\x15\x84\x2d\x01\xca\x1b\x61\x62\x3a\x46\x59\x3e\x9f\x5f\xde\xf9\x99\x04\x50\x80\x6e\x80\xa6\xb3\xfc\xb0\x70\x5b\xc9\x01\xca\xb1\x54\x0e\xaa\xf9\xaf\x7b\xe9\x18\x0e\xa5\xf3\x25\x7c\xc1\xdc\x38\xf1\x74\xa9\x9d\x32\xf5\x14\x26\x9f\x5a\xfa\x9a\xf4\xb2\x19\x1b\xcd\x57\xd1\xb4\x49\x64\xb9\x86\x02\xb3\xcd\xff\xc9\x2f\x0f\x9b\xb1\x89\xb6\x8e\x13\xdf\x1d\x64\x9a\x88\x7c\xb7\x48\xe7\x94\xc8\x1b\xb7\xa5\x68\xe8\xdb\xe9\xf3\x1c\xa8\x32\x37\x68\x03\x21\xa9\x8e\x50\xe8\xaa\x58\xee\x1d\x29\x30\x76\xc7\x3f\x03\x3d\x4b\x09\x59\x1b\x6b\x02\x29\x94\xe9\xd5\xf1\x6c\x62\x65\xa3\xcd\xce\xfc\x30\x6e\x53\xaa\x4c\x9d\x13\xd1\x7d\x19\x12\xdc\x22\xa4\x4b\x9a\x39\x7d\x98\xe8\xb4\x48\xb5\x60\x8b\x82\xa2\x3c\x86\x71\x61\xe8\x2e\x0b\xf5\x60\xcd\xb5\x9f\x59\x99\xf8\xf9\xf1\xd8\x25\xcf\x2c\x3b\xac\x71\xbe\x45\x5e\x0f\xf7\x09\xe0\x22\xde\xe1\x5c\x6a\xaf\x66\x09\x5f\xd3\xc6\x91\x03\x5c\xc2\xff\xe9\x0c\x90\x2a\x4f\x6f\xce\x5d\xec\x2e\xe8\x02\x0a\xe9\xbe\x3c\xed\xb7\xfa\xf4\x1b\x79\xef\x3d\xd0\x13\x9a\xf7\x8c\x59\xed\xf5\xc8\xe2\xd5\x88\x11\x4b\x90\xd5\x6f\x4a\x6e\x00\x37\x11\x66\xa9\x1c\x60\x80\xda\xae\x23\x21\xfb\x28\xcc\x05\x06\x9b\x52\x1a\x1a\xd3\xa6\x63\x6c\x9a\x4a\x8e\x2e\x44\xe4\x01\xe4\x7b\x44\xa4\x4f\x0e\x21\xa8\x85\x4c\x75\xcf\x97\xbd\x04\xfe\x7a\xd8\x24\x80\x1c\x71\xeb\x2f\x58\x2f\x0b\xe5\xdd\x49\x00\x87\x72\x6a\x68\x4c\x5d\x0f\x79\x2e\x3e\x30\xa2\x78\x47\xe0\xcb\xcc\x09\xeb\x23\x54\x66\x1e\x71\xb1\x1f\x48\x61\x2b\x1d\xfc\x98\xed\x66\xdf\xfc\x7a\xa4\x08\xe7\x63\x6e\x10\x89\x39\xf0\x6b\x0f\x55\x5b\x1e\xe9\x25\x17\xd6\xf4\xea\x28\x19\x02\x02\xbb\xd4\xa6\xc3\x23\x0c\xa8\x88\x91\x80\x0a\xeb\x05\x9a\x07\x6c\x9a\x66\x22\x25\x3a\x39\x90\xe5\x67\x78\xe1\xe6\x7a\xf3\x2d\xcc\xd7\x14\x0f\x6c\x96\x15\x61\x3e\x94\x1f\xcf\x10\x51\x0b\x49\x62\xa4\x52\x56\xc9\x43\xe5\xed\x7a\xa9\x8c\xd2\xe0\x46\x8e\x96\xe5\x30\x36\xab\xa3\xc9\x0f\x6f\x73\x72\xf3\xd5\x42\x99\x92\xb7\x2c\xf7\x02\x67\xf4\xf0\x46\xa6\x04\x58\xee\x02\x37\x93\xd3\x39\x65\xf1\xa8\x05\x03\x1b\xde\x2a\xa8\x0c\x6f\x14\x5e\x11\x40\xb4\x91\xb5\xf9\x3a\x11\xde\x60\xef\x30\x39\xf0\xb5\x3b\x03\xe1\xc0\x11\x39\xdc\x58\x45\x98\x7f\x73\xf1\x12\xc1\xfd\xcc\xcd\x71\xf5\x99\x08\x7e\x38\x75\x01\xa1\x21\x4d\x85\xe5\xb2\xf3\xfd\x71\x2e\xbd\x52\x89\x43\xa1\x34\x74\xc9\x37\x9b\xe8\xa1\x10\x84\x66\xdd\x4f\x27\x29\xe8\x66\x8f\x0f\x54\x6b\x42\xf3\xff\xfe\xfb\xc1\x63\xb1\xe6\x75\x16\x73\x8e\x52\x00\x1e\xf2\x31\x8b\x7b\x2f\x2c\xfe\x80\xe7\x32\x09\x72\xe1\x3a\xd3\x24\xcf\xef\x41\x78\xdf\xde\xe2\xb4\x37\xe6\x3c
\xdc\x6d\x08\x65\x58\xe7\x60\xf2\xdb\x10\xb2\x12\x19\x31\x33\xdf\x3b\x64\xb9\x62\x0e\xd9\xa5\x76\x15\x52\x90\xf6\xc7\xf1\x0a\xa8\xf5\x1d\x6c\x02\x07\x3c\x77\x42\x2e\xc8\x00\xd7\x1a\x5a\xc9\xe2\xc3\xe3\x95\x6c\x34\x9b\x01\x84\xed\xbc\xbd\x64\xd0\xd6\x66\xd4\xb0\x0d\x60\x28\xb0\x7e\x5c\x9d\xb0\xce\x70\x3b\xae\xaf\xcd\x2b\x89\xf3\xa2\xc1\xf6\x67\xfd\x48\xa3\xda\x46\x5d\xf2\x7a\xe9\xcf\x39\xee\x88\x85\x1b\x97\x59\x48\x25\x8b\xb4\x4c\xef\x90\xe5\x23\xd0\x41\x06\xad\x6f\xca\x53\xd7\xd3\x0d\x59\x33\x3f\x3c\x12\xbf\x3c\x9d\x45\x52\x18\x1f\x1c\xd2\xd8\xbc\x2d\xa0\x31\x7d\xa1\x58\x6c\xcf\xd9\x03\x64\xab\xfd\x39\xe8\x51\x09\xdb\x03\xb2\xf0\x54\xd9\xc6\xc2\xf0\xcb\x06\x6c\x01\xb9\xaa\x3d\x90\x0b\x30\xc5\xfd\x83\x43\xe0\xa4\x15\x67\x44\x06\x73\x3f\x85\x8d\x54\xaf\x17\xd0\xf1\xde\x3b\x8b\xd1\xee\xfe\xba\x33\x4b\xe3\x40\x01\xe1\x03\xce\xca\x9a\x50\xef\x77\x4b\x65\xfe\xa3\xa9\x59\x7b\x96\x86\xe5\x79\xdb\x99\xb5\xf8\xbd\xd7\x76\x4c\x41\x73\xc3\x35\x76\x66\xb9\xda\x86\x3f\xe1\x5d\x5a\xcf\x4e\x33\x8a\xae\xae\x0f\x55\xd7\xe9\x81\x5a\x87\xef\x7c\xc7\xc5\xa6\x59\x23\x57\x51\xde\xc5\xc6\x55\x51\x2f\x83\x35\xde\x8d\x94\xde\x2f\x4d\xa1\x7a\x1e\x9b\x9b\xab\x3c\x4a\xec\xa5\x6e\x52\xd5\x7c\x12\xbb\xb3\xd9\xb1\xd6\x7b\x1e\x49\xb9\x3f\xa3\x44\x7c\xd2\x03\xb1\x49\x22\x43\xec\xdf\x8f\x5d\x84\xa0\xeb\x6c\x28\x29\x8e\x7d\xa1\x88\x76\xd1\xe0\xae\x53\xeb\x8e\x1c\x6e\x24\xdc\x1c\x45\x11\x37\xdf\x3e\xf0\xbf\x95\x66\x9e\xaa\x79\x4d\x57\x68\xfb\xd4\xc1\x62\x5c\xff\xb9\xf8\x58\xca\x23\x6b\x3f\x68\xdd\x0a\x49\x2f\xd6\xec\xa3\xd3\xb7\x48\x06\x37\xac\xd5\x33\x61\x05\xa9\xeb\xa8\xda\xfb\xb5\x9d\x9b\x02\x34\x87\xa0\xd7\x4a\x7a\x7b\x1a\xe7\x22\x02\xba\x06\x8f\x18\x59\xaa\xf6\xec\x0a\xd8\xca\xab\x39\xc9\x5d\xb5\x7c\x49\x65\x50\x04\xfb\x87\x25\xfd\x28\xce\xef\x03\xc6\x92\x1e\xae\xb7\x0d\x70\x78\x9c\x20\x2b\x76\x5f\xdb\xd8\x3f\x30\xc9\x2e\x8f\x84\x06\xa4\x37\xeb\xe3\x9d\x75\xf0\xce\x86\x0f\x30\x0b\x04\xaf\xf5\x4e\xa8\x81\x11\xe3\xbe\xb5\x36\x5e\xcf\x2f\x52\x2f\x4f\xe7\x92\x54\xe4\x97\x79\x6d\x0a\xae\xbe\x8c\x50\x3b\x97\x69\xf0\x6b\xea\xb8\x92\x60\xd4\x87\xce\x36\x2d\x9e\xba\x06\x8c\x00\x83\x86\x47\xb4\x3e\x9e\x5d\x75\x87\x2e\xf2\x74\xd3\xd6\x37\x2d\x50\x43\xf2\x0e\xda\xd7\x58\xbc\x44\x8b\x87\x0c\xf2\x8f\x83\x05\x1a\x99\x25\xb9\x17\x9f\x98\xe9\x55\x7a\x30\x90\x5c\xf1\x05\xc6\x11\xaa\xa3\x87\xef\xa9\x5d\x00\x8e\x85\x5d\x48\x3e\xe6\x0d\x18\x43\xfa\x7f\x3e\x77\xb9\xe9\x41\x67\x77\x24\x32\x25\xf1\x71\x44\xb9\x83\xdb\xf4\x60\xf1\x72\x76\x9e\x1c\x3d\x4d\xfa\x7d\x58\x42\x03\x6a\xc0\xa3\x6d\xef\x7f\xbe\x98\x03\x6f\xc0\xd7\x19\xe8\x08\x19\xd6\x1e\x0d\x36\xc0\xe5\x4c\xea\x69\x9a\xf5\x6c\xa4\xf1\xb6\x3e\xe4\xf0\xc8\x6e\x2b\x87\x12\xc8\xef\xf8\x12\x8e\x89\x7e\x60\xf8\x5a\xc9\xa2\x56\x4b\x20\x72\xfc\xda\x27\x3e\x5b\x42\x6e\x42\x64\x3b\x88\xf8\x23\xd9\xfd\x7a\x01\x3e\xae\xd2\xb4\x2a\x22\x70\x60\xfb\xf8\x73\xf0\x61\xf2\xf5\x5d\x4f\xa7\x0d\xd9\x92\xd7\xd9\x9e\xb2\x85\x3d\xe8\x3b\xab\x9c\xfd\xce\x29\x40\x05\xe7\x17\x5d\xe0\x8b\x89\x0b\x76\x5a\xb6\x74\x5b\x09\xf4\x51\x4d\xfd\xd0\x55\x71\x14\x7c\xeb\x67\xaf\xf2\x7b\xf7\xd4\x5a\x7f\xed\x2e\xcf\x46\xe6\xfe\x2c\xed\xee\x5d\xb4\x65\x8b\xe4\x1b\x3b\x87\xbe\xb6\x17\x5f\x9c\x45\xef\x0f\x1b\x01\x0a\x28\x93\xf9\x95\x79\x21\x23\x41\x2c\x02\x26\x66\x12\xe7\xde\x6f\x3d\x01\x2f\xa7\x65\xff\xd0\xb0\x96\xae\x2d\xfc\x3f\x6d\x68\xfd\xca\xb1\x26\xf9\x4c\xec\xe5\xaf\x19\xc5\xbc\x1b\xd2\xe3\x95\x54\xa7\x7a\xa5\xc6\x14\x02\x0a\xc1\x2b\xfa\xd6\x2a\x90\xa8\xbf\xb2\x5a\x42\x79\xee\x70\xa7\x85\xcf\x79\xd8\xd6\xf2\x3f\x41\x2f\x50\xd6\xb4\x73\x62\xfe\xea\x89\x5f\xb1\x08\x80\x00\xa2\xf3\xc8\x90\xc0\x83\x73\x2a\xd2\x16\x7c\xa2\xf0\xa4\xa
3\x39\x0c\xc1\x37\x88\xa2\xe2\x13\x5a\x52\x00\x3a\x30\xeb\x0b\xe3\x5b\x5f\xd8\xcc\x4d\x4f\xa2\xc5\xcf\x52\xce\x49\xfa\xde\xfb\x24\xfc\xba\x6b\x04\xbc\x03\xb2\x60\x73\x91\x52\xa4\xf7\xed\x48\x1b\xc9\x41\x3d\xc3\x54\x13\x58\x53\x29\x8b\x43\xb1\xc4\x96\x35\x14\x8d\xea\x79\x39\xda\xa9\xa2\x46\xe6\xc1\xa1\x9c\xca\x5d\x05\x7f\xbd\x83\x5a\x67\xc1\xde\x69\xa8\x68\xb1\xdf\xf7\x1b\xf2\x35\x8e\x1d\xe0\xb4\xf1\x46\xd5\x90\xdd\xa5\x8d\x81\x8f\x4c\x91\x80\x12\x39\x68\xd8\xe8\x36\xf3\xdc\x46\xa0\x77\xbc\x19\x64\x46\x7f\xa4\x3b\xee\x4f\x08\x64\xc7\x6b\xfe\x09\xfd\x71\x04\xe5\x22\xac\x21\x39\xac\x20\x6a\x67\xb7\x8b\x1f\x13\xe6\xb0\x95\xdb\x1b\xd7\x7b\x16\x0d\xc7\x66\x5b\xfc\x6a\x0a\x87\xe6\xdb\x39\x06\x42\x12\xdf\x53\x16\x80\x21\xd2\x5f\x77\x5e\xea\xea\x1d\xda\x9d\x7f\x39\x0a\xb9\x95\x36\x93\x40\x80\xaf\x4a\xe7\xc1\x7f\xee\xe3\x3b\xbe\x4b\x30\xe8\x95\x5e\xc6\xf5\x71\x50\xdd\xf5\xb6\x43\x5d\x54\x02\x40\x02\x55\x86\xbc\x39\xb5\xfe\xe3\x72\xd6\xf6\xa2\x28\x56\x59\x13\xaa\xca\x79\xf5\x9f\x7c\x55\xaf\xf7\x7b\xe1\xb7\x52\xfd\x84\x58\x88\x61\xc8\x12\x21\xaf\x40\x34\x29\x54\x5e\x18\xf8\x06\x91\xf9\xad\x71\xfc\xf2\xfd\x0b\x07\xfe\x16\x08\xd0\x60\x8f\xc9\x24\x9c\xc4\x1c\x76\xb2\xb7\x3a\xfe\x0b\x4a\xee\xea\x05\xae\xdb\x26\x7b\x55\xd0\x07\x29\x86\xc3\x96\x42\x58\x87\x3e\xc6\xb7\x97\x74\x8b\xae\x5b\x19\x06\x0b\x94\xbb\xbd\x5c\xe7\x9d\x05\x7d\xf4\x5c\xf6\x12\xb0\xc4\xfd\x9f\x1b\x1c\xd0\x43\xf8\x38\x9f\xd2\x53\x87\x09\x69\x44\x3e\xf2\xb9\x8e\xa2\x5a\xfa\xc1\x79\xe8\x75\xb8\x42\xa8\x90\x14\xb9\xa9\x07\x90\x49\x86\x25\x53\x77\xd0\x11\x18\xb9\x1c\xf7\x1a\xda\x01\xd7\xd7\x4e\xf8\xe4\x8d\x2b\x9a\xd4\x21\x2b\xec\xb8\xa5\x0b\x0d\x23\x61\x07\x85\x33\xe6\x6b\xd7\xc1\xb7\x33\x2f\x64\xb5\x6f\x85\xed\x81\x8d\x4d\x42\x53\x00\x1c\x3c\x26\xe5\xed\x85\x24\xc5\x1f\x72\x9e\x0b\x83\x00\x16\x2c\xb3\x04\xe0\x97\xec\xa9\x1d\x94\xb4\x88\xcd\xb0\xe6\xf1\x62\x10\x2c\xfd\x39\x7e\xa7\x84\x62\xaf\x79\x13\x26\x66\xce\xe3\x1f\x11\x78\x39\x0d\xdb\x00\xbe\x51\xa5\xe0\xf2\x71\x84\x00\x7c\x49\x42\x73\xa0\x04\x1e\x70\x8e\x8d\xf8\xe8\x4b\xe2\xe2\xd7\x17\xe4\x30\xe5\x2d\xbc\x91\xa7\xac\xde\xef\xca\x1a\xa1\x5f\x98\x79\xf1\x44\x51\xe6\x22\x8c\xc3\xde\x20\x5d\x55\x81\xa4\xae\xdc\xf2\x2c\x2e\x60\x70\x46\xa0\x2c\x81\xc3\x38\x7e\x84\xbf\xc8\x57\xd3\x7b\x5f\x9b\x0a\x7c\x13\x44\x02\xe6\xa2\xfb\xca\x16\xa4\x80\x5b\x1c\x94\xe8\x00\x8e\x42\xcb\xd8\x21\x55\x21\x40\xfc\xa2\xd3\xbe\xd9\x3a\xf8\x59\x9a\xed\xc8\x37\x49\xd1\xfd\xf0\xd6\x9c\xd7\x21\xed\x5b\x58\x71\x59\xbc\xd6\x39\xa4\x0d\x6e\x2d\xc3\x84\x00\xee\x66\x5c\x47\x5d\xfd\xb4\x72\xf5\xc2\x6c\x3f\x54\x33\xfd\x12\xa4\xbe\x51\xf0\x60\x7c\x58\x37\x05\xb3\x96\x9e\x3b\x38\x4b\x50\x82\x4e\x6a\x72\xc1\x47\x40\x6f\x3e\x30\x14\x4d\xc0\x20\xf4\xeb\xe5\x62\xdc\xcb\xc5\x38\xe9\x71\xf6\x70\x6e\x2b\xb9\x6a\x0e\x3f\x64\xd9\x18\xd3\xea\x00\xca\x58\x7f\xcd\x92\xc1\x07\xf8\xa1\x34\x82\x30\xf1\xee\x32\xf4\x62\x20\x17\x74\x69\xc2\xa1\xc2\x10\x81\x6c\x98\x21\xbb\x42\x14\x46\x97\x2a\x2d\x90\x69\x64\x04\x55\x2d\xbb\x25\x0c\x25\xa5\x7b\x5a\x37\xaf\x94\x2c\x42\xb9\x1f\x4b\x79\xc5\xda\x2c\xf9\x20\xa7\x4b\x82\x08\x3b\xaf\x9a\x80\x64\x2e\xc5\x25\xf4\x85\xf6\xf8\x8d\x49\x18\x10\xfd\x42\xd5\x9d\x54\x5c\x75\xf3\x63\xff\x07\x89\xf2\xb9\x48\x86\xef\x9e\x31\x3a\x44\xb6\x3c\xea\xad\xde\xef\x60\x42\xe3\xc0\x45\xf2\x7a\xbf\xfd\x9c\x37\x83\xdc\xf4\x0f\xee\x00\x56\x74\x04\x49\xbc\x85\xa0\x78\x45\xc1\xd7\xc1\x14\x34\x5f\x3a\x50\x5a\xba\x25\x77\x20\x3e\xff\x51\x34\x25\x3d\xa0\xa1\xec\x4b\x02\xf1\xc1\x50\x65\x95\xe3\x98\x05\xa9\xd8\x35\xd0\x3a\xe0\x2c\x2d\x0c\xc5\x4a\xdc\xfd\xa1\x96\xda\x96\x49\xfa\xf3\x23\xd5\xb1\x80\xae\x68\xe5\x03\x48\xcf\x6f\x21\x53\x07\x
b4\x85\xee\x2e\xf4\x11\x84\xe0\xb0\x77\xde\x2d\x53\x8f\x4c\x2d\x9f\xe3\xef\xee\x70\x49\xc6\x50\x85\x40\x0b\x81\x18\x60\x02\x16\x50\x8a\x43\x46\x66\x88\x74\xc0\xbc\x3f\xc2\x8a\xd9\xc6\x09\xf7\xac\xef\x63\x22\x75\x64\x2f\x7f\xfd\x10\x3b\xd1\xac\x8f\x6b\x08\xce\x23\x0b\x4a\x08\xc4\x45\x29\x87\x44\x3d\x2f\x17\x51\x60\x4d\x0e\x32\x2a\x0e\x44\xb1\x9e\x86\x57\x21\xf5\xed\x40\xb0\x73\x70\xcc\x83\x20\xfa\x58\xf2\xce\x94\xaa\xb6\xb4\x5c\x5f\x91\xf9\x2f\x92\x13\x4a\xb5\xbb\x38\x84\x65\x62\x6e\xbe\x50\x1d\x1a\xd1\x50\x5d\xea\x05\xf2\x09\xd2\x4a\x78\xb1\xb8\x08\xd4\xc4\x5a\xa5\xa8\xd0\x69\xfa\x63\x91\x57\x6a\xf6\x39\x66\x4d\x3b\xf2\x9e\x7a\x46\xf6\xb9\x13\xab\x62\xcf\xaf\x2e\x52\x71\x7b\xde\x88\x36\xdc\x33\x4a\xff\x81\x99\x78\x12\x7e\x81\x0c\x88\xb7\x1b\x16\xc5\xbf\x73\x38\x89\xf7\x88\x7b\x5a\x88\x7f\xb2\x20\x39\x3c\xc3\xe3\x66\x7e\x6c\xd3\xb7\x93\xa9\xd5\xad\x52\x43\xa0\x2a\xe7\xb1\x41\x32\xbf\xb1\x62\x70\x38\xd1\xe4\x8b\xf4\x9b\xb6\xa2\x0a\x52\xb3\x58\x26\x86\x62\xbf\xac\x35\x9d\x85\xb7\x50\xfb\x62\xd1\x6f\xe6\x52\x84\xed\xcd\xde\x2d\x61\x2e\x24\xcb\xb7\x15\x8e\xac\x80\xaf\xf0\xd9\xc6\x26\x19\x52\x2c\x57\x29\xd3\x53\xe0\x70\x0b\xc8\xd1\x36\xaf\xc2\x2b\x00\x4c\x44\xa1\x7d\xbf\x9e\x3c\x31\x4c\x3a\xbe\x2e\xbf\xf9\x82\x8c\x3f\x41\x15\x6e\x6e\x46\xa5\x83\xb0\xea\x71\x18\x06\x71\x15\x02\x98\x84\xb1\x2d\x3c\x9b\x2f\x2d\xfd\x57\xe4\x6d\x2b\x01\xca\xa0\xfe\xe4\x96\x03\x20\xb1\xff\x91\x3a\x40\x16\x4d\x18\x8c\x2f\xc4\x02\x05\x6f\x1a\x1f\xaf\xca\x66\x4a\xed\xfe\xad\x50\xba\xf4\x0b\xb8\x02\xda\x7d\xbe\x92\xa3\xd7\x2c\xe2\x0e\x63\x9d\x25\x7f\xe0\xa2\x00\x84\x6c\x3c\x52\xd3\x6b\xa5\x82\x42\x88\x32\xe4\xf5\x4c\x4f\x1e\x1e\x59\x5c\x3a\xcc\xcb\x83\x99\x91\xf8\x49\xef\xff\xf9\x14\x18\x8d\xaf\xb5\xdc\xd6\x77\xb9\x69\x2b\x10\x1c\x71\xbf\x09\x2e\xb1\x0e\x75\x9a\x73\x68\x26\xdc\x57\x65\x82\x7f\x83\xe8\x4b\x40\x35\xa6\xb7\x36\xb5\x07\x94\x2e\x2c\x5b\x12\x1b\xd1\x02\x69\x75\x24\x79\x20\x79\x97\x2b\x80\x06\x40\xcd\xfc\xd2\x22\x65\x75\x80\xdb\xc8\x94\xe7\xdd\xe0\x1a\x3d\x72\x27\x4c\x47\x19\xb4\x23\xef\xa5\x8c\xf2\x67\x08\x6b\x2a\xf4\x83\xb1\xc3\x1c\x9b\x49\xe9\x65\xb3\x01\x6e\xc0\x3b\x56\xaa\x01\x6c\x89\xe9\xe3\x0d\x09\x95\x4f\x6e\xb2\x0f\x33\x94\x51\x18\x82\xef\xfd\xeb\xd1\x28\x5c\xfd\x26\x99\xa3\x0d\xa0\x4c\x21\x50\xfa\xad\xa9\x55\x58\x2b\xd1\x84\x1c\x9a\x10\xc3\x3b\xa4\xe1\x2b\x51\xd5\x6b\x60\xb9\x56\xd0\x64\x7c\xb9\x7f\x88\xdb\x40\xf3\x24\x48\xf3\x5a\x1e\x77\x08\x8e\xaf\x25\x6b\xfa\xb4\x12\x47\xb9\x12\xff\x81\x68\xfe\x70\x5c\xb2\xba\xde\x88\xb0\x1c\x57\xbc\x20\xf0\x09\x6f\x01\x4f\xd8\x22\x8e\x80\x21\x19\x21\x34\x48\x08\x8a\xaf\xd0\xc9\x15\x52\x65\x26\x85\x7c\x9d\x25\xcd\xdf\x54\x1c\x6c\x28\xc0\x46\x7f\x9d\x9b\x19\xb3\xb3\xc5\x42\x14\x22\x4b\x46\xff\xaa\x9c\x4c\x00\xcf\x2d\x60\x87\x2c\xc0\xd6\x2c\x21\xeb\x95\xc6\x90\x74\x4d\x58\xf3\x10\xd2\xfa\x36\x13\xeb\xa1\xba\xea\x71\x55\x39\x91\x51\x13\x54\xb0\x2e\x73\x94\x72\xfd\x4e\x68\xba\x63\x36\x44\x49\x75\x2f\xe9\x5d\xb9\xe7\x35\x35\xfb\x69\x64\x5c\xf5\x3d\x99\x4e\xae\x84\x1f\xb8\x10\xbb\x73\xed\xdf\x86\x6b\x80\xdd\x72\x5a\x1a\xa4\xbe\x09\x43\xc8\x6e\x61\x79\x7b\xf6\xd0\x6b\xfc\x6f\xfe\xb4\xa5\xba\xe2\x8c\x47\x42\xe7\x2f\x62\x72\x2d\xb5\xff\x6a\x3d\x00\xae\xe2\x91\x0d\x41\x24\xbe\x80\x53\xc7\xb2\x7c\xc8\xe7\x86\x1c\x7e\x3f\x6b\x1d\xb4\x6e\x67\x45\x5c\x10\x0d\x02\xa6\xc2\xca\xa1\xa0\xc5\xd6\x7f\xf3\xa8\x7b\x61\x27\xbb\xec\x89\xf8\x8f\x0d\xb0\x13\x22\x32\xac\x09\x11\x14\x04\x9a\xb7\xc8\xf5\x71\x53\x18\x1a\xc4\x17\x59\xab\xd2\xdc\x09\x52\x71\xdf\x81\xda\x68\x06\xc8\xcb\x97\x5a\xbe\x34\x24\xd1\x02\x97\xfe\x29\xa0\x1d\xc2\x93\x83\xc2\xcd\xd3\x8f\xc7\xc8\xda\xc4\x99\x2b\xe6\x93\x9f\xb3\
[hex-escaped binary payload omitted: several kilobytes of non-text data from the vendored-file diff, not reproducible as readable content]
0\x4d\x97\x88\xc4\x9b\x69\x41\xb4\xe9\xd2\x49\x8e\x45\xb2\xca\x9a\x4e\x1f\x82\xdc\xb3\xe7\x09\xc9\xda\xc4\x0a\x83\x5a\x6f\x41\x31\x2a\x47\x32\xb8\x58\xd7\x68\x67\x87\x1c\xbf\x3e\x2b\x7e\x31\xe4\x7f\x28\x38\xe3\x42\xdf\x12\x68\xb5\x34\x02\xde\x5e\x28\xc4\x4e\xa4\xbe\x38\xe1\x49\x2b\xf5\xea\x3a\x3d\x67\x0e\xb4\xfe\x8c\x25\x12\xeb\x6e\x2e\x05\xd9\x0d\x1a\x42\xe1\x37\x61\x5d\x2f\x31\x78\xfa\xfb\xe4\x55\x97\x38\x3c\x83\x6c\x4f\xa0\x34\x41\x3e\x58\xff\x34\xc3\x0b\x1b\x5c\x1c\x11\x7a\xd2\x79\x83\x7f\xc6\x34\xe5\x21\xf9\x48\x75\x48\x33\x37\xb7\x5e\x70\x0e\x1a\x90\x55\x37\xbc\x12\x11\x84\x98\x0f\x98\x3e\x9c\xe6\x85\x1c\x2c\xaf\xa5\x1b\xf2\x80\xa6\x13\xfd\x5e\x39\x2e\x5b\xd7\x44\xf5\x6d\xba\x16\xb5\x63\x40\x61\xdb\x80\xc1\xaf\x88\x29\x92\xd8\x82\xb8\xac\xd3\x74\xce\x3c\x70\x2c\xef\xd5\x38\x37\x97\x65\xa5\x95\x91\x14\x88\xf9\x49\xae\x72\x7e\xa7\xac\x32\x50\x5b\x67\xe5\x21\xd9\xa0\x56\x34\x39\xe8\xf1\x50\x42\x28\xc9\x9b\x10\x6e\x0f\xe4\x15\x6d\xac\xe8\xd3\x7a\x87\xaf\xc6\x6a\x2d\x30\x6f\x45\xfa\x31\xe5\xd5\x4d\x02\xb3\xc9\xc3\x82\x60\xc0\xa2\x98\x25\x7e\x97\x62\x14\x61\x1d\x28\x3a\x0b\xa5\x27\x7c\x69\xf6\x7f\xa0\xda\x9c\x14\x75\x4b\x9a\x4e\xd7\xb0\xc3\x14\x45\x7c\xbf\x69\x64\x25\x9e\xc6\x48\xd4\x43\x80\xda\x2f\x9d\x5d\x1e\x2b\xd8\xb3\xf8\xba\x6c\x38\x2b\x86\xef\x9d\x43\x93\xf4\x5b\x98\xc9\xf5\xed\x91\xd3\x2f\x63\x34\xce\xc5\xf9\x92\x8a\xc3\xa8\xec\xd7\x33\x67\xfc\x83\xfd\xa6\xe3\x50\xed\xff\xb5\x1b\xc2\xdb\x06\xeb\x0b\x1d\x3f\xe8\x21\x9f\x9b\xfc\xa3\xcf\xc9\xef\xfb\x63\x35\xbb\x78\x3c\x4c\x24\xfb\x60\xd4\xc3\x96\xe6\x23\x49\x5c\x78\x5f\xd0\x5a\xeb\x83\xb7\xc2\x0f\x83\xdb\x8e\xff\xfd\x8f\xd3\xab\xd6\xb7\x25\xaf\xa6\xa1\x1a\x64\x1d\xf2\xb6\xc1\xc6\x38\x87\x21\x05\x3c\x2d\xc9\xc6\xa9\x2b\xeb\x37\x18\x64\x20\x98\xa9\xfc\xbc\x77\xc5\xa8\x4d\x29\x9b\xef\xee\xbc\x0b\x94\x77\x89\x65\xdb\xc2\xe2\x0e\xef\xef\xae\x27\x04\x26\xcf\x97\xe5\x92\xd7\xee\x9f\xb1\x4f\x3a\xce\xae\xf7\xe4\x5d\x36\xae\x58\x5d\x74\x9f\x70\xd5\xd4\xb1\x1b\x69\x9f\xe1\x97\x11\x9d\x35\x43\x9e\x41\xeb\x57\x5d\xf2\xf4\x21\xbd\x68\xea\x27\xb1\xfe\x59\x38\x6f\x8c\x0c\x55\xbc\x4b\xb8\x15\xc9\x51\x83\x14\x20\x28\x3e\x6a\xcb\xb9\x4c\xde\x60\xbe\x99\x55\x03\xd2\xcf\xcf\x03\x03\xb9\x3c\x3e\x55\xf3\xf6\xfa\x99\x7e\x19\xec\xa6\xbf\xc9\x4f\x14\xf3\x28\xab\xa5\x72\xe1\xea\xdc\x7f\x63\xee\x32\x72\x80\xf2\x9f\x7d\x4b\x31\x00\x1e\x1c\xf3\x15\xd6\x1b\x45\xd3\x26\x6f\x28\x36\xc6\x49\x9f\x4d\x9c\xaa\xfc\x8c\x1f\x99\x4e\x36\xc1\x6f\x6b\x9a\xbd\xf2\x44\xd2\xfd\x4c\x22\xfa\xdb\x3b\x0f\x9a\xb5\xb0\x05\x05\xaa\xdc\x54\x43\x64\x9f\x18\xdb\x1b\x4a\xef\x08\xe8\x83\x03\xc4\x78\x0e\x50\x6c\x30\x8b\xe9\xff\xe6\x68\xe1\x65\xc3\xe8\x60\x0a\xbf\x60\x4c\xe5\x44\x53\xff\x0f\xf3\xc4\x5f\x1e\xde\x69\x8e\x20\x9a\xe3\x95\xee\x6b\xb5\x31\x31\x19\x3f\x90\xc0\xf6\x00\xdf\xab\x34\x79\x74\x1c\x5d\x63\xe4\x85\xc1\x02\xc3\x88\xaf\x32\x05\x1d\xe8\x94\xb9\xc6\x49\x7e\xc2\x4b\xb1\x74\x2b\x9f\x24\x00\xc2\x77\x43\x6c\x9e\x8c\x3c\x9d\x22\xc3\x8d\x15\x7d\xdb\xe7\xcd\x4d\x9e\x6c\x93\x5f\xea\x9a\xc2\x53\x83\xc7\x4d\xfb\x8b\xe6\x9e\x5a\xa0\xf2\xc8\xcd\x8d\xa1\xdc\x3a\x31\xf9\x0d\x0b\x5e\xd3\x00\x8c\xf3\xd5\xdd\x35\x74\xdc\x3d\x4b\x8b\xfe\x95\xc2\xa2\x63\x52\x72\x97\x9c\x37\xae\x4e\x44\x12\x67\x03\x28\x09\x29\xa6\xec\x60\x17\xa0\x33\x08\xc0\xca\x03\x43\xbd\xc2\x2b\xb2\x9b\xca\x71\x38\xbe\xb3\x8f\xe4\xc8\xb1\x2a\x56\x80\xe4\x45\xb7\x58\xf8\x6c\xd1\x79\xb8\xde\x55\x95\xae\xdd\x21\xa7\x5d\xc3\x24\x43\x44\x24\x0d\x8e\xae\x76\xff\x4f\x5d\x6f\xd8\x61\xb0\x0f\x65\x5f\xe6\x8b\x7a\x93\x86\x8a\x5c\x2e\x8a\xcf\x75\xbb\x2d\xc6\x49\x60\x55\xff\xaa\x75\x62\x16\x5e\x56\xf3\xc7\xf6\xf9\x41\x8e\x90\xd4\xfc\xc1\x2c\x
41\xd0\x56\x27\xb0\xc6\x2f\xd3\x26\xc2\x2d\xc8\x7b\x52\xf3\x55\x6f\x86\x50\x0c\x50\x21\x04\x2e\x1a\x35\xda\x6a\x43\x9b\xfe\x1d\x23\x0f\x43\xf6\xd9\x71\x5f\x51\x94\x2a\xb3\xe0\xe1\xab\x71\xf2\x77\xd7\x9f\x72\x9a\x61\xce\x31\x82\xa3\xeb\xc5\xc0\xf5\x50\xf1\x8a\xc1\xdd\x55\x2e\x46\x79\x2e\x68\x74\xf1\x87\x94\x4c\xb1\xaa\x6b\x0e\x12\x88\x21\xbf\xe5\x73\x63\x82\x35\xb9\x51\x31\xa7\x72\xcd\x65\x80\xdf\xbc\x37\xa8\x17\x03\x99\xbc\xfb\xf6\xcb\x69\x8d\x81\xc1\xdd\x25\xc4\xea\xff\xca\xa2\x8f\xe5\x51\x97\x9b\x35\x86\x31\x80\x3a\x45\xe4\x25\x3d\xf5\x0b\xae\xe4\xe2\x2d\x1d\x4a\x39\xbe\xfa\x04\xc3\x45\xfb\x74\x78\x68\x90\x62\x63\x5f\xbe\xa7\x76\x19\x98\xc2\xbd\xe0\xbe\x1e\x32\xc4\x60\x92\xbb\x1a\x26\x94\xe1\x99\xd1\x93\xd6\x0c\xc3\xf6\xe7\x76\xe7\xf2\xd3\xde\xbd\x1c\x9d\x1d\x5b\x28\x2f\xd1\x8f\xc2\xa1\x1f\x16\x1b\xdd\xf5\xc2\xed\xcf\x85\x9e\x59\x5e\x19\x90\x87\x23\xc8\x94\x26\x83\x7d\xc6\x18\x4e\xaf\x98\xf1\x13\x01\x09\x79\xe7\x8d\xc8\xad\xa3\xf7\x8e\x09\x72\x19\x02\xb0\x84\xc3\x62\xec\x91\x2e\x11\x8c\x5a\x9a\x49\x49\xaa\x4c\x6a\x3a\xdd\x99\xa5\x09\x29\xe5\xff\xaa\x73\xd8\x60\x84\x73\x46\xd5\xeb\x8d\xbe\xc7\xde\x2d\x42\xc4\xaa\xb9\x4a\xa7\x56\x61\xd6\xcf\x5a\xce\xa7\x2e\x2f\x3f\xac\xda\x08\x82\xd2\x95\x95\xc4\x70\xfa\x83\x0c\x9a\xd3\x75\xf3\x8a\xba\x29\xc3\x15\x53\xd5\x3e\xa7\x5a\x82\xb1\x80\x05\x1d\xd6\x3a\x61\xe2\x4b\xcd\x6b\xee\x46\x24\x69\x52\x43\x7d\x1e\x2d\x05\x6b\xec\x88\x01\x1c\x0c\x34\x4a\x18\x68\xc4\x08\xc5\x6a\x57\xca\x23\x1b\x20\x09\x07\x19\xc4\x72\xa0\x38\x42\x85\xd6\xd0\xaa\xae\xf6\x59\xef\x35\xc8\x52\x22\x4f\x1d\x24\x24\xb1\x9a\x71\xca\x94\x5f\x64\xe5\x84\x21\xb4\x71\xa5\xc8\x6f\x50\x94\xf8\xb3\x52\xa0\xc8\x4a\xc9\x2a\x9e\x36\xc4\xc8\xa2\x40\x19\x94\x0d\xc7\x56\xfb\x30\xc8\xb1\x6f\x69\x55\x26\x29\x53\xe9\xa5\x87\xbf\x40\xaf\x41\x9b\x2d\x55\x92\xc8\xb6\x0c\xa7\x53\x47\x59\x64\x24\xc8\x39\x0a\xb0\xa3\x6c\x80\xe7\x2c\xd6\xab\x18\x1d\xbc\xc4\x70\x3e\x83\x86\x53\xc8\x2f\x38\xe9\x79\x76\x86\x99\xd7\x29\x65\x31\x2b\x47\xd1\x93\x60\x9b\xde\x15\x33\xcf\x92\x7b\x35\x8c\x3c\x8f\x3e\x07\x74\x5a\xe5\x21\xde\xcc\xec\x73\x9a\x9e\x18\xe2\xbe\x80\xfc\x77\x99\xc2\x20\xa4\x3b\xd5\xcc\x1e\xb8\xd7\x17\x38\x63\xcc\xa6\x03\xdb\xd3\xe3\xeb\x83\x19\x60\x4d\x9b\x83\x31\x15\x8c\xab\xb4\xa7\x7f\xf9\x52\x2c\x83\xee\xe5\x6c\xf0\xe0\x2a\xc9\x03\xc5\xda\xd9\x85\x6c\x29\x29\x1d\xc0\xef\x57\x16\x28\x59\x09\x90\xd6\xcd\x79\x42\xf8\xd7\xe1\xfa\x8f\xfb\x87\xf8\x4f\xfd\x07\xe4\x0c\x19\x48\x1e\x3f\xbb\x96\x1a\x20\xa5\x7d\x88\xda\x37\x58\x3d\x63\x45\xa3\x2a\x92\x10\x7a\x8d\x81\xa2\x6d\xd0\x72\x50\xc8\xdc\x35\x24\xb8\x50\x6f\xfc\xa6\xd8\xe4\x0e\xfd\x52\xe0\x10\x5f\x74\x12\x95\xb4\x09\x9d\x46\xe5\x69\x9f\x3a\x5d\xab\xba\x1e\x43\xf9\x78\xc4\xce\xad\x4a\x8c\xc8\x29\x56\xdb\xf1\x20\x64\xb8\xc1\x41\xac\xf9\xc7\xf6\xa3\xe6\xbd\xfb\xcf\xf8\x52\x1c\x4d\xc9\xe0\xee\x3c\x12\x6e\x27\x6b\x56\x00\x06\xb3\x6f\xe8\x20\xe8\xf7\x94\x7e\x00\x43\x7d\x0a\x1c\x53\x11\x61\xfb\x03\x1d\xb1\x47\x6f\x33\x4f\x8f\x24\xd5\x40\x3a\x4a\x8a\x53\x93\x68\x35\x3a\x39\xc0\x51\x9f\xf3\x3e\xd3\x9b\x8f\x3b\x75\xbd\xe2\x33\xbd\xe8\xc4\x00\x4a\x96\x98\x58\xb7\xdc\x67\x52\x3b\x44\xcd\x2a\x5a\x76\x63\x69\xc2\xf4\x08\x46\xd6\x22\x12\xd5\xfe\x77\xb7\xbf\xa0\x61\xc5\xe4\x83\x61\x78\xe4\x5c\xda\x78\xbb\xa7\xc2\x4d\x36\xa6\xd3\x6a\x41\xc4\x2f\x9f\x75\x03\xed\x2a\x48\x57\x8b\x88\x6c\x69\x95\xfe\xcb\x99\x16\x6b\xb7\x78\xe5\x0b\x1b\xf4\x33\x49\xf6\xff\x14\x51\xd1\x3a\x18\xde\x8c\x80\x44\xb3\x7b\x19\xcc\x25\xcc\x4c\xbf\x05\xe4\x85\xf2\x0f\x25\x6b\x63\xbe\x1e\xfd\x51\x9c\xe4\x03\xef\x6f\xdd\x28\xc3\x24\x2a\x45\x8a\x33\x92\x1c\x1a\xe9\x56\x60\x3a\x30\x1c\x0d\xa7\x8c\x53\x86\x27\x37\
x8d\xcf\xdc\x3c\xa3\x11\x3c\xc5\x71\xb2\xca\x79\x24\xbf\x6b\x13\x33\x0b\xd1\x05\x8d\xfb\xed\xfd\x0c\x6a\x56\xf0\xbd\x9e\xb4\xff\x00\x31\xeb\xff\x6e\xf1\xb2\x5a\xfe\xbf\xa0\x6d\x4d\x3a\xe3\x28\xe7\x90\x2a\x0a\x15\x98\xec\x4d\x48\xcd\x58\xf8\xe4\x37\x17\xc3\xaf\xe2\x36\x42\xa8\x47\xf3\xaf\x5c\xb2\x7e\x0c\xa9\x5c\x1b\xeb\xee\x56\xfc\xb9\xa3\x72\xf0\x9b\x51\x94\xd6\x63\x40\x73\x6d\x4d\x99\x5c\x8d\xf9\x06\x2d\x7f\xea\x26\x60\x9d\x6a\x4d\x06\xb6\x7c\xeb\x14\x5d\xfc\xbe\x85\xd2\x8b\x5b\xe4\xd2\xb8\xc4\xc7\x2b\x95\xb0\x74\x76\x40\x73\xae\xa1\xd6\x8a\x49\x16\x62\x6c\x2d\x3f\x8b\x00\x6f\x59\xd6\xdc\x7a\x73\x09\x01\xf2\xc0\xd2\xad\x2b\x4a\x4f\x12\xd8\xa8\xdd\x09\xcb\x15\x29\x37\x45\x26\xa4\x95\x73\xa6\xf5\xca\x7e\x71\xc5\x2e\xb6\xd8\x3e\x29\xdc\xd8\xd2\x56\x78\xff\x53\x24\xf0\xb7\xa4\x08\x4a\x4b\x7f\x3a\x5e\x39\x35\x6e\x31\xb8\x69\x4a\xd5\xe0\x10\x3e\x2e\x61\xec\x02\x41\x46\xe6\x44\xfd\xdf\x1d\x9e\xd6\x44\x4f\x47\x7c\xd5\x60\xd6\xe2\x1b\x1d\x94\xf2\x38\xee\x06\xc3\x7a\x86\xae\x0e\x74\xfa\x1f\x8c\x22\x19\x0e\x96\xd6\xad\xfc\xb9\xe3\xe6\xec\xb7\x7b\xbc\x90\x51\x86\xcc\x31\xfa\xbe\xb4\x30\xaa\x38\x1a\xf3\xe3\xe1\xce\xe5\x7b\x35\x26\x00\x1e\x60\xc0\x2d\x04\x7a\x1e\x07\xb4\xb4\x88\xbc\xcb\x2e\x20\xf3\x92\xda\xe4\xfe\x72\x70\xbe\xcc\x00\xa6\xaa\x1d\x6d\x69\x15\x23\x6d\xad\x6f\x6e\xbd\xea\xae\x92\x79\x76\x06\x62\xa9\x23\xaa\x9f\xe6\x9c\x34\xd2\xd5\x26\x1b\x4c\xfa\x45\x55\xb7\x6f\x82\x27\x0c\x4f\xb6\xd6\xe0\x79\xad\xca\x27\x3c\xec\xc5\x67\x8f\xf7\x28\xa1\x20\xe1\x88\xa5\x8c\x58\x22\x85\xfe\x70\x3d\x3e\x07\xe8\x71\x71\xe2\x9a\x5c\xb4\xba\x05\x5f\x6d\xe4\x54\xfa\x51\x40\xbd\xf1\xcb\x98\xd9\xfb\x75\x46\xd9\x37\x1a\x3b\x5b\x62\xe2\x0e\x1a\xf5\x60\x6e\xc1\x3c\xfc\xd2\xca\x9f\x67\x4b\x8e\x74\x23\x43\x63\x89\x88\x1d\x98\xeb\x4c\x04\x39\xb2\x24\x18\x8f\xac\x5c\x45\x78\xc8\x5c\xe1\x26\x13\xee\x3a\xb9\xfc\xaf\xca\x4c\x46\xde\x35\x09\x5d\x6b\x92\xd0\x52\xeb\x31\xb3\x23\x47\x78\xea\x48\xaa\x2b\x0d\x90\xf4\xf5\xce\xab\x68\x64\x9c\x1b\x3a\xac\x4a\xe0\x7f\xae\x59\x86\x31\x2d\x24\xdc\x0e\xe8\x94\xf0\xf0\x76\x74\xef\xfe\x1d\x89\x63\xc5\x03\x39\x40\x83\x7a\xa5\xd6\xb3\xac\x8c\x1c\x00\xb1\xd8\xd4\xa8\x1d\x98\x8b\x8f\x3b\x96\x1e\x0f\x89\x77\x1e\xc9\xbd\x3d\xb8\xf1\x14\xd7\xe8\xce\x60\x4e\xd4\x7a\x69\x8a\x96\x86\x7b\x00\x67\xdc\xcd\xd0\x63\xb5\xca\xd6\x3f\x6a\x1b\xe7\x2b\xbf\xc9\xc9\x64\x7c\xd0\xb5\x49\x13\x15\x1f\x80\xdf\xe3\xaa\x2e\x41\x0d\xcb\xcb\x2a\xa7\x24\xe4\xb1\xfa\x17\x54\x97\x7b\xe3\xab\x54\x17\x0e\xec\x2a\x87\xae\x14\x6b\x5f\x93\xde\x3d\x8d\x61\xac\x7d\x65\xe7\x0a\x26\x33\xe4\x9c\x49\x8a\xbe\x52\xea\x62\xff\xcc\x3e\xf6\xdc\x3f\x3f\x17\x98\xdb\x53\xa7\x48\x30\xbb\xa1\x82\xc6\x3b\xb8\x83\xbb\xe0\x95\xe4\x4e\x0f\x1c\x78\xd3\x90\xa0\x66\xb7\x62\x09\x12\xca\x5e\xaf\x3c\xd4\x70\xa3\x79\xf1\x0b\xdc\x35\x0a\xda\x31\xa6\xde\x65\x5a\xa3\xf1\xef\x4e\xa2\x2d\xc2\x1f\x7e\x45\x45\x33\x50\x7b\xb9\xfc\xde\xed\x2e\x13\xff\x21\x9a\x8b\x52\x3c\xf7\x12\xbb\x23\x1f\xc3\x9e\xd4\xc6\x3b\x6f\x8b\x31\x4e\x44\xab\x16\xdb\x8c\x8b\x8e\x30\x42\x90\x9b\xac\xb5\x7c\x3f\x75\x2d\x99\xcc\x35\x7c\x19\xfc\x4f\x59\xd8\x09\xdf\x11\x3f\xf3\x52\x4a\xeb\x7b\xe6\x7c\xe2\x9e\xbe\x89\x31\xb0\x1d\x34\xf2\x2a\x40\x55\x03\xbb\xe5\xce\xd1\x27\x49\x8e\xd2\x49\xbb\xd5\x3b\x07\xff\xb9\xa4\xaa\xd2\x2f\x38\x41\xf9\xd4\x89\x30\x1f\x24\x9d\xd3\x17\x7a\xbb\xa5\x1e\x6f\x4f\x9b\x07\xc2\x63\x17\x45\xd5\xe1\x7f\x64\xce\x91\x5b\x74\x43\x9b\x41\x39\x2e\xdb\x6d\xa3\xc5\x50\x68\xdb\xee\xac\x24\x61\xeb\xfe\x90\x96\x73\x87\x44\xec\xd6\x6a\xef\x62\x80\x58\x47\xda\x9a\x93\x23\xfd\xfe\x6d\x35\xf8\x63\xb5\x49\x80\x1d\x68\x97\xb9\x4f\x70\x21\xb7\x8a\xf8\x08\x6f\xb4\x61\x71\xbf\xea\x7e\x45
\x29\x3e\x9b\xa3\x49\xfc\x39\xcc\xe2\x0b\x16\xfd\x97\x3f\x09\xea\x77\xf4\x0e\xa9\x72\x3f\xf1\xfa\x99\x20\x91\x5e\xba\xd5\x85\x04\xac\xda\x15\x95\xe1\xbe\x33\x88\x91\xf7\x4c\xaf\x38\xb8\x8e\x14\x9c\xac\xf2\x01\xbb\xd3\xd1\xf5\x95\xe1\x4a\x39\xe5\xd0\xd3\xb8\xa8\x21\x31\xd2\x1f\x32\xcb\xee\x65\x53\x56\xd1\x11\x83\xc4\x54\x22\xee\xbd\xe1\x6d\x94\xe1\x50\x11\xc9\x6f\xf6\xde\xbb\x95\x89\xe3\x49\x6b\xb2\xff\xb5\x38\xca\xae\x36\x29\xb8\xe8\xd1\x41\xb6\xdf\x27\x53\x7d\x41\x1a\x40\x1b\x66\xcd\xff\x35\xef\x69\x0b\x37\x56\xd6\xc8\x16\x62\x35\x9b\x38\x94\x59\xe3\x77\x03\x36\x40\xf3\x56\xd6\xda\xa6\x1e\xd7\xde\xf2\x42\x67\x21\xab\x89\xbf\xda\x31\x18\x7b\x57\x09\xb7\x9e\xff\x83\x42\x39\x89\x76\x90\x2a\x15\xcd\x0e\x36\x4e\x7c\x60\x48\x50\x10\x27\x0f\x6c\x12\xe5\x32\xb8\x61\xcf\x37\xd0\x30\x88\x85\x67\x1d\x82\x60\xd9\xc1\xc9\x9a\x07\x78\xde\x8d\xcc\x3a\x34\x05\x4d\xff\x9e\xea\x5b\x96\x3a\x4e\xf3\x2a\x41\xc4\xbb\x39\xf7\x05\x72\x9f\x0c\xe5\x9d\xdb\xb7\x04\x17\xd0\x3a\x18\x4b\x40\x3b\x85\xc8\x8c\x20\xf8\x51\xf6\xd3\x1d\x7c\x94\x05\xb2\xa5\xef\xf7\x1e\x45\x68\xfb\x7e\x6b\x87\xf7\xff\xc7\x9f\x83\xf7\x07\xd5\x62\xb7\xf4\x91\xee\xc9\x96\xfe\x37\x92\xb4\xf6\x62\xd2\xef\x96\xee\xaf\x68\x6c\x37\x02\xf4\x24\x9e\x5d\xf7\x8f\x10\xb0\xc1\x4e\x5c\x1f\x92\x41\x91\xf6\xae\xf3\x0e\xa2\xb1\x2f\x88\x7c\x27\x9b\xf1\xcd\x75\xba\x08\x66\x0f\x16\xd3\x57\x44\xcc\xcc\x14\x0a\x8d\x40\xec\x09\x10\x89\x4b\x38\x93\x76\x9b\xdc\xfd\x4e\xa4\x3f\x51\x70\x53\x7b\xd6\xee\x16\xc4\x46\x02\x8c\x40\x06\xfc\x21\x55\xac\xad\xac\x7f\x95\x48\x64\x4d\xfb\xfb\xb7\x77\x70\x76\x93\xd3\x68\x79\x16\xfa\x36\x80\x2e\xd8\x0d\x87\x3c\xb3\x77\x2f\x22\x0a\x06\x37\xd2\x5e\xc1\xec\x12\x83\x1b\x32\x29\x74\xf4\x25\x75\xb6\xa0\xee\x51\x39\x83\xfe\xc8\x5c\x13\x2b\xd7\xb9\x54\x19\x0b\xe9\x37\x4e\x7a\x23\x51\x7b\xf2\xa4\xe3\x12\x5e\x4c\xa3\x54\x76\xc1\x07\x6c\xe1\x00\xf3\xa1\x01\xe9\x0a\x65\xe2\xec\x20\xc4\x61\xdd\x31\x88\xe5\x73\xde\xc2\x85\x8a\x19\x3a\xde\x13\xb7\x6e\xce\x79\x28\x58\x2a\x86\x9d\x23\x6b\xe5\x09\x58\x55\xa0\x74\xe1\x2f\xed\xbf\x67\x11\xf7\xe6\x98\xed\xb1\xae\xa1\x4f\xe6\xad\xb0\x7e\x63\x3f\xc8\x60\x30\xef\x64\x24\x54\x42\xd3\x1e\xac\x3b\xb9\xa7\xac\x85\x1e\xa3\x78\xb5\xc9\xa9\xeb\x89\x04\x10\xf4\xe1\xaa\xac\x79\x17\xa2\x70\xc8\x81\x55\x10\x27\x9a\xd7\xfc\x1d\xc5\x64\xcc\x2f\x3b\x10\x25\x22\xf0\x4d\x78\x07\x1a\x73\x80\xf9\x26\x0a\x65\xd7\x35\xcb\x16\x6a\xcd\xdb\x21\x53\x20\x0f\xee\x40\x65\x91\x9b\x5a\x4d\xaa\x7f\x94\xfd\x1d\x66\x48\x20\x4a\xf3\x2e\xd3\xdd\x77\x95\xf6\xcb\x9a\xde\x68\x8b\xc9\x64\xd3\x5d\x4f\x9e\x67\xe2\x34\xbb\x24\x9a\xdd\xd9\xc9\x72\xe7\xfc\x16\x3b\xad\x61\x20\x99\x09\x19\xf5\x5c\x93\x68\x77\x7b\xbd\xbb\xf3\x0f\x83\x13\x78\xbd\x54\x49\x3b\xa1\x8e\x9b\x87\x70\xe2\x9a\x82\x39\xa7\xc1\x90\x73\xe3\xe4\xca\x94\x1e\x1a\x3e\x58\x6b\x73\xe7\x53\x2b\x47\x5d\xfe\x10\xa6\x41\x43\x55\xc9\x61\xf5\xf2\x4e\x27\x7a\x22\xa2\xa7\x51\x28\x97\x04\xb0\x9b\xbe\x37\x77\xa4\x24\x2c\x36\x6e\xd2\xbf\x1a\xe9\xa0\xec\x85\x2e\xad\x2a\x45\xc1\x50\x16\x31\xad\x91\xda\x60\xe5\x33\x18\x64\x8d\xb2\x7c\xce\xa6\x8b\xe3\xe4\x50\x5c\x36\xf8\xe0\x0e\xe9\xa4\x94\xe6\xc2\x19\xa9\x85\xce\xb5\x4b\xbd\x3f\x9c\x8d\x59\x34\x75\xb0\x54\xb9\x05\x21\x5a\x4e\x02\x9d\xa6\x1c\x00\x06\x4a\x4b\x2f\x9e\xeb\x1f\x3e\x44\xd6\x5f\xe5\x86\x87\x82\xaa\x4b\xdd\x79\x8c\xed\xa7\xf3\xb1\x04\x9b\x7e\xa9\x3d\x99\x66\xa9\xf9\x08\xfa\x5b\xcd\x94\xad\x05\x5f\x8f\xae\x47\xca\x22\x5b\x76\x9f\xb3\xe3\xaa\x94\x29\x6a\xb3\xac\xa4\x47\x80\xb7\x07\x09\x56\xde\x95\xb5\x6c\x71\x05\xa2\x4c\x5a\x53\xad\x0e\xe6\xb2\xa6\x03\x1a\x40\xff\x39\x3b\x2f\x7c\x86\xec\xd3\xe5\x1a\xce\x5f\x38\xeb\x8e\xbc\xb6\xa2\x10\x0c\x0c\x8
6\x58\xe8\xcc\x25\x38\x72\x98\x1a\xf6\x46\x00\x0d\x1b\x77\x12\x8b\xc3\x59\x76\x3c\x30\x24\xe9\xe9\x1d\x64\x8d\xf2\xcf\x52\x08\x46\x08\xfc\xdd\xc0\xe2\xb8\x00\x4f\x02\x7d\xd3\x6d\x17\x89\x2e\x6f\xba\x68\xbc\x0b\xdc\x6a\xdd\xe7\xfa\xd0\xca\x1f\x4b\x69\x89\xe4\x09\x00\xb2\xc6\x96\xc8\x67\x05\x9f\x0f\xb3\x00\x93\xe3\xbc\xf3\x38\xf2\x54\x36\xf0\xf3\x32\xca\xda\x13\xe2\x78\x7a\xad\x38\x2f\xbf\xd8\x28\x5c\x9d\xee\x25\x8c\xd7\x01\xc8\x2f\x74\x3a\xa0\x58\x96\x4c\x81\xb5\x0a\xfd\x2d\x66\xdc\x92\x0e\xd0\x8d\xe3\xbd\x01\x21\x4e\x44\xc0\x4e\x9a\x00\x29\x70\x12\xcd\x6f\x09\xba\x9f\x5f\x90\xb5\x0c\xa3\x78\x86\xe3\xd7\xdb\x28\xf9\x22\x35\x06\xb0\xfe\x94\xc5\x40\xa1\x34\xaf\xfa\xff\x40\x3a\x61\x28\x03\xee\x48\x8c\xb3\xfd\x95\x9d\x3d\xdf\x3f\x68\x54\xb8\xc9\x52\x50\x02\x7d\xd2\x37\x4c\x8e\x1a\xc9\xd8\x7f\x79\x6f\xfd\xff\x3b\x05\xa6\xff\xa8\x90\xfa\x97\x3e\xcd\x3f\x7b\x03\xa4\x00\x4a\x95\xf8\x2f\x59\xb3\x7d\x39\x60\xec\x08\xfe\x48\x99\x2b\x30\x2d\xe3\x8d\xeb\x9f\x8f\xcd\xf8\x5f\x4d\xf2\x94\xef\xbd\x39\x31\xd0\x3b\xd0\x00\xfb\xf7\xa6\x74\x4e\xb0\x49\x0b\x15\xf4\x40\x7d\x94\xa0\x6c\xf1\x19\xf2\x2c\x94\x64\xbf\x3c\xdf\x4a\x35\x46\x54\x3a\x31\x4c\x30\xf7\xb4\x41\xa0\xe1\xa6\x73\xce\x7d\x84\x4b\x11\xa6\xc4\xfc\xcd\x9a\xef\xf5\xec\x1f\xa9\xf3\xfd\x64\xec\x83\xc0\xa3\x48\x56\x06\xf9\x38\x66\xaa\x4b\xcc\x2d\x7b\x5c\x6e\xae\xc1\xa8\x9e\xeb\x9b\x86\x48\xd6\xb3\xa9\x6b\xf0\xf8\xee\x73\x0a\x13\x25\x6f\x94\xe7\x2a\x57\x5a\xfb\x76\x64\xa4\xc4\x9f\x08\x63\xba\x6e\x43\x84\x49\x9d\x88\x41\x38\x45\x90\xc9\xc8\xa4\x08\x0e\x32\x12\xb5\x1e\x76\xb8\x34\xcf\xf5\x69\x35\x1f\x0f\xf3\xec\xd7\x43\xf2\x59\x9d\x9e\x4b\x77\x55\xea\xc9\x8f\xf3\x9f\x4c\x3a\x67\x18\xf3\xf1\x20\x6e\xa5\xdc\x3d\x94\x20\x4e\x16\x5c\x41\x5e\x61\x59\x83\x85\xb8\x0e\xe4\x05\x0e\x91\x93\x32\xe7\x8f\x47\xd2\xe7\xc8\xe5\x35\x10\x64\xbc\xeb\xf6\xa2\x33\x17\x61\xca\xe9\x6c\x4a\xcc\xc9\x4e\x72\x3b\x52\xa5\x0b\x90\xc4\x57\xf4\x6b\xa1\x7e\x35\xf1\xa8\xd5\x72\x75\x98\x73\x18\x32\x5d\x61\xa4\x67\xb8\xc4\x4a\x1f\x66\xce\x98\x1c\x22\xe7\x70\xdd\x82\xc5\x69\x57\xd8\xfd\x9e\x70\x01\x6d\x09\xa2\xa1\x32\x44\x66\x06\x6f\x66\x7b\x63\x99\x54\x03\xf5\xbc\xf4\xcf\x59\xc2\x63\x65\x4e\x1f\xb6\x54\x06\x94\x1e\x39\xff\x74\x80\x90\xd9\xb3\x26\x3d\x7f\xab\x1d\x74\xab\x12\xd4\xf1\xb4\xad\x8d\x71\xc8\xcf\x6b\x74\xa5\x4a\x3a\x43\x8a\x0e\x0c\xb8\x85\x2c\x31\xdc\x21\x53\x71\x57\x30\x16\x33\x10\xf1\x9b\xd2\xd6\xc8\x1f\xdc\x82\xa9\xb8\xe5\x60\x3b\xba\xcf\x8a\x23\xab\xd6\xc9\x67\x7c\x8a\x0d\xf8\xd4\x26\x0c\x39\x5a\x29\xa3\x29\x0c\xfa\xe2\x72\x13\x63\xda\x10\xf5\xda\xbd\x2c\x2d\x28\x84\x12\xb1\x7e\xde\xcf\x67\xe6\x5c\xa9\x8d\x9f\xa0\x2e\x48\xbb\x25\x60\xab\x81\xab\x30\x7d\xe8\x9f\xf7\xc4\xa8\xc8\xf3\xee\x46\xd0\xcf\x3b\x4c\x8f\xda\x4d\x52\x9f\x63\xf1\xc8\x5a\xe9\xe4\xbd\x2c\xcf\xa7\xcc\x2c\x99\xa7\xe7\xd3\x8a\xb7\xa5\x2f\xe0\xd9\x72\x90\x05\x39\xb2\x71\x6e\xa0\x58\x88\xcd\x95\x46\x88\xc0\x35\xda\x04\x75\x78\xcc\x03\x13\xcc\x41\x21\xec\x8e\x92\xc0\x22\xa8\xc1\xf2\xfb\x43\x5e\xe0\x1d\x22\x06\x18\x57\x60\x0a\x88\x66\x54\xb5\xc5\x5d\x83\xc8\xa5\xc0\xb4\xec\x9a\xe2\x7b\xba\xd4\x33\x57\xc3\x89\x8a\x26\x4a\xee\x9a\x21\xee\x62\x45\xc4\x84\x4c\x42\x6f\x90\x9f\x35\x1e\x08\x48\x84\x97\x1d\x75\x16\x3e\x15\x61\x00\xe6\xe5\xc9\x48\xe3\x59\xe5\x82\x95\x7f\x2e\x56\xbc\xb8\x1c\xd7\x36\x65\xfe\x0a\x6e\x2f\xf1\x8f\xe7\xca\x00\x97\x95\xbf\x1f\xdf\x27\x88\x9f\xab\x7c\xb7\xec\xb9\x4e\x2c\xd5\x87\x78\x16\xdd\xbd\xd2\x2b\xac\x67\x97\x92\x7e\x96\x20\x2c\xe6\xb3\xfb\x3c\x63\xe6\x80\x57\xa2\x7b\x58\x06\x51\x85\x9e\x05\xfa\x8d\x94\x54\x72\x36\x63\xb2\xee\x16\x04\x79\xb9\x4c\xb9\xe2\x67\x37\x06\x06\x4a\x9a\x3a\x7f\x66\xc5\x8e\x
ec\x84\x90\x49\x22\xf2\x20\x9b\x95\xb2\xcd\x33\x94\x98\x5f\xfd\x8c\x3a\xf4\x89\x29\xc4\xec\x99\xb9\xc8\x03\x4d\x17\x0d\xba\x62\xc2\xe3\x19\x96\xf2\x4f\x70\xad\xbc\x84\x95\x20\x0f\x21\x36\xd3\x13\xb1\xdb\xff\xc8\x69\xdc\x45\x6a\x3c\xc9\x4d\xbc\xde\x9c\xb8\x7f\xa6\xab\x5b\x6d\x49\xb7\x05\xe2\x95\xe2\x34\xb6\x46\x59\xb9\x27\x2c\xd9\x58\x96\xa0\x3d\x2a\xa4\xf5\x84\x6a\x03\x5f\x32\xfb\xb0\x99\x3d\xe5\xc8\x4b\x90\xc9\x49\xd6\x4c\x13\x87\x0e\x06\x2a\xd3\x6a\x1e\xb7\x52\x30\x1e\x91\xc5\x85\x29\x0b\xd6\xd2\x8f\x7b\xb9\x49\x4b\xfc\xb8\x4b\x18\x26\x91\xc3\x02\xcb\xbd\xb9\x4a\x47\x01\xc2\x3f\xaa\x61\x74\x27\x7d\x13\x5b\x90\x25\x16\xd2\xd7\x68\xbb\xd5\x92\xa7\x71\x3f\x7a\x1e\xd8\x83\xef\xee\xa3\x8a\xca\xd8\x94\x54\x0d\xa6\xa3\x7f\x08\x0f\xa7\xae\xd0\x91\xab\xca\x52\xca\xc0\xb3\x93\x1f\x45\xc9\xac\xa1\x7c\xf3\xa8\x24\xae\xf9\xf5\xac\xf2\xfc\x7b\x90\x57\xc2\x53\xda\xe3\x0c\x50\x68\xc8\x75\xf1\x15\x70\x7c\x86\x71\x3f\x2c\xbd\xc4\x72\xdc\x95\xda\xf7\x88\x2c\x4b\xe2\x7e\x4a\x3e\xe0\x87\xdb\x2e\x28\x8b\xc6\xc9\x8c\xe1\xd8\x45\x9f\xa7\x07\xfa\xbb\xe7\xe9\xb7\x3f\xaf\xf2\x2f\x33\x40\xfe\xa2\x8d\x19\x75\x60\xf0\x40\x58\xf2\x51\xe1\x4d\xe0\xed\x4b\x1c\xb9\x3c\x5c\x32\x33\x85\x91\x17\x7f\x61\x8f\x23\x88\xd9\xc8\x5d\x0b\x18\x7d\x8a\x96\x3d\x42\xa2\xde\xc0\x93\x49\x19\x8f\x34\x8a\x06\x23\xd1\x8f\xfc\x1e\x48\x42\xc8\x2f\xb9\x7b\xbd\x34\x9b\x9e\x31\x7d\x2c\x6e\xe4\x3b\x18\x91\xa2\x3a\x52\x91\x2c\xbb\xe0\x6f\x78\x82\xdd\x57\x11\xea\x04\x45\x98\xaf\x2a\x2e\x04\x9f\x8a\xae\x4d\xd9\x7b\xc0\x99\xc9\xac\x3e\xd2\x9e\x83\x21\x39\x50\xa1\x3f\x6b\x06\x3f\xff\xf4\x09\x5c\xf5\x57\xdf\x92\x14\x80\x49\x54\x04\xe1\x57\x9e\x64\xe0\x82\xd2\x2a\xec\xb7\x1b\x26\x5d\x91\x2a\xff\xe5\xc2\x48\xd1\xf1\xa0\xb3\x83\xf6\x26\x60\x7d\x24\x19\x10\xe7\x99\x99\x52\x19\x2f\x97\x7c\xca\x9e\xe1\x2f\x66\xdf\x50\x18\x8e\x85\x20\x6d\x8e\x09\x82\xcc\xe1\x51\xfe\x71\xa3\x9d\x59\x29\xda\xdb\xbb\xc4\x93\x98\xde\x6c\x6e\x27\xd2\xd0\x4e\xc1\x24\xa1\xe9\xe9\xd6\x10\x68\x92\xb1\xca\x74\x33\x3a\x39\x59\x5b\xba\xf0\x67\x4f\xe8\x97\xd2\x72\xcc\x7e\x95\xb8\xe5\xd6\xdf\xa7\x81\x3c\x40\xf4\x16\x0d\x8b\xf4\x67\x2f\xd4\xb3\x92\x45\xf9\x26\x6b\x4f\xce\x4f\x53\xfd\x91\xf3\xea\x3d\x91\x7a\x89\x04\x7c\x17\xc4\x81\x85\x19\x8b\x33\xee\x10\xb8\x9a\xac\xb9\xa7\xda\x19\x97\x7e\xfd\x8e\x10\x4d\x5e\x02\x3d\xbd\x35\x9b\xa2\x67\x3c\xc9\x99\xab\x0e\x54\xbf\x9f\x8a\xc4\x3b\xa6\x88\x7f\x4d\x45\x29\x78\x53\x81\xf2\x27\x57\xc5\xbc\xe1\x64\xf5\x1c\x7f\x77\x59\xc5\xef\x5c\x11\x95\xd0\x6f\x2d\x41\xf5\x5c\x74\xf0\xe7\x33\x40\x06\xd1\x88\xb4\xc6\xd2\x3d\xd5\xf4\xd9\x41\x0f\x8a\x7d\x86\xa9\x88\xf8\xb0\x65\x2e\x26\xb1\xb2\x60\xb6\x46\xea\xa8\x7d\x2d\x22\x51\x5a\x7b\xa3\xa8\x97\xfd\xb6\x3e\x7f\x80\x89\x74\x9f\xc6\x10\x1d\xb2\x91\xa0\x79\x24\x37\xf5\x07\x10\x0b\x14\x2e\xd3\xba\x29\x39\x42\x65\x40\x34\x79\x84\x95\x85\xec\x95\xad\x46\xa5\x0b\xc0\x7f\xe2\x74\x66\x81\xb3\x95\xaa\x0d\xd0\xeb\xe1\x6e\x1e\x28\x12\x52\x18\x6b\xca\x7f\x56\x49\xfa\x1d\xca\x9d\xf9\x9a\x9f\xc5\xcd\x21\x85\x27\x21\x18\x2f\x8a\xd3\xf9\x90\x5b\xdb\x08\x83\x34\xf2\x50\x9d\xab\xa2\x59\x11\x30\x3d\xb3\x12\x04\x27\x5a\xdd\x4e\xe9\xfe\xfa\x84\x86\xf3\x3d\x7f\x43\x16\x79\xab\x4f\x89\x6e\x73\x36\x54\xe6\x74\xc3\x64\x37\xf7\xae\x7c\xec\x21\xcb\x36\xb9\xe7\x34\xef\x06\x3c\x47\x78\xdc\x95\xcf\x83\xb2\x16\x53\x1a\xe5\x99\x46\x97\xba\xf1\x26\xe1\x11\x53\x78\x74\xad\xe2\xcc\x50\xd0\x28\xef\xd1\x19\x0d\xce\x5c\xbb\x07\x9f\xe3\x86\x18\xdc\xc6\x72\x22\x85\xb3\xbe\xbb\x8b\x1a\xb3\xe6\x47\xd7\x02\x26\x71\x10\x73\xb9\x31\x30\x1b\x9b\xe4\xff\x47\x06\x22\x46\xf5\x57\x41\x09\xbd\xc2\xce\x0a\xf7\x45\x4d\xb2\x61\xcd\x08\x8c\xd6\x4c\x66\x71\xef\x5a\xb8\
x98\x41\xac\x96\x36\x25\x63\x45\x54\xa2\x33\x3d\xaf\x8b\x9c\x54\xa8\xfa\x91\x15\x2a\xe9\x38\xb8\xb1\x05\x3a\xe4\xe2\x55\x8f\x24\xaa\x27\xdf\x4e\x2b\x27\xd1\x3b\x2f\x8a\x5f\xc1\x91\x4d\xac\xd1\xdf\x4c\x27\x30\x7b\x02\x86\x7a\x5d\x05\x4e\x68\x13\x13\xf4\xd2\x76\xd0\x33\x13\x63\xb2\xae\xa1\x37\x62\xf0\xa1\xab\x05\x67\xa7\xf1\x56\xba\x14\x15\x79\xab\xf1\xc3\xd7\x5f\x87\x19\x73\x16\x27\x4b\x0d\x85\xc7\xb5\x7c\x5d\x61\x0c\xba\x84\xec\xe0\x8f\xab\xeb\x0d\x76\x58\x3b\xbe\xcf\x20\x8d\x26\xf2\xf1\xe0\xeb\x26\x3b\x3d\x28\x4d\x86\x59\x5b\x01\xab\x33\xc6\x19\xf6\xfc\x49\xf3\x1e\x3b\xb5\x65\x2c\x1b\x88\xb4\x7e\x43\x4a\x0a\xa3\x02\x91\x48\x49\x3c\xe2\x42\x30\x5b\x67\x0c\xe9\x48\x34\x2d\x5f\x86\x81\x91\x54\xb6\x97\x91\xdb\x96\x4e\x77\xb2\x9f\xff\x92\xd3\x04\x60\xa7\x99\xca\xfe\x0f\xe3\x57\x92\x50\x23\x3b\x70\x74\xb7\x5d\x82\x8d\xc5\x5a\x9e\xf5\x0f\x30\xaf\x39\xb8\x39\x63\xfa\xc8\x35\x2d\xc8\xa3\xaf\x14\x49\x0d\x23\xbe\x34\xf9\xda\xa5\x6b\x13\x6f\x38\x79\xce\x32\xf7\x95\xea\xe6\x98\xda\xdd\xd9\xa5\x37\x74\x0b\x09\xe1\x34\x21\xf4\x23\x9c\xb6\x7b\x10\x5d\xb5\x31\xad\xc9\x50\xa5\xaf\xcb\xe8\x59\x0d\xfa\x02\x12\x74\xbf\xb5\xe5\x8f\xbb\x3a\x24\xd1\x5e\x8f\x10\x5e\x1a\xa9\x4f\xf2\x9f\x98\xa9\x41\x26\xdd\x82\x54\xed\x39\x6a\x55\x3f\x68\x2b\x3a\xd0\x96\xff\x04\x92\x72\x63\xc6\x54\x4e\xd9\xd2\xd9\x23\x3e\x0e\xfe\x7b\x18\xa0\x59\x66\x9f\xe0\xc2\xc5\xe4\x11\x08\xe7\xad\x92\x1e\x26\x57\x9e\xe1\xfe\x9f\xa2\xca\xc3\x1d\x72\x45\xfe\x02\x04\x0b\x18\x6f\xde\x99\xb5\x7a\xef\x02\x13\xe0\xa3\xfe\xbb\xf0\x24\xcb\xb4\x5c\x8d\x1e\xe0\xc3\x9d\x49\x6d\x83\x28\x48\x27\xa3\x26\x71\x51\x17\xfd\x72\x50\x51\x87\x12\x3c\xcb\x7b\x14\x67\xf4\x3e\x3b\x3b\x75\x3f\xaf\x4e\x46\x2d\x9a\x06\x76\x87\xca\x7a\x93\x3b\x5a\xeb\x26\x16\xa9\x12\xd1\x00\x26\x1e\xa7\x8e\xdd\x48\x2e\xaa\x7a\xa0\xa0\x2d\xe2\xbd\x0c\x9a\x6a\xfe\xb2\x33\x6b\xe8\x0c\xcf\xb8\x1f\x27\xd0\x30\xc5\xe6\x86\x16\x33\xab\x30\x90\xd3\x61\x32\xb7\x8f\x38\x08\x4c\xe4\xee\xe0\xbc\x42\x40\x9e\x04\x72\x60\x5d\x60\x8a\x90\xb3\xc1\x60\x29\x3a\x31\x41\x80\xd3\x14\xe2\xc6\x0e\xe2\x7d\xca\xde\x7e\xa8\xff\x0b\x8a\xeb\x9b\x41\xe1\xa1\x86\x21\x1a\xc8\xdd\x62\x44\xd6\x2e\xf0\x45\xfb\x39\x12\x61\x2f\x49\x27\x0c\xd2\x41\xb6\x92\xd9\x58\x00\xa1\xff\x09\xe9\x17\xde\x2b\xa8\x4f\x88\x6a\x6b\xd5\xd9\x47\x20\xac\xee\xd0\x35\xe1\xf3\xa8\x0a\x75\x0e\x35\xec\xf8\xea\x1a\x6e\x76\x85\x3d\x7f\x03\x6b\x5c\xe5\xbc\xeb\xe4\x73\xf8\xd9\x75\xbe\x72\xb0\xe3\x01\x82\x30\x49\xcf\x98\x61\xfd\x90\x3e\x5c\x5e\x52\x50\x03\xa1\x8e\x25\xc7\xba\x70\x8a\xbe\x44\xcb\x55\x70\x1f\x14\x58\x8d\x46\x86\x22\x1d\x78\x03\xb3\xec\x3d\xc1\x7d\x4d\xc1\xc6\xfd\x35\x52\xe4\xb8\x0a\x90\x9c\xc6\x0b\xce\x8a\x7d\x1b\x08\x2f\x38\x4d\xd0\x83\xd6\x5e\xc9\x7b\x6d\xfb\x15\x5b\x85\x81\x1b\xec\x85\xd4\xa0\x0e\x68\x8a\xdf\x04\x4f\x5a\xbf\x58\x3f\xe2\x23\xbe\xeb\x27\x9c\xed\x38\x4b\x35\x60\x16\x4e\xc7\xda\xe9\xf4\x97\xe5\xd2\x01\xfa\x6b\x8a\x04\x3a\x58\x83\x51\x90\xc2\x4d\xc2\xc8\x1c\xed\xa9\xd1\xc3\xef\x0b\xcc\x4f\x99\xaa\x19\xe2\xa9\x1f\xec\xd1\x0e\xb4\x00\x02\x83\x53\xb7\x60\x9a\xe2\x5a\x7e\x5f\x2b\xcc\x35\x4e\xf1\xbd\x81\x83\x9d\xab\xa1\xff\x90\x3e\x2b\x99\xb3\xb4\x02\x1e\xe4\xb2\x63\x65\x50\xb6\x53\xf4\xe7\x07\x6a\x04\xcb\x84\x0e\xb7\xb9\x3b\xea\x59\xe3\x1d\x37\x2b\xb1\x66\x1b\x10\x38\xd4\x85\x59\x45\x1d\x46\x79\x56\x4d\xaf\xb4\xab\x0b\xa2\x75\x6a\x32\xab\x83\x8e\x15\x59\xb0\xa8\xa3\xf8\x13\x13\x85\x74\x87\xb4\x9e\xac\xb5\x9c\x6c\xab\xb7\x03\x7e\xde\xa2\xf4\xae\x52\x35\xb0\x1a\x54\x84\xac\xc1\xdd\x3e\x89\x46\xd6\x3b\x70\x83\x61\x39\xfa\xc1\x92\x52\x96\x90\x78\x12\xb6\x6f\x9c\x56\x01\xc9\x5b\x8e\x77\x77\x37\xb9\x43\x2e\x84\xd3\x42\xaf\x51\x39\x16\x98
\x62\x41\x61\x75\x76\x63\x52\x00\x1e\xe9\xca\xfe\x27\x6c\x7a\x78\x30\x1b\x13\x87\xdf\x80\x4f\x82\x0e\x91\x31\x8f\x9c\x99\x91\xf4\xdb\xb4\x7b\x1b\xe1\x4e\x78\x7a\xa3\xe0\x6b\x02\xa1\x39\xde\xfc\x84\x04\x43\xb2\x5d\x3d\x37\x47\x74\xdb\x5d\x1c\xd6\x9d\xaf\x2d\x14\x49\xe5\x9f\x37\x35\xfd\x67\x6a\x1c\xaa\x00\x65\xa9\x78\x28\x07\x0e\x71\x43\xfd\xcb\xda\xc7\x27\xad\xf4\x3e\x3e\x27\x02\xfa\x6e\x7d\x39\xc8\x2b\x55\x27\x3c\x34\xeb\xbb\x6d\x49\x77\xce\x3a\xf6\x34\xb8\x73\x0d\xc0\xda\x1e\x8e\xed\x3c\x8c\x8f\xdb\x80\xbf\xbc\xb4\x01\xd3\x04\xad\x3c\x3c\x81\x2a\x95\x5f\x7b\x95\x58\x8f\xa7\x42\x6b\xfa\x34\x33\xaa\x0c\xa2\xaf\xf7\x46\xd3\xb3\x69\x4a\x17\x6e\x57\x9a\x09\x48\xd9\x7d\x48\x82\x0b\xac\x18\x27\xba\xf4\x8a\x95\x25\xeb\x66\x7b\xda\x50\x1a\x17\xa6\xf2\xcb\x69\x3f\x0d\x13\xc6\xec\x9b\xeb\x17\xdb\x4b\xe7\x47\xe3\x2c\xd8\xb7\x36\xb5\xea\xdf\x48\x94\xfa\x76\x6d\xaa\x2f\x67\x3a\xbf\xe5\x78\xd0\xb0\x0f\xb3\x3c\xe4\x7c\x7e\x0b\xfd\xfa\x38\x34\xfb\xb2\x8f\x6c\xcf\x8e\x93\x64\xdf\x14\xdc\x5e\xa9\xe4\x7c\xed\x49\x36\xfe\x67\x24\xc1\xe6\x8b\x39\xea\xe8\xfc\xd9\x42\x84\x2d\x3f\xdf\xff\xbb\xad\xcd\xf2\xa7\xe5\x69\x50\x5f\xaa\x49\x7e\xa8\xf0\xf7\x29\x3d\x4c\xf1\x89\x7a\xed\xc3\x59\x00\xe7\x02\x53\x6a\xc6\xe0\x12\x8e\x7f\x5b\x22\x81\xe3\xa3\x58\xfa\x27\xc1\x44\x54\x3c\xe0\x0f\x93\x3f\xde\x6c\xb6\xde\x61\x77\xd7\xbe\xf9\x8c\x0b\x7a\x37\x1f\xbe\xbd\x5b\x67\xe1\xda\x10\xfa\x26\x63\x37\x06\xd3\x4e\x21\xe6\x07\x49\x36\xb1\x03\xe5\x3d\xc1\x1a\xb0\x5b\xe0\xd9\xbd\xf5\xfc\xc7\x67\x5f\xd4\x01\x56\x65\xe8\xbf\xa3\x87\xfe\x56\x8f\xe9\x5d\x82\xab\x5b\x9a\x12\xc2\xdf\x1e\xf4\xda\xb5\x83\x2d\x74\x04\x41\xaf\x22\x18\x5a\xc2\x6f\xa5\x88\xbd\xd1\xde\x64\x91\x8a\x83\x7c\x09\x16\xb2\xbe\x77\x30\x8d\xf5\x25\x02\xe6\xed\x94\x87\xe0\xba\xd4\x93\x3c\x66\xe8\x90\x2c\xdc\x76\x91\x47\x61\x90\xa8\x8b\xc8\x27\x00\x39\xd0\xef\xdd\x7c\x97\xf0\xf8\x7e\x43\xcd\x99\x74\x65\x64\x15\x74\xed\x7f\xb0\x92\x65\xc8\x0f\x56\xf2\x1e\x2b\x25\xaf\xfe\xee\xa4\x7e\x43\x13\x09\x54\x86\x12\x33\xb7\xdf\xca\xc9\x78\x5b\xeb\xd7\x6d\x0a\x7d\x4c\xef\xcf\x29\xff\x04\xf4\xc1\xf4\x6d\x0c\x6c\x3a\xe9\xd7\xcb\x6e\xf1\x98\x8b\xc6\xc7\x20\x29\x2f\x34\x1a\x05\x0d\x39\xa8\xc9\x59\xbe\x88\x9e\xe4\x26\x1a\x17\x2c\x08\xc7\xb0\x20\x9c\x28\x9a\xec\x8c\x63\xf6\xb5\xe1\x3d\x18\xef\xb9\xe1\x91\xd3\x9b\xef\xa4\x2a\xf1\x9d\x94\x65\x61\x40\xc3\xa2\x77\x9a\x74\x57\x5f\xf5\xbd\xca\x73\xb0\x6d\x9a\x6d\x79\x61\xac\x47\xa2\x70\x9d\xc9\x1d\xf6\xbd\x5e\x48\xc3\x71\xca\xaf\x98\xd8\x2f\x99\x05\xb9\x00\x85\xd7\x37\x1e\x94\xd8\xf3\xfd\xea\x0b\xae\xb5\xfc\x5b\x62\x75\x04\xb1\x36\xff\x8c\x5e\xc5\x46\xae\xce\xb9\x2d\x49\x07\x90\xe9\x60\x0b\xf5\xaa\x97\x8c\x80\x5e\x56\x73\xfb\x3d\xc0\x54\xb3\xc0\x2a\xca\x32\x59\x81\xaf\xac\xe4\x9a\x57\xd2\x06\x68\x3b\x76\xb1\x7b\xe1\x7d\x2f\xc6\x6f\x1b\x82\x69\xfc\x66\xa2\x10\xc2\x6c\xe1\x57\x68\xbb\xd4\xe0\x30\xef\x08\xbd\x68\xd3\x2d\xa5\x3d\x78\xc5\x73\x37\x30\xb4\xe7\xa4\x83\xee\xa4\xc8\xbf\xd8\x3a\x04\xed\xb8\xb6\x43\x28\x87\x2e\x0e\x59\xca\xaf\xac\x6c\xb0\x4e\x5a\x5e\xaf\xbe\x34\x57\x17\xa4\xe4\xc2\xd0\x1d\x14\x07\xa4\x5b\xe8\x10\x78\xfc\x25\x08\xd0\x49\xba\x8c\x70\x4e\x64\x1c\xd6\x17\x8a\x38\xcf\x9a\x9b\x73\x25\x75\xf5\x8b\x40\x53\xf0\xde\xf0\x2d\x61\xee\x1b\x59\x45\x2c\x33\xf3\xa8\x7c\x52\x9c\x79\x82\x12\xe7\xbf\xc3\x4c\xf7\x92\x1a\x0a\x5c\x43\xfc\x82\x2e\x7b\xe7\xb8\x06\x13\xe5\xce\xf0\xbd\xd3\x97\x6b\xc0\x66\x16\x63\xd9\x0d\x6f\x2e\x62\xf2\x8c\xbd\xb9\xd6\xaa\x3e\xe9\x7a\x49\x0e\xf3\x6a\x91\x66\x77\xd5\xaf\xca\xff\x05\x13\x19\xe4\xd1\x51\x5c\x64\x0e\xa3\xaf\x7a\x54\xf9\x29\x1b\x2c\xe4\x19\x33\x55\xdf\x4a\x38\xa9\xf8\xf5\x54\xba\x8e\x21\x07\x97\x26\x87\x55\x2
c\xe7\xcc\xc4\x2e\x6b\xfe\x55\x75\x5d\x4e\xa3\xcf\xb4\x66\x74\x9a\x71\x95\x9e\xb9\x2d\x6d\x4a\xd3\xbb\x66\x48\xbe\xf9\x8f\x99\xb3\x4c\x39\xf0\xed\x16\x19\x33\x26\x89\x12\x80\x8c\xcc\x2a\x1a\xce\x8c\x02\xed\x4b\x22\xf2\x0e\xef\x19\xd2\x94\x6d\xe4\xcb\x86\xf9\xca\x59\xee\x8c\xc9\xe7\x14\xae\xae\x58\x72\x61\x2e\xa9\x90\x6c\x1c\xf7\x3d\x29\xb3\xe7\xf2\x14\x45\xef\x0f\x5f\x49\x29\xaa\x20\x2c\xeb\x61\xa4\xee\x33\x74\x61\xe0\xfd\x21\x90\xa8\xce\x15\x19\x62\x34\x67\xe4\x56\x83\x68\xa8\x5f\xfb\x66\x9f\x1f\x27\x7d\x7d\x90\x07\x4b\x6e\x33\xe2\x5b\x0b\x11\x7c\x1e\xc2\x92\x0f\x72\x0b\xe1\xb1\x98\x3e\x7c\x76\x86\x68\xf6\xf7\xb2\xd6\x94\xac\xe7\x4b\x8e\x9f\x27\x0c\xd9\xbd\xa4\x61\xe0\x4d\x53\x51\x2b\xc9\x22\x75\xd1\xf2\x08\x01\xfa\x92\x76\x23\x3d\xe2\x44\xa0\xc3\x83\x39\x27\x0c\x08\x50\x5e\x37\xa7\xcd\x0c\x1c\xf2\x3c\x14\xe9\xb7\x1b\x13\xa0\xfa\x14\x9b\xb8\xd5\xb9\xaf\x63\xa6\x23\x4d\x19\x83\xe9\x8c\xfb\xcd\x73\x5f\x6e\x8d\xce\x53\x1c\xe9\x16\x55\x99\x31\x47\x67\x36\xb3\xdc\x36\x59\x40\x6e\x72\x56\xbb\xfe\x6e\x65\x30\xda\x67\x4b\xfe\xdf\x85\x50\x34\x6f\xe3\xd5\x2d\x18\x73\x30\x6c\xfb\xfc\xaf\xa1\x43\x53\x60\x06\xb7\xa0\x49\xc7\x44\x14\x51\xec\x83\x26\x85\x9c\x53\x77\x7e\x94\xd9\xdd\x35\xda\x37\xf8\x3f\xa2\x19\xe4\x10\x51\x99\xef\x6e\x31\xee\x8d\xbd\xa8\xd0\xd6\xa2\x75\x63\x3d\x2c\x91\xd6\xa7\x86\xf3\x3c\x10\xda\xe4\x3e\x07\x19\x53\x7e\x15\x56\xe7\xdc\xec\x07\x1b\x0c\xab\xcc\x23\x7f\x44\x11\x3d\x7a\x35\x66\xd0\x86\xfd\x8c\x15\x1a\xee\x0f\x03\x9e\x47\x37\xb7\xf5\xb7\x30\x68\x96\xd6\xb5\x25\xcb\xf9\xcd\x67\x55\x6f\x25\xb8\x9d\xd5\xd5\x88\xe8\x39\x07\x57\x7c\xfe\xbb\x89\xe6\xbc\xc7\x56\x5b\xd5\x5b\x0b\x5f\x4a\xf2\xa7\x29\x2a\x74\x56\x76\x1e\xfc\xb5\xd8\xad\xd2\x3f\x05\x16\x01\x52\x5d\xf7\xda\x0d\xa9\x4f\xba\x7c\xf8\xc8\x43\xa3\x9b\x13\x69\xb2\xe4\xd8\x6f\xf4\x2a\xdc\x52\x74\x6e\xad\x9f\xf0\xa3\x78\xae\x9a\xdd\x32\x40\x93\xc4\xe8\xa1\x9d\x4b\x15\x77\x7d\xf1\x04\x90\x13\x44\xf1\x44\x00\x91\x56\x32\xa3\xdf\x7c\x01\xca\x6f\xb8\x78\x22\x2a\xc9\x52\x2f\xa4\x55\xfb\x07\x47\x6a\x67\xb1\xcb\x0a\xd2\xf4\x46\x65\xd4\x13\x49\x5f\xb1\xab\x3b\x58\x2a\x73\xc0\x96\xc8\x6d\xef\xb3\x51\x27\xdc\xe8\xfd\xda\xca\x40\x8b\x8a\xb3\x68\xd8\x08\x2a\x75\xb8\x44\xde\x9c\x7f\x30\x90\xc7\x96\x1e\xa4\x3c\xb7\x90\xc8\x3b\x95\xa2\xd4\x9d\x0f\xe7\x2a\xff\x49\x90\x57\xf8\x04\xe0\x70\x79\x0a\x68\x27\xeb\x52\xc8\x79\x32\xb7\x6e\xa1\x38\x33\xd1\xc0\xfd\x2a\xd9\x80\x38\xfc\x51\xae\xe3\x2d\xb2\xe6\x8a\x9f\xb9\x35\xb0\xe2\x1a\x7b\x53\xbb\x92\x44\xdd\xe8\xf2\x71\x4a\x75\xfc\xec\x9f\x13\x72\x25\x65\x59\x29\xc3\xb2\x33\x26\x8c\x4f\xf5\x32\x40\xb2\x16\xb3\x58\xc2\x07\x79\x7e\x26\xbf\xd4\x9c\x43\xb5\xff\xcc\x53\x79\x71\xeb\x41\x1e\x88\xd6\x2b\xf5\x22\x6e\x60\x4e\x1e\x2a\x3f\x03\x6b\xab\x34\x30\x54\x4b\xc2\xf5\xd5\x39\xcc\x6f\x51\xa3\x9c\x72\xdd\xbe\xb1\xba\xdd\xbb\x08\xcd\x30\xf0\xec\x6b\x7f\xce\x92\x52\x93\x03\x21\x9a\xb5\x2e\x6c\x28\x1f\x3f\x4b\xca\x6c\x7d\x9f\xeb\x8a\xad\xe3\xa6\x03\xfd\x93\x89\xab\x65\xa2\xed\xee\x0e\x9e\x20\x54\xff\xd8\xdf\x36\xea\x7b\xf6\xfd\xd7\x5b\x67\x5b\x23\x75\x97\xde\x96\xc1\xa6\xee\xe2\x14\xc0\x0f\xb1\xb9\x73\xbc\x29\x5c\x4c\xc1\x37\x4f\x79\xea\x67\x94\xa9\xa3\xb5\xc8\x0a\x8e\x9d\xc9\x87\x4d\x67\xba\x29\x7d\x75\xba\xcc\xe0\x4e\xb4\xeb\x35\x01\xbf\x99\xa5\xdc\x24\xe5\x92\x01\x7f\xcb\xfa\x11\x80\xf5\xbf\x38\xd3\x56\xbf\x8c\x5a\x3d\x05\x8d\x5a\xf5\x25\x44\x64\x5c\x82\x43\x09\x56\x60\x4c\x8b\x88\xbd\x96\x6e\x62\xe9\x61\x10\xe4\x1d\x4a\x50\xc1\x9a\x7f\x35\x1d\x29\x5f\x59\x46\x95\xd3\x2d\x27\xd8\xd4\xeb\x3f\xc4\xa6\xb5\x1d\xfd\x22\xef\x6b\xa6\x8e\x79\xbb\xcf\x1f\x5b\xa6\x81\xbb\x9b\xff\x23\x53\xda\x49\xb6\xf7\xcc\xfd\x0c\xe4\xa8\x
fd\xdb\xfd\x70\x8e\xeb\x2d\x42\xf4\x3d\x71\xc4\xd8\x50\x8b\x0a\x30\xc8\xdd\x7e\xb3\x30\x4e\x8d\xf6\x40\x45\x83\x71\xd1\x26\x1f\xca\x8b\x24\xda\x2a\x13\x20\x04\x9d\x83\x22\x5d\xa5\x69\x61\x68\xe2\x51\xf7\xa7\x72\x25\xc0\xef\x2f\x5c\x27\x8b\xd3\x56\x99\x42\xd8\xaa\x3d\x1a\x91\x89\xab\x06\x6a\x08\x51\x8b\x46\x6d\x0f\xda\x2f\xa1\xbc\x38\x09\x0c\x8f\x61\x44\x58\xe5\xc5\x19\x43\x64\x88\x30\x92\x1c\x8d\xae\x41\xe7\xeb\xb7\x42\xea\xaf\xdb\x5d\xca\x10\x33\xcb\xf9\xb3\x64\x0e\xcf\x91\xb1\x42\x3a\xb5\xab\x1e\x64\xc1\x4d\xc7\xb5\xb7\x8b\x36\x8a\x80\xea\x2d\x23\xd1\x36\xb4\x94\xdb\xec\x6e\x3d\x2d\xbf\x82\xec\xbc\x9d\x61\x0e\x9a\xb7\x7f\xfc\x2f\x6c\x49\x03\xd2\x98\x05\x02\x79\x9a\x89\x2f\x86\x34\xcb\xd0\x60\x55\x4f\x1a\xb5\xcf\x3e\xfb\xaf\xc8\xb3\x0c\x36\x95\xb9\x63\x60\xa7\xea\x94\xa1\x22\x37\xcc\x3c\x49\x4e\x88\xfa\x50\x9a\x57\xb5\x05\xcd\x92\x59\xbf\x7e\x92\xad\xe6\x67\xb8\xc5\x7f\xbe\x67\x26\x9f\x36\x31\x49\x9b\x44\x00\xc1\xf4\xfc\x23\x38\xa4\x8a\x82\xf8\x3c\x77\xdf\x42\xab\xc2\xe1\x1d\x68\x4a\x22\x50\x6f\x24\x54\xc3\x68\x8b\x5c\xec\x22\x5b\xce\xe0\xb2\xc3\xb1\x50\x1c\xeb\x99\x7b\xdb\x57\xc1\xd3\x3f\x92\xba\xec\xcd\xae\x76\xa7\x41\x67\x16\x1b\xcb\xd0\x5f\x62\x99\x45\xaf\xfe\xf8\x17\x74\x58\x55\x24\xd6\x33\xdc\x12\xc8\xa8\xfe\x67\xb6\xe6\xa0\xb4\xf7\x71\x93\x3e\x7c\x6b\xa6\xfc\x68\x9c\xfd\x3d\x14\x08\x86\x9d\x27\x23\xd8\xb0\xf1\xd4\x44\x81\xc1\x3b\x87\xa3\x27\xa4\x75\x7d\x25\x8c\xa9\xc8\x3f\x36\x94\x84\xae\x4b\xe9\x3e\x07\x3e\x18\x99\x76\x92\x31\xab\x21\xe4\x61\x9f\xcd\x4c\x5f\xf4\x43\x93\x0d\x47\x6d\x32\x6d\x3d\xea\xc1\x77\xfc\xa8\x55\x12\x81\x68\xbf\x27\xd2\x59\x0d\x8e\x1f\xae\x5b\x18\xd9\x3a\x2a\x8f\xb9\xde\xaf\x42\x8d\x9e\x43\x21\x21\x7b\x62\x3e\xe2\x3e\x56\xe5\x1a\x1e\x45\xcc\xdf\xf2\x9b\xc2\x3d\x90\xc8\x2b\x11\x06\x78\x7b\x86\x4f\x26\x08\xd5\x62\x62\xff\xd3\x85\x77\x57\x70\x92\x8e\xe7\x42\xf1\xbe\x63\x4e\x5c\x56\xa0\xfb\x78\x50\xcd\xe8\xd0\xdb\x71\x78\x83\xa2\x5f\x19\x8d\x8b\xd3\xb0\xc7\x1f\x0b\xbb\x31\x79\xef\xd4\x52\x12\xf0\xee\xe9\x11\x71\x68\xa7\x58\xb7\x30\xdd\x0c\xa3\x20\x43\x0c\xd7\x3a\xdf\x9a\xa5\x4c\x3d\xa5\x1a\x62\xfd\xc9\x59\xdc\xee\xe4\x8f\x28\x7d\xb7\x30\xcb\x04\x31\xc1\xff\x95\xac\x7b\xd3\x86\xff\x48\x9e\xe6\x29\x52\x84\x28\xa1\xe2\x42\xa2\x75\x1b\x15\xda\x39\x92\xbf\xad\x42\x93\x88\xd5\x87\x8e\xc5\xf9\x08\x74\x59\x6d\x88\x55\x02\x23\xdf\x56\x47\xde\x27\x71\xb9\xf7\x51\xc1\xf3\x43\x73\x8f\xe0\x5d\x6b\x56\x06\x6a\x3f\x62\x7b\x62\x9c\xba\x2f\xb1\x81\xd8\xdf\x3e\x6e\xe7\xc1\xc7\x85\x93\x8d\x70\xff\xe4\x03\xac\xfa\x14\x20\x23\x11\xb5\x80\x55\x16\x41\xbc\xbe\xcb\xf6\x33\xfe\xd4\xcc\xb5\xf5\x75\x6f\xd2\xb1\x35\xc7\x5b\x4f\x37\x9f\x24\x6e\xf5\xeb\xc5\x93\x83\x12\xce\x70\x51\xa8\xb4\x36\xf2\x1f\xac\x4f\xe1\xc4\xed\x43\xea\x79\xd5\x86\x90\xce\xdb\x5e\x41\xd9\xf2\x4e\x64\x0d\x45\x3c\xeb\xa5\x3f\x74\x64\x58\xe7\xeb\x62\xd7\xfc\x67\x8f\x55\x2e\xa5\x4b\x30\xb9\x6f\x0f\x02\xd5\xd9\xab\xc1\x3a\xab\xd5\xae\x70\x0f\x21\xc3\x5c\xe9\xa1\x61\x0a\xea\x47\x0d\x62\x6f\xb5\xa7\x7f\xd2\xc3\x74\xaa\xa1\x29\x6d\xbd\x77\xbf\x49\x48\xcb\x11\x39\x1d\x81\xa5\x0a\xeb\x11\x6f\x41\xea\xd0\x3e\x22\x84\xa3\x2a\x27\x77\xfa\x23\x01\xb5\xfd\x23\xc5\x2e\x03\x5a\xa1\x11\xc3\x7e\x93\x9b\x7d\x4b\x00\x6c\xc7\x67\xed\xaf\x29\xbc\xdf\xb5\x46\x03\x21\x43\xd2\x33\xdb\x91\x8b\xd1\x1c\x88\xbd\xbe\x73\x0c\xba\x87\x40\xda\x6e\x37\x2b\x46\x65\x7b\x1d\xa9\x1f\x05\x13\x53\x68\xa0\x12\xa2\xb5\x91\x01\x6a\xf1\x2c\x0c\xbf\xe4\x2c\x41\xec\x3d\xdf\xc3\xad\x3e\x37\x9c\x4a\xad\xdd\x65\x67\x03\x0b\xcc\x46\x07\x67\x5b\x61\x0d\x7b\x15\xaf\x56\x04\xbc\x78\x56\x45\x75\x61\x56\x8a\x02\x22\x0a\xe4\x5d\x5f\x67\x50\xb7\x90\xe4\x29\x60\x1d\xee\x4c\xeb\
x46\x78\x50\x25\x5a\x2b\xde\x68\x14\x48\x94\x36\x38\xa9\xa4\x37\xe3\xed\xaf\x87\x15\xe4\x36\xde\xfb\x88\x2e\xac\x43\xfb\x88\xc8\x35\x0e\x67\xc1\x4d\x71\x9f\xad\x7e\x44\x7d\x56\xb8\xd2\x3d\x4e\x49\x44\xad\x9a\x55\x33\x30\x52\xed\x7a\x83\xae\x18\x77\x67\xe2\xd3\x06\x4b\x0a\x57\x9b\x02\x22\x3f\x7b\xd7\x71\xf6\x91\xb4\x52\xe7\x5b\x3b\xdf\xb5\xae\x5f\x0a\xcb\xda\xd0\x91\xe9\x20\xe0\x4f\xf3\xe2\x99\x42\x63\x45\x09\xe2\x69\xb9\x54\x3d\x61\x54\x28\x5d\xaa\xad\x78\x6d\xc3\x4b\xb7\x2e\x0b\xe9\xc1\x88\x9a\xca\x05\x60\xb3\xee\xcc\x50\xc4\x9d\xde\x5d\xe5\x5d\x98\x94\xe9\xbd\x33\xad\xf7\xae\x40\x01\x0c\x72\x95\xd8\xe0\x3d\xc0\xbe\x21\x0a\xcb\xd8\x16\x46\x7d\x59\x3b\xae\xeb\x9b\xf4\x62\xb7\x71\xfa\xf9\xa7\x32\x0a\xb1\x95\xa5\xdd\xbc\xfe\x39\xad\xbc\xbc\x18\xfe\xb9\x81\xaa\x77\xec\xb6\xec\xf1\x29\xab\x40\x82\x24\x9e\x4f\xd0\xa1\xfd\x27\x20\xc5\xd5\xc6\x10\x3f\x2e\x36\x0b\x8d\x31\x36\x88\x67\x68\x95\x9b\x51\xd2\x92\x16\x86\x85\x2c\x27\xeb\x33\x53\x32\x7c\x0b\x69\x4c\x37\x63\x25\x79\x39\xaf\x61\xcd\xee\xba\x2e\x64\xcb\x6b\x54\x8a\x72\x67\xbe\xf5\x96\x17\xca\x57\x6f\xb9\xd0\x05\x6a\xcb\xfe\x33\xc4\x17\x17\xf9\x19\xa1\x7d\xf1\x75\xc7\xb7\xb2\xac\xb6\x11\x31\x07\xd1\x79\x47\x06\x41\xb7\x71\xa7\x74\x0d\x40\x12\x5d\x7a\xf3\x9e\x85\xc3\x25\x27\x72\xaf\x67\xe4\xb2\xf9\xe9\xc6\xe7\x5d\xa4\xbb\x61\x38\x46\xdd\xb0\x6a\x6d\x99\xea\x19\xc0\xb3\x08\xc1\x80\x59\x71\x37\xb7\x73\x0d\xc3\x4b\x10\xbb\xdb\x95\xc2\xab\xf5\xe6\xa7\x47\x62\x77\x13\x6c\x41\xf6\x6e\x9b\xfb\x77\x92\xe1\xad\x0d\x57\xe9\x31\xb8\x7b\x2b\x72\x3b\xb4\x71\x6a\xfd\x04\xe0\x8d\x38\x4a\x61\xef\x9b\xbe\x5f\x59\xb2\x7e\x45\xb2\xfe\xca\xc6\x6c\xb3\xc1\x95\x5c\x84\x35\x7d\xbd\x91\x9e\x0a\xe6\x36\x3d\x74\x9d\x90\xdd\xb9\xd9\x20\x66\x10\xbb\xe9\x92\x60\xf0\xb0\x8b\x76\xc1\x5c\x6e\x15\x0a\xbb\x5e\x83\xe9\x19\x3c\xed\x2f\xe7\xb2\x0d\xc9\x06\x0b\xdc\x6d\xdd\x96\xd4\xce\xaf\x9a\x44\xe4\x04\xc5\x86\x9b\x8e\xdd\x38\xb7\x00\xef\x57\x65\x05\xbb\xf3\xab\x14\x10\x92\xd2\xaf\x00\xc2\x23\x76\xac\x9b\x40\x89\xf7\x13\x82\xcc\xc1\x2d\x6f\xb2\x0d\xda\x90\xb7\xa4\x7d\xf6\x6f\xa7\x76\x5f\x62\xbb\x63\xe6\x47\xfc\x70\x7c\x36\x64\x67\xef\xa9\xca\xe2\x76\xdd\x9e\xc1\x19\x77\x49\xbf\x2c\x58\x62\x9d\xc4\x03\xd2\xda\x6f\xf8\xd2\x5f\xe0\x30\xb2\x74\x76\xf8\x8f\x90\x5f\xc0\x44\x0f\x8a\x7d\x61\x52\x2e\x18\xe1\xbc\xda\x67\xbd\x3f\xfc\x1a\x23\x0d\x6e\x4b\x39\x18\xee\xd3\x47\x8f\x71\xba\xb3\x2e\x3b\x58\xe9\x93\x06\x59\x08\x27\xc6\x69\xc5\xcf\x73\xd3\x59\xd5\x98\x49\x26\xce\x69\x7c\x62\x14\xf9\x1f\x28\xe5\xcc\x6e\x01\x29\x6c\x25\x48\xec\x3c\xad\xb7\xba\x81\xeb\xad\xb7\x7e\xbd\x17\xef\xf9\xaf\xf7\x94\xbb\x41\x6c\xf1\x36\x7a\xd5\x34\xc3\x5a\xdf\x93\xdc\x5a\xd5\x5b\x70\x0a\x7a\xf7\x60\xad\x67\x10\xd0\x79\x97\xc8\x44\x17\xa1\xbb\xb6\xd8\x57\x23\xfd\xb5\x1e\x1a\xc5\x81\xa7\x1e\xc7\xab\x9b\x80\x8d\x58\x35\xc9\x0e\x8c\x6c\x3b\x8f\x48\xd9\xc2\x48\x5e\x77\x0d\x16\xfb\x53\x6f\xd5\x5a\x21\x39\xb9\xca\x9e\x08\x3d\xf0\x1f\x55\xbe\xbc\x2a\x19\xe6\x65\xd7\x04\x1b\xbe\xd0\x5f\x10\x0c\x29\x93\x2b\xc7\x0c\x74\x83\x55\xd6\x19\xc8\x1a\x4f\xf9\xc5\x8a\xf1\x8d\xd9\x3b\x85\x85\x1d\x77\xd5\x0a\xab\xc1\x4f\x11\xcd\xc9\xa1\xcf\x3e\xd1\x24\x12\x7c\xe6\x48\x6f\xcd\x07\xd3\xf3\x60\x15\xab\x61\xe5\x2a\xaa\x14\xfc\x9e\xb8\xa3\xc2\x5f\x2b\xd3\xe3\x89\x30\x52\xe3\x56\x33\x26\x98\x04\xf9\xa4\x72\xbe\xe4\x3f\x1b\x64\xba\xd5\xc6\xd2\xe1\x40\x3b\xbd\x99\x91\xb1\xe6\x7f\x1c\x67\xf3\xb8\xc6\x85\xe8\xc2\x82\x9c\x9f\x93\x52\x4a\xd7\xe4\x32\x4d\x8e\x3e\x24\xad\x7b\x64\x67\x4d\xff\x13\x5f\x3d\x74\x45\x41\x38\x6a\xca\xf6\x59\xd3\xab\x72\x6f\xc4\x32\xef\x40\x13\xff\x77\xd5\x9b\x71\x12\x83\xe1\x64\x7b\xcd\x3a\x9c\xf2\xcb\x41\xaa\x8f\x0e\x33
\x32\x87\xd8\xc1\x72\xcf\x5a\xa6\x86\xae\x29\xdc\x84\xad\xe7\x5f\xf8\x4f\x1b\x7d\x6d\xfc\xdf\xfe\x2d\x6b\x0d\xae\xbc\x75\x66\x68\xb0\x8b\xd9\xbf\xa7\xfc\x72\xf7\x89\x8a\x3e\xce\x28\xf5\x26\x62\x4d\x5b\xb7\xa6\x5d\x75\xba\x95\x67\x58\xe2\x73\xb0\x4f\xfd\xaa\xdc\xc4\xe3\x8c\x6b\x9a\xc5\xfc\x77\xfa\x1a\x51\x6e\xd1\x3e\x82\xa7\xcf\x7f\x3f\xec\x16\xeb\x37\x87\xc8\xc0\xd0\xc4\x29\x86\xd8\x66\xa7\x1f\xdf\xba\x25\x60\xe7\x2b\xf8\xb3\xdc\xeb\x3f\xbe\xb3\xf7\x9f\xf7\xcb\x97\x3b\xcf\x3b\xb9\xe0\xf7\x60\xaf\x08\x57\xb6\xef\x4f\xed\x18\x14\x7e\xa7\xaa\x37\x56\x77\x8b\x0d\x6d\xcb\x52\x05\x69\xde\x8a\x99\x30\x9e\xa3\x85\x27\xe4\x02\xf7\x3e\xa7\x86\xb4\x58\x62\x79\x86\x51\x6e\xb0\xb6\x1b\x19\x1a\xc2\x22\xa2\x37\x66\x0d\x59\x19\xdc\xe9\x96\x90\x5a\x4e\xab\xc1\xa5\xb2\x87\xb9\x54\x65\xa0\x2d\x55\x8a\x83\x4b\x3d\x10\x4e\x72\xb4\x33\xbd\x60\x89\xdc\x17\x03\x64\xe3\xcb\x60\x70\xa9\x73\x90\xff\x31\x18\xe5\x71\xf2\x5e\x9e\x3c\xb4\xa8\xfb\x48\xd3\x0d\x96\xff\x40\x92\x38\xc4\xa0\x74\x55\xbb\x98\xbe\x06\xb2\x42\xb7\xe4\xf4\x93\xc8\x5f\x69\xfb\xe0\x3f\x7d\x75\x5d\x3d\xd1\xf5\xd5\x42\xb9\xcf\x2e\x5b\xd5\xa5\x9c\x29\x88\xf1\x45\xd4\xfb\xc6\x7e\xdb\xa2\x18\x7a\x6f\x34\xe0\xa7\x9b\xc3\x73\x37\x22\x22\x0b\x3b\x0a\x20\xf3\x6b\x76\x7a\x91\x2c\xc4\x52\x86\x3a\x69\x78\x87\xb1\xe5\x0b\x55\xa2\xdf\xa3\xfc\x61\x26\xf9\x12\x4e\xc0\xe2\x5e\x2e\xb2\x27\x5f\x5c\xb6\xff\x95\xfc\x68\x48\x7a\xf0\x41\x3c\x10\x0f\x9c\xb7\xe3\x52\xa9\x15\x99\x44\x74\xeb\x46\x49\xa1\x6e\xc1\x00\x3d\xb7\xa0\xc5\x7f\x45\x80\xff\xfe\x98\xf4\xef\x04\x43\x61\xae\x0e\xef\xe5\x74\xde\xfc\xe7\xa9\x24\x0e\xa0\x1c\x2c\xfc\xdf\xe1\xfa\x40\xca\xa0\x6c\xb1\x16\x37\xe8\x74\xb0\x6b\xe2\x77\x61\xe6\x01\xf8\xf7\xdc\x6d\x8d\xb0\xce\xa2\x6f\x14\x4c\xfc\x9d\xff\x9b\x29\xc7\x90\x66\x4d\xc9\x2d\x31\xa9\x0b\x07\x5e\x8a\xf3\xfd\xdd\x08\x00\x9c\x8e\x48\x3a\xf9\xbb\x33\xd5\x24\xff\x6e\xf9\x8c\xff\x31\xfa\xf6\x57\x31\x77\xe3\x51\x9e\xbf\xba\x2b\xad\xd2\x10\x69\xf2\x55\x9c\xfa\x3a\x04\x33\xff\x49\xd3\x42\xf8\xf3\xfe\xa5\xd1\x2a\x29\xff\x7f\x7a\xff\xf2\x9c\xfe\x78\x03\x1c\xed\x32\x3d\xff\x4b\x92\x45\xfa\x43\xb8\xa2\x93\xf0\x77\x15\xb1\x3c\x2e\xbf\x83\xc4\xbc\xde\x5f\x1a\x06\x06\x1e\x0b\x63\x3c\xe5\xa5\x10\x42\xe4\xcf\xd8\xa0\x44\x12\x98\x05\xd6\xdd\x6a\x92\x0d\xcb\xc6\xd7\xde\xb7\xfa\xa4\xd5\x2f\xdf\xb7\xb2\x64\x99\xa3\xfc\x63\x4d\x39\x7f\x82\x5c\xff\x3f\xb5\x9b\x90\xb1\xa5\x35\xeb\x2d\x42\x74\x58\x17\xfa\x0c\x14\xd7\x74\xb2\xd4\x7c\x6f\xdd\x4b\x61\x16\x3b\x19\x2a\x6e\x85\xa4\x38\xef\xae\x8b\xf4\xff\x64\x2b\x30\xdf\x53\xc8\x04\x0c\xbc\x03\x30\xd0\xf3\x68\xce\x8c\x48\x8a\xc2\xdd\x33\xc6\x99\x24\xcf\x9f\x1c\xdb\x22\x6b\x56\xde\xbf\x9d\x99\x6d\x97\x4c\xf2\x7a\xdd\xd5\x92\xfd\x04\x6a\xc1\xe6\xd7\xa1\xd6\xa0\xf7\x5b\x9f\x44\xde\x35\xa4\xf4\xf7\x09\x57\x5f\xfc\xb1\xfa\x3b\x39\xbd\xce\xfd\x54\xf6\x52\x28\xf1\x63\xae\x53\x9c\x6c\x92\xaf\xaf\x0e\x5a\x1a\x7f\x48\xd9\xbf\xb1\x5f\xd9\x51\xab\xf0\x85\x98\x7f\x16\xbf\xd2\x75\xb0\x8e\xc6\x97\x85\x94\x3d\x66\xeb\x72\x84\xe5\xef\x33\x51\x2b\xcc\x9a\x0a\x72\x52\xe9\x02\x0c\x90\x43\xbb\x79\xce\xdd\xda\x57\x16\x97\x48\x11\x98\x97\x70\xc2\x06\xc3\x3f\x33\xda\xa7\x39\x20\xc1\x27\xf9\xe8\xd9\x2e\xef\x79\x8b\xe1\x2f\xc6\xc7\x1c\x9a\x3b\x60\xec\x77\xd1\x63\x2c\x14\xf1\xf8\xa7\x90\x1c\x90\x69\x37\x0c\x82\xe3\x7c\xe5\x11\x3a\x02\x88\xa8\x72\xb3\xa7\x78\xf0\xd6\x81\xd3\xc3\xcf\x43\x6c\xd5\xd9\xb8\x06\xcf\xb5\x90\x4a\xff\x0d\xf0\xe1\x84\xbe\x01\x0e\x0b\xe7\xf4\x66\xa5\x38\xa3\xd3\xb1\x25\x91\xe8\x5f\xfc\x85\x9e\xe2\xea\xe0\xa4\xfb\x38\x38\xff\x7b\x18\x3d\x83\xe1\xb9\x12\x5c\x7b\x37\x13\x8e\x55\xf4\xf7\x33\x70\x94\x10\x00\x58\x9a\xe6\x50\xe6\xf0\x8d\x81\x16\x80\x5
6\xa0\x7f\xc5\xe9\x17\xab\x93\xbe\x59\x9e\xc1\x3b\x75\x2f\x52\x37\x66\x9c\x9d\x52\x41\xb0\xb9\x11\xf3\x56\xa8\xd3\x90\xd8\x0a\x23\x0b\x9d\x49\x87\x73\x8c\x4e\x30\x11\xb7\xc6\x55\x14\x2a\x0e\x3c\x9f\xec\x4c\xc2\x47\x98\xdb\xe8\x42\xa6\x22\x86\xb2\xc1\xc4\x6d\xc7\x25\x3e\xe6\x34\xe4\xec\xd6\xb4\xf7\x87\x81\xee\xa7\x0f\x0c\x9c\x95\x7c\xef\x9a\xac\x7b\x92\xda\x10\x56\xc2\x61\x11\x0e\x38\xa8\x81\x32\x2c\xdf\xd9\x1b\xf6\x84\x8d\xda\x03\x4a\x2f\x7c\xde\xb4\x74\xbd\x47\xd1\xaa\x6f\x5c\x94\x23\xeb\xb3\x0c\x3f\xf6\x3e\xb2\x48\x9d\xed\xee\x12\xc4\x91\x64\x6b\x4b\x9b\xbf\x8c\xae\x3c\x10\x8a\xf2\xcf\x7f\xf5\x87\x3d\x68\xa8\x2f\xca\x30\xf7\xd2\x4c\x14\x80\x5c\x4f\x0d\x3e\x1e\x85\x56\xe4\x2e\x59\xa0\x5e\xfe\x33\x26\x82\x20\x56\xa0\x89\x2c\xef\xd3\xe9\x26\xbb\xea\x80\x22\x20\x54\x20\x88\xa4\x64\x5b\x12\xf7\x5f\xa6\x6a\x36\xf4\x92\xd7\x6e\xbd\x99\xe4\xf3\x8c\x38\xd8\x93\xb9\xec\x88\xe3\xc5\x71\x6b\x8b\x59\x16\xc8\x0d\x44\x34\x08\x98\x32\x2e\x40\x6b\x5f\x07\x51\x8b\x90\x26\x98\x03\xe2\xd5\x92\x0a\xc1\xfe\x13\x3a\x58\xa7\x9f\xc0\x23\x96\x68\xd9\xf8\xc4\x14\x5f\x08\x0c\xb4\x55\xda\x0d\xd4\xf9\xe6\xd1\xb2\x3a\x57\xcf\x3a\x48\x99\x60\xc8\x6a\x1f\xec\x1b\xf5\xa6\xfc\xb9\x4a\x68\xcc\x4d\x91\x79\xaf\x3c\xbf\x9f\x1b\x75\x1f\xa2\x67\xb9\x4f\x89\x03\xb4\xae\x30\x70\xb0\xc7\xf1\x8c\x81\x19\xb4\x06\x74\xbf\x8a\xdd\xc9\x4a\x4a\x3e\x6a\x70\x09\x16\x64\x4d\xc0\x3c\x73\x37\xab\x7a\xa2\x72\x12\xf3\x1f\x63\x49\x5d\x4d\x3e\x74\x12\xb6\xce\x56\xe8\x66\x64\xaa\xb7\x3c\xff\xa1\x28\x43\x32\x40\x0a\x01\xc3\x20\x6f\xe3\x8f\x76\x4f\xca\xaa\x7c\x26\x37\x9b\xfd\x8f\x32\x00\x65\xe7\x84\xd4\x13\xb3\x14\xf1\xc2\xe2\x57\x17\xca\x4f\x43\x18\xa0\xb2\x94\x6d\xf1\x2e\xff\x6c\xf1\xbf\x9f\x69\x3d\xf4\x9f\x75\x95\x51\xf2\x5a\xbe\x3a\xf9\xdf\xc4\xd2\x2d\x19\x9e\xf1\x8a\x27\x7b\x5b\x9f\xe1\x6d\x2c\x39\x80\xc6\xbc\x65\x98\x1d\x2b\x6f\xf2\xd1\x6c\x4c\xcc\x80\xfd\xa3\x75\x0b\x22\x97\x07\x38\x05\xda\xc4\x9b\xe4\x8c\x51\x89\x51\x3c\x42\xbe\xd9\x69\xff\x21\x8e\x0a\x5b\x63\x16\x2d\x77\x80\xfb\x82\x0c\x51\x66\x87\x3c\x38\xb9\xf3\x08\x0d\x95\x87\x3c\x0a\xad\x64\xde\xc5\xa3\x87\x49\x9d\x99\xbf\x77\x0a\x95\x2d\xe6\x87\x12\x96\x1e\x65\x75\x2e\x98\x95\xe4\x90\x3c\x4a\xee\xcc\xfe\xde\x82\x80\xaf\x9f\x3f\x8d\x3c\x7e\xeb\x67\x48\x6c\x06\x9e\xc3\x39\x2c\x87\xf5\x5f\xf9\xdb\x3e\xa4\xbd\xff\xc8\x4a\xb3\x7d\x64\xa7\xbe\x96\x80\xbc\x12\xcc\x4b\x4b\x3c\x21\xf8\x21\x8f\x74\x9f\xbb\x9a\xf8\x87\xb7\x14\xbe\x3b\x69\x6c\x8f\x20\x71\x3c\x3c\x76\xb5\xca\xd4\xd8\xee\xa0\xd2\xab\x0c\xb3\xf7\x84\x89\x53\x49\x00\x40\xbf\xd0\xdf\x9a\x47\x82\x67\x2b\x37\x1b\xfc\x3d\xc9\xff\x53\x48\x3a\xbf\xc5\x5b\x27\x51\x10\x0d\x77\x66\x0d\x65\x3d\x78\xe6\xaf\x79\x2c\xb7\x8f\x18\xdc\x04\x59\x29\x53\xf9\x47\xfe\x4f\x3f\x8f\x60\xe5\x20\xe6\xf3\x60\xdd\x01\x11\x29\xe8\x88\xd3\x74\xc0\x6e\xe3\x97\xcc\xfa\x83\xb9\x3d\x56\xb2\xc2\xce\xfb\xf5\x2e\x9c\xa8\xc9\x91\x45\x49\xb3\x63\x07\xe8\x06\x28\x32\x9c\x43\xd2\x1d\x09\x36\xe2\xeb\xcb\x43\x59\x4a\xe7\xa8\x23\x82\xa4\xdf\xed\xa7\x34\xd1\x9f\x9f\x32\x73\x44\x48\x38\x54\x08\x28\x8c\x92\x7f\xd3\x41\x18\x33\xaf\x3f\x31\x02\x4e\x75\x66\xe8\x20\x00\x58\xdd\x4f\x3b\xdd\x0f\x87\xd6\x13\xf9\x96\x93\x24\xd4\xa7\x5b\x92\xce\x53\xa4\x45\x4d\x90\xa4\xe5\x26\xbf\x6e\xdd\x74\x53\x3f\x60\x52\xc2\xe2\xc4\x91\xea\x04\xd7\x96\xdd\xc1\x43\x2d\xe5\x74\xf7\x33\xde\x63\xcb\x87\xfe\x3d\xd0\x7b\xda\x01\xcb\xa2\xd2\x3b\x1c\x93\xa7\x67\x38\x28\xd4\xcd\x9c\x6c\xac\xad\x9e\xfd\x54\xff\x77\x25\x4a\x83\xc3\x63\x99\x47\xac\xef\xdd\x9b\x24\xb0\xfa\x33\x09\xfc\x72\xb4\x9d\x22\x48\x36\x49\x22\x76\xfa\x51\xfc\xcf\x1c\x5c\xff\x21\x6e\x00\x1a\x06\x96\x14\x23\x9b\x6a\xb7\xca\x00\x7c\x32\xa8\x32\x
d5\x59\xfa\x02\x21\xef\x04\xa4\x4e\xd8\x54\x07\x9d\x79\x08\x4f\xe3\x42\xf7\x83\xa9\x58\xed\xbe\x8a\xf0\xff\x8f\xf3\x70\x89\x29\xbf\xa9\x28\xb3\x65\x0a\x37\x24\xfb\x70\x93\xf7\xf6\xc0\xca\xff\xa8\xe4\x23\xca\x2f\xcd\xf2\x4e\xfe\xf6\x3b\xb8\x38\x58\x9a\xf2\x09\x9f\x2b\xb6\xad\x13\xec\x1b\xb9\xeb\xc9\x18\xa9\x2b\xe1\xf3\x5f\x47\x9e\x44\xc6\x0f\x52\xc4\xe4\xa4\x2c\xde\xa2\xee\x26\x0d\xc0\x3d\xb7\xd2\xfe\x8a\x8c\x94\x31\x17\x32\x87\x7a\x00\x22\xe9\xd4\x79\x77\x9c\xc5\xd5\x7f\x2a\x5a\x69\xf0\x11\xfa\xbd\xc8\xa9\xbe\x5d\x70\xf8\xd4\xd2\x16\xd4\xff\x34\xf2\x1a\xca\xa5\x06\xca\xe0\x29\xff\xec\x0c\x79\x22\xb2\xe5\x01\xd3\xe3\x3f\xd2\xf9\xa5\xb3\x90\x51\x27\x1e\x87\x8e\x39\x62\xce\x58\x3f\x68\x68\x37\x5f\x13\xb7\x73\x2e\x44\x3a\x63\xd8\x21\x19\xf2\xc6\x71\x82\x24\xac\x0f\x97\x27\x68\xdd\x90\x93\x2f\x1f\x98\xc9\x4d\x10\x9d\x64\x6c\x70\xf8\xb8\xe5\x8a\x2f\x40\xd2\x87\x9b\x1e\x8d\xf3\x60\x53\xda\xc5\xff\x4f\x33\x8f\xbb\x7a\x92\x16\x21\x58\x7e\x92\x4f\x48\x52\x30\xb0\x6e\x26\xc5\x1b\x74\x0b\xd2\x78\xd3\x18\x63\xb2\xda\x83\x7d\xa2\xf1\xfb\x71\x1a\xfe\x7d\xf5\x26\x79\x8c\x61\xe4\x78\x23\xc7\x20\xb8\xc7\xf7\x1e\x12\x67\xa3\x0d\x25\xf9\x7f\x0c\xd3\x04\x30\x9d\xcf\x7f\xca\x10\x74\xbc\xa7\xe0\xf2\x73\x28\x3f\xde\xe3\x2d\x1b\xe7\x5b\x86\xcf\x71\xa0\x6e\x3e\xe1\xb6\xd1\xe4\x54\x3b\xff\x56\x84\xfb\x96\xd5\x21\x01\x0a\xd7\x68\x79\xed\x36\x69\x0b\x8f\x4d\xd4\x46\x03\x72\x9b\xb6\xa1\xb9\x9f\x1f\x0e\xd6\xab\xe8\xe0\x91\x0b\x30\x56\x75\x33\x20\x20\xe0\x93\x7e\x5a\xf8\xe9\x4a\xc1\x49\xfa\x9a\x83\xe9\x7f\x83\xb4\xe0\x8c\x79\xab\x07\xc8\xe0\xae\x4c\x4e\xb7\x72\xbf\xe5\x06\xdd\x96\xa6\x7d\x99\x40\x39\x22\xbd\x9e\x34\xf5\xce\xdc\xaf\xfb\x29\x23\xe2\xba\x33\x59\x1e\x20\xfe\xb7\x07\xa5\x0a\x6e\xd3\x3f\x63\x6e\xcf\xc5\xe7\xea\x6d\xa5\xba\x80\xb5\x0c\x8b\x74\x06\xc8\x3d\x0f\xf9\x6b\xd0\x01\xc3\x14\x7b\x55\x56\x31\xa6\x55\xdd\x58\xc4\x71\x89\xdf\x1e\xf4\x77\x64\xfa\xf2\x5e\x0d\x72\x9d\xe6\x04\xdb\xb8\xba\xd7\xb6\x23\xbc\x20\xfb\xc1\xd9\x44\x57\x1d\x08\xf7\xe9\x7d\x72\x5b\x10\x2e\xf4\x1c\x5a\xe8\x29\xeb\x18\x39\x74\xb1\x5d\x97\x40\x2d\x3b\xc4\x08\xb8\x61\xe9\xec\xfa\x02\x0b\x01\xa2\xca\x86\xc0\xbe\xce\xed\xf8\x09\x0b\x54\x4f\xac\xf7\xa3\xce\x50\xd5\xf7\xa9\x38\x64\x70\x92\x49\x3f\x7b\x6e\x3b\x1a\x58\x4a\x02\x58\xa5\x13\x8f\x66\xce\xbf\x8c\x71\x0a\x0b\xd0\xe7\xdc\x07\xdb\x94\x28\xc8\xeb\xcf\xc6\x1a\x3a\xd4\xd4\xc9\xf0\x81\x66\x12\xaa\x2a\x9f\xd2\x87\xc0\xa0\x31\x8e\x49\x3e\x22\x7f\xe7\x0c\xe3\x19\xdf\xc2\xfa\x4d\xb7\x7f\xa3\xd6\xcd\xa0\x42\x80\x35\xfa\xe1\x34\x6d\x9f\xac\x7a\x2d\x23\x34\x6e\xa5\x26\x70\x53\x85\x22\x75\x09\x83\xee\x56\x0d\xe1\xe6\x1f\x8a\x7f\xb7\x16\xff\x6f\x61\x94\x9e\xf4\x62\x25\xcc\x84\x32\x90\x0b\xf1\x81\x4f\xe8\x15\xac\x59\xff\x9e\x58\x7f\x8f\xe9\x60\x4e\xdf\x18\x96\x5f\x06\xa0\xc2\xec\x68\x3f\x29\x94\x32\xd2\x30\x30\x04\x08\xf4\x9e\x1a\xaa\x67\x5c\x01\x86\x13\x1e\xdd\x76\xf8\x97\x02\xc9\x21\x71\x44\xc6\xb6\x56\x62\x7a\x51\x48\x76\xde\xbf\xef\x30\xd1\x9f\x65\x4c\x2e\xf9\xe2\xa3\x65\xc7\x72\xf3\x1e\xd8\xeb\x1a\x6e\x7a\xd3\x0f\x1a\xe0\x58\x19\x83\xcf\x01\x5c\x6c\x78\xec\x3a\x6e\x5d\x91\x76\xf0\xbc\x9f\x8f\x50\xa8\x0b\x90\x50\x69\xe5\x42\xcd\x94\x81\xc6\x24\x5a\x29\x01\x3b\xf8\x67\xcb\x31\xf9\x2e\x98\x8c\x70\xe4\x0f\x73\x97\x20\xae\x2d\x3e\xb9\x53\x16\x53\x67\xb0\x7a\x43\x1c\xf0\x7b\x9a\x65\x49\x0c\x61\x03\x22\x12\x1b\xa1\x64\x30\x6b\xf2\x6c\x40\xad\x98\x42\x45\xe0\x0e\xdf\x0c\x6f\x70\x42\x6a\x01\x2e\x3f\xec\x3e\xd3\xd6\x2f\x60\x77\xcf\x35\xac\x0b\x69\x3e\x5a\xd5\x3a\xbc\x09\x61\xc8\x5d\xd5\x02\x0c\xad\x7c\x6f\x32\x92\x06\x89\xb2\x0c\x6e\xbb\x23\xd1\xbd\x21\x32\xbf\xc0\x20\x7e\x06\xcf\x1f\x43\x99\xb8\x18\xcc\x3a\
d\x9f\x61\x12\xdd\xe3\xbd\x32\x7c\xcc\x79\x25\xc5\xd3\x47\x8a\x60\xfd\xa8\x3e\x49\x59\xd8\x52\xa8\x9c\xd8\x85\xbc\x7c\xe8\x40\x82\x3e\x92\xdf\x5f\x0b\x06\x75\x7d\xda\xc8\xe5\xe1\xf6\xb9\x89\x4d\x12\xa1\x30\x35\xe0\x46\x06\x97\xae\xf1\x84\xd9\x8b\xfa\xea\xa4\xf6\x95\x15\x51\x4c\x9e\x2d\x40\xc8\x09\xc0\x35\x39\x04\x6e\x3b\xa3\x7a\x56\x01\x91\xc4\x3b\xd6\x3d\x25\x59\x89\x86\x4a\x43\x74\x0e\x74\x07\x27\xd8\x9d\xd8\x30\x6c\xa2\x24\xbe\xcb\xfa\x0e\x0e\xc7\xa0\x02\xe9\x68\x36\x76\x9e\x17\x4e\x0e\x47\x9a\x94\x56\x4a\x4e\xc1\xe3\xea\xc5\x7e\xc9\x4c\x87\xae\x4d\x82\xc0\x37\x39\x1f\x8c\xe3\x35\xcc\x77\x5a\xc9\x85\xd0\x55\xe5\x55\x00\x60\x6e\xc4\x9d\x4b\x7e\x88\x89\xb1\x2e\xa2\x43\xed\xaa\x88\xae\xba\x3a\x71\x8a\x62\x20\xf5\xba\x02\x02\xac\xf9\x74\x6a\x9e\xb7\xe0\x50\x44\x36\x3a\x6f\x72\x48\xa4\xe8\xec\xc8\x10\x5a\x45\x11\x79\x8b\xe0\xbb\x43\x0b\xe3\x7a\x44\xb7\x14\xda\x1c\x06\xb6\xde\x8d\x58\xfb\x78\xd4\x9c\x96\xf4\x19\xf8\x48\x16\x7b\x3b\xa4\xf8\x2b\x92\x22\xb1\x81\xf0\x94\x5a\x59\x57\xb4\x0e\xdd\x95\x08\x86\x75\x22\x48\x71\x68\xba\x92\xb1\x37\xc0\xe2\x8a\x76\x68\xac\x67\xb0\x49\x5a\xaf\xce\x83\x7e\x41\x03\x5d\x89\x98\x3b\xb0\x2a\xfb\x9e\x99\x0e\x97\x6e\xe6\xc1\x59\x46\x54\x97\x25\x1a\x63\xe0\x0a\x16\xaa\x2e\xa8\x0b\x63\xee\xe6\x94\x92\x24\x66\x3c\x83\x5a\x30\x8b\xfd\x11\x54\x87\x3c\xb8\xb1\xcc\x23\x46\xbe\xe4\x1f\x12\x10\x13\x2a\x12\x07\x04\xf2\x36\x89\xd8\x35\x44\x1b\x91\x3d\x20\xc2\x4c\x1b\xd7\x07\x77\xee\x74\x79\xe5\x15\x97\x22\x1a\xc7\xb2\x34\x26\xc8\x7b\xd3\x25\xe7\xf0\xc0\x74\xf6\x33\x31\x10\x66\xfa\xd8\xe0\xf4\x0e\x02\xc8\x1b\x89\x24\x34\xfd\x3a\x10\x9b\x68\xf7\xa4\xfb\x4b\x9f\x9b\xc4\x88\x1f\x31\x16\x7e\xc4\xc7\x8b\x29\x4c\x50\x46\x82\x1e\xf2\x0a\xfa\xc3\x4b\x14\xb5\xa9\x25\x79\x18\x9c\x52\x50\x38\x5e\x30\xc7\xb2\xc8\x1a\xc5\x17\x00\x14\xff\xf6\x20\x16\x00\x2c\xc8\xae\x29\x98\x24\x45\x9f\x01\x38\x89\x83\x12\x6f\xfc\x0c\x18\xf4\x8f\x91\x03\x05\x76\xc8\x60\xdb\x6e\x24\x4f\x5d\x92\x94\x06\xa8\x24\xdb\x7d\x1c\xdd\xcd\xd2\xe6\x2d\x97\x50\x9f\xb6\xe0\x93\xec\x74\x8e\x4d\xc4\x93\x9b\x6a\x18\x3c\x41\x5d\x62\x42\x17\xca\xe9\xe6\xb1\xb4\x4e\xad\x1c\x4a\x90\x42\x8a\x74\x72\xe9\xf9\x5a\xc1\x4d\xc4\x5f\x17\x3e\x2d\x79\x23\xbb\xc4\x06\x9d\xc8\xae\xdf\xa5\x29\xd8\x40\x13\x48\x5b\x74\xde\x81\x6d\xc8\x8c\x09\x3e\xb6\xfe\x79\xf3\xa3\x4a\x5a\x67\xc0\x57\x26\x1d\x93\xf4\xee\x18\xa8\xe7\xb4\x91\xa2\x27\x7c\x25\x34\x2b\x87\x37\x79\x8e\xd2\xad\xf5\xcb\x74\x3f\xfc\xcb\xc0\xfb\x6d\x5e\x53\x84\xa0\xd0\xd0\x01\x45\xa4\x76\xb1\xdf\x49\x71\xce\xe3\x2e\xdf\xfb\xbf\xa0\x80\x74\x10\x7c\x89\xd6\x0d\x86\x13\x9b\x64\x8f\x1b\x11\xfa\xf7\x2b\x76\x43\x86\xd5\x2b\x7c\x3c\x83\x17\x32\x66\xd2\x66\xb5\x9b\xe5\x43\xe6\x43\xf9\x10\xcc\x0e\xb9\x69\xcb\x03\x25\x8a\x3e\x9b\x21\xf2\x26\x67\xa3\xf3\xe3\x6f\x84\xd2\x1a\xf7\x65\x37\xe7\x4c\x44\x88\xbe\x5f\x69\x9b\x38\xaf\x73\x01\xfd\x55\x84\x8b\xee\x3a\xe5\xd9\x26\x0d\x50\xee\xca\x65\x03\xc7\xd8\xf0\xe1\x7f\x7b\xb6\x74\xc4\x19\xb3\x71\xa4\x75\x2d\x07\x79\x0b\xa1\xe3\xd3\xbb\x10\x30\x28\x47\xab\xba\xba\xd4\xe8\x02\x80\xe6\x40\xc1\xeb\xb9\xbc\x8a\xbb\x4c\xdc\xb0\x61\x86\x9a\x41\x5a\x20\x56\xe3\xda\xc9\x8a\x0f\x4c\xd8\xd2\xe7\x11\x9a\x13\x04\x96\x0b\x58\x89\x68\xfc\xa5\xe5\x74\x8e\x2d\x24\x7d\x7d\x78\x63\x2d\xfd\x2b\x2d\x71\x3c\x72\x15\x98\xee\xeb\x51\x3c\xda\x5b\x44\x1f\x96\x3c\xa0\x55\x1c\x94\x20\xdb\x1a\x08\xbb\xd2\x18\x2b\xbb\xe6\x8b\x46\x26\x59\xe1\x0d\xe9\xf1\x95\x20\x9d\x3c\x79\xde\xd9\x49\x95\x2b\x53\xf8\x93\x02\x36\x92\xdd\xb3\x9f\xb5\x0f\x1b\xc8\x1e\x11\xdf\x5c\x2f\xf6\xd0\x3e\x7b\xa4\x6e\xea\x4e\x05\x65\x9a\x81\x76\x93\x68\xd2\x90\x0f\x5f\xa9\x13\x31\x66\xc7\xa9\x28\x32\x62\xb9\x9d\x
7d\x02\x03\x79\x1a\xfb\x09\xa7\xe0\x29\x69\x81\xfc\xfb\x71\x15\x80\xef\x2d\xa3\xff\x7b\xaf\xd1\xf4\xbf\x37\x03\x6e\xbf\xf7\xcc\x15\x92\xef\x6d\x83\xe2\x39\xa8\xf6\x5f\x3a\x4a\xad\x50\x7a\xfd\xb7\x1e\xe2\xb4\xfa\x52\x94\x0c\x21\x72\x22\xa4\x34\x34\xa9\xe4\x27\xf2\xe5\x6c\xe8\x8b\x0e\xda\x7d\xa3\x86\xfc\x88\x7c\x90\xfa\xfe\x1b\x91\x12\x5f\x4a\x23\x7e\xf3\xc2\xa0\xdd\x2f\x38\xfc\x58\x0e\x9c\x1f\x7c\xad\x9f\x0f\x52\xcb\x97\x4d\x40\x9d\x15\xd3\x1a\xcf\xd7\x9b\xe1\x17\xd9\x4b\x41\x8b\xb9\xd3\x50\x05\x41\x26\x05\x9b\xbf\xd6\xdf\x89\xc0\xf6\x0b\xba\x5c\x82\x75\x47\x52\x98\xc3\x60\xea\x4c\x36\x89\x7d\x30\x18\x13\x1f\x24\xf7\xd9\x40\xfb\xd5\xe5\xdc\x0e\x28\x82\xb8\xda\x87\x79\xd2\x9f\xdb\xab\x0f\xf6\xcc\x89\x79\x35\x9f\x08\xde\xf9\x38\xd9\xf8\x49\x34\x7b\x77\xe8\x24\x9a\x2c\x19\xeb\xfd\x51\xf7\xfb\xfe\xfc\xb8\x11\x83\x63\xf0\x88\xf8\xc8\xb7\xd5\xa2\xf0\xea\x58\xd2\xfb\xb2\x54\xbc\xdf\xdb\x6c\x28\xd1\x59\x56\x91\x6a\xd6\xa5\x17\x5f\xa3\x35\x19\xb2\x27\xc2\x7e\x92\xd7\xe3\x5d\x17\x8e\x4e\xf0\xb1\x86\x50\xe1\x9b\x4b\xd3\x44\xee\x3d\xb3\x32\x56\x17\xc1\xa6\x65\x36\x39\x09\x22\x1b\xf3\x01\x20\xa9\xec\x0c\x85\x62\xfa\x1b\xcb\xd8\xb7\xb8\x24\x0f\x45\xb0\xbc\x8b\x07\x23\xbc\x0b\x93\xa8\xde\x3c\xe7\xa6\x65\x35\xe8\x0f\x8a\x84\x32\x94\x08\x88\xfa\x80\x24\x61\x2c\xc8\x76\xd0\xf5\xe0\xa4\x0b\x42\xcb\xa0\xed\xb4\x96\xdf\xa0\x59\x86\xa5\xe1\x9f\xbd\xe2\x5b\xdb\x9c\x75\x2b\x39\x88\x1e\xed\xe1\xbe\xe9\x85\x06\x7c\xca\x9c\x04\xd6\x33\x06\x1b\xa6\xe6\xb3\xc0\xa2\xd6\x04\x1b\xe6\xc4\x85\x4a\xe0\xf6\x8c\x7c\xf1\x4c\x84\x9a\x86\x49\x6f\x39\x9b\x8d\x15\x67\xcb\xba\x61\x2c\x9f\x05\x89\xa6\x56\x3c\xdf\xe8\x2c\x82\x1a\x34\x37\x0a\x10\x90\x5e\xa2\xf3\xd0\xee\x9e\x1c\x9b\xe5\x74\x65\xb7\x83\x1b\xdb\x8f\x5d\x53\x89\x49\x06\x94\x6c\x06\xa4\xdf\xf4\x33\xe8\xf1\x79\xa2\xf9\xf0\x7e\xd7\x00\x0f\xa7\x0a\xd2\x5b\x99\xc2\xef\x14\x1a\xd0\xef\xf4\x7b\xed\xe9\xf8\x66\xba\x53\xdf\x18\x08\xff\x09\xe8\xde\x6c\x48\xa4\x7d\xfe\x4e\x62\xd3\x7a\x83\xc8\x24\x4e\xd3\xe8\x2b\x91\xe1\x52\x88\x78\xdf\x50\xb2\x60\xa5\x13\xe8\x5d\x37\x21\x57\xfb\x61\xff\x6d\x9b\x76\x3b\x07\xaf\xbc\xa8\x4f\x76\xa4\x9f\x89\x11\xe4\x9d\x34\xc1\x44\xb4\x40\x10\x77\x16\x5e\xb6\xd0\xb1\xf5\x4e\xf3\x6e\x13\x38\x2e\x45\xda\xc6\x12\x87\xc9\x3c\x7c\x83\xdb\xfa\xb9\xc9\xac\x04\xf1\x26\x0f\x1c\x86\x7a\xef\xa7\xa0\x78\x22\x25\x8e\xf2\xba\x9d\xa0\xc9\xad\xc9\xd7\x3d\x41\x19\x4c\x04\x9c\x27\xe5\x1b\x5e\x9e\xf0\xfb\x24\x62\xde\xd7\xab\x22\x1e\xdf\x01\x95\x1c\x5f\xb0\x9e\x9c\x2a\xb2\x44\x26\x3b\xf8\xb1\x05\xce\x3e\x35\xf2\xcd\x4c\x3a\x4d\xb5\x14\x77\x90\x4e\xe9\xbf\x60\xe1\x74\x4e\x4f\x64\xc6\x93\xe7\xb2\x64\xa9\xbe\xbf\xca\xa0\x3c\x95\x57\xe9\xdd\x8f\xf1\x6a\x81\xc3\x2f\x38\xc9\x5e\x64\xd7\x94\x24\x07\x46\xeb\xe0\xde\xb4\xf7\x3a\x2d\x62\xd7\x3c\xb8\xb0\xfc\x72\x05\xb7\x2c\xd4\x0e\xc4\xa4\x9f\xe4\x5f\x4e\xce\xf9\x9f\x93\x6f\xd2\x12\x78\x71\x92\xf2\x42\x2b\x19\xf8\x3d\xbd\xe0\xf5\xf0\x51\xf0\x05\x17\xcc\x76\x05\xbb\xe6\x79\xaf\xf9\x68\x1b\xfd\x7c\x7f\xb8\xd6\xfe\x82\xfa\x49\xbe\xdf\xf1\x2f\xb0\x94\x05\xaa\xdf\x38\x24\x07\x75\x27\x94\xa7\x96\x67\xec\xc6\x32\x12\xd2\x8b\xdb\x56\xfe\xfd\xe7\xf8\xbe\x53\x9a\xda\xd6\x5b\x73\x75\x6e\xcd\x9f\xdf\x86\x82\xbb\x0d\x9b\x0d\x44\x9a\x38\x60\x46\x8d\x3a\x9b\x27\xff\xbd\x90\xa3\x13\xb1\x10\x34\x97\x0d\xf7\x4e\xf6\xfe\x9f\x58\x3c\xfb\x2a\x34\x24\x22\xd9\x4e\xf7\x61\x0f\xd0\x73\xd1\x6e\xef\x33\xb3\xb4\x65\xef\x0d\x51\xc1\x01\x55\x32\x50\xda\x13\x21\xfa\x16\x89\x79\x8f\x63\x78\xc0\x6d\x2a\xcb\xd5\x83\x9b\xcc\x10\xd7\xe7\xfc\xba\xad\x6f\x80\xdc\x99\x9f\x55\x94\xa0\xbe\xfe\x70\x7d\x12\x43\x48\xcc\xe6\xdc\xdc\x89\x75\xdd\x0c\x52\xb9\xee\x53\x93\xb6\xeb\xd6\
xe4\x0e\x64\xa0\x31\x1b\x05\x8e\x9d\x1e\x02\xe1\x27\xbf\x77\x1a\xcb\xd7\xfd\x14\x7d\xe8\xe8\x33\x99\x8b\xe2\x3c\xd7\x2d\xba\x50\xb6\x42\x27\x26\x94\xb1\x77\x9d\xb7\xfd\xb9\xf3\xcc\x4a\x1b\x42\xdd\x3e\xf5\xfc\xdc\xe0\x32\x3d\xca\x8b\xeb\x45\x86\xf6\xbd\x04\x00\x87\x57\xe6\xc6\xfa\x3b\x94\xbe\x3e\x07\xfe\x8c\x0c\x71\x2c\x07\xe8\x48\x3a\x9a\x5f\x85\xb6\xdc\x15\xca\x6f\xd7\x91\x3c\xc7\x97\xd0\x6d\x23\x2b\x9d\xc9\x12\x6b\x0f\xfe\xb3\x7a\xb1\x09\x5f\x2e\xc0\xa1\x10\x2a\x6c\x68\xe9\xe8\x92\x5f\x1f\xd9\x26\x0b\x4b\x4e\xc4\xcc\xde\x6d\x2a\x7d\x57\xa9\x93\x75\x90\xde\x26\xc4\xa5\x74\x81\x2d\xee\x24\x98\xc2\xcd\x73\x05\x03\xe5\xe5\xe1\x15\x0e\x98\x5a\x82\x92\xf7\x5d\xbc\x7f\xb8\x20\x68\xce\x43\x21\x5b\x14\xcc\xa2\x47\x8d\xd8\x42\xa8\x91\x6c\xa7\xd6\xf7\xaf\xa6\x44\x8e\x04\x4b\xed\x2a\xd1\x8a\x00\xff\x13\x19\xa9\x4e\x49\x82\x19\xb3\xd3\x2f\x95\xc1\x4e\xba\xc4\x6c\xc0\xf0\x20\x92\xd1\x3b\x22\x40\xd0\x9e\x6d\xce\xe5\xe6\x4f\x5b\x91\x41\xfe\x26\x1b\x67\xde\x47\x1d\x86\x5b\x67\x2d\xf3\xaa\x80\x3f\x43\x34\xb2\x2e\x0f\xdd\x28\xfc\x85\x72\xab\xc1\x5f\xaa\x9f\x26\xb9\xa3\x6c\xb2\x90\x9e\x74\x5b\x5c\x50\xb1\x3b\x09\xa2\x26\x98\xd1\x4d\xca\x12\xb6\x8d\x9d\x3f\xd9\x82\x3d\x55\xaf\x88\x3c\xcb\x56\x88\xfe\x35\x31\x4c\xef\x4a\xd2\x75\x82\x77\x34\x14\xef\x2e\x9b\x6b\x7f\x18\xb0\x03\xc8\xcf\x02\x74\x65\x5e\x72\x31\xe3\xb2\x67\x72\x9e\x81\xbc\xe8\x62\x55\x0d\xbc\xa7\xce\xab\xf5\xe5\xba\x88\x95\xce\x3b\xf7\xa5\x61\x6b\xa5\x2f\x0b\x9d\x1f\x91\x40\x7e\x16\x11\xcc\x7d\x44\x29\x66\x96\x5e\xe3\x40\x9d\x64\x61\x9d\xd6\x57\x9e\x8d\xc9\x94\x5c\x5e\x36\xd6\x1f\x01\xfc\xb5\x19\x48\xdb\x27\x40\x0e\xb6\xd3\xeb\x5d\x6f\x9e\x43\x5c\x5e\x6f\x11\x30\x9c\x91\xb3\x73\xde\x47\x80\x6d\xe8\x16\xff\x32\x48\x64\x7a\x92\x01\x91\xa4\xa6\x89\xdc\xa5\xe8\xd4\x93\xf8\x4d\xc9\xe1\x76\x4f\x91\x48\x73\xde\x20\xe5\x0f\x79\x86\xd3\x7b\x11\x82\x9d\x1c\xa4\x66\x6b\x07\x43\xe9\xbd\x6a\x9f\xcd\xce\x9f\xcc\x86\x20\x11\xf9\x29\xa4\x59\xaf\xc3\xc6\xca\x78\x89\x14\x09\x29\x50\xdf\xaf\x7e\x94\x6e\x27\x57\xbb\xa0\xad\xc6\x39\x38\xf0\xa3\xce\x41\x0c\x5a\x19\x93\xe2\xae\x20\x9d\x14\xbc\x08\x1f\xf1\xd6\xf8\x56\x8b\xfc\x06\x7b\x29\x23\xb2\x0d\x3d\xc5\x03\x5a\x9f\xba\xa5\x0a\x37\xdc\x22\xd8\x28\x58\x6b\xd0\x02\x3b\x0c\x33\xce\x37\x82\xbf\xd4\xe0\x20\x5e\xd8\xab\x8a\x7d\xcf\x49\x50\x83\x0e\x35\x2f\x24\x29\x2d\xc7\x24\x6e\x53\x30\x86\x10\x59\x33\x88\xf3\x23\x32\x22\x60\xf0\x7f\x42\xe5\x82\xf1\x9b\xa0\x02\xe2\x63\xc5\x12\xd0\x25\x12\xd5\xb2\x35\x2e\xbf\xeb\x8f\x99\xe9\x1b\x0a\x03\x3f\x9d\x4e\xf8\x78\xd3\xff\x80\x2d\xf1\xfa\xd9\x17\x78\x90\x1f\xd2\xbe\xbb\xf1\x4f\x52\x0d\x62\x31\x4e\x5e\xc0\xac\x7a\x55\xb9\x49\x11\x43\xbf\x6a\x8d\xe3\x9e\x99\x8f\x3f\x89\x60\xee\x32\x83\x6d\x0b\x8a\x0c\xdb\x9a\xb6\xc2\xb1\xc6\x70\x56\x1c\x97\xc1\xe4\x8f\xe6\x10\x8b\x1d\xc8\x97\x48\x80\x2a\xed\x28\x50\x77\x6d\x4f\x31\xa7\x3a\xa3\xd1\x22\xd8\xb8\x65\xf7\x43\x5d\x0b\xd0\x16\xe4\xaa\x62\xd9\xdc\xcb\xab\x11\xb1\x26\x96\x66\x6a\x68\x0f\x83\xee\x01\xa4\x54\xed\x54\xac\x6f\x02\xfe\x2b\xfb\x9b\x26\x14\x95\xe0\xce\xe4\x0c\x04\xa5\xe8\x21\x81\xe8\x26\x10\xdf\xfe\x8f\x93\xf5\x9a\x35\xa3\x3b\xeb\xe9\xbd\xe9\x29\x19\xa7\xb3\x56\x2d\xc9\x9e\x1e\x3d\xe4\xcf\xa3\x32\x31\xfc\x24\x67\x89\x15\x3c\x9b\x07\x92\xfa\x53\xad\x43\xcb\x31\x3b\xab\x1c\x14\x67\xed\xd8\x6d\xac\x4c\x7d\xb5\x32\x24\x07\xce\xb5\x5c\xf3\x83\xc0\x2b\xdd\x42\xa2\xa1\xaa\x27\x90\xb7\x86\x74\x43\x0b\x83\xea\xcf\x67\x15\x8f\xcc\xa9\xf4\x39\xc2\xae\x11\x3b\x3f\x33\x3f\xe5\x27\x47\x15\x2b\xc7\x42\x76\xaa\xb3\x90\x95\xe2\x2c\x7f\xb8\x9f\x83\x12\xec\x2c\xe2\xc9\xa5\xea\xd0\x69\xef\xa6\xa7\x00\x1a\x74\x15\x7b\xd1\x9f\xae\x7b\x1c\xbd\x8a\x66\xd6\xec
\x04\x9a\xc8\xa4\x52\xd5\xcc\xf0\x2c\x41\xe0\x59\xf2\xf4\xd6\x94\xf3\x2c\x43\xf0\xb0\x0e\x9c\x18\x9c\xe0\x82\x15\xdd\xe9\xbd\xaa\x8b\x9a\x6f\x2e\x1d\x39\x96\xb6\x26\x16\xc9\x3a\x96\xfb\xe5\xee\xcf\x13\x69\x3b\x1f\x02\xe4\xd4\xe8\x2d\xfb\xc2\x73\xa3\x81\x01\x69\x54\xfb\xea\xb0\xd0\x2c\x4a\x03\xc3\xe3\x21\x6a\xd6\xd2\x58\x35\xe7\x88\x1b\x3c\x83\x74\x73\xce\x22\x79\xcd\x20\x81\x62\xed\xf2\xd2\x7a\xc5\x39\xc7\x2f\xd5\x15\xce\xae\x9d\x2c\xb4\x6b\x42\x87\xf5\x79\xdc\x31\xa7\x35\xe7\x6f\xfd\xd9\x65\x20\xff\x7e\xd9\x3f\x82\xcb\x59\x5e\xe6\x73\x14\xcb\xd5\xc9\x34\x9c\x33\xff\x8b\x8e\xc3\x1e\xf0\x47\x25\x59\x4b\xf3\xa5\x96\x4a\x8d\x9d\x33\x8b\x6b\x39\xc7\x97\x8c\x53\x29\xba\xcd\xf0\x40\x79\x66\xb0\x2f\x42\xfa\xd0\xe1\x32\xd4\x35\xd6\x38\xce\x0c\xb6\x5c\xf5\x25\x39\xe8\x98\xa1\x32\xc5\x5c\x82\xb3\x09\xab\x9e\x66\x1e\xfb\x47\x83\x05\x7a\x5e\x49\x7c\x9f\x99\xf1\x7d\x67\x0e\x85\x22\x27\xb7\x4a\x07\xa9\x3e\x8f\xaa\xfb\x66\xa0\x8a\xa3\x02\xc6\xab\x24\x78\x04\xd7\x68\x1b\x74\x9d\x8c\x35\xb8\x58\xef\xb5\x51\x8d\xde\x12\xd1\x01\xec\x23\xa7\xbf\x6d\x10\x1f\x25\x37\x1a\xd2\x83\xce\x33\x47\x76\x6b\x8b\x7e\x5b\x39\x73\xd6\x86\x9d\xf4\x2b\xf8\xe3\x5d\x1d\xa3\xa6\x22\x5d\x72\x02\x56\x09\xda\xf8\x42\x65\x1b\x27\x20\x6c\x79\x05\x9d\xea\x5a\xe8\x22\x70\xbe\x55\x32\x83\x3b\xd5\x62\x3c\x01\xa8\xf0\x9c\x8d\xb2\x75\x09\xba\x7d\x60\x9b\x52\xff\xf8\x59\x39\x3d\x73\x34\xb6\x9d\xa0\xcb\x21\x85\xad\x35\xd8\x6f\xec\xce\xe2\xba\x04\xb4\xf9\x78\x3b\x79\x70\x90\xe3\xd1\xec\x6d\x6f\x6a\x07\xa7\xc4\xea\xc1\x2b\x78\x15\x11\x22\xf7\xd6\x34\xe9\xf1\x3a\xed\x6a\xa1\x17\x71\x42\x45\x94\xe4\xa3\xf1\x82\xcd\x70\x9c\xb8\x00\x7c\xa6\xfb\xc5\xd0\xe7\x33\x18\xce\xd1\x76\xb5\xa8\x7a\x8a\x9c\x37\x41\xda\x59\x88\x5c\x52\x8e\x37\xe5\x21\x9d\x14\x6b\x12\x4a\xcb\xad\xd6\x81\x8d\xfe\xb8\xc5\xa1\xba\x81\x60\xbb\x08\x8b\x5b\xe4\x54\x26\xc4\xe9\x32\x4d\x02\x31\x43\x3a\x13\x38\xe2\x1b\xdd\x2b\x34\x65\x64\xe7\xc1\x15\xae\x3a\xc0\x80\xe7\x19\xac\xcd\x7e\x09\x44\xe8\xfa\x64\x4c\xad\x81\x46\x0d\x9d\x1e\x03\x3f\x3f\x2e\xd8\x9c\x69\x42\x14\x94\x23\xeb\x6b\x48\x27\xab\x78\x9e\x13\xf2\x5e\x2a\xc9\x04\xf9\x91\x96\x52\x38\xaf\x0f\x1b\xa4\x62\xda\x62\x38\x79\x71\x32\xe3\x05\x20\x89\xc1\x95\x14\xaa\xb7\x98\x89\x0c\x04\x7b\xda\xd1\xa6\x0d\xc7\xbd\x93\xc7\xf2\xfe\x65\xd9\x1d\xb7\x48\xde\x0c\x58\xff\x27\x6e\xcc\x3b\xdc\x39\x07\x0c\x0e\x85\xdf\x1f\xb7\x02\x67\xc0\xfe\x2a\x8f\x07\xa0\x17\x6c\x1c\xc7\x6d\x1d\x1a\x0f\xf1\xd5\x4e\x21\xb2\x8b\xda\xb0\xeb\x1d\x3c\xac\x88\x1f\x9d\xd8\x21\xbd\x44\x04\xb3\x4c\xb9\x0b\xa4\x4c\x00\x86\xb8\x34\xf6\xd5\x93\x9e\x70\x4c\x5f\xd9\xda\xb0\x0c\x1e\xe1\x9a\x58\x4a\x21\x7d\x29\x03\x56\x10\xe3\xa2\x78\x7f\x64\x6e\x89\xc0\x54\xc4\x99\x95\x6b\xa1\xf6\x5c\xf3\x29\x07\x04\xf0\xa2\x3a\xb5\x53\xfe\x54\xe1\x1d\x8f\x0d\xd3\x3f\x76\x54\x19\x4b\x47\x15\xad\xed\xcc\x0c\x1f\x80\x2e\x7c\x11\xb6\xd1\x28\x6c\x67\x71\xbc\x86\x60\xa1\x04\x93\xad\x38\xa2\xf6\x79\x96\x81\x00\x95\x00\xef\x36\x8e\x3a\xd8\xa4\xd8\x93\x25\x0d\xfe\x61\xba\x75\x03\x88\x61\x32\x47\x8d\x67\x5f\x3b\x31\xd1\xa6\xa1\xf7\x2f\xd8\x99\x63\x49\xe1\x6b\x03\x7f\x56\xc3\x2a\x2f\x86\xf0\x02\x1c\x62\x88\x3d\x45\x2f\x1b\x69\x6b\xf4\x04\x48\xc1\xc0\x36\xc8\x1c\x4c\x41\x2a\xd4\x37\x04\x55\x98\x11\x13\xec\xb1\x9b\x7c\x5a\x87\x2f\xd5\xfb\xed\x95\xac\xc4\xf7\xc3\x5d\xcb\x9b\x10\x8f\x52\x2e\x14\xba\x65\xb2\xf3\xba\x55\xd0\x12\x93\x8f\xb9\x7a\xa4\x0e\x78\x64\xfd\x21\xda\x74\x59\x59\xa4\xc7\xcc\x20\x9e\x03\xd6\xb5\x5f\x08\x71\xcf\x22\x1c\x86\x66\x18\x9f\x48\x6e\xbb\x7e\xe2\xe8\xee\x9c\xe0\x75\xf3\xff\x7c\x1d\xe4\x50\x0e\xf1\x91\xeb\xae\x76\x83\xb5\xf6\xf8\x14\x14\x7e\x7c\x84\x45\xa6\x91\xe3\x00\x81\x40\xd
b\x33\x88\xab\x37\xcb\xd6\x3d\xf2\x4a\xd5\x3c\x92\xda\xf2\x04\xab\x8d\x6d\x5f\xa2\xf4\x54\x93\x03\xf4\xb2\x5c\x9c\x8b\x91\xfc\x36\x78\x69\xcb\x49\x92\x10\xa0\x59\xbf\x28\xca\x92\x3b\x98\xbf\x6b\xc5\xf0\xbb\x39\xb5\x20\x38\xab\x79\xd1\x89\x5d\xd8\x81\x17\xc1\xef\xd0\xcc\x43\xb2\x36\xe7\x4c\x02\x1d\xeb\xa6\x76\x66\x1d\x82\x0a\xb5\xe8\x28\xfb\xfa\xbf\x65\xd2\xdd\xa8\x2b\x80\x0d\x24\x8e\xda\xcf\x56\x3d\xd9\xfc\x48\xff\x44\xcb\x9b\xae\xd6\x17\x1e\xc9\xe6\xa8\xfc\xef\x09\x27\xee\xa9\x1f\x29\x65\x28\x12\x1f\x0f\x57\xea\x73\x36\xc6\x43\x02\x28\x98\x2d\x58\x87\x25\x14\xbc\xfd\x4d\x2f\xf6\xc0\x0a\x94\x7f\xa2\x69\xf8\x36\xea\x4c\xb3\xc7\x8e\x44\x42\x38\xf0\xd9\x5e\x1f\x51\xd7\x16\xcd\x13\x49\x68\xcb\xae\xc3\xf0\xb3\xd1\xf3\xda\x06\xdd\xab\xfb\x47\xa4\xa3\xb7\x16\xd7\x77\x69\x7d\x81\xe5\x96\x81\x06\xfb\xcd\xe8\xf5\xfd\x16\xa7\xc1\x7e\x4f\x2b\xa9\x59\xf6\x3b\x68\x34\x6d\xf0\x7d\xfa\x47\xb3\x1f\x94\x40\x02\xc1\x6d\x16\x33\x6d\x7d\xf1\x44\xf6\x59\xcd\xe2\x02\xb5\x4f\x19\x8a\x03\x1b\xe9\x6c\x69\x57\xed\x70\xb3\xa5\x20\xbe\xd5\x80\x6f\x93\xab\x4b\xf5\xc0\x0a\x39\x09\x3c\x0f\x08\x89\x0b\xf4\xbe\xc6\x09\x29\x44\x91\xdd\x3a\x59\x18\x11\xec\x4a\xa2\xa0\x94\x65\x80\xd2\x8e\xc8\x5e\x35\x1d\xc3\x8c\x3f\x00\x93\x2f\x79\x70\x2b\xde\x4e\x50\xfc\x56\xdd\x7d\x55\x06\xec\x5e\xd7\x35\x78\x70\x23\xdb\x0d\x9c\xb5\x43\x25\x37\xaf\x8d\xe3\xcc\xe1\xdf\x39\xa2\xef\xb5\x50\x0c\x68\x97\x54\xf6\x1e\xd4\x29\x7b\x1d\xea\x12\xc7\x4a\x73\x7a\x5f\x6e\xcd\x7b\xf6\x85\x5e\x6a\x2b\xf9\x74\xac\xe4\x2a\x9c\x21\xbe\x22\x08\xb6\xfb\x25\x96\x26\x55\xb7\x2f\x8c\x4a\x06\x6b\xee\x37\xf6\x90\x79\x17\x1c\xda\x7c\x0a\xc5\xba\x63\x2f\x9b\x40\x14\x18\x75\xf9\xaf\xca\xef\x67\x47\x28\xec\x49\x20\x8f\xc2\xae\x25\x84\xbd\xc0\x39\xde\xa8\x76\x59\x95\xb2\x90\x14\x89\x94\xba\x22\x67\x75\x7a\x4d\xbb\x00\x37\xfa\x78\xf8\x2d\x02\x09\xa8\x55\x20\xc8\x0a\xf6\xb9\x20\x80\x91\x54\xae\xb3\x08\x68\x67\x86\x57\xee\xa3\x0f\x24\x7e\xd8\xe8\xb4\xd8\xc4\xde\xcd\x05\x8f\x6f\xbe\x0e\x31\x60\x89\x5f\xb7\x17\x54\xe8\x92\xf3\xbe\x34\x5a\xd7\x1c\x79\xdf\xbb\xc7\x5f\xc4\x02\xe6\x9e\x65\x73\xec\xf2\xe9\x62\xfa\xc7\x57\x65\x93\x89\x9b\x9e\x20\x40\xb1\x2f\x23\x98\xa9\xa1\x76\x21\xe4\x7e\xeb\xc0\xaa\xf4\xa8\xa6\xaf\xb2\x6b\x10\x72\xee\x5e\x96\x83\x07\x72\x19\x48\x1c\xb2\x76\xb1\x0b\xec\x0c\x74\xdc\x7d\xc6\xc4\xfc\x4c\xd0\xf6\x4e\x7c\x41\xca\x57\x46\xa9\x9a\x96\x5d\xf7\x26\x6a\x90\xdd\x07\x23\x7d\x58\xe8\xe9\x78\x52\x50\x96\xef\x1a\x5a\x6d\x96\xe7\xff\x4d\x9f\xcc\xf2\xb5\x28\xe2\x0e\x90\x0c\xff\x76\xac\x16\x2a\x76\xbc\xd5\x53\x0c\xbb\xe7\x0f\xe4\x8f\x00\xfc\xb9\x1c\x64\x6c\x0b\x5d\x13\xcf\x74\xb2\x5b\x82\xa0\x8c\x28\x84\xed\xb1\xe9\x09\x72\x41\x30\x09\x92\x93\xd8\x23\x64\x78\xce\x83\xf1\x20\x3b\xc2\x99\x83\x61\x18\xcc\xc1\xc7\xc4\x53\x5b\x73\x7d\x7c\x03\xb1\x0c\x9a\xe2\x4d\xb3\xbc\x3d\x2d\x3e\x95\xdc\x53\x08\xfa\xef\xa9\x65\x55\xec\x49\xd4\xc3\x8b\x18\x2c\xc1\xa4\xb5\xf0\xf0\x1e\x79\xa4\x64\x3a\xee\xf9\x99\xa4\xbe\xd5\xd5\x4d\xcc\xfa\xed\x3c\xde\x09\x6c\x1d\x6f\x0f\x5b\x00\x02\xab\x86\xbf\x89\x8a\x25\x1e\x7f\x4b\x48\x42\xa0\xc7\xcb\x93\x0b\x3a\x4e\x09\x14\x5a\xe2\x94\xa3\x52\xba\xa9\x0a\x20\x35\x4b\x8e\xd4\xc2\xbe\x86\xea\x40\xf1\x79\xc8\x3b\x70\xcf\x7f\x75\x27\xa6\x9f\xe0\xa0\xfc\x60\x3d\x9a\x0e\x5a\x05\x13\xae\x37\x47\x24\x8b\xf9\x9e\x95\xd3\x8e\x1a\x0b\xa2\xf5\x19\x4c\x9c\xb5\x31\x14\x99\x05\xa7\x49\x54\x9d\x13\xc3\xa2\xea\x74\xb3\xb2\x5a\xf1\xac\xe3\x95\x83\x2a\x18\x99\x3c\x5e\x86\xb8\x7d\xcd\xb1\x24\x5b\x39\x45\x66\x4c\xe4\x40\x54\x28\xf8\x46\xa6\x60\x11\x22\x23\x68\xa6\x1e\xce\x76\xdc\x25\xc9\xa3\xd6\x4e\x61\x78\x95\x73\xd9\x9a\x9e\x1c\x86\xb7\x0f\xd3\x67\xb6\x0f\xcf\xbe\xdd\x
ab\x68\x7f\x79\xb6\xa0\xb8\xb1\x72\x8e\xa5\x7c\x30\x84\x44\x84\xbc\x8d\xe1\x3a\xce\x67\xe4\x1b\x94\x81\x50\x82\x3b\x59\xb9\xd9\xc1\xa3\x6c\x7d\x2e\x52\xfd\x1d\x29\x64\x61\xb3\x91\xed\xde\x83\x11\x58\xeb\x25\x06\x66\x11\x06\x0f\xc1\x7a\x84\x69\x9d\x04\xf0\x39\xc3\xfb\xc4\xb1\xd6\x05\xfa\xa0\xe6\x48\xa5\xbe\xa8\xad\xc9\x52\x01\x55\x1f\x16\xb6\xe9\xe3\xed\x6b\xcb\x6f\x25\x3d\x18\x0a\xea\xe2\xfc\xca\x4e\x7e\xb9\xe5\xfb\x4b\xa2\xdf\x7c\x83\x12\xd3\x4f\x91\x63\x25\x7d\xcb\xb2\x46\xb6\xc6\x80\x12\x5d\xe6\xa6\x84\xe7\x0d\x9c\x77\xcc\xdf\xd8\xc0\x72\xe7\xff\xb3\x49\x2f\x49\x7c\x0d\x49\x56\x17\xc8\xcf\x14\x6c\x05\x1b\xc9\x05\xb7\x14\x9b\xd4\x2e\xde\xd2\xf1\x4a\x85\xbf\x38\x86\xb8\x58\x72\x37\x3c\x28\x5c\xe2\x49\xa7\x42\xa6\xe3\xe4\x5e\xd6\x2d\x4d\x3a\xcb\xc4\x59\xea\x96\xc6\x6f\x89\x7d\xe3\x1f\x3c\x44\xf2\xe3\x8a\x0e\xff\x0f\x97\xb0\x4a\x91\x00\x1f\x4c\x99\x5e\x21\x9f\x47\x40\x72\xca\x5b\x8e\xd2\xf5\x5e\x6c\x20\x24\x57\xa9\xf7\x70\xc5\x5d\x70\x86\x15\x15\x60\xc8\x07\xde\xf5\x9e\xb9\x09\xc7\x19\xd9\x82\x3f\x9b\x12\xb9\x56\x84\xf3\xf0\x74\xf5\xba\xe2\xdf\x24\x5e\x5e\xeb\x29\x7b\x0e\xa8\xe3\xbf\xfe\xcc\xa0\xd7\x26\x92\xb4\xe2\xa3\x3e\x0f\x8f\x14\x06\x16\xb1\xae\x7d\xd7\x22\xa6\x6d\xa2\xb7\x4e\x4b\x7c\x04\x00\xdf\xfc\x12\x9b\xbc\x13\xe7\x2b\xd5\x79\xd2\xef\x42\x62\x7a\x06\xf2\x8f\xdf\x01\x4f\x58\xb9\x0d\x71\xe1\x8f\x90\x38\x52\x81\xb2\x80\xa8\x3b\x81\xa6\x80\x27\x8b\x1a\x8f\x22\x18\x15\x0c\x3d\x83\xf3\xd0\x70\x2f\x2a\xd1\xba\xee\x73\x23\x2f\xfe\x6c\xc1\xdd\x5a\xd5\x47\x3a\xd8\xc4\x47\xb2\xd6\x88\x63\x05\xbd\x31\x8b\x35\x6a\xac\x30\xb4\xf5\x27\xd5\x0b\x9e\x63\x1e\xd5\x73\x85\xc0\x00\xa3\x35\xb1\x16\x27\xbe\xe2\xef\x37\x1e\x4d\x09\x9f\x95\x21\x36\x1b\x04\x6c\xdf\x1a\xd2\x7c\x43\x74\xc5\xf0\x49\x5c\x42\xf1\x93\xe0\x29\x06\x79\x35\x1b\x9f\x19\x4b\x62\x4a\x6e\xde\x57\xa7\x47\x2e\x3a\x72\x8b\xa8\xa7\xb5\xfc\x5c\x32\x8e\x05\xa2\x35\x95\x60\x02\x5d\x23\x7a\x13\x60\x6e\x16\xe7\xea\x4c\x46\xed\x1f\xee\x7e\x6e\xff\xa0\xef\xc9\x10\x24\x44\xae\xc0\xd2\xca\x33\x38\xd7\xb8\xc8\xf0\xc7\x09\xbb\x36\xe6\xb9\x15\x5d\x5e\x7a\x88\xc2\x0b\x9c\xc5\x64\xaf\x2d\xcf\x27\x6d\xda\xf5\x8f\x40\xeb\x5a\xc6\xf3\xe6\x95\x33\x89\x2c\x0d\xc4\xea\x23\x98\x9f\xc3\x87\xbe\x96\x81\x27\x07\xf3\xb3\x40\xa9\xcf\x24\x48\xd7\x99\xd3\x39\x37\xa2\x67\x8e\xc9\xae\xd6\xc6\x52\xca\xe7\x6b\xe3\x99\x59\x65\x9e\xad\x5a\x2b\x41\xa9\xe6\x9b\xff\xdf\xef\x22\x73\x3d\x78\xcd\xdb\x1d\xb4\xcf\x21\x25\xb2\x36\x36\x96\x95\x14\x8c\x56\x4c\x62\x83\x05\xd3\x25\x7b\x8d\x2c\xa7\xeb\x8a\x70\x62\x96\x88\xd3\x20\x11\x9b\x93\x65\x88\x86\xba\x81\xf2\x4f\x8d\xd2\x85\x85\x5b\xae\xf2\x8a\xd8\x0c\x36\xdc\x5c\x62\xb5\x65\x75\x1d\x61\xa1\x85\x3c\xe6\x59\xc9\x76\x6b\x1e\xce\x60\xa8\x56\x6a\xb7\x03\x66\xd6\xac\x39\x75\x22\x67\x7e\x07\x19\xdc\xda\xff\x41\x39\xd8\x9f\x0f\x0e\xfc\x2b\x88\xd7\x0e\xf1\x24\xf7\xd0\xa3\x6f\xbb\xe3\x80\x7d\x16\x5f\xf3\x06\xee\x33\x9e\x7c\xcb\x8d\x3b\x7a\xcb\xfa\x6f\x2a\xfa\x01\x28\x86\xd7\x80\x41\x3b\xfd\x14\x57\x2e\x88\xab\x5f\xba\x9c\x7d\x1f\x8b\xc8\xb0\x9f\x39\x89\xed\xbd\x2f\xc7\x2b\xd0\x76\x1d\x1f\x9e\x1d\xea\x2d\x42\xe0\x18\x26\x9c\x0e\xb3\x05\x44\x56\x3d\x89\x4b\xb9\x9f\x70\x7a\xfe\x3e\x6f\x51\xa5\xbc\xcc\x25\x18\xb4\xed\x15\x1e\x5c\x74\x02\xd9\x35\x1c\xf1\xa7\x70\xd7\xa9\x8f\xef\xed\x91\x35\x92\x6b\x58\x6f\x41\x88\x1d\x1a\x80\xe0\xbe\x66\xf2\x8a\xf3\x5d\xcb\x67\xe5\x38\x07\x23\xf6\x12\x0f\xab\x53\x96\xc3\x0a\x66\xec\x99\xa4\xce\x9f\x2a\xd6\xe3\x0f\xf9\x45\xd7\xf4\x2f\x91\x1c\xf9\x3e\x92\x68\x9e\xcd\x1a\xe7\xbf\x2e\x0a\x8f\x62\xfe\xc4\xff\x5c\xfa\xcd\x35\x8b\x8f\xf9\xa2\x27\x1a\x4c\xd6\x59\xff\x4a\x2f\xee\xe1\x84\x13\x25\x7b\x3c\x00\x4e\xde\x56\x4f\
x04\x4a\xa2\x92\x76\x39\x31\x47\xc7\xef\x43\x81\x43\x81\x97\x3c\x74\x9a\xe3\x9c\x1b\x57\x75\x78\x82\x57\x27\x20\xd1\xff\xe9\xc2\x75\x40\xbe\xe9\x48\xd8\x32\x34\xdd\xed\x07\x43\x81\xab\x32\xce\x04\x4f\x7a\x9c\x40\x82\x31\x4e\x91\x2f\xfe\x0f\xc7\x04\xdb\xad\x0a\xa9\x33\x43\xa4\x57\x50\xcd\x62\x43\xe5\xa6\x33\x37\xa1\x0f\xe4\xbc\x07\xa7\xb5\x22\x34\x30\x74\x14\x71\x61\x7b\x52\x9d\xa3\x95\x3b\x96\x70\x3c\x1b\xac\x37\x69\xba\xed\x85\x8e\xc1\x37\x8e\xee\x92\x7c\x30\x6b\x72\x62\x5c\x44\x3a\x2f\xb1\x2d\x12\x6d\xac\x5f\xa9\x22\x4f\x0d\xda\xc9\xa3\x3b\x5f\xac\x48\xd9\xaa\x48\xb9\xdb\x7f\x1b\x4d\x77\x8b\xc4\xb3\x6f\x68\xbd\x05\x06\xde\xde\x6c\xd3\x9d\x4b\x94\xe5\x78\x57\x1f\x21\xf1\xd4\xdb\x6c\xd0\x3e\x6a\xd6\x01\xfc\x6e\x1c\x3c\xcc\x84\xba\x54\x7a\x61\x9f\x11\xc1\x60\x33\xac\x1a\x28\xb8\xba\xe9\x86\x61\x60\xfe\x15\x48\xa1\x09\xc2\x44\xb3\xfe\xdb\x47\x13\xec\xff\x34\xf3\xe4\xc3\xf4\xf2\xa1\x78\xc0\x72\x33\xa1\x1c\x25\x17\xf8\x97\x5b\x33\x83\x25\x2c\x26\x03\xa4\x01\xe6\xda\xc2\x72\xab\xf5\x2c\xf7\x2c\x1b\x77\x91\x9c\xe8\x72\x0f\xaf\x22\x69\x5c\xc3\xf4\x47\x81\xdd\xa3\x1d\xf7\x87\xc5\x3b\xb3\xb9\x2c\x77\xa7\x5b\x5c\x44\xe1\xb1\xdc\x9d\xae\x2b\x5f\xbd\xd9\xe2\x22\x90\xae\x2f\xb1\x7b\xbb\x2b\xce\x0a\x91\x75\x33\xee\x6e\x31\x43\xed\x64\x29\x1b\x04\xb4\x46\xdb\x57\x13\x7e\xb0\x8b\x28\xcf\x10\x50\x94\xc6\x86\xac\x02\xa4\xa5\xae\x4f\xbb\xa3\xab\xbe\xfe\x23\x9d\xb7\x17\x39\x2e\x19\xf4\xd7\x95\x6c\xb1\x56\x36\x71\x22\x1b\xc9\x7d\xa0\x04\x89\x8e\xca\xba\x31\xbb\x7b\x71\xd1\xfa\x80\xc1\x1a\x5d\x0e\xf1\x65\x40\x05\x9c\x9e\xa9\xa5\xec\xb2\x61\x60\x0b\x44\x95\xed\x49\x37\x96\xf0\xad\xa9\x67\x2d\x6e\xbf\x04\xa1\xb8\xbd\x26\xd0\xe8\x12\x6b\xa1\x16\x88\x34\xce\x25\xd2\xef\x91\xe3\xe6\x96\x33\x2c\x2d\x25\x23\x3a\xf4\x32\xe2\x1e\x81\x8e\x60\x08\x8f\x50\x3b\x30\xce\xf3\x97\x36\x6a\xcd\x71\xba\x78\x96\xe0\x0c\xe1\xf3\xc3\x42\xe6\x7f\x41\x32\xce\xf0\xe7\xc5\xdd\x49\x1f\xa2\x39\x48\xc8\xd3\x4e\xb2\xec\xcc\x87\x00\x3d\x16\x2f\x45\x07\x67\x20\x9f\x4c\x5e\x04\xd4\x92\xc7\x92\x5f\xad\xde\xf9\xfe\x65\x93\x98\x05\xda\x05\x73\xa5\x59\x5d\x77\x80\xb5\x49\xb3\xd9\x86\xfb\x29\x16\x79\xa4\x96\xbc\xef\x52\x35\x33\x18\x3e\x86\xa5\x89\x5a\x82\x32\x2e\x1e\x02\xd2\x69\x32\x51\xf0\x11\x2e\xca\x68\x40\xee\x61\x94\x53\x12\xc5\x79\xf9\x99\x6c\xbe\xa1\x27\x98\xcb\x93\x5f\x03\xdc\xe2\x3c\x72\xbe\x6a\xd4\x21\x56\xac\x17\xf1\xf0\x2e\x79\x12\x97\x77\x1e\x21\x6e\xe9\xa8\xe7\x2d\xf4\x8a\x5d\x6c\x6c\x5b\x4b\xee\xc2\x13\xbe\xe4\x44\xe7\xd2\x92\xde\x57\x80\x83\xa1\x40\x4b\x7a\x91\x06\xfc\xe6\xfe\x1b\x82\xfc\xf2\x2f\xf9\xd6\x26\x20\x36\xf8\x14\x09\x04\x86\xb8\x9e\xb7\x84\xbc\xe3\x02\x67\x92\xd8\xbf\x0d\x15\xce\x34\x16\xe4\x8a\x93\xf0\x3e\x9d\x3c\xfe\x24\xb1\x27\x7c\xae\xf5\xad\x77\x9b\x48\xa9\x08\xc9\xfb\xad\x63\x8d\xce\x3e\xd8\xca\x61\x34\x09\xf5\x8c\xa5\x5b\xd2\x2e\x12\xf8\xe4\x7e\x3a\x31\xaa\x6f\xf7\x97\xc7\x6d\xad\x96\x6d\xca\x63\xf0\x1c\x38\x88\x1a\xfc\xd1\xa2\x83\x62\xbd\x88\x58\x7d\x92\x87\xc8\x79\xe9\x74\xa4\x8d\x18\xfe\x3a\xd3\xba\x33\x5c\xda\x99\xd4\x09\xca\xf7\xd6\x0d\x97\x4b\x64\x01\x06\x27\x5d\x64\x26\x05\xbb\x35\x59\x9f\x2b\x19\x0a\x65\x01\x43\x18\x96\x8b\x08\xdf\x27\x4d\xf0\x80\xf4\xa8\x46\x46\x7c\x1b\x90\x96\x41\x1a\x31\xa9\x17\x92\x1c\x41\x1a\xee\x5e\x77\x31\x6c\x71\x3f\xe0\xb8\xe1\x39\xfa\xea\x7e\x5d\xe2\x67\xa3\x8f\xef\xf3\xa1\x47\xd7\x31\x7f\xde\x40\x3d\xc4\x45\x2f\x36\x07\x8c\x88\xb7\x0f\x7e\x4f\xd1\x04\x3d\xef\xe6\xba\x35\x18\x44\xde\x37\xd7\x48\x9e\x77\xf0\xb4\x1f\x8c\x20\x32\x30\x7c\x48\x5f\x2e\x06\x86\xe7\xbd\x89\x43\x08\xac\xee\xf1\x73\x6f\x38\x90\x4a\xf9\xb0\x74\x5a\xf0\x5b\xa2\x66\x00\x13\x7f\xbb\x24\x81\x22\x26\xea\x9b\x16\x95
\x95\xd4\xa8\x7b\xde\x6e\xea\x2f\xc2\x4f\x5d\x69\x50\x1d\x40\x0f\x47\xd4\x42\x8f\x00\xfb\x40\x89\xac\xef\xdf\xcf\xf7\x47\x1a\x03\xaa\xf8\x42\x6e\x64\x83\xeb\x5b\x4b\x81\xc0\x24\xb0\x3f\xea\x5e\x86\x1f\x63\xfb\x51\xd9\x1f\x3e\x91\x08\xcd\xf3\x95\x46\x13\x5f\x3a\xfe\x2b\x17\xad\x8c\x3c\x7d\x95\x4c\xff\x4d\x9f\x4d\xec\xf4\xe9\xe4\x71\xa9\x3d\x2d\xe8\x27\xa2\xac\xdf\x8f\x08\xdd\xbf\x51\x6f\xb8\x28\xc5\x1c\x8f\x98\xf3\xb9\xe1\x5e\xfe\xd0\x67\xbd\x6e\x91\xc8\x5f\x47\x27\x20\xdf\xb3\x55\xf6\x0f\xd9\xfc\x33\x07\xd9\x7c\xd1\x71\x67\xdf\x9e\x54\xd5\xdb\x85\x80\xa9\x97\x58\x6b\x11\x60\x25\x01\x96\x60\x94\x07\x76\xb0\xbd\x7f\xa7\x58\x1a\x8d\x7d\x38\xa9\x9f\xbe\x40\x1d\x3b\xff\x1c\xa9\x01\x1e\xf3\x56\x51\xdd\x67\x77\x2b\x3c\x49\x46\xfd\x84\xbf\xeb\x95\x8e\x80\x24\x76\xaf\x7c\xc0\x1b\xf3\xb5\xac\xcc\x51\x0d\x32\x39\x90\x92\x9e\x8c\xea\xfa\x56\x9e\x3e\x3b\x22\xc2\x8a\x1e\x4f\xa4\x80\x21\x10\xd1\x5f\x8d\xec\x1d\xe3\xec\x8f\x77\x7c\x91\x18\xa9\x53\xd2\x23\x3b\x99\xd7\x5c\xb2\xc7\x06\x58\xa9\xb5\xd6\xe7\xc3\x2e\x10\x3f\xc2\x9a\x69\x84\xa4\x3f\x0b\x57\x35\xad\xa4\x86\xd6\x93\x5c\x43\xcf\x42\x59\x75\x30\xd3\x8b\x3b\x7e\x9b\x4e\x0d\xa9\xc0\x22\x99\x5f\x77\x11\x94\x97\x55\x3f\x5f\x96\x55\x7b\x82\x40\xd4\xd0\x14\x9e\x5d\x90\xd5\xab\xad\x95\x99\x3b\x82\x9e\xee\x59\x5a\x6d\x75\x04\x0c\x55\x9e\x35\x7f\x4b\x22\xed\x7c\xb0\x89\x90\x96\xde\xcf\x90\xcf\x19\xde\x56\x11\xc3\x1f\xe7\x15\xfc\xf3\x87\x5a\x0b\xd6\xda\x4f\x82\xb8\x78\xde\xf2\xa8\xda\xe5\x75\x9f\x55\xfa\x4b\xcd\xcb\xd9\x26\xdb\x30\x5a\x37\x95\x7f\x09\xda\xf3\x33\xcd\xf7\xc6\xc3\xcb\xd9\x98\xf2\xcb\x21\x92\x77\xfb\x7d\xe1\x2e\xde\x64\xe6\x67\x9d\x18\x58\x6c\xe5\x38\x73\xb2\x03\x38\x06\x03\x3e\xa6\x6d\x3f\x4a\x79\x7b\xb0\x7e\x81\x74\x7e\x3f\xd1\x24\x0d\xdf\x9d\xf6\x9e\x60\xaa\x24\x4b\x3c\x1b\x27\x66\xb6\x59\xa0\x69\x5d\x24\x24\xa3\xf1\xee\x9d\xea\x5e\xcd\xbc\xad\x8d\x3c\xd3\x21\x9e\x7a\x2c\x2f\xfb\x03\x49\xfb\xa1\xea\xea\x9b\x82\x50\x10\x3b\x14\x1b\x14\x66\xae\xcb\x23\xc9\xb9\xd5\x7b\x15\xf9\xfd\xc2\xbb\x5b\xb8\x4a\xef\xac\xf6\x2f\x3e\x87\x98\xdb\x3e\x31\x01\x6c\xea\xca\x4f\x28\x2d\xce\x34\x3a\x9f\xe8\x37\xda\x3c\xf2\xe9\x31\xee\x02\xd6\x9d\xf0\x68\xc9\x13\xd8\xbc\xe5\x9d\x88\x48\xf0\xfa\x00\x51\x88\xf7\x57\x8f\x3f\xe9\x21\x86\x91\x79\x1d\x6b\x04\x0f\xeb\xb2\x5d\x8e\xf8\x51\x77\xbd\xb1\x07\x98\x23\x7d\x0d\xe2\x51\xb9\xf6\xf1\xa8\xe4\xc4\x62\x34\x76\x38\xbd\x1f\x2d\x76\xfa\x91\x5b\xe5\x01\x49\x98\x7f\x8a\x4d\xf8\x01\x36\x49\xaf\x8f\x7d\xf2\xfa\x25\x9c\x43\x64\xfb\x70\x36\x7d\x52\xde\x43\x7a\x46\x68\x64\x95\xcc\xea\x51\x0e\xd3\xc3\x66\xa0\x6c\xb7\x40\xa4\xe4\x17\x3d\xac\x95\xc9\x6c\x00\x3c\xad\xf2\x4e\xe1\x19\xfb\xb3\xea\x50\xce\x3f\x8d\xb4\x9c\x5d\x18\x98\x05\x7a\x22\x3e\xa7\x2b\xa2\x43\x28\xdb\x87\xae\x44\xb7\xe0\x7d\x04\x2b\xdb\x56\x7b\xb1\x0b\xd9\xe3\xd1\xd0\x57\x7c\x02\x1b\xaa\xaa\x08\x56\x79\xd1\x88\x2a\x10\xe8\x43\x89\x85\x19\xff\xcd\x6a\x1d\x90\x0f\x81\x25\x54\x93\x42\x1e\xa0\x80\x11\xcd\x63\x9e\xc0\x04\xac\xf0\xa7\xf9\xa6\x57\x67\xbe\xaf\xe0\xc4\x57\xf4\xde\x7c\x47\xf2\xdf\x7c\x7b\x7f\x6b\xdf\xda\x75\xfe\xb8\xce\x6f\x7b\x90\xc1\x99\x7d\x5b\x63\x73\x3e\x76\x2a\xe2\x5a\x41\x46\x7b\x33\x1c\x83\x5d\x5f\x74\xfc\x4e\x20\x2f\xd2\xed\x3b\xc7\x95\x06\xb1\x48\xce\xf5\x15\x9c\xf9\x0a\x14\xf6\x18\xda\xab\xd1\xca\x9f\x67\xf0\xdb\x2b\xd6\x01\x0e\x26\x75\x88\xa0\xcd\x27\xe3\xfd\xb1\xb5\x38\xa6\xd9\x35\x50\xec\xa2\x1e\x54\x09\x16\xfd\xd2\xd8\xec\xc9\xfd\x03\xd6\x27\xde\x66\x5d\xbe\xbf\xe9\x80\x6d\x7d\x7a\xd1\x43\x1a\xfe\x43\xe1\xbf\x5c\xdf\x25\xe6\x18\xb6\x25\x26\x13\xf0\xec\x2f\x8c\x9b\x41\xaf\x92\x83\xd2\x95\x1b\x8d\xf9\xbe\x2e\xaa\xff\x63\x13\x59\xc9\x5c\xe7\x65\x8c\xf
f\xce\x7f\x9a\xce\x8c\xc4\xa0\xb8\xa8\xc1\x5e\x8c\xec\x88\x18\x26\x21\x7c\xcd\x4a\x63\x98\x6b\x3c\xbf\x2c\xc1\xf9\xb9\x0e\x7c\x23\xc1\xbb\x5e\x5d\xdd\xfa\xd6\x13\xa2\x7b\xd9\x7a\xec\x33\x1c\xdb\x33\x7a\x50\x3f\x3f\x32\x78\xfd\xf4\x45\x8f\x8e\xca\x00\x45\x62\x32\x90\xd9\xff\xd1\xf3\x33\x4d\xd7\x40\x3f\x07\x07\x05\xd4\xf7\xf5\x13\x1f\xee\x78\xc9\xb2\x0c\x0a\xb6\x72\xa2\x7e\x56\xaa\xe4\x91\x0e\xfd\x19\xb1\xd4\x7c\x8b\x45\x9a\xe2\x73\xfe\x84\xfc\x29\x20\x0b\x11\xe1\xbf\xdc\x67\x31\x3b\xa9\x89\x48\xf2\x79\xd7\x90\xda\xe5\xbf\x30\x08\x0c\xa4\xea\x57\x7e\xda\x9c\x95\x35\x3b\xbb\x1c\x38\xd3\x4b\x67\x4f\x38\xfa\x10\x2d\x2a\x73\xe3\xfb\xcf\x54\x73\x9b\xa1\x74\xea\xe5\x2a\xf5\x2b\x43\x2b\x8b\x50\x09\x68\x3c\xfb\x8b\xf2\x33\x40\xf9\xef\x04\xeb\xdc\x78\x6c\x3c\x53\xd9\x16\x95\x14\x72\xf8\x13\x02\x35\xdb\xc1\x41\xd1\x8f\xe5\xc7\xc6\xe7\xcf\xf5\xc7\x39\x77\x4c\x21\x06\x9f\x6b\xc8\x04\x24\x89\x17\xa4\x89\xa7\x4f\x83\xc8\xc6\x01\xfd\x0e\xc1\xf1\xdf\x49\x1e\x71\xf6\x48\x00\x32\xe7\xc3\x48\xb7\xb7\x72\x71\x83\xf7\xe9\x59\x69\xa3\x82\x79\x66\x9f\x7d\x05\x19\xbf\x99\xf6\x5a\x79\x75\xae\xff\xd0\x8e\x30\xac\x9f\x1e\x57\x7c\xc5\x18\xee\x82\xcc\x7f\xaf\x55\xfb\x56\xf1\xf4\x1f\x43\xfb\xc6\xb0\xa4\xf8\x09\xe0\xde\x21\xd0\xff\xd3\x15\x09\xd4\xbe\x55\xeb\x59\x43\x35\x80\xb9\x19\x36\x30\xb4\xee\x29\x29\x29\x72\x4e\x5b\xc8\x12\x6c\x67\x6b\x78\x49\x39\x1e\x70\x36\xea\xbd\x62\x28\x8d\x5b\xe1\xa8\x09\xf5\x1d\x36\x7a\xcf\x40\xf9\x09\x09\xb4\x59\xc4\x2c\x9d\xaa\xd9\x83\x6f\xb5\xc2\x8f\x50\xbe\x25\x6e\xf5\x8f\x0e\x44\x6a\xc1\xfa\x36\x94\xee\x87\x9e\xd4\x12\x22\x04\x66\x93\xcd\x01\xa5\xff\x33\xa7\xa2\x05\x49\x68\x14\xb0\x26\x13\xb2\x65\x79\xd8\xf0\xd6\x39\xba\xc2\x49\xbc\xf3\x4e\xaf\x14\xb6\x9c\x44\xd7\x3c\xdd\x21\x36\x64\x28\x63\x5e\x75\xfd\x27\xa1\x82\x10\x15\x60\x78\x69\xfe\x8f\xda\x04\x0c\xf6\x70\x95\x02\xee\xda\x1a\xe5\xc6\x74\xaf\x1c\x61\x26\x2c\xa7\x72\x8c\x30\xe8\x6c\x52\x71\xb2\x82\x19\xa0\x13\xc0\xdf\x30\xee\x2a\x11\x35\xa9\xa7\xbb\x7f\xf3\xe7\x9e\xe9\x7b\xd0\x8e\xb0\x2d\xa4\x13\xeb\xd0\x04\xe6\x4c\x87\xc7\xcd\x55\xc0\xe9\xa8\xd4\x00\x01\xf0\xa5\xa6\x49\x42\x2e\x56\x66\x8c\x37\x13\x31\xa2\xdd\x1d\x49\x81\xcd\x40\x53\x39\x81\xaa\x01\x23\x40\x79\x70\xb6\x16\xcd\xfe\xd5\xb1\x1a\xd2\x74\xd0\x0f\x35\x1d\xc9\x66\xb4\x24\xaf\x3f\x12\x4f\x9a\x34\xba\x4e\xce\xbe\xa5\xa3\xb6\xaf\x2e\x69\x4f\xe0\x91\x9b\xe4\x41\xc7\x93\x81\x01\xc5\x51\xfd\x9a\xf1\x44\xb9\x02\x5a\x3c\x53\x65\xa0\xcc\x54\x4f\x71\xda\xdb\x4b\xe5\xa1\x87\x12\xb9\x21\x5e\x30\x31\xd6\x63\xaa\x9b\xe7\xbb\xba\x5e\x41\xbc\x0b\x70\x00\x1f\x3c\x50\x06\x01\xc0\xd4\xa4\x15\x9c\x13\x8b\x47\x16\x29\x17\x20\x7c\xd4\xc1\xa0\xa5\xd9\x09\xa2\x77\x2c\x15\xc9\x08\xe4\xc9\x8f\x53\x1b\xce\xa6\x82\x44\x16\x6a\x5d\x94\x9f\x3c\x00\x3c\x95\x6e\x5d\x18\x5a\x1b\x3c\x5e\xfa\xef\x71\x49\x7e\xe0\x10\x07\xb8\xdd\x0b\xe8\x7a\x79\x1a\x7d\x7e\x13\xe4\x39\x74\x11\x76\xb8\x53\x69\x37\x08\x5f\x20\x05\x1d\x8a\xab\x82\xf0\x02\xf6\xc1\xb2\x79\x30\x9e\x61\xca\x52\x16\x56\x82\xe3\x14\x52\x60\xd3\x4f\xa0\x23\x1f\x8a\x50\xc1\xba\x21\xaf\x92\xc9\x09\x32\xd1\xf0\xcb\x84\xcc\x13\x02\x38\x59\x0e\xa1\x4c\xc0\xb4\x89\x49\xde\x3b\x28\x15\x48\xd0\x60\xfa\xb3\xfc\x37\x79\xf7\xcc\xfd\x59\x33\x9d\x49\xa9\x09\x53\x7a\xc5\x75\x0c\xe9\x81\xd8\x1c\x41\xf5\x4a\xf7\x50\x18\x7d\x3a\x25\xb9\x44\x01\x78\xd4\x75\x9d\xf2\x9c\x4e\x89\xdd\xca\xe4\xbc\xf2\xdc\x83\x6c\xb2\x42\xf4\xe4\xf1\x67\xa9\x8d\x15\x6c\xc2\x7a\x56\x3c\x11\x60\x66\x04\x4e\x29\xb2\xf9\x1d\x51\x4c\x82\x0f\x39\xf6\x62\x49\x44\x87\xb2\x0d\x43\x7d\x8b\x15\xc3\x4a\xce\xc8\xb3\xfb\x78\x37\xa5\x55\x95\x59\xe3\x6e\x63\xaa\xe9\x2a\x0b\xa2\xcb\x70\x99\x85\x8b\xbf\x2b\x5b\x
55\x45\x8a\x4b\x1f\x7b\xf3\x4d\xb3\x8c\x81\x09\xe9\xb1\xbc\x68\xee\x42\xae\x61\xf8\xc6\xf4\xdf\xb0\x6c\x96\x29\x75\xba\x79\x37\x78\xc7\x0f\xf9\xfd\x41\xef\xff\x26\xe0\x4d\x18\xe8\x19\x95\x37\xde\x9b\x0a\x4a\x1c\xdc\xcb\x9b\x64\x3a\x86\x5e\xee\xc4\x32\xb0\xa8\xc6\xe3\xfd\xbc\x15\x37\x35\xde\x0c\x32\xb7\xd2\xaf\x3c\x1e\x1f\xc5\xca\x8e\x07\x43\x6f\x47\x44\xc7\xd3\x6c\x82\xc4\x03\x53\x8e\x0c\x05\xa0\x8e\x9d\xdf\x8c\x0b\xfd\x08\xc1\xd4\x72\xe7\x3a\x64\x1f\x36\x95\x14\x72\x00\x3d\x3c\xd7\xf1\xc6\x43\xfc\xcc\xc8\xd1\xf3\x10\x48\x57\x77\xb0\xd9\xc8\xcd\x9f\xda\xdc\x0d\xdf\xf3\x58\xdf\x4c\x63\x1f\x3d\xdf\xa0\xc5\xd4\x8f\x15\xfe\xd4\x8d\x88\x4b\xc0\x23\x39\x7c\xb8\xef\x60\xa7\x6f\x40\xd9\xa7\xa3\x59\xc8\xea\x92\x47\xc6\x86\x30\x92\x6d\x44\x30\x48\xa6\x0a\x41\x95\xd4\xc8\x58\x23\xee\x73\x5c\x6e\x5f\xc1\xb5\x52\xdc\xfe\x86\xfc\x5e\x16\x2e\x55\xc0\x51\x59\xf9\xfc\x16\x8f\x98\x10\xcc\x7c\x02\x4b\xfa\x27\xd1\x0c\xef\x72\xfc\x2a\xf6\xd8\x18\x3c\xe3\x1b\x36\x65\xeb\x6f\x2a\x43\x6c\xcf\x50\x7b\xd8\x64\x14\x21\x91\x29\x94\x12\x6c\x3e\xea\x1c\x00\x63\x79\x24\x57\x4a\x80\xad\xcd\xef\x60\x2c\x41\x9e\x31\xba\xcd\xa9\x4b\xc2\x34\x68\x50\x4f\xc1\xc6\x6e\x8d\x92\x63\xa4\xa7\x8f\xe8\x32\x0e\xee\x52\xe0\x00\x06\x92\x88\x3b\x1d\xb3\xe6\x2f\x23\x46\x15\x1e\x36\x64\x19\xa4\x06\x13\xcb\xce\x5d\x38\x63\x86\x19\xe6\x67\x45\x64\x28\x55\x2c\x12\x75\x1a\x47\x2c\x52\x5c\x04\x07\xc7\x3f\x48\x4b\xa8\xd9\xa4\xbb\xe3\xd0\x30\xa6\x3f\xcd\x2a\x49\x28\xce\xe6\x85\x93\x13\x75\x8f\xce\xcc\x25\xd1\x8c\xb4\xf0\x29\x73\x5a\x3d\xa6\x54\xfc\x95\xe6\x7f\x7c\x79\xf9\x2d\xd9\x03\xe4\x41\xfb\x62\x68\xd6\xaa\x05\x4a\x01\x9b\x08\xbf\x7c\x20\xcb\xd6\xef\x3c\x78\xd0\x5a\xdc\x98\xf3\x75\x17\x3e\x57\x71\xec\x66\x12\x14\x99\xe5\xb4\x2b\xeb\xce\x1e\xdc\xc9\x45\x68\x90\xa8\x4b\x14\xa1\x09\x25\x1c\x1a\x95\xf2\xd1\x27\xc8\x5f\x51\xaa\xe0\xa0\x58\x67\x96\x3f\x11\x5d\xf8\x1a\x77\x8c\x01\xbd\xd0\xd6\xb6\xca\xd3\x69\xda\xe6\xf2\x79\x9b\x22\xaf\x11\x5a\x11\x59\x33\xdf\xbc\xfd\x86\x36\xc4\x8a\x6b\x66\xeb\x61\xe3\xfa\xbf\xec\xed\x4c\x96\x56\xf0\x97\x4b\xdc\x61\x55\x8a\x5c\x5e\xf9\x9f\x45\xdd\x3c\x7c\x51\x3c\x4b\x38\xa3\x7e\x58\x5f\x7a\x66\xc8\x87\x6b\x49\xf8\x65\x16\xb3\x39\xfc\x1f\x50\x74\x90\xe8\x43\x48\x6d\xf8\x88\x7a\x11\x49\x09\x2c\xb6\x54\x35\x38\x22\xf4\xbe\xe6\x63\xa2\xcb\x22\x2b\x80\x33\xc7\x9d\x20\xfb\x64\x0d\xdd\x88\x76\x13\x0e\x75\x64\xff\x8c\xca\xc2\x5f\x2a\xeb\xd8\x9f\x94\x9f\x1a\x8b\xbf\xba\x20\x14\x23\xf8\x6d\x61\x76\xc1\x5d\x3d\x03\x23\x87\x8f\x34\x0e\x6e\xb7\x98\x5c\x08\x42\x72\x0c\x66\xe6\x6d\x0e\x36\x79\x71\x0c\x85\xb8\x91\xcd\x45\x59\xc8\x01\xe5\x20\x11\x50\x6a\xe0\x8e\x14\x8f\xe1\xee\xf8\xe3\x4e\x1c\xfd\x87\xa8\xe4\x41\x99\x2a\xe1\x85\x99\xfa\x0e\x47\x12\x13\xd9\x50\xdf\xcc\x8d\x02\x60\x37\x68\x88\x52\x01\xf5\x3d\x2b\x79\x7a\x08\xfa\xe9\xc1\x1d\xe5\x6e\xe1\x0d\x75\xe7\x22\xfa\x10\x39\x74\x4e\x3a\x4a\xb0\xc6\x28\x34\x78\x72\xff\x4c\x66\x98\xa1\x46\x14\x9a\x21\x25\xac\x0d\xf0\x52\xf3\x9f\xcc\x94\x1c\x22\x76\x6d\xa8\xdd\x53\x55\x03\xe7\x07\x40\x79\x31\x20\x14\xd2\x0d\x5c\xe9\x18\x42\x17\x6a\x08\xea\xa2\xc1\x69\xb6\xa4\x47\xe1\x9f\x8f\x3f\xe4\x72\x70\xb9\x7c\x00\x43\x08\x9f\x24\x8d\x25\x4c\xfa\xde\x49\xa7\x75\x5d\xea\x4a\xb8\x86\x20\x44\x4b\xcb\x00\xad\x5a\xa0\xd2\x24\x1d\xa6\x59\xf2\x04\x25\x02\x23\x87\x92\x6f\x57\xa7\x20\x0e\x3d\x89\x7c\xfc\xea\x65\x5f\xef\x93\xb7\x94\xa3\xc5\xc0\xfb\x74\xa4\x76\xc0\x08\x91\xe1\x83\x5a\x10\xcc\x98\xb2\x32\xad\x0a\xf4\x1c\xc0\xcc\xad\x9a\x76\x74\xb4\x0e\xf9\x0b\x9a\x1f\x47\xef\x50\xa6\x73\xf5\x87\x33\x80\x44\x03\xee\xfd\xe0\x23\x47\x68\x99\x64\x6a\x06\x57\x9f\xa9\x44\xab\xd4\x45\xf2\xf9\x48\xf6\xdc\xa4\x1c\x11\
x62\x45\x83\xf2\x4f\xac\xb4\xe1\x35\x74\x25\x8e\x23\x84\x27\xb0\x8a\x18\xda\x0f\xd5\xd9\x02\x75\x04\x92\xa1\x79\x02\x1b\xb5\x79\x29\x25\x16\x0d\x19\x4b\x82\xbb\xb4\x35\xb0\x31\x4a\x8a\xc1\x46\x58\x9d\xd6\xf3\x0d\x1c\x6d\x17\xef\x63\xab\x1b\x74\x18\x78\x9f\x1b\x16\xe6\xa4\xf6\x20\x89\x8d\xbc\x42\xa4\x56\x22\x08\xd9\x23\xee\xf8\x2a\xf2\xca\x88\x71\x00\x89\x69\xac\x49\x5c\xb8\x43\x46\x64\x86\x40\x68\x58\xa0\x31\xa4\x45\x30\x9d\x97\x2a\x02\x4c\x80\xf5\xb5\x4b\x7a\x1b\x92\x1b\xb1\xbe\xeb\xec\x7e\x32\x11\xc7\x1a\x7b\x8f\xe9\x23\x84\x59\x50\xa8\x50\xf8\x42\x65\x88\xae\xe4\xa2\xb7\x47\x5b\x7a\x40\x2e\x83\x6e\x63\x0e\x9d\x0e\x9f\x7e\xf1\x2e\xa6\x90\xf1\xf8\xa3\xbf\x31\x22\x80\x56\x67\xa1\xe5\x00\x6d\x09\x1f\x3a\xa0\x2d\x81\xa4\x46\xee\xec\xee\x99\x75\x54\x27\x37\x20\x17\xef\x08\xc0\x5b\x82\x7c\xd8\x26\x94\x55\x5a\xbf\xc5\x9a\xa4\x8f\x68\x2c\x06\x85\x5d\x0f\x58\x60\xe5\x7b\x4d\xce\xaa\x15\x90\xef\xc7\xc6\xee\x7c\x53\xe6\x05\x10\x19\x89\xc4\xdc\x75\x6c\xfa\x8a\x40\xc4\xfa\x20\xda\xe9\xd5\x1e\xe0\x7d\x95\x28\xdb\x00\x3f\xb3\xce\xbc\x45\x08\xea\x00\x52\x70\xfd\x7b\x51\xee\xd2\x10\xbc\x33\x58\xb3\xe6\xe2\x38\x50\x0c\x40\x03\x63\xb4\xad\xe0\x77\x82\x57\x40\x30\x69\xe9\x7a\x20\x87\x8c\x15\xa4\x18\xff\x7e\x9a\x0f\x1d\xd2\x14\x54\x3c\x80\x46\xc5\x9f\xbd\x0f\x92\xe5\x7f\x26\xd4\x4d\xf9\x6a\x90\xae\xe0\x27\x66\xe8\xe0\x47\xdb\x7b\x76\xef\x49\x94\x22\x24\xbd\xbf\xad\xb1\x93\x9a\xfe\xde\xa8\x6b\x70\xaf\xe5\x94\x2a\xc0\x1d\x12\x07\x77\xf9\x4a\xda\x42\x91\x4d\x00\x59\x25\x5f\x1d\xa4\x2e\xc4\xb9\x0e\x26\x6f\x94\xc7\x87\x5f\x61\x0f\x57\xbc\xff\xfc\x70\xfd\x07\xa2\x55\x3a\x0a\x47\x53\xad\xd0\xc1\x66\xa0\xdd\x42\x7e\xa9\x0a\x7e\x4b\x02\x31\xca\xd7\xb7\x04\x17\xde\x03\xcd\x7f\xd7\x9a\x38\x08\x8e\x8b\xce\x03\xc0\x16\x5c\xd3\xd7\x2b\x44\x27\x4e\xad\xe6\x02\x85\xe8\x44\x28\x09\x40\x6d\x42\x62\x0b\xae\x36\x51\x02\x4a\xee\x82\x5c\xf7\xda\x1b\x3f\xc9\x2a\x3b\xed\xa7\xb5\xd6\xd7\x9d\xcf\xd5\x9d\xdb\x93\x6a\x57\xed\x71\x6b\x86\x63\x1b\x9b\x1b\x64\x00\x94\x17\x71\x21\x8a\x10\x9f\xd8\x24\xd4\x0c\xc5\x09\x55\x73\xbd\x98\x0e\xdc\xbb\xb3\x81\x3f\x59\x7f\x32\x14\xab\xbc\x8a\x06\xf3\xf4\x21\x5a\x3e\xa4\xa5\x97\x04\x03\x24\x94\x34\xc5\x77\xac\xfb\xb2\x8e\x44\xb4\xfd\x22\x22\xb0\xb2\xa3\x5f\xab\xaf\x73\x28\x55\x4c\x5d\x68\x4d\x28\x0f\xa5\xaf\xbd\xd2\xc9\x7b\xc4\x97\xfb\x15\x97\x1a\xe7\x87\xf2\x3e\x4b\x84\x07\x4e\xd1\xd7\xf7\x10\x08\xd7\x0d\x9b\x61\xa1\xd7\xb2\x44\x7e\x53\xbf\x48\x88\x42\xe1\x9e\xfd\x22\xf6\xf3\xbe\x68\xcd\xbf\x47\x0c\x39\x5b\xb7\x99\xcc\x91\xcd\xd4\x3b\xa9\xa5\x03\xa7\x1d\xe5\xbb\x2b\x48\xcd\xa7\x92\x02\x95\x9a\xfb\x42\x5e\x59\x62\xb8\xa8\x42\xb0\xc2\x03\x4c\x25\x3a\x80\x90\x68\x3f\xe9\x2c\x75\x97\xf9\xee\xba\x5b\x4a\x0f\x87\x22\xbc\x29\x4c\x81\x58\x29\xdf\x28\x5c\xe4\xea\xc1\x48\x19\xce\x73\xdb\x30\x9b\x97\x17\xa3\x22\x85\xe0\x32\x70\x60\xf4\xe0\x4c\x89\xb3\x98\xc5\xd7\x14\x03\xe6\x4c\x59\x8c\xd9\x95\xb8\x29\x83\x81\x18\x4f\xab\xd6\xc8\x8d\x7d\x95\xf8\x05\xa4\x10\x3e\x44\xcc\xbf\x42\x18\x6f\x66\xcb\xca\x52\x99\xb1\x6f\x1c\x41\x9d\x27\x71\xf0\xad\xf5\x39\x62\x4b\x7b\xd0\x10\xe6\x21\x0e\xa0\xe2\x4e\x8f\xd8\xa5\x45\x8f\x27\x79\x6a\x95\x34\x0a\x98\x05\xe8\x0f\x06\x21\xaf\x21\x35\x41\x0f\x78\x1f\xf4\xcc\x00\x11\xba\x88\xe5\x38\xbe\x7d\xf8\xa2\x9b\x2e\x82\xbc\x52\xe8\x38\x7e\x1a\x12\xdb\x41\x07\x19\x60\x68\x3c\x20\x57\x5f\xaa\x09\xc7\x22\x35\x0c\x03\x5c\x1f\x33\xc8\x17\x62\x25\x83\x4e\x7a\x77\x9e\xe4\x40\xca\xea\x00\x0e\xb9\x8c\x03\xb6\x8c\x84\x16\xf6\x5b\xe5\x1e\x3a\x15\x88\x43\x68\x4a\x1d\x48\x58\xd5\xb5\x37\x2e\x4b\xbb\xfc\x44\x5e\x84\xd4\xec\xf0\x80\x8f\xf0\x68\x4b\x84\xa2\x0a\xaa\xc6\x58\x1d\x16\x60\xf3\x40\x94\x6c\xa8
\x58\x0c\x6c\x10\x69\xdd\x3b\x9d\xda\x53\x1a\xf8\x83\x55\x32\x3c\x69\xa1\xc8\x25\x42\xa0\x4b\x88\xe5\x90\xce\xb0\x4f\x6a\x68\x11\xc4\xdd\x8b\xe1\xcd\x4a\x1b\x11\xa9\x64\x41\x39\x12\xcc\x14\x54\x56\xbd\xc6\xee\xc8\x0f\x21\x51\x27\x1b\x0a\xe9\x10\x58\xa5\x38\x75\xf7\x61\x57\xd0\x31\x53\xad\xbb\x3f\xd2\x78\x88\xe0\x22\x20\x29\x3c\x84\x0d\xd7\xdd\xb1\xd0\xd0\x61\x22\x72\x06\x0c\xa6\xf4\x1b\x42\xff\x9a\xb9\x77\xe2\xd7\xb6\xd2\x4d\x3f\x67\x21\xbf\xd7\x9d\x11\x36\xdd\xfd\xd3\x7f\x03\xe6\x62\x84\xa1\x89\x2e\xc7\xee\x9e\x48\xde\x7e\x73\x5a\x6f\x1f\x6c\x8b\x07\xc0\xc7\xdb\xcc\x5c\xa9\x4e\x70\x7f\x47\x62\x85\xee\xf8\x6c\x91\x6b\xd9\x1d\xb7\x07\xda\xa1\x0c\x47\x02\xf0\x53\x0a\x14\xf7\x26\xaa\x4e\x87\x1c\x48\xa1\x50\xa1\x95\x56\x83\x93\xec\x24\x83\x52\x3b\x40\x0c\xf3\xbd\xe8\xa2\x87\x0d\x69\x8a\x00\x35\xac\x98\x58\x43\x57\xfc\xaa\xf6\x21\x12\x51\xd3\x00\x6e\x34\x77\x3d\x74\xe4\x74\xed\x9a\x0b\xd1\x10\xfd\x66\x06\xd6\x95\xe6\x35\x22\xe2\xa9\x00\x68\x68\xc0\xda\xa0\xa4\x36\x8e\x0c\x85\x26\xd5\x06\x16\x3c\xc1\x8b\x7c\xf8\x87\x18\x6d\x75\x59\x6c\x49\x7c\x02\x84\x61\x11\xfd\xd8\x1d\xb1\xbc\xef\x91\x8a\x5e\xa6\x6f\x93\xc1\xb8\x63\x86\x02\x1c\xff\x6e\x22\x1c\xd0\xb5\x6d\x72\x17\x73\x0a\x14\x8a\x21\x10\x4a\x3d\x04\x9a\xc2\x06\xd9\xe7\x8f\x84\x15\xba\x10\x17\x80\xa8\x9f\x2b\x9b\xd4\xcf\xc5\x22\x74\x27\xde\x1c\x8b\x3b\x7b\xb8\xd2\x9f\xb8\x65\xcd\xba\xfa\x85\x80\x2e\x53\xcf\xff\x89\xc2\x1e\x39\x9f\x89\x80\x0f\x4e\x63\x3b\x46\x40\xac\x5d\xf3\x2a\x47\xa3\x79\xec\x6a\x0c\x3a\x1d\x42\x38\xd7\xbd\x12\x6e\x25\x5a\x40\xdd\x42\x85\x63\x53\x5f\x8b\xee\x86\xdf\x90\x8d\xb0\xac\xed\x5c\x79\xec\x9c\x7c\x96\xd1\xd9\xa8\xca\x73\x7a\xc0\x0e\xeb\xa8\xa4\x47\x03\xf2\xb7\x50\x1f\x43\xff\x6c\x34\x54\x1d\xc2\x2e\xbc\x6d\x54\x77\xfc\x77\x35\xf9\xb9\x17\x33\x35\xce\x88\x88\xf2\xad\x89\xc6\x47\xb7\x90\xfb\xc2\xc9\xb3\x39\xa9\xef\x96\xf2\x8f\x25\x32\x79\x63\x9f\x3d\xda\x98\xe1\xf9\x06\x49\xc2\xba\x07\xd6\xef\x69\xd6\x76\x11\x31\xe7\xaa\x1a\x5b\xd7\xf4\x51\xca\x15\x02\x18\xc8\x3b\xe0\x83\x71\x8f\xa1\xea\x0c\x6a\x72\xa6\x20\xc1\xae\xc8\x4d\x67\x43\x5d\x68\x57\x26\x29\x30\x94\x50\x21\xb0\xe1\xa8\x13\x98\x42\xc0\xe2\xeb\xe6\x42\xa7\x00\x49\x94\x58\xb0\x5a\x42\x10\xe3\x20\xdf\x1b\xd2\xb6\x34\xfb\x45\xb5\xa9\x17\x91\xff\x28\xbc\xd8\xfc\xf0\xc3\xd2\x86\xbd\xcc\x7e\xc4\x3f\x02\x01\x9b\x3a\x4c\x14\x3a\xf1\x64\x27\xfe\x1e\x11\x21\x92\xd1\xc9\x1b\x95\x73\xf2\xb6\x85\xac\xc7\xf6\xf0\x1e\x9c\x38\x7f\x03\xf0\x3e\xb3\x52\x93\xf0\xb8\x18\x90\x0a\x24\xb1\x8d\x6d\x68\xe7\x48\x77\x91\x7a\x49\x6e\x8a\x1b\x8a\x1a\x02\x90\x3a\x09\x66\x82\xae\xf7\xab\x0d\x3d\x93\xe5\x11\xbf\x98\xe8\x55\xe9\xa4\xe6\x89\x7c\x68\x39\xc7\x3b\x74\x70\xba\xcd\x99\x6d\x22\x67\x71\x2d\x19\xfa\x49\x7a\x64\x84\xfc\x53\x16\x64\x78\xc7\xe9\x86\x96\x69\x62\x58\x1f\x96\x35\x4b\xbd\xe1\xac\x65\x11\x03\x34\x6b\xa1\xfe\x81\x4e\x93\xf0\x18\xe4\xf9\x80\x0c\xc8\x9a\x25\x3f\xf1\x65\x1f\x91\xd4\xa8\xd2\xbd\x22\x86\x42\x70\x4a\xf2\x05\xdb\x46\x63\x81\x86\x1c\x88\x5e\x23\x7a\x1b\xce\x45\x3b\x50\x84\xb0\x97\x6e\x5a\x47\xa9\x7d\xf2\xe9\x54\x84\x3f\x50\x28\x3e\x75\xa2\x78\xee\xc0\x00\xf6\x11\x60\xc1\x8c\x3b\x07\xd4\x30\x03\xcc\x52\x12\xd1\xc2\x96\xa3\xf8\xe7\x56\x9b\x40\x08\x9d\x85\x06\x9e\x4d\x51\x24\x44\xc0\x0d\xfe\xd4\x43\x54\xb3\xc3\x49\x89\x63\x5f\x04\x06\x41\x34\x44\x95\x84\x91\xa1\x53\x77\x48\x9e\x4f\xba\x91\x43\xaa\x1d\xb1\x38\x01\xf4\xab\xc6\x7e\xd1\xc5\x00\xa6\xf6\x78\x58\xdb\x53\xb7\x19\xec\x9d\x86\x24\x8f\x82\x00\x1a\xfd\x53\xfe\xc0\x0e\x64\xcd\xda\x23\x19\x1d\x64\xea\xb4\xd1\x3d\x3d\x19\xb7\x6a\x20\x2d\x7a\x97\xe3\x78\xab\x2a\x99\x1a\xc4\x1d\x04\x83\x58\xa1\xfe\x59\x75\x16\xa4\x13\x2c\x42\x4
7\x48\x9c\xc0\x1c\xe7\x81\x5d\x3c\x0d\x28\xf3\x6c\x85\xa7\xf1\x0b\xa4\xaf\x66\x24\xe9\xeb\xab\xca\xe9\xd3\xd7\x7f\x5e\x26\x09\x51\xbc\x2a\xfd\xbc\xe9\x3e\xff\x58\x67\x3e\x50\x09\x8d\xa0\x90\x16\xa1\xb4\x06\x2d\x0c\x54\x9b\xf4\x48\x93\x7d\xec\x24\x98\x4e\xd1\x2b\xfb\xda\x2c\x35\x2a\xce\x10\x26\xa1\xcb\xcf\xda\x8f\x26\x77\x86\x0e\xb7\xd4\x0d\xb4\x38\xa7\x74\x76\xc1\x7d\x87\xd0\x03\x4e\x2d\xac\x89\x49\x66\xc4\xec\x72\x2e\xff\xa2\x1d\x7d\x55\xfe\xef\x96\x72\x89\xbd\x4b\x77\x02\xc3\x14\xe6\x28\xed\xd1\x1a\x97\x24\xc4\x5c\xda\xa4\x34\x69\x13\x0f\xf3\xc7\x84\x61\x25\x45\x18\x20\x69\xab\x21\x48\xa3\x18\xb5\xa4\x44\x6d\x04\x71\x50\xd1\xe2\x48\x2f\x1e\xc4\x78\x46\xd0\x7e\x2f\xfc\xfd\x76\xdd\x2c\xcc\x38\x67\x8d\xdc\xf0\xbd\x7c\xe8\xf2\xd8\xfd\x7a\xe8\xc0\xf9\xa7\x82\xa2\x00\x36\x08\x61\x71\xdd\xdf\x43\x05\xfd\x01\x6c\xcf\xd0\x32\x81\x1c\xd0\xa9\xdf\x0c\x9a\x3d\x43\xf3\xc4\x79\x16\x1d\xdb\x07\xcc\x9b\xdc\x24\xf1\x90\xd6\x73\xaf\x21\x6b\x72\xb0\x7d\x18\x90\x72\xcc\xaa\xd0\x78\xad\x83\x4a\x0e\x85\x4a\x1f\xcb\xd9\xc9\x59\x9d\x96\xed\xa6\x5c\x0d\x9c\x6b\xfc\x27\x08\x5e\xf8\xda\xb1\x36\x9a\xb3\xbf\x41\x7b\x02\x94\x52\x41\x6b\x27\x4f\x3c\x42\x42\xfc\x0d\x99\x15\xeb\x97\xd0\xf7\x93\x24\x01\x0a\x31\x7c\xff\x89\x07\x0d\x72\xc1\x27\x15\x7e\x3e\x2e\x45\xe2\xb2\x5d\x60\xb7\x47\xe1\x51\x16\x0e\xc8\xa5\x94\x86\x66\xec\xa4\x01\x8c\xc4\x5e\xa3\x41\x41\x11\x09\xae\xa7\x42\x57\x27\xc2\xd3\xf4\xe8\xba\x5b\x9a\x23\xf6\x0c\xa9\xa6\x9d\xba\xc1\x07\x04\x2b\x19\xdf\x63\x76\x3c\xe5\x05\xbe\x9f\x77\x71\x13\xea\xfb\x86\xfb\x77\x99\x2b\x79\x14\xbe\x20\xdc\xf7\x8f\xfd\x7b\x6b\x5d\xdf\xa5\x47\x50\xae\xb7\xd7\xf1\xbb\xca\x7e\xf9\x16\x92\xb3\x7c\x45\x8e\x83\xe5\x60\x9e\x04\xc9\xd9\x59\xe6\x6d\x5b\x0c\x73\x01\x92\xd0\x84\xfc\x16\x24\xf5\x70\x37\xd6\xec\x47\x72\x19\x7d\x41\xd4\xe5\x17\x73\x8e\xc3\x55\xc1\x19\xdf\x2c\xb5\x2e\x48\x94\x78\x3b\x80\x32\x49\xf6\x72\xed\xb4\x70\xf0\x75\x5a\x96\x98\xcd\x7f\xb3\x67\xaa\xfc\x47\xd1\x12\x1e\xca\x5a\x33\xde\xfb\x0b\xd9\x26\xaa\x99\x19\x94\xb6\x08\xc5\x07\xbf\x69\xde\xbc\x9b\x06\xe0\x55\x40\xa0\xe0\x8f\xfb\x9b\x7a\x2e\xba\x7f\x93\xdb\xe8\x3c\xde\xcc\xc1\x84\x87\xf9\xb9\x23\xbf\xf2\x73\x63\x55\x8b\xa3\x25\xf4\x47\x7c\x4a\xf4\x21\x73\xc1\x49\x48\x43\xf1\xc3\xf1\xfb\x83\x55\x60\x7f\xdc\x9f\x7a\x3c\x79\x04\xed\x11\xe8\x93\x64\xe9\x93\x90\x85\xe4\x93\xa5\x28\xf5\xc9\x52\xc3\xf8\xd8\xc8\xc9\x16\x0a\x44\xcb\xe1\x13\x4c\xd1\x9f\x4e\xb1\x22\x1f\x1b\x0f\x2f\xa6\xc7\x7d\x12\x39\x77\x3e\x69\xd3\xf6\xc6\x88\x95\x8f\xba\x7f\xf0\x64\x7f\x19\x1b\x05\xe8\x55\x84\xd7\xda\xca\x7f\xf0\x62\x78\xef\xf4\xfe\x7c\xd9\xe8\xdf\x1f\x59\x7c\xef\x8f\x9b\x69\xef\x8f\x34\x2e\x6e\xc5\x4e\xbc\x95\x19\xf7\xbe\xb5\x04\x69\xa0\x93\x52\x87\xe7\x4f\x33\xd6\xc5\xec\x54\x51\xcc\x19\x62\xbe\x91\x01\x4d\x0f\xde\xa2\x86\x81\x86\xca\x19\x6a\x2a\xc3\x41\x2b\x16\x90\x97\x06\x90\xfc\x44\xdd\xc8\x4f\xf5\x0e\x29\x87\x77\x5d\xcc\x58\x67\x68\xa0\xe1\x85\x2d\xf0\xfd\xcb\x63\x30\xa8\xd5\xc0\x77\x7d\x5c\x7a\x16\x0e\x09\xe6\x65\x0c\x6d\x93\x0a\x0f\x8f\x83\xce\x7a\xa4\x4b\x2a\x29\xbc\x39\x28\x92\x28\x92\xfa\xed\xcc\x06\x5e\x33\xfb\x4e\xf1\x38\x84\xb7\xf0\xe0\xbe\x63\xdd\xf5\x5d\x0e\xc9\xaa\x44\x5a\xc3\xbb\x6c\x3f\x1d\x94\x75\xf7\x09\xdb\xbb\x98\xed\xc7\xab\x2c\xd7\xa9\xb3\x2d\xcc\x63\x77\x90\x0e\x21\x29\xcf\x60\x4d\x69\x38\x25\xf6\xb2\x0c\xb3\x08\x10\x81\x13\x7f\xad\x98\x04\xeb\x09\xde\x7c\xf8\x25\x37\xc1\x99\x7c\x79\x3b\xff\x2f\x54\x5c\x02\xcc\xaa\x32\x98\x5f\x3d\xfa\xe0\x5d\x62\x95\xf3\x5d\x86\x4d\xf5\x1a\x56\x12\xb1\x82\x48\x47\xef\xac\x38\xb3\x20\x6b\x16\x32\xab\xef\xf9\xe2\xcb\x86\xe7\xac\xa1\x36\x19\xc5\x86\x38\x51\x5d\xd7\xc5\xcc\x20\x1f\x4a\x20\x
e0\xb2\x37\xdd\x16\x69\x35\x1b\xca\x12\x0a\x71\x74\x36\xc4\xff\xda\x20\x2e\xad\x9d\xfc\x09\xe9\x13\x91\x9f\xba\xe6\x8b\xa7\xa3\x02\xed\x88\xaf\xdf\xb4\xc1\xb6\x05\x10\x3f\x09\x5e\x04\x43\x55\x5e\xd4\x77\xfe\xed\x93\x7c\x89\xcc\x8d\x37\xed\x43\x08\xc8\x88\x92\xd0\xe0\x73\x56\x0e\x2c\xe6\x8c\xe0\x97\x7e\x0a\xc7\xf3\xc9\x4b\x34\xfd\xdc\x16\xc8\xc1\x11\x7f\x4e\xf9\xa7\x2c\x83\x04\x0c\xde\x8e\x84\x55\xb2\x9e\x94\xe7\xf2\xf1\xa0\x26\xcb\xe7\xaa\x32\x7c\x38\x30\xf5\xa5\xc3\x13\xca\x32\x6a\x30\x8d\x49\xf7\x9d\xae\x23\x94\x5c\xce\xd3\xc7\x6a\x89\xcd\x08\xf9\xc2\xca\x3b\x39\x95\x30\x87\x38\xdb\x90\xa9\x13\x8c\xcc\x6f\x0f\x10\x95\x13\xfe\x8d\xf8\x68\x2f\xd7\x35\xa4\x68\x56\xa8\x45\xfa\x1d\xfc\xd1\x12\x83\x2e\xc8\x9f\x5f\x99\xdd\xd9\xe6\xe1\xef\x1f\x95\xd4\x3b\x3d\x9b\x26\x0c\xab\x29\x72\xb7\xd7\xdd\xb7\x0c\xdb\x57\xad\x93\x6b\xae\x54\xe5\xf1\xbc\xea\x53\x0a\x2f\xcc\xf9\xb3\x32\x2f\x41\xe3\xf9\xaa\x83\xd2\xc7\x5e\x0c\xbb\xa6\x90\x4c\xd9\x19\x18\x86\x80\xb9\x42\xb5\x16\xfb\xac\x16\x2e\xac\xbc\xca\x4f\x17\xe3\x15\x43\x95\x01\x2e\xef\xbe\x10\x99\xd1\xfe\x2b\x59\x91\xec\xd3\x88\x42\x78\xba\xd9\xfe\xca\x4c\xf6\x7a\x79\x0c\xc5\x49\x94\x7b\xc9\xba\x80\xf5\x51\x48\x01\x47\xaf\x6c\x1f\xf3\x5d\xa8\x1c\xf9\xb2\x17\xeb\x3f\xc1\x7b\x90\x02\x8a\xbd\x9e\xba\xd7\x9b\xe9\x28\x2f\x64\x66\xf6\xb7\xa3\xe3\x25\x95\x93\x83\xea\x38\xf0\xa3\x9e\x01\x78\x9b\x5a\xd9\x02\x0f\xd1\x49\x65\xae\x57\xda\xde\x1f\x6f\xd4\x40\xf6\x3d\x0d\x3f\xfd\x97\xd2\xd4\x5f\x36\x2d\x5f\x43\x08\xe6\x7e\xc5\x11\x4b\xad\x02\x08\x16\x54\x77\xf3\x02\x8d\xee\xfa\x53\x8a\x71\x36\xbd\x97\x6b\xf3\xee\x4b\x9c\x6f\xaa\xc7\xde\x50\xbb\xa2\x7c\x2f\x2f\xb7\x1c\x4f\xc9\xd8\x40\x96\x4a\x20\x7f\x94\x39\x4f\x8d\x19\x84\x96\xc7\x31\xf9\x75\xc6\x59\x06\xe7\xc4\x5f\xda\x86\xd4\xf0\xb9\x11\xcd\x21\x31\xd2\x9f\x3f\x59\x9e\xbc\x8f\x05\x71\x37\x3e\x04\x02\x87\xa2\xec\x2b\x85\xf6\x0c\xc8\xa2\x93\x10\xca\x9b\xbe\xb6\xbb\x09\x9f\xdf\x10\x55\x3e\x08\x2a\x77\x30\x9c\xfe\xde\xaf\x39\x87\xf2\x3b\x96\x1f\xff\x28\xca\x78\x55\x6f\x39\x17\xee\xb5\x05\xc7\x63\x8d\xce\x0b\x04\xbf\xfa\xc1\x74\x38\xdd\x8b\xb2\x5e\xee\x79\x71\x89\xa5\x7b\xa0\x8c\xdb\xf5\x91\x5f\xe6\xfa\xac\x21\xe3\xf1\x61\x57\x7b\xd9\x68\x34\xd0\xd7\x7d\xbd\xb3\xbe\x26\x20\x92\xa9\x5f\xc1\xf5\x74\xc9\x4f\x8b\x52\xda\x30\x9e\x11\x76\xdd\x92\x7c\x41\xe2\x7f\x48\xc3\xdb\x44\x8b\x3d\x11\x64\x3a\x77\x99\x6b\xc0\x1d\x27\xff\x88\x01\x96\x6e\xc9\x51\x25\xe2\x52\x9b\xc2\x4b\x79\xc1\xc0\xa5\x0e\x08\xa8\xff\x1a\xd0\x2f\xca\x15\x3a\x31\xc8\x04\xff\x10\x82\x79\xa3\x38\x44\xdc\x01\xcd\x66\xc3\xbc\xd7\x23\x3f\xfc\x9b\xb8\x10\x1a\xa0\xc8\x98\xeb\x68\xc1\xa2\xd7\x61\xb3\x48\x84\xf6\x13\xbf\xe2\x4c\x8a\x03\x03\x80\xf6\x12\x5b\x18\x22\x85\x4e\x17\x51\x00\xca\x2a\xfd\x51\xd8\x4b\xf4\xab\x54\xfa\x19\x2e\x36\x65\x48\x59\x44\x14\xd4\xd5\x44\x5d\x2a\x48\x3a\xfc\xce\xaa\xbf\xc2\x4b\xec\xf4\xd6\x01\xaa\x5c\xa1\x6c\xab\x97\x55\x57\x2e\x4e\x41\xce\xa2\x48\x39\xef\x0a\x52\xa7\xab\x3e\xd9\xf1\x5d\x55\x62\x3e\x55\xbc\xb8\xae\x49\xe1\xe5\x2a\x59\xbe\xcb\x75\xb1\xfc\x54\x05\x16\x35\x8f\xea\x16\x0e\x0c\xd7\xfc\xd1\x67\x70\xc1\x03\x48\x15\x9e\xf9\x0e\x3d\x31\x40\xd7\xbc\x99\x8f\x3b\x74\x68\xd0\x47\xe7\x41\x90\x7d\xf3\x05\x92\x5d\x9e\xae\x4a\x9c\xa6\x84\xaa\x4e\x0b\xd5\xc5\x1c\x9b\x7b\xd2\xb6\xa7\xcc\x57\xe2\x41\xfc\xaf\x6a\xa3\x61\xe2\x66\xc8\xdd\xe4\x7f\x85\xde\x24\xa0\x81\x0f\x2c\xbf\x7e\x39\xf0\x36\xf5\xa2\x78\x92\xbd\x27\x7d\x0c\x57\xde\x45\xae\x68\xc8\x6f\x9d\x54\x8f\x17\x06\x96\x85\x63\xeb\xe5\xac\xad\x7c\x76\x08\xf2\xe2\x4c\xcf\x2c\x46\xde\x79\x7e\xe4\xb8\x56\xb9\x9a\x4f\xf5\x02\x11\xae\x86\xd7\x4b\x66\xd1\x95\xa7\x66\x00\x5e\x59\xea\x38\x6e\xc4\x5f\
x94\x5a\xb9\xd2\xad\xe0\x37\x43\xf2\xfb\x5c\xe9\x0c\x66\x4e\x17\xbf\x59\xd5\xcb\xd9\x86\xf3\x3a\x56\x62\x68\x80\xb3\x15\x60\x8d\x2a\x16\x34\x6c\xa3\x3c\x78\xf4\xc6\x61\x19\x5a\x39\x69\xa6\x2d\x70\x21\x6f\xc2\xe6\x0c\x54\xb1\x59\xd2\xfc\xb9\xf2\x9b\x47\x3f\xd3\x29\x75\xfc\x2b\x85\x2f\xf9\xf2\x09\x0d\x81\x64\x75\x3a\x55\xf7\xfc\x7e\xd6\x1f\x1f\xf9\x17\x6b\xb7\x6d\x0b\x1a\x60\x92\x01\xf8\x1c\x92\x5c\xf8\x6c\x9b\x74\x30\x3e\x83\xb8\xd2\xcd\x92\x75\xf2\xf2\x77\x0e\x25\x1b\x04\x7a\x4c\xa1\x74\x63\xb7\x2e\xe1\x94\xf7\x4f\x3d\xfb\x7c\x65\x76\x5d\xe7\x7d\x7c\x24\x68\xb3\xe3\x85\x66\xc2\x4d\x6a\x36\xb5\x63\xb6\xb1\xcb\xdd\xf8\xc9\xc1\xcf\x97\x04\xba\xc4\xa3\x16\x8e\x99\xe7\x8d\xac\x6b\x0a\x3c\xdc\x4f\x5f\x0f\x3b\xef\xa9\xd1\xb7\xdf\xd6\x85\xe8\xce\xee\xe1\xad\xb8\x16\x83\xa1\x45\xe1\xbc\xff\x3a\xe0\xba\x2f\x99\x61\x50\xbf\x19\xab\xc4\x60\x9c\xbe\xce\xd1\x71\xbb\xfc\x1d\x00\x09\xff\xbd\x2b\xa4\xb6\xcd\x51\xdf\xcd\x2b\xe4\xe4\x67\x8a\xf0\x03\x66\x3a\x12\xd0\xec\xfd\x7d\xec\x1f\xc7\x70\xe1\x9c\x2e\x90\x15\x97\xc9\x2f\x89\xce\x1c\x9a\x20\x03\x79\x58\x80\xf0\x1c\x47\x26\x26\xaf\x03\xad\x2a\xfb\x50\xc4\xb1\x89\xbe\x62\xa9\xa1\xb6\xc3\x1b\xbf\xaa\x44\x7d\x20\xa8\x63\x06\x78\x28\xbc\x54\xa9\xca\x54\x7e\x5d\x06\xc6\xf8\x4f\xff\x9c\x75\x6e\x83\xa3\x32\x35\xcf\x0b\x19\xa8\x3c\x77\x09\x21\x72\x83\x71\xcf\xe4\xd3\x3c\x03\xa7\x98\xde\xdb\x96\xbc\xd4\xae\x8e\x93\xfa\x80\xbc\x0b\x64\x91\x12\x6c\x3f\x45\xa1\xbc\x35\x69\x9d\x65\x89\x0b\x64\x05\xdd\x38\x8a\x1e\x43\x1b\x7d\xa8\x06\x99\x11\x1d\xcf\x0c\x9e\x41\x3e\xd3\x2c\x05\x2f\x08\xe7\x14\x70\xf0\x09\xeb\x37\xe9\xbc\xf4\xb6\xa0\x32\xce\x6a\x35\xf6\x2a\xc0\x45\x74\xb1\xc4\xac\x8d\x19\x44\x32\xf2\x81\xe3\xdf\xb0\x6b\x02\x85\x08\x03\x70\xfa\x61\x09\x2a\xba\x2a\x49\x04\x56\x9c\xd7\x2f\x71\x07\x7a\x3b\xf1\xb2\x5b\xd0\xbe\xc1\x41\x55\x1c\xae\x96\xd8\x6e\xc6\xcc\x87\x7d\xd0\xb9\xc3\x10\x61\x85\xf7\x83\xf2\xa9\xae\xaf\x13\x92\x37\xd5\x2c\x5e\xb9\xec\x4f\x50\x29\xfb\x4d\xec\x85\xa4\x9e\x66\xe7\x32\x5e\xca\xa3\xeb\x4e\x01\x18\x66\xda\xd9\x6b\x90\xc3\x64\x47\xda\x28\xbb\xe2\xb8\x5c\x43\x87\xd1\x9e\x06\xed\x46\xd5\xba\xcd\x8a\xf7\x53\x83\x01\x81\xfa\x4f\x08\xd5\x39\x04\xe2\x20\x5d\xcd\x4a\xea\x2a\xd4\xa3\x6b\x87\x47\x1b\x40\xd6\x8b\xdf\xbb\x02\x5f\x6d\xda\x56\x25\x68\x54\xa5\x4b\x54\x5b\xd8\xb0\x2b\xee\x60\x89\xcc\x31\x7b\xc1\x73\x2b\x4a\x63\xb6\xc9\xbb\x04\x25\x56\x3a\x3c\xa0\xc1\xb3\x85\xf6\x4e\xad\xee\xa6\x35\xc0\x79\x0d\xde\xce\x57\xa5\x3d\x66\xf6\x18\x6b\x38\xd5\x0d\x71\x79\xe7\x04\x39\x53\x48\xf2\xdc\x3e\x7e\x21\x80\x7a\x63\xf9\xd2\xd9\x97\x2a\x12\xa1\xf3\xa7\xec\xb1\x14\x8f\xc4\xb2\xc9\x67\xa8\x78\x2c\x29\xc4\x7d\x9e\xf7\xc8\xef\x12\xcc\x19\x7e\x99\x67\xf8\xad\xce\xa7\xd3\x23\x9d\xcf\x47\xe6\x66\x51\x92\x9c\x0d\x9e\x94\xc2\x35\x40\x06\x0e\x00\x76\x17\x4f\xaa\xec\x9d\xe5\xdf\xb3\x35\x1f\x99\x15\x67\xd1\xd5\x8b\x54\x61\x4a\x74\xa5\xe5\xa0\xfa\x49\x09\xb6\x58\x20\xfe\x67\x93\x1c\x2e\x50\x6e\x5f\xa3\x0d\x11\x75\xe5\x4f\x40\xa8\x28\x19\xa1\xe5\xc5\x55\x87\xb3\x55\x14\x4b\xab\x7a\x87\xe5\xf9\xa4\x25\x73\xc2\x36\x08\x31\xa1\x2c\x01\xa5\xa2\xb0\x6b\x00\x55\x32\x8f\xcf\x40\x3f\x45\xa1\x4e\x2d\x41\xf4\x47\x67\xe9\xef\x58\xe9\x3d\x9b\x72\x88\xf3\x20\x01\xcc\xf7\xaa\xda\x1b\xb2\xd9\x16\x1f\xcd\x7c\xff\x15\x0b\xba\xc1\xbf\xcf\x0b\xcd\x37\xd7\x1f\xcf\xf9\x88\x43\x0f\xac\xb4\x39\x82\x95\xc8\x53\x55\xd1\xc2\x01\xf1\x77\xe5\x15\x6a\x3e\x65\xb3\xa3\x6e\x41\x4a\x7f\xd8\xf0\x5d\x25\x22\x69\xb8\xb8\x8f\x0e\x1c\x25\x54\xb4\x42\x3a\xed\x4d\x51\xac\x39\xfa\xc8\xd9\x19\x1d\x98\x6c\x07\x81\xa1\x97\x64\x8a\x6c\x16\x7c\xa5\xd8\xb9\xb3\x2c\x71\x94\x26\x76\xd0\x21\x92\x42\xc9\xec\xeb\x5e\x42\x7b\xec\x92
\xca\xdb\x6c\xd6\xc9\x47\x40\xe2\x41\x20\x2f\xd0\xe1\x0c\x34\x3c\xf3\x27\x9a\x50\x16\xad\xe0\x09\x5f\x37\x85\x7e\x14\x11\x7f\x62\x1e\xa1\x1e\xc4\x03\x7f\x09\xb8\x2d\x46\x0d\x00\x9d\xa7\xd9\x1b\x19\xa2\x8c\x7a\x3b\x79\xe3\xe3\xce\x5a\x7a\x38\xf3\xba\x8b\xac\xff\x44\xc3\xd0\x3e\x04\x28\x38\x5a\x9c\xdd\xef\x8c\x17\x9f\x97\xce\xdd\x6b\x67\x48\x7d\x42\xab\x88\x05\x3f\x70\x9b\x7c\xb3\x8c\xf4\x62\x43\xf6\x1b\xb9\x6c\xce\x3c\x30\x7c\xfe\x8c\x8c\x87\x33\x27\x24\xa2\x13\xf5\xea\xfe\x73\x12\x6f\xd7\xd9\xdf\x1b\xe3\x83\x5d\xbf\x68\x62\x07\xd8\x1f\xf0\x24\xb6\xd7\x06\x0e\xc5\x5d\x54\x34\xb6\x91\xa2\xd7\xb6\xb3\xf1\x7b\xef\x6b\xc7\xe7\x67\xb3\xf3\x44\x85\x4b\x48\x1b\xdd\x54\x42\x05\xa4\xd8\x86\x23\xcd\x02\x5d\x06\xe9\x88\x43\xc5\x89\x4e\x41\x24\xa7\xdb\xb8\x79\x35\xb3\xfb\x25\x5a\x74\xd3\x94\xc3\x7c\xd7\xff\x3d\x69\x2b\x64\x8b\x6e\xa7\xe6\xd6\xc1\x87\x7a\x7a\x2c\xf1\x35\x4d\xa2\x2a\xe2\x11\xc0\xed\x8f\xaa\x52\x65\x3a\x68\x3b\x2c\x6e\xcf\x50\x47\xb0\x95\xeb\xdb\x64\x90\xa2\xf3\x35\x94\xee\x90\x54\x22\xaf\x08\x40\xfe\x49\x2e\xad\x2e\x5c\xb1\x70\xc3\xba\xdc\xfc\x93\x47\x6a\x5a\x2e\xbe\xd1\xd5\x59\x37\xb7\x48\x80\x0f\x28\x78\xd4\x7c\xa3\xfc\x95\x53\x0a\xd1\x0c\x6e\xc4\x13\x58\x9a\xda\x1e\x16\xcf\x55\xc7\x12\x8f\xe4\x27\x0c\xe6\x12\x4b\x85\x28\xdf\xb1\x0f\x8d\x53\x52\x55\x73\xfe\xa9\x2b\x2d\x7a\x04\x0c\x63\x76\x60\x0f\xcb\x1d\x15\xda\x6e\x62\x51\xbf\x27\x9a\x8e\x5f\xd5\x53\x17\x2a\xa0\x67\x4f\x93\xa6\xc7\x44\x4b\x48\x52\x6a\x06\xc6\xb6\x6f\xd1\x97\x93\x3e\xd2\x4c\xfa\xf8\x34\x0f\xea\x4c\x7c\xdc\xce\x50\x2d\x6a\x05\x6c\x24\xac\x9e\x10\xb3\x27\x42\x28\x3b\x7f\xca\x5e\x2d\x1d\xc8\xcf\xbd\x08\x25\xa7\xe4\xdc\xd5\x7e\xbf\x58\x5c\xfc\x34\x24\x01\x28\x04\xad\x13\xfc\xd1\x44\xda\x46\x9e\xc3\xba\xc3\x77\x80\x2c\x20\x29\xa0\xf3\x8f\x19\x15\xf2\x1e\x0e\x24\xda\x84\x5c\x71\xf7\xcb\x19\x94\xf6\xd3\xca\x81\xde\x7a\x3d\x1d\x9c\x24\xea\x64\x7d\x83\xd4\x91\x0d\x5f\xa2\x8f\x30\x78\x4e\xf2\x82\x9d\xa1\x5b\x0f\x6e\x7e\x49\x3a\x85\x08\x94\x0d\xa3\x2a\xbd\xb0\xde\x7c\xa9\xb7\xf4\x9e\x28\x13\x72\x5a\xcf\x40\x6d\xb5\x34\x4d\xf1\xcc\xa7\x84\x69\x05\x71\x08\x4d\x85\x12\x94\x35\x07\x5e\x24\x2d\xae\x17\x28\xcc\x62\x70\x3d\xc3\xe3\x43\x59\x9d\x8f\xb4\x87\x3e\x54\x8b\xf9\xb8\xcf\xf4\xc0\xfa\x90\x8c\xc9\xe3\xfe\x4a\x10\xe9\xfe\x48\x72\xe5\x7e\x55\x2f\x48\xcb\x74\xdc\x9c\x05\x98\x69\x38\x28\x5e\x07\x30\xab\x4c\x92\x6e\x62\x48\xda\x61\x46\x8c\x52\xb4\x11\x0f\x27\xbb\xf4\xb8\x3d\x53\xf5\xb8\x7d\x69\xf5\xf0\xe8\x3b\xa9\x40\x31\x07\x0f\xd3\x21\xc4\x6a\x51\x7a\x48\xb1\x5a\x2d\x32\x0e\xd3\x23\xb8\xb8\x63\xef\x6c\x83\xd1\x4f\x73\x0a\x29\x09\x1e\x56\x41\xb4\x09\x25\x96\x3c\xbb\x8d\xed\x92\x7a\x62\x60\x2a\xd4\x9c\x36\x70\x77\x9d\xb1\x11\xe7\xca\x5b\x8f\x35\xf7\xef\xdf\xad\x38\x88\xd6\xe5\x51\x15\x4e\xe2\x6a\x82\x17\x17\x59\x91\x5f\xcd\x7f\x82\x5f\x80\x79\xcd\xc0\x52\x5e\x7a\x4a\x09\x6a\x86\x7f\x8d\x57\xca\xd7\x1c\x37\x64\xf3\xdc\xb8\x97\xa1\x69\x4c\x0d\x91\x26\x7c\xd4\xa1\x71\x3d\x00\x8f\x71\x68\x92\xe4\x33\x6c\xbb\x78\x50\x36\xfd\x92\xe2\x57\x24\xe2\x03\xa8\xba\xdd\x22\x99\xaa\x6e\x91\xfa\x13\xe7\x90\x9e\x45\xc3\x43\x7c\x30\x3c\xb4\xc8\x6b\xa5\x77\x4f\x56\xfa\xc5\xcb\x3f\x0a\xfb\x58\xa3\x2f\x3e\x18\x1f\xb4\xcd\x10\x99\x18\x82\x78\x86\x0f\xcc\xb0\xf9\x6f\x4a\x67\x61\xbd\xc2\x4b\x84\xda\x3a\xa0\x83\x0b\x2e\x3b\x35\x4f\x8f\x0a\x8b\xd8\x32\xcf\x63\x2a\x52\xf7\xc1\x24\x91\x8f\xb6\x84\x8e\x56\xa1\x2b\xef\x28\xf9\x19\xa5\x8e\xcd\xfd\x1c\x72\x50\xf0\x5e\xf2\x68\x0d\xb6\x98\x94\xfd\xc4\xae\x4a\xdf\xd6\x42\x80\xb3\x34\xd0\xf2\xc7\x0c\x78\x3f\x69\xfe\xc7\x9e\x13\xc1\x18\x59\x32\x4c\xfb\x1e\xd2\x64\x79\x27\xd1\x21\xc8\x19\x6f\x2e\x6f\x1f\x59\x1a\x36\x47\xf6\x4
c\x7c\x4a\x8a\x21\xf4\x4a\x7b\xed\x69\x48\x60\x69\xb4\x69\xc5\x10\x7b\xc7\x00\xdd\x29\x65\xaa\x5c\x64\x33\xd8\xd4\x5d\x9a\x4a\xe1\x6d\x32\xdb\x71\x90\x60\xd3\xa0\x50\xd9\x23\xf7\x9c\xa1\x18\xb8\x65\x77\x19\xd4\x0f\x7b\xc5\x28\x1c\x41\x37\x69\xd3\xf8\xa5\x63\x7c\xce\xa1\x68\xab\x23\xe2\xf1\xe1\x04\x3d\x92\xf7\xad\x06\x7d\x7d\xe0\x48\x6f\xb6\x91\x04\xbd\xfb\xc2\x5e\xf9\xf0\x44\xfa\x2a\xb4\x85\x92\x9b\x9c\xa5\x56\x52\x69\xe9\xba\xc4\x3b\xe3\x50\x02\x4b\xd7\x3c\x4a\x73\xe9\x4a\x0f\x2f\xed\x61\xf4\x0e\x28\xdb\x94\x88\x11\x80\xcd\x8a\x6c\x41\x23\x74\xe8\x19\x78\x2f\x7e\x26\xee\x5a\x29\x95\x96\xc0\x63\x7d\xe8\x2e\xca\x36\x27\x81\xc8\x6b\x3f\x22\xcc\x02\x40\x72\x6b\x50\x30\xe5\xed\x20\xc5\x91\xd5\x1b\xee\x5e\xbf\x1d\x48\x05\x76\x84\x78\x26\xf4\x8b\xd8\x2f\xa4\xfe\xfb\x47\x93\xca\xa6\x0f\xe4\x2a\xfb\x9f\x13\x4d\xbb\x63\x16\xce\x3b\x85\x2f\x82\x01\x9c\x0b\x3a\x20\x00\x6f\xde\xad\x3d\xc8\x9c\xf6\x7b\xf7\x48\xd2\x3d\xb4\xf3\x76\xd9\xda\xfb\x3d\x49\x3c\x8a\xfa\x30\x07\x09\xd5\x76\xeb\x2b\x39\x84\x42\xfa\x49\x7e\x0d\x83\x3e\x0a\xc0\xc2\x7d\xc6\xca\x34\x36\xd4\x3d\xef\xf8\x8c\x78\xa0\x07\x61\xd0\x2a\xdb\xdd\x03\x25\x19\x29\x10\xe5\x52\xdb\xe5\xc8\x14\xd8\x39\x22\x2b\x17\x68\x8b\x13\x21\xb3\x9a\x2a\x36\x58\x86\x39\x03\x78\x19\x52\x30\x1e\xfe\x7b\x0a\x95\x10\xb1\x12\x37\x82\x01\xaf\xaa\xd2\x40\x77\x4f\xe7\xa1\x0e\x0d\xb2\x74\xf2\x3b\xb9\x53\x8d\x1b\x04\xbd\x14\xa4\xa0\x5a\x4e\x24\x09\x9b\xba\x37\x21\xa9\x9d\xd3\x2b\x03\x08\x02\xf1\x1b\x80\x1d\x75\x85\xd0\x4e\x0d\x57\x03\x10\xcf\xbb\xc8\x55\xba\xbb\x0a\x82\x8e\x6a\x3f\x7e\xc2\x51\xcf\x6a\xd9\x98\x3d\xe4\x7f\xa1\x50\x85\x90\x17\xe9\x2a\x99\xf9\xf8\xf5\x56\xbc\x2f\x12\x28\x5a\xb2\xf4\xa4\xf2\xa5\x6a\x23\xf1\x65\x21\xb0\xc9\x32\xff\x99\x3e\x6a\xd2\x06\xff\xb9\x15\x02\x49\x29\xaa\xe0\x2c\x29\xa4\x6f\x77\x68\x0d\x78\x53\x35\xd4\x17\xef\x05\xf7\x72\x86\x07\x72\x2f\xdb\x9d\xb9\x8a\xb7\x97\x46\xa1\xb1\x17\xd9\xc5\x7b\x51\x06\xd1\x8e\xf5\xcb\x10\x8d\xda\xf8\x86\x68\x8d\x5a\x71\xa9\x94\xa0\x0d\xe4\xa7\x20\x4e\x45\xfc\xbc\x79\x30\x48\x5b\x64\xe9\xdb\x86\x6c\x5e\x43\x21\x2a\x55\x32\x52\x7f\x99\xc9\xbe\xa3\x13\x96\xb7\x01\xac\x9c\xad\x12\x08\x21\x3f\xe5\xfa\xdf\xa5\x1e\x04\x12\x80\xc8\xad\xdb\xcd\xcc\x77\x0f\x1a\x34\xaa\x96\xb2\x4b\xf8\x68\xf6\xd8\x60\x7f\xb6\x20\x53\x98\x78\xa8\x58\x42\x77\xcc\x21\xa5\x7f\x94\xaf\x43\x52\x63\x40\x2b\x17\xb4\x1c\xc3\x3f\xe6\xd8\xa7\x39\xbb\xe7\x60\x73\xe4\xda\x99\x56\x7a\x85\x56\xd4\xa1\x06\x91\x0f\xc9\x3b\xa3\x3f\xbd\x58\x86\xc2\x93\x9d\xaf\x48\xd3\x6a\x8b\xd0\x00\x83\xdb\xa7\xe7\x14\x68\xa7\xde\x01\xef\xd3\x70\x92\x03\x0c\x8e\xfb\xd6\x88\x6c\x44\x91\x52\xbc\x19\x59\xf4\x7d\xef\x31\x61\x31\x40\x49\x36\xe6\x8d\xec\x59\xca\x5b\x45\xf4\xe8\x7b\xd6\xd0\xb6\x5b\x87\xcf\x8a\x88\xa2\x6e\x87\xe4\x2e\x4f\x91\x8a\x8f\xe8\x7b\x52\x86\x96\x01\xda\x62\x7b\xb2\x4f\x86\x72\x4b\x2f\x49\xaf\x52\x84\x8a\x72\x4e\xaf\x06\xa4\xc9\x74\x6b\xdd\x10\xfa\x54\x14\x48\x4b\x97\xbc\x81\xbb\x33\x68\x27\x21\xed\x09\xc3\x17\xe9\x1b\x4d\x99\xea\xa4\xd5\x4d\x8d\x2a\xbf\x59\xa7\x41\x91\xd2\x94\x3d\x5a\x5e\xc1\xf5\xff\x1c\xed\x2d\x6a\x0c\x92\x53\xe4\xf4\xdb\xc5\x50\xef\xe2\x53\xda\x31\xdc\x87\x50\x08\x76\x91\xce\x68\x87\xf6\x2a\xcb\x45\xfa\x4f\x2e\x47\x35\x11\x24\xe5\x06\xed\x52\x6f\x4b\x8f\x8b\xdf\x91\x59\xe4\x58\xc5\xa2\x30\x58\x8a\xf6\x8d\x6c\x76\x96\xe8\x43\xde\x29\x36\xa4\xc8\x35\x51\x62\x2c\xe9\xd3\x4f\xda\x8c\xb7\x94\xfa\xc5\xe7\xaf\xd0\xb7\xd2\x2b\x45\x0a\x53\x88\x64\x71\xca\x5e\x15\x78\x5b\xbf\x85\xd3\x90\xfa\x4d\xbe\x5c\x53\xdf\xec\x85\xea\x2d\xaf\xbd\x4d\x1e\x96\x50\x6d\xa9\xb7\x47\x5d\xfb\xf1\x18\x59\x38\x64\x79\x2e\x5e\xf3\xf4\x56\xd1\x6b\x6c\x81\x97\x3a\x07\x
14\xfb\x52\x75\x6e\x31\x8e\xb7\x66\x32\x1f\xfe\xb9\x40\x23\xcb\xdf\x55\x0d\xfe\x63\x9f\xb7\x67\x27\xfc\x74\x32\x10\x49\x67\x55\x3a\x0e\x99\x1e\xa1\x03\xf3\xe3\x66\xec\x82\x75\x73\x95\xd3\xc7\xba\x9f\xd2\xb3\xda\xf5\xd5\xd4\xad\x8e\xde\x90\xea\xc6\x60\x58\x33\x98\xe7\xea\xfe\xf4\xba\x9c\xdf\xfa\xf6\x6f\xdc\x20\x57\xd8\xaa\x22\x9e\xaa\xac\x8a\x2a\x27\xa1\xf5\xde\xff\xf8\x9b\x21\x96\x54\x0c\x8e\x49\x86\x97\x75\xe9\x74\x55\x9b\x15\xbe\xb1\x08\x19\xae\x27\x1c\xf9\xbd\x23\x8a\x6f\x31\x14\xbe\x62\xce\x1f\xeb\x02\x20\xba\xcc\x8a\x76\xaf\xc3\x46\xa6\x52\xf0\xc9\xf8\xf7\x05\x97\xf1\x93\xca\x00\x75\xa0\x6b\xbc\xf6\xcc\x77\xa9\x1d\xa2\x5a\xf5\x3d\xd5\xc4\x20\xb0\xed\x96\x8e\x9b\xe7\x19\xd3\x4c\x36\x58\xbf\x2c\x37\x9b\x95\x90\x4b\xd0\x36\xa8\xac\xb3\xc1\x1b\xce\x1e\x72\x73\xc2\x60\x4a\x6a\x41\x79\x2b\xd4\xb6\xcc\xc2\xf1\x40\x9d\xad\x6a\x2d\x6e\xa3\x90\x91\xd7\x1f\x22\x5c\x54\xce\xaa\xc7\x94\x68\x0c\x6e\x48\x5f\x4d\x02\xdc\xd1\x41\xd3\x13\xa8\x7c\x8b\x62\x3d\x37\x0c\x2a\x21\x83\x55\xf2\xcb\xbf\x10\x70\x13\x41\x92\x93\xd0\xbf\xa7\x0d\x7c\xcb\x4d\x6a\x4c\x5b\xa9\x68\x0b\x69\xa6\xfe\xc3\xfc\x6e\xda\x5a\x3d\x0d\x5d\x43\x6d\x1a\xb7\x65\x44\x0e\xf0\xc0\x17\x07\xbd\x4d\x0b\x5b\x10\xdc\x8a\xc6\x1c\xcc\xfa\x5b\x8b\xc5\x42\xce\xa7\x57\xdc\x86\xd9\xcb\x4b\x32\xe9\x62\xdc\x69\x40\x47\x96\xd0\xe8\x2a\x74\x8e\x6d\x61\x7d\x6f\x8c\xaa\x40\x82\x1e\xfb\x08\x43\x21\xad\xb4\x31\x20\xd8\x8a\x21\xce\x83\x5a\x73\x7c\x40\x7b\x70\x33\x66\x03\x8f\x94\x37\x4b\x43\x63\x0b\x80\xdd\x12\xe3\x9d\xb7\x14\x32\x5f\x20\x5c\x63\xf7\x89\x4c\x44\xbd\xc2\x44\x2f\xed\x96\x1a\xa9\xe3\x06\xc3\x96\xcb\x75\x1e\x5f\xe3\x5f\x05\x8c\xb0\xc2\x1f\x3c\xb9\xce\xb3\x29\xfd\x6f\x6b\x91\xf3\x5b\xea\xd0\xd9\xac\x9f\x23\x31\x77\x72\x85\x9a\x9b\xab\x5d\xbc\xd3\xb3\x5c\x6f\xfa\x1c\x20\xea\xf5\xf1\x12\xec\xed\x12\xcb\xb8\x19\xa9\x84\x52\xf2\x0e\x80\x14\xb9\x00\xa0\x40\xc6\xfd\x4b\xf0\x5c\x29\xe5\xef\xb2\x60\x3e\xf6\x1a\x1a\x29\xa5\x62\xc6\x30\xcf\xbf\xe1\x05\x08\x4a\x58\xe3\xde\x44\x60\x03\x39\x5b\x69\x88\xdc\x41\x35\x6b\x28\x49\x64\x0c\xcb\x25\x5b\xfe\xd7\xb0\x9c\x30\xd0\x0e\x9b\x75\x26\x68\x29\xe8\x80\xd0\xfa\xb9\x75\x1a\x30\x43\x52\xb6\xe8\x6e\x6b\x3c\xeb\x3d\xb0\xb7\x76\xc0\x3d\xc8\x11\x54\x2c\xb2\x6d\xcc\xa1\xf4\xf5\xe5\x27\xb2\x56\xad\x40\xac\xf5\x5e\xb8\x20\x8d\xee\x83\x2a\x2a\x35\x46\xb6\x55\x1e\x87\xd5\xdd\x04\x5e\x49\x52\x2c\xad\xf8\x26\xff\xa8\x8f\xcd\x44\xa1\xc5\x75\x14\x1d\x3b\x29\x59\x1e\x6d\x43\x6f\xa1\x1e\xd4\xc0\x01\x0b\x14\xff\x55\xcf\x10\x3a\xb3\x31\xb1\xea\xc2\xb0\xd6\x6a\xa0\x09\x71\x17\x1f\x6d\x51\x19\xc6\x85\xc6\x4a\x6d\x90\x00\x4e\x2d\x5e\x7a\xfb\xfb\x9b\x29\xe9\xff\x43\x88\x6e\xd5\x0d\xce\xe4\x38\x55\x08\x8b\xb9\x7c\x9c\x23\x8a\xd3\xb8\xa8\xd8\x9c\x6e\x9d\x1e\x89\xce\xab\x50\x0d\x55\x97\xaa\xd1\x61\xad\xcf\x78\xc6\xe5\x94\xfa\x4b\x05\x5d\xbc\xa4\xd5\xe6\x50\xee\xaa\x60\x6b\xe4\xf5\x42\x8d\x8d\x53\xa2\x35\xf2\x86\x1d\xa8\xec\x59\xfa\xec\x6d\x45\x62\x3a\xe5\xb5\xdc\xa1\x08\x29\x32\x32\xd4\x03\x49\x9f\xcb\xe9\xdb\x2e\x66\x64\xdb\xc7\x86\xb1\xf0\x22\x94\x3b\xc8\xe9\x3d\xb0\x02\x4b\x2d\x9c\x72\x84\xce\xe4\x5a\x2a\x15\xf8\x0d\x70\xc8\x35\x7b\xe2\xca\x02\x3c\x62\x83\xfd\x13\x0a\x64\x92\x46\x5b\x43\x23\xac\x49\x8b\xd5\x8f\x40\x69\x72\x63\xfc\xa0\x0d\x68\x40\x86\x72\x98\x8d\x24\x84\x60\xda\x97\xda\xd9\x33\x64\xc3\x16\x49\x99\xd9\xdd\xf4\x7c\x0a\x05\xd2\x56\xf2\xbe\xc4\x86\xc3\xd0\xf3\x2a\x03\x17\x8f\x3c\xbf\x5a\x02\x70\xee\x22\xd7\x4e\x49\x18\xe5\xaf\xc4\xbc\xde\x27\x0b\x89\x7c\xc1\xed\xeb\xf5\xc9\x4d\x39\xeb\x54\xb5\x6c\x7a\xa8\x81\x73\x85\x9a\x2a\x9b\xb7\x7d\x98\x7a\xa5\x88\x16\x39\x24\x46\xe6\xaa\xae\x14\xcc\x70\xcc\x45\x9c\xb5\x31\xbd\x80\x3b\
x85\x24\x7d\xab\x13\x93\xeb\x04\xc8\xe7\xae\x84\x24\xb2\x43\x3e\x0c\x4d\x68\x20\x3e\x62\x74\x5f\xda\x53\xaf\x38\x8a\xbf\xda\x26\xdd\x1c\x18\xf4\x24\x3e\x16\x9c\x08\xae\x3a\xa6\x6a\xe0\x41\xf3\xbd\x21\x55\x82\x02\x58\x88\x28\x1e\x42\xea\x0c\x3c\xfe\xca\x1d\x5d\x73\x5b\x98\x58\xf3\x7c\xa9\xc1\x60\x89\x28\x54\xc7\x42\xfe\xc7\x06\xc7\x22\xf9\x20\x84\xde\x38\xe8\x2f\x26\x86\x3b\xf0\x72\x6f\x51\x99\x6b\xef\xe4\x23\x44\x39\x94\x89\x5c\x11\xec\x2d\xa5\xaf\x25\x4b\x58\x2b\xa4\xbf\x96\xf4\x0a\xe9\xaf\xa7\x7d\xe5\x4d\x25\x6c\xd3\xeb\xec\x9f\xb1\x2b\xc7\x45\x9e\x7a\xe3\x7d\x59\xde\x36\x2a\x49\xd2\xac\x80\x42\xd9\xd1\x74\xd0\xf0\x81\x2c\xd8\xad\x4f\xbf\x87\xef\xa0\xb4\x8d\x26\xd4\x35\xfa\x92\x34\xef\xcc\x1e\x1d\xe3\xf3\xd6\x7e\x50\x3a\x85\xab\x7e\xfd\x50\xa7\x5b\xef\xb9\x38\xe6\x60\x69\xaa\x61\x55\x9a\xb8\x84\x29\x8e\xa0\x6b\xce\x35\xc3\x42\x49\x0c\x43\x45\x08\x8c\xb9\xe9\x17\xd8\x5a\x61\x53\x10\x13\xfb\x16\x25\xc6\xd8\x40\x00\xd5\xeb\xf5\xbd\x8b\x83\xeb\xd2\x18\x80\x75\x15\xad\xa5\x01\x51\xfc\xaf\x47\x34\x4c\x08\xa5\xa1\xe3\x0a\xf4\xd3\x28\x43\x36\x5c\x08\xa4\x21\x84\x83\x07\x4b\x7b\xd8\x4a\x6d\xde\x8d\x25\xd2\x36\x0a\x19\x74\x21\x66\x16\x12\x53\x1f\x89\x17\xa6\x7f\x91\xc6\xb6\xa6\x7b\x09\x31\x31\x9b\x19\x95\xd4\x94\xcc\x42\xa8\x2c\x6e\xd8\xec\xb4\x10\x0a\x83\x9c\x59\xfb\x3f\xf5\xfe\x92\xbc\x29\x50\x36\xe3\x6c\x7a\x45\xf2\xa4\x14\xb7\x6c\x38\xe3\x39\xce\x10\x0f\xc3\xc2\xa4\xbb\x06\xa1\x61\x56\x34\x2c\x3b\x97\x83\x1e\x2a\x74\x52\x34\xd5\x06\xd6\xd4\x10\xba\x65\xf7\xa0\xbb\x39\x14\x0b\xb0\x46\x42\x27\x72\x05\x42\x0f\x4d\x12\x6a\x47\x0e\xdd\xb2\xdc\xc4\xcc\x3c\xb9\x48\x34\x37\x2e\x68\xc6\x77\x07\x4e\xfe\x68\x80\x69\xfb\xde\xf9\x4a\x82\x09\x19\xd8\xda\x40\x44\xa0\x1a\x4c\xda\xa2\x6f\x49\x1b\x1c\x73\xbc\xd0\x56\x43\xec\x2c\xe4\xcc\x7e\xe2\xa1\x69\x7b\xa0\xa9\xb0\x8a\xdb\xa4\x47\xb8\x85\xf2\xd7\xf6\x1b\xde\x52\x48\x19\x82\x13\x52\xf7\x1b\x7e\xfb\x35\xad\x0c\x57\x59\x11\xef\xce\xbb\x58\xe2\x51\x35\x0d\xb0\xa8\xb1\x9a\x50\xfa\x29\xf2\x00\x4b\xbb\xd2\xd0\xa4\x9e\x1d\x3a\xd0\xf3\xaf\xa6\x5e\x69\xbe\xb4\xa7\x34\x6e\x10\x86\xb0\x67\xc6\x4d\xad\xe9\xf1\xe7\xb6\x1e\xd4\xeb\x4b\x0f\x78\x27\x79\x68\x39\xd5\x6c\x8a\x88\x0b\x5d\x32\xad\x34\xf9\xb4\x29\x94\xd7\xe6\x25\x04\xeb\x38\xd8\xa6\x09\x6b\x67\xdc\x33\xed\x66\x12\xf2\x79\x4c\x5b\x68\x9f\x4d\x21\xa8\x36\x65\x1b\x4c\x79\xfb\xbf\xbc\x30\x60\x44\x29\xf3\xd8\x88\xdf\x5a\xa1\x94\xa3\x87\x37\x0c\xea\x5b\x10\x24\x24\xe6\x21\x9b\xa0\x6d\x3f\xea\x01\xdb\x5a\x83\xc2\x0e\xab\x2e\x71\x2b\xfd\x73\x0b\xa9\x46\xc5\x87\x58\x69\x56\x87\xa7\x6e\x01\xe6\x87\xd6\xc1\xb0\x91\xd8\x34\x30\xd6\xe8\x53\xeb\x67\x91\x26\x1a\x9c\xee\x22\x71\xc0\x9e\x94\x50\xc4\x3d\xe4\xca\x54\x51\xeb\x20\x3a\x1e\xd2\x75\x1a\xce\x68\x8e\x9b\x3d\xc2\xed\xe5\xd3\x4b\x8d\xed\x22\x71\x34\x58\x3a\xc5\xc5\xb7\xc8\xfe\xb6\x12\xfc\x25\x8e\x38\xb9\x59\xee\xe0\xfe\xc4\x6a\x6b\xf8\xc3\xa1\xe8\x76\x7a\xc9\x06\xb1\xdc\x90\xd8\xa2\xf4\xd1\xed\x02\xe4\x22\xf3\x5a\xcc\xd8\x4d\x92\x6b\x1b\xf8\xcb\x7e\x0e\xc7\xd9\x72\x77\xce\xeb\xb7\xd4\x2f\x5b\x32\x74\x18\xa5\x9b\x04\xe2\xa6\xe8\x77\x20\xd7\x36\x1f\xb9\xc1\xa4\xbd\x0a\x4d\x5d\xea\x19\x0a\x6b\x48\x7e\x95\x30\x95\x99\xc3\xd2\x66\xc3\x7a\x32\xe5\xc2\x2a\xd3\xc1\x16\xb0\x22\xd1\xc6\x5e\x14\x3e\x65\xe5\x49\x49\xa6\x5a\xd9\xc4\xa0\xef\x76\x5e\x8c\x3f\x02\xae\x5d\x3c\x19\x08\xbf\xc5\x92\xe1\x52\xb9\x64\x8b\xf2\xdc\x54\xc5\x96\x00\x63\xb0\xcd\x1f\x97\x6a\x13\xb7\x8e\xd5\xc8\xaa\x06\x42\xfc\x16\x47\x83\xc4\xbf\x17\xae\xbe\xb1\x26\xfd\x93\xb6\x2c\xce\xe0\x95\x05\x47\x11\xab\x6d\xb6\x24\x47\x11\xa0\xb8\x5a\x89\xe7\x80\xd8\x74\x96\x9b\x76\xfc\xe6\xd3\x70\x45\x85\xbe\x5b\x5e\xb9\x6e\xe2\x98
\x1a\x52\x91\xdc\xb5\x94\x10\xeb\x2b\xf9\x06\xd9\x6c\x26\x3e\xdb\x69\xb2\x2c\x2b\xf8\x42\xf8\xf6\x4a\x08\x0c\xc2\x80\x59\x4a\x48\xa8\x7d\x38\x7f\x37\x10\x22\x6a\x6f\x7f\x4e\xf9\xee\x54\x3d\x4c\xa9\xa9\xf7\x95\xcf\x2d\xe4\xd3\x90\xb1\x59\xc8\xf9\x66\xf7\x4f\x19\x2c\xa4\x27\x53\x4d\x0b\xa4\x79\x89\x80\xdd\xc4\x02\xb2\x11\xfe\x6b\xa3\x99\xb7\xe4\x95\xba\x52\x0b\x98\x5a\x37\x01\x16\x12\x21\xcb\x25\x58\x1a\x16\x04\x6a\x44\x75\x4a\x13\x48\x2b\x9d\x6e\x32\xcf\xab\xd6\x08\x97\x1c\xba\x38\xe0\x14\x1d\x78\x01\x2c\x46\x89\xea\xc2\x36\x34\x59\x01\xbb\xb6\x62\xaf\x16\x1c\x70\x85\x12\xd8\x2b\x49\xb9\xeb\x0e\x9d\x39\x9b\xc3\xfb\xf2\x13\x18\x92\xdd\xd1\x0c\x25\x34\xe9\x72\x9d\x73\xfc\xee\x14\xb5\x24\x84\xcf\xb0\xe4\xe3\x35\xc3\x98\xa8\x16\x83\x01\x8f\xa9\x60\x4b\x62\x38\x8d\x95\x76\x6e\x5d\x6d\x57\x1c\x2b\x44\xd0\xa8\x3b\xb6\x9d\x4a\xba\x70\xb9\x33\x55\x65\xab\xd2\x5e\x8b\x2c\x7d\xe4\xd1\xe8\x03\x45\x66\x8c\x7a\x72\x60\x59\xa7\x80\x11\x2b\xe4\x58\x13\x9a\x45\xc4\xa5\x74\x09\xe9\x49\x7a\xaa\x35\x09\x36\x21\x98\x16\xa5\xb4\xdd\xd6\x90\xa1\xd3\xcc\xd0\xd7\x24\x04\x10\x8d\xc0\x4b\x78\x1e\x81\xb4\xca\xca\xd9\x04\xd6\xf4\xe4\x26\x29\xf9\xa5\xe9\x77\xd1\x20\x75\x5d\xbc\xfb\xf7\xb7\x97\xfa\x7a\x4a\xc1\x0d\xfc\x56\xbb\xa6\xcf\x2e\x75\xb6\x09\x70\x5c\x86\xc0\x59\xf2\xd2\x7b\x52\x2b\x3c\xe8\xfd\x79\x1f\x1f\x37\x5a\x9e\x3f\x5b\xca\xa0\x96\xd4\x9f\xf7\x6e\x63\xa4\x83\x45\x12\x5f\xf7\xc2\xf5\x2a\x07\x5e\xce\x2a\x78\x99\x50\x46\x23\x43\x1e\xc4\xc5\x24\x2d\x56\x9b\xf8\xd0\x51\xd7\xd8\xc7\x78\x8d\xe7\x21\x22\x30\x03\x48\x56\x12\xdc\x38\x5f\x75\x09\x31\x9e\x2c\x5d\x75\x64\xb5\x0f\x4a\xb5\x5b\xd9\x06\xee\x67\xc4\x63\x3f\xeb\x57\xe2\x63\x55\xda\x39\xcf\xca\xe5\x02\x17\x10\xcb\xa1\xc0\x75\x99\x71\xc6\x06\xfa\xac\xe2\x09\x86\x44\x98\x46\x7f\x68\x84\x29\x19\xf7\x59\x19\x6e\x63\x65\x27\x41\x2e\xc4\xd2\xf3\xa8\x06\x28\x62\x54\xb7\x0f\x37\x37\x69\x54\x3f\xab\xbb\x24\xad\xf0\x07\x09\x46\x55\x9e\x62\xe1\x82\x0b\xc0\xf3\x27\x51\xe6\x21\x54\xcf\x3a\xa9\x0e\xf9\x54\x3c\x87\x41\x91\x55\x43\x1f\x8c\xea\x45\xb5\x97\x14\x5a\x3f\xcf\x91\xd9\xfc\xac\x5d\x1c\xd6\xd1\xe5\xf4\x04\x91\x8a\x3c\xa9\x4f\x38\xed\xfc\x01\x6e\xc9\x5e\x2d\xe9\xd9\x9e\xf2\x7a\x3d\x97\x5b\x1c\x4c\x50\xd5\xd6\x65\x97\xf6\x1a\x96\x62\xf3\x53\xc9\x84\xf9\x37\xa8\x54\xc2\xe7\xa2\xb8\x37\x03\xeb\x53\xa0\x6c\xa1\xdf\xc4\x0e\x8a\x22\x51\xa1\x10\x49\x3c\x8e\x71\x8d\xdc\xb7\x73\x25\x29\x84\x9d\x3e\x91\x7f\x82\x2f\x56\x3a\x5f\xa0\x90\xd0\x3a\xda\xb3\xc0\x99\xae\xba\x16\xc6\xdb\xa3\xd4\xf9\x0c\xf1\x6c\x70\x41\xc4\x51\x5b\x24\x57\x19\xdc\x6e\xbd\x63\x48\xba\x7c\x02\x48\x56\xaf\x80\x38\x8c\x3b\x97\x2b\x4a\x5e\x01\x09\xd0\x1f\x5d\xc3\x06\x19\x72\xfd\x02\x26\x1e\x47\x1a\x95\x27\x08\x64\x79\xaa\x1c\x72\x6b\xa5\xe9\xc6\xcd\x69\xf1\x1e\xe0\x99\xad\x71\x7a\x79\x86\x06\x5e\x16\x9f\xdf\x33\x1c\x02\xcf\xc6\x7c\x88\x75\x0a\xc5\xe1\x3c\x3d\x11\x96\x0d\x32\x4b\x48\x0d\x41\xe3\xd2\x2b\xab\xeb\x6d\xdd\x17\xe3\xbe\x9e\x11\x55\xf1\xcc\x5b\xd3\xc8\x7e\x86\x0e\x35\xd8\xbd\xbe\xc8\x47\x66\xd3\xb3\xa1\x46\xcf\x0f\xf9\x0f\x27\x41\x3e\x9a\xfe\xd8\xc2\x49\xcd\x13\x53\xf7\x38\x90\x6a\x7a\x08\xd3\xa0\xde\x57\x2e\x6c\x09\x79\xe2\x6f\x74\x58\xe2\x9c\xea\x99\x3e\x71\x3b\x89\x92\x37\x4f\x9b\x87\x4b\x5e\xea\x69\xf3\xa7\xae\x36\x29\xb2\x0b\x8c\x3e\x52\xc8\xf3\xc5\x30\x82\xfd\xa0\xc1\xf9\x84\x2b\x58\x82\x6e\xc8\xca\x1c\xe3\x87\xea\x2d\x9f\x29\xd4\xd4\xa4\x22\xf9\x6c\xac\x87\x4f\x10\xd9\x35\x8d\xb0\x7d\x9f\x19\x76\xf7\xc4\x04\xc6\x5f\x88\x4d\x37\x98\xf1\xfe\x24\x9d\x1c\x51\x6a\xea\x66\xcb\xcd\xd3\x70\xf9\xd7\xca\xdd\x3e\x60\xbe\x0f\x9b\x0b\x82\x08\x2c\x7e\xe4\x81\xc7\xcf\x54\xfc\xeb\x4a\xb1\xe2\xfb\x4c\xad\xb9\x2b\xb
5\x1c\x09\x1e\xd5\x4b\xce\xf2\x51\x52\xa5\x8b\x2a\x63\xde\x2a\x0c\xfb\xe4\xe7\x71\x33\x2a\xf8\x71\xcb\x8c\x7e\x20\x7c\x4c\xe7\x99\x78\x1a\x2d\xfa\x3e\xaa\x56\xf8\x1f\x15\xb6\x2d\xd5\x12\x1f\x35\x7a\xbb\x87\x75\x52\x6e\xd7\x3c\xaa\x77\x28\x8f\x3a\x6f\xbe\x58\x48\x81\x32\xfe\x67\xa8\x85\xc7\x3a\xe9\xb6\x23\x7e\xcd\x8f\xb2\x35\x51\xf7\x07\x3a\x4d\xd6\xae\x44\x86\xe4\xc3\x99\xa4\x2e\x22\x2d\xc5\x3f\x82\x53\xd8\x41\xfe\x8f\x6a\x66\xc1\xeb\xfd\x80\x7f\x88\x15\x32\x03\xc4\xf5\x01\x48\xde\xf9\x48\x5f\x5f\xce\x7b\x24\x90\xe5\x39\x90\xd5\x61\xf3\xcb\x2f\x26\x00\x54\x30\xb3\x47\xcf\x75\x9f\x87\xdd\x2d\x43\x51\x1e\xe9\xe9\x6a\x8e\x8f\x04\x0d\x4b\x52\x78\x3c\x6c\xd8\xec\x28\x1f\xfa\xe0\xdc\xb6\x7c\x13\xcc\x32\x4a\x95\xbd\x13\xa5\xff\x6d\x2c\x62\xd0\x45\xb9\x90\xe4\x01\x70\x1e\x29\x8b\x4b\xa1\x9c\x92\x1f\x43\x44\x21\xbd\x03\x98\x1a\xa1\x22\x25\xfc\x4f\x2e\x88\xa6\x10\x0e\xe4\x22\x4e\x12\x37\x9b\xf2\x24\xb6\x4d\xcf\xbb\x3c\x79\x72\xd4\xdd\xbf\xb5\x02\x07\x31\xe5\xd7\x7a\x9b\xcf\xd3\xe5\x36\x7f\xf8\x30\xad\xa4\xa2\x90\x98\x04\xe7\x9b\x2e\x3a\x94\x14\xef\xb9\x2f\x2e\x7d\x40\xf2\x8c\x2a\x54\xf7\x21\xb1\x9f\x3b\xb8\x21\xe6\x9b\x44\x31\xf3\x2d\x6d\xcb\xf9\xde\xb8\x8c\x07\x90\x75\xb4\xa4\x9c\xee\x26\x98\x36\x5f\xc9\xee\xf1\xe0\xf5\xa4\x00\x75\x8f\xd2\x40\x0b\x49\xa8\xc3\x1e\xae\x7f\x2a\xb3\xd4\x84\xaa\x84\x7c\xe6\xfa\x3a\xf3\x4e\xbf\xae\xa7\xf2\xbb\x53\x79\x46\xc4\x5b\x88\x93\x61\x49\xd6\x01\x33\x24\xa0\x8c\x26\x55\x35\x43\x4d\x4e\x0c\xba\xbb\x3e\xfd\xf2\xd0\xff\x4d\x80\x05\x0f\x56\x06\xf2\x5c\xa3\x0a\x75\x2f\xfc\x81\x84\xc7\xea\xa4\x77\x34\x8b\x52\x76\x46\xca\xac\xce\xbf\x66\xf5\x53\x73\x5d\xa4\xcf\x54\x97\x6b\x3a\xf2\x3b\x0b\xeb\x1e\x9a\x2c\xbb\x43\xd2\x02\x18\xfc\xfd\xc8\xa9\x09\xe6\xac\x73\x2d\xce\x0a\xf8\xd4\x46\x92\xbc\x9b\x87\x68\xa8\x5a\x0a\x49\x9a\xe1\xab\x8f\xfa\x8d\x4a\x32\x98\x6d\xd2\x15\x12\x6a\xba\xab\x34\x85\x36\x5b\x39\x68\xca\x20\xe1\x01\xeb\x89\x4d\x8b\x0a\xf3\xa6\x8d\x20\xb4\xa8\x1c\xc5\x7f\xcd\xd8\xf6\x5a\x43\x63\x73\xb4\xae\x91\x58\x8a\x62\x45\x66\x9f\x07\x95\x48\xe5\x2c\x78\x32\x1c\x51\xf8\x0f\xe4\xac\x4d\xec\xab\x4c\xe1\x35\x30\xa8\xe1\x17\x32\xc4\x88\x6b\xe7\x11\x99\xec\x50\x60\xa0\x90\x48\x1b\x5c\xe1\xd2\x1f\x3b\xbe\xf1\x9a\x0d\xea\xf8\x03\x19\x18\x92\xcc\xc2\x86\x6c\x08\xa9\xa1\x55\x42\x90\xac\x4c\x65\x89\xcd\x76\x3e\x29\x3a\x00\x0d\x02\x9d\x74\xc7\x0e\x26\x33\x02\xf0\x5d\xe4\x9d\x47\xb8\x90\xda\x42\x88\xae\x58\xa2\x7d\x59\x1c\x85\x0e\x7e\xb2\x58\x1e\xd4\x23\xd3\x04\x1b\x2a\x87\x43\x35\xde\x86\x26\x91\x32\xe7\x95\xa4\x84\x00\x2a\x9d\x83\x38\xd4\xd6\xd4\xda\x91\x63\xc2\x7f\x2f\xab\xe4\xdb\x96\xb5\x49\x5d\xcf\x39\xde\x46\x0e\x09\xb5\x65\xa4\x77\x01\xb1\xee\xba\x80\xcd\x26\x0e\xa2\xf2\x0d\xf9\xb5\x98\x5f\x23\x33\xf2\xa3\x32\xfc\x0d\x4e\xe9\xd5\xe4\xde\x1c\x13\xfd\xb9\xd3\x50\x7d\xcb\x8c\x92\x37\xc0\x6a\x64\xd9\x72\x73\xfe\x31\x3b\x38\x8e\x7c\xdb\x39\xa7\x97\xaa\x9a\x86\x10\x0c\x43\x78\x4d\xa4\x05\xdb\x96\x22\x6a\x0c\xa5\x27\xe3\x37\xe6\xf4\x7e\x9e\xa1\x1f\xf6\x72\xf9\xd5\x50\x79\xbb\x58\xe8\xc1\xa5\x9b\x5d\x55\xba\x3b\x7d\xc4\x49\x4b\x55\x0e\xda\x2d\x9a\x29\x11\x9a\x89\x66\x7e\xbb\x6b\xc2\x25\xe0\x82\x2b\x03\x29\x3a\x25\xd0\x1e\x00\x61\xad\xa1\xf2\xb6\x96\xa6\xdd\xa6\xfc\x29\xa7\x3a\x8a\x7d\x85\x9d\x63\x3a\xa8\x6b\xe3\x60\x15\xc0\xbf\x59\xfb\xed\x1b\xda\x6d\x2f\xdd\x9c\x3a\x21\x97\x7a\xcb\x42\x7f\x84\xd7\x36\xb5\xaa\xa4\x9c\xf3\x39\xb5\xdc\x0f\x83\x23\x0b\xad\x20\x52\xb2\x6d\x09\x75\x36\x11\x0f\x1a\x3a\x93\x74\xec\x16\x56\x6d\x59\x22\xf3\x73\x6e\x72\x47\x33\x99\x71\xad\x50\xf2\x83\xa1\xc4\x43\x1e\x74\x44\xcf\x88\x00\xfd\xbd\x35\xdd\xd1\xcc\x9b\x9c\xa2\xc2\x13\x97\x05\xe6\x24\x9d\xc6\x34\x
e8\x8e\x87\x21\x5e\xa1\x07\x5e\xf3\x9a\x29\x04\xee\x9c\x00\x6e\xba\xbf\x5f\x79\xec\x0c\x7e\xbc\xb8\xfe\x48\xb8\x85\xb7\xc8\x50\x9b\x39\x41\x0e\x8e\xcb\x9a\xd3\x7d\x70\xf8\x71\x01\xb8\xc3\x81\x75\x1e\xb2\x7e\xa7\xbb\x89\x8e\xdd\xb4\x07\xa7\xbb\x2c\x2b\x1d\x11\x80\x85\xeb\x4f\x06\xf5\x5b\xf6\x76\x28\x25\xc6\x75\x87\xe7\xcd\x90\xe6\x58\xa0\x23\x2b\x49\xe2\x6a\x77\xa8\x8e\x21\xda\x9e\x86\xa1\xc1\x53\xfd\x39\xe0\xcc\xd2\x67\x8c\x13\x32\x95\x55\xc6\x7d\x1d\x35\x6b\x58\x9e\x0e\xb2\x93\x4e\x1e\x56\x3e\x8e\xc2\x3f\x15\x0d\x97\x8a\xd3\x5e\xe6\x15\x18\x50\xda\xed\xe4\x5c\x0e\x3c\x2a\x6f\xbf\x5f\x64\x7a\x19\x26\xb0\x39\xc8\xfa\x07\x8e\xff\x4e\xbc\x8d\xfc\x47\xb3\x13\x72\x06\xdc\x1b\x9f\x95\x6b\xc8\xf1\x22\xd2\xbd\x83\x82\xf7\x87\x60\xd0\xfa\xc1\x54\xf9\xcd\x4f\x88\xbf\xca\x2b\x19\x4b\x6c\x83\x8b\xae\x93\x8d\x58\xfe\x83\x1a\x22\xc7\x56\x1d\xbe\x24\x33\x52\xf4\xea\x6a\x15\xf5\x01\x5c\x53\xef\x24\xa9\x35\x68\x9c\xb2\x9a\x36\xdf\xae\x2c\x25\x1e\x86\x65\xf8\x85\x44\x12\x53\x5d\x5b\x6c\xa6\xe1\x8e\xcf\xbb\x2e\x75\xd3\xa1\x91\xe3\x0c\xa9\xb9\xeb\x48\xbb\x60\x70\x2b\x4f\x35\x2a\xba\xb8\x0d\x39\x55\xdd\xd5\xa4\x5f\xb9\xba\xb2\x23\x4a\x61\xe8\x57\xc3\x53\x77\x3d\x84\x62\xda\x22\xfd\xb0\x85\xa9\xa8\x13\x1c\x5a\x3c\xd8\x65\xeb\x3e\x42\xda\x57\x5e\x45\x92\x7a\xc5\x8c\xab\xa3\x48\x7c\xee\x64\x80\x30\x54\xe8\x9a\x18\xc1\x54\xb6\xca\x37\x53\x36\x29\xcd\xf9\x2a\xdc\xde\xde\x5c\x41\x52\x0f\x1b\xbc\x73\x57\x10\xc4\x3f\x27\x6a\x1c\x4f\x25\x77\x71\xf1\xae\x93\xd8\x5a\x31\x33\xd9\x9d\x16\x8e\x16\x86\xdc\x01\xf7\x7f\xe0\xdc\xb4\xec\x78\x87\x1c\x4b\x25\x75\xa7\x0d\xc0\x31\x02\x89\xb0\xee\xe9\x15\xb5\xb9\x6d\x91\x28\xdd\xa1\x66\x82\x0e\x9e\x2a\x8e\x59\x91\x30\x06\x32\x33\x8c\x26\x2c\x5e\xbb\xca\x9b\x02\x0c\xac\xec\xdb\x32\x08\x04\xed\x24\x9a\xd7\x5c\xd7\x54\xb3\xe3\xb5\xe8\x89\x9b\x72\x34\xa0\xf4\x66\x3c\xa6\xeb\xd6\xf1\x0e\xb0\x02\x97\x08\x0e\xad\x17\x39\x04\xb9\xf2\xc5\x8d\x3a\x52\x13\xc0\x60\xf9\x09\x3e\xa6\x83\x52\x5e\xc4\x79\xf9\x89\xd1\x29\x92\x17\xc2\x73\xa3\xda\x4e\xa2\x5f\xdb\x5a\x90\x1c\x3b\x40\x73\xa0\xc9\x89\x05\xbd\xc7\x80\x58\x06\xfb\x96\x14\x59\xa6\x93\x96\x52\x51\xaa\xf6\xcb\x12\x72\x77\x8b\x19\x6d\x3a\x9c\xd7\x44\xcb\x22\x2a\x13\xeb\x36\x90\x91\x64\x4a\x1d\x0d\x20\x68\xce\xb9\xd8\xd6\x87\x03\x03\xc4\xe6\x28\x48\x76\x73\x32\x07\x6d\x39\xe5\x5a\x18\x94\x8c\xd8\xfd\xbc\x15\x71\x38\xde\x8f\xa2\x81\x7a\x04\x0f\x44\xfe\x8f\xea\x72\xcc\xc6\x1d\xc9\x7e\x38\x1e\x1f\xfd\x10\x04\x0e\x5e\x5a\xb7\xf8\xd3\x75\x3b\x82\x50\x06\xa8\xd7\x64\x89\x4a\x74\xb1\x5b\xe6\xde\x88\x0c\x1b\x3f\xb3\x67\x76\x4a\xe9\xec\x28\x21\xd4\x76\x20\x92\xba\x19\x6b\xbe\x19\xf2\x70\xae\x6e\xc3\x7b\x3c\x42\x9a\xee\x94\xd0\x5d\x0b\x80\x18\xb5\x78\x6a\x25\xfd\x87\xd0\xb1\xd3\xf2\x82\x4b\xda\xc5\x51\x22\x2d\x01\xf2\x07\xea\x80\x39\xc3\x06\x21\x30\x24\x64\x66\xdf\x33\x6a\x93\xb5\x3c\x31\x1e\x4d\x31\xed\x48\x1f\x29\xad\x1d\xa1\x99\x38\x92\xfc\xbb\xfc\x27\xa1\x3c\xed\xdb\x42\x95\x0f\xab\x18\x67\xaf\xe7\xa9\xd5\x35\x03\x53\x3c\x0c\xce\x2e\xc6\xfa\xcf\x7f\x58\xdf\x8b\xa8\x66\xc7\x96\xaa\x89\xc0\x18\x1f\xa8\x0c\x6c\xe9\x92\x3a\x5f\x70\x45\x8d\x5a\x4c\x1a\xa5\x53\x3a\xd6\xe9\x62\xc1\x05\xd7\x11\xc1\x44\x97\xfe\x13\x53\xe1\x71\xf9\x4c\x94\x9e\x19\xed\xc1\xb1\x2c\x72\x82\x8c\xec\x72\xc6\x85\x2e\x15\x94\xda\x9f\x60\xd1\xf1\xe7\xde\xba\xc7\x07\xed\xfe\xb1\x20\x04\x84\x29\xfe\x23\xd4\x22\x78\x7c\xf9\x2d\x8c\x8d\xee\xfe\x53\xf3\x28\xb2\xa5\x46\x84\xee\x27\x21\x1a\x1f\x63\x59\x98\x68\x04\x2d\xbd\x5e\xc1\x8f\x86\xf9\xd2\xe0\xf9\xba\x04\x24\xd4\x07\xbf\xcc\x46\xc4\x4f\x12\xfd\x47\x13\xd2\x43\x22\x1e\x1b\x08\x30\x73\x99\x46\xda\x04\x23\x9c\x87\x0c\xb7\x31\xf8\xd4\x2e\xef\
xcf\x38\x3f\x1c\x95\x6a\x3a\x4a\x1f\x09\x25\x8b\xad\x84\x54\x63\xe6\x4a\xc3\x98\x49\x1f\x69\x3d\xa6\x0b\x02\x8d\xe1\x3a\xc4\xbc\x48\xf9\x99\x23\xe6\x77\xb3\xc8\x5a\x6d\xe3\x51\x79\xc9\xe2\x5c\xde\x0e\xe7\xa6\x0a\x48\xb1\xbc\x9c\xa3\xd9\x59\x63\x4d\x21\xe6\xa7\x00\x00\x43\xfa\xcc\x73\x38\xf7\x46\x31\x36\x8c\x29\xbe\x5f\x18\xe8\xfe\x1f\xd7\x08\xfa\x47\x74\x46\x6b\x4d\x5c\xc2\xb0\xb2\xd7\x36\xe7\x90\x23\x1a\xb4\x57\xc9\xfa\xb3\x5b\x3a\x4e\xc0\xf4\x08\x19\x4a\xfa\xc1\xd6\x4e\xb5\x91\xff\xde\x00\x9f\x77\xf2\xa4\x00\x1e\x2e\xa7\xd7\x88\x05\xff\x37\x41\x28\x0a\x3a\xe9\x5d\xe2\xcc\x7f\x4c\x0f\x95\x73\x48\x21\xa6\x4e\xbf\x6c\xb4\x25\xf9\x83\x30\x36\x89\xca\x7d\x34\x63\xce\xff\xe4\x23\xc9\xff\xfc\xf8\xfc\xd2\x88\xe5\xa1\xb5\xec\x89\xb3\x73\xb1\x01\x80\xd6\xcb\xdb\x55\xd6\xfd\xe6\xab\x2f\xa4\xf8\xcd\xd1\x76\xf3\xa9\x46\x94\x7f\x76\x9b\x0d\xd1\x0b\x8d\x4e\x0c\xd6\xdb\x09\x03\x99\x1b\x88\x1b\xf0\x06\x90\x91\xb3\xde\x71\x90\xc9\x9e\xa7\xee\x9f\xe0\xdf\xac\x8b\xe6\x62\x73\xa9\x58\xfe\x77\x09\xf2\x11\x98\xa9\xf2\x23\x1a\x14\x9d\x64\x10\x21\x3a\x63\x76\x48\x10\x36\x73\x3e\x07\x6b\x5a\xe6\x93\xcf\x72\x1e\x90\x9b\x95\x40\x24\x1f\x40\x4d\x90\xcf\x3a\xee\x8d\x61\x1b\x79\xe3\xe2\x84\xf2\x76\xf2\xf2\xf5\x79\x2c\xf8\x02\x19\x2c\x18\xad\x19\xf6\x00\x14\x12\x7d\x42\x8b\x8c\x14\xee\x1c\xe3\x8b\x01\xd1\x8d\xce\xbe\x64\xe5\xda\x01\xf1\x7f\x58\x62\xe6\xd9\x44\x87\x01\x32\xc7\x3d\xb9\x58\x56\xa6\x16\x8a\x75\xd8\x48\xa8\xf7\x33\x94\x48\xf2\x64\x2e\x7b\x0e\x84\x72\xa6\x46\x73\x9e\x6e\xf2\x82\xf9\xda\xa8\x9f\x72\xa8\xb7\x74\x1d\x07\x33\x2b\x98\xdd\x12\x71\x67\x59\xf9\xa0\xb9\xfb\x49\x0d\xbe\xc2\x49\x61\x9f\x0e\x2d\x1a\x10\xb6\x4b\xa7\x50\x69\x10\x59\x13\xa2\xe1\x7b\x73\xf0\x1d\x3e\xb4\x95\x87\x4f\xd4\x71\x78\x73\xde\x3e\x40\x88\xea\x4d\xc0\xb8\xe3\xe1\x3e\xdd\xd8\xb3\x32\x04\xf5\xee\xb3\x09\x15\x7e\xa8\x1b\x77\x1f\xf7\xae\xff\x1d\x97\xb8\x08\x05\xf5\x7f\xbe\xe2\xe1\x66\x5c\x34\x4a\x29\xc3\x0f\xb7\xa6\x97\x03\x79\x0a\x20\x63\xe8\x75\xbc\xf9\x89\xa1\x5c\x54\x25\x05\x58\x1a\x28\x8b\xaf\x11\xba\xd6\x21\xaf\x10\x92\x87\xf4\xdf\x0f\xf7\x4f\xf8\x76\x40\x34\x53\xe1\xde\xfe\x19\xd2\x7e\x58\xf1\x25\x27\xdf\x70\x77\x71\xcf\x64\x5f\x41\x29\x31\xc2\xdb\xed\x9b\xe1\xb8\x29\x9c\x06\x9d\x10\x3d\x84\x23\x9c\x5c\x03\x5c\x74\xbc\xf8\x21\x6e\xd6\xe1\xf8\x85\x17\x0e\x48\xe9\x8c\xdf\x24\xa6\x0e\x0e\x9e\xc0\xa8\x5d\x1d\x4d\xbd\xa1\x7e\xb8\xa7\xbe\x07\x3d\xde\x7a\x77\x5e\x1c\x21\xa7\x88\x21\x48\x92\x7b\xf5\xcf\x17\x39\x44\xe2\xdb\x50\x5b\x1c\xff\xa0\xf5\x43\x2b\x7b\xfe\x6b\xa3\x16\xfa\x50\x23\xbc\xdb\x91\x97\x88\x8f\xf2\xa5\xf0\xa1\x2e\x3c\xc6\xda\x07\x48\x5c\x89\xa5\x7a\x56\xe9\xe7\x1a\x6a\x91\x1e\x5b\x0b\xfc\x35\xa4\x82\x81\x75\x66\x84\x32\x25\x71\xa8\x14\xfd\x47\xf9\x53\x80\xec\x24\x8a\x08\x9e\xce\x2c\x4d\xb6\xc2\xd1\x95\x75\x2f\xf6\xbc\x4f\xee\x3e\x6a\xdf\x13\x85\xb2\xee\x50\xea\xee\xc5\x26\x9e\x41\x84\x16\x37\xc9\xc4\x35\x98\x11\x5d\x4e\x88\x3d\x91\x41\x24\x39\xf9\xe4\xde\xa7\x89\x2f\xe7\x02\x34\x18\x2a\x71\x65\x54\x24\x0f\x34\x1b\x55\x3b\xbc\xa1\x01\x19\x37\x47\x6c\xf2\xb4\x63\xd6\x5a\xa4\xc1\xd4\x49\xf3\x31\x87\x5c\xbc\xcb\x3b\x36\x88\x34\x3c\x31\xf3\xfa\xca\x92\x6c\x43\x17\x6e\xdc\xdd\x66\x1e\x4a\x68\xea\x95\xf4\x35\x6b\x2f\x4b\x3a\x32\xa9\x66\x59\x13\x16\x00\x69\x15\x86\xe6\x25\x00\x4b\x7f\x8e\x97\x30\xeb\x8b\x81\x23\xe4\x22\xf3\x8b\x0e\x96\x21\xe8\x77\x87\x3f\x49\x76\xd0\x7c\xac\x21\x5b\x78\x85\xab\x70\x08\x96\x5c\x17\xf8\x31\xa3\x2d\x24\x1f\x53\x68\x2d\xb6\x17\x03\x2f\x28\x8f\x3c\xa4\x2a\x18\x42\x94\xd0\xa8\x5d\xdb\x3f\x53\xa8\x4d\xfe\xef\xb6\xe1\x39\x84\x1d\x93\xf4\x0c\xf7\x34\xdc\xac\xce\xf6\x91\x60\xe1\x26\xf1\x40\x1f\x36\x78\xf8\xd6
\xf8\x94\x86\xac\xcc\x9e\x21\x2b\x70\x1f\xa0\x31\xa9\x62\xc3\x7e\x36\x6d\x9f\xb6\xc1\xdb\x5a\x33\x7b\x33\xd8\x91\xfc\xf9\x02\xf2\x01\x9e\x7e\xd9\xcb\x25\x90\x76\x1d\x56\x7f\x9a\x8f\x52\x48\xcd\x7a\x33\x0b\x52\x20\x02\x4a\x1b\x14\xeb\x7d\x7b\xaa\x8a\x3c\xb2\x4d\x4e\x29\x1d\x34\xb9\x4a\xbc\x64\xcc\x51\x4f\x6d\x29\x76\xc7\x20\xd4\xb2\x6a\x40\x06\x79\x08\x12\xa5\x77\x3c\x13\x04\xad\xf4\xba\x46\x5a\xee\xaf\xd0\x7c\xab\x1d\x20\x9c\x03\xd7\xe0\x21\xcf\x2c\xab\xdf\x20\x45\x62\xf3\x23\xe5\x78\x92\x0f\x88\x3c\x48\x8d\x72\xd6\xbf\xe7\x78\x31\x13\xfb\x2a\xcc\x6c\xb8\xfd\xd3\x27\x8d\x1e\xdc\x85\x48\x08\x86\x23\xee\x05\xd9\xa3\x90\x02\xe1\xc6\x5c\xd4\x58\xfa\x1c\xcd\x06\xba\x63\xbc\x44\xfb\x44\x72\x53\x2e\x85\xf0\x86\xc4\x7c\x29\x3f\xa9\xb7\x6a\x66\x14\x0f\x80\x00\x65\x28\x4f\x62\x0a\x4b\x85\x58\x17\x28\xa1\xef\x62\x48\x5a\x4f\x19\xd2\x4b\x2d\xcf\xe6\xdd\x7e\x42\x30\x26\x7b\x79\x4e\xf7\xce\x5e\x21\x35\xb5\x52\xcc\x80\x45\x3e\x3d\x84\x47\x75\x48\xf5\xe6\xa9\xb6\x53\x1a\xea\x83\x2f\xdd\xf2\x9f\x5b\xd3\x5f\x33\x9c\x67\xfd\xbb\xd7\x45\x57\x49\x7e\x42\xdf\x80\xd7\x5a\xfa\xd8\x35\x2f\x8d\x5c\x73\x48\x43\x30\xd5\x0c\xa9\xbf\x99\xda\x6d\x28\x49\xd2\x7b\x60\xa0\x62\xff\x39\x94\xd6\xd9\x7f\x10\x7a\x84\x7a\xf6\x6f\xee\xb8\xe5\xb6\xea\x6f\xa5\xab\x18\xf0\x06\x6d\xa5\xcf\x3f\x50\x56\x2f\x29\xf0\x8e\xd2\x8d\x0d\x00\xa9\x9f\xd9\x84\x59\x2a\x8b\xb7\x35\x3c\x9f\xcd\xf7\x66\x5e\x6f\x4a\xca\x84\x7e\xa4\x9f\xe2\x94\x2d\xd1\x43\x31\xae\xf0\x2c\x3f\x71\x2d\xc0\x95\x19\xd6\x80\xa1\x21\x59\xef\x36\x83\x80\xb2\x3c\xd5\x05\x1b\xed\x86\x99\x9e\x67\x72\x77\x23\x88\x93\xa8\x52\x07\x53\xb7\x0a\xc4\x60\xd1\x3b\x61\x83\xe4\xf8\x40\xb0\x20\xd5\x4a\x48\xa6\x90\x62\xa1\xaf\xdf\x6f\x96\xde\xe2\xcb\xbb\x09\x2a\x4e\xc6\xda\x31\xb6\x7a\x05\x2e\x19\x8e\xfc\xc6\x1e\x22\x00\xb3\x64\x14\xef\x58\x2d\xb3\xd6\x70\x84\xb4\xe3\x39\x81\x40\xc1\xa1\x8d\xb9\x14\x2d\xe9\xab\xd2\x84\xc0\x5b\x51\x29\x52\x07\x99\x49\x5e\xc8\x97\x9a\x43\x53\xb1\xd6\x2d\x02\xbe\x6c\x42\x1d\xd1\x21\x14\x94\xd4\x15\xa0\xc7\xc1\x3b\xae\x5b\x88\x65\x6e\x87\xa4\x28\xb1\x5c\xa8\x5f\x04\xff\x30\x1c\x63\x12\xd7\xc4\x4a\x36\x91\x52\x95\x0d\x34\x3d\xcd\xc5\x66\xc5\xaa\xe5\x22\x82\x0d\x50\xa5\xbd\xa4\x46\x57\x17\x09\x50\x16\x5d\x60\x9a\xf4\xc3\xbc\x49\x44\x32\x28\xfb\x7b\xd2\x77\xc2\x08\x56\x80\xb9\x43\xbe\x55\x5f\xb2\x0f\xfd\x49\x6a\xf0\x80\xe4\x4a\x34\xb3\xfd\x82\xd5\xd6\x8d\xc8\x5f\xcf\xa2\x47\xb7\x80\x62\x44\x69\x2c\xfd\xd2\x84\x19\x97\x18\x40\x30\xc1\x49\x43\xd3\x93\x0c\x41\x44\xb3\x5a\xd9\x44\x8b\x34\x5d\x9b\x40\x24\xf7\x42\x23\xac\x49\x47\xaa\x73\x05\x18\x14\x37\x0f\x71\x48\xc5\xc3\xf6\x33\x69\x73\xac\xaf\x3a\xa4\xea\x58\xec\x49\xf9\xb4\x08\xa2\x8f\x89\xa9\x5f\x98\xbc\xd1\x42\x75\x62\x7c\x21\x3e\xf7\xd9\x17\x95\x1f\x92\x8c\x4c\x1a\x6d\x81\xd2\xb4\x71\x60\x76\xa9\xc8\xa2\xdd\xfb\x7e\x4b\xa2\x12\xbd\x89\x4e\xb1\xc6\x24\x0d\xe4\x0c\xf7\xce\x75\x05\xe0\x31\xf6\x8e\x63\x9c\x6a\x90\x96\x25\xf4\x47\x42\x1c\x35\x9f\x21\x75\xe8\x4f\xb1\x0a\x49\xfe\x35\xab\x2b\xed\x7d\xf1\x92\xff\xdc\x94\x1d\xd7\x43\x27\x4a\x2a\x89\xd0\xbe\x08\xf9\xc7\x97\x74\x11\x41\xb2\x1c\x4a\x87\x27\x73\x2d\x7b\x10\x46\x95\x3e\x7e\x05\x26\xd2\x1c\xaa\x92\x21\x1a\x09\x22\xcc\x90\x28\x3c\x29\xfb\xa5\x8d\xf3\x8a\xd4\x51\x17\xa8\x44\xba\x2c\xf1\xae\xda\x26\x91\xa7\x01\x24\xa9\x28\xd2\x50\xf3\xd1\xe3\xd4\xe1\x89\xaa\x89\x8d\xcb\x1a\x39\x24\xf1\x49\x23\xe1\x61\xe2\xda\x09\xd2\x49\xf8\x94\x93\xf2\x6e\x7b\xef\xf7\x6b\x20\xc9\x3e\x1e\xbb\x27\x87\xe9\x4e\x8f\x75\x89\x1f\x85\x67\xcb\xe1\x19\xb0\x0c\x43\x11\xea\xeb\xa8\xeb\x4b\x1b\x92\xf2\x20\x28\xef\x26\x82\xd9\x69\x97\x8f\x5b\xbc\xc6\xee\xc9\x54\x25\x70\x12\x10\xc1\xbb\xc1\xa6\x4
8\x79\xf3\xbc\xdb\x25\x31\xcc\x2d\x64\x72\x95\x71\x6a\xe5\x64\xc6\x63\xe5\x57\x61\xc3\xd1\xae\xe3\x86\xf6\x40\xcc\xae\xd0\xd9\xd6\x9d\x47\x21\xe7\x91\x20\x1e\xc6\xda\xba\x29\x74\xd7\xda\x97\x8e\xa6\x73\x99\xf2\x4c\xb4\xbc\x42\x7f\x93\x92\xb5\x84\xc3\x4f\xef\xd5\x46\xbb\x3b\xf4\x2f\xfb\x38\xb6\x48\x7f\xb2\x17\xc7\x5c\x8f\x40\x7c\x0a\xbf\xa6\xae\x5e\x4c\xda\x31\xc8\x65\xe5\xee\x13\x22\x94\x5f\xea\xca\xfd\x59\x83\x83\x94\xa5\xf4\xeb\xee\x93\x46\x06\x44\x2d\xeb\xab\xf0\xc3\xb1\x8d\x4e\xb2\x93\xa7\xc4\xb3\x6e\x70\x7d\xf5\x9c\xa4\xda\x68\x7e\x35\x85\x4b\xc9\xb3\x00\x29\x2d\x1d\x50\x27\x67\x28\x75\x77\x87\x88\xe5\x31\x85\x24\xd8\xed\x4e\x77\xa1\x12\x87\x83\x57\x57\xd7\xdb\xf8\x25\x18\x08\xe5\xcb\x6d\x90\x0a\xe4\xbd\xea\x8e\x5c\xf3\x84\xa8\x34\x31\xcc\x45\x35\xc3\xa9\xb8\x07\x9c\x38\xfa\xa7\x99\xdb\x7c\x3a\xb7\xce\x4e\xa7\x5b\x67\x33\xdf\xbf\x72\x9a\x12\xc1\x3b\x3c\x84\xcd\x8a\x21\xb4\x33\x3f\x52\x53\x3b\xee\xed\x7c\x71\x01\x1b\x38\x94\xef\x40\x2c\xcb\x7b\x31\x14\xad\x01\xf2\x99\x7a\x27\xc7\xcd\x02\x6a\xcf\x89\xa8\xc9\x8b\x82\x7b\xeb\x43\x50\x9f\xfc\xc0\x0d\x7a\x52\x21\x61\x98\x89\x6e\xf9\xfd\xb4\x3b\x9d\x0d\x8b\x1d\x0b\x54\xb2\xf8\x02\x9d\x21\x27\x50\x9f\x7f\x55\x29\x10\x51\xf0\x8f\xa6\x73\x6a\x8a\x4b\xbf\x63\x07\x04\x30\x68\x8f\x2b\x63\xd2\x8f\xdb\xe1\xca\x71\x8b\x52\xff\x46\xf8\x9c\x6a\x80\x14\xe4\x9f\xa2\xe6\x65\xa3\x1a\x63\x8b\x7d\xab\x93\x32\xe1\xd1\x22\x5d\x00\xb7\xd0\x05\x85\x26\x7b\x3b\x8f\x3c\xf7\x1d\x18\xa1\x36\x01\x4e\x8e\x3d\x13\x6d\x10\x78\x86\x16\x69\xfa\x96\x40\x9f\x96\xfc\x00\x7d\xce\x55\x25\x8b\x50\x5b\x75\x42\x7b\xfd\x6b\x6b\x4a\x61\xd8\xe8\xd3\x15\x50\x29\xbf\xdd\xc1\x74\x8c\xce\x55\xb1\x59\xd7\xd4\x94\x4b\x81\x36\x19\xb5\xee\x1b\x8b\xfd\x5a\x1f\xef\x10\x49\xb5\x11\x78\x27\x0e\x21\xcf\x2d\x54\x3a\xdf\xd2\x38\x84\xd3\xe2\xd2\xf1\xf7\xe5\x0c\x2c\x00\xf3\x8f\xf9\x02\xca\xf7\x67\xf9\xdf\x1d\xff\x3b\x1e\x95\x6e\x08\xe0\xac\x9d\xd0\x69\xe8\x75\x34\xeb\x81\xc8\xfc\x22\x62\x00\x6e\xf0\xdf\x21\x57\x5a\xc1\x02\xed\x7d\x11\x02\x39\x55\x86\xd0\xe8\xf1\x61\xe5\x0e\x15\x12\x03\xb5\x4b\xf3\xb7\xc7\xc4\x4e\xa0\x1e\x89\x55\xaf\xfa\x52\xab\x04\x0f\xeb\xe6\x33\xab\x08\x68\xef\x6a\x48\x43\x75\x10\x55\xe3\xd9\xb6\xa4\x29\xba\x6b\x87\x52\x35\x4f\x64\xfa\x5d\x0d\x51\xbe\x16\x7a\xe5\x88\x65\x61\x6d\x4b\xd0\xe1\x19\x94\x1a\xa9\xe2\x01\xa0\x3d\x7a\x48\xc3\xa2\xab\x13\x62\xd0\x42\x30\x35\x9f\xfb\xb8\xdc\x93\xb0\x2e\xa6\x0e\xa0\x86\xea\xe6\xef\x33\xf3\xe5\x61\x9e\xdb\xb5\x66\xb3\x1e\xde\x10\x42\xa8\x7d\x84\xab\x1b\xec\x79\x6d\x26\x19\x41\xa2\x54\xe5\xcc\xdf\xa4\x81\x91\x66\x1d\xb2\x99\x58\x86\x0a\x29\x65\x11\xe0\xac\x55\xeb\x5a\x62\x7c\x87\xfa\x12\x13\x82\x0d\x35\x39\xd4\x05\xd1\x38\x3c\x57\xd2\xa7\x88\x60\x6c\x8e\x84\x2e\x70\x4a\x85\x4c\x28\x9a\xea\xcb\x7a\xd4\xe3\x19\x11\x24\xdd\x83\x4c\x2a\x5d\xf9\xba\xa7\xac\x43\x48\x4c\x25\xa0\xb3\xcb\x40\x6a\xad\xa7\x78\x9c\x9e\x15\x14\xfa\xea\x8a\xa4\x1c\x6d\xf2\x72\x25\x0f\xf1\x07\x94\x8a\xa1\x3a\x35\xfc\x98\x8f\xb0\xfc\x51\x35\x8e\x6c\x1f\x0a\xa2\x3a\x98\x34\xdb\x34\x44\x67\x0b\x54\x50\xe5\x45\xec\x4a\x04\xa0\x74\x66\x9d\xf2\x8b\x2e\x69\x54\x1b\x9b\x13\x73\x13\x3b\x7a\x26\xba\xfc\x61\x63\x0c\x57\x4e\x07\x56\x0d\xaf\x9d\xf5\x45\xfe\x8b\x1c\x4a\xae\xf6\xa4\x3c\x1c\x49\x1b\xb3\xf4\x50\xaf\x18\x79\x9a\x38\x33\x32\xbb\x6d\x06\x20\xc8\x79\x2b\xd2\xc4\x62\x98\x43\xa2\xd0\x25\x90\x9a\x12\xab\xcd\x38\x0b\xe1\xb3\xc9\xaf\x42\xb1\x87\xad\x1d\x3c\x2a\x92\x26\x85\xe6\x6e\xd6\x01\x64\x6b\x80\x68\xeb\x97\xdd\x5d\x3e\x98\xf2\xdf\x65\x24\xa1\xa9\xae\x1b\x96\x63\x89\xe6\x54\x24\xd3\x6a\x56\x1a\xdf\xbc\xc1\x90\x44\x5d\x99\x87\xd1\x99\x11\x92\x74\xf7\xcb\x22\x5b\x4a\xca\xaa\x01\x43\x
f6\xd4\x90\xbb\x65\x5c\x61\xb5\x1c\xf1\x23\x9b\x59\x86\x8c\x69\x7c\x0c\x59\x51\xa7\x5d\xe6\x2c\xa4\xcb\xb3\x9c\xd1\x90\x54\x65\xef\x94\x07\x9d\x0c\xc3\x46\x00\x2f\x43\x1c\x34\x43\x8f\x32\x85\x84\xac\x6d\x68\x6c\xcf\x69\x68\x75\x6a\xfd\x6a\xfa\x4a\x73\xf3\x73\xba\x03\xe3\xff\xff\x3f\xff\xdf\xff\x0d\x00\x00\xff\xff\x9b\xc6\xef\xe7\x7d\x0c\x06\x00") + +func dataSurnamesJsonBytes() ([]byte, error) { + return bindataRead( + _dataSurnamesJson, + "data/Surnames.json", + ) +} + +func dataSurnamesJson() (*asset, error) { + bytes, err := dataSurnamesJsonBytes() + if err != nil { + return nil, err + } + + info := bindataFileInfo{name: "data/Surnames.json", size: 396413, mode: os.FileMode(420), modTime: time.Unix(1452717629, 0)} + a := &asset{bytes: bytes, info: info} + return a, nil +} + +// Asset loads and returns the asset for the given name. +// It returns an error if the asset could not be found or +// could not be loaded. +func Asset(name string) ([]byte, error) { + cannonicalName := strings.Replace(name, "\\", "/", -1) + if f, ok := _bindata[cannonicalName]; ok { + a, err := f() + if err != nil { + return nil, fmt.Errorf("Asset %s can't read by error: %v", name, err) + } + return a.bytes, nil + } + return nil, fmt.Errorf("Asset %s not found", name) +} + +// MustAsset is like Asset but panics when Asset would return an error. +// It simplifies safe initialization of global variables. +func MustAsset(name string) []byte { + a, err := Asset(name) + if err != nil { + panic("asset: Asset(" + name + "): " + err.Error()) + } + + return a +} + +// AssetInfo loads and returns the asset info for the given name. +// It returns an error if the asset could not be found or +// could not be loaded. +func AssetInfo(name string) (os.FileInfo, error) { + cannonicalName := strings.Replace(name, "\\", "/", -1) + if f, ok := _bindata[cannonicalName]; ok { + a, err := f() + if err != nil { + return nil, fmt.Errorf("AssetInfo %s can't read by error: %v", name, err) + } + return a.info, nil + } + return nil, fmt.Errorf("AssetInfo %s not found", name) +} + +// AssetNames returns the names of the assets. +func AssetNames() []string { + names := make([]string, 0, len(_bindata)) + for name := range _bindata { + names = append(names, name) + } + return names +} + +// _bindata is a table, holding each asset generator, mapped to its name. +var _bindata = map[string]func() (*asset, error){ + "data/Dvorak.json": dataDvorakJson, + "data/English.json": dataEnglishJson, + "data/FemaleNames.json": dataFemalenamesJson, + "data/Keypad.json": dataKeypadJson, + "data/L33t.json": dataL33tJson, + "data/MacKeypad.json": dataMackeypadJson, + "data/MaleNames.json": dataMalenamesJson, + "data/Passwords.json": dataPasswordsJson, + "data/Qwerty.json": dataQwertyJson, + "data/Surnames.json": dataSurnamesJson, +} + +// AssetDir returns the file names below a certain +// directory embedded in the file by go-bindata. +// For example if you run go-bindata on data/... and data contains the +// following hierarchy: +// data/ +// foo.txt +// img/ +// a.png +// b.png +// then AssetDir("data") would return []string{"foo.txt", "img"} +// AssetDir("data/img") would return []string{"a.png", "b.png"} +// AssetDir("foo.txt") and AssetDir("notexist") would return an error +// AssetDir("") will return []string{"data"}. 
+func AssetDir(name string) ([]string, error) { + node := _bintree + if len(name) != 0 { + cannonicalName := strings.Replace(name, "\\", "/", -1) + pathList := strings.Split(cannonicalName, "/") + for _, p := range pathList { + node = node.Children[p] + if node == nil { + return nil, fmt.Errorf("Asset %s not found", name) + } + } + } + if node.Func != nil { + return nil, fmt.Errorf("Asset %s not found", name) + } + rv := make([]string, 0, len(node.Children)) + for childName := range node.Children { + rv = append(rv, childName) + } + return rv, nil +} + +type bintree struct { + Func func() (*asset, error) + Children map[string]*bintree +} + +var _bintree = &bintree{nil, map[string]*bintree{ + "data": &bintree{nil, map[string]*bintree{ + "Dvorak.json": &bintree{dataDvorakJson, map[string]*bintree{}}, + "English.json": &bintree{dataEnglishJson, map[string]*bintree{}}, + "FemaleNames.json": &bintree{dataFemalenamesJson, map[string]*bintree{}}, + "Keypad.json": &bintree{dataKeypadJson, map[string]*bintree{}}, + "L33t.json": &bintree{dataL33tJson, map[string]*bintree{}}, + "MacKeypad.json": &bintree{dataMackeypadJson, map[string]*bintree{}}, + "MaleNames.json": &bintree{dataMalenamesJson, map[string]*bintree{}}, + "Passwords.json": &bintree{dataPasswordsJson, map[string]*bintree{}}, + "Qwerty.json": &bintree{dataQwertyJson, map[string]*bintree{}}, + "Surnames.json": &bintree{dataSurnamesJson, map[string]*bintree{}}, + }}, +}} + +// RestoreAsset restores an asset under the given directory +func RestoreAsset(dir, name string) error { + data, err := Asset(name) + if err != nil { + return err + } + info, err := AssetInfo(name) + if err != nil { + return err + } + err = os.MkdirAll(_filePath(dir, filepath.Dir(name)), os.FileMode(0755)) + if err != nil { + return err + } + err = ioutil.WriteFile(_filePath(dir, name), data, info.Mode()) + if err != nil { + return err + } + err = os.Chtimes(_filePath(dir, name), info.ModTime(), info.ModTime()) + if err != nil { + return err + } + return nil +} + +// RestoreAssets restores an asset under the given directory recursively +func RestoreAssets(dir, name string) error { + children, err := AssetDir(name) + // File + if err != nil { + return RestoreAsset(dir, name) + } + // Dir + for _, child := range children { + err = RestoreAssets(dir, filepath.Join(name, child)) + if err != nil { + return err + } + } + return nil +} + +func _filePath(dir, name string) string { + cannonicalName := strings.Replace(name, "\\", "/", -1) + return filepath.Join(append([]string{dir}, strings.Split(cannonicalName, "/")...)...) 
+} diff --git a/vendor/github.com/nbutton23/zxcvbn-go/entropy/entropyCalculator.go b/vendor/github.com/nbutton23/zxcvbn-go/entropy/entropyCalculator.go new file mode 100644 index 000000000..8f57ea0a4 --- /dev/null +++ b/vendor/github.com/nbutton23/zxcvbn-go/entropy/entropyCalculator.go @@ -0,0 +1,216 @@ +package entropy + +import ( + "github.com/nbutton23/zxcvbn-go/adjacency" + "github.com/nbutton23/zxcvbn-go/match" + "github.com/nbutton23/zxcvbn-go/utils/math" + "math" + "regexp" + "unicode" +) + +const ( + numYears = float64(119) // years match against 1900 - 2019 + numMonths = float64(12) + numDays = float64(31) +) + +var ( + startUpperRx = regexp.MustCompile(`^[A-Z][^A-Z]+$`) + endUpperRx = regexp.MustCompile(`^[^A-Z]+[A-Z]$'`) + allUpperRx = regexp.MustCompile(`^[A-Z]+$`) + keyPadStartingPositions = len(adjacency.GraphMap["keypad"].Graph) + keyPadAvgDegree = adjacency.GraphMap["keypad"].CalculateAvgDegree() +) + +// DictionaryEntropy calculates the entropy of a dictionary match +func DictionaryEntropy(match match.Match, rank float64) float64 { + baseEntropy := math.Log2(rank) + upperCaseEntropy := extraUpperCaseEntropy(match) + //TODO: L33t + return baseEntropy + upperCaseEntropy +} + +func extraUpperCaseEntropy(match match.Match) float64 { + word := match.Token + + allLower := true + + for _, char := range word { + if unicode.IsUpper(char) { + allLower = false + break + } + } + if allLower { + return float64(0) + } + + //a capitalized word is the most common capitalization scheme, + //so it only doubles the search space (uncapitalized + capitalized): 1 extra bit of entropy. + //allcaps and end-capitalized are common enough too, underestimate as 1 extra bit to be safe. + + for _, matcher := range []*regexp.Regexp{startUpperRx, endUpperRx, allUpperRx} { + if matcher.MatchString(word) { + return float64(1) + } + } + //Otherwise calculate the number of ways to capitalize U+L uppercase+lowercase letters with U uppercase letters or + //less. Or, if there's more uppercase than lower (for e.g. PASSwORD), the number of ways to lowercase U+L letters + //with L lowercase letters or less. + + countUpper, countLower := float64(0), float64(0) + for _, char := range word { + if unicode.IsUpper(char) { + countUpper++ + } else if unicode.IsLower(char) { + countLower++ + } + } + totalLenght := countLower + countUpper + var possibililities float64 + + for i := float64(0); i <= math.Min(countUpper, countLower); i++ { + possibililities += float64(zxcvbnmath.NChoseK(totalLenght, i)) + } + + if possibililities < 1 { + return float64(1) + } + + return float64(math.Log2(possibililities)) +} + +// SpatialEntropy calculates the entropy for spatial matches +func SpatialEntropy(match match.Match, turns int, shiftCount int) float64 { + var s, d float64 + if match.DictionaryName == "qwerty" || match.DictionaryName == "dvorak" { + //todo: verify qwerty and dvorak have the same length and degree + s = float64(len(adjacency.BuildQwerty().Graph)) + d = adjacency.BuildQwerty().CalculateAvgDegree() + } else { + s = float64(keyPadStartingPositions) + d = keyPadAvgDegree + } + + possibilities := float64(0) + + length := float64(len(match.Token)) + + //TODO: Should this be <= or just < ? 
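+	// NChoseK(i-1, j-1) * s * d^j is added for each candidate length i and turn count j in the
+	// loops below, where s is the number of starting positions and d the average degree of the
+	// keyboard graph (a reading of the loop that follows, added for clarity).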
+ //Estimate the number of possible patterns w/ length L or less with t turns or less + for i := float64(2); i <= length+1; i++ { + possibleTurns := math.Min(float64(turns), i-1) + for j := float64(1); j <= possibleTurns+1; j++ { + x := zxcvbnmath.NChoseK(i-1, j-1) * s * math.Pow(d, j) + possibilities += x + } + } + + entropy := math.Log2(possibilities) + //add extra entropu for shifted keys. ( % instead of 5 A instead of a) + //Math is similar to extra entropy for uppercase letters in dictionary matches. + + if S := float64(shiftCount); S > float64(0) { + possibilities = float64(0) + U := length - S + + for i := float64(0); i < math.Min(S, U)+1; i++ { + possibilities += zxcvbnmath.NChoseK(S+U, i) + } + + entropy += math.Log2(possibilities) + } + + return entropy +} + +// RepeatEntropy calculates the entropy for repeating entropy +func RepeatEntropy(match match.Match) float64 { + cardinality := CalcBruteForceCardinality(match.Token) + entropy := math.Log2(cardinality * float64(len(match.Token))) + + return entropy +} + +// CalcBruteForceCardinality calculates the brute force cardinality +//TODO: Validate against python +func CalcBruteForceCardinality(password string) float64 { + lower, upper, digits, symbols := float64(0), float64(0), float64(0), float64(0) + + for _, char := range password { + if unicode.IsLower(char) { + lower = float64(26) + } else if unicode.IsDigit(char) { + digits = float64(10) + } else if unicode.IsUpper(char) { + upper = float64(26) + } else { + symbols = float64(33) + } + } + + cardinality := lower + upper + digits + symbols + return cardinality +} + +// SequenceEntropy calculates the entropy for sequences such as 4567 or cdef +func SequenceEntropy(match match.Match, dictionaryLength int, ascending bool) float64 { + firstChar := match.Token[0] + baseEntropy := float64(0) + if string(firstChar) == "a" || string(firstChar) == "1" { + baseEntropy = float64(0) + } else { + baseEntropy = math.Log2(float64(dictionaryLength)) + //TODO: should this be just the first or any char? + if unicode.IsUpper(rune(firstChar)) { + baseEntropy++ + } + } + + if !ascending { + baseEntropy++ + } + return baseEntropy + math.Log2(float64(len(match.Token))) +} + +// ExtraLeetEntropy calulates the added entropy provied by l33t substitustions +func ExtraLeetEntropy(match match.Match, password string) float64 { + var subsitutions float64 + var unsub float64 + subPassword := password[match.I:match.J] + for index, char := range subPassword { + if string(char) != string(match.Token[index]) { + subsitutions++ + } else { + //TODO: Make this only true for 1337 chars that are not subs? 
+ unsub++ + } + } + + var possibilities float64 + + for i := float64(0); i <= math.Min(subsitutions, unsub)+1; i++ { + possibilities += zxcvbnmath.NChoseK(subsitutions+unsub, i) + } + + if possibilities <= 1 { + return float64(1) + } + return math.Log2(possibilities) +} + +// DateEntropy calculates the entropy provided by a date +func DateEntropy(dateMatch match.DateMatch) float64 { + var entropy float64 + if dateMatch.Year < 100 { + entropy = math.Log2(numDays * numMonths * 100) + } else { + entropy = math.Log2(numDays * numMonths * numYears) + } + + if dateMatch.Separator != "" { + entropy += 2 //add two bits for separator selection [/,-,.,etc] + } + return entropy +} diff --git a/vendor/github.com/nbutton23/zxcvbn-go/frequency/frequency.go b/vendor/github.com/nbutton23/zxcvbn-go/frequency/frequency.go new file mode 100644 index 000000000..d056e4d4e --- /dev/null +++ b/vendor/github.com/nbutton23/zxcvbn-go/frequency/frequency.go @@ -0,0 +1,50 @@ +package frequency + +import ( + "encoding/json" + "log" + + "github.com/nbutton23/zxcvbn-go/data" +) + +// List holds a frequency list +type List struct { + Name string + List []string +} + +// Lists holds all the frequency list in a map +var Lists = make(map[string]List) + +func init() { + maleFilePath := getAsset("data/MaleNames.json") + femaleFilePath := getAsset("data/FemaleNames.json") + surnameFilePath := getAsset("data/Surnames.json") + englishFilePath := getAsset("data/English.json") + passwordsFilePath := getAsset("data/Passwords.json") + + Lists["MaleNames"] = getStringListFromAsset(maleFilePath, "MaleNames") + Lists["FemaleNames"] = getStringListFromAsset(femaleFilePath, "FemaleNames") + Lists["Surname"] = getStringListFromAsset(surnameFilePath, "Surname") + Lists["English"] = getStringListFromAsset(englishFilePath, "English") + Lists["Passwords"] = getStringListFromAsset(passwordsFilePath, "Passwords") + +} +func getAsset(name string) []byte { + data, err := data.Asset(name) + if err != nil { + panic("Error getting asset " + name) + } + + return data +} +func getStringListFromAsset(data []byte, name string) List { + + var tempList List + err := json.Unmarshal(data, &tempList) + if err != nil { + log.Fatal(err) + } + tempList.Name = name + return tempList +} diff --git a/vendor/github.com/nbutton23/zxcvbn-go/go.mod b/vendor/github.com/nbutton23/zxcvbn-go/go.mod new file mode 100644 index 000000000..61b9a67ea --- /dev/null +++ b/vendor/github.com/nbutton23/zxcvbn-go/go.mod @@ -0,0 +1,9 @@ +module github.com/nbutton23/zxcvbn-go + +go 1.14 + +require ( + github.com/davecgh/go-spew v1.1.0 + github.com/pmezard/go-difflib v1.0.0 + github.com/stretchr/testify v1.1.4 +) diff --git a/vendor/github.com/nbutton23/zxcvbn-go/go.sum b/vendor/github.com/nbutton23/zxcvbn-go/go.sum new file mode 100644 index 000000000..656d00476 --- /dev/null +++ b/vendor/github.com/nbutton23/zxcvbn-go/go.sum @@ -0,0 +1,5 @@ +github.com/davecgh/go-spew v1.1.0 h1:ZDRjVQ15GmhC3fiQ8ni8+OwkZQO4DARzQgrnXU1Liz8= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/stretchr/testify v1.1.4 h1:ToftOQTytwshuOSj6bDSolVUa3GINfJP/fg3OkkOzQQ= +github.com/stretchr/testify v1.1.4/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= diff --git a/vendor/github.com/nbutton23/zxcvbn-go/match/match.go b/vendor/github.com/nbutton23/zxcvbn-go/match/match.go new file mode 100644 index 000000000..dd30bea04 --- /dev/null +++ 
b/vendor/github.com/nbutton23/zxcvbn-go/match/match.go
@@ -0,0 +1,44 @@
+package match
+
+//Matches is an alias for []Match used for sorting
+type Matches []Match
+
+func (s Matches) Len() int {
+	return len(s)
+}
+func (s Matches) Swap(i, j int) {
+	s[i], s[j] = s[j], s[i]
+}
+func (s Matches) Less(i, j int) bool {
+	if s[i].I < s[j].I {
+		return true
+	} else if s[i].I == s[j].I {
+		return s[i].J < s[j].J
+	} else {
+		return false
+	}
+}
+
+// Match represents different matches
+type Match struct {
+	Pattern        string
+	I, J           int
+	Token          string
+	DictionaryName string
+	Entropy        float64
+}
+
+//DateMatch is specifically a match for type date
+type DateMatch struct {
+	Pattern          string
+	I, J             int
+	Token            string
+	Separator        string
+	Day, Month, Year int64
+}
+
+//Matcher is a func and ID that can be used to match different passwords
+type Matcher struct {
+	MatchingFunc func(password string) []Match
+	ID           string
+}
diff --git a/vendor/github.com/nbutton23/zxcvbn-go/matching/dateMatchers.go b/vendor/github.com/nbutton23/zxcvbn-go/matching/dateMatchers.go
new file mode 100644
index 000000000..8dfdf2410
--- /dev/null
+++ b/vendor/github.com/nbutton23/zxcvbn-go/matching/dateMatchers.go
@@ -0,0 +1,209 @@
+package matching
+
+import (
+	"regexp"
+	"strconv"
+	"strings"
+
+	"github.com/nbutton23/zxcvbn-go/entropy"
+	"github.com/nbutton23/zxcvbn-go/match"
+)
+
+const (
+	dateSepMatcherName        = "DATESEP"
+	dateWithOutSepMatcherName = "DATEWITHOUT"
+)
+
+var (
+	dateRxYearSuffix    = regexp.MustCompile(`((\d{1,2})(\s|-|\/|\\|_|\.)(\d{1,2})(\s|-|\/|\\|_|\.)(19\d{2}|200\d|201\d|\d{2}))`)
+	dateRxYearPrefix    = regexp.MustCompile(`((19\d{2}|200\d|201\d|\d{2})(\s|-|/|\\|_|\.)(\d{1,2})(\s|-|/|\\|_|\.)(\d{1,2}))`)
+	dateWithOutSepMatch = regexp.MustCompile(`\d{4,8}`)
+)
+
+//FilterDateSepMatcher can be passed to zxcvbn-go.PasswordStrength to skip that matcher
+func FilterDateSepMatcher(m match.Matcher) bool {
+	return m.ID == dateSepMatcherName
+}
+
+//FilterDateWithoutSepMatcher can be passed to zxcvbn-go.PasswordStrength to skip that matcher
+func FilterDateWithoutSepMatcher(m match.Matcher) bool {
+	return m.ID == dateWithOutSepMatcherName
+}
+
+func checkDate(day, month, year int64) (bool, int64, int64, int64) {
+	if (12 <= month && month <= 31) && day <= 12 {
+		day, month = month, day
+	}
+
+	if day > 31 || month > 12 {
+		return false, 0, 0, 0
+	}
+
+	if !((1900 <= year && year <= 2019) || (0 <= year && year <= 99)) {
+		return false, 0, 0, 0
+	}
+
+	return true, day, month, year
+}
+
+func dateSepMatcher(password string) []match.Match {
+	dateMatches := dateSepMatchHelper(password)
+
+	var matches []match.Match
+	for _, dateMatch := range dateMatches {
+		match := match.Match{
+			I:              dateMatch.I,
+			J:              dateMatch.J,
+			Entropy:        entropy.DateEntropy(dateMatch),
+			DictionaryName: "date_match",
+			Token:          dateMatch.Token,
+		}
+
+		matches = append(matches, match)
+	}
+
+	return matches
+}
+func dateSepMatchHelper(password string) []match.DateMatch {
+
+	var matches []match.DateMatch
+
+	for _, v := range dateRxYearSuffix.FindAllString(password, len(password)) {
+		splitV := dateRxYearSuffix.FindAllStringSubmatch(v, len(v))
+		i := strings.Index(password, v)
+		j := i + len(v)
+		day, _ := strconv.ParseInt(splitV[0][4], 10, 16)
+		month, _ := strconv.ParseInt(splitV[0][2], 10, 16)
+		year, _ := strconv.ParseInt(splitV[0][6], 10, 16)
+		match := match.DateMatch{Day: day, Month: month, Year: year, Separator: splitV[0][5], I: i, J: j, Token: password[i:j]}
+		matches = append(matches, match)
+	}
+
+	for _, v := range 
dateRxYearPrefix.FindAllString(password, len(password)) { + splitV := dateRxYearPrefix.FindAllStringSubmatch(v, len(v)) + i := strings.Index(password, v) + j := i + len(v) + day, _ := strconv.ParseInt(splitV[0][4], 10, 16) + month, _ := strconv.ParseInt(splitV[0][6], 10, 16) + year, _ := strconv.ParseInt(splitV[0][2], 10, 16) + match := match.DateMatch{Day: day, Month: month, Year: year, Separator: splitV[0][5], I: i, J: j, Token: password[i:j]} + matches = append(matches, match) + } + + var out []match.DateMatch + for _, match := range matches { + if valid, day, month, year := checkDate(match.Day, match.Month, match.Year); valid { + match.Pattern = "date" + match.Day = day + match.Month = month + match.Year = year + out = append(out, match) + } + } + return out + +} + +type dateMatchCandidate struct { + DayMonth string + Year string + I, J int +} + +type dateMatchCandidateTwo struct { + Day string + Month string + Year string + I, J int +} + +func dateWithoutSepMatch(password string) []match.Match { + dateMatches := dateWithoutSepMatchHelper(password) + + var matches []match.Match + for _, dateMatch := range dateMatches { + match := match.Match{ + I: dateMatch.I, + J: dateMatch.J, + Entropy: entropy.DateEntropy(dateMatch), + DictionaryName: "date_match", + Token: dateMatch.Token, + } + + matches = append(matches, match) + } + + return matches +} + +//TODO Has issues with 6 digit dates +func dateWithoutSepMatchHelper(password string) (matches []match.DateMatch) { + for _, v := range dateWithOutSepMatch.FindAllString(password, len(password)) { + i := strings.Index(password, v) + j := i + len(v) + length := len(v) + lastIndex := length - 1 + var candidatesRoundOne []dateMatchCandidate + + if length <= 6 { + //2-digit year prefix + candidatesRoundOne = append(candidatesRoundOne, buildDateMatchCandidate(v[2:], v[0:2], i, j)) + + //2-digityear suffix + candidatesRoundOne = append(candidatesRoundOne, buildDateMatchCandidate(v[0:lastIndex-2], v[lastIndex-2:], i, j)) + } + if length >= 6 { + //4-digit year prefix + candidatesRoundOne = append(candidatesRoundOne, buildDateMatchCandidate(v[4:], v[0:4], i, j)) + + //4-digit year sufix + candidatesRoundOne = append(candidatesRoundOne, buildDateMatchCandidate(v[0:lastIndex-3], v[lastIndex-3:], i, j)) + } + + var candidatesRoundTwo []dateMatchCandidateTwo + for _, c := range candidatesRoundOne { + if len(c.DayMonth) == 2 { + candidatesRoundTwo = append(candidatesRoundTwo, buildDateMatchCandidateTwo(c.DayMonth[0:0], c.DayMonth[1:1], c.Year, c.I, c.J)) + } else if len(c.DayMonth) == 3 { + candidatesRoundTwo = append(candidatesRoundTwo, buildDateMatchCandidateTwo(c.DayMonth[0:2], c.DayMonth[2:2], c.Year, c.I, c.J)) + candidatesRoundTwo = append(candidatesRoundTwo, buildDateMatchCandidateTwo(c.DayMonth[0:0], c.DayMonth[1:3], c.Year, c.I, c.J)) + } else if len(c.DayMonth) == 4 { + candidatesRoundTwo = append(candidatesRoundTwo, buildDateMatchCandidateTwo(c.DayMonth[0:2], c.DayMonth[2:4], c.Year, c.I, c.J)) + } + } + + for _, candidate := range candidatesRoundTwo { + intDay, err := strconv.ParseInt(candidate.Day, 10, 16) + if err != nil { + continue + } + + intMonth, err := strconv.ParseInt(candidate.Month, 10, 16) + + if err != nil { + continue + } + + intYear, err := strconv.ParseInt(candidate.Year, 10, 16) + if err != nil { + continue + } + + if ok, _, _, _ := checkDate(intDay, intMonth, intYear); ok { + matches = append(matches, match.DateMatch{Token: password, Pattern: "date", Day: intDay, Month: intMonth, Year: intYear, I: i, J: j}) + } + + } + } + + 
return matches +} + +func buildDateMatchCandidate(dayMonth, year string, i, j int) dateMatchCandidate { + return dateMatchCandidate{DayMonth: dayMonth, Year: year, I: i, J: j} +} + +func buildDateMatchCandidateTwo(day, month string, year string, i, j int) dateMatchCandidateTwo { + + return dateMatchCandidateTwo{Day: day, Month: month, Year: year, I: i, J: j} +} diff --git a/vendor/github.com/nbutton23/zxcvbn-go/matching/dictionaryMatch.go b/vendor/github.com/nbutton23/zxcvbn-go/matching/dictionaryMatch.go new file mode 100644 index 000000000..4ddb2c3b0 --- /dev/null +++ b/vendor/github.com/nbutton23/zxcvbn-go/matching/dictionaryMatch.go @@ -0,0 +1,57 @@ +package matching + +import ( + "strings" + + "github.com/nbutton23/zxcvbn-go/entropy" + "github.com/nbutton23/zxcvbn-go/match" +) + +func buildDictMatcher(dictName string, rankedDict map[string]int) func(password string) []match.Match { + return func(password string) []match.Match { + matches := dictionaryMatch(password, dictName, rankedDict) + for _, v := range matches { + v.DictionaryName = dictName + } + return matches + } + +} + +func dictionaryMatch(password string, dictionaryName string, rankedDict map[string]int) []match.Match { + var results []match.Match + pwLower := strings.ToLower(password) + + pwLowerRunes := []rune(pwLower) + length := len(pwLowerRunes) + + for i := 0; i < length; i++ { + for j := i; j < length; j++ { + word := pwLowerRunes[i : j+1] + if val, ok := rankedDict[string(word)]; ok { + matchDic := match.Match{Pattern: "dictionary", + DictionaryName: dictionaryName, + I: i, + J: j, + Token: string([]rune(password)[i : j+1]), + } + matchDic.Entropy = entropy.DictionaryEntropy(matchDic, float64(val)) + + results = append(results, matchDic) + } + } + } + + return results +} + +func buildRankedDict(unrankedList []string) map[string]int { + + result := make(map[string]int) + + for i, v := range unrankedList { + result[strings.ToLower(v)] = i + 1 + } + + return result +} diff --git a/vendor/github.com/nbutton23/zxcvbn-go/matching/leet.go b/vendor/github.com/nbutton23/zxcvbn-go/matching/leet.go new file mode 100644 index 000000000..610f1973f --- /dev/null +++ b/vendor/github.com/nbutton23/zxcvbn-go/matching/leet.go @@ -0,0 +1,234 @@ +package matching + +import ( + "strings" + + "github.com/nbutton23/zxcvbn-go/entropy" + "github.com/nbutton23/zxcvbn-go/match" +) + +// L33TMatcherName id +const L33TMatcherName = "l33t" + +//FilterL33tMatcher can be pass to zxcvbn-go.PasswordStrength to skip that matcher +func FilterL33tMatcher(m match.Matcher) bool { + return m.ID == L33TMatcherName +} + +func l33tMatch(password string) []match.Match { + permutations := getPermutations(password) + + var matches []match.Match + + for _, permutation := range permutations { + for _, mather := range dictionaryMatchers { + matches = append(matches, mather.MatchingFunc(permutation)...) + } + } + + for _, match := range matches { + match.Entropy += entropy.ExtraLeetEntropy(match, password) + match.DictionaryName = match.DictionaryName + "_3117" + } + + return matches +} + +// This function creates a list of permutations based on a fixed table stored on data. The table +// will be reduced in order to proceed in the function using only relevant values (see +// relevantL33tSubtable). 
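+// Duplicate permutations are filtered out, so the returned slice contains each candidate only once.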
+func getPermutations(password string) []string { + substitutions := relevantL33tSubtable(password) + permutations := getAllPermutationsOfLeetSubstitutions(password, substitutions) + return permutations +} + +// This function loads the table from data but only keep in memory the values that are present +// inside the provided password. +func relevantL33tSubtable(password string) map[string][]string { + relevantSubs := make(map[string][]string) + for key, values := range l33tTable.Graph { + for _, value := range values { + if strings.Contains(password, value) { + relevantSubs[key] = append(relevantSubs[key], value) + } + } + } + + return relevantSubs +} + +// This function creates the list of permutations of a given password using the provided table as +// reference for its operation. +func getAllPermutationsOfLeetSubstitutions(password string, table map[string][]string) []string { + result := []string{} + + // create a list of tables without conflicting keys/values (this happens for "|", "7" and "1") + noConflictsTables := createListOfMapsWithoutConflicts(table) + for _, noConflictsTable := range noConflictsTables { + substitutionsMaps := createSubstitutionsMapsFromTable(noConflictsTable) + for _, substitutionsMap := range substitutionsMaps { + newValue := createWordForSubstitutionMap(password, substitutionsMap) + if !stringSliceContainsValue(result, newValue) { + result = append(result, newValue) + } + } + } + + return result +} + +// Create the possible list of maps removing the conflicts from it. As an example, the value "|" +// may represent "i" and "l". For each representation of the conflicting value, a new map is +// created. This may grow exponencialy according to the number of conflicts. The number of maps +// returned by this function may be reduced if the relevantL33tSubtable function was called to +// identify only relevant items. +func createListOfMapsWithoutConflicts(table map[string][]string) []map[string][]string { + // the resulting list starts with the provided table + result := []map[string][]string{} + result = append(result, table) + + // iterate over the list of conflicts in order to expand the maps for each one + conflicts := retrieveConflictsListFromTable(table) + for _, value := range conflicts { + newMapList := []map[string][]string{} + + // for each conflict a new list of maps will be created for every already known map + for _, currentMap := range result { + newMaps := createDifferentMapsForLeetChar(currentMap, value) + newMapList = append(newMapList, newMaps...) + } + + result = newMapList + } + + return result +} + +// This function retrieves the list of values that appear for one or more keys. This is usefull to +// know which l33t chars can represent more than one letter. +func retrieveConflictsListFromTable(table map[string][]string) []string { + result := []string{} + foundValues := []string{} + + for _, values := range table { + for _, value := range values { + if stringSliceContainsValue(foundValues, value) { + // only add on results if it was not identified as conflict before + if !stringSliceContainsValue(result, value) { + result = append(result, value) + } + } else { + foundValues = append(foundValues, value) + } + } + } + + return result +} + +// This function aims to create different maps for a given char if this char represents a conflict. +// If the specified char is not a conflit one, the same map will be returned. In scenarios which +// the provided char can not be found on map, an empty list will be returned. 
This function was +// designed to be used on conflicts situations. +func createDifferentMapsForLeetChar(table map[string][]string, leetChar string) []map[string][]string { + result := []map[string][]string{} + + keysWithSameValue := retrieveListOfKeysWithSpecificValueFromTable(table, leetChar) + for _, key := range keysWithSameValue { + newMap := copyMapRemovingSameValueFromOtherKeys(table, key, leetChar) + result = append(result, newMap) + } + + return result +} + +// This function retrieves the list of keys that can be represented using the given value. +func retrieveListOfKeysWithSpecificValueFromTable(table map[string][]string, valueToFind string) []string { + result := []string{} + + for key, values := range table { + for _, value := range values { + if value == valueToFind && !stringSliceContainsValue(result, key) { + result = append(result, key) + } + } + } + + return result +} + +// This function returns a lsit of substitution map from a given table. Each map in the result will +// provide only one representation for each value. As an example, if the provided map contains the +// values "@" and "4" in the possibilities to represent "a", two maps will be created where one +// will contain "a" mapping to "@" and the other one will provide "a" mapping to "4". +func createSubstitutionsMapsFromTable(table map[string][]string) []map[string]string { + result := []map[string]string{{"": ""}} + + for key, values := range table { + newResult := []map[string]string{} + + for _, mapInCurrentResult := range result { + for _, value := range values { + newMapForValue := copyMap(mapInCurrentResult) + newMapForValue[key] = value + newResult = append(newResult, newMapForValue) + } + } + + result = newResult + } + + // verification to make sure that the slice was filled + if len(result) == 1 && len(result[0]) == 1 && result[0][""] == "" { + return []map[string]string{} + } + + return result +} + +// This function replaces the values provided on substitution map over the provided word. +func createWordForSubstitutionMap(word string, substitutionMap map[string]string) string { + result := word + for key, value := range substitutionMap { + result = strings.Replace(result, value, key, -1) + } + + return result +} + +func stringSliceContainsValue(slice []string, value string) bool { + for _, valueInSlice := range slice { + if valueInSlice == value { + return true + } + } + + return false +} + +func copyMap(table map[string]string) map[string]string { + result := make(map[string]string) + + for key, value := range table { + result[key] = value + } + + return result +} + +// This function creates a new map based on the one provided but excluding possible representations +// of the same value on other keys. 
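+// Only the key given by keyToFix keeps valueToFix; every other key has that value removed from its list.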
+func copyMapRemovingSameValueFromOtherKeys(table map[string][]string, keyToFix string, valueToFix string) map[string][]string { + result := make(map[string][]string) + + for key, values := range table { + for _, value := range values { + if !(value == valueToFix && key != keyToFix) { + result[key] = append(result[key], value) + } + } + } + + return result +} diff --git a/vendor/github.com/nbutton23/zxcvbn-go/matching/matching.go b/vendor/github.com/nbutton23/zxcvbn-go/matching/matching.go new file mode 100644 index 000000000..4577db8a4 --- /dev/null +++ b/vendor/github.com/nbutton23/zxcvbn-go/matching/matching.go @@ -0,0 +1,82 @@ +package matching + +import ( + "sort" + + "github.com/nbutton23/zxcvbn-go/adjacency" + "github.com/nbutton23/zxcvbn-go/frequency" + "github.com/nbutton23/zxcvbn-go/match" +) + +var ( + dictionaryMatchers []match.Matcher + matchers []match.Matcher + adjacencyGraphs []adjacency.Graph + l33tTable adjacency.Graph + + sequences map[string]string +) + +func init() { + loadFrequencyList() +} + +// Omnimatch runs all matchers against the password +func Omnimatch(password string, userInputs []string, filters ...func(match.Matcher) bool) (matches []match.Match) { + + //Can I run into the issue where nil is not equal to nil? + if dictionaryMatchers == nil || adjacencyGraphs == nil { + loadFrequencyList() + } + + if userInputs != nil { + userInputMatcher := buildDictMatcher("user_inputs", buildRankedDict(userInputs)) + matches = userInputMatcher(password) + } + + for _, matcher := range matchers { + shouldBeFiltered := false + for i := range filters { + if filters[i](matcher) { + shouldBeFiltered = true + break + } + } + if !shouldBeFiltered { + matches = append(matches, matcher.MatchingFunc(password)...) + } + } + sort.Sort(match.Matches(matches)) + return matches +} + +func loadFrequencyList() { + + for n, list := range frequency.Lists { + dictionaryMatchers = append(dictionaryMatchers, match.Matcher{MatchingFunc: buildDictMatcher(n, buildRankedDict(list.List)), ID: n}) + } + + l33tTable = adjacency.GraphMap["l33t"] + + adjacencyGraphs = append(adjacencyGraphs, adjacency.GraphMap["qwerty"]) + adjacencyGraphs = append(adjacencyGraphs, adjacency.GraphMap["dvorak"]) + adjacencyGraphs = append(adjacencyGraphs, adjacency.GraphMap["keypad"]) + adjacencyGraphs = append(adjacencyGraphs, adjacency.GraphMap["macKeypad"]) + + //l33tFilePath, _ := filepath.Abs("adjacency/L33t.json") + //L33T_TABLE = adjacency.GetAdjancencyGraphFromFile(l33tFilePath, "l33t") + + sequences = make(map[string]string) + sequences["lower"] = "abcdefghijklmnopqrstuvwxyz" + sequences["upper"] = "ABCDEFGHIJKLMNOPQRSTUVWXYZ" + sequences["digits"] = "0123456789" + + matchers = append(matchers, dictionaryMatchers...) 
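+ //Register the pattern matchers (spatial, repeat, sequence, l33t and the two date matchers) after the dictionary matchers.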
+ matchers = append(matchers, match.Matcher{MatchingFunc: spatialMatch, ID: spatialMatcherName}) + matchers = append(matchers, match.Matcher{MatchingFunc: repeatMatch, ID: repeatMatcherName}) + matchers = append(matchers, match.Matcher{MatchingFunc: sequenceMatch, ID: sequenceMatcherName}) + matchers = append(matchers, match.Matcher{MatchingFunc: l33tMatch, ID: L33TMatcherName}) + matchers = append(matchers, match.Matcher{MatchingFunc: dateSepMatcher, ID: dateSepMatcherName}) + matchers = append(matchers, match.Matcher{MatchingFunc: dateWithoutSepMatch, ID: dateWithOutSepMatcherName}) + +} diff --git a/vendor/github.com/nbutton23/zxcvbn-go/matching/repeatMatch.go b/vendor/github.com/nbutton23/zxcvbn-go/matching/repeatMatch.go new file mode 100644 index 000000000..a93e45935 --- /dev/null +++ b/vendor/github.com/nbutton23/zxcvbn-go/matching/repeatMatch.go @@ -0,0 +1,67 @@ +package matching + +import ( + "strings" + + "github.com/nbutton23/zxcvbn-go/entropy" + "github.com/nbutton23/zxcvbn-go/match" +) + +const repeatMatcherName = "REPEAT" + +//FilterRepeatMatcher can be pass to zxcvbn-go.PasswordStrength to skip that matcher +func FilterRepeatMatcher(m match.Matcher) bool { + return m.ID == repeatMatcherName +} + +func repeatMatch(password string) []match.Match { + var matches []match.Match + + //Loop through password. if current == prev currentStreak++ else if currentStreak > 2 {buildMatch; currentStreak = 1} prev = current + var current, prev string + currentStreak := 1 + var i int + var char rune + for i, char = range password { + current = string(char) + if i == 0 { + prev = current + continue + } + + if strings.ToLower(current) == strings.ToLower(prev) { + currentStreak++ + + } else if currentStreak > 2 { + iPos := i - currentStreak + jPos := i - 1 + matchRepeat := match.Match{ + Pattern: "repeat", + I: iPos, + J: jPos, + Token: password[iPos : jPos+1], + DictionaryName: prev} + matchRepeat.Entropy = entropy.RepeatEntropy(matchRepeat) + matches = append(matches, matchRepeat) + currentStreak = 1 + } else { + currentStreak = 1 + } + + prev = current + } + + if currentStreak > 2 { + iPos := i - currentStreak + 1 + jPos := i + matchRepeat := match.Match{ + Pattern: "repeat", + I: iPos, + J: jPos, + Token: password[iPos : jPos+1], + DictionaryName: prev} + matchRepeat.Entropy = entropy.RepeatEntropy(matchRepeat) + matches = append(matches, matchRepeat) + } + return matches +} diff --git a/vendor/github.com/nbutton23/zxcvbn-go/matching/sequenceMatch.go b/vendor/github.com/nbutton23/zxcvbn-go/matching/sequenceMatch.go new file mode 100644 index 000000000..e0ed05229 --- /dev/null +++ b/vendor/github.com/nbutton23/zxcvbn-go/matching/sequenceMatch.go @@ -0,0 +1,76 @@ +package matching + +import ( + "strings" + + "github.com/nbutton23/zxcvbn-go/entropy" + "github.com/nbutton23/zxcvbn-go/match" +) + +const sequenceMatcherName = "SEQ" + +//FilterSequenceMatcher can be pass to zxcvbn-go.PasswordStrength to skip that matcher +func FilterSequenceMatcher(m match.Matcher) bool { + return m.ID == sequenceMatcherName +} + +func sequenceMatch(password string) []match.Match { + var matches []match.Match + for i := 0; i < len(password); { + j := i + 1 + var seq string + var seqName string + seqDirection := 0 + for seqCandidateName, seqCandidate := range sequences { + iN := strings.Index(seqCandidate, string(password[i])) + var jN int + if j < len(password) { + jN = strings.Index(seqCandidate, string(password[j])) + } else { + jN = -1 + } + + if iN > -1 && jN > -1 { + direction := jN - iN + if direction == 1 || 
direction == -1 { + seq = seqCandidate + seqName = seqCandidateName + seqDirection = direction + break + } + } + + } + + if seq != "" { + for { + var prevN, curN int + if j < len(password) { + prevChar, curChar := password[j-1], password[j] + prevN, curN = strings.Index(seq, string(prevChar)), strings.Index(seq, string(curChar)) + } + + if j == len(password) || curN-prevN != seqDirection { + if j-i > 2 { + matchSequence := match.Match{ + Pattern: "sequence", + I: i, + J: j - 1, + Token: password[i:j], + DictionaryName: seqName, + } + + matchSequence.Entropy = entropy.SequenceEntropy(matchSequence, len(seq), (seqDirection == 1)) + matches = append(matches, matchSequence) + } + break + } else { + j++ + } + + } + } + i = j + } + return matches +} diff --git a/vendor/github.com/nbutton23/zxcvbn-go/matching/spatialMatch.go b/vendor/github.com/nbutton23/zxcvbn-go/matching/spatialMatch.go new file mode 100644 index 000000000..fd858f5d1 --- /dev/null +++ b/vendor/github.com/nbutton23/zxcvbn-go/matching/spatialMatch.go @@ -0,0 +1,88 @@ +package matching + +import ( + "strings" + + "github.com/nbutton23/zxcvbn-go/adjacency" + "github.com/nbutton23/zxcvbn-go/entropy" + "github.com/nbutton23/zxcvbn-go/match" +) + +const spatialMatcherName = "SPATIAL" + +//FilterSpatialMatcher can be pass to zxcvbn-go.PasswordStrength to skip that matcher +func FilterSpatialMatcher(m match.Matcher) bool { + return m.ID == spatialMatcherName +} + +func spatialMatch(password string) (matches []match.Match) { + for _, graph := range adjacencyGraphs { + if graph.Graph != nil { + matches = append(matches, spatialMatchHelper(password, graph)...) + } + } + return matches +} + +func spatialMatchHelper(password string, graph adjacency.Graph) (matches []match.Match) { + + for i := 0; i < len(password)-1; { + j := i + 1 + lastDirection := -99 //an int that it should never be! + turns := 0 + shiftedCount := 0 + + for { + prevChar := password[j-1] + found := false + foundDirection := -1 + curDirection := -1 + //My graphs seem to be wrong. . . and where the hell is qwerty + adjacents := graph.Graph[string(prevChar)] + //Consider growing pattern by one character if j hasn't gone over the edge + if j < len(password) { + curChar := password[j] + for _, adj := range adjacents { + curDirection++ + + if strings.Index(adj, string(curChar)) != -1 { + found = true + foundDirection = curDirection + + if strings.Index(adj, string(curChar)) == 1 { + //index 1 in the adjacency means the key is shifted, 0 means unshifted: A vs a, % vs 5, etc. + //for example, 'q' is adjacent to the entry '2@'. @ is shifted w/ index 1, 2 is unshifted. + shiftedCount++ + } + + if lastDirection != foundDirection { + //adding a turn is correct even in the initial case when last_direction is null: + //every spatial pattern starts with a turn. + turns++ + lastDirection = foundDirection + } + break + } + } + } + + //if the current pattern continued, extend j and try to grow again + if found { + j++ + } else { + //otherwise push the pattern discovered so far, if any... + //don't consider length 1 or 2 chains. + if j-i > 2 { + matchSpc := match.Match{Pattern: "spatial", I: i, J: j - 1, Token: password[i:j], DictionaryName: graph.Name} + matchSpc.Entropy = entropy.SpatialEntropy(matchSpc, turns, shiftedCount) + matches = append(matches, matchSpc) + } + //. . . 
and then start a new search from the rest of the password + i = j + break + } + } + + } + return matches +} diff --git a/vendor/github.com/nbutton23/zxcvbn-go/scoring/scoring.go b/vendor/github.com/nbutton23/zxcvbn-go/scoring/scoring.go new file mode 100644 index 000000000..4f68a6dca --- /dev/null +++ b/vendor/github.com/nbutton23/zxcvbn-go/scoring/scoring.go @@ -0,0 +1,177 @@ +package scoring + +import ( + "fmt" + "github.com/nbutton23/zxcvbn-go/entropy" + "github.com/nbutton23/zxcvbn-go/match" + "github.com/nbutton23/zxcvbn-go/utils/math" + "math" + "sort" +) + +const ( + //for a hash function like bcrypt/scrypt/PBKDF2, 10ms per guess is a safe lower bound. + //(usually a guess would take longer -- this assumes fast hardware and a small work factor.) + //adjust for your site accordingly if you use another hash function, possibly by + //several orders of magnitude! + singleGuess float64 = 0.010 + numAttackers float64 = 100 //Cores used to make guesses + secondsPerGuess float64 = singleGuess / numAttackers +) + +// MinEntropyMatch is the lowest entropy match found +type MinEntropyMatch struct { + Password string + Entropy float64 + MatchSequence []match.Match + CrackTime float64 + CrackTimeDisplay string + Score int + CalcTime float64 +} + +/* +MinimumEntropyMatchSequence returns the minimum entropy + + Takes a list of overlapping matches, returns the non-overlapping sublist with + minimum entropy. O(nm) dp alg for length-n password with m candidate matches. +*/ +func MinimumEntropyMatchSequence(password string, matches []match.Match) MinEntropyMatch { + bruteforceCardinality := float64(entropy.CalcBruteForceCardinality(password)) + upToK := make([]float64, len(password)) + backPointers := make([]match.Match, len(password)) + + for k := 0; k < len(password); k++ { + upToK[k] = get(upToK, k-1) + math.Log2(bruteforceCardinality) + + for _, match := range matches { + if match.J != k { + continue + } + + i, j := match.I, match.J + //see if best entropy up to i-1 + entropy of match is less that current min at j + upTo := get(upToK, i-1) + candidateEntropy := upTo + match.Entropy + + if candidateEntropy < upToK[j] { + upToK[j] = candidateEntropy + match.Entropy = candidateEntropy + backPointers[j] = match + } + } + } + + //walk backwards and decode the best sequence + var matchSequence []match.Match + passwordLen := len(password) + passwordLen-- + for k := passwordLen; k >= 0; { + match := backPointers[k] + if match.Pattern != "" { + matchSequence = append(matchSequence, match) + k = match.I - 1 + + } else { + k-- + } + + } + sort.Sort(match.Matches(matchSequence)) + + makeBruteForceMatch := func(i, j int) match.Match { + return match.Match{Pattern: "bruteforce", + I: i, + J: j, + Token: password[i : j+1], + Entropy: math.Log2(math.Pow(bruteforceCardinality, float64(j-i)))} + + } + + k := 0 + var matchSequenceCopy []match.Match + for _, match := range matchSequence { + i, j := match.I, match.J + if i-k > 0 { + matchSequenceCopy = append(matchSequenceCopy, makeBruteForceMatch(k, i-1)) + } + k = j + 1 + matchSequenceCopy = append(matchSequenceCopy, match) + } + + if k < len(password) { + matchSequenceCopy = append(matchSequenceCopy, makeBruteForceMatch(k, len(password)-1)) + } + var minEntropy float64 + if len(password) == 0 { + minEntropy = float64(0) + } else { + minEntropy = upToK[len(password)-1] + } + + crackTime := roundToXDigits(entropyToCrackTime(minEntropy), 3) + return MinEntropyMatch{Password: password, + Entropy: roundToXDigits(minEntropy, 3), + MatchSequence: matchSequenceCopy, + 
CrackTime: crackTime, + CrackTimeDisplay: displayTime(crackTime), + Score: crackTimeToScore(crackTime)} + +} +func get(a []float64, i int) float64 { + if i < 0 || i >= len(a) { + return float64(0) + } + + return a[i] +} + +func entropyToCrackTime(entropy float64) float64 { + crackTime := (0.5 * math.Pow(float64(2), entropy)) * secondsPerGuess + + return crackTime +} + +func roundToXDigits(number float64, digits int) float64 { + return zxcvbnmath.Round(number, .5, digits) +} + +func displayTime(seconds float64) string { + formater := "%.1f %s" + minute := float64(60) + hour := minute * float64(60) + day := hour * float64(24) + month := day * float64(31) + year := month * float64(12) + century := year * float64(100) + + if seconds < minute { + return "instant" + } else if seconds < hour { + return fmt.Sprintf(formater, (1 + math.Ceil(seconds/minute)), "minutes") + } else if seconds < day { + return fmt.Sprintf(formater, (1 + math.Ceil(seconds/hour)), "hours") + } else if seconds < month { + return fmt.Sprintf(formater, (1 + math.Ceil(seconds/day)), "days") + } else if seconds < year { + return fmt.Sprintf(formater, (1 + math.Ceil(seconds/month)), "months") + } else if seconds < century { + return fmt.Sprintf(formater, (1 + math.Ceil(seconds/century)), "years") + } else { + return "centuries" + } +} + +func crackTimeToScore(seconds float64) int { + if seconds < math.Pow(10, 2) { + return 0 + } else if seconds < math.Pow(10, 4) { + return 1 + } else if seconds < math.Pow(10, 6) { + return 2 + } else if seconds < math.Pow(10, 8) { + return 3 + } + + return 4 +} diff --git a/vendor/github.com/nbutton23/zxcvbn-go/utils/math/mathutils.go b/vendor/github.com/nbutton23/zxcvbn-go/utils/math/mathutils.go new file mode 100644 index 000000000..1b989d194 --- /dev/null +++ b/vendor/github.com/nbutton23/zxcvbn-go/utils/math/mathutils.go @@ -0,0 +1,40 @@ +package zxcvbnmath + +import "math" + +/* +NChoseK http://blog.plover.com/math/choose.html +I am surprised that I have to define these. . . Maybe i just didn't look hard enough for a lib. +*/ +func NChoseK(n, k float64) float64 { + if k > n { + return 0 + } else if k == 0 { + return 1 + } + + var r float64 = 1 + + for d := float64(1); d <= k; d++ { + r *= n + r /= d + n-- + } + + return r +} + +// Round a number +func Round(val float64, roundOn float64, places int) (newVal float64) { + var round float64 + pow := math.Pow(10, float64(places)) + digit := pow * val + _, div := math.Modf(digit) + if div >= roundOn { + round = math.Ceil(digit) + } else { + round = math.Floor(digit) + } + newVal = round / pow + return +} diff --git a/vendor/github.com/nbutton23/zxcvbn-go/zxcvbn.go b/vendor/github.com/nbutton23/zxcvbn-go/zxcvbn.go new file mode 100644 index 000000000..9c34b1c8c --- /dev/null +++ b/vendor/github.com/nbutton23/zxcvbn-go/zxcvbn.go @@ -0,0 +1,22 @@ +package zxcvbn + +import ( + "time" + + "github.com/nbutton23/zxcvbn-go/match" + "github.com/nbutton23/zxcvbn-go/matching" + "github.com/nbutton23/zxcvbn-go/scoring" + "github.com/nbutton23/zxcvbn-go/utils/math" +) + +// PasswordStrength takes a password, userInputs and optional filters and returns a MinEntropyMatch +func PasswordStrength(password string, userInputs []string, filters ...func(match.Matcher) bool) scoring.MinEntropyMatch { + start := time.Now() + matches := matching.Omnimatch(password, userInputs, filters...) 
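+ // Score the matches by finding the non-overlapping match sequence with minimum entropy.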
+ result := scoring.MinimumEntropyMatchSequence(password, matches) + end := time.Now() + + calcTime := end.Nanosecond() - start.Nanosecond() + result.CalcTime = zxcvbnmath.Round(float64(calcTime)*time.Nanosecond.Seconds(), .5, 3) + return result +} diff --git a/vendor/github.com/nishanths/exhaustive/.gitignore b/vendor/github.com/nishanths/exhaustive/.gitignore new file mode 100644 index 000000000..24bde5301 --- /dev/null +++ b/vendor/github.com/nishanths/exhaustive/.gitignore @@ -0,0 +1,7 @@ +.DS_Store +*.swp +tags + +# binary +cmd/exhaustive/exhaustive +exhaustive diff --git a/vendor/github.com/nishanths/exhaustive/.travis.yml b/vendor/github.com/nishanths/exhaustive/.travis.yml new file mode 100644 index 000000000..bd342f558 --- /dev/null +++ b/vendor/github.com/nishanths/exhaustive/.travis.yml @@ -0,0 +1,12 @@ +language: go + +go: + - 1.x + - master + +# Only clone the most recent commit. +git: + depth: 1 + +notifications: + email: false diff --git a/vendor/github.com/nishanths/exhaustive/LICENSE b/vendor/github.com/nishanths/exhaustive/LICENSE new file mode 100644 index 000000000..32befa68f --- /dev/null +++ b/vendor/github.com/nishanths/exhaustive/LICENSE @@ -0,0 +1,25 @@ +BSD 2-Clause License + +Copyright (c) 2020, Nishanth Shanmugham +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/github.com/nishanths/exhaustive/README.md b/vendor/github.com/nishanths/exhaustive/README.md new file mode 100644 index 000000000..633e19f01 --- /dev/null +++ b/vendor/github.com/nishanths/exhaustive/README.md @@ -0,0 +1,70 @@ +# exhaustive + +[![Godoc](https://godoc.org/github.com/nishanths/exhaustive?status.svg)](https://godoc.org/github.com/nishanths/exhaustive) + +[![Build Status](https://travis-ci.org/nishanths/exhaustive.svg?branch=master)](https://travis-ci.org/nishanths/exhaustive) + +The `exhaustive` package and command line program can be used to detect +enum switch statements that are not exhaustive. + +An enum switch statement is exhaustive if it has cases for each of the enum's members. See godoc for the definition of enum used by the program. 
+ +The `exhaustive` package provides an `Analyzer` that follows the guidelines +described in the [go/analysis](https://godoc.org/golang.org/x/tools/go/analysis) package; this makes +it possible to integrate into existing analysis driver programs. + +## Install + +``` +go get github.com/nishanths/exhaustive/... +``` + +## Docs + +https://godoc.org/github.com/nishanths/exhaustive + +## Example + +Given the code: + +```diff +package token + +type Token int + +const ( + Add Token = iota + Subtract + Multiply ++ Quotient ++ Remainder +) +``` +``` +package calc + +import "token" + +func processToken(t token.Token) { + switch t { + case token.Add: + ... + case token.Subtract: + ... + case token.Multiply: + ... + } +} +``` + +Running the `exhaustive` command will print: + +``` +calc.go:6:2: missing cases in switch of type token.Token: Quotient, Remainder +``` + +Enums can also be defined using explicit constant values instead of `iota`. + +## License + +BSD 2-Clause diff --git a/vendor/github.com/nishanths/exhaustive/enum.go b/vendor/github.com/nishanths/exhaustive/enum.go new file mode 100644 index 000000000..ed0df642b --- /dev/null +++ b/vendor/github.com/nishanths/exhaustive/enum.go @@ -0,0 +1,146 @@ +package exhaustive + +import ( + "go/ast" + "go/token" + "go/types" + + "golang.org/x/tools/go/analysis" +) + +type enums map[string]*enumMembers // enum type name -> enum members + +type enumMembers struct { + // Names in the order encountered in the AST. + OrderedNames []string + + // Maps name -> (constant.Value).ExactString(). + // If a name is missing in the map, it means that it does not have a + // corresponding constant.Value defined in the AST. + NameToValue map[string]string + + // Maps (constant.Value).ExactString() -> names. + // Names that don't have a constant.Value defined in the AST (e.g., some + // iota constants) will not have a corresponding entry in this map. + ValueToNames map[string][]string +} + +func (em *enumMembers) add(name string, constVal *string) { + em.OrderedNames = append(em.OrderedNames, name) + + if constVal != nil { + if em.NameToValue == nil { + em.NameToValue = make(map[string]string) + } + em.NameToValue[name] = *constVal + + if em.ValueToNames == nil { + em.ValueToNames = make(map[string][]string) + } + em.ValueToNames[*constVal] = append(em.ValueToNames[*constVal], name) + } +} + +func (em *enumMembers) numMembers() int { + return len(em.OrderedNames) +} + +func findEnums(pass *analysis.Pass) enums { + pkgEnums := make(enums) + + // Gather enum types. + for _, f := range pass.Files { + for _, decl := range f.Decls { + gen, ok := decl.(*ast.GenDecl) + if !ok { + continue + } + if gen.Tok != token.TYPE { + continue + } + for _, s := range gen.Specs { + // Must be TypeSpec since we've filtered on token.TYPE. + t, ok := s.(*ast.TypeSpec) + obj := pass.TypesInfo.Defs[t.Name] + if obj == nil { + continue + } + + named, ok := obj.Type().(*types.Named) + if !ok { + continue + } + basic, ok := named.Underlying().(*types.Basic) + if !ok { + continue + } + + switch i := basic.Info(); { + case i&types.IsInteger != 0: + pkgEnums[named.Obj().Name()] = &enumMembers{} + case i&types.IsFloat != 0: + pkgEnums[named.Obj().Name()] = &enumMembers{} + case i&types.IsString != 0: + pkgEnums[named.Obj().Name()] = &enumMembers{} + } + } + } + } + + // Gather enum members. 
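+ // A package-level constant or variable whose type is one of the enum types gathered above is recorded as a member of that enum.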
+ for _, f := range pass.Files { + for _, decl := range f.Decls { + gen, ok := decl.(*ast.GenDecl) + if !ok { + continue + } + if gen.Tok != token.CONST && gen.Tok != token.VAR { + continue + } + for _, s := range gen.Specs { + // Must be ValueSpec since we've filtered on token.CONST, token.VAR. + v := s.(*ast.ValueSpec) + for i, name := range v.Names { + obj := pass.TypesInfo.Defs[name] + if obj == nil { + continue + } + + named, ok := obj.Type().(*types.Named) + if !ok { + continue + } + + // Get the constant.Value representation, if any. + var constVal *string + if len(v.Values) > i { + value := v.Values[i] + if con, ok := pass.TypesInfo.Types[value]; ok && con.Value != nil { + str := con.Value.ExactString() // temp var to be able to take address + constVal = &str + } + } + + em, ok := pkgEnums[named.Obj().Name()] + if !ok { + continue + } + em.add(obj.Name(), constVal) + pkgEnums[named.Obj().Name()] = em + } + } + } + } + + // Delete member-less enum types. + // We can't call these enums, since we can't be sure without + // the existence of members. (The type may just be a named type, + // for instance.) + for k, v := range pkgEnums { + if v.numMembers() == 0 { + delete(pkgEnums, k) + } + } + + return pkgEnums +} diff --git a/vendor/github.com/nishanths/exhaustive/exhaustive.go b/vendor/github.com/nishanths/exhaustive/exhaustive.go new file mode 100644 index 000000000..bee01b108 --- /dev/null +++ b/vendor/github.com/nishanths/exhaustive/exhaustive.go @@ -0,0 +1,207 @@ +// Package exhaustive provides an analyzer that checks exhaustiveness of enum +// switch statements. The analyzer also provides fixes to make the offending +// switch statements exhaustive (see "Fixes" section). +// +// See "cmd/exhaustive" subpackage for the related command line program. +// +// Definition of enum +// +// The Go language spec does not provide an explicit definition for enums. +// For the purpose of this program, an enum type is a package-level named type +// whose underlying type is an integer (includes byte and rune), a float, or +// a string type. An enum type must have associated with it one or more +// package-level variables of the named type in the package. These variables +// constitute the enum's members. +// +// In the code snippet below, Biome is an enum type with 3 members. (You may +// also use iota instead of explicitly specifying values.) +// +// type Biome int +// +// const ( +// Tundra Biome = 1 +// Savanna Biome = 2 +// Desert Biome = 3 +// ) +// +// Switch statement exhaustiveness +// +// An enum switch statement is exhaustive if it has cases for each of the enum's members. +// +// For an enum type defined in the same package as the switch statement, both +// exported and unexported enum members must be present in order to consider +// the switch exhaustive. On the other hand, for an enum type defined +// in an external package it is sufficient for just exported enum members +// to be present in order to consider the switch exhaustive. +// +// Flags +// +// The analyzer accepts 4 flags. +// +// The -default-signifies-exhaustive boolean flag indicates to the analyzer +// whether switch statements are to be considered exhaustive as long as a +// 'default' case is present (even if all enum members aren't listed in the +// switch statements cases). The default value is false. +// +// The -check-generated boolean flag indicates whether to check switch +// statements in generated Go source files. The default value is false. +// +// The -ignore-pattern flag specifies a regular expression. 
Member names +// in enum definitions that match the regular expression do not require a case +// clause to satisfy exhaustiveness. The regular expression is matched against +// enum member names inclusive of the import path, e.g. of the +// form: github.com/foo/bar.Tundra, where the import path is github.com/foo/bar +// and the enum member name is Tundra. +// +// The behavior of the -fix flag is described in the next section. +// +// Fixes +// +// The analyzer suggests fixes for a switch statement if it is not exhaustive. +// The suggested fix always adds a single case clause for the missing enum members. +// +// case MissingA, MissingB, MissingC: +// panic(fmt.Sprintf("unhandled value: %v", v)) +// +// where v is the expression in the switch statement's tag (in other words, the +// value being switched upon). If the switch statement's tag is a function or a +// method call the analyzer does not suggest a fix, as reusing the call expression +// in the panic/fmt.Sprintf call could be mutative. +// +// The rationale for the fix using panic is that it might be better to fail loudly on +// existing unhandled or impossible cases than to let them slip by quietly unnoticed. +// An even better fix may, of course, be to manually inspect the sites reported +// by the package and handle the missing cases if necessary. +// +// Imports will be adjusted automatically to account for the "fmt" dependency. +// +// Skipping analysis +// +// If the following directive comment: +// +// //exhaustive:ignore +// +// is associated with a switch statement, the analyzer skips +// checking of the switch statement and no diagnostics are reported. +// +// No diagnostics are reported for switch statements in +// generated files (see https://golang.org/s/generatedcode for definition of +// generated file), unless the -check-generated flag is enabled. +// +// Additionally, see the -ignore-pattern flag. +package exhaustive + +import ( + "go/ast" + "go/types" + "sort" + "strings" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/ast/inspector" +) + +// Flag names used by the analyzer. They are exported for use by analyzer +// driver programs. +const ( + DefaultSignifiesExhaustiveFlag = "default-signifies-exhaustive" + CheckGeneratedFlag = "check-generated" + IgnorePatternFlag = "ignore-pattern" +) + +var ( + fDefaultSignifiesExhaustive bool + fCheckGeneratedFiles bool + fIgnorePattern regexpFlag +) + +func init() { + Analyzer.Flags.BoolVar(&fDefaultSignifiesExhaustive, DefaultSignifiesExhaustiveFlag, false, "indicates that switch statements are to be considered exhaustive if a 'default' case is present, even if all enum members aren't listed in the switch") + Analyzer.Flags.BoolVar(&fCheckGeneratedFiles, CheckGeneratedFlag, false, "check switch statements in generated files also") + Analyzer.Flags.Var(&fIgnorePattern, IgnorePatternFlag, "do not require a case clause to satisfy exhaustiveness for enum member names that match the provided regular expression pattern") +} + +// resetFlags resets the flag variables to their default values. +// Useful in tests. 
+func resetFlags() { + fDefaultSignifiesExhaustive = false + fCheckGeneratedFiles = false + fIgnorePattern = regexpFlag{} +} + +var Analyzer = &analysis.Analyzer{ + Name: "exhaustive", + Doc: "check exhaustiveness of enum switch statements", + Run: run, + Requires: []*analysis.Analyzer{inspect.Analyzer}, + FactTypes: []analysis.Fact{&enumsFact{}}, +} + +func run(pass *analysis.Pass) (interface{}, error) { + e := findEnums(pass) + if len(e) != 0 { + pass.ExportPackageFact(&enumsFact{Enums: e}) + } + + inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + err := checkSwitchStatements(pass, inspect) + return nil, err +} + +// IgnoreDirectivePrefix is used to exclude checking of specific switch statements. +// See package comment for details. +const IgnoreDirectivePrefix = "//exhaustive:ignore" + +func containsIgnoreDirective(comments []*ast.Comment) bool { + for _, c := range comments { + if strings.HasPrefix(c.Text, IgnoreDirectivePrefix) { + return true + } + } + return false +} + +type enumsFact struct { + Enums enums +} + +var _ analysis.Fact = (*enumsFact)(nil) + +func (e *enumsFact) AFact() {} + +func (e *enumsFact) String() string { + // sort for stability (required for testing) + var sortedKeys []string + for k := range e.Enums { + sortedKeys = append(sortedKeys, k) + } + sort.Strings(sortedKeys) + + var buf strings.Builder + for i, k := range sortedKeys { + v := e.Enums[k] + buf.WriteString(k) + buf.WriteString(":") + + for j, vv := range v.OrderedNames { + buf.WriteString(vv) + // add comma separator between each enum member in an enum type + if j != len(v.OrderedNames)-1 { + buf.WriteString(",") + } + } + // add semicolon separator between each enum type + if i != len(sortedKeys)-1 { + buf.WriteString("; ") + } + } + return buf.String() +} + +func enumTypeName(e *types.Named, samePkg bool) string { + if samePkg { + return e.Obj().Name() + } + return e.Obj().Pkg().Name() + "." + e.Obj().Name() +} diff --git a/vendor/github.com/nishanths/exhaustive/generated.go b/vendor/github.com/nishanths/exhaustive/generated.go new file mode 100644 index 000000000..19b4fb12b --- /dev/null +++ b/vendor/github.com/nishanths/exhaustive/generated.go @@ -0,0 +1,34 @@ +package exhaustive + +import ( + "go/ast" + "strings" +) + +// Adapated from https://gotools.org/dmitri.shuralyov.com/go/generated + +func isGeneratedFile(file *ast.File) bool { + for _, c := range file.Comments { + for _, cc := range c.List { + s := cc.Text // "\n" already removed (see doc comment) + if len(s) >= 1 && s[len(s)-1] == '\r' { + s = s[:len(s)-1] // Trim "\r". + } + if containsGeneratedComment(s) { + return true + } + } + } + + return false +} + +func containsGeneratedComment(s string) bool { + return strings.HasPrefix(s, genCommentPrefix) && + strings.HasSuffix(s, genCommentSuffix) +} + +const ( + genCommentPrefix = "// Code generated " + genCommentSuffix = " DO NOT EDIT." 
+) diff --git a/vendor/github.com/nishanths/exhaustive/go.mod b/vendor/github.com/nishanths/exhaustive/go.mod new file mode 100644 index 000000000..4db5aeb01 --- /dev/null +++ b/vendor/github.com/nishanths/exhaustive/go.mod @@ -0,0 +1,8 @@ +module github.com/nishanths/exhaustive + +go 1.14 + +require ( + golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c // indirect + golang.org/x/tools v0.1.4 +) diff --git a/vendor/github.com/nishanths/exhaustive/go.sum b/vendor/github.com/nishanths/exhaustive/go.sum new file mode 100644 index 000000000..20d958ec4 --- /dev/null +++ b/vendor/github.com/nishanths/exhaustive/go.sum @@ -0,0 +1,28 @@ +github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/mod v0.4.2 h1:Gz96sIWK3OalVv/I/qNygP42zyoKp3xptRVCWRFEBvo= +golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c h1:F1jZWGFhYfh0Ci55sIpILtKKK8p3i2/krTr0H1rg74I= +golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.1.4 h1:cVngSRcfgyZCzys3KYOpCFa+4dqX/Oub9tAq00ttGVs= +golang.org/x/tools v0.1.4/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= +golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= diff --git a/vendor/github.com/nishanths/exhaustive/regexp_flag.go 
b/vendor/github.com/nishanths/exhaustive/regexp_flag.go new file mode 100644 index 000000000..3a9ef7353 --- /dev/null +++ b/vendor/github.com/nishanths/exhaustive/regexp_flag.go @@ -0,0 +1,35 @@ +package exhaustive + +import ( + "regexp" +) + +type regexpFlag struct { + r *regexp.Regexp +} + +func (v *regexpFlag) String() string { + if v.r != nil { + return v.r.String() + } + return "" +} + +func (v *regexpFlag) Set(expr string) error { + if expr == "" { + v.r = nil + return nil + } + + r, err := regexp.Compile(expr) + if err != nil { + return err + } + + v.r = r + return nil +} + +func (v *regexpFlag) Get() interface{} { + return v.r +} diff --git a/vendor/github.com/nishanths/exhaustive/switch.go b/vendor/github.com/nishanths/exhaustive/switch.go new file mode 100644 index 000000000..1a88eec0c --- /dev/null +++ b/vendor/github.com/nishanths/exhaustive/switch.go @@ -0,0 +1,444 @@ +package exhaustive + +import ( + "bytes" + "fmt" + "go/ast" + "go/printer" + "go/token" + "go/types" + "regexp" + "sort" + "strconv" + "strings" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/ast/astutil" + "golang.org/x/tools/go/ast/inspector" +) + +func isDefaultCase(c *ast.CaseClause) bool { + return c.List == nil // see doc comment on field +} + +func checkSwitchStatements( + pass *analysis.Pass, + inspect *inspector.Inspector, +) error { + comments := make(map[*ast.File]ast.CommentMap) // CommentMap per package file, lazily populated by reference + generated := make(map[*ast.File]bool) + return checkSwitchStatements_(pass, inspect, comments, generated) +} + +func checkSwitchStatements_( + pass *analysis.Pass, + inspect *inspector.Inspector, + comments map[*ast.File]ast.CommentMap, + generated map[*ast.File]bool, +) error { + inspect.WithStack([]ast.Node{&ast.SwitchStmt{}}, func(n ast.Node, push bool, stack []ast.Node) bool { + if !push { + return true + } + + file := stack[0].(*ast.File) + + // Determine if file is a generated file, based on https://golang.org/s/generatedcode. + // If generated, don't check this file. + var isGenerated bool + if gen, ok := generated[file]; ok { + isGenerated = gen + } else { + isGenerated = isGeneratedFile(file) + generated[file] = isGenerated + } + if isGenerated && !fCheckGeneratedFiles { + // don't check + return true + } + + sw := n.(*ast.SwitchStmt) + if sw.Tag == nil { + return true + } + t := pass.TypesInfo.Types[sw.Tag] + if !t.IsValue() { + return true + } + tagType, ok := t.Type.(*types.Named) + if !ok { + return true + } + + tagPkg := tagType.Obj().Pkg() + if tagPkg == nil { + // Doc comment: nil for labels and objects in the Universe scope. + // This happens for the `error` type, for example. + // Continuing would mean that ImportPackageFact panics. + return true + } + + var enums enumsFact + if !pass.ImportPackageFact(tagPkg, &enums) { + // Can't do anything further. + return true + } + + em, isEnum := enums.Enums[tagType.Obj().Name()] + if !isEnum { + // Tag's type is not a known enum. + return true + } + + // Get comment map. 
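+ // The comment map for a file is built lazily and cached, so it is computed at most once per file.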
+ var allComments ast.CommentMap + if cm, ok := comments[file]; ok { + allComments = cm + } else { + allComments = ast.NewCommentMap(pass.Fset, file, file.Comments) + comments[file] = allComments + } + + specificComments := allComments.Filter(sw) + for _, group := range specificComments.Comments() { + if containsIgnoreDirective(group.List) { + return true // skip checking due to ignore directive + } + } + + samePkg := tagPkg == pass.Pkg + checkUnexported := samePkg + + hitlist := hitlistFromEnumMembers(em, tagPkg, checkUnexported, fIgnorePattern.Get().(*regexp.Regexp)) + if len(hitlist) == 0 { + return true + } + + var defaultCase *ast.CaseClause + for _, stmt := range sw.Body.List { + caseCl := stmt.(*ast.CaseClause) + if isDefaultCase(caseCl) { + defaultCase = caseCl + continue // nothing more to do if it's the default case + } + for _, e := range caseCl.List { + e = astutil.Unparen(e) + if samePkg { + ident, ok := e.(*ast.Ident) + if !ok { + continue + } + updateHitlist(hitlist, em, ident.Name) + } else { + selExpr, ok := e.(*ast.SelectorExpr) + if !ok { + continue + } + + // ensure X is package identifier + ident, ok := selExpr.X.(*ast.Ident) + if !ok { + continue + } + if !isPackageNameIdentifier(pass, ident) { + continue + } + + updateHitlist(hitlist, em, selExpr.Sel.Name) + } + } + } + + defaultSuffices := fDefaultSignifiesExhaustive && defaultCase != nil + shouldReport := len(hitlist) > 0 && !defaultSuffices + + if shouldReport { + reportSwitch(pass, sw, defaultCase, samePkg, tagType, em, hitlist, file) + } + return true + }) + + return nil +} + +func updateHitlist(hitlist map[string]struct{}, em *enumMembers, foundName string) { + constVal, ok := em.NameToValue[foundName] + if !ok { + // only delete the name alone from hitlist + delete(hitlist, foundName) + return + } + + // delete all of the same-valued names from hitlist + namesToDelete := em.ValueToNames[constVal] + for _, n := range namesToDelete { + delete(hitlist, n) + } +} + +func isPackageNameIdentifier(pass *analysis.Pass, ident *ast.Ident) bool { + obj := pass.TypesInfo.ObjectOf(ident) + if obj == nil { + return false + } + _, ok := obj.(*types.PkgName) + return ok +} + +func hitlistFromEnumMembers(em *enumMembers, enumPkg *types.Package, checkUnexported bool, ignorePattern *regexp.Regexp) map[string]struct{} { + hitlist := make(map[string]struct{}) + for _, name := range em.OrderedNames { + if name == "_" { + // blank identifier is often used to skip entries in iota lists + continue + } + if ignorePattern != nil && ignorePattern.MatchString(enumPkg.Path()+"."+name) { + continue + } + if !ast.IsExported(name) && !checkUnexported { + continue + } + hitlist[name] = struct{}{} + } + return hitlist +} + +func determineMissingOutput(missingMembers map[string]struct{}, em *enumMembers) []string { + constValMembers := make(map[string][]string) // value -> names + var otherMembers []string // non-constant value names + + for m := range missingMembers { + if constVal, ok := em.NameToValue[m]; ok { + constValMembers[constVal] = append(constValMembers[constVal], m) + } else { + otherMembers = append(otherMembers, m) + } + } + + missingOutput := make([]string, 0, len(constValMembers)+len(otherMembers)) + for _, names := range constValMembers { + sort.Strings(names) + missingOutput = append(missingOutput, strings.Join(names, "|")) + } + missingOutput = append(missingOutput, otherMembers...) 
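+ // Sort so that missing members appear in a deterministic order in the diagnostic.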
+ sort.Strings(missingOutput) + return missingOutput +} + +func reportSwitch( + pass *analysis.Pass, + sw *ast.SwitchStmt, + defaultCase *ast.CaseClause, + samePkg bool, + enumType *types.Named, + em *enumMembers, + missingMembers map[string]struct{}, + f *ast.File, +) { + missingOutput := determineMissingOutput(missingMembers, em) + + var fixes []analysis.SuggestedFix + if fix, ok := computeFix(pass, pass.Fset, f, sw, defaultCase, enumType, samePkg, missingMembers); ok { + fixes = append(fixes, fix) + } + + pass.Report(analysis.Diagnostic{ + Pos: sw.Pos(), + End: sw.End(), + Message: fmt.Sprintf("missing cases in switch of type %s: %s", enumTypeName(enumType, samePkg), strings.Join(missingOutput, ", ")), + SuggestedFixes: fixes, + }) +} + +func computeFix(pass *analysis.Pass, fset *token.FileSet, f *ast.File, sw *ast.SwitchStmt, defaultCase *ast.CaseClause, enumType *types.Named, samePkg bool, missingMembers map[string]struct{}) (analysis.SuggestedFix, bool) { + // Function and method calls may be mutative, so we don't want to reuse the + // call expression in the about-to-be-inserted case clause body. So we just + // don't suggest a fix in such situations. + // + // However, we need to make an exception for type conversions, which are + // also call expressions in the AST. + // + // We'll need to lookup type information for this, and can't rely solely + // on the AST. + if containsFuncCall(pass, sw.Tag) { + return analysis.SuggestedFix{}, false + } + + textEdits := []analysis.TextEdit{missingCasesTextEdit(fset, f, samePkg, sw, defaultCase, enumType, missingMembers)} + + // need to add "fmt" import if "fmt" import doesn't already exist + if !hasImportWithPath(fset, f, `"fmt"`) { + textEdits = append(textEdits, fmtImportTextEdit(fset, f)) + } + + missing := make([]string, 0, len(missingMembers)) + for m := range missingMembers { + missing = append(missing, m) + } + sort.Strings(missing) + + return analysis.SuggestedFix{ + Message: fmt.Sprintf("add case clause for: %s", strings.Join(missing, ", ")), + TextEdits: textEdits, + }, true +} + +func containsFuncCall(pass *analysis.Pass, e ast.Expr) bool { + e = astutil.Unparen(e) + c, ok := e.(*ast.CallExpr) + if !ok { + return false + } + if _, isFunc := pass.TypesInfo.TypeOf(c.Fun).Underlying().(*types.Signature); isFunc { + return true + } + for _, a := range c.Args { + if containsFuncCall(pass, a) { + return true + } + } + return false +} + +func firstImportDecl(fset *token.FileSet, f *ast.File) *ast.GenDecl { + for _, decl := range f.Decls { + genDecl, ok := decl.(*ast.GenDecl) + if ok && genDecl.Tok == token.IMPORT { + // first IMPORT GenDecl + return genDecl + } + } + return nil +} + +// copies an GenDecl in a manner such that appending to the returned GenDecl's Specs field +// doesn't mutate the original GenDecl +func copyGenDecl(im *ast.GenDecl) *ast.GenDecl { + imCopy := *im + imCopy.Specs = make([]ast.Spec, len(im.Specs)) + for i := range im.Specs { + imCopy.Specs[i] = im.Specs[i] + } + return &imCopy +} + +func hasImportWithPath(fset *token.FileSet, f *ast.File, pathLiteral string) bool { + igroups := astutil.Imports(fset, f) + for _, igroup := range igroups { + for _, importSpec := range igroup { + if importSpec.Path.Value == pathLiteral { + return true + } + } + } + return false +} + +func fmtImportTextEdit(fset *token.FileSet, f *ast.File) analysis.TextEdit { + firstDecl := firstImportDecl(fset, f) + + if firstDecl == nil { + // file has no import declarations + // insert "fmt" import spec after package statement + return 
analysis.TextEdit{ + Pos: f.Name.End() + 1, // end of package name + 1 + End: f.Name.End() + 1, + NewText: []byte(`import ( + "fmt" + )`), + } + } + + // copy because we'll be mutating its Specs field + firstDeclCopy := copyGenDecl(firstDecl) + + // find insertion index for "fmt" import spec + var i int + for ; i < len(firstDeclCopy.Specs); i++ { + im := firstDeclCopy.Specs[i].(*ast.ImportSpec) + if v, _ := strconv.Unquote(im.Path.Value); v > "fmt" { + break + } + } + + // insert "fmt" import spec at the index + fmtSpec := &ast.ImportSpec{ + Path: &ast.BasicLit{ + // NOTE: Pos field doesn't seem to be required for our + // purposes here. + Kind: token.STRING, + Value: `"fmt"`, + }, + } + s := firstDeclCopy.Specs // local var for easier comprehension of next line + s = append(s[:i], append([]ast.Spec{fmtSpec}, s[i:]...)...) + firstDeclCopy.Specs = s + + // create the text edit + var buf bytes.Buffer + printer.Fprint(&buf, fset, firstDeclCopy) + + return analysis.TextEdit{ + Pos: firstDecl.Pos(), + End: firstDecl.End(), + NewText: buf.Bytes(), + } +} + +func missingCasesTextEdit(fset *token.FileSet, f *ast.File, samePkg bool, sw *ast.SwitchStmt, defaultCase *ast.CaseClause, enumType *types.Named, missingMembers map[string]struct{}) analysis.TextEdit { + // ... Construct insertion text for case clause and its body ... + + var tag bytes.Buffer + printer.Fprint(&tag, fset, sw.Tag) + + // If possible and if necessary, determine the package identifier based on + // the AST of other `case` clauses. + var pkgIdent *ast.Ident + if !samePkg { + for _, stmt := range sw.Body.List { + caseCl := stmt.(*ast.CaseClause) + if len(caseCl.List) != 0 { // guard against default case + if sel, ok := caseCl.List[0].(*ast.SelectorExpr); ok { + pkgIdent = sel.X.(*ast.Ident) + break + } + } + } + } + + missing := make([]string, 0, len(missingMembers)) + for m := range missingMembers { + if !samePkg { + if pkgIdent != nil { + // we were able to determine package identifier + missing = append(missing, pkgIdent.Name+"."+m) + } else { + // use the package name (may not be correct always) + // + // TODO: May need to also add import if the package isn't imported + // elsewhere. This (ie, a switch with zero case clauses) should + // happen rarely, so don't implement this for now. + missing = append(missing, enumType.Obj().Pkg().Name()+"."+m) + } + } else { + missing = append(missing, m) + } + } + sort.Strings(missing) + + insert := `case ` + strings.Join(missing, ", ") + `: + panic(fmt.Sprintf("unhandled value: %v",` + tag.String() + `))` + + // ... Create the text edit ... + + pos := sw.Body.Rbrace - 1 // put it as last case + if defaultCase != nil { + pos = defaultCase.Case - 2 // put it before the default case (why -2?) + } + + return analysis.TextEdit{ + Pos: pos, + End: pos, + NewText: []byte(insert), + } +} diff --git a/vendor/github.com/nishanths/predeclared/LICENSE b/vendor/github.com/nishanths/predeclared/LICENSE new file mode 100644 index 000000000..946212315 --- /dev/null +++ b/vendor/github.com/nishanths/predeclared/LICENSE @@ -0,0 +1,29 @@ +BSD 3-Clause License + +Copyright (c) 2017, Nishanth Shanmugham +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. 
+ +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +* Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/github.com/nishanths/predeclared/passes/predeclared/go18.go b/vendor/github.com/nishanths/predeclared/passes/predeclared/go18.go new file mode 100644 index 000000000..4083efc74 --- /dev/null +++ b/vendor/github.com/nishanths/predeclared/passes/predeclared/go18.go @@ -0,0 +1,9 @@ +// +build go1.8 + +package predeclared + +import "go/doc" + +func isPredeclaredIdent(name string) bool { + return doc.IsPredeclared(name) +} diff --git a/vendor/github.com/nishanths/predeclared/passes/predeclared/pre_go18.go b/vendor/github.com/nishanths/predeclared/passes/predeclared/pre_go18.go new file mode 100644 index 000000000..5780e0b56 --- /dev/null +++ b/vendor/github.com/nishanths/predeclared/passes/predeclared/pre_go18.go @@ -0,0 +1,53 @@ +// +build !go1.8 + +package predeclared + +func isPredeclaredIdent(name string) bool { + return predeclaredIdents[name] +} + +// Keep in sync with https://golang.org/ref/spec#Predeclared_identifiers +var predeclaredIdents = map[string]bool{ + "bool": true, + "byte": true, + "complex64": true, + "complex128": true, + "error": true, + "float32": true, + "float64": true, + "int": true, + "int8": true, + "int16": true, + "int32": true, + "int64": true, + "rune": true, + "string": true, + "uint": true, + "uint8": true, + "uint16": true, + "uint32": true, + "uint64": true, + "uintptr": true, + + "true": true, + "false": true, + "iota": true, + + "nil": true, + + "append": true, + "cap": true, + "close": true, + "complex": true, + "copy": true, + "delete": true, + "imag": true, + "len": true, + "make": true, + "new": true, + "panic": true, + "print": true, + "println": true, + "real": true, + "recover": true, +} diff --git a/vendor/github.com/nishanths/predeclared/passes/predeclared/predeclared.go b/vendor/github.com/nishanths/predeclared/passes/predeclared/predeclared.go new file mode 100644 index 000000000..67c0e0a00 --- /dev/null +++ b/vendor/github.com/nishanths/predeclared/passes/predeclared/predeclared.go @@ -0,0 +1,202 @@ +// Package predeclared provides a static analysis (used by the predeclared command) +// that can detect declarations in Go code that shadow one of Go's predeclared identifiers. +package predeclared + +import ( + "fmt" + "go/ast" + "go/token" + "strings" + + "golang.org/x/tools/go/analysis" +) + +// Flag names used by the analyzer. 
They are exported for use by analyzer +// driver programs. +const ( + IgnoreFlag = "ignore" + QualifiedFlag = "q" +) + +var ( + fIgnore string + fQualified bool +) + +func init() { + Analyzer.Flags.StringVar(&fIgnore, IgnoreFlag, "", "comma-separated list of predeclared identifiers to not report on") + Analyzer.Flags.BoolVar(&fQualified, QualifiedFlag, false, "include method names and field names (i.e., qualified names) in checks") +} + +var Analyzer = &analysis.Analyzer{ + Name: "predeclared", + Doc: "find code that shadows one of Go's predeclared identifiers", + Run: run, +} + +func run(pass *analysis.Pass) (interface{}, error) { + cfg := newConfig(fIgnore, fQualified) + for _, file := range pass.Files { + processFile(pass.Report, cfg, pass.Fset, file) + } + return nil, nil +} + +type config struct { + qualified bool + ignoredIdents map[string]struct{} +} + +func newConfig(ignore string, qualified bool) *config { + cfg := &config{ + qualified: qualified, + ignoredIdents: map[string]struct{}{}, + } + for _, s := range strings.Split(ignore, ",") { + ident := strings.TrimSpace(s) + if ident == "" { + continue + } + cfg.ignoredIdents[ident] = struct{}{} + } + return cfg +} + +type issue struct { + ident *ast.Ident + kind string + fset *token.FileSet +} + +func (i issue) String() string { + pos := i.fset.Position(i.ident.Pos()) + return fmt.Sprintf("%s: %s %s has same name as predeclared identifier", pos, i.kind, i.ident.Name) +} + +func processFile(report func(analysis.Diagnostic), cfg *config, fset *token.FileSet, file *ast.File) []issue { // nolint: gocyclo + var issues []issue + + maybeReport := func(x *ast.Ident, kind string) { + if _, isIgnored := cfg.ignoredIdents[x.Name]; !isIgnored && isPredeclaredIdent(x.Name) { + report(analysis.Diagnostic{ + Pos: x.Pos(), + End: x.End(), + Message: fmt.Sprintf("%s %s has same name as predeclared identifier", kind, x.Name), + }) + issues = append(issues, issue{x, kind, fset}) + } + } + + seenValueSpecs := make(map[*ast.ValueSpec]bool) + + // TODO: consider deduping package name issues for files in the + // same directory. + maybeReport(file.Name, "package name") + + for _, spec := range file.Imports { + if spec.Name == nil { + continue + } + maybeReport(spec.Name, "import name") + } + + // Handle declarations and fields. 
+ // https://golang.org/ref/spec#Declarations_and_scope + ast.Inspect(file, func(n ast.Node) bool { + switch x := n.(type) { + case *ast.GenDecl: + var kind string + switch x.Tok { + case token.CONST: + kind = "const" + case token.VAR: + kind = "variable" + default: + return true + } + for _, spec := range x.Specs { + if vspec, ok := spec.(*ast.ValueSpec); ok && !seenValueSpecs[vspec] { + seenValueSpecs[vspec] = true + for _, name := range vspec.Names { + maybeReport(name, kind) + } + } + } + return true + case *ast.TypeSpec: + maybeReport(x.Name, "type") + return true + case *ast.StructType: + if cfg.qualified && x.Fields != nil { + for _, field := range x.Fields.List { + for _, name := range field.Names { + maybeReport(name, "field") + } + } + } + return true + case *ast.InterfaceType: + if cfg.qualified && x.Methods != nil { + for _, meth := range x.Methods.List { + for _, name := range meth.Names { + maybeReport(name, "method") + } + } + } + return true + case *ast.FuncDecl: + if x.Recv == nil { + // it's a function + maybeReport(x.Name, "function") + } else { + // it's a method + if cfg.qualified { + maybeReport(x.Name, "method") + } + } + // add receivers idents + if x.Recv != nil { + for _, field := range x.Recv.List { + for _, name := range field.Names { + maybeReport(name, "receiver") + } + } + } + // Params and Results will be checked in the *ast.FuncType case. + return true + case *ast.FuncType: + // add params idents + for _, field := range x.Params.List { + for _, name := range field.Names { + maybeReport(name, "param") + } + } + // add returns idents + if x.Results != nil { + for _, field := range x.Results.List { + for _, name := range field.Names { + maybeReport(name, "named return") + } + } + } + return true + case *ast.LabeledStmt: + maybeReport(x.Label, "label") + return true + case *ast.AssignStmt: + // We only care about short variable declarations, which use token.DEFINE. 
+ if x.Tok == token.DEFINE { + for _, expr := range x.Lhs { + if ident, ok := expr.(*ast.Ident); ok { + maybeReport(ident, "variable") + } + } + } + return true + default: + return true + } + }) + + return issues +} diff --git a/vendor/github.com/olekukonko/tablewriter/.gitignore b/vendor/github.com/olekukonko/tablewriter/.gitignore new file mode 100644 index 000000000..b66cec635 --- /dev/null +++ b/vendor/github.com/olekukonko/tablewriter/.gitignore @@ -0,0 +1,15 @@ +# Created by .ignore support plugin (hsz.mobi) +### Go template +# Binaries for programs and plugins +*.exe +*.exe~ +*.dll +*.so +*.dylib + +# Test binary, build with `go test -c` +*.test + +# Output of the go coverage tool, specifically when used with LiteIDE +*.out + diff --git a/vendor/github.com/olekukonko/tablewriter/.travis.yml b/vendor/github.com/olekukonko/tablewriter/.travis.yml new file mode 100644 index 000000000..366d48a35 --- /dev/null +++ b/vendor/github.com/olekukonko/tablewriter/.travis.yml @@ -0,0 +1,22 @@ +language: go +arch: + - ppc64le + - amd64 +go: + - 1.3 + - 1.4 + - 1.5 + - 1.6 + - 1.7 + - 1.8 + - 1.9 + - "1.10" + - tip +jobs: + exclude : + - arch : ppc64le + go : + - 1.3 + - arch : ppc64le + go : + - 1.4 diff --git a/vendor/github.com/olekukonko/tablewriter/LICENSE.md b/vendor/github.com/olekukonko/tablewriter/LICENSE.md new file mode 100644 index 000000000..a0769b5c1 --- /dev/null +++ b/vendor/github.com/olekukonko/tablewriter/LICENSE.md @@ -0,0 +1,19 @@ +Copyright (C) 2014 by Oleku Konko + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/vendor/github.com/olekukonko/tablewriter/README.md b/vendor/github.com/olekukonko/tablewriter/README.md new file mode 100644 index 000000000..f06530d75 --- /dev/null +++ b/vendor/github.com/olekukonko/tablewriter/README.md @@ -0,0 +1,431 @@ +ASCII Table Writer +========= + +[![Build Status](https://travis-ci.org/olekukonko/tablewriter.png?branch=master)](https://travis-ci.org/olekukonko/tablewriter) +[![Total views](https://img.shields.io/sourcegraph/rrc/github.com/olekukonko/tablewriter.svg)](https://sourcegraph.com/github.com/olekukonko/tablewriter) +[![Godoc](https://godoc.org/github.com/olekukonko/tablewriter?status.svg)](https://godoc.org/github.com/olekukonko/tablewriter) + +Generate ASCII table on the fly ... 
Installation is simple as + + go get github.com/olekukonko/tablewriter + + +#### Features +- Automatic Padding +- Support Multiple Lines +- Supports Alignment +- Support Custom Separators +- Automatic Alignment of numbers & percentage +- Write directly to http , file etc via `io.Writer` +- Read directly from CSV file +- Optional row line via `SetRowLine` +- Normalise table header +- Make CSV Headers optional +- Enable or disable table border +- Set custom footer support +- Optional identical cells merging +- Set custom caption +- Optional reflowing of paragraphs in multi-line cells. + +#### Example 1 - Basic +```go +data := [][]string{ + []string{"A", "The Good", "500"}, + []string{"B", "The Very very Bad Man", "288"}, + []string{"C", "The Ugly", "120"}, + []string{"D", "The Gopher", "800"}, +} + +table := tablewriter.NewWriter(os.Stdout) +table.SetHeader([]string{"Name", "Sign", "Rating"}) + +for _, v := range data { + table.Append(v) +} +table.Render() // Send output +``` + +##### Output 1 +``` ++------+-----------------------+--------+ +| NAME | SIGN | RATING | ++------+-----------------------+--------+ +| A | The Good | 500 | +| B | The Very very Bad Man | 288 | +| C | The Ugly | 120 | +| D | The Gopher | 800 | ++------+-----------------------+--------+ +``` + +#### Example 2 - Without Border / Footer / Bulk Append +```go +data := [][]string{ + []string{"1/1/2014", "Domain name", "2233", "$10.98"}, + []string{"1/1/2014", "January Hosting", "2233", "$54.95"}, + []string{"1/4/2014", "February Hosting", "2233", "$51.00"}, + []string{"1/4/2014", "February Extra Bandwidth", "2233", "$30.00"}, +} + +table := tablewriter.NewWriter(os.Stdout) +table.SetHeader([]string{"Date", "Description", "CV2", "Amount"}) +table.SetFooter([]string{"", "", "Total", "$146.93"}) // Add Footer +table.SetBorder(false) // Set Border to false +table.AppendBulk(data) // Add Bulk Data +table.Render() +``` + +##### Output 2 +``` + + DATE | DESCRIPTION | CV2 | AMOUNT +-----------+--------------------------+-------+---------- + 1/1/2014 | Domain name | 2233 | $10.98 + 1/1/2014 | January Hosting | 2233 | $54.95 + 1/4/2014 | February Hosting | 2233 | $51.00 + 1/4/2014 | February Extra Bandwidth | 2233 | $30.00 +-----------+--------------------------+-------+---------- + TOTAL | $146 93 + --------+---------- + +``` + + +#### Example 3 - CSV +```go +table, _ := tablewriter.NewCSV(os.Stdout, "testdata/test_info.csv", true) +table.SetAlignment(tablewriter.ALIGN_LEFT) // Set Alignment +table.Render() +``` + +##### Output 3 +``` ++----------+--------------+------+-----+---------+----------------+ +| FIELD | TYPE | NULL | KEY | DEFAULT | EXTRA | ++----------+--------------+------+-----+---------+----------------+ +| user_id | smallint(5) | NO | PRI | NULL | auto_increment | +| username | varchar(10) | NO | | NULL | | +| password | varchar(100) | NO | | NULL | | ++----------+--------------+------+-----+---------+----------------+ +``` + +#### Example 4 - Custom Separator +```go +table, _ := tablewriter.NewCSV(os.Stdout, "testdata/test.csv", true) +table.SetRowLine(true) // Enable row line + +// Change table lines +table.SetCenterSeparator("*") +table.SetColumnSeparator("╪") +table.SetRowSeparator("-") + +table.SetAlignment(tablewriter.ALIGN_LEFT) +table.Render() +``` + +##### Output 4 +``` +*------------*-----------*---------* +╪ FIRST NAME ╪ LAST NAME ╪ SSN ╪ +*------------*-----------*---------* +╪ John ╪ Barry ╪ 123456 ╪ +*------------*-----------*---------* +╪ Kathy ╪ Smith ╪ 687987 ╪ +*------------*-----------*---------* +╪ 
Bob ╪ McCornick ╪ 3979870 ╪ +*------------*-----------*---------* +``` + +#### Example 5 - Markdown Format +```go +data := [][]string{ + []string{"1/1/2014", "Domain name", "2233", "$10.98"}, + []string{"1/1/2014", "January Hosting", "2233", "$54.95"}, + []string{"1/4/2014", "February Hosting", "2233", "$51.00"}, + []string{"1/4/2014", "February Extra Bandwidth", "2233", "$30.00"}, +} + +table := tablewriter.NewWriter(os.Stdout) +table.SetHeader([]string{"Date", "Description", "CV2", "Amount"}) +table.SetBorders(tablewriter.Border{Left: true, Top: false, Right: true, Bottom: false}) +table.SetCenterSeparator("|") +table.AppendBulk(data) // Add Bulk Data +table.Render() +``` + +##### Output 5 +``` +| DATE | DESCRIPTION | CV2 | AMOUNT | +|----------|--------------------------|------|--------| +| 1/1/2014 | Domain name | 2233 | $10.98 | +| 1/1/2014 | January Hosting | 2233 | $54.95 | +| 1/4/2014 | February Hosting | 2233 | $51.00 | +| 1/4/2014 | February Extra Bandwidth | 2233 | $30.00 | +``` + +#### Example 6 - Identical cells merging +```go +data := [][]string{ + []string{"1/1/2014", "Domain name", "1234", "$10.98"}, + []string{"1/1/2014", "January Hosting", "2345", "$54.95"}, + []string{"1/4/2014", "February Hosting", "3456", "$51.00"}, + []string{"1/4/2014", "February Extra Bandwidth", "4567", "$30.00"}, +} + +table := tablewriter.NewWriter(os.Stdout) +table.SetHeader([]string{"Date", "Description", "CV2", "Amount"}) +table.SetFooter([]string{"", "", "Total", "$146.93"}) +table.SetAutoMergeCells(true) +table.SetRowLine(true) +table.AppendBulk(data) +table.Render() +``` + +##### Output 6 +``` ++----------+--------------------------+-------+---------+ +| DATE | DESCRIPTION | CV2 | AMOUNT | ++----------+--------------------------+-------+---------+ +| 1/1/2014 | Domain name | 1234 | $10.98 | ++ +--------------------------+-------+---------+ +| | January Hosting | 2345 | $54.95 | ++----------+--------------------------+-------+---------+ +| 1/4/2014 | February Hosting | 3456 | $51.00 | ++ +--------------------------+-------+---------+ +| | February Extra Bandwidth | 4567 | $30.00 | ++----------+--------------------------+-------+---------+ +| TOTAL | $146 93 | ++----------+--------------------------+-------+---------+ +``` + +#### Example 7 - Identical cells merging (specify the column index to merge) +```go +data := [][]string{ + []string{"1/1/2014", "Domain name", "1234", "$10.98"}, + []string{"1/1/2014", "January Hosting", "1234", "$10.98"}, + []string{"1/4/2014", "February Hosting", "3456", "$51.00"}, + []string{"1/4/2014", "February Extra Bandwidth", "4567", "$30.00"}, +} + +table := tablewriter.NewWriter(os.Stdout) +table.SetHeader([]string{"Date", "Description", "CV2", "Amount"}) +table.SetFooter([]string{"", "", "Total", "$146.93"}) +table.SetAutoMergeCellsByColumnIndex([]int{2, 3}) +table.SetRowLine(true) +table.AppendBulk(data) +table.Render() +``` + +##### Output 7 +``` ++----------+--------------------------+-------+---------+ +| DATE | DESCRIPTION | CV2 | AMOUNT | ++----------+--------------------------+-------+---------+ +| 1/1/2014 | Domain name | 1234 | $10.98 | ++----------+--------------------------+ + + +| 1/1/2014 | January Hosting | | | ++----------+--------------------------+-------+---------+ +| 1/4/2014 | February Hosting | 3456 | $51.00 | ++----------+--------------------------+-------+---------+ +| 1/4/2014 | February Extra Bandwidth | 4567 | $30.00 | ++----------+--------------------------+-------+---------+ +| TOTAL | $146.93 | 
++----------+--------------------------+-------+---------+ +``` + + +#### Table with color +```go +data := [][]string{ + []string{"1/1/2014", "Domain name", "2233", "$10.98"}, + []string{"1/1/2014", "January Hosting", "2233", "$54.95"}, + []string{"1/4/2014", "February Hosting", "2233", "$51.00"}, + []string{"1/4/2014", "February Extra Bandwidth", "2233", "$30.00"}, +} + +table := tablewriter.NewWriter(os.Stdout) +table.SetHeader([]string{"Date", "Description", "CV2", "Amount"}) +table.SetFooter([]string{"", "", "Total", "$146.93"}) // Add Footer +table.SetBorder(false) // Set Border to false + +table.SetHeaderColor(tablewriter.Colors{tablewriter.Bold, tablewriter.BgGreenColor}, + tablewriter.Colors{tablewriter.FgHiRedColor, tablewriter.Bold, tablewriter.BgBlackColor}, + tablewriter.Colors{tablewriter.BgRedColor, tablewriter.FgWhiteColor}, + tablewriter.Colors{tablewriter.BgCyanColor, tablewriter.FgWhiteColor}) + +table.SetColumnColor(tablewriter.Colors{tablewriter.Bold, tablewriter.FgHiBlackColor}, + tablewriter.Colors{tablewriter.Bold, tablewriter.FgHiRedColor}, + tablewriter.Colors{tablewriter.Bold, tablewriter.FgHiBlackColor}, + tablewriter.Colors{tablewriter.Bold, tablewriter.FgBlackColor}) + +table.SetFooterColor(tablewriter.Colors{}, tablewriter.Colors{}, + tablewriter.Colors{tablewriter.Bold}, + tablewriter.Colors{tablewriter.FgHiRedColor}) + +table.AppendBulk(data) +table.Render() +``` + +#### Table with color Output +![Table with Color](https://cloud.githubusercontent.com/assets/6460392/21101956/bbc7b356-c0a1-11e6-9f36-dba694746efc.png) + +#### Example - 8 Table Cells with Color + +Individual Cell Colors from `func Rich` take precedence over Column Colors + +```go +data := [][]string{ + []string{"Test1Merge", "HelloCol2 - 1", "HelloCol3 - 1", "HelloCol4 - 1"}, + []string{"Test1Merge", "HelloCol2 - 2", "HelloCol3 - 2", "HelloCol4 - 2"}, + []string{"Test1Merge", "HelloCol2 - 3", "HelloCol3 - 3", "HelloCol4 - 3"}, + []string{"Test2Merge", "HelloCol2 - 4", "HelloCol3 - 4", "HelloCol4 - 4"}, + []string{"Test2Merge", "HelloCol2 - 5", "HelloCol3 - 5", "HelloCol4 - 5"}, + []string{"Test2Merge", "HelloCol2 - 6", "HelloCol3 - 6", "HelloCol4 - 6"}, + []string{"Test2Merge", "HelloCol2 - 7", "HelloCol3 - 7", "HelloCol4 - 7"}, + []string{"Test3Merge", "HelloCol2 - 8", "HelloCol3 - 8", "HelloCol4 - 8"}, + []string{"Test3Merge", "HelloCol2 - 9", "HelloCol3 - 9", "HelloCol4 - 9"}, + []string{"Test3Merge", "HelloCol2 - 10", "HelloCol3 -10", "HelloCol4 - 10"}, +} + +table := tablewriter.NewWriter(os.Stdout) +table.SetHeader([]string{"Col1", "Col2", "Col3", "Col4"}) +table.SetFooter([]string{"", "", "Footer3", "Footer4"}) +table.SetBorder(false) + +table.SetHeaderColor(tablewriter.Colors{tablewriter.Bold, tablewriter.BgGreenColor}, + tablewriter.Colors{tablewriter.FgHiRedColor, tablewriter.Bold, tablewriter.BgBlackColor}, + tablewriter.Colors{tablewriter.BgRedColor, tablewriter.FgWhiteColor}, + tablewriter.Colors{tablewriter.BgCyanColor, tablewriter.FgWhiteColor}) + +table.SetColumnColor(tablewriter.Colors{tablewriter.Bold, tablewriter.FgHiBlackColor}, + tablewriter.Colors{tablewriter.Bold, tablewriter.FgHiRedColor}, + tablewriter.Colors{tablewriter.Bold, tablewriter.FgHiBlackColor}, + tablewriter.Colors{tablewriter.Bold, tablewriter.FgBlackColor}) + +table.SetFooterColor(tablewriter.Colors{}, tablewriter.Colors{}, + tablewriter.Colors{tablewriter.Bold}, + tablewriter.Colors{tablewriter.FgHiRedColor}) + +colorData1 := []string{"TestCOLOR1Merge", "HelloCol2 - COLOR1", "HelloCol3 - COLOR1", "HelloCol4 
- COLOR1"} +colorData2 := []string{"TestCOLOR2Merge", "HelloCol2 - COLOR2", "HelloCol3 - COLOR2", "HelloCol4 - COLOR2"} + +for i, row := range data { + if i == 4 { + table.Rich(colorData1, []tablewriter.Colors{tablewriter.Colors{}, tablewriter.Colors{tablewriter.Normal, tablewriter.FgCyanColor}, tablewriter.Colors{tablewriter.Bold, tablewriter.FgWhiteColor}, tablewriter.Colors{}}) + table.Rich(colorData2, []tablewriter.Colors{tablewriter.Colors{tablewriter.Normal, tablewriter.FgMagentaColor}, tablewriter.Colors{}, tablewriter.Colors{tablewriter.Bold, tablewriter.BgRedColor}, tablewriter.Colors{tablewriter.FgHiGreenColor, tablewriter.Italic, tablewriter.BgHiCyanColor}}) + } + table.Append(row) +} + +table.SetAutoMergeCells(true) +table.Render() + +``` + +##### Table cells with color Output +![Table cells with Color](https://user-images.githubusercontent.com/9064687/63969376-bcd88d80-ca6f-11e9-9466-c3d954700b25.png) + +#### Example 9 - Set table caption +```go +data := [][]string{ + []string{"A", "The Good", "500"}, + []string{"B", "The Very very Bad Man", "288"}, + []string{"C", "The Ugly", "120"}, + []string{"D", "The Gopher", "800"}, +} + +table := tablewriter.NewWriter(os.Stdout) +table.SetHeader([]string{"Name", "Sign", "Rating"}) +table.SetCaption(true, "Movie ratings.") + +for _, v := range data { + table.Append(v) +} +table.Render() // Send output +``` + +Note: Caption text will wrap with total width of rendered table. + +##### Output 9 +``` ++------+-----------------------+--------+ +| NAME | SIGN | RATING | ++------+-----------------------+--------+ +| A | The Good | 500 | +| B | The Very very Bad Man | 288 | +| C | The Ugly | 120 | +| D | The Gopher | 800 | ++------+-----------------------+--------+ +Movie ratings. +``` + +#### Example 10 - Set NoWhiteSpace and TablePadding option +```go +data := [][]string{ + {"node1.example.com", "Ready", "compute", "1.11"}, + {"node2.example.com", "Ready", "compute", "1.11"}, + {"node3.example.com", "Ready", "compute", "1.11"}, + {"node4.example.com", "NotReady", "compute", "1.11"}, +} + +table := tablewriter.NewWriter(os.Stdout) +table.SetHeader([]string{"Name", "Status", "Role", "Version"}) +table.SetAutoWrapText(false) +table.SetAutoFormatHeaders(true) +table.SetHeaderAlignment(ALIGN_LEFT) +table.SetAlignment(ALIGN_LEFT) +table.SetCenterSeparator("") +table.SetColumnSeparator("") +table.SetRowSeparator("") +table.SetHeaderLine(false) +table.SetBorder(false) +table.SetTablePadding("\t") // pad with tabs +table.SetNoWhiteSpace(true) +table.AppendBulk(data) // Add Bulk Data +table.Render() +``` + +##### Output 10 +``` +NAME STATUS ROLE VERSION +node1.example.com Ready compute 1.11 +node2.example.com Ready compute 1.11 +node3.example.com Ready compute 1.11 +node4.example.com NotReady compute 1.11 +``` + +#### Render table into a string + +Instead of rendering the table to `io.Stdout` you can also render it into a string. Go 1.10 introduced the `strings.Builder` type which implements the `io.Writer` interface and can therefore be used for this task. 
Example: + +```go +package main + +import ( + "strings" + "fmt" + + "github.com/olekukonko/tablewriter" +) + +func main() { + tableString := &strings.Builder{} + table := tablewriter.NewWriter(tableString) + + /* + * Code to fill the table + */ + + table.Render() + + fmt.Println(tableString.String()) +} +``` + +#### TODO +- ~~Import Directly from CSV~~ - `done` +- ~~Support for `SetFooter`~~ - `done` +- ~~Support for `SetBorder`~~ - `done` +- ~~Support table with uneven rows~~ - `done` +- ~~Support custom alignment~~ +- General Improvement & Optimisation +- `NewHTML` Parse table from HTML diff --git a/vendor/github.com/olekukonko/tablewriter/csv.go b/vendor/github.com/olekukonko/tablewriter/csv.go new file mode 100644 index 000000000..98878303b --- /dev/null +++ b/vendor/github.com/olekukonko/tablewriter/csv.go @@ -0,0 +1,52 @@ +// Copyright 2014 Oleku Konko All rights reserved. +// Use of this source code is governed by a MIT +// license that can be found in the LICENSE file. + +// This module is a Table Writer API for the Go Programming Language. +// The protocols were written in pure Go and works on windows and unix systems + +package tablewriter + +import ( + "encoding/csv" + "io" + "os" +) + +// Start A new table by importing from a CSV file +// Takes io.Writer and csv File name +func NewCSV(writer io.Writer, fileName string, hasHeader bool) (*Table, error) { + file, err := os.Open(fileName) + if err != nil { + return &Table{}, err + } + defer file.Close() + csvReader := csv.NewReader(file) + t, err := NewCSVReader(writer, csvReader, hasHeader) + return t, err +} + +// Start a New Table Writer with csv.Reader +// This enables customisation such as reader.Comma = ';' +// See http://golang.org/src/pkg/encoding/csv/reader.go?s=3213:3671#L94 +func NewCSVReader(writer io.Writer, csvReader *csv.Reader, hasHeader bool) (*Table, error) { + t := NewWriter(writer) + if hasHeader { + // Read the first row + headers, err := csvReader.Read() + if err != nil { + return &Table{}, err + } + t.SetHeader(headers) + } + for { + record, err := csvReader.Read() + if err == io.EOF { + break + } else if err != nil { + return &Table{}, err + } + t.Append(record) + } + return t, nil +} diff --git a/vendor/github.com/olekukonko/tablewriter/go.mod b/vendor/github.com/olekukonko/tablewriter/go.mod new file mode 100644 index 000000000..484ab01f1 --- /dev/null +++ b/vendor/github.com/olekukonko/tablewriter/go.mod @@ -0,0 +1,5 @@ +module github.com/olekukonko/tablewriter + +go 1.12 + +require github.com/mattn/go-runewidth v0.0.9 diff --git a/vendor/github.com/olekukonko/tablewriter/go.sum b/vendor/github.com/olekukonko/tablewriter/go.sum new file mode 100644 index 000000000..4a94bf58b --- /dev/null +++ b/vendor/github.com/olekukonko/tablewriter/go.sum @@ -0,0 +1,2 @@ +github.com/mattn/go-runewidth v0.0.9 h1:Lm995f3rfxdpd6TSmuVCHVb/QhupuXlYr8sCI/QdE+0= +github.com/mattn/go-runewidth v0.0.9/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI= diff --git a/vendor/github.com/olekukonko/tablewriter/table.go b/vendor/github.com/olekukonko/tablewriter/table.go new file mode 100644 index 000000000..f913149c6 --- /dev/null +++ b/vendor/github.com/olekukonko/tablewriter/table.go @@ -0,0 +1,967 @@ +// Copyright 2014 Oleku Konko All rights reserved. +// Use of this source code is governed by a MIT +// license that can be found in the LICENSE file. + +// This module is a Table Writer API for the Go Programming Language. 
+// The protocols were written in pure Go and works on windows and unix systems + +// Create & Generate text based table +package tablewriter + +import ( + "bytes" + "fmt" + "io" + "regexp" + "strings" +) + +const ( + MAX_ROW_WIDTH = 30 +) + +const ( + CENTER = "+" + ROW = "-" + COLUMN = "|" + SPACE = " " + NEWLINE = "\n" +) + +const ( + ALIGN_DEFAULT = iota + ALIGN_CENTER + ALIGN_RIGHT + ALIGN_LEFT +) + +var ( + decimal = regexp.MustCompile(`^-?(?:\d{1,3}(?:,\d{3})*|\d+)(?:\.\d+)?$`) + percent = regexp.MustCompile(`^-?\d+\.?\d*$%$`) +) + +type Border struct { + Left bool + Right bool + Top bool + Bottom bool +} + +type Table struct { + out io.Writer + rows [][]string + lines [][][]string + cs map[int]int + rs map[int]int + headers [][]string + footers [][]string + caption bool + captionText string + autoFmt bool + autoWrap bool + reflowText bool + mW int + pCenter string + pRow string + pColumn string + tColumn int + tRow int + hAlign int + fAlign int + align int + newLine string + rowLine bool + autoMergeCells bool + columnsToAutoMergeCells map[int]bool + noWhiteSpace bool + tablePadding string + hdrLine bool + borders Border + colSize int + headerParams []string + columnsParams []string + footerParams []string + columnsAlign []int +} + +// Start New Table +// Take io.Writer Directly +func NewWriter(writer io.Writer) *Table { + t := &Table{ + out: writer, + rows: [][]string{}, + lines: [][][]string{}, + cs: make(map[int]int), + rs: make(map[int]int), + headers: [][]string{}, + footers: [][]string{}, + caption: false, + captionText: "Table caption.", + autoFmt: true, + autoWrap: true, + reflowText: true, + mW: MAX_ROW_WIDTH, + pCenter: CENTER, + pRow: ROW, + pColumn: COLUMN, + tColumn: -1, + tRow: -1, + hAlign: ALIGN_DEFAULT, + fAlign: ALIGN_DEFAULT, + align: ALIGN_DEFAULT, + newLine: NEWLINE, + rowLine: false, + hdrLine: true, + borders: Border{Left: true, Right: true, Bottom: true, Top: true}, + colSize: -1, + headerParams: []string{}, + columnsParams: []string{}, + footerParams: []string{}, + columnsAlign: []int{}} + return t +} + +// Render table output +func (t *Table) Render() { + if t.borders.Top { + t.printLine(true) + } + t.printHeading() + if t.autoMergeCells { + t.printRowsMergeCells() + } else { + t.printRows() + } + if !t.rowLine && t.borders.Bottom { + t.printLine(true) + } + t.printFooter() + + if t.caption { + t.printCaption() + } +} + +const ( + headerRowIdx = -1 + footerRowIdx = -2 +) + +// Set table header +func (t *Table) SetHeader(keys []string) { + t.colSize = len(keys) + for i, v := range keys { + lines := t.parseDimension(v, i, headerRowIdx) + t.headers = append(t.headers, lines) + } +} + +// Set table Footer +func (t *Table) SetFooter(keys []string) { + //t.colSize = len(keys) + for i, v := range keys { + lines := t.parseDimension(v, i, footerRowIdx) + t.footers = append(t.footers, lines) + } +} + +// Set table Caption +func (t *Table) SetCaption(caption bool, captionText ...string) { + t.caption = caption + if len(captionText) == 1 { + t.captionText = captionText[0] + } +} + +// Turn header autoformatting on/off. Default is on (true). +func (t *Table) SetAutoFormatHeaders(auto bool) { + t.autoFmt = auto +} + +// Turn automatic multiline text adjustment on/off. Default is on (true). +func (t *Table) SetAutoWrapText(auto bool) { + t.autoWrap = auto +} + +// Turn automatic reflowing of multiline text when rewrapping. Default is on (true). 
+func (t *Table) SetReflowDuringAutoWrap(auto bool) { + t.reflowText = auto +} + +// Set the Default column width +func (t *Table) SetColWidth(width int) { + t.mW = width +} + +// Set the minimal width for a column +func (t *Table) SetColMinWidth(column int, width int) { + t.cs[column] = width +} + +// Set the Column Separator +func (t *Table) SetColumnSeparator(sep string) { + t.pColumn = sep +} + +// Set the Row Separator +func (t *Table) SetRowSeparator(sep string) { + t.pRow = sep +} + +// Set the center Separator +func (t *Table) SetCenterSeparator(sep string) { + t.pCenter = sep +} + +// Set Header Alignment +func (t *Table) SetHeaderAlignment(hAlign int) { + t.hAlign = hAlign +} + +// Set Footer Alignment +func (t *Table) SetFooterAlignment(fAlign int) { + t.fAlign = fAlign +} + +// Set Table Alignment +func (t *Table) SetAlignment(align int) { + t.align = align +} + +// Set No White Space +func (t *Table) SetNoWhiteSpace(allow bool) { + t.noWhiteSpace = allow +} + +// Set Table Padding +func (t *Table) SetTablePadding(padding string) { + t.tablePadding = padding +} + +func (t *Table) SetColumnAlignment(keys []int) { + for _, v := range keys { + switch v { + case ALIGN_CENTER: + break + case ALIGN_LEFT: + break + case ALIGN_RIGHT: + break + default: + v = ALIGN_DEFAULT + } + t.columnsAlign = append(t.columnsAlign, v) + } +} + +// Set New Line +func (t *Table) SetNewLine(nl string) { + t.newLine = nl +} + +// Set Header Line +// This would enable / disable a line after the header +func (t *Table) SetHeaderLine(line bool) { + t.hdrLine = line +} + +// Set Row Line +// This would enable / disable a line on each row of the table +func (t *Table) SetRowLine(line bool) { + t.rowLine = line +} + +// Set Auto Merge Cells +// This would enable / disable the merge of cells with identical values +func (t *Table) SetAutoMergeCells(auto bool) { + t.autoMergeCells = auto +} + +// Set Auto Merge Cells By Column Index +// This would enable / disable the merge of cells with identical values for specific columns +// If cols is empty, it is the same as `SetAutoMergeCells(true)`. 
+func (t *Table) SetAutoMergeCellsByColumnIndex(cols []int) { + t.autoMergeCells = true + + if len(cols) > 0 { + m := make(map[int]bool) + for _, col := range cols { + m[col] = true + } + t.columnsToAutoMergeCells = m + } +} + +// Set Table Border +// This would enable / disable line around the table +func (t *Table) SetBorder(border bool) { + t.SetBorders(Border{border, border, border, border}) +} + +func (t *Table) SetBorders(border Border) { + t.borders = border +} + +// Append row to table +func (t *Table) Append(row []string) { + rowSize := len(t.headers) + if rowSize > t.colSize { + t.colSize = rowSize + } + + n := len(t.lines) + line := [][]string{} + for i, v := range row { + + // Detect string width + // Detect String height + // Break strings into words + out := t.parseDimension(v, i, n) + + // Append broken words + line = append(line, out) + } + t.lines = append(t.lines, line) +} + +// Append row to table with color attributes +func (t *Table) Rich(row []string, colors []Colors) { + rowSize := len(t.headers) + if rowSize > t.colSize { + t.colSize = rowSize + } + + n := len(t.lines) + line := [][]string{} + for i, v := range row { + + // Detect string width + // Detect String height + // Break strings into words + out := t.parseDimension(v, i, n) + + if len(colors) > i { + color := colors[i] + out[0] = format(out[0], color) + } + + // Append broken words + line = append(line, out) + } + t.lines = append(t.lines, line) +} + +// Allow Support for Bulk Append +// Eliminates repeated for loops +func (t *Table) AppendBulk(rows [][]string) { + for _, row := range rows { + t.Append(row) + } +} + +// NumLines to get the number of lines +func (t *Table) NumLines() int { + return len(t.lines) +} + +// Clear rows +func (t *Table) ClearRows() { + t.lines = [][][]string{} +} + +// Clear footer +func (t *Table) ClearFooter() { + t.footers = [][]string{} +} + +// Center based on position and border. 
+func (t *Table) center(i int) string { + if i == -1 && !t.borders.Left { + return t.pRow + } + + if i == len(t.cs)-1 && !t.borders.Right { + return t.pRow + } + + return t.pCenter +} + +// Print line based on row width +func (t *Table) printLine(nl bool) { + fmt.Fprint(t.out, t.center(-1)) + for i := 0; i < len(t.cs); i++ { + v := t.cs[i] + fmt.Fprintf(t.out, "%s%s%s%s", + t.pRow, + strings.Repeat(string(t.pRow), v), + t.pRow, + t.center(i)) + } + if nl { + fmt.Fprint(t.out, t.newLine) + } +} + +// Print line based on row width with our without cell separator +func (t *Table) printLineOptionalCellSeparators(nl bool, displayCellSeparator []bool) { + fmt.Fprint(t.out, t.pCenter) + for i := 0; i < len(t.cs); i++ { + v := t.cs[i] + if i > len(displayCellSeparator) || displayCellSeparator[i] { + // Display the cell separator + fmt.Fprintf(t.out, "%s%s%s%s", + t.pRow, + strings.Repeat(string(t.pRow), v), + t.pRow, + t.pCenter) + } else { + // Don't display the cell separator for this cell + fmt.Fprintf(t.out, "%s%s", + strings.Repeat(" ", v+2), + t.pCenter) + } + } + if nl { + fmt.Fprint(t.out, t.newLine) + } +} + +// Return the PadRight function if align is left, PadLeft if align is right, +// and Pad by default +func pad(align int) func(string, string, int) string { + padFunc := Pad + switch align { + case ALIGN_LEFT: + padFunc = PadRight + case ALIGN_RIGHT: + padFunc = PadLeft + } + return padFunc +} + +// Print heading information +func (t *Table) printHeading() { + // Check if headers is available + if len(t.headers) < 1 { + return + } + + // Identify last column + end := len(t.cs) - 1 + + // Get pad function + padFunc := pad(t.hAlign) + + // Checking for ANSI escape sequences for header + is_esc_seq := false + if len(t.headerParams) > 0 { + is_esc_seq = true + } + + // Maximum height. + max := t.rs[headerRowIdx] + + // Print Heading + for x := 0; x < max; x++ { + // Check if border is set + // Replace with space if not set + if !t.noWhiteSpace { + fmt.Fprint(t.out, ConditionString(t.borders.Left, t.pColumn, SPACE)) + } + + for y := 0; y <= end; y++ { + v := t.cs[y] + h := "" + + if y < len(t.headers) && x < len(t.headers[y]) { + h = t.headers[y][x] + } + if t.autoFmt { + h = Title(h) + } + pad := ConditionString((y == end && !t.borders.Left), SPACE, t.pColumn) + if t.noWhiteSpace { + pad = ConditionString((y == end && !t.borders.Left), SPACE, t.tablePadding) + } + if is_esc_seq { + if !t.noWhiteSpace { + fmt.Fprintf(t.out, " %s %s", + format(padFunc(h, SPACE, v), + t.headerParams[y]), pad) + } else { + fmt.Fprintf(t.out, "%s %s", + format(padFunc(h, SPACE, v), + t.headerParams[y]), pad) + } + } else { + if !t.noWhiteSpace { + fmt.Fprintf(t.out, " %s %s", + padFunc(h, SPACE, v), + pad) + } else { + // the spaces between breaks the kube formatting + fmt.Fprintf(t.out, "%s%s", + padFunc(h, SPACE, v), + pad) + } + } + } + // Next line + fmt.Fprint(t.out, t.newLine) + } + if t.hdrLine { + t.printLine(true) + } +} + +// Print heading information +func (t *Table) printFooter() { + // Check if headers is available + if len(t.footers) < 1 { + return + } + + // Only print line if border is not set + if !t.borders.Bottom { + t.printLine(true) + } + + // Identify last column + end := len(t.cs) - 1 + + // Get pad function + padFunc := pad(t.fAlign) + + // Checking for ANSI escape sequences for header + is_esc_seq := false + if len(t.footerParams) > 0 { + is_esc_seq = true + } + + // Maximum height. 
+ max := t.rs[footerRowIdx] + + // Print Footer + erasePad := make([]bool, len(t.footers)) + for x := 0; x < max; x++ { + // Check if border is set + // Replace with space if not set + fmt.Fprint(t.out, ConditionString(t.borders.Bottom, t.pColumn, SPACE)) + + for y := 0; y <= end; y++ { + v := t.cs[y] + f := "" + if y < len(t.footers) && x < len(t.footers[y]) { + f = t.footers[y][x] + } + if t.autoFmt { + f = Title(f) + } + pad := ConditionString((y == end && !t.borders.Top), SPACE, t.pColumn) + + if erasePad[y] || (x == 0 && len(f) == 0) { + pad = SPACE + erasePad[y] = true + } + + if is_esc_seq { + fmt.Fprintf(t.out, " %s %s", + format(padFunc(f, SPACE, v), + t.footerParams[y]), pad) + } else { + fmt.Fprintf(t.out, " %s %s", + padFunc(f, SPACE, v), + pad) + } + + //fmt.Fprintf(t.out, " %s %s", + // padFunc(f, SPACE, v), + // pad) + } + // Next line + fmt.Fprint(t.out, t.newLine) + //t.printLine(true) + } + + hasPrinted := false + + for i := 0; i <= end; i++ { + v := t.cs[i] + pad := t.pRow + center := t.pCenter + length := len(t.footers[i][0]) + + if length > 0 { + hasPrinted = true + } + + // Set center to be space if length is 0 + if length == 0 && !t.borders.Right { + center = SPACE + } + + // Print first junction + if i == 0 { + if length > 0 && !t.borders.Left { + center = t.pRow + } + fmt.Fprint(t.out, center) + } + + // Pad With space of length is 0 + if length == 0 { + pad = SPACE + } + // Ignore left space as it has printed before + if hasPrinted || t.borders.Left { + pad = t.pRow + center = t.pCenter + } + + // Change Center end position + if center != SPACE { + if i == end && !t.borders.Right { + center = t.pRow + } + } + + // Change Center start position + if center == SPACE { + if i < end && len(t.footers[i+1][0]) != 0 { + if !t.borders.Left { + center = t.pRow + } else { + center = t.pCenter + } + } + } + + // Print the footer + fmt.Fprintf(t.out, "%s%s%s%s", + pad, + strings.Repeat(string(pad), v), + pad, + center) + + } + + fmt.Fprint(t.out, t.newLine) +} + +// Print caption text +func (t Table) printCaption() { + width := t.getTableWidth() + paragraph, _ := WrapString(t.captionText, width) + for linecount := 0; linecount < len(paragraph); linecount++ { + fmt.Fprintln(t.out, paragraph[linecount]) + } +} + +// Calculate the total number of characters in a row +func (t Table) getTableWidth() int { + var chars int + for _, v := range t.cs { + chars += v + } + + // Add chars, spaces, seperators to calculate the total width of the table. 
+ // ncols := t.colSize + // spaces := ncols * 2 + // seps := ncols + 1 + + return (chars + (3 * t.colSize) + 2) +} + +func (t Table) printRows() { + for i, lines := range t.lines { + t.printRow(lines, i) + } +} + +func (t *Table) fillAlignment(num int) { + if len(t.columnsAlign) < num { + t.columnsAlign = make([]int, num) + for i := range t.columnsAlign { + t.columnsAlign[i] = t.align + } + } +} + +// Print Row Information +// Adjust column alignment based on type + +func (t *Table) printRow(columns [][]string, rowIdx int) { + // Get Maximum Height + max := t.rs[rowIdx] + total := len(columns) + + // TODO Fix uneven col size + // if total < t.colSize { + // for n := t.colSize - total; n < t.colSize ; n++ { + // columns = append(columns, []string{SPACE}) + // t.cs[n] = t.mW + // } + //} + + // Pad Each Height + pads := []int{} + + // Checking for ANSI escape sequences for columns + is_esc_seq := false + if len(t.columnsParams) > 0 { + is_esc_seq = true + } + t.fillAlignment(total) + + for i, line := range columns { + length := len(line) + pad := max - length + pads = append(pads, pad) + for n := 0; n < pad; n++ { + columns[i] = append(columns[i], " ") + } + } + //fmt.Println(max, "\n") + for x := 0; x < max; x++ { + for y := 0; y < total; y++ { + + // Check if border is set + if !t.noWhiteSpace { + fmt.Fprint(t.out, ConditionString((!t.borders.Left && y == 0), SPACE, t.pColumn)) + fmt.Fprintf(t.out, SPACE) + } + + str := columns[y][x] + + // Embedding escape sequence with column value + if is_esc_seq { + str = format(str, t.columnsParams[y]) + } + + // This would print alignment + // Default alignment would use multiple configuration + switch t.columnsAlign[y] { + case ALIGN_CENTER: // + fmt.Fprintf(t.out, "%s", Pad(str, SPACE, t.cs[y])) + case ALIGN_RIGHT: + fmt.Fprintf(t.out, "%s", PadLeft(str, SPACE, t.cs[y])) + case ALIGN_LEFT: + fmt.Fprintf(t.out, "%s", PadRight(str, SPACE, t.cs[y])) + default: + if decimal.MatchString(strings.TrimSpace(str)) || percent.MatchString(strings.TrimSpace(str)) { + fmt.Fprintf(t.out, "%s", PadLeft(str, SPACE, t.cs[y])) + } else { + fmt.Fprintf(t.out, "%s", PadRight(str, SPACE, t.cs[y])) + + // TODO Custom alignment per column + //if max == 1 || pads[y] > 0 { + // fmt.Fprintf(t.out, "%s", Pad(str, SPACE, t.cs[y])) + //} else { + // fmt.Fprintf(t.out, "%s", PadRight(str, SPACE, t.cs[y])) + //} + + } + } + if !t.noWhiteSpace { + fmt.Fprintf(t.out, SPACE) + } else { + fmt.Fprintf(t.out, t.tablePadding) + } + } + // Check if border is set + // Replace with space if not set + if !t.noWhiteSpace { + fmt.Fprint(t.out, ConditionString(t.borders.Left, t.pColumn, SPACE)) + } + fmt.Fprint(t.out, t.newLine) + } + + if t.rowLine { + t.printLine(true) + } +} + +// Print the rows of the table and merge the cells that are identical +func (t *Table) printRowsMergeCells() { + var previousLine []string + var displayCellBorder []bool + var tmpWriter bytes.Buffer + for i, lines := range t.lines { + // We store the display of the current line in a tmp writer, as we need to know which border needs to be print above + previousLine, displayCellBorder = t.printRowMergeCells(&tmpWriter, lines, i, previousLine) + if i > 0 { //We don't need to print borders above first line + if t.rowLine { + t.printLineOptionalCellSeparators(true, displayCellBorder) + } + } + tmpWriter.WriteTo(t.out) + } + //Print the end of the table + if t.rowLine { + t.printLine(true) + } +} + +// Print Row Information to a writer and merge identical cells. 
+// Adjust column alignment based on type + +func (t *Table) printRowMergeCells(writer io.Writer, columns [][]string, rowIdx int, previousLine []string) ([]string, []bool) { + // Get Maximum Height + max := t.rs[rowIdx] + total := len(columns) + + // Pad Each Height + pads := []int{} + + // Checking for ANSI escape sequences for columns + is_esc_seq := false + if len(t.columnsParams) > 0 { + is_esc_seq = true + } + for i, line := range columns { + length := len(line) + pad := max - length + pads = append(pads, pad) + for n := 0; n < pad; n++ { + columns[i] = append(columns[i], " ") + } + } + + var displayCellBorder []bool + t.fillAlignment(total) + for x := 0; x < max; x++ { + for y := 0; y < total; y++ { + + // Check if border is set + fmt.Fprint(writer, ConditionString((!t.borders.Left && y == 0), SPACE, t.pColumn)) + + fmt.Fprintf(writer, SPACE) + + str := columns[y][x] + + // Embedding escape sequence with column value + if is_esc_seq { + str = format(str, t.columnsParams[y]) + } + + if t.autoMergeCells { + var mergeCell bool + if t.columnsToAutoMergeCells != nil { + // Check to see if the column index is in columnsToAutoMergeCells. + if t.columnsToAutoMergeCells[y] { + mergeCell = true + } + } else { + // columnsToAutoMergeCells was not set. + mergeCell = true + } + //Store the full line to merge mutli-lines cells + fullLine := strings.TrimRight(strings.Join(columns[y], " "), " ") + if len(previousLine) > y && fullLine == previousLine[y] && fullLine != "" && mergeCell { + // If this cell is identical to the one above but not empty, we don't display the border and keep the cell empty. + displayCellBorder = append(displayCellBorder, false) + str = "" + } else { + // First line or different content, keep the content and print the cell border + displayCellBorder = append(displayCellBorder, true) + } + } + + // This would print alignment + // Default alignment would use multiple configuration + switch t.columnsAlign[y] { + case ALIGN_CENTER: // + fmt.Fprintf(writer, "%s", Pad(str, SPACE, t.cs[y])) + case ALIGN_RIGHT: + fmt.Fprintf(writer, "%s", PadLeft(str, SPACE, t.cs[y])) + case ALIGN_LEFT: + fmt.Fprintf(writer, "%s", PadRight(str, SPACE, t.cs[y])) + default: + if decimal.MatchString(strings.TrimSpace(str)) || percent.MatchString(strings.TrimSpace(str)) { + fmt.Fprintf(writer, "%s", PadLeft(str, SPACE, t.cs[y])) + } else { + fmt.Fprintf(writer, "%s", PadRight(str, SPACE, t.cs[y])) + } + } + fmt.Fprintf(writer, SPACE) + } + // Check if border is set + // Replace with space if not set + fmt.Fprint(writer, ConditionString(t.borders.Left, t.pColumn, SPACE)) + fmt.Fprint(writer, t.newLine) + } + + //The new previous line is the current one + previousLine = make([]string, total) + for y := 0; y < total; y++ { + previousLine[y] = strings.TrimRight(strings.Join(columns[y], " "), " ") //Store the full line for multi-lines cells + } + //Returns the newly added line and wether or not a border should be displayed above. + return previousLine, displayCellBorder +} + +func (t *Table) parseDimension(str string, colKey, rowKey int) []string { + var ( + raw []string + maxWidth int + ) + + raw = getLines(str) + maxWidth = 0 + for _, line := range raw { + if w := DisplayWidth(line); w > maxWidth { + maxWidth = w + } + } + + // If wrapping, ensure that all paragraphs in the cell fit in the + // specified width. + if t.autoWrap { + // If there's a maximum allowed width for wrapping, use that. + if maxWidth > t.mW { + maxWidth = t.mW + } + + // In the process of doing so, we need to recompute maxWidth. 
This + // is because perhaps a word in the cell is longer than the + // allowed maximum width in t.mW. + newMaxWidth := maxWidth + newRaw := make([]string, 0, len(raw)) + + if t.reflowText { + // Make a single paragraph of everything. + raw = []string{strings.Join(raw, " ")} + } + for i, para := range raw { + paraLines, _ := WrapString(para, maxWidth) + for _, line := range paraLines { + if w := DisplayWidth(line); w > newMaxWidth { + newMaxWidth = w + } + } + if i > 0 { + newRaw = append(newRaw, " ") + } + newRaw = append(newRaw, paraLines...) + } + raw = newRaw + maxWidth = newMaxWidth + } + + // Store the new known maximum width. + v, ok := t.cs[colKey] + if !ok || v < maxWidth || v == 0 { + t.cs[colKey] = maxWidth + } + + // Remember the number of lines for the row printer. + h := len(raw) + v, ok = t.rs[rowKey] + + if !ok || v < h || v == 0 { + t.rs[rowKey] = h + } + //fmt.Printf("Raw %+v %d\n", raw, len(raw)) + return raw +} diff --git a/vendor/github.com/olekukonko/tablewriter/table_with_color.go b/vendor/github.com/olekukonko/tablewriter/table_with_color.go new file mode 100644 index 000000000..ae7a364ae --- /dev/null +++ b/vendor/github.com/olekukonko/tablewriter/table_with_color.go @@ -0,0 +1,136 @@ +package tablewriter + +import ( + "fmt" + "strconv" + "strings" +) + +const ESC = "\033" +const SEP = ";" + +const ( + BgBlackColor int = iota + 40 + BgRedColor + BgGreenColor + BgYellowColor + BgBlueColor + BgMagentaColor + BgCyanColor + BgWhiteColor +) + +const ( + FgBlackColor int = iota + 30 + FgRedColor + FgGreenColor + FgYellowColor + FgBlueColor + FgMagentaColor + FgCyanColor + FgWhiteColor +) + +const ( + BgHiBlackColor int = iota + 100 + BgHiRedColor + BgHiGreenColor + BgHiYellowColor + BgHiBlueColor + BgHiMagentaColor + BgHiCyanColor + BgHiWhiteColor +) + +const ( + FgHiBlackColor int = iota + 90 + FgHiRedColor + FgHiGreenColor + FgHiYellowColor + FgHiBlueColor + FgHiMagentaColor + FgHiCyanColor + FgHiWhiteColor +) + +const ( + Normal = 0 + Bold = 1 + UnderlineSingle = 4 + Italic +) + +type Colors []int + +func startFormat(seq string) string { + return fmt.Sprintf("%s[%sm", ESC, seq) +} + +func stopFormat() string { + return fmt.Sprintf("%s[%dm", ESC, Normal) +} + +// Making the SGR (Select Graphic Rendition) sequence. 
+func makeSequence(codes []int) string { + codesInString := []string{} + for _, code := range codes { + codesInString = append(codesInString, strconv.Itoa(code)) + } + return strings.Join(codesInString, SEP) +} + +// Adding ANSI escape sequences before and after string +func format(s string, codes interface{}) string { + var seq string + + switch v := codes.(type) { + + case string: + seq = v + case []int: + seq = makeSequence(v) + case Colors: + seq = makeSequence(v) + default: + return s + } + + if len(seq) == 0 { + return s + } + return startFormat(seq) + s + stopFormat() +} + +// Adding header colors (ANSI codes) +func (t *Table) SetHeaderColor(colors ...Colors) { + if t.colSize != len(colors) { + panic("Number of header colors must be equal to number of headers.") + } + for i := 0; i < len(colors); i++ { + t.headerParams = append(t.headerParams, makeSequence(colors[i])) + } +} + +// Adding column colors (ANSI codes) +func (t *Table) SetColumnColor(colors ...Colors) { + if t.colSize != len(colors) { + panic("Number of column colors must be equal to number of headers.") + } + for i := 0; i < len(colors); i++ { + t.columnsParams = append(t.columnsParams, makeSequence(colors[i])) + } +} + +// Adding column colors (ANSI codes) +func (t *Table) SetFooterColor(colors ...Colors) { + if len(t.footers) != len(colors) { + panic("Number of footer colors must be equal to number of footer.") + } + for i := 0; i < len(colors); i++ { + t.footerParams = append(t.footerParams, makeSequence(colors[i])) + } +} + +func Color(colors ...int) []int { + return colors +} diff --git a/vendor/github.com/olekukonko/tablewriter/util.go b/vendor/github.com/olekukonko/tablewriter/util.go new file mode 100644 index 000000000..380e7ab35 --- /dev/null +++ b/vendor/github.com/olekukonko/tablewriter/util.go @@ -0,0 +1,93 @@ +// Copyright 2014 Oleku Konko All rights reserved. +// Use of this source code is governed by a MIT +// license that can be found in the LICENSE file. + +// This module is a Table Writer API for the Go Programming Language. +// The protocols were written in pure Go and works on windows and unix systems + +package tablewriter + +import ( + "math" + "regexp" + "strings" + + "github.com/mattn/go-runewidth" +) + +var ansi = regexp.MustCompile("\033\\[(?:[0-9]{1,3}(?:;[0-9]{1,3})*)?[m|K]") + +func DisplayWidth(str string) int { + return runewidth.StringWidth(ansi.ReplaceAllLiteralString(str, "")) +} + +// Simple Condition for string +// Returns value based on condition +func ConditionString(cond bool, valid, inValid string) string { + if cond { + return valid + } + return inValid +} + +func isNumOrSpace(r rune) bool { + return ('0' <= r && r <= '9') || r == ' ' +} + +// Format Table Header +// Replace _ , . and spaces +func Title(name string) string { + origLen := len(name) + rs := []rune(name) + for i, r := range rs { + switch r { + case '_': + rs[i] = ' ' + case '.': + // ignore floating number 0.0 + if (i != 0 && !isNumOrSpace(rs[i-1])) || (i != len(rs)-1 && !isNumOrSpace(rs[i+1])) { + rs[i] = ' ' + } + } + } + name = string(rs) + name = strings.TrimSpace(name) + if len(name) == 0 && origLen > 0 { + // Keep at least one character. This is important to preserve + // empty lines in multi-line headers/footers. 
+ name = " " + } + return strings.ToUpper(name) +} + +// Pad String +// Attempts to place string in the center +func Pad(s, pad string, width int) string { + gap := width - DisplayWidth(s) + if gap > 0 { + gapLeft := int(math.Ceil(float64(gap / 2))) + gapRight := gap - gapLeft + return strings.Repeat(string(pad), gapLeft) + s + strings.Repeat(string(pad), gapRight) + } + return s +} + +// Pad String Right position +// This would place string at the left side of the screen +func PadRight(s, pad string, width int) string { + gap := width - DisplayWidth(s) + if gap > 0 { + return s + strings.Repeat(string(pad), gap) + } + return s +} + +// Pad String Left position +// This would place string at the right side of the screen +func PadLeft(s, pad string, width int) string { + gap := width - DisplayWidth(s) + if gap > 0 { + return strings.Repeat(string(pad), gap) + s + } + return s +} diff --git a/vendor/github.com/olekukonko/tablewriter/wrap.go b/vendor/github.com/olekukonko/tablewriter/wrap.go new file mode 100644 index 000000000..a092ee1f7 --- /dev/null +++ b/vendor/github.com/olekukonko/tablewriter/wrap.go @@ -0,0 +1,99 @@ +// Copyright 2014 Oleku Konko All rights reserved. +// Use of this source code is governed by a MIT +// license that can be found in the LICENSE file. + +// This module is a Table Writer API for the Go Programming Language. +// The protocols were written in pure Go and works on windows and unix systems + +package tablewriter + +import ( + "math" + "strings" + + "github.com/mattn/go-runewidth" +) + +var ( + nl = "\n" + sp = " " +) + +const defaultPenalty = 1e5 + +// Wrap wraps s into a paragraph of lines of length lim, with minimal +// raggedness. +func WrapString(s string, lim int) ([]string, int) { + words := strings.Split(strings.Replace(s, nl, sp, -1), sp) + var lines []string + max := 0 + for _, v := range words { + max = runewidth.StringWidth(v) + if max > lim { + lim = max + } + } + for _, line := range WrapWords(words, 1, lim, defaultPenalty) { + lines = append(lines, strings.Join(line, sp)) + } + return lines, lim +} + +// WrapWords is the low-level line-breaking algorithm, useful if you need more +// control over the details of the text wrapping process. For most uses, +// WrapString will be sufficient and more convenient. +// +// WrapWords splits a list of words into lines with minimal "raggedness", +// treating each rune as one unit, accounting for spc units between adjacent +// words on each line, and attempting to limit lines to lim units. Raggedness +// is the total error over all lines, where error is the square of the +// difference of the length of the line and lim. Too-long lines (which only +// happen when a single word is longer than lim units) have pen penalty units +// added to the error. 
+func WrapWords(words []string, spc, lim, pen int) [][]string { + n := len(words) + + length := make([][]int, n) + for i := 0; i < n; i++ { + length[i] = make([]int, n) + length[i][i] = runewidth.StringWidth(words[i]) + for j := i + 1; j < n; j++ { + length[i][j] = length[i][j-1] + spc + runewidth.StringWidth(words[j]) + } + } + nbrk := make([]int, n) + cost := make([]int, n) + for i := range cost { + cost[i] = math.MaxInt32 + } + for i := n - 1; i >= 0; i-- { + if length[i][n-1] <= lim { + cost[i] = 0 + nbrk[i] = n + } else { + for j := i + 1; j < n; j++ { + d := lim - length[i][j-1] + c := d*d + cost[j] + if length[i][j-1] > lim { + c += pen // too-long lines get a worse penalty + } + if c < cost[i] { + cost[i] = c + nbrk[i] = j + } + } + } + } + var lines [][]string + i := 0 + for i < n { + lines = append(lines, words[i:nbrk[i]]) + i = nbrk[i] + } + return lines +} + +// getLines decomposes a multiline string into a slice of strings. +func getLines(s string) []string { + return strings.Split(s, nl) +} diff --git a/vendor/github.com/pelletier/go-toml/.dockerignore b/vendor/github.com/pelletier/go-toml/.dockerignore new file mode 100644 index 000000000..7b5883475 --- /dev/null +++ b/vendor/github.com/pelletier/go-toml/.dockerignore @@ -0,0 +1,2 @@ +cmd/tomll/tomll +cmd/tomljson/tomljson diff --git a/vendor/github.com/pelletier/go-toml/.gitignore b/vendor/github.com/pelletier/go-toml/.gitignore new file mode 100644 index 000000000..e6ba63a5c --- /dev/null +++ b/vendor/github.com/pelletier/go-toml/.gitignore @@ -0,0 +1,5 @@ +test_program/test_program_bin +fuzz/ +cmd/tomll/tomll +cmd/tomljson/tomljson +cmd/tomltestgen/tomltestgen diff --git a/vendor/github.com/pelletier/go-toml/CONTRIBUTING.md b/vendor/github.com/pelletier/go-toml/CONTRIBUTING.md new file mode 100644 index 000000000..98b9893d3 --- /dev/null +++ b/vendor/github.com/pelletier/go-toml/CONTRIBUTING.md @@ -0,0 +1,132 @@ +## Contributing + +Thank you for your interest in go-toml! We appreciate you considering +contributing to go-toml! + +The main goal is the project is to provide an easy-to-use TOML +implementation for Go that gets the job done and gets out of your way – +dealing with TOML is probably not the central piece of your project. + +As the single maintainer of go-toml, time is scarce. All help, big or +small, is more than welcomed! + +### Ask questions + +Any question you may have, somebody else might have it too. Always feel +free to ask them on the [issues tracker][issues-tracker]. We will try to +answer them as clearly and quickly as possible, time permitting. + +Asking questions also helps us identify areas where the documentation needs +improvement, or new features that weren't envisioned before. Sometimes, a +seemingly innocent question leads to the fix of a bug. Don't hesitate and +ask away! + +### Improve the documentation + +The best way to share your knowledge and experience with go-toml is to +improve the documentation. Fix a typo, clarify an interface, add an +example, anything goes! + +The documentation is present in the [README][readme] and thorough the +source code. On release, it gets updated on [pkg.go.dev][pkg.go.dev]. To make a +change to the documentation, create a pull request with your proposed +changes. For simple changes like that, the easiest way to go is probably +the "Fork this project and edit the file" button on Github, displayed at +the top right of the file. 
Unless it's a trivial change (for example a
+typo), provide a little bit of context in your pull request description or
+commit message.
+
+### Report a bug
+
+Found a bug! Sorry to hear that :(. Help us and others track them down and
+fix them by reporting it. [File a new bug report][bug-report] on the [issues
+tracker][issues-tracker]. The template should provide enough guidance on
+what to include. When in doubt: add more details! Reducing ambiguity and
+providing more information decreases back and forth and saves everyone
+time.
+
+### Code changes
+
+Want to contribute a patch? Very happy to hear that!
+
+First, some high-level rules:
+
+* A short proposal with some POC code is better than a lengthy piece of
+  text with no code. Code speaks louder than words.
+* No backward-incompatible patch will be accepted unless discussed.
+  Sometimes it's hard, and Go's lack of versioning by default does not
+  help, but we try not to break people's programs unless we absolutely have
+  to.
+* If you are writing a new feature or extending an existing one, make sure
+  to write some documentation.
+* Bug fixes need to be accompanied by regression tests (see the sketch at
+  the end of this file).
+* New code needs to be tested.
+* Your commit messages need to explain why the change is needed, even if
+  already included in the PR description.
+
+It does sound like a lot, but those best practices are here to save time
+overall and continuously improve the quality of the project, which is
+something everyone benefits from.
+
+#### Get started
+
+The fairly standard code contribution process looks like this:
+
+1. [Fork the project][fork].
+2. Make your changes, commit on any branch you like.
+3. [Open up a pull request][pull-request].
+4. Review; you may be asked for changes.
+5. Merge. You're in!
+
+Feel free to ask for help! You can create draft pull requests to gather
+some early feedback!
+
+#### Run the tests
+
+You can run tests for go-toml using Go's test tool: `go test ./...`.
+When you create a pull request, all tests are run on Linux on a few Go
+versions (Travis CI), and on Windows using the latest Go version
+(AppVeyor).
+
+#### Style
+
+Try to look around and follow the same format and structure as the rest of
+the code. We enforce using `go fmt` on the whole code base.
+
+---
+
+### Maintainers-only
+
+#### Merge pull request
+
+Checklist:
+
+* Passing CI.
+* Does not introduce backward-incompatible changes (unless discussed).
+* Has relevant doc changes.
+* Has relevant unit tests.
+
+1. Merge using "squash and merge".
+2. Make sure to edit the commit message to keep all the useful information
+   nice and clean.
+3. Make sure the commit title is clear and contains the PR number (#123).
+
+#### New release
+
+1. Go to [releases][releases]. Click on "X commits to master since this
+   release".
+2. Make note of all the changes. Look for backward incompatible changes,
+   new features, and bug fixes.
+3. Pick the new version using the above and semver.
+4. Create a [new release][new-release].
+5. Follow the same format as [1.1.0][release-110].
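+
+### Regression test sketch
+
+As an illustration of the regression tests mentioned under "Code changes"
+above, here is a minimal sketch (editor's addition; the test name and the
+TOML snippet are invented, not taken from the repository): load a document
+that used to trigger the bug and assert on the parsed value.
+
+```go
+package toml
+
+import "testing"
+
+func TestIssueNNN(t *testing.T) {
+	// Hypothetical regression test: the issue number and input are placeholders.
+	tree, err := Load(`value = 42`)
+	if err != nil {
+		t.Fatalf("Load: %v", err)
+	}
+	if got := tree.Get("value"); got != int64(42) {
+		t.Fatalf("unexpected value: %v", got)
+	}
+}
+```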
+ +[issues-tracker]: https://github.com/pelletier/go-toml/issues +[bug-report]: https://github.com/pelletier/go-toml/issues/new?template=bug_report.md +[pkg.go.dev]: https://pkg.go.dev/github.com/pelletier/go-toml +[readme]: ./README.md +[fork]: https://help.github.com/articles/fork-a-repo +[pull-request]: https://help.github.com/en/articles/creating-a-pull-request +[releases]: https://github.com/pelletier/go-toml/releases +[new-release]: https://github.com/pelletier/go-toml/releases/new +[release-110]: https://github.com/pelletier/go-toml/releases/tag/v1.1.0 diff --git a/vendor/github.com/pelletier/go-toml/Dockerfile b/vendor/github.com/pelletier/go-toml/Dockerfile new file mode 100644 index 000000000..fffdb0166 --- /dev/null +++ b/vendor/github.com/pelletier/go-toml/Dockerfile @@ -0,0 +1,11 @@ +FROM golang:1.12-alpine3.9 as builder +WORKDIR /go/src/github.com/pelletier/go-toml +COPY . . +ENV CGO_ENABLED=0 +ENV GOOS=linux +RUN go install ./... + +FROM scratch +COPY --from=builder /go/bin/tomll /usr/bin/tomll +COPY --from=builder /go/bin/tomljson /usr/bin/tomljson +COPY --from=builder /go/bin/jsontoml /usr/bin/jsontoml diff --git a/vendor/github.com/pelletier/go-toml/LICENSE b/vendor/github.com/pelletier/go-toml/LICENSE new file mode 100644 index 000000000..f414553c2 --- /dev/null +++ b/vendor/github.com/pelletier/go-toml/LICENSE @@ -0,0 +1,247 @@ +The bulk of github.com/pelletier/go-toml is distributed under the MIT license +(see below), with the exception of localtime.go and localtime.test.go. +Those two files have been copied over from Google's civil library at revision +ed46f5086358513cf8c25f8e3f022cb838a49d66, and are distributed under the Apache +2.0 license (see below). + + +github.com/pelletier/go-toml: + + +The MIT License (MIT) + +Copyright (c) 2013 - 2021 Thomas Pelletier, Eric Anderton + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + + +localtime.go, localtime_test.go: + +Originals: + https://raw.githubusercontent.com/googleapis/google-cloud-go/ed46f5086358513cf8c25f8e3f022cb838a49d66/civil/civil.go + https://raw.githubusercontent.com/googleapis/google-cloud-go/ed46f5086358513cf8c25f8e3f022cb838a49d66/civil/civil_test.go +Changes: + * Renamed files from civil* to localtime*. + * Package changed from civil to toml. + * 'Local' prefix added to all structs. 
+License: + https://raw.githubusercontent.com/googleapis/google-cloud-go/ed46f5086358513cf8c25f8e3f022cb838a49d66/LICENSE + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. 
Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/vendor/github.com/pelletier/go-toml/Makefile b/vendor/github.com/pelletier/go-toml/Makefile new file mode 100644 index 000000000..9e4503aea --- /dev/null +++ b/vendor/github.com/pelletier/go-toml/Makefile @@ -0,0 +1,29 @@ +export CGO_ENABLED=0 +go := go +go.goos ?= $(shell echo `go version`|cut -f4 -d ' '|cut -d '/' -f1) +go.goarch ?= $(shell echo `go version`|cut -f4 -d ' '|cut -d '/' -f2) + +out.tools := tomll tomljson jsontoml +out.dist := $(out.tools:=_$(go.goos)_$(go.goarch).tar.xz) +sources := $(wildcard **/*.go) + + +.PHONY: +tools: $(out.tools) + +$(out.tools): $(sources) + GOOS=$(go.goos) GOARCH=$(go.goarch) $(go) build ./cmd/$@ + +.PHONY: +dist: $(out.dist) + +$(out.dist):%_$(go.goos)_$(go.goarch).tar.xz: % + if [ "$(go.goos)" = "windows" ]; then \ + tar -cJf $@ $^.exe; \ + else \ + tar -cJf $@ $^; \ + fi + +.PHONY: +clean: + rm -rf $(out.tools) $(out.dist) diff --git a/vendor/github.com/pelletier/go-toml/PULL_REQUEST_TEMPLATE.md b/vendor/github.com/pelletier/go-toml/PULL_REQUEST_TEMPLATE.md new file mode 100644 index 000000000..041cdc4a2 --- /dev/null +++ b/vendor/github.com/pelletier/go-toml/PULL_REQUEST_TEMPLATE.md @@ -0,0 +1,5 @@ +**Issue:** add link to pelletier/go-toml issue here + +Explanation of what this pull request does. + +More detailed description of the decisions being made and the reasons why (if the patch is non-trivial). diff --git a/vendor/github.com/pelletier/go-toml/README.md b/vendor/github.com/pelletier/go-toml/README.md new file mode 100644 index 000000000..6c061712b --- /dev/null +++ b/vendor/github.com/pelletier/go-toml/README.md @@ -0,0 +1,176 @@ +# go-toml + +Go library for the [TOML](https://toml.io/) format. + +This library supports TOML version +[v1.0.0-rc.3](https://toml.io/en/v1.0.0-rc.3) + +[![Go Reference](https://pkg.go.dev/badge/github.com/pelletier/go-toml.svg)](https://pkg.go.dev/github.com/pelletier/go-toml) +[![license](https://img.shields.io/github/license/pelletier/go-toml.svg)](https://github.com/pelletier/go-toml/blob/master/LICENSE) +[![Build Status](https://dev.azure.com/pelletierthomas/go-toml-ci/_apis/build/status/pelletier.go-toml?branchName=master)](https://dev.azure.com/pelletierthomas/go-toml-ci/_build/latest?definitionId=1&branchName=master) +[![codecov](https://codecov.io/gh/pelletier/go-toml/branch/master/graph/badge.svg)](https://codecov.io/gh/pelletier/go-toml) +[![Go Report Card](https://goreportcard.com/badge/github.com/pelletier/go-toml)](https://goreportcard.com/report/github.com/pelletier/go-toml) +[![FOSSA Status](https://app.fossa.io/api/projects/git%2Bgithub.com%2Fpelletier%2Fgo-toml.svg?type=shield)](https://app.fossa.io/projects/git%2Bgithub.com%2Fpelletier%2Fgo-toml?ref=badge_shield) + + +## Development status + +**ℹ️ Consider go-toml v2!** + +The next version of go-toml is in [active development][v2-dev], and +[nearing completion][v2-map]. + +Though technically in beta, v2 is already more tested, [fixes bugs][v1-bugs], +and [much faster][v2-bench]. If you only need reading and writing TOML documents +(majority of cases), those features are implemented and the API unlikely to +change. 
+ +The remaining features (Document structure editing and tooling) will be added +shortly. While pull-requests are welcome on v1, no active development is +expected on it. When v2.0.0 is released, v1 will be deprecated. + +👉 [go-toml v2][v2] + +[v2]: https://github.com/pelletier/go-toml/tree/v2 +[v2-map]: https://github.com/pelletier/go-toml/discussions/506 +[v2-dev]: https://github.com/pelletier/go-toml/tree/v2 +[v1-bugs]: https://github.com/pelletier/go-toml/issues?q=is%3Aissue+is%3Aopen+label%3Av2-fixed +[v2-bench]: https://github.com/pelletier/go-toml/tree/v2#benchmarks + +## Features + +Go-toml provides the following features for using data parsed from TOML documents: + +* Load TOML documents from files and string data +* Easily navigate TOML structure using Tree +* Marshaling and unmarshaling to and from data structures +* Line & column position data for all parsed elements +* [Query support similar to JSON-Path](query/) +* Syntax errors contain line and column numbers + +## Import + +```go +import "github.com/pelletier/go-toml" +``` + +## Usage example + +Read a TOML document: + +```go +config, _ := toml.Load(` +[postgres] +user = "pelletier" +password = "mypassword"`) +// retrieve data directly +user := config.Get("postgres.user").(string) + +// or using an intermediate object +postgresConfig := config.Get("postgres").(*toml.Tree) +password := postgresConfig.Get("password").(string) +``` + +Or use Unmarshal: + +```go +type Postgres struct { + User string + Password string +} +type Config struct { + Postgres Postgres +} + +doc := []byte(` +[Postgres] +User = "pelletier" +Password = "mypassword"`) + +config := Config{} +toml.Unmarshal(doc, &config) +fmt.Println("user=", config.Postgres.User) +``` + +Or use a query: + +```go +// use a query to gather elements without walking the tree +q, _ := query.Compile("$..[user,password]") +results := q.Execute(config) +for ii, item := range results.Values() { + fmt.Printf("Query result %d: %v\n", ii, item) +} +``` + +## Documentation + +The documentation and additional examples are available at +[pkg.go.dev](https://pkg.go.dev/github.com/pelletier/go-toml). + +## Tools + +Go-toml provides three handy command line tools: + +* `tomll`: Reads TOML files and lints them. + + ``` + go install github.com/pelletier/go-toml/cmd/tomll + tomll --help + ``` +* `tomljson`: Reads a TOML file and outputs its JSON representation. + + ``` + go install github.com/pelletier/go-toml/cmd/tomljson + tomljson --help + ``` + + * `jsontoml`: Reads a JSON file and outputs a TOML representation. + + ``` + go install github.com/pelletier/go-toml/cmd/jsontoml + jsontoml --help + ``` + +### Docker image + +Those tools are also available as a Docker image from +[dockerhub](https://hub.docker.com/r/pelletier/go-toml). For example, to +use `tomljson`: + +``` +docker run -v $PWD:/workdir pelletier/go-toml tomljson /workdir/example.toml +``` + +Only master (`latest`) and tagged versions are published to dockerhub. You +can build your own image as usual: + +``` +docker build -t go-toml . +``` + +## Contribute + +Feel free to report bugs and patches using GitHub's pull requests system on +[pelletier/go-toml](https://github.com/pelletier/go-toml). Any feedback would be +much appreciated! + +### Run tests + +`go test ./...` + +### Fuzzing + +The script `./fuzz.sh` is available to +run [go-fuzz](https://github.com/dvyukov/go-fuzz) on go-toml. + +## Versioning + +Go-toml follows [Semantic Versioning](http://semver.org/). 
The supported version +of [TOML](https://github.com/toml-lang/toml) is indicated at the beginning of +this document. The last two major versions of Go are supported +(see [Go Release Policy](https://golang.org/doc/devel/release.html#policy)). + +## License + +The MIT License (MIT) + Apache 2.0. Read [LICENSE](LICENSE). diff --git a/vendor/github.com/pelletier/go-toml/azure-pipelines.yml b/vendor/github.com/pelletier/go-toml/azure-pipelines.yml new file mode 100644 index 000000000..4af198b4d --- /dev/null +++ b/vendor/github.com/pelletier/go-toml/azure-pipelines.yml @@ -0,0 +1,188 @@ +trigger: +- master + +stages: +- stage: run_checks + displayName: "Check" + dependsOn: [] + jobs: + - job: fmt + displayName: "fmt" + pool: + vmImage: ubuntu-latest + steps: + - task: GoTool@0 + displayName: "Install Go 1.16" + inputs: + version: "1.16" + - task: Go@0 + displayName: "go fmt ./..." + inputs: + command: 'custom' + customCommand: 'fmt' + arguments: './...' + - job: coverage + displayName: "coverage" + pool: + vmImage: ubuntu-latest + steps: + - task: GoTool@0 + displayName: "Install Go 1.16" + inputs: + version: "1.16" + - task: Go@0 + displayName: "Generate coverage" + inputs: + command: 'test' + arguments: "-race -coverprofile=coverage.txt -covermode=atomic" + - task: Bash@3 + inputs: + targetType: 'inline' + script: 'bash <(curl -s https://codecov.io/bash) -t ${CODECOV_TOKEN}' + env: + CODECOV_TOKEN: $(CODECOV_TOKEN) + - job: benchmark + displayName: "benchmark" + pool: + vmImage: ubuntu-latest + steps: + - task: GoTool@0 + displayName: "Install Go 1.16" + inputs: + version: "1.16" + - script: echo "##vso[task.setvariable variable=PATH]${PATH}:/home/vsts/go/bin/" + - task: Bash@3 + inputs: + filePath: './benchmark.sh' + arguments: "master $(Build.Repository.Uri)" + + - job: go_unit_tests + displayName: "unit tests" + strategy: + matrix: + linux 1.16: + goVersion: '1.16' + imageName: 'ubuntu-latest' + mac 1.16: + goVersion: '1.16' + imageName: 'macOS-latest' + windows 1.16: + goVersion: '1.16' + imageName: 'windows-latest' + linux 1.15: + goVersion: '1.15' + imageName: 'ubuntu-latest' + mac 1.15: + goVersion: '1.15' + imageName: 'macOS-latest' + windows 1.15: + goVersion: '1.15' + imageName: 'windows-latest' + pool: + vmImage: $(imageName) + steps: + - task: GoTool@0 + displayName: "Install Go $(goVersion)" + inputs: + version: $(goVersion) + - task: Go@0 + displayName: "go test ./..." + inputs: + command: 'test' + arguments: './...' 
+- stage: build_binaries + displayName: "Build binaries" + dependsOn: run_checks + jobs: + - job: build_binary + displayName: "Build binary" + strategy: + matrix: + linux_amd64: + GOOS: linux + GOARCH: amd64 + darwin_amd64: + GOOS: darwin + GOARCH: amd64 + windows_amd64: + GOOS: windows + GOARCH: amd64 + pool: + vmImage: ubuntu-latest + steps: + - task: GoTool@0 + displayName: "Install Go" + inputs: + version: 1.16 + - task: Bash@3 + inputs: + targetType: inline + script: "make dist" + env: + go.goos: $(GOOS) + go.goarch: $(GOARCH) + - task: CopyFiles@2 + inputs: + sourceFolder: '$(Build.SourcesDirectory)' + contents: '*.tar.xz' + TargetFolder: '$(Build.ArtifactStagingDirectory)' + - task: PublishBuildArtifacts@1 + inputs: + pathtoPublish: '$(Build.ArtifactStagingDirectory)' + artifactName: binaries +- stage: build_binaries_manifest + displayName: "Build binaries manifest" + dependsOn: build_binaries + jobs: + - job: build_manifest + displayName: "Build binaries manifest" + steps: + - task: DownloadBuildArtifacts@0 + inputs: + buildType: 'current' + downloadType: 'single' + artifactName: 'binaries' + downloadPath: '$(Build.SourcesDirectory)' + - task: Bash@3 + inputs: + targetType: inline + script: "cd binaries && sha256sum --binary *.tar.xz | tee $(Build.ArtifactStagingDirectory)/sha256sums.txt" + - task: PublishBuildArtifacts@1 + inputs: + pathtoPublish: '$(Build.ArtifactStagingDirectory)' + artifactName: manifest + +- stage: build_docker_image + displayName: "Build Docker image" + dependsOn: run_checks + jobs: + - job: build + displayName: "Build" + pool: + vmImage: ubuntu-latest + steps: + - task: Docker@2 + inputs: + command: 'build' + Dockerfile: 'Dockerfile' + buildContext: '.' + addPipelineData: false + +- stage: publish_docker_image + displayName: "Publish Docker image" + dependsOn: build_docker_image + condition: and(succeeded(), eq(variables['Build.SourceBranchName'], 'master')) + jobs: + - job: publish + displayName: "Publish" + pool: + vmImage: ubuntu-latest + steps: + - task: Docker@2 + inputs: + containerRegistry: 'DockerHub' + repository: 'pelletier/go-toml' + command: 'buildAndPush' + Dockerfile: 'Dockerfile' + buildContext: '.' + tags: 'latest' diff --git a/vendor/github.com/pelletier/go-toml/benchmark.sh b/vendor/github.com/pelletier/go-toml/benchmark.sh new file mode 100644 index 000000000..a69d3040f --- /dev/null +++ b/vendor/github.com/pelletier/go-toml/benchmark.sh @@ -0,0 +1,35 @@ +#!/bin/bash + +set -ex + +reference_ref=${1:-master} +reference_git=${2:-.} + +if ! `hash benchstat 2>/dev/null`; then + echo "Installing benchstat" + go get golang.org/x/perf/cmd/benchstat +fi + +tempdir=`mktemp -d /tmp/go-toml-benchmark-XXXXXX` +ref_tempdir="${tempdir}/ref" +ref_benchmark="${ref_tempdir}/benchmark-`echo -n ${reference_ref}|tr -s '/' '-'`.txt" +local_benchmark="`pwd`/benchmark-local.txt" + +echo "=== ${reference_ref} (${ref_tempdir})" +git clone ${reference_git} ${ref_tempdir} >/dev/null 2>/dev/null +pushd ${ref_tempdir} >/dev/null +git checkout ${reference_ref} >/dev/null 2>/dev/null +go test -bench=. -benchmem | tee ${ref_benchmark} +cd benchmark +go test -bench=. -benchmem | tee -a ${ref_benchmark} +popd >/dev/null + +echo "" +echo "=== local" +go test -bench=. -benchmem | tee ${local_benchmark} +cd benchmark +go test -bench=. 
-benchmem | tee -a ${local_benchmark} + +echo "" +echo "=== diff" +benchstat -delta-test=none ${ref_benchmark} ${local_benchmark} diff --git a/vendor/github.com/pelletier/go-toml/doc.go b/vendor/github.com/pelletier/go-toml/doc.go new file mode 100644 index 000000000..a1406a32b --- /dev/null +++ b/vendor/github.com/pelletier/go-toml/doc.go @@ -0,0 +1,23 @@ +// Package toml is a TOML parser and manipulation library. +// +// This version supports the specification as described in +// https://github.com/toml-lang/toml/blob/master/versions/en/toml-v0.5.0.md +// +// Marshaling +// +// Go-toml can marshal and unmarshal TOML documents from and to data +// structures. +// +// TOML document as a tree +// +// Go-toml can operate on a TOML document as a tree. Use one of the Load* +// functions to parse TOML data and obtain a Tree instance, then one of its +// methods to manipulate the tree. +// +// JSONPath-like queries +// +// The package github.com/pelletier/go-toml/query implements a system +// similar to JSONPath to quickly retrieve elements of a TOML document using a +// single expression. See the package documentation for more information. +// +package toml diff --git a/vendor/github.com/pelletier/go-toml/example-crlf.toml b/vendor/github.com/pelletier/go-toml/example-crlf.toml new file mode 100644 index 000000000..780d9c68f --- /dev/null +++ b/vendor/github.com/pelletier/go-toml/example-crlf.toml @@ -0,0 +1,30 @@ +# This is a TOML document. Boom. + +title = "TOML Example" + +[owner] +name = "Tom Preston-Werner" +organization = "GitHub" +bio = "GitHub Cofounder & CEO\nLikes tater tots and beer." +dob = 1979-05-27T07:32:00Z # First class dates? Why not? + +[database] +server = "192.168.1.1" +ports = [ 8001, 8001, 8002 ] +connection_max = 5000 +enabled = true + +[servers] + + # You can indent as you please. Tabs or spaces. TOML don't care. + [servers.alpha] + ip = "10.0.0.1" + dc = "eqdc10" + + [servers.beta] + ip = "10.0.0.2" + dc = "eqdc10" + +[clients] +data = [ ["gamma", "delta"], [1, 2] ] # just an update to make sure parsers support it +score = 4e-08 # to make sure leading zeroes in exponent parts of floats are supported \ No newline at end of file diff --git a/vendor/github.com/pelletier/go-toml/example.toml b/vendor/github.com/pelletier/go-toml/example.toml new file mode 100644 index 000000000..f45bf88b8 --- /dev/null +++ b/vendor/github.com/pelletier/go-toml/example.toml @@ -0,0 +1,30 @@ +# This is a TOML document. Boom. + +title = "TOML Example" + +[owner] +name = "Tom Preston-Werner" +organization = "GitHub" +bio = "GitHub Cofounder & CEO\nLikes tater tots and beer." +dob = 1979-05-27T07:32:00Z # First class dates? Why not? + +[database] +server = "192.168.1.1" +ports = [ 8001, 8001, 8002 ] +connection_max = 5000 +enabled = true + +[servers] + + # You can indent as you please. Tabs or spaces. TOML don't care. 
+ [servers.alpha] + ip = "10.0.0.1" + dc = "eqdc10" + + [servers.beta] + ip = "10.0.0.2" + dc = "eqdc10" + +[clients] +data = [ ["gamma", "delta"], [1, 2] ] # just an update to make sure parsers support it +score = 4e-08 # to make sure leading zeroes in exponent parts of floats are supported \ No newline at end of file diff --git a/vendor/github.com/pelletier/go-toml/fuzz.go b/vendor/github.com/pelletier/go-toml/fuzz.go new file mode 100644 index 000000000..14570c8d3 --- /dev/null +++ b/vendor/github.com/pelletier/go-toml/fuzz.go @@ -0,0 +1,31 @@ +// +build gofuzz + +package toml + +func Fuzz(data []byte) int { + tree, err := LoadBytes(data) + if err != nil { + if tree != nil { + panic("tree must be nil if there is an error") + } + return 0 + } + + str, err := tree.ToTomlString() + if err != nil { + if str != "" { + panic(`str must be "" if there is an error`) + } + panic(err) + } + + tree, err = Load(str) + if err != nil { + if tree != nil { + panic("tree must be nil if there is an error") + } + return 0 + } + + return 1 +} diff --git a/vendor/github.com/pelletier/go-toml/fuzz.sh b/vendor/github.com/pelletier/go-toml/fuzz.sh new file mode 100644 index 000000000..3204b4c44 --- /dev/null +++ b/vendor/github.com/pelletier/go-toml/fuzz.sh @@ -0,0 +1,15 @@ +#! /bin/sh +set -eu + +go get github.com/dvyukov/go-fuzz/go-fuzz +go get github.com/dvyukov/go-fuzz/go-fuzz-build + +if [ ! -e toml-fuzz.zip ]; then + go-fuzz-build github.com/pelletier/go-toml +fi + +rm -fr fuzz +mkdir -p fuzz/corpus +cp *.toml fuzz/corpus + +go-fuzz -bin=toml-fuzz.zip -workdir=fuzz diff --git a/vendor/github.com/pelletier/go-toml/go.mod b/vendor/github.com/pelletier/go-toml/go.mod new file mode 100644 index 000000000..7d29a0a66 --- /dev/null +++ b/vendor/github.com/pelletier/go-toml/go.mod @@ -0,0 +1,3 @@ +module github.com/pelletier/go-toml + +go 1.12 diff --git a/vendor/github.com/pelletier/go-toml/keysparsing.go b/vendor/github.com/pelletier/go-toml/keysparsing.go new file mode 100644 index 000000000..e091500b2 --- /dev/null +++ b/vendor/github.com/pelletier/go-toml/keysparsing.go @@ -0,0 +1,112 @@ +// Parsing keys handling both bare and quoted keys. + +package toml + +import ( + "errors" + "fmt" +) + +// Convert the bare key group string to an array. +// The input supports double quotation and single quotation, +// but escape sequences are not supported. Lexers must unescape them beforehand. +func parseKey(key string) ([]string, error) { + runes := []rune(key) + var groups []string + + if len(key) == 0 { + return nil, errors.New("empty key") + } + + idx := 0 + for idx < len(runes) { + for ; idx < len(runes) && isSpace(runes[idx]); idx++ { + // skip leading whitespace + } + if idx >= len(runes) { + break + } + r := runes[idx] + if isValidBareChar(r) { + // parse bare key + startIdx := idx + endIdx := -1 + idx++ + for idx < len(runes) { + r = runes[idx] + if isValidBareChar(r) { + idx++ + } else if r == '.' { + endIdx = idx + break + } else if isSpace(r) { + endIdx = idx + for ; idx < len(runes) && isSpace(runes[idx]); idx++ { + // skip trailing whitespace + } + if idx < len(runes) && runes[idx] != '.' 
{ + return nil, fmt.Errorf("invalid key character after whitespace: %c", runes[idx]) + } + break + } else { + return nil, fmt.Errorf("invalid bare key character: %c", r) + } + } + if endIdx == -1 { + endIdx = idx + } + groups = append(groups, string(runes[startIdx:endIdx])) + } else if r == '\'' { + // parse single quoted key + idx++ + startIdx := idx + for { + if idx >= len(runes) { + return nil, fmt.Errorf("unclosed single-quoted key") + } + r = runes[idx] + if r == '\'' { + groups = append(groups, string(runes[startIdx:idx])) + idx++ + break + } + idx++ + } + } else if r == '"' { + // parse double quoted key + idx++ + startIdx := idx + for { + if idx >= len(runes) { + return nil, fmt.Errorf("unclosed double-quoted key") + } + r = runes[idx] + if r == '"' { + groups = append(groups, string(runes[startIdx:idx])) + idx++ + break + } + idx++ + } + } else if r == '.' { + idx++ + if idx >= len(runes) { + return nil, fmt.Errorf("unexpected end of key") + } + r = runes[idx] + if !isValidBareChar(r) && r != '\'' && r != '"' && r != ' ' { + return nil, fmt.Errorf("expecting key part after dot") + } + } else { + return nil, fmt.Errorf("invalid key character: %c", r) + } + } + if len(groups) == 0 { + return nil, fmt.Errorf("empty key") + } + return groups, nil +} + +func isValidBareChar(r rune) bool { + return isAlphanumeric(r) || r == '-' || isDigit(r) +} diff --git a/vendor/github.com/pelletier/go-toml/lexer.go b/vendor/github.com/pelletier/go-toml/lexer.go new file mode 100644 index 000000000..313908e3e --- /dev/null +++ b/vendor/github.com/pelletier/go-toml/lexer.go @@ -0,0 +1,1031 @@ +// TOML lexer. +// +// Written using the principles developed by Rob Pike in +// http://www.youtube.com/watch?v=HxaD_trXwRE + +package toml + +import ( + "bytes" + "errors" + "fmt" + "strconv" + "strings" +) + +// Define state functions +type tomlLexStateFn func() tomlLexStateFn + +// Define lexer +type tomlLexer struct { + inputIdx int + input []rune // Textual source + currentTokenStart int + currentTokenStop int + tokens []token + brackets []rune + line int + col int + endbufferLine int + endbufferCol int +} + +// Basic read operations on input + +func (l *tomlLexer) read() rune { + r := l.peek() + if r == '\n' { + l.endbufferLine++ + l.endbufferCol = 1 + } else { + l.endbufferCol++ + } + l.inputIdx++ + return r +} + +func (l *tomlLexer) next() rune { + r := l.read() + + if r != eof { + l.currentTokenStop++ + } + return r +} + +func (l *tomlLexer) ignore() { + l.currentTokenStart = l.currentTokenStop + l.line = l.endbufferLine + l.col = l.endbufferCol +} + +func (l *tomlLexer) skip() { + l.next() + l.ignore() +} + +func (l *tomlLexer) fastForward(n int) { + for i := 0; i < n; i++ { + l.next() + } +} + +func (l *tomlLexer) emitWithValue(t tokenType, value string) { + l.tokens = append(l.tokens, token{ + Position: Position{l.line, l.col}, + typ: t, + val: value, + }) + l.ignore() +} + +func (l *tomlLexer) emit(t tokenType) { + l.emitWithValue(t, string(l.input[l.currentTokenStart:l.currentTokenStop])) +} + +func (l *tomlLexer) peek() rune { + if l.inputIdx >= len(l.input) { + return eof + } + return l.input[l.inputIdx] +} + +func (l *tomlLexer) peekString(size int) string { + maxIdx := len(l.input) + upperIdx := l.inputIdx + size // FIXME: potential overflow + if upperIdx > maxIdx { + upperIdx = maxIdx + } + return string(l.input[l.inputIdx:upperIdx]) +} + +func (l *tomlLexer) follow(next string) bool { + return next == l.peekString(len(next)) +} + +// Error management + +func (l *tomlLexer) errorf(format 
string, args ...interface{}) tomlLexStateFn { + l.tokens = append(l.tokens, token{ + Position: Position{l.line, l.col}, + typ: tokenError, + val: fmt.Sprintf(format, args...), + }) + return nil +} + +// State functions + +func (l *tomlLexer) lexVoid() tomlLexStateFn { + for { + next := l.peek() + switch next { + case '}': // after '{' + return l.lexRightCurlyBrace + case '[': + return l.lexTableKey + case '#': + return l.lexComment(l.lexVoid) + case '=': + return l.lexEqual + case '\r': + fallthrough + case '\n': + l.skip() + continue + } + + if isSpace(next) { + l.skip() + } + + if isKeyStartChar(next) { + return l.lexKey + } + + if next == eof { + l.next() + break + } + } + + l.emit(tokenEOF) + return nil +} + +func (l *tomlLexer) lexRvalue() tomlLexStateFn { + for { + next := l.peek() + switch next { + case '.': + return l.errorf("cannot start float with a dot") + case '=': + return l.lexEqual + case '[': + return l.lexLeftBracket + case ']': + return l.lexRightBracket + case '{': + return l.lexLeftCurlyBrace + case '}': + return l.lexRightCurlyBrace + case '#': + return l.lexComment(l.lexRvalue) + case '"': + return l.lexString + case '\'': + return l.lexLiteralString + case ',': + return l.lexComma + case '\r': + fallthrough + case '\n': + l.skip() + if len(l.brackets) > 0 && l.brackets[len(l.brackets)-1] == '[' { + return l.lexRvalue + } + return l.lexVoid + } + + if l.follow("true") { + return l.lexTrue + } + + if l.follow("false") { + return l.lexFalse + } + + if l.follow("inf") { + return l.lexInf + } + + if l.follow("nan") { + return l.lexNan + } + + if isSpace(next) { + l.skip() + continue + } + + if next == eof { + l.next() + break + } + + if next == '+' || next == '-' { + return l.lexNumber + } + + if isDigit(next) { + return l.lexDateTimeOrNumber + } + + return l.errorf("no value can start with %c", next) + } + + l.emit(tokenEOF) + return nil +} + +func (l *tomlLexer) lexDateTimeOrNumber() tomlLexStateFn { + // Could be either a date/time, or a digit. + // The options for date/times are: + // YYYY-... => date or date-time + // HH:... => time + // Anything else should be a number. + + lookAhead := l.peekString(5) + if len(lookAhead) < 3 { + return l.lexNumber() + } + + for idx, r := range lookAhead { + if !isDigit(r) { + if idx == 2 && r == ':' { + return l.lexDateTimeOrTime() + } + if idx == 4 && r == '-' { + return l.lexDateTimeOrTime() + } + return l.lexNumber() + } + } + return l.lexNumber() +} + +func (l *tomlLexer) lexLeftCurlyBrace() tomlLexStateFn { + l.next() + l.emit(tokenLeftCurlyBrace) + l.brackets = append(l.brackets, '{') + return l.lexVoid +} + +func (l *tomlLexer) lexRightCurlyBrace() tomlLexStateFn { + l.next() + l.emit(tokenRightCurlyBrace) + if len(l.brackets) == 0 || l.brackets[len(l.brackets)-1] != '{' { + return l.errorf("cannot have '}' here") + } + l.brackets = l.brackets[:len(l.brackets)-1] + return l.lexRvalue +} + +func (l *tomlLexer) lexDateTimeOrTime() tomlLexStateFn { + // Example matches: + // 1979-05-27T07:32:00Z + // 1979-05-27T00:32:00-07:00 + // 1979-05-27T00:32:00.999999-07:00 + // 1979-05-27 07:32:00Z + // 1979-05-27 00:32:00-07:00 + // 1979-05-27 00:32:00.999999-07:00 + // 1979-05-27T07:32:00 + // 1979-05-27T00:32:00.999999 + // 1979-05-27 07:32:00 + // 1979-05-27 00:32:00.999999 + // 1979-05-27 + // 07:32:00 + // 00:32:00.999999 + + // we already know those two are digits + l.next() + l.next() + + // Got 2 digits. At that point it could be either a time or a date(-time). 
+ + r := l.next() + if r == ':' { + return l.lexTime() + } + + return l.lexDateTime() +} + +func (l *tomlLexer) lexDateTime() tomlLexStateFn { + // This state accepts an offset date-time, a local date-time, or a local date. + // + // v--- cursor + // 1979-05-27T07:32:00Z + // 1979-05-27T00:32:00-07:00 + // 1979-05-27T00:32:00.999999-07:00 + // 1979-05-27 07:32:00Z + // 1979-05-27 00:32:00-07:00 + // 1979-05-27 00:32:00.999999-07:00 + // 1979-05-27T07:32:00 + // 1979-05-27T00:32:00.999999 + // 1979-05-27 07:32:00 + // 1979-05-27 00:32:00.999999 + // 1979-05-27 + + // date + + // already checked by lexRvalue + l.next() // digit + l.next() // - + + for i := 0; i < 2; i++ { + r := l.next() + if !isDigit(r) { + return l.errorf("invalid month digit in date: %c", r) + } + } + + r := l.next() + if r != '-' { + return l.errorf("expected - to separate month of a date, not %c", r) + } + + for i := 0; i < 2; i++ { + r := l.next() + if !isDigit(r) { + return l.errorf("invalid day digit in date: %c", r) + } + } + + l.emit(tokenLocalDate) + + r = l.peek() + + if r == eof { + + return l.lexRvalue + } + + if r != ' ' && r != 'T' { + return l.errorf("incorrect date/time separation character: %c", r) + } + + if r == ' ' { + lookAhead := l.peekString(3)[1:] + if len(lookAhead) < 2 { + return l.lexRvalue + } + for _, r := range lookAhead { + if !isDigit(r) { + return l.lexRvalue + } + } + } + + l.skip() // skip the T or ' ' + + // time + + for i := 0; i < 2; i++ { + r := l.next() + if !isDigit(r) { + return l.errorf("invalid hour digit in time: %c", r) + } + } + + r = l.next() + if r != ':' { + return l.errorf("time hour/minute separator should be :, not %c", r) + } + + for i := 0; i < 2; i++ { + r := l.next() + if !isDigit(r) { + return l.errorf("invalid minute digit in time: %c", r) + } + } + + r = l.next() + if r != ':' { + return l.errorf("time minute/second separator should be :, not %c", r) + } + + for i := 0; i < 2; i++ { + r := l.next() + if !isDigit(r) { + return l.errorf("invalid second digit in time: %c", r) + } + } + + r = l.peek() + if r == '.' { + l.next() + r := l.next() + if !isDigit(r) { + return l.errorf("expected at least one digit in time's fraction, not %c", r) + } + + for { + r := l.peek() + if !isDigit(r) { + break + } + l.next() + } + } + + l.emit(tokenLocalTime) + + return l.lexTimeOffset + +} + +func (l *tomlLexer) lexTimeOffset() tomlLexStateFn { + // potential offset + + // Z + // -07:00 + // +07:00 + // nothing + + r := l.peek() + + if r == 'Z' { + l.next() + l.emit(tokenTimeOffset) + } else if r == '+' || r == '-' { + l.next() + + for i := 0; i < 2; i++ { + r := l.next() + if !isDigit(r) { + return l.errorf("invalid hour digit in time offset: %c", r) + } + } + + r = l.next() + if r != ':' { + return l.errorf("time offset hour/minute separator should be :, not %c", r) + } + + for i := 0; i < 2; i++ { + r := l.next() + if !isDigit(r) { + return l.errorf("invalid minute digit in time offset: %c", r) + } + } + + l.emit(tokenTimeOffset) + } + + return l.lexRvalue +} + +func (l *tomlLexer) lexTime() tomlLexStateFn { + // v--- cursor + // 07:32:00 + // 00:32:00.999999 + + for i := 0; i < 2; i++ { + r := l.next() + if !isDigit(r) { + return l.errorf("invalid minute digit in time: %c", r) + } + } + + r := l.next() + if r != ':' { + return l.errorf("time minute/second separator should be :, not %c", r) + } + + for i := 0; i < 2; i++ { + r := l.next() + if !isDigit(r) { + return l.errorf("invalid second digit in time: %c", r) + } + } + + r = l.peek() + if r == '.' 
{ + l.next() + r := l.next() + if !isDigit(r) { + return l.errorf("expected at least one digit in time's fraction, not %c", r) + } + + for { + r := l.peek() + if !isDigit(r) { + break + } + l.next() + } + } + + l.emit(tokenLocalTime) + return l.lexRvalue + +} + +func (l *tomlLexer) lexTrue() tomlLexStateFn { + l.fastForward(4) + l.emit(tokenTrue) + return l.lexRvalue +} + +func (l *tomlLexer) lexFalse() tomlLexStateFn { + l.fastForward(5) + l.emit(tokenFalse) + return l.lexRvalue +} + +func (l *tomlLexer) lexInf() tomlLexStateFn { + l.fastForward(3) + l.emit(tokenInf) + return l.lexRvalue +} + +func (l *tomlLexer) lexNan() tomlLexStateFn { + l.fastForward(3) + l.emit(tokenNan) + return l.lexRvalue +} + +func (l *tomlLexer) lexEqual() tomlLexStateFn { + l.next() + l.emit(tokenEqual) + return l.lexRvalue +} + +func (l *tomlLexer) lexComma() tomlLexStateFn { + l.next() + l.emit(tokenComma) + if len(l.brackets) > 0 && l.brackets[len(l.brackets)-1] == '{' { + return l.lexVoid + } + return l.lexRvalue +} + +// Parse the key and emits its value without escape sequences. +// bare keys, basic string keys and literal string keys are supported. +func (l *tomlLexer) lexKey() tomlLexStateFn { + var sb strings.Builder + + for r := l.peek(); isKeyChar(r) || r == '\n' || r == '\r'; r = l.peek() { + if r == '"' { + l.next() + str, err := l.lexStringAsString(`"`, false, true) + if err != nil { + return l.errorf(err.Error()) + } + sb.WriteString("\"") + sb.WriteString(str) + sb.WriteString("\"") + l.next() + continue + } else if r == '\'' { + l.next() + str, err := l.lexLiteralStringAsString(`'`, false) + if err != nil { + return l.errorf(err.Error()) + } + sb.WriteString("'") + sb.WriteString(str) + sb.WriteString("'") + l.next() + continue + } else if r == '\n' { + return l.errorf("keys cannot contain new lines") + } else if isSpace(r) { + var str strings.Builder + str.WriteString(" ") + + // skip trailing whitespace + l.next() + for r = l.peek(); isSpace(r); r = l.peek() { + str.WriteRune(r) + l.next() + } + // break loop if not a dot + if r != '.' { + break + } + str.WriteString(".") + // skip trailing whitespace after dot + l.next() + for r = l.peek(); isSpace(r); r = l.peek() { + str.WriteRune(r) + l.next() + } + sb.WriteString(str.String()) + continue + } else if r == '.' 
{ + // skip + } else if !isValidBareChar(r) { + return l.errorf("keys cannot contain %c character", r) + } + sb.WriteRune(r) + l.next() + } + l.emitWithValue(tokenKey, sb.String()) + return l.lexVoid +} + +func (l *tomlLexer) lexComment(previousState tomlLexStateFn) tomlLexStateFn { + return func() tomlLexStateFn { + for next := l.peek(); next != '\n' && next != eof; next = l.peek() { + if next == '\r' && l.follow("\r\n") { + break + } + l.next() + } + l.ignore() + return previousState + } +} + +func (l *tomlLexer) lexLeftBracket() tomlLexStateFn { + l.next() + l.emit(tokenLeftBracket) + l.brackets = append(l.brackets, '[') + return l.lexRvalue +} + +func (l *tomlLexer) lexLiteralStringAsString(terminator string, discardLeadingNewLine bool) (string, error) { + var sb strings.Builder + + if discardLeadingNewLine { + if l.follow("\r\n") { + l.skip() + l.skip() + } else if l.peek() == '\n' { + l.skip() + } + } + + // find end of string + for { + if l.follow(terminator) { + return sb.String(), nil + } + + next := l.peek() + if next == eof { + break + } + sb.WriteRune(l.next()) + } + + return "", errors.New("unclosed string") +} + +func (l *tomlLexer) lexLiteralString() tomlLexStateFn { + l.skip() + + // handle special case for triple-quote + terminator := "'" + discardLeadingNewLine := false + if l.follow("''") { + l.skip() + l.skip() + terminator = "'''" + discardLeadingNewLine = true + } + + str, err := l.lexLiteralStringAsString(terminator, discardLeadingNewLine) + if err != nil { + return l.errorf(err.Error()) + } + + l.emitWithValue(tokenString, str) + l.fastForward(len(terminator)) + l.ignore() + return l.lexRvalue +} + +// Lex a string and return the results as a string. +// Terminator is the substring indicating the end of the token. +// The resulting string does not include the terminator. 
+func (l *tomlLexer) lexStringAsString(terminator string, discardLeadingNewLine, acceptNewLines bool) (string, error) { + var sb strings.Builder + + if discardLeadingNewLine { + if l.follow("\r\n") { + l.skip() + l.skip() + } else if l.peek() == '\n' { + l.skip() + } + } + + for { + if l.follow(terminator) { + return sb.String(), nil + } + + if l.follow("\\") { + l.next() + switch l.peek() { + case '\r': + fallthrough + case '\n': + fallthrough + case '\t': + fallthrough + case ' ': + // skip all whitespace chars following backslash + for strings.ContainsRune("\r\n\t ", l.peek()) { + l.next() + } + case '"': + sb.WriteString("\"") + l.next() + case 'n': + sb.WriteString("\n") + l.next() + case 'b': + sb.WriteString("\b") + l.next() + case 'f': + sb.WriteString("\f") + l.next() + case '/': + sb.WriteString("/") + l.next() + case 't': + sb.WriteString("\t") + l.next() + case 'r': + sb.WriteString("\r") + l.next() + case '\\': + sb.WriteString("\\") + l.next() + case 'u': + l.next() + var code strings.Builder + for i := 0; i < 4; i++ { + c := l.peek() + if !isHexDigit(c) { + return "", errors.New("unfinished unicode escape") + } + l.next() + code.WriteRune(c) + } + intcode, err := strconv.ParseInt(code.String(), 16, 32) + if err != nil { + return "", errors.New("invalid unicode escape: \\u" + code.String()) + } + sb.WriteRune(rune(intcode)) + case 'U': + l.next() + var code strings.Builder + for i := 0; i < 8; i++ { + c := l.peek() + if !isHexDigit(c) { + return "", errors.New("unfinished unicode escape") + } + l.next() + code.WriteRune(c) + } + intcode, err := strconv.ParseInt(code.String(), 16, 64) + if err != nil { + return "", errors.New("invalid unicode escape: \\U" + code.String()) + } + sb.WriteRune(rune(intcode)) + default: + return "", errors.New("invalid escape sequence: \\" + string(l.peek())) + } + } else { + r := l.peek() + + if 0x00 <= r && r <= 0x1F && r != '\t' && !(acceptNewLines && (r == '\n' || r == '\r')) { + return "", fmt.Errorf("unescaped control character %U", r) + } + l.next() + sb.WriteRune(r) + } + + if l.peek() == eof { + break + } + } + + return "", errors.New("unclosed string") +} + +func (l *tomlLexer) lexString() tomlLexStateFn { + l.skip() + + // handle special case for triple-quote + terminator := `"` + discardLeadingNewLine := false + acceptNewLines := false + if l.follow(`""`) { + l.skip() + l.skip() + terminator = `"""` + discardLeadingNewLine = true + acceptNewLines = true + } + + str, err := l.lexStringAsString(terminator, discardLeadingNewLine, acceptNewLines) + if err != nil { + return l.errorf(err.Error()) + } + + l.emitWithValue(tokenString, str) + l.fastForward(len(terminator)) + l.ignore() + return l.lexRvalue +} + +func (l *tomlLexer) lexTableKey() tomlLexStateFn { + l.next() + + if l.peek() == '[' { + // token '[[' signifies an array of tables + l.next() + l.emit(tokenDoubleLeftBracket) + return l.lexInsideTableArrayKey + } + // vanilla table key + l.emit(tokenLeftBracket) + return l.lexInsideTableKey +} + +// Parse the key till "]]", but only bare keys are supported +func (l *tomlLexer) lexInsideTableArrayKey() tomlLexStateFn { + for r := l.peek(); r != eof; r = l.peek() { + switch r { + case ']': + if l.currentTokenStop > l.currentTokenStart { + l.emit(tokenKeyGroupArray) + } + l.next() + if l.peek() != ']' { + break + } + l.next() + l.emit(tokenDoubleRightBracket) + return l.lexVoid + case '[': + return l.errorf("table array key cannot contain ']'") + default: + l.next() + } + } + return l.errorf("unclosed table array key") +} + +// Parse the 
key till "]" but only bare keys are supported +func (l *tomlLexer) lexInsideTableKey() tomlLexStateFn { + for r := l.peek(); r != eof; r = l.peek() { + switch r { + case ']': + if l.currentTokenStop > l.currentTokenStart { + l.emit(tokenKeyGroup) + } + l.next() + l.emit(tokenRightBracket) + return l.lexVoid + case '[': + return l.errorf("table key cannot contain ']'") + default: + l.next() + } + } + return l.errorf("unclosed table key") +} + +func (l *tomlLexer) lexRightBracket() tomlLexStateFn { + l.next() + l.emit(tokenRightBracket) + if len(l.brackets) == 0 || l.brackets[len(l.brackets)-1] != '[' { + return l.errorf("cannot have ']' here") + } + l.brackets = l.brackets[:len(l.brackets)-1] + return l.lexRvalue +} + +type validRuneFn func(r rune) bool + +func isValidHexRune(r rune) bool { + return r >= 'a' && r <= 'f' || + r >= 'A' && r <= 'F' || + r >= '0' && r <= '9' || + r == '_' +} + +func isValidOctalRune(r rune) bool { + return r >= '0' && r <= '7' || r == '_' +} + +func isValidBinaryRune(r rune) bool { + return r == '0' || r == '1' || r == '_' +} + +func (l *tomlLexer) lexNumber() tomlLexStateFn { + r := l.peek() + + if r == '0' { + follow := l.peekString(2) + if len(follow) == 2 { + var isValidRune validRuneFn + switch follow[1] { + case 'x': + isValidRune = isValidHexRune + case 'o': + isValidRune = isValidOctalRune + case 'b': + isValidRune = isValidBinaryRune + default: + if follow[1] >= 'a' && follow[1] <= 'z' || follow[1] >= 'A' && follow[1] <= 'Z' { + return l.errorf("unknown number base: %s. possible options are x (hex) o (octal) b (binary)", string(follow[1])) + } + } + + if isValidRune != nil { + l.next() + l.next() + digitSeen := false + for { + next := l.peek() + if !isValidRune(next) { + break + } + digitSeen = true + l.next() + } + + if !digitSeen { + return l.errorf("number needs at least one digit") + } + + l.emit(tokenInteger) + + return l.lexRvalue + } + } + } + + if r == '+' || r == '-' { + l.next() + if l.follow("inf") { + return l.lexInf + } + if l.follow("nan") { + return l.lexNan + } + } + + pointSeen := false + expSeen := false + digitSeen := false + for { + next := l.peek() + if next == '.' { + if pointSeen { + return l.errorf("cannot have two dots in one float") + } + l.next() + if !isDigit(l.peek()) { + return l.errorf("float cannot end with a dot") + } + pointSeen = true + } else if next == 'e' || next == 'E' { + expSeen = true + l.next() + r := l.peek() + if r == '+' || r == '-' { + l.next() + } + } else if isDigit(next) { + digitSeen = true + l.next() + } else if next == '_' { + l.next() + } else { + break + } + if pointSeen && !digitSeen { + return l.errorf("cannot start float with a dot") + } + } + + if !digitSeen { + return l.errorf("no digit in that number") + } + if pointSeen || expSeen { + l.emit(tokenFloat) + } else { + l.emit(tokenInteger) + } + return l.lexRvalue +} + +func (l *tomlLexer) run() { + for state := l.lexVoid; state != nil; { + state = state() + } +} + +// Entry point +func lexToml(inputBytes []byte) []token { + runes := bytes.Runes(inputBytes) + l := &tomlLexer{ + input: runes, + tokens: make([]token, 0, 256), + line: 1, + col: 1, + endbufferLine: 1, + endbufferCol: 1, + } + l.run() + return l.tokens +} diff --git a/vendor/github.com/pelletier/go-toml/localtime.go b/vendor/github.com/pelletier/go-toml/localtime.go new file mode 100644 index 000000000..9dfe4b9e6 --- /dev/null +++ b/vendor/github.com/pelletier/go-toml/localtime.go @@ -0,0 +1,287 @@ +// Implementation of TOML's local date/time. 
+// +// Copied over from Google's civil to avoid pulling all the Google dependencies. +// Originals: +// https://raw.githubusercontent.com/googleapis/google-cloud-go/ed46f5086358513cf8c25f8e3f022cb838a49d66/civil/civil.go +// Changes: +// * Renamed files from civil* to localtime*. +// * Package changed from civil to toml. +// * 'Local' prefix added to all structs. +// +// Copyright 2016 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package civil implements types for civil time, a time-zone-independent +// representation of time that follows the rules of the proleptic +// Gregorian calendar with exactly 24-hour days, 60-minute hours, and 60-second +// minutes. +// +// Because they lack location information, these types do not represent unique +// moments or intervals of time. Use time.Time for that purpose. +package toml + +import ( + "fmt" + "time" +) + +// A LocalDate represents a date (year, month, day). +// +// This type does not include location information, and therefore does not +// describe a unique 24-hour timespan. +type LocalDate struct { + Year int // Year (e.g., 2014). + Month time.Month // Month of the year (January = 1, ...). + Day int // Day of the month, starting at 1. +} + +// LocalDateOf returns the LocalDate in which a time occurs in that time's location. +func LocalDateOf(t time.Time) LocalDate { + var d LocalDate + d.Year, d.Month, d.Day = t.Date() + return d +} + +// ParseLocalDate parses a string in RFC3339 full-date format and returns the date value it represents. +func ParseLocalDate(s string) (LocalDate, error) { + t, err := time.Parse("2006-01-02", s) + if err != nil { + return LocalDate{}, err + } + return LocalDateOf(t), nil +} + +// String returns the date in RFC3339 full-date format. +func (d LocalDate) String() string { + return fmt.Sprintf("%04d-%02d-%02d", d.Year, d.Month, d.Day) +} + +// IsValid reports whether the date is valid. +func (d LocalDate) IsValid() bool { + return LocalDateOf(d.In(time.UTC)) == d +} + +// In returns the time corresponding to time 00:00:00 of the date in the location. +// +// In is always consistent with time.LocalDate, even when time.LocalDate returns a time +// on a different day. For example, if loc is America/Indiana/Vincennes, then both +// time.LocalDate(1955, time.May, 1, 0, 0, 0, 0, loc) +// and +// civil.LocalDate{Year: 1955, Month: time.May, Day: 1}.In(loc) +// return 23:00:00 on April 30, 1955. +// +// In panics if loc is nil. +func (d LocalDate) In(loc *time.Location) time.Time { + return time.Date(d.Year, d.Month, d.Day, 0, 0, 0, 0, loc) +} + +// AddDays returns the date that is n days in the future. +// n can also be negative to go into the past. +func (d LocalDate) AddDays(n int) LocalDate { + return LocalDateOf(d.In(time.UTC).AddDate(0, 0, n)) +} + +// DaysSince returns the signed number of days between the date and s, not including the end day. +// This is the inverse operation to AddDays. 
+func (d LocalDate) DaysSince(s LocalDate) (days int) { + // We convert to Unix time so we do not have to worry about leap seconds: + // Unix time increases by exactly 86400 seconds per day. + deltaUnix := d.In(time.UTC).Unix() - s.In(time.UTC).Unix() + return int(deltaUnix / 86400) +} + +// Before reports whether d1 occurs before d2. +func (d1 LocalDate) Before(d2 LocalDate) bool { + if d1.Year != d2.Year { + return d1.Year < d2.Year + } + if d1.Month != d2.Month { + return d1.Month < d2.Month + } + return d1.Day < d2.Day +} + +// After reports whether d1 occurs after d2. +func (d1 LocalDate) After(d2 LocalDate) bool { + return d2.Before(d1) +} + +// MarshalText implements the encoding.TextMarshaler interface. +// The output is the result of d.String(). +func (d LocalDate) MarshalText() ([]byte, error) { + return []byte(d.String()), nil +} + +// UnmarshalText implements the encoding.TextUnmarshaler interface. +// The date is expected to be a string in a format accepted by ParseLocalDate. +func (d *LocalDate) UnmarshalText(data []byte) error { + var err error + *d, err = ParseLocalDate(string(data)) + return err +} + +// A LocalTime represents a time with nanosecond precision. +// +// This type does not include location information, and therefore does not +// describe a unique moment in time. +// +// This type exists to represent the TIME type in storage-based APIs like BigQuery. +// Most operations on Times are unlikely to be meaningful. Prefer the LocalDateTime type. +type LocalTime struct { + Hour int // The hour of the day in 24-hour format; range [0-23] + Minute int // The minute of the hour; range [0-59] + Second int // The second of the minute; range [0-59] + Nanosecond int // The nanosecond of the second; range [0-999999999] +} + +// LocalTimeOf returns the LocalTime representing the time of day in which a time occurs +// in that time's location. It ignores the date. +func LocalTimeOf(t time.Time) LocalTime { + var tm LocalTime + tm.Hour, tm.Minute, tm.Second = t.Clock() + tm.Nanosecond = t.Nanosecond() + return tm +} + +// ParseLocalTime parses a string and returns the time value it represents. +// ParseLocalTime accepts an extended form of the RFC3339 partial-time format. After +// the HH:MM:SS part of the string, an optional fractional part may appear, +// consisting of a decimal point followed by one to nine decimal digits. +// (RFC3339 admits only one digit after the decimal point). +func ParseLocalTime(s string) (LocalTime, error) { + t, err := time.Parse("15:04:05.999999999", s) + if err != nil { + return LocalTime{}, err + } + return LocalTimeOf(t), nil +} + +// String returns the date in the format described in ParseLocalTime. If Nanoseconds +// is zero, no fractional part will be generated. Otherwise, the result will +// end with a fractional part consisting of a decimal point and nine digits. +func (t LocalTime) String() string { + s := fmt.Sprintf("%02d:%02d:%02d", t.Hour, t.Minute, t.Second) + if t.Nanosecond == 0 { + return s + } + return s + fmt.Sprintf(".%09d", t.Nanosecond) +} + +// IsValid reports whether the time is valid. +func (t LocalTime) IsValid() bool { + // Construct a non-zero time. + tm := time.Date(2, 2, 2, t.Hour, t.Minute, t.Second, t.Nanosecond, time.UTC) + return LocalTimeOf(tm) == t +} + +// MarshalText implements the encoding.TextMarshaler interface. +// The output is the result of t.String(). 
+func (t LocalTime) MarshalText() ([]byte, error) { + return []byte(t.String()), nil +} + +// UnmarshalText implements the encoding.TextUnmarshaler interface. +// The time is expected to be a string in a format accepted by ParseLocalTime. +func (t *LocalTime) UnmarshalText(data []byte) error { + var err error + *t, err = ParseLocalTime(string(data)) + return err +} + +// A LocalDateTime represents a date and time. +// +// This type does not include location information, and therefore does not +// describe a unique moment in time. +type LocalDateTime struct { + Date LocalDate + Time LocalTime +} + +// Note: We deliberately do not embed LocalDate into LocalDateTime, to avoid promoting AddDays and Sub. + +// LocalDateTimeOf returns the LocalDateTime in which a time occurs in that time's location. +func LocalDateTimeOf(t time.Time) LocalDateTime { + return LocalDateTime{ + Date: LocalDateOf(t), + Time: LocalTimeOf(t), + } +} + +// ParseLocalDateTime parses a string and returns the LocalDateTime it represents. +// ParseLocalDateTime accepts a variant of the RFC3339 date-time format that omits +// the time offset but includes an optional fractional time, as described in +// ParseLocalTime. Informally, the accepted format is +// YYYY-MM-DDTHH:MM:SS[.FFFFFFFFF] +// where the 'T' may be a lower-case 't'. +func ParseLocalDateTime(s string) (LocalDateTime, error) { + t, err := time.Parse("2006-01-02T15:04:05.999999999", s) + if err != nil { + t, err = time.Parse("2006-01-02t15:04:05.999999999", s) + if err != nil { + return LocalDateTime{}, err + } + } + return LocalDateTimeOf(t), nil +} + +// String returns the date in the format described in ParseLocalDate. +func (dt LocalDateTime) String() string { + return dt.Date.String() + "T" + dt.Time.String() +} + +// IsValid reports whether the datetime is valid. +func (dt LocalDateTime) IsValid() bool { + return dt.Date.IsValid() && dt.Time.IsValid() +} + +// In returns the time corresponding to the LocalDateTime in the given location. +// +// If the time is missing or ambigous at the location, In returns the same +// result as time.LocalDate. For example, if loc is America/Indiana/Vincennes, then +// both +// time.LocalDate(1955, time.May, 1, 0, 30, 0, 0, loc) +// and +// civil.LocalDateTime{ +// civil.LocalDate{Year: 1955, Month: time.May, Day: 1}}, +// civil.LocalTime{Minute: 30}}.In(loc) +// return 23:30:00 on April 30, 1955. +// +// In panics if loc is nil. +func (dt LocalDateTime) In(loc *time.Location) time.Time { + return time.Date(dt.Date.Year, dt.Date.Month, dt.Date.Day, dt.Time.Hour, dt.Time.Minute, dt.Time.Second, dt.Time.Nanosecond, loc) +} + +// Before reports whether dt1 occurs before dt2. +func (dt1 LocalDateTime) Before(dt2 LocalDateTime) bool { + return dt1.In(time.UTC).Before(dt2.In(time.UTC)) +} + +// After reports whether dt1 occurs after dt2. +func (dt1 LocalDateTime) After(dt2 LocalDateTime) bool { + return dt2.Before(dt1) +} + +// MarshalText implements the encoding.TextMarshaler interface. +// The output is the result of dt.String(). +func (dt LocalDateTime) MarshalText() ([]byte, error) { + return []byte(dt.String()), nil +} + +// UnmarshalText implements the encoding.TextUnmarshaler interface. 
+// The datetime is expected to be a string in a format accepted by ParseLocalDateTime +func (dt *LocalDateTime) UnmarshalText(data []byte) error { + var err error + *dt, err = ParseLocalDateTime(string(data)) + return err +} diff --git a/vendor/github.com/pelletier/go-toml/marshal.go b/vendor/github.com/pelletier/go-toml/marshal.go new file mode 100644 index 000000000..3443c3545 --- /dev/null +++ b/vendor/github.com/pelletier/go-toml/marshal.go @@ -0,0 +1,1308 @@ +package toml + +import ( + "bytes" + "encoding" + "errors" + "fmt" + "io" + "reflect" + "sort" + "strconv" + "strings" + "time" +) + +const ( + tagFieldName = "toml" + tagFieldComment = "comment" + tagCommented = "commented" + tagMultiline = "multiline" + tagLiteral = "literal" + tagDefault = "default" +) + +type tomlOpts struct { + name string + nameFromTag bool + comment string + commented bool + multiline bool + literal bool + include bool + omitempty bool + defaultValue string +} + +type encOpts struct { + quoteMapKeys bool + arraysOneElementPerLine bool +} + +var encOptsDefaults = encOpts{ + quoteMapKeys: false, +} + +type annotation struct { + tag string + comment string + commented string + multiline string + literal string + defaultValue string +} + +var annotationDefault = annotation{ + tag: tagFieldName, + comment: tagFieldComment, + commented: tagCommented, + multiline: tagMultiline, + literal: tagLiteral, + defaultValue: tagDefault, +} + +type MarshalOrder int + +// Orders the Encoder can write the fields to the output stream. +const ( + // Sort fields alphabetically. + OrderAlphabetical MarshalOrder = iota + 1 + // Preserve the order the fields are encountered. For example, the order of fields in + // a struct. + OrderPreserve +) + +var timeType = reflect.TypeOf(time.Time{}) +var marshalerType = reflect.TypeOf(new(Marshaler)).Elem() +var unmarshalerType = reflect.TypeOf(new(Unmarshaler)).Elem() +var textMarshalerType = reflect.TypeOf(new(encoding.TextMarshaler)).Elem() +var textUnmarshalerType = reflect.TypeOf(new(encoding.TextUnmarshaler)).Elem() +var localDateType = reflect.TypeOf(LocalDate{}) +var localTimeType = reflect.TypeOf(LocalTime{}) +var localDateTimeType = reflect.TypeOf(LocalDateTime{}) +var mapStringInterfaceType = reflect.TypeOf(map[string]interface{}{}) + +// Check if the given marshal type maps to a Tree primitive +func isPrimitive(mtype reflect.Type) bool { + switch mtype.Kind() { + case reflect.Ptr: + return isPrimitive(mtype.Elem()) + case reflect.Bool: + return true + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + return true + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64: + return true + case reflect.Float32, reflect.Float64: + return true + case reflect.String: + return true + case reflect.Struct: + return isTimeType(mtype) + default: + return false + } +} + +func isTimeType(mtype reflect.Type) bool { + return mtype == timeType || mtype == localDateType || mtype == localDateTimeType || mtype == localTimeType +} + +// Check if the given marshal type maps to a Tree slice or array +func isTreeSequence(mtype reflect.Type) bool { + switch mtype.Kind() { + case reflect.Ptr: + return isTreeSequence(mtype.Elem()) + case reflect.Slice, reflect.Array: + return isTree(mtype.Elem()) + default: + return false + } +} + +// Check if the given marshal type maps to a slice or array of a custom marshaler type +func isCustomMarshalerSequence(mtype reflect.Type) bool { + switch mtype.Kind() { + case reflect.Ptr: + return 
isCustomMarshalerSequence(mtype.Elem()) + case reflect.Slice, reflect.Array: + return isCustomMarshaler(mtype.Elem()) || isCustomMarshaler(reflect.New(mtype.Elem()).Type()) + default: + return false + } +} + +// Check if the given marshal type maps to a slice or array of a text marshaler type +func isTextMarshalerSequence(mtype reflect.Type) bool { + switch mtype.Kind() { + case reflect.Ptr: + return isTextMarshalerSequence(mtype.Elem()) + case reflect.Slice, reflect.Array: + return isTextMarshaler(mtype.Elem()) || isTextMarshaler(reflect.New(mtype.Elem()).Type()) + default: + return false + } +} + +// Check if the given marshal type maps to a non-Tree slice or array +func isOtherSequence(mtype reflect.Type) bool { + switch mtype.Kind() { + case reflect.Ptr: + return isOtherSequence(mtype.Elem()) + case reflect.Slice, reflect.Array: + return !isTreeSequence(mtype) + default: + return false + } +} + +// Check if the given marshal type maps to a Tree +func isTree(mtype reflect.Type) bool { + switch mtype.Kind() { + case reflect.Ptr: + return isTree(mtype.Elem()) + case reflect.Map: + return true + case reflect.Struct: + return !isPrimitive(mtype) + default: + return false + } +} + +func isCustomMarshaler(mtype reflect.Type) bool { + return mtype.Implements(marshalerType) +} + +func callCustomMarshaler(mval reflect.Value) ([]byte, error) { + return mval.Interface().(Marshaler).MarshalTOML() +} + +func isTextMarshaler(mtype reflect.Type) bool { + return mtype.Implements(textMarshalerType) && !isTimeType(mtype) +} + +func callTextMarshaler(mval reflect.Value) ([]byte, error) { + return mval.Interface().(encoding.TextMarshaler).MarshalText() +} + +func isCustomUnmarshaler(mtype reflect.Type) bool { + return mtype.Implements(unmarshalerType) +} + +func callCustomUnmarshaler(mval reflect.Value, tval interface{}) error { + return mval.Interface().(Unmarshaler).UnmarshalTOML(tval) +} + +func isTextUnmarshaler(mtype reflect.Type) bool { + return mtype.Implements(textUnmarshalerType) +} + +func callTextUnmarshaler(mval reflect.Value, text []byte) error { + return mval.Interface().(encoding.TextUnmarshaler).UnmarshalText(text) +} + +// Marshaler is the interface implemented by types that +// can marshal themselves into valid TOML. +type Marshaler interface { + MarshalTOML() ([]byte, error) +} + +// Unmarshaler is the interface implemented by types that +// can unmarshal a TOML description of themselves. +type Unmarshaler interface { + UnmarshalTOML(interface{}) error +} + +/* +Marshal returns the TOML encoding of v. Behavior is similar to the Go json +encoder, except that there is no concept of a Marshaler interface or MarshalTOML +function for sub-structs, and currently only definite types can be marshaled +(i.e. no `interface{}`). + +The following struct annotations are supported: + + toml:"Field" Overrides the field's name to output. + omitempty When set, empty values and groups are not emitted. + comment:"comment" Emits a # comment on the same line. This supports new lines. + commented:"true" Emits the value as commented. + +Note that pointers are automatically assigned the "omitempty" option, as TOML +explicitly does not handle null values (saying instead the label should be +dropped). 
+ +Tree structural types and corresponding marshal types: + + *Tree (*)struct, (*)map[string]interface{} + []*Tree (*)[](*)struct, (*)[](*)map[string]interface{} + []interface{} (as interface{}) (*)[]primitive, (*)[]([]interface{}) + interface{} (*)primitive + +Tree primitive types and corresponding marshal types: + + uint64 uint, uint8-uint64, pointers to same + int64 int, int8-uint64, pointers to same + float64 float32, float64, pointers to same + string string, pointers to same + bool bool, pointers to same + time.LocalTime time.LocalTime{}, pointers to same + +For additional flexibility, use the Encoder API. +*/ +func Marshal(v interface{}) ([]byte, error) { + return NewEncoder(nil).marshal(v) +} + +// Encoder writes TOML values to an output stream. +type Encoder struct { + w io.Writer + encOpts + annotation + line int + col int + order MarshalOrder + promoteAnon bool + compactComments bool + indentation string +} + +// NewEncoder returns a new encoder that writes to w. +func NewEncoder(w io.Writer) *Encoder { + return &Encoder{ + w: w, + encOpts: encOptsDefaults, + annotation: annotationDefault, + line: 0, + col: 1, + order: OrderAlphabetical, + indentation: " ", + } +} + +// Encode writes the TOML encoding of v to the stream. +// +// See the documentation for Marshal for details. +func (e *Encoder) Encode(v interface{}) error { + b, err := e.marshal(v) + if err != nil { + return err + } + if _, err := e.w.Write(b); err != nil { + return err + } + return nil +} + +// QuoteMapKeys sets up the encoder to encode +// maps with string type keys with quoted TOML keys. +// +// This relieves the character limitations on map keys. +func (e *Encoder) QuoteMapKeys(v bool) *Encoder { + e.quoteMapKeys = v + return e +} + +// ArraysWithOneElementPerLine sets up the encoder to encode arrays +// with more than one element on multiple lines instead of one. +// +// For example: +// +// A = [1,2,3] +// +// Becomes +// +// A = [ +// 1, +// 2, +// 3, +// ] +func (e *Encoder) ArraysWithOneElementPerLine(v bool) *Encoder { + e.arraysOneElementPerLine = v + return e +} + +// Order allows to change in which order fields will be written to the output stream. +func (e *Encoder) Order(ord MarshalOrder) *Encoder { + e.order = ord + return e +} + +// Indentation allows to change indentation when marshalling. +func (e *Encoder) Indentation(indent string) *Encoder { + e.indentation = indent + return e +} + +// SetTagName allows changing default tag "toml" +func (e *Encoder) SetTagName(v string) *Encoder { + e.tag = v + return e +} + +// SetTagComment allows changing default tag "comment" +func (e *Encoder) SetTagComment(v string) *Encoder { + e.comment = v + return e +} + +// SetTagCommented allows changing default tag "commented" +func (e *Encoder) SetTagCommented(v string) *Encoder { + e.commented = v + return e +} + +// SetTagMultiline allows changing default tag "multiline" +func (e *Encoder) SetTagMultiline(v string) *Encoder { + e.multiline = v + return e +} + +// PromoteAnonymous allows to change how anonymous struct fields are marshaled. +// Usually, they are marshaled as if the inner exported fields were fields in +// the outer struct. However, if an anonymous struct field is given a name in +// its TOML tag, it is treated like a regular struct field with that name. +// rather than being anonymous. +// +// In case anonymous promotion is enabled, all anonymous structs are promoted +// and treated like regular struct fields. 
+func (e *Encoder) PromoteAnonymous(promote bool) *Encoder { + e.promoteAnon = promote + return e +} + +// CompactComments removes the new line before each comment in the tree. +func (e *Encoder) CompactComments(cc bool) *Encoder { + e.compactComments = cc + return e +} + +func (e *Encoder) marshal(v interface{}) ([]byte, error) { + // Check if indentation is valid + for _, char := range e.indentation { + if !isSpace(char) { + return []byte{}, fmt.Errorf("invalid indentation: must only contains space or tab characters") + } + } + + mtype := reflect.TypeOf(v) + if mtype == nil { + return []byte{}, errors.New("nil cannot be marshaled to TOML") + } + + switch mtype.Kind() { + case reflect.Struct, reflect.Map: + case reflect.Ptr: + if mtype.Elem().Kind() != reflect.Struct { + return []byte{}, errors.New("Only pointer to struct can be marshaled to TOML") + } + if reflect.ValueOf(v).IsNil() { + return []byte{}, errors.New("nil pointer cannot be marshaled to TOML") + } + default: + return []byte{}, errors.New("Only a struct or map can be marshaled to TOML") + } + + sval := reflect.ValueOf(v) + if isCustomMarshaler(mtype) { + return callCustomMarshaler(sval) + } + if isTextMarshaler(mtype) { + return callTextMarshaler(sval) + } + t, err := e.valueToTree(mtype, sval) + if err != nil { + return []byte{}, err + } + + var buf bytes.Buffer + _, err = t.writeToOrdered(&buf, "", "", 0, e.arraysOneElementPerLine, e.order, e.indentation, e.compactComments, false) + + return buf.Bytes(), err +} + +// Create next tree with a position based on Encoder.line +func (e *Encoder) nextTree() *Tree { + return newTreeWithPosition(Position{Line: e.line, Col: 1}) +} + +// Convert given marshal struct or map value to toml tree +func (e *Encoder) valueToTree(mtype reflect.Type, mval reflect.Value) (*Tree, error) { + if mtype.Kind() == reflect.Ptr { + return e.valueToTree(mtype.Elem(), mval.Elem()) + } + tval := e.nextTree() + switch mtype.Kind() { + case reflect.Struct: + switch mval.Interface().(type) { + case Tree: + reflect.ValueOf(tval).Elem().Set(mval) + default: + for i := 0; i < mtype.NumField(); i++ { + mtypef, mvalf := mtype.Field(i), mval.Field(i) + opts := tomlOptions(mtypef, e.annotation) + if opts.include && ((mtypef.Type.Kind() != reflect.Interface && !opts.omitempty) || !isZero(mvalf)) { + val, err := e.valueToToml(mtypef.Type, mvalf) + if err != nil { + return nil, err + } + if tree, ok := val.(*Tree); ok && mtypef.Anonymous && !opts.nameFromTag && !e.promoteAnon { + e.appendTree(tval, tree) + } else { + val = e.wrapTomlValue(val, tval) + tval.SetPathWithOptions([]string{opts.name}, SetOptions{ + Comment: opts.comment, + Commented: opts.commented, + Multiline: opts.multiline, + Literal: opts.literal, + }, val) + } + } + } + } + case reflect.Map: + keys := mval.MapKeys() + if e.order == OrderPreserve && len(keys) > 0 { + // Sorting []reflect.Value is not straight forward. + // + // OrderPreserve will support deterministic results when string is used + // as the key to maps. 
+ typ := keys[0].Type() + kind := keys[0].Kind() + if kind == reflect.String { + ikeys := make([]string, len(keys)) + for i := range keys { + ikeys[i] = keys[i].Interface().(string) + } + sort.Strings(ikeys) + for i := range ikeys { + keys[i] = reflect.ValueOf(ikeys[i]).Convert(typ) + } + } + } + for _, key := range keys { + mvalf := mval.MapIndex(key) + if (mtype.Elem().Kind() == reflect.Ptr || mtype.Elem().Kind() == reflect.Interface) && mvalf.IsNil() { + continue + } + val, err := e.valueToToml(mtype.Elem(), mvalf) + if err != nil { + return nil, err + } + val = e.wrapTomlValue(val, tval) + if e.quoteMapKeys { + keyStr, err := tomlValueStringRepresentation(key.String(), "", "", e.order, e.arraysOneElementPerLine) + if err != nil { + return nil, err + } + tval.SetPath([]string{keyStr}, val) + } else { + tval.SetPath([]string{key.String()}, val) + } + } + } + return tval, nil +} + +// Convert given marshal slice to slice of Toml trees +func (e *Encoder) valueToTreeSlice(mtype reflect.Type, mval reflect.Value) ([]*Tree, error) { + tval := make([]*Tree, mval.Len(), mval.Len()) + for i := 0; i < mval.Len(); i++ { + val, err := e.valueToTree(mtype.Elem(), mval.Index(i)) + if err != nil { + return nil, err + } + tval[i] = val + } + return tval, nil +} + +// Convert given marshal slice to slice of toml values +func (e *Encoder) valueToOtherSlice(mtype reflect.Type, mval reflect.Value) (interface{}, error) { + tval := make([]interface{}, mval.Len(), mval.Len()) + for i := 0; i < mval.Len(); i++ { + val, err := e.valueToToml(mtype.Elem(), mval.Index(i)) + if err != nil { + return nil, err + } + tval[i] = val + } + return tval, nil +} + +// Convert given marshal value to toml value +func (e *Encoder) valueToToml(mtype reflect.Type, mval reflect.Value) (interface{}, error) { + if mtype.Kind() == reflect.Ptr { + switch { + case isCustomMarshaler(mtype): + return callCustomMarshaler(mval) + case isTextMarshaler(mtype): + b, err := callTextMarshaler(mval) + return string(b), err + default: + return e.valueToToml(mtype.Elem(), mval.Elem()) + } + } + if mtype.Kind() == reflect.Interface { + return e.valueToToml(mval.Elem().Type(), mval.Elem()) + } + switch { + case isCustomMarshaler(mtype): + return callCustomMarshaler(mval) + case isTextMarshaler(mtype): + b, err := callTextMarshaler(mval) + return string(b), err + case isTree(mtype): + return e.valueToTree(mtype, mval) + case isOtherSequence(mtype), isCustomMarshalerSequence(mtype), isTextMarshalerSequence(mtype): + return e.valueToOtherSlice(mtype, mval) + case isTreeSequence(mtype): + return e.valueToTreeSlice(mtype, mval) + default: + switch mtype.Kind() { + case reflect.Bool: + return mval.Bool(), nil + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + if mtype.Kind() == reflect.Int64 && mtype == reflect.TypeOf(time.Duration(1)) { + return fmt.Sprint(mval), nil + } + return mval.Int(), nil + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64: + return mval.Uint(), nil + case reflect.Float32, reflect.Float64: + return mval.Float(), nil + case reflect.String: + return mval.String(), nil + case reflect.Struct: + return mval.Interface(), nil + default: + return nil, fmt.Errorf("Marshal can't handle %v(%v)", mtype, mtype.Kind()) + } + } +} + +func (e *Encoder) appendTree(t, o *Tree) error { + for key, value := range o.values { + if _, ok := t.values[key]; ok { + continue + } + if tomlValue, ok := value.(*tomlValue); ok { + tomlValue.position.Col = t.position.Col + } + t.values[key] = value + } 
+ return nil +} + +// Create a toml value with the current line number as the position line +func (e *Encoder) wrapTomlValue(val interface{}, parent *Tree) interface{} { + _, isTree := val.(*Tree) + _, isTreeS := val.([]*Tree) + if isTree || isTreeS { + e.line++ + return val + } + + ret := &tomlValue{ + value: val, + position: Position{ + e.line, + parent.position.Col, + }, + } + e.line++ + return ret +} + +// Unmarshal attempts to unmarshal the Tree into a Go struct pointed by v. +// Neither Unmarshaler interfaces nor UnmarshalTOML functions are supported for +// sub-structs, and only definite types can be unmarshaled. +func (t *Tree) Unmarshal(v interface{}) error { + d := Decoder{tval: t, tagName: tagFieldName} + return d.unmarshal(v) +} + +// Marshal returns the TOML encoding of Tree. +// See Marshal() documentation for types mapping table. +func (t *Tree) Marshal() ([]byte, error) { + var buf bytes.Buffer + _, err := t.WriteTo(&buf) + if err != nil { + return nil, err + } + return buf.Bytes(), nil +} + +// Unmarshal parses the TOML-encoded data and stores the result in the value +// pointed to by v. Behavior is similar to the Go json encoder, except that there +// is no concept of an Unmarshaler interface or UnmarshalTOML function for +// sub-structs, and currently only definite types can be unmarshaled to (i.e. no +// `interface{}`). +// +// The following struct annotations are supported: +// +// toml:"Field" Overrides the field's name to map to. +// default:"foo" Provides a default value. +// +// For default values, only fields of the following types are supported: +// * string +// * bool +// * int +// * int64 +// * float64 +// +// See Marshal() documentation for types mapping table. +func Unmarshal(data []byte, v interface{}) error { + t, err := LoadReader(bytes.NewReader(data)) + if err != nil { + return err + } + return t.Unmarshal(v) +} + +// Decoder reads and decodes TOML values from an input stream. +type Decoder struct { + r io.Reader + tval *Tree + encOpts + tagName string + strict bool + visitor visitorState +} + +// NewDecoder returns a new decoder that reads from r. +func NewDecoder(r io.Reader) *Decoder { + return &Decoder{ + r: r, + encOpts: encOptsDefaults, + tagName: tagFieldName, + } +} + +// Decode reads a TOML-encoded value from it's input +// and unmarshals it in the value pointed at by v. +// +// See the documentation for Marshal for details. +func (d *Decoder) Decode(v interface{}) error { + var err error + d.tval, err = LoadReader(d.r) + if err != nil { + return err + } + return d.unmarshal(v) +} + +// SetTagName allows changing default tag "toml" +func (d *Decoder) SetTagName(v string) *Decoder { + d.tagName = v + return d +} + +// Strict allows changing to strict decoding. Any fields that are found in the +// input data and do not have a corresponding struct member cause an error. 
+func (d *Decoder) Strict(strict bool) *Decoder { + d.strict = strict + return d +} + +func (d *Decoder) unmarshal(v interface{}) error { + mtype := reflect.TypeOf(v) + if mtype == nil { + return errors.New("nil cannot be unmarshaled from TOML") + } + if mtype.Kind() != reflect.Ptr { + return errors.New("only a pointer to struct or map can be unmarshaled from TOML") + } + + elem := mtype.Elem() + + switch elem.Kind() { + case reflect.Struct, reflect.Map: + case reflect.Interface: + elem = mapStringInterfaceType + default: + return errors.New("only a pointer to struct or map can be unmarshaled from TOML") + } + + if reflect.ValueOf(v).IsNil() { + return errors.New("nil pointer cannot be unmarshaled from TOML") + } + + vv := reflect.ValueOf(v).Elem() + + if d.strict { + d.visitor = newVisitorState(d.tval) + } + + sval, err := d.valueFromTree(elem, d.tval, &vv) + if err != nil { + return err + } + if err := d.visitor.validate(); err != nil { + return err + } + reflect.ValueOf(v).Elem().Set(sval) + return nil +} + +// Convert toml tree to marshal struct or map, using marshal type. When mval1 +// is non-nil, merge fields into the given value instead of allocating a new one. +func (d *Decoder) valueFromTree(mtype reflect.Type, tval *Tree, mval1 *reflect.Value) (reflect.Value, error) { + if mtype.Kind() == reflect.Ptr { + return d.unwrapPointer(mtype, tval, mval1) + } + + // Check if pointer to value implements the Unmarshaler interface. + if mvalPtr := reflect.New(mtype); isCustomUnmarshaler(mvalPtr.Type()) { + d.visitor.visitAll() + + if tval == nil { + return mvalPtr.Elem(), nil + } + + if err := callCustomUnmarshaler(mvalPtr, tval.ToMap()); err != nil { + return reflect.ValueOf(nil), fmt.Errorf("unmarshal toml: %v", err) + } + return mvalPtr.Elem(), nil + } + + var mval reflect.Value + switch mtype.Kind() { + case reflect.Struct: + if mval1 != nil { + mval = *mval1 + } else { + mval = reflect.New(mtype).Elem() + } + + switch mval.Interface().(type) { + case Tree: + mval.Set(reflect.ValueOf(tval).Elem()) + default: + for i := 0; i < mtype.NumField(); i++ { + mtypef := mtype.Field(i) + an := annotation{tag: d.tagName} + opts := tomlOptions(mtypef, an) + if !opts.include { + continue + } + baseKey := opts.name + keysToTry := []string{ + baseKey, + strings.ToLower(baseKey), + strings.ToTitle(baseKey), + strings.ToLower(string(baseKey[0])) + baseKey[1:], + } + + found := false + if tval != nil { + for _, key := range keysToTry { + exists := tval.HasPath([]string{key}) + if !exists { + continue + } + + d.visitor.push(key) + val := tval.GetPath([]string{key}) + fval := mval.Field(i) + mvalf, err := d.valueFromToml(mtypef.Type, val, &fval) + if err != nil { + return mval, formatError(err, tval.GetPositionPath([]string{key})) + } + mval.Field(i).Set(mvalf) + found = true + d.visitor.pop() + break + } + } + + if !found && opts.defaultValue != "" { + mvalf := mval.Field(i) + var val interface{} + var err error + switch mvalf.Kind() { + case reflect.String: + val = opts.defaultValue + case reflect.Bool: + val, err = strconv.ParseBool(opts.defaultValue) + case reflect.Uint: + val, err = strconv.ParseUint(opts.defaultValue, 10, 0) + case reflect.Uint8: + val, err = strconv.ParseUint(opts.defaultValue, 10, 8) + case reflect.Uint16: + val, err = strconv.ParseUint(opts.defaultValue, 10, 16) + case reflect.Uint32: + val, err = strconv.ParseUint(opts.defaultValue, 10, 32) + case reflect.Uint64: + val, err = strconv.ParseUint(opts.defaultValue, 10, 64) + case reflect.Int: + val, err = 
strconv.ParseInt(opts.defaultValue, 10, 0) + case reflect.Int8: + val, err = strconv.ParseInt(opts.defaultValue, 10, 8) + case reflect.Int16: + val, err = strconv.ParseInt(opts.defaultValue, 10, 16) + case reflect.Int32: + val, err = strconv.ParseInt(opts.defaultValue, 10, 32) + case reflect.Int64: + // Check if the provided number has a non-numeric extension. + var hasExtension bool + if len(opts.defaultValue) > 0 { + lastChar := opts.defaultValue[len(opts.defaultValue)-1] + if lastChar < '0' || lastChar > '9' { + hasExtension = true + } + } + // If the value is a time.Duration with extension, parse as duration. + // If the value is an int64 or a time.Duration without extension, parse as number. + if hasExtension && mvalf.Type().String() == "time.Duration" { + val, err = time.ParseDuration(opts.defaultValue) + } else { + val, err = strconv.ParseInt(opts.defaultValue, 10, 64) + } + case reflect.Float32: + val, err = strconv.ParseFloat(opts.defaultValue, 32) + case reflect.Float64: + val, err = strconv.ParseFloat(opts.defaultValue, 64) + default: + return mvalf, fmt.Errorf("unsupported field type for default option") + } + + if err != nil { + return mvalf, err + } + mvalf.Set(reflect.ValueOf(val).Convert(mvalf.Type())) + } + + // save the old behavior above and try to check structs + if !found && opts.defaultValue == "" && mtypef.Type.Kind() == reflect.Struct { + tmpTval := tval + if !mtypef.Anonymous { + tmpTval = nil + } + fval := mval.Field(i) + v, err := d.valueFromTree(mtypef.Type, tmpTval, &fval) + if err != nil { + return v, err + } + mval.Field(i).Set(v) + } + } + } + case reflect.Map: + mval = reflect.MakeMap(mtype) + for _, key := range tval.Keys() { + d.visitor.push(key) + // TODO: path splits key + val := tval.GetPath([]string{key}) + mvalf, err := d.valueFromToml(mtype.Elem(), val, nil) + if err != nil { + return mval, formatError(err, tval.GetPositionPath([]string{key})) + } + mval.SetMapIndex(reflect.ValueOf(key).Convert(mtype.Key()), mvalf) + d.visitor.pop() + } + } + return mval, nil +} + +// Convert toml value to marshal struct/map slice, using marshal type +func (d *Decoder) valueFromTreeSlice(mtype reflect.Type, tval []*Tree) (reflect.Value, error) { + mval, err := makeSliceOrArray(mtype, len(tval)) + if err != nil { + return mval, err + } + + for i := 0; i < len(tval); i++ { + d.visitor.push(strconv.Itoa(i)) + val, err := d.valueFromTree(mtype.Elem(), tval[i], nil) + if err != nil { + return mval, err + } + mval.Index(i).Set(val) + d.visitor.pop() + } + return mval, nil +} + +// Convert toml value to marshal primitive slice, using marshal type +func (d *Decoder) valueFromOtherSlice(mtype reflect.Type, tval []interface{}) (reflect.Value, error) { + mval, err := makeSliceOrArray(mtype, len(tval)) + if err != nil { + return mval, err + } + + for i := 0; i < len(tval); i++ { + val, err := d.valueFromToml(mtype.Elem(), tval[i], nil) + if err != nil { + return mval, err + } + mval.Index(i).Set(val) + } + return mval, nil +} + +// Convert toml value to marshal primitive slice, using marshal type +func (d *Decoder) valueFromOtherSliceI(mtype reflect.Type, tval interface{}) (reflect.Value, error) { + val := reflect.ValueOf(tval) + length := val.Len() + + mval, err := makeSliceOrArray(mtype, length) + if err != nil { + return mval, err + } + + for i := 0; i < length; i++ { + val, err := d.valueFromToml(mtype.Elem(), val.Index(i).Interface(), nil) + if err != nil { + return mval, err + } + mval.Index(i).Set(val) + } + return mval, nil +} + +// Create a new slice or a new array 
with specified length +func makeSliceOrArray(mtype reflect.Type, tLength int) (reflect.Value, error) { + var mval reflect.Value + switch mtype.Kind() { + case reflect.Slice: + mval = reflect.MakeSlice(mtype, tLength, tLength) + case reflect.Array: + mval = reflect.New(reflect.ArrayOf(mtype.Len(), mtype.Elem())).Elem() + if tLength > mtype.Len() { + return mval, fmt.Errorf("unmarshal: TOML array length (%v) exceeds destination array length (%v)", tLength, mtype.Len()) + } + } + return mval, nil +} + +// Convert toml value to marshal value, using marshal type. When mval1 is non-nil +// and the given type is a struct value, merge fields into it. +func (d *Decoder) valueFromToml(mtype reflect.Type, tval interface{}, mval1 *reflect.Value) (reflect.Value, error) { + if mtype.Kind() == reflect.Ptr { + return d.unwrapPointer(mtype, tval, mval1) + } + + switch t := tval.(type) { + case *Tree: + var mval11 *reflect.Value + if mtype.Kind() == reflect.Struct { + mval11 = mval1 + } + + if isTree(mtype) { + return d.valueFromTree(mtype, t, mval11) + } + + if mtype.Kind() == reflect.Interface { + if mval1 == nil || mval1.IsNil() { + return d.valueFromTree(reflect.TypeOf(map[string]interface{}{}), t, nil) + } else { + return d.valueFromToml(mval1.Elem().Type(), t, nil) + } + } + + return reflect.ValueOf(nil), fmt.Errorf("Can't convert %v(%T) to a tree", tval, tval) + case []*Tree: + if isTreeSequence(mtype) { + return d.valueFromTreeSlice(mtype, t) + } + if mtype.Kind() == reflect.Interface { + if mval1 == nil || mval1.IsNil() { + return d.valueFromTreeSlice(reflect.TypeOf([]map[string]interface{}{}), t) + } else { + ival := mval1.Elem() + return d.valueFromToml(mval1.Elem().Type(), t, &ival) + } + } + return reflect.ValueOf(nil), fmt.Errorf("Can't convert %v(%T) to trees", tval, tval) + case []interface{}: + d.visitor.visit() + if isOtherSequence(mtype) { + return d.valueFromOtherSlice(mtype, t) + } + if mtype.Kind() == reflect.Interface { + if mval1 == nil || mval1.IsNil() { + return d.valueFromOtherSlice(reflect.TypeOf([]interface{}{}), t) + } else { + ival := mval1.Elem() + return d.valueFromToml(mval1.Elem().Type(), t, &ival) + } + } + return reflect.ValueOf(nil), fmt.Errorf("Can't convert %v(%T) to a slice", tval, tval) + default: + d.visitor.visit() + mvalPtr := reflect.New(mtype) + + // Check if pointer to value implements the Unmarshaler interface. + if isCustomUnmarshaler(mvalPtr.Type()) { + if err := callCustomUnmarshaler(mvalPtr, tval); err != nil { + return reflect.ValueOf(nil), fmt.Errorf("unmarshal toml: %v", err) + } + return mvalPtr.Elem(), nil + } + + // Check if pointer to value implements the encoding.TextUnmarshaler. 
+ if isTextUnmarshaler(mvalPtr.Type()) && !isTimeType(mtype) { + if err := d.unmarshalText(tval, mvalPtr); err != nil { + return reflect.ValueOf(nil), fmt.Errorf("unmarshal text: %v", err) + } + return mvalPtr.Elem(), nil + } + + switch mtype.Kind() { + case reflect.Bool, reflect.Struct: + val := reflect.ValueOf(tval) + + switch val.Type() { + case localDateType: + localDate := val.Interface().(LocalDate) + switch mtype { + case timeType: + return reflect.ValueOf(time.Date(localDate.Year, localDate.Month, localDate.Day, 0, 0, 0, 0, time.Local)), nil + } + case localDateTimeType: + localDateTime := val.Interface().(LocalDateTime) + switch mtype { + case timeType: + return reflect.ValueOf(time.Date( + localDateTime.Date.Year, + localDateTime.Date.Month, + localDateTime.Date.Day, + localDateTime.Time.Hour, + localDateTime.Time.Minute, + localDateTime.Time.Second, + localDateTime.Time.Nanosecond, + time.Local)), nil + } + } + + // if this passes for when mtype is reflect.Struct, tval is a time.LocalTime + if !val.Type().ConvertibleTo(mtype) { + return reflect.ValueOf(nil), fmt.Errorf("Can't convert %v(%T) to %v", tval, tval, mtype.String()) + } + + return val.Convert(mtype), nil + case reflect.String: + val := reflect.ValueOf(tval) + // stupidly, int64 is convertible to string. So special case this. + if !val.Type().ConvertibleTo(mtype) || val.Kind() == reflect.Int64 { + return reflect.ValueOf(nil), fmt.Errorf("Can't convert %v(%T) to %v", tval, tval, mtype.String()) + } + + return val.Convert(mtype), nil + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + val := reflect.ValueOf(tval) + if mtype.Kind() == reflect.Int64 && mtype == reflect.TypeOf(time.Duration(1)) && val.Kind() == reflect.String { + d, err := time.ParseDuration(val.String()) + if err != nil { + return reflect.ValueOf(nil), fmt.Errorf("Can't convert %v(%T) to %v. 
%s", tval, tval, mtype.String(), err) + } + return reflect.ValueOf(d), nil + } + if !val.Type().ConvertibleTo(mtype) || val.Kind() == reflect.Float64 { + return reflect.ValueOf(nil), fmt.Errorf("Can't convert %v(%T) to %v", tval, tval, mtype.String()) + } + if reflect.Indirect(reflect.New(mtype)).OverflowInt(val.Convert(reflect.TypeOf(int64(0))).Int()) { + return reflect.ValueOf(nil), fmt.Errorf("%v(%T) would overflow %v", tval, tval, mtype.String()) + } + + return val.Convert(mtype), nil + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: + val := reflect.ValueOf(tval) + if !val.Type().ConvertibleTo(mtype) || val.Kind() == reflect.Float64 { + return reflect.ValueOf(nil), fmt.Errorf("Can't convert %v(%T) to %v", tval, tval, mtype.String()) + } + + if val.Convert(reflect.TypeOf(int(1))).Int() < 0 { + return reflect.ValueOf(nil), fmt.Errorf("%v(%T) is negative so does not fit in %v", tval, tval, mtype.String()) + } + if reflect.Indirect(reflect.New(mtype)).OverflowUint(val.Convert(reflect.TypeOf(uint64(0))).Uint()) { + return reflect.ValueOf(nil), fmt.Errorf("%v(%T) would overflow %v", tval, tval, mtype.String()) + } + + return val.Convert(mtype), nil + case reflect.Float32, reflect.Float64: + val := reflect.ValueOf(tval) + if !val.Type().ConvertibleTo(mtype) || val.Kind() == reflect.Int64 { + return reflect.ValueOf(nil), fmt.Errorf("Can't convert %v(%T) to %v", tval, tval, mtype.String()) + } + if reflect.Indirect(reflect.New(mtype)).OverflowFloat(val.Convert(reflect.TypeOf(float64(0))).Float()) { + return reflect.ValueOf(nil), fmt.Errorf("%v(%T) would overflow %v", tval, tval, mtype.String()) + } + + return val.Convert(mtype), nil + case reflect.Interface: + if mval1 == nil || mval1.IsNil() { + return reflect.ValueOf(tval), nil + } else { + ival := mval1.Elem() + return d.valueFromToml(mval1.Elem().Type(), t, &ival) + } + case reflect.Slice, reflect.Array: + if isOtherSequence(mtype) && isOtherSequence(reflect.TypeOf(t)) { + return d.valueFromOtherSliceI(mtype, t) + } + return reflect.ValueOf(nil), fmt.Errorf("Can't convert %v(%T) to %v(%v)", tval, tval, mtype, mtype.Kind()) + default: + return reflect.ValueOf(nil), fmt.Errorf("Can't convert %v(%T) to %v(%v)", tval, tval, mtype, mtype.Kind()) + } + } +} + +func (d *Decoder) unwrapPointer(mtype reflect.Type, tval interface{}, mval1 *reflect.Value) (reflect.Value, error) { + var melem *reflect.Value + + if mval1 != nil && !mval1.IsNil() && (mtype.Elem().Kind() == reflect.Struct || mtype.Elem().Kind() == reflect.Interface) { + elem := mval1.Elem() + melem = &elem + } + + val, err := d.valueFromToml(mtype.Elem(), tval, melem) + if err != nil { + return reflect.ValueOf(nil), err + } + mval := reflect.New(mtype.Elem()) + mval.Elem().Set(val) + return mval, nil +} + +func (d *Decoder) unmarshalText(tval interface{}, mval reflect.Value) error { + var buf bytes.Buffer + fmt.Fprint(&buf, tval) + return callTextUnmarshaler(mval, buf.Bytes()) +} + +func tomlOptions(vf reflect.StructField, an annotation) tomlOpts { + tag := vf.Tag.Get(an.tag) + parse := strings.Split(tag, ",") + var comment string + if c := vf.Tag.Get(an.comment); c != "" { + comment = c + } + commented, _ := strconv.ParseBool(vf.Tag.Get(an.commented)) + multiline, _ := strconv.ParseBool(vf.Tag.Get(an.multiline)) + literal, _ := strconv.ParseBool(vf.Tag.Get(an.literal)) + defaultValue := vf.Tag.Get(tagDefault) + result := tomlOpts{ + name: vf.Name, + nameFromTag: false, + comment: comment, + commented: commented, + multiline: multiline, 
+ literal: literal, + include: true, + omitempty: false, + defaultValue: defaultValue, + } + if parse[0] != "" { + if parse[0] == "-" && len(parse) == 1 { + result.include = false + } else { + result.name = strings.Trim(parse[0], " ") + result.nameFromTag = true + } + } + if vf.PkgPath != "" { + result.include = false + } + if len(parse) > 1 && strings.Trim(parse[1], " ") == "omitempty" { + result.omitempty = true + } + if vf.Type.Kind() == reflect.Ptr { + result.omitempty = true + } + return result +} + +func isZero(val reflect.Value) bool { + switch val.Type().Kind() { + case reflect.Slice, reflect.Array, reflect.Map: + return val.Len() == 0 + default: + return reflect.DeepEqual(val.Interface(), reflect.Zero(val.Type()).Interface()) + } +} + +func formatError(err error, pos Position) error { + if err.Error()[0] == '(' { // Error already contains position information + return err + } + return fmt.Errorf("%s: %s", pos, err) +} + +// visitorState keeps track of which keys were unmarshaled. +type visitorState struct { + tree *Tree + path []string + keys map[string]struct{} + active bool +} + +func newVisitorState(tree *Tree) visitorState { + path, result := []string{}, map[string]struct{}{} + insertKeys(path, result, tree) + return visitorState{ + tree: tree, + path: path[:0], + keys: result, + active: true, + } +} + +func (s *visitorState) push(key string) { + if s.active { + s.path = append(s.path, key) + } +} + +func (s *visitorState) pop() { + if s.active { + s.path = s.path[:len(s.path)-1] + } +} + +func (s *visitorState) visit() { + if s.active { + delete(s.keys, strings.Join(s.path, ".")) + } +} + +func (s *visitorState) visitAll() { + if s.active { + for k := range s.keys { + if strings.HasPrefix(k, strings.Join(s.path, ".")) { + delete(s.keys, k) + } + } + } +} + +func (s *visitorState) validate() error { + if !s.active { + return nil + } + undecoded := make([]string, 0, len(s.keys)) + for key := range s.keys { + undecoded = append(undecoded, key) + } + sort.Strings(undecoded) + if len(undecoded) > 0 { + return fmt.Errorf("undecoded keys: %q", undecoded) + } + return nil +} + +func insertKeys(path []string, m map[string]struct{}, tree *Tree) { + for k, v := range tree.values { + switch node := v.(type) { + case []*Tree: + for i, item := range node { + insertKeys(append(path, k, strconv.Itoa(i)), m, item) + } + case *Tree: + insertKeys(append(path, k), m, node) + case *tomlValue: + m[strings.Join(append(path, k), ".")] = struct{}{} + } + } +} diff --git a/vendor/github.com/pelletier/go-toml/marshal_OrderPreserve_test.toml b/vendor/github.com/pelletier/go-toml/marshal_OrderPreserve_test.toml new file mode 100644 index 000000000..792b72ed7 --- /dev/null +++ b/vendor/github.com/pelletier/go-toml/marshal_OrderPreserve_test.toml @@ -0,0 +1,39 @@ +title = "TOML Marshal Testing" + +[basic_lists] + floats = [12.3,45.6,78.9] + bools = [true,false,true] + dates = [1979-05-27T07:32:00Z,1980-05-27T07:32:00Z] + ints = [8001,8001,8002] + uints = [5002,5003] + strings = ["One","Two","Three"] + +[[subdocptrs]] + name = "Second" + +[basic_map] + one = "one" + two = "two" + +[subdoc] + + [subdoc.second] + name = "Second" + + [subdoc.first] + name = "First" + +[basic] + uint = 5001 + bool = true + float = 123.4 + float64 = 123.456782132399 + int = 5000 + string = "Bite me" + date = 1979-05-27T07:32:00Z + +[[subdoclist]] + name = "List.First" + +[[subdoclist]] + name = "List.Second" diff --git a/vendor/github.com/pelletier/go-toml/marshal_test.toml b/vendor/github.com/pelletier/go-toml/marshal_test.toml 
new file mode 100644 index 000000000..ba5e110bf --- /dev/null +++ b/vendor/github.com/pelletier/go-toml/marshal_test.toml @@ -0,0 +1,39 @@ +title = "TOML Marshal Testing" + +[basic] + bool = true + date = 1979-05-27T07:32:00Z + float = 123.4 + float64 = 123.456782132399 + int = 5000 + string = "Bite me" + uint = 5001 + +[basic_lists] + bools = [true,false,true] + dates = [1979-05-27T07:32:00Z,1980-05-27T07:32:00Z] + floats = [12.3,45.6,78.9] + ints = [8001,8001,8002] + strings = ["One","Two","Three"] + uints = [5002,5003] + +[basic_map] + one = "one" + two = "two" + +[subdoc] + + [subdoc.first] + name = "First" + + [subdoc.second] + name = "Second" + +[[subdoclist]] + name = "List.First" + +[[subdoclist]] + name = "List.Second" + +[[subdocptrs]] + name = "Second" diff --git a/vendor/github.com/pelletier/go-toml/parser.go b/vendor/github.com/pelletier/go-toml/parser.go new file mode 100644 index 000000000..f5e1a44fb --- /dev/null +++ b/vendor/github.com/pelletier/go-toml/parser.go @@ -0,0 +1,508 @@ +// TOML Parser. + +package toml + +import ( + "errors" + "fmt" + "math" + "reflect" + "strconv" + "strings" + "time" +) + +type tomlParser struct { + flowIdx int + flow []token + tree *Tree + currentTable []string + seenTableKeys []string +} + +type tomlParserStateFn func() tomlParserStateFn + +// Formats and panics an error message based on a token +func (p *tomlParser) raiseError(tok *token, msg string, args ...interface{}) { + panic(tok.Position.String() + ": " + fmt.Sprintf(msg, args...)) +} + +func (p *tomlParser) run() { + for state := p.parseStart; state != nil; { + state = state() + } +} + +func (p *tomlParser) peek() *token { + if p.flowIdx >= len(p.flow) { + return nil + } + return &p.flow[p.flowIdx] +} + +func (p *tomlParser) assume(typ tokenType) { + tok := p.getToken() + if tok == nil { + p.raiseError(tok, "was expecting token %s, but token stream is empty", tok) + } + if tok.typ != typ { + p.raiseError(tok, "was expecting token %s, but got %s instead", typ, tok) + } +} + +func (p *tomlParser) getToken() *token { + tok := p.peek() + if tok == nil { + return nil + } + p.flowIdx++ + return tok +} + +func (p *tomlParser) parseStart() tomlParserStateFn { + tok := p.peek() + + // end of stream, parsing is finished + if tok == nil { + return nil + } + + switch tok.typ { + case tokenDoubleLeftBracket: + return p.parseGroupArray + case tokenLeftBracket: + return p.parseGroup + case tokenKey: + return p.parseAssign + case tokenEOF: + return nil + case tokenError: + p.raiseError(tok, "parsing error: %s", tok.String()) + default: + p.raiseError(tok, "unexpected token %s", tok.typ) + } + return nil +} + +func (p *tomlParser) parseGroupArray() tomlParserStateFn { + startToken := p.getToken() // discard the [[ + key := p.getToken() + if key.typ != tokenKeyGroupArray { + p.raiseError(key, "unexpected token %s, was expecting a table array key", key) + } + + // get or create table array element at the indicated part in the path + keys, err := parseKey(key.val) + if err != nil { + p.raiseError(key, "invalid table array key: %s", err) + } + p.tree.createSubTree(keys[:len(keys)-1], startToken.Position) // create parent entries + destTree := p.tree.GetPath(keys) + var array []*Tree + if destTree == nil { + array = make([]*Tree, 0) + } else if target, ok := destTree.([]*Tree); ok && target != nil { + array = destTree.([]*Tree) + } else { + p.raiseError(key, "key %s is already assigned and not of type table array", key) + } + p.currentTable = keys + + // add a new tree to the end of the table array + 
newTree := newTree() + newTree.position = startToken.Position + array = append(array, newTree) + p.tree.SetPath(p.currentTable, array) + + // remove all keys that were children of this table array + prefix := key.val + "." + found := false + for ii := 0; ii < len(p.seenTableKeys); { + tableKey := p.seenTableKeys[ii] + if strings.HasPrefix(tableKey, prefix) { + p.seenTableKeys = append(p.seenTableKeys[:ii], p.seenTableKeys[ii+1:]...) + } else { + found = (tableKey == key.val) + ii++ + } + } + + // keep this key name from use by other kinds of assignments + if !found { + p.seenTableKeys = append(p.seenTableKeys, key.val) + } + + // move to next parser state + p.assume(tokenDoubleRightBracket) + return p.parseStart +} + +func (p *tomlParser) parseGroup() tomlParserStateFn { + startToken := p.getToken() // discard the [ + key := p.getToken() + if key.typ != tokenKeyGroup { + p.raiseError(key, "unexpected token %s, was expecting a table key", key) + } + for _, item := range p.seenTableKeys { + if item == key.val { + p.raiseError(key, "duplicated tables") + } + } + + p.seenTableKeys = append(p.seenTableKeys, key.val) + keys, err := parseKey(key.val) + if err != nil { + p.raiseError(key, "invalid table array key: %s", err) + } + if err := p.tree.createSubTree(keys, startToken.Position); err != nil { + p.raiseError(key, "%s", err) + } + destTree := p.tree.GetPath(keys) + if target, ok := destTree.(*Tree); ok && target != nil && target.inline { + p.raiseError(key, "could not re-define exist inline table or its sub-table : %s", + strings.Join(keys, ".")) + } + p.assume(tokenRightBracket) + p.currentTable = keys + return p.parseStart +} + +func (p *tomlParser) parseAssign() tomlParserStateFn { + key := p.getToken() + p.assume(tokenEqual) + + parsedKey, err := parseKey(key.val) + if err != nil { + p.raiseError(key, "invalid key: %s", err.Error()) + } + + value := p.parseRvalue() + var tableKey []string + if len(p.currentTable) > 0 { + tableKey = p.currentTable + } else { + tableKey = []string{} + } + + prefixKey := parsedKey[0 : len(parsedKey)-1] + tableKey = append(tableKey, prefixKey...) 
+ + // find the table to assign, looking out for arrays of tables + var targetNode *Tree + switch node := p.tree.GetPath(tableKey).(type) { + case []*Tree: + targetNode = node[len(node)-1] + case *Tree: + targetNode = node + case nil: + // create intermediate + if err := p.tree.createSubTree(tableKey, key.Position); err != nil { + p.raiseError(key, "could not create intermediate group: %s", err) + } + targetNode = p.tree.GetPath(tableKey).(*Tree) + default: + p.raiseError(key, "Unknown table type for path: %s", + strings.Join(tableKey, ".")) + } + + if targetNode.inline { + p.raiseError(key, "could not add key or sub-table to exist inline table or its sub-table : %s", + strings.Join(tableKey, ".")) + } + + // assign value to the found table + keyVal := parsedKey[len(parsedKey)-1] + localKey := []string{keyVal} + finalKey := append(tableKey, keyVal) + if targetNode.GetPath(localKey) != nil { + p.raiseError(key, "The following key was defined twice: %s", + strings.Join(finalKey, ".")) + } + var toInsert interface{} + + switch value.(type) { + case *Tree, []*Tree: + toInsert = value + default: + toInsert = &tomlValue{value: value, position: key.Position} + } + targetNode.values[keyVal] = toInsert + return p.parseStart +} + +var errInvalidUnderscore = errors.New("invalid use of _ in number") + +func numberContainsInvalidUnderscore(value string) error { + // For large numbers, you may use underscores between digits to enhance + // readability. Each underscore must be surrounded by at least one digit on + // each side. + + hasBefore := false + for idx, r := range value { + if r == '_' { + if !hasBefore || idx+1 >= len(value) { + // can't end with an underscore + return errInvalidUnderscore + } + } + hasBefore = isDigit(r) + } + return nil +} + +var errInvalidUnderscoreHex = errors.New("invalid use of _ in hex number") + +func hexNumberContainsInvalidUnderscore(value string) error { + hasBefore := false + for idx, r := range value { + if r == '_' { + if !hasBefore || idx+1 >= len(value) { + // can't end with an underscore + return errInvalidUnderscoreHex + } + } + hasBefore = isHexDigit(r) + } + return nil +} + +func cleanupNumberToken(value string) string { + cleanedVal := strings.Replace(value, "_", "", -1) + return cleanedVal +} + +func (p *tomlParser) parseRvalue() interface{} { + tok := p.getToken() + if tok == nil || tok.typ == tokenEOF { + p.raiseError(tok, "expecting a value") + } + + switch tok.typ { + case tokenString: + return tok.val + case tokenTrue: + return true + case tokenFalse: + return false + case tokenInf: + if tok.val[0] == '-' { + return math.Inf(-1) + } + return math.Inf(1) + case tokenNan: + return math.NaN() + case tokenInteger: + cleanedVal := cleanupNumberToken(tok.val) + var err error + var val int64 + if len(cleanedVal) >= 3 && cleanedVal[0] == '0' { + switch cleanedVal[1] { + case 'x': + err = hexNumberContainsInvalidUnderscore(tok.val) + if err != nil { + p.raiseError(tok, "%s", err) + } + val, err = strconv.ParseInt(cleanedVal[2:], 16, 64) + case 'o': + err = numberContainsInvalidUnderscore(tok.val) + if err != nil { + p.raiseError(tok, "%s", err) + } + val, err = strconv.ParseInt(cleanedVal[2:], 8, 64) + case 'b': + err = numberContainsInvalidUnderscore(tok.val) + if err != nil { + p.raiseError(tok, "%s", err) + } + val, err = strconv.ParseInt(cleanedVal[2:], 2, 64) + default: + panic("invalid base") // the lexer should catch this first + } + } else { + err = numberContainsInvalidUnderscore(tok.val) + if err != nil { + p.raiseError(tok, "%s", err) + } + val, err 
= strconv.ParseInt(cleanedVal, 10, 64) + } + if err != nil { + p.raiseError(tok, "%s", err) + } + return val + case tokenFloat: + err := numberContainsInvalidUnderscore(tok.val) + if err != nil { + p.raiseError(tok, "%s", err) + } + cleanedVal := cleanupNumberToken(tok.val) + val, err := strconv.ParseFloat(cleanedVal, 64) + if err != nil { + p.raiseError(tok, "%s", err) + } + return val + case tokenLocalTime: + val, err := ParseLocalTime(tok.val) + if err != nil { + p.raiseError(tok, "%s", err) + } + return val + case tokenLocalDate: + // a local date may be followed by: + // * nothing: this is a local date + // * a local time: this is a local date-time + + next := p.peek() + if next == nil || next.typ != tokenLocalTime { + val, err := ParseLocalDate(tok.val) + if err != nil { + p.raiseError(tok, "%s", err) + } + return val + } + + localDate := tok + localTime := p.getToken() + + next = p.peek() + if next == nil || next.typ != tokenTimeOffset { + v := localDate.val + "T" + localTime.val + val, err := ParseLocalDateTime(v) + if err != nil { + p.raiseError(tok, "%s", err) + } + return val + } + + offset := p.getToken() + + layout := time.RFC3339Nano + v := localDate.val + "T" + localTime.val + offset.val + val, err := time.ParseInLocation(layout, v, time.UTC) + if err != nil { + p.raiseError(tok, "%s", err) + } + return val + case tokenLeftBracket: + return p.parseArray() + case tokenLeftCurlyBrace: + return p.parseInlineTable() + case tokenEqual: + p.raiseError(tok, "cannot have multiple equals for the same key") + case tokenError: + p.raiseError(tok, "%s", tok) + default: + panic(fmt.Errorf("unhandled token: %v", tok)) + } + + return nil +} + +func tokenIsComma(t *token) bool { + return t != nil && t.typ == tokenComma +} + +func (p *tomlParser) parseInlineTable() *Tree { + tree := newTree() + var previous *token +Loop: + for { + follow := p.peek() + if follow == nil || follow.typ == tokenEOF { + p.raiseError(follow, "unterminated inline table") + } + switch follow.typ { + case tokenRightCurlyBrace: + p.getToken() + break Loop + case tokenKey, tokenInteger, tokenString: + if !tokenIsComma(previous) && previous != nil { + p.raiseError(follow, "comma expected between fields in inline table") + } + key := p.getToken() + p.assume(tokenEqual) + + parsedKey, err := parseKey(key.val) + if err != nil { + p.raiseError(key, "invalid key: %s", err) + } + + value := p.parseRvalue() + tree.SetPath(parsedKey, value) + case tokenComma: + if tokenIsComma(previous) { + p.raiseError(follow, "need field between two commas in inline table") + } + p.getToken() + default: + p.raiseError(follow, "unexpected token type in inline table: %s", follow.String()) + } + previous = follow + } + if tokenIsComma(previous) { + p.raiseError(previous, "trailing comma at the end of inline table") + } + tree.inline = true + return tree +} + +func (p *tomlParser) parseArray() interface{} { + var array []interface{} + arrayType := reflect.TypeOf(newTree()) + for { + follow := p.peek() + if follow == nil || follow.typ == tokenEOF { + p.raiseError(follow, "unterminated array") + } + if follow.typ == tokenRightBracket { + p.getToken() + break + } + val := p.parseRvalue() + if reflect.TypeOf(val) != arrayType { + arrayType = nil + } + array = append(array, val) + follow = p.peek() + if follow == nil || follow.typ == tokenEOF { + p.raiseError(follow, "unterminated array") + } + if follow.typ != tokenRightBracket && follow.typ != tokenComma { + p.raiseError(follow, "missing comma") + } + if follow.typ == tokenComma { + p.getToken() + } 
+ } + + // if the array is a mixed-type array or its length is 0, + // don't convert it to a table array + if len(array) <= 0 { + arrayType = nil + } + // An array of Trees is actually an array of inline + // tables, which is a shorthand for a table array. If the + // array was not converted from []interface{} to []*Tree, + // the two notations would not be equivalent. + if arrayType == reflect.TypeOf(newTree()) { + tomlArray := make([]*Tree, len(array)) + for i, v := range array { + tomlArray[i] = v.(*Tree) + } + return tomlArray + } + return array +} + +func parseToml(flow []token) *Tree { + result := newTree() + result.position = Position{1, 1} + parser := &tomlParser{ + flowIdx: 0, + flow: flow, + tree: result, + currentTable: make([]string, 0), + seenTableKeys: make([]string, 0), + } + parser.run() + return result +} diff --git a/vendor/github.com/pelletier/go-toml/position.go b/vendor/github.com/pelletier/go-toml/position.go new file mode 100644 index 000000000..c17bff87b --- /dev/null +++ b/vendor/github.com/pelletier/go-toml/position.go @@ -0,0 +1,29 @@ +// Position support for go-toml + +package toml + +import ( + "fmt" +) + +// Position of a document element within a TOML document. +// +// Line and Col are both 1-indexed positions for the element's line number and +// column number, respectively. Values of zero or less will cause Invalid(), +// to return true. +type Position struct { + Line int // line within the document + Col int // column within the line +} + +// String representation of the position. +// Displays 1-indexed line and column numbers. +func (p Position) String() string { + return fmt.Sprintf("(%d, %d)", p.Line, p.Col) +} + +// Invalid returns whether or not the position is valid (i.e. with negative or +// null values) +func (p Position) Invalid() bool { + return p.Line <= 0 || p.Col <= 0 +} diff --git a/vendor/github.com/pelletier/go-toml/token.go b/vendor/github.com/pelletier/go-toml/token.go new file mode 100644 index 000000000..b437fdd3b --- /dev/null +++ b/vendor/github.com/pelletier/go-toml/token.go @@ -0,0 +1,136 @@ +package toml + +import "fmt" + +// Define tokens +type tokenType int + +const ( + eof = -(iota + 1) +) + +const ( + tokenError tokenType = iota + tokenEOF + tokenComment + tokenKey + tokenString + tokenInteger + tokenTrue + tokenFalse + tokenFloat + tokenInf + tokenNan + tokenEqual + tokenLeftBracket + tokenRightBracket + tokenLeftCurlyBrace + tokenRightCurlyBrace + tokenLeftParen + tokenRightParen + tokenDoubleLeftBracket + tokenDoubleRightBracket + tokenLocalDate + tokenLocalTime + tokenTimeOffset + tokenKeyGroup + tokenKeyGroupArray + tokenComma + tokenColon + tokenDollar + tokenStar + tokenQuestion + tokenDot + tokenDotDot + tokenEOL +) + +var tokenTypeNames = []string{ + "Error", + "EOF", + "Comment", + "Key", + "String", + "Integer", + "True", + "False", + "Float", + "Inf", + "NaN", + "=", + "[", + "]", + "{", + "}", + "(", + ")", + "]]", + "[[", + "LocalDate", + "LocalTime", + "TimeOffset", + "KeyGroup", + "KeyGroupArray", + ",", + ":", + "$", + "*", + "?", + ".", + "..", + "EOL", +} + +type token struct { + Position + typ tokenType + val string +} + +func (tt tokenType) String() string { + idx := int(tt) + if idx < len(tokenTypeNames) { + return tokenTypeNames[idx] + } + return "Unknown" +} + +func (t token) String() string { + switch t.typ { + case tokenEOF: + return "EOF" + case tokenError: + return t.val + } + + return fmt.Sprintf("%q", t.val) +} + +func isSpace(r rune) bool { + return r == ' ' || r == '\t' +} + +func 
isAlphanumeric(r rune) bool { + return 'a' <= r && r <= 'z' || 'A' <= r && r <= 'Z' || r == '_' +} + +func isKeyChar(r rune) bool { + // Keys start with the first character that isn't whitespace or [ and end + // with the last non-whitespace character before the equals sign. Keys + // cannot contain a # character." + return !(r == '\r' || r == '\n' || r == eof || r == '=') +} + +func isKeyStartChar(r rune) bool { + return !(isSpace(r) || r == '\r' || r == '\n' || r == eof || r == '[') +} + +func isDigit(r rune) bool { + return '0' <= r && r <= '9' +} + +func isHexDigit(r rune) bool { + return isDigit(r) || + (r >= 'a' && r <= 'f') || + (r >= 'A' && r <= 'F') +} diff --git a/vendor/github.com/pelletier/go-toml/toml.go b/vendor/github.com/pelletier/go-toml/toml.go new file mode 100644 index 000000000..6d82587c4 --- /dev/null +++ b/vendor/github.com/pelletier/go-toml/toml.go @@ -0,0 +1,533 @@ +package toml + +import ( + "errors" + "fmt" + "io" + "io/ioutil" + "os" + "runtime" + "strings" +) + +type tomlValue struct { + value interface{} // string, int64, uint64, float64, bool, time.Time, [] of any of this list + comment string + commented bool + multiline bool + literal bool + position Position +} + +// Tree is the result of the parsing of a TOML file. +type Tree struct { + values map[string]interface{} // string -> *tomlValue, *Tree, []*Tree + comment string + commented bool + inline bool + position Position +} + +func newTree() *Tree { + return newTreeWithPosition(Position{}) +} + +func newTreeWithPosition(pos Position) *Tree { + return &Tree{ + values: make(map[string]interface{}), + position: pos, + } +} + +// TreeFromMap initializes a new Tree object using the given map. +func TreeFromMap(m map[string]interface{}) (*Tree, error) { + result, err := toTree(m) + if err != nil { + return nil, err + } + return result.(*Tree), nil +} + +// Position returns the position of the tree. +func (t *Tree) Position() Position { + return t.position +} + +// Has returns a boolean indicating if the given key exists. +func (t *Tree) Has(key string) bool { + if key == "" { + return false + } + return t.HasPath(strings.Split(key, ".")) +} + +// HasPath returns true if the given path of keys exists, false otherwise. +func (t *Tree) HasPath(keys []string) bool { + return t.GetPath(keys) != nil +} + +// Keys returns the keys of the toplevel tree (does not recurse). +func (t *Tree) Keys() []string { + keys := make([]string, len(t.values)) + i := 0 + for k := range t.values { + keys[i] = k + i++ + } + return keys +} + +// Get the value at key in the Tree. +// Key is a dot-separated path (e.g. a.b.c) without single/double quoted strings. +// If you need to retrieve non-bare keys, use GetPath. +// Returns nil if the path does not exist in the tree. +// If keys is of length zero, the current tree is returned. +func (t *Tree) Get(key string) interface{} { + if key == "" { + return t + } + return t.GetPath(strings.Split(key, ".")) +} + +// GetPath returns the element in the tree indicated by 'keys'. +// If keys is of length zero, the current tree is returned. 
+func (t *Tree) GetPath(keys []string) interface{} { + if len(keys) == 0 { + return t + } + subtree := t + for _, intermediateKey := range keys[:len(keys)-1] { + value, exists := subtree.values[intermediateKey] + if !exists { + return nil + } + switch node := value.(type) { + case *Tree: + subtree = node + case []*Tree: + // go to most recent element + if len(node) == 0 { + return nil + } + subtree = node[len(node)-1] + default: + return nil // cannot navigate through other node types + } + } + // branch based on final node type + switch node := subtree.values[keys[len(keys)-1]].(type) { + case *tomlValue: + return node.value + default: + return node + } +} + +// GetArray returns the value at key in the Tree. +// It returns []string, []int64, etc type if key has homogeneous lists +// Key is a dot-separated path (e.g. a.b.c) without single/double quoted strings. +// Returns nil if the path does not exist in the tree. +// If keys is of length zero, the current tree is returned. +func (t *Tree) GetArray(key string) interface{} { + if key == "" { + return t + } + return t.GetArrayPath(strings.Split(key, ".")) +} + +// GetArrayPath returns the element in the tree indicated by 'keys'. +// If keys is of length zero, the current tree is returned. +func (t *Tree) GetArrayPath(keys []string) interface{} { + if len(keys) == 0 { + return t + } + subtree := t + for _, intermediateKey := range keys[:len(keys)-1] { + value, exists := subtree.values[intermediateKey] + if !exists { + return nil + } + switch node := value.(type) { + case *Tree: + subtree = node + case []*Tree: + // go to most recent element + if len(node) == 0 { + return nil + } + subtree = node[len(node)-1] + default: + return nil // cannot navigate through other node types + } + } + // branch based on final node type + switch node := subtree.values[keys[len(keys)-1]].(type) { + case *tomlValue: + switch n := node.value.(type) { + case []interface{}: + return getArray(n) + default: + return node.value + } + default: + return node + } +} + +// if homogeneous array, then return slice type object over []interface{} +func getArray(n []interface{}) interface{} { + var s []string + var i64 []int64 + var f64 []float64 + var bl []bool + for _, value := range n { + switch v := value.(type) { + case string: + s = append(s, v) + case int64: + i64 = append(i64, v) + case float64: + f64 = append(f64, v) + case bool: + bl = append(bl, v) + default: + return n + } + } + if len(s) == len(n) { + return s + } else if len(i64) == len(n) { + return i64 + } else if len(f64) == len(n) { + return f64 + } else if len(bl) == len(n) { + return bl + } + return n +} + +// GetPosition returns the position of the given key. +func (t *Tree) GetPosition(key string) Position { + if key == "" { + return t.position + } + return t.GetPositionPath(strings.Split(key, ".")) +} + +// SetPositionPath sets the position of element in the tree indicated by 'keys'. +// If keys is of length zero, the current tree position is set. 
+func (t *Tree) SetPositionPath(keys []string, pos Position) { + if len(keys) == 0 { + t.position = pos + return + } + subtree := t + for _, intermediateKey := range keys[:len(keys)-1] { + value, exists := subtree.values[intermediateKey] + if !exists { + return + } + switch node := value.(type) { + case *Tree: + subtree = node + case []*Tree: + // go to most recent element + if len(node) == 0 { + return + } + subtree = node[len(node)-1] + default: + return + } + } + // branch based on final node type + switch node := subtree.values[keys[len(keys)-1]].(type) { + case *tomlValue: + node.position = pos + return + case *Tree: + node.position = pos + return + case []*Tree: + // go to most recent element + if len(node) == 0 { + return + } + node[len(node)-1].position = pos + return + } +} + +// GetPositionPath returns the element in the tree indicated by 'keys'. +// If keys is of length zero, the current tree is returned. +func (t *Tree) GetPositionPath(keys []string) Position { + if len(keys) == 0 { + return t.position + } + subtree := t + for _, intermediateKey := range keys[:len(keys)-1] { + value, exists := subtree.values[intermediateKey] + if !exists { + return Position{0, 0} + } + switch node := value.(type) { + case *Tree: + subtree = node + case []*Tree: + // go to most recent element + if len(node) == 0 { + return Position{0, 0} + } + subtree = node[len(node)-1] + default: + return Position{0, 0} + } + } + // branch based on final node type + switch node := subtree.values[keys[len(keys)-1]].(type) { + case *tomlValue: + return node.position + case *Tree: + return node.position + case []*Tree: + // go to most recent element + if len(node) == 0 { + return Position{0, 0} + } + return node[len(node)-1].position + default: + return Position{0, 0} + } +} + +// GetDefault works like Get but with a default value +func (t *Tree) GetDefault(key string, def interface{}) interface{} { + val := t.Get(key) + if val == nil { + return def + } + return val +} + +// SetOptions arguments are supplied to the SetWithOptions and SetPathWithOptions functions to modify marshalling behaviour. +// The default values within the struct are valid default options. +type SetOptions struct { + Comment string + Commented bool + Multiline bool + Literal bool +} + +// SetWithOptions is the same as Set, but allows you to provide formatting +// instructions to the key, that will be used by Marshal(). +func (t *Tree) SetWithOptions(key string, opts SetOptions, value interface{}) { + t.SetPathWithOptions(strings.Split(key, "."), opts, value) +} + +// SetPathWithOptions is the same as SetPath, but allows you to provide +// formatting instructions to the key, that will be reused by Marshal(). 
+func (t *Tree) SetPathWithOptions(keys []string, opts SetOptions, value interface{}) { + subtree := t + for i, intermediateKey := range keys[:len(keys)-1] { + nextTree, exists := subtree.values[intermediateKey] + if !exists { + nextTree = newTreeWithPosition(Position{Line: t.position.Line + i, Col: t.position.Col}) + subtree.values[intermediateKey] = nextTree // add new element here + } + switch node := nextTree.(type) { + case *Tree: + subtree = node + case []*Tree: + // go to most recent element + if len(node) == 0 { + // create element if it does not exist + node = append(node, newTreeWithPosition(Position{Line: t.position.Line + i, Col: t.position.Col})) + subtree.values[intermediateKey] = node + } + subtree = node[len(node)-1] + } + } + + var toInsert interface{} + + switch v := value.(type) { + case *Tree: + v.comment = opts.Comment + v.commented = opts.Commented + toInsert = value + case []*Tree: + for i := range v { + v[i].commented = opts.Commented + } + toInsert = value + case *tomlValue: + v.comment = opts.Comment + v.commented = opts.Commented + v.multiline = opts.Multiline + v.literal = opts.Literal + toInsert = v + default: + toInsert = &tomlValue{value: value, + comment: opts.Comment, + commented: opts.Commented, + multiline: opts.Multiline, + literal: opts.Literal, + position: Position{Line: subtree.position.Line + len(subtree.values) + 1, Col: subtree.position.Col}} + } + + subtree.values[keys[len(keys)-1]] = toInsert +} + +// Set an element in the tree. +// Key is a dot-separated path (e.g. a.b.c). +// Creates all necessary intermediate trees, if needed. +func (t *Tree) Set(key string, value interface{}) { + t.SetWithComment(key, "", false, value) +} + +// SetWithComment is the same as Set, but allows you to provide comment +// information to the key, that will be reused by Marshal(). +func (t *Tree) SetWithComment(key string, comment string, commented bool, value interface{}) { + t.SetPathWithComment(strings.Split(key, "."), comment, commented, value) +} + +// SetPath sets an element in the tree. +// Keys is an array of path elements (e.g. {"a","b","c"}). +// Creates all necessary intermediate trees, if needed. +func (t *Tree) SetPath(keys []string, value interface{}) { + t.SetPathWithComment(keys, "", false, value) +} + +// SetPathWithComment is the same as SetPath, but allows you to provide comment +// information to the key, that will be reused by Marshal(). +func (t *Tree) SetPathWithComment(keys []string, comment string, commented bool, value interface{}) { + t.SetPathWithOptions(keys, SetOptions{Comment: comment, Commented: commented}, value) +} + +// Delete removes a key from the tree. +// Key is a dot-separated path (e.g. a.b.c). +func (t *Tree) Delete(key string) error { + keys, err := parseKey(key) + if err != nil { + return err + } + return t.DeletePath(keys) +} + +// DeletePath removes a key from the tree. +// Keys is an array of path elements (e.g. {"a","b","c"}). +func (t *Tree) DeletePath(keys []string) error { + keyLen := len(keys) + if keyLen == 1 { + delete(t.values, keys[0]) + return nil + } + tree := t.GetPath(keys[:keyLen-1]) + item := keys[keyLen-1] + switch node := tree.(type) { + case *Tree: + delete(node.values, item) + return nil + } + return errors.New("no such key to delete") +} + +// createSubTree takes a tree and a key and create the necessary intermediate +// subtrees to create a subtree at that point. In-place. +// +// e.g. 
passing a.b.c will create (assuming tree is empty) tree[a], tree[a][b] +// and tree[a][b][c] +// +// Returns nil on success, error object on failure +func (t *Tree) createSubTree(keys []string, pos Position) error { + subtree := t + for i, intermediateKey := range keys { + nextTree, exists := subtree.values[intermediateKey] + if !exists { + tree := newTreeWithPosition(Position{Line: t.position.Line + i, Col: t.position.Col}) + tree.position = pos + tree.inline = subtree.inline + subtree.values[intermediateKey] = tree + nextTree = tree + } + + switch node := nextTree.(type) { + case []*Tree: + subtree = node[len(node)-1] + case *Tree: + subtree = node + default: + return fmt.Errorf("unknown type for path %s (%s): %T (%#v)", + strings.Join(keys, "."), intermediateKey, nextTree, nextTree) + } + } + return nil +} + +// LoadBytes creates a Tree from a []byte. +func LoadBytes(b []byte) (tree *Tree, err error) { + defer func() { + if r := recover(); r != nil { + if _, ok := r.(runtime.Error); ok { + panic(r) + } + err = errors.New(r.(string)) + } + }() + + if len(b) >= 4 && (hasUTF32BigEndianBOM4(b) || hasUTF32LittleEndianBOM4(b)) { + b = b[4:] + } else if len(b) >= 3 && hasUTF8BOM3(b) { + b = b[3:] + } else if len(b) >= 2 && (hasUTF16BigEndianBOM2(b) || hasUTF16LittleEndianBOM2(b)) { + b = b[2:] + } + + tree = parseToml(lexToml(b)) + return +} + +func hasUTF16BigEndianBOM2(b []byte) bool { + return b[0] == 0xFE && b[1] == 0xFF +} + +func hasUTF16LittleEndianBOM2(b []byte) bool { + return b[0] == 0xFF && b[1] == 0xFE +} + +func hasUTF8BOM3(b []byte) bool { + return b[0] == 0xEF && b[1] == 0xBB && b[2] == 0xBF +} + +func hasUTF32BigEndianBOM4(b []byte) bool { + return b[0] == 0x00 && b[1] == 0x00 && b[2] == 0xFE && b[3] == 0xFF +} + +func hasUTF32LittleEndianBOM4(b []byte) bool { + return b[0] == 0xFF && b[1] == 0xFE && b[2] == 0x00 && b[3] == 0x00 +} + +// LoadReader creates a Tree from any io.Reader. +func LoadReader(reader io.Reader) (tree *Tree, err error) { + inputBytes, err := ioutil.ReadAll(reader) + if err != nil { + return + } + tree, err = LoadBytes(inputBytes) + return +} + +// Load creates a Tree from a string. +func Load(content string) (tree *Tree, err error) { + return LoadBytes([]byte(content)) +} + +// LoadFile creates a Tree from a file. +func LoadFile(path string) (tree *Tree, err error) { + file, err := os.Open(path) + if err != nil { + return nil, err + } + defer file.Close() + return LoadReader(file) +} diff --git a/vendor/github.com/pelletier/go-toml/tomlpub.go b/vendor/github.com/pelletier/go-toml/tomlpub.go new file mode 100644 index 000000000..4136b4625 --- /dev/null +++ b/vendor/github.com/pelletier/go-toml/tomlpub.go @@ -0,0 +1,71 @@ +package toml + +// PubTOMLValue wrapping tomlValue in order to access all properties from outside. 
+type PubTOMLValue = tomlValue + +func (ptv *PubTOMLValue) Value() interface{} { + return ptv.value +} +func (ptv *PubTOMLValue) Comment() string { + return ptv.comment +} +func (ptv *PubTOMLValue) Commented() bool { + return ptv.commented +} +func (ptv *PubTOMLValue) Multiline() bool { + return ptv.multiline +} +func (ptv *PubTOMLValue) Position() Position { + return ptv.position +} + +func (ptv *PubTOMLValue) SetValue(v interface{}) { + ptv.value = v +} +func (ptv *PubTOMLValue) SetComment(s string) { + ptv.comment = s +} +func (ptv *PubTOMLValue) SetCommented(c bool) { + ptv.commented = c +} +func (ptv *PubTOMLValue) SetMultiline(m bool) { + ptv.multiline = m +} +func (ptv *PubTOMLValue) SetPosition(p Position) { + ptv.position = p +} + +// PubTree wrapping Tree in order to access all properties from outside. +type PubTree = Tree + +func (pt *PubTree) Values() map[string]interface{} { + return pt.values +} + +func (pt *PubTree) Comment() string { + return pt.comment +} + +func (pt *PubTree) Commented() bool { + return pt.commented +} + +func (pt *PubTree) Inline() bool { + return pt.inline +} + +func (pt *PubTree) SetValues(v map[string]interface{}) { + pt.values = v +} + +func (pt *PubTree) SetComment(c string) { + pt.comment = c +} + +func (pt *PubTree) SetCommented(c bool) { + pt.commented = c +} + +func (pt *PubTree) SetInline(i bool) { + pt.inline = i +} diff --git a/vendor/github.com/pelletier/go-toml/tomltree_create.go b/vendor/github.com/pelletier/go-toml/tomltree_create.go new file mode 100644 index 000000000..80353500a --- /dev/null +++ b/vendor/github.com/pelletier/go-toml/tomltree_create.go @@ -0,0 +1,155 @@ +package toml + +import ( + "fmt" + "reflect" + "time" +) + +var kindToType = [reflect.String + 1]reflect.Type{ + reflect.Bool: reflect.TypeOf(true), + reflect.String: reflect.TypeOf(""), + reflect.Float32: reflect.TypeOf(float64(1)), + reflect.Float64: reflect.TypeOf(float64(1)), + reflect.Int: reflect.TypeOf(int64(1)), + reflect.Int8: reflect.TypeOf(int64(1)), + reflect.Int16: reflect.TypeOf(int64(1)), + reflect.Int32: reflect.TypeOf(int64(1)), + reflect.Int64: reflect.TypeOf(int64(1)), + reflect.Uint: reflect.TypeOf(uint64(1)), + reflect.Uint8: reflect.TypeOf(uint64(1)), + reflect.Uint16: reflect.TypeOf(uint64(1)), + reflect.Uint32: reflect.TypeOf(uint64(1)), + reflect.Uint64: reflect.TypeOf(uint64(1)), +} + +// typeFor returns a reflect.Type for a reflect.Kind, or nil if none is found. 
+// supported values: +// string, bool, int64, uint64, float64, time.Time, int, int8, int16, int32, uint, uint8, uint16, uint32, float32 +func typeFor(k reflect.Kind) reflect.Type { + if k > 0 && int(k) < len(kindToType) { + return kindToType[k] + } + return nil +} + +func simpleValueCoercion(object interface{}) (interface{}, error) { + switch original := object.(type) { + case string, bool, int64, uint64, float64, time.Time: + return original, nil + case int: + return int64(original), nil + case int8: + return int64(original), nil + case int16: + return int64(original), nil + case int32: + return int64(original), nil + case uint: + return uint64(original), nil + case uint8: + return uint64(original), nil + case uint16: + return uint64(original), nil + case uint32: + return uint64(original), nil + case float32: + return float64(original), nil + case fmt.Stringer: + return original.String(), nil + case []interface{}: + value := reflect.ValueOf(original) + length := value.Len() + arrayValue := reflect.MakeSlice(value.Type(), 0, length) + for i := 0; i < length; i++ { + val := value.Index(i).Interface() + simpleValue, err := simpleValueCoercion(val) + if err != nil { + return nil, err + } + arrayValue = reflect.Append(arrayValue, reflect.ValueOf(simpleValue)) + } + return arrayValue.Interface(), nil + default: + return nil, fmt.Errorf("cannot convert type %T to Tree", object) + } +} + +func sliceToTree(object interface{}) (interface{}, error) { + // arrays are a bit tricky, since they can represent either a + // collection of simple values, which is represented by one + // *tomlValue, or an array of tables, which is represented by an + // array of *Tree. + + // holding the assumption that this function is called from toTree only when value.Kind() is Array or Slice + value := reflect.ValueOf(object) + insideType := value.Type().Elem() + length := value.Len() + if length > 0 { + insideType = reflect.ValueOf(value.Index(0).Interface()).Type() + } + if insideType.Kind() == reflect.Map { + // this is considered as an array of tables + tablesArray := make([]*Tree, 0, length) + for i := 0; i < length; i++ { + table := value.Index(i) + tree, err := toTree(table.Interface()) + if err != nil { + return nil, err + } + tablesArray = append(tablesArray, tree.(*Tree)) + } + return tablesArray, nil + } + + sliceType := typeFor(insideType.Kind()) + if sliceType == nil { + sliceType = insideType + } + + arrayValue := reflect.MakeSlice(reflect.SliceOf(sliceType), 0, length) + + for i := 0; i < length; i++ { + val := value.Index(i).Interface() + simpleValue, err := simpleValueCoercion(val) + if err != nil { + return nil, err + } + arrayValue = reflect.Append(arrayValue, reflect.ValueOf(simpleValue)) + } + return &tomlValue{value: arrayValue.Interface(), position: Position{}}, nil +} + +func toTree(object interface{}) (interface{}, error) { + value := reflect.ValueOf(object) + + if value.Kind() == reflect.Map { + values := map[string]interface{}{} + keys := value.MapKeys() + for _, key := range keys { + if key.Kind() != reflect.String { + if _, ok := key.Interface().(string); !ok { + return nil, fmt.Errorf("map key needs to be a string, not %T (%v)", key.Interface(), key.Kind()) + } + } + + v := value.MapIndex(key) + newValue, err := toTree(v.Interface()) + if err != nil { + return nil, err + } + values[key.String()] = newValue + } + return &Tree{values: values, position: Position{}}, nil + } + + if value.Kind() == reflect.Array || value.Kind() == reflect.Slice { + return sliceToTree(object) + } + + simpleValue, 
err := simpleValueCoercion(object) + if err != nil { + return nil, err + } + return &tomlValue{value: simpleValue, position: Position{}}, nil +} diff --git a/vendor/github.com/pelletier/go-toml/tomltree_write.go b/vendor/github.com/pelletier/go-toml/tomltree_write.go new file mode 100644 index 000000000..c9afbdab7 --- /dev/null +++ b/vendor/github.com/pelletier/go-toml/tomltree_write.go @@ -0,0 +1,552 @@ +package toml + +import ( + "bytes" + "fmt" + "io" + "math" + "math/big" + "reflect" + "sort" + "strconv" + "strings" + "time" +) + +type valueComplexity int + +const ( + valueSimple valueComplexity = iota + 1 + valueComplex +) + +type sortNode struct { + key string + complexity valueComplexity +} + +// Encodes a string to a TOML-compliant multi-line string value +// This function is a clone of the existing encodeTomlString function, except that whitespace characters +// are preserved. Quotation marks and backslashes are also not escaped. +func encodeMultilineTomlString(value string, commented string) string { + var b bytes.Buffer + adjacentQuoteCount := 0 + + b.WriteString(commented) + for i, rr := range value { + if rr != '"' { + adjacentQuoteCount = 0 + } else { + adjacentQuoteCount++ + } + switch rr { + case '\b': + b.WriteString(`\b`) + case '\t': + b.WriteString("\t") + case '\n': + b.WriteString("\n" + commented) + case '\f': + b.WriteString(`\f`) + case '\r': + b.WriteString("\r") + case '"': + if adjacentQuoteCount >= 3 || i == len(value)-1 { + adjacentQuoteCount = 0 + b.WriteString(`\"`) + } else { + b.WriteString(`"`) + } + case '\\': + b.WriteString(`\`) + default: + intRr := uint16(rr) + if intRr < 0x001F { + b.WriteString(fmt.Sprintf("\\u%0.4X", intRr)) + } else { + b.WriteRune(rr) + } + } + } + return b.String() +} + +// Encodes a string to a TOML-compliant string value +func encodeTomlString(value string) string { + var b bytes.Buffer + + for _, rr := range value { + switch rr { + case '\b': + b.WriteString(`\b`) + case '\t': + b.WriteString(`\t`) + case '\n': + b.WriteString(`\n`) + case '\f': + b.WriteString(`\f`) + case '\r': + b.WriteString(`\r`) + case '"': + b.WriteString(`\"`) + case '\\': + b.WriteString(`\\`) + default: + intRr := uint16(rr) + if intRr < 0x001F { + b.WriteString(fmt.Sprintf("\\u%0.4X", intRr)) + } else { + b.WriteRune(rr) + } + } + } + return b.String() +} + +func tomlTreeStringRepresentation(t *Tree, ord MarshalOrder) (string, error) { + var orderedVals []sortNode + switch ord { + case OrderPreserve: + orderedVals = sortByLines(t) + default: + orderedVals = sortAlphabetical(t) + } + + var values []string + for _, node := range orderedVals { + k := node.key + v := t.values[k] + + repr, err := tomlValueStringRepresentation(v, "", "", ord, false) + if err != nil { + return "", err + } + values = append(values, quoteKeyIfNeeded(k)+" = "+repr) + } + return "{ " + strings.Join(values, ", ") + " }", nil +} + +func tomlValueStringRepresentation(v interface{}, commented string, indent string, ord MarshalOrder, arraysOneElementPerLine bool) (string, error) { + // this interface check is added to dereference the change made in the writeTo function. + // That change was made to allow this function to see formatting options. 
+ tv, ok := v.(*tomlValue) + if ok { + v = tv.value + } else { + tv = &tomlValue{} + } + + switch value := v.(type) { + case uint64: + return strconv.FormatUint(value, 10), nil + case int64: + return strconv.FormatInt(value, 10), nil + case float64: + // Default bit length is full 64 + bits := 64 + // Float panics if nan is used + if !math.IsNaN(value) { + // if 32 bit accuracy is enough to exactly show, use 32 + _, acc := big.NewFloat(value).Float32() + if acc == big.Exact { + bits = 32 + } + } + if math.Trunc(value) == value { + return strings.ToLower(strconv.FormatFloat(value, 'f', 1, bits)), nil + } + return strings.ToLower(strconv.FormatFloat(value, 'f', -1, bits)), nil + case string: + if tv.multiline { + if tv.literal { + b := strings.Builder{} + b.WriteString("'''\n") + b.Write([]byte(value)) + b.WriteString("\n'''") + return b.String(), nil + } else { + return "\"\"\"\n" + encodeMultilineTomlString(value, commented) + "\"\"\"", nil + } + } + return "\"" + encodeTomlString(value) + "\"", nil + case []byte: + b, _ := v.([]byte) + return string(b), nil + case bool: + if value { + return "true", nil + } + return "false", nil + case time.Time: + return value.Format(time.RFC3339), nil + case LocalDate: + return value.String(), nil + case LocalDateTime: + return value.String(), nil + case LocalTime: + return value.String(), nil + case *Tree: + return tomlTreeStringRepresentation(value, ord) + case nil: + return "", nil + } + + rv := reflect.ValueOf(v) + + if rv.Kind() == reflect.Slice { + var values []string + for i := 0; i < rv.Len(); i++ { + item := rv.Index(i).Interface() + itemRepr, err := tomlValueStringRepresentation(item, commented, indent, ord, arraysOneElementPerLine) + if err != nil { + return "", err + } + values = append(values, itemRepr) + } + if arraysOneElementPerLine && len(values) > 1 { + stringBuffer := bytes.Buffer{} + valueIndent := indent + ` ` // TODO: move that to a shared encoder state + + stringBuffer.WriteString("[\n") + + for _, value := range values { + stringBuffer.WriteString(valueIndent) + stringBuffer.WriteString(commented + value) + stringBuffer.WriteString(`,`) + stringBuffer.WriteString("\n") + } + + stringBuffer.WriteString(indent + commented + "]") + + return stringBuffer.String(), nil + } + return "[" + strings.Join(values, ", ") + "]", nil + } + return "", fmt.Errorf("unsupported value type %T: %v", v, v) +} + +func getTreeArrayLine(trees []*Tree) (line int) { + // Prevent returning 0 for empty trees + line = int(^uint(0) >> 1) + // get lowest line number >= 0 + for _, tv := range trees { + if tv.position.Line < line || line == 0 { + line = tv.position.Line + } + } + return +} + +func sortByLines(t *Tree) (vals []sortNode) { + var ( + line int + lines []int + tv *Tree + tom *tomlValue + node sortNode + ) + vals = make([]sortNode, 0) + m := make(map[int]sortNode) + + for k := range t.values { + v := t.values[k] + switch v.(type) { + case *Tree: + tv = v.(*Tree) + line = tv.position.Line + node = sortNode{key: k, complexity: valueComplex} + case []*Tree: + line = getTreeArrayLine(v.([]*Tree)) + node = sortNode{key: k, complexity: valueComplex} + default: + tom = v.(*tomlValue) + line = tom.position.Line + node = sortNode{key: k, complexity: valueSimple} + } + lines = append(lines, line) + vals = append(vals, node) + m[line] = node + } + sort.Ints(lines) + + for i, line := range lines { + vals[i] = m[line] + } + + return vals +} + +func sortAlphabetical(t *Tree) (vals []sortNode) { + var ( + node sortNode + simpVals []string + compVals []string + ) + 
vals = make([]sortNode, 0) + m := make(map[string]sortNode) + + for k := range t.values { + v := t.values[k] + switch v.(type) { + case *Tree, []*Tree: + node = sortNode{key: k, complexity: valueComplex} + compVals = append(compVals, node.key) + default: + node = sortNode{key: k, complexity: valueSimple} + simpVals = append(simpVals, node.key) + } + vals = append(vals, node) + m[node.key] = node + } + + // Simples first to match previous implementation + sort.Strings(simpVals) + i := 0 + for _, key := range simpVals { + vals[i] = m[key] + i++ + } + + sort.Strings(compVals) + for _, key := range compVals { + vals[i] = m[key] + i++ + } + + return vals +} + +func (t *Tree) writeTo(w io.Writer, indent, keyspace string, bytesCount int64, arraysOneElementPerLine bool) (int64, error) { + return t.writeToOrdered(w, indent, keyspace, bytesCount, arraysOneElementPerLine, OrderAlphabetical, " ", false, false) +} + +func (t *Tree) writeToOrdered(w io.Writer, indent, keyspace string, bytesCount int64, arraysOneElementPerLine bool, ord MarshalOrder, indentString string, compactComments, parentCommented bool) (int64, error) { + var orderedVals []sortNode + + switch ord { + case OrderPreserve: + orderedVals = sortByLines(t) + default: + orderedVals = sortAlphabetical(t) + } + + for _, node := range orderedVals { + switch node.complexity { + case valueComplex: + k := node.key + v := t.values[k] + + combinedKey := quoteKeyIfNeeded(k) + if keyspace != "" { + combinedKey = keyspace + "." + combinedKey + } + + switch node := v.(type) { + // node has to be of those two types given how keys are sorted above + case *Tree: + tv, ok := t.values[k].(*Tree) + if !ok { + return bytesCount, fmt.Errorf("invalid value type at %s: %T", k, t.values[k]) + } + if tv.comment != "" { + comment := strings.Replace(tv.comment, "\n", "\n"+indent+"#", -1) + start := "# " + if strings.HasPrefix(comment, "#") { + start = "" + } + writtenBytesCountComment, errc := writeStrings(w, "\n", indent, start, comment) + bytesCount += int64(writtenBytesCountComment) + if errc != nil { + return bytesCount, errc + } + } + + var commented string + if parentCommented || t.commented || tv.commented { + commented = "# " + } + writtenBytesCount, err := writeStrings(w, "\n", indent, commented, "[", combinedKey, "]\n") + bytesCount += int64(writtenBytesCount) + if err != nil { + return bytesCount, err + } + bytesCount, err = node.writeToOrdered(w, indent+indentString, combinedKey, bytesCount, arraysOneElementPerLine, ord, indentString, compactComments, parentCommented || t.commented || tv.commented) + if err != nil { + return bytesCount, err + } + case []*Tree: + for _, subTree := range node { + var commented string + if parentCommented || t.commented || subTree.commented { + commented = "# " + } + writtenBytesCount, err := writeStrings(w, "\n", indent, commented, "[[", combinedKey, "]]\n") + bytesCount += int64(writtenBytesCount) + if err != nil { + return bytesCount, err + } + + bytesCount, err = subTree.writeToOrdered(w, indent+indentString, combinedKey, bytesCount, arraysOneElementPerLine, ord, indentString, compactComments, parentCommented || t.commented || subTree.commented) + if err != nil { + return bytesCount, err + } + } + } + default: // Simple + k := node.key + v, ok := t.values[k].(*tomlValue) + if !ok { + return bytesCount, fmt.Errorf("invalid value type at %s: %T", k, t.values[k]) + } + + var commented string + if parentCommented || t.commented || v.commented { + commented = "# " + } + repr, err := tomlValueStringRepresentation(v, 
commented, indent, ord, arraysOneElementPerLine) + if err != nil { + return bytesCount, err + } + + if v.comment != "" { + comment := strings.Replace(v.comment, "\n", "\n"+indent+"#", -1) + start := "# " + if strings.HasPrefix(comment, "#") { + start = "" + } + if !compactComments { + writtenBytesCountComment, errc := writeStrings(w, "\n") + bytesCount += int64(writtenBytesCountComment) + if errc != nil { + return bytesCount, errc + } + } + writtenBytesCountComment, errc := writeStrings(w, indent, start, comment, "\n") + bytesCount += int64(writtenBytesCountComment) + if errc != nil { + return bytesCount, errc + } + } + + quotedKey := quoteKeyIfNeeded(k) + writtenBytesCount, err := writeStrings(w, indent, commented, quotedKey, " = ", repr, "\n") + bytesCount += int64(writtenBytesCount) + if err != nil { + return bytesCount, err + } + } + } + + return bytesCount, nil +} + +// quote a key if it does not fit the bare key format (A-Za-z0-9_-) +// quoted keys use the same rules as strings +func quoteKeyIfNeeded(k string) string { + // when encoding a map with the 'quoteMapKeys' option enabled, the tree will contain + // keys that have already been quoted. + // not an ideal situation, but good enough of a stop gap. + if len(k) >= 2 && k[0] == '"' && k[len(k)-1] == '"' { + return k + } + isBare := true + for _, r := range k { + if !isValidBareChar(r) { + isBare = false + break + } + } + if isBare { + return k + } + return quoteKey(k) +} + +func quoteKey(k string) string { + return "\"" + encodeTomlString(k) + "\"" +} + +func writeStrings(w io.Writer, s ...string) (int, error) { + var n int + for i := range s { + b, err := io.WriteString(w, s[i]) + n += b + if err != nil { + return n, err + } + } + return n, nil +} + +// WriteTo encode the Tree as Toml and writes it to the writer w. +// Returns the number of bytes written in case of success, or an error if anything happened. +func (t *Tree) WriteTo(w io.Writer) (int64, error) { + return t.writeTo(w, "", "", 0, false) +} + +// ToTomlString generates a human-readable representation of the current tree. +// Output spans multiple lines, and is suitable for ingest by a TOML parser. +// If the conversion cannot be performed, ToString returns a non-nil error. +func (t *Tree) ToTomlString() (string, error) { + b, err := t.Marshal() + if err != nil { + return "", err + } + return string(b), nil +} + +// String generates a human-readable representation of the current tree. +// Alias of ToString. Present to implement the fmt.Stringer interface. +func (t *Tree) String() string { + result, _ := t.ToTomlString() + return result +} + +// ToMap recursively generates a representation of the tree using Go built-in structures. 
+// The following types are used: +// +// * bool +// * float64 +// * int64 +// * string +// * uint64 +// * time.Time +// * map[string]interface{} (where interface{} is any of this list) +// * []interface{} (where interface{} is any of this list) +func (t *Tree) ToMap() map[string]interface{} { + result := map[string]interface{}{} + + for k, v := range t.values { + switch node := v.(type) { + case []*Tree: + var array []interface{} + for _, item := range node { + array = append(array, item.ToMap()) + } + result[k] = array + case *Tree: + result[k] = node.ToMap() + case *tomlValue: + result[k] = tomlValueToGo(node.value) + } + } + return result +} + +func tomlValueToGo(v interface{}) interface{} { + if tree, ok := v.(*Tree); ok { + return tree.ToMap() + } + + rv := reflect.ValueOf(v) + + if rv.Kind() != reflect.Slice { + return v + } + values := make([]interface{}, rv.Len()) + for i := 0; i < rv.Len(); i++ { + item := rv.Index(i).Interface() + values[i] = tomlValueToGo(item) + } + return values +} diff --git a/vendor/github.com/pelletier/go-toml/tomltree_writepub.go b/vendor/github.com/pelletier/go-toml/tomltree_writepub.go new file mode 100644 index 000000000..fa326308c --- /dev/null +++ b/vendor/github.com/pelletier/go-toml/tomltree_writepub.go @@ -0,0 +1,6 @@ +package toml + +// ValueStringRepresentation transforms an interface{} value into its toml string representation. +func ValueStringRepresentation(v interface{}, commented string, indent string, ord MarshalOrder, arraysOneElementPerLine bool) (string, error) { + return tomlValueStringRepresentation(v, commented, indent, ord, arraysOneElementPerLine) +} diff --git a/vendor/github.com/phayes/checkstyle/.scrutinizer.yml b/vendor/github.com/phayes/checkstyle/.scrutinizer.yml new file mode 100644 index 000000000..d9284b6b4 --- /dev/null +++ b/vendor/github.com/phayes/checkstyle/.scrutinizer.yml @@ -0,0 +1,15 @@ +build: + dependencies: + before: + - 'source <(curl -fsSL https://raw.githubusercontent.com/phayes/go-scrutinize/master/install-golang)' + + tests: + override: + - + command: 'cd $PROJECTPATH && go-scrutinize' + coverage: + file: 'coverage.xml' + format: 'clover' + analysis: + file: 'checkstyle_report.xml' + format: 'general-checkstyle' \ No newline at end of file diff --git a/vendor/github.com/phayes/checkstyle/LICENSE b/vendor/github.com/phayes/checkstyle/LICENSE new file mode 100644 index 000000000..6dc912f39 --- /dev/null +++ b/vendor/github.com/phayes/checkstyle/LICENSE @@ -0,0 +1,29 @@ +BSD 3-Clause License + +Copyright (c) 2017, Patrick D Hayes +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +* Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/github.com/phayes/checkstyle/README.md b/vendor/github.com/phayes/checkstyle/README.md new file mode 100644 index 000000000..358cf6752 --- /dev/null +++ b/vendor/github.com/phayes/checkstyle/README.md @@ -0,0 +1,44 @@ +# checkstyle +[![GoDoc](https://godoc.org/github.com/phayes/checkstyle?status.svg)](https://godoc.org/github.com/phayes/checkstyle) +[![Go Report Card](https://goreportcard.com/badge/github.com/phayes/checkstyle)](https://goreportcard.com/report/github.com/phayes/checkstyle) +[![Build Status](https://scrutinizer-ci.com/g/phayes/checkstyle/badges/build.png?b=master)](https://scrutinizer-ci.com/g/phayes/checkstyle/build-status/master) + +Read and write checksyle_report.xml files with golang + +Checkstyle XML files are a standard file format for reporting errors in source code, and is often generated by static analysis tools. + +Example usage: + +```go + +import "github.com/phayes/checkstyle" + +// Print XML into human readable format +checkSyle, err := checkstyle.ReadFile("checkstyle_report.xml") +if err != nil { + log.Fatal(err) +} +for _, file := range checkStyle.File { + fmt.Println(File.Name) + for _, codingError := range file.Error { + fmt.Println("\t", codingError.Line, codingError.Message) + } +} + +// Create a new XML file from scratch +check := checkstyle.New() + +// Ensure that a file has been added +file := check.EnsureFile("/path/to/file") + +// Create an error on line 10 +codingError := checkstyle.NewError(10, "format", "line must end with a full stop") + +// Add the error to the file +file.AddError(codingError) + +// Output XML +fmt.Print(check) +``` + +For more information on checkstyle XML see: http://checkstyle.sourceforge.net/checks.html diff --git a/vendor/github.com/phayes/checkstyle/checkstyle.go b/vendor/github.com/phayes/checkstyle/checkstyle.go new file mode 100644 index 000000000..cabbd4b40 --- /dev/null +++ b/vendor/github.com/phayes/checkstyle/checkstyle.go @@ -0,0 +1,112 @@ +package checkstyle + +import "encoding/xml" +import "io/ioutil" + +// DefaultCheckStyleVersion defines the default "version" attribute on "" lememnt +var DefaultCheckStyleVersion = "1.0.0" + +// Severity defines a checkstyle severity code +type Severity string + +var ( + SeverityError Severity = "error" + SeverityInfo Severity = "info" + SeverityWarning Severity = "warning" + SeverityIgnore Severity = "ignore" + SeverityNone Severity +) + +// CheckStyle represents a xml element found in a checkstyle_report.xml file. +type CheckStyle struct { + XMLName xml.Name `xml:"checkstyle"` + Version string `xml:"version,attr"` + File []*File `xml:"file"` +} + +// AddFile adds a checkstyle.File with the given filename. +func (cs *CheckStyle) AddFile(csf *File) { + cs.File = append(cs.File, csf) +} + +// GetFile gets a CheckStyleFile with the given filename. 
+func (cs *CheckStyle) GetFile(filename string) (csf *File, ok bool) { + for _, file := range cs.File { + if file.Name == filename { + csf = file + ok = true + return + } + } + return +} + +// EnsureFile ensures that a CheckStyleFile with the given name exists +// Returns either an exiting CheckStyleFile (if a file with that name exists) +// or a new CheckStyleFile (if a file with that name does not exists) +func (cs *CheckStyle) EnsureFile(filename string) (csf *File) { + csf, ok := cs.GetFile(filename) + if !ok { + csf = NewFile(filename) + cs.AddFile(csf) + } + return csf +} + +// String implements Stringer. Returns as xml. +func (cs *CheckStyle) String() string { + checkStyleXML, err := xml.Marshal(cs) + if err != nil { + panic(err) + } + return string(checkStyleXML) +} + +// New returns a new CheckStyle +func New() *CheckStyle { + return &CheckStyle{Version: DefaultCheckStyleVersion, File: []*File{}} +} + +// File represents a xml element. +type File struct { + XMLName xml.Name `xml:"file"` + Name string `xml:"name,attr"` + Error []*Error `xml:"error"` +} + +// AddError adds a checkstyle.Error to the file. +func (csf *File) AddError(cse *Error) { + csf.Error = append(csf.Error, cse) +} + +// NewFile creates a new checkstyle.File +func NewFile(filename string) *File { + return &File{Name: filename, Error: []*Error{}} +} + +// Error represents a xml element +type Error struct { + XMLName xml.Name `xml:"error"` + Line int `xml:"line,attr"` + Column int `xml:"column,attr,omitempty"` + Severity Severity `xml:"severity,attr,omitempty"` + Message string `xml:"message,attr"` + Source string `xml:"source,attr"` +} + +// NewError creates a new checkstyle.Error +// Note that line starts at 0, and column starts at 1 +func NewError(line int, column int, severity Severity, message string, source string) *Error { + return &Error{Line: line, Column: column, Severity: severity, Message: message, Source: source} +} + +// ReadFile reads a checkfile.xml file and returns a CheckStyle object. +func ReadFile(filename string) (*CheckStyle, error) { + checkStyleXML, err := ioutil.ReadFile(filename) + if err != nil { + return nil, err + } + checkStyle := New() + err = xml.Unmarshal(checkStyleXML, checkStyle) + return checkStyle, err +} diff --git a/vendor/github.com/phayes/checkstyle/godoc.go b/vendor/github.com/phayes/checkstyle/godoc.go new file mode 100644 index 000000000..c9662fe9e --- /dev/null +++ b/vendor/github.com/phayes/checkstyle/godoc.go @@ -0,0 +1,36 @@ +/* +Package checkstyle allows the parsing of generation of checkstyle XML files. + +Checkstyle XML files are a standard file format for reporting errors in source code, and is often generated by static analysis tools. 
+ +Example usage: + // Print XML into human readable format + checkSyle, err := checkstyle.ReadFile("checkstyle_report.xml") + if err != nil { + log.Fatal(err) + } + for _, file := range checkStyle.File { + fmt.Println(File.Name) + for _, codingError := range file.Error { + fmt.Println("\t", codingError.Line, codingError.Message) + } + } + + // Create a new XML file from scratch + check := checkstyle.New() + + // Ensure that a file has been added + file := check.EnsureFile("/path/to/file") + + // Create an error on line 10, column 5 + codingError := checkstyle.NewError(10, 5, checkstyle.SeverityWarning, "format", "line must end with a full stop") + + // Add the error to the file + file.AddError(codingError) + + // Output XML + fmt.Print(check) + +For more information on checkstyle XML see: http://checkstyle.sourceforge.net/checks.html +*/ +package checkstyle diff --git a/vendor/github.com/polyfloyd/go-errorlint/LICENSE b/vendor/github.com/polyfloyd/go-errorlint/LICENSE new file mode 100644 index 000000000..b7f88cf1c --- /dev/null +++ b/vendor/github.com/polyfloyd/go-errorlint/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2019 polyfloyd + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/vendor/github.com/polyfloyd/go-errorlint/errorlint/allowed.go b/vendor/github.com/polyfloyd/go-errorlint/errorlint/allowed.go new file mode 100644 index 000000000..9b35388ef --- /dev/null +++ b/vendor/github.com/polyfloyd/go-errorlint/errorlint/allowed.go @@ -0,0 +1,143 @@ +package errorlint + +import ( + "fmt" + "go/ast" + "go/types" +) + +var allowedErrors = []struct { + err string + fun string +}{ + // pkg/archive/tar + {err: "io.EOF", fun: "(*tar.Reader).Next"}, + {err: "io.EOF", fun: "(*tar.Reader).Read"}, + // pkg/bufio + {err: "io.EOF", fun: "(*bufio.Reader).Read"}, + {err: "io.EOF", fun: "(*bufio.Reader).ReadByte"}, + {err: "io.EOF", fun: "(*bufio.Reader).ReadBytes"}, + {err: "io.EOF", fun: "(*bufio.Reader).ReadLine"}, + {err: "io.EOF", fun: "(*bufio.Reader).ReadSlice"}, + {err: "io.EOF", fun: "(*bufio.Reader).ReadString"}, + {err: "io.EOF", fun: "(*bufio.Scanner).Scan"}, + // pkg/bytes + {err: "io.EOF", fun: "(*bytes.Buffer).Read"}, + {err: "io.EOF", fun: "(*bytes.Buffer).ReadByte"}, + {err: "io.EOF", fun: "(*bytes.Buffer).ReadBytes"}, + {err: "io.EOF", fun: "(*bytes.Buffer).ReadRune"}, + {err: "io.EOF", fun: "(*bytes.Buffer).ReadString"}, + {err: "io.EOF", fun: "(*bytes.Reader).Read"}, + {err: "io.EOF", fun: "(*bytes.Reader).ReadAt"}, + {err: "io.EOF", fun: "(*bytes.Reader).ReadByte"}, + {err: "io.EOF", fun: "(*bytes.Reader).ReadRune"}, + {err: "io.EOF", fun: "(*bytes.Reader).ReadString"}, + // pkg/database/sql + {err: "sql.ErrNoRows", fun: "(*database/sql.Row).Scan"}, + // pkg/io + {err: "io.EOF", fun: "(io.Reader).Read"}, + {err: "io.ErrClosedPipe", fun: "(*io.PipeWriter).Write"}, + {err: "io.ErrShortBuffer", fun: "io.ReadAtLeast"}, + {err: "io.ErrUnexpectedEOF", fun: "io.ReadAtLeast"}, + {err: "io.ErrUnexpectedEOF", fun: "io.ReadFull"}, + // pkg/os + {err: "io.EOF", fun: "(*os.File).Read"}, + {err: "io.EOF", fun: "(*os.File).ReadAt"}, + {err: "io.EOF", fun: "(*os.File).ReadDir"}, + {err: "io.EOF", fun: "(*os.File).Readdir"}, + {err: "io.EOF", fun: "(*os.File).Readdirnames"}, + // pkg/strings + {err: "io.EOF", fun: "(*strings.Reader).Read"}, + {err: "io.EOF", fun: "(*strings.Reader).ReadAt"}, + {err: "io.EOF", fun: "(*strings.Reader).ReadByte"}, + {err: "io.EOF", fun: "(*strings.Reader).ReadRune"}, +} + +func isAllowedErrorComparison(info types.Info, binExpr *ast.BinaryExpr) bool { + var errName string // `.`, e.g. `io.EOF` + var callExpr *ast.CallExpr + + // Figure out which half of the expression is the returned error and which + // half is the presumed error declaration. + for _, expr := range []ast.Expr{binExpr.X, binExpr.Y} { + switch t := expr.(type) { + case *ast.SelectorExpr: + // A selector which we assume refers to a staticaly declared error + // in a package. + errName = selectorToString(t) + case *ast.Ident: + // Identifier, most likely to be the `err` variable or whatever + // produces it. + callExpr = assigningCallExpr(info, t) + case *ast.CallExpr: + callExpr = t + } + } + + // Unimplemented or not sure, disallow the expression. + if errName == "" || callExpr == nil { + return false + } + + // Find the expression that last assigned the subject identifier. + functionSelector, ok := callExpr.Fun.(*ast.SelectorExpr) + if !ok { + // If the function is not a selector it is not an Std function that is + // allowed. + return false + } + var functionName string + if sel, ok := info.Selections[functionSelector]; ok { + functionName = fmt.Sprintf("(%s).%s", sel.Recv(), sel.Obj().Name()) + } else { + // If there is no selection, assume it is a package. 
+        functionName = selectorToString(callExpr.Fun.(*ast.SelectorExpr))
+    }
+
+    for _, w := range allowedErrors {
+        if w.fun == functionName && w.err == errName {
+            return true
+        }
+    }
+    return false
+}
+
+func assigningCallExpr(info types.Info, subject *ast.Ident) *ast.CallExpr {
+    if subject.Obj == nil {
+        return nil
+    }
+    switch declT := subject.Obj.Decl.(type) {
+    case *ast.AssignStmt:
+        // The identifier is LHS of an assignment.
+        assignment := declT
+
+        assigningExpr := assignment.Rhs[0]
+        // If the assignment has multiple expressions, pick the RHS expression
+        // that matches the identifier by finding its index in the LHS.
+        if len(assignment.Rhs) > 1 {
+            for i, lhs := range assignment.Lhs {
+                if subject.Name == lhs.(*ast.Ident).Name {
+                    assigningExpr = assignment.Rhs[i]
+                    break
+                }
+            }
+        }
+
+        switch assignT := assigningExpr.(type) {
+        case *ast.CallExpr:
+            // Found the function call.
+            return assignT
+        case *ast.Ident:
+            // The subject was the result of assigning from another identifier.
+            return assigningCallExpr(info, assignT)
+        }
+    }
+    return nil
+}
+
+func selectorToString(selExpr *ast.SelectorExpr) string {
+    if ident, ok := selExpr.X.(*ast.Ident); ok {
+        return ident.Name + "." + selExpr.Sel.Name
+    }
+    return ""
+}
diff --git a/vendor/github.com/polyfloyd/go-errorlint/errorlint/analysis.go b/vendor/github.com/polyfloyd/go-errorlint/errorlint/analysis.go
new file mode 100644
index 000000000..e2449f8f9
--- /dev/null
+++ b/vendor/github.com/polyfloyd/go-errorlint/errorlint/analysis.go
@@ -0,0 +1,52 @@
+package errorlint
+
+import (
+    "flag"
+    "sort"
+
+    "golang.org/x/tools/go/analysis"
+)
+
+func NewAnalyzer() *analysis.Analyzer {
+    return &analysis.Analyzer{
+        Name:  "errorlint",
+        Doc:   "Source code linter for Go software that can be used to find code that will cause problems with the error wrapping scheme introduced in Go 1.13.",
+        Run:   run,
+        Flags: flagSet,
+    }
+}
+
+var (
+    flagSet         flag.FlagSet
+    checkComparison bool
+    checkAsserts    bool
+    checkErrorf     bool
+)
+
+func init() {
+    flagSet.BoolVar(&checkComparison, "comparison", true, "Check for plain error comparisons")
+    flagSet.BoolVar(&checkAsserts, "asserts", true, "Check for plain type assertions and type switches")
+    flagSet.BoolVar(&checkErrorf, "errorf", false, "Check whether fmt.Errorf uses the %w verb for formatting errors. See the readme for caveats")
+}
+
+func run(pass *analysis.Pass) (interface{}, error) {
+    lints := []Lint{}
+    if checkComparison {
+        l := LintErrorComparisons(pass.Fset, *pass.TypesInfo)
+        lints = append(lints, l...)
+    }
+    if checkAsserts {
+        l := LintErrorTypeAssertions(pass.Fset, *pass.TypesInfo)
+        lints = append(lints, l...)
+    }
+    if checkErrorf {
+        l := LintFmtErrorfCalls(pass.Fset, *pass.TypesInfo)
+        lints = append(lints, l...)
+ } + sort.Sort(ByPosition(lints)) + + for _, l := range lints { + pass.Report(analysis.Diagnostic{Pos: l.Pos, Message: l.Message}) + } + return nil, nil +} diff --git a/vendor/github.com/polyfloyd/go-errorlint/errorlint/lint.go b/vendor/github.com/polyfloyd/go-errorlint/errorlint/lint.go new file mode 100644 index 000000000..3d11946a0 --- /dev/null +++ b/vendor/github.com/polyfloyd/go-errorlint/errorlint/lint.go @@ -0,0 +1,249 @@ +package errorlint + +import ( + "fmt" + "go/ast" + "go/constant" + "go/token" + "go/types" + "regexp" +) + +type Lint struct { + Message string + Pos token.Pos +} + +type ByPosition []Lint + +func (l ByPosition) Len() int { return len(l) } +func (l ByPosition) Swap(i, j int) { l[i], l[j] = l[j], l[i] } + +func (l ByPosition) Less(i, j int) bool { + return l[i].Pos < l[j].Pos +} + +func LintFmtErrorfCalls(fset *token.FileSet, info types.Info) []Lint { + lints := []Lint{} + for expr, t := range info.Types { + // Search for error expressions that are the result of fmt.Errorf + // invocations. + if t.Type.String() != "error" { + continue + } + call, ok := isFmtErrorfCallExpr(info, expr) + if !ok { + continue + } + + // Find all % fields in the format string. + formatVerbs, ok := printfFormatStringVerbs(info, call) + if !ok { + continue + } + + // For any arguments that are errors, check whether the wrapping verb + // is used. Only one %w verb may be used in a single format string at a + // time, so we stop after finding a correct %w. + var lintArg ast.Expr + args := call.Args[1:] + for i := 0; i < len(args) && i < len(formatVerbs); i++ { + if info.Types[args[i]].Type.String() != "error" && !isErrorStringCall(info, args[i]) { + continue + } + + if formatVerbs[i] == "%w" { + lintArg = nil + break + } + + if lintArg == nil { + lintArg = args[i] + } + } + if lintArg != nil { + lints = append(lints, Lint{ + Message: "non-wrapping format verb for fmt.Errorf. Use `%w` to format errors", + Pos: lintArg.Pos(), + }) + } + } + return lints +} + +// isErrorStringCall tests whether the expression is a string expression that +// is the result of an `(error).Error()` method call. +func isErrorStringCall(info types.Info, expr ast.Expr) bool { + if info.Types[expr].Type.String() == "string" { + if call, ok := expr.(*ast.CallExpr); ok { + if callSel, ok := call.Fun.(*ast.SelectorExpr); ok { + fun := info.Uses[callSel.Sel].(*types.Func) + return fun.Type().String() == "func() string" && fun.Name() == "Error" + } + } + } + return false +} + +func printfFormatStringVerbs(info types.Info, call *ast.CallExpr) ([]string, bool) { + if len(call.Args) <= 1 { + return nil, false + } + strLit, ok := call.Args[0].(*ast.BasicLit) + if !ok { + // Ignore format strings that are not literals. + return nil, false + } + formatString := constant.StringVal(info.Types[strLit].Value) + + // Naive format string argument verb. This does not take modifiers such as + // padding into account... + re := regexp.MustCompile(`%[^%]`) + return re.FindAllString(formatString, -1), true +} + +func isFmtErrorfCallExpr(info types.Info, expr ast.Expr) (*ast.CallExpr, bool) { + call, ok := expr.(*ast.CallExpr) + if !ok { + return nil, false + } + fn, ok := call.Fun.(*ast.SelectorExpr) + if !ok { + // TODO: Support fmt.Errorf variable aliases? 
+        return nil, false
+    }
+    obj := info.Uses[fn.Sel]
+
+    pkg := obj.Pkg()
+    if pkg != nil && pkg.Name() == "fmt" && obj.Name() == "Errorf" {
+        return call, true
+    }
+    return nil, false
+}
+
+func LintErrorComparisons(fset *token.FileSet, info types.Info) []Lint {
+    lints := []Lint{}
+
+    for expr := range info.Types {
+        // Find == and != operations.
+        binExpr, ok := expr.(*ast.BinaryExpr)
+        if !ok {
+            continue
+        }
+        if binExpr.Op != token.EQL && binExpr.Op != token.NEQ {
+            continue
+        }
+        // Comparing errors with nil is okay.
+        if isNilComparison(binExpr) {
+            continue
+        }
+        // Find comparisons where one side is of type error.
+        if !isErrorComparison(info, binExpr) {
+            continue
+        }
+
+        if isAllowedErrorComparison(info, binExpr) {
+            continue
+        }
+
+        lints = append(lints, Lint{
+            Message: fmt.Sprintf("comparing with %s will fail on wrapped errors. Use errors.Is to check for a specific error", binExpr.Op),
+            Pos: binExpr.Pos(),
+        })
+    }
+
+    for scope := range info.Scopes {
+        // Find value switch blocks.
+        switchStmt, ok := scope.(*ast.SwitchStmt)
+        if !ok {
+            continue
+        }
+        // Check whether the switch operates on an error type.
+        if switchStmt.Tag == nil {
+            continue
+        }
+        tagType := info.Types[switchStmt.Tag]
+        if tagType.Type.String() != "error" {
+            continue
+        }
+
+        lints = append(lints, Lint{
+            Message: "switch on an error will fail on wrapped errors. Use errors.Is to check for specific errors",
+            Pos: switchStmt.Pos(),
+        })
+    }
+
+    return lints
+}
+
+func isNilComparison(binExpr *ast.BinaryExpr) bool {
+    if ident, ok := binExpr.X.(*ast.Ident); ok && ident.Name == "nil" {
+        return true
+    }
+    if ident, ok := binExpr.Y.(*ast.Ident); ok && ident.Name == "nil" {
+        return true
+    }
+    return false
+}
+
+func isErrorComparison(info types.Info, binExpr *ast.BinaryExpr) bool {
+    tx := info.Types[binExpr.X]
+    ty := info.Types[binExpr.Y]
+    return tx.Type.String() == "error" || ty.Type.String() == "error"
+}
+
+func LintErrorTypeAssertions(fset *token.FileSet, info types.Info) []Lint {
+    lints := []Lint{}
+
+    for expr := range info.Types {
+        // Find type assertions.
+        typeAssert, ok := expr.(*ast.TypeAssertExpr)
+        if !ok {
+            continue
+        }
+
+        // Find type assertions that operate on values of type error.
+        if !isErrorTypeAssertion(info, typeAssert) {
+            continue
+        }
+
+        lints = append(lints, Lint{
+            Message: "type assertion on error will fail on wrapped errors. Use errors.As to check for specific errors",
+            Pos: typeAssert.Pos(),
+        })
+    }
+
+    for scope := range info.Scopes {
+        // Find type switches.
+        typeSwitch, ok := scope.(*ast.TypeSwitchStmt)
+        if !ok {
+            continue
+        }
+
+        // Find the type assertion in the type switch.
+        var typeAssert *ast.TypeAssertExpr
+        switch t := typeSwitch.Assign.(type) {
+        case *ast.ExprStmt:
+            typeAssert = t.X.(*ast.TypeAssertExpr)
+        case *ast.AssignStmt:
+            typeAssert = t.Rhs[0].(*ast.TypeAssertExpr)
+        }
+
+        // Check whether the type switch is on a value of type error.
+        if !isErrorTypeAssertion(info, typeAssert) {
+            continue
+        }
+
+        lints = append(lints, Lint{
+            Message: "type switch on error will fail on wrapped errors.
Use errors.As to check for specific errors", + Pos: typeAssert.Pos(), + }) + } + + return lints +} + +func isErrorTypeAssertion(info types.Info, typeAssert *ast.TypeAssertExpr) bool { + t := info.Types[typeAssert.X] + return t.Type.String() == "error" +} diff --git a/vendor/github.com/prometheus/client_golang/prometheus/testutil/promlint/promlint.go b/vendor/github.com/prometheus/client_golang/prometheus/testutil/promlint/promlint.go new file mode 100644 index 000000000..ec8061706 --- /dev/null +++ b/vendor/github.com/prometheus/client_golang/prometheus/testutil/promlint/promlint.go @@ -0,0 +1,386 @@ +// Copyright 2020 The Prometheus Authors +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package promlint provides a linter for Prometheus metrics. +package promlint + +import ( + "fmt" + "io" + "regexp" + "sort" + "strings" + + "github.com/prometheus/common/expfmt" + + dto "github.com/prometheus/client_model/go" +) + +// A Linter is a Prometheus metrics linter. It identifies issues with metric +// names, types, and metadata, and reports them to the caller. +type Linter struct { + // The linter will read metrics in the Prometheus text format from r and + // then lint it, _and_ it will lint the metrics provided directly as + // MetricFamily proto messages in mfs. Note, however, that the current + // constructor functions New and NewWithMetricFamilies only ever set one + // of them. + r io.Reader + mfs []*dto.MetricFamily +} + +// A Problem is an issue detected by a Linter. +type Problem struct { + // The name of the metric indicated by this Problem. + Metric string + + // A description of the issue for this Problem. + Text string +} + +// newProblem is helper function to create a Problem. +func newProblem(mf *dto.MetricFamily, text string) Problem { + return Problem{ + Metric: mf.GetName(), + Text: text, + } +} + +// New creates a new Linter that reads an input stream of Prometheus metrics in +// the Prometheus text exposition format. +func New(r io.Reader) *Linter { + return &Linter{ + r: r, + } +} + +// NewWithMetricFamilies creates a new Linter that reads from a slice of +// MetricFamily protobuf messages. +func NewWithMetricFamilies(mfs []*dto.MetricFamily) *Linter { + return &Linter{ + mfs: mfs, + } +} + +// Lint performs a linting pass, returning a slice of Problems indicating any +// issues found in the metrics stream. The slice is sorted by metric name +// and issue description. +func (l *Linter) Lint() ([]Problem, error) { + var problems []Problem + + if l.r != nil { + d := expfmt.NewDecoder(l.r, expfmt.FmtText) + + mf := &dto.MetricFamily{} + for { + if err := d.Decode(mf); err != nil { + if err == io.EOF { + break + } + + return nil, err + } + + problems = append(problems, lint(mf)...) + } + } + for _, mf := range l.mfs { + problems = append(problems, lint(mf)...) + } + + // Ensure deterministic output. 
+ sort.SliceStable(problems, func(i, j int) bool { + if problems[i].Metric == problems[j].Metric { + return problems[i].Text < problems[j].Text + } + return problems[i].Metric < problems[j].Metric + }) + + return problems, nil +} + +// lint is the entry point for linting a single metric. +func lint(mf *dto.MetricFamily) []Problem { + fns := []func(mf *dto.MetricFamily) []Problem{ + lintHelp, + lintMetricUnits, + lintCounter, + lintHistogramSummaryReserved, + lintMetricTypeInName, + lintReservedChars, + lintCamelCase, + lintUnitAbbreviations, + } + + var problems []Problem + for _, fn := range fns { + problems = append(problems, fn(mf)...) + } + + // TODO(mdlayher): lint rules for specific metrics types. + return problems +} + +// lintHelp detects issues related to the help text for a metric. +func lintHelp(mf *dto.MetricFamily) []Problem { + var problems []Problem + + // Expect all metrics to have help text available. + if mf.Help == nil { + problems = append(problems, newProblem(mf, "no help text")) + } + + return problems +} + +// lintMetricUnits detects issues with metric unit names. +func lintMetricUnits(mf *dto.MetricFamily) []Problem { + var problems []Problem + + unit, base, ok := metricUnits(*mf.Name) + if !ok { + // No known units detected. + return nil + } + + // Unit is already a base unit. + if unit == base { + return nil + } + + problems = append(problems, newProblem(mf, fmt.Sprintf("use base unit %q instead of %q", base, unit))) + + return problems +} + +// lintCounter detects issues specific to counters, as well as patterns that should +// only be used with counters. +func lintCounter(mf *dto.MetricFamily) []Problem { + var problems []Problem + + isCounter := mf.GetType() == dto.MetricType_COUNTER + isUntyped := mf.GetType() == dto.MetricType_UNTYPED + hasTotalSuffix := strings.HasSuffix(mf.GetName(), "_total") + + switch { + case isCounter && !hasTotalSuffix: + problems = append(problems, newProblem(mf, `counter metrics should have "_total" suffix`)) + case !isUntyped && !isCounter && hasTotalSuffix: + problems = append(problems, newProblem(mf, `non-counter metrics should not have "_total" suffix`)) + } + + return problems +} + +// lintHistogramSummaryReserved detects when other types of metrics use names or labels +// reserved for use by histograms and/or summaries. +func lintHistogramSummaryReserved(mf *dto.MetricFamily) []Problem { + // These rules do not apply to untyped metrics. 
+ t := mf.GetType() + if t == dto.MetricType_UNTYPED { + return nil + } + + var problems []Problem + + isHistogram := t == dto.MetricType_HISTOGRAM + isSummary := t == dto.MetricType_SUMMARY + + n := mf.GetName() + + if !isHistogram && strings.HasSuffix(n, "_bucket") { + problems = append(problems, newProblem(mf, `non-histogram metrics should not have "_bucket" suffix`)) + } + if !isHistogram && !isSummary && strings.HasSuffix(n, "_count") { + problems = append(problems, newProblem(mf, `non-histogram and non-summary metrics should not have "_count" suffix`)) + } + if !isHistogram && !isSummary && strings.HasSuffix(n, "_sum") { + problems = append(problems, newProblem(mf, `non-histogram and non-summary metrics should not have "_sum" suffix`)) + } + + for _, m := range mf.GetMetric() { + for _, l := range m.GetLabel() { + ln := l.GetName() + + if !isHistogram && ln == "le" { + problems = append(problems, newProblem(mf, `non-histogram metrics should not have "le" label`)) + } + if !isSummary && ln == "quantile" { + problems = append(problems, newProblem(mf, `non-summary metrics should not have "quantile" label`)) + } + } + } + + return problems +} + +// lintMetricTypeInName detects when metric types are included in the metric name. +func lintMetricTypeInName(mf *dto.MetricFamily) []Problem { + var problems []Problem + n := strings.ToLower(mf.GetName()) + + for i, t := range dto.MetricType_name { + if i == int32(dto.MetricType_UNTYPED) { + continue + } + + typename := strings.ToLower(t) + if strings.Contains(n, "_"+typename+"_") || strings.HasSuffix(n, "_"+typename) { + problems = append(problems, newProblem(mf, fmt.Sprintf(`metric name should not include type '%s'`, typename))) + } + } + return problems +} + +// lintReservedChars detects colons in metric names. +func lintReservedChars(mf *dto.MetricFamily) []Problem { + var problems []Problem + if strings.Contains(mf.GetName(), ":") { + problems = append(problems, newProblem(mf, "metric names should not contain ':'")) + } + return problems +} + +var camelCase = regexp.MustCompile(`[a-z][A-Z]`) + +// lintCamelCase detects metric names and label names written in camelCase. +func lintCamelCase(mf *dto.MetricFamily) []Problem { + var problems []Problem + if camelCase.FindString(mf.GetName()) != "" { + problems = append(problems, newProblem(mf, "metric names should be written in 'snake_case' not 'camelCase'")) + } + + for _, m := range mf.GetMetric() { + for _, l := range m.GetLabel() { + if camelCase.FindString(l.GetName()) != "" { + problems = append(problems, newProblem(mf, "label names should be written in 'snake_case' not 'camelCase'")) + } + } + } + return problems +} + +// lintUnitAbbreviations detects abbreviated units in the metric name. +func lintUnitAbbreviations(mf *dto.MetricFamily) []Problem { + var problems []Problem + n := strings.ToLower(mf.GetName()) + for _, s := range unitAbbreviations { + if strings.Contains(n, "_"+s+"_") || strings.HasSuffix(n, "_"+s) { + problems = append(problems, newProblem(mf, "metric names should not contain abbreviated units")) + } + } + return problems +} + +// metricUnits attempts to detect known unit types used as part of a metric name, +// e.g. "foo_bytes_total" or "bar_baz_milligrams". +func metricUnits(m string) (unit string, base string, ok bool) { + ss := strings.Split(m, "_") + + for unit, base := range units { + // Also check for "no prefix". 
+ for _, p := range append(unitPrefixes, "") { + for _, s := range ss { + // Attempt to explicitly match a known unit with a known prefix, + // as some words may look like "units" when matching suffix. + // + // As an example, "thermometers" should not match "meters", but + // "kilometers" should. + if s == p+unit { + return p + unit, base, true + } + } + } + } + + return "", "", false +} + +// Units and their possible prefixes recognized by this library. More can be +// added over time as needed. +var ( + // map a unit to the appropriate base unit. + units = map[string]string{ + // Base units. + "amperes": "amperes", + "bytes": "bytes", + "celsius": "celsius", // Also allow Celsius because it is common in typical Prometheus use cases. + "grams": "grams", + "joules": "joules", + "kelvin": "kelvin", // SI base unit, used in special cases (e.g. color temperature, scientific measurements). + "meters": "meters", // Both American and international spelling permitted. + "metres": "metres", + "seconds": "seconds", + "volts": "volts", + + // Non base units. + // Time. + "minutes": "seconds", + "hours": "seconds", + "days": "seconds", + "weeks": "seconds", + // Temperature. + "kelvins": "kelvin", + "fahrenheit": "celsius", + "rankine": "celsius", + // Length. + "inches": "meters", + "yards": "meters", + "miles": "meters", + // Bytes. + "bits": "bytes", + // Energy. + "calories": "joules", + // Mass. + "pounds": "grams", + "ounces": "grams", + } + + unitPrefixes = []string{ + "pico", + "nano", + "micro", + "milli", + "centi", + "deci", + "deca", + "hecto", + "kilo", + "kibi", + "mega", + "mibi", + "giga", + "gibi", + "tera", + "tebi", + "peta", + "pebi", + } + + // Common abbreviations that we'd like to discourage. + unitAbbreviations = []string{ + "s", + "ms", + "us", + "ns", + "sec", + "b", + "kb", + "mb", + "gb", + "tb", + "pb", + "m", + "h", + "d", + } +) diff --git a/vendor/github.com/quasilyte/go-ruleguard/LICENSE b/vendor/github.com/quasilyte/go-ruleguard/LICENSE new file mode 100644 index 000000000..f0381fb49 --- /dev/null +++ b/vendor/github.com/quasilyte/go-ruleguard/LICENSE @@ -0,0 +1,29 @@ +BSD 3-Clause License + +Copyright (c) 2019, Iskander (Alex) Sharipov / quasilyte +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/github.com/quasilyte/go-ruleguard/internal/gogrep/compile.go b/vendor/github.com/quasilyte/go-ruleguard/internal/gogrep/compile.go new file mode 100644 index 000000000..d6e1b1e65 --- /dev/null +++ b/vendor/github.com/quasilyte/go-ruleguard/internal/gogrep/compile.go @@ -0,0 +1,976 @@ +package gogrep + +import ( + "fmt" + "go/ast" + "go/token" +) + +type compileError string + +func (e compileError) Error() string { return string(e) } + +type compiler struct { + prog *program + stringIndexes map[string]uint8 + ifaceIndexes map[interface{}]uint8 + strict bool + fset *token.FileSet +} + +func (c *compiler) Compile(fset *token.FileSet, root ast.Node, strict bool) (p *program, err error) { + defer func() { + if err != nil { + return + } + rv := recover() + if rv == nil { + return + } + if parseErr, ok := rv.(compileError); ok { + err = parseErr + return + } + panic(rv) // Not our panic + }() + + c.fset = fset + c.strict = strict + c.prog = &program{ + insts: make([]instruction, 0, 8), + } + c.stringIndexes = make(map[string]uint8) + c.ifaceIndexes = make(map[interface{}]uint8) + + c.compileNode(root) + + if len(c.prog.insts) == 0 { + return nil, c.errorf(root, "0 instructions generated") + } + + return c.prog, nil +} + +func (c *compiler) errorf(n ast.Node, format string, args ...interface{}) compileError { + loc := c.fset.Position(n.Pos()) + message := fmt.Sprintf("%s:%d: %s", loc.Filename, loc.Line, fmt.Sprintf(format, args...)) + return compileError(message) +} + +func (c *compiler) toUint8(n ast.Node, v int) uint8 { + if !fitsUint8(v) { + panic(c.errorf(n, "implementation error: %v can't be converted to uint8", v)) + } + return uint8(v) +} + +func (c *compiler) internString(n ast.Node, s string) uint8 { + if index, ok := c.stringIndexes[s]; ok { + return index + } + index := len(c.prog.strings) + if !fitsUint8(index) { + panic(c.errorf(n, "implementation limitation: too many string values")) + } + c.stringIndexes[s] = uint8(index) + c.prog.strings = append(c.prog.strings, s) + return uint8(index) +} + +func (c *compiler) internIface(n ast.Node, v interface{}) uint8 { + if index, ok := c.ifaceIndexes[v]; ok { + return index + } + index := len(c.prog.ifaces) + if !fitsUint8(index) { + panic(c.errorf(n, "implementation limitation: too many values")) + } + c.ifaceIndexes[v] = uint8(index) + c.prog.ifaces = append(c.prog.ifaces, v) + return uint8(index) +} + +func (c *compiler) emitInst(inst instruction) { + c.prog.insts = append(c.prog.insts, inst) +} + +func (c *compiler) emitInstOp(op operation) { + c.emitInst(instruction{op: op}) +} + +func (c *compiler) compileNode(n ast.Node) { + switch n := n.(type) { + case *ast.File: + c.compileFile(n) + case ast.Decl: + c.compileDecl(n) + case ast.Expr: + c.compileExpr(n) + case ast.Stmt: + c.compileStmt(n) + case *ast.ValueSpec: + c.compileValueSpec(n) + case stmtSlice: + c.compileStmtSlice(n) + case exprSlice: + c.compileExprSlice(n) + default: + panic(c.errorf(n, "compileNode: unexpected %T", n)) + } +} + +func (c 
*compiler) compileOptStmt(n ast.Stmt) { + if exprStmt, ok := n.(*ast.ExprStmt); ok { + if ident, ok := exprStmt.X.(*ast.Ident); ok && isWildName(ident.Name) { + c.compileWildIdent(ident, true) + return + } + } + c.compileStmt(n) +} + +func (c *compiler) compileOptExpr(n ast.Expr) { + if ident, ok := n.(*ast.Ident); ok && isWildName(ident.Name) { + c.compileWildIdent(ident, true) + return + } + c.compileExpr(n) +} + +func (c *compiler) compileFieldList(n *ast.FieldList) { + c.emitInstOp(opFieldList) + for _, x := range n.List { + c.compileField(x) + } + c.emitInstOp(opEnd) +} + +func (c *compiler) compileField(n *ast.Field) { + switch { + case len(n.Names) == 0: + c.emitInstOp(opUnnamedField) + case len(n.Names) == 1: + name := n.Names[0] + if isWildName(name.Name) { + c.emitInstOp(opField) + c.compileWildIdent(name, false) + } else { + c.emitInst(instruction{ + op: opSimpleField, + valueIndex: c.internString(name, name.Name), + }) + } + default: + c.emitInstOp(opMultiField) + for _, name := range n.Names { + c.compileIdent(name) + } + c.emitInstOp(opEnd) + } + c.compileExpr(n.Type) +} + +func (c *compiler) compileValueSpec(spec *ast.ValueSpec) { + switch { + case spec.Type == nil: + c.emitInstOp(opValueInitSpec) + case len(spec.Values) == 0: + c.emitInstOp(opTypedValueSpec) + default: + c.emitInstOp(opTypedValueInitSpec) + } + for _, name := range spec.Names { + c.compileIdent(name) + } + c.emitInstOp(opEnd) + if spec.Type != nil { + c.compileExpr(spec.Type) + } + if len(spec.Values) != 0 { + for _, v := range spec.Values { + c.compileExpr(v) + } + c.emitInstOp(opEnd) + } +} + +func (c *compiler) compileTypeSpec(spec *ast.TypeSpec) { + c.emitInstOp(pickOp(spec.Assign.IsValid(), opTypeAliasSpec, opTypeSpec)) + c.compileIdent(spec.Name) + c.compileExpr(spec.Type) +} + +func (c *compiler) compileFile(n *ast.File) { + if len(n.Imports) == 0 && len(n.Decls) == 0 { + c.emitInstOp(opEmptyPackage) + c.compileIdent(n.Name) + return + } + + panic(c.errorf(n, "compileFile: unsupported file pattern")) +} + +func (c *compiler) compileDecl(n ast.Decl) { + switch n := n.(type) { + case *ast.FuncDecl: + c.compileFuncDecl(n) + case *ast.GenDecl: + c.compileGenDecl(n) + + default: + panic(c.errorf(n, "compileDecl: unexpected %T", n)) + } +} + +func (c *compiler) compileFuncDecl(n *ast.FuncDecl) { + if n.Recv == nil { + c.emitInstOp(pickOp(n.Body == nil, opFuncProtoDecl, opFuncDecl)) + } else { + c.emitInstOp(pickOp(n.Body == nil, opMethodProtoDecl, opMethodDecl)) + } + + if n.Recv != nil { + c.compileFieldList(n.Recv) + } + c.compileIdent(n.Name) + c.compileFuncType(n.Type) + if n.Body != nil { + c.compileBlockStmt(n.Body) + } +} + +func (c *compiler) compileGenDecl(n *ast.GenDecl) { + switch n.Tok { + case token.CONST, token.VAR: + c.emitInstOp(pickOp(n.Tok == token.CONST, opConstDecl, opVarDecl)) + for _, spec := range n.Specs { + c.compileValueSpec(spec.(*ast.ValueSpec)) + } + c.emitInstOp(opEnd) + case token.TYPE: + c.emitInstOp(opTypeDecl) + for _, spec := range n.Specs { + c.compileTypeSpec(spec.(*ast.TypeSpec)) + } + c.emitInstOp(opEnd) + + default: + panic(c.errorf(n, "unexpected gen decl")) + } +} + +func (c *compiler) compileExpr(n ast.Expr) { + switch n := n.(type) { + case *ast.BasicLit: + c.compileBasicLit(n) + case *ast.BinaryExpr: + c.compileBinaryExpr(n) + case *ast.IndexExpr: + c.compileIndexExpr(n) + case *ast.Ident: + c.compileIdent(n) + case *ast.CallExpr: + c.compileCallExpr(n) + case *ast.UnaryExpr: + c.compileUnaryExpr(n) + case *ast.StarExpr: + c.compileStarExpr(n) + case 
*ast.ParenExpr: + c.compileParenExpr(n) + case *ast.SliceExpr: + c.compileSliceExpr(n) + case *ast.FuncType: + c.compileFuncType(n) + case *ast.ArrayType: + c.compileArrayType(n) + case *ast.MapType: + c.compileMapType(n) + case *ast.ChanType: + c.compileChanType(n) + case *ast.CompositeLit: + c.compileCompositeLit(n) + case *ast.FuncLit: + c.compileFuncLit(n) + case *ast.Ellipsis: + c.compileEllipsis(n) + case *ast.KeyValueExpr: + c.compileKeyValueExpr(n) + case *ast.SelectorExpr: + c.compileSelectorExpr(n) + case *ast.TypeAssertExpr: + c.compileTypeAssertExpr(n) + + default: + panic(c.errorf(n, "compileExpr: unexpected %T", n)) + } +} + +func (c *compiler) compileBasicLit(n *ast.BasicLit) { + if !c.strict { + v := literalValue(n) + if v == nil { + panic(c.errorf(n, "can't convert %s (%s) value", n.Value, n.Kind)) + } + c.prog.insts = append(c.prog.insts, instruction{ + op: opBasicLit, + valueIndex: c.internIface(n, v), + }) + return + } + + var inst instruction + switch n.Kind { + case token.INT: + inst.op = opStrictIntLit + case token.FLOAT: + inst.op = opStrictFloatLit + case token.STRING: + inst.op = opStrictStringLit + case token.CHAR: + inst.op = opStrictCharLit + default: + inst.op = opStrictComplexLit + } + inst.valueIndex = c.internString(n, n.Value) + c.prog.insts = append(c.prog.insts, inst) +} + +func (c *compiler) compileBinaryExpr(n *ast.BinaryExpr) { + c.prog.insts = append(c.prog.insts, instruction{ + op: opBinaryExpr, + value: c.toUint8(n, int(n.Op)), + }) + c.compileExpr(n.X) + c.compileExpr(n.Y) +} + +func (c *compiler) compileIndexExpr(n *ast.IndexExpr) { + c.emitInstOp(opIndexExpr) + c.compileExpr(n.X) + c.compileExpr(n.Index) +} + +func (c *compiler) compileWildIdent(n *ast.Ident, optional bool) { + info := decodeWildName(n.Name) + var inst instruction + switch { + case info.Name == "_" && !info.Seq: + inst.op = opNode + case info.Name == "_" && info.Seq: + inst.op = pickOp(optional, opOptNode, opNodeSeq) + case info.Name != "_" && !info.Seq: + inst.op = opNamedNode + inst.valueIndex = c.internString(n, info.Name) + default: + inst.op = pickOp(optional, opNamedOptNode, opNamedNodeSeq) + inst.valueIndex = c.internString(n, info.Name) + } + c.prog.insts = append(c.prog.insts, inst) +} + +func (c *compiler) compileIdent(n *ast.Ident) { + if isWildName(n.Name) { + c.compileWildIdent(n, false) + return + } + + c.prog.insts = append(c.prog.insts, instruction{ + op: opIdent, + valueIndex: c.internString(n, n.Name), + }) +} + +func (c *compiler) compileCallExpr(n *ast.CallExpr) { + op := opCallExpr + if n.Ellipsis.IsValid() { + op = opVariadicCallExpr + } + c.emitInstOp(op) + c.compileExpr(n.Fun) + for _, arg := range n.Args { + c.compileExpr(arg) + } + c.emitInstOp(opEnd) +} + +func (c *compiler) compileUnaryExpr(n *ast.UnaryExpr) { + c.prog.insts = append(c.prog.insts, instruction{ + op: opUnaryExpr, + value: c.toUint8(n, int(n.Op)), + }) + c.compileExpr(n.X) +} + +func (c *compiler) compileStarExpr(n *ast.StarExpr) { + c.emitInstOp(opStarExpr) + c.compileExpr(n.X) +} + +func (c *compiler) compileParenExpr(n *ast.ParenExpr) { + c.emitInstOp(opParenExpr) + c.compileExpr(n.X) +} + +func (c *compiler) compileSliceExpr(n *ast.SliceExpr) { + switch { + case n.Low == nil && n.High == nil && !n.Slice3: + c.emitInstOp(opSliceExpr) + c.compileExpr(n.X) + case n.Low != nil && n.High == nil && !n.Slice3: + c.emitInstOp(opSliceFromExpr) + c.compileExpr(n.X) + c.compileExpr(n.Low) + case n.Low == nil && n.High != nil && !n.Slice3: + c.emitInstOp(opSliceToExpr) + c.compileExpr(n.X) + 
c.compileExpr(n.High) + case n.Low != nil && n.High != nil && !n.Slice3: + c.emitInstOp(opSliceFromToExpr) + c.compileExpr(n.X) + c.compileExpr(n.Low) + c.compileExpr(n.High) + case n.Low == nil && n.Slice3: + c.emitInstOp(opSliceToCapExpr) + c.compileExpr(n.X) + c.compileExpr(n.High) + c.compileExpr(n.Max) + case n.Low != nil && n.Slice3: + c.emitInstOp(opSliceFromToCapExpr) + c.compileExpr(n.X) + c.compileExpr(n.Low) + c.compileExpr(n.High) + c.compileExpr(n.Max) + default: + panic(c.errorf(n, "unexpected slice expr")) + } +} + +func (c *compiler) compileFuncType(n *ast.FuncType) { + void := n.Results == nil || len(n.Results.List) == 0 + if void { + c.emitInstOp(opVoidFuncType) + } else { + c.emitInstOp(opFuncType) + } + c.compileFieldList(n.Params) + if !void { + c.compileFieldList(n.Results) + } +} + +func (c *compiler) compileArrayType(n *ast.ArrayType) { + if n.Len == nil { + c.emitInstOp(opSliceType) + c.compileExpr(n.Elt) + } else { + c.emitInstOp(opArrayType) + c.compileExpr(n.Len) + c.compileExpr(n.Elt) + } +} + +func (c *compiler) compileMapType(n *ast.MapType) { + c.emitInstOp(opMapType) + c.compileExpr(n.Key) + c.compileExpr(n.Value) +} + +func (c *compiler) compileChanType(n *ast.ChanType) { + c.emitInst(instruction{ + op: opChanType, + value: c.toUint8(n, int(n.Dir)), + }) + c.compileExpr(n.Value) +} + +func (c *compiler) compileCompositeLit(n *ast.CompositeLit) { + if n.Type == nil { + c.emitInstOp(opCompositeLit) + } else { + c.emitInstOp(opTypedCompositeLit) + c.compileExpr(n.Type) + } + for _, elt := range n.Elts { + c.compileExpr(elt) + } + c.emitInstOp(opEnd) +} + +func (c *compiler) compileFuncLit(n *ast.FuncLit) { + c.emitInstOp(opFuncLit) + c.compileFuncType(n.Type) + c.compileBlockStmt(n.Body) +} + +func (c *compiler) compileEllipsis(n *ast.Ellipsis) { + if n.Elt == nil { + c.emitInstOp(opEllipsis) + } else { + c.emitInstOp(opTypedEllipsis) + c.compileExpr(n.Elt) + } +} + +func (c *compiler) compileKeyValueExpr(n *ast.KeyValueExpr) { + c.emitInstOp(opKeyValueExpr) + c.compileExpr(n.Key) + c.compileExpr(n.Value) +} + +func (c *compiler) compileSelectorExpr(n *ast.SelectorExpr) { + if isWildName(n.Sel.Name) { + c.emitInstOp(opSelectorExpr) + c.compileWildIdent(n.Sel, false) + c.compileExpr(n.X) + return + } + + c.prog.insts = append(c.prog.insts, instruction{ + op: opSimpleSelectorExpr, + valueIndex: c.internString(n.Sel, n.Sel.String()), + }) + c.compileExpr(n.X) +} + +func (c *compiler) compileTypeAssertExpr(n *ast.TypeAssertExpr) { + if n.Type != nil { + c.emitInstOp(opTypeAssertExpr) + c.compileExpr(n.X) + c.compileExpr(n.Type) + } else { + c.emitInstOp(opTypeSwitchAssertExpr) + c.compileExpr(n.X) + } +} + +func (c *compiler) compileStmt(n ast.Stmt) { + switch n := n.(type) { + case *ast.AssignStmt: + c.compileAssignStmt(n) + case *ast.BlockStmt: + c.compileBlockStmt(n) + case *ast.ExprStmt: + c.compileExprStmt(n) + case *ast.IfStmt: + c.compileIfStmt(n) + case *ast.CaseClause: + c.compileCaseClause(n) + case *ast.SwitchStmt: + c.compileSwitchStmt(n) + case *ast.TypeSwitchStmt: + c.compileTypeSwitchStmt(n) + case *ast.SelectStmt: + c.compileSelectStmt(n) + case *ast.ForStmt: + c.compileForStmt(n) + case *ast.RangeStmt: + c.compileRangeStmt(n) + case *ast.IncDecStmt: + c.compileIncDecStmt(n) + case *ast.EmptyStmt: + c.compileEmptyStmt(n) + case *ast.ReturnStmt: + c.compileReturnStmt(n) + case *ast.BranchStmt: + c.compileBranchStmt(n) + case *ast.LabeledStmt: + c.compileLabeledStmt(n) + case *ast.GoStmt: + c.compileGoStmt(n) + case *ast.DeferStmt: + 
c.compileDeferStmt(n) + case *ast.SendStmt: + c.compileSendStmt(n) + case *ast.DeclStmt: + c.compileDecl(n.Decl) + + default: + panic(c.errorf(n, "compileStmt: unexpected %T", n)) + } +} + +func (c *compiler) compileAssignStmt(n *ast.AssignStmt) { + if len(n.Lhs) == 1 && len(n.Rhs) == 1 { + lhsInfo := decodeWildNode(n.Lhs[0]) + rhsInfo := decodeWildNode(n.Rhs[0]) + if !lhsInfo.Seq && !rhsInfo.Seq { + c.emitInst(instruction{ + op: opAssignStmt, + value: uint8(n.Tok), + }) + c.compileExpr(n.Lhs[0]) + c.compileExpr(n.Rhs[0]) + return + } + } + + c.emitInst(instruction{ + op: opMultiAssignStmt, + value: uint8(n.Tok), + }) + for _, x := range n.Lhs { + c.compileExpr(x) + } + c.emitInstOp(opEnd) + for _, x := range n.Rhs { + c.compileExpr(x) + } + c.emitInstOp(opEnd) +} + +func (c *compiler) compileBlockStmt(n *ast.BlockStmt) { + c.emitInstOp(opBlockStmt) + for _, elt := range n.List { + c.compileStmt(elt) + } + c.emitInstOp(opEnd) +} + +func (c *compiler) compileExprStmt(n *ast.ExprStmt) { + if ident, ok := n.X.(*ast.Ident); ok && isWildName(ident.Name) { + c.compileIdent(ident) + } else { + c.emitInstOp(opExprStmt) + c.compileExpr(n.X) + } +} + +func (c *compiler) compileIfStmt(n *ast.IfStmt) { + // Check for the special case: `if $*_ ...` should match all if statements. + if ident, ok := n.Cond.(*ast.Ident); ok && n.Init == nil && isWildName(ident.Name) { + info := decodeWildName(ident.Name) + if info.Seq && info.Name == "_" { + // Set Init to Cond, change cond from $*_ to $_. + n.Init = &ast.ExprStmt{X: n.Cond} + cond := &ast.Ident{Name: encodeWildName(info.Name, false)} + n.Cond = cond + c.compileIfStmt(n) + return + } + // Named $* is harder and slower. + c.prog.insts = append(c.prog.insts, instruction{ + op: pickOp(n.Else == nil, opIfNamedOptStmt, opIfNamedOptElseStmt), + valueIndex: c.internString(ident, info.Name), + }) + c.compileStmt(n.Body) + if n.Else != nil { + c.compileStmt(n.Else) + } + return + } + + switch { + case n.Init == nil && n.Else == nil: + c.emitInstOp(opIfStmt) + c.compileExpr(n.Cond) + c.compileStmt(n.Body) + case n.Init != nil && n.Else == nil: + c.emitInstOp(opIfInitStmt) + c.compileOptStmt(n.Init) + c.compileExpr(n.Cond) + c.compileStmt(n.Body) + case n.Init == nil && n.Else != nil: + c.emitInstOp(opIfElseStmt) + c.compileExpr(n.Cond) + c.compileStmt(n.Body) + c.compileStmt(n.Else) + case n.Init != nil && n.Else != nil: + c.emitInstOp(opIfInitElseStmt) + c.compileOptStmt(n.Init) + c.compileExpr(n.Cond) + c.compileStmt(n.Body) + c.compileStmt(n.Else) + + default: + panic(c.errorf(n, "unexpected if stmt")) + } +} + +func (c *compiler) compileCommClause(n *ast.CommClause) { + c.emitInstOp(pickOp(n.Comm == nil, opDefaultCommClause, opCommClause)) + if n.Comm != nil { + c.compileStmt(n.Comm) + } + for _, x := range n.Body { + c.compileStmt(x) + } + c.emitInstOp(opEnd) +} + +func (c *compiler) compileCaseClause(n *ast.CaseClause) { + c.emitInstOp(pickOp(n.List == nil, opDefaultCaseClause, opCaseClause)) + if n.List != nil { + for _, x := range n.List { + c.compileExpr(x) + } + c.emitInstOp(opEnd) + } + for _, x := range n.Body { + c.compileStmt(x) + } + c.emitInstOp(opEnd) +} + +func (c *compiler) compileSwitchBody(n *ast.BlockStmt) { + wildcardCase := func(cc *ast.CaseClause) *ast.Ident { + if len(cc.List) != 1 || len(cc.Body) != 1 { + return nil + } + v, ok := cc.List[0].(*ast.Ident) + if !ok || !isWildName(v.Name) { + return nil + } + bodyStmt, ok := cc.Body[0].(*ast.ExprStmt) + if !ok { + return nil + } + bodyIdent, ok := bodyStmt.X.(*ast.Ident) + if !ok || 
bodyIdent.Name != "gogrep_body" { + return nil + } + return v + } + for _, cc := range n.List { + cc := cc.(*ast.CaseClause) + wildcard := wildcardCase(cc) + if wildcard == nil { + c.compileCaseClause(cc) + continue + } + c.compileWildIdent(wildcard, false) + } + c.emitInstOp(opEnd) +} + +func (c *compiler) compileSwitchStmt(n *ast.SwitchStmt) { + var op operation + switch { + case n.Init == nil && n.Tag == nil: + op = opSwitchStmt + case n.Init == nil && n.Tag != nil: + op = opSwitchTagStmt + case n.Init != nil && n.Tag == nil: + op = opSwitchInitStmt + default: + op = opSwitchInitTagStmt + } + + c.emitInstOp(op) + if n.Init != nil { + c.compileOptStmt(n.Init) + } + if n.Tag != nil { + c.compileOptExpr(n.Tag) + } + c.compileSwitchBody(n.Body) +} + +func (c *compiler) compileTypeSwitchStmt(n *ast.TypeSwitchStmt) { + c.emitInstOp(pickOp(n.Init == nil, opTypeSwitchStmt, opTypeSwitchInitStmt)) + if n.Init != nil { + c.compileOptStmt(n.Init) + } + c.compileStmt(n.Assign) + c.compileSwitchBody(n.Body) +} + +func (c *compiler) compileSelectStmt(n *ast.SelectStmt) { + c.emitInstOp(opSelectStmt) + + wildcardCase := func(cc *ast.CommClause) *ast.Ident { + if cc.Comm == nil { + return nil + } + vStmt, ok := cc.Comm.(*ast.ExprStmt) + if !ok { + return nil + } + v, ok := vStmt.X.(*ast.Ident) + if !ok || !isWildName(v.Name) { + return nil + } + bodyStmt, ok := cc.Body[0].(*ast.ExprStmt) + if !ok { + return nil + } + bodyIdent, ok := bodyStmt.X.(*ast.Ident) + if !ok || bodyIdent.Name != "gogrep_body" { + return nil + } + return v + } + for _, cc := range n.Body.List { + cc := cc.(*ast.CommClause) + wildcard := wildcardCase(cc) + if wildcard == nil { + c.compileCommClause(cc) + continue + } + c.compileWildIdent(wildcard, false) + } + c.emitInstOp(opEnd) +} + +func (c *compiler) compileForStmt(n *ast.ForStmt) { + var op operation + switch { + case n.Init == nil && n.Cond == nil && n.Post == nil: + op = opForStmt + case n.Init == nil && n.Cond == nil && n.Post != nil: + op = opForPostStmt + case n.Init == nil && n.Cond != nil && n.Post == nil: + op = opForCondStmt + case n.Init == nil && n.Cond != nil && n.Post != nil: + op = opForCondPostStmt + case n.Init != nil && n.Cond == nil && n.Post == nil: + op = opForInitStmt + case n.Init != nil && n.Cond == nil && n.Post != nil: + op = opForInitPostStmt + case n.Init != nil && n.Cond != nil && n.Post == nil: + op = opForInitCondStmt + default: + op = opForInitCondPostStmt + } + + c.emitInstOp(op) + if n.Init != nil { + c.compileOptStmt(n.Init) + } + if n.Cond != nil { + c.compileOptExpr(n.Cond) + } + if n.Post != nil { + c.compileOptStmt(n.Post) + } + c.compileBlockStmt(n.Body) +} + +func (c *compiler) compileRangeStmt(n *ast.RangeStmt) { + switch { + case n.Key == nil && n.Value == nil: + c.emitInstOp(opRangeStmt) + c.compileExpr(n.X) + c.compileStmt(n.Body) + case n.Key != nil && n.Value == nil: + c.emitInst(instruction{ + op: opRangeKeyStmt, + value: c.toUint8(n, int(n.Tok)), + }) + c.compileExpr(n.Key) + c.compileExpr(n.X) + c.compileStmt(n.Body) + case n.Key != nil && n.Value != nil: + c.emitInst(instruction{ + op: opRangeKeyValueStmt, + value: c.toUint8(n, int(n.Tok)), + }) + c.compileExpr(n.Key) + c.compileExpr(n.Value) + c.compileExpr(n.X) + c.compileStmt(n.Body) + default: + panic(c.errorf(n, "unexpected range stmt")) + } +} + +func (c *compiler) compileIncDecStmt(n *ast.IncDecStmt) { + c.prog.insts = append(c.prog.insts, instruction{ + op: opIncDecStmt, + value: c.toUint8(n, int(n.Tok)), + }) + c.compileExpr(n.X) +} + +func (c *compiler) 
compileEmptyStmt(n *ast.EmptyStmt) { + _ = n // unused + c.emitInstOp(opEmptyStmt) +} + +func (c *compiler) compileReturnStmt(n *ast.ReturnStmt) { + c.emitInstOp(opReturnStmt) + for _, x := range n.Results { + c.compileExpr(x) + } + c.emitInstOp(opEnd) +} + +func (c *compiler) compileBranchStmt(n *ast.BranchStmt) { + if n.Label != nil { + if isWildName(n.Label.Name) { + c.prog.insts = append(c.prog.insts, instruction{ + op: opLabeledBranchStmt, + value: c.toUint8(n, int(n.Tok)), + }) + c.compileWildIdent(n.Label, false) + } else { + c.prog.insts = append(c.prog.insts, instruction{ + op: opSimpleLabeledBranchStmt, + value: c.toUint8(n, int(n.Tok)), + valueIndex: c.internString(n.Label, n.Label.Name), + }) + } + return + } + c.prog.insts = append(c.prog.insts, instruction{ + op: opBranchStmt, + value: c.toUint8(n, int(n.Tok)), + }) +} + +func (c *compiler) compileLabeledStmt(n *ast.LabeledStmt) { + if isWildName(n.Label.Name) { + c.emitInstOp(opLabeledStmt) + c.compileWildIdent(n.Label, false) + c.compileStmt(n.Stmt) + return + } + + c.prog.insts = append(c.prog.insts, instruction{ + op: opSimpleLabeledStmt, + valueIndex: c.internString(n.Label, n.Label.Name), + }) + c.compileStmt(n.Stmt) +} + +func (c *compiler) compileGoStmt(n *ast.GoStmt) { + c.emitInstOp(opGoStmt) + c.compileExpr(n.Call) +} + +func (c *compiler) compileDeferStmt(n *ast.DeferStmt) { + c.emitInstOp(opDeferStmt) + c.compileExpr(n.Call) +} + +func (c *compiler) compileSendStmt(n *ast.SendStmt) { + c.emitInstOp(opSendStmt) + c.compileExpr(n.Chan) + c.compileExpr(n.Value) +} + +func (c *compiler) compileStmtSlice(stmts stmtSlice) { + c.emitInstOp(opMultiStmt) + for _, n := range stmts { + c.compileStmt(n) + } + c.emitInstOp(opEnd) +} + +func (c *compiler) compileExprSlice(exprs exprSlice) { + c.emitInstOp(opMultiExpr) + for _, n := range exprs { + c.compileExpr(n) + } + c.emitInstOp(opEnd) +} + +func pickOp(cond bool, ifTrue, ifFalse operation) operation { + if cond { + return ifTrue + } + return ifFalse +} + +func fitsUint8(v int) bool { + return v >= 0 && v <= 0xff +} diff --git a/vendor/github.com/quasilyte/go-ruleguard/internal/gogrep/gen_operations.go b/vendor/github.com/quasilyte/go-ruleguard/internal/gogrep/gen_operations.go new file mode 100644 index 000000000..dbf2ae9a7 --- /dev/null +++ b/vendor/github.com/quasilyte/go-ruleguard/internal/gogrep/gen_operations.go @@ -0,0 +1,311 @@ +// +build main + +package main + +import ( + "bytes" + "fmt" + "go/format" + "io/ioutil" + "log" + "strings" + "text/template" +) + +var opPrototypes = []operationProto{ + {name: "Node", tag: "Node"}, + {name: "NamedNode", tag: "Node", valueIndex: "strings | wildcard name"}, + {name: "NodeSeq"}, + {name: "NamedNodeSeq", valueIndex: "strings | wildcard name"}, + {name: "OptNode"}, + {name: "NamedOptNode", valueIndex: "strings | wildcard name"}, + + {name: "MultiStmt", tag: "StmtList", args: "stmts...", example: "f(); g()"}, + {name: "MultiExpr", tag: "ExprList", args: "exprs...", example: "f(), g()"}, + + {name: "End"}, + + {name: "BasicLit", tag: "BasicLit", valueIndex: "ifaces | parsed literal value"}, + {name: "StrictIntLit", tag: "BasicLit", valueIndex: "strings | raw literal value"}, + {name: "StrictFloatLit", tag: "BasicLit", valueIndex: "strings | raw literal value"}, + {name: "StrictCharLit", tag: "BasicLit", valueIndex: "strings | raw literal value"}, + {name: "StrictStringLit", tag: "BasicLit", valueIndex: "strings | raw literal value"}, + {name: "StrictComplexLit", tag: "BasicLit", valueIndex: "strings | raw literal value"}, + + 
{name: "Ident", tag: "Ident", valueIndex: "strings | ident name"}, + + {name: "IndexExpr", tag: "IndexExpr", args: "x expr"}, + + {name: "SliceExpr", tag: "SliceExpr", args: "x"}, + {name: "SliceFromExpr", tag: "SliceExpr", args: "x from", example: "x[from:]"}, + {name: "SliceToExpr", tag: "SliceExpr", args: "x to", example: "x[:to]"}, + {name: "SliceFromToExpr", tag: "SliceExpr", args: "x from to", example: "x[from:to]"}, + {name: "SliceToCapExpr", tag: "SliceExpr", args: "x from cap", example: "x[:from:cap]"}, + {name: "SliceFromToCapExpr", tag: "SliceExpr", args: "x from to cap", example: "x[from:to:cap]"}, + + {name: "FuncLit", tag: "FuncLit", args: "type block"}, + + {name: "CompositeLit", tag: "CompositeLit", args: "elts...", example: "{elts...}"}, + {name: "TypedCompositeLit", tag: "CompositeLit", args: "typ elts...", example: "typ{elts...}"}, + + {name: "SimpleSelectorExpr", tag: "SelectorExpr", args: "x", valueIndex: "strings | selector name"}, + {name: "SelectorExpr", tag: "SelectorExpr", args: "x sel"}, + {name: "TypeAssertExpr", tag: "TypeAssertExpr", args: "x typ"}, + {name: "TypeSwitchAssertExpr", tag: "TypeAssertExpr", args: "x"}, + + {name: "VoidFuncType", tag: "FuncType", args: "params"}, + {name: "FuncType", tag: "FuncType", args: "params results"}, + {name: "ArrayType", tag: "ArrayType", args: "length elem"}, + {name: "SliceType", tag: "ArrayType", args: "elem"}, + {name: "MapType", tag: "MapType", args: "key value"}, + {name: "ChanType", tag: "ChanType", args: "value", value: "ast.ChanDir | channel direction"}, + {name: "KeyValueExpr", tag: "KeyValueExpr", args: "key value"}, + + {name: "Ellipsis", tag: "Ellipsis"}, + {name: "TypedEllipsis", tag: "Ellipsis", args: "type"}, + + {name: "StarExpr", tag: "StarExpr", args: "x"}, + {name: "UnaryExpr", tag: "UnaryExpr", args: "x", value: "token.Token | unary operator"}, + {name: "BinaryExpr", tag: "BinaryExpr", args: "x y", value: "token.Token | binary operator"}, + {name: "ParenExpr", tag: "ParenExpr", args: "x"}, + + {name: "VariadicCallExpr", tag: "CallExpr", args: "fn args...", example: "f(1, xs...)"}, + {name: "CallExpr", tag: "CallExpr", args: "fn args...", example: "f(1, xs)"}, + + {name: "AssignStmt", tag: "AssignStmt", args: "lhs rhs", value: "token.Token | ':=' or '='", example: "lhs := rhs()"}, + {name: "MultiAssignStmt", tag: "AssignStmt", args: "lhs... 
rhs...", value: "token.Token | ':=' or '='", example: "lhs1, lhs2 := rhs()"}, + + {name: "BranchStmt", tag: "BranchStmt", args: "x", value: "token.Token | branch kind"}, + {name: "SimpleLabeledBranchStmt", tag: "BranchStmt", args: "x", valueIndex: "strings | label name", value: "token.Token | branch kind"}, + {name: "LabeledBranchStmt", tag: "BranchStmt", args: "label x", value: "token.Token | branch kind"}, + {name: "SimpleLabeledStmt", tag: "LabeledStmt", args: "x", valueIndex: "strings | label name"}, + {name: "LabeledStmt", tag: "LabeledStmt", args: "label x"}, + + {name: "BlockStmt", tag: "BlockStmt", args: "body..."}, + {name: "ExprStmt", tag: "ExprStmt", args: "x"}, + + {name: "GoStmt", tag: "GoStmt", args: "x"}, + {name: "DeferStmt", tag: "DeferStmt", args: "x"}, + + {name: "SendStmt", tag: "SendStmt", args: "ch value"}, + + {name: "EmptyStmt", tag: "EmptyStmt"}, + {name: "IncDecStmt", tag: "IncDecStmt", args: "x", value: "token.Token | '++' or '--'"}, + {name: "ReturnStmt", tag: "ReturnStmt", args: "results..."}, + + {name: "IfStmt", tag: "IfStmt", args: "cond block", example: "if cond {}"}, + {name: "IfInitStmt", tag: "IfStmt", args: "init cond block", example: "if init; cond {}"}, + {name: "IfElseStmt", tag: "IfStmt", args: "cond block else", example: "if cond {} else ..."}, + {name: "IfInitElseStmt", tag: "IfStmt", args: "init cond block else", example: "if init; cond {} else ..."}, + {name: "IfNamedOptStmt", tag: "IfStmt", args: "block", valueIndex: "strings | wildcard name", example: "if $*x {}"}, + {name: "IfNamedOptElseStmt", tag: "IfStmt", args: "block else", valueIndex: "strings | wildcard name", example: "if $*x {} else ..."}, + + {name: "SwitchStmt", tag: "SwitchStmt", args: "body...", example: "switch {}"}, + {name: "SwitchTagStmt", tag: "SwitchStmt", args: "tag body...", example: "switch tag {}"}, + {name: "SwitchInitStmt", tag: "SwitchStmt", args: "init body...", example: "switch init; {}"}, + {name: "SwitchInitTagStmt", tag: "SwitchStmt", args: "init tag body...", example: "switch init; tag {}"}, + + {name: "SelectStmt", tag: "SelectStmt", args: "body..."}, + + {name: "TypeSwitchStmt", tag: "TypeSwitchStmt", args: "x block", example: "switch x.(type) {}"}, + {name: "TypeSwitchInitStmt", tag: "TypeSwitchStmt", args: "init x block", example: "switch init; x.(type) {}"}, + + {name: "CaseClause", tag: "CaseClause", args: "values... 
body..."}, + {name: "DefaultCaseClause", tag: "CaseClause", args: "body..."}, + + {name: "CommClause", tag: "CommClause", args: "comm body..."}, + {name: "DefaultCommClause", tag: "CommClause", args: "body..."}, + + {name: "ForStmt", tag: "ForStmt", args: "blocl", example: "for {}"}, + {name: "ForPostStmt", tag: "ForStmt", args: "post block", example: "for ; ; post {}"}, + {name: "ForCondStmt", tag: "ForStmt", args: "cond block", example: "for ; cond; {}"}, + {name: "ForCondPostStmt", tag: "ForStmt", args: "cond post block", example: "for ; cond; post {}"}, + {name: "ForInitStmt", tag: "ForStmt", args: "init block", example: "for init; ; {}"}, + {name: "ForInitPostStmt", tag: "ForStmt", args: "init post block", example: "for init; ; post {}"}, + {name: "ForInitCondStmt", tag: "ForStmt", args: "init cond block", example: "for init; cond; {}"}, + {name: "ForInitCondPostStmt", tag: "ForStmt", args: "init cond post block", example: "for init; cond; post {}"}, + + {name: "RangeStmt", tag: "RangeStmt", args: "x block", example: "for range x {}"}, + {name: "RangeKeyStmt", tag: "RangeStmt", args: "key x block", value: "token.Token | ':=' or '='", example: "for key := range x {}"}, + {name: "RangeKeyValueStmt", tag: "RangeStmt", args: "key value x block", value: "token.Token | ':=' or '='", example: "for key, value := range x {}"}, + + {name: "FieldList", args: "fields..."}, + {name: "UnnamedField", args: "typ", example: "type"}, + {name: "SimpleField", args: "typ", valueIndex: "strings | field name", example: "name type"}, + {name: "Field", args: "name typ", example: "$name type"}, + {name: "MultiField", args: "names... typ", example: "name1, name2 type"}, + + {name: "ValueInitSpec", tag: "ValueSpec", args: "lhs... rhs...", example: "lhs = rhs"}, + {name: "TypedValueInitSpec", tag: "ValueSpec", args: "lhs... type rhs...", example: "lhs typ = rhs"}, + {name: "TypedValueSpec", tag: "ValueSpec", args: "lhs... type", example: "lhs typ"}, + + {name: "TypeSpec", tag: "TypeSpec", args: "name type", example: "name type"}, + {name: "TypeAliasSpec", tag: "TypeSpec", args: "name type", example: "name = type"}, + + {name: "FuncDecl", tag: "FuncDecl", args: "name type block"}, + {name: "MethodDecl", tag: "FuncDecl", args: "recv name type block"}, + {name: "FuncProtoDecl", tag: "FuncDecl", args: "name type"}, + {name: "MethodProtoDecl", tag: "FuncDecl", args: "recv name type"}, + + {name: "ConstDecl", tag: "GenDecl", args: "valuespecs..."}, + {name: "VarDecl", tag: "GenDecl", args: "valuespecs..."}, + {name: "TypeDecl", tag: "GenDecl", args: "typespecs..."}, + + {name: "EmptyPackage", tag: "File", args: "name"}, +} + +type operationProto struct { + name string + value string + valueIndex string + tag string + example string + args string +} + +type operationInfo struct { + Example string + Args string + Enum uint8 + TagName string + Name string + ValueDoc string + ValueIndexDoc string + ExtraValueKindName string + ValueKindName string + VariadicMap uint64 + NumArgs int +} + +const stackUnchanged = "" + +var fileTemplate = template.Must(template.New("operations.go").Parse(`// Code generated "gen_operations.go"; DO NOT EDIT. 
+ +package gogrep + +import ( + "github.com/quasilyte/go-ruleguard/nodetag" +) + +//go:generate stringer -type=operation -trimprefix=op +type operation uint8 + +const ( + opInvalid operation = 0 +{{ range .Operations }} + // Tag: {{.TagName}} + {{- if .Args}}{{print "\n"}}// Args: {{.Args}}{{end}} + {{- if .Example}}{{print "\n"}}// Example: {{.Example}}{{end}} + {{- if .ValueDoc}}{{print "\n"}}// Value: {{.ValueDoc}}{{end}} + {{- if .ValueIndexDoc}}{{print "\n"}}// ValueIndex: {{.ValueIndexDoc}}{{end}} + op{{ .Name }} operation = {{.Enum}} +{{ end -}} +) + +type operationInfo struct { + Tag nodetag.Value + NumArgs int + ValueKind valueKind + ExtraValueKind valueKind + VariadicMap bitmap64 +} + +var operationInfoTable = [256]operationInfo{ + opInvalid: {}, + +{{ range .Operations -}} + op{{.Name}}: { + Tag: nodetag.{{.TagName}}, + NumArgs: {{.NumArgs}}, + ValueKind: {{.ValueKindName}}, + ExtraValueKind: {{.ExtraValueKindName}}, + VariadicMap: {{.VariadicMap}}, // {{printf "%b" .VariadicMap}} + }, +{{ end }} +} +`)) + +func main() { + operations := make([]operationInfo, len(opPrototypes)) + for i, proto := range opPrototypes { + enum := uint8(i + 1) + + tagName := proto.tag + if tagName == "" { + tagName = "Unknown" + } + + variadicMap := uint64(0) + numArgs := 0 + if proto.args != "" { + args := strings.Split(proto.args, " ") + numArgs = len(args) + for i, arg := range args { + isVariadic := strings.HasSuffix(arg, "...") + if isVariadic { + variadicMap |= 1 << i + } + } + } + + extraValueKindName := "emptyValue" + if proto.valueIndex != "" { + parts := strings.Split(proto.valueIndex, " | ") + typ := parts[0] + switch typ { + case "strings": + extraValueKindName = "stringValue" + case "ifaces": + extraValueKindName = "ifaceValue" + default: + panic(fmt.Sprintf("%s: unexpected %s type", proto.name, typ)) + } + } + valueKindName := "emptyValue" + if proto.value != "" { + parts := strings.Split(proto.value, " | ") + typ := parts[0] + switch typ { + case "token.Token": + valueKindName = "tokenValue" + case "ast.ChanDir": + valueKindName = "chandirValue" + default: + panic(fmt.Sprintf("%s: unexpected %s type", proto.name, typ)) + } + } + + operations[i] = operationInfo{ + Example: proto.example, + Args: proto.args, + Enum: enum, + TagName: tagName, + Name: proto.name, + ValueDoc: proto.value, + ValueIndexDoc: proto.valueIndex, + NumArgs: numArgs, + VariadicMap: variadicMap, + ExtraValueKindName: extraValueKindName, + ValueKindName: valueKindName, + } + } + + var buf bytes.Buffer + err := fileTemplate.Execute(&buf, map[string]interface{}{ + "Operations": operations, + }) + if err != nil { + log.Panicf("execute template: %v", err) + } + writeFile("operations.gen.go", buf.Bytes()) +} + +func writeFile(filename string, data []byte) { + pretty, err := format.Source(data) + if err != nil { + log.Panicf("gofmt: %v", err) + } + if err := ioutil.WriteFile(filename, pretty, 0666); err != nil { + log.Panicf("write %s: %v", filename, err) + } +} diff --git a/vendor/github.com/quasilyte/go-ruleguard/internal/gogrep/gogrep.go b/vendor/github.com/quasilyte/go-ruleguard/internal/gogrep/gogrep.go new file mode 100644 index 000000000..e0d3d0696 --- /dev/null +++ b/vendor/github.com/quasilyte/go-ruleguard/internal/gogrep/gogrep.go @@ -0,0 +1,66 @@ +package gogrep + +import ( + "go/ast" + "go/token" + + "github.com/quasilyte/go-ruleguard/nodetag" +) + +func IsEmptyNodeSlice(n ast.Node) bool { + if list, ok := n.(nodeSlice); ok { + return list.len() == 0 + } + return false +} + +// MatchData describes a successful 
pattern match. +type MatchData struct { + Node ast.Node + Capture []CapturedNode +} + +type CapturedNode struct { + Name string + Node ast.Node +} + +func (data MatchData) CapturedByName(name string) (ast.Node, bool) { + return findNamed(data.Capture, name) +} + +type Pattern struct { + m *matcher +} + +func (p *Pattern) NodeTag() nodetag.Value { + return operationInfoTable[p.m.prog.insts[0].op].Tag +} + +// MatchNode calls cb if n matches a pattern. +func (p *Pattern) MatchNode(n ast.Node, cb func(MatchData)) { + p.m.MatchNode(n, cb) +} + +// Clone creates a pattern copy. +func (p *Pattern) Clone() *Pattern { + clone := *p + clone.m = &matcher{} + *clone.m = *p.m + clone.m.capture = make([]CapturedNode, 0, 8) + return &clone +} + +func Compile(fset *token.FileSet, src string, strict bool) (*Pattern, error) { + n, err := parseExpr(fset, src) + if err != nil { + return nil, err + } + var c compiler + prog, err := c.Compile(fset, n, strict) + if err != nil { + return nil, err + } + m := newMatcher(prog) + return &Pattern{m: m}, nil +} diff --git a/vendor/github.com/quasilyte/go-ruleguard/internal/gogrep/instructions.go b/vendor/github.com/quasilyte/go-ruleguard/internal/gogrep/instructions.go new file mode 100644 index 000000000..5d286eaec --- /dev/null +++ b/vendor/github.com/quasilyte/go-ruleguard/internal/gogrep/instructions.go @@ -0,0 +1,107 @@ +package gogrep + +import ( + "fmt" + "go/ast" + "go/token" + "strings" +) + +type bitmap64 uint64 + +func (m bitmap64) IsSet(pos int) bool { + return m&(1<= sliceLen { + break + } + } +} + +func (m *matcher) matchNamed(name string, n ast.Node) bool { + prev, ok := findNamed(m.capture, name) + if !ok { + // First occurrence, record value. + m.capture = append(m.capture, CapturedNode{Name: name, Node: n}) + return true + } + return equalNodes(prev, n) +} + +func (m *matcher) matchNodeWithInst(inst instruction, n ast.Node) bool { + switch inst.op { + case opNode: + return n != nil + case opOptNode: + return true + + case opNamedNode: + return n != nil && m.matchNamed(m.stringValue(inst), n) + case opNamedOptNode: + return m.matchNamed(m.stringValue(inst), n) + + case opBasicLit: + n, ok := n.(*ast.BasicLit) + return ok && m.ifaceValue(inst) == literalValue(n) + + case opStrictIntLit: + n, ok := n.(*ast.BasicLit) + return ok && n.Kind == token.INT && m.stringValue(inst) == n.Value + case opStrictFloatLit: + n, ok := n.(*ast.BasicLit) + return ok && n.Kind == token.FLOAT && m.stringValue(inst) == n.Value + case opStrictCharLit: + n, ok := n.(*ast.BasicLit) + return ok && n.Kind == token.CHAR && m.stringValue(inst) == n.Value + case opStrictStringLit: + n, ok := n.(*ast.BasicLit) + return ok && n.Kind == token.STRING && m.stringValue(inst) == n.Value + case opStrictComplexLit: + n, ok := n.(*ast.BasicLit) + return ok && n.Kind == token.IMAG && m.stringValue(inst) == n.Value + + case opIdent: + n, ok := n.(*ast.Ident) + return ok && m.stringValue(inst) == n.Name + + case opBinaryExpr: + n, ok := n.(*ast.BinaryExpr) + return ok && n.Op == token.Token(inst.value) && + m.matchNode(n.X) && m.matchNode(n.Y) + + case opUnaryExpr: + n, ok := n.(*ast.UnaryExpr) + return ok && n.Op == token.Token(inst.value) && m.matchNode(n.X) + + case opStarExpr: + n, ok := n.(*ast.StarExpr) + return ok && m.matchNode(n.X) + + case opVariadicCallExpr: + n, ok := n.(*ast.CallExpr) + return ok && n.Ellipsis.IsValid() && m.matchNode(n.Fun) && m.matchExprSlice(n.Args) + case opCallExpr: + n, ok := n.(*ast.CallExpr) + return ok && !n.Ellipsis.IsValid() && m.matchNode(n.Fun) && 
m.matchExprSlice(n.Args) + + case opSimpleSelectorExpr: + n, ok := n.(*ast.SelectorExpr) + return ok && m.stringValue(inst) == n.Sel.Name && m.matchNode(n.X) + case opSelectorExpr: + n, ok := n.(*ast.SelectorExpr) + return ok && m.matchNode(n.Sel) && m.matchNode(n.X) + + case opTypeAssertExpr: + n, ok := n.(*ast.TypeAssertExpr) + return ok && m.matchNode(n.X) && m.matchNode(n.Type) + case opTypeSwitchAssertExpr: + n, ok := n.(*ast.TypeAssertExpr) + return ok && n.Type == nil && m.matchNode(n.X) + + case opSliceExpr: + n, ok := n.(*ast.SliceExpr) + return ok && n.Low == nil && n.High == nil && m.matchNode(n.X) + case opSliceFromExpr: + n, ok := n.(*ast.SliceExpr) + return ok && n.Low != nil && n.High == nil && !n.Slice3 && + m.matchNode(n.X) && m.matchNode(n.Low) + case opSliceToExpr: + n, ok := n.(*ast.SliceExpr) + return ok && n.Low == nil && n.High != nil && !n.Slice3 && + m.matchNode(n.X) && m.matchNode(n.High) + case opSliceFromToExpr: + n, ok := n.(*ast.SliceExpr) + return ok && n.Low != nil && n.High != nil && !n.Slice3 && + m.matchNode(n.X) && m.matchNode(n.Low) && m.matchNode(n.High) + case opSliceToCapExpr: + n, ok := n.(*ast.SliceExpr) + return ok && n.Low == nil && n.High != nil && n.Max != nil && + m.matchNode(n.X) && m.matchNode(n.High) && m.matchNode(n.Max) + case opSliceFromToCapExpr: + n, ok := n.(*ast.SliceExpr) + return ok && n.Low != nil && n.High != nil && n.Max != nil && + m.matchNode(n.X) && m.matchNode(n.Low) && m.matchNode(n.High) && m.matchNode(n.Max) + + case opIndexExpr: + n, ok := n.(*ast.IndexExpr) + return ok && m.matchNode(n.X) && m.matchNode(n.Index) + + case opKeyValueExpr: + n, ok := n.(*ast.KeyValueExpr) + return ok && m.matchNode(n.Key) && m.matchNode(n.Value) + + case opParenExpr: + n, ok := n.(*ast.ParenExpr) + return ok && m.matchNode(n.X) + + case opEllipsis: + n, ok := n.(*ast.Ellipsis) + return ok && n.Elt == nil + case opTypedEllipsis: + n, ok := n.(*ast.Ellipsis) + return ok && n.Elt != nil && m.matchNode(n.Elt) + + case opSliceType: + n, ok := n.(*ast.ArrayType) + return ok && n.Len == nil && m.matchNode(n.Elt) + case opArrayType: + n, ok := n.(*ast.ArrayType) + return ok && n.Len != nil && m.matchNode(n.Len) && m.matchNode(n.Elt) + case opMapType: + n, ok := n.(*ast.MapType) + return ok && m.matchNode(n.Key) && m.matchNode(n.Value) + case opChanType: + n, ok := n.(*ast.ChanType) + return ok && ast.ChanDir(inst.value) == n.Dir && m.matchNode(n.Value) + case opVoidFuncType: + n, ok := n.(*ast.FuncType) + return ok && n.Results == nil && m.matchNode(n.Params) + case opFuncType: + n, ok := n.(*ast.FuncType) + return ok && n.Results != nil && m.matchNode(n.Params) && m.matchNode(n.Results) + + case opCompositeLit: + n, ok := n.(*ast.CompositeLit) + return ok && n.Type == nil && m.matchExprSlice(n.Elts) + case opTypedCompositeLit: + n, ok := n.(*ast.CompositeLit) + return ok && n.Type != nil && m.matchNode(n.Type) && m.matchExprSlice(n.Elts) + + case opUnnamedField: + n, ok := n.(*ast.Field) + return ok && len(n.Names) == 0 && m.matchNode(n.Type) + case opSimpleField: + n, ok := n.(*ast.Field) + return ok && len(n.Names) == 1 && m.stringValue(inst) == n.Names[0].Name && m.matchNode(n.Type) + case opField: + n, ok := n.(*ast.Field) + return ok && len(n.Names) == 1 && m.matchNode(n.Names[0]) && m.matchNode(n.Type) + case opMultiField: + n, ok := n.(*ast.Field) + return ok && len(n.Names) >= 2 && m.matchIdentSlice(n.Names) && m.matchNode(n.Type) + case opFieldList: + n, ok := n.(*ast.FieldList) + return ok && m.matchFieldSlice(n.List) + + case 
opFuncLit: + n, ok := n.(*ast.FuncLit) + return ok && m.matchNode(n.Type) && m.matchNode(n.Body) + + case opAssignStmt: + n, ok := n.(*ast.AssignStmt) + return ok && token.Token(inst.value) == n.Tok && + len(n.Lhs) == 1 && m.matchNode(n.Lhs[0]) && + len(n.Rhs) == 1 && m.matchNode(n.Rhs[0]) + case opMultiAssignStmt: + n, ok := n.(*ast.AssignStmt) + return ok && token.Token(inst.value) == n.Tok && + m.matchExprSlice(n.Lhs) && m.matchExprSlice(n.Rhs) + + case opExprStmt: + n, ok := n.(*ast.ExprStmt) + return ok && m.matchNode(n.X) + + case opGoStmt: + n, ok := n.(*ast.GoStmt) + return ok && m.matchNode(n.Call) + case opDeferStmt: + n, ok := n.(*ast.DeferStmt) + return ok && m.matchNode(n.Call) + case opSendStmt: + n, ok := n.(*ast.SendStmt) + return ok && m.matchNode(n.Chan) && m.matchNode(n.Value) + + case opBlockStmt: + n, ok := n.(*ast.BlockStmt) + return ok && m.matchStmtSlice(n.List) + + case opIfStmt: + n, ok := n.(*ast.IfStmt) + return ok && n.Init == nil && n.Else == nil && + m.matchNode(n.Cond) && m.matchNode(n.Body) + case opIfElseStmt: + n, ok := n.(*ast.IfStmt) + return ok && n.Init == nil && n.Else != nil && + m.matchNode(n.Cond) && m.matchNode(n.Body) && m.matchNode(n.Else) + case opIfInitStmt: + n, ok := n.(*ast.IfStmt) + return ok && n.Else == nil && + m.matchNode(n.Init) && m.matchNode(n.Cond) && m.matchNode(n.Body) + case opIfInitElseStmt: + n, ok := n.(*ast.IfStmt) + return ok && n.Else != nil && + m.matchNode(n.Init) && m.matchNode(n.Cond) && m.matchNode(n.Body) && m.matchNode(n.Else) + + case opIfNamedOptStmt: + n, ok := n.(*ast.IfStmt) + return ok && n.Else == nil && m.matchNode(n.Body) && + m.matchNamed(m.stringValue(inst), toStmtSlice(n.Cond, n.Init)) + case opIfNamedOptElseStmt: + n, ok := n.(*ast.IfStmt) + return ok && n.Else != nil && m.matchNode(n.Body) && m.matchNode(n.Else) && + m.matchNamed(m.stringValue(inst), toStmtSlice(n.Cond, n.Init)) + + case opCaseClause: + n, ok := n.(*ast.CaseClause) + return ok && n.List != nil && m.matchExprSlice(n.List) && m.matchStmtSlice(n.Body) + case opDefaultCaseClause: + n, ok := n.(*ast.CaseClause) + return ok && n.List == nil && m.matchStmtSlice(n.Body) + + case opSwitchStmt: + n, ok := n.(*ast.SwitchStmt) + return ok && n.Init == nil && n.Tag == nil && m.matchStmtSlice(n.Body.List) + case opSwitchTagStmt: + n, ok := n.(*ast.SwitchStmt) + return ok && n.Init == nil && m.matchNode(n.Tag) && m.matchStmtSlice(n.Body.List) + case opSwitchInitStmt: + n, ok := n.(*ast.SwitchStmt) + return ok && n.Tag == nil && m.matchNode(n.Init) && m.matchStmtSlice(n.Body.List) + case opSwitchInitTagStmt: + n, ok := n.(*ast.SwitchStmt) + return ok && m.matchNode(n.Init) && m.matchNode(n.Tag) && m.matchStmtSlice(n.Body.List) + + case opTypeSwitchStmt: + n, ok := n.(*ast.TypeSwitchStmt) + return ok && n.Init == nil && m.matchNode(n.Assign) && m.matchStmtSlice(n.Body.List) + case opTypeSwitchInitStmt: + n, ok := n.(*ast.TypeSwitchStmt) + return ok && m.matchNode(n.Init) && + m.matchNode(n.Assign) && m.matchStmtSlice(n.Body.List) + + case opCommClause: + n, ok := n.(*ast.CommClause) + return ok && n.Comm != nil && m.matchNode(n.Comm) && m.matchStmtSlice(n.Body) + case opDefaultCommClause: + n, ok := n.(*ast.CommClause) + return ok && n.Comm == nil && m.matchStmtSlice(n.Body) + + case opSelectStmt: + n, ok := n.(*ast.SelectStmt) + return ok && m.matchStmtSlice(n.Body.List) + + case opRangeStmt: + n, ok := n.(*ast.RangeStmt) + return ok && n.Key == nil && n.Value == nil && m.matchNode(n.X) && m.matchNode(n.Body) + case opRangeKeyStmt: + n, ok := 
n.(*ast.RangeStmt) + return ok && n.Key != nil && n.Value == nil && token.Token(inst.value) == n.Tok && + m.matchNode(n.Key) && m.matchNode(n.X) && m.matchNode(n.Body) + case opRangeKeyValueStmt: + n, ok := n.(*ast.RangeStmt) + return ok && n.Key != nil && n.Value != nil && token.Token(inst.value) == n.Tok && + m.matchNode(n.Key) && m.matchNode(n.Value) && m.matchNode(n.X) && m.matchNode(n.Body) + + case opForStmt: + n, ok := n.(*ast.ForStmt) + return ok && n.Init == nil && n.Cond == nil && n.Post == nil && + m.matchNode(n.Body) + case opForPostStmt: + n, ok := n.(*ast.ForStmt) + return ok && n.Init == nil && n.Cond == nil && n.Post != nil && + m.matchNode(n.Post) && m.matchNode(n.Body) + case opForCondStmt: + n, ok := n.(*ast.ForStmt) + return ok && n.Init == nil && n.Cond != nil && n.Post == nil && + m.matchNode(n.Cond) && m.matchNode(n.Body) + case opForCondPostStmt: + n, ok := n.(*ast.ForStmt) + return ok && n.Init == nil && n.Cond != nil && n.Post != nil && + m.matchNode(n.Cond) && m.matchNode(n.Post) && m.matchNode(n.Body) + case opForInitStmt: + n, ok := n.(*ast.ForStmt) + return ok && n.Init != nil && n.Cond == nil && n.Post == nil && + m.matchNode(n.Init) && m.matchNode(n.Body) + case opForInitPostStmt: + n, ok := n.(*ast.ForStmt) + return ok && n.Init != nil && n.Cond == nil && n.Post != nil && + m.matchNode(n.Init) && m.matchNode(n.Post) && m.matchNode(n.Body) + case opForInitCondStmt: + n, ok := n.(*ast.ForStmt) + return ok && n.Init != nil && n.Cond != nil && n.Post == nil && + m.matchNode(n.Init) && m.matchNode(n.Cond) && m.matchNode(n.Body) + case opForInitCondPostStmt: + n, ok := n.(*ast.ForStmt) + return ok && m.matchNode(n.Init) && m.matchNode(n.Cond) && m.matchNode(n.Post) && m.matchNode(n.Body) + + case opIncDecStmt: + n, ok := n.(*ast.IncDecStmt) + return ok && token.Token(inst.value) == n.Tok && m.matchNode(n.X) + + case opReturnStmt: + n, ok := n.(*ast.ReturnStmt) + return ok && m.matchExprSlice(n.Results) + + case opLabeledStmt: + n, ok := n.(*ast.LabeledStmt) + return ok && m.matchNode(n.Label) && m.matchNode(n.Stmt) + case opSimpleLabeledStmt: + n, ok := n.(*ast.LabeledStmt) + return ok && m.stringValue(inst) == n.Label.Name && m.matchNode(n.Stmt) + + case opLabeledBranchStmt: + n, ok := n.(*ast.BranchStmt) + return ok && n.Label != nil && token.Token(inst.value) == n.Tok && m.matchNode(n.Label) + case opSimpleLabeledBranchStmt: + n, ok := n.(*ast.BranchStmt) + return ok && n.Label != nil && m.stringValue(inst) == n.Label.Name && token.Token(inst.value) == n.Tok + case opBranchStmt: + n, ok := n.(*ast.BranchStmt) + return ok && n.Label == nil && token.Token(inst.value) == n.Tok + + case opEmptyStmt: + _, ok := n.(*ast.EmptyStmt) + return ok + + case opFuncDecl: + n, ok := n.(*ast.FuncDecl) + return ok && n.Recv == nil && n.Body != nil && + m.matchNode(n.Name) && m.matchNode(n.Type) && m.matchNode(n.Body) + case opFuncProtoDecl: + n, ok := n.(*ast.FuncDecl) + return ok && n.Recv == nil && n.Body == nil && + m.matchNode(n.Name) && m.matchNode(n.Type) + case opMethodDecl: + n, ok := n.(*ast.FuncDecl) + return ok && n.Recv != nil && n.Body != nil && + m.matchNode(n.Recv) && m.matchNode(n.Name) && m.matchNode(n.Type) && m.matchNode(n.Body) + case opMethodProtoDecl: + n, ok := n.(*ast.FuncDecl) + return ok && n.Recv != nil && n.Body == nil && + m.matchNode(n.Recv) && m.matchNode(n.Name) && m.matchNode(n.Type) + + case opValueInitSpec: + n, ok := n.(*ast.ValueSpec) + return ok && len(n.Values) != 0 && n.Type == nil && + m.matchIdentSlice(n.Names) && 
m.matchExprSlice(n.Values) + case opTypedValueSpec: + n, ok := n.(*ast.ValueSpec) + return ok && len(n.Values) == 0 && n.Type != nil && + m.matchIdentSlice(n.Names) && m.matchNode(n.Type) + case opTypedValueInitSpec: + n, ok := n.(*ast.ValueSpec) + return ok && len(n.Values) != 0 && n.Type != nil && + m.matchIdentSlice(n.Names) && m.matchNode(n.Type) && m.matchExprSlice(n.Values) + + case opTypeSpec: + n, ok := n.(*ast.TypeSpec) + return ok && !n.Assign.IsValid() && m.matchNode(n.Name) && m.matchNode(n.Type) + case opTypeAliasSpec: + n, ok := n.(*ast.TypeSpec) + return ok && n.Assign.IsValid() && m.matchNode(n.Name) && m.matchNode(n.Type) + + case opConstDecl: + n, ok := n.(*ast.GenDecl) + return ok && n.Tok == token.CONST && m.matchSpecSlice(n.Specs) + case opVarDecl: + n, ok := n.(*ast.GenDecl) + return ok && n.Tok == token.VAR && m.matchSpecSlice(n.Specs) + case opTypeDecl: + n, ok := n.(*ast.GenDecl) + return ok && n.Tok == token.TYPE && m.matchSpecSlice(n.Specs) + + case opEmptyPackage: + n, ok := n.(*ast.File) + return ok && len(n.Imports) == 0 && len(n.Decls) == 0 && m.matchNode(n.Name) + + default: + panic(fmt.Sprintf("unexpected op %s", inst.op)) + } +} + +func (m *matcher) matchNode(n ast.Node) bool { + return m.matchNodeWithInst(m.nextInst(), n) +} + +func (m *matcher) matchStmtSlice(stmts []ast.Stmt) bool { + matched, _ := m.matchNodeList(stmtSlice(stmts), false) + return matched != nil +} + +func (m *matcher) matchExprSlice(exprs []ast.Expr) bool { + matched, _ := m.matchNodeList(exprSlice(exprs), false) + return matched != nil +} + +func (m *matcher) matchFieldSlice(fields []*ast.Field) bool { + matched, _ := m.matchNodeList(fieldSlice(fields), false) + return matched != nil +} + +func (m *matcher) matchIdentSlice(idents []*ast.Ident) bool { + matched, _ := m.matchNodeList(identSlice(idents), false) + return matched != nil +} + +func (m *matcher) matchSpecSlice(specs []ast.Spec) bool { + matched, _ := m.matchNodeList(specSlice(specs), false) + return matched != nil +} + +// matchNodeList matches two lists of nodes. It uses a common algorithm to match +// wildcard patterns with any number of nodes without recursion. +func (m *matcher) matchNodeList(nodes nodeSlice, partial bool) (ast.Node, int) { + sliceLen := nodes.len() + inst := m.nextInst() + if inst.op == opEnd { + if sliceLen == 0 { + return nodes, 0 + } + return nil, -1 + } + pcBase := m.pc + pcNext := 0 + j := 0 + jNext := 0 + partialStart, partialEnd := 0, sliceLen + + type restart struct { + matches []CapturedNode + pc int + j int + wildStart int + wildName string + } + // We need to stack these because otherwise some edge cases + // would not match properly. Since we have various kinds of + // wildcards (nodes containing them, $_, and $*_), in some cases + // we may have to go back and do multiple restarts to get to the + // right starting position. 
+ var stack []restart + wildName := "" + wildStart := 0 + push := func(next int) { + if next > sliceLen { + return // would be discarded anyway + } + pcNext = m.pc - 1 + jNext = next + stack = append(stack, restart{m.capture, pcNext, next, wildStart, wildName}) + } + pop := func() { + j = jNext + m.pc = pcNext + m.capture = stack[len(stack)-1].matches + wildName = stack[len(stack)-1].wildName + wildStart = stack[len(stack)-1].wildStart + stack = stack[:len(stack)-1] + pcNext = 0 + jNext = 0 + if len(stack) > 0 { + pcNext = stack[len(stack)-1].pc + jNext = stack[len(stack)-1].j + } + } + + // wouldMatch returns whether the current wildcard - if any - + // matches the nodes we are currently trying it on. + wouldMatch := func() bool { + switch wildName { + case "", "_": + return true + } + return m.matchNamed(wildName, nodes.slice(wildStart, j)) + } + for ; inst.op != opEnd || j < sliceLen; inst = m.nextInst() { + if inst.op != opEnd { + if inst.op == opNodeSeq || inst.op == opNamedNodeSeq { + // keep track of where this wildcard + // started (if name == wildName, + // we're trying the same wildcard + // matching one more node) + name := "_" + if inst.op == opNamedNodeSeq { + name = m.stringValue(inst) + } + if name != wildName { + wildStart = j + wildName = name + } + // try to match zero or more at j, + // restarting at j+1 if it fails + push(j + 1) + continue + } + if partial && m.pc == pcBase { + // let "b; c" match "a; b; c" + // (simulates a $*_ at the beginning) + partialStart = j + push(j + 1) + } + if j < sliceLen && wouldMatch() && m.matchNodeWithInst(inst, nodes.at(j)) { + // ordinary match + wildName = "" + j++ + continue + } + } + if partial && inst.op == opEnd && wildName == "" { + partialEnd = j + break // let "b; c" match "b; c; d" + } + // mismatch, try to restart + if 0 < jNext && jNext <= sliceLen && (m.pc != pcNext || j != jNext) { + pop() + continue + } + return nil, -1 + } + if !wouldMatch() { + return nil, -1 + } + return nodes.slice(partialStart, partialEnd), partialEnd + 1 +} + +func findNamed(capture []CapturedNode, name string) (ast.Node, bool) { + for _, c := range capture { + if c.Name == name { + return c.Node, true + } + } + return nil, false +} + +func literalValue(lit *ast.BasicLit) interface{} { + switch lit.Kind { + case token.INT: + v, err := strconv.ParseInt(lit.Value, 0, 64) + if err == nil { + return v + } + case token.CHAR: + s, err := strconv.Unquote(lit.Value) + if err != nil { + return nil + } + // Return the first rune. 
+ for _, c := range s { + return c + } + case token.STRING: + s, err := strconv.Unquote(lit.Value) + if err == nil { + return s + } + case token.FLOAT: + v, err := strconv.ParseFloat(lit.Value, 64) + if err == nil { + return v + } + case token.IMAG: + v, err := strconv.ParseComplex(lit.Value, 128) + if err == nil { + return v + } + } + return nil +} + +func equalNodes(x, y ast.Node) bool { + if x == nil || y == nil { + return x == y + } + switch x := x.(type) { + case stmtSlice: + y, ok := y.(stmtSlice) + if !ok || len(x) != len(y) { + return false + } + for i := range x { + if !astequal.Stmt(x[i], y[i]) { + return false + } + } + return true + case exprSlice: + y, ok := y.(exprSlice) + if !ok || len(x) != len(y) { + return false + } + for i := range x { + if !astequal.Expr(x[i], y[i]) { + return false + } + } + return true + default: + return astequal.Node(x, y) + } +} + +func toStmtSlice(nodes ...ast.Node) stmtSlice { + var stmts []ast.Stmt + for _, node := range nodes { + switch x := node.(type) { + case nil: + case ast.Stmt: + stmts = append(stmts, x) + case ast.Expr: + stmts = append(stmts, &ast.ExprStmt{X: x}) + default: + panic(fmt.Sprintf("unexpected node type: %T", x)) + } + } + return stmtSlice(stmts) +} diff --git a/vendor/github.com/quasilyte/go-ruleguard/internal/gogrep/operation_string.go b/vendor/github.com/quasilyte/go-ruleguard/internal/gogrep/operation_string.go new file mode 100644 index 000000000..9f50e2795 --- /dev/null +++ b/vendor/github.com/quasilyte/go-ruleguard/internal/gogrep/operation_string.go @@ -0,0 +1,129 @@ +// Code generated by "stringer -type=operation -trimprefix=op"; DO NOT EDIT. + +package gogrep + +import "strconv" + +func _() { + // An "invalid array index" compiler error signifies that the constant values have changed. + // Re-run the stringer command to generate them again. 
+ var x [1]struct{} + _ = x[opInvalid-0] + _ = x[opNode-1] + _ = x[opNamedNode-2] + _ = x[opNodeSeq-3] + _ = x[opNamedNodeSeq-4] + _ = x[opOptNode-5] + _ = x[opNamedOptNode-6] + _ = x[opMultiStmt-7] + _ = x[opMultiExpr-8] + _ = x[opEnd-9] + _ = x[opBasicLit-10] + _ = x[opStrictIntLit-11] + _ = x[opStrictFloatLit-12] + _ = x[opStrictCharLit-13] + _ = x[opStrictStringLit-14] + _ = x[opStrictComplexLit-15] + _ = x[opIdent-16] + _ = x[opIndexExpr-17] + _ = x[opSliceExpr-18] + _ = x[opSliceFromExpr-19] + _ = x[opSliceToExpr-20] + _ = x[opSliceFromToExpr-21] + _ = x[opSliceToCapExpr-22] + _ = x[opSliceFromToCapExpr-23] + _ = x[opFuncLit-24] + _ = x[opCompositeLit-25] + _ = x[opTypedCompositeLit-26] + _ = x[opSimpleSelectorExpr-27] + _ = x[opSelectorExpr-28] + _ = x[opTypeAssertExpr-29] + _ = x[opTypeSwitchAssertExpr-30] + _ = x[opVoidFuncType-31] + _ = x[opFuncType-32] + _ = x[opArrayType-33] + _ = x[opSliceType-34] + _ = x[opMapType-35] + _ = x[opChanType-36] + _ = x[opKeyValueExpr-37] + _ = x[opEllipsis-38] + _ = x[opTypedEllipsis-39] + _ = x[opStarExpr-40] + _ = x[opUnaryExpr-41] + _ = x[opBinaryExpr-42] + _ = x[opParenExpr-43] + _ = x[opVariadicCallExpr-44] + _ = x[opCallExpr-45] + _ = x[opAssignStmt-46] + _ = x[opMultiAssignStmt-47] + _ = x[opBranchStmt-48] + _ = x[opSimpleLabeledBranchStmt-49] + _ = x[opLabeledBranchStmt-50] + _ = x[opSimpleLabeledStmt-51] + _ = x[opLabeledStmt-52] + _ = x[opBlockStmt-53] + _ = x[opExprStmt-54] + _ = x[opGoStmt-55] + _ = x[opDeferStmt-56] + _ = x[opSendStmt-57] + _ = x[opEmptyStmt-58] + _ = x[opIncDecStmt-59] + _ = x[opReturnStmt-60] + _ = x[opIfStmt-61] + _ = x[opIfInitStmt-62] + _ = x[opIfElseStmt-63] + _ = x[opIfInitElseStmt-64] + _ = x[opIfNamedOptStmt-65] + _ = x[opIfNamedOptElseStmt-66] + _ = x[opSwitchStmt-67] + _ = x[opSwitchTagStmt-68] + _ = x[opSwitchInitStmt-69] + _ = x[opSwitchInitTagStmt-70] + _ = x[opSelectStmt-71] + _ = x[opTypeSwitchStmt-72] + _ = x[opTypeSwitchInitStmt-73] + _ = x[opCaseClause-74] + _ = x[opDefaultCaseClause-75] + _ = x[opCommClause-76] + _ = x[opDefaultCommClause-77] + _ = x[opForStmt-78] + _ = x[opForPostStmt-79] + _ = x[opForCondStmt-80] + _ = x[opForCondPostStmt-81] + _ = x[opForInitStmt-82] + _ = x[opForInitPostStmt-83] + _ = x[opForInitCondStmt-84] + _ = x[opForInitCondPostStmt-85] + _ = x[opRangeStmt-86] + _ = x[opRangeKeyStmt-87] + _ = x[opRangeKeyValueStmt-88] + _ = x[opFieldList-89] + _ = x[opUnnamedField-90] + _ = x[opSimpleField-91] + _ = x[opField-92] + _ = x[opMultiField-93] + _ = x[opValueInitSpec-94] + _ = x[opTypedValueInitSpec-95] + _ = x[opTypedValueSpec-96] + _ = x[opTypeSpec-97] + _ = x[opTypeAliasSpec-98] + _ = x[opFuncDecl-99] + _ = x[opMethodDecl-100] + _ = x[opFuncProtoDecl-101] + _ = x[opMethodProtoDecl-102] + _ = x[opConstDecl-103] + _ = x[opVarDecl-104] + _ = x[opTypeDecl-105] + _ = x[opEmptyPackage-106] +} + +const _operation_name = 
"InvalidNodeNamedNodeNodeSeqNamedNodeSeqOptNodeNamedOptNodeMultiStmtMultiExprEndBasicLitStrictIntLitStrictFloatLitStrictCharLitStrictStringLitStrictComplexLitIdentIndexExprSliceExprSliceFromExprSliceToExprSliceFromToExprSliceToCapExprSliceFromToCapExprFuncLitCompositeLitTypedCompositeLitSimpleSelectorExprSelectorExprTypeAssertExprTypeSwitchAssertExprVoidFuncTypeFuncTypeArrayTypeSliceTypeMapTypeChanTypeKeyValueExprEllipsisTypedEllipsisStarExprUnaryExprBinaryExprParenExprVariadicCallExprCallExprAssignStmtMultiAssignStmtBranchStmtSimpleLabeledBranchStmtLabeledBranchStmtSimpleLabeledStmtLabeledStmtBlockStmtExprStmtGoStmtDeferStmtSendStmtEmptyStmtIncDecStmtReturnStmtIfStmtIfInitStmtIfElseStmtIfInitElseStmtIfNamedOptStmtIfNamedOptElseStmtSwitchStmtSwitchTagStmtSwitchInitStmtSwitchInitTagStmtSelectStmtTypeSwitchStmtTypeSwitchInitStmtCaseClauseDefaultCaseClauseCommClauseDefaultCommClauseForStmtForPostStmtForCondStmtForCondPostStmtForInitStmtForInitPostStmtForInitCondStmtForInitCondPostStmtRangeStmtRangeKeyStmtRangeKeyValueStmtFieldListUnnamedFieldSimpleFieldFieldMultiFieldValueInitSpecTypedValueInitSpecTypedValueSpecTypeSpecTypeAliasSpecFuncDeclMethodDeclFuncProtoDeclMethodProtoDeclConstDeclVarDeclTypeDeclEmptyPackage" + +var _operation_index = [...]uint16{0, 7, 11, 20, 27, 39, 46, 58, 67, 76, 79, 87, 99, 113, 126, 141, 157, 162, 171, 180, 193, 204, 219, 233, 251, 258, 270, 287, 305, 317, 331, 351, 363, 371, 380, 389, 396, 404, 416, 424, 437, 445, 454, 464, 473, 489, 497, 507, 522, 532, 555, 572, 589, 600, 609, 617, 623, 632, 640, 649, 659, 669, 675, 685, 695, 709, 723, 741, 751, 764, 778, 795, 805, 819, 837, 847, 864, 874, 891, 898, 909, 920, 935, 946, 961, 976, 995, 1004, 1016, 1033, 1042, 1054, 1065, 1070, 1080, 1093, 1111, 1125, 1133, 1146, 1154, 1164, 1177, 1192, 1201, 1208, 1216, 1228} + +func (i operation) String() string { + if i >= operation(len(_operation_index)-1) { + return "operation(" + strconv.FormatInt(int64(i), 10) + ")" + } + return _operation_name[_operation_index[i]:_operation_index[i+1]] +} diff --git a/vendor/github.com/quasilyte/go-ruleguard/internal/gogrep/operations.gen.go b/vendor/github.com/quasilyte/go-ruleguard/internal/gogrep/operations.gen.go new file mode 100644 index 000000000..f4d7cff82 --- /dev/null +++ b/vendor/github.com/quasilyte/go-ruleguard/internal/gogrep/operations.gen.go @@ -0,0 +1,1249 @@ +// Code generated "gen_operations.go"; DO NOT EDIT. + +package gogrep + +import ( + "github.com/quasilyte/go-ruleguard/nodetag" +) + +//go:generate stringer -type=operation -trimprefix=op +type operation uint8 + +const ( + opInvalid operation = 0 + + // Tag: Node + opNode operation = 1 + + // Tag: Node + // ValueIndex: strings | wildcard name + opNamedNode operation = 2 + + // Tag: Unknown + opNodeSeq operation = 3 + + // Tag: Unknown + // ValueIndex: strings | wildcard name + opNamedNodeSeq operation = 4 + + // Tag: Unknown + opOptNode operation = 5 + + // Tag: Unknown + // ValueIndex: strings | wildcard name + opNamedOptNode operation = 6 + + // Tag: StmtList + // Args: stmts... + // Example: f(); g() + opMultiStmt operation = 7 + + // Tag: ExprList + // Args: exprs... 
+ // Example: f(), g() + opMultiExpr operation = 8 + + // Tag: Unknown + opEnd operation = 9 + + // Tag: BasicLit + // ValueIndex: ifaces | parsed literal value + opBasicLit operation = 10 + + // Tag: BasicLit + // ValueIndex: strings | raw literal value + opStrictIntLit operation = 11 + + // Tag: BasicLit + // ValueIndex: strings | raw literal value + opStrictFloatLit operation = 12 + + // Tag: BasicLit + // ValueIndex: strings | raw literal value + opStrictCharLit operation = 13 + + // Tag: BasicLit + // ValueIndex: strings | raw literal value + opStrictStringLit operation = 14 + + // Tag: BasicLit + // ValueIndex: strings | raw literal value + opStrictComplexLit operation = 15 + + // Tag: Ident + // ValueIndex: strings | ident name + opIdent operation = 16 + + // Tag: IndexExpr + // Args: x expr + opIndexExpr operation = 17 + + // Tag: SliceExpr + // Args: x + opSliceExpr operation = 18 + + // Tag: SliceExpr + // Args: x from + // Example: x[from:] + opSliceFromExpr operation = 19 + + // Tag: SliceExpr + // Args: x to + // Example: x[:to] + opSliceToExpr operation = 20 + + // Tag: SliceExpr + // Args: x from to + // Example: x[from:to] + opSliceFromToExpr operation = 21 + + // Tag: SliceExpr + // Args: x from cap + // Example: x[:from:cap] + opSliceToCapExpr operation = 22 + + // Tag: SliceExpr + // Args: x from to cap + // Example: x[from:to:cap] + opSliceFromToCapExpr operation = 23 + + // Tag: FuncLit + // Args: type block + opFuncLit operation = 24 + + // Tag: CompositeLit + // Args: elts... + // Example: {elts...} + opCompositeLit operation = 25 + + // Tag: CompositeLit + // Args: typ elts... + // Example: typ{elts...} + opTypedCompositeLit operation = 26 + + // Tag: SelectorExpr + // Args: x + // ValueIndex: strings | selector name + opSimpleSelectorExpr operation = 27 + + // Tag: SelectorExpr + // Args: x sel + opSelectorExpr operation = 28 + + // Tag: TypeAssertExpr + // Args: x typ + opTypeAssertExpr operation = 29 + + // Tag: TypeAssertExpr + // Args: x + opTypeSwitchAssertExpr operation = 30 + + // Tag: FuncType + // Args: params + opVoidFuncType operation = 31 + + // Tag: FuncType + // Args: params results + opFuncType operation = 32 + + // Tag: ArrayType + // Args: length elem + opArrayType operation = 33 + + // Tag: ArrayType + // Args: elem + opSliceType operation = 34 + + // Tag: MapType + // Args: key value + opMapType operation = 35 + + // Tag: ChanType + // Args: value + // Value: ast.ChanDir | channel direction + opChanType operation = 36 + + // Tag: KeyValueExpr + // Args: key value + opKeyValueExpr operation = 37 + + // Tag: Ellipsis + opEllipsis operation = 38 + + // Tag: Ellipsis + // Args: type + opTypedEllipsis operation = 39 + + // Tag: StarExpr + // Args: x + opStarExpr operation = 40 + + // Tag: UnaryExpr + // Args: x + // Value: token.Token | unary operator + opUnaryExpr operation = 41 + + // Tag: BinaryExpr + // Args: x y + // Value: token.Token | binary operator + opBinaryExpr operation = 42 + + // Tag: ParenExpr + // Args: x + opParenExpr operation = 43 + + // Tag: CallExpr + // Args: fn args... + // Example: f(1, xs...) + opVariadicCallExpr operation = 44 + + // Tag: CallExpr + // Args: fn args... + // Example: f(1, xs) + opCallExpr operation = 45 + + // Tag: AssignStmt + // Args: lhs rhs + // Example: lhs := rhs() + // Value: token.Token | ':=' or '=' + opAssignStmt operation = 46 + + // Tag: AssignStmt + // Args: lhs... rhs... 
+ // Example: lhs1, lhs2 := rhs() + // Value: token.Token | ':=' or '=' + opMultiAssignStmt operation = 47 + + // Tag: BranchStmt + // Args: x + // Value: token.Token | branch kind + opBranchStmt operation = 48 + + // Tag: BranchStmt + // Args: x + // Value: token.Token | branch kind + // ValueIndex: strings | label name + opSimpleLabeledBranchStmt operation = 49 + + // Tag: BranchStmt + // Args: label x + // Value: token.Token | branch kind + opLabeledBranchStmt operation = 50 + + // Tag: LabeledStmt + // Args: x + // ValueIndex: strings | label name + opSimpleLabeledStmt operation = 51 + + // Tag: LabeledStmt + // Args: label x + opLabeledStmt operation = 52 + + // Tag: BlockStmt + // Args: body... + opBlockStmt operation = 53 + + // Tag: ExprStmt + // Args: x + opExprStmt operation = 54 + + // Tag: GoStmt + // Args: x + opGoStmt operation = 55 + + // Tag: DeferStmt + // Args: x + opDeferStmt operation = 56 + + // Tag: SendStmt + // Args: ch value + opSendStmt operation = 57 + + // Tag: EmptyStmt + opEmptyStmt operation = 58 + + // Tag: IncDecStmt + // Args: x + // Value: token.Token | '++' or '--' + opIncDecStmt operation = 59 + + // Tag: ReturnStmt + // Args: results... + opReturnStmt operation = 60 + + // Tag: IfStmt + // Args: cond block + // Example: if cond {} + opIfStmt operation = 61 + + // Tag: IfStmt + // Args: init cond block + // Example: if init; cond {} + opIfInitStmt operation = 62 + + // Tag: IfStmt + // Args: cond block else + // Example: if cond {} else ... + opIfElseStmt operation = 63 + + // Tag: IfStmt + // Args: init cond block else + // Example: if init; cond {} else ... + opIfInitElseStmt operation = 64 + + // Tag: IfStmt + // Args: block + // Example: if $*x {} + // ValueIndex: strings | wildcard name + opIfNamedOptStmt operation = 65 + + // Tag: IfStmt + // Args: block else + // Example: if $*x {} else ... + // ValueIndex: strings | wildcard name + opIfNamedOptElseStmt operation = 66 + + // Tag: SwitchStmt + // Args: body... + // Example: switch {} + opSwitchStmt operation = 67 + + // Tag: SwitchStmt + // Args: tag body... + // Example: switch tag {} + opSwitchTagStmt operation = 68 + + // Tag: SwitchStmt + // Args: init body... + // Example: switch init; {} + opSwitchInitStmt operation = 69 + + // Tag: SwitchStmt + // Args: init tag body... + // Example: switch init; tag {} + opSwitchInitTagStmt operation = 70 + + // Tag: SelectStmt + // Args: body... + opSelectStmt operation = 71 + + // Tag: TypeSwitchStmt + // Args: x block + // Example: switch x.(type) {} + opTypeSwitchStmt operation = 72 + + // Tag: TypeSwitchStmt + // Args: init x block + // Example: switch init; x.(type) {} + opTypeSwitchInitStmt operation = 73 + + // Tag: CaseClause + // Args: values... body... + opCaseClause operation = 74 + + // Tag: CaseClause + // Args: body... + opDefaultCaseClause operation = 75 + + // Tag: CommClause + // Args: comm body... + opCommClause operation = 76 + + // Tag: CommClause + // Args: body... 
+ opDefaultCommClause operation = 77 + + // Tag: ForStmt + // Args: blocl + // Example: for {} + opForStmt operation = 78 + + // Tag: ForStmt + // Args: post block + // Example: for ; ; post {} + opForPostStmt operation = 79 + + // Tag: ForStmt + // Args: cond block + // Example: for ; cond; {} + opForCondStmt operation = 80 + + // Tag: ForStmt + // Args: cond post block + // Example: for ; cond; post {} + opForCondPostStmt operation = 81 + + // Tag: ForStmt + // Args: init block + // Example: for init; ; {} + opForInitStmt operation = 82 + + // Tag: ForStmt + // Args: init post block + // Example: for init; ; post {} + opForInitPostStmt operation = 83 + + // Tag: ForStmt + // Args: init cond block + // Example: for init; cond; {} + opForInitCondStmt operation = 84 + + // Tag: ForStmt + // Args: init cond post block + // Example: for init; cond; post {} + opForInitCondPostStmt operation = 85 + + // Tag: RangeStmt + // Args: x block + // Example: for range x {} + opRangeStmt operation = 86 + + // Tag: RangeStmt + // Args: key x block + // Example: for key := range x {} + // Value: token.Token | ':=' or '=' + opRangeKeyStmt operation = 87 + + // Tag: RangeStmt + // Args: key value x block + // Example: for key, value := range x {} + // Value: token.Token | ':=' or '=' + opRangeKeyValueStmt operation = 88 + + // Tag: Unknown + // Args: fields... + opFieldList operation = 89 + + // Tag: Unknown + // Args: typ + // Example: type + opUnnamedField operation = 90 + + // Tag: Unknown + // Args: typ + // Example: name type + // ValueIndex: strings | field name + opSimpleField operation = 91 + + // Tag: Unknown + // Args: name typ + // Example: $name type + opField operation = 92 + + // Tag: Unknown + // Args: names... typ + // Example: name1, name2 type + opMultiField operation = 93 + + // Tag: ValueSpec + // Args: lhs... rhs... + // Example: lhs = rhs + opValueInitSpec operation = 94 + + // Tag: ValueSpec + // Args: lhs... type rhs... + // Example: lhs typ = rhs + opTypedValueInitSpec operation = 95 + + // Tag: ValueSpec + // Args: lhs... type + // Example: lhs typ + opTypedValueSpec operation = 96 + + // Tag: TypeSpec + // Args: name type + // Example: name type + opTypeSpec operation = 97 + + // Tag: TypeSpec + // Args: name type + // Example: name = type + opTypeAliasSpec operation = 98 + + // Tag: FuncDecl + // Args: name type block + opFuncDecl operation = 99 + + // Tag: FuncDecl + // Args: recv name type block + opMethodDecl operation = 100 + + // Tag: FuncDecl + // Args: name type + opFuncProtoDecl operation = 101 + + // Tag: FuncDecl + // Args: recv name type + opMethodProtoDecl operation = 102 + + // Tag: GenDecl + // Args: valuespecs... + opConstDecl operation = 103 + + // Tag: GenDecl + // Args: valuespecs... + opVarDecl operation = 104 + + // Tag: GenDecl + // Args: typespecs... 
+ opTypeDecl operation = 105 + + // Tag: File + // Args: name + opEmptyPackage operation = 106 +) + +type operationInfo struct { + Tag nodetag.Value + NumArgs int + ValueKind valueKind + ExtraValueKind valueKind + VariadicMap bitmap64 +} + +var operationInfoTable = [256]operationInfo{ + opInvalid: {}, + + opNode: { + Tag: nodetag.Node, + NumArgs: 0, + ValueKind: emptyValue, + ExtraValueKind: emptyValue, + VariadicMap: 0, // 0 + }, + opNamedNode: { + Tag: nodetag.Node, + NumArgs: 0, + ValueKind: emptyValue, + ExtraValueKind: stringValue, + VariadicMap: 0, // 0 + }, + opNodeSeq: { + Tag: nodetag.Unknown, + NumArgs: 0, + ValueKind: emptyValue, + ExtraValueKind: emptyValue, + VariadicMap: 0, // 0 + }, + opNamedNodeSeq: { + Tag: nodetag.Unknown, + NumArgs: 0, + ValueKind: emptyValue, + ExtraValueKind: stringValue, + VariadicMap: 0, // 0 + }, + opOptNode: { + Tag: nodetag.Unknown, + NumArgs: 0, + ValueKind: emptyValue, + ExtraValueKind: emptyValue, + VariadicMap: 0, // 0 + }, + opNamedOptNode: { + Tag: nodetag.Unknown, + NumArgs: 0, + ValueKind: emptyValue, + ExtraValueKind: stringValue, + VariadicMap: 0, // 0 + }, + opMultiStmt: { + Tag: nodetag.StmtList, + NumArgs: 1, + ValueKind: emptyValue, + ExtraValueKind: emptyValue, + VariadicMap: 1, // 1 + }, + opMultiExpr: { + Tag: nodetag.ExprList, + NumArgs: 1, + ValueKind: emptyValue, + ExtraValueKind: emptyValue, + VariadicMap: 1, // 1 + }, + opEnd: { + Tag: nodetag.Unknown, + NumArgs: 0, + ValueKind: emptyValue, + ExtraValueKind: emptyValue, + VariadicMap: 0, // 0 + }, + opBasicLit: { + Tag: nodetag.BasicLit, + NumArgs: 0, + ValueKind: emptyValue, + ExtraValueKind: ifaceValue, + VariadicMap: 0, // 0 + }, + opStrictIntLit: { + Tag: nodetag.BasicLit, + NumArgs: 0, + ValueKind: emptyValue, + ExtraValueKind: stringValue, + VariadicMap: 0, // 0 + }, + opStrictFloatLit: { + Tag: nodetag.BasicLit, + NumArgs: 0, + ValueKind: emptyValue, + ExtraValueKind: stringValue, + VariadicMap: 0, // 0 + }, + opStrictCharLit: { + Tag: nodetag.BasicLit, + NumArgs: 0, + ValueKind: emptyValue, + ExtraValueKind: stringValue, + VariadicMap: 0, // 0 + }, + opStrictStringLit: { + Tag: nodetag.BasicLit, + NumArgs: 0, + ValueKind: emptyValue, + ExtraValueKind: stringValue, + VariadicMap: 0, // 0 + }, + opStrictComplexLit: { + Tag: nodetag.BasicLit, + NumArgs: 0, + ValueKind: emptyValue, + ExtraValueKind: stringValue, + VariadicMap: 0, // 0 + }, + opIdent: { + Tag: nodetag.Ident, + NumArgs: 0, + ValueKind: emptyValue, + ExtraValueKind: stringValue, + VariadicMap: 0, // 0 + }, + opIndexExpr: { + Tag: nodetag.IndexExpr, + NumArgs: 2, + ValueKind: emptyValue, + ExtraValueKind: emptyValue, + VariadicMap: 0, // 0 + }, + opSliceExpr: { + Tag: nodetag.SliceExpr, + NumArgs: 1, + ValueKind: emptyValue, + ExtraValueKind: emptyValue, + VariadicMap: 0, // 0 + }, + opSliceFromExpr: { + Tag: nodetag.SliceExpr, + NumArgs: 2, + ValueKind: emptyValue, + ExtraValueKind: emptyValue, + VariadicMap: 0, // 0 + }, + opSliceToExpr: { + Tag: nodetag.SliceExpr, + NumArgs: 2, + ValueKind: emptyValue, + ExtraValueKind: emptyValue, + VariadicMap: 0, // 0 + }, + opSliceFromToExpr: { + Tag: nodetag.SliceExpr, + NumArgs: 3, + ValueKind: emptyValue, + ExtraValueKind: emptyValue, + VariadicMap: 0, // 0 + }, + opSliceToCapExpr: { + Tag: nodetag.SliceExpr, + NumArgs: 3, + ValueKind: emptyValue, + ExtraValueKind: emptyValue, + VariadicMap: 0, // 0 + }, + opSliceFromToCapExpr: { + Tag: nodetag.SliceExpr, + NumArgs: 4, + ValueKind: emptyValue, + ExtraValueKind: emptyValue, + VariadicMap: 0, // 0 + }, + opFuncLit: { 
+ Tag: nodetag.FuncLit, + NumArgs: 2, + ValueKind: emptyValue, + ExtraValueKind: emptyValue, + VariadicMap: 0, // 0 + }, + opCompositeLit: { + Tag: nodetag.CompositeLit, + NumArgs: 1, + ValueKind: emptyValue, + ExtraValueKind: emptyValue, + VariadicMap: 1, // 1 + }, + opTypedCompositeLit: { + Tag: nodetag.CompositeLit, + NumArgs: 2, + ValueKind: emptyValue, + ExtraValueKind: emptyValue, + VariadicMap: 2, // 10 + }, + opSimpleSelectorExpr: { + Tag: nodetag.SelectorExpr, + NumArgs: 1, + ValueKind: emptyValue, + ExtraValueKind: stringValue, + VariadicMap: 0, // 0 + }, + opSelectorExpr: { + Tag: nodetag.SelectorExpr, + NumArgs: 2, + ValueKind: emptyValue, + ExtraValueKind: emptyValue, + VariadicMap: 0, // 0 + }, + opTypeAssertExpr: { + Tag: nodetag.TypeAssertExpr, + NumArgs: 2, + ValueKind: emptyValue, + ExtraValueKind: emptyValue, + VariadicMap: 0, // 0 + }, + opTypeSwitchAssertExpr: { + Tag: nodetag.TypeAssertExpr, + NumArgs: 1, + ValueKind: emptyValue, + ExtraValueKind: emptyValue, + VariadicMap: 0, // 0 + }, + opVoidFuncType: { + Tag: nodetag.FuncType, + NumArgs: 1, + ValueKind: emptyValue, + ExtraValueKind: emptyValue, + VariadicMap: 0, // 0 + }, + opFuncType: { + Tag: nodetag.FuncType, + NumArgs: 2, + ValueKind: emptyValue, + ExtraValueKind: emptyValue, + VariadicMap: 0, // 0 + }, + opArrayType: { + Tag: nodetag.ArrayType, + NumArgs: 2, + ValueKind: emptyValue, + ExtraValueKind: emptyValue, + VariadicMap: 0, // 0 + }, + opSliceType: { + Tag: nodetag.ArrayType, + NumArgs: 1, + ValueKind: emptyValue, + ExtraValueKind: emptyValue, + VariadicMap: 0, // 0 + }, + opMapType: { + Tag: nodetag.MapType, + NumArgs: 2, + ValueKind: emptyValue, + ExtraValueKind: emptyValue, + VariadicMap: 0, // 0 + }, + opChanType: { + Tag: nodetag.ChanType, + NumArgs: 1, + ValueKind: chandirValue, + ExtraValueKind: emptyValue, + VariadicMap: 0, // 0 + }, + opKeyValueExpr: { + Tag: nodetag.KeyValueExpr, + NumArgs: 2, + ValueKind: emptyValue, + ExtraValueKind: emptyValue, + VariadicMap: 0, // 0 + }, + opEllipsis: { + Tag: nodetag.Ellipsis, + NumArgs: 0, + ValueKind: emptyValue, + ExtraValueKind: emptyValue, + VariadicMap: 0, // 0 + }, + opTypedEllipsis: { + Tag: nodetag.Ellipsis, + NumArgs: 1, + ValueKind: emptyValue, + ExtraValueKind: emptyValue, + VariadicMap: 0, // 0 + }, + opStarExpr: { + Tag: nodetag.StarExpr, + NumArgs: 1, + ValueKind: emptyValue, + ExtraValueKind: emptyValue, + VariadicMap: 0, // 0 + }, + opUnaryExpr: { + Tag: nodetag.UnaryExpr, + NumArgs: 1, + ValueKind: tokenValue, + ExtraValueKind: emptyValue, + VariadicMap: 0, // 0 + }, + opBinaryExpr: { + Tag: nodetag.BinaryExpr, + NumArgs: 2, + ValueKind: tokenValue, + ExtraValueKind: emptyValue, + VariadicMap: 0, // 0 + }, + opParenExpr: { + Tag: nodetag.ParenExpr, + NumArgs: 1, + ValueKind: emptyValue, + ExtraValueKind: emptyValue, + VariadicMap: 0, // 0 + }, + opVariadicCallExpr: { + Tag: nodetag.CallExpr, + NumArgs: 2, + ValueKind: emptyValue, + ExtraValueKind: emptyValue, + VariadicMap: 2, // 10 + }, + opCallExpr: { + Tag: nodetag.CallExpr, + NumArgs: 2, + ValueKind: emptyValue, + ExtraValueKind: emptyValue, + VariadicMap: 2, // 10 + }, + opAssignStmt: { + Tag: nodetag.AssignStmt, + NumArgs: 2, + ValueKind: tokenValue, + ExtraValueKind: emptyValue, + VariadicMap: 0, // 0 + }, + opMultiAssignStmt: { + Tag: nodetag.AssignStmt, + NumArgs: 2, + ValueKind: tokenValue, + ExtraValueKind: emptyValue, + VariadicMap: 3, // 11 + }, + opBranchStmt: { + Tag: nodetag.BranchStmt, + NumArgs: 1, + ValueKind: tokenValue, + ExtraValueKind: emptyValue, + VariadicMap: 0, 
// 0 + }, + opSimpleLabeledBranchStmt: { + Tag: nodetag.BranchStmt, + NumArgs: 1, + ValueKind: tokenValue, + ExtraValueKind: stringValue, + VariadicMap: 0, // 0 + }, + opLabeledBranchStmt: { + Tag: nodetag.BranchStmt, + NumArgs: 2, + ValueKind: tokenValue, + ExtraValueKind: emptyValue, + VariadicMap: 0, // 0 + }, + opSimpleLabeledStmt: { + Tag: nodetag.LabeledStmt, + NumArgs: 1, + ValueKind: emptyValue, + ExtraValueKind: stringValue, + VariadicMap: 0, // 0 + }, + opLabeledStmt: { + Tag: nodetag.LabeledStmt, + NumArgs: 2, + ValueKind: emptyValue, + ExtraValueKind: emptyValue, + VariadicMap: 0, // 0 + }, + opBlockStmt: { + Tag: nodetag.BlockStmt, + NumArgs: 1, + ValueKind: emptyValue, + ExtraValueKind: emptyValue, + VariadicMap: 1, // 1 + }, + opExprStmt: { + Tag: nodetag.ExprStmt, + NumArgs: 1, + ValueKind: emptyValue, + ExtraValueKind: emptyValue, + VariadicMap: 0, // 0 + }, + opGoStmt: { + Tag: nodetag.GoStmt, + NumArgs: 1, + ValueKind: emptyValue, + ExtraValueKind: emptyValue, + VariadicMap: 0, // 0 + }, + opDeferStmt: { + Tag: nodetag.DeferStmt, + NumArgs: 1, + ValueKind: emptyValue, + ExtraValueKind: emptyValue, + VariadicMap: 0, // 0 + }, + opSendStmt: { + Tag: nodetag.SendStmt, + NumArgs: 2, + ValueKind: emptyValue, + ExtraValueKind: emptyValue, + VariadicMap: 0, // 0 + }, + opEmptyStmt: { + Tag: nodetag.EmptyStmt, + NumArgs: 0, + ValueKind: emptyValue, + ExtraValueKind: emptyValue, + VariadicMap: 0, // 0 + }, + opIncDecStmt: { + Tag: nodetag.IncDecStmt, + NumArgs: 1, + ValueKind: tokenValue, + ExtraValueKind: emptyValue, + VariadicMap: 0, // 0 + }, + opReturnStmt: { + Tag: nodetag.ReturnStmt, + NumArgs: 1, + ValueKind: emptyValue, + ExtraValueKind: emptyValue, + VariadicMap: 1, // 1 + }, + opIfStmt: { + Tag: nodetag.IfStmt, + NumArgs: 2, + ValueKind: emptyValue, + ExtraValueKind: emptyValue, + VariadicMap: 0, // 0 + }, + opIfInitStmt: { + Tag: nodetag.IfStmt, + NumArgs: 3, + ValueKind: emptyValue, + ExtraValueKind: emptyValue, + VariadicMap: 0, // 0 + }, + opIfElseStmt: { + Tag: nodetag.IfStmt, + NumArgs: 3, + ValueKind: emptyValue, + ExtraValueKind: emptyValue, + VariadicMap: 0, // 0 + }, + opIfInitElseStmt: { + Tag: nodetag.IfStmt, + NumArgs: 4, + ValueKind: emptyValue, + ExtraValueKind: emptyValue, + VariadicMap: 0, // 0 + }, + opIfNamedOptStmt: { + Tag: nodetag.IfStmt, + NumArgs: 1, + ValueKind: emptyValue, + ExtraValueKind: stringValue, + VariadicMap: 0, // 0 + }, + opIfNamedOptElseStmt: { + Tag: nodetag.IfStmt, + NumArgs: 2, + ValueKind: emptyValue, + ExtraValueKind: stringValue, + VariadicMap: 0, // 0 + }, + opSwitchStmt: { + Tag: nodetag.SwitchStmt, + NumArgs: 1, + ValueKind: emptyValue, + ExtraValueKind: emptyValue, + VariadicMap: 1, // 1 + }, + opSwitchTagStmt: { + Tag: nodetag.SwitchStmt, + NumArgs: 2, + ValueKind: emptyValue, + ExtraValueKind: emptyValue, + VariadicMap: 2, // 10 + }, + opSwitchInitStmt: { + Tag: nodetag.SwitchStmt, + NumArgs: 2, + ValueKind: emptyValue, + ExtraValueKind: emptyValue, + VariadicMap: 2, // 10 + }, + opSwitchInitTagStmt: { + Tag: nodetag.SwitchStmt, + NumArgs: 3, + ValueKind: emptyValue, + ExtraValueKind: emptyValue, + VariadicMap: 4, // 100 + }, + opSelectStmt: { + Tag: nodetag.SelectStmt, + NumArgs: 1, + ValueKind: emptyValue, + ExtraValueKind: emptyValue, + VariadicMap: 1, // 1 + }, + opTypeSwitchStmt: { + Tag: nodetag.TypeSwitchStmt, + NumArgs: 2, + ValueKind: emptyValue, + ExtraValueKind: emptyValue, + VariadicMap: 0, // 0 + }, + opTypeSwitchInitStmt: { + Tag: nodetag.TypeSwitchStmt, + NumArgs: 3, + ValueKind: emptyValue, + 
ExtraValueKind: emptyValue, + VariadicMap: 0, // 0 + }, + opCaseClause: { + Tag: nodetag.CaseClause, + NumArgs: 2, + ValueKind: emptyValue, + ExtraValueKind: emptyValue, + VariadicMap: 3, // 11 + }, + opDefaultCaseClause: { + Tag: nodetag.CaseClause, + NumArgs: 1, + ValueKind: emptyValue, + ExtraValueKind: emptyValue, + VariadicMap: 1, // 1 + }, + opCommClause: { + Tag: nodetag.CommClause, + NumArgs: 2, + ValueKind: emptyValue, + ExtraValueKind: emptyValue, + VariadicMap: 2, // 10 + }, + opDefaultCommClause: { + Tag: nodetag.CommClause, + NumArgs: 1, + ValueKind: emptyValue, + ExtraValueKind: emptyValue, + VariadicMap: 1, // 1 + }, + opForStmt: { + Tag: nodetag.ForStmt, + NumArgs: 1, + ValueKind: emptyValue, + ExtraValueKind: emptyValue, + VariadicMap: 0, // 0 + }, + opForPostStmt: { + Tag: nodetag.ForStmt, + NumArgs: 2, + ValueKind: emptyValue, + ExtraValueKind: emptyValue, + VariadicMap: 0, // 0 + }, + opForCondStmt: { + Tag: nodetag.ForStmt, + NumArgs: 2, + ValueKind: emptyValue, + ExtraValueKind: emptyValue, + VariadicMap: 0, // 0 + }, + opForCondPostStmt: { + Tag: nodetag.ForStmt, + NumArgs: 3, + ValueKind: emptyValue, + ExtraValueKind: emptyValue, + VariadicMap: 0, // 0 + }, + opForInitStmt: { + Tag: nodetag.ForStmt, + NumArgs: 2, + ValueKind: emptyValue, + ExtraValueKind: emptyValue, + VariadicMap: 0, // 0 + }, + opForInitPostStmt: { + Tag: nodetag.ForStmt, + NumArgs: 3, + ValueKind: emptyValue, + ExtraValueKind: emptyValue, + VariadicMap: 0, // 0 + }, + opForInitCondStmt: { + Tag: nodetag.ForStmt, + NumArgs: 3, + ValueKind: emptyValue, + ExtraValueKind: emptyValue, + VariadicMap: 0, // 0 + }, + opForInitCondPostStmt: { + Tag: nodetag.ForStmt, + NumArgs: 4, + ValueKind: emptyValue, + ExtraValueKind: emptyValue, + VariadicMap: 0, // 0 + }, + opRangeStmt: { + Tag: nodetag.RangeStmt, + NumArgs: 2, + ValueKind: emptyValue, + ExtraValueKind: emptyValue, + VariadicMap: 0, // 0 + }, + opRangeKeyStmt: { + Tag: nodetag.RangeStmt, + NumArgs: 3, + ValueKind: tokenValue, + ExtraValueKind: emptyValue, + VariadicMap: 0, // 0 + }, + opRangeKeyValueStmt: { + Tag: nodetag.RangeStmt, + NumArgs: 4, + ValueKind: tokenValue, + ExtraValueKind: emptyValue, + VariadicMap: 0, // 0 + }, + opFieldList: { + Tag: nodetag.Unknown, + NumArgs: 1, + ValueKind: emptyValue, + ExtraValueKind: emptyValue, + VariadicMap: 1, // 1 + }, + opUnnamedField: { + Tag: nodetag.Unknown, + NumArgs: 1, + ValueKind: emptyValue, + ExtraValueKind: emptyValue, + VariadicMap: 0, // 0 + }, + opSimpleField: { + Tag: nodetag.Unknown, + NumArgs: 1, + ValueKind: emptyValue, + ExtraValueKind: stringValue, + VariadicMap: 0, // 0 + }, + opField: { + Tag: nodetag.Unknown, + NumArgs: 2, + ValueKind: emptyValue, + ExtraValueKind: emptyValue, + VariadicMap: 0, // 0 + }, + opMultiField: { + Tag: nodetag.Unknown, + NumArgs: 2, + ValueKind: emptyValue, + ExtraValueKind: emptyValue, + VariadicMap: 1, // 1 + }, + opValueInitSpec: { + Tag: nodetag.ValueSpec, + NumArgs: 2, + ValueKind: emptyValue, + ExtraValueKind: emptyValue, + VariadicMap: 3, // 11 + }, + opTypedValueInitSpec: { + Tag: nodetag.ValueSpec, + NumArgs: 3, + ValueKind: emptyValue, + ExtraValueKind: emptyValue, + VariadicMap: 5, // 101 + }, + opTypedValueSpec: { + Tag: nodetag.ValueSpec, + NumArgs: 2, + ValueKind: emptyValue, + ExtraValueKind: emptyValue, + VariadicMap: 1, // 1 + }, + opTypeSpec: { + Tag: nodetag.TypeSpec, + NumArgs: 2, + ValueKind: emptyValue, + ExtraValueKind: emptyValue, + VariadicMap: 0, // 0 + }, + opTypeAliasSpec: { + Tag: nodetag.TypeSpec, + NumArgs: 2, + ValueKind: 
emptyValue, + ExtraValueKind: emptyValue, + VariadicMap: 0, // 0 + }, + opFuncDecl: { + Tag: nodetag.FuncDecl, + NumArgs: 3, + ValueKind: emptyValue, + ExtraValueKind: emptyValue, + VariadicMap: 0, // 0 + }, + opMethodDecl: { + Tag: nodetag.FuncDecl, + NumArgs: 4, + ValueKind: emptyValue, + ExtraValueKind: emptyValue, + VariadicMap: 0, // 0 + }, + opFuncProtoDecl: { + Tag: nodetag.FuncDecl, + NumArgs: 2, + ValueKind: emptyValue, + ExtraValueKind: emptyValue, + VariadicMap: 0, // 0 + }, + opMethodProtoDecl: { + Tag: nodetag.FuncDecl, + NumArgs: 3, + ValueKind: emptyValue, + ExtraValueKind: emptyValue, + VariadicMap: 0, // 0 + }, + opConstDecl: { + Tag: nodetag.GenDecl, + NumArgs: 1, + ValueKind: emptyValue, + ExtraValueKind: emptyValue, + VariadicMap: 1, // 1 + }, + opVarDecl: { + Tag: nodetag.GenDecl, + NumArgs: 1, + ValueKind: emptyValue, + ExtraValueKind: emptyValue, + VariadicMap: 1, // 1 + }, + opTypeDecl: { + Tag: nodetag.GenDecl, + NumArgs: 1, + ValueKind: emptyValue, + ExtraValueKind: emptyValue, + VariadicMap: 1, // 1 + }, + opEmptyPackage: { + Tag: nodetag.File, + NumArgs: 1, + ValueKind: emptyValue, + ExtraValueKind: emptyValue, + VariadicMap: 0, // 0 + }, +} diff --git a/vendor/github.com/quasilyte/go-ruleguard/internal/gogrep/parse.go b/vendor/github.com/quasilyte/go-ruleguard/internal/gogrep/parse.go new file mode 100644 index 000000000..e26a07212 --- /dev/null +++ b/vendor/github.com/quasilyte/go-ruleguard/internal/gogrep/parse.go @@ -0,0 +1,360 @@ +// Copyright (c) 2017, Daniel Martí +// See LICENSE for licensing information + +package gogrep + +import ( + "bytes" + "fmt" + "go/ast" + "go/parser" + "go/scanner" + "go/token" + "strings" + "text/template" +) + +func transformSource(expr string) (string, []posOffset, error) { + toks, err := tokenize([]byte(expr)) + if err != nil { + return "", nil, fmt.Errorf("cannot tokenize expr: %v", err) + } + var offs []posOffset + lbuf := lineColBuffer{line: 1, col: 1} + lastLit := false + for _, t := range toks { + if lbuf.offs >= t.pos.Offset && lastLit && t.lit != "" { + _, _ = lbuf.WriteString(" ") + } + for lbuf.offs < t.pos.Offset { + _, _ = lbuf.WriteString(" ") + } + if t.lit == "" { + _, _ = lbuf.WriteString(t.tok.String()) + lastLit = false + continue + } + _, _ = lbuf.WriteString(t.lit) + lastLit = strings.TrimSpace(t.lit) != "" + } + // trailing newlines can cause issues with commas + return strings.TrimSpace(lbuf.String()), offs, nil +} + +func parseExpr(fset *token.FileSet, expr string) (ast.Node, error) { + exprStr, offs, err := transformSource(expr) + if err != nil { + return nil, err + } + node, _, err := parseDetectingNode(fset, exprStr) + if err != nil { + err = subPosOffsets(err, offs...) + return nil, fmt.Errorf("cannot parse expr: %v", err) + } + return node, nil +} + +type lineColBuffer struct { + bytes.Buffer + line, col, offs int +} + +func (l *lineColBuffer) WriteString(s string) (n int, err error) { + for _, r := range s { + if r == '\n' { + l.line++ + l.col = 1 + } else { + l.col++ + } + l.offs++ + } + return l.Buffer.WriteString(s) +} + +var tmplDecl = template.Must(template.New("").Parse(`` + + `package p; {{ . }}`)) + +var tmplBlock = template.Must(template.New("").Parse(`` + + `package p; func _() { if true {{ . }} else {} }`)) + +var tmplExprs = template.Must(template.New("").Parse(`` + + `package p; var _ = []interface{}{ {{ . }}, }`)) + +var tmplStmts = template.Must(template.New("").Parse(`` + + `package p; func _() { {{ . 
}} }`)) + +var tmplType = template.Must(template.New("").Parse(`` + + `package p; var _ {{ . }}`)) + +var tmplValSpec = template.Must(template.New("").Parse(`` + + `package p; var {{ . }}`)) + +func execTmpl(tmpl *template.Template, src string) string { + var buf bytes.Buffer + if err := tmpl.Execute(&buf, src); err != nil { + panic(err) + } + return buf.String() +} + +func noBadNodes(node ast.Node) bool { + any := false + ast.Inspect(node, func(n ast.Node) bool { + if any { + return false + } + switch n.(type) { + case *ast.BadExpr, *ast.BadDecl: + any = true + } + return true + }) + return !any +} + +func parseType(fset *token.FileSet, src string) (ast.Expr, *ast.File, error) { + asType := execTmpl(tmplType, src) + f, err := parser.ParseFile(fset, "", asType, 0) + if err != nil { + err = subPosOffsets(err, posOffset{1, 1, 17}) + return nil, nil, err + } + vs := f.Decls[0].(*ast.GenDecl).Specs[0].(*ast.ValueSpec) + return vs.Type, f, nil +} + +// parseDetectingNode tries its best to parse the ast.Node contained in src, as +// one of: *ast.File, ast.Decl, ast.Expr, ast.Stmt, *ast.ValueSpec. +// It also returns the *ast.File used for the parsing, so that the returned node +// can be easily type-checked. +func parseDetectingNode(fset *token.FileSet, src string) (ast.Node, *ast.File, error) { + file := fset.AddFile("", fset.Base(), len(src)) + scan := scanner.Scanner{} + scan.Init(file, []byte(src), nil, 0) + if _, tok, _ := scan.Scan(); tok == token.EOF { + return nil, nil, fmt.Errorf("empty source code") + } + var mainErr error + + // first try as a whole file + if f, err := parser.ParseFile(fset, "", src, 0); err == nil && noBadNodes(f) { + return f, f, nil + } + + // then as a single declaration, or many + asDecl := execTmpl(tmplDecl, src) + if f, err := parser.ParseFile(fset, "", asDecl, 0); err == nil && noBadNodes(f) { + if len(f.Decls) == 1 { + return f.Decls[0], f, nil + } + return f, f, nil + } + + // then as a block; otherwise blocks might be mistaken for composite + // literals further below + asBlock := execTmpl(tmplBlock, src) + if f, err := parser.ParseFile(fset, "", asBlock, 0); err == nil && noBadNodes(f) { + bl := f.Decls[0].(*ast.FuncDecl).Body + if len(bl.List) == 1 { + ifs := bl.List[0].(*ast.IfStmt) + return ifs.Body, f, nil + } + } + + // then as value expressions + asExprs := execTmpl(tmplExprs, src) + if f, err := parser.ParseFile(fset, "", asExprs, 0); err == nil && noBadNodes(f) { + vs := f.Decls[0].(*ast.GenDecl).Specs[0].(*ast.ValueSpec) + cl := vs.Values[0].(*ast.CompositeLit) + if len(cl.Elts) == 1 { + return cl.Elts[0], f, nil + } + return exprSlice(cl.Elts), f, nil + } + + // then try as statements + asStmts := execTmpl(tmplStmts, src) + f, err := parser.ParseFile(fset, "", asStmts, 0) + if err == nil && noBadNodes(f) { + bl := f.Decls[0].(*ast.FuncDecl).Body + if len(bl.List) == 1 { + return bl.List[0], f, nil + } + return stmtSlice(bl.List), f, nil + } + // Statements is what covers most cases, so it will give + // the best overall error message. Show positions + // relative to where the user's code is put in the + // template. + mainErr = subPosOffsets(err, posOffset{1, 1, 22}) + + // type expressions not yet picked up, for e.g. 
chans and interfaces + if typ, f, err := parseType(fset, src); err == nil && noBadNodes(f) { + return typ, f, nil + } + + // value specs + asValSpec := execTmpl(tmplValSpec, src) + if f, err := parser.ParseFile(fset, "", asValSpec, 0); err == nil && noBadNodes(f) { + vs := f.Decls[0].(*ast.GenDecl).Specs[0].(*ast.ValueSpec) + return vs, f, nil + } + return nil, nil, mainErr +} + +type posOffset struct { + atLine, atCol int + offset int +} + +func subPosOffsets(err error, offs ...posOffset) error { + list, ok := err.(scanner.ErrorList) + if !ok { + return err + } + for i, err := range list { + for _, off := range offs { + if err.Pos.Line != off.atLine { + continue + } + if err.Pos.Column < off.atCol { + continue + } + err.Pos.Column -= off.offset + } + list[i] = err + } + return list +} + +type fullToken struct { + pos token.Position + tok token.Token + lit string +} + +type caseStatus uint + +const ( + caseNone caseStatus = iota + caseNeedBlock + caseHere +) + +func tokenize(src []byte) ([]fullToken, error) { + var s scanner.Scanner + fset := token.NewFileSet() + file := fset.AddFile("", fset.Base(), len(src)) + + var err error + onError := func(pos token.Position, msg string) { + switch msg { // allow certain extra chars + case `illegal character U+0024 '$'`: + case `illegal character U+007E '~'`: + default: + err = fmt.Errorf("%v: %s", pos, msg) + } + } + + // we will modify the input source under the scanner's nose to + // enable some features such as regexes. + s.Init(file, src, onError, scanner.ScanComments) + + next := func() fullToken { + pos, tok, lit := s.Scan() + return fullToken{fset.Position(pos), tok, lit} + } + + caseStat := caseNone + + var toks []fullToken + for t := next(); t.tok != token.EOF; t = next() { + switch t.lit { + case "$": // continues below + case "switch", "select", "case": + if t.lit == "case" { + caseStat = caseNone + } else { + caseStat = caseNeedBlock + } + fallthrough + default: // regular Go code + if t.tok == token.LBRACE && caseStat == caseNeedBlock { + caseStat = caseHere + } + toks = append(toks, t) + continue + } + wt, err := tokenizeWildcard(t.pos, next) + if err != nil { + return nil, err + } + if caseStat == caseHere { + toks = append(toks, fullToken{wt.pos, token.IDENT, "case"}) + } + toks = append(toks, wt) + if caseStat == caseHere { + toks = append(toks, fullToken{wt.pos, token.COLON, ""}) + toks = append(toks, fullToken{wt.pos, token.IDENT, "gogrep_body"}) + } + } + return toks, err +} + +type varInfo struct { + Name string + Seq bool +} + +func tokenizeWildcard(pos token.Position, next func() fullToken) (fullToken, error) { + t := next() + any := false + if t.tok == token.MUL { + t = next() + any = true + } + wildName := encodeWildName(t.lit, any) + wt := fullToken{pos, token.IDENT, wildName} + if t.tok != token.IDENT { + return wt, fmt.Errorf("%v: $ must be followed by ident, got %v", + t.pos, t.tok) + } + return wt, nil +} + +const wildSeparator = "ᐸᐳ" + +func isWildName(s string) bool { + return strings.HasPrefix(s, wildSeparator) +} + +func encodeWildName(name string, any bool) string { + suffix := "v" + if any { + suffix = "a" + } + return wildSeparator + name + wildSeparator + suffix +} + +func decodeWildName(s string) varInfo { + s = s[len(wildSeparator):] + nameEnd := strings.Index(s, wildSeparator) + name := s[:nameEnd] + s = s[nameEnd:] + s = s[len(wildSeparator):] + kind := s + return varInfo{Name: name, Seq: kind == "a"} +} + +func decodeWildNode(n ast.Node) varInfo { + switch n := n.(type) { + case *ast.ExprStmt: + return 
decodeWildNode(n.X) + case *ast.Ident: + if isWildName(n.Name) { + return decodeWildName(n.Name) + } + } + return varInfo{} +} diff --git a/vendor/github.com/quasilyte/go-ruleguard/internal/gogrep/slices.go b/vendor/github.com/quasilyte/go-ruleguard/internal/gogrep/slices.go new file mode 100644 index 000000000..a9f6c0ae0 --- /dev/null +++ b/vendor/github.com/quasilyte/go-ruleguard/internal/gogrep/slices.go @@ -0,0 +1,51 @@ +package gogrep + +import ( + "go/ast" + "go/token" +) + +type nodeSlice interface { + at(i int) ast.Node + len() int + slice(from, to int) nodeSlice + ast.Node +} + +type ( + exprSlice []ast.Expr + stmtSlice []ast.Stmt + fieldSlice []*ast.Field + identSlice []*ast.Ident + specSlice []ast.Spec +) + +func (l exprSlice) len() int { return len(l) } +func (l exprSlice) at(i int) ast.Node { return l[i] } +func (l exprSlice) slice(i, j int) nodeSlice { return l[i:j] } +func (l exprSlice) Pos() token.Pos { return l[0].Pos() } +func (l exprSlice) End() token.Pos { return l[len(l)-1].End() } + +func (l stmtSlice) len() int { return len(l) } +func (l stmtSlice) at(i int) ast.Node { return l[i] } +func (l stmtSlice) slice(i, j int) nodeSlice { return l[i:j] } +func (l stmtSlice) Pos() token.Pos { return l[0].Pos() } +func (l stmtSlice) End() token.Pos { return l[len(l)-1].End() } + +func (l fieldSlice) len() int { return len(l) } +func (l fieldSlice) at(i int) ast.Node { return l[i] } +func (l fieldSlice) slice(i, j int) nodeSlice { return l[i:j] } +func (l fieldSlice) Pos() token.Pos { return l[0].Pos() } +func (l fieldSlice) End() token.Pos { return l[len(l)-1].End() } + +func (l identSlice) len() int { return len(l) } +func (l identSlice) at(i int) ast.Node { return l[i] } +func (l identSlice) slice(i, j int) nodeSlice { return l[i:j] } +func (l identSlice) Pos() token.Pos { return l[0].Pos() } +func (l identSlice) End() token.Pos { return l[len(l)-1].End() } + +func (l specSlice) len() int { return len(l) } +func (l specSlice) at(i int) ast.Node { return l[i] } +func (l specSlice) slice(i, j int) nodeSlice { return l[i:j] } +func (l specSlice) Pos() token.Pos { return l[0].Pos() } +func (l specSlice) End() token.Pos { return l[len(l)-1].End() } diff --git a/vendor/github.com/quasilyte/go-ruleguard/internal/golist/golist.go b/vendor/github.com/quasilyte/go-ruleguard/internal/golist/golist.go new file mode 100644 index 000000000..50f9cca0b --- /dev/null +++ b/vendor/github.com/quasilyte/go-ruleguard/internal/golist/golist.go @@ -0,0 +1,30 @@ +package golist + +import ( + "bytes" + "encoding/json" + "fmt" + "io" + "os/exec" +) + +// Package is `go list --json` output structure. +type Package struct { + Dir string // directory containing package sources + ImportPath string // import path of package in dir + GoFiles []string // .go source files (excluding CgoFiles, TestGoFiles, XTestGoFiles) +} + +// JSON runs `go list --json` for the specified pkgName and returns the parsed JSON. 
+func JSON(pkgPath string) (*Package, error) { + out, err := exec.Command("go", "list", "--json", pkgPath).CombinedOutput() + if err != nil { + return nil, fmt.Errorf("go list error (%v): %s", err, out) + } + + var pkg Package + if err := json.NewDecoder(bytes.NewReader(out)).Decode(&pkg); err != io.EOF && err != nil { + return nil, err + } + return &pkg, nil +} diff --git a/vendor/github.com/quasilyte/go-ruleguard/internal/xtypes/xtypes.go b/vendor/github.com/quasilyte/go-ruleguard/internal/xtypes/xtypes.go new file mode 100644 index 000000000..028a5f141 --- /dev/null +++ b/vendor/github.com/quasilyte/go-ruleguard/internal/xtypes/xtypes.go @@ -0,0 +1,256 @@ +package xtypes + +import ( + "go/types" +) + +// Implements reports whether type v implements iface. +// +// Unlike types.Implements(), it permits X and Y named types +// to be considered identical even if their addresses are different. +func Implements(v types.Type, iface *types.Interface) bool { + if iface.Empty() { + return true + } + + if v, _ := v.Underlying().(*types.Interface); v != nil { + for i := 0; i < iface.NumMethods(); i++ { + m := iface.Method(i) + obj, _, _ := types.LookupFieldOrMethod(v, false, m.Pkg(), m.Name()) + switch { + case obj == nil: + return false + case !Identical(obj.Type(), m.Type()): + return false + } + } + return true + } + + // A concrete type v implements iface if it implements all methods of iface. + for i := 0; i < iface.NumMethods(); i++ { + m := iface.Method(i) + + obj, _, _ := types.LookupFieldOrMethod(v, false, m.Pkg(), m.Name()) + if obj == nil { + return false + } + + f, ok := obj.(*types.Func) + if !ok { + return false + } + + if !Identical(f.Type(), m.Type()) { + return false + } + } + + return true +} + +// Identical reports whether x and y are identical types. +// +// Unlike types.Identical(), it permits X and Y named types +// to be considered identical even if their addresses are different. +func Identical(x, y types.Type) bool { + return typeIdentical(x, y, nil) +} + +func typeIdentical(x, y types.Type, p *ifacePair) bool { + if x == y { + return true + } + + switch x := x.(type) { + case nil: + return false + + case *types.Basic: + // Basic types are singletons except for the rune and byte + // aliases, thus we cannot solely rely on the x == y check + // above. See also comment in TypeName.IsAlias. + if y, ok := y.(*types.Basic); ok { + return x.Kind() == y.Kind() + } + + case *types.Array: + // Two array types are identical if they have identical element types + // and the same array length. + if y, ok := y.(*types.Array); ok { + // If one or both array lengths are unknown (< 0) due to some error, + // assume they are the same to avoid spurious follow-on errors. + return (x.Len() < 0 || y.Len() < 0 || x.Len() == y.Len()) && typeIdentical(x.Elem(), y.Elem(), p) + } + + case *types.Slice: + // Two slice types are identical if they have identical element types. + if y, ok := y.(*types.Slice); ok { + return typeIdentical(x.Elem(), y.Elem(), p) + } + + case *types.Struct: + // Two struct types are identical if they have the same sequence of fields, + // and if corresponding fields have the same names, and identical types, + // and identical tags. Two embedded fields are considered to have the same + // name. Lower-case field names from different packages are always different. 
+ if y, ok := y.(*types.Struct); ok { + if x.NumFields() == y.NumFields() { + for i := 0; i < x.NumFields(); i++ { + f := x.Field(i) + g := y.Field(i) + if f.Embedded() != g.Embedded() || !sameID(f, g.Pkg(), g.Name()) || !typeIdentical(f.Type(), g.Type(), p) { + return false + } + } + return true + } + } + + case *types.Pointer: + // Two pointer types are identical if they have identical base types. + if y, ok := y.(*types.Pointer); ok { + return typeIdentical(x.Elem(), y.Elem(), p) + } + + case *types.Tuple: + // Two tuples types are identical if they have the same number of elements + // and corresponding elements have identical types. + if y, ok := y.(*types.Tuple); ok { + if x.Len() == y.Len() { + if x != nil { + for i := 0; i < x.Len(); i++ { + v := x.At(i) + w := y.At(i) + if !typeIdentical(v.Type(), w.Type(), p) { + return false + } + } + } + return true + } + } + + case *types.Signature: + // Two function types are identical if they have the same number of parameters + // and result values, corresponding parameter and result types are identical, + // and either both functions are variadic or neither is. Parameter and result + // names are not required to match. + if y, ok := y.(*types.Signature); ok { + return x.Variadic() == y.Variadic() && + typeIdentical(x.Params(), y.Params(), p) && + typeIdentical(x.Results(), y.Results(), p) + } + + case *types.Interface: + // Two interface types are identical if they have the same set of methods with + // the same names and identical function types. Lower-case method names from + // different packages are always different. The order of the methods is irrelevant. + if y, ok := y.(*types.Interface); ok { + if x.NumMethods() != y.NumMethods() { + return false + } + // Interface types are the only types where cycles can occur + // that are not "terminated" via named types; and such cycles + // can only be created via method parameter types that are + // anonymous interfaces (directly or indirectly) embedding + // the current interface. Example: + // + // type T interface { + // m() interface{T} + // } + // + // If two such (differently named) interfaces are compared, + // endless recursion occurs if the cycle is not detected. + // + // If x and y were compared before, they must be equal + // (if they were not, the recursion would have stopped); + // search the ifacePair stack for the same pair. + // + // This is a quadratic algorithm, but in practice these stacks + // are extremely short (bounded by the nesting depth of interface + // type declarations that recur via parameter types, an extremely + // rare occurrence). An alternative implementation might use a + // "visited" map, but that is probably less efficient overall. + q := &ifacePair{x, y, p} + for p != nil { + if p.identical(q) { + return true // same pair was compared before + } + p = p.prev + } + for i := 0; i < x.NumMethods(); i++ { + f := x.Method(i) + g := y.Method(i) + if f.Id() != g.Id() || !typeIdentical(f.Type(), g.Type(), q) { + return false + } + } + return true + } + + case *types.Map: + // Two map types are identical if they have identical key and value types. + if y, ok := y.(*types.Map); ok { + return typeIdentical(x.Key(), y.Key(), p) && typeIdentical(x.Elem(), y.Elem(), p) + } + + case *types.Chan: + // Two channel types are identical if they have identical value types + // and the same direction. 
+ if y, ok := y.(*types.Chan); ok { + return x.Dir() == y.Dir() && typeIdentical(x.Elem(), y.Elem(), p) + } + + case *types.Named: + // Two named types are identical if their type names originate + // in the same type declaration. + y, ok := y.(*types.Named) + if !ok { + return false + } + if x.Obj() == y.Obj() { + return true + } + return sameID(x.Obj(), y.Obj().Pkg(), y.Obj().Name()) + + default: + panic("unreachable") + } + + return false +} + +// An ifacePair is a node in a stack of interface type pairs compared for identity. +type ifacePair struct { + x *types.Interface + y *types.Interface + prev *ifacePair +} + +func (p *ifacePair) identical(q *ifacePair) bool { + return (p.x == q.x && p.y == q.y) || + (p.x == q.y && p.y == q.x) +} + +func sameID(obj types.Object, pkg *types.Package, name string) bool { + // spec: + // "Two identifiers are different if they are spelled differently, + // or if they appear in different packages and are not exported. + // Otherwise, they are the same." + if name != obj.Name() { + return false + } + // obj.Name == name + if obj.Exported() { + return true + } + // not exported, so packages must be the same (pkg == nil for + // fields in Universe scope; this can only happen for types + // introduced via Eval) + if pkg == nil || obj.Pkg() == nil { + return pkg == obj.Pkg() + } + // pkg != nil && obj.pkg != nil + return pkg.Path() == obj.Pkg().Path() +} diff --git a/vendor/github.com/quasilyte/go-ruleguard/nodetag/nodetag.go b/vendor/github.com/quasilyte/go-ruleguard/nodetag/nodetag.go new file mode 100644 index 000000000..a9098c29f --- /dev/null +++ b/vendor/github.com/quasilyte/go-ruleguard/nodetag/nodetag.go @@ -0,0 +1,277 @@ +package nodetag + +import "go/ast" + +type Value int + +const ( + Unknown Value = iota + + ArrayType + AssignStmt + BasicLit + BinaryExpr + BlockStmt + BranchStmt + CallExpr + CaseClause + ChanType + CommClause + CompositeLit + DeclStmt + DeferStmt + Ellipsis + EmptyStmt + ExprStmt + File + ForStmt + FuncDecl + FuncLit + FuncType + GenDecl + GoStmt + Ident + IfStmt + ImportSpec + IncDecStmt + IndexExpr + InterfaceType + KeyValueExpr + LabeledStmt + MapType + ParenExpr + RangeStmt + ReturnStmt + SelectStmt + SelectorExpr + SendStmt + SliceExpr + StarExpr + StructType + SwitchStmt + TypeAssertExpr + TypeSpec + TypeSwitchStmt + UnaryExpr + ValueSpec + + NumBuckets + + StmtList // gogrep stmt list + ExprList // gogrep expr list + + Node // ast.Node + Expr // ast.Expr + Stmt // ast.Stmt +) + +func FromNode(n ast.Node) Value { + switch n.(type) { + case *ast.ArrayType: + return ArrayType + case *ast.AssignStmt: + return AssignStmt + case *ast.BasicLit: + return BasicLit + case *ast.BinaryExpr: + return BinaryExpr + case *ast.BlockStmt: + return BlockStmt + case *ast.BranchStmt: + return BranchStmt + case *ast.CallExpr: + return CallExpr + case *ast.CaseClause: + return CaseClause + case *ast.ChanType: + return ChanType + case *ast.CommClause: + return CommClause + case *ast.CompositeLit: + return CompositeLit + case *ast.DeclStmt: + return DeclStmt + case *ast.DeferStmt: + return DeferStmt + case *ast.Ellipsis: + return Ellipsis + case *ast.EmptyStmt: + return EmptyStmt + case *ast.ExprStmt: + return ExprStmt + case *ast.File: + return File + case *ast.ForStmt: + return ForStmt + case *ast.FuncDecl: + return FuncDecl + case *ast.FuncLit: + return FuncLit + case *ast.FuncType: + return FuncType + case *ast.GenDecl: + return GenDecl + case *ast.GoStmt: + return GoStmt + case *ast.Ident: + return Ident + case *ast.IfStmt: + return IfStmt + 
case *ast.ImportSpec: + return ImportSpec + case *ast.IncDecStmt: + return IncDecStmt + case *ast.IndexExpr: + return IndexExpr + case *ast.InterfaceType: + return InterfaceType + case *ast.KeyValueExpr: + return KeyValueExpr + case *ast.LabeledStmt: + return LabeledStmt + case *ast.MapType: + return MapType + case *ast.ParenExpr: + return ParenExpr + case *ast.RangeStmt: + return RangeStmt + case *ast.ReturnStmt: + return ReturnStmt + case *ast.SelectStmt: + return SelectStmt + case *ast.SelectorExpr: + return SelectorExpr + case *ast.SendStmt: + return SendStmt + case *ast.SliceExpr: + return SliceExpr + case *ast.StarExpr: + return StarExpr + case *ast.StructType: + return StructType + case *ast.SwitchStmt: + return SwitchStmt + case *ast.TypeAssertExpr: + return TypeAssertExpr + case *ast.TypeSpec: + return TypeSpec + case *ast.TypeSwitchStmt: + return TypeSwitchStmt + case *ast.UnaryExpr: + return UnaryExpr + case *ast.ValueSpec: + return ValueSpec + default: + return Unknown + } +} + +func FromString(s string) Value { + switch s { + case "Expr": + return Expr + case "Stmt": + return Stmt + case "Node": + return Node + } + + switch s { + case "ArrayType": + return ArrayType + case "AssignStmt": + return AssignStmt + case "BasicLit": + return BasicLit + case "BinaryExpr": + return BinaryExpr + case "BlockStmt": + return BlockStmt + case "BranchStmt": + return BranchStmt + case "CallExpr": + return CallExpr + case "CaseClause": + return CaseClause + case "ChanType": + return ChanType + case "CommClause": + return CommClause + case "CompositeLit": + return CompositeLit + case "DeclStmt": + return DeclStmt + case "DeferStmt": + return DeferStmt + case "Ellipsis": + return Ellipsis + case "EmptyStmt": + return EmptyStmt + case "ExprStmt": + return ExprStmt + case "File": + return File + case "ForStmt": + return ForStmt + case "FuncDecl": + return FuncDecl + case "FuncLit": + return FuncLit + case "FuncType": + return FuncType + case "GenDecl": + return GenDecl + case "GoStmt": + return GoStmt + case "Ident": + return Ident + case "IfStmt": + return IfStmt + case "ImportSpec": + return ImportSpec + case "IncDecStmt": + return IncDecStmt + case "IndexExpr": + return IndexExpr + case "InterfaceType": + return InterfaceType + case "KeyValueExpr": + return KeyValueExpr + case "LabeledStmt": + return LabeledStmt + case "MapType": + return MapType + case "ParenExpr": + return ParenExpr + case "RangeStmt": + return RangeStmt + case "ReturnStmt": + return ReturnStmt + case "SelectStmt": + return SelectStmt + case "SelectorExpr": + return SelectorExpr + case "SendStmt": + return SendStmt + case "SliceExpr": + return SliceExpr + case "StarExpr": + return StarExpr + case "StructType": + return StructType + case "SwitchStmt": + return SwitchStmt + case "TypeAssertExpr": + return TypeAssertExpr + case "TypeSpec": + return TypeSpec + case "TypeSwitchStmt": + return TypeSwitchStmt + case "UnaryExpr": + return UnaryExpr + case "ValueSpec": + return ValueSpec + default: + return Unknown + } +} diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/bundle.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/bundle.go new file mode 100644 index 000000000..950e3c410 --- /dev/null +++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/bundle.go @@ -0,0 +1,19 @@ +package ruleguard + +import ( + "path/filepath" + + "github.com/quasilyte/go-ruleguard/internal/golist" +) + +func findBundleFiles(pkgPath string) ([]string, error) { + pkg, err := golist.JSON(pkgPath) + if err != nil { + return nil, err + 
} + files := make([]string, 0, len(pkg.GoFiles)) + for _, f := range pkg.GoFiles { + files = append(files, filepath.Join(pkg.Dir, f)) + } + return files, nil +} diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/engine.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/engine.go new file mode 100644 index 000000000..66a4fd58a --- /dev/null +++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/engine.go @@ -0,0 +1,171 @@ +package ruleguard + +import ( + "errors" + "fmt" + "go/ast" + "go/types" + "io" + "strings" + "sync" + + "github.com/quasilyte/go-ruleguard/ruleguard/quasigo" + "github.com/quasilyte/go-ruleguard/ruleguard/typematch" +) + +type engine struct { + state *engineState + + ruleSet *goRuleSet +} + +func newEngine() *engine { + return &engine{ + state: newEngineState(), + } +} + +func (e *engine) Load(ctx *ParseContext, filename string, r io.Reader) error { + config := rulesParserConfig{ + state: e.state, + ctx: ctx, + importer: newGoImporter(e.state, goImporterConfig{ + fset: ctx.Fset, + debugImports: ctx.DebugImports, + debugPrint: ctx.DebugPrint, + }), + itab: typematch.NewImportsTab(stdlibPackages), + } + p := newRulesParser(config) + rset, err := p.ParseFile(filename, r) + if err != nil { + return err + } + + if e.ruleSet == nil { + e.ruleSet = rset + } else { + combinedRuleSet, err := mergeRuleSets([]*goRuleSet{e.ruleSet, rset}) + if err != nil { + return err + } + e.ruleSet = combinedRuleSet + } + + return nil +} + +func (e *engine) Run(ctx *RunContext, f *ast.File) error { + if e.ruleSet == nil { + return errors.New("used Run() with an empty rule set; forgot to call Load() first?") + } + rset := cloneRuleSet(e.ruleSet) + return newRulesRunner(ctx, e.state, rset).run(f) +} + +// engineState is a shared state inside the engine. +type engineState struct { + env *quasigo.Env + + typeByFQNMu sync.RWMutex + typeByFQN map[string]types.Type + + pkgCacheMu sync.RWMutex + // pkgCache contains all imported packages, from any importer. + pkgCache map[string]*types.Package +} + +func newEngineState() *engineState { + env := quasigo.NewEnv() + state := &engineState{ + env: env, + pkgCache: make(map[string]*types.Package), + typeByFQN: map[string]types.Type{}, + } + for key, typ := range typeByName { + state.typeByFQN[key] = typ + } + initEnv(state, env) + return state +} + +func (state *engineState) GetCachedPackage(pkgPath string) *types.Package { + state.pkgCacheMu.RLock() + pkg := state.pkgCache[pkgPath] + state.pkgCacheMu.RUnlock() + return pkg +} + +func (state *engineState) AddCachedPackage(pkgPath string, pkg *types.Package) { + state.pkgCacheMu.Lock() + state.addCachedPackage(pkgPath, pkg) + state.pkgCacheMu.Unlock() +} + +func (state *engineState) addCachedPackage(pkgPath string, pkg *types.Package) { + state.pkgCache[pkgPath] = pkg + + // Also add all complete packages that are dependencies of the pkg. + // This way we cache more and avoid duplicated package loading + // which can lead to typechecking issues. + // + // Note that it does not increase our memory consumption + // as these packages are reachable via pkg, so they'll + // not be freed by GC anyway. + for _, imported := range pkg.Imports() { + if imported.Complete() { + state.addCachedPackage(imported.Path(), imported) + } + } +} + +func (state *engineState) FindType(importer *goImporter, currentPkg *types.Package, fqn string) (types.Type, error) { + // TODO(quasilyte): we can pre-populate the cache during the Load() phase. 
+ // If we inspect the AST of a user function, all constant FQN can be preloaded. + // It could be a good thing as Load() is not expected to be executed in + // concurrent environment, so write-locking is not a big deal there. + + state.typeByFQNMu.RLock() + cachedType, ok := state.typeByFQN[fqn] + state.typeByFQNMu.RUnlock() + if ok { + return cachedType, nil + } + + // Code below is under a write critical section. + state.typeByFQNMu.Lock() + defer state.typeByFQNMu.Unlock() + + typ, err := state.findTypeNoCache(importer, currentPkg, fqn) + if err != nil { + return nil, err + } + state.typeByFQN[fqn] = typ + return typ, nil +} + +func (state *engineState) findTypeNoCache(importer *goImporter, currentPkg *types.Package, fqn string) (types.Type, error) { + pos := strings.LastIndexByte(fqn, '.') + if pos == -1 { + return nil, fmt.Errorf("%s is not a valid FQN", fqn) + } + pkgPath := fqn[:pos] + objectName := fqn[pos+1:] + var pkg *types.Package + if directDep := findDependency(currentPkg, pkgPath); directDep != nil { + pkg = directDep + } else { + loadedPkg, err := importer.Import(pkgPath) + if err != nil { + return nil, err + } + pkg = loadedPkg + } + obj := pkg.Scope().Lookup(objectName) + if obj == nil { + return nil, fmt.Errorf("%s is not found in %s", objectName, pkgPath) + } + typ := obj.Type() + state.typeByFQN[fqn] = typ + return typ, nil +} diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/filters.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/filters.go new file mode 100644 index 000000000..4918cbb3c --- /dev/null +++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/filters.go @@ -0,0 +1,267 @@ +package ruleguard + +import ( + "go/ast" + "go/constant" + "go/token" + "go/types" + "path/filepath" + "regexp" + + "github.com/quasilyte/go-ruleguard/internal/xtypes" + "github.com/quasilyte/go-ruleguard/nodetag" + "github.com/quasilyte/go-ruleguard/ruleguard/quasigo" + "github.com/quasilyte/go-ruleguard/ruleguard/typematch" +) + +const filterSuccess = matchFilterResult("") + +func filterFailure(reason string) matchFilterResult { + return matchFilterResult(reason) +} + +func makeNotFilter(src string, x matchFilter) filterFunc { + return func(params *filterParams) matchFilterResult { + if x.fn(params).Matched() { + return matchFilterResult(src) + } + return "" + } +} + +func makeAndFilter(lhs, rhs matchFilter) filterFunc { + return func(params *filterParams) matchFilterResult { + if lhsResult := lhs.fn(params); !lhsResult.Matched() { + return lhsResult + } + return rhs.fn(params) + } +} + +func makeOrFilter(lhs, rhs matchFilter) filterFunc { + return func(params *filterParams) matchFilterResult { + if lhsResult := lhs.fn(params); lhsResult.Matched() { + return filterSuccess + } + return rhs.fn(params) + } +} + +func makeFileImportsFilter(src, pkgPath string) filterFunc { + return func(params *filterParams) matchFilterResult { + _, imported := params.imports[pkgPath] + if imported { + return filterSuccess + } + return filterFailure(src) + } +} + +func makeFilePkgPathMatchesFilter(src string, re *regexp.Regexp) filterFunc { + return func(params *filterParams) matchFilterResult { + pkgPath := params.ctx.Pkg.Path() + if re.MatchString(pkgPath) { + return filterSuccess + } + return filterFailure(src) + } +} + +func makeFileNameMatchesFilter(src string, re *regexp.Regexp) filterFunc { + return func(params *filterParams) matchFilterResult { + if re.MatchString(filepath.Base(params.filename)) { + return filterSuccess + } + return filterFailure(src) + } +} + +func 
makePureFilter(src, varname string) filterFunc { + return func(params *filterParams) matchFilterResult { + n := params.subExpr(varname) + if isPure(params.ctx.Types, n) { + return filterSuccess + } + return filterFailure(src) + } +} + +func makeConstFilter(src, varname string) filterFunc { + return func(params *filterParams) matchFilterResult { + n := params.subExpr(varname) + if isConstant(params.ctx.Types, n) { + return filterSuccess + } + return filterFailure(src) + } +} + +func makeAddressableFilter(src, varname string) filterFunc { + return func(params *filterParams) matchFilterResult { + n := params.subExpr(varname) + if isAddressable(params.ctx.Types, n) { + return filterSuccess + } + return filterFailure(src) + } +} + +func makeCustomVarFilter(src, varname string, fn *quasigo.Func) filterFunc { + return func(params *filterParams) matchFilterResult { + // TODO(quasilyte): what if bytecode function panics due to the programming error? + // We should probably catch the panic here, print trace and return "false" + // from the filter (or even propagate that panic to let it crash). + params.varname = varname + result := quasigo.Call(params.env, fn, params) + if result.Value().(bool) { + return filterSuccess + } + return filterFailure(src) + } +} + +func makeTypeImplementsFilter(src, varname string, iface *types.Interface) filterFunc { + return func(params *filterParams) matchFilterResult { + typ := params.typeofNode(params.subExpr(varname)) + if xtypes.Implements(typ, iface) { + return filterSuccess + } + return filterFailure(src) + } +} + +func makeTypeIsFilter(src, varname string, underlying bool, pat *typematch.Pattern) filterFunc { + if underlying { + return func(params *filterParams) matchFilterResult { + typ := params.typeofNode(params.subExpr(varname)).Underlying() + if pat.MatchIdentical(typ) { + return filterSuccess + } + return filterFailure(src) + } + } + return func(params *filterParams) matchFilterResult { + typ := params.typeofNode(params.subExpr(varname)) + if pat.MatchIdentical(typ) { + return filterSuccess + } + return filterFailure(src) + } +} + +func makeTypeConvertibleToFilter(src, varname string, dstType types.Type) filterFunc { + return func(params *filterParams) matchFilterResult { + typ := params.typeofNode(params.subExpr(varname)) + if types.ConvertibleTo(typ, dstType) { + return filterSuccess + } + return filterFailure(src) + } +} + +func makeTypeAssignableToFilter(src, varname string, dstType types.Type) filterFunc { + return func(params *filterParams) matchFilterResult { + typ := params.typeofNode(params.subExpr(varname)) + if types.AssignableTo(typ, dstType) { + return filterSuccess + } + return filterFailure(src) + } +} + +func makeTypeSizeConstFilter(src, varname string, op token.Token, rhsValue constant.Value) filterFunc { + return func(params *filterParams) matchFilterResult { + typ := params.typeofNode(params.subExpr(varname)) + lhsValue := constant.MakeInt64(params.ctx.Sizes.Sizeof(typ)) + if constant.Compare(lhsValue, op, rhsValue) { + return filterSuccess + } + return filterFailure(src) + } +} + +func makeValueIntConstFilter(src, varname string, op token.Token, rhsValue constant.Value) filterFunc { + return func(params *filterParams) matchFilterResult { + lhsValue := intValueOf(params.ctx.Types, params.subExpr(varname)) + if lhsValue == nil { + return filterFailure(src) // The value is unknown + } + if constant.Compare(lhsValue, op, rhsValue) { + return filterSuccess + } + return filterFailure(src) + } +} + +func makeValueIntFilter(src, varname 
string, op token.Token, rhsVarname string) filterFunc { + return func(params *filterParams) matchFilterResult { + lhsValue := intValueOf(params.ctx.Types, params.subExpr(varname)) + if lhsValue == nil { + return filterFailure(src) + } + rhsValue := intValueOf(params.ctx.Types, params.subExpr(rhsVarname)) + if rhsValue == nil { + return filterFailure(src) + } + if constant.Compare(lhsValue, op, rhsValue) { + return filterSuccess + } + return filterFailure(src) + } +} + +func makeTextConstFilter(src, varname string, op token.Token, rhsValue constant.Value) filterFunc { + return func(params *filterParams) matchFilterResult { + s := params.nodeText(params.subNode(varname)) + lhsValue := constant.MakeString(string(s)) + if constant.Compare(lhsValue, op, rhsValue) { + return filterSuccess + } + return filterFailure(src) + } +} + +func makeTextFilter(src, varname string, op token.Token, rhsVarname string) filterFunc { + return func(params *filterParams) matchFilterResult { + s1 := params.nodeText(params.subNode(varname)) + lhsValue := constant.MakeString(string(s1)) + n, _ := params.match.CapturedByName(rhsVarname) + s2 := params.nodeText(n) + rhsValue := constant.MakeString(string(s2)) + if constant.Compare(lhsValue, op, rhsValue) { + return filterSuccess + } + return filterFailure(src) + } +} + +func makeTextMatchesFilter(src, varname string, re *regexp.Regexp) filterFunc { + return func(params *filterParams) matchFilterResult { + if re.Match(params.nodeText(params.subNode(varname))) { + return filterSuccess + } + return filterFailure(src) + } +} + +func makeNodeIsFilter(src, varname string, tag nodetag.Value) filterFunc { + // TODO: add comment nodes support? + return func(params *filterParams) matchFilterResult { + n := params.subExpr(varname) + var matched bool + switch tag { + case nodetag.Expr: + _, matched = n.(ast.Expr) + case nodetag.Stmt: + _, matched = n.(ast.Stmt) + case nodetag.Node: + _, matched = n.(ast.Node) + default: + matched = (tag == nodetag.FromNode(n)) + } + if matched { + return filterSuccess + } + return filterFailure(src) + } +} diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/gorule.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/gorule.go new file mode 100644 index 000000000..08aee9132 --- /dev/null +++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/gorule.go @@ -0,0 +1,146 @@ +package ruleguard + +import ( + "fmt" + "go/ast" + "go/token" + "go/types" + "regexp" + + "github.com/quasilyte/go-ruleguard/internal/gogrep" + "github.com/quasilyte/go-ruleguard/nodetag" + "github.com/quasilyte/go-ruleguard/ruleguard/quasigo" +) + +type goRuleSet struct { + universal *scopedGoRuleSet + + groups map[string]token.Position // To handle redefinitions +} + +type scopedGoRuleSet struct { + categorizedNum int + rulesByTag [nodetag.NumBuckets][]goRule + commentRules []goCommentRule +} + +type goCommentRule struct { + base goRule + pat *regexp.Regexp + captureGroups bool +} + +type goRule struct { + group string + filename string + line int + pat *gogrep.Pattern + msg string + location string + suggestion string + filter matchFilter +} + +type matchFilterResult string + +func (s matchFilterResult) Matched() bool { return s == "" } + +func (s matchFilterResult) RejectReason() string { return string(s) } + +type filterFunc func(*filterParams) matchFilterResult + +type matchFilter struct { + src string + fn func(*filterParams) matchFilterResult +} + +type filterParams struct { + ctx *RunContext + filename string + imports map[string]struct{} + env 
*quasigo.EvalEnv + + importer *goImporter + + match matchData + + nodeText func(n ast.Node) []byte + + // varname is set only for custom filters before bytecode function is called. + varname string +} + +func (params *filterParams) subNode(name string) ast.Node { + n, _ := params.match.CapturedByName(name) + return n +} + +func (params *filterParams) subExpr(name string) ast.Expr { + n, _ := params.match.CapturedByName(name) + switch n := n.(type) { + case ast.Expr: + return n + case *ast.ExprStmt: + return n.X + default: + return nil + } +} + +func (params *filterParams) typeofNode(n ast.Node) types.Type { + if e, ok := n.(ast.Expr); ok { + if typ := params.ctx.Types.TypeOf(e); typ != nil { + return typ + } + } + + return types.Typ[types.Invalid] +} + +func cloneRuleSet(rset *goRuleSet) *goRuleSet { + out, err := mergeRuleSets([]*goRuleSet{rset}) + if err != nil { + panic(err) // Should never happen + } + return out +} + +func mergeRuleSets(toMerge []*goRuleSet) (*goRuleSet, error) { + out := &goRuleSet{ + universal: &scopedGoRuleSet{}, + groups: make(map[string]token.Position), + } + + for _, x := range toMerge { + out.universal = appendScopedRuleSet(out.universal, x.universal) + for group, pos := range x.groups { + if prevPos, ok := out.groups[group]; ok { + newRef := fmt.Sprintf("%s:%d", pos.Filename, pos.Line) + oldRef := fmt.Sprintf("%s:%d", prevPos.Filename, prevPos.Line) + return nil, fmt.Errorf("%s: redefinition of %s(), previously defined at %s", newRef, group, oldRef) + } + out.groups[group] = pos + } + } + + return out, nil +} + +func appendScopedRuleSet(dst, src *scopedGoRuleSet) *scopedGoRuleSet { + for tag, rules := range src.rulesByTag { + dst.rulesByTag[tag] = append(dst.rulesByTag[tag], cloneRuleSlice(rules)...) + dst.categorizedNum += len(rules) + } + dst.commentRules = append(dst.commentRules, src.commentRules...) + return dst +} + +func cloneRuleSlice(slice []goRule) []goRule { + out := make([]goRule, len(slice)) + for i, rule := range slice { + clone := rule + clone.pat = rule.pat.Clone() + out[i] = clone + } + return out +} diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/goutil/goutil.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/goutil/goutil.go new file mode 100644 index 000000000..6cc4d9056 --- /dev/null +++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/goutil/goutil.go @@ -0,0 +1,21 @@ +package goutil + +import ( + "go/ast" + "go/printer" + "go/token" + "strings" +) + +// SprintNode returns the textual representation of n. +// If fset is nil, freshly created file set will be used. 
+func SprintNode(fset *token.FileSet, n ast.Node) string { + if fset == nil { + fset = token.NewFileSet() + } + var buf strings.Builder + if err := printer.Fprint(&buf, fset, n); err != nil { + return "" + } + return buf.String() +} diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/goutil/resolve.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/goutil/resolve.go new file mode 100644 index 000000000..8705707ac --- /dev/null +++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/goutil/resolve.go @@ -0,0 +1,33 @@ +package goutil + +import ( + "go/ast" + "go/types" + + "golang.org/x/tools/go/ast/astutil" +) + +func ResolveFunc(info *types.Info, callable ast.Expr) (ast.Expr, *types.Func) { + switch callable := astutil.Unparen(callable).(type) { + case *ast.Ident: + sig, ok := info.ObjectOf(callable).(*types.Func) + if !ok { + return nil, nil + } + return nil, sig + + case *ast.SelectorExpr: + sig, ok := info.ObjectOf(callable.Sel).(*types.Func) + if !ok { + return nil, nil + } + isMethod := sig.Type().(*types.Signature).Recv() != nil + if _, ok := callable.X.(*ast.Ident); ok && !isMethod { + return nil, sig + } + return callable.X, sig + + default: + return nil, nil + } +} diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/importer.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/importer.go new file mode 100644 index 000000000..06a0bbf9f --- /dev/null +++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/importer.go @@ -0,0 +1,116 @@ +package ruleguard + +import ( + "fmt" + "go/ast" + "go/importer" + "go/parser" + "go/token" + "go/types" + "path/filepath" + "runtime" + + "github.com/quasilyte/go-ruleguard/internal/golist" +) + +// goImporter is a `types.Importer` that tries to load a package no matter what. +// It iterates through multiple import strategies and accepts whatever succeeds first. +type goImporter struct { + // TODO(quasilyte): share importers with gogrep? + + state *engineState + + defaultImporter types.Importer + srcImporter types.Importer + + fset *token.FileSet + + debugImports bool + debugPrint func(string) +} + +type goImporterConfig struct { + fset *token.FileSet + debugImports bool + debugPrint func(string) +} + +func newGoImporter(state *engineState, config goImporterConfig) *goImporter { + return &goImporter{ + state: state, + fset: config.fset, + debugImports: config.debugImports, + debugPrint: config.debugPrint, + defaultImporter: importer.Default(), + srcImporter: importer.ForCompiler(config.fset, "source", nil), + } +} + +func (imp *goImporter) Import(path string) (*types.Package, error) { + if pkg := imp.state.GetCachedPackage(path); pkg != nil { + if imp.debugImports { + imp.debugPrint(fmt.Sprintf(`imported "%s" from importer cache`, path)) + } + return pkg, nil + } + + pkg, err1 := imp.srcImporter.Import(path) + if err1 == nil { + imp.state.AddCachedPackage(path, pkg) + if imp.debugImports { + imp.debugPrint(fmt.Sprintf(`imported "%s" from source importer`, path)) + } + return pkg, nil + } + + pkg, err2 := imp.defaultImporter.Import(path) + if err2 == nil { + imp.state.AddCachedPackage(path, pkg) + if imp.debugImports { + imp.debugPrint(fmt.Sprintf(`imported "%s" from %s importer`, path, runtime.Compiler)) + } + return pkg, nil + } + + // Fallback to `go list` as a last resort. 
+ pkg, err3 := imp.golistImport(path) + if err3 == nil { + imp.state.AddCachedPackage(path, pkg) + if imp.debugImports { + imp.debugPrint(fmt.Sprintf(`imported "%s" from golist importer`, path)) + } + return pkg, nil + } + + if imp.debugImports { + imp.debugPrint(fmt.Sprintf(`failed to import "%s":`, path)) + imp.debugPrint(fmt.Sprintf(" source importer: %v", err1)) + imp.debugPrint(fmt.Sprintf(" %s importer: %v", runtime.Compiler, err2)) + imp.debugPrint(fmt.Sprintf(" golist importer: %v", err3)) + } + + return nil, err2 +} + +func (imp *goImporter) golistImport(path string) (*types.Package, error) { + golistPkg, err := golist.JSON(path) + if err != nil { + return nil, err + } + + files := make([]*ast.File, 0, len(golistPkg.GoFiles)) + for _, filename := range golistPkg.GoFiles { + fullname := filepath.Join(golistPkg.Dir, filename) + f, err := parser.ParseFile(imp.fset, fullname, nil, 0) + if err != nil { + return nil, err + } + files = append(files, f) + } + + // TODO: do we want to assign imp as importer for this nested typecherker? + // Otherwise it won't be able to resolve imports. + var typecheker types.Config + var info types.Info + return typecheker.Check(path, imp.fset, files, &info) +} diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/libdsl.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/libdsl.go new file mode 100644 index 000000000..6202b7b8a --- /dev/null +++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/libdsl.go @@ -0,0 +1,276 @@ +package ruleguard + +import ( + "go/types" + + "github.com/quasilyte/go-ruleguard/internal/xtypes" + "github.com/quasilyte/go-ruleguard/ruleguard/quasigo" +) + +// This file implements `dsl/*` packages as native functions in quasigo. +// +// Every function and method defined in any `dsl/*` package should have +// associated Go function that implements it. +// +// In quasigo, it's impossible to have a pointer to an interface and +// non-pointer struct type. All interface type methods have FQN without `*` prefix +// while all struct type methods always begin with `*`. +// +// Fields are readonly. +// Field access is compiled into a method call that have a name identical to the field. +// For example, `foo.Bar` field access will be compiled as `foo.Bar()`. +// This may change in the future; benchmarks are needed to figure out +// what is more efficient: reflect-based field access or a function call. +// +// To keep this code organized, every type and package functions are represented +// as structs with methods. Then we bind a method value to quasigo symbol. +// The naming scheme is `dsl{$name}Package` for packages and `dsl{$pkg}{$name}` for types. 
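// Editorial sketch, not part of the vendored file: following the binding
// scheme described in the comment above, this is roughly how one extra
// native type would be wired up. The dslTypesTuple type and the idea of
// exposing a dsl/types.Tuple are assumptions made purely for illustration;
// only the quasigo calls already used in this file (NewEnv, AddNativeMethod,
// ValueStack.Pop/Push/PushInt) are relied on. initEnv below performs the
// real registration for the types ruleguard actually supports.
package main

import (
	"go/types"

	"github.com/quasilyte/go-ruleguard/ruleguard/quasigo"
)

type dslTypesTuple struct{}

func (native dslTypesTuple) funcs() map[string]func(*quasigo.ValueStack) {
	return map[string]func(*quasigo.ValueStack){
		"Len":    native.Len,
		"String": native.String,
	}
}

// Every native method pops its receiver from the value stack and pushes
// the result back, mirroring the dslTypes* implementations below.
func (dslTypesTuple) Len(stack *quasigo.ValueStack) {
	stack.PushInt(stack.Pop().(*types.Tuple).Len())
}

func (dslTypesTuple) String(stack *quasigo.ValueStack) {
	stack.Push(stack.Pop().(*types.Tuple).String())
}

func main() {
	env := quasigo.NewEnv()
	// The leading `*` in the qualifier marks a struct (pointer) type,
	// as the comment above explains.
	for name, fn := range (dslTypesTuple{}).funcs() {
		env.AddNativeMethod(`*github.com/quasilyte/go-ruleguard/dsl/types.Tuple`, name, fn)
	}
}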
+ +func initEnv(state *engineState, env *quasigo.Env) { + nativeTypes := map[string]quasigoNative{ + `*github.com/quasilyte/go-ruleguard/dsl.VarFilterContext`: dslVarFilterContext{state: state}, + `github.com/quasilyte/go-ruleguard/dsl/types.Type`: dslTypesType{}, + `*github.com/quasilyte/go-ruleguard/dsl/types.Interface`: dslTypesInterface{}, + `*github.com/quasilyte/go-ruleguard/dsl/types.Pointer`: dslTypesPointer{}, + `*github.com/quasilyte/go-ruleguard/dsl/types.Array`: dslTypesArray{}, + `*github.com/quasilyte/go-ruleguard/dsl/types.Slice`: dslTypesSlice{}, + } + + for qualifier, typ := range nativeTypes { + for methodName, fn := range typ.funcs() { + env.AddNativeMethod(qualifier, methodName, fn) + } + } + + nativePackages := map[string]quasigoNative{ + `github.com/quasilyte/go-ruleguard/dsl/types`: dslTypesPackage{}, + } + + for qualifier, pkg := range nativePackages { + for funcName, fn := range pkg.funcs() { + env.AddNativeMethod(qualifier, funcName, fn) + } + } +} + +type quasigoNative interface { + funcs() map[string]func(*quasigo.ValueStack) +} + +type dslTypesType struct{} + +func (native dslTypesType) funcs() map[string]func(*quasigo.ValueStack) { + return map[string]func(*quasigo.ValueStack){ + "Underlying": native.Underlying, + "String": native.String, + } +} + +func (dslTypesType) Underlying(stack *quasigo.ValueStack) { + stack.Push(stack.Pop().(types.Type).Underlying()) +} + +func (dslTypesType) String(stack *quasigo.ValueStack) { + stack.Push(stack.Pop().(types.Type).String()) +} + +type dslTypesInterface struct{} + +func (native dslTypesInterface) funcs() map[string]func(*quasigo.ValueStack) { + return map[string]func(*quasigo.ValueStack){ + "Underlying": native.Underlying, + "String": native.String, + } +} + +func (dslTypesInterface) Underlying(stack *quasigo.ValueStack) { + stack.Push(stack.Pop().(*types.Interface).Underlying()) +} + +func (dslTypesInterface) String(stack *quasigo.ValueStack) { + stack.Push(stack.Pop().(*types.Interface).String()) +} + +type dslTypesSlice struct{} + +func (native dslTypesSlice) funcs() map[string]func(*quasigo.ValueStack) { + return map[string]func(*quasigo.ValueStack){ + "Underlying": native.Underlying, + "String": native.String, + "Elem": native.Elem, + } +} + +func (dslTypesSlice) Underlying(stack *quasigo.ValueStack) { + stack.Push(stack.Pop().(*types.Slice).Underlying()) +} + +func (dslTypesSlice) String(stack *quasigo.ValueStack) { + stack.Push(stack.Pop().(*types.Slice).String()) +} + +func (dslTypesSlice) Elem(stack *quasigo.ValueStack) { + stack.Push(stack.Pop().(*types.Slice).Elem()) +} + +type dslTypesArray struct{} + +func (native dslTypesArray) funcs() map[string]func(*quasigo.ValueStack) { + return map[string]func(*quasigo.ValueStack){ + "Underlying": native.Underlying, + "String": native.String, + "Elem": native.Elem, + "Len": native.Len, + } +} + +func (dslTypesArray) Underlying(stack *quasigo.ValueStack) { + stack.Push(stack.Pop().(*types.Array).Underlying()) +} + +func (dslTypesArray) String(stack *quasigo.ValueStack) { + stack.Push(stack.Pop().(*types.Array).String()) +} + +func (dslTypesArray) Elem(stack *quasigo.ValueStack) { + stack.Push(stack.Pop().(*types.Array).Elem()) +} + +func (dslTypesArray) Len(stack *quasigo.ValueStack) { + stack.PushInt(int(stack.Pop().(*types.Array).Len())) +} + +type dslTypesPointer struct{} + +func (native dslTypesPointer) funcs() map[string]func(*quasigo.ValueStack) { + return map[string]func(*quasigo.ValueStack){ + "Underlying": native.Underlying, + "String": native.String, + 
"Elem": native.Elem, + } +} + +func (dslTypesPointer) Underlying(stack *quasigo.ValueStack) { + stack.Push(stack.Pop().(*types.Pointer).Underlying()) +} + +func (dslTypesPointer) String(stack *quasigo.ValueStack) { + stack.Push(stack.Pop().(*types.Pointer).String()) +} + +func (dslTypesPointer) Elem(stack *quasigo.ValueStack) { + stack.Push(stack.Pop().(*types.Pointer).Elem()) +} + +type dslTypesPackage struct{} + +func (native dslTypesPackage) funcs() map[string]func(*quasigo.ValueStack) { + return map[string]func(*quasigo.ValueStack){ + "Implements": native.Implements, + "Identical": native.Identical, + "NewArray": native.NewArray, + "NewSlice": native.NewSlice, + "NewPointer": native.NewPointer, + "AsArray": native.AsArray, + "AsSlice": native.AsSlice, + "AsPointer": native.AsPointer, + "AsInterface": native.AsInterface, + } +} + +func (dslTypesPackage) Implements(stack *quasigo.ValueStack) { + iface := stack.Pop().(*types.Interface) + typ := stack.Pop().(types.Type) + stack.Push(xtypes.Implements(typ, iface)) +} + +func (dslTypesPackage) Identical(stack *quasigo.ValueStack) { + y := stack.Pop().(types.Type) + x := stack.Pop().(types.Type) + stack.Push(xtypes.Identical(x, y)) +} + +func (dslTypesPackage) NewArray(stack *quasigo.ValueStack) { + length := stack.PopInt() + typ := stack.Pop().(types.Type) + stack.Push(types.NewArray(typ, int64(length))) +} + +func (dslTypesPackage) NewSlice(stack *quasigo.ValueStack) { + typ := stack.Pop().(types.Type) + stack.Push(types.NewSlice(typ)) +} + +func (dslTypesPackage) NewPointer(stack *quasigo.ValueStack) { + typ := stack.Pop().(types.Type) + stack.Push(types.NewPointer(typ)) +} + +func (dslTypesPackage) AsArray(stack *quasigo.ValueStack) { + typ, _ := stack.Pop().(types.Type).(*types.Array) + stack.Push(typ) +} + +func (dslTypesPackage) AsSlice(stack *quasigo.ValueStack) { + typ, _ := stack.Pop().(types.Type).(*types.Slice) + stack.Push(typ) +} + +func (dslTypesPackage) AsPointer(stack *quasigo.ValueStack) { + typ, _ := stack.Pop().(types.Type).(*types.Pointer) + stack.Push(typ) +} + +func (dslTypesPackage) AsInterface(stack *quasigo.ValueStack) { + typ, _ := stack.Pop().(types.Type).(*types.Interface) + stack.Push(typ) +} + +type dslVarFilterContext struct { + state *engineState +} + +func (native dslVarFilterContext) funcs() map[string]func(*quasigo.ValueStack) { + return map[string]func(*quasigo.ValueStack){ + "Type": native.Type, + "SizeOf": native.SizeOf, + "GetType": native.GetType, + "GetInterface": native.GetInterface, + } +} + +func (dslVarFilterContext) Type(stack *quasigo.ValueStack) { + params := stack.Pop().(*filterParams) + typ := params.typeofNode(params.subExpr(params.varname)) + stack.Push(typ) +} + +func (native dslVarFilterContext) SizeOf(stack *quasigo.ValueStack) { + typ := stack.Pop().(types.Type) + params := stack.Pop().(*filterParams) + stack.PushInt(int(params.ctx.Sizes.Sizeof(typ))) +} + +func (native dslVarFilterContext) GetType(stack *quasigo.ValueStack) { + fqn := stack.Pop().(string) + params := stack.Pop().(*filterParams) + typ, err := native.state.FindType(params.importer, params.ctx.Pkg, fqn) + if err != nil { + panic(err) + } + stack.Push(typ) +} + +func (native dslVarFilterContext) GetInterface(stack *quasigo.ValueStack) { + fqn := stack.Pop().(string) + params := stack.Pop().(*filterParams) + typ, err := native.state.FindType(params.importer, params.ctx.Pkg, fqn) + if err != nil { + panic(err) + } + if ifaceType, ok := typ.Underlying().(*types.Interface); ok { + stack.Push(ifaceType) + return + } + 
stack.Push((*types.Interface)(nil)) // Not found or not an interface +} diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/match_data.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/match_data.go new file mode 100644 index 000000000..c9d64aff7 --- /dev/null +++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/match_data.go @@ -0,0 +1,46 @@ +package ruleguard + +import ( + "go/ast" + + "github.com/quasilyte/go-ruleguard/internal/gogrep" +) + +// matchData is used to handle both regexp and AST match sets in the same way. +type matchData interface { + // TODO: don't use gogrep.CapturedNode type here. + + Node() ast.Node + CaptureList() []gogrep.CapturedNode + CapturedByName(name string) (ast.Node, bool) +} + +type commentMatchData struct { + node ast.Node + capture []gogrep.CapturedNode +} + +func (m commentMatchData) Node() ast.Node { return m.node } + +func (m commentMatchData) CaptureList() []gogrep.CapturedNode { return m.capture } + +func (m commentMatchData) CapturedByName(name string) (ast.Node, bool) { + for _, c := range m.capture { + if c.Name == name { + return c.Node, true + } + } + return nil, false +} + +type astMatchData struct { + match gogrep.MatchData +} + +func (m astMatchData) Node() ast.Node { return m.match.Node } + +func (m astMatchData) CaptureList() []gogrep.CapturedNode { return m.match.Capture } + +func (m astMatchData) CapturedByName(name string) (ast.Node, bool) { + return m.match.CapturedByName(name) +} diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/parser.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/parser.go new file mode 100644 index 000000000..94826d497 --- /dev/null +++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/parser.go @@ -0,0 +1,988 @@ +package ruleguard + +import ( + "bytes" + "errors" + "fmt" + "go/ast" + "go/parser" + "go/token" + "go/types" + "io" + "io/ioutil" + "path" + "regexp" + "strconv" + + "github.com/quasilyte/go-ruleguard/internal/gogrep" + "github.com/quasilyte/go-ruleguard/nodetag" + "github.com/quasilyte/go-ruleguard/ruleguard/goutil" + "github.com/quasilyte/go-ruleguard/ruleguard/quasigo" + "github.com/quasilyte/go-ruleguard/ruleguard/typematch" +) + +// TODO(quasilyte): use source code byte slicing instead of SprintNode? + +type parseError struct{ error } + +// ImportError is returned when a ruleguard file references a package that cannot be imported. 
+type ImportError struct { + msg string + err error +} + +func (e *ImportError) Error() string { return e.msg } +func (e *ImportError) Unwrap() error { return e.err } + +type rulesParser struct { + state *engineState + ctx *ParseContext + + prefix string // For imported packages, a prefix that is added to a rule group name + importedPkg string // Package path; only for imported packages + + filename string + group string + res *goRuleSet + pkg *types.Package + types *types.Info + + importer *goImporter + + itab *typematch.ImportsTab + + imported []*goRuleSet + + dslPkgname string // The local name of the "ruleguard/dsl" package (usually its just "dsl") +} + +type rulesParserConfig struct { + state *engineState + + ctx *ParseContext + + importer *goImporter + + prefix string + importedPkg string + + itab *typematch.ImportsTab +} + +func newRulesParser(config rulesParserConfig) *rulesParser { + return &rulesParser{ + state: config.state, + ctx: config.ctx, + importer: config.importer, + prefix: config.prefix, + importedPkg: config.importedPkg, + itab: config.itab, + } +} + +func (p *rulesParser) ParseFile(filename string, r io.Reader) (*goRuleSet, error) { + p.dslPkgname = "dsl" + p.filename = filename + p.res = &goRuleSet{ + universal: &scopedGoRuleSet{}, + groups: make(map[string]token.Position), + } + + parserFlags := parser.Mode(0) + f, err := parser.ParseFile(p.ctx.Fset, filename, r, parserFlags) + if err != nil { + return nil, fmt.Errorf("parse file error: %w", err) + } + + for _, imp := range f.Imports { + importPath, err := strconv.Unquote(imp.Path.Value) + if err != nil { + return nil, p.errorf(imp, fmt.Errorf("unquote %s import path: %w", imp.Path.Value, err)) + } + if importPath == "github.com/quasilyte/go-ruleguard/dsl" { + if imp.Name != nil { + p.dslPkgname = imp.Name.Name + } + } + } + + if f.Name.Name != "gorules" { + return nil, fmt.Errorf("expected a gorules package name, found %s", f.Name.Name) + } + + typechecker := types.Config{Importer: p.importer} + p.types = &types.Info{ + Types: map[ast.Expr]types.TypeAndValue{}, + Uses: map[*ast.Ident]types.Object{}, + Defs: map[*ast.Ident]types.Object{}, + } + pkg, err := typechecker.Check("gorules", p.ctx.Fset, []*ast.File{f}, p.types) + if err != nil { + return nil, fmt.Errorf("typechecker error: %w", err) + } + p.pkg = pkg + + var matcherFuncs []*ast.FuncDecl + var userFuncs []*ast.FuncDecl + for _, decl := range f.Decls { + decl, ok := decl.(*ast.FuncDecl) + if !ok { + continue + } + if decl.Name.String() == "init" { + if err := p.parseInitFunc(decl); err != nil { + return nil, err + } + continue + } + + if p.isMatcherFunc(decl) { + matcherFuncs = append(matcherFuncs, decl) + } else { + userFuncs = append(userFuncs, decl) + } + } + + for _, decl := range userFuncs { + if err := p.parseUserFunc(decl); err != nil { + return nil, err + } + } + for _, decl := range matcherFuncs { + if err := p.parseRuleGroup(decl); err != nil { + return nil, err + } + } + + if len(p.imported) != 0 { + toMerge := []*goRuleSet{p.res} + toMerge = append(toMerge, p.imported...) 
+ merged, err := mergeRuleSets(toMerge) + if err != nil { + return nil, err + } + p.res = merged + } + + return p.res, nil +} + +func (p *rulesParser) parseUserFunc(f *ast.FuncDecl) error { + ctx := &quasigo.CompileContext{ + Env: p.state.env, + Types: p.types, + Fset: p.ctx.Fset, + } + compiled, err := quasigo.Compile(ctx, f) + if err != nil { + return err + } + if p.ctx.DebugFilter == f.Name.String() { + p.ctx.DebugPrint(quasigo.Disasm(p.state.env, compiled)) + } + ctx.Env.AddFunc(p.pkg.Path(), f.Name.String(), compiled) + return nil +} + +func (p *rulesParser) parseInitFunc(f *ast.FuncDecl) error { + type bundleImport struct { + node ast.Node + prefix string + pkgPath string + } + + var imported []bundleImport + + for _, stmt := range f.Body.List { + exprStmt, ok := stmt.(*ast.ExprStmt) + if !ok { + return p.errorf(stmt, errors.New("unsupported statement")) + } + call, ok := exprStmt.X.(*ast.CallExpr) + if !ok { + return p.errorf(stmt, errors.New("unsupported expr")) + } + fn, ok := call.Fun.(*ast.SelectorExpr) + if !ok { + return p.errorf(stmt, errors.New("unsupported call")) + } + pkg, ok := fn.X.(*ast.Ident) + if !ok || pkg.Name != p.dslPkgname { + return p.errorf(stmt, errors.New("unsupported call")) + } + + switch fn.Sel.Name { + case "ImportRules": + if p.importedPkg != "" { + return p.errorf(call, errors.New("imports from imported packages are not supported yet")) + } + prefix := p.parseStringArg(call.Args[0]) + bundleSelector, ok := call.Args[1].(*ast.SelectorExpr) + if !ok { + return p.errorf(call.Args[1], errors.New("expected a `pkgname.Bundle` argument")) + } + bundleObj := p.types.ObjectOf(bundleSelector.Sel) + imported = append(imported, bundleImport{ + node: stmt, + prefix: prefix, + pkgPath: bundleObj.Pkg().Path(), + }) + + default: + return p.errorf(stmt, fmt.Errorf("unsupported %s call", fn.Sel.Name)) + } + } + + for _, imp := range imported { + files, err := findBundleFiles(imp.pkgPath) + if err != nil { + return p.errorf(imp.node, fmt.Errorf("import lookup error: %w", err)) + } + for _, filename := range files { + rset, err := p.importRules(imp.prefix, imp.pkgPath, filename) + if err != nil { + return p.errorf(imp.node, fmt.Errorf("import parsing error: %w", err)) + } + p.imported = append(p.imported, rset) + } + } + + return nil +} + +func (p *rulesParser) importRules(prefix, pkgPath, filename string) (*goRuleSet, error) { + data, err := ioutil.ReadFile(filename) + if err != nil { + return nil, err + } + config := rulesParserConfig{ + state: p.state, + ctx: p.ctx, + importer: p.importer, + prefix: prefix, + importedPkg: pkgPath, + itab: p.itab, + } + rset, err := newRulesParser(config).ParseFile(filename, bytes.NewReader(data)) + if err != nil { + return nil, fmt.Errorf("%s: %w", p.importedPkg, err) + } + return rset, nil +} + +func (p *rulesParser) isMatcherFunc(f *ast.FuncDecl) bool { + typ := p.types.ObjectOf(f.Name).Type().(*types.Signature) + return typ.Results().Len() == 0 && + typ.Params().Len() == 1 && + typ.Params().At(0).Type().String() == "github.com/quasilyte/go-ruleguard/dsl.Matcher" +} + +func (p *rulesParser) parseRuleGroup(f *ast.FuncDecl) (err error) { + defer func() { + if err != nil { + return + } + rv := recover() + if rv == nil { + return + } + if parseErr, ok := rv.(parseError); ok { + err = parseErr.error + return + } + panic(rv) // not our panic + }() + + if f.Name.String() == "_" { + return p.errorf(f.Name, errors.New("`_` is not a valid rule group function name")) + } + if f.Body == nil { + return p.errorf(f, errors.New("unexpected empty 
function body")) + } + params := f.Type.Params.List + matcher := params[0].Names[0].Name + + p.group = f.Name.Name + if p.prefix != "" { + p.group = p.prefix + "/" + f.Name.Name + } + + if p.ctx.GroupFilter != nil && !p.ctx.GroupFilter(p.group) { + return nil // Skip this group + } + if _, ok := p.res.groups[p.group]; ok { + panic(fmt.Sprintf("duplicated function %s after the typecheck", p.group)) // Should never happen + } + p.res.groups[p.group] = token.Position{ + Filename: p.filename, + Line: p.ctx.Fset.Position(f.Name.Pos()).Line, + } + + p.itab.EnterScope() + defer p.itab.LeaveScope() + + for _, stmt := range f.Body.List { + if _, ok := stmt.(*ast.DeclStmt); ok { + continue + } + stmtExpr, ok := stmt.(*ast.ExprStmt) + if !ok { + return p.errorf(stmt, fmt.Errorf("expected a %s method call, found %s", matcher, goutil.SprintNode(p.ctx.Fset, stmt))) + } + call, ok := stmtExpr.X.(*ast.CallExpr) + if !ok { + return p.errorf(stmt, fmt.Errorf("expected a %s method call, found %s", matcher, goutil.SprintNode(p.ctx.Fset, stmt))) + } + if err := p.parseCall(matcher, call); err != nil { + return err + } + + } + + return nil +} + +func (p *rulesParser) parseCall(matcher string, call *ast.CallExpr) error { + f := call.Fun.(*ast.SelectorExpr) + x, ok := f.X.(*ast.Ident) + if ok && x.Name == matcher { + return p.parseStmt(f.Sel, call.Args) + } + + return p.parseRule(matcher, call) +} + +func (p *rulesParser) parseStmt(fn *ast.Ident, args []ast.Expr) error { + switch fn.Name { + case "Import": + pkgPath, ok := p.toStringValue(args[0]) + if !ok { + return p.errorf(args[0], errors.New("expected a string literal argument")) + } + pkgName := path.Base(pkgPath) + p.itab.Load(pkgName, pkgPath) + return nil + default: + return p.errorf(fn, fmt.Errorf("unexpected %s method", fn.Name)) + } +} + +func (p *rulesParser) parseRule(matcher string, call *ast.CallExpr) error { + origCall := call + var ( + matchArgs *[]ast.Expr + matchCommentArgs *[]ast.Expr + whereArgs *[]ast.Expr + suggestArgs *[]ast.Expr + reportArgs *[]ast.Expr + atArgs *[]ast.Expr + ) + for { + chain, ok := call.Fun.(*ast.SelectorExpr) + if !ok { + break + } + switch chain.Sel.Name { + case "Match": + if matchArgs != nil { + return p.errorf(chain.Sel, errors.New("Match() can't be repeated")) + } + if matchCommentArgs != nil { + return p.errorf(chain.Sel, errors.New("Match() and MatchComment() can't be combined")) + } + matchArgs = &call.Args + case "MatchComment": + if matchCommentArgs != nil { + return p.errorf(chain.Sel, errors.New("MatchComment() can't be repeated")) + } + if matchArgs != nil { + return p.errorf(chain.Sel, errors.New("Match() and MatchComment() can't be combined")) + } + matchCommentArgs = &call.Args + case "Where": + if whereArgs != nil { + return p.errorf(chain.Sel, errors.New("Where() can't be repeated")) + } + whereArgs = &call.Args + case "Suggest": + if suggestArgs != nil { + return p.errorf(chain.Sel, errors.New("Suggest() can't be repeated")) + } + suggestArgs = &call.Args + case "Report": + if reportArgs != nil { + return p.errorf(chain.Sel, errors.New("Report() can't be repeated")) + } + reportArgs = &call.Args + case "At": + if atArgs != nil { + return p.errorf(chain.Sel, errors.New("At() can't be repeated")) + } + atArgs = &call.Args + default: + return p.errorf(chain.Sel, fmt.Errorf("unexpected %s method", chain.Sel.Name)) + } + call, ok = chain.X.(*ast.CallExpr) + if !ok { + break + } + } + + proto := goRule{ + filename: p.filename, + line: p.ctx.Fset.Position(origCall.Pos()).Line, + group: p.group, + } + + // 
AST patterns for Match() or regexp patterns for MatchComment(). + var alternatives []string + + if matchArgs == nil && matchCommentArgs == nil { + return p.errorf(origCall, errors.New("missing Match() or MatchComment() call")) + } + + if matchArgs != nil { + for _, arg := range *matchArgs { + alternatives = append(alternatives, p.parseStringArg(arg)) + } + } else { + for _, arg := range *matchCommentArgs { + alternatives = append(alternatives, p.parseStringArg(arg)) + } + } + + if whereArgs != nil { + proto.filter = p.parseFilter((*whereArgs)[0]) + } + + if suggestArgs != nil { + proto.suggestion = p.parseStringArg((*suggestArgs)[0]) + } + + if reportArgs == nil { + if suggestArgs == nil { + return p.errorf(origCall, errors.New("missing Report() or Suggest() call")) + } + proto.msg = "suggestion: " + proto.suggestion + } else { + proto.msg = p.parseStringArg((*reportArgs)[0]) + } + + if atArgs != nil { + index, ok := (*atArgs)[0].(*ast.IndexExpr) + if !ok { + return p.errorf((*atArgs)[0], fmt.Errorf("expected %s[`varname`] expression", matcher)) + } + arg, ok := p.toStringValue(index.Index) + if !ok { + return p.errorf(index.Index, errors.New("expected a string literal index")) + } + proto.location = arg + } + + if matchArgs != nil { + return p.loadGogrepRules(proto, *matchArgs, alternatives) + } + return p.loadCommentRules(proto, *matchCommentArgs, alternatives) +} + +func (p *rulesParser) loadCommentRules(proto goRule, matchArgs []ast.Expr, alternatives []string) error { + dst := p.res.universal + for i, alt := range alternatives { + pat, err := regexp.Compile(alt) + if err != nil { + return p.errorf(matchArgs[i], fmt.Errorf("parse match comment pattern: %w", err)) + } + rule := goCommentRule{ + base: proto, + pat: pat, + captureGroups: regexpHasCaptureGroups(alt), + } + dst.commentRules = append(dst.commentRules, rule) + } + + return nil +} + +func (p *rulesParser) loadGogrepRules(proto goRule, matchArgs []ast.Expr, alternatives []string) error { + dst := p.res.universal + for i, alt := range alternatives { + rule := proto + pat, err := gogrep.Compile(p.ctx.Fset, alt, false) + if err != nil { + return p.errorf(matchArgs[i], fmt.Errorf("parse match pattern: %w", err)) + } + rule.pat = pat + var dstTags []nodetag.Value + switch tag := pat.NodeTag(); tag { + case nodetag.Unknown: + return p.errorf(matchArgs[i], fmt.Errorf("can't infer a tag of %s", alt)) + case nodetag.Node: + // TODO: add to every bucket? 
+ return p.errorf(matchArgs[i], fmt.Errorf("%s is too general", alt)) + case nodetag.StmtList: + dstTags = []nodetag.Value{ + nodetag.BlockStmt, + nodetag.CaseClause, + nodetag.CommClause, + } + case nodetag.ExprList: + dstTags = []nodetag.Value{ + nodetag.CallExpr, + nodetag.CompositeLit, + nodetag.ReturnStmt, + } + default: + dstTags = []nodetag.Value{tag} + } + for _, tag := range dstTags { + dst.rulesByTag[tag] = append(dst.rulesByTag[tag], rule) + } + dst.categorizedNum++ + } + + return nil +} + +func (p *rulesParser) parseFilter(root ast.Expr) matchFilter { + return p.parseFilterExpr(root) +} + +func (p *rulesParser) errorf(n ast.Node, err error) parseError { + loc := p.ctx.Fset.Position(n.Pos()) + return parseError{fmt.Errorf("%s:%d: %w", loc.Filename, loc.Line, err)} +} + +func (p *rulesParser) parseStringArg(e ast.Expr) string { + s, ok := p.toStringValue(e) + if !ok { + panic(p.errorf(e, errors.New("expected a string literal argument"))) + } + return s +} + +func (p *rulesParser) parseRegexpArg(e ast.Expr) *regexp.Regexp { + patternString, ok := p.toStringValue(e) + if !ok { + panic(p.errorf(e, errors.New("expected a regexp pattern argument"))) + } + re, err := regexp.Compile(patternString) + if err != nil { + panic(p.errorf(e, err)) + } + return re +} + +func (p *rulesParser) parseTypeStringArg(e ast.Expr) types.Type { + typeString, ok := p.toStringValue(e) + if !ok { + panic(p.errorf(e, errors.New("expected a type string argument"))) + } + typ, err := typeFromString(typeString) + if err != nil { + panic(p.errorf(e, fmt.Errorf("parse type expr: %w", err))) + } + if typ == nil { + panic(p.errorf(e, fmt.Errorf("can't convert %s into a type constraint yet", typeString))) + } + return typ +} + +func (p *rulesParser) parseFilterExpr(e ast.Expr) matchFilter { + result := matchFilter{src: goutil.SprintNode(p.ctx.Fset, e)} + + switch e := e.(type) { + case *ast.ParenExpr: + return p.parseFilterExpr(e.X) + + case *ast.UnaryExpr: + x := p.parseFilterExpr(e.X) + if e.Op == token.NOT { + result.fn = makeNotFilter(result.src, x) + return result + } + panic(p.errorf(e, fmt.Errorf("unsupported unary op: %s", result.src))) + + case *ast.BinaryExpr: + switch e.Op { + case token.LAND: + result.fn = makeAndFilter(p.parseFilterExpr(e.X), p.parseFilterExpr(e.Y)) + return result + case token.LOR: + result.fn = makeOrFilter(p.parseFilterExpr(e.X), p.parseFilterExpr(e.Y)) + return result + case token.GEQ, token.LEQ, token.LSS, token.GTR, token.EQL, token.NEQ: + operand := p.toFilterOperand(e.X) + rhs := p.toFilterOperand(e.Y) + rhsValue := p.types.Types[e.Y].Value + if operand.path == "Type.Size" && rhsValue != nil { + result.fn = makeTypeSizeConstFilter(result.src, operand.varName, e.Op, rhsValue) + return result + } + if operand.path == "Value.Int" && rhsValue != nil { + result.fn = makeValueIntConstFilter(result.src, operand.varName, e.Op, rhsValue) + return result + } + if operand.path == "Value.Int" && rhs.path == "Value.Int" && rhs.varName != "" { + result.fn = makeValueIntFilter(result.src, operand.varName, e.Op, rhs.varName) + return result + } + if operand.path == "Text" && rhsValue != nil { + result.fn = makeTextConstFilter(result.src, operand.varName, e.Op, rhsValue) + return result + } + if operand.path == "Text" && rhs.path == "Text" && rhs.varName != "" { + result.fn = makeTextFilter(result.src, operand.varName, e.Op, rhs.varName) + return result + } + } + panic(p.errorf(e, fmt.Errorf("unsupported binary op: %s", result.src))) + } + + operand := p.toFilterOperand(e) + args := operand.args 
+ switch operand.path { + default: + panic(p.errorf(e, fmt.Errorf("unsupported expr: %s", result.src))) + + case "File.Imports": + pkgPath := p.parseStringArg(args[0]) + result.fn = makeFileImportsFilter(result.src, pkgPath) + + case "File.PkgPath.Matches": + re := p.parseRegexpArg(args[0]) + result.fn = makeFilePkgPathMatchesFilter(result.src, re) + + case "File.Name.Matches": + re := p.parseRegexpArg(args[0]) + result.fn = makeFileNameMatchesFilter(result.src, re) + + case "Pure": + result.fn = makePureFilter(result.src, operand.varName) + + case "Const": + result.fn = makeConstFilter(result.src, operand.varName) + + case "Addressable": + result.fn = makeAddressableFilter(result.src, operand.varName) + + case "Filter": + expr, fn := goutil.ResolveFunc(p.types, args[0]) + if expr != nil { + panic(p.errorf(expr, errors.New("expected a simple function name, found expression"))) + } + sig := fn.Type().(*types.Signature) + userFn := p.state.env.GetFunc(fn.Pkg().Path(), fn.Name()) + if userFn == nil { + panic(p.errorf(args[0], fmt.Errorf("can't find a compiled version of %s", sig.String()))) + } + result.fn = makeCustomVarFilter(result.src, operand.varName, userFn) + + case "Type.Is", "Type.Underlying.Is": + // TODO(quasilyte): add FQN support? + typeString, ok := p.toStringValue(args[0]) + if !ok { + panic(p.errorf(args[0], errors.New("expected a string literal argument"))) + } + ctx := typematch.Context{Itab: p.itab} + pat, err := typematch.Parse(&ctx, typeString) + if err != nil { + panic(p.errorf(args[0], fmt.Errorf("parse type expr: %w", err))) + } + underlying := operand.path == "Type.Underlying.Is" + result.fn = makeTypeIsFilter(result.src, operand.varName, underlying, pat) + + case "Type.ConvertibleTo": + dstType := p.parseTypeStringArg(args[0]) + result.fn = makeTypeConvertibleToFilter(result.src, operand.varName, dstType) + + case "Type.AssignableTo": + dstType := p.parseTypeStringArg(args[0]) + result.fn = makeTypeAssignableToFilter(result.src, operand.varName, dstType) + + case "Type.Implements": + iface := p.toInterfaceValue(args[0]) + result.fn = makeTypeImplementsFilter(result.src, operand.varName, iface) + + case "Text.Matches": + re := p.parseRegexpArg(args[0]) + result.fn = makeTextMatchesFilter(result.src, operand.varName, re) + + case "Node.Is": + typeString, ok := p.toStringValue(args[0]) + if !ok { + panic(p.errorf(args[0], errors.New("expected a string literal argument"))) + } + tag := nodetag.FromString(typeString) + if tag == nodetag.Unknown { + panic(p.errorf(args[0], fmt.Errorf("%s is not a valid go/ast type name", typeString))) + } + result.fn = makeNodeIsFilter(result.src, operand.varName, tag) + } + + if result.fn == nil { + panic("bug: nil func for the filter") // Should never happen + } + return result +} + +func (p *rulesParser) toInterfaceValue(x ast.Node) *types.Interface { + typeString, ok := p.toStringValue(x) + if !ok { + panic(p.errorf(x, errors.New("expected a string literal argument"))) + } + + typ, err := p.state.FindType(p.importer, p.pkg, typeString) + if err == nil { + iface, ok := typ.Underlying().(*types.Interface) + if !ok { + panic(p.errorf(x, fmt.Errorf("%s is not an interface type", typeString))) + } + return iface + } + + n, err := parser.ParseExpr(typeString) + if err != nil { + panic(p.errorf(x, fmt.Errorf("parse type expr: %w", err))) + } + qn, ok := n.(*ast.SelectorExpr) + if !ok { + panic(p.errorf(x, fmt.Errorf("can't resolve %s type; try a fully-qualified name", typeString))) + } + pkgName, ok := qn.X.(*ast.Ident) + if !ok { + 
panic(p.errorf(qn.X, errors.New("invalid package name"))) + } + pkgPath, ok := p.itab.Lookup(pkgName.Name) + if !ok { + panic(p.errorf(qn.X, fmt.Errorf("package %s is not imported", pkgName.Name))) + } + pkg, err := p.importer.Import(pkgPath) + if err != nil { + panic(p.errorf(n, &ImportError{msg: fmt.Sprintf("can't load %s", pkgPath), err: err})) + } + obj := pkg.Scope().Lookup(qn.Sel.Name) + if obj == nil { + panic(p.errorf(n, fmt.Errorf("%s is not found in %s", qn.Sel.Name, pkgPath))) + } + iface, ok := obj.Type().Underlying().(*types.Interface) + if !ok { + panic(p.errorf(n, fmt.Errorf("%s is not an interface type", qn.Sel.Name))) + } + return iface +} + +func (p *rulesParser) toStringValue(x ast.Node) (string, bool) { + switch x := x.(type) { + case *ast.BasicLit: + if x.Kind != token.STRING { + return "", false + } + s, err := strconv.Unquote(x.Value) + if err != nil { + return "", false + } + return s, true + case ast.Expr: + typ, ok := p.types.Types[x] + if !ok || typ.Type.String() != "string" { + return "", false + } + str := typ.Value.ExactString() + str = str[1 : len(str)-1] // remove quotes + return str, true + } + return "", false +} + +func (p *rulesParser) toFilterOperand(e ast.Expr) filterOperand { + var o filterOperand + + if call, ok := e.(*ast.CallExpr); ok { + o.args = call.Args + e = call.Fun + } + var path string + for { + if call, ok := e.(*ast.CallExpr); ok { + e = call.Fun + continue + } + selector, ok := e.(*ast.SelectorExpr) + if !ok { + break + } + if path == "" { + path = selector.Sel.Name + } else { + path = selector.Sel.Name + "." + path + } + e = selector.X + } + + o.path = path + + indexing, ok := e.(*ast.IndexExpr) + if !ok { + return o + } + mapIdent, ok := indexing.X.(*ast.Ident) + if !ok { + return o + } + o.mapName = mapIdent.Name + indexString, _ := p.toStringValue(indexing.Index) + o.varName = indexString + + return o +} + +type filterOperand struct { + mapName string + varName string + path string + args []ast.Expr +} + +var stdlibPackages = map[string]string{ + "adler32": "hash/adler32", + "aes": "crypto/aes", + "ascii85": "encoding/ascii85", + "asn1": "encoding/asn1", + "ast": "go/ast", + "atomic": "sync/atomic", + "base32": "encoding/base32", + "base64": "encoding/base64", + "big": "math/big", + "binary": "encoding/binary", + "bits": "math/bits", + "bufio": "bufio", + "build": "go/build", + "bytes": "bytes", + "bzip2": "compress/bzip2", + "cgi": "net/http/cgi", + "cgo": "runtime/cgo", + "cipher": "crypto/cipher", + "cmplx": "math/cmplx", + "color": "image/color", + "constant": "go/constant", + "context": "context", + "cookiejar": "net/http/cookiejar", + "crc32": "hash/crc32", + "crc64": "hash/crc64", + "crypto": "crypto", + "csv": "encoding/csv", + "debug": "runtime/debug", + "des": "crypto/des", + "doc": "go/doc", + "draw": "image/draw", + "driver": "database/sql/driver", + "dsa": "crypto/dsa", + "dwarf": "debug/dwarf", + "ecdsa": "crypto/ecdsa", + "ed25519": "crypto/ed25519", + "elf": "debug/elf", + "elliptic": "crypto/elliptic", + "encoding": "encoding", + "errors": "errors", + "exec": "os/exec", + "expvar": "expvar", + "fcgi": "net/http/fcgi", + "filepath": "path/filepath", + "flag": "flag", + "flate": "compress/flate", + "fmt": "fmt", + "fnv": "hash/fnv", + "format": "go/format", + "gif": "image/gif", + "gob": "encoding/gob", + "gosym": "debug/gosym", + "gzip": "compress/gzip", + "hash": "hash", + "heap": "container/heap", + "hex": "encoding/hex", + "hmac": "crypto/hmac", + "html": "html", + "http": "net/http", + "httptest": 
"net/http/httptest", + "httptrace": "net/http/httptrace", + "httputil": "net/http/httputil", + "image": "image", + "importer": "go/importer", + "io": "io", + "iotest": "testing/iotest", + "ioutil": "io/ioutil", + "jpeg": "image/jpeg", + "json": "encoding/json", + "jsonrpc": "net/rpc/jsonrpc", + "list": "container/list", + "log": "log", + "lzw": "compress/lzw", + "macho": "debug/macho", + "mail": "net/mail", + "math": "math", + "md5": "crypto/md5", + "mime": "mime", + "multipart": "mime/multipart", + "net": "net", + "os": "os", + "palette": "image/color/palette", + "parse": "text/template/parse", + "parser": "go/parser", + "path": "path", + "pe": "debug/pe", + "pem": "encoding/pem", + "pkix": "crypto/x509/pkix", + "plan9obj": "debug/plan9obj", + "plugin": "plugin", + "png": "image/png", + "pprof": "runtime/pprof", + "printer": "go/printer", + "quick": "testing/quick", + "quotedprintable": "mime/quotedprintable", + "race": "runtime/race", + "rand": "math/rand", + "rc4": "crypto/rc4", + "reflect": "reflect", + "regexp": "regexp", + "ring": "container/ring", + "rpc": "net/rpc", + "rsa": "crypto/rsa", + "runtime": "runtime", + "scanner": "text/scanner", + "sha1": "crypto/sha1", + "sha256": "crypto/sha256", + "sha512": "crypto/sha512", + "signal": "os/signal", + "smtp": "net/smtp", + "sort": "sort", + "sql": "database/sql", + "strconv": "strconv", + "strings": "strings", + "subtle": "crypto/subtle", + "suffixarray": "index/suffixarray", + "sync": "sync", + "syntax": "regexp/syntax", + "syscall": "syscall", + "syslog": "log/syslog", + "tabwriter": "text/tabwriter", + "tar": "archive/tar", + "template": "text/template", + "testing": "testing", + "textproto": "net/textproto", + "time": "time", + "tls": "crypto/tls", + "token": "go/token", + "trace": "runtime/trace", + "types": "go/types", + "unicode": "unicode", + "unsafe": "unsafe", + "url": "net/url", + "user": "os/user", + "utf16": "unicode/utf16", + "utf8": "unicode/utf8", + "x509": "crypto/x509", + "xml": "encoding/xml", + "zip": "archive/zip", + "zlib": "compress/zlib", +} diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/compile.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/compile.go new file mode 100644 index 000000000..db61b40ee --- /dev/null +++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/compile.go @@ -0,0 +1,707 @@ +package quasigo + +import ( + "fmt" + "go/ast" + "go/constant" + "go/token" + "go/types" + + "github.com/quasilyte/go-ruleguard/ruleguard/goutil" + "golang.org/x/tools/go/ast/astutil" +) + +func compile(ctx *CompileContext, fn *ast.FuncDecl) (compiled *Func, err error) { + defer func() { + if err != nil { + return + } + rv := recover() + if rv == nil { + return + } + if compileErr, ok := rv.(compileError); ok { + err = compileErr + return + } + panic(rv) // not our panic + }() + + return compileFunc(ctx, fn), nil +} + +func compileFunc(ctx *CompileContext, fn *ast.FuncDecl) *Func { + cl := compiler{ + ctx: ctx, + fnType: ctx.Types.ObjectOf(fn.Name).Type().(*types.Signature), + constantsPool: make(map[interface{}]int), + intConstantsPool: make(map[int]int), + locals: make(map[string]int), + } + return cl.compileFunc(fn) +} + +type compiler struct { + ctx *CompileContext + + fnType *types.Signature + retType types.Type + + lastOp opcode + + locals map[string]int + constantsPool map[interface{}]int + intConstantsPool map[int]int + params map[string]int + + code []byte + constants []interface{} + intConstants []int + + breakTarget *label + continueTarget *label + + labels 
[]*label +} + +type label struct { + targetPos int + sources []int +} + +type compileError string + +func (e compileError) Error() string { return string(e) } + +func (cl *compiler) compileFunc(fn *ast.FuncDecl) *Func { + if cl.fnType.Results().Len() != 1 { + panic(cl.errorf(fn.Name, "only functions with a single non-void results are supported")) + } + cl.retType = cl.fnType.Results().At(0).Type() + + if !cl.isSupportedType(cl.retType) { + panic(cl.errorUnsupportedType(fn.Name, cl.retType, "function result")) + } + + dbg := funcDebugInfo{ + paramNames: make([]string, cl.fnType.Params().Len()), + } + + cl.params = make(map[string]int, cl.fnType.Params().Len()) + for i := 0; i < cl.fnType.Params().Len(); i++ { + p := cl.fnType.Params().At(i) + paramName := p.Name() + paramType := p.Type() + cl.params[paramName] = i + dbg.paramNames[i] = paramName + if !cl.isSupportedType(paramType) { + panic(cl.errorUnsupportedType(fn.Name, paramType, paramName+" param")) + } + } + + cl.compileStmt(fn.Body) + compiled := &Func{ + code: cl.code, + constants: cl.constants, + intConstants: cl.intConstants, + } + if len(cl.locals) != 0 { + dbg.localNames = make([]string, len(cl.locals)) + for localName, localIndex := range cl.locals { + dbg.localNames[localIndex] = localName + } + } + cl.ctx.Env.debug.funcs[compiled] = dbg + cl.linkJumps() + return compiled +} + +func (cl *compiler) compileStmt(stmt ast.Stmt) { + switch stmt := stmt.(type) { + case *ast.ReturnStmt: + cl.compileReturnStmt(stmt) + + case *ast.AssignStmt: + cl.compileAssignStmt(stmt) + + case *ast.IncDecStmt: + cl.compileIncDecStmt(stmt) + + case *ast.IfStmt: + cl.compileIfStmt(stmt) + + case *ast.ForStmt: + cl.compileForStmt(stmt) + + case *ast.BranchStmt: + cl.compileBranchStmt(stmt) + + case *ast.BlockStmt: + for i := range stmt.List { + cl.compileStmt(stmt.List[i]) + } + + default: + panic(cl.errorf(stmt, "can't compile %T yet", stmt)) + } +} + +func (cl *compiler) compileIncDecStmt(stmt *ast.IncDecStmt) { + varname, ok := stmt.X.(*ast.Ident) + if !ok { + panic(cl.errorf(stmt.X, "can assign only to simple variables")) + } + id := cl.getLocal(varname, varname.String()) + if stmt.Tok == token.INC { + cl.emit8(opIncLocal, id) + } else { + cl.emit8(opDecLocal, id) + } +} + +func (cl *compiler) compileBranchStmt(branch *ast.BranchStmt) { + if branch.Label != nil { + panic(cl.errorf(branch.Label, "can't compile %s with a label", branch.Tok)) + } + + switch branch.Tok { + case token.BREAK: + cl.emitJump(opJump, cl.breakTarget) + default: + panic(cl.errorf(branch, "can't compile %s yet", branch.Tok)) + } +} + +func (cl *compiler) compileForStmt(stmt *ast.ForStmt) { + labelBreak := cl.newLabel() + labelContinue := cl.newLabel() + prevBreakTarget := cl.breakTarget + prevContinueTarget := cl.continueTarget + cl.breakTarget = labelBreak + cl.continueTarget = labelContinue + + switch { + case stmt.Cond != nil && stmt.Init != nil && stmt.Post != nil: + // Will be implemented later; probably when the max number of locals will be lifted. + panic(cl.errorf(stmt, "can't compile C-style for loops yet")) + + case stmt.Cond != nil && stmt.Init == nil && stmt.Post == nil: + // `for { ... }` + labelBody := cl.newLabel() + cl.emitJump(opJump, labelContinue) + cl.bindLabel(labelBody) + cl.compileStmt(stmt.Body) + cl.bindLabel(labelContinue) + cl.compileExpr(stmt.Cond) + cl.emitJump(opJumpTrue, labelBody) + cl.bindLabel(labelBreak) + + default: + // `for { ... 
}` + cl.bindLabel(labelContinue) + cl.compileStmt(stmt.Body) + cl.emitJump(opJump, labelContinue) + cl.bindLabel(labelBreak) + } + + cl.breakTarget = prevBreakTarget + cl.continueTarget = prevContinueTarget +} + +func (cl *compiler) compileIfStmt(stmt *ast.IfStmt) { + if stmt.Else == nil { + labelEnd := cl.newLabel() + cl.compileExpr(stmt.Cond) + cl.emitJump(opJumpFalse, labelEnd) + cl.compileStmt(stmt.Body) + cl.bindLabel(labelEnd) + return + } + + labelEnd := cl.newLabel() + labelElse := cl.newLabel() + cl.compileExpr(stmt.Cond) + cl.emitJump(opJumpFalse, labelElse) + cl.compileStmt(stmt.Body) + if !cl.isUncondJump(cl.lastOp) { + cl.emitJump(opJump, labelEnd) + } + cl.bindLabel(labelElse) + cl.compileStmt(stmt.Else) + cl.bindLabel(labelEnd) +} + +func (cl *compiler) compileAssignStmt(assign *ast.AssignStmt) { + if len(assign.Lhs) != 1 { + panic(cl.errorf(assign, "only single left operand is allowed in assignments")) + } + if len(assign.Rhs) != 1 { + panic(cl.errorf(assign, "only single right operand is allowed in assignments")) + } + lhs := assign.Lhs[0] + rhs := assign.Rhs[0] + varname, ok := lhs.(*ast.Ident) + if !ok { + panic(cl.errorf(lhs, "can assign only to simple variables")) + } + + cl.compileExpr(rhs) + + typ := cl.ctx.Types.TypeOf(varname) + if assign.Tok == token.DEFINE { + if _, ok := cl.locals[varname.String()]; ok { + panic(cl.errorf(lhs, "%s variable shadowing is not allowed", varname)) + } + if !cl.isSupportedType(typ) { + panic(cl.errorUnsupportedType(varname, typ, varname.String()+" local variable")) + } + if len(cl.locals) == maxFuncLocals { + panic(cl.errorf(lhs, "can't define %s: too many locals", varname)) + } + id := len(cl.locals) + cl.locals[varname.String()] = id + cl.emit8(pickOp(typeIsInt(typ), opSetIntLocal, opSetLocal), id) + } else { + id := cl.getLocal(varname, varname.String()) + cl.emit8(pickOp(typeIsInt(typ), opSetIntLocal, opSetLocal), id) + } +} + +func (cl *compiler) getLocal(v ast.Expr, varname string) int { + id, ok := cl.locals[varname] + if !ok { + if _, ok := cl.params[varname]; ok { + panic(cl.errorf(v, "can't assign to %s, params are readonly", varname)) + } + panic(cl.errorf(v, "%s is not a writeable local variable", varname)) + } + return id +} + +func (cl *compiler) compileReturnStmt(ret *ast.ReturnStmt) { + if ret.Results == nil { + panic(cl.errorf(ret, "'naked' return statements are not allowed")) + } + + switch { + case identName(ret.Results[0]) == "true": + cl.emit(opReturnTrue) + case identName(ret.Results[0]) == "false": + cl.emit(opReturnFalse) + default: + cl.compileExpr(ret.Results[0]) + typ := cl.ctx.Types.TypeOf(ret.Results[0]) + cl.emit(pickOp(typeIsInt(typ), opReturnIntTop, opReturnTop)) + } +} + +func (cl *compiler) compileExpr(e ast.Expr) { + cv := cl.ctx.Types.Types[e].Value + if cv != nil { + cl.compileConstantValue(e, cv) + return + } + + switch e := e.(type) { + case *ast.ParenExpr: + cl.compileExpr(e.X) + + case *ast.Ident: + cl.compileIdent(e) + + case *ast.SelectorExpr: + cl.compileSelectorExpr(e) + + case *ast.UnaryExpr: + switch e.Op { + case token.NOT: + cl.compileUnaryOp(opNot, e) + default: + panic(cl.errorf(e, "can't compile unary %s yet", e.Op)) + } + + case *ast.SliceExpr: + cl.compileSliceExpr(e) + + case *ast.BinaryExpr: + cl.compileBinaryExpr(e) + + case *ast.CallExpr: + cl.compileCallExpr(e) + + default: + panic(cl.errorf(e, "can't compile %T yet", e)) + } +} + +func (cl *compiler) compileSelectorExpr(e *ast.SelectorExpr) { + typ := cl.ctx.Types.TypeOf(e.X) + key := funcKey{ + name: e.Sel.String(), + 
qualifier: typ.String(), + } + + if funcID, ok := cl.ctx.Env.nameToNativeFuncID[key]; ok { + cl.compileExpr(e.X) + cl.emit16(opCallNative, int(funcID)) + return + } + + panic(cl.errorf(e, "can't compile %s field access", e.Sel)) +} + +func (cl *compiler) compileBinaryExpr(e *ast.BinaryExpr) { + typ := cl.ctx.Types.TypeOf(e.X) + + switch e.Op { + case token.LOR: + cl.compileOr(e) + case token.LAND: + cl.compileAnd(e) + + case token.NEQ: + switch { + case identName(e.X) == "nil": + cl.compileExpr(e.Y) + cl.emit(opIsNotNil) + case identName(e.Y) == "nil": + cl.compileExpr(e.X) + cl.emit(opIsNotNil) + case typeIsString(typ): + cl.compileBinaryOp(opNotEqString, e) + case typeIsInt(typ): + cl.compileBinaryOp(opNotEqInt, e) + default: + panic(cl.errorf(e, "!= is not implemented for %s operands", typ)) + } + case token.EQL: + switch { + case identName(e.X) == "nil": + cl.compileExpr(e.Y) + cl.emit(opIsNil) + case identName(e.Y) == "nil": + cl.compileExpr(e.X) + cl.emit(opIsNil) + case typeIsString(cl.ctx.Types.TypeOf(e.X)): + cl.compileBinaryOp(opEqString, e) + case typeIsInt(cl.ctx.Types.TypeOf(e.X)): + cl.compileBinaryOp(opEqInt, e) + default: + panic(cl.errorf(e, "== is not implemented for %s operands", typ)) + } + + case token.GTR: + cl.compileIntBinaryOp(e, opGtInt, typ) + case token.GEQ: + cl.compileIntBinaryOp(e, opGtEqInt, typ) + case token.LSS: + cl.compileIntBinaryOp(e, opLtInt, typ) + case token.LEQ: + cl.compileIntBinaryOp(e, opLtEqInt, typ) + + case token.ADD: + switch { + case typeIsString(typ): + cl.compileBinaryOp(opConcat, e) + case typeIsInt(typ): + cl.compileBinaryOp(opAdd, e) + default: + panic(cl.errorf(e, "+ is not implemented for %s operands", typ)) + } + + case token.SUB: + cl.compileIntBinaryOp(e, opSub, typ) + + default: + panic(cl.errorf(e, "can't compile binary %s yet", e.Op)) + } +} + +func (cl *compiler) compileIntBinaryOp(e *ast.BinaryExpr, op opcode, typ types.Type) { + switch { + case typeIsInt(typ): + cl.compileBinaryOp(op, e) + default: + panic(cl.errorf(e, "%s is not implemented for %s operands", e.Op, typ)) + } +} + +func (cl *compiler) compileSliceExpr(slice *ast.SliceExpr) { + if slice.Slice3 { + panic(cl.errorf(slice, "can't compile 3-index slicing")) + } + + // No need to do slicing, its no-op `s[:]`. 
+ if slice.Low == nil && slice.High == nil { + cl.compileExpr(slice.X) + return + } + + sliceOp := opStringSlice + sliceFromOp := opStringSliceFrom + sliceToOp := opStringSliceTo + + if !typeIsString(cl.ctx.Types.TypeOf(slice.X)) { + panic(cl.errorf(slice.X, "can't compile slicing of something that is not a string")) + } + + switch { + case slice.Low == nil && slice.High != nil: + cl.compileExpr(slice.X) + cl.compileExpr(slice.High) + cl.emit(sliceToOp) + case slice.Low != nil && slice.High == nil: + cl.compileExpr(slice.X) + cl.compileExpr(slice.Low) + cl.emit(sliceFromOp) + default: + cl.compileExpr(slice.X) + cl.compileExpr(slice.Low) + cl.compileExpr(slice.High) + cl.emit(sliceOp) + } +} + +func (cl *compiler) compileBuiltinCall(fn *ast.Ident, call *ast.CallExpr) { + switch fn.Name { + case `len`: + s := call.Args[0] + cl.compileExpr(s) + if !typeIsString(cl.ctx.Types.TypeOf(s)) { + panic(cl.errorf(s, "can't compile len() with non-string argument yet")) + } + cl.emit(opStringLen) + default: + panic(cl.errorf(fn, "can't compile %s() builtin function call yet", fn)) + } +} + +func (cl *compiler) compileCallExpr(call *ast.CallExpr) { + if id, ok := astutil.Unparen(call.Fun).(*ast.Ident); ok { + _, isBuiltin := cl.ctx.Types.ObjectOf(id).(*types.Builtin) + if isBuiltin { + cl.compileBuiltinCall(id, call) + return + } + } + + expr, fn := goutil.ResolveFunc(cl.ctx.Types, call.Fun) + if fn == nil { + panic(cl.errorf(call.Fun, "can't resolve the called function")) + } + + // TODO: just use Func.FullName as a key? + key := funcKey{name: fn.Name()} + sig := fn.Type().(*types.Signature) + if sig.Recv() != nil { + key.qualifier = sig.Recv().Type().String() + } else { + key.qualifier = fn.Pkg().Path() + } + + if funcID, ok := cl.ctx.Env.nameToNativeFuncID[key]; ok { + if expr != nil { + cl.compileExpr(expr) + } + for _, arg := range call.Args { + cl.compileExpr(arg) + } + cl.emit16(opCallNative, int(funcID)) + return + } + + panic(cl.errorf(call.Fun, "can't compile a call to %s func", key)) +} + +func (cl *compiler) compileUnaryOp(op opcode, e *ast.UnaryExpr) { + cl.compileExpr(e.X) + cl.emit(op) +} + +func (cl *compiler) compileBinaryOp(op opcode, e *ast.BinaryExpr) { + cl.compileExpr(e.X) + cl.compileExpr(e.Y) + cl.emit(op) +} + +func (cl *compiler) compileOr(e *ast.BinaryExpr) { + labelEnd := cl.newLabel() + cl.compileExpr(e.X) + cl.emit(opDup) + cl.emitJump(opJumpTrue, labelEnd) + cl.compileExpr(e.Y) + cl.bindLabel(labelEnd) +} + +func (cl *compiler) compileAnd(e *ast.BinaryExpr) { + labelEnd := cl.newLabel() + cl.compileExpr(e.X) + cl.emit(opDup) + cl.emitJump(opJumpFalse, labelEnd) + cl.compileExpr(e.Y) + cl.bindLabel(labelEnd) +} + +func (cl *compiler) compileIdent(ident *ast.Ident) { + tv := cl.ctx.Types.Types[ident] + cv := tv.Value + if cv != nil { + cl.compileConstantValue(ident, cv) + return + } + if paramIndex, ok := cl.params[ident.String()]; ok { + cl.emit8(pickOp(typeIsInt(tv.Type), opPushIntParam, opPushParam), paramIndex) + return + } + if localIndex, ok := cl.locals[ident.String()]; ok { + cl.emit8(pickOp(typeIsInt(tv.Type), opPushIntLocal, opPushLocal), localIndex) + return + } + + panic(cl.errorf(ident, "can't compile a %s (type %s) variable read", ident.String(), tv.Type)) +} + +func (cl *compiler) compileConstantValue(source ast.Expr, cv constant.Value) { + switch cv.Kind() { + case constant.Bool: + v := constant.BoolVal(cv) + if v { + cl.emit(opPushTrue) + } else { + cl.emit(opPushFalse) + } + + case constant.String: + v := constant.StringVal(cv) + id := cl.internConstant(v) 
+ cl.emit8(opPushConst, id) + + case constant.Int: + v, exact := constant.Int64Val(cv) + if !exact { + panic(cl.errorf(source, "non-exact int value")) + } + id := cl.internIntConstant(int(v)) + cl.emit8(opPushIntConst, id) + + case constant.Complex: + panic(cl.errorf(source, "can't compile complex number constants yet")) + + case constant.Float: + panic(cl.errorf(source, "can't compile float constants yet")) + + default: + panic(cl.errorf(source, "unexpected constant %v", cv)) + } +} + +func (cl *compiler) internIntConstant(v int) int { + if id, ok := cl.intConstantsPool[v]; ok { + return id + } + id := len(cl.intConstants) + cl.intConstants = append(cl.intConstants, v) + cl.intConstantsPool[v] = id + return id +} + +func (cl *compiler) internConstant(v interface{}) int { + if _, ok := v.(int); ok { + panic("compiler error: int constant interned as interface{}") + } + if id, ok := cl.constantsPool[v]; ok { + return id + } + id := len(cl.constants) + cl.constants = append(cl.constants, v) + cl.constantsPool[v] = id + return id +} + +func (cl *compiler) linkJumps() { + for _, l := range cl.labels { + for _, jumpPos := range l.sources { + offset := l.targetPos - jumpPos + patchPos := jumpPos + 1 + put16(cl.code, patchPos, offset) + } + } +} + +func (cl *compiler) newLabel() *label { + l := &label{} + cl.labels = append(cl.labels, l) + return l +} + +func (cl *compiler) bindLabel(l *label) { + l.targetPos = len(cl.code) +} + +func (cl *compiler) emit(op opcode) { + cl.lastOp = op + cl.code = append(cl.code, byte(op)) +} + +func (cl *compiler) emitJump(op opcode, l *label) { + l.sources = append(l.sources, len(cl.code)) + cl.emit(op) + cl.code = append(cl.code, 0, 0) +} + +func (cl *compiler) emit8(op opcode, arg8 int) { + cl.emit(op) + cl.code = append(cl.code, byte(arg8)) +} + +func (cl *compiler) emit16(op opcode, arg16 int) { + cl.emit(op) + buf := make([]byte, 2) + put16(buf, 0, arg16) + cl.code = append(cl.code, buf...) +} + +func (cl *compiler) errorUnsupportedType(e ast.Node, typ types.Type, where string) compileError { + return cl.errorf(e, "%s type: %s is not supported, try something simpler", where, typ) +} + +func (cl *compiler) errorf(n ast.Node, format string, args ...interface{}) compileError { + loc := cl.ctx.Fset.Position(n.Pos()) + message := fmt.Sprintf("%s:%d: %s", loc.Filename, loc.Line, fmt.Sprintf(format, args...)) + return compileError(message) +} + +func (cl *compiler) isUncondJump(op opcode) bool { + switch op { + case opJump, opReturnFalse, opReturnTrue, opReturnTop, opReturnIntTop: + return true + default: + return false + } +} + +func (cl *compiler) isSupportedType(typ types.Type) bool { + switch typ := typ.Underlying().(type) { + case *types.Pointer: + // 1. Pointers to structs are supported. + _, isStruct := typ.Elem().Underlying().(*types.Struct) + return isStruct + + case *types.Basic: + // 2. Some of the basic types are supported. + // TODO: support byte/uint8 and maybe float64. + switch typ.Kind() { + case types.Bool, types.Int, types.String: + return true + default: + return false + } + + case *types.Interface: + // 3. Interfaces are supported. 
+ return true + + default: + return false + } +} diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/debug_info.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/debug_info.go new file mode 100644 index 000000000..e42bbb76a --- /dev/null +++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/debug_info.go @@ -0,0 +1,16 @@ +package quasigo + +type debugInfo struct { + funcs map[*Func]funcDebugInfo +} + +type funcDebugInfo struct { + paramNames []string + localNames []string +} + +func newDebugInfo() *debugInfo { + return &debugInfo{ + funcs: make(map[*Func]funcDebugInfo), + } +} diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/disasm.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/disasm.go new file mode 100644 index 000000000..192cf0710 --- /dev/null +++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/disasm.go @@ -0,0 +1,74 @@ +package quasigo + +import ( + "fmt" + "strings" +) + +// TODO(quasilyte): generate extra opcode info so we can simplify disasm function? + +func disasm(env *Env, fn *Func) string { + var out strings.Builder + + dbg, ok := env.debug.funcs[fn] + if !ok { + return "\n" + } + + code := fn.code + labels := map[int]string{} + walkBytecode(code, func(pc int, op opcode) { + switch op { + case opJumpTrue, opJumpFalse, opJump: + offset := decode16(code, pc+1) + targetPC := pc + offset + if _, ok := labels[targetPC]; !ok { + labels[targetPC] = fmt.Sprintf("L%d", len(labels)) + } + } + }) + + walkBytecode(code, func(pc int, op opcode) { + if l := labels[pc]; l != "" { + fmt.Fprintf(&out, "%s:\n", l) + } + var arg interface{} + var comment string + switch op { + case opCallNative: + id := decode16(code, pc+1) + arg = id + comment = env.nativeFuncs[id].name + case opPushParam, opPushIntParam: + index := int(code[pc+1]) + arg = index + comment = dbg.paramNames[index] + case opSetLocal, opSetIntLocal, opPushLocal, opPushIntLocal, opIncLocal, opDecLocal: + index := int(code[pc+1]) + arg = index + comment = dbg.localNames[index] + case opPushConst: + arg = int(code[pc+1]) + comment = fmt.Sprintf("value=%#v", fn.constants[code[pc+1]]) + case opPushIntConst: + arg = int(code[pc+1]) + comment = fmt.Sprintf("value=%#v", fn.intConstants[code[pc+1]]) + case opJumpTrue, opJumpFalse, opJump: + offset := decode16(code, pc+1) + targetPC := pc + offset + arg = offset + comment = labels[targetPC] + } + + if comment != "" { + comment = " # " + comment + } + if arg == nil { + fmt.Fprintf(&out, " %s%s\n", op, comment) + } else { + fmt.Fprintf(&out, " %s %#v%s\n", op, arg, comment) + } + }) + + return out.String() +} diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/env.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/env.go new file mode 100644 index 000000000..0e2a450b1 --- /dev/null +++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/env.go @@ -0,0 +1,42 @@ +package quasigo + +type funcKey struct { + qualifier string + name string +} + +func (k funcKey) String() string { + if k.qualifier != "" { + return k.qualifier + "." 
+ k.name + } + return k.name +} + +type nativeFunc struct { + mappedFunc func(*ValueStack) + name string // Needed for the readable disasm +} + +func newEnv() *Env { + return &Env{ + nameToNativeFuncID: make(map[funcKey]uint16), + nameToFuncID: make(map[funcKey]uint16), + + debug: newDebugInfo(), + } +} + +func (env *Env) addNativeFunc(key funcKey, f func(*ValueStack)) { + id := len(env.nativeFuncs) + env.nativeFuncs = append(env.nativeFuncs, nativeFunc{ + mappedFunc: f, + name: key.String(), + }) + env.nameToNativeFuncID[key] = uint16(id) +} + +func (env *Env) addFunc(key funcKey, f *Func) { + id := len(env.userFuncs) + env.userFuncs = append(env.userFuncs, f) + env.nameToFuncID[key] = uint16(id) +} diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/eval.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/eval.go new file mode 100644 index 000000000..afc000ea3 --- /dev/null +++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/eval.go @@ -0,0 +1,239 @@ +package quasigo + +import ( + "fmt" + "reflect" +) + +const maxFuncLocals = 8 + +// pop2 removes the two top stack elements and returns them. +// +// Note that it returns the popped elements in the reverse order +// to make it easier to map the order in which they were pushed. +func (s *ValueStack) pop2() (second, top interface{}) { + x := s.objects[len(s.objects)-2] + y := s.objects[len(s.objects)-1] + s.objects = s.objects[:len(s.objects)-2] + return x, y +} + +func (s *ValueStack) popInt2() (second, top int) { + x := s.ints[len(s.ints)-2] + y := s.ints[len(s.ints)-1] + s.ints = s.ints[:len(s.ints)-2] + return x, y +} + +// top returns top of the stack without popping it. +func (s *ValueStack) top() interface{} { return s.objects[len(s.objects)-1] } + +func (s *ValueStack) topInt() int { return s.ints[len(s.ints)-1] } + +// dup copies the top stack element. +// Identical to s.Push(s.Top()), but more concise. +func (s *ValueStack) dup() { s.objects = append(s.objects, s.objects[len(s.objects)-1]) } + +// discard drops the top stack element. +// Identical to s.Pop() without using the result. 
+func (s *ValueStack) discard() { s.objects = s.objects[:len(s.objects)-1] } + +func eval(env *EvalEnv, fn *Func, args []interface{}) CallResult { + pc := 0 + code := fn.code + stack := env.stack + var locals [maxFuncLocals]interface{} + var intLocals [maxFuncLocals]int + + for { + switch op := opcode(code[pc]); op { + case opPushParam: + index := code[pc+1] + stack.Push(args[index]) + pc += 2 + case opPushIntParam: + index := code[pc+1] + stack.PushInt(args[index].(int)) + pc += 2 + + case opPushLocal: + index := code[pc+1] + stack.Push(locals[index]) + pc += 2 + case opPushIntLocal: + index := code[pc+1] + stack.PushInt(intLocals[index]) + pc += 2 + + case opSetLocal: + index := code[pc+1] + locals[index] = stack.Pop() + pc += 2 + case opSetIntLocal: + index := code[pc+1] + intLocals[index] = stack.PopInt() + pc += 2 + + case opIncLocal: + index := code[pc+1] + intLocals[index]++ + pc += 2 + case opDecLocal: + index := code[pc+1] + intLocals[index]-- + pc += 2 + + case opPop: + stack.discard() + pc++ + case opDup: + stack.dup() + pc++ + + case opPushConst: + id := code[pc+1] + stack.Push(fn.constants[id]) + pc += 2 + case opPushIntConst: + id := code[pc+1] + stack.PushInt(fn.intConstants[id]) + pc += 2 + + case opPushTrue: + stack.Push(true) + pc++ + case opPushFalse: + stack.Push(false) + pc++ + + case opReturnTrue: + return CallResult{value: true} + case opReturnFalse: + return CallResult{value: false} + case opReturnTop: + return CallResult{value: stack.top()} + case opReturnIntTop: + return CallResult{scalarValue: uint64(stack.topInt())} + + case opCallNative: + id := decode16(code, pc+1) + fn := env.nativeFuncs[id].mappedFunc + fn(stack) + pc += 3 + + case opJump: + offset := decode16(code, pc+1) + pc += offset + + case opJumpFalse: + if !stack.Pop().(bool) { + offset := decode16(code, pc+1) + pc += offset + } else { + pc += 3 + } + case opJumpTrue: + if stack.Pop().(bool) { + offset := decode16(code, pc+1) + pc += offset + } else { + pc += 3 + } + + case opNot: + stack.Push(!stack.Pop().(bool)) + pc++ + + case opConcat: + x, y := stack.pop2() + stack.Push(x.(string) + y.(string)) + pc++ + + case opAdd: + x, y := stack.popInt2() + stack.PushInt(x + y) + pc++ + + case opSub: + x, y := stack.popInt2() + stack.PushInt(x - y) + pc++ + + case opEqInt: + x, y := stack.popInt2() + stack.Push(x == y) + pc++ + + case opNotEqInt: + x, y := stack.popInt2() + stack.Push(x != y) + pc++ + + case opGtInt: + x, y := stack.popInt2() + stack.Push(x > y) + pc++ + + case opGtEqInt: + x, y := stack.popInt2() + stack.Push(x >= y) + pc++ + + case opLtInt: + x, y := stack.popInt2() + stack.Push(x < y) + pc++ + + case opLtEqInt: + x, y := stack.popInt2() + stack.Push(x <= y) + pc++ + + case opEqString: + x, y := stack.pop2() + stack.Push(x.(string) == y.(string)) + pc++ + + case opNotEqString: + x, y := stack.pop2() + stack.Push(x.(string) != y.(string)) + pc++ + + case opIsNil: + x := stack.Pop() + stack.Push(x == nil || reflect.ValueOf(x).IsNil()) + pc++ + + case opIsNotNil: + x := stack.Pop() + stack.Push(x != nil && !reflect.ValueOf(x).IsNil()) + pc++ + + case opStringSlice: + to := stack.PopInt() + from := stack.PopInt() + s := stack.Pop().(string) + stack.Push(s[from:to]) + pc++ + + case opStringSliceFrom: + from := stack.PopInt() + s := stack.Pop().(string) + stack.Push(s[from:]) + pc++ + + case opStringSliceTo: + to := stack.PopInt() + s := stack.Pop().(string) + stack.Push(s[:to]) + pc++ + + case opStringLen: + stack.PushInt(len(stack.Pop().(string))) + pc++ + + default: + 
panic(fmt.Sprintf("malformed bytecode: unexpected %s found", op)) + } + } +} diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/gen_opcodes.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/gen_opcodes.go new file mode 100644 index 000000000..fde48b7cd --- /dev/null +++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/gen_opcodes.go @@ -0,0 +1,184 @@ +// +build main + +package main + +import ( + "bytes" + "fmt" + "go/format" + "io/ioutil" + "log" + "strings" + "text/template" +) + +var opcodePrototypes = []opcodeProto{ + {"Pop", "op", "(value) -> ()"}, + {"Dup", "op", "(x) -> (x x)"}, + + {"PushParam", "op index:u8", "() -> (value)"}, + {"PushIntParam", "op index:u8", "() -> (value:int)"}, + {"PushLocal", "op index:u8", "() -> (value)"}, + {"PushIntLocal", "op index:u8", "() -> (value:int)"}, + {"PushFalse", "op", "() -> (false)"}, + {"PushTrue", "op", "() -> (true)"}, + {"PushConst", "op constid:u8", "() -> (const)"}, + {"PushIntConst", "op constid:u8", "() -> (const:int)"}, + + {"SetLocal", "op index:u8", "(value) -> ()"}, + {"SetIntLocal", "op index:u8", "(value:int) -> ()"}, + {"IncLocal", "op index:u8", stackUnchanged}, + {"DecLocal", "op index:u8", stackUnchanged}, + + {"ReturnTop", "op", "(value) -> (value)"}, + {"ReturnIntTop", "op", "(value) -> (value)"}, + {"ReturnFalse", "op", stackUnchanged}, + {"ReturnTrue", "op", stackUnchanged}, + + {"Jump", "op offset:i16", stackUnchanged}, + {"JumpFalse", "op offset:i16", "(cond:bool) -> ()"}, + {"JumpTrue", "op offset:i16", "(cond:bool) -> ()"}, + + {"CallNative", "op funcid:u16", "(args...) -> (results...)"}, + + {"IsNil", "op", "(value) -> (result:bool)"}, + {"IsNotNil", "op", "(value) -> (result:bool)"}, + + {"Not", "op", "(value:bool) -> (result:bool)"}, + + {"EqInt", "op", "(x:int y:int) -> (result:bool)"}, + {"NotEqInt", "op", "(x:int y:int) -> (result:bool)"}, + {"GtInt", "op", "(x:int y:int) -> (result:bool)"}, + {"GtEqInt", "op", "(x:int y:int) -> (result:bool)"}, + {"LtInt", "op", "(x:int y:int) -> (result:bool)"}, + {"LtEqInt", "op", "(x:int y:int) -> (result:bool)"}, + + {"EqString", "op", "(x:string y:string) -> (result:bool)"}, + {"NotEqString", "op", "(x:string y:string) -> (result:bool)"}, + + {"Concat", "op", "(x:string y:string) -> (result:string)"}, + {"Add", "op", "(x:int y:int) -> (result:int)"}, + {"Sub", "op", "(x:int y:int) -> (result:int)"}, + + {"StringSlice", "op", "(s:string from:int to:int) -> (result:string)"}, + {"StringSliceFrom", "op", "(s:string from:int) -> (result:string)"}, + {"StringSliceTo", "op", "(s:string to:int) -> (result:string)"}, + {"StringLen", "op", "(s:string) -> (result:int)"}, +} + +type opcodeProto struct { + name string + enc string + stack string +} + +type encodingInfo struct { + width int + parts int +} + +type opcodeInfo struct { + Opcode byte + Name string + Enc string + EncString string + Stack string + Width int +} + +const stackUnchanged = "" + +var fileTemplate = template.Must(template.New("opcodes.go").Parse(`// Code generated "gen_opcodes.go"; DO NOT EDIT. 
+ +package quasigo + +//go:generate stringer -type=opcode -trimprefix=op +type opcode byte + +const ( + opInvalid opcode = 0 +{{ range .Opcodes }} + // Encoding: {{.EncString}} + // Stack effect: {{ if .Stack}}{{.Stack}}{{else}}unchanged{{end}} + op{{ .Name }} opcode = {{.Opcode}} +{{ end -}} +) + +type opcodeInfo struct { + width int +} + +var opcodeInfoTable = [256]opcodeInfo{ + opInvalid: {width: 1}, + +{{ range .Opcodes -}} + op{{.Name}}: {width: {{.Width}}}, +{{ end }} +} +`)) + +func main() { + opcodes := make([]opcodeInfo, len(opcodePrototypes)) + for i, proto := range opcodePrototypes { + opcode := byte(i + 1) + encInfo := decodeEnc(proto.enc) + var encString string + if encInfo.parts == 1 { + encString = fmt.Sprintf("0x%02x (width=%d)", opcode, encInfo.width) + } else { + encString = fmt.Sprintf("0x%02x %s (width=%d)", + opcode, strings.TrimPrefix(proto.enc, "op "), encInfo.width) + } + + opcodes[i] = opcodeInfo{ + Opcode: opcode, + Name: proto.name, + Enc: proto.enc, + EncString: encString, + Stack: proto.stack, + Width: encInfo.width, + } + } + + var buf bytes.Buffer + err := fileTemplate.Execute(&buf, map[string]interface{}{ + "Opcodes": opcodes, + }) + if err != nil { + log.Panicf("execute template: %v", err) + } + writeFile("opcodes.gen.go", buf.Bytes()) +} + +func decodeEnc(enc string) encodingInfo { + fields := strings.Fields(enc) + width := 0 + for _, f := range fields { + parts := strings.Split(f, ":") + var typ string + if len(parts) == 2 { + typ = parts[1] + } else { + typ = "u8" + } + switch typ { + case "i8", "u8": + width++ + case "i16", "u16": + width += 2 + default: + panic(fmt.Sprintf("unknown op argument type: %s", typ)) + } + } + return encodingInfo{width: width, parts: len(fields)} +} + +func writeFile(filename string, data []byte) { + pretty, err := format.Source(data) + if err != nil { + log.Panicf("gofmt: %v", err) + } + if err := ioutil.WriteFile(filename, pretty, 0666); err != nil { + log.Panicf("write %s: %v", filename, err) + } +} diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/opcode_string.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/opcode_string.go new file mode 100644 index 000000000..27dfc1f67 --- /dev/null +++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/opcode_string.go @@ -0,0 +1,63 @@ +// Code generated by "stringer -type=opcode -trimprefix=op"; DO NOT EDIT. + +package quasigo + +import "strconv" + +func _() { + // An "invalid array index" compiler error signifies that the constant values have changed. + // Re-run the stringer command to generate them again. 
+ var x [1]struct{} + _ = x[opInvalid-0] + _ = x[opPop-1] + _ = x[opDup-2] + _ = x[opPushParam-3] + _ = x[opPushIntParam-4] + _ = x[opPushLocal-5] + _ = x[opPushIntLocal-6] + _ = x[opPushFalse-7] + _ = x[opPushTrue-8] + _ = x[opPushConst-9] + _ = x[opPushIntConst-10] + _ = x[opSetLocal-11] + _ = x[opSetIntLocal-12] + _ = x[opIncLocal-13] + _ = x[opDecLocal-14] + _ = x[opReturnTop-15] + _ = x[opReturnIntTop-16] + _ = x[opReturnFalse-17] + _ = x[opReturnTrue-18] + _ = x[opJump-19] + _ = x[opJumpFalse-20] + _ = x[opJumpTrue-21] + _ = x[opCallNative-22] + _ = x[opIsNil-23] + _ = x[opIsNotNil-24] + _ = x[opNot-25] + _ = x[opEqInt-26] + _ = x[opNotEqInt-27] + _ = x[opGtInt-28] + _ = x[opGtEqInt-29] + _ = x[opLtInt-30] + _ = x[opLtEqInt-31] + _ = x[opEqString-32] + _ = x[opNotEqString-33] + _ = x[opConcat-34] + _ = x[opAdd-35] + _ = x[opSub-36] + _ = x[opStringSlice-37] + _ = x[opStringSliceFrom-38] + _ = x[opStringSliceTo-39] + _ = x[opStringLen-40] +} + +const _opcode_name = "InvalidPopDupPushParamPushIntParamPushLocalPushIntLocalPushFalsePushTruePushConstPushIntConstSetLocalSetIntLocalIncLocalDecLocalReturnTopReturnIntTopReturnFalseReturnTrueJumpJumpFalseJumpTrueCallNativeIsNilIsNotNilNotEqIntNotEqIntGtIntGtEqIntLtIntLtEqIntEqStringNotEqStringConcatAddSubStringSliceStringSliceFromStringSliceToStringLen" + +var _opcode_index = [...]uint16{0, 7, 10, 13, 22, 34, 43, 55, 64, 72, 81, 93, 101, 112, 120, 128, 137, 149, 160, 170, 174, 183, 191, 201, 206, 214, 217, 222, 230, 235, 242, 247, 254, 262, 273, 279, 282, 285, 296, 311, 324, 333} + +func (i opcode) String() string { + if i >= opcode(len(_opcode_index)-1) { + return "opcode(" + strconv.FormatInt(int64(i), 10) + ")" + } + return _opcode_name[_opcode_index[i]:_opcode_index[i+1]] +} diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/opcodes.gen.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/opcodes.gen.go new file mode 100644 index 000000000..268b42a1e --- /dev/null +++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/opcodes.gen.go @@ -0,0 +1,219 @@ +// Code generated "gen_opcodes.go"; DO NOT EDIT. 
+ +package quasigo + +//go:generate stringer -type=opcode -trimprefix=op +type opcode byte + +const ( + opInvalid opcode = 0 + + // Encoding: 0x01 (width=1) + // Stack effect: (value) -> () + opPop opcode = 1 + + // Encoding: 0x02 (width=1) + // Stack effect: (x) -> (x x) + opDup opcode = 2 + + // Encoding: 0x03 index:u8 (width=2) + // Stack effect: () -> (value) + opPushParam opcode = 3 + + // Encoding: 0x04 index:u8 (width=2) + // Stack effect: () -> (value:int) + opPushIntParam opcode = 4 + + // Encoding: 0x05 index:u8 (width=2) + // Stack effect: () -> (value) + opPushLocal opcode = 5 + + // Encoding: 0x06 index:u8 (width=2) + // Stack effect: () -> (value:int) + opPushIntLocal opcode = 6 + + // Encoding: 0x07 (width=1) + // Stack effect: () -> (false) + opPushFalse opcode = 7 + + // Encoding: 0x08 (width=1) + // Stack effect: () -> (true) + opPushTrue opcode = 8 + + // Encoding: 0x09 constid:u8 (width=2) + // Stack effect: () -> (const) + opPushConst opcode = 9 + + // Encoding: 0x0a constid:u8 (width=2) + // Stack effect: () -> (const:int) + opPushIntConst opcode = 10 + + // Encoding: 0x0b index:u8 (width=2) + // Stack effect: (value) -> () + opSetLocal opcode = 11 + + // Encoding: 0x0c index:u8 (width=2) + // Stack effect: (value:int) -> () + opSetIntLocal opcode = 12 + + // Encoding: 0x0d index:u8 (width=2) + // Stack effect: unchanged + opIncLocal opcode = 13 + + // Encoding: 0x0e index:u8 (width=2) + // Stack effect: unchanged + opDecLocal opcode = 14 + + // Encoding: 0x0f (width=1) + // Stack effect: (value) -> (value) + opReturnTop opcode = 15 + + // Encoding: 0x10 (width=1) + // Stack effect: (value) -> (value) + opReturnIntTop opcode = 16 + + // Encoding: 0x11 (width=1) + // Stack effect: unchanged + opReturnFalse opcode = 17 + + // Encoding: 0x12 (width=1) + // Stack effect: unchanged + opReturnTrue opcode = 18 + + // Encoding: 0x13 offset:i16 (width=3) + // Stack effect: unchanged + opJump opcode = 19 + + // Encoding: 0x14 offset:i16 (width=3) + // Stack effect: (cond:bool) -> () + opJumpFalse opcode = 20 + + // Encoding: 0x15 offset:i16 (width=3) + // Stack effect: (cond:bool) -> () + opJumpTrue opcode = 21 + + // Encoding: 0x16 funcid:u16 (width=3) + // Stack effect: (args...) -> (results...) 
+ opCallNative opcode = 22 + + // Encoding: 0x17 (width=1) + // Stack effect: (value) -> (result:bool) + opIsNil opcode = 23 + + // Encoding: 0x18 (width=1) + // Stack effect: (value) -> (result:bool) + opIsNotNil opcode = 24 + + // Encoding: 0x19 (width=1) + // Stack effect: (value:bool) -> (result:bool) + opNot opcode = 25 + + // Encoding: 0x1a (width=1) + // Stack effect: (x:int y:int) -> (result:bool) + opEqInt opcode = 26 + + // Encoding: 0x1b (width=1) + // Stack effect: (x:int y:int) -> (result:bool) + opNotEqInt opcode = 27 + + // Encoding: 0x1c (width=1) + // Stack effect: (x:int y:int) -> (result:bool) + opGtInt opcode = 28 + + // Encoding: 0x1d (width=1) + // Stack effect: (x:int y:int) -> (result:bool) + opGtEqInt opcode = 29 + + // Encoding: 0x1e (width=1) + // Stack effect: (x:int y:int) -> (result:bool) + opLtInt opcode = 30 + + // Encoding: 0x1f (width=1) + // Stack effect: (x:int y:int) -> (result:bool) + opLtEqInt opcode = 31 + + // Encoding: 0x20 (width=1) + // Stack effect: (x:string y:string) -> (result:bool) + opEqString opcode = 32 + + // Encoding: 0x21 (width=1) + // Stack effect: (x:string y:string) -> (result:bool) + opNotEqString opcode = 33 + + // Encoding: 0x22 (width=1) + // Stack effect: (x:string y:string) -> (result:string) + opConcat opcode = 34 + + // Encoding: 0x23 (width=1) + // Stack effect: (x:int y:int) -> (result:int) + opAdd opcode = 35 + + // Encoding: 0x24 (width=1) + // Stack effect: (x:int y:int) -> (result:int) + opSub opcode = 36 + + // Encoding: 0x25 (width=1) + // Stack effect: (s:string from:int to:int) -> (result:string) + opStringSlice opcode = 37 + + // Encoding: 0x26 (width=1) + // Stack effect: (s:string from:int) -> (result:string) + opStringSliceFrom opcode = 38 + + // Encoding: 0x27 (width=1) + // Stack effect: (s:string to:int) -> (result:string) + opStringSliceTo opcode = 39 + + // Encoding: 0x28 (width=1) + // Stack effect: (s:string) -> (result:int) + opStringLen opcode = 40 +) + +type opcodeInfo struct { + width int +} + +var opcodeInfoTable = [256]opcodeInfo{ + opInvalid: {width: 1}, + + opPop: {width: 1}, + opDup: {width: 1}, + opPushParam: {width: 2}, + opPushIntParam: {width: 2}, + opPushLocal: {width: 2}, + opPushIntLocal: {width: 2}, + opPushFalse: {width: 1}, + opPushTrue: {width: 1}, + opPushConst: {width: 2}, + opPushIntConst: {width: 2}, + opSetLocal: {width: 2}, + opSetIntLocal: {width: 2}, + opIncLocal: {width: 2}, + opDecLocal: {width: 2}, + opReturnTop: {width: 1}, + opReturnIntTop: {width: 1}, + opReturnFalse: {width: 1}, + opReturnTrue: {width: 1}, + opJump: {width: 3}, + opJumpFalse: {width: 3}, + opJumpTrue: {width: 3}, + opCallNative: {width: 3}, + opIsNil: {width: 1}, + opIsNotNil: {width: 1}, + opNot: {width: 1}, + opEqInt: {width: 1}, + opNotEqInt: {width: 1}, + opGtInt: {width: 1}, + opGtEqInt: {width: 1}, + opLtInt: {width: 1}, + opLtEqInt: {width: 1}, + opEqString: {width: 1}, + opNotEqString: {width: 1}, + opConcat: {width: 1}, + opAdd: {width: 1}, + opSub: {width: 1}, + opStringSlice: {width: 1}, + opStringSliceFrom: {width: 1}, + opStringSliceTo: {width: 1}, + opStringLen: {width: 1}, +} diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/quasigo.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/quasigo.go new file mode 100644 index 000000000..7d457538d --- /dev/null +++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/quasigo.go @@ -0,0 +1,165 @@ +// Package quasigo implements a Go subset compiler and interpreter. 
+// +// The implementation details are not part of the contract of this package. +package quasigo + +import ( + "go/ast" + "go/token" + "go/types" +) + +// TODO(quasilyte): document what is thread-safe and what is not. +// TODO(quasilyte): add a readme. + +// Env is used to hold both compilation and evaluation data. +type Env struct { + // TODO(quasilyte): store both native and user func ids in one map? + + nativeFuncs []nativeFunc + nameToNativeFuncID map[funcKey]uint16 + + userFuncs []*Func + nameToFuncID map[funcKey]uint16 + + // debug contains all information that is only needed + // for better debugging and compiled code introspection. + // Right now it's always enabled, but we may allow stripping it later. + debug *debugInfo +} + +// EvalEnv is a goroutine-local handle for Env. +// To get one, use Env.GetEvalEnv() method. +type EvalEnv struct { + nativeFuncs []nativeFunc + userFuncs []*Func + + stack *ValueStack +} + +// NewEnv creates a new empty environment. +func NewEnv() *Env { + return newEnv() +} + +// GetEvalEnv creates a new goroutine-local handle of env. +func (env *Env) GetEvalEnv() *EvalEnv { + return &EvalEnv{ + nativeFuncs: env.nativeFuncs, + userFuncs: env.userFuncs, + stack: &ValueStack{ + objects: make([]interface{}, 0, 32), + ints: make([]int, 0, 16), + }, + } +} + +// AddNativeMethod binds `$typeName.$methodName` symbol with f. +// A typeName should be fully qualified, like `github.com/user/pkgname.TypeName`. +// If the method is defined only on a pointer type, the typeName should start with `*`. +func (env *Env) AddNativeMethod(typeName, methodName string, f func(*ValueStack)) { + env.addNativeFunc(funcKey{qualifier: typeName, name: methodName}, f) +} + +// AddNativeFunc binds `$pkgPath.$funcName` symbol with f. +// A pkgPath should be a full package path in which funcName is defined. +func (env *Env) AddNativeFunc(pkgPath, funcName string, f func(*ValueStack)) { + env.addNativeFunc(funcKey{qualifier: pkgPath, name: funcName}, f) +} + +// AddFunc binds `$pkgPath.$funcName` symbol with f. +func (env *Env) AddFunc(pkgPath, funcName string, f *Func) { + env.addFunc(funcKey{qualifier: pkgPath, name: funcName}, f) +} + +// GetFunc finds a previously bound function by searching for the `$pkgPath.$funcName` symbol. +func (env *Env) GetFunc(pkgPath, funcName string) *Func { + id := env.nameToFuncID[funcKey{qualifier: pkgPath, name: funcName}] + return env.userFuncs[id] +} + +// CompileContext is used to provide necessary data to the compiler. +type CompileContext struct { + // Env is the shared environment that should be used for all functions + // being compiled; then it should be used to execute these functions. + Env *Env + + Types *types.Info + Fset *token.FileSet +} + +// Compile prepares an executable version of fn. +func Compile(ctx *CompileContext, fn *ast.FuncDecl) (compiled *Func, err error) { + return compile(ctx, fn) +} + +// Call invokes a given function with provided arguments. +func Call(env *EvalEnv, fn *Func, args ...interface{}) CallResult { + env.stack.objects = env.stack.objects[:0] + env.stack.ints = env.stack.ints[:0] + return eval(env, fn, args) +} + +// CallResult is a return value of Call function. +// For most functions, Value() should be called to get the actual result. +// For int-typed functions, IntValue() should be used instead. +type CallResult struct { + value interface{} + scalarValue uint64 +} + +// Value unboxes an actual call return value. +// For int results, use IntValue(). 
+func (res CallResult) Value() interface{} { return res.value } + +// IntValue unboxes an actual call return value. +func (res CallResult) IntValue() int { return int(res.scalarValue) } + +// Disasm returns the compiled function disassembly text. +// This output is not guaranteed to be stable between versions +// and should be used only for debugging purposes. +func Disasm(env *Env, fn *Func) string { + return disasm(env, fn) +} + +// Func is a compiled function that is ready to be executed. +type Func struct { + code []byte + + constants []interface{} + intConstants []int +} + +// ValueStack is used to manipulate runtime values during the evaluation. +// Function arguments are pushed to the stack. +// Function results are returned via stack as well. +// +// For the sake of efficiency, it stores different types separately. +// If int was pushed with PushInt(), it should be retrieved by PopInt(). +// It's a bad idea to do a Push() and then PopInt() and vice-versa. +type ValueStack struct { + objects []interface{} + ints []int +} + +// Pop removes the top stack element and returns it. +// Important: for int-typed values, use PopInt. +func (s *ValueStack) Pop() interface{} { + x := s.objects[len(s.objects)-1] + s.objects = s.objects[:len(s.objects)-1] + return x +} + +// PopInt removes the top stack element and returns it. +func (s *ValueStack) PopInt() int { + x := s.ints[len(s.ints)-1] + s.ints = s.ints[:len(s.ints)-1] + return x +} + +// Push adds x to the stack. +// Important: for int-typed values, use PushInt. +func (s *ValueStack) Push(x interface{}) { s.objects = append(s.objects, x) } + +// PushInt adds x to the stack. +func (s *ValueStack) PushInt(x int) { s.ints = append(s.ints, x) } diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/utils.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/utils.go new file mode 100644 index 000000000..a5c3676a4 --- /dev/null +++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/quasigo/utils.go @@ -0,0 +1,60 @@ +package quasigo + +import ( + "encoding/binary" + "go/ast" + "go/types" +) + +func pickOp(cond bool, ifTrue, otherwise opcode) opcode { + if cond { + return ifTrue + } + return otherwise +} + +func put16(code []byte, pos, value int) { + binary.LittleEndian.PutUint16(code[pos:], uint16(value)) +} + +func decode16(code []byte, pos int) int { + return int(int16(binary.LittleEndian.Uint16(code[pos:]))) +} + +func typeIsInt(typ types.Type) bool { + basic, ok := typ.Underlying().(*types.Basic) + if !ok { + return false + } + switch basic.Kind() { + case types.Int, types.UntypedInt: + return true + default: + return false + } +} + +func typeIsString(typ types.Type) bool { + basic, ok := typ.Underlying().(*types.Basic) + if !ok { + return false + } + return basic.Info()&types.IsString != 0 +} + +func walkBytecode(code []byte, fn func(pc int, op opcode)) { + pc := 0 + for pc < len(code) { + op := opcode(code[pc]) + fn(pc, op) + pc += opcodeInfoTable[op].width + } +} + +func identName(n ast.Expr) string { + id, ok := n.(*ast.Ident) + if ok { + return id.Name + } + return "" +} diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ruleguard.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ruleguard.go new file mode 100644 index 000000000..ba23861a2 --- /dev/null +++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/ruleguard.go @@ -0,0 +1,87 @@ +package ruleguard + +import ( + "go/ast" + "go/token" + "go/types" + "io" +) + +// Engine is the main ruleguard package API object. 
+// +// First, load some ruleguard files with Load() to build a rule set. +// Then use Run() to execute the rules. +// +// It's advised to have only 1 engine per application as it does a lot of caching. +// The Run() method is synchronized, so it can be used concurrently. +// +// An Engine must be created with NewEngine() function. +type Engine struct { + impl *engine +} + +// NewEngine creates an engine with empty rule set. +func NewEngine() *Engine { + return &Engine{impl: newEngine()} +} + +// Load reads a ruleguard file from r and adds it to the engine rule set. +// +// Load() is not thread-safe, especially if used concurrently with Run() method. +// It's advised to Load() all ruleguard files under a critical section (like sync.Once) +// and then use Run() to execute all of them. +func (e *Engine) Load(ctx *ParseContext, filename string, r io.Reader) error { + return e.impl.Load(ctx, filename, r) +} + +// Run executes all loaded rules on a given file. +// Matched rules invoke `RunContext.Report()` method. +// +// Run() is thread-safe, unless used in parallel with Load(), +// which modifies the engine state. +func (e *Engine) Run(ctx *RunContext, f *ast.File) error { + return e.impl.Run(ctx, f) +} + +type ParseContext struct { + DebugFilter string + DebugImports bool + DebugPrint func(string) + + // GroupFilter is called for every rule group being parsed. + // If function returns false, that group will not be included + // in the resulting rules set. + // Nil filter accepts all rule groups. + GroupFilter func(string) bool + + Fset *token.FileSet +} + +type RunContext struct { + Debug string + DebugImports bool + DebugPrint func(string) + + Types *types.Info + Sizes types.Sizes + Fset *token.FileSet + Report func(rule GoRuleInfo, n ast.Node, msg string, s *Suggestion) + Pkg *types.Package +} + +type Suggestion struct { + From token.Pos + To token.Pos + Replacement []byte +} + +type GoRuleInfo struct { + // Filename is a file that defined this rule. + Filename string + + // Line is a line inside a file that defined this rule. + Line int + + // Group is a function name that contained this rule. 
+ Group string +} diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/runner.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/runner.go new file mode 100644 index 000000000..a5d254411 --- /dev/null +++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/runner.go @@ -0,0 +1,349 @@ +package ruleguard + +import ( + "bytes" + "fmt" + "go/ast" + "go/printer" + "io/ioutil" + "path/filepath" + "sort" + "strconv" + "strings" + + "github.com/quasilyte/go-ruleguard/internal/gogrep" + "github.com/quasilyte/go-ruleguard/nodetag" + "github.com/quasilyte/go-ruleguard/ruleguard/goutil" +) + +type rulesRunner struct { + state *engineState + + ctx *RunContext + rules *goRuleSet + + importer *goImporter + + filename string + src []byte + + filterParams filterParams +} + +func newRulesRunner(ctx *RunContext, state *engineState, rules *goRuleSet) *rulesRunner { + importer := newGoImporter(state, goImporterConfig{ + fset: ctx.Fset, + debugImports: ctx.DebugImports, + debugPrint: ctx.DebugPrint, + }) + rr := &rulesRunner{ + ctx: ctx, + importer: importer, + rules: rules, + filterParams: filterParams{ + env: state.env.GetEvalEnv(), + importer: importer, + ctx: ctx, + }, + } + rr.filterParams.nodeText = rr.nodeText + return rr +} + +func (rr *rulesRunner) nodeText(n ast.Node) []byte { + if gogrep.IsEmptyNodeSlice(n) { + return nil + } + + from := rr.ctx.Fset.Position(n.Pos()).Offset + to := rr.ctx.Fset.Position(n.End()).Offset + src := rr.fileBytes() + if (from >= 0 && from < len(src)) && (to >= 0 && to < len(src)) { + return src[from:to] + } + + // Go printer would panic on comments. + if n, ok := n.(*ast.Comment); ok { + return []byte(n.Text) + } + + // Fallback to the printer. + var buf bytes.Buffer + if err := printer.Fprint(&buf, rr.ctx.Fset, n); err != nil { + panic(err) + } + return buf.Bytes() +} + +func (rr *rulesRunner) fileBytes() []byte { + if rr.src != nil { + return rr.src + } + + // TODO(quasilyte): re-use src slice? + src, err := ioutil.ReadFile(rr.filename) + if err != nil || src == nil { + // Assign a zero-length slice so rr.src + // is never nil during the second fileBytes call. + rr.src = make([]byte, 0) + } else { + rr.src = src + } + return rr.src +} + +func (rr *rulesRunner) run(f *ast.File) error { + // TODO(quasilyte): run local rules as well. + + rr.filename = rr.ctx.Fset.Position(f.Pos()).Filename + rr.filterParams.filename = rr.filename + rr.collectImports(f) + + if rr.rules.universal.categorizedNum != 0 { + ast.Inspect(f, func(n ast.Node) bool { + if n == nil { + return false + } + rr.runRules(n) + return true + }) + } + + if len(rr.rules.universal.commentRules) != 0 { + for _, commentGroup := range f.Comments { + for _, comment := range commentGroup.List { + rr.runCommentRules(comment) + } + } + } + + return nil +} + +func (rr *rulesRunner) runCommentRules(comment *ast.Comment) { + // We'll need that file to create a token.Pos from the artificial offset. + file := rr.ctx.Fset.File(comment.Pos()) + + for _, rule := range rr.rules.universal.commentRules { + var m commentMatchData + if rule.captureGroups { + result := rule.pat.FindStringSubmatchIndex(comment.Text) + if result == nil { + continue + } + for i, name := range rule.pat.SubexpNames() { + if i == 0 || name == "" { + continue + } + resultIndex := i * 2 + beginPos := result[resultIndex+0] + endPos := result[resultIndex+1] + // Negative index a special case when named group captured nothing. + // Consider this pattern: `(?Pfoo)|(bar)`. + // If we have `bar` input string, will remain empty. 
+ if beginPos < 0 || endPos < 0 { + m.capture = append(m.capture, gogrep.CapturedNode{ + Name: name, + Node: &ast.Comment{Slash: comment.Pos()}, + }) + continue + } + m.capture = append(m.capture, gogrep.CapturedNode{ + Name: name, + Node: &ast.Comment{ + Slash: file.Pos(beginPos + file.Offset(comment.Pos())), + Text: comment.Text[beginPos:endPos], + }, + }) + } + m.node = &ast.Comment{ + Slash: file.Pos(result[0] + file.Offset(comment.Pos())), + Text: comment.Text[result[0]:result[1]], + } + } else { + // Fast path: no need to save any submatches. + result := rule.pat.FindStringIndex(comment.Text) + if result == nil { + continue + } + m.node = &ast.Comment{ + Slash: file.Pos(result[0] + file.Offset(comment.Pos())), + Text: comment.Text[result[0]:result[1]], + } + } + + accept := rr.handleCommentMatch(rule, m) + if accept { + break + } + } +} + +func (rr *rulesRunner) runRules(n ast.Node) { + tag := nodetag.FromNode(n) + for _, rule := range rr.rules.universal.rulesByTag[tag] { + matched := false + rule.pat.MatchNode(n, func(m gogrep.MatchData) { + matched = rr.handleMatch(rule, m) + }) + if matched { + break + } + } +} + +func (rr *rulesRunner) reject(rule goRule, reason string, m matchData) { + if rule.group != rr.ctx.Debug { + return // This rule is not being debugged + } + + pos := rr.ctx.Fset.Position(m.Node().Pos()) + rr.ctx.DebugPrint(fmt.Sprintf("%s:%d: [%s:%d] rejected by %s", + pos.Filename, pos.Line, filepath.Base(rule.filename), rule.line, reason)) + + values := make([]gogrep.CapturedNode, len(m.CaptureList())) + copy(values, m.CaptureList()) + sort.Slice(values, func(i, j int) bool { + return values[i].Name < values[j].Name + }) + + for _, v := range values { + name := v.Name + node := v.Node + + if comment, ok := node.(*ast.Comment); ok { + s := strings.ReplaceAll(comment.Text, "\n", `\n`) + rr.ctx.DebugPrint(fmt.Sprintf(" $%s: %s", name, s)) + continue + } + + var expr ast.Expr + switch node := node.(type) { + case ast.Expr: + expr = node + case *ast.ExprStmt: + expr = node.X + default: + continue + } + + typ := rr.ctx.Types.TypeOf(expr) + typeString := "" + if typ != nil { + typeString = typ.String() + } + s := strings.ReplaceAll(goutil.SprintNode(rr.ctx.Fset, expr), "\n", `\n`) + rr.ctx.DebugPrint(fmt.Sprintf(" $%s %s: %s", name, typeString, s)) + } +} + +func (rr *rulesRunner) handleCommentMatch(rule goCommentRule, m commentMatchData) bool { + if rule.base.filter.fn != nil { + rr.filterParams.match = m + filterResult := rule.base.filter.fn(&rr.filterParams) + if !filterResult.Matched() { + rr.reject(rule.base, filterResult.RejectReason(), m) + return false + } + } + + message := rr.renderMessage(rule.base.msg, m, true) + node := m.Node() + if rule.base.location != "" { + node, _ = m.CapturedByName(rule.base.location) + } + var suggestion *Suggestion + if rule.base.suggestion != "" { + suggestion = &Suggestion{ + Replacement: []byte(rr.renderMessage(rule.base.suggestion, m, false)), + From: node.Pos(), + To: node.End(), + } + } + info := GoRuleInfo{ + Group: rule.base.group, + Filename: rule.base.filename, + Line: rule.base.line, + } + rr.ctx.Report(info, node, message, suggestion) + return true +} + +func (rr *rulesRunner) handleMatch(rule goRule, m gogrep.MatchData) bool { + if rule.filter.fn != nil { + rr.filterParams.match = astMatchData{match: m} + filterResult := rule.filter.fn(&rr.filterParams) + if !filterResult.Matched() { + rr.reject(rule, filterResult.RejectReason(), astMatchData{match: m}) + return false + } + } + + message := rr.renderMessage(rule.msg, 
astMatchData{match: m}, true) + node := m.Node + if rule.location != "" { + node, _ = m.CapturedByName(rule.location) + } + var suggestion *Suggestion + if rule.suggestion != "" { + suggestion = &Suggestion{ + Replacement: []byte(rr.renderMessage(rule.suggestion, astMatchData{match: m}, false)), + From: node.Pos(), + To: node.End(), + } + } + info := GoRuleInfo{ + Group: rule.group, + Filename: rule.filename, + Line: rule.line, + } + rr.ctx.Report(info, node, message, suggestion) + return true +} + +func (rr *rulesRunner) collectImports(f *ast.File) { + rr.filterParams.imports = make(map[string]struct{}, len(f.Imports)) + for _, spec := range f.Imports { + s, err := strconv.Unquote(spec.Path.Value) + if err != nil { + continue + } + rr.filterParams.imports[s] = struct{}{} + } +} + +func (rr *rulesRunner) renderMessage(msg string, m matchData, truncate bool) string { + var buf strings.Builder + if strings.Contains(msg, "$$") { + buf.Write(rr.nodeText(m.Node())) + msg = strings.ReplaceAll(msg, "$$", buf.String()) + } + if len(m.CaptureList()) == 0 { + return msg + } + + capture := make([]gogrep.CapturedNode, len(m.CaptureList())) + copy(capture, m.CaptureList()) + sort.Slice(capture, func(i, j int) bool { + return len(capture[i].Name) > len(capture[j].Name) + }) + + for _, c := range capture { + n := c.Node + key := "$" + c.Name + if !strings.Contains(msg, key) { + continue + } + buf.Reset() + buf.Write(rr.nodeText(n)) + // Don't interpolate strings that are too long. + var replacement string + if truncate && buf.Len() > 60 { + replacement = key + } else { + replacement = buf.String() + } + msg = strings.ReplaceAll(msg, key, replacement) + } + return msg +} diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/typematch/patternop_string.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/typematch/patternop_string.go new file mode 100644 index 000000000..1d739819d --- /dev/null +++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/typematch/patternop_string.go @@ -0,0 +1,34 @@ +// Code generated by "stringer -type=patternOp"; DO NOT EDIT. + +package typematch + +import "strconv" + +func _() { + // An "invalid array index" compiler error signifies that the constant values have changed. + // Re-run the stringer command to generate them again. 
+ var x [1]struct{} + _ = x[opBuiltinType-0] + _ = x[opPointer-1] + _ = x[opVar-2] + _ = x[opVarSeq-3] + _ = x[opSlice-4] + _ = x[opArray-5] + _ = x[opMap-6] + _ = x[opChan-7] + _ = x[opFunc-8] + _ = x[opStructNoSeq-9] + _ = x[opStruct-10] + _ = x[opNamed-11] +} + +const _patternOp_name = "opBuiltinTypeopPointeropVaropVarSeqopSliceopArrayopMapopChanopFuncopStructNoSeqopStructopNamed" + +var _patternOp_index = [...]uint8{0, 13, 22, 27, 35, 42, 49, 54, 60, 66, 79, 87, 94} + +func (i patternOp) String() string { + if i < 0 || i >= patternOp(len(_patternOp_index)-1) { + return "patternOp(" + strconv.FormatInt(int64(i), 10) + ")" + } + return _patternOp_name[_patternOp_index[i]:_patternOp_index[i+1]] +} diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/typematch/typematch.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/typematch/typematch.go new file mode 100644 index 000000000..19391ecd4 --- /dev/null +++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/typematch/typematch.go @@ -0,0 +1,536 @@ +package typematch + +import ( + "fmt" + "go/ast" + "go/parser" + "go/token" + "go/types" + "strconv" + "strings" + + "github.com/quasilyte/go-ruleguard/internal/xtypes" +) + +//go:generate stringer -type=patternOp +type patternOp int + +const ( + opBuiltinType patternOp = iota + opPointer + opVar + opVarSeq + opSlice + opArray + opMap + opChan + opFunc + opStructNoSeq + opStruct + opNamed +) + +type Pattern struct { + typeMatches map[string]types.Type + int64Matches map[string]int64 + + root *pattern +} + +type pattern struct { + value interface{} + op patternOp + subs []*pattern +} + +func (pat pattern) String() string { + if len(pat.subs) == 0 { + return fmt.Sprintf("<%s %#v>", pat.op, pat.value) + } + parts := make([]string, len(pat.subs)) + for i, sub := range pat.subs { + parts[i] = sub.String() + } + return fmt.Sprintf("<%s %#v (%s)>", pat.op, pat.value, strings.Join(parts, ", ")) +} + +type ImportsTab struct { + imports []map[string]string +} + +func NewImportsTab(initial map[string]string) *ImportsTab { + return &ImportsTab{imports: []map[string]string{initial}} +} + +func (itab *ImportsTab) Lookup(pkgName string) (string, bool) { + for i := len(itab.imports) - 1; i >= 0; i-- { + pkgPath, ok := itab.imports[i][pkgName] + if ok { + return pkgPath, true + } + } + return "", false +} + +func (itab *ImportsTab) Load(pkgName, pkgPath string) { + itab.imports[len(itab.imports)-1][pkgName] = pkgPath +} + +func (itab *ImportsTab) EnterScope() { + itab.imports = append(itab.imports, map[string]string{}) +} + +func (itab *ImportsTab) LeaveScope() { + itab.imports = itab.imports[:len(itab.imports)-1] +} + +type Context struct { + Itab *ImportsTab +} + +const ( + varPrefix = `ᐸvarᐳ` + varSeqPrefix = `ᐸvar_seqᐳ` +) + +func Parse(ctx *Context, s string) (*Pattern, error) { + noDollars := strings.ReplaceAll(s, "$*", varSeqPrefix) + noDollars = strings.ReplaceAll(noDollars, "$", varPrefix) + n, err := parser.ParseExpr(noDollars) + if err != nil { + return nil, err + } + root := parseExpr(ctx, n) + if root == nil { + return nil, fmt.Errorf("can't convert %s type expression", s) + } + p := &Pattern{ + typeMatches: map[string]types.Type{}, + int64Matches: map[string]int64{}, + root: root, + } + return p, nil +} + +var ( + builtinTypeByName = map[string]types.Type{ + "bool": types.Typ[types.Bool], + "int": types.Typ[types.Int], + "int8": types.Typ[types.Int8], + "int16": types.Typ[types.Int16], + "int32": types.Typ[types.Int32], + "int64": types.Typ[types.Int64], + "uint": 
types.Typ[types.Uint], + "uint8": types.Typ[types.Uint8], + "uint16": types.Typ[types.Uint16], + "uint32": types.Typ[types.Uint32], + "uint64": types.Typ[types.Uint64], + "uintptr": types.Typ[types.Uintptr], + "float32": types.Typ[types.Float32], + "float64": types.Typ[types.Float64], + "complex64": types.Typ[types.Complex64], + "complex128": types.Typ[types.Complex128], + "string": types.Typ[types.String], + + "error": types.Universe.Lookup("error").Type(), + + // Aliases. + "byte": types.Typ[types.Uint8], + "rune": types.Typ[types.Int32], + } + + efaceType = types.NewInterfaceType(nil, nil) +) + +func parseExpr(ctx *Context, e ast.Expr) *pattern { + switch e := e.(type) { + case *ast.Ident: + basic, ok := builtinTypeByName[e.Name] + if ok { + return &pattern{op: opBuiltinType, value: basic} + } + if strings.HasPrefix(e.Name, varPrefix) { + name := strings.TrimPrefix(e.Name, varPrefix) + return &pattern{op: opVar, value: name} + } + if strings.HasPrefix(e.Name, varSeqPrefix) { + name := strings.TrimPrefix(e.Name, varSeqPrefix) + // Only unnamed seq are supported right now. + if name == "_" { + return &pattern{op: opVarSeq, value: name} + } + } + + case *ast.SelectorExpr: + pkg, ok := e.X.(*ast.Ident) + if !ok { + return nil + } + pkgPath, ok := ctx.Itab.Lookup(pkg.Name) + if !ok { + return nil + } + return &pattern{op: opNamed, value: [2]string{pkgPath, e.Sel.Name}} + + case *ast.StarExpr: + elem := parseExpr(ctx, e.X) + if elem == nil { + return nil + } + return &pattern{op: opPointer, subs: []*pattern{elem}} + + case *ast.ArrayType: + elem := parseExpr(ctx, e.Elt) + if elem == nil { + return nil + } + if e.Len == nil { + return &pattern{ + op: opSlice, + subs: []*pattern{elem}, + } + } + if id, ok := e.Len.(*ast.Ident); ok && strings.HasPrefix(id.Name, varPrefix) { + name := strings.TrimPrefix(id.Name, varPrefix) + return &pattern{ + op: opArray, + value: name, + subs: []*pattern{elem}, + } + } + lit, ok := e.Len.(*ast.BasicLit) + if !ok || lit.Kind != token.INT { + return nil + } + length, err := strconv.ParseInt(lit.Value, 10, 64) + if err != nil { + return nil + } + return &pattern{ + op: opArray, + value: length, + subs: []*pattern{elem}, + } + + case *ast.MapType: + keyType := parseExpr(ctx, e.Key) + if keyType == nil { + return nil + } + valType := parseExpr(ctx, e.Value) + if valType == nil { + return nil + } + return &pattern{ + op: opMap, + subs: []*pattern{keyType, valType}, + } + + case *ast.ChanType: + valType := parseExpr(ctx, e.Value) + if valType == nil { + return nil + } + var dir types.ChanDir + switch { + case e.Dir&ast.SEND != 0 && e.Dir&ast.RECV != 0: + dir = types.SendRecv + case e.Dir&ast.SEND != 0: + dir = types.SendOnly + case e.Dir&ast.RECV != 0: + dir = types.RecvOnly + default: + return nil + } + return &pattern{ + op: opChan, + value: dir, + subs: []*pattern{valType}, + } + + case *ast.ParenExpr: + return parseExpr(ctx, e.X) + + case *ast.FuncType: + var params []*pattern + var results []*pattern + if e.Params != nil { + for _, field := range e.Params.List { + p := parseExpr(ctx, field.Type) + if p == nil { + return nil + } + if len(field.Names) != 0 { + return nil + } + params = append(params, p) + } + } + if e.Results != nil { + for _, field := range e.Results.List { + p := parseExpr(ctx, field.Type) + if p == nil { + return nil + } + if len(field.Names) != 0 { + return nil + } + results = append(results, p) + } + } + return &pattern{ + op: opFunc, + value: len(params), + subs: append(params, results...), + } + + case *ast.StructType: + hasSeq := false + 
members := make([]*pattern, 0, len(e.Fields.List)) + for _, field := range e.Fields.List { + p := parseExpr(ctx, field.Type) + if p == nil { + return nil + } + if len(field.Names) != 0 { + return nil + } + if p.op == opVarSeq { + hasSeq = true + } + members = append(members, p) + } + op := opStructNoSeq + if hasSeq { + op = opStruct + } + return &pattern{ + op: op, + subs: members, + } + + case *ast.InterfaceType: + if len(e.Methods.List) == 0 { + return &pattern{op: opBuiltinType, value: efaceType} + } + } + + return nil +} + +// MatchIdentical returns true if the go typ matches pattern p. +func (p *Pattern) MatchIdentical(typ types.Type) bool { + p.reset() + return p.matchIdentical(p.root, typ) +} + +func (p *Pattern) reset() { + if len(p.int64Matches) != 0 { + p.int64Matches = map[string]int64{} + } + if len(p.typeMatches) != 0 { + p.typeMatches = map[string]types.Type{} + } +} + +func (p *Pattern) matchIdenticalFielder(subs []*pattern, f fielder) bool { + // TODO: do backtracking. + + numFields := f.NumFields() + fieldsMatched := 0 + + if len(subs) == 0 && numFields != 0 { + return false + } + + matchAny := false + + i := 0 + for i < len(subs) { + pat := subs[i] + + if pat.op == opVarSeq { + matchAny = true + } + + fieldsLeft := numFields - fieldsMatched + if matchAny { + switch { + // "Nothing left to match" stop condition. + case fieldsLeft == 0: + matchAny = false + i++ + // Lookahead for non-greedy matching. + case i+1 < len(subs) && p.matchIdentical(subs[i+1], f.Field(fieldsMatched).Type()): + matchAny = false + i += 2 + fieldsMatched++ + default: + fieldsMatched++ + } + continue + } + + if fieldsLeft == 0 || !p.matchIdentical(pat, f.Field(fieldsMatched).Type()) { + return false + } + i++ + fieldsMatched++ + } + + return numFields == fieldsMatched +} + +func (p *Pattern) matchIdentical(sub *pattern, typ types.Type) bool { + switch sub.op { + case opVar: + name := sub.value.(string) + if name == "_" { + return true + } + y, ok := p.typeMatches[name] + if !ok { + p.typeMatches[name] = typ + return true + } + if y == nil { + return typ == nil + } + return xtypes.Identical(typ, y) + + case opBuiltinType: + return xtypes.Identical(typ, sub.value.(types.Type)) + + case opPointer: + typ, ok := typ.(*types.Pointer) + if !ok { + return false + } + return p.matchIdentical(sub.subs[0], typ.Elem()) + + case opSlice: + typ, ok := typ.(*types.Slice) + if !ok { + return false + } + return p.matchIdentical(sub.subs[0], typ.Elem()) + + case opArray: + typ, ok := typ.(*types.Array) + if !ok { + return false + } + var wantLen int64 + switch v := sub.value.(type) { + case string: + if v == "_" { + wantLen = typ.Len() + break + } + length, ok := p.int64Matches[v] + if ok { + wantLen = length + } else { + p.int64Matches[v] = typ.Len() + wantLen = typ.Len() + } + case int64: + wantLen = v + } + return wantLen == typ.Len() && p.matchIdentical(sub.subs[0], typ.Elem()) + + case opMap: + typ, ok := typ.(*types.Map) + if !ok { + return false + } + return p.matchIdentical(sub.subs[0], typ.Key()) && + p.matchIdentical(sub.subs[1], typ.Elem()) + + case opChan: + typ, ok := typ.(*types.Chan) + if !ok { + return false + } + dir := sub.value.(types.ChanDir) + return dir == typ.Dir() && p.matchIdentical(sub.subs[0], typ.Elem()) + + case opNamed: + typ, ok := typ.(*types.Named) + if !ok { + return false + } + obj := typ.Obj() + pkg := obj.Pkg() + // pkg can be nil for builtin named types. + // There is no point in checking anything else as we never + // generate the opNamed for such types. 
+ if pkg == nil { + return false + } + pkgPath := sub.value.([2]string)[0] + typeName := sub.value.([2]string)[1] + // obj.Pkg().Path() may be in a vendor directory. + path := strings.SplitAfter(obj.Pkg().Path(), "/vendor/") + return path[len(path)-1] == pkgPath && typeName == obj.Name() + + case opFunc: + typ, ok := typ.(*types.Signature) + if !ok { + return false + } + numParams := sub.value.(int) + params := sub.subs[:numParams] + results := sub.subs[numParams:] + if typ.Params().Len() != len(params) { + return false + } + if typ.Results().Len() != len(results) { + return false + } + for i := 0; i < typ.Params().Len(); i++ { + if !p.matchIdentical(params[i], typ.Params().At(i).Type()) { + return false + } + } + for i := 0; i < typ.Results().Len(); i++ { + if !p.matchIdentical(results[i], typ.Results().At(i).Type()) { + return false + } + } + return true + + case opStructNoSeq: + typ, ok := typ.(*types.Struct) + if !ok { + return false + } + if typ.NumFields() != len(sub.subs) { + return false + } + for i, member := range sub.subs { + if !p.matchIdentical(member, typ.Field(i).Type()) { + return false + } + } + return true + + case opStruct: + typ, ok := typ.(*types.Struct) + if !ok { + return false + } + if !p.matchIdenticalFielder(sub.subs, typ) { + return false + } + return true + + default: + return false + } +} + +type fielder interface { + Field(i int) *types.Var + NumFields() int +} diff --git a/vendor/github.com/quasilyte/go-ruleguard/ruleguard/utils.go b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/utils.go new file mode 100644 index 000000000..de3bb04c3 --- /dev/null +++ b/vendor/github.com/quasilyte/go-ruleguard/ruleguard/utils.go @@ -0,0 +1,251 @@ +package ruleguard + +import ( + "go/ast" + "go/constant" + "go/parser" + "go/token" + "go/types" + "regexp/syntax" + "strconv" + "strings" +) + +func regexpHasCaptureGroups(pattern string) bool { + // regexp.Compile() uses syntax.Perl flags, so + // we use the same flags here. + re, err := syntax.Parse(pattern, syntax.Perl) + if err != nil { + return true // true is more conservative than false + } + + found := false + + var walkRegexp func(*syntax.Regexp) + walkRegexp = func(re *syntax.Regexp) { + if found { + return + } + // OpCapture handles both named and unnamed capture groups. + if re.Op == syntax.OpCapture { + found = true + return + } + for _, sub := range re.Sub { + walkRegexp(sub) + } + } + walkRegexp(re) + + return found +} + +func findDependency(pkg *types.Package, path string) *types.Package { + if pkg.Path() == path { + return pkg + } + // It looks like indirect dependencies are always incomplete? + // If it's true, then we don't have to recurse here. + for _, imported := range pkg.Imports() { + if dep := findDependency(imported, path); dep != nil && dep.Complete() { + return dep + } + } + return nil +} + +var typeByName = map[string]types.Type{ + // Predeclared types. 
+ `error`: types.Universe.Lookup("error").Type(), + `bool`: types.Typ[types.Bool], + `int`: types.Typ[types.Int], + `int8`: types.Typ[types.Int8], + `int16`: types.Typ[types.Int16], + `int32`: types.Typ[types.Int32], + `int64`: types.Typ[types.Int64], + `uint`: types.Typ[types.Uint], + `uint8`: types.Typ[types.Uint8], + `uint16`: types.Typ[types.Uint16], + `uint32`: types.Typ[types.Uint32], + `uint64`: types.Typ[types.Uint64], + `uintptr`: types.Typ[types.Uintptr], + `string`: types.Typ[types.String], + `float32`: types.Typ[types.Float32], + `float64`: types.Typ[types.Float64], + `complex64`: types.Typ[types.Complex64], + `complex128`: types.Typ[types.Complex128], + + // Predeclared aliases (provided for convenience). + `byte`: types.Typ[types.Uint8], + `rune`: types.Typ[types.Int32], +} + +func typeFromString(s string) (types.Type, error) { + s = strings.ReplaceAll(s, "?", "__any") + + n, err := parser.ParseExpr(s) + if err != nil { + return nil, err + } + return typeFromNode(n), nil +} + +func typeFromNode(e ast.Expr) types.Type { + switch e := e.(type) { + case *ast.Ident: + typ, ok := typeByName[e.Name] + if ok { + return typ + } + + case *ast.ArrayType: + elem := typeFromNode(e.Elt) + if elem == nil { + return nil + } + if e.Len == nil { + return types.NewSlice(elem) + } + lit, ok := e.Len.(*ast.BasicLit) + if !ok || lit.Kind != token.INT { + return nil + } + length, err := strconv.Atoi(lit.Value) + if err != nil { + return nil + } + return types.NewArray(elem, int64(length)) + + case *ast.MapType: + keyType := typeFromNode(e.Key) + if keyType == nil { + return nil + } + valType := typeFromNode(e.Value) + if valType == nil { + return nil + } + return types.NewMap(keyType, valType) + + case *ast.StarExpr: + typ := typeFromNode(e.X) + if typ != nil { + return types.NewPointer(typ) + } + + case *ast.ParenExpr: + return typeFromNode(e.X) + + case *ast.InterfaceType: + if len(e.Methods.List) == 0 { + return types.NewInterfaceType(nil, nil) + } + } + + return nil +} + +func intValueOf(info *types.Info, expr ast.Expr) constant.Value { + tv := info.Types[expr] + if tv.Value == nil { + return nil + } + if tv.Value.Kind() != constant.Int { + return nil + } + return tv.Value +} + +// isPure reports whether expr is a softly safe expression and contains +// no significant side-effects. As opposed to strictly safe expressions, +// soft safe expressions permit some forms of side-effects, like +// panic possibility during indexing or nil pointer dereference. +// +// Uses types info to determine type conversion expressions that +// are the only permitted kinds of call expressions. +// Note that is does not check whether called function really +// has any side effects. The analysis is very conservative. +func isPure(info *types.Info, expr ast.Expr) bool { + // This list switch is not comprehensive and uses + // whitelist to be on the conservative side. + // Can be extended as needed. 
+ + switch expr := expr.(type) { + case *ast.StarExpr: + return isPure(info, expr.X) + case *ast.BinaryExpr: + return isPure(info, expr.X) && + isPure(info, expr.Y) + case *ast.UnaryExpr: + return expr.Op != token.ARROW && + isPure(info, expr.X) + case *ast.BasicLit, *ast.Ident: + return true + case *ast.IndexExpr: + return isPure(info, expr.X) && + isPure(info, expr.Index) + case *ast.SelectorExpr: + return isPure(info, expr.X) + case *ast.ParenExpr: + return isPure(info, expr.X) + case *ast.CompositeLit: + return isPureList(info, expr.Elts) + case *ast.CallExpr: + return isTypeExpr(info, expr.Fun) && isPureList(info, expr.Args) + + default: + return false + } +} + +// isPureList reports whether every expr in list is safe. +// +// See isPure. +func isPureList(info *types.Info, list []ast.Expr) bool { + for _, expr := range list { + if !isPure(info, expr) { + return false + } + } + return true +} + +func isAddressable(info *types.Info, expr ast.Expr) bool { + tv, ok := info.Types[expr] + return ok && tv.Addressable() +} + +func isConstant(info *types.Info, expr ast.Expr) bool { + tv, ok := info.Types[expr] + return ok && tv.Value != nil +} + +// isTypeExpr reports whether x represents a type expression. +// +// Type expression does not evaluate to any run time value, +// but rather describes a type that is used inside Go expression. +// +// For example, (*T)(v) is a CallExpr that "calls" (*T). +// (*T) is a type expression that tells Go compiler type v should be converted to. +func isTypeExpr(info *types.Info, x ast.Expr) bool { + switch x := x.(type) { + case *ast.StarExpr: + return isTypeExpr(info, x.X) + case *ast.ParenExpr: + return isTypeExpr(info, x.X) + case *ast.SelectorExpr: + return isTypeExpr(info, x.Sel) + + case *ast.Ident: + // Identifier may be a type expression if object + // it reffers to is a type name. + _, ok := info.ObjectOf(x).(*types.TypeName) + return ok + + case *ast.FuncType, *ast.StructType, *ast.InterfaceType, *ast.ArrayType, *ast.MapType, *ast.ChanType: + return true + + default: + return false + } +} diff --git a/vendor/github.com/quasilyte/regex/syntax/LICENSE b/vendor/github.com/quasilyte/regex/syntax/LICENSE new file mode 100644 index 000000000..f0c81282b --- /dev/null +++ b/vendor/github.com/quasilyte/regex/syntax/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2020 Iskander (Alex) Sharipov / quasilyte + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
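The quasigo sources vendored above document a stack-based calling convention: native functions registered with AddNativeFunc receive a *ValueStack, arguments and results travel through that stack, and object values must use Push/Pop while int values must use PushInt/PopInt. The sketch below is a minimal illustration of that convention and is not part of the vendored patch; it assumes the vendored package path is importable as written, and the `example.com/strutil` package path and `Len` function name are made up for the example.

```go
package main

import (
	"fmt"

	"github.com/quasilyte/go-ruleguard/ruleguard/quasigo"
)

func main() {
	env := quasigo.NewEnv()

	// Hypothetical native function: pops its single string argument and
	// pushes the length as an int result. Note the ValueStack discipline
	// described above: object values use Pop(), int results use PushInt().
	env.AddNativeFunc("example.com/strutil", "Len", func(stack *quasigo.ValueStack) {
		s := stack.Pop().(string)
		stack.PushInt(len(s))
	})

	// A compiled quasigo function could now call example.com/strutil.Len.
	// Compiling one requires a parsed *ast.FuncDecl plus *types.Info
	// (see CompileContext above), which is omitted in this sketch.
	evalEnv := env.GetEvalEnv()
	fmt.Printf("eval env ready: %T\n", evalEnv)
}
```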
diff --git a/vendor/github.com/quasilyte/regex/syntax/README.md b/vendor/github.com/quasilyte/regex/syntax/README.md new file mode 100644 index 000000000..13064ec39 --- /dev/null +++ b/vendor/github.com/quasilyte/regex/syntax/README.md @@ -0,0 +1,26 @@ +# Package `regex/syntax` + +Package `syntax` provides regular expressions parser as well as AST definitions. + +## Rationale + +There are several problems with the stdlib [regexp/syntax](https://golang.org/pkg/regexp/syntax/) package: + +1. It does several transformations during the parsing that make it + hard to do any kind of syntax analysis afterward. + +2. The AST used there is optimized for the compilation and + execution inside the [regexp](https://golang.org/pkg/regexp) package. + It's somewhat complicated, especially in a way character ranges are encoded. + +3. It only supports [re2](https://github.com/google/re2/wiki/Syntax) syntax. + This parser recognizes most PCRE operations. + +4. It's easier to extend this package than something from the standard library. + +This package does almost no assumptions about how generated AST is going to be used +so it preserves as much syntax information as possible. + +It's easy to write another intermediate representation on top of it. The main +function of this package is to convert a textual regexp pattern into a more +structured form that can be processed more easily. diff --git a/vendor/github.com/quasilyte/regex/syntax/ast.go b/vendor/github.com/quasilyte/regex/syntax/ast.go new file mode 100644 index 000000000..44b7b61bb --- /dev/null +++ b/vendor/github.com/quasilyte/regex/syntax/ast.go @@ -0,0 +1,147 @@ +package syntax + +import ( + "fmt" + "strings" +) + +type Regexp struct { + Pattern string + Expr Expr +} + +type RegexpPCRE struct { + Pattern string + Expr Expr + + Source string + Modifiers string + Delim [2]byte +} + +func (re *RegexpPCRE) HasModifier(mod byte) bool { + return strings.IndexByte(re.Modifiers, mod) >= 0 +} + +type Expr struct { + // The operations that this expression performs. See `operation.go`. + Op Operation + + Form Form + + _ [2]byte // Reserved + + // Pos describes a source location inside regexp pattern. + Pos Position + + // Args is a list of sub-expressions of this expression. + // + // See Operation constants documentation to learn how to + // interpret the particular expression args. + Args []Expr + + // Value holds expression textual value. + // + // Usually, that value is identical to src[Begin():End()], + // but this is not true for programmatically generated objects. + Value string +} + +// Begin returns expression leftmost offset. +func (e Expr) Begin() uint16 { return e.Pos.Begin } + +// End returns expression rightmost offset. +func (e Expr) End() uint16 { return e.Pos.End } + +// LastArg returns expression last argument. +// +// Should not be called on expressions that may have 0 arguments. 
+func (e Expr) LastArg() Expr { + return e.Args[len(e.Args)-1] +} + +type Operation byte + +type Form byte + +func FormatSyntax(re *Regexp) string { + return formatExprSyntax(re, re.Expr) +} + +func formatExprSyntax(re *Regexp, e Expr) string { + switch e.Op { + case OpChar, OpLiteral: + switch e.Value { + case "{": + return "'{'" + case "}": + return "'}'" + default: + return e.Value + } + case OpString, OpEscapeChar, OpEscapeMeta, OpEscapeOctal, OpEscapeUni, OpEscapeHex, OpPosixClass: + return e.Value + case OpRepeat: + return fmt.Sprintf("(repeat %s %s)", formatExprSyntax(re, e.Args[0]), e.Args[1].Value) + case OpCaret: + return "^" + case OpDollar: + return "$" + case OpDot: + return "." + case OpQuote: + return fmt.Sprintf("(q %s)", e.Value) + case OpCharRange: + return fmt.Sprintf("%s-%s", formatExprSyntax(re, e.Args[0]), formatExprSyntax(re, e.Args[1])) + case OpCharClass: + return fmt.Sprintf("[%s]", formatArgsSyntax(re, e.Args)) + case OpNegCharClass: + return fmt.Sprintf("[^%s]", formatArgsSyntax(re, e.Args)) + case OpConcat: + return fmt.Sprintf("{%s}", formatArgsSyntax(re, e.Args)) + case OpAlt: + return fmt.Sprintf("(or %s)", formatArgsSyntax(re, e.Args)) + case OpCapture: + return fmt.Sprintf("(capture %s)", formatExprSyntax(re, e.Args[0])) + case OpNamedCapture: + return fmt.Sprintf("(capture %s %s)", formatExprSyntax(re, e.Args[0]), e.Args[1].Value) + case OpGroup: + return fmt.Sprintf("(group %s)", formatExprSyntax(re, e.Args[0])) + case OpAtomicGroup: + return fmt.Sprintf("(atomic %s)", formatExprSyntax(re, e.Args[0])) + case OpGroupWithFlags: + return fmt.Sprintf("(group %s ?%s)", formatExprSyntax(re, e.Args[0]), e.Args[1].Value) + case OpFlagOnlyGroup: + return fmt.Sprintf("(flags ?%s)", formatExprSyntax(re, e.Args[0])) + case OpPositiveLookahead: + return fmt.Sprintf("(?= %s)", formatExprSyntax(re, e.Args[0])) + case OpNegativeLookahead: + return fmt.Sprintf("(?! %s)", formatExprSyntax(re, e.Args[0])) + case OpPositiveLookbehind: + return fmt.Sprintf("(?<= %s)", formatExprSyntax(re, e.Args[0])) + case OpNegativeLookbehind: + return fmt.Sprintf("(?", e.Op) + } +} + +func formatArgsSyntax(re *Regexp, args []Expr) string { + parts := make([]string, len(args)) + for i, e := range args { + parts[i] = formatExprSyntax(re, e) + } + return strings.Join(parts, " ") +} diff --git a/vendor/github.com/quasilyte/regex/syntax/errors.go b/vendor/github.com/quasilyte/regex/syntax/errors.go new file mode 100644 index 000000000..cfafc1d0e --- /dev/null +++ b/vendor/github.com/quasilyte/regex/syntax/errors.go @@ -0,0 +1,27 @@ +package syntax + +import ( + "fmt" +) + +type ParseError struct { + Pos Position + Message string +} + +func (e ParseError) Error() string { return e.Message } + +func throwfPos(pos Position, format string, args ...interface{}) { + panic(ParseError{ + Pos: pos, + Message: fmt.Sprintf(format, args...), + }) +} + +func throwErrorf(posBegin, posEnd int, format string, args ...interface{}) { + pos := Position{ + Begin: uint16(posBegin), + End: uint16(posEnd), + } + throwfPos(pos, format, args...) 
+} diff --git a/vendor/github.com/quasilyte/regex/syntax/go.mod b/vendor/github.com/quasilyte/regex/syntax/go.mod new file mode 100644 index 000000000..2a4e1f33b --- /dev/null +++ b/vendor/github.com/quasilyte/regex/syntax/go.mod @@ -0,0 +1,3 @@ +module github.com/quasilyte/regex/syntax + +go 1.14 diff --git a/vendor/github.com/quasilyte/regex/syntax/lexer.go b/vendor/github.com/quasilyte/regex/syntax/lexer.go new file mode 100644 index 000000000..e92b038c2 --- /dev/null +++ b/vendor/github.com/quasilyte/regex/syntax/lexer.go @@ -0,0 +1,455 @@ +package syntax + +import ( + "strings" + "unicode" + "unicode/utf8" +) + +type token struct { + kind tokenKind + pos Position +} + +func (tok token) String() string { + return tok.kind.String() +} + +type tokenKind byte + +//go:generate stringer -type=tokenKind -trimprefix=tok -linecomment=true +const ( + tokNone tokenKind = iota + + tokChar + tokGroupFlags + tokPosixClass + tokConcat + tokRepeat + tokEscapeChar + tokEscapeMeta + tokEscapeOctal + tokEscapeUni + tokEscapeUniFull + tokEscapeHex + tokEscapeHexFull + tokComment + + tokQ // \Q + tokMinus // - + tokLbracket // [ + tokLbracketCaret // [^ + tokRbracket // ] + tokDollar // $ + tokCaret // ^ + tokQuestion // ? + tokDot // . + tokPlus // + + tokStar // * + tokPipe // | + tokLparen // ( + tokLparenName // (?P + tokLparenNameAngle // (? + tokLparenNameQuote // (?'name' + tokLparenFlags // (?flags + tokLparenAtomic // (?> + tokLparenPositiveLookahead // (?= + tokLparenPositiveLookbehind // (?<= + tokLparenNegativeLookahead // (?! + tokLparenNegativeLookbehind // (? unicode.MaxASCII { + _, size := utf8.DecodeRuneInString(l.input[l.pos:]) + l.pushTok(tokChar, size) + l.maybeInsertConcat() + continue + } + switch ch { + case '\\': + l.scanEscape(false) + case '.': + l.pushTok(tokDot, 1) + case '+': + l.pushTok(tokPlus, 1) + case '*': + l.pushTok(tokStar, 1) + case '^': + l.pushTok(tokCaret, 1) + case '$': + l.pushTok(tokDollar, 1) + case '?': + l.pushTok(tokQuestion, 1) + case ')': + l.pushTok(tokRparen, 1) + case '|': + l.pushTok(tokPipe, 1) + case '[': + if l.byteAt(l.pos+1) == '^' { + l.pushTok(tokLbracketCaret, 2) + } else { + l.pushTok(tokLbracket, 1) + } + l.scanCharClass() + case '(': + if l.byteAt(l.pos+1) == '?' { + switch { + case l.byteAt(l.pos+2) == '>': + l.pushTok(tokLparenAtomic, len("(?>")) + case l.byteAt(l.pos+2) == '=': + l.pushTok(tokLparenPositiveLookahead, len("(?=")) + case l.byteAt(l.pos+2) == '!': + l.pushTok(tokLparenNegativeLookahead, len("(?!")) + case l.byteAt(l.pos+2) == '<' && l.byteAt(l.pos+3) == '=': + l.pushTok(tokLparenPositiveLookbehind, len("(?<=")) + case l.byteAt(l.pos+2) == '<' && l.byteAt(l.pos+3) == '!': + l.pushTok(tokLparenNegativeLookbehind, len("(?= 0 { + l.pushTok(tokRepeat, len("{")+j) + } else { + l.pushTok(tokChar, 1) + } + default: + l.pushTok(tokChar, 1) + } + l.maybeInsertConcat() + } +} + +func (l *lexer) scanCharClass() { + l.maybeInsertConcat() + + // We need to handle first `]` in a special way. See #3. 
+ if l.byteAt(l.pos) == ']' { + l.pushTok(tokChar, 1) + } + + for l.pos < len(l.input) { + ch := l.input[l.pos] + if ch > unicode.MaxASCII { + _, size := utf8.DecodeRuneInString(l.input[l.pos:]) + l.pushTok(tokChar, size) + continue + } + switch ch { + case '\\': + l.scanEscape(true) + case '[': + isPosixClass := false + if l.byteAt(l.pos+1) == ':' { + j := l.stringIndex(l.pos+2, ":]") + if j >= 0 { + isPosixClass = true + l.pushTok(tokPosixClass, j+len("[::]")) + } + } + if !isPosixClass { + l.pushTok(tokChar, 1) + } + case '-': + l.pushTok(tokMinus, 1) + case ']': + l.pushTok(tokRbracket, 1) + return // Stop scanning in the char context + default: + l.pushTok(tokChar, 1) + } + } +} + +func (l *lexer) scanEscape(insideCharClass bool) { + s := l.input + if l.pos+1 >= len(s) { + throwErrorf(l.pos, l.pos+1, `unexpected end of pattern: trailing '\'`) + } + switch { + case s[l.pos+1] == 'p' || s[l.pos+1] == 'P': + if l.pos+2 >= len(s) { + throwErrorf(l.pos, l.pos+2, "unexpected end of pattern: expected uni-class-short or '{'") + } + if s[l.pos+2] == '{' { + j := strings.IndexByte(s[l.pos+2:], '}') + if j < 0 { + throwErrorf(l.pos, l.pos+2, "can't find closing '}'") + } + l.pushTok(tokEscapeUniFull, len(`\p{`)+j) + } else { + l.pushTok(tokEscapeUni, len(`\pL`)) + } + case s[l.pos+1] == 'x': + if l.pos+2 >= len(s) { + throwErrorf(l.pos, l.pos+2, "unexpected end of pattern: expected hex-digit or '{'") + } + if s[l.pos+2] == '{' { + j := strings.IndexByte(s[l.pos+2:], '}') + if j < 0 { + throwErrorf(l.pos, l.pos+2, "can't find closing '}'") + } + l.pushTok(tokEscapeHexFull, len(`\x{`)+j) + } else { + if isHexDigit(l.byteAt(l.pos + 3)) { + l.pushTok(tokEscapeHex, len(`\xFF`)) + } else { + l.pushTok(tokEscapeHex, len(`\xF`)) + } + } + case isOctalDigit(s[l.pos+1]): + digits := 1 + if isOctalDigit(l.byteAt(l.pos + 2)) { + if isOctalDigit(l.byteAt(l.pos + 3)) { + digits = 3 + } else { + digits = 2 + } + } + l.pushTok(tokEscapeOctal, len(`\`)+digits) + case s[l.pos+1] == 'Q': + size := len(s) - l.pos // Until the pattern ends + j := l.stringIndex(l.pos+2, `\E`) + if j >= 0 { + size = j + len(`\Q\E`) + } + l.pushTok(tokQ, size) + + default: + ch := l.byteAt(l.pos + 1) + if ch > unicode.MaxASCII { + _, size := utf8.DecodeRuneInString(l.input[l.pos+1:]) + l.pushTok(tokEscapeChar, len(`\`)+size) + return + } + kind := tokEscapeChar + if insideCharClass { + if charClassMetachar[ch] { + kind = tokEscapeMeta + } + } else { + if reMetachar[ch] { + kind = tokEscapeMeta + } + } + l.pushTok(kind, 2) + } +} + +func (l *lexer) maybeInsertConcat() { + if l.isConcatPos() { + last := len(l.tokens) - 1 + tok := l.tokens[last] + l.tokens[last].kind = tokConcat + l.tokens = append(l.tokens, tok) + } +} + +func (l *lexer) Init(s string) { + l.pos = 0 + l.tokens = l.tokens[:0] + l.input = s + + l.scan() + + l.pos = 0 +} + +func (l *lexer) tryScanGroupName(pos int) bool { + tok := tokLparenName + endCh := byte('>') + offset := 1 + switch l.byteAt(pos) { + case '\'': + endCh = '\'' + tok = tokLparenNameQuote + case '<': + tok = tokLparenNameAngle + case 'P': + offset = 2 + default: + return false + } + if pos+offset >= len(l.input) { + return false + } + end := strings.IndexByte(l.input[pos+offset:], endCh) + if end < 0 { + return false + } + l.pushTok(tok, len("(?")+offset+end+1) + return true +} + +func (l *lexer) tryScanGroupFlags(pos int) bool { + colonPos := strings.IndexByte(l.input[pos:], ':') + parenPos := strings.IndexByte(l.input[pos:], ')') + if parenPos < 0 { + return false + } + end := parenPos + if colonPos >= 
0 && colonPos < parenPos { + end = colonPos + len(":") + } + l.pushTok(tokLparenFlags, len("(?")+end) + return true +} + +func (l *lexer) tryScanComment(pos int) bool { + if l.byteAt(pos) != '#' { + return false + } + parenPos := strings.IndexByte(l.input[pos:], ')') + if parenPos < 0 { + return false + } + l.pushTok(tokComment, len("(?")+parenPos+len(")")) + return true +} + +func (l *lexer) repeatWidth(pos int) int { + j := pos + for isDigit(l.byteAt(j)) { + j++ + } + if j == pos { + return -1 + } + if l.byteAt(j) == '}' { + return (j + len("}")) - pos // {min} + } + if l.byteAt(j) != ',' { + return -1 + } + j += len(",") + for isDigit(l.byteAt(j)) { + j++ + } + if l.byteAt(j) == '}' { + return (j + len("}")) - pos // {min,} or {min,max} + } + return -1 +} + +func (l *lexer) stringIndex(offset int, s string) int { + if offset < len(l.input) { + return strings.Index(l.input[offset:], s) + } + return -1 +} + +func (l *lexer) byteAt(pos int) byte { + if pos >= 0 && pos < len(l.input) { + return l.input[pos] + } + return 0 +} + +func (l *lexer) pushTok(kind tokenKind, size int) { + l.tokens = append(l.tokens, token{ + kind: kind, + pos: Position{Begin: uint16(l.pos), End: uint16(l.pos + size)}, + }) + l.pos += size +} + +func (l *lexer) isConcatPos() bool { + if len(l.tokens) < 2 { + return false + } + x := l.tokens[len(l.tokens)-2].kind + if concatTable[x]&concatX != 0 { + return false + } + y := l.tokens[len(l.tokens)-1].kind + return concatTable[y]&concatY == 0 +} + +const ( + concatX byte = 1 << iota + concatY +) + +var concatTable = [256]byte{ + tokPipe: concatX | concatY, + + tokLparen: concatX, + tokLparenFlags: concatX, + tokLparenName: concatX, + tokLparenNameAngle: concatX, + tokLparenNameQuote: concatX, + tokLparenAtomic: concatX, + tokLbracket: concatX, + tokLbracketCaret: concatX, + tokLparenPositiveLookahead: concatX, + tokLparenPositiveLookbehind: concatX, + tokLparenNegativeLookahead: concatX, + tokLparenNegativeLookbehind: concatX, + + tokRparen: concatY, + tokRbracket: concatY, + tokPlus: concatY, + tokStar: concatY, + tokQuestion: concatY, + tokRepeat: concatY, +} diff --git a/vendor/github.com/quasilyte/regex/syntax/operation.go b/vendor/github.com/quasilyte/regex/syntax/operation.go new file mode 100644 index 000000000..284e5dc5b --- /dev/null +++ b/vendor/github.com/quasilyte/regex/syntax/operation.go @@ -0,0 +1,189 @@ +package syntax + +//go:generate stringer -type=Operation -trimprefix=Op +const ( + OpNone Operation = iota + + // OpConcat is a concatenation of ops. + // Examples: `xy` `abc\d` `` + // Args - concatenated ops + // + // As a special case, OpConcat with 0 Args is used for "empty" + // set of operations. + OpConcat + + // OpDot is a '.' wildcard. + OpDot + + // OpAlt is x|y alternation of ops. + // Examples: `a|bc` `x(.*?)|y(.*?)` + // Args - union-connected regexp branches + OpAlt + + // OpStar is a shorthand for {0,} repetition. + // Examples: `x*` + // Args[0] - repeated expression + OpStar + + // OpPlus is a shorthand for {1,} repetition. + // Examples: `x+` + // Args[0] - repeated expression + OpPlus + + // OpQuestion is a shorthand for {0,1} repetition. + // Examples: `x?` + // Args[0] - repeated expression + OpQuestion + + // OpNonGreedy makes its operand quantifier non-greedy. + // Examples: `x??` `x*?` `x+?` + // Args[0] - quantified expression + OpNonGreedy + + // OpPossessive makes its operand quantifier possessive. + // Examples: `x?+` `x*+` `x++` + // Args[0] - quantified expression + OpPossessive + + // OpCaret is ^ anchor. 
+ OpCaret + + // OpDollar is $ anchor. + OpDollar + + // OpLiteral is a collection of consecutive chars. + // Examples: `ab` `10x` + // Args - enclosed characters (OpChar) + OpLiteral + + // OpChar is a single literal pattern character. + // Examples: `a` `6` `ф` + OpChar + + // OpString is an artificial element that is used in other expressions. + OpString + + // OpQuote is a \Q...\E enclosed literal. + // Examples: `\Q.?\E` `\Q?q[]=1` + // + // Note that closing \E is not mandatory. + OpQuote + + // OpEscapeChar is a single char escape. + // Examples: `\d` `\a` `\n` + OpEscapeChar + + // OpEscapeMeta is an escaped meta char. + // Examples: `\(` `\[` `\+` + OpEscapeMeta + + // OpEscapeOctal is an octal char code escape (up to 3 digits). + // Examples: `\123` `\12` + OpEscapeOctal + + // OpEscapeHex is a hex char code escape. + // Examples: `\x7F` `\xF7` + // FormEscapeHexFull examples: `\x{10FFFF}` `\x{F}`. + OpEscapeHex + + // OpEscapeUni is a Unicode char class escape. + // Examples: `\pS` `\pL` `\PL` + // FormEscapeUniFull examples: `\p{Greek}` `\p{Symbol}` `\p{^L}` + OpEscapeUni + + // OpCharClass is a char class enclosed in []. + // Examples: `[abc]` `[a-z0-9\]]` + // Args - char class elements (can include OpCharRange and OpPosixClass). + OpCharClass + + // OpNegCharClass is a negated char class enclosed in []. + // Examples: `[^abc]` `[^a-z0-9\]]` + // Args - char class elements (can include OpCharRange and OpPosixClass). + OpNegCharClass + + // OpCharRange is an inclusive char range inside a char class. + // Examples: `0-9` `A-Z` + // Args[0] - range lower bound (OpChar or OpEscape). + // Args[1] - range upper bound (OpChar or OpEscape). + OpCharRange + + // OpPosixClass is a named ASCII char set inside a char class. + // Examples: `[:alpha:]` `[:blank:]` + OpPosixClass + + // OpRepeat is a {min,max} repetition quantifier. + // Examples: `x{5}` `x{min,max}` `x{min,}` + // Args[0] - repeated expression + // Args[1] - repeat count (OpString) + OpRepeat + + // OpCapture is `(re)` capturing group. + // Examples: `(abc)` `(x|y)` + // Args[0] - enclosed expression + OpCapture + + // OpNamedCapture is `(?Pre)` capturing group. + // Examples: `(?Pabc)` `(?Px|y)` + // FormNamedCaptureAngle examples: `(?abc)` `(?x|y)` + // FormNamedCaptureQuote examples: `(?'foo'abc)` `(?'name'x|y)` + // Args[0] - enclosed expression (OpConcat with 0 args for empty group) + // Args[1] - group name (OpString) + OpNamedCapture + + // OpGroup is `(?:re)` non-capturing group. + // Examples: `(?:abc)` `(?:x|y)` + // Args[0] - enclosed expression (OpConcat with 0 args for empty group) + OpGroup + + // OpGroupWithFlags is `(?flags:re)` non-capturing group. + // Examples: `(?i:abc)` `(?i:x|y)` + // Args[0] - enclosed expression (OpConcat with 0 args for empty group) + // Args[1] - flags (OpString) + OpGroupWithFlags + + // OpAtomicGroup is `(?>re)` non-capturing group without backtracking. + // Examples: `(?>foo)` `(?>)` + // Args[0] - enclosed expression (OpConcat with 0 args for empty group) + OpAtomicGroup + + // OpPositiveLookahead is `(?=re)` asserts that following text matches re. + // Examples: `(?=foo)` + // Args[0] - enclosed expression (OpConcat with 0 args for empty group) + OpPositiveLookahead + + // OpNegativeLookahead is `(?!re)` asserts that following text doesn't match re. + // Examples: `(?!foo)` + // Args[0] - enclosed expression (OpConcat with 0 args for empty group) + OpNegativeLookahead + + // OpPositiveLookbehind is `(?<=re)` asserts that preceding text matches re. 
+ // Examples: `(?<=foo)` + // Args[0] - enclosed expression (OpConcat with 0 args for empty group) + OpPositiveLookbehind + + // OpNegativeLookbehind is `(?=re)` asserts that preceding text doesn't match re. + // Examples: `(?= Operation(len(_Operation_index)-1) { + return "Operation(" + strconv.FormatInt(int64(i), 10) + ")" + } + return _Operation_name[_Operation_index[i]:_Operation_index[i+1]] +} diff --git a/vendor/github.com/quasilyte/regex/syntax/parser.go b/vendor/github.com/quasilyte/regex/syntax/parser.go new file mode 100644 index 000000000..faf0f8b21 --- /dev/null +++ b/vendor/github.com/quasilyte/regex/syntax/parser.go @@ -0,0 +1,471 @@ +package syntax + +import ( + "errors" + "fmt" + "strings" +) + +type ParserOptions struct { + // NoLiterals disables OpChar merging into OpLiteral. + NoLiterals bool +} + +func NewParser(opts *ParserOptions) *Parser { + return newParser(opts) +} + +type Parser struct { + out Regexp + lexer lexer + exprPool []Expr + + prefixParselets [256]prefixParselet + infixParselets [256]infixParselet + + charClass []Expr + allocated uint + + opts ParserOptions +} + +// ParsePCRE parses PHP-style pattern with delimiters. +// An example of such pattern is `/foo/i`. +func (p *Parser) ParsePCRE(pattern string) (*RegexpPCRE, error) { + pcre, err := p.newPCRE(pattern) + if err != nil { + return nil, err + } + if pcre.HasModifier('x') { + return nil, errors.New("'x' modifier is not supported") + } + re, err := p.Parse(pcre.Pattern) + if re != nil { + pcre.Expr = re.Expr + } + return pcre, err +} + +func (p *Parser) Parse(pattern string) (result *Regexp, err error) { + defer func() { + r := recover() + if r == nil { + return + } + if err2, ok := r.(ParseError); ok { + err = err2 + return + } + panic(r) + }() + + p.lexer.Init(pattern) + p.allocated = 0 + p.out.Pattern = pattern + if pattern == "" { + p.out.Expr = *p.newExpr(OpConcat, Position{}) + } else { + p.out.Expr = *p.parseExpr(0) + } + + if !p.opts.NoLiterals { + p.mergeChars(&p.out.Expr) + } + p.setValues(&p.out.Expr) + + return &p.out, nil +} + +type prefixParselet func(token) *Expr + +type infixParselet func(*Expr, token) *Expr + +func newParser(opts *ParserOptions) *Parser { + var p Parser + + if opts != nil { + p.opts = *opts + } + p.exprPool = make([]Expr, 256) + + for tok, op := range tok2op { + if op != 0 { + p.prefixParselets[tokenKind(tok)] = p.parsePrefixElementary + } + } + + p.prefixParselets[tokEscapeHexFull] = func(tok token) *Expr { + return p.newExprForm(OpEscapeHex, FormEscapeHexFull, tok.pos) + } + p.prefixParselets[tokEscapeUniFull] = func(tok token) *Expr { + return p.newExprForm(OpEscapeUni, FormEscapeUniFull, tok.pos) + } + + p.prefixParselets[tokLparen] = func(tok token) *Expr { return p.parseGroup(OpCapture, tok) } + p.prefixParselets[tokLparenAtomic] = func(tok token) *Expr { return p.parseGroup(OpAtomicGroup, tok) } + p.prefixParselets[tokLparenPositiveLookahead] = func(tok token) *Expr { return p.parseGroup(OpPositiveLookahead, tok) } + p.prefixParselets[tokLparenNegativeLookahead] = func(tok token) *Expr { return p.parseGroup(OpNegativeLookahead, tok) } + p.prefixParselets[tokLparenPositiveLookbehind] = func(tok token) *Expr { return p.parseGroup(OpPositiveLookbehind, tok) } + p.prefixParselets[tokLparenNegativeLookbehind] = func(tok token) *Expr { return p.parseGroup(OpNegativeLookbehind, tok) } + + p.prefixParselets[tokLparenName] = func(tok token) *Expr { + return p.parseNamedCapture(FormDefault, tok) + } + p.prefixParselets[tokLparenNameAngle] = func(tok token) *Expr { + 
return p.parseNamedCapture(FormNamedCaptureAngle, tok) + } + p.prefixParselets[tokLparenNameQuote] = func(tok token) *Expr { + return p.parseNamedCapture(FormNamedCaptureQuote, tok) + } + + p.prefixParselets[tokLparenFlags] = p.parseGroupWithFlags + + p.prefixParselets[tokPipe] = func(tok token) *Expr { + // We need prefix pipe parselet to handle `(|x)` syntax. + right := p.parseExpr(1) + return p.newExpr(OpAlt, tok.pos, p.newEmpty(tok.pos), right) + } + p.prefixParselets[tokLbracket] = func(tok token) *Expr { + return p.parseCharClass(OpCharClass, tok) + } + p.prefixParselets[tokLbracketCaret] = func(tok token) *Expr { + return p.parseCharClass(OpNegCharClass, tok) + } + + p.infixParselets[tokRepeat] = func(left *Expr, tok token) *Expr { + repeatLit := p.newExpr(OpString, tok.pos) + return p.newExpr(OpRepeat, combinePos(left.Pos, tok.pos), left, repeatLit) + } + p.infixParselets[tokStar] = func(left *Expr, tok token) *Expr { + return p.newExpr(OpStar, combinePos(left.Pos, tok.pos), left) + } + p.infixParselets[tokConcat] = func(left *Expr, tok token) *Expr { + right := p.parseExpr(2) + if left.Op == OpConcat { + left.Args = append(left.Args, *right) + left.Pos.End = right.End() + return left + } + return p.newExpr(OpConcat, combinePos(left.Pos, right.Pos), left, right) + } + p.infixParselets[tokPipe] = p.parseAlt + p.infixParselets[tokMinus] = p.parseMinus + p.infixParselets[tokPlus] = p.parsePlus + p.infixParselets[tokQuestion] = p.parseQuestion + + return &p +} + +func (p *Parser) setValues(e *Expr) { + for i := range e.Args { + p.setValues(&e.Args[i]) + } + e.Value = p.exprValue(e) +} + +func (p *Parser) exprValue(e *Expr) string { + return p.out.Pattern[e.Begin():e.End()] +} + +func (p *Parser) mergeChars(e *Expr) { + for i := range e.Args { + p.mergeChars(&e.Args[i]) + } + if e.Op != OpConcat || len(e.Args) < 2 { + return + } + + args := e.Args[:0] + i := 0 + for i < len(e.Args) { + first := i + chars := 0 + for j := i; j < len(e.Args) && e.Args[j].Op == OpChar; j++ { + chars++ + } + if chars > 1 { + c1 := e.Args[first] + c2 := e.Args[first+chars-1] + lit := p.newExpr(OpLiteral, combinePos(c1.Pos, c2.Pos)) + for j := 0; j < chars; j++ { + lit.Args = append(lit.Args, e.Args[first+j]) + } + args = append(args, *lit) + i += chars + } else { + args = append(args, e.Args[i]) + i++ + } + } + if len(args) == 1 { + *e = args[0] // Turn OpConcat into OpLiteral + } else { + e.Args = args + } +} + +func (p *Parser) newEmpty(pos Position) *Expr { + return p.newExpr(OpConcat, pos) +} + +func (p *Parser) newExprForm(op Operation, form Form, pos Position, args ...*Expr) *Expr { + e := p.newExpr(op, pos, args...) 
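+	// Form records which syntactic variant produced the node, for example
+	// FormEscapeHexFull for `\x{...}` as opposed to the short `\xFF` form.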
+ e.Form = form + return e +} + +func (p *Parser) newExpr(op Operation, pos Position, args ...*Expr) *Expr { + e := p.allocExpr() + *e = Expr{ + Op: op, + Pos: pos, + Args: e.Args[:0], + } + for _, arg := range args { + e.Args = append(e.Args, *arg) + } + return e +} + +func (p *Parser) allocExpr() *Expr { + i := p.allocated + if i < uint(len(p.exprPool)) { + p.allocated++ + return &p.exprPool[i] + } + return &Expr{} +} + +func (p *Parser) expect(kind tokenKind) Position { + tok := p.lexer.NextToken() + if tok.kind != kind { + throwErrorf(int(tok.pos.Begin), int(tok.pos.End), "expected '%s', found '%s'", kind, tok.kind) + } + return tok.pos +} + +func (p *Parser) parseExpr(precedence int) *Expr { + tok := p.lexer.NextToken() + prefix := p.prefixParselets[tok.kind] + if prefix == nil { + throwfPos(tok.pos, "unexpected token: %v", tok) + } + left := prefix(tok) + + for precedence < p.precedenceOf(p.lexer.Peek()) { + tok := p.lexer.NextToken() + infix := p.infixParselets[tok.kind] + left = infix(left, tok) + } + + return left +} + +func (p *Parser) parsePrefixElementary(tok token) *Expr { + return p.newExpr(tok2op[tok.kind], tok.pos) +} + +func (p *Parser) parseCharClass(op Operation, tok token) *Expr { + var endPos Position + p.charClass = p.charClass[:0] + for { + p.charClass = append(p.charClass, *p.parseExpr(0)) + next := p.lexer.Peek() + if next.kind == tokRbracket { + endPos = next.pos + p.lexer.NextToken() + break + } + if next.kind == tokNone { + throwfPos(tok.pos, "unterminated '['") + } + } + + result := p.newExpr(op, combinePos(tok.pos, endPos)) + result.Args = append(result.Args, p.charClass...) + return result +} + +func (p *Parser) parseMinus(left *Expr, tok token) *Expr { + if p.isValidCharRangeOperand(left) { + if p.lexer.Peek().kind != tokRbracket { + right := p.parseExpr(2) + return p.newExpr(OpCharRange, combinePos(left.Pos, right.Pos), left, right) + } + } + p.charClass = append(p.charClass, *left) + return p.newExpr(OpChar, tok.pos) +} + +func (p *Parser) isValidCharRangeOperand(e *Expr) bool { + switch e.Op { + case OpEscapeHex, OpEscapeOctal, OpEscapeMeta, OpChar: + return true + case OpEscapeChar: + switch p.exprValue(e) { + case `\\`, `\|`, `\*`, `\+`, `\?`, `\.`, `\[`, `\^`, `\$`, `\(`, `\)`: + return true + } + } + return false +} + +func (p *Parser) parsePlus(left *Expr, tok token) *Expr { + op := OpPlus + switch left.Op { + case OpPlus, OpStar, OpQuestion, OpRepeat: + op = OpPossessive + } + return p.newExpr(op, combinePos(left.Pos, tok.pos), left) +} + +func (p *Parser) parseQuestion(left *Expr, tok token) *Expr { + op := OpQuestion + switch left.Op { + case OpPlus, OpStar, OpQuestion, OpRepeat: + op = OpNonGreedy + } + return p.newExpr(op, combinePos(left.Pos, tok.pos), left) +} + +func (p *Parser) parseAlt(left *Expr, tok token) *Expr { + var right *Expr + switch p.lexer.Peek().kind { + case tokRparen, tokNone: + // This is needed to handle `(x|)` syntax. 
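+		// The missing branch after `|` becomes an empty OpConcat node.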
+ right = p.newEmpty(tok.pos) + default: + right = p.parseExpr(1) + } + if left.Op == OpAlt { + left.Args = append(left.Args, *right) + left.Pos.End = right.End() + return left + } + return p.newExpr(OpAlt, combinePos(left.Pos, right.Pos), left, right) +} + +func (p *Parser) parseGroupItem(tok token) *Expr { + if p.lexer.Peek().kind == tokRparen { + // This is needed to handle `() syntax.` + return p.newEmpty(tok.pos) + } + return p.parseExpr(0) +} + +func (p *Parser) parseGroup(op Operation, tok token) *Expr { + x := p.parseGroupItem(tok) + result := p.newExpr(op, tok.pos, x) + result.Pos.End = p.expect(tokRparen).End + return result +} + +func (p *Parser) parseNamedCapture(form Form, tok token) *Expr { + prefixLen := len("(?<") + if form == FormDefault { + prefixLen = len("(?P<") + } + name := p.newExpr(OpString, Position{ + Begin: tok.pos.Begin + uint16(prefixLen), + End: tok.pos.End - uint16(len(">")), + }) + x := p.parseGroupItem(tok) + result := p.newExprForm(OpNamedCapture, form, tok.pos, x, name) + result.Pos.End = p.expect(tokRparen).End + return result +} + +func (p *Parser) parseGroupWithFlags(tok token) *Expr { + var result *Expr + val := p.out.Pattern[tok.pos.Begin+1 : tok.pos.End] + switch { + case !strings.HasSuffix(val, ":"): + flags := p.newExpr(OpString, Position{ + Begin: tok.pos.Begin + uint16(len("(?")), + End: tok.pos.End, + }) + result = p.newExpr(OpFlagOnlyGroup, tok.pos, flags) + case val == "?:": + x := p.parseGroupItem(tok) + result = p.newExpr(OpGroup, tok.pos, x) + default: + flags := p.newExpr(OpString, Position{ + Begin: tok.pos.Begin + uint16(len("(?")), + End: tok.pos.End - uint16(len(":")), + }) + x := p.parseGroupItem(tok) + result = p.newExpr(OpGroupWithFlags, tok.pos, x, flags) + } + result.Pos.End = p.expect(tokRparen).End + return result +} + +func (p *Parser) precedenceOf(tok token) int { + switch tok.kind { + case tokPipe: + return 1 + case tokConcat, tokMinus: + return 2 + case tokPlus, tokStar, tokQuestion, tokRepeat: + return 3 + default: + return 0 + } +} + +func (p *Parser) newPCRE(source string) (*RegexpPCRE, error) { + if source == "" { + return nil, errors.New("empty pattern: can't find delimiters") + } + + delim := source[0] + endDelim := delim + switch delim { + case '(': + endDelim = ')' + case '{': + endDelim = '}' + case '[': + endDelim = ']' + case '<': + endDelim = '>' + case '\\': + return nil, errors.New("'\\' is not a valid delimiter") + default: + if isSpace(delim) { + return nil, errors.New("whitespace is not a valid delimiter") + } + if isAlphanumeric(delim) { + return nil, fmt.Errorf("'%c' is not a valid delimiter", delim) + } + } + + j := strings.LastIndexByte(source, endDelim) + if j == -1 { + return nil, fmt.Errorf("can't find '%c' ending delimiter", endDelim) + } + + pcre := &RegexpPCRE{ + Pattern: source[1:j], + Source: source, + Delim: [2]byte{delim, endDelim}, + Modifiers: source[j+1:], + } + return pcre, nil +} + +var tok2op = [256]Operation{ + tokDollar: OpDollar, + tokCaret: OpCaret, + tokDot: OpDot, + tokChar: OpChar, + tokMinus: OpChar, + tokEscapeChar: OpEscapeChar, + tokEscapeMeta: OpEscapeMeta, + tokEscapeHex: OpEscapeHex, + tokEscapeOctal: OpEscapeOctal, + tokEscapeUni: OpEscapeUni, + tokPosixClass: OpPosixClass, + tokQ: OpQuote, + tokComment: OpComment, +} diff --git a/vendor/github.com/quasilyte/regex/syntax/pos.go b/vendor/github.com/quasilyte/regex/syntax/pos.go new file mode 100644 index 000000000..51bdbf87a --- /dev/null +++ b/vendor/github.com/quasilyte/regex/syntax/pos.go @@ -0,0 +1,10 @@ +package syntax 
+ +type Position struct { + Begin uint16 + End uint16 +} + +func combinePos(begin, end Position) Position { + return Position{Begin: begin.Begin, End: end.End} +} diff --git a/vendor/github.com/quasilyte/regex/syntax/tokenkind_string.go b/vendor/github.com/quasilyte/regex/syntax/tokenkind_string.go new file mode 100644 index 000000000..8800436bc --- /dev/null +++ b/vendor/github.com/quasilyte/regex/syntax/tokenkind_string.go @@ -0,0 +1,59 @@ +// Code generated by "stringer -type=tokenKind -trimprefix=tok -linecomment=true"; DO NOT EDIT. + +package syntax + +import "strconv" + +func _() { + // An "invalid array index" compiler error signifies that the constant values have changed. + // Re-run the stringer command to generate them again. + var x [1]struct{} + _ = x[tokNone-0] + _ = x[tokChar-1] + _ = x[tokGroupFlags-2] + _ = x[tokPosixClass-3] + _ = x[tokConcat-4] + _ = x[tokRepeat-5] + _ = x[tokEscapeChar-6] + _ = x[tokEscapeMeta-7] + _ = x[tokEscapeOctal-8] + _ = x[tokEscapeUni-9] + _ = x[tokEscapeUniFull-10] + _ = x[tokEscapeHex-11] + _ = x[tokEscapeHexFull-12] + _ = x[tokComment-13] + _ = x[tokQ-14] + _ = x[tokMinus-15] + _ = x[tokLbracket-16] + _ = x[tokLbracketCaret-17] + _ = x[tokRbracket-18] + _ = x[tokDollar-19] + _ = x[tokCaret-20] + _ = x[tokQuestion-21] + _ = x[tokDot-22] + _ = x[tokPlus-23] + _ = x[tokStar-24] + _ = x[tokPipe-25] + _ = x[tokLparen-26] + _ = x[tokLparenName-27] + _ = x[tokLparenNameAngle-28] + _ = x[tokLparenNameQuote-29] + _ = x[tokLparenFlags-30] + _ = x[tokLparenAtomic-31] + _ = x[tokLparenPositiveLookahead-32] + _ = x[tokLparenPositiveLookbehind-33] + _ = x[tokLparenNegativeLookahead-34] + _ = x[tokLparenNegativeLookbehind-35] + _ = x[tokRparen-36] +} + +const _tokenKind_name = "NoneCharGroupFlagsPosixClassConcatRepeatEscapeCharEscapeMetaEscapeOctalEscapeUniEscapeUniFullEscapeHexEscapeHexFullComment\\Q-[[^]$^?.+*|((?P(?(?'name'(?flags(?>(?=(?<=(?!(?= tokenKind(len(_tokenKind_index)-1) { + return "tokenKind(" + strconv.FormatInt(int64(i), 10) + ")" + } + return _tokenKind_name[_tokenKind_index[i]:_tokenKind_index[i+1]] +} diff --git a/vendor/github.com/quasilyte/regex/syntax/utils.go b/vendor/github.com/quasilyte/regex/syntax/utils.go new file mode 100644 index 000000000..934680c8b --- /dev/null +++ b/vendor/github.com/quasilyte/regex/syntax/utils.go @@ -0,0 +1,30 @@ +package syntax + +func isSpace(ch byte) bool { + switch ch { + case '\r', '\n', '\t', '\f', '\v': + return true + default: + return false + } +} + +func isAlphanumeric(ch byte) bool { + return (ch >= 'a' && ch <= 'z') || + (ch >= 'A' && ch <= 'Z') || + (ch >= '0' && ch <= '9') +} + +func isDigit(ch byte) bool { + return ch >= '0' && ch <= '9' +} + +func isOctalDigit(ch byte) bool { + return ch >= '0' && ch <= '7' +} + +func isHexDigit(ch byte) bool { + return (ch >= '0' && ch <= '9') || + (ch >= 'a' && ch <= 'f') || + (ch >= 'A' && ch <= 'F') +} diff --git a/vendor/github.com/ryancurrah/gomodguard/.dockerignore b/vendor/github.com/ryancurrah/gomodguard/.dockerignore new file mode 100644 index 000000000..77738287f --- /dev/null +++ b/vendor/github.com/ryancurrah/gomodguard/.dockerignore @@ -0,0 +1 @@ +dist/ \ No newline at end of file diff --git a/vendor/github.com/ryancurrah/gomodguard/.gitignore b/vendor/github.com/ryancurrah/gomodguard/.gitignore new file mode 100644 index 000000000..4ebc79c5d --- /dev/null +++ b/vendor/github.com/ryancurrah/gomodguard/.gitignore @@ -0,0 +1,25 @@ +# Binaries for programs and plugins +*.exe +*.exe~ +*.dll +*.so +*.dylib + +# Test binary, built with `go test -c` 
+*.test + +# Output of the go coverage tool, specifically when used with LiteIDE +*.out + +# Dependency directories (remove the comment below to include it) +# vendor/ + +/gomodguard + +*.xml + +dist/ + +coverage.* + +.idea/ diff --git a/vendor/github.com/ryancurrah/gomodguard/.golangci.yml b/vendor/github.com/ryancurrah/gomodguard/.golangci.yml new file mode 100644 index 000000000..fc05c0e3d --- /dev/null +++ b/vendor/github.com/ryancurrah/gomodguard/.golangci.yml @@ -0,0 +1,132 @@ +# See https://golangci-lint.run/usage/configuration/ + +linters-settings: + revive: + # see https://github.com/mgechev/revive#available-rules for details. + ignore-generated-header: true + severity: warning + rules: + - name: atomic + # - name: bare-return + - name: blank-imports + - name: bool-literal-in-expr + - name: call-to-gc + - name: confusing-naming + - name: confusing-results + - name: constant-logical-expr + - name: context-as-argument + - name: context-keys-type + - name: deep-exit + - name: defer + - name: dot-imports + - name: duplicated-imports + - name: early-return + - name: empty-block + - name: empty-lines + - name: error-naming + - name: error-return + - name: error-strings + - name: errorf + - name: exported + # - name: flag-parameter + - name: get-return + - name: identical-branches + - name: if-return + - name: import-shadowing + - name: increment-decrement + - name: indent-error-flow + - name: modifies-parameter + - name: modifies-value-receiver + - name: package-comments + - name: range + - name: range-val-address + - name: range-val-in-closure + - name: receiver-naming + - name: redefines-builtin-id + - name: string-of-int + - name: struct-tag + - name: superfluous-else + - name: time-naming + - name: unconditional-recursion + - name: unexported-naming + - name: unexported-return + # - name: unhandled-error + - name: unnecessary-stmt + - name: unreachable-code + - name: unused-parameter + # - name: unused-receiver + - name: var-declaration + - name: var-naming + - name: waitgroup-by-value + +linters: + disable-all: true + enable: + - asciicheck + - bodyclose + # - cyclop + - deadcode + - dogsled + - dupl + - durationcheck + - errcheck + - errorlint + - exhaustive + # - exhaustivestruct + - exportloopref + # - forbidigo + - forcetypeassert + - funlen + # - gci + # - gochecknoglobals + - gochecknoinits + - gocognit + - goconst + - gocritic + - gocyclo + - godot + - godox + - goerr113 + - goimports + - gomnd + - gomoddirectives + - gomodguard + - goprintffuncname + - gosec + - gosimple + - govet + # - ifshort + - importas + - ineffassign + - lll + - makezero + - misspell + - nakedret + - nestif + - nilerr + # - nlreturn + - noctx + - nolintlint + # - paralleltest + - prealloc + - predeclared + # - promlinter + - revive + - rowserrcheck + - sqlclosecheck + - staticcheck + - structcheck + - stylecheck + # - tagliatelle + - testpackage + - thelper + - tparallel + - typecheck + - unconvert + - unparam + - unused + - varcheck + # - wastedassign + - whitespace + # - wrapcheck + - wsl diff --git a/vendor/github.com/ryancurrah/gomodguard/.goreleaser.yml b/vendor/github.com/ryancurrah/gomodguard/.goreleaser.yml new file mode 100644 index 000000000..3daecfd79 --- /dev/null +++ b/vendor/github.com/ryancurrah/gomodguard/.goreleaser.yml @@ -0,0 +1,29 @@ +builds: +- main: ./cmd/gomodguard/main.go + env: + - CGO_ENABLED=0 +archives: +- replacements: + darwin: Darwin + linux: Linux + windows: Windows + 386: i386 + amd64: x86_64 +checksum: + name_template: 'checksums.txt' +dockers: +- goos: linux + 
goarch: amd64 + image_templates: + - "ryancurrah/gomodguard:latest" + - "ryancurrah/gomodguard:{{.Tag}}" + skip_push: false + dockerfile: Dockerfile.goreleaser + build_flag_templates: + - "--pull" + - "--build-arg=gomodguard_VERSION={{.Version}}" + - "--label=org.opencontainers.image.created={{.Date}}" + - "--label=org.opencontainers.image.name={{.ProjectName}}" + - "--label=org.opencontainers.image.revision={{.FullCommit}}" + - "--label=org.opencontainers.image.version={{.Version}}" + - "--label=org.opencontainers.image.source={{.GitURL}}" diff --git a/vendor/github.com/ryancurrah/gomodguard/Dockerfile b/vendor/github.com/ryancurrah/gomodguard/Dockerfile new file mode 100644 index 000000000..719a0ebdb --- /dev/null +++ b/vendor/github.com/ryancurrah/gomodguard/Dockerfile @@ -0,0 +1,17 @@ +ARG GO_VERSION=1.14.2 +ARG ALPINE_VERSION=3.11 +ARG gomodguard_VERSION= + +# ---- Build container +FROM golang:${GO_VERSION}-alpine${ALPINE_VERSION} AS builder +WORKDIR /gomodguard +COPY . . +RUN apk add --no-cache git +RUN go build -o gomodguard cmd/gomodguard/main.go + +# ---- App container +FROM golang:${GO_VERSION}-alpine${ALPINE_VERSION} +WORKDIR / +RUN apk --no-cache add ca-certificates +COPY --from=builder gomodguard/gomodguard / +ENTRYPOINT ./gomodguard diff --git a/vendor/github.com/ryancurrah/gomodguard/Dockerfile.goreleaser b/vendor/github.com/ryancurrah/gomodguard/Dockerfile.goreleaser new file mode 100644 index 000000000..57a042a67 --- /dev/null +++ b/vendor/github.com/ryancurrah/gomodguard/Dockerfile.goreleaser @@ -0,0 +1,10 @@ +ARG GO_VERSION=1.14.2 +ARG ALPINE_VERSION=3.11 +ARG gomodguard_VERSION= + +# ---- App container +FROM golang:${GO_VERSION}-alpine${ALPINE_VERSION} +WORKDIR / +RUN apk --no-cache add ca-certificates +COPY gomodguard /gomodguard +ENTRYPOINT ./gomodguard diff --git a/vendor/github.com/ryancurrah/gomodguard/LICENSE b/vendor/github.com/ryancurrah/gomodguard/LICENSE new file mode 100644 index 000000000..acd8a81e1 --- /dev/null +++ b/vendor/github.com/ryancurrah/gomodguard/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2020 Ryan Currah + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/github.com/ryancurrah/gomodguard/Makefile b/vendor/github.com/ryancurrah/gomodguard/Makefile new file mode 100644 index 000000000..a1962834c --- /dev/null +++ b/vendor/github.com/ryancurrah/gomodguard/Makefile @@ -0,0 +1,42 @@ +current_dir = $(shell pwd) + +.PHONY: lint +lint: + golangci-lint run ./... 
+
+.PHONY: build
+build:
+	go build -o gomodguard cmd/gomodguard/main.go
+
+.PHONY: run
+run: build
+	./gomodguard
+
+.PHONY: test
+test:
+	go test -v -coverprofile coverage.out
+
+.PHONY: cover
+cover:
+	gocover-cobertura < coverage.out > coverage.xml
+
+.PHONY: dockerrun
+dockerrun: dockerbuild
+	docker run -v "${current_dir}/.gomodguard.yaml:/.gomodguard.yaml" ryancurrah/gomodguard:latest
+
+.PHONY: release
+release:
+	goreleaser --rm-dist
+
+.PHONY: clean
+clean:
+	rm -rf dist/
+	rm -f gomodguard coverage.xml coverage.out
+
+.PHONY: install-tools-mac
+install-tools-mac:
+	brew install goreleaser/tap/goreleaser
+
+.PHONY: install-go-tools
+install-go-tools:
+	go get github.com/t-yuki/gocover-cobertura
diff --git a/vendor/github.com/ryancurrah/gomodguard/README.md b/vendor/github.com/ryancurrah/gomodguard/README.md
new file mode 100644
index 000000000..8e2e41688
--- /dev/null
+++ b/vendor/github.com/ryancurrah/gomodguard/README.md
@@ -0,0 +1,131 @@
+# gomodguard
+[![License](https://img.shields.io/github/license/ryancurrah/gomodguard?style=flat-square)](/LICENSE)
+[![Codecov](https://img.shields.io/codecov/c/gh/ryancurrah/gomodguard?style=flat-square)](https://codecov.io/gh/ryancurrah/gomodguard)
+[![GitHub Workflow Status](https://img.shields.io/github/workflow/status/ryancurrah/gomodguard/Go?logo=Go&style=flat-square)](https://github.com/ryancurrah/gomodguard/actions?query=workflow%3AGo)
+[![GitHub release (latest SemVer)](https://img.shields.io/github/v/release/ryancurrah/gomodguard?style=flat-square)](https://github.com/ryancurrah/gomodguard/releases/latest)
+[![Docker](https://img.shields.io/docker/pulls/ryancurrah/gomodguard?style=flat-square)](https://hub.docker.com/r/ryancurrah/gomodguard)
+[![Github Releases Stats of gomodguard](https://img.shields.io/github/downloads/ryancurrah/gomodguard/total.svg?logo=github&style=flat-square)](https://somsubhra.com/github-release-stats/?username=ryancurrah&repository=gomodguard)
+
+
+
+Allow and block list linter for direct Go module dependencies. This is useful for organizations that want to standardize on the modules used and to recommend alternative modules.
+
+## Description
+
+Allowed and blocked modules are defined in a `./.gomodguard.yaml` or `~/.gomodguard.yaml` file.
+
+Modules can be allowed by module or domain name. When allowed modules are specified, any module not in the allowed configuration is blocked.
+
+If no allowed modules or domains are specified, then all modules are allowed except for blocked ones.
+
+The linter looks for blocked modules in `go.mod` and searches for imported packages whose module is blocked. Indirect modules are not considered.
+
+Alternative modules can optionally be recommended in the blocked modules list.
+
+If the linted module imports a blocked module but the linted module is in that blocked module's recommendations list, the blocked module is ignored. Usually this means the linted module wraps the blocked module for use by other modules, so the import of the blocked module should not be blocked.
+
+Version constraints can also be specified for modules, which lets you block new, old, or specific versions of a module.
+
+Results are printed to `stdout`.
+
+Logging statements are printed to `stderr`.
+
+Results can be exported to different report formats, which can be imported into CI tools. See the help section for more information.
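+
+The version constraints mentioned above are evaluated with the `github.com/Masterminds/semver` package. A minimal sketch of how such a constraint check behaves (this program is illustrative only and is not part of the upstream tool):
+
+```go
+package main
+
+import (
+	"fmt"
+
+	"github.com/Masterminds/semver"
+)
+
+func main() {
+	// Mirrors the "<= 1.1.0" constraint used in the configuration example below.
+	c, err := semver.NewConstraint("<= 1.1.0")
+	if err != nil {
+		panic(err)
+	}
+	v, err := semver.NewVersion("1.1.0")
+	if err != nil {
+		panic(err)
+	}
+	fmt.Println(c.Check(v)) // true: 1.1.0 satisfies the constraint, so that module version would be blocked
+}
+```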
+ +## Configuration + +```yaml +allowed: + modules: # List of allowed modules + - gopkg.in/yaml.v2 + - github.com/go-xmlfmt/xmlfmt + - github.com/phayes/checkstyle + - github.com/mitchellh/go-homedir + domains: # List of allowed module domains + - golang.org + +blocked: + modules: # List of blocked modules + - github.com/uudashr/go-module: # Blocked module + recommendations: # Recommended modules that should be used instead (Optional) + - golang.org/x/mod + reason: "`mod` is the official go.mod parser library." # Reason why the recommended module should be used (Optional) + versions: # List of blocked module version constraints. + - github.com/mitchellh/go-homedir: # Blocked module with version constraint. + version: "<= 1.1.0" # Version constraint, see https://github.com/Masterminds/semver#basic-comparisons. + reason: "testing if blocked version constraint works." # Reason why the version constraint exists. +``` + +## Usage + +``` +╰─ ./gomodguard -h +Usage: gomodguard [files...] +Also supports package syntax but will use it in relative path, i.e. ./pkg/... +Flags: + -f string + Report results to the specified file. A report type must also be specified + -file string + + -h Show this help text + -help + + -i int + Exit code when issues were found (default 2) + -issues-exit-code int + (default 2) + + -n Don't lint test files + -no-test + + -r string + Report results to one of the following formats: checkstyle. A report file destination must also be specified + -report string +``` + +## Example + +``` +╰─ ./gomodguard -r checkstyle -f gomodguard-checkstyle.xml ./... + +info: allowed modules, [gopkg.in/yaml.v2 github.com/go-xmlfmt/xmlfmt github.com/phayes/checkstyle github.com/mitchellh/go-homedir] +info: allowed module domains, [golang.org] +info: blocked modules, [github.com/uudashr/go-module] +info: found `2` blocked modules in the go.mod file, [github.com/gofrs/uuid github.com/uudashr/go-module] +blocked_example.go:6: import of package `github.com/gofrs/uuid` is blocked because the module is not in the allowed modules list. +blocked_example.go:7: import of package `github.com/uudashr/go-module` is blocked because the module is in the blocked modules list. `golang.org/x/mod` is a recommended module. `mod` is the official go.mod parser library. +``` + +Resulting checkstyle file + +``` +╰─ cat gomodguard-checkstyle.xml + + + + + + + + + + +``` + +## Install + +``` +go get -u github.com/ryancurrah/gomodguard/cmd/gomodguard +``` + +## Develop + +``` +git clone https://github.com/ryancurrah/gomodguard.git && cd gomodguard + +go build -o gomodguard cmd/gomodguard/main.go +``` + +## License + +**MIT** diff --git a/vendor/github.com/ryancurrah/gomodguard/cmd.go b/vendor/github.com/ryancurrah/gomodguard/cmd.go new file mode 100644 index 000000000..a26fac890 --- /dev/null +++ b/vendor/github.com/ryancurrah/gomodguard/cmd.go @@ -0,0 +1,247 @@ +package gomodguard + +import ( + "flag" + "fmt" + "io/ioutil" + "log" + "os" + "path/filepath" + "strings" + + "github.com/go-xmlfmt/xmlfmt" + "github.com/mitchellh/go-homedir" + "github.com/phayes/checkstyle" + "gopkg.in/yaml.v2" +) + +const ( + errFindingHomedir = "unable to find home directory, %w" + errReadingConfigFile = "could not read config file: %w" + errParsingConfigFile = "could not parse config file: %w" +) + +var ( + configFile = ".gomodguard.yaml" + logger = log.New(os.Stderr, "", 0) + errFindingConfigFile = fmt.Errorf("could not find config file") +) + +// Run the gomodguard linter. Returns the exit code to use. 
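+//
+// A minimal wiring from a main package might look like the following sketch
+// (illustrative; the actual entrypoint in cmd/gomodguard/main.go may differ):
+//
+//	func main() {
+//		os.Exit(gomodguard.Run())
+//	}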
+//nolint:funlen +func Run() int { + var ( + args []string + help bool + noTest bool + report string + reportFile string + issuesExitCode int + cwd, _ = os.Getwd() + ) + + flag.BoolVar(&help, "h", false, "Show this help text") + flag.BoolVar(&help, "help", false, "") + flag.BoolVar(&noTest, "n", false, "Don't lint test files") + flag.BoolVar(&noTest, "no-test", false, "") + flag.StringVar(&report, "r", "", "Report results to one of the following formats: checkstyle. "+ + "A report file destination must also be specified") + flag.StringVar(&report, "report", "", "") + flag.StringVar(&reportFile, "f", "", "Report results to the specified file. A report type must also be specified") + flag.StringVar(&reportFile, "file", "", "") + flag.IntVar(&issuesExitCode, "i", 2, "Exit code when issues were found") + flag.IntVar(&issuesExitCode, "issues-exit-code", 2, "") + flag.Parse() + + report = strings.TrimSpace(strings.ToLower(report)) + + if help { + showHelp() + return 0 + } + + if report != "" && report != "checkstyle" { + logger.Fatalf("error: invalid report type '%s'", report) + } + + if report != "" && reportFile == "" { + logger.Fatalf("error: a report file must be specified when a report is enabled") + } + + if report == "" && reportFile != "" { + logger.Fatalf("error: a report type must be specified when a report file is enabled") + } + + args = flag.Args() + if len(args) == 0 { + args = []string{"./..."} + } + + config, err := GetConfig(configFile) + if err != nil { + logger.Fatalf("error: %s", err) + } + + filteredFiles := GetFilteredFiles(cwd, noTest, args) + + processor, err := NewProcessor(config) + if err != nil { + logger.Fatalf("error: %s", err) + } + + logger.Printf("info: allowed modules, %+v", config.Allowed.Modules) + logger.Printf("info: allowed module domains, %+v", config.Allowed.Domains) + logger.Printf("info: blocked modules, %+v", config.Blocked.Modules.Get()) + logger.Printf("info: blocked modules with version constraints, %+v", config.Blocked.Versions.Get()) + + results := processor.ProcessFiles(filteredFiles) + + if report == "checkstyle" { + err := WriteCheckstyle(reportFile, results) + if err != nil { + logger.Fatalf("error: %s", err) + } + } + + for _, r := range results { + fmt.Println(r.String()) + } + + if len(results) > 0 { + return issuesExitCode + } + + return 0 +} + +// GetConfig from YAML file. +func GetConfig(configFile string) (*Configuration, error) { + config := Configuration{} + + home, err := homedir.Dir() + if err != nil { + return nil, fmt.Errorf(errFindingHomedir, err) + } + + cfgFile := "" + homeDirCfgFile := filepath.Join(home, configFile) + + switch { + case fileExists(configFile): + cfgFile = configFile + case fileExists(homeDirCfgFile): + cfgFile = homeDirCfgFile + default: + return nil, fmt.Errorf("%w: %s %s", errFindingConfigFile, configFile, homeDirCfgFile) + } + + data, err := ioutil.ReadFile(cfgFile) + if err != nil { + return nil, fmt.Errorf(errReadingConfigFile, err) + } + + err = yaml.Unmarshal(data, &config) + if err != nil { + return nil, fmt.Errorf(errParsingConfigFile, err) + } + + return &config, nil +} + +// GetFilteredFiles returns files based on search string arguments and filters. +func GetFilteredFiles(cwd string, skipTests bool, args []string) []string { + var ( + foundFiles = []string{} + filteredFiles = []string{} + ) + + for _, f := range args { + if strings.HasSuffix(f, "/...") { + dir, _ := filepath.Split(f) + + foundFiles = append(foundFiles, expandGoWildcard(dir)...) 
+ + continue + } + + if _, err := os.Stat(f); err == nil { + foundFiles = append(foundFiles, f) + } + } + + // Use relative path to print shorter names, sort out test foundFiles if chosen. + for _, f := range foundFiles { + if skipTests { + if strings.HasSuffix(f, "_test.go") { + continue + } + } + + if relativePath, err := filepath.Rel(cwd, f); err == nil { + filteredFiles = append(filteredFiles, relativePath) + + continue + } + + filteredFiles = append(filteredFiles, f) + } + + return filteredFiles +} + +// showHelp text for command line. +func showHelp() { + helpText := `Usage: gomodguard [files...] +Also supports package syntax but will use it in relative path, i.e. ./pkg/... +Flags:` + fmt.Println(helpText) + flag.PrintDefaults() +} + +// WriteCheckstyle takes the results and writes them to a checkstyle formated file. +func WriteCheckstyle(checkstyleFilePath string, results []Issue) error { + check := checkstyle.New() + + for i := range results { + file := check.EnsureFile(results[i].FileName) + file.AddError(checkstyle.NewError(results[i].LineNumber, 1, checkstyle.SeverityError, results[i].Reason, + "gomodguard")) + } + + checkstyleXML := fmt.Sprintf("\n%s", check.String()) + + err := ioutil.WriteFile(checkstyleFilePath, []byte(xmlfmt.FormatXML(checkstyleXML, "", " ")), 0644) // nolint:gosec + if err != nil { + return err + } + + return nil +} + +// fileExists returns true if the file path provided exists. +func fileExists(filename string) bool { + info, err := os.Stat(filename) + if os.IsNotExist(err) { + return false + } + + return !info.IsDir() +} + +// expandGoWildcard path provided. +func expandGoWildcard(root string) []string { + foundFiles := []string{} + + _ = filepath.Walk(root, func(path string, info os.FileInfo, err error) error { + // Only append go foundFiles. 
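+		// Note: the callback's err parameter is ignored and info is assumed to be non-nil.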
+ if !strings.HasSuffix(info.Name(), ".go") { + return nil + } + + foundFiles = append(foundFiles, path) + + return nil + }) + + return foundFiles +} diff --git a/vendor/github.com/ryancurrah/gomodguard/go.mod b/vendor/github.com/ryancurrah/gomodguard/go.mod new file mode 100644 index 000000000..3a19311ba --- /dev/null +++ b/vendor/github.com/ryancurrah/gomodguard/go.mod @@ -0,0 +1,12 @@ +module github.com/ryancurrah/gomodguard + +go 1.16 + +require ( + github.com/Masterminds/semver v1.5.0 + github.com/go-xmlfmt/xmlfmt v0.0.0-20191208150333-d5b6f63a941b + github.com/mitchellh/go-homedir v1.1.0 + github.com/phayes/checkstyle v0.0.0-20170904204023-bfd46e6a821d + golang.org/x/mod v0.4.2 + gopkg.in/yaml.v2 v2.4.0 +) diff --git a/vendor/github.com/ryancurrah/gomodguard/go.sum b/vendor/github.com/ryancurrah/gomodguard/go.sum new file mode 100644 index 000000000..30447d902 --- /dev/null +++ b/vendor/github.com/ryancurrah/gomodguard/go.sum @@ -0,0 +1,26 @@ +github.com/Masterminds/semver v1.5.0 h1:H65muMkzWKEuNDnfl9d70GUjFniHKHRbFPGBuZ3QEww= +github.com/Masterminds/semver v1.5.0/go.mod h1:MB6lktGJrhw8PrUyiEoblNEGEQ+RzHPF078ddwwvV3Y= +github.com/go-xmlfmt/xmlfmt v0.0.0-20191208150333-d5b6f63a941b h1:khEcpUM4yFcxg4/FHQWkvVRmgijNXRfzkIDHh23ggEo= +github.com/go-xmlfmt/xmlfmt v0.0.0-20191208150333-d5b6f63a941b/go.mod h1:aUCEOzzezBEjDBbFBoSiya/gduyIiWYRP6CnSFIV8AM= +github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y= +github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= +github.com/phayes/checkstyle v0.0.0-20170904204023-bfd46e6a821d h1:CdDQnGF8Nq9ocOS/xlSptM1N3BbrA6/kmaep5ggwaIA= +github.com/phayes/checkstyle v0.0.0-20170904204023-bfd46e6a821d/go.mod h1:3OzsM7FXDQlpCiw2j81fOmAwQLnZnLGXVKUzeKQXIAw= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/mod v0.4.2 h1:Gz96sIWK3OalVv/I/qNygP42zyoKp3xptRVCWRFEBvo= +golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898 h1:/atklqdjdhuosWIl6AIbOeHJjicWYPqR9bpxqxYG2pA= +golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= +gopkg.in/yaml.v2 v2.4.0/go.mod 
h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= diff --git a/vendor/github.com/ryancurrah/gomodguard/gomodguard.go b/vendor/github.com/ryancurrah/gomodguard/gomodguard.go new file mode 100644 index 000000000..7ef462363 --- /dev/null +++ b/vendor/github.com/ryancurrah/gomodguard/gomodguard.go @@ -0,0 +1,486 @@ +package gomodguard + +import ( + "bytes" + "encoding/json" + "errors" + "fmt" + "go/parser" + "go/token" + "io/ioutil" + "os" + "os/exec" + "strings" + + "github.com/Masterminds/semver" + + "golang.org/x/mod/modfile" +) + +const ( + goModFilename = "go.mod" + errReadingGoModFile = "unable to read module file %s: %w" + errParsingGoModFile = "unable to parse module file %s: %w" +) + +var ( + blockReasonNotInAllowedList = "import of package `%s` is blocked because the module is not in the " + + "allowed modules list." + blockReasonInBlockedList = "import of package `%s` is blocked because the module is in the " + + "blocked modules list." + blockReasonHasLocalReplaceDirective = "import of package `%s` is blocked because the module has a " + + "local replace directive." +) + +// BlockedVersion has a version constraint a reason why the the module version is blocked. +type BlockedVersion struct { + Version string `yaml:"version"` + Reason string `yaml:"reason"` +} + +// IsLintedModuleVersionBlocked returns true if a version constraint is specified and the +// linted module version matches the constraint. +func (r *BlockedVersion) IsLintedModuleVersionBlocked(lintedModuleVersion string) bool { + if r.Version == "" { + return false + } + + constraint, err := semver.NewConstraint(r.Version) + if err != nil { + return false + } + + version, err := semver.NewVersion(lintedModuleVersion) + if err != nil { + return false + } + + meet := constraint.Check(version) + + return meet +} + +// Message returns the reason why the module version is blocked. +func (r *BlockedVersion) Message(lintedModuleVersion string) string { + var sb strings.Builder + + // Add version contraint to message. + _, _ = fmt.Fprintf(&sb, "version `%s` is blocked because it does not meet the version constraint `%s`.", + lintedModuleVersion, r.Version) + + if r.Reason == "" { + return sb.String() + } + + // Add reason to message. + _, _ = fmt.Fprintf(&sb, " %s.", strings.TrimRight(r.Reason, ".")) + + return sb.String() +} + +// BlockedModule has alternative modules to use and a reason why the module is blocked. +type BlockedModule struct { + Recommendations []string `yaml:"recommendations"` + Reason string `yaml:"reason"` +} + +// IsCurrentModuleARecommendation returns true if the current module is in the Recommendations list. +// +// If the current go.mod file being linted is a recommended module of a +// blocked module and it imports that blocked module, do not set as blocked. +// This could mean that the linted module is a wrapper for that blocked module. +func (r *BlockedModule) IsCurrentModuleARecommendation(currentModuleName string) bool { + if r == nil { + return false + } + + for n := range r.Recommendations { + if strings.TrimSpace(currentModuleName) == strings.TrimSpace(r.Recommendations[n]) { + return true + } + } + + return false +} + +// Message returns the reason why the module is blocked and a list of recommended modules if provided. 
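+// For example (mirroring the README example), a single recommendation of
+// `golang.org/x/mod` with the reason "`mod` is the official go.mod parser library."
+// produces: "`golang.org/x/mod` is a recommended module. `mod` is the official
+// go.mod parser library."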
+func (r *BlockedModule) Message() string { + var sb strings.Builder + + // Add recommendations to message + for i := range r.Recommendations { + switch { + case len(r.Recommendations) == 1: + _, _ = fmt.Fprintf(&sb, "`%s` is a recommended module.", r.Recommendations[i]) + case (i+1) != len(r.Recommendations) && (i+1) == (len(r.Recommendations)-1): + _, _ = fmt.Fprintf(&sb, "`%s` ", r.Recommendations[i]) + case (i + 1) != len(r.Recommendations): + _, _ = fmt.Fprintf(&sb, "`%s`, ", r.Recommendations[i]) + default: + _, _ = fmt.Fprintf(&sb, "and `%s` are recommended modules.", r.Recommendations[i]) + } + } + + if r.Reason == "" { + return sb.String() + } + + // Add reason to message + if sb.Len() == 0 { + _, _ = fmt.Fprintf(&sb, "%s.", strings.TrimRight(r.Reason, ".")) + } else { + _, _ = fmt.Fprintf(&sb, " %s.", strings.TrimRight(r.Reason, ".")) + } + + return sb.String() +} + +// HasRecommendations returns true if the blocked package has +// recommended modules. +func (r *BlockedModule) HasRecommendations() bool { + if r == nil { + return false + } + + return len(r.Recommendations) > 0 +} + +// BlockedVersions a list of blocked modules by a version constraint. +type BlockedVersions []map[string]BlockedVersion + +// Get returns the module names that are blocked. +func (b BlockedVersions) Get() []string { + modules := make([]string, len(b)) + + for n := range b { + for module := range b[n] { + modules[n] = module + break + } + } + + return modules +} + +// GetBlockReason returns a block version if one is set for the provided linted module name. +func (b BlockedVersions) GetBlockReason(lintedModuleName string) *BlockedVersion { + for _, blockedModule := range b { + for blockedModuleName, blockedVersion := range blockedModule { + if strings.TrimSpace(lintedModuleName) == strings.TrimSpace(blockedModuleName) { + return &blockedVersion + } + } + } + + return nil +} + +// BlockedModules a list of blocked modules. +type BlockedModules []map[string]BlockedModule + +// Get returns the module names that are blocked. +func (b BlockedModules) Get() []string { + modules := make([]string, len(b)) + + for n := range b { + for module := range b[n] { + modules[n] = module + break + } + } + + return modules +} + +// GetBlockReason returns a block module if one is set for the provided linted module name. +func (b BlockedModules) GetBlockReason(lintedModuleName string) *BlockedModule { + for _, blockedModule := range b { + for blockedModuleName, blockedModule := range blockedModule { + if strings.TrimSpace(lintedModuleName) == strings.TrimSpace(blockedModuleName) { + return &blockedModule + } + } + } + + return nil +} + +// Allowed is a list of modules and module +// domains that are allowed to be used. +type Allowed struct { + Modules []string `yaml:"modules"` + Domains []string `yaml:"domains"` +} + +// IsAllowedModule returns true if the given module +// name is in the allowed modules list. +func (a *Allowed) IsAllowedModule(moduleName string) bool { + allowedModules := a.Modules + + for i := range allowedModules { + if strings.TrimSpace(moduleName) == strings.TrimSpace(allowedModules[i]) { + return true + } + } + + return false +} + +// IsAllowedModuleDomain returns true if the given modules domain is +// in the allowed module domains list. 
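+// For example, with Domains set to []string{"golang.org"}, the module
+// golang.org/x/mod is allowed because its path starts with that domain
+// (the comparison is case-insensitive).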
+func (a *Allowed) IsAllowedModuleDomain(moduleName string) bool { + allowedDomains := a.Domains + + for i := range allowedDomains { + if strings.HasPrefix(strings.TrimSpace(strings.ToLower(moduleName)), + strings.TrimSpace(strings.ToLower(allowedDomains[i]))) { + return true + } + } + + return false +} + +// Blocked is a list of modules that are +// blocked and not to be used. +type Blocked struct { + Modules BlockedModules `yaml:"modules"` + Versions BlockedVersions `yaml:"versions"` + LocalReplaceDirectives bool `yaml:"local_replace_directives"` +} + +// Configuration of gomodguard allow and block lists. +type Configuration struct { + Allowed Allowed `yaml:"allowed"` + Blocked Blocked `yaml:"blocked"` +} + +// Issue represents the result of one error. +type Issue struct { + FileName string + LineNumber int + Position token.Position + Reason string +} + +// String returns the filename, line +// number and reason of a Issue. +func (r *Issue) String() string { + return fmt.Sprintf("%s:%d:1 %s", r.FileName, r.LineNumber, r.Reason) +} + +// Processor processes Go files. +type Processor struct { + Config *Configuration + Modfile *modfile.File + blockedModulesFromModFile map[string][]string +} + +// NewProcessor will create a Processor to lint blocked packages. +func NewProcessor(config *Configuration) (*Processor, error) { + goModFileBytes, err := loadGoModFile() + if err != nil { + return nil, fmt.Errorf(errReadingGoModFile, goModFilename, err) + } + + modFile, err := modfile.Parse(goModFilename, goModFileBytes, nil) + if err != nil { + return nil, fmt.Errorf(errParsingGoModFile, goModFilename, err) + } + + p := &Processor{ + Config: config, + Modfile: modFile, + } + + p.SetBlockedModules() + + return p, nil +} + +// ProcessFiles takes a string slice with file names (full paths) +// and lints them. +func (p *Processor) ProcessFiles(filenames []string) (issues []Issue) { + for _, filename := range filenames { + data, err := ioutil.ReadFile(filename) + if err != nil { + issues = append(issues, Issue{ + FileName: filename, + LineNumber: 0, + Reason: fmt.Sprintf("unable to read file, file cannot be linted (%s)", err.Error()), + }) + + continue + } + + issues = append(issues, p.process(filename, data)...) + } + + return issues +} + +// process file imports and add lint error if blocked package is imported. +func (p *Processor) process(filename string, data []byte) (issues []Issue) { + fileSet := token.NewFileSet() + + file, err := parser.ParseFile(fileSet, filename, data, parser.ParseComments) + if err != nil { + issues = append(issues, Issue{ + FileName: filename, + LineNumber: 0, + Reason: fmt.Sprintf("invalid syntax, file cannot be linted (%s)", err.Error()), + }) + + return + } + + imports := file.Imports + for n := range imports { + importedPkg := strings.TrimSpace(strings.Trim(imports[n].Path.Value, "\"")) + + blockReasons := p.isBlockedPackageFromModFile(importedPkg) + if blockReasons == nil { + continue + } + + for _, blockReason := range blockReasons { + issues = append(issues, p.addError(fileSet, imports[n].Pos(), blockReason)) + } + } + + return issues +} + +// addError adds an error for the file and line number for the current token.Pos +// with the given reason. 
+func (p *Processor) addError(fileset *token.FileSet, pos token.Pos, reason string) Issue { + position := fileset.Position(pos) + + return Issue{ + FileName: position.Filename, + LineNumber: position.Line, + Position: position, + Reason: reason, + } +} + +// SetBlockedModules determines and sets which modules are blocked by reading +// the go.mod file of the module that is being linted. +// +// It works by iterating over the dependant modules specified in the require +// directive, checking if the module domain or full name is in the allowed list. +func (p *Processor) SetBlockedModules() { //nolint:gocognit,funlen + blockedModules := make(map[string][]string, len(p.Modfile.Require)) + currentModuleName := p.Modfile.Module.Mod.Path + lintedModules := p.Modfile.Require + replacedModules := p.Modfile.Replace + + for i := range lintedModules { + if lintedModules[i].Indirect { + continue // Do not lint indirect modules. + } + + lintedModuleName := strings.TrimSpace(lintedModules[i].Mod.Path) + lintedModuleVersion := strings.TrimSpace(lintedModules[i].Mod.Version) + + var isAllowed bool + + switch { + case len(p.Config.Allowed.Modules) == 0 && len(p.Config.Allowed.Domains) == 0: + isAllowed = true + case p.Config.Allowed.IsAllowedModuleDomain(lintedModuleName): + isAllowed = true + case p.Config.Allowed.IsAllowedModule(lintedModuleName): + isAllowed = true + default: + isAllowed = false + } + + blockModuleReason := p.Config.Blocked.Modules.GetBlockReason(lintedModuleName) + blockVersionReason := p.Config.Blocked.Versions.GetBlockReason(lintedModuleName) + + if !isAllowed && blockModuleReason == nil && blockVersionReason == nil { + blockedModules[lintedModuleName] = append(blockedModules[lintedModuleName], blockReasonNotInAllowedList) + continue + } + + if blockModuleReason != nil && !blockModuleReason.IsCurrentModuleARecommendation(currentModuleName) { + blockedModules[lintedModuleName] = append(blockedModules[lintedModuleName], + fmt.Sprintf("%s %s", blockReasonInBlockedList, blockModuleReason.Message())) + } + + if blockVersionReason != nil && blockVersionReason.IsLintedModuleVersionBlocked(lintedModuleVersion) { + blockedModules[lintedModuleName] = append(blockedModules[lintedModuleName], + fmt.Sprintf("%s %s", blockReasonInBlockedList, blockVersionReason.Message(lintedModuleVersion))) + } + } + + // Replace directives with local paths are blocked. + // Filesystem paths found in "replace" directives are represented by a path with an empty version. + // https://github.com/golang/mod/blob/bc388b264a244501debfb9caea700c6dcaff10e2/module/module.go#L122-L124 + if p.Config.Blocked.LocalReplaceDirectives { + for i := range replacedModules { + replacedModuleOldName := strings.TrimSpace(replacedModules[i].Old.Path) + replacedModuleNewName := strings.TrimSpace(replacedModules[i].New.Path) + replacedModuleNewVersion := strings.TrimSpace(replacedModules[i].New.Version) + + if replacedModuleNewName != "" && replacedModuleNewVersion == "" { + blockedModules[replacedModuleOldName] = append(blockedModules[replacedModuleOldName], + blockReasonHasLocalReplaceDirective) + } + } + } + + p.blockedModulesFromModFile = blockedModules +} + +// isBlockedPackageFromModFile returns the block reason if the package is blocked. 
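+// Matching is done by prefix, so packages inside a blocked module (for example
+// subpackages of a module listed in the blocked modules map) are reported as well.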
+func (p *Processor) isBlockedPackageFromModFile(packageName string) []string { + for blockedModuleName, blockReasons := range p.blockedModulesFromModFile { + if strings.HasPrefix(strings.TrimSpace(packageName), strings.TrimSpace(blockedModuleName)) { + formattedReasons := make([]string, 0, len(blockReasons)) + + for _, blockReason := range blockReasons { + formattedReasons = append(formattedReasons, fmt.Sprintf(blockReason, packageName)) + } + + return formattedReasons + } + } + + return nil +} + +func loadGoModFile() ([]byte, error) { + cmd := exec.Command("go", "env", "-json") + stdout, _ := cmd.StdoutPipe() + _ = cmd.Start() + + if stdout == nil { + return ioutil.ReadFile(goModFilename) + } + + buf := new(bytes.Buffer) + _, _ = buf.ReadFrom(stdout) + + goEnv := make(map[string]string) + + err := json.Unmarshal(buf.Bytes(), &goEnv) + if err != nil { + return ioutil.ReadFile(goModFilename) + } + + if _, ok := goEnv["GOMOD"]; !ok { + return ioutil.ReadFile(goModFilename) + } + + if _, err = os.Stat(goEnv["GOMOD"]); os.IsNotExist(err) { + return ioutil.ReadFile(goModFilename) + } + + if goEnv["GOMOD"] == "/dev/null" { + return nil, errors.New("current working directory must have a go.mod file") + } + + return ioutil.ReadFile(goEnv["GOMOD"]) +} diff --git a/vendor/github.com/ryanrolds/sqlclosecheck/LICENSE b/vendor/github.com/ryanrolds/sqlclosecheck/LICENSE new file mode 100644 index 000000000..77b261d7a --- /dev/null +++ b/vendor/github.com/ryanrolds/sqlclosecheck/LICENSE @@ -0,0 +1,19 @@ +Copyright (c) 2020 Ryan R. Olds + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
\ No newline at end of file diff --git a/vendor/github.com/ryanrolds/sqlclosecheck/pkg/analyzer/analyzer.go b/vendor/github.com/ryanrolds/sqlclosecheck/pkg/analyzer/analyzer.go new file mode 100644 index 000000000..bc42dfb3a --- /dev/null +++ b/vendor/github.com/ryanrolds/sqlclosecheck/pkg/analyzer/analyzer.go @@ -0,0 +1,311 @@ +package analyzer + +import ( + "go/types" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/buildssa" + "golang.org/x/tools/go/ssa" +) + +const ( + rowsName = "Rows" + stmtName = "Stmt" + closeMethod = "Close" +) + +var ( + sqlPackages = []string{ + "database/sql", + "github.com/jmoiron/sqlx", + } +) + +func NewAnalyzer() *analysis.Analyzer { + return &analysis.Analyzer{ + Name: "sqlclosecheck", + Doc: "Checks that sql.Rows and sql.Stmt are closed.", + Run: run, + Requires: []*analysis.Analyzer{ + buildssa.Analyzer, + }, + } +} + +func run(pass *analysis.Pass) (interface{}, error) { + pssa := pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA) + + // Build list of types we are looking for + targetTypes := getTargetTypes(pssa, sqlPackages) + + // If non of the types are found, skip + if len(targetTypes) == 0 { + return nil, nil + } + + funcs := pssa.SrcFuncs + for _, f := range funcs { + for _, b := range f.Blocks { + for i := range b.Instrs { + // Check if instruction is call that returns a target type + targetValues := getTargetTypesValues(b, i, targetTypes) + if len(targetValues) == 0 { + continue + } + + // log.Printf("%s", f.Name()) + + // For each found target check if they are closed and deferred + for _, targetValue := range targetValues { + refs := (*targetValue.value).Referrers() + isClosed := checkClosed(refs, targetTypes) + if !isClosed { + pass.Reportf((targetValue.instr).Pos(), "Rows/Stmt was not closed") + } + + checkDeferred(pass, refs, targetTypes, false) + } + } + } + } + + return nil, nil +} + +func getTargetTypes(pssa *buildssa.SSA, targetPackages []string) []*types.Pointer { + targets := []*types.Pointer{} + + for _, sqlPkg := range targetPackages { + pkg := pssa.Pkg.Prog.ImportedPackage(sqlPkg) + if pkg == nil { + // the SQL package being checked isn't imported + return targets + } + + rowsType := getTypePointerFromName(pkg, rowsName) + if rowsType != nil { + targets = append(targets, rowsType) + } + + stmtType := getTypePointerFromName(pkg, stmtName) + if stmtType != nil { + targets = append(targets, stmtType) + } + } + + return targets +} + +func getTypePointerFromName(pkg *ssa.Package, name string) *types.Pointer { + pkgType := pkg.Type(name) + if pkgType == nil { + // this package does not use Rows/Stmt + return nil + } + + obj := pkgType.Object() + named, ok := obj.Type().(*types.Named) + if !ok { + return nil + } + + return types.NewPointer(named) +} + +type targetValue struct { + value *ssa.Value + instr ssa.Instruction +} + +func getTargetTypesValues(b *ssa.BasicBlock, i int, targetTypes []*types.Pointer) []targetValue { + targetValues := []targetValue{} + + instr := b.Instrs[i] + call, ok := instr.(*ssa.Call) + if !ok { + return targetValues + } + + signature := call.Call.Signature() + results := signature.Results() + for i := 0; i < results.Len(); i++ { + v := results.At(i) + varType := v.Type() + + for _, targetType := range targetTypes { + if !types.Identical(varType, targetType) { + continue + } + + for _, cRef := range *call.Referrers() { + switch instr := cRef.(type) { + case *ssa.Call: + if len(instr.Call.Args) >= 1 && types.Identical(instr.Call.Args[0].Type(), targetType) { + targetValues = 
append(targetValues, targetValue{ + value: &instr.Call.Args[0], + instr: call, + }) + } + case ssa.Value: + if types.Identical(instr.Type(), targetType) { + targetValues = append(targetValues, targetValue{ + value: &instr, + instr: call, + }) + } + } + } + } + } + + return targetValues +} + +func checkClosed(refs *[]ssa.Instruction, targetTypes []*types.Pointer) bool { + numInstrs := len(*refs) + for idx, ref := range *refs { + // log.Printf("%T - %s", ref, ref) + + action := getAction(ref, targetTypes) + switch action { + case "closed": + return true + case "passed": + // Passed and not used after + if numInstrs == idx+1 { + return true + } + case "returned": + return true + case "handled": + return true + default: + // log.Printf(action) + } + } + + return false +} + +func getAction(instr ssa.Instruction, targetTypes []*types.Pointer) string { + switch instr := instr.(type) { + case *ssa.Defer: + if instr.Call.Value == nil { + return "unvalued defer" + } + + name := instr.Call.Value.Name() + if name == closeMethod { + return "closed" + } + case *ssa.Call: + if instr.Call.Value == nil { + return "unvalued call" + } + + isTarget := false + receiver := instr.Call.StaticCallee().Signature.Recv() + if receiver != nil { + isTarget = isTargetType(receiver.Type(), targetTypes) + } + + name := instr.Call.Value.Name() + if isTarget && name == closeMethod { + return "closed" + } + + if !isTarget { + return "passed" + } + case *ssa.Phi: + return "passed" + case *ssa.MakeInterface: + return "passed" + case *ssa.Store: + if len(*instr.Addr.Referrers()) == 0 { + return "noop" + } + + for _, aRef := range *instr.Addr.Referrers() { + if c, ok := aRef.(*ssa.MakeClosure); ok { + f := c.Fn.(*ssa.Function) + for _, b := range f.Blocks { + if checkClosed(&b.Instrs, targetTypes) { + return "handled" + } + } + } + } + case *ssa.UnOp: + instrType := instr.Type() + for _, targetType := range targetTypes { + if types.Identical(instrType, targetType) { + if checkClosed(instr.Referrers(), targetTypes) { + return "handled" + } + } + } + case *ssa.FieldAddr: + if checkClosed(instr.Referrers(), targetTypes) { + return "handled" + } + case *ssa.Return: + return "returned" + default: + // log.Printf("%s", instr) + } + + return "unhandled" +} + +func checkDeferred(pass *analysis.Pass, instrs *[]ssa.Instruction, targetTypes []*types.Pointer, inDefer bool) { + for _, instr := range *instrs { + switch instr := instr.(type) { + case *ssa.Defer: + if instr.Call.Value != nil && instr.Call.Value.Name() == closeMethod { + return + } + case *ssa.Call: + if instr.Call.Value != nil && instr.Call.Value.Name() == closeMethod { + if !inDefer { + pass.Reportf(instr.Pos(), "Close should use defer") + } + + return + } + case *ssa.Store: + if len(*instr.Addr.Referrers()) == 0 { + return + } + + for _, aRef := range *instr.Addr.Referrers() { + if c, ok := aRef.(*ssa.MakeClosure); ok { + f := c.Fn.(*ssa.Function) + + for _, b := range f.Blocks { + checkDeferred(pass, &b.Instrs, targetTypes, true) + } + } + } + case *ssa.UnOp: + instrType := instr.Type() + for _, targetType := range targetTypes { + if types.Identical(instrType, targetType) { + checkDeferred(pass, instr.Referrers(), targetTypes, inDefer) + } + } + case *ssa.FieldAddr: + checkDeferred(pass, instr.Referrers(), targetTypes, inDefer) + } + } +} + +func isTargetType(t types.Type, targetTypes []*types.Pointer) bool { + for _, targetType := range targetTypes { + if types.Identical(t, targetType) { + return true + } + } + + return false +} diff --git 
a/vendor/github.com/sanposhiho/wastedassign/v2/LICENSE b/vendor/github.com/sanposhiho/wastedassign/v2/LICENSE
new file mode 100644
index 000000000..4ed7724fe
--- /dev/null
+++ b/vendor/github.com/sanposhiho/wastedassign/v2/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2020 Kensei Nakada
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/vendor/github.com/sanposhiho/wastedassign/v2/README.md b/vendor/github.com/sanposhiho/wastedassign/v2/README.md
new file mode 100644
index 000000000..cd2deedad
--- /dev/null
+++ b/vendor/github.com/sanposhiho/wastedassign/v2/README.md
@@ -0,0 +1,66 @@
+# wastedassign
+`wastedassign` finds wasted assignment statements.
+
+It reports a value that was ...
+
+- reassigned, but never used afterward
+- reassigned, but reassigned without using the value
+
+## Example
+
+```go
+package main
+
+import "fmt"
+
+func f() int {
+	a := 0
+	b := 0
+	fmt.Print(a)
+	fmt.Print(b)
+	a = 1 // This reassignment is wasted, because it is never used afterwards. Wastedassign finds this
+
+	b = 1 // This reassignment is wasted, because it is reassigned without using this value. Wastedassign finds this
+	b = 2
+	fmt.Print(b)
+
+	return 1 + 2
+}
+```
+
+
+```bash
+$ go vet -vettool=`which wastedassign` sample.go
+# command-line-arguments
+./sample.go:10:2: assigned to a, but never used afterwards
+./sample.go:12:2: assigned to b, but reassigned without using the value
+```
+
+A version of this example with the wasted assignments removed is sketched later in this README.
+
+## Installation
+
+```
+go get -u github.com/sanposhiho/wastedassign/v2/cmd/wastedassign
+```
+
+## Usage
+
+```
+# in your project
+
+go vet -vettool=`which wastedassign` ./...
+```
+
+You can also use wastedassign in [golangci-lint](https://github.com/golangci/golangci-lint).
+
+## Contribution
+
+I am waiting for your contribution :D
+
+Feel free to create an issue or a PR!
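+
+### Example with the wasted assignments removed
+
+As a companion to the example above, here is a minimal sketch of the same
+function with the two wasted assignments dropped (a `main` is added only so
+the file builds on its own); `wastedassign` should report nothing for it.
+
+```go
+package main
+
+import "fmt"
+
+func f() int {
+	a := 0
+	b := 0
+	fmt.Print(a) // a is read here, so its only assignment is not wasted
+	fmt.Print(b)
+
+	b = 2 // kept: this value is read by the Print call below
+	fmt.Print(b)
+
+	return 1 + 2
+}
+
+func main() {
+	fmt.Println(f())
+}
+```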
+ +### Run test + +``` +go test +``` diff --git a/vendor/github.com/sanposhiho/wastedassign/v2/go.mod b/vendor/github.com/sanposhiho/wastedassign/v2/go.mod new file mode 100644 index 000000000..32e5685f6 --- /dev/null +++ b/vendor/github.com/sanposhiho/wastedassign/v2/go.mod @@ -0,0 +1,5 @@ +module github.com/sanposhiho/wastedassign/v2 + +go 1.14 + +require golang.org/x/tools v0.1.0 diff --git a/vendor/github.com/sanposhiho/wastedassign/v2/go.sum b/vendor/github.com/sanposhiho/wastedassign/v2/go.sum new file mode 100644 index 000000000..21d696a65 --- /dev/null +++ b/vendor/github.com/sanposhiho/wastedassign/v2/go.sum @@ -0,0 +1,26 @@ +github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/mod v0.3.0 h1:RM4zey1++hCTbCVQfnWeKs9/IEsaBLA8vTkd0WVtmH4= +golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4 h1:myAQVi0cGEoqQVR5POX+8RR2mrocKqNN1hmeMqhX27k= +golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.1.0 h1:po9/4sTYwZU9lPhi1tOrb4hCv3qrhiQ77LZfGa2OjwY= +golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0= +golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= diff --git a/vendor/github.com/sanposhiho/wastedassign/v2/wastedassign.go b/vendor/github.com/sanposhiho/wastedassign/v2/wastedassign.go new file mode 100644 index 000000000..e0c0da616 --- /dev/null +++ b/vendor/github.com/sanposhiho/wastedassign/v2/wastedassign.go @@ -0,0 +1,272 @@ 
+package wastedassign + +import ( + "errors" + "fmt" + "go/ast" + "go/token" + "go/types" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/ast/inspector" + "golang.org/x/tools/go/ssa" +) + +const doc = "wastedassign finds wasted assignment statements." + +// Analyzer is the wastedassign analyzer. +var Analyzer = &analysis.Analyzer{ + Name: "wastedassign", + Doc: doc, + Run: run, + Requires: []*analysis.Analyzer{ + inspect.Analyzer, + }, +} + +type wastedAssignStruct struct { + pos token.Pos + reason string +} + +func run(pass *analysis.Pass) (interface{}, error) { + // Plundered from buildssa.Run. + prog := ssa.NewProgram(pass.Fset, ssa.NaiveForm) + + // Create SSA packages for all imports. + // Order is not significant. + created := make(map[*types.Package]bool) + var createAll func(pkgs []*types.Package) + createAll = func(pkgs []*types.Package) { + for _, p := range pkgs { + if !created[p] { + created[p] = true + prog.CreatePackage(p, nil, nil, true) + createAll(p.Imports()) + } + } + } + createAll(pass.Pkg.Imports()) + + // Create and build the primary package. + ssapkg := prog.CreatePackage(pass.Pkg, pass.Files, pass.TypesInfo, false) + ssapkg.Build() + + var srcFuncs []*ssa.Function + for _, f := range pass.Files { + for _, decl := range f.Decls { + if fdecl, ok := decl.(*ast.FuncDecl); ok { + + // SSA will not build a Function + // for a FuncDecl named blank. + // That's arguably too strict but + // relaxing it would break uniqueness of + // names of package members. + if fdecl.Name.Name == "_" { + continue + } + + // (init functions have distinct Func + // objects named "init" and distinct + // ssa.Functions named "init#1", ...) + + fn := pass.TypesInfo.Defs[fdecl.Name].(*types.Func) + if fn == nil { + return nil, errors.New("failed to get func's typesinfo") + } + + f := ssapkg.Prog.FuncValue(fn) + if f == nil { + return nil, errors.New("failed to get func's SSA-form intermediate representation") + } + + var addAnons func(f *ssa.Function) + addAnons = func(f *ssa.Function) { + srcFuncs = append(srcFuncs, f) + for _, anon := range f.AnonFuncs { + addAnons(anon) + } + } + addAnons(f) + } + } + } + + typeSwitchPos := map[int]bool{} + inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + inspect.Preorder([]ast.Node{new(ast.TypeSwitchStmt)}, func(n ast.Node) { + if _, ok := n.(*ast.TypeSwitchStmt); ok { + typeSwitchPos[pass.Fset.Position(n.Pos()).Line] = true + } + }) + + var wastedAssignMap []wastedAssignStruct + + for _, sf := range srcFuncs { + for _, bl := range sf.Blocks { + blCopy := *bl + for _, ist := range bl.Instrs { + blCopy.Instrs = rmInstrFromInstrs(blCopy.Instrs, ist) + if _, ok := ist.(*ssa.Store); !ok { + continue + } + + var buf [10]*ssa.Value + for _, op := range ist.Operands(buf[:0]) { + if (*op) == nil || !opInLocals(sf.Locals, op) { + continue + } + + reason := isNextOperationToOpIsStore([]*ssa.BasicBlock{&blCopy}, op, nil) + if reason == notWasted { + continue + } + + if ist.Pos() == 0 || typeSwitchPos[pass.Fset.Position(ist.Pos()).Line] { + continue + } + + v, ok := (*op).(*ssa.Alloc) + if !ok { + // This block should never have been executed. 
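+						// (Operands that passed the opInLocals check above come from
+						// sf.Locals, which only holds *ssa.Alloc values, so this assertion
+						// is expected to succeed; the continue is a defensive fallback.)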
+ continue + } + wastedAssignMap = append(wastedAssignMap, wastedAssignStruct{ + pos: ist.Pos(), + reason: reason.String(v), + }) + } + } + } + } + + for _, was := range wastedAssignMap { + pass.Reportf(was.pos, was.reason) + } + + return nil, nil +} + +type wastedReason string + +const ( + noUseUntilReturn wastedReason = "assigned, but never used afterwards" + reassignedSoon wastedReason = "wasted assignment" + notWasted wastedReason = "" +) + +func (wr wastedReason) String(a *ssa.Alloc) string { + switch wr { + case noUseUntilReturn: + return fmt.Sprintf("assigned to %s, but never used afterwards", a.Comment) + case reassignedSoon: + return fmt.Sprintf("assigned to %s, but reassigned without using the value", a.Comment) + case notWasted: + return "" + default: + return "" + } +} + +func isNextOperationToOpIsStore(bls []*ssa.BasicBlock, currentOp *ssa.Value, haveCheckedMap map[int]int) wastedReason { + var wastedReasons []wastedReason + var wastedReasonsCurrentBls []wastedReason + + if haveCheckedMap == nil { + haveCheckedMap = map[int]int{} + } + + for _, bl := range bls { + if haveCheckedMap[bl.Index] == 2 { + continue + } + + haveCheckedMap[bl.Index]++ + breakFlag := false + for _, ist := range bl.Instrs { + if breakFlag { + break + } + + switch w := ist.(type) { + case *ssa.Store: + var buf [10]*ssa.Value + for _, op := range ist.Operands(buf[:0]) { + if *op == *currentOp { + if w.Addr.Name() == (*currentOp).Name() { + wastedReasonsCurrentBls = append(wastedReasonsCurrentBls, reassignedSoon) + breakFlag = true + break + } else { + return notWasted + } + } + } + default: + var buf [10]*ssa.Value + for _, op := range ist.Operands(buf[:0]) { + if *op == *currentOp { + // It wasn't a continuous store. + return notWasted + } + } + } + } + + if len(bl.Succs) != 0 && !breakFlag { + wastedReason := isNextOperationToOpIsStore(rmSameBlock(bl.Succs, bl), currentOp, haveCheckedMap) + if wastedReason == notWasted { + return notWasted + } + wastedReasons = append(wastedReasons, wastedReason) + } + } + + wastedReasons = append(wastedReasons, wastedReasonsCurrentBls...) 
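+	// Prefer the more specific verdict below: if any examined path stored to the
+	// value again before reading it, report it as reassigned; otherwise the value
+	// was simply never used again before returning.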
+ + if len(wastedReasons) != 0 && containReassignedSoon(wastedReasons) { + return reassignedSoon + } + + return noUseUntilReturn +} + +func rmSameBlock(bls []*ssa.BasicBlock, currentBl *ssa.BasicBlock) []*ssa.BasicBlock { + var rto []*ssa.BasicBlock + + for _, bl := range bls { + if bl != currentBl { + rto = append(rto, bl) + } + } + return rto +} + +func containReassignedSoon(ws []wastedReason) bool { + for _, w := range ws { + if w == reassignedSoon { + return true + } + } + return false +} + +func rmInstrFromInstrs(instrs []ssa.Instruction, instrToRm ssa.Instruction) []ssa.Instruction { + var rto []ssa.Instruction + for _, i := range instrs { + if i != instrToRm { + rto = append(rto, i) + } + } + return rto +} + +func opInLocals(locals []*ssa.Alloc, op *ssa.Value) bool { + for _, l := range locals { + if *op == ssa.Value(l) { + return true + } + } + return false +} diff --git a/vendor/github.com/securego/gosec/v2/.gitignore b/vendor/github.com/securego/gosec/v2/.gitignore new file mode 100644 index 000000000..f282cda24 --- /dev/null +++ b/vendor/github.com/securego/gosec/v2/.gitignore @@ -0,0 +1,35 @@ +# transient files +/image + +# Compiled Object files, Static and Dynamic libs (Shared Objects) +*.o +*.a +*.so +*.swp +/gosec + +# Folders +_obj +_test +vendor +dist + +# Architecture specific extensions/prefixes +*.[568vq] +[568vq].out + +*.cgo1.go +*.cgo2.c +_cgo_defun.c +_cgo_gotypes.go +_cgo_export.* + +_testmain.go + +*.exe +*.test +*.prof + +.DS_Store + +.vscode diff --git a/vendor/github.com/securego/gosec/v2/.golangci.yml b/vendor/github.com/securego/gosec/v2/.golangci.yml new file mode 100644 index 000000000..dcda64666 --- /dev/null +++ b/vendor/github.com/securego/gosec/v2/.golangci.yml @@ -0,0 +1,23 @@ +linters: + enable: + - asciicheck + - bodyclose + - depguard + - dogsled + - durationcheck + - errcheck + - exportloopref + - gofmt + - gofumpt + - goimports + - gosec + - govet + - importas + - megacheck + - misspell + - nakedret + - nolintlint + - revive + - unconvert + - unparam + - wastedassign \ No newline at end of file diff --git a/vendor/github.com/securego/gosec/v2/.goreleaser.yml b/vendor/github.com/securego/gosec/v2/.goreleaser.yml new file mode 100644 index 000000000..263e522bc --- /dev/null +++ b/vendor/github.com/securego/gosec/v2/.goreleaser.yml @@ -0,0 +1,21 @@ +--- +project_name: gosec + +release: + github: + owner: securego + name: gosec + +builds: + - main : ./cmd/gosec/ + binary: gosec + goos: + - darwin + - linux + - windows + goarch: + - amd64 + - arm64 + ldflags: -X main.Version={{.Version}} -X main.GitTag={{.Tag}} -X main.BuildDate={{.Date}} + env: + - CGO_ENABLED=0 diff --git a/vendor/github.com/securego/gosec/v2/Dockerfile b/vendor/github.com/securego/gosec/v2/Dockerfile new file mode 100644 index 000000000..c937d5255 --- /dev/null +++ b/vendor/github.com/securego/gosec/v2/Dockerfile @@ -0,0 +1,15 @@ +ARG GO_VERSION +FROM golang:${GO_VERSION}-alpine AS builder +RUN apk add --update --no-cache ca-certificates make git curl gcc libc-dev +RUN mkdir -p /build +WORKDIR /build +COPY . 
/build/ +RUN go mod download +RUN make build-linux + +FROM golang:${GO_VERSION}-alpine +RUN apk add --update --no-cache ca-certificates bash git gcc libc-dev +ENV GO111MODULE on +COPY --from=builder /build/gosec /bin/gosec +COPY entrypoint.sh /bin/entrypoint.sh +ENTRYPOINT ["/bin/entrypoint.sh"] diff --git a/vendor/github.com/securego/gosec/v2/LICENSE.txt b/vendor/github.com/securego/gosec/v2/LICENSE.txt new file mode 100644 index 000000000..1756c7821 --- /dev/null +++ b/vendor/github.com/securego/gosec/v2/LICENSE.txt @@ -0,0 +1,154 @@ +Apache License + +Version 2.0, January 2004 + +http://www.apache.org/licenses/ + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. + +"License" shall mean the terms and conditions for use, reproduction, and +distribution as defined by Sections 1 through 9 of this document. + +"Licensor" shall mean the copyright owner or entity authorized by the copyright +owner that is granting the License. + +"Legal Entity" shall mean the union of the acting entity and all other entities +that control, are controlled by, or are under common control with that entity. +For the purposes of this definition, "control" means (i) the power, direct or +indirect, to cause the direction or management of such entity, whether by +contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the +outstanding shares, or (iii) beneficial ownership of such entity. + +"You" (or "Your") shall mean an individual or Legal Entity exercising +permissions granted by this License. + +"Source" form shall mean the preferred form for making modifications, including +but not limited to software source code, documentation source, and configuration +files. + +"Object" form shall mean any form resulting from mechanical transformation or +translation of a Source form, including but not limited to compiled object code, +generated documentation, and conversions to other media types. + +"Work" shall mean the work of authorship, whether in Source or Object form, made +available under the License, as indicated by a copyright notice that is included +in or attached to the work (an example is provided in the Appendix below). + +"Derivative Works" shall mean any work, whether in Source or Object form, that +is based on (or derived from) the Work and for which the editorial revisions, +annotations, elaborations, or other modifications represent, as a whole, an +original work of authorship. For the purposes of this License, Derivative Works +shall not include works that remain separable from, or merely link (or bind by +name) to the interfaces of, the Work and Derivative Works thereof. + +"Contribution" shall mean any work of authorship, including the original version +of the Work and any modifications or additions to that Work or Derivative Works +thereof, that is intentionally submitted to Licensor for inclusion in the Work +by the copyright owner or by an individual or Legal Entity authorized to submit +on behalf of the copyright owner. For the purposes of this definition, +"submitted" means any form of electronic, verbal, or written communication sent +to the Licensor or its representatives, including but not limited to +communication on electronic mailing lists, source code control systems, and +issue tracking systems that are managed by, or on behalf of, the Licensor for +the purpose of discussing and improving the Work, but excluding communication +that is conspicuously marked or otherwise designated in writing by the copyright +owner as "Not a Contribution." 
+ +"Contributor" shall mean Licensor and any individual or Legal Entity on behalf +of whom a Contribution has been received by Licensor and subsequently +incorporated within the Work. + +2. Grant of Copyright License. Subject to the terms and conditions of this +License, each Contributor hereby grants to You a perpetual, worldwide, +non-exclusive, no-charge, royalty-free, irrevocable copyright license to +reproduce, prepare Derivative Works of, publicly display, publicly perform, +sublicense, and distribute the Work and such Derivative Works in Source or +Object form. + +3. Grant of Patent License. Subject to the terms and conditions of this License, +each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, +no-charge, royalty-free, irrevocable (except as stated in this section) patent +license to make, have made, use, offer to sell, sell, import, and otherwise +transfer the Work, where such license applies only to those patent claims +licensable by such Contributor that are necessarily infringed by their +Contribution(s) alone or by combination of their Contribution(s) with the Work +to which such Contribution(s) was submitted. If You institute patent litigation +against any entity (including a cross-claim or counterclaim in a lawsuit) +alleging that the Work or a Contribution incorporated within the Work +constitutes direct or contributory patent infringement, then any patent licenses +granted to You under this License for that Work shall terminate as of the date +such litigation is filed. + +4. Redistribution. You may reproduce and distribute copies of the Work or +Derivative Works thereof in any medium, with or without modifications, and in +Source or Object form, provided that You meet the following conditions: + +You must give any other recipients of the Work or Derivative Works a copy of +this License; and You must cause any modified files to carry prominent notices +stating that You changed the files; and You must retain, in the Source form of +any Derivative Works that You distribute, all copyright, patent, trademark, and +attribution notices from the Source form of the Work, excluding those notices +that do not pertain to any part of the Derivative Works; and If the Work +includes a "NOTICE" text file as part of its distribution, then any Derivative +Works that You distribute must include a readable copy of the attribution +notices contained within such NOTICE file, excluding those notices that do not +pertain to any part of the Derivative Works, in at least one of the following +places: within a NOTICE text file distributed as part of the Derivative Works; +within the Source form or documentation, if provided along with the Derivative +Works; or, within a display generated by the Derivative Works, if and wherever +such third-party notices normally appear. The contents of the NOTICE file are +for informational purposes only and do not modify the License. You may add Your +own attribution notices within Derivative Works that You distribute, alongside +or as an addendum to the NOTICE text from the Work, provided that such +additional attribution notices cannot be construed as modifying the License. + +You may add Your own copyright statement to Your modifications and may provide +additional or different license terms and conditions for use, reproduction, or +distribution of Your modifications, or for any such Derivative Works as a whole, +provided Your use, reproduction, and distribution of the Work otherwise complies +with the conditions stated in this License. 
5. Submission of Contributions. +Unless You explicitly state otherwise, any Contribution intentionally submitted +for inclusion in the Work by You to the Licensor shall be under the terms and +conditions of this License, without any additional terms or conditions. +Notwithstanding the above, nothing herein shall supersede or modify the terms of +any separate license agreement you may have executed with Licensor regarding +such Contributions. + +6. Trademarks. This License does not grant permission to use the trade names, +trademarks, service marks, or product names of the Licensor, except as required +for reasonable and customary use in describing the origin of the Work and +reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. Unless required by applicable law or agreed to in +writing, Licensor provides the Work (and each Contributor provides its +Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +KIND, either express or implied, including, without limitation, any warranties +or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A +PARTICULAR PURPOSE. You are solely responsible for determining the +appropriateness of using or redistributing the Work and assume any risks +associated with Your exercise of permissions under this License. + +8. Limitation of Liability. In no event and under no legal theory, whether in +tort (including negligence), contract, or otherwise, unless required by +applicable law (such as deliberate and grossly negligent acts) or agreed to in +writing, shall any Contributor be liable to You for damages, including any +direct, indirect, special, incidental, or consequential damages of any character +arising as a result of this License or out of the use or inability to use the +Work (including but not limited to damages for loss of goodwill, work stoppage, +computer failure or malfunction, or any and all other commercial damages or +losses), even if such Contributor has been advised of the possibility of such +damages. + +9. Accepting Warranty or Additional Liability. While redistributing the Work or +Derivative Works thereof, You may choose to offer, and charge a fee for, +acceptance of support, warranty, indemnity, or other liability obligations +and/or rights consistent with this License. However, in accepting such +obligations, You may act only on Your own behalf and on Your sole +responsibility, not on behalf of any other Contributor, and only if You agree to +indemnify, defend, and hold each Contributor harmless for any liability incurred +by, or claims asserted against, such Contributor by reason of your accepting any +such warranty or additional liability. + +END OF TERMS AND CONDITIONS diff --git a/vendor/github.com/securego/gosec/v2/Makefile b/vendor/github.com/securego/gosec/v2/Makefile new file mode 100644 index 000000000..5974e5c08 --- /dev/null +++ b/vendor/github.com/securego/gosec/v2/Makefile @@ -0,0 +1,71 @@ +GIT_TAG?= $(shell git describe --always --tags) +BIN = gosec +FMT_CMD = $(gofmt -s -l -w $(find . 
-type f -name '*.go' -not -path './vendor/*') | tee /dev/stderr) +IMAGE_REPO = securego +BUILDFLAGS := '-w -s' +CGO_ENABLED = 0 +GO := GO111MODULE=on go +GO_NOMOD :=GO111MODULE=off go +GOPATH ?= $(shell $(GO) env GOPATH) +GOBIN ?= $(GOPATH)/bin +GOLINT ?= $(GOBIN)/golint +GOSEC ?= $(GOBIN)/gosec +GINKGO ?= $(GOBIN)/ginkgo +GO_VERSION = 1.15 + +default: + $(MAKE) build + +install-test-deps: + $(GO_NOMOD) get -u github.com/onsi/ginkgo/ginkgo + $(GO_NOMOD) get -u golang.org/x/crypto/ssh + $(GO_NOMOD) get -u github.com/lib/pq + +test: install-test-deps build fmt lint sec + $(GINKGO) -r -v + +fmt: + @echo "FORMATTING" + @FORMATTED=`$(GO) fmt ./...` + @([[ ! -z "$(FORMATTED)" ]] && printf "Fixed unformatted files:\n$(FORMATTED)") || true + +lint: + @echo "LINTING" + $(GO_NOMOD) get -u golang.org/x/lint/golint + $(GOLINT) -set_exit_status ./... + @echo "VETTING" + $(GO) vet ./... + +sec: + @echo "SECURITY SCANNING" + ./$(BIN) ./... + +test-coverage: install-test-deps + go test -race -v -count=1 -coverprofile=coverage.out ./... + +build: + go build -o $(BIN) ./cmd/gosec/ + +clean: + rm -rf build vendor dist coverage.txt + rm -f release image $(BIN) + +release: + @echo "Releasing the gosec binary..." + goreleaser release + +build-linux: + CGO_ENABLED=$(CGO_ENABLED) GOOS=linux GOARCH=amd64 go build -ldflags $(BUILDFLAGS) -o $(BIN) ./cmd/gosec/ + +image: + @echo "Building the Docker image..." + docker build -t $(IMAGE_REPO)/$(BIN):$(GIT_TAG) --build-arg GO_VERSION=$(GO_VERSION) . + docker tag $(IMAGE_REPO)/$(BIN):$(GIT_TAG) $(IMAGE_REPO)/$(BIN):latest + touch image + +image-push: image + @echo "Pushing the Docker image..." + docker push $(IMAGE_REPO)/$(BIN):$(GIT_TAG) + docker push $(IMAGE_REPO)/$(BIN):latest + +.PHONY: test build clean release image image-push diff --git a/vendor/github.com/securego/gosec/v2/README.md b/vendor/github.com/securego/gosec/v2/README.md new file mode 100644 index 000000000..e69e41684 --- /dev/null +++ b/vendor/github.com/securego/gosec/v2/README.md @@ -0,0 +1,401 @@ + +# gosec - Golang Security Checker + +Inspects source code for security problems by scanning the Go AST. + + + +## License + +Licensed under the Apache License, Version 2.0 (the "License"). +You may not use this file except in compliance with the License. +You may obtain a copy of the License [here](http://www.apache.org/licenses/LICENSE-2.0). 
+ +## Project status + +[![CII Best Practices](https://bestpractices.coreinfrastructure.org/projects/3218/badge)](https://bestpractices.coreinfrastructure.org/projects/3218) +[![Build Status](https://github.com/securego/gosec/workflows/CI/badge.svg)](https://github.com/securego/gosec/actions?query=workflows%3ACI) +[![Coverage Status](https://codecov.io/gh/securego/gosec/branch/master/graph/badge.svg)](https://codecov.io/gh/securego/gosec) +[![GoReport](https://goreportcard.com/badge/github.com/securego/gosec)](https://goreportcard.com/report/github.com/securego/gosec) +[![GoDoc](https://pkg.go.dev/badge/github.com/securego/gosec/v2)](https://pkg.go.dev/github.com/securego/gosec/v2) +[![Docs](https://readthedocs.org/projects/docs/badge/?version=latest)](https://securego.io/) +[![Downloads](https://img.shields.io/github/downloads/securego/gosec/total.svg)](https://github.com/securego/gosec/releases) +[![Docker Pulls](https://img.shields.io/docker/pulls/securego/gosec.svg)](https://hub.docker.com/r/securego/gosec/tags) +[![Slack](http://securego.herokuapp.com/badge.svg)](http://securego.herokuapp.com) + +## Install + +### CI Installation + +```bash +# binary will be $(go env GOPATH)/bin/gosec +curl -sfL https://raw.githubusercontent.com/securego/gosec/master/install.sh | sh -s -- -b $(go env GOPATH)/bin vX.Y.Z + +# or install it into ./bin/ +curl -sfL https://raw.githubusercontent.com/securego/gosec/master/install.sh | sh -s vX.Y.Z + +# In alpine linux (as it does not come with curl by default) +wget -O - -q https://raw.githubusercontent.com/securego/gosec/master/install.sh | sh -s vX.Y.Z + +# If you want to use the checksums provided on the "Releases" page +# then you will have to download a tar.gz file for your operating system instead of a binary file +wget https://github.com/securego/gosec/releases/download/vX.Y.Z/gosec_vX.Y.Z_OS.tar.gz + +# The file will be in the current folder where you run the command +# and you can check the checksum like this +echo " gosec_vX.Y.Z_OS.tar.gz" | sha256sum -c - + +gosec --help +``` + +### GitHub Action + +You can run `gosec` as a GitHub action as follows: + +```yaml +name: Run Gosec +on: + push: + branches: + - master + pull_request: + branches: + - master +jobs: + tests: + runs-on: ubuntu-latest + env: + GO111MODULE: on + steps: + - name: Checkout Source + uses: actions/checkout@v2 + - name: Run Gosec Security Scanner + uses: securego/gosec@master + with: + args: ./... +``` + +### Integrating with code scanning + +You can [integrate third-party code analysis tools](https://docs.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/integrating-with-code-scanning) with GitHub code scanning by uploading data as SARIF files. + +The workflow shows an example of running the `gosec` as a step in a GitHub action workflow which outputs the `results.sarif` file. The workflow then uploads the `results.sarif` file to GitHub using the `upload-sarif` action. + +```yaml +name: "Security Scan" + +# Run workflow each time code is pushed to your repository and on a schedule. +# The scheduled workflow runs every at 00:00 on Sunday UTC time. +on: + push: + schedule: + - cron: '0 0 * * 0' + +jobs: + tests: + runs-on: ubuntu-latest + env: + GO111MODULE: on + steps: + - name: Checkout Source + uses: actions/checkout@v2 + - name: Run Gosec Security Scanner + uses: securego/gosec@master + with: + # we let the report trigger content trigger a failure using the GitHub Security features. + args: '-no-fail -fmt sarif -out results.sarif ./...' 
+ - name: Upload SARIF file + uses: github/codeql-action/upload-sarif@v1 + with: + # Path to SARIF file relative to the root of the repository + sarif_file: results.sarif +``` + +### Local Installation + +```bash +go get -u github.com/securego/gosec/v2/cmd/gosec +``` + +## Usage + +Gosec can be configured to only run a subset of rules, to exclude certain file +paths, and produce reports in different formats. By default all rules will be +run against the supplied input files. To recursively scan from the current +directory you can supply `./...` as the input argument. + +### Available rules + +- G101: Look for hard coded credentials +- G102: Bind to all interfaces +- G103: Audit the use of unsafe block +- G104: Audit errors not checked +- G106: Audit the use of ssh.InsecureIgnoreHostKey +- G107: Url provided to HTTP request as taint input +- G108: Profiling endpoint automatically exposed on /debug/pprof +- G109: Potential Integer overflow made by strconv.Atoi result conversion to int16/32 +- G110: Potential DoS vulnerability via decompression bomb +- G201: SQL query construction using format string +- G202: SQL query construction using string concatenation +- G203: Use of unescaped data in HTML templates +- G204: Audit use of command execution +- G301: Poor file permissions used when creating a directory +- G302: Poor file permissions used with chmod +- G303: Creating tempfile using a predictable path +- G304: File path provided as taint input +- G305: File traversal when extracting zip/tar archive +- G306: Poor file permissions used when writing to a new file +- G307: Deferring a method which returns an error +- G401: Detect the usage of DES, RC4, MD5 or SHA1 +- G402: Look for bad TLS connection settings +- G403: Ensure minimum RSA key length of 2048 bits +- G404: Insecure random number source (rand) +- G501: Import blocklist: crypto/md5 +- G502: Import blocklist: crypto/des +- G503: Import blocklist: crypto/rc4 +- G504: Import blocklist: net/http/cgi +- G505: Import blocklist: crypto/sha1 +- G601: Implicit memory aliasing of items from a range statement + +### Retired rules + +- G105: Audit the use of math/big.Int.Exp - [CVE is fixed](https://github.com/golang/go/issues/15184) + +### Selecting rules + +By default, gosec will run all rules against the supplied file paths. It is however possible to select a subset of rules to run via the `-include=` flag, +or to specify a set of rules to explicitly exclude using the `-exclude=` flag. + +```bash +# Run a specific set of rules +$ gosec -include=G101,G203,G401 ./... + +# Run everything except for rule G303 +$ gosec -exclude=G303 ./... +``` + +### CWE Mapping + +Every issue detected by `gosec` is mapped to a [CWE (Common Weakness Enumeration)](http://cwe.mitre.org/data/index.html) which describes in more generic terms the vulnerability. The exact mapping can be found [here](https://github.com/securego/gosec/blob/master/issue.go#L50). + +### Configuration + +A number of global settings can be provided in a configuration file as follows: + +```JSON +{ + "global": { + "nosec": "enabled", + "audit": "enabled" + } +} +``` + +- `nosec`: this setting will overwrite all `#nosec` directives defined throughout the code base +- `audit`: runs in audit mode which enables addition checks that for normal code analysis might be too nosy + +```bash +# Run with a global configuration file +$ gosec -conf config.json . +``` + +Also some rules accept configuration. 
For instance on rule `G104`, it is possible to define packages along with a list +of functions which will be skipped when auditing the not checked errors: + +```JSON +{ + "G104": { + "io/ioutil": ["WriteFile"] + } +} +``` + +You can also configure the hard-coded credentials rule `G101` with additional patters, or adjust the entropy threshold: + +```JSON +{ + "G101": { + "pattern": "(?i)passwd|pass|password|pwd|secret|private_key|token", + "ignore_entropy": false, + "entropy_threshold": "80.0", + "per_char_threshold": "3.0", + "truncate": "32" + } +} +``` + +### Dependencies + +gosec will fetch automatically the dependencies of the code which is being analyzed when go module is turned on (e.g.`GO111MODULE=on`). If this is not the case, +the dependencies need to be explicitly downloaded by running the `go get -d` command before the scan. + +### Excluding test files and folders + +gosec will ignore test files across all packages and any dependencies in your vendor directory. + +The scanning of test files can be enabled with the following flag: + +```bash + +gosec -tests ./... +``` + +Also additional folders can be excluded as follows: + +```bash + gosec -exclude-dir=rules -exclude-dir=cmd ./... +``` + +### Annotating code + +As with all automated detection tools, there will be cases of false positives. In cases where gosec reports a failure that has been manually verified as being safe, +it is possible to annotate the code with a `#nosec` comment. + +The annotation causes gosec to stop processing any further nodes within the +AST so can apply to a whole block or more granularly to a single expression. + +```go + +import "md5" // #nosec + + +func main(){ + + /* #nosec */ + if x > y { + h := md5.New() // this will also be ignored + } + +} + +``` + +When a specific false positive has been identified and verified as safe, you may wish to suppress only that single rule (or a specific set of rules) +within a section of code, while continuing to scan for other problems. To do this, you can list the rule(s) to be suppressed within +the `#nosec` annotation, e.g: `/* #nosec G401 */` or `// #nosec G201 G202 G203` + +In some cases you may also want to revisit places where `#nosec` annotations +have been used. To run the scanner and ignore any `#nosec` annotations you +can do the following: + +```bash +gosec -nosec=true ./... +``` + +### Build tags + +gosec is able to pass your [Go build tags](https://golang.org/pkg/go/build/) to the analyzer. +They can be provided as a comma separated list as follows: + +```bash +gosec -tag debug,ignore ./... +``` + +### Output formats + +gosec currently supports `text`, `json`, `yaml`, `csv`, `sonarqube`, `JUnit XML`, `html` and `golint` output formats. By default +results will be reported to stdout, but can also be written to an output +file. The output format is controlled by the `-fmt` flag, and the output file is controlled by the `-out` flag as follows: + +```bash +# Write output in json format to results.json +$ gosec -fmt=json -out=results.json *.go +``` + +Results will be reported to stdout as well as to the provided output file by `-stdout` flag. 
The `-verbose` flag overrides the +output format when stdout the results while saving them in the output file +```bash +# Write output in json format to results.json as well as stdout +$ gosec -fmt=json -out=results.json -stdout *.go + +# Overrides the output format to 'text' when stdout the results, while writing it to results.json +$ gosec -fmt=json -out=results.json -stdout -verbose=text *.go +``` + +**Note:** gosec generates the [generic issue import format](https://docs.sonarqube.org/latest/analysis/generic-issue/) for SonarQube, and a report has to be imported into SonarQube using `sonar.externalIssuesReportPaths=path/to/gosec-report.json`. + +## Development + +### Build + +You can build the binary with: + +```bash +make +``` + +### Note on Sarif Types Generation + +Install the tool with : + +```bash +go get -u github.com/a-h/generate/cmd/schema-generate +``` + +Then generate the types with : + +```bash +schema-generate -i sarif-schema-2.1.0.json -o mypath/types.go +``` + +Most of the MarshallJSON/UnmarshalJSON are removed except the one for PropertyBag which is handy to inline the additionnal properties. The rest can be removed. +The URI,ID, UUID, GUID were renamed so it fits the Golang convention defined [here](https://github.com/golang/lint/blob/master/lint.go#L700) + +### Tests + +You can run all unit tests using: + +```bash +make test +``` + +### Release + +You can create a release by tagging the version as follows: + +``` bash +git tag v1.0.0 -m "Release version v1.0.0" +git push origin v1.0.0 +``` + +The GitHub [release workflow](.github/workflows/release.yml) triggers immediately after the tag is pushed upstream. This flow will +release the binaries using the [goreleaser](https://goreleaser.com/actions/) action and then it will build and publish the docker image into Docker Hub. + +### Docker image + +You can also build locally the docker image by using the command: + +```bash +make image +``` + +You can run the `gosec` tool in a container against your local Go project. You only have to mount the project +into a volume as follows: + +```bash +docker run --rm -it -w // -v /:/ securego/gosec //... +``` + +**Note:** the current working directory needs to be set with `-w` option in order to get successfully resolved the dependencies from go module file + +### Generate TLS rule + +The configuration of TLS rule can be generated from [Mozilla's TLS ciphers recommendation](https://statics.tls.security.mozilla.org/server-side-tls-conf.json). + +First you need to install the generator tool: + +```bash +go get github.com/securego/gosec/v2/cmd/tlsconfig/... +``` + +You can invoke now the `go generate` in the root of the project: + +```bash +go generate ./... +``` + +This will generate the `rules/tls_config.go` file which will contain the current ciphers recommendation from Mozilla. + +## Who is using gosec? + +This is a [list](USERS.md) with some of the gosec's users. diff --git a/vendor/github.com/securego/gosec/v2/USERS.md b/vendor/github.com/securego/gosec/v2/USERS.md new file mode 100644 index 000000000..73369ceec --- /dev/null +++ b/vendor/github.com/securego/gosec/v2/USERS.md @@ -0,0 +1,28 @@ +# Users + +This is a list of gosec's users. Please send a pull request with your organisation or project name if you are using gosec. + +## Companies + +1. [Gitlab](https://docs.gitlab.com/ee/user/application_security/sast/) +2. [CloudBees](https://cloudbees.com) +3. [VMware](https://www.vmware.com) +4. [Codacy](https://support.codacy.com/hc/en-us/articles/213632009-Engines) +5. 
[Coinbase](https://github.com/coinbase/watchdog/blob/master/Makefile#L12) +6. [RedHat/OpenShift](https://github.com/openshift/openshift-azure) +7. [Guardalis](https://www.guardrails.io/) +8. [1Password](https://github.com/1Password/srp) +9. [PingCAP/tidb](https://github.com/pingcap/tidb) +10. [Checkmarx](https://www.checkmarx.com/) + +## Projects + +1. [golangci-lint](https://github.com/golangci/golangci-lint) +2. [Kubernetes](https://github.com/kubernetes/kubernetes) (via golangci) +3. [caddy](https://github.com/caddyserver/caddy) (via golangci) +4. [Jenkins X](https://github.com/jenkins-x/jx/blob/bdc51840a41b75776159c1c7b7faa1cf477be473/hack/linter.sh#L25) +5. [HuskyCI](https://huskyci.opensource.globo.com/) +6. [GolangCI](https://golangci.com/) +7. [semgrep.live](https://semgrep.live/) +8. [gofiber](https://github.com/gofiber/fiber) +9. [KICS](https://github.com/Checkmarx/kics) diff --git a/vendor/github.com/securego/gosec/v2/action.yml b/vendor/github.com/securego/gosec/v2/action.yml new file mode 100644 index 000000000..aab6c8039 --- /dev/null +++ b/vendor/github.com/securego/gosec/v2/action.yml @@ -0,0 +1,19 @@ +name: 'Gosec Security Checker' +description: 'Runs the gosec security checker' +author: '@ccojocar' + +inputs: + args: + description: 'Arguments for gosec' + required: true + default: '-h' + +runs: + using: 'docker' + image: 'docker://securego/gosec' + args: + - ${{ inputs.args }} + +branding: + icon: 'shield' + color: 'blue' diff --git a/vendor/github.com/securego/gosec/v2/analyzer.go b/vendor/github.com/securego/gosec/v2/analyzer.go new file mode 100644 index 000000000..f669d5ae0 --- /dev/null +++ b/vendor/github.com/securego/gosec/v2/analyzer.go @@ -0,0 +1,375 @@ +// (c) Copyright 2016 Hewlett Packard Enterprise Development LP +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package gosec holds the central scanning logic used by gosec security scanner +package gosec + +import ( + "fmt" + "go/ast" + "go/build" + "go/token" + "go/types" + "log" + "os" + "path" + "path/filepath" + "reflect" + "regexp" + "strconv" + "strings" + + "golang.org/x/tools/go/packages" +) + +// LoadMode controls the amount of details to return when loading the packages +const LoadMode = packages.NeedName | + packages.NeedFiles | + packages.NeedCompiledGoFiles | + packages.NeedImports | + packages.NeedTypes | + packages.NeedTypesSizes | + packages.NeedTypesInfo | + packages.NeedSyntax + +// The Context is populated with data parsed from the source code as it is scanned. +// It is passed through to all rule functions as they are called. Rules may use +// this data in conjunction withe the encountered AST node. +type Context struct { + FileSet *token.FileSet + Comments ast.CommentMap + Info *types.Info + Pkg *types.Package + PkgFiles []*ast.File + Root *ast.File + Config Config + Imports *ImportTracker + Ignores []map[string]bool + PassedValues map[string]interface{} +} + +// Metrics used when reporting information about a scanning run. 
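+// When marshalled to JSON, a Metrics value looks roughly like
+// (illustrative numbers): {"files": 10, "lines": 1234, "nosec": 2, "found": 5}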
+type Metrics struct { + NumFiles int `json:"files"` + NumLines int `json:"lines"` + NumNosec int `json:"nosec"` + NumFound int `json:"found"` +} + +// Analyzer object is the main object of gosec. It has methods traverse an AST +// and invoke the correct checking rules as on each node as required. +type Analyzer struct { + ignoreNosec bool + ruleset RuleSet + context *Context + config Config + logger *log.Logger + issues []*Issue + stats *Metrics + errors map[string][]Error // keys are file paths; values are the golang errors in those files + tests bool +} + +// NewAnalyzer builds a new analyzer. +func NewAnalyzer(conf Config, tests bool, logger *log.Logger) *Analyzer { + ignoreNoSec := false + if enabled, err := conf.IsGlobalEnabled(Nosec); err == nil { + ignoreNoSec = enabled + } + if logger == nil { + logger = log.New(os.Stderr, "[gosec]", log.LstdFlags) + } + return &Analyzer{ + ignoreNosec: ignoreNoSec, + ruleset: make(RuleSet), + context: &Context{}, + config: conf, + logger: logger, + issues: make([]*Issue, 0, 16), + stats: &Metrics{}, + errors: make(map[string][]Error), + tests: tests, + } +} + +// SetConfig upates the analyzer configuration +func (gosec *Analyzer) SetConfig(conf Config) { + gosec.config = conf +} + +// Config returns the current configuration +func (gosec *Analyzer) Config() Config { + return gosec.config +} + +// LoadRules instantiates all the rules to be used when analyzing source +// packages +func (gosec *Analyzer) LoadRules(ruleDefinitions map[string]RuleBuilder) { + for id, def := range ruleDefinitions { + r, nodes := def(id, gosec.config) + gosec.ruleset.Register(r, nodes...) + } +} + +// Process kicks off the analysis process for a given package +func (gosec *Analyzer) Process(buildTags []string, packagePaths ...string) error { + config := &packages.Config{ + Mode: LoadMode, + BuildFlags: buildTags, + Tests: gosec.tests, + } + + for _, pkgPath := range packagePaths { + pkgs, err := gosec.load(pkgPath, config) + if err != nil { + gosec.AppendError(pkgPath, err) + } + for _, pkg := range pkgs { + if pkg.Name != "" { + err := gosec.ParseErrors(pkg) + if err != nil { + return fmt.Errorf("parsing errors in pkg %q: %v", pkg.Name, err) + } + gosec.Check(pkg) + } + } + } + sortErrors(gosec.errors) + return nil +} + +func (gosec *Analyzer) load(pkgPath string, conf *packages.Config) ([]*packages.Package, error) { + abspath, err := GetPkgAbsPath(pkgPath) + if err != nil { + gosec.logger.Printf("Skipping: %s. Path doesn't exist.", abspath) + return []*packages.Package{}, nil + } + + gosec.logger.Println("Import directory:", abspath) + // step 1/3 create build context. + buildD := build.Default + // step 2/3: add build tags to get env dependent files into basePackage. + buildD.BuildTags = conf.BuildFlags + basePackage, err := buildD.ImportDir(pkgPath, build.ImportComment) + if err != nil { + return []*packages.Package{}, fmt.Errorf("importing dir %q: %v", pkgPath, err) + } + + var packageFiles []string + for _, filename := range basePackage.GoFiles { + packageFiles = append(packageFiles, path.Join(pkgPath, filename)) + } + for _, filename := range basePackage.CgoFiles { + packageFiles = append(packageFiles, path.Join(pkgPath, filename)) + } + + if gosec.tests { + testsFiles := []string{} + testsFiles = append(testsFiles, basePackage.TestGoFiles...) + testsFiles = append(testsFiles, basePackage.XTestGoFiles...) 
+ for _, filename := range testsFiles { + packageFiles = append(packageFiles, path.Join(pkgPath, filename)) + } + } + + // step 3/3 remove build tags from conf to proceed build correctly. + conf.BuildFlags = nil + pkgs, err := packages.Load(conf, packageFiles...) + if err != nil { + return []*packages.Package{}, fmt.Errorf("loading files from package %q: %v", pkgPath, err) + } + return pkgs, nil +} + +// Check runs analysis on the given package +func (gosec *Analyzer) Check(pkg *packages.Package) { + gosec.logger.Println("Checking package:", pkg.Name) + for _, file := range pkg.Syntax { + checkedFile := pkg.Fset.File(file.Pos()).Name() + // Skip the no-Go file from analysis (e.g. a Cgo files is expanded in 3 different files + // stored in the cache which do not need to by analyzed) + if filepath.Ext(checkedFile) != ".go" { + continue + } + gosec.logger.Println("Checking file:", checkedFile) + gosec.context.FileSet = pkg.Fset + gosec.context.Config = gosec.config + gosec.context.Comments = ast.NewCommentMap(gosec.context.FileSet, file, file.Comments) + gosec.context.Root = file + gosec.context.Info = pkg.TypesInfo + gosec.context.Pkg = pkg.Types + gosec.context.PkgFiles = pkg.Syntax + gosec.context.Imports = NewImportTracker() + gosec.context.Imports.TrackFile(file) + gosec.context.PassedValues = make(map[string]interface{}) + ast.Walk(gosec, file) + gosec.stats.NumFiles++ + gosec.stats.NumLines += pkg.Fset.File(file.Pos()).LineCount() + } +} + +// ParseErrors parses the errors from given package +func (gosec *Analyzer) ParseErrors(pkg *packages.Package) error { + if len(pkg.Errors) == 0 { + return nil + } + for _, pkgErr := range pkg.Errors { + parts := strings.Split(pkgErr.Pos, ":") + file := parts[0] + var err error + var line int + if len(parts) > 1 { + if line, err = strconv.Atoi(parts[1]); err != nil { + return fmt.Errorf("parsing line: %v", err) + } + } + var column int + if len(parts) > 2 { + if column, err = strconv.Atoi(parts[2]); err != nil { + return fmt.Errorf("parsing column: %v", err) + } + } + msg := strings.TrimSpace(pkgErr.Msg) + newErr := NewError(line, column, msg) + if errSlice, ok := gosec.errors[file]; ok { + gosec.errors[file] = append(errSlice, *newErr) + } else { + errSlice = []Error{} + gosec.errors[file] = append(errSlice, *newErr) + } + } + return nil +} + +// AppendError appends an error to the file errors +func (gosec *Analyzer) AppendError(file string, err error) { + // Do not report the error for empty packages (e.g. files excluded from build with a tag) + r := regexp.MustCompile(`no buildable Go source files in`) + if r.MatchString(err.Error()) { + return + } + errors := []Error{} + if ferrs, ok := gosec.errors[file]; ok { + errors = ferrs + } + ferr := NewError(0, 0, err.Error()) + errors = append(errors, *ferr) + gosec.errors[file] = errors +} + +// ignore a node (and sub-tree) if it is tagged with a nosec tag comment +func (gosec *Analyzer) ignore(n ast.Node) ([]string, bool) { + if groups, ok := gosec.context.Comments[n]; ok && !gosec.ignoreNosec { + + // Checks if an alternative for #nosec is set and, if not, uses the default. 
+ noSecDefaultTag := "#nosec" + noSecAlternativeTag, err := gosec.config.GetGlobal(NoSecAlternative) + if err != nil { + noSecAlternativeTag = noSecDefaultTag + } + + for _, group := range groups { + + foundDefaultTag := strings.Contains(group.Text(), noSecDefaultTag) + foundAlternativeTag := strings.Contains(group.Text(), noSecAlternativeTag) + + if foundDefaultTag || foundAlternativeTag { + gosec.stats.NumNosec++ + + // Pull out the specific rules that are listed to be ignored. + re := regexp.MustCompile(`(G\d{3})`) + matches := re.FindAllStringSubmatch(group.Text(), -1) + + // If no specific rules were given, ignore everything. + if len(matches) == 0 { + return nil, true + } + + // Find the rule IDs to ignore. + var ignores []string + for _, v := range matches { + ignores = append(ignores, v[1]) + } + return ignores, false + } + } + } + return nil, false +} + +// Visit runs the gosec visitor logic over an AST created by parsing go code. +// Rule methods added with AddRule will be invoked as necessary. +func (gosec *Analyzer) Visit(n ast.Node) ast.Visitor { + // If we've reached the end of this branch, pop off the ignores stack. + if n == nil { + if len(gosec.context.Ignores) > 0 { + gosec.context.Ignores = gosec.context.Ignores[1:] + } + return gosec + } + + // Get any new rule exclusions. + ignoredRules, ignoreAll := gosec.ignore(n) + if ignoreAll { + return nil + } + + // Now create the union of exclusions. + ignores := map[string]bool{} + if len(gosec.context.Ignores) > 0 { + for k, v := range gosec.context.Ignores[0] { + ignores[k] = v + } + } + + for _, v := range ignoredRules { + ignores[v] = true + } + + // Push the new set onto the stack. + gosec.context.Ignores = append([]map[string]bool{ignores}, gosec.context.Ignores...) + + // Track aliased and initialization imports + gosec.context.Imports.TrackImport(n) + + for _, rule := range gosec.ruleset.RegisteredFor(n) { + if _, ok := ignores[rule.ID()]; ok { + continue + } + issue, err := rule.Match(n, gosec.context) + if err != nil { + file, line := GetLocation(n, gosec.context) + file = path.Base(file) + gosec.logger.Printf("Rule error: %v => %s (%s:%d)\n", reflect.TypeOf(rule), err, file, line) + } + if issue != nil { + gosec.issues = append(gosec.issues, issue) + gosec.stats.NumFound++ + } + } + return gosec +} + +// Report returns the current issues discovered and the metrics about the scan +func (gosec *Analyzer) Report() ([]*Issue, *Metrics, map[string][]Error) { + return gosec.issues, gosec.stats, gosec.errors +} + +// Reset clears state such as context, issues and metrics from the configured analyzer +func (gosec *Analyzer) Reset() { + gosec.context = &Context{} + gosec.issues = make([]*Issue, 0, 16) + gosec.stats = &Metrics{} + gosec.ruleset = NewRuleSet() +} diff --git a/vendor/github.com/securego/gosec/v2/call_list.go b/vendor/github.com/securego/gosec/v2/call_list.go new file mode 100644 index 000000000..4b3fcf057 --- /dev/null +++ b/vendor/github.com/securego/gosec/v2/call_list.go @@ -0,0 +1,109 @@ +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package gosec + +import ( + "go/ast" + "strings" +) + +const vendorPath = "vendor/" + +type set map[string]bool + +// CallList is used to check for usage of specific packages +// and functions. +type CallList map[string]set + +// NewCallList creates a new empty CallList +func NewCallList() CallList { + return make(CallList) +} + +// AddAll will add several calls to the call list at once +func (c CallList) AddAll(selector string, idents ...string) { + for _, ident := range idents { + c.Add(selector, ident) + } +} + +// Add a selector and call to the call list +func (c CallList) Add(selector, ident string) { + if _, ok := c[selector]; !ok { + c[selector] = make(set) + } + c[selector][ident] = true +} + +// Contains returns true if the package and function are +/// members of this call list. +func (c CallList) Contains(selector, ident string) bool { + if idents, ok := c[selector]; ok { + _, found := idents[ident] + return found + } + return false +} + +// ContainsPointer returns true if a pointer to the selector type or the type +// itself is a members of this call list. +func (c CallList) ContainsPointer(selector, indent string) bool { + if strings.HasPrefix(selector, "*") { + if c.Contains(selector, indent) { + return true + } + s := strings.TrimPrefix(selector, "*") + return c.Contains(s, indent) + } + return false +} + +// ContainsPkgCallExpr resolves the call expression name and type, and then further looks +// up the package path for that type. Finally, it determines if the call exists within the call list +func (c CallList) ContainsPkgCallExpr(n ast.Node, ctx *Context, stripVendor bool) *ast.CallExpr { + selector, ident, err := GetCallInfo(n, ctx) + if err != nil { + return nil + } + + // Use only explicit path (optionally strip vendor path prefix) to reduce conflicts + path, ok := GetImportPath(selector, ctx) + if !ok { + return nil + } + if stripVendor { + if vendorIdx := strings.Index(path, vendorPath); vendorIdx >= 0 { + path = path[vendorIdx+len(vendorPath):] + } + } + if !c.Contains(path, ident) { + return nil + } + + return n.(*ast.CallExpr) +} + +// ContainsCallExpr resolves the call expression name and type, and then determines +// if the call exists with the call list +func (c CallList) ContainsCallExpr(n ast.Node, ctx *Context) *ast.CallExpr { + selector, ident, err := GetCallInfo(n, ctx) + if err != nil { + return nil + } + if !c.Contains(selector, ident) && !c.ContainsPointer(selector, ident) { + return nil + } + + return n.(*ast.CallExpr) +} diff --git a/vendor/github.com/securego/gosec/v2/config.go b/vendor/github.com/securego/gosec/v2/config.go new file mode 100644 index 000000000..4af62b295 --- /dev/null +++ b/vendor/github.com/securego/gosec/v2/config.go @@ -0,0 +1,125 @@ +package gosec + +import ( + "bytes" + "encoding/json" + "fmt" + "io" + "io/ioutil" +) + +const ( + // Globals are applicable to all rules and used for general + // configuration settings for gosec. + Globals = "global" +) + +// GlobalOption defines the name of the global options +type GlobalOption string + +const ( + // Nosec global option for #nosec directive + Nosec GlobalOption = "nosec" + // Audit global option which indicates that gosec runs in audit mode + Audit GlobalOption = "audit" + // NoSecAlternative global option alternative for #nosec directive + NoSecAlternative GlobalOption = "#nosec" +) + +// Config is used to provide configuration and customization to each of the rules. 
+type Config map[string]interface{} + +// NewConfig initializes a new configuration instance. The configuration data then +// needs to be loaded via c.ReadFrom(strings.NewReader("config data")) +// or from a *os.File. +func NewConfig() Config { + cfg := make(Config) + cfg[Globals] = make(map[GlobalOption]string) + return cfg +} + +func (c Config) keyToGlobalOptions(key string) GlobalOption { + return GlobalOption(key) +} + +func (c Config) convertGlobals() { + if globals, ok := c[Globals]; ok { + if settings, ok := globals.(map[string]interface{}); ok { + validGlobals := map[GlobalOption]string{} + for k, v := range settings { + validGlobals[c.keyToGlobalOptions(k)] = fmt.Sprintf("%v", v) + } + c[Globals] = validGlobals + } + } +} + +// ReadFrom implements the io.ReaderFrom interface. This +// should be used with io.Reader to load configuration from +// file or from string etc. +func (c Config) ReadFrom(r io.Reader) (int64, error) { + data, err := ioutil.ReadAll(r) + if err != nil { + return int64(len(data)), err + } + if err = json.Unmarshal(data, &c); err != nil { + return int64(len(data)), err + } + c.convertGlobals() + return int64(len(data)), nil +} + +// WriteTo implements the io.WriteTo interface. This should +// be used to save or print out the configuration information. +func (c Config) WriteTo(w io.Writer) (int64, error) { + data, err := json.Marshal(c) + if err != nil { + return int64(len(data)), err + } + return io.Copy(w, bytes.NewReader(data)) +} + +// Get returns the configuration section for the supplied key +func (c Config) Get(section string) (interface{}, error) { + settings, found := c[section] + if !found { + return nil, fmt.Errorf("Section %s not in configuration", section) + } + return settings, nil +} + +// Set section in the configuration to specified value +func (c Config) Set(section string, value interface{}) { + c[section] = value +} + +// GetGlobal returns value associated with global configuration option +func (c Config) GetGlobal(option GlobalOption) (string, error) { + if globals, ok := c[Globals]; ok { + if settings, ok := globals.(map[GlobalOption]string); ok { + if value, ok := settings[option]; ok { + return value, nil + } + return "", fmt.Errorf("global setting for %s not found", option) + } + } + return "", fmt.Errorf("no global config options found") +} + +// SetGlobal associates a value with a global configuration option +func (c Config) SetGlobal(option GlobalOption, value string) { + if globals, ok := c[Globals]; ok { + if settings, ok := globals.(map[GlobalOption]string); ok { + settings[option] = value + } + } +} + +// IsGlobalEnabled checks if a global option is enabled +func (c Config) IsGlobalEnabled(option GlobalOption) (bool, error) { + value, err := c.GetGlobal(option) + if err != nil { + return false, err + } + return (value == "true" || value == "enabled"), nil +} diff --git a/vendor/github.com/securego/gosec/v2/cwe/data.go b/vendor/github.com/securego/gosec/v2/cwe/data.go new file mode 100644 index 000000000..8789ddd63 --- /dev/null +++ b/vendor/github.com/securego/gosec/v2/cwe/data.go @@ -0,0 +1,143 @@ +package cwe + +import "fmt" + +const ( + // Acronym is the acronym of CWE + Acronym = "CWE" + // Version the CWE version + Version = "4.4" + // ReleaseDateUtc the release Date of CWE Version + ReleaseDateUtc = "2021-03-15" + // Organization MITRE + Organization = "MITRE" + // Description the description of CWE + Description = "The MITRE Common Weakness Enumeration" +) + +var ( + // InformationURI link to the published CWE PDF + 
InformationURI = fmt.Sprintf("https://cwe.mitre.org/data/published/cwe_v%s.pdf/", Version) + // DownloadURI link to the zipped XML of the CWE list + DownloadURI = fmt.Sprintf("https://cwe.mitre.org/data/xml/cwec_v%s.xml.zip", Version) + + data = map[string]*Weakness{} + + weaknesses = []*Weakness{ + { + ID: "118", + Description: "The software does not restrict or incorrectly restricts operations within the boundaries of a resource that is accessed using an index or pointer, such as memory or files.", + Name: "Incorrect Access of Indexable Resource ('Range Error')", + }, + { + ID: "190", + Description: "The software performs a calculation that can produce an integer overflow or wraparound, when the logic assumes that the resulting value will always be larger than the original value. This can introduce other weaknesses when the calculation is used for resource management or execution control.", + Name: "Integer Overflow or Wraparound", + }, + { + ID: "200", + Description: "The product exposes sensitive information to an actor that is not explicitly authorized to have access to that information.", + Name: "Exposure of Sensitive Information to an Unauthorized Actor", + }, + { + ID: "22", + Description: "The software uses external input to construct a pathname that is intended to identify a file or directory that is located underneath a restricted parent directory, but the software does not properly neutralize special elements within the pathname that can cause the pathname to resolve to a location that is outside of the restricted directory.", + Name: "Improper Limitation of a Pathname to a Restricted Directory ('Path Traversal')", + }, + { + ID: "242", + Description: "The program calls a function that can never be guaranteed to work safely.", + Name: "Use of Inherently Dangerous Function", + }, + { + ID: "276", + Description: "During installation, installed file permissions are set to allow anyone to modify those files.", + Name: "Incorrect Default Permissions", + }, + { + ID: "295", + Description: "The software does not validate, or incorrectly validates, a certificate.", + Name: "Improper Certificate Validation", + }, + { + ID: "310", + Description: "Weaknesses in this category are related to the design and implementation of data confidentiality and integrity. Frequently these deal with the use of encoding techniques, encryption libraries, and hashing algorithms. 
The weaknesses in this category could lead to a degradation of the quality data if they are not addressed.", + Name: "Cryptographic Issues", + }, + { + ID: "322", + Description: "The software performs a key exchange with an actor without verifying the identity of that actor.", + Name: "Key Exchange without Entity Authentication", + }, + { + ID: "326", + Description: "The software stores or transmits sensitive data using an encryption scheme that is theoretically sound, but is not strong enough for the level of protection required.", + Name: "Inadequate Encryption Strength", + }, + { + ID: "327", + Description: "The use of a broken or risky cryptographic algorithm is an unnecessary risk that may result in the exposure of sensitive information.", + Name: "Use of a Broken or Risky Cryptographic Algorithm", + }, + { + ID: "338", + Description: "The product uses a Pseudo-Random Number Generator (PRNG) in a security context, but the PRNG's algorithm is not cryptographically strong.", + Name: "Use of Cryptographically Weak Pseudo-Random Number Generator (PRNG)", + }, + { + ID: "377", + Description: "Creating and using insecure temporary files can leave application and system data vulnerable to attack.", + Name: "Insecure Temporary File", + }, + { + ID: "409", + Description: "The software does not handle or incorrectly handles a compressed input with a very high compression ratio that produces a large output.", + Name: "Improper Handling of Highly Compressed Data (Data Amplification)", + }, + { + ID: "703", + Description: "The software does not properly anticipate or handle exceptional conditions that rarely occur during normal operation of the software.", + Name: "Improper Check or Handling of Exceptional Conditions", + }, + { + ID: "78", + Description: "The software constructs all or part of an OS command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended OS command when it is sent to a downstream component.", + Name: "Improper Neutralization of Special Elements used in an OS Command ('OS Command Injection')", + }, + { + ID: "79", + Description: "The software does not neutralize or incorrectly neutralizes user-controllable input before it is placed in output that is used as a web page that is served to other users.", + Name: "Improper Neutralization of Input During Web Page Generation ('Cross-site Scripting')", + }, + { + ID: "798", + Description: "The software contains hard-coded credentials, such as a password or cryptographic key, which it uses for its own inbound authentication, outbound communication to external components, or encryption of internal data.", + Name: "Use of Hard-coded Credentials", + }, + { + ID: "88", + Description: "The software constructs a string for a command to executed by a separate component\nin another control sphere, but it does not properly delimit the\nintended arguments, options, or switches within that command string.", + Name: "Improper Neutralization of Argument Delimiters in a Command ('Argument Injection')", + }, + { + ID: "89", + Description: "The software constructs all or part of an SQL command using externally-influenced input from an upstream component, but it does not neutralize or incorrectly neutralizes special elements that could modify the intended SQL command when it is sent to a downstream component.", + Name: "Improper Neutralization of Special Elements used in an SQL Command ('SQL Injection')", + }, + } +) + +func init() { + for _, 
weakness := range weaknesses { + data[weakness.ID] = weakness + } +} + +// Get Retrieves a CWE weakness by it's id +func Get(id string) *Weakness { + weakness, ok := data[id] + if ok && weakness != nil { + return weakness + } + return nil +} diff --git a/vendor/github.com/securego/gosec/v2/cwe/types.go b/vendor/github.com/securego/gosec/v2/cwe/types.go new file mode 100644 index 000000000..a14ccb53a --- /dev/null +++ b/vendor/github.com/securego/gosec/v2/cwe/types.go @@ -0,0 +1,34 @@ +package cwe + +import ( + "encoding/json" + "fmt" +) + +// Weakness defines a CWE weakness based on http://cwe.mitre.org/data/xsd/cwe_schema_v6.4.xsd +type Weakness struct { + ID string + Name string + Description string +} + +// SprintURL format the CWE URL +func (w *Weakness) SprintURL() string { + return fmt.Sprintf("https://cwe.mitre.org/data/definitions/%s.html", w.ID) +} + +// SprintID format the CWE ID +func (w *Weakness) SprintID() string { + return fmt.Sprintf("%s-%s", Acronym, w.ID) +} + +// MarshalJSON print only id and URL +func (w *Weakness) MarshalJSON() ([]byte, error) { + return json.Marshal(&struct { + ID string `json:"id"` + URL string `json:"url"` + }{ + ID: w.ID, + URL: w.SprintURL(), + }) +} diff --git a/vendor/github.com/securego/gosec/v2/entrypoint.sh b/vendor/github.com/securego/gosec/v2/entrypoint.sh new file mode 100644 index 000000000..4dc046729 --- /dev/null +++ b/vendor/github.com/securego/gosec/v2/entrypoint.sh @@ -0,0 +1,7 @@ +#!/usr/bin/env bash + +# Expand the arguments into an array of strings. This is requires because the GitHub action +# provides all arguments concatenated as a single string. +ARGS=("$@") + +/bin/gosec ${ARGS[*]} diff --git a/vendor/github.com/securego/gosec/v2/errors.go b/vendor/github.com/securego/gosec/v2/errors.go new file mode 100644 index 000000000..a27aa5821 --- /dev/null +++ b/vendor/github.com/securego/gosec/v2/errors.go @@ -0,0 +1,33 @@ +package gosec + +import ( + "sort" +) + +// Error is used when there are golang errors while parsing the AST +type Error struct { + Line int `json:"line"` + Column int `json:"column"` + Err string `json:"error"` +} + +// NewError creates Error object +func NewError(line, column int, err string) *Error { + return &Error{ + Line: line, + Column: column, + Err: err, + } +} + +// sortErros sorts the golang erros by line +func sortErrors(allErrors map[string][]Error) { + for _, errors := range allErrors { + sort.Slice(errors, func(i, j int) bool { + if errors[i].Line == errors[j].Line { + return errors[i].Column <= errors[j].Column + } + return errors[i].Line < errors[j].Line + }) + } +} diff --git a/vendor/github.com/securego/gosec/v2/go.mod b/vendor/github.com/securego/gosec/v2/go.mod new file mode 100644 index 000000000..1d08b1c74 --- /dev/null +++ b/vendor/github.com/securego/gosec/v2/go.mod @@ -0,0 +1,18 @@ +module github.com/securego/gosec/v2 + +require ( + github.com/google/uuid v1.2.0 + github.com/gookit/color v1.4.2 + github.com/lib/pq v1.10.2 + github.com/mozilla/tls-observatory v0.0.0-20210609171429-7bc42856d2e5 + github.com/nbutton23/zxcvbn-go v0.0.0-20210217022336-fa2cb2858354 + github.com/onsi/ginkgo v1.16.4 + github.com/onsi/gomega v1.13.0 + golang.org/x/crypto v0.0.0-20210513164829-c07d793c2f9a + golang.org/x/lint v0.0.0-20210508222113-6edffad5e616 + golang.org/x/text v0.3.6 + golang.org/x/tools v0.1.3 + gopkg.in/yaml.v2 v2.4.0 +) + +go 1.16 diff --git a/vendor/github.com/securego/gosec/v2/go.sum b/vendor/github.com/securego/gosec/v2/go.sum new file mode 100644 index 000000000..bdf02fb39 --- /dev/null 
+++ b/vendor/github.com/securego/gosec/v2/go.sum @@ -0,0 +1,702 @@ +bitbucket.org/creachadair/shell v0.0.6/go.mod h1:8Qqi/cYk7vPnsOePHroKXDJYmb5x7ENhtiFtfZq8K+M= +cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= +cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= +cloud.google.com/go v0.38.0/go.mod h1:990N+gfupTy94rShfmMCWGDn0LpTmnzTp2qbd1dvSRU= +cloud.google.com/go v0.44.1/go.mod h1:iSa0KzasP4Uvy3f1mN/7PiObzGgflwredwwASm/v6AU= +cloud.google.com/go v0.44.2/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY= +cloud.google.com/go v0.45.1/go.mod h1:RpBamKRgapWJb87xiFSdk4g1CME7QZg3uwTez+TSTjc= +cloud.google.com/go v0.46.3/go.mod h1:a6bKKbmY7er1mI7TEI4lsAkts/mkhTSZK8w33B4RAg0= +cloud.google.com/go v0.50.0/go.mod h1:r9sluTvynVuxRIOHXQEHMFffphuXHOMZMycpNR5e6To= +cloud.google.com/go v0.52.0/go.mod h1:pXajvRH/6o3+F9jDHZWQ5PbGhn+o8w9qiu/CffaVdO4= +cloud.google.com/go v0.53.0/go.mod h1:fp/UouUEsRkN6ryDKNW/Upv/JBKnv6WDthjR6+vze6M= +cloud.google.com/go v0.54.0/go.mod h1:1rq2OEkV3YMf6n/9ZvGWI3GWw0VoqH/1x2nd8Is/bPc= +cloud.google.com/go v0.56.0/go.mod h1:jr7tqZxxKOVYizybht9+26Z/gUq7tiRzu+ACVAMbKVk= +cloud.google.com/go v0.57.0/go.mod h1:oXiQ6Rzq3RAkkY7N6t3TcE6jE+CIBBbA36lwQ1JyzZs= +cloud.google.com/go v0.60.0/go.mod h1:yw2G51M9IfRboUH61Us8GqCeF1PzPblB823Mn2q2eAU= +cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o= +cloud.google.com/go/bigquery v1.3.0/go.mod h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE= +cloud.google.com/go/bigquery v1.4.0/go.mod h1:S8dzgnTigyfTmLBfrtrhyYhwRxG72rYxvftPBK2Dvzc= +cloud.google.com/go/bigquery v1.5.0/go.mod h1:snEHRnqQbz117VIFhE8bmtwIDY80NLUZUMb4Nv6dBIg= +cloud.google.com/go/bigquery v1.7.0/go.mod h1://okPTzCYNXSlb24MZs83e2Do+h+VXtc4gLoIoXIAPc= +cloud.google.com/go/bigquery v1.8.0/go.mod h1:J5hqkt3O0uAFnINi6JXValWIb1v0goeZM77hZzJN/fQ= +cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE= +cloud.google.com/go/datastore v1.1.0/go.mod h1:umbIZjpQpHh4hmRpGhH4tLFup+FVzqBi1b3c64qFpCk= +cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I= +cloud.google.com/go/pubsub v1.1.0/go.mod h1:EwwdRX2sKPjnvnqCa270oGRyludottCI76h+R3AArQw= +cloud.google.com/go/pubsub v1.2.0/go.mod h1:jhfEVHT8odbXTkndysNHCcx0awwzvfOlguIAii9o8iA= +cloud.google.com/go/pubsub v1.3.1/go.mod h1:i+ucay31+CNRpDW4Lu78I4xXG+O1r/MAHgjpRVR+TSU= +cloud.google.com/go/pubsub v1.5.0/go.mod h1:ZEwJccE3z93Z2HWvstpri00jOg7oO4UZDtKhwDwqF0w= +cloud.google.com/go/spanner v1.7.0/go.mod h1:sd3K2gZ9Fd0vMPLXzeCrF6fq4i63Q7aTLW/lBIfBkIk= +cloud.google.com/go/storage v1.0.0/go.mod h1:IhtSnM/ZTZV8YYJWCY8RULGVqBDmpoyjwiyrjsg+URw= +cloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0ZeosJ0Rtdos= +cloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohlUTyfDhBk= +cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RXyy7KQOVs= +contrib.go.opencensus.io/exporter/stackdriver v0.13.4/go.mod h1:aXENhDJ1Y4lIg4EUaVTwzvYETVNZk10Pu26tevFKLUc= +dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= +github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= +github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= +github.com/Masterminds/goutils v1.1.0/go.mod h1:8cTjp+g8YejhMuvIA5y2vz3BpJxksy863GQaJW2MFNU= +github.com/Masterminds/semver 
v1.4.2/go.mod h1:MB6lktGJrhw8PrUyiEoblNEGEQ+RzHPF078ddwwvV3Y= +github.com/Masterminds/semver v1.5.0/go.mod h1:MB6lktGJrhw8PrUyiEoblNEGEQ+RzHPF078ddwwvV3Y= +github.com/Masterminds/sprig v2.15.0+incompatible/go.mod h1:y6hNFY5UBTIWBxnzTeuNhlNS5hqE0NB0E6fgfo2Br3o= +github.com/Masterminds/sprig v2.22.0+incompatible/go.mod h1:y6hNFY5UBTIWBxnzTeuNhlNS5hqE0NB0E6fgfo2Br3o= +github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= +github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= +github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= +github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= +github.com/antihax/optional v0.0.0-20180407024304-ca021399b1a6/go.mod h1:V8iCPQYkqmusNa815XgQio277wI47sdRh1dUOLdyC6Q= +github.com/aokoli/goutils v1.0.1/go.mod h1:SijmP0QR8LtwsmDs8Yii5Z/S4trXFGFC2oO5g9DP+DQ= +github.com/armon/consul-api v0.0.0-20180202201655-eb2c6b5be1b6/go.mod h1:grANhF5doyWs3UAsr3K4I6qtAmlQcZDesFNEHPZAzj8= +github.com/aws/aws-sdk-go v1.23.20/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo= +github.com/aws/aws-sdk-go v1.25.37/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo= +github.com/aws/aws-sdk-go v1.36.30/go.mod h1:hcU610XS61/+aQV88ixoOzUoG7v3b31pl2zKMmprdro= +github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q= +github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8= +github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw= +github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs= +github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= +github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= +github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI= +github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= +github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= +github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= +github.com/cockroachdb/datadriven v0.0.0-20190809214429-80d97fb3cbaa/go.mod h1:zn76sxSg3SzpJ0PPJaLDCu+Bu0Lg3sKTORVIj19EIF8= +github.com/coreos/etcd v3.3.10+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE= +github.com/coreos/go-etcd v2.0.0+incompatible/go.mod h1:Jez6KQU2B/sWsbdaef3ED8NzMklzPG4d5KIOhIy30Tk= +github.com/coreos/go-semver v0.2.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk= +github.com/coreos/go-semver v0.3.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk= +github.com/coreos/go-systemd v0.0.0-20180511133405-39ca1b05acc7/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= +github.com/coreos/go-systemd v0.0.0-20190620071333-e64a0ec8b42a/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= +github.com/coreos/pkg v0.0.0-20160727233714-3ac0863d7acf/go.mod h1:E3G3o1h8I7cfcXa63jLwjI0eiQQMgzzUDFVpN/nH/eA= +github.com/coreos/pkg v0.0.0-20180928190104-399ea9e2e55f/go.mod h1:E3G3o1h8I7cfcXa63jLwjI0eiQQMgzzUDFVpN/nH/eA= 
+github.com/cpuguy83/go-md2man v1.0.10/go.mod h1:SmD6nW6nTyfqj6ABTjUi3V3JVMnlJmwcJI5acqYI6dE= +github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= +github.com/cpuguy83/go-md2man/v2 v2.0.0/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= +github.com/creack/pty v1.1.7/go.mod h1:lj5s0c3V2DBrqTV7llrYr5NG6My20zk30Fl46Y7DoTY= +github.com/davecgh/go-spew v0.0.0-20161028175848-04cdfd42973b/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ= +github.com/dustin/go-humanize v0.0.0-20171111073723-bb3d318650d4/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk= +github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk= +github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= +github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= +github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98= +github.com/envoyproxy/protoc-gen-validate v0.0.14/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= +github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= +github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4= +github.com/fatih/color v1.10.0/go.mod h1:ELkj/draVOlAH/xkhN6mQ50Qd0MPOk5AAr3maGEBuJM= +github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= +github.com/fsnotify/fsnotify v1.4.9 h1:hsms1Qyu0jgnwNXIxa+/V/PDsU6CfLf6CNO8H7IWoS4= +github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ= +github.com/fullstorydev/grpcurl v1.6.0/go.mod h1:ZQ+ayqbKMJNhzLmbpCiurTVlaK2M/3nqZCxaQ2Ze/sM= +github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= +github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= +github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= +github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= +github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= +github.com/go-kit/kit v0.9.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= +github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE= +github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk= +github.com/go-redis/redis v6.15.8+incompatible/go.mod h1:NAIEuMOZ/fxfXJIrKDQDz8wamY7mA7PouImQ2Jvg6kA= +github.com/go-sql-driver/mysql v1.5.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg= +github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= +github.com/go-task/slim-sprig v0.0.0-20210107165309-348f09dbbbc0/go.mod h1:fyg7847qk6SyHyPtNmDHnmrv/HOrqktSC+C9fM+CJOE= +github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= +github.com/gogo/protobuf v1.2.1/go.mod 
h1:hp+jE20tsWTFYpLwKvXlhS1hjn+gTNwPg2I6zVXpSg4= +github.com/gogo/protobuf v1.3.0/go.mod h1:SlYgWuQ5SjCEi6WLHjHCa1yvBfUnHcTbrrZtXPKa29o= +github.com/gogo/protobuf v1.3.1/go.mod h1:SlYgWuQ5SjCEi6WLHjHCa1yvBfUnHcTbrrZtXPKa29o= +github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= +github.com/golang/groupcache v0.0.0-20160516000752-02826c3e7903/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= +github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= +github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y= +github.com/golang/mock v1.4.0/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= +github.com/golang/mock v1.4.1/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= +github.com/golang/mock v1.4.3/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= +github.com/golang/mock v1.4.4/go.mod h1:l3mdAwkq5BuhzHwde/uurv3sEJeZMXNpwsxVWU71h+4= +github.com/golang/protobuf v1.1.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= +github.com/golang/protobuf v1.3.4/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= +github.com/golang/protobuf v1.3.5/go.mod h1:6O5/vntMXwX2lRkT1hjjk0nAC1IDOTvTlVgjlRvqsdk= +github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8= +github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA= +github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs= +github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w= +github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0= +github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QDs8UjoX8= +github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= +github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= +github.com/golang/protobuf v1.5.2 h1:ROPKBNFfQgOUMifHyP+KYbvpjbdoFNs+aK7DXlji0Tw= +github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= +github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= +github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= +github.com/google/certificate-transparency-go v1.0.21/go.mod h1:QeJfpSbVSfYc7RgB3gJFj9cbuQMMchQxrWXz8Ruopmg= +github.com/google/certificate-transparency-go v1.1.1/go.mod h1:FDKqPvSXawb2ecErVRrD+nfy23RCzyl7eqVCEmlT1Zs= +github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= 
+github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= +github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= +github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= +github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= +github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= +github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= +github.com/google/pprof v0.0.0-20191218002539-d4f498aebedc/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200212024743-f11f1df84d12/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200229191704-1ebb73c60ed3/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200430221834-fc25d7d30c6d/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200507031123-427632fa3b1c/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= +github.com/google/trillian v1.3.11/go.mod h1:0tPraVHrSDkA3BO6vKX67zgLXs6SsOAbHEivX+9mPgw= +github.com/google/uuid v0.0.0-20161128191214-064e2069ce9c/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/google/uuid v1.0.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/google/uuid v1.2.0 h1:qJYtXnJRWmpe7m/3XlyhrsLrEURqHRM2kxzoxXqyUDs= +github.com/google/uuid v1.2.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= +github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= +github.com/gookit/color v1.4.2 h1:tXy44JFSFkKnELV6WaMo/lLfu/meqITX3iAV52do7lk= +github.com/gookit/color v1.4.2/go.mod h1:fqRyamkC1W8uxl+lxCQxOT09l/vYfZ+QeiX3rKQHCoQ= +github.com/gordonklaus/ineffassign v0.0.0-20200309095847-7953dde2c7bf/go.mod h1:cuNKsD1zp2v6XfE/orVX2QE1LC+i254ceGcVeDT3pTU= +github.com/gorhill/cronexpr v0.0.0-20180427100037-88b0669f7d75/go.mod h1:g2644b03hfBX9Ov0ZBDgXXens4rxSxmqFBbhvKv2yVA= +github.com/gorilla/mux v1.8.0/go.mod h1:DVbg23sWSpFRCP0SfiEN6jmj59UnW/n46BH5rLB71So= +github.com/gorilla/websocket v0.0.0-20170926233335-4201258b820c/go.mod h1:E7qHFY5m1UJ88s3WnNqhKjPHQ0heANvMoAMk2YaljkQ= +github.com/gorilla/websocket v1.4.1/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= +github.com/gregjones/httpcache v0.0.0-20190611155906-901d90724c79/go.mod h1:FecbI9+v66THATjSRHfNgh1IVFe/9kFxbXtjV0ctIMA= +github.com/grpc-ecosystem/go-grpc-middleware v1.0.1-0.20190118093823-f849b5445de4/go.mod h1:FiyG127CGDf3tlThmgyCl78X/SZQqEOJBCDaAfeWzPs= +github.com/grpc-ecosystem/go-grpc-middleware v1.2.2/go.mod h1:EaizFBKfUKtMIF5iaDEhniwNedqGo9FuLFzppDr3uwI= +github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0/go.mod h1:8NvIoxWQoOIhqOTXgfV/d3M/q6VIi02HzZEHgUlZvzk= 
+github.com/grpc-ecosystem/grpc-gateway v1.9.5/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY= +github.com/grpc-ecosystem/grpc-gateway v1.12.1/go.mod h1:8XEsbTttt/W+VvjtQhLACqCisSPWTxCZ7sBRjU6iH9c= +github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= +github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= +github.com/hashicorp/golang-lru v0.5.4/go.mod h1:iADmTwqILo4mZ8BN3D2Q6+9jd8WM5uGBxy+E8yxSoD4= +github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ= +github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU= +github.com/huandu/xstrings v1.0.0/go.mod h1:4qWG/gcEcfX4z/mBDHJ++3ReCw9ibxbsNJbcucJdbSo= +github.com/huandu/xstrings v1.2.0/go.mod h1:DvyZB1rfVYsBIigL8HwpZgxHwXozlTgGqn63UyNX5k4= +github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= +github.com/imdario/mergo v0.3.4/go.mod h1:2EnlNZ0deacrJVfApfmtdGgDfMuh/nq6Ok1EcJh5FfA= +github.com/imdario/mergo v0.3.8/go.mod h1:2EnlNZ0deacrJVfApfmtdGgDfMuh/nq6Ok1EcJh5FfA= +github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8= +github.com/jhump/protoreflect v1.6.1/go.mod h1:RZQ/lnuN+zqeRVpQigTwO6o0AJUkxbnSnpuG7toUTG4= +github.com/jmespath/go-jmespath v0.0.0-20180206201540-c2b33e8439af/go.mod h1:Nht3zPeWKUH0NzdCt2Blrr5ys8VGpn0CEB0cQHVjt7k= +github.com/jmespath/go-jmespath v0.4.0/go.mod h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo= +github.com/jmespath/go-jmespath/internal/testify v1.5.1/go.mod h1:L3OGu8Wl2/fWfCI6z80xFu9LTZmf1ZRjMHUOPmWr69U= +github.com/jonboulle/clockwork v0.1.0/go.mod h1:Ii8DK3G1RaLaWxj9trq07+26W01tbo22gdxWY5EU2bo= +github.com/jonboulle/clockwork v0.2.0/go.mod h1:Pkfl5aHPm1nk2H9h0bjmnJD/BcgbGXUBGnn1kMkgxc8= +github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= +github.com/json-iterator/go v1.1.7/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= +github.com/json-iterator/go v1.1.10/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= +github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= +github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk= +github.com/juju/ratelimit v1.0.1/go.mod h1:qapgC/Gy+xNh9UxzV13HGGl/6UXNN+ct+vwSgWNm/qk= +github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w= +github.com/k0kubun/colorstring v0.0.0-20150214042306-9440f1994b88/go.mod h1:3w7q1U84EfirKl04SVQ/s7nPm1ZPhiXd34z40TNz36k= +github.com/kisielk/errcheck v1.1.0/go.mod h1:EZBBE59ingxPouuu3KfxchcWSUPOHkagtvWXihfKN4Q= +github.com/kisielk/errcheck v1.2.0/go.mod h1:/BMXB+zMLi60iA8Vv6Ksmxu/1UDYcXs4uQLJ+jE2L00= +github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= +github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= +github.com/konsorten/go-windows-terminal-sequences v1.0.2/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= +github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc= +github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI= +github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= +github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= +github.com/kr/text v0.1.0 
h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE= +github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= +github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw= +github.com/letsencrypt/pkcs11key/v4 v4.0.0/go.mod h1:EFUvBDay26dErnNb70Nd0/VW3tJiIbETBPTl9ATXQag= +github.com/lib/pq v1.8.0/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= +github.com/lib/pq v1.9.0/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= +github.com/lib/pq v1.10.2 h1:AqzbZs4ZoCBp+GtejcpCpcxM3zlSMx29dXbUSeVtJb8= +github.com/lib/pq v1.10.2/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= +github.com/magiconair/properties v1.8.0/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ= +github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU= +github.com/mattn/go-colorable v0.1.4/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE= +github.com/mattn/go-colorable v0.1.8/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= +github.com/mattn/go-isatty v0.0.4/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4= +github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= +github.com/mattn/go-isatty v0.0.10/go.mod h1:qgIWMr58cqv1PHHyhnkY9lrL7etaEgOFcMEpPG5Rm84= +github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= +github.com/mattn/go-runewidth v0.0.2/go.mod h1:LwmH8dsx7+W8Uxz3IHJYH5QSwggIsqBzpuz5H//U1FU= +github.com/mattn/go-runewidth v0.0.4/go.mod h1:LwmH8dsx7+W8Uxz3IHJYH5QSwggIsqBzpuz5H//U1FU= +github.com/mattn/go-runewidth v0.0.6/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI= +github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0= +github.com/miekg/dns v1.1.35/go.mod h1:KNUDUusw/aVsxyTYZM1oqvCicbwhgbNgztCETuNZ7xM= +github.com/miekg/pkcs11 v1.0.2/go.mod h1:XsNlhZGX73bx86s2hdc/FuaLm2CPZJemRLMA+WTFxgs= +github.com/miekg/pkcs11 v1.0.3/go.mod h1:XsNlhZGX73bx86s2hdc/FuaLm2CPZJemRLMA+WTFxgs= +github.com/mitchellh/copystructure v1.0.0/go.mod h1:SNtv71yrdKgLRyLFxmLdkAbkKEFWgYaq1OVrnRcwhnw= +github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= +github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= +github.com/mitchellh/reflectwalk v1.0.0/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw= +github.com/mitchellh/reflectwalk v1.0.1/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw= +github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= +github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= +github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= +github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= +github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826/go.mod h1:TaXosZuwdSHYgviHp1DAtfrULt5eUgsSMsZf+YrPgl8= +github.com/mozilla/scribe v0.0.0-20180711195314-fb71baf557c1/go.mod h1:FIczTrinKo8VaLxe6PWTPEXRXDIHz2QAwiaBaP5/4a8= +github.com/mozilla/tls-observatory v0.0.0-20210609171429-7bc42856d2e5 h1:0KqC6/sLy7fDpBdybhVkkv4Yz+PmB7c9Dz9z3dLW804= +github.com/mozilla/tls-observatory v0.0.0-20210609171429-7bc42856d2e5/go.mod h1:FUqVoUPHSEdDR0MnFM3Dh8AU0pZHLXUD127SAJGER/s= +github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod 
h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= +github.com/mwitkow/go-proto-validators v0.0.0-20180403085117-0950a7990007/go.mod h1:m2XC9Qq0AlmmVksL6FktJCdTYyLk7V3fKyp0sl1yWQo= +github.com/mwitkow/go-proto-validators v0.2.0/go.mod h1:ZfA1hW+UH/2ZHOWvQ3HnQaU0DtnpXu850MZiy+YUgcc= +github.com/nbutton23/zxcvbn-go v0.0.0-20210217022336-fa2cb2858354 h1:4kuARK6Y6FxaNu/BnU2OAaLF86eTVhP2hjTB6iMvItA= +github.com/nbutton23/zxcvbn-go v0.0.0-20210217022336-fa2cb2858354/go.mod h1:KSVJerMDfblTH7p5MZaTt+8zaT2iEk3AkVb9PQdZuE8= +github.com/nishanths/predeclared v0.0.0-20190419143655-18a43bb90ffc/go.mod h1:62PewwiQTlm/7Rj+cxVYqZvDIUc+JjZq6GHAC1fsObQ= +github.com/nxadm/tail v1.4.4/go.mod h1:kenIhsEOeOJmVchQTgglprH7qJGnHDVpk1VPCcaMI8A= +github.com/nxadm/tail v1.4.8 h1:nPr65rt6Y5JFSKQO7qToXr7pePgD6Gwiw05lkbyAQTE= +github.com/nxadm/tail v1.4.8/go.mod h1:+ncqLTQzXmGhMZNUePPaPqPvBxHAIsmXswZKocGu+AU= +github.com/olekukonko/tablewriter v0.0.0-20170122224234-a0225b3f23b5/go.mod h1:vsDQFd/mU46D+Z4whnwzcISnGGzXWMclvtLoiIKAKIo= +github.com/olekukonko/tablewriter v0.0.1/go.mod h1:vsDQFd/mU46D+Z4whnwzcISnGGzXWMclvtLoiIKAKIo= +github.com/olekukonko/tablewriter v0.0.2/go.mod h1:rSAaSIOAGT9odnlyGlUfAJaoc5w2fSBUmeGDbRWPxyQ= +github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= +github.com/onsi/ginkgo v1.10.3/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= +github.com/onsi/ginkgo v1.12.1/go.mod h1:zj2OWP4+oCPe1qIXoGWkgMRwljMUYCdkwsT2108oapk= +github.com/onsi/ginkgo v1.16.2/go.mod h1:CObGmKUOKaSC0RjmoAK7tKyn4Azo5P2IWuoMnvwxz1E= +github.com/onsi/ginkgo v1.16.4 h1:29JGrr5oVBm5ulCWet69zQkzWipVXIol6ygQUe/EzNc= +github.com/onsi/ginkgo v1.16.4/go.mod h1:dX+/inL/fNMqNlz0e9LfyB9TswhZpCVdJM/Z6Vvnwo0= +github.com/onsi/gomega v1.7.1/go.mod h1:XdKZgCCFLUoM/7CFJVPcG8C1xQ1AJ0vpAezJrB7JYyY= +github.com/onsi/gomega v1.10.1/go.mod h1:iN09h71vgCQne3DLsj+A5owkum+a2tYe+TOCB1ybHNo= +github.com/onsi/gomega v1.13.0 h1:7lLHu94wT9Ij0o6EWWclhu0aOh32VxhkwEJvzuWPeak= +github.com/onsi/gomega v1.13.0/go.mod h1:lRk9szgn8TxENtWd0Tp4c3wjlRfMTMH27I+3Je41yGY= +github.com/opentracing/opentracing-go v1.1.0/go.mod h1:UkNAQd3GIcIGf0SeVgPpRdFStlNbqXla1AfSYxPUl2o= +github.com/pelletier/go-toml v1.2.0/go.mod h1:5z9KED0ma1S8pY6P1sdut58dfprrGBbd/94hg7ilaic= +github.com/peterbourgon/diskv v2.0.1+incompatible/go.mod h1:uqqh8zWWbv1HBMNONnaR/tNboyR3/BZd58JJSHlUSCU= +github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pmezard/go-difflib v0.0.0-20151028094244-d8ed2627bdf0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw= +github.com/prometheus/client_golang v1.0.0/go.mod h1:db9x61etRT2tGnBNRi70OPL5FsnadC4Ky3P0J6CfImo= +github.com/prometheus/client_golang v1.7.1/go.mod h1:PY5Wy2awLA44sXw4AOSfFBetzPP4j5+D6mVACh+pe2M= +github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= +github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod 
h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/prometheus/client_model v0.2.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/prometheus/common v0.4.1/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4= +github.com/prometheus/common v0.10.0/go.mod h1:Tlit/dnDKsSWFlCLTWaA1cyBgKHSMdTB80sz/V91rCo= +github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= +github.com/prometheus/procfs v0.0.2/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA= +github.com/prometheus/procfs v0.1.3/go.mod h1:lV6e/gmhEcM9IjHGsFOCxxuZ+z1YqCvr4OA4YeYWdaU= +github.com/pseudomuto/protoc-gen-doc v1.3.2/go.mod h1:y5+P6n3iGrbKG+9O04V5ld71in3v/bX88wUwgt+U8EA= +github.com/pseudomuto/protokit v0.2.0/go.mod h1:2PdH30hxVHsup8KpBTOXTBeMVhJZVio3Q8ViKSAXT0Q= +github.com/rogpeppe/fastuuid v0.0.0-20150106093220-6724a57986af/go.mod h1:XWv6SoW27p1b0cqNHllgS5HIMJraePCO15w5zCzIWYg= +github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ= +github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= +github.com/rs/cors v1.7.0/go.mod h1:gFx+x8UowdsKA9AchylcLynDq+nNFfI8FkUZdN/jGCU= +github.com/russross/blackfriday v1.5.2/go.mod h1:JO/DiYxRf+HjHt06OyowR9PTA263kcR/rfWxYHBV53g= +github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= +github.com/sergi/go-diff v1.1.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM= +github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc= +github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo= +github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE= +github.com/sirupsen/logrus v1.7.0/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0= +github.com/soheilhy/cmux v0.1.4/go.mod h1:IM3LyeVVIOuxMH7sFAkER9+bJ4dT7Ms6E4xg4kGIyLM= +github.com/spf13/afero v1.1.2/go.mod h1:j4pytiNVoe2o6bmDsKpLACNPDBIoEAkihy7loJ1B0CQ= +github.com/spf13/cast v1.3.0/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= +github.com/spf13/cobra v0.0.3/go.mod h1:1l0Ry5zgKvJasoi3XT1TypsSe7PqH0Sj9dhYf7v3XqQ= +github.com/spf13/cobra v0.0.5/go.mod h1:3K3wKZymM7VvHMDS9+Akkh4K60UwM26emMESw8tLCHU= +github.com/spf13/jwalterweatherman v1.0.0/go.mod h1:cQK4TGJAtQXfYWX+Ddv3mKDzgVb68N+wFjFa4jdeBTo= +github.com/spf13/pflag v1.0.1/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= +github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= +github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= +github.com/spf13/viper v1.3.2/go.mod h1:ZiWeW+zYFKm7srdB9IoDzzZXaJaI5eL9QjNiN/DMA2s= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/testify v0.0.0-20170130113145-4d4bfba8f1d1/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= +github.com/stretchr/testify v1.1.4/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= +github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= +github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= +github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= +github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= +github.com/stretchr/testify v1.6.1 
h1:hDPOHmpOpP40lSULcqw7IrRb/u7w6RpDC9399XyoNd0= +github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/tmc/grpc-websocket-proxy v0.0.0-20170815181823-89b8d40f7ca8/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U= +github.com/tmc/grpc-websocket-proxy v0.0.0-20190109142713-0ad062ec5ee5/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U= +github.com/tmc/grpc-websocket-proxy v0.0.0-20200427203606-3cfed13b9966/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U= +github.com/tomasen/realip v0.0.0-20180522021738-f0c99a92ddce/go.mod h1:o8v6yHRoik09Xen7gje4m9ERNah1d1PPsVq1VEx9vE4= +github.com/ugorji/go/codec v0.0.0-20181204163529-d75b2dcb6bc8/go.mod h1:VFNgLljTbGfSG7qAOspJ7OScBnGdDN/yBr0sguwnwf0= +github.com/urfave/cli v1.20.0/go.mod h1:70zkFmudgCuE/ngEzBv17Jvp/497gISqfk5gWijbERA= +github.com/urfave/cli v1.22.1/go.mod h1:Gos4lmkARVdJ6EkW0WaNv/tZAAMe9V7XWyB60NtXRu0= +github.com/viki-org/dnscache v0.0.0-20130720023526-c70c1f23c5d8/go.mod h1:dniwbG03GafCjFohMDmz6Zc6oCuiqgH6tGNyXTkHzXE= +github.com/xiang90/probing v0.0.0-20190116061207-43a291ad63a2/go.mod h1:UETIi67q53MR2AWcXfiuqkDkRtnGDLqkBTpCHuJHxtU= +github.com/xo/terminfo v0.0.0-20210125001918-ca9a967f8778 h1:QldyIu/L63oPpyvQmHgvgickp1Yw510KJOqX7H24mg8= +github.com/xo/terminfo v0.0.0-20210125001918-ca9a967f8778/go.mod h1:2MuV+tbUrU1zIOPMxZ5EncGwgmMJsa+9ucAQZXxsObs= +github.com/xordataexchange/crypt v0.0.3-0.20170626215501-b2862e3d0a77/go.mod h1:aYKd//L2LvnjZzWKhF00oedf4jCCReLcmhLdhm1A27Q= +github.com/yudai/gojsondiff v1.0.0/go.mod h1:AY32+k2cwILAkW1fbgxQ5mUmMiZFgLIV+FBNExI05xg= +github.com/yudai/golcs v0.0.0-20170316035057-ecda9a501e82/go.mod h1:lgjkn3NuSvDfVJdfcVVdX+jpBxNmX4rDAzaS45IcYoM= +github.com/yudai/pp v2.0.1+incompatible/go.mod h1:PuxR/8QJ7cyCkFp/aUDS+JY727OFEZkTdatxwunjIkc= +github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= +go.etcd.io/bbolt v1.3.3/go.mod h1:IbVyRI1SCnLcuJnV2u8VeU0CEYM7e686BmAb1XKL+uU= +go.etcd.io/bbolt v1.3.4/go.mod h1:G5EMThwa9y8QZGBClrRx5EY+Yw9kAhnjy3bSjsnlVTQ= +go.etcd.io/etcd v0.0.0-20200513171258-e048e166ab9c/go.mod h1:xCI7ZzBfRuGgBXyXO6yfWfDmlWd35khcWpUa4L0xI/k= +go.mozilla.org/mozlog v0.0.0-20170222151521-4bb13139d403/go.mod h1:jHoPAGnDrCy6kaI2tAze5Prf0Nr0w/oNkROt2lw3n3o= +go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= +go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8= +go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= +go.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= +go.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= +go.uber.org/atomic v1.3.2/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= +go.uber.org/atomic v1.4.0/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= +go.uber.org/atomic v1.5.0/go.mod h1:sABNBOSYdrvTF6hTgEIbc7YasKWGhgEQZyfxyTvoXHQ= +go.uber.org/multierr v1.1.0/go.mod h1:wR5kodmAFQ0UK8QlbwjlSNy0Z68gJhDJUG5sjR94q/0= +go.uber.org/multierr v1.3.0/go.mod h1:VgVr7evmIr6uPjLBxg28wmKNXyqE9akIJ5XnfpiKl+4= +go.uber.org/multierr v1.4.0/go.mod h1:VgVr7evmIr6uPjLBxg28wmKNXyqE9akIJ5XnfpiKl+4= 
+go.uber.org/tools v0.0.0-20190618225709-2cfd321de3ee/go.mod h1:vJERXedbb3MVM5f9Ejo0C68/HhF8uaILCdgjnY+goOA= +go.uber.org/zap v1.10.0/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q= +go.uber.org/zap v1.13.0/go.mod h1:zwrFLgMcdUuIBviXEYEH1YKNaOBnKXsx2IPda5bBwHM= +golang.org/x/crypto v0.0.0-20180501155221-613d6eafa307/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= +golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= +golang.org/x/crypto v0.0.0-20181203042331-505ab145d0a9/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20201221181555-eec23a3978ad/go.mod h1:jdWPYTVW3xRLrWPugEBEK3UY2ZEsg3UU495nc5E+M+I= +golang.org/x/crypto v0.0.0-20210513164829-c07d793c2f9a h1:kr2P4QFmQr29mSLA43kwrOcgcReGTfbE9N577tCTuBc= +golang.org/x/crypto v0.0.0-20210513164829-c07d793c2f9a/go.mod h1:P+XmwS30IXTQdn5tA2iutPOUgjI07+tq3H3K9MVA1s8= +golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= +golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= +golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= +golang.org/x/exp v0.0.0-20190829153037-c13cbed26979/go.mod h1:86+5VVa7VpoJ4kLfm080zCjGlMRFzhUhsZKEZO7MGek= +golang.org/x/exp v0.0.0-20191030013958-a1ab85dbe136/go.mod h1:JXzH8nQsPlswgeRAPE3MuO9GYsAcnJvJ4vnMwN/5qkY= +golang.org/x/exp v0.0.0-20191129062945-2f5052295587/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= +golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= +golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= +golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM= +golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU= +golang.org/x/exp v0.0.0-20200331195152-e8c3332aa8e5/go.mod h1:4M0jN8W1tt0AVLNr8HDosyJCDCDuyL9N9+3m7wDWgKw= +golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= +golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= +golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= +golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= +golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= +golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20190409202823-959b441ac422/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20190909230951-414d861bb4ac/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= 
+golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20191125180803-fdd1cda4f05f/go.mod h1:5qLYkcX4OjUUV8bRuDixDT3tpyyb+LUpUlRWLxfhWrs= +golang.org/x/lint v0.0.0-20200130185559-910be7a94367/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/lint v0.0.0-20200302205851-738671d3881b/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/lint v0.0.0-20210508222113-6edffad5e616 h1:VLliZ0d+/avPrXXH+OakdXhpJuEoBZuwh1m2j7U6Iug= +golang.org/x/lint v0.0.0-20210508222113-6edffad5e616/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE= +golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028/go.mod h1:E/iHnbuqvinMTCcRqshq8CkpyQDoeVncDDYHnLhea+o= +golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc= +golang.org/x/mod v0.1.0/go.mod h1:0QHyrYULN0/3qlju5TqG8bIK38QM8yzMo5ekMj3DlcY= +golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= +golang.org/x/mod v0.1.1-0.20191107180719-034126e5016b/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= +golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.4.2 h1:Gz96sIWK3OalVv/I/qNygP42zyoKp3xptRVCWRFEBvo= +golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20181220203305-927f97764cc3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190501004415-9ce7a6920f09/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks= +golang.org/x/net v0.0.0-20190613194153-d28f0bde5980/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190724013045-ca1201d0de80/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190813141303-74dc4d7220e7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190923162816-aa69164e4478/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20191002035440-2ec189313ef0/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net 
v0.0.0-20191209160850-c0dbc17a3553/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200114155413-6afb5195e5aa/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200202094626-16171245cfb2/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200222125558-5a598a2470a0/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200301022130-244492dfa37a/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200324143707-d3edc9973b7e/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200421231249-e086a090c8fd/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200501053045-e0ff5e5a1de5/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200506145744-7e3656a0809f/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200513185701-a91f0712d120/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200520004742-59133d7f0dd7/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200520182314-0ba52f642ac2/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200625001655-4c5254603344/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= +golang.org/x/net v0.0.0-20210428140749-89ef3d95e781 h1:DzZ89McO9/gWPsQXS/FVKAlG02ZjaQ6AlZRBimEYOd0= +golang.org/x/net v0.0.0-20210428140749-89ef3d95e781/go.mod h1:OJAsFXCWl8Ukc7SiCT/9KSuxbyM7479/AVlXFRxuMCk= +golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= +golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20191202225959-858c2ad4c8b6/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190412183630-56d357773e84/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20200317015054-43a5402ce75a/go.mod 
h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20181107165924-66b7b1311ac8/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20181205085412-a5c9d58dba9a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190826190057-c7b8b68b1456/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190904154756-749cb33beabd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190924154521-2837fb4f24fe/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191005200804-aed5e4c7ecf9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191008105621-543471e840be/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191120155948-bd437916bb0e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200106162015-b016eb3dc98e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200113162924-86b910548bc1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys 
v0.0.0-20200122134326-e047566fdf82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200202164722-d101bd2416d5/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200212091648-12a6c2dcc1e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200331124033-c3d80250170d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200420163511-1957bb5e6d1f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200501052902-10377860bb8e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200511232937-7e40ca221e25/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200515095857-1151b9dac4a9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200523222454-059865788121/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200615200032-f1bc736245b1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210112080510-489259a85091/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210510120138-977fb7262007 h1:gG67DSER+11cZvqIMb8S8bt0vZtiN6xWYARwirrOSfE= +golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw= +golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1 h1:v+OssWQX+hTHEmOBgwxdZxK4zHq3yOs8F9J7mk0PY8E= +golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= +golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= +golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.6 h1:aRYxNxv6iGQlyVaZmk6ZgYEDa+Jg18DxebPSrd6bg1M= +golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/time v0.0.0-20180412165947-fbb02b2291d2/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time 
v0.0.0-20200416051211-89c76fbcd5d1/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/tools v0.0.0-20180221164845-07fd8470d635/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20181030221726-6c7e314b6563/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= +golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190312151545-0bb0c0a6e846/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190816200558-6889da9d5479/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191010075000-0337d82405ff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191029041327-9cc4af7d6b2c/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191029190741-b9c20aec41a5/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191113191852-77e3bb0ad9e7/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191115202509-3a792d9c32b2/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191125144606-a911d9008d1f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191130070609-6e064ea0cf2d/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191216052735-49a3e744a425/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20191216173652-a0e659d51361/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20191227053925-7b8e75db28f4/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200117161641-43d50277825c/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200122220014-bf1340f18c4a/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools 
v0.0.0-20200204074204-1cc6d1ef6c74/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200207183749-b753a1ba74fa/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200212150539-ea181f53ac56/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200224181240-023911ca70b2/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200227222343-706bc42d1f0d/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200304193943-95d2e580d8eb/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= +golang.org/x/tools v0.0.0-20200312045724-11d5b4c81c7d/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= +golang.org/x/tools v0.0.0-20200331025713-a30bf2db82d4/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8= +golang.org/x/tools v0.0.0-20200426102838-f3a5411a4c3b/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200501065659-ab2804fb9c9d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200512131952-2bc93b1c0c88/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200515010526-7d3b6ebf133d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200626171337-aa94e735be7f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200630154851-b2d8b0336632/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200706234117-b22de6825cf7/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20201224043029-2b0845dc783e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.1.3 h1:L69ShwSZEyCsLKoAxDKeMvLDZkumEe8gXUZAjab0tX8= +golang.org/x/tools v0.1.3/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= +golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE= +google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M= +google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= +google.golang.org/api v0.9.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= +google.golang.org/api v0.10.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= +google.golang.org/api v0.13.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= +google.golang.org/api v0.14.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= +google.golang.org/api v0.15.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= +google.golang.org/api v0.17.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.18.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.19.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.20.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.22.0/go.mod 
h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.24.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE= +google.golang.org/api v0.28.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE= +google.golang.org/api v0.29.0/go.mod h1:Lcubydp8VUV7KeIHD9z2Bys/sm/vGKnG1UHuDBSrHWM= +google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= +google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= +google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= +google.golang.org/appengine v1.6.1/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0= +google.golang.org/appengine v1.6.2/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0= +google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= +google.golang.org/appengine v1.6.6/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= +google.golang.org/genproto v0.0.0-20170818010345-ee236bd376b0/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= +google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= +google.golang.org/genproto v0.0.0-20181107211654-5fc9ac540362/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= +google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190425155659-357c62f0e4bb/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190502173448-54afdca5d873/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190801165951-fa694d86fc64/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= +google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= +google.golang.org/genproto v0.0.0-20190911173649-1774047e7e51/go.mod h1:IbNlFCBrqXvoKpeg0TB2l7cyZUmoaFKYIwrEpbDKLA8= +google.golang.org/genproto v0.0.0-20190927181202-20e1ac93f88c/go.mod h1:IbNlFCBrqXvoKpeg0TB2l7cyZUmoaFKYIwrEpbDKLA8= +google.golang.org/genproto v0.0.0-20191108220845-16a3f7862a1a/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20191115194625-c23dd37a84c9/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20191216164720-4f79533eabd1/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20191230161307-f3c370f40bfb/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20200115191322-ca5a22157cba/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20200122232147-0452cf42e150/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20200204135345-fa8e72b47b90/go.mod h1:GmwEX6Z4W5gMy59cAlVYjN9JhxgbQH6Gn+gFDQe2lzA= +google.golang.org/genproto v0.0.0-20200212174721-66ed5ce911ce/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200224152610-e50cd9704f63/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200228133532-8c2c7df3a383/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200305110556-506484158171/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= 
+google.golang.org/genproto v0.0.0-20200312145019-da6875a35672/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200331122359-1ee6d9798940/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200423170343-7949de9c1215/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200430143042-b979b6f78d84/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200511104702-f5ebc3bea380/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200515170657-fc4c6c6a6587/go.mod h1:YsZOwe1myG/8QRHRsmBRE1LrgQY60beZKjly0O1fX9U= +google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= +google.golang.org/genproto v0.0.0-20200626011028-ee7919e894b5/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20200707001353-8e8330bf89df/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/grpc v1.8.0/go.mod h1:yo6s7OP7yaDglbqo1J04qKzAhqBH6lvTonzMVmEdcZw= +google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= +google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= +google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= +google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= +google.golang.org/grpc v1.23.1/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= +google.golang.org/grpc v1.24.0/go.mod h1:XDChyiUovWa60DnaeDeZmSW86xtLtjtZbwvSiRnRtcA= +google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY= +google.golang.org/grpc v1.26.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.27.1/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.28.0/go.mod h1:rpkK4SK4GF4Ach/+MFLZUBavHOvF2JJB5uozKKal+60= +google.golang.org/grpc v1.29.0/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3IjizoKk= +google.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3IjizoKk= +google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= +google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= +google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= +google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE= +google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo= +google.golang.org/protobuf v1.22.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGjtUeSXeh4= +google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= +google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= +google.golang.org/protobuf v1.26.0 h1:bxAC2xTBsZGibn2RTntX0oH50xLsqy1OxA9tTL3p/lk= +google.golang.org/protobuf v1.26.0/go.mod 
h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= +gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15 h1:YR8cESwS4TdDjEe65xsg0ogRM/Nc3DYOhEAlW+xobZo= +gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/cheggaaa/pb.v1 v1.0.25/go.mod h1:V/YB90LKu/1FcN3WVnfiiE5oMCibMjukxqG/qStrOgw= +gopkg.in/cheggaaa/pb.v1 v1.0.28/go.mod h1:V/YB90LKu/1FcN3WVnfiiE5oMCibMjukxqG/qStrOgw= +gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= +gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys= +gopkg.in/gcfg.v1 v1.2.3/go.mod h1:yesOnuUOFQAhST5vPY4nbZsb/huCgGGXlipJsBn0b3o= +gopkg.in/resty.v1 v1.12.0/go.mod h1:mDo4pnntr5jdWRML875a/NmxYqAlA73dVijT2AXvQQo= +gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 h1:uRGJdciOHaEIrze2W8Q3AKkepLTh2hOroT7a+7czfdQ= +gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw= +gopkg.in/warnings.v0 v0.1.2/go.mod h1:jksf8JmL6Qr/oQM2OXTHunEvvTAsrWBLb6OOjuVWRNI= +gopkg.in/yaml.v2 v2.0.0-20170812160011-eb3733d160e7/go.mod h1:JAlM8MvJe8wmxCU4Bli9HhUf9+ttbYbLASfIpnQbh74= +gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.5/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.6/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= +gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c h1:dUUwHk2QECo/6vqA44rthZ8ie2QXMNeKRTHCNY2nXvo= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg= +honnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= +honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= +rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8= +rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0= +rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA= +sigs.k8s.io/yaml v1.1.0/go.mod h1:UJmg0vDUVViEyp3mgSv9WPwZCDxu4rQW1olrI1uml+o= diff --git 
a/vendor/github.com/securego/gosec/v2/helpers.go b/vendor/github.com/securego/gosec/v2/helpers.go new file mode 100644 index 000000000..50ce19859 --- /dev/null +++ b/vendor/github.com/securego/gosec/v2/helpers.go @@ -0,0 +1,451 @@ +// (c) Copyright 2016 Hewlett Packard Enterprise Development LP +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package gosec + +import ( + "errors" + "fmt" + "go/ast" + "go/token" + "go/types" + "os" + "os/user" + "path/filepath" + "regexp" + "runtime" + "strconv" + "strings" +) + +// MatchCallByPackage ensures that the specified package is imported, +// adjusts the name for any aliases and ignores cases that are +// initialization only imports. +// +// Usage: +// node, matched := MatchCallByPackage(n, ctx, "math/rand", "Read") +// +func MatchCallByPackage(n ast.Node, c *Context, pkg string, names ...string) (*ast.CallExpr, bool) { + importedName, found := GetImportedName(pkg, c) + if !found { + return nil, false + } + + if callExpr, ok := n.(*ast.CallExpr); ok { + packageName, callName, err := GetCallInfo(callExpr, c) + if err != nil { + return nil, false + } + if packageName == importedName { + for _, name := range names { + if callName == name { + return callExpr, true + } + } + } + } + return nil, false +} + +// MatchCompLit will match an ast.CompositeLit based on the supplied type +func MatchCompLit(n ast.Node, ctx *Context, required string) *ast.CompositeLit { + if complit, ok := n.(*ast.CompositeLit); ok { + typeOf := ctx.Info.TypeOf(complit) + if typeOf.String() == required { + return complit + } + } + return nil +} + +// GetInt will read and return an integer value from an ast.BasicLit +func GetInt(n ast.Node) (int64, error) { + if node, ok := n.(*ast.BasicLit); ok && node.Kind == token.INT { + return strconv.ParseInt(node.Value, 0, 64) + } + return 0, fmt.Errorf("Unexpected AST node type: %T", n) +} + +// GetFloat will read and return a float value from an ast.BasicLit +func GetFloat(n ast.Node) (float64, error) { + if node, ok := n.(*ast.BasicLit); ok && node.Kind == token.FLOAT { + return strconv.ParseFloat(node.Value, 64) + } + return 0.0, fmt.Errorf("Unexpected AST node type: %T", n) +} + +// GetChar will read and return a char value from an ast.BasicLit +func GetChar(n ast.Node) (byte, error) { + if node, ok := n.(*ast.BasicLit); ok && node.Kind == token.CHAR { + return node.Value[0], nil + } + return 0, fmt.Errorf("Unexpected AST node type: %T", n) +} + +// GetString will read and return a string value from an ast.BasicLit +func GetString(n ast.Node) (string, error) { + if node, ok := n.(*ast.BasicLit); ok && node.Kind == token.STRING { + return strconv.Unquote(node.Value) + } + return "", fmt.Errorf("Unexpected AST node type: %T", n) +} + +// GetCallObject returns the object and call expression and associated +// object for a given AST node. nil, nil will be returned if the +// object cannot be resolved. 
+func GetCallObject(n ast.Node, ctx *Context) (*ast.CallExpr, types.Object) { + switch node := n.(type) { + case *ast.CallExpr: + switch fn := node.Fun.(type) { + case *ast.Ident: + return node, ctx.Info.Uses[fn] + case *ast.SelectorExpr: + return node, ctx.Info.Uses[fn.Sel] + } + } + return nil, nil +} + +// GetCallInfo returns the package or type and name associated with a +// call expression. +func GetCallInfo(n ast.Node, ctx *Context) (string, string, error) { + switch node := n.(type) { + case *ast.CallExpr: + switch fn := node.Fun.(type) { + case *ast.SelectorExpr: + switch expr := fn.X.(type) { + case *ast.Ident: + if expr.Obj != nil && expr.Obj.Kind == ast.Var { + t := ctx.Info.TypeOf(expr) + if t != nil { + return t.String(), fn.Sel.Name, nil + } + return "undefined", fn.Sel.Name, fmt.Errorf("missing type info") + } + return expr.Name, fn.Sel.Name, nil + case *ast.SelectorExpr: + if expr.Sel != nil { + t := ctx.Info.TypeOf(expr.Sel) + if t != nil { + return t.String(), fn.Sel.Name, nil + } + return "undefined", fn.Sel.Name, fmt.Errorf("missing type info") + } + case *ast.CallExpr: + switch call := expr.Fun.(type) { + case *ast.Ident: + if call.Name == "new" { + t := ctx.Info.TypeOf(expr.Args[0]) + if t != nil { + return t.String(), fn.Sel.Name, nil + } + return "undefined", fn.Sel.Name, fmt.Errorf("missing type info") + } + if call.Obj != nil { + switch decl := call.Obj.Decl.(type) { + case *ast.FuncDecl: + ret := decl.Type.Results + if ret != nil && len(ret.List) > 0 { + ret1 := ret.List[0] + if ret1 != nil { + t := ctx.Info.TypeOf(ret1.Type) + if t != nil { + return t.String(), fn.Sel.Name, nil + } + return "undefined", fn.Sel.Name, fmt.Errorf("missing type info") + } + } + } + } + } + } + case *ast.Ident: + return ctx.Pkg.Name(), fn.Name, nil + } + } + + return "", "", fmt.Errorf("unable to determine call info") +} + +// GetCallStringArgsValues returns the values of strings arguments if they can be resolved +func GetCallStringArgsValues(n ast.Node, ctx *Context) []string { + values := []string{} + switch node := n.(type) { + case *ast.CallExpr: + for _, arg := range node.Args { + switch param := arg.(type) { + case *ast.BasicLit: + value, err := GetString(param) + if err == nil { + values = append(values, value) + } + case *ast.Ident: + values = append(values, GetIdentStringValues(param)...) + } + } + } + return values +} + +// GetIdentStringValues return the string values of an Ident if they can be resolved +func GetIdentStringValues(ident *ast.Ident) []string { + values := []string{} + obj := ident.Obj + if obj != nil { + switch decl := obj.Decl.(type) { + case *ast.ValueSpec: + for _, v := range decl.Values { + value, err := GetString(v) + if err == nil { + values = append(values, value) + } + } + case *ast.AssignStmt: + for _, v := range decl.Rhs { + value, err := GetString(v) + if err == nil { + values = append(values, value) + } + } + } + } + return values +} + +// GetBinaryExprOperands returns all operands of a binary expression by traversing +// the expression tree +func GetBinaryExprOperands(be *ast.BinaryExpr) []ast.Node { + var traverse func(be *ast.BinaryExpr) + result := []ast.Node{} + traverse = func(be *ast.BinaryExpr) { + if lhs, ok := be.X.(*ast.BinaryExpr); ok { + traverse(lhs) + } else { + result = append(result, be.X) + } + if rhs, ok := be.Y.(*ast.BinaryExpr); ok { + traverse(rhs) + } else { + result = append(result, be.Y) + } + } + traverse(be) + return result +} + +// GetImportedName returns the name used for the package within the +// code. 
It will resolve aliases and ignores initialization only imports. +func GetImportedName(path string, ctx *Context) (string, bool) { + importName, imported := ctx.Imports.Imported[path] + if !imported { + return "", false + } + + if _, initonly := ctx.Imports.InitOnly[path]; initonly { + return "", false + } + + if alias, ok := ctx.Imports.Aliased[path]; ok { + importName = alias + } + return importName, true +} + +// GetImportPath resolves the full import path of an identifier based on +// the imports in the current context. +func GetImportPath(name string, ctx *Context) (string, bool) { + for path := range ctx.Imports.Imported { + if imported, ok := GetImportedName(path, ctx); ok && imported == name { + return path, true + } + } + return "", false +} + +// GetLocation returns the filename and line number of an ast.Node +func GetLocation(n ast.Node, ctx *Context) (string, int) { + fobj := ctx.FileSet.File(n.Pos()) + return fobj.Name(), fobj.Line(n.Pos()) +} + +// Gopath returns all GOPATHs +func Gopath() []string { + defaultGoPath := runtime.GOROOT() + if u, err := user.Current(); err == nil { + defaultGoPath = filepath.Join(u.HomeDir, "go") + } + path := Getenv("GOPATH", defaultGoPath) + paths := strings.Split(path, string(os.PathListSeparator)) + for idx, path := range paths { + if abs, err := filepath.Abs(path); err == nil { + paths[idx] = abs + } + } + return paths +} + +// Getenv returns the values of the environment variable, otherwise +// returns the default if variable is not set +func Getenv(key, userDefault string) string { + if val := os.Getenv(key); val != "" { + return val + } + return userDefault +} + +// GetPkgRelativePath returns the Go relative relative path derived +// form the given path +func GetPkgRelativePath(path string) (string, error) { + abspath, err := filepath.Abs(path) + if err != nil { + abspath = path + } + if strings.HasSuffix(abspath, ".go") { + abspath = filepath.Dir(abspath) + } + for _, base := range Gopath() { + projectRoot := filepath.FromSlash(fmt.Sprintf("%s/src/", base)) + if strings.HasPrefix(abspath, projectRoot) { + return strings.TrimPrefix(abspath, projectRoot), nil + } + } + return "", errors.New("no project relative path found") +} + +// GetPkgAbsPath returns the Go package absolute path derived from +// the given path +func GetPkgAbsPath(pkgPath string) (string, error) { + absPath, err := filepath.Abs(pkgPath) + if err != nil { + return "", err + } + if _, err := os.Stat(absPath); os.IsNotExist(err) { + return "", errors.New("no project absolute path found") + } + return absPath, nil +} + +// ConcatString recursively concatenates strings from a binary expression +func ConcatString(n *ast.BinaryExpr) (string, bool) { + var s string + // sub expressions are found in X object, Y object is always last BasicLit + if rightOperand, ok := n.Y.(*ast.BasicLit); ok { + if str, err := GetString(rightOperand); err == nil { + s = str + s + } + } else { + return "", false + } + if leftOperand, ok := n.X.(*ast.BinaryExpr); ok { + if recursion, ok := ConcatString(leftOperand); ok { + s = recursion + s + } + } else if leftOperand, ok := n.X.(*ast.BasicLit); ok { + if str, err := GetString(leftOperand); err == nil { + s = str + s + } + } else { + return "", false + } + return s, true +} + +// FindVarIdentities returns array of all variable identities in a given binary expression +func FindVarIdentities(n *ast.BinaryExpr, c *Context) ([]*ast.Ident, bool) { + identities := []*ast.Ident{} + // sub expressions are found in X object, Y object is always the last term 
+ if rightOperand, ok := n.Y.(*ast.Ident); ok { + obj := c.Info.ObjectOf(rightOperand) + if _, ok := obj.(*types.Var); ok && !TryResolve(rightOperand, c) { + identities = append(identities, rightOperand) + } + } + if leftOperand, ok := n.X.(*ast.BinaryExpr); ok { + if leftIdentities, ok := FindVarIdentities(leftOperand, c); ok { + identities = append(identities, leftIdentities...) + } + } else { + if leftOperand, ok := n.X.(*ast.Ident); ok { + obj := c.Info.ObjectOf(leftOperand) + if _, ok := obj.(*types.Var); ok && !TryResolve(leftOperand, c) { + identities = append(identities, leftOperand) + } + } + } + + if len(identities) > 0 { + return identities, true + } + // if nil or error, return false + return nil, false +} + +// PackagePaths returns a slice with all packages path at given root directory +func PackagePaths(root string, excludes []*regexp.Regexp) ([]string, error) { + if strings.HasSuffix(root, "...") { + root = root[0 : len(root)-3] + } else { + return []string{root}, nil + } + paths := map[string]bool{} + err := filepath.Walk(root, func(path string, f os.FileInfo, err error) error { + if filepath.Ext(path) == ".go" { + path = filepath.Dir(path) + if isExcluded(path, excludes) { + return nil + } + paths[path] = true + } + return nil + }) + if err != nil { + return []string{}, err + } + + result := []string{} + for path := range paths { + result = append(result, path) + } + return result, nil +} + +// isExcluded checks if a string matches any of the exclusion regexps +func isExcluded(str string, excludes []*regexp.Regexp) bool { + if excludes == nil { + return false + } + for _, exclude := range excludes { + if exclude != nil && exclude.MatchString(str) { + return true + } + } + return false +} + +// ExcludedDirsRegExp builds the regexps for a list of excluded dirs provided as strings +func ExcludedDirsRegExp(excludedDirs []string) []*regexp.Regexp { + var exps []*regexp.Regexp + for _, excludedDir := range excludedDirs { + str := fmt.Sprintf(`([\\/])?%s([\\/])?`, excludedDir) + r := regexp.MustCompile(str) + exps = append(exps, r) + } + return exps +} + +// RootPath returns the absolute root path of a scan +func RootPath(root string) (string, error) { + root = strings.TrimSuffix(root, "...") + return filepath.Abs(root) +} diff --git a/vendor/github.com/securego/gosec/v2/import_tracker.go b/vendor/github.com/securego/gosec/v2/import_tracker.go new file mode 100644 index 000000000..cbb8c5518 --- /dev/null +++ b/vendor/github.com/securego/gosec/v2/import_tracker.go @@ -0,0 +1,75 @@ +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package gosec + +import ( + "go/ast" + "go/types" + "strings" +) + +// ImportTracker is used to normalize the packages that have been imported +// by a source file. It is able to differentiate between plain imports, aliased +// imports and init only imports. 
+type ImportTracker struct { + Imported map[string]string + Aliased map[string]string + InitOnly map[string]bool +} + +// NewImportTracker creates an empty Import tracker instance +func NewImportTracker() *ImportTracker { + return &ImportTracker{ + make(map[string]string), + make(map[string]string), + make(map[string]bool), + } +} + +// TrackFile track all the imports used by the supplied file +func (t *ImportTracker) TrackFile(file *ast.File) { + for _, imp := range file.Imports { + path := strings.Trim(imp.Path.Value, `"`) + parts := strings.Split(path, "/") + if len(parts) > 0 { + name := parts[len(parts)-1] + t.Imported[path] = name + } + } +} + +// TrackPackages tracks all the imports used by the supplied packages +func (t *ImportTracker) TrackPackages(pkgs ...*types.Package) { + for _, pkg := range pkgs { + t.Imported[pkg.Path()] = pkg.Name() + } +} + +// TrackImport tracks imports and handles the 'unsafe' import +func (t *ImportTracker) TrackImport(n ast.Node) { + if imported, ok := n.(*ast.ImportSpec); ok { + path := strings.Trim(imported.Path.Value, `"`) + if imported.Name != nil { + if imported.Name.Name == "_" { + // Initialization only import + t.InitOnly[path] = true + } else { + // Aliased import + t.Aliased[path] = imported.Name.Name + } + } + if path == "unsafe" { + t.Imported[path] = path + } + } +} diff --git a/vendor/github.com/securego/gosec/v2/install.sh b/vendor/github.com/securego/gosec/v2/install.sh new file mode 100644 index 000000000..0da55d379 --- /dev/null +++ b/vendor/github.com/securego/gosec/v2/install.sh @@ -0,0 +1,375 @@ +#!/bin/sh +set -e +# Code generated by godownloader. DO NOT EDIT. +# + +usage() { + this=$1 + cat </dev/null +} +echoerr() { + echo "$@" 1>&2 +} +log_prefix() { + echo "$0" +} +_logp=6 +log_set_priority() { + _logp="$1" +} +log_priority() { + if test -z "$1"; then + echo "$_logp" + return + fi + [ "$1" -le "$_logp" ] +} +log_tag() { + case $1 in + 0) echo "emerg" ;; + 1) echo "alert" ;; + 2) echo "crit" ;; + 3) echo "err" ;; + 4) echo "warning" ;; + 5) echo "notice" ;; + 6) echo "info" ;; + 7) echo "debug" ;; + *) echo "$1" ;; + esac +} +log_debug() { + log_priority 7 || return 0 + echoerr "$(log_prefix)" "$(log_tag 7)" "$@" +} +log_info() { + log_priority 6 || return 0 + echoerr "$(log_prefix)" "$(log_tag 6)" "$@" +} +log_err() { + log_priority 3 || return 0 + echoerr "$(log_prefix)" "$(log_tag 3)" "$@" +} +log_crit() { + log_priority 2 || return 0 + echoerr "$(log_prefix)" "$(log_tag 2)" "$@" +} +uname_os() { + os=$(uname -s | tr '[:upper:]' '[:lower:]') + case "$os" in + cygwin_nt*) os="windows" ;; + mingw*) os="windows" ;; + msys_nt*) os="windows" ;; + esac + echo "$os" +} +uname_arch() { + arch=$(uname -m) + case $arch in + x86_64) arch="amd64" ;; + x86) arch="386" ;; + i686) arch="386" ;; + i386) arch="386" ;; + aarch64) arch="arm64" ;; + armv5*) arch="armv5" ;; + armv6*) arch="armv6" ;; + armv7*) arch="armv7" ;; + esac + echo ${arch} +} +uname_os_check() { + os=$(uname_os) + case "$os" in + darwin) return 0 ;; + dragonfly) return 0 ;; + freebsd) return 0 ;; + linux) return 0 ;; + android) return 0 ;; + nacl) return 0 ;; + netbsd) return 0 ;; + openbsd) return 0 ;; + plan9) return 0 ;; + solaris) return 0 ;; + windows) return 0 ;; + esac + log_crit "uname_os_check '$(uname -s)' got converted to '$os' which is not a GOOS value. 
Please file bug at https://github.com/client9/shlib" + return 1 +} +uname_arch_check() { + arch=$(uname_arch) + case "$arch" in + 386) return 0 ;; + amd64) return 0 ;; + arm64) return 0 ;; + armv5) return 0 ;; + armv6) return 0 ;; + armv7) return 0 ;; + ppc64) return 0 ;; + ppc64le) return 0 ;; + mips) return 0 ;; + mipsle) return 0 ;; + mips64) return 0 ;; + mips64le) return 0 ;; + s390x) return 0 ;; + amd64p32) return 0 ;; + esac + log_crit "uname_arch_check '$(uname -m)' got converted to '$arch' which is not a GOARCH value. Please file bug report at https://github.com/client9/shlib" + return 1 +} +untar() { + tarball=$1 + case "${tarball}" in + *.tar.gz | *.tgz) tar --no-same-owner -xzf "${tarball}" ;; + *.tar) tar --no-same-owner -xf "${tarball}" ;; + *.zip) unzip "${tarball}" ;; + *) + log_err "untar unknown archive format for ${tarball}" + return 1 + ;; + esac +} +http_download_curl() { + local_file=$1 + source_url=$2 + header=$3 + if [ -z "$header" ]; then + code=$(curl -w '%{http_code}' -sL -o "$local_file" "$source_url") + else + code=$(curl -w '%{http_code}' -sL -H "$header" -o "$local_file" "$source_url") + fi + if [ "$code" != "200" ]; then + log_debug "http_download_curl received HTTP status $code" + return 1 + fi + return 0 +} +http_download_wget() { + local_file=$1 + source_url=$2 + header=$3 + if [ -z "$header" ]; then + wget -q -O "$local_file" "$source_url" + else + wget -q --header "$header" -O "$local_file" "$source_url" + fi +} +http_download() { + log_debug "http_download $2" + if is_command curl; then + http_download_curl "$@" + return + elif is_command wget; then + http_download_wget "$@" + return + fi + log_crit "http_download unable to find wget or curl" + return 1 +} +http_copy() { + tmp=$(mktemp) + http_download "${tmp}" "$1" "$2" || return 1 + body=$(cat "$tmp") + rm -f "${tmp}" + echo "$body" +} +github_release() { + owner_repo=$1 + version=$2 + test -z "$version" && version="latest" + giturl="https://github.com/${owner_repo}/releases/${version}" + json=$(http_copy "$giturl" "Accept:application/json") + test -z "$json" && return 1 + version=$(echo "$json" | tr -s '\n' ' ' | sed 's/.*"tag_name":"//' | sed 's/".*//') + test -z "$version" && return 1 + echo "$version" +} +hash_sha256() { + TARGET=${1:-/dev/stdin} + if is_command gsha256sum; then + hash=$(gsha256sum "$TARGET") || return 1 + echo "$hash" | cut -d ' ' -f 1 + elif is_command sha256sum; then + hash=$(sha256sum "$TARGET") || return 1 + echo "$hash" | cut -d ' ' -f 1 + elif is_command shasum; then + hash=$(shasum -a 256 "$TARGET" 2>/dev/null) || return 1 + echo "$hash" | cut -d ' ' -f 1 + elif is_command openssl; then + hash=$(openssl -dst openssl dgst -sha256 "$TARGET") || return 1 + echo "$hash" | cut -d ' ' -f a + else + log_crit "hash_sha256 unable to find command to compute sha-256 hash" + return 1 + fi +} +hash_sha256_verify() { + TARGET=$1 + checksums=$2 + if [ -z "$checksums" ]; then + log_err "hash_sha256_verify checksum file not specified in arg2" + return 1 + fi + BASENAME=${TARGET##*/} + want=$(grep "${BASENAME}" "${checksums}" 2>/dev/null | tr '\t' ' ' | cut -d ' ' -f 1) + if [ -z "$want" ]; then + log_err "hash_sha256_verify unable to find checksum for '${TARGET}' in '${checksums}'" + return 1 + fi + got=$(hash_sha256 "$TARGET") + if [ "$want" != "$got" ]; then + log_err "hash_sha256_verify checksum for '$TARGET' did not verify ${want} vs $got" + return 1 + fi +} +cat /dev/null < end { + break + } else if pos >= start && pos <= end { + code := fmt.Sprintf("%d: %s\n", pos, scanner.Text()) + 
buf.WriteString(code) + } + } + return buf.String(), nil +} + +func codeSnippetStartLine(node ast.Node, fobj *token.File) int64 { + s := (int64)(fobj.Line(node.Pos())) + if s-SnippetOffset > 0 { + return s - SnippetOffset + } + return s +} + +func codeSnippetEndLine(node ast.Node, fobj *token.File) int64 { + e := (int64)(fobj.Line(node.End())) + return e + SnippetOffset +} + +// NewIssue creates a new Issue +func NewIssue(ctx *Context, node ast.Node, ruleID, desc string, severity Score, confidence Score) *Issue { + fobj := ctx.FileSet.File(node.Pos()) + name := fobj.Name() + start, end := fobj.Line(node.Pos()), fobj.Line(node.End()) + line := strconv.Itoa(start) + if start != end { + line = fmt.Sprintf("%d-%d", start, end) + } + col := strconv.Itoa(fobj.Position(node.Pos()).Column) + + var code string + if file, err := os.Open(fobj.Name()); err == nil { + defer file.Close() // #nosec + s := codeSnippetStartLine(node, fobj) + e := codeSnippetEndLine(node, fobj) + code, err = codeSnippet(file, s, e, node) + if err != nil { + code = err.Error() + } + } + + return &Issue{ + File: name, + Line: line, + Col: col, + RuleID: ruleID, + What: desc, + Confidence: confidence, + Severity: severity, + Code: code, + Cwe: GetCweByRule(ruleID), + } +} diff --git a/vendor/github.com/securego/gosec/v2/renovate.json b/vendor/github.com/securego/gosec/v2/renovate.json new file mode 100644 index 000000000..95f2b7c1c --- /dev/null +++ b/vendor/github.com/securego/gosec/v2/renovate.json @@ -0,0 +1,24 @@ +{ + "dependencyDashboard": true, + "vulnerabilityAlerts": { + "enabled": true + }, + "extends": [ + ":preserveSemverRanges", + "group:all", + "schedule:weekly" + ], + "lockFileMaintenance": { + "commitMessageAction": "Update", + "enabled": true, + "extends": [ + "group:all", + "schedule:weekly" + ] + }, + "postUpdateOptions": [ + "gomodTidy", + "gomodUpdateImportPaths" + ], + "separateMajorMinor": false +} diff --git a/vendor/github.com/securego/gosec/v2/report.go b/vendor/github.com/securego/gosec/v2/report.go new file mode 100644 index 000000000..96b1466d5 --- /dev/null +++ b/vendor/github.com/securego/gosec/v2/report.go @@ -0,0 +1,24 @@ +package gosec + +// ReportInfo this is report information +type ReportInfo struct { + Errors map[string][]Error `json:"Golang errors"` + Issues []*Issue + Stats *Metrics + GosecVersion string +} + +// NewReportInfo instantiate a ReportInfo +func NewReportInfo(issues []*Issue, metrics *Metrics, errors map[string][]Error) *ReportInfo { + return &ReportInfo{ + Errors: errors, + Issues: issues, + Stats: metrics, + } +} + +// WithVersion defines the version of gosec used to generate the report +func (r *ReportInfo) WithVersion(version string) *ReportInfo { + r.GosecVersion = version + return r +} diff --git a/vendor/github.com/securego/gosec/v2/resolve.go b/vendor/github.com/securego/gosec/v2/resolve.go new file mode 100644 index 000000000..cdc287e8e --- /dev/null +++ b/vendor/github.com/securego/gosec/v2/resolve.go @@ -0,0 +1,95 @@ +// (c) Copyright 2016 Hewlett Packard Enterprise Development LP +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package gosec + +import "go/ast" + +func resolveIdent(n *ast.Ident, c *Context) bool { + if n.Obj == nil || n.Obj.Kind != ast.Var { + return true + } + if node, ok := n.Obj.Decl.(ast.Node); ok { + return TryResolve(node, c) + } + return false +} + +func resolveValueSpec(n *ast.ValueSpec, c *Context) bool { + if len(n.Values) == 0 { + return false + } + for _, value := range n.Values { + if !TryResolve(value, c) { + return false + } + } + return true +} + +func resolveAssign(n *ast.AssignStmt, c *Context) bool { + if len(n.Rhs) == 0 { + return false + } + for _, arg := range n.Rhs { + if !TryResolve(arg, c) { + return false + } + } + return true +} + +func resolveCompLit(n *ast.CompositeLit, c *Context) bool { + if len(n.Elts) == 0 { + return false + } + for _, arg := range n.Elts { + if !TryResolve(arg, c) { + return false + } + } + return true +} + +func resolveBinExpr(n *ast.BinaryExpr, c *Context) bool { + return (TryResolve(n.X, c) && TryResolve(n.Y, c)) +} + +func resolveCallExpr(n *ast.CallExpr, c *Context) bool { + // TODO(tkelsey): next step, full function resolution + return false +} + +// TryResolve will attempt, given a subtree starting at some AST node, to resolve +// all values contained within to a known constant. It is used to check for any +// unknown values in compound expressions. +func TryResolve(n ast.Node, c *Context) bool { + switch node := n.(type) { + case *ast.BasicLit: + return true + case *ast.CompositeLit: + return resolveCompLit(node, c) + case *ast.Ident: + return resolveIdent(node, c) + case *ast.ValueSpec: + return resolveValueSpec(node, c) + case *ast.AssignStmt: + return resolveAssign(node, c) + case *ast.CallExpr: + return resolveCallExpr(node, c) + case *ast.BinaryExpr: + return resolveBinExpr(node, c) + } + return false +} diff --git a/vendor/github.com/securego/gosec/v2/rule.go b/vendor/github.com/securego/gosec/v2/rule.go new file mode 100644 index 000000000..fbba089bb --- /dev/null +++ b/vendor/github.com/securego/gosec/v2/rule.go @@ -0,0 +1,59 @@ +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package gosec + +import ( + "go/ast" + "reflect" +) + +// The Rule interface used by all rules supported by gosec. +type Rule interface { + ID() string + Match(ast.Node, *Context) (*Issue, error) +} + +// RuleBuilder is used to register a rule definition with the analyzer +type RuleBuilder func(id string, c Config) (Rule, []ast.Node) + +// A RuleSet maps lists of rules to the type of AST node they should be run on. +// The analyzer will only invoke rules contained in the list associated with the +// type of AST node it is currently visiting. +type RuleSet map[reflect.Type][]Rule + +// NewRuleSet constructs a new RuleSet +func NewRuleSet() RuleSet { + return make(RuleSet) +} + +// Register adds a trigger for the supplied rule for the the +// specified ast nodes. 
+func (r RuleSet) Register(rule Rule, nodes ...ast.Node) { + for _, n := range nodes { + t := reflect.TypeOf(n) + if rules, ok := r[t]; ok { + r[t] = append(rules, rule) + } else { + r[t] = []Rule{rule} + } + } +} + +// RegisteredFor will return all rules that are registered for a +// specified ast node. +func (r RuleSet) RegisteredFor(n ast.Node) []Rule { + if rules, found := r[reflect.TypeOf(n)]; found { + return rules + } + return []Rule{} +} diff --git a/vendor/github.com/securego/gosec/v2/rules/archive.go b/vendor/github.com/securego/gosec/v2/rules/archive.go new file mode 100644 index 000000000..92c7e4481 --- /dev/null +++ b/vendor/github.com/securego/gosec/v2/rules/archive.go @@ -0,0 +1,65 @@ +package rules + +import ( + "go/ast" + "go/types" + + "github.com/securego/gosec/v2" +) + +type archive struct { + gosec.MetaData + calls gosec.CallList + argTypes []string +} + +func (a *archive) ID() string { + return a.MetaData.ID +} + +// Match inspects AST nodes to determine if the filepath.Joins uses any argument derived from type zip.File or tar.Header +func (a *archive) Match(n ast.Node, c *gosec.Context) (*gosec.Issue, error) { + if node := a.calls.ContainsPkgCallExpr(n, c, false); node != nil { + for _, arg := range node.Args { + var argType types.Type + if selector, ok := arg.(*ast.SelectorExpr); ok { + argType = c.Info.TypeOf(selector.X) + } else if ident, ok := arg.(*ast.Ident); ok { + if ident.Obj != nil && ident.Obj.Kind == ast.Var { + decl := ident.Obj.Decl + if assign, ok := decl.(*ast.AssignStmt); ok { + if selector, ok := assign.Rhs[0].(*ast.SelectorExpr); ok { + argType = c.Info.TypeOf(selector.X) + } + } + } + } + + if argType != nil { + for _, t := range a.argTypes { + if argType.String() == t { + return gosec.NewIssue(c, n, a.ID(), a.What, a.Severity, a.Confidence), nil + } + } + } + } + } + return nil, nil +} + +// NewArchive creates a new rule which detects the file traversal when extracting zip/tar archives +func NewArchive(id string, conf gosec.Config) (gosec.Rule, []ast.Node) { + calls := gosec.NewCallList() + calls.Add("path/filepath", "Join") + calls.Add("path", "Join") + return &archive{ + calls: calls, + argTypes: []string{"*archive/zip.File", "*archive/tar.Header"}, + MetaData: gosec.MetaData{ + ID: id, + Severity: gosec.Medium, + Confidence: gosec.High, + What: "File traversal when extracting zip/tar archive", + }, + }, []ast.Node{(*ast.CallExpr)(nil)} +} diff --git a/vendor/github.com/securego/gosec/v2/rules/bad_defer.go b/vendor/github.com/securego/gosec/v2/rules/bad_defer.go new file mode 100644 index 000000000..13b42070d --- /dev/null +++ b/vendor/github.com/securego/gosec/v2/rules/bad_defer.go @@ -0,0 +1,68 @@ +package rules + +import ( + "fmt" + "go/ast" + "strings" + + "github.com/securego/gosec/v2" +) + +type deferType struct { + typ string + methods []string +} + +type badDefer struct { + gosec.MetaData + types []deferType +} + +func (r *badDefer) ID() string { + return r.MetaData.ID +} + +func normalize(typ string) string { + return strings.TrimPrefix(typ, "*") +} + +func contains(methods []string, method string) bool { + for _, m := range methods { + if m == method { + return true + } + } + return false +} + +func (r *badDefer) Match(n ast.Node, c *gosec.Context) (*gosec.Issue, error) { + if deferStmt, ok := n.(*ast.DeferStmt); ok { + for _, deferTyp := range r.types { + if typ, method, err := gosec.GetCallInfo(deferStmt.Call, c); err == nil { + if normalize(typ) == deferTyp.typ && contains(deferTyp.methods, method) { + return gosec.NewIssue(c, n, 
r.ID(), fmt.Sprintf(r.What, method, typ), r.Severity, r.Confidence), nil + } + } + } + } + + return nil, nil +} + +// NewDeferredClosing detects unsafe defer of error returning methods +func NewDeferredClosing(id string, conf gosec.Config) (gosec.Rule, []ast.Node) { + return &badDefer{ + types: []deferType{ + { + typ: "os.File", + methods: []string{"Close"}, + }, + }, + MetaData: gosec.MetaData{ + ID: id, + Severity: gosec.Medium, + Confidence: gosec.High, + What: "Deferring unsafe method %q on type %q", + }, + }, []ast.Node{(*ast.DeferStmt)(nil)} +} diff --git a/vendor/github.com/securego/gosec/v2/rules/bind.go b/vendor/github.com/securego/gosec/v2/rules/bind.go new file mode 100644 index 000000000..8f6af067a --- /dev/null +++ b/vendor/github.com/securego/gosec/v2/rules/bind.go @@ -0,0 +1,83 @@ +// (c) Copyright 2016 Hewlett Packard Enterprise Development LP +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package rules + +import ( + "go/ast" + "regexp" + + "github.com/securego/gosec/v2" +) + +// Looks for net.Listen("0.0.0.0") or net.Listen(":8080") +type bindsToAllNetworkInterfaces struct { + gosec.MetaData + calls gosec.CallList + pattern *regexp.Regexp +} + +func (r *bindsToAllNetworkInterfaces) ID() string { + return r.MetaData.ID +} + +func (r *bindsToAllNetworkInterfaces) Match(n ast.Node, c *gosec.Context) (*gosec.Issue, error) { + callExpr := r.calls.ContainsPkgCallExpr(n, c, false) + if callExpr == nil { + return nil, nil + } + if len(callExpr.Args) > 1 { + arg := callExpr.Args[1] + if bl, ok := arg.(*ast.BasicLit); ok { + if arg, err := gosec.GetString(bl); err == nil { + if r.pattern.MatchString(arg) { + return gosec.NewIssue(c, n, r.ID(), r.What, r.Severity, r.Confidence), nil + } + } + } else if ident, ok := arg.(*ast.Ident); ok { + values := gosec.GetIdentStringValues(ident) + for _, value := range values { + if r.pattern.MatchString(value) { + return gosec.NewIssue(c, n, r.ID(), r.What, r.Severity, r.Confidence), nil + } + } + } + } else if len(callExpr.Args) > 0 { + values := gosec.GetCallStringArgsValues(callExpr.Args[0], c) + for _, value := range values { + if r.pattern.MatchString(value) { + return gosec.NewIssue(c, n, r.ID(), r.What, r.Severity, r.Confidence), nil + } + } + } + return nil, nil +} + +// NewBindsToAllNetworkInterfaces detects socket connections that are setup to +// listen on all network interfaces. 
+func NewBindsToAllNetworkInterfaces(id string, conf gosec.Config) (gosec.Rule, []ast.Node) { + calls := gosec.NewCallList() + calls.Add("net", "Listen") + calls.Add("crypto/tls", "Listen") + return &bindsToAllNetworkInterfaces{ + calls: calls, + pattern: regexp.MustCompile(`^(0.0.0.0|:).*$`), + MetaData: gosec.MetaData{ + ID: id, + Severity: gosec.Medium, + Confidence: gosec.High, + What: "Binds to all network interfaces", + }, + }, []ast.Node{(*ast.CallExpr)(nil)} +} diff --git a/vendor/github.com/securego/gosec/v2/rules/blocklist.go b/vendor/github.com/securego/gosec/v2/rules/blocklist.go new file mode 100644 index 000000000..afd4ee56b --- /dev/null +++ b/vendor/github.com/securego/gosec/v2/rules/blocklist.go @@ -0,0 +1,94 @@ +// (c) Copyright 2016 Hewlett Packard Enterprise Development LP +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package rules + +import ( + "go/ast" + "strings" + + "github.com/securego/gosec/v2" +) + +type blocklistedImport struct { + gosec.MetaData + Blocklisted map[string]string +} + +func unquote(original string) string { + copy := strings.TrimSpace(original) + copy = strings.TrimLeft(copy, `"`) + return strings.TrimRight(copy, `"`) +} + +func (r *blocklistedImport) ID() string { + return r.MetaData.ID +} + +func (r *blocklistedImport) Match(n ast.Node, c *gosec.Context) (*gosec.Issue, error) { + if node, ok := n.(*ast.ImportSpec); ok { + if description, ok := r.Blocklisted[unquote(node.Path.Value)]; ok { + return gosec.NewIssue(c, node, r.ID(), description, r.Severity, r.Confidence), nil + } + } + return nil, nil +} + +// NewBlocklistedImports reports when a blocklisted import is being used. +// Typically when a deprecated technology is being used. 
+func NewBlocklistedImports(id string, conf gosec.Config, blocklist map[string]string) (gosec.Rule, []ast.Node) { + return &blocklistedImport{ + MetaData: gosec.MetaData{ + ID: id, + Severity: gosec.Medium, + Confidence: gosec.High, + }, + Blocklisted: blocklist, + }, []ast.Node{(*ast.ImportSpec)(nil)} +} + +// NewBlocklistedImportMD5 fails if MD5 is imported +func NewBlocklistedImportMD5(id string, conf gosec.Config) (gosec.Rule, []ast.Node) { + return NewBlocklistedImports(id, conf, map[string]string{ + "crypto/md5": "Blocklisted import crypto/md5: weak cryptographic primitive", + }) +} + +// NewBlocklistedImportDES fails if DES is imported +func NewBlocklistedImportDES(id string, conf gosec.Config) (gosec.Rule, []ast.Node) { + return NewBlocklistedImports(id, conf, map[string]string{ + "crypto/des": "Blocklisted import crypto/des: weak cryptographic primitive", + }) +} + +// NewBlocklistedImportRC4 fails if DES is imported +func NewBlocklistedImportRC4(id string, conf gosec.Config) (gosec.Rule, []ast.Node) { + return NewBlocklistedImports(id, conf, map[string]string{ + "crypto/rc4": "Blocklisted import crypto/rc4: weak cryptographic primitive", + }) +} + +// NewBlocklistedImportCGI fails if CGI is imported +func NewBlocklistedImportCGI(id string, conf gosec.Config) (gosec.Rule, []ast.Node) { + return NewBlocklistedImports(id, conf, map[string]string{ + "net/http/cgi": "Blocklisted import net/http/cgi: Go versions < 1.6.3 are vulnerable to Httpoxy attack: (CVE-2016-5386)", + }) +} + +// NewBlocklistedImportSHA1 fails if SHA1 is imported +func NewBlocklistedImportSHA1(id string, conf gosec.Config) (gosec.Rule, []ast.Node) { + return NewBlocklistedImports(id, conf, map[string]string{ + "crypto/sha1": "Blocklisted import crypto/sha1: weak cryptographic primitive", + }) +} diff --git a/vendor/github.com/securego/gosec/v2/rules/decompression-bomb.go b/vendor/github.com/securego/gosec/v2/rules/decompression-bomb.go new file mode 100644 index 000000000..02256faa9 --- /dev/null +++ b/vendor/github.com/securego/gosec/v2/rules/decompression-bomb.go @@ -0,0 +1,110 @@ +// (c) Copyright 2016 Hewlett Packard Enterprise Development LP +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package rules + +import ( + "fmt" + "go/ast" + + "github.com/securego/gosec/v2" +) + +type decompressionBombCheck struct { + gosec.MetaData + readerCalls gosec.CallList + copyCalls gosec.CallList +} + +func (d *decompressionBombCheck) ID() string { + return d.MetaData.ID +} + +func containsReaderCall(node ast.Node, ctx *gosec.Context, list gosec.CallList) bool { + if list.ContainsPkgCallExpr(node, ctx, false) != nil { + return true + } + // Resolve type info of ident (for *archive/zip.File.Open) + s, idt, _ := gosec.GetCallInfo(node, ctx) + return list.Contains(s, idt) +} + +func (d *decompressionBombCheck) Match(node ast.Node, ctx *gosec.Context) (*gosec.Issue, error) { + var readerVarObj map[*ast.Object]struct{} + + // To check multiple lines, ctx.PassedValues is used to store temporary data. 
+ if _, ok := ctx.PassedValues[d.ID()]; !ok { + readerVarObj = make(map[*ast.Object]struct{}) + ctx.PassedValues[d.ID()] = readerVarObj + } else if pv, ok := ctx.PassedValues[d.ID()].(map[*ast.Object]struct{}); ok { + readerVarObj = pv + } else { + return nil, fmt.Errorf("PassedValues[%s] of Context is not map[*ast.Object]struct{}, but %T", d.ID(), ctx.PassedValues[d.ID()]) + } + + // io.Copy is a common function. + // To reduce false positives, This rule detects code which is used for compressed data only. + switch n := node.(type) { + case *ast.AssignStmt: + for _, expr := range n.Rhs { + if callExpr, ok := expr.(*ast.CallExpr); ok && containsReaderCall(callExpr, ctx, d.readerCalls) { + if idt, ok := n.Lhs[0].(*ast.Ident); ok && idt.Name != "_" { + // Example: + // r, _ := zlib.NewReader(buf) + // Add r's Obj to readerVarObj map + readerVarObj[idt.Obj] = struct{}{} + } + } + } + case *ast.CallExpr: + if d.copyCalls.ContainsPkgCallExpr(n, ctx, false) != nil { + if idt, ok := n.Args[1].(*ast.Ident); ok { + if _, ok := readerVarObj[idt.Obj]; ok { + // Detect io.Copy(x, r) + return gosec.NewIssue(ctx, n, d.ID(), d.What, d.Severity, d.Confidence), nil + } + } + } + } + + return nil, nil +} + +// NewDecompressionBombCheck detects if there is potential DoS vulnerability via decompression bomb +func NewDecompressionBombCheck(id string, conf gosec.Config) (gosec.Rule, []ast.Node) { + readerCalls := gosec.NewCallList() + readerCalls.Add("compress/gzip", "NewReader") + readerCalls.AddAll("compress/zlib", "NewReader", "NewReaderDict") + readerCalls.Add("compress/bzip2", "NewReader") + readerCalls.AddAll("compress/flate", "NewReader", "NewReaderDict") + readerCalls.Add("compress/lzw", "NewReader") + readerCalls.Add("archive/tar", "NewReader") + readerCalls.Add("archive/zip", "NewReader") + readerCalls.Add("*archive/zip.File", "Open") + + copyCalls := gosec.NewCallList() + copyCalls.Add("io", "Copy") + copyCalls.Add("io", "CopyBuffer") + + return &decompressionBombCheck{ + MetaData: gosec.MetaData{ + ID: id, + Severity: gosec.Medium, + Confidence: gosec.Medium, + What: "Potential DoS vulnerability via decompression bomb", + }, + readerCalls: readerCalls, + copyCalls: copyCalls, + }, []ast.Node{(*ast.FuncDecl)(nil), (*ast.AssignStmt)(nil), (*ast.CallExpr)(nil)} +} diff --git a/vendor/github.com/securego/gosec/v2/rules/errors.go b/vendor/github.com/securego/gosec/v2/rules/errors.go new file mode 100644 index 000000000..7a34bc634 --- /dev/null +++ b/vendor/github.com/securego/gosec/v2/rules/errors.go @@ -0,0 +1,120 @@ +// (c) Copyright 2016 Hewlett Packard Enterprise Development LP +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
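As a reader aid for the decompression-bomb rule vendored above (G110): as implemented, it tracks variables assigned from the listed decompression readers and reports an unbounded io.Copy or io.CopyBuffer that drains them. A minimal, hedged sketch of the flagged pattern, using a hypothetical payload.gz and an assumed 10 MiB cap for the bounded alternative:

    package main

    import (
        "compress/gzip"
        "io"
        "os"
    )

    func main() {
        f, err := os.Open("payload.gz") // hypothetical input file
        if err != nil {
            panic(err)
        }
        defer f.Close()

        zr, err := gzip.NewReader(f)
        if err != nil {
            panic(err)
        }
        defer zr.Close()

        // Unbounded copy from a tracked decompression reader: the case G110 reports.
        if _, err := io.Copy(os.Stdout, zr); err != nil {
            panic(err)
        }

        // io.CopyN caps how much decompressed data is read and is not in the
        // rule's copy-call list.
        // _, _ = io.CopyN(os.Stdout, zr, 10<<20) // assumed 10 MiB limit
    }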
+ +package rules + +import ( + "go/ast" + "go/types" + + "github.com/securego/gosec/v2" +) + +type noErrorCheck struct { + gosec.MetaData + whitelist gosec.CallList +} + +func (r *noErrorCheck) ID() string { + return r.MetaData.ID +} + +func returnsError(callExpr *ast.CallExpr, ctx *gosec.Context) int { + if tv := ctx.Info.TypeOf(callExpr); tv != nil { + switch t := tv.(type) { + case *types.Tuple: + for pos := 0; pos < t.Len(); pos++ { + variable := t.At(pos) + if variable != nil && variable.Type().String() == "error" { + return pos + } + } + case *types.Named: + if t.String() == "error" { + return 0 + } + } + } + return -1 +} + +func (r *noErrorCheck) Match(n ast.Node, ctx *gosec.Context) (*gosec.Issue, error) { + switch stmt := n.(type) { + case *ast.AssignStmt: + cfg := ctx.Config + if enabled, err := cfg.IsGlobalEnabled(gosec.Audit); err == nil && enabled { + for _, expr := range stmt.Rhs { + if callExpr, ok := expr.(*ast.CallExpr); ok && r.whitelist.ContainsCallExpr(expr, ctx) == nil { + pos := returnsError(callExpr, ctx) + if pos < 0 || pos >= len(stmt.Lhs) { + return nil, nil + } + if id, ok := stmt.Lhs[pos].(*ast.Ident); ok && id.Name == "_" { + return gosec.NewIssue(ctx, n, r.ID(), r.What, r.Severity, r.Confidence), nil + } + } + } + } + case *ast.ExprStmt: + if callExpr, ok := stmt.X.(*ast.CallExpr); ok && r.whitelist.ContainsCallExpr(stmt.X, ctx) == nil { + pos := returnsError(callExpr, ctx) + if pos >= 0 { + return gosec.NewIssue(ctx, n, r.ID(), r.What, r.Severity, r.Confidence), nil + } + } + } + return nil, nil +} + +// NewNoErrorCheck detects if the returned error is unchecked +func NewNoErrorCheck(id string, conf gosec.Config) (gosec.Rule, []ast.Node) { + // TODO(gm) Come up with sensible defaults here. Or flip it to use a + // black list instead. + whitelist := gosec.NewCallList() + whitelist.AddAll("bytes.Buffer", "Write", "WriteByte", "WriteRune", "WriteString") + whitelist.AddAll("fmt", "Print", "Printf", "Println", "Fprint", "Fprintf", "Fprintln") + whitelist.AddAll("strings.Builder", "Write", "WriteByte", "WriteRune", "WriteString") + whitelist.Add("io.PipeWriter", "CloseWithError") + whitelist.Add("hash.Hash", "Write") + + if configured, ok := conf["G104"]; ok { + if whitelisted, ok := configured.(map[string]interface{}); ok { + for pkg, funcs := range whitelisted { + if funcs, ok := funcs.([]interface{}); ok { + whitelist.AddAll(pkg, toStringSlice(funcs)...) + } + } + } + } + + return &noErrorCheck{ + MetaData: gosec.MetaData{ + ID: id, + Severity: gosec.Low, + Confidence: gosec.High, + What: "Errors unhandled.", + }, + whitelist: whitelist, + }, []ast.Node{(*ast.AssignStmt)(nil), (*ast.ExprStmt)(nil)} +} + +func toStringSlice(values []interface{}) []string { + result := []string{} + for _, value := range values { + if value, ok := value.(string); ok { + result = append(result, value) + } + } + return result +} diff --git a/vendor/github.com/securego/gosec/v2/rules/fileperms.go b/vendor/github.com/securego/gosec/v2/rules/fileperms.go new file mode 100644 index 000000000..e6a80a5fb --- /dev/null +++ b/vendor/github.com/securego/gosec/v2/rules/fileperms.go @@ -0,0 +1,113 @@ +// (c) Copyright 2016 Hewlett Packard Enterprise Development LP +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package rules + +import ( + "fmt" + "go/ast" + "strconv" + + "github.com/securego/gosec/v2" +) + +type filePermissions struct { + gosec.MetaData + mode int64 + pkgs []string + calls []string +} + +func (r *filePermissions) ID() string { + return r.MetaData.ID +} + +func getConfiguredMode(conf map[string]interface{}, configKey string, defaultMode int64) int64 { + mode := defaultMode + if value, ok := conf[configKey]; ok { + switch value := value.(type) { + case int64: + mode = value + case string: + if m, e := strconv.ParseInt(value, 0, 64); e != nil { + mode = defaultMode + } else { + mode = m + } + } + } + return mode +} + +func (r *filePermissions) Match(n ast.Node, c *gosec.Context) (*gosec.Issue, error) { + for _, pkg := range r.pkgs { + if callexpr, matched := gosec.MatchCallByPackage(n, c, pkg, r.calls...); matched { + modeArg := callexpr.Args[len(callexpr.Args)-1] + if mode, err := gosec.GetInt(modeArg); err == nil && mode > r.mode { + return gosec.NewIssue(c, n, r.ID(), r.What, r.Severity, r.Confidence), nil + } + } + } + return nil, nil +} + +// NewWritePerms creates a rule to detect file Writes with bad permissions. +func NewWritePerms(id string, conf gosec.Config) (gosec.Rule, []ast.Node) { + mode := getConfiguredMode(conf, "G306", 0600) + return &filePermissions{ + mode: mode, + pkgs: []string{"io/ioutil", "os"}, + calls: []string{"WriteFile"}, + MetaData: gosec.MetaData{ + ID: id, + Severity: gosec.Medium, + Confidence: gosec.High, + What: fmt.Sprintf("Expect WriteFile permissions to be %#o or less", mode), + }, + }, []ast.Node{(*ast.CallExpr)(nil)} +} + +// NewFilePerms creates a rule to detect file creation with a more permissive than configured +// permission mask. +func NewFilePerms(id string, conf gosec.Config) (gosec.Rule, []ast.Node) { + mode := getConfiguredMode(conf, "G302", 0600) + return &filePermissions{ + mode: mode, + pkgs: []string{"os"}, + calls: []string{"OpenFile", "Chmod"}, + MetaData: gosec.MetaData{ + ID: id, + Severity: gosec.Medium, + Confidence: gosec.High, + What: fmt.Sprintf("Expect file permissions to be %#o or less", mode), + }, + }, []ast.Node{(*ast.CallExpr)(nil)} +} + +// NewMkdirPerms creates a rule to detect directory creation with more permissive than +// configured permission mask. 
+func NewMkdirPerms(id string, conf gosec.Config) (gosec.Rule, []ast.Node) { + mode := getConfiguredMode(conf, "G301", 0750) + return &filePermissions{ + mode: mode, + pkgs: []string{"os"}, + calls: []string{"Mkdir", "MkdirAll"}, + MetaData: gosec.MetaData{ + ID: id, + Severity: gosec.Medium, + Confidence: gosec.High, + What: fmt.Sprintf("Expect directory permissions to be %#o or less", mode), + }, + }, []ast.Node{(*ast.CallExpr)(nil)} +} diff --git a/vendor/github.com/securego/gosec/v2/rules/hardcoded_credentials.go b/vendor/github.com/securego/gosec/v2/rules/hardcoded_credentials.go new file mode 100644 index 000000000..acdd583e4 --- /dev/null +++ b/vendor/github.com/securego/gosec/v2/rules/hardcoded_credentials.go @@ -0,0 +1,173 @@ +// (c) Copyright 2016 Hewlett Packard Enterprise Development LP +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package rules + +import ( + "go/ast" + "go/token" + "regexp" + "strconv" + + zxcvbn "github.com/nbutton23/zxcvbn-go" + "github.com/securego/gosec/v2" +) + +type credentials struct { + gosec.MetaData + pattern *regexp.Regexp + entropyThreshold float64 + perCharThreshold float64 + truncate int + ignoreEntropy bool +} + +func (r *credentials) ID() string { + return r.MetaData.ID +} + +func truncate(s string, n int) string { + if n > len(s) { + return s + } + return s[:n] +} + +func (r *credentials) isHighEntropyString(str string) bool { + s := truncate(str, r.truncate) + info := zxcvbn.PasswordStrength(s, []string{}) + entropyPerChar := info.Entropy / float64(len(s)) + return (info.Entropy >= r.entropyThreshold || + (info.Entropy >= (r.entropyThreshold/2) && + entropyPerChar >= r.perCharThreshold)) +} + +func (r *credentials) Match(n ast.Node, ctx *gosec.Context) (*gosec.Issue, error) { + switch node := n.(type) { + case *ast.AssignStmt: + return r.matchAssign(node, ctx) + case *ast.ValueSpec: + return r.matchValueSpec(node, ctx) + case *ast.BinaryExpr: + return r.matchEqualityCheck(node, ctx) + } + return nil, nil +} + +func (r *credentials) matchAssign(assign *ast.AssignStmt, ctx *gosec.Context) (*gosec.Issue, error) { + for _, i := range assign.Lhs { + if ident, ok := i.(*ast.Ident); ok { + if r.pattern.MatchString(ident.Name) { + for _, e := range assign.Rhs { + if val, err := gosec.GetString(e); err == nil { + if r.ignoreEntropy || (!r.ignoreEntropy && r.isHighEntropyString(val)) { + return gosec.NewIssue(ctx, assign, r.ID(), r.What, r.Severity, r.Confidence), nil + } + } + } + } + } + } + return nil, nil +} + +func (r *credentials) matchValueSpec(valueSpec *ast.ValueSpec, ctx *gosec.Context) (*gosec.Issue, error) { + for index, ident := range valueSpec.Names { + if r.pattern.MatchString(ident.Name) && valueSpec.Values != nil { + // const foo, bar = "same value" + if len(valueSpec.Values) <= index { + index = len(valueSpec.Values) - 1 + } + if val, err := gosec.GetString(valueSpec.Values[index]); err == nil { + if r.ignoreEntropy || (!r.ignoreEntropy && r.isHighEntropyString(val)) { + return gosec.NewIssue(ctx, valueSpec, r.ID(), 
r.What, r.Severity, r.Confidence), nil + } + } + } + } + return nil, nil +} + +func (r *credentials) matchEqualityCheck(binaryExpr *ast.BinaryExpr, ctx *gosec.Context) (*gosec.Issue, error) { + if binaryExpr.Op == token.EQL || binaryExpr.Op == token.NEQ { + if ident, ok := binaryExpr.X.(*ast.Ident); ok { + if r.pattern.MatchString(ident.Name) { + if val, err := gosec.GetString(binaryExpr.Y); err == nil { + if r.ignoreEntropy || (!r.ignoreEntropy && r.isHighEntropyString(val)) { + return gosec.NewIssue(ctx, binaryExpr, r.ID(), r.What, r.Severity, r.Confidence), nil + } + } + } + } + } + return nil, nil +} + +// NewHardcodedCredentials attempts to find high entropy string constants being +// assigned to variables that appear to be related to credentials. +func NewHardcodedCredentials(id string, conf gosec.Config) (gosec.Rule, []ast.Node) { + pattern := `(?i)passwd|pass|password|pwd|secret|token` + entropyThreshold := 80.0 + perCharThreshold := 3.0 + ignoreEntropy := false + truncateString := 16 + if val, ok := conf["G101"]; ok { + conf := val.(map[string]interface{}) + if configPattern, ok := conf["pattern"]; ok { + if cfgPattern, ok := configPattern.(string); ok { + pattern = cfgPattern + } + } + if configIgnoreEntropy, ok := conf["ignore_entropy"]; ok { + if cfgIgnoreEntropy, ok := configIgnoreEntropy.(bool); ok { + ignoreEntropy = cfgIgnoreEntropy + } + } + if configEntropyThreshold, ok := conf["entropy_threshold"]; ok { + if cfgEntropyThreshold, ok := configEntropyThreshold.(string); ok { + if parsedNum, err := strconv.ParseFloat(cfgEntropyThreshold, 64); err == nil { + entropyThreshold = parsedNum + } + } + } + if configCharThreshold, ok := conf["per_char_threshold"]; ok { + if cfgCharThreshold, ok := configCharThreshold.(string); ok { + if parsedNum, err := strconv.ParseFloat(cfgCharThreshold, 64); err == nil { + perCharThreshold = parsedNum + } + } + } + if configTruncate, ok := conf["truncate"]; ok { + if cfgTruncate, ok := configTruncate.(string); ok { + if parsedInt, err := strconv.Atoi(cfgTruncate); err == nil { + truncateString = parsedInt + } + } + } + } + + return &credentials{ + pattern: regexp.MustCompile(pattern), + entropyThreshold: entropyThreshold, + perCharThreshold: perCharThreshold, + ignoreEntropy: ignoreEntropy, + truncate: truncateString, + MetaData: gosec.MetaData{ + ID: id, + What: "Potential hardcoded credentials", + Confidence: gosec.Low, + Severity: gosec.High, + }, + }, []ast.Node{(*ast.AssignStmt)(nil), (*ast.ValueSpec)(nil), (*ast.BinaryExpr)(nil)} +} diff --git a/vendor/github.com/securego/gosec/v2/rules/implicit_aliasing.go b/vendor/github.com/securego/gosec/v2/rules/implicit_aliasing.go new file mode 100644 index 000000000..b2668dec8 --- /dev/null +++ b/vendor/github.com/securego/gosec/v2/rules/implicit_aliasing.go @@ -0,0 +1,119 @@ +package rules + +import ( + "go/ast" + "go/token" + + "github.com/securego/gosec/v2" +) + +type implicitAliasing struct { + gosec.MetaData + aliases map[*ast.Object]struct{} + rightBrace token.Pos + acceptableAlias []*ast.UnaryExpr +} + +func (r *implicitAliasing) ID() string { + return r.MetaData.ID +} + +func containsUnary(exprs []*ast.UnaryExpr, expr *ast.UnaryExpr) bool { + for _, e := range exprs { + if e == expr { + return true + } + } + return false +} + +func (r *implicitAliasing) Match(n ast.Node, c *gosec.Context) (*gosec.Issue, error) { + switch node := n.(type) { + case *ast.RangeStmt: + // When presented with a range statement, get the underlying Object bound to + // by assignment and add it to our set 
(r.aliases) of objects to check for. + if key, ok := node.Value.(*ast.Ident); ok { + if key.Obj != nil { + if assignment, ok := key.Obj.Decl.(*ast.AssignStmt); ok { + if len(assignment.Lhs) < 2 { + return nil, nil + } + + if object, ok := assignment.Lhs[1].(*ast.Ident); ok { + r.aliases[object.Obj] = struct{}{} + + if r.rightBrace < node.Body.Rbrace { + r.rightBrace = node.Body.Rbrace + } + } + } + } + } + + case *ast.UnaryExpr: + // If this unary expression is outside of the last range statement we were looking at + // then clear the list of objects we're concerned about because they're no longer in + // scope + if node.Pos() > r.rightBrace { + r.aliases = make(map[*ast.Object]struct{}) + r.acceptableAlias = make([]*ast.UnaryExpr, 0) + } + + // Short circuit logic to skip checking aliases if we have nothing to check against. + if len(r.aliases) == 0 { + return nil, nil + } + + // If this unary is at the top level of a return statement then it is okay-- + // see *ast.ReturnStmt comment below. + if containsUnary(r.acceptableAlias, node) { + return nil, nil + } + + // If we find a unary op of & (reference) of an object within r.aliases, complain. + if ident, ok := node.X.(*ast.Ident); ok && node.Op.String() == "&" { + if _, contains := r.aliases[ident.Obj]; contains { + return gosec.NewIssue(c, n, r.ID(), r.What, r.Severity, r.Confidence), nil + } + } + case *ast.ReturnStmt: + // Returning a rangeStmt yielded value is acceptable since only one value will be returned + for _, item := range node.Results { + if unary, ok := item.(*ast.UnaryExpr); ok && unary.Op.String() == "&" { + r.acceptableAlias = append(r.acceptableAlias, unary) + } + } + } + + return nil, nil +} + +// NewImplicitAliasing detects implicit memory aliasing of type: for blah := SomeCall() {... SomeOtherCall(&blah) ...} +func NewImplicitAliasing(id string, conf gosec.Config) (gosec.Rule, []ast.Node) { + return &implicitAliasing{ + aliases: make(map[*ast.Object]struct{}), + rightBrace: token.NoPos, + acceptableAlias: make([]*ast.UnaryExpr, 0), + MetaData: gosec.MetaData{ + ID: id, + Severity: gosec.Medium, + Confidence: gosec.Medium, + What: "Implicit memory aliasing in for loop.", + }, + }, []ast.Node{(*ast.RangeStmt)(nil), (*ast.UnaryExpr)(nil), (*ast.ReturnStmt)(nil)} +} + +/* +This rule is prone to flag false positives. + +Within GoSec, the rule is just an AST match-- there are a handful of other +implementation strategies which might lend more nuance to the rule at the +cost of allowing false negatives. + +From a tooling side, I'd rather have this rule flag false positives than +potentially have some false negatives-- especially if the sentiment of this +rule (as I understand it, and Go) is that referencing a rangeStmt-yielded +value is kinda strange and does not have a strongly justified use case. + +Which is to say-- a false positive _should_ just be changed. +*/ diff --git a/vendor/github.com/securego/gosec/v2/rules/integer_overflow.go b/vendor/github.com/securego/gosec/v2/rules/integer_overflow.go new file mode 100644 index 000000000..dfcda94a8 --- /dev/null +++ b/vendor/github.com/securego/gosec/v2/rules/integer_overflow.go @@ -0,0 +1,89 @@ +// (c) Copyright 2016 Hewlett Packard Enterprise Development LP +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package rules + +import ( + "fmt" + "go/ast" + + "github.com/securego/gosec/v2" +) + +type integerOverflowCheck struct { + gosec.MetaData + calls gosec.CallList +} + +func (i *integerOverflowCheck) ID() string { + return i.MetaData.ID +} + +func (i *integerOverflowCheck) Match(node ast.Node, ctx *gosec.Context) (*gosec.Issue, error) { + var atoiVarObj map[*ast.Object]ast.Node + + // To check multiple lines, ctx.PassedValues is used to store temporary data. + if _, ok := ctx.PassedValues[i.ID()]; !ok { + atoiVarObj = make(map[*ast.Object]ast.Node) + ctx.PassedValues[i.ID()] = atoiVarObj + } else if pv, ok := ctx.PassedValues[i.ID()].(map[*ast.Object]ast.Node); ok { + atoiVarObj = pv + } else { + return nil, fmt.Errorf("PassedValues[%s] of Context is not map[*ast.Object]ast.Node, but %T", i.ID(), ctx.PassedValues[i.ID()]) + } + + // strconv.Atoi is a common function. + // To reduce false positives, This rule detects code which is converted to int32/int16 only. + switch n := node.(type) { + case *ast.AssignStmt: + for _, expr := range n.Rhs { + if callExpr, ok := expr.(*ast.CallExpr); ok && i.calls.ContainsPkgCallExpr(callExpr, ctx, false) != nil { + if idt, ok := n.Lhs[0].(*ast.Ident); ok && idt.Name != "_" { + // Example: + // v, _ := strconv.Atoi("1111") + // Add v's Obj to atoiVarObj map + atoiVarObj[idt.Obj] = n + } + } + } + case *ast.CallExpr: + if fun, ok := n.Fun.(*ast.Ident); ok { + if fun.Name == "int32" || fun.Name == "int16" { + if idt, ok := n.Args[0].(*ast.Ident); ok { + if n, ok := atoiVarObj[idt.Obj]; ok { + // Detect int32(v) and int16(v) + return gosec.NewIssue(ctx, n, i.ID(), i.What, i.Severity, i.Confidence), nil + } + } + } + } + } + + return nil, nil +} + +// NewIntegerOverflowCheck detects if there is potential Integer OverFlow +func NewIntegerOverflowCheck(id string, conf gosec.Config) (gosec.Rule, []ast.Node) { + calls := gosec.NewCallList() + calls.Add("strconv", "Atoi") + return &integerOverflowCheck{ + MetaData: gosec.MetaData{ + ID: id, + Severity: gosec.High, + Confidence: gosec.Medium, + What: "Potential Integer overflow made by strconv.Atoi result conversion to int16/32", + }, + calls: calls, + }, []ast.Node{(*ast.FuncDecl)(nil), (*ast.AssignStmt)(nil), (*ast.CallExpr)(nil)} +} diff --git a/vendor/github.com/securego/gosec/v2/rules/pprof.go b/vendor/github.com/securego/gosec/v2/rules/pprof.go new file mode 100644 index 000000000..4c99af752 --- /dev/null +++ b/vendor/github.com/securego/gosec/v2/rules/pprof.go @@ -0,0 +1,42 @@ +package rules + +import ( + "go/ast" + + "github.com/securego/gosec/v2" +) + +type pprofCheck struct { + gosec.MetaData + importPath string + importName string +} + +// ID returns the ID of the check +func (p *pprofCheck) ID() string { + return p.MetaData.ID +} + +// Match checks for pprof imports +func (p *pprofCheck) Match(n ast.Node, c *gosec.Context) (*gosec.Issue, error) { + if node, ok := n.(*ast.ImportSpec); ok { + if p.importPath == unquote(node.Path.Value) && node.Name != nil && p.importName == node.Name.Name { + return gosec.NewIssue(c, node, p.ID(), p.What, p.Severity, p.Confidence), nil + } + } + return 
nil, nil +} + +// NewPprofCheck detects when the profiling endpoint is automatically exposed +func NewPprofCheck(id string, conf gosec.Config) (gosec.Rule, []ast.Node) { + return &pprofCheck{ + MetaData: gosec.MetaData{ + ID: id, + Severity: gosec.High, + Confidence: gosec.High, + What: "Profiling endpoint is automatically exposed on /debug/pprof", + }, + importPath: "net/http/pprof", + importName: "_", + }, []ast.Node{(*ast.ImportSpec)(nil)} +} diff --git a/vendor/github.com/securego/gosec/v2/rules/rand.go b/vendor/github.com/securego/gosec/v2/rules/rand.go new file mode 100644 index 000000000..055adce4d --- /dev/null +++ b/vendor/github.com/securego/gosec/v2/rules/rand.go @@ -0,0 +1,58 @@ +// (c) Copyright 2016 Hewlett Packard Enterprise Development LP +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package rules + +import ( + "go/ast" + + "github.com/securego/gosec/v2" +) + +type weakRand struct { + gosec.MetaData + funcNames []string + packagePath string +} + +func (w *weakRand) ID() string { + return w.MetaData.ID +} + +func (w *weakRand) Match(n ast.Node, c *gosec.Context) (*gosec.Issue, error) { + for _, funcName := range w.funcNames { + if _, matched := gosec.MatchCallByPackage(n, c, w.packagePath, funcName); matched { + return gosec.NewIssue(c, n, w.ID(), w.What, w.Severity, w.Confidence), nil + } + } + + return nil, nil +} + +// NewWeakRandCheck detects the use of random number generator that isn't cryptographically secure +func NewWeakRandCheck(id string, conf gosec.Config) (gosec.Rule, []ast.Node) { + return &weakRand{ + funcNames: []string{ + "New", "Read", "Float32", "Float64", "Int", "Int31", + "Int31n", "Int63", "Int63n", "Intn", "NormalFloat64", "Uint32", "Uint64", + }, + packagePath: "math/rand", + MetaData: gosec.MetaData{ + ID: id, + Severity: gosec.High, + Confidence: gosec.Medium, + What: "Use of weak random number generator (math/rand instead of crypto/rand)", + }, + }, []ast.Node{(*ast.CallExpr)(nil)} +} diff --git a/vendor/github.com/securego/gosec/v2/rules/readfile.go b/vendor/github.com/securego/gosec/v2/rules/readfile.go new file mode 100644 index 000000000..072b016e2 --- /dev/null +++ b/vendor/github.com/securego/gosec/v2/rules/readfile.go @@ -0,0 +1,128 @@ +// (c) Copyright 2016 Hewlett Packard Enterprise Development LP +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
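For context on the math/rand rule vendored just above (G404): it matches the listed math/rand functions such as Int, Intn, and Read wherever they are called. A minimal sketch contrasting it with crypto/rand; the 16-byte token length is only an assumed example:

    package main

    import (
        cryptorand "crypto/rand"
        "fmt"
        mathrand "math/rand"
    )

    func main() {
        // math/rand is deterministic and not suitable for secrets;
        // this call is what G404 reports.
        fmt.Println("weak:", mathrand.Intn(1000))

        // crypto/rand draws from a cryptographically secure source.
        token := make([]byte, 16) // assumed token length
        if _, err := cryptorand.Read(token); err != nil {
            panic(err)
        }
        fmt.Printf("strong: %x\n", token)
    }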
+ +package rules + +import ( + "go/ast" + "go/types" + + "github.com/securego/gosec/v2" +) + +type readfile struct { + gosec.MetaData + gosec.CallList + pathJoin gosec.CallList + clean gosec.CallList +} + +// ID returns the identifier for this rule +func (r *readfile) ID() string { + return r.MetaData.ID +} + +// isJoinFunc checks if there is a filepath.Join or other join function +func (r *readfile) isJoinFunc(n ast.Node, c *gosec.Context) bool { + if call := r.pathJoin.ContainsPkgCallExpr(n, c, false); call != nil { + for _, arg := range call.Args { + // edge case: check if one of the args is a BinaryExpr + if binExp, ok := arg.(*ast.BinaryExpr); ok { + // iterate and resolve all found identities from the BinaryExpr + if _, ok := gosec.FindVarIdentities(binExp, c); ok { + return true + } + } + + // try and resolve identity + if ident, ok := arg.(*ast.Ident); ok { + obj := c.Info.ObjectOf(ident) + if _, ok := obj.(*types.Var); ok && !gosec.TryResolve(ident, c) { + return true + } + } + } + } + return false +} + +// isFilepathClean checks if there is a filepath.Clean before assigning to a variable +func (r *readfile) isFilepathClean(n *ast.Ident, c *gosec.Context) bool { + if n.Obj.Kind != ast.Var { + return false + } + if node, ok := n.Obj.Decl.(*ast.AssignStmt); ok { + if call, ok := node.Rhs[0].(*ast.CallExpr); ok { + if clean := r.clean.ContainsPkgCallExpr(call, c, false); clean != nil { + return true + } + } + } + return false +} + +// Match inspects AST nodes to determine if the match the methods `os.Open` or `ioutil.ReadFile` +func (r *readfile) Match(n ast.Node, c *gosec.Context) (*gosec.Issue, error) { + if node := r.ContainsPkgCallExpr(n, c, false); node != nil { + for _, arg := range node.Args { + // handles path joining functions in Arg + // eg. os.Open(filepath.Join("/tmp/", file)) + if callExpr, ok := arg.(*ast.CallExpr); ok { + if r.isJoinFunc(callExpr, c) { + return gosec.NewIssue(c, n, r.ID(), r.What, r.Severity, r.Confidence), nil + } + } + // handles binary string concatenation eg. 
ioutil.Readfile("/tmp/" + file + "/blob") + if binExp, ok := arg.(*ast.BinaryExpr); ok { + // resolve all found identities from the BinaryExpr + if _, ok := gosec.FindVarIdentities(binExp, c); ok { + return gosec.NewIssue(c, n, r.ID(), r.What, r.Severity, r.Confidence), nil + } + } + + if ident, ok := arg.(*ast.Ident); ok { + obj := c.Info.ObjectOf(ident) + if _, ok := obj.(*types.Var); ok && + !gosec.TryResolve(ident, c) && + !r.isFilepathClean(ident, c) { + return gosec.NewIssue(c, n, r.ID(), r.What, r.Severity, r.Confidence), nil + } + } + } + } + return nil, nil +} + +// NewReadFile detects cases where we read files +func NewReadFile(id string, conf gosec.Config) (gosec.Rule, []ast.Node) { + rule := &readfile{ + pathJoin: gosec.NewCallList(), + clean: gosec.NewCallList(), + CallList: gosec.NewCallList(), + MetaData: gosec.MetaData{ + ID: id, + What: "Potential file inclusion via variable", + Severity: gosec.Medium, + Confidence: gosec.High, + }, + } + rule.pathJoin.Add("path/filepath", "Join") + rule.pathJoin.Add("path", "Join") + rule.clean.Add("path/filepath", "Clean") + rule.clean.Add("path/filepath", "Rel") + rule.Add("io/ioutil", "ReadFile") + rule.Add("os", "Open") + rule.Add("os", "OpenFile") + return rule, []ast.Node{(*ast.CallExpr)(nil)} +} diff --git a/vendor/github.com/securego/gosec/v2/rules/rsa.go b/vendor/github.com/securego/gosec/v2/rules/rsa.go new file mode 100644 index 000000000..f2ed5db53 --- /dev/null +++ b/vendor/github.com/securego/gosec/v2/rules/rsa.go @@ -0,0 +1,58 @@ +// (c) Copyright 2016 Hewlett Packard Enterprise Development LP +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
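To make the file-inclusion rule above (G304) concrete: it reports os.Open, os.OpenFile, and ioutil.ReadFile calls whose path argument is a variable it cannot resolve, unless that variable was assigned from filepath.Clean or filepath.Rel. A minimal sketch with a hypothetical user-controlled name:

    package main

    import (
        "fmt"
        "os"
        "path/filepath"
    )

    func main() {
        name := os.Args[1] // hypothetical user-controlled input

        // A variable path flowing into os.Open is the case G304 reports.
        f, err := os.Open(name)
        if err != nil {
            panic(err)
        }
        defer f.Close()

        // Assigning through filepath.Clean (or filepath.Rel) first is the
        // escape the rule's isFilepathClean check recognizes; on its own it
        // is not a complete traversal defence.
        cleaned := filepath.Clean(name)
        fmt.Println("would open:", cleaned)
    }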
+ +package rules + +import ( + "fmt" + "go/ast" + + "github.com/securego/gosec/v2" +) + +type weakKeyStrength struct { + gosec.MetaData + calls gosec.CallList + bits int +} + +func (w *weakKeyStrength) ID() string { + return w.MetaData.ID +} + +func (w *weakKeyStrength) Match(n ast.Node, c *gosec.Context) (*gosec.Issue, error) { + if callExpr := w.calls.ContainsPkgCallExpr(n, c, false); callExpr != nil { + if bits, err := gosec.GetInt(callExpr.Args[1]); err == nil && bits < (int64)(w.bits) { + return gosec.NewIssue(c, n, w.ID(), w.What, w.Severity, w.Confidence), nil + } + } + return nil, nil +} + +// NewWeakKeyStrength builds a rule that detects RSA keys < 2048 bits +func NewWeakKeyStrength(id string, conf gosec.Config) (gosec.Rule, []ast.Node) { + calls := gosec.NewCallList() + calls.Add("crypto/rsa", "GenerateKey") + bits := 2048 + return &weakKeyStrength{ + calls: calls, + bits: bits, + MetaData: gosec.MetaData{ + ID: id, + Severity: gosec.Medium, + Confidence: gosec.High, + What: fmt.Sprintf("RSA keys should be at least %d bits", bits), + }, + }, []ast.Node{(*ast.CallExpr)(nil)} +} diff --git a/vendor/github.com/securego/gosec/v2/rules/rulelist.go b/vendor/github.com/securego/gosec/v2/rules/rulelist.go new file mode 100644 index 000000000..a3d9ca2f6 --- /dev/null +++ b/vendor/github.com/securego/gosec/v2/rules/rulelist.go @@ -0,0 +1,116 @@ +// (c) Copyright 2016 Hewlett Packard Enterprise Development LP +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package rules + +import "github.com/securego/gosec/v2" + +// RuleDefinition contains the description of a rule and a mechanism to +// create it. +type RuleDefinition struct { + ID string + Description string + Create gosec.RuleBuilder +} + +// RuleList is a mapping of rule ID's to rule definitions +type RuleList map[string]RuleDefinition + +// Builders returns all the create methods for a given rule list +func (rl RuleList) Builders() map[string]gosec.RuleBuilder { + builders := make(map[string]gosec.RuleBuilder) + for _, def := range rl { + builders[def.ID] = def.Create + } + return builders +} + +// RuleFilter can be used to include or exclude a rule depending on the return +// value of the function +type RuleFilter func(string) bool + +// NewRuleFilter is a closure that will include/exclude the rule ID's based on +// the supplied boolean value. 
+func NewRuleFilter(action bool, ruleIDs ...string) RuleFilter { + rulelist := make(map[string]bool) + for _, rule := range ruleIDs { + rulelist[rule] = true + } + return func(rule string) bool { + if _, found := rulelist[rule]; found { + return action + } + return !action + } +} + +// Generate the list of rules to use +func Generate(filters ...RuleFilter) RuleList { + rules := []RuleDefinition{ + // misc + {"G101", "Look for hardcoded credentials", NewHardcodedCredentials}, + {"G102", "Bind to all interfaces", NewBindsToAllNetworkInterfaces}, + {"G103", "Audit the use of unsafe block", NewUsingUnsafe}, + {"G104", "Audit errors not checked", NewNoErrorCheck}, + {"G106", "Audit the use of ssh.InsecureIgnoreHostKey function", NewSSHHostKey}, + {"G107", "Url provided to HTTP request as taint input", NewSSRFCheck}, + {"G108", "Profiling endpoint is automatically exposed", NewPprofCheck}, + {"G109", "Converting strconv.Atoi result to int32/int16", NewIntegerOverflowCheck}, + {"G110", "Detect io.Copy instead of io.CopyN when decompression", NewDecompressionBombCheck}, + + // injection + {"G201", "SQL query construction using format string", NewSQLStrFormat}, + {"G202", "SQL query construction using string concatenation", NewSQLStrConcat}, + {"G203", "Use of unescaped data in HTML templates", NewTemplateCheck}, + {"G204", "Audit use of command execution", NewSubproc}, + + // filesystem + {"G301", "Poor file permissions used when creating a directory", NewMkdirPerms}, + {"G302", "Poor file permissions used when creation file or using chmod", NewFilePerms}, + {"G303", "Creating tempfile using a predictable path", NewBadTempFile}, + {"G304", "File path provided as taint input", NewReadFile}, + {"G305", "File path traversal when extracting zip archive", NewArchive}, + {"G306", "Poor file permissions used when writing to a file", NewWritePerms}, + {"G307", "Unsafe defer call of a method returning an error", NewDeferredClosing}, + + // crypto + {"G401", "Detect the usage of DES, RC4, MD5 or SHA1", NewUsesWeakCryptography}, + {"G402", "Look for bad TLS connection settings", NewIntermediateTLSCheck}, + {"G403", "Ensure minimum RSA key length of 2048 bits", NewWeakKeyStrength}, + {"G404", "Insecure random number source (rand)", NewWeakRandCheck}, + + // blocklist + {"G501", "Import blocklist: crypto/md5", NewBlocklistedImportMD5}, + {"G502", "Import blocklist: crypto/des", NewBlocklistedImportDES}, + {"G503", "Import blocklist: crypto/rc4", NewBlocklistedImportRC4}, + {"G504", "Import blocklist: net/http/cgi", NewBlocklistedImportCGI}, + {"G505", "Import blocklist: crypto/sha1", NewBlocklistedImportSHA1}, + + // memory safety + {"G601", "Implicit memory aliasing in RangeStmt", NewImplicitAliasing}, + } + + ruleMap := make(map[string]RuleDefinition) + +RULES: + for _, rule := range rules { + for _, filter := range filters { + if filter(rule.ID) { + continue RULES + } + } + ruleMap[rule.ID] = rule + } + return ruleMap +} diff --git a/vendor/github.com/securego/gosec/v2/rules/sql.go b/vendor/github.com/securego/gosec/v2/rules/sql.go new file mode 100644 index 000000000..8a5b63861 --- /dev/null +++ b/vendor/github.com/securego/gosec/v2/rules/sql.go @@ -0,0 +1,303 @@ +// (c) Copyright 2016 Hewlett Packard Enterprise Development LP +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package rules + +import ( + "go/ast" + "regexp" + "strings" + + "github.com/securego/gosec/v2" +) + +type sqlStatement struct { + gosec.MetaData + gosec.CallList + + // Contains a list of patterns which must all match for the rule to match. + patterns []*regexp.Regexp +} + +func (s *sqlStatement) ID() string { + return s.MetaData.ID +} + +// See if the string matches the patterns for the statement. +func (s *sqlStatement) MatchPatterns(str string) bool { + for _, pattern := range s.patterns { + if !pattern.MatchString(str) { + return false + } + } + return true +} + +type sqlStrConcat struct { + sqlStatement +} + +func (s *sqlStrConcat) ID() string { + return s.MetaData.ID +} + +// see if we can figure out what it is +func (s *sqlStrConcat) checkObject(n *ast.Ident, c *gosec.Context) bool { + if n.Obj != nil { + return n.Obj.Kind != ast.Var && n.Obj.Kind != ast.Fun + } + + // Try to resolve unresolved identifiers using other files in same package + for _, file := range c.PkgFiles { + if node, ok := file.Scope.Objects[n.String()]; ok { + return node.Kind != ast.Var && node.Kind != ast.Fun + } + } + return false +} + +// checkQuery verifies if the query parameters is a string concatenation +func (s *sqlStrConcat) checkQuery(call *ast.CallExpr, ctx *gosec.Context) (*gosec.Issue, error) { + _, fnName, err := gosec.GetCallInfo(call, ctx) + if err != nil { + return nil, err + } + var query ast.Node + if strings.HasSuffix(fnName, "Context") { + query = call.Args[1] + } else { + query = call.Args[0] + } + + if be, ok := query.(*ast.BinaryExpr); ok { + operands := gosec.GetBinaryExprOperands(be) + if start, ok := operands[0].(*ast.BasicLit); ok { + if str, e := gosec.GetString(start); e == nil { + if !s.MatchPatterns(str) { + return nil, nil + } + } + for _, op := range operands[1:] { + if _, ok := op.(*ast.BasicLit); ok { + continue + } + if op, ok := op.(*ast.Ident); ok && s.checkObject(op, ctx) { + continue + } + return gosec.NewIssue(ctx, be, s.ID(), s.What, s.Severity, s.Confidence), nil + } + } + } + + return nil, nil +} + +// Checks SQL query concatenation issues such as "SELECT * FROM table WHERE " + " ' OR 1=1" +func (s *sqlStrConcat) Match(n ast.Node, ctx *gosec.Context) (*gosec.Issue, error) { + switch stmt := n.(type) { + case *ast.AssignStmt: + for _, expr := range stmt.Rhs { + if sqlQueryCall, ok := expr.(*ast.CallExpr); ok && s.ContainsCallExpr(expr, ctx) != nil { + return s.checkQuery(sqlQueryCall, ctx) + } + } + case *ast.ExprStmt: + if sqlQueryCall, ok := stmt.X.(*ast.CallExpr); ok && s.ContainsCallExpr(stmt.X, ctx) != nil { + return s.checkQuery(sqlQueryCall, ctx) + } + } + return nil, nil +} + +// NewSQLStrConcat looks for cases where we are building SQL strings via concatenation +func NewSQLStrConcat(id string, conf gosec.Config) (gosec.Rule, []ast.Node) { + rule := &sqlStrConcat{ + sqlStatement: sqlStatement{ + patterns: []*regexp.Regexp{ + regexp.MustCompile(`(?i)(SELECT|DELETE|INSERT|UPDATE|INTO|FROM|WHERE) `), + }, + MetaData: gosec.MetaData{ + ID: id, + Severity: gosec.Medium, + Confidence: gosec.High, + What: "SQL string concatenation", + }, + 
CallList: gosec.NewCallList(), + }, + } + + rule.AddAll("*database/sql.DB", "Query", "QueryContext", "QueryRow", "QueryRowContext") + rule.AddAll("*database/sql.Tx", "Query", "QueryContext", "QueryRow", "QueryRowContext") + return rule, []ast.Node{(*ast.AssignStmt)(nil), (*ast.ExprStmt)(nil)} +} + +type sqlStrFormat struct { + gosec.CallList + sqlStatement + fmtCalls gosec.CallList + noIssue gosec.CallList + noIssueQuoted gosec.CallList +} + +// see if we can figure out what it is +func (s *sqlStrFormat) constObject(e ast.Expr, c *gosec.Context) bool { + n, ok := e.(*ast.Ident) + if !ok { + return false + } + + if n.Obj != nil { + return n.Obj.Kind == ast.Con + } + + // Try to resolve unresolved identifiers using other files in same package + for _, file := range c.PkgFiles { + if node, ok := file.Scope.Objects[n.String()]; ok { + return node.Kind == ast.Con + } + } + return false +} + +func (s *sqlStrFormat) checkQuery(call *ast.CallExpr, ctx *gosec.Context) (*gosec.Issue, error) { + _, fnName, err := gosec.GetCallInfo(call, ctx) + if err != nil { + return nil, err + } + var query ast.Node + if strings.HasSuffix(fnName, "Context") { + query = call.Args[1] + } else { + query = call.Args[0] + } + + if ident, ok := query.(*ast.Ident); ok && ident.Obj != nil { + decl := ident.Obj.Decl + if assign, ok := decl.(*ast.AssignStmt); ok { + for _, expr := range assign.Rhs { + issue := s.checkFormatting(expr, ctx) + if issue != nil { + return issue, err + } + } + } + } + + return nil, nil +} + +func (s *sqlStrFormat) checkFormatting(n ast.Node, ctx *gosec.Context) *gosec.Issue { + // argIndex changes the function argument which gets matched to the regex + argIndex := 0 + if node := s.fmtCalls.ContainsPkgCallExpr(n, ctx, false); node != nil { + // if the function is fmt.Fprintf, search for SQL statement in Args[1] instead + if sel, ok := node.Fun.(*ast.SelectorExpr); ok { + if sel.Sel.Name == "Fprintf" { + // if os.Stderr or os.Stdout is in Arg[0], mark as no issue + if arg, ok := node.Args[0].(*ast.SelectorExpr); ok { + if ident, ok := arg.X.(*ast.Ident); ok { + if s.noIssue.Contains(ident.Name, arg.Sel.Name) { + return nil + } + } + } + // the function is Fprintf so set argIndex = 1 + argIndex = 1 + } + } + + // no formatter + if len(node.Args) == 0 { + return nil + } + + var formatter string + + // concats callexpr arg strings together if needed before regex evaluation + if argExpr, ok := node.Args[argIndex].(*ast.BinaryExpr); ok { + if fullStr, ok := gosec.ConcatString(argExpr); ok { + formatter = fullStr + } + } else if arg, e := gosec.GetString(node.Args[argIndex]); e == nil { + formatter = arg + } + if len(formatter) <= 0 { + return nil + } + + // If all formatter args are quoted or constant, then the SQL construction is safe + if argIndex+1 < len(node.Args) { + allSafe := true + for _, arg := range node.Args[argIndex+1:] { + if n := s.noIssueQuoted.ContainsPkgCallExpr(arg, ctx, true); n == nil && !s.constObject(arg, ctx) { + allSafe = false + break + } + } + if allSafe { + return nil + } + } + if s.MatchPatterns(formatter) { + return gosec.NewIssue(ctx, n, s.ID(), s.What, s.Severity, s.Confidence) + } + } + return nil +} + +// Check SQL query formatting issues such as "fmt.Sprintf("SELECT * FROM foo where '%s', userInput)" +func (s *sqlStrFormat) Match(n ast.Node, ctx *gosec.Context) (*gosec.Issue, error) { + switch stmt := n.(type) { + case *ast.AssignStmt: + for _, expr := range stmt.Rhs { + if sqlQueryCall, ok := expr.(*ast.CallExpr); ok && s.ContainsCallExpr(expr, ctx) != nil { + return 
s.checkQuery(sqlQueryCall, ctx) + } + } + case *ast.ExprStmt: + if sqlQueryCall, ok := stmt.X.(*ast.CallExpr); ok && s.ContainsCallExpr(stmt.X, ctx) != nil { + return s.checkQuery(sqlQueryCall, ctx) + } + } + return nil, nil +} + +// NewSQLStrFormat looks for cases where we're building SQL query strings using format strings +func NewSQLStrFormat(id string, conf gosec.Config) (gosec.Rule, []ast.Node) { + rule := &sqlStrFormat{ + CallList: gosec.NewCallList(), + fmtCalls: gosec.NewCallList(), + noIssue: gosec.NewCallList(), + noIssueQuoted: gosec.NewCallList(), + sqlStatement: sqlStatement{ + patterns: []*regexp.Regexp{ + regexp.MustCompile("(?i)(SELECT|DELETE|INSERT|UPDATE|INTO|FROM|WHERE) "), + regexp.MustCompile("%[^bdoxXfFp]"), + }, + MetaData: gosec.MetaData{ + ID: id, + Severity: gosec.Medium, + Confidence: gosec.High, + What: "SQL string formatting", + }, + }, + } + rule.AddAll("*database/sql.DB", "Query", "QueryContext", "QueryRow", "QueryRowContext") + rule.AddAll("*database/sql.Tx", "Query", "QueryContext", "QueryRow", "QueryRowContext") + rule.fmtCalls.AddAll("fmt", "Sprint", "Sprintf", "Sprintln", "Fprintf") + rule.noIssue.AddAll("os", "Stdout", "Stderr") + rule.noIssueQuoted.Add("github.com/lib/pq", "QuoteIdentifier") + + return rule, []ast.Node{(*ast.AssignStmt)(nil), (*ast.ExprStmt)(nil)} +} diff --git a/vendor/github.com/securego/gosec/v2/rules/ssh.go b/vendor/github.com/securego/gosec/v2/rules/ssh.go new file mode 100644 index 000000000..01f37da51 --- /dev/null +++ b/vendor/github.com/securego/gosec/v2/rules/ssh.go @@ -0,0 +1,38 @@ +package rules + +import ( + "go/ast" + + "github.com/securego/gosec/v2" +) + +type sshHostKey struct { + gosec.MetaData + pkg string + calls []string +} + +func (r *sshHostKey) ID() string { + return r.MetaData.ID +} + +func (r *sshHostKey) Match(n ast.Node, c *gosec.Context) (gi *gosec.Issue, err error) { + if _, matches := gosec.MatchCallByPackage(n, c, r.pkg, r.calls...); matches { + return gosec.NewIssue(c, n, r.ID(), r.What, r.Severity, r.Confidence), nil + } + return nil, nil +} + +// NewSSHHostKey rule detects the use of insecure ssh HostKeyCallback. 
+func NewSSHHostKey(id string, conf gosec.Config) (gosec.Rule, []ast.Node) { + return &sshHostKey{ + pkg: "golang.org/x/crypto/ssh", + calls: []string{"InsecureIgnoreHostKey"}, + MetaData: gosec.MetaData{ + ID: id, + What: "Use of ssh InsecureIgnoreHostKey should be audited", + Severity: gosec.Medium, + Confidence: gosec.High, + }, + }, []ast.Node{(*ast.CallExpr)(nil)} +} diff --git a/vendor/github.com/securego/gosec/v2/rules/ssrf.go b/vendor/github.com/securego/gosec/v2/rules/ssrf.go new file mode 100644 index 000000000..86bb8278d --- /dev/null +++ b/vendor/github.com/securego/gosec/v2/rules/ssrf.go @@ -0,0 +1,66 @@ +package rules + +import ( + "go/ast" + "go/types" + + "github.com/securego/gosec/v2" +) + +type ssrf struct { + gosec.MetaData + gosec.CallList +} + +// ID returns the identifier for this rule +func (r *ssrf) ID() string { + return r.MetaData.ID +} + +// ResolveVar tries to resolve the first argument of a call expression +// The first argument is the url +func (r *ssrf) ResolveVar(n *ast.CallExpr, c *gosec.Context) bool { + if len(n.Args) > 0 { + arg := n.Args[0] + if ident, ok := arg.(*ast.Ident); ok { + obj := c.Info.ObjectOf(ident) + if _, ok := obj.(*types.Var); ok { + scope := c.Pkg.Scope() + if scope != nil && scope.Lookup(ident.Name) != nil { + // a URL defined in a variable at package scope can be changed at any time + return true + } + if !gosec.TryResolve(ident, c) { + return true + } + } + } + } + return false +} + +// Match inspects AST nodes to determine if certain net/http methods are called with variable input +func (r *ssrf) Match(n ast.Node, c *gosec.Context) (*gosec.Issue, error) { + // Call expression is using http package directly + if node := r.ContainsPkgCallExpr(n, c, false); node != nil { + if r.ResolveVar(node, c) { + return gosec.NewIssue(c, n, r.ID(), r.What, r.Severity, r.Confidence), nil + } + } + return nil, nil +} + +// NewSSRFCheck detects cases where HTTP requests are sent +func NewSSRFCheck(id string, conf gosec.Config) (gosec.Rule, []ast.Node) { + rule := &ssrf{ + CallList: gosec.NewCallList(), + MetaData: gosec.MetaData{ + ID: id, + What: "Potential HTTP request made with variable url", + Severity: gosec.Medium, + Confidence: gosec.Medium, + }, + } + rule.AddAll("net/http", "Do", "Get", "Head", "Post", "PostForm", "RoundTrip") + return rule, []ast.Node{(*ast.CallExpr)(nil)} +} diff --git a/vendor/github.com/securego/gosec/v2/rules/subproc.go b/vendor/github.com/securego/gosec/v2/rules/subproc.go new file mode 100644 index 000000000..48a07269f --- /dev/null +++ b/vendor/github.com/securego/gosec/v2/rules/subproc.go @@ -0,0 +1,85 @@ +// (c) Copyright 2016 Hewlett Packard Enterprise Development LP +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
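For the SSRF rule vendored above (G107): it reports net/http calls such as http.Get, Post, or Do whose URL argument is a variable it cannot resolve to a constant, and it always reports URLs held in package-scope variables. A minimal sketch with a hypothetical query parameter and an assumed local port:

    package main

    import (
        "fmt"
        "io"
        "net/http"
    )

    func fetch(w http.ResponseWriter, r *http.Request) {
        target := r.URL.Query().Get("url") // hypothetical attacker-influenced value

        // A variable URL passed to http.Get is the case G107 reports.
        resp, err := http.Get(target)
        if err != nil {
            http.Error(w, err.Error(), http.StatusBadGateway)
            return
        }
        defer resp.Body.Close()

        body, err := io.ReadAll(resp.Body)
        if err != nil {
            http.Error(w, err.Error(), http.StatusBadGateway)
            return
        }
        fmt.Fprintf(w, "fetched %d bytes", len(body))
    }

    func main() {
        http.HandleFunc("/fetch", fetch)
        _ = http.ListenAndServe(":8080", nil) // assumed local port
    }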
+ +package rules + +import ( + "go/ast" + "go/types" + + "github.com/securego/gosec/v2" +) + +type subprocess struct { + gosec.MetaData + gosec.CallList +} + +func (r *subprocess) ID() string { + return r.MetaData.ID +} + +// TODO(gm) The only real potential for command injection with a Go project +// is something like this: +// +// syscall.Exec("/bin/sh", []string{"-c", tainted}) +// +// E.g. Input is correctly escaped but the execution context being used +// is unsafe. For example: +// +// syscall.Exec("echo", "foobar" + tainted) +func (r *subprocess) Match(n ast.Node, c *gosec.Context) (*gosec.Issue, error) { + if node := r.ContainsPkgCallExpr(n, c, false); node != nil { + args := node.Args + if r.isContext(n, c) { + args = args[1:] + } + for _, arg := range args { + if ident, ok := arg.(*ast.Ident); ok { + obj := c.Info.ObjectOf(ident) + if _, ok := obj.(*types.Var); ok && !gosec.TryResolve(ident, c) { + return gosec.NewIssue(c, n, r.ID(), "Subprocess launched with variable", gosec.Medium, gosec.High), nil + } + } else if !gosec.TryResolve(arg, c) { + // the arg is not a constant or a variable but instead a function call or os.Args[i] + return gosec.NewIssue(c, n, r.ID(), "Subprocess launched with function call as argument or cmd arguments", gosec.Medium, gosec.High), nil + } + } + } + return nil, nil +} + +// isContext checks whether or not the node is a CommandContext call or not +// Thi is required in order to skip the first argument from the check. +func (r *subprocess) isContext(n ast.Node, ctx *gosec.Context) bool { + selector, indent, err := gosec.GetCallInfo(n, ctx) + if err != nil { + return false + } + if selector == "exec" && indent == "CommandContext" { + return true + } + return false +} + +// NewSubproc detects cases where we are forking out to an external process +func NewSubproc(id string, conf gosec.Config) (gosec.Rule, []ast.Node) { + rule := &subprocess{gosec.MetaData{ID: id}, gosec.NewCallList()} + rule.Add("os/exec", "Command") + rule.Add("os/exec", "CommandContext") + rule.Add("syscall", "Exec") + rule.Add("syscall", "ForkExec") + rule.Add("syscall", "StartProcess") + return rule, []ast.Node{(*ast.CallExpr)(nil)} +} diff --git a/vendor/github.com/securego/gosec/v2/rules/tempfiles.go b/vendor/github.com/securego/gosec/v2/rules/tempfiles.go new file mode 100644 index 000000000..36f0f979b --- /dev/null +++ b/vendor/github.com/securego/gosec/v2/rules/tempfiles.go @@ -0,0 +1,58 @@ +// (c) Copyright 2016 Hewlett Packard Enterprise Development LP +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package rules + +import ( + "go/ast" + "regexp" + + "github.com/securego/gosec/v2" +) + +type badTempFile struct { + gosec.MetaData + calls gosec.CallList + args *regexp.Regexp +} + +func (t *badTempFile) ID() string { + return t.MetaData.ID +} + +func (t *badTempFile) Match(n ast.Node, c *gosec.Context) (gi *gosec.Issue, err error) { + if node := t.calls.ContainsPkgCallExpr(n, c, false); node != nil { + if arg, e := gosec.GetString(node.Args[0]); t.args.MatchString(arg) && e == nil { + return gosec.NewIssue(c, n, t.ID(), t.What, t.Severity, t.Confidence), nil + } + } + return nil, nil +} + +// NewBadTempFile detects direct writes to predictable path in temporary directory +func NewBadTempFile(id string, conf gosec.Config) (gosec.Rule, []ast.Node) { + calls := gosec.NewCallList() + calls.Add("io/ioutil", "WriteFile") + calls.Add("os", "Create") + return &badTempFile{ + calls: calls, + args: regexp.MustCompile(`^/tmp/.*$|^/var/tmp/.*$`), + MetaData: gosec.MetaData{ + ID: id, + Severity: gosec.Medium, + Confidence: gosec.High, + What: "File creation in shared tmp directory without using ioutil.Tempfile", + }, + }, []ast.Node{(*ast.CallExpr)(nil)} +} diff --git a/vendor/github.com/securego/gosec/v2/rules/templates.go b/vendor/github.com/securego/gosec/v2/rules/templates.go new file mode 100644 index 000000000..b9e7bb7bf --- /dev/null +++ b/vendor/github.com/securego/gosec/v2/rules/templates.go @@ -0,0 +1,60 @@ +// (c) Copyright 2016 Hewlett Packard Enterprise Development LP +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package rules + +import ( + "go/ast" + + "github.com/securego/gosec/v2" +) + +type templateCheck struct { + gosec.MetaData + calls gosec.CallList +} + +func (t *templateCheck) ID() string { + return t.MetaData.ID +} + +func (t *templateCheck) Match(n ast.Node, c *gosec.Context) (*gosec.Issue, error) { + if node := t.calls.ContainsPkgCallExpr(n, c, false); node != nil { + for _, arg := range node.Args { + if _, ok := arg.(*ast.BasicLit); !ok { // basic lits are safe + return gosec.NewIssue(c, n, t.ID(), t.What, t.Severity, t.Confidence), nil + } + } + } + return nil, nil +} + +// NewTemplateCheck constructs the template check rule. This rule is used to +// find use of templates where HTML/JS escaping is not being used +func NewTemplateCheck(id string, conf gosec.Config) (gosec.Rule, []ast.Node) { + calls := gosec.NewCallList() + calls.Add("html/template", "HTML") + calls.Add("html/template", "HTMLAttr") + calls.Add("html/template", "JS") + calls.Add("html/template", "URL") + return &templateCheck{ + calls: calls, + MetaData: gosec.MetaData{ + ID: id, + Severity: gosec.Medium, + Confidence: gosec.Low, + What: "this method will not auto-escape HTML. 
Verify data is well formed.", + }, + }, []ast.Node{(*ast.CallExpr)(nil)} +} diff --git a/vendor/github.com/securego/gosec/v2/rules/tls.go b/vendor/github.com/securego/gosec/v2/rules/tls.go new file mode 100644 index 000000000..219d8fcde --- /dev/null +++ b/vendor/github.com/securego/gosec/v2/rules/tls.go @@ -0,0 +1,171 @@ +// (c) Copyright 2016 Hewlett Packard Enterprise Development LP +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +//go:generate tlsconfig + +package rules + +import ( + "crypto/tls" + "fmt" + "go/ast" + + "github.com/securego/gosec/v2" +) + +type insecureConfigTLS struct { + gosec.MetaData + MinVersion int64 + MaxVersion int64 + requiredType string + goodCiphers []string + actualMinVersion int64 + actualMaxVersion int64 +} + +func (t *insecureConfigTLS) ID() string { + return t.MetaData.ID +} + +func stringInSlice(a string, list []string) bool { + for _, b := range list { + if b == a { + return true + } + } + return false +} + +func (t *insecureConfigTLS) processTLSCipherSuites(n ast.Node, c *gosec.Context) *gosec.Issue { + if ciphers, ok := n.(*ast.CompositeLit); ok { + for _, cipher := range ciphers.Elts { + if ident, ok := cipher.(*ast.SelectorExpr); ok { + if !stringInSlice(ident.Sel.Name, t.goodCiphers) { + err := fmt.Sprintf("TLS Bad Cipher Suite: %s", ident.Sel.Name) + return gosec.NewIssue(c, ident, t.ID(), err, gosec.High, gosec.High) + } + } + } + } + return nil +} + +func (t *insecureConfigTLS) processTLSConfVal(n *ast.KeyValueExpr, c *gosec.Context) *gosec.Issue { + if ident, ok := n.Key.(*ast.Ident); ok { + switch ident.Name { + case "InsecureSkipVerify": + if node, ok := n.Value.(*ast.Ident); ok { + if node.Name != "false" { + return gosec.NewIssue(c, n, t.ID(), "TLS InsecureSkipVerify set true.", gosec.High, gosec.High) + } + } else { + // TODO(tk): symbol tab look up to get the actual value + return gosec.NewIssue(c, n, t.ID(), "TLS InsecureSkipVerify may be true.", gosec.High, gosec.Low) + } + + case "PreferServerCipherSuites": + if node, ok := n.Value.(*ast.Ident); ok { + if node.Name == "false" { + return gosec.NewIssue(c, n, t.ID(), "TLS PreferServerCipherSuites set false.", gosec.Medium, gosec.High) + } + } else { + // TODO(tk): symbol tab look up to get the actual value + return gosec.NewIssue(c, n, t.ID(), "TLS PreferServerCipherSuites may be false.", gosec.Medium, gosec.Low) + } + + case "MinVersion": + if ival, ierr := gosec.GetInt(n.Value); ierr == nil { + t.actualMinVersion = ival + } else { + if se, ok := n.Value.(*ast.SelectorExpr); ok { + if pkg, ok := se.X.(*ast.Ident); ok && pkg.Name == "tls" { + t.actualMinVersion = t.mapVersion(se.Sel.Name) + } + } + } + + case "MaxVersion": + if ival, ierr := gosec.GetInt(n.Value); ierr == nil { + t.actualMaxVersion = ival + } else { + if se, ok := n.Value.(*ast.SelectorExpr); ok { + if pkg, ok := se.X.(*ast.Ident); ok && pkg.Name == "tls" { + t.actualMaxVersion = t.mapVersion(se.Sel.Name) + } + } + } + + case "CipherSuites": + if ret := t.processTLSCipherSuites(n.Value, c); ret != nil { + 
return ret + } + + } + } + return nil +} + +func (t *insecureConfigTLS) mapVersion(version string) int64 { + var v int64 + switch version { + case "VersionTLS13": + v = tls.VersionTLS13 + case "VersionTLS12": + v = tls.VersionTLS12 + case "VersionTLS11": + v = tls.VersionTLS11 + case "VersionTLS10": + v = tls.VersionTLS10 + } + return v +} + +func (t *insecureConfigTLS) checkVersion(n ast.Node, c *gosec.Context) *gosec.Issue { + if t.actualMaxVersion == 0 && t.actualMinVersion >= t.MinVersion { + // no warning is generated since the min version is greater than the secure min version + return nil + } + if t.actualMinVersion < t.MinVersion { + return gosec.NewIssue(c, n, t.ID(), "TLS MinVersion too low.", gosec.High, gosec.High) + } + if t.actualMaxVersion < t.MaxVersion { + return gosec.NewIssue(c, n, t.ID(), "TLS MaxVersion too low.", gosec.High, gosec.High) + } + return nil +} + +func (t *insecureConfigTLS) resetVersion() { + t.actualMaxVersion = 0 + t.actualMinVersion = 0 +} + +func (t *insecureConfigTLS) Match(n ast.Node, c *gosec.Context) (*gosec.Issue, error) { + if complit, ok := n.(*ast.CompositeLit); ok && complit.Type != nil { + actualType := c.Info.TypeOf(complit.Type) + if actualType != nil && actualType.String() == t.requiredType { + for _, elt := range complit.Elts { + if kve, ok := elt.(*ast.KeyValueExpr); ok { + issue := t.processTLSConfVal(kve, c) + if issue != nil { + return issue, nil + } + } + } + issue := t.checkVersion(complit, c) + t.resetVersion() + return issue, nil + } + } + return nil, nil +} diff --git a/vendor/github.com/securego/gosec/v2/rules/tls_config.go b/vendor/github.com/securego/gosec/v2/rules/tls_config.go new file mode 100644 index 000000000..5d68593d8 --- /dev/null +++ b/vendor/github.com/securego/gosec/v2/rules/tls_config.go @@ -0,0 +1,92 @@ +package rules + +import ( + "go/ast" + + "github.com/securego/gosec/v2" +) + +// NewModernTLSCheck creates a check for Modern TLS ciphers +// DO NOT EDIT - generated by tlsconfig tool +func NewModernTLSCheck(id string, conf gosec.Config) (gosec.Rule, []ast.Node) { + return &insecureConfigTLS{ + MetaData: gosec.MetaData{ID: id}, + requiredType: "crypto/tls.Config", + MinVersion: 0x0304, + MaxVersion: 0x0304, + goodCiphers: []string{ + "TLS_AES_128_GCM_SHA256", + "TLS_AES_256_GCM_SHA384", + "TLS_CHACHA20_POLY1305_SHA256", + }, + }, []ast.Node{(*ast.CompositeLit)(nil)} +} + +// NewIntermediateTLSCheck creates a check for Intermediate TLS ciphers +// DO NOT EDIT - generated by tlsconfig tool +func NewIntermediateTLSCheck(id string, conf gosec.Config) (gosec.Rule, []ast.Node) { + return &insecureConfigTLS{ + MetaData: gosec.MetaData{ID: id}, + requiredType: "crypto/tls.Config", + MinVersion: 0x0303, + MaxVersion: 0x0304, + goodCiphers: []string{ + "TLS_AES_128_GCM_SHA256", + "TLS_AES_256_GCM_SHA384", + "TLS_CHACHA20_POLY1305_SHA256", + "TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256", + "TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256", + "TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384", + "TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384", + "TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305", + "TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256", + "TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305", + "TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256", + "TLS_DHE_RSA_WITH_AES_128_GCM_SHA256", + "TLS_DHE_RSA_WITH_AES_256_GCM_SHA384", + }, + }, []ast.Node{(*ast.CompositeLit)(nil)} +} + +// NewOldTLSCheck creates a check for Old TLS ciphers +// DO NOT EDIT - generated by tlsconfig tool +func NewOldTLSCheck(id string, conf gosec.Config) (gosec.Rule, []ast.Node) { + return 
&insecureConfigTLS{ + MetaData: gosec.MetaData{ID: id}, + requiredType: "crypto/tls.Config", + MinVersion: 0x0301, + MaxVersion: 0x0304, + goodCiphers: []string{ + "TLS_AES_128_GCM_SHA256", + "TLS_AES_256_GCM_SHA384", + "TLS_CHACHA20_POLY1305_SHA256", + "TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256", + "TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256", + "TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384", + "TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384", + "TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305", + "TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256", + "TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305", + "TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256", + "TLS_DHE_RSA_WITH_AES_128_GCM_SHA256", + "TLS_DHE_RSA_WITH_AES_256_GCM_SHA384", + "TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256", + "TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256", + "TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA", + "TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA", + "TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384", + "TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384", + "TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA", + "TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA", + "TLS_DHE_RSA_WITH_AES_128_CBC_SHA256", + "TLS_DHE_RSA_WITH_AES_256_CBC_SHA256", + "TLS_RSA_WITH_AES_128_GCM_SHA256", + "TLS_RSA_WITH_AES_256_GCM_SHA384", + "TLS_RSA_WITH_AES_128_CBC_SHA256", + "TLS_RSA_WITH_AES_256_CBC_SHA256", + "TLS_RSA_WITH_AES_128_CBC_SHA", + "TLS_RSA_WITH_AES_256_CBC_SHA", + "TLS_RSA_WITH_3DES_EDE_CBC_SHA", + }, + }, []ast.Node{(*ast.CompositeLit)(nil)} +} diff --git a/vendor/github.com/securego/gosec/v2/rules/unsafe.go b/vendor/github.com/securego/gosec/v2/rules/unsafe.go new file mode 100644 index 000000000..88a298fb5 --- /dev/null +++ b/vendor/github.com/securego/gosec/v2/rules/unsafe.go @@ -0,0 +1,53 @@ +// (c) Copyright 2016 Hewlett Packard Enterprise Development LP +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package rules + +import ( + "go/ast" + + "github.com/securego/gosec/v2" +) + +type usingUnsafe struct { + gosec.MetaData + pkg string + calls []string +} + +func (r *usingUnsafe) ID() string { + return r.MetaData.ID +} + +func (r *usingUnsafe) Match(n ast.Node, c *gosec.Context) (gi *gosec.Issue, err error) { + if _, matches := gosec.MatchCallByPackage(n, c, r.pkg, r.calls...); matches { + return gosec.NewIssue(c, n, r.ID(), r.What, r.Severity, r.Confidence), nil + } + return nil, nil +} + +// NewUsingUnsafe rule detects the use of the unsafe package. This is only +// really useful for auditing purposes. 
+func NewUsingUnsafe(id string, conf gosec.Config) (gosec.Rule, []ast.Node) { + return &usingUnsafe{ + pkg: "unsafe", + calls: []string{"Alignof", "Offsetof", "Sizeof", "Pointer"}, + MetaData: gosec.MetaData{ + ID: id, + What: "Use of unsafe calls should be audited", + Severity: gosec.Low, + Confidence: gosec.High, + }, + }, []ast.Node{(*ast.CallExpr)(nil)} +} diff --git a/vendor/github.com/securego/gosec/v2/rules/weakcrypto.go b/vendor/github.com/securego/gosec/v2/rules/weakcrypto.go new file mode 100644 index 000000000..eecb88f04 --- /dev/null +++ b/vendor/github.com/securego/gosec/v2/rules/weakcrypto.go @@ -0,0 +1,58 @@ +// (c) Copyright 2016 Hewlett Packard Enterprise Development LP +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package rules + +import ( + "go/ast" + + "github.com/securego/gosec/v2" +) + +type usesWeakCryptography struct { + gosec.MetaData + blocklist map[string][]string +} + +func (r *usesWeakCryptography) ID() string { + return r.MetaData.ID +} + +func (r *usesWeakCryptography) Match(n ast.Node, c *gosec.Context) (*gosec.Issue, error) { + for pkg, funcs := range r.blocklist { + if _, matched := gosec.MatchCallByPackage(n, c, pkg, funcs...); matched { + return gosec.NewIssue(c, n, r.ID(), r.What, r.Severity, r.Confidence), nil + } + } + return nil, nil +} + +// NewUsesWeakCryptography detects uses of des.* md5.* or rc4.* +func NewUsesWeakCryptography(id string, conf gosec.Config) (gosec.Rule, []ast.Node) { + calls := make(map[string][]string) + calls["crypto/des"] = []string{"NewCipher", "NewTripleDESCipher"} + calls["crypto/md5"] = []string{"New", "Sum"} + calls["crypto/sha1"] = []string{"New", "Sum"} + calls["crypto/rc4"] = []string{"NewCipher"} + rule := &usesWeakCryptography{ + blocklist: calls, + MetaData: gosec.MetaData{ + ID: id, + Severity: gosec.Medium, + Confidence: gosec.High, + What: "Use of weak cryptographic primitive", + }, + } + return rule, []ast.Node{(*ast.CallExpr)(nil)} +} diff --git a/vendor/github.com/shazow/go-diff/LICENSE b/vendor/github.com/shazow/go-diff/LICENSE new file mode 100644 index 000000000..85e1e4b33 --- /dev/null +++ b/vendor/github.com/shazow/go-diff/LICENSE @@ -0,0 +1,22 @@ +The MIT License (MIT) + +Copyright (c) 2015 Andrey Petrov + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + diff --git a/vendor/github.com/shazow/go-diff/difflib/differ.go b/vendor/github.com/shazow/go-diff/difflib/differ.go new file mode 100644 index 000000000..43dc84d9a --- /dev/null +++ b/vendor/github.com/shazow/go-diff/difflib/differ.go @@ -0,0 +1,39 @@ +// This package implements the diff.Differ interface using github.com/mb0/diff as a backend. +package difflib + +import ( + "io" + "io/ioutil" + + "github.com/pmezard/go-difflib/difflib" +) + +type differ struct{} + +// New returns an implementation of diff.Differ using mb0diff as the backend. +func New() *differ { + return &differ{} +} + +// Diff consumes the entire reader streams into memory before generating a diff +// which then gets filled into the buffer. This implementation stores and +// manipulates all three values in memory. +func (diff *differ) Diff(out io.Writer, a io.ReadSeeker, b io.ReadSeeker) error { + var src, dst []byte + var err error + + if src, err = ioutil.ReadAll(a); err != nil { + return err + } + if dst, err = ioutil.ReadAll(b); err != nil { + return err + } + + d := difflib.UnifiedDiff{ + A: difflib.SplitLines(string(src)), + B: difflib.SplitLines(string(dst)), + Context: 3, + } + + return difflib.WriteUnifiedDiff(out, d) +} diff --git a/vendor/github.com/sonatard/noctx/.gitignore b/vendor/github.com/sonatard/noctx/.gitignore new file mode 100644 index 000000000..2d830686d --- /dev/null +++ b/vendor/github.com/sonatard/noctx/.gitignore @@ -0,0 +1 @@ +coverage.out diff --git a/vendor/github.com/sonatard/noctx/.golangci.yml b/vendor/github.com/sonatard/noctx/.golangci.yml new file mode 100644 index 000000000..1580acde2 --- /dev/null +++ b/vendor/github.com/sonatard/noctx/.golangci.yml @@ -0,0 +1,20 @@ +run: + +linters-settings: + govet: + enable-all: true + +linters: + enable-all: true + disable: + - gochecknoglobals + - gomnd + - gocognit + - nestif + +issues: + exclude-rules: + - path: reqwithoutctx/ssa.go + text: "Consider preallocating `exts`" + linters: + - prealloc diff --git a/vendor/github.com/sonatard/noctx/LICENSE b/vendor/github.com/sonatard/noctx/LICENSE new file mode 100644 index 000000000..a00d5727f --- /dev/null +++ b/vendor/github.com/sonatard/noctx/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2020 sonatard + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/vendor/github.com/sonatard/noctx/Makefile b/vendor/github.com/sonatard/noctx/Makefile new file mode 100644 index 000000000..1a27f6b59 --- /dev/null +++ b/vendor/github.com/sonatard/noctx/Makefile @@ -0,0 +1,16 @@ +.PHONY: all imports test lint + +all: imports test lint + +imports: + goimports -w ./ + +test: + go test -race ./... + +test_coverage: + go test -race -coverprofile=coverage.out -covermode=atomic ./... + +lint: + golangci-lint run ./... + diff --git a/vendor/github.com/sonatard/noctx/README.md b/vendor/github.com/sonatard/noctx/README.md new file mode 100644 index 000000000..bfe9782c6 --- /dev/null +++ b/vendor/github.com/sonatard/noctx/README.md @@ -0,0 +1,95 @@ +# noctx + +![](https://github.com/sonatard/noctx/workflows/.github/workflows/ci.yml/badge.svg) + +`noctx` finds sending http request without context.Context. + +You should use `noctx` if sending http request in your library. +Passing `context.Context` enables library user to cancel http request, getting trace information and so on. + +## Install + +```sh +$ go get -u github.com/sonatard/noctx/cmd/noctx +``` + +## Usage + +```sh +$ go vet -vettool=`which noctx` main.go +./main.go:6:11: net/http.Get must not be called +``` + +## Detection rules +- Executing following functions + - `net/http.Get` + - `net/http.Head` + - `net/http.Post` + - `net/http.PostForm` + - `(*net/http.Client).Get` + - `(*net/http.Client).Head` + - `(*net/http.Client).Post` + - `(*net/http.Client).PostForm` +- `http.Request` returned by `http.NewRequest` function and passes it to other function. + +## How to fix +- Send http request using `(*http.Client).Do(*http.Request)` method. +- In Go 1.13 and later, use `http.NewRequestWithContext` function instead of using `http.NewRequest` function. +- In Go 1.12 and earlier, call `(http.Request).WithContext(ctx)` after `http.NewRequest`. + +`(http.Request).WithContext(ctx)` has a disadvantage of performance because it returns a copy of `http.Request`. Use `http.NewRequestWithContext` function if you only support Go1.13 or later. 
+ +## Sample Code + +```go +package main + +import ( + "context" + "net/http" +) + +func main() { + const url = "http://example.com" + http.Get(url) // want `net/http\.Get must not be called` + http.Head(url) // want `net/http\.Head must not be called` + http.Post(url, "", nil) // want `net/http\.Post must not be called` + http.PostForm(url, nil) // want `net/http\.PostForm must not be called` + + cli := &http.Client{} + cli.Get(url) // want `\(\*net/http\.Client\)\.Get must not be called` + cli.Head(url) // want `\(\*net/http\.Client\)\.Head must not be called` + cli.Post(url, "", nil) // want `\(\*net/http\.Client\)\.Post must not be called` + cli.PostForm(url, nil) // want `\(\*net/http\.Client\)\.PostForm must not be called` + + req, _ := http.NewRequest(http.MethodPost, url, nil) // want `should rewrite http.NewRequestWithContext or add \(\*Request\).WithContext` + cli.Do(req) + + ctx := context.Background() + req2, _ := http.NewRequestWithContext(ctx, http.MethodPost, url, nil) // OK + cli.Do(req2) + + req3, _ := http.NewRequest(http.MethodPost, url, nil) // OK + req3 = req3.WithContext(ctx) + cli.Do(req3) + + f2 := func(req *http.Request, ctx context.Context) *http.Request { + return req + } + req4, _ := http.NewRequest(http.MethodPost, url, nil) // want `should rewrite http.NewRequestWithContext or add \(\*Request\).WithContext` + req4 = f2(req4, ctx) + cli.Do(req4) + + req5, _ := func() (*http.Request, error) { + return http.NewRequest(http.MethodPost, url, nil) // want `should rewrite http.NewRequestWithContext or add \(\*Request\).WithContext` + }() + cli.Do(req5) + +} +``` + +## Reference +- [net/http - NewRequest](https://golang.org/pkg/net/http/#NewRequest) +- [net/http - NewRequestWithContext](https://golang.org/pkg/net/http/#NewRequestWithContext) +- [net/http - Request.WithContext](https://golang.org/pkg/net/http/#Request.WithContext) + diff --git a/vendor/github.com/sonatard/noctx/go.mod b/vendor/github.com/sonatard/noctx/go.mod new file mode 100644 index 000000000..47b7901a0 --- /dev/null +++ b/vendor/github.com/sonatard/noctx/go.mod @@ -0,0 +1,8 @@ +module github.com/sonatard/noctx + +go 1.13 + +require ( + github.com/gostaticanalysis/analysisutil v0.0.3 + golang.org/x/tools v0.0.0-20200117220505-0cba7a3a9ee9 +) diff --git a/vendor/github.com/sonatard/noctx/go.sum b/vendor/github.com/sonatard/noctx/go.sum new file mode 100644 index 000000000..f8e5b0759 --- /dev/null +++ b/vendor/github.com/sonatard/noctx/go.sum @@ -0,0 +1,16 @@ +github.com/gostaticanalysis/analysisutil v0.0.3 h1:iwp+5/UAyzQSFgQ4uR2sni99sJ8Eo9DEacKWM5pekIg= +github.com/gostaticanalysis/analysisutil v0.0.3/go.mod h1:eEOZF4jCKGi+aprrirO9e7WKB3beBRtWgqGunKl6pKE= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= +golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod 
h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/tools v0.0.0-20190311215038-5c2858a9cfe5/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20200117220505-0cba7a3a9ee9 h1:KOkk4e2xd5OeCDJGwacvr75ICCbCsShrHiqPEdsA9hg= +golang.org/x/tools v0.0.0-20200117220505-0cba7a3a9ee9/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= diff --git a/vendor/github.com/sonatard/noctx/ngfunc/main.go b/vendor/github.com/sonatard/noctx/ngfunc/main.go new file mode 100644 index 000000000..cfeb0f001 --- /dev/null +++ b/vendor/github.com/sonatard/noctx/ngfunc/main.go @@ -0,0 +1,57 @@ +package ngfunc + +import ( + "go/types" + + "github.com/gostaticanalysis/analysisutil" + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/buildssa" +) + +func Run(pass *analysis.Pass) (interface{}, error) { + ngFuncNames := []string{ + "net/http.Get", + "net/http.Head", + "net/http.Post", + "net/http.PostForm", + "(*net/http.Client).Get", + "(*net/http.Client).Head", + "(*net/http.Client).Post", + "(*net/http.Client).PostForm", + } + + ngFuncs := typeFuncs(pass, ngFuncNames) + if len(ngFuncs) == 0 { + return nil, nil + } + + reportFuncs := ngCalledFuncs(pass, ngFuncs) + report(pass, reportFuncs) + + return nil, nil +} + +func ngCalledFuncs(pass *analysis.Pass, ngFuncs []*types.Func) []*Report { + var reports []*Report + + srcFuncs := pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs + for _, sf := range srcFuncs { + for _, b := range sf.Blocks { + for _, instr := range b.Instrs { + for _, ngFunc := range ngFuncs { + if analysisutil.Called(instr, nil, ngFunc) { + ngCalledFunc := &Report{ + Instruction: instr, + function: ngFunc, + } + reports = append(reports, ngCalledFunc) + + break + } + } + } + } + } + + return reports +} diff --git a/vendor/github.com/sonatard/noctx/ngfunc/report.go b/vendor/github.com/sonatard/noctx/ngfunc/report.go new file mode 100644 index 000000000..e50051798 --- /dev/null +++ b/vendor/github.com/sonatard/noctx/ngfunc/report.go @@ -0,0 +1,29 @@ +package ngfunc + +import ( + "fmt" + "go/token" + "go/types" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/ssa" +) + +type Report struct { + Instruction ssa.Instruction + function *types.Func +} + +func (n *Report) Pos() token.Pos { + return n.Instruction.Pos() +} + +func (n *Report) Message() string { + return fmt.Sprintf("%s must not be called", n.function.FullName()) +} + +func report(pass *analysis.Pass, reports []*Report) { + for _, report := range reports { + pass.Reportf(report.Pos(), report.Message()) + } +} diff --git a/vendor/github.com/sonatard/noctx/ngfunc/types.go b/vendor/github.com/sonatard/noctx/ngfunc/types.go new file mode 100644 index 000000000..f1877386c --- /dev/null +++ b/vendor/github.com/sonatard/noctx/ngfunc/types.go @@ -0,0 +1,65 @@ +package ngfunc + +import ( + "fmt" + "go/types" + "strings" + + "github.com/gostaticanalysis/analysisutil" + "golang.org/x/tools/go/analysis" +) + +var errNotFound = fmt.Errorf("function not found") + +func typeFuncs(pass *analysis.Pass, funcs []string) []*types.Func { + fs := make([]*types.Func, 0, len(funcs)) + + for _, fn := range funcs { + f, err := typeFunc(pass, fn) + if err != nil { + continue + } + + fs = 
append(fs, f) + } + + return fs +} + +func typeFunc(pass *analysis.Pass, funcName string) (*types.Func, error) { + ss := strings.Split(strings.TrimSpace(funcName), ".") + + switch len(ss) { + case 2: + // package function: pkgname.Func + f, ok := analysisutil.ObjectOf(pass, ss[0], ss[1]).(*types.Func) + if !ok || f == nil { + return nil, errNotFound + } + + return f, nil + case 3: + // method: (*pkgname.Type).Method + pkgname := strings.TrimLeft(ss[0], "(") + typename := strings.TrimRight(ss[1], ")") + + if pkgname != "" && pkgname[0] == '*' { + pkgname = pkgname[1:] + typename = "*" + typename + } + + typ := analysisutil.TypeOf(pass, pkgname, typename) + if typ == nil { + return nil, errNotFound + } + + m := analysisutil.MethodOf(typ, ss[2]) + if m == nil { + return nil, errNotFound + } + + return m, nil + } + + return nil, errNotFound +} diff --git a/vendor/github.com/sonatard/noctx/noctx.go b/vendor/github.com/sonatard/noctx/noctx.go new file mode 100644 index 000000000..478ad8855 --- /dev/null +++ b/vendor/github.com/sonatard/noctx/noctx.go @@ -0,0 +1,31 @@ +package noctx + +import ( + "github.com/sonatard/noctx/ngfunc" + "github.com/sonatard/noctx/reqwithoutctx" + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/buildssa" +) + +var Analyzer = &analysis.Analyzer{ + Name: "noctx", + Doc: Doc, + Run: run, + Requires: []*analysis.Analyzer{ + buildssa.Analyzer, + }, +} + +const Doc = "noctx finds sending http request without context.Context" + +func run(pass *analysis.Pass) (interface{}, error) { + if _, err := ngfunc.Run(pass); err != nil { + return nil, err + } + + if _, err := reqwithoutctx.Run(pass); err != nil { + return nil, err + } + + return nil, nil +} diff --git a/vendor/github.com/sonatard/noctx/reqwithoutctx/main.go b/vendor/github.com/sonatard/noctx/reqwithoutctx/main.go new file mode 100644 index 000000000..b09e1de1b --- /dev/null +++ b/vendor/github.com/sonatard/noctx/reqwithoutctx/main.go @@ -0,0 +1,14 @@ +package reqwithoutctx + +import ( + "golang.org/x/tools/go/analysis" +) + +func Run(pass *analysis.Pass) (interface{}, error) { + analyzer := NewAnalyzer(pass) + reports := analyzer.Exec() + + report(pass, reports) + + return nil, nil +} diff --git a/vendor/github.com/sonatard/noctx/reqwithoutctx/report.go b/vendor/github.com/sonatard/noctx/reqwithoutctx/report.go new file mode 100644 index 000000000..1c94e3148 --- /dev/null +++ b/vendor/github.com/sonatard/noctx/reqwithoutctx/report.go @@ -0,0 +1,26 @@ +package reqwithoutctx + +import ( + "go/token" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/ssa" +) + +type Report struct { + Instruction ssa.Instruction +} + +func (n *Report) Pos() token.Pos { + return n.Instruction.Pos() +} + +func (n *Report) Message() string { + return "should rewrite http.NewRequestWithContext or add (*Request).WithContext" +} + +func report(pass *analysis.Pass, reports []*Report) { + for _, report := range reports { + pass.Reportf(report.Pos(), report.Message()) + } +} diff --git a/vendor/github.com/sonatard/noctx/reqwithoutctx/ssa.go b/vendor/github.com/sonatard/noctx/reqwithoutctx/ssa.go new file mode 100644 index 000000000..35751269e --- /dev/null +++ b/vendor/github.com/sonatard/noctx/reqwithoutctx/ssa.go @@ -0,0 +1,180 @@ +package reqwithoutctx + +import ( + "go/types" + + "github.com/gostaticanalysis/analysisutil" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/buildssa" + "golang.org/x/tools/go/ssa" +) + +type Analyzer struct { + Funcs []*ssa.Function + newRequestType 
types.Type + requestType types.Type +} + +func NewAnalyzer(pass *analysis.Pass) *Analyzer { + newRequestType := analysisutil.TypeOf(pass, "net/http", "NewRequest") + requestType := analysisutil.TypeOf(pass, "net/http", "*Request") + + srcFuncs := pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs + + return &Analyzer{ + Funcs: srcFuncs, + newRequestType: newRequestType, + requestType: requestType, + } +} + +func (a *Analyzer) Exec() []*Report { + if a.newRequestType == nil || a.requestType == nil { + return []*Report{} + } + + usedReqs := a.usedReqs() + newReqs := a.requestsByNewRequest() + + return a.report(usedReqs, newReqs) +} + +func (a *Analyzer) report(usedReqs map[string]*ssa.Extract, newReqs map[*ssa.Call]*ssa.Extract) []*Report { + var reports []*Report + + for _, fReq := range usedReqs { + for newRequest, req := range newReqs { + if fReq == req { + reports = append(reports, &Report{Instruction: newRequest}) + } + } + } + + return reports +} + +func (a *Analyzer) usedReqs() map[string]*ssa.Extract { + reqExts := make(map[string]*ssa.Extract) + + for _, f := range a.Funcs { + for _, b := range f.Blocks { + for _, instr := range b.Instrs { + switch i := instr.(type) { + case *ssa.Call: + exts := a.usedReqByCall(i) + for _, ext := range exts { + key := i.String() + ext.String() + reqExts[key] = ext + } + case *ssa.UnOp: + ext := a.usedReqByUnOp(i) + if ext != nil { + key := i.String() + ext.String() + reqExts[key] = ext + } + case *ssa.Return: + exts := a.usedReqByReturn(i) + for _, ext := range exts { + key := i.String() + ext.String() + reqExts[key] = ext + } + } + } + } + } + + return reqExts +} + +func (a *Analyzer) usedReqByCall(call *ssa.Call) []*ssa.Extract { + var exts []*ssa.Extract + + // skip net/http.Request method call + if call.Common().Signature().Recv() != nil && types.Identical(call.Value().Type(), a.requestType) { + return exts + } + + args := call.Common().Args + if len(args) == 0 { + return exts + } + + for _, arg := range args { + ext, ok := arg.(*ssa.Extract) + if !ok { + continue + } + + if !types.Identical(ext.Type(), a.requestType) { + continue + } + + exts = append(exts, ext) + } + + return exts +} + +func (a *Analyzer) usedReqByUnOp(op *ssa.UnOp) *ssa.Extract { + if ext, ok := op.X.(*ssa.Extract); ok && types.Identical(ext.Type(), a.requestType) { + return ext + } + + return nil +} + +func (a *Analyzer) usedReqByReturn(ret *ssa.Return) []*ssa.Extract { + rets := ret.Results + exts := make([]*ssa.Extract, 0, len(rets)) + + for _, ret := range rets { + ext, ok := ret.(*ssa.Extract) + if !ok { + continue + } + + if types.Identical(ext.Type(), a.requestType) { + exts = append(exts, ext) + } + } + + return exts +} + +func (a *Analyzer) requestsByNewRequest() map[*ssa.Call]*ssa.Extract { + reqs := make(map[*ssa.Call]*ssa.Extract) + + for _, f := range a.Funcs { + for _, b := range f.Blocks { + for _, instr := range b.Instrs { + ext, ok := instr.(*ssa.Extract) + if !ok { + continue + } + + if !types.Identical(ext.Type(), a.requestType) { + continue + } + + operands := ext.Operands([]*ssa.Value{}) + if len(operands) != 1 { + continue + } + + operand := *operands[0] + + f, ok := operand.(*ssa.Call) + if !ok { + continue + } + + if types.Identical(f.Call.Value.Type(), a.newRequestType) { + reqs[f] = ext + } + } + } + } + + return reqs +} diff --git a/vendor/github.com/sourcegraph/go-diff/LICENSE b/vendor/github.com/sourcegraph/go-diff/LICENSE new file mode 100644 index 000000000..0733b6e5f --- /dev/null +++ b/vendor/github.com/sourcegraph/go-diff/LICENSE @@ 
-0,0 +1,35 @@ +Copyright (c) 2014 Sourcegraph, Inc. + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without +restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. + +----------------------------------------------------------------- + +Portions adapted from python-unidiff: + +Copyright (c) 2012 Matias Bordese + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: diff --git a/vendor/github.com/sourcegraph/go-diff/diff/diff.go b/vendor/github.com/sourcegraph/go-diff/diff/diff.go new file mode 100644 index 000000000..0f465b9e2 --- /dev/null +++ b/vendor/github.com/sourcegraph/go-diff/diff/diff.go @@ -0,0 +1,132 @@ +package diff + +import ( + "bytes" + "time" +) + +// A FileDiff represents a unified diff for a single file. +// +// A file unified diff has a header that resembles the following: +// +// --- oldname 2009-10-11 15:12:20.000000000 -0700 +// +++ newname 2009-10-11 15:12:30.000000000 -0700 +type FileDiff struct { + // the original name of the file + OrigName string + // the original timestamp (nil if not present) + OrigTime *time.Time + // the new name of the file (often same as OrigName) + NewName string + // the new timestamp (nil if not present) + NewTime *time.Time + // extended header lines (e.g., git's "new mode ", "rename from ", etc.) + Extended []string + // hunks that were changed from orig to new + Hunks []*Hunk +} + +// A Hunk represents a series of changes (additions or deletions) in a file's +// unified diff. +type Hunk struct { + // starting line number in original file + OrigStartLine int32 + // number of lines the hunk applies to in the original file + OrigLines int32 + // if > 0, then the original file had a 'No newline at end of file' mark at this offset + OrigNoNewlineAt int32 + // starting line number in new file + NewStartLine int32 + // number of lines the hunk applies to in the new file + NewLines int32 + // optional section heading + Section string + // 0-indexed line offset in unified file diff (including section headers); this is + // only set when Hunks are read from entire file diff (i.e., when ReadAllHunks is + // called) This accounts for hunk headers, too, so the StartPosition of the first + // hunk will be 1. 
+ StartPosition int32 + // hunk body (lines prefixed with '-', '+', or ' ') + Body []byte +} + +// A Stat is a diff stat that represents the number of lines added/changed/deleted. +type Stat struct { + // number of lines added + Added int32 + // number of lines changed + Changed int32 + // number of lines deleted + Deleted int32 +} + +// Stat computes the number of lines added/changed/deleted in all +// hunks in this file's diff. +func (d *FileDiff) Stat() Stat { + total := Stat{} + for _, h := range d.Hunks { + total.add(h.Stat()) + } + return total +} + +// Stat computes the number of lines added/changed/deleted in this +// hunk. +func (h *Hunk) Stat() Stat { + lines := bytes.Split(h.Body, []byte{'\n'}) + var last byte + st := Stat{} + for _, line := range lines { + if len(line) == 0 { + last = 0 + continue + } + switch line[0] { + case '-': + if last == '+' { + st.Added-- + st.Changed++ + last = 0 // next line can't change this one since this is already a change + } else { + st.Deleted++ + last = line[0] + } + case '+': + if last == '-' { + st.Deleted-- + st.Changed++ + last = 0 // next line can't change this one since this is already a change + } else { + st.Added++ + last = line[0] + } + default: + last = 0 + } + } + return st +} + +var ( + hunkPrefix = []byte("@@ ") + onlyInMessagePrefix = []byte("Only in ") +) + +const hunkHeader = "@@ -%d,%d +%d,%d @@" +const onlyInMessage = "Only in %s: %s\n" + +// diffTimeParseLayout is the layout used to parse the time in unified diff file +// header timestamps. +// See https://www.gnu.org/software/diffutils/manual/html_node/Detailed-Unified.html. +const diffTimeParseLayout = "2006-01-02 15:04:05 -0700" + +// diffTimeFormatLayout is the layout used to format (i.e., print) the time in unified diff file +// header timestamps. +// See https://www.gnu.org/software/diffutils/manual/html_node/Detailed-Unified.html. +const diffTimeFormatLayout = "2006-01-02 15:04:05.000000000 -0700" + +func (s *Stat) add(o Stat) { + s.Added += o.Added + s.Changed += o.Changed + s.Deleted += o.Deleted +} diff --git a/vendor/github.com/sourcegraph/go-diff/diff/doc.go b/vendor/github.com/sourcegraph/go-diff/diff/doc.go new file mode 100644 index 000000000..12fe96a07 --- /dev/null +++ b/vendor/github.com/sourcegraph/go-diff/diff/doc.go @@ -0,0 +1,2 @@ +// Package diff provides a parser for unified diffs. +package diff diff --git a/vendor/github.com/sourcegraph/go-diff/diff/parse.go b/vendor/github.com/sourcegraph/go-diff/diff/parse.go new file mode 100644 index 000000000..8d5cfc238 --- /dev/null +++ b/vendor/github.com/sourcegraph/go-diff/diff/parse.go @@ -0,0 +1,725 @@ +package diff + +import ( + "bufio" + "bytes" + "errors" + "fmt" + "io" + "path/filepath" + "strconv" + "strings" + "time" +) + +// ParseMultiFileDiff parses a multi-file unified diff. It returns an error if +// parsing failed as a whole, but does its best to parse as many files in the +// case of per-file errors. If it cannot detect when the diff of the next file +// begins, the hunks are added to the FileDiff of the previous file. +func ParseMultiFileDiff(diff []byte) ([]*FileDiff, error) { + return NewMultiFileDiffReader(bytes.NewReader(diff)).ReadAllFiles() +} + +// NewMultiFileDiffReader returns a new MultiFileDiffReader that reads +// a multi-file unified diff from r. +func NewMultiFileDiffReader(r io.Reader) *MultiFileDiffReader { + return &MultiFileDiffReader{reader: bufio.NewReader(r)} +} + +// MultiFileDiffReader reads a multi-file unified diff. 
+type MultiFileDiffReader struct { + line int + offset int64 + reader *bufio.Reader + + // TODO(sqs): line and offset tracking in multi-file diffs is broken; add tests and fix + + // nextFileFirstLine is a line that was read by a HunksReader that + // was how it determined the hunk was complete. But to determine + // that, it needed to read the first line of the next file. We + // store nextFileFirstLine so we can "give the first line back" to + // the next file. + nextFileFirstLine []byte +} + +// ReadFile reads the next file unified diff (including headers and +// all hunks) from r. If there are no more files in the diff, it +// returns error io.EOF. +func (r *MultiFileDiffReader) ReadFile() (*FileDiff, error) { + fr := &FileDiffReader{ + line: r.line, + offset: r.offset, + reader: r.reader, + fileHeaderLine: r.nextFileFirstLine, + } + r.nextFileFirstLine = nil + + fd, err := fr.ReadAllHeaders() + if err != nil { + switch e := err.(type) { + case *ParseError: + if e.Err == ErrNoFileHeader || e.Err == ErrExtendedHeadersEOF { + return nil, io.EOF + } + return nil, err + + case OverflowError: + r.nextFileFirstLine = []byte(e) + return fd, nil + + default: + return nil, err + } + } + + // FileDiff is added/deleted file + // No further collection of hunks needed + if fd.NewName == "" { + return fd, nil + } + + // Before reading hunks, check to see if there are any. If there + // aren't any, and there's another file after this file in the + // diff, then the hunks reader will complain ErrNoHunkHeader. It's + // not easy for us to tell from that error alone if that was + // caused by the lack of any hunks, or a malformatted hunk, so we + // need to perform the check here. + hr := fr.HunksReader() + line, err := readLine(r.reader) + if err != nil && err != io.EOF { + return fd, err + } + line = bytes.TrimSuffix(line, []byte{'\n'}) + if bytes.HasPrefix(line, hunkPrefix) { + hr.nextHunkHeaderLine = line + fd.Hunks, err = hr.ReadAllHunks() + r.line = fr.line + r.offset = fr.offset + if err != nil { + if e0, ok := err.(*ParseError); ok { + if e, ok := e0.Err.(*ErrBadHunkLine); ok { + // This just means we finished reading the hunks for the + // current file. See the ErrBadHunkLine doc for more info. + r.nextFileFirstLine = e.Line + return fd, nil + } + } + return nil, err + } + } else { + // There weren't any hunks, so that line we peeked ahead at + // actually belongs to the next file. Put it back. + r.nextFileFirstLine = line + } + + return fd, nil +} + +// ReadAllFiles reads all file unified diffs (including headers and all +// hunks) remaining in r. +func (r *MultiFileDiffReader) ReadAllFiles() ([]*FileDiff, error) { + var ds []*FileDiff + for { + d, err := r.ReadFile() + if d != nil { + ds = append(ds, d) + } + if err == io.EOF { + return ds, nil + } + if err != nil { + return nil, err + } + } +} + +// ParseFileDiff parses a file unified diff. +func ParseFileDiff(diff []byte) (*FileDiff, error) { + return NewFileDiffReader(bytes.NewReader(diff)).Read() +} + +// NewFileDiffReader returns a new FileDiffReader that reads a file +// unified diff. +func NewFileDiffReader(r io.Reader) *FileDiffReader { + return &FileDiffReader{reader: bufio.NewReader(r)} +} + +// FileDiffReader reads a unified file diff. 
+type FileDiffReader struct { + line int + offset int64 + reader *bufio.Reader + + // fileHeaderLine is the first file header line, set by: + // + // (1) ReadExtendedHeaders if it encroaches on a file header line + // (which it must to detect when extended headers are done); or + // (2) (*MultiFileDiffReader).ReadFile() if it encroaches on a + // file header line while reading the previous file's hunks (in a + // multi-file diff). + fileHeaderLine []byte +} + +// Read reads a file unified diff, including headers and hunks, from r. +func (r *FileDiffReader) Read() (*FileDiff, error) { + fd, err := r.ReadAllHeaders() + if err != nil { + return nil, err + } + + fd.Hunks, err = r.HunksReader().ReadAllHunks() + if err != nil { + return nil, err + } + + return fd, nil +} + +// ReadAllHeaders reads the file headers and extended headers (if any) +// from a file unified diff. It does not read hunks, and the returned +// FileDiff's Hunks field is nil. To read the hunks, call the +// (*FileDiffReader).HunksReader() method to get a HunksReader and +// read hunks from that. +func (r *FileDiffReader) ReadAllHeaders() (*FileDiff, error) { + var err error + fd := &FileDiff{} + + fd.Extended, err = r.ReadExtendedHeaders() + if pe, ok := err.(*ParseError); ok && pe.Err == ErrExtendedHeadersEOF { + wasEmpty := handleEmpty(fd) + if wasEmpty { + return fd, nil + } + return fd, err + } else if _, ok := err.(OverflowError); ok { + handleEmpty(fd) + return fd, err + } else if err != nil { + return fd, err + } + + var origTime, newTime *time.Time + fd.OrigName, fd.NewName, origTime, newTime, err = r.ReadFileHeaders() + if err != nil { + return nil, err + } + if origTime != nil { + fd.OrigTime = origTime + } + if newTime != nil { + fd.NewTime = newTime + } + + return fd, nil +} + +// HunksReader returns a new HunksReader that reads hunks from r. The +// HunksReader's line and offset (used in error messages) is set to +// start where the file diff header ended (which means errors have the +// correct position information). +func (r *FileDiffReader) HunksReader() *HunksReader { + return &HunksReader{ + line: r.line, + offset: r.offset, + reader: r.reader, + } +} + +// ReadFileHeaders reads the unified file diff header (the lines that +// start with "---" and "+++" with the orig/new file names and +// timestamps). Or which starts with "Only in " with dir path and filename. +// "Only in" message is supported in POSIX locale: https://pubs.opengroup.org/onlinepubs/9699919799/utilities/diff.html#tag_20_34_10 +func (r *FileDiffReader) ReadFileHeaders() (origName, newName string, origTimestamp, newTimestamp *time.Time, err error) { + if r.fileHeaderLine != nil { + if isOnlyMessage, source, filename := parseOnlyInMessage(r.fileHeaderLine); isOnlyMessage { + return filepath.Join(string(source), string(filename)), + "", nil, nil, nil + } + } + + origName, origTimestamp, err = r.readOneFileHeader([]byte("--- ")) + if err != nil { + return "", "", nil, nil, err + } + + newName, newTimestamp, err = r.readOneFileHeader([]byte("+++ ")) + if err != nil { + return "", "", nil, nil, err + } + + unquotedOrigName, err := strconv.Unquote(origName) + if err == nil { + origName = unquotedOrigName + } + unquotedNewName, err := strconv.Unquote(newName) + if err == nil { + newName = unquotedNewName + } + + return origName, newName, origTimestamp, newTimestamp, nil +} + +// readOneFileHeader reads one of the file headers (prefix should be +// either "+++ " or "--- "). 
+func (r *FileDiffReader) readOneFileHeader(prefix []byte) (filename string, timestamp *time.Time, err error) { + var line []byte + + if r.fileHeaderLine == nil { + var err error + line, err = readLine(r.reader) + if err == io.EOF { + return "", nil, &ParseError{r.line, r.offset, ErrNoFileHeader} + } else if err != nil { + return "", nil, err + } + } else { + line = r.fileHeaderLine + r.fileHeaderLine = nil + } + + if !bytes.HasPrefix(line, prefix) { + return "", nil, &ParseError{r.line, r.offset, ErrBadFileHeader} + } + + r.offset += int64(len(line)) + r.line++ + line = line[len(prefix):] + + trimmedLine := strings.TrimSpace(string(line)) // filenames that contain spaces may be terminated by a tab + parts := strings.SplitN(trimmedLine, "\t", 2) + filename = parts[0] + if len(parts) == 2 { + // Timestamp is optional, but this header has it. + ts, err := time.Parse(diffTimeParseLayout, parts[1]) + if err != nil { + return "", nil, err + } + timestamp = &ts + } + + return filename, timestamp, err +} + +// OverflowError is returned when we have overflowed into the start +// of the next file while reading extended headers. +type OverflowError string + +func (e OverflowError) Error() string { + return fmt.Sprintf("overflowed into next file: %s", string(e)) +} + +// ReadExtendedHeaders reads the extended header lines, if any, from a +// unified diff file (e.g., git's "diff --git a/foo.go b/foo.go", "new +// mode ", "rename from ", etc.). +func (r *FileDiffReader) ReadExtendedHeaders() ([]string, error) { + var xheaders []string + firstLine := true + for { + var line []byte + if r.fileHeaderLine == nil { + var err error + line, err = readLine(r.reader) + if err == io.EOF { + return xheaders, &ParseError{r.line, r.offset, ErrExtendedHeadersEOF} + } else if err != nil { + return xheaders, err + } + } else { + line = r.fileHeaderLine + r.fileHeaderLine = nil + } + + if bytes.HasPrefix(line, []byte("diff --git ")) { + if firstLine { + firstLine = false + } else { + return xheaders, OverflowError(line) + } + } + if bytes.HasPrefix(line, []byte("--- ")) { + // We've reached the file header. + r.fileHeaderLine = line // pass to readOneFileHeader (see fileHeaderLine field doc) + return xheaders, nil + } + + // Reached message that file is added/deleted + if isOnlyInMessage, _, _ := parseOnlyInMessage(line); isOnlyInMessage { + r.fileHeaderLine = line // pass to readOneFileHeader (see fileHeaderLine field doc) + return xheaders, nil + } + + r.line++ + r.offset += int64(len(line)) + xheaders = append(xheaders, string(line)) + } +} + +// handleEmpty detects when FileDiff was an empty diff and will not have any hunks +// that follow. It updates fd fields from the parsed extended headers. 
+func handleEmpty(fd *FileDiff) (wasEmpty bool) { + var err error + lineCount := len(fd.Extended) + if lineCount > 0 && !strings.HasPrefix(fd.Extended[0], "diff --git ") { + return false + } + switch { + case (lineCount == 3 || lineCount == 4 && strings.HasPrefix(fd.Extended[3], "Binary files ") || lineCount > 4 && strings.HasPrefix(fd.Extended[3], "GIT binary patch")) && + strings.HasPrefix(fd.Extended[1], "new file mode "): + + names := strings.SplitN(fd.Extended[0][len("diff --git "):], " ", 2) + fd.OrigName = "/dev/null" + fd.NewName, err = strconv.Unquote(names[1]) + if err != nil { + fd.NewName = names[1] + } + return true + case (lineCount == 3 || lineCount == 4 && strings.HasPrefix(fd.Extended[3], "Binary files ") || lineCount > 4 && strings.HasPrefix(fd.Extended[3], "GIT binary patch")) && + strings.HasPrefix(fd.Extended[1], "deleted file mode "): + + names := strings.SplitN(fd.Extended[0][len("diff --git "):], " ", 2) + fd.OrigName, err = strconv.Unquote(names[0]) + if err != nil { + fd.OrigName = names[0] + } + fd.NewName = "/dev/null" + return true + case lineCount == 4 && strings.HasPrefix(fd.Extended[2], "rename from ") && strings.HasPrefix(fd.Extended[3], "rename to "): + names := strings.SplitN(fd.Extended[0][len("diff --git "):], " ", 2) + fd.OrigName, err = strconv.Unquote(names[0]) + if err != nil { + fd.OrigName = names[0] + } + fd.NewName, err = strconv.Unquote(names[1]) + if err != nil { + fd.NewName = names[1] + } + return true + case lineCount == 6 && strings.HasPrefix(fd.Extended[5], "Binary files ") && strings.HasPrefix(fd.Extended[2], "rename from ") && strings.HasPrefix(fd.Extended[3], "rename to "): + names := strings.SplitN(fd.Extended[0][len("diff --git "):], " ", 2) + fd.OrigName = names[0] + fd.NewName = names[1] + return true + case lineCount == 3 && strings.HasPrefix(fd.Extended[2], "Binary files ") || lineCount > 3 && strings.HasPrefix(fd.Extended[2], "GIT binary patch"): + names := strings.SplitN(fd.Extended[0][len("diff --git "):], " ", 2) + fd.OrigName, err = strconv.Unquote(names[0]) + if err != nil { + fd.OrigName = names[0] + } + fd.NewName, err = strconv.Unquote(names[1]) + if err != nil { + fd.NewName = names[1] + } + return true + default: + return false + } +} + +var ( + // ErrNoFileHeader is when a file unified diff has no file header + // (i.e., the lines that begin with "---" and "+++"). + ErrNoFileHeader = errors.New("expected file header, got EOF") + + // ErrBadFileHeader is when a file unified diff has a malformed + // file header (i.e., the lines that begin with "---" and "+++"). + ErrBadFileHeader = errors.New("bad file header") + + // ErrExtendedHeadersEOF is when an EOF was encountered while reading extended file headers, which means that there were no ---/+++ headers encountered before hunks (if any) began. + ErrExtendedHeadersEOF = errors.New("expected file header while reading extended headers, got EOF") + + // ErrBadOnlyInMessage is when a file have a malformed `only in` message + // Should be in format `Only in {source}: {filename}` + ErrBadOnlyInMessage = errors.New("bad 'only in' message") +) + +// ParseHunks parses hunks from a unified diff. The diff must consist +// only of hunks and not include a file header; if it has a file +// header, use ParseFileDiff. 
+func ParseHunks(diff []byte) ([]*Hunk, error) { + r := NewHunksReader(bytes.NewReader(diff)) + hunks, err := r.ReadAllHunks() + if err != nil { + return nil, err + } + return hunks, nil +} + +// NewHunksReader returns a new HunksReader that reads unified diff hunks +// from r. +func NewHunksReader(r io.Reader) *HunksReader { + return &HunksReader{reader: bufio.NewReader(r)} +} + +// A HunksReader reads hunks from a unified diff. +type HunksReader struct { + line int + offset int64 + hunk *Hunk + reader *bufio.Reader + + nextHunkHeaderLine []byte +} + +// ReadHunk reads one hunk from r. If there are no more hunks, it +// returns error io.EOF. +func (r *HunksReader) ReadHunk() (*Hunk, error) { + r.hunk = nil + lastLineFromOrig := true + var line []byte + var err error + for { + if r.nextHunkHeaderLine != nil { + // Use stored hunk header line that was scanned in at the + // completion of the previous hunk's ReadHunk. + line = r.nextHunkHeaderLine + r.nextHunkHeaderLine = nil + } else { + line, err = readLine(r.reader) + if err != nil { + if err == io.EOF && r.hunk != nil { + return r.hunk, nil + } + return nil, err + } + } + + // Record position. + r.line++ + r.offset += int64(len(line)) + + if r.hunk == nil { + // Check for presence of hunk header. + if !bytes.HasPrefix(line, hunkPrefix) { + return nil, &ParseError{r.line, r.offset, ErrNoHunkHeader} + } + + // Parse hunk header. + r.hunk = &Hunk{} + items := []interface{}{ + &r.hunk.OrigStartLine, &r.hunk.OrigLines, + &r.hunk.NewStartLine, &r.hunk.NewLines, + } + header, section, err := normalizeHeader(string(line)) + if err != nil { + return nil, &ParseError{r.line, r.offset, err} + } + n, err := fmt.Sscanf(header, hunkHeader, items...) + if err != nil { + return nil, err + } + if n < len(items) { + return nil, &ParseError{r.line, r.offset, &ErrBadHunkHeader{header: string(line)}} + } + + r.hunk.Section = section + } else { + // Read hunk body line. + + // If the line starts with `---` and the next one with `+++` we're + // looking at a non-extended file header and need to abort. + if bytes.HasPrefix(line, []byte("---")) { + ok, err := peekPrefix(r.reader, "+++") + if err != nil { + return r.hunk, err + } + if ok { + return r.hunk, &ParseError{r.line, r.offset, &ErrBadHunkLine{Line: line}} + } + } + + // If the line starts with the hunk prefix, this hunk is complete. + if bytes.HasPrefix(line, hunkPrefix) { + // But we've already read in the next hunk's + // header, so we need to be sure that the next call to + // ReadHunk starts with that header. + r.nextHunkHeaderLine = line + + // Rewind position. + r.line-- + r.offset -= int64(len(line)) + + return r.hunk, nil + } + + if len(line) >= 1 && !linePrefix(line[0]) { + // Bad hunk header line. If we're reading a multi-file + // diff, this may be the end of the current + // file. Return a "rich" error that lets our caller + // handle that case. + return r.hunk, &ParseError{r.line, r.offset, &ErrBadHunkLine{Line: line}} + } + if bytes.Equal(line, []byte(noNewlineMessage)) { + if lastLineFromOrig { + // Retain the newline in the body (otherwise the + // diff line would be like "-a+b", where "+b" is + // the the next line of the new file, which is not + // validly formatted) but record that the orig had + // no newline. + r.hunk.OrigNoNewlineAt = int32(len(r.hunk.Body)) + } else { + // Remove previous line's newline. 
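+					// (Here the marker follows a line belonging to the new
+					// file or a context line, so the '\n' appended after that
+					// body line is dropped again: the new content ends
+					// without a trailing newline at this point.)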
+ if len(r.hunk.Body) != 0 { + r.hunk.Body = r.hunk.Body[:len(r.hunk.Body)-1] + } + } + continue + } + + if len(line) > 0 { + lastLineFromOrig = line[0] == '-' + } + + r.hunk.Body = append(r.hunk.Body, line...) + r.hunk.Body = append(r.hunk.Body, '\n') + } + } +} + +const noNewlineMessage = `\ No newline at end of file` + +// linePrefixes is the set of all characters a valid line in a diff +// hunk can start with. '\' can appear in diffs when no newline is +// present at the end of a file. +// See: 'http://www.gnu.org/software/diffutils/manual/diffutils.html#Incomplete-Lines' +var linePrefixes = []byte{' ', '-', '+', '\\'} + +// linePrefix returns true if 'c' is in 'linePrefixes'. +func linePrefix(c byte) bool { + for _, p := range linePrefixes { + if p == c { + return true + } + } + return false +} + +// peekPrefix peeks into the given reader to check whether the next +// bytes match the given prefix. +func peekPrefix(reader *bufio.Reader, prefix string) (bool, error) { + next, err := reader.Peek(len(prefix)) + if err != nil { + if err == io.EOF { + return false, nil + } + return false, err + } + return bytes.HasPrefix(next, []byte(prefix)), nil +} + +// normalizeHeader takes a header of the form: +// "@@ -linestart[,chunksize] +linestart[,chunksize] @@ section" +// and returns two strings, with the first in the form: +// "@@ -linestart,chunksize +linestart,chunksize @@". +// where linestart and chunksize are both integers. The second is the +// optional section header. chunksize may be omitted from the header +// if its value is 1. normalizeHeader returns an error if the header +// is not in the correct format. +func normalizeHeader(header string) (string, string, error) { + // Split the header into five parts: the first '@@', the two + // ranges, the last '@@', and the optional section. + pieces := strings.SplitN(header, " ", 5) + if len(pieces) < 4 { + return "", "", &ErrBadHunkHeader{header: header} + } + + if pieces[0] != "@@" { + return "", "", &ErrBadHunkHeader{header: header} + } + for i := 1; i < 3; i++ { + if !strings.ContainsRune(pieces[i], ',') { + pieces[i] = pieces[i] + ",1" + } + } + if pieces[3] != "@@" { + return "", "", &ErrBadHunkHeader{header: header} + } + + var section string + if len(pieces) == 5 { + section = pieces[4] + } + return strings.Join(pieces, " "), strings.TrimSpace(section), nil +} + +// ReadAllHunks reads all remaining hunks from r. A successful call +// returns err == nil, not err == EOF. Because ReadAllHunks is defined +// to read until EOF, it does not treat end of file as an error to be +// reported. +func (r *HunksReader) ReadAllHunks() ([]*Hunk, error) { + var hunks []*Hunk + linesRead := int32(0) + for { + hunk, err := r.ReadHunk() + if err == io.EOF { + return hunks, nil + } + if hunk != nil { + linesRead++ // account for the hunk header line + hunk.StartPosition = linesRead + hunks = append(hunks, hunk) + linesRead += int32(bytes.Count(hunk.Body, []byte{'\n'})) + } + if err != nil { + return hunks, err + } + } +} + +// parseOnlyInMessage checks if line is a "Only in {source}: {filename}" and returns source and filename +func parseOnlyInMessage(line []byte) (bool, []byte, []byte) { + if !bytes.HasPrefix(line, onlyInMessagePrefix) { + return false, nil, nil + } + line = line[len(onlyInMessagePrefix):] + idx := bytes.Index(line, []byte(": ")) + if idx < 0 { + return false, nil, nil + } + return true, line[:idx], line[idx+2:] +} + +// A ParseError is a description of a unified diff syntax error. 
+type ParseError struct { + Line int // Line where the error occurred + Offset int64 // Offset where the error occurred + Err error // The actual error +} + +func (e *ParseError) Error() string { + return fmt.Sprintf("line %d, char %d: %s", e.Line, e.Offset, e.Err) +} + +// ErrNoHunkHeader indicates that a unified diff hunk header was +// expected but not found during parsing. +var ErrNoHunkHeader = errors.New("no hunk header") + +// ErrBadHunkHeader indicates that a malformed unified diff hunk +// header was encountered during parsing. +type ErrBadHunkHeader struct { + header string +} + +func (e *ErrBadHunkHeader) Error() string { + if e.header == "" { + return "bad hunk header" + } + return "bad hunk header: " + e.header +} + +// ErrBadHunkLine is when a line not beginning with ' ', '-', '+', or +// '\' is encountered while reading a hunk. In the context of reading +// a single hunk or file, it is an unexpected error. In a multi-file +// diff, however, it indicates that the current file's diff is +// complete (and remaining diff data will describe another file +// unified diff). +type ErrBadHunkLine struct { + Line []byte +} + +func (e *ErrBadHunkLine) Error() string { + m := "bad hunk line (does not start with ' ', '-', '+', or '\\')" + if len(e.Line) == 0 { + return m + } + return m + ": " + string(e.Line) +} diff --git a/vendor/github.com/sourcegraph/go-diff/diff/print.go b/vendor/github.com/sourcegraph/go-diff/diff/print.go new file mode 100644 index 000000000..012651a33 --- /dev/null +++ b/vendor/github.com/sourcegraph/go-diff/diff/print.go @@ -0,0 +1,141 @@ +package diff + +import ( + "bytes" + "fmt" + "io" + "path/filepath" + "time" +) + +// PrintMultiFileDiff prints a multi-file diff in unified diff format. +func PrintMultiFileDiff(ds []*FileDiff) ([]byte, error) { + var buf bytes.Buffer + for _, d := range ds { + diff, err := PrintFileDiff(d) + if err != nil { + return nil, err + } + if _, err := buf.Write(diff); err != nil { + return nil, err + } + } + return buf.Bytes(), nil +} + +// PrintFileDiff prints a FileDiff in unified diff format. +// +// TODO(sqs): handle escaping whitespace/etc. chars in filenames +func PrintFileDiff(d *FileDiff) ([]byte, error) { + var buf bytes.Buffer + + for _, xheader := range d.Extended { + if _, err := fmt.Fprintln(&buf, xheader); err != nil { + return nil, err + } + } + + // FileDiff is added/deleted file + // No further hunks printing needed + if d.NewName == "" { + _, err := fmt.Fprintf(&buf, onlyInMessage, filepath.Dir(d.OrigName), filepath.Base(d.OrigName)) + if err != nil { + return nil, err + } + return buf.Bytes(), nil + } + + if d.Hunks == nil { + return buf.Bytes(), nil + } + + if err := printFileHeader(&buf, "--- ", d.OrigName, d.OrigTime); err != nil { + return nil, err + } + if err := printFileHeader(&buf, "+++ ", d.NewName, d.NewTime); err != nil { + return nil, err + } + + ph, err := PrintHunks(d.Hunks) + if err != nil { + return nil, err + } + + if _, err := buf.Write(ph); err != nil { + return nil, err + } + return buf.Bytes(), nil +} + +func printFileHeader(w io.Writer, prefix string, filename string, timestamp *time.Time) error { + if _, err := fmt.Fprint(w, prefix, filename); err != nil { + return err + } + if timestamp != nil { + if _, err := fmt.Fprint(w, "\t", timestamp.Format(diffTimeFormatLayout)); err != nil { + return err + } + } + if _, err := fmt.Fprintln(w); err != nil { + return err + } + return nil +} + +// PrintHunks prints diff hunks in unified diff format. 
+func PrintHunks(hunks []*Hunk) ([]byte, error) { + var buf bytes.Buffer + for _, hunk := range hunks { + _, err := fmt.Fprintf(&buf, + "@@ -%d,%d +%d,%d @@", hunk.OrigStartLine, hunk.OrigLines, hunk.NewStartLine, hunk.NewLines, + ) + if err != nil { + return nil, err + } + if hunk.Section != "" { + _, err := fmt.Fprint(&buf, " ", hunk.Section) + if err != nil { + return nil, err + } + } + if _, err := fmt.Fprintln(&buf); err != nil { + return nil, err + } + + if hunk.OrigNoNewlineAt == 0 { + if _, err := buf.Write(hunk.Body); err != nil { + return nil, err + } + } else { + if _, err := buf.Write(hunk.Body[:hunk.OrigNoNewlineAt]); err != nil { + return nil, err + } + if err := printNoNewlineMessage(&buf); err != nil { + return nil, err + } + if _, err := buf.Write(hunk.Body[hunk.OrigNoNewlineAt:]); err != nil { + return nil, err + } + } + + if !bytes.HasSuffix(hunk.Body, []byte{'\n'}) { + if _, err := fmt.Fprintln(&buf); err != nil { + return nil, err + } + if err := printNoNewlineMessage(&buf); err != nil { + return nil, err + } + } + } + return buf.Bytes(), nil +} + +func printNoNewlineMessage(w io.Writer) error { + if _, err := w.Write([]byte(noNewlineMessage)); err != nil { + return err + } + if _, err := fmt.Fprintln(w); err != nil { + return err + } + return nil +} diff --git a/vendor/github.com/sourcegraph/go-diff/diff/reader_util.go b/vendor/github.com/sourcegraph/go-diff/diff/reader_util.go new file mode 100644 index 000000000..395fb7baf --- /dev/null +++ b/vendor/github.com/sourcegraph/go-diff/diff/reader_util.go @@ -0,0 +1,37 @@ +package diff + +import ( + "bufio" + "io" +) + +// readLine is a helper that mimics the functionality of calling bufio.Scanner.Scan() and +// bufio.Scanner.Bytes(), but without the token size limitation. It will read and return +// the next line in the Reader with the trailing newline stripped. It will return an +// io.EOF error when there is nothing left to read (at the start of the function call). It +// will return any other errors it receives from the underlying call to ReadBytes. +func readLine(r *bufio.Reader) ([]byte, error) { + line_, err := r.ReadBytes('\n') + if err == io.EOF { + if len(line_) == 0 { + return nil, io.EOF + } + + // ReadBytes returned io.EOF, because it didn't find another newline, but there is + // still the remainder of the file to return as a line. + line := line_ + return line, nil + } else if err != nil { + return nil, err + } + line := line_[0 : len(line_)-1] + return dropCR(line), nil +} + +// dropCR drops a terminal \r from the data. +func dropCR(data []byte) []byte { + if len(data) > 0 && data[len(data)-1] == '\r' { + return data[0 : len(data)-1] + } + return data +} diff --git a/vendor/github.com/spf13/afero/.gitignore b/vendor/github.com/spf13/afero/.gitignore new file mode 100644 index 000000000..9c1d98611 --- /dev/null +++ b/vendor/github.com/spf13/afero/.gitignore @@ -0,0 +1,2 @@ +sftpfs/file1 +sftpfs/test/ diff --git a/vendor/github.com/spf13/afero/.travis.yml b/vendor/github.com/spf13/afero/.travis.yml new file mode 100644 index 000000000..e944f5947 --- /dev/null +++ b/vendor/github.com/spf13/afero/.travis.yml @@ -0,0 +1,26 @@ +sudo: false +language: go +arch: + - amd64 + - ppc64e + +go: + - "1.14" + - "1.15" + - "1.16" + - tip + +os: + - linux + - osx + +matrix: + allow_failures: + - go: tip + fast_finish: true + +script: + - go build -v ./... + - go test -count=1 -cover -race -v ./... + - go vet ./... + - FILES=$(gofmt -s -l . 
zipfs sftpfs mem tarfs); if [[ -n "${FILES}" ]]; then echo "You have go format errors; gofmt your changes"; exit 1; fi diff --git a/vendor/github.com/spf13/afero/LICENSE.txt b/vendor/github.com/spf13/afero/LICENSE.txt new file mode 100644 index 000000000..298f0e266 --- /dev/null +++ b/vendor/github.com/spf13/afero/LICENSE.txt @@ -0,0 +1,174 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
diff --git a/vendor/github.com/spf13/afero/README.md b/vendor/github.com/spf13/afero/README.md new file mode 100644 index 000000000..fb8eaaf89 --- /dev/null +++ b/vendor/github.com/spf13/afero/README.md @@ -0,0 +1,430 @@ +![afero logo-sm](https://cloud.githubusercontent.com/assets/173412/11490338/d50e16dc-97a5-11e5-8b12-019a300d0fcb.png) + +A FileSystem Abstraction System for Go + +[![Build Status](https://travis-ci.org/spf13/afero.svg)](https://travis-ci.org/spf13/afero) [![Build status](https://ci.appveyor.com/api/projects/status/github/spf13/afero?branch=master&svg=true)](https://ci.appveyor.com/project/spf13/afero) [![GoDoc](https://godoc.org/github.com/spf13/afero?status.svg)](https://godoc.org/github.com/spf13/afero) [![Join the chat at https://gitter.im/spf13/afero](https://badges.gitter.im/Dev%20Chat.svg)](https://gitter.im/spf13/afero?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge) + +# Overview + +Afero is a filesystem framework providing a simple, uniform and universal API +interacting with any filesystem, as an abstraction layer providing interfaces, +types and methods. Afero has an exceptionally clean interface and simple design +without needless constructors or initialization methods. + +Afero is also a library providing a base set of interoperable backend +filesystems that make it easy to work with afero while retaining all the power +and benefit of the os and ioutil packages. + +Afero provides significant improvements over using the os package alone, most +notably the ability to create mock and testing filesystems without relying on the disk. + +It is suitable for use in any situation where you would consider using the OS +package as it provides an additional abstraction that makes it easy to use a +memory backed file system during testing. It also adds support for the http +filesystem for full interoperability. + + +## Afero Features + +* A single consistent API for accessing a variety of filesystems +* Interoperation between a variety of file system types +* A set of interfaces to encourage and enforce interoperability between backends +* An atomic cross platform memory backed file system +* Support for compositional (union) file systems by combining multiple file systems acting as one +* Specialized backends which modify existing filesystems (Read Only, Regexp filtered) +* A set of utility functions ported from io, ioutil & hugo to be afero aware +* Wrapper for go 1.16 filesystem abstraction `io/fs.FS` + +# Using Afero + +Afero is easy to use and easier to adopt. + +A few different ways you could use Afero: + +* Use the interfaces alone to define your own file system. +* Wrapper for the OS packages. +* Define different filesystems for different parts of your application. +* Use Afero for mock filesystems while testing + +## Step 1: Install Afero + +First use go get to install the latest version of the library. + + $ go get github.com/spf13/afero + +Next include Afero in your application. +```go +import "github.com/spf13/afero" +``` + +## Step 2: Declare a backend + +First define a package variable and set it to a pointer to a filesystem. +```go +var AppFs = afero.NewMemMapFs() + +or + +var AppFs = afero.NewOsFs() +``` +It is important to note that if you repeat the composite literal you +will be using a completely new and isolated filesystem. In the case of +OsFs it will still use the same underlying filesystem but will reduce +the ability to drop in other filesystems as desired. 
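+A minimal sketch of this pattern (illustrative only, not from the afero docs;
+the package name and the helper function are assumed):
+```go
+package storage
+
+import "github.com/spf13/afero"
+
+// AppFs is the single filesystem handle the rest of the application uses.
+// It defaults to the real OS filesystem.
+var AppFs afero.Fs = afero.NewOsFs()
+
+// UseMemMapFs swaps in an in-memory backend, e.g. from a test's setup code,
+// so nothing touches the real disk.
+func UseMemMapFs() {
+	AppFs = afero.NewMemMapFs()
+}
+```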
+ +## Step 3: Use it like you would the OS package + +Throughout your application use any function and method like you normally +would. + +So if my application before had: +```go +os.Open('/tmp/foo') +``` +We would replace it with: +```go +AppFs.Open('/tmp/foo') +``` + +`AppFs` being the variable we defined above. + + +## List of all available functions + +File System Methods Available: +```go +Chmod(name string, mode os.FileMode) : error +Chown(name string, uid, gid int) : error +Chtimes(name string, atime time.Time, mtime time.Time) : error +Create(name string) : File, error +Mkdir(name string, perm os.FileMode) : error +MkdirAll(path string, perm os.FileMode) : error +Name() : string +Open(name string) : File, error +OpenFile(name string, flag int, perm os.FileMode) : File, error +Remove(name string) : error +RemoveAll(path string) : error +Rename(oldname, newname string) : error +Stat(name string) : os.FileInfo, error +``` +File Interfaces and Methods Available: +```go +io.Closer +io.Reader +io.ReaderAt +io.Seeker +io.Writer +io.WriterAt + +Name() : string +Readdir(count int) : []os.FileInfo, error +Readdirnames(n int) : []string, error +Stat() : os.FileInfo, error +Sync() : error +Truncate(size int64) : error +WriteString(s string) : ret int, err error +``` +In some applications it may make sense to define a new package that +simply exports the file system variable for easy access from anywhere. + +## Using Afero's utility functions + +Afero provides a set of functions to make it easier to use the underlying file systems. +These functions have been primarily ported from io & ioutil with some developed for Hugo. + +The afero utilities support all afero compatible backends. + +The list of utilities includes: + +```go +DirExists(path string) (bool, error) +Exists(path string) (bool, error) +FileContainsBytes(filename string, subslice []byte) (bool, error) +GetTempDir(subPath string) string +IsDir(path string) (bool, error) +IsEmpty(path string) (bool, error) +ReadDir(dirname string) ([]os.FileInfo, error) +ReadFile(filename string) ([]byte, error) +SafeWriteReader(path string, r io.Reader) (err error) +TempDir(dir, prefix string) (name string, err error) +TempFile(dir, prefix string) (f File, err error) +Walk(root string, walkFn filepath.WalkFunc) error +WriteFile(filename string, data []byte, perm os.FileMode) error +WriteReader(path string, r io.Reader) (err error) +``` +For a complete list see [Afero's GoDoc](https://godoc.org/github.com/spf13/afero) + +They are available under two different approaches to use. You can either call +them directly where the first parameter of each function will be the file +system, or you can declare a new `Afero`, a custom type used to bind these +functions as methods to a given filesystem. + +### Calling utilities directly + +```go +fs := new(afero.MemMapFs) +f, err := afero.TempFile(fs,"", "ioutil-test") + +``` + +### Calling via Afero + +```go +fs := afero.NewMemMapFs() +afs := &afero.Afero{Fs: fs} +f, err := afs.TempFile("", "ioutil-test") +``` + +## Using Afero for Testing + +There is a large benefit to using a mock filesystem for testing. It has a +completely blank state every time it is initialized and can be easily +reproducible regardless of OS. You could create files to your heart’s content +and the file access would be fast while also saving you from all the annoying +issues with deleting temporary files, Windows file locking, etc. The MemMapFs +backend is perfect for testing. 
+ +* Much faster than performing I/O operations on disk +* Avoid security issues and permissions +* Far more control. 'rm -rf /' with confidence +* Test setup is far more easier to do +* No test cleanup needed + +One way to accomplish this is to define a variable as mentioned above. +In your application this will be set to afero.NewOsFs() during testing you +can set it to afero.NewMemMapFs(). + +It wouldn't be uncommon to have each test initialize a blank slate memory +backend. To do this I would define my `appFS = afero.NewOsFs()` somewhere +appropriate in my application code. This approach ensures that Tests are order +independent, with no test relying on the state left by an earlier test. + +Then in my tests I would initialize a new MemMapFs for each test: +```go +func TestExist(t *testing.T) { + appFS := afero.NewMemMapFs() + // create test files and directories + appFS.MkdirAll("src/a", 0755) + afero.WriteFile(appFS, "src/a/b", []byte("file b"), 0644) + afero.WriteFile(appFS, "src/c", []byte("file c"), 0644) + name := "src/c" + _, err := appFS.Stat(name) + if os.IsNotExist(err) { + t.Errorf("file \"%s\" does not exist.\n", name) + } +} +``` + +# Available Backends + +## Operating System Native + +### OsFs + +The first is simply a wrapper around the native OS calls. This makes it +very easy to use as all of the calls are the same as the existing OS +calls. It also makes it trivial to have your code use the OS during +operation and a mock filesystem during testing or as needed. + +```go +appfs := afero.NewOsFs() +appfs.MkdirAll("src/a", 0755) +``` + +## Memory Backed Storage + +### MemMapFs + +Afero also provides a fully atomic memory backed filesystem perfect for use in +mocking and to speed up unnecessary disk io when persistence isn’t +necessary. It is fully concurrent and will work within go routines +safely. + +```go +mm := afero.NewMemMapFs() +mm.MkdirAll("src/a", 0755) +``` + +#### InMemoryFile + +As part of MemMapFs, Afero also provides an atomic, fully concurrent memory +backed file implementation. This can be used in other memory backed file +systems with ease. Plans are to add a radix tree memory stored file +system using InMemoryFile. + +## Network Interfaces + +### SftpFs + +Afero has experimental support for secure file transfer protocol (sftp). Which can +be used to perform file operations over a encrypted channel. + +## Filtering Backends + +### BasePathFs + +The BasePathFs restricts all operations to a given path within an Fs. +The given file name to the operations on this Fs will be prepended with +the base path before calling the source Fs. + +```go +bp := afero.NewBasePathFs(afero.NewOsFs(), "/base/path") +``` + +### ReadOnlyFs + +A thin wrapper around the source Fs providing a read only view. + +```go +fs := afero.NewReadOnlyFs(afero.NewOsFs()) +_, err := fs.Create("/file.txt") +// err = syscall.EPERM +``` + +# RegexpFs + +A filtered view on file names, any file NOT matching +the passed regexp will be treated as non-existing. +Files not matching the regexp provided will not be created. +Directories are not filtered. + +```go +fs := afero.NewRegexpFs(afero.NewMemMapFs(), regexp.MustCompile(`\.txt$`)) +_, err := fs.Create("/file.html") +// err = syscall.ENOENT +``` + +### HttpFs + +Afero provides an http compatible backend which can wrap any of the existing +backends. + +The Http package requires a slightly specific version of Open which +returns an http.File type. + +Afero provides an httpFs file system which satisfies this requirement. 
+Any Afero FileSystem can be used as an httpFs. + +```go +httpFs := afero.NewHttpFs() +fileserver := http.FileServer(httpFs.Dir()) +http.Handle("/", fileserver) +``` + +## Composite Backends + +Afero provides the ability have two filesystems (or more) act as a single +file system. + +### CacheOnReadFs + +The CacheOnReadFs will lazily make copies of any accessed files from the base +layer into the overlay. Subsequent reads will be pulled from the overlay +directly permitting the request is within the cache duration of when it was +created in the overlay. + +If the base filesystem is writeable, any changes to files will be +done first to the base, then to the overlay layer. Write calls to open file +handles like `Write()` or `Truncate()` to the overlay first. + +To writing files to the overlay only, you can use the overlay Fs directly (not +via the union Fs). + +Cache files in the layer for the given time.Duration, a cache duration of 0 +means "forever" meaning the file will not be re-requested from the base ever. + +A read-only base will make the overlay also read-only but still copy files +from the base to the overlay when they're not present (or outdated) in the +caching layer. + +```go +base := afero.NewOsFs() +layer := afero.NewMemMapFs() +ufs := afero.NewCacheOnReadFs(base, layer, 100 * time.Second) +``` + +### CopyOnWriteFs() + +The CopyOnWriteFs is a read only base file system with a potentially +writeable layer on top. + +Read operations will first look in the overlay and if not found there, will +serve the file from the base. + +Changes to the file system will only be made in the overlay. + +Any attempt to modify a file found only in the base will copy the file to the +overlay layer before modification (including opening a file with a writable +handle). + +Removing and Renaming files present only in the base layer is not currently +permitted. If a file is present in the base layer and the overlay, only the +overlay will be removed/renamed. + +```go + base := afero.NewOsFs() + roBase := afero.NewReadOnlyFs(base) + ufs := afero.NewCopyOnWriteFs(roBase, afero.NewMemMapFs()) + + fh, _ = ufs.Create("/home/test/file2.txt") + fh.WriteString("This is a test") + fh.Close() +``` + +In this example all write operations will only occur in memory (MemMapFs) +leaving the base filesystem (OsFs) untouched. + + +## Desired/possible backends + +The following is a short list of possible backends we hope someone will +implement: + +* SSH +* S3 + +# About the project + +## What's in the name + +Afero comes from the latin roots Ad-Facere. + +**"Ad"** is a prefix meaning "to". + +**"Facere"** is a form of the root "faciō" making "make or do". + +The literal meaning of afero is "to make" or "to do" which seems very fitting +for a library that allows one to make files and directories and do things with them. + +The English word that shares the same roots as Afero is "affair". Affair shares +the same concept but as a noun it means "something that is made or done" or "an +object of a particular type". + +It's also nice that unlike some of my other libraries (hugo, cobra, viper) it +Googles very well. + +## Release Notes + +See the [Releases Page](https://github.com/spf13/afero/releases). + +## Contributing + +1. Fork it +2. Create your feature branch (`git checkout -b my-new-feature`) +3. Commit your changes (`git commit -am 'Add some feature'`) +4. Push to the branch (`git push origin my-new-feature`) +5. 
Create new Pull Request + +## Contributors + +Names in no particular order: + +* [spf13](https://github.com/spf13) +* [jaqx0r](https://github.com/jaqx0r) +* [mbertschler](https://github.com/mbertschler) +* [xor-gate](https://github.com/xor-gate) + +## License + +Afero is released under the Apache 2.0 license. See +[LICENSE.txt](https://github.com/spf13/afero/blob/master/LICENSE.txt) diff --git a/vendor/github.com/spf13/afero/afero.go b/vendor/github.com/spf13/afero/afero.go new file mode 100644 index 000000000..469ff7d2d --- /dev/null +++ b/vendor/github.com/spf13/afero/afero.go @@ -0,0 +1,111 @@ +// Copyright © 2014 Steve Francia . +// Copyright 2013 tsuru authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package afero provides types and methods for interacting with the filesystem, +// as an abstraction layer. + +// Afero also provides a few implementations that are mostly interoperable. One that +// uses the operating system filesystem, one that uses memory to store files +// (cross platform) and an interface that should be implemented if you want to +// provide your own filesystem. + +package afero + +import ( + "errors" + "io" + "os" + "time" +) + +type Afero struct { + Fs +} + +// File represents a file in the filesystem. +type File interface { + io.Closer + io.Reader + io.ReaderAt + io.Seeker + io.Writer + io.WriterAt + + Name() string + Readdir(count int) ([]os.FileInfo, error) + Readdirnames(n int) ([]string, error) + Stat() (os.FileInfo, error) + Sync() error + Truncate(size int64) error + WriteString(s string) (ret int, err error) +} + +// Fs is the filesystem interface. +// +// Any simulated or real filesystem should implement this interface. +type Fs interface { + // Create creates a file in the filesystem, returning the file and an + // error, if any happens. + Create(name string) (File, error) + + // Mkdir creates a directory in the filesystem, return an error if any + // happens. + Mkdir(name string, perm os.FileMode) error + + // MkdirAll creates a directory path and all parents that does not exist + // yet. + MkdirAll(path string, perm os.FileMode) error + + // Open opens a file, returning it or an error, if any happens. + Open(name string) (File, error) + + // OpenFile opens a file using the given flags and the given mode. + OpenFile(name string, flag int, perm os.FileMode) (File, error) + + // Remove removes a file identified by name, returning an error, if any + // happens. + Remove(name string) error + + // RemoveAll removes a directory path and any children it contains. It + // does not fail if the path does not exist (return nil). + RemoveAll(path string) error + + // Rename renames a file. + Rename(oldname, newname string) error + + // Stat returns a FileInfo describing the named file, or an error, if any + // happens. + Stat(name string) (os.FileInfo, error) + + // The name of this FileSystem + Name() string + + // Chmod changes the mode of the named file to mode. 
+ Chmod(name string, mode os.FileMode) error + + // Chown changes the uid and gid of the named file. + Chown(name string, uid, gid int) error + + //Chtimes changes the access and modification times of the named file + Chtimes(name string, atime time.Time, mtime time.Time) error +} + +var ( + ErrFileClosed = errors.New("File is closed") + ErrOutOfRange = errors.New("Out of range") + ErrTooLarge = errors.New("Too large") + ErrFileNotFound = os.ErrNotExist + ErrFileExists = os.ErrExist + ErrDestinationExists = os.ErrExist +) diff --git a/vendor/github.com/spf13/afero/appveyor.yml b/vendor/github.com/spf13/afero/appveyor.yml new file mode 100644 index 000000000..5d2f34bf1 --- /dev/null +++ b/vendor/github.com/spf13/afero/appveyor.yml @@ -0,0 +1,15 @@ +version: '{build}' +clone_folder: C:\gopath\src\github.com\spf13\afero +environment: + GOPATH: C:\gopath +build_script: +- cmd: >- + go version + + go env + + go get -v github.com/spf13/afero/... + + go build -v github.com/spf13/afero/... +test_script: +- cmd: go test -count=1 -cover -race -v github.com/spf13/afero/... diff --git a/vendor/github.com/spf13/afero/basepath.go b/vendor/github.com/spf13/afero/basepath.go new file mode 100644 index 000000000..4f9832829 --- /dev/null +++ b/vendor/github.com/spf13/afero/basepath.go @@ -0,0 +1,211 @@ +package afero + +import ( + "os" + "path/filepath" + "runtime" + "strings" + "time" +) + +var _ Lstater = (*BasePathFs)(nil) + +// The BasePathFs restricts all operations to a given path within an Fs. +// The given file name to the operations on this Fs will be prepended with +// the base path before calling the base Fs. +// Any file name (after filepath.Clean()) outside this base path will be +// treated as non existing file. +// +// Note that it does not clean the error messages on return, so you may +// reveal the real path on errors. +type BasePathFs struct { + source Fs + path string +} + +type BasePathFile struct { + File + path string +} + +func (f *BasePathFile) Name() string { + sourcename := f.File.Name() + return strings.TrimPrefix(sourcename, filepath.Clean(f.path)) +} + +func NewBasePathFs(source Fs, path string) Fs { + return &BasePathFs{source: source, path: path} +} + +// on a file outside the base path it returns the given file name and an error, +// else the given file with the base path prepended +func (b *BasePathFs) RealPath(name string) (path string, err error) { + if err := validateBasePathName(name); err != nil { + return name, err + } + + bpath := filepath.Clean(b.path) + path = filepath.Clean(filepath.Join(bpath, name)) + if !strings.HasPrefix(path, bpath) { + return name, os.ErrNotExist + } + + return path, nil +} + +func validateBasePathName(name string) error { + if runtime.GOOS != "windows" { + // Not much to do here; + // the virtual file paths all look absolute on *nix. + return nil + } + + // On Windows a common mistake would be to provide an absolute OS path + // We could strip out the base part, but that would not be very portable. 
+ if filepath.IsAbs(name) { + return os.ErrNotExist + } + + return nil +} + +func (b *BasePathFs) Chtimes(name string, atime, mtime time.Time) (err error) { + if name, err = b.RealPath(name); err != nil { + return &os.PathError{Op: "chtimes", Path: name, Err: err} + } + return b.source.Chtimes(name, atime, mtime) +} + +func (b *BasePathFs) Chmod(name string, mode os.FileMode) (err error) { + if name, err = b.RealPath(name); err != nil { + return &os.PathError{Op: "chmod", Path: name, Err: err} + } + return b.source.Chmod(name, mode) +} + +func (b *BasePathFs) Chown(name string, uid, gid int) (err error) { + if name, err = b.RealPath(name); err != nil { + return &os.PathError{Op: "chown", Path: name, Err: err} + } + return b.source.Chown(name, uid, gid) +} + +func (b *BasePathFs) Name() string { + return "BasePathFs" +} + +func (b *BasePathFs) Stat(name string) (fi os.FileInfo, err error) { + if name, err = b.RealPath(name); err != nil { + return nil, &os.PathError{Op: "stat", Path: name, Err: err} + } + return b.source.Stat(name) +} + +func (b *BasePathFs) Rename(oldname, newname string) (err error) { + if oldname, err = b.RealPath(oldname); err != nil { + return &os.PathError{Op: "rename", Path: oldname, Err: err} + } + if newname, err = b.RealPath(newname); err != nil { + return &os.PathError{Op: "rename", Path: newname, Err: err} + } + return b.source.Rename(oldname, newname) +} + +func (b *BasePathFs) RemoveAll(name string) (err error) { + if name, err = b.RealPath(name); err != nil { + return &os.PathError{Op: "remove_all", Path: name, Err: err} + } + return b.source.RemoveAll(name) +} + +func (b *BasePathFs) Remove(name string) (err error) { + if name, err = b.RealPath(name); err != nil { + return &os.PathError{Op: "remove", Path: name, Err: err} + } + return b.source.Remove(name) +} + +func (b *BasePathFs) OpenFile(name string, flag int, mode os.FileMode) (f File, err error) { + if name, err = b.RealPath(name); err != nil { + return nil, &os.PathError{Op: "openfile", Path: name, Err: err} + } + sourcef, err := b.source.OpenFile(name, flag, mode) + if err != nil { + return nil, err + } + return &BasePathFile{sourcef, b.path}, nil +} + +func (b *BasePathFs) Open(name string) (f File, err error) { + if name, err = b.RealPath(name); err != nil { + return nil, &os.PathError{Op: "open", Path: name, Err: err} + } + sourcef, err := b.source.Open(name) + if err != nil { + return nil, err + } + return &BasePathFile{File: sourcef, path: b.path}, nil +} + +func (b *BasePathFs) Mkdir(name string, mode os.FileMode) (err error) { + if name, err = b.RealPath(name); err != nil { + return &os.PathError{Op: "mkdir", Path: name, Err: err} + } + return b.source.Mkdir(name, mode) +} + +func (b *BasePathFs) MkdirAll(name string, mode os.FileMode) (err error) { + if name, err = b.RealPath(name); err != nil { + return &os.PathError{Op: "mkdir", Path: name, Err: err} + } + return b.source.MkdirAll(name, mode) +} + +func (b *BasePathFs) Create(name string) (f File, err error) { + if name, err = b.RealPath(name); err != nil { + return nil, &os.PathError{Op: "create", Path: name, Err: err} + } + sourcef, err := b.source.Create(name) + if err != nil { + return nil, err + } + return &BasePathFile{File: sourcef, path: b.path}, nil +} + +func (b *BasePathFs) LstatIfPossible(name string) (os.FileInfo, bool, error) { + name, err := b.RealPath(name) + if err != nil { + return nil, false, &os.PathError{Op: "lstat", Path: name, Err: err} + } + if lstater, ok := b.source.(Lstater); ok { + return 
lstater.LstatIfPossible(name) + } + fi, err := b.source.Stat(name) + return fi, false, err +} + +func (b *BasePathFs) SymlinkIfPossible(oldname, newname string) error { + oldname, err := b.RealPath(oldname) + if err != nil { + return &os.LinkError{Op: "symlink", Old: oldname, New: newname, Err: err} + } + newname, err = b.RealPath(newname) + if err != nil { + return &os.LinkError{Op: "symlink", Old: oldname, New: newname, Err: err} + } + if linker, ok := b.source.(Linker); ok { + return linker.SymlinkIfPossible(oldname, newname) + } + return &os.LinkError{Op: "symlink", Old: oldname, New: newname, Err: ErrNoSymlink} +} + +func (b *BasePathFs) ReadlinkIfPossible(name string) (string, error) { + name, err := b.RealPath(name) + if err != nil { + return "", &os.PathError{Op: "readlink", Path: name, Err: err} + } + if reader, ok := b.source.(LinkReader); ok { + return reader.ReadlinkIfPossible(name) + } + return "", &os.PathError{Op: "readlink", Path: name, Err: ErrNoReadlink} +} diff --git a/vendor/github.com/spf13/afero/cacheOnReadFs.go b/vendor/github.com/spf13/afero/cacheOnReadFs.go new file mode 100644 index 000000000..71471aa25 --- /dev/null +++ b/vendor/github.com/spf13/afero/cacheOnReadFs.go @@ -0,0 +1,311 @@ +package afero + +import ( + "os" + "syscall" + "time" +) + +// If the cache duration is 0, cache time will be unlimited, i.e. once +// a file is in the layer, the base will never be read again for this file. +// +// For cache times greater than 0, the modification time of a file is +// checked. Note that a lot of file system implementations only allow a +// resolution of a second for timestamps... or as the godoc for os.Chtimes() +// states: "The underlying filesystem may truncate or round the values to a +// less precise time unit." +// +// This caching union will forward all write calls also to the base file +// system first. To prevent writing to the base Fs, wrap it in a read-only +// filter - Note: this will also make the overlay read-only, for writing files +// in the overlay, use the overlay Fs directly, not via the union Fs. 
+type CacheOnReadFs struct { + base Fs + layer Fs + cacheTime time.Duration +} + +func NewCacheOnReadFs(base Fs, layer Fs, cacheTime time.Duration) Fs { + return &CacheOnReadFs{base: base, layer: layer, cacheTime: cacheTime} +} + +type cacheState int + +const ( + // not present in the overlay, unknown if it exists in the base: + cacheMiss cacheState = iota + // present in the overlay and in base, base file is newer: + cacheStale + // present in the overlay - with cache time == 0 it may exist in the base, + // with cacheTime > 0 it exists in the base and is same age or newer in the + // overlay + cacheHit + // happens if someone writes directly to the overlay without + // going through this union + cacheLocal +) + +func (u *CacheOnReadFs) cacheStatus(name string) (state cacheState, fi os.FileInfo, err error) { + var lfi, bfi os.FileInfo + lfi, err = u.layer.Stat(name) + if err == nil { + if u.cacheTime == 0 { + return cacheHit, lfi, nil + } + if lfi.ModTime().Add(u.cacheTime).Before(time.Now()) { + bfi, err = u.base.Stat(name) + if err != nil { + return cacheLocal, lfi, nil + } + if bfi.ModTime().After(lfi.ModTime()) { + return cacheStale, bfi, nil + } + } + return cacheHit, lfi, nil + } + + if err == syscall.ENOENT || os.IsNotExist(err) { + return cacheMiss, nil, nil + } + + return cacheMiss, nil, err +} + +func (u *CacheOnReadFs) copyToLayer(name string) error { + return copyToLayer(u.base, u.layer, name) +} + +func (u *CacheOnReadFs) Chtimes(name string, atime, mtime time.Time) error { + st, _, err := u.cacheStatus(name) + if err != nil { + return err + } + switch st { + case cacheLocal: + case cacheHit: + err = u.base.Chtimes(name, atime, mtime) + case cacheStale, cacheMiss: + if err := u.copyToLayer(name); err != nil { + return err + } + err = u.base.Chtimes(name, atime, mtime) + } + if err != nil { + return err + } + return u.layer.Chtimes(name, atime, mtime) +} + +func (u *CacheOnReadFs) Chmod(name string, mode os.FileMode) error { + st, _, err := u.cacheStatus(name) + if err != nil { + return err + } + switch st { + case cacheLocal: + case cacheHit: + err = u.base.Chmod(name, mode) + case cacheStale, cacheMiss: + if err := u.copyToLayer(name); err != nil { + return err + } + err = u.base.Chmod(name, mode) + } + if err != nil { + return err + } + return u.layer.Chmod(name, mode) +} + +func (u *CacheOnReadFs) Chown(name string, uid, gid int) error { + st, _, err := u.cacheStatus(name) + if err != nil { + return err + } + switch st { + case cacheLocal: + case cacheHit: + err = u.base.Chown(name, uid, gid) + case cacheStale, cacheMiss: + if err := u.copyToLayer(name); err != nil { + return err + } + err = u.base.Chown(name, uid, gid) + } + if err != nil { + return err + } + return u.layer.Chown(name, uid, gid) +} + +func (u *CacheOnReadFs) Stat(name string) (os.FileInfo, error) { + st, fi, err := u.cacheStatus(name) + if err != nil { + return nil, err + } + switch st { + case cacheMiss: + return u.base.Stat(name) + default: // cacheStale has base, cacheHit and cacheLocal the layer os.FileInfo + return fi, nil + } +} + +func (u *CacheOnReadFs) Rename(oldname, newname string) error { + st, _, err := u.cacheStatus(oldname) + if err != nil { + return err + } + switch st { + case cacheLocal: + case cacheHit: + err = u.base.Rename(oldname, newname) + case cacheStale, cacheMiss: + if err := u.copyToLayer(oldname); err != nil { + return err + } + err = u.base.Rename(oldname, newname) + } + if err != nil { + return err + } + return u.layer.Rename(oldname, newname) +} + +func (u *CacheOnReadFs) 
Remove(name string) error { + st, _, err := u.cacheStatus(name) + if err != nil { + return err + } + switch st { + case cacheLocal: + case cacheHit, cacheStale, cacheMiss: + err = u.base.Remove(name) + } + if err != nil { + return err + } + return u.layer.Remove(name) +} + +func (u *CacheOnReadFs) RemoveAll(name string) error { + st, _, err := u.cacheStatus(name) + if err != nil { + return err + } + switch st { + case cacheLocal: + case cacheHit, cacheStale, cacheMiss: + err = u.base.RemoveAll(name) + } + if err != nil { + return err + } + return u.layer.RemoveAll(name) +} + +func (u *CacheOnReadFs) OpenFile(name string, flag int, perm os.FileMode) (File, error) { + st, _, err := u.cacheStatus(name) + if err != nil { + return nil, err + } + switch st { + case cacheLocal, cacheHit: + default: + if err := u.copyToLayer(name); err != nil { + return nil, err + } + } + if flag&(os.O_WRONLY|syscall.O_RDWR|os.O_APPEND|os.O_CREATE|os.O_TRUNC) != 0 { + bfi, err := u.base.OpenFile(name, flag, perm) + if err != nil { + return nil, err + } + lfi, err := u.layer.OpenFile(name, flag, perm) + if err != nil { + bfi.Close() // oops, what if O_TRUNC was set and file opening in the layer failed...? + return nil, err + } + return &UnionFile{Base: bfi, Layer: lfi}, nil + } + return u.layer.OpenFile(name, flag, perm) +} + +func (u *CacheOnReadFs) Open(name string) (File, error) { + st, fi, err := u.cacheStatus(name) + if err != nil { + return nil, err + } + + switch st { + case cacheLocal: + return u.layer.Open(name) + + case cacheMiss: + bfi, err := u.base.Stat(name) + if err != nil { + return nil, err + } + if bfi.IsDir() { + return u.base.Open(name) + } + if err := u.copyToLayer(name); err != nil { + return nil, err + } + return u.layer.Open(name) + + case cacheStale: + if !fi.IsDir() { + if err := u.copyToLayer(name); err != nil { + return nil, err + } + return u.layer.Open(name) + } + case cacheHit: + if !fi.IsDir() { + return u.layer.Open(name) + } + } + // the dirs from cacheHit, cacheStale fall down here: + bfile, _ := u.base.Open(name) + lfile, err := u.layer.Open(name) + if err != nil && bfile == nil { + return nil, err + } + return &UnionFile{Base: bfile, Layer: lfile}, nil +} + +func (u *CacheOnReadFs) Mkdir(name string, perm os.FileMode) error { + err := u.base.Mkdir(name, perm) + if err != nil { + return err + } + return u.layer.MkdirAll(name, perm) // yes, MkdirAll... we cannot assume it exists in the cache +} + +func (u *CacheOnReadFs) Name() string { + return "CacheOnReadFs" +} + +func (u *CacheOnReadFs) MkdirAll(name string, perm os.FileMode) error { + err := u.base.MkdirAll(name, perm) + if err != nil { + return err + } + return u.layer.MkdirAll(name, perm) +} + +func (u *CacheOnReadFs) Create(name string) (File, error) { + bfh, err := u.base.Create(name) + if err != nil { + return nil, err + } + lfh, err := u.layer.Create(name) + if err != nil { + // oops, see comment about OS_TRUNC above, should we remove? then we have to + // remember if the file did not exist before + bfh.Close() + return nil, err + } + return &UnionFile{Base: bfh, Layer: lfh}, nil +} diff --git a/vendor/github.com/spf13/afero/const_bsds.go b/vendor/github.com/spf13/afero/const_bsds.go new file mode 100644 index 000000000..18b45824b --- /dev/null +++ b/vendor/github.com/spf13/afero/const_bsds.go @@ -0,0 +1,22 @@ +// Copyright © 2016 Steve Francia . +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// +build aix darwin openbsd freebsd netbsd dragonfly + +package afero + +import ( + "syscall" +) + +const BADFD = syscall.EBADF diff --git a/vendor/github.com/spf13/afero/const_win_unix.go b/vendor/github.com/spf13/afero/const_win_unix.go new file mode 100644 index 000000000..2b850e4dd --- /dev/null +++ b/vendor/github.com/spf13/afero/const_win_unix.go @@ -0,0 +1,26 @@ +// Copyright © 2016 Steve Francia . +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +build !darwin +// +build !openbsd +// +build !freebsd +// +build !dragonfly +// +build !netbsd +// +build !aix + +package afero + +import ( + "syscall" +) + +const BADFD = syscall.EBADFD diff --git a/vendor/github.com/spf13/afero/copyOnWriteFs.go b/vendor/github.com/spf13/afero/copyOnWriteFs.go new file mode 100644 index 000000000..6ff8f3099 --- /dev/null +++ b/vendor/github.com/spf13/afero/copyOnWriteFs.go @@ -0,0 +1,326 @@ +package afero + +import ( + "fmt" + "os" + "path/filepath" + "syscall" + "time" +) + +var _ Lstater = (*CopyOnWriteFs)(nil) + +// The CopyOnWriteFs is a union filesystem: a read only base file system with +// a possibly writeable layer on top. Changes to the file system will only +// be made in the overlay: Changing an existing file in the base layer which +// is not present in the overlay will copy the file to the overlay ("changing" +// includes also calls to e.g. Chtimes(), Chmod() and Chown()). +// +// Reading directories is currently only supported via Open(), not OpenFile(). 
+type CopyOnWriteFs struct { + base Fs + layer Fs +} + +func NewCopyOnWriteFs(base Fs, layer Fs) Fs { + return &CopyOnWriteFs{base: base, layer: layer} +} + +// Returns true if the file is not in the overlay +func (u *CopyOnWriteFs) isBaseFile(name string) (bool, error) { + if _, err := u.layer.Stat(name); err == nil { + return false, nil + } + _, err := u.base.Stat(name) + if err != nil { + if oerr, ok := err.(*os.PathError); ok { + if oerr.Err == os.ErrNotExist || oerr.Err == syscall.ENOENT || oerr.Err == syscall.ENOTDIR { + return false, nil + } + } + if err == syscall.ENOENT { + return false, nil + } + } + return true, err +} + +func (u *CopyOnWriteFs) copyToLayer(name string) error { + return copyToLayer(u.base, u.layer, name) +} + +func (u *CopyOnWriteFs) Chtimes(name string, atime, mtime time.Time) error { + b, err := u.isBaseFile(name) + if err != nil { + return err + } + if b { + if err := u.copyToLayer(name); err != nil { + return err + } + } + return u.layer.Chtimes(name, atime, mtime) +} + +func (u *CopyOnWriteFs) Chmod(name string, mode os.FileMode) error { + b, err := u.isBaseFile(name) + if err != nil { + return err + } + if b { + if err := u.copyToLayer(name); err != nil { + return err + } + } + return u.layer.Chmod(name, mode) +} + +func (u *CopyOnWriteFs) Chown(name string, uid, gid int) error { + b, err := u.isBaseFile(name) + if err != nil { + return err + } + if b { + if err := u.copyToLayer(name); err != nil { + return err + } + } + return u.layer.Chown(name, uid, gid) +} + +func (u *CopyOnWriteFs) Stat(name string) (os.FileInfo, error) { + fi, err := u.layer.Stat(name) + if err != nil { + isNotExist := u.isNotExist(err) + if isNotExist { + return u.base.Stat(name) + } + return nil, err + } + return fi, nil +} + +func (u *CopyOnWriteFs) LstatIfPossible(name string) (os.FileInfo, bool, error) { + llayer, ok1 := u.layer.(Lstater) + lbase, ok2 := u.base.(Lstater) + + if ok1 { + fi, b, err := llayer.LstatIfPossible(name) + if err == nil { + return fi, b, nil + } + + if !u.isNotExist(err) { + return nil, b, err + } + } + + if ok2 { + fi, b, err := lbase.LstatIfPossible(name) + if err == nil { + return fi, b, nil + } + if !u.isNotExist(err) { + return nil, b, err + } + } + + fi, err := u.Stat(name) + + return fi, false, err +} + +func (u *CopyOnWriteFs) SymlinkIfPossible(oldname, newname string) error { + if slayer, ok := u.layer.(Linker); ok { + return slayer.SymlinkIfPossible(oldname, newname) + } + + return &os.LinkError{Op: "symlink", Old: oldname, New: newname, Err: ErrNoSymlink} +} + +func (u *CopyOnWriteFs) ReadlinkIfPossible(name string) (string, error) { + if rlayer, ok := u.layer.(LinkReader); ok { + return rlayer.ReadlinkIfPossible(name) + } + + if rbase, ok := u.base.(LinkReader); ok { + return rbase.ReadlinkIfPossible(name) + } + + return "", &os.PathError{Op: "readlink", Path: name, Err: ErrNoReadlink} +} + +func (u *CopyOnWriteFs) isNotExist(err error) bool { + if e, ok := err.(*os.PathError); ok { + err = e.Err + } + if err == os.ErrNotExist || err == syscall.ENOENT || err == syscall.ENOTDIR { + return true + } + return false +} + +// Renaming files present only in the base layer is not permitted +func (u *CopyOnWriteFs) Rename(oldname, newname string) error { + b, err := u.isBaseFile(oldname) + if err != nil { + return err + } + if b { + return syscall.EPERM + } + return u.layer.Rename(oldname, newname) +} + +// Removing files present only in the base layer is not permitted. 
If +// a file is present in the base layer and the overlay, only the overlay +// will be removed. +func (u *CopyOnWriteFs) Remove(name string) error { + err := u.layer.Remove(name) + switch err { + case syscall.ENOENT: + _, err = u.base.Stat(name) + if err == nil { + return syscall.EPERM + } + return syscall.ENOENT + default: + return err + } +} + +func (u *CopyOnWriteFs) RemoveAll(name string) error { + err := u.layer.RemoveAll(name) + switch err { + case syscall.ENOENT: + _, err = u.base.Stat(name) + if err == nil { + return syscall.EPERM + } + return syscall.ENOENT + default: + return err + } +} + +func (u *CopyOnWriteFs) OpenFile(name string, flag int, perm os.FileMode) (File, error) { + b, err := u.isBaseFile(name) + if err != nil { + return nil, err + } + + if flag&(os.O_WRONLY|os.O_RDWR|os.O_APPEND|os.O_CREATE|os.O_TRUNC) != 0 { + if b { + if err = u.copyToLayer(name); err != nil { + return nil, err + } + return u.layer.OpenFile(name, flag, perm) + } + + dir := filepath.Dir(name) + isaDir, err := IsDir(u.base, dir) + if err != nil && !os.IsNotExist(err) { + return nil, err + } + if isaDir { + if err = u.layer.MkdirAll(dir, 0777); err != nil { + return nil, err + } + return u.layer.OpenFile(name, flag, perm) + } + + isaDir, err = IsDir(u.layer, dir) + if err != nil { + return nil, err + } + if isaDir { + return u.layer.OpenFile(name, flag, perm) + } + + return nil, &os.PathError{Op: "open", Path: name, Err: syscall.ENOTDIR} // ...or os.ErrNotExist? + } + if b { + return u.base.OpenFile(name, flag, perm) + } + return u.layer.OpenFile(name, flag, perm) +} + +// This function handles the 9 different possibilities caused +// by the union which are the intersection of the following... +// layer: doesn't exist, exists as a file, and exists as a directory +// base: doesn't exist, exists as a file, and exists as a directory +func (u *CopyOnWriteFs) Open(name string) (File, error) { + // Since the overlay overrides the base we check that first + b, err := u.isBaseFile(name) + if err != nil { + return nil, err + } + + // If overlay doesn't exist, return the base (base state irrelevant) + if b { + return u.base.Open(name) + } + + // If overlay is a file, return it (base state irrelevant) + dir, err := IsDir(u.layer, name) + if err != nil { + return nil, err + } + if !dir { + return u.layer.Open(name) + } + + // Overlay is a directory, base state now matters. + // Base state has 3 states to check but 2 outcomes: + // A. It's a file or non-readable in the base (return just the overlay) + // B. It's an accessible directory in the base (return a UnionFile) + + // If base is file or nonreadable, return overlay + dir, err = IsDir(u.base, name) + if !dir || err != nil { + return u.layer.Open(name) + } + + // Both base & layer are directories + // Return union file (if opens are without error) + bfile, bErr := u.base.Open(name) + lfile, lErr := u.layer.Open(name) + + // If either have errors at this point something is very wrong. 
Return nil and the errors + if bErr != nil || lErr != nil { + return nil, fmt.Errorf("BaseErr: %v\nOverlayErr: %v", bErr, lErr) + } + + return &UnionFile{Base: bfile, Layer: lfile}, nil +} + +func (u *CopyOnWriteFs) Mkdir(name string, perm os.FileMode) error { + dir, err := IsDir(u.base, name) + if err != nil { + return u.layer.MkdirAll(name, perm) + } + if dir { + return ErrFileExists + } + return u.layer.MkdirAll(name, perm) +} + +func (u *CopyOnWriteFs) Name() string { + return "CopyOnWriteFs" +} + +func (u *CopyOnWriteFs) MkdirAll(name string, perm os.FileMode) error { + dir, err := IsDir(u.base, name) + if err != nil { + return u.layer.MkdirAll(name, perm) + } + if dir { + // This is in line with how os.MkdirAll behaves. + return nil + } + return u.layer.MkdirAll(name, perm) +} + +func (u *CopyOnWriteFs) Create(name string) (File, error) { + return u.OpenFile(name, os.O_CREATE|os.O_TRUNC|os.O_RDWR, 0666) +} diff --git a/vendor/github.com/spf13/afero/go.mod b/vendor/github.com/spf13/afero/go.mod new file mode 100644 index 000000000..abe4fe1cf --- /dev/null +++ b/vendor/github.com/spf13/afero/go.mod @@ -0,0 +1,9 @@ +module github.com/spf13/afero + +require ( + github.com/pkg/sftp v1.10.1 + golang.org/x/crypto v0.0.0-20190820162420-60c769a6c586 + golang.org/x/text v0.3.3 +) + +go 1.13 diff --git a/vendor/github.com/spf13/afero/go.sum b/vendor/github.com/spf13/afero/go.sum new file mode 100644 index 000000000..89d9bfbc4 --- /dev/null +++ b/vendor/github.com/spf13/afero/go.sum @@ -0,0 +1,29 @@ +github.com/davecgh/go-spew v1.1.0 h1:ZDRjVQ15GmhC3fiQ8ni8+OwkZQO4DARzQgrnXU1Liz8= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/kr/fs v0.1.0 h1:Jskdu9ieNAYnjxsi0LbQp1ulIKZV1LAFgK1tWhpZgl8= +github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg= +github.com/pkg/errors v0.8.1 h1:iURUrRGxPUNPdy5/HRSm+Yj6okJ6UtLINN0Q9M4+h3I= +github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pkg/sftp v1.10.1 h1:VasscCm72135zRysgrJDKsntdmPN+OuU3+nnHYA9wyc= +github.com/pkg/sftp v1.10.1/go.mod h1:lYOWFsE0bwd1+KfKJaKeuokY15vzFx25BLbzYYoAxZI= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/testify v1.4.0 h1:2E4SXV/wtOkTonXsotYi4li6zVWxYlZuYNCXe9XRJyk= +github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20190820162420-60c769a6c586 h1:7KByu05hhLed2MO29w7p1XfZvZ13m8mub3shuVftRs0= +golang.org/x/crypto v0.0.0-20190820162420-60c769a6c586/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190412213103-97732733099d h1:+R4KGOnez64A81RvjARKc4UT5/tI9ujCIVX+P5KiHuI= +golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/text v0.3.0 h1:g61tztE5qeGQ89tm6NTjjM9VPIm088od1l6aSorWRWg= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.3 
h1:cokOdA+Jmi5PJGXLlLllQSgYigAEfHXJAERHVMaCc2k= +golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/yaml.v2 v2.2.2 h1:ZCJp+EgiOT7lHqUV2J862kp8Qj64Jo6az82+3Td9dZw= +gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= diff --git a/vendor/github.com/spf13/afero/httpFs.go b/vendor/github.com/spf13/afero/httpFs.go new file mode 100644 index 000000000..2b86e30d1 --- /dev/null +++ b/vendor/github.com/spf13/afero/httpFs.go @@ -0,0 +1,114 @@ +// Copyright © 2014 Steve Francia . +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package afero + +import ( + "errors" + "net/http" + "os" + "path" + "path/filepath" + "strings" + "time" +) + +type httpDir struct { + basePath string + fs HttpFs +} + +func (d httpDir) Open(name string) (http.File, error) { + if filepath.Separator != '/' && strings.IndexRune(name, filepath.Separator) >= 0 || + strings.Contains(name, "\x00") { + return nil, errors.New("http: invalid character in file path") + } + dir := string(d.basePath) + if dir == "" { + dir = "." 
+ } + + f, err := d.fs.Open(filepath.Join(dir, filepath.FromSlash(path.Clean("/"+name)))) + if err != nil { + return nil, err + } + return f, nil +} + +type HttpFs struct { + source Fs +} + +func NewHttpFs(source Fs) *HttpFs { + return &HttpFs{source: source} +} + +func (h HttpFs) Dir(s string) *httpDir { + return &httpDir{basePath: s, fs: h} +} + +func (h HttpFs) Name() string { return "h HttpFs" } + +func (h HttpFs) Create(name string) (File, error) { + return h.source.Create(name) +} + +func (h HttpFs) Chmod(name string, mode os.FileMode) error { + return h.source.Chmod(name, mode) +} + +func (h HttpFs) Chown(name string, uid, gid int) error { + return h.source.Chown(name, uid, gid) +} + +func (h HttpFs) Chtimes(name string, atime time.Time, mtime time.Time) error { + return h.source.Chtimes(name, atime, mtime) +} + +func (h HttpFs) Mkdir(name string, perm os.FileMode) error { + return h.source.Mkdir(name, perm) +} + +func (h HttpFs) MkdirAll(path string, perm os.FileMode) error { + return h.source.MkdirAll(path, perm) +} + +func (h HttpFs) Open(name string) (http.File, error) { + f, err := h.source.Open(name) + if err == nil { + if httpfile, ok := f.(http.File); ok { + return httpfile, nil + } + } + return nil, err +} + +func (h HttpFs) OpenFile(name string, flag int, perm os.FileMode) (File, error) { + return h.source.OpenFile(name, flag, perm) +} + +func (h HttpFs) Remove(name string) error { + return h.source.Remove(name) +} + +func (h HttpFs) RemoveAll(path string) error { + return h.source.RemoveAll(path) +} + +func (h HttpFs) Rename(oldname, newname string) error { + return h.source.Rename(oldname, newname) +} + +func (h HttpFs) Stat(name string) (os.FileInfo, error) { + return h.source.Stat(name) +} diff --git a/vendor/github.com/spf13/afero/iofs.go b/vendor/github.com/spf13/afero/iofs.go new file mode 100644 index 000000000..c80345536 --- /dev/null +++ b/vendor/github.com/spf13/afero/iofs.go @@ -0,0 +1,288 @@ +// +build go1.16 + +package afero + +import ( + "io" + "io/fs" + "os" + "path" + "time" +) + +// IOFS adopts afero.Fs to stdlib io/fs.FS +type IOFS struct { + Fs +} + +func NewIOFS(fs Fs) IOFS { + return IOFS{Fs: fs} +} + +var ( + _ fs.FS = IOFS{} + _ fs.GlobFS = IOFS{} + _ fs.ReadDirFS = IOFS{} + _ fs.ReadFileFS = IOFS{} + _ fs.StatFS = IOFS{} + _ fs.SubFS = IOFS{} +) + +func (iofs IOFS) Open(name string) (fs.File, error) { + const op = "open" + + // by convention for fs.FS implementations we should perform this check + if !fs.ValidPath(name) { + return nil, iofs.wrapError(op, name, fs.ErrInvalid) + } + + file, err := iofs.Fs.Open(name) + if err != nil { + return nil, iofs.wrapError(op, name, err) + } + + // file should implement fs.ReadDirFile + if _, ok := file.(fs.ReadDirFile); !ok { + file = readDirFile{file} + } + + return file, nil +} + +func (iofs IOFS) Glob(pattern string) ([]string, error) { + const op = "glob" + + // afero.Glob does not perform this check but it's required for implementations + if _, err := path.Match(pattern, ""); err != nil { + return nil, iofs.wrapError(op, pattern, err) + } + + items, err := Glob(iofs.Fs, pattern) + if err != nil { + return nil, iofs.wrapError(op, pattern, err) + } + + return items, nil +} + +func (iofs IOFS) ReadDir(name string) ([]fs.DirEntry, error) { + items, err := ReadDir(iofs.Fs, name) + if err != nil { + return nil, iofs.wrapError("readdir", name, err) + } + + ret := make([]fs.DirEntry, len(items)) + for i := range items { + ret[i] = dirEntry{items[i]} + } + + return ret, nil +} + +func (iofs IOFS) ReadFile(name 
string) ([]byte, error) { + const op = "readfile" + + if !fs.ValidPath(name) { + return nil, iofs.wrapError(op, name, fs.ErrInvalid) + } + + bytes, err := ReadFile(iofs.Fs, name) + if err != nil { + return nil, iofs.wrapError(op, name, err) + } + + return bytes, nil +} + +func (iofs IOFS) Sub(dir string) (fs.FS, error) { return IOFS{NewBasePathFs(iofs.Fs, dir)}, nil } + +func (IOFS) wrapError(op, path string, err error) error { + if _, ok := err.(*fs.PathError); ok { + return err // don't need to wrap again + } + + return &fs.PathError{ + Op: op, + Path: path, + Err: err, + } +} + +// dirEntry provides adapter from os.FileInfo to fs.DirEntry +type dirEntry struct { + fs.FileInfo +} + +var _ fs.DirEntry = dirEntry{} + +func (d dirEntry) Type() fs.FileMode { return d.FileInfo.Mode().Type() } + +func (d dirEntry) Info() (fs.FileInfo, error) { return d.FileInfo, nil } + +// readDirFile provides adapter from afero.File to fs.ReadDirFile needed for correct Open +type readDirFile struct { + File +} + +var _ fs.ReadDirFile = readDirFile{} + +func (r readDirFile) ReadDir(n int) ([]fs.DirEntry, error) { + items, err := r.File.Readdir(n) + if err != nil { + return nil, err + } + + ret := make([]fs.DirEntry, len(items)) + for i := range items { + ret[i] = dirEntry{items[i]} + } + + return ret, nil +} + +// FromIOFS adopts io/fs.FS to use it as afero.Fs +// Note that io/fs.FS is read-only so all mutating methods will return fs.PathError with fs.ErrPermission +// To store modifications you may use afero.CopyOnWriteFs +type FromIOFS struct { + fs.FS +} + +var _ Fs = FromIOFS{} + +func (f FromIOFS) Create(name string) (File, error) { return nil, notImplemented("create", name) } + +func (f FromIOFS) Mkdir(name string, perm os.FileMode) error { return notImplemented("mkdir", name) } + +func (f FromIOFS) MkdirAll(path string, perm os.FileMode) error { + return notImplemented("mkdirall", path) +} + +func (f FromIOFS) Open(name string) (File, error) { + file, err := f.FS.Open(name) + if err != nil { + return nil, err + } + + return fromIOFSFile{File: file, name: name}, nil +} + +func (f FromIOFS) OpenFile(name string, flag int, perm os.FileMode) (File, error) { + return f.Open(name) +} + +func (f FromIOFS) Remove(name string) error { + return notImplemented("remove", name) +} + +func (f FromIOFS) RemoveAll(path string) error { + return notImplemented("removeall", path) +} + +func (f FromIOFS) Rename(oldname, newname string) error { + return notImplemented("rename", oldname) +} + +func (f FromIOFS) Stat(name string) (os.FileInfo, error) { return fs.Stat(f.FS, name) } + +func (f FromIOFS) Name() string { return "fromiofs" } + +func (f FromIOFS) Chmod(name string, mode os.FileMode) error { + return notImplemented("chmod", name) +} + +func (f FromIOFS) Chown(name string, uid, gid int) error { + return notImplemented("chown", name) +} + +func (f FromIOFS) Chtimes(name string, atime time.Time, mtime time.Time) error { + return notImplemented("chtimes", name) +} + +type fromIOFSFile struct { + fs.File + name string +} + +func (f fromIOFSFile) ReadAt(p []byte, off int64) (n int, err error) { + readerAt, ok := f.File.(io.ReaderAt) + if !ok { + return -1, notImplemented("readat", f.name) + } + + return readerAt.ReadAt(p, off) +} + +func (f fromIOFSFile) Seek(offset int64, whence int) (int64, error) { + seeker, ok := f.File.(io.Seeker) + if !ok { + return -1, notImplemented("seek", f.name) + } + + return seeker.Seek(offset, whence) +} + +func (f fromIOFSFile) Write(p []byte) (n int, err error) { + return -1, 
notImplemented("write", f.name) +} + +func (f fromIOFSFile) WriteAt(p []byte, off int64) (n int, err error) { + return -1, notImplemented("writeat", f.name) +} + +func (f fromIOFSFile) Name() string { return f.name } + +func (f fromIOFSFile) Readdir(count int) ([]os.FileInfo, error) { + rdfile, ok := f.File.(fs.ReadDirFile) + if !ok { + return nil, notImplemented("readdir", f.name) + } + + entries, err := rdfile.ReadDir(count) + if err != nil { + return nil, err + } + + ret := make([]os.FileInfo, len(entries)) + for i := range entries { + ret[i], err = entries[i].Info() + + if err != nil { + return nil, err + } + } + + return ret, nil +} + +func (f fromIOFSFile) Readdirnames(n int) ([]string, error) { + rdfile, ok := f.File.(fs.ReadDirFile) + if !ok { + return nil, notImplemented("readdir", f.name) + } + + entries, err := rdfile.ReadDir(n) + if err != nil { + return nil, err + } + + ret := make([]string, len(entries)) + for i := range entries { + ret[i] = entries[i].Name() + } + + return ret, nil +} + +func (f fromIOFSFile) Sync() error { return nil } + +func (f fromIOFSFile) Truncate(size int64) error { + return notImplemented("truncate", f.name) +} + +func (f fromIOFSFile) WriteString(s string) (ret int, err error) { + return -1, notImplemented("writestring", f.name) +} + +func notImplemented(op, path string) error { + return &fs.PathError{Op: op, Path: path, Err: fs.ErrPermission} +} diff --git a/vendor/github.com/spf13/afero/ioutil.go b/vendor/github.com/spf13/afero/ioutil.go new file mode 100644 index 000000000..a403133e2 --- /dev/null +++ b/vendor/github.com/spf13/afero/ioutil.go @@ -0,0 +1,240 @@ +// Copyright ©2015 The Go Authors +// Copyright ©2015 Steve Francia +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package afero + +import ( + "bytes" + "io" + "os" + "path/filepath" + "sort" + "strconv" + "strings" + "sync" + "time" +) + +// byName implements sort.Interface. +type byName []os.FileInfo + +func (f byName) Len() int { return len(f) } +func (f byName) Less(i, j int) bool { return f[i].Name() < f[j].Name() } +func (f byName) Swap(i, j int) { f[i], f[j] = f[j], f[i] } + +// ReadDir reads the directory named by dirname and returns +// a list of sorted directory entries. +func (a Afero) ReadDir(dirname string) ([]os.FileInfo, error) { + return ReadDir(a.Fs, dirname) +} + +func ReadDir(fs Fs, dirname string) ([]os.FileInfo, error) { + f, err := fs.Open(dirname) + if err != nil { + return nil, err + } + list, err := f.Readdir(-1) + f.Close() + if err != nil { + return nil, err + } + sort.Sort(byName(list)) + return list, nil +} + +// ReadFile reads the file named by filename and returns the contents. +// A successful call returns err == nil, not err == EOF. Because ReadFile +// reads the whole file, it does not treat an EOF from Read as an error +// to be reported. 
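An illustrative sketch (not part of the vendored afero sources; the filename and contents are invented) of how these ioutil-style helpers are typically driven through the Afero wrapper, which turns the package-level functions into methods:

// Illustrative sketch only; the filename and contents are invented.
package main

import (
	"fmt"

	"github.com/spf13/afero"
)

func main() {
	// Afero embeds an Fs, so the package-level helpers read as methods.
	a := afero.Afero{Fs: afero.NewMemMapFs()}

	if err := a.WriteFile("notes.txt", []byte("remember the milk\n"), 0644); err != nil {
		panic(err)
	}

	data, err := a.ReadFile("notes.txt")
	if err != nil {
		panic(err)
	}
	fmt.Print(string(data)) // remember the milk
}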
+func (a Afero) ReadFile(filename string) ([]byte, error) { + return ReadFile(a.Fs, filename) +} + +func ReadFile(fs Fs, filename string) ([]byte, error) { + f, err := fs.Open(filename) + if err != nil { + return nil, err + } + defer f.Close() + // It's a good but not certain bet that FileInfo will tell us exactly how much to + // read, so let's try it but be prepared for the answer to be wrong. + var n int64 + + if fi, err := f.Stat(); err == nil { + // Don't preallocate a huge buffer, just in case. + if size := fi.Size(); size < 1e9 { + n = size + } + } + // As initial capacity for readAll, use n + a little extra in case Size is zero, + // and to avoid another allocation after Read has filled the buffer. The readAll + // call will read into its allocated internal buffer cheaply. If the size was + // wrong, we'll either waste some space off the end or reallocate as needed, but + // in the overwhelmingly common case we'll get it just right. + return readAll(f, n+bytes.MinRead) +} + +// readAll reads from r until an error or EOF and returns the data it read +// from the internal buffer allocated with a specified capacity. +func readAll(r io.Reader, capacity int64) (b []byte, err error) { + buf := bytes.NewBuffer(make([]byte, 0, capacity)) + // If the buffer overflows, we will get bytes.ErrTooLarge. + // Return that as an error. Any other panic remains. + defer func() { + e := recover() + if e == nil { + return + } + if panicErr, ok := e.(error); ok && panicErr == bytes.ErrTooLarge { + err = panicErr + } else { + panic(e) + } + }() + _, err = buf.ReadFrom(r) + return buf.Bytes(), err +} + +// ReadAll reads from r until an error or EOF and returns the data it read. +// A successful call returns err == nil, not err == EOF. Because ReadAll is +// defined to read from src until EOF, it does not treat an EOF from Read +// as an error to be reported. +func ReadAll(r io.Reader) ([]byte, error) { + return readAll(r, bytes.MinRead) +} + +// WriteFile writes data to a file named by filename. +// If the file does not exist, WriteFile creates it with permissions perm; +// otherwise WriteFile truncates it before writing. +func (a Afero) WriteFile(filename string, data []byte, perm os.FileMode) error { + return WriteFile(a.Fs, filename, data, perm) +} + +func WriteFile(fs Fs, filename string, data []byte, perm os.FileMode) error { + f, err := fs.OpenFile(filename, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, perm) + if err != nil { + return err + } + n, err := f.Write(data) + if err == nil && n < len(data) { + err = io.ErrShortWrite + } + if err1 := f.Close(); err == nil { + err = err1 + } + return err +} + +// Random number state. +// We generate random temporary file names so that there's a good +// chance the file doesn't exist yet - keeps the number of tries in +// TempFile to a minimum. +var rand uint32 +var randmu sync.Mutex + +func reseed() uint32 { + return uint32(time.Now().UnixNano() + int64(os.Getpid())) +} + +func nextRandom() string { + randmu.Lock() + r := rand + if r == 0 { + r = reseed() + } + r = r*1664525 + 1013904223 // constants from Numerical Recipes + rand = r + randmu.Unlock() + return strconv.Itoa(int(1e9 + r%1e9))[1:] +} + +// TempFile creates a new temporary file in the directory dir, +// opens the file for reading and writing, and returns the resulting *os.File. +// The filename is generated by taking pattern and adding a random +// string to the end. If pattern includes a "*", the random string +// replaces the last "*". 
+// If dir is the empty string, TempFile uses the default directory +// for temporary files (see os.TempDir). +// Multiple programs calling TempFile simultaneously +// will not choose the same file. The caller can use f.Name() +// to find the pathname of the file. It is the caller's responsibility +// to remove the file when no longer needed. +func (a Afero) TempFile(dir, pattern string) (f File, err error) { + return TempFile(a.Fs, dir, pattern) +} + +func TempFile(fs Fs, dir, pattern string) (f File, err error) { + if dir == "" { + dir = os.TempDir() + } + + var prefix, suffix string + if pos := strings.LastIndex(pattern, "*"); pos != -1 { + prefix, suffix = pattern[:pos], pattern[pos+1:] + } else { + prefix = pattern + } + + nconflict := 0 + for i := 0; i < 10000; i++ { + name := filepath.Join(dir, prefix+nextRandom()+suffix) + f, err = fs.OpenFile(name, os.O_RDWR|os.O_CREATE|os.O_EXCL, 0600) + if os.IsExist(err) { + if nconflict++; nconflict > 10 { + randmu.Lock() + rand = reseed() + randmu.Unlock() + } + continue + } + break + } + return +} + +// TempDir creates a new temporary directory in the directory dir +// with a name beginning with prefix and returns the path of the +// new directory. If dir is the empty string, TempDir uses the +// default directory for temporary files (see os.TempDir). +// Multiple programs calling TempDir simultaneously +// will not choose the same directory. It is the caller's responsibility +// to remove the directory when no longer needed. +func (a Afero) TempDir(dir, prefix string) (name string, err error) { + return TempDir(a.Fs, dir, prefix) +} +func TempDir(fs Fs, dir, prefix string) (name string, err error) { + if dir == "" { + dir = os.TempDir() + } + + nconflict := 0 + for i := 0; i < 10000; i++ { + try := filepath.Join(dir, prefix+nextRandom()) + err = fs.Mkdir(try, 0700) + if os.IsExist(err) { + if nconflict++; nconflict > 10 { + randmu.Lock() + rand = reseed() + randmu.Unlock() + } + continue + } + if err == nil { + name = try + } + break + } + return +} diff --git a/vendor/github.com/spf13/afero/lstater.go b/vendor/github.com/spf13/afero/lstater.go new file mode 100644 index 000000000..89c1bfc0a --- /dev/null +++ b/vendor/github.com/spf13/afero/lstater.go @@ -0,0 +1,27 @@ +// Copyright © 2018 Steve Francia . +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package afero + +import ( + "os" +) + +// Lstater is an optional interface in Afero. It is only implemented by the +// filesystems saying so. +// It will call Lstat if the filesystem iself is, or it delegates to, the os filesystem. +// Else it will call Stat. +// In addtion to the FileInfo, it will return a boolean telling whether Lstat was called or not. 
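An illustrative sketch (not part of the vendored sources) of the optional-interface pattern this describes: callers type-assert for Lstater and fall back to a plain Stat when the filesystem does not implement it. The helper name below is invented.

// Illustrative sketch only; statPreferLstat is an invented helper name.
package main

import (
	"fmt"
	"os"

	"github.com/spf13/afero"
)

// statPreferLstat uses LstatIfPossible when the filesystem offers it and
// falls back to a plain Stat otherwise, reporting whether Lstat was used.
func statPreferLstat(fsys afero.Fs, name string) (os.FileInfo, bool, error) {
	if lfs, ok := fsys.(afero.Lstater); ok {
		return lfs.LstatIfPossible(name)
	}
	fi, err := fsys.Stat(name)
	return fi, false, err
}

func main() {
	fi, usedLstat, err := statPreferLstat(afero.NewOsFs(), os.TempDir())
	if err != nil {
		panic(err)
	}
	fmt.Println(fi.Name(), "lstat used:", usedLstat)
}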
+type Lstater interface { + LstatIfPossible(name string) (os.FileInfo, bool, error) +} diff --git a/vendor/github.com/spf13/afero/match.go b/vendor/github.com/spf13/afero/match.go new file mode 100644 index 000000000..7db4b7de6 --- /dev/null +++ b/vendor/github.com/spf13/afero/match.go @@ -0,0 +1,110 @@ +// Copyright © 2014 Steve Francia . +// Copyright 2009 The Go Authors. All rights reserved. + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package afero + +import ( + "path/filepath" + "sort" + "strings" +) + +// Glob returns the names of all files matching pattern or nil +// if there is no matching file. The syntax of patterns is the same +// as in Match. The pattern may describe hierarchical names such as +// /usr/*/bin/ed (assuming the Separator is '/'). +// +// Glob ignores file system errors such as I/O errors reading directories. +// The only possible returned error is ErrBadPattern, when pattern +// is malformed. +// +// This was adapted from (http://golang.org/pkg/path/filepath) and uses several +// built-ins from that package. +func Glob(fs Fs, pattern string) (matches []string, err error) { + if !hasMeta(pattern) { + // Lstat not supported by a ll filesystems. + if _, err = lstatIfPossible(fs, pattern); err != nil { + return nil, nil + } + return []string{pattern}, nil + } + + dir, file := filepath.Split(pattern) + switch dir { + case "": + dir = "." + case string(filepath.Separator): + // nothing + default: + dir = dir[0 : len(dir)-1] // chop off trailing separator + } + + if !hasMeta(dir) { + return glob(fs, dir, file, nil) + } + + var m []string + m, err = Glob(fs, dir) + if err != nil { + return + } + for _, d := range m { + matches, err = glob(fs, d, file, matches) + if err != nil { + return + } + } + return +} + +// glob searches for files matching pattern in the directory dir +// and appends them to matches. If the directory cannot be +// opened, it returns the existing matches. New matches are +// added in lexicographical order. +func glob(fs Fs, dir, pattern string, matches []string) (m []string, e error) { + m = matches + fi, err := fs.Stat(dir) + if err != nil { + return + } + if !fi.IsDir() { + return + } + d, err := fs.Open(dir) + if err != nil { + return + } + defer d.Close() + + names, _ := d.Readdirnames(-1) + sort.Strings(names) + + for _, n := range names { + matched, err := filepath.Match(pattern, n) + if err != nil { + return m, err + } + if matched { + m = append(m, filepath.Join(dir, n)) + } + } + return +} + +// hasMeta reports whether path contains any of the magic characters +// recognized by Match. +func hasMeta(path string) bool { + // TODO(niemeyer): Should other magic characters be added here? + return strings.ContainsAny(path, "*?[") +} diff --git a/vendor/github.com/spf13/afero/mem/dir.go b/vendor/github.com/spf13/afero/mem/dir.go new file mode 100644 index 000000000..e104013f4 --- /dev/null +++ b/vendor/github.com/spf13/afero/mem/dir.go @@ -0,0 +1,37 @@ +// Copyright © 2014 Steve Francia . 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package mem + +type Dir interface { + Len() int + Names() []string + Files() []*FileData + Add(*FileData) + Remove(*FileData) +} + +func RemoveFromMemDir(dir *FileData, f *FileData) { + dir.memDir.Remove(f) +} + +func AddToMemDir(dir *FileData, f *FileData) { + dir.memDir.Add(f) +} + +func InitializeDir(d *FileData) { + if d.memDir == nil { + d.dir = true + d.memDir = &DirMap{} + } +} diff --git a/vendor/github.com/spf13/afero/mem/dirmap.go b/vendor/github.com/spf13/afero/mem/dirmap.go new file mode 100644 index 000000000..03a57ee5b --- /dev/null +++ b/vendor/github.com/spf13/afero/mem/dirmap.go @@ -0,0 +1,43 @@ +// Copyright © 2015 Steve Francia . +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package mem + +import "sort" + +type DirMap map[string]*FileData + +func (m DirMap) Len() int { return len(m) } +func (m DirMap) Add(f *FileData) { m[f.name] = f } +func (m DirMap) Remove(f *FileData) { delete(m, f.name) } +func (m DirMap) Files() (files []*FileData) { + for _, f := range m { + files = append(files, f) + } + sort.Sort(filesSorter(files)) + return files +} + +// implement sort.Interface for []*FileData +type filesSorter []*FileData + +func (s filesSorter) Len() int { return len(s) } +func (s filesSorter) Swap(i, j int) { s[i], s[j] = s[j], s[i] } +func (s filesSorter) Less(i, j int) bool { return s[i].name < s[j].name } + +func (m DirMap) Names() (names []string) { + for x := range m { + names = append(names, x) + } + return names +} diff --git a/vendor/github.com/spf13/afero/mem/file.go b/vendor/github.com/spf13/afero/mem/file.go new file mode 100644 index 000000000..5a20730c2 --- /dev/null +++ b/vendor/github.com/spf13/afero/mem/file.go @@ -0,0 +1,338 @@ +// Copyright © 2015 Steve Francia . +// Copyright 2013 tsuru authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package mem + +import ( + "bytes" + "errors" + "io" + "os" + "path/filepath" + "sync" + "sync/atomic" + "time" +) + +const FilePathSeparator = string(filepath.Separator) + +type File struct { + // atomic requires 64-bit alignment for struct field access + at int64 + readDirCount int64 + closed bool + readOnly bool + fileData *FileData +} + +func NewFileHandle(data *FileData) *File { + return &File{fileData: data} +} + +func NewReadOnlyFileHandle(data *FileData) *File { + return &File{fileData: data, readOnly: true} +} + +func (f File) Data() *FileData { + return f.fileData +} + +type FileData struct { + sync.Mutex + name string + data []byte + memDir Dir + dir bool + mode os.FileMode + modtime time.Time + uid int + gid int +} + +func (d *FileData) Name() string { + d.Lock() + defer d.Unlock() + return d.name +} + +func CreateFile(name string) *FileData { + return &FileData{name: name, mode: os.ModeTemporary, modtime: time.Now()} +} + +func CreateDir(name string) *FileData { + return &FileData{name: name, memDir: &DirMap{}, dir: true} +} + +func ChangeFileName(f *FileData, newname string) { + f.Lock() + f.name = newname + f.Unlock() +} + +func SetMode(f *FileData, mode os.FileMode) { + f.Lock() + f.mode = mode + f.Unlock() +} + +func SetModTime(f *FileData, mtime time.Time) { + f.Lock() + setModTime(f, mtime) + f.Unlock() +} + +func setModTime(f *FileData, mtime time.Time) { + f.modtime = mtime +} + +func SetUID(f *FileData, uid int) { + f.Lock() + f.uid = uid + f.Unlock() +} + +func SetGID(f *FileData, gid int) { + f.Lock() + f.gid = gid + f.Unlock() +} + +func GetFileInfo(f *FileData) *FileInfo { + return &FileInfo{f} +} + +func (f *File) Open() error { + atomic.StoreInt64(&f.at, 0) + atomic.StoreInt64(&f.readDirCount, 0) + f.fileData.Lock() + f.closed = false + f.fileData.Unlock() + return nil +} + +func (f *File) Close() error { + f.fileData.Lock() + f.closed = true + if !f.readOnly { + setModTime(f.fileData, time.Now()) + } + f.fileData.Unlock() + return nil +} + +func (f *File) Name() string { + return f.fileData.Name() +} + +func (f *File) Stat() (os.FileInfo, error) { + return &FileInfo{f.fileData}, nil +} + +func (f *File) Sync() error { + return nil +} + +func (f *File) Readdir(count int) (res []os.FileInfo, err error) { + if !f.fileData.dir { + return nil, &os.PathError{Op: "readdir", Path: f.fileData.name, Err: errors.New("not a dir")} + } + var outLength int64 + + f.fileData.Lock() + files := f.fileData.memDir.Files()[f.readDirCount:] + if count > 0 { + if len(files) < count { + outLength = int64(len(files)) + } else { + outLength = int64(count) + } + if len(files) == 0 { + err = io.EOF + } + } else { + outLength = int64(len(files)) + } + f.readDirCount += outLength + f.fileData.Unlock() + + res = make([]os.FileInfo, outLength) + for i := range res { + res[i] = &FileInfo{files[i]} + } + + return res, err +} + +func (f *File) Readdirnames(n int) (names []string, err error) { + fi, err := f.Readdir(n) + names = make([]string, len(fi)) + for i, f := range fi { + _, names[i] = filepath.Split(f.Name()) + } + return names, err +} + +func (f *File) Read(b []byte) (n int, err error) { + f.fileData.Lock() + defer f.fileData.Unlock() + if f.closed == true { + return 0, ErrFileClosed + } + if len(b) > 0 && int(f.at) == len(f.fileData.data) { + return 0, io.EOF + } + if int(f.at) > len(f.fileData.data) { + return 0, io.ErrUnexpectedEOF + } + if len(f.fileData.data)-int(f.at) >= len(b) { + n = len(b) + } else { + n = len(f.fileData.data) - int(f.at) + } + copy(b, 
f.fileData.data[f.at:f.at+int64(n)]) + atomic.AddInt64(&f.at, int64(n)) + return +} + +func (f *File) ReadAt(b []byte, off int64) (n int, err error) { + prev := atomic.LoadInt64(&f.at) + atomic.StoreInt64(&f.at, off) + n, err = f.Read(b) + atomic.StoreInt64(&f.at, prev) + return +} + +func (f *File) Truncate(size int64) error { + if f.closed == true { + return ErrFileClosed + } + if f.readOnly { + return &os.PathError{Op: "truncate", Path: f.fileData.name, Err: errors.New("file handle is read only")} + } + if size < 0 { + return ErrOutOfRange + } + f.fileData.Lock() + defer f.fileData.Unlock() + if size > int64(len(f.fileData.data)) { + diff := size - int64(len(f.fileData.data)) + f.fileData.data = append(f.fileData.data, bytes.Repeat([]byte{00}, int(diff))...) + } else { + f.fileData.data = f.fileData.data[0:size] + } + setModTime(f.fileData, time.Now()) + return nil +} + +func (f *File) Seek(offset int64, whence int) (int64, error) { + if f.closed == true { + return 0, ErrFileClosed + } + switch whence { + case io.SeekStart: + atomic.StoreInt64(&f.at, offset) + case io.SeekCurrent: + atomic.AddInt64(&f.at, offset) + case io.SeekEnd: + atomic.StoreInt64(&f.at, int64(len(f.fileData.data))+offset) + } + return f.at, nil +} + +func (f *File) Write(b []byte) (n int, err error) { + if f.closed == true { + return 0, ErrFileClosed + } + if f.readOnly { + return 0, &os.PathError{Op: "write", Path: f.fileData.name, Err: errors.New("file handle is read only")} + } + n = len(b) + cur := atomic.LoadInt64(&f.at) + f.fileData.Lock() + defer f.fileData.Unlock() + diff := cur - int64(len(f.fileData.data)) + var tail []byte + if n+int(cur) < len(f.fileData.data) { + tail = f.fileData.data[n+int(cur):] + } + if diff > 0 { + f.fileData.data = append(f.fileData.data, append(bytes.Repeat([]byte{00}, int(diff)), b...)...) + f.fileData.data = append(f.fileData.data, tail...) + } else { + f.fileData.data = append(f.fileData.data[:cur], b...) + f.fileData.data = append(f.fileData.data, tail...) + } + setModTime(f.fileData, time.Now()) + + atomic.AddInt64(&f.at, int64(n)) + return +} + +func (f *File) WriteAt(b []byte, off int64) (n int, err error) { + atomic.StoreInt64(&f.at, off) + return f.Write(b) +} + +func (f *File) WriteString(s string) (ret int, err error) { + return f.Write([]byte(s)) +} + +func (f *File) Info() *FileInfo { + return &FileInfo{f.fileData} +} + +type FileInfo struct { + *FileData +} + +// Implements os.FileInfo +func (s *FileInfo) Name() string { + s.Lock() + _, name := filepath.Split(s.name) + s.Unlock() + return name +} +func (s *FileInfo) Mode() os.FileMode { + s.Lock() + defer s.Unlock() + return s.mode +} +func (s *FileInfo) ModTime() time.Time { + s.Lock() + defer s.Unlock() + return s.modtime +} +func (s *FileInfo) IsDir() bool { + s.Lock() + defer s.Unlock() + return s.dir +} +func (s *FileInfo) Sys() interface{} { return nil } +func (s *FileInfo) Size() int64 { + if s.IsDir() { + return int64(42) + } + s.Lock() + defer s.Unlock() + return int64(len(s.data)) +} + +var ( + ErrFileClosed = errors.New("File is closed") + ErrOutOfRange = errors.New("Out of range") + ErrTooLarge = errors.New("Too large") + ErrFileNotFound = os.ErrNotExist + ErrFileExists = os.ErrExist + ErrDestinationExists = os.ErrExist +) diff --git a/vendor/github.com/spf13/afero/memmap.go b/vendor/github.com/spf13/afero/memmap.go new file mode 100644 index 000000000..5c265f92b --- /dev/null +++ b/vendor/github.com/spf13/afero/memmap.go @@ -0,0 +1,404 @@ +// Copyright © 2014 Steve Francia . 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package afero + +import ( + "fmt" + "log" + "os" + "path/filepath" + "strings" + "sync" + "time" + + "github.com/spf13/afero/mem" +) + +const chmodBits = os.ModePerm | os.ModeSetuid | os.ModeSetgid | os.ModeSticky // Only a subset of bits are allowed to be changed. Documented under os.Chmod() + +type MemMapFs struct { + mu sync.RWMutex + data map[string]*mem.FileData + init sync.Once +} + +func NewMemMapFs() Fs { + return &MemMapFs{} +} + +func (m *MemMapFs) getData() map[string]*mem.FileData { + m.init.Do(func() { + m.data = make(map[string]*mem.FileData) + // Root should always exist, right? + // TODO: what about windows? + root := mem.CreateDir(FilePathSeparator) + mem.SetMode(root, os.ModeDir|0755) + m.data[FilePathSeparator] = root + }) + return m.data +} + +func (*MemMapFs) Name() string { return "MemMapFS" } + +func (m *MemMapFs) Create(name string) (File, error) { + name = normalizePath(name) + m.mu.Lock() + file := mem.CreateFile(name) + m.getData()[name] = file + m.registerWithParent(file, 0) + m.mu.Unlock() + return mem.NewFileHandle(file), nil +} + +func (m *MemMapFs) unRegisterWithParent(fileName string) error { + f, err := m.lockfreeOpen(fileName) + if err != nil { + return err + } + parent := m.findParent(f) + if parent == nil { + log.Panic("parent of ", f.Name(), " is nil") + } + + parent.Lock() + mem.RemoveFromMemDir(parent, f) + parent.Unlock() + return nil +} + +func (m *MemMapFs) findParent(f *mem.FileData) *mem.FileData { + pdir, _ := filepath.Split(f.Name()) + pdir = filepath.Clean(pdir) + pfile, err := m.lockfreeOpen(pdir) + if err != nil { + return nil + } + return pfile +} + +func (m *MemMapFs) registerWithParent(f *mem.FileData, perm os.FileMode) { + if f == nil { + return + } + parent := m.findParent(f) + if parent == nil { + pdir := filepath.Dir(filepath.Clean(f.Name())) + err := m.lockfreeMkdir(pdir, perm) + if err != nil { + //log.Println("Mkdir error:", err) + return + } + parent, err = m.lockfreeOpen(pdir) + if err != nil { + //log.Println("Open after Mkdir error:", err) + return + } + } + + parent.Lock() + mem.InitializeDir(parent) + mem.AddToMemDir(parent, f) + parent.Unlock() +} + +func (m *MemMapFs) lockfreeMkdir(name string, perm os.FileMode) error { + name = normalizePath(name) + x, ok := m.getData()[name] + if ok { + // Only return ErrFileExists if it's a file, not a directory. 
+ i := mem.FileInfo{FileData: x} + if !i.IsDir() { + return ErrFileExists + } + } else { + item := mem.CreateDir(name) + mem.SetMode(item, os.ModeDir|perm) + m.getData()[name] = item + m.registerWithParent(item, perm) + } + return nil +} + +func (m *MemMapFs) Mkdir(name string, perm os.FileMode) error { + perm &= chmodBits + name = normalizePath(name) + + m.mu.RLock() + _, ok := m.getData()[name] + m.mu.RUnlock() + if ok { + return &os.PathError{Op: "mkdir", Path: name, Err: ErrFileExists} + } + + m.mu.Lock() + item := mem.CreateDir(name) + mem.SetMode(item, os.ModeDir|perm) + m.getData()[name] = item + m.registerWithParent(item, perm) + m.mu.Unlock() + + return m.setFileMode(name, perm|os.ModeDir) +} + +func (m *MemMapFs) MkdirAll(path string, perm os.FileMode) error { + err := m.Mkdir(path, perm) + if err != nil { + if err.(*os.PathError).Err == ErrFileExists { + return nil + } + return err + } + return nil +} + +// Handle some relative paths +func normalizePath(path string) string { + path = filepath.Clean(path) + + switch path { + case ".": + return FilePathSeparator + case "..": + return FilePathSeparator + default: + return path + } +} + +func (m *MemMapFs) Open(name string) (File, error) { + f, err := m.open(name) + if f != nil { + return mem.NewReadOnlyFileHandle(f), err + } + return nil, err +} + +func (m *MemMapFs) openWrite(name string) (File, error) { + f, err := m.open(name) + if f != nil { + return mem.NewFileHandle(f), err + } + return nil, err +} + +func (m *MemMapFs) open(name string) (*mem.FileData, error) { + name = normalizePath(name) + + m.mu.RLock() + f, ok := m.getData()[name] + m.mu.RUnlock() + if !ok { + return nil, &os.PathError{Op: "open", Path: name, Err: ErrFileNotFound} + } + return f, nil +} + +func (m *MemMapFs) lockfreeOpen(name string) (*mem.FileData, error) { + name = normalizePath(name) + f, ok := m.getData()[name] + if ok { + return f, nil + } else { + return nil, ErrFileNotFound + } +} + +func (m *MemMapFs) OpenFile(name string, flag int, perm os.FileMode) (File, error) { + perm &= chmodBits + chmod := false + file, err := m.openWrite(name) + if err == nil && (flag&os.O_EXCL > 0) { + return nil, &os.PathError{Op: "open", Path: name, Err: ErrFileExists} + } + if os.IsNotExist(err) && (flag&os.O_CREATE > 0) { + file, err = m.Create(name) + chmod = true + } + if err != nil { + return nil, err + } + if flag == os.O_RDONLY { + file = mem.NewReadOnlyFileHandle(file.(*mem.File).Data()) + } + if flag&os.O_APPEND > 0 { + _, err = file.Seek(0, os.SEEK_END) + if err != nil { + file.Close() + return nil, err + } + } + if flag&os.O_TRUNC > 0 && flag&(os.O_RDWR|os.O_WRONLY) > 0 { + err = file.Truncate(0) + if err != nil { + file.Close() + return nil, err + } + } + if chmod { + return file, m.setFileMode(name, perm) + } + return file, nil +} + +func (m *MemMapFs) Remove(name string) error { + name = normalizePath(name) + + m.mu.Lock() + defer m.mu.Unlock() + + if _, ok := m.getData()[name]; ok { + err := m.unRegisterWithParent(name) + if err != nil { + return &os.PathError{Op: "remove", Path: name, Err: err} + } + delete(m.getData(), name) + } else { + return &os.PathError{Op: "remove", Path: name, Err: os.ErrNotExist} + } + return nil +} + +func (m *MemMapFs) RemoveAll(path string) error { + path = normalizePath(path) + m.mu.Lock() + m.unRegisterWithParent(path) + m.mu.Unlock() + + m.mu.RLock() + defer m.mu.RUnlock() + + for p := range m.getData() { + if strings.HasPrefix(p, path) { + m.mu.RUnlock() + m.mu.Lock() + delete(m.getData(), p) + m.mu.Unlock() + 
m.mu.RLock() + } + } + return nil +} + +func (m *MemMapFs) Rename(oldname, newname string) error { + oldname = normalizePath(oldname) + newname = normalizePath(newname) + + if oldname == newname { + return nil + } + + m.mu.RLock() + defer m.mu.RUnlock() + if _, ok := m.getData()[oldname]; ok { + m.mu.RUnlock() + m.mu.Lock() + m.unRegisterWithParent(oldname) + fileData := m.getData()[oldname] + delete(m.getData(), oldname) + mem.ChangeFileName(fileData, newname) + m.getData()[newname] = fileData + m.registerWithParent(fileData, 0) + m.mu.Unlock() + m.mu.RLock() + } else { + return &os.PathError{Op: "rename", Path: oldname, Err: ErrFileNotFound} + } + return nil +} + +func (m *MemMapFs) LstatIfPossible(name string) (os.FileInfo, bool, error) { + fileInfo, err := m.Stat(name) + return fileInfo, false, err +} + +func (m *MemMapFs) Stat(name string) (os.FileInfo, error) { + f, err := m.Open(name) + if err != nil { + return nil, err + } + fi := mem.GetFileInfo(f.(*mem.File).Data()) + return fi, nil +} + +func (m *MemMapFs) Chmod(name string, mode os.FileMode) error { + mode &= chmodBits + + m.mu.RLock() + f, ok := m.getData()[name] + m.mu.RUnlock() + if !ok { + return &os.PathError{Op: "chmod", Path: name, Err: ErrFileNotFound} + } + prevOtherBits := mem.GetFileInfo(f).Mode() & ^chmodBits + + mode = prevOtherBits | mode + return m.setFileMode(name, mode) +} + +func (m *MemMapFs) setFileMode(name string, mode os.FileMode) error { + name = normalizePath(name) + + m.mu.RLock() + f, ok := m.getData()[name] + m.mu.RUnlock() + if !ok { + return &os.PathError{Op: "chmod", Path: name, Err: ErrFileNotFound} + } + + m.mu.Lock() + mem.SetMode(f, mode) + m.mu.Unlock() + + return nil +} + +func (m *MemMapFs) Chown(name string, uid, gid int) error { + name = normalizePath(name) + + m.mu.RLock() + f, ok := m.getData()[name] + m.mu.RUnlock() + if !ok { + return &os.PathError{Op: "chown", Path: name, Err: ErrFileNotFound} + } + + mem.SetUID(f, uid) + mem.SetGID(f, gid) + + return nil +} + +func (m *MemMapFs) Chtimes(name string, atime time.Time, mtime time.Time) error { + name = normalizePath(name) + + m.mu.RLock() + f, ok := m.getData()[name] + m.mu.RUnlock() + if !ok { + return &os.PathError{Op: "chtimes", Path: name, Err: ErrFileNotFound} + } + + m.mu.Lock() + mem.SetModTime(f, mtime) + m.mu.Unlock() + + return nil +} + +func (m *MemMapFs) List() { + for _, x := range m.data { + y := mem.FileInfo{FileData: x} + fmt.Println(x.Name(), y.Size()) + } +} diff --git a/vendor/github.com/spf13/afero/os.go b/vendor/github.com/spf13/afero/os.go new file mode 100644 index 000000000..f1366321e --- /dev/null +++ b/vendor/github.com/spf13/afero/os.go @@ -0,0 +1,113 @@ +// Copyright © 2014 Steve Francia . +// Copyright 2013 tsuru authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package afero + +import ( + "os" + "time" +) + +var _ Lstater = (*OsFs)(nil) + +// OsFs is a Fs implementation that uses functions provided by the os package. 
+// +// For details in any method, check the documentation of the os package +// (http://golang.org/pkg/os/). +type OsFs struct{} + +func NewOsFs() Fs { + return &OsFs{} +} + +func (OsFs) Name() string { return "OsFs" } + +func (OsFs) Create(name string) (File, error) { + f, e := os.Create(name) + if f == nil { + // while this looks strange, we need to return a bare nil (of type nil) not + // a nil value of type *os.File or nil won't be nil + return nil, e + } + return f, e +} + +func (OsFs) Mkdir(name string, perm os.FileMode) error { + return os.Mkdir(name, perm) +} + +func (OsFs) MkdirAll(path string, perm os.FileMode) error { + return os.MkdirAll(path, perm) +} + +func (OsFs) Open(name string) (File, error) { + f, e := os.Open(name) + if f == nil { + // while this looks strange, we need to return a bare nil (of type nil) not + // a nil value of type *os.File or nil won't be nil + return nil, e + } + return f, e +} + +func (OsFs) OpenFile(name string, flag int, perm os.FileMode) (File, error) { + f, e := os.OpenFile(name, flag, perm) + if f == nil { + // while this looks strange, we need to return a bare nil (of type nil) not + // a nil value of type *os.File or nil won't be nil + return nil, e + } + return f, e +} + +func (OsFs) Remove(name string) error { + return os.Remove(name) +} + +func (OsFs) RemoveAll(path string) error { + return os.RemoveAll(path) +} + +func (OsFs) Rename(oldname, newname string) error { + return os.Rename(oldname, newname) +} + +func (OsFs) Stat(name string) (os.FileInfo, error) { + return os.Stat(name) +} + +func (OsFs) Chmod(name string, mode os.FileMode) error { + return os.Chmod(name, mode) +} + +func (OsFs) Chown(name string, uid, gid int) error { + return os.Chown(name, uid, gid) +} + +func (OsFs) Chtimes(name string, atime time.Time, mtime time.Time) error { + return os.Chtimes(name, atime, mtime) +} + +func (OsFs) LstatIfPossible(name string) (os.FileInfo, bool, error) { + fi, err := os.Lstat(name) + return fi, true, err +} + +func (OsFs) SymlinkIfPossible(oldname, newname string) error { + return os.Symlink(oldname, newname) +} + +func (OsFs) ReadlinkIfPossible(name string) (string, error) { + return os.Readlink(name) +} diff --git a/vendor/github.com/spf13/afero/path.go b/vendor/github.com/spf13/afero/path.go new file mode 100644 index 000000000..18f60a0f6 --- /dev/null +++ b/vendor/github.com/spf13/afero/path.go @@ -0,0 +1,106 @@ +// Copyright ©2015 The Go Authors +// Copyright ©2015 Steve Francia +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package afero + +import ( + "os" + "path/filepath" + "sort" +) + +// readDirNames reads the directory named by dirname and returns +// a sorted list of directory entries. 
+// adapted from https://golang.org/src/path/filepath/path.go +func readDirNames(fs Fs, dirname string) ([]string, error) { + f, err := fs.Open(dirname) + if err != nil { + return nil, err + } + names, err := f.Readdirnames(-1) + f.Close() + if err != nil { + return nil, err + } + sort.Strings(names) + return names, nil +} + +// walk recursively descends path, calling walkFn +// adapted from https://golang.org/src/path/filepath/path.go +func walk(fs Fs, path string, info os.FileInfo, walkFn filepath.WalkFunc) error { + err := walkFn(path, info, nil) + if err != nil { + if info.IsDir() && err == filepath.SkipDir { + return nil + } + return err + } + + if !info.IsDir() { + return nil + } + + names, err := readDirNames(fs, path) + if err != nil { + return walkFn(path, info, err) + } + + for _, name := range names { + filename := filepath.Join(path, name) + fileInfo, err := lstatIfPossible(fs, filename) + if err != nil { + if err := walkFn(filename, fileInfo, err); err != nil && err != filepath.SkipDir { + return err + } + } else { + err = walk(fs, filename, fileInfo, walkFn) + if err != nil { + if !fileInfo.IsDir() || err != filepath.SkipDir { + return err + } + } + } + } + return nil +} + +// if the filesystem supports it, use Lstat, else use fs.Stat +func lstatIfPossible(fs Fs, path string) (os.FileInfo, error) { + if lfs, ok := fs.(Lstater); ok { + fi, _, err := lfs.LstatIfPossible(path) + return fi, err + } + return fs.Stat(path) +} + +// Walk walks the file tree rooted at root, calling walkFn for each file or +// directory in the tree, including root. All errors that arise visiting files +// and directories are filtered by walkFn. The files are walked in lexical +// order, which makes the output deterministic but means that for very +// large directories Walk can be inefficient. +// Walk does not follow symbolic links. 
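An illustrative sketch (not part of the vendored sources; the directory layout is invented) of driving Walk over an in-memory filesystem:

// Illustrative sketch only; the directory layout is invented.
package main

import (
	"fmt"
	"os"

	"github.com/spf13/afero"
)

func main() {
	fsys := afero.NewMemMapFs()
	_ = afero.WriteFile(fsys, "/srv/a.txt", []byte("a"), 0644)
	_ = afero.WriteFile(fsys, "/srv/sub/b.txt", []byte("b"), 0644)

	err := afero.Walk(fsys, "/srv", func(path string, info os.FileInfo, err error) error {
		if err != nil {
			return err
		}
		fmt.Println(path, "dir:", info.IsDir())
		return nil
	})
	if err != nil {
		panic(err)
	}
}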
+ +func (a Afero) Walk(root string, walkFn filepath.WalkFunc) error { + return Walk(a.Fs, root, walkFn) +} + +func Walk(fs Fs, root string, walkFn filepath.WalkFunc) error { + info, err := lstatIfPossible(fs, root) + if err != nil { + return walkFn(root, nil, err) + } + return walk(fs, root, info, walkFn) +} diff --git a/vendor/github.com/spf13/afero/readonlyfs.go b/vendor/github.com/spf13/afero/readonlyfs.go new file mode 100644 index 000000000..bd8f9264d --- /dev/null +++ b/vendor/github.com/spf13/afero/readonlyfs.go @@ -0,0 +1,96 @@ +package afero + +import ( + "os" + "syscall" + "time" +) + +var _ Lstater = (*ReadOnlyFs)(nil) + +type ReadOnlyFs struct { + source Fs +} + +func NewReadOnlyFs(source Fs) Fs { + return &ReadOnlyFs{source: source} +} + +func (r *ReadOnlyFs) ReadDir(name string) ([]os.FileInfo, error) { + return ReadDir(r.source, name) +} + +func (r *ReadOnlyFs) Chtimes(n string, a, m time.Time) error { + return syscall.EPERM +} + +func (r *ReadOnlyFs) Chmod(n string, m os.FileMode) error { + return syscall.EPERM +} + +func (r *ReadOnlyFs) Chown(n string, uid, gid int) error { + return syscall.EPERM +} + +func (r *ReadOnlyFs) Name() string { + return "ReadOnlyFilter" +} + +func (r *ReadOnlyFs) Stat(name string) (os.FileInfo, error) { + return r.source.Stat(name) +} + +func (r *ReadOnlyFs) LstatIfPossible(name string) (os.FileInfo, bool, error) { + if lsf, ok := r.source.(Lstater); ok { + return lsf.LstatIfPossible(name) + } + fi, err := r.Stat(name) + return fi, false, err +} + +func (r *ReadOnlyFs) SymlinkIfPossible(oldname, newname string) error { + return &os.LinkError{Op: "symlink", Old: oldname, New: newname, Err: ErrNoSymlink} +} + +func (r *ReadOnlyFs) ReadlinkIfPossible(name string) (string, error) { + if srdr, ok := r.source.(LinkReader); ok { + return srdr.ReadlinkIfPossible(name) + } + + return "", &os.PathError{Op: "readlink", Path: name, Err: ErrNoReadlink} +} + +func (r *ReadOnlyFs) Rename(o, n string) error { + return syscall.EPERM +} + +func (r *ReadOnlyFs) RemoveAll(p string) error { + return syscall.EPERM +} + +func (r *ReadOnlyFs) Remove(n string) error { + return syscall.EPERM +} + +func (r *ReadOnlyFs) OpenFile(name string, flag int, perm os.FileMode) (File, error) { + if flag&(os.O_WRONLY|syscall.O_RDWR|os.O_APPEND|os.O_CREATE|os.O_TRUNC) != 0 { + return nil, syscall.EPERM + } + return r.source.OpenFile(name, flag, perm) +} + +func (r *ReadOnlyFs) Open(n string) (File, error) { + return r.source.Open(n) +} + +func (r *ReadOnlyFs) Mkdir(n string, p os.FileMode) error { + return syscall.EPERM +} + +func (r *ReadOnlyFs) MkdirAll(n string, p os.FileMode) error { + return syscall.EPERM +} + +func (r *ReadOnlyFs) Create(n string) (File, error) { + return nil, syscall.EPERM +} diff --git a/vendor/github.com/spf13/afero/regexpfs.go b/vendor/github.com/spf13/afero/regexpfs.go new file mode 100644 index 000000000..ac359c62a --- /dev/null +++ b/vendor/github.com/spf13/afero/regexpfs.go @@ -0,0 +1,224 @@ +package afero + +import ( + "os" + "regexp" + "syscall" + "time" +) + +// The RegexpFs filters files (not directories) by regular expression. Only +// files matching the given regexp will be allowed, all others get a ENOENT error ( +// "No such file or directory"). 
+// +type RegexpFs struct { + re *regexp.Regexp + source Fs +} + +func NewRegexpFs(source Fs, re *regexp.Regexp) Fs { + return &RegexpFs{source: source, re: re} +} + +type RegexpFile struct { + f File + re *regexp.Regexp +} + +func (r *RegexpFs) matchesName(name string) error { + if r.re == nil { + return nil + } + if r.re.MatchString(name) { + return nil + } + return syscall.ENOENT +} + +func (r *RegexpFs) dirOrMatches(name string) error { + dir, err := IsDir(r.source, name) + if err != nil { + return err + } + if dir { + return nil + } + return r.matchesName(name) +} + +func (r *RegexpFs) Chtimes(name string, a, m time.Time) error { + if err := r.dirOrMatches(name); err != nil { + return err + } + return r.source.Chtimes(name, a, m) +} + +func (r *RegexpFs) Chmod(name string, mode os.FileMode) error { + if err := r.dirOrMatches(name); err != nil { + return err + } + return r.source.Chmod(name, mode) +} + +func (r *RegexpFs) Chown(name string, uid, gid int) error { + if err := r.dirOrMatches(name); err != nil { + return err + } + return r.source.Chown(name, uid, gid) +} + +func (r *RegexpFs) Name() string { + return "RegexpFs" +} + +func (r *RegexpFs) Stat(name string) (os.FileInfo, error) { + if err := r.dirOrMatches(name); err != nil { + return nil, err + } + return r.source.Stat(name) +} + +func (r *RegexpFs) Rename(oldname, newname string) error { + dir, err := IsDir(r.source, oldname) + if err != nil { + return err + } + if dir { + return nil + } + if err := r.matchesName(oldname); err != nil { + return err + } + if err := r.matchesName(newname); err != nil { + return err + } + return r.source.Rename(oldname, newname) +} + +func (r *RegexpFs) RemoveAll(p string) error { + dir, err := IsDir(r.source, p) + if err != nil { + return err + } + if !dir { + if err := r.matchesName(p); err != nil { + return err + } + } + return r.source.RemoveAll(p) +} + +func (r *RegexpFs) Remove(name string) error { + if err := r.dirOrMatches(name); err != nil { + return err + } + return r.source.Remove(name) +} + +func (r *RegexpFs) OpenFile(name string, flag int, perm os.FileMode) (File, error) { + if err := r.dirOrMatches(name); err != nil { + return nil, err + } + return r.source.OpenFile(name, flag, perm) +} + +func (r *RegexpFs) Open(name string) (File, error) { + dir, err := IsDir(r.source, name) + if err != nil { + return nil, err + } + if !dir { + if err := r.matchesName(name); err != nil { + return nil, err + } + } + f, err := r.source.Open(name) + if err != nil { + return nil, err + } + return &RegexpFile{f: f, re: r.re}, nil +} + +func (r *RegexpFs) Mkdir(n string, p os.FileMode) error { + return r.source.Mkdir(n, p) +} + +func (r *RegexpFs) MkdirAll(n string, p os.FileMode) error { + return r.source.MkdirAll(n, p) +} + +func (r *RegexpFs) Create(name string) (File, error) { + if err := r.matchesName(name); err != nil { + return nil, err + } + return r.source.Create(name) +} + +func (f *RegexpFile) Close() error { + return f.f.Close() +} + +func (f *RegexpFile) Read(s []byte) (int, error) { + return f.f.Read(s) +} + +func (f *RegexpFile) ReadAt(s []byte, o int64) (int, error) { + return f.f.ReadAt(s, o) +} + +func (f *RegexpFile) Seek(o int64, w int) (int64, error) { + return f.f.Seek(o, w) +} + +func (f *RegexpFile) Write(s []byte) (int, error) { + return f.f.Write(s) +} + +func (f *RegexpFile) WriteAt(s []byte, o int64) (int, error) { + return f.f.WriteAt(s, o) +} + +func (f *RegexpFile) Name() string { + return f.f.Name() +} + +func (f *RegexpFile) Readdir(c int) (fi []os.FileInfo, err 
error) { + var rfi []os.FileInfo + rfi, err = f.f.Readdir(c) + if err != nil { + return nil, err + } + for _, i := range rfi { + if i.IsDir() || f.re.MatchString(i.Name()) { + fi = append(fi, i) + } + } + return fi, nil +} + +func (f *RegexpFile) Readdirnames(c int) (n []string, err error) { + fi, err := f.Readdir(c) + if err != nil { + return nil, err + } + for _, s := range fi { + n = append(n, s.Name()) + } + return n, nil +} + +func (f *RegexpFile) Stat() (os.FileInfo, error) { + return f.f.Stat() +} + +func (f *RegexpFile) Sync() error { + return f.f.Sync() +} + +func (f *RegexpFile) Truncate(s int64) error { + return f.f.Truncate(s) +} + +func (f *RegexpFile) WriteString(s string) (int, error) { + return f.f.WriteString(s) +} diff --git a/vendor/github.com/spf13/afero/symlink.go b/vendor/github.com/spf13/afero/symlink.go new file mode 100644 index 000000000..d1c6ea53d --- /dev/null +++ b/vendor/github.com/spf13/afero/symlink.go @@ -0,0 +1,55 @@ +// Copyright © 2018 Steve Francia . +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package afero + +import ( + "errors" +) + +// Symlinker is an optional interface in Afero. It is only implemented by the +// filesystems saying so. +// It indicates support for 3 symlink related interfaces that implement the +// behaviors of the os methods: +// - Lstat +// - Symlink, and +// - Readlink +type Symlinker interface { + Lstater + Linker + LinkReader +} + +// Linker is an optional interface in Afero. It is only implemented by the +// filesystems saying so. +// It will call Symlink if the filesystem itself is, or it delegates to, the os filesystem, +// or the filesystem otherwise supports Symlink's. +type Linker interface { + SymlinkIfPossible(oldname, newname string) error +} + +// ErrNoSymlink is the error that will be wrapped in an os.LinkError if a file system +// does not support Symlink's either directly or through its delegated filesystem. +// As expressed by support for the Linker interface. +var ErrNoSymlink = errors.New("symlink not supported") + +// LinkReader is an optional interface in Afero. It is only implemented by the +// filesystems saying so. +type LinkReader interface { + ReadlinkIfPossible(name string) (string, error) +} + +// ErrNoReadlink is the error that will be wrapped in an os.Path if a file system +// does not support the readlink operation either directly or through its delegated filesystem. +// As expressed by support for the LinkReader interface. +var ErrNoReadlink = errors.New("readlink not supported") diff --git a/vendor/github.com/spf13/afero/unionFile.go b/vendor/github.com/spf13/afero/unionFile.go new file mode 100644 index 000000000..985363eea --- /dev/null +++ b/vendor/github.com/spf13/afero/unionFile.go @@ -0,0 +1,317 @@ +package afero + +import ( + "io" + "os" + "path/filepath" + "syscall" +) + +// The UnionFile implements the afero.File interface and will be returned +// when reading a directory present at least in the overlay or opening a file +// for writing. 
+// +// The calls to +// Readdir() and Readdirnames() merge the file os.FileInfo / names from the +// base and the overlay - for files present in both layers, only those +// from the overlay will be used. +// +// When opening files for writing (Create() / OpenFile() with the right flags) +// the operations will be done in both layers, starting with the overlay. A +// successful read in the overlay will move the cursor position in the base layer +// by the number of bytes read. +type UnionFile struct { + Base File + Layer File + Merger DirsMerger + off int + files []os.FileInfo +} + +func (f *UnionFile) Close() error { + // first close base, so we have a newer timestamp in the overlay. If we'd close + // the overlay first, we'd get a cacheStale the next time we access this file + // -> cache would be useless ;-) + if f.Base != nil { + f.Base.Close() + } + if f.Layer != nil { + return f.Layer.Close() + } + return BADFD +} + +func (f *UnionFile) Read(s []byte) (int, error) { + if f.Layer != nil { + n, err := f.Layer.Read(s) + if (err == nil || err == io.EOF) && f.Base != nil { + // advance the file position also in the base file, the next + // call may be a write at this position (or a seek with SEEK_CUR) + if _, seekErr := f.Base.Seek(int64(n), os.SEEK_CUR); seekErr != nil { + // only overwrite err in case the seek fails: we need to + // report an eventual io.EOF to the caller + err = seekErr + } + } + return n, err + } + if f.Base != nil { + return f.Base.Read(s) + } + return 0, BADFD +} + +func (f *UnionFile) ReadAt(s []byte, o int64) (int, error) { + if f.Layer != nil { + n, err := f.Layer.ReadAt(s, o) + if (err == nil || err == io.EOF) && f.Base != nil { + _, err = f.Base.Seek(o+int64(n), os.SEEK_SET) + } + return n, err + } + if f.Base != nil { + return f.Base.ReadAt(s, o) + } + return 0, BADFD +} + +func (f *UnionFile) Seek(o int64, w int) (pos int64, err error) { + if f.Layer != nil { + pos, err = f.Layer.Seek(o, w) + if (err == nil || err == io.EOF) && f.Base != nil { + _, err = f.Base.Seek(o, w) + } + return pos, err + } + if f.Base != nil { + return f.Base.Seek(o, w) + } + return 0, BADFD +} + +func (f *UnionFile) Write(s []byte) (n int, err error) { + if f.Layer != nil { + n, err = f.Layer.Write(s) + if err == nil && f.Base != nil { // hmm, do we have fixed size files where a write may hit the EOF mark? + _, err = f.Base.Write(s) + } + return n, err + } + if f.Base != nil { + return f.Base.Write(s) + } + return 0, BADFD +} + +func (f *UnionFile) WriteAt(s []byte, o int64) (n int, err error) { + if f.Layer != nil { + n, err = f.Layer.WriteAt(s, o) + if err == nil && f.Base != nil { + _, err = f.Base.WriteAt(s, o) + } + return n, err + } + if f.Base != nil { + return f.Base.WriteAt(s, o) + } + return 0, BADFD +} + +func (f *UnionFile) Name() string { + if f.Layer != nil { + return f.Layer.Name() + } + return f.Base.Name() +} + +// DirsMerger is how UnionFile weaves two directories together. +// It takes the FileInfo slices from the layer and the base and returns a +// single view. 
+type DirsMerger func(lofi, bofi []os.FileInfo) ([]os.FileInfo, error) + +var defaultUnionMergeDirsFn = func(lofi, bofi []os.FileInfo) ([]os.FileInfo, error) { + var files = make(map[string]os.FileInfo) + + for _, fi := range lofi { + files[fi.Name()] = fi + } + + for _, fi := range bofi { + if _, exists := files[fi.Name()]; !exists { + files[fi.Name()] = fi + } + } + + rfi := make([]os.FileInfo, len(files)) + + i := 0 + for _, fi := range files { + rfi[i] = fi + i++ + } + + return rfi, nil + +} + +// Readdir will weave the two directories together and +// return a single view of the overlayed directories. +// At the end of the directory view, the error is io.EOF if c > 0. +func (f *UnionFile) Readdir(c int) (ofi []os.FileInfo, err error) { + var merge DirsMerger = f.Merger + if merge == nil { + merge = defaultUnionMergeDirsFn + } + + if f.off == 0 { + var lfi []os.FileInfo + if f.Layer != nil { + lfi, err = f.Layer.Readdir(-1) + if err != nil { + return nil, err + } + } + + var bfi []os.FileInfo + if f.Base != nil { + bfi, err = f.Base.Readdir(-1) + if err != nil { + return nil, err + } + + } + merged, err := merge(lfi, bfi) + if err != nil { + return nil, err + } + f.files = append(f.files, merged...) + } + files := f.files[f.off:] + + if c <= 0 { + return files, nil + } + + if len(files) == 0 { + return nil, io.EOF + } + + if c > len(files) { + c = len(files) + } + + defer func() { f.off += c }() + return files[:c], nil +} + +func (f *UnionFile) Readdirnames(c int) ([]string, error) { + rfi, err := f.Readdir(c) + if err != nil { + return nil, err + } + var names []string + for _, fi := range rfi { + names = append(names, fi.Name()) + } + return names, nil +} + +func (f *UnionFile) Stat() (os.FileInfo, error) { + if f.Layer != nil { + return f.Layer.Stat() + } + if f.Base != nil { + return f.Base.Stat() + } + return nil, BADFD +} + +func (f *UnionFile) Sync() (err error) { + if f.Layer != nil { + err = f.Layer.Sync() + if err == nil && f.Base != nil { + err = f.Base.Sync() + } + return err + } + if f.Base != nil { + return f.Base.Sync() + } + return BADFD +} + +func (f *UnionFile) Truncate(s int64) (err error) { + if f.Layer != nil { + err = f.Layer.Truncate(s) + if err == nil && f.Base != nil { + err = f.Base.Truncate(s) + } + return err + } + if f.Base != nil { + return f.Base.Truncate(s) + } + return BADFD +} + +func (f *UnionFile) WriteString(s string) (n int, err error) { + if f.Layer != nil { + n, err = f.Layer.WriteString(s) + if err == nil && f.Base != nil { + _, err = f.Base.WriteString(s) + } + return n, err + } + if f.Base != nil { + return f.Base.WriteString(s) + } + return 0, BADFD +} + +func copyToLayer(base Fs, layer Fs, name string) error { + bfh, err := base.Open(name) + if err != nil { + return err + } + defer bfh.Close() + + // First make sure the directory exists + exists, err := Exists(layer, filepath.Dir(name)) + if err != nil { + return err + } + if !exists { + err = layer.MkdirAll(filepath.Dir(name), 0777) // FIXME? 
+ if err != nil { + return err + } + } + + // Create the file on the overlay + lfh, err := layer.Create(name) + if err != nil { + return err + } + n, err := io.Copy(lfh, bfh) + if err != nil { + // If anything fails, clean up the file + layer.Remove(name) + lfh.Close() + return err + } + + bfi, err := bfh.Stat() + if err != nil || bfi.Size() != n { + layer.Remove(name) + lfh.Close() + return syscall.EIO + } + + err = lfh.Close() + if err != nil { + layer.Remove(name) + lfh.Close() + return err + } + return layer.Chtimes(name, bfi.ModTime(), bfi.ModTime()) +} diff --git a/vendor/github.com/spf13/afero/util.go b/vendor/github.com/spf13/afero/util.go new file mode 100644 index 000000000..4f253f481 --- /dev/null +++ b/vendor/github.com/spf13/afero/util.go @@ -0,0 +1,330 @@ +// Copyright ©2015 Steve Francia +// Portions Copyright ©2015 The Hugo Authors +// Portions Copyright 2016-present Bjørn Erik Pedersen +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package afero + +import ( + "bytes" + "fmt" + "io" + "os" + "path/filepath" + "strings" + "unicode" + + "golang.org/x/text/transform" + "golang.org/x/text/unicode/norm" +) + +// Filepath separator defined by os.Separator. +const FilePathSeparator = string(filepath.Separator) + +// Takes a reader and a path and writes the content +func (a Afero) WriteReader(path string, r io.Reader) (err error) { + return WriteReader(a.Fs, path, r) +} + +func WriteReader(fs Fs, path string, r io.Reader) (err error) { + dir, _ := filepath.Split(path) + ospath := filepath.FromSlash(dir) + + if ospath != "" { + err = fs.MkdirAll(ospath, 0777) // rwx, rw, r + if err != nil { + if err != os.ErrExist { + return err + } + } + } + + file, err := fs.Create(path) + if err != nil { + return + } + defer file.Close() + + _, err = io.Copy(file, r) + return +} + +// Same as WriteReader but checks to see if file/directory already exists. 
+func (a Afero) SafeWriteReader(path string, r io.Reader) (err error) { + return SafeWriteReader(a.Fs, path, r) +} + +func SafeWriteReader(fs Fs, path string, r io.Reader) (err error) { + dir, _ := filepath.Split(path) + ospath := filepath.FromSlash(dir) + + if ospath != "" { + err = fs.MkdirAll(ospath, 0777) // rwx, rw, r + if err != nil { + return + } + } + + exists, err := Exists(fs, path) + if err != nil { + return + } + if exists { + return fmt.Errorf("%v already exists", path) + } + + file, err := fs.Create(path) + if err != nil { + return + } + defer file.Close() + + _, err = io.Copy(file, r) + return +} + +func (a Afero) GetTempDir(subPath string) string { + return GetTempDir(a.Fs, subPath) +} + +// GetTempDir returns the default temp directory with trailing slash +// if subPath is not empty then it will be created recursively with mode 777 rwx rwx rwx +func GetTempDir(fs Fs, subPath string) string { + addSlash := func(p string) string { + if FilePathSeparator != p[len(p)-1:] { + p = p + FilePathSeparator + } + return p + } + dir := addSlash(os.TempDir()) + + if subPath != "" { + // preserve windows backslash :-( + if FilePathSeparator == "\\" { + subPath = strings.Replace(subPath, "\\", "____", -1) + } + dir = dir + UnicodeSanitize((subPath)) + if FilePathSeparator == "\\" { + dir = strings.Replace(dir, "____", "\\", -1) + } + + if exists, _ := Exists(fs, dir); exists { + return addSlash(dir) + } + + err := fs.MkdirAll(dir, 0777) + if err != nil { + panic(err) + } + dir = addSlash(dir) + } + return dir +} + +// Rewrite string to remove non-standard path characters +func UnicodeSanitize(s string) string { + source := []rune(s) + target := make([]rune, 0, len(source)) + + for _, r := range source { + if unicode.IsLetter(r) || + unicode.IsDigit(r) || + unicode.IsMark(r) || + r == '.' || + r == '/' || + r == '\\' || + r == '_' || + r == '-' || + r == '%' || + r == ' ' || + r == '#' { + target = append(target, r) + } + } + + return string(target) +} + +// Transform characters with accents into plain forms. +func NeuterAccents(s string) string { + t := transform.Chain(norm.NFD, transform.RemoveFunc(isMn), norm.NFC) + result, _, _ := transform.String(t, string(s)) + + return result +} + +func isMn(r rune) bool { + return unicode.Is(unicode.Mn, r) // Mn: nonspacing marks +} + +func (a Afero) FileContainsBytes(filename string, subslice []byte) (bool, error) { + return FileContainsBytes(a.Fs, filename, subslice) +} + +// Check if a file contains a specified byte slice. +func FileContainsBytes(fs Fs, filename string, subslice []byte) (bool, error) { + f, err := fs.Open(filename) + if err != nil { + return false, err + } + defer f.Close() + + return readerContainsAny(f, subslice), nil +} + +func (a Afero) FileContainsAnyBytes(filename string, subslices [][]byte) (bool, error) { + return FileContainsAnyBytes(a.Fs, filename, subslices) +} + +// Check if a file contains any of the specified byte slices. +func FileContainsAnyBytes(fs Fs, filename string, subslices [][]byte) (bool, error) { + f, err := fs.Open(filename) + if err != nil { + return false, err + } + defer f.Close() + + return readerContainsAny(f, subslices...), nil +} + +// readerContains reports whether any of the subslices is within r. 
+func readerContainsAny(r io.Reader, subslices ...[]byte) bool { + + if r == nil || len(subslices) == 0 { + return false + } + + largestSlice := 0 + + for _, sl := range subslices { + if len(sl) > largestSlice { + largestSlice = len(sl) + } + } + + if largestSlice == 0 { + return false + } + + bufflen := largestSlice * 4 + halflen := bufflen / 2 + buff := make([]byte, bufflen) + var err error + var n, i int + + for { + i++ + if i == 1 { + n, err = io.ReadAtLeast(r, buff[:halflen], halflen) + } else { + if i != 2 { + // shift left to catch overlapping matches + copy(buff[:], buff[halflen:]) + } + n, err = io.ReadAtLeast(r, buff[halflen:], halflen) + } + + if n > 0 { + for _, sl := range subslices { + if bytes.Contains(buff, sl) { + return true + } + } + } + + if err != nil { + break + } + } + return false +} + +func (a Afero) DirExists(path string) (bool, error) { + return DirExists(a.Fs, path) +} + +// DirExists checks if a path exists and is a directory. +func DirExists(fs Fs, path string) (bool, error) { + fi, err := fs.Stat(path) + if err == nil && fi.IsDir() { + return true, nil + } + if os.IsNotExist(err) { + return false, nil + } + return false, err +} + +func (a Afero) IsDir(path string) (bool, error) { + return IsDir(a.Fs, path) +} + +// IsDir checks if a given path is a directory. +func IsDir(fs Fs, path string) (bool, error) { + fi, err := fs.Stat(path) + if err != nil { + return false, err + } + return fi.IsDir(), nil +} + +func (a Afero) IsEmpty(path string) (bool, error) { + return IsEmpty(a.Fs, path) +} + +// IsEmpty checks if a given file or directory is empty. +func IsEmpty(fs Fs, path string) (bool, error) { + if b, _ := Exists(fs, path); !b { + return false, fmt.Errorf("%q path does not exist", path) + } + fi, err := fs.Stat(path) + if err != nil { + return false, err + } + if fi.IsDir() { + f, err := fs.Open(path) + if err != nil { + return false, err + } + defer f.Close() + list, err := f.Readdir(-1) + return len(list) == 0, nil + } + return fi.Size() == 0, nil +} + +func (a Afero) Exists(path string) (bool, error) { + return Exists(a.Fs, path) +} + +// Check if a file or directory exists. 
+func Exists(fs Fs, path string) (bool, error) { + _, err := fs.Stat(path) + if err == nil { + return true, nil + } + if os.IsNotExist(err) { + return false, nil + } + return false, err +} + +func FullBaseFsPath(basePathFs *BasePathFs, relativePath string) string { + combinedPath := filepath.Join(basePathFs.path, relativePath) + if parent, ok := basePathFs.source.(*BasePathFs); ok { + return FullBaseFsPath(parent, combinedPath) + } + + return combinedPath +} diff --git a/vendor/github.com/spf13/cast/.gitignore b/vendor/github.com/spf13/cast/.gitignore new file mode 100644 index 000000000..53053a8ac --- /dev/null +++ b/vendor/github.com/spf13/cast/.gitignore @@ -0,0 +1,25 @@ +# Compiled Object files, Static and Dynamic libs (Shared Objects) +*.o +*.a +*.so + +# Folders +_obj +_test + +# Architecture specific extensions/prefixes +*.[568vq] +[568vq].out + +*.cgo1.go +*.cgo2.c +_cgo_defun.c +_cgo_gotypes.go +_cgo_export.* + +_testmain.go + +*.exe +*.test + +*.bench diff --git a/vendor/github.com/spf13/cast/.travis.yml b/vendor/github.com/spf13/cast/.travis.yml new file mode 100644 index 000000000..833a48799 --- /dev/null +++ b/vendor/github.com/spf13/cast/.travis.yml @@ -0,0 +1,16 @@ +language: go +env: + - GO111MODULE=on +sudo: required +go: + - "1.11.x" + - "1.12.x" + - tip +os: + - linux +matrix: + allow_failures: + - go: tip + fast_finish: true +script: + - make check diff --git a/vendor/github.com/spf13/cast/LICENSE b/vendor/github.com/spf13/cast/LICENSE new file mode 100644 index 000000000..4527efb9c --- /dev/null +++ b/vendor/github.com/spf13/cast/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014 Steve Francia + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. \ No newline at end of file diff --git a/vendor/github.com/spf13/cast/Makefile b/vendor/github.com/spf13/cast/Makefile new file mode 100644 index 000000000..f01a5dbb6 --- /dev/null +++ b/vendor/github.com/spf13/cast/Makefile @@ -0,0 +1,40 @@ +GOVERSION := $(shell go version | cut -d ' ' -f 3 | cut -d '.' -f 2) + +.PHONY: check fmt lint test test-race vet test-cover-html help +.DEFAULT_GOAL := help + +check: test-race fmt vet lint ## Run tests and linters + +test: ## Run tests + go test ./... + +test-race: ## Run tests with race detector + go test -race ./... 
+ +fmt: ## Run gofmt linter +ifeq "$(GOVERSION)" "12" + @for d in `go list` ; do \ + if [ "`gofmt -l -s $$GOPATH/src/$$d | tee /dev/stderr`" ]; then \ + echo "^ improperly formatted go files" && echo && exit 1; \ + fi \ + done +endif + +lint: ## Run golint linter + @for d in `go list` ; do \ + if [ "`golint $$d | tee /dev/stderr`" ]; then \ + echo "^ golint errors!" && echo && exit 1; \ + fi \ + done + +vet: ## Run go vet linter + @if [ "`go vet | tee /dev/stderr`" ]; then \ + echo "^ go vet errors!" && echo && exit 1; \ + fi + +test-cover-html: ## Generate test coverage report + go test -coverprofile=coverage.out -covermode=count + go tool cover -func=coverage.out + +help: + @grep -E '^[a-zA-Z0-9_-]+:.*?## .*$$' $(MAKEFILE_LIST) | sort | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}' diff --git a/vendor/github.com/spf13/cast/README.md b/vendor/github.com/spf13/cast/README.md new file mode 100644 index 000000000..e6939397d --- /dev/null +++ b/vendor/github.com/spf13/cast/README.md @@ -0,0 +1,75 @@ +cast +==== +[![GoDoc](https://godoc.org/github.com/spf13/cast?status.svg)](https://godoc.org/github.com/spf13/cast) +[![Build Status](https://api.travis-ci.org/spf13/cast.svg?branch=master)](https://travis-ci.org/spf13/cast) +[![Go Report Card](https://goreportcard.com/badge/github.com/spf13/cast)](https://goreportcard.com/report/github.com/spf13/cast) + +Easy and safe casting from one type to another in Go + +Don’t Panic! ... Cast + +## What is Cast? + +Cast is a library to convert between different go types in a consistent and easy way. + +Cast provides simple functions to easily convert a number to a string, an +interface into a bool, etc. Cast does this intelligently when an obvious +conversion is possible. It doesn’t make any attempts to guess what you meant, +for example you can only convert a string to an int when it is a string +representation of an int such as “8”. Cast was developed for use in +[Hugo](http://hugo.spf13.com), a website engine which uses YAML, TOML or JSON +for meta data. + +## Why use Cast? + +When working with dynamic data in Go you often need to cast or convert the data +from one type into another. Cast goes beyond just using type assertion (though +it uses that when possible) to provide a very straightforward and convenient +library. + +If you are working with interfaces to handle things like dynamic content +you’ll need an easy way to convert an interface into a given type. This +is the library for you. + +If you are taking in data from YAML, TOML or JSON or other formats which lack +full types, then Cast is the library for you. + +## Usage + +Cast provides a handful of To_____ methods. These methods will always return +the desired type. **If input is provided that will not convert to that type, the +0 or nil value for that type will be returned**. + +Cast also provides identical methods To_____E. These return the same result as +the To_____ methods, plus an additional error which tells you if it successfully +converted. Using these methods you can tell the difference between when the +input matched the zero value or when the conversion failed and the zero value +was returned. + +The following examples are merely a sample of what is available. Please review +the code for a complete set. 
+ +### Example ‘ToString’: + + cast.ToString("mayonegg") // "mayonegg" + cast.ToString(8) // "8" + cast.ToString(8.31) // "8.31" + cast.ToString([]byte("one time")) // "one time" + cast.ToString(nil) // "" + + var foo interface{} = "one more time" + cast.ToString(foo) // "one more time" + + +### Example ‘ToInt’: + + cast.ToInt(8) // 8 + cast.ToInt(8.31) // 8 + cast.ToInt("8") // 8 + cast.ToInt(true) // 1 + cast.ToInt(false) // 0 + + var eight interface{} = 8 + cast.ToInt(eight) // 8 + cast.ToInt(nil) // 0 + diff --git a/vendor/github.com/spf13/cast/cast.go b/vendor/github.com/spf13/cast/cast.go new file mode 100644 index 000000000..9fba638d4 --- /dev/null +++ b/vendor/github.com/spf13/cast/cast.go @@ -0,0 +1,171 @@ +// Copyright © 2014 Steve Francia . +// +// Use of this source code is governed by an MIT-style +// license that can be found in the LICENSE file. + +// Package cast provides easy and safe casting in Go. +package cast + +import "time" + +// ToBool casts an interface to a bool type. +func ToBool(i interface{}) bool { + v, _ := ToBoolE(i) + return v +} + +// ToTime casts an interface to a time.Time type. +func ToTime(i interface{}) time.Time { + v, _ := ToTimeE(i) + return v +} + +// ToDuration casts an interface to a time.Duration type. +func ToDuration(i interface{}) time.Duration { + v, _ := ToDurationE(i) + return v +} + +// ToFloat64 casts an interface to a float64 type. +func ToFloat64(i interface{}) float64 { + v, _ := ToFloat64E(i) + return v +} + +// ToFloat32 casts an interface to a float32 type. +func ToFloat32(i interface{}) float32 { + v, _ := ToFloat32E(i) + return v +} + +// ToInt64 casts an interface to an int64 type. +func ToInt64(i interface{}) int64 { + v, _ := ToInt64E(i) + return v +} + +// ToInt32 casts an interface to an int32 type. +func ToInt32(i interface{}) int32 { + v, _ := ToInt32E(i) + return v +} + +// ToInt16 casts an interface to an int16 type. +func ToInt16(i interface{}) int16 { + v, _ := ToInt16E(i) + return v +} + +// ToInt8 casts an interface to an int8 type. +func ToInt8(i interface{}) int8 { + v, _ := ToInt8E(i) + return v +} + +// ToInt casts an interface to an int type. +func ToInt(i interface{}) int { + v, _ := ToIntE(i) + return v +} + +// ToUint casts an interface to a uint type. +func ToUint(i interface{}) uint { + v, _ := ToUintE(i) + return v +} + +// ToUint64 casts an interface to a uint64 type. +func ToUint64(i interface{}) uint64 { + v, _ := ToUint64E(i) + return v +} + +// ToUint32 casts an interface to a uint32 type. +func ToUint32(i interface{}) uint32 { + v, _ := ToUint32E(i) + return v +} + +// ToUint16 casts an interface to a uint16 type. +func ToUint16(i interface{}) uint16 { + v, _ := ToUint16E(i) + return v +} + +// ToUint8 casts an interface to a uint8 type. +func ToUint8(i interface{}) uint8 { + v, _ := ToUint8E(i) + return v +} + +// ToString casts an interface to a string type. +func ToString(i interface{}) string { + v, _ := ToStringE(i) + return v +} + +// ToStringMapString casts an interface to a map[string]string type. +func ToStringMapString(i interface{}) map[string]string { + v, _ := ToStringMapStringE(i) + return v +} + +// ToStringMapStringSlice casts an interface to a map[string][]string type. +func ToStringMapStringSlice(i interface{}) map[string][]string { + v, _ := ToStringMapStringSliceE(i) + return v +} + +// ToStringMapBool casts an interface to a map[string]bool type. 
+func ToStringMapBool(i interface{}) map[string]bool { + v, _ := ToStringMapBoolE(i) + return v +} + +// ToStringMapInt casts an interface to a map[string]int type. +func ToStringMapInt(i interface{}) map[string]int { + v, _ := ToStringMapIntE(i) + return v +} + +// ToStringMapInt64 casts an interface to a map[string]int64 type. +func ToStringMapInt64(i interface{}) map[string]int64 { + v, _ := ToStringMapInt64E(i) + return v +} + +// ToStringMap casts an interface to a map[string]interface{} type. +func ToStringMap(i interface{}) map[string]interface{} { + v, _ := ToStringMapE(i) + return v +} + +// ToSlice casts an interface to a []interface{} type. +func ToSlice(i interface{}) []interface{} { + v, _ := ToSliceE(i) + return v +} + +// ToBoolSlice casts an interface to a []bool type. +func ToBoolSlice(i interface{}) []bool { + v, _ := ToBoolSliceE(i) + return v +} + +// ToStringSlice casts an interface to a []string type. +func ToStringSlice(i interface{}) []string { + v, _ := ToStringSliceE(i) + return v +} + +// ToIntSlice casts an interface to a []int type. +func ToIntSlice(i interface{}) []int { + v, _ := ToIntSliceE(i) + return v +} + +// ToDurationSlice casts an interface to a []time.Duration type. +func ToDurationSlice(i interface{}) []time.Duration { + v, _ := ToDurationSliceE(i) + return v +} diff --git a/vendor/github.com/spf13/cast/caste.go b/vendor/github.com/spf13/cast/caste.go new file mode 100644 index 000000000..70c7291be --- /dev/null +++ b/vendor/github.com/spf13/cast/caste.go @@ -0,0 +1,1249 @@ +// Copyright © 2014 Steve Francia . +// +// Use of this source code is governed by an MIT-style +// license that can be found in the LICENSE file. + +package cast + +import ( + "encoding/json" + "errors" + "fmt" + "html/template" + "reflect" + "strconv" + "strings" + "time" +) + +var errNegativeNotAllowed = errors.New("unable to cast negative value") + +// ToTimeE casts an interface to a time.Time type. +func ToTimeE(i interface{}) (tim time.Time, err error) { + i = indirect(i) + + switch v := i.(type) { + case time.Time: + return v, nil + case string: + return StringToDate(v) + case int: + return time.Unix(int64(v), 0), nil + case int64: + return time.Unix(v, 0), nil + case int32: + return time.Unix(int64(v), 0), nil + case uint: + return time.Unix(int64(v), 0), nil + case uint64: + return time.Unix(int64(v), 0), nil + case uint32: + return time.Unix(int64(v), 0), nil + default: + return time.Time{}, fmt.Errorf("unable to cast %#v of type %T to Time", i, i) + } +} + +// ToDurationE casts an interface to a time.Duration type. +func ToDurationE(i interface{}) (d time.Duration, err error) { + i = indirect(i) + + switch s := i.(type) { + case time.Duration: + return s, nil + case int, int64, int32, int16, int8, uint, uint64, uint32, uint16, uint8: + d = time.Duration(ToInt64(s)) + return + case float32, float64: + d = time.Duration(ToFloat64(s)) + return + case string: + if strings.ContainsAny(s, "nsuµmh") { + d, err = time.ParseDuration(s) + } else { + d, err = time.ParseDuration(s + "ns") + } + return + default: + err = fmt.Errorf("unable to cast %#v of type %T to Duration", i, i) + return + } +} + +// ToBoolE casts an interface to a bool type. 
+func ToBoolE(i interface{}) (bool, error) { + i = indirect(i) + + switch b := i.(type) { + case bool: + return b, nil + case nil: + return false, nil + case int: + if i.(int) != 0 { + return true, nil + } + return false, nil + case string: + return strconv.ParseBool(i.(string)) + default: + return false, fmt.Errorf("unable to cast %#v of type %T to bool", i, i) + } +} + +// ToFloat64E casts an interface to a float64 type. +func ToFloat64E(i interface{}) (float64, error) { + i = indirect(i) + + switch s := i.(type) { + case float64: + return s, nil + case float32: + return float64(s), nil + case int: + return float64(s), nil + case int64: + return float64(s), nil + case int32: + return float64(s), nil + case int16: + return float64(s), nil + case int8: + return float64(s), nil + case uint: + return float64(s), nil + case uint64: + return float64(s), nil + case uint32: + return float64(s), nil + case uint16: + return float64(s), nil + case uint8: + return float64(s), nil + case string: + v, err := strconv.ParseFloat(s, 64) + if err == nil { + return v, nil + } + return 0, fmt.Errorf("unable to cast %#v of type %T to float64", i, i) + case bool: + if s { + return 1, nil + } + return 0, nil + default: + return 0, fmt.Errorf("unable to cast %#v of type %T to float64", i, i) + } +} + +// ToFloat32E casts an interface to a float32 type. +func ToFloat32E(i interface{}) (float32, error) { + i = indirect(i) + + switch s := i.(type) { + case float64: + return float32(s), nil + case float32: + return s, nil + case int: + return float32(s), nil + case int64: + return float32(s), nil + case int32: + return float32(s), nil + case int16: + return float32(s), nil + case int8: + return float32(s), nil + case uint: + return float32(s), nil + case uint64: + return float32(s), nil + case uint32: + return float32(s), nil + case uint16: + return float32(s), nil + case uint8: + return float32(s), nil + case string: + v, err := strconv.ParseFloat(s, 32) + if err == nil { + return float32(v), nil + } + return 0, fmt.Errorf("unable to cast %#v of type %T to float32", i, i) + case bool: + if s { + return 1, nil + } + return 0, nil + default: + return 0, fmt.Errorf("unable to cast %#v of type %T to float32", i, i) + } +} + +// ToInt64E casts an interface to an int64 type. +func ToInt64E(i interface{}) (int64, error) { + i = indirect(i) + + switch s := i.(type) { + case int: + return int64(s), nil + case int64: + return s, nil + case int32: + return int64(s), nil + case int16: + return int64(s), nil + case int8: + return int64(s), nil + case uint: + return int64(s), nil + case uint64: + return int64(s), nil + case uint32: + return int64(s), nil + case uint16: + return int64(s), nil + case uint8: + return int64(s), nil + case float64: + return int64(s), nil + case float32: + return int64(s), nil + case string: + v, err := strconv.ParseInt(s, 0, 0) + if err == nil { + return v, nil + } + return 0, fmt.Errorf("unable to cast %#v of type %T to int64", i, i) + case bool: + if s { + return 1, nil + } + return 0, nil + case nil: + return 0, nil + default: + return 0, fmt.Errorf("unable to cast %#v of type %T to int64", i, i) + } +} + +// ToInt32E casts an interface to an int32 type. 
+func ToInt32E(i interface{}) (int32, error) { + i = indirect(i) + + switch s := i.(type) { + case int: + return int32(s), nil + case int64: + return int32(s), nil + case int32: + return s, nil + case int16: + return int32(s), nil + case int8: + return int32(s), nil + case uint: + return int32(s), nil + case uint64: + return int32(s), nil + case uint32: + return int32(s), nil + case uint16: + return int32(s), nil + case uint8: + return int32(s), nil + case float64: + return int32(s), nil + case float32: + return int32(s), nil + case string: + v, err := strconv.ParseInt(s, 0, 0) + if err == nil { + return int32(v), nil + } + return 0, fmt.Errorf("unable to cast %#v of type %T to int32", i, i) + case bool: + if s { + return 1, nil + } + return 0, nil + case nil: + return 0, nil + default: + return 0, fmt.Errorf("unable to cast %#v of type %T to int32", i, i) + } +} + +// ToInt16E casts an interface to an int16 type. +func ToInt16E(i interface{}) (int16, error) { + i = indirect(i) + + switch s := i.(type) { + case int: + return int16(s), nil + case int64: + return int16(s), nil + case int32: + return int16(s), nil + case int16: + return s, nil + case int8: + return int16(s), nil + case uint: + return int16(s), nil + case uint64: + return int16(s), nil + case uint32: + return int16(s), nil + case uint16: + return int16(s), nil + case uint8: + return int16(s), nil + case float64: + return int16(s), nil + case float32: + return int16(s), nil + case string: + v, err := strconv.ParseInt(s, 0, 0) + if err == nil { + return int16(v), nil + } + return 0, fmt.Errorf("unable to cast %#v of type %T to int16", i, i) + case bool: + if s { + return 1, nil + } + return 0, nil + case nil: + return 0, nil + default: + return 0, fmt.Errorf("unable to cast %#v of type %T to int16", i, i) + } +} + +// ToInt8E casts an interface to an int8 type. +func ToInt8E(i interface{}) (int8, error) { + i = indirect(i) + + switch s := i.(type) { + case int: + return int8(s), nil + case int64: + return int8(s), nil + case int32: + return int8(s), nil + case int16: + return int8(s), nil + case int8: + return s, nil + case uint: + return int8(s), nil + case uint64: + return int8(s), nil + case uint32: + return int8(s), nil + case uint16: + return int8(s), nil + case uint8: + return int8(s), nil + case float64: + return int8(s), nil + case float32: + return int8(s), nil + case string: + v, err := strconv.ParseInt(s, 0, 0) + if err == nil { + return int8(v), nil + } + return 0, fmt.Errorf("unable to cast %#v of type %T to int8", i, i) + case bool: + if s { + return 1, nil + } + return 0, nil + case nil: + return 0, nil + default: + return 0, fmt.Errorf("unable to cast %#v of type %T to int8", i, i) + } +} + +// ToIntE casts an interface to an int type. 
+func ToIntE(i interface{}) (int, error) { + i = indirect(i) + + switch s := i.(type) { + case int: + return s, nil + case int64: + return int(s), nil + case int32: + return int(s), nil + case int16: + return int(s), nil + case int8: + return int(s), nil + case uint: + return int(s), nil + case uint64: + return int(s), nil + case uint32: + return int(s), nil + case uint16: + return int(s), nil + case uint8: + return int(s), nil + case float64: + return int(s), nil + case float32: + return int(s), nil + case string: + v, err := strconv.ParseInt(s, 0, 0) + if err == nil { + return int(v), nil + } + return 0, fmt.Errorf("unable to cast %#v of type %T to int", i, i) + case bool: + if s { + return 1, nil + } + return 0, nil + case nil: + return 0, nil + default: + return 0, fmt.Errorf("unable to cast %#v of type %T to int", i, i) + } +} + +// ToUintE casts an interface to a uint type. +func ToUintE(i interface{}) (uint, error) { + i = indirect(i) + + switch s := i.(type) { + case string: + v, err := strconv.ParseUint(s, 0, 0) + if err == nil { + return uint(v), nil + } + return 0, fmt.Errorf("unable to cast %#v to uint: %s", i, err) + case int: + if s < 0 { + return 0, errNegativeNotAllowed + } + return uint(s), nil + case int64: + if s < 0 { + return 0, errNegativeNotAllowed + } + return uint(s), nil + case int32: + if s < 0 { + return 0, errNegativeNotAllowed + } + return uint(s), nil + case int16: + if s < 0 { + return 0, errNegativeNotAllowed + } + return uint(s), nil + case int8: + if s < 0 { + return 0, errNegativeNotAllowed + } + return uint(s), nil + case uint: + return s, nil + case uint64: + return uint(s), nil + case uint32: + return uint(s), nil + case uint16: + return uint(s), nil + case uint8: + return uint(s), nil + case float64: + if s < 0 { + return 0, errNegativeNotAllowed + } + return uint(s), nil + case float32: + if s < 0 { + return 0, errNegativeNotAllowed + } + return uint(s), nil + case bool: + if s { + return 1, nil + } + return 0, nil + case nil: + return 0, nil + default: + return 0, fmt.Errorf("unable to cast %#v of type %T to uint", i, i) + } +} + +// ToUint64E casts an interface to a uint64 type. +func ToUint64E(i interface{}) (uint64, error) { + i = indirect(i) + + switch s := i.(type) { + case string: + v, err := strconv.ParseUint(s, 0, 64) + if err == nil { + return v, nil + } + return 0, fmt.Errorf("unable to cast %#v to uint64: %s", i, err) + case int: + if s < 0 { + return 0, errNegativeNotAllowed + } + return uint64(s), nil + case int64: + if s < 0 { + return 0, errNegativeNotAllowed + } + return uint64(s), nil + case int32: + if s < 0 { + return 0, errNegativeNotAllowed + } + return uint64(s), nil + case int16: + if s < 0 { + return 0, errNegativeNotAllowed + } + return uint64(s), nil + case int8: + if s < 0 { + return 0, errNegativeNotAllowed + } + return uint64(s), nil + case uint: + return uint64(s), nil + case uint64: + return s, nil + case uint32: + return uint64(s), nil + case uint16: + return uint64(s), nil + case uint8: + return uint64(s), nil + case float32: + if s < 0 { + return 0, errNegativeNotAllowed + } + return uint64(s), nil + case float64: + if s < 0 { + return 0, errNegativeNotAllowed + } + return uint64(s), nil + case bool: + if s { + return 1, nil + } + return 0, nil + case nil: + return 0, nil + default: + return 0, fmt.Errorf("unable to cast %#v of type %T to uint64", i, i) + } +} + +// ToUint32E casts an interface to a uint32 type. 
+func ToUint32E(i interface{}) (uint32, error) { + i = indirect(i) + + switch s := i.(type) { + case string: + v, err := strconv.ParseUint(s, 0, 32) + if err == nil { + return uint32(v), nil + } + return 0, fmt.Errorf("unable to cast %#v to uint32: %s", i, err) + case int: + if s < 0 { + return 0, errNegativeNotAllowed + } + return uint32(s), nil + case int64: + if s < 0 { + return 0, errNegativeNotAllowed + } + return uint32(s), nil + case int32: + if s < 0 { + return 0, errNegativeNotAllowed + } + return uint32(s), nil + case int16: + if s < 0 { + return 0, errNegativeNotAllowed + } + return uint32(s), nil + case int8: + if s < 0 { + return 0, errNegativeNotAllowed + } + return uint32(s), nil + case uint: + return uint32(s), nil + case uint64: + return uint32(s), nil + case uint32: + return s, nil + case uint16: + return uint32(s), nil + case uint8: + return uint32(s), nil + case float64: + if s < 0 { + return 0, errNegativeNotAllowed + } + return uint32(s), nil + case float32: + if s < 0 { + return 0, errNegativeNotAllowed + } + return uint32(s), nil + case bool: + if s { + return 1, nil + } + return 0, nil + case nil: + return 0, nil + default: + return 0, fmt.Errorf("unable to cast %#v of type %T to uint32", i, i) + } +} + +// ToUint16E casts an interface to a uint16 type. +func ToUint16E(i interface{}) (uint16, error) { + i = indirect(i) + + switch s := i.(type) { + case string: + v, err := strconv.ParseUint(s, 0, 16) + if err == nil { + return uint16(v), nil + } + return 0, fmt.Errorf("unable to cast %#v to uint16: %s", i, err) + case int: + if s < 0 { + return 0, errNegativeNotAllowed + } + return uint16(s), nil + case int64: + if s < 0 { + return 0, errNegativeNotAllowed + } + return uint16(s), nil + case int32: + if s < 0 { + return 0, errNegativeNotAllowed + } + return uint16(s), nil + case int16: + if s < 0 { + return 0, errNegativeNotAllowed + } + return uint16(s), nil + case int8: + if s < 0 { + return 0, errNegativeNotAllowed + } + return uint16(s), nil + case uint: + return uint16(s), nil + case uint64: + return uint16(s), nil + case uint32: + return uint16(s), nil + case uint16: + return s, nil + case uint8: + return uint16(s), nil + case float64: + if s < 0 { + return 0, errNegativeNotAllowed + } + return uint16(s), nil + case float32: + if s < 0 { + return 0, errNegativeNotAllowed + } + return uint16(s), nil + case bool: + if s { + return 1, nil + } + return 0, nil + case nil: + return 0, nil + default: + return 0, fmt.Errorf("unable to cast %#v of type %T to uint16", i, i) + } +} + +// ToUint8E casts an interface to a uint type. 
+func ToUint8E(i interface{}) (uint8, error) { + i = indirect(i) + + switch s := i.(type) { + case string: + v, err := strconv.ParseUint(s, 0, 8) + if err == nil { + return uint8(v), nil + } + return 0, fmt.Errorf("unable to cast %#v to uint8: %s", i, err) + case int: + if s < 0 { + return 0, errNegativeNotAllowed + } + return uint8(s), nil + case int64: + if s < 0 { + return 0, errNegativeNotAllowed + } + return uint8(s), nil + case int32: + if s < 0 { + return 0, errNegativeNotAllowed + } + return uint8(s), nil + case int16: + if s < 0 { + return 0, errNegativeNotAllowed + } + return uint8(s), nil + case int8: + if s < 0 { + return 0, errNegativeNotAllowed + } + return uint8(s), nil + case uint: + return uint8(s), nil + case uint64: + return uint8(s), nil + case uint32: + return uint8(s), nil + case uint16: + return uint8(s), nil + case uint8: + return s, nil + case float64: + if s < 0 { + return 0, errNegativeNotAllowed + } + return uint8(s), nil + case float32: + if s < 0 { + return 0, errNegativeNotAllowed + } + return uint8(s), nil + case bool: + if s { + return 1, nil + } + return 0, nil + case nil: + return 0, nil + default: + return 0, fmt.Errorf("unable to cast %#v of type %T to uint8", i, i) + } +} + +// From html/template/content.go +// Copyright 2011 The Go Authors. All rights reserved. +// indirect returns the value, after dereferencing as many times +// as necessary to reach the base type (or nil). +func indirect(a interface{}) interface{} { + if a == nil { + return nil + } + if t := reflect.TypeOf(a); t.Kind() != reflect.Ptr { + // Avoid creating a reflect.Value if it's not a pointer. + return a + } + v := reflect.ValueOf(a) + for v.Kind() == reflect.Ptr && !v.IsNil() { + v = v.Elem() + } + return v.Interface() +} + +// From html/template/content.go +// Copyright 2011 The Go Authors. All rights reserved. +// indirectToStringerOrError returns the value, after dereferencing as many times +// as necessary to reach the base type (or nil) or an implementation of fmt.Stringer +// or error, +func indirectToStringerOrError(a interface{}) interface{} { + if a == nil { + return nil + } + + var errorType = reflect.TypeOf((*error)(nil)).Elem() + var fmtStringerType = reflect.TypeOf((*fmt.Stringer)(nil)).Elem() + + v := reflect.ValueOf(a) + for !v.Type().Implements(fmtStringerType) && !v.Type().Implements(errorType) && v.Kind() == reflect.Ptr && !v.IsNil() { + v = v.Elem() + } + return v.Interface() +} + +// ToStringE casts an interface to a string type. 
+func ToStringE(i interface{}) (string, error) { + i = indirectToStringerOrError(i) + + switch s := i.(type) { + case string: + return s, nil + case bool: + return strconv.FormatBool(s), nil + case float64: + return strconv.FormatFloat(s, 'f', -1, 64), nil + case float32: + return strconv.FormatFloat(float64(s), 'f', -1, 32), nil + case int: + return strconv.Itoa(s), nil + case int64: + return strconv.FormatInt(s, 10), nil + case int32: + return strconv.Itoa(int(s)), nil + case int16: + return strconv.FormatInt(int64(s), 10), nil + case int8: + return strconv.FormatInt(int64(s), 10), nil + case uint: + return strconv.FormatUint(uint64(s), 10), nil + case uint64: + return strconv.FormatUint(uint64(s), 10), nil + case uint32: + return strconv.FormatUint(uint64(s), 10), nil + case uint16: + return strconv.FormatUint(uint64(s), 10), nil + case uint8: + return strconv.FormatUint(uint64(s), 10), nil + case []byte: + return string(s), nil + case template.HTML: + return string(s), nil + case template.URL: + return string(s), nil + case template.JS: + return string(s), nil + case template.CSS: + return string(s), nil + case template.HTMLAttr: + return string(s), nil + case nil: + return "", nil + case fmt.Stringer: + return s.String(), nil + case error: + return s.Error(), nil + default: + return "", fmt.Errorf("unable to cast %#v of type %T to string", i, i) + } +} + +// ToStringMapStringE casts an interface to a map[string]string type. +func ToStringMapStringE(i interface{}) (map[string]string, error) { + var m = map[string]string{} + + switch v := i.(type) { + case map[string]string: + return v, nil + case map[string]interface{}: + for k, val := range v { + m[ToString(k)] = ToString(val) + } + return m, nil + case map[interface{}]string: + for k, val := range v { + m[ToString(k)] = ToString(val) + } + return m, nil + case map[interface{}]interface{}: + for k, val := range v { + m[ToString(k)] = ToString(val) + } + return m, nil + case string: + err := jsonStringToObject(v, &m) + return m, err + default: + return m, fmt.Errorf("unable to cast %#v of type %T to map[string]string", i, i) + } +} + +// ToStringMapStringSliceE casts an interface to a map[string][]string type. 
+func ToStringMapStringSliceE(i interface{}) (map[string][]string, error) { + var m = map[string][]string{} + + switch v := i.(type) { + case map[string][]string: + return v, nil + case map[string][]interface{}: + for k, val := range v { + m[ToString(k)] = ToStringSlice(val) + } + return m, nil + case map[string]string: + for k, val := range v { + m[ToString(k)] = []string{val} + } + case map[string]interface{}: + for k, val := range v { + switch vt := val.(type) { + case []interface{}: + m[ToString(k)] = ToStringSlice(vt) + case []string: + m[ToString(k)] = vt + default: + m[ToString(k)] = []string{ToString(val)} + } + } + return m, nil + case map[interface{}][]string: + for k, val := range v { + m[ToString(k)] = ToStringSlice(val) + } + return m, nil + case map[interface{}]string: + for k, val := range v { + m[ToString(k)] = ToStringSlice(val) + } + return m, nil + case map[interface{}][]interface{}: + for k, val := range v { + m[ToString(k)] = ToStringSlice(val) + } + return m, nil + case map[interface{}]interface{}: + for k, val := range v { + key, err := ToStringE(k) + if err != nil { + return m, fmt.Errorf("unable to cast %#v of type %T to map[string][]string", i, i) + } + value, err := ToStringSliceE(val) + if err != nil { + return m, fmt.Errorf("unable to cast %#v of type %T to map[string][]string", i, i) + } + m[key] = value + } + case string: + err := jsonStringToObject(v, &m) + return m, err + default: + return m, fmt.Errorf("unable to cast %#v of type %T to map[string][]string", i, i) + } + return m, nil +} + +// ToStringMapBoolE casts an interface to a map[string]bool type. +func ToStringMapBoolE(i interface{}) (map[string]bool, error) { + var m = map[string]bool{} + + switch v := i.(type) { + case map[interface{}]interface{}: + for k, val := range v { + m[ToString(k)] = ToBool(val) + } + return m, nil + case map[string]interface{}: + for k, val := range v { + m[ToString(k)] = ToBool(val) + } + return m, nil + case map[string]bool: + return v, nil + case string: + err := jsonStringToObject(v, &m) + return m, err + default: + return m, fmt.Errorf("unable to cast %#v of type %T to map[string]bool", i, i) + } +} + +// ToStringMapE casts an interface to a map[string]interface{} type. +func ToStringMapE(i interface{}) (map[string]interface{}, error) { + var m = map[string]interface{}{} + + switch v := i.(type) { + case map[interface{}]interface{}: + for k, val := range v { + m[ToString(k)] = val + } + return m, nil + case map[string]interface{}: + return v, nil + case string: + err := jsonStringToObject(v, &m) + return m, err + default: + return m, fmt.Errorf("unable to cast %#v of type %T to map[string]interface{}", i, i) + } +} + +// ToStringMapIntE casts an interface to a map[string]int{} type. 
+func ToStringMapIntE(i interface{}) (map[string]int, error) { + var m = map[string]int{} + if i == nil { + return m, fmt.Errorf("unable to cast %#v of type %T to map[string]int", i, i) + } + + switch v := i.(type) { + case map[interface{}]interface{}: + for k, val := range v { + m[ToString(k)] = ToInt(val) + } + return m, nil + case map[string]interface{}: + for k, val := range v { + m[k] = ToInt(val) + } + return m, nil + case map[string]int: + return v, nil + case string: + err := jsonStringToObject(v, &m) + return m, err + } + + if reflect.TypeOf(i).Kind() != reflect.Map { + return m, fmt.Errorf("unable to cast %#v of type %T to map[string]int", i, i) + } + + mVal := reflect.ValueOf(m) + v := reflect.ValueOf(i) + for _, keyVal := range v.MapKeys() { + val, err := ToIntE(v.MapIndex(keyVal).Interface()) + if err != nil { + return m, fmt.Errorf("unable to cast %#v of type %T to map[string]int", i, i) + } + mVal.SetMapIndex(keyVal, reflect.ValueOf(val)) + } + return m, nil +} + +// ToStringMapInt64E casts an interface to a map[string]int64{} type. +func ToStringMapInt64E(i interface{}) (map[string]int64, error) { + var m = map[string]int64{} + if i == nil { + return m, fmt.Errorf("unable to cast %#v of type %T to map[string]int64", i, i) + } + + switch v := i.(type) { + case map[interface{}]interface{}: + for k, val := range v { + m[ToString(k)] = ToInt64(val) + } + return m, nil + case map[string]interface{}: + for k, val := range v { + m[k] = ToInt64(val) + } + return m, nil + case map[string]int64: + return v, nil + case string: + err := jsonStringToObject(v, &m) + return m, err + } + + if reflect.TypeOf(i).Kind() != reflect.Map { + return m, fmt.Errorf("unable to cast %#v of type %T to map[string]int64", i, i) + } + mVal := reflect.ValueOf(m) + v := reflect.ValueOf(i) + for _, keyVal := range v.MapKeys() { + val, err := ToInt64E(v.MapIndex(keyVal).Interface()) + if err != nil { + return m, fmt.Errorf("unable to cast %#v of type %T to map[string]int64", i, i) + } + mVal.SetMapIndex(keyVal, reflect.ValueOf(val)) + } + return m, nil +} + +// ToSliceE casts an interface to a []interface{} type. +func ToSliceE(i interface{}) ([]interface{}, error) { + var s []interface{} + + switch v := i.(type) { + case []interface{}: + return append(s, v...), nil + case []map[string]interface{}: + for _, u := range v { + s = append(s, u) + } + return s, nil + default: + return s, fmt.Errorf("unable to cast %#v of type %T to []interface{}", i, i) + } +} + +// ToBoolSliceE casts an interface to a []bool type. +func ToBoolSliceE(i interface{}) ([]bool, error) { + if i == nil { + return []bool{}, fmt.Errorf("unable to cast %#v of type %T to []bool", i, i) + } + + switch v := i.(type) { + case []bool: + return v, nil + } + + kind := reflect.TypeOf(i).Kind() + switch kind { + case reflect.Slice, reflect.Array: + s := reflect.ValueOf(i) + a := make([]bool, s.Len()) + for j := 0; j < s.Len(); j++ { + val, err := ToBoolE(s.Index(j).Interface()) + if err != nil { + return []bool{}, fmt.Errorf("unable to cast %#v of type %T to []bool", i, i) + } + a[j] = val + } + return a, nil + default: + return []bool{}, fmt.Errorf("unable to cast %#v of type %T to []bool", i, i) + } +} + +// ToStringSliceE casts an interface to a []string type. 
+func ToStringSliceE(i interface{}) ([]string, error) { + var a []string + + switch v := i.(type) { + case []interface{}: + for _, u := range v { + a = append(a, ToString(u)) + } + return a, nil + case []string: + return v, nil + case string: + return strings.Fields(v), nil + case interface{}: + str, err := ToStringE(v) + if err != nil { + return a, fmt.Errorf("unable to cast %#v of type %T to []string", i, i) + } + return []string{str}, nil + default: + return a, fmt.Errorf("unable to cast %#v of type %T to []string", i, i) + } +} + +// ToIntSliceE casts an interface to a []int type. +func ToIntSliceE(i interface{}) ([]int, error) { + if i == nil { + return []int{}, fmt.Errorf("unable to cast %#v of type %T to []int", i, i) + } + + switch v := i.(type) { + case []int: + return v, nil + } + + kind := reflect.TypeOf(i).Kind() + switch kind { + case reflect.Slice, reflect.Array: + s := reflect.ValueOf(i) + a := make([]int, s.Len()) + for j := 0; j < s.Len(); j++ { + val, err := ToIntE(s.Index(j).Interface()) + if err != nil { + return []int{}, fmt.Errorf("unable to cast %#v of type %T to []int", i, i) + } + a[j] = val + } + return a, nil + default: + return []int{}, fmt.Errorf("unable to cast %#v of type %T to []int", i, i) + } +} + +// ToDurationSliceE casts an interface to a []time.Duration type. +func ToDurationSliceE(i interface{}) ([]time.Duration, error) { + if i == nil { + return []time.Duration{}, fmt.Errorf("unable to cast %#v of type %T to []time.Duration", i, i) + } + + switch v := i.(type) { + case []time.Duration: + return v, nil + } + + kind := reflect.TypeOf(i).Kind() + switch kind { + case reflect.Slice, reflect.Array: + s := reflect.ValueOf(i) + a := make([]time.Duration, s.Len()) + for j := 0; j < s.Len(); j++ { + val, err := ToDurationE(s.Index(j).Interface()) + if err != nil { + return []time.Duration{}, fmt.Errorf("unable to cast %#v of type %T to []time.Duration", i, i) + } + a[j] = val + } + return a, nil + default: + return []time.Duration{}, fmt.Errorf("unable to cast %#v of type %T to []time.Duration", i, i) + } +} + +// StringToDate attempts to parse a string into a time.Time type using a +// predefined list of formats. If no suitable format is found, an error is +// returned. +func StringToDate(s string) (time.Time, error) { + return parseDateWith(s, []string{ + time.RFC3339, + "2006-01-02T15:04:05", // iso8601 without timezone + time.RFC1123Z, + time.RFC1123, + time.RFC822Z, + time.RFC822, + time.RFC850, + time.ANSIC, + time.UnixDate, + time.RubyDate, + "2006-01-02 15:04:05.999999999 -0700 MST", // Time.String() + "2006-01-02", + "02 Jan 2006", + "2006-01-02T15:04:05-0700", // RFC3339 without timezone hh:mm colon + "2006-01-02 15:04:05 -07:00", + "2006-01-02 15:04:05 -0700", + "2006-01-02 15:04:05Z07:00", // RFC3339 without T + "2006-01-02 15:04:05Z0700", // RFC3339 without T or timezone hh:mm colon + "2006-01-02 15:04:05", + time.Kitchen, + time.Stamp, + time.StampMilli, + time.StampMicro, + time.StampNano, + }) +} + +func parseDateWith(s string, dates []string) (d time.Time, e error) { + for _, dateType := range dates { + if d, e = time.Parse(dateType, s); e == nil { + return + } + } + return d, fmt.Errorf("unable to parse date: %s", s) +} + +// jsonStringToObject attempts to unmarshall a string as JSON into +// the object passed as pointer. 
+func jsonStringToObject(s string, v interface{}) error { + data := []byte(s) + return json.Unmarshal(data, v) +} diff --git a/vendor/github.com/spf13/cast/go.mod b/vendor/github.com/spf13/cast/go.mod new file mode 100644 index 000000000..c1c0232dd --- /dev/null +++ b/vendor/github.com/spf13/cast/go.mod @@ -0,0 +1,7 @@ +module github.com/spf13/cast + +require ( + github.com/davecgh/go-spew v1.1.1 // indirect + github.com/pmezard/go-difflib v1.0.0 // indirect + github.com/stretchr/testify v1.2.2 +) diff --git a/vendor/github.com/spf13/cast/go.sum b/vendor/github.com/spf13/cast/go.sum new file mode 100644 index 000000000..e03ee77d9 --- /dev/null +++ b/vendor/github.com/spf13/cast/go.sum @@ -0,0 +1,6 @@ +github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/stretchr/testify v1.2.2 h1:bSDNvY7ZPG5RlJ8otE/7V6gMiyenm9RtJ7IUVIAoJ1w= +github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= diff --git a/vendor/github.com/spf13/cobra/.gitignore b/vendor/github.com/spf13/cobra/.gitignore new file mode 100644 index 000000000..c7b459e4d --- /dev/null +++ b/vendor/github.com/spf13/cobra/.gitignore @@ -0,0 +1,39 @@ +# Compiled Object files, Static and Dynamic libs (Shared Objects) +*.o +*.a +*.so + +# Folders +_obj +_test + +# Architecture specific extensions/prefixes +*.[568vq] +[568vq].out + +*.cgo1.go +*.cgo2.c +_cgo_defun.c +_cgo_gotypes.go +_cgo_export.* + +_testmain.go + +# Vim files https://github.com/github/gitignore/blob/master/Global/Vim.gitignore +# swap +[._]*.s[a-w][a-z] +[._]s[a-w][a-z] +# session +Session.vim +# temporary +.netrwhist +*~ +# auto-generated tag files +tags + +*.exe +cobra.test +bin + +.idea/ +*.iml diff --git a/vendor/github.com/spf13/cobra/.golangci.yml b/vendor/github.com/spf13/cobra/.golangci.yml new file mode 100644 index 000000000..0d6e61793 --- /dev/null +++ b/vendor/github.com/spf13/cobra/.golangci.yml @@ -0,0 +1,48 @@ +run: + deadline: 5m + +linters: + disable-all: true + enable: + #- bodyclose + - deadcode + #- depguard + #- dogsled + #- dupl + - errcheck + #- exhaustive + #- funlen + - gas + #- gochecknoinits + - goconst + #- gocritic + #- gocyclo + #- gofmt + - goimports + - golint + #- gomnd + #- goprintffuncname + #- gosec + #- gosimple + - govet + - ineffassign + - interfacer + #- lll + - maligned + - megacheck + #- misspell + #- nakedret + #- noctx + #- nolintlint + #- rowserrcheck + #- scopelint + #- staticcheck + - structcheck + #- stylecheck + #- typecheck + - unconvert + #- unparam + #- unused + - varcheck + #- whitespace + fast: false diff --git a/vendor/github.com/spf13/cobra/.mailmap b/vendor/github.com/spf13/cobra/.mailmap new file mode 100644 index 000000000..94ec53068 --- /dev/null +++ b/vendor/github.com/spf13/cobra/.mailmap @@ -0,0 +1,3 @@ +Steve Francia +Bjørn Erik Pedersen +Fabiano Franz diff --git a/vendor/github.com/spf13/cobra/CHANGELOG.md b/vendor/github.com/spf13/cobra/CHANGELOG.md new file mode 100644 index 000000000..8a23b4f85 --- /dev/null +++ b/vendor/github.com/spf13/cobra/CHANGELOG.md @@ -0,0 +1,51 @@ +# Cobra Changelog + +## v1.1.3 + +* **Fix:** release-branch.cobra1.1 only: Revert "Deprecate Go < 1.14" to maintain backward compatibility + +## v1.1.2 + +### Notable Changes + +* Bump license year to 
2021 in golden files (#1309) @Bowbaq +* Enhance PowerShell completion with custom comp (#1208) @Luap99 +* Update gopkg.in/yaml.v2 to v2.4.0: The previous breaking change in yaml.v2 v2.3.0 has been reverted, see go-yaml/yaml#670 +* Documentation readability improvements (#1228 etc.) @zaataylor etc. +* Use golangci-lint: Repair warnings and errors resulting from linting (#1044) @umarcor + +## v1.1.1 + +* **Fix:** yaml.v2 2.3.0 contained an unintended breaking change. This release reverts to yaml.v2 v2.2.8 which has recent critical CVE fixes, but does not have the breaking changes. See https://github.com/spf13/cobra/pull/1259 for context. +* **Fix:** correct internal formatting for go-md2man v2 (which caused man page generation to be broken). See https://github.com/spf13/cobra/issues/1049 for context. + +## v1.1.0 + +### Notable Changes + +* Extend Go completions and revamp zsh comp (#1070) +* Fix man page doc generation - no auto generated tag when `cmd.DisableAutoGenTag = true` (#1104) @jpmcb +* Add completion for help command (#1136) +* Complete subcommands when TraverseChildren is set (#1171) +* Fix stderr printing functions (#894) +* fix: fish output redirection (#1247) + +## v1.0.0 + +Announcing v1.0.0 of Cobra. 🎉 + +### Notable Changes +* Fish completion (including support for Go custom completion) @marckhouzam +* API (urgent): Rename BashCompDirectives to ShellCompDirectives @marckhouzam +* Remove/replace SetOutput on Command - deprecated @jpmcb +* add support for autolabel stale PR @xchapter7x +* Add Labeler Actions @xchapter7x +* Custom completions coded in Go (instead of Bash) @marckhouzam +* Partial Revert of #922 @jharshman +* Add Makefile to project @jharshman +* Correct documentation for InOrStdin @desponda +* Apply formatting to templates @jharshman +* Revert change so help is printed on stdout again @marckhouzam +* Update md2man to v2.0.0 @pdf +* update viper to v1.4.0 @umarcor +* Update cmd/root.go example in README.md @jharshman diff --git a/vendor/github.com/spf13/cobra/CONDUCT.md b/vendor/github.com/spf13/cobra/CONDUCT.md new file mode 100644 index 000000000..9d16f88fd --- /dev/null +++ b/vendor/github.com/spf13/cobra/CONDUCT.md @@ -0,0 +1,37 @@ +## Cobra User Contract + +### Versioning +Cobra will follow a steady release cadence. Non breaking changes will be released as minor versions quarterly. Patch bug releases are at the discretion of the maintainers. Users can expect security patch fixes to be released within relatively short order of a CVE becoming known. For more information on security patch fixes see the CVE section below. Releases will follow [Semantic Versioning](https://semver.org/). Users tracking the Master branch should expect unpredictable breaking changes as the project continues to move forward. For stability, it is highly recommended to use a release. + +### Backward Compatibility +We will maintain two major releases in a moving window. The N-1 release will only receive bug fixes and security updates and will be dropped once N+1 is released. + +### Deprecation +Deprecation of Go versions or dependent packages will only occur in major releases. To reduce the chance of this taking users by surprise, any large deprecation will be preceded by an announcement in the [#cobra slack channel](https://gophers.slack.com/archives/CD3LP1199) and an Issue on Github. + +### CVE +Maintainers will make every effort to release security patches in the case of a medium to high severity CVE directly impacting the library.
The speed at which these patches reach a release is up to the discretion of the maintainers. A low severity CVE may be a lower priority than a high severity one. + +### Communication +Cobra maintainers will use GitHub issues and the [#cobra slack channel](https://gophers.slack.com/archives/CD3LP1199) as the primary means of communication with the community. This is to foster open communication with all users and contributors. + +### Breaking Changes +Breaking changes are generally allowed in the master branch, as this is the branch used to develop the next release of Cobra. + +There may be times, however, when master is closed for breaking changes. This is likely to happen as we near the release of a new version. + +Breaking changes are not allowed in release branches, as these represent minor versions that have already been released. These versions have consumers who expect the APIs, behaviors, etc., to remain stable during the lifetime of the patch stream for the minor release. + +Examples of breaking changes include: +- Removing or renaming exported constant, variable, type, or function. +- Updating the version of critical libraries such as `spf13/pflag`, `spf13/viper` etc... + - Some version updates may be acceptable for picking up bug fixes, but maintainers must exercise caution when reviewing. + +There may, at times, need to be exceptions where breaking changes are allowed in release branches. These are at the discretion of the project's maintainers, and must be carefully considered before merging. + +### CI Testing +Maintainers will ensure the Cobra test suite utilizes the currently supported versions of Golang. + +### Disclaimer +Changes to this document and the contents therein are at the discretion of the maintainers. +None of the contents of this document are legally binding in any way to the maintainers or the users. diff --git a/vendor/github.com/spf13/cobra/CONTRIBUTING.md b/vendor/github.com/spf13/cobra/CONTRIBUTING.md new file mode 100644 index 000000000..6f356e6a8 --- /dev/null +++ b/vendor/github.com/spf13/cobra/CONTRIBUTING.md @@ -0,0 +1,50 @@ +# Contributing to Cobra + +Thank you so much for contributing to Cobra. We appreciate your time and help. +Here are some guidelines to help you get started. + +## Code of Conduct + +Be kind and respectful to the members of the community. Take time to educate +others who are seeking help. Harassment of any kind will not be tolerated. + +## Questions + +If you have questions regarding Cobra, feel free to ask them in the community +[#cobra Slack channel][cobra-slack] + +## Filing a bug or feature + +1. Before filing an issue, please check the existing issues to see if a + similar one was already opened. If there is one already opened, feel free + to comment on it. +1. If you believe you've found a bug, please provide detailed steps of + reproduction, the version of Cobra and anything else you believe will be + useful to help troubleshoot it (e.g. OS environment, environment variables, + etc...). Also state the current behavior vs. the expected behavior. +1. If you'd like to see a feature or an enhancement please open an issue with + a clear title and description of what the feature is and why it would be + beneficial to the project and its users. + +## Submitting changes + +1. CLA: Upon submitting a Pull Request (PR), contributors will be prompted to + sign a CLA. Please sign the CLA :slightly_smiling_face: +1. Tests: If you are submitting code, please ensure you have adequate tests + for the feature.
Tests can be run via `go test ./...` or `make test`. +1. Since this is golang project, ensure the new code is properly formatted to + ensure code consistency. Run `make all`. + +### Quick steps to contribute + +1. Fork the project. +1. Download your fork to your PC (`git clone https://github.com/your_username/cobra && cd cobra`) +1. Create your feature branch (`git checkout -b my-new-feature`) +1. Make changes and run tests (`make test`) +1. Add them to staging (`git add .`) +1. Commit your changes (`git commit -m 'Add some feature'`) +1. Push to the branch (`git push origin my-new-feature`) +1. Create new pull request + + +[cobra-slack]: https://gophers.slack.com/archives/CD3LP1199 diff --git a/vendor/github.com/spf13/cobra/LICENSE.txt b/vendor/github.com/spf13/cobra/LICENSE.txt new file mode 100644 index 000000000..298f0e266 --- /dev/null +++ b/vendor/github.com/spf13/cobra/LICENSE.txt @@ -0,0 +1,174 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
diff --git a/vendor/github.com/spf13/cobra/Makefile b/vendor/github.com/spf13/cobra/Makefile new file mode 100644 index 000000000..472c73bf1 --- /dev/null +++ b/vendor/github.com/spf13/cobra/Makefile @@ -0,0 +1,40 @@ +BIN="./bin" +SRC=$(shell find . -name "*.go") + +ifeq (, $(shell which golangci-lint)) +$(warning "could not find golangci-lint in $(PATH), run: curl -sfL https://install.goreleaser.com/github.com/golangci/golangci-lint.sh | sh") +endif + +ifeq (, $(shell which richgo)) +$(warning "could not find richgo in $(PATH), run: go get github.com/kyoh86/richgo") +endif + +.PHONY: fmt lint test cobra_generator install_deps clean + +default: all + +all: fmt test cobra_generator + +fmt: + $(info ******************** checking formatting ********************) + @test -z $(shell gofmt -l $(SRC)) || (gofmt -d $(SRC); exit 1) + +lint: + $(info ******************** running lint tools ********************) + golangci-lint run -v + +test: install_deps lint + $(info ******************** running tests ********************) + richgo test -v ./... + +cobra_generator: install_deps + $(info ******************** building generator ********************) + mkdir -p $(BIN) + make -C cobra all + +install_deps: + $(info ******************** downloading dependencies ********************) + go get -v ./... + +clean: + rm -rf $(BIN) diff --git a/vendor/github.com/spf13/cobra/README.md b/vendor/github.com/spf13/cobra/README.md new file mode 100644 index 000000000..074e3979f --- /dev/null +++ b/vendor/github.com/spf13/cobra/README.md @@ -0,0 +1,125 @@ +![cobra logo](https://cloud.githubusercontent.com/assets/173412/10886352/ad566232-814f-11e5-9cd0-aa101788c117.png) + +Cobra is both a library for creating powerful modern CLI applications as well as a program to generate applications and command files. + +Cobra is used in many Go projects such as [Kubernetes](http://kubernetes.io/), +[Hugo](https://gohugo.io), and [Github CLI](https://github.com/cli/cli) to +name a few. [This list](./projects_using_cobra.md) contains a more extensive list of projects using Cobra. 
+ +[![](https://img.shields.io/github/workflow/status/spf13/cobra/Test?longCache=tru&label=Test&logo=github%20actions&logoColor=fff)](https://github.com/spf13/cobra/actions?query=workflow%3ATest) +[![GoDoc](https://godoc.org/github.com/spf13/cobra?status.svg)](https://godoc.org/github.com/spf13/cobra) +[![Go Report Card](https://goreportcard.com/badge/github.com/spf13/cobra)](https://goreportcard.com/report/github.com/spf13/cobra) +[![Slack](https://img.shields.io/badge/Slack-cobra-brightgreen)](https://gophers.slack.com/archives/CD3LP1199) + +# Table of Contents + +- [Overview](#overview) +- [Concepts](#concepts) + * [Commands](#commands) + * [Flags](#flags) +- [Installing](#installing) +- [Usage](#usage) + * [Using the Cobra Generator](user_guide.md#using-the-cobra-generator) + * [Using the Cobra Library](user_guide.md#using-the-cobra-library) + * [Working with Flags](user_guide.md#working-with-flags) + * [Positional and Custom Arguments](user_guide.md#positional-and-custom-arguments) + * [Example](user_guide.md#example) + * [Help Command](user_guide.md#help-command) + * [Usage Message](user_guide.md#usage-message) + * [PreRun and PostRun Hooks](user_guide.md#prerun-and-postrun-hooks) + * [Suggestions when "unknown command" happens](user_guide.md#suggestions-when-unknown-command-happens) + * [Generating documentation for your command](user_guide.md#generating-documentation-for-your-command) + * [Generating shell completions](user_guide.md#generating-shell-completions) +- [Contributing](CONTRIBUTING.md) +- [License](#license) + +# Overview + +Cobra is a library providing a simple interface to create powerful modern CLI +interfaces similar to git & go tools. + +Cobra is also an application that will generate your application scaffolding to rapidly +develop a Cobra-based application. + +Cobra provides: +* Easy subcommand-based CLIs: `app server`, `app fetch`, etc. +* Fully POSIX-compliant flags (including short & long versions) +* Nested subcommands +* Global, local and cascading flags +* Easy generation of applications & commands with `cobra init appname` & `cobra add cmdname` +* Intelligent suggestions (`app srver`... did you mean `app server`?) +* Automatic help generation for commands and flags +* Automatic help flag recognition of `-h`, `--help`, etc. +* Automatically generated shell autocomplete for your application (bash, zsh, fish, powershell) +* Automatically generated man pages for your application +* Command aliases so you can change things without breaking them +* The flexibility to define your own help, usage, etc. +* Optional tight integration with [viper](http://github.com/spf13/viper) for 12-factor apps + +# Concepts + +Cobra is built on a structure of commands, arguments & flags. + +**Commands** represent actions, **Args** are things and **Flags** are modifiers for those actions. + +The best applications read like sentences when used, and as a result, users +intuitively know how to interact with them. + +The pattern to follow is +`APPNAME VERB NOUN --ADJECTIVE.` + or +`APPNAME COMMAND ARG --FLAG` + +A few good real world examples may better illustrate this point. + +In the following example, 'server' is a command, and 'port' is a flag: + + hugo server --port=1313 + +In this command we are telling Git to clone the url bare. + + git clone URL --bare + +## Commands + +Command is the central point of the application. Each interaction that +the application supports will be contained in a Command. A command can +have children commands and optionally run an action. 
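+
+To make this concrete, the following is a minimal, illustrative sketch: the `app` and `server` names and the `1313` default are arbitrary placeholders, while `cobra.Command`, `AddCommand`, `Flags()` and `Execute` are the standard cobra/pflag API used throughout this document.
+
+```go
+package main
+
+import (
+	"fmt"
+	"os"
+
+	"github.com/spf13/cobra"
+)
+
+func main() {
+	// Root command: the application itself ("app" in APPNAME COMMAND ARG --FLAG).
+	rootCmd := &cobra.Command{Use: "app"}
+
+	// "server" is a child command; "--port" is a flag local to it.
+	serverCmd := &cobra.Command{
+		Use:   "server",
+		Short: "Start the server",
+		Run: func(cmd *cobra.Command, args []string) {
+			port, _ := cmd.Flags().GetInt("port")
+			fmt.Println("serving on port", port)
+		},
+	}
+	serverCmd.Flags().Int("port", 1313, "port to listen on")
+
+	rootCmd.AddCommand(serverCmd)
+	if err := rootCmd.Execute(); err != nil {
+		os.Exit(1)
+	}
+}
+```
+
+Running `app server --port=8080` would then invoke the `Run` function of the `server` command with the flag value parsed for you.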
+ +In the example above, 'server' is the command. + +[More about cobra.Command](https://godoc.org/github.com/spf13/cobra#Command) + +## Flags + +A flag is a way to modify the behavior of a command. Cobra supports +fully POSIX-compliant flags as well as the Go [flag package](https://golang.org/pkg/flag/). +A Cobra command can define flags that persist through to children commands +and flags that are only available to that command. + +In the example above, 'port' is the flag. + +Flag functionality is provided by the [pflag +library](https://github.com/spf13/pflag), a fork of the flag standard library +which maintains the same interface while adding POSIX compliance. + +# Installing +Using Cobra is easy. First, use `go get` to install the latest version +of the library. This command will install the `cobra` generator executable +along with the library and its dependencies: + + go get -u github.com/spf13/cobra + +Next, include Cobra in your application: + +```go +import "github.com/spf13/cobra" +``` + +# Usage + +See [User Guide](user_guide.md). + +# License + +Cobra is released under the Apache 2.0 license. See [LICENSE.txt](https://github.com/spf13/cobra/blob/master/LICENSE.txt) diff --git a/vendor/github.com/spf13/cobra/args.go b/vendor/github.com/spf13/cobra/args.go new file mode 100644 index 000000000..70e9b2629 --- /dev/null +++ b/vendor/github.com/spf13/cobra/args.go @@ -0,0 +1,109 @@ +package cobra + +import ( + "fmt" + "strings" +) + +type PositionalArgs func(cmd *Command, args []string) error + +// Legacy arg validation has the following behaviour: +// - root commands with no subcommands can take arbitrary arguments +// - root commands with subcommands will do subcommand validity checking +// - subcommands will always accept arbitrary arguments +func legacyArgs(cmd *Command, args []string) error { + // no subcommand, always take args + if !cmd.HasSubCommands() { + return nil + } + + // root command with subcommands, do subcommand checking. + if !cmd.HasParent() && len(args) > 0 { + return fmt.Errorf("unknown command %q for %q%s", args[0], cmd.CommandPath(), cmd.findSuggestions(args[0])) + } + return nil +} + +// NoArgs returns an error if any args are included. +func NoArgs(cmd *Command, args []string) error { + if len(args) > 0 { + return fmt.Errorf("unknown command %q for %q", args[0], cmd.CommandPath()) + } + return nil +} + +// OnlyValidArgs returns an error if any args are not in the list of ValidArgs. +func OnlyValidArgs(cmd *Command, args []string) error { + if len(cmd.ValidArgs) > 0 { + // Remove any description that may be included in ValidArgs. + // A description is following a tab character. + var validArgs []string + for _, v := range cmd.ValidArgs { + validArgs = append(validArgs, strings.Split(v, "\t")[0]) + } + + for _, v := range args { + if !stringInSlice(v, validArgs) { + return fmt.Errorf("invalid argument %q for %q%s", v, cmd.CommandPath(), cmd.findSuggestions(args[0])) + } + } + } + return nil +} + +// ArbitraryArgs never returns an error. +func ArbitraryArgs(cmd *Command, args []string) error { + return nil +} + +// MinimumNArgs returns an error if there is not at least N args. +func MinimumNArgs(n int) PositionalArgs { + return func(cmd *Command, args []string) error { + if len(args) < n { + return fmt.Errorf("requires at least %d arg(s), only received %d", n, len(args)) + } + return nil + } +} + +// MaximumNArgs returns an error if there are more than N args. 
+func MaximumNArgs(n int) PositionalArgs { + return func(cmd *Command, args []string) error { + if len(args) > n { + return fmt.Errorf("accepts at most %d arg(s), received %d", n, len(args)) + } + return nil + } +} + +// ExactArgs returns an error if there are not exactly n args. +func ExactArgs(n int) PositionalArgs { + return func(cmd *Command, args []string) error { + if len(args) != n { + return fmt.Errorf("accepts %d arg(s), received %d", n, len(args)) + } + return nil + } +} + +// ExactValidArgs returns an error if +// there are not exactly N positional args OR +// there are any positional args that are not in the `ValidArgs` field of `Command` +func ExactValidArgs(n int) PositionalArgs { + return func(cmd *Command, args []string) error { + if err := ExactArgs(n)(cmd, args); err != nil { + return err + } + return OnlyValidArgs(cmd, args) + } +} + +// RangeArgs returns an error if the number of args is not within the expected range. +func RangeArgs(min int, max int) PositionalArgs { + return func(cmd *Command, args []string) error { + if len(args) < min || len(args) > max { + return fmt.Errorf("accepts between %d and %d arg(s), received %d", min, max, len(args)) + } + return nil + } +} diff --git a/vendor/github.com/spf13/cobra/bash_completions.go b/vendor/github.com/spf13/cobra/bash_completions.go new file mode 100644 index 000000000..733f4d121 --- /dev/null +++ b/vendor/github.com/spf13/cobra/bash_completions.go @@ -0,0 +1,685 @@ +package cobra + +import ( + "bytes" + "fmt" + "io" + "os" + "sort" + "strings" + + "github.com/spf13/pflag" +) + +// Annotations for Bash completion. +const ( + BashCompFilenameExt = "cobra_annotation_bash_completion_filename_extensions" + BashCompCustom = "cobra_annotation_bash_completion_custom" + BashCompOneRequiredFlag = "cobra_annotation_bash_completion_one_required_flag" + BashCompSubdirsInDir = "cobra_annotation_bash_completion_subdirs_in_dir" +) + +func writePreamble(buf io.StringWriter, name string) { + WriteStringAndCheck(buf, fmt.Sprintf("# bash completion for %-36s -*- shell-script -*-\n", name)) + WriteStringAndCheck(buf, fmt.Sprintf(` +__%[1]s_debug() +{ + if [[ -n ${BASH_COMP_DEBUG_FILE} ]]; then + echo "$*" >> "${BASH_COMP_DEBUG_FILE}" + fi +} + +# Homebrew on Macs have version 1.3 of bash-completion which doesn't include +# _init_completion. This is a very minimal version of that function. +__%[1]s_init_completion() +{ + COMPREPLY=() + _get_comp_words_by_ref "$@" cur prev words cword +} + +__%[1]s_index_of_word() +{ + local w word=$1 + shift + index=0 + for w in "$@"; do + [[ $w = "$word" ]] && return + index=$((index+1)) + done + index=-1 +} + +__%[1]s_contains_word() +{ + local w word=$1; shift + for w in "$@"; do + [[ $w = "$word" ]] && return + done + return 1 +} + +__%[1]s_handle_go_custom_completion() +{ + __%[1]s_debug "${FUNCNAME[0]}: cur is ${cur}, words[*] is ${words[*]}, #words[@] is ${#words[@]}" + + local shellCompDirectiveError=%[3]d + local shellCompDirectiveNoSpace=%[4]d + local shellCompDirectiveNoFileComp=%[5]d + local shellCompDirectiveFilterFileExt=%[6]d + local shellCompDirectiveFilterDirs=%[7]d + + local out requestComp lastParam lastChar comp directive args + + # Prepare the command to request completions for the program. 
+ # Calling ${words[0]} instead of directly %[1]s allows to handle aliases + args=("${words[@]:1}") + requestComp="${words[0]} %[2]s ${args[*]}" + + lastParam=${words[$((${#words[@]}-1))]} + lastChar=${lastParam:$((${#lastParam}-1)):1} + __%[1]s_debug "${FUNCNAME[0]}: lastParam ${lastParam}, lastChar ${lastChar}" + + if [ -z "${cur}" ] && [ "${lastChar}" != "=" ]; then + # If the last parameter is complete (there is a space following it) + # We add an extra empty parameter so we can indicate this to the go method. + __%[1]s_debug "${FUNCNAME[0]}: Adding extra empty parameter" + requestComp="${requestComp} \"\"" + fi + + __%[1]s_debug "${FUNCNAME[0]}: calling ${requestComp}" + # Use eval to handle any environment variables and such + out=$(eval "${requestComp}" 2>/dev/null) + + # Extract the directive integer at the very end of the output following a colon (:) + directive=${out##*:} + # Remove the directive + out=${out%%:*} + if [ "${directive}" = "${out}" ]; then + # There is not directive specified + directive=0 + fi + __%[1]s_debug "${FUNCNAME[0]}: the completion directive is: ${directive}" + __%[1]s_debug "${FUNCNAME[0]}: the completions are: ${out[*]}" + + if [ $((directive & shellCompDirectiveError)) -ne 0 ]; then + # Error code. No completion. + __%[1]s_debug "${FUNCNAME[0]}: received error from custom completion go code" + return + else + if [ $((directive & shellCompDirectiveNoSpace)) -ne 0 ]; then + if [[ $(type -t compopt) = "builtin" ]]; then + __%[1]s_debug "${FUNCNAME[0]}: activating no space" + compopt -o nospace + fi + fi + if [ $((directive & shellCompDirectiveNoFileComp)) -ne 0 ]; then + if [[ $(type -t compopt) = "builtin" ]]; then + __%[1]s_debug "${FUNCNAME[0]}: activating no file completion" + compopt +o default + fi + fi + fi + + if [ $((directive & shellCompDirectiveFilterFileExt)) -ne 0 ]; then + # File extension filtering + local fullFilter filter filteringCmd + # Do not use quotes around the $out variable or else newline + # characters will be kept. + for filter in ${out[*]}; do + fullFilter+="$filter|" + done + + filteringCmd="_filedir $fullFilter" + __%[1]s_debug "File filtering command: $filteringCmd" + $filteringCmd + elif [ $((directive & shellCompDirectiveFilterDirs)) -ne 0 ]; then + # File completion for directories only + local subDir + # Use printf to strip any trailing newline + subdir=$(printf "%%s" "${out[0]}") + if [ -n "$subdir" ]; then + __%[1]s_debug "Listing directories in $subdir" + __%[1]s_handle_subdirs_in_dir_flag "$subdir" + else + __%[1]s_debug "Listing directories in ." 
+ _filedir -d + fi + else + while IFS='' read -r comp; do + COMPREPLY+=("$comp") + done < <(compgen -W "${out[*]}" -- "$cur") + fi +} + +__%[1]s_handle_reply() +{ + __%[1]s_debug "${FUNCNAME[0]}" + local comp + case $cur in + -*) + if [[ $(type -t compopt) = "builtin" ]]; then + compopt -o nospace + fi + local allflags + if [ ${#must_have_one_flag[@]} -ne 0 ]; then + allflags=("${must_have_one_flag[@]}") + else + allflags=("${flags[*]} ${two_word_flags[*]}") + fi + while IFS='' read -r comp; do + COMPREPLY+=("$comp") + done < <(compgen -W "${allflags[*]}" -- "$cur") + if [[ $(type -t compopt) = "builtin" ]]; then + [[ "${COMPREPLY[0]}" == *= ]] || compopt +o nospace + fi + + # complete after --flag=abc + if [[ $cur == *=* ]]; then + if [[ $(type -t compopt) = "builtin" ]]; then + compopt +o nospace + fi + + local index flag + flag="${cur%%=*}" + __%[1]s_index_of_word "${flag}" "${flags_with_completion[@]}" + COMPREPLY=() + if [[ ${index} -ge 0 ]]; then + PREFIX="" + cur="${cur#*=}" + ${flags_completion[${index}]} + if [ -n "${ZSH_VERSION}" ]; then + # zsh completion needs --flag= prefix + eval "COMPREPLY=( \"\${COMPREPLY[@]/#/${flag}=}\" )" + fi + fi + fi + return 0; + ;; + esac + + # check if we are handling a flag with special work handling + local index + __%[1]s_index_of_word "${prev}" "${flags_with_completion[@]}" + if [[ ${index} -ge 0 ]]; then + ${flags_completion[${index}]} + return + fi + + # we are parsing a flag and don't have a special handler, no completion + if [[ ${cur} != "${words[cword]}" ]]; then + return + fi + + local completions + completions=("${commands[@]}") + if [[ ${#must_have_one_noun[@]} -ne 0 ]]; then + completions+=("${must_have_one_noun[@]}") + elif [[ -n "${has_completion_function}" ]]; then + # if a go completion function is provided, defer to that function + __%[1]s_handle_go_custom_completion + fi + if [[ ${#must_have_one_flag[@]} -ne 0 ]]; then + completions+=("${must_have_one_flag[@]}") + fi + while IFS='' read -r comp; do + COMPREPLY+=("$comp") + done < <(compgen -W "${completions[*]}" -- "$cur") + + if [[ ${#COMPREPLY[@]} -eq 0 && ${#noun_aliases[@]} -gt 0 && ${#must_have_one_noun[@]} -ne 0 ]]; then + while IFS='' read -r comp; do + COMPREPLY+=("$comp") + done < <(compgen -W "${noun_aliases[*]}" -- "$cur") + fi + + if [[ ${#COMPREPLY[@]} -eq 0 ]]; then + if declare -F __%[1]s_custom_func >/dev/null; then + # try command name qualified custom func + __%[1]s_custom_func + else + # otherwise fall back to unqualified for compatibility + declare -F __custom_func >/dev/null && __custom_func + fi + fi + + # available in bash-completion >= 2, not always present on macOS + if declare -F __ltrim_colon_completions >/dev/null; then + __ltrim_colon_completions "$cur" + fi + + # If there is only 1 completion and it is a flag with an = it will be completed + # but we don't want a space after the = + if [[ "${#COMPREPLY[@]}" -eq "1" ]] && [[ $(type -t compopt) = "builtin" ]] && [[ "${COMPREPLY[0]}" == --*= ]]; then + compopt -o nospace + fi +} + +# The arguments should be in the form "ext1|ext2|extn" +__%[1]s_handle_filename_extension_flag() +{ + local ext="$1" + _filedir "@(${ext})" +} + +__%[1]s_handle_subdirs_in_dir_flag() +{ + local dir="$1" + pushd "${dir}" >/dev/null 2>&1 && _filedir -d && popd >/dev/null 2>&1 || return +} + +__%[1]s_handle_flag() +{ + __%[1]s_debug "${FUNCNAME[0]}: c is $c words[c] is ${words[c]}" + + # if a command required a flag, and we found it, unset must_have_one_flag() + local flagname=${words[c]} + local flagvalue + # if the word 
contained an = + if [[ ${words[c]} == *"="* ]]; then + flagvalue=${flagname#*=} # take in as flagvalue after the = + flagname=${flagname%%=*} # strip everything after the = + flagname="${flagname}=" # but put the = back + fi + __%[1]s_debug "${FUNCNAME[0]}: looking for ${flagname}" + if __%[1]s_contains_word "${flagname}" "${must_have_one_flag[@]}"; then + must_have_one_flag=() + fi + + # if you set a flag which only applies to this command, don't show subcommands + if __%[1]s_contains_word "${flagname}" "${local_nonpersistent_flags[@]}"; then + commands=() + fi + + # keep flag value with flagname as flaghash + # flaghash variable is an associative array which is only supported in bash > 3. + if [[ -z "${BASH_VERSION}" || "${BASH_VERSINFO[0]}" -gt 3 ]]; then + if [ -n "${flagvalue}" ] ; then + flaghash[${flagname}]=${flagvalue} + elif [ -n "${words[ $((c+1)) ]}" ] ; then + flaghash[${flagname}]=${words[ $((c+1)) ]} + else + flaghash[${flagname}]="true" # pad "true" for bool flag + fi + fi + + # skip the argument to a two word flag + if [[ ${words[c]} != *"="* ]] && __%[1]s_contains_word "${words[c]}" "${two_word_flags[@]}"; then + __%[1]s_debug "${FUNCNAME[0]}: found a flag ${words[c]}, skip the next argument" + c=$((c+1)) + # if we are looking for a flags value, don't show commands + if [[ $c -eq $cword ]]; then + commands=() + fi + fi + + c=$((c+1)) + +} + +__%[1]s_handle_noun() +{ + __%[1]s_debug "${FUNCNAME[0]}: c is $c words[c] is ${words[c]}" + + if __%[1]s_contains_word "${words[c]}" "${must_have_one_noun[@]}"; then + must_have_one_noun=() + elif __%[1]s_contains_word "${words[c]}" "${noun_aliases[@]}"; then + must_have_one_noun=() + fi + + nouns+=("${words[c]}") + c=$((c+1)) +} + +__%[1]s_handle_command() +{ + __%[1]s_debug "${FUNCNAME[0]}: c is $c words[c] is ${words[c]}" + + local next_command + if [[ -n ${last_command} ]]; then + next_command="_${last_command}_${words[c]//:/__}" + else + if [[ $c -eq 0 ]]; then + next_command="_%[1]s_root_command" + else + next_command="_${words[c]//:/__}" + fi + fi + c=$((c+1)) + __%[1]s_debug "${FUNCNAME[0]}: looking for ${next_command}" + declare -F "$next_command" >/dev/null && $next_command +} + +__%[1]s_handle_word() +{ + if [[ $c -ge $cword ]]; then + __%[1]s_handle_reply + return + fi + __%[1]s_debug "${FUNCNAME[0]}: c is $c words[c] is ${words[c]}" + if [[ "${words[c]}" == -* ]]; then + __%[1]s_handle_flag + elif __%[1]s_contains_word "${words[c]}" "${commands[@]}"; then + __%[1]s_handle_command + elif [[ $c -eq 0 ]]; then + __%[1]s_handle_command + elif __%[1]s_contains_word "${words[c]}" "${command_aliases[@]}"; then + # aliashash variable is an associative array which is only supported in bash > 3. 
+ if [[ -z "${BASH_VERSION}" || "${BASH_VERSINFO[0]}" -gt 3 ]]; then + words[c]=${aliashash[${words[c]}]} + __%[1]s_handle_command + else + __%[1]s_handle_noun + fi + else + __%[1]s_handle_noun + fi + __%[1]s_handle_word +} + +`, name, ShellCompNoDescRequestCmd, + ShellCompDirectiveError, ShellCompDirectiveNoSpace, ShellCompDirectiveNoFileComp, + ShellCompDirectiveFilterFileExt, ShellCompDirectiveFilterDirs)) +} + +func writePostscript(buf io.StringWriter, name string) { + name = strings.Replace(name, ":", "__", -1) + WriteStringAndCheck(buf, fmt.Sprintf("__start_%s()\n", name)) + WriteStringAndCheck(buf, fmt.Sprintf(`{ + local cur prev words cword split + declare -A flaghash 2>/dev/null || : + declare -A aliashash 2>/dev/null || : + if declare -F _init_completion >/dev/null 2>&1; then + _init_completion -s || return + else + __%[1]s_init_completion -n "=" || return + fi + + local c=0 + local flags=() + local two_word_flags=() + local local_nonpersistent_flags=() + local flags_with_completion=() + local flags_completion=() + local commands=("%[1]s") + local command_aliases=() + local must_have_one_flag=() + local must_have_one_noun=() + local has_completion_function + local last_command + local nouns=() + local noun_aliases=() + + __%[1]s_handle_word +} + +`, name)) + WriteStringAndCheck(buf, fmt.Sprintf(`if [[ $(type -t compopt) = "builtin" ]]; then + complete -o default -F __start_%s %s +else + complete -o default -o nospace -F __start_%s %s +fi + +`, name, name, name, name)) + WriteStringAndCheck(buf, "# ex: ts=4 sw=4 et filetype=sh\n") +} + +func writeCommands(buf io.StringWriter, cmd *Command) { + WriteStringAndCheck(buf, " commands=()\n") + for _, c := range cmd.Commands() { + if !c.IsAvailableCommand() && c != cmd.helpCommand { + continue + } + WriteStringAndCheck(buf, fmt.Sprintf(" commands+=(%q)\n", c.Name())) + writeCmdAliases(buf, c) + } + WriteStringAndCheck(buf, "\n") +} + +func writeFlagHandler(buf io.StringWriter, name string, annotations map[string][]string, cmd *Command) { + for key, value := range annotations { + switch key { + case BashCompFilenameExt: + WriteStringAndCheck(buf, fmt.Sprintf(" flags_with_completion+=(%q)\n", name)) + + var ext string + if len(value) > 0 { + ext = fmt.Sprintf("__%s_handle_filename_extension_flag ", cmd.Root().Name()) + strings.Join(value, "|") + } else { + ext = "_filedir" + } + WriteStringAndCheck(buf, fmt.Sprintf(" flags_completion+=(%q)\n", ext)) + case BashCompCustom: + WriteStringAndCheck(buf, fmt.Sprintf(" flags_with_completion+=(%q)\n", name)) + + if len(value) > 0 { + handlers := strings.Join(value, "; ") + WriteStringAndCheck(buf, fmt.Sprintf(" flags_completion+=(%q)\n", handlers)) + } else { + WriteStringAndCheck(buf, " flags_completion+=(:)\n") + } + case BashCompSubdirsInDir: + WriteStringAndCheck(buf, fmt.Sprintf(" flags_with_completion+=(%q)\n", name)) + + var ext string + if len(value) == 1 { + ext = fmt.Sprintf("__%s_handle_subdirs_in_dir_flag ", cmd.Root().Name()) + value[0] + } else { + ext = "_filedir -d" + } + WriteStringAndCheck(buf, fmt.Sprintf(" flags_completion+=(%q)\n", ext)) + } + } +} + +const cbn = "\")\n" + +func writeShortFlag(buf io.StringWriter, flag *pflag.Flag, cmd *Command) { + name := flag.Shorthand + format := " " + if len(flag.NoOptDefVal) == 0 { + format += "two_word_" + } + format += "flags+=(\"-%s" + cbn + WriteStringAndCheck(buf, fmt.Sprintf(format, name)) + writeFlagHandler(buf, "-"+name, flag.Annotations, cmd) +} + +func writeFlag(buf io.StringWriter, flag *pflag.Flag, cmd *Command) { + name := 
flag.Name + format := " flags+=(\"--%s" + if len(flag.NoOptDefVal) == 0 { + format += "=" + } + format += cbn + WriteStringAndCheck(buf, fmt.Sprintf(format, name)) + if len(flag.NoOptDefVal) == 0 { + format = " two_word_flags+=(\"--%s" + cbn + WriteStringAndCheck(buf, fmt.Sprintf(format, name)) + } + writeFlagHandler(buf, "--"+name, flag.Annotations, cmd) +} + +func writeLocalNonPersistentFlag(buf io.StringWriter, flag *pflag.Flag) { + name := flag.Name + format := " local_nonpersistent_flags+=(\"--%[1]s" + cbn + if len(flag.NoOptDefVal) == 0 { + format += " local_nonpersistent_flags+=(\"--%[1]s=" + cbn + } + WriteStringAndCheck(buf, fmt.Sprintf(format, name)) + if len(flag.Shorthand) > 0 { + WriteStringAndCheck(buf, fmt.Sprintf(" local_nonpersistent_flags+=(\"-%s\")\n", flag.Shorthand)) + } +} + +// Setup annotations for go completions for registered flags +func prepareCustomAnnotationsForFlags(cmd *Command) { + flagCompletionMutex.RLock() + defer flagCompletionMutex.RUnlock() + for flag := range flagCompletionFunctions { + // Make sure the completion script calls the __*_go_custom_completion function for + // every registered flag. We need to do this here (and not when the flag was registered + // for completion) so that we can know the root command name for the prefix + // of ___go_custom_completion + if flag.Annotations == nil { + flag.Annotations = map[string][]string{} + } + flag.Annotations[BashCompCustom] = []string{fmt.Sprintf("__%[1]s_handle_go_custom_completion", cmd.Root().Name())} + } +} + +func writeFlags(buf io.StringWriter, cmd *Command) { + prepareCustomAnnotationsForFlags(cmd) + WriteStringAndCheck(buf, ` flags=() + two_word_flags=() + local_nonpersistent_flags=() + flags_with_completion=() + flags_completion=() + +`) + localNonPersistentFlags := cmd.LocalNonPersistentFlags() + cmd.NonInheritedFlags().VisitAll(func(flag *pflag.Flag) { + if nonCompletableFlag(flag) { + return + } + writeFlag(buf, flag, cmd) + if len(flag.Shorthand) > 0 { + writeShortFlag(buf, flag, cmd) + } + // localNonPersistentFlags are used to stop the completion of subcommands when one is set + // if TraverseChildren is true we should allow to complete subcommands + if localNonPersistentFlags.Lookup(flag.Name) != nil && !cmd.Root().TraverseChildren { + writeLocalNonPersistentFlag(buf, flag) + } + }) + cmd.InheritedFlags().VisitAll(func(flag *pflag.Flag) { + if nonCompletableFlag(flag) { + return + } + writeFlag(buf, flag, cmd) + if len(flag.Shorthand) > 0 { + writeShortFlag(buf, flag, cmd) + } + }) + + WriteStringAndCheck(buf, "\n") +} + +func writeRequiredFlag(buf io.StringWriter, cmd *Command) { + WriteStringAndCheck(buf, " must_have_one_flag=()\n") + flags := cmd.NonInheritedFlags() + flags.VisitAll(func(flag *pflag.Flag) { + if nonCompletableFlag(flag) { + return + } + for key := range flag.Annotations { + switch key { + case BashCompOneRequiredFlag: + format := " must_have_one_flag+=(\"--%s" + if flag.Value.Type() != "bool" { + format += "=" + } + format += cbn + WriteStringAndCheck(buf, fmt.Sprintf(format, flag.Name)) + + if len(flag.Shorthand) > 0 { + WriteStringAndCheck(buf, fmt.Sprintf(" must_have_one_flag+=(\"-%s"+cbn, flag.Shorthand)) + } + } + } + }) +} + +func writeRequiredNouns(buf io.StringWriter, cmd *Command) { + WriteStringAndCheck(buf, " must_have_one_noun=()\n") + sort.Strings(cmd.ValidArgs) + for _, value := range cmd.ValidArgs { + // Remove any description that may be included following a tab character. + // Descriptions are not supported by bash completion. 
+ value = strings.Split(value, "\t")[0] + WriteStringAndCheck(buf, fmt.Sprintf(" must_have_one_noun+=(%q)\n", value)) + } + if cmd.ValidArgsFunction != nil { + WriteStringAndCheck(buf, " has_completion_function=1\n") + } +} + +func writeCmdAliases(buf io.StringWriter, cmd *Command) { + if len(cmd.Aliases) == 0 { + return + } + + sort.Strings(cmd.Aliases) + + WriteStringAndCheck(buf, fmt.Sprint(` if [[ -z "${BASH_VERSION}" || "${BASH_VERSINFO[0]}" -gt 3 ]]; then`, "\n")) + for _, value := range cmd.Aliases { + WriteStringAndCheck(buf, fmt.Sprintf(" command_aliases+=(%q)\n", value)) + WriteStringAndCheck(buf, fmt.Sprintf(" aliashash[%q]=%q\n", value, cmd.Name())) + } + WriteStringAndCheck(buf, ` fi`) + WriteStringAndCheck(buf, "\n") +} +func writeArgAliases(buf io.StringWriter, cmd *Command) { + WriteStringAndCheck(buf, " noun_aliases=()\n") + sort.Strings(cmd.ArgAliases) + for _, value := range cmd.ArgAliases { + WriteStringAndCheck(buf, fmt.Sprintf(" noun_aliases+=(%q)\n", value)) + } +} + +func gen(buf io.StringWriter, cmd *Command) { + for _, c := range cmd.Commands() { + if !c.IsAvailableCommand() && c != cmd.helpCommand { + continue + } + gen(buf, c) + } + commandName := cmd.CommandPath() + commandName = strings.Replace(commandName, " ", "_", -1) + commandName = strings.Replace(commandName, ":", "__", -1) + + if cmd.Root() == cmd { + WriteStringAndCheck(buf, fmt.Sprintf("_%s_root_command()\n{\n", commandName)) + } else { + WriteStringAndCheck(buf, fmt.Sprintf("_%s()\n{\n", commandName)) + } + + WriteStringAndCheck(buf, fmt.Sprintf(" last_command=%q\n", commandName)) + WriteStringAndCheck(buf, "\n") + WriteStringAndCheck(buf, " command_aliases=()\n") + WriteStringAndCheck(buf, "\n") + + writeCommands(buf, cmd) + writeFlags(buf, cmd) + writeRequiredFlag(buf, cmd) + writeRequiredNouns(buf, cmd) + writeArgAliases(buf, cmd) + WriteStringAndCheck(buf, "}\n\n") +} + +// GenBashCompletion generates bash completion file and writes to the passed writer. +func (c *Command) GenBashCompletion(w io.Writer) error { + buf := new(bytes.Buffer) + writePreamble(buf, c.Name()) + if len(c.BashCompletionFunction) > 0 { + buf.WriteString(c.BashCompletionFunction + "\n") + } + gen(buf, c) + writePostscript(buf, c.Name()) + + _, err := buf.WriteTo(w) + return err +} + +func nonCompletableFlag(flag *pflag.Flag) bool { + return flag.Hidden || len(flag.Deprecated) > 0 +} + +// GenBashCompletionFile generates bash completion file. +func (c *Command) GenBashCompletionFile(filename string) error { + outFile, err := os.Create(filename) + if err != nil { + return err + } + defer outFile.Close() + + return c.GenBashCompletion(outFile) +} diff --git a/vendor/github.com/spf13/cobra/bash_completions.md b/vendor/github.com/spf13/cobra/bash_completions.md new file mode 100644 index 000000000..52919b2fa --- /dev/null +++ b/vendor/github.com/spf13/cobra/bash_completions.md @@ -0,0 +1,93 @@ +# Generating Bash Completions For Your cobra.Command + +Please refer to [Shell Completions](shell_completions.md) for details. + +## Bash legacy dynamic completions + +For backward compatibility, Cobra still supports its legacy dynamic completion solution (described below). Unlike the `ValidArgsFunction` solution, the legacy solution will only work for Bash shell-completion and not for other shells. This legacy solution can be used along-side `ValidArgsFunction` and `RegisterFlagCompletionFunc()`, as long as both solutions are not used for the same command. 
This provides a path to gradually migrate from the legacy solution to the new solution. + +**Note**: Cobra's default `completion` command uses bash completion V2. If you are currently using Cobra's legacy dynamic completion solution, you should not use the default `completion` command but continue using your own. + +The legacy solution allows you to inject bash functions into the bash completion script. Those bash functions are responsible for providing the completion choices for your own completions. + +Some code that works in kubernetes: + +```bash +const ( + bash_completion_func = `__kubectl_parse_get() +{ + local kubectl_output out + if kubectl_output=$(kubectl get --no-headers "$1" 2>/dev/null); then + out=($(echo "${kubectl_output}" | awk '{print $1}')) + COMPREPLY=( $( compgen -W "${out[*]}" -- "$cur" ) ) + fi +} + +__kubectl_get_resource() +{ + if [[ ${#nouns[@]} -eq 0 ]]; then + return 1 + fi + __kubectl_parse_get ${nouns[${#nouns[@]} -1]} + if [[ $? -eq 0 ]]; then + return 0 + fi +} + +__kubectl_custom_func() { + case ${last_command} in + kubectl_get | kubectl_describe | kubectl_delete | kubectl_stop) + __kubectl_get_resource + return + ;; + *) + ;; + esac +} +`) +``` + +And then I set that in my command definition: + +```go +cmds := &cobra.Command{ + Use: "kubectl", + Short: "kubectl controls the Kubernetes cluster manager", + Long: `kubectl controls the Kubernetes cluster manager. + +Find more information at https://github.com/GoogleCloudPlatform/kubernetes.`, + Run: runHelp, + BashCompletionFunction: bash_completion_func, +} +``` + +The `BashCompletionFunction` option is really only valid/useful on the root command. Doing the above will cause `__kubectl_custom_func()` (`___custom_func()`) to be called when the built in processor was unable to find a solution. In the case of kubernetes a valid command might look something like `kubectl get pod [mypod]`. If you type `kubectl get pod [tab][tab]` the `__kubectl_customc_func()` will run because the cobra.Command only understood "kubectl" and "get." `__kubectl_custom_func()` will see that the cobra.Command is "kubectl_get" and will thus call another helper `__kubectl_get_resource()`. `__kubectl_get_resource` will look at the 'nouns' collected. In our example the only noun will be `pod`. So it will call `__kubectl_parse_get pod`. `__kubectl_parse_get` will actually call out to kubernetes and get any pods. It will then set `COMPREPLY` to valid pods! 
+ +Similarly, for flags: + +```go + annotation := make(map[string][]string) + annotation[cobra.BashCompCustom] = []string{"__kubectl_get_namespaces"} + + flag := &pflag.Flag{ + Name: "namespace", + Usage: usage, + Annotations: annotation, + } + cmd.Flags().AddFlag(flag) +``` + +In addition add the `__kubectl_get_namespaces` implementation in the `BashCompletionFunction` +value, e.g.: + +```bash +__kubectl_get_namespaces() +{ + local template + template="{{ range .items }}{{ .metadata.name }} {{ end }}" + local kubectl_out + if kubectl_out=$(kubectl get -o template --template="${template}" namespace 2>/dev/null); then + COMPREPLY=( $( compgen -W "${kubectl_out}[*]" -- "$cur" ) ) + fi +} +``` diff --git a/vendor/github.com/spf13/cobra/bash_completionsV2.go b/vendor/github.com/spf13/cobra/bash_completionsV2.go new file mode 100644 index 000000000..8859b57c4 --- /dev/null +++ b/vendor/github.com/spf13/cobra/bash_completionsV2.go @@ -0,0 +1,302 @@ +package cobra + +import ( + "bytes" + "fmt" + "io" + "os" +) + +func (c *Command) genBashCompletion(w io.Writer, includeDesc bool) error { + buf := new(bytes.Buffer) + genBashComp(buf, c.Name(), includeDesc) + _, err := buf.WriteTo(w) + return err +} + +func genBashComp(buf io.StringWriter, name string, includeDesc bool) { + compCmd := ShellCompRequestCmd + if !includeDesc { + compCmd = ShellCompNoDescRequestCmd + } + + WriteStringAndCheck(buf, fmt.Sprintf(`# bash completion V2 for %-36[1]s -*- shell-script -*- + +__%[1]s_debug() +{ + if [[ -n ${BASH_COMP_DEBUG_FILE:-} ]]; then + echo "$*" >> "${BASH_COMP_DEBUG_FILE}" + fi +} + +# Macs have bash3 for which the bash-completion package doesn't include +# _init_completion. This is a minimal version of that function. +__%[1]s_init_completion() +{ + COMPREPLY=() + _get_comp_words_by_ref "$@" cur prev words cword +} + +# This function calls the %[1]s program to obtain the completion +# results and the directive. It fills the 'out' and 'directive' vars. +__%[1]s_get_completion_results() { + local requestComp lastParam lastChar args + + # Prepare the command to request completions for the program. + # Calling ${words[0]} instead of directly %[1]s allows to handle aliases + args=("${words[@]:1}") + requestComp="${words[0]} %[2]s ${args[*]}" + + lastParam=${words[$((${#words[@]}-1))]} + lastChar=${lastParam:$((${#lastParam}-1)):1} + __%[1]s_debug "lastParam ${lastParam}, lastChar ${lastChar}" + + if [ -z "${cur}" ] && [ "${lastChar}" != "=" ]; then + # If the last parameter is complete (there is a space following it) + # We add an extra empty parameter so we can indicate this to the go method. 
+ __%[1]s_debug "Adding extra empty parameter" + requestComp="${requestComp} ''" + fi + + # When completing a flag with an = (e.g., %[1]s -n=) + # bash focuses on the part after the =, so we need to remove + # the flag part from $cur + if [[ "${cur}" == -*=* ]]; then + cur="${cur#*=}" + fi + + __%[1]s_debug "Calling ${requestComp}" + # Use eval to handle any environment variables and such + out=$(eval "${requestComp}" 2>/dev/null) + + # Extract the directive integer at the very end of the output following a colon (:) + directive=${out##*:} + # Remove the directive + out=${out%%:*} + if [ "${directive}" = "${out}" ]; then + # There is not directive specified + directive=0 + fi + __%[1]s_debug "The completion directive is: ${directive}" + __%[1]s_debug "The completions are: ${out[*]}" +} + +__%[1]s_process_completion_results() { + local shellCompDirectiveError=%[3]d + local shellCompDirectiveNoSpace=%[4]d + local shellCompDirectiveNoFileComp=%[5]d + local shellCompDirectiveFilterFileExt=%[6]d + local shellCompDirectiveFilterDirs=%[7]d + + if [ $((directive & shellCompDirectiveError)) -ne 0 ]; then + # Error code. No completion. + __%[1]s_debug "Received error from custom completion go code" + return + else + if [ $((directive & shellCompDirectiveNoSpace)) -ne 0 ]; then + if [[ $(type -t compopt) = "builtin" ]]; then + __%[1]s_debug "Activating no space" + compopt -o nospace + else + __%[1]s_debug "No space directive not supported in this version of bash" + fi + fi + if [ $((directive & shellCompDirectiveNoFileComp)) -ne 0 ]; then + if [[ $(type -t compopt) = "builtin" ]]; then + __%[1]s_debug "Activating no file completion" + compopt +o default + else + __%[1]s_debug "No file completion directive not supported in this version of bash" + fi + fi + fi + + if [ $((directive & shellCompDirectiveFilterFileExt)) -ne 0 ]; then + # File extension filtering + local fullFilter filter filteringCmd + + # Do not use quotes around the $out variable or else newline + # characters will be kept. + for filter in ${out[*]}; do + fullFilter+="$filter|" + done + + filteringCmd="_filedir $fullFilter" + __%[1]s_debug "File filtering command: $filteringCmd" + $filteringCmd + elif [ $((directive & shellCompDirectiveFilterDirs)) -ne 0 ]; then + # File completion for directories only + + # Use printf to strip any trailing newline + local subdir + subdir=$(printf "%%s" "${out[0]}") + if [ -n "$subdir" ]; then + __%[1]s_debug "Listing directories in $subdir" + pushd "$subdir" >/dev/null 2>&1 && _filedir -d && popd >/dev/null 2>&1 || return + else + __%[1]s_debug "Listing directories in ." 
+ _filedir -d + fi + else + __%[1]s_handle_standard_completion_case + fi + + __%[1]s_handle_special_char "$cur" : + __%[1]s_handle_special_char "$cur" = +} + +__%[1]s_handle_standard_completion_case() { + local tab comp + tab=$(printf '\t') + + local longest=0 + # Look for the longest completion so that we can format things nicely + while IFS='' read -r comp; do + # Strip any description before checking the length + comp=${comp%%%%$tab*} + # Only consider the completions that match + comp=$(compgen -W "$comp" -- "$cur") + if ((${#comp}>longest)); then + longest=${#comp} + fi + done < <(printf "%%s\n" "${out[@]}") + + local completions=() + while IFS='' read -r comp; do + if [ -z "$comp" ]; then + continue + fi + + __%[1]s_debug "Original comp: $comp" + comp="$(__%[1]s_format_comp_descriptions "$comp" "$longest")" + __%[1]s_debug "Final comp: $comp" + completions+=("$comp") + done < <(printf "%%s\n" "${out[@]}") + + while IFS='' read -r comp; do + COMPREPLY+=("$comp") + done < <(compgen -W "${completions[*]}" -- "$cur") + + # If there is a single completion left, remove the description text + if [ ${#COMPREPLY[*]} -eq 1 ]; then + __%[1]s_debug "COMPREPLY[0]: ${COMPREPLY[0]}" + comp="${COMPREPLY[0]%%%% *}" + __%[1]s_debug "Removed description from single completion, which is now: ${comp}" + COMPREPLY=() + COMPREPLY+=("$comp") + fi +} + +__%[1]s_handle_special_char() +{ + local comp="$1" + local char=$2 + if [[ "$comp" == *${char}* && "$COMP_WORDBREAKS" == *${char}* ]]; then + local word=${comp%%"${comp##*${char}}"} + local idx=${#COMPREPLY[*]} + while [[ $((--idx)) -ge 0 ]]; do + COMPREPLY[$idx]=${COMPREPLY[$idx]#"$word"} + done + fi +} + +__%[1]s_format_comp_descriptions() +{ + local tab + tab=$(printf '\t') + local comp="$1" + local longest=$2 + + # Properly format the description string which follows a tab character if there is one + if [[ "$comp" == *$tab* ]]; then + desc=${comp#*$tab} + comp=${comp%%%%$tab*} + + # $COLUMNS stores the current shell width. + # Remove an extra 4 because we add 2 spaces and 2 parentheses. + maxdesclength=$(( COLUMNS - longest - 4 )) + + # Make sure we can fit a description of at least 8 characters + # if we are to align the descriptions. + if [[ $maxdesclength -gt 8 ]]; then + # Add the proper number of spaces to align the descriptions + for ((i = ${#comp} ; i < longest ; i++)); do + comp+=" " + done + else + # Don't pad the descriptions so we can fit more text after the completion + maxdesclength=$(( COLUMNS - ${#comp} - 4 )) + fi + + # If there is enough space for any description text, + # truncate the descriptions that are too long for the shell width + if [ $maxdesclength -gt 0 ]; then + if [ ${#desc} -gt $maxdesclength ]; then + desc=${desc:0:$(( maxdesclength - 1 ))} + desc+="…" + fi + comp+=" ($desc)" + fi + fi + + # Must use printf to escape all special characters + printf "%%q" "${comp}" +} + +__start_%[1]s() +{ + local cur prev words cword split + + COMPREPLY=() + + # Call _init_completion from the bash-completion package + # to prepare the arguments properly + if declare -F _init_completion >/dev/null 2>&1; then + _init_completion -n "=:" || return + else + __%[1]s_init_completion -n "=:" || return + fi + + __%[1]s_debug + __%[1]s_debug "========= starting completion logic ==========" + __%[1]s_debug "cur is ${cur}, words[*] is ${words[*]}, #words[@] is ${#words[@]}, cword is $cword" + + # The user could have moved the cursor backwards on the command-line. 
+ # We need to trigger completion from the $cword location, so we need + # to truncate the command-line ($words) up to the $cword location. + words=("${words[@]:0:$cword+1}") + __%[1]s_debug "Truncated words[*]: ${words[*]}," + + local out directive + __%[1]s_get_completion_results + __%[1]s_process_completion_results +} + +if [[ $(type -t compopt) = "builtin" ]]; then + complete -o default -F __start_%[1]s %[1]s +else + complete -o default -o nospace -F __start_%[1]s %[1]s +fi + +# ex: ts=4 sw=4 et filetype=sh +`, name, compCmd, + ShellCompDirectiveError, ShellCompDirectiveNoSpace, ShellCompDirectiveNoFileComp, + ShellCompDirectiveFilterFileExt, ShellCompDirectiveFilterDirs)) +} + +// GenBashCompletionFileV2 generates Bash completion version 2. +func (c *Command) GenBashCompletionFileV2(filename string, includeDesc bool) error { + outFile, err := os.Create(filename) + if err != nil { + return err + } + defer outFile.Close() + + return c.GenBashCompletionV2(outFile, includeDesc) +} + +// GenBashCompletionV2 generates Bash completion file version 2 +// and writes it to the passed writer. +func (c *Command) GenBashCompletionV2(w io.Writer, includeDesc bool) error { + return c.genBashCompletion(w, includeDesc) +} diff --git a/vendor/github.com/spf13/cobra/cobra.go b/vendor/github.com/spf13/cobra/cobra.go new file mode 100644 index 000000000..d6cbfd719 --- /dev/null +++ b/vendor/github.com/spf13/cobra/cobra.go @@ -0,0 +1,222 @@ +// Copyright © 2013 Steve Francia . +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Commands similar to git, go tools and other modern CLI tools +// inspired by go, go-Commander, gh and subcommand + +package cobra + +import ( + "fmt" + "io" + "os" + "reflect" + "strconv" + "strings" + "text/template" + "time" + "unicode" +) + +var templateFuncs = template.FuncMap{ + "trim": strings.TrimSpace, + "trimRightSpace": trimRightSpace, + "trimTrailingWhitespaces": trimRightSpace, + "appendIfNotPresent": appendIfNotPresent, + "rpad": rpad, + "gt": Gt, + "eq": Eq, +} + +var initializers []func() + +// EnablePrefixMatching allows to set automatic prefix matching. Automatic prefix matching can be a dangerous thing +// to automatically enable in CLI tools. +// Set this to true to enable it. +var EnablePrefixMatching = false + +// EnableCommandSorting controls sorting of the slice of commands, which is turned on by default. +// To disable sorting, set it to false. +var EnableCommandSorting = true + +// MousetrapHelpText enables an information splash screen on Windows +// if the CLI is started from explorer.exe. +// To disable the mousetrap, just set this variable to blank string (""). +// Works only on Microsoft Windows. +var MousetrapHelpText = `This is a command line tool. + +You need to open cmd.exe and run it from there. +` + +// MousetrapDisplayDuration controls how long the MousetrapHelpText message is displayed on Windows +// if the CLI is started from explorer.exe. Set to 0 to wait for the return key to be pressed. 
+// To disable the mousetrap, just set MousetrapHelpText to blank string (""). +// Works only on Microsoft Windows. +var MousetrapDisplayDuration = 5 * time.Second + +// AddTemplateFunc adds a template function that's available to Usage and Help +// template generation. +func AddTemplateFunc(name string, tmplFunc interface{}) { + templateFuncs[name] = tmplFunc +} + +// AddTemplateFuncs adds multiple template functions that are available to Usage and +// Help template generation. +func AddTemplateFuncs(tmplFuncs template.FuncMap) { + for k, v := range tmplFuncs { + templateFuncs[k] = v + } +} + +// OnInitialize sets the passed functions to be run when each command's +// Execute method is called. +func OnInitialize(y ...func()) { + initializers = append(initializers, y...) +} + +// FIXME Gt is unused by cobra and should be removed in a version 2. It exists only for compatibility with users of cobra. + +// Gt takes two types and checks whether the first type is greater than the second. In case of types Arrays, Chans, +// Maps and Slices, Gt will compare their lengths. Ints are compared directly while strings are first parsed as +// ints and then compared. +func Gt(a interface{}, b interface{}) bool { + var left, right int64 + av := reflect.ValueOf(a) + + switch av.Kind() { + case reflect.Array, reflect.Chan, reflect.Map, reflect.Slice: + left = int64(av.Len()) + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + left = av.Int() + case reflect.String: + left, _ = strconv.ParseInt(av.String(), 10, 64) + } + + bv := reflect.ValueOf(b) + + switch bv.Kind() { + case reflect.Array, reflect.Chan, reflect.Map, reflect.Slice: + right = int64(bv.Len()) + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + right = bv.Int() + case reflect.String: + right, _ = strconv.ParseInt(bv.String(), 10, 64) + } + + return left > right +} + +// FIXME Eq is unused by cobra and should be removed in a version 2. It exists only for compatibility with users of cobra. + +// Eq takes two types and checks whether they are equal. Supported types are int and string. Unsupported types will panic. +func Eq(a interface{}, b interface{}) bool { + av := reflect.ValueOf(a) + bv := reflect.ValueOf(b) + + switch av.Kind() { + case reflect.Array, reflect.Chan, reflect.Map, reflect.Slice: + panic("Eq called on unsupported type") + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + return av.Int() == bv.Int() + case reflect.String: + return av.String() == bv.String() + } + return false +} + +func trimRightSpace(s string) string { + return strings.TrimRightFunc(s, unicode.IsSpace) +} + +// FIXME appendIfNotPresent is unused by cobra and should be removed in a version 2. It exists only for compatibility with users of cobra. + +// appendIfNotPresent will append stringToAppend to the end of s, but only if it's not yet present in s. +func appendIfNotPresent(s, stringToAppend string) string { + if strings.Contains(s, stringToAppend) { + return s + } + return s + " " + stringToAppend +} + +// rpad adds padding to the right of a string. +func rpad(s string, padding int) string { + template := fmt.Sprintf("%%-%ds", padding) + return fmt.Sprintf(template, s) +} + +// tmpl executes the given template text on data, writing the result to w. 
+func tmpl(w io.Writer, text string, data interface{}) error { + t := template.New("top") + t.Funcs(templateFuncs) + template.Must(t.Parse(text)) + return t.Execute(w, data) +} + +// ld compares two strings and returns the levenshtein distance between them. +func ld(s, t string, ignoreCase bool) int { + if ignoreCase { + s = strings.ToLower(s) + t = strings.ToLower(t) + } + d := make([][]int, len(s)+1) + for i := range d { + d[i] = make([]int, len(t)+1) + } + for i := range d { + d[i][0] = i + } + for j := range d[0] { + d[0][j] = j + } + for j := 1; j <= len(t); j++ { + for i := 1; i <= len(s); i++ { + if s[i-1] == t[j-1] { + d[i][j] = d[i-1][j-1] + } else { + min := d[i-1][j] + if d[i][j-1] < min { + min = d[i][j-1] + } + if d[i-1][j-1] < min { + min = d[i-1][j-1] + } + d[i][j] = min + 1 + } + } + + } + return d[len(s)][len(t)] +} + +func stringInSlice(a string, list []string) bool { + for _, b := range list { + if b == a { + return true + } + } + return false +} + +// CheckErr prints the msg with the prefix 'Error:' and exits with error code 1. If the msg is nil, it does nothing. +func CheckErr(msg interface{}) { + if msg != nil { + fmt.Fprintln(os.Stderr, "Error:", msg) + os.Exit(1) + } +} + +// WriteStringAndCheck writes a string into a buffer, and checks if the error is not nil. +func WriteStringAndCheck(b io.StringWriter, s string) { + _, err := b.WriteString(s) + CheckErr(err) +} diff --git a/vendor/github.com/spf13/cobra/command.go b/vendor/github.com/spf13/cobra/command.go new file mode 100644 index 000000000..2cc18891d --- /dev/null +++ b/vendor/github.com/spf13/cobra/command.go @@ -0,0 +1,1680 @@ +// Copyright © 2013 Steve Francia . +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package cobra is a commander providing a simple interface to create powerful modern CLI interfaces. +// In addition to providing an interface, Cobra simultaneously provides a controller to organize your application code. +package cobra + +import ( + "bytes" + "context" + "fmt" + "io" + "os" + "path/filepath" + "sort" + "strings" + + flag "github.com/spf13/pflag" +) + +// FParseErrWhitelist configures Flag parse errors to be ignored +type FParseErrWhitelist flag.ParseErrorsWhitelist + +// Command is just that, a command for your application. +// E.g. 'go run ...' - 'run' is the command. Cobra requires +// you to define the usage and description as part of your command +// definition to ensure usability. +type Command struct { + // Use is the one-line usage message. + // Recommended syntax is as follow: + // [ ] identifies an optional argument. Arguments that are not enclosed in brackets are required. + // ... indicates that you can specify multiple values for the previous argument. + // | indicates mutually exclusive information. You can use the argument to the left of the separator or the + // argument to the right of the separator. You cannot use both arguments in a single use of the command. + // { } delimits a set of mutually exclusive arguments when one of the arguments is required. 
If the arguments are
+ // optional, they are enclosed in brackets ([ ]).
+ // Example: add [-F file | -D dir]... [-f format] profile
+ Use string
+
+ // Aliases is an array of aliases that can be used instead of the first word in Use.
+ Aliases []string
+
+ // SuggestFor is an array of command names for which this command will be suggested -
+ // similar to aliases but only suggests.
+ SuggestFor []string
+
+ // Short is the short description shown in the 'help' output.
+ Short string
+
+ // Long is the long message shown in the 'help <this-command>' output.
+ Long string
+
+ // Example is examples of how to use the command.
+ Example string
+
+ // ValidArgs is a list of all valid non-flag arguments that are accepted in shell completions.
+ ValidArgs []string
+ // ValidArgsFunction is an optional function that provides valid non-flag arguments for shell completion.
+ // It is a dynamic version of using ValidArgs.
+ // Only one of ValidArgs and ValidArgsFunction can be used for a command.
+ ValidArgsFunction func(cmd *Command, args []string, toComplete string) ([]string, ShellCompDirective)
+
+ // Expected arguments
+ Args PositionalArgs
+
+ // ArgAliases is a list of aliases for ValidArgs.
+ // These are not suggested to the user in the shell completion,
+ // but accepted if entered manually.
+ ArgAliases []string
+
+ // BashCompletionFunction is custom bash functions used by the legacy bash autocompletion generator.
+ // For portability with other shells, it is recommended to instead use ValidArgsFunction.
+ BashCompletionFunction string
+
+ // Deprecated defines, if this command is deprecated and should print this string when used.
+ Deprecated string
+
+ // Annotations are key/value pairs that can be used by applications to identify or
+ // group commands.
+ Annotations map[string]string
+
+ // Version defines the version for this command. If this value is non-empty and the command does not
+ // define a "version" flag, a "version" boolean flag will be added to the command and, if specified,
+ // will print content of the "Version" variable. A shorthand "v" flag will also be added if the
+ // command does not define one.
+ Version string
+
+ // The *Run functions are executed in the following order:
+ // * PersistentPreRun()
+ // * PreRun()
+ // * Run()
+ // * PostRun()
+ // * PersistentPostRun()
+ // All functions get the same args, the arguments after the command name.
+ //
+ // PersistentPreRun: children of this command will inherit and execute.
+ PersistentPreRun func(cmd *Command, args []string)
+ // PersistentPreRunE: PersistentPreRun but returns an error.
+ PersistentPreRunE func(cmd *Command, args []string) error
+ // PreRun: children of this command will not inherit.
+ PreRun func(cmd *Command, args []string)
+ // PreRunE: PreRun but returns an error.
+ PreRunE func(cmd *Command, args []string) error
+ // Run: Typically the actual work function. Most commands will only implement this.
+ Run func(cmd *Command, args []string)
+ // RunE: Run but returns an error.
+ RunE func(cmd *Command, args []string) error
+ // PostRun: run after the Run command.
+ PostRun func(cmd *Command, args []string)
+ // PostRunE: PostRun but returns an error.
+ PostRunE func(cmd *Command, args []string) error
+ // PersistentPostRun: children of this command will inherit and execute after PostRun.
+ PersistentPostRun func(cmd *Command, args []string)
+ // PersistentPostRunE: PersistentPostRun but returns an error.
+ PersistentPostRunE func(cmd *Command, args []string) error
+
+ // args is actual args parsed from flags.
+ args []string + // flagErrorBuf contains all error messages from pflag. + flagErrorBuf *bytes.Buffer + // flags is full set of flags. + flags *flag.FlagSet + // pflags contains persistent flags. + pflags *flag.FlagSet + // lflags contains local flags. + lflags *flag.FlagSet + // iflags contains inherited flags. + iflags *flag.FlagSet + // parentsPflags is all persistent flags of cmd's parents. + parentsPflags *flag.FlagSet + // globNormFunc is the global normalization function + // that we can use on every pflag set and children commands + globNormFunc func(f *flag.FlagSet, name string) flag.NormalizedName + + // usageFunc is usage func defined by user. + usageFunc func(*Command) error + // usageTemplate is usage template defined by user. + usageTemplate string + // flagErrorFunc is func defined by user and it's called when the parsing of + // flags returns an error. + flagErrorFunc func(*Command, error) error + // helpTemplate is help template defined by user. + helpTemplate string + // helpFunc is help func defined by user. + helpFunc func(*Command, []string) + // helpCommand is command with usage 'help'. If it's not defined by user, + // cobra uses default help command. + helpCommand *Command + // versionTemplate is the version template defined by user. + versionTemplate string + + // inReader is a reader defined by the user that replaces stdin + inReader io.Reader + // outWriter is a writer defined by the user that replaces stdout + outWriter io.Writer + // errWriter is a writer defined by the user that replaces stderr + errWriter io.Writer + + //FParseErrWhitelist flag parse errors to be ignored + FParseErrWhitelist FParseErrWhitelist + + // CompletionOptions is a set of options to control the handling of shell completion + CompletionOptions CompletionOptions + + // commandsAreSorted defines, if command slice are sorted or not. + commandsAreSorted bool + // commandCalledAs is the name or alias value used to call this command. + commandCalledAs struct { + name string + called bool + } + + ctx context.Context + + // commands is the list of commands supported by this program. + commands []*Command + // parent is a parent command for this command. + parent *Command + // Max lengths of commands' string lengths for use in padding. + commandsMaxUseLen int + commandsMaxCommandPathLen int + commandsMaxNameLen int + + // TraverseChildren parses flags on all parents before executing child command. + TraverseChildren bool + + // Hidden defines, if this command is hidden and should NOT show up in the list of available commands. + Hidden bool + + // SilenceErrors is an option to quiet errors down stream. + SilenceErrors bool + + // SilenceUsage is an option to silence usage when an error occurs. + SilenceUsage bool + + // DisableFlagParsing disables the flag parsing. + // If this is true all flags will be passed to the command as arguments. + DisableFlagParsing bool + + // DisableAutoGenTag defines, if gen tag ("Auto generated by spf13/cobra...") + // will be printed by generating docs for this command. + DisableAutoGenTag bool + + // DisableFlagsInUseLine will disable the addition of [flags] to the usage + // line of a command when printing help or generating docs + DisableFlagsInUseLine bool + + // DisableSuggestions disables the suggestions based on Levenshtein distance + // that go along with 'unknown command' messages. + DisableSuggestions bool + + // SuggestionsMinimumDistance defines minimum levenshtein distance to display suggestions. + // Must be > 0. 
+ SuggestionsMinimumDistance int +} + +// Context returns underlying command context. If command wasn't +// executed with ExecuteContext Context returns Background context. +func (c *Command) Context() context.Context { + return c.ctx +} + +// SetArgs sets arguments for the command. It is set to os.Args[1:] by default, if desired, can be overridden +// particularly useful when testing. +func (c *Command) SetArgs(a []string) { + c.args = a +} + +// SetOutput sets the destination for usage and error messages. +// If output is nil, os.Stderr is used. +// Deprecated: Use SetOut and/or SetErr instead +func (c *Command) SetOutput(output io.Writer) { + c.outWriter = output + c.errWriter = output +} + +// SetOut sets the destination for usage messages. +// If newOut is nil, os.Stdout is used. +func (c *Command) SetOut(newOut io.Writer) { + c.outWriter = newOut +} + +// SetErr sets the destination for error messages. +// If newErr is nil, os.Stderr is used. +func (c *Command) SetErr(newErr io.Writer) { + c.errWriter = newErr +} + +// SetIn sets the source for input data +// If newIn is nil, os.Stdin is used. +func (c *Command) SetIn(newIn io.Reader) { + c.inReader = newIn +} + +// SetUsageFunc sets usage function. Usage can be defined by application. +func (c *Command) SetUsageFunc(f func(*Command) error) { + c.usageFunc = f +} + +// SetUsageTemplate sets usage template. Can be defined by Application. +func (c *Command) SetUsageTemplate(s string) { + c.usageTemplate = s +} + +// SetFlagErrorFunc sets a function to generate an error when flag parsing +// fails. +func (c *Command) SetFlagErrorFunc(f func(*Command, error) error) { + c.flagErrorFunc = f +} + +// SetHelpFunc sets help function. Can be defined by Application. +func (c *Command) SetHelpFunc(f func(*Command, []string)) { + c.helpFunc = f +} + +// SetHelpCommand sets help command. +func (c *Command) SetHelpCommand(cmd *Command) { + c.helpCommand = cmd +} + +// SetHelpTemplate sets help template to be used. Application can use it to set custom template. +func (c *Command) SetHelpTemplate(s string) { + c.helpTemplate = s +} + +// SetVersionTemplate sets version template to be used. Application can use it to set custom template. +func (c *Command) SetVersionTemplate(s string) { + c.versionTemplate = s +} + +// SetGlobalNormalizationFunc sets a normalization function to all flag sets and also to child commands. +// The user should not have a cyclic dependency on commands. +func (c *Command) SetGlobalNormalizationFunc(n func(f *flag.FlagSet, name string) flag.NormalizedName) { + c.Flags().SetNormalizeFunc(n) + c.PersistentFlags().SetNormalizeFunc(n) + c.globNormFunc = n + + for _, command := range c.commands { + command.SetGlobalNormalizationFunc(n) + } +} + +// OutOrStdout returns output to stdout. 
+func (c *Command) OutOrStdout() io.Writer { + return c.getOut(os.Stdout) +} + +// OutOrStderr returns output to stderr +func (c *Command) OutOrStderr() io.Writer { + return c.getOut(os.Stderr) +} + +// ErrOrStderr returns output to stderr +func (c *Command) ErrOrStderr() io.Writer { + return c.getErr(os.Stderr) +} + +// InOrStdin returns input to stdin +func (c *Command) InOrStdin() io.Reader { + return c.getIn(os.Stdin) +} + +func (c *Command) getOut(def io.Writer) io.Writer { + if c.outWriter != nil { + return c.outWriter + } + if c.HasParent() { + return c.parent.getOut(def) + } + return def +} + +func (c *Command) getErr(def io.Writer) io.Writer { + if c.errWriter != nil { + return c.errWriter + } + if c.HasParent() { + return c.parent.getErr(def) + } + return def +} + +func (c *Command) getIn(def io.Reader) io.Reader { + if c.inReader != nil { + return c.inReader + } + if c.HasParent() { + return c.parent.getIn(def) + } + return def +} + +// UsageFunc returns either the function set by SetUsageFunc for this command +// or a parent, or it returns a default usage function. +func (c *Command) UsageFunc() (f func(*Command) error) { + if c.usageFunc != nil { + return c.usageFunc + } + if c.HasParent() { + return c.Parent().UsageFunc() + } + return func(c *Command) error { + c.mergePersistentFlags() + err := tmpl(c.OutOrStderr(), c.UsageTemplate(), c) + if err != nil { + c.PrintErrln(err) + } + return err + } +} + +// Usage puts out the usage for the command. +// Used when a user provides invalid input. +// Can be defined by user by overriding UsageFunc. +func (c *Command) Usage() error { + return c.UsageFunc()(c) +} + +// HelpFunc returns either the function set by SetHelpFunc for this command +// or a parent, or it returns a function with default help behavior. +func (c *Command) HelpFunc() func(*Command, []string) { + if c.helpFunc != nil { + return c.helpFunc + } + if c.HasParent() { + return c.Parent().HelpFunc() + } + return func(c *Command, a []string) { + c.mergePersistentFlags() + // The help should be sent to stdout + // See https://github.com/spf13/cobra/issues/1002 + err := tmpl(c.OutOrStdout(), c.HelpTemplate(), c) + if err != nil { + c.PrintErrln(err) + } + } +} + +// Help puts out the help for the command. +// Used when a user calls help [command]. +// Can be defined by user by overriding HelpFunc. +func (c *Command) Help() error { + c.HelpFunc()(c, []string{}) + return nil +} + +// UsageString returns usage string. +func (c *Command) UsageString() string { + // Storing normal writers + tmpOutput := c.outWriter + tmpErr := c.errWriter + + bb := new(bytes.Buffer) + c.outWriter = bb + c.errWriter = bb + + CheckErr(c.Usage()) + + // Setting things back to normal + c.outWriter = tmpOutput + c.errWriter = tmpErr + + return bb.String() +} + +// FlagErrorFunc returns either the function set by SetFlagErrorFunc for this +// command or a parent, or it returns a function which returns the original +// error. +func (c *Command) FlagErrorFunc() (f func(*Command, error) error) { + if c.flagErrorFunc != nil { + return c.flagErrorFunc + } + + if c.HasParent() { + return c.parent.FlagErrorFunc() + } + return func(c *Command, err error) error { + return err + } +} + +var minUsagePadding = 25 + +// UsagePadding return padding for the usage. 
+func (c *Command) UsagePadding() int { + if c.parent == nil || minUsagePadding > c.parent.commandsMaxUseLen { + return minUsagePadding + } + return c.parent.commandsMaxUseLen +} + +var minCommandPathPadding = 11 + +// CommandPathPadding return padding for the command path. +func (c *Command) CommandPathPadding() int { + if c.parent == nil || minCommandPathPadding > c.parent.commandsMaxCommandPathLen { + return minCommandPathPadding + } + return c.parent.commandsMaxCommandPathLen +} + +var minNamePadding = 11 + +// NamePadding returns padding for the name. +func (c *Command) NamePadding() int { + if c.parent == nil || minNamePadding > c.parent.commandsMaxNameLen { + return minNamePadding + } + return c.parent.commandsMaxNameLen +} + +// UsageTemplate returns usage template for the command. +func (c *Command) UsageTemplate() string { + if c.usageTemplate != "" { + return c.usageTemplate + } + + if c.HasParent() { + return c.parent.UsageTemplate() + } + return `Usage:{{if .Runnable}} + {{.UseLine}}{{end}}{{if .HasAvailableSubCommands}} + {{.CommandPath}} [command]{{end}}{{if gt (len .Aliases) 0}} + +Aliases: + {{.NameAndAliases}}{{end}}{{if .HasExample}} + +Examples: +{{.Example}}{{end}}{{if .HasAvailableSubCommands}} + +Available Commands:{{range .Commands}}{{if (or .IsAvailableCommand (eq .Name "help"))}} + {{rpad .Name .NamePadding }} {{.Short}}{{end}}{{end}}{{end}}{{if .HasAvailableLocalFlags}} + +Flags: +{{.LocalFlags.FlagUsages | trimTrailingWhitespaces}}{{end}}{{if .HasAvailableInheritedFlags}} + +Global Flags: +{{.InheritedFlags.FlagUsages | trimTrailingWhitespaces}}{{end}}{{if .HasHelpSubCommands}} + +Additional help topics:{{range .Commands}}{{if .IsAdditionalHelpTopicCommand}} + {{rpad .CommandPath .CommandPathPadding}} {{.Short}}{{end}}{{end}}{{end}}{{if .HasAvailableSubCommands}} + +Use "{{.CommandPath}} [command] --help" for more information about a command.{{end}} +` +} + +// HelpTemplate return help template for the command. +func (c *Command) HelpTemplate() string { + if c.helpTemplate != "" { + return c.helpTemplate + } + + if c.HasParent() { + return c.parent.HelpTemplate() + } + return `{{with (or .Long .Short)}}{{. | trimTrailingWhitespaces}} + +{{end}}{{if or .Runnable .HasSubCommands}}{{.UsageString}}{{end}}` +} + +// VersionTemplate return version template for the command. +func (c *Command) VersionTemplate() string { + if c.versionTemplate != "" { + return c.versionTemplate + } + + if c.HasParent() { + return c.parent.VersionTemplate() + } + return `{{with .Name}}{{printf "%s " .}}{{end}}{{printf "version %s" .Version}} +` +} + +func hasNoOptDefVal(name string, fs *flag.FlagSet) bool { + flag := fs.Lookup(name) + if flag == nil { + return false + } + return flag.NoOptDefVal != "" +} + +func shortHasNoOptDefVal(name string, fs *flag.FlagSet) bool { + if len(name) == 0 { + return false + } + + flag := fs.ShorthandLookup(name[:1]) + if flag == nil { + return false + } + return flag.NoOptDefVal != "" +} + +func stripFlags(args []string, c *Command) []string { + if len(args) == 0 { + return args + } + c.mergePersistentFlags() + + commands := []string{} + flags := c.Flags() + +Loop: + for len(args) > 0 { + s := args[0] + args = args[1:] + switch { + case s == "--": + // "--" terminates the flags + break Loop + case strings.HasPrefix(s, "--") && !strings.Contains(s, "=") && !hasNoOptDefVal(s[2:], flags): + // If '--flag arg' then + // delete arg from args. 
+ fallthrough // (do the same as below) + case strings.HasPrefix(s, "-") && !strings.Contains(s, "=") && len(s) == 2 && !shortHasNoOptDefVal(s[1:], flags): + // If '-f arg' then + // delete 'arg' from args or break the loop if len(args) <= 1. + if len(args) <= 1 { + break Loop + } else { + args = args[1:] + continue + } + case s != "" && !strings.HasPrefix(s, "-"): + commands = append(commands, s) + } + } + + return commands +} + +// argsMinusFirstX removes only the first x from args. Otherwise, commands that look like +// openshift admin policy add-role-to-user admin my-user, lose the admin argument (arg[4]). +func argsMinusFirstX(args []string, x string) []string { + for i, y := range args { + if x == y { + ret := []string{} + ret = append(ret, args[:i]...) + ret = append(ret, args[i+1:]...) + return ret + } + } + return args +} + +func isFlagArg(arg string) bool { + return ((len(arg) >= 3 && arg[1] == '-') || + (len(arg) >= 2 && arg[0] == '-' && arg[1] != '-')) +} + +// Find the target command given the args and command tree +// Meant to be run on the highest node. Only searches down. +func (c *Command) Find(args []string) (*Command, []string, error) { + var innerfind func(*Command, []string) (*Command, []string) + + innerfind = func(c *Command, innerArgs []string) (*Command, []string) { + argsWOflags := stripFlags(innerArgs, c) + if len(argsWOflags) == 0 { + return c, innerArgs + } + nextSubCmd := argsWOflags[0] + + cmd := c.findNext(nextSubCmd) + if cmd != nil { + return innerfind(cmd, argsMinusFirstX(innerArgs, nextSubCmd)) + } + return c, innerArgs + } + + commandFound, a := innerfind(c, args) + if commandFound.Args == nil { + return commandFound, a, legacyArgs(commandFound, stripFlags(a, commandFound)) + } + return commandFound, a, nil +} + +func (c *Command) findSuggestions(arg string) string { + if c.DisableSuggestions { + return "" + } + if c.SuggestionsMinimumDistance <= 0 { + c.SuggestionsMinimumDistance = 2 + } + suggestionsString := "" + if suggestions := c.SuggestionsFor(arg); len(suggestions) > 0 { + suggestionsString += "\n\nDid you mean this?\n" + for _, s := range suggestions { + suggestionsString += fmt.Sprintf("\t%v\n", s) + } + } + return suggestionsString +} + +func (c *Command) findNext(next string) *Command { + matches := make([]*Command, 0) + for _, cmd := range c.commands { + if cmd.Name() == next || cmd.HasAlias(next) { + cmd.commandCalledAs.name = next + return cmd + } + if EnablePrefixMatching && cmd.hasNameOrAliasPrefix(next) { + matches = append(matches, cmd) + } + } + + if len(matches) == 1 { + return matches[0] + } + + return nil +} + +// Traverse the command tree to find the command, and parse args for +// each parent. 
+func (c *Command) Traverse(args []string) (*Command, []string, error) { + flags := []string{} + inFlag := false + + for i, arg := range args { + switch { + // A long flag with a space separated value + case strings.HasPrefix(arg, "--") && !strings.Contains(arg, "="): + // TODO: this isn't quite right, we should really check ahead for 'true' or 'false' + inFlag = !hasNoOptDefVal(arg[2:], c.Flags()) + flags = append(flags, arg) + continue + // A short flag with a space separated value + case strings.HasPrefix(arg, "-") && !strings.Contains(arg, "=") && len(arg) == 2 && !shortHasNoOptDefVal(arg[1:], c.Flags()): + inFlag = true + flags = append(flags, arg) + continue + // The value for a flag + case inFlag: + inFlag = false + flags = append(flags, arg) + continue + // A flag without a value, or with an `=` separated value + case isFlagArg(arg): + flags = append(flags, arg) + continue + } + + cmd := c.findNext(arg) + if cmd == nil { + return c, args, nil + } + + if err := c.ParseFlags(flags); err != nil { + return nil, args, err + } + return cmd.Traverse(args[i+1:]) + } + return c, args, nil +} + +// SuggestionsFor provides suggestions for the typedName. +func (c *Command) SuggestionsFor(typedName string) []string { + suggestions := []string{} + for _, cmd := range c.commands { + if cmd.IsAvailableCommand() { + levenshteinDistance := ld(typedName, cmd.Name(), true) + suggestByLevenshtein := levenshteinDistance <= c.SuggestionsMinimumDistance + suggestByPrefix := strings.HasPrefix(strings.ToLower(cmd.Name()), strings.ToLower(typedName)) + if suggestByLevenshtein || suggestByPrefix { + suggestions = append(suggestions, cmd.Name()) + } + for _, explicitSuggestion := range cmd.SuggestFor { + if strings.EqualFold(typedName, explicitSuggestion) { + suggestions = append(suggestions, cmd.Name()) + } + } + } + } + return suggestions +} + +// VisitParents visits all parents of the command and invokes fn on each parent. +func (c *Command) VisitParents(fn func(*Command)) { + if c.HasParent() { + fn(c.Parent()) + c.Parent().VisitParents(fn) + } +} + +// Root finds root command. +func (c *Command) Root() *Command { + if c.HasParent() { + return c.Parent().Root() + } + return c +} + +// ArgsLenAtDash will return the length of c.Flags().Args at the moment +// when a -- was found during args parsing. +func (c *Command) ArgsLenAtDash() int { + return c.Flags().ArgsLenAtDash() +} + +func (c *Command) execute(a []string) (err error) { + if c == nil { + return fmt.Errorf("Called Execute() on a nil Command") + } + + if len(c.Deprecated) > 0 { + c.Printf("Command %q is deprecated, %s\n", c.Name(), c.Deprecated) + } + + // initialize help and version flag at the last point possible to allow for user + // overriding + c.InitDefaultHelpFlag() + c.InitDefaultVersionFlag() + + err = c.ParseFlags(a) + if err != nil { + return c.FlagErrorFunc()(c, err) + } + + // If help is called, regardless of other flags, return we want help. + // Also say we need help if the command isn't runnable. + helpVal, err := c.Flags().GetBool("help") + if err != nil { + // should be impossible to get here as we always declare a help + // flag in InitDefaultHelpFlag() + c.Println("\"help\" flag declared as non-bool. Please correct your code") + return err + } + + if helpVal { + return flag.ErrHelp + } + + // for back-compat, only add version flag behavior if version is defined + if c.Version != "" { + versionVal, err := c.Flags().GetBool("version") + if err != nil { + c.Println("\"version\" flag declared as non-bool. 
Please correct your code") + return err + } + if versionVal { + err := tmpl(c.OutOrStdout(), c.VersionTemplate(), c) + if err != nil { + c.Println(err) + } + return err + } + } + + if !c.Runnable() { + return flag.ErrHelp + } + + c.preRun() + + argWoFlags := c.Flags().Args() + if c.DisableFlagParsing { + argWoFlags = a + } + + if err := c.ValidateArgs(argWoFlags); err != nil { + return err + } + + for p := c; p != nil; p = p.Parent() { + if p.PersistentPreRunE != nil { + if err := p.PersistentPreRunE(c, argWoFlags); err != nil { + return err + } + break + } else if p.PersistentPreRun != nil { + p.PersistentPreRun(c, argWoFlags) + break + } + } + if c.PreRunE != nil { + if err := c.PreRunE(c, argWoFlags); err != nil { + return err + } + } else if c.PreRun != nil { + c.PreRun(c, argWoFlags) + } + + if err := c.validateRequiredFlags(); err != nil { + return err + } + if c.RunE != nil { + if err := c.RunE(c, argWoFlags); err != nil { + return err + } + } else { + c.Run(c, argWoFlags) + } + if c.PostRunE != nil { + if err := c.PostRunE(c, argWoFlags); err != nil { + return err + } + } else if c.PostRun != nil { + c.PostRun(c, argWoFlags) + } + for p := c; p != nil; p = p.Parent() { + if p.PersistentPostRunE != nil { + if err := p.PersistentPostRunE(c, argWoFlags); err != nil { + return err + } + break + } else if p.PersistentPostRun != nil { + p.PersistentPostRun(c, argWoFlags) + break + } + } + + return nil +} + +func (c *Command) preRun() { + for _, x := range initializers { + x() + } +} + +// ExecuteContext is the same as Execute(), but sets the ctx on the command. +// Retrieve ctx by calling cmd.Context() inside your *Run lifecycle or ValidArgs +// functions. +func (c *Command) ExecuteContext(ctx context.Context) error { + c.ctx = ctx + return c.Execute() +} + +// Execute uses the args (os.Args[1:] by default) +// and run through the command tree finding appropriate matches +// for commands and then corresponding flags. +func (c *Command) Execute() error { + _, err := c.ExecuteC() + return err +} + +// ExecuteContextC is the same as ExecuteC(), but sets the ctx on the command. +// Retrieve ctx by calling cmd.Context() inside your *Run lifecycle or ValidArgs +// functions. +func (c *Command) ExecuteContextC(ctx context.Context) (*Command, error) { + c.ctx = ctx + return c.ExecuteC() +} + +// ExecuteC executes the command. 
+func (c *Command) ExecuteC() (cmd *Command, err error) { + if c.ctx == nil { + c.ctx = context.Background() + } + + // Regardless of what command execute is called on, run on Root only + if c.HasParent() { + return c.Root().ExecuteC() + } + + // windows hook + if preExecHookFn != nil { + preExecHookFn(c) + } + + // initialize help at the last point to allow for user overriding + c.InitDefaultHelpCmd() + // initialize completion at the last point to allow for user overriding + c.initDefaultCompletionCmd() + + args := c.args + + // Workaround FAIL with "go test -v" or "cobra.test -test.v", see #155 + if c.args == nil && filepath.Base(os.Args[0]) != "cobra.test" { + args = os.Args[1:] + } + + // initialize the hidden command to be used for shell completion + c.initCompleteCmd(args) + + var flags []string + if c.TraverseChildren { + cmd, flags, err = c.Traverse(args) + } else { + cmd, flags, err = c.Find(args) + } + if err != nil { + // If found parse to a subcommand and then failed, talk about the subcommand + if cmd != nil { + c = cmd + } + if !c.SilenceErrors { + c.PrintErrln("Error:", err.Error()) + c.PrintErrf("Run '%v --help' for usage.\n", c.CommandPath()) + } + return c, err + } + + cmd.commandCalledAs.called = true + if cmd.commandCalledAs.name == "" { + cmd.commandCalledAs.name = cmd.Name() + } + + // We have to pass global context to children command + // if context is present on the parent command. + if cmd.ctx == nil { + cmd.ctx = c.ctx + } + + err = cmd.execute(flags) + if err != nil { + // Always show help if requested, even if SilenceErrors is in + // effect + if err == flag.ErrHelp { + cmd.HelpFunc()(cmd, args) + return cmd, nil + } + + // If root command has SilenceErrors flagged, + // all subcommands should respect it + if !cmd.SilenceErrors && !c.SilenceErrors { + c.PrintErrln("Error:", err.Error()) + } + + // If root command has SilenceUsage flagged, + // all subcommands should respect it + if !cmd.SilenceUsage && !c.SilenceUsage { + c.Println(cmd.UsageString()) + } + } + return cmd, err +} + +func (c *Command) ValidateArgs(args []string) error { + if c.Args == nil { + return nil + } + return c.Args(c, args) +} + +func (c *Command) validateRequiredFlags() error { + if c.DisableFlagParsing { + return nil + } + + flags := c.Flags() + missingFlagNames := []string{} + flags.VisitAll(func(pflag *flag.Flag) { + requiredAnnotation, found := pflag.Annotations[BashCompOneRequiredFlag] + if !found { + return + } + if (requiredAnnotation[0] == "true") && !pflag.Changed { + missingFlagNames = append(missingFlagNames, pflag.Name) + } + }) + + if len(missingFlagNames) > 0 { + return fmt.Errorf(`required flag(s) "%s" not set`, strings.Join(missingFlagNames, `", "`)) + } + return nil +} + +// InitDefaultHelpFlag adds default help flag to c. +// It is called automatically by executing the c or by calling help and usage. +// If c already has help flag, it will do nothing. +func (c *Command) InitDefaultHelpFlag() { + c.mergePersistentFlags() + if c.Flags().Lookup("help") == nil { + usage := "help for " + if c.Name() == "" { + usage += "this command" + } else { + usage += c.Name() + } + c.Flags().BoolP("help", "h", false, usage) + } +} + +// InitDefaultVersionFlag adds default version flag to c. +// It is called automatically by executing the c. +// If c already has a version flag, it will do nothing. +// If c.Version is empty, it will do nothing. 
+func (c *Command) InitDefaultVersionFlag() { + if c.Version == "" { + return + } + + c.mergePersistentFlags() + if c.Flags().Lookup("version") == nil { + usage := "version for " + if c.Name() == "" { + usage += "this command" + } else { + usage += c.Name() + } + if c.Flags().ShorthandLookup("v") == nil { + c.Flags().BoolP("version", "v", false, usage) + } else { + c.Flags().Bool("version", false, usage) + } + } +} + +// InitDefaultHelpCmd adds default help command to c. +// It is called automatically by executing the c or by calling help and usage. +// If c already has help command or c has no subcommands, it will do nothing. +func (c *Command) InitDefaultHelpCmd() { + if !c.HasSubCommands() { + return + } + + if c.helpCommand == nil { + c.helpCommand = &Command{ + Use: "help [command]", + Short: "Help about any command", + Long: `Help provides help for any command in the application. +Simply type ` + c.Name() + ` help [path to command] for full details.`, + ValidArgsFunction: func(c *Command, args []string, toComplete string) ([]string, ShellCompDirective) { + var completions []string + cmd, _, e := c.Root().Find(args) + if e != nil { + return nil, ShellCompDirectiveNoFileComp + } + if cmd == nil { + // Root help command. + cmd = c.Root() + } + for _, subCmd := range cmd.Commands() { + if subCmd.IsAvailableCommand() || subCmd == cmd.helpCommand { + if strings.HasPrefix(subCmd.Name(), toComplete) { + completions = append(completions, fmt.Sprintf("%s\t%s", subCmd.Name(), subCmd.Short)) + } + } + } + return completions, ShellCompDirectiveNoFileComp + }, + Run: func(c *Command, args []string) { + cmd, _, e := c.Root().Find(args) + if cmd == nil || e != nil { + c.Printf("Unknown help topic %#q\n", args) + CheckErr(c.Root().Usage()) + } else { + cmd.InitDefaultHelpFlag() // make possible 'help' flag to be shown + CheckErr(cmd.Help()) + } + }, + } + } + c.RemoveCommand(c.helpCommand) + c.AddCommand(c.helpCommand) +} + +// ResetCommands delete parent, subcommand and help command from c. +func (c *Command) ResetCommands() { + c.parent = nil + c.commands = nil + c.helpCommand = nil + c.parentsPflags = nil +} + +// Sorts commands by their names. +type commandSorterByName []*Command + +func (c commandSorterByName) Len() int { return len(c) } +func (c commandSorterByName) Swap(i, j int) { c[i], c[j] = c[j], c[i] } +func (c commandSorterByName) Less(i, j int) bool { return c[i].Name() < c[j].Name() } + +// Commands returns a sorted slice of child commands. +func (c *Command) Commands() []*Command { + // do not sort commands if it already sorted or sorting was disabled + if EnableCommandSorting && !c.commandsAreSorted { + sort.Sort(commandSorterByName(c.commands)) + c.commandsAreSorted = true + } + return c.commands +} + +// AddCommand adds one or more commands to this parent command. 
+func (c *Command) AddCommand(cmds ...*Command) { + for i, x := range cmds { + if cmds[i] == c { + panic("Command can't be a child of itself") + } + cmds[i].parent = c + // update max lengths + usageLen := len(x.Use) + if usageLen > c.commandsMaxUseLen { + c.commandsMaxUseLen = usageLen + } + commandPathLen := len(x.CommandPath()) + if commandPathLen > c.commandsMaxCommandPathLen { + c.commandsMaxCommandPathLen = commandPathLen + } + nameLen := len(x.Name()) + if nameLen > c.commandsMaxNameLen { + c.commandsMaxNameLen = nameLen + } + // If global normalization function exists, update all children + if c.globNormFunc != nil { + x.SetGlobalNormalizationFunc(c.globNormFunc) + } + c.commands = append(c.commands, x) + c.commandsAreSorted = false + } +} + +// RemoveCommand removes one or more commands from a parent command. +func (c *Command) RemoveCommand(cmds ...*Command) { + commands := []*Command{} +main: + for _, command := range c.commands { + for _, cmd := range cmds { + if command == cmd { + command.parent = nil + continue main + } + } + commands = append(commands, command) + } + c.commands = commands + // recompute all lengths + c.commandsMaxUseLen = 0 + c.commandsMaxCommandPathLen = 0 + c.commandsMaxNameLen = 0 + for _, command := range c.commands { + usageLen := len(command.Use) + if usageLen > c.commandsMaxUseLen { + c.commandsMaxUseLen = usageLen + } + commandPathLen := len(command.CommandPath()) + if commandPathLen > c.commandsMaxCommandPathLen { + c.commandsMaxCommandPathLen = commandPathLen + } + nameLen := len(command.Name()) + if nameLen > c.commandsMaxNameLen { + c.commandsMaxNameLen = nameLen + } + } +} + +// Print is a convenience method to Print to the defined output, fallback to Stderr if not set. +func (c *Command) Print(i ...interface{}) { + fmt.Fprint(c.OutOrStderr(), i...) +} + +// Println is a convenience method to Println to the defined output, fallback to Stderr if not set. +func (c *Command) Println(i ...interface{}) { + c.Print(fmt.Sprintln(i...)) +} + +// Printf is a convenience method to Printf to the defined output, fallback to Stderr if not set. +func (c *Command) Printf(format string, i ...interface{}) { + c.Print(fmt.Sprintf(format, i...)) +} + +// PrintErr is a convenience method to Print to the defined Err output, fallback to Stderr if not set. +func (c *Command) PrintErr(i ...interface{}) { + fmt.Fprint(c.ErrOrStderr(), i...) +} + +// PrintErrln is a convenience method to Println to the defined Err output, fallback to Stderr if not set. +func (c *Command) PrintErrln(i ...interface{}) { + c.PrintErr(fmt.Sprintln(i...)) +} + +// PrintErrf is a convenience method to Printf to the defined Err output, fallback to Stderr if not set. +func (c *Command) PrintErrf(format string, i ...interface{}) { + c.PrintErr(fmt.Sprintf(format, i...)) +} + +// CommandPath returns the full path to this command. +func (c *Command) CommandPath() string { + if c.HasParent() { + return c.Parent().CommandPath() + " " + c.Name() + } + return c.Name() +} + +// UseLine puts out the full usage for a given command (including parents). +func (c *Command) UseLine() string { + var useline string + if c.HasParent() { + useline = c.parent.CommandPath() + " " + c.Use + } else { + useline = c.Use + } + if c.DisableFlagsInUseLine { + return useline + } + if c.HasAvailableFlags() && !strings.Contains(useline, "[flags]") { + useline += " [flags]" + } + return useline +} + +// DebugFlags used to determine which flags have been assigned to which commands +// and which persist. 
+func (c *Command) DebugFlags() { + c.Println("DebugFlags called on", c.Name()) + var debugflags func(*Command) + + debugflags = func(x *Command) { + if x.HasFlags() || x.HasPersistentFlags() { + c.Println(x.Name()) + } + if x.HasFlags() { + x.flags.VisitAll(func(f *flag.Flag) { + if x.HasPersistentFlags() && x.persistentFlag(f.Name) != nil { + c.Println(" -"+f.Shorthand+",", "--"+f.Name, "["+f.DefValue+"]", "", f.Value, " [LP]") + } else { + c.Println(" -"+f.Shorthand+",", "--"+f.Name, "["+f.DefValue+"]", "", f.Value, " [L]") + } + }) + } + if x.HasPersistentFlags() { + x.pflags.VisitAll(func(f *flag.Flag) { + if x.HasFlags() { + if x.flags.Lookup(f.Name) == nil { + c.Println(" -"+f.Shorthand+",", "--"+f.Name, "["+f.DefValue+"]", "", f.Value, " [P]") + } + } else { + c.Println(" -"+f.Shorthand+",", "--"+f.Name, "["+f.DefValue+"]", "", f.Value, " [P]") + } + }) + } + c.Println(x.flagErrorBuf) + if x.HasSubCommands() { + for _, y := range x.commands { + debugflags(y) + } + } + } + + debugflags(c) +} + +// Name returns the command's name: the first word in the use line. +func (c *Command) Name() string { + name := c.Use + i := strings.Index(name, " ") + if i >= 0 { + name = name[:i] + } + return name +} + +// HasAlias determines if a given string is an alias of the command. +func (c *Command) HasAlias(s string) bool { + for _, a := range c.Aliases { + if a == s { + return true + } + } + return false +} + +// CalledAs returns the command name or alias that was used to invoke +// this command or an empty string if the command has not been called. +func (c *Command) CalledAs() string { + if c.commandCalledAs.called { + return c.commandCalledAs.name + } + return "" +} + +// hasNameOrAliasPrefix returns true if the Name or any of aliases start +// with prefix +func (c *Command) hasNameOrAliasPrefix(prefix string) bool { + if strings.HasPrefix(c.Name(), prefix) { + c.commandCalledAs.name = c.Name() + return true + } + for _, alias := range c.Aliases { + if strings.HasPrefix(alias, prefix) { + c.commandCalledAs.name = alias + return true + } + } + return false +} + +// NameAndAliases returns a list of the command name and all aliases +func (c *Command) NameAndAliases() string { + return strings.Join(append([]string{c.Name()}, c.Aliases...), ", ") +} + +// HasExample determines if the command has example. +func (c *Command) HasExample() bool { + return len(c.Example) > 0 +} + +// Runnable determines if the command is itself runnable. +func (c *Command) Runnable() bool { + return c.Run != nil || c.RunE != nil +} + +// HasSubCommands determines if the command has children commands. +func (c *Command) HasSubCommands() bool { + return len(c.commands) > 0 +} + +// IsAvailableCommand determines if a command is available as a non-help command +// (this includes all non deprecated/hidden commands). +func (c *Command) IsAvailableCommand() bool { + if len(c.Deprecated) != 0 || c.Hidden { + return false + } + + if c.HasParent() && c.Parent().helpCommand == c { + return false + } + + if c.Runnable() || c.HasAvailableSubCommands() { + return true + } + + return false +} + +// IsAdditionalHelpTopicCommand determines if a command is an additional +// help topic command; additional help topic command is determined by the +// fact that it is NOT runnable/hidden/deprecated, and has no sub commands that +// are runnable/hidden/deprecated. +// Concrete example: https://github.com/spf13/cobra/issues/393#issuecomment-282741924. 
+func (c *Command) IsAdditionalHelpTopicCommand() bool { + // if a command is runnable, deprecated, or hidden it is not a 'help' command + if c.Runnable() || len(c.Deprecated) != 0 || c.Hidden { + return false + } + + // if any non-help sub commands are found, the command is not a 'help' command + for _, sub := range c.commands { + if !sub.IsAdditionalHelpTopicCommand() { + return false + } + } + + // the command either has no sub commands, or no non-help sub commands + return true +} + +// HasHelpSubCommands determines if a command has any available 'help' sub commands +// that need to be shown in the usage/help default template under 'additional help +// topics'. +func (c *Command) HasHelpSubCommands() bool { + // return true on the first found available 'help' sub command + for _, sub := range c.commands { + if sub.IsAdditionalHelpTopicCommand() { + return true + } + } + + // the command either has no sub commands, or no available 'help' sub commands + return false +} + +// HasAvailableSubCommands determines if a command has available sub commands that +// need to be shown in the usage/help default template under 'available commands'. +func (c *Command) HasAvailableSubCommands() bool { + // return true on the first found available (non deprecated/help/hidden) + // sub command + for _, sub := range c.commands { + if sub.IsAvailableCommand() { + return true + } + } + + // the command either has no sub commands, or no available (non deprecated/help/hidden) + // sub commands + return false +} + +// HasParent determines if the command is a child command. +func (c *Command) HasParent() bool { + return c.parent != nil +} + +// GlobalNormalizationFunc returns the global normalization function or nil if it doesn't exist. +func (c *Command) GlobalNormalizationFunc() func(f *flag.FlagSet, name string) flag.NormalizedName { + return c.globNormFunc +} + +// Flags returns the complete FlagSet that applies +// to this command (local and persistent declared here and by all parents). +func (c *Command) Flags() *flag.FlagSet { + if c.flags == nil { + c.flags = flag.NewFlagSet(c.Name(), flag.ContinueOnError) + if c.flagErrorBuf == nil { + c.flagErrorBuf = new(bytes.Buffer) + } + c.flags.SetOutput(c.flagErrorBuf) + } + + return c.flags +} + +// LocalNonPersistentFlags are flags specific to this command which will NOT persist to subcommands. +func (c *Command) LocalNonPersistentFlags() *flag.FlagSet { + persistentFlags := c.PersistentFlags() + + out := flag.NewFlagSet(c.Name(), flag.ContinueOnError) + c.LocalFlags().VisitAll(func(f *flag.Flag) { + if persistentFlags.Lookup(f.Name) == nil { + out.AddFlag(f) + } + }) + return out +} + +// LocalFlags returns the local FlagSet specifically set in the current command. +func (c *Command) LocalFlags() *flag.FlagSet { + c.mergePersistentFlags() + + if c.lflags == nil { + c.lflags = flag.NewFlagSet(c.Name(), flag.ContinueOnError) + if c.flagErrorBuf == nil { + c.flagErrorBuf = new(bytes.Buffer) + } + c.lflags.SetOutput(c.flagErrorBuf) + } + c.lflags.SortFlags = c.Flags().SortFlags + if c.globNormFunc != nil { + c.lflags.SetNormalizeFunc(c.globNormFunc) + } + + addToLocal := func(f *flag.Flag) { + if c.lflags.Lookup(f.Name) == nil && c.parentsPflags.Lookup(f.Name) == nil { + c.lflags.AddFlag(f) + } + } + c.Flags().VisitAll(addToLocal) + c.PersistentFlags().VisitAll(addToLocal) + return c.lflags +} + +// InheritedFlags returns all flags which were inherited from parent commands. 
+func (c *Command) InheritedFlags() *flag.FlagSet { + c.mergePersistentFlags() + + if c.iflags == nil { + c.iflags = flag.NewFlagSet(c.Name(), flag.ContinueOnError) + if c.flagErrorBuf == nil { + c.flagErrorBuf = new(bytes.Buffer) + } + c.iflags.SetOutput(c.flagErrorBuf) + } + + local := c.LocalFlags() + if c.globNormFunc != nil { + c.iflags.SetNormalizeFunc(c.globNormFunc) + } + + c.parentsPflags.VisitAll(func(f *flag.Flag) { + if c.iflags.Lookup(f.Name) == nil && local.Lookup(f.Name) == nil { + c.iflags.AddFlag(f) + } + }) + return c.iflags +} + +// NonInheritedFlags returns all flags which were not inherited from parent commands. +func (c *Command) NonInheritedFlags() *flag.FlagSet { + return c.LocalFlags() +} + +// PersistentFlags returns the persistent FlagSet specifically set in the current command. +func (c *Command) PersistentFlags() *flag.FlagSet { + if c.pflags == nil { + c.pflags = flag.NewFlagSet(c.Name(), flag.ContinueOnError) + if c.flagErrorBuf == nil { + c.flagErrorBuf = new(bytes.Buffer) + } + c.pflags.SetOutput(c.flagErrorBuf) + } + return c.pflags +} + +// ResetFlags deletes all flags from command. +func (c *Command) ResetFlags() { + c.flagErrorBuf = new(bytes.Buffer) + c.flagErrorBuf.Reset() + c.flags = flag.NewFlagSet(c.Name(), flag.ContinueOnError) + c.flags.SetOutput(c.flagErrorBuf) + c.pflags = flag.NewFlagSet(c.Name(), flag.ContinueOnError) + c.pflags.SetOutput(c.flagErrorBuf) + + c.lflags = nil + c.iflags = nil + c.parentsPflags = nil +} + +// HasFlags checks if the command contains any flags (local plus persistent from the entire structure). +func (c *Command) HasFlags() bool { + return c.Flags().HasFlags() +} + +// HasPersistentFlags checks if the command contains persistent flags. +func (c *Command) HasPersistentFlags() bool { + return c.PersistentFlags().HasFlags() +} + +// HasLocalFlags checks if the command has flags specifically declared locally. +func (c *Command) HasLocalFlags() bool { + return c.LocalFlags().HasFlags() +} + +// HasInheritedFlags checks if the command has flags inherited from its parent command. +func (c *Command) HasInheritedFlags() bool { + return c.InheritedFlags().HasFlags() +} + +// HasAvailableFlags checks if the command contains any flags (local plus persistent from the entire +// structure) which are not hidden or deprecated. +func (c *Command) HasAvailableFlags() bool { + return c.Flags().HasAvailableFlags() +} + +// HasAvailablePersistentFlags checks if the command contains persistent flags which are not hidden or deprecated. +func (c *Command) HasAvailablePersistentFlags() bool { + return c.PersistentFlags().HasAvailableFlags() +} + +// HasAvailableLocalFlags checks if the command has flags specifically declared locally which are not hidden +// or deprecated. +func (c *Command) HasAvailableLocalFlags() bool { + return c.LocalFlags().HasAvailableFlags() +} + +// HasAvailableInheritedFlags checks if the command has flags inherited from its parent command which are +// not hidden or deprecated. +func (c *Command) HasAvailableInheritedFlags() bool { + return c.InheritedFlags().HasAvailableFlags() +} + +// Flag climbs up the command tree looking for matching flag. +func (c *Command) Flag(name string) (flag *flag.Flag) { + flag = c.Flags().Lookup(name) + + if flag == nil { + flag = c.persistentFlag(name) + } + + return +} + +// Recursively find matching persistent flag. 
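The accessors above (Flags, LocalFlags, PersistentFlags, InheritedFlags) partition a command's flags by where they were declared and whether they propagate to subcommands. A small sketch of that hierarchy as seen from calling code, assuming invented "root"/"child" commands and flag names:

package main

import (
	"fmt"

	"github.com/spf13/cobra"
)

func main() {
	// Hypothetical parent/child pair; the commands and flags are made up.
	root := &cobra.Command{Use: "root", Run: func(*cobra.Command, []string) {}}
	child := &cobra.Command{Use: "child", Run: func(*cobra.Command, []string) {}}
	root.AddCommand(child)

	// Persistent flags propagate to subcommands ...
	root.PersistentFlags().String("config", "", "config file")
	// ... while plain flags stay local to the command they were declared on.
	root.Flags().Bool("verbose", false, "verbose output")

	fmt.Println(child.InheritedFlags().Lookup("config") != nil)  // true
	fmt.Println(child.InheritedFlags().Lookup("verbose") != nil) // false
	fmt.Println(root.LocalFlags().Lookup("verbose") != nil)      // true
}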
+func (c *Command) persistentFlag(name string) (flag *flag.Flag) { + if c.HasPersistentFlags() { + flag = c.PersistentFlags().Lookup(name) + } + + if flag == nil { + c.updateParentsPflags() + flag = c.parentsPflags.Lookup(name) + } + return +} + +// ParseFlags parses persistent flag tree and local flags. +func (c *Command) ParseFlags(args []string) error { + if c.DisableFlagParsing { + return nil + } + + if c.flagErrorBuf == nil { + c.flagErrorBuf = new(bytes.Buffer) + } + beforeErrorBufLen := c.flagErrorBuf.Len() + c.mergePersistentFlags() + + // do it here after merging all flags and just before parse + c.Flags().ParseErrorsWhitelist = flag.ParseErrorsWhitelist(c.FParseErrWhitelist) + + err := c.Flags().Parse(args) + // Print warnings if they occurred (e.g. deprecated flag messages). + if c.flagErrorBuf.Len()-beforeErrorBufLen > 0 && err == nil { + c.Print(c.flagErrorBuf.String()) + } + + return err +} + +// Parent returns a commands parent command. +func (c *Command) Parent() *Command { + return c.parent +} + +// mergePersistentFlags merges c.PersistentFlags() to c.Flags() +// and adds missing persistent flags of all parents. +func (c *Command) mergePersistentFlags() { + c.updateParentsPflags() + c.Flags().AddFlagSet(c.PersistentFlags()) + c.Flags().AddFlagSet(c.parentsPflags) +} + +// updateParentsPflags updates c.parentsPflags by adding +// new persistent flags of all parents. +// If c.parentsPflags == nil, it makes new. +func (c *Command) updateParentsPflags() { + if c.parentsPflags == nil { + c.parentsPflags = flag.NewFlagSet(c.Name(), flag.ContinueOnError) + c.parentsPflags.SetOutput(c.flagErrorBuf) + c.parentsPflags.SortFlags = false + } + + if c.globNormFunc != nil { + c.parentsPflags.SetNormalizeFunc(c.globNormFunc) + } + + c.Root().PersistentFlags().AddFlagSet(flag.CommandLine) + + c.VisitParents(func(parent *Command) { + c.parentsPflags.AddFlagSet(parent.PersistentFlags()) + }) +} diff --git a/vendor/github.com/spf13/cobra/command_notwin.go b/vendor/github.com/spf13/cobra/command_notwin.go new file mode 100644 index 000000000..6159c1cc1 --- /dev/null +++ b/vendor/github.com/spf13/cobra/command_notwin.go @@ -0,0 +1,5 @@ +// +build !windows + +package cobra + +var preExecHookFn func(*Command) diff --git a/vendor/github.com/spf13/cobra/command_win.go b/vendor/github.com/spf13/cobra/command_win.go new file mode 100644 index 000000000..8768b1736 --- /dev/null +++ b/vendor/github.com/spf13/cobra/command_win.go @@ -0,0 +1,26 @@ +// +build windows + +package cobra + +import ( + "fmt" + "os" + "time" + + "github.com/inconshreveable/mousetrap" +) + +var preExecHookFn = preExecHook + +func preExecHook(c *Command) { + if MousetrapHelpText != "" && mousetrap.StartedByExplorer() { + c.Print(MousetrapHelpText) + if MousetrapDisplayDuration > 0 { + time.Sleep(MousetrapDisplayDuration) + } else { + c.Println("Press return to continue...") + fmt.Scanln() + } + os.Exit(1) + } +} diff --git a/vendor/github.com/spf13/cobra/completions.go b/vendor/github.com/spf13/cobra/completions.go new file mode 100644 index 000000000..b849b9c84 --- /dev/null +++ b/vendor/github.com/spf13/cobra/completions.go @@ -0,0 +1,781 @@ +package cobra + +import ( + "fmt" + "os" + "strings" + "sync" + + "github.com/spf13/pflag" +) + +const ( + // ShellCompRequestCmd is the name of the hidden command that is used to request + // completion results from the program. It is used by the shell completion scripts. 
+ ShellCompRequestCmd = "__complete" + // ShellCompNoDescRequestCmd is the name of the hidden command that is used to request + // completion results without their description. It is used by the shell completion scripts. + ShellCompNoDescRequestCmd = "__completeNoDesc" +) + +// Global map of flag completion functions. Make sure to use flagCompletionMutex before you try to read and write from it. +var flagCompletionFunctions = map[*pflag.Flag]func(cmd *Command, args []string, toComplete string) ([]string, ShellCompDirective){} + +// lock for reading and writing from flagCompletionFunctions +var flagCompletionMutex = &sync.RWMutex{} + +// ShellCompDirective is a bit map representing the different behaviors the shell +// can be instructed to have once completions have been provided. +type ShellCompDirective int + +type flagCompError struct { + subCommand string + flagName string +} + +func (e *flagCompError) Error() string { + return "Subcommand '" + e.subCommand + "' does not support flag '" + e.flagName + "'" +} + +const ( + // ShellCompDirectiveError indicates an error occurred and completions should be ignored. + ShellCompDirectiveError ShellCompDirective = 1 << iota + + // ShellCompDirectiveNoSpace indicates that the shell should not add a space + // after the completion even if there is a single completion provided. + ShellCompDirectiveNoSpace + + // ShellCompDirectiveNoFileComp indicates that the shell should not provide + // file completion even when no completion is provided. + ShellCompDirectiveNoFileComp + + // ShellCompDirectiveFilterFileExt indicates that the provided completions + // should be used as file extension filters. + // For flags, using Command.MarkFlagFilename() and Command.MarkPersistentFlagFilename() + // is a shortcut to using this directive explicitly. The BashCompFilenameExt + // annotation can also be used to obtain the same behavior for flags. + ShellCompDirectiveFilterFileExt + + // ShellCompDirectiveFilterDirs indicates that only directory names should + // be provided in file completion. To request directory names within another + // directory, the returned completions should specify the directory within + // which to search. The BashCompSubdirsInDir annotation can be used to + // obtain the same behavior but only for flags. + ShellCompDirectiveFilterDirs + + // =========================================================================== + + // All directives using iota should be above this one. + // For internal use. + shellCompDirectiveMaxValue + + // ShellCompDirectiveDefault indicates to let the shell perform its default + // behavior after completions have been provided. + // This one must be last to avoid messing up the iota count. 
+ ShellCompDirectiveDefault ShellCompDirective = 0 +) + +const ( + // Constants for the completion command + compCmdName = "completion" + compCmdNoDescFlagName = "no-descriptions" + compCmdNoDescFlagDesc = "disable completion descriptions" + compCmdNoDescFlagDefault = false +) + +// CompletionOptions are the options to control shell completion +type CompletionOptions struct { + // DisableDefaultCmd prevents Cobra from creating a default 'completion' command + DisableDefaultCmd bool + // DisableNoDescFlag prevents Cobra from creating the '--no-descriptions' flag + // for shells that support completion descriptions + DisableNoDescFlag bool + // DisableDescriptions turns off all completion descriptions for shells + // that support them + DisableDescriptions bool +} + +// NoFileCompletions can be used to disable file completion for commands that should +// not trigger file completions. +func NoFileCompletions(cmd *Command, args []string, toComplete string) ([]string, ShellCompDirective) { + return nil, ShellCompDirectiveNoFileComp +} + +// RegisterFlagCompletionFunc should be called to register a function to provide completion for a flag. +func (c *Command) RegisterFlagCompletionFunc(flagName string, f func(cmd *Command, args []string, toComplete string) ([]string, ShellCompDirective)) error { + flag := c.Flag(flagName) + if flag == nil { + return fmt.Errorf("RegisterFlagCompletionFunc: flag '%s' does not exist", flagName) + } + flagCompletionMutex.Lock() + defer flagCompletionMutex.Unlock() + + if _, exists := flagCompletionFunctions[flag]; exists { + return fmt.Errorf("RegisterFlagCompletionFunc: flag '%s' already registered", flagName) + } + flagCompletionFunctions[flag] = f + return nil +} + +// Returns a string listing the different directive enabled in the specified parameter +func (d ShellCompDirective) string() string { + var directives []string + if d&ShellCompDirectiveError != 0 { + directives = append(directives, "ShellCompDirectiveError") + } + if d&ShellCompDirectiveNoSpace != 0 { + directives = append(directives, "ShellCompDirectiveNoSpace") + } + if d&ShellCompDirectiveNoFileComp != 0 { + directives = append(directives, "ShellCompDirectiveNoFileComp") + } + if d&ShellCompDirectiveFilterFileExt != 0 { + directives = append(directives, "ShellCompDirectiveFilterFileExt") + } + if d&ShellCompDirectiveFilterDirs != 0 { + directives = append(directives, "ShellCompDirectiveFilterDirs") + } + if len(directives) == 0 { + directives = append(directives, "ShellCompDirectiveDefault") + } + + if d >= shellCompDirectiveMaxValue { + return fmt.Sprintf("ERROR: unexpected ShellCompDirective value: %d", d) + } + return strings.Join(directives, ", ") +} + +// Adds a special hidden command that can be used to request custom completions. 
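RegisterFlagCompletionFunc and the ShellCompDirective values above are the program-facing half of this completion machinery. A minimal sketch of wiring them together, assuming a hypothetical "report" command with an --output flag:

package main

import (
	"github.com/spf13/cobra"
)

func main() {
	// Hypothetical command and flag used only to illustrate the API;
	// "report" and --output are not part of any real program.
	cmd := &cobra.Command{Use: "report", Run: func(*cobra.Command, []string) {}}
	cmd.Flags().String("output", "table", "output format")

	// The flag must already exist; otherwise RegisterFlagCompletionFunc returns an error.
	err := cmd.RegisterFlagCompletionFunc("output",
		func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) {
			// Offer fixed choices and suppress the shell's file-name fallback.
			return []string{"table", "json", "yaml"}, cobra.ShellCompDirectiveNoFileComp
		})
	if err != nil {
		panic(err)
	}

	_ = cmd.Execute()
}

The hidden __complete command defined in this file is what the generated shell scripts invoke to retrieve those choices, along with the trailing :N directive line, at completion time.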
+func (c *Command) initCompleteCmd(args []string) { + completeCmd := &Command{ + Use: fmt.Sprintf("%s [command-line]", ShellCompRequestCmd), + Aliases: []string{ShellCompNoDescRequestCmd}, + DisableFlagsInUseLine: true, + Hidden: true, + DisableFlagParsing: true, + Args: MinimumNArgs(1), + Short: "Request shell completion choices for the specified command-line", + Long: fmt.Sprintf("%[2]s is a special command that is used by the shell completion logic\n%[1]s", + "to request completion choices for the specified command-line.", ShellCompRequestCmd), + Run: func(cmd *Command, args []string) { + finalCmd, completions, directive, err := cmd.getCompletions(args) + if err != nil { + CompErrorln(err.Error()) + // Keep going for multiple reasons: + // 1- There could be some valid completions even though there was an error + // 2- Even without completions, we need to print the directive + } + + noDescriptions := (cmd.CalledAs() == ShellCompNoDescRequestCmd) + for _, comp := range completions { + if noDescriptions { + // Remove any description that may be included following a tab character. + comp = strings.Split(comp, "\t")[0] + } + + // Make sure we only write the first line to the output. + // This is needed if a description contains a linebreak. + // Otherwise the shell scripts will interpret the other lines as new flags + // and could therefore provide a wrong completion. + comp = strings.Split(comp, "\n")[0] + + // Finally trim the completion. This is especially important to get rid + // of a trailing tab when there are no description following it. + // For example, a sub-command without a description should not be completed + // with a tab at the end (or else zsh will show a -- following it + // although there is no description). + comp = strings.TrimSpace(comp) + + // Print each possible completion to stdout for the completion script to consume. + fmt.Fprintln(finalCmd.OutOrStdout(), comp) + } + + // As the last printout, print the completion directive for the completion script to parse. + // The directive integer must be that last character following a single colon (:). + // The completion script expects : + fmt.Fprintf(finalCmd.OutOrStdout(), ":%d\n", directive) + + // Print some helpful info to stderr for the user to understand. + // Output from stderr must be ignored by the completion script. + fmt.Fprintf(finalCmd.ErrOrStderr(), "Completion ended with directive: %s\n", directive.string()) + }, + } + c.AddCommand(completeCmd) + subCmd, _, err := c.Find(args) + if err != nil || subCmd.Name() != ShellCompRequestCmd { + // Only create this special command if it is actually being called. + // This reduces possible side-effects of creating such a command; + // for example, having this command would cause problems to a + // cobra program that only consists of the root command, since this + // command would cause the root command to suddenly have a subcommand. 
+ c.RemoveCommand(completeCmd) + } +} + +func (c *Command) getCompletions(args []string) (*Command, []string, ShellCompDirective, error) { + // The last argument, which is not completely typed by the user, + // should not be part of the list of arguments + toComplete := args[len(args)-1] + trimmedArgs := args[:len(args)-1] + + var finalCmd *Command + var finalArgs []string + var err error + // Find the real command for which completion must be performed + // check if we need to traverse here to parse local flags on parent commands + if c.Root().TraverseChildren { + finalCmd, finalArgs, err = c.Root().Traverse(trimmedArgs) + } else { + finalCmd, finalArgs, err = c.Root().Find(trimmedArgs) + } + if err != nil { + // Unable to find the real command. E.g., someInvalidCmd + return c, []string{}, ShellCompDirectiveDefault, fmt.Errorf("Unable to find a command for arguments: %v", trimmedArgs) + } + finalCmd.ctx = c.ctx + + // Check if we are doing flag value completion before parsing the flags. + // This is important because if we are completing a flag value, we need to also + // remove the flag name argument from the list of finalArgs or else the parsing + // could fail due to an invalid value (incomplete) for the flag. + flag, finalArgs, toComplete, flagErr := checkIfFlagCompletion(finalCmd, finalArgs, toComplete) + + // Check if interspersed is false or -- was set on a previous arg. + // This works by counting the arguments. Normally -- is not counted as arg but + // if -- was already set or interspersed is false and there is already one arg then + // the extra added -- is counted as arg. + flagCompletion := true + _ = finalCmd.ParseFlags(append(finalArgs, "--")) + newArgCount := finalCmd.Flags().NArg() + + // Parse the flags early so we can check if required flags are set + if err = finalCmd.ParseFlags(finalArgs); err != nil { + return finalCmd, []string{}, ShellCompDirectiveDefault, fmt.Errorf("Error while parsing flags from args %v: %s", finalArgs, err.Error()) + } + + realArgCount := finalCmd.Flags().NArg() + if newArgCount > realArgCount { + // don't do flag completion (see above) + flagCompletion = false + } + // Error while attempting to parse flags + if flagErr != nil { + // If error type is flagCompError and we don't want flagCompletion we should ignore the error + if _, ok := flagErr.(*flagCompError); !(ok && !flagCompletion) { + return finalCmd, []string{}, ShellCompDirectiveDefault, flagErr + } + } + + if flag != nil && flagCompletion { + // Check if we are completing a flag value subject to annotations + if validExts, present := flag.Annotations[BashCompFilenameExt]; present { + if len(validExts) != 0 { + // File completion filtered by extensions + return finalCmd, validExts, ShellCompDirectiveFilterFileExt, nil + } + + // The annotation requests simple file completion. There is no reason to do + // that since it is the default behavior anyway. Let's ignore this annotation + // in case the program also registered a completion function for this flag. + // Even though it is a mistake on the program's side, let's be nice when we can. + } + + if subDir, present := flag.Annotations[BashCompSubdirsInDir]; present { + if len(subDir) == 1 { + // Directory completion from within a directory + return finalCmd, subDir, ShellCompDirectiveFilterDirs, nil + } + // Directory completion + return finalCmd, []string{}, ShellCompDirectiveFilterDirs, nil + } + } + + // When doing completion of a flag name, as soon as an argument starts with + // a '-' we know it is a flag. 
We cannot use isFlagArg() here as it requires + // the flag name to be complete + if flag == nil && len(toComplete) > 0 && toComplete[0] == '-' && !strings.Contains(toComplete, "=") && flagCompletion { + var completions []string + + // First check for required flags + completions = completeRequireFlags(finalCmd, toComplete) + + // If we have not found any required flags, only then can we show regular flags + if len(completions) == 0 { + doCompleteFlags := func(flag *pflag.Flag) { + if !flag.Changed || + strings.Contains(flag.Value.Type(), "Slice") || + strings.Contains(flag.Value.Type(), "Array") { + // If the flag is not already present, or if it can be specified multiple times (Array or Slice) + // we suggest it as a completion + completions = append(completions, getFlagNameCompletions(flag, toComplete)...) + } + } + + // We cannot use finalCmd.Flags() because we may not have called ParsedFlags() for commands + // that have set DisableFlagParsing; it is ParseFlags() that merges the inherited and + // non-inherited flags. + finalCmd.InheritedFlags().VisitAll(func(flag *pflag.Flag) { + doCompleteFlags(flag) + }) + finalCmd.NonInheritedFlags().VisitAll(func(flag *pflag.Flag) { + doCompleteFlags(flag) + }) + } + + directive := ShellCompDirectiveNoFileComp + if len(completions) == 1 && strings.HasSuffix(completions[0], "=") { + // If there is a single completion, the shell usually adds a space + // after the completion. We don't want that if the flag ends with an = + directive = ShellCompDirectiveNoSpace + } + return finalCmd, completions, directive, nil + } + + // We only remove the flags from the arguments if DisableFlagParsing is not set. + // This is important for commands which have requested to do their own flag completion. + if !finalCmd.DisableFlagParsing { + finalArgs = finalCmd.Flags().Args() + } + + var completions []string + directive := ShellCompDirectiveDefault + if flag == nil { + foundLocalNonPersistentFlag := false + // If TraverseChildren is true on the root command we don't check for + // local flags because we can use a local flag on a parent command + if !finalCmd.Root().TraverseChildren { + // Check if there are any local, non-persistent flags on the command-line + localNonPersistentFlags := finalCmd.LocalNonPersistentFlags() + finalCmd.NonInheritedFlags().VisitAll(func(flag *pflag.Flag) { + if localNonPersistentFlags.Lookup(flag.Name) != nil && flag.Changed { + foundLocalNonPersistentFlag = true + } + }) + } + + // Complete subcommand names, including the help command + if len(finalArgs) == 0 && !foundLocalNonPersistentFlag { + // We only complete sub-commands if: + // - there are no arguments on the command-line and + // - there are no local, non-persistent flags on the command-line or TraverseChildren is true + for _, subCmd := range finalCmd.Commands() { + if subCmd.IsAvailableCommand() || subCmd == finalCmd.helpCommand { + if strings.HasPrefix(subCmd.Name(), toComplete) { + completions = append(completions, fmt.Sprintf("%s\t%s", subCmd.Name(), subCmd.Short)) + } + directive = ShellCompDirectiveNoFileComp + } + } + } + + // Complete required flags even without the '-' prefix + completions = append(completions, completeRequireFlags(finalCmd, toComplete)...) + + // Always complete ValidArgs, even if we are completing a subcommand name. + // This is for commands that have both subcommands and ValidArgs. 
+ if len(finalCmd.ValidArgs) > 0 { + if len(finalArgs) == 0 { + // ValidArgs are only for the first argument + for _, validArg := range finalCmd.ValidArgs { + if strings.HasPrefix(validArg, toComplete) { + completions = append(completions, validArg) + } + } + directive = ShellCompDirectiveNoFileComp + + // If no completions were found within commands or ValidArgs, + // see if there are any ArgAliases that should be completed. + if len(completions) == 0 { + for _, argAlias := range finalCmd.ArgAliases { + if strings.HasPrefix(argAlias, toComplete) { + completions = append(completions, argAlias) + } + } + } + } + + // If there are ValidArgs specified (even if they don't match), we stop completion. + // Only one of ValidArgs or ValidArgsFunction can be used for a single command. + return finalCmd, completions, directive, nil + } + + // Let the logic continue so as to add any ValidArgsFunction completions, + // even if we already found sub-commands. + // This is for commands that have subcommands but also specify a ValidArgsFunction. + } + + // Find the completion function for the flag or command + var completionFn func(cmd *Command, args []string, toComplete string) ([]string, ShellCompDirective) + if flag != nil && flagCompletion { + flagCompletionMutex.RLock() + completionFn = flagCompletionFunctions[flag] + flagCompletionMutex.RUnlock() + } else { + completionFn = finalCmd.ValidArgsFunction + } + if completionFn != nil { + // Go custom completion defined for this flag or command. + // Call the registered completion function to get the completions. + var comps []string + comps, directive = completionFn(finalCmd, finalArgs, toComplete) + completions = append(completions, comps...) + } + + return finalCmd, completions, directive, nil +} + +func getFlagNameCompletions(flag *pflag.Flag, toComplete string) []string { + if nonCompletableFlag(flag) { + return []string{} + } + + var completions []string + flagName := "--" + flag.Name + if strings.HasPrefix(flagName, toComplete) { + // Flag without the = + completions = append(completions, fmt.Sprintf("%s\t%s", flagName, flag.Usage)) + + // Why suggest both long forms: --flag and --flag= ? + // This forces the user to *always* have to type either an = or a space after the flag name. + // Let's be nice and avoid making users have to do that. + // Since boolean flags and shortname flags don't show the = form, let's go that route and never show it. + // The = form will still work, we just won't suggest it. + // This also makes the list of suggested flags shorter as we avoid all the = forms. + // + // if len(flag.NoOptDefVal) == 0 { + // // Flag requires a value, so it can be suffixed with = + // flagName += "=" + // completions = append(completions, fmt.Sprintf("%s\t%s", flagName, flag.Usage)) + // } + } + + flagName = "-" + flag.Shorthand + if len(flag.Shorthand) > 0 && strings.HasPrefix(flagName, toComplete) { + completions = append(completions, fmt.Sprintf("%s\t%s", flagName, flag.Usage)) + } + + return completions +} + +func completeRequireFlags(finalCmd *Command, toComplete string) []string { + var completions []string + + doCompleteRequiredFlags := func(flag *pflag.Flag) { + if _, present := flag.Annotations[BashCompOneRequiredFlag]; present { + if !flag.Changed { + // If the flag is not already present, we suggest it as a completion + completions = append(completions, getFlagNameCompletions(flag, toComplete)...) 
+ } + } + } + + // We cannot use finalCmd.Flags() because we may not have called ParsedFlags() for commands + // that have set DisableFlagParsing; it is ParseFlags() that merges the inherited and + // non-inherited flags. + finalCmd.InheritedFlags().VisitAll(func(flag *pflag.Flag) { + doCompleteRequiredFlags(flag) + }) + finalCmd.NonInheritedFlags().VisitAll(func(flag *pflag.Flag) { + doCompleteRequiredFlags(flag) + }) + + return completions +} + +func checkIfFlagCompletion(finalCmd *Command, args []string, lastArg string) (*pflag.Flag, []string, string, error) { + if finalCmd.DisableFlagParsing { + // We only do flag completion if we are allowed to parse flags + // This is important for commands which have requested to do their own flag completion. + return nil, args, lastArg, nil + } + + var flagName string + trimmedArgs := args + flagWithEqual := false + orgLastArg := lastArg + + // When doing completion of a flag name, as soon as an argument starts with + // a '-' we know it is a flag. We cannot use isFlagArg() here as that function + // requires the flag name to be complete + if len(lastArg) > 0 && lastArg[0] == '-' { + if index := strings.Index(lastArg, "="); index >= 0 { + // Flag with an = + if strings.HasPrefix(lastArg[:index], "--") { + // Flag has full name + flagName = lastArg[2:index] + } else { + // Flag is shorthand + // We have to get the last shorthand flag name + // e.g. `-asd` => d to provide the correct completion + // https://github.com/spf13/cobra/issues/1257 + flagName = lastArg[index-1 : index] + } + lastArg = lastArg[index+1:] + flagWithEqual = true + } else { + // Normal flag completion + return nil, args, lastArg, nil + } + } + + if len(flagName) == 0 { + if len(args) > 0 { + prevArg := args[len(args)-1] + if isFlagArg(prevArg) { + // Only consider the case where the flag does not contain an =. + // If the flag contains an = it means it has already been fully processed, + // so we don't need to deal with it here. + if index := strings.Index(prevArg, "="); index < 0 { + if strings.HasPrefix(prevArg, "--") { + // Flag has full name + flagName = prevArg[2:] + } else { + // Flag is shorthand + // We have to get the last shorthand flag name + // e.g. `-asd` => d to provide the correct completion + // https://github.com/spf13/cobra/issues/1257 + flagName = prevArg[len(prevArg)-1:] + } + // Remove the uncompleted flag or else there could be an error created + // for an invalid value for that flag + trimmedArgs = args[:len(args)-1] + } + } + } + } + + if len(flagName) == 0 { + // Not doing flag completion + return nil, trimmedArgs, lastArg, nil + } + + flag := findFlag(finalCmd, flagName) + if flag == nil { + // Flag not supported by this command, the interspersed option might be set so return the original args + return nil, args, orgLastArg, &flagCompError{subCommand: finalCmd.Name(), flagName: flagName} + } + + if !flagWithEqual { + if len(flag.NoOptDefVal) != 0 { + // We had assumed dealing with a two-word flag but the flag is a boolean flag. + // In that case, there is no value following it, so we are not really doing flag completion. + // Reset everything to do noun completion. + trimmedArgs = args + flag = nil + } + } + + return flag, trimmedArgs, lastArg, nil +} + +// initDefaultCompletionCmd adds a default 'completion' command to c. 
+// This function will do nothing if any of the following is true: +// 1- the feature has been explicitly disabled by the program, +// 2- c has no subcommands (to avoid creating one), +// 3- c already has a 'completion' command provided by the program. +func (c *Command) initDefaultCompletionCmd() { + if c.CompletionOptions.DisableDefaultCmd || !c.HasSubCommands() { + return + } + + for _, cmd := range c.commands { + if cmd.Name() == compCmdName || cmd.HasAlias(compCmdName) { + // A completion command is already available + return + } + } + + haveNoDescFlag := !c.CompletionOptions.DisableNoDescFlag && !c.CompletionOptions.DisableDescriptions + + completionCmd := &Command{ + Use: compCmdName, + Short: "generate the autocompletion script for the specified shell", + Long: fmt.Sprintf(` +Generate the autocompletion script for %[1]s for the specified shell. +See each sub-command's help for details on how to use the generated script. +`, c.Root().Name()), + Args: NoArgs, + ValidArgsFunction: NoFileCompletions, + } + c.AddCommand(completionCmd) + + out := c.OutOrStdout() + noDesc := c.CompletionOptions.DisableDescriptions + shortDesc := "generate the autocompletion script for %s" + bash := &Command{ + Use: "bash", + Short: fmt.Sprintf(shortDesc, "bash"), + Long: fmt.Sprintf(` +Generate the autocompletion script for the bash shell. + +This script depends on the 'bash-completion' package. +If it is not installed already, you can install it via your OS's package manager. + +To load completions in your current shell session: +$ source <(%[1]s completion bash) + +To load completions for every new session, execute once: +Linux: + $ %[1]s completion bash > /etc/bash_completion.d/%[1]s +MacOS: + $ %[1]s completion bash > /usr/local/etc/bash_completion.d/%[1]s + +You will need to start a new shell for this setup to take effect. + `, c.Root().Name()), + Args: NoArgs, + DisableFlagsInUseLine: true, + ValidArgsFunction: NoFileCompletions, + RunE: func(cmd *Command, args []string) error { + return cmd.Root().GenBashCompletionV2(out, !noDesc) + }, + } + if haveNoDescFlag { + bash.Flags().BoolVar(&noDesc, compCmdNoDescFlagName, compCmdNoDescFlagDefault, compCmdNoDescFlagDesc) + } + + zsh := &Command{ + Use: "zsh", + Short: fmt.Sprintf(shortDesc, "zsh"), + Long: fmt.Sprintf(` +Generate the autocompletion script for the zsh shell. + +If shell completion is not already enabled in your environment you will need +to enable it. You can execute the following once: + +$ echo "autoload -U compinit; compinit" >> ~/.zshrc + +To load completions for every new session, execute once: +# Linux: +$ %[1]s completion zsh > "${fpath[1]}/_%[1]s" +# macOS: +$ %[1]s completion zsh > /usr/local/share/zsh/site-functions/_%[1]s + +You will need to start a new shell for this setup to take effect. +`, c.Root().Name()), + Args: NoArgs, + ValidArgsFunction: NoFileCompletions, + RunE: func(cmd *Command, args []string) error { + if noDesc { + return cmd.Root().GenZshCompletionNoDesc(out) + } + return cmd.Root().GenZshCompletion(out) + }, + } + if haveNoDescFlag { + zsh.Flags().BoolVar(&noDesc, compCmdNoDescFlagName, compCmdNoDescFlagDefault, compCmdNoDescFlagDesc) + } + + fish := &Command{ + Use: "fish", + Short: fmt.Sprintf(shortDesc, "fish"), + Long: fmt.Sprintf(` +Generate the autocompletion script for the fish shell. 
+ +To load completions in your current shell session: +$ %[1]s completion fish | source + +To load completions for every new session, execute once: +$ %[1]s completion fish > ~/.config/fish/completions/%[1]s.fish + +You will need to start a new shell for this setup to take effect. +`, c.Root().Name()), + Args: NoArgs, + ValidArgsFunction: NoFileCompletions, + RunE: func(cmd *Command, args []string) error { + return cmd.Root().GenFishCompletion(out, !noDesc) + }, + } + if haveNoDescFlag { + fish.Flags().BoolVar(&noDesc, compCmdNoDescFlagName, compCmdNoDescFlagDefault, compCmdNoDescFlagDesc) + } + + powershell := &Command{ + Use: "powershell", + Short: fmt.Sprintf(shortDesc, "powershell"), + Long: fmt.Sprintf(` +Generate the autocompletion script for powershell. + +To load completions in your current shell session: +PS C:\> %[1]s completion powershell | Out-String | Invoke-Expression + +To load completions for every new session, add the output of the above command +to your powershell profile. +`, c.Root().Name()), + Args: NoArgs, + ValidArgsFunction: NoFileCompletions, + RunE: func(cmd *Command, args []string) error { + if noDesc { + return cmd.Root().GenPowerShellCompletion(out) + } + return cmd.Root().GenPowerShellCompletionWithDesc(out) + + }, + } + if haveNoDescFlag { + powershell.Flags().BoolVar(&noDesc, compCmdNoDescFlagName, compCmdNoDescFlagDefault, compCmdNoDescFlagDesc) + } + + completionCmd.AddCommand(bash, zsh, fish, powershell) +} + +func findFlag(cmd *Command, name string) *pflag.Flag { + flagSet := cmd.Flags() + if len(name) == 1 { + // First convert the short flag into a long flag + // as the cmd.Flag() search only accepts long flags + if short := flagSet.ShorthandLookup(name); short != nil { + name = short.Name + } else { + set := cmd.InheritedFlags() + if short = set.ShorthandLookup(name); short != nil { + name = short.Name + } else { + return nil + } + } + } + return cmd.Flag(name) +} + +// CompDebug prints the specified string to the same file as where the +// completion script prints its logs. +// Note that completion printouts should never be on stdout as they would +// be wrongly interpreted as actual completion choices by the completion script. +func CompDebug(msg string, printToStdErr bool) { + msg = fmt.Sprintf("[Debug] %s", msg) + + // Such logs are only printed when the user has set the environment + // variable BASH_COMP_DEBUG_FILE to the path of some file to be used. + if path := os.Getenv("BASH_COMP_DEBUG_FILE"); path != "" { + f, err := os.OpenFile(path, + os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644) + if err == nil { + defer f.Close() + WriteStringAndCheck(f, msg) + } + } + + if printToStdErr { + // Must print to stderr for this not to be read by the completion script. + fmt.Fprint(os.Stderr, msg) + } +} + +// CompDebugln prints the specified string with a newline at the end +// to the same file as where the completion script prints its logs. +// Such logs are only printed when the user has set the environment +// variable BASH_COMP_DEBUG_FILE to the path of some file to be used. +func CompDebugln(msg string, printToStdErr bool) { + CompDebug(fmt.Sprintf("%s\n", msg), printToStdErr) +} + +// CompError prints the specified completion message to stderr. +func CompError(msg string) { + msg = fmt.Sprintf("[Error] %s", msg) + CompDebug(msg, true) +} + +// CompErrorln prints the specified completion message to stderr with a newline at the end. 
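CompDebug, CompDebugln, CompError and CompErrorln above are the logging hooks for custom completion code. A small sketch of using them from a ValidArgsFunction, assuming a made-up "deploy" command; the message only reaches a file when BASH_COMP_DEBUG_FILE points at a writable path:

package main

import (
	"github.com/spf13/cobra"
)

func main() {
	// Hypothetical command; "deploy", "staging" and "production" are invented.
	cmd := &cobra.Command{
		Use: "deploy",
		Run: func(*cobra.Command, []string) {},
		ValidArgsFunction: func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) {
			// Written to $BASH_COMP_DEBUG_FILE (if set), never to stdout,
			// so the completion script does not mistake it for a choice.
			cobra.CompDebugln("completing deploy targets, prefix: "+toComplete, false)
			return []string{"staging", "production"}, cobra.ShellCompDirectiveNoFileComp
		},
	}
	_ = cmd.Execute()
}

Exporting BASH_COMP_DEBUG_FILE to a writable path before triggering completion makes such logs visible without disturbing the completion output itself.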
+func CompErrorln(msg string) { + CompError(fmt.Sprintf("%s\n", msg)) +} diff --git a/vendor/github.com/spf13/cobra/fish_completions.go b/vendor/github.com/spf13/cobra/fish_completions.go new file mode 100644 index 000000000..bb57fd568 --- /dev/null +++ b/vendor/github.com/spf13/cobra/fish_completions.go @@ -0,0 +1,219 @@ +package cobra + +import ( + "bytes" + "fmt" + "io" + "os" + "strings" +) + +func genFishComp(buf io.StringWriter, name string, includeDesc bool) { + // Variables should not contain a '-' or ':' character + nameForVar := name + nameForVar = strings.Replace(nameForVar, "-", "_", -1) + nameForVar = strings.Replace(nameForVar, ":", "_", -1) + + compCmd := ShellCompRequestCmd + if !includeDesc { + compCmd = ShellCompNoDescRequestCmd + } + WriteStringAndCheck(buf, fmt.Sprintf("# fish completion for %-36s -*- shell-script -*-\n", name)) + WriteStringAndCheck(buf, fmt.Sprintf(` +function __%[1]s_debug + set -l file "$BASH_COMP_DEBUG_FILE" + if test -n "$file" + echo "$argv" >> $file + end +end + +function __%[1]s_perform_completion + __%[1]s_debug "Starting __%[1]s_perform_completion" + + # Extract all args except the last one + set -l args (commandline -opc) + # Extract the last arg and escape it in case it is a space + set -l lastArg (string escape -- (commandline -ct)) + + __%[1]s_debug "args: $args" + __%[1]s_debug "last arg: $lastArg" + + set -l requestComp "$args[1] %[3]s $args[2..-1] $lastArg" + + __%[1]s_debug "Calling $requestComp" + set -l results (eval $requestComp 2> /dev/null) + + # Some programs may output extra empty lines after the directive. + # Let's ignore them or else it will break completion. + # Ref: https://github.com/spf13/cobra/issues/1279 + for line in $results[-1..1] + if test (string trim -- $line) = "" + # Found an empty line, remove it + set results $results[1..-2] + else + # Found non-empty line, we have our proper output + break + end + end + + set -l comps $results[1..-2] + set -l directiveLine $results[-1] + + # For Fish, when completing a flag with an = (e.g., -n=) + # completions must be prefixed with the flag + set -l flagPrefix (string match -r -- '-.*=' "$lastArg") + + __%[1]s_debug "Comps: $comps" + __%[1]s_debug "DirectiveLine: $directiveLine" + __%[1]s_debug "flagPrefix: $flagPrefix" + + for comp in $comps + printf "%%s%%s\n" "$flagPrefix" "$comp" + end + + printf "%%s\n" "$directiveLine" +end + +# This function does two things: +# - Obtain the completions and store them in the global __%[1]s_comp_results +# - Return false if file completion should be performed +function __%[1]s_prepare_completions + __%[1]s_debug "" + __%[1]s_debug "========= starting completion logic ==========" + + # Start fresh + set --erase __%[1]s_comp_results + + set -l results (__%[1]s_perform_completion) + __%[1]s_debug "Completion results: $results" + + if test -z "$results" + __%[1]s_debug "No completion, probably due to a failure" + # Might as well do file completion, in case it helps + return 1 + end + + set -l directive (string sub --start 2 $results[-1]) + set --global __%[1]s_comp_results $results[1..-2] + + __%[1]s_debug "Completions are: $__%[1]s_comp_results" + __%[1]s_debug "Directive is: $directive" + + set -l shellCompDirectiveError %[4]d + set -l shellCompDirectiveNoSpace %[5]d + set -l shellCompDirectiveNoFileComp %[6]d + set -l shellCompDirectiveFilterFileExt %[7]d + set -l shellCompDirectiveFilterDirs %[8]d + + if test -z "$directive" + set directive 0 + end + + set -l compErr (math (math --scale 0 $directive / $shellCompDirectiveError) %% 2) + 
if test $compErr -eq 1 + __%[1]s_debug "Received error directive: aborting." + # Might as well do file completion, in case it helps + return 1 + end + + set -l filefilter (math (math --scale 0 $directive / $shellCompDirectiveFilterFileExt) %% 2) + set -l dirfilter (math (math --scale 0 $directive / $shellCompDirectiveFilterDirs) %% 2) + if test $filefilter -eq 1; or test $dirfilter -eq 1 + __%[1]s_debug "File extension filtering or directory filtering not supported" + # Do full file completion instead + return 1 + end + + set -l nospace (math (math --scale 0 $directive / $shellCompDirectiveNoSpace) %% 2) + set -l nofiles (math (math --scale 0 $directive / $shellCompDirectiveNoFileComp) %% 2) + + __%[1]s_debug "nospace: $nospace, nofiles: $nofiles" + + # If we want to prevent a space, or if file completion is NOT disabled, + # we need to count the number of valid completions. + # To do so, we will filter on prefix as the completions we have received + # may not already be filtered so as to allow fish to match on different + # criteria than the prefix. + if test $nospace -ne 0; or test $nofiles -eq 0 + set -l prefix (commandline -t | string escape --style=regex) + __%[1]s_debug "prefix: $prefix" + + set -l completions (string match -r -- "^$prefix.*" $__%[1]s_comp_results) + set --global __%[1]s_comp_results $completions + __%[1]s_debug "Filtered completions are: $__%[1]s_comp_results" + + # Important not to quote the variable for count to work + set -l numComps (count $__%[1]s_comp_results) + __%[1]s_debug "numComps: $numComps" + + if test $numComps -eq 1; and test $nospace -ne 0 + # We must first split on \t to get rid of the descriptions to be + # able to check what the actual completion will be. + # We don't need descriptions anyway since there is only a single + # real completion which the shell will expand immediately. + set -l split (string split --max 1 \t $__%[1]s_comp_results[1]) + + # Fish won't add a space if the completion ends with any + # of the following characters: @=/:., + set -l lastChar (string sub -s -1 -- $split) + if not string match -r -q "[@=/:.,]" -- "$lastChar" + # In other cases, to support the "nospace" directive we trick the shell + # by outputting an extra, longer completion. + __%[1]s_debug "Adding second completion to perform nospace directive" + set --global __%[1]s_comp_results $split[1] $split[1]. + __%[1]s_debug "Completions are now: $__%[1]s_comp_results" + end + end + + if test $numComps -eq 0; and test $nofiles -eq 0 + # To be consistent with bash and zsh, we only trigger file + # completion when there are no other completions + __%[1]s_debug "Requesting file completion" + return 1 + end + end + + return 0 +end + +# Since Fish completions are only loaded once the user triggers them, we trigger them ourselves +# so we can properly delete any completions provided by another script. +# Only do this if the program can be found, or else fish may print some errors; besides, +# the existing completions will only be loaded if the program can be found. +if type -q "%[2]s" + # The space after the program name is essential to trigger completion for the program + # and not completion of the program name itself. + # Also, we use '> /dev/null 2>&1' since '&>' is not supported in older versions of fish. + complete --do-complete "%[2]s " > /dev/null 2>&1 +end + +# Remove any pre-existing completions for the program since we will be handling all of them. 
+complete -c %[2]s -e + +# The call to __%[1]s_prepare_completions will setup __%[1]s_comp_results +# which provides the program's completion choices. +complete -c %[2]s -n '__%[1]s_prepare_completions' -f -a '$__%[1]s_comp_results' + +`, nameForVar, name, compCmd, + ShellCompDirectiveError, ShellCompDirectiveNoSpace, ShellCompDirectiveNoFileComp, + ShellCompDirectiveFilterFileExt, ShellCompDirectiveFilterDirs)) +} + +// GenFishCompletion generates fish completion file and writes to the passed writer. +func (c *Command) GenFishCompletion(w io.Writer, includeDesc bool) error { + buf := new(bytes.Buffer) + genFishComp(buf, c.Name(), includeDesc) + _, err := buf.WriteTo(w) + return err +} + +// GenFishCompletionFile generates fish completion file. +func (c *Command) GenFishCompletionFile(filename string, includeDesc bool) error { + outFile, err := os.Create(filename) + if err != nil { + return err + } + defer outFile.Close() + + return c.GenFishCompletion(outFile, includeDesc) +} diff --git a/vendor/github.com/spf13/cobra/fish_completions.md b/vendor/github.com/spf13/cobra/fish_completions.md new file mode 100644 index 000000000..19b2ed129 --- /dev/null +++ b/vendor/github.com/spf13/cobra/fish_completions.md @@ -0,0 +1,4 @@ +## Generating Fish Completions For Your cobra.Command + +Please refer to [Shell Completions](shell_completions.md) for details. + diff --git a/vendor/github.com/spf13/cobra/go.mod b/vendor/github.com/spf13/cobra/go.mod new file mode 100644 index 000000000..1fb9439dd --- /dev/null +++ b/vendor/github.com/spf13/cobra/go.mod @@ -0,0 +1,11 @@ +module github.com/spf13/cobra + +go 1.14 + +require ( + github.com/cpuguy83/go-md2man/v2 v2.0.0 + github.com/inconshreveable/mousetrap v1.0.0 + github.com/spf13/pflag v1.0.5 + github.com/spf13/viper v1.8.1 + gopkg.in/yaml.v2 v2.4.0 +) diff --git a/vendor/github.com/spf13/cobra/go.sum b/vendor/github.com/spf13/cobra/go.sum new file mode 100644 index 000000000..3e22df29a --- /dev/null +++ b/vendor/github.com/spf13/cobra/go.sum @@ -0,0 +1,592 @@ +cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= +cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= +cloud.google.com/go v0.38.0/go.mod h1:990N+gfupTy94rShfmMCWGDn0LpTmnzTp2qbd1dvSRU= +cloud.google.com/go v0.44.1/go.mod h1:iSa0KzasP4Uvy3f1mN/7PiObzGgflwredwwASm/v6AU= +cloud.google.com/go v0.44.2/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY= +cloud.google.com/go v0.45.1/go.mod h1:RpBamKRgapWJb87xiFSdk4g1CME7QZg3uwTez+TSTjc= +cloud.google.com/go v0.46.3/go.mod h1:a6bKKbmY7er1mI7TEI4lsAkts/mkhTSZK8w33B4RAg0= +cloud.google.com/go v0.50.0/go.mod h1:r9sluTvynVuxRIOHXQEHMFffphuXHOMZMycpNR5e6To= +cloud.google.com/go v0.52.0/go.mod h1:pXajvRH/6o3+F9jDHZWQ5PbGhn+o8w9qiu/CffaVdO4= +cloud.google.com/go v0.53.0/go.mod h1:fp/UouUEsRkN6ryDKNW/Upv/JBKnv6WDthjR6+vze6M= +cloud.google.com/go v0.54.0/go.mod h1:1rq2OEkV3YMf6n/9ZvGWI3GWw0VoqH/1x2nd8Is/bPc= +cloud.google.com/go v0.56.0/go.mod h1:jr7tqZxxKOVYizybht9+26Z/gUq7tiRzu+ACVAMbKVk= +cloud.google.com/go v0.57.0/go.mod h1:oXiQ6Rzq3RAkkY7N6t3TcE6jE+CIBBbA36lwQ1JyzZs= +cloud.google.com/go v0.62.0/go.mod h1:jmCYTdRCQuc1PHIIJ/maLInMho30T/Y0M4hTdTShOYc= +cloud.google.com/go v0.65.0/go.mod h1:O5N8zS7uWy9vkA9vayVHs65eM1ubvY4h553ofrNHObY= +cloud.google.com/go v0.72.0/go.mod h1:M+5Vjvlc2wnp6tjzE102Dw08nGShTscUx2nZMufOKPI= +cloud.google.com/go v0.74.0/go.mod h1:VV1xSbzvo+9QJOxLDaJfTjx5e+MePCpCWwvftOeQmWk= +cloud.google.com/go v0.78.0/go.mod h1:QjdrLG0uq+YwhjoVOLsS1t7TW8fs36kLs4XO5R5ECHg= 
+cloud.google.com/go v0.79.0/go.mod h1:3bzgcEeQlzbuEAYu4mrWhKqWjmpprinYgKJLgKHnbb8= +cloud.google.com/go v0.81.0/go.mod h1:mk/AM35KwGk/Nm2YSeZbxXdrNK3KZOYHmLkOqC2V6E0= +cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o= +cloud.google.com/go/bigquery v1.3.0/go.mod h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE= +cloud.google.com/go/bigquery v1.4.0/go.mod h1:S8dzgnTigyfTmLBfrtrhyYhwRxG72rYxvftPBK2Dvzc= +cloud.google.com/go/bigquery v1.5.0/go.mod h1:snEHRnqQbz117VIFhE8bmtwIDY80NLUZUMb4Nv6dBIg= +cloud.google.com/go/bigquery v1.7.0/go.mod h1://okPTzCYNXSlb24MZs83e2Do+h+VXtc4gLoIoXIAPc= +cloud.google.com/go/bigquery v1.8.0/go.mod h1:J5hqkt3O0uAFnINi6JXValWIb1v0goeZM77hZzJN/fQ= +cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE= +cloud.google.com/go/datastore v1.1.0/go.mod h1:umbIZjpQpHh4hmRpGhH4tLFup+FVzqBi1b3c64qFpCk= +cloud.google.com/go/firestore v1.1.0/go.mod h1:ulACoGHTpvq5r8rxGJ4ddJZBZqakUQqClKRT5SZwBmk= +cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I= +cloud.google.com/go/pubsub v1.1.0/go.mod h1:EwwdRX2sKPjnvnqCa270oGRyludottCI76h+R3AArQw= +cloud.google.com/go/pubsub v1.2.0/go.mod h1:jhfEVHT8odbXTkndysNHCcx0awwzvfOlguIAii9o8iA= +cloud.google.com/go/pubsub v1.3.1/go.mod h1:i+ucay31+CNRpDW4Lu78I4xXG+O1r/MAHgjpRVR+TSU= +cloud.google.com/go/storage v1.0.0/go.mod h1:IhtSnM/ZTZV8YYJWCY8RULGVqBDmpoyjwiyrjsg+URw= +cloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0ZeosJ0Rtdos= +cloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohlUTyfDhBk= +cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RXyy7KQOVs= +cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0= +dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= +github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= +github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= +github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY= +github.com/armon/circbuf v0.0.0-20150827004946-bbbad097214e/go.mod h1:3U/XgcO3hCbHZ8TKRvWD2dDTCfh9M9ya+I9JpbB7O8o= +github.com/armon/go-metrics v0.0.0-20180917152333-f0300d1749da/go.mod h1:Q73ZrmVTwzkszR9V5SSuryQ31EELlFMUz1kKyl939pY= +github.com/armon/go-radix v0.0.0-20180808171621-7fddfc383310/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8= +github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs= +github.com/bketelsen/crypt v0.0.4/go.mod h1:aI6NrJ0pMGgvZKL1iVgXLnfIFJtfV+bKCoqOes/6LfM= +github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= +github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= +github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI= +github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= +github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= +github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= +github.com/cncf/udpa/go v0.0.0-20200629203442-efcf912fb354/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= +github.com/cncf/udpa/go 
v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= +github.com/coreos/go-semver v0.3.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk= +github.com/coreos/go-systemd/v22 v22.3.2/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc= +github.com/cpuguy83/go-md2man/v2 v2.0.0 h1:EoUDS0afbrsXAZ9YQ9jdu/mZ2sXgT1/2yyNng4PGlyM= +github.com/cpuguy83/go-md2man/v2 v2.0.0/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= +github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= +github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98= +github.com/envoyproxy/go-control-plane v0.9.7/go.mod h1:cwu0lG7PUMfa9snN8LXBig5ynNVH9qI8YYLbd1fK2po= +github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= +github.com/envoyproxy/go-control-plane v0.9.9-0.20210217033140-668b12f5399d/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= +github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= +github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4= +github.com/fsnotify/fsnotify v1.4.9 h1:hsms1Qyu0jgnwNXIxa+/V/PDsU6CfLf6CNO8H7IWoS4= +github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ= +github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= +github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= +github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= +github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= +github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA= +github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= +github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= +github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= +github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= +github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y= +github.com/golang/mock v1.4.0/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= +github.com/golang/mock v1.4.1/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= +github.com/golang/mock v1.4.3/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= +github.com/golang/mock v1.4.4/go.mod h1:l3mdAwkq5BuhzHwde/uurv3sEJeZMXNpwsxVWU71h+4= +github.com/golang/mock v1.5.0/go.mod 
h1:CWnOUgYIOo4TcNZ0wHX3YZCqsaM1I1Jvs6v3mP3KVu8= +github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= +github.com/golang/protobuf v1.3.4/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= +github.com/golang/protobuf v1.3.5/go.mod h1:6O5/vntMXwX2lRkT1hjjk0nAC1IDOTvTlVgjlRvqsdk= +github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8= +github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA= +github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs= +github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w= +github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0= +github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QDs8UjoX8= +github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= +github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= +github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= +github.com/golang/protobuf v1.5.1/go.mod h1:DopwsBzvsk0Fs44TXzsVbJyPhcCPeIwnvohx4u74HPM= +github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= +github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= +github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= +github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= +github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= +github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= +github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.4.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= +github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= +github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= +github.com/google/martian/v3 v3.1.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= +github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= +github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= +github.com/google/pprof v0.0.0-20191218002539-d4f498aebedc/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= 
+github.com/google/pprof v0.0.0-20200212024743-f11f1df84d12/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200229191704-1ebb73c60ed3/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200430221834-fc25d7d30c6d/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200708004538-1a94d8640e99/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20201023163331-3e6fc7fc9c4c/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20201203190320-1bf35d6f28c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20210122040257-d980be63207e/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20210226084205-cbba55b83ad5/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= +github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= +github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= +github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1 h1:EGx4pi6eqNxGaHF6qqu48+N2wcFQ5qg5FXgOdqsJ5d8= +github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= +github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw= +github.com/hashicorp/consul/api v1.1.0/go.mod h1:VmuI/Lkw1nC05EYQWNKwWGbkg+FbDBtguAZLlVdkD9Q= +github.com/hashicorp/consul/sdk v0.1.1/go.mod h1:VKf9jXwCTEY1QZP2MOLRhb5i/I/ssyNV1vwHyQBF0x8= +github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= +github.com/hashicorp/go-cleanhttp v0.5.1/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80= +github.com/hashicorp/go-immutable-radix v1.0.0/go.mod h1:0y9vanUI8NX6FsYoO3zeMjhV/C5i9g4Q3DwcSNZ4P60= +github.com/hashicorp/go-msgpack v0.5.3/go.mod h1:ahLV/dePpqEmjfWmKiqvPkv/twdG7iPBM1vqhUKIvfM= +github.com/hashicorp/go-multierror v1.0.0/go.mod h1:dHtQlpGsu+cZNNAkkCN/P3hoUDHhCYQXV3UM06sGGrk= +github.com/hashicorp/go-rootcerts v1.0.0/go.mod h1:K6zTfqpRlCUIjkwsN4Z+hiSfzSTQa6eBIzfwKfwNnHU= +github.com/hashicorp/go-sockaddr v1.0.0/go.mod h1:7Xibr9yA9JjQq1JpNB2Vw7kxv8xerXegt+ozgdvDeDU= +github.com/hashicorp/go-syslog v1.0.0/go.mod h1:qPfqrKkXGihmCqbJM2mZgkZGvKG1dFdvsLplgctolz4= +github.com/hashicorp/go-uuid v1.0.0/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= +github.com/hashicorp/go-uuid v1.0.1/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= +github.com/hashicorp/go.net v0.0.1/go.mod h1:hjKkEWcCURg++eb33jQU7oqQcI9XDCnUzHA0oac0k90= +github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= +github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= +github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4= +github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ= +github.com/hashicorp/logutils v1.0.0/go.mod h1:QIAnNjmIWmVIIkWDTG1z5v++HQmx9WQRO+LraFDTW64= +github.com/hashicorp/mdns v1.0.0/go.mod h1:tL+uN++7HEJ6SQLQ2/p+z2pH24WQKWjBPkE0mNTz8vQ= +github.com/hashicorp/memberlist v0.1.3/go.mod h1:ajVTdAv/9Im8oMAAj5G31PhhMCZJV2pPBoIllUwCN7I= +github.com/hashicorp/serf v0.8.2/go.mod 
h1:6hOLApaqBFA1NXqRQAsxw9QxuDEvNxSQRwA/JwenrHc= +github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= +github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= +github.com/inconshreveable/mousetrap v1.0.0 h1:Z8tu5sraLXCXIcARxBp/8cbvlwVa7Z1NHg9XEKhtSvM= +github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8= +github.com/json-iterator/go v1.1.11/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= +github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= +github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk= +github.com/jtolds/gls v4.20.0+incompatible h1:xdiiI2gbIgH/gLH7ADydsJ1uDOEzR8yvV7C0MuV77Wo= +github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU= +github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= +github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= +github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg= +github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI= +github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= +github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= +github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE= +github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= +github.com/magiconair/properties v1.8.5 h1:b6kJs+EmPFMYGkow9GiUyCyOvIwYetYJ3fSaWak/Gls= +github.com/magiconair/properties v1.8.5/go.mod h1:y3VJvCyxH9uVvJTWEGAELF3aiYNyPKd5NZ3oSwXrF60= +github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU= +github.com/mattn/go-isatty v0.0.3/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4= +github.com/miekg/dns v1.0.14/go.mod h1:W1PPwlIAgtquWBMBEV9nkV9Cazfe8ScdGz/Lj7v3Nrg= +github.com/mitchellh/cli v1.0.0/go.mod h1:hNIlj7HEI86fIcpObd7a0FcrxTWetlwJDGcceTlRvqc= +github.com/mitchellh/go-homedir v1.0.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= +github.com/mitchellh/go-testing-interface v1.0.0/go.mod h1:kRemZodwjscx+RGhAo8eIhFbs2+BFgRtFPeD/KE+zxI= +github.com/mitchellh/gox v0.4.0/go.mod h1:Sd9lOJ0+aimLBi73mGofS1ycjY8lL3uZM3JPS42BGNg= +github.com/mitchellh/iochan v1.0.0/go.mod h1:JwYml1nuB7xOzsp52dPpHFffvOCDupsG0QubkSMEySY= +github.com/mitchellh/mapstructure v0.0.0-20160808181253-ca63d7c062ee/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= +github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= +github.com/mitchellh/mapstructure v1.4.1 h1:CpVNEelQCZBooIPDn+AR3NpivK/TIKU8bDxdASFVQag= +github.com/mitchellh/mapstructure v1.4.1/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= +github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= +github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= +github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= +github.com/pascaldekloe/goe v0.0.0-20180627143212-57f6aae5913c/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc= +github.com/pelletier/go-toml v1.9.3 h1:zeC5b1GviRUyKYd6OJPvBU/mcVDVoL1OhT17FCt5dSQ= 
+github.com/pelletier/go-toml v1.9.3/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c= +github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pkg/sftp v1.10.1/go.mod h1:lYOWFsE0bwd1+KfKJaKeuokY15vzFx25BLbzYYoAxZI= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/posener/complete v1.1.1/go.mod h1:em0nMJCgc9GFtwrmVmEMR/ZL6WyhyjMBndrE9hABlRI= +github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ= +github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= +github.com/russross/blackfriday/v2 v2.0.1 h1:lPqVAte+HuHNfhJ/0LC98ESWRz8afy9tM/0RK8m9o+Q= +github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= +github.com/ryanuber/columnize v0.0.0-20160712163229-9b3edd62028f/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts= +github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc= +github.com/shurcooL/sanitized_anchor_name v1.0.0 h1:PdmoCO6wvbs+7yrJyMORt4/BmY5IYyJwS/kOiWx8mHo= +github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc= +github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d h1:zE9ykElWQ6/NYmHa3jpm/yHnI4xSofP+UP6SpjHcSeM= +github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc= +github.com/smartystreets/goconvey v1.6.4 h1:fv0U8FUIMPNf1L9lnHLvLhgicrIVChEkdzIKYqbNC9s= +github.com/smartystreets/goconvey v1.6.4/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA= +github.com/spf13/afero v1.6.0 h1:xoax2sJ2DT8S8xA2paPFjDCScCNeWsg75VG0DLRreiY= +github.com/spf13/afero v1.6.0/go.mod h1:Ai8FlHk4v/PARR026UzYexafAt9roJ7LcLMAmO6Z93I= +github.com/spf13/cast v1.3.1 h1:nFm6S0SMdyzrzcmThSipiEubIDy8WEXKNZ0UOgiRpng= +github.com/spf13/cast v1.3.1/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= +github.com/spf13/jwalterweatherman v1.1.0 h1:ue6voC5bR5F8YxI5S67j9i582FU4Qvo2bmqnqMYADFk= +github.com/spf13/jwalterweatherman v1.1.0/go.mod h1:aNWZUN0dPAAO/Ljvb5BEdw96iTZ0EXowPYD95IqWIGo= +github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= +github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= +github.com/spf13/viper v1.8.1 h1:Kq1fyeebqsBfbjZj4EL7gj2IO0mMaiyjYUWcUsl2O44= +github.com/spf13/viper v1.8.1/go.mod h1:o0Pch8wJ9BVSWGQMbra6iw0oQ5oktSIBaujf1rJH9Ns= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= +github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= +github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= +github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= +github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY= +github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/subosito/gotenv v1.2.0 h1:Slr1R9HxAlEKefgq5jn9U+DnETlIUa6HfgEzj0g5d7s= 
+github.com/subosito/gotenv v1.2.0/go.mod h1:N0PQaV/YGNqwC0u51sEeR/aUtSLEXKX9iv69rRypqCw= +github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= +go.etcd.io/etcd/api/v3 v3.5.0/go.mod h1:cbVKeC6lCfl7j/8jBhAK6aIYO9XOjdptoxU/nLQcPvs= +go.etcd.io/etcd/client/pkg/v3 v3.5.0/go.mod h1:IJHfcCEKxYu1Os13ZdwCwIUTUVGYTSAM3YSwc9/Ac1g= +go.etcd.io/etcd/client/v2 v2.305.0/go.mod h1:h9puh54ZTgAKtEbut2oe9P4L/oqKCVB6xsXlzd7alYQ= +go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= +go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8= +go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= +go.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= +go.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= +go.opencensus.io v0.22.5/go.mod h1:5pWMHQbX5EPX2/62yrJeAkowc+lfs/XD7Uxpq3pI6kk= +go.opencensus.io v0.23.0/go.mod h1:XItmlyltB5F7CS4xOC1DcqMoFqwtC6OG2xF7mCv7P7E= +go.uber.org/atomic v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc= +go.uber.org/multierr v1.6.0/go.mod h1:cdWPpRnG4AhwMwsgIHip0KRBQjJy5kYEpYjJxpXp9iU= +go.uber.org/zap v1.17.0/go.mod h1:MXVU+bhUf/A7Xi2HNOnopQOrmycQ5Ih87HtOu4q5SSo= +golang.org/x/crypto v0.0.0-20181029021203-45a5f77698d3/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20190820162420-60c769a6c586/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= +golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= +golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= +golang.org/x/exp v0.0.0-20190829153037-c13cbed26979/go.mod h1:86+5VVa7VpoJ4kLfm080zCjGlMRFzhUhsZKEZO7MGek= +golang.org/x/exp v0.0.0-20191030013958-a1ab85dbe136/go.mod h1:JXzH8nQsPlswgeRAPE3MuO9GYsAcnJvJ4vnMwN/5qkY= +golang.org/x/exp v0.0.0-20191129062945-2f5052295587/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= +golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= +golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= +golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM= +golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU= +golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod 
h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= +golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= +golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= +golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= +golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= +golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20190409202823-959b441ac422/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20190909230951-414d861bb4ac/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20191125180803-fdd1cda4f05f/go.mod h1:5qLYkcX4OjUUV8bRuDixDT3tpyyb+LUpUlRWLxfhWrs= +golang.org/x/lint v0.0.0-20200130185559-910be7a94367/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/lint v0.0.0-20200302205851-738671d3881b/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/lint v0.0.0-20201208152925-83fdc39ff7b5/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/lint v0.0.0-20210508222113-6edffad5e616/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE= +golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028/go.mod h1:E/iHnbuqvinMTCcRqshq8CkpyQDoeVncDDYHnLhea+o= +golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc= +golang.org/x/mod v0.1.0/go.mod h1:0QHyrYULN0/3qlju5TqG8bIK38QM8yzMo5ekMj3DlcY= +golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= +golang.org/x/mod v0.1.1-0.20191107180719-034126e5016b/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= +golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.4.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20181023162649-9b4f9f5ad519/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20181201002055-351d144fa1fc/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190501004415-9ce7a6920f09/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod 
h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks= +golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190628185345-da137c7871d7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190724013045-ca1201d0de80/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20191209160850-c0dbc17a3553/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200114155413-6afb5195e5aa/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200202094626-16171245cfb2/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200222125558-5a598a2470a0/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200301022130-244492dfa37a/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200324143707-d3edc9973b7e/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200501053045-e0ff5e5a1de5/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200506145744-7e3656a0809f/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200513185701-a91f0712d120/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200520182314-0ba52f642ac2/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200625001655-4c5254603344/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/net v0.0.0-20200707034311-ab3426394381/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20201031054903-ff519b6c9102/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20201209123823-ac852fbbde11/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20210119194325-5f4716e94777/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20210316092652-d523dce5a7f4/go.mod h1:RBQZq4jEuRlivfhVLdyRGr576XBO4/greRjx4P4O3yc= +golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= +golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= +golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20191202225959-858c2ad4c8b6/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20200902213428-5d25da1a8d43/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 
v0.0.0-20201109201403-9fd604954f58/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20201208152858-08078c50e5b5/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210218202405-ba52d332ba99/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210220000619-9bb904979d93/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210313182246-cd4f82c27b84/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210402161424-2e8d93401602/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20200317015054-43a5402ce75a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20181026203630-95b1ffbd15a5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191005200804-aed5e4c7ecf9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200113162924-86b910548bc1/go.mod 
h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200122134326-e047566fdf82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200202164722-d101bd2416d5/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200212091648-12a6c2dcc1e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200331124033-c3d80250170d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200501052902-10377860bb8e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200511232937-7e40ca221e25/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200515095857-1151b9dac4a9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200523222454-059865788121/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200803210538-64077c9b5642/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200905004654-be1d3432aa8f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201201145000-ef89a241ccb3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210104204734-6f8348627aad/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210220050731-9a76102bfb43/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210305230114-8fe3ee5dd75b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210315160823-c6e025ad8005/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210320140829-1e4c9ba3b0c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210403161142-5e06dd20ab57/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210510120138-977fb7262007 h1:gG67DSER+11cZvqIMb8S8bt0vZtiN6xWYARwirrOSfE= +golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= +golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= +golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.5 
h1:i6eZZ+zk0SOf0xgBpEpPD18qWcJda6q1sxt3S0kzyUQ= +golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= +golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190312151545-0bb0c0a6e846/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190328211700-ab21143f2384/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190816200558-6889da9d5479/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191112195655-aa38f8e97acc/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191113191852-77e3bb0ad9e7/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191115202509-3a792d9c32b2/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191125144606-a911d9008d1f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191130070609-6e064ea0cf2d/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191216173652-a0e659d51361/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20191227053925-7b8e75db28f4/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200117161641-43d50277825c/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200122220014-bf1340f18c4a/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200204074204-1cc6d1ef6c74/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= 
+golang.org/x/tools v0.0.0-20200207183749-b753a1ba74fa/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200212150539-ea181f53ac56/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200224181240-023911ca70b2/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200227222343-706bc42d1f0d/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200304193943-95d2e580d8eb/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= +golang.org/x/tools v0.0.0-20200312045724-11d5b4c81c7d/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= +golang.org/x/tools v0.0.0-20200331025713-a30bf2db82d4/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8= +golang.org/x/tools v0.0.0-20200501065659-ab2804fb9c9d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200512131952-2bc93b1c0c88/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200515010526-7d3b6ebf133d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200618134242-20370b0cb4b2/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200729194436-6467de6f59a7/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200804011535-6c149bb5ef0d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200825202427-b303f430e36d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200904185747-39188db58858/go.mod h1:Cj7w3i3Rnn0Xh82ur9kSqwfTHTeVxaDqrfMjpcNT6bE= +golang.org/x/tools v0.0.0-20201110124207-079ba7bd75cd/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20201201161351-ac6f37ff4c2a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20201208233053-a543418bbed2/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20210105154028-b0ab187a4818/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0= +golang.org/x/tools v0.1.2/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= +golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE= +google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M= +google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= +google.golang.org/api v0.9.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= +google.golang.org/api v0.13.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= +google.golang.org/api v0.14.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= +google.golang.org/api v0.15.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= +google.golang.org/api 
v0.17.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.18.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.19.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.20.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.22.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.24.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE= +google.golang.org/api v0.28.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE= +google.golang.org/api v0.29.0/go.mod h1:Lcubydp8VUV7KeIHD9z2Bys/sm/vGKnG1UHuDBSrHWM= +google.golang.org/api v0.30.0/go.mod h1:QGmEvQ87FHZNiUVJkT14jQNYJ4ZJjdRF23ZXz5138Fc= +google.golang.org/api v0.35.0/go.mod h1:/XrVsuzM0rZmrsbjJutiuftIzeuTQcEeaYcSk/mQ1dg= +google.golang.org/api v0.36.0/go.mod h1:+z5ficQTmoYpPn8LCUNVpK5I7hwkpjbcgqA7I34qYtE= +google.golang.org/api v0.40.0/go.mod h1:fYKFpnQN0DsDSKRVRcQSDQNtqWPfM9i+zNPxepjRCQ8= +google.golang.org/api v0.41.0/go.mod h1:RkxM5lITDfTzmyKFPt+wGrCJbVfniCr2ool8kTBzRTU= +google.golang.org/api v0.43.0/go.mod h1:nQsDGjRXMo4lvh5hP0TKqF244gqhGcr/YSIykhUk/94= +google.golang.org/api v0.44.0/go.mod h1:EBOGZqzyhtvMDoxwS97ctnh0zUmYY6CxqXsc1AvkYD8= +google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= +google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= +google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= +google.golang.org/appengine v1.6.1/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0= +google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= +google.golang.org/appengine v1.6.6/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= +google.golang.org/appengine v1.6.7/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= +google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= +google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190425155659-357c62f0e4bb/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190502173448-54afdca5d873/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190801165951-fa694d86fc64/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= +google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= +google.golang.org/genproto v0.0.0-20190911173649-1774047e7e51/go.mod h1:IbNlFCBrqXvoKpeg0TB2l7cyZUmoaFKYIwrEpbDKLA8= +google.golang.org/genproto v0.0.0-20191108220845-16a3f7862a1a/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20191115194625-c23dd37a84c9/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20191216164720-4f79533eabd1/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20191230161307-f3c370f40bfb/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20200115191322-ca5a22157cba/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20200122232147-0452cf42e150/go.mod 
h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20200204135345-fa8e72b47b90/go.mod h1:GmwEX6Z4W5gMy59cAlVYjN9JhxgbQH6Gn+gFDQe2lzA= +google.golang.org/genproto v0.0.0-20200212174721-66ed5ce911ce/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200224152610-e50cd9704f63/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200228133532-8c2c7df3a383/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200305110556-506484158171/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200312145019-da6875a35672/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200331122359-1ee6d9798940/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200430143042-b979b6f78d84/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200511104702-f5ebc3bea380/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200513103714-09dca8ec2884/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200515170657-fc4c6c6a6587/go.mod h1:YsZOwe1myG/8QRHRsmBRE1LrgQY60beZKjly0O1fX9U= +google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= +google.golang.org/genproto v0.0.0-20200618031413-b414f8b61790/go.mod h1:jDfRM7FcilCzHH/e9qn6dsT145K34l5v+OpcnNgKAAA= +google.golang.org/genproto v0.0.0-20200729003335-053ba62fc06f/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20200804131852-c06518451d9c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20200825200019-8632dd797987/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20200904004341-0bd0a958aa1d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201109203340-2640f1f9cdfb/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201201144952-b05cb90ed32e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201210142538-e3217bee35cc/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201214200347-8c77b98c765d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210222152913-aa3ee6e6a81c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210303154014-9728d6b83eeb/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210310155132-4ce2db91004e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210319143718-93e7006c17a6/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210402141018-6c239bbf2bb1/go.mod h1:9lPAdzaEmUacj36I+k7YKbEc5CXzPIeORRgDAUOu28A= +google.golang.org/genproto v0.0.0-20210602131652-f16073e35f0c/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0= +google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= +google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= +google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= +google.golang.org/grpc v1.23.0/go.mod 
h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= +google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY= +google.golang.org/grpc v1.26.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.27.1/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.28.0/go.mod h1:rpkK4SK4GF4Ach/+MFLZUBavHOvF2JJB5uozKKal+60= +google.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3IjizoKk= +google.golang.org/grpc v1.30.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= +google.golang.org/grpc v1.31.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= +google.golang.org/grpc v1.31.1/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= +google.golang.org/grpc v1.33.1/go.mod h1:fr5YgcSWrqhRRxogOsw7RzIpsmvOZ6IcH4kBYTpR3n0= +google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc= +google.golang.org/grpc v1.34.0/go.mod h1:WotjhfgOW/POjDeRt8vscBtXq+2VjORFy659qA51WJ8= +google.golang.org/grpc v1.35.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= +google.golang.org/grpc v1.36.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= +google.golang.org/grpc v1.36.1/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= +google.golang.org/grpc v1.38.0/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM= +google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= +google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= +google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= +google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE= +google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo= +google.golang.org/protobuf v1.22.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGjtUeSXeh4= +google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= +google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= +google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127 h1:qIbj1fsPNlZgppZ+VLlY7N33q108Sa+fhmuc+sWQYwY= +gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= +gopkg.in/ini.v1 v1.62.0 h1:duBzk771uxoUuOlyRLkHsygud9+5lrlGjdFBb4mSKDU= +gopkg.in/ini.v1 v1.62.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= +gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= 
+gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b h1:h8qDotaEPuJATrMmW04NCwg7v22aHH28wwpauUhK9Oo= +gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg= +honnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= +honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= +rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8= +rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0= +rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA= diff --git a/vendor/github.com/spf13/cobra/powershell_completions.go b/vendor/github.com/spf13/cobra/powershell_completions.go new file mode 100644 index 000000000..59234c09f --- /dev/null +++ b/vendor/github.com/spf13/cobra/powershell_completions.go @@ -0,0 +1,285 @@ +// The generated scripts require PowerShell v5.0+ (which comes Windows 10, but +// can be downloaded separately for windows 7 or 8.1). + +package cobra + +import ( + "bytes" + "fmt" + "io" + "os" +) + +func genPowerShellComp(buf io.StringWriter, name string, includeDesc bool) { + compCmd := ShellCompRequestCmd + if !includeDesc { + compCmd = ShellCompNoDescRequestCmd + } + WriteStringAndCheck(buf, fmt.Sprintf(`# powershell completion for %-36[1]s -*- shell-script -*- + +function __%[1]s_debug { + if ($env:BASH_COMP_DEBUG_FILE) { + "$args" | Out-File -Append -FilePath "$env:BASH_COMP_DEBUG_FILE" + } +} + +filter __%[1]s_escapeStringWithSpecialChars { +`+" $_ -replace '\\s|#|@|\\$|;|,|''|\\{|\\}|\\(|\\)|\"|`|\\||<|>|&','`$&'"+` +} + +Register-ArgumentCompleter -CommandName '%[1]s' -ScriptBlock { + param( + $WordToComplete, + $CommandAst, + $CursorPosition + ) + + # Get the current command line and convert into a string + $Command = $CommandAst.CommandElements + $Command = "$Command" + + __%[1]s_debug "" + __%[1]s_debug "========= starting completion logic ==========" + __%[1]s_debug "WordToComplete: $WordToComplete Command: $Command CursorPosition: $CursorPosition" + + # The user could have moved the cursor backwards on the command-line. + # We need to trigger completion from the $CursorPosition location, so we need + # to truncate the command-line ($Command) up to the $CursorPosition location. + # Make sure the $Command is longer then the $CursorPosition before we truncate. + # This happens because the $Command does not include the last space. 
+ if ($Command.Length -gt $CursorPosition) { + $Command=$Command.Substring(0,$CursorPosition) + } + __%[1]s_debug "Truncated command: $Command" + + $ShellCompDirectiveError=%[3]d + $ShellCompDirectiveNoSpace=%[4]d + $ShellCompDirectiveNoFileComp=%[5]d + $ShellCompDirectiveFilterFileExt=%[6]d + $ShellCompDirectiveFilterDirs=%[7]d + + # Prepare the command to request completions for the program. + # Split the command at the first space to separate the program and arguments. + $Program,$Arguments = $Command.Split(" ",2) + $RequestComp="$Program %[2]s $Arguments" + __%[1]s_debug "RequestComp: $RequestComp" + + # we cannot use $WordToComplete because it + # has the wrong values if the cursor was moved + # so use the last argument + if ($WordToComplete -ne "" ) { + $WordToComplete = $Arguments.Split(" ")[-1] + } + __%[1]s_debug "New WordToComplete: $WordToComplete" + + + # Check for flag with equal sign + $IsEqualFlag = ($WordToComplete -Like "--*=*" ) + if ( $IsEqualFlag ) { + __%[1]s_debug "Completing equal sign flag" + # Remove the flag part + $Flag,$WordToComplete = $WordToComplete.Split("=",2) + } + + if ( $WordToComplete -eq "" -And ( -Not $IsEqualFlag )) { + # If the last parameter is complete (there is a space following it) + # We add an extra empty parameter so we can indicate this to the go method. + __%[1]s_debug "Adding extra empty parameter" +`+" # We need to use `\"`\" to pass an empty argument a \"\" or '' does not work!!!"+` +`+" $RequestComp=\"$RequestComp\" + ' `\"`\"'"+` + } + + __%[1]s_debug "Calling $RequestComp" + #call the command store the output in $out and redirect stderr and stdout to null + # $Out is an array contains each line per element + Invoke-Expression -OutVariable out "$RequestComp" 2>&1 | Out-Null + + + # get directive from last line + [int]$Directive = $Out[-1].TrimStart(':') + if ($Directive -eq "") { + # There is no directive specified + $Directive = 0 + } + __%[1]s_debug "The completion directive is: $Directive" + + # remove directive (last element) from out + $Out = $Out | Where-Object { $_ -ne $Out[-1] } + __%[1]s_debug "The completions are: $Out" + + if (($Directive -band $ShellCompDirectiveError) -ne 0 ) { + # Error code. No completion. + __%[1]s_debug "Received error from custom completion go code" + return + } + + $Longest = 0 + $Values = $Out | ForEach-Object { + #Split the output in name and description +`+" $Name, $Description = $_.Split(\"`t\",2)"+` + __%[1]s_debug "Name: $Name Description: $Description" + + # Look for the longest completion so that we can format things nicely + if ($Longest -lt $Name.Length) { + $Longest = $Name.Length + } + + # Set the description to a one space string if there is none set. 
+ # This is needed because the CompletionResult does not accept an empty string as argument + if (-Not $Description) { + $Description = " " + } + @{Name="$Name";Description="$Description"} + } + + + $Space = " " + if (($Directive -band $ShellCompDirectiveNoSpace) -ne 0 ) { + # remove the space here + __%[1]s_debug "ShellCompDirectiveNoSpace is called" + $Space = "" + } + + if ((($Directive -band $ShellCompDirectiveFilterFileExt) -ne 0 ) -or + (($Directive -band $ShellCompDirectiveFilterDirs) -ne 0 )) { + __%[1]s_debug "ShellCompDirectiveFilterFileExt ShellCompDirectiveFilterDirs are not supported" + + # return here to prevent the completion of the extensions + return + } + + $Values = $Values | Where-Object { + # filter the result + $_.Name -like "$WordToComplete*" + + # Join the flag back if we have an equal sign flag + if ( $IsEqualFlag ) { + __%[1]s_debug "Join the equal sign flag back to the completion value" + $_.Name = $Flag + "=" + $_.Name + } + } + + if (($Directive -band $ShellCompDirectiveNoFileComp) -ne 0 ) { + __%[1]s_debug "ShellCompDirectiveNoFileComp is called" + + if ($Values.Length -eq 0) { + # Just print an empty string here so the + # shell does not start to complete paths. + # We cannot use CompletionResult here because + # it does not accept an empty string as argument. + "" + return + } + } + + # Get the current mode + $Mode = (Get-PSReadLineKeyHandler | Where-Object {$_.Key -eq "Tab" }).Function + __%[1]s_debug "Mode: $Mode" + + $Values | ForEach-Object { + + # store temporary because switch will overwrite $_ + $comp = $_ + + # PowerShell supports three different completion modes + # - TabCompleteNext (default windows style - on each key press the next option is displayed) + # - Complete (works like bash) + # - MenuComplete (works like zsh) + # You set the mode with Set-PSReadLineKeyHandler -Key Tab -Function + + # CompletionResult Arguments: + # 1) CompletionText text to be used as the auto completion result + # 2) ListItemText text to be displayed in the suggestion list + # 3) ResultType type of completion result + # 4) ToolTip text for the tooltip with details about the object + + switch ($Mode) { + + # bash like + "Complete" { + + if ($Values.Length -eq 1) { + __%[1]s_debug "Only one completion left" + + # insert space after value + [System.Management.Automation.CompletionResult]::new($($comp.Name | __%[1]s_escapeStringWithSpecialChars) + $Space, "$($comp.Name)", 'ParameterValue', "$($comp.Description)") + + } else { + # Add the proper number of spaces to align the descriptions + while($comp.Name.Length -lt $Longest) { + $comp.Name = $comp.Name + " " + } + + # Check for empty description and only add parentheses if needed + if ($($comp.Description) -eq " " ) { + $Description = "" + } else { + $Description = " ($($comp.Description))" + } + + [System.Management.Automation.CompletionResult]::new("$($comp.Name)$Description", "$($comp.Name)$Description", 'ParameterValue', "$($comp.Description)") + } + } + + # zsh like + "MenuComplete" { + # insert space after value + # MenuComplete will automatically show the ToolTip of + # the highlighted value at the bottom of the suggestions. + [System.Management.Automation.CompletionResult]::new($($comp.Name | __%[1]s_escapeStringWithSpecialChars) + $Space, "$($comp.Name)", 'ParameterValue', "$($comp.Description)") + } + + # TabCompleteNext and in case we get something unknown + Default { + # Like MenuComplete but we don't want to add a space here because + # the user need to press space anyway to get the completion. 
+ # Description will not be shown because thats not possible with TabCompleteNext + [System.Management.Automation.CompletionResult]::new($($comp.Name | __%[1]s_escapeStringWithSpecialChars), "$($comp.Name)", 'ParameterValue', "$($comp.Description)") + } + } + + } +} +`, name, compCmd, + ShellCompDirectiveError, ShellCompDirectiveNoSpace, ShellCompDirectiveNoFileComp, + ShellCompDirectiveFilterFileExt, ShellCompDirectiveFilterDirs)) +} + +func (c *Command) genPowerShellCompletion(w io.Writer, includeDesc bool) error { + buf := new(bytes.Buffer) + genPowerShellComp(buf, c.Name(), includeDesc) + _, err := buf.WriteTo(w) + return err +} + +func (c *Command) genPowerShellCompletionFile(filename string, includeDesc bool) error { + outFile, err := os.Create(filename) + if err != nil { + return err + } + defer outFile.Close() + + return c.genPowerShellCompletion(outFile, includeDesc) +} + +// GenPowerShellCompletionFile generates powershell completion file without descriptions. +func (c *Command) GenPowerShellCompletionFile(filename string) error { + return c.genPowerShellCompletionFile(filename, false) +} + +// GenPowerShellCompletion generates powershell completion file without descriptions +// and writes it to the passed writer. +func (c *Command) GenPowerShellCompletion(w io.Writer) error { + return c.genPowerShellCompletion(w, false) +} + +// GenPowerShellCompletionFileWithDesc generates powershell completion file with descriptions. +func (c *Command) GenPowerShellCompletionFileWithDesc(filename string) error { + return c.genPowerShellCompletionFile(filename, true) +} + +// GenPowerShellCompletionWithDesc generates powershell completion file with descriptions +// and writes it to the passed writer. +func (c *Command) GenPowerShellCompletionWithDesc(w io.Writer) error { + return c.genPowerShellCompletion(w, true) +} diff --git a/vendor/github.com/spf13/cobra/powershell_completions.md b/vendor/github.com/spf13/cobra/powershell_completions.md new file mode 100644 index 000000000..c449f1e5c --- /dev/null +++ b/vendor/github.com/spf13/cobra/powershell_completions.md @@ -0,0 +1,3 @@ +# Generating PowerShell Completions For Your Own cobra.Command + +Please refer to [Shell Completions](shell_completions.md#powershell-completions) for details. 
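The vendored powershell_completions.go above exposes `GenPowerShellCompletion`, `GenPowerShellCompletionFile`, and their `WithDesc` variants on `*cobra.Command`. As a minimal sketch of how a program pulling in this cobra version might use them (the `rootCmd` definition and the output filename are illustrative assumptions, not part of this patch):

```go
package main

import (
	"log"
	"os"

	"github.com/spf13/cobra"
)

func main() {
	// Hypothetical root command; any *cobra.Command works here.
	rootCmd := &cobra.Command{
		Use:   "myapp",
		Short: "example program using the vendored cobra",
	}

	// Write the completion script with descriptions to a file...
	if err := rootCmd.GenPowerShellCompletionFileWithDesc("myapp.ps1"); err != nil {
		log.Fatal(err)
	}

	// ...or stream the description-less variant to stdout instead.
	if err := rootCmd.GenPowerShellCompletion(os.Stdout); err != nil {
		log.Fatal(err)
	}
}
```

The generated script can then be dot-sourced from the user's PowerShell profile; as the header comment in the vendored file notes, it requires PowerShell v5.0 or newer.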
diff --git a/vendor/github.com/spf13/cobra/projects_using_cobra.md b/vendor/github.com/spf13/cobra/projects_using_cobra.md new file mode 100644 index 000000000..d98a71e36 --- /dev/null +++ b/vendor/github.com/spf13/cobra/projects_using_cobra.md @@ -0,0 +1,38 @@ +## Projects using Cobra + +- [Arduino CLI](https://github.com/arduino/arduino-cli) +- [Bleve](http://www.blevesearch.com/) +- [CockroachDB](http://www.cockroachlabs.com/) +- [Cosmos SDK](https://github.com/cosmos/cosmos-sdk) +- [Delve](https://github.com/derekparker/delve) +- [Docker (distribution)](https://github.com/docker/distribution) +- [Etcd](https://etcd.io/) +- [Gardener](https://github.com/gardener/gardenctl) +- [Giant Swarm's gsctl](https://github.com/giantswarm/gsctl) +- [Git Bump](https://github.com/erdaltsksn/git-bump) +- [Github CLI](https://github.com/cli/cli) +- [GitHub Labeler](https://github.com/erdaltsksn/gh-label) +- [Golangci-lint](https://golangci-lint.run) +- [GopherJS](http://www.gopherjs.org/) +- [Helm](https://helm.sh) +- [Hugo](https://gohugo.io) +- [Istio](https://istio.io) +- [Kool](https://github.com/kool-dev/kool) +- [Kubernetes](http://kubernetes.io/) +- [Linkerd](https://linkerd.io/) +- [Mattermost-server](https://github.com/mattermost/mattermost-server) +- [Metal Stack CLI](https://github.com/metal-stack/metalctl) +- [Moby (former Docker)](https://github.com/moby/moby) +- [Nanobox](https://github.com/nanobox-io/nanobox)/[Nanopack](https://github.com/nanopack) +- [OpenShift](https://www.openshift.com/) +- [Ory Hydra](https://github.com/ory/hydra) +- [Ory Kratos](https://github.com/ory/kratos) +- [Pouch](https://github.com/alibaba/pouch) +- [ProjectAtomic (enterprise)](http://www.projectatomic.io/) +- [Prototool](https://github.com/uber/prototool) +- [Random](https://github.com/erdaltsksn/random) +- [Rclone](https://rclone.org/) +- [Skaffold](https://skaffold.dev/) +- [Tendermint](https://github.com/tendermint/tendermint) +- [Twitch CLI](https://github.com/twitchdev/twitch-cli) +- [Werf](https://werf.io/) diff --git a/vendor/github.com/spf13/cobra/shell_completions.go b/vendor/github.com/spf13/cobra/shell_completions.go new file mode 100644 index 000000000..d99bf91e5 --- /dev/null +++ b/vendor/github.com/spf13/cobra/shell_completions.go @@ -0,0 +1,84 @@ +package cobra + +import ( + "github.com/spf13/pflag" +) + +// MarkFlagRequired instructs the various shell completion implementations to +// prioritize the named flag when performing completion, +// and causes your command to report an error if invoked without the flag. +func (c *Command) MarkFlagRequired(name string) error { + return MarkFlagRequired(c.Flags(), name) +} + +// MarkPersistentFlagRequired instructs the various shell completion implementations to +// prioritize the named persistent flag when performing completion, +// and causes your command to report an error if invoked without the flag. +func (c *Command) MarkPersistentFlagRequired(name string) error { + return MarkFlagRequired(c.PersistentFlags(), name) +} + +// MarkFlagRequired instructs the various shell completion implementations to +// prioritize the named flag when performing completion, +// and causes your command to report an error if invoked without the flag. +func MarkFlagRequired(flags *pflag.FlagSet, name string) error { + return flags.SetAnnotation(name, BashCompOneRequiredFlag, []string{"true"}) +} + +// MarkFlagFilename instructs the various shell completion implementations to +// limit completions for the named flag to the specified file extensions. 
+func (c *Command) MarkFlagFilename(name string, extensions ...string) error { + return MarkFlagFilename(c.Flags(), name, extensions...) +} + +// MarkFlagCustom adds the BashCompCustom annotation to the named flag, if it exists. +// The bash completion script will call the bash function f for the flag. +// +// This will only work for bash completion. +// It is recommended to instead use c.RegisterFlagCompletionFunc(...) which allows +// to register a Go function which will work across all shells. +func (c *Command) MarkFlagCustom(name string, f string) error { + return MarkFlagCustom(c.Flags(), name, f) +} + +// MarkPersistentFlagFilename instructs the various shell completion +// implementations to limit completions for the named persistent flag to the +// specified file extensions. +func (c *Command) MarkPersistentFlagFilename(name string, extensions ...string) error { + return MarkFlagFilename(c.PersistentFlags(), name, extensions...) +} + +// MarkFlagFilename instructs the various shell completion implementations to +// limit completions for the named flag to the specified file extensions. +func MarkFlagFilename(flags *pflag.FlagSet, name string, extensions ...string) error { + return flags.SetAnnotation(name, BashCompFilenameExt, extensions) +} + +// MarkFlagCustom adds the BashCompCustom annotation to the named flag, if it exists. +// The bash completion script will call the bash function f for the flag. +// +// This will only work for bash completion. +// It is recommended to instead use c.RegisterFlagCompletionFunc(...) which allows +// to register a Go function which will work across all shells. +func MarkFlagCustom(flags *pflag.FlagSet, name string, f string) error { + return flags.SetAnnotation(name, BashCompCustom, []string{f}) +} + +// MarkFlagDirname instructs the various shell completion implementations to +// limit completions for the named flag to directory names. +func (c *Command) MarkFlagDirname(name string) error { + return MarkFlagDirname(c.Flags(), name) +} + +// MarkPersistentFlagDirname instructs the various shell completion +// implementations to limit completions for the named persistent flag to +// directory names. +func (c *Command) MarkPersistentFlagDirname(name string) error { + return MarkFlagDirname(c.PersistentFlags(), name) +} + +// MarkFlagDirname instructs the various shell completion implementations to +// limit completions for the named flag to directory names. +func MarkFlagDirname(flags *pflag.FlagSet, name string) error { + return flags.SetAnnotation(name, BashCompSubdirsInDir, []string{}) +} diff --git a/vendor/github.com/spf13/cobra/shell_completions.md b/vendor/github.com/spf13/cobra/shell_completions.md new file mode 100644 index 000000000..4ba06a11c --- /dev/null +++ b/vendor/github.com/spf13/cobra/shell_completions.md @@ -0,0 +1,546 @@ +# Generating shell completions + +Cobra can generate shell completions for multiple shells. +The currently supported shells are: +- Bash +- Zsh +- fish +- PowerShell + +Cobra will automatically provide your program with a fully functional `completion` command, +similarly to how it provides the `help` command. + +## Creating your own completion command + +If you do not wish to use the default `completion` command, you can choose to +provide your own, which will take precedence over the default one. (This also provides +backwards-compatibility with programs that already have their own `completion` command.) 
+ +If you are using the generator, you can create a completion command by running + +```bash +cobra add completion +``` +and then modifying the generated `cmd/completion.go` file to look something like this +(writing the shell script to stdout allows the most flexible use): + +```go +var completionCmd = &cobra.Command{ + Use: "completion [bash|zsh|fish|powershell]", + Short: "Generate completion script", + Long: `To load completions: + +Bash: + + $ source <(yourprogram completion bash) + + # To load completions for each session, execute once: + # Linux: + $ yourprogram completion bash > /etc/bash_completion.d/yourprogram + # macOS: + $ yourprogram completion bash > /usr/local/etc/bash_completion.d/yourprogram + +Zsh: + + # If shell completion is not already enabled in your environment, + # you will need to enable it. You can execute the following once: + + $ echo "autoload -U compinit; compinit" >> ~/.zshrc + + # To load completions for each session, execute once: + $ yourprogram completion zsh > "${fpath[1]}/_yourprogram" + + # You will need to start a new shell for this setup to take effect. + +fish: + + $ yourprogram completion fish | source + + # To load completions for each session, execute once: + $ yourprogram completion fish > ~/.config/fish/completions/yourprogram.fish + +PowerShell: + + PS> yourprogram completion powershell | Out-String | Invoke-Expression + + # To load completions for every new session, run: + PS> yourprogram completion powershell > yourprogram.ps1 + # and source this file from your PowerShell profile. +`, + DisableFlagsInUseLine: true, + ValidArgs: []string{"bash", "zsh", "fish", "powershell"}, + Args: cobra.ExactValidArgs(1), + Run: func(cmd *cobra.Command, args []string) { + switch args[0] { + case "bash": + cmd.Root().GenBashCompletion(os.Stdout) + case "zsh": + cmd.Root().GenZshCompletion(os.Stdout) + case "fish": + cmd.Root().GenFishCompletion(os.Stdout, true) + case "powershell": + cmd.Root().GenPowerShellCompletionWithDesc(os.Stdout) + } + }, +} +``` + +**Note:** The cobra generator may include messages printed to stdout, for example, if the config file is loaded; this will break the auto-completion script so must be removed. + +## Adapting the default completion command + +Cobra provides a few options for the default `completion` command. To configure such options you must set +the `CompletionOptions` field on the *root* command. + +To tell Cobra *not* to provide the default `completion` command: +``` +rootCmd.CompletionOptions.DisableDefaultCmd = true +``` + +To tell Cobra *not* to provide the user with the `--no-descriptions` flag to the completion sub-commands: +``` +rootCmd.CompletionOptions.DisableNoDescFlag = true +``` + +To tell Cobra to completely disable descriptions for completions: +``` +rootCmd.CompletionOptions.DisableDescriptions = true +``` + +# Customizing completions + +The generated completion scripts will automatically handle completing commands and flags. However, you can make your completions much more powerful by providing information to complete your program's nouns and flag values. + +## Completion of nouns + +### Static completion of nouns + +Cobra allows you to provide a pre-defined list of completion choices for your nouns using the `ValidArgs` field. +For example, if you want `kubectl get [tab][tab]` to show a list of valid "nouns" you have to set them. 
+Some simplified code from `kubectl get` looks like:
+
+```go
+validArgs := []string{"pod", "node", "service", "replicationcontroller"}
+
+cmd := &cobra.Command{
+    Use:     "get [(-o|--output=)json|yaml|template|...] (RESOURCE [NAME] | RESOURCE/NAME ...)",
+    Short:   "Display one or many resources",
+    Long:    get_long,
+    Example: get_example,
+    Run: func(cmd *cobra.Command, args []string) {
+        cobra.CheckErr(RunGet(f, out, cmd, args))
+    },
+    ValidArgs: validArgs,
+}
+```
+
+Notice we put the `ValidArgs` field on the `get` sub-command. Doing so will give results like:
+
+```bash
+$ kubectl get [tab][tab]
+node pod replicationcontroller service
+```
+
+#### Aliases for nouns
+
+If your nouns have aliases, you can define them alongside `ValidArgs` using `ArgAliases`:
+
+```go
+argAliases := []string{"pods", "nodes", "services", "svc", "replicationcontrollers", "rc"}
+
+cmd := &cobra.Command{
+    ...
+    ValidArgs:  validArgs,
+    ArgAliases: argAliases,
+}
+```
+
+The aliases are not shown to the user on tab completion, but they are accepted as valid nouns by
+the completion algorithm if entered manually, e.g. in:
+
+```bash
+$ kubectl get rc [tab][tab]
+backend frontend database
+```
+
+Note that without declaring `rc` as an alias, the completion algorithm would not know to show the list of
+replication controllers following `rc`.
+
+### Dynamic completion of nouns
+
+In some cases it is not possible to provide a list of completions in advance. Instead, the list of completions must be determined at execution-time. In a similar fashion to static completions, you can use the `ValidArgsFunction` field to provide a Go function that Cobra will execute when it needs the list of completion choices for the nouns of a command. Note that either `ValidArgs` or `ValidArgsFunction` can be used for a single cobra command, but not both.
+Simplified code from `helm status` looks like:
+
+```go
+cmd := &cobra.Command{
+    Use:   "status RELEASE_NAME",
+    Short: "Display the status of the named release",
+    Long:  status_long,
+    RunE: func(cmd *cobra.Command, args []string) error {
+        return RunGet(args[0])
+    },
+    ValidArgsFunction: func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) {
+        if len(args) != 0 {
+            return nil, cobra.ShellCompDirectiveNoFileComp
+        }
+        return getReleasesFromCluster(toComplete), cobra.ShellCompDirectiveNoFileComp
+    },
+}
+```
+Where `getReleasesFromCluster()` is a Go function that obtains the list of current Helm releases running on the Kubernetes cluster.
+Notice we put the `ValidArgsFunction` on the `status` sub-command. Let's assume the Helm releases on the cluster are `harbor`, `notary`, `rook` and `thanos`; this dynamic completion will then give results like:
+
+```bash
+$ helm status [tab][tab]
+harbor notary rook thanos
+```
+You may have noticed the use of `cobra.ShellCompDirective`. These directives are bit fields that let you control certain shell completion behaviors for your particular completion. You can combine them with the bit-or operator, such as `cobra.ShellCompDirectiveNoSpace | cobra.ShellCompDirectiveNoFileComp`.
+```go
+// Indicates that the shell will perform its default behavior after completions
+// have been provided (this implies none of the other directives).
+ShellCompDirectiveDefault
+
+// Indicates an error occurred and completions should be ignored.
+ShellCompDirectiveError
+
+// Indicates that the shell should not add a space after the completion,
+// even if there is a single completion provided.
+ShellCompDirectiveNoSpace + +// Indicates that the shell should not provide file completion even when +// no completion is provided. +ShellCompDirectiveNoFileComp + +// Indicates that the returned completions should be used as file extension filters. +// For example, to complete only files of the form *.json or *.yaml: +// return []string{"yaml", "json"}, ShellCompDirectiveFilterFileExt +// For flags, using MarkFlagFilename() and MarkPersistentFlagFilename() +// is a shortcut to using this directive explicitly. +// +ShellCompDirectiveFilterFileExt + +// Indicates that only directory names should be provided in file completion. +// For example: +// return nil, ShellCompDirectiveFilterDirs +// For flags, using MarkFlagDirname() is a shortcut to using this directive explicitly. +// +// To request directory names within another directory, the returned completions +// should specify a single directory name within which to search. For example, +// to complete directories within "themes/": +// return []string{"themes"}, ShellCompDirectiveFilterDirs +// +ShellCompDirectiveFilterDirs +``` + +***Note***: When using the `ValidArgsFunction`, Cobra will call your registered function after having parsed all flags and arguments provided in the command-line. You therefore don't need to do this parsing yourself. For example, when a user calls `helm status --namespace my-rook-ns [tab][tab]`, Cobra will call your registered `ValidArgsFunction` after having parsed the `--namespace` flag, as it would have done when calling the `RunE` function. + +#### Debugging + +Cobra achieves dynamic completion through the use of a hidden command called by the completion script. To debug your Go completion code, you can call this hidden command directly: +```bash +$ helm __complete status har +harbor +:4 +Completion ended with directive: ShellCompDirectiveNoFileComp # This is on stderr +``` +***Important:*** If the noun to complete is empty (when the user has not yet typed any letters of that noun), you must pass an empty parameter to the `__complete` command: +```bash +$ helm __complete status "" +harbor +notary +rook +thanos +:4 +Completion ended with directive: ShellCompDirectiveNoFileComp # This is on stderr +``` +Calling the `__complete` command directly allows you to run the Go debugger to troubleshoot your code. You can also add printouts to your code; Cobra provides the following functions to use for printouts in Go completion code: +```go +// Prints to the completion script debug file (if BASH_COMP_DEBUG_FILE +// is set to a file path) and optionally prints to stderr. +cobra.CompDebug(msg string, printToStdErr bool) { +cobra.CompDebugln(msg string, printToStdErr bool) + +// Prints to the completion script debug file (if BASH_COMP_DEBUG_FILE +// is set to a file path) and to stderr. +cobra.CompError(msg string) +cobra.CompErrorln(msg string) +``` +***Important:*** You should **not** leave traces that print directly to stdout in your completion code as they will be interpreted as completion choices by the completion script. Instead, use the cobra-provided debugging traces functions mentioned above. + +## Completions for flags + +### Mark flags as required + +Most of the time completions will only show sub-commands. But if a flag is required to make a sub-command work, you probably want it to show up when the user types [tab][tab]. 
You can mark a flag as 'Required' like so: + +```go +cmd.MarkFlagRequired("pod") +cmd.MarkFlagRequired("container") +``` + +and you'll get something like + +```bash +$ kubectl exec [tab][tab] +-c --container= -p --pod= +``` + +### Specify dynamic flag completion + +As for nouns, Cobra provides a way of defining dynamic completion of flags. To provide a Go function that Cobra will execute when it needs the list of completion choices for a flag, you must register the function using the `command.RegisterFlagCompletionFunc()` function. + +```go +flagName := "output" +cmd.RegisterFlagCompletionFunc(flagName, func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) { + return []string{"json", "table", "yaml"}, cobra.ShellCompDirectiveDefault +}) +``` +Notice that calling `RegisterFlagCompletionFunc()` is done through the `command` with which the flag is associated. In our example this dynamic completion will give results like so: + +```bash +$ helm status --output [tab][tab] +json table yaml +``` + +#### Debugging + +You can also easily debug your Go completion code for flags: +```bash +$ helm __complete status --output "" +json +table +yaml +:4 +Completion ended with directive: ShellCompDirectiveNoFileComp # This is on stderr +``` +***Important:*** You should **not** leave traces that print to stdout in your completion code as they will be interpreted as completion choices by the completion script. Instead, use the cobra-provided debugging traces functions mentioned further above. + +### Specify valid filename extensions for flags that take a filename + +To limit completions of flag values to file names with certain extensions you can either use the different `MarkFlagFilename()` functions or a combination of `RegisterFlagCompletionFunc()` and `ShellCompDirectiveFilterFileExt`, like so: +```go +flagName := "output" +cmd.MarkFlagFilename(flagName, "yaml", "json") +``` +or +```go +flagName := "output" +cmd.RegisterFlagCompletionFunc(flagName, func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) { + return []string{"yaml", "json"}, ShellCompDirectiveFilterFileExt}) +``` + +### Limit flag completions to directory names + +To limit completions of flag values to directory names you can either use the `MarkFlagDirname()` functions or a combination of `RegisterFlagCompletionFunc()` and `ShellCompDirectiveFilterDirs`, like so: +```go +flagName := "output" +cmd.MarkFlagDirname(flagName) +``` +or +```go +flagName := "output" +cmd.RegisterFlagCompletionFunc(flagName, func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) { + return nil, cobra.ShellCompDirectiveFilterDirs +}) +``` +To limit completions of flag values to directory names *within another directory* you can use a combination of `RegisterFlagCompletionFunc()` and `ShellCompDirectiveFilterDirs` like so: +```go +flagName := "output" +cmd.RegisterFlagCompletionFunc(flagName, func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) { + return []string{"themes"}, cobra.ShellCompDirectiveFilterDirs +}) +``` +### Descriptions for completions + +Cobra provides support for completion descriptions. Such descriptions are supported for each shell +(however, for bash, it is only available in the [completion V2 version](#bash-completion-v2)). +For commands and flags, Cobra will provide the descriptions automatically, based on usage information. 
+For example, using zsh: +``` +$ helm s[tab] +search -- search for a keyword in charts +show -- show information of a chart +status -- displays the status of the named release +``` +while using fish: +``` +$ helm s[tab] +search (search for a keyword in charts) show (show information of a chart) status (displays the status of the named release) +``` + +Cobra allows you to add descriptions to your own completions. Simply add the description text after each completion, following a `\t` separator. This technique applies to completions returned by `ValidArgs`, `ValidArgsFunction` and `RegisterFlagCompletionFunc()`. For example: +```go +ValidArgsFunction: func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) { + return []string{"harbor\tAn image registry", "thanos\tLong-term metrics"}, cobra.ShellCompDirectiveNoFileComp +}} +``` +or +```go +ValidArgs: []string{"bash\tCompletions for bash", "zsh\tCompletions for zsh"} +``` +## Bash completions + +### Dependencies + +The bash completion script generated by Cobra requires the `bash_completion` package. You should update the help text of your completion command to show how to install the `bash_completion` package ([Kubectl docs](https://kubernetes.io/docs/tasks/tools/install-kubectl/#enabling-shell-autocompletion)) + +### Aliases + +You can also configure `bash` aliases for your program and they will also support completions. + +```bash +alias aliasname=origcommand +complete -o default -F __start_origcommand aliasname + +# and now when you run `aliasname` completion will make +# suggestions as it did for `origcommand`. + +$ aliasname +completion firstcommand secondcommand +``` +### Bash legacy dynamic completions + +For backward compatibility, Cobra still supports its bash legacy dynamic completion solution. +Please refer to [Bash Completions](bash_completions.md) for details. + +### Bash completion V2 + +Cobra provides two versions for bash completion. The original bash completion (which started it all!) can be used by calling +`GenBashCompletion()` or `GenBashCompletionFile()`. + +A new V2 bash completion version is also available. This version can be used by calling `GenBashCompletionV2()` or +`GenBashCompletionFileV2()`. The V2 version does **not** support the legacy dynamic completion +(see [Bash Completions](bash_completions.md)) but instead works only with the Go dynamic completion +solution described in this document. +Unless your program already uses the legacy dynamic completion solution, it is recommended that you use the bash +completion V2 solution which provides the following extra features: +- Supports completion descriptions (like the other shells) +- Small completion script of less than 300 lines (v1 generates scripts of thousands of lines; `kubectl` for example has a bash v1 completion script of over 13K lines) +- Streamlined user experience thanks to a completion behavior aligned with the other shells + +`Bash` completion V2 supports descriptions for completions. When calling `GenBashCompletionV2()` or `GenBashCompletionFileV2()` +you must provide these functions with a parameter indicating if the completions should be annotated with a description; Cobra +will provide the description automatically based on usage information. You can choose to make this option configurable by +your users. 
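+
+As a minimal sketch (the `yourprogram` name and the hard-coded `true` below are placeholders for your own command and configuration), generating the V2 script with descriptions could look like:
+
+```go
+package main
+
+import (
+    "log"
+    "os"
+
+    "github.com/spf13/cobra"
+)
+
+func main() {
+    rootCmd := &cobra.Command{Use: "yourprogram"} // placeholder root command
+
+    // The second argument controls whether descriptions are included.
+    if err := rootCmd.GenBashCompletionV2(os.Stdout, true); err != nil {
+        log.Fatal(err)
+    }
+}
+```
+
+The output below shows the difference the description parameter makes: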
+
+```
+# With descriptions
+$ helm s[tab][tab]
+search (search for a keyword in charts) status (display the status of the named release)
+show (show information of a chart)
+
+# Without descriptions
+$ helm s[tab][tab]
+search show status
+```
+**Note**: Cobra's default `completion` command uses bash completion V2. If for some reason you need to use bash completion V1, you will need to implement your own `completion` command.
+## Zsh completions
+
+Cobra supports native zsh completion generated from the root `cobra.Command`.
+The generated completion script should be put somewhere in your `$fpath` and be named
+`_<yourprogram>`. You will need to start a new shell for the completions to become available.
+
+Zsh supports descriptions for completions. Cobra will provide the description automatically,
+based on usage information. Cobra provides a way to completely disable such descriptions by
+using `GenZshCompletionNoDesc()` or `GenZshCompletionFileNoDesc()`. You can choose to make
+this a configurable option to your users.
+```
+# With descriptions
+$ helm s[tab]
+search -- search for a keyword in charts
+show -- show information of a chart
+status -- displays the status of the named release
+
+# Without descriptions
+$ helm s[tab]
+search show status
+```
+*Note*: Because of backward-compatibility requirements, we were forced to have a different API to disable completion descriptions between `zsh` and `fish`.
+
+### Limitations
+
+* Custom completions implemented in Bash scripting (legacy) are not supported and will be ignored for `zsh` (including the use of the `BashCompCustom` flag annotation).
+  * You should instead use `ValidArgsFunction` and `RegisterFlagCompletionFunc()` which are portable to the different shells (`bash`, `zsh`, `fish`, `powershell`).
+* The function `MarkFlagCustom()` is not supported and will be ignored for `zsh`.
+  * You should instead use `RegisterFlagCompletionFunc()`.
+
+### Zsh completions standardization
+
+Cobra 1.1 standardized its zsh completion support to align it with its other shell completions. Although the API was kept backward-compatible, some small changes in behavior were introduced.
+Please refer to [Zsh Completions](zsh_completions.md) for details.
+
+## fish completions
+
+Cobra supports native fish completions generated from the root `cobra.Command`. You can use the `command.GenFishCompletion()` or `command.GenFishCompletionFile()` functions. You must provide these functions with a parameter indicating if the completions should be annotated with a description; Cobra will provide the description automatically based on usage information. You can choose to make this option configurable by your users.
+```
+# With descriptions
+$ helm s[tab]
+search (search for a keyword in charts) show (show information of a chart) status (displays the status of the named release)
+
+# Without descriptions
+$ helm s[tab]
+search show status
+```
+*Note*: Because of backward-compatibility requirements, we were forced to have a different API to disable completion descriptions between `zsh` and `fish`.
+
+### Limitations
+
+* Custom completions implemented in bash scripting (legacy) are not supported and will be ignored for `fish` (including the use of the `BashCompCustom` flag annotation).
+  * You should instead use `ValidArgsFunction` and `RegisterFlagCompletionFunc()` which are portable to the different shells (`bash`, `zsh`, `fish`, `powershell`).
+* The function `MarkFlagCustom()` is not supported and will be ignored for `fish`.
+ * You should instead use `RegisterFlagCompletionFunc()`. +* The following flag completion annotations are not supported and will be ignored for `fish`: + * `BashCompFilenameExt` (filtering by file extension) + * `BashCompSubdirsInDir` (filtering by directory) +* The functions corresponding to the above annotations are consequently not supported and will be ignored for `fish`: + * `MarkFlagFilename()` and `MarkPersistentFlagFilename()` (filtering by file extension) + * `MarkFlagDirname()` and `MarkPersistentFlagDirname()` (filtering by directory) +* Similarly, the following completion directives are not supported and will be ignored for `fish`: + * `ShellCompDirectiveFilterFileExt` (filtering by file extension) + * `ShellCompDirectiveFilterDirs` (filtering by directory) + +## PowerShell completions + +Cobra supports native PowerShell completions generated from the root `cobra.Command`. You can use the `command.GenPowerShellCompletion()` or `command.GenPowerShellCompletionFile()` functions. To include descriptions use `command.GenPowerShellCompletionWithDesc()` and `command.GenPowerShellCompletionFileWithDesc()`. Cobra will provide the description automatically based on usage information. You can choose to make this option configurable by your users. + +The script is designed to support all three PowerShell completion modes: + +* TabCompleteNext (default windows style - on each key press the next option is displayed) +* Complete (works like bash) +* MenuComplete (works like zsh) + +You set the mode with `Set-PSReadLineKeyHandler -Key Tab -Function `. Descriptions are only displayed when using the `Complete` or `MenuComplete` mode. + +Users need PowerShell version 5.0 or above, which comes with Windows 10 and can be downloaded separately for Windows 7 or 8.1. They can then write the completions to a file and source this file from their PowerShell profile, which is referenced by the `$Profile` environment variable. See `Get-Help about_Profiles` for more info about PowerShell profiles. + +``` +# With descriptions and Mode 'Complete' +$ helm s[tab] +search (search for a keyword in charts) show (show information of a chart) status (displays the status of the named release) + +# With descriptions and Mode 'MenuComplete' The description of the current selected value will be displayed below the suggestions. +$ helm s[tab] +search show status + +search for a keyword in charts + +# Without descriptions +$ helm s[tab] +search show status +``` + +### Limitations + +* Custom completions implemented in bash scripting (legacy) are not supported and will be ignored for `powershell` (including the use of the `BashCompCustom` flag annotation). + * You should instead use `ValidArgsFunction` and `RegisterFlagCompletionFunc()` which are portable to the different shells (`bash`, `zsh`, `fish`, `powershell`). +* The function `MarkFlagCustom()` is not supported and will be ignored for `powershell`. + * You should instead use `RegisterFlagCompletionFunc()`. 
+* The following flag completion annotations are not supported and will be ignored for `powershell`: + * `BashCompFilenameExt` (filtering by file extension) + * `BashCompSubdirsInDir` (filtering by directory) +* The functions corresponding to the above annotations are consequently not supported and will be ignored for `powershell`: + * `MarkFlagFilename()` and `MarkPersistentFlagFilename()` (filtering by file extension) + * `MarkFlagDirname()` and `MarkPersistentFlagDirname()` (filtering by directory) +* Similarly, the following completion directives are not supported and will be ignored for `powershell`: + * `ShellCompDirectiveFilterFileExt` (filtering by file extension) + * `ShellCompDirectiveFilterDirs` (filtering by directory) diff --git a/vendor/github.com/spf13/cobra/user_guide.md b/vendor/github.com/spf13/cobra/user_guide.md new file mode 100644 index 000000000..311abce28 --- /dev/null +++ b/vendor/github.com/spf13/cobra/user_guide.md @@ -0,0 +1,637 @@ +# User Guide + +While you are welcome to provide your own organization, typically a Cobra-based +application will follow the following organizational structure: + +``` + ▾ appName/ + ▾ cmd/ + add.go + your.go + commands.go + here.go + main.go +``` + +In a Cobra app, typically the main.go file is very bare. It serves one purpose: initializing Cobra. + +```go +package main + +import ( + "{pathToYourApp}/cmd" +) + +func main() { + cmd.Execute() +} +``` + +## Using the Cobra Generator + +Cobra provides its own program that will create your application and add any +commands you want. It's the easiest way to incorporate Cobra into your application. + +[Here](https://github.com/spf13/cobra/blob/master/cobra/README.md) you can find more information about it. + +## Using the Cobra Library + +To manually implement Cobra you need to create a bare main.go file and a rootCmd file. +You will optionally provide additional commands as you see fit. + +### Create rootCmd + +Cobra doesn't require any special constructors. Simply create your commands. + +Ideally you place this in app/cmd/root.go: + +```go +var rootCmd = &cobra.Command{ + Use: "hugo", + Short: "Hugo is a very fast static site generator", + Long: `A Fast and Flexible Static Site Generator built with + love by spf13 and friends in Go. + Complete documentation is available at http://hugo.spf13.com`, + Run: func(cmd *cobra.Command, args []string) { + // Do Stuff Here + }, +} + +func Execute() { + if err := rootCmd.Execute(); err != nil { + fmt.Fprintln(os.Stderr, err) + os.Exit(1) + } +} +``` + +You will additionally define flags and handle configuration in your init() function. + +For example cmd/root.go: + +```go +package cmd + +import ( + "fmt" + "os" + + "github.com/spf13/cobra" + "github.com/spf13/viper" +) + +var ( + // Used for flags. + cfgFile string + userLicense string + + rootCmd = &cobra.Command{ + Use: "cobra", + Short: "A generator for Cobra based Applications", + Long: `Cobra is a CLI library for Go that empowers applications. +This application is a tool to generate the needed files +to quickly create a Cobra application.`, + } +) + +// Execute executes the root command. 
+func Execute() error { + return rootCmd.Execute() +} + +func init() { + cobra.OnInitialize(initConfig) + + rootCmd.PersistentFlags().StringVar(&cfgFile, "config", "", "config file (default is $HOME/.cobra.yaml)") + rootCmd.PersistentFlags().StringP("author", "a", "YOUR NAME", "author name for copyright attribution") + rootCmd.PersistentFlags().StringVarP(&userLicense, "license", "l", "", "name of license for the project") + rootCmd.PersistentFlags().Bool("viper", true, "use Viper for configuration") + viper.BindPFlag("author", rootCmd.PersistentFlags().Lookup("author")) + viper.BindPFlag("useViper", rootCmd.PersistentFlags().Lookup("viper")) + viper.SetDefault("author", "NAME HERE ") + viper.SetDefault("license", "apache") + + rootCmd.AddCommand(addCmd) + rootCmd.AddCommand(initCmd) +} + +func initConfig() { + if cfgFile != "" { + // Use config file from the flag. + viper.SetConfigFile(cfgFile) + } else { + // Find home directory. + home, err := os.UserHomeDir() + cobra.CheckErr(err) + + // Search config in home directory with name ".cobra" (without extension). + viper.AddConfigPath(home) + viper.SetConfigType("yaml") + viper.SetConfigName(".cobra") + } + + viper.AutomaticEnv() + + if err := viper.ReadInConfig(); err == nil { + fmt.Println("Using config file:", viper.ConfigFileUsed()) + } +} +``` + +### Create your main.go + +With the root command you need to have your main function execute it. +Execute should be run on the root for clarity, though it can be called on any command. + +In a Cobra app, typically the main.go file is very bare. It serves one purpose: to initialize Cobra. + +```go +package main + +import ( + "{pathToYourApp}/cmd" +) + +func main() { + cmd.Execute() +} +``` + +### Create additional commands + +Additional commands can be defined and typically are each given their own file +inside of the cmd/ directory. + +If you wanted to create a version command you would create cmd/version.go and +populate it with the following: + +```go +package cmd + +import ( + "fmt" + + "github.com/spf13/cobra" +) + +func init() { + rootCmd.AddCommand(versionCmd) +} + +var versionCmd = &cobra.Command{ + Use: "version", + Short: "Print the version number of Hugo", + Long: `All software has versions. This is Hugo's`, + Run: func(cmd *cobra.Command, args []string) { + fmt.Println("Hugo Static Site Generator v0.9 -- HEAD") + }, +} +``` + +### Returning and handling errors + +If you wish to return an error to the caller of a command, `RunE` can be used. + +```go +package cmd + +import ( + "fmt" + + "github.com/spf13/cobra" +) + +func init() { + rootCmd.AddCommand(tryCmd) +} + +var tryCmd = &cobra.Command{ + Use: "try", + Short: "Try and possibly fail at something", + RunE: func(cmd *cobra.Command, args []string) error { + if err := someFunc(); err != nil { + return err + } + return nil + }, +} +``` + +The error can then be caught at the execute function call. + +## Working with Flags + +Flags provide modifiers to control how the action command operates. + +### Assign flags to a command + +Since the flags are defined and used in different locations, we need to +define a variable outside with the correct scope to assign the flag to +work with. + +```go +var Verbose bool +var Source string +``` + +There are two different approaches to assign a flag. + +### Persistent Flags + +A flag can be 'persistent', meaning that this flag will be available to the +command it's assigned to as well as every command under that command. For +global flags, assign a flag as a persistent flag on the root. 
+ +```go +rootCmd.PersistentFlags().BoolVarP(&Verbose, "verbose", "v", false, "verbose output") +``` + +### Local Flags + +A flag can also be assigned locally, which will only apply to that specific command. + +```go +localCmd.Flags().StringVarP(&Source, "source", "s", "", "Source directory to read from") +``` + +### Local Flag on Parent Commands + +By default, Cobra only parses local flags on the target command, and any local flags on +parent commands are ignored. By enabling `Command.TraverseChildren`, Cobra will +parse local flags on each command before executing the target command. + +```go +command := cobra.Command{ + Use: "print [OPTIONS] [COMMANDS]", + TraverseChildren: true, +} +``` + +### Bind Flags with Config + +You can also bind your flags with [viper](https://github.com/spf13/viper): +```go +var author string + +func init() { + rootCmd.PersistentFlags().StringVar(&author, "author", "YOUR NAME", "Author name for copyright attribution") + viper.BindPFlag("author", rootCmd.PersistentFlags().Lookup("author")) +} +``` + +In this example, the persistent flag `author` is bound with `viper`. +**Note**: the variable `author` will not be set to the value from config, +when the `--author` flag is not provided by user. + +More in [viper documentation](https://github.com/spf13/viper#working-with-flags). + +### Required flags + +Flags are optional by default. If instead you wish your command to report an error +when a flag has not been set, mark it as required: +```go +rootCmd.Flags().StringVarP(&Region, "region", "r", "", "AWS region (required)") +rootCmd.MarkFlagRequired("region") +``` + +Or, for persistent flags: +```go +rootCmd.PersistentFlags().StringVarP(&Region, "region", "r", "", "AWS region (required)") +rootCmd.MarkPersistentFlagRequired("region") +``` + +## Positional and Custom Arguments + +Validation of positional arguments can be specified using the `Args` field +of `Command`. + +The following validators are built in: + +- `NoArgs` - the command will report an error if there are any positional args. +- `ArbitraryArgs` - the command will accept any args. +- `OnlyValidArgs` - the command will report an error if there are any positional args that are not in the `ValidArgs` field of `Command`. +- `MinimumNArgs(int)` - the command will report an error if there are not at least N positional args. +- `MaximumNArgs(int)` - the command will report an error if there are more than N positional args. +- `ExactArgs(int)` - the command will report an error if there are not exactly N positional args. +- `ExactValidArgs(int)` - the command will report an error if there are not exactly N positional args OR if there are any positional args that are not in the `ValidArgs` field of `Command` +- `RangeArgs(min, max)` - the command will report an error if the number of args is not between the minimum and maximum number of expected args. + +An example of setting the custom validator: + +```go +var cmd = &cobra.Command{ + Short: "hello", + Args: func(cmd *cobra.Command, args []string) error { + if len(args) < 1 { + return errors.New("requires a color argument") + } + if myapp.IsValidColor(args[0]) { + return nil + } + return fmt.Errorf("invalid color specified: %s", args[0]) + }, + Run: func(cmd *cobra.Command, args []string) { + fmt.Println("Hello, World!") + }, +} +``` + +## Example + +In the example below, we have defined three commands. Two are at the top level +and one (cmdTimes) is a child of one of the top commands. 
In this case the root +is not executable, meaning that a subcommand is required. This is accomplished +by not providing a 'Run' for the 'rootCmd'. + +We have only defined one flag for a single command. + +More documentation about flags is available at https://github.com/spf13/pflag + +```go +package main + +import ( + "fmt" + "strings" + + "github.com/spf13/cobra" +) + +func main() { + var echoTimes int + + var cmdPrint = &cobra.Command{ + Use: "print [string to print]", + Short: "Print anything to the screen", + Long: `print is for printing anything back to the screen. +For many years people have printed back to the screen.`, + Args: cobra.MinimumNArgs(1), + Run: func(cmd *cobra.Command, args []string) { + fmt.Println("Print: " + strings.Join(args, " ")) + }, + } + + var cmdEcho = &cobra.Command{ + Use: "echo [string to echo]", + Short: "Echo anything to the screen", + Long: `echo is for echoing anything back. +Echo works a lot like print, except it has a child command.`, + Args: cobra.MinimumNArgs(1), + Run: func(cmd *cobra.Command, args []string) { + fmt.Println("Echo: " + strings.Join(args, " ")) + }, + } + + var cmdTimes = &cobra.Command{ + Use: "times [string to echo]", + Short: "Echo anything to the screen more times", + Long: `echo things multiple times back to the user by providing +a count and a string.`, + Args: cobra.MinimumNArgs(1), + Run: func(cmd *cobra.Command, args []string) { + for i := 0; i < echoTimes; i++ { + fmt.Println("Echo: " + strings.Join(args, " ")) + } + }, + } + + cmdTimes.Flags().IntVarP(&echoTimes, "times", "t", 1, "times to echo the input") + + var rootCmd = &cobra.Command{Use: "app"} + rootCmd.AddCommand(cmdPrint, cmdEcho) + cmdEcho.AddCommand(cmdTimes) + rootCmd.Execute() +} +``` + +For a more complete example of a larger application, please checkout [Hugo](http://gohugo.io/). + +## Help Command + +Cobra automatically adds a help command to your application when you have subcommands. +This will be called when a user runs 'app help'. Additionally, help will also +support all other commands as input. Say, for instance, you have a command called +'create' without any additional configuration; Cobra will work when 'app help +create' is called. Every command will automatically have the '--help' flag added. + +### Example + +The following output is automatically generated by Cobra. Nothing beyond the +command and flag definitions are needed. + + $ cobra help + + Cobra is a CLI library for Go that empowers applications. + This application is a tool to generate the needed files + to quickly create a Cobra application. + + Usage: + cobra [command] + + Available Commands: + add Add a command to a Cobra Application + help Help about any command + init Initialize a Cobra Application + + Flags: + -a, --author string author name for copyright attribution (default "YOUR NAME") + --config string config file (default is $HOME/.cobra.yaml) + -h, --help help for cobra + -l, --license string name of license for the project + --viper use Viper for configuration (default true) + + Use "cobra [command] --help" for more information about a command. + + +Help is just a command like any other. There is no special logic or behavior +around it. In fact, you can provide your own if you want. 
+ +### Defining your own help + +You can provide your own Help command or your own template for the default command to use +with following functions: + +```go +cmd.SetHelpCommand(cmd *Command) +cmd.SetHelpFunc(f func(*Command, []string)) +cmd.SetHelpTemplate(s string) +``` + +The latter two will also apply to any children commands. + +## Usage Message + +When the user provides an invalid flag or invalid command, Cobra responds by +showing the user the 'usage'. + +### Example +You may recognize this from the help above. That's because the default help +embeds the usage as part of its output. + + $ cobra --invalid + Error: unknown flag: --invalid + Usage: + cobra [command] + + Available Commands: + add Add a command to a Cobra Application + help Help about any command + init Initialize a Cobra Application + + Flags: + -a, --author string author name for copyright attribution (default "YOUR NAME") + --config string config file (default is $HOME/.cobra.yaml) + -h, --help help for cobra + -l, --license string name of license for the project + --viper use Viper for configuration (default true) + + Use "cobra [command] --help" for more information about a command. + +### Defining your own usage +You can provide your own usage function or template for Cobra to use. +Like help, the function and template are overridable through public methods: + +```go +cmd.SetUsageFunc(f func(*Command) error) +cmd.SetUsageTemplate(s string) +``` + +## Version Flag + +Cobra adds a top-level '--version' flag if the Version field is set on the root command. +Running an application with the '--version' flag will print the version to stdout using +the version template. The template can be customized using the +`cmd.SetVersionTemplate(s string)` function. + +## PreRun and PostRun Hooks + +It is possible to run functions before or after the main `Run` function of your command. The `PersistentPreRun` and `PreRun` functions will be executed before `Run`. `PersistentPostRun` and `PostRun` will be executed after `Run`. The `Persistent*Run` functions will be inherited by children if they do not declare their own. These functions are run in the following order: + +- `PersistentPreRun` +- `PreRun` +- `Run` +- `PostRun` +- `PersistentPostRun` + +An example of two commands which use all of these features is below. 
When the subcommand is executed, it will run the root command's `PersistentPreRun` but not the root command's `PersistentPostRun`: + +```go +package main + +import ( + "fmt" + + "github.com/spf13/cobra" +) + +func main() { + + var rootCmd = &cobra.Command{ + Use: "root [sub]", + Short: "My root command", + PersistentPreRun: func(cmd *cobra.Command, args []string) { + fmt.Printf("Inside rootCmd PersistentPreRun with args: %v\n", args) + }, + PreRun: func(cmd *cobra.Command, args []string) { + fmt.Printf("Inside rootCmd PreRun with args: %v\n", args) + }, + Run: func(cmd *cobra.Command, args []string) { + fmt.Printf("Inside rootCmd Run with args: %v\n", args) + }, + PostRun: func(cmd *cobra.Command, args []string) { + fmt.Printf("Inside rootCmd PostRun with args: %v\n", args) + }, + PersistentPostRun: func(cmd *cobra.Command, args []string) { + fmt.Printf("Inside rootCmd PersistentPostRun with args: %v\n", args) + }, + } + + var subCmd = &cobra.Command{ + Use: "sub [no options!]", + Short: "My subcommand", + PreRun: func(cmd *cobra.Command, args []string) { + fmt.Printf("Inside subCmd PreRun with args: %v\n", args) + }, + Run: func(cmd *cobra.Command, args []string) { + fmt.Printf("Inside subCmd Run with args: %v\n", args) + }, + PostRun: func(cmd *cobra.Command, args []string) { + fmt.Printf("Inside subCmd PostRun with args: %v\n", args) + }, + PersistentPostRun: func(cmd *cobra.Command, args []string) { + fmt.Printf("Inside subCmd PersistentPostRun with args: %v\n", args) + }, + } + + rootCmd.AddCommand(subCmd) + + rootCmd.SetArgs([]string{""}) + rootCmd.Execute() + fmt.Println() + rootCmd.SetArgs([]string{"sub", "arg1", "arg2"}) + rootCmd.Execute() +} +``` + +Output: +``` +Inside rootCmd PersistentPreRun with args: [] +Inside rootCmd PreRun with args: [] +Inside rootCmd Run with args: [] +Inside rootCmd PostRun with args: [] +Inside rootCmd PersistentPostRun with args: [] + +Inside rootCmd PersistentPreRun with args: [arg1 arg2] +Inside subCmd PreRun with args: [arg1 arg2] +Inside subCmd Run with args: [arg1 arg2] +Inside subCmd PostRun with args: [arg1 arg2] +Inside subCmd PersistentPostRun with args: [arg1 arg2] +``` + +## Suggestions when "unknown command" happens + +Cobra will print automatic suggestions when "unknown command" errors happen. This allows Cobra to behave similarly to the `git` command when a typo happens. For example: + +``` +$ hugo srever +Error: unknown command "srever" for "hugo" + +Did you mean this? + server + +Run 'hugo --help' for usage. +``` + +Suggestions are automatic based on every subcommand registered and use an implementation of [Levenshtein distance](http://en.wikipedia.org/wiki/Levenshtein_distance). Every registered command that matches a minimum distance of 2 (ignoring case) will be displayed as a suggestion. + +If you need to disable suggestions or tweak the string distance in your command, use: + +```go +command.DisableSuggestions = true +``` + +or + +```go +command.SuggestionsMinimumDistance = 1 +``` + +You can also explicitly set names for which a given command will be suggested using the `SuggestFor` attribute. This allows suggestions for strings that are not close in terms of string distance, but makes sense in your set of commands and for some which you don't want aliases. Example: + +``` +$ kubectl remove +Error: unknown command "remove" for "kubectl" + +Did you mean this? + delete + +Run 'kubectl help' for usage. +``` + +## Generating documentation for your command + +Cobra can generate documentation based on subcommands, flags, etc. 
Read more about it in the [docs generation documentation](doc/README.md). + +## Generating shell completions + +Cobra can generate a shell-completion file for the following shells: bash, zsh, fish, PowerShell. If you add more information to your commands, these completions can be amazingly powerful and flexible. Read more about it in [Shell Completions](shell_completions.md). diff --git a/vendor/github.com/spf13/cobra/zsh_completions.go b/vendor/github.com/spf13/cobra/zsh_completions.go new file mode 100644 index 000000000..1afec30ea --- /dev/null +++ b/vendor/github.com/spf13/cobra/zsh_completions.go @@ -0,0 +1,258 @@ +package cobra + +import ( + "bytes" + "fmt" + "io" + "os" +) + +// GenZshCompletionFile generates zsh completion file including descriptions. +func (c *Command) GenZshCompletionFile(filename string) error { + return c.genZshCompletionFile(filename, true) +} + +// GenZshCompletion generates zsh completion file including descriptions +// and writes it to the passed writer. +func (c *Command) GenZshCompletion(w io.Writer) error { + return c.genZshCompletion(w, true) +} + +// GenZshCompletionFileNoDesc generates zsh completion file without descriptions. +func (c *Command) GenZshCompletionFileNoDesc(filename string) error { + return c.genZshCompletionFile(filename, false) +} + +// GenZshCompletionNoDesc generates zsh completion file without descriptions +// and writes it to the passed writer. +func (c *Command) GenZshCompletionNoDesc(w io.Writer) error { + return c.genZshCompletion(w, false) +} + +// MarkZshCompPositionalArgumentFile only worked for zsh and its behavior was +// not consistent with Bash completion. It has therefore been disabled. +// Instead, when no other completion is specified, file completion is done by +// default for every argument. One can disable file completion on a per-argument +// basis by using ValidArgsFunction and ShellCompDirectiveNoFileComp. +// To achieve file extension filtering, one can use ValidArgsFunction and +// ShellCompDirectiveFilterFileExt. +// +// Deprecated +func (c *Command) MarkZshCompPositionalArgumentFile(argPosition int, patterns ...string) error { + return nil +} + +// MarkZshCompPositionalArgumentWords only worked for zsh. It has therefore +// been disabled. +// To achieve the same behavior across all shells, one can use +// ValidArgs (for the first argument only) or ValidArgsFunction for +// any argument (can include the first one also). 
+// +// Deprecated +func (c *Command) MarkZshCompPositionalArgumentWords(argPosition int, words ...string) error { + return nil +} + +func (c *Command) genZshCompletionFile(filename string, includeDesc bool) error { + outFile, err := os.Create(filename) + if err != nil { + return err + } + defer outFile.Close() + + return c.genZshCompletion(outFile, includeDesc) +} + +func (c *Command) genZshCompletion(w io.Writer, includeDesc bool) error { + buf := new(bytes.Buffer) + genZshComp(buf, c.Name(), includeDesc) + _, err := buf.WriteTo(w) + return err +} + +func genZshComp(buf io.StringWriter, name string, includeDesc bool) { + compCmd := ShellCompRequestCmd + if !includeDesc { + compCmd = ShellCompNoDescRequestCmd + } + WriteStringAndCheck(buf, fmt.Sprintf(`#compdef _%[1]s %[1]s + +# zsh completion for %-36[1]s -*- shell-script -*- + +__%[1]s_debug() +{ + local file="$BASH_COMP_DEBUG_FILE" + if [[ -n ${file} ]]; then + echo "$*" >> "${file}" + fi +} + +_%[1]s() +{ + local shellCompDirectiveError=%[3]d + local shellCompDirectiveNoSpace=%[4]d + local shellCompDirectiveNoFileComp=%[5]d + local shellCompDirectiveFilterFileExt=%[6]d + local shellCompDirectiveFilterDirs=%[7]d + + local lastParam lastChar flagPrefix requestComp out directive comp lastComp noSpace + local -a completions + + __%[1]s_debug "\n========= starting completion logic ==========" + __%[1]s_debug "CURRENT: ${CURRENT}, words[*]: ${words[*]}" + + # The user could have moved the cursor backwards on the command-line. + # We need to trigger completion from the $CURRENT location, so we need + # to truncate the command-line ($words) up to the $CURRENT location. + # (We cannot use $CURSOR as its value does not work when a command is an alias.) + words=("${=words[1,CURRENT]}") + __%[1]s_debug "Truncated words[*]: ${words[*]}," + + lastParam=${words[-1]} + lastChar=${lastParam[-1]} + __%[1]s_debug "lastParam: ${lastParam}, lastChar: ${lastChar}" + + # For zsh, when completing a flag with an = (e.g., %[1]s -n=) + # completions must be prefixed with the flag + setopt local_options BASH_REMATCH + if [[ "${lastParam}" =~ '-.*=' ]]; then + # We are dealing with a flag with an = + flagPrefix="-P ${BASH_REMATCH}" + fi + + # Prepare the command to obtain completions + requestComp="${words[1]} %[2]s ${words[2,-1]}" + if [ "${lastChar}" = "" ]; then + # If the last parameter is complete (there is a space following it) + # We add an extra empty parameter so we can indicate this to the go completion code. + __%[1]s_debug "Adding extra empty parameter" + requestComp="${requestComp} \"\"" + fi + + __%[1]s_debug "About to call: eval ${requestComp}" + + # Use eval to handle any environment variables and such + out=$(eval ${requestComp} 2>/dev/null) + __%[1]s_debug "completion output: ${out}" + + # Extract the directive integer following a : from the last line + local lastLine + while IFS='\n' read -r line; do + lastLine=${line} + done < <(printf "%%s\n" "${out[@]}") + __%[1]s_debug "last line: ${lastLine}" + + if [ "${lastLine[1]}" = : ]; then + directive=${lastLine[2,-1]} + # Remove the directive including the : and the newline + local suffix + (( suffix=${#lastLine}+2)) + out=${out[1,-$suffix]} + else + # There is no directive specified. Leave $out as is. + __%[1]s_debug "No directive found. 
Setting do default" + directive=0 + fi + + __%[1]s_debug "directive: ${directive}" + __%[1]s_debug "completions: ${out}" + __%[1]s_debug "flagPrefix: ${flagPrefix}" + + if [ $((directive & shellCompDirectiveError)) -ne 0 ]; then + __%[1]s_debug "Completion received error. Ignoring completions." + return + fi + + while IFS='\n' read -r comp; do + if [ -n "$comp" ]; then + # If requested, completions are returned with a description. + # The description is preceded by a TAB character. + # For zsh's _describe, we need to use a : instead of a TAB. + # We first need to escape any : as part of the completion itself. + comp=${comp//:/\\:} + + local tab=$(printf '\t') + comp=${comp//$tab/:} + + __%[1]s_debug "Adding completion: ${comp}" + completions+=${comp} + lastComp=$comp + fi + done < <(printf "%%s\n" "${out[@]}") + + if [ $((directive & shellCompDirectiveNoSpace)) -ne 0 ]; then + __%[1]s_debug "Activating nospace." + noSpace="-S ''" + fi + + if [ $((directive & shellCompDirectiveFilterFileExt)) -ne 0 ]; then + # File extension filtering + local filteringCmd + filteringCmd='_files' + for filter in ${completions[@]}; do + if [ ${filter[1]} != '*' ]; then + # zsh requires a glob pattern to do file filtering + filter="\*.$filter" + fi + filteringCmd+=" -g $filter" + done + filteringCmd+=" ${flagPrefix}" + + __%[1]s_debug "File filtering command: $filteringCmd" + _arguments '*:filename:'"$filteringCmd" + elif [ $((directive & shellCompDirectiveFilterDirs)) -ne 0 ]; then + # File completion for directories only + local subDir + subdir="${completions[1]}" + if [ -n "$subdir" ]; then + __%[1]s_debug "Listing directories in $subdir" + pushd "${subdir}" >/dev/null 2>&1 + else + __%[1]s_debug "Listing directories in ." + fi + + local result + _arguments '*:dirname:_files -/'" ${flagPrefix}" + result=$? + if [ -n "$subdir" ]; then + popd >/dev/null 2>&1 + fi + return $result + else + __%[1]s_debug "Calling _describe" + if eval _describe "completions" completions $flagPrefix $noSpace; then + __%[1]s_debug "_describe found some completions" + + # Return the success of having called _describe + return 0 + else + __%[1]s_debug "_describe did not find completions." + __%[1]s_debug "Checking if we should do file completion." + if [ $((directive & shellCompDirectiveNoFileComp)) -ne 0 ]; then + __%[1]s_debug "deactivating file completion" + + # We must return an error code here to let zsh know that there were no + # completions found by _describe; this is what will trigger other + # matching algorithms to attempt to find completions. + # For example zsh can match letters in the middle of words. + return 1 + else + # Perform file completion + __%[1]s_debug "Activating file completion" + + # We must return the result of this command, so it must be the + # last command, or else we must store its result to return it. 
+ _arguments '*:filename:_files'" ${flagPrefix}" + fi + fi + fi +} + +# don't run the completion function when being source-ed or eval-ed +if [ "$funcstack[1]" = "_%[1]s" ]; then + _%[1]s +fi +`, name, compCmd, + ShellCompDirectiveError, ShellCompDirectiveNoSpace, ShellCompDirectiveNoFileComp, + ShellCompDirectiveFilterFileExt, ShellCompDirectiveFilterDirs)) +} diff --git a/vendor/github.com/spf13/cobra/zsh_completions.md b/vendor/github.com/spf13/cobra/zsh_completions.md new file mode 100644 index 000000000..7cff61787 --- /dev/null +++ b/vendor/github.com/spf13/cobra/zsh_completions.md @@ -0,0 +1,48 @@ +## Generating Zsh Completion For Your cobra.Command + +Please refer to [Shell Completions](shell_completions.md) for details. + +## Zsh completions standardization + +Cobra 1.1 standardized its zsh completion support to align it with its other shell completions. Although the API was kept backwards-compatible, some small changes in behavior were introduced. + +### Deprecation summary + +See further below for more details on these deprecations. + +* `cmd.MarkZshCompPositionalArgumentFile(pos, []string{})` is no longer needed. It is therefore **deprecated** and silently ignored. +* `cmd.MarkZshCompPositionalArgumentFile(pos, glob[])` is **deprecated** and silently ignored. + * Instead use `ValidArgsFunction` with `ShellCompDirectiveFilterFileExt`. +* `cmd.MarkZshCompPositionalArgumentWords()` is **deprecated** and silently ignored. + * Instead use `ValidArgsFunction`. + +### Behavioral changes + +**Noun completion** +|Old behavior|New behavior| +|---|---| +|No file completion by default (opposite of bash)|File completion by default; use `ValidArgsFunction` with `ShellCompDirectiveNoFileComp` to turn off file completion on a per-argument basis| +|Completion of flag names without the `-` prefix having been typed|Flag names are only completed if the user has typed the first `-`| +`cmd.MarkZshCompPositionalArgumentFile(pos, []string{})` used to turn on file completion on a per-argument position basis|File completion for all arguments by default; `cmd.MarkZshCompPositionalArgumentFile()` is **deprecated** and silently ignored| +|`cmd.MarkZshCompPositionalArgumentFile(pos, glob[])` used to turn on file completion **with glob filtering** on a per-argument position basis (zsh-specific)|`cmd.MarkZshCompPositionalArgumentFile()` is **deprecated** and silently ignored; use `ValidArgsFunction` with `ShellCompDirectiveFilterFileExt` for file **extension** filtering (not full glob filtering)| +|`cmd.MarkZshCompPositionalArgumentWords(pos, words[])` used to provide completion choices on a per-argument position basis (zsh-specific)|`cmd.MarkZshCompPositionalArgumentWords()` is **deprecated** and silently ignored; use `ValidArgsFunction` to achieve the same behavior| + +**Flag-value completion** + +|Old behavior|New behavior| +|---|---| +|No file completion by default (opposite of bash)|File completion by default; use `RegisterFlagCompletionFunc()` with `ShellCompDirectiveNoFileComp` to turn off file completion| +|`cmd.MarkFlagFilename(flag, []string{})` and similar used to turn on file completion|File completion by default; `cmd.MarkFlagFilename(flag, []string{})` no longer needed in this context and silently ignored| +|`cmd.MarkFlagFilename(flag, glob[])` used to turn on file completion **with glob filtering** (syntax of `[]string{"*.yaml", "*.yml"}` incompatible with bash)|Will continue to work, however, support for bash syntax is added and should be used instead so as to work for all shells 
(`[]string{"yaml", "yml"}`)| +|`cmd.MarkFlagDirname(flag)` only completes directories (zsh-specific)|Has been added for all shells| +|Completion of a flag name does not repeat, unless flag is of type `*Array` or `*Slice` (not supported by bash)|Retained for `zsh` and added to `fish`| +|Completion of a flag name does not provide the `=` form (unlike bash)|Retained for `zsh` and added to `fish`| + +**Improvements** + +* Custom completion support (`ValidArgsFunction` and `RegisterFlagCompletionFunc()`) +* File completion by default if no other completions found +* Handling of required flags +* File extension filtering no longer mutually exclusive with bash usage +* Completion of directory names *within* another directory +* Support for `=` form of flags diff --git a/vendor/github.com/spf13/jwalterweatherman/.gitignore b/vendor/github.com/spf13/jwalterweatherman/.gitignore new file mode 100644 index 000000000..a71f88af8 --- /dev/null +++ b/vendor/github.com/spf13/jwalterweatherman/.gitignore @@ -0,0 +1,24 @@ +# Compiled Object files, Static and Dynamic libs (Shared Objects) +*.o +*.a +*.so + +# Folders +_obj +_test + +# Architecture specific extensions/prefixes +*.[568vq] +[568vq].out + +*.cgo1.go +*.cgo2.c +_cgo_defun.c +_cgo_gotypes.go +_cgo_export.* + +_testmain.go + +*.exe +*.bench +go.sum \ No newline at end of file diff --git a/vendor/github.com/spf13/jwalterweatherman/LICENSE b/vendor/github.com/spf13/jwalterweatherman/LICENSE new file mode 100644 index 000000000..4527efb9c --- /dev/null +++ b/vendor/github.com/spf13/jwalterweatherman/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014 Steve Francia + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. \ No newline at end of file diff --git a/vendor/github.com/spf13/jwalterweatherman/README.md b/vendor/github.com/spf13/jwalterweatherman/README.md new file mode 100644 index 000000000..932a23fc6 --- /dev/null +++ b/vendor/github.com/spf13/jwalterweatherman/README.md @@ -0,0 +1,148 @@ +jWalterWeatherman +================= + +Seamless printing to the terminal (stdout) and logging to a io.Writer +(file) that’s as easy to use as fmt.Println. + +![and_that__s_why_you_always_leave_a_note_by_jonnyetc-d57q7um](https://cloud.githubusercontent.com/assets/173412/11002937/ccd01654-847d-11e5-828e-12ebaf582eaf.jpg) +Graphic by [JonnyEtc](http://jonnyetc.deviantart.com/art/And-That-s-Why-You-Always-Leave-a-Note-315311422) + +JWW is primarily a wrapper around the excellent standard log library. It +provides a few advantages over using the standard log library alone. 
+ +1. Ready to go out of the box. +2. One library for both printing to the terminal and logging (to files). +3. Really easy to log to either a temp file or a file you specify. + + +I really wanted a very straightforward library that could seamlessly do +the following things. + +1. Replace all the println, printf, etc statements thoughout my code with + something more useful +2. Allow the user to easily control what levels are printed to stdout +3. Allow the user to easily control what levels are logged +4. Provide an easy mechanism (like fmt.Println) to print info to the user + which can be easily logged as well +5. Due to 2 & 3 provide easy verbose mode for output and logs +6. Not have any unnecessary initialization cruft. Just use it. + +# Usage + +## Step 1. Use it +Put calls throughout your source based on type of feedback. +No initialization or setup needs to happen. Just start calling things. + +Available Loggers are: + + * TRACE + * DEBUG + * INFO + * WARN + * ERROR + * CRITICAL + * FATAL + +These each are loggers based on the log standard library and follow the +standard usage. Eg. + +```go + import ( + jww "github.com/spf13/jwalterweatherman" + ) + + ... + + if err != nil { + + // This is a pretty serious error and the user should know about + // it. It will be printed to the terminal as well as logged under the + // default thresholds. + + jww.ERROR.Println(err) + } + + if err2 != nil { + // This error isn’t going to materially change the behavior of the + // application, but it’s something that may not be what the user + // expects. Under the default thresholds, Warn will be logged, but + // not printed to the terminal. + + jww.WARN.Println(err2) + } + + // Information that’s relevant to what’s happening, but not very + // important for the user. Under the default thresholds this will be + // discarded. + + jww.INFO.Printf("information %q", response) + +``` + +NOTE: You can also use the library in a non-global setting by creating an instance of a Notebook: + +```go +notepad = jww.NewNotepad(jww.LevelInfo, jww.LevelTrace, os.Stdout, ioutil.Discard, "", log.Ldate|log.Ltime) +notepad.WARN.Println("Some warning"") +``` + +_Why 7 levels?_ + +Maybe you think that 7 levels are too much for any application... and you +are probably correct. Just because there are seven levels doesn’t mean +that you should be using all 7 levels. Pick the right set for your needs. +Remember they only have to mean something to your project. + +## Step 2. Optionally configure JWW + +Under the default thresholds : + + * Debug, Trace & Info goto /dev/null + * Warn and above is logged (when a log file/io.Writer is provided) + * Error and above is printed to the terminal (stdout) + +### Changing the thresholds + +The threshold can be changed at any time, but will only affect calls that +execute after the change was made. + +This is very useful if your application has a verbose mode. Of course you +can decide what verbose means to you or even have multiple levels of +verbosity. + + +```go + import ( + jww "github.com/spf13/jwalterweatherman" + ) + + if Verbose { + jww.SetLogThreshold(jww.LevelTrace) + jww.SetStdoutThreshold(jww.LevelInfo) + } +``` + +Note that JWW's own internal output uses log levels as well, so set the log +level before making any other calls if you want to see what it's up to. + + +### Setting a log file + +JWW can log to any `io.Writer`: + + +```go + + jww.SetLogOutput(customWriter) + +``` + + +# More information + +This is an early release. 
I’ve been using it for a while and this is the +third interface I’ve tried. I like this one pretty well, but no guarantees +that it won’t change a bit. + +I wrote this for use in [hugo](https://gohugo.io). If you are looking +for a static website engine that’s super fast please checkout Hugo. diff --git a/vendor/github.com/spf13/jwalterweatherman/default_notepad.go b/vendor/github.com/spf13/jwalterweatherman/default_notepad.go new file mode 100644 index 000000000..a018c15c4 --- /dev/null +++ b/vendor/github.com/spf13/jwalterweatherman/default_notepad.go @@ -0,0 +1,111 @@ +// Copyright © 2016 Steve Francia . +// +// Use of this source code is governed by an MIT-style +// license that can be found in the LICENSE file. + +package jwalterweatherman + +import ( + "io" + "io/ioutil" + "log" + "os" +) + +var ( + TRACE *log.Logger + DEBUG *log.Logger + INFO *log.Logger + WARN *log.Logger + ERROR *log.Logger + CRITICAL *log.Logger + FATAL *log.Logger + + LOG *log.Logger + FEEDBACK *Feedback + + defaultNotepad *Notepad +) + +func reloadDefaultNotepad() { + TRACE = defaultNotepad.TRACE + DEBUG = defaultNotepad.DEBUG + INFO = defaultNotepad.INFO + WARN = defaultNotepad.WARN + ERROR = defaultNotepad.ERROR + CRITICAL = defaultNotepad.CRITICAL + FATAL = defaultNotepad.FATAL + + LOG = defaultNotepad.LOG + FEEDBACK = defaultNotepad.FEEDBACK +} + +func init() { + defaultNotepad = NewNotepad(LevelError, LevelWarn, os.Stdout, ioutil.Discard, "", log.Ldate|log.Ltime) + reloadDefaultNotepad() +} + +// SetLogThreshold set the log threshold for the default notepad. Trace by default. +func SetLogThreshold(threshold Threshold) { + defaultNotepad.SetLogThreshold(threshold) + reloadDefaultNotepad() +} + +// SetLogOutput set the log output for the default notepad. Discarded by default. +func SetLogOutput(handle io.Writer) { + defaultNotepad.SetLogOutput(handle) + reloadDefaultNotepad() +} + +// SetStdoutThreshold set the standard output threshold for the default notepad. +// Info by default. +func SetStdoutThreshold(threshold Threshold) { + defaultNotepad.SetStdoutThreshold(threshold) + reloadDefaultNotepad() +} + +// SetStdoutOutput set the stdout output for the default notepad. Default is stdout. +func SetStdoutOutput(handle io.Writer) { + defaultNotepad.outHandle = handle + defaultNotepad.init() + reloadDefaultNotepad() +} + +// SetPrefix set the prefix for the default logger. Empty by default. +func SetPrefix(prefix string) { + defaultNotepad.SetPrefix(prefix) + reloadDefaultNotepad() +} + +// SetFlags set the flags for the default logger. "log.Ldate | log.Ltime" by default. +func SetFlags(flags int) { + defaultNotepad.SetFlags(flags) + reloadDefaultNotepad() +} + +// SetLogListeners configures the default logger with one or more log listeners. +func SetLogListeners(l ...LogListener) { + defaultNotepad.logListeners = l + defaultNotepad.init() + reloadDefaultNotepad() +} + +// Level returns the current global log threshold. +func LogThreshold() Threshold { + return defaultNotepad.logThreshold +} + +// Level returns the current global output threshold. +func StdoutThreshold() Threshold { + return defaultNotepad.stdoutThreshold +} + +// GetStdoutThreshold returns the defined Treshold for the log logger. +func GetLogThreshold() Threshold { + return defaultNotepad.GetLogThreshold() +} + +// GetStdoutThreshold returns the Treshold for the stdout logger. 
+func GetStdoutThreshold() Threshold { + return defaultNotepad.GetStdoutThreshold() +} diff --git a/vendor/github.com/spf13/jwalterweatherman/go.mod b/vendor/github.com/spf13/jwalterweatherman/go.mod new file mode 100644 index 000000000..1dbcfd3e8 --- /dev/null +++ b/vendor/github.com/spf13/jwalterweatherman/go.mod @@ -0,0 +1,7 @@ +module github.com/spf13/jwalterweatherman + +require ( + github.com/davecgh/go-spew v1.1.1 // indirect + github.com/pmezard/go-difflib v1.0.0 // indirect + github.com/stretchr/testify v1.2.2 +) diff --git a/vendor/github.com/spf13/jwalterweatherman/log_counter.go b/vendor/github.com/spf13/jwalterweatherman/log_counter.go new file mode 100644 index 000000000..41285f3dc --- /dev/null +++ b/vendor/github.com/spf13/jwalterweatherman/log_counter.go @@ -0,0 +1,46 @@ +// Copyright © 2016 Steve Francia . +// +// Use of this source code is governed by an MIT-style +// license that can be found in the LICENSE file. + +package jwalterweatherman + +import ( + "io" + "sync/atomic" +) + +// Counter is an io.Writer that increments a counter on Write. +type Counter struct { + count uint64 +} + +func (c *Counter) incr() { + atomic.AddUint64(&c.count, 1) +} + +// Reset resets the counter. +func (c *Counter) Reset() { + atomic.StoreUint64(&c.count, 0) +} + +// Count returns the current count. +func (c *Counter) Count() uint64 { + return atomic.LoadUint64(&c.count) +} + +func (c *Counter) Write(p []byte) (n int, err error) { + c.incr() + return len(p), nil +} + +// LogCounter creates a LogListener that counts log statements >= the given threshold. +func LogCounter(counter *Counter, t1 Threshold) LogListener { + return func(t2 Threshold) io.Writer { + if t2 < t1 { + // Not interested in this threshold. + return nil + } + return counter + } +} diff --git a/vendor/github.com/spf13/jwalterweatherman/notepad.go b/vendor/github.com/spf13/jwalterweatherman/notepad.go new file mode 100644 index 000000000..cc7957bf7 --- /dev/null +++ b/vendor/github.com/spf13/jwalterweatherman/notepad.go @@ -0,0 +1,225 @@ +// Copyright © 2016 Steve Francia . +// +// Use of this source code is governed by an MIT-style +// license that can be found in the LICENSE file. + +package jwalterweatherman + +import ( + "fmt" + "io" + "io/ioutil" + "log" +) + +type Threshold int + +func (t Threshold) String() string { + return prefixes[t] +} + +const ( + LevelTrace Threshold = iota + LevelDebug + LevelInfo + LevelWarn + LevelError + LevelCritical + LevelFatal +) + +var prefixes map[Threshold]string = map[Threshold]string{ + LevelTrace: "TRACE", + LevelDebug: "DEBUG", + LevelInfo: "INFO", + LevelWarn: "WARN", + LevelError: "ERROR", + LevelCritical: "CRITICAL", + LevelFatal: "FATAL", +} + +// Notepad is where you leave a note! +type Notepad struct { + TRACE *log.Logger + DEBUG *log.Logger + INFO *log.Logger + WARN *log.Logger + ERROR *log.Logger + CRITICAL *log.Logger + FATAL *log.Logger + + LOG *log.Logger + FEEDBACK *Feedback + + loggers [7]**log.Logger + logHandle io.Writer + outHandle io.Writer + logThreshold Threshold + stdoutThreshold Threshold + prefix string + flags int + + logListeners []LogListener +} + +// A LogListener can ble supplied to a Notepad to listen on log writes for a given +// threshold. This can be used to capture log events in unit tests and similar. +// Note that this function will be invoked once for each log threshold. If +// the given threshold is not of interest to you, return nil. 
+// Note that these listeners will receive log events for a given threshold, even +// if the current configuration says not to log it. That way you can count ERRORs even +// if you don't print them to the console. +type LogListener func(t Threshold) io.Writer + +// NewNotepad creates a new Notepad. +func NewNotepad( + outThreshold Threshold, + logThreshold Threshold, + outHandle, logHandle io.Writer, + prefix string, flags int, + logListeners ...LogListener, +) *Notepad { + + n := &Notepad{logListeners: logListeners} + + n.loggers = [7]**log.Logger{&n.TRACE, &n.DEBUG, &n.INFO, &n.WARN, &n.ERROR, &n.CRITICAL, &n.FATAL} + n.outHandle = outHandle + n.logHandle = logHandle + n.stdoutThreshold = outThreshold + n.logThreshold = logThreshold + + if len(prefix) != 0 { + n.prefix = "[" + prefix + "] " + } else { + n.prefix = "" + } + + n.flags = flags + + n.LOG = log.New(n.logHandle, + "LOG: ", + n.flags) + n.FEEDBACK = &Feedback{out: log.New(outHandle, "", 0), log: n.LOG} + + n.init() + return n +} + +// init creates the loggers for each level depending on the notepad thresholds. +func (n *Notepad) init() { + logAndOut := io.MultiWriter(n.outHandle, n.logHandle) + + for t, logger := range n.loggers { + threshold := Threshold(t) + prefix := n.prefix + threshold.String() + " " + + switch { + case threshold >= n.logThreshold && threshold >= n.stdoutThreshold: + *logger = log.New(n.createLogWriters(threshold, logAndOut), prefix, n.flags) + + case threshold >= n.logThreshold: + *logger = log.New(n.createLogWriters(threshold, n.logHandle), prefix, n.flags) + + case threshold >= n.stdoutThreshold: + *logger = log.New(n.createLogWriters(threshold, n.outHandle), prefix, n.flags) + + default: + *logger = log.New(n.createLogWriters(threshold, ioutil.Discard), prefix, n.flags) + } + } +} + +func (n *Notepad) createLogWriters(t Threshold, handle io.Writer) io.Writer { + if len(n.logListeners) == 0 { + return handle + } + writers := []io.Writer{handle} + for _, l := range n.logListeners { + w := l(t) + if w != nil { + writers = append(writers, w) + } + } + + if len(writers) == 1 { + return handle + } + + return io.MultiWriter(writers...) +} + +// SetLogThreshold changes the threshold above which messages are written to the +// log file. +func (n *Notepad) SetLogThreshold(threshold Threshold) { + n.logThreshold = threshold + n.init() +} + +// SetLogOutput changes the file where log messages are written. +func (n *Notepad) SetLogOutput(handle io.Writer) { + n.logHandle = handle + n.init() +} + +// GetStdoutThreshold returns the defined Treshold for the log logger. +func (n *Notepad) GetLogThreshold() Threshold { + return n.logThreshold +} + +// SetStdoutThreshold changes the threshold above which messages are written to the +// standard output. +func (n *Notepad) SetStdoutThreshold(threshold Threshold) { + n.stdoutThreshold = threshold + n.init() +} + +// GetStdoutThreshold returns the Treshold for the stdout logger. +func (n *Notepad) GetStdoutThreshold() Threshold { + return n.stdoutThreshold +} + +// SetPrefix changes the prefix used by the notepad. Prefixes are displayed between +// brackets at the beginning of the line. An empty prefix won't be displayed at all. +func (n *Notepad) SetPrefix(prefix string) { + if len(prefix) != 0 { + n.prefix = "[" + prefix + "] " + } else { + n.prefix = "" + } + n.init() +} + +// SetFlags choose which flags the logger will display (after prefix and message +// level). See the package log for more informations on this. 
+func (n *Notepad) SetFlags(flags int) { + n.flags = flags + n.init() +} + +// Feedback writes plainly to the outHandle while +// logging with the standard extra information (date, file, etc). +type Feedback struct { + out *log.Logger + log *log.Logger +} + +func (fb *Feedback) Println(v ...interface{}) { + fb.output(fmt.Sprintln(v...)) +} + +func (fb *Feedback) Printf(format string, v ...interface{}) { + fb.output(fmt.Sprintf(format, v...)) +} + +func (fb *Feedback) Print(v ...interface{}) { + fb.output(fmt.Sprint(v...)) +} + +func (fb *Feedback) output(s string) { + if fb.out != nil { + fb.out.Output(2, s) + } + if fb.log != nil { + fb.log.Output(2, s) + } +} diff --git a/vendor/github.com/spf13/pflag/.gitignore b/vendor/github.com/spf13/pflag/.gitignore new file mode 100644 index 000000000..c3da29013 --- /dev/null +++ b/vendor/github.com/spf13/pflag/.gitignore @@ -0,0 +1,2 @@ +.idea/* + diff --git a/vendor/github.com/spf13/pflag/.travis.yml b/vendor/github.com/spf13/pflag/.travis.yml new file mode 100644 index 000000000..00d04cb9b --- /dev/null +++ b/vendor/github.com/spf13/pflag/.travis.yml @@ -0,0 +1,22 @@ +sudo: false + +language: go + +go: + - 1.9.x + - 1.10.x + - 1.11.x + - tip + +matrix: + allow_failures: + - go: tip + +install: + - go get golang.org/x/lint/golint + - export PATH=$GOPATH/bin:$PATH + - go install ./... + +script: + - verify/all.sh -v + - go test ./... diff --git a/vendor/github.com/spf13/pflag/LICENSE b/vendor/github.com/spf13/pflag/LICENSE new file mode 100644 index 000000000..63ed1cfea --- /dev/null +++ b/vendor/github.com/spf13/pflag/LICENSE @@ -0,0 +1,28 @@ +Copyright (c) 2012 Alex Ogier. All rights reserved. +Copyright (c) 2012 The Go Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/vendor/github.com/spf13/pflag/README.md b/vendor/github.com/spf13/pflag/README.md new file mode 100644 index 000000000..7eacc5bdb --- /dev/null +++ b/vendor/github.com/spf13/pflag/README.md @@ -0,0 +1,296 @@ +[![Build Status](https://travis-ci.org/spf13/pflag.svg?branch=master)](https://travis-ci.org/spf13/pflag) +[![Go Report Card](https://goreportcard.com/badge/github.com/spf13/pflag)](https://goreportcard.com/report/github.com/spf13/pflag) +[![GoDoc](https://godoc.org/github.com/spf13/pflag?status.svg)](https://godoc.org/github.com/spf13/pflag) + +## Description + +pflag is a drop-in replacement for Go's flag package, implementing +POSIX/GNU-style --flags. + +pflag is compatible with the [GNU extensions to the POSIX recommendations +for command-line options][1]. For a more precise description, see the +"Command-line flag syntax" section below. + +[1]: http://www.gnu.org/software/libc/manual/html_node/Argument-Syntax.html + +pflag is available under the same style of BSD license as the Go language, +which can be found in the LICENSE file. + +## Installation + +pflag is available using the standard `go get` command. + +Install by running: + + go get github.com/spf13/pflag + +Run tests by running: + + go test github.com/spf13/pflag + +## Usage + +pflag is a drop-in replacement of Go's native flag package. If you import +pflag under the name "flag" then all code should continue to function +with no changes. + +``` go +import flag "github.com/spf13/pflag" +``` + +There is one exception to this: if you directly instantiate the Flag struct +there is one more field "Shorthand" that you will need to set. +Most code never instantiates this struct directly, and instead uses +functions such as String(), BoolVar(), and Var(), and is therefore +unaffected. + +Define flags using flag.String(), Bool(), Int(), etc. + +This declares an integer flag, -flagname, stored in the pointer ip, with type *int. + +``` go +var ip *int = flag.Int("flagname", 1234, "help message for flagname") +``` + +If you like, you can bind the flag to a variable using the Var() functions. + +``` go +var flagvar int +func init() { + flag.IntVar(&flagvar, "flagname", 1234, "help message for flagname") +} +``` + +Or you can create custom flags that satisfy the Value interface (with +pointer receivers) and couple them to flag parsing by + +``` go +flag.Var(&flagVal, "name", "help message for flagname") +``` + +For such flags, the default value is just the initial value of the variable. + +After all flags are defined, call + +``` go +flag.Parse() +``` + +to parse the command line into the defined flags. + +Flags may then be used directly. If you're using the flags themselves, +they are all pointers; if you bind to variables, they're values. + +``` go +fmt.Println("ip has value ", *ip) +fmt.Println("flagvar has value ", flagvar) +``` + +There are helper functions available to get the value stored in a Flag if you have a FlagSet but find +it difficult to keep up with all of the pointers in your code. +If you have a pflag.FlagSet with a flag called 'flagname' of type int you +can use GetInt() to get the int value. But notice that 'flagname' must exist +and it must be an int. GetString("flagname") will fail. + +``` go +i, err := flagset.GetInt("flagname") +``` + +After parsing, the arguments after the flag are available as the +slice flag.Args() or individually as flag.Arg(i). +The arguments are indexed from 0 through flag.NArg()-1. 
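Putting the pieces above together, a minimal sketch of defining a flag, reading it back through `GetInt()`, and walking the remaining positional arguments (the `count` flag name here is illustrative only):

```go
package main

import (
	"fmt"

	flag "github.com/spf13/pflag"
)

func main() {
	// Define an integer flag and parse the command line.
	count := flag.Int("count", 3, "how many times to repeat")
	flag.Parse()

	// Typed lookup through the default FlagSet; GetInt returns an
	// error if the flag does not exist or is not an int.
	n, err := flag.CommandLine.GetInt("count")
	if err != nil {
		fmt.Println("lookup failed:", err)
		return
	}
	fmt.Println("count via pointer:", *count, "via GetInt:", n)

	// Positional arguments left over after flag parsing.
	for i := 0; i < flag.NArg(); i++ {
		fmt.Printf("arg %d: %s\n", i, flag.Arg(i))
	}
}
```

Note that `GetInt()` reports a missing flag or a type mismatch as an error rather than panicking, so a mistyped flag name surfaces at the call site.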
+ +The pflag package also defines some new functions that are not in flag, +that give one-letter shorthands for flags. You can use these by appending +'P' to the name of any function that defines a flag. + +``` go +var ip = flag.IntP("flagname", "f", 1234, "help message") +var flagvar bool +func init() { + flag.BoolVarP(&flagvar, "boolname", "b", true, "help message") +} +flag.VarP(&flagVal, "varname", "v", "help message") +``` + +Shorthand letters can be used with single dashes on the command line. +Boolean shorthand flags can be combined with other shorthand flags. + +The default set of command-line flags is controlled by +top-level functions. The FlagSet type allows one to define +independent sets of flags, such as to implement subcommands +in a command-line interface. The methods of FlagSet are +analogous to the top-level functions for the command-line +flag set. + +## Setting no option default values for flags + +After you create a flag it is possible to set the pflag.NoOptDefVal for +the given flag. Doing this changes the meaning of the flag slightly. If +a flag has a NoOptDefVal and the flag is set on the command line without +an option the flag will be set to the NoOptDefVal. For example given: + +``` go +var ip = flag.IntP("flagname", "f", 1234, "help message") +flag.Lookup("flagname").NoOptDefVal = "4321" +``` + +Would result in something like + +| Parsed Arguments | Resulting Value | +| ------------- | ------------- | +| --flagname=1357 | ip=1357 | +| --flagname | ip=4321 | +| [nothing] | ip=1234 | + +## Command line flag syntax + +``` +--flag // boolean flags, or flags with no option default values +--flag x // only on flags without a default value +--flag=x +``` + +Unlike the flag package, a single dash before an option means something +different than a double dash. Single dashes signify a series of shorthand +letters for flags. All but the last shorthand letter must be boolean flags +or a flag with a default value + +``` +// boolean or flags where the 'no option default value' is set +-f +-f=true +-abc +but +-b true is INVALID + +// non-boolean and flags without a 'no option default value' +-n 1234 +-n=1234 +-n1234 + +// mixed +-abcs "hello" +-absd="hello" +-abcs1234 +``` + +Flag parsing stops after the terminator "--". Unlike the flag package, +flags can be interspersed with arguments anywhere on the command line +before this terminator. + +Integer flags accept 1234, 0664, 0x1234 and may be negative. +Boolean flags (in their long form) accept 1, 0, t, f, true, false, +TRUE, FALSE, True, False. +Duration flags accept any input valid for time.ParseDuration. + +## Mutating or "Normalizing" Flag names + +It is possible to set a custom flag name 'normalization function.' It allows flag names to be mutated both when created in the code and when used on the command line to some 'normalized' form. The 'normalized' form is used for comparison. Two examples of using the custom normalization func follow. + +**Example #1**: You want -, _, and . in flags to compare the same. aka --my-flag == --my_flag == --my.flag + +``` go +func wordSepNormalizeFunc(f *pflag.FlagSet, name string) pflag.NormalizedName { + from := []string{"-", "_"} + to := "." + for _, sep := range from { + name = strings.Replace(name, sep, to, -1) + } + return pflag.NormalizedName(name) +} + +myFlagSet.SetNormalizeFunc(wordSepNormalizeFunc) +``` + +**Example #2**: You want to alias two flags. 
aka --old-flag-name == --new-flag-name + +``` go +func aliasNormalizeFunc(f *pflag.FlagSet, name string) pflag.NormalizedName { + switch name { + case "old-flag-name": + name = "new-flag-name" + break + } + return pflag.NormalizedName(name) +} + +myFlagSet.SetNormalizeFunc(aliasNormalizeFunc) +``` + +## Deprecating a flag or its shorthand +It is possible to deprecate a flag, or just its shorthand. Deprecating a flag/shorthand hides it from help text and prints a usage message when the deprecated flag/shorthand is used. + +**Example #1**: You want to deprecate a flag named "badflag" as well as inform the users what flag they should use instead. +```go +// deprecate a flag by specifying its name and a usage message +flags.MarkDeprecated("badflag", "please use --good-flag instead") +``` +This hides "badflag" from help text, and prints `Flag --badflag has been deprecated, please use --good-flag instead` when "badflag" is used. + +**Example #2**: You want to keep a flag name "noshorthandflag" but deprecate its shortname "n". +```go +// deprecate a flag shorthand by specifying its flag name and a usage message +flags.MarkShorthandDeprecated("noshorthandflag", "please use --noshorthandflag only") +``` +This hides the shortname "n" from help text, and prints `Flag shorthand -n has been deprecated, please use --noshorthandflag only` when the shorthand "n" is used. + +Note that usage message is essential here, and it should not be empty. + +## Hidden flags +It is possible to mark a flag as hidden, meaning it will still function as normal, however will not show up in usage/help text. + +**Example**: You have a flag named "secretFlag" that you need for internal use only and don't want it showing up in help text, or for its usage text to be available. +```go +// hide a flag by specifying its name +flags.MarkHidden("secretFlag") +``` + +## Disable sorting of flags +`pflag` allows you to disable sorting of flags for help and usage message. + +**Example**: +```go +flags.BoolP("verbose", "v", false, "verbose output") +flags.String("coolflag", "yeaah", "it's really cool flag") +flags.Int("usefulflag", 777, "sometimes it's very useful") +flags.SortFlags = false +flags.PrintDefaults() +``` +**Output**: +``` + -v, --verbose verbose output + --coolflag string it's really cool flag (default "yeaah") + --usefulflag int sometimes it's very useful (default 777) +``` + + +## Supporting Go flags when using pflag +In order to support flags defined using Go's `flag` package, they must be added to the `pflag` flagset. This is usually necessary +to support flags defined by third-party dependencies (e.g. `golang/glog`). + +**Example**: You want to add the Go flags to the `CommandLine` flagset +```go +import ( + goflag "flag" + flag "github.com/spf13/pflag" +) + +var ip *int = flag.Int("flagname", 1234, "help message for flagname") + +func main() { + flag.CommandLine.AddGoFlagSet(goflag.CommandLine) + flag.Parse() +} +``` + +## More info + +You can see the full reference documentation of the pflag package +[at godoc.org][3], or through go's standard documentation system by +running `godoc -http=:6060` and browsing to +[http://localhost:6060/pkg/github.com/spf13/pflag][2] after +installation. 
+ +[2]: http://localhost:6060/pkg/github.com/spf13/pflag +[3]: http://godoc.org/github.com/spf13/pflag diff --git a/vendor/github.com/spf13/pflag/bool.go b/vendor/github.com/spf13/pflag/bool.go new file mode 100644 index 000000000..c4c5c0bfd --- /dev/null +++ b/vendor/github.com/spf13/pflag/bool.go @@ -0,0 +1,94 @@ +package pflag + +import "strconv" + +// optional interface to indicate boolean flags that can be +// supplied without "=value" text +type boolFlag interface { + Value + IsBoolFlag() bool +} + +// -- bool Value +type boolValue bool + +func newBoolValue(val bool, p *bool) *boolValue { + *p = val + return (*boolValue)(p) +} + +func (b *boolValue) Set(s string) error { + v, err := strconv.ParseBool(s) + *b = boolValue(v) + return err +} + +func (b *boolValue) Type() string { + return "bool" +} + +func (b *boolValue) String() string { return strconv.FormatBool(bool(*b)) } + +func (b *boolValue) IsBoolFlag() bool { return true } + +func boolConv(sval string) (interface{}, error) { + return strconv.ParseBool(sval) +} + +// GetBool return the bool value of a flag with the given name +func (f *FlagSet) GetBool(name string) (bool, error) { + val, err := f.getFlagType(name, "bool", boolConv) + if err != nil { + return false, err + } + return val.(bool), nil +} + +// BoolVar defines a bool flag with specified name, default value, and usage string. +// The argument p points to a bool variable in which to store the value of the flag. +func (f *FlagSet) BoolVar(p *bool, name string, value bool, usage string) { + f.BoolVarP(p, name, "", value, usage) +} + +// BoolVarP is like BoolVar, but accepts a shorthand letter that can be used after a single dash. +func (f *FlagSet) BoolVarP(p *bool, name, shorthand string, value bool, usage string) { + flag := f.VarPF(newBoolValue(value, p), name, shorthand, usage) + flag.NoOptDefVal = "true" +} + +// BoolVar defines a bool flag with specified name, default value, and usage string. +// The argument p points to a bool variable in which to store the value of the flag. +func BoolVar(p *bool, name string, value bool, usage string) { + BoolVarP(p, name, "", value, usage) +} + +// BoolVarP is like BoolVar, but accepts a shorthand letter that can be used after a single dash. +func BoolVarP(p *bool, name, shorthand string, value bool, usage string) { + flag := CommandLine.VarPF(newBoolValue(value, p), name, shorthand, usage) + flag.NoOptDefVal = "true" +} + +// Bool defines a bool flag with specified name, default value, and usage string. +// The return value is the address of a bool variable that stores the value of the flag. +func (f *FlagSet) Bool(name string, value bool, usage string) *bool { + return f.BoolP(name, "", value, usage) +} + +// BoolP is like Bool, but accepts a shorthand letter that can be used after a single dash. +func (f *FlagSet) BoolP(name, shorthand string, value bool, usage string) *bool { + p := new(bool) + f.BoolVarP(p, name, shorthand, value, usage) + return p +} + +// Bool defines a bool flag with specified name, default value, and usage string. +// The return value is the address of a bool variable that stores the value of the flag. +func Bool(name string, value bool, usage string) *bool { + return BoolP(name, "", value, usage) +} + +// BoolP is like Bool, but accepts a shorthand letter that can be used after a single dash. 
+func BoolP(name, shorthand string, value bool, usage string) *bool { + b := CommandLine.BoolP(name, shorthand, value, usage) + return b +} diff --git a/vendor/github.com/spf13/pflag/bool_slice.go b/vendor/github.com/spf13/pflag/bool_slice.go new file mode 100644 index 000000000..3731370d6 --- /dev/null +++ b/vendor/github.com/spf13/pflag/bool_slice.go @@ -0,0 +1,185 @@ +package pflag + +import ( + "io" + "strconv" + "strings" +) + +// -- boolSlice Value +type boolSliceValue struct { + value *[]bool + changed bool +} + +func newBoolSliceValue(val []bool, p *[]bool) *boolSliceValue { + bsv := new(boolSliceValue) + bsv.value = p + *bsv.value = val + return bsv +} + +// Set converts, and assigns, the comma-separated boolean argument string representation as the []bool value of this flag. +// If Set is called on a flag that already has a []bool assigned, the newly converted values will be appended. +func (s *boolSliceValue) Set(val string) error { + + // remove all quote characters + rmQuote := strings.NewReplacer(`"`, "", `'`, "", "`", "") + + // read flag arguments with CSV parser + boolStrSlice, err := readAsCSV(rmQuote.Replace(val)) + if err != nil && err != io.EOF { + return err + } + + // parse boolean values into slice + out := make([]bool, 0, len(boolStrSlice)) + for _, boolStr := range boolStrSlice { + b, err := strconv.ParseBool(strings.TrimSpace(boolStr)) + if err != nil { + return err + } + out = append(out, b) + } + + if !s.changed { + *s.value = out + } else { + *s.value = append(*s.value, out...) + } + + s.changed = true + + return nil +} + +// Type returns a string that uniquely represents this flag's type. +func (s *boolSliceValue) Type() string { + return "boolSlice" +} + +// String defines a "native" format for this boolean slice flag value. +func (s *boolSliceValue) String() string { + + boolStrSlice := make([]string, len(*s.value)) + for i, b := range *s.value { + boolStrSlice[i] = strconv.FormatBool(b) + } + + out, _ := writeAsCSV(boolStrSlice) + + return "[" + out + "]" +} + +func (s *boolSliceValue) fromString(val string) (bool, error) { + return strconv.ParseBool(val) +} + +func (s *boolSliceValue) toString(val bool) string { + return strconv.FormatBool(val) +} + +func (s *boolSliceValue) Append(val string) error { + i, err := s.fromString(val) + if err != nil { + return err + } + *s.value = append(*s.value, i) + return nil +} + +func (s *boolSliceValue) Replace(val []string) error { + out := make([]bool, len(val)) + for i, d := range val { + var err error + out[i], err = s.fromString(d) + if err != nil { + return err + } + } + *s.value = out + return nil +} + +func (s *boolSliceValue) GetSlice() []string { + out := make([]string, len(*s.value)) + for i, d := range *s.value { + out[i] = s.toString(d) + } + return out +} + +func boolSliceConv(val string) (interface{}, error) { + val = strings.Trim(val, "[]") + // Empty string would cause a slice with one (empty) entry + if len(val) == 0 { + return []bool{}, nil + } + ss := strings.Split(val, ",") + out := make([]bool, len(ss)) + for i, t := range ss { + var err error + out[i], err = strconv.ParseBool(t) + if err != nil { + return nil, err + } + } + return out, nil +} + +// GetBoolSlice returns the []bool value of a flag with the given name. 
+func (f *FlagSet) GetBoolSlice(name string) ([]bool, error) { + val, err := f.getFlagType(name, "boolSlice", boolSliceConv) + if err != nil { + return []bool{}, err + } + return val.([]bool), nil +} + +// BoolSliceVar defines a boolSlice flag with specified name, default value, and usage string. +// The argument p points to a []bool variable in which to store the value of the flag. +func (f *FlagSet) BoolSliceVar(p *[]bool, name string, value []bool, usage string) { + f.VarP(newBoolSliceValue(value, p), name, "", usage) +} + +// BoolSliceVarP is like BoolSliceVar, but accepts a shorthand letter that can be used after a single dash. +func (f *FlagSet) BoolSliceVarP(p *[]bool, name, shorthand string, value []bool, usage string) { + f.VarP(newBoolSliceValue(value, p), name, shorthand, usage) +} + +// BoolSliceVar defines a []bool flag with specified name, default value, and usage string. +// The argument p points to a []bool variable in which to store the value of the flag. +func BoolSliceVar(p *[]bool, name string, value []bool, usage string) { + CommandLine.VarP(newBoolSliceValue(value, p), name, "", usage) +} + +// BoolSliceVarP is like BoolSliceVar, but accepts a shorthand letter that can be used after a single dash. +func BoolSliceVarP(p *[]bool, name, shorthand string, value []bool, usage string) { + CommandLine.VarP(newBoolSliceValue(value, p), name, shorthand, usage) +} + +// BoolSlice defines a []bool flag with specified name, default value, and usage string. +// The return value is the address of a []bool variable that stores the value of the flag. +func (f *FlagSet) BoolSlice(name string, value []bool, usage string) *[]bool { + p := []bool{} + f.BoolSliceVarP(&p, name, "", value, usage) + return &p +} + +// BoolSliceP is like BoolSlice, but accepts a shorthand letter that can be used after a single dash. +func (f *FlagSet) BoolSliceP(name, shorthand string, value []bool, usage string) *[]bool { + p := []bool{} + f.BoolSliceVarP(&p, name, shorthand, value, usage) + return &p +} + +// BoolSlice defines a []bool flag with specified name, default value, and usage string. +// The return value is the address of a []bool variable that stores the value of the flag. +func BoolSlice(name string, value []bool, usage string) *[]bool { + return CommandLine.BoolSliceP(name, "", value, usage) +} + +// BoolSliceP is like BoolSlice, but accepts a shorthand letter that can be used after a single dash. +func BoolSliceP(name, shorthand string, value []bool, usage string) *[]bool { + return CommandLine.BoolSliceP(name, shorthand, value, usage) +} diff --git a/vendor/github.com/spf13/pflag/bytes.go b/vendor/github.com/spf13/pflag/bytes.go new file mode 100644 index 000000000..67d530457 --- /dev/null +++ b/vendor/github.com/spf13/pflag/bytes.go @@ -0,0 +1,209 @@ +package pflag + +import ( + "encoding/base64" + "encoding/hex" + "fmt" + "strings" +) + +// BytesHex adapts []byte for use as a flag. Value of flag is HEX encoded +type bytesHexValue []byte + +// String implements pflag.Value.String. +func (bytesHex bytesHexValue) String() string { + return fmt.Sprintf("%X", []byte(bytesHex)) +} + +// Set implements pflag.Value.Set. +func (bytesHex *bytesHexValue) Set(value string) error { + bin, err := hex.DecodeString(strings.TrimSpace(value)) + + if err != nil { + return err + } + + *bytesHex = bin + + return nil +} + +// Type implements pflag.Value.Type. 
+func (*bytesHexValue) Type() string { + return "bytesHex" +} + +func newBytesHexValue(val []byte, p *[]byte) *bytesHexValue { + *p = val + return (*bytesHexValue)(p) +} + +func bytesHexConv(sval string) (interface{}, error) { + + bin, err := hex.DecodeString(sval) + + if err == nil { + return bin, nil + } + + return nil, fmt.Errorf("invalid string being converted to Bytes: %s %s", sval, err) +} + +// GetBytesHex return the []byte value of a flag with the given name +func (f *FlagSet) GetBytesHex(name string) ([]byte, error) { + val, err := f.getFlagType(name, "bytesHex", bytesHexConv) + + if err != nil { + return []byte{}, err + } + + return val.([]byte), nil +} + +// BytesHexVar defines an []byte flag with specified name, default value, and usage string. +// The argument p points to an []byte variable in which to store the value of the flag. +func (f *FlagSet) BytesHexVar(p *[]byte, name string, value []byte, usage string) { + f.VarP(newBytesHexValue(value, p), name, "", usage) +} + +// BytesHexVarP is like BytesHexVar, but accepts a shorthand letter that can be used after a single dash. +func (f *FlagSet) BytesHexVarP(p *[]byte, name, shorthand string, value []byte, usage string) { + f.VarP(newBytesHexValue(value, p), name, shorthand, usage) +} + +// BytesHexVar defines an []byte flag with specified name, default value, and usage string. +// The argument p points to an []byte variable in which to store the value of the flag. +func BytesHexVar(p *[]byte, name string, value []byte, usage string) { + CommandLine.VarP(newBytesHexValue(value, p), name, "", usage) +} + +// BytesHexVarP is like BytesHexVar, but accepts a shorthand letter that can be used after a single dash. +func BytesHexVarP(p *[]byte, name, shorthand string, value []byte, usage string) { + CommandLine.VarP(newBytesHexValue(value, p), name, shorthand, usage) +} + +// BytesHex defines an []byte flag with specified name, default value, and usage string. +// The return value is the address of an []byte variable that stores the value of the flag. +func (f *FlagSet) BytesHex(name string, value []byte, usage string) *[]byte { + p := new([]byte) + f.BytesHexVarP(p, name, "", value, usage) + return p +} + +// BytesHexP is like BytesHex, but accepts a shorthand letter that can be used after a single dash. +func (f *FlagSet) BytesHexP(name, shorthand string, value []byte, usage string) *[]byte { + p := new([]byte) + f.BytesHexVarP(p, name, shorthand, value, usage) + return p +} + +// BytesHex defines an []byte flag with specified name, default value, and usage string. +// The return value is the address of an []byte variable that stores the value of the flag. +func BytesHex(name string, value []byte, usage string) *[]byte { + return CommandLine.BytesHexP(name, "", value, usage) +} + +// BytesHexP is like BytesHex, but accepts a shorthand letter that can be used after a single dash. +func BytesHexP(name, shorthand string, value []byte, usage string) *[]byte { + return CommandLine.BytesHexP(name, shorthand, value, usage) +} + +// BytesBase64 adapts []byte for use as a flag. Value of flag is Base64 encoded +type bytesBase64Value []byte + +// String implements pflag.Value.String. +func (bytesBase64 bytesBase64Value) String() string { + return base64.StdEncoding.EncodeToString([]byte(bytesBase64)) +} + +// Set implements pflag.Value.Set. 
+func (bytesBase64 *bytesBase64Value) Set(value string) error { + bin, err := base64.StdEncoding.DecodeString(strings.TrimSpace(value)) + + if err != nil { + return err + } + + *bytesBase64 = bin + + return nil +} + +// Type implements pflag.Value.Type. +func (*bytesBase64Value) Type() string { + return "bytesBase64" +} + +func newBytesBase64Value(val []byte, p *[]byte) *bytesBase64Value { + *p = val + return (*bytesBase64Value)(p) +} + +func bytesBase64ValueConv(sval string) (interface{}, error) { + + bin, err := base64.StdEncoding.DecodeString(sval) + if err == nil { + return bin, nil + } + + return nil, fmt.Errorf("invalid string being converted to Bytes: %s %s", sval, err) +} + +// GetBytesBase64 return the []byte value of a flag with the given name +func (f *FlagSet) GetBytesBase64(name string) ([]byte, error) { + val, err := f.getFlagType(name, "bytesBase64", bytesBase64ValueConv) + + if err != nil { + return []byte{}, err + } + + return val.([]byte), nil +} + +// BytesBase64Var defines an []byte flag with specified name, default value, and usage string. +// The argument p points to an []byte variable in which to store the value of the flag. +func (f *FlagSet) BytesBase64Var(p *[]byte, name string, value []byte, usage string) { + f.VarP(newBytesBase64Value(value, p), name, "", usage) +} + +// BytesBase64VarP is like BytesBase64Var, but accepts a shorthand letter that can be used after a single dash. +func (f *FlagSet) BytesBase64VarP(p *[]byte, name, shorthand string, value []byte, usage string) { + f.VarP(newBytesBase64Value(value, p), name, shorthand, usage) +} + +// BytesBase64Var defines an []byte flag with specified name, default value, and usage string. +// The argument p points to an []byte variable in which to store the value of the flag. +func BytesBase64Var(p *[]byte, name string, value []byte, usage string) { + CommandLine.VarP(newBytesBase64Value(value, p), name, "", usage) +} + +// BytesBase64VarP is like BytesBase64Var, but accepts a shorthand letter that can be used after a single dash. +func BytesBase64VarP(p *[]byte, name, shorthand string, value []byte, usage string) { + CommandLine.VarP(newBytesBase64Value(value, p), name, shorthand, usage) +} + +// BytesBase64 defines an []byte flag with specified name, default value, and usage string. +// The return value is the address of an []byte variable that stores the value of the flag. +func (f *FlagSet) BytesBase64(name string, value []byte, usage string) *[]byte { + p := new([]byte) + f.BytesBase64VarP(p, name, "", value, usage) + return p +} + +// BytesBase64P is like BytesBase64, but accepts a shorthand letter that can be used after a single dash. +func (f *FlagSet) BytesBase64P(name, shorthand string, value []byte, usage string) *[]byte { + p := new([]byte) + f.BytesBase64VarP(p, name, shorthand, value, usage) + return p +} + +// BytesBase64 defines an []byte flag with specified name, default value, and usage string. +// The return value is the address of an []byte variable that stores the value of the flag. +func BytesBase64(name string, value []byte, usage string) *[]byte { + return CommandLine.BytesBase64P(name, "", value, usage) +} + +// BytesBase64P is like BytesBase64, but accepts a shorthand letter that can be used after a single dash. 
+func BytesBase64P(name, shorthand string, value []byte, usage string) *[]byte { + return CommandLine.BytesBase64P(name, shorthand, value, usage) +} diff --git a/vendor/github.com/spf13/pflag/count.go b/vendor/github.com/spf13/pflag/count.go new file mode 100644 index 000000000..a0b2679f7 --- /dev/null +++ b/vendor/github.com/spf13/pflag/count.go @@ -0,0 +1,96 @@ +package pflag + +import "strconv" + +// -- count Value +type countValue int + +func newCountValue(val int, p *int) *countValue { + *p = val + return (*countValue)(p) +} + +func (i *countValue) Set(s string) error { + // "+1" means that no specific value was passed, so increment + if s == "+1" { + *i = countValue(*i + 1) + return nil + } + v, err := strconv.ParseInt(s, 0, 0) + *i = countValue(v) + return err +} + +func (i *countValue) Type() string { + return "count" +} + +func (i *countValue) String() string { return strconv.Itoa(int(*i)) } + +func countConv(sval string) (interface{}, error) { + i, err := strconv.Atoi(sval) + if err != nil { + return nil, err + } + return i, nil +} + +// GetCount return the int value of a flag with the given name +func (f *FlagSet) GetCount(name string) (int, error) { + val, err := f.getFlagType(name, "count", countConv) + if err != nil { + return 0, err + } + return val.(int), nil +} + +// CountVar defines a count flag with specified name, default value, and usage string. +// The argument p points to an int variable in which to store the value of the flag. +// A count flag will add 1 to its value every time it is found on the command line +func (f *FlagSet) CountVar(p *int, name string, usage string) { + f.CountVarP(p, name, "", usage) +} + +// CountVarP is like CountVar only take a shorthand for the flag name. +func (f *FlagSet) CountVarP(p *int, name, shorthand string, usage string) { + flag := f.VarPF(newCountValue(0, p), name, shorthand, usage) + flag.NoOptDefVal = "+1" +} + +// CountVar like CountVar only the flag is placed on the CommandLine instead of a given flag set +func CountVar(p *int, name string, usage string) { + CommandLine.CountVar(p, name, usage) +} + +// CountVarP is like CountVar only take a shorthand for the flag name. +func CountVarP(p *int, name, shorthand string, usage string) { + CommandLine.CountVarP(p, name, shorthand, usage) +} + +// Count defines a count flag with specified name, default value, and usage string. +// The return value is the address of an int variable that stores the value of the flag. +// A count flag will add 1 to its value every time it is found on the command line +func (f *FlagSet) Count(name string, usage string) *int { + p := new(int) + f.CountVarP(p, name, "", usage) + return p +} + +// CountP is like Count only takes a shorthand for the flag name. +func (f *FlagSet) CountP(name, shorthand string, usage string) *int { + p := new(int) + f.CountVarP(p, name, shorthand, usage) + return p +} + +// Count defines a count flag with specified name, default value, and usage string. +// The return value is the address of an int variable that stores the value of the flag. +// A count flag will add 1 to its value evey time it is found on the command line +func Count(name string, usage string) *int { + return CommandLine.CountP(name, "", usage) +} + +// CountP is like Count only takes a shorthand for the flag name. 
+func CountP(name, shorthand string, usage string) *int { + return CommandLine.CountP(name, shorthand, usage) +} diff --git a/vendor/github.com/spf13/pflag/duration.go b/vendor/github.com/spf13/pflag/duration.go new file mode 100644 index 000000000..e9debef88 --- /dev/null +++ b/vendor/github.com/spf13/pflag/duration.go @@ -0,0 +1,86 @@ +package pflag + +import ( + "time" +) + +// -- time.Duration Value +type durationValue time.Duration + +func newDurationValue(val time.Duration, p *time.Duration) *durationValue { + *p = val + return (*durationValue)(p) +} + +func (d *durationValue) Set(s string) error { + v, err := time.ParseDuration(s) + *d = durationValue(v) + return err +} + +func (d *durationValue) Type() string { + return "duration" +} + +func (d *durationValue) String() string { return (*time.Duration)(d).String() } + +func durationConv(sval string) (interface{}, error) { + return time.ParseDuration(sval) +} + +// GetDuration return the duration value of a flag with the given name +func (f *FlagSet) GetDuration(name string) (time.Duration, error) { + val, err := f.getFlagType(name, "duration", durationConv) + if err != nil { + return 0, err + } + return val.(time.Duration), nil +} + +// DurationVar defines a time.Duration flag with specified name, default value, and usage string. +// The argument p points to a time.Duration variable in which to store the value of the flag. +func (f *FlagSet) DurationVar(p *time.Duration, name string, value time.Duration, usage string) { + f.VarP(newDurationValue(value, p), name, "", usage) +} + +// DurationVarP is like DurationVar, but accepts a shorthand letter that can be used after a single dash. +func (f *FlagSet) DurationVarP(p *time.Duration, name, shorthand string, value time.Duration, usage string) { + f.VarP(newDurationValue(value, p), name, shorthand, usage) +} + +// DurationVar defines a time.Duration flag with specified name, default value, and usage string. +// The argument p points to a time.Duration variable in which to store the value of the flag. +func DurationVar(p *time.Duration, name string, value time.Duration, usage string) { + CommandLine.VarP(newDurationValue(value, p), name, "", usage) +} + +// DurationVarP is like DurationVar, but accepts a shorthand letter that can be used after a single dash. +func DurationVarP(p *time.Duration, name, shorthand string, value time.Duration, usage string) { + CommandLine.VarP(newDurationValue(value, p), name, shorthand, usage) +} + +// Duration defines a time.Duration flag with specified name, default value, and usage string. +// The return value is the address of a time.Duration variable that stores the value of the flag. +func (f *FlagSet) Duration(name string, value time.Duration, usage string) *time.Duration { + p := new(time.Duration) + f.DurationVarP(p, name, "", value, usage) + return p +} + +// DurationP is like Duration, but accepts a shorthand letter that can be used after a single dash. +func (f *FlagSet) DurationP(name, shorthand string, value time.Duration, usage string) *time.Duration { + p := new(time.Duration) + f.DurationVarP(p, name, shorthand, value, usage) + return p +} + +// Duration defines a time.Duration flag with specified name, default value, and usage string. +// The return value is the address of a time.Duration variable that stores the value of the flag. 
+func Duration(name string, value time.Duration, usage string) *time.Duration { + return CommandLine.DurationP(name, "", value, usage) +} + +// DurationP is like Duration, but accepts a shorthand letter that can be used after a single dash. +func DurationP(name, shorthand string, value time.Duration, usage string) *time.Duration { + return CommandLine.DurationP(name, shorthand, value, usage) +} diff --git a/vendor/github.com/spf13/pflag/duration_slice.go b/vendor/github.com/spf13/pflag/duration_slice.go new file mode 100644 index 000000000..badadda53 --- /dev/null +++ b/vendor/github.com/spf13/pflag/duration_slice.go @@ -0,0 +1,166 @@ +package pflag + +import ( + "fmt" + "strings" + "time" +) + +// -- durationSlice Value +type durationSliceValue struct { + value *[]time.Duration + changed bool +} + +func newDurationSliceValue(val []time.Duration, p *[]time.Duration) *durationSliceValue { + dsv := new(durationSliceValue) + dsv.value = p + *dsv.value = val + return dsv +} + +func (s *durationSliceValue) Set(val string) error { + ss := strings.Split(val, ",") + out := make([]time.Duration, len(ss)) + for i, d := range ss { + var err error + out[i], err = time.ParseDuration(d) + if err != nil { + return err + } + + } + if !s.changed { + *s.value = out + } else { + *s.value = append(*s.value, out...) + } + s.changed = true + return nil +} + +func (s *durationSliceValue) Type() string { + return "durationSlice" +} + +func (s *durationSliceValue) String() string { + out := make([]string, len(*s.value)) + for i, d := range *s.value { + out[i] = fmt.Sprintf("%s", d) + } + return "[" + strings.Join(out, ",") + "]" +} + +func (s *durationSliceValue) fromString(val string) (time.Duration, error) { + return time.ParseDuration(val) +} + +func (s *durationSliceValue) toString(val time.Duration) string { + return fmt.Sprintf("%s", val) +} + +func (s *durationSliceValue) Append(val string) error { + i, err := s.fromString(val) + if err != nil { + return err + } + *s.value = append(*s.value, i) + return nil +} + +func (s *durationSliceValue) Replace(val []string) error { + out := make([]time.Duration, len(val)) + for i, d := range val { + var err error + out[i], err = s.fromString(d) + if err != nil { + return err + } + } + *s.value = out + return nil +} + +func (s *durationSliceValue) GetSlice() []string { + out := make([]string, len(*s.value)) + for i, d := range *s.value { + out[i] = s.toString(d) + } + return out +} + +func durationSliceConv(val string) (interface{}, error) { + val = strings.Trim(val, "[]") + // Empty string would cause a slice with one (empty) entry + if len(val) == 0 { + return []time.Duration{}, nil + } + ss := strings.Split(val, ",") + out := make([]time.Duration, len(ss)) + for i, d := range ss { + var err error + out[i], err = time.ParseDuration(d) + if err != nil { + return nil, err + } + + } + return out, nil +} + +// GetDurationSlice returns the []time.Duration value of a flag with the given name +func (f *FlagSet) GetDurationSlice(name string) ([]time.Duration, error) { + val, err := f.getFlagType(name, "durationSlice", durationSliceConv) + if err != nil { + return []time.Duration{}, err + } + return val.([]time.Duration), nil +} + +// DurationSliceVar defines a durationSlice flag with specified name, default value, and usage string. +// The argument p points to a []time.Duration variable in which to store the value of the flag. 
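The duration and duration-slice flags above accept anything time.ParseDuration understands, with comma-separated values for the slice form (repeating the flag appends to it). A small, hypothetical sketch of a consumer; the flag names are illustrative only:

package main

import (
	"fmt"
	"time"

	"github.com/spf13/pflag"
)

func main() {
	fs := pflag.NewFlagSet("timeouts", pflag.ContinueOnError)
	timeout := fs.DurationP("timeout", "t", 30*time.Second, "request timeout")
	retries := fs.DurationSlice("retry-after", []time.Duration{time.Second}, "retry backoff steps")

	_ = fs.Parse([]string{"--timeout=2m30s", "--retry-after=1s,5s,30s"})
	fmt.Println(*timeout, *retries) // 2m30s [1s 5s 30s]
}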
+func (f *FlagSet) DurationSliceVar(p *[]time.Duration, name string, value []time.Duration, usage string) { + f.VarP(newDurationSliceValue(value, p), name, "", usage) +} + +// DurationSliceVarP is like DurationSliceVar, but accepts a shorthand letter that can be used after a single dash. +func (f *FlagSet) DurationSliceVarP(p *[]time.Duration, name, shorthand string, value []time.Duration, usage string) { + f.VarP(newDurationSliceValue(value, p), name, shorthand, usage) +} + +// DurationSliceVar defines a duration[] flag with specified name, default value, and usage string. +// The argument p points to a duration[] variable in which to store the value of the flag. +func DurationSliceVar(p *[]time.Duration, name string, value []time.Duration, usage string) { + CommandLine.VarP(newDurationSliceValue(value, p), name, "", usage) +} + +// DurationSliceVarP is like DurationSliceVar, but accepts a shorthand letter that can be used after a single dash. +func DurationSliceVarP(p *[]time.Duration, name, shorthand string, value []time.Duration, usage string) { + CommandLine.VarP(newDurationSliceValue(value, p), name, shorthand, usage) +} + +// DurationSlice defines a []time.Duration flag with specified name, default value, and usage string. +// The return value is the address of a []time.Duration variable that stores the value of the flag. +func (f *FlagSet) DurationSlice(name string, value []time.Duration, usage string) *[]time.Duration { + p := []time.Duration{} + f.DurationSliceVarP(&p, name, "", value, usage) + return &p +} + +// DurationSliceP is like DurationSlice, but accepts a shorthand letter that can be used after a single dash. +func (f *FlagSet) DurationSliceP(name, shorthand string, value []time.Duration, usage string) *[]time.Duration { + p := []time.Duration{} + f.DurationSliceVarP(&p, name, shorthand, value, usage) + return &p +} + +// DurationSlice defines a []time.Duration flag with specified name, default value, and usage string. +// The return value is the address of a []time.Duration variable that stores the value of the flag. +func DurationSlice(name string, value []time.Duration, usage string) *[]time.Duration { + return CommandLine.DurationSliceP(name, "", value, usage) +} + +// DurationSliceP is like DurationSlice, but accepts a shorthand letter that can be used after a single dash. +func DurationSliceP(name, shorthand string, value []time.Duration, usage string) *[]time.Duration { + return CommandLine.DurationSliceP(name, shorthand, value, usage) +} diff --git a/vendor/github.com/spf13/pflag/flag.go b/vendor/github.com/spf13/pflag/flag.go new file mode 100644 index 000000000..24a5036e9 --- /dev/null +++ b/vendor/github.com/spf13/pflag/flag.go @@ -0,0 +1,1239 @@ +// Copyright 2009 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +/* +Package pflag is a drop-in replacement for Go's flag package, implementing +POSIX/GNU-style --flags. + +pflag is compatible with the GNU extensions to the POSIX recommendations +for command-line options. See +http://www.gnu.org/software/libc/manual/html_node/Argument-Syntax.html + +Usage: + +pflag is a drop-in replacement of Go's native flag package. If you import +pflag under the name "flag" then all code should continue to function +with no changes. + + import flag "github.com/spf13/pflag" + +There is one exception to this: if you directly instantiate the Flag struct +there is one more field "Shorthand" that you will need to set. 
+Most code never instantiates this struct directly, and instead uses +functions such as String(), BoolVar(), and Var(), and is therefore +unaffected. + +Define flags using flag.String(), Bool(), Int(), etc. + +This declares an integer flag, -flagname, stored in the pointer ip, with type *int. + var ip = flag.Int("flagname", 1234, "help message for flagname") +If you like, you can bind the flag to a variable using the Var() functions. + var flagvar int + func init() { + flag.IntVar(&flagvar, "flagname", 1234, "help message for flagname") + } +Or you can create custom flags that satisfy the Value interface (with +pointer receivers) and couple them to flag parsing by + flag.Var(&flagVal, "name", "help message for flagname") +For such flags, the default value is just the initial value of the variable. + +After all flags are defined, call + flag.Parse() +to parse the command line into the defined flags. + +Flags may then be used directly. If you're using the flags themselves, +they are all pointers; if you bind to variables, they're values. + fmt.Println("ip has value ", *ip) + fmt.Println("flagvar has value ", flagvar) + +After parsing, the arguments after the flag are available as the +slice flag.Args() or individually as flag.Arg(i). +The arguments are indexed from 0 through flag.NArg()-1. + +The pflag package also defines some new functions that are not in flag, +that give one-letter shorthands for flags. You can use these by appending +'P' to the name of any function that defines a flag. + var ip = flag.IntP("flagname", "f", 1234, "help message") + var flagvar bool + func init() { + flag.BoolVarP(&flagvar, "boolname", "b", true, "help message") + } + flag.VarP(&flagval, "varname", "v", "help message") +Shorthand letters can be used with single dashes on the command line. +Boolean shorthand flags can be combined with other shorthand flags. + +Command line flag syntax: + --flag // boolean flags only + --flag=x + +Unlike the flag package, a single dash before an option means something +different than a double dash. Single dashes signify a series of shorthand +letters for flags. All but the last shorthand letter must be boolean flags. + // boolean flags + -f + -abc + // non-boolean flags + -n 1234 + -Ifile + // mixed + -abcs "hello" + -abcn1234 + +Flag parsing stops after the terminator "--". Unlike the flag package, +flags can be interspersed with arguments anywhere on the command line +before this terminator. + +Integer flags accept 1234, 0664, 0x1234 and may be negative. +Boolean flags (in their long form) accept 1, 0, t, f, true, false, +TRUE, FALSE, True, False. +Duration flags accept any input valid for time.ParseDuration. + +The default set of command-line flags is controlled by +top-level functions. The FlagSet type allows one to define +independent sets of flags, such as to implement subcommands +in a command-line interface. The methods of FlagSet are +analogous to the top-level functions for the command-line +flag set. +*/ +package pflag + +import ( + "bytes" + "errors" + goflag "flag" + "fmt" + "io" + "os" + "sort" + "strings" +) + +// ErrHelp is the error returned if the flag -help is invoked but no such flag is defined. +var ErrHelp = errors.New("pflag: help requested") + +// ErrorHandling defines how to handle flag parsing errors. 
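To make the package documentation above concrete, here is a minimal, hypothetical program using the shorthand-aware 'P' variants it describes; the names and defaults follow the doc comment's own example and are not part of this change:

package main

import (
	"fmt"

	flag "github.com/spf13/pflag"
)

func main() {
	ip := flag.IntP("flagname", "f", 1234, "help message for flagname")
	verbose := flag.BoolP("verbose", "v", false, "verbose output")
	flag.Parse()

	// Both "prog -v --flagname=7 rest" and "prog -vf 7 rest" parse the same way:
	// shorthands combine after a single dash, and non-flag args stay available.
	fmt.Println("flagname:", *ip, "verbose:", *verbose, "args:", flag.Args())
}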
+type ErrorHandling int + +const ( + // ContinueOnError will return an err from Parse() if an error is found + ContinueOnError ErrorHandling = iota + // ExitOnError will call os.Exit(2) if an error is found when parsing + ExitOnError + // PanicOnError will panic() if an error is found when parsing flags + PanicOnError +) + +// ParseErrorsWhitelist defines the parsing errors that can be ignored +type ParseErrorsWhitelist struct { + // UnknownFlags will ignore unknown flags errors and continue parsing rest of the flags + UnknownFlags bool +} + +// NormalizedName is a flag name that has been normalized according to rules +// for the FlagSet (e.g. making '-' and '_' equivalent). +type NormalizedName string + +// A FlagSet represents a set of defined flags. +type FlagSet struct { + // Usage is the function called when an error occurs while parsing flags. + // The field is a function (not a method) that may be changed to point to + // a custom error handler. + Usage func() + + // SortFlags is used to indicate, if user wants to have sorted flags in + // help/usage messages. + SortFlags bool + + // ParseErrorsWhitelist is used to configure a whitelist of errors + ParseErrorsWhitelist ParseErrorsWhitelist + + name string + parsed bool + actual map[NormalizedName]*Flag + orderedActual []*Flag + sortedActual []*Flag + formal map[NormalizedName]*Flag + orderedFormal []*Flag + sortedFormal []*Flag + shorthands map[byte]*Flag + args []string // arguments after flags + argsLenAtDash int // len(args) when a '--' was located when parsing, or -1 if no -- + errorHandling ErrorHandling + output io.Writer // nil means stderr; use out() accessor + interspersed bool // allow interspersed option/non-option args + normalizeNameFunc func(f *FlagSet, name string) NormalizedName + + addedGoFlagSets []*goflag.FlagSet +} + +// A Flag represents the state of a flag. +type Flag struct { + Name string // name as it appears on command line + Shorthand string // one-letter abbreviated flag + Usage string // help message + Value Value // value as set + DefValue string // default value (as text); for usage message + Changed bool // If the user set the value (or if left to default) + NoOptDefVal string // default value (as text); if the flag is on the command line without any options + Deprecated string // If this flag is deprecated, this string is the new or now thing to use + Hidden bool // used by cobra.Command to allow flags to be hidden from help/usage text + ShorthandDeprecated string // If the shorthand of this flag is deprecated, this string is the new or now thing to use + Annotations map[string][]string // used by cobra.Command bash autocomple code +} + +// Value is the interface to the dynamic value stored in a flag. +// (The default value is represented as a string.) +type Value interface { + String() string + Set(string) error + Type() string +} + +// SliceValue is a secondary interface to all flags which hold a list +// of values. This allows full control over the value of list flags, +// and avoids complicated marshalling and unmarshalling to csv. +type SliceValue interface { + // Append adds the specified value to the end of the flag value list. + Append(string) error + // Replace will fully overwrite any data currently in the flag value list. + Replace([]string) error + // GetSlice returns the flag value list as an array of strings. + GetSlice() []string +} + +// sortFlags returns the flags as a slice in lexicographical sorted order. 
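ParseErrorsWhitelist, defined above, lets a FlagSet skip unknown flags instead of failing the whole parse. A brief, hypothetical sketch (flag names invented):

package main

import (
	"fmt"

	"github.com/spf13/pflag"
)

func main() {
	fs := pflag.NewFlagSet("example", pflag.ContinueOnError)
	fs.ParseErrorsWhitelist.UnknownFlags = true
	debug := fs.Bool("debug", false, "enable debug output")

	// Without the whitelist this would fail with `unknown flag: --not-defined`.
	if err := fs.Parse([]string{"--debug", "--not-defined=1", "positional"}); err != nil {
		panic(err)
	}
	fmt.Println(*debug, fs.Args()) // true [positional]
}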
+func sortFlags(flags map[NormalizedName]*Flag) []*Flag { + list := make(sort.StringSlice, len(flags)) + i := 0 + for k := range flags { + list[i] = string(k) + i++ + } + list.Sort() + result := make([]*Flag, len(list)) + for i, name := range list { + result[i] = flags[NormalizedName(name)] + } + return result +} + +// SetNormalizeFunc allows you to add a function which can translate flag names. +// Flags added to the FlagSet will be translated and then when anything tries to +// look up the flag that will also be translated. So it would be possible to create +// a flag named "getURL" and have it translated to "geturl". A user could then pass +// "--getUrl" which may also be translated to "geturl" and everything will work. +func (f *FlagSet) SetNormalizeFunc(n func(f *FlagSet, name string) NormalizedName) { + f.normalizeNameFunc = n + f.sortedFormal = f.sortedFormal[:0] + for fname, flag := range f.formal { + nname := f.normalizeFlagName(flag.Name) + if fname == nname { + continue + } + flag.Name = string(nname) + delete(f.formal, fname) + f.formal[nname] = flag + if _, set := f.actual[fname]; set { + delete(f.actual, fname) + f.actual[nname] = flag + } + } +} + +// GetNormalizeFunc returns the previously set NormalizeFunc of a function which +// does no translation, if not set previously. +func (f *FlagSet) GetNormalizeFunc() func(f *FlagSet, name string) NormalizedName { + if f.normalizeNameFunc != nil { + return f.normalizeNameFunc + } + return func(f *FlagSet, name string) NormalizedName { return NormalizedName(name) } +} + +func (f *FlagSet) normalizeFlagName(name string) NormalizedName { + n := f.GetNormalizeFunc() + return n(f, name) +} + +func (f *FlagSet) out() io.Writer { + if f.output == nil { + return os.Stderr + } + return f.output +} + +// SetOutput sets the destination for usage and error messages. +// If output is nil, os.Stderr is used. +func (f *FlagSet) SetOutput(output io.Writer) { + f.output = output +} + +// VisitAll visits the flags in lexicographical order or +// in primordial order if f.SortFlags is false, calling fn for each. +// It visits all flags, even those not set. +func (f *FlagSet) VisitAll(fn func(*Flag)) { + if len(f.formal) == 0 { + return + } + + var flags []*Flag + if f.SortFlags { + if len(f.formal) != len(f.sortedFormal) { + f.sortedFormal = sortFlags(f.formal) + } + flags = f.sortedFormal + } else { + flags = f.orderedFormal + } + + for _, flag := range flags { + fn(flag) + } +} + +// HasFlags returns a bool to indicate if the FlagSet has any flags defined. +func (f *FlagSet) HasFlags() bool { + return len(f.formal) > 0 +} + +// HasAvailableFlags returns a bool to indicate if the FlagSet has any flags +// that are not hidden. +func (f *FlagSet) HasAvailableFlags() bool { + for _, flag := range f.formal { + if !flag.Hidden { + return true + } + } + return false +} + +// VisitAll visits the command-line flags in lexicographical order or +// in primordial order if f.SortFlags is false, calling fn for each. +// It visits all flags, even those not set. +func VisitAll(fn func(*Flag)) { + CommandLine.VisitAll(fn) +} + +// Visit visits the flags in lexicographical order or +// in primordial order if f.SortFlags is false, calling fn for each. +// It visits only those flags that have been set. 
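SetNormalizeFunc, shown above, lets several spellings of a flag name resolve to the same flag. A small, hypothetical sketch modelled on pflag's word-separator normalizer:

package main

import (
	"fmt"
	"strings"

	"github.com/spf13/pflag"
)

// wordSepNormalizeFunc treats "-" and "_" as equivalent in flag names by
// rewriting both to ".", so --my-flag, --my_flag and --my.flag all match.
func wordSepNormalizeFunc(f *pflag.FlagSet, name string) pflag.NormalizedName {
	return pflag.NormalizedName(strings.NewReplacer("-", ".", "_", ".").Replace(name))
}

func main() {
	fs := pflag.NewFlagSet("example", pflag.ContinueOnError)
	fs.SetNormalizeFunc(wordSepNormalizeFunc)
	fs.String("my-flag", "", "a flag with several accepted spellings")

	_ = fs.Parse([]string{"--my_flag=value"})
	fmt.Println(fs.Lookup("my-flag").Value.String()) // value
}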
+func (f *FlagSet) Visit(fn func(*Flag)) { + if len(f.actual) == 0 { + return + } + + var flags []*Flag + if f.SortFlags { + if len(f.actual) != len(f.sortedActual) { + f.sortedActual = sortFlags(f.actual) + } + flags = f.sortedActual + } else { + flags = f.orderedActual + } + + for _, flag := range flags { + fn(flag) + } +} + +// Visit visits the command-line flags in lexicographical order or +// in primordial order if f.SortFlags is false, calling fn for each. +// It visits only those flags that have been set. +func Visit(fn func(*Flag)) { + CommandLine.Visit(fn) +} + +// Lookup returns the Flag structure of the named flag, returning nil if none exists. +func (f *FlagSet) Lookup(name string) *Flag { + return f.lookup(f.normalizeFlagName(name)) +} + +// ShorthandLookup returns the Flag structure of the short handed flag, +// returning nil if none exists. +// It panics, if len(name) > 1. +func (f *FlagSet) ShorthandLookup(name string) *Flag { + if name == "" { + return nil + } + if len(name) > 1 { + msg := fmt.Sprintf("can not look up shorthand which is more than one ASCII character: %q", name) + fmt.Fprintf(f.out(), msg) + panic(msg) + } + c := name[0] + return f.shorthands[c] +} + +// lookup returns the Flag structure of the named flag, returning nil if none exists. +func (f *FlagSet) lookup(name NormalizedName) *Flag { + return f.formal[name] +} + +// func to return a given type for a given flag name +func (f *FlagSet) getFlagType(name string, ftype string, convFunc func(sval string) (interface{}, error)) (interface{}, error) { + flag := f.Lookup(name) + if flag == nil { + err := fmt.Errorf("flag accessed but not defined: %s", name) + return nil, err + } + + if flag.Value.Type() != ftype { + err := fmt.Errorf("trying to get %s value of flag of type %s", ftype, flag.Value.Type()) + return nil, err + } + + sval := flag.Value.String() + result, err := convFunc(sval) + if err != nil { + return nil, err + } + return result, nil +} + +// ArgsLenAtDash will return the length of f.Args at the moment when a -- was +// found during arg parsing. This allows your program to know which args were +// before the -- and which came after. +func (f *FlagSet) ArgsLenAtDash() int { + return f.argsLenAtDash +} + +// MarkDeprecated indicated that a flag is deprecated in your program. It will +// continue to function but will not show up in help or usage messages. Using +// this flag will also print the given usageMessage. +func (f *FlagSet) MarkDeprecated(name string, usageMessage string) error { + flag := f.Lookup(name) + if flag == nil { + return fmt.Errorf("flag %q does not exist", name) + } + if usageMessage == "" { + return fmt.Errorf("deprecated message for flag %q must be set", name) + } + flag.Deprecated = usageMessage + flag.Hidden = true + return nil +} + +// MarkShorthandDeprecated will mark the shorthand of a flag deprecated in your +// program. It will continue to function but will not show up in help or usage +// messages. Using this flag will also print the given usageMessage. +func (f *FlagSet) MarkShorthandDeprecated(name string, usageMessage string) error { + flag := f.Lookup(name) + if flag == nil { + return fmt.Errorf("flag %q does not exist", name) + } + if usageMessage == "" { + return fmt.Errorf("deprecated message for flag %q must be set", name) + } + flag.ShorthandDeprecated = usageMessage + return nil +} + +// MarkHidden sets a flag to 'hidden' in your program. It will continue to +// function but will not show up in help or usage messages. 
+func (f *FlagSet) MarkHidden(name string) error { + flag := f.Lookup(name) + if flag == nil { + return fmt.Errorf("flag %q does not exist", name) + } + flag.Hidden = true + return nil +} + +// Lookup returns the Flag structure of the named command-line flag, +// returning nil if none exists. +func Lookup(name string) *Flag { + return CommandLine.Lookup(name) +} + +// ShorthandLookup returns the Flag structure of the short handed flag, +// returning nil if none exists. +func ShorthandLookup(name string) *Flag { + return CommandLine.ShorthandLookup(name) +} + +// Set sets the value of the named flag. +func (f *FlagSet) Set(name, value string) error { + normalName := f.normalizeFlagName(name) + flag, ok := f.formal[normalName] + if !ok { + return fmt.Errorf("no such flag -%v", name) + } + + err := flag.Value.Set(value) + if err != nil { + var flagName string + if flag.Shorthand != "" && flag.ShorthandDeprecated == "" { + flagName = fmt.Sprintf("-%s, --%s", flag.Shorthand, flag.Name) + } else { + flagName = fmt.Sprintf("--%s", flag.Name) + } + return fmt.Errorf("invalid argument %q for %q flag: %v", value, flagName, err) + } + + if !flag.Changed { + if f.actual == nil { + f.actual = make(map[NormalizedName]*Flag) + } + f.actual[normalName] = flag + f.orderedActual = append(f.orderedActual, flag) + + flag.Changed = true + } + + if flag.Deprecated != "" { + fmt.Fprintf(f.out(), "Flag --%s has been deprecated, %s\n", flag.Name, flag.Deprecated) + } + return nil +} + +// SetAnnotation allows one to set arbitrary annotations on a flag in the FlagSet. +// This is sometimes used by spf13/cobra programs which want to generate additional +// bash completion information. +func (f *FlagSet) SetAnnotation(name, key string, values []string) error { + normalName := f.normalizeFlagName(name) + flag, ok := f.formal[normalName] + if !ok { + return fmt.Errorf("no such flag -%v", name) + } + if flag.Annotations == nil { + flag.Annotations = map[string][]string{} + } + flag.Annotations[key] = values + return nil +} + +// Changed returns true if the flag was explicitly set during Parse() and false +// otherwise +func (f *FlagSet) Changed(name string) bool { + flag := f.Lookup(name) + // If a flag doesn't exist, it wasn't changed.... + if flag == nil { + return false + } + return flag.Changed +} + +// Set sets the value of the named command-line flag. +func Set(name, value string) error { + return CommandLine.Set(name, value) +} + +// PrintDefaults prints, to standard error unless configured +// otherwise, the default values of all defined flags in the set. +func (f *FlagSet) PrintDefaults() { + usages := f.FlagUsages() + fmt.Fprint(f.out(), usages) +} + +// defaultIsZeroValue returns true if the default value for this flag represents +// a zero value. 
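MarkDeprecated, MarkShorthandDeprecated and MarkHidden above keep a flag working while removing it from help output; Changed reports whether a flag was set explicitly. A short, hypothetical sketch (flag names invented):

package main

import (
	"fmt"

	"github.com/spf13/pflag"
)

func main() {
	fs := pflag.NewFlagSet("example", pflag.ContinueOnError)
	fs.String("old-name", "", "previous spelling of --new-name")
	fs.String("new-name", "", "the flag that replaces --old-name")
	fs.Bool("experimental", false, "not ready for general use")

	// Deprecated and hidden flags keep working but are dropped from the usage
	// text; using --old-name additionally prints the message given below.
	if err := fs.MarkDeprecated("old-name", "use --new-name instead"); err != nil {
		panic(err)
	}
	if err := fs.MarkHidden("experimental"); err != nil {
		panic(err)
	}

	_ = fs.Parse([]string{"--new-name=x"})
	fmt.Println(fs.Changed("new-name"), fs.Changed("old-name")) // true false
}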
+func (f *Flag) defaultIsZeroValue() bool {
+	switch f.Value.(type) {
+	case boolFlag:
+		return f.DefValue == "false"
+	case *durationValue:
+		// Beginning in Go 1.7, duration zero values are "0s"
+		return f.DefValue == "0" || f.DefValue == "0s"
+	case *intValue, *int8Value, *int32Value, *int64Value, *uintValue, *uint8Value, *uint16Value, *uint32Value, *uint64Value, *countValue, *float32Value, *float64Value:
+		return f.DefValue == "0"
+	case *stringValue:
+		return f.DefValue == ""
+	case *ipValue, *ipMaskValue, *ipNetValue:
+		return f.DefValue == ""
+	case *intSliceValue, *stringSliceValue, *stringArrayValue:
+		return f.DefValue == "[]"
+	default:
+		switch f.Value.String() {
+		case "false":
+			return true
+		case "<nil>":
+			return true
+		case "":
+			return true
+		case "0":
+			return true
+		}
+		return false
+	}
+}
+
+// UnquoteUsage extracts a back-quoted name from the usage
+// string for a flag and returns it and the un-quoted usage.
+// Given "a `name` to show" it returns ("name", "a name to show").
+// If there are no back quotes, the name is an educated guess of the
+// type of the flag's value, or the empty string if the flag is boolean.
+func UnquoteUsage(flag *Flag) (name string, usage string) {
+	// Look for a back-quoted name, but avoid the strings package.
+	usage = flag.Usage
+	for i := 0; i < len(usage); i++ {
+		if usage[i] == '`' {
+			for j := i + 1; j < len(usage); j++ {
+				if usage[j] == '`' {
+					name = usage[i+1 : j]
+					usage = usage[:i] + name + usage[j+1:]
+					return name, usage
+				}
+			}
+			break // Only one back quote; use type name.
+		}
+	}
+
+	name = flag.Value.Type()
+	switch name {
+	case "bool":
+		name = ""
+	case "float64":
+		name = "float"
+	case "int64":
+		name = "int"
+	case "uint64":
+		name = "uint"
+	case "stringSlice":
+		name = "strings"
+	case "intSlice":
+		name = "ints"
+	case "uintSlice":
+		name = "uints"
+	case "boolSlice":
+		name = "bools"
+	}
+
+	return
+}
+
+// Splits the string `s` on whitespace into an initial substring up to
+// `i` runes in length and the remainder. Will go `slop` over `i` if
+// that encompasses the entire string (which allows the caller to
+// avoid short orphan words on the final line).
+func wrapN(i, slop int, s string) (string, string) {
+	if i+slop > len(s) {
+		return s, ""
+	}
+
+	w := strings.LastIndexAny(s[:i], " \t\n")
+	if w <= 0 {
+		return s, ""
+	}
+	nlPos := strings.LastIndex(s[:i], "\n")
+	if nlPos > 0 && nlPos < w {
+		return s[:nlPos], s[nlPos+1:]
+	}
+	return s[:w], s[w+1:]
+}
+
+// Wraps the string `s` to a maximum width `w` with leading indent
+// `i`. The first line is not indented (this is assumed to be done by
+// caller). Pass `w` == 0 to do no wrapping
+func wrap(i, w int, s string) string {
+	if w == 0 {
+		return strings.Replace(s, "\n", "\n"+strings.Repeat(" ", i), -1)
+	}
+
+	// space between indent i and end of line width w into which
+	// we should wrap the text.
+	wrap := w - i
+
+	var r, l string
+
+	// Not enough space for sensible wrapping. Wrap as a block on
+	// the next line instead.
+	if wrap < 24 {
+		i = 16
+		wrap = w - i
+		r += "\n" + strings.Repeat(" ", i)
+	}
+	// If still not enough space then don't even try to wrap.
+	if wrap < 24 {
+		return strings.Replace(s, "\n", r, -1)
+	}
+
+	// Try to avoid short orphan words on the final line, by
+	// allowing wrapN to go a bit over if that would fit in the
+	// remainder of the line.
+ slop := 5 + wrap = wrap - slop + + // Handle first line, which is indented by the caller (or the + // special case above) + l, s = wrapN(wrap, slop, s) + r = r + strings.Replace(l, "\n", "\n"+strings.Repeat(" ", i), -1) + + // Now wrap the rest + for s != "" { + var t string + + t, s = wrapN(wrap, slop, s) + r = r + "\n" + strings.Repeat(" ", i) + strings.Replace(t, "\n", "\n"+strings.Repeat(" ", i), -1) + } + + return r + +} + +// FlagUsagesWrapped returns a string containing the usage information +// for all flags in the FlagSet. Wrapped to `cols` columns (0 for no +// wrapping) +func (f *FlagSet) FlagUsagesWrapped(cols int) string { + buf := new(bytes.Buffer) + + lines := make([]string, 0, len(f.formal)) + + maxlen := 0 + f.VisitAll(func(flag *Flag) { + if flag.Hidden { + return + } + + line := "" + if flag.Shorthand != "" && flag.ShorthandDeprecated == "" { + line = fmt.Sprintf(" -%s, --%s", flag.Shorthand, flag.Name) + } else { + line = fmt.Sprintf(" --%s", flag.Name) + } + + varname, usage := UnquoteUsage(flag) + if varname != "" { + line += " " + varname + } + if flag.NoOptDefVal != "" { + switch flag.Value.Type() { + case "string": + line += fmt.Sprintf("[=\"%s\"]", flag.NoOptDefVal) + case "bool": + if flag.NoOptDefVal != "true" { + line += fmt.Sprintf("[=%s]", flag.NoOptDefVal) + } + case "count": + if flag.NoOptDefVal != "+1" { + line += fmt.Sprintf("[=%s]", flag.NoOptDefVal) + } + default: + line += fmt.Sprintf("[=%s]", flag.NoOptDefVal) + } + } + + // This special character will be replaced with spacing once the + // correct alignment is calculated + line += "\x00" + if len(line) > maxlen { + maxlen = len(line) + } + + line += usage + if !flag.defaultIsZeroValue() { + if flag.Value.Type() == "string" { + line += fmt.Sprintf(" (default %q)", flag.DefValue) + } else { + line += fmt.Sprintf(" (default %s)", flag.DefValue) + } + } + if len(flag.Deprecated) != 0 { + line += fmt.Sprintf(" (DEPRECATED: %s)", flag.Deprecated) + } + + lines = append(lines, line) + }) + + for _, line := range lines { + sidx := strings.Index(line, "\x00") + spacing := strings.Repeat(" ", maxlen-sidx) + // maxlen + 2 comes from + 1 for the \x00 and + 1 for the (deliberate) off-by-one in maxlen-sidx + fmt.Fprintln(buf, line[:sidx], spacing, wrap(maxlen+2, cols, line[sidx+1:])) + } + + return buf.String() +} + +// FlagUsages returns a string containing the usage information for all flags in +// the FlagSet +func (f *FlagSet) FlagUsages() string { + return f.FlagUsagesWrapped(0) +} + +// PrintDefaults prints to standard error the default values of all defined command-line flags. +func PrintDefaults() { + CommandLine.PrintDefaults() +} + +// defaultUsage is the default function to print a usage message. +func defaultUsage(f *FlagSet) { + fmt.Fprintf(f.out(), "Usage of %s:\n", f.name) + f.PrintDefaults() +} + +// NOTE: Usage is not just defaultUsage(CommandLine) +// because it serves (via godoc flag Usage) as the example +// for how to write your own usage function. + +// Usage prints to standard error a usage message documenting all defined command-line flags. +// The function is a variable that may be changed to point to a custom function. +// By default it prints a simple header and calls PrintDefaults; for details about the +// format of the output and how to control it, see the documentation for PrintDefaults. +var Usage = func() { + fmt.Fprintf(os.Stderr, "Usage of %s:\n", os.Args[0]) + PrintDefaults() +} + +// NFlag returns the number of flags that have been set. 
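FlagUsages and FlagUsagesWrapped above render the help text, and UnquoteUsage turns a back-quoted word in a usage string into the value placeholder. A small, hypothetical sketch (flag names invented):

package main

import (
	"fmt"

	"github.com/spf13/pflag"
)

func main() {
	fs := pflag.NewFlagSet("report", pflag.ContinueOnError)
	fs.StringP("output", "o", "", "write the report to `file`")
	fs.Int("width", 80, "maximum line `columns`")

	// The back-quoted word becomes the placeholder in the help text, roughly:
	//   -o, --output file    write the report to file
	//       --width columns  maximum line columns (default 80)
	fmt.Print(fs.FlagUsagesWrapped(100))
}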
+func (f *FlagSet) NFlag() int { return len(f.actual) } + +// NFlag returns the number of command-line flags that have been set. +func NFlag() int { return len(CommandLine.actual) } + +// Arg returns the i'th argument. Arg(0) is the first remaining argument +// after flags have been processed. +func (f *FlagSet) Arg(i int) string { + if i < 0 || i >= len(f.args) { + return "" + } + return f.args[i] +} + +// Arg returns the i'th command-line argument. Arg(0) is the first remaining argument +// after flags have been processed. +func Arg(i int) string { + return CommandLine.Arg(i) +} + +// NArg is the number of arguments remaining after flags have been processed. +func (f *FlagSet) NArg() int { return len(f.args) } + +// NArg is the number of arguments remaining after flags have been processed. +func NArg() int { return len(CommandLine.args) } + +// Args returns the non-flag arguments. +func (f *FlagSet) Args() []string { return f.args } + +// Args returns the non-flag command-line arguments. +func Args() []string { return CommandLine.args } + +// Var defines a flag with the specified name and usage string. The type and +// value of the flag are represented by the first argument, of type Value, which +// typically holds a user-defined implementation of Value. For instance, the +// caller could create a flag that turns a comma-separated string into a slice +// of strings by giving the slice the methods of Value; in particular, Set would +// decompose the comma-separated string into the slice. +func (f *FlagSet) Var(value Value, name string, usage string) { + f.VarP(value, name, "", usage) +} + +// VarPF is like VarP, but returns the flag created +func (f *FlagSet) VarPF(value Value, name, shorthand, usage string) *Flag { + // Remember the default value as a string; it won't change. + flag := &Flag{ + Name: name, + Shorthand: shorthand, + Usage: usage, + Value: value, + DefValue: value.String(), + } + f.AddFlag(flag) + return flag +} + +// VarP is like Var, but accepts a shorthand letter that can be used after a single dash. +func (f *FlagSet) VarP(value Value, name, shorthand, usage string) { + f.VarPF(value, name, shorthand, usage) +} + +// AddFlag will add the flag to the FlagSet +func (f *FlagSet) AddFlag(flag *Flag) { + normalizedFlagName := f.normalizeFlagName(flag.Name) + + _, alreadyThere := f.formal[normalizedFlagName] + if alreadyThere { + msg := fmt.Sprintf("%s flag redefined: %s", f.name, flag.Name) + fmt.Fprintln(f.out(), msg) + panic(msg) // Happens only if flags are declared with identical names + } + if f.formal == nil { + f.formal = make(map[NormalizedName]*Flag) + } + + flag.Name = string(normalizedFlagName) + f.formal[normalizedFlagName] = flag + f.orderedFormal = append(f.orderedFormal, flag) + + if flag.Shorthand == "" { + return + } + if len(flag.Shorthand) > 1 { + msg := fmt.Sprintf("%q shorthand is more than one ASCII character", flag.Shorthand) + fmt.Fprintf(f.out(), msg) + panic(msg) + } + if f.shorthands == nil { + f.shorthands = make(map[byte]*Flag) + } + c := flag.Shorthand[0] + used, alreadyThere := f.shorthands[c] + if alreadyThere { + msg := fmt.Sprintf("unable to redefine %q shorthand in %q flagset: it's already used for %q flag", c, f.name, used.Name) + fmt.Fprintf(f.out(), msg) + panic(msg) + } + f.shorthands[c] = flag +} + +// AddFlagSet adds one FlagSet to another. If a flag is already present in f +// the flag from newSet will be ignored. 
+func (f *FlagSet) AddFlagSet(newSet *FlagSet) { + if newSet == nil { + return + } + newSet.VisitAll(func(flag *Flag) { + if f.Lookup(flag.Name) == nil { + f.AddFlag(flag) + } + }) +} + +// Var defines a flag with the specified name and usage string. The type and +// value of the flag are represented by the first argument, of type Value, which +// typically holds a user-defined implementation of Value. For instance, the +// caller could create a flag that turns a comma-separated string into a slice +// of strings by giving the slice the methods of Value; in particular, Set would +// decompose the comma-separated string into the slice. +func Var(value Value, name string, usage string) { + CommandLine.VarP(value, name, "", usage) +} + +// VarP is like Var, but accepts a shorthand letter that can be used after a single dash. +func VarP(value Value, name, shorthand, usage string) { + CommandLine.VarP(value, name, shorthand, usage) +} + +// failf prints to standard error a formatted error and usage message and +// returns the error. +func (f *FlagSet) failf(format string, a ...interface{}) error { + err := fmt.Errorf(format, a...) + if f.errorHandling != ContinueOnError { + fmt.Fprintln(f.out(), err) + f.usage() + } + return err +} + +// usage calls the Usage method for the flag set, or the usage function if +// the flag set is CommandLine. +func (f *FlagSet) usage() { + if f == CommandLine { + Usage() + } else if f.Usage == nil { + defaultUsage(f) + } else { + f.Usage() + } +} + +//--unknown (args will be empty) +//--unknown --next-flag ... (args will be --next-flag ...) +//--unknown arg ... (args will be arg ...) +func stripUnknownFlagValue(args []string) []string { + if len(args) == 0 { + //--unknown + return args + } + + first := args[0] + if len(first) > 0 && first[0] == '-' { + //--unknown --next-flag ... + return args + } + + //--unknown arg ... (args will be arg ...) + if len(args) > 1 { + return args[1:] + } + return nil +} + +func (f *FlagSet) parseLongArg(s string, args []string, fn parseFunc) (a []string, err error) { + a = args + name := s[2:] + if len(name) == 0 || name[0] == '-' || name[0] == '=' { + err = f.failf("bad flag syntax: %s", s) + return + } + + split := strings.SplitN(name, "=", 2) + name = split[0] + flag, exists := f.formal[f.normalizeFlagName(name)] + + if !exists { + switch { + case name == "help": + f.usage() + return a, ErrHelp + case f.ParseErrorsWhitelist.UnknownFlags: + // --unknown=unknownval arg ... 
+ // we do not want to lose arg in this case + if len(split) >= 2 { + return a, nil + } + + return stripUnknownFlagValue(a), nil + default: + err = f.failf("unknown flag: --%s", name) + return + } + } + + var value string + if len(split) == 2 { + // '--flag=arg' + value = split[1] + } else if flag.NoOptDefVal != "" { + // '--flag' (arg was optional) + value = flag.NoOptDefVal + } else if len(a) > 0 { + // '--flag arg' + value = a[0] + a = a[1:] + } else { + // '--flag' (arg was required) + err = f.failf("flag needs an argument: %s", s) + return + } + + err = fn(flag, value) + if err != nil { + f.failf(err.Error()) + } + return +} + +func (f *FlagSet) parseSingleShortArg(shorthands string, args []string, fn parseFunc) (outShorts string, outArgs []string, err error) { + outArgs = args + + if strings.HasPrefix(shorthands, "test.") { + return + } + + outShorts = shorthands[1:] + c := shorthands[0] + + flag, exists := f.shorthands[c] + if !exists { + switch { + case c == 'h': + f.usage() + err = ErrHelp + return + case f.ParseErrorsWhitelist.UnknownFlags: + // '-f=arg arg ...' + // we do not want to lose arg in this case + if len(shorthands) > 2 && shorthands[1] == '=' { + outShorts = "" + return + } + + outArgs = stripUnknownFlagValue(outArgs) + return + default: + err = f.failf("unknown shorthand flag: %q in -%s", c, shorthands) + return + } + } + + var value string + if len(shorthands) > 2 && shorthands[1] == '=' { + // '-f=arg' + value = shorthands[2:] + outShorts = "" + } else if flag.NoOptDefVal != "" { + // '-f' (arg was optional) + value = flag.NoOptDefVal + } else if len(shorthands) > 1 { + // '-farg' + value = shorthands[1:] + outShorts = "" + } else if len(args) > 0 { + // '-f arg' + value = args[0] + outArgs = args[1:] + } else { + // '-f' (arg was required) + err = f.failf("flag needs an argument: %q in -%s", c, shorthands) + return + } + + if flag.ShorthandDeprecated != "" { + fmt.Fprintf(f.out(), "Flag shorthand -%s has been deprecated, %s\n", flag.Shorthand, flag.ShorthandDeprecated) + } + + err = fn(flag, value) + if err != nil { + f.failf(err.Error()) + } + return +} + +func (f *FlagSet) parseShortArg(s string, args []string, fn parseFunc) (a []string, err error) { + a = args + shorthands := s[1:] + + // "shorthands" can be a series of shorthand letters of flags (e.g. "-vvv"). + for len(shorthands) > 0 { + shorthands, a, err = f.parseSingleShortArg(shorthands, args, fn) + if err != nil { + return + } + } + + return +} + +func (f *FlagSet) parseArgs(args []string, fn parseFunc) (err error) { + for len(args) > 0 { + s := args[0] + args = args[1:] + if len(s) == 0 || s[0] != '-' || len(s) == 1 { + if !f.interspersed { + f.args = append(f.args, s) + f.args = append(f.args, args...) + return nil + } + f.args = append(f.args, s) + continue + } + + if s[1] == '-' { + if len(s) == 2 { // "--" terminates the flags + f.argsLenAtDash = len(f.args) + f.args = append(f.args, args...) + break + } + args, err = f.parseLongArg(s, args, fn) + } else { + args, err = f.parseShortArg(s, args, fn) + } + if err != nil { + return + } + } + return +} + +// Parse parses flag definitions from the argument list, which should not +// include the command name. Must be called after all flags in the FlagSet +// are defined and before flags are accessed by the program. +// The return value will be ErrHelp if -help was set but not defined. 
+func (f *FlagSet) Parse(arguments []string) error { + if f.addedGoFlagSets != nil { + for _, goFlagSet := range f.addedGoFlagSets { + goFlagSet.Parse(nil) + } + } + f.parsed = true + + if len(arguments) < 0 { + return nil + } + + f.args = make([]string, 0, len(arguments)) + + set := func(flag *Flag, value string) error { + return f.Set(flag.Name, value) + } + + err := f.parseArgs(arguments, set) + if err != nil { + switch f.errorHandling { + case ContinueOnError: + return err + case ExitOnError: + fmt.Println(err) + os.Exit(2) + case PanicOnError: + panic(err) + } + } + return nil +} + +type parseFunc func(flag *Flag, value string) error + +// ParseAll parses flag definitions from the argument list, which should not +// include the command name. The arguments for fn are flag and value. Must be +// called after all flags in the FlagSet are defined and before flags are +// accessed by the program. The return value will be ErrHelp if -help was set +// but not defined. +func (f *FlagSet) ParseAll(arguments []string, fn func(flag *Flag, value string) error) error { + f.parsed = true + f.args = make([]string, 0, len(arguments)) + + err := f.parseArgs(arguments, fn) + if err != nil { + switch f.errorHandling { + case ContinueOnError: + return err + case ExitOnError: + os.Exit(2) + case PanicOnError: + panic(err) + } + } + return nil +} + +// Parsed reports whether f.Parse has been called. +func (f *FlagSet) Parsed() bool { + return f.parsed +} + +// Parse parses the command-line flags from os.Args[1:]. Must be called +// after all flags are defined and before flags are accessed by the program. +func Parse() { + // Ignore errors; CommandLine is set for ExitOnError. + CommandLine.Parse(os.Args[1:]) +} + +// ParseAll parses the command-line flags from os.Args[1:] and called fn for each. +// The arguments for fn are flag and value. Must be called after all flags are +// defined and before flags are accessed by the program. +func ParseAll(fn func(flag *Flag, value string) error) { + // Ignore errors; CommandLine is set for ExitOnError. + CommandLine.ParseAll(os.Args[1:], fn) +} + +// SetInterspersed sets whether to support interspersed option/non-option arguments. +func SetInterspersed(interspersed bool) { + CommandLine.SetInterspersed(interspersed) +} + +// Parsed returns true if the command-line flags have been parsed. +func Parsed() bool { + return CommandLine.Parsed() +} + +// CommandLine is the default set of command-line flags, parsed from os.Args. +var CommandLine = NewFlagSet(os.Args[0], ExitOnError) + +// NewFlagSet returns a new, empty flag set with the specified name, +// error handling property and SortFlags set to true. +func NewFlagSet(name string, errorHandling ErrorHandling) *FlagSet { + f := &FlagSet{ + name: name, + errorHandling: errorHandling, + argsLenAtDash: -1, + interspersed: true, + SortFlags: true, + } + return f +} + +// SetInterspersed sets whether to support interspersed option/non-option arguments. +func (f *FlagSet) SetInterspersed(interspersed bool) { + f.interspersed = interspersed +} + +// Init sets the name and error handling property for a flag set. +// By default, the zero FlagSet uses an empty name and the +// ContinueOnError error handling policy. 
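With ContinueOnError, Parse above hands errors back to the caller instead of exiting, and returns ErrHelp when --help or -h is requested but no help flag is defined. A brief, hypothetical sketch of that error handling (flag names invented):

package main

import (
	"fmt"
	"os"

	"github.com/spf13/pflag"
)

func main() {
	fs := pflag.NewFlagSet("serve", pflag.ContinueOnError)
	port := fs.IntP("port", "p", 8080, "port to listen on")

	if err := fs.Parse(os.Args[1:]); err != nil {
		if err == pflag.ErrHelp { // --help or -h was given; usage already printed
			os.Exit(0)
		}
		fmt.Fprintln(os.Stderr, err)
		os.Exit(2)
	}
	fmt.Println("listening on port", *port)
}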
+func (f *FlagSet) Init(name string, errorHandling ErrorHandling) { + f.name = name + f.errorHandling = errorHandling + f.argsLenAtDash = -1 +} diff --git a/vendor/github.com/spf13/pflag/float32.go b/vendor/github.com/spf13/pflag/float32.go new file mode 100644 index 000000000..a243f81f7 --- /dev/null +++ b/vendor/github.com/spf13/pflag/float32.go @@ -0,0 +1,88 @@ +package pflag + +import "strconv" + +// -- float32 Value +type float32Value float32 + +func newFloat32Value(val float32, p *float32) *float32Value { + *p = val + return (*float32Value)(p) +} + +func (f *float32Value) Set(s string) error { + v, err := strconv.ParseFloat(s, 32) + *f = float32Value(v) + return err +} + +func (f *float32Value) Type() string { + return "float32" +} + +func (f *float32Value) String() string { return strconv.FormatFloat(float64(*f), 'g', -1, 32) } + +func float32Conv(sval string) (interface{}, error) { + v, err := strconv.ParseFloat(sval, 32) + if err != nil { + return 0, err + } + return float32(v), nil +} + +// GetFloat32 return the float32 value of a flag with the given name +func (f *FlagSet) GetFloat32(name string) (float32, error) { + val, err := f.getFlagType(name, "float32", float32Conv) + if err != nil { + return 0, err + } + return val.(float32), nil +} + +// Float32Var defines a float32 flag with specified name, default value, and usage string. +// The argument p points to a float32 variable in which to store the value of the flag. +func (f *FlagSet) Float32Var(p *float32, name string, value float32, usage string) { + f.VarP(newFloat32Value(value, p), name, "", usage) +} + +// Float32VarP is like Float32Var, but accepts a shorthand letter that can be used after a single dash. +func (f *FlagSet) Float32VarP(p *float32, name, shorthand string, value float32, usage string) { + f.VarP(newFloat32Value(value, p), name, shorthand, usage) +} + +// Float32Var defines a float32 flag with specified name, default value, and usage string. +// The argument p points to a float32 variable in which to store the value of the flag. +func Float32Var(p *float32, name string, value float32, usage string) { + CommandLine.VarP(newFloat32Value(value, p), name, "", usage) +} + +// Float32VarP is like Float32Var, but accepts a shorthand letter that can be used after a single dash. +func Float32VarP(p *float32, name, shorthand string, value float32, usage string) { + CommandLine.VarP(newFloat32Value(value, p), name, shorthand, usage) +} + +// Float32 defines a float32 flag with specified name, default value, and usage string. +// The return value is the address of a float32 variable that stores the value of the flag. +func (f *FlagSet) Float32(name string, value float32, usage string) *float32 { + p := new(float32) + f.Float32VarP(p, name, "", value, usage) + return p +} + +// Float32P is like Float32, but accepts a shorthand letter that can be used after a single dash. +func (f *FlagSet) Float32P(name, shorthand string, value float32, usage string) *float32 { + p := new(float32) + f.Float32VarP(p, name, shorthand, value, usage) + return p +} + +// Float32 defines a float32 flag with specified name, default value, and usage string. +// The return value is the address of a float32 variable that stores the value of the flag. +func Float32(name string, value float32, usage string) *float32 { + return CommandLine.Float32P(name, "", value, usage) +} + +// Float32P is like Float32, but accepts a shorthand letter that can be used after a single dash. 
+func Float32P(name, shorthand string, value float32, usage string) *float32 { + return CommandLine.Float32P(name, shorthand, value, usage) +} diff --git a/vendor/github.com/spf13/pflag/float32_slice.go b/vendor/github.com/spf13/pflag/float32_slice.go new file mode 100644 index 000000000..caa352741 --- /dev/null +++ b/vendor/github.com/spf13/pflag/float32_slice.go @@ -0,0 +1,174 @@ +package pflag + +import ( + "fmt" + "strconv" + "strings" +) + +// -- float32Slice Value +type float32SliceValue struct { + value *[]float32 + changed bool +} + +func newFloat32SliceValue(val []float32, p *[]float32) *float32SliceValue { + isv := new(float32SliceValue) + isv.value = p + *isv.value = val + return isv +} + +func (s *float32SliceValue) Set(val string) error { + ss := strings.Split(val, ",") + out := make([]float32, len(ss)) + for i, d := range ss { + var err error + var temp64 float64 + temp64, err = strconv.ParseFloat(d, 32) + if err != nil { + return err + } + out[i] = float32(temp64) + + } + if !s.changed { + *s.value = out + } else { + *s.value = append(*s.value, out...) + } + s.changed = true + return nil +} + +func (s *float32SliceValue) Type() string { + return "float32Slice" +} + +func (s *float32SliceValue) String() string { + out := make([]string, len(*s.value)) + for i, d := range *s.value { + out[i] = fmt.Sprintf("%f", d) + } + return "[" + strings.Join(out, ",") + "]" +} + +func (s *float32SliceValue) fromString(val string) (float32, error) { + t64, err := strconv.ParseFloat(val, 32) + if err != nil { + return 0, err + } + return float32(t64), nil +} + +func (s *float32SliceValue) toString(val float32) string { + return fmt.Sprintf("%f", val) +} + +func (s *float32SliceValue) Append(val string) error { + i, err := s.fromString(val) + if err != nil { + return err + } + *s.value = append(*s.value, i) + return nil +} + +func (s *float32SliceValue) Replace(val []string) error { + out := make([]float32, len(val)) + for i, d := range val { + var err error + out[i], err = s.fromString(d) + if err != nil { + return err + } + } + *s.value = out + return nil +} + +func (s *float32SliceValue) GetSlice() []string { + out := make([]string, len(*s.value)) + for i, d := range *s.value { + out[i] = s.toString(d) + } + return out +} + +func float32SliceConv(val string) (interface{}, error) { + val = strings.Trim(val, "[]") + // Empty string would cause a slice with one (empty) entry + if len(val) == 0 { + return []float32{}, nil + } + ss := strings.Split(val, ",") + out := make([]float32, len(ss)) + for i, d := range ss { + var err error + var temp64 float64 + temp64, err = strconv.ParseFloat(d, 32) + if err != nil { + return nil, err + } + out[i] = float32(temp64) + + } + return out, nil +} + +// GetFloat32Slice return the []float32 value of a flag with the given name +func (f *FlagSet) GetFloat32Slice(name string) ([]float32, error) { + val, err := f.getFlagType(name, "float32Slice", float32SliceConv) + if err != nil { + return []float32{}, err + } + return val.([]float32), nil +} + +// Float32SliceVar defines a float32Slice flag with specified name, default value, and usage string. +// The argument p points to a []float32 variable in which to store the value of the flag. +func (f *FlagSet) Float32SliceVar(p *[]float32, name string, value []float32, usage string) { + f.VarP(newFloat32SliceValue(value, p), name, "", usage) +} + +// Float32SliceVarP is like Float32SliceVar, but accepts a shorthand letter that can be used after a single dash. 
+func (f *FlagSet) Float32SliceVarP(p *[]float32, name, shorthand string, value []float32, usage string) { + f.VarP(newFloat32SliceValue(value, p), name, shorthand, usage) +} + +// Float32SliceVar defines a float32[] flag with specified name, default value, and usage string. +// The argument p points to a float32[] variable in which to store the value of the flag. +func Float32SliceVar(p *[]float32, name string, value []float32, usage string) { + CommandLine.VarP(newFloat32SliceValue(value, p), name, "", usage) +} + +// Float32SliceVarP is like Float32SliceVar, but accepts a shorthand letter that can be used after a single dash. +func Float32SliceVarP(p *[]float32, name, shorthand string, value []float32, usage string) { + CommandLine.VarP(newFloat32SliceValue(value, p), name, shorthand, usage) +} + +// Float32Slice defines a []float32 flag with specified name, default value, and usage string. +// The return value is the address of a []float32 variable that stores the value of the flag. +func (f *FlagSet) Float32Slice(name string, value []float32, usage string) *[]float32 { + p := []float32{} + f.Float32SliceVarP(&p, name, "", value, usage) + return &p +} + +// Float32SliceP is like Float32Slice, but accepts a shorthand letter that can be used after a single dash. +func (f *FlagSet) Float32SliceP(name, shorthand string, value []float32, usage string) *[]float32 { + p := []float32{} + f.Float32SliceVarP(&p, name, shorthand, value, usage) + return &p +} + +// Float32Slice defines a []float32 flag with specified name, default value, and usage string. +// The return value is the address of a []float32 variable that stores the value of the flag. +func Float32Slice(name string, value []float32, usage string) *[]float32 { + return CommandLine.Float32SliceP(name, "", value, usage) +} + +// Float32SliceP is like Float32Slice, but accepts a shorthand letter that can be used after a single dash. +func Float32SliceP(name, shorthand string, value []float32, usage string) *[]float32 { + return CommandLine.Float32SliceP(name, shorthand, value, usage) +} diff --git a/vendor/github.com/spf13/pflag/float64.go b/vendor/github.com/spf13/pflag/float64.go new file mode 100644 index 000000000..04b5492a7 --- /dev/null +++ b/vendor/github.com/spf13/pflag/float64.go @@ -0,0 +1,84 @@ +package pflag + +import "strconv" + +// -- float64 Value +type float64Value float64 + +func newFloat64Value(val float64, p *float64) *float64Value { + *p = val + return (*float64Value)(p) +} + +func (f *float64Value) Set(s string) error { + v, err := strconv.ParseFloat(s, 64) + *f = float64Value(v) + return err +} + +func (f *float64Value) Type() string { + return "float64" +} + +func (f *float64Value) String() string { return strconv.FormatFloat(float64(*f), 'g', -1, 64) } + +func float64Conv(sval string) (interface{}, error) { + return strconv.ParseFloat(sval, 64) +} + +// GetFloat64 return the float64 value of a flag with the given name +func (f *FlagSet) GetFloat64(name string) (float64, error) { + val, err := f.getFlagType(name, "float64", float64Conv) + if err != nil { + return 0, err + } + return val.(float64), nil +} + +// Float64Var defines a float64 flag with specified name, default value, and usage string. +// The argument p points to a float64 variable in which to store the value of the flag. 
+func (f *FlagSet) Float64Var(p *float64, name string, value float64, usage string) { + f.VarP(newFloat64Value(value, p), name, "", usage) +} + +// Float64VarP is like Float64Var, but accepts a shorthand letter that can be used after a single dash. +func (f *FlagSet) Float64VarP(p *float64, name, shorthand string, value float64, usage string) { + f.VarP(newFloat64Value(value, p), name, shorthand, usage) +} + +// Float64Var defines a float64 flag with specified name, default value, and usage string. +// The argument p points to a float64 variable in which to store the value of the flag. +func Float64Var(p *float64, name string, value float64, usage string) { + CommandLine.VarP(newFloat64Value(value, p), name, "", usage) +} + +// Float64VarP is like Float64Var, but accepts a shorthand letter that can be used after a single dash. +func Float64VarP(p *float64, name, shorthand string, value float64, usage string) { + CommandLine.VarP(newFloat64Value(value, p), name, shorthand, usage) +} + +// Float64 defines a float64 flag with specified name, default value, and usage string. +// The return value is the address of a float64 variable that stores the value of the flag. +func (f *FlagSet) Float64(name string, value float64, usage string) *float64 { + p := new(float64) + f.Float64VarP(p, name, "", value, usage) + return p +} + +// Float64P is like Float64, but accepts a shorthand letter that can be used after a single dash. +func (f *FlagSet) Float64P(name, shorthand string, value float64, usage string) *float64 { + p := new(float64) + f.Float64VarP(p, name, shorthand, value, usage) + return p +} + +// Float64 defines a float64 flag with specified name, default value, and usage string. +// The return value is the address of a float64 variable that stores the value of the flag. +func Float64(name string, value float64, usage string) *float64 { + return CommandLine.Float64P(name, "", value, usage) +} + +// Float64P is like Float64, but accepts a shorthand letter that can be used after a single dash. +func Float64P(name, shorthand string, value float64, usage string) *float64 { + return CommandLine.Float64P(name, shorthand, value, usage) +} diff --git a/vendor/github.com/spf13/pflag/float64_slice.go b/vendor/github.com/spf13/pflag/float64_slice.go new file mode 100644 index 000000000..85bf3073d --- /dev/null +++ b/vendor/github.com/spf13/pflag/float64_slice.go @@ -0,0 +1,166 @@ +package pflag + +import ( + "fmt" + "strconv" + "strings" +) + +// -- float64Slice Value +type float64SliceValue struct { + value *[]float64 + changed bool +} + +func newFloat64SliceValue(val []float64, p *[]float64) *float64SliceValue { + isv := new(float64SliceValue) + isv.value = p + *isv.value = val + return isv +} + +func (s *float64SliceValue) Set(val string) error { + ss := strings.Split(val, ",") + out := make([]float64, len(ss)) + for i, d := range ss { + var err error + out[i], err = strconv.ParseFloat(d, 64) + if err != nil { + return err + } + + } + if !s.changed { + *s.value = out + } else { + *s.value = append(*s.value, out...) 
+ } + s.changed = true + return nil +} + +func (s *float64SliceValue) Type() string { + return "float64Slice" +} + +func (s *float64SliceValue) String() string { + out := make([]string, len(*s.value)) + for i, d := range *s.value { + out[i] = fmt.Sprintf("%f", d) + } + return "[" + strings.Join(out, ",") + "]" +} + +func (s *float64SliceValue) fromString(val string) (float64, error) { + return strconv.ParseFloat(val, 64) +} + +func (s *float64SliceValue) toString(val float64) string { + return fmt.Sprintf("%f", val) +} + +func (s *float64SliceValue) Append(val string) error { + i, err := s.fromString(val) + if err != nil { + return err + } + *s.value = append(*s.value, i) + return nil +} + +func (s *float64SliceValue) Replace(val []string) error { + out := make([]float64, len(val)) + for i, d := range val { + var err error + out[i], err = s.fromString(d) + if err != nil { + return err + } + } + *s.value = out + return nil +} + +func (s *float64SliceValue) GetSlice() []string { + out := make([]string, len(*s.value)) + for i, d := range *s.value { + out[i] = s.toString(d) + } + return out +} + +func float64SliceConv(val string) (interface{}, error) { + val = strings.Trim(val, "[]") + // Empty string would cause a slice with one (empty) entry + if len(val) == 0 { + return []float64{}, nil + } + ss := strings.Split(val, ",") + out := make([]float64, len(ss)) + for i, d := range ss { + var err error + out[i], err = strconv.ParseFloat(d, 64) + if err != nil { + return nil, err + } + + } + return out, nil +} + +// GetFloat64Slice return the []float64 value of a flag with the given name +func (f *FlagSet) GetFloat64Slice(name string) ([]float64, error) { + val, err := f.getFlagType(name, "float64Slice", float64SliceConv) + if err != nil { + return []float64{}, err + } + return val.([]float64), nil +} + +// Float64SliceVar defines a float64Slice flag with specified name, default value, and usage string. +// The argument p points to a []float64 variable in which to store the value of the flag. +func (f *FlagSet) Float64SliceVar(p *[]float64, name string, value []float64, usage string) { + f.VarP(newFloat64SliceValue(value, p), name, "", usage) +} + +// Float64SliceVarP is like Float64SliceVar, but accepts a shorthand letter that can be used after a single dash. +func (f *FlagSet) Float64SliceVarP(p *[]float64, name, shorthand string, value []float64, usage string) { + f.VarP(newFloat64SliceValue(value, p), name, shorthand, usage) +} + +// Float64SliceVar defines a float64[] flag with specified name, default value, and usage string. +// The argument p points to a float64[] variable in which to store the value of the flag. +func Float64SliceVar(p *[]float64, name string, value []float64, usage string) { + CommandLine.VarP(newFloat64SliceValue(value, p), name, "", usage) +} + +// Float64SliceVarP is like Float64SliceVar, but accepts a shorthand letter that can be used after a single dash. +func Float64SliceVarP(p *[]float64, name, shorthand string, value []float64, usage string) { + CommandLine.VarP(newFloat64SliceValue(value, p), name, shorthand, usage) +} + +// Float64Slice defines a []float64 flag with specified name, default value, and usage string. +// The return value is the address of a []float64 variable that stores the value of the flag. 
+func (f *FlagSet) Float64Slice(name string, value []float64, usage string) *[]float64 { + p := []float64{} + f.Float64SliceVarP(&p, name, "", value, usage) + return &p +} + +// Float64SliceP is like Float64Slice, but accepts a shorthand letter that can be used after a single dash. +func (f *FlagSet) Float64SliceP(name, shorthand string, value []float64, usage string) *[]float64 { + p := []float64{} + f.Float64SliceVarP(&p, name, shorthand, value, usage) + return &p +} + +// Float64Slice defines a []float64 flag with specified name, default value, and usage string. +// The return value is the address of a []float64 variable that stores the value of the flag. +func Float64Slice(name string, value []float64, usage string) *[]float64 { + return CommandLine.Float64SliceP(name, "", value, usage) +} + +// Float64SliceP is like Float64Slice, but accepts a shorthand letter that can be used after a single dash. +func Float64SliceP(name, shorthand string, value []float64, usage string) *[]float64 { + return CommandLine.Float64SliceP(name, shorthand, value, usage) +} diff --git a/vendor/github.com/spf13/pflag/go.mod b/vendor/github.com/spf13/pflag/go.mod new file mode 100644 index 000000000..b2287eec1 --- /dev/null +++ b/vendor/github.com/spf13/pflag/go.mod @@ -0,0 +1,3 @@ +module github.com/spf13/pflag + +go 1.12 diff --git a/vendor/github.com/spf13/pflag/go.sum b/vendor/github.com/spf13/pflag/go.sum new file mode 100644 index 000000000..e69de29bb diff --git a/vendor/github.com/spf13/pflag/golangflag.go b/vendor/github.com/spf13/pflag/golangflag.go new file mode 100644 index 000000000..d3dd72b7f --- /dev/null +++ b/vendor/github.com/spf13/pflag/golangflag.go @@ -0,0 +1,105 @@ +// Copyright 2009 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package pflag + +import ( + goflag "flag" + "reflect" + "strings" +) + +// flagValueWrapper implements pflag.Value around a flag.Value. The main +// difference here is the addition of the Type method that returns a string +// name of the type. As this is generally unknown, we approximate that with +// reflection. +type flagValueWrapper struct { + inner goflag.Value + flagType string +} + +// We are just copying the boolFlag interface out of goflag as that is what +// they use to decide if a flag should get "true" when no arg is given. +type goBoolFlag interface { + goflag.Value + IsBoolFlag() bool +} + +func wrapFlagValue(v goflag.Value) Value { + // If the flag.Value happens to also be a pflag.Value, just use it directly. + if pv, ok := v.(Value); ok { + return pv + } + + pv := &flagValueWrapper{ + inner: v, + } + + t := reflect.TypeOf(v) + if t.Kind() == reflect.Interface || t.Kind() == reflect.Ptr { + t = t.Elem() + } + + pv.flagType = strings.TrimSuffix(t.Name(), "Value") + return pv +} + +func (v *flagValueWrapper) String() string { + return v.inner.String() +} + +func (v *flagValueWrapper) Set(s string) error { + return v.inner.Set(s) +} + +func (v *flagValueWrapper) Type() string { + return v.flagType +} + +// PFlagFromGoFlag will return a *pflag.Flag given a *flag.Flag +// If the *flag.Flag.Name was a single character (ex: `v`) it will be accessiblei +// with both `-v` and `--v` in flags. If the golang flag was more than a single +// character (ex: `verbose`) it will only be accessible via `--verbose` +func PFlagFromGoFlag(goflag *goflag.Flag) *Flag { + // Remember the default value as a string; it won't change. 
+ flag := &Flag{ + Name: goflag.Name, + Usage: goflag.Usage, + Value: wrapFlagValue(goflag.Value), + // Looks like golang flags don't set DefValue correctly :-( + //DefValue: goflag.DefValue, + DefValue: goflag.Value.String(), + } + // Ex: if the golang flag was -v, allow both -v and --v to work + if len(flag.Name) == 1 { + flag.Shorthand = flag.Name + } + if fv, ok := goflag.Value.(goBoolFlag); ok && fv.IsBoolFlag() { + flag.NoOptDefVal = "true" + } + return flag +} + +// AddGoFlag will add the given *flag.Flag to the pflag.FlagSet +func (f *FlagSet) AddGoFlag(goflag *goflag.Flag) { + if f.Lookup(goflag.Name) != nil { + return + } + newflag := PFlagFromGoFlag(goflag) + f.AddFlag(newflag) +} + +// AddGoFlagSet will add the given *flag.FlagSet to the pflag.FlagSet +func (f *FlagSet) AddGoFlagSet(newSet *goflag.FlagSet) { + if newSet == nil { + return + } + newSet.VisitAll(func(goflag *goflag.Flag) { + f.AddGoFlag(goflag) + }) + if f.addedGoFlagSets == nil { + f.addedGoFlagSets = make([]*goflag.FlagSet, 0) + } + f.addedGoFlagSets = append(f.addedGoFlagSets, newSet) +} diff --git a/vendor/github.com/spf13/pflag/int.go b/vendor/github.com/spf13/pflag/int.go new file mode 100644 index 000000000..1474b89df --- /dev/null +++ b/vendor/github.com/spf13/pflag/int.go @@ -0,0 +1,84 @@ +package pflag + +import "strconv" + +// -- int Value +type intValue int + +func newIntValue(val int, p *int) *intValue { + *p = val + return (*intValue)(p) +} + +func (i *intValue) Set(s string) error { + v, err := strconv.ParseInt(s, 0, 64) + *i = intValue(v) + return err +} + +func (i *intValue) Type() string { + return "int" +} + +func (i *intValue) String() string { return strconv.Itoa(int(*i)) } + +func intConv(sval string) (interface{}, error) { + return strconv.Atoi(sval) +} + +// GetInt return the int value of a flag with the given name +func (f *FlagSet) GetInt(name string) (int, error) { + val, err := f.getFlagType(name, "int", intConv) + if err != nil { + return 0, err + } + return val.(int), nil +} + +// IntVar defines an int flag with specified name, default value, and usage string. +// The argument p points to an int variable in which to store the value of the flag. +func (f *FlagSet) IntVar(p *int, name string, value int, usage string) { + f.VarP(newIntValue(value, p), name, "", usage) +} + +// IntVarP is like IntVar, but accepts a shorthand letter that can be used after a single dash. +func (f *FlagSet) IntVarP(p *int, name, shorthand string, value int, usage string) { + f.VarP(newIntValue(value, p), name, shorthand, usage) +} + +// IntVar defines an int flag with specified name, default value, and usage string. +// The argument p points to an int variable in which to store the value of the flag. +func IntVar(p *int, name string, value int, usage string) { + CommandLine.VarP(newIntValue(value, p), name, "", usage) +} + +// IntVarP is like IntVar, but accepts a shorthand letter that can be used after a single dash. +func IntVarP(p *int, name, shorthand string, value int, usage string) { + CommandLine.VarP(newIntValue(value, p), name, shorthand, usage) +} + +// Int defines an int flag with specified name, default value, and usage string. +// The return value is the address of an int variable that stores the value of the flag. +func (f *FlagSet) Int(name string, value int, usage string) *int { + p := new(int) + f.IntVarP(p, name, "", value, usage) + return p +} + +// IntP is like Int, but accepts a shorthand letter that can be used after a single dash. 
+func (f *FlagSet) IntP(name, shorthand string, value int, usage string) *int { + p := new(int) + f.IntVarP(p, name, shorthand, value, usage) + return p +} + +// Int defines an int flag with specified name, default value, and usage string. +// The return value is the address of an int variable that stores the value of the flag. +func Int(name string, value int, usage string) *int { + return CommandLine.IntP(name, "", value, usage) +} + +// IntP is like Int, but accepts a shorthand letter that can be used after a single dash. +func IntP(name, shorthand string, value int, usage string) *int { + return CommandLine.IntP(name, shorthand, value, usage) +} diff --git a/vendor/github.com/spf13/pflag/int16.go b/vendor/github.com/spf13/pflag/int16.go new file mode 100644 index 000000000..f1a01d05e --- /dev/null +++ b/vendor/github.com/spf13/pflag/int16.go @@ -0,0 +1,88 @@ +package pflag + +import "strconv" + +// -- int16 Value +type int16Value int16 + +func newInt16Value(val int16, p *int16) *int16Value { + *p = val + return (*int16Value)(p) +} + +func (i *int16Value) Set(s string) error { + v, err := strconv.ParseInt(s, 0, 16) + *i = int16Value(v) + return err +} + +func (i *int16Value) Type() string { + return "int16" +} + +func (i *int16Value) String() string { return strconv.FormatInt(int64(*i), 10) } + +func int16Conv(sval string) (interface{}, error) { + v, err := strconv.ParseInt(sval, 0, 16) + if err != nil { + return 0, err + } + return int16(v), nil +} + +// GetInt16 returns the int16 value of a flag with the given name +func (f *FlagSet) GetInt16(name string) (int16, error) { + val, err := f.getFlagType(name, "int16", int16Conv) + if err != nil { + return 0, err + } + return val.(int16), nil +} + +// Int16Var defines an int16 flag with specified name, default value, and usage string. +// The argument p points to an int16 variable in which to store the value of the flag. +func (f *FlagSet) Int16Var(p *int16, name string, value int16, usage string) { + f.VarP(newInt16Value(value, p), name, "", usage) +} + +// Int16VarP is like Int16Var, but accepts a shorthand letter that can be used after a single dash. +func (f *FlagSet) Int16VarP(p *int16, name, shorthand string, value int16, usage string) { + f.VarP(newInt16Value(value, p), name, shorthand, usage) +} + +// Int16Var defines an int16 flag with specified name, default value, and usage string. +// The argument p points to an int16 variable in which to store the value of the flag. +func Int16Var(p *int16, name string, value int16, usage string) { + CommandLine.VarP(newInt16Value(value, p), name, "", usage) +} + +// Int16VarP is like Int16Var, but accepts a shorthand letter that can be used after a single dash. +func Int16VarP(p *int16, name, shorthand string, value int16, usage string) { + CommandLine.VarP(newInt16Value(value, p), name, shorthand, usage) +} + +// Int16 defines an int16 flag with specified name, default value, and usage string. +// The return value is the address of an int16 variable that stores the value of the flag. +func (f *FlagSet) Int16(name string, value int16, usage string) *int16 { + p := new(int16) + f.Int16VarP(p, name, "", value, usage) + return p +} + +// Int16P is like Int16, but accepts a shorthand letter that can be used after a single dash. +func (f *FlagSet) Int16P(name, shorthand string, value int16, usage string) *int16 { + p := new(int16) + f.Int16VarP(p, name, shorthand, value, usage) + return p +} + +// Int16 defines an int16 flag with specified name, default value, and usage string. 
+// The return value is the address of an int16 variable that stores the value of the flag. +func Int16(name string, value int16, usage string) *int16 { + return CommandLine.Int16P(name, "", value, usage) +} + +// Int16P is like Int16, but accepts a shorthand letter that can be used after a single dash. +func Int16P(name, shorthand string, value int16, usage string) *int16 { + return CommandLine.Int16P(name, shorthand, value, usage) +} diff --git a/vendor/github.com/spf13/pflag/int32.go b/vendor/github.com/spf13/pflag/int32.go new file mode 100644 index 000000000..9b95944f0 --- /dev/null +++ b/vendor/github.com/spf13/pflag/int32.go @@ -0,0 +1,88 @@ +package pflag + +import "strconv" + +// -- int32 Value +type int32Value int32 + +func newInt32Value(val int32, p *int32) *int32Value { + *p = val + return (*int32Value)(p) +} + +func (i *int32Value) Set(s string) error { + v, err := strconv.ParseInt(s, 0, 32) + *i = int32Value(v) + return err +} + +func (i *int32Value) Type() string { + return "int32" +} + +func (i *int32Value) String() string { return strconv.FormatInt(int64(*i), 10) } + +func int32Conv(sval string) (interface{}, error) { + v, err := strconv.ParseInt(sval, 0, 32) + if err != nil { + return 0, err + } + return int32(v), nil +} + +// GetInt32 return the int32 value of a flag with the given name +func (f *FlagSet) GetInt32(name string) (int32, error) { + val, err := f.getFlagType(name, "int32", int32Conv) + if err != nil { + return 0, err + } + return val.(int32), nil +} + +// Int32Var defines an int32 flag with specified name, default value, and usage string. +// The argument p points to an int32 variable in which to store the value of the flag. +func (f *FlagSet) Int32Var(p *int32, name string, value int32, usage string) { + f.VarP(newInt32Value(value, p), name, "", usage) +} + +// Int32VarP is like Int32Var, but accepts a shorthand letter that can be used after a single dash. +func (f *FlagSet) Int32VarP(p *int32, name, shorthand string, value int32, usage string) { + f.VarP(newInt32Value(value, p), name, shorthand, usage) +} + +// Int32Var defines an int32 flag with specified name, default value, and usage string. +// The argument p points to an int32 variable in which to store the value of the flag. +func Int32Var(p *int32, name string, value int32, usage string) { + CommandLine.VarP(newInt32Value(value, p), name, "", usage) +} + +// Int32VarP is like Int32Var, but accepts a shorthand letter that can be used after a single dash. +func Int32VarP(p *int32, name, shorthand string, value int32, usage string) { + CommandLine.VarP(newInt32Value(value, p), name, shorthand, usage) +} + +// Int32 defines an int32 flag with specified name, default value, and usage string. +// The return value is the address of an int32 variable that stores the value of the flag. +func (f *FlagSet) Int32(name string, value int32, usage string) *int32 { + p := new(int32) + f.Int32VarP(p, name, "", value, usage) + return p +} + +// Int32P is like Int32, but accepts a shorthand letter that can be used after a single dash. +func (f *FlagSet) Int32P(name, shorthand string, value int32, usage string) *int32 { + p := new(int32) + f.Int32VarP(p, name, shorthand, value, usage) + return p +} + +// Int32 defines an int32 flag with specified name, default value, and usage string. +// The return value is the address of an int32 variable that stores the value of the flag. 
+func Int32(name string, value int32, usage string) *int32 { + return CommandLine.Int32P(name, "", value, usage) +} + +// Int32P is like Int32, but accepts a shorthand letter that can be used after a single dash. +func Int32P(name, shorthand string, value int32, usage string) *int32 { + return CommandLine.Int32P(name, shorthand, value, usage) +} diff --git a/vendor/github.com/spf13/pflag/int32_slice.go b/vendor/github.com/spf13/pflag/int32_slice.go new file mode 100644 index 000000000..ff128ff06 --- /dev/null +++ b/vendor/github.com/spf13/pflag/int32_slice.go @@ -0,0 +1,174 @@ +package pflag + +import ( + "fmt" + "strconv" + "strings" +) + +// -- int32Slice Value +type int32SliceValue struct { + value *[]int32 + changed bool +} + +func newInt32SliceValue(val []int32, p *[]int32) *int32SliceValue { + isv := new(int32SliceValue) + isv.value = p + *isv.value = val + return isv +} + +func (s *int32SliceValue) Set(val string) error { + ss := strings.Split(val, ",") + out := make([]int32, len(ss)) + for i, d := range ss { + var err error + var temp64 int64 + temp64, err = strconv.ParseInt(d, 0, 32) + if err != nil { + return err + } + out[i] = int32(temp64) + + } + if !s.changed { + *s.value = out + } else { + *s.value = append(*s.value, out...) + } + s.changed = true + return nil +} + +func (s *int32SliceValue) Type() string { + return "int32Slice" +} + +func (s *int32SliceValue) String() string { + out := make([]string, len(*s.value)) + for i, d := range *s.value { + out[i] = fmt.Sprintf("%d", d) + } + return "[" + strings.Join(out, ",") + "]" +} + +func (s *int32SliceValue) fromString(val string) (int32, error) { + t64, err := strconv.ParseInt(val, 0, 32) + if err != nil { + return 0, err + } + return int32(t64), nil +} + +func (s *int32SliceValue) toString(val int32) string { + return fmt.Sprintf("%d", val) +} + +func (s *int32SliceValue) Append(val string) error { + i, err := s.fromString(val) + if err != nil { + return err + } + *s.value = append(*s.value, i) + return nil +} + +func (s *int32SliceValue) Replace(val []string) error { + out := make([]int32, len(val)) + for i, d := range val { + var err error + out[i], err = s.fromString(d) + if err != nil { + return err + } + } + *s.value = out + return nil +} + +func (s *int32SliceValue) GetSlice() []string { + out := make([]string, len(*s.value)) + for i, d := range *s.value { + out[i] = s.toString(d) + } + return out +} + +func int32SliceConv(val string) (interface{}, error) { + val = strings.Trim(val, "[]") + // Empty string would cause a slice with one (empty) entry + if len(val) == 0 { + return []int32{}, nil + } + ss := strings.Split(val, ",") + out := make([]int32, len(ss)) + for i, d := range ss { + var err error + var temp64 int64 + temp64, err = strconv.ParseInt(d, 0, 32) + if err != nil { + return nil, err + } + out[i] = int32(temp64) + + } + return out, nil +} + +// GetInt32Slice return the []int32 value of a flag with the given name +func (f *FlagSet) GetInt32Slice(name string) ([]int32, error) { + val, err := f.getFlagType(name, "int32Slice", int32SliceConv) + if err != nil { + return []int32{}, err + } + return val.([]int32), nil +} + +// Int32SliceVar defines a int32Slice flag with specified name, default value, and usage string. +// The argument p points to a []int32 variable in which to store the value of the flag. 
+func (f *FlagSet) Int32SliceVar(p *[]int32, name string, value []int32, usage string) { + f.VarP(newInt32SliceValue(value, p), name, "", usage) +} + +// Int32SliceVarP is like Int32SliceVar, but accepts a shorthand letter that can be used after a single dash. +func (f *FlagSet) Int32SliceVarP(p *[]int32, name, shorthand string, value []int32, usage string) { + f.VarP(newInt32SliceValue(value, p), name, shorthand, usage) +} + +// Int32SliceVar defines a int32[] flag with specified name, default value, and usage string. +// The argument p points to a int32[] variable in which to store the value of the flag. +func Int32SliceVar(p *[]int32, name string, value []int32, usage string) { + CommandLine.VarP(newInt32SliceValue(value, p), name, "", usage) +} + +// Int32SliceVarP is like Int32SliceVar, but accepts a shorthand letter that can be used after a single dash. +func Int32SliceVarP(p *[]int32, name, shorthand string, value []int32, usage string) { + CommandLine.VarP(newInt32SliceValue(value, p), name, shorthand, usage) +} + +// Int32Slice defines a []int32 flag with specified name, default value, and usage string. +// The return value is the address of a []int32 variable that stores the value of the flag. +func (f *FlagSet) Int32Slice(name string, value []int32, usage string) *[]int32 { + p := []int32{} + f.Int32SliceVarP(&p, name, "", value, usage) + return &p +} + +// Int32SliceP is like Int32Slice, but accepts a shorthand letter that can be used after a single dash. +func (f *FlagSet) Int32SliceP(name, shorthand string, value []int32, usage string) *[]int32 { + p := []int32{} + f.Int32SliceVarP(&p, name, shorthand, value, usage) + return &p +} + +// Int32Slice defines a []int32 flag with specified name, default value, and usage string. +// The return value is the address of a []int32 variable that stores the value of the flag. +func Int32Slice(name string, value []int32, usage string) *[]int32 { + return CommandLine.Int32SliceP(name, "", value, usage) +} + +// Int32SliceP is like Int32Slice, but accepts a shorthand letter that can be used after a single dash. +func Int32SliceP(name, shorthand string, value []int32, usage string) *[]int32 { + return CommandLine.Int32SliceP(name, shorthand, value, usage) +} diff --git a/vendor/github.com/spf13/pflag/int64.go b/vendor/github.com/spf13/pflag/int64.go new file mode 100644 index 000000000..0026d781d --- /dev/null +++ b/vendor/github.com/spf13/pflag/int64.go @@ -0,0 +1,84 @@ +package pflag + +import "strconv" + +// -- int64 Value +type int64Value int64 + +func newInt64Value(val int64, p *int64) *int64Value { + *p = val + return (*int64Value)(p) +} + +func (i *int64Value) Set(s string) error { + v, err := strconv.ParseInt(s, 0, 64) + *i = int64Value(v) + return err +} + +func (i *int64Value) Type() string { + return "int64" +} + +func (i *int64Value) String() string { return strconv.FormatInt(int64(*i), 10) } + +func int64Conv(sval string) (interface{}, error) { + return strconv.ParseInt(sval, 0, 64) +} + +// GetInt64 return the int64 value of a flag with the given name +func (f *FlagSet) GetInt64(name string) (int64, error) { + val, err := f.getFlagType(name, "int64", int64Conv) + if err != nil { + return 0, err + } + return val.(int64), nil +} + +// Int64Var defines an int64 flag with specified name, default value, and usage string. +// The argument p points to an int64 variable in which to store the value of the flag. 
+func (f *FlagSet) Int64Var(p *int64, name string, value int64, usage string) { + f.VarP(newInt64Value(value, p), name, "", usage) +} + +// Int64VarP is like Int64Var, but accepts a shorthand letter that can be used after a single dash. +func (f *FlagSet) Int64VarP(p *int64, name, shorthand string, value int64, usage string) { + f.VarP(newInt64Value(value, p), name, shorthand, usage) +} + +// Int64Var defines an int64 flag with specified name, default value, and usage string. +// The argument p points to an int64 variable in which to store the value of the flag. +func Int64Var(p *int64, name string, value int64, usage string) { + CommandLine.VarP(newInt64Value(value, p), name, "", usage) +} + +// Int64VarP is like Int64Var, but accepts a shorthand letter that can be used after a single dash. +func Int64VarP(p *int64, name, shorthand string, value int64, usage string) { + CommandLine.VarP(newInt64Value(value, p), name, shorthand, usage) +} + +// Int64 defines an int64 flag with specified name, default value, and usage string. +// The return value is the address of an int64 variable that stores the value of the flag. +func (f *FlagSet) Int64(name string, value int64, usage string) *int64 { + p := new(int64) + f.Int64VarP(p, name, "", value, usage) + return p +} + +// Int64P is like Int64, but accepts a shorthand letter that can be used after a single dash. +func (f *FlagSet) Int64P(name, shorthand string, value int64, usage string) *int64 { + p := new(int64) + f.Int64VarP(p, name, shorthand, value, usage) + return p +} + +// Int64 defines an int64 flag with specified name, default value, and usage string. +// The return value is the address of an int64 variable that stores the value of the flag. +func Int64(name string, value int64, usage string) *int64 { + return CommandLine.Int64P(name, "", value, usage) +} + +// Int64P is like Int64, but accepts a shorthand letter that can be used after a single dash. +func Int64P(name, shorthand string, value int64, usage string) *int64 { + return CommandLine.Int64P(name, shorthand, value, usage) +} diff --git a/vendor/github.com/spf13/pflag/int64_slice.go b/vendor/github.com/spf13/pflag/int64_slice.go new file mode 100644 index 000000000..25464638f --- /dev/null +++ b/vendor/github.com/spf13/pflag/int64_slice.go @@ -0,0 +1,166 @@ +package pflag + +import ( + "fmt" + "strconv" + "strings" +) + +// -- int64Slice Value +type int64SliceValue struct { + value *[]int64 + changed bool +} + +func newInt64SliceValue(val []int64, p *[]int64) *int64SliceValue { + isv := new(int64SliceValue) + isv.value = p + *isv.value = val + return isv +} + +func (s *int64SliceValue) Set(val string) error { + ss := strings.Split(val, ",") + out := make([]int64, len(ss)) + for i, d := range ss { + var err error + out[i], err = strconv.ParseInt(d, 0, 64) + if err != nil { + return err + } + + } + if !s.changed { + *s.value = out + } else { + *s.value = append(*s.value, out...) 
+ } + s.changed = true + return nil +} + +func (s *int64SliceValue) Type() string { + return "int64Slice" +} + +func (s *int64SliceValue) String() string { + out := make([]string, len(*s.value)) + for i, d := range *s.value { + out[i] = fmt.Sprintf("%d", d) + } + return "[" + strings.Join(out, ",") + "]" +} + +func (s *int64SliceValue) fromString(val string) (int64, error) { + return strconv.ParseInt(val, 0, 64) +} + +func (s *int64SliceValue) toString(val int64) string { + return fmt.Sprintf("%d", val) +} + +func (s *int64SliceValue) Append(val string) error { + i, err := s.fromString(val) + if err != nil { + return err + } + *s.value = append(*s.value, i) + return nil +} + +func (s *int64SliceValue) Replace(val []string) error { + out := make([]int64, len(val)) + for i, d := range val { + var err error + out[i], err = s.fromString(d) + if err != nil { + return err + } + } + *s.value = out + return nil +} + +func (s *int64SliceValue) GetSlice() []string { + out := make([]string, len(*s.value)) + for i, d := range *s.value { + out[i] = s.toString(d) + } + return out +} + +func int64SliceConv(val string) (interface{}, error) { + val = strings.Trim(val, "[]") + // Empty string would cause a slice with one (empty) entry + if len(val) == 0 { + return []int64{}, nil + } + ss := strings.Split(val, ",") + out := make([]int64, len(ss)) + for i, d := range ss { + var err error + out[i], err = strconv.ParseInt(d, 0, 64) + if err != nil { + return nil, err + } + + } + return out, nil +} + +// GetInt64Slice return the []int64 value of a flag with the given name +func (f *FlagSet) GetInt64Slice(name string) ([]int64, error) { + val, err := f.getFlagType(name, "int64Slice", int64SliceConv) + if err != nil { + return []int64{}, err + } + return val.([]int64), nil +} + +// Int64SliceVar defines a int64Slice flag with specified name, default value, and usage string. +// The argument p points to a []int64 variable in which to store the value of the flag. +func (f *FlagSet) Int64SliceVar(p *[]int64, name string, value []int64, usage string) { + f.VarP(newInt64SliceValue(value, p), name, "", usage) +} + +// Int64SliceVarP is like Int64SliceVar, but accepts a shorthand letter that can be used after a single dash. +func (f *FlagSet) Int64SliceVarP(p *[]int64, name, shorthand string, value []int64, usage string) { + f.VarP(newInt64SliceValue(value, p), name, shorthand, usage) +} + +// Int64SliceVar defines a int64[] flag with specified name, default value, and usage string. +// The argument p points to a int64[] variable in which to store the value of the flag. +func Int64SliceVar(p *[]int64, name string, value []int64, usage string) { + CommandLine.VarP(newInt64SliceValue(value, p), name, "", usage) +} + +// Int64SliceVarP is like Int64SliceVar, but accepts a shorthand letter that can be used after a single dash. +func Int64SliceVarP(p *[]int64, name, shorthand string, value []int64, usage string) { + CommandLine.VarP(newInt64SliceValue(value, p), name, shorthand, usage) +} + +// Int64Slice defines a []int64 flag with specified name, default value, and usage string. +// The return value is the address of a []int64 variable that stores the value of the flag. +func (f *FlagSet) Int64Slice(name string, value []int64, usage string) *[]int64 { + p := []int64{} + f.Int64SliceVarP(&p, name, "", value, usage) + return &p +} + +// Int64SliceP is like Int64Slice, but accepts a shorthand letter that can be used after a single dash. 
+func (f *FlagSet) Int64SliceP(name, shorthand string, value []int64, usage string) *[]int64 { + p := []int64{} + f.Int64SliceVarP(&p, name, shorthand, value, usage) + return &p +} + +// Int64Slice defines a []int64 flag with specified name, default value, and usage string. +// The return value is the address of a []int64 variable that stores the value of the flag. +func Int64Slice(name string, value []int64, usage string) *[]int64 { + return CommandLine.Int64SliceP(name, "", value, usage) +} + +// Int64SliceP is like Int64Slice, but accepts a shorthand letter that can be used after a single dash. +func Int64SliceP(name, shorthand string, value []int64, usage string) *[]int64 { + return CommandLine.Int64SliceP(name, shorthand, value, usage) +} diff --git a/vendor/github.com/spf13/pflag/int8.go b/vendor/github.com/spf13/pflag/int8.go new file mode 100644 index 000000000..4da92228e --- /dev/null +++ b/vendor/github.com/spf13/pflag/int8.go @@ -0,0 +1,88 @@ +package pflag + +import "strconv" + +// -- int8 Value +type int8Value int8 + +func newInt8Value(val int8, p *int8) *int8Value { + *p = val + return (*int8Value)(p) +} + +func (i *int8Value) Set(s string) error { + v, err := strconv.ParseInt(s, 0, 8) + *i = int8Value(v) + return err +} + +func (i *int8Value) Type() string { + return "int8" +} + +func (i *int8Value) String() string { return strconv.FormatInt(int64(*i), 10) } + +func int8Conv(sval string) (interface{}, error) { + v, err := strconv.ParseInt(sval, 0, 8) + if err != nil { + return 0, err + } + return int8(v), nil +} + +// GetInt8 return the int8 value of a flag with the given name +func (f *FlagSet) GetInt8(name string) (int8, error) { + val, err := f.getFlagType(name, "int8", int8Conv) + if err != nil { + return 0, err + } + return val.(int8), nil +} + +// Int8Var defines an int8 flag with specified name, default value, and usage string. +// The argument p points to an int8 variable in which to store the value of the flag. +func (f *FlagSet) Int8Var(p *int8, name string, value int8, usage string) { + f.VarP(newInt8Value(value, p), name, "", usage) +} + +// Int8VarP is like Int8Var, but accepts a shorthand letter that can be used after a single dash. +func (f *FlagSet) Int8VarP(p *int8, name, shorthand string, value int8, usage string) { + f.VarP(newInt8Value(value, p), name, shorthand, usage) +} + +// Int8Var defines an int8 flag with specified name, default value, and usage string. +// The argument p points to an int8 variable in which to store the value of the flag. +func Int8Var(p *int8, name string, value int8, usage string) { + CommandLine.VarP(newInt8Value(value, p), name, "", usage) +} + +// Int8VarP is like Int8Var, but accepts a shorthand letter that can be used after a single dash. +func Int8VarP(p *int8, name, shorthand string, value int8, usage string) { + CommandLine.VarP(newInt8Value(value, p), name, shorthand, usage) +} + +// Int8 defines an int8 flag with specified name, default value, and usage string. +// The return value is the address of an int8 variable that stores the value of the flag. +func (f *FlagSet) Int8(name string, value int8, usage string) *int8 { + p := new(int8) + f.Int8VarP(p, name, "", value, usage) + return p +} + +// Int8P is like Int8, but accepts a shorthand letter that can be used after a single dash. 
+func (f *FlagSet) Int8P(name, shorthand string, value int8, usage string) *int8 { + p := new(int8) + f.Int8VarP(p, name, shorthand, value, usage) + return p +} + +// Int8 defines an int8 flag with specified name, default value, and usage string. +// The return value is the address of an int8 variable that stores the value of the flag. +func Int8(name string, value int8, usage string) *int8 { + return CommandLine.Int8P(name, "", value, usage) +} + +// Int8P is like Int8, but accepts a shorthand letter that can be used after a single dash. +func Int8P(name, shorthand string, value int8, usage string) *int8 { + return CommandLine.Int8P(name, shorthand, value, usage) +} diff --git a/vendor/github.com/spf13/pflag/int_slice.go b/vendor/github.com/spf13/pflag/int_slice.go new file mode 100644 index 000000000..e71c39d91 --- /dev/null +++ b/vendor/github.com/spf13/pflag/int_slice.go @@ -0,0 +1,158 @@ +package pflag + +import ( + "fmt" + "strconv" + "strings" +) + +// -- intSlice Value +type intSliceValue struct { + value *[]int + changed bool +} + +func newIntSliceValue(val []int, p *[]int) *intSliceValue { + isv := new(intSliceValue) + isv.value = p + *isv.value = val + return isv +} + +func (s *intSliceValue) Set(val string) error { + ss := strings.Split(val, ",") + out := make([]int, len(ss)) + for i, d := range ss { + var err error + out[i], err = strconv.Atoi(d) + if err != nil { + return err + } + + } + if !s.changed { + *s.value = out + } else { + *s.value = append(*s.value, out...) + } + s.changed = true + return nil +} + +func (s *intSliceValue) Type() string { + return "intSlice" +} + +func (s *intSliceValue) String() string { + out := make([]string, len(*s.value)) + for i, d := range *s.value { + out[i] = fmt.Sprintf("%d", d) + } + return "[" + strings.Join(out, ",") + "]" +} + +func (s *intSliceValue) Append(val string) error { + i, err := strconv.Atoi(val) + if err != nil { + return err + } + *s.value = append(*s.value, i) + return nil +} + +func (s *intSliceValue) Replace(val []string) error { + out := make([]int, len(val)) + for i, d := range val { + var err error + out[i], err = strconv.Atoi(d) + if err != nil { + return err + } + } + *s.value = out + return nil +} + +func (s *intSliceValue) GetSlice() []string { + out := make([]string, len(*s.value)) + for i, d := range *s.value { + out[i] = strconv.Itoa(d) + } + return out +} + +func intSliceConv(val string) (interface{}, error) { + val = strings.Trim(val, "[]") + // Empty string would cause a slice with one (empty) entry + if len(val) == 0 { + return []int{}, nil + } + ss := strings.Split(val, ",") + out := make([]int, len(ss)) + for i, d := range ss { + var err error + out[i], err = strconv.Atoi(d) + if err != nil { + return nil, err + } + + } + return out, nil +} + +// GetIntSlice return the []int value of a flag with the given name +func (f *FlagSet) GetIntSlice(name string) ([]int, error) { + val, err := f.getFlagType(name, "intSlice", intSliceConv) + if err != nil { + return []int{}, err + } + return val.([]int), nil +} + +// IntSliceVar defines a intSlice flag with specified name, default value, and usage string. +// The argument p points to a []int variable in which to store the value of the flag. +func (f *FlagSet) IntSliceVar(p *[]int, name string, value []int, usage string) { + f.VarP(newIntSliceValue(value, p), name, "", usage) +} + +// IntSliceVarP is like IntSliceVar, but accepts a shorthand letter that can be used after a single dash. 
+func (f *FlagSet) IntSliceVarP(p *[]int, name, shorthand string, value []int, usage string) { + f.VarP(newIntSliceValue(value, p), name, shorthand, usage) +} + +// IntSliceVar defines a int[] flag with specified name, default value, and usage string. +// The argument p points to a int[] variable in which to store the value of the flag. +func IntSliceVar(p *[]int, name string, value []int, usage string) { + CommandLine.VarP(newIntSliceValue(value, p), name, "", usage) +} + +// IntSliceVarP is like IntSliceVar, but accepts a shorthand letter that can be used after a single dash. +func IntSliceVarP(p *[]int, name, shorthand string, value []int, usage string) { + CommandLine.VarP(newIntSliceValue(value, p), name, shorthand, usage) +} + +// IntSlice defines a []int flag with specified name, default value, and usage string. +// The return value is the address of a []int variable that stores the value of the flag. +func (f *FlagSet) IntSlice(name string, value []int, usage string) *[]int { + p := []int{} + f.IntSliceVarP(&p, name, "", value, usage) + return &p +} + +// IntSliceP is like IntSlice, but accepts a shorthand letter that can be used after a single dash. +func (f *FlagSet) IntSliceP(name, shorthand string, value []int, usage string) *[]int { + p := []int{} + f.IntSliceVarP(&p, name, shorthand, value, usage) + return &p +} + +// IntSlice defines a []int flag with specified name, default value, and usage string. +// The return value is the address of a []int variable that stores the value of the flag. +func IntSlice(name string, value []int, usage string) *[]int { + return CommandLine.IntSliceP(name, "", value, usage) +} + +// IntSliceP is like IntSlice, but accepts a shorthand letter that can be used after a single dash. +func IntSliceP(name, shorthand string, value []int, usage string) *[]int { + return CommandLine.IntSliceP(name, shorthand, value, usage) +} diff --git a/vendor/github.com/spf13/pflag/ip.go b/vendor/github.com/spf13/pflag/ip.go new file mode 100644 index 000000000..3d414ba69 --- /dev/null +++ b/vendor/github.com/spf13/pflag/ip.go @@ -0,0 +1,94 @@ +package pflag + +import ( + "fmt" + "net" + "strings" +) + +// -- net.IP value +type ipValue net.IP + +func newIPValue(val net.IP, p *net.IP) *ipValue { + *p = val + return (*ipValue)(p) +} + +func (i *ipValue) String() string { return net.IP(*i).String() } +func (i *ipValue) Set(s string) error { + ip := net.ParseIP(strings.TrimSpace(s)) + if ip == nil { + return fmt.Errorf("failed to parse IP: %q", s) + } + *i = ipValue(ip) + return nil +} + +func (i *ipValue) Type() string { + return "ip" +} + +func ipConv(sval string) (interface{}, error) { + ip := net.ParseIP(sval) + if ip != nil { + return ip, nil + } + return nil, fmt.Errorf("invalid string being converted to IP address: %s", sval) +} + +// GetIP return the net.IP value of a flag with the given name +func (f *FlagSet) GetIP(name string) (net.IP, error) { + val, err := f.getFlagType(name, "ip", ipConv) + if err != nil { + return nil, err + } + return val.(net.IP), nil +} + +// IPVar defines an net.IP flag with specified name, default value, and usage string. +// The argument p points to an net.IP variable in which to store the value of the flag. +func (f *FlagSet) IPVar(p *net.IP, name string, value net.IP, usage string) { + f.VarP(newIPValue(value, p), name, "", usage) +} + +// IPVarP is like IPVar, but accepts a shorthand letter that can be used after a single dash. 
+func (f *FlagSet) IPVarP(p *net.IP, name, shorthand string, value net.IP, usage string) { + f.VarP(newIPValue(value, p), name, shorthand, usage) +} + +// IPVar defines an net.IP flag with specified name, default value, and usage string. +// The argument p points to an net.IP variable in which to store the value of the flag. +func IPVar(p *net.IP, name string, value net.IP, usage string) { + CommandLine.VarP(newIPValue(value, p), name, "", usage) +} + +// IPVarP is like IPVar, but accepts a shorthand letter that can be used after a single dash. +func IPVarP(p *net.IP, name, shorthand string, value net.IP, usage string) { + CommandLine.VarP(newIPValue(value, p), name, shorthand, usage) +} + +// IP defines an net.IP flag with specified name, default value, and usage string. +// The return value is the address of an net.IP variable that stores the value of the flag. +func (f *FlagSet) IP(name string, value net.IP, usage string) *net.IP { + p := new(net.IP) + f.IPVarP(p, name, "", value, usage) + return p +} + +// IPP is like IP, but accepts a shorthand letter that can be used after a single dash. +func (f *FlagSet) IPP(name, shorthand string, value net.IP, usage string) *net.IP { + p := new(net.IP) + f.IPVarP(p, name, shorthand, value, usage) + return p +} + +// IP defines an net.IP flag with specified name, default value, and usage string. +// The return value is the address of an net.IP variable that stores the value of the flag. +func IP(name string, value net.IP, usage string) *net.IP { + return CommandLine.IPP(name, "", value, usage) +} + +// IPP is like IP, but accepts a shorthand letter that can be used after a single dash. +func IPP(name, shorthand string, value net.IP, usage string) *net.IP { + return CommandLine.IPP(name, shorthand, value, usage) +} diff --git a/vendor/github.com/spf13/pflag/ip_slice.go b/vendor/github.com/spf13/pflag/ip_slice.go new file mode 100644 index 000000000..775faae4f --- /dev/null +++ b/vendor/github.com/spf13/pflag/ip_slice.go @@ -0,0 +1,186 @@ +package pflag + +import ( + "fmt" + "io" + "net" + "strings" +) + +// -- ipSlice Value +type ipSliceValue struct { + value *[]net.IP + changed bool +} + +func newIPSliceValue(val []net.IP, p *[]net.IP) *ipSliceValue { + ipsv := new(ipSliceValue) + ipsv.value = p + *ipsv.value = val + return ipsv +} + +// Set converts, and assigns, the comma-separated IP argument string representation as the []net.IP value of this flag. +// If Set is called on a flag that already has a []net.IP assigned, the newly converted values will be appended. +func (s *ipSliceValue) Set(val string) error { + + // remove all quote characters + rmQuote := strings.NewReplacer(`"`, "", `'`, "", "`", "") + + // read flag arguments with CSV parser + ipStrSlice, err := readAsCSV(rmQuote.Replace(val)) + if err != nil && err != io.EOF { + return err + } + + // parse ip values into slice + out := make([]net.IP, 0, len(ipStrSlice)) + for _, ipStr := range ipStrSlice { + ip := net.ParseIP(strings.TrimSpace(ipStr)) + if ip == nil { + return fmt.Errorf("invalid string being converted to IP address: %s", ipStr) + } + out = append(out, ip) + } + + if !s.changed { + *s.value = out + } else { + *s.value = append(*s.value, out...) + } + + s.changed = true + + return nil +} + +// Type returns a string that uniquely represents this flag's type. +func (s *ipSliceValue) Type() string { + return "ipSlice" +} + +// String defines a "native" format for this net.IP slice flag value. 
+func (s *ipSliceValue) String() string { + + ipStrSlice := make([]string, len(*s.value)) + for i, ip := range *s.value { + ipStrSlice[i] = ip.String() + } + + out, _ := writeAsCSV(ipStrSlice) + + return "[" + out + "]" +} + +func (s *ipSliceValue) fromString(val string) (net.IP, error) { + return net.ParseIP(strings.TrimSpace(val)), nil +} + +func (s *ipSliceValue) toString(val net.IP) string { + return val.String() +} + +func (s *ipSliceValue) Append(val string) error { + i, err := s.fromString(val) + if err != nil { + return err + } + *s.value = append(*s.value, i) + return nil +} + +func (s *ipSliceValue) Replace(val []string) error { + out := make([]net.IP, len(val)) + for i, d := range val { + var err error + out[i], err = s.fromString(d) + if err != nil { + return err + } + } + *s.value = out + return nil +} + +func (s *ipSliceValue) GetSlice() []string { + out := make([]string, len(*s.value)) + for i, d := range *s.value { + out[i] = s.toString(d) + } + return out +} + +func ipSliceConv(val string) (interface{}, error) { + val = strings.Trim(val, "[]") + // Empty string would cause a slice with one (empty) entry + if len(val) == 0 { + return []net.IP{}, nil + } + ss := strings.Split(val, ",") + out := make([]net.IP, len(ss)) + for i, sval := range ss { + ip := net.ParseIP(strings.TrimSpace(sval)) + if ip == nil { + return nil, fmt.Errorf("invalid string being converted to IP address: %s", sval) + } + out[i] = ip + } + return out, nil +} + +// GetIPSlice returns the []net.IP value of a flag with the given name +func (f *FlagSet) GetIPSlice(name string) ([]net.IP, error) { + val, err := f.getFlagType(name, "ipSlice", ipSliceConv) + if err != nil { + return []net.IP{}, err + } + return val.([]net.IP), nil +} + +// IPSliceVar defines a ipSlice flag with specified name, default value, and usage string. +// The argument p points to a []net.IP variable in which to store the value of the flag. +func (f *FlagSet) IPSliceVar(p *[]net.IP, name string, value []net.IP, usage string) { + f.VarP(newIPSliceValue(value, p), name, "", usage) +} + +// IPSliceVarP is like IPSliceVar, but accepts a shorthand letter that can be used after a single dash. +func (f *FlagSet) IPSliceVarP(p *[]net.IP, name, shorthand string, value []net.IP, usage string) { + f.VarP(newIPSliceValue(value, p), name, shorthand, usage) +} + +// IPSliceVar defines a []net.IP flag with specified name, default value, and usage string. +// The argument p points to a []net.IP variable in which to store the value of the flag. +func IPSliceVar(p *[]net.IP, name string, value []net.IP, usage string) { + CommandLine.VarP(newIPSliceValue(value, p), name, "", usage) +} + +// IPSliceVarP is like IPSliceVar, but accepts a shorthand letter that can be used after a single dash. +func IPSliceVarP(p *[]net.IP, name, shorthand string, value []net.IP, usage string) { + CommandLine.VarP(newIPSliceValue(value, p), name, shorthand, usage) +} + +// IPSlice defines a []net.IP flag with specified name, default value, and usage string. +// The return value is the address of a []net.IP variable that stores the value of that flag. +func (f *FlagSet) IPSlice(name string, value []net.IP, usage string) *[]net.IP { + p := []net.IP{} + f.IPSliceVarP(&p, name, "", value, usage) + return &p +} + +// IPSliceP is like IPSlice, but accepts a shorthand letter that can be used after a single dash. 
+func (f *FlagSet) IPSliceP(name, shorthand string, value []net.IP, usage string) *[]net.IP { + p := []net.IP{} + f.IPSliceVarP(&p, name, shorthand, value, usage) + return &p +} + +// IPSlice defines a []net.IP flag with specified name, default value, and usage string. +// The return value is the address of a []net.IP variable that stores the value of the flag. +func IPSlice(name string, value []net.IP, usage string) *[]net.IP { + return CommandLine.IPSliceP(name, "", value, usage) +} + +// IPSliceP is like IPSlice, but accepts a shorthand letter that can be used after a single dash. +func IPSliceP(name, shorthand string, value []net.IP, usage string) *[]net.IP { + return CommandLine.IPSliceP(name, shorthand, value, usage) +} diff --git a/vendor/github.com/spf13/pflag/ipmask.go b/vendor/github.com/spf13/pflag/ipmask.go new file mode 100644 index 000000000..5bd44bd21 --- /dev/null +++ b/vendor/github.com/spf13/pflag/ipmask.go @@ -0,0 +1,122 @@ +package pflag + +import ( + "fmt" + "net" + "strconv" +) + +// -- net.IPMask value +type ipMaskValue net.IPMask + +func newIPMaskValue(val net.IPMask, p *net.IPMask) *ipMaskValue { + *p = val + return (*ipMaskValue)(p) +} + +func (i *ipMaskValue) String() string { return net.IPMask(*i).String() } +func (i *ipMaskValue) Set(s string) error { + ip := ParseIPv4Mask(s) + if ip == nil { + return fmt.Errorf("failed to parse IP mask: %q", s) + } + *i = ipMaskValue(ip) + return nil +} + +func (i *ipMaskValue) Type() string { + return "ipMask" +} + +// ParseIPv4Mask written in IP form (e.g. 255.255.255.0). +// This function should really belong to the net package. +func ParseIPv4Mask(s string) net.IPMask { + mask := net.ParseIP(s) + if mask == nil { + if len(s) != 8 { + return nil + } + // net.IPMask.String() actually outputs things like ffffff00 + // so write a horrible parser for that as well :-( + m := []int{} + for i := 0; i < 4; i++ { + b := "0x" + s[2*i:2*i+2] + d, err := strconv.ParseInt(b, 0, 0) + if err != nil { + return nil + } + m = append(m, int(d)) + } + s := fmt.Sprintf("%d.%d.%d.%d", m[0], m[1], m[2], m[3]) + mask = net.ParseIP(s) + if mask == nil { + return nil + } + } + return net.IPv4Mask(mask[12], mask[13], mask[14], mask[15]) +} + +func parseIPv4Mask(sval string) (interface{}, error) { + mask := ParseIPv4Mask(sval) + if mask == nil { + return nil, fmt.Errorf("unable to parse %s as net.IPMask", sval) + } + return mask, nil +} + +// GetIPv4Mask return the net.IPv4Mask value of a flag with the given name +func (f *FlagSet) GetIPv4Mask(name string) (net.IPMask, error) { + val, err := f.getFlagType(name, "ipMask", parseIPv4Mask) + if err != nil { + return nil, err + } + return val.(net.IPMask), nil +} + +// IPMaskVar defines an net.IPMask flag with specified name, default value, and usage string. +// The argument p points to an net.IPMask variable in which to store the value of the flag. +func (f *FlagSet) IPMaskVar(p *net.IPMask, name string, value net.IPMask, usage string) { + f.VarP(newIPMaskValue(value, p), name, "", usage) +} + +// IPMaskVarP is like IPMaskVar, but accepts a shorthand letter that can be used after a single dash. +func (f *FlagSet) IPMaskVarP(p *net.IPMask, name, shorthand string, value net.IPMask, usage string) { + f.VarP(newIPMaskValue(value, p), name, shorthand, usage) +} + +// IPMaskVar defines an net.IPMask flag with specified name, default value, and usage string. +// The argument p points to an net.IPMask variable in which to store the value of the flag. 
+func IPMaskVar(p *net.IPMask, name string, value net.IPMask, usage string) { + CommandLine.VarP(newIPMaskValue(value, p), name, "", usage) +} + +// IPMaskVarP is like IPMaskVar, but accepts a shorthand letter that can be used after a single dash. +func IPMaskVarP(p *net.IPMask, name, shorthand string, value net.IPMask, usage string) { + CommandLine.VarP(newIPMaskValue(value, p), name, shorthand, usage) +} + +// IPMask defines an net.IPMask flag with specified name, default value, and usage string. +// The return value is the address of an net.IPMask variable that stores the value of the flag. +func (f *FlagSet) IPMask(name string, value net.IPMask, usage string) *net.IPMask { + p := new(net.IPMask) + f.IPMaskVarP(p, name, "", value, usage) + return p +} + +// IPMaskP is like IPMask, but accepts a shorthand letter that can be used after a single dash. +func (f *FlagSet) IPMaskP(name, shorthand string, value net.IPMask, usage string) *net.IPMask { + p := new(net.IPMask) + f.IPMaskVarP(p, name, shorthand, value, usage) + return p +} + +// IPMask defines an net.IPMask flag with specified name, default value, and usage string. +// The return value is the address of an net.IPMask variable that stores the value of the flag. +func IPMask(name string, value net.IPMask, usage string) *net.IPMask { + return CommandLine.IPMaskP(name, "", value, usage) +} + +// IPMaskP is like IP, but accepts a shorthand letter that can be used after a single dash. +func IPMaskP(name, shorthand string, value net.IPMask, usage string) *net.IPMask { + return CommandLine.IPMaskP(name, shorthand, value, usage) +} diff --git a/vendor/github.com/spf13/pflag/ipnet.go b/vendor/github.com/spf13/pflag/ipnet.go new file mode 100644 index 000000000..e2c1b8bcd --- /dev/null +++ b/vendor/github.com/spf13/pflag/ipnet.go @@ -0,0 +1,98 @@ +package pflag + +import ( + "fmt" + "net" + "strings" +) + +// IPNet adapts net.IPNet for use as a flag. +type ipNetValue net.IPNet + +func (ipnet ipNetValue) String() string { + n := net.IPNet(ipnet) + return n.String() +} + +func (ipnet *ipNetValue) Set(value string) error { + _, n, err := net.ParseCIDR(strings.TrimSpace(value)) + if err != nil { + return err + } + *ipnet = ipNetValue(*n) + return nil +} + +func (*ipNetValue) Type() string { + return "ipNet" +} + +func newIPNetValue(val net.IPNet, p *net.IPNet) *ipNetValue { + *p = val + return (*ipNetValue)(p) +} + +func ipNetConv(sval string) (interface{}, error) { + _, n, err := net.ParseCIDR(strings.TrimSpace(sval)) + if err == nil { + return *n, nil + } + return nil, fmt.Errorf("invalid string being converted to IPNet: %s", sval) +} + +// GetIPNet return the net.IPNet value of a flag with the given name +func (f *FlagSet) GetIPNet(name string) (net.IPNet, error) { + val, err := f.getFlagType(name, "ipNet", ipNetConv) + if err != nil { + return net.IPNet{}, err + } + return val.(net.IPNet), nil +} + +// IPNetVar defines an net.IPNet flag with specified name, default value, and usage string. +// The argument p points to an net.IPNet variable in which to store the value of the flag. +func (f *FlagSet) IPNetVar(p *net.IPNet, name string, value net.IPNet, usage string) { + f.VarP(newIPNetValue(value, p), name, "", usage) +} + +// IPNetVarP is like IPNetVar, but accepts a shorthand letter that can be used after a single dash. 
+func (f *FlagSet) IPNetVarP(p *net.IPNet, name, shorthand string, value net.IPNet, usage string) { + f.VarP(newIPNetValue(value, p), name, shorthand, usage) +} + +// IPNetVar defines an net.IPNet flag with specified name, default value, and usage string. +// The argument p points to an net.IPNet variable in which to store the value of the flag. +func IPNetVar(p *net.IPNet, name string, value net.IPNet, usage string) { + CommandLine.VarP(newIPNetValue(value, p), name, "", usage) +} + +// IPNetVarP is like IPNetVar, but accepts a shorthand letter that can be used after a single dash. +func IPNetVarP(p *net.IPNet, name, shorthand string, value net.IPNet, usage string) { + CommandLine.VarP(newIPNetValue(value, p), name, shorthand, usage) +} + +// IPNet defines an net.IPNet flag with specified name, default value, and usage string. +// The return value is the address of an net.IPNet variable that stores the value of the flag. +func (f *FlagSet) IPNet(name string, value net.IPNet, usage string) *net.IPNet { + p := new(net.IPNet) + f.IPNetVarP(p, name, "", value, usage) + return p +} + +// IPNetP is like IPNet, but accepts a shorthand letter that can be used after a single dash. +func (f *FlagSet) IPNetP(name, shorthand string, value net.IPNet, usage string) *net.IPNet { + p := new(net.IPNet) + f.IPNetVarP(p, name, shorthand, value, usage) + return p +} + +// IPNet defines an net.IPNet flag with specified name, default value, and usage string. +// The return value is the address of an net.IPNet variable that stores the value of the flag. +func IPNet(name string, value net.IPNet, usage string) *net.IPNet { + return CommandLine.IPNetP(name, "", value, usage) +} + +// IPNetP is like IPNet, but accepts a shorthand letter that can be used after a single dash. +func IPNetP(name, shorthand string, value net.IPNet, usage string) *net.IPNet { + return CommandLine.IPNetP(name, shorthand, value, usage) +} diff --git a/vendor/github.com/spf13/pflag/string.go b/vendor/github.com/spf13/pflag/string.go new file mode 100644 index 000000000..04e0a26ff --- /dev/null +++ b/vendor/github.com/spf13/pflag/string.go @@ -0,0 +1,80 @@ +package pflag + +// -- string Value +type stringValue string + +func newStringValue(val string, p *string) *stringValue { + *p = val + return (*stringValue)(p) +} + +func (s *stringValue) Set(val string) error { + *s = stringValue(val) + return nil +} +func (s *stringValue) Type() string { + return "string" +} + +func (s *stringValue) String() string { return string(*s) } + +func stringConv(sval string) (interface{}, error) { + return sval, nil +} + +// GetString return the string value of a flag with the given name +func (f *FlagSet) GetString(name string) (string, error) { + val, err := f.getFlagType(name, "string", stringConv) + if err != nil { + return "", err + } + return val.(string), nil +} + +// StringVar defines a string flag with specified name, default value, and usage string. +// The argument p points to a string variable in which to store the value of the flag. +func (f *FlagSet) StringVar(p *string, name string, value string, usage string) { + f.VarP(newStringValue(value, p), name, "", usage) +} + +// StringVarP is like StringVar, but accepts a shorthand letter that can be used after a single dash. +func (f *FlagSet) StringVarP(p *string, name, shorthand string, value string, usage string) { + f.VarP(newStringValue(value, p), name, shorthand, usage) +} + +// StringVar defines a string flag with specified name, default value, and usage string. 
+// The argument p points to a string variable in which to store the value of the flag. +func StringVar(p *string, name string, value string, usage string) { + CommandLine.VarP(newStringValue(value, p), name, "", usage) +} + +// StringVarP is like StringVar, but accepts a shorthand letter that can be used after a single dash. +func StringVarP(p *string, name, shorthand string, value string, usage string) { + CommandLine.VarP(newStringValue(value, p), name, shorthand, usage) +} + +// String defines a string flag with specified name, default value, and usage string. +// The return value is the address of a string variable that stores the value of the flag. +func (f *FlagSet) String(name string, value string, usage string) *string { + p := new(string) + f.StringVarP(p, name, "", value, usage) + return p +} + +// StringP is like String, but accepts a shorthand letter that can be used after a single dash. +func (f *FlagSet) StringP(name, shorthand string, value string, usage string) *string { + p := new(string) + f.StringVarP(p, name, shorthand, value, usage) + return p +} + +// String defines a string flag with specified name, default value, and usage string. +// The return value is the address of a string variable that stores the value of the flag. +func String(name string, value string, usage string) *string { + return CommandLine.StringP(name, "", value, usage) +} + +// StringP is like String, but accepts a shorthand letter that can be used after a single dash. +func StringP(name, shorthand string, value string, usage string) *string { + return CommandLine.StringP(name, shorthand, value, usage) +} diff --git a/vendor/github.com/spf13/pflag/string_array.go b/vendor/github.com/spf13/pflag/string_array.go new file mode 100644 index 000000000..4894af818 --- /dev/null +++ b/vendor/github.com/spf13/pflag/string_array.go @@ -0,0 +1,129 @@ +package pflag + +// -- stringArray Value +type stringArrayValue struct { + value *[]string + changed bool +} + +func newStringArrayValue(val []string, p *[]string) *stringArrayValue { + ssv := new(stringArrayValue) + ssv.value = p + *ssv.value = val + return ssv +} + +func (s *stringArrayValue) Set(val string) error { + if !s.changed { + *s.value = []string{val} + s.changed = true + } else { + *s.value = append(*s.value, val) + } + return nil +} + +func (s *stringArrayValue) Append(val string) error { + *s.value = append(*s.value, val) + return nil +} + +func (s *stringArrayValue) Replace(val []string) error { + out := make([]string, len(val)) + for i, d := range val { + var err error + out[i] = d + if err != nil { + return err + } + } + *s.value = out + return nil +} + +func (s *stringArrayValue) GetSlice() []string { + out := make([]string, len(*s.value)) + for i, d := range *s.value { + out[i] = d + } + return out +} + +func (s *stringArrayValue) Type() string { + return "stringArray" +} + +func (s *stringArrayValue) String() string { + str, _ := writeAsCSV(*s.value) + return "[" + str + "]" +} + +func stringArrayConv(sval string) (interface{}, error) { + sval = sval[1 : len(sval)-1] + // An empty string would cause a array with one (empty) string + if len(sval) == 0 { + return []string{}, nil + } + return readAsCSV(sval) +} + +// GetStringArray return the []string value of a flag with the given name +func (f *FlagSet) GetStringArray(name string) ([]string, error) { + val, err := f.getFlagType(name, "stringArray", stringArrayConv) + if err != nil { + return []string{}, err + } + return val.([]string), nil +} + +// StringArrayVar defines a string flag with 
specified name, default value, and usage string. +// The argument p points to a []string variable in which to store the values of the multiple flags. +// The value of each argument will not try to be separated by comma. Use a StringSlice for that. +func (f *FlagSet) StringArrayVar(p *[]string, name string, value []string, usage string) { + f.VarP(newStringArrayValue(value, p), name, "", usage) +} + +// StringArrayVarP is like StringArrayVar, but accepts a shorthand letter that can be used after a single dash. +func (f *FlagSet) StringArrayVarP(p *[]string, name, shorthand string, value []string, usage string) { + f.VarP(newStringArrayValue(value, p), name, shorthand, usage) +} + +// StringArrayVar defines a string flag with specified name, default value, and usage string. +// The argument p points to a []string variable in which to store the value of the flag. +// The value of each argument will not try to be separated by comma. Use a StringSlice for that. +func StringArrayVar(p *[]string, name string, value []string, usage string) { + CommandLine.VarP(newStringArrayValue(value, p), name, "", usage) +} + +// StringArrayVarP is like StringArrayVar, but accepts a shorthand letter that can be used after a single dash. +func StringArrayVarP(p *[]string, name, shorthand string, value []string, usage string) { + CommandLine.VarP(newStringArrayValue(value, p), name, shorthand, usage) +} + +// StringArray defines a string flag with specified name, default value, and usage string. +// The return value is the address of a []string variable that stores the value of the flag. +// The value of each argument will not try to be separated by comma. Use a StringSlice for that. +func (f *FlagSet) StringArray(name string, value []string, usage string) *[]string { + p := []string{} + f.StringArrayVarP(&p, name, "", value, usage) + return &p +} + +// StringArrayP is like StringArray, but accepts a shorthand letter that can be used after a single dash. +func (f *FlagSet) StringArrayP(name, shorthand string, value []string, usage string) *[]string { + p := []string{} + f.StringArrayVarP(&p, name, shorthand, value, usage) + return &p +} + +// StringArray defines a string flag with specified name, default value, and usage string. +// The return value is the address of a []string variable that stores the value of the flag. +// The value of each argument will not try to be separated by comma. Use a StringSlice for that. +func StringArray(name string, value []string, usage string) *[]string { + return CommandLine.StringArrayP(name, "", value, usage) +} + +// StringArrayP is like StringArray, but accepts a shorthand letter that can be used after a single dash. 
+func StringArrayP(name, shorthand string, value []string, usage string) *[]string { + return CommandLine.StringArrayP(name, shorthand, value, usage) +} diff --git a/vendor/github.com/spf13/pflag/string_slice.go b/vendor/github.com/spf13/pflag/string_slice.go new file mode 100644 index 000000000..3cb2e69db --- /dev/null +++ b/vendor/github.com/spf13/pflag/string_slice.go @@ -0,0 +1,163 @@ +package pflag + +import ( + "bytes" + "encoding/csv" + "strings" +) + +// -- stringSlice Value +type stringSliceValue struct { + value *[]string + changed bool +} + +func newStringSliceValue(val []string, p *[]string) *stringSliceValue { + ssv := new(stringSliceValue) + ssv.value = p + *ssv.value = val + return ssv +} + +func readAsCSV(val string) ([]string, error) { + if val == "" { + return []string{}, nil + } + stringReader := strings.NewReader(val) + csvReader := csv.NewReader(stringReader) + return csvReader.Read() +} + +func writeAsCSV(vals []string) (string, error) { + b := &bytes.Buffer{} + w := csv.NewWriter(b) + err := w.Write(vals) + if err != nil { + return "", err + } + w.Flush() + return strings.TrimSuffix(b.String(), "\n"), nil +} + +func (s *stringSliceValue) Set(val string) error { + v, err := readAsCSV(val) + if err != nil { + return err + } + if !s.changed { + *s.value = v + } else { + *s.value = append(*s.value, v...) + } + s.changed = true + return nil +} + +func (s *stringSliceValue) Type() string { + return "stringSlice" +} + +func (s *stringSliceValue) String() string { + str, _ := writeAsCSV(*s.value) + return "[" + str + "]" +} + +func (s *stringSliceValue) Append(val string) error { + *s.value = append(*s.value, val) + return nil +} + +func (s *stringSliceValue) Replace(val []string) error { + *s.value = val + return nil +} + +func (s *stringSliceValue) GetSlice() []string { + return *s.value +} + +func stringSliceConv(sval string) (interface{}, error) { + sval = sval[1 : len(sval)-1] + // An empty string would cause a slice with one (empty) string + if len(sval) == 0 { + return []string{}, nil + } + return readAsCSV(sval) +} + +// GetStringSlice return the []string value of a flag with the given name +func (f *FlagSet) GetStringSlice(name string) ([]string, error) { + val, err := f.getFlagType(name, "stringSlice", stringSliceConv) + if err != nil { + return []string{}, err + } + return val.([]string), nil +} + +// StringSliceVar defines a string flag with specified name, default value, and usage string. +// The argument p points to a []string variable in which to store the value of the flag. +// Compared to StringArray flags, StringSlice flags take comma-separated value as arguments and split them accordingly. +// For example: +// --ss="v1,v2" --ss="v3" +// will result in +// []string{"v1", "v2", "v3"} +func (f *FlagSet) StringSliceVar(p *[]string, name string, value []string, usage string) { + f.VarP(newStringSliceValue(value, p), name, "", usage) +} + +// StringSliceVarP is like StringSliceVar, but accepts a shorthand letter that can be used after a single dash. +func (f *FlagSet) StringSliceVarP(p *[]string, name, shorthand string, value []string, usage string) { + f.VarP(newStringSliceValue(value, p), name, shorthand, usage) +} + +// StringSliceVar defines a string flag with specified name, default value, and usage string. +// The argument p points to a []string variable in which to store the value of the flag. +// Compared to StringArray flags, StringSlice flags take comma-separated value as arguments and split them accordingly. 
+// For example: +// --ss="v1,v2" --ss="v3" +// will result in +// []string{"v1", "v2", "v3"} +func StringSliceVar(p *[]string, name string, value []string, usage string) { + CommandLine.VarP(newStringSliceValue(value, p), name, "", usage) +} + +// StringSliceVarP is like StringSliceVar, but accepts a shorthand letter that can be used after a single dash. +func StringSliceVarP(p *[]string, name, shorthand string, value []string, usage string) { + CommandLine.VarP(newStringSliceValue(value, p), name, shorthand, usage) +} + +// StringSlice defines a string flag with specified name, default value, and usage string. +// The return value is the address of a []string variable that stores the value of the flag. +// Compared to StringArray flags, StringSlice flags take comma-separated value as arguments and split them accordingly. +// For example: +// --ss="v1,v2" --ss="v3" +// will result in +// []string{"v1", "v2", "v3"} +func (f *FlagSet) StringSlice(name string, value []string, usage string) *[]string { + p := []string{} + f.StringSliceVarP(&p, name, "", value, usage) + return &p +} + +// StringSliceP is like StringSlice, but accepts a shorthand letter that can be used after a single dash. +func (f *FlagSet) StringSliceP(name, shorthand string, value []string, usage string) *[]string { + p := []string{} + f.StringSliceVarP(&p, name, shorthand, value, usage) + return &p +} + +// StringSlice defines a string flag with specified name, default value, and usage string. +// The return value is the address of a []string variable that stores the value of the flag. +// Compared to StringArray flags, StringSlice flags take comma-separated value as arguments and split them accordingly. +// For example: +// --ss="v1,v2" --ss="v3" +// will result in +// []string{"v1", "v2", "v3"} +func StringSlice(name string, value []string, usage string) *[]string { + return CommandLine.StringSliceP(name, "", value, usage) +} + +// StringSliceP is like StringSlice, but accepts a shorthand letter that can be used after a single dash. 
+func StringSliceP(name, shorthand string, value []string, usage string) *[]string { + return CommandLine.StringSliceP(name, shorthand, value, usage) +} diff --git a/vendor/github.com/spf13/pflag/string_to_int.go b/vendor/github.com/spf13/pflag/string_to_int.go new file mode 100644 index 000000000..5ceda3965 --- /dev/null +++ b/vendor/github.com/spf13/pflag/string_to_int.go @@ -0,0 +1,149 @@ +package pflag + +import ( + "bytes" + "fmt" + "strconv" + "strings" +) + +// -- stringToInt Value +type stringToIntValue struct { + value *map[string]int + changed bool +} + +func newStringToIntValue(val map[string]int, p *map[string]int) *stringToIntValue { + ssv := new(stringToIntValue) + ssv.value = p + *ssv.value = val + return ssv +} + +// Format: a=1,b=2 +func (s *stringToIntValue) Set(val string) error { + ss := strings.Split(val, ",") + out := make(map[string]int, len(ss)) + for _, pair := range ss { + kv := strings.SplitN(pair, "=", 2) + if len(kv) != 2 { + return fmt.Errorf("%s must be formatted as key=value", pair) + } + var err error + out[kv[0]], err = strconv.Atoi(kv[1]) + if err != nil { + return err + } + } + if !s.changed { + *s.value = out + } else { + for k, v := range out { + (*s.value)[k] = v + } + } + s.changed = true + return nil +} + +func (s *stringToIntValue) Type() string { + return "stringToInt" +} + +func (s *stringToIntValue) String() string { + var buf bytes.Buffer + i := 0 + for k, v := range *s.value { + if i > 0 { + buf.WriteRune(',') + } + buf.WriteString(k) + buf.WriteRune('=') + buf.WriteString(strconv.Itoa(v)) + i++ + } + return "[" + buf.String() + "]" +} + +func stringToIntConv(val string) (interface{}, error) { + val = strings.Trim(val, "[]") + // An empty string would cause an empty map + if len(val) == 0 { + return map[string]int{}, nil + } + ss := strings.Split(val, ",") + out := make(map[string]int, len(ss)) + for _, pair := range ss { + kv := strings.SplitN(pair, "=", 2) + if len(kv) != 2 { + return nil, fmt.Errorf("%s must be formatted as key=value", pair) + } + var err error + out[kv[0]], err = strconv.Atoi(kv[1]) + if err != nil { + return nil, err + } + } + return out, nil +} + +// GetStringToInt return the map[string]int value of a flag with the given name +func (f *FlagSet) GetStringToInt(name string) (map[string]int, error) { + val, err := f.getFlagType(name, "stringToInt", stringToIntConv) + if err != nil { + return map[string]int{}, err + } + return val.(map[string]int), nil +} + +// StringToIntVar defines a string flag with specified name, default value, and usage string. +// The argument p points to a map[string]int variable in which to store the values of the multiple flags. +// The value of each argument will not try to be separated by comma +func (f *FlagSet) StringToIntVar(p *map[string]int, name string, value map[string]int, usage string) { + f.VarP(newStringToIntValue(value, p), name, "", usage) +} + +// StringToIntVarP is like StringToIntVar, but accepts a shorthand letter that can be used after a single dash. +func (f *FlagSet) StringToIntVarP(p *map[string]int, name, shorthand string, value map[string]int, usage string) { + f.VarP(newStringToIntValue(value, p), name, shorthand, usage) +} + +// StringToIntVar defines a string flag with specified name, default value, and usage string. +// The argument p points to a map[string]int variable in which to store the value of the flag. 
+// The value of each argument will not try to be separated by comma +func StringToIntVar(p *map[string]int, name string, value map[string]int, usage string) { + CommandLine.VarP(newStringToIntValue(value, p), name, "", usage) +} + +// StringToIntVarP is like StringToIntVar, but accepts a shorthand letter that can be used after a single dash. +func StringToIntVarP(p *map[string]int, name, shorthand string, value map[string]int, usage string) { + CommandLine.VarP(newStringToIntValue(value, p), name, shorthand, usage) +} + +// StringToInt defines a string flag with specified name, default value, and usage string. +// The return value is the address of a map[string]int variable that stores the value of the flag. +// The value of each argument will not try to be separated by comma +func (f *FlagSet) StringToInt(name string, value map[string]int, usage string) *map[string]int { + p := map[string]int{} + f.StringToIntVarP(&p, name, "", value, usage) + return &p +} + +// StringToIntP is like StringToInt, but accepts a shorthand letter that can be used after a single dash. +func (f *FlagSet) StringToIntP(name, shorthand string, value map[string]int, usage string) *map[string]int { + p := map[string]int{} + f.StringToIntVarP(&p, name, shorthand, value, usage) + return &p +} + +// StringToInt defines a string flag with specified name, default value, and usage string. +// The return value is the address of a map[string]int variable that stores the value of the flag. +// The value of each argument will not try to be separated by comma +func StringToInt(name string, value map[string]int, usage string) *map[string]int { + return CommandLine.StringToIntP(name, "", value, usage) +} + +// StringToIntP is like StringToInt, but accepts a shorthand letter that can be used after a single dash. 
+func StringToIntP(name, shorthand string, value map[string]int, usage string) *map[string]int { + return CommandLine.StringToIntP(name, shorthand, value, usage) +} diff --git a/vendor/github.com/spf13/pflag/string_to_int64.go b/vendor/github.com/spf13/pflag/string_to_int64.go new file mode 100644 index 000000000..a807a04a0 --- /dev/null +++ b/vendor/github.com/spf13/pflag/string_to_int64.go @@ -0,0 +1,149 @@ +package pflag + +import ( + "bytes" + "fmt" + "strconv" + "strings" +) + +// -- stringToInt64 Value +type stringToInt64Value struct { + value *map[string]int64 + changed bool +} + +func newStringToInt64Value(val map[string]int64, p *map[string]int64) *stringToInt64Value { + ssv := new(stringToInt64Value) + ssv.value = p + *ssv.value = val + return ssv +} + +// Format: a=1,b=2 +func (s *stringToInt64Value) Set(val string) error { + ss := strings.Split(val, ",") + out := make(map[string]int64, len(ss)) + for _, pair := range ss { + kv := strings.SplitN(pair, "=", 2) + if len(kv) != 2 { + return fmt.Errorf("%s must be formatted as key=value", pair) + } + var err error + out[kv[0]], err = strconv.ParseInt(kv[1], 10, 64) + if err != nil { + return err + } + } + if !s.changed { + *s.value = out + } else { + for k, v := range out { + (*s.value)[k] = v + } + } + s.changed = true + return nil +} + +func (s *stringToInt64Value) Type() string { + return "stringToInt64" +} + +func (s *stringToInt64Value) String() string { + var buf bytes.Buffer + i := 0 + for k, v := range *s.value { + if i > 0 { + buf.WriteRune(',') + } + buf.WriteString(k) + buf.WriteRune('=') + buf.WriteString(strconv.FormatInt(v, 10)) + i++ + } + return "[" + buf.String() + "]" +} + +func stringToInt64Conv(val string) (interface{}, error) { + val = strings.Trim(val, "[]") + // An empty string would cause an empty map + if len(val) == 0 { + return map[string]int64{}, nil + } + ss := strings.Split(val, ",") + out := make(map[string]int64, len(ss)) + for _, pair := range ss { + kv := strings.SplitN(pair, "=", 2) + if len(kv) != 2 { + return nil, fmt.Errorf("%s must be formatted as key=value", pair) + } + var err error + out[kv[0]], err = strconv.ParseInt(kv[1], 10, 64) + if err != nil { + return nil, err + } + } + return out, nil +} + +// GetStringToInt64 return the map[string]int64 value of a flag with the given name +func (f *FlagSet) GetStringToInt64(name string) (map[string]int64, error) { + val, err := f.getFlagType(name, "stringToInt64", stringToInt64Conv) + if err != nil { + return map[string]int64{}, err + } + return val.(map[string]int64), nil +} + +// StringToInt64Var defines a string flag with specified name, default value, and usage string. +// The argument p point64s to a map[string]int64 variable in which to store the values of the multiple flags. +// The value of each argument will not try to be separated by comma +func (f *FlagSet) StringToInt64Var(p *map[string]int64, name string, value map[string]int64, usage string) { + f.VarP(newStringToInt64Value(value, p), name, "", usage) +} + +// StringToInt64VarP is like StringToInt64Var, but accepts a shorthand letter that can be used after a single dash. +func (f *FlagSet) StringToInt64VarP(p *map[string]int64, name, shorthand string, value map[string]int64, usage string) { + f.VarP(newStringToInt64Value(value, p), name, shorthand, usage) +} + +// StringToInt64Var defines a string flag with specified name, default value, and usage string. +// The argument p point64s to a map[string]int64 variable in which to store the value of the flag. 
+// The value of each argument will not try to be separated by comma +func StringToInt64Var(p *map[string]int64, name string, value map[string]int64, usage string) { + CommandLine.VarP(newStringToInt64Value(value, p), name, "", usage) +} + +// StringToInt64VarP is like StringToInt64Var, but accepts a shorthand letter that can be used after a single dash. +func StringToInt64VarP(p *map[string]int64, name, shorthand string, value map[string]int64, usage string) { + CommandLine.VarP(newStringToInt64Value(value, p), name, shorthand, usage) +} + +// StringToInt64 defines a string flag with specified name, default value, and usage string. +// The return value is the address of a map[string]int64 variable that stores the value of the flag. +// The value of each argument will not try to be separated by comma +func (f *FlagSet) StringToInt64(name string, value map[string]int64, usage string) *map[string]int64 { + p := map[string]int64{} + f.StringToInt64VarP(&p, name, "", value, usage) + return &p +} + +// StringToInt64P is like StringToInt64, but accepts a shorthand letter that can be used after a single dash. +func (f *FlagSet) StringToInt64P(name, shorthand string, value map[string]int64, usage string) *map[string]int64 { + p := map[string]int64{} + f.StringToInt64VarP(&p, name, shorthand, value, usage) + return &p +} + +// StringToInt64 defines a string flag with specified name, default value, and usage string. +// The return value is the address of a map[string]int64 variable that stores the value of the flag. +// The value of each argument will not try to be separated by comma +func StringToInt64(name string, value map[string]int64, usage string) *map[string]int64 { + return CommandLine.StringToInt64P(name, "", value, usage) +} + +// StringToInt64P is like StringToInt64, but accepts a shorthand letter that can be used after a single dash. 
+func StringToInt64P(name, shorthand string, value map[string]int64, usage string) *map[string]int64 { + return CommandLine.StringToInt64P(name, shorthand, value, usage) +} diff --git a/vendor/github.com/spf13/pflag/string_to_string.go b/vendor/github.com/spf13/pflag/string_to_string.go new file mode 100644 index 000000000..890a01afc --- /dev/null +++ b/vendor/github.com/spf13/pflag/string_to_string.go @@ -0,0 +1,160 @@ +package pflag + +import ( + "bytes" + "encoding/csv" + "fmt" + "strings" +) + +// -- stringToString Value +type stringToStringValue struct { + value *map[string]string + changed bool +} + +func newStringToStringValue(val map[string]string, p *map[string]string) *stringToStringValue { + ssv := new(stringToStringValue) + ssv.value = p + *ssv.value = val + return ssv +} + +// Format: a=1,b=2 +func (s *stringToStringValue) Set(val string) error { + var ss []string + n := strings.Count(val, "=") + switch n { + case 0: + return fmt.Errorf("%s must be formatted as key=value", val) + case 1: + ss = append(ss, strings.Trim(val, `"`)) + default: + r := csv.NewReader(strings.NewReader(val)) + var err error + ss, err = r.Read() + if err != nil { + return err + } + } + + out := make(map[string]string, len(ss)) + for _, pair := range ss { + kv := strings.SplitN(pair, "=", 2) + if len(kv) != 2 { + return fmt.Errorf("%s must be formatted as key=value", pair) + } + out[kv[0]] = kv[1] + } + if !s.changed { + *s.value = out + } else { + for k, v := range out { + (*s.value)[k] = v + } + } + s.changed = true + return nil +} + +func (s *stringToStringValue) Type() string { + return "stringToString" +} + +func (s *stringToStringValue) String() string { + records := make([]string, 0, len(*s.value)>>1) + for k, v := range *s.value { + records = append(records, k+"="+v) + } + + var buf bytes.Buffer + w := csv.NewWriter(&buf) + if err := w.Write(records); err != nil { + panic(err) + } + w.Flush() + return "[" + strings.TrimSpace(buf.String()) + "]" +} + +func stringToStringConv(val string) (interface{}, error) { + val = strings.Trim(val, "[]") + // An empty string would cause an empty map + if len(val) == 0 { + return map[string]string{}, nil + } + r := csv.NewReader(strings.NewReader(val)) + ss, err := r.Read() + if err != nil { + return nil, err + } + out := make(map[string]string, len(ss)) + for _, pair := range ss { + kv := strings.SplitN(pair, "=", 2) + if len(kv) != 2 { + return nil, fmt.Errorf("%s must be formatted as key=value", pair) + } + out[kv[0]] = kv[1] + } + return out, nil +} + +// GetStringToString return the map[string]string value of a flag with the given name +func (f *FlagSet) GetStringToString(name string) (map[string]string, error) { + val, err := f.getFlagType(name, "stringToString", stringToStringConv) + if err != nil { + return map[string]string{}, err + } + return val.(map[string]string), nil +} + +// StringToStringVar defines a string flag with specified name, default value, and usage string. +// The argument p points to a map[string]string variable in which to store the values of the multiple flags. +// The value of each argument will not try to be separated by comma +func (f *FlagSet) StringToStringVar(p *map[string]string, name string, value map[string]string, usage string) { + f.VarP(newStringToStringValue(value, p), name, "", usage) +} + +// StringToStringVarP is like StringToStringVar, but accepts a shorthand letter that can be used after a single dash. 
+func (f *FlagSet) StringToStringVarP(p *map[string]string, name, shorthand string, value map[string]string, usage string) { + f.VarP(newStringToStringValue(value, p), name, shorthand, usage) +} + +// StringToStringVar defines a string flag with specified name, default value, and usage string. +// The argument p points to a map[string]string variable in which to store the value of the flag. +// The value of each argument will not try to be separated by comma +func StringToStringVar(p *map[string]string, name string, value map[string]string, usage string) { + CommandLine.VarP(newStringToStringValue(value, p), name, "", usage) +} + +// StringToStringVarP is like StringToStringVar, but accepts a shorthand letter that can be used after a single dash. +func StringToStringVarP(p *map[string]string, name, shorthand string, value map[string]string, usage string) { + CommandLine.VarP(newStringToStringValue(value, p), name, shorthand, usage) +} + +// StringToString defines a string flag with specified name, default value, and usage string. +// The return value is the address of a map[string]string variable that stores the value of the flag. +// The value of each argument will not try to be separated by comma +func (f *FlagSet) StringToString(name string, value map[string]string, usage string) *map[string]string { + p := map[string]string{} + f.StringToStringVarP(&p, name, "", value, usage) + return &p +} + +// StringToStringP is like StringToString, but accepts a shorthand letter that can be used after a single dash. +func (f *FlagSet) StringToStringP(name, shorthand string, value map[string]string, usage string) *map[string]string { + p := map[string]string{} + f.StringToStringVarP(&p, name, shorthand, value, usage) + return &p +} + +// StringToString defines a string flag with specified name, default value, and usage string. +// The return value is the address of a map[string]string variable that stores the value of the flag. +// The value of each argument will not try to be separated by comma +func StringToString(name string, value map[string]string, usage string) *map[string]string { + return CommandLine.StringToStringP(name, "", value, usage) +} + +// StringToStringP is like StringToString, but accepts a shorthand letter that can be used after a single dash. 
+func StringToStringP(name, shorthand string, value map[string]string, usage string) *map[string]string { + return CommandLine.StringToStringP(name, shorthand, value, usage) +} diff --git a/vendor/github.com/spf13/pflag/uint.go b/vendor/github.com/spf13/pflag/uint.go new file mode 100644 index 000000000..dcbc2b758 --- /dev/null +++ b/vendor/github.com/spf13/pflag/uint.go @@ -0,0 +1,88 @@ +package pflag + +import "strconv" + +// -- uint Value +type uintValue uint + +func newUintValue(val uint, p *uint) *uintValue { + *p = val + return (*uintValue)(p) +} + +func (i *uintValue) Set(s string) error { + v, err := strconv.ParseUint(s, 0, 64) + *i = uintValue(v) + return err +} + +func (i *uintValue) Type() string { + return "uint" +} + +func (i *uintValue) String() string { return strconv.FormatUint(uint64(*i), 10) } + +func uintConv(sval string) (interface{}, error) { + v, err := strconv.ParseUint(sval, 0, 0) + if err != nil { + return 0, err + } + return uint(v), nil +} + +// GetUint return the uint value of a flag with the given name +func (f *FlagSet) GetUint(name string) (uint, error) { + val, err := f.getFlagType(name, "uint", uintConv) + if err != nil { + return 0, err + } + return val.(uint), nil +} + +// UintVar defines a uint flag with specified name, default value, and usage string. +// The argument p points to a uint variable in which to store the value of the flag. +func (f *FlagSet) UintVar(p *uint, name string, value uint, usage string) { + f.VarP(newUintValue(value, p), name, "", usage) +} + +// UintVarP is like UintVar, but accepts a shorthand letter that can be used after a single dash. +func (f *FlagSet) UintVarP(p *uint, name, shorthand string, value uint, usage string) { + f.VarP(newUintValue(value, p), name, shorthand, usage) +} + +// UintVar defines a uint flag with specified name, default value, and usage string. +// The argument p points to a uint variable in which to store the value of the flag. +func UintVar(p *uint, name string, value uint, usage string) { + CommandLine.VarP(newUintValue(value, p), name, "", usage) +} + +// UintVarP is like UintVar, but accepts a shorthand letter that can be used after a single dash. +func UintVarP(p *uint, name, shorthand string, value uint, usage string) { + CommandLine.VarP(newUintValue(value, p), name, shorthand, usage) +} + +// Uint defines a uint flag with specified name, default value, and usage string. +// The return value is the address of a uint variable that stores the value of the flag. +func (f *FlagSet) Uint(name string, value uint, usage string) *uint { + p := new(uint) + f.UintVarP(p, name, "", value, usage) + return p +} + +// UintP is like Uint, but accepts a shorthand letter that can be used after a single dash. +func (f *FlagSet) UintP(name, shorthand string, value uint, usage string) *uint { + p := new(uint) + f.UintVarP(p, name, shorthand, value, usage) + return p +} + +// Uint defines a uint flag with specified name, default value, and usage string. +// The return value is the address of a uint variable that stores the value of the flag. +func Uint(name string, value uint, usage string) *uint { + return CommandLine.UintP(name, "", value, usage) +} + +// UintP is like Uint, but accepts a shorthand letter that can be used after a single dash. 
+func UintP(name, shorthand string, value uint, usage string) *uint { + return CommandLine.UintP(name, shorthand, value, usage) +} diff --git a/vendor/github.com/spf13/pflag/uint16.go b/vendor/github.com/spf13/pflag/uint16.go new file mode 100644 index 000000000..7e9914edd --- /dev/null +++ b/vendor/github.com/spf13/pflag/uint16.go @@ -0,0 +1,88 @@ +package pflag + +import "strconv" + +// -- uint16 value +type uint16Value uint16 + +func newUint16Value(val uint16, p *uint16) *uint16Value { + *p = val + return (*uint16Value)(p) +} + +func (i *uint16Value) Set(s string) error { + v, err := strconv.ParseUint(s, 0, 16) + *i = uint16Value(v) + return err +} + +func (i *uint16Value) Type() string { + return "uint16" +} + +func (i *uint16Value) String() string { return strconv.FormatUint(uint64(*i), 10) } + +func uint16Conv(sval string) (interface{}, error) { + v, err := strconv.ParseUint(sval, 0, 16) + if err != nil { + return 0, err + } + return uint16(v), nil +} + +// GetUint16 return the uint16 value of a flag with the given name +func (f *FlagSet) GetUint16(name string) (uint16, error) { + val, err := f.getFlagType(name, "uint16", uint16Conv) + if err != nil { + return 0, err + } + return val.(uint16), nil +} + +// Uint16Var defines a uint flag with specified name, default value, and usage string. +// The argument p points to a uint variable in which to store the value of the flag. +func (f *FlagSet) Uint16Var(p *uint16, name string, value uint16, usage string) { + f.VarP(newUint16Value(value, p), name, "", usage) +} + +// Uint16VarP is like Uint16Var, but accepts a shorthand letter that can be used after a single dash. +func (f *FlagSet) Uint16VarP(p *uint16, name, shorthand string, value uint16, usage string) { + f.VarP(newUint16Value(value, p), name, shorthand, usage) +} + +// Uint16Var defines a uint flag with specified name, default value, and usage string. +// The argument p points to a uint variable in which to store the value of the flag. +func Uint16Var(p *uint16, name string, value uint16, usage string) { + CommandLine.VarP(newUint16Value(value, p), name, "", usage) +} + +// Uint16VarP is like Uint16Var, but accepts a shorthand letter that can be used after a single dash. +func Uint16VarP(p *uint16, name, shorthand string, value uint16, usage string) { + CommandLine.VarP(newUint16Value(value, p), name, shorthand, usage) +} + +// Uint16 defines a uint flag with specified name, default value, and usage string. +// The return value is the address of a uint variable that stores the value of the flag. +func (f *FlagSet) Uint16(name string, value uint16, usage string) *uint16 { + p := new(uint16) + f.Uint16VarP(p, name, "", value, usage) + return p +} + +// Uint16P is like Uint16, but accepts a shorthand letter that can be used after a single dash. +func (f *FlagSet) Uint16P(name, shorthand string, value uint16, usage string) *uint16 { + p := new(uint16) + f.Uint16VarP(p, name, shorthand, value, usage) + return p +} + +// Uint16 defines a uint flag with specified name, default value, and usage string. +// The return value is the address of a uint variable that stores the value of the flag. +func Uint16(name string, value uint16, usage string) *uint16 { + return CommandLine.Uint16P(name, "", value, usage) +} + +// Uint16P is like Uint16, but accepts a shorthand letter that can be used after a single dash. 
+func Uint16P(name, shorthand string, value uint16, usage string) *uint16 { + return CommandLine.Uint16P(name, shorthand, value, usage) +} diff --git a/vendor/github.com/spf13/pflag/uint32.go b/vendor/github.com/spf13/pflag/uint32.go new file mode 100644 index 000000000..d8024539b --- /dev/null +++ b/vendor/github.com/spf13/pflag/uint32.go @@ -0,0 +1,88 @@ +package pflag + +import "strconv" + +// -- uint32 value +type uint32Value uint32 + +func newUint32Value(val uint32, p *uint32) *uint32Value { + *p = val + return (*uint32Value)(p) +} + +func (i *uint32Value) Set(s string) error { + v, err := strconv.ParseUint(s, 0, 32) + *i = uint32Value(v) + return err +} + +func (i *uint32Value) Type() string { + return "uint32" +} + +func (i *uint32Value) String() string { return strconv.FormatUint(uint64(*i), 10) } + +func uint32Conv(sval string) (interface{}, error) { + v, err := strconv.ParseUint(sval, 0, 32) + if err != nil { + return 0, err + } + return uint32(v), nil +} + +// GetUint32 return the uint32 value of a flag with the given name +func (f *FlagSet) GetUint32(name string) (uint32, error) { + val, err := f.getFlagType(name, "uint32", uint32Conv) + if err != nil { + return 0, err + } + return val.(uint32), nil +} + +// Uint32Var defines a uint32 flag with specified name, default value, and usage string. +// The argument p points to a uint32 variable in which to store the value of the flag. +func (f *FlagSet) Uint32Var(p *uint32, name string, value uint32, usage string) { + f.VarP(newUint32Value(value, p), name, "", usage) +} + +// Uint32VarP is like Uint32Var, but accepts a shorthand letter that can be used after a single dash. +func (f *FlagSet) Uint32VarP(p *uint32, name, shorthand string, value uint32, usage string) { + f.VarP(newUint32Value(value, p), name, shorthand, usage) +} + +// Uint32Var defines a uint32 flag with specified name, default value, and usage string. +// The argument p points to a uint32 variable in which to store the value of the flag. +func Uint32Var(p *uint32, name string, value uint32, usage string) { + CommandLine.VarP(newUint32Value(value, p), name, "", usage) +} + +// Uint32VarP is like Uint32Var, but accepts a shorthand letter that can be used after a single dash. +func Uint32VarP(p *uint32, name, shorthand string, value uint32, usage string) { + CommandLine.VarP(newUint32Value(value, p), name, shorthand, usage) +} + +// Uint32 defines a uint32 flag with specified name, default value, and usage string. +// The return value is the address of a uint32 variable that stores the value of the flag. +func (f *FlagSet) Uint32(name string, value uint32, usage string) *uint32 { + p := new(uint32) + f.Uint32VarP(p, name, "", value, usage) + return p +} + +// Uint32P is like Uint32, but accepts a shorthand letter that can be used after a single dash. +func (f *FlagSet) Uint32P(name, shorthand string, value uint32, usage string) *uint32 { + p := new(uint32) + f.Uint32VarP(p, name, shorthand, value, usage) + return p +} + +// Uint32 defines a uint32 flag with specified name, default value, and usage string. +// The return value is the address of a uint32 variable that stores the value of the flag. +func Uint32(name string, value uint32, usage string) *uint32 { + return CommandLine.Uint32P(name, "", value, usage) +} + +// Uint32P is like Uint32, but accepts a shorthand letter that can be used after a single dash. 
+func Uint32P(name, shorthand string, value uint32, usage string) *uint32 { + return CommandLine.Uint32P(name, shorthand, value, usage) +} diff --git a/vendor/github.com/spf13/pflag/uint64.go b/vendor/github.com/spf13/pflag/uint64.go new file mode 100644 index 000000000..f62240f2c --- /dev/null +++ b/vendor/github.com/spf13/pflag/uint64.go @@ -0,0 +1,88 @@ +package pflag + +import "strconv" + +// -- uint64 Value +type uint64Value uint64 + +func newUint64Value(val uint64, p *uint64) *uint64Value { + *p = val + return (*uint64Value)(p) +} + +func (i *uint64Value) Set(s string) error { + v, err := strconv.ParseUint(s, 0, 64) + *i = uint64Value(v) + return err +} + +func (i *uint64Value) Type() string { + return "uint64" +} + +func (i *uint64Value) String() string { return strconv.FormatUint(uint64(*i), 10) } + +func uint64Conv(sval string) (interface{}, error) { + v, err := strconv.ParseUint(sval, 0, 64) + if err != nil { + return 0, err + } + return uint64(v), nil +} + +// GetUint64 return the uint64 value of a flag with the given name +func (f *FlagSet) GetUint64(name string) (uint64, error) { + val, err := f.getFlagType(name, "uint64", uint64Conv) + if err != nil { + return 0, err + } + return val.(uint64), nil +} + +// Uint64Var defines a uint64 flag with specified name, default value, and usage string. +// The argument p points to a uint64 variable in which to store the value of the flag. +func (f *FlagSet) Uint64Var(p *uint64, name string, value uint64, usage string) { + f.VarP(newUint64Value(value, p), name, "", usage) +} + +// Uint64VarP is like Uint64Var, but accepts a shorthand letter that can be used after a single dash. +func (f *FlagSet) Uint64VarP(p *uint64, name, shorthand string, value uint64, usage string) { + f.VarP(newUint64Value(value, p), name, shorthand, usage) +} + +// Uint64Var defines a uint64 flag with specified name, default value, and usage string. +// The argument p points to a uint64 variable in which to store the value of the flag. +func Uint64Var(p *uint64, name string, value uint64, usage string) { + CommandLine.VarP(newUint64Value(value, p), name, "", usage) +} + +// Uint64VarP is like Uint64Var, but accepts a shorthand letter that can be used after a single dash. +func Uint64VarP(p *uint64, name, shorthand string, value uint64, usage string) { + CommandLine.VarP(newUint64Value(value, p), name, shorthand, usage) +} + +// Uint64 defines a uint64 flag with specified name, default value, and usage string. +// The return value is the address of a uint64 variable that stores the value of the flag. +func (f *FlagSet) Uint64(name string, value uint64, usage string) *uint64 { + p := new(uint64) + f.Uint64VarP(p, name, "", value, usage) + return p +} + +// Uint64P is like Uint64, but accepts a shorthand letter that can be used after a single dash. +func (f *FlagSet) Uint64P(name, shorthand string, value uint64, usage string) *uint64 { + p := new(uint64) + f.Uint64VarP(p, name, shorthand, value, usage) + return p +} + +// Uint64 defines a uint64 flag with specified name, default value, and usage string. +// The return value is the address of a uint64 variable that stores the value of the flag. +func Uint64(name string, value uint64, usage string) *uint64 { + return CommandLine.Uint64P(name, "", value, usage) +} + +// Uint64P is like Uint64, but accepts a shorthand letter that can be used after a single dash. 
+func Uint64P(name, shorthand string, value uint64, usage string) *uint64 { + return CommandLine.Uint64P(name, shorthand, value, usage) +} diff --git a/vendor/github.com/spf13/pflag/uint8.go b/vendor/github.com/spf13/pflag/uint8.go new file mode 100644 index 000000000..bb0e83c1f --- /dev/null +++ b/vendor/github.com/spf13/pflag/uint8.go @@ -0,0 +1,88 @@ +package pflag + +import "strconv" + +// -- uint8 Value +type uint8Value uint8 + +func newUint8Value(val uint8, p *uint8) *uint8Value { + *p = val + return (*uint8Value)(p) +} + +func (i *uint8Value) Set(s string) error { + v, err := strconv.ParseUint(s, 0, 8) + *i = uint8Value(v) + return err +} + +func (i *uint8Value) Type() string { + return "uint8" +} + +func (i *uint8Value) String() string { return strconv.FormatUint(uint64(*i), 10) } + +func uint8Conv(sval string) (interface{}, error) { + v, err := strconv.ParseUint(sval, 0, 8) + if err != nil { + return 0, err + } + return uint8(v), nil +} + +// GetUint8 return the uint8 value of a flag with the given name +func (f *FlagSet) GetUint8(name string) (uint8, error) { + val, err := f.getFlagType(name, "uint8", uint8Conv) + if err != nil { + return 0, err + } + return val.(uint8), nil +} + +// Uint8Var defines a uint8 flag with specified name, default value, and usage string. +// The argument p points to a uint8 variable in which to store the value of the flag. +func (f *FlagSet) Uint8Var(p *uint8, name string, value uint8, usage string) { + f.VarP(newUint8Value(value, p), name, "", usage) +} + +// Uint8VarP is like Uint8Var, but accepts a shorthand letter that can be used after a single dash. +func (f *FlagSet) Uint8VarP(p *uint8, name, shorthand string, value uint8, usage string) { + f.VarP(newUint8Value(value, p), name, shorthand, usage) +} + +// Uint8Var defines a uint8 flag with specified name, default value, and usage string. +// The argument p points to a uint8 variable in which to store the value of the flag. +func Uint8Var(p *uint8, name string, value uint8, usage string) { + CommandLine.VarP(newUint8Value(value, p), name, "", usage) +} + +// Uint8VarP is like Uint8Var, but accepts a shorthand letter that can be used after a single dash. +func Uint8VarP(p *uint8, name, shorthand string, value uint8, usage string) { + CommandLine.VarP(newUint8Value(value, p), name, shorthand, usage) +} + +// Uint8 defines a uint8 flag with specified name, default value, and usage string. +// The return value is the address of a uint8 variable that stores the value of the flag. +func (f *FlagSet) Uint8(name string, value uint8, usage string) *uint8 { + p := new(uint8) + f.Uint8VarP(p, name, "", value, usage) + return p +} + +// Uint8P is like Uint8, but accepts a shorthand letter that can be used after a single dash. +func (f *FlagSet) Uint8P(name, shorthand string, value uint8, usage string) *uint8 { + p := new(uint8) + f.Uint8VarP(p, name, shorthand, value, usage) + return p +} + +// Uint8 defines a uint8 flag with specified name, default value, and usage string. +// The return value is the address of a uint8 variable that stores the value of the flag. +func Uint8(name string, value uint8, usage string) *uint8 { + return CommandLine.Uint8P(name, "", value, usage) +} + +// Uint8P is like Uint8, but accepts a shorthand letter that can be used after a single dash. 
+func Uint8P(name, shorthand string, value uint8, usage string) *uint8 { + return CommandLine.Uint8P(name, shorthand, value, usage) +} diff --git a/vendor/github.com/spf13/pflag/uint_slice.go b/vendor/github.com/spf13/pflag/uint_slice.go new file mode 100644 index 000000000..5fa924835 --- /dev/null +++ b/vendor/github.com/spf13/pflag/uint_slice.go @@ -0,0 +1,168 @@ +package pflag + +import ( + "fmt" + "strconv" + "strings" +) + +// -- uintSlice Value +type uintSliceValue struct { + value *[]uint + changed bool +} + +func newUintSliceValue(val []uint, p *[]uint) *uintSliceValue { + uisv := new(uintSliceValue) + uisv.value = p + *uisv.value = val + return uisv +} + +func (s *uintSliceValue) Set(val string) error { + ss := strings.Split(val, ",") + out := make([]uint, len(ss)) + for i, d := range ss { + u, err := strconv.ParseUint(d, 10, 0) + if err != nil { + return err + } + out[i] = uint(u) + } + if !s.changed { + *s.value = out + } else { + *s.value = append(*s.value, out...) + } + s.changed = true + return nil +} + +func (s *uintSliceValue) Type() string { + return "uintSlice" +} + +func (s *uintSliceValue) String() string { + out := make([]string, len(*s.value)) + for i, d := range *s.value { + out[i] = fmt.Sprintf("%d", d) + } + return "[" + strings.Join(out, ",") + "]" +} + +func (s *uintSliceValue) fromString(val string) (uint, error) { + t, err := strconv.ParseUint(val, 10, 0) + if err != nil { + return 0, err + } + return uint(t), nil +} + +func (s *uintSliceValue) toString(val uint) string { + return fmt.Sprintf("%d", val) +} + +func (s *uintSliceValue) Append(val string) error { + i, err := s.fromString(val) + if err != nil { + return err + } + *s.value = append(*s.value, i) + return nil +} + +func (s *uintSliceValue) Replace(val []string) error { + out := make([]uint, len(val)) + for i, d := range val { + var err error + out[i], err = s.fromString(d) + if err != nil { + return err + } + } + *s.value = out + return nil +} + +func (s *uintSliceValue) GetSlice() []string { + out := make([]string, len(*s.value)) + for i, d := range *s.value { + out[i] = s.toString(d) + } + return out +} + +func uintSliceConv(val string) (interface{}, error) { + val = strings.Trim(val, "[]") + // Empty string would cause a slice with one (empty) entry + if len(val) == 0 { + return []uint{}, nil + } + ss := strings.Split(val, ",") + out := make([]uint, len(ss)) + for i, d := range ss { + u, err := strconv.ParseUint(d, 10, 0) + if err != nil { + return nil, err + } + out[i] = uint(u) + } + return out, nil +} + +// GetUintSlice returns the []uint value of a flag with the given name. +func (f *FlagSet) GetUintSlice(name string) ([]uint, error) { + val, err := f.getFlagType(name, "uintSlice", uintSliceConv) + if err != nil { + return []uint{}, err + } + return val.([]uint), nil +} + +// UintSliceVar defines a uintSlice flag with specified name, default value, and usage string. +// The argument p points to a []uint variable in which to store the value of the flag. +func (f *FlagSet) UintSliceVar(p *[]uint, name string, value []uint, usage string) { + f.VarP(newUintSliceValue(value, p), name, "", usage) +} + +// UintSliceVarP is like UintSliceVar, but accepts a shorthand letter that can be used after a single dash. +func (f *FlagSet) UintSliceVarP(p *[]uint, name, shorthand string, value []uint, usage string) { + f.VarP(newUintSliceValue(value, p), name, shorthand, usage) +} + +// UintSliceVar defines a uint[] flag with specified name, default value, and usage string. 
+// The argument p points to a uint[] variable in which to store the value of the flag. +func UintSliceVar(p *[]uint, name string, value []uint, usage string) { + CommandLine.VarP(newUintSliceValue(value, p), name, "", usage) +} + +// UintSliceVarP is like the UintSliceVar, but accepts a shorthand letter that can be used after a single dash. +func UintSliceVarP(p *[]uint, name, shorthand string, value []uint, usage string) { + CommandLine.VarP(newUintSliceValue(value, p), name, shorthand, usage) +} + +// UintSlice defines a []uint flag with specified name, default value, and usage string. +// The return value is the address of a []uint variable that stores the value of the flag. +func (f *FlagSet) UintSlice(name string, value []uint, usage string) *[]uint { + p := []uint{} + f.UintSliceVarP(&p, name, "", value, usage) + return &p +} + +// UintSliceP is like UintSlice, but accepts a shorthand letter that can be used after a single dash. +func (f *FlagSet) UintSliceP(name, shorthand string, value []uint, usage string) *[]uint { + p := []uint{} + f.UintSliceVarP(&p, name, shorthand, value, usage) + return &p +} + +// UintSlice defines a []uint flag with specified name, default value, and usage string. +// The return value is the address of a []uint variable that stores the value of the flag. +func UintSlice(name string, value []uint, usage string) *[]uint { + return CommandLine.UintSliceP(name, "", value, usage) +} + +// UintSliceP is like UintSlice, but accepts a shorthand letter that can be used after a single dash. +func UintSliceP(name, shorthand string, value []uint, usage string) *[]uint { + return CommandLine.UintSliceP(name, shorthand, value, usage) +} diff --git a/vendor/github.com/spf13/viper/.editorconfig b/vendor/github.com/spf13/viper/.editorconfig new file mode 100644 index 000000000..63afcbcdd --- /dev/null +++ b/vendor/github.com/spf13/viper/.editorconfig @@ -0,0 +1,15 @@ +root = true + +[*] +charset = utf-8 +end_of_line = lf +indent_size = 4 +indent_style = space +insert_final_newline = true +trim_trailing_whitespace = true + +[*.go] +indent_style = tab + +[{Makefile, *.mk}] +indent_style = tab diff --git a/vendor/github.com/spf13/viper/.gitignore b/vendor/github.com/spf13/viper/.gitignore new file mode 100644 index 000000000..896250839 --- /dev/null +++ b/vendor/github.com/spf13/viper/.gitignore @@ -0,0 +1,5 @@ +/.idea/ +/bin/ +/build/ +/var/ +/vendor/ diff --git a/vendor/github.com/spf13/viper/.golangci.yml b/vendor/github.com/spf13/viper/.golangci.yml new file mode 100644 index 000000000..4f970acb1 --- /dev/null +++ b/vendor/github.com/spf13/viper/.golangci.yml @@ -0,0 +1,93 @@ +run: + timeout: 5m + +linters-settings: + gci: + local-prefixes: github.com/spf13/viper + golint: + min-confidence: 0 + goimports: + local-prefixes: github.com/spf13/viper + +linters: + disable-all: true + enable: + - bodyclose + - deadcode + - dogsled + - dupl + - durationcheck + - exhaustive + - exportloopref + - gci + - goconst + - gofmt + - gofumpt + - goimports + - gomoddirectives + - goprintffuncname + - govet + - importas + - ineffassign + - makezero + - misspell + - nakedret + - nilerr + - noctx + - nolintlint + - prealloc + - predeclared + - revive + - rowserrcheck + - sqlclosecheck + - staticcheck + - structcheck + - stylecheck + - tparallel + - typecheck + - unconvert + - unparam + - unused + - varcheck + - wastedassign + - whitespace + + # fixme + # - cyclop + # - errcheck + # - errorlint + # - exhaustivestruct + # - forbidigo + # - forcetypeassert + # - gochecknoglobals + # - 
gochecknoinits + # - gocognit + # - gocritic + # - gocyclo + # - godot + # - gosec + # - gosimple + # - ifshort + # - lll + # - nlreturn + # - paralleltest + # - scopelint + # - thelper + # - wrapcheck + + # unused + # - depguard + # - goheader + # - gomodguard + + # don't enable: + # - asciicheck + # - funlen + # - godox + # - goerr113 + # - gomnd + # - interfacer + # - maligned + # - nestif + # - testpackage + # - wsl diff --git a/vendor/github.com/spf13/viper/LICENSE b/vendor/github.com/spf13/viper/LICENSE new file mode 100644 index 000000000..4527efb9c --- /dev/null +++ b/vendor/github.com/spf13/viper/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014 Steve Francia + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. \ No newline at end of file diff --git a/vendor/github.com/spf13/viper/Makefile b/vendor/github.com/spf13/viper/Makefile new file mode 100644 index 000000000..b0f9acf24 --- /dev/null +++ b/vendor/github.com/spf13/viper/Makefile @@ -0,0 +1,76 @@ +# A Self-Documenting Makefile: http://marmelab.com/blog/2016/02/29/auto-documented-makefile.html + +OS = $(shell uname | tr A-Z a-z) +export PATH := $(abspath bin/):${PATH} + +# Build variables +BUILD_DIR ?= build +export CGO_ENABLED ?= 0 +export GOOS = $(shell go env GOOS) +ifeq (${VERBOSE}, 1) +ifeq ($(filter -v,${GOARGS}),) + GOARGS += -v +endif +TEST_FORMAT = short-verbose +endif + +# Dependency versions +GOTESTSUM_VERSION = 1.6.4 +GOLANGCI_VERSION = 1.40.1 + +# Add the ability to override some variables +# Use with care +-include override.mk + +.PHONY: clear +clear: ## Clear the working area and the project + rm -rf bin/ + +.PHONY: check +check: test lint ## Run tests and linters + +bin/gotestsum: bin/gotestsum-${GOTESTSUM_VERSION} + @ln -sf gotestsum-${GOTESTSUM_VERSION} bin/gotestsum +bin/gotestsum-${GOTESTSUM_VERSION}: + @mkdir -p bin + curl -L https://github.com/gotestyourself/gotestsum/releases/download/v${GOTESTSUM_VERSION}/gotestsum_${GOTESTSUM_VERSION}_${OS}_amd64.tar.gz | tar -zOxf - gotestsum > ./bin/gotestsum-${GOTESTSUM_VERSION} && chmod +x ./bin/gotestsum-${GOTESTSUM_VERSION} + +TEST_PKGS ?= ./... +.PHONY: test +test: TEST_FORMAT ?= short +test: SHELL = /bin/bash +test: export CGO_ENABLED=1 +test: bin/gotestsum ## Run tests + @mkdir -p ${BUILD_DIR} + bin/gotestsum --no-summary=skipped --junitfile ${BUILD_DIR}/coverage.xml --format ${TEST_FORMAT} -- -race -coverprofile=${BUILD_DIR}/coverage.txt -covermode=atomic $(filter-out -v,${GOARGS}) $(if ${TEST_PKGS},${TEST_PKGS},./...) 
+ +bin/golangci-lint: bin/golangci-lint-${GOLANGCI_VERSION} + @ln -sf golangci-lint-${GOLANGCI_VERSION} bin/golangci-lint +bin/golangci-lint-${GOLANGCI_VERSION}: + @mkdir -p bin + curl -sfL https://install.goreleaser.com/github.com/golangci/golangci-lint.sh | bash -s -- -b ./bin/ v${GOLANGCI_VERSION} + @mv bin/golangci-lint "$@" + +.PHONY: lint +lint: bin/golangci-lint ## Run linter + bin/golangci-lint run + +.PHONY: fix +fix: bin/golangci-lint ## Fix lint violations + bin/golangci-lint run --fix + +# Add custom targets here +-include custom.mk + +.PHONY: list +list: ## List all make targets + @${MAKE} -pRrn : -f $(MAKEFILE_LIST) 2>/dev/null | awk -v RS= -F: '/^# File/,/^# Finished Make data base/ {if ($$1 !~ "^[#.]") {print $$1}}' | egrep -v -e '^[^[:alnum:]]' -e '^$@$$' | sort + +.PHONY: help +.DEFAULT_GOAL := help +help: + @grep -h -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}' + +# Variable outputting/exporting rules +var-%: ; @echo $($*) +varexport-%: ; @echo $*=$($*) diff --git a/vendor/github.com/spf13/viper/README.md b/vendor/github.com/spf13/viper/README.md new file mode 100644 index 000000000..f409b1519 --- /dev/null +++ b/vendor/github.com/spf13/viper/README.md @@ -0,0 +1,865 @@ +> ## Viper v2 feedback +> Viper is heading towards v2 and we would love to hear what _**you**_ would like to see in it. Share your thoughts here: https://forms.gle/R6faU74qPRPAzchZ9 +> +> **Thank you!** + +![Viper](.github/logo.png?raw=true) + +[![Mentioned in Awesome Go](https://awesome.re/mentioned-badge-flat.svg)](https://github.com/avelino/awesome-go#configuration) +[![run on repl.it](https://repl.it/badge/github/sagikazarmark/Viper-example)](https://repl.it/@sagikazarmark/Viper-example#main.go) + +[![GitHub Workflow Status](https://img.shields.io/github/workflow/status/spf13/viper/CI?style=flat-square)](https://github.com/spf13/viper/actions?query=workflow%3ACI) +[![Join the chat at https://gitter.im/spf13/viper](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/spf13/viper?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge) +[![Go Report Card](https://goreportcard.com/badge/github.com/spf13/viper?style=flat-square)](https://goreportcard.com/report/github.com/spf13/viper) +![Go Version](https://img.shields.io/badge/go%20version-%3E=1.14-61CFDD.svg?style=flat-square) +[![PkgGoDev](https://pkg.go.dev/badge/mod/github.com/spf13/viper)](https://pkg.go.dev/mod/github.com/spf13/viper) + +**Go configuration with fangs!** + +Many Go projects are built using Viper including: + +* [Hugo](http://gohugo.io) +* [EMC RexRay](http://rexray.readthedocs.org/en/stable/) +* [Imgur’s Incus](https://github.com/Imgur/incus) +* [Nanobox](https://github.com/nanobox-io/nanobox)/[Nanopack](https://github.com/nanopack) +* [Docker Notary](https://github.com/docker/Notary) +* [BloomApi](https://www.bloomapi.com/) +* [doctl](https://github.com/digitalocean/doctl) +* [Clairctl](https://github.com/jgsqware/clairctl) +* [Mercure](https://mercure.rocks) + + +## Install + +```shell +go get github.com/spf13/viper +``` + +**Note:** Viper uses [Go Modules](https://github.com/golang/go/wiki/Modules) to manage dependencies. + + +## What is Viper? + +Viper is a complete configuration solution for Go applications including 12-Factor apps. It is designed +to work within an application, and can handle all types of configuration needs +and formats. 
It supports: + +* setting defaults +* reading from JSON, TOML, YAML, HCL, envfile and Java properties config files +* live watching and re-reading of config files (optional) +* reading from environment variables +* reading from remote config systems (etcd or Consul), and watching changes +* reading from command line flags +* reading from buffer +* setting explicit values + +Viper can be thought of as a registry for all of your applications configuration needs. + + +## Why Viper? + +When building a modern application, you don’t want to worry about +configuration file formats; you want to focus on building awesome software. +Viper is here to help with that. + +Viper does the following for you: + +1. Find, load, and unmarshal a configuration file in JSON, TOML, YAML, HCL, INI, envfile or Java properties formats. +2. Provide a mechanism to set default values for your different configuration options. +3. Provide a mechanism to set override values for options specified through command line flags. +4. Provide an alias system to easily rename parameters without breaking existing code. +5. Make it easy to tell the difference between when a user has provided a command line or config file which is the same as the default. + +Viper uses the following precedence order. Each item takes precedence over the item below it: + + * explicit call to `Set` + * flag + * env + * config + * key/value store + * default + +**Important:** Viper configuration keys are case insensitive. +There are ongoing discussions about making that optional. + + +## Putting Values into Viper + +### Establishing Defaults + +A good configuration system will support default values. A default value is not +required for a key, but it’s useful in the event that a key hasn't been set via +config file, environment variable, remote configuration or flag. + +Examples: + +```go +viper.SetDefault("ContentDir", "content") +viper.SetDefault("LayoutDir", "layouts") +viper.SetDefault("Taxonomies", map[string]string{"tag": "tags", "category": "categories"}) +``` + +### Reading Config Files + +Viper requires minimal configuration so it knows where to look for config files. +Viper supports JSON, TOML, YAML, HCL, INI, envfile and Java Properties files. Viper can search multiple paths, but +currently a single Viper instance only supports a single configuration file. +Viper does not default to any configuration search paths leaving defaults decision +to an application. + +Here is an example of how to use Viper to search for and read a configuration file. +None of the specific paths are required, but at least one path should be provided +where a configuration file is expected. 
+ +```go +viper.SetConfigName("config") // name of config file (without extension) +viper.SetConfigType("yaml") // REQUIRED if the config file does not have the extension in the name +viper.AddConfigPath("/etc/appname/") // path to look for the config file in +viper.AddConfigPath("$HOME/.appname") // call multiple times to add many search paths +viper.AddConfigPath(".") // optionally look for config in the working directory +err := viper.ReadInConfig() // Find and read the config file +if err != nil { // Handle errors reading the config file + panic(fmt.Errorf("Fatal error config file: %w \n", err)) +} +``` + +You can handle the specific case where no config file is found like this: + +```go +if err := viper.ReadInConfig(); err != nil { + if _, ok := err.(viper.ConfigFileNotFoundError); ok { + // Config file not found; ignore error if desired + } else { + // Config file was found but another error was produced + } +} + +// Config file found and successfully parsed +``` + +*NOTE [since 1.6]:* You can also have a file without an extension and specify the format programmaticaly. For those configuration files that lie in the home of the user without any extension like `.bashrc` + +### Writing Config Files + +Reading from config files is useful, but at times you want to store all modifications made at run time. +For that, a bunch of commands are available, each with its own purpose: + +* WriteConfig - writes the current viper configuration to the predefined path, if exists. Errors if no predefined path. Will overwrite the current config file, if it exists. +* SafeWriteConfig - writes the current viper configuration to the predefined path. Errors if no predefined path. Will not overwrite the current config file, if it exists. +* WriteConfigAs - writes the current viper configuration to the given filepath. Will overwrite the given file, if it exists. +* SafeWriteConfigAs - writes the current viper configuration to the given filepath. Will not overwrite the given file, if it exists. + +As a rule of the thumb, everything marked with safe won't overwrite any file, but just create if not existent, whilst the default behavior is to create or truncate. + +A small examples section: + +```go +viper.WriteConfig() // writes current config to predefined path set by 'viper.AddConfigPath()' and 'viper.SetConfigName' +viper.SafeWriteConfig() +viper.WriteConfigAs("/path/to/my/.config") +viper.SafeWriteConfigAs("/path/to/my/.config") // will error since it has already been written +viper.SafeWriteConfigAs("/path/to/my/.other_config") +``` + +### Watching and re-reading config files + +Viper supports the ability to have your application live read a config file while running. + +Gone are the days of needing to restart a server to have a config take effect, +viper powered applications can read an update to a config file while running and +not miss a beat. + +Simply tell the viper instance to watchConfig. +Optionally you can provide a function for Viper to run each time a change occurs. + +**Make sure you add all of the configPaths prior to calling `WatchConfig()`** + +```go +viper.WatchConfig() +viper.OnConfigChange(func(e fsnotify.Event) { + fmt.Println("Config file changed:", e.Name) +}) +``` + +### Reading Config from io.Reader + +Viper predefines many configuration sources such as files, environment +variables, flags, and remote K/V store, but you are not bound to them. You can +also implement your own required configuration source and feed it to viper. 
+ +```go +viper.SetConfigType("yaml") // or viper.SetConfigType("YAML") + +// any approach to require this configuration into your program. +var yamlExample = []byte(` +Hacker: true +name: steve +hobbies: +- skateboarding +- snowboarding +- go +clothing: + jacket: leather + trousers: denim +age: 35 +eyes : brown +beard: true +`) + +viper.ReadConfig(bytes.NewBuffer(yamlExample)) + +viper.Get("name") // this would be "steve" +``` + +### Setting Overrides + +These could be from a command line flag, or from your own application logic. + +```go +viper.Set("Verbose", true) +viper.Set("LogFile", LogFile) +``` + +### Registering and Using Aliases + +Aliases permit a single value to be referenced by multiple keys + +```go +viper.RegisterAlias("loud", "Verbose") + +viper.Set("verbose", true) // same result as next line +viper.Set("loud", true) // same result as prior line + +viper.GetBool("loud") // true +viper.GetBool("verbose") // true +``` + +### Working with Environment Variables + +Viper has full support for environment variables. This enables 12 factor +applications out of the box. There are five methods that exist to aid working +with ENV: + + * `AutomaticEnv()` + * `BindEnv(string...) : error` + * `SetEnvPrefix(string)` + * `SetEnvKeyReplacer(string...) *strings.Replacer` + * `AllowEmptyEnv(bool)` + +_When working with ENV variables, it’s important to recognize that Viper +treats ENV variables as case sensitive._ + +Viper provides a mechanism to try to ensure that ENV variables are unique. By +using `SetEnvPrefix`, you can tell Viper to use a prefix while reading from +the environment variables. Both `BindEnv` and `AutomaticEnv` will use this +prefix. + +`BindEnv` takes one or more parameters. The first parameter is the key name, the +rest are the name of the environment variables to bind to this key. If more than +one are provided, they will take precedence in the specified order. The name of +the environment variable is case sensitive. If the ENV variable name is not provided, then +Viper will automatically assume that the ENV variable matches the following format: prefix + "_" + the key name in ALL CAPS. When you explicitly provide the ENV variable name (the second parameter), +it **does not** automatically add the prefix. For example if the second parameter is "id", +Viper will look for the ENV variable "ID". + +One important thing to recognize when working with ENV variables is that the +value will be read each time it is accessed. Viper does not fix the value when +the `BindEnv` is called. + +`AutomaticEnv` is a powerful helper especially when combined with +`SetEnvPrefix`. When called, Viper will check for an environment variable any +time a `viper.Get` request is made. It will apply the following rules. It will +check for an environment variable with a name matching the key uppercased and +prefixed with the `EnvPrefix` if set. + +`SetEnvKeyReplacer` allows you to use a `strings.Replacer` object to rewrite Env +keys to an extent. This is useful if you want to use `-` or something in your +`Get()` calls, but want your environmental variables to use `_` delimiters. An +example of using it can be found in `viper_test.go`. + +Alternatively, you can use `EnvKeyReplacer` with `NewWithOptions` factory function. +Unlike `SetEnvKeyReplacer`, it accepts a `StringReplacer` interface allowing you to write custom string replacing logic. + +By default empty environment variables are considered unset and will fall back to +the next configuration source. 
To treat empty environment variables as set, use +the `AllowEmptyEnv` method. + +#### Env example + +```go +SetEnvPrefix("spf") // will be uppercased automatically +BindEnv("id") + +os.Setenv("SPF_ID", "13") // typically done outside of the app + +id := Get("id") // 13 +``` + +### Working with Flags + +Viper has the ability to bind to flags. Specifically, Viper supports `Pflags` +as used in the [Cobra](https://github.com/spf13/cobra) library. + +Like `BindEnv`, the value is not set when the binding method is called, but when +it is accessed. This means you can bind as early as you want, even in an +`init()` function. + +For individual flags, the `BindPFlag()` method provides this functionality. + +Example: + +```go +serverCmd.Flags().Int("port", 1138, "Port to run Application server on") +viper.BindPFlag("port", serverCmd.Flags().Lookup("port")) +``` + +You can also bind an existing set of pflags (pflag.FlagSet): + +Example: + +```go +pflag.Int("flagname", 1234, "help message for flagname") + +pflag.Parse() +viper.BindPFlags(pflag.CommandLine) + +i := viper.GetInt("flagname") // retrieve values from viper instead of pflag +``` + +The use of [pflag](https://github.com/spf13/pflag/) in Viper does not preclude +the use of other packages that use the [flag](https://golang.org/pkg/flag/) +package from the standard library. The pflag package can handle the flags +defined for the flag package by importing these flags. This is accomplished +by a calling a convenience function provided by the pflag package called +AddGoFlagSet(). + +Example: + +```go +package main + +import ( + "flag" + "github.com/spf13/pflag" +) + +func main() { + + // using standard library "flag" package + flag.Int("flagname", 1234, "help message for flagname") + + pflag.CommandLine.AddGoFlagSet(flag.CommandLine) + pflag.Parse() + viper.BindPFlags(pflag.CommandLine) + + i := viper.GetInt("flagname") // retrieve value from viper + + ... +} +``` + +#### Flag interfaces + +Viper provides two Go interfaces to bind other flag systems if you don’t use `Pflags`. + +`FlagValue` represents a single flag. This is a very simple example on how to implement this interface: + +```go +type myFlag struct {} +func (f myFlag) HasChanged() bool { return false } +func (f myFlag) Name() string { return "my-flag-name" } +func (f myFlag) ValueString() string { return "my-flag-value" } +func (f myFlag) ValueType() string { return "string" } +``` + +Once your flag implements this interface, you can simply tell Viper to bind it: + +```go +viper.BindFlagValue("my-flag-name", myFlag{}) +``` + +`FlagValueSet` represents a group of flags. This is a very simple example on how to implement this interface: + +```go +type myFlagSet struct { + flags []myFlag +} + +func (f myFlagSet) VisitAll(fn func(FlagValue)) { + for _, flag := range flags { + fn(flag) + } +} +``` + +Once your flag set implements this interface, you can simply tell Viper to bind it: + +```go +fSet := myFlagSet{ + flags: []myFlag{myFlag{}, myFlag{}}, +} +viper.BindFlagValues("my-flags", fSet) +``` + +### Remote Key/Value Store Support + +To enable remote support in Viper, do a blank import of the `viper/remote` +package: + +`import _ "github.com/spf13/viper/remote"` + +Viper will read a config string (as JSON, TOML, YAML, HCL or envfile) retrieved from a path +in a Key/Value store such as etcd or Consul. These values take precedence over +default values, but are overridden by configuration values retrieved from disk, +flags, or environment variables. 
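+
+As a quick, illustrative sketch of that precedence (assuming, purely for example, an etcd instance at the default address holding `/config/hugo.json` with a `title` key):
+
+```go
+import _ "github.com/spf13/viper/remote"
+
+viper.AddRemoteProvider("etcd", "http://127.0.0.1:4001", "/config/hugo.json")
+viper.SetConfigType("json")
+err := viper.ReadRemoteConfig()      // "title" is now backed by the key/value store
+
+viper.Set("title", "local override") // an explicit Set outranks the remote value
+fmt.Println(viper.Get("title"))      // prints "local override"
+```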
+ +Viper uses [crypt](https://github.com/bketelsen/crypt) to retrieve +configuration from the K/V store, which means that you can store your +configuration values encrypted and have them automatically decrypted if you have +the correct gpg keyring. Encryption is optional. + +You can use remote configuration in conjunction with local configuration, or +independently of it. + +`crypt` has a command-line helper that you can use to put configurations in your +K/V store. `crypt` defaults to etcd on http://127.0.0.1:4001. + +```bash +$ go get github.com/bketelsen/crypt/bin/crypt +$ crypt set -plaintext /config/hugo.json /Users/hugo/settings/config.json +``` + +Confirm that your value was set: + +```bash +$ crypt get -plaintext /config/hugo.json +``` + +See the `crypt` documentation for examples of how to set encrypted values, or +how to use Consul. + +### Remote Key/Value Store Example - Unencrypted + +#### etcd +```go +viper.AddRemoteProvider("etcd", "http://127.0.0.1:4001","/config/hugo.json") +viper.SetConfigType("json") // because there is no file extension in a stream of bytes, supported extensions are "json", "toml", "yaml", "yml", "properties", "props", "prop", "env", "dotenv" +err := viper.ReadRemoteConfig() +``` + +#### Consul +You need to set a key to Consul key/value storage with JSON value containing your desired config. +For example, create a Consul key/value store key `MY_CONSUL_KEY` with value: + +```json +{ + "port": 8080, + "hostname": "myhostname.com" +} +``` + +```go +viper.AddRemoteProvider("consul", "localhost:8500", "MY_CONSUL_KEY") +viper.SetConfigType("json") // Need to explicitly set this to json +err := viper.ReadRemoteConfig() + +fmt.Println(viper.Get("port")) // 8080 +fmt.Println(viper.Get("hostname")) // myhostname.com +``` + +#### Firestore + +```go +viper.AddRemoteProvider("firestore", "google-cloud-project-id", "collection/document") +viper.SetConfigType("json") // Config's format: "json", "toml", "yaml", "yml" +err := viper.ReadRemoteConfig() +``` + +Of course, you're allowed to use `SecureRemoteProvider` also + +### Remote Key/Value Store Example - Encrypted + +```go +viper.AddSecureRemoteProvider("etcd","http://127.0.0.1:4001","/config/hugo.json","/etc/secrets/mykeyring.gpg") +viper.SetConfigType("json") // because there is no file extension in a stream of bytes, supported extensions are "json", "toml", "yaml", "yml", "properties", "props", "prop", "env", "dotenv" +err := viper.ReadRemoteConfig() +``` + +### Watching Changes in etcd - Unencrypted + +```go +// alternatively, you can create a new viper instance. +var runtime_viper = viper.New() + +runtime_viper.AddRemoteProvider("etcd", "http://127.0.0.1:4001", "/config/hugo.yml") +runtime_viper.SetConfigType("yaml") // because there is no file extension in a stream of bytes, supported extensions are "json", "toml", "yaml", "yml", "properties", "props", "prop", "env", "dotenv" + +// read from remote config the first time. +err := runtime_viper.ReadRemoteConfig() + +// unmarshal config +runtime_viper.Unmarshal(&runtime_conf) + +// open a goroutine to watch remote changes forever +go func(){ + for { + time.Sleep(time.Second * 5) // delay after each request + + // currently, only tested with etcd support + err := runtime_viper.WatchRemoteConfig() + if err != nil { + log.Errorf("unable to read remote config: %v", err) + continue + } + + // unmarshal new config into our runtime config struct. 
you can also use channel + // to implement a signal to notify the system of the changes + runtime_viper.Unmarshal(&runtime_conf) + } +}() +``` + +## Getting Values From Viper + +In Viper, there are a few ways to get a value depending on the value’s type. +The following functions and methods exist: + + * `Get(key string) : interface{}` + * `GetBool(key string) : bool` + * `GetFloat64(key string) : float64` + * `GetInt(key string) : int` + * `GetIntSlice(key string) : []int` + * `GetString(key string) : string` + * `GetStringMap(key string) : map[string]interface{}` + * `GetStringMapString(key string) : map[string]string` + * `GetStringSlice(key string) : []string` + * `GetTime(key string) : time.Time` + * `GetDuration(key string) : time.Duration` + * `IsSet(key string) : bool` + * `AllSettings() : map[string]interface{}` + +One important thing to recognize is that each Get function will return a zero +value if it’s not found. To check if a given key exists, the `IsSet()` method +has been provided. + +Example: +```go +viper.GetString("logfile") // case-insensitive Setting & Getting +if viper.GetBool("verbose") { + fmt.Println("verbose enabled") +} +``` +### Accessing nested keys + +The accessor methods also accept formatted paths to deeply nested keys. For +example, if the following JSON file is loaded: + +```json +{ + "host": { + "address": "localhost", + "port": 5799 + }, + "datastore": { + "metric": { + "host": "127.0.0.1", + "port": 3099 + }, + "warehouse": { + "host": "198.0.0.1", + "port": 2112 + } + } +} + +``` + +Viper can access a nested field by passing a `.` delimited path of keys: + +```go +GetString("datastore.metric.host") // (returns "127.0.0.1") +``` + +This obeys the precedence rules established above; the search for the path +will cascade through the remaining configuration registries until found. + +For example, given this configuration file, both `datastore.metric.host` and +`datastore.metric.port` are already defined (and may be overridden). If in addition +`datastore.metric.protocol` was defined in the defaults, Viper would also find it. + +However, if `datastore.metric` was overridden (by a flag, an environment variable, +the `Set()` method, …) with an immediate value, then all sub-keys of +`datastore.metric` become undefined, they are “shadowed” by the higher-priority +configuration level. + +Viper can access array indices by using numbers in the path. For example: + +```json +{ + "host": { + "address": "localhost", + "ports": [ + 5799, + 6029 + ] + }, + "datastore": { + "metric": { + "host": "127.0.0.1", + "port": 3099 + }, + "warehouse": { + "host": "198.0.0.1", + "port": 2112 + } + } +} + +GetInt("host.ports.1") // returns 6029 + +``` + +Lastly, if there exists a key that matches the delimited key path, its value +will be returned instead. E.g. + +```json +{ + "datastore.metric.host": "0.0.0.0", + "host": { + "address": "localhost", + "port": 5799 + }, + "datastore": { + "metric": { + "host": "127.0.0.1", + "port": 3099 + }, + "warehouse": { + "host": "198.0.0.1", + "port": 2112 + } + } +} + +GetString("datastore.metric.host") // returns "0.0.0.0" +``` + +### Extracting a sub-tree + +When developing reusable modules, it's often useful to extract a subset of the configuration +and pass it to a module. This way the module can be instantiated more than once, with different configurations. 
+ +For example, an application might use multiple different cache stores for different purposes: + +```yaml +cache: + cache1: + max-items: 100 + item-size: 64 + cache2: + max-items: 200 + item-size: 80 +``` + +We could pass the cache name to a module (eg. `NewCache("cache1")`), +but it would require weird concatenation for accessing config keys and would be less separated from the global config. + +So instead of doing that let's pass a Viper instance to the constructor that represents a subset of the configuration: + +```go +cache1Config := viper.Sub("cache.cache1") +if cache1Config == nil { // Sub returns nil if the key cannot be found + panic("cache configuration not found") +} + +cache1 := NewCache(cache1Config) +``` + +**Note:** Always check the return value of `Sub`. It returns `nil` if a key cannot be found. + +Internally, the `NewCache` function can address `max-items` and `item-size` keys directly: + +```go +func NewCache(v *Viper) *Cache { + return &Cache{ + MaxItems: v.GetInt("max-items"), + ItemSize: v.GetInt("item-size"), + } +} +``` + +The resulting code is easy to test, since it's decoupled from the main config structure, +and easier to reuse (for the same reason). + + +### Unmarshaling + +You also have the option of Unmarshaling all or a specific value to a struct, map, +etc. + +There are two methods to do this: + + * `Unmarshal(rawVal interface{}) : error` + * `UnmarshalKey(key string, rawVal interface{}) : error` + +Example: + +```go +type config struct { + Port int + Name string + PathMap string `mapstructure:"path_map"` +} + +var C config + +err := viper.Unmarshal(&C) +if err != nil { + t.Fatalf("unable to decode into struct, %v", err) +} +``` + +If you want to unmarshal configuration where the keys themselves contain dot (the default key delimiter), +you have to change the delimiter: + +```go +v := viper.NewWithOptions(viper.KeyDelimiter("::")) + +v.SetDefault("chart::values", map[string]interface{}{ + "ingress": map[string]interface{}{ + "annotations": map[string]interface{}{ + "traefik.frontend.rule.type": "PathPrefix", + "traefik.ingress.kubernetes.io/ssl-redirect": "true", + }, + }, +}) + +type config struct { + Chart struct{ + Values map[string]interface{} + } +} + +var C config + +v.Unmarshal(&C) +``` + +Viper also supports unmarshaling into embedded structs: + +```go +/* +Example config: + +module: + enabled: true + token: 89h3f98hbwf987h3f98wenf89ehf +*/ +type config struct { + Module struct { + Enabled bool + + moduleConfig `mapstructure:",squash"` + } +} + +// moduleConfig could be in a module specific package +type moduleConfig struct { + Token string +} + +var C config + +err := viper.Unmarshal(&C) +if err != nil { + t.Fatalf("unable to decode into struct, %v", err) +} +``` + +Viper uses [github.com/mitchellh/mapstructure](https://github.com/mitchellh/mapstructure) under the hood for unmarshaling values which uses `mapstructure` tags by default. + +### Marshalling to string + +You may need to marshal all the settings held in viper into a string rather than write them to a file. +You can use your favorite format's marshaller with the config returned by `AllSettings()`. + +```go +import ( + yaml "gopkg.in/yaml.v2" + // ... +) + +func yamlStringSettings() string { + c := viper.AllSettings() + bs, err := yaml.Marshal(c) + if err != nil { + log.Fatalf("unable to marshal config to YAML: %v", err) + } + return string(bs) +} +``` + +## Viper or Vipers? + +Viper comes ready to use out of the box. 
There is no configuration or +initialization needed to begin using Viper. Since most applications will want +to use a single central repository for their configuration, the viper package +provides this. It is similar to a singleton. + +In all of the examples above, they demonstrate using viper in its singleton +style approach. + +### Working with multiple vipers + +You can also create many different vipers for use in your application. Each will +have its own unique set of configurations and values. Each can read from a +different config file, key value store, etc. All of the functions that viper +package supports are mirrored as methods on a viper. + +Example: + +```go +x := viper.New() +y := viper.New() + +x.SetDefault("ContentDir", "content") +y.SetDefault("ContentDir", "foobar") + +//... +``` + +When working with multiple vipers, it is up to the user to keep track of the +different vipers. + + +## Q & A + +### Why is it called “Viper”? + +A: Viper is designed to be a [companion](http://en.wikipedia.org/wiki/Viper_(G.I._Joe)) +to [Cobra](https://github.com/spf13/cobra). While both can operate completely +independently, together they make a powerful pair to handle much of your +application foundation needs. + +### Why is it called “Cobra”? + +Is there a better name for a [commander](http://en.wikipedia.org/wiki/Cobra_Commander)? + +### Does Viper support case sensitive keys? + +**tl;dr:** No. + +Viper merges configuration from various sources, many of which are either case insensitive or uses different casing than the rest of the sources (eg. env vars). +In order to provide the best experience when using multiple sources, the decision has been made to make all keys case insensitive. + +There has been several attempts to implement case sensitivity, but unfortunately it's not that trivial. We might take a stab at implementing it in [Viper v2](https://github.com/spf13/viper/issues/772), but despite the initial noise, it does not seem to be requested that much. + +You can vote for case sensitivity by filling out this feedback form: https://forms.gle/R6faU74qPRPAzchZ9 + +### Is it safe to concurrently read and write to a viper? + +No, you will need to synchronize access to the viper yourself (for example by using the `sync` package). Concurrent reads and writes can cause a panic. + +## Troubleshooting + +See [TROUBLESHOOTING.md](TROUBLESHOOTING.md). diff --git a/vendor/github.com/spf13/viper/TROUBLESHOOTING.md b/vendor/github.com/spf13/viper/TROUBLESHOOTING.md new file mode 100644 index 000000000..096277af7 --- /dev/null +++ b/vendor/github.com/spf13/viper/TROUBLESHOOTING.md @@ -0,0 +1,23 @@ +# Troubleshooting + +## Unmarshaling doesn't work + +The most common reason for this issue is improper use of struct tags (eg. `yaml` or `json`). Viper uses [github.com/mitchellh/mapstructure](https://github.com/mitchellh/mapstructure) under the hood for unmarshaling values which uses `mapstructure` tags by default. Please refer to the library's documentation for using other struct tags. + +## Cannot find package + +Viper installation seems to fail a lot lately with the following (or a similar) error: + +``` +cannot find package "github.com/hashicorp/hcl/tree/hcl1" in any of: +/usr/local/Cellar/go/1.15.7_1/libexec/src/github.com/hashicorp/hcl/tree/hcl1 (from $GOROOT) +/Users/user/go/src/github.com/hashicorp/hcl/tree/hcl1 (from $GOPATH) +``` + +As the error message suggests, Go tries to look up dependencies in `GOPATH` mode (as it's commonly called) from the `GOPATH`. 
+Viper opted to use [Go Modules](https://github.com/golang/go/wiki/Modules) to manage its dependencies. While in many cases the two methods are interchangeable, once a dependency releases new (major) versions, `GOPATH` mode is no longer able to decide which version to use, so it'll either use one that's already present or pick a version (usually the `master` branch). + +The solution is easy: switch to using Go Modules. +Please refer to the [wiki](https://github.com/golang/go/wiki/Modules) on how to do that. + +**tl;dr* `export GO111MODULE=on` diff --git a/vendor/github.com/spf13/viper/flags.go b/vendor/github.com/spf13/viper/flags.go new file mode 100644 index 000000000..b5ddbf5d4 --- /dev/null +++ b/vendor/github.com/spf13/viper/flags.go @@ -0,0 +1,57 @@ +package viper + +import "github.com/spf13/pflag" + +// FlagValueSet is an interface that users can implement +// to bind a set of flags to viper. +type FlagValueSet interface { + VisitAll(fn func(FlagValue)) +} + +// FlagValue is an interface that users can implement +// to bind different flags to viper. +type FlagValue interface { + HasChanged() bool + Name() string + ValueString() string + ValueType() string +} + +// pflagValueSet is a wrapper around *pflag.ValueSet +// that implements FlagValueSet. +type pflagValueSet struct { + flags *pflag.FlagSet +} + +// VisitAll iterates over all *pflag.Flag inside the *pflag.FlagSet. +func (p pflagValueSet) VisitAll(fn func(flag FlagValue)) { + p.flags.VisitAll(func(flag *pflag.Flag) { + fn(pflagValue{flag}) + }) +} + +// pflagValue is a wrapper aroung *pflag.flag +// that implements FlagValue +type pflagValue struct { + flag *pflag.Flag +} + +// HasChanged returns whether the flag has changes or not. +func (p pflagValue) HasChanged() bool { + return p.flag.Changed +} + +// Name returns the name of the flag. +func (p pflagValue) Name() string { + return p.flag.Name +} + +// ValueString returns the value of the flag as a string. +func (p pflagValue) ValueString() string { + return p.flag.Value.String() +} + +// ValueType returns the type of the flag as a string. 
+func (p pflagValue) ValueType() string { + return p.flag.Value.Type() +} diff --git a/vendor/github.com/spf13/viper/go.mod b/vendor/github.com/spf13/viper/go.mod new file mode 100644 index 000000000..145e0a100 --- /dev/null +++ b/vendor/github.com/spf13/viper/go.mod @@ -0,0 +1,21 @@ +module github.com/spf13/viper + +go 1.12 + +require ( + github.com/bketelsen/crypt v0.0.4 + github.com/fsnotify/fsnotify v1.4.9 + github.com/hashicorp/hcl v1.0.0 + github.com/magiconair/properties v1.8.5 + github.com/mitchellh/mapstructure v1.4.1 + github.com/pelletier/go-toml v1.9.3 + github.com/smartystreets/goconvey v1.6.4 // indirect + github.com/spf13/afero v1.6.0 + github.com/spf13/cast v1.3.1 + github.com/spf13/jwalterweatherman v1.1.0 + github.com/spf13/pflag v1.0.5 + github.com/stretchr/testify v1.7.0 + github.com/subosito/gotenv v1.2.0 + gopkg.in/ini.v1 v1.62.0 + gopkg.in/yaml.v2 v2.4.0 +) diff --git a/vendor/github.com/spf13/viper/go.sum b/vendor/github.com/spf13/viper/go.sum new file mode 100644 index 000000000..27730e2aa --- /dev/null +++ b/vendor/github.com/spf13/viper/go.sum @@ -0,0 +1,632 @@ +cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= +cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= +cloud.google.com/go v0.38.0/go.mod h1:990N+gfupTy94rShfmMCWGDn0LpTmnzTp2qbd1dvSRU= +cloud.google.com/go v0.44.1/go.mod h1:iSa0KzasP4Uvy3f1mN/7PiObzGgflwredwwASm/v6AU= +cloud.google.com/go v0.44.2/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY= +cloud.google.com/go v0.45.1/go.mod h1:RpBamKRgapWJb87xiFSdk4g1CME7QZg3uwTez+TSTjc= +cloud.google.com/go v0.46.3/go.mod h1:a6bKKbmY7er1mI7TEI4lsAkts/mkhTSZK8w33B4RAg0= +cloud.google.com/go v0.50.0/go.mod h1:r9sluTvynVuxRIOHXQEHMFffphuXHOMZMycpNR5e6To= +cloud.google.com/go v0.52.0/go.mod h1:pXajvRH/6o3+F9jDHZWQ5PbGhn+o8w9qiu/CffaVdO4= +cloud.google.com/go v0.53.0/go.mod h1:fp/UouUEsRkN6ryDKNW/Upv/JBKnv6WDthjR6+vze6M= +cloud.google.com/go v0.54.0/go.mod h1:1rq2OEkV3YMf6n/9ZvGWI3GWw0VoqH/1x2nd8Is/bPc= +cloud.google.com/go v0.56.0/go.mod h1:jr7tqZxxKOVYizybht9+26Z/gUq7tiRzu+ACVAMbKVk= +cloud.google.com/go v0.57.0/go.mod h1:oXiQ6Rzq3RAkkY7N6t3TcE6jE+CIBBbA36lwQ1JyzZs= +cloud.google.com/go v0.62.0/go.mod h1:jmCYTdRCQuc1PHIIJ/maLInMho30T/Y0M4hTdTShOYc= +cloud.google.com/go v0.65.0/go.mod h1:O5N8zS7uWy9vkA9vayVHs65eM1ubvY4h553ofrNHObY= +cloud.google.com/go v0.72.0/go.mod h1:M+5Vjvlc2wnp6tjzE102Dw08nGShTscUx2nZMufOKPI= +cloud.google.com/go v0.74.0/go.mod h1:VV1xSbzvo+9QJOxLDaJfTjx5e+MePCpCWwvftOeQmWk= +cloud.google.com/go v0.78.0/go.mod h1:QjdrLG0uq+YwhjoVOLsS1t7TW8fs36kLs4XO5R5ECHg= +cloud.google.com/go v0.79.0/go.mod h1:3bzgcEeQlzbuEAYu4mrWhKqWjmpprinYgKJLgKHnbb8= +cloud.google.com/go v0.81.0 h1:at8Tk2zUz63cLPR0JPWm5vp77pEZmzxEQBEfRKn1VV8= +cloud.google.com/go v0.81.0/go.mod h1:mk/AM35KwGk/Nm2YSeZbxXdrNK3KZOYHmLkOqC2V6E0= +cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o= +cloud.google.com/go/bigquery v1.3.0/go.mod h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE= +cloud.google.com/go/bigquery v1.4.0/go.mod h1:S8dzgnTigyfTmLBfrtrhyYhwRxG72rYxvftPBK2Dvzc= +cloud.google.com/go/bigquery v1.5.0/go.mod h1:snEHRnqQbz117VIFhE8bmtwIDY80NLUZUMb4Nv6dBIg= +cloud.google.com/go/bigquery v1.7.0/go.mod h1://okPTzCYNXSlb24MZs83e2Do+h+VXtc4gLoIoXIAPc= +cloud.google.com/go/bigquery v1.8.0/go.mod h1:J5hqkt3O0uAFnINi6JXValWIb1v0goeZM77hZzJN/fQ= +cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE= +cloud.google.com/go/datastore v1.1.0/go.mod 
h1:umbIZjpQpHh4hmRpGhH4tLFup+FVzqBi1b3c64qFpCk= +cloud.google.com/go/firestore v1.1.0 h1:9x7Bx0A9R5/M9jibeJeZWqjeVEIxYW9fZYqB9a70/bY= +cloud.google.com/go/firestore v1.1.0/go.mod h1:ulACoGHTpvq5r8rxGJ4ddJZBZqakUQqClKRT5SZwBmk= +cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I= +cloud.google.com/go/pubsub v1.1.0/go.mod h1:EwwdRX2sKPjnvnqCa270oGRyludottCI76h+R3AArQw= +cloud.google.com/go/pubsub v1.2.0/go.mod h1:jhfEVHT8odbXTkndysNHCcx0awwzvfOlguIAii9o8iA= +cloud.google.com/go/pubsub v1.3.1/go.mod h1:i+ucay31+CNRpDW4Lu78I4xXG+O1r/MAHgjpRVR+TSU= +cloud.google.com/go/storage v1.0.0/go.mod h1:IhtSnM/ZTZV8YYJWCY8RULGVqBDmpoyjwiyrjsg+URw= +cloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0ZeosJ0Rtdos= +cloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohlUTyfDhBk= +cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RXyy7KQOVs= +cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0= +dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= +github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= +github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= +github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY= +github.com/armon/circbuf v0.0.0-20150827004946-bbbad097214e/go.mod h1:3U/XgcO3hCbHZ8TKRvWD2dDTCfh9M9ya+I9JpbB7O8o= +github.com/armon/go-metrics v0.0.0-20180917152333-f0300d1749da h1:8GUt8eRujhVEGZFFEjBj46YV4rDjvGrNxb0KMWYkL2I= +github.com/armon/go-metrics v0.0.0-20180917152333-f0300d1749da/go.mod h1:Q73ZrmVTwzkszR9V5SSuryQ31EELlFMUz1kKyl939pY= +github.com/armon/go-radix v0.0.0-20180808171621-7fddfc383310/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8= +github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs= +github.com/bketelsen/crypt v0.0.4 h1:w/jqZtC9YD4DS/Vp9GhWfWcCpuAL58oTnLoI8vE9YHU= +github.com/bketelsen/crypt v0.0.4/go.mod h1:aI6NrJ0pMGgvZKL1iVgXLnfIFJtfV+bKCoqOes/6LfM= +github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= +github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= +github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI= +github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= +github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= +github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= +github.com/cncf/udpa/go v0.0.0-20200629203442-efcf912fb354/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= +github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= +github.com/coreos/go-semver v0.3.0 h1:wkHLiw0WNATZnSG7epLsujiMCgPAc9xhjJ4tgnAxmfM= +github.com/coreos/go-semver v0.3.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk= +github.com/coreos/go-systemd/v22 v22.3.2/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= +github.com/davecgh/go-spew v1.1.1/go.mod 
h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= +github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= +github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98= +github.com/envoyproxy/go-control-plane v0.9.7/go.mod h1:cwu0lG7PUMfa9snN8LXBig5ynNVH9qI8YYLbd1fK2po= +github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= +github.com/envoyproxy/go-control-plane v0.9.9-0.20210217033140-668b12f5399d/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= +github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= +github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4= +github.com/fsnotify/fsnotify v1.4.9 h1:hsms1Qyu0jgnwNXIxa+/V/PDsU6CfLf6CNO8H7IWoS4= +github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ= +github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= +github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= +github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= +github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= +github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA= +github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= +github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= +github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e h1:1r7pUrabqp18hOBcwBwiTsbnFeTZHV9eER/QT5JVZxY= +github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= +github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= +github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y= +github.com/golang/mock v1.4.0/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= +github.com/golang/mock v1.4.1/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= +github.com/golang/mock v1.4.3/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= +github.com/golang/mock v1.4.4/go.mod h1:l3mdAwkq5BuhzHwde/uurv3sEJeZMXNpwsxVWU71h+4= +github.com/golang/mock v1.5.0/go.mod h1:CWnOUgYIOo4TcNZ0wHX3YZCqsaM1I1Jvs6v3mP3KVu8= +github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= +github.com/golang/protobuf v1.3.4/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= +github.com/golang/protobuf v1.3.5/go.mod h1:6O5/vntMXwX2lRkT1hjjk0nAC1IDOTvTlVgjlRvqsdk= 
+github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8= +github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA= +github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs= +github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w= +github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0= +github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QDs8UjoX8= +github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= +github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= +github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= +github.com/golang/protobuf v1.5.1/go.mod h1:DopwsBzvsk0Fs44TXzsVbJyPhcCPeIwnvohx4u74HPM= +github.com/golang/protobuf v1.5.2 h1:ROPKBNFfQgOUMifHyP+KYbvpjbdoFNs+aK7DXlji0Tw= +github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= +github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= +github.com/google/btree v1.0.0 h1:0udJVsspx3VBr5FwtLhQQtuAsVc79tTq0ocGIPAU6qo= +github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= +github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= +github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= +github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= +github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.4.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.5 h1:Khx7svrCpmxxtHBq5j2mp/xVjsi8hQMfNLvJFAlrGgU= +github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= +github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= +github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= +github.com/google/martian/v3 v3.1.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= +github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= +github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= +github.com/google/pprof v0.0.0-20191218002539-d4f498aebedc/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200212024743-f11f1df84d12/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200229191704-1ebb73c60ed3/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200430221834-fc25d7d30c6d/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= 
+github.com/google/pprof v0.0.0-20200708004538-1a94d8640e99/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20201023163331-3e6fc7fc9c4c/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20201203190320-1bf35d6f28c2/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20210122040257-d980be63207e/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/pprof v0.0.0-20210226084205-cbba55b83ad5/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= +github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= +github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= +github.com/googleapis/gax-go/v2 v2.0.5 h1:sjZBwGj9Jlw33ImPtvFviGYvseOtDM7hkSKB7+Tv3SM= +github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= +github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1 h1:EGx4pi6eqNxGaHF6qqu48+N2wcFQ5qg5FXgOdqsJ5d8= +github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= +github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw= +github.com/hashicorp/consul/api v1.1.0 h1:BNQPM9ytxj6jbjjdRPioQ94T6YXriSopn0i8COv6SRA= +github.com/hashicorp/consul/api v1.1.0/go.mod h1:VmuI/Lkw1nC05EYQWNKwWGbkg+FbDBtguAZLlVdkD9Q= +github.com/hashicorp/consul/sdk v0.1.1 h1:LnuDWGNsoajlhGyHJvuWW6FVqRl8JOTPqS6CPTsYjhY= +github.com/hashicorp/consul/sdk v0.1.1/go.mod h1:VKf9jXwCTEY1QZP2MOLRhb5i/I/ssyNV1vwHyQBF0x8= +github.com/hashicorp/errwrap v1.0.0 h1:hLrqtEDnRye3+sgx6z4qVLNuviH3MR5aQ0ykNJa/UYA= +github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= +github.com/hashicorp/go-cleanhttp v0.5.1 h1:dH3aiDG9Jvb5r5+bYHsikaOUIpcM0xvgMXVoDkXMzJM= +github.com/hashicorp/go-cleanhttp v0.5.1/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80= +github.com/hashicorp/go-immutable-radix v1.0.0 h1:AKDB1HM5PWEA7i4nhcpwOrO2byshxBjXVn/J/3+z5/0= +github.com/hashicorp/go-immutable-radix v1.0.0/go.mod h1:0y9vanUI8NX6FsYoO3zeMjhV/C5i9g4Q3DwcSNZ4P60= +github.com/hashicorp/go-msgpack v0.5.3 h1:zKjpN5BK/P5lMYrLmBHdBULWbJ0XpYR+7NGzqkZzoD4= +github.com/hashicorp/go-msgpack v0.5.3/go.mod h1:ahLV/dePpqEmjfWmKiqvPkv/twdG7iPBM1vqhUKIvfM= +github.com/hashicorp/go-multierror v1.0.0 h1:iVjPR7a6H0tWELX5NxNe7bYopibicUzc7uPribsnS6o= +github.com/hashicorp/go-multierror v1.0.0/go.mod h1:dHtQlpGsu+cZNNAkkCN/P3hoUDHhCYQXV3UM06sGGrk= +github.com/hashicorp/go-rootcerts v1.0.0 h1:Rqb66Oo1X/eSV1x66xbDccZjhJigjg0+e82kpwzSwCI= +github.com/hashicorp/go-rootcerts v1.0.0/go.mod h1:K6zTfqpRlCUIjkwsN4Z+hiSfzSTQa6eBIzfwKfwNnHU= +github.com/hashicorp/go-sockaddr v1.0.0 h1:GeH6tui99pF4NJgfnhp+L6+FfobzVW3Ah46sLo0ICXs= +github.com/hashicorp/go-sockaddr v1.0.0/go.mod h1:7Xibr9yA9JjQq1JpNB2Vw7kxv8xerXegt+ozgdvDeDU= +github.com/hashicorp/go-syslog v1.0.0/go.mod h1:qPfqrKkXGihmCqbJM2mZgkZGvKG1dFdvsLplgctolz4= +github.com/hashicorp/go-uuid v1.0.0/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= +github.com/hashicorp/go-uuid v1.0.1 h1:fv1ep09latC32wFoVwnqcnKJGnMSdBanPczbHAYm1BE= +github.com/hashicorp/go-uuid v1.0.1/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= +github.com/hashicorp/go.net v0.0.1/go.mod h1:hjKkEWcCURg++eb33jQU7oqQcI9XDCnUzHA0oac0k90= +github.com/hashicorp/golang-lru v0.5.0/go.mod 
h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= +github.com/hashicorp/golang-lru v0.5.1 h1:0hERBMJE1eitiLkihrMvRVBYAkpHzc/J3QdDN+dAcgU= +github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= +github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4= +github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ= +github.com/hashicorp/logutils v1.0.0/go.mod h1:QIAnNjmIWmVIIkWDTG1z5v++HQmx9WQRO+LraFDTW64= +github.com/hashicorp/mdns v1.0.0/go.mod h1:tL+uN++7HEJ6SQLQ2/p+z2pH24WQKWjBPkE0mNTz8vQ= +github.com/hashicorp/memberlist v0.1.3 h1:EmmoJme1matNzb+hMpDuR/0sbJSUisxyqBGG676r31M= +github.com/hashicorp/memberlist v0.1.3/go.mod h1:ajVTdAv/9Im8oMAAj5G31PhhMCZJV2pPBoIllUwCN7I= +github.com/hashicorp/serf v0.8.2 h1:YZ7UKsJv+hKjqGVUUbtE3HNj79Eln2oQ75tniF6iPt0= +github.com/hashicorp/serf v0.8.2/go.mod h1:6hOLApaqBFA1NXqRQAsxw9QxuDEvNxSQRwA/JwenrHc= +github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= +github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= +github.com/json-iterator/go v1.1.11 h1:uVUAXhF2To8cbw/3xN3pxj6kk7TYKs98NIrTqPlMWAQ= +github.com/json-iterator/go v1.1.11/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= +github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= +github.com/jstemmer/go-junit-report v0.9.1 h1:6QPYqodiu3GuPL+7mfx+NwDdp2eTkp9IfEUpgAwUN0o= +github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk= +github.com/jtolds/gls v4.20.0+incompatible h1:xdiiI2gbIgH/gLH7ADydsJ1uDOEzR8yvV7C0MuV77Wo= +github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU= +github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= +github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= +github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg= +github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI= +github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= +github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= +github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE= +github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= +github.com/magiconair/properties v1.8.5 h1:b6kJs+EmPFMYGkow9GiUyCyOvIwYetYJ3fSaWak/Gls= +github.com/magiconair/properties v1.8.5/go.mod h1:y3VJvCyxH9uVvJTWEGAELF3aiYNyPKd5NZ3oSwXrF60= +github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU= +github.com/mattn/go-isatty v0.0.3/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4= +github.com/miekg/dns v1.0.14 h1:9jZdLNd/P4+SfEJ0TNyxYpsK8N4GtfylBLqtbYN1sbA= +github.com/miekg/dns v1.0.14/go.mod h1:W1PPwlIAgtquWBMBEV9nkV9Cazfe8ScdGz/Lj7v3Nrg= +github.com/mitchellh/cli v1.0.0/go.mod h1:hNIlj7HEI86fIcpObd7a0FcrxTWetlwJDGcceTlRvqc= +github.com/mitchellh/go-homedir v1.0.0 h1:vKb8ShqSby24Yrqr/yDYkuFz8d0WUjys40rvnGC8aR0= +github.com/mitchellh/go-homedir v1.0.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= +github.com/mitchellh/go-testing-interface v1.0.0 h1:fzU/JVNcaqHQEcVFAKeR41fkiLdIPrefOvVG1VZ96U0= +github.com/mitchellh/go-testing-interface v1.0.0/go.mod h1:kRemZodwjscx+RGhAo8eIhFbs2+BFgRtFPeD/KE+zxI= 
+github.com/mitchellh/gox v0.4.0/go.mod h1:Sd9lOJ0+aimLBi73mGofS1ycjY8lL3uZM3JPS42BGNg= +github.com/mitchellh/iochan v1.0.0/go.mod h1:JwYml1nuB7xOzsp52dPpHFffvOCDupsG0QubkSMEySY= +github.com/mitchellh/mapstructure v0.0.0-20160808181253-ca63d7c062ee/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= +github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= +github.com/mitchellh/mapstructure v1.4.1 h1:CpVNEelQCZBooIPDn+AR3NpivK/TIKU8bDxdASFVQag= +github.com/mitchellh/mapstructure v1.4.1/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= +github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421 h1:ZqeYNhU3OHLH3mGKHDcjJRFFRrJa6eAM5H+CtDdOsPc= +github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= +github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= +github.com/modern-go/reflect2 v1.0.1 h1:9f412s+6RmYXLWZSEzVVgPGK7C2PphHj5RJrvfx9AWI= +github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= +github.com/pascaldekloe/goe v0.0.0-20180627143212-57f6aae5913c h1:Lgl0gzECD8GnQ5QCWA8o6BtfL6mDH5rQgM4/fX3avOs= +github.com/pascaldekloe/goe v0.0.0-20180627143212-57f6aae5913c/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc= +github.com/pelletier/go-toml v1.9.3 h1:zeC5b1GviRUyKYd6OJPvBU/mcVDVoL1OhT17FCt5dSQ= +github.com/pelletier/go-toml v1.9.3/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c= +github.com/pkg/errors v0.8.1 h1:iURUrRGxPUNPdy5/HRSm+Yj6okJ6UtLINN0Q9M4+h3I= +github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pkg/sftp v1.10.1/go.mod h1:lYOWFsE0bwd1+KfKJaKeuokY15vzFx25BLbzYYoAxZI= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/posener/complete v1.1.1/go.mod h1:em0nMJCgc9GFtwrmVmEMR/ZL6WyhyjMBndrE9hABlRI= +github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ= +github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= +github.com/ryanuber/columnize v0.0.0-20160712163229-9b3edd62028f/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts= +github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529 h1:nn5Wsu0esKSJiIVhscUtVbo7ada43DJhG55ua/hjS5I= +github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc= +github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d h1:zE9ykElWQ6/NYmHa3jpm/yHnI4xSofP+UP6SpjHcSeM= +github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc= +github.com/smartystreets/goconvey v1.6.4 h1:fv0U8FUIMPNf1L9lnHLvLhgicrIVChEkdzIKYqbNC9s= +github.com/smartystreets/goconvey v1.6.4/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA= +github.com/spf13/afero v1.6.0 h1:xoax2sJ2DT8S8xA2paPFjDCScCNeWsg75VG0DLRreiY= +github.com/spf13/afero v1.6.0/go.mod h1:Ai8FlHk4v/PARR026UzYexafAt9roJ7LcLMAmO6Z93I= +github.com/spf13/cast v1.3.1 h1:nFm6S0SMdyzrzcmThSipiEubIDy8WEXKNZ0UOgiRpng= +github.com/spf13/cast v1.3.1/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= +github.com/spf13/jwalterweatherman v1.1.0 
h1:ue6voC5bR5F8YxI5S67j9i582FU4Qvo2bmqnqMYADFk= +github.com/spf13/jwalterweatherman v1.1.0/go.mod h1:aNWZUN0dPAAO/Ljvb5BEdw96iTZ0EXowPYD95IqWIGo= +github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= +github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= +github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= +github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= +github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= +github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY= +github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/subosito/gotenv v1.2.0 h1:Slr1R9HxAlEKefgq5jn9U+DnETlIUa6HfgEzj0g5d7s= +github.com/subosito/gotenv v1.2.0/go.mod h1:N0PQaV/YGNqwC0u51sEeR/aUtSLEXKX9iv69rRypqCw= +github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= +go.etcd.io/etcd/api/v3 v3.5.0 h1:GsV3S+OfZEOCNXdtNkBSR7kgLobAa/SO6tCxRa0GAYw= +go.etcd.io/etcd/api/v3 v3.5.0/go.mod h1:cbVKeC6lCfl7j/8jBhAK6aIYO9XOjdptoxU/nLQcPvs= +go.etcd.io/etcd/client/pkg/v3 v3.5.0 h1:2aQv6F436YnN7I4VbI8PPYrBhu+SmrTaADcf8Mi/6PU= +go.etcd.io/etcd/client/pkg/v3 v3.5.0/go.mod h1:IJHfcCEKxYu1Os13ZdwCwIUTUVGYTSAM3YSwc9/Ac1g= +go.etcd.io/etcd/client/v2 v2.305.0 h1:ftQ0nOOHMcbMS3KIaDQ0g5Qcd6bhaBrQT6b89DfwLTs= +go.etcd.io/etcd/client/v2 v2.305.0/go.mod h1:h9puh54ZTgAKtEbut2oe9P4L/oqKCVB6xsXlzd7alYQ= +go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= +go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8= +go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= +go.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= +go.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= +go.opencensus.io v0.22.5/go.mod h1:5pWMHQbX5EPX2/62yrJeAkowc+lfs/XD7Uxpq3pI6kk= +go.opencensus.io v0.23.0 h1:gqCw0LfLxScz8irSi8exQc7fyQ0fKQU/qnC/X8+V/1M= +go.opencensus.io v0.23.0/go.mod h1:XItmlyltB5F7CS4xOC1DcqMoFqwtC6OG2xF7mCv7P7E= +go.uber.org/atomic v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc= +go.uber.org/multierr v1.6.0/go.mod h1:cdWPpRnG4AhwMwsgIHip0KRBQjJy5kYEpYjJxpXp9iU= +go.uber.org/zap v1.17.0/go.mod h1:MXVU+bhUf/A7Xi2HNOnopQOrmycQ5Ih87HtOu4q5SSo= +golang.org/x/crypto v0.0.0-20181029021203-45a5f77698d3/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto 
v0.0.0-20190820162420-60c769a6c586/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9 h1:psW17arqaxU48Z5kZ0CQnkZWQJsqcURM6tKiBApRjXI= +golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= +golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= +golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= +golang.org/x/exp v0.0.0-20190829153037-c13cbed26979/go.mod h1:86+5VVa7VpoJ4kLfm080zCjGlMRFzhUhsZKEZO7MGek= +golang.org/x/exp v0.0.0-20191030013958-a1ab85dbe136/go.mod h1:JXzH8nQsPlswgeRAPE3MuO9GYsAcnJvJ4vnMwN/5qkY= +golang.org/x/exp v0.0.0-20191129062945-2f5052295587/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= +golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= +golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= +golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM= +golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU= +golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= +golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= +golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= +golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= +golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= +golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20190409202823-959b441ac422/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20190909230951-414d861bb4ac/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20191125180803-fdd1cda4f05f/go.mod h1:5qLYkcX4OjUUV8bRuDixDT3tpyyb+LUpUlRWLxfhWrs= +golang.org/x/lint v0.0.0-20200130185559-910be7a94367/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/lint v0.0.0-20200302205851-738671d3881b/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/lint v0.0.0-20201208152925-83fdc39ff7b5/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/lint v0.0.0-20210508222113-6edffad5e616 h1:VLliZ0d+/avPrXXH+OakdXhpJuEoBZuwh1m2j7U6Iug= +golang.org/x/lint v0.0.0-20210508222113-6edffad5e616/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE= +golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028/go.mod h1:E/iHnbuqvinMTCcRqshq8CkpyQDoeVncDDYHnLhea+o= +golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc= +golang.org/x/mod v0.1.0/go.mod h1:0QHyrYULN0/3qlju5TqG8bIK38QM8yzMo5ekMj3DlcY= +golang.org/x/mod 
v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= +golang.org/x/mod v0.1.1-0.20191107180719-034126e5016b/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= +golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.4.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.4.2 h1:Gz96sIWK3OalVv/I/qNygP42zyoKp3xptRVCWRFEBvo= +golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20181023162649-9b4f9f5ad519/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20181201002055-351d144fa1fc/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190501004415-9ce7a6920f09/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks= +golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190628185345-da137c7871d7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190724013045-ca1201d0de80/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20191209160850-c0dbc17a3553/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200114155413-6afb5195e5aa/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200202094626-16171245cfb2/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200222125558-5a598a2470a0/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200301022130-244492dfa37a/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200324143707-d3edc9973b7e/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200501053045-e0ff5e5a1de5/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200506145744-7e3656a0809f/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200513185701-a91f0712d120/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200520182314-0ba52f642ac2/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200625001655-4c5254603344/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/net v0.0.0-20200707034311-ab3426394381/go.mod 
h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20201031054903-ff519b6c9102/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20201209123823-ac852fbbde11/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20210119194325-5f4716e94777/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= +golang.org/x/net v0.0.0-20210316092652-d523dce5a7f4/go.mod h1:RBQZq4jEuRlivfhVLdyRGr576XBO4/greRjx4P4O3yc= +golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4 h1:4nGaVu0QrbjT/AK2PRLuQfQuh6DJve+pELhqTdAj3x0= +golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= +golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= +golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20191202225959-858c2ad4c8b6/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20200902213428-5d25da1a8d43/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20201109201403-9fd604954f58/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20201208152858-08078c50e5b5/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210218202405-ba52d332ba99/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210220000619-9bb904979d93/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210313182246-cd4f82c27b84/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20210402161424-2e8d93401602 h1:0Ja1LBD+yisY6RWM/BH7TJVXWsSjs2VwBSmvSX4HdBc= +golang.org/x/oauth2 v0.0.0-20210402161424-2e8d93401602/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20200317015054-43a5402ce75a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= 
+golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20210220032951-036812b2e83c h1:5KslGYwFpkhGh+Q16bwMP3cOontH8FOep7tGV86Y7SQ= +golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20181026203630-95b1ffbd15a5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191005200804-aed5e4c7ecf9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200113162924-86b910548bc1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200122134326-e047566fdf82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200202164722-d101bd2416d5/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200212091648-12a6c2dcc1e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200331124033-c3d80250170d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200501052902-10377860bb8e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200511232937-7e40ca221e25/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200515095857-1151b9dac4a9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200523222454-059865788121/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200803210538-64077c9b5642/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200905004654-be1d3432aa8f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod 
h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201201145000-ef89a241ccb3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210104204734-6f8348627aad/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210220050731-9a76102bfb43/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210305230114-8fe3ee5dd75b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210315160823-c6e025ad8005/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210320140829-1e4c9ba3b0c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210403161142-5e06dd20ab57/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210510120138-977fb7262007 h1:gG67DSER+11cZvqIMb8S8bt0vZtiN6xWYARwirrOSfE= +golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= +golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= +golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.5 h1:i6eZZ+zk0SOf0xgBpEpPD18qWcJda6q1sxt3S0kzyUQ= +golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= +golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190312151545-0bb0c0a6e846/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190328211700-ab21143f2384/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod 
h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190816200558-6889da9d5479/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191112195655-aa38f8e97acc/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191113191852-77e3bb0ad9e7/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191115202509-3a792d9c32b2/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191125144606-a911d9008d1f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191130070609-6e064ea0cf2d/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191216173652-a0e659d51361/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20191227053925-7b8e75db28f4/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200117161641-43d50277825c/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200122220014-bf1340f18c4a/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200204074204-1cc6d1ef6c74/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200207183749-b753a1ba74fa/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200212150539-ea181f53ac56/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200224181240-023911ca70b2/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200227222343-706bc42d1f0d/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200304193943-95d2e580d8eb/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= +golang.org/x/tools v0.0.0-20200312045724-11d5b4c81c7d/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= +golang.org/x/tools v0.0.0-20200331025713-a30bf2db82d4/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8= +golang.org/x/tools v0.0.0-20200501065659-ab2804fb9c9d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200512131952-2bc93b1c0c88/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200515010526-7d3b6ebf133d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200618134242-20370b0cb4b2/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200729194436-6467de6f59a7/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200804011535-6c149bb5ef0d/go.mod 
h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200825202427-b303f430e36d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200904185747-39188db58858/go.mod h1:Cj7w3i3Rnn0Xh82ur9kSqwfTHTeVxaDqrfMjpcNT6bE= +golang.org/x/tools v0.0.0-20201110124207-079ba7bd75cd/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20201201161351-ac6f37ff4c2a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20201208233053-a543418bbed2/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20210105154028-b0ab187a4818/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0= +golang.org/x/tools v0.1.2 h1:kRBLX7v7Af8W7Gdbbc908OJcdgtK8bOz9Uaj8/F1ACA= +golang.org/x/tools v0.1.2/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= +golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE= +google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M= +google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= +google.golang.org/api v0.9.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= +google.golang.org/api v0.13.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= +google.golang.org/api v0.14.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= +google.golang.org/api v0.15.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= +google.golang.org/api v0.17.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.18.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.19.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.20.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.22.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.24.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE= +google.golang.org/api v0.28.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE= +google.golang.org/api v0.29.0/go.mod h1:Lcubydp8VUV7KeIHD9z2Bys/sm/vGKnG1UHuDBSrHWM= +google.golang.org/api v0.30.0/go.mod h1:QGmEvQ87FHZNiUVJkT14jQNYJ4ZJjdRF23ZXz5138Fc= +google.golang.org/api v0.35.0/go.mod h1:/XrVsuzM0rZmrsbjJutiuftIzeuTQcEeaYcSk/mQ1dg= +google.golang.org/api v0.36.0/go.mod h1:+z5ficQTmoYpPn8LCUNVpK5I7hwkpjbcgqA7I34qYtE= +google.golang.org/api v0.40.0/go.mod h1:fYKFpnQN0DsDSKRVRcQSDQNtqWPfM9i+zNPxepjRCQ8= +google.golang.org/api v0.41.0/go.mod h1:RkxM5lITDfTzmyKFPt+wGrCJbVfniCr2ool8kTBzRTU= +google.golang.org/api v0.43.0/go.mod h1:nQsDGjRXMo4lvh5hP0TKqF244gqhGcr/YSIykhUk/94= +google.golang.org/api v0.44.0 h1:URs6qR1lAxDsqWITsQXI4ZkGiYJ5dHtRNiCpfs2OeKA= +google.golang.org/api v0.44.0/go.mod 
h1:EBOGZqzyhtvMDoxwS97ctnh0zUmYY6CxqXsc1AvkYD8= +google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= +google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= +google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= +google.golang.org/appengine v1.6.1/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0= +google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= +google.golang.org/appengine v1.6.6/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= +google.golang.org/appengine v1.6.7 h1:FZR1q0exgwxzPzp/aF+VccGrSfxfPpkBqjIIEq3ru6c= +google.golang.org/appengine v1.6.7/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= +google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= +google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190425155659-357c62f0e4bb/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190502173448-54afdca5d873/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190801165951-fa694d86fc64/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= +google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= +google.golang.org/genproto v0.0.0-20190911173649-1774047e7e51/go.mod h1:IbNlFCBrqXvoKpeg0TB2l7cyZUmoaFKYIwrEpbDKLA8= +google.golang.org/genproto v0.0.0-20191108220845-16a3f7862a1a/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20191115194625-c23dd37a84c9/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20191216164720-4f79533eabd1/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20191230161307-f3c370f40bfb/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20200115191322-ca5a22157cba/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20200122232147-0452cf42e150/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20200204135345-fa8e72b47b90/go.mod h1:GmwEX6Z4W5gMy59cAlVYjN9JhxgbQH6Gn+gFDQe2lzA= +google.golang.org/genproto v0.0.0-20200212174721-66ed5ce911ce/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200224152610-e50cd9704f63/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200228133532-8c2c7df3a383/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200305110556-506484158171/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200312145019-da6875a35672/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200331122359-1ee6d9798940/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200430143042-b979b6f78d84/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200511104702-f5ebc3bea380/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200513103714-09dca8ec2884/go.mod 
h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200515170657-fc4c6c6a6587/go.mod h1:YsZOwe1myG/8QRHRsmBRE1LrgQY60beZKjly0O1fX9U= +google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= +google.golang.org/genproto v0.0.0-20200618031413-b414f8b61790/go.mod h1:jDfRM7FcilCzHH/e9qn6dsT145K34l5v+OpcnNgKAAA= +google.golang.org/genproto v0.0.0-20200729003335-053ba62fc06f/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20200804131852-c06518451d9c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20200825200019-8632dd797987/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20200904004341-0bd0a958aa1d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201109203340-2640f1f9cdfb/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201201144952-b05cb90ed32e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201210142538-e3217bee35cc/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20201214200347-8c77b98c765d/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210222152913-aa3ee6e6a81c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210303154014-9728d6b83eeb/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210310155132-4ce2db91004e/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210319143718-93e7006c17a6/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20210402141018-6c239bbf2bb1/go.mod h1:9lPAdzaEmUacj36I+k7YKbEc5CXzPIeORRgDAUOu28A= +google.golang.org/genproto v0.0.0-20210602131652-f16073e35f0c h1:wtujag7C+4D6KMoulW9YauvK2lgdvCMS260jsqqBXr0= +google.golang.org/genproto v0.0.0-20210602131652-f16073e35f0c/go.mod h1:UODoCrxHCcBojKKwX1terBiRUaqAsFqJiF615XL43r0= +google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= +google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= +google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= +google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= +google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY= +google.golang.org/grpc v1.26.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.27.1/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.28.0/go.mod h1:rpkK4SK4GF4Ach/+MFLZUBavHOvF2JJB5uozKKal+60= +google.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3IjizoKk= +google.golang.org/grpc v1.30.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= +google.golang.org/grpc v1.31.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= +google.golang.org/grpc v1.31.1/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= +google.golang.org/grpc v1.33.1/go.mod h1:fr5YgcSWrqhRRxogOsw7RzIpsmvOZ6IcH4kBYTpR3n0= +google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc= +google.golang.org/grpc v1.34.0/go.mod 
h1:WotjhfgOW/POjDeRt8vscBtXq+2VjORFy659qA51WJ8= +google.golang.org/grpc v1.35.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= +google.golang.org/grpc v1.36.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= +google.golang.org/grpc v1.36.1/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= +google.golang.org/grpc v1.38.0 h1:/9BgsAsa5nWe26HqOlvlgJnqBuktYOLCgjCPqsa56W0= +google.golang.org/grpc v1.38.0/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM= +google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= +google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= +google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= +google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE= +google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo= +google.golang.org/protobuf v1.22.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGjtUeSXeh4= +google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= +google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= +google.golang.org/protobuf v1.26.0 h1:bxAC2xTBsZGibn2RTntX0oH50xLsqy1OxA9tTL3p/lk= +google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127 h1:qIbj1fsPNlZgppZ+VLlY7N33q108Sa+fhmuc+sWQYwY= +gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= +gopkg.in/ini.v1 v1.62.0 h1:duBzk771uxoUuOlyRLkHsygud9+5lrlGjdFBb4mSKDU= +gopkg.in/ini.v1 v1.62.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= +gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= +gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b h1:h8qDotaEPuJATrMmW04NCwg7v22aHH28wwpauUhK9Oo= +gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.1-2019.2.3/go.mod 
h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg= +honnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= +honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= +rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8= +rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0= +rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA= diff --git a/vendor/github.com/spf13/viper/util.go b/vendor/github.com/spf13/viper/util.go new file mode 100644 index 000000000..cee6b2429 --- /dev/null +++ b/vendor/github.com/spf13/viper/util.go @@ -0,0 +1,230 @@ +// Copyright © 2014 Steve Francia . +// +// Use of this source code is governed by an MIT-style +// license that can be found in the LICENSE file. + +// Viper is a application configuration system. +// It believes that applications can be configured a variety of ways +// via flags, ENVIRONMENT variables, configuration files retrieved +// from the file system, or a remote key/value store. + +package viper + +import ( + "fmt" + "os" + "path/filepath" + "runtime" + "strings" + "unicode" + + "github.com/spf13/afero" + "github.com/spf13/cast" + jww "github.com/spf13/jwalterweatherman" +) + +// ConfigParseError denotes failing to parse configuration file. +type ConfigParseError struct { + err error +} + +// Error returns the formatted configuration error. +func (pe ConfigParseError) Error() string { + return fmt.Sprintf("While parsing config: %s", pe.err.Error()) +} + +// toCaseInsensitiveValue checks if the value is a map; +// if so, create a copy and lower-case the keys recursively. +func toCaseInsensitiveValue(value interface{}) interface{} { + switch v := value.(type) { + case map[interface{}]interface{}: + value = copyAndInsensitiviseMap(cast.ToStringMap(v)) + case map[string]interface{}: + value = copyAndInsensitiviseMap(v) + } + + return value +} + +// copyAndInsensitiviseMap behaves like insensitiviseMap, but creates a copy of +// any map it makes case insensitive. 
+func copyAndInsensitiviseMap(m map[string]interface{}) map[string]interface{} { + nm := make(map[string]interface{}) + + for key, val := range m { + lkey := strings.ToLower(key) + switch v := val.(type) { + case map[interface{}]interface{}: + nm[lkey] = copyAndInsensitiviseMap(cast.ToStringMap(v)) + case map[string]interface{}: + nm[lkey] = copyAndInsensitiviseMap(v) + default: + nm[lkey] = v + } + } + + return nm +} + +func insensitiviseMap(m map[string]interface{}) { + for key, val := range m { + switch val.(type) { + case map[interface{}]interface{}: + // nested map: cast and recursively insensitivise + val = cast.ToStringMap(val) + insensitiviseMap(val.(map[string]interface{})) + case map[string]interface{}: + // nested map: recursively insensitivise + insensitiviseMap(val.(map[string]interface{})) + } + + lower := strings.ToLower(key) + if key != lower { + // remove old key (not lower-cased) + delete(m, key) + } + // update map + m[lower] = val + } +} + +func absPathify(inPath string) string { + jww.INFO.Println("Trying to resolve absolute path to", inPath) + + if inPath == "$HOME" || strings.HasPrefix(inPath, "$HOME"+string(os.PathSeparator)) { + inPath = userHomeDir() + inPath[5:] + } + + if strings.HasPrefix(inPath, "$") { + end := strings.Index(inPath, string(os.PathSeparator)) + + var value, suffix string + if end == -1 { + value = os.Getenv(inPath[1:]) + } else { + value = os.Getenv(inPath[1:end]) + suffix = inPath[end:] + } + + inPath = value + suffix + } + + if filepath.IsAbs(inPath) { + return filepath.Clean(inPath) + } + + p, err := filepath.Abs(inPath) + if err == nil { + return filepath.Clean(p) + } + + jww.ERROR.Println("Couldn't discover absolute path") + jww.ERROR.Println(err) + return "" +} + +// Check if file Exists +func exists(fs afero.Fs, path string) (bool, error) { + stat, err := fs.Stat(path) + if err == nil { + return !stat.IsDir(), nil + } + if os.IsNotExist(err) { + return false, nil + } + return false, err +} + +func stringInSlice(a string, list []string) bool { + for _, b := range list { + if b == a { + return true + } + } + return false +} + +func userHomeDir() string { + if runtime.GOOS == "windows" { + home := os.Getenv("HOMEDRIVE") + os.Getenv("HOMEPATH") + if home == "" { + home = os.Getenv("USERPROFILE") + } + return home + } + return os.Getenv("HOME") +} + +func safeMul(a, b uint) uint { + c := a * b + if a > 1 && b > 1 && c/b != a { + return 0 + } + return c +} + +// parseSizeInBytes converts strings like 1GB or 12 mb into an unsigned integer number of bytes +func parseSizeInBytes(sizeStr string) uint { + sizeStr = strings.TrimSpace(sizeStr) + lastChar := len(sizeStr) - 1 + multiplier := uint(1) + + if lastChar > 0 { + if sizeStr[lastChar] == 'b' || sizeStr[lastChar] == 'B' { + if lastChar > 1 { + switch unicode.ToLower(rune(sizeStr[lastChar-1])) { + case 'k': + multiplier = 1 << 10 + sizeStr = strings.TrimSpace(sizeStr[:lastChar-1]) + case 'm': + multiplier = 1 << 20 + sizeStr = strings.TrimSpace(sizeStr[:lastChar-1]) + case 'g': + multiplier = 1 << 30 + sizeStr = strings.TrimSpace(sizeStr[:lastChar-1]) + default: + multiplier = 1 + sizeStr = strings.TrimSpace(sizeStr[:lastChar]) + } + } + } + } + + size := cast.ToInt(sizeStr) + if size < 0 { + size = 0 + } + + return safeMul(uint(size), multiplier) +} + +// deepSearch scans deep maps, following the key indexes listed in the +// sequence "path". +// The last value is expected to be another map, and is returned. 
+// +// In case intermediate keys do not exist, or map to a non-map value, +// a new map is created and inserted, and the search continues from there: +// the initial map "m" may be modified! +func deepSearch(m map[string]interface{}, path []string) map[string]interface{} { + for _, k := range path { + m2, ok := m[k] + if !ok { + // intermediate key does not exist + // => create it and continue from there + m3 := make(map[string]interface{}) + m[k] = m3 + m = m3 + continue + } + m3, ok := m2.(map[string]interface{}) + if !ok { + // intermediate key is a value + // => replace with a new map + m3 = make(map[string]interface{}) + m[k] = m3 + } + // continue search from here + m = m3 + } + return m +} diff --git a/vendor/github.com/spf13/viper/viper.go b/vendor/github.com/spf13/viper/viper.go new file mode 100644 index 000000000..e8c04627b --- /dev/null +++ b/vendor/github.com/spf13/viper/viper.go @@ -0,0 +1,2169 @@ +// Copyright © 2014 Steve Francia . +// +// Use of this source code is governed by an MIT-style +// license that can be found in the LICENSE file. + +// Viper is an application configuration system. +// It believes that applications can be configured a variety of ways +// via flags, ENVIRONMENT variables, configuration files retrieved +// from the file system, or a remote key/value store. + +// Each item takes precedence over the item below it: + +// overrides +// flag +// env +// config +// key/value store +// default + +package viper + +import ( + "bytes" + "encoding/csv" + "encoding/json" + "errors" + "fmt" + "io" + "log" + "os" + "path/filepath" + "reflect" + "strconv" + "strings" + "sync" + "time" + + "github.com/fsnotify/fsnotify" + "github.com/hashicorp/hcl" + "github.com/hashicorp/hcl/hcl/printer" + "github.com/magiconair/properties" + "github.com/mitchellh/mapstructure" + "github.com/pelletier/go-toml" + "github.com/spf13/afero" + "github.com/spf13/cast" + jww "github.com/spf13/jwalterweatherman" + "github.com/spf13/pflag" + "github.com/subosito/gotenv" + "gopkg.in/ini.v1" + "gopkg.in/yaml.v2" +) + +// ConfigMarshalError happens when failing to marshal the configuration. +type ConfigMarshalError struct { + err error +} + +// Error returns the formatted configuration error. +func (e ConfigMarshalError) Error() string { + return fmt.Sprintf("While marshaling config: %s", e.err.Error()) +} + +var v *Viper + +type RemoteResponse struct { + Value []byte + Error error +} + +func init() { + v = New() +} + +type remoteConfigFactory interface { + Get(rp RemoteProvider) (io.Reader, error) + Watch(rp RemoteProvider) (io.Reader, error) + WatchChannel(rp RemoteProvider) (<-chan *RemoteResponse, chan bool) +} + +// RemoteConfig is optional, see the remote package +var RemoteConfig remoteConfigFactory + +// UnsupportedConfigError denotes encountering an unsupported +// configuration filetype. +type UnsupportedConfigError string + +// Error returns the formatted configuration error. +func (str UnsupportedConfigError) Error() string { + return fmt.Sprintf("Unsupported Config Type %q", string(str)) +} + +// UnsupportedRemoteProviderError denotes encountering an unsupported remote +// provider. Currently only etcd and Consul are supported. +type UnsupportedRemoteProviderError string + +// Error returns the formatted remote provider error. 
+func (str UnsupportedRemoteProviderError) Error() string { + return fmt.Sprintf("Unsupported Remote Provider Type %q", string(str)) +} + +// RemoteConfigError denotes encountering an error while trying to +// pull the configuration from the remote provider. +type RemoteConfigError string + +// Error returns the formatted remote provider error +func (rce RemoteConfigError) Error() string { + return fmt.Sprintf("Remote Configurations Error: %s", string(rce)) +} + +// ConfigFileNotFoundError denotes failing to find configuration file. +type ConfigFileNotFoundError struct { + name, locations string +} + +// Error returns the formatted configuration error. +func (fnfe ConfigFileNotFoundError) Error() string { + return fmt.Sprintf("Config File %q Not Found in %q", fnfe.name, fnfe.locations) +} + +// ConfigFileAlreadyExistsError denotes failure to write new configuration file. +type ConfigFileAlreadyExistsError string + +// Error returns the formatted error when configuration already exists. +func (faee ConfigFileAlreadyExistsError) Error() string { + return fmt.Sprintf("Config File %q Already Exists", string(faee)) +} + +// A DecoderConfigOption can be passed to viper.Unmarshal to configure +// mapstructure.DecoderConfig options +type DecoderConfigOption func(*mapstructure.DecoderConfig) + +// DecodeHook returns a DecoderConfigOption which overrides the default +// DecoderConfig.DecodeHook value, the default is: +// +// mapstructure.ComposeDecodeHookFunc( +// mapstructure.StringToTimeDurationHookFunc(), +// mapstructure.StringToSliceHookFunc(","), +// ) +func DecodeHook(hook mapstructure.DecodeHookFunc) DecoderConfigOption { + return func(c *mapstructure.DecoderConfig) { + c.DecodeHook = hook + } +} + +// Viper is a prioritized configuration registry. It +// maintains a set of configuration sources, fetches +// values to populate those, and provides them according +// to the source's priority. +// The priority of the sources is the following: +// 1. overrides +// 2. flags +// 3. env. variables +// 4. config file +// 5. key/value store +// 6. defaults +// +// For example, if values from the following sources were loaded: +// +// Defaults : { +// "secret": "", +// "user": "default", +// "endpoint": "https://localhost" +// } +// Config : { +// "user": "root" +// "secret": "defaultsecret" +// } +// Env : { +// "secret": "somesecretkey" +// } +// +// The resulting config will have the following values: +// +// { +// "secret": "somesecretkey", +// "user": "root", +// "endpoint": "https://localhost" +// } +// +// Note: Vipers are not safe for concurrent Get() and Set() operations. +type Viper struct { + // Delimiter that separates a list of keys + // used to access a nested value in one go + keyDelim string + + // A set of paths to look for the config file in + configPaths []string + + // The filesystem to read config from. 
+ fs afero.Fs + + // A set of remote providers to search for the configuration + remoteProviders []*defaultRemoteProvider + + // Name of file to look for inside the path + configName string + configFile string + configType string + configPermissions os.FileMode + envPrefix string + + // Specific commands for ini parsing + iniLoadOptions ini.LoadOptions + + automaticEnvApplied bool + envKeyReplacer StringReplacer + allowEmptyEnv bool + + config map[string]interface{} + override map[string]interface{} + defaults map[string]interface{} + kvstore map[string]interface{} + pflags map[string]FlagValue + env map[string][]string + aliases map[string]string + typeByDefValue bool + + // Store read properties on the object so that we can write back in order with comments. + // This will only be used if the configuration read is a properties file. + properties *properties.Properties + + onConfigChange func(fsnotify.Event) +} + +// New returns an initialized Viper instance. +func New() *Viper { + v := new(Viper) + v.keyDelim = "." + v.configName = "config" + v.configPermissions = os.FileMode(0644) + v.fs = afero.NewOsFs() + v.config = make(map[string]interface{}) + v.override = make(map[string]interface{}) + v.defaults = make(map[string]interface{}) + v.kvstore = make(map[string]interface{}) + v.pflags = make(map[string]FlagValue) + v.env = make(map[string][]string) + v.aliases = make(map[string]string) + v.typeByDefValue = false + + return v +} + +// Option configures Viper using the functional options paradigm popularized by Rob Pike and Dave Cheney. +// If you're unfamiliar with this style, +// see https://commandcenter.blogspot.com/2014/01/self-referential-functions-and-design.html and +// https://dave.cheney.net/2014/10/17/functional-options-for-friendly-apis. +type Option interface { + apply(v *Viper) +} + +type optionFunc func(v *Viper) + +func (fn optionFunc) apply(v *Viper) { + fn(v) +} + +// KeyDelimiter sets the delimiter used for determining key parts. +// By default it's value is ".". +func KeyDelimiter(d string) Option { + return optionFunc(func(v *Viper) { + v.keyDelim = d + }) +} + +// StringReplacer applies a set of replacements to a string. +type StringReplacer interface { + // Replace returns a copy of s with all replacements performed. + Replace(s string) string +} + +// EnvKeyReplacer sets a replacer used for mapping environment variables to internal keys. +func EnvKeyReplacer(r StringReplacer) Option { + return optionFunc(func(v *Viper) { + v.envKeyReplacer = r + }) +} + +// NewWithOptions creates a new Viper instance. +func NewWithOptions(opts ...Option) *Viper { + v := New() + + for _, opt := range opts { + opt.apply(v) + } + + return v +} + +// Reset is intended for testing, will reset all to default settings. +// In the public interface for the viper package so applications +// can use it in their testing as well. 
+func Reset() { + v = New() + SupportedExts = []string{"json", "toml", "yaml", "yml", "properties", "props", "prop", "hcl", "dotenv", "env", "ini"} + SupportedRemoteProviders = []string{"etcd", "consul", "firestore"} +} + +type defaultRemoteProvider struct { + provider string + endpoint string + path string + secretKeyring string +} + +func (rp defaultRemoteProvider) Provider() string { + return rp.provider +} + +func (rp defaultRemoteProvider) Endpoint() string { + return rp.endpoint +} + +func (rp defaultRemoteProvider) Path() string { + return rp.path +} + +func (rp defaultRemoteProvider) SecretKeyring() string { + return rp.secretKeyring +} + +// RemoteProvider stores the configuration necessary +// to connect to a remote key/value store. +// Optional secretKeyring to unencrypt encrypted values +// can be provided. +type RemoteProvider interface { + Provider() string + Endpoint() string + Path() string + SecretKeyring() string +} + +// SupportedExts are universally supported extensions. +var SupportedExts = []string{"json", "toml", "yaml", "yml", "properties", "props", "prop", "hcl", "dotenv", "env", "ini"} + +// SupportedRemoteProviders are universally supported remote providers. +var SupportedRemoteProviders = []string{"etcd", "consul", "firestore"} + +func OnConfigChange(run func(in fsnotify.Event)) { v.OnConfigChange(run) } +func (v *Viper) OnConfigChange(run func(in fsnotify.Event)) { + v.onConfigChange = run +} + +func WatchConfig() { v.WatchConfig() } + +func (v *Viper) WatchConfig() { + initWG := sync.WaitGroup{} + initWG.Add(1) + go func() { + watcher, err := newWatcher() + if err != nil { + log.Fatal(err) + } + defer watcher.Close() + // we have to watch the entire directory to pick up renames/atomic saves in a cross-platform way + filename, err := v.getConfigFile() + if err != nil { + log.Printf("error: %v\n", err) + initWG.Done() + return + } + + configFile := filepath.Clean(filename) + configDir, _ := filepath.Split(configFile) + realConfigFile, _ := filepath.EvalSymlinks(filename) + + eventsWG := sync.WaitGroup{} + eventsWG.Add(1) + go func() { + for { + select { + case event, ok := <-watcher.Events: + if !ok { // 'Events' channel is closed + eventsWG.Done() + return + } + currentConfigFile, _ := filepath.EvalSymlinks(filename) + // we only care about the config file with the following cases: + // 1 - if the config file was modified or created + // 2 - if the real path to the config file changed (eg: k8s ConfigMap replacement) + const writeOrCreateMask = fsnotify.Write | fsnotify.Create + if (filepath.Clean(event.Name) == configFile && + event.Op&writeOrCreateMask != 0) || + (currentConfigFile != "" && currentConfigFile != realConfigFile) { + realConfigFile = currentConfigFile + err := v.ReadInConfig() + if err != nil { + log.Printf("error reading config file: %v\n", err) + } + if v.onConfigChange != nil { + v.onConfigChange(event) + } + } else if filepath.Clean(event.Name) == configFile && + event.Op&fsnotify.Remove&fsnotify.Remove != 0 { + eventsWG.Done() + return + } + + case err, ok := <-watcher.Errors: + if ok { // 'Errors' channel is not closed + log.Printf("watcher error: %v\n", err) + } + eventsWG.Done() + return + } + } + }() + watcher.Add(configDir) + initWG.Done() // done initializing the watch in this go routine, so the parent routine can move on... + eventsWG.Wait() // now, wait for event loop to end in this go-routine... 
+ }() + initWG.Wait() // make sure that the go routine above fully ended before returning +} + +// SetConfigFile explicitly defines the path, name and extension of the config file. +// Viper will use this and not check any of the config paths. +func SetConfigFile(in string) { v.SetConfigFile(in) } + +func (v *Viper) SetConfigFile(in string) { + if in != "" { + v.configFile = in + } +} + +// SetEnvPrefix defines a prefix that ENVIRONMENT variables will use. +// E.g. if your prefix is "spf", the env registry will look for env +// variables that start with "SPF_". +func SetEnvPrefix(in string) { v.SetEnvPrefix(in) } + +func (v *Viper) SetEnvPrefix(in string) { + if in != "" { + v.envPrefix = in + } +} + +func (v *Viper) mergeWithEnvPrefix(in string) string { + if v.envPrefix != "" { + return strings.ToUpper(v.envPrefix + "_" + in) + } + + return strings.ToUpper(in) +} + +// AllowEmptyEnv tells Viper to consider set, +// but empty environment variables as valid values instead of falling back. +// For backward compatibility reasons this is false by default. +func AllowEmptyEnv(allowEmptyEnv bool) { v.AllowEmptyEnv(allowEmptyEnv) } + +func (v *Viper) AllowEmptyEnv(allowEmptyEnv bool) { + v.allowEmptyEnv = allowEmptyEnv +} + +// TODO: should getEnv logic be moved into find(). Can generalize the use of +// rewriting keys many things, Ex: Get('someKey') -> some_key +// (camel case to snake case for JSON keys perhaps) + +// getEnv is a wrapper around os.Getenv which replaces characters in the original +// key. This allows env vars which have different keys than the config object +// keys. +func (v *Viper) getEnv(key string) (string, bool) { + if v.envKeyReplacer != nil { + key = v.envKeyReplacer.Replace(key) + } + + val, ok := os.LookupEnv(key) + + return val, ok && (v.allowEmptyEnv || val != "") +} + +// ConfigFileUsed returns the file used to populate the config registry. +func ConfigFileUsed() string { return v.ConfigFileUsed() } +func (v *Viper) ConfigFileUsed() string { return v.configFile } + +// AddConfigPath adds a path for Viper to search for the config file in. +// Can be called multiple times to define multiple search paths. +func AddConfigPath(in string) { v.AddConfigPath(in) } + +func (v *Viper) AddConfigPath(in string) { + if in != "" { + absin := absPathify(in) + jww.INFO.Println("adding", absin, "to paths to search") + if !stringInSlice(absin, v.configPaths) { + v.configPaths = append(v.configPaths, absin) + } + } +} + +// AddRemoteProvider adds a remote configuration source. +// Remote Providers are searched in the order they are added. +// provider is a string value: "etcd", "consul" or "firestore" are currently supported. +// endpoint is the url. 
etcd requires http://ip:port consul requires ip:port +// path is the path in the k/v store to retrieve configuration +// To retrieve a config file called myapp.json from /configs/myapp.json +// you should set path to /configs and set config name (SetConfigName()) to +// "myapp" +func AddRemoteProvider(provider, endpoint, path string) error { + return v.AddRemoteProvider(provider, endpoint, path) +} + +func (v *Viper) AddRemoteProvider(provider, endpoint, path string) error { + if !stringInSlice(provider, SupportedRemoteProviders) { + return UnsupportedRemoteProviderError(provider) + } + if provider != "" && endpoint != "" { + jww.INFO.Printf("adding %s:%s to remote provider list", provider, endpoint) + rp := &defaultRemoteProvider{ + endpoint: endpoint, + provider: provider, + path: path, + } + if !v.providerPathExists(rp) { + v.remoteProviders = append(v.remoteProviders, rp) + } + } + return nil +} + +// AddSecureRemoteProvider adds a remote configuration source. +// Secure Remote Providers are searched in the order they are added. +// provider is a string value: "etcd", "consul" or "firestore" are currently supported. +// endpoint is the url. etcd requires http://ip:port consul requires ip:port +// secretkeyring is the filepath to your openpgp secret keyring. e.g. /etc/secrets/myring.gpg +// path is the path in the k/v store to retrieve configuration +// To retrieve a config file called myapp.json from /configs/myapp.json +// you should set path to /configs and set config name (SetConfigName()) to +// "myapp" +// Secure Remote Providers are implemented with github.com/bketelsen/crypt +func AddSecureRemoteProvider(provider, endpoint, path, secretkeyring string) error { + return v.AddSecureRemoteProvider(provider, endpoint, path, secretkeyring) +} + +func (v *Viper) AddSecureRemoteProvider(provider, endpoint, path, secretkeyring string) error { + if !stringInSlice(provider, SupportedRemoteProviders) { + return UnsupportedRemoteProviderError(provider) + } + if provider != "" && endpoint != "" { + jww.INFO.Printf("adding %s:%s to remote provider list", provider, endpoint) + rp := &defaultRemoteProvider{ + endpoint: endpoint, + provider: provider, + path: path, + secretKeyring: secretkeyring, + } + if !v.providerPathExists(rp) { + v.remoteProviders = append(v.remoteProviders, rp) + } + } + return nil +} + +func (v *Viper) providerPathExists(p *defaultRemoteProvider) bool { + for _, y := range v.remoteProviders { + if reflect.DeepEqual(y, p) { + return true + } + } + return false +} + +// searchMap recursively searches for a value for path in source map. +// Returns nil if not found. +// Note: This assumes that the path entries and map keys are lower cased. +func (v *Viper) searchMap(source map[string]interface{}, path []string) interface{} { + if len(path) == 0 { + return source + } + + next, ok := source[path[0]] + if ok { + // Fast path + if len(path) == 1 { + return next + } + + // Nested case + switch next.(type) { + case map[interface{}]interface{}: + return v.searchMap(cast.ToStringMap(next), path[1:]) + case map[string]interface{}: + // Type assertion is safe here since it is only reached + // if the type of `next` is the same as the type being asserted + return v.searchMap(next.(map[string]interface{}), path[1:]) + default: + // got a value but nested key expected, return "nil" for not found + return nil + } + } + return nil +} + +// searchIndexableWithPathPrefixes recursively searches for a value for path in source map/slice. 
+// +// While searchMap() considers each path element as a single map key or slice index, this +// function searches for, and prioritizes, merged path elements. +// e.g., if in the source, "foo" is defined with a sub-key "bar", and "foo.bar" +// is also defined, this latter value is returned for path ["foo", "bar"]. +// +// This should be useful only at config level (other maps may not contain dots +// in their keys). +// +// Note: This assumes that the path entries and map keys are lower cased. +func (v *Viper) searchIndexableWithPathPrefixes(source interface{}, path []string) interface{} { + if len(path) == 0 { + return source + } + + // search for path prefixes, starting from the longest one + for i := len(path); i > 0; i-- { + prefixKey := strings.ToLower(strings.Join(path[0:i], v.keyDelim)) + + var val interface{} + switch sourceIndexable := source.(type) { + case []interface{}: + val = v.searchSliceWithPathPrefixes(sourceIndexable, prefixKey, i, path) + case map[string]interface{}: + val = v.searchMapWithPathPrefixes(sourceIndexable, prefixKey, i, path) + } + if val != nil { + return val + } + } + + // not found + return nil +} + +// searchSliceWithPathPrefixes searches for a value for path in sourceSlice +// +// This function is part of the searchIndexableWithPathPrefixes recurring search and +// should not be called directly from functions other than searchIndexableWithPathPrefixes. +func (v *Viper) searchSliceWithPathPrefixes( + sourceSlice []interface{}, + prefixKey string, + pathIndex int, + path []string, +) interface{} { + // if the prefixKey is not a number or it is out of bounds of the slice + index, err := strconv.Atoi(prefixKey) + if err != nil || len(sourceSlice) <= index { + return nil + } + + next := sourceSlice[index] + + // Fast path + if pathIndex == len(path) { + return next + } + + switch n := next.(type) { + case map[interface{}]interface{}: + return v.searchIndexableWithPathPrefixes(cast.ToStringMap(n), path[pathIndex:]) + case map[string]interface{}, []interface{}: + return v.searchIndexableWithPathPrefixes(n, path[pathIndex:]) + default: + // got a value but nested key expected, do nothing and look for next prefix + } + + // not found + return nil +} + +// searchMapWithPathPrefixes searches for a value for path in sourceMap +// +// This function is part of the searchIndexableWithPathPrefixes recurring search and +// should not be called directly from functions other than searchIndexableWithPathPrefixes. +func (v *Viper) searchMapWithPathPrefixes( + sourceMap map[string]interface{}, + prefixKey string, + pathIndex int, + path []string, +) interface{} { + next, ok := sourceMap[prefixKey] + if !ok { + return nil + } + + // Fast path + if pathIndex == len(path) { + return next + } + + // Nested case + switch n := next.(type) { + case map[interface{}]interface{}: + return v.searchIndexableWithPathPrefixes(cast.ToStringMap(n), path[pathIndex:]) + case map[string]interface{}, []interface{}: + return v.searchIndexableWithPathPrefixes(n, path[pathIndex:]) + default: + // got a value but nested key expected, do nothing and look for next prefix + } + + // not found + return nil +} + +// isPathShadowedInDeepMap makes sure the given path is not shadowed somewhere +// on its path in the map. 
+// e.g., if "foo.bar" has a value in the given map, it “shadows” +// "foo.bar.baz" in a lower-priority map +func (v *Viper) isPathShadowedInDeepMap(path []string, m map[string]interface{}) string { + var parentVal interface{} + for i := 1; i < len(path); i++ { + parentVal = v.searchMap(m, path[0:i]) + if parentVal == nil { + // not found, no need to add more path elements + return "" + } + switch parentVal.(type) { + case map[interface{}]interface{}: + continue + case map[string]interface{}: + continue + default: + // parentVal is a regular value which shadows "path" + return strings.Join(path[0:i], v.keyDelim) + } + } + return "" +} + +// isPathShadowedInFlatMap makes sure the given path is not shadowed somewhere +// in a sub-path of the map. +// e.g., if "foo.bar" has a value in the given map, it “shadows” +// "foo.bar.baz" in a lower-priority map +func (v *Viper) isPathShadowedInFlatMap(path []string, mi interface{}) string { + // unify input map + var m map[string]interface{} + switch mi.(type) { + case map[string]string, map[string]FlagValue: + m = cast.ToStringMap(mi) + default: + return "" + } + + // scan paths + var parentKey string + for i := 1; i < len(path); i++ { + parentKey = strings.Join(path[0:i], v.keyDelim) + if _, ok := m[parentKey]; ok { + return parentKey + } + } + return "" +} + +// isPathShadowedInAutoEnv makes sure the given path is not shadowed somewhere +// in the environment, when automatic env is on. +// e.g., if "foo.bar" has a value in the environment, it “shadows” +// "foo.bar.baz" in a lower-priority map +func (v *Viper) isPathShadowedInAutoEnv(path []string) string { + var parentKey string + for i := 1; i < len(path); i++ { + parentKey = strings.Join(path[0:i], v.keyDelim) + if _, ok := v.getEnv(v.mergeWithEnvPrefix(parentKey)); ok { + return parentKey + } + } + return "" +} + +// SetTypeByDefaultValue enables or disables the inference of a key value's +// type when the Get function is used based upon a key's default value as +// opposed to the value returned based on the normal fetch logic. +// +// For example, if a key has a default value of []string{} and the same key +// is set via an environment variable to "a b c", a call to the Get function +// would return a string slice for the key if the key's type is inferred by +// the default value and the Get function would return: +// +// []string {"a", "b", "c"} +// +// Otherwise the Get function would return: +// +// "a b c" +func SetTypeByDefaultValue(enable bool) { v.SetTypeByDefaultValue(enable) } + +func (v *Viper) SetTypeByDefaultValue(enable bool) { + v.typeByDefValue = enable +} + +// GetViper gets the global Viper instance. +func GetViper() *Viper { + return v +} + +// Get can retrieve any value given the key to use. +// Get is case-insensitive for a key. +// Get has the behavior of returning the value associated with the first +// place from where it is set. Viper will check in the following order: +// override, flag, env, config file, key/value store, default +// +// Get returns an interface. For a specific value use one of the Get____ methods. +func Get(key string) interface{} { return v.Get(key) } + +func (v *Viper) Get(key string) interface{} { + lcaseKey := strings.ToLower(key) + val := v.find(lcaseKey, true) + if val == nil { + return nil + } + + if v.typeByDefValue { + // TODO(bep) this branch isn't covered by a single test. 
+ valType := val + path := strings.Split(lcaseKey, v.keyDelim) + defVal := v.searchMap(v.defaults, path) + if defVal != nil { + valType = defVal + } + + switch valType.(type) { + case bool: + return cast.ToBool(val) + case string: + return cast.ToString(val) + case int32, int16, int8, int: + return cast.ToInt(val) + case uint: + return cast.ToUint(val) + case uint32: + return cast.ToUint32(val) + case uint64: + return cast.ToUint64(val) + case int64: + return cast.ToInt64(val) + case float64, float32: + return cast.ToFloat64(val) + case time.Time: + return cast.ToTime(val) + case time.Duration: + return cast.ToDuration(val) + case []string: + return cast.ToStringSlice(val) + case []int: + return cast.ToIntSlice(val) + } + } + + return val +} + +// Sub returns new Viper instance representing a sub tree of this instance. +// Sub is case-insensitive for a key. +func Sub(key string) *Viper { return v.Sub(key) } + +func (v *Viper) Sub(key string) *Viper { + subv := New() + data := v.Get(key) + if data == nil { + return nil + } + + if reflect.TypeOf(data).Kind() == reflect.Map { + subv.config = cast.ToStringMap(data) + return subv + } + return nil +} + +// GetString returns the value associated with the key as a string. +func GetString(key string) string { return v.GetString(key) } + +func (v *Viper) GetString(key string) string { + return cast.ToString(v.Get(key)) +} + +// GetBool returns the value associated with the key as a boolean. +func GetBool(key string) bool { return v.GetBool(key) } + +func (v *Viper) GetBool(key string) bool { + return cast.ToBool(v.Get(key)) +} + +// GetInt returns the value associated with the key as an integer. +func GetInt(key string) int { return v.GetInt(key) } + +func (v *Viper) GetInt(key string) int { + return cast.ToInt(v.Get(key)) +} + +// GetInt32 returns the value associated with the key as an integer. +func GetInt32(key string) int32 { return v.GetInt32(key) } + +func (v *Viper) GetInt32(key string) int32 { + return cast.ToInt32(v.Get(key)) +} + +// GetInt64 returns the value associated with the key as an integer. +func GetInt64(key string) int64 { return v.GetInt64(key) } + +func (v *Viper) GetInt64(key string) int64 { + return cast.ToInt64(v.Get(key)) +} + +// GetUint returns the value associated with the key as an unsigned integer. +func GetUint(key string) uint { return v.GetUint(key) } + +func (v *Viper) GetUint(key string) uint { + return cast.ToUint(v.Get(key)) +} + +// GetUint32 returns the value associated with the key as an unsigned integer. +func GetUint32(key string) uint32 { return v.GetUint32(key) } + +func (v *Viper) GetUint32(key string) uint32 { + return cast.ToUint32(v.Get(key)) +} + +// GetUint64 returns the value associated with the key as an unsigned integer. +func GetUint64(key string) uint64 { return v.GetUint64(key) } + +func (v *Viper) GetUint64(key string) uint64 { + return cast.ToUint64(v.Get(key)) +} + +// GetFloat64 returns the value associated with the key as a float64. +func GetFloat64(key string) float64 { return v.GetFloat64(key) } + +func (v *Viper) GetFloat64(key string) float64 { + return cast.ToFloat64(v.Get(key)) +} + +// GetTime returns the value associated with the key as time. +func GetTime(key string) time.Time { return v.GetTime(key) } + +func (v *Viper) GetTime(key string) time.Time { + return cast.ToTime(v.Get(key)) +} + +// GetDuration returns the value associated with the key as a duration. 
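The SetTypeByDefaultValue comment above walks through its []string example in prose; here is a hedged, runnable sketch of the same behaviour (the `tags` key and `TAGS` variable are invented for illustration, not taken from the vendored sources):

```go
package main

import (
	"fmt"
	"os"

	"github.com/spf13/viper"
)

func main() {
	os.Setenv("TAGS", "a b c")
	viper.BindEnv("tags") // with no env prefix, "tags" binds to the TAGS variable
	viper.SetDefault("tags", []string{})

	// With type inference on, Get converts the env string using the
	// default value's type ([]string), so cast.ToStringSlice splits it.
	viper.SetTypeByDefaultValue(true)
	fmt.Printf("%#v\n", viper.Get("tags")) // []string{"a", "b", "c"}

	// With inference off, the raw env string is returned as-is.
	viper.SetTypeByDefaultValue(false)
	fmt.Printf("%#v\n", viper.Get("tags")) // "a b c"
}
```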
+func GetDuration(key string) time.Duration { return v.GetDuration(key) } + +func (v *Viper) GetDuration(key string) time.Duration { + return cast.ToDuration(v.Get(key)) +} + +// GetIntSlice returns the value associated with the key as a slice of int values. +func GetIntSlice(key string) []int { return v.GetIntSlice(key) } + +func (v *Viper) GetIntSlice(key string) []int { + return cast.ToIntSlice(v.Get(key)) +} + +// GetStringSlice returns the value associated with the key as a slice of strings. +func GetStringSlice(key string) []string { return v.GetStringSlice(key) } + +func (v *Viper) GetStringSlice(key string) []string { + return cast.ToStringSlice(v.Get(key)) +} + +// GetStringMap returns the value associated with the key as a map of interfaces. +func GetStringMap(key string) map[string]interface{} { return v.GetStringMap(key) } + +func (v *Viper) GetStringMap(key string) map[string]interface{} { + return cast.ToStringMap(v.Get(key)) +} + +// GetStringMapString returns the value associated with the key as a map of strings. +func GetStringMapString(key string) map[string]string { return v.GetStringMapString(key) } + +func (v *Viper) GetStringMapString(key string) map[string]string { + return cast.ToStringMapString(v.Get(key)) +} + +// GetStringMapStringSlice returns the value associated with the key as a map to a slice of strings. +func GetStringMapStringSlice(key string) map[string][]string { return v.GetStringMapStringSlice(key) } + +func (v *Viper) GetStringMapStringSlice(key string) map[string][]string { + return cast.ToStringMapStringSlice(v.Get(key)) +} + +// GetSizeInBytes returns the size of the value associated with the given key +// in bytes. +func GetSizeInBytes(key string) uint { return v.GetSizeInBytes(key) } + +func (v *Viper) GetSizeInBytes(key string) uint { + sizeStr := cast.ToString(v.Get(key)) + return parseSizeInBytes(sizeStr) +} + +// UnmarshalKey takes a single key and unmarshals it into a Struct. +func UnmarshalKey(key string, rawVal interface{}, opts ...DecoderConfigOption) error { + return v.UnmarshalKey(key, rawVal, opts...) +} + +func (v *Viper) UnmarshalKey(key string, rawVal interface{}, opts ...DecoderConfigOption) error { + return decode(v.Get(key), defaultDecoderConfig(rawVal, opts...)) +} + +// Unmarshal unmarshals the config into a Struct. Make sure that the tags +// on the fields of the structure are properly set. +func Unmarshal(rawVal interface{}, opts ...DecoderConfigOption) error { + return v.Unmarshal(rawVal, opts...) 
+} + +func (v *Viper) Unmarshal(rawVal interface{}, opts ...DecoderConfigOption) error { + return decode(v.AllSettings(), defaultDecoderConfig(rawVal, opts...)) +} + +// defaultDecoderConfig returns default mapsstructure.DecoderConfig with suppot +// of time.Duration values & string slices +func defaultDecoderConfig(output interface{}, opts ...DecoderConfigOption) *mapstructure.DecoderConfig { + c := &mapstructure.DecoderConfig{ + Metadata: nil, + Result: output, + WeaklyTypedInput: true, + DecodeHook: mapstructure.ComposeDecodeHookFunc( + mapstructure.StringToTimeDurationHookFunc(), + mapstructure.StringToSliceHookFunc(","), + ), + } + for _, opt := range opts { + opt(c) + } + return c +} + +// A wrapper around mapstructure.Decode that mimics the WeakDecode functionality +func decode(input interface{}, config *mapstructure.DecoderConfig) error { + decoder, err := mapstructure.NewDecoder(config) + if err != nil { + return err + } + return decoder.Decode(input) +} + +// UnmarshalExact unmarshals the config into a Struct, erroring if a field is nonexistent +// in the destination struct. +func UnmarshalExact(rawVal interface{}, opts ...DecoderConfigOption) error { + return v.UnmarshalExact(rawVal, opts...) +} + +func (v *Viper) UnmarshalExact(rawVal interface{}, opts ...DecoderConfigOption) error { + config := defaultDecoderConfig(rawVal, opts...) + config.ErrorUnused = true + + return decode(v.AllSettings(), config) +} + +// BindPFlags binds a full flag set to the configuration, using each flag's long +// name as the config key. +func BindPFlags(flags *pflag.FlagSet) error { return v.BindPFlags(flags) } + +func (v *Viper) BindPFlags(flags *pflag.FlagSet) error { + return v.BindFlagValues(pflagValueSet{flags}) +} + +// BindPFlag binds a specific key to a pflag (as used by cobra). +// Example (where serverCmd is a Cobra instance): +// +// serverCmd.Flags().Int("port", 1138, "Port to run Application server on") +// Viper.BindPFlag("port", serverCmd.Flags().Lookup("port")) +// +func BindPFlag(key string, flag *pflag.Flag) error { return v.BindPFlag(key, flag) } + +func (v *Viper) BindPFlag(key string, flag *pflag.Flag) error { + if flag == nil { + return fmt.Errorf("flag for %q is nil", key) + } + return v.BindFlagValue(key, pflagValue{flag}) +} + +// BindFlagValues binds a full FlagValue set to the configuration, using each flag's long +// name as the config key. +func BindFlagValues(flags FlagValueSet) error { return v.BindFlagValues(flags) } + +func (v *Viper) BindFlagValues(flags FlagValueSet) (err error) { + flags.VisitAll(func(flag FlagValue) { + if err = v.BindFlagValue(flag.Name(), flag); err != nil { + return + } + }) + return nil +} + +// BindFlagValue binds a specific key to a FlagValue. +func BindFlagValue(key string, flag FlagValue) error { return v.BindFlagValue(key, flag) } + +func (v *Viper) BindFlagValue(key string, flag FlagValue) error { + if flag == nil { + return fmt.Errorf("flag for %q is nil", key) + } + v.pflags[strings.ToLower(key)] = flag + return nil +} + +// BindEnv binds a Viper key to a ENV variable. +// ENV variables are case sensitive. +// If only a key is provided, it will use the env key matching the key, uppercased. +// If more arguments are provided, they will represent the env variable names that +// should bind to this key and will be taken in the specified order. +// EnvPrefix will be used when set when env name is not provided. +func BindEnv(input ...string) error { return v.BindEnv(input...) 
} + +func (v *Viper) BindEnv(input ...string) error { + if len(input) == 0 { + return fmt.Errorf("missing key to bind to") + } + + key := strings.ToLower(input[0]) + + if len(input) == 1 { + v.env[key] = append(v.env[key], v.mergeWithEnvPrefix(key)) + } else { + v.env[key] = append(v.env[key], input[1:]...) + } + + return nil +} + +// Given a key, find the value. +// +// Viper will check to see if an alias exists first. +// Viper will then check in the following order: +// flag, env, config file, key/value store. +// Lastly, if no value was found and flagDefault is true, and if the key +// corresponds to a flag, the flag's default value is returned. +// +// Note: this assumes a lower-cased key given. +func (v *Viper) find(lcaseKey string, flagDefault bool) interface{} { + var ( + val interface{} + exists bool + path = strings.Split(lcaseKey, v.keyDelim) + nested = len(path) > 1 + ) + + // compute the path through the nested maps to the nested value + if nested && v.isPathShadowedInDeepMap(path, castMapStringToMapInterface(v.aliases)) != "" { + return nil + } + + // if the requested key is an alias, then return the proper key + lcaseKey = v.realKey(lcaseKey) + path = strings.Split(lcaseKey, v.keyDelim) + nested = len(path) > 1 + + // Set() override first + val = v.searchMap(v.override, path) + if val != nil { + return val + } + if nested && v.isPathShadowedInDeepMap(path, v.override) != "" { + return nil + } + + // PFlag override next + flag, exists := v.pflags[lcaseKey] + if exists && flag.HasChanged() { + switch flag.ValueType() { + case "int", "int8", "int16", "int32", "int64": + return cast.ToInt(flag.ValueString()) + case "bool": + return cast.ToBool(flag.ValueString()) + case "stringSlice", "stringArray": + s := strings.TrimPrefix(flag.ValueString(), "[") + s = strings.TrimSuffix(s, "]") + res, _ := readAsCSV(s) + return res + case "intSlice": + s := strings.TrimPrefix(flag.ValueString(), "[") + s = strings.TrimSuffix(s, "]") + res, _ := readAsCSV(s) + return cast.ToIntSlice(res) + case "stringToString": + return stringToStringConv(flag.ValueString()) + default: + return flag.ValueString() + } + } + if nested && v.isPathShadowedInFlatMap(path, v.pflags) != "" { + return nil + } + + // Env override next + if v.automaticEnvApplied { + // even if it hasn't been registered, if automaticEnv is used, + // check any Get request + if val, ok := v.getEnv(v.mergeWithEnvPrefix(lcaseKey)); ok { + return val + } + if nested && v.isPathShadowedInAutoEnv(path) != "" { + return nil + } + } + envkeys, exists := v.env[lcaseKey] + if exists { + for _, envkey := range envkeys { + if val, ok := v.getEnv(envkey); ok { + return val + } + } + } + if nested && v.isPathShadowedInFlatMap(path, v.env) != "" { + return nil + } + + // Config file next + val = v.searchIndexableWithPathPrefixes(v.config, path) + if val != nil { + return val + } + if nested && v.isPathShadowedInDeepMap(path, v.config) != "" { + return nil + } + + // K/V store next + val = v.searchMap(v.kvstore, path) + if val != nil { + return val + } + if nested && v.isPathShadowedInDeepMap(path, v.kvstore) != "" { + return nil + } + + // Default next + val = v.searchMap(v.defaults, path) + if val != nil { + return val + } + if nested && v.isPathShadowedInDeepMap(path, v.defaults) != "" { + return nil + } + + if flagDefault { + // last chance: if no value is found and a flag does exist for the key, + // get the flag's default value even if the flag's value has not been set. 
+ if flag, exists := v.pflags[lcaseKey]; exists { + switch flag.ValueType() { + case "int", "int8", "int16", "int32", "int64": + return cast.ToInt(flag.ValueString()) + case "bool": + return cast.ToBool(flag.ValueString()) + case "stringSlice", "stringArray": + s := strings.TrimPrefix(flag.ValueString(), "[") + s = strings.TrimSuffix(s, "]") + res, _ := readAsCSV(s) + return res + case "intSlice": + s := strings.TrimPrefix(flag.ValueString(), "[") + s = strings.TrimSuffix(s, "]") + res, _ := readAsCSV(s) + return cast.ToIntSlice(res) + case "stringToString": + return stringToStringConv(flag.ValueString()) + default: + return flag.ValueString() + } + } + // last item, no need to check shadowing + } + + return nil +} + +func readAsCSV(val string) ([]string, error) { + if val == "" { + return []string{}, nil + } + stringReader := strings.NewReader(val) + csvReader := csv.NewReader(stringReader) + return csvReader.Read() +} + +// mostly copied from pflag's implementation of this operation here https://github.com/spf13/pflag/blob/master/string_to_string.go#L79 +// alterations are: errors are swallowed, map[string]interface{} is returned in order to enable cast.ToStringMap +func stringToStringConv(val string) interface{} { + val = strings.Trim(val, "[]") + // An empty string would cause an empty map + if len(val) == 0 { + return map[string]interface{}{} + } + r := csv.NewReader(strings.NewReader(val)) + ss, err := r.Read() + if err != nil { + return nil + } + out := make(map[string]interface{}, len(ss)) + for _, pair := range ss { + kv := strings.SplitN(pair, "=", 2) + if len(kv) != 2 { + return nil + } + out[kv[0]] = kv[1] + } + return out +} + +// IsSet checks to see if the key has been set in any of the data locations. +// IsSet is case-insensitive for a key. +func IsSet(key string) bool { return v.IsSet(key) } + +func (v *Viper) IsSet(key string) bool { + lcaseKey := strings.ToLower(key) + val := v.find(lcaseKey, false) + return val != nil +} + +// AutomaticEnv makes Viper check if environment variables match any of the existing keys +// (config, default or flags). If matching env vars are found, they are loaded into Viper. +func AutomaticEnv() { v.AutomaticEnv() } + +func (v *Viper) AutomaticEnv() { + v.automaticEnvApplied = true +} + +// SetEnvKeyReplacer sets the strings.Replacer on the viper object +// Useful for mapping an environmental variable to a key that does +// not match it. +func SetEnvKeyReplacer(r *strings.Replacer) { v.SetEnvKeyReplacer(r) } + +func (v *Viper) SetEnvKeyReplacer(r *strings.Replacer) { + v.envKeyReplacer = r +} + +// RegisterAlias creates an alias that provides another accessor for the same key. +// This enables one to change a name without breaking the application. +func RegisterAlias(alias string, key string) { v.RegisterAlias(alias, key) } + +func (v *Viper) RegisterAlias(alias string, key string) { + v.registerAlias(alias, strings.ToLower(key)) +} + +func (v *Viper) registerAlias(alias string, key string) { + alias = strings.ToLower(alias) + if alias != key && alias != v.realKey(key) { + _, exists := v.aliases[alias] + + if !exists { + // if we alias something that exists in one of the maps to another + // name, we'll never be able to get that value using the original + // name, so move the config value to the new realkey. 
+ if val, ok := v.config[alias]; ok { + delete(v.config, alias) + v.config[key] = val + } + if val, ok := v.kvstore[alias]; ok { + delete(v.kvstore, alias) + v.kvstore[key] = val + } + if val, ok := v.defaults[alias]; ok { + delete(v.defaults, alias) + v.defaults[key] = val + } + if val, ok := v.override[alias]; ok { + delete(v.override, alias) + v.override[key] = val + } + v.aliases[alias] = key + } + } else { + jww.WARN.Println("Creating circular reference alias", alias, key, v.realKey(key)) + } +} + +func (v *Viper) realKey(key string) string { + newkey, exists := v.aliases[key] + if exists { + jww.DEBUG.Println("Alias", key, "to", newkey) + return v.realKey(newkey) + } + return key +} + +// InConfig checks to see if the given key (or an alias) is in the config file. +func InConfig(key string) bool { return v.InConfig(key) } + +func (v *Viper) InConfig(key string) bool { + // if the requested key is an alias, then return the proper key + key = v.realKey(key) + + _, exists := v.config[key] + return exists +} + +// SetDefault sets the default value for this key. +// SetDefault is case-insensitive for a key. +// Default only used when no value is provided by the user via flag, config or ENV. +func SetDefault(key string, value interface{}) { v.SetDefault(key, value) } + +func (v *Viper) SetDefault(key string, value interface{}) { + // If alias passed in, then set the proper default + key = v.realKey(strings.ToLower(key)) + value = toCaseInsensitiveValue(value) + + path := strings.Split(key, v.keyDelim) + lastKey := strings.ToLower(path[len(path)-1]) + deepestMap := deepSearch(v.defaults, path[0:len(path)-1]) + + // set innermost value + deepestMap[lastKey] = value +} + +// Set sets the value for the key in the override register. +// Set is case-insensitive for a key. +// Will be used instead of values obtained via +// flags, config file, ENV, default, or key/value store. +func Set(key string, value interface{}) { v.Set(key, value) } + +func (v *Viper) Set(key string, value interface{}) { + // If alias passed in, then set the proper override + key = v.realKey(strings.ToLower(key)) + value = toCaseInsensitiveValue(value) + + path := strings.Split(key, v.keyDelim) + lastKey := strings.ToLower(path[len(path)-1]) + deepestMap := deepSearch(v.override, path[0:len(path)-1]) + + // set innermost value + deepestMap[lastKey] = value +} + +// ReadInConfig will discover and load the configuration file from disk +// and key/value stores, searching in one of the defined paths. +func ReadInConfig() error { return v.ReadInConfig() } + +func (v *Viper) ReadInConfig() error { + jww.INFO.Println("Attempting to read in config file") + filename, err := v.getConfigFile() + if err != nil { + return err + } + + if !stringInSlice(v.getConfigType(), SupportedExts) { + return UnsupportedConfigError(v.getConfigType()) + } + + jww.DEBUG.Println("Reading file: ", filename) + file, err := afero.ReadFile(v.fs, filename) + if err != nil { + return err + } + + config := make(map[string]interface{}) + + err = v.unmarshalReader(bytes.NewReader(file), config) + if err != nil { + return err + } + + v.config = config + return nil +} + +// MergeInConfig merges a new configuration with an existing config. 
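SetDefault and Set above populate the defaults and override registers that find() walks in priority order; a minimal, hedged sketch of the resulting precedence (illustrative only, not part of the vendored file; the `port` key is invented):

```go
package main

import (
	"fmt"

	"github.com/spf13/viper"
)

func main() {
	// Defaults sit at the bottom of find()'s lookup order.
	viper.SetDefault("port", 8080)
	fmt.Println(viper.GetInt("port")) // 8080

	// Set writes to the override register, which beats flags, env,
	// config files, the k/v store, and defaults.
	viper.Set("port", 9090)
	fmt.Println(viper.GetInt("port")) // 9090
}
```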
+func MergeInConfig() error { return v.MergeInConfig() } + +func (v *Viper) MergeInConfig() error { + jww.INFO.Println("Attempting to merge in config file") + filename, err := v.getConfigFile() + if err != nil { + return err + } + + if !stringInSlice(v.getConfigType(), SupportedExts) { + return UnsupportedConfigError(v.getConfigType()) + } + + file, err := afero.ReadFile(v.fs, filename) + if err != nil { + return err + } + + return v.MergeConfig(bytes.NewReader(file)) +} + +// ReadConfig will read a configuration file, setting existing keys to nil if the +// key does not exist in the file. +func ReadConfig(in io.Reader) error { return v.ReadConfig(in) } + +func (v *Viper) ReadConfig(in io.Reader) error { + v.config = make(map[string]interface{}) + return v.unmarshalReader(in, v.config) +} + +// MergeConfig merges a new configuration with an existing config. +func MergeConfig(in io.Reader) error { return v.MergeConfig(in) } + +func (v *Viper) MergeConfig(in io.Reader) error { + cfg := make(map[string]interface{}) + if err := v.unmarshalReader(in, cfg); err != nil { + return err + } + return v.MergeConfigMap(cfg) +} + +// MergeConfigMap merges the configuration from the map given with an existing config. +// Note that the map given may be modified. +func MergeConfigMap(cfg map[string]interface{}) error { return v.MergeConfigMap(cfg) } + +func (v *Viper) MergeConfigMap(cfg map[string]interface{}) error { + if v.config == nil { + v.config = make(map[string]interface{}) + } + insensitiviseMap(cfg) + mergeMaps(cfg, v.config, nil) + return nil +} + +// WriteConfig writes the current configuration to a file. +func WriteConfig() error { return v.WriteConfig() } + +func (v *Viper) WriteConfig() error { + filename, err := v.getConfigFile() + if err != nil { + return err + } + return v.writeConfig(filename, true) +} + +// SafeWriteConfig writes current configuration to file only if the file does not exist. +func SafeWriteConfig() error { return v.SafeWriteConfig() } + +func (v *Viper) SafeWriteConfig() error { + if len(v.configPaths) < 1 { + return errors.New("missing configuration for 'configPath'") + } + return v.SafeWriteConfigAs(filepath.Join(v.configPaths[0], v.configName+"."+v.configType)) +} + +// WriteConfigAs writes current configuration to a given filename. +func WriteConfigAs(filename string) error { return v.WriteConfigAs(filename) } + +func (v *Viper) WriteConfigAs(filename string) error { + return v.writeConfig(filename, true) +} + +// SafeWriteConfigAs writes current configuration to a given filename if it does not exist. 
+func SafeWriteConfigAs(filename string) error { return v.SafeWriteConfigAs(filename) } + +func (v *Viper) SafeWriteConfigAs(filename string) error { + alreadyExists, err := afero.Exists(v.fs, filename) + if alreadyExists && err == nil { + return ConfigFileAlreadyExistsError(filename) + } + return v.writeConfig(filename, false) +} + +func (v *Viper) writeConfig(filename string, force bool) error { + jww.INFO.Println("Attempting to write configuration to file.") + var configType string + + ext := filepath.Ext(filename) + if ext != "" { + configType = ext[1:] + } else { + configType = v.configType + } + if configType == "" { + return fmt.Errorf("config type could not be determined for %s", filename) + } + + if !stringInSlice(configType, SupportedExts) { + return UnsupportedConfigError(configType) + } + if v.config == nil { + v.config = make(map[string]interface{}) + } + flags := os.O_CREATE | os.O_TRUNC | os.O_WRONLY + if !force { + flags |= os.O_EXCL + } + f, err := v.fs.OpenFile(filename, flags, v.configPermissions) + if err != nil { + return err + } + defer f.Close() + + if err := v.marshalWriter(f, configType); err != nil { + return err + } + + return f.Sync() +} + +// Unmarshal a Reader into a map. +// Should probably be an unexported function. +func unmarshalReader(in io.Reader, c map[string]interface{}) error { + return v.unmarshalReader(in, c) +} + +func (v *Viper) unmarshalReader(in io.Reader, c map[string]interface{}) error { + buf := new(bytes.Buffer) + buf.ReadFrom(in) + + switch strings.ToLower(v.getConfigType()) { + case "yaml", "yml": + if err := yaml.Unmarshal(buf.Bytes(), &c); err != nil { + return ConfigParseError{err} + } + + case "json": + if err := json.Unmarshal(buf.Bytes(), &c); err != nil { + return ConfigParseError{err} + } + + case "hcl": + obj, err := hcl.Parse(buf.String()) + if err != nil { + return ConfigParseError{err} + } + if err = hcl.DecodeObject(&c, obj); err != nil { + return ConfigParseError{err} + } + + case "toml": + tree, err := toml.LoadReader(buf) + if err != nil { + return ConfigParseError{err} + } + tmap := tree.ToMap() + for k, v := range tmap { + c[k] = v + } + + case "dotenv", "env": + env, err := gotenv.StrictParse(buf) + if err != nil { + return ConfigParseError{err} + } + for k, v := range env { + c[k] = v + } + + case "properties", "props", "prop": + v.properties = properties.NewProperties() + var err error + if v.properties, err = properties.Load(buf.Bytes(), properties.UTF8); err != nil { + return ConfigParseError{err} + } + for _, key := range v.properties.Keys() { + value, _ := v.properties.Get(key) + // recursively build nested maps + path := strings.Split(key, ".") + lastKey := strings.ToLower(path[len(path)-1]) + deepestMap := deepSearch(c, path[0:len(path)-1]) + // set innermost value + deepestMap[lastKey] = value + } + + case "ini": + cfg := ini.Empty(v.iniLoadOptions) + err := cfg.Append(buf.Bytes()) + if err != nil { + return ConfigParseError{err} + } + sections := cfg.Sections() + for i := 0; i < len(sections); i++ { + section := sections[i] + keys := section.Keys() + for j := 0; j < len(keys); j++ { + key := keys[j] + value := cfg.Section(section.Name()).Key(key.Name()).String() + c[section.Name()+"."+key.Name()] = value + } + } + } + + insensitiviseMap(c) + return nil +} + +// Marshal a map into Writer. 
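The unmarshalReader switch above is what ReadConfig ultimately drives; a hedged sketch (keys and struct are invented) showing the "yaml" branch together with the time.Duration decode hook installed by defaultDecoderConfig:

```go
package main

import (
	"bytes"
	"fmt"
	"time"

	"github.com/spf13/viper"
)

type serverConfig struct {
	Host    string        `mapstructure:"host"`
	Port    int           `mapstructure:"port"`
	Timeout time.Duration `mapstructure:"timeout"`
}

func main() {
	viper.SetConfigType("yaml") // selects the "yaml" case in unmarshalReader
	raw := []byte("server:\n  host: localhost\n  port: 8080\n  timeout: 750ms\n")

	if err := viper.ReadConfig(bytes.NewReader(raw)); err != nil {
		panic(err)
	}

	// StringToTimeDurationHookFunc turns "750ms" into a time.Duration.
	var cfg serverConfig
	if err := viper.UnmarshalKey("server", &cfg); err != nil {
		panic(err)
	}
	fmt.Println(cfg.Host, cfg.Port, cfg.Timeout) // localhost 8080 750ms
}
```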
+func (v *Viper) marshalWriter(f afero.File, configType string) error { + c := v.AllSettings() + switch configType { + case "json": + b, err := json.MarshalIndent(c, "", " ") + if err != nil { + return ConfigMarshalError{err} + } + _, err = f.WriteString(string(b)) + if err != nil { + return ConfigMarshalError{err} + } + + case "hcl": + b, err := json.Marshal(c) + if err != nil { + return ConfigMarshalError{err} + } + ast, err := hcl.Parse(string(b)) + if err != nil { + return ConfigMarshalError{err} + } + err = printer.Fprint(f, ast.Node) + if err != nil { + return ConfigMarshalError{err} + } + + case "prop", "props", "properties": + if v.properties == nil { + v.properties = properties.NewProperties() + } + p := v.properties + for _, key := range v.AllKeys() { + _, _, err := p.Set(key, v.GetString(key)) + if err != nil { + return ConfigMarshalError{err} + } + } + _, err := p.WriteComment(f, "#", properties.UTF8) + if err != nil { + return ConfigMarshalError{err} + } + + case "dotenv", "env": + lines := []string{} + for _, key := range v.AllKeys() { + envName := strings.ToUpper(strings.Replace(key, ".", "_", -1)) + val := v.Get(key) + lines = append(lines, fmt.Sprintf("%v=%v", envName, val)) + } + s := strings.Join(lines, "\n") + if _, err := f.WriteString(s); err != nil { + return ConfigMarshalError{err} + } + + case "toml": + t, err := toml.TreeFromMap(c) + if err != nil { + return ConfigMarshalError{err} + } + s := t.String() + if _, err := f.WriteString(s); err != nil { + return ConfigMarshalError{err} + } + + case "yaml", "yml": + b, err := yaml.Marshal(c) + if err != nil { + return ConfigMarshalError{err} + } + if _, err = f.WriteString(string(b)); err != nil { + return ConfigMarshalError{err} + } + + case "ini": + keys := v.AllKeys() + cfg := ini.Empty() + ini.PrettyFormat = false + for i := 0; i < len(keys); i++ { + key := keys[i] + lastSep := strings.LastIndex(key, ".") + sectionName := key[:(lastSep)] + keyName := key[(lastSep + 1):] + if sectionName == "default" { + sectionName = "" + } + cfg.Section(sectionName).Key(keyName).SetValue(v.GetString(key)) + } + cfg.WriteTo(f) + } + return nil +} + +func keyExists(k string, m map[string]interface{}) string { + lk := strings.ToLower(k) + for mk := range m { + lmk := strings.ToLower(mk) + if lmk == lk { + return mk + } + } + return "" +} + +func castToMapStringInterface( + src map[interface{}]interface{}) map[string]interface{} { + tgt := map[string]interface{}{} + for k, v := range src { + tgt[fmt.Sprintf("%v", k)] = v + } + return tgt +} + +func castMapStringSliceToMapInterface(src map[string][]string) map[string]interface{} { + tgt := map[string]interface{}{} + for k, v := range src { + tgt[k] = v + } + return tgt +} + +func castMapStringToMapInterface(src map[string]string) map[string]interface{} { + tgt := map[string]interface{}{} + for k, v := range src { + tgt[k] = v + } + return tgt +} + +func castMapFlagToMapInterface(src map[string]FlagValue) map[string]interface{} { + tgt := map[string]interface{}{} + for k, v := range src { + tgt[k] = v + } + return tgt +} + +// mergeMaps merges two maps. The `itgt` parameter is for handling go-yaml's +// insistence on parsing nested structures as `map[interface{}]interface{}` +// instead of using a `string` as the key for nest structures beyond one level +// deep. Both map types are supported as there is a go-yaml fork that uses +// `map[string]interface{}` instead. 
+func mergeMaps( + src, tgt map[string]interface{}, itgt map[interface{}]interface{}) { + for sk, sv := range src { + tk := keyExists(sk, tgt) + if tk == "" { + jww.TRACE.Printf("tk=\"\", tgt[%s]=%v", sk, sv) + tgt[sk] = sv + if itgt != nil { + itgt[sk] = sv + } + continue + } + + tv, ok := tgt[tk] + if !ok { + jww.TRACE.Printf("tgt[%s] != ok, tgt[%s]=%v", tk, sk, sv) + tgt[sk] = sv + if itgt != nil { + itgt[sk] = sv + } + continue + } + + svType := reflect.TypeOf(sv) + tvType := reflect.TypeOf(tv) + if tvType != nil && svType != tvType { // Allow for the target to be nil + jww.ERROR.Printf( + "svType != tvType; key=%s, st=%v, tt=%v, sv=%v, tv=%v", + sk, svType, tvType, sv, tv) + continue + } + + jww.TRACE.Printf("processing key=%s, st=%v, tt=%v, sv=%v, tv=%v", + sk, svType, tvType, sv, tv) + + switch ttv := tv.(type) { + case map[interface{}]interface{}: + jww.TRACE.Printf("merging maps (must convert)") + tsv := sv.(map[interface{}]interface{}) + ssv := castToMapStringInterface(tsv) + stv := castToMapStringInterface(ttv) + mergeMaps(ssv, stv, ttv) + case map[string]interface{}: + jww.TRACE.Printf("merging maps") + mergeMaps(sv.(map[string]interface{}), ttv, nil) + default: + jww.TRACE.Printf("setting value") + tgt[tk] = sv + if itgt != nil { + itgt[tk] = sv + } + } + } +} + +// ReadRemoteConfig attempts to get configuration from a remote source +// and read it in the remote configuration registry. +func ReadRemoteConfig() error { return v.ReadRemoteConfig() } + +func (v *Viper) ReadRemoteConfig() error { + return v.getKeyValueConfig() +} + +func WatchRemoteConfig() error { return v.WatchRemoteConfig() } +func (v *Viper) WatchRemoteConfig() error { + return v.watchKeyValueConfig() +} + +func (v *Viper) WatchRemoteConfigOnChannel() error { + return v.watchKeyValueConfigOnChannel() +} + +// Retrieve the first found remote configuration. +func (v *Viper) getKeyValueConfig() error { + if RemoteConfig == nil { + return RemoteConfigError("Enable the remote features by doing a blank import of the viper/remote package: '_ github.com/spf13/viper/remote'") + } + + for _, rp := range v.remoteProviders { + val, err := v.getRemoteConfig(rp) + if err != nil { + jww.ERROR.Printf("get remote config: %s", err) + + continue + } + + v.kvstore = val + + return nil + } + return RemoteConfigError("No Files Found") +} + +func (v *Viper) getRemoteConfig(provider RemoteProvider) (map[string]interface{}, error) { + reader, err := RemoteConfig.Get(provider) + if err != nil { + return nil, err + } + err = v.unmarshalReader(reader, v.kvstore) + return v.kvstore, err +} + +// Retrieve the first found remote configuration. +func (v *Viper) watchKeyValueConfigOnChannel() error { + for _, rp := range v.remoteProviders { + respc, _ := RemoteConfig.WatchChannel(rp) + // Todo: Add quit channel + go func(rc <-chan *RemoteResponse) { + for { + b := <-rc + reader := bytes.NewReader(b.Value) + v.unmarshalReader(reader, v.kvstore) + } + }(respc) + return nil + } + return RemoteConfigError("No Files Found") +} + +// Retrieve the first found remote configuration. 
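A hedged sketch of the remote-provider flow that ReadRemoteConfig/getKeyValueConfig above implement, following the recipe in the AddRemoteProvider comment near the top of the file; the endpoint, path, and key are placeholders, and a running consul agent would be required for this to succeed:

```go
package main

import (
	"fmt"

	"github.com/spf13/viper"
	// Blank import registers RemoteConfig; see the RemoteConfigError message above.
	_ "github.com/spf13/viper/remote"
)

func main() {
	// etcd expects http://ip:port, consul expects ip:port.
	if err := viper.AddRemoteProvider("consul", "localhost:8500", "/configs"); err != nil {
		panic(err)
	}
	viper.SetConfigName("myapp") // per the AddRemoteProvider comment
	viper.SetConfigType("json")  // getConfigType needs an explicit type here

	if err := viper.ReadRemoteConfig(); err != nil {
		panic(err)
	}
	fmt.Println(viper.Get("feature.enabled"))
}
```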
+func (v *Viper) watchKeyValueConfig() error { + for _, rp := range v.remoteProviders { + val, err := v.watchRemoteConfig(rp) + if err != nil { + continue + } + v.kvstore = val + return nil + } + return RemoteConfigError("No Files Found") +} + +func (v *Viper) watchRemoteConfig(provider RemoteProvider) (map[string]interface{}, error) { + reader, err := RemoteConfig.Watch(provider) + if err != nil { + return nil, err + } + err = v.unmarshalReader(reader, v.kvstore) + return v.kvstore, err +} + +// AllKeys returns all keys holding a value, regardless of where they are set. +// Nested keys are returned with a v.keyDelim separator +func AllKeys() []string { return v.AllKeys() } + +func (v *Viper) AllKeys() []string { + m := map[string]bool{} + // add all paths, by order of descending priority to ensure correct shadowing + m = v.flattenAndMergeMap(m, castMapStringToMapInterface(v.aliases), "") + m = v.flattenAndMergeMap(m, v.override, "") + m = v.mergeFlatMap(m, castMapFlagToMapInterface(v.pflags)) + m = v.mergeFlatMap(m, castMapStringSliceToMapInterface(v.env)) + m = v.flattenAndMergeMap(m, v.config, "") + m = v.flattenAndMergeMap(m, v.kvstore, "") + m = v.flattenAndMergeMap(m, v.defaults, "") + + // convert set of paths to list + a := make([]string, 0, len(m)) + for x := range m { + a = append(a, x) + } + return a +} + +// flattenAndMergeMap recursively flattens the given map into a map[string]bool +// of key paths (used as a set, easier to manipulate than a []string): +// - each path is merged into a single key string, delimited with v.keyDelim +// - if a path is shadowed by an earlier value in the initial shadow map, +// it is skipped. +// The resulting set of paths is merged to the given shadow set at the same time. +func (v *Viper) flattenAndMergeMap(shadow map[string]bool, m map[string]interface{}, prefix string) map[string]bool { + if shadow != nil && prefix != "" && shadow[prefix] { + // prefix is shadowed => nothing more to flatten + return shadow + } + if shadow == nil { + shadow = make(map[string]bool) + } + + var m2 map[string]interface{} + if prefix != "" { + prefix += v.keyDelim + } + for k, val := range m { + fullKey := prefix + k + switch val.(type) { + case map[string]interface{}: + m2 = val.(map[string]interface{}) + case map[interface{}]interface{}: + m2 = cast.ToStringMap(val) + default: + // immediate value + shadow[strings.ToLower(fullKey)] = true + continue + } + // recursively merge to shadow map + shadow = v.flattenAndMergeMap(shadow, m2, fullKey) + } + return shadow +} + +// mergeFlatMap merges the given maps, excluding values of the second map +// shadowed by values from the first map. +func (v *Viper) mergeFlatMap(shadow map[string]bool, m map[string]interface{}) map[string]bool { + // scan keys +outer: + for k := range m { + path := strings.Split(k, v.keyDelim) + // scan intermediate paths + var parentKey string + for i := 1; i < len(path); i++ { + parentKey = strings.Join(path[0:i], v.keyDelim) + if shadow[parentKey] { + // path is shadowed, continue + continue outer + } + } + // add key + shadow[strings.ToLower(k)] = true + } + return shadow +} + +// AllSettings merges all settings and returns them as a map[string]interface{}. 
+func AllSettings() map[string]interface{} { return v.AllSettings() } + +func (v *Viper) AllSettings() map[string]interface{} { + m := map[string]interface{}{} + // start from the list of keys, and construct the map one value at a time + for _, k := range v.AllKeys() { + value := v.Get(k) + if value == nil { + // should not happen, since AllKeys() returns only keys holding a value, + // check just in case anything changes + continue + } + path := strings.Split(k, v.keyDelim) + lastKey := strings.ToLower(path[len(path)-1]) + deepestMap := deepSearch(m, path[0:len(path)-1]) + // set innermost value + deepestMap[lastKey] = value + } + return m +} + +// SetFs sets the filesystem to use to read configuration. +func SetFs(fs afero.Fs) { v.SetFs(fs) } + +func (v *Viper) SetFs(fs afero.Fs) { + v.fs = fs +} + +// SetConfigName sets name for the config file. +// Does not include extension. +func SetConfigName(in string) { v.SetConfigName(in) } + +func (v *Viper) SetConfigName(in string) { + if in != "" { + v.configName = in + v.configFile = "" + } +} + +// SetConfigType sets the type of the configuration returned by the +// remote source, e.g. "json". +func SetConfigType(in string) { v.SetConfigType(in) } + +func (v *Viper) SetConfigType(in string) { + if in != "" { + v.configType = in + } +} + +// SetConfigPermissions sets the permissions for the config file. +func SetConfigPermissions(perm os.FileMode) { v.SetConfigPermissions(perm) } + +func (v *Viper) SetConfigPermissions(perm os.FileMode) { + v.configPermissions = perm.Perm() +} + +// IniLoadOptions sets the load options for ini parsing. +func IniLoadOptions(in ini.LoadOptions) Option { + return optionFunc(func(v *Viper) { + v.iniLoadOptions = in + }) +} + +func (v *Viper) getConfigType() string { + if v.configType != "" { + return v.configType + } + + cf, err := v.getConfigFile() + if err != nil { + return "" + } + + ext := filepath.Ext(cf) + + if len(ext) > 1 { + return ext[1:] + } + + return "" +} + +func (v *Viper) getConfigFile() (string, error) { + if v.configFile == "" { + cf, err := v.findConfigFile() + if err != nil { + return "", err + } + v.configFile = cf + } + return v.configFile, nil +} + +func (v *Viper) searchInPath(in string) (filename string) { + jww.DEBUG.Println("Searching for config in ", in) + for _, ext := range SupportedExts { + jww.DEBUG.Println("Checking for", filepath.Join(in, v.configName+"."+ext)) + if b, _ := exists(v.fs, filepath.Join(in, v.configName+"."+ext)); b { + jww.DEBUG.Println("Found: ", filepath.Join(in, v.configName+"."+ext)) + return filepath.Join(in, v.configName+"."+ext) + } + } + + if v.configType != "" { + if b, _ := exists(v.fs, filepath.Join(in, v.configName)); b { + return filepath.Join(in, v.configName) + } + } + + return "" +} + +// Search all configPaths for any config file. +// Returns the first path that exists (and is a config file). +func (v *Viper) findConfigFile() (string, error) { + jww.INFO.Println("Searching for config in ", v.configPaths) + + for _, cp := range v.configPaths { + file := v.searchInPath(cp) + if file != "" { + return file, nil + } + } + return "", ConfigFileNotFoundError{v.configName, fmt.Sprintf("%s", v.configPaths)} +} + +// Debug prints all configuration registries for debugging +// purposes. 
+func Debug() { v.Debug() } + +func (v *Viper) Debug() { + fmt.Printf("Aliases:\n%#v\n", v.aliases) + fmt.Printf("Override:\n%#v\n", v.override) + fmt.Printf("PFlags:\n%#v\n", v.pflags) + fmt.Printf("Env:\n%#v\n", v.env) + fmt.Printf("Key/Value Store:\n%#v\n", v.kvstore) + fmt.Printf("Config:\n%#v\n", v.config) + fmt.Printf("Defaults:\n%#v\n", v.defaults) +} diff --git a/vendor/github.com/spf13/viper/watch.go b/vendor/github.com/spf13/viper/watch.go new file mode 100644 index 000000000..c433a8fa4 --- /dev/null +++ b/vendor/github.com/spf13/viper/watch.go @@ -0,0 +1,11 @@ +// +build !js + +package viper + +import "github.com/fsnotify/fsnotify" + +type watcher = fsnotify.Watcher + +func newWatcher() (*watcher, error) { + return fsnotify.NewWatcher() +} diff --git a/vendor/github.com/spf13/viper/watch_wasm.go b/vendor/github.com/spf13/viper/watch_wasm.go new file mode 100644 index 000000000..8e47e6a91 --- /dev/null +++ b/vendor/github.com/spf13/viper/watch_wasm.go @@ -0,0 +1,30 @@ +// +build js,wasm + +package viper + +import ( + "errors" + + "github.com/fsnotify/fsnotify" +) + +type watcher struct { + Events chan fsnotify.Event + Errors chan error +} + +func (*watcher) Close() error { + return nil +} + +func (*watcher) Add(name string) error { + return nil +} + +func (*watcher) Remove(name string) error { + return nil +} + +func newWatcher() (*watcher, error) { + return &watcher{}, errors.New("fsnotify is not supported on WASM") +} diff --git a/vendor/github.com/ssgreg/nlreturn/v2/LICENSE b/vendor/github.com/ssgreg/nlreturn/v2/LICENSE new file mode 100644 index 000000000..0a5b4d106 --- /dev/null +++ b/vendor/github.com/ssgreg/nlreturn/v2/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2018 Grigory Zubankov + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/github.com/ssgreg/nlreturn/v2/pkg/nlreturn/nlreturn.go b/vendor/github.com/ssgreg/nlreturn/v2/pkg/nlreturn/nlreturn.go new file mode 100644 index 000000000..52318ccfd --- /dev/null +++ b/vendor/github.com/ssgreg/nlreturn/v2/pkg/nlreturn/nlreturn.go @@ -0,0 +1,86 @@ +package nlreturn + +import ( + "fmt" + "go/ast" + "go/token" + + "golang.org/x/tools/go/analysis" +) + +const ( + linterName = "nlreturn" + linterDoc = `Linter requires a new line before return and branch statements except when the return is alone inside a statement group (such as an if statement) to increase code clarity.` +) + +// NewAnalyzer returns a new nlreturn analyzer. 
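The rule stated in linterDoc is easiest to see on a concrete snippet; an illustrative example (not part of the linter) of code nlreturn reports and code it accepts:

```go
package main

import "fmt"

func doWork() {}

// nlreturn reports the inner return: the statement right before it ends on the
// immediately preceding line, so there is no blank line in between.
func flagged(ok bool) int {
	if ok {
		doWork()
		return 1
	}

	return 0
}

// A blank line before the return satisfies the rule; a return that is the only
// statement in its block (i == 0 in inspectBlock) is never reported.
func accepted(ok bool) int {
	if ok {
		doWork()

		return 1
	}

	return 0
}

func main() {
	fmt.Println(flagged(true), accepted(false))
}
```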
+func NewAnalyzer() *analysis.Analyzer { + return &analysis.Analyzer{ + Name: linterName, + Doc: linterDoc, + Run: run, + } +} + +func run(pass *analysis.Pass) (interface{}, error) { + for _, f := range pass.Files { + ast.Inspect(f, func(node ast.Node) bool { + switch c := node.(type) { + case *ast.CaseClause: + inspectBlock(pass, c.Body) + case *ast.CommClause: + inspectBlock(pass, c.Body) + case *ast.BlockStmt: + inspectBlock(pass, c.List) + } + + return true + }) + } + + return nil, nil +} + +func inspectBlock(pass *analysis.Pass, block []ast.Stmt) { + for i, stmt := range block { + switch stmt.(type) { + case *ast.BranchStmt, *ast.ReturnStmt: + if i == 0 { + return + } + + if line(pass, stmt.Pos())-line(pass, block[i-1].End()) <= 1 { + pass.Report(analysis.Diagnostic{ + Pos: stmt.Pos(), + Message: fmt.Sprintf("%s with no blank line before", name(stmt)), + SuggestedFixes: []analysis.SuggestedFix{ + { + TextEdits: []analysis.TextEdit{ + { + Pos: stmt.Pos(), + NewText: []byte("\n"), + End: stmt.Pos(), + }, + }, + }, + }, + }) + } + } + } +} + +func name(stmt ast.Stmt) string { + switch c := stmt.(type) { + case *ast.BranchStmt: + return c.Tok.String() + case *ast.ReturnStmt: + return "return" + default: + return "unknown" + } +} + +func line(pass *analysis.Pass, pos token.Pos) int { + return pass.Fset.Position(pos).Line +} diff --git a/vendor/github.com/subosito/gotenv/.env b/vendor/github.com/subosito/gotenv/.env new file mode 100644 index 000000000..6405eca71 --- /dev/null +++ b/vendor/github.com/subosito/gotenv/.env @@ -0,0 +1 @@ +HELLO=world diff --git a/vendor/github.com/subosito/gotenv/.env.invalid b/vendor/github.com/subosito/gotenv/.env.invalid new file mode 100644 index 000000000..016d5e0ce --- /dev/null +++ b/vendor/github.com/subosito/gotenv/.env.invalid @@ -0,0 +1 @@ +lol$wut diff --git a/vendor/github.com/subosito/gotenv/.gitignore b/vendor/github.com/subosito/gotenv/.gitignore new file mode 100644 index 000000000..2b8d45610 --- /dev/null +++ b/vendor/github.com/subosito/gotenv/.gitignore @@ -0,0 +1,3 @@ +*.test +*.out +annotate.json diff --git a/vendor/github.com/subosito/gotenv/.travis.yml b/vendor/github.com/subosito/gotenv/.travis.yml new file mode 100644 index 000000000..3370d5f40 --- /dev/null +++ b/vendor/github.com/subosito/gotenv/.travis.yml @@ -0,0 +1,10 @@ +language: go +go: + - 1.x +os: + - linux + - osx +script: + - go test -test.v -coverprofile=coverage.out -covermode=count +after_success: + - bash <(curl -s https://codecov.io/bash) diff --git a/vendor/github.com/subosito/gotenv/CHANGELOG.md b/vendor/github.com/subosito/gotenv/CHANGELOG.md new file mode 100644 index 000000000..67f687382 --- /dev/null +++ b/vendor/github.com/subosito/gotenv/CHANGELOG.md @@ -0,0 +1,47 @@ +# Changelog + +## [1.2.0] - 2019-08-03 + +### Added + +- Add `Must` helper to raise an error as panic. It can be used with `Load` and `OverLoad`. +- Add more tests to be 100% coverage. +- Add CHANGELOG +- Add more OS for the test: OSX and Windows + +### Changed + +- Reduce complexity and improve source code for having `A+` score in [goreportcard](https://goreportcard.com/report/github.com/subosito/gotenv). +- Updated README with mentions to all available functions + +### Removed + +- Remove `ErrFormat` +- Remove `MustLoad` and `MustOverload`, replaced with `Must` helper. 
+ +## [1.1.1] - 2018-06-05 + +### Changed + +- Replace `os.Getenv` with `os.LookupEnv` to ensure that the environment variable is not set, by [radding](https://github.com/radding) + +## [1.1.0] - 2017-03-20 + +### Added + +- Supports carriage return in env +- Handle files with UTF-8 BOM + +### Changed + +- Whitespace handling + +### Fixed + +- Incorrect variable expansion +- Handling escaped '$' characters + +## [1.0.0] - 2014-10-05 + +First stable release. + diff --git a/vendor/github.com/subosito/gotenv/LICENSE b/vendor/github.com/subosito/gotenv/LICENSE new file mode 100644 index 000000000..f64ccaedc --- /dev/null +++ b/vendor/github.com/subosito/gotenv/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2013 Alif Rachmawadi + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/vendor/github.com/subosito/gotenv/README.md b/vendor/github.com/subosito/gotenv/README.md new file mode 100644 index 000000000..d610cdf0b --- /dev/null +++ b/vendor/github.com/subosito/gotenv/README.md @@ -0,0 +1,131 @@ +# gotenv + +[![Build Status](https://travis-ci.org/subosito/gotenv.svg?branch=master)](https://travis-ci.org/subosito/gotenv) +[![Build status](https://ci.appveyor.com/api/projects/status/wb2e075xkfl0m0v2/branch/master?svg=true)](https://ci.appveyor.com/project/subosito/gotenv/branch/master) +[![Coverage Status](https://badgen.net/codecov/c/github/subosito/gotenv)](https://codecov.io/gh/subosito/gotenv) +[![Go Report Card](https://goreportcard.com/badge/github.com/subosito/gotenv)](https://goreportcard.com/report/github.com/subosito/gotenv) +[![GoDoc](https://godoc.org/github.com/subosito/gotenv?status.svg)](https://godoc.org/github.com/subosito/gotenv) + +Load environment variables dynamically in Go. + +## Usage + +Put the gotenv package on your `import` statement: + +```go +import "github.com/subosito/gotenv" +``` + +To modify your app environment variables, `gotenv` expose 2 main functions: + +- `gotenv.Load` +- `gotenv.Apply` + +By default, `gotenv.Load` will look for a file called `.env` in the current working directory. + +Behind the scene, it will then load `.env` file and export the valid variables to the environment variables. Make sure you call the method as soon as possible to ensure it loads all variables, say, put it on `init()` function. + +Once loaded you can use `os.Getenv()` to get the value of the variable. 
+ +Let's say you have `.env` file: + +``` +APP_ID=1234567 +APP_SECRET=abcdef +``` + +Here's the example of your app: + +```go +package main + +import ( + "github.com/subosito/gotenv" + "log" + "os" +) + +func init() { + gotenv.Load() +} + +func main() { + log.Println(os.Getenv("APP_ID")) // "1234567" + log.Println(os.Getenv("APP_SECRET")) // "abcdef" +} +``` + +You can also load other than `.env` file if you wish. Just supply filenames when calling `Load()`. It will load them in order and the first value set for a variable will win.: + +```go +gotenv.Load(".env.production", "credentials") +``` + +While `gotenv.Load` loads entries from `.env` file, `gotenv.Apply` allows you to use any `io.Reader`: + +```go +gotenv.Apply(strings.NewReader("APP_ID=1234567")) + +log.Println(os.Getenv("APP_ID")) +// Output: "1234567" +``` + +Both `gotenv.Load` and `gotenv.Apply` **DO NOT** overrides existing environment variables. If you want to override existing ones, you can see section below. + +### Environment Overrides + +Besides above functions, `gotenv` also provides another functions that overrides existing: + +- `gotenv.OverLoad` +- `gotenv.OverApply` + + +Here's the example of this overrides behavior: + +```go +os.Setenv("HELLO", "world") + +// NOTE: using Apply existing value will be reserved +gotenv.Apply(strings.NewReader("HELLO=universe")) +fmt.Println(os.Getenv("HELLO")) +// Output: "world" + +// NOTE: using OverApply existing value will be overridden +gotenv.OverApply(strings.NewReader("HELLO=universe")) +fmt.Println(os.Getenv("HELLO")) +// Output: "universe" +``` + +### Throw a Panic + +Both `gotenv.Load` and `gotenv.OverLoad` returns an error on something wrong occurred, like your env file is not exist, and so on. To make it easier to use, `gotenv` also provides `gotenv.Must` helper, to let it panic when an error returned. + +```go +err := gotenv.Load(".env-is-not-exist") +fmt.Println("error", err) +// error: open .env-is-not-exist: no such file or directory + +gotenv.Must(gotenv.Load, ".env-is-not-exist") +// it will throw a panic +// panic: open .env-is-not-exist: no such file or directory +``` + +### Another Scenario + +Just in case you want to parse environment variables from any `io.Reader`, gotenv keeps its `Parse` and `StrictParse` function as public API so you can use that. + +```go +// import "strings" + +pairs := gotenv.Parse(strings.NewReader("FOO=test\nBAR=$FOO")) +// gotenv.Env{"FOO": "test", "BAR": "test"} + +err, pairs = gotenv.StrictParse(strings.NewReader(`FOO="bar"`)) +// gotenv.Env{"FOO": "bar"} +``` + +`Parse` ignores invalid lines and returns `Env` of valid environment variables, while `StrictParse` returns an error for invalid lines. + +## Notes + +The gotenv package is a Go port of [`dotenv`](https://github.com/bkeepers/dotenv) project with some additions made for Go. For general features, it aims to be compatible as close as possible. 
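The Parse/StrictParse contrast described above is easiest to see on the `lol$wut` fixture that ships with the package (`.env.invalid`); a minimal, illustrative sketch matching the signatures in gotenv.go below:

```go
package main

import (
	"fmt"
	"strings"

	"github.com/subosito/gotenv"
)

func main() {
	// Parse skips the invalid line and keeps the valid pair.
	env := gotenv.Parse(strings.NewReader("FOO=bar\nlol$wut"))
	fmt.Println(env["FOO"]) // "bar"

	// StrictParse reports the invalid line instead.
	_, err := gotenv.StrictParse(strings.NewReader("lol$wut"))
	fmt.Println(err) // line `lol$wut` doesn't match format
}
```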
diff --git a/vendor/github.com/subosito/gotenv/appveyor.yml b/vendor/github.com/subosito/gotenv/appveyor.yml new file mode 100644 index 000000000..33b4c4046 --- /dev/null +++ b/vendor/github.com/subosito/gotenv/appveyor.yml @@ -0,0 +1,9 @@ +build: off +clone_folder: c:\gopath\src\github.com\subosito\gotenv +environment: + GOPATH: c:\gopath +stack: go 1.10 +before_test: + - go get -t +test_script: + - go test -v -cover -race diff --git a/vendor/github.com/subosito/gotenv/gotenv.go b/vendor/github.com/subosito/gotenv/gotenv.go new file mode 100644 index 000000000..745a34489 --- /dev/null +++ b/vendor/github.com/subosito/gotenv/gotenv.go @@ -0,0 +1,265 @@ +// Package gotenv provides functionality to dynamically load the environment variables +package gotenv + +import ( + "bufio" + "fmt" + "io" + "os" + "regexp" + "strings" +) + +const ( + // Pattern for detecting valid line format + linePattern = `\A\s*(?:export\s+)?([\w\.]+)(?:\s*=\s*|:\s+?)('(?:\'|[^'])*'|"(?:\"|[^"])*"|[^#\n]+)?\s*(?:\s*\#.*)?\z` + + // Pattern for detecting valid variable within a value + variablePattern = `(\\)?(\$)(\{?([A-Z0-9_]+)?\}?)` +) + +// Env holds key/value pair of valid environment variable +type Env map[string]string + +/* +Load is a function to load a file or multiple files and then export the valid variables into environment variables if they do not exist. +When it's called with no argument, it will load `.env` file on the current path and set the environment variables. +Otherwise, it will loop over the filenames parameter and set the proper environment variables. +*/ +func Load(filenames ...string) error { + return loadenv(false, filenames...) +} + +/* +OverLoad is a function to load a file or multiple files and then export and override the valid variables into environment variables. +*/ +func OverLoad(filenames ...string) error { + return loadenv(true, filenames...) +} + +/* +Must is wrapper function that will panic when supplied function returns an error. +*/ +func Must(fn func(filenames ...string) error, filenames ...string) { + if err := fn(filenames...); err != nil { + panic(err.Error()) + } +} + +/* +Apply is a function to load an io Reader then export the valid variables into environment variables if they do not exist. +*/ +func Apply(r io.Reader) error { + return parset(r, false) +} + +/* +OverApply is a function to load an io Reader then export and override the valid variables into environment variables. +*/ +func OverApply(r io.Reader) error { + return parset(r, true) +} + +func loadenv(override bool, filenames ...string) error { + if len(filenames) == 0 { + filenames = []string{".env"} + } + + for _, filename := range filenames { + f, err := os.Open(filename) + if err != nil { + return err + } + + err = parset(f, override) + if err != nil { + return err + } + + f.Close() + } + + return nil +} + +// parse and set :) +func parset(r io.Reader, override bool) error { + env, err := StrictParse(r) + if err != nil { + return err + } + + for key, val := range env { + setenv(key, val, override) + } + + return nil +} + +func setenv(key, val string, override bool) { + if override { + os.Setenv(key, val) + } else { + if _, present := os.LookupEnv(key); !present { + os.Setenv(key, val) + } + } +} + +// Parse is a function to parse line by line any io.Reader supplied and returns the valid Env key/value pair of valid variables. +// It expands the value of a variable from the environment variable but does not set the value to the environment itself. 
+// This function is skipping any invalid lines and only processing the valid one. +func Parse(r io.Reader) Env { + env, _ := StrictParse(r) + return env +} + +// StrictParse is a function to parse line by line any io.Reader supplied and returns the valid Env key/value pair of valid variables. +// It expands the value of a variable from the environment variable but does not set the value to the environment itself. +// This function is returning an error if there are any invalid lines. +func StrictParse(r io.Reader) (Env, error) { + env := make(Env) + scanner := bufio.NewScanner(r) + + i := 1 + bom := string([]byte{239, 187, 191}) + + for scanner.Scan() { + line := scanner.Text() + + if i == 1 { + line = strings.TrimPrefix(line, bom) + } + + i++ + + err := parseLine(line, env) + if err != nil { + return env, err + } + } + + return env, nil +} + +func parseLine(s string, env Env) error { + rl := regexp.MustCompile(linePattern) + rm := rl.FindStringSubmatch(s) + + if len(rm) == 0 { + return checkFormat(s, env) + } + + key := rm[1] + val := rm[2] + + // determine if string has quote prefix + hdq := strings.HasPrefix(val, `"`) + + // determine if string has single quote prefix + hsq := strings.HasPrefix(val, `'`) + + // trim whitespace + val = strings.Trim(val, " ") + + // remove quotes '' or "" + rq := regexp.MustCompile(`\A(['"])(.*)(['"])\z`) + val = rq.ReplaceAllString(val, "$2") + + if hdq { + val = strings.Replace(val, `\n`, "\n", -1) + val = strings.Replace(val, `\r`, "\r", -1) + + // Unescape all characters except $ so variables can be escaped properly + re := regexp.MustCompile(`\\([^$])`) + val = re.ReplaceAllString(val, "$1") + } + + rv := regexp.MustCompile(variablePattern) + fv := func(s string) string { + return varReplacement(s, hsq, env) + } + + val = rv.ReplaceAllStringFunc(val, fv) + val = parseVal(val, env) + + env[key] = val + return nil +} + +func parseExport(st string, env Env) error { + if strings.HasPrefix(st, "export") { + vs := strings.SplitN(st, " ", 2) + + if len(vs) > 1 { + if _, ok := env[vs[1]]; !ok { + return fmt.Errorf("line `%s` has an unset variable", st) + } + } + } + + return nil +} + +func varReplacement(s string, hsq bool, env Env) string { + if strings.HasPrefix(s, "\\") { + return strings.TrimPrefix(s, "\\") + } + + if hsq { + return s + } + + sn := `(\$)(\{?([A-Z0-9_]+)\}?)` + rn := regexp.MustCompile(sn) + mn := rn.FindStringSubmatch(s) + + if len(mn) == 0 { + return s + } + + v := mn[3] + + replace, ok := env[v] + if !ok { + replace = os.Getenv(v) + } + + return replace +} + +func checkFormat(s string, env Env) error { + st := strings.TrimSpace(s) + + if (st == "") || strings.HasPrefix(st, "#") { + return nil + } + + if err := parseExport(st, env); err != nil { + return err + } + + return fmt.Errorf("line `%s` doesn't match format", s) +} + +func parseVal(val string, env Env) string { + if strings.Contains(val, "=") { + if !(val == "\n" || val == "\r") { + kv := strings.Split(val, "\n") + + if len(kv) == 1 { + kv = strings.Split(val, "\r") + } + + if len(kv) > 1 { + val = kv[0] + + for i := 1; i < len(kv); i++ { + parseLine(kv[i], env) + } + } + } + } + + return val +} diff --git a/vendor/github.com/tdakkota/asciicheck/.gitignore b/vendor/github.com/tdakkota/asciicheck/.gitignore new file mode 100644 index 000000000..cf875a711 --- /dev/null +++ b/vendor/github.com/tdakkota/asciicheck/.gitignore @@ -0,0 +1,33 @@ +# IntelliJ project files +.idea +*.iml +out +gen + +# Go template +# Binaries for programs and plugins +*.exe +*.exe~ +*.dll +*.so +*.dylib + 
+
+# Test binary, built with `go test -c`
+*.test
+
+# Output of the go coverage tool, specifically when used with LiteIDE
+*.out
+
+# Dependency directories (remove the comment below to include it)
+# vendor/
+.idea/$CACHE_FILE$
+.idea/$PRODUCT_WORKSPACE_FILE$
+.idea/.gitignore
+.idea/codeStyles
+.idea/deployment.xml
+.idea/inspectionProfiles/
+.idea/kotlinc.xml
+.idea/misc.xml
+.idea/modules.xml
+asciicheck.iml
+go.sum
diff --git a/vendor/github.com/tdakkota/asciicheck/LICENSE b/vendor/github.com/tdakkota/asciicheck/LICENSE
new file mode 100644
index 000000000..48a60cf1c
--- /dev/null
+++ b/vendor/github.com/tdakkota/asciicheck/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2020 tdakkota
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/vendor/github.com/tdakkota/asciicheck/README.md b/vendor/github.com/tdakkota/asciicheck/README.md
new file mode 100644
index 000000000..a7ff5884f
--- /dev/null
+++ b/vendor/github.com/tdakkota/asciicheck/README.md
@@ -0,0 +1,72 @@
+# asciicheck [![Go Report Card](https://goreportcard.com/badge/github.com/tdakkota/asciicheck)](https://goreportcard.com/report/github.com/tdakkota/asciicheck) [![codecov](https://codecov.io/gh/tdakkota/asciicheck/branch/master/graph/badge.svg)](https://codecov.io/gh/tdakkota/asciicheck) ![Go](https://github.com/tdakkota/asciicheck/workflows/Go/badge.svg)
+Simple linter to check that your code does not contain non-ASCII identifiers
+
+# Install
+
+```
+go get -u github.com/tdakkota/asciicheck/cmd/asciicheck
+```
+
+# Reason to use
+So, do you see this code? Looks correct, doesn't it?
+
+```go
+package main
+
+import "fmt"
+
+type TеstStruct struct{}
+
+func main() {
+	s := TestStruct{}
+	fmt.Println(s)
+}
+```
+But if you try to run it, you will get an error:
+```
+./prog.go:8:7: undefined: TestStruct
+```
+What? `TestStruct` is defined above, but the compiler thinks differently. Why?
+
+**Answer**:
+Because `TestStruct` is not `TеstStruct`.
+``` +type TеstStruct struct{} + ^ this 'e' (U+0435) is not 'e' (U+0065) +``` + +# Usage +asciicheck uses [`singlechecker`](https://pkg.go.dev/golang.org/x/tools/go/analysis/singlechecker) package to run: + +``` +asciicheck: checks that all code identifiers does not have non-ASCII symbols in the name + +Usage: asciicheck [-flag] [package] + + +Flags: + -V print version and exit + -all + no effect (deprecated) + -c int + display offending line with this many lines of context (default -1) + -cpuprofile string + write CPU profile to this file + -debug string + debug flags, any subset of "fpstv" + -fix + apply all suggested fixes + -flags + print analyzer flags in JSON + -json + emit JSON output + -memprofile string + write memory profile to this file + -source + no effect (deprecated) + -tags string + no effect (deprecated) + -trace string + write trace log to this file + -v no effect (deprecated) +``` diff --git a/vendor/github.com/tdakkota/asciicheck/ascii.go b/vendor/github.com/tdakkota/asciicheck/ascii.go new file mode 100644 index 000000000..9e70c391d --- /dev/null +++ b/vendor/github.com/tdakkota/asciicheck/ascii.go @@ -0,0 +1,18 @@ +package asciicheck + +import "unicode" + +func isASCII(s string) (rune, bool) { + if len(s) == 1 { + return []rune(s)[0], s[0] <= unicode.MaxASCII + } + + r := []rune(s) + for i := 0; i < len(s); i++ { + if r[i] > unicode.MaxASCII { + return r[i], false + } + } + + return 0, true +} diff --git a/vendor/github.com/tdakkota/asciicheck/asciicheck.go b/vendor/github.com/tdakkota/asciicheck/asciicheck.go new file mode 100644 index 000000000..690728022 --- /dev/null +++ b/vendor/github.com/tdakkota/asciicheck/asciicheck.go @@ -0,0 +1,49 @@ +package asciicheck + +import ( + "fmt" + "go/ast" + "golang.org/x/tools/go/analysis" +) + +func NewAnalyzer() *analysis.Analyzer { + return &analysis.Analyzer{ + Name: "asciicheck", + Doc: "checks that all code identifiers does not have non-ASCII symbols in the name", + Run: run, + } +} + +func run(pass *analysis.Pass) (interface{}, error) { + for _, file := range pass.Files { + alreadyViewed := map[*ast.Object]struct{}{} + ast.Inspect( + file, func(node ast.Node) bool { + cb(pass, node, alreadyViewed) + return true + }, + ) + } + + return nil, nil +} + +func cb(pass *analysis.Pass, n ast.Node, m map[*ast.Object]struct{}) { + if v, ok := n.(*ast.Ident); ok { + if _, ok := m[v.Obj]; ok { + return + } else { + m[v.Obj] = struct{}{} + } + + ch, ascii := isASCII(v.Name) + if !ascii { + pass.Report( + analysis.Diagnostic{ + Pos: v.Pos(), + Message: fmt.Sprintf("identifier \"%s\" contain non-ASCII character: %#U", v.Name, ch), + }, + ) + } + } +} diff --git a/vendor/github.com/tdakkota/asciicheck/go.mod b/vendor/github.com/tdakkota/asciicheck/go.mod new file mode 100644 index 000000000..43aa5d806 --- /dev/null +++ b/vendor/github.com/tdakkota/asciicheck/go.mod @@ -0,0 +1,5 @@ +module github.com/tdakkota/asciicheck + +go 1.13 + +require golang.org/x/tools v0.0.0-20200414032229-332987a829c3 diff --git a/vendor/github.com/tetafro/godot/.gitignore b/vendor/github.com/tetafro/godot/.gitignore new file mode 100644 index 000000000..db77fd15d --- /dev/null +++ b/vendor/github.com/tetafro/godot/.gitignore @@ -0,0 +1,4 @@ +/dist/ +/vendor/ +/godot +/profile.out diff --git a/vendor/github.com/tetafro/godot/.godot.yaml b/vendor/github.com/tetafro/godot/.godot.yaml new file mode 100644 index 000000000..af36858f9 --- /dev/null +++ b/vendor/github.com/tetafro/godot/.godot.yaml @@ -0,0 +1,16 @@ +# Which comments to check: +# declarations - for top 
level declaration comments (default); +# toplevel - for top level comments; +# all - for all comments. +scope: declarations + +# List of regexps for excluding particular comment lines from check. +# Example: exclude comments which contain numbers. +exclude: + # - '[0-9]+' + +# Check periods at the end of sentences. +period: true + +# Check that first letter of each sentence is capital. +capital: false diff --git a/vendor/github.com/tetafro/godot/.golangci.yml b/vendor/github.com/tetafro/godot/.golangci.yml new file mode 100644 index 000000000..2b799b265 --- /dev/null +++ b/vendor/github.com/tetafro/godot/.golangci.yml @@ -0,0 +1,67 @@ +run: + concurrency: 2 + deadline: 5m + +skip-dirs: + - path: ./testdata/ + +linters: + disable-all: true + enable: + - deadcode + - errcheck + - gosimple + - govet + - ineffassign + - staticcheck + - structcheck + - typecheck + - unused + - varcheck + - bodyclose + - depguard + - dogsled + - dupl + - funlen + - gochecknoinits + - goconst + - gocritic + - gocyclo + - godot + - gofmt + - gofumpt + - goimports + - golint + - gomnd + - gomodguard + - goprintffuncname + - gosec + - lll + - maligned + - misspell + - nakedret + - nestif + - prealloc + - rowserrcheck + - scopelint + - stylecheck + - unconvert + - unparam + - whitespace + +linters-settings: + godot: + check-all: true + +issues: + exclude-use-default: false + exclude-rules: + - path: _test\.go + linters: + - dupl + - errcheck + - funlen + - gosec + - path: cmd/godot/main\.go + linters: + - gomnd diff --git a/vendor/github.com/tetafro/godot/.goreleaser.yml b/vendor/github.com/tetafro/godot/.goreleaser.yml new file mode 100644 index 000000000..c0fc2b6b1 --- /dev/null +++ b/vendor/github.com/tetafro/godot/.goreleaser.yml @@ -0,0 +1,11 @@ +builds: + - dir: ./cmd/godot +checksum: + name_template: checksums.txt +snapshot: + name_template: "{{ .Tag }}" +changelog: + sort: asc + filters: + exclude: + - '^Merge pull request' diff --git a/vendor/github.com/tetafro/godot/LICENSE b/vendor/github.com/tetafro/godot/LICENSE new file mode 100644 index 000000000..120c6d502 --- /dev/null +++ b/vendor/github.com/tetafro/godot/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2020 Denis Krivak + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
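As a minimal sketch (not part of the vendored files), the options in the `.godot.yaml` above map onto the `Settings` struct and `Run` entry point that appear later in this patch (`settings.go`, `godot.go`); the `main.go` path here is a placeholder:

```go
package main

import (
	"fmt"
	"go/parser"
	"go/token"
	"log"

	"github.com/tetafro/godot"
)

func main() {
	fset := token.NewFileSet()
	// Parse a file together with its comments so godot can inspect them.
	file, err := parser.ParseFile(fset, "main.go", nil, parser.ParseComments)
	if err != nil {
		log.Fatal(err)
	}

	// Same defaults as in the .godot.yaml shown above.
	issues, err := godot.Run(file, fset, godot.Settings{
		Scope:   godot.DeclScope, // "declarations"
		Period:  true,
		Capital: false,
	})
	if err != nil {
		log.Fatal(err)
	}
	for _, iss := range issues {
		fmt.Printf("%s: %s\n", iss.Message, iss.Pos)
	}
}
```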
diff --git a/vendor/github.com/tetafro/godot/Makefile b/vendor/github.com/tetafro/godot/Makefile new file mode 100644 index 000000000..f167051e8 --- /dev/null +++ b/vendor/github.com/tetafro/godot/Makefile @@ -0,0 +1,25 @@ +.PHONY: dep +dep: + go mod tidy && go mod verify + +.PHONY: test +test: + go test ./... + +.PHONY: cover +cover: + go test -coverprofile cover.out ./... + go tool cover -html=cover.out + rm -f cover.out + +.PHONY: lint +lint: + golangci-lint run + +.PHONY: build +build: + go build -o godot ./cmd/godot + +.PHONY: release +release: + goreleaser release --rm-dist diff --git a/vendor/github.com/tetafro/godot/README.md b/vendor/github.com/tetafro/godot/README.md new file mode 100644 index 000000000..ff3516e6c --- /dev/null +++ b/vendor/github.com/tetafro/godot/README.md @@ -0,0 +1,84 @@ +# godot + +[![License](http://img.shields.io/badge/license-MIT-green.svg?style=flat)](https://raw.githubusercontent.com/tetafro/godot/master/LICENSE) +[![Github CI](https://img.shields.io/github/workflow/status/tetafro/godot/Test)](https://github.com/tetafro/godot/actions?query=workflow%3ATest) +[![Go Report](https://goreportcard.com/badge/github.com/tetafro/godot)](https://goreportcard.com/report/github.com/tetafro/godot) +[![Codecov](https://codecov.io/gh/tetafro/godot/branch/master/graph/badge.svg)](https://codecov.io/gh/tetafro/godot) + +Linter that checks if all top-level comments contain a period at the +end of the last sentence if needed. + +[CodeReviewComments](https://github.com/golang/go/wiki/CodeReviewComments#comment-sentences) quote: + +> Comments should begin with the name of the thing being described +> and end in a period + +## Install + +*NOTE: Godot is available as a part of [GolangCI Lint](https://github.com/golangci/golangci-lint) +(disabled by default).* + +Build from source + +```sh +go get -u github.com/tetafro/godot/cmd/godot +``` + +or download binary from [releases page](https://github.com/tetafro/godot/releases). + +## Config + +You can specify options using config file. Use default name `.godot.yaml`, or +set it using `-c filename.yaml` argument. If no config provided the following +defaults are used: + +```yaml +# Which comments to check: +# declarations - for top level declaration comments (default); +# toplevel - for top level comments; +# all - for all comments. +scope: declarations + +# List pf regexps for excluding particular comment lines from check. +exclude: + +# Check periods at the end of sentences. +period: true + +# Check that first letter of each sentence is capital. +capital: false +``` + +## Run + +```sh +godot ./myproject +``` + +Autofix flags are also available + +```sh +godot -f ./myproject # fix issues and print the result +godot -w ./myproject # fix issues and replace the original file +``` + +See all flags with `godot -h`. + +## Example + +Code + +```go +package math + +// Sum sums two integers +func Sum(a, b int) int { + return a + b // result +} +``` + +Output + +```sh +Comment should end in a period: math/math.go:3:1 +``` diff --git a/vendor/github.com/tetafro/godot/checks.go b/vendor/github.com/tetafro/godot/checks.go new file mode 100644 index 000000000..4a4532767 --- /dev/null +++ b/vendor/github.com/tetafro/godot/checks.go @@ -0,0 +1,269 @@ +package godot + +import ( + "go/token" + "regexp" + "strings" + "unicode" +) + +// Error messages. +const ( + noPeriodMessage = "Comment should end in a period" + noCapitalMessage = "Sentence should start with a capital letter" +) + +var ( + // List of valid sentence ending. 
+ // A sentence can be inside parenthesis, and therefore ends with parenthesis. + lastChars = []string{".", "?", "!", ".)", "?)", "!)", "。", "?", "!", "。)", "?)", "!)", specialReplacer} + + // Abbreviations to exclude from capital letters check. + abbreviations = []string{"i.e.", "i. e.", "e.g.", "e. g.", "etc."} + + // Special tags in comments like "// nolint:", or "// +k8s:". + tags = regexp.MustCompile(`^\+?[a-z0-9]+:`) + + // Special hashtags in comments like "// #nosec". + hashtags = regexp.MustCompile(`^#[a-z]+($|\s)`) + + // URL at the end of the line. + endURL = regexp.MustCompile(`[a-z]+://[^\s]+$`) +) + +// checkComments checks every comment accordings to the rules from +// `settings` argument. +func checkComments(comments []comment, settings Settings) []Issue { + var issues []Issue // nolint: prealloc + for _, c := range comments { + if settings.Period { + if iss := checkCommentForPeriod(c); iss != nil { + issues = append(issues, *iss) + } + } + if settings.Capital { + if iss := checkCommentForCapital(c); len(iss) > 0 { + issues = append(issues, iss...) + } + } + } + return issues +} + +// checkCommentForPeriod checks that the last sentense of the comment ends +// in a period. +func checkCommentForPeriod(c comment) *Issue { + pos, ok := checkPeriod(c.text) + if ok { + return nil + } + + // Shift position by the length of comment's special symbols: /* or // + isBlock := strings.HasPrefix(c.lines[0], "/*") + if (isBlock && pos.line == 1) || !isBlock { + pos.column += 2 + } + + iss := Issue{ + Pos: token.Position{ + Filename: c.start.Filename, + Offset: c.start.Offset, + Line: pos.line + c.start.Line - 1, + Column: pos.column + c.start.Column - 1, + }, + Message: noPeriodMessage, + } + + // Make a replacement. Use `pos.line` to get an original line from + // attached lines. Use `iss.Pos.Column` because it's a position in + // the original line. + original := []rune(c.lines[pos.line-1]) + iss.Replacement = string(original[:iss.Pos.Column-1]) + "." + + string(original[iss.Pos.Column-1:]) + + // Save replacement to raw lines to be able to combine it with + // further replacements + c.lines[pos.line-1] = iss.Replacement + + return &iss +} + +// checkCommentForCapital checks that each sentense of the comment starts with +// a capital letter. +// nolint: unparam +func checkCommentForCapital(c comment) []Issue { + pp := checkCapital(c.text, c.decl) + if len(pp) == 0 { + return nil + } + + issues := make([]Issue, len(pp)) + for i, pos := range pp { + // Shift position by the length of comment's special symbols: /* or // + isBlock := strings.HasPrefix(c.lines[0], "/*") + if (isBlock && pos.line == 1) || !isBlock { + pos.column += 2 + } + + iss := Issue{ + Pos: token.Position{ + Filename: c.start.Filename, + Offset: c.start.Offset, + Line: pos.line + c.start.Line - 1, + Column: pos.column + c.start.Column - 1, + }, + Message: noCapitalMessage, + } + + // Make a replacement. Use `pos.line` to get an original line from + // attached lines. Use `iss.Pos.Column` because it's a position in + // the original line. + rep := []rune(c.lines[pos.line-1]) + rep[iss.Pos.Column-1] = unicode.ToTitle(rep[iss.Pos.Column-1]) + iss.Replacement = string(rep) + + // Save replacement to raw lines to be able to combine it with + // further replacements + c.lines[pos.line-1] = iss.Replacement + + issues[i] = iss + } + + return issues +} + +// checkPeriod checks that the last sentense of the text ends in a period. +// NOTE: Returned position is a position inside given text, not in the +// original file. 
+func checkPeriod(comment string) (pos position, ok bool) { + // Check last non-empty line + var found bool + var line string + lines := strings.Split(comment, "\n") + for i := len(lines) - 1; i >= 0; i-- { + line = strings.TrimRightFunc(lines[i], unicode.IsSpace) + if line == "" { + continue + } + found = true + pos.line = i + 1 + break + } + // All lines are empty + if !found { + return position{}, true + } + // Correct line + if hasSuffix(line, lastChars) { + return position{}, true + } + + pos.column = len([]rune(line)) + 1 + return pos, false +} + +// checkCapital checks that each sentense of the text starts with +// a capital letter. +// NOTE: First letter is not checked in declaration comments, because they +// can describe unexported functions, which start from small letter. +func checkCapital(comment string, skipFirst bool) (pp []position) { + // Remove common abbreviations from the comment + for _, abbr := range abbreviations { + repl := strings.ReplaceAll(abbr, ".", "_") + comment = strings.ReplaceAll(comment, abbr, repl) + } + + // List of states during the scan: `empty` - nothing special, + // `endChar` - found one of sentence ending chars (.!?), + // `endOfSentence` - found `endChar`, and then space or newline. + const empty, endChar, endOfSentence = 1, 2, 3 + + pos := position{line: 1} + state := endOfSentence + if skipFirst { + state = empty + } + for _, r := range comment { + s := string(r) + + pos.column++ + if s == "\n" { + pos.line++ + pos.column = 0 + if state == endChar { + state = endOfSentence + } + continue + } + if s == "." || s == "!" || s == "?" { + state = endChar + continue + } + if s == ")" && state == endChar { + continue + } + if s == " " { + if state == endChar { + state = endOfSentence + } + continue + } + if state == endOfSentence && unicode.IsLower(r) { + pp = append(pp, position{line: pos.line, column: pos.column}) + } + state = empty + } + return pp +} + +// isSpecialBlock checks that given block of comment lines is special and +// shouldn't be checked as a regular sentence. +func isSpecialBlock(comment string) bool { + // Skip cgo code blocks + // TODO: Find a better way to detect cgo code + if strings.HasPrefix(comment, "/*") && (strings.Contains(comment, "#include") || + strings.Contains(comment, "#define")) { + return true + } + return false +} + +// isSpecialBlock checks that given comment line is special and +// shouldn't be checked as a regular sentence. 
+func isSpecialLine(comment string) bool { + // Skip cgo export tags: https://golang.org/cmd/cgo/#hdr-C_references_to_Go + if strings.HasPrefix(comment, "//export ") { + return true + } + + comment = strings.TrimPrefix(comment, "//") + comment = strings.TrimPrefix(comment, "/*") + + // Don't check comments starting with space indentation - they may + // contain code examples, which shouldn't end with period + if strings.HasPrefix(comment, " ") || + strings.HasPrefix(comment, " \t") || + strings.HasPrefix(comment, "\t") { + return true + } + + // Skip tags and URLs + comment = strings.TrimSpace(comment) + if tags.MatchString(comment) || + hashtags.MatchString(comment) || + endURL.MatchString(comment) || + strings.HasPrefix(comment, "+build") { + return true + } + + return false +} + +func hasSuffix(s string, suffixes []string) bool { + for _, suffix := range suffixes { + if strings.HasSuffix(s, suffix) { + return true + } + } + return false +} diff --git a/vendor/github.com/tetafro/godot/getters.go b/vendor/github.com/tetafro/godot/getters.go new file mode 100644 index 000000000..6153772bd --- /dev/null +++ b/vendor/github.com/tetafro/godot/getters.go @@ -0,0 +1,283 @@ +package godot + +import ( + "errors" + "fmt" + "go/ast" + "go/token" + "io/ioutil" + "regexp" + "strings" +) + +var ( + errEmptyInput = errors.New("empty input") + errUnsuitableInput = errors.New("unsuitable input") +) + +// specialReplacer is a replacer for some types of special lines in comments, +// which shouldn't be checked. For example, if comment ends with a block of +// code it should not necessarily have a period at the end. +const specialReplacer = "" + +type parsedFile struct { + fset *token.FileSet + file *ast.File + lines []string +} + +func newParsedFile(file *ast.File, fset *token.FileSet) (*parsedFile, error) { + if file == nil || fset == nil || len(file.Comments) == 0 { + return nil, errEmptyInput + } + + pf := parsedFile{ + fset: fset, + file: file, + } + + var err error + + // Read original file. This is necessary for making a replacements for + // inline comments. I couldn't find a better way to get original line + // with code and comment without reading the file. Function `Format` + // from "go/format" won't help here if the original file is not gofmt-ed. + pf.lines, err = readFile(file, fset) + if err != nil { + return nil, fmt.Errorf("read file: %v", err) + } + + // Dirty hack. For some cases Go generates temporary files during + // compilation process if there is a cgo block in the source file. Some of + // these temporary files are just copies of original source files but with + // new generated comments at the top. Because of them the content differs + // from AST. For some reason it differs only in golangci-lint. I failed to + // find out the exact description of the process, so let's just skip files + // generated by cgo. + if isCgoGenerated(pf.lines) { + return nil, errUnsuitableInput + } + + // Check consistency to avoid checking slice indexes in each function + lastComment := pf.file.Comments[len(pf.file.Comments)-1] + if p := pf.fset.Position(lastComment.End()); len(pf.lines) < p.Line { + return nil, fmt.Errorf("inconsistency between file and AST: %s", p.Filename) + } + + return &pf, nil +} + +// getComments extracts comments from a file. 
+func (pf *parsedFile) getComments(scope Scope, exclude []*regexp.Regexp) []comment { + var comments []comment + decl := pf.getDeclarationComments(exclude) + switch scope { + case AllScope: + // All comments + comments = pf.getAllComments(exclude) + case TopLevelScope: + // All top level comments and comments from the inside + // of top level blocks + comments = append( + pf.getBlockComments(exclude), + pf.getTopLevelComments(exclude)..., + ) + default: + // Top level declaration comments and comments from the inside + // of top level blocks + comments = append(pf.getBlockComments(exclude), decl...) + } + + // Set `decl` flag + setDecl(comments, decl) + + return comments +} + +// getBlockComments gets comments from the inside of top level blocks: +// var (...), const (...). +func (pf *parsedFile) getBlockComments(exclude []*regexp.Regexp) []comment { + var comments []comment + for _, decl := range pf.file.Decls { + d, ok := decl.(*ast.GenDecl) + if !ok { + continue + } + // No parenthesis == no block + if d.Lparen == 0 { + continue + } + for _, c := range pf.file.Comments { + if c == nil || len(c.List) == 0 { + continue + } + // Skip comments outside this block + if d.Lparen > c.Pos() || c.Pos() > d.Rparen { + continue + } + // Skip comments that are not top-level for this block + // (the block itself is top level, so comments inside this block + // would be on column 2) + // nolint: gomnd + if pf.fset.Position(c.Pos()).Column != 2 { + continue + } + firstLine := pf.fset.Position(c.Pos()).Line + lastLine := pf.fset.Position(c.End()).Line + comments = append(comments, comment{ + lines: pf.lines[firstLine-1 : lastLine], + text: getText(c, exclude), + start: pf.fset.Position(c.List[0].Slash), + }) + } + } + return comments +} + +// getTopLevelComments gets all top level comments. +func (pf *parsedFile) getTopLevelComments(exclude []*regexp.Regexp) []comment { + var comments []comment // nolint: prealloc + for _, c := range pf.file.Comments { + if c == nil || len(c.List) == 0 { + continue + } + if pf.fset.Position(c.Pos()).Column != 1 { + continue + } + firstLine := pf.fset.Position(c.Pos()).Line + lastLine := pf.fset.Position(c.End()).Line + comments = append(comments, comment{ + lines: pf.lines[firstLine-1 : lastLine], + text: getText(c, exclude), + start: pf.fset.Position(c.List[0].Slash), + }) + } + return comments +} + +// getDeclarationComments gets top level declaration comments. +func (pf *parsedFile) getDeclarationComments(exclude []*regexp.Regexp) []comment { + var comments []comment // nolint: prealloc + for _, decl := range pf.file.Decls { + var cg *ast.CommentGroup + switch d := decl.(type) { + case *ast.GenDecl: + cg = d.Doc + case *ast.FuncDecl: + cg = d.Doc + } + + if cg == nil || len(cg.List) == 0 { + continue + } + + firstLine := pf.fset.Position(cg.Pos()).Line + lastLine := pf.fset.Position(cg.End()).Line + comments = append(comments, comment{ + lines: pf.lines[firstLine-1 : lastLine], + text: getText(cg, exclude), + start: pf.fset.Position(cg.List[0].Slash), + }) + } + return comments +} + +// getAllComments gets every single comment from the file. 
+func (pf *parsedFile) getAllComments(exclude []*regexp.Regexp) []comment { + var comments []comment //nolint: prealloc + for _, c := range pf.file.Comments { + if c == nil || len(c.List) == 0 { + continue + } + firstLine := pf.fset.Position(c.Pos()).Line + lastLine := pf.fset.Position(c.End()).Line + comments = append(comments, comment{ + lines: pf.lines[firstLine-1 : lastLine], + start: pf.fset.Position(c.List[0].Slash), + text: getText(c, exclude), + }) + } + return comments +} + +// getText extracts text from comment. If comment is a special block +// (e.g., CGO code), a block of empty lines is returned. If comment contains +// special lines (e.g., tags or indented code examples), they are replaced +// with `specialReplacer` to skip checks for it. +// The result can be multiline. +func getText(comment *ast.CommentGroup, exclude []*regexp.Regexp) (s string) { + if len(comment.List) == 1 && + strings.HasPrefix(comment.List[0].Text, "/*") && + isSpecialBlock(comment.List[0].Text) { + return "" + } + + for _, c := range comment.List { + text := c.Text + isBlock := false + if strings.HasPrefix(c.Text, "/*") { + isBlock = true + text = strings.TrimPrefix(text, "/*") + text = strings.TrimSuffix(text, "*/") + } + for _, line := range strings.Split(text, "\n") { + if isSpecialLine(line) { + s += specialReplacer + "\n" + continue + } + if !isBlock { + line = strings.TrimPrefix(line, "//") + } + if matchAny(line, exclude) { + s += specialReplacer + "\n" + continue + } + s += line + "\n" + } + } + if len(s) == 0 { + return "" + } + return s[:len(s)-1] // trim last "\n" +} + +// readFile reads file and returns it's lines as strings. +func readFile(file *ast.File, fset *token.FileSet) ([]string, error) { + fname := fset.File(file.Package) + f, err := ioutil.ReadFile(fname.Name()) + if err != nil { + return nil, err + } + return strings.Split(string(f), "\n"), nil +} + +// setDecl sets `decl` flag to comments which are declaration comments. +func setDecl(comments, decl []comment) { + for _, d := range decl { + for i, c := range comments { + if d.start == c.start { + comments[i].decl = true + break + } + } + } +} + +// matchAny checks if string matches any of given regexps. 
+func matchAny(s string, rr []*regexp.Regexp) bool { + for _, re := range rr { + if re.MatchString(s) { + return true + } + } + return false +} + +func isCgoGenerated(lines []string) bool { + for i := range lines { + if strings.Contains(lines[i], "Code generated by cmd/cgo") { + return true + } + } + return false +} diff --git a/vendor/github.com/tetafro/godot/go.mod b/vendor/github.com/tetafro/godot/go.mod new file mode 100644 index 000000000..86ead9453 --- /dev/null +++ b/vendor/github.com/tetafro/godot/go.mod @@ -0,0 +1,5 @@ +module github.com/tetafro/godot + +go 1.16 + +require gopkg.in/yaml.v2 v2.4.0 diff --git a/vendor/github.com/tetafro/godot/go.sum b/vendor/github.com/tetafro/godot/go.sum new file mode 100644 index 000000000..dd0bc19f1 --- /dev/null +++ b/vendor/github.com/tetafro/godot/go.sum @@ -0,0 +1,4 @@ +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= +gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= diff --git a/vendor/github.com/tetafro/godot/godot.go b/vendor/github.com/tetafro/godot/godot.go new file mode 100644 index 000000000..dcc515b9c --- /dev/null +++ b/vendor/github.com/tetafro/godot/godot.go @@ -0,0 +1,135 @@ +// Package godot checks if comments contain a period at the end of the last +// sentence if needed. +package godot + +import ( + "fmt" + "go/ast" + "go/token" + "io/ioutil" + "os" + "regexp" + "sort" + "strings" +) + +// NOTE: Line and column indexes are 1-based. + +// NOTE: Errors `invalid line number inside comment...` should never happen. +// Their goal is to prevent panic, if there's a bug with array indexes. + +// Issue contains a description of linting error and a recommended replacement. +type Issue struct { + Pos token.Position + Message string + Replacement string +} + +// position is a position inside a comment (might be multiline comment). +type position struct { + line int + column int +} + +// comment is an internal representation of AST comment entity with additional +// data attached. The latter is used for creating a full replacement for +// the line with issues. +type comment struct { + lines []string // unmodified lines from file + text string // concatenated `lines` with special parts excluded + start token.Position // position of the first symbol in comment + decl bool // whether comment is a special one (should not be checked) +} + +// Run runs this linter on the provided code. +func Run(file *ast.File, fset *token.FileSet, settings Settings) ([]Issue, error) { + pf, err := newParsedFile(file, fset) + if err == errEmptyInput || err == errUnsuitableInput { + return nil, nil + } + if err != nil { + return nil, fmt.Errorf("parse input file: %v", err) + } + + exclude := make([]*regexp.Regexp, len(settings.Exclude)) + for i := 0; i < len(settings.Exclude); i++ { + exclude[i], err = regexp.Compile(settings.Exclude[i]) + if err != nil { + return nil, fmt.Errorf("invalid regexp: %v", err) + } + } + + comments := pf.getComments(settings.Scope, exclude) + issues := checkComments(comments, settings) + sortIssues(issues) + + return issues, nil +} + +// Fix fixes all issues and returns new version of file content. 
+func Fix(path string, file *ast.File, fset *token.FileSet, settings Settings) ([]byte, error) { + // Read file + content, err := ioutil.ReadFile(path) // nolint: gosec + if err != nil { + return nil, fmt.Errorf("read file: %v", err) + } + if len(content) == 0 { + return nil, nil + } + + issues, err := Run(file, fset, settings) + if err != nil { + return nil, fmt.Errorf("run linter: %v", err) + } + + // slice -> map + m := map[int]Issue{} + for _, iss := range issues { + m[iss.Pos.Line] = iss + } + + // Replace lines from issues + fixed := make([]byte, 0, len(content)) + for i, line := range strings.Split(string(content), "\n") { + newline := line + if iss, ok := m[i+1]; ok { + newline = iss.Replacement + } + fixed = append(fixed, []byte(newline+"\n")...) + } + fixed = fixed[:len(fixed)-1] // trim last "\n" + + return fixed, nil +} + +// Replace rewrites original file with it's fixed version. +func Replace(path string, file *ast.File, fset *token.FileSet, settings Settings) error { + info, err := os.Stat(path) + if err != nil { + return fmt.Errorf("check file: %v", err) + } + mode := info.Mode() + + fixed, err := Fix(path, file, fset, settings) + if err != nil { + return fmt.Errorf("fix issues: %v", err) + } + + if err := ioutil.WriteFile(path, fixed, mode); err != nil { + return fmt.Errorf("write file: %v", err) + } + return nil +} + +// sortIssues sorts by filename, line and column. +func sortIssues(iss []Issue) { + sort.Slice(iss, func(i, j int) bool { + if iss[i].Pos.Filename != iss[j].Pos.Filename { + return iss[i].Pos.Filename < iss[j].Pos.Filename + } + if iss[i].Pos.Line != iss[j].Pos.Line { + return iss[i].Pos.Line < iss[j].Pos.Line + } + return iss[i].Pos.Column < iss[j].Pos.Column + }) +} diff --git a/vendor/github.com/tetafro/godot/settings.go b/vendor/github.com/tetafro/godot/settings.go new file mode 100644 index 000000000..b71bf5d58 --- /dev/null +++ b/vendor/github.com/tetafro/godot/settings.go @@ -0,0 +1,29 @@ +package godot + +// Settings contains linter settings. +type Settings struct { + // Which comments to check (top level declarations, top level, all). + Scope Scope + + // Regexp for excluding particular comment lines from check. + Exclude []string + + // Check periods at the end of sentences. + Period bool + + // Check that first letter of each sentence is capital. + Capital bool +} + +// Scope sets which comments should be checked. +type Scope string + +// List of available check scopes. +const ( + // DeclScope is for top level declaration comments. + DeclScope Scope = "declarations" + // TopLevelScope is for all top level comments. + TopLevelScope Scope = "toplevel" + // AllScope is for all comments. 
+ AllScope Scope = "all" +) diff --git a/vendor/github.com/timakin/bodyclose/LICENSE b/vendor/github.com/timakin/bodyclose/LICENSE new file mode 100644 index 000000000..6957f1889 --- /dev/null +++ b/vendor/github.com/timakin/bodyclose/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2019 Seiji Takahashi + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/github.com/timakin/bodyclose/passes/bodyclose/bodyclose.go b/vendor/github.com/timakin/bodyclose/passes/bodyclose/bodyclose.go new file mode 100644 index 000000000..145d5409e --- /dev/null +++ b/vendor/github.com/timakin/bodyclose/passes/bodyclose/bodyclose.go @@ -0,0 +1,368 @@ +package bodyclose + +import ( + "fmt" + "go/ast" + "go/types" + "strconv" + "strings" + + "github.com/gostaticanalysis/analysisutil" + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/buildssa" + "golang.org/x/tools/go/ssa" +) + +var Analyzer = &analysis.Analyzer{ + Name: "bodyclose", + Doc: Doc, + Run: new(runner).run, + Requires: []*analysis.Analyzer{ + buildssa.Analyzer, + }, +} + +const ( + Doc = "bodyclose checks whether HTTP response body is closed successfully" + + nethttpPath = "net/http" + closeMethod = "Close" +) + +type runner struct { + pass *analysis.Pass + resObj types.Object + resTyp *types.Pointer + bodyObj types.Object + closeMthd *types.Func + skipFile map[*ast.File]bool +} + +// run executes an analysis for the pass. The receiver is passed +// by value because this func is called in parallel for different passes. 
+func (r runner) run(pass *analysis.Pass) (interface{}, error) { + r.pass = pass + funcs := pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA).SrcFuncs + + r.resObj = analysisutil.LookupFromImports(pass.Pkg.Imports(), nethttpPath, "Response") + if r.resObj == nil { + // skip checking + return nil, nil + } + + resNamed, ok := r.resObj.Type().(*types.Named) + if !ok { + return nil, fmt.Errorf("cannot find http.Response") + } + r.resTyp = types.NewPointer(resNamed) + + resStruct, ok := r.resObj.Type().Underlying().(*types.Struct) + if !ok { + return nil, fmt.Errorf("cannot find http.Response") + } + for i := 0; i < resStruct.NumFields(); i++ { + field := resStruct.Field(i) + if field.Id() == "Body" { + r.bodyObj = field + } + } + if r.bodyObj == nil { + return nil, fmt.Errorf("cannot find the object http.Response.Body") + } + bodyNamed := r.bodyObj.Type().(*types.Named) + bodyItrf := bodyNamed.Underlying().(*types.Interface) + for i := 0; i < bodyItrf.NumMethods(); i++ { + bmthd := bodyItrf.Method(i) + if bmthd.Id() == closeMethod { + r.closeMthd = bmthd + } + } + + r.skipFile = map[*ast.File]bool{} + for _, f := range funcs { + if r.noImportedNetHTTP(f) { + // skip this + continue + } + + // skip if the function is just referenced + var isreffunc bool + for i := 0; i < f.Signature.Results().Len(); i++ { + if f.Signature.Results().At(i).Type().String() == r.resTyp.String() { + isreffunc = true + } + } + if isreffunc { + continue + } + + for _, b := range f.Blocks { + for i := range b.Instrs { + pos := b.Instrs[i].Pos() + if r.isopen(b, i) { + pass.Reportf(pos, "response body must be closed") + } + } + } + } + + return nil, nil +} + +func (r *runner) isopen(b *ssa.BasicBlock, i int) bool { + call, ok := r.getReqCall(b.Instrs[i]) + if !ok { + return false + } + + if len(*call.Referrers()) == 0 { + return true + } + cRefs := *call.Referrers() + for _, cRef := range cRefs { + val, ok := r.getResVal(cRef) + if !ok { + continue + } + + if len(*val.Referrers()) == 0 { + return true + } + resRefs := *val.Referrers() + for _, resRef := range resRefs { + switch resRef := resRef.(type) { + case *ssa.Store: // Call in Closure function + if len(*resRef.Addr.Referrers()) == 0 { + return true + } + + for _, aref := range *resRef.Addr.Referrers() { + if c, ok := aref.(*ssa.MakeClosure); ok { + f := c.Fn.(*ssa.Function) + if r.noImportedNetHTTP(f) { + // skip this + return false + } + called := r.isClosureCalled(c) + + return r.calledInFunc(f, called) + } + + } + case *ssa.Call: // Indirect function call + if f, ok := resRef.Call.Value.(*ssa.Function); ok { + for _, b := range f.Blocks { + for i := range b.Instrs { + return r.isopen(b, i) + } + } + } + case *ssa.FieldAddr: // Normal reference to response entity + if resRef.Referrers() == nil { + return true + } + + bRefs := *resRef.Referrers() + + for _, bRef := range bRefs { + bOp, ok := r.getBodyOp(bRef) + if !ok { + continue + } + if len(*bOp.Referrers()) == 0 { + return true + } + ccalls := *bOp.Referrers() + for _, ccall := range ccalls { + if r.isCloseCall(ccall) { + return false + } + } + } + } + } + } + + return true +} + +func (r *runner) getReqCall(instr ssa.Instruction) (*ssa.Call, bool) { + call, ok := instr.(*ssa.Call) + if !ok { + return nil, false + } + if !strings.Contains(call.Type().String(), r.resTyp.String()) { + return nil, false + } + return call, true +} + +func (r *runner) getResVal(instr ssa.Instruction) (ssa.Value, bool) { + switch instr := instr.(type) { + case *ssa.FieldAddr: + if instr.X.Type().String() == r.resTyp.String() { + 
return instr.X.(ssa.Value), true + } + case ssa.Value: + if instr.Type().String() == r.resTyp.String() { + return instr, true + } + } + return nil, false +} + +func (r *runner) getBodyOp(instr ssa.Instruction) (*ssa.UnOp, bool) { + op, ok := instr.(*ssa.UnOp) + if !ok { + return nil, false + } + if op.Type() != r.bodyObj.Type() { + return nil, false + } + return op, true +} + +func (r *runner) isCloseCall(ccall ssa.Instruction) bool { + switch ccall := ccall.(type) { + case *ssa.Defer: + if ccall.Call.Method != nil && ccall.Call.Method.Name() == r.closeMthd.Name() { + return true + } + case *ssa.Call: + if ccall.Call.Method != nil && ccall.Call.Method.Name() == r.closeMthd.Name() { + return true + } + case *ssa.ChangeInterface: + if ccall.Type().String() == "io.Closer" { + closeMtd := ccall.Type().Underlying().(*types.Interface).Method(0) + crs := *ccall.Referrers() + for _, cs := range crs { + if cs, ok := cs.(*ssa.Defer); ok { + if val, ok := cs.Common().Value.(*ssa.Function); ok { + for _, b := range val.Blocks { + for _, instr := range b.Instrs { + if c, ok := instr.(*ssa.Call); ok { + if c.Call.Method == closeMtd { + return true + } + } + } + } + } + } + + if returnOp, ok := cs.(*ssa.Return); ok { + for _, resultValue := range returnOp.Results { + if resultValue.Type().String() == "io.Closer" { + return true + } + } + } + } + } + case *ssa.Return: + for _, resultValue := range ccall.Results { + if resultValue.Type().String() == "io.ReadCloser" { + return true + } + } + } + return false +} + +func (r *runner) isClosureCalled(c *ssa.MakeClosure) bool { + refs := *c.Referrers() + if len(refs) == 0 { + return false + } + for _, ref := range refs { + switch ref.(type) { + case *ssa.Call, *ssa.Defer: + return true + } + } + return false +} + +func (r *runner) noImportedNetHTTP(f *ssa.Function) (ret bool) { + obj := f.Object() + if obj == nil { + return false + } + + file := analysisutil.File(r.pass, obj.Pos()) + if file == nil { + return false + } + + if skip, has := r.skipFile[file]; has { + return skip + } + defer func() { + r.skipFile[file] = ret + }() + + for _, impt := range file.Imports { + path, err := strconv.Unquote(impt.Path.Value) + if err != nil { + continue + } + path = analysisutil.RemoveVendor(path) + if path == nethttpPath { + return false + } + } + + return true +} + +func (r *runner) calledInFunc(f *ssa.Function, called bool) bool { + for _, b := range f.Blocks { + for i, instr := range b.Instrs { + switch instr := instr.(type) { + case *ssa.UnOp: + refs := *instr.Referrers() + if len(refs) == 0 { + return true + } + for _, r := range refs { + if v, ok := r.(ssa.Value); ok { + if ptr, ok := v.Type().(*types.Pointer); !ok || !isNamedType(ptr.Elem(), "io", "ReadCloser") { + continue + } + vrefs := *v.Referrers() + for _, vref := range vrefs { + if vref, ok := vref.(*ssa.UnOp); ok { + vrefs := *vref.Referrers() + if len(vrefs) == 0 { + return true + } + for _, vref := range vrefs { + if c, ok := vref.(*ssa.Call); ok { + if c.Call.Method != nil && c.Call.Method.Name() == closeMethod { + return !called + } + } + } + } + } + } + + } + default: + return r.isopen(b, i) || !called + } + } + } + return false +} + +// isNamedType reports whether t is the named type path.name. 
+func isNamedType(t types.Type, path, name string) bool { + n, ok := t.(*types.Named) + if !ok { + return false + } + obj := n.Obj() + return obj.Name() == name && obj.Pkg() != nil && obj.Pkg().Path() == path +} diff --git a/vendor/github.com/tomarrell/wrapcheck/v2/LICENSE b/vendor/github.com/tomarrell/wrapcheck/v2/LICENSE new file mode 100644 index 000000000..b5d9d30d3 --- /dev/null +++ b/vendor/github.com/tomarrell/wrapcheck/v2/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2020 Tom Arrell + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/github.com/tomarrell/wrapcheck/v2/wrapcheck/wrapcheck.go b/vendor/github.com/tomarrell/wrapcheck/v2/wrapcheck/wrapcheck.go new file mode 100644 index 000000000..3b445e295 --- /dev/null +++ b/vendor/github.com/tomarrell/wrapcheck/v2/wrapcheck/wrapcheck.go @@ -0,0 +1,337 @@ +package wrapcheck + +import ( + "go/ast" + "go/token" + "go/types" + "log" + "os" + "strings" + + "github.com/gobwas/glob" + "golang.org/x/tools/go/analysis" +) + +var ( + DefaultIgnoreSigs = []string{ + ".Errorf(", + "errors.New(", + "errors.Unwrap(", + ".Wrap(", + ".Wrapf(", + ".WithMessage(", + ".WithMessagef(", + ".WithStack(", + } +) + +// WrapcheckConfig is the set of configuration values which configure the +// behaviour of the linter. +type WrapcheckConfig struct { + // IgnoreSigs defines a list of substrings which if contained within the + // signature of the function call returning the error, will be ignored. This + // allows you to specify functions that wrapcheck will not report as + // unwrapped. + // + // For example, an ingoredSig of `[]string{"errors.New("}` will ignore errors + // returned from the stdlib package error's function: + // + // `func errors.New(message string) error` + // + // Due to the signature containing the substring `errors.New(`. + // + // Note: Setting this value will intentionally override the default ignored + // sigs. To achieve the same behaviour as default, you should add the default + // list to your config. + IgnoreSigs []string `mapstructure:"ignoreSigs" yaml:"ignoreSigs"` + + // IgnorePackageGlobs defines a list of globs which, if matching the package + // of the function returning the error, will ignore the error when doing + // wrapcheck analysis. + // + // This is useful for broadly ignoring packages and subpackages from wrapcheck + // analysis. 
For example, to ignore all errors from all packages and + // subpackages of "encoding" you may include the configuration: + // + // -- .wrapcheck.yaml + // ignorePackageGlobs: + // - encoding/* + IgnorePackageGlobs []string `mapstructure:"ignorePackageGlobs" yaml:"ignorePackageGlobs"` +} + +func NewDefaultConfig() WrapcheckConfig { + return WrapcheckConfig{ + IgnoreSigs: DefaultIgnoreSigs, + IgnorePackageGlobs: []string{}, + } +} + +func NewAnalyzer(cfg WrapcheckConfig) *analysis.Analyzer { + return &analysis.Analyzer{ + Name: "wrapcheck", + Doc: "Checks that errors returned from external packages are wrapped", + Run: run(cfg), + } +} + +func run(cfg WrapcheckConfig) func(*analysis.Pass) (interface{}, error) { + return func(pass *analysis.Pass) (interface{}, error) { + for _, file := range pass.Files { + ast.Inspect(file, func(n ast.Node) bool { + if _, ok := n.(*ast.AssignStmt); ok { + return true + } + + ret, ok := n.(*ast.ReturnStmt) + if !ok { + return true + } + + if len(ret.Results) < 1 { + return true + } + + // Iterate over the values to be returned looking for errors + for _, expr := range ret.Results { + // Check if the return expression is a function call, if it is, we need + // to handle it by checking the return params of the function. + retFn, ok := expr.(*ast.CallExpr) + if ok { + // If the return type of the function is a single error. This will not + // match an error within multiple return values, for that, the below + // tuple check is required. + if isError(pass.TypesInfo.TypeOf(expr)) { + reportUnwrapped(pass, retFn, retFn.Pos(), cfg) + return true + } + + // Check if one of the return values from the function is an error + tup, ok := pass.TypesInfo.TypeOf(expr).(*types.Tuple) + if !ok { + return true + } + + // Iterate over the return values of the function looking for error + // types + for i := 0; i < tup.Len(); i++ { + v := tup.At(i) + if v == nil { + return true + } + if isError(v.Type()) { + reportUnwrapped(pass, retFn, expr.Pos(), cfg) + return true + } + } + } + + if !isError(pass.TypesInfo.TypeOf(expr)) { + continue + } + + ident, ok := expr.(*ast.Ident) + if !ok { + return true + } + + var ( + call *ast.CallExpr + ) + + // Attempt to find the most recent short assign + if shortAss := prevErrAssign(pass, file, ident); shortAss != nil { + call, ok = shortAss.Rhs[0].(*ast.CallExpr) + if !ok { + return true + } + } else if isUnresolved(file, ident) { + // TODO Check if the identifier is unresolved, and try to resolve it in + // another file. + return true + } else { + // Check for ValueSpec nodes in order to locate a possible var + // declaration. + if ident.Obj == nil { + return true + } + + vSpec, ok := ident.Obj.Decl.(*ast.ValueSpec) + if !ok { + // We couldn't find a short or var assign for this error return. + // This is an error. Where did this identifier come from? Possibly a + // function param. + // + // TODO decide how to handle this case, whether to follow function + // param back, or assert wrapping at call site. + + return true + } + + if len(vSpec.Values) < 1 { + return true + } + + call, ok = vSpec.Values[0].(*ast.CallExpr) + if !ok { + return true + } + } + + // Make sure there is a call identified as producing the error being + // returned, otherwise just bail + if call == nil { + return true + } + + reportUnwrapped(pass, call, ident.NamePos, cfg) + } + + return true + }) + } + + return nil, nil + } +} + +// Report unwrapped takes a call expression and an identifier and reports +// if the call is unwrapped. 
+func reportUnwrapped(pass *analysis.Pass, call *ast.CallExpr, tokenPos token.Pos, cfg WrapcheckConfig) { + sel, ok := call.Fun.(*ast.SelectorExpr) + if !ok { + return + } + + // Check for ignored signatures + fnSig := pass.TypesInfo.ObjectOf(sel.Sel).String() + if contains(cfg.IgnoreSigs, fnSig) { + return + } + + // Check if the underlying type of the "x" in x.y.z is an interface, as + // errors returned from interface types should be wrapped. + if isInterface(pass, sel) { + pass.Reportf(tokenPos, "error returned from interface method should be wrapped: sig: %s", fnSig) + return + } + + // Check whether the function being called comes from another package, + // as functions called across package boundaries which returns errors + // should be wrapped + if isFromOtherPkg(pass, sel, cfg) { + pass.Reportf(tokenPos, "error returned from external package is unwrapped: sig: %s", fnSig) + return + } +} + +// isInterface returns whether the function call is one defined on an interface. +func isInterface(pass *analysis.Pass, sel *ast.SelectorExpr) bool { + _, ok := pass.TypesInfo.TypeOf(sel.X).Underlying().(*types.Interface) + + return ok +} + +func isFromOtherPkg(pass *analysis.Pass, sel *ast.SelectorExpr, config WrapcheckConfig) bool { + // The package of the function that we are calling which returns the error + fn := pass.TypesInfo.ObjectOf(sel.Sel) + + for _, globString := range config.IgnorePackageGlobs { + g, err := glob.Compile(globString) + if err != nil { + log.Printf("unable to parse glob: %s\n", globString) + os.Exit(1) + } + + if g.Match(fn.Pkg().Path()) { + return false + } + } + + // If it's not a package name, then we should check the selector to make sure + // that it's an identifier from the same package + if pass.Pkg.Path() == fn.Pkg().Path() { + return false + } + + return true +} + +// prevErrAssign traverses the AST of a file looking for the most recent +// assignment to an error declaration which is specified by the returnIdent +// identifier. +// +// This only returns short form assignments and reassignments, e.g. `:=` and +// `=`. This does not include `var` statements. This function will return nil if +// the only declaration is a `var` (aka ValueSpec) declaration. +func prevErrAssign(pass *analysis.Pass, file *ast.File, returnIdent *ast.Ident) *ast.AssignStmt { + // A slice containing all the assignments which contain an identifer + // referring to the source declaration of the error. This is to catch + // cases where err is defined once, and then reassigned multiple times + // within the same block. In these cases, we should check the method of + // the most recent call. + var assigns []*ast.AssignStmt + + // Find all assignments which have the same declaration + ast.Inspect(file, func(n ast.Node) bool { + if ass, ok := n.(*ast.AssignStmt); ok { + for _, expr := range ass.Lhs { + if !isError(pass.TypesInfo.TypeOf(expr)) { + continue + } + if assIdent, ok := expr.(*ast.Ident); ok { + if assIdent.Obj == nil || returnIdent.Obj == nil { + // If we can't find the Obj for one of the identifiers, just skip + // it. 
+ return true + } else if assIdent.Obj.Decl == returnIdent.Obj.Decl { + assigns = append(assigns, ass) + } + } + } + } + + return true + }) + + // Iterate through the assignments, comparing the token positions to + // find the assignment that directly precedes the return position + var mostRecentAssign *ast.AssignStmt + + for _, ass := range assigns { + if ass.Pos() > returnIdent.Pos() { + break + } + mostRecentAssign = ass + } + + return mostRecentAssign +} + +func contains(slice []string, el string) bool { + for _, s := range slice { + if strings.Contains(el, s) { + return true + } + } + + return false +} + +// isError returns whether or not the provided type interface is an error +func isError(typ types.Type) bool { + if typ == nil { + return false + } + + return typ.String() == "error" +} + +func isUnresolved(file *ast.File, ident *ast.Ident) bool { + for _, unresolvedIdent := range file.Unresolved { + if unresolvedIdent.Pos() == ident.Pos() { + return true + } + } + + return false +} diff --git a/vendor/github.com/tommy-muehle/go-mnd/v2/.editorconfig b/vendor/github.com/tommy-muehle/go-mnd/v2/.editorconfig new file mode 100644 index 000000000..fe2c20fb0 --- /dev/null +++ b/vendor/github.com/tommy-muehle/go-mnd/v2/.editorconfig @@ -0,0 +1,21 @@ +root = true + +[*] +charset = utf-8 +indent_size = 4 +indent_style = space +end_of_line = lf +insert_final_newline = true +trim_trailing_whitespace = true + +[*.md] +trim_trailing_whitespace = false + +[*.json] +indent_size = 2 + +[*.{yaml,yml}] +indent_size = 2 + +[Makefile] +indent_style = tab diff --git a/vendor/github.com/tommy-muehle/go-mnd/v2/.gitattributes b/vendor/github.com/tommy-muehle/go-mnd/v2/.gitattributes new file mode 100644 index 000000000..005358190 --- /dev/null +++ b/vendor/github.com/tommy-muehle/go-mnd/v2/.gitattributes @@ -0,0 +1,9 @@ +/.gitattributes export-ignore +/.gitignore export-ignore +/.editorconfig export-ignore +/.goreleaser.yml export-ignore +/.github/ export-ignore +/examples/ export-ignore +/testdata/ export-ignore +/tools/ export-ignore +/Makefile export-ignore diff --git a/vendor/github.com/tommy-muehle/go-mnd/v2/.gitignore b/vendor/github.com/tommy-muehle/go-mnd/v2/.gitignore new file mode 100644 index 000000000..abc11b330 --- /dev/null +++ b/vendor/github.com/tommy-muehle/go-mnd/v2/.gitignore @@ -0,0 +1,3 @@ +build/ +dist/ +coverage.txt diff --git a/vendor/github.com/tommy-muehle/go-mnd/v2/.goreleaser.yml b/vendor/github.com/tommy-muehle/go-mnd/v2/.goreleaser.yml new file mode 100644 index 000000000..47cbca5e5 --- /dev/null +++ b/vendor/github.com/tommy-muehle/go-mnd/v2/.goreleaser.yml @@ -0,0 +1,29 @@ +builds: + - main: ./cmd/mnd/main.go + binary: mnd + goos: + - windows + - darwin + - linux + goarch: + - amd64 + ldflags: -s -w -X main.version={{.Version}} -X main.commit={{.Commit}} -X main.buildTime={{.Date}}`. 
+ +archives: + - format: tar.gz + format_overrides: + - goos: windows + format: zip + +brews: + - name: mnd + tap: + owner: tommy-muehle + name: homebrew-tap + folder: Formula + homepage: https://github.com/tommy-muehle/go-mnd + description: Magic number detector for Go + test: | + system "#{bin}/mnd --version" + install: | + bin.install "mnd" diff --git a/vendor/github.com/tommy-muehle/go-mnd/v2/Dockerfile b/vendor/github.com/tommy-muehle/go-mnd/v2/Dockerfile new file mode 100644 index 000000000..bb8e2b7f4 --- /dev/null +++ b/vendor/github.com/tommy-muehle/go-mnd/v2/Dockerfile @@ -0,0 +1,17 @@ +ARG GO_VERSION=1.15 + +FROM golang:${GO_VERSION}-alpine AS builder +RUN apk add --update --no-cache make git curl gcc libc-dev +RUN mkdir -p /build +WORKDIR /build +COPY . /build/ +RUN go mod download +RUN go build -o go-mnd cmd/mnd/main.go + +FROM golang:${GO_VERSION}-alpine +RUN apk add --update --no-cache bash git gcc libc-dev +COPY --from=builder /build/go-mnd /bin/go-mnd +COPY entrypoint.sh /bin/entrypoint.sh +VOLUME /app +WORKDIR /app +ENTRYPOINT ["/bin/entrypoint.sh"] diff --git a/vendor/github.com/tommy-muehle/go-mnd/v2/LICENSE b/vendor/github.com/tommy-muehle/go-mnd/v2/LICENSE new file mode 100644 index 000000000..8825fad20 --- /dev/null +++ b/vendor/github.com/tommy-muehle/go-mnd/v2/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2019 Tommy Muehle + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/github.com/tommy-muehle/go-mnd/v2/Makefile b/vendor/github.com/tommy-muehle/go-mnd/v2/Makefile new file mode 100644 index 000000000..b8a32316b --- /dev/null +++ b/vendor/github.com/tommy-muehle/go-mnd/v2/Makefile @@ -0,0 +1,32 @@ +GIT_TAG?= $(shell git describe --abbrev=0) + +GO_VERSION = 1.15 +BUILDFLAGS := '-w -s' + +IMAGE_REPO = "tommymuehle" +BIN = "go-mnd" + +clean: + rm -rf build dist coverage.txt + +test: + go test -race ./... + +test-coverage: + go test -race -coverprofile=coverage.txt -covermode=atomic -coverpkg=./checks,./config + +build: + go build -o build/$(BIN) cmd/mnd/main.go + +image: + @echo "Building the Docker image..." + docker build --rm -t $(IMAGE_REPO)/$(BIN):$(GIT_TAG) --build-arg GO_VERSION=$(GO_VERSION) . + docker tag $(IMAGE_REPO)/$(BIN):$(GIT_TAG) $(IMAGE_REPO)/$(BIN):$(GIT_TAG) + docker tag $(IMAGE_REPO)/$(BIN):$(GIT_TAG) $(IMAGE_REPO)/$(BIN):latest + +image-push: image + @echo "Pushing the Docker image..." 
+ docker push $(IMAGE_REPO)/$(BIN):$(GIT_TAG) + docker push $(IMAGE_REPO)/$(BIN):latest + +.PHONY: clean test test-coverage build image image-push diff --git a/vendor/github.com/tommy-muehle/go-mnd/v2/README.md b/vendor/github.com/tommy-muehle/go-mnd/v2/README.md new file mode 100644 index 000000000..6e3a55573 --- /dev/null +++ b/vendor/github.com/tommy-muehle/go-mnd/v2/README.md @@ -0,0 +1,230 @@ +# go-mnd - Magic number detector for Golang + + + +A vet analyzer to detect magic numbers. + +> **What is a magic number?** +> A magic number is a numeric literal that is not defined as a constant, but which may change, and therefore can be hard to update. It's considered a bad programming practice to use numbers directly in any source code without an explanation. It makes programs harder to read, understand, and maintain. + +## Project status + +![CI](https://github.com/tommy-muehle/go-mnd/workflows/CI/badge.svg) +[![Go Report Card](https://goreportcard.com/badge/github.com/tommy-muehle/go-mnd)](https://goreportcard.com/report/github.com/tommy-muehle/go-mnd) +[![codecov](https://codecov.io/gh/tommy-muehle/go-mnd/branch/master/graph/badge.svg)](https://codecov.io/gh/tommy-muehle/go-mnd) + +## Install + +### Local + +This analyzer requires Golang in version >= 1.12 because it's depends on the **go/analysis** API. + +``` +go get -u github.com/tommy-muehle/go-mnd/v2/cmd/mnd +``` + +### Github action + +You can run go-mnd as a GitHub action as follows: + +``` +name: Example workflow +on: + push: + branches: + - master + pull_request: + branches: + - master +jobs: + tests: + runs-on: ubuntu-latest + env: + GO111MODULE: on + steps: + - name: Checkout Source + uses: actions/checkout@v2 + - name: Run go-mnd + uses: tommy-muehle/go-mnd@master + with: + args: ./... +``` + +### GitLab CI + +You can run go-mnd inside a GitLab CI pipeline as follows: + +``` +stages: + - lint + +go:lint:mnd: + stage: lint + needs: [] + image: golang:latest + before_script: + - go get -u github.com/tommy-muehle/go-mnd/cmd/mnd + - go mod tidy + - go mod vendor + script: + - go vet -vettool $(which mnd) ./... +``` + +### Homebrew + +To install with [Homebrew](https://brew.sh/), run: + +``` +brew tap tommy-muehle/tap && brew install tommy-muehle/tap/mnd +``` + +### Docker + +To get the latest available Docker image: + +``` +docker pull tommymuehle/go-mnd +``` + +### Windows + +On Windows download the [latest release](https://github.com/tommy-muehle/go-mnd/releases). + +## Usage + +[![asciicast](https://asciinema.org/a/231021.svg)](https://asciinema.org/a/231021) + +``` +go vet -vettool $(which mnd) ./... +``` + +or directly + +``` +mnd ./... +``` + +or via Docker + +``` +docker run --rm -v "$PWD":/app -w /app tommymuehle/go-mnd:latest ./... +``` + +## Options + +The ```-checks``` option let's you define a comma separated list of checks. + +The ```-ignored-numbers``` option let's you define a comma separated list of numbers to ignore. +For example: `-ignored-numbers=1000,10_000,3.14159264` + +The ```-ignored-functions``` option let's you define a comma separated list of function name regexp patterns to exclude. +For example: `-ignored-functions=math.*,http.StatusText` + +The ```-ignored-files``` option let's you define a comma separated list of filename regexp patterns to exclude. +For example: `-ignored-files=magic_.*.go,.*_numbers.go` + +## Checks + +By default this detector analyses arguments, assigns, cases, conditions, operations and return statements. 
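+
+As an illustrative sketch (not part of the upstream README), a reported magic number is usually resolved by giving the literal a name; the individual checks are shown in the list that follows. The `secondsPerDay` constant and the surrounding program are hypothetical:
+
+```go
+package main
+
+import (
+	"fmt"
+	"time"
+)
+
+// secondsPerDay names the literal 86400. Without the constant, the
+// argument check would report the bare number passed to time.Duration
+// (hypothetical example, not taken from the upstream README).
+const secondsPerDay = 86400
+
+func main() {
+	// Before: timeout := time.Duration(86400) * time.Second // reported as a magic number
+	timeout := time.Duration(secondsPerDay) * time.Second
+	fmt.Println(timeout)
+}
+```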
+ +* argument + +``` +t := http.StatusText(200) +``` + +* assign + +``` +c := &http.Client{ + Timeout: 5 * time.Second, +} +``` + +* case + +``` +switch x { + case 3: +} +``` + +* condition + +``` +if x > 7 { +} +``` + +* operation + +``` +var x, y int +y = 10 * x +``` + +* return + +``` +return 3 +``` + +## Excludes + +By default the numbers 0 and 1 as well as test files are excluded! + +### Further known excludes + +The function "Date" in the "Time" package. + +``` +t := time.Date(2017, time.September, 26, 12, 13, 14, 0, time.UTC) +``` + +Additional custom excludes can be defined via option flag. + +## Development + +### Build + +You can build the binary with: + +``` +make +``` + +### Tests + +You can run all unit tests using: + +``` +make test +``` + +And with coverage report: + +``` +make test-coverage +``` + +### Docker image + +You can also build locally the docker image by using the command: + +``` +make image +``` + +## Stickers + +

+ [Stickers image] [Sticker image]

+ +Just drop me a message via Twitter DM or email if you want some go-mnd stickers +for you or your Gopher usergroup. + +## License + +The MIT License (MIT). Please see [LICENSE](LICENSE) for more information. diff --git a/vendor/github.com/tommy-muehle/go-mnd/v2/action.yml b/vendor/github.com/tommy-muehle/go-mnd/v2/action.yml new file mode 100644 index 000000000..3a1f8eb11 --- /dev/null +++ b/vendor/github.com/tommy-muehle/go-mnd/v2/action.yml @@ -0,0 +1,19 @@ +name: 'go-mnd' +description: 'Runs the Golang magic number detector' +author: '@tommy-muehle' + +inputs: + args: + description: 'Arguments for go-mnd' + required: true + default: '-h' + +runs: + using: 'docker' + image: 'Dockerfile' + args: + - ${{ inputs.args }} + +branding: + icon: 'check-circle' + color: 'blue' diff --git a/vendor/github.com/tommy-muehle/go-mnd/v2/analyzer.go b/vendor/github.com/tommy-muehle/go-mnd/v2/analyzer.go new file mode 100644 index 000000000..bf658f42d --- /dev/null +++ b/vendor/github.com/tommy-muehle/go-mnd/v2/analyzer.go @@ -0,0 +1,118 @@ +package magic_numbers + +import ( + "flag" + "go/ast" + "strings" + + "github.com/tommy-muehle/go-mnd/v2/checks" + "github.com/tommy-muehle/go-mnd/v2/config" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/ast/inspector" +) + +const Doc = `magic number detector` + +var Analyzer = &analysis.Analyzer{ + Name: "mnd", + Doc: Doc, + Run: run, + Flags: options(), + Requires: []*analysis.Analyzer{inspect.Analyzer}, + RunDespiteErrors: true, +} + +type Checker interface { + NodeFilter() []ast.Node + Check(n ast.Node) +} + +func options() flag.FlagSet { + options := flag.NewFlagSet("", flag.ExitOnError) + options.String("excludes", "", "deprecated: use ignored-files instead") + options.String("ignored-files", "", "comma separated list of file patterns to exclude from analysis") + options.String("ignored-functions", "", "comma separated list of function patterns to exclude from analysis") + options.String("ignored-numbers", "", "comma separated list of numbers to exclude from analysis") + options.String( + "checks", + checks.ArgumentCheck+","+ + checks.CaseCheck+","+ + checks.ConditionCheck+","+ + checks.OperationCheck+","+ + checks.ReturnCheck+","+ + checks.AssignCheck, + "comma separated list of checks", + ) + + return *options +} + +func run(pass *analysis.Pass) (interface{}, error) { + var ignoredFiles string + + ignoredFiles = strings.Join( + []string{ + pass.Analyzer.Flags.Lookup("excludes").Value.String(), // is deprecated + pass.Analyzer.Flags.Lookup("ignored-files").Value.String(), + }, + ",", + ) + + if ignoredFiles == "," { + ignoredFiles = "" + } + + conf := config.WithOptions( + config.WithCustomChecks(pass.Analyzer.Flags.Lookup("checks").Value.String()), + config.WithIgnoredFiles(ignoredFiles), + config.WithIgnoredFunctions(pass.Analyzer.Flags.Lookup("ignored-functions").Value.String()), + config.WithIgnoredNumbers(pass.Analyzer.Flags.Lookup("ignored-numbers").Value.String()), + ) + + var checker []Checker + if conf.IsCheckEnabled(checks.ArgumentCheck) { + checker = append(checker, checks.NewArgumentAnalyzer(pass, conf)) + } + + if conf.IsCheckEnabled(checks.CaseCheck) { + checker = append(checker, checks.NewCaseAnalyzer(pass, conf)) + } + + if conf.IsCheckEnabled(checks.ConditionCheck) { + checker = append(checker, checks.NewConditionAnalyzer(pass, conf)) + } + + if conf.IsCheckEnabled(checks.OperationCheck) { + checker = append(checker, checks.NewOperationAnalyzer(pass, conf)) + } + + if 
conf.IsCheckEnabled(checks.ReturnCheck) { + checker = append(checker, checks.NewReturnAnalyzer(pass, conf)) + } + + if conf.IsCheckEnabled(checks.AssignCheck) { + checker = append(checker, checks.NewAssignAnalyzer(pass, conf)) + } + + i := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + + for _, c := range checker { + c := c + i.Preorder(c.NodeFilter(), func(node ast.Node) { + for _, exclude := range conf.IgnoredFiles { + if exclude.String() == "" { + continue + } + if exclude.MatchString(pass.Fset.Position(node.Pos()).Filename) { + return + } + } + + c.Check(node) + }) + } + + return nil, nil +} diff --git a/vendor/github.com/tommy-muehle/go-mnd/v2/checks/argument.go b/vendor/github.com/tommy-muehle/go-mnd/v2/checks/argument.go new file mode 100644 index 000000000..df6ad676d --- /dev/null +++ b/vendor/github.com/tommy-muehle/go-mnd/v2/checks/argument.go @@ -0,0 +1,121 @@ +package checks + +import ( + "go/ast" + "go/token" + "strconv" + "sync" + + "golang.org/x/tools/go/analysis" + + "github.com/tommy-muehle/go-mnd/v2/config" +) + +const ArgumentCheck = "argument" + +// constantDefinitions is used to save lines (by number) which contain a constant definition. +var constantDefinitions = map[string]bool{} +var mu sync.RWMutex + +type ArgumentAnalyzer struct { + config *config.Config + pass *analysis.Pass +} + +func NewArgumentAnalyzer(pass *analysis.Pass, config *config.Config) *ArgumentAnalyzer { + return &ArgumentAnalyzer{ + pass: pass, + config: config, + } +} + +func (a *ArgumentAnalyzer) NodeFilter() []ast.Node { + return []ast.Node{ + (*ast.GenDecl)(nil), + (*ast.CallExpr)(nil), + } +} + +func (a *ArgumentAnalyzer) Check(n ast.Node) { + switch expr := n.(type) { + case *ast.CallExpr: + a.checkCallExpr(expr) + case *ast.GenDecl: + if expr.Tok != token.CONST { + return + } + + for _, x := range expr.Specs { + pos := a.pass.Fset.Position(x.Pos()) + + mu.Lock() + constantDefinitions[pos.Filename+":"+strconv.Itoa(pos.Line)] = true + mu.Unlock() + } + } +} + +func (a *ArgumentAnalyzer) checkCallExpr(expr *ast.CallExpr) { + pos := a.pass.Fset.Position(expr.Pos()) + + mu.RLock() + ok := constantDefinitions[pos.Filename+":"+strconv.Itoa(pos.Line)] + mu.RUnlock() + + if ok { + return + } + + switch f := expr.Fun.(type) { + case *ast.SelectorExpr: + switch prefix := f.X.(type) { + case *ast.Ident: + if a.config.IsIgnoredFunction(prefix.Name + "." 
+ f.Sel.Name) { + return + } + } + } + + for i, arg := range expr.Args { + switch x := arg.(type) { + case *ast.BasicLit: + if !a.isMagicNumber(x) { + continue + } + // If it's a magic number and has no previous element, report it + if i == 0 { + a.pass.Reportf(x.Pos(), reportMsg, x.Value, ArgumentCheck) + } else { + // Otherwise check all args + switch expr.Args[i].(type) { + case *ast.BasicLit: + if a.isMagicNumber(x) { + a.pass.Reportf(x.Pos(), reportMsg, x.Value, ArgumentCheck) + } + } + } + case *ast.BinaryExpr: + a.checkBinaryExpr(x) + } + } +} + +func (a *ArgumentAnalyzer) checkBinaryExpr(expr *ast.BinaryExpr) { + switch x := expr.X.(type) { + case *ast.BasicLit: + if a.isMagicNumber(x) { + a.pass.Reportf(x.Pos(), reportMsg, x.Value, ArgumentCheck) + } + } + + switch y := expr.Y.(type) { + case *ast.BasicLit: + if a.isMagicNumber(y) { + a.pass.Reportf(y.Pos(), reportMsg, y.Value, ArgumentCheck) + } + } +} + +func (a *ArgumentAnalyzer) isMagicNumber(l *ast.BasicLit) bool { + return (l.Kind == token.FLOAT || l.Kind == token.INT) && !a.config.IsIgnoredNumber(l.Value) +} diff --git a/vendor/github.com/tommy-muehle/go-mnd/v2/checks/assign.go b/vendor/github.com/tommy-muehle/go-mnd/v2/checks/assign.go new file mode 100644 index 000000000..f930d0880 --- /dev/null +++ b/vendor/github.com/tommy-muehle/go-mnd/v2/checks/assign.go @@ -0,0 +1,86 @@ +package checks + +import ( + "go/ast" + "go/token" + + "golang.org/x/tools/go/analysis" + + config "github.com/tommy-muehle/go-mnd/v2/config" +) + +const AssignCheck = "assign" + +type AssignAnalyzer struct { + pass *analysis.Pass + config *config.Config +} + +func NewAssignAnalyzer(pass *analysis.Pass, config *config.Config) *AssignAnalyzer { + return &AssignAnalyzer{ + pass: pass, + config: config, + } +} + +func (a *AssignAnalyzer) NodeFilter() []ast.Node { + return []ast.Node{ + (*ast.KeyValueExpr)(nil), + (*ast.AssignStmt)(nil), + } +} + +func (a *AssignAnalyzer) Check(n ast.Node) { + switch expr := n.(type) { + case *ast.KeyValueExpr: + switch x := expr.Value.(type) { + case *ast.BasicLit: + if a.isMagicNumber(x) { + a.pass.Reportf(x.Pos(), reportMsg, x.Value, AssignCheck) + } + case *ast.BinaryExpr: + a.checkBinaryExpr(x) + } + case *ast.AssignStmt: + for _, e := range expr.Rhs { + switch y := e.(type) { + case *ast.UnaryExpr: + a.checkUnaryExpr(y) + case *ast.BinaryExpr: + switch x := y.Y.(type) { + case *ast.UnaryExpr: + a.checkUnaryExpr(x) + } + } + } + } +} + +func (a *AssignAnalyzer) checkUnaryExpr(expr *ast.UnaryExpr) { + switch x := expr.X.(type) { + case *ast.BasicLit: + if a.isMagicNumber(x) { + a.pass.Reportf(x.Pos(), reportMsg, x.Value, AssignCheck) + } + } +} + +func (a *AssignAnalyzer) checkBinaryExpr(expr *ast.BinaryExpr) { + switch x := expr.X.(type) { + case *ast.BasicLit: + if a.isMagicNumber(x) { + a.pass.Reportf(x.Pos(), reportMsg, x.Value, AssignCheck) + } + } + + switch y := expr.Y.(type) { + case *ast.BasicLit: + if a.isMagicNumber(y) { + a.pass.Reportf(y.Pos(), reportMsg, y.Value, AssignCheck) + } + } +} + +func (a *AssignAnalyzer) isMagicNumber(l *ast.BasicLit) bool { + return (l.Kind == token.FLOAT || l.Kind == token.INT) && !a.config.IsIgnoredNumber(l.Value) +} diff --git a/vendor/github.com/tommy-muehle/go-mnd/v2/checks/case.go b/vendor/github.com/tommy-muehle/go-mnd/v2/checks/case.go new file mode 100644 index 000000000..228cab4b8 --- /dev/null +++ b/vendor/github.com/tommy-muehle/go-mnd/v2/checks/case.go @@ -0,0 +1,68 @@ +package checks + +import ( + "go/ast" + "go/token" + + "golang.org/x/tools/go/analysis" + + 
config "github.com/tommy-muehle/go-mnd/v2/config" +) + +const CaseCheck = "case" + +type CaseAnalyzer struct { + pass *analysis.Pass + config *config.Config +} + +func NewCaseAnalyzer(pass *analysis.Pass, config *config.Config) *CaseAnalyzer { + return &CaseAnalyzer{ + pass: pass, + config: config, + } +} + +func (a *CaseAnalyzer) NodeFilter() []ast.Node { + return []ast.Node{ + (*ast.CaseClause)(nil), + } +} + +func (a *CaseAnalyzer) Check(n ast.Node) { + caseClause, ok := n.(*ast.CaseClause) + if !ok { + return + } + + for _, c := range caseClause.List { + switch x := c.(type) { + case *ast.BasicLit: + if a.isMagicNumber(x) { + a.pass.Reportf(x.Pos(), reportMsg, x.Value, CaseCheck) + } + case *ast.BinaryExpr: + a.checkBinaryExpr(x) + } + } +} + +func (a *CaseAnalyzer) checkBinaryExpr(expr *ast.BinaryExpr) { + switch x := expr.X.(type) { + case *ast.BasicLit: + if a.isMagicNumber(x) { + a.pass.Reportf(x.Pos(), reportMsg, x.Value, CaseCheck) + } + } + + switch y := expr.Y.(type) { + case *ast.BasicLit: + if a.isMagicNumber(y) { + a.pass.Reportf(y.Pos(), reportMsg, y.Value, CaseCheck) + } + } +} + +func (a *CaseAnalyzer) isMagicNumber(l *ast.BasicLit) bool { + return (l.Kind == token.FLOAT || l.Kind == token.INT) && !a.config.IsIgnoredNumber(l.Value) +} diff --git a/vendor/github.com/tommy-muehle/go-mnd/v2/checks/checks.go b/vendor/github.com/tommy-muehle/go-mnd/v2/checks/checks.go new file mode 100644 index 000000000..deff0c7bf --- /dev/null +++ b/vendor/github.com/tommy-muehle/go-mnd/v2/checks/checks.go @@ -0,0 +1,3 @@ +package checks + +const reportMsg = "Magic number: %v, in <%s> detected" diff --git a/vendor/github.com/tommy-muehle/go-mnd/v2/checks/condition.go b/vendor/github.com/tommy-muehle/go-mnd/v2/checks/condition.go new file mode 100644 index 000000000..20f892ede --- /dev/null +++ b/vendor/github.com/tommy-muehle/go-mnd/v2/checks/condition.go @@ -0,0 +1,55 @@ +package checks + +import ( + "go/ast" + "go/token" + + "golang.org/x/tools/go/analysis" + + config "github.com/tommy-muehle/go-mnd/v2/config" +) + +const ConditionCheck = "condition" + +type ConditionAnalyzer struct { + pass *analysis.Pass + config *config.Config +} + +func NewConditionAnalyzer(pass *analysis.Pass, config *config.Config) *ConditionAnalyzer { + return &ConditionAnalyzer{ + pass: pass, + config: config, + } +} + +func (a *ConditionAnalyzer) NodeFilter() []ast.Node { + return []ast.Node{ + (*ast.IfStmt)(nil), + } +} + +func (a *ConditionAnalyzer) Check(n ast.Node) { + expr, ok := n.(*ast.IfStmt).Cond.(*ast.BinaryExpr) + if !ok { + return + } + + switch x := expr.X.(type) { + case *ast.BasicLit: + if a.isMagicNumber(x) { + a.pass.Reportf(x.Pos(), reportMsg, x.Value, ConditionCheck) + } + } + + switch y := expr.Y.(type) { + case *ast.BasicLit: + if a.isMagicNumber(y) { + a.pass.Reportf(y.Pos(), reportMsg, y.Value, ConditionCheck) + } + } +} + +func (a *ConditionAnalyzer) isMagicNumber(l *ast.BasicLit) bool { + return (l.Kind == token.FLOAT || l.Kind == token.INT) && !a.config.IsIgnoredNumber(l.Value) +} diff --git a/vendor/github.com/tommy-muehle/go-mnd/v2/checks/operation.go b/vendor/github.com/tommy-muehle/go-mnd/v2/checks/operation.go new file mode 100644 index 000000000..ddf3a0363 --- /dev/null +++ b/vendor/github.com/tommy-muehle/go-mnd/v2/checks/operation.go @@ -0,0 +1,77 @@ +package checks + +import ( + "go/ast" + "go/token" + + "golang.org/x/tools/go/analysis" + + config "github.com/tommy-muehle/go-mnd/v2/config" +) + +const OperationCheck = "operation" + +type OperationAnalyzer struct { + pass 
*analysis.Pass + config *config.Config +} + +func NewOperationAnalyzer(pass *analysis.Pass, config *config.Config) *OperationAnalyzer { + return &OperationAnalyzer{ + pass: pass, + config: config, + } +} + +func (a *OperationAnalyzer) NodeFilter() []ast.Node { + return []ast.Node{ + (*ast.AssignStmt)(nil), + (*ast.ParenExpr)(nil), + } +} + +func (a *OperationAnalyzer) Check(n ast.Node) { + switch expr := n.(type) { + case *ast.ParenExpr: + switch x := expr.X.(type) { + case *ast.BinaryExpr: + a.checkBinaryExpr(x) + } + case *ast.AssignStmt: + for _, y := range expr.Rhs { + switch x := y.(type) { + case *ast.BinaryExpr: + switch xExpr := x.X.(type) { + case *ast.BinaryExpr: + a.checkBinaryExpr(xExpr) + } + switch yExpr := x.Y.(type) { + case *ast.BinaryExpr: + a.checkBinaryExpr(yExpr) + } + + a.checkBinaryExpr(x) + } + } + } +} + +func (a *OperationAnalyzer) checkBinaryExpr(expr *ast.BinaryExpr) { + switch x := expr.X.(type) { + case *ast.BasicLit: + if a.isMagicNumber(x) { + a.pass.Reportf(x.Pos(), reportMsg, x.Value, OperationCheck) + } + } + + switch y := expr.Y.(type) { + case *ast.BasicLit: + if a.isMagicNumber(y) { + a.pass.Reportf(y.Pos(), reportMsg, y.Value, OperationCheck) + } + } +} + +func (a *OperationAnalyzer) isMagicNumber(l *ast.BasicLit) bool { + return (l.Kind == token.FLOAT || l.Kind == token.INT) && !a.config.IsIgnoredNumber(l.Value) +} diff --git a/vendor/github.com/tommy-muehle/go-mnd/v2/checks/return.go b/vendor/github.com/tommy-muehle/go-mnd/v2/checks/return.go new file mode 100644 index 000000000..bc53940c7 --- /dev/null +++ b/vendor/github.com/tommy-muehle/go-mnd/v2/checks/return.go @@ -0,0 +1,68 @@ +package checks + +import ( + "go/ast" + "go/token" + + "golang.org/x/tools/go/analysis" + + config "github.com/tommy-muehle/go-mnd/v2/config" +) + +const ReturnCheck = "return" + +type ReturnAnalyzer struct { + pass *analysis.Pass + config *config.Config +} + +func NewReturnAnalyzer(pass *analysis.Pass, config *config.Config) *ReturnAnalyzer { + return &ReturnAnalyzer{ + pass: pass, + config: config, + } +} + +func (a *ReturnAnalyzer) NodeFilter() []ast.Node { + return []ast.Node{ + (*ast.ReturnStmt)(nil), + } +} + +func (a *ReturnAnalyzer) Check(n ast.Node) { + stmt, ok := n.(*ast.ReturnStmt) + if !ok { + return + } + + for _, expr := range stmt.Results { + switch x := expr.(type) { + case *ast.BasicLit: + if a.isMagicNumber(x) { + a.pass.Reportf(x.Pos(), reportMsg, x.Value, ReturnCheck) + } + case *ast.BinaryExpr: + a.checkBinaryExpr(x) + } + } +} + +func (a *ReturnAnalyzer) checkBinaryExpr(expr *ast.BinaryExpr) { + switch x := expr.X.(type) { + case *ast.BasicLit: + if a.isMagicNumber(x) { + a.pass.Reportf(x.Pos(), reportMsg, x.Value, ReturnCheck) + } + } + + switch y := expr.Y.(type) { + case *ast.BasicLit: + if a.isMagicNumber(y) { + a.pass.Reportf(y.Pos(), reportMsg, y.Value, ReturnCheck) + } + } +} + +func (a *ReturnAnalyzer) isMagicNumber(l *ast.BasicLit) bool { + return (l.Kind == token.FLOAT || l.Kind == token.INT) && !a.config.IsIgnoredNumber(l.Value) +} diff --git a/vendor/github.com/tommy-muehle/go-mnd/v2/config/config.go b/vendor/github.com/tommy-muehle/go-mnd/v2/config/config.go new file mode 100644 index 000000000..a4681e37d --- /dev/null +++ b/vendor/github.com/tommy-muehle/go-mnd/v2/config/config.go @@ -0,0 +1,118 @@ +package config + +import ( + "regexp" + "strings" +) + +type Config struct { + Checks map[string]bool + IgnoredNumbers map[string]struct{} + IgnoredFunctions []*regexp.Regexp + IgnoredFiles []*regexp.Regexp +} + +type Option func(config 
*Config) + +func DefaultConfig() *Config { + return &Config{ + Checks: map[string]bool{}, + IgnoredNumbers: map[string]struct{}{ + "0": {}, + "0.0": {}, + "1": {}, + "1.0": {}, + }, + IgnoredFiles: []*regexp.Regexp{ + regexp.MustCompile(`_test.go`), + }, + IgnoredFunctions: []*regexp.Regexp{ + regexp.MustCompile(`time.Date`), + }, + } +} + +func WithOptions(options ...Option) *Config { + c := DefaultConfig() + + for _, option := range options { + option(c) + } + + return c +} + +func WithIgnoredFunctions(excludes string) Option { + return func(config *Config) { + if excludes == "" { + return + } + + for _, exclude := range strings.Split(excludes, ",") { + config.IgnoredFunctions = append(config.IgnoredFunctions, regexp.MustCompile(exclude)) + } + } +} + +func WithIgnoredFiles(excludes string) Option { + return func(config *Config) { + if excludes == "" { + return + } + + for _, exclude := range strings.Split(excludes, ",") { + config.IgnoredFiles = append(config.IgnoredFiles, regexp.MustCompile(exclude)) + } + } +} + +func WithIgnoredNumbers(numbers string) Option { + return func(config *Config) { + if numbers == "" { + return + } + + for _, number := range strings.Split(numbers, ",") { + config.IgnoredNumbers[config.removeDigitSeparator(number)] = struct{}{} + } + } +} + +func WithCustomChecks(checks string) Option { + return func(config *Config) { + if checks == "" { + return + } + + for name, _ := range config.Checks { + config.Checks[name] = false + } + + for _, name := range strings.Split(checks, ",") { + config.Checks[name] = true + } + } +} + +func (c *Config) IsCheckEnabled(name string) bool { + return c.Checks[name] +} + +func (c *Config) IsIgnoredNumber(number string) bool { + _, ok := c.IgnoredNumbers[c.removeDigitSeparator(number)] + return ok +} + +func (c *Config) IsIgnoredFunction(f string) bool { + for _, pattern := range c.IgnoredFunctions { + if pattern.MatchString(f) { + return true + } + } + + return false +} + +func (c *Config) removeDigitSeparator(number string) string { + return strings.Replace(number, "_", "", -1) +} diff --git a/vendor/github.com/tommy-muehle/go-mnd/v2/entrypoint.sh b/vendor/github.com/tommy-muehle/go-mnd/v2/entrypoint.sh new file mode 100644 index 000000000..cabc2f63d --- /dev/null +++ b/vendor/github.com/tommy-muehle/go-mnd/v2/entrypoint.sh @@ -0,0 +1,7 @@ +#!/usr/bin/env bash + +# Expand the arguments into an array of strings. This is required because the GitHub action +# provides all arguments concatenated as a single string. 
+ARGS=("$@") + +/bin/go-mnd "${ARGS[*]}" diff --git a/vendor/github.com/tommy-muehle/go-mnd/v2/go.mod b/vendor/github.com/tommy-muehle/go-mnd/v2/go.mod new file mode 100644 index 000000000..8e7c18e22 --- /dev/null +++ b/vendor/github.com/tommy-muehle/go-mnd/v2/go.mod @@ -0,0 +1,9 @@ +module github.com/tommy-muehle/go-mnd/v2 + +go 1.12 + +require ( + github.com/davecgh/go-spew v1.1.1 // indirect + github.com/stretchr/testify v1.3.0 + golang.org/x/tools v0.0.0-20200329025819-fd4102a86c65 +) diff --git a/vendor/github.com/tommy-muehle/go-mnd/v2/go.sum b/vendor/github.com/tommy-muehle/go-mnd/v2/go.sum new file mode 100644 index 000000000..991a43759 --- /dev/null +++ b/vendor/github.com/tommy-muehle/go-mnd/v2/go.sum @@ -0,0 +1,28 @@ +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/testify v1.3.0 h1:TivCn/peBQ7UY8ooIcPgZFpTNSz0Q2U6UrFlUfqbe0Q= +github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= +github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/mod v0.2.0 h1:KU7oHjnv3XNWfa5COkzUifxZmxp1TyI7ImMXqFxLwvQ= +golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20200329025819-fd4102a86c65 h1:1KSbntBked74wYsKq0jzXYy7ZwcjAUtrl7EmPE97Iiw= +golang.org/x/tools v0.0.0-20200329025819-fd4102a86c65/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8= +golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543 h1:E7g+9GITq07hpfrRu66IVDexMakfv52eLZ2CXBWiKr4= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= diff --git 
a/vendor/github.com/ultraware/funlen/LICENSE b/vendor/github.com/ultraware/funlen/LICENSE new file mode 100644 index 000000000..dca75556d --- /dev/null +++ b/vendor/github.com/ultraware/funlen/LICENSE @@ -0,0 +1,7 @@ +Copyright 2018 Ultraware Consultancy and Development B.V. + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/vendor/github.com/ultraware/funlen/README.md b/vendor/github.com/ultraware/funlen/README.md new file mode 100644 index 000000000..aaf348521 --- /dev/null +++ b/vendor/github.com/ultraware/funlen/README.md @@ -0,0 +1,9 @@ +# Funlen linter + +Funlen is a linter that checks for long functions. It can checks both on the number of lines and the number of statements. + +The default limits are 60 lines and 40 statements. You can configure these. + +## Installation guide + +Funlen is included in [golangci-lint](https://github.com/golangci/golangci-lint/). Install it and enable funlen. diff --git a/vendor/github.com/ultraware/funlen/main.go b/vendor/github.com/ultraware/funlen/main.go new file mode 100644 index 000000000..2ba353002 --- /dev/null +++ b/vendor/github.com/ultraware/funlen/main.go @@ -0,0 +1,104 @@ +package funlen + +import ( + "fmt" + "go/ast" + "go/token" + "reflect" +) + +const ( + defaultLineLimit = 60 + defaultStmtLimit = 40 +) + +// Run runs this linter on the provided code +func Run(file *ast.File, fset *token.FileSet, lineLimit, stmtLimit int) []Message { + if lineLimit == 0 { + lineLimit = defaultLineLimit + } + if stmtLimit == 0 { + stmtLimit = defaultStmtLimit + } + + var msgs []Message + for _, f := range file.Decls { + decl, ok := f.(*ast.FuncDecl) + if !ok || decl.Body == nil { // decl.Body can be nil for e.g. 
cgo + continue + } + + if stmtLimit > 0 { + if stmts := parseStmts(decl.Body.List); stmts > stmtLimit { + msgs = append(msgs, makeStmtMessage(fset, decl.Name, stmts, stmtLimit)) + continue + } + } + + if lineLimit > 0 { + if lines := getLines(fset, decl); lines > lineLimit { + msgs = append(msgs, makeLineMessage(fset, decl.Name, lines, lineLimit)) + } + } + } + + return msgs +} + +// Message contains a message +type Message struct { + Pos token.Position + Message string +} + +func makeLineMessage(fset *token.FileSet, funcInfo *ast.Ident, lines, lineLimit int) Message { + return Message{ + fset.Position(funcInfo.Pos()), + fmt.Sprintf("Function '%s' is too long (%d > %d)\n", funcInfo.Name, lines, lineLimit), + } +} + +func makeStmtMessage(fset *token.FileSet, funcInfo *ast.Ident, stmts, stmtLimit int) Message { + return Message{ + fset.Position(funcInfo.Pos()), + fmt.Sprintf("Function '%s' has too many statements (%d > %d)\n", funcInfo.Name, stmts, stmtLimit), + } +} + +func getLines(fset *token.FileSet, f *ast.FuncDecl) int { // nolint: interfacer + return fset.Position(f.End()).Line - fset.Position(f.Pos()).Line - 1 +} + +func parseStmts(s []ast.Stmt) (total int) { + for _, v := range s { + total++ + switch stmt := v.(type) { + case *ast.BlockStmt: + total += parseStmts(stmt.List) - 1 + case *ast.ForStmt, *ast.RangeStmt, *ast.IfStmt, + *ast.SwitchStmt, *ast.TypeSwitchStmt, *ast.SelectStmt: + total += parseBodyListStmts(stmt) + case *ast.CaseClause: + total += parseStmts(stmt.Body) + case *ast.AssignStmt: + total += checkInlineFunc(stmt.Rhs[0]) + case *ast.GoStmt: + total += checkInlineFunc(stmt.Call.Fun) + case *ast.DeferStmt: + total += checkInlineFunc(stmt.Call.Fun) + } + } + return +} + +func checkInlineFunc(stmt ast.Expr) int { + if block, ok := stmt.(*ast.FuncLit); ok { + return parseStmts(block.Body.List) + } + return 0 +} + +func parseBodyListStmts(t interface{}) int { + i := reflect.ValueOf(t).Elem().FieldByName(`Body`).Elem().FieldByName(`List`).Interface() + return parseStmts(i.([]ast.Stmt)) +} diff --git a/vendor/github.com/ultraware/whitespace/LICENSE b/vendor/github.com/ultraware/whitespace/LICENSE new file mode 100644 index 000000000..dca75556d --- /dev/null +++ b/vendor/github.com/ultraware/whitespace/LICENSE @@ -0,0 +1,7 @@ +Copyright 2018 Ultraware Consultancy and Development B.V. + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/vendor/github.com/ultraware/whitespace/README.md b/vendor/github.com/ultraware/whitespace/README.md new file mode 100644 index 000000000..aed9a485f --- /dev/null +++ b/vendor/github.com/ultraware/whitespace/README.md @@ -0,0 +1,7 @@ +# Whitespace linter + +Whitespace is a linter that checks for unnecessary newlines at the start and end of functions, if, for, etc. + +## Installation guide + +Whitespace is included in [https://github.com/golangci/golangci-lint/](golangci-lint). Install it and enable whitespace. diff --git a/vendor/github.com/ultraware/whitespace/main.go b/vendor/github.com/ultraware/whitespace/main.go new file mode 100644 index 000000000..c36086c0e --- /dev/null +++ b/vendor/github.com/ultraware/whitespace/main.go @@ -0,0 +1,158 @@ +package whitespace + +import ( + "go/ast" + "go/token" +) + +// Message contains a message +type Message struct { + Pos token.Position + Type MessageType + Message string +} + +// MessageType describes what should happen to fix the warning +type MessageType uint8 + +// List of MessageTypes +const ( + MessageTypeLeading MessageType = iota + 1 + MessageTypeTrailing + MessageTypeAddAfter +) + +// Settings contains settings for edge-cases +type Settings struct { + MultiIf bool + MultiFunc bool +} + +// Run runs this linter on the provided code +func Run(file *ast.File, fset *token.FileSet, settings Settings) []Message { + var messages []Message + + for _, f := range file.Decls { + decl, ok := f.(*ast.FuncDecl) + if !ok || decl.Body == nil { // decl.Body can be nil for e.g. cgo + continue + } + + vis := visitor{file.Comments, fset, nil, make(map[*ast.BlockStmt]bool), settings} + ast.Walk(&vis, decl) + + messages = append(messages, vis.messages...) + } + + return messages +} + +type visitor struct { + comments []*ast.CommentGroup + fset *token.FileSet + messages []Message + wantNewline map[*ast.BlockStmt]bool + settings Settings +} + +func (v *visitor) Visit(node ast.Node) ast.Visitor { + if node == nil { + return v + } + + if stmt, ok := node.(*ast.IfStmt); ok && v.settings.MultiIf { + checkMultiLine(v, stmt.Body, stmt.Cond) + } + + if stmt, ok := node.(*ast.FuncDecl); ok && v.settings.MultiFunc { + checkMultiLine(v, stmt.Body, stmt.Type) + } + + if stmt, ok := node.(*ast.BlockStmt); ok { + wantNewline := v.wantNewline[stmt] + + comments := v.comments + if wantNewline { + comments = nil // Comments also count as a newline if we want a newline + } + first, last := firstAndLast(comments, v.fset, stmt.Pos(), stmt.End(), stmt.List) + + startMsg := checkStart(v.fset, stmt.Lbrace, first) + + if wantNewline && startMsg == nil { + v.messages = append(v.messages, Message{v.fset.Position(stmt.Pos()), MessageTypeAddAfter, `multi-line statement should be followed by a newline`}) + } else if !wantNewline && startMsg != nil { + v.messages = append(v.messages, *startMsg) + } + + if msg := checkEnd(v.fset, stmt.Rbrace, last); msg != nil { + v.messages = append(v.messages, *msg) + } + } + + return v +} + +func checkMultiLine(v *visitor, body *ast.BlockStmt, stmtStart ast.Node) { + start, end := posLine(v.fset, stmtStart.Pos()), posLine(v.fset, stmtStart.End()) + + if end > start { // Check only multi line conditions + v.wantNewline[body] = true + } +} + +func posLine(fset *token.FileSet, pos token.Pos) int { + return fset.Position(pos).Line +} + +func firstAndLast(comments []*ast.CommentGroup, fset *token.FileSet, start, end token.Pos, stmts []ast.Stmt) (ast.Node, ast.Node) { + if len(stmts) == 0 { + return nil, nil + } + + first, last := 
ast.Node(stmts[0]), ast.Node(stmts[len(stmts)-1]) + + for _, c := range comments { + if posLine(fset, c.Pos()) == posLine(fset, start) || posLine(fset, c.End()) == posLine(fset, end) { + continue + } + + if c.Pos() < start || c.End() > end { + continue + } + if c.Pos() < first.Pos() { + first = c + } + if c.End() > last.End() { + last = c + } + } + + return first, last +} + +func checkStart(fset *token.FileSet, start token.Pos, first ast.Node) *Message { + if first == nil { + return nil + } + + if posLine(fset, start)+1 < posLine(fset, first.Pos()) { + pos := fset.Position(start) + return &Message{pos, MessageTypeLeading, `unnecessary leading newline`} + } + + return nil +} + +func checkEnd(fset *token.FileSet, end token.Pos, last ast.Node) *Message { + if last == nil { + return nil + } + + if posLine(fset, end)-1 > posLine(fset, last.End()) { + pos := fset.Position(end) + return &Message{pos, MessageTypeTrailing, `unnecessary trailing newline`} + } + + return nil +} diff --git a/vendor/github.com/uudashr/gocognit/LICENSE b/vendor/github.com/uudashr/gocognit/LICENSE new file mode 100644 index 000000000..75d4b9c98 --- /dev/null +++ b/vendor/github.com/uudashr/gocognit/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2019 Nuruddin Ashr + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/github.com/uudashr/gocognit/README.md b/vendor/github.com/uudashr/gocognit/README.md new file mode 100644 index 000000000..4a8846907 --- /dev/null +++ b/vendor/github.com/uudashr/gocognit/README.md @@ -0,0 +1,185 @@ +[![GoDoc](https://godoc.org/github.com/uudashr/gocognit?status.svg)](https://godoc.org/github.com/uudashr/gocognit) +# Gocognit +Gocognit calculates cognitive complexities of functions in Go source code. A measurement of how hard does the code is intuitively to understand. 
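+
+As a hedged sketch (not part of the upstream README), the package can also be called directly through its exported Complexity function, which appears later in this patch; the demo source below, including the Words function, is made up for illustration:
+
+```go
+package main
+
+import (
+	"fmt"
+	"go/ast"
+	"go/parser"
+	"go/token"
+
+	"github.com/uudashr/gocognit"
+)
+
+func main() {
+	// A small, hypothetical source file to measure.
+	src := `package demo
+
+func Words(n int) string {
+	if n == 1 {
+		return "one"
+	} else if n == 2 {
+		return "a couple"
+	}
+	return "lots"
+}`
+
+	fset := token.NewFileSet()
+	file, err := parser.ParseFile(fset, "demo.go", src, 0)
+	if err != nil {
+		panic(err)
+	}
+
+	// Report the cognitive complexity of every top-level function.
+	for _, decl := range file.Decls {
+		if fn, ok := decl.(*ast.FuncDecl); ok {
+			fmt.Printf("%s: cognitive complexity %d\n", fn.Name.Name, gocognit.Complexity(fn))
+		}
+	}
+}
+```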
+ +## Understanding the complexity + +Given code using `if` statement, +```go +func GetWords(number int) string { + if number == 1 { // +1 + return "one" + } else if number == 2 { // +1 + return "a couple" + } else if number == 3 { // +1 + return "a few" + } else { // +1 + return "lots" + } +} // Cognitive complexity = 4 +``` + +Above code can be refactored using `switch` statement, +```go +func GetWords(number int) string { + switch number { // +1 + case 1: + return "one" + case 2: + return "a couple" + case 3: + return "a few" + default: + return "lots" + } +} // Cognitive complexity = 1 +``` + +As you see above codes are the same, but the second code are easier to understand, that is why the cognitive complexity score are lower compare to the first one. + +## Comparison with cyclometic complexity + +### Example 1 +#### Cyclometic complexity +```go +func GetWords(number int) string { // +1 + switch number { + case 1: // +1 + return "one" + case 2: // +1 + return "a couple" + case 3: // +1 + return "a few" + default: + return "lots" + } +} // Cyclomatic complexity = 4 +``` + +#### Cognitive complexity +```go +func GetWords(number int) string { + switch number { // +1 + case 1: + return "one" + case 2: + return "a couple" + case 3: + return "a few" + default: + return "lots" + } +} // Cognitive complexity = 1 +``` + +Cognitive complexity give lower score compare to cyclomatic complexity. + +### Example 2 +#### Cyclomatic complexity +```go +func SumOfPrimes(max int) int { // +1 + var total int + +OUT: + for i := 1; i < max; i++ { // +1 + for j := 2; j < i; j++ { // +1 + if i%j == 0 { // +1 + continue OUT + } + } + total += i + } + + return total +} // Cyclomatic complexity = 4 +``` + +#### Cognitive complexity +```go +func SumOfPrimes(max int) int { + var total int + +OUT: + for i := 1; i < max; i++ { // +1 + for j := 2; j < i; j++ { // +2 (nesting = 1) + if i%j == 0 { // +3 (nesting = 2) + continue OUT // +1 + } + } + total += i + } + + return total +} // Cognitive complexity = 7 +``` + +Cognitive complexity give higher score compare to cyclomatic complexity. + +## Rules + +The cognitive complexity of a function is calculated according to the +following rules: +> Note: these rules are specific for Go, please see the [original whitepaper](https://www.sonarsource.com/docs/CognitiveComplexity.pdf) for more complete reference. + +### Increments +There is an increment for each of the following: +1. `if`, `else if`, `else` +2. `switch`, `select` +3. `for` +4. `goto` LABEL, `break` LABEL, `continue` LABEL +5. sequence of binary logical operators +6. each method in a recursion cycle + +### Nesting level +The following structures increment the nesting level: +1. `if`, `else if`, `else` +2. `switch`, `select` +3. `for` +4. function literal or lambda + +### Nesting increments +The following structures receive a nesting increment commensurate with their nested depth inside nesting structures: +1. `if` +2. `switch`, `select` +3. `for` + +## Installation +``` +$ go get github.com/uudashr/gocognit/cmd/gocognit +``` + +## Usage + +``` +$ gocognit +Calculate cognitive complexities of Go functions. +Usage: + gocognit [flags] ... +Flags: + -over N show functions with complexity > N only and + return exit code 1 if the set is non-empty + -top N show the top N most complex functions only + -avg show the average complexity over all functions, + not depending on whether -over or -top are set +The output fields for each line are: + +``` + +Examples: + +``` +$ gocognit . 
+$ gocognit main.go +$ gocognit -top 10 src/ +$ gocognit -over 25 docker +$ gocognit -avg . +``` + +The output fields for each line are: +``` + +``` + +## Related project +- [Gocyclo](https://github.com/fzipp/gocyclo) where the code are based on. +- [Cognitive Complexity: A new way of measuring understandability](https://www.sonarsource.com/docs/CognitiveComplexity.pdf) white paper by G. Ann Campbell. \ No newline at end of file diff --git a/vendor/github.com/uudashr/gocognit/doc.go b/vendor/github.com/uudashr/gocognit/doc.go new file mode 100644 index 000000000..ae3d0a226 --- /dev/null +++ b/vendor/github.com/uudashr/gocognit/doc.go @@ -0,0 +1,2 @@ +// Package gocognit defines Analyzer other utilities to checks and calculate the complexity of function based on "cognitive complexity" methods. +package gocognit diff --git a/vendor/github.com/uudashr/gocognit/go.mod b/vendor/github.com/uudashr/gocognit/go.mod new file mode 100644 index 000000000..749f228ac --- /dev/null +++ b/vendor/github.com/uudashr/gocognit/go.mod @@ -0,0 +1,5 @@ +module github.com/uudashr/gocognit + +go 1.16 + +require golang.org/x/tools v0.1.4 diff --git a/vendor/github.com/uudashr/gocognit/go.sum b/vendor/github.com/uudashr/gocognit/go.sum new file mode 100644 index 000000000..40fff4fa9 --- /dev/null +++ b/vendor/github.com/uudashr/gocognit/go.sum @@ -0,0 +1,27 @@ +github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/mod v0.4.2 h1:Gz96sIWK3OalVv/I/qNygP42zyoKp3xptRVCWRFEBvo= +golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210510120138-977fb7262007 h1:gG67DSER+11cZvqIMb8S8bt0vZtiN6xWYARwirrOSfE= +golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.1.4 
h1:cVngSRcfgyZCzys3KYOpCFa+4dqX/Oub9tAq00ttGVs= +golang.org/x/tools v0.1.4/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= +golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= diff --git a/vendor/github.com/uudashr/gocognit/gocognit.go b/vendor/github.com/uudashr/gocognit/gocognit.go new file mode 100644 index 000000000..0687f5e2e --- /dev/null +++ b/vendor/github.com/uudashr/gocognit/gocognit.go @@ -0,0 +1,385 @@ +package gocognit + +import ( + "fmt" + "go/ast" + "go/token" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/ast/inspector" +) + +// Stat is statistic of the complexity. +type Stat struct { + PkgName string + FuncName string + Complexity int + Pos token.Position +} + +func (s Stat) String() string { + return fmt.Sprintf("%d %s %s %s", s.Complexity, s.PkgName, s.FuncName, s.Pos) +} + +// ComplexityStats builds the complexity statistics. +func ComplexityStats(f *ast.File, fset *token.FileSet, stats []Stat) []Stat { + for _, decl := range f.Decls { + if fn, ok := decl.(*ast.FuncDecl); ok { + stats = append(stats, Stat{ + PkgName: f.Name.Name, + FuncName: funcName(fn), + Complexity: Complexity(fn), + Pos: fset.Position(fn.Pos()), + }) + } + } + return stats +} + +// funcName returns the name representation of a function or method: +// "(Type).Name" for methods or simply "Name" for functions. +func funcName(fn *ast.FuncDecl) string { + if fn.Recv != nil { + if fn.Recv.NumFields() > 0 { + typ := fn.Recv.List[0].Type + return fmt.Sprintf("(%s).%s", recvString(typ), fn.Name) + } + } + return fn.Name.Name +} + +// recvString returns a string representation of recv of the +// form "T", "*T", or "BADRECV" (if not a proper receiver type). +func recvString(recv ast.Expr) string { + switch t := recv.(type) { + case *ast.Ident: + return t.Name + case *ast.StarExpr: + return "*" + recvString(t.X) + } + return "BADRECV" +} + +// Complexity calculates the cognitive complexity of a function. 
+func Complexity(fn *ast.FuncDecl) int { + v := complexityVisitor{ + name: fn.Name, + } + + ast.Walk(&v, fn) + return v.complexity +} + +type complexityVisitor struct { + name *ast.Ident + complexity int + nesting int + elseNodes map[ast.Node]bool + calculatedExprs map[ast.Expr]bool +} + +func (v *complexityVisitor) incNesting() { + v.nesting++ +} + +func (v *complexityVisitor) decNesting() { + v.nesting-- +} + +func (v *complexityVisitor) incComplexity() { + v.complexity++ +} + +func (v *complexityVisitor) nestIncComplexity() { + v.complexity += (v.nesting + 1) +} + +func (v *complexityVisitor) markAsElseNode(n ast.Node) { + if v.elseNodes == nil { + v.elseNodes = make(map[ast.Node]bool) + } + + v.elseNodes[n] = true +} + +func (v *complexityVisitor) markedAsElseNode(n ast.Node) bool { + if v.elseNodes == nil { + return false + } + + return v.elseNodes[n] +} + +func (v *complexityVisitor) markCalculated(e ast.Expr) { + if v.calculatedExprs == nil { + v.calculatedExprs = make(map[ast.Expr]bool) + } + + v.calculatedExprs[e] = true +} + +func (v *complexityVisitor) isCalculated(e ast.Expr) bool { + if v.calculatedExprs == nil { + return false + } + + return v.calculatedExprs[e] +} + +// Visit implements the ast.Visitor interface. +func (v *complexityVisitor) Visit(n ast.Node) ast.Visitor { + switch n := n.(type) { + case *ast.IfStmt: + return v.visitIfStmt(n) + case *ast.SwitchStmt: + return v.visitSwitchStmt(n) + case *ast.TypeSwitchStmt: + return v.visitTypeSwitchStmt(n) + case *ast.SelectStmt: + return v.visitSelectStmt(n) + case *ast.ForStmt: + return v.visitForStmt(n) + case *ast.RangeStmt: + return v.visitRangeStmt(n) + case *ast.FuncLit: + return v.visitFuncLit(n) + case *ast.BranchStmt: + return v.visitBranchStmt(n) + case *ast.BinaryExpr: + return v.visitBinaryExpr(n) + case *ast.CallExpr: + return v.visitCallExpr(n) + } + return v +} + +func (v *complexityVisitor) visitIfStmt(n *ast.IfStmt) ast.Visitor { + v.incIfComplexity(n) + + if n := n.Init; n != nil { + ast.Walk(v, n) + } + + ast.Walk(v, n.Cond) + + pure := !v.markedAsElseNode(n) // pure `if` statement, not an `else if` + if pure { + v.incNesting() + ast.Walk(v, n.Body) + v.decNesting() + } else { + ast.Walk(v, n.Body) + } + + if _, ok := n.Else.(*ast.BlockStmt); ok { + v.incComplexity() + + ast.Walk(v, n.Else) + } else if _, ok := n.Else.(*ast.IfStmt); ok { + v.markAsElseNode(n.Else) + ast.Walk(v, n.Else) + } + + return nil +} + +func (v *complexityVisitor) visitSwitchStmt(n *ast.SwitchStmt) ast.Visitor { + v.nestIncComplexity() + + if n := n.Init; n != nil { + ast.Walk(v, n) + } + + if n := n.Tag; n != nil { + ast.Walk(v, n) + } + + v.incNesting() + ast.Walk(v, n.Body) + v.decNesting() + return nil +} + +func (v *complexityVisitor) visitTypeSwitchStmt(n *ast.TypeSwitchStmt) ast.Visitor { + v.nestIncComplexity() + + if n := n.Init; n != nil { + ast.Walk(v, n) + } + + if n := n.Assign; n != nil { + ast.Walk(v, n) + } + + v.incNesting() + ast.Walk(v, n.Body) + v.decNesting() + return nil +} + +func (v *complexityVisitor) visitSelectStmt(n *ast.SelectStmt) ast.Visitor { + v.nestIncComplexity() + + v.incNesting() + ast.Walk(v, n.Body) + v.decNesting() + return nil +} + +func (v *complexityVisitor) visitForStmt(n *ast.ForStmt) ast.Visitor { + v.nestIncComplexity() + + if n := n.Init; n != nil { + ast.Walk(v, n) + } + + if n := n.Cond; n != nil { + ast.Walk(v, n) + } + + if n := n.Post; n != nil { + ast.Walk(v, n) + } + + v.incNesting() + ast.Walk(v, n.Body) + v.decNesting() + return nil +} + +func (v *complexityVisitor) 
visitRangeStmt(n *ast.RangeStmt) ast.Visitor { + v.nestIncComplexity() + + if n := n.Key; n != nil { + ast.Walk(v, n) + } + + if n := n.Value; n != nil { + ast.Walk(v, n) + } + + ast.Walk(v, n.X) + + v.incNesting() + ast.Walk(v, n.Body) + v.decNesting() + return nil +} + +func (v *complexityVisitor) visitFuncLit(n *ast.FuncLit) ast.Visitor { + ast.Walk(v, n.Type) + + v.incNesting() + ast.Walk(v, n.Body) + v.decNesting() + return nil +} + +func (v *complexityVisitor) visitBranchStmt(n *ast.BranchStmt) ast.Visitor { + if n.Label != nil { + v.incComplexity() + } + return v +} + +func (v *complexityVisitor) visitBinaryExpr(n *ast.BinaryExpr) ast.Visitor { + if (n.Op == token.LAND || n.Op == token.LOR) && !v.isCalculated(n) { + ops := v.collectBinaryOps(n) + + var lastOp token.Token + for _, op := range ops { + if lastOp != op { + v.incComplexity() + lastOp = op + } + } + } + return v +} + +func (v *complexityVisitor) visitCallExpr(n *ast.CallExpr) ast.Visitor { + if callIdent, ok := n.Fun.(*ast.Ident); ok { + obj, name := callIdent.Obj, callIdent.Name + if obj == v.name.Obj && name == v.name.Name { + // called by same function directly (direct recursion) + v.incComplexity() + } + } + return v +} + +func (v *complexityVisitor) collectBinaryOps(exp ast.Expr) []token.Token { + v.markCalculated(exp) + switch exp := exp.(type) { + case *ast.BinaryExpr: + return mergeBinaryOps(v.collectBinaryOps(exp.X), exp.Op, v.collectBinaryOps(exp.Y)) + case *ast.ParenExpr: + // interest only on what inside paranthese + return v.collectBinaryOps(exp.X) + default: + return []token.Token{} + } +} + +func (v *complexityVisitor) incIfComplexity(n *ast.IfStmt) { + if v.markedAsElseNode(n) { + v.incComplexity() + } else { + v.nestIncComplexity() + } +} + +func mergeBinaryOps(x []token.Token, op token.Token, y []token.Token) []token.Token { + var out []token.Token + if len(x) != 0 { + out = append(out, x...) + } + out = append(out, op) + if len(y) != 0 { + out = append(out, y...) + } + return out +} + +const Doc = `Find complex function using cognitive complexity calculation. + +The gocognit analysis repots functions or methods which the complexity is over +than the specified limit.` + +// Analyzer reports a diagnostic for every function or method which is +// too complex specified by its -over flag. 
+var Analyzer = &analysis.Analyzer{ + Name: "gocognit", + Doc: Doc, + Requires: []*analysis.Analyzer{inspect.Analyzer}, + Run: run, +} + +var ( + over int // -over flag +) + +func init() { + Analyzer.Flags.IntVar(&over, "over", over, "show functions with complexity > N only") +} + +func run(pass *analysis.Pass) (interface{}, error) { + inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + + nodeFilter := []ast.Node{ + (*ast.FuncDecl)(nil), + } + inspect.Preorder(nodeFilter, func(n ast.Node) { + fnDecl := n.(*ast.FuncDecl) + + fnName := funcName(fnDecl) + fnComplexity := Complexity(fnDecl) + + if fnComplexity > over { + pass.Reportf(fnDecl.Pos(), "cognitive complexity %d of func %s is high (> %d)", fnComplexity, fnName, over) + } + }) + + return nil, nil +} diff --git a/vendor/github.com/yeya24/promlinter/.gitignore b/vendor/github.com/yeya24/promlinter/.gitignore new file mode 100644 index 000000000..bffb9a029 --- /dev/null +++ b/vendor/github.com/yeya24/promlinter/.gitignore @@ -0,0 +1,20 @@ +# Binaries for programs and plugins +*.exe +*.exe~ +*.dll +*.so +*.dylib + +# Test binary, built with `go test -c` +*.test + +# binary +bin + +# Output of the go coverage tool, specifically when used with LiteIDE +*.out + +# Dependency directories (remove the comment below to include it) +# vendor/ + +.idea diff --git a/vendor/github.com/yeya24/promlinter/LICENSE b/vendor/github.com/yeya24/promlinter/LICENSE new file mode 100644 index 000000000..261eeb9e9 --- /dev/null +++ b/vendor/github.com/yeya24/promlinter/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. 
For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/vendor/github.com/yeya24/promlinter/Makefile b/vendor/github.com/yeya24/promlinter/Makefile new file mode 100644 index 000000000..d50db1ee7 --- /dev/null +++ b/vendor/github.com/yeya24/promlinter/Makefile @@ -0,0 +1,39 @@ +GOOS := $(if $(GOOS),$(GOOS),linux) +GOARCH := $(if $(GOARCH),$(GOARCH),amd64) +GO=CGO_ENABLED=0 GOOS=$(GOOS) GOARCH=$(GOARCH) GO111MODULE=on go +GOVERSION = $(shell $(GO) version | cut -c 14- | cut -d' ' -f1) + +GIT_COMMIT = $(shell git rev-parse --short HEAD) + +BUILDFLAGS ?= + +ifeq ($(shell expr ${GOVERSION} \>= 1.14), 1) + BUILDFLAGS += -mod=mod +endif + +PACKAGE_LIST := go list ./... +PACKAGES := $$($(PACKAGE_LIST)) +FILES_TO_FMT := $(shell find . -path -prune -o -name '*.go' -print) + +all: format build test + +format: vet fmt + +fmt: + @echo "gofmt" + @gofmt -w ${FILES_TO_FMT} + @git diff --exit-code . + +build: mod + $(GO) build ${BUILDFLAGS} -o ./bin/promlinter cmd/promlinter/main.go + +vet: + $(GO) vet ${BUILDFLAGS} ./... 
+ +mod: + @echo "go mod tidy" + $(GO) mod tidy + @git diff --exit-code -- go.mod + +test: + $(GO) test ${BUILDFLAGS} ./... -cover $(PACKAGES) diff --git a/vendor/github.com/yeya24/promlinter/README.md b/vendor/github.com/yeya24/promlinter/README.md new file mode 100644 index 000000000..c7e664103 --- /dev/null +++ b/vendor/github.com/yeya24/promlinter/README.md @@ -0,0 +1,74 @@ +# promlinter + +A linter for checking Prometheus metrics name via promlint. + +![example](assets/example.png) + +## Installation + +### Build from source + +#### Requirements + +- Go >= 1.13 +- make + +``` bash +git clone https://github.com/yeya24/promlinter.git +make build +``` + +Then you can find the `promlinter` binary file in the `./bin` directory. + +### Download from release + +TBD + +## Usage + +``` bash +promlinter -h + +usage: promlinter [] [...] + +Prometheus metrics linter for Go code. + +This tool can cover most of the patterns of metrics naming issues, but it cannot detect metric values that can only be determined in the runtime. + +By default it doesn't output parsing failures, if you want to see them, you can add --strict flag to enable it. + +It is also supported to disable the lint functions using repeated flag --disable. Current supported functions are: + + [Help]: Help detects issues related to the help text for a metric. + + [MetricUnits]: MetricUnits detects issues with metric unit names. + + [Counter]: Counter detects issues specific to counters, as well as patterns that should only be used with counters. + + [HistogramSummaryReserved]: HistogramSummaryReserved detects when other types of metrics use names or labels reserved for use by histograms and/or summaries. + + [MetricTypeInName]: MetricTypeInName detects when metric types are included in the metric name. + + [ReservedChars]: ReservedChars detects colons in metric names. + + [CamelCase]: CamelCase detects metric names and label names written in camelCase. + + [UnitAbbreviations]: UnitAbbreviations detects abbreviated units in the metric name. + +Flags: + -h, --help Show context-sensitive help (also try --help-long and --help-man). + --version Show application version. + -s, --strict Strict mode. If true, linter will output more issues including parsing failures. + -d, --disable=DISABLE ... Disable lint functions (repeated).Supported options: Help, Counter, MetricUnits, HistogramSummaryReserved, MetricTypeInName, + ReservedChars, CamelCase, UnitAbbreviations + +Args: + [] Files to lint. 
+ +``` + +## Run tests + +``` bash +make test +``` diff --git a/vendor/github.com/yeya24/promlinter/go.mod b/vendor/github.com/yeya24/promlinter/go.mod new file mode 100644 index 000000000..941cabdda --- /dev/null +++ b/vendor/github.com/yeya24/promlinter/go.mod @@ -0,0 +1,9 @@ +module github.com/yeya24/promlinter + +go 1.14 + +require ( + github.com/prometheus/client_golang v1.7.1 + github.com/prometheus/client_model v0.2.0 + gopkg.in/alecthomas/kingpin.v2 v2.2.6 +) diff --git a/vendor/github.com/yeya24/promlinter/go.sum b/vendor/github.com/yeya24/promlinter/go.sum new file mode 100644 index 000000000..303d57dc3 --- /dev/null +++ b/vendor/github.com/yeya24/promlinter/go.sum @@ -0,0 +1,114 @@ +github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= +github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751 h1:JYp7IbQjafoB+tBA3gMyHYHrpOtNuDiK/uB5uXxq5wM= +github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= +github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= +github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4 h1:Hs82Z41s6SdL1CELW+XaDYmOH4hkBN4/N9og/AsOv7E= +github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= +github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q= +github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8= +github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM= +github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw= +github.com/cespare/xxhash/v2 v2.1.1 h1:6MnRN8NT7+YBpUIWxHtefFZOKTAPgGjpQSxqLNn0+qY= +github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= +github.com/go-kit/kit v0.9.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= +github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE= +github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk= +github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= +github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= +github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8= +github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA= +github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs= +github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w= +github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0= 
+github.com/golang/protobuf v1.4.2 h1:+Z5KGCizgyZCbGh1KZqA0fcLLkwbsjIzS4aV2v7wJX0= +github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= +github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= +github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= +github.com/google/go-cmp v0.4.0 h1:xsAVV57WRhGj6kEIi8ReJzQlHHqcBYCElAvkovg3B/4= +github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= +github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= +github.com/json-iterator/go v1.1.10/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= +github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w= +github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= +github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc= +github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI= +github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= +github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= +github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE= +github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= +github.com/matttproud/golang_protobuf_extensions v1.0.1 h1:4hp9jkHxhMHkqkrB3Ix0jegS5sx/RkqARlsWZ6pIwiU= +github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0= +github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= +github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= +github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= +github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= +github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= +github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw= +github.com/prometheus/client_golang v1.0.0/go.mod h1:db9x61etRT2tGnBNRi70OPL5FsnadC4Ky3P0J6CfImo= +github.com/prometheus/client_golang v1.7.1 h1:NTGy1Ja9pByO+xAeH/qiWnLrKtr3hJPNjaVUwnjpdpA= +github.com/prometheus/client_golang v1.7.1/go.mod h1:PY5Wy2awLA44sXw4AOSfFBetzPP4j5+D6mVACh+pe2M= +github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= +github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/prometheus/client_model v0.2.0 h1:uq5h0d+GuxiXLJLNABMgp2qUWDPiLvgCzz2dUR+/W/M= +github.com/prometheus/client_model v0.2.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/prometheus/common v0.4.1/go.mod 
h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4= +github.com/prometheus/common v0.10.0 h1:RyRA7RzGXQZiW+tGMr7sxa85G1z0yOpM1qq5c8lNawc= +github.com/prometheus/common v0.10.0/go.mod h1:Tlit/dnDKsSWFlCLTWaA1cyBgKHSMdTB80sz/V91rCo= +github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= +github.com/prometheus/procfs v0.0.2/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA= +github.com/prometheus/procfs v0.1.3 h1:F0+tqvhOksq22sc6iCHF5WGlWjdwj92p0udFh1VFBS8= +github.com/prometheus/procfs v0.1.3/go.mod h1:lV6e/gmhEcM9IjHGsFOCxxuZ+z1YqCvr4OA4YeYWdaU= +github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo= +github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= +github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= +github.com/stretchr/testify v1.4.0 h1:2E4SXV/wtOkTonXsotYi4li6zVWxYlZuYNCXe9XRJyk= +github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= +golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190613194153-d28f0bde5980/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200106162015-b016eb3dc98e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200615200032-f1bc736245b1 h1:ogLJMz+qpzav7lGMh10LMvAkM/fAoGlaiiHYiFYdm80= +golang.org/x/sys v0.0.0-20200615200032-f1bc736245b1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543 h1:E7g+9GITq07hpfrRu66IVDexMakfv52eLZ2CXBWiKr4= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= +google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= +google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= +google.golang.org/protobuf 
v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE= +google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo= +google.golang.org/protobuf v1.23.0 h1:4MY060fB1DLGMB/7MBTLnwQUY6+F09GEiz6SsrNqyzM= +google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +gopkg.in/alecthomas/kingpin.v2 v2.2.6 h1:jMFz6MfLP0/4fUyZle81rXUoxOBFi19VUFKVDOQfozc= +gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15 h1:YR8cESwS4TdDjEe65xsg0ogRM/Nc3DYOhEAlW+xobZo= +gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.5 h1:ymVxjfMaHvXD8RqPRmzHHsB3VvucivSkIAvJFDI5O3c= +gopkg.in/yaml.v2 v2.2.5/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= diff --git a/vendor/github.com/yeya24/promlinter/promlinter.go b/vendor/github.com/yeya24/promlinter/promlinter.go new file mode 100644 index 000000000..898336a6f --- /dev/null +++ b/vendor/github.com/yeya24/promlinter/promlinter.go @@ -0,0 +1,664 @@ +package promlinter + +import ( + "fmt" + "go/ast" + "go/token" + "sort" + "strconv" + "strings" + + "github.com/prometheus/client_golang/prometheus" + "github.com/prometheus/client_golang/prometheus/testutil/promlint" + dto "github.com/prometheus/client_model/go" +) + +var ( + metricsType map[string]dto.MetricType + constMetricArgNum map[string]int + validOptsFields map[string]bool + lintFuncText map[string][]string + LintFuncNames []string +) + +func init() { + metricsType = map[string]dto.MetricType{ + "Counter": dto.MetricType_COUNTER, + "NewCounter": dto.MetricType_COUNTER, + "NewCounterVec": dto.MetricType_COUNTER, + "Gauge": dto.MetricType_GAUGE, + "NewGauge": dto.MetricType_GAUGE, + "NewGaugeVec": dto.MetricType_GAUGE, + "NewHistogram": dto.MetricType_HISTOGRAM, + "NewHistogramVec": dto.MetricType_HISTOGRAM, + "NewSummary": dto.MetricType_SUMMARY, + "NewSummaryVec": dto.MetricType_SUMMARY, + } + + constMetricArgNum = map[string]int{ + "MustNewConstMetric": 3, + "MustNewHistogram": 4, + "MustNewSummary": 4, + "NewLazyConstMetric": 3, + } + + // Doesn't contain ConstLabels since we don't need this field here. + validOptsFields = map[string]bool{ + "Name": true, + "Namespace": true, + "Subsystem": true, + "Help": true, + } + + lintFuncText = map[string][]string{ + "Help": {"no help text"}, + "MetricUnits": {"use base unit"}, + "Counter": {"counter metrics should"}, + "HistogramSummaryReserved": {"non-histogram", "non-summary"}, + "MetricTypeInName": {"metric name should not include type"}, + "ReservedChars": {"metric names should not contain ':'"}, + "CamelCase": {"'snake_case' not 'camelCase'"}, + "lintUnitAbbreviations": {"metric names should not contain abbreviated units"}, + } + + LintFuncNames = []string{"Help", "MetricUnits", "Counter", "HistogramSummaryReserved", + "MetricTypeInName", "ReservedChars", "CamelCase", "lintUnitAbbreviations"} +} + +type Setting struct { + Strict bool + DisabledLintFuncs []string +} + +// Issue contains metric name, error text and metric position. 
+type Issue struct { + Text string + Metric string + Pos token.Position +} + +type MetricFamilyWithPos struct { + MetricFamily *dto.MetricFamily + Pos token.Position +} + +type visitor struct { + fs *token.FileSet + metrics []MetricFamilyWithPos + issues []Issue + strict bool +} + +type opt struct { + namespace string + subsystem string + name string +} + +func RunList(fs *token.FileSet, files []*ast.File, strict bool) []MetricFamilyWithPos { + v := &visitor{ + fs: fs, + metrics: make([]MetricFamilyWithPos, 0), + issues: make([]Issue, 0), + strict: strict, + } + + for _, file := range files { + ast.Walk(v, file) + } + + sort.Slice(v.metrics, func(i, j int) bool { + return v.metrics[i].Pos.String() < v.metrics[j].Pos.String() + }) + return v.metrics +} + +func RunLint(fs *token.FileSet, files []*ast.File, s Setting) []Issue { + v := &visitor{ + fs: fs, + metrics: make([]MetricFamilyWithPos, 0), + issues: make([]Issue, 0), + strict: s.Strict, + } + + for _, file := range files { + ast.Walk(v, file) + } + + // lint metrics + for _, mfp := range v.metrics { + problems, err := promlint.NewWithMetricFamilies([]*dto.MetricFamily{mfp.MetricFamily}).Lint() + if err != nil { + panic(err) + } + + for _, p := range problems { + for _, disabledFunc := range s.DisabledLintFuncs { + for _, pattern := range lintFuncText[disabledFunc] { + if strings.Contains(p.Text, pattern) { + goto END + } + } + } + + v.issues = append(v.issues, Issue{ + Pos: mfp.Pos, + Metric: p.Metric, + Text: p.Text, + }) + + END: + } + } + + sort.Slice(v.issues, func(i, j int) bool { + return v.issues[i].Pos.String() < v.issues[j].Pos.String() + }) + return v.issues +} + +func (v *visitor) Visit(n ast.Node) ast.Visitor { + if n == nil { + return v + } + + switch t := n.(type) { + case *ast.CallExpr: + return v.parseCallerExpr(t) + + case *ast.SendStmt: + return v.parseSendMetricChanExpr(t) + } + + return v +} + +func (v *visitor) parseCallerExpr(call *ast.CallExpr) ast.Visitor { + var ( + metricType dto.MetricType + methodName string + ok bool + ) + + switch stmt := call.Fun.(type) { + + /* + That's the case of setting alias . to client_golang/prometheus or promauto package. + + import . "github.com/prometheus/client_golang/prometheus" + metric := NewCounter(CounterOpts{}) + */ + case *ast.Ident: + if stmt.Name == "NewCounterFunc" { + return v.parseOpts(call.Args[0], dto.MetricType_COUNTER) + } + + if stmt.Name == "NewGaugeFunc" { + return v.parseOpts(call.Args[0], dto.MetricType_GAUGE) + } + + if metricType, ok = metricsType[stmt.Name]; !ok { + return v + } + methodName = stmt.Name + + /* + This case covers the most of cases to initialize metrics. + + prometheus.NewCounter(CounterOpts{}) + + promauto.With(nil).NewCounter(CounterOpts{}) + + factory := promauto.With(nil) + factory.NewCounter(CounterOpts{}) + + prometheus.NewCounterFunc() + */ + case *ast.SelectorExpr: + if stmt.Sel.Name == "NewCounterFunc" { + return v.parseOpts(call.Args[0], dto.MetricType_COUNTER) + } + + if stmt.Sel.Name == "NewGaugeFunc" { + return v.parseOpts(call.Args[0], dto.MetricType_GAUGE) + } + + if stmt.Sel.Name == "NewFamilyGenerator" && len(call.Args) == 5 { + return v.parseKSMMetrics(call.Args[0], call.Args[1], call.Args[2]) + } + + if metricType, ok = metricsType[stmt.Sel.Name]; !ok { + return v + } + methodName = stmt.Sel.Name + + default: + return v + } + + argNum := 1 + if strings.HasSuffix(methodName, "Vec") { + argNum = 2 + } + // The methods used to initialize metrics should have at least one arg. 
+ if len(call.Args) < argNum && v.strict { + v.issues = append(v.issues, Issue{ + Pos: v.fs.Position(call.Pos()), + Metric: "", + Text: fmt.Sprintf("%s should have at least %d arguments", methodName, argNum), + }) + return v + } + + return v.parseOpts(call.Args[0], metricType) +} + +func (v *visitor) parseOpts(optArg ast.Node, metricType dto.MetricType) ast.Visitor { + // position for the first arg of the CallExpr + optsPosition := v.fs.Position(optArg.Pos()) + opts, help := v.parseOptsExpr(optArg) + if opts == nil { + return v + } + currentMetric := dto.MetricFamily{ + Type: &metricType, + Help: help, + } + + metricName := prometheus.BuildFQName(opts.namespace, opts.subsystem, opts.name) + // We skip the invalid metric if the name is an empty string. + // This kind of metric declaration might be used as a stud metric + // https://github.com/thanos-io/thanos/blob/main/cmd/thanos/tools_bucket.go#L538. + if metricName == "" { + return v + } + currentMetric.Name = &metricName + + v.metrics = append(v.metrics, MetricFamilyWithPos{MetricFamily: ¤tMetric, Pos: optsPosition}) + return v +} + +// Parser for kube-state-metrics generators. +func (v *visitor) parseKSMMetrics(nameArg ast.Node, helpArg ast.Node, metricTypeArg ast.Node) ast.Visitor { + optsPosition := v.fs.Position(nameArg.Pos()) + currentMetric := dto.MetricFamily{} + name, ok := v.parseValue("name", nameArg) + if !ok { + return v + } + currentMetric.Name = &name + + help, ok := v.parseValue("help", helpArg) + if !ok { + return v + } + currentMetric.Help = &help + + switch stmt := metricTypeArg.(type) { + case *ast.SelectorExpr: + if metricType, ok := metricsType[stmt.Sel.Name]; !ok { + return v + } else { + currentMetric.Type = &metricType + } + } + + v.metrics = append(v.metrics, MetricFamilyWithPos{MetricFamily: ¤tMetric, Pos: optsPosition}) + return v +} + +func (v *visitor) parseSendMetricChanExpr(chExpr *ast.SendStmt) ast.Visitor { + var ( + ok bool + requiredArgNum int + methodName string + metricType dto.MetricType + ) + + call, ok := chExpr.Value.(*ast.CallExpr) + if !ok { + return v + } + + switch stmt := call.Fun.(type) { + case *ast.Ident: + if requiredArgNum, ok = constMetricArgNum[stmt.Name]; !ok { + return v + } + methodName = stmt.Name + + case *ast.SelectorExpr: + if requiredArgNum, ok = constMetricArgNum[stmt.Sel.Name]; !ok { + return v + } + methodName = stmt.Sel.Name + } + + if len(call.Args) < requiredArgNum && v.strict { + v.issues = append(v.issues, Issue{ + Metric: "", + Pos: v.fs.Position(call.Pos()), + Text: fmt.Sprintf("%s should have at least %d arguments", methodName, requiredArgNum), + }) + return v + } + + name, help := v.parseConstMetricOptsExpr(call.Args[0]) + if name == nil { + return v + } + + metric := &dto.MetricFamily{ + Name: name, + Help: help, + } + switch methodName { + case "MustNewConstMetric", "NewLazyConstMetric": + switch t := call.Args[1].(type) { + case *ast.Ident: + metric.Type = getConstMetricType(t.Name) + case *ast.SelectorExpr: + metric.Type = getConstMetricType(t.Sel.Name) + } + + case "MustNewHistogram": + metricType = dto.MetricType_HISTOGRAM + metric.Type = &metricType + case "MustNewSummary": + metricType = dto.MetricType_SUMMARY + metric.Type = &metricType + } + + v.metrics = append(v.metrics, MetricFamilyWithPos{MetricFamily: metric, Pos: v.fs.Position(call.Pos())}) + return v +} + +func (v *visitor) parseOptsExpr(n ast.Node) (*opt, *string) { + switch stmt := n.(type) { + case *ast.CompositeLit: + return v.parseCompositeOpts(stmt) + + case *ast.Ident: + if stmt.Obj != nil { 
+ if decl, ok := stmt.Obj.Decl.(*ast.AssignStmt); ok && len(decl.Rhs) > 0 { + if t, ok := decl.Rhs[0].(*ast.CompositeLit); ok { + return v.parseCompositeOpts(t) + } + } + } + + case *ast.UnaryExpr: + return v.parseOptsExpr(stmt.X) + } + + return nil, nil +} + +func (v *visitor) parseCompositeOpts(stmt *ast.CompositeLit) (*opt, *string) { + metricOption := &opt{} + var help *string + for _, elt := range stmt.Elts { + kvExpr, ok := elt.(*ast.KeyValueExpr) + if !ok { + continue + } + object, ok := kvExpr.Key.(*ast.Ident) + if !ok { + continue + } + + if _, ok := validOptsFields[object.Name]; !ok { + continue + } + + // If failed to parse field value, stop parsing. + stringLiteral, ok := v.parseValue(object.Name, kvExpr.Value) + if !ok { + return nil, nil + } + + switch object.Name { + case "Namespace": + metricOption.namespace = stringLiteral + case "Subsystem": + metricOption.subsystem = stringLiteral + case "Name": + metricOption.name = stringLiteral + case "Help": + help = &stringLiteral + } + } + + return metricOption, help +} + +func (v *visitor) parseValue(object string, n ast.Node) (string, bool) { + switch t := n.(type) { + + // make sure it is string literal value + case *ast.BasicLit: + if t.Kind == token.STRING { + return mustUnquote(t.Value), true + } + + return "", false + + case *ast.Ident: + if t.Obj == nil { + return "", false + } + + if vs, ok := t.Obj.Decl.(*ast.ValueSpec); ok { + return v.parseValue(object, vs) + } + + case *ast.ValueSpec: + if len(t.Values) == 0 { + return "", false + } + return v.parseValue(object, t.Values[0]) + + // For binary expr, we only support adding two strings like `foo` + `bar`. + case *ast.BinaryExpr: + if t.Op == token.ADD { + x, ok := v.parseValue(object, t.X) + if !ok { + return "", false + } + + y, ok := v.parseValue(object, t.Y) + if !ok { + return "", false + } + + return x + y, true + } + + // We can only cover some basic cases here + case *ast.CallExpr: + return v.parseValueCallExpr(object, t) + + default: + if v.strict { + v.issues = append(v.issues, Issue{ + Pos: v.fs.Position(n.Pos()), + Metric: "", + Text: fmt.Sprintf("parsing %s with type %T is not supported", object, t), + }) + } + } + + return "", false +} + +func (v *visitor) parseValueCallExpr(object string, call *ast.CallExpr) (string, bool) { + var ( + methodName string + namespace string + subsystem string + name string + ok bool + ) + switch expr := call.Fun.(type) { + case *ast.SelectorExpr: + methodName = expr.Sel.Name + case *ast.Ident: + methodName = expr.Name + default: + return "", false + } + + if methodName == "BuildFQName" && len(call.Args) == 3 { + namespace, ok = v.parseValue("namespace", call.Args[0]) + if !ok { + return "", false + } + subsystem, ok = v.parseValue("subsystem", call.Args[1]) + if !ok { + return "", false + } + name, ok = v.parseValue("name", call.Args[2]) + if !ok { + return "", false + } + return prometheus.BuildFQName(namespace, subsystem, name), true + } + + if v.strict { + v.issues = append(v.issues, Issue{ + Metric: "", + Pos: v.fs.Position(call.Pos()), + Text: fmt.Sprintf("parsing %s with function %s is not supported", object, methodName), + }) + } + + return "", false +} + +func (v *visitor) parseConstMetricOptsExpr(n ast.Node) (*string, *string) { + switch stmt := n.(type) { + case *ast.CallExpr: + return v.parseNewDescCallExpr(stmt) + + case *ast.Ident: + if stmt.Obj != nil { + switch t := stmt.Obj.Decl.(type) { + case *ast.AssignStmt: + if len(t.Rhs) > 0 { + if call, ok := t.Rhs[0].(*ast.CallExpr); ok { + return 
v.parseNewDescCallExpr(call) + } + } + case *ast.ValueSpec: + if len(t.Values) > 0 { + if call, ok := t.Values[0].(*ast.CallExpr); ok { + return v.parseNewDescCallExpr(call) + } + } + } + + if v.strict { + v.issues = append(v.issues, Issue{ + Pos: v.fs.Position(stmt.Pos()), + Metric: "", + Text: fmt.Sprintf("parsing desc of type %T is not supported", stmt.Obj.Decl), + }) + } + } + + default: + if v.strict { + v.issues = append(v.issues, Issue{ + Pos: v.fs.Position(stmt.Pos()), + Metric: "", + Text: fmt.Sprintf("parsing desc of type %T is not supported", stmt), + }) + } + } + + return nil, nil +} + +func (v *visitor) parseNewDescCallExpr(call *ast.CallExpr) (*string, *string) { + var ( + help string + name string + ok bool + ) + + switch expr := call.Fun.(type) { + case *ast.Ident: + if expr.Name != "NewDesc" { + if v.strict { + v.issues = append(v.issues, Issue{ + Pos: v.fs.Position(expr.Pos()), + Metric: "", + Text: fmt.Sprintf("parsing desc with function %s is not supported", expr.Name), + }) + } + return nil, nil + } + case *ast.SelectorExpr: + if expr.Sel.Name != "NewDesc" { + if v.strict { + v.issues = append(v.issues, Issue{ + Pos: v.fs.Position(expr.Sel.Pos()), + Metric: "", + Text: fmt.Sprintf("parsing desc with function %s is not supported", expr.Sel.Name), + }) + } + return nil, nil + } + default: + if v.strict { + v.issues = append(v.issues, Issue{ + Pos: v.fs.Position(expr.Pos()), + Metric: "", + Text: fmt.Sprintf("parsing desc of %T is not supported", expr), + }) + } + return nil, nil + } + + // k8s.io/component-base/metrics.NewDesc has 6 args + // while prometheus.NewDesc has 4 args + if len(call.Args) < 4 && v.strict { + v.issues = append(v.issues, Issue{ + Metric: "", + Pos: v.fs.Position(call.Pos()), + Text: "NewDesc should have at least 4 args", + }) + return nil, nil + } + + name, ok = v.parseValue("fqName", call.Args[0]) + if !ok { + return nil, nil + } + help, ok = v.parseValue("help", call.Args[1]) + if !ok { + return nil, nil + } + + return &name, &help +} + +func mustUnquote(str string) string { + stringLiteral, err := strconv.Unquote(str) + if err != nil { + panic(err) + } + + return stringLiteral +} + +func getConstMetricType(name string) *dto.MetricType { + metricType := dto.MetricType_UNTYPED + if name == "CounterValue" { + metricType = dto.MetricType_COUNTER + } else if name == "GaugeValue" { + metricType = dto.MetricType_GAUGE + } + + return &metricType +} diff --git a/vendor/golang.org/x/mod/LICENSE b/vendor/golang.org/x/mod/LICENSE new file mode 100644 index 000000000..6a66aea5e --- /dev/null +++ b/vendor/golang.org/x/mod/LICENSE @@ -0,0 +1,27 @@ +Copyright (c) 2009 The Go Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. 
+ +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/golang.org/x/mod/PATENTS b/vendor/golang.org/x/mod/PATENTS new file mode 100644 index 000000000..733099041 --- /dev/null +++ b/vendor/golang.org/x/mod/PATENTS @@ -0,0 +1,22 @@ +Additional IP Rights Grant (Patents) + +"This implementation" means the copyrightable works distributed by +Google as part of the Go project. + +Google hereby grants to You a perpetual, worldwide, non-exclusive, +no-charge, royalty-free, irrevocable (except as stated in this section) +patent license to make, have made, use, offer to sell, sell, import, +transfer and otherwise run, modify and propagate the contents of this +implementation of Go, where such license applies only to those patent +claims, both currently owned or controlled by Google and acquired in +the future, licensable by Google that are necessarily infringed by this +implementation of Go. This grant does not include claims that would be +infringed only as a consequence of further modification of this +implementation. If you or your agent or exclusive licensee institute or +order or agree to the institution of patent litigation against any +entity (including a cross-claim or counterclaim in a lawsuit) alleging +that this implementation of Go or any code incorporated within this +implementation of Go constitutes direct or contributory patent +infringement, or inducement of patent infringement, then any patent +rights granted to you under this License for this implementation of Go +shall terminate as of the date such litigation is filed. diff --git a/vendor/golang.org/x/mod/internal/lazyregexp/lazyre.go b/vendor/golang.org/x/mod/internal/lazyregexp/lazyre.go new file mode 100644 index 000000000..2681af35a --- /dev/null +++ b/vendor/golang.org/x/mod/internal/lazyregexp/lazyre.go @@ -0,0 +1,78 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package lazyregexp is a thin wrapper over regexp, allowing the use of global +// regexp variables without forcing them to be compiled at init. +package lazyregexp + +import ( + "os" + "regexp" + "strings" + "sync" +) + +// Regexp is a wrapper around regexp.Regexp, where the underlying regexp will be +// compiled the first time it is needed. 
+type Regexp struct { + str string + once sync.Once + rx *regexp.Regexp +} + +func (r *Regexp) re() *regexp.Regexp { + r.once.Do(r.build) + return r.rx +} + +func (r *Regexp) build() { + r.rx = regexp.MustCompile(r.str) + r.str = "" +} + +func (r *Regexp) FindSubmatch(s []byte) [][]byte { + return r.re().FindSubmatch(s) +} + +func (r *Regexp) FindStringSubmatch(s string) []string { + return r.re().FindStringSubmatch(s) +} + +func (r *Regexp) FindStringSubmatchIndex(s string) []int { + return r.re().FindStringSubmatchIndex(s) +} + +func (r *Regexp) ReplaceAllString(src, repl string) string { + return r.re().ReplaceAllString(src, repl) +} + +func (r *Regexp) FindString(s string) string { + return r.re().FindString(s) +} + +func (r *Regexp) FindAllString(s string, n int) []string { + return r.re().FindAllString(s, n) +} + +func (r *Regexp) MatchString(s string) bool { + return r.re().MatchString(s) +} + +func (r *Regexp) SubexpNames() []string { + return r.re().SubexpNames() +} + +var inTest = len(os.Args) > 0 && strings.HasSuffix(strings.TrimSuffix(os.Args[0], ".exe"), ".test") + +// New creates a new lazy regexp, delaying the compiling work until it is first +// needed. If the code is being run as part of tests, the regexp compiling will +// happen immediately. +func New(str string) *Regexp { + lr := &Regexp{str: str} + if inTest { + // In tests, always compile the regexps early. + lr.re() + } + return lr +} diff --git a/vendor/golang.org/x/mod/modfile/print.go b/vendor/golang.org/x/mod/modfile/print.go new file mode 100644 index 000000000..524f93022 --- /dev/null +++ b/vendor/golang.org/x/mod/modfile/print.go @@ -0,0 +1,174 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Module file printer. + +package modfile + +import ( + "bytes" + "fmt" + "strings" +) + +// Format returns a go.mod file as a byte slice, formatted in standard style. +func Format(f *FileSyntax) []byte { + pr := &printer{} + pr.file(f) + return pr.Bytes() +} + +// A printer collects the state during printing of a file or expression. +type printer struct { + bytes.Buffer // output buffer + comment []Comment // pending end-of-line comments + margin int // left margin (indent), a number of tabs +} + +// printf prints to the buffer. +func (p *printer) printf(format string, args ...interface{}) { + fmt.Fprintf(p, format, args...) +} + +// indent returns the position on the current line, in bytes, 0-indexed. +func (p *printer) indent() int { + b := p.Bytes() + n := 0 + for n < len(b) && b[len(b)-1-n] != '\n' { + n++ + } + return n +} + +// newline ends the current line, flushing end-of-line comments. +func (p *printer) newline() { + if len(p.comment) > 0 { + p.printf(" ") + for i, com := range p.comment { + if i > 0 { + p.trim() + p.printf("\n") + for i := 0; i < p.margin; i++ { + p.printf("\t") + } + } + p.printf("%s", strings.TrimSpace(com.Token)) + } + p.comment = p.comment[:0] + } + + p.trim() + p.printf("\n") + for i := 0; i < p.margin; i++ { + p.printf("\t") + } +} + +// trim removes trailing spaces and tabs from the current line. +func (p *printer) trim() { + // Remove trailing spaces and tabs from line we're about to end. + b := p.Bytes() + n := len(b) + for n > 0 && (b[n-1] == '\t' || b[n-1] == ' ') { + n-- + } + p.Truncate(n) +} + +// file formats the given file into the print buffer. 
+func (p *printer) file(f *FileSyntax) { + for _, com := range f.Before { + p.printf("%s", strings.TrimSpace(com.Token)) + p.newline() + } + + for i, stmt := range f.Stmt { + switch x := stmt.(type) { + case *CommentBlock: + // comments already handled + p.expr(x) + + default: + p.expr(x) + p.newline() + } + + for _, com := range stmt.Comment().After { + p.printf("%s", strings.TrimSpace(com.Token)) + p.newline() + } + + if i+1 < len(f.Stmt) { + p.newline() + } + } +} + +func (p *printer) expr(x Expr) { + // Emit line-comments preceding this expression. + if before := x.Comment().Before; len(before) > 0 { + // Want to print a line comment. + // Line comments must be at the current margin. + p.trim() + if p.indent() > 0 { + // There's other text on the line. Start a new line. + p.printf("\n") + } + // Re-indent to margin. + for i := 0; i < p.margin; i++ { + p.printf("\t") + } + for _, com := range before { + p.printf("%s", strings.TrimSpace(com.Token)) + p.newline() + } + } + + switch x := x.(type) { + default: + panic(fmt.Errorf("printer: unexpected type %T", x)) + + case *CommentBlock: + // done + + case *LParen: + p.printf("(") + case *RParen: + p.printf(")") + + case *Line: + p.tokens(x.Token) + + case *LineBlock: + p.tokens(x.Token) + p.printf(" ") + p.expr(&x.LParen) + p.margin++ + for _, l := range x.Line { + p.newline() + p.expr(l) + } + p.margin-- + p.newline() + p.expr(&x.RParen) + } + + // Queue end-of-line comments for printing when we + // reach the end of the line. + p.comment = append(p.comment, x.Comment().Suffix...) +} + +func (p *printer) tokens(tokens []string) { + sep := "" + for _, t := range tokens { + if t == "," || t == ")" || t == "]" || t == "}" { + sep = "" + } + p.printf("%s%s", sep, t) + sep = " " + if t == "(" || t == "[" || t == "{" { + sep = "" + } + } +} diff --git a/vendor/golang.org/x/mod/modfile/read.go b/vendor/golang.org/x/mod/modfile/read.go new file mode 100644 index 000000000..2a961ca81 --- /dev/null +++ b/vendor/golang.org/x/mod/modfile/read.go @@ -0,0 +1,956 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package modfile + +import ( + "bytes" + "errors" + "fmt" + "os" + "strconv" + "strings" + "unicode" + "unicode/utf8" +) + +// A Position describes an arbitrary source position in a file, including the +// file, line, column, and byte offset. +type Position struct { + Line int // line in input (starting at 1) + LineRune int // rune in line (starting at 1) + Byte int // byte in input (starting at 0) +} + +// add returns the position at the end of s, assuming it starts at p. +func (p Position) add(s string) Position { + p.Byte += len(s) + if n := strings.Count(s, "\n"); n > 0 { + p.Line += n + s = s[strings.LastIndex(s, "\n")+1:] + p.LineRune = 1 + } + p.LineRune += utf8.RuneCountInString(s) + return p +} + +// An Expr represents an input element. +type Expr interface { + // Span returns the start and end position of the expression, + // excluding leading or trailing comments. + Span() (start, end Position) + + // Comment returns the comments attached to the expression. + // This method would normally be named 'Comments' but that + // would interfere with embedding a type of the same name. + Comment() *Comments +} + +// A Comment represents a single // comment. 
+type Comment struct { + Start Position + Token string // without trailing newline + Suffix bool // an end of line (not whole line) comment +} + +// Comments collects the comments associated with an expression. +type Comments struct { + Before []Comment // whole-line comments before this expression + Suffix []Comment // end-of-line comments after this expression + + // For top-level expressions only, After lists whole-line + // comments following the expression. + After []Comment +} + +// Comment returns the receiver. This isn't useful by itself, but +// a Comments struct is embedded into all the expression +// implementation types, and this gives each of those a Comment +// method to satisfy the Expr interface. +func (c *Comments) Comment() *Comments { + return c +} + +// A FileSyntax represents an entire go.mod file. +type FileSyntax struct { + Name string // file path + Comments + Stmt []Expr +} + +func (x *FileSyntax) Span() (start, end Position) { + if len(x.Stmt) == 0 { + return + } + start, _ = x.Stmt[0].Span() + _, end = x.Stmt[len(x.Stmt)-1].Span() + return start, end +} + +// addLine adds a line containing the given tokens to the file. +// +// If the first token of the hint matches the first token of the +// line, the new line is added at the end of the block containing hint, +// extracting hint into a new block if it is not yet in one. +// +// If the hint is non-nil buts its first token does not match, +// the new line is added after the block containing hint +// (or hint itself, if not in a block). +// +// If no hint is provided, addLine appends the line to the end of +// the last block with a matching first token, +// or to the end of the file if no such block exists. +func (x *FileSyntax) addLine(hint Expr, tokens ...string) *Line { + if hint == nil { + // If no hint given, add to the last statement of the given type. + Loop: + for i := len(x.Stmt) - 1; i >= 0; i-- { + stmt := x.Stmt[i] + switch stmt := stmt.(type) { + case *Line: + if stmt.Token != nil && stmt.Token[0] == tokens[0] { + hint = stmt + break Loop + } + case *LineBlock: + if stmt.Token[0] == tokens[0] { + hint = stmt + break Loop + } + } + } + } + + newLineAfter := func(i int) *Line { + new := &Line{Token: tokens} + if i == len(x.Stmt) { + x.Stmt = append(x.Stmt, new) + } else { + x.Stmt = append(x.Stmt, nil) + copy(x.Stmt[i+2:], x.Stmt[i+1:]) + x.Stmt[i+1] = new + } + return new + } + + if hint != nil { + for i, stmt := range x.Stmt { + switch stmt := stmt.(type) { + case *Line: + if stmt == hint { + if stmt.Token == nil || stmt.Token[0] != tokens[0] { + return newLineAfter(i) + } + + // Convert line to line block. + stmt.InBlock = true + block := &LineBlock{Token: stmt.Token[:1], Line: []*Line{stmt}} + stmt.Token = stmt.Token[1:] + x.Stmt[i] = block + new := &Line{Token: tokens[1:], InBlock: true} + block.Line = append(block.Line, new) + return new + } + + case *LineBlock: + if stmt == hint { + if stmt.Token[0] != tokens[0] { + return newLineAfter(i) + } + + new := &Line{Token: tokens[1:], InBlock: true} + stmt.Line = append(stmt.Line, new) + return new + } + + for j, line := range stmt.Line { + if line == hint { + if stmt.Token[0] != tokens[0] { + return newLineAfter(i) + } + + // Add new line after hint within the block. 
+ stmt.Line = append(stmt.Line, nil) + copy(stmt.Line[j+2:], stmt.Line[j+1:]) + new := &Line{Token: tokens[1:], InBlock: true} + stmt.Line[j+1] = new + return new + } + } + } + } + } + + new := &Line{Token: tokens} + x.Stmt = append(x.Stmt, new) + return new +} + +func (x *FileSyntax) updateLine(line *Line, tokens ...string) { + if line.InBlock { + tokens = tokens[1:] + } + line.Token = tokens +} + +func (x *FileSyntax) removeLine(line *Line) { + line.Token = nil +} + +// Cleanup cleans up the file syntax x after any edit operations. +// To avoid quadratic behavior, removeLine marks the line as dead +// by setting line.Token = nil but does not remove it from the slice +// in which it appears. After edits have all been indicated, +// calling Cleanup cleans out the dead lines. +func (x *FileSyntax) Cleanup() { + w := 0 + for _, stmt := range x.Stmt { + switch stmt := stmt.(type) { + case *Line: + if stmt.Token == nil { + continue + } + case *LineBlock: + ww := 0 + for _, line := range stmt.Line { + if line.Token != nil { + stmt.Line[ww] = line + ww++ + } + } + if ww == 0 { + continue + } + if ww == 1 { + // Collapse block into single line. + line := &Line{ + Comments: Comments{ + Before: commentsAdd(stmt.Before, stmt.Line[0].Before), + Suffix: commentsAdd(stmt.Line[0].Suffix, stmt.Suffix), + After: commentsAdd(stmt.Line[0].After, stmt.After), + }, + Token: stringsAdd(stmt.Token, stmt.Line[0].Token), + } + x.Stmt[w] = line + w++ + continue + } + stmt.Line = stmt.Line[:ww] + } + x.Stmt[w] = stmt + w++ + } + x.Stmt = x.Stmt[:w] +} + +func commentsAdd(x, y []Comment) []Comment { + return append(x[:len(x):len(x)], y...) +} + +func stringsAdd(x, y []string) []string { + return append(x[:len(x):len(x)], y...) +} + +// A CommentBlock represents a top-level block of comments separate +// from any rule. +type CommentBlock struct { + Comments + Start Position +} + +func (x *CommentBlock) Span() (start, end Position) { + return x.Start, x.Start +} + +// A Line is a single line of tokens. +type Line struct { + Comments + Start Position + Token []string + InBlock bool + End Position +} + +func (x *Line) Span() (start, end Position) { + return x.Start, x.End +} + +// A LineBlock is a factored block of lines, like +// +// require ( +// "x" +// "y" +// ) +// +type LineBlock struct { + Comments + Start Position + LParen LParen + Token []string + Line []*Line + RParen RParen +} + +func (x *LineBlock) Span() (start, end Position) { + return x.Start, x.RParen.Pos.add(")") +} + +// An LParen represents the beginning of a parenthesized line block. +// It is a place to store suffix comments. +type LParen struct { + Comments + Pos Position +} + +func (x *LParen) Span() (start, end Position) { + return x.Pos, x.Pos.add(")") +} + +// An RParen represents the end of a parenthesized line block. +// It is a place to store whole-line (before) comments. +type RParen struct { + Comments + Pos Position +} + +func (x *RParen) Span() (start, end Position) { + return x.Pos, x.Pos.add(")") +} + +// An input represents a single input file being parsed. +type input struct { + // Lexing state. + filename string // name of input file, for errors + complete []byte // entire input + remaining []byte // remaining input + tokenStart []byte // token being scanned to end of input + token token // next token to be returned by lex, peek + pos Position // current input position + comments []Comment // accumulated comments + + // Parser state. 
+ file *FileSyntax // returned top-level syntax tree + parseErrors ErrorList // errors encountered during parsing + + // Comment assignment state. + pre []Expr // all expressions, in preorder traversal + post []Expr // all expressions, in postorder traversal +} + +func newInput(filename string, data []byte) *input { + return &input{ + filename: filename, + complete: data, + remaining: data, + pos: Position{Line: 1, LineRune: 1, Byte: 0}, + } +} + +// parse parses the input file. +func parse(file string, data []byte) (f *FileSyntax, err error) { + // The parser panics for both routine errors like syntax errors + // and for programmer bugs like array index errors. + // Turn both into error returns. Catching bug panics is + // especially important when processing many files. + in := newInput(file, data) + defer func() { + if e := recover(); e != nil && e != &in.parseErrors { + in.parseErrors = append(in.parseErrors, Error{ + Filename: in.filename, + Pos: in.pos, + Err: fmt.Errorf("internal error: %v", e), + }) + } + if err == nil && len(in.parseErrors) > 0 { + err = in.parseErrors + } + }() + + // Prime the lexer by reading in the first token. It will be available + // in the next peek() or lex() call. + in.readToken() + + // Invoke the parser. + in.parseFile() + if len(in.parseErrors) > 0 { + return nil, in.parseErrors + } + in.file.Name = in.filename + + // Assign comments to nearby syntax. + in.assignComments() + + return in.file, nil +} + +// Error is called to report an error. +// Error does not return: it panics. +func (in *input) Error(s string) { + in.parseErrors = append(in.parseErrors, Error{ + Filename: in.filename, + Pos: in.pos, + Err: errors.New(s), + }) + panic(&in.parseErrors) +} + +// eof reports whether the input has reached end of file. +func (in *input) eof() bool { + return len(in.remaining) == 0 +} + +// peekRune returns the next rune in the input without consuming it. +func (in *input) peekRune() int { + if len(in.remaining) == 0 { + return 0 + } + r, _ := utf8.DecodeRune(in.remaining) + return int(r) +} + +// peekPrefix reports whether the remaining input begins with the given prefix. +func (in *input) peekPrefix(prefix string) bool { + // This is like bytes.HasPrefix(in.remaining, []byte(prefix)) + // but without the allocation of the []byte copy of prefix. + for i := 0; i < len(prefix); i++ { + if i >= len(in.remaining) || in.remaining[i] != prefix[i] { + return false + } + } + return true +} + +// readRune consumes and returns the next rune in the input. +func (in *input) readRune() int { + if len(in.remaining) == 0 { + in.Error("internal lexer error: readRune at EOF") + } + r, size := utf8.DecodeRune(in.remaining) + in.remaining = in.remaining[size:] + if r == '\n' { + in.pos.Line++ + in.pos.LineRune = 1 + } else { + in.pos.LineRune++ + } + in.pos.Byte += size + return int(r) +} + +type token struct { + kind tokenKind + pos Position + endPos Position + text string +} + +type tokenKind int + +const ( + _EOF tokenKind = -(iota + 1) + _EOLCOMMENT + _IDENT + _STRING + _COMMENT + + // newlines and punctuation tokens are allowed as ASCII codes. +) + +func (k tokenKind) isComment() bool { + return k == _COMMENT || k == _EOLCOMMENT +} + +// isEOL returns whether a token terminates a line. +func (k tokenKind) isEOL() bool { + return k == _EOF || k == _EOLCOMMENT || k == '\n' +} + +// startToken marks the beginning of the next input token. +// It must be followed by a call to endToken, once the token's text has +// been consumed using readRune. 
+func (in *input) startToken() { + in.tokenStart = in.remaining + in.token.text = "" + in.token.pos = in.pos +} + +// endToken marks the end of an input token. +// It records the actual token string in tok.text. +// A single trailing newline (LF or CRLF) will be removed from comment tokens. +func (in *input) endToken(kind tokenKind) { + in.token.kind = kind + text := string(in.tokenStart[:len(in.tokenStart)-len(in.remaining)]) + if kind.isComment() { + if strings.HasSuffix(text, "\r\n") { + text = text[:len(text)-2] + } else { + text = strings.TrimSuffix(text, "\n") + } + } + in.token.text = text + in.token.endPos = in.pos +} + +// peek returns the kind of the the next token returned by lex. +func (in *input) peek() tokenKind { + return in.token.kind +} + +// lex is called from the parser to obtain the next input token. +func (in *input) lex() token { + tok := in.token + in.readToken() + return tok +} + +// readToken lexes the next token from the text and stores it in in.token. +func (in *input) readToken() { + // Skip past spaces, stopping at non-space or EOF. + for !in.eof() { + c := in.peekRune() + if c == ' ' || c == '\t' || c == '\r' { + in.readRune() + continue + } + + // Comment runs to end of line. + if in.peekPrefix("//") { + in.startToken() + + // Is this comment the only thing on its line? + // Find the last \n before this // and see if it's all + // spaces from there to here. + i := bytes.LastIndex(in.complete[:in.pos.Byte], []byte("\n")) + suffix := len(bytes.TrimSpace(in.complete[i+1:in.pos.Byte])) > 0 + in.readRune() + in.readRune() + + // Consume comment. + for len(in.remaining) > 0 && in.readRune() != '\n' { + } + + // If we are at top level (not in a statement), hand the comment to + // the parser as a _COMMENT token. The grammar is written + // to handle top-level comments itself. + if !suffix { + in.endToken(_COMMENT) + return + } + + // Otherwise, save comment for later attachment to syntax tree. + in.endToken(_EOLCOMMENT) + in.comments = append(in.comments, Comment{in.token.pos, in.token.text, suffix}) + return + } + + if in.peekPrefix("/*") { + in.Error("mod files must use // comments (not /* */ comments)") + } + + // Found non-space non-comment. + break + } + + // Found the beginning of the next token. + in.startToken() + + // End of file. + if in.eof() { + in.endToken(_EOF) + return + } + + // Punctuation tokens. + switch c := in.peekRune(); c { + case '\n', '(', ')', '[', ']', '{', '}', ',': + in.readRune() + in.endToken(tokenKind(c)) + return + + case '"', '`': // quoted string + quote := c + in.readRune() + for { + if in.eof() { + in.pos = in.token.pos + in.Error("unexpected EOF in string") + } + if in.peekRune() == '\n' { + in.Error("unexpected newline in string") + } + c := in.readRune() + if c == quote { + break + } + if c == '\\' && quote != '`' { + if in.eof() { + in.pos = in.token.pos + in.Error("unexpected EOF in string") + } + in.readRune() + } + } + in.endToken(_STRING) + return + } + + // Checked all punctuation. Must be identifier token. + if c := in.peekRune(); !isIdent(c) { + in.Error(fmt.Sprintf("unexpected input character %#q", c)) + } + + // Scan over identifier. + for isIdent(in.peekRune()) { + if in.peekPrefix("//") { + break + } + if in.peekPrefix("/*") { + in.Error("mod files must use // comments (not /* */ comments)") + } + in.readRune() + } + in.endToken(_IDENT) +} + +// isIdent reports whether c is an identifier rune. +// We treat most printable runes as identifier runes, except for a handful of +// ASCII punctuation characters. 
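+//
+// As an illustration (the sample runes below are chosen for this comment,
+// not taken from the package's tests):
+//
+//	isIdent('a') // true: letters are identifier runes
+//	isIdent('/') // true: '/' occurs inside module paths and versions
+//	isIdent('(') // false: '(' is punctuation that delimits tokens
+//	isIdent(' ') // false: spaces separate tokens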
+func isIdent(c int) bool { + switch r := rune(c); r { + case ' ', '(', ')', '[', ']', '{', '}', ',': + return false + default: + return !unicode.IsSpace(r) && unicode.IsPrint(r) + } +} + +// Comment assignment. +// We build two lists of all subexpressions, preorder and postorder. +// The preorder list is ordered by start location, with outer expressions first. +// The postorder list is ordered by end location, with outer expressions last. +// We use the preorder list to assign each whole-line comment to the syntax +// immediately following it, and we use the postorder list to assign each +// end-of-line comment to the syntax immediately preceding it. + +// order walks the expression adding it and its subexpressions to the +// preorder and postorder lists. +func (in *input) order(x Expr) { + if x != nil { + in.pre = append(in.pre, x) + } + switch x := x.(type) { + default: + panic(fmt.Errorf("order: unexpected type %T", x)) + case nil: + // nothing + case *LParen, *RParen: + // nothing + case *CommentBlock: + // nothing + case *Line: + // nothing + case *FileSyntax: + for _, stmt := range x.Stmt { + in.order(stmt) + } + case *LineBlock: + in.order(&x.LParen) + for _, l := range x.Line { + in.order(l) + } + in.order(&x.RParen) + } + if x != nil { + in.post = append(in.post, x) + } +} + +// assignComments attaches comments to nearby syntax. +func (in *input) assignComments() { + const debug = false + + // Generate preorder and postorder lists. + in.order(in.file) + + // Split into whole-line comments and suffix comments. + var line, suffix []Comment + for _, com := range in.comments { + if com.Suffix { + suffix = append(suffix, com) + } else { + line = append(line, com) + } + } + + if debug { + for _, c := range line { + fmt.Fprintf(os.Stderr, "LINE %q :%d:%d #%d\n", c.Token, c.Start.Line, c.Start.LineRune, c.Start.Byte) + } + } + + // Assign line comments to syntax immediately following. + for _, x := range in.pre { + start, _ := x.Span() + if debug { + fmt.Fprintf(os.Stderr, "pre %T :%d:%d #%d\n", x, start.Line, start.LineRune, start.Byte) + } + xcom := x.Comment() + for len(line) > 0 && start.Byte >= line[0].Start.Byte { + if debug { + fmt.Fprintf(os.Stderr, "ASSIGN LINE %q #%d\n", line[0].Token, line[0].Start.Byte) + } + xcom.Before = append(xcom.Before, line[0]) + line = line[1:] + } + } + + // Remaining line comments go at end of file. + in.file.After = append(in.file.After, line...) + + if debug { + for _, c := range suffix { + fmt.Fprintf(os.Stderr, "SUFFIX %q :%d:%d #%d\n", c.Token, c.Start.Line, c.Start.LineRune, c.Start.Byte) + } + } + + // Assign suffix comments to syntax immediately before. + for i := len(in.post) - 1; i >= 0; i-- { + x := in.post[i] + + start, end := x.Span() + if debug { + fmt.Fprintf(os.Stderr, "post %T :%d:%d #%d :%d:%d #%d\n", x, start.Line, start.LineRune, start.Byte, end.Line, end.LineRune, end.Byte) + } + + // Do not assign suffix comments to end of line block or whole file. + // Instead assign them to the last element inside. + switch x.(type) { + case *FileSyntax: + continue + } + + // Do not assign suffix comments to something that starts + // on an earlier line, so that in + // + // x ( y + // z ) // comment + // + // we assign the comment to z and not to x ( ... ). 
+ if start.Line != end.Line { + continue + } + xcom := x.Comment() + for len(suffix) > 0 && end.Byte <= suffix[len(suffix)-1].Start.Byte { + if debug { + fmt.Fprintf(os.Stderr, "ASSIGN SUFFIX %q #%d\n", suffix[len(suffix)-1].Token, suffix[len(suffix)-1].Start.Byte) + } + xcom.Suffix = append(xcom.Suffix, suffix[len(suffix)-1]) + suffix = suffix[:len(suffix)-1] + } + } + + // We assigned suffix comments in reverse. + // If multiple suffix comments were appended to the same + // expression node, they are now in reverse. Fix that. + for _, x := range in.post { + reverseComments(x.Comment().Suffix) + } + + // Remaining suffix comments go at beginning of file. + in.file.Before = append(in.file.Before, suffix...) +} + +// reverseComments reverses the []Comment list. +func reverseComments(list []Comment) { + for i, j := 0, len(list)-1; i < j; i, j = i+1, j-1 { + list[i], list[j] = list[j], list[i] + } +} + +func (in *input) parseFile() { + in.file = new(FileSyntax) + var cb *CommentBlock + for { + switch in.peek() { + case '\n': + in.lex() + if cb != nil { + in.file.Stmt = append(in.file.Stmt, cb) + cb = nil + } + case _COMMENT: + tok := in.lex() + if cb == nil { + cb = &CommentBlock{Start: tok.pos} + } + com := cb.Comment() + com.Before = append(com.Before, Comment{Start: tok.pos, Token: tok.text}) + case _EOF: + if cb != nil { + in.file.Stmt = append(in.file.Stmt, cb) + } + return + default: + in.parseStmt() + if cb != nil { + in.file.Stmt[len(in.file.Stmt)-1].Comment().Before = cb.Before + cb = nil + } + } + } +} + +func (in *input) parseStmt() { + tok := in.lex() + start := tok.pos + end := tok.endPos + tokens := []string{tok.text} + for { + tok := in.lex() + switch { + case tok.kind.isEOL(): + in.file.Stmt = append(in.file.Stmt, &Line{ + Start: start, + Token: tokens, + End: end, + }) + return + + case tok.kind == '(': + if next := in.peek(); next.isEOL() { + // Start of block: no more tokens on this line. + in.file.Stmt = append(in.file.Stmt, in.parseLineBlock(start, tokens, tok)) + return + } else if next == ')' { + rparen := in.lex() + if in.peek().isEOL() { + // Empty block. + in.lex() + in.file.Stmt = append(in.file.Stmt, &LineBlock{ + Start: start, + Token: tokens, + LParen: LParen{Pos: tok.pos}, + RParen: RParen{Pos: rparen.pos}, + }) + return + } + // '( )' in the middle of the line, not a block. + tokens = append(tokens, tok.text, rparen.text) + } else { + // '(' in the middle of the line, not a block. + tokens = append(tokens, tok.text) + } + + default: + tokens = append(tokens, tok.text) + end = tok.endPos + } + } +} + +func (in *input) parseLineBlock(start Position, token []string, lparen token) *LineBlock { + x := &LineBlock{ + Start: start, + Token: token, + LParen: LParen{Pos: lparen.pos}, + } + var comments []Comment + for { + switch in.peek() { + case _EOLCOMMENT: + // Suffix comment, will be attached later by assignComments. + in.lex() + case '\n': + // Blank line. Add an empty comment to preserve it. 
+ in.lex() + if len(comments) == 0 && len(x.Line) > 0 || len(comments) > 0 && comments[len(comments)-1].Token != "" { + comments = append(comments, Comment{}) + } + case _COMMENT: + tok := in.lex() + comments = append(comments, Comment{Start: tok.pos, Token: tok.text}) + case _EOF: + in.Error(fmt.Sprintf("syntax error (unterminated block started at %s:%d:%d)", in.filename, x.Start.Line, x.Start.LineRune)) + case ')': + rparen := in.lex() + x.RParen.Before = comments + x.RParen.Pos = rparen.pos + if !in.peek().isEOL() { + in.Error("syntax error (expected newline after closing paren)") + } + in.lex() + return x + default: + l := in.parseLine() + x.Line = append(x.Line, l) + l.Comment().Before = comments + comments = nil + } + } +} + +func (in *input) parseLine() *Line { + tok := in.lex() + if tok.kind.isEOL() { + in.Error("internal parse error: parseLine at end of line") + } + start := tok.pos + end := tok.endPos + tokens := []string{tok.text} + for { + tok := in.lex() + if tok.kind.isEOL() { + return &Line{ + Start: start, + Token: tokens, + End: end, + InBlock: true, + } + } + tokens = append(tokens, tok.text) + end = tok.endPos + } +} + +var ( + slashSlash = []byte("//") + moduleStr = []byte("module") +) + +// ModulePath returns the module path from the gomod file text. +// If it cannot find a module path, it returns an empty string. +// It is tolerant of unrelated problems in the go.mod file. +func ModulePath(mod []byte) string { + for len(mod) > 0 { + line := mod + mod = nil + if i := bytes.IndexByte(line, '\n'); i >= 0 { + line, mod = line[:i], line[i+1:] + } + if i := bytes.Index(line, slashSlash); i >= 0 { + line = line[:i] + } + line = bytes.TrimSpace(line) + if !bytes.HasPrefix(line, moduleStr) { + continue + } + line = line[len(moduleStr):] + n := len(line) + line = bytes.TrimSpace(line) + if len(line) == n || len(line) == 0 { + continue + } + + if line[0] == '"' || line[0] == '`' { + p, err := strconv.Unquote(string(line)) + if err != nil { + return "" // malformed quoted string or multiline module path + } + return p + } + + return string(line) + } + return "" // missing module path +} diff --git a/vendor/golang.org/x/mod/modfile/rule.go b/vendor/golang.org/x/mod/modfile/rule.go new file mode 100644 index 000000000..f8c938498 --- /dev/null +++ b/vendor/golang.org/x/mod/modfile/rule.go @@ -0,0 +1,1180 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package modfile implements a parser and formatter for go.mod files. +// +// The go.mod syntax is described in +// https://golang.org/cmd/go/#hdr-The_go_mod_file. +// +// The Parse and ParseLax functions both parse a go.mod file and return an +// abstract syntax tree. ParseLax ignores unknown statements and may be used to +// parse go.mod files that may have been developed with newer versions of Go. +// +// The File struct returned by Parse and ParseLax represent an abstract +// go.mod file. File has several methods like AddNewRequire and DropReplace +// that can be used to programmatically edit a file. +// +// The Format function formats a File back to a byte slice which can be +// written to a file. +package modfile + +import ( + "errors" + "fmt" + "path/filepath" + "sort" + "strconv" + "strings" + "unicode" + + "golang.org/x/mod/internal/lazyregexp" + "golang.org/x/mod/module" + "golang.org/x/mod/semver" +) + +// A File is the parsed, interpreted form of a go.mod file. 
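+//
+// A File is usually obtained from Parse and written back out with Format.
+// The following sketch is illustrative only; the file name, module path and
+// version are placeholders, and error handling is elided:
+//
+//	data, _ := os.ReadFile("go.mod")
+//	f, err := Parse("go.mod", data, nil)
+//	if err != nil {
+//		// handle the parse error
+//	}
+//	_ = f.AddRequire("example.com/other", "v1.2.3")
+//	f.Cleanup()
+//	out, _ := f.Format()
+//	_ = os.WriteFile("go.mod", out, 0644)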
+type File struct { + Module *Module + Go *Go + Require []*Require + Exclude []*Exclude + Replace []*Replace + Retract []*Retract + + Syntax *FileSyntax +} + +// A Module is the module statement. +type Module struct { + Mod module.Version + Syntax *Line +} + +// A Go is the go statement. +type Go struct { + Version string // "1.23" + Syntax *Line +} + +// A Require is a single require statement. +type Require struct { + Mod module.Version + Indirect bool // has "// indirect" comment + Syntax *Line +} + +// An Exclude is a single exclude statement. +type Exclude struct { + Mod module.Version + Syntax *Line +} + +// A Replace is a single replace statement. +type Replace struct { + Old module.Version + New module.Version + Syntax *Line +} + +// A Retract is a single retract statement. +type Retract struct { + VersionInterval + Rationale string + Syntax *Line +} + +// A VersionInterval represents a range of versions with upper and lower bounds. +// Intervals are closed: both bounds are included. When Low is equal to High, +// the interval may refer to a single version ('v1.2.3') or an interval +// ('[v1.2.3, v1.2.3]'); both have the same representation. +type VersionInterval struct { + Low, High string +} + +func (f *File) AddModuleStmt(path string) error { + if f.Syntax == nil { + f.Syntax = new(FileSyntax) + } + if f.Module == nil { + f.Module = &Module{ + Mod: module.Version{Path: path}, + Syntax: f.Syntax.addLine(nil, "module", AutoQuote(path)), + } + } else { + f.Module.Mod.Path = path + f.Syntax.updateLine(f.Module.Syntax, "module", AutoQuote(path)) + } + return nil +} + +func (f *File) AddComment(text string) { + if f.Syntax == nil { + f.Syntax = new(FileSyntax) + } + f.Syntax.Stmt = append(f.Syntax.Stmt, &CommentBlock{ + Comments: Comments{ + Before: []Comment{ + { + Token: text, + }, + }, + }, + }) +} + +type VersionFixer func(path, version string) (string, error) + +// errDontFix is returned by a VersionFixer to indicate the version should be +// left alone, even if it's not canonical. +var dontFixRetract VersionFixer = func(_, vers string) (string, error) { + return vers, nil +} + +// Parse parses the data, reported in errors as being from file, +// into a File struct. It applies fix, if non-nil, to canonicalize all module versions found. +func Parse(file string, data []byte, fix VersionFixer) (*File, error) { + return parseToFile(file, data, fix, true) +} + +// ParseLax is like Parse but ignores unknown statements. +// It is used when parsing go.mod files other than the main module, +// under the theory that most statement types we add in the future will +// only apply in the main module, like exclude and replace, +// and so we get better gradual deployments if old go commands +// simply ignore those statements when found in go.mod files +// in dependencies. +func ParseLax(file string, data []byte, fix VersionFixer) (*File, error) { + return parseToFile(file, data, fix, false) +} + +func parseToFile(file string, data []byte, fix VersionFixer, strict bool) (parsed *File, err error) { + fs, err := parse(file, data) + if err != nil { + return nil, err + } + f := &File{ + Syntax: fs, + } + var errs ErrorList + + // fix versions in retract directives after the file is parsed. + // We need the module path to fix versions, and it might be at the end. 
+ defer func() { + oldLen := len(errs) + f.fixRetract(fix, &errs) + if len(errs) > oldLen { + parsed, err = nil, errs + } + }() + + for _, x := range fs.Stmt { + switch x := x.(type) { + case *Line: + f.add(&errs, nil, x, x.Token[0], x.Token[1:], fix, strict) + + case *LineBlock: + if len(x.Token) > 1 { + if strict { + errs = append(errs, Error{ + Filename: file, + Pos: x.Start, + Err: fmt.Errorf("unknown block type: %s", strings.Join(x.Token, " ")), + }) + } + continue + } + switch x.Token[0] { + default: + if strict { + errs = append(errs, Error{ + Filename: file, + Pos: x.Start, + Err: fmt.Errorf("unknown block type: %s", strings.Join(x.Token, " ")), + }) + } + continue + case "module", "require", "exclude", "replace", "retract": + for _, l := range x.Line { + f.add(&errs, x, l, x.Token[0], l.Token, fix, strict) + } + } + } + } + + if len(errs) > 0 { + return nil, errs + } + return f, nil +} + +var GoVersionRE = lazyregexp.New(`^([1-9][0-9]*)\.(0|[1-9][0-9]*)$`) + +func (f *File) add(errs *ErrorList, block *LineBlock, line *Line, verb string, args []string, fix VersionFixer, strict bool) { + // If strict is false, this module is a dependency. + // We ignore all unknown directives as well as main-module-only + // directives like replace and exclude. It will work better for + // forward compatibility if we can depend on modules that have unknown + // statements (presumed relevant only when acting as the main module) + // and simply ignore those statements. + if !strict { + switch verb { + case "go", "module", "retract", "require": + // want these even for dependency go.mods + default: + return + } + } + + wrapModPathError := func(modPath string, err error) { + *errs = append(*errs, Error{ + Filename: f.Syntax.Name, + Pos: line.Start, + ModPath: modPath, + Verb: verb, + Err: err, + }) + } + wrapError := func(err error) { + *errs = append(*errs, Error{ + Filename: f.Syntax.Name, + Pos: line.Start, + Err: err, + }) + } + errorf := func(format string, args ...interface{}) { + wrapError(fmt.Errorf(format, args...)) + } + + switch verb { + default: + errorf("unknown directive: %s", verb) + + case "go": + if f.Go != nil { + errorf("repeated go statement") + return + } + if len(args) != 1 { + errorf("go directive expects exactly one argument") + return + } else if !GoVersionRE.MatchString(args[0]) { + errorf("invalid go version '%s': must match format 1.23", args[0]) + return + } + + f.Go = &Go{Syntax: line} + f.Go.Version = args[0] + + case "module": + if f.Module != nil { + errorf("repeated module statement") + return + } + f.Module = &Module{Syntax: line} + if len(args) != 1 { + errorf("usage: module module/path") + return + } + s, err := parseString(&args[0]) + if err != nil { + errorf("invalid quoted string: %v", err) + return + } + f.Module.Mod = module.Version{Path: s} + + case "require", "exclude": + if len(args) != 2 { + errorf("usage: %s module/path v1.2.3", verb) + return + } + s, err := parseString(&args[0]) + if err != nil { + errorf("invalid quoted string: %v", err) + return + } + v, err := parseVersion(verb, s, &args[1], fix) + if err != nil { + wrapError(err) + return + } + pathMajor, err := modulePathMajor(s) + if err != nil { + wrapError(err) + return + } + if err := module.CheckPathMajor(v, pathMajor); err != nil { + wrapModPathError(s, err) + return + } + if verb == "require" { + f.Require = append(f.Require, &Require{ + Mod: module.Version{Path: s, Version: v}, + Syntax: line, + Indirect: isIndirect(line), + }) + } else { + f.Exclude = append(f.Exclude, &Exclude{ + Mod: 
module.Version{Path: s, Version: v}, + Syntax: line, + }) + } + + case "replace": + arrow := 2 + if len(args) >= 2 && args[1] == "=>" { + arrow = 1 + } + if len(args) < arrow+2 || len(args) > arrow+3 || args[arrow] != "=>" { + errorf("usage: %s module/path [v1.2.3] => other/module v1.4\n\t or %s module/path [v1.2.3] => ../local/directory", verb, verb) + return + } + s, err := parseString(&args[0]) + if err != nil { + errorf("invalid quoted string: %v", err) + return + } + pathMajor, err := modulePathMajor(s) + if err != nil { + wrapModPathError(s, err) + return + } + var v string + if arrow == 2 { + v, err = parseVersion(verb, s, &args[1], fix) + if err != nil { + wrapError(err) + return + } + if err := module.CheckPathMajor(v, pathMajor); err != nil { + wrapModPathError(s, err) + return + } + } + ns, err := parseString(&args[arrow+1]) + if err != nil { + errorf("invalid quoted string: %v", err) + return + } + nv := "" + if len(args) == arrow+2 { + if !IsDirectoryPath(ns) { + errorf("replacement module without version must be directory path (rooted or starting with ./ or ../)") + return + } + if filepath.Separator == '/' && strings.Contains(ns, `\`) { + errorf("replacement directory appears to be Windows path (on a non-windows system)") + return + } + } + if len(args) == arrow+3 { + nv, err = parseVersion(verb, ns, &args[arrow+2], fix) + if err != nil { + wrapError(err) + return + } + if IsDirectoryPath(ns) { + errorf("replacement module directory path %q cannot have version", ns) + return + } + } + f.Replace = append(f.Replace, &Replace{ + Old: module.Version{Path: s, Version: v}, + New: module.Version{Path: ns, Version: nv}, + Syntax: line, + }) + + case "retract": + rationale := parseRetractRationale(block, line) + vi, err := parseVersionInterval(verb, "", &args, dontFixRetract) + if err != nil { + if strict { + wrapError(err) + return + } else { + // Only report errors parsing intervals in the main module. We may + // support additional syntax in the future, such as open and half-open + // intervals. Those can't be supported now, because they break the + // go.mod parser, even in lax mode. + return + } + } + if len(args) > 0 && strict { + // In the future, there may be additional information after the version. + errorf("unexpected token after version: %q", args[0]) + return + } + retract := &Retract{ + VersionInterval: vi, + Rationale: rationale, + Syntax: line, + } + f.Retract = append(f.Retract, retract) + } +} + +// fixRetract applies fix to each retract directive in f, appending any errors +// to errs. +// +// Most versions are fixed as we parse the file, but for retract directives, +// the relevant module path is the one specified with the module directive, +// and that might appear at the end of the file (or not at all). 
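+//
+// For example, in a go.mod like the following (hypothetical contents), the
+// retract directive is read before the module path is known, so its version
+// can only be canonicalized after the whole file has been parsed:
+//
+//	retract v1.0.0 // published by accident
+//	module example.com/m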
+func (f *File) fixRetract(fix VersionFixer, errs *ErrorList) { + if fix == nil { + return + } + path := "" + if f.Module != nil { + path = f.Module.Mod.Path + } + var r *Retract + wrapError := func(err error) { + *errs = append(*errs, Error{ + Filename: f.Syntax.Name, + Pos: r.Syntax.Start, + Err: err, + }) + } + + for _, r = range f.Retract { + if path == "" { + wrapError(errors.New("no module directive found, so retract cannot be used")) + return // only print the first one of these + } + + args := r.Syntax.Token + if args[0] == "retract" { + args = args[1:] + } + vi, err := parseVersionInterval("retract", path, &args, fix) + if err != nil { + wrapError(err) + } + r.VersionInterval = vi + } +} + +// isIndirect reports whether line has a "// indirect" comment, +// meaning it is in go.mod only for its effect on indirect dependencies, +// so that it can be dropped entirely once the effective version of the +// indirect dependency reaches the given minimum version. +func isIndirect(line *Line) bool { + if len(line.Suffix) == 0 { + return false + } + f := strings.Fields(strings.TrimPrefix(line.Suffix[0].Token, string(slashSlash))) + return (len(f) == 1 && f[0] == "indirect" || len(f) > 1 && f[0] == "indirect;") +} + +// setIndirect sets line to have (or not have) a "// indirect" comment. +func setIndirect(line *Line, indirect bool) { + if isIndirect(line) == indirect { + return + } + if indirect { + // Adding comment. + if len(line.Suffix) == 0 { + // New comment. + line.Suffix = []Comment{{Token: "// indirect", Suffix: true}} + return + } + + com := &line.Suffix[0] + text := strings.TrimSpace(strings.TrimPrefix(com.Token, string(slashSlash))) + if text == "" { + // Empty comment. + com.Token = "// indirect" + return + } + + // Insert at beginning of existing comment. + com.Token = "// indirect; " + text + return + } + + // Removing comment. + f := strings.Fields(line.Suffix[0].Token) + if len(f) == 2 { + // Remove whole comment. + line.Suffix = nil + return + } + + // Remove comment prefix. + com := &line.Suffix[0] + i := strings.Index(com.Token, "indirect;") + com.Token = "//" + com.Token[i+len("indirect;"):] +} + +// IsDirectoryPath reports whether the given path should be interpreted +// as a directory path. Just like on the go command line, relative paths +// and rooted paths are directory paths; the rest are module paths. +func IsDirectoryPath(ns string) bool { + // Because go.mod files can move from one system to another, + // we check all known path syntaxes, both Unix and Windows. + return strings.HasPrefix(ns, "./") || strings.HasPrefix(ns, "../") || strings.HasPrefix(ns, "/") || + strings.HasPrefix(ns, `.\`) || strings.HasPrefix(ns, `..\`) || strings.HasPrefix(ns, `\`) || + len(ns) >= 2 && ('A' <= ns[0] && ns[0] <= 'Z' || 'a' <= ns[0] && ns[0] <= 'z') && ns[1] == ':' +} + +// MustQuote reports whether s must be quoted in order to appear as +// a single token in a go.mod line. +func MustQuote(s string) bool { + for _, r := range s { + switch r { + case ' ', '"', '\'', '`': + return true + + case '(', ')', '[', ']', '{', '}', ',': + if len(s) > 1 { + return true + } + + default: + if !unicode.IsPrint(r) { + return true + } + } + } + return s == "" || strings.Contains(s, "//") || strings.Contains(s, "/*") +} + +// AutoQuote returns s or, if quoting is required for s to appear in a go.mod, +// the quotation of s. 
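+//
+// For instance (illustrative inputs, not taken from the package's tests):
+//
+//	AutoQuote("golang.org/x/mod")      // returned unchanged, no quoting needed
+//	AutoQuote("example.com/has space") // returned as the quoted string "example.com/has space"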
+func AutoQuote(s string) string { + if MustQuote(s) { + return strconv.Quote(s) + } + return s +} + +func parseVersionInterval(verb string, path string, args *[]string, fix VersionFixer) (VersionInterval, error) { + toks := *args + if len(toks) == 0 || toks[0] == "(" { + return VersionInterval{}, fmt.Errorf("expected '[' or version") + } + if toks[0] != "[" { + v, err := parseVersion(verb, path, &toks[0], fix) + if err != nil { + return VersionInterval{}, err + } + *args = toks[1:] + return VersionInterval{Low: v, High: v}, nil + } + toks = toks[1:] + + if len(toks) == 0 { + return VersionInterval{}, fmt.Errorf("expected version after '['") + } + low, err := parseVersion(verb, path, &toks[0], fix) + if err != nil { + return VersionInterval{}, err + } + toks = toks[1:] + + if len(toks) == 0 || toks[0] != "," { + return VersionInterval{}, fmt.Errorf("expected ',' after version") + } + toks = toks[1:] + + if len(toks) == 0 { + return VersionInterval{}, fmt.Errorf("expected version after ','") + } + high, err := parseVersion(verb, path, &toks[0], fix) + if err != nil { + return VersionInterval{}, err + } + toks = toks[1:] + + if len(toks) == 0 || toks[0] != "]" { + return VersionInterval{}, fmt.Errorf("expected ']' after version") + } + toks = toks[1:] + + *args = toks + return VersionInterval{Low: low, High: high}, nil +} + +func parseString(s *string) (string, error) { + t := *s + if strings.HasPrefix(t, `"`) { + var err error + if t, err = strconv.Unquote(t); err != nil { + return "", err + } + } else if strings.ContainsAny(t, "\"'`") { + // Other quotes are reserved both for possible future expansion + // and to avoid confusion. For example if someone types 'x' + // we want that to be a syntax error and not a literal x in literal quotation marks. + return "", fmt.Errorf("unquoted string cannot contain quote") + } + *s = AutoQuote(t) + return t, nil +} + +// parseRetractRationale extracts the rationale for a retract directive from the +// surrounding comments. If the line does not have comments and is part of a +// block that does have comments, the block's comments are used. +func parseRetractRationale(block *LineBlock, line *Line) string { + comments := line.Comment() + if block != nil && len(comments.Before) == 0 && len(comments.Suffix) == 0 { + comments = block.Comment() + } + groups := [][]Comment{comments.Before, comments.Suffix} + var lines []string + for _, g := range groups { + for _, c := range g { + if !strings.HasPrefix(c.Token, "//") { + continue // blank line + } + lines = append(lines, strings.TrimSpace(strings.TrimPrefix(c.Token, "//"))) + } + } + return strings.Join(lines, "\n") +} + +type ErrorList []Error + +func (e ErrorList) Error() string { + errStrs := make([]string, len(e)) + for i, err := range e { + errStrs[i] = err.Error() + } + return strings.Join(errStrs, "\n") +} + +type Error struct { + Filename string + Pos Position + Verb string + ModPath string + Err error +} + +func (e *Error) Error() string { + var pos string + if e.Pos.LineRune > 1 { + // Don't print LineRune if it's 1 (beginning of line). + // It's always 1 except in scanner errors, which are rare. 
+ pos = fmt.Sprintf("%s:%d:%d: ", e.Filename, e.Pos.Line, e.Pos.LineRune) + } else if e.Pos.Line > 0 { + pos = fmt.Sprintf("%s:%d: ", e.Filename, e.Pos.Line) + } else if e.Filename != "" { + pos = fmt.Sprintf("%s: ", e.Filename) + } + + var directive string + if e.ModPath != "" { + directive = fmt.Sprintf("%s %s: ", e.Verb, e.ModPath) + } else if e.Verb != "" { + directive = fmt.Sprintf("%s: ", e.Verb) + } + + return pos + directive + e.Err.Error() +} + +func (e *Error) Unwrap() error { return e.Err } + +func parseVersion(verb string, path string, s *string, fix VersionFixer) (string, error) { + t, err := parseString(s) + if err != nil { + return "", &Error{ + Verb: verb, + ModPath: path, + Err: &module.InvalidVersionError{ + Version: *s, + Err: err, + }, + } + } + if fix != nil { + fixed, err := fix(path, t) + if err != nil { + if err, ok := err.(*module.ModuleError); ok { + return "", &Error{ + Verb: verb, + ModPath: path, + Err: err.Err, + } + } + return "", err + } + t = fixed + } else { + cv := module.CanonicalVersion(t) + if cv == "" { + return "", &Error{ + Verb: verb, + ModPath: path, + Err: &module.InvalidVersionError{ + Version: t, + Err: errors.New("must be of the form v1.2.3"), + }, + } + } + t = cv + } + *s = t + return *s, nil +} + +func modulePathMajor(path string) (string, error) { + _, major, ok := module.SplitPathVersion(path) + if !ok { + return "", fmt.Errorf("invalid module path") + } + return major, nil +} + +func (f *File) Format() ([]byte, error) { + return Format(f.Syntax), nil +} + +// Cleanup cleans up the file f after any edit operations. +// To avoid quadratic behavior, modifications like DropRequire +// clear the entry but do not remove it from the slice. +// Cleanup cleans out all the cleared entries. +func (f *File) Cleanup() { + w := 0 + for _, r := range f.Require { + if r.Mod.Path != "" { + f.Require[w] = r + w++ + } + } + f.Require = f.Require[:w] + + w = 0 + for _, x := range f.Exclude { + if x.Mod.Path != "" { + f.Exclude[w] = x + w++ + } + } + f.Exclude = f.Exclude[:w] + + w = 0 + for _, r := range f.Replace { + if r.Old.Path != "" { + f.Replace[w] = r + w++ + } + } + f.Replace = f.Replace[:w] + + w = 0 + for _, r := range f.Retract { + if r.Low != "" || r.High != "" { + f.Retract[w] = r + w++ + } + } + f.Retract = f.Retract[:w] + + f.Syntax.Cleanup() +} + +func (f *File) AddGoStmt(version string) error { + if !GoVersionRE.MatchString(version) { + return fmt.Errorf("invalid language version string %q", version) + } + if f.Go == nil { + var hint Expr + if f.Module != nil && f.Module.Syntax != nil { + hint = f.Module.Syntax + } + f.Go = &Go{ + Version: version, + Syntax: f.Syntax.addLine(hint, "go", version), + } + } else { + f.Go.Version = version + f.Syntax.updateLine(f.Go.Syntax, "go", version) + } + return nil +} + +func (f *File) AddRequire(path, vers string) error { + need := true + for _, r := range f.Require { + if r.Mod.Path == path { + if need { + r.Mod.Version = vers + f.Syntax.updateLine(r.Syntax, "require", AutoQuote(path), vers) + need = false + } else { + f.Syntax.removeLine(r.Syntax) + *r = Require{} + } + } + } + + if need { + f.AddNewRequire(path, vers, false) + } + return nil +} + +func (f *File) AddNewRequire(path, vers string, indirect bool) { + line := f.Syntax.addLine(nil, "require", AutoQuote(path), vers) + setIndirect(line, indirect) + f.Require = append(f.Require, &Require{module.Version{Path: path, Version: vers}, indirect, line}) +} + +func (f *File) SetRequire(req []*Require) { + need := make(map[string]string) + indirect := 
make(map[string]bool) + for _, r := range req { + need[r.Mod.Path] = r.Mod.Version + indirect[r.Mod.Path] = r.Indirect + } + + for _, r := range f.Require { + if v, ok := need[r.Mod.Path]; ok { + r.Mod.Version = v + r.Indirect = indirect[r.Mod.Path] + } else { + *r = Require{} + } + } + + var newStmts []Expr + for _, stmt := range f.Syntax.Stmt { + switch stmt := stmt.(type) { + case *LineBlock: + if len(stmt.Token) > 0 && stmt.Token[0] == "require" { + var newLines []*Line + for _, line := range stmt.Line { + if p, err := parseString(&line.Token[0]); err == nil && need[p] != "" { + if len(line.Comments.Before) == 1 && len(line.Comments.Before[0].Token) == 0 { + line.Comments.Before = line.Comments.Before[:0] + } + line.Token[1] = need[p] + delete(need, p) + setIndirect(line, indirect[p]) + newLines = append(newLines, line) + } + } + if len(newLines) == 0 { + continue // drop stmt + } + stmt.Line = newLines + } + + case *Line: + if len(stmt.Token) > 0 && stmt.Token[0] == "require" { + if p, err := parseString(&stmt.Token[1]); err == nil && need[p] != "" { + stmt.Token[2] = need[p] + delete(need, p) + setIndirect(stmt, indirect[p]) + } else { + continue // drop stmt + } + } + } + newStmts = append(newStmts, stmt) + } + f.Syntax.Stmt = newStmts + + for path, vers := range need { + f.AddNewRequire(path, vers, indirect[path]) + } + f.SortBlocks() +} + +func (f *File) DropRequire(path string) error { + for _, r := range f.Require { + if r.Mod.Path == path { + f.Syntax.removeLine(r.Syntax) + *r = Require{} + } + } + return nil +} + +// AddExclude adds a exclude statement to the mod file. Errors if the provided +// version is not a canonical version string +func (f *File) AddExclude(path, vers string) error { + if err := checkCanonicalVersion(path, vers); err != nil { + return err + } + + var hint *Line + for _, x := range f.Exclude { + if x.Mod.Path == path && x.Mod.Version == vers { + return nil + } + if x.Mod.Path == path { + hint = x.Syntax + } + } + + f.Exclude = append(f.Exclude, &Exclude{Mod: module.Version{Path: path, Version: vers}, Syntax: f.Syntax.addLine(hint, "exclude", AutoQuote(path), vers)}) + return nil +} + +func (f *File) DropExclude(path, vers string) error { + for _, x := range f.Exclude { + if x.Mod.Path == path && x.Mod.Version == vers { + f.Syntax.removeLine(x.Syntax) + *x = Exclude{} + } + } + return nil +} + +func (f *File) AddReplace(oldPath, oldVers, newPath, newVers string) error { + need := true + old := module.Version{Path: oldPath, Version: oldVers} + new := module.Version{Path: newPath, Version: newVers} + tokens := []string{"replace", AutoQuote(oldPath)} + if oldVers != "" { + tokens = append(tokens, oldVers) + } + tokens = append(tokens, "=>", AutoQuote(newPath)) + if newVers != "" { + tokens = append(tokens, newVers) + } + + var hint *Line + for _, r := range f.Replace { + if r.Old.Path == oldPath && (oldVers == "" || r.Old.Version == oldVers) { + if need { + // Found replacement for old; update to use new. + r.New = new + f.Syntax.updateLine(r.Syntax, tokens...) + need = false + continue + } + // Already added; delete other replacements for same. 
+ f.Syntax.removeLine(r.Syntax) + *r = Replace{} + } + if r.Old.Path == oldPath { + hint = r.Syntax + } + } + if need { + f.Replace = append(f.Replace, &Replace{Old: old, New: new, Syntax: f.Syntax.addLine(hint, tokens...)}) + } + return nil +} + +func (f *File) DropReplace(oldPath, oldVers string) error { + for _, r := range f.Replace { + if r.Old.Path == oldPath && r.Old.Version == oldVers { + f.Syntax.removeLine(r.Syntax) + *r = Replace{} + } + } + return nil +} + +// AddRetract adds a retract statement to the mod file. Errors if the provided +// version interval does not consist of canonical version strings +func (f *File) AddRetract(vi VersionInterval, rationale string) error { + var path string + if f.Module != nil { + path = f.Module.Mod.Path + } + if err := checkCanonicalVersion(path, vi.High); err != nil { + return err + } + if err := checkCanonicalVersion(path, vi.Low); err != nil { + return err + } + + r := &Retract{ + VersionInterval: vi, + } + if vi.Low == vi.High { + r.Syntax = f.Syntax.addLine(nil, "retract", AutoQuote(vi.Low)) + } else { + r.Syntax = f.Syntax.addLine(nil, "retract", "[", AutoQuote(vi.Low), ",", AutoQuote(vi.High), "]") + } + if rationale != "" { + for _, line := range strings.Split(rationale, "\n") { + com := Comment{Token: "// " + line} + r.Syntax.Comment().Before = append(r.Syntax.Comment().Before, com) + } + } + return nil +} + +func (f *File) DropRetract(vi VersionInterval) error { + for _, r := range f.Retract { + if r.VersionInterval == vi { + f.Syntax.removeLine(r.Syntax) + *r = Retract{} + } + } + return nil +} + +func (f *File) SortBlocks() { + f.removeDups() // otherwise sorting is unsafe + + for _, stmt := range f.Syntax.Stmt { + block, ok := stmt.(*LineBlock) + if !ok { + continue + } + less := lineLess + if block.Token[0] == "retract" { + less = lineRetractLess + } + sort.SliceStable(block.Line, func(i, j int) bool { + return less(block.Line[i], block.Line[j]) + }) + } +} + +// removeDups removes duplicate exclude and replace directives. +// +// Earlier exclude directives take priority. +// +// Later replace directives take priority. +// +// require directives are not de-duplicated. That's left up to higher-level +// logic (MVS). +// +// retract directives are not de-duplicated since comments are +// meaningful, and versions may be retracted multiple times. +func (f *File) removeDups() { + kill := make(map[*Line]bool) + + // Remove duplicate excludes. + haveExclude := make(map[module.Version]bool) + for _, x := range f.Exclude { + if haveExclude[x.Mod] { + kill[x.Syntax] = true + continue + } + haveExclude[x.Mod] = true + } + var excl []*Exclude + for _, x := range f.Exclude { + if !kill[x.Syntax] { + excl = append(excl, x) + } + } + f.Exclude = excl + + // Remove duplicate replacements. + // Later replacements take priority over earlier ones. + haveReplace := make(map[module.Version]bool) + for i := len(f.Replace) - 1; i >= 0; i-- { + x := f.Replace[i] + if haveReplace[x.Old] { + kill[x.Syntax] = true + continue + } + haveReplace[x.Old] = true + } + var repl []*Replace + for _, x := range f.Replace { + if !kill[x.Syntax] { + repl = append(repl, x) + } + } + f.Replace = repl + + // Duplicate require and retract directives are not removed. + + // Drop killed statements from the syntax tree. 
+ var stmts []Expr + for _, stmt := range f.Syntax.Stmt { + switch stmt := stmt.(type) { + case *Line: + if kill[stmt] { + continue + } + case *LineBlock: + var lines []*Line + for _, line := range stmt.Line { + if !kill[line] { + lines = append(lines, line) + } + } + stmt.Line = lines + if len(lines) == 0 { + continue + } + } + stmts = append(stmts, stmt) + } + f.Syntax.Stmt = stmts +} + +// lineLess returns whether li should be sorted before lj. It sorts +// lexicographically without assigning any special meaning to tokens. +func lineLess(li, lj *Line) bool { + for k := 0; k < len(li.Token) && k < len(lj.Token); k++ { + if li.Token[k] != lj.Token[k] { + return li.Token[k] < lj.Token[k] + } + } + return len(li.Token) < len(lj.Token) +} + +// lineRetractLess returns whether li should be sorted before lj for lines in +// a "retract" block. It treats each line as a version interval. Single versions +// are compared as if they were intervals with the same low and high version. +// Intervals are sorted in descending order, first by low version, then by +// high version, using semver.Compare. +func lineRetractLess(li, lj *Line) bool { + interval := func(l *Line) VersionInterval { + if len(l.Token) == 1 { + return VersionInterval{Low: l.Token[0], High: l.Token[0]} + } else if len(l.Token) == 5 && l.Token[0] == "[" && l.Token[2] == "," && l.Token[4] == "]" { + return VersionInterval{Low: l.Token[1], High: l.Token[3]} + } else { + // Line in unknown format. Treat as an invalid version. + return VersionInterval{} + } + } + vii := interval(li) + vij := interval(lj) + if cmp := semver.Compare(vii.Low, vij.Low); cmp != 0 { + return cmp > 0 + } + return semver.Compare(vii.High, vij.High) > 0 +} + +// checkCanonicalVersion returns a non-nil error if vers is not a canonical +// version string or does not match the major version of path. +// +// If path is non-empty, the error text suggests a format with a major version +// corresponding to the path. +func checkCanonicalVersion(path, vers string) error { + _, pathMajor, pathMajorOk := module.SplitPathVersion(path) + + if vers == "" || vers != module.CanonicalVersion(vers) { + if pathMajor == "" { + return &module.InvalidVersionError{ + Version: vers, + Err: fmt.Errorf("must be of the form v1.2.3"), + } + } + return &module.InvalidVersionError{ + Version: vers, + Err: fmt.Errorf("must be of the form %s.2.3", module.PathMajorPrefix(pathMajor)), + } + } + + if pathMajorOk { + if err := module.CheckPathMajor(vers, pathMajor); err != nil { + if pathMajor == "" { + // In this context, the user probably wrote "v2.3.4" when they meant + // "v2.3.4+incompatible". Suggest that instead of "v0 or v1". + return &module.InvalidVersionError{ + Version: vers, + Err: fmt.Errorf("should be %s+incompatible (or module %s/%v)", vers, path, semver.Major(vers)), + } + } + return err + } + } + + return nil +} diff --git a/vendor/golang.org/x/mod/module/module.go b/vendor/golang.org/x/mod/module/module.go new file mode 100644 index 000000000..0e0301483 --- /dev/null +++ b/vendor/golang.org/x/mod/module/module.go @@ -0,0 +1,822 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package module defines the module.Version type along with support code. 
+// +// The module.Version type is a simple Path, Version pair: +// +// type Version struct { +// Path string +// Version string +// } +// +// There are no restrictions imposed directly by use of this structure, +// but additional checking functions, most notably Check, verify that +// a particular path, version pair is valid. +// +// Escaped Paths +// +// Module paths appear as substrings of file system paths +// (in the download cache) and of web server URLs in the proxy protocol. +// In general we cannot rely on file systems to be case-sensitive, +// nor can we rely on web servers, since they read from file systems. +// That is, we cannot rely on the file system to keep rsc.io/QUOTE +// and rsc.io/quote separate. Windows and macOS don't. +// Instead, we must never require two different casings of a file path. +// Because we want the download cache to match the proxy protocol, +// and because we want the proxy protocol to be possible to serve +// from a tree of static files (which might be stored on a case-insensitive +// file system), the proxy protocol must never require two different casings +// of a URL path either. +// +// One possibility would be to make the escaped form be the lowercase +// hexadecimal encoding of the actual path bytes. This would avoid ever +// needing different casings of a file path, but it would be fairly illegible +// to most programmers when those paths appeared in the file system +// (including in file paths in compiler errors and stack traces) +// in web server logs, and so on. Instead, we want a safe escaped form that +// leaves most paths unaltered. +// +// The safe escaped form is to replace every uppercase letter +// with an exclamation mark followed by the letter's lowercase equivalent. +// +// For example, +// +// github.com/Azure/azure-sdk-for-go -> github.com/!azure/azure-sdk-for-go. +// github.com/GoogleCloudPlatform/cloudsql-proxy -> github.com/!google!cloud!platform/cloudsql-proxy +// github.com/Sirupsen/logrus -> github.com/!sirupsen/logrus. +// +// Import paths that avoid upper-case letters are left unchanged. +// Note that because import paths are ASCII-only and avoid various +// problematic punctuation (like : < and >), the escaped form is also ASCII-only +// and avoids the same problematic punctuation. +// +// Import paths have never allowed exclamation marks, so there is no +// need to define how to escape a literal !. +// +// Unicode Restrictions +// +// Today, paths are disallowed from using Unicode. +// +// Although paths are currently disallowed from using Unicode, +// we would like at some point to allow Unicode letters as well, to assume that +// file systems and URLs are Unicode-safe (storing UTF-8), and apply +// the !-for-uppercase convention for escaping them in the file system. +// But there are at least two subtle considerations. +// +// First, note that not all case-fold equivalent distinct runes +// form an upper/lower pair. +// For example, U+004B ('K'), U+006B ('k'), and U+212A ('K' for Kelvin) +// are three distinct runes that case-fold to each other. +// When we do add Unicode letters, we must not assume that upper/lower +// are the only case-equivalent pairs. +// Perhaps the Kelvin symbol would be disallowed entirely, for example. +// Or perhaps it would escape as "!!k", or perhaps as "(212A)". 
+// +// Second, it would be nice to allow Unicode marks as well as letters, +// but marks include combining marks, and then we must deal not +// only with case folding but also normalization: both U+00E9 ('é') +// and U+0065 U+0301 ('e' followed by combining acute accent) +// look the same on the page and are treated by some file systems +// as the same path. If we do allow Unicode marks in paths, there +// must be some kind of normalization to allow only one canonical +// encoding of any character used in an import path. +package module + +// IMPORTANT NOTE +// +// This file essentially defines the set of valid import paths for the go command. +// There are many subtle considerations, including Unicode ambiguity, +// security, network, and file system representations. +// +// This file also defines the set of valid module path and version combinations, +// another topic with many subtle considerations. +// +// Changes to the semantics in this file require approval from rsc. + +import ( + "fmt" + "path" + "sort" + "strings" + "unicode" + "unicode/utf8" + + "golang.org/x/mod/semver" + errors "golang.org/x/xerrors" +) + +// A Version (for clients, a module.Version) is defined by a module path and version pair. +// These are stored in their plain (unescaped) form. +type Version struct { + // Path is a module path, like "golang.org/x/text" or "rsc.io/quote/v2". + Path string + + // Version is usually a semantic version in canonical form. + // There are three exceptions to this general rule. + // First, the top-level target of a build has no specific version + // and uses Version = "". + // Second, during MVS calculations the version "none" is used + // to represent the decision to take no version of a given module. + // Third, filesystem paths found in "replace" directives are + // represented by a path with an empty version. + Version string `json:",omitempty"` +} + +// String returns a representation of the Version suitable for logging +// (Path@Version, or just Path if Version is empty). +func (m Version) String() string { + if m.Version == "" { + return m.Path + } + return m.Path + "@" + m.Version +} + +// A ModuleError indicates an error specific to a module. +type ModuleError struct { + Path string + Version string + Err error +} + +// VersionError returns a ModuleError derived from a Version and error, +// or err itself if it is already such an error. +func VersionError(v Version, err error) error { + var mErr *ModuleError + if errors.As(err, &mErr) && mErr.Path == v.Path && mErr.Version == v.Version { + return err + } + return &ModuleError{ + Path: v.Path, + Version: v.Version, + Err: err, + } +} + +func (e *ModuleError) Error() string { + if v, ok := e.Err.(*InvalidVersionError); ok { + return fmt.Sprintf("%s@%s: invalid %s: %v", e.Path, v.Version, v.noun(), v.Err) + } + if e.Version != "" { + return fmt.Sprintf("%s@%s: %v", e.Path, e.Version, e.Err) + } + return fmt.Sprintf("module %s: %v", e.Path, e.Err) +} + +func (e *ModuleError) Unwrap() error { return e.Err } + +// An InvalidVersionError indicates an error specific to a version, with the +// module path unknown or specified externally. +// +// A ModuleError may wrap an InvalidVersionError, but an InvalidVersionError +// must not wrap a ModuleError. +type InvalidVersionError struct { + Version string + Pseudo bool + Err error +} + +// noun returns either "version" or "pseudo-version", depending on whether +// e.Version is a pseudo-version. 
+func (e *InvalidVersionError) noun() string {
+	if e.Pseudo {
+		return "pseudo-version"
+	}
+	return "version"
+}
+
+func (e *InvalidVersionError) Error() string {
+	return fmt.Sprintf("%s %q invalid: %s", e.noun(), e.Version, e.Err)
+}
+
+func (e *InvalidVersionError) Unwrap() error { return e.Err }
+
+// Check checks that a given module path, version pair is valid.
+// In addition to the path being a valid module path
+// and the version being a valid semantic version,
+// the two must correspond.
+// For example, the path "yaml/v2" only corresponds to
+// semantic versions beginning with "v2.".
+func Check(path, version string) error {
+	if err := CheckPath(path); err != nil {
+		return err
+	}
+	if !semver.IsValid(version) {
+		return &ModuleError{
+			Path: path,
+			Err:  &InvalidVersionError{Version: version, Err: errors.New("not a semantic version")},
+		}
+	}
+	_, pathMajor, _ := SplitPathVersion(path)
+	if err := CheckPathMajor(version, pathMajor); err != nil {
+		return &ModuleError{Path: path, Err: err}
+	}
+	return nil
+}
+
+// firstPathOK reports whether r can appear in the first element of a module path.
+// The first element of the path must be an LDH domain name, at least for now.
+// To avoid case ambiguity, the domain name must be entirely lower case.
+func firstPathOK(r rune) bool {
+	return r == '-' || r == '.' ||
+		'0' <= r && r <= '9' ||
+		'a' <= r && r <= 'z'
+}
+
+// modPathOK reports whether r can appear in a module path element.
+// Paths can be ASCII letters, ASCII digits, and limited ASCII punctuation: - . _ and ~.
+//
+// This matches what "go get" has historically recognized in import paths,
+// and avoids confusing sequences like '%20' or '+' that would change meaning
+// if used in a URL.
+//
+// TODO(rsc): We would like to allow Unicode letters, but that requires additional
+// care in the safe encoding (see "escaped paths" above).
+func modPathOK(r rune) bool {
+	if r < utf8.RuneSelf {
+		return r == '-' || r == '.' || r == '_' || r == '~' ||
+			'0' <= r && r <= '9' ||
+			'A' <= r && r <= 'Z' ||
+			'a' <= r && r <= 'z'
+	}
+	return false
+}
+
+// importPathOK reports whether r can appear in a package import path element.
+//
+// Import paths are intermediate between module paths and file paths: we
+// disallow characters that would be confusing or ambiguous as arguments to
+// 'go get' (such as '@' and ' ' ), but allow certain characters that are
+// otherwise-unambiguous on the command line and historically used for some
+// binary names (such as '++' as a suffix for compiler binaries and wrappers).
+func importPathOK(r rune) bool {
+	return modPathOK(r) || r == '+'
+}
+
+// fileNameOK reports whether r can appear in a file name.
+// For now we allow all Unicode letters but otherwise limit to pathOK plus a few more punctuation characters.
+// If we expand the set of allowed characters here, we have to
+// work harder at detecting potential case-folding and normalization collisions.
+// See note about "escaped paths" above.
+func fileNameOK(r rune) bool {
+	if r < utf8.RuneSelf {
+		// Entire set of ASCII punctuation, from which we remove characters:
+		// ! " # $ % & ' ( ) * + , - . / : ; < = > ? @ [ \ ] ^ _ ` { | } ~
+		// We disallow some shell special characters: " ' * < > ? ` |
+		// (Note that some of those are disallowed by the Windows file system as well.)
+		// We also disallow path separators / : and \ (fileNameOK is only called on path element characters).
+		// We allow spaces (U+0020) in file names.
+ const allowed = "!#$%&()+,-.=@[]^_{}~ " + if '0' <= r && r <= '9' || 'A' <= r && r <= 'Z' || 'a' <= r && r <= 'z' { + return true + } + for i := 0; i < len(allowed); i++ { + if rune(allowed[i]) == r { + return true + } + } + return false + } + // It may be OK to add more ASCII punctuation here, but only carefully. + // For example Windows disallows < > \, and macOS disallows :, so we must not allow those. + return unicode.IsLetter(r) +} + +// CheckPath checks that a module path is valid. +// A valid module path is a valid import path, as checked by CheckImportPath, +// with three additional constraints. +// First, the leading path element (up to the first slash, if any), +// by convention a domain name, must contain only lower-case ASCII letters, +// ASCII digits, dots (U+002E), and dashes (U+002D); +// it must contain at least one dot and cannot start with a dash. +// Second, for a final path element of the form /vN, where N looks numeric +// (ASCII digits and dots) must not begin with a leading zero, must not be /v1, +// and must not contain any dots. For paths beginning with "gopkg.in/", +// this second requirement is replaced by a requirement that the path +// follow the gopkg.in server's conventions. +// Third, no path element may begin with a dot. +func CheckPath(path string) error { + if err := checkPath(path, modulePath); err != nil { + return fmt.Errorf("malformed module path %q: %v", path, err) + } + i := strings.Index(path, "/") + if i < 0 { + i = len(path) + } + if i == 0 { + return fmt.Errorf("malformed module path %q: leading slash", path) + } + if !strings.Contains(path[:i], ".") { + return fmt.Errorf("malformed module path %q: missing dot in first path element", path) + } + if path[0] == '-' { + return fmt.Errorf("malformed module path %q: leading dash in first path element", path) + } + for _, r := range path[:i] { + if !firstPathOK(r) { + return fmt.Errorf("malformed module path %q: invalid char %q in first path element", path, r) + } + } + if _, _, ok := SplitPathVersion(path); !ok { + return fmt.Errorf("malformed module path %q: invalid version", path) + } + return nil +} + +// CheckImportPath checks that an import path is valid. +// +// A valid import path consists of one or more valid path elements +// separated by slashes (U+002F). (It must not begin with nor end in a slash.) +// +// A valid path element is a non-empty string made up of +// ASCII letters, ASCII digits, and limited ASCII punctuation: - . _ and ~. +// It must not end with a dot (U+002E), nor contain two dots in a row. +// +// The element prefix up to the first dot must not be a reserved file name +// on Windows, regardless of case (CON, com1, NuL, and so on). The element +// must not have a suffix of a tilde followed by one or more ASCII digits +// (to exclude paths elements that look like Windows short-names). +// +// CheckImportPath may be less restrictive in the future, but see the +// top-level package documentation for additional information about +// subtleties of Unicode. +func CheckImportPath(path string) error { + if err := checkPath(path, importPath); err != nil { + return fmt.Errorf("malformed import path %q: %v", path, err) + } + return nil +} + +// pathKind indicates what kind of path we're checking. Module paths, +// import paths, and file paths have different restrictions. +type pathKind int + +const ( + modulePath pathKind = iota + importPath + filePath +) + +// checkPath checks that a general path is valid. +// It returns an error describing why but not mentioning path. 
+// Because these checks apply to both module paths and import paths, +// the caller is expected to add the "malformed ___ path %q: " prefix. +// fileName indicates whether the final element of the path is a file name +// (as opposed to a directory name). +func checkPath(path string, kind pathKind) error { + if !utf8.ValidString(path) { + return fmt.Errorf("invalid UTF-8") + } + if path == "" { + return fmt.Errorf("empty string") + } + if path[0] == '-' { + return fmt.Errorf("leading dash") + } + if strings.Contains(path, "//") { + return fmt.Errorf("double slash") + } + if path[len(path)-1] == '/' { + return fmt.Errorf("trailing slash") + } + elemStart := 0 + for i, r := range path { + if r == '/' { + if err := checkElem(path[elemStart:i], kind); err != nil { + return err + } + elemStart = i + 1 + } + } + if err := checkElem(path[elemStart:], kind); err != nil { + return err + } + return nil +} + +// checkElem checks whether an individual path element is valid. +func checkElem(elem string, kind pathKind) error { + if elem == "" { + return fmt.Errorf("empty path element") + } + if strings.Count(elem, ".") == len(elem) { + return fmt.Errorf("invalid path element %q", elem) + } + if elem[0] == '.' && kind == modulePath { + return fmt.Errorf("leading dot in path element") + } + if elem[len(elem)-1] == '.' { + return fmt.Errorf("trailing dot in path element") + } + for _, r := range elem { + ok := false + switch kind { + case modulePath: + ok = modPathOK(r) + case importPath: + ok = importPathOK(r) + case filePath: + ok = fileNameOK(r) + default: + panic(fmt.Sprintf("internal error: invalid kind %v", kind)) + } + if !ok { + return fmt.Errorf("invalid char %q", r) + } + } + + // Windows disallows a bunch of path elements, sadly. + // See https://docs.microsoft.com/en-us/windows/desktop/fileio/naming-a-file + short := elem + if i := strings.Index(short, "."); i >= 0 { + short = short[:i] + } + for _, bad := range badWindowsNames { + if strings.EqualFold(bad, short) { + return fmt.Errorf("%q disallowed as path element component on Windows", short) + } + } + + if kind == filePath { + // don't check for Windows short-names in file names. They're + // only an issue for import paths. + return nil + } + + // Reject path components that look like Windows short-names. + // Those usually end in a tilde followed by one or more ASCII digits. + if tilde := strings.LastIndexByte(short, '~'); tilde >= 0 && tilde < len(short)-1 { + suffix := short[tilde+1:] + suffixIsDigits := true + for _, r := range suffix { + if r < '0' || r > '9' { + suffixIsDigits = false + break + } + } + if suffixIsDigits { + return fmt.Errorf("trailing tilde and digits in path element") + } + } + + return nil +} + +// CheckFilePath checks that a slash-separated file path is valid. +// The definition of a valid file path is the same as the definition +// of a valid import path except that the set of allowed characters is larger: +// all Unicode letters, ASCII digits, the ASCII space character (U+0020), +// and the ASCII punctuation characters +// “!#$%&()+,-.=@[]^_{}~”. +// (The excluded punctuation characters, " * < > ? ` ' | / \ and :, +// have special meanings in certain shells or operating systems.) +// +// CheckFilePath may be less restrictive in the future, but see the +// top-level package documentation for additional information about +// subtleties of Unicode. 
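For reviewers following the vendored code: CheckPath, CheckImportPath, and CheckFilePath (the last one defined next) all funnel into checkPath/checkElem and differ only in the character class applied (modPathOK, importPathOK, fileNameOK) plus a few structural rules. A minimal usage sketch, assuming the vendored golang.org/x/mod/module package as added above; the results noted in comments are indicative, not output of this patch:

package main

import (
	"fmt"

	"golang.org/x/mod/module"
)

func main() {
	// Module paths: the first element must look like a lower-case, dotted domain name.
	fmt.Println(module.CheckPath("github.com/fatih/color")) // <nil>
	fmt.Println(module.CheckPath("color"))                  // error: missing dot in first path element
	fmt.Println(module.CheckPath("example.com/pkg/v1"))     // error: invalid version (a /v1 suffix is not allowed)

	// Import paths are slightly more permissive; '+' is accepted.
	fmt.Println(module.CheckImportPath("example.com/g++/wrapper")) // <nil>
	fmt.Println(module.CheckImportPath("example.com/has space"))   // error: invalid char ' '

	// File paths may additionally contain Unicode letters and spaces.
	fmt.Println(module.CheckFilePath("café/read me.txt")) // <nil>
	fmt.Println(module.CheckFilePath("notes|draft"))      // error: invalid char '|'
}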
+func CheckFilePath(path string) error { + if err := checkPath(path, filePath); err != nil { + return fmt.Errorf("malformed file path %q: %v", path, err) + } + return nil +} + +// badWindowsNames are the reserved file path elements on Windows. +// See https://docs.microsoft.com/en-us/windows/desktop/fileio/naming-a-file +var badWindowsNames = []string{ + "CON", + "PRN", + "AUX", + "NUL", + "COM1", + "COM2", + "COM3", + "COM4", + "COM5", + "COM6", + "COM7", + "COM8", + "COM9", + "LPT1", + "LPT2", + "LPT3", + "LPT4", + "LPT5", + "LPT6", + "LPT7", + "LPT8", + "LPT9", +} + +// SplitPathVersion returns prefix and major version such that prefix+pathMajor == path +// and version is either empty or "/vN" for N >= 2. +// As a special case, gopkg.in paths are recognized directly; +// they require ".vN" instead of "/vN", and for all N, not just N >= 2. +// SplitPathVersion returns with ok = false when presented with +// a path whose last path element does not satisfy the constraints +// applied by CheckPath, such as "example.com/pkg/v1" or "example.com/pkg/v1.2". +func SplitPathVersion(path string) (prefix, pathMajor string, ok bool) { + if strings.HasPrefix(path, "gopkg.in/") { + return splitGopkgIn(path) + } + + i := len(path) + dot := false + for i > 0 && ('0' <= path[i-1] && path[i-1] <= '9' || path[i-1] == '.') { + if path[i-1] == '.' { + dot = true + } + i-- + } + if i <= 1 || i == len(path) || path[i-1] != 'v' || path[i-2] != '/' { + return path, "", true + } + prefix, pathMajor = path[:i-2], path[i-2:] + if dot || len(pathMajor) <= 2 || pathMajor[2] == '0' || pathMajor == "/v1" { + return path, "", false + } + return prefix, pathMajor, true +} + +// splitGopkgIn is like SplitPathVersion but only for gopkg.in paths. +func splitGopkgIn(path string) (prefix, pathMajor string, ok bool) { + if !strings.HasPrefix(path, "gopkg.in/") { + return path, "", false + } + i := len(path) + if strings.HasSuffix(path, "-unstable") { + i -= len("-unstable") + } + for i > 0 && ('0' <= path[i-1] && path[i-1] <= '9') { + i-- + } + if i <= 1 || path[i-1] != 'v' || path[i-2] != '.' { + // All gopkg.in paths must end in vN for some N. + return path, "", false + } + prefix, pathMajor = path[:i-2], path[i-2:] + if len(pathMajor) <= 2 || pathMajor[2] == '0' && pathMajor != ".v0" { + return path, "", false + } + return prefix, pathMajor, true +} + +// MatchPathMajor reports whether the semantic version v +// matches the path major version pathMajor. +// +// MatchPathMajor returns true if and only if CheckPathMajor returns nil. +func MatchPathMajor(v, pathMajor string) bool { + return CheckPathMajor(v, pathMajor) == nil +} + +// CheckPathMajor returns a non-nil error if the semantic version v +// does not match the path major version pathMajor. +func CheckPathMajor(v, pathMajor string) error { + // TODO(jayconrod): return errors or panic for invalid inputs. This function + // (and others) was covered by integration tests for cmd/go, and surrounding + // code protected against invalid inputs like non-canonical versions. + if strings.HasPrefix(pathMajor, ".v") && strings.HasSuffix(pathMajor, "-unstable") { + pathMajor = strings.TrimSuffix(pathMajor, "-unstable") + } + if strings.HasPrefix(v, "v0.0.0-") && pathMajor == ".v1" { + // Allow old bug in pseudo-versions that generated v0.0.0- pseudoversion for gopkg .v1. + // For example, gopkg.in/yaml.v2@v2.2.1's go.mod requires gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405. 
+ return nil + } + m := semver.Major(v) + if pathMajor == "" { + if m == "v0" || m == "v1" || semver.Build(v) == "+incompatible" { + return nil + } + pathMajor = "v0 or v1" + } else if pathMajor[0] == '/' || pathMajor[0] == '.' { + if m == pathMajor[1:] { + return nil + } + pathMajor = pathMajor[1:] + } + return &InvalidVersionError{ + Version: v, + Err: fmt.Errorf("should be %s, not %s", pathMajor, semver.Major(v)), + } +} + +// PathMajorPrefix returns the major-version tag prefix implied by pathMajor. +// An empty PathMajorPrefix allows either v0 or v1. +// +// Note that MatchPathMajor may accept some versions that do not actually begin +// with this prefix: namely, it accepts a 'v0.0.0-' prefix for a '.v1' +// pathMajor, even though that pathMajor implies 'v1' tagging. +func PathMajorPrefix(pathMajor string) string { + if pathMajor == "" { + return "" + } + if pathMajor[0] != '/' && pathMajor[0] != '.' { + panic("pathMajor suffix " + pathMajor + " passed to PathMajorPrefix lacks separator") + } + if strings.HasPrefix(pathMajor, ".v") && strings.HasSuffix(pathMajor, "-unstable") { + pathMajor = strings.TrimSuffix(pathMajor, "-unstable") + } + m := pathMajor[1:] + if m != semver.Major(m) { + panic("pathMajor suffix " + pathMajor + "passed to PathMajorPrefix is not a valid major version") + } + return m +} + +// CanonicalVersion returns the canonical form of the version string v. +// It is the same as semver.Canonical(v) except that it preserves the special build suffix "+incompatible". +func CanonicalVersion(v string) string { + cv := semver.Canonical(v) + if semver.Build(v) == "+incompatible" { + cv += "+incompatible" + } + return cv +} + +// Sort sorts the list by Path, breaking ties by comparing Version fields. +// The Version fields are interpreted as semantic versions (using semver.Compare) +// optionally followed by a tie-breaking suffix introduced by a slash character, +// like in "v0.0.1/go.mod". +func Sort(list []Version) { + sort.Slice(list, func(i, j int) bool { + mi := list[i] + mj := list[j] + if mi.Path != mj.Path { + return mi.Path < mj.Path + } + // To help go.sum formatting, allow version/file. + // Compare semver prefix by semver rules, + // file by string order. + vi := mi.Version + vj := mj.Version + var fi, fj string + if k := strings.Index(vi, "/"); k >= 0 { + vi, fi = vi[:k], vi[k:] + } + if k := strings.Index(vj, "/"); k >= 0 { + vj, fj = vj[:k], vj[k:] + } + if vi != vj { + return semver.Compare(vi, vj) < 0 + } + return fi < fj + }) +} + +// EscapePath returns the escaped form of the given module path. +// It fails if the module path is invalid. +func EscapePath(path string) (escaped string, err error) { + if err := CheckPath(path); err != nil { + return "", err + } + + return escapeString(path) +} + +// EscapeVersion returns the escaped form of the given module version. +// Versions are allowed to be in non-semver form but must be valid file names +// and not contain exclamation marks. +func EscapeVersion(v string) (escaped string, err error) { + if err := checkElem(v, filePath); err != nil || strings.Contains(v, "!") { + return "", &InvalidVersionError{ + Version: v, + Err: fmt.Errorf("disallowed version string"), + } + } + return escapeString(v) +} + +func escapeString(s string) (escaped string, err error) { + haveUpper := false + for _, r := range s { + if r == '!' || r >= utf8.RuneSelf { + // This should be disallowed by CheckPath, but diagnose anyway. + // The correctness of the escaping loop below depends on it. 
+ return "", fmt.Errorf("internal error: inconsistency in EscapePath") + } + if 'A' <= r && r <= 'Z' { + haveUpper = true + } + } + + if !haveUpper { + return s, nil + } + + var buf []byte + for _, r := range s { + if 'A' <= r && r <= 'Z' { + buf = append(buf, '!', byte(r+'a'-'A')) + } else { + buf = append(buf, byte(r)) + } + } + return string(buf), nil +} + +// UnescapePath returns the module path for the given escaped path. +// It fails if the escaped path is invalid or describes an invalid path. +func UnescapePath(escaped string) (path string, err error) { + path, ok := unescapeString(escaped) + if !ok { + return "", fmt.Errorf("invalid escaped module path %q", escaped) + } + if err := CheckPath(path); err != nil { + return "", fmt.Errorf("invalid escaped module path %q: %v", escaped, err) + } + return path, nil +} + +// UnescapeVersion returns the version string for the given escaped version. +// It fails if the escaped form is invalid or describes an invalid version. +// Versions are allowed to be in non-semver form but must be valid file names +// and not contain exclamation marks. +func UnescapeVersion(escaped string) (v string, err error) { + v, ok := unescapeString(escaped) + if !ok { + return "", fmt.Errorf("invalid escaped version %q", escaped) + } + if err := checkElem(v, filePath); err != nil { + return "", fmt.Errorf("invalid escaped version %q: %v", v, err) + } + return v, nil +} + +func unescapeString(escaped string) (string, bool) { + var buf []byte + + bang := false + for _, r := range escaped { + if r >= utf8.RuneSelf { + return "", false + } + if bang { + bang = false + if r < 'a' || 'z' < r { + return "", false + } + buf = append(buf, byte(r+'A'-'a')) + continue + } + if r == '!' { + bang = true + continue + } + if 'A' <= r && r <= 'Z' { + return "", false + } + buf = append(buf, byte(r)) + } + if bang { + return "", false + } + return string(buf), true +} + +// MatchPrefixPatterns reports whether any path prefix of target matches one of +// the glob patterns (as defined by path.Match) in the comma-separated globs +// list. This implements the algorithm used when matching a module path to the +// GOPRIVATE environment variable, as described by 'go help module-private'. +// +// It ignores any empty or malformed patterns in the list. +func MatchPrefixPatterns(globs, target string) bool { + for globs != "" { + // Extract next non-empty glob in comma-separated list. + var glob string + if i := strings.Index(globs, ","); i >= 0 { + glob, globs = globs[:i], globs[i+1:] + } else { + glob, globs = globs, "" + } + if glob == "" { + continue + } + + // A glob with N+1 path elements (N slashes) needs to be matched + // against the first N+1 path elements of target, + // which end just before the N+1'th slash. + n := strings.Count(glob, "/") + prefix := target + // Walk target, counting slashes, truncating at the N+1'th slash. + for i := 0; i < len(target); i++ { + if target[i] == '/' { + if n == 0 { + prefix = target[:i] + break + } + n-- + } + } + if n > 0 { + // Not enough prefix elements. + continue + } + matched, _ := path.Match(glob, prefix) + if matched { + return true + } + } + return false +} diff --git a/vendor/golang.org/x/mod/semver/semver.go b/vendor/golang.org/x/mod/semver/semver.go new file mode 100644 index 000000000..4338f3517 --- /dev/null +++ b/vendor/golang.org/x/mod/semver/semver.go @@ -0,0 +1,391 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
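That closes module.go; the semver package documented next supplies the version comparisons it relies on. The rest of module.go ties paths to versions, defines the case-escaping used on disk and by proxies, and implements GOPRIVATE-style prefix matching. A small sketch, again assuming the vendored golang.org/x/mod/module package, with indicative results in comments:

package main

import (
	"fmt"

	"golang.org/x/mod/module"
)

func main() {
	// Path and version must agree on the major version.
	fmt.Println(module.Check("github.com/fatih/color", "v1.13.0")) // <nil>
	fmt.Println(module.Check("gopkg.in/yaml.v2", "v2.4.0"))        // <nil>
	fmt.Println(module.Check("github.com/fatih/color", "v2.0.0"))  // error: should be v0 or v1, not v2

	// SplitPathVersion separates the /vN (or gopkg.in .vN) suffix.
	prefix, major, ok := module.SplitPathVersion("golang.org/x/mod/v2")
	fmt.Println(prefix, major, ok) // golang.org/x/mod /v2 true

	// Escaping replaces upper-case letters with '!' plus the lower-case letter,
	// keeping paths unambiguous on case-insensitive file systems.
	esc, _ := module.EscapePath("github.com/BurntSushi/toml")
	fmt.Println(esc) // github.com/!burnt!sushi/toml

	// GOPRIVATE-style prefix matching over comma-separated glob patterns.
	fmt.Println(module.MatchPrefixPatterns("*.corp.example.com,rsc.io/private", "rsc.io/private/quote")) // true
	fmt.Println(module.MatchPrefixPatterns("*.corp.example.com", "example.com/public"))                  // false
}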
+ +// Package semver implements comparison of semantic version strings. +// In this package, semantic version strings must begin with a leading "v", +// as in "v1.0.0". +// +// The general form of a semantic version string accepted by this package is +// +// vMAJOR[.MINOR[.PATCH[-PRERELEASE][+BUILD]]] +// +// where square brackets indicate optional parts of the syntax; +// MAJOR, MINOR, and PATCH are decimal integers without extra leading zeros; +// PRERELEASE and BUILD are each a series of non-empty dot-separated identifiers +// using only alphanumeric characters and hyphens; and +// all-numeric PRERELEASE identifiers must not have leading zeros. +// +// This package follows Semantic Versioning 2.0.0 (see semver.org) +// with two exceptions. First, it requires the "v" prefix. Second, it recognizes +// vMAJOR and vMAJOR.MINOR (with no prerelease or build suffixes) +// as shorthands for vMAJOR.0.0 and vMAJOR.MINOR.0. +package semver + +// parsed returns the parsed form of a semantic version string. +type parsed struct { + major string + minor string + patch string + short string + prerelease string + build string + err string +} + +// IsValid reports whether v is a valid semantic version string. +func IsValid(v string) bool { + _, ok := parse(v) + return ok +} + +// Canonical returns the canonical formatting of the semantic version v. +// It fills in any missing .MINOR or .PATCH and discards build metadata. +// Two semantic versions compare equal only if their canonical formattings +// are identical strings. +// The canonical invalid semantic version is the empty string. +func Canonical(v string) string { + p, ok := parse(v) + if !ok { + return "" + } + if p.build != "" { + return v[:len(v)-len(p.build)] + } + if p.short != "" { + return v + p.short + } + return v +} + +// Major returns the major version prefix of the semantic version v. +// For example, Major("v2.1.0") == "v2". +// If v is an invalid semantic version string, Major returns the empty string. +func Major(v string) string { + pv, ok := parse(v) + if !ok { + return "" + } + return v[:1+len(pv.major)] +} + +// MajorMinor returns the major.minor version prefix of the semantic version v. +// For example, MajorMinor("v2.1.0") == "v2.1". +// If v is an invalid semantic version string, MajorMinor returns the empty string. +func MajorMinor(v string) string { + pv, ok := parse(v) + if !ok { + return "" + } + i := 1 + len(pv.major) + if j := i + 1 + len(pv.minor); j <= len(v) && v[i] == '.' && v[i+1:j] == pv.minor { + return v[:j] + } + return v[:i] + "." + pv.minor +} + +// Prerelease returns the prerelease suffix of the semantic version v. +// For example, Prerelease("v2.1.0-pre+meta") == "-pre". +// If v is an invalid semantic version string, Prerelease returns the empty string. +func Prerelease(v string) string { + pv, ok := parse(v) + if !ok { + return "" + } + return pv.prerelease +} + +// Build returns the build suffix of the semantic version v. +// For example, Build("v2.1.0+meta") == "+meta". +// If v is an invalid semantic version string, Build returns the empty string. +func Build(v string) string { + pv, ok := parse(v) + if !ok { + return "" + } + return pv.build +} + +// Compare returns an integer comparing two versions according to +// semantic version precedence. +// The result will be 0 if v == w, -1 if v < w, or +1 if v > w. +// +// An invalid semantic version string is considered less than a valid one. +// All invalid semantic version strings compare equal to each other. 
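Compare, defined next, implements the precedence rules quoted above. A brief sketch of the exported helpers, assuming this vendored golang.org/x/mod/semver package; the commented results are indicative:

package main

import (
	"fmt"

	"golang.org/x/mod/semver"
)

func main() {
	fmt.Println(semver.IsValid("1.2.3"))  // false: the leading "v" is required
	fmt.Println(semver.IsValid("v1.2"))   // true: shorthand for v1.2.0
	fmt.Println(semver.Canonical("v1.2")) // v1.2.0

	fmt.Println(semver.Major("v2.1.0"))          // v2
	fmt.Println(semver.MajorMinor("v2.1.0-pre")) // v2.1
	fmt.Println(semver.Prerelease("v2.1.0-pre")) // -pre
	fmt.Println(semver.Build("v2.1.0+meta"))     // +meta

	// Precedence: prereleases sort before the release; build metadata is ignored.
	fmt.Println(semver.Compare("v1.0.0-alpha", "v1.0.0"))            // -1
	fmt.Println(semver.Compare("v1.0.0-alpha.2", "v1.0.0-alpha.11")) // -1 (numeric identifiers compare numerically)
	fmt.Println(semver.Compare("v1.0.0+build.1", "v1.0.0+build.2"))  // 0
}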
+func Compare(v, w string) int { + pv, ok1 := parse(v) + pw, ok2 := parse(w) + if !ok1 && !ok2 { + return 0 + } + if !ok1 { + return -1 + } + if !ok2 { + return +1 + } + if c := compareInt(pv.major, pw.major); c != 0 { + return c + } + if c := compareInt(pv.minor, pw.minor); c != 0 { + return c + } + if c := compareInt(pv.patch, pw.patch); c != 0 { + return c + } + return comparePrerelease(pv.prerelease, pw.prerelease) +} + +// Max canonicalizes its arguments and then returns the version string +// that compares greater. +// +// Deprecated: use Compare instead. In most cases, returning a canonicalized +// version is not expected or desired. +func Max(v, w string) string { + v = Canonical(v) + w = Canonical(w) + if Compare(v, w) > 0 { + return v + } + return w +} + +func parse(v string) (p parsed, ok bool) { + if v == "" || v[0] != 'v' { + p.err = "missing v prefix" + return + } + p.major, v, ok = parseInt(v[1:]) + if !ok { + p.err = "bad major version" + return + } + if v == "" { + p.minor = "0" + p.patch = "0" + p.short = ".0.0" + return + } + if v[0] != '.' { + p.err = "bad minor prefix" + ok = false + return + } + p.minor, v, ok = parseInt(v[1:]) + if !ok { + p.err = "bad minor version" + return + } + if v == "" { + p.patch = "0" + p.short = ".0" + return + } + if v[0] != '.' { + p.err = "bad patch prefix" + ok = false + return + } + p.patch, v, ok = parseInt(v[1:]) + if !ok { + p.err = "bad patch version" + return + } + if len(v) > 0 && v[0] == '-' { + p.prerelease, v, ok = parsePrerelease(v) + if !ok { + p.err = "bad prerelease" + return + } + } + if len(v) > 0 && v[0] == '+' { + p.build, v, ok = parseBuild(v) + if !ok { + p.err = "bad build" + return + } + } + if v != "" { + p.err = "junk on end" + ok = false + return + } + ok = true + return +} + +func parseInt(v string) (t, rest string, ok bool) { + if v == "" { + return + } + if v[0] < '0' || '9' < v[0] { + return + } + i := 1 + for i < len(v) && '0' <= v[i] && v[i] <= '9' { + i++ + } + if v[0] == '0' && i != 1 { + return + } + return v[:i], v[i:], true +} + +func parsePrerelease(v string) (t, rest string, ok bool) { + // "A pre-release version MAY be denoted by appending a hyphen and + // a series of dot separated identifiers immediately following the patch version. + // Identifiers MUST comprise only ASCII alphanumerics and hyphen [0-9A-Za-z-]. + // Identifiers MUST NOT be empty. Numeric identifiers MUST NOT include leading zeroes." + if v == "" || v[0] != '-' { + return + } + i := 1 + start := 1 + for i < len(v) && v[i] != '+' { + if !isIdentChar(v[i]) && v[i] != '.' { + return + } + if v[i] == '.' { + if start == i || isBadNum(v[start:i]) { + return + } + start = i + 1 + } + i++ + } + if start == i || isBadNum(v[start:i]) { + return + } + return v[:i], v[i:], true +} + +func parseBuild(v string) (t, rest string, ok bool) { + if v == "" || v[0] != '+' { + return + } + i := 1 + start := 1 + for i < len(v) { + if !isIdentChar(v[i]) && v[i] != '.' { + return + } + if v[i] == '.' 
{ + if start == i { + return + } + start = i + 1 + } + i++ + } + if start == i { + return + } + return v[:i], v[i:], true +} + +func isIdentChar(c byte) bool { + return 'A' <= c && c <= 'Z' || 'a' <= c && c <= 'z' || '0' <= c && c <= '9' || c == '-' +} + +func isBadNum(v string) bool { + i := 0 + for i < len(v) && '0' <= v[i] && v[i] <= '9' { + i++ + } + return i == len(v) && i > 1 && v[0] == '0' +} + +func isNum(v string) bool { + i := 0 + for i < len(v) && '0' <= v[i] && v[i] <= '9' { + i++ + } + return i == len(v) +} + +func compareInt(x, y string) int { + if x == y { + return 0 + } + if len(x) < len(y) { + return -1 + } + if len(x) > len(y) { + return +1 + } + if x < y { + return -1 + } else { + return +1 + } +} + +func comparePrerelease(x, y string) int { + // "When major, minor, and patch are equal, a pre-release version has + // lower precedence than a normal version. + // Example: 1.0.0-alpha < 1.0.0. + // Precedence for two pre-release versions with the same major, minor, + // and patch version MUST be determined by comparing each dot separated + // identifier from left to right until a difference is found as follows: + // identifiers consisting of only digits are compared numerically and + // identifiers with letters or hyphens are compared lexically in ASCII + // sort order. Numeric identifiers always have lower precedence than + // non-numeric identifiers. A larger set of pre-release fields has a + // higher precedence than a smaller set, if all of the preceding + // identifiers are equal. + // Example: 1.0.0-alpha < 1.0.0-alpha.1 < 1.0.0-alpha.beta < + // 1.0.0-beta < 1.0.0-beta.2 < 1.0.0-beta.11 < 1.0.0-rc.1 < 1.0.0." + if x == y { + return 0 + } + if x == "" { + return +1 + } + if y == "" { + return -1 + } + for x != "" && y != "" { + x = x[1:] // skip - or . + y = y[1:] // skip - or . + var dx, dy string + dx, x = nextIdent(x) + dy, y = nextIdent(y) + if dx != dy { + ix := isNum(dx) + iy := isNum(dy) + if ix != iy { + if ix { + return -1 + } else { + return +1 + } + } + if ix { + if len(dx) < len(dy) { + return -1 + } + if len(dx) > len(dy) { + return +1 + } + } + if dx < dy { + return -1 + } else { + return +1 + } + } + } + if x == "" { + return -1 + } else { + return +1 + } +} + +func nextIdent(x string) (dx, rest string) { + i := 0 + for i < len(x) && x[i] != '.' { + i++ + } + return x[:i], x[i:] +} diff --git a/vendor/golang.org/x/text/width/kind_string.go b/vendor/golang.org/x/text/width/kind_string.go new file mode 100644 index 000000000..dd3febd43 --- /dev/null +++ b/vendor/golang.org/x/text/width/kind_string.go @@ -0,0 +1,28 @@ +// Code generated by "stringer -type=Kind"; DO NOT EDIT. + +package width + +import "strconv" + +func _() { + // An "invalid array index" compiler error signifies that the constant values have changed. + // Re-run the stringer command to generate them again. 
+ var x [1]struct{} + _ = x[Neutral-0] + _ = x[EastAsianAmbiguous-1] + _ = x[EastAsianWide-2] + _ = x[EastAsianNarrow-3] + _ = x[EastAsianFullwidth-4] + _ = x[EastAsianHalfwidth-5] +} + +const _Kind_name = "NeutralEastAsianAmbiguousEastAsianWideEastAsianNarrowEastAsianFullwidthEastAsianHalfwidth" + +var _Kind_index = [...]uint8{0, 7, 25, 38, 53, 71, 89} + +func (i Kind) String() string { + if i < 0 || i >= Kind(len(_Kind_index)-1) { + return "Kind(" + strconv.FormatInt(int64(i), 10) + ")" + } + return _Kind_name[_Kind_index[i]:_Kind_index[i+1]] +} diff --git a/vendor/golang.org/x/text/width/tables10.0.0.go b/vendor/golang.org/x/text/width/tables10.0.0.go new file mode 100644 index 000000000..186b1d4ef --- /dev/null +++ b/vendor/golang.org/x/text/width/tables10.0.0.go @@ -0,0 +1,1319 @@ +// Code generated by running "go generate" in golang.org/x/text. DO NOT EDIT. + +//go:build go1.10 && !go1.13 +// +build go1.10,!go1.13 + +package width + +// UnicodeVersion is the Unicode version from which the tables in this package are derived. +const UnicodeVersion = "10.0.0" + +// lookup returns the trie value for the first UTF-8 encoding in s and +// the width in bytes of this encoding. The size will be 0 if s does not +// hold enough bytes to complete the encoding. len(s) must be greater than 0. +func (t *widthTrie) lookup(s []byte) (v uint16, sz int) { + c0 := s[0] + switch { + case c0 < 0x80: // is ASCII + return widthValues[c0], 1 + case c0 < 0xC2: + return 0, 1 // Illegal UTF-8: not a starter, not ASCII. + case c0 < 0xE0: // 2-byte UTF-8 + if len(s) < 2 { + return 0, 0 + } + i := widthIndex[c0] + c1 := s[1] + if c1 < 0x80 || 0xC0 <= c1 { + return 0, 1 // Illegal UTF-8: not a continuation byte. + } + return t.lookupValue(uint32(i), c1), 2 + case c0 < 0xF0: // 3-byte UTF-8 + if len(s) < 3 { + return 0, 0 + } + i := widthIndex[c0] + c1 := s[1] + if c1 < 0x80 || 0xC0 <= c1 { + return 0, 1 // Illegal UTF-8: not a continuation byte. + } + o := uint32(i)<<6 + uint32(c1) + i = widthIndex[o] + c2 := s[2] + if c2 < 0x80 || 0xC0 <= c2 { + return 0, 2 // Illegal UTF-8: not a continuation byte. + } + return t.lookupValue(uint32(i), c2), 3 + case c0 < 0xF8: // 4-byte UTF-8 + if len(s) < 4 { + return 0, 0 + } + i := widthIndex[c0] + c1 := s[1] + if c1 < 0x80 || 0xC0 <= c1 { + return 0, 1 // Illegal UTF-8: not a continuation byte. + } + o := uint32(i)<<6 + uint32(c1) + i = widthIndex[o] + c2 := s[2] + if c2 < 0x80 || 0xC0 <= c2 { + return 0, 2 // Illegal UTF-8: not a continuation byte. + } + o = uint32(i)<<6 + uint32(c2) + i = widthIndex[o] + c3 := s[3] + if c3 < 0x80 || 0xC0 <= c3 { + return 0, 3 // Illegal UTF-8: not a continuation byte. + } + return t.lookupValue(uint32(i), c3), 4 + } + // Illegal rune + return 0, 1 +} + +// lookupUnsafe returns the trie value for the first UTF-8 encoding in s. +// s must start with a full and valid UTF-8 encoded rune. +func (t *widthTrie) lookupUnsafe(s []byte) uint16 { + c0 := s[0] + if c0 < 0x80 { // is ASCII + return widthValues[c0] + } + i := widthIndex[c0] + if c0 < 0xE0 { // 2-byte UTF-8 + return t.lookupValue(uint32(i), s[1]) + } + i = widthIndex[uint32(i)<<6+uint32(s[1])] + if c0 < 0xF0 { // 3-byte UTF-8 + return t.lookupValue(uint32(i), s[2]) + } + i = widthIndex[uint32(i)<<6+uint32(s[2])] + if c0 < 0xF8 { // 4-byte UTF-8 + return t.lookupValue(uint32(i), s[3]) + } + return 0 +} + +// lookupString returns the trie value for the first UTF-8 encoding in s and +// the width in bytes of this encoding. 
The size will be 0 if s does not +// hold enough bytes to complete the encoding. len(s) must be greater than 0. +func (t *widthTrie) lookupString(s string) (v uint16, sz int) { + c0 := s[0] + switch { + case c0 < 0x80: // is ASCII + return widthValues[c0], 1 + case c0 < 0xC2: + return 0, 1 // Illegal UTF-8: not a starter, not ASCII. + case c0 < 0xE0: // 2-byte UTF-8 + if len(s) < 2 { + return 0, 0 + } + i := widthIndex[c0] + c1 := s[1] + if c1 < 0x80 || 0xC0 <= c1 { + return 0, 1 // Illegal UTF-8: not a continuation byte. + } + return t.lookupValue(uint32(i), c1), 2 + case c0 < 0xF0: // 3-byte UTF-8 + if len(s) < 3 { + return 0, 0 + } + i := widthIndex[c0] + c1 := s[1] + if c1 < 0x80 || 0xC0 <= c1 { + return 0, 1 // Illegal UTF-8: not a continuation byte. + } + o := uint32(i)<<6 + uint32(c1) + i = widthIndex[o] + c2 := s[2] + if c2 < 0x80 || 0xC0 <= c2 { + return 0, 2 // Illegal UTF-8: not a continuation byte. + } + return t.lookupValue(uint32(i), c2), 3 + case c0 < 0xF8: // 4-byte UTF-8 + if len(s) < 4 { + return 0, 0 + } + i := widthIndex[c0] + c1 := s[1] + if c1 < 0x80 || 0xC0 <= c1 { + return 0, 1 // Illegal UTF-8: not a continuation byte. + } + o := uint32(i)<<6 + uint32(c1) + i = widthIndex[o] + c2 := s[2] + if c2 < 0x80 || 0xC0 <= c2 { + return 0, 2 // Illegal UTF-8: not a continuation byte. + } + o = uint32(i)<<6 + uint32(c2) + i = widthIndex[o] + c3 := s[3] + if c3 < 0x80 || 0xC0 <= c3 { + return 0, 3 // Illegal UTF-8: not a continuation byte. + } + return t.lookupValue(uint32(i), c3), 4 + } + // Illegal rune + return 0, 1 +} + +// lookupStringUnsafe returns the trie value for the first UTF-8 encoding in s. +// s must start with a full and valid UTF-8 encoded rune. +func (t *widthTrie) lookupStringUnsafe(s string) uint16 { + c0 := s[0] + if c0 < 0x80 { // is ASCII + return widthValues[c0] + } + i := widthIndex[c0] + if c0 < 0xE0 { // 2-byte UTF-8 + return t.lookupValue(uint32(i), s[1]) + } + i = widthIndex[uint32(i)<<6+uint32(s[1])] + if c0 < 0xF0 { // 3-byte UTF-8 + return t.lookupValue(uint32(i), s[2]) + } + i = widthIndex[uint32(i)<<6+uint32(s[2])] + if c0 < 0xF8 { // 4-byte UTF-8 + return t.lookupValue(uint32(i), s[3]) + } + return 0 +} + +// widthTrie. Total size: 14336 bytes (14.00 KiB). Checksum: c59df54630d3dc4a. +type widthTrie struct{} + +func newWidthTrie(i int) *widthTrie { + return &widthTrie{} +} + +// lookupValue determines the type of block n and looks up the value for b. +func (t *widthTrie) lookupValue(n uint32, b byte) uint16 { + switch { + default: + return uint16(widthValues[n<<6+uint32(b)]) + } +} + +// widthValues: 101 blocks, 6464 entries, 12928 bytes +// The third block is the zero block. 
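The widthValues table that follows is the data these trie lookups index into; the public API of the package consumes it. A tiny sketch assuming golang.org/x/text/width, with the Kind names as listed in kind_string.go above (expected kinds are indicative):

package main

import (
	"fmt"

	"golang.org/x/text/width"
)

func main() {
	// LookupRune consults the generated widthIndex/widthValues trie.
	for _, r := range []rune{'A', 'Ａ', 'ｶ', '世'} {
		fmt.Printf("%q -> %v\n", r, width.LookupRune(r).Kind())
	}
	// Expected kinds (indicative):
	//   'A'  -> EastAsianNarrow
	//   'Ａ' -> EastAsianFullwidth
	//   'ｶ'  -> EastAsianHalfwidth
	//   '世' -> EastAsianWide

	// LookupString inspects the first rune of a string and also reports its byte size.
	p, size := width.LookupString("Ｇｏ")
	fmt.Println(p.Kind(), size) // EastAsianFullwidth 3
}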
+var widthValues = [6464]uint16{ + // Block 0x0, offset 0x0 + 0x20: 0x6001, 0x21: 0x6002, 0x22: 0x6002, 0x23: 0x6002, + 0x24: 0x6002, 0x25: 0x6002, 0x26: 0x6002, 0x27: 0x6002, 0x28: 0x6002, 0x29: 0x6002, + 0x2a: 0x6002, 0x2b: 0x6002, 0x2c: 0x6002, 0x2d: 0x6002, 0x2e: 0x6002, 0x2f: 0x6002, + 0x30: 0x6002, 0x31: 0x6002, 0x32: 0x6002, 0x33: 0x6002, 0x34: 0x6002, 0x35: 0x6002, + 0x36: 0x6002, 0x37: 0x6002, 0x38: 0x6002, 0x39: 0x6002, 0x3a: 0x6002, 0x3b: 0x6002, + 0x3c: 0x6002, 0x3d: 0x6002, 0x3e: 0x6002, 0x3f: 0x6002, + // Block 0x1, offset 0x40 + 0x40: 0x6003, 0x41: 0x6003, 0x42: 0x6003, 0x43: 0x6003, 0x44: 0x6003, 0x45: 0x6003, + 0x46: 0x6003, 0x47: 0x6003, 0x48: 0x6003, 0x49: 0x6003, 0x4a: 0x6003, 0x4b: 0x6003, + 0x4c: 0x6003, 0x4d: 0x6003, 0x4e: 0x6003, 0x4f: 0x6003, 0x50: 0x6003, 0x51: 0x6003, + 0x52: 0x6003, 0x53: 0x6003, 0x54: 0x6003, 0x55: 0x6003, 0x56: 0x6003, 0x57: 0x6003, + 0x58: 0x6003, 0x59: 0x6003, 0x5a: 0x6003, 0x5b: 0x6003, 0x5c: 0x6003, 0x5d: 0x6003, + 0x5e: 0x6003, 0x5f: 0x6003, 0x60: 0x6004, 0x61: 0x6004, 0x62: 0x6004, 0x63: 0x6004, + 0x64: 0x6004, 0x65: 0x6004, 0x66: 0x6004, 0x67: 0x6004, 0x68: 0x6004, 0x69: 0x6004, + 0x6a: 0x6004, 0x6b: 0x6004, 0x6c: 0x6004, 0x6d: 0x6004, 0x6e: 0x6004, 0x6f: 0x6004, + 0x70: 0x6004, 0x71: 0x6004, 0x72: 0x6004, 0x73: 0x6004, 0x74: 0x6004, 0x75: 0x6004, + 0x76: 0x6004, 0x77: 0x6004, 0x78: 0x6004, 0x79: 0x6004, 0x7a: 0x6004, 0x7b: 0x6004, + 0x7c: 0x6004, 0x7d: 0x6004, 0x7e: 0x6004, + // Block 0x2, offset 0x80 + // Block 0x3, offset 0xc0 + 0xe1: 0x2000, 0xe2: 0x6005, 0xe3: 0x6005, + 0xe4: 0x2000, 0xe5: 0x6006, 0xe6: 0x6005, 0xe7: 0x2000, 0xe8: 0x2000, + 0xea: 0x2000, 0xec: 0x6007, 0xed: 0x2000, 0xee: 0x2000, 0xef: 0x6008, + 0xf0: 0x2000, 0xf1: 0x2000, 0xf2: 0x2000, 0xf3: 0x2000, 0xf4: 0x2000, + 0xf6: 0x2000, 0xf7: 0x2000, 0xf8: 0x2000, 0xf9: 0x2000, 0xfa: 0x2000, + 0xfc: 0x2000, 0xfd: 0x2000, 0xfe: 0x2000, 0xff: 0x2000, + // Block 0x4, offset 0x100 + 0x106: 0x2000, + 0x110: 0x2000, + 0x117: 0x2000, + 0x118: 0x2000, + 0x11e: 0x2000, 0x11f: 0x2000, 0x120: 0x2000, 0x121: 0x2000, + 0x126: 0x2000, 0x128: 0x2000, 0x129: 0x2000, + 0x12a: 0x2000, 0x12c: 0x2000, 0x12d: 0x2000, + 0x130: 0x2000, 0x132: 0x2000, 0x133: 0x2000, + 0x137: 0x2000, 0x138: 0x2000, 0x139: 0x2000, 0x13a: 0x2000, + 0x13c: 0x2000, 0x13e: 0x2000, + // Block 0x5, offset 0x140 + 0x141: 0x2000, + 0x151: 0x2000, + 0x153: 0x2000, + 0x15b: 0x2000, + 0x166: 0x2000, 0x167: 0x2000, + 0x16b: 0x2000, + 0x171: 0x2000, 0x172: 0x2000, 0x173: 0x2000, + 0x178: 0x2000, + 0x17f: 0x2000, + // Block 0x6, offset 0x180 + 0x180: 0x2000, 0x181: 0x2000, 0x182: 0x2000, 0x184: 0x2000, + 0x188: 0x2000, 0x189: 0x2000, 0x18a: 0x2000, 0x18b: 0x2000, + 0x18d: 0x2000, + 0x192: 0x2000, 0x193: 0x2000, + 0x1a6: 0x2000, 0x1a7: 0x2000, + 0x1ab: 0x2000, + // Block 0x7, offset 0x1c0 + 0x1ce: 0x2000, 0x1d0: 0x2000, + 0x1d2: 0x2000, 0x1d4: 0x2000, 0x1d6: 0x2000, + 0x1d8: 0x2000, 0x1da: 0x2000, 0x1dc: 0x2000, + // Block 0x8, offset 0x200 + 0x211: 0x2000, + 0x221: 0x2000, + // Block 0x9, offset 0x240 + 0x244: 0x2000, + 0x247: 0x2000, 0x249: 0x2000, 0x24a: 0x2000, 0x24b: 0x2000, + 0x24d: 0x2000, 0x250: 0x2000, + 0x258: 0x2000, 0x259: 0x2000, 0x25a: 0x2000, 0x25b: 0x2000, 0x25d: 0x2000, + 0x25f: 0x2000, + // Block 0xa, offset 0x280 + 0x280: 0x2000, 0x281: 0x2000, 0x282: 0x2000, 0x283: 0x2000, 0x284: 0x2000, 0x285: 0x2000, + 0x286: 0x2000, 0x287: 0x2000, 0x288: 0x2000, 0x289: 0x2000, 0x28a: 0x2000, 0x28b: 0x2000, + 0x28c: 0x2000, 0x28d: 0x2000, 0x28e: 0x2000, 0x28f: 0x2000, 0x290: 0x2000, 0x291: 0x2000, + 0x292: 0x2000, 0x293: 
0x2000, 0x294: 0x2000, 0x295: 0x2000, 0x296: 0x2000, 0x297: 0x2000, + 0x298: 0x2000, 0x299: 0x2000, 0x29a: 0x2000, 0x29b: 0x2000, 0x29c: 0x2000, 0x29d: 0x2000, + 0x29e: 0x2000, 0x29f: 0x2000, 0x2a0: 0x2000, 0x2a1: 0x2000, 0x2a2: 0x2000, 0x2a3: 0x2000, + 0x2a4: 0x2000, 0x2a5: 0x2000, 0x2a6: 0x2000, 0x2a7: 0x2000, 0x2a8: 0x2000, 0x2a9: 0x2000, + 0x2aa: 0x2000, 0x2ab: 0x2000, 0x2ac: 0x2000, 0x2ad: 0x2000, 0x2ae: 0x2000, 0x2af: 0x2000, + 0x2b0: 0x2000, 0x2b1: 0x2000, 0x2b2: 0x2000, 0x2b3: 0x2000, 0x2b4: 0x2000, 0x2b5: 0x2000, + 0x2b6: 0x2000, 0x2b7: 0x2000, 0x2b8: 0x2000, 0x2b9: 0x2000, 0x2ba: 0x2000, 0x2bb: 0x2000, + 0x2bc: 0x2000, 0x2bd: 0x2000, 0x2be: 0x2000, 0x2bf: 0x2000, + // Block 0xb, offset 0x2c0 + 0x2c0: 0x2000, 0x2c1: 0x2000, 0x2c2: 0x2000, 0x2c3: 0x2000, 0x2c4: 0x2000, 0x2c5: 0x2000, + 0x2c6: 0x2000, 0x2c7: 0x2000, 0x2c8: 0x2000, 0x2c9: 0x2000, 0x2ca: 0x2000, 0x2cb: 0x2000, + 0x2cc: 0x2000, 0x2cd: 0x2000, 0x2ce: 0x2000, 0x2cf: 0x2000, 0x2d0: 0x2000, 0x2d1: 0x2000, + 0x2d2: 0x2000, 0x2d3: 0x2000, 0x2d4: 0x2000, 0x2d5: 0x2000, 0x2d6: 0x2000, 0x2d7: 0x2000, + 0x2d8: 0x2000, 0x2d9: 0x2000, 0x2da: 0x2000, 0x2db: 0x2000, 0x2dc: 0x2000, 0x2dd: 0x2000, + 0x2de: 0x2000, 0x2df: 0x2000, 0x2e0: 0x2000, 0x2e1: 0x2000, 0x2e2: 0x2000, 0x2e3: 0x2000, + 0x2e4: 0x2000, 0x2e5: 0x2000, 0x2e6: 0x2000, 0x2e7: 0x2000, 0x2e8: 0x2000, 0x2e9: 0x2000, + 0x2ea: 0x2000, 0x2eb: 0x2000, 0x2ec: 0x2000, 0x2ed: 0x2000, 0x2ee: 0x2000, 0x2ef: 0x2000, + // Block 0xc, offset 0x300 + 0x311: 0x2000, + 0x312: 0x2000, 0x313: 0x2000, 0x314: 0x2000, 0x315: 0x2000, 0x316: 0x2000, 0x317: 0x2000, + 0x318: 0x2000, 0x319: 0x2000, 0x31a: 0x2000, 0x31b: 0x2000, 0x31c: 0x2000, 0x31d: 0x2000, + 0x31e: 0x2000, 0x31f: 0x2000, 0x320: 0x2000, 0x321: 0x2000, 0x323: 0x2000, + 0x324: 0x2000, 0x325: 0x2000, 0x326: 0x2000, 0x327: 0x2000, 0x328: 0x2000, 0x329: 0x2000, + 0x331: 0x2000, 0x332: 0x2000, 0x333: 0x2000, 0x334: 0x2000, 0x335: 0x2000, + 0x336: 0x2000, 0x337: 0x2000, 0x338: 0x2000, 0x339: 0x2000, 0x33a: 0x2000, 0x33b: 0x2000, + 0x33c: 0x2000, 0x33d: 0x2000, 0x33e: 0x2000, 0x33f: 0x2000, + // Block 0xd, offset 0x340 + 0x340: 0x2000, 0x341: 0x2000, 0x343: 0x2000, 0x344: 0x2000, 0x345: 0x2000, + 0x346: 0x2000, 0x347: 0x2000, 0x348: 0x2000, 0x349: 0x2000, + // Block 0xe, offset 0x380 + 0x381: 0x2000, + 0x390: 0x2000, 0x391: 0x2000, + 0x392: 0x2000, 0x393: 0x2000, 0x394: 0x2000, 0x395: 0x2000, 0x396: 0x2000, 0x397: 0x2000, + 0x398: 0x2000, 0x399: 0x2000, 0x39a: 0x2000, 0x39b: 0x2000, 0x39c: 0x2000, 0x39d: 0x2000, + 0x39e: 0x2000, 0x39f: 0x2000, 0x3a0: 0x2000, 0x3a1: 0x2000, 0x3a2: 0x2000, 0x3a3: 0x2000, + 0x3a4: 0x2000, 0x3a5: 0x2000, 0x3a6: 0x2000, 0x3a7: 0x2000, 0x3a8: 0x2000, 0x3a9: 0x2000, + 0x3aa: 0x2000, 0x3ab: 0x2000, 0x3ac: 0x2000, 0x3ad: 0x2000, 0x3ae: 0x2000, 0x3af: 0x2000, + 0x3b0: 0x2000, 0x3b1: 0x2000, 0x3b2: 0x2000, 0x3b3: 0x2000, 0x3b4: 0x2000, 0x3b5: 0x2000, + 0x3b6: 0x2000, 0x3b7: 0x2000, 0x3b8: 0x2000, 0x3b9: 0x2000, 0x3ba: 0x2000, 0x3bb: 0x2000, + 0x3bc: 0x2000, 0x3bd: 0x2000, 0x3be: 0x2000, 0x3bf: 0x2000, + // Block 0xf, offset 0x3c0 + 0x3c0: 0x2000, 0x3c1: 0x2000, 0x3c2: 0x2000, 0x3c3: 0x2000, 0x3c4: 0x2000, 0x3c5: 0x2000, + 0x3c6: 0x2000, 0x3c7: 0x2000, 0x3c8: 0x2000, 0x3c9: 0x2000, 0x3ca: 0x2000, 0x3cb: 0x2000, + 0x3cc: 0x2000, 0x3cd: 0x2000, 0x3ce: 0x2000, 0x3cf: 0x2000, 0x3d1: 0x2000, + // Block 0x10, offset 0x400 + 0x400: 0x4000, 0x401: 0x4000, 0x402: 0x4000, 0x403: 0x4000, 0x404: 0x4000, 0x405: 0x4000, + 0x406: 0x4000, 0x407: 0x4000, 0x408: 0x4000, 0x409: 0x4000, 0x40a: 0x4000, 0x40b: 0x4000, + 0x40c: 0x4000, 
0x40d: 0x4000, 0x40e: 0x4000, 0x40f: 0x4000, 0x410: 0x4000, 0x411: 0x4000, + 0x412: 0x4000, 0x413: 0x4000, 0x414: 0x4000, 0x415: 0x4000, 0x416: 0x4000, 0x417: 0x4000, + 0x418: 0x4000, 0x419: 0x4000, 0x41a: 0x4000, 0x41b: 0x4000, 0x41c: 0x4000, 0x41d: 0x4000, + 0x41e: 0x4000, 0x41f: 0x4000, 0x420: 0x4000, 0x421: 0x4000, 0x422: 0x4000, 0x423: 0x4000, + 0x424: 0x4000, 0x425: 0x4000, 0x426: 0x4000, 0x427: 0x4000, 0x428: 0x4000, 0x429: 0x4000, + 0x42a: 0x4000, 0x42b: 0x4000, 0x42c: 0x4000, 0x42d: 0x4000, 0x42e: 0x4000, 0x42f: 0x4000, + 0x430: 0x4000, 0x431: 0x4000, 0x432: 0x4000, 0x433: 0x4000, 0x434: 0x4000, 0x435: 0x4000, + 0x436: 0x4000, 0x437: 0x4000, 0x438: 0x4000, 0x439: 0x4000, 0x43a: 0x4000, 0x43b: 0x4000, + 0x43c: 0x4000, 0x43d: 0x4000, 0x43e: 0x4000, 0x43f: 0x4000, + // Block 0x11, offset 0x440 + 0x440: 0x4000, 0x441: 0x4000, 0x442: 0x4000, 0x443: 0x4000, 0x444: 0x4000, 0x445: 0x4000, + 0x446: 0x4000, 0x447: 0x4000, 0x448: 0x4000, 0x449: 0x4000, 0x44a: 0x4000, 0x44b: 0x4000, + 0x44c: 0x4000, 0x44d: 0x4000, 0x44e: 0x4000, 0x44f: 0x4000, 0x450: 0x4000, 0x451: 0x4000, + 0x452: 0x4000, 0x453: 0x4000, 0x454: 0x4000, 0x455: 0x4000, 0x456: 0x4000, 0x457: 0x4000, + 0x458: 0x4000, 0x459: 0x4000, 0x45a: 0x4000, 0x45b: 0x4000, 0x45c: 0x4000, 0x45d: 0x4000, + 0x45e: 0x4000, 0x45f: 0x4000, + // Block 0x12, offset 0x480 + 0x490: 0x2000, + 0x493: 0x2000, 0x494: 0x2000, 0x495: 0x2000, 0x496: 0x2000, + 0x498: 0x2000, 0x499: 0x2000, 0x49c: 0x2000, 0x49d: 0x2000, + 0x4a0: 0x2000, 0x4a1: 0x2000, 0x4a2: 0x2000, + 0x4a4: 0x2000, 0x4a5: 0x2000, 0x4a6: 0x2000, 0x4a7: 0x2000, + 0x4b0: 0x2000, 0x4b2: 0x2000, 0x4b3: 0x2000, 0x4b5: 0x2000, + 0x4bb: 0x2000, + 0x4be: 0x2000, + // Block 0x13, offset 0x4c0 + 0x4f4: 0x2000, + 0x4ff: 0x2000, + // Block 0x14, offset 0x500 + 0x501: 0x2000, 0x502: 0x2000, 0x503: 0x2000, 0x504: 0x2000, + 0x529: 0xa009, + 0x52c: 0x2000, + // Block 0x15, offset 0x540 + 0x543: 0x2000, 0x545: 0x2000, + 0x549: 0x2000, + 0x553: 0x2000, 0x556: 0x2000, + 0x561: 0x2000, 0x562: 0x2000, + 0x566: 0x2000, + 0x56b: 0x2000, + // Block 0x16, offset 0x580 + 0x593: 0x2000, 0x594: 0x2000, + 0x59b: 0x2000, 0x59c: 0x2000, 0x59d: 0x2000, + 0x59e: 0x2000, 0x5a0: 0x2000, 0x5a1: 0x2000, 0x5a2: 0x2000, 0x5a3: 0x2000, + 0x5a4: 0x2000, 0x5a5: 0x2000, 0x5a6: 0x2000, 0x5a7: 0x2000, 0x5a8: 0x2000, 0x5a9: 0x2000, + 0x5aa: 0x2000, 0x5ab: 0x2000, + 0x5b0: 0x2000, 0x5b1: 0x2000, 0x5b2: 0x2000, 0x5b3: 0x2000, 0x5b4: 0x2000, 0x5b5: 0x2000, + 0x5b6: 0x2000, 0x5b7: 0x2000, 0x5b8: 0x2000, 0x5b9: 0x2000, + // Block 0x17, offset 0x5c0 + 0x5c9: 0x2000, + 0x5d0: 0x200a, 0x5d1: 0x200b, + 0x5d2: 0x200a, 0x5d3: 0x200c, 0x5d4: 0x2000, 0x5d5: 0x2000, 0x5d6: 0x2000, 0x5d7: 0x2000, + 0x5d8: 0x2000, 0x5d9: 0x2000, + 0x5f8: 0x2000, 0x5f9: 0x2000, + // Block 0x18, offset 0x600 + 0x612: 0x2000, 0x614: 0x2000, + 0x627: 0x2000, + // Block 0x19, offset 0x640 + 0x640: 0x2000, 0x642: 0x2000, 0x643: 0x2000, + 0x647: 0x2000, 0x648: 0x2000, 0x64b: 0x2000, + 0x64f: 0x2000, 0x651: 0x2000, + 0x655: 0x2000, + 0x65a: 0x2000, 0x65d: 0x2000, + 0x65e: 0x2000, 0x65f: 0x2000, 0x660: 0x2000, 0x663: 0x2000, + 0x665: 0x2000, 0x667: 0x2000, 0x668: 0x2000, 0x669: 0x2000, + 0x66a: 0x2000, 0x66b: 0x2000, 0x66c: 0x2000, 0x66e: 0x2000, + 0x674: 0x2000, 0x675: 0x2000, + 0x676: 0x2000, 0x677: 0x2000, + 0x67c: 0x2000, 0x67d: 0x2000, + // Block 0x1a, offset 0x680 + 0x688: 0x2000, + 0x68c: 0x2000, + 0x692: 0x2000, + 0x6a0: 0x2000, 0x6a1: 0x2000, + 0x6a4: 0x2000, 0x6a5: 0x2000, 0x6a6: 0x2000, 0x6a7: 0x2000, + 0x6aa: 0x2000, 0x6ab: 0x2000, 0x6ae: 0x2000, 0x6af: 0x2000, + // 
Block 0x1b, offset 0x6c0 + 0x6c2: 0x2000, 0x6c3: 0x2000, + 0x6c6: 0x2000, 0x6c7: 0x2000, + 0x6d5: 0x2000, + 0x6d9: 0x2000, + 0x6e5: 0x2000, + 0x6ff: 0x2000, + // Block 0x1c, offset 0x700 + 0x712: 0x2000, + 0x71a: 0x4000, 0x71b: 0x4000, + 0x729: 0x4000, + 0x72a: 0x4000, + // Block 0x1d, offset 0x740 + 0x769: 0x4000, + 0x76a: 0x4000, 0x76b: 0x4000, 0x76c: 0x4000, + 0x770: 0x4000, 0x773: 0x4000, + // Block 0x1e, offset 0x780 + 0x7a0: 0x2000, 0x7a1: 0x2000, 0x7a2: 0x2000, 0x7a3: 0x2000, + 0x7a4: 0x2000, 0x7a5: 0x2000, 0x7a6: 0x2000, 0x7a7: 0x2000, 0x7a8: 0x2000, 0x7a9: 0x2000, + 0x7aa: 0x2000, 0x7ab: 0x2000, 0x7ac: 0x2000, 0x7ad: 0x2000, 0x7ae: 0x2000, 0x7af: 0x2000, + 0x7b0: 0x2000, 0x7b1: 0x2000, 0x7b2: 0x2000, 0x7b3: 0x2000, 0x7b4: 0x2000, 0x7b5: 0x2000, + 0x7b6: 0x2000, 0x7b7: 0x2000, 0x7b8: 0x2000, 0x7b9: 0x2000, 0x7ba: 0x2000, 0x7bb: 0x2000, + 0x7bc: 0x2000, 0x7bd: 0x2000, 0x7be: 0x2000, 0x7bf: 0x2000, + // Block 0x1f, offset 0x7c0 + 0x7c0: 0x2000, 0x7c1: 0x2000, 0x7c2: 0x2000, 0x7c3: 0x2000, 0x7c4: 0x2000, 0x7c5: 0x2000, + 0x7c6: 0x2000, 0x7c7: 0x2000, 0x7c8: 0x2000, 0x7c9: 0x2000, 0x7ca: 0x2000, 0x7cb: 0x2000, + 0x7cc: 0x2000, 0x7cd: 0x2000, 0x7ce: 0x2000, 0x7cf: 0x2000, 0x7d0: 0x2000, 0x7d1: 0x2000, + 0x7d2: 0x2000, 0x7d3: 0x2000, 0x7d4: 0x2000, 0x7d5: 0x2000, 0x7d6: 0x2000, 0x7d7: 0x2000, + 0x7d8: 0x2000, 0x7d9: 0x2000, 0x7da: 0x2000, 0x7db: 0x2000, 0x7dc: 0x2000, 0x7dd: 0x2000, + 0x7de: 0x2000, 0x7df: 0x2000, 0x7e0: 0x2000, 0x7e1: 0x2000, 0x7e2: 0x2000, 0x7e3: 0x2000, + 0x7e4: 0x2000, 0x7e5: 0x2000, 0x7e6: 0x2000, 0x7e7: 0x2000, 0x7e8: 0x2000, 0x7e9: 0x2000, + 0x7eb: 0x2000, 0x7ec: 0x2000, 0x7ed: 0x2000, 0x7ee: 0x2000, 0x7ef: 0x2000, + 0x7f0: 0x2000, 0x7f1: 0x2000, 0x7f2: 0x2000, 0x7f3: 0x2000, 0x7f4: 0x2000, 0x7f5: 0x2000, + 0x7f6: 0x2000, 0x7f7: 0x2000, 0x7f8: 0x2000, 0x7f9: 0x2000, 0x7fa: 0x2000, 0x7fb: 0x2000, + 0x7fc: 0x2000, 0x7fd: 0x2000, 0x7fe: 0x2000, 0x7ff: 0x2000, + // Block 0x20, offset 0x800 + 0x800: 0x2000, 0x801: 0x2000, 0x802: 0x200d, 0x803: 0x2000, 0x804: 0x2000, 0x805: 0x2000, + 0x806: 0x2000, 0x807: 0x2000, 0x808: 0x2000, 0x809: 0x2000, 0x80a: 0x2000, 0x80b: 0x2000, + 0x80c: 0x2000, 0x80d: 0x2000, 0x80e: 0x2000, 0x80f: 0x2000, 0x810: 0x2000, 0x811: 0x2000, + 0x812: 0x2000, 0x813: 0x2000, 0x814: 0x2000, 0x815: 0x2000, 0x816: 0x2000, 0x817: 0x2000, + 0x818: 0x2000, 0x819: 0x2000, 0x81a: 0x2000, 0x81b: 0x2000, 0x81c: 0x2000, 0x81d: 0x2000, + 0x81e: 0x2000, 0x81f: 0x2000, 0x820: 0x2000, 0x821: 0x2000, 0x822: 0x2000, 0x823: 0x2000, + 0x824: 0x2000, 0x825: 0x2000, 0x826: 0x2000, 0x827: 0x2000, 0x828: 0x2000, 0x829: 0x2000, + 0x82a: 0x2000, 0x82b: 0x2000, 0x82c: 0x2000, 0x82d: 0x2000, 0x82e: 0x2000, 0x82f: 0x2000, + 0x830: 0x2000, 0x831: 0x2000, 0x832: 0x2000, 0x833: 0x2000, 0x834: 0x2000, 0x835: 0x2000, + 0x836: 0x2000, 0x837: 0x2000, 0x838: 0x2000, 0x839: 0x2000, 0x83a: 0x2000, 0x83b: 0x2000, + 0x83c: 0x2000, 0x83d: 0x2000, 0x83e: 0x2000, 0x83f: 0x2000, + // Block 0x21, offset 0x840 + 0x840: 0x2000, 0x841: 0x2000, 0x842: 0x2000, 0x843: 0x2000, 0x844: 0x2000, 0x845: 0x2000, + 0x846: 0x2000, 0x847: 0x2000, 0x848: 0x2000, 0x849: 0x2000, 0x84a: 0x2000, 0x84b: 0x2000, + 0x850: 0x2000, 0x851: 0x2000, + 0x852: 0x2000, 0x853: 0x2000, 0x854: 0x2000, 0x855: 0x2000, 0x856: 0x2000, 0x857: 0x2000, + 0x858: 0x2000, 0x859: 0x2000, 0x85a: 0x2000, 0x85b: 0x2000, 0x85c: 0x2000, 0x85d: 0x2000, + 0x85e: 0x2000, 0x85f: 0x2000, 0x860: 0x2000, 0x861: 0x2000, 0x862: 0x2000, 0x863: 0x2000, + 0x864: 0x2000, 0x865: 0x2000, 0x866: 0x2000, 0x867: 0x2000, 0x868: 0x2000, 0x869: 0x2000, + 0x86a: 
0x2000, 0x86b: 0x2000, 0x86c: 0x2000, 0x86d: 0x2000, 0x86e: 0x2000, 0x86f: 0x2000, + 0x870: 0x2000, 0x871: 0x2000, 0x872: 0x2000, 0x873: 0x2000, + // Block 0x22, offset 0x880 + 0x880: 0x2000, 0x881: 0x2000, 0x882: 0x2000, 0x883: 0x2000, 0x884: 0x2000, 0x885: 0x2000, + 0x886: 0x2000, 0x887: 0x2000, 0x888: 0x2000, 0x889: 0x2000, 0x88a: 0x2000, 0x88b: 0x2000, + 0x88c: 0x2000, 0x88d: 0x2000, 0x88e: 0x2000, 0x88f: 0x2000, + 0x892: 0x2000, 0x893: 0x2000, 0x894: 0x2000, 0x895: 0x2000, + 0x8a0: 0x200e, 0x8a1: 0x2000, 0x8a3: 0x2000, + 0x8a4: 0x2000, 0x8a5: 0x2000, 0x8a6: 0x2000, 0x8a7: 0x2000, 0x8a8: 0x2000, 0x8a9: 0x2000, + 0x8b2: 0x2000, 0x8b3: 0x2000, + 0x8b6: 0x2000, 0x8b7: 0x2000, + 0x8bc: 0x2000, 0x8bd: 0x2000, + // Block 0x23, offset 0x8c0 + 0x8c0: 0x2000, 0x8c1: 0x2000, + 0x8c6: 0x2000, 0x8c7: 0x2000, 0x8c8: 0x2000, 0x8cb: 0x200f, + 0x8ce: 0x2000, 0x8cf: 0x2000, 0x8d0: 0x2000, 0x8d1: 0x2000, + 0x8e2: 0x2000, 0x8e3: 0x2000, + 0x8e4: 0x2000, 0x8e5: 0x2000, + 0x8ef: 0x2000, + 0x8fd: 0x4000, 0x8fe: 0x4000, + // Block 0x24, offset 0x900 + 0x905: 0x2000, + 0x906: 0x2000, 0x909: 0x2000, + 0x90e: 0x2000, 0x90f: 0x2000, + 0x914: 0x4000, 0x915: 0x4000, + 0x91c: 0x2000, + 0x91e: 0x2000, + // Block 0x25, offset 0x940 + 0x940: 0x2000, 0x942: 0x2000, + 0x948: 0x4000, 0x949: 0x4000, 0x94a: 0x4000, 0x94b: 0x4000, + 0x94c: 0x4000, 0x94d: 0x4000, 0x94e: 0x4000, 0x94f: 0x4000, 0x950: 0x4000, 0x951: 0x4000, + 0x952: 0x4000, 0x953: 0x4000, + 0x960: 0x2000, 0x961: 0x2000, 0x963: 0x2000, + 0x964: 0x2000, 0x965: 0x2000, 0x967: 0x2000, 0x968: 0x2000, 0x969: 0x2000, + 0x96a: 0x2000, 0x96c: 0x2000, 0x96d: 0x2000, 0x96f: 0x2000, + 0x97f: 0x4000, + // Block 0x26, offset 0x980 + 0x993: 0x4000, + 0x99e: 0x2000, 0x99f: 0x2000, 0x9a1: 0x4000, + 0x9aa: 0x4000, 0x9ab: 0x4000, + 0x9bd: 0x4000, 0x9be: 0x4000, 0x9bf: 0x2000, + // Block 0x27, offset 0x9c0 + 0x9c4: 0x4000, 0x9c5: 0x4000, + 0x9c6: 0x2000, 0x9c7: 0x2000, 0x9c8: 0x2000, 0x9c9: 0x2000, 0x9ca: 0x2000, 0x9cb: 0x2000, + 0x9cc: 0x2000, 0x9cd: 0x2000, 0x9ce: 0x4000, 0x9cf: 0x2000, 0x9d0: 0x2000, 0x9d1: 0x2000, + 0x9d2: 0x2000, 0x9d3: 0x2000, 0x9d4: 0x4000, 0x9d5: 0x2000, 0x9d6: 0x2000, 0x9d7: 0x2000, + 0x9d8: 0x2000, 0x9d9: 0x2000, 0x9da: 0x2000, 0x9db: 0x2000, 0x9dc: 0x2000, 0x9dd: 0x2000, + 0x9de: 0x2000, 0x9df: 0x2000, 0x9e0: 0x2000, 0x9e1: 0x2000, 0x9e3: 0x2000, + 0x9e8: 0x2000, 0x9e9: 0x2000, + 0x9ea: 0x4000, 0x9eb: 0x2000, 0x9ec: 0x2000, 0x9ed: 0x2000, 0x9ee: 0x2000, 0x9ef: 0x2000, + 0x9f0: 0x2000, 0x9f1: 0x2000, 0x9f2: 0x4000, 0x9f3: 0x4000, 0x9f4: 0x2000, 0x9f5: 0x4000, + 0x9f6: 0x2000, 0x9f7: 0x2000, 0x9f8: 0x2000, 0x9f9: 0x2000, 0x9fa: 0x4000, 0x9fb: 0x2000, + 0x9fc: 0x2000, 0x9fd: 0x4000, 0x9fe: 0x2000, 0x9ff: 0x2000, + // Block 0x28, offset 0xa00 + 0xa05: 0x4000, + 0xa0a: 0x4000, 0xa0b: 0x4000, + 0xa28: 0x4000, + 0xa3d: 0x2000, + // Block 0x29, offset 0xa40 + 0xa4c: 0x4000, 0xa4e: 0x4000, + 0xa53: 0x4000, 0xa54: 0x4000, 0xa55: 0x4000, 0xa57: 0x4000, + 0xa76: 0x2000, 0xa77: 0x2000, 0xa78: 0x2000, 0xa79: 0x2000, 0xa7a: 0x2000, 0xa7b: 0x2000, + 0xa7c: 0x2000, 0xa7d: 0x2000, 0xa7e: 0x2000, 0xa7f: 0x2000, + // Block 0x2a, offset 0xa80 + 0xa95: 0x4000, 0xa96: 0x4000, 0xa97: 0x4000, + 0xab0: 0x4000, + 0xabf: 0x4000, + // Block 0x2b, offset 0xac0 + 0xae6: 0x6000, 0xae7: 0x6000, 0xae8: 0x6000, 0xae9: 0x6000, + 0xaea: 0x6000, 0xaeb: 0x6000, 0xaec: 0x6000, 0xaed: 0x6000, + // Block 0x2c, offset 0xb00 + 0xb05: 0x6010, + 0xb06: 0x6011, + // Block 0x2d, offset 0xb40 + 0xb5b: 0x4000, 0xb5c: 0x4000, + // Block 0x2e, offset 0xb80 + 0xb90: 0x4000, + 0xb95: 0x4000, 0xb96: 
0x2000, 0xb97: 0x2000, + 0xb98: 0x2000, 0xb99: 0x2000, + // Block 0x2f, offset 0xbc0 + 0xbc0: 0x4000, 0xbc1: 0x4000, 0xbc2: 0x4000, 0xbc3: 0x4000, 0xbc4: 0x4000, 0xbc5: 0x4000, + 0xbc6: 0x4000, 0xbc7: 0x4000, 0xbc8: 0x4000, 0xbc9: 0x4000, 0xbca: 0x4000, 0xbcb: 0x4000, + 0xbcc: 0x4000, 0xbcd: 0x4000, 0xbce: 0x4000, 0xbcf: 0x4000, 0xbd0: 0x4000, 0xbd1: 0x4000, + 0xbd2: 0x4000, 0xbd3: 0x4000, 0xbd4: 0x4000, 0xbd5: 0x4000, 0xbd6: 0x4000, 0xbd7: 0x4000, + 0xbd8: 0x4000, 0xbd9: 0x4000, 0xbdb: 0x4000, 0xbdc: 0x4000, 0xbdd: 0x4000, + 0xbde: 0x4000, 0xbdf: 0x4000, 0xbe0: 0x4000, 0xbe1: 0x4000, 0xbe2: 0x4000, 0xbe3: 0x4000, + 0xbe4: 0x4000, 0xbe5: 0x4000, 0xbe6: 0x4000, 0xbe7: 0x4000, 0xbe8: 0x4000, 0xbe9: 0x4000, + 0xbea: 0x4000, 0xbeb: 0x4000, 0xbec: 0x4000, 0xbed: 0x4000, 0xbee: 0x4000, 0xbef: 0x4000, + 0xbf0: 0x4000, 0xbf1: 0x4000, 0xbf2: 0x4000, 0xbf3: 0x4000, 0xbf4: 0x4000, 0xbf5: 0x4000, + 0xbf6: 0x4000, 0xbf7: 0x4000, 0xbf8: 0x4000, 0xbf9: 0x4000, 0xbfa: 0x4000, 0xbfb: 0x4000, + 0xbfc: 0x4000, 0xbfd: 0x4000, 0xbfe: 0x4000, 0xbff: 0x4000, + // Block 0x30, offset 0xc00 + 0xc00: 0x4000, 0xc01: 0x4000, 0xc02: 0x4000, 0xc03: 0x4000, 0xc04: 0x4000, 0xc05: 0x4000, + 0xc06: 0x4000, 0xc07: 0x4000, 0xc08: 0x4000, 0xc09: 0x4000, 0xc0a: 0x4000, 0xc0b: 0x4000, + 0xc0c: 0x4000, 0xc0d: 0x4000, 0xc0e: 0x4000, 0xc0f: 0x4000, 0xc10: 0x4000, 0xc11: 0x4000, + 0xc12: 0x4000, 0xc13: 0x4000, 0xc14: 0x4000, 0xc15: 0x4000, 0xc16: 0x4000, 0xc17: 0x4000, + 0xc18: 0x4000, 0xc19: 0x4000, 0xc1a: 0x4000, 0xc1b: 0x4000, 0xc1c: 0x4000, 0xc1d: 0x4000, + 0xc1e: 0x4000, 0xc1f: 0x4000, 0xc20: 0x4000, 0xc21: 0x4000, 0xc22: 0x4000, 0xc23: 0x4000, + 0xc24: 0x4000, 0xc25: 0x4000, 0xc26: 0x4000, 0xc27: 0x4000, 0xc28: 0x4000, 0xc29: 0x4000, + 0xc2a: 0x4000, 0xc2b: 0x4000, 0xc2c: 0x4000, 0xc2d: 0x4000, 0xc2e: 0x4000, 0xc2f: 0x4000, + 0xc30: 0x4000, 0xc31: 0x4000, 0xc32: 0x4000, 0xc33: 0x4000, + // Block 0x31, offset 0xc40 + 0xc40: 0x4000, 0xc41: 0x4000, 0xc42: 0x4000, 0xc43: 0x4000, 0xc44: 0x4000, 0xc45: 0x4000, + 0xc46: 0x4000, 0xc47: 0x4000, 0xc48: 0x4000, 0xc49: 0x4000, 0xc4a: 0x4000, 0xc4b: 0x4000, + 0xc4c: 0x4000, 0xc4d: 0x4000, 0xc4e: 0x4000, 0xc4f: 0x4000, 0xc50: 0x4000, 0xc51: 0x4000, + 0xc52: 0x4000, 0xc53: 0x4000, 0xc54: 0x4000, 0xc55: 0x4000, + 0xc70: 0x4000, 0xc71: 0x4000, 0xc72: 0x4000, 0xc73: 0x4000, 0xc74: 0x4000, 0xc75: 0x4000, + 0xc76: 0x4000, 0xc77: 0x4000, 0xc78: 0x4000, 0xc79: 0x4000, 0xc7a: 0x4000, 0xc7b: 0x4000, + // Block 0x32, offset 0xc80 + 0xc80: 0x9012, 0xc81: 0x4013, 0xc82: 0x4014, 0xc83: 0x4000, 0xc84: 0x4000, 0xc85: 0x4000, + 0xc86: 0x4000, 0xc87: 0x4000, 0xc88: 0x4000, 0xc89: 0x4000, 0xc8a: 0x4000, 0xc8b: 0x4000, + 0xc8c: 0x4015, 0xc8d: 0x4015, 0xc8e: 0x4000, 0xc8f: 0x4000, 0xc90: 0x4000, 0xc91: 0x4000, + 0xc92: 0x4000, 0xc93: 0x4000, 0xc94: 0x4000, 0xc95: 0x4000, 0xc96: 0x4000, 0xc97: 0x4000, + 0xc98: 0x4000, 0xc99: 0x4000, 0xc9a: 0x4000, 0xc9b: 0x4000, 0xc9c: 0x4000, 0xc9d: 0x4000, + 0xc9e: 0x4000, 0xc9f: 0x4000, 0xca0: 0x4000, 0xca1: 0x4000, 0xca2: 0x4000, 0xca3: 0x4000, + 0xca4: 0x4000, 0xca5: 0x4000, 0xca6: 0x4000, 0xca7: 0x4000, 0xca8: 0x4000, 0xca9: 0x4000, + 0xcaa: 0x4000, 0xcab: 0x4000, 0xcac: 0x4000, 0xcad: 0x4000, 0xcae: 0x4000, 0xcaf: 0x4000, + 0xcb0: 0x4000, 0xcb1: 0x4000, 0xcb2: 0x4000, 0xcb3: 0x4000, 0xcb4: 0x4000, 0xcb5: 0x4000, + 0xcb6: 0x4000, 0xcb7: 0x4000, 0xcb8: 0x4000, 0xcb9: 0x4000, 0xcba: 0x4000, 0xcbb: 0x4000, + 0xcbc: 0x4000, 0xcbd: 0x4000, 0xcbe: 0x4000, + // Block 0x33, offset 0xcc0 + 0xcc1: 0x4000, 0xcc2: 0x4000, 0xcc3: 0x4000, 0xcc4: 0x4000, 0xcc5: 0x4000, + 0xcc6: 0x4000, 
0xcc7: 0x4000, 0xcc8: 0x4000, 0xcc9: 0x4000, 0xcca: 0x4000, 0xccb: 0x4000, + 0xccc: 0x4000, 0xccd: 0x4000, 0xcce: 0x4000, 0xccf: 0x4000, 0xcd0: 0x4000, 0xcd1: 0x4000, + 0xcd2: 0x4000, 0xcd3: 0x4000, 0xcd4: 0x4000, 0xcd5: 0x4000, 0xcd6: 0x4000, 0xcd7: 0x4000, + 0xcd8: 0x4000, 0xcd9: 0x4000, 0xcda: 0x4000, 0xcdb: 0x4000, 0xcdc: 0x4000, 0xcdd: 0x4000, + 0xcde: 0x4000, 0xcdf: 0x4000, 0xce0: 0x4000, 0xce1: 0x4000, 0xce2: 0x4000, 0xce3: 0x4000, + 0xce4: 0x4000, 0xce5: 0x4000, 0xce6: 0x4000, 0xce7: 0x4000, 0xce8: 0x4000, 0xce9: 0x4000, + 0xcea: 0x4000, 0xceb: 0x4000, 0xcec: 0x4000, 0xced: 0x4000, 0xcee: 0x4000, 0xcef: 0x4000, + 0xcf0: 0x4000, 0xcf1: 0x4000, 0xcf2: 0x4000, 0xcf3: 0x4000, 0xcf4: 0x4000, 0xcf5: 0x4000, + 0xcf6: 0x4000, 0xcf7: 0x4000, 0xcf8: 0x4000, 0xcf9: 0x4000, 0xcfa: 0x4000, 0xcfb: 0x4000, + 0xcfc: 0x4000, 0xcfd: 0x4000, 0xcfe: 0x4000, 0xcff: 0x4000, + // Block 0x34, offset 0xd00 + 0xd00: 0x4000, 0xd01: 0x4000, 0xd02: 0x4000, 0xd03: 0x4000, 0xd04: 0x4000, 0xd05: 0x4000, + 0xd06: 0x4000, 0xd07: 0x4000, 0xd08: 0x4000, 0xd09: 0x4000, 0xd0a: 0x4000, 0xd0b: 0x4000, + 0xd0c: 0x4000, 0xd0d: 0x4000, 0xd0e: 0x4000, 0xd0f: 0x4000, 0xd10: 0x4000, 0xd11: 0x4000, + 0xd12: 0x4000, 0xd13: 0x4000, 0xd14: 0x4000, 0xd15: 0x4000, 0xd16: 0x4000, + 0xd19: 0x4016, 0xd1a: 0x4017, 0xd1b: 0x4000, 0xd1c: 0x4000, 0xd1d: 0x4000, + 0xd1e: 0x4000, 0xd1f: 0x4000, 0xd20: 0x4000, 0xd21: 0x4018, 0xd22: 0x4019, 0xd23: 0x401a, + 0xd24: 0x401b, 0xd25: 0x401c, 0xd26: 0x401d, 0xd27: 0x401e, 0xd28: 0x401f, 0xd29: 0x4020, + 0xd2a: 0x4021, 0xd2b: 0x4022, 0xd2c: 0x4000, 0xd2d: 0x4010, 0xd2e: 0x4000, 0xd2f: 0x4023, + 0xd30: 0x4000, 0xd31: 0x4024, 0xd32: 0x4000, 0xd33: 0x4025, 0xd34: 0x4000, 0xd35: 0x4026, + 0xd36: 0x4000, 0xd37: 0x401a, 0xd38: 0x4000, 0xd39: 0x4027, 0xd3a: 0x4000, 0xd3b: 0x4028, + 0xd3c: 0x4000, 0xd3d: 0x4020, 0xd3e: 0x4000, 0xd3f: 0x4029, + // Block 0x35, offset 0xd40 + 0xd40: 0x4000, 0xd41: 0x402a, 0xd42: 0x4000, 0xd43: 0x402b, 0xd44: 0x402c, 0xd45: 0x4000, + 0xd46: 0x4017, 0xd47: 0x4000, 0xd48: 0x402d, 0xd49: 0x4000, 0xd4a: 0x402e, 0xd4b: 0x402f, + 0xd4c: 0x4030, 0xd4d: 0x4017, 0xd4e: 0x4016, 0xd4f: 0x4017, 0xd50: 0x4000, 0xd51: 0x4000, + 0xd52: 0x4031, 0xd53: 0x4000, 0xd54: 0x4000, 0xd55: 0x4031, 0xd56: 0x4000, 0xd57: 0x4000, + 0xd58: 0x4032, 0xd59: 0x4000, 0xd5a: 0x4000, 0xd5b: 0x4032, 0xd5c: 0x4000, 0xd5d: 0x4000, + 0xd5e: 0x4033, 0xd5f: 0x402e, 0xd60: 0x4034, 0xd61: 0x4035, 0xd62: 0x4034, 0xd63: 0x4036, + 0xd64: 0x4037, 0xd65: 0x4024, 0xd66: 0x4035, 0xd67: 0x4025, 0xd68: 0x4038, 0xd69: 0x4038, + 0xd6a: 0x4039, 0xd6b: 0x4039, 0xd6c: 0x403a, 0xd6d: 0x403a, 0xd6e: 0x4000, 0xd6f: 0x4035, + 0xd70: 0x4000, 0xd71: 0x4000, 0xd72: 0x403b, 0xd73: 0x403c, 0xd74: 0x4000, 0xd75: 0x4000, + 0xd76: 0x4000, 0xd77: 0x4000, 0xd78: 0x4000, 0xd79: 0x4000, 0xd7a: 0x4000, 0xd7b: 0x403d, + 0xd7c: 0x401c, 0xd7d: 0x4000, 0xd7e: 0x4000, 0xd7f: 0x4000, + // Block 0x36, offset 0xd80 + 0xd85: 0x4000, + 0xd86: 0x4000, 0xd87: 0x4000, 0xd88: 0x4000, 0xd89: 0x4000, 0xd8a: 0x4000, 0xd8b: 0x4000, + 0xd8c: 0x4000, 0xd8d: 0x4000, 0xd8e: 0x4000, 0xd8f: 0x4000, 0xd90: 0x4000, 0xd91: 0x4000, + 0xd92: 0x4000, 0xd93: 0x4000, 0xd94: 0x4000, 0xd95: 0x4000, 0xd96: 0x4000, 0xd97: 0x4000, + 0xd98: 0x4000, 0xd99: 0x4000, 0xd9a: 0x4000, 0xd9b: 0x4000, 0xd9c: 0x4000, 0xd9d: 0x4000, + 0xd9e: 0x4000, 0xd9f: 0x4000, 0xda0: 0x4000, 0xda1: 0x4000, 0xda2: 0x4000, 0xda3: 0x4000, + 0xda4: 0x4000, 0xda5: 0x4000, 0xda6: 0x4000, 0xda7: 0x4000, 0xda8: 0x4000, 0xda9: 0x4000, + 0xdaa: 0x4000, 0xdab: 0x4000, 0xdac: 0x4000, 0xdad: 0x4000, 0xdae: 0x4000, + 0xdb1: 
0x403e, 0xdb2: 0x403e, 0xdb3: 0x403e, 0xdb4: 0x403e, 0xdb5: 0x403e, + 0xdb6: 0x403e, 0xdb7: 0x403e, 0xdb8: 0x403e, 0xdb9: 0x403e, 0xdba: 0x403e, 0xdbb: 0x403e, + 0xdbc: 0x403e, 0xdbd: 0x403e, 0xdbe: 0x403e, 0xdbf: 0x403e, + // Block 0x37, offset 0xdc0 + 0xdc0: 0x4037, 0xdc1: 0x4037, 0xdc2: 0x4037, 0xdc3: 0x4037, 0xdc4: 0x4037, 0xdc5: 0x4037, + 0xdc6: 0x4037, 0xdc7: 0x4037, 0xdc8: 0x4037, 0xdc9: 0x4037, 0xdca: 0x4037, 0xdcb: 0x4037, + 0xdcc: 0x4037, 0xdcd: 0x4037, 0xdce: 0x4037, 0xdcf: 0x400e, 0xdd0: 0x403f, 0xdd1: 0x4040, + 0xdd2: 0x4041, 0xdd3: 0x4040, 0xdd4: 0x403f, 0xdd5: 0x4042, 0xdd6: 0x4043, 0xdd7: 0x4044, + 0xdd8: 0x4040, 0xdd9: 0x4041, 0xdda: 0x4040, 0xddb: 0x4045, 0xddc: 0x4009, 0xddd: 0x4045, + 0xdde: 0x4046, 0xddf: 0x4045, 0xde0: 0x4047, 0xde1: 0x400b, 0xde2: 0x400a, 0xde3: 0x400c, + 0xde4: 0x4048, 0xde5: 0x4000, 0xde6: 0x4000, 0xde7: 0x4000, 0xde8: 0x4000, 0xde9: 0x4000, + 0xdea: 0x4000, 0xdeb: 0x4000, 0xdec: 0x4000, 0xded: 0x4000, 0xdee: 0x4000, 0xdef: 0x4000, + 0xdf0: 0x4000, 0xdf1: 0x4000, 0xdf2: 0x4000, 0xdf3: 0x4000, 0xdf4: 0x4000, 0xdf5: 0x4000, + 0xdf6: 0x4000, 0xdf7: 0x4000, 0xdf8: 0x4000, 0xdf9: 0x4000, 0xdfa: 0x4000, 0xdfb: 0x4000, + 0xdfc: 0x4000, 0xdfd: 0x4000, 0xdfe: 0x4000, 0xdff: 0x4000, + // Block 0x38, offset 0xe00 + 0xe00: 0x4000, 0xe01: 0x4000, 0xe02: 0x4000, 0xe03: 0x4000, 0xe04: 0x4000, 0xe05: 0x4000, + 0xe06: 0x4000, 0xe07: 0x4000, 0xe08: 0x4000, 0xe09: 0x4000, 0xe0a: 0x4000, 0xe0b: 0x4000, + 0xe0c: 0x4000, 0xe0d: 0x4000, 0xe0e: 0x4000, 0xe10: 0x4000, 0xe11: 0x4000, + 0xe12: 0x4000, 0xe13: 0x4000, 0xe14: 0x4000, 0xe15: 0x4000, 0xe16: 0x4000, 0xe17: 0x4000, + 0xe18: 0x4000, 0xe19: 0x4000, 0xe1a: 0x4000, 0xe1b: 0x4000, 0xe1c: 0x4000, 0xe1d: 0x4000, + 0xe1e: 0x4000, 0xe1f: 0x4000, 0xe20: 0x4000, 0xe21: 0x4000, 0xe22: 0x4000, 0xe23: 0x4000, + 0xe24: 0x4000, 0xe25: 0x4000, 0xe26: 0x4000, 0xe27: 0x4000, 0xe28: 0x4000, 0xe29: 0x4000, + 0xe2a: 0x4000, 0xe2b: 0x4000, 0xe2c: 0x4000, 0xe2d: 0x4000, 0xe2e: 0x4000, 0xe2f: 0x4000, + 0xe30: 0x4000, 0xe31: 0x4000, 0xe32: 0x4000, 0xe33: 0x4000, 0xe34: 0x4000, 0xe35: 0x4000, + 0xe36: 0x4000, 0xe37: 0x4000, 0xe38: 0x4000, 0xe39: 0x4000, 0xe3a: 0x4000, + // Block 0x39, offset 0xe40 + 0xe40: 0x4000, 0xe41: 0x4000, 0xe42: 0x4000, 0xe43: 0x4000, 0xe44: 0x4000, 0xe45: 0x4000, + 0xe46: 0x4000, 0xe47: 0x4000, 0xe48: 0x4000, 0xe49: 0x4000, 0xe4a: 0x4000, 0xe4b: 0x4000, + 0xe4c: 0x4000, 0xe4d: 0x4000, 0xe4e: 0x4000, 0xe4f: 0x4000, 0xe50: 0x4000, 0xe51: 0x4000, + 0xe52: 0x4000, 0xe53: 0x4000, 0xe54: 0x4000, 0xe55: 0x4000, 0xe56: 0x4000, 0xe57: 0x4000, + 0xe58: 0x4000, 0xe59: 0x4000, 0xe5a: 0x4000, 0xe5b: 0x4000, 0xe5c: 0x4000, 0xe5d: 0x4000, + 0xe5e: 0x4000, 0xe5f: 0x4000, 0xe60: 0x4000, 0xe61: 0x4000, 0xe62: 0x4000, 0xe63: 0x4000, + 0xe70: 0x4000, 0xe71: 0x4000, 0xe72: 0x4000, 0xe73: 0x4000, 0xe74: 0x4000, 0xe75: 0x4000, + 0xe76: 0x4000, 0xe77: 0x4000, 0xe78: 0x4000, 0xe79: 0x4000, 0xe7a: 0x4000, 0xe7b: 0x4000, + 0xe7c: 0x4000, 0xe7d: 0x4000, 0xe7e: 0x4000, 0xe7f: 0x4000, + // Block 0x3a, offset 0xe80 + 0xe80: 0x4000, 0xe81: 0x4000, 0xe82: 0x4000, 0xe83: 0x4000, 0xe84: 0x4000, 0xe85: 0x4000, + 0xe86: 0x4000, 0xe87: 0x4000, 0xe88: 0x4000, 0xe89: 0x4000, 0xe8a: 0x4000, 0xe8b: 0x4000, + 0xe8c: 0x4000, 0xe8d: 0x4000, 0xe8e: 0x4000, 0xe8f: 0x4000, 0xe90: 0x4000, 0xe91: 0x4000, + 0xe92: 0x4000, 0xe93: 0x4000, 0xe94: 0x4000, 0xe95: 0x4000, 0xe96: 0x4000, 0xe97: 0x4000, + 0xe98: 0x4000, 0xe99: 0x4000, 0xe9a: 0x4000, 0xe9b: 0x4000, 0xe9c: 0x4000, 0xe9d: 0x4000, + 0xe9e: 0x4000, 0xea0: 0x4000, 0xea1: 0x4000, 0xea2: 0x4000, 0xea3: 0x4000, + 
0xea4: 0x4000, 0xea5: 0x4000, 0xea6: 0x4000, 0xea7: 0x4000, 0xea8: 0x4000, 0xea9: 0x4000, + 0xeaa: 0x4000, 0xeab: 0x4000, 0xeac: 0x4000, 0xead: 0x4000, 0xeae: 0x4000, 0xeaf: 0x4000, + 0xeb0: 0x4000, 0xeb1: 0x4000, 0xeb2: 0x4000, 0xeb3: 0x4000, 0xeb4: 0x4000, 0xeb5: 0x4000, + 0xeb6: 0x4000, 0xeb7: 0x4000, 0xeb8: 0x4000, 0xeb9: 0x4000, 0xeba: 0x4000, 0xebb: 0x4000, + 0xebc: 0x4000, 0xebd: 0x4000, 0xebe: 0x4000, 0xebf: 0x4000, + // Block 0x3b, offset 0xec0 + 0xec0: 0x4000, 0xec1: 0x4000, 0xec2: 0x4000, 0xec3: 0x4000, 0xec4: 0x4000, 0xec5: 0x4000, + 0xec6: 0x4000, 0xec7: 0x4000, 0xec8: 0x2000, 0xec9: 0x2000, 0xeca: 0x2000, 0xecb: 0x2000, + 0xecc: 0x2000, 0xecd: 0x2000, 0xece: 0x2000, 0xecf: 0x2000, 0xed0: 0x4000, 0xed1: 0x4000, + 0xed2: 0x4000, 0xed3: 0x4000, 0xed4: 0x4000, 0xed5: 0x4000, 0xed6: 0x4000, 0xed7: 0x4000, + 0xed8: 0x4000, 0xed9: 0x4000, 0xeda: 0x4000, 0xedb: 0x4000, 0xedc: 0x4000, 0xedd: 0x4000, + 0xede: 0x4000, 0xedf: 0x4000, 0xee0: 0x4000, 0xee1: 0x4000, 0xee2: 0x4000, 0xee3: 0x4000, + 0xee4: 0x4000, 0xee5: 0x4000, 0xee6: 0x4000, 0xee7: 0x4000, 0xee8: 0x4000, 0xee9: 0x4000, + 0xeea: 0x4000, 0xeeb: 0x4000, 0xeec: 0x4000, 0xeed: 0x4000, 0xeee: 0x4000, 0xeef: 0x4000, + 0xef0: 0x4000, 0xef1: 0x4000, 0xef2: 0x4000, 0xef3: 0x4000, 0xef4: 0x4000, 0xef5: 0x4000, + 0xef6: 0x4000, 0xef7: 0x4000, 0xef8: 0x4000, 0xef9: 0x4000, 0xefa: 0x4000, 0xefb: 0x4000, + 0xefc: 0x4000, 0xefd: 0x4000, 0xefe: 0x4000, 0xeff: 0x4000, + // Block 0x3c, offset 0xf00 + 0xf00: 0x4000, 0xf01: 0x4000, 0xf02: 0x4000, 0xf03: 0x4000, 0xf04: 0x4000, 0xf05: 0x4000, + 0xf06: 0x4000, 0xf07: 0x4000, 0xf08: 0x4000, 0xf09: 0x4000, 0xf0a: 0x4000, 0xf0b: 0x4000, + 0xf0c: 0x4000, 0xf0d: 0x4000, 0xf0e: 0x4000, 0xf0f: 0x4000, 0xf10: 0x4000, 0xf11: 0x4000, + 0xf12: 0x4000, 0xf13: 0x4000, 0xf14: 0x4000, 0xf15: 0x4000, 0xf16: 0x4000, 0xf17: 0x4000, + 0xf18: 0x4000, 0xf19: 0x4000, 0xf1a: 0x4000, 0xf1b: 0x4000, 0xf1c: 0x4000, 0xf1d: 0x4000, + 0xf1e: 0x4000, 0xf1f: 0x4000, 0xf20: 0x4000, 0xf21: 0x4000, 0xf22: 0x4000, 0xf23: 0x4000, + 0xf24: 0x4000, 0xf25: 0x4000, 0xf26: 0x4000, 0xf27: 0x4000, 0xf28: 0x4000, 0xf29: 0x4000, + 0xf2a: 0x4000, 0xf2b: 0x4000, 0xf2c: 0x4000, 0xf2d: 0x4000, 0xf2e: 0x4000, 0xf2f: 0x4000, + 0xf30: 0x4000, 0xf31: 0x4000, 0xf32: 0x4000, 0xf33: 0x4000, 0xf34: 0x4000, 0xf35: 0x4000, + 0xf36: 0x4000, 0xf37: 0x4000, 0xf38: 0x4000, 0xf39: 0x4000, 0xf3a: 0x4000, 0xf3b: 0x4000, + 0xf3c: 0x4000, 0xf3d: 0x4000, 0xf3e: 0x4000, + // Block 0x3d, offset 0xf40 + 0xf40: 0x4000, 0xf41: 0x4000, 0xf42: 0x4000, 0xf43: 0x4000, 0xf44: 0x4000, 0xf45: 0x4000, + 0xf46: 0x4000, 0xf47: 0x4000, 0xf48: 0x4000, 0xf49: 0x4000, 0xf4a: 0x4000, 0xf4b: 0x4000, + 0xf4c: 0x4000, 0xf50: 0x4000, 0xf51: 0x4000, + 0xf52: 0x4000, 0xf53: 0x4000, 0xf54: 0x4000, 0xf55: 0x4000, 0xf56: 0x4000, 0xf57: 0x4000, + 0xf58: 0x4000, 0xf59: 0x4000, 0xf5a: 0x4000, 0xf5b: 0x4000, 0xf5c: 0x4000, 0xf5d: 0x4000, + 0xf5e: 0x4000, 0xf5f: 0x4000, 0xf60: 0x4000, 0xf61: 0x4000, 0xf62: 0x4000, 0xf63: 0x4000, + 0xf64: 0x4000, 0xf65: 0x4000, 0xf66: 0x4000, 0xf67: 0x4000, 0xf68: 0x4000, 0xf69: 0x4000, + 0xf6a: 0x4000, 0xf6b: 0x4000, 0xf6c: 0x4000, 0xf6d: 0x4000, 0xf6e: 0x4000, 0xf6f: 0x4000, + 0xf70: 0x4000, 0xf71: 0x4000, 0xf72: 0x4000, 0xf73: 0x4000, 0xf74: 0x4000, 0xf75: 0x4000, + 0xf76: 0x4000, 0xf77: 0x4000, 0xf78: 0x4000, 0xf79: 0x4000, 0xf7a: 0x4000, 0xf7b: 0x4000, + 0xf7c: 0x4000, 0xf7d: 0x4000, 0xf7e: 0x4000, 0xf7f: 0x4000, + // Block 0x3e, offset 0xf80 + 0xf80: 0x4000, 0xf81: 0x4000, 0xf82: 0x4000, 0xf83: 0x4000, 0xf84: 0x4000, 0xf85: 0x4000, + 0xf86: 0x4000, + // Block 
0x3f, offset 0xfc0 + 0xfe0: 0x4000, 0xfe1: 0x4000, 0xfe2: 0x4000, 0xfe3: 0x4000, + 0xfe4: 0x4000, 0xfe5: 0x4000, 0xfe6: 0x4000, 0xfe7: 0x4000, 0xfe8: 0x4000, 0xfe9: 0x4000, + 0xfea: 0x4000, 0xfeb: 0x4000, 0xfec: 0x4000, 0xfed: 0x4000, 0xfee: 0x4000, 0xfef: 0x4000, + 0xff0: 0x4000, 0xff1: 0x4000, 0xff2: 0x4000, 0xff3: 0x4000, 0xff4: 0x4000, 0xff5: 0x4000, + 0xff6: 0x4000, 0xff7: 0x4000, 0xff8: 0x4000, 0xff9: 0x4000, 0xffa: 0x4000, 0xffb: 0x4000, + 0xffc: 0x4000, + // Block 0x40, offset 0x1000 + 0x1000: 0x4000, 0x1001: 0x4000, 0x1002: 0x4000, 0x1003: 0x4000, 0x1004: 0x4000, 0x1005: 0x4000, + 0x1006: 0x4000, 0x1007: 0x4000, 0x1008: 0x4000, 0x1009: 0x4000, 0x100a: 0x4000, 0x100b: 0x4000, + 0x100c: 0x4000, 0x100d: 0x4000, 0x100e: 0x4000, 0x100f: 0x4000, 0x1010: 0x4000, 0x1011: 0x4000, + 0x1012: 0x4000, 0x1013: 0x4000, 0x1014: 0x4000, 0x1015: 0x4000, 0x1016: 0x4000, 0x1017: 0x4000, + 0x1018: 0x4000, 0x1019: 0x4000, 0x101a: 0x4000, 0x101b: 0x4000, 0x101c: 0x4000, 0x101d: 0x4000, + 0x101e: 0x4000, 0x101f: 0x4000, 0x1020: 0x4000, 0x1021: 0x4000, 0x1022: 0x4000, 0x1023: 0x4000, + // Block 0x41, offset 0x1040 + 0x1040: 0x2000, 0x1041: 0x2000, 0x1042: 0x2000, 0x1043: 0x2000, 0x1044: 0x2000, 0x1045: 0x2000, + 0x1046: 0x2000, 0x1047: 0x2000, 0x1048: 0x2000, 0x1049: 0x2000, 0x104a: 0x2000, 0x104b: 0x2000, + 0x104c: 0x2000, 0x104d: 0x2000, 0x104e: 0x2000, 0x104f: 0x2000, 0x1050: 0x4000, 0x1051: 0x4000, + 0x1052: 0x4000, 0x1053: 0x4000, 0x1054: 0x4000, 0x1055: 0x4000, 0x1056: 0x4000, 0x1057: 0x4000, + 0x1058: 0x4000, 0x1059: 0x4000, + 0x1070: 0x4000, 0x1071: 0x4000, 0x1072: 0x4000, 0x1073: 0x4000, 0x1074: 0x4000, 0x1075: 0x4000, + 0x1076: 0x4000, 0x1077: 0x4000, 0x1078: 0x4000, 0x1079: 0x4000, 0x107a: 0x4000, 0x107b: 0x4000, + 0x107c: 0x4000, 0x107d: 0x4000, 0x107e: 0x4000, 0x107f: 0x4000, + // Block 0x42, offset 0x1080 + 0x1080: 0x4000, 0x1081: 0x4000, 0x1082: 0x4000, 0x1083: 0x4000, 0x1084: 0x4000, 0x1085: 0x4000, + 0x1086: 0x4000, 0x1087: 0x4000, 0x1088: 0x4000, 0x1089: 0x4000, 0x108a: 0x4000, 0x108b: 0x4000, + 0x108c: 0x4000, 0x108d: 0x4000, 0x108e: 0x4000, 0x108f: 0x4000, 0x1090: 0x4000, 0x1091: 0x4000, + 0x1092: 0x4000, 0x1094: 0x4000, 0x1095: 0x4000, 0x1096: 0x4000, 0x1097: 0x4000, + 0x1098: 0x4000, 0x1099: 0x4000, 0x109a: 0x4000, 0x109b: 0x4000, 0x109c: 0x4000, 0x109d: 0x4000, + 0x109e: 0x4000, 0x109f: 0x4000, 0x10a0: 0x4000, 0x10a1: 0x4000, 0x10a2: 0x4000, 0x10a3: 0x4000, + 0x10a4: 0x4000, 0x10a5: 0x4000, 0x10a6: 0x4000, 0x10a8: 0x4000, 0x10a9: 0x4000, + 0x10aa: 0x4000, 0x10ab: 0x4000, + // Block 0x43, offset 0x10c0 + 0x10c1: 0x9012, 0x10c2: 0x9012, 0x10c3: 0x9012, 0x10c4: 0x9012, 0x10c5: 0x9012, + 0x10c6: 0x9012, 0x10c7: 0x9012, 0x10c8: 0x9012, 0x10c9: 0x9012, 0x10ca: 0x9012, 0x10cb: 0x9012, + 0x10cc: 0x9012, 0x10cd: 0x9012, 0x10ce: 0x9012, 0x10cf: 0x9012, 0x10d0: 0x9012, 0x10d1: 0x9012, + 0x10d2: 0x9012, 0x10d3: 0x9012, 0x10d4: 0x9012, 0x10d5: 0x9012, 0x10d6: 0x9012, 0x10d7: 0x9012, + 0x10d8: 0x9012, 0x10d9: 0x9012, 0x10da: 0x9012, 0x10db: 0x9012, 0x10dc: 0x9012, 0x10dd: 0x9012, + 0x10de: 0x9012, 0x10df: 0x9012, 0x10e0: 0x9049, 0x10e1: 0x9049, 0x10e2: 0x9049, 0x10e3: 0x9049, + 0x10e4: 0x9049, 0x10e5: 0x9049, 0x10e6: 0x9049, 0x10e7: 0x9049, 0x10e8: 0x9049, 0x10e9: 0x9049, + 0x10ea: 0x9049, 0x10eb: 0x9049, 0x10ec: 0x9049, 0x10ed: 0x9049, 0x10ee: 0x9049, 0x10ef: 0x9049, + 0x10f0: 0x9049, 0x10f1: 0x9049, 0x10f2: 0x9049, 0x10f3: 0x9049, 0x10f4: 0x9049, 0x10f5: 0x9049, + 0x10f6: 0x9049, 0x10f7: 0x9049, 0x10f8: 0x9049, 0x10f9: 0x9049, 0x10fa: 0x9049, 0x10fb: 0x9049, + 0x10fc: 0x9049, 0x10fd: 0x9049, 
0x10fe: 0x9049, 0x10ff: 0x9049, + // Block 0x44, offset 0x1100 + 0x1100: 0x9049, 0x1101: 0x9049, 0x1102: 0x9049, 0x1103: 0x9049, 0x1104: 0x9049, 0x1105: 0x9049, + 0x1106: 0x9049, 0x1107: 0x9049, 0x1108: 0x9049, 0x1109: 0x9049, 0x110a: 0x9049, 0x110b: 0x9049, + 0x110c: 0x9049, 0x110d: 0x9049, 0x110e: 0x9049, 0x110f: 0x9049, 0x1110: 0x9049, 0x1111: 0x9049, + 0x1112: 0x9049, 0x1113: 0x9049, 0x1114: 0x9049, 0x1115: 0x9049, 0x1116: 0x9049, 0x1117: 0x9049, + 0x1118: 0x9049, 0x1119: 0x9049, 0x111a: 0x9049, 0x111b: 0x9049, 0x111c: 0x9049, 0x111d: 0x9049, + 0x111e: 0x9049, 0x111f: 0x904a, 0x1120: 0x904b, 0x1121: 0xb04c, 0x1122: 0xb04d, 0x1123: 0xb04d, + 0x1124: 0xb04e, 0x1125: 0xb04f, 0x1126: 0xb050, 0x1127: 0xb051, 0x1128: 0xb052, 0x1129: 0xb053, + 0x112a: 0xb054, 0x112b: 0xb055, 0x112c: 0xb056, 0x112d: 0xb057, 0x112e: 0xb058, 0x112f: 0xb059, + 0x1130: 0xb05a, 0x1131: 0xb05b, 0x1132: 0xb05c, 0x1133: 0xb05d, 0x1134: 0xb05e, 0x1135: 0xb05f, + 0x1136: 0xb060, 0x1137: 0xb061, 0x1138: 0xb062, 0x1139: 0xb063, 0x113a: 0xb064, 0x113b: 0xb065, + 0x113c: 0xb052, 0x113d: 0xb066, 0x113e: 0xb067, 0x113f: 0xb055, + // Block 0x45, offset 0x1140 + 0x1140: 0xb068, 0x1141: 0xb069, 0x1142: 0xb06a, 0x1143: 0xb06b, 0x1144: 0xb05a, 0x1145: 0xb056, + 0x1146: 0xb06c, 0x1147: 0xb06d, 0x1148: 0xb06b, 0x1149: 0xb06e, 0x114a: 0xb06b, 0x114b: 0xb06f, + 0x114c: 0xb06f, 0x114d: 0xb070, 0x114e: 0xb070, 0x114f: 0xb071, 0x1150: 0xb056, 0x1151: 0xb072, + 0x1152: 0xb073, 0x1153: 0xb072, 0x1154: 0xb074, 0x1155: 0xb073, 0x1156: 0xb075, 0x1157: 0xb075, + 0x1158: 0xb076, 0x1159: 0xb076, 0x115a: 0xb077, 0x115b: 0xb077, 0x115c: 0xb073, 0x115d: 0xb078, + 0x115e: 0xb079, 0x115f: 0xb067, 0x1160: 0xb07a, 0x1161: 0xb07b, 0x1162: 0xb07b, 0x1163: 0xb07b, + 0x1164: 0xb07b, 0x1165: 0xb07b, 0x1166: 0xb07b, 0x1167: 0xb07b, 0x1168: 0xb07b, 0x1169: 0xb07b, + 0x116a: 0xb07b, 0x116b: 0xb07b, 0x116c: 0xb07b, 0x116d: 0xb07b, 0x116e: 0xb07b, 0x116f: 0xb07b, + 0x1170: 0xb07c, 0x1171: 0xb07c, 0x1172: 0xb07c, 0x1173: 0xb07c, 0x1174: 0xb07c, 0x1175: 0xb07c, + 0x1176: 0xb07c, 0x1177: 0xb07c, 0x1178: 0xb07c, 0x1179: 0xb07c, 0x117a: 0xb07c, 0x117b: 0xb07c, + 0x117c: 0xb07c, 0x117d: 0xb07c, 0x117e: 0xb07c, + // Block 0x46, offset 0x1180 + 0x1182: 0xb07d, 0x1183: 0xb07e, 0x1184: 0xb07f, 0x1185: 0xb080, + 0x1186: 0xb07f, 0x1187: 0xb07e, 0x118a: 0xb081, 0x118b: 0xb082, + 0x118c: 0xb083, 0x118d: 0xb07f, 0x118e: 0xb080, 0x118f: 0xb07f, + 0x1192: 0xb084, 0x1193: 0xb085, 0x1194: 0xb084, 0x1195: 0xb086, 0x1196: 0xb084, 0x1197: 0xb087, + 0x119a: 0xb088, 0x119b: 0xb089, 0x119c: 0xb08a, + 0x11a0: 0x908b, 0x11a1: 0x908b, 0x11a2: 0x908c, 0x11a3: 0x908d, + 0x11a4: 0x908b, 0x11a5: 0x908e, 0x11a6: 0x908f, 0x11a8: 0xb090, 0x11a9: 0xb091, + 0x11aa: 0xb092, 0x11ab: 0xb091, 0x11ac: 0xb093, 0x11ad: 0xb094, 0x11ae: 0xb095, + 0x11bd: 0x2000, + // Block 0x47, offset 0x11c0 + 0x11e0: 0x4000, 0x11e1: 0x4000, + // Block 0x48, offset 0x1200 + 0x1200: 0x4000, 0x1201: 0x4000, 0x1202: 0x4000, 0x1203: 0x4000, 0x1204: 0x4000, 0x1205: 0x4000, + 0x1206: 0x4000, 0x1207: 0x4000, 0x1208: 0x4000, 0x1209: 0x4000, 0x120a: 0x4000, 0x120b: 0x4000, + 0x120c: 0x4000, 0x120d: 0x4000, 0x120e: 0x4000, 0x120f: 0x4000, 0x1210: 0x4000, 0x1211: 0x4000, + 0x1212: 0x4000, 0x1213: 0x4000, 0x1214: 0x4000, 0x1215: 0x4000, 0x1216: 0x4000, 0x1217: 0x4000, + 0x1218: 0x4000, 0x1219: 0x4000, 0x121a: 0x4000, 0x121b: 0x4000, 0x121c: 0x4000, 0x121d: 0x4000, + 0x121e: 0x4000, 0x121f: 0x4000, 0x1220: 0x4000, 0x1221: 0x4000, 0x1222: 0x4000, 0x1223: 0x4000, + 0x1224: 0x4000, 0x1225: 0x4000, 0x1226: 0x4000, 0x1227: 0x4000, 0x1228: 
0x4000, 0x1229: 0x4000, + 0x122a: 0x4000, 0x122b: 0x4000, 0x122c: 0x4000, + // Block 0x49, offset 0x1240 + 0x1240: 0x4000, 0x1241: 0x4000, 0x1242: 0x4000, 0x1243: 0x4000, 0x1244: 0x4000, 0x1245: 0x4000, + 0x1246: 0x4000, 0x1247: 0x4000, 0x1248: 0x4000, 0x1249: 0x4000, 0x124a: 0x4000, 0x124b: 0x4000, + 0x124c: 0x4000, 0x124d: 0x4000, 0x124e: 0x4000, 0x124f: 0x4000, 0x1250: 0x4000, 0x1251: 0x4000, + 0x1252: 0x4000, 0x1253: 0x4000, 0x1254: 0x4000, 0x1255: 0x4000, 0x1256: 0x4000, 0x1257: 0x4000, + 0x1258: 0x4000, 0x1259: 0x4000, 0x125a: 0x4000, 0x125b: 0x4000, 0x125c: 0x4000, 0x125d: 0x4000, + 0x125e: 0x4000, 0x125f: 0x4000, 0x1260: 0x4000, 0x1261: 0x4000, 0x1262: 0x4000, 0x1263: 0x4000, + 0x1264: 0x4000, 0x1265: 0x4000, 0x1266: 0x4000, 0x1267: 0x4000, 0x1268: 0x4000, 0x1269: 0x4000, + 0x126a: 0x4000, 0x126b: 0x4000, 0x126c: 0x4000, 0x126d: 0x4000, 0x126e: 0x4000, 0x126f: 0x4000, + 0x1270: 0x4000, 0x1271: 0x4000, 0x1272: 0x4000, + // Block 0x4a, offset 0x1280 + 0x1280: 0x4000, 0x1281: 0x4000, 0x1282: 0x4000, 0x1283: 0x4000, 0x1284: 0x4000, 0x1285: 0x4000, + 0x1286: 0x4000, 0x1287: 0x4000, 0x1288: 0x4000, 0x1289: 0x4000, 0x128a: 0x4000, 0x128b: 0x4000, + 0x128c: 0x4000, 0x128d: 0x4000, 0x128e: 0x4000, 0x128f: 0x4000, 0x1290: 0x4000, 0x1291: 0x4000, + 0x1292: 0x4000, 0x1293: 0x4000, 0x1294: 0x4000, 0x1295: 0x4000, 0x1296: 0x4000, 0x1297: 0x4000, + 0x1298: 0x4000, 0x1299: 0x4000, 0x129a: 0x4000, 0x129b: 0x4000, 0x129c: 0x4000, 0x129d: 0x4000, + 0x129e: 0x4000, + // Block 0x4b, offset 0x12c0 + 0x12f0: 0x4000, 0x12f1: 0x4000, 0x12f2: 0x4000, 0x12f3: 0x4000, 0x12f4: 0x4000, 0x12f5: 0x4000, + 0x12f6: 0x4000, 0x12f7: 0x4000, 0x12f8: 0x4000, 0x12f9: 0x4000, 0x12fa: 0x4000, 0x12fb: 0x4000, + 0x12fc: 0x4000, 0x12fd: 0x4000, 0x12fe: 0x4000, 0x12ff: 0x4000, + // Block 0x4c, offset 0x1300 + 0x1300: 0x4000, 0x1301: 0x4000, 0x1302: 0x4000, 0x1303: 0x4000, 0x1304: 0x4000, 0x1305: 0x4000, + 0x1306: 0x4000, 0x1307: 0x4000, 0x1308: 0x4000, 0x1309: 0x4000, 0x130a: 0x4000, 0x130b: 0x4000, + 0x130c: 0x4000, 0x130d: 0x4000, 0x130e: 0x4000, 0x130f: 0x4000, 0x1310: 0x4000, 0x1311: 0x4000, + 0x1312: 0x4000, 0x1313: 0x4000, 0x1314: 0x4000, 0x1315: 0x4000, 0x1316: 0x4000, 0x1317: 0x4000, + 0x1318: 0x4000, 0x1319: 0x4000, 0x131a: 0x4000, 0x131b: 0x4000, 0x131c: 0x4000, 0x131d: 0x4000, + 0x131e: 0x4000, 0x131f: 0x4000, 0x1320: 0x4000, 0x1321: 0x4000, 0x1322: 0x4000, 0x1323: 0x4000, + 0x1324: 0x4000, 0x1325: 0x4000, 0x1326: 0x4000, 0x1327: 0x4000, 0x1328: 0x4000, 0x1329: 0x4000, + 0x132a: 0x4000, 0x132b: 0x4000, 0x132c: 0x4000, 0x132d: 0x4000, 0x132e: 0x4000, 0x132f: 0x4000, + 0x1330: 0x4000, 0x1331: 0x4000, 0x1332: 0x4000, 0x1333: 0x4000, 0x1334: 0x4000, 0x1335: 0x4000, + 0x1336: 0x4000, 0x1337: 0x4000, 0x1338: 0x4000, 0x1339: 0x4000, 0x133a: 0x4000, 0x133b: 0x4000, + // Block 0x4d, offset 0x1340 + 0x1344: 0x4000, + // Block 0x4e, offset 0x1380 + 0x138f: 0x4000, + // Block 0x4f, offset 0x13c0 + 0x13c0: 0x2000, 0x13c1: 0x2000, 0x13c2: 0x2000, 0x13c3: 0x2000, 0x13c4: 0x2000, 0x13c5: 0x2000, + 0x13c6: 0x2000, 0x13c7: 0x2000, 0x13c8: 0x2000, 0x13c9: 0x2000, 0x13ca: 0x2000, + 0x13d0: 0x2000, 0x13d1: 0x2000, + 0x13d2: 0x2000, 0x13d3: 0x2000, 0x13d4: 0x2000, 0x13d5: 0x2000, 0x13d6: 0x2000, 0x13d7: 0x2000, + 0x13d8: 0x2000, 0x13d9: 0x2000, 0x13da: 0x2000, 0x13db: 0x2000, 0x13dc: 0x2000, 0x13dd: 0x2000, + 0x13de: 0x2000, 0x13df: 0x2000, 0x13e0: 0x2000, 0x13e1: 0x2000, 0x13e2: 0x2000, 0x13e3: 0x2000, + 0x13e4: 0x2000, 0x13e5: 0x2000, 0x13e6: 0x2000, 0x13e7: 0x2000, 0x13e8: 0x2000, 0x13e9: 0x2000, + 0x13ea: 0x2000, 0x13eb: 0x2000, 
0x13ec: 0x2000, 0x13ed: 0x2000, + 0x13f0: 0x2000, 0x13f1: 0x2000, 0x13f2: 0x2000, 0x13f3: 0x2000, 0x13f4: 0x2000, 0x13f5: 0x2000, + 0x13f6: 0x2000, 0x13f7: 0x2000, 0x13f8: 0x2000, 0x13f9: 0x2000, 0x13fa: 0x2000, 0x13fb: 0x2000, + 0x13fc: 0x2000, 0x13fd: 0x2000, 0x13fe: 0x2000, 0x13ff: 0x2000, + // Block 0x50, offset 0x1400 + 0x1400: 0x2000, 0x1401: 0x2000, 0x1402: 0x2000, 0x1403: 0x2000, 0x1404: 0x2000, 0x1405: 0x2000, + 0x1406: 0x2000, 0x1407: 0x2000, 0x1408: 0x2000, 0x1409: 0x2000, 0x140a: 0x2000, 0x140b: 0x2000, + 0x140c: 0x2000, 0x140d: 0x2000, 0x140e: 0x2000, 0x140f: 0x2000, 0x1410: 0x2000, 0x1411: 0x2000, + 0x1412: 0x2000, 0x1413: 0x2000, 0x1414: 0x2000, 0x1415: 0x2000, 0x1416: 0x2000, 0x1417: 0x2000, + 0x1418: 0x2000, 0x1419: 0x2000, 0x141a: 0x2000, 0x141b: 0x2000, 0x141c: 0x2000, 0x141d: 0x2000, + 0x141e: 0x2000, 0x141f: 0x2000, 0x1420: 0x2000, 0x1421: 0x2000, 0x1422: 0x2000, 0x1423: 0x2000, + 0x1424: 0x2000, 0x1425: 0x2000, 0x1426: 0x2000, 0x1427: 0x2000, 0x1428: 0x2000, 0x1429: 0x2000, + 0x1430: 0x2000, 0x1431: 0x2000, 0x1432: 0x2000, 0x1433: 0x2000, 0x1434: 0x2000, 0x1435: 0x2000, + 0x1436: 0x2000, 0x1437: 0x2000, 0x1438: 0x2000, 0x1439: 0x2000, 0x143a: 0x2000, 0x143b: 0x2000, + 0x143c: 0x2000, 0x143d: 0x2000, 0x143e: 0x2000, 0x143f: 0x2000, + // Block 0x51, offset 0x1440 + 0x1440: 0x2000, 0x1441: 0x2000, 0x1442: 0x2000, 0x1443: 0x2000, 0x1444: 0x2000, 0x1445: 0x2000, + 0x1446: 0x2000, 0x1447: 0x2000, 0x1448: 0x2000, 0x1449: 0x2000, 0x144a: 0x2000, 0x144b: 0x2000, + 0x144c: 0x2000, 0x144d: 0x2000, 0x144e: 0x4000, 0x144f: 0x2000, 0x1450: 0x2000, 0x1451: 0x4000, + 0x1452: 0x4000, 0x1453: 0x4000, 0x1454: 0x4000, 0x1455: 0x4000, 0x1456: 0x4000, 0x1457: 0x4000, + 0x1458: 0x4000, 0x1459: 0x4000, 0x145a: 0x4000, 0x145b: 0x2000, 0x145c: 0x2000, 0x145d: 0x2000, + 0x145e: 0x2000, 0x145f: 0x2000, 0x1460: 0x2000, 0x1461: 0x2000, 0x1462: 0x2000, 0x1463: 0x2000, + 0x1464: 0x2000, 0x1465: 0x2000, 0x1466: 0x2000, 0x1467: 0x2000, 0x1468: 0x2000, 0x1469: 0x2000, + 0x146a: 0x2000, 0x146b: 0x2000, 0x146c: 0x2000, + // Block 0x52, offset 0x1480 + 0x1480: 0x4000, 0x1481: 0x4000, 0x1482: 0x4000, + 0x1490: 0x4000, 0x1491: 0x4000, + 0x1492: 0x4000, 0x1493: 0x4000, 0x1494: 0x4000, 0x1495: 0x4000, 0x1496: 0x4000, 0x1497: 0x4000, + 0x1498: 0x4000, 0x1499: 0x4000, 0x149a: 0x4000, 0x149b: 0x4000, 0x149c: 0x4000, 0x149d: 0x4000, + 0x149e: 0x4000, 0x149f: 0x4000, 0x14a0: 0x4000, 0x14a1: 0x4000, 0x14a2: 0x4000, 0x14a3: 0x4000, + 0x14a4: 0x4000, 0x14a5: 0x4000, 0x14a6: 0x4000, 0x14a7: 0x4000, 0x14a8: 0x4000, 0x14a9: 0x4000, + 0x14aa: 0x4000, 0x14ab: 0x4000, 0x14ac: 0x4000, 0x14ad: 0x4000, 0x14ae: 0x4000, 0x14af: 0x4000, + 0x14b0: 0x4000, 0x14b1: 0x4000, 0x14b2: 0x4000, 0x14b3: 0x4000, 0x14b4: 0x4000, 0x14b5: 0x4000, + 0x14b6: 0x4000, 0x14b7: 0x4000, 0x14b8: 0x4000, 0x14b9: 0x4000, 0x14ba: 0x4000, 0x14bb: 0x4000, + // Block 0x53, offset 0x14c0 + 0x14c0: 0x4000, 0x14c1: 0x4000, 0x14c2: 0x4000, 0x14c3: 0x4000, 0x14c4: 0x4000, 0x14c5: 0x4000, + 0x14c6: 0x4000, 0x14c7: 0x4000, 0x14c8: 0x4000, + 0x14d0: 0x4000, 0x14d1: 0x4000, + 0x14e0: 0x4000, 0x14e1: 0x4000, 0x14e2: 0x4000, 0x14e3: 0x4000, + 0x14e4: 0x4000, 0x14e5: 0x4000, + // Block 0x54, offset 0x1500 + 0x1500: 0x4000, 0x1501: 0x4000, 0x1502: 0x4000, 0x1503: 0x4000, 0x1504: 0x4000, 0x1505: 0x4000, + 0x1506: 0x4000, 0x1507: 0x4000, 0x1508: 0x4000, 0x1509: 0x4000, 0x150a: 0x4000, 0x150b: 0x4000, + 0x150c: 0x4000, 0x150d: 0x4000, 0x150e: 0x4000, 0x150f: 0x4000, 0x1510: 0x4000, 0x1511: 0x4000, + 0x1512: 0x4000, 0x1513: 0x4000, 0x1514: 0x4000, 0x1515: 0x4000, 0x1516: 
0x4000, 0x1517: 0x4000, + 0x1518: 0x4000, 0x1519: 0x4000, 0x151a: 0x4000, 0x151b: 0x4000, 0x151c: 0x4000, 0x151d: 0x4000, + 0x151e: 0x4000, 0x151f: 0x4000, 0x1520: 0x4000, + 0x152d: 0x4000, 0x152e: 0x4000, 0x152f: 0x4000, + 0x1530: 0x4000, 0x1531: 0x4000, 0x1532: 0x4000, 0x1533: 0x4000, 0x1534: 0x4000, 0x1535: 0x4000, + 0x1537: 0x4000, 0x1538: 0x4000, 0x1539: 0x4000, 0x153a: 0x4000, 0x153b: 0x4000, + 0x153c: 0x4000, 0x153d: 0x4000, 0x153e: 0x4000, 0x153f: 0x4000, + // Block 0x55, offset 0x1540 + 0x1540: 0x4000, 0x1541: 0x4000, 0x1542: 0x4000, 0x1543: 0x4000, 0x1544: 0x4000, 0x1545: 0x4000, + 0x1546: 0x4000, 0x1547: 0x4000, 0x1548: 0x4000, 0x1549: 0x4000, 0x154a: 0x4000, 0x154b: 0x4000, + 0x154c: 0x4000, 0x154d: 0x4000, 0x154e: 0x4000, 0x154f: 0x4000, 0x1550: 0x4000, 0x1551: 0x4000, + 0x1552: 0x4000, 0x1553: 0x4000, 0x1554: 0x4000, 0x1555: 0x4000, 0x1556: 0x4000, 0x1557: 0x4000, + 0x1558: 0x4000, 0x1559: 0x4000, 0x155a: 0x4000, 0x155b: 0x4000, 0x155c: 0x4000, 0x155d: 0x4000, + 0x155e: 0x4000, 0x155f: 0x4000, 0x1560: 0x4000, 0x1561: 0x4000, 0x1562: 0x4000, 0x1563: 0x4000, + 0x1564: 0x4000, 0x1565: 0x4000, 0x1566: 0x4000, 0x1567: 0x4000, 0x1568: 0x4000, 0x1569: 0x4000, + 0x156a: 0x4000, 0x156b: 0x4000, 0x156c: 0x4000, 0x156d: 0x4000, 0x156e: 0x4000, 0x156f: 0x4000, + 0x1570: 0x4000, 0x1571: 0x4000, 0x1572: 0x4000, 0x1573: 0x4000, 0x1574: 0x4000, 0x1575: 0x4000, + 0x1576: 0x4000, 0x1577: 0x4000, 0x1578: 0x4000, 0x1579: 0x4000, 0x157a: 0x4000, 0x157b: 0x4000, + 0x157c: 0x4000, 0x157e: 0x4000, 0x157f: 0x4000, + // Block 0x56, offset 0x1580 + 0x1580: 0x4000, 0x1581: 0x4000, 0x1582: 0x4000, 0x1583: 0x4000, 0x1584: 0x4000, 0x1585: 0x4000, + 0x1586: 0x4000, 0x1587: 0x4000, 0x1588: 0x4000, 0x1589: 0x4000, 0x158a: 0x4000, 0x158b: 0x4000, + 0x158c: 0x4000, 0x158d: 0x4000, 0x158e: 0x4000, 0x158f: 0x4000, 0x1590: 0x4000, 0x1591: 0x4000, + 0x1592: 0x4000, 0x1593: 0x4000, + 0x15a0: 0x4000, 0x15a1: 0x4000, 0x15a2: 0x4000, 0x15a3: 0x4000, + 0x15a4: 0x4000, 0x15a5: 0x4000, 0x15a6: 0x4000, 0x15a7: 0x4000, 0x15a8: 0x4000, 0x15a9: 0x4000, + 0x15aa: 0x4000, 0x15ab: 0x4000, 0x15ac: 0x4000, 0x15ad: 0x4000, 0x15ae: 0x4000, 0x15af: 0x4000, + 0x15b0: 0x4000, 0x15b1: 0x4000, 0x15b2: 0x4000, 0x15b3: 0x4000, 0x15b4: 0x4000, 0x15b5: 0x4000, + 0x15b6: 0x4000, 0x15b7: 0x4000, 0x15b8: 0x4000, 0x15b9: 0x4000, 0x15ba: 0x4000, 0x15bb: 0x4000, + 0x15bc: 0x4000, 0x15bd: 0x4000, 0x15be: 0x4000, 0x15bf: 0x4000, + // Block 0x57, offset 0x15c0 + 0x15c0: 0x4000, 0x15c1: 0x4000, 0x15c2: 0x4000, 0x15c3: 0x4000, 0x15c4: 0x4000, 0x15c5: 0x4000, + 0x15c6: 0x4000, 0x15c7: 0x4000, 0x15c8: 0x4000, 0x15c9: 0x4000, 0x15ca: 0x4000, + 0x15cf: 0x4000, 0x15d0: 0x4000, 0x15d1: 0x4000, + 0x15d2: 0x4000, 0x15d3: 0x4000, + 0x15e0: 0x4000, 0x15e1: 0x4000, 0x15e2: 0x4000, 0x15e3: 0x4000, + 0x15e4: 0x4000, 0x15e5: 0x4000, 0x15e6: 0x4000, 0x15e7: 0x4000, 0x15e8: 0x4000, 0x15e9: 0x4000, + 0x15ea: 0x4000, 0x15eb: 0x4000, 0x15ec: 0x4000, 0x15ed: 0x4000, 0x15ee: 0x4000, 0x15ef: 0x4000, + 0x15f0: 0x4000, 0x15f4: 0x4000, + 0x15f8: 0x4000, 0x15f9: 0x4000, 0x15fa: 0x4000, 0x15fb: 0x4000, + 0x15fc: 0x4000, 0x15fd: 0x4000, 0x15fe: 0x4000, 0x15ff: 0x4000, + // Block 0x58, offset 0x1600 + 0x1600: 0x4000, 0x1602: 0x4000, 0x1603: 0x4000, 0x1604: 0x4000, 0x1605: 0x4000, + 0x1606: 0x4000, 0x1607: 0x4000, 0x1608: 0x4000, 0x1609: 0x4000, 0x160a: 0x4000, 0x160b: 0x4000, + 0x160c: 0x4000, 0x160d: 0x4000, 0x160e: 0x4000, 0x160f: 0x4000, 0x1610: 0x4000, 0x1611: 0x4000, + 0x1612: 0x4000, 0x1613: 0x4000, 0x1614: 0x4000, 0x1615: 0x4000, 0x1616: 0x4000, 0x1617: 0x4000, + 0x1618: 
0x4000, 0x1619: 0x4000, 0x161a: 0x4000, 0x161b: 0x4000, 0x161c: 0x4000, 0x161d: 0x4000, + 0x161e: 0x4000, 0x161f: 0x4000, 0x1620: 0x4000, 0x1621: 0x4000, 0x1622: 0x4000, 0x1623: 0x4000, + 0x1624: 0x4000, 0x1625: 0x4000, 0x1626: 0x4000, 0x1627: 0x4000, 0x1628: 0x4000, 0x1629: 0x4000, + 0x162a: 0x4000, 0x162b: 0x4000, 0x162c: 0x4000, 0x162d: 0x4000, 0x162e: 0x4000, 0x162f: 0x4000, + 0x1630: 0x4000, 0x1631: 0x4000, 0x1632: 0x4000, 0x1633: 0x4000, 0x1634: 0x4000, 0x1635: 0x4000, + 0x1636: 0x4000, 0x1637: 0x4000, 0x1638: 0x4000, 0x1639: 0x4000, 0x163a: 0x4000, 0x163b: 0x4000, + 0x163c: 0x4000, 0x163d: 0x4000, 0x163e: 0x4000, 0x163f: 0x4000, + // Block 0x59, offset 0x1640 + 0x1640: 0x4000, 0x1641: 0x4000, 0x1642: 0x4000, 0x1643: 0x4000, 0x1644: 0x4000, 0x1645: 0x4000, + 0x1646: 0x4000, 0x1647: 0x4000, 0x1648: 0x4000, 0x1649: 0x4000, 0x164a: 0x4000, 0x164b: 0x4000, + 0x164c: 0x4000, 0x164d: 0x4000, 0x164e: 0x4000, 0x164f: 0x4000, 0x1650: 0x4000, 0x1651: 0x4000, + 0x1652: 0x4000, 0x1653: 0x4000, 0x1654: 0x4000, 0x1655: 0x4000, 0x1656: 0x4000, 0x1657: 0x4000, + 0x1658: 0x4000, 0x1659: 0x4000, 0x165a: 0x4000, 0x165b: 0x4000, 0x165c: 0x4000, 0x165d: 0x4000, + 0x165e: 0x4000, 0x165f: 0x4000, 0x1660: 0x4000, 0x1661: 0x4000, 0x1662: 0x4000, 0x1663: 0x4000, + 0x1664: 0x4000, 0x1665: 0x4000, 0x1666: 0x4000, 0x1667: 0x4000, 0x1668: 0x4000, 0x1669: 0x4000, + 0x166a: 0x4000, 0x166b: 0x4000, 0x166c: 0x4000, 0x166d: 0x4000, 0x166e: 0x4000, 0x166f: 0x4000, + 0x1670: 0x4000, 0x1671: 0x4000, 0x1672: 0x4000, 0x1673: 0x4000, 0x1674: 0x4000, 0x1675: 0x4000, + 0x1676: 0x4000, 0x1677: 0x4000, 0x1678: 0x4000, 0x1679: 0x4000, 0x167a: 0x4000, 0x167b: 0x4000, + 0x167c: 0x4000, 0x167f: 0x4000, + // Block 0x5a, offset 0x1680 + 0x1680: 0x4000, 0x1681: 0x4000, 0x1682: 0x4000, 0x1683: 0x4000, 0x1684: 0x4000, 0x1685: 0x4000, + 0x1686: 0x4000, 0x1687: 0x4000, 0x1688: 0x4000, 0x1689: 0x4000, 0x168a: 0x4000, 0x168b: 0x4000, + 0x168c: 0x4000, 0x168d: 0x4000, 0x168e: 0x4000, 0x168f: 0x4000, 0x1690: 0x4000, 0x1691: 0x4000, + 0x1692: 0x4000, 0x1693: 0x4000, 0x1694: 0x4000, 0x1695: 0x4000, 0x1696: 0x4000, 0x1697: 0x4000, + 0x1698: 0x4000, 0x1699: 0x4000, 0x169a: 0x4000, 0x169b: 0x4000, 0x169c: 0x4000, 0x169d: 0x4000, + 0x169e: 0x4000, 0x169f: 0x4000, 0x16a0: 0x4000, 0x16a1: 0x4000, 0x16a2: 0x4000, 0x16a3: 0x4000, + 0x16a4: 0x4000, 0x16a5: 0x4000, 0x16a6: 0x4000, 0x16a7: 0x4000, 0x16a8: 0x4000, 0x16a9: 0x4000, + 0x16aa: 0x4000, 0x16ab: 0x4000, 0x16ac: 0x4000, 0x16ad: 0x4000, 0x16ae: 0x4000, 0x16af: 0x4000, + 0x16b0: 0x4000, 0x16b1: 0x4000, 0x16b2: 0x4000, 0x16b3: 0x4000, 0x16b4: 0x4000, 0x16b5: 0x4000, + 0x16b6: 0x4000, 0x16b7: 0x4000, 0x16b8: 0x4000, 0x16b9: 0x4000, 0x16ba: 0x4000, 0x16bb: 0x4000, + 0x16bc: 0x4000, 0x16bd: 0x4000, + // Block 0x5b, offset 0x16c0 + 0x16cb: 0x4000, + 0x16cc: 0x4000, 0x16cd: 0x4000, 0x16ce: 0x4000, 0x16d0: 0x4000, 0x16d1: 0x4000, + 0x16d2: 0x4000, 0x16d3: 0x4000, 0x16d4: 0x4000, 0x16d5: 0x4000, 0x16d6: 0x4000, 0x16d7: 0x4000, + 0x16d8: 0x4000, 0x16d9: 0x4000, 0x16da: 0x4000, 0x16db: 0x4000, 0x16dc: 0x4000, 0x16dd: 0x4000, + 0x16de: 0x4000, 0x16df: 0x4000, 0x16e0: 0x4000, 0x16e1: 0x4000, 0x16e2: 0x4000, 0x16e3: 0x4000, + 0x16e4: 0x4000, 0x16e5: 0x4000, 0x16e6: 0x4000, 0x16e7: 0x4000, + 0x16fa: 0x4000, + // Block 0x5c, offset 0x1700 + 0x1715: 0x4000, 0x1716: 0x4000, + 0x1724: 0x4000, + // Block 0x5d, offset 0x1740 + 0x177b: 0x4000, + 0x177c: 0x4000, 0x177d: 0x4000, 0x177e: 0x4000, 0x177f: 0x4000, + // Block 0x5e, offset 0x1780 + 0x1780: 0x4000, 0x1781: 0x4000, 0x1782: 0x4000, 0x1783: 0x4000, 0x1784: 0x4000, 
0x1785: 0x4000, + 0x1786: 0x4000, 0x1787: 0x4000, 0x1788: 0x4000, 0x1789: 0x4000, 0x178a: 0x4000, 0x178b: 0x4000, + 0x178c: 0x4000, 0x178d: 0x4000, 0x178e: 0x4000, 0x178f: 0x4000, + // Block 0x5f, offset 0x17c0 + 0x17c0: 0x4000, 0x17c1: 0x4000, 0x17c2: 0x4000, 0x17c3: 0x4000, 0x17c4: 0x4000, 0x17c5: 0x4000, + 0x17cc: 0x4000, 0x17d0: 0x4000, 0x17d1: 0x4000, + 0x17d2: 0x4000, + 0x17eb: 0x4000, 0x17ec: 0x4000, + 0x17f4: 0x4000, 0x17f5: 0x4000, + 0x17f6: 0x4000, 0x17f7: 0x4000, 0x17f8: 0x4000, + // Block 0x60, offset 0x1800 + 0x1810: 0x4000, 0x1811: 0x4000, + 0x1812: 0x4000, 0x1813: 0x4000, 0x1814: 0x4000, 0x1815: 0x4000, 0x1816: 0x4000, 0x1817: 0x4000, + 0x1818: 0x4000, 0x1819: 0x4000, 0x181a: 0x4000, 0x181b: 0x4000, 0x181c: 0x4000, 0x181d: 0x4000, + 0x181e: 0x4000, 0x181f: 0x4000, 0x1820: 0x4000, 0x1821: 0x4000, 0x1822: 0x4000, 0x1823: 0x4000, + 0x1824: 0x4000, 0x1825: 0x4000, 0x1826: 0x4000, 0x1827: 0x4000, 0x1828: 0x4000, 0x1829: 0x4000, + 0x182a: 0x4000, 0x182b: 0x4000, 0x182c: 0x4000, 0x182d: 0x4000, 0x182e: 0x4000, 0x182f: 0x4000, + 0x1830: 0x4000, 0x1831: 0x4000, 0x1832: 0x4000, 0x1833: 0x4000, 0x1834: 0x4000, 0x1835: 0x4000, + 0x1836: 0x4000, 0x1837: 0x4000, 0x1838: 0x4000, 0x1839: 0x4000, 0x183a: 0x4000, 0x183b: 0x4000, + 0x183c: 0x4000, 0x183d: 0x4000, 0x183e: 0x4000, + // Block 0x61, offset 0x1840 + 0x1840: 0x4000, 0x1841: 0x4000, 0x1842: 0x4000, 0x1843: 0x4000, 0x1844: 0x4000, 0x1845: 0x4000, + 0x1846: 0x4000, 0x1847: 0x4000, 0x1848: 0x4000, 0x1849: 0x4000, 0x184a: 0x4000, 0x184b: 0x4000, + 0x184c: 0x4000, 0x1850: 0x4000, 0x1851: 0x4000, + 0x1852: 0x4000, 0x1853: 0x4000, 0x1854: 0x4000, 0x1855: 0x4000, 0x1856: 0x4000, 0x1857: 0x4000, + 0x1858: 0x4000, 0x1859: 0x4000, 0x185a: 0x4000, 0x185b: 0x4000, 0x185c: 0x4000, 0x185d: 0x4000, + 0x185e: 0x4000, 0x185f: 0x4000, 0x1860: 0x4000, 0x1861: 0x4000, 0x1862: 0x4000, 0x1863: 0x4000, + 0x1864: 0x4000, 0x1865: 0x4000, 0x1866: 0x4000, 0x1867: 0x4000, 0x1868: 0x4000, 0x1869: 0x4000, + 0x186a: 0x4000, 0x186b: 0x4000, + // Block 0x62, offset 0x1880 + 0x1880: 0x4000, 0x1881: 0x4000, 0x1882: 0x4000, 0x1883: 0x4000, 0x1884: 0x4000, 0x1885: 0x4000, + 0x1886: 0x4000, 0x1887: 0x4000, 0x1888: 0x4000, 0x1889: 0x4000, 0x188a: 0x4000, 0x188b: 0x4000, + 0x188c: 0x4000, 0x188d: 0x4000, 0x188e: 0x4000, 0x188f: 0x4000, 0x1890: 0x4000, 0x1891: 0x4000, + 0x1892: 0x4000, 0x1893: 0x4000, 0x1894: 0x4000, 0x1895: 0x4000, 0x1896: 0x4000, 0x1897: 0x4000, + // Block 0x63, offset 0x18c0 + 0x18c0: 0x4000, + 0x18d0: 0x4000, 0x18d1: 0x4000, + 0x18d2: 0x4000, 0x18d3: 0x4000, 0x18d4: 0x4000, 0x18d5: 0x4000, 0x18d6: 0x4000, 0x18d7: 0x4000, + 0x18d8: 0x4000, 0x18d9: 0x4000, 0x18da: 0x4000, 0x18db: 0x4000, 0x18dc: 0x4000, 0x18dd: 0x4000, + 0x18de: 0x4000, 0x18df: 0x4000, 0x18e0: 0x4000, 0x18e1: 0x4000, 0x18e2: 0x4000, 0x18e3: 0x4000, + 0x18e4: 0x4000, 0x18e5: 0x4000, 0x18e6: 0x4000, + // Block 0x64, offset 0x1900 + 0x1900: 0x2000, 0x1901: 0x2000, 0x1902: 0x2000, 0x1903: 0x2000, 0x1904: 0x2000, 0x1905: 0x2000, + 0x1906: 0x2000, 0x1907: 0x2000, 0x1908: 0x2000, 0x1909: 0x2000, 0x190a: 0x2000, 0x190b: 0x2000, + 0x190c: 0x2000, 0x190d: 0x2000, 0x190e: 0x2000, 0x190f: 0x2000, 0x1910: 0x2000, 0x1911: 0x2000, + 0x1912: 0x2000, 0x1913: 0x2000, 0x1914: 0x2000, 0x1915: 0x2000, 0x1916: 0x2000, 0x1917: 0x2000, + 0x1918: 0x2000, 0x1919: 0x2000, 0x191a: 0x2000, 0x191b: 0x2000, 0x191c: 0x2000, 0x191d: 0x2000, + 0x191e: 0x2000, 0x191f: 0x2000, 0x1920: 0x2000, 0x1921: 0x2000, 0x1922: 0x2000, 0x1923: 0x2000, + 0x1924: 0x2000, 0x1925: 0x2000, 0x1926: 0x2000, 0x1927: 0x2000, 0x1928: 0x2000, 
0x1929: 0x2000, + 0x192a: 0x2000, 0x192b: 0x2000, 0x192c: 0x2000, 0x192d: 0x2000, 0x192e: 0x2000, 0x192f: 0x2000, + 0x1930: 0x2000, 0x1931: 0x2000, 0x1932: 0x2000, 0x1933: 0x2000, 0x1934: 0x2000, 0x1935: 0x2000, + 0x1936: 0x2000, 0x1937: 0x2000, 0x1938: 0x2000, 0x1939: 0x2000, 0x193a: 0x2000, 0x193b: 0x2000, + 0x193c: 0x2000, 0x193d: 0x2000, +} + +// widthIndex: 22 blocks, 1408 entries, 1408 bytes +// Block 0 is the zero block. +var widthIndex = [1408]uint8{ + // Block 0x0, offset 0x0 + // Block 0x1, offset 0x40 + // Block 0x2, offset 0x80 + // Block 0x3, offset 0xc0 + 0xc2: 0x01, 0xc3: 0x02, 0xc4: 0x03, 0xc5: 0x04, 0xc7: 0x05, + 0xc9: 0x06, 0xcb: 0x07, 0xcc: 0x08, 0xcd: 0x09, 0xce: 0x0a, 0xcf: 0x0b, + 0xd0: 0x0c, 0xd1: 0x0d, + 0xe1: 0x02, 0xe2: 0x03, 0xe3: 0x04, 0xe4: 0x05, 0xe5: 0x06, 0xe6: 0x06, 0xe7: 0x06, + 0xe8: 0x06, 0xe9: 0x06, 0xea: 0x07, 0xeb: 0x06, 0xec: 0x06, 0xed: 0x08, 0xee: 0x09, 0xef: 0x0a, + 0xf0: 0x0f, 0xf3: 0x12, 0xf4: 0x13, + // Block 0x4, offset 0x100 + 0x104: 0x0e, 0x105: 0x0f, + // Block 0x5, offset 0x140 + 0x140: 0x10, 0x141: 0x11, 0x142: 0x12, 0x144: 0x13, 0x145: 0x14, 0x146: 0x15, 0x147: 0x16, + 0x148: 0x17, 0x149: 0x18, 0x14a: 0x19, 0x14c: 0x1a, 0x14f: 0x1b, + 0x151: 0x1c, 0x152: 0x08, 0x153: 0x1d, 0x154: 0x1e, 0x155: 0x1f, 0x156: 0x20, 0x157: 0x21, + 0x158: 0x22, 0x159: 0x23, 0x15a: 0x24, 0x15b: 0x25, 0x15c: 0x26, 0x15d: 0x27, 0x15e: 0x28, 0x15f: 0x29, + 0x166: 0x2a, + 0x16c: 0x2b, 0x16d: 0x2c, + 0x17a: 0x2d, 0x17b: 0x2e, 0x17c: 0x0e, 0x17d: 0x0e, 0x17e: 0x0e, 0x17f: 0x2f, + // Block 0x6, offset 0x180 + 0x180: 0x30, 0x181: 0x31, 0x182: 0x32, 0x183: 0x33, 0x184: 0x34, 0x185: 0x35, 0x186: 0x36, 0x187: 0x37, + 0x188: 0x38, 0x189: 0x39, 0x18a: 0x0e, 0x18b: 0x3a, 0x18c: 0x0e, 0x18d: 0x0e, 0x18e: 0x0e, 0x18f: 0x0e, + 0x190: 0x0e, 0x191: 0x0e, 0x192: 0x0e, 0x193: 0x0e, 0x194: 0x0e, 0x195: 0x0e, 0x196: 0x0e, 0x197: 0x0e, + 0x198: 0x0e, 0x199: 0x0e, 0x19a: 0x0e, 0x19b: 0x0e, 0x19c: 0x0e, 0x19d: 0x0e, 0x19e: 0x0e, 0x19f: 0x0e, + 0x1a0: 0x0e, 0x1a1: 0x0e, 0x1a2: 0x0e, 0x1a3: 0x0e, 0x1a4: 0x0e, 0x1a5: 0x0e, 0x1a6: 0x0e, 0x1a7: 0x0e, + 0x1a8: 0x0e, 0x1a9: 0x0e, 0x1aa: 0x0e, 0x1ab: 0x0e, 0x1ac: 0x0e, 0x1ad: 0x0e, 0x1ae: 0x0e, 0x1af: 0x0e, + 0x1b0: 0x0e, 0x1b1: 0x0e, 0x1b2: 0x0e, 0x1b3: 0x0e, 0x1b4: 0x0e, 0x1b5: 0x0e, 0x1b6: 0x0e, 0x1b7: 0x0e, + 0x1b8: 0x0e, 0x1b9: 0x0e, 0x1ba: 0x0e, 0x1bb: 0x0e, 0x1bc: 0x0e, 0x1bd: 0x0e, 0x1be: 0x0e, 0x1bf: 0x0e, + // Block 0x7, offset 0x1c0 + 0x1c0: 0x0e, 0x1c1: 0x0e, 0x1c2: 0x0e, 0x1c3: 0x0e, 0x1c4: 0x0e, 0x1c5: 0x0e, 0x1c6: 0x0e, 0x1c7: 0x0e, + 0x1c8: 0x0e, 0x1c9: 0x0e, 0x1ca: 0x0e, 0x1cb: 0x0e, 0x1cc: 0x0e, 0x1cd: 0x0e, 0x1ce: 0x0e, 0x1cf: 0x0e, + 0x1d0: 0x0e, 0x1d1: 0x0e, 0x1d2: 0x0e, 0x1d3: 0x0e, 0x1d4: 0x0e, 0x1d5: 0x0e, 0x1d6: 0x0e, 0x1d7: 0x0e, + 0x1d8: 0x0e, 0x1d9: 0x0e, 0x1da: 0x0e, 0x1db: 0x0e, 0x1dc: 0x0e, 0x1dd: 0x0e, 0x1de: 0x0e, 0x1df: 0x0e, + 0x1e0: 0x0e, 0x1e1: 0x0e, 0x1e2: 0x0e, 0x1e3: 0x0e, 0x1e4: 0x0e, 0x1e5: 0x0e, 0x1e6: 0x0e, 0x1e7: 0x0e, + 0x1e8: 0x0e, 0x1e9: 0x0e, 0x1ea: 0x0e, 0x1eb: 0x0e, 0x1ec: 0x0e, 0x1ed: 0x0e, 0x1ee: 0x0e, 0x1ef: 0x0e, + 0x1f0: 0x0e, 0x1f1: 0x0e, 0x1f2: 0x0e, 0x1f3: 0x0e, 0x1f4: 0x0e, 0x1f5: 0x0e, 0x1f6: 0x0e, + 0x1f8: 0x0e, 0x1f9: 0x0e, 0x1fa: 0x0e, 0x1fb: 0x0e, 0x1fc: 0x0e, 0x1fd: 0x0e, 0x1fe: 0x0e, 0x1ff: 0x0e, + // Block 0x8, offset 0x200 + 0x200: 0x0e, 0x201: 0x0e, 0x202: 0x0e, 0x203: 0x0e, 0x204: 0x0e, 0x205: 0x0e, 0x206: 0x0e, 0x207: 0x0e, + 0x208: 0x0e, 0x209: 0x0e, 0x20a: 0x0e, 0x20b: 0x0e, 0x20c: 0x0e, 0x20d: 0x0e, 0x20e: 0x0e, 0x20f: 0x0e, + 0x210: 0x0e, 0x211: 0x0e, 0x212: 0x0e, 
0x213: 0x0e, 0x214: 0x0e, 0x215: 0x0e, 0x216: 0x0e, 0x217: 0x0e, + 0x218: 0x0e, 0x219: 0x0e, 0x21a: 0x0e, 0x21b: 0x0e, 0x21c: 0x0e, 0x21d: 0x0e, 0x21e: 0x0e, 0x21f: 0x0e, + 0x220: 0x0e, 0x221: 0x0e, 0x222: 0x0e, 0x223: 0x0e, 0x224: 0x0e, 0x225: 0x0e, 0x226: 0x0e, 0x227: 0x0e, + 0x228: 0x0e, 0x229: 0x0e, 0x22a: 0x0e, 0x22b: 0x0e, 0x22c: 0x0e, 0x22d: 0x0e, 0x22e: 0x0e, 0x22f: 0x0e, + 0x230: 0x0e, 0x231: 0x0e, 0x232: 0x0e, 0x233: 0x0e, 0x234: 0x0e, 0x235: 0x0e, 0x236: 0x0e, 0x237: 0x0e, + 0x238: 0x0e, 0x239: 0x0e, 0x23a: 0x0e, 0x23b: 0x0e, 0x23c: 0x0e, 0x23d: 0x0e, 0x23e: 0x0e, 0x23f: 0x0e, + // Block 0x9, offset 0x240 + 0x240: 0x0e, 0x241: 0x0e, 0x242: 0x0e, 0x243: 0x0e, 0x244: 0x0e, 0x245: 0x0e, 0x246: 0x0e, 0x247: 0x0e, + 0x248: 0x0e, 0x249: 0x0e, 0x24a: 0x0e, 0x24b: 0x0e, 0x24c: 0x0e, 0x24d: 0x0e, 0x24e: 0x0e, 0x24f: 0x0e, + 0x250: 0x0e, 0x251: 0x0e, 0x252: 0x3b, 0x253: 0x3c, + 0x265: 0x3d, + 0x270: 0x0e, 0x271: 0x0e, 0x272: 0x0e, 0x273: 0x0e, 0x274: 0x0e, 0x275: 0x0e, 0x276: 0x0e, 0x277: 0x0e, + 0x278: 0x0e, 0x279: 0x0e, 0x27a: 0x0e, 0x27b: 0x0e, 0x27c: 0x0e, 0x27d: 0x0e, 0x27e: 0x0e, 0x27f: 0x0e, + // Block 0xa, offset 0x280 + 0x280: 0x0e, 0x281: 0x0e, 0x282: 0x0e, 0x283: 0x0e, 0x284: 0x0e, 0x285: 0x0e, 0x286: 0x0e, 0x287: 0x0e, + 0x288: 0x0e, 0x289: 0x0e, 0x28a: 0x0e, 0x28b: 0x0e, 0x28c: 0x0e, 0x28d: 0x0e, 0x28e: 0x0e, 0x28f: 0x0e, + 0x290: 0x0e, 0x291: 0x0e, 0x292: 0x0e, 0x293: 0x0e, 0x294: 0x0e, 0x295: 0x0e, 0x296: 0x0e, 0x297: 0x0e, + 0x298: 0x0e, 0x299: 0x0e, 0x29a: 0x0e, 0x29b: 0x0e, 0x29c: 0x0e, 0x29d: 0x0e, 0x29e: 0x3e, + // Block 0xb, offset 0x2c0 + 0x2c0: 0x08, 0x2c1: 0x08, 0x2c2: 0x08, 0x2c3: 0x08, 0x2c4: 0x08, 0x2c5: 0x08, 0x2c6: 0x08, 0x2c7: 0x08, + 0x2c8: 0x08, 0x2c9: 0x08, 0x2ca: 0x08, 0x2cb: 0x08, 0x2cc: 0x08, 0x2cd: 0x08, 0x2ce: 0x08, 0x2cf: 0x08, + 0x2d0: 0x08, 0x2d1: 0x08, 0x2d2: 0x08, 0x2d3: 0x08, 0x2d4: 0x08, 0x2d5: 0x08, 0x2d6: 0x08, 0x2d7: 0x08, + 0x2d8: 0x08, 0x2d9: 0x08, 0x2da: 0x08, 0x2db: 0x08, 0x2dc: 0x08, 0x2dd: 0x08, 0x2de: 0x08, 0x2df: 0x08, + 0x2e0: 0x08, 0x2e1: 0x08, 0x2e2: 0x08, 0x2e3: 0x08, 0x2e4: 0x08, 0x2e5: 0x08, 0x2e6: 0x08, 0x2e7: 0x08, + 0x2e8: 0x08, 0x2e9: 0x08, 0x2ea: 0x08, 0x2eb: 0x08, 0x2ec: 0x08, 0x2ed: 0x08, 0x2ee: 0x08, 0x2ef: 0x08, + 0x2f0: 0x08, 0x2f1: 0x08, 0x2f2: 0x08, 0x2f3: 0x08, 0x2f4: 0x08, 0x2f5: 0x08, 0x2f6: 0x08, 0x2f7: 0x08, + 0x2f8: 0x08, 0x2f9: 0x08, 0x2fa: 0x08, 0x2fb: 0x08, 0x2fc: 0x08, 0x2fd: 0x08, 0x2fe: 0x08, 0x2ff: 0x08, + // Block 0xc, offset 0x300 + 0x300: 0x08, 0x301: 0x08, 0x302: 0x08, 0x303: 0x08, 0x304: 0x08, 0x305: 0x08, 0x306: 0x08, 0x307: 0x08, + 0x308: 0x08, 0x309: 0x08, 0x30a: 0x08, 0x30b: 0x08, 0x30c: 0x08, 0x30d: 0x08, 0x30e: 0x08, 0x30f: 0x08, + 0x310: 0x08, 0x311: 0x08, 0x312: 0x08, 0x313: 0x08, 0x314: 0x08, 0x315: 0x08, 0x316: 0x08, 0x317: 0x08, + 0x318: 0x08, 0x319: 0x08, 0x31a: 0x08, 0x31b: 0x08, 0x31c: 0x08, 0x31d: 0x08, 0x31e: 0x08, 0x31f: 0x08, + 0x320: 0x08, 0x321: 0x08, 0x322: 0x08, 0x323: 0x08, 0x324: 0x0e, 0x325: 0x0e, 0x326: 0x0e, 0x327: 0x0e, + 0x328: 0x0e, 0x329: 0x0e, 0x32a: 0x0e, 0x32b: 0x0e, + 0x338: 0x3f, 0x339: 0x40, 0x33c: 0x41, 0x33d: 0x42, 0x33e: 0x43, 0x33f: 0x44, + // Block 0xd, offset 0x340 + 0x37f: 0x45, + // Block 0xe, offset 0x380 + 0x380: 0x0e, 0x381: 0x0e, 0x382: 0x0e, 0x383: 0x0e, 0x384: 0x0e, 0x385: 0x0e, 0x386: 0x0e, 0x387: 0x0e, + 0x388: 0x0e, 0x389: 0x0e, 0x38a: 0x0e, 0x38b: 0x0e, 0x38c: 0x0e, 0x38d: 0x0e, 0x38e: 0x0e, 0x38f: 0x0e, + 0x390: 0x0e, 0x391: 0x0e, 0x392: 0x0e, 0x393: 0x0e, 0x394: 0x0e, 0x395: 0x0e, 0x396: 0x0e, 0x397: 0x0e, + 0x398: 0x0e, 0x399: 0x0e, 0x39a: 
0x0e, 0x39b: 0x0e, 0x39c: 0x0e, 0x39d: 0x0e, 0x39e: 0x0e, 0x39f: 0x46, + 0x3a0: 0x0e, 0x3a1: 0x0e, 0x3a2: 0x0e, 0x3a3: 0x0e, 0x3a4: 0x0e, 0x3a5: 0x0e, 0x3a6: 0x0e, 0x3a7: 0x0e, + 0x3a8: 0x0e, 0x3a9: 0x0e, 0x3aa: 0x0e, 0x3ab: 0x47, + // Block 0xf, offset 0x3c0 + 0x3c0: 0x0e, 0x3c1: 0x0e, 0x3c2: 0x0e, 0x3c3: 0x0e, 0x3c4: 0x48, 0x3c5: 0x49, 0x3c6: 0x0e, 0x3c7: 0x0e, + 0x3c8: 0x0e, 0x3c9: 0x0e, 0x3ca: 0x0e, 0x3cb: 0x4a, + // Block 0x10, offset 0x400 + 0x400: 0x4b, 0x403: 0x4c, 0x404: 0x4d, 0x405: 0x4e, 0x406: 0x4f, + 0x408: 0x50, 0x409: 0x51, 0x40c: 0x52, 0x40d: 0x53, 0x40e: 0x54, 0x40f: 0x55, + 0x410: 0x3a, 0x411: 0x56, 0x412: 0x0e, 0x413: 0x57, 0x414: 0x58, 0x415: 0x59, 0x416: 0x5a, 0x417: 0x5b, + 0x418: 0x0e, 0x419: 0x5c, 0x41a: 0x0e, 0x41b: 0x5d, + 0x424: 0x5e, 0x425: 0x5f, 0x426: 0x60, 0x427: 0x61, + // Block 0x11, offset 0x440 + 0x456: 0x0b, 0x457: 0x06, + 0x458: 0x0c, 0x45b: 0x0d, 0x45f: 0x0e, + 0x460: 0x06, 0x461: 0x06, 0x462: 0x06, 0x463: 0x06, 0x464: 0x06, 0x465: 0x06, 0x466: 0x06, 0x467: 0x06, + 0x468: 0x06, 0x469: 0x06, 0x46a: 0x06, 0x46b: 0x06, 0x46c: 0x06, 0x46d: 0x06, 0x46e: 0x06, 0x46f: 0x06, + 0x470: 0x06, 0x471: 0x06, 0x472: 0x06, 0x473: 0x06, 0x474: 0x06, 0x475: 0x06, 0x476: 0x06, 0x477: 0x06, + 0x478: 0x06, 0x479: 0x06, 0x47a: 0x06, 0x47b: 0x06, 0x47c: 0x06, 0x47d: 0x06, 0x47e: 0x06, 0x47f: 0x06, + // Block 0x12, offset 0x480 + 0x484: 0x08, 0x485: 0x08, 0x486: 0x08, 0x487: 0x09, + // Block 0x13, offset 0x4c0 + 0x4c0: 0x08, 0x4c1: 0x08, 0x4c2: 0x08, 0x4c3: 0x08, 0x4c4: 0x08, 0x4c5: 0x08, 0x4c6: 0x08, 0x4c7: 0x08, + 0x4c8: 0x08, 0x4c9: 0x08, 0x4ca: 0x08, 0x4cb: 0x08, 0x4cc: 0x08, 0x4cd: 0x08, 0x4ce: 0x08, 0x4cf: 0x08, + 0x4d0: 0x08, 0x4d1: 0x08, 0x4d2: 0x08, 0x4d3: 0x08, 0x4d4: 0x08, 0x4d5: 0x08, 0x4d6: 0x08, 0x4d7: 0x08, + 0x4d8: 0x08, 0x4d9: 0x08, 0x4da: 0x08, 0x4db: 0x08, 0x4dc: 0x08, 0x4dd: 0x08, 0x4de: 0x08, 0x4df: 0x08, + 0x4e0: 0x08, 0x4e1: 0x08, 0x4e2: 0x08, 0x4e3: 0x08, 0x4e4: 0x08, 0x4e5: 0x08, 0x4e6: 0x08, 0x4e7: 0x08, + 0x4e8: 0x08, 0x4e9: 0x08, 0x4ea: 0x08, 0x4eb: 0x08, 0x4ec: 0x08, 0x4ed: 0x08, 0x4ee: 0x08, 0x4ef: 0x08, + 0x4f0: 0x08, 0x4f1: 0x08, 0x4f2: 0x08, 0x4f3: 0x08, 0x4f4: 0x08, 0x4f5: 0x08, 0x4f6: 0x08, 0x4f7: 0x08, + 0x4f8: 0x08, 0x4f9: 0x08, 0x4fa: 0x08, 0x4fb: 0x08, 0x4fc: 0x08, 0x4fd: 0x08, 0x4fe: 0x08, 0x4ff: 0x62, + // Block 0x14, offset 0x500 + 0x520: 0x10, + 0x530: 0x09, 0x531: 0x09, 0x532: 0x09, 0x533: 0x09, 0x534: 0x09, 0x535: 0x09, 0x536: 0x09, 0x537: 0x09, + 0x538: 0x09, 0x539: 0x09, 0x53a: 0x09, 0x53b: 0x09, 0x53c: 0x09, 0x53d: 0x09, 0x53e: 0x09, 0x53f: 0x11, + // Block 0x15, offset 0x540 + 0x540: 0x09, 0x541: 0x09, 0x542: 0x09, 0x543: 0x09, 0x544: 0x09, 0x545: 0x09, 0x546: 0x09, 0x547: 0x09, + 0x548: 0x09, 0x549: 0x09, 0x54a: 0x09, 0x54b: 0x09, 0x54c: 0x09, 0x54d: 0x09, 0x54e: 0x09, 0x54f: 0x11, +} + +// inverseData contains 4-byte entries of the following format: +// <0 padding> +// The last byte of the UTF-8-encoded rune is xor-ed with the last byte of the +// UTF-8 encoding of the original rune. Mappings often have the following +// pattern: +// A -> A (U+FF21 -> U+0041) +// B -> B (U+FF22 -> U+0042) +// ... +// By xor-ing the last byte the same entry can be shared by many mappings. This +// reduces the total number of distinct entries by about two thirds. +// The resulting entry for the aforementioned mappings is +// { 0x01, 0xE0, 0x00, 0x00 } +// Using this entry to map U+FF21 (UTF-8 [EF BC A1]), we get +// E0 ^ A1 = 41. +// Similarly, for U+FF22 (UTF-8 [EF BC A2]), we get +// E0 ^ A2 = 42. 
+// Note that because of the xor-ing, the byte sequence stored in the entry is +// not valid UTF-8. +var inverseData = [150][4]byte{ + {0x00, 0x00, 0x00, 0x00}, + {0x03, 0xe3, 0x80, 0xa0}, + {0x03, 0xef, 0xbc, 0xa0}, + {0x03, 0xef, 0xbc, 0xe0}, + {0x03, 0xef, 0xbd, 0xe0}, + {0x03, 0xef, 0xbf, 0x02}, + {0x03, 0xef, 0xbf, 0x00}, + {0x03, 0xef, 0xbf, 0x0e}, + {0x03, 0xef, 0xbf, 0x0c}, + {0x03, 0xef, 0xbf, 0x0f}, + {0x03, 0xef, 0xbf, 0x39}, + {0x03, 0xef, 0xbf, 0x3b}, + {0x03, 0xef, 0xbf, 0x3f}, + {0x03, 0xef, 0xbf, 0x2a}, + {0x03, 0xef, 0xbf, 0x0d}, + {0x03, 0xef, 0xbf, 0x25}, + {0x03, 0xef, 0xbd, 0x1a}, + {0x03, 0xef, 0xbd, 0x26}, + {0x01, 0xa0, 0x00, 0x00}, + {0x03, 0xef, 0xbd, 0x25}, + {0x03, 0xef, 0xbd, 0x23}, + {0x03, 0xef, 0xbd, 0x2e}, + {0x03, 0xef, 0xbe, 0x07}, + {0x03, 0xef, 0xbe, 0x05}, + {0x03, 0xef, 0xbd, 0x06}, + {0x03, 0xef, 0xbd, 0x13}, + {0x03, 0xef, 0xbd, 0x0b}, + {0x03, 0xef, 0xbd, 0x16}, + {0x03, 0xef, 0xbd, 0x0c}, + {0x03, 0xef, 0xbd, 0x15}, + {0x03, 0xef, 0xbd, 0x0d}, + {0x03, 0xef, 0xbd, 0x1c}, + {0x03, 0xef, 0xbd, 0x02}, + {0x03, 0xef, 0xbd, 0x1f}, + {0x03, 0xef, 0xbd, 0x1d}, + {0x03, 0xef, 0xbd, 0x17}, + {0x03, 0xef, 0xbd, 0x08}, + {0x03, 0xef, 0xbd, 0x09}, + {0x03, 0xef, 0xbd, 0x0e}, + {0x03, 0xef, 0xbd, 0x04}, + {0x03, 0xef, 0xbd, 0x05}, + {0x03, 0xef, 0xbe, 0x3f}, + {0x03, 0xef, 0xbe, 0x00}, + {0x03, 0xef, 0xbd, 0x2c}, + {0x03, 0xef, 0xbe, 0x06}, + {0x03, 0xef, 0xbe, 0x0c}, + {0x03, 0xef, 0xbe, 0x0f}, + {0x03, 0xef, 0xbe, 0x0d}, + {0x03, 0xef, 0xbe, 0x0b}, + {0x03, 0xef, 0xbe, 0x19}, + {0x03, 0xef, 0xbe, 0x15}, + {0x03, 0xef, 0xbe, 0x11}, + {0x03, 0xef, 0xbe, 0x31}, + {0x03, 0xef, 0xbe, 0x33}, + {0x03, 0xef, 0xbd, 0x0f}, + {0x03, 0xef, 0xbe, 0x30}, + {0x03, 0xef, 0xbe, 0x3e}, + {0x03, 0xef, 0xbe, 0x32}, + {0x03, 0xef, 0xbe, 0x36}, + {0x03, 0xef, 0xbd, 0x14}, + {0x03, 0xef, 0xbe, 0x2e}, + {0x03, 0xef, 0xbd, 0x1e}, + {0x03, 0xef, 0xbe, 0x10}, + {0x03, 0xef, 0xbf, 0x13}, + {0x03, 0xef, 0xbf, 0x15}, + {0x03, 0xef, 0xbf, 0x17}, + {0x03, 0xef, 0xbf, 0x1f}, + {0x03, 0xef, 0xbf, 0x1d}, + {0x03, 0xef, 0xbf, 0x1b}, + {0x03, 0xef, 0xbf, 0x09}, + {0x03, 0xef, 0xbf, 0x0b}, + {0x03, 0xef, 0xbf, 0x37}, + {0x03, 0xef, 0xbe, 0x04}, + {0x01, 0xe0, 0x00, 0x00}, + {0x03, 0xe2, 0xa6, 0x1a}, + {0x03, 0xe2, 0xa6, 0x26}, + {0x03, 0xe3, 0x80, 0x23}, + {0x03, 0xe3, 0x80, 0x2e}, + {0x03, 0xe3, 0x80, 0x25}, + {0x03, 0xe3, 0x83, 0x1e}, + {0x03, 0xe3, 0x83, 0x14}, + {0x03, 0xe3, 0x82, 0x06}, + {0x03, 0xe3, 0x82, 0x0b}, + {0x03, 0xe3, 0x82, 0x0c}, + {0x03, 0xe3, 0x82, 0x0d}, + {0x03, 0xe3, 0x82, 0x02}, + {0x03, 0xe3, 0x83, 0x0f}, + {0x03, 0xe3, 0x83, 0x08}, + {0x03, 0xe3, 0x83, 0x09}, + {0x03, 0xe3, 0x83, 0x2c}, + {0x03, 0xe3, 0x83, 0x0c}, + {0x03, 0xe3, 0x82, 0x13}, + {0x03, 0xe3, 0x82, 0x16}, + {0x03, 0xe3, 0x82, 0x15}, + {0x03, 0xe3, 0x82, 0x1c}, + {0x03, 0xe3, 0x82, 0x1f}, + {0x03, 0xe3, 0x82, 0x1d}, + {0x03, 0xe3, 0x82, 0x1a}, + {0x03, 0xe3, 0x82, 0x17}, + {0x03, 0xe3, 0x82, 0x08}, + {0x03, 0xe3, 0x82, 0x09}, + {0x03, 0xe3, 0x82, 0x0e}, + {0x03, 0xe3, 0x82, 0x04}, + {0x03, 0xe3, 0x82, 0x05}, + {0x03, 0xe3, 0x82, 0x3f}, + {0x03, 0xe3, 0x83, 0x00}, + {0x03, 0xe3, 0x83, 0x06}, + {0x03, 0xe3, 0x83, 0x05}, + {0x03, 0xe3, 0x83, 0x0d}, + {0x03, 0xe3, 0x83, 0x0b}, + {0x03, 0xe3, 0x83, 0x07}, + {0x03, 0xe3, 0x83, 0x19}, + {0x03, 0xe3, 0x83, 0x15}, + {0x03, 0xe3, 0x83, 0x11}, + {0x03, 0xe3, 0x83, 0x31}, + {0x03, 0xe3, 0x83, 0x33}, + {0x03, 0xe3, 0x83, 0x30}, + {0x03, 0xe3, 0x83, 0x3e}, + {0x03, 0xe3, 0x83, 0x32}, + {0x03, 0xe3, 0x83, 0x36}, + {0x03, 0xe3, 0x83, 0x2e}, + {0x03, 0xe3, 0x82, 0x07}, + 
{0x03, 0xe3, 0x85, 0x04}, + {0x03, 0xe3, 0x84, 0x10}, + {0x03, 0xe3, 0x85, 0x30}, + {0x03, 0xe3, 0x85, 0x0d}, + {0x03, 0xe3, 0x85, 0x13}, + {0x03, 0xe3, 0x85, 0x15}, + {0x03, 0xe3, 0x85, 0x17}, + {0x03, 0xe3, 0x85, 0x1f}, + {0x03, 0xe3, 0x85, 0x1d}, + {0x03, 0xe3, 0x85, 0x1b}, + {0x03, 0xe3, 0x85, 0x09}, + {0x03, 0xe3, 0x85, 0x0f}, + {0x03, 0xe3, 0x85, 0x0b}, + {0x03, 0xe3, 0x85, 0x37}, + {0x03, 0xe3, 0x85, 0x3b}, + {0x03, 0xe3, 0x85, 0x39}, + {0x03, 0xe3, 0x85, 0x3f}, + {0x02, 0xc2, 0x02, 0x00}, + {0x02, 0xc2, 0x0e, 0x00}, + {0x02, 0xc2, 0x0c, 0x00}, + {0x02, 0xc2, 0x00, 0x00}, + {0x03, 0xe2, 0x82, 0x0f}, + {0x03, 0xe2, 0x94, 0x2a}, + {0x03, 0xe2, 0x86, 0x39}, + {0x03, 0xe2, 0x86, 0x3b}, + {0x03, 0xe2, 0x86, 0x3f}, + {0x03, 0xe2, 0x96, 0x0d}, + {0x03, 0xe2, 0x97, 0x25}, +} + +// Total table size 14936 bytes (14KiB) diff --git a/vendor/golang.org/x/text/width/tables11.0.0.go b/vendor/golang.org/x/text/width/tables11.0.0.go new file mode 100644 index 000000000..990f7622f --- /dev/null +++ b/vendor/golang.org/x/text/width/tables11.0.0.go @@ -0,0 +1,1331 @@ +// Code generated by running "go generate" in golang.org/x/text. DO NOT EDIT. + +//go:build go1.13 && !go1.14 +// +build go1.13,!go1.14 + +package width + +// UnicodeVersion is the Unicode version from which the tables in this package are derived. +const UnicodeVersion = "11.0.0" + +// lookup returns the trie value for the first UTF-8 encoding in s and +// the width in bytes of this encoding. The size will be 0 if s does not +// hold enough bytes to complete the encoding. len(s) must be greater than 0. +func (t *widthTrie) lookup(s []byte) (v uint16, sz int) { + c0 := s[0] + switch { + case c0 < 0x80: // is ASCII + return widthValues[c0], 1 + case c0 < 0xC2: + return 0, 1 // Illegal UTF-8: not a starter, not ASCII. + case c0 < 0xE0: // 2-byte UTF-8 + if len(s) < 2 { + return 0, 0 + } + i := widthIndex[c0] + c1 := s[1] + if c1 < 0x80 || 0xC0 <= c1 { + return 0, 1 // Illegal UTF-8: not a continuation byte. + } + return t.lookupValue(uint32(i), c1), 2 + case c0 < 0xF0: // 3-byte UTF-8 + if len(s) < 3 { + return 0, 0 + } + i := widthIndex[c0] + c1 := s[1] + if c1 < 0x80 || 0xC0 <= c1 { + return 0, 1 // Illegal UTF-8: not a continuation byte. + } + o := uint32(i)<<6 + uint32(c1) + i = widthIndex[o] + c2 := s[2] + if c2 < 0x80 || 0xC0 <= c2 { + return 0, 2 // Illegal UTF-8: not a continuation byte. + } + return t.lookupValue(uint32(i), c2), 3 + case c0 < 0xF8: // 4-byte UTF-8 + if len(s) < 4 { + return 0, 0 + } + i := widthIndex[c0] + c1 := s[1] + if c1 < 0x80 || 0xC0 <= c1 { + return 0, 1 // Illegal UTF-8: not a continuation byte. + } + o := uint32(i)<<6 + uint32(c1) + i = widthIndex[o] + c2 := s[2] + if c2 < 0x80 || 0xC0 <= c2 { + return 0, 2 // Illegal UTF-8: not a continuation byte. + } + o = uint32(i)<<6 + uint32(c2) + i = widthIndex[o] + c3 := s[3] + if c3 < 0x80 || 0xC0 <= c3 { + return 0, 3 // Illegal UTF-8: not a continuation byte. + } + return t.lookupValue(uint32(i), c3), 4 + } + // Illegal rune + return 0, 1 +} + +// lookupUnsafe returns the trie value for the first UTF-8 encoding in s. +// s must start with a full and valid UTF-8 encoded rune. 
+func (t *widthTrie) lookupUnsafe(s []byte) uint16 { + c0 := s[0] + if c0 < 0x80 { // is ASCII + return widthValues[c0] + } + i := widthIndex[c0] + if c0 < 0xE0 { // 2-byte UTF-8 + return t.lookupValue(uint32(i), s[1]) + } + i = widthIndex[uint32(i)<<6+uint32(s[1])] + if c0 < 0xF0 { // 3-byte UTF-8 + return t.lookupValue(uint32(i), s[2]) + } + i = widthIndex[uint32(i)<<6+uint32(s[2])] + if c0 < 0xF8 { // 4-byte UTF-8 + return t.lookupValue(uint32(i), s[3]) + } + return 0 +} + +// lookupString returns the trie value for the first UTF-8 encoding in s and +// the width in bytes of this encoding. The size will be 0 if s does not +// hold enough bytes to complete the encoding. len(s) must be greater than 0. +func (t *widthTrie) lookupString(s string) (v uint16, sz int) { + c0 := s[0] + switch { + case c0 < 0x80: // is ASCII + return widthValues[c0], 1 + case c0 < 0xC2: + return 0, 1 // Illegal UTF-8: not a starter, not ASCII. + case c0 < 0xE0: // 2-byte UTF-8 + if len(s) < 2 { + return 0, 0 + } + i := widthIndex[c0] + c1 := s[1] + if c1 < 0x80 || 0xC0 <= c1 { + return 0, 1 // Illegal UTF-8: not a continuation byte. + } + return t.lookupValue(uint32(i), c1), 2 + case c0 < 0xF0: // 3-byte UTF-8 + if len(s) < 3 { + return 0, 0 + } + i := widthIndex[c0] + c1 := s[1] + if c1 < 0x80 || 0xC0 <= c1 { + return 0, 1 // Illegal UTF-8: not a continuation byte. + } + o := uint32(i)<<6 + uint32(c1) + i = widthIndex[o] + c2 := s[2] + if c2 < 0x80 || 0xC0 <= c2 { + return 0, 2 // Illegal UTF-8: not a continuation byte. + } + return t.lookupValue(uint32(i), c2), 3 + case c0 < 0xF8: // 4-byte UTF-8 + if len(s) < 4 { + return 0, 0 + } + i := widthIndex[c0] + c1 := s[1] + if c1 < 0x80 || 0xC0 <= c1 { + return 0, 1 // Illegal UTF-8: not a continuation byte. + } + o := uint32(i)<<6 + uint32(c1) + i = widthIndex[o] + c2 := s[2] + if c2 < 0x80 || 0xC0 <= c2 { + return 0, 2 // Illegal UTF-8: not a continuation byte. + } + o = uint32(i)<<6 + uint32(c2) + i = widthIndex[o] + c3 := s[3] + if c3 < 0x80 || 0xC0 <= c3 { + return 0, 3 // Illegal UTF-8: not a continuation byte. + } + return t.lookupValue(uint32(i), c3), 4 + } + // Illegal rune + return 0, 1 +} + +// lookupStringUnsafe returns the trie value for the first UTF-8 encoding in s. +// s must start with a full and valid UTF-8 encoded rune. +func (t *widthTrie) lookupStringUnsafe(s string) uint16 { + c0 := s[0] + if c0 < 0x80 { // is ASCII + return widthValues[c0] + } + i := widthIndex[c0] + if c0 < 0xE0 { // 2-byte UTF-8 + return t.lookupValue(uint32(i), s[1]) + } + i = widthIndex[uint32(i)<<6+uint32(s[1])] + if c0 < 0xF0 { // 3-byte UTF-8 + return t.lookupValue(uint32(i), s[2]) + } + i = widthIndex[uint32(i)<<6+uint32(s[2])] + if c0 < 0xF8 { // 4-byte UTF-8 + return t.lookupValue(uint32(i), s[3]) + } + return 0 +} + +// widthTrie. Total size: 14336 bytes (14.00 KiB). Checksum: c0f7712776e71cd4. +type widthTrie struct{} + +func newWidthTrie(i int) *widthTrie { + return &widthTrie{} +} + +// lookupValue determines the type of block n and looks up the value for b. +func (t *widthTrie) lookupValue(n uint32, b byte) uint16 { + switch { + default: + return uint16(widthValues[n<<6+uint32(b)]) + } +} + +// widthValues: 101 blocks, 6464 entries, 12928 bytes +// The third block is the zero block. 
+var widthValues = [6464]uint16{ + // Block 0x0, offset 0x0 + 0x20: 0x6001, 0x21: 0x6002, 0x22: 0x6002, 0x23: 0x6002, + 0x24: 0x6002, 0x25: 0x6002, 0x26: 0x6002, 0x27: 0x6002, 0x28: 0x6002, 0x29: 0x6002, + 0x2a: 0x6002, 0x2b: 0x6002, 0x2c: 0x6002, 0x2d: 0x6002, 0x2e: 0x6002, 0x2f: 0x6002, + 0x30: 0x6002, 0x31: 0x6002, 0x32: 0x6002, 0x33: 0x6002, 0x34: 0x6002, 0x35: 0x6002, + 0x36: 0x6002, 0x37: 0x6002, 0x38: 0x6002, 0x39: 0x6002, 0x3a: 0x6002, 0x3b: 0x6002, + 0x3c: 0x6002, 0x3d: 0x6002, 0x3e: 0x6002, 0x3f: 0x6002, + // Block 0x1, offset 0x40 + 0x40: 0x6003, 0x41: 0x6003, 0x42: 0x6003, 0x43: 0x6003, 0x44: 0x6003, 0x45: 0x6003, + 0x46: 0x6003, 0x47: 0x6003, 0x48: 0x6003, 0x49: 0x6003, 0x4a: 0x6003, 0x4b: 0x6003, + 0x4c: 0x6003, 0x4d: 0x6003, 0x4e: 0x6003, 0x4f: 0x6003, 0x50: 0x6003, 0x51: 0x6003, + 0x52: 0x6003, 0x53: 0x6003, 0x54: 0x6003, 0x55: 0x6003, 0x56: 0x6003, 0x57: 0x6003, + 0x58: 0x6003, 0x59: 0x6003, 0x5a: 0x6003, 0x5b: 0x6003, 0x5c: 0x6003, 0x5d: 0x6003, + 0x5e: 0x6003, 0x5f: 0x6003, 0x60: 0x6004, 0x61: 0x6004, 0x62: 0x6004, 0x63: 0x6004, + 0x64: 0x6004, 0x65: 0x6004, 0x66: 0x6004, 0x67: 0x6004, 0x68: 0x6004, 0x69: 0x6004, + 0x6a: 0x6004, 0x6b: 0x6004, 0x6c: 0x6004, 0x6d: 0x6004, 0x6e: 0x6004, 0x6f: 0x6004, + 0x70: 0x6004, 0x71: 0x6004, 0x72: 0x6004, 0x73: 0x6004, 0x74: 0x6004, 0x75: 0x6004, + 0x76: 0x6004, 0x77: 0x6004, 0x78: 0x6004, 0x79: 0x6004, 0x7a: 0x6004, 0x7b: 0x6004, + 0x7c: 0x6004, 0x7d: 0x6004, 0x7e: 0x6004, + // Block 0x2, offset 0x80 + // Block 0x3, offset 0xc0 + 0xe1: 0x2000, 0xe2: 0x6005, 0xe3: 0x6005, + 0xe4: 0x2000, 0xe5: 0x6006, 0xe6: 0x6005, 0xe7: 0x2000, 0xe8: 0x2000, + 0xea: 0x2000, 0xec: 0x6007, 0xed: 0x2000, 0xee: 0x2000, 0xef: 0x6008, + 0xf0: 0x2000, 0xf1: 0x2000, 0xf2: 0x2000, 0xf3: 0x2000, 0xf4: 0x2000, + 0xf6: 0x2000, 0xf7: 0x2000, 0xf8: 0x2000, 0xf9: 0x2000, 0xfa: 0x2000, + 0xfc: 0x2000, 0xfd: 0x2000, 0xfe: 0x2000, 0xff: 0x2000, + // Block 0x4, offset 0x100 + 0x106: 0x2000, + 0x110: 0x2000, + 0x117: 0x2000, + 0x118: 0x2000, + 0x11e: 0x2000, 0x11f: 0x2000, 0x120: 0x2000, 0x121: 0x2000, + 0x126: 0x2000, 0x128: 0x2000, 0x129: 0x2000, + 0x12a: 0x2000, 0x12c: 0x2000, 0x12d: 0x2000, + 0x130: 0x2000, 0x132: 0x2000, 0x133: 0x2000, + 0x137: 0x2000, 0x138: 0x2000, 0x139: 0x2000, 0x13a: 0x2000, + 0x13c: 0x2000, 0x13e: 0x2000, + // Block 0x5, offset 0x140 + 0x141: 0x2000, + 0x151: 0x2000, + 0x153: 0x2000, + 0x15b: 0x2000, + 0x166: 0x2000, 0x167: 0x2000, + 0x16b: 0x2000, + 0x171: 0x2000, 0x172: 0x2000, 0x173: 0x2000, + 0x178: 0x2000, + 0x17f: 0x2000, + // Block 0x6, offset 0x180 + 0x180: 0x2000, 0x181: 0x2000, 0x182: 0x2000, 0x184: 0x2000, + 0x188: 0x2000, 0x189: 0x2000, 0x18a: 0x2000, 0x18b: 0x2000, + 0x18d: 0x2000, + 0x192: 0x2000, 0x193: 0x2000, + 0x1a6: 0x2000, 0x1a7: 0x2000, + 0x1ab: 0x2000, + // Block 0x7, offset 0x1c0 + 0x1ce: 0x2000, 0x1d0: 0x2000, + 0x1d2: 0x2000, 0x1d4: 0x2000, 0x1d6: 0x2000, + 0x1d8: 0x2000, 0x1da: 0x2000, 0x1dc: 0x2000, + // Block 0x8, offset 0x200 + 0x211: 0x2000, + 0x221: 0x2000, + // Block 0x9, offset 0x240 + 0x244: 0x2000, + 0x247: 0x2000, 0x249: 0x2000, 0x24a: 0x2000, 0x24b: 0x2000, + 0x24d: 0x2000, 0x250: 0x2000, + 0x258: 0x2000, 0x259: 0x2000, 0x25a: 0x2000, 0x25b: 0x2000, 0x25d: 0x2000, + 0x25f: 0x2000, + // Block 0xa, offset 0x280 + 0x280: 0x2000, 0x281: 0x2000, 0x282: 0x2000, 0x283: 0x2000, 0x284: 0x2000, 0x285: 0x2000, + 0x286: 0x2000, 0x287: 0x2000, 0x288: 0x2000, 0x289: 0x2000, 0x28a: 0x2000, 0x28b: 0x2000, + 0x28c: 0x2000, 0x28d: 0x2000, 0x28e: 0x2000, 0x28f: 0x2000, 0x290: 0x2000, 0x291: 0x2000, + 0x292: 0x2000, 0x293: 
0x2000, 0x294: 0x2000, 0x295: 0x2000, 0x296: 0x2000, 0x297: 0x2000, + 0x298: 0x2000, 0x299: 0x2000, 0x29a: 0x2000, 0x29b: 0x2000, 0x29c: 0x2000, 0x29d: 0x2000, + 0x29e: 0x2000, 0x29f: 0x2000, 0x2a0: 0x2000, 0x2a1: 0x2000, 0x2a2: 0x2000, 0x2a3: 0x2000, + 0x2a4: 0x2000, 0x2a5: 0x2000, 0x2a6: 0x2000, 0x2a7: 0x2000, 0x2a8: 0x2000, 0x2a9: 0x2000, + 0x2aa: 0x2000, 0x2ab: 0x2000, 0x2ac: 0x2000, 0x2ad: 0x2000, 0x2ae: 0x2000, 0x2af: 0x2000, + 0x2b0: 0x2000, 0x2b1: 0x2000, 0x2b2: 0x2000, 0x2b3: 0x2000, 0x2b4: 0x2000, 0x2b5: 0x2000, + 0x2b6: 0x2000, 0x2b7: 0x2000, 0x2b8: 0x2000, 0x2b9: 0x2000, 0x2ba: 0x2000, 0x2bb: 0x2000, + 0x2bc: 0x2000, 0x2bd: 0x2000, 0x2be: 0x2000, 0x2bf: 0x2000, + // Block 0xb, offset 0x2c0 + 0x2c0: 0x2000, 0x2c1: 0x2000, 0x2c2: 0x2000, 0x2c3: 0x2000, 0x2c4: 0x2000, 0x2c5: 0x2000, + 0x2c6: 0x2000, 0x2c7: 0x2000, 0x2c8: 0x2000, 0x2c9: 0x2000, 0x2ca: 0x2000, 0x2cb: 0x2000, + 0x2cc: 0x2000, 0x2cd: 0x2000, 0x2ce: 0x2000, 0x2cf: 0x2000, 0x2d0: 0x2000, 0x2d1: 0x2000, + 0x2d2: 0x2000, 0x2d3: 0x2000, 0x2d4: 0x2000, 0x2d5: 0x2000, 0x2d6: 0x2000, 0x2d7: 0x2000, + 0x2d8: 0x2000, 0x2d9: 0x2000, 0x2da: 0x2000, 0x2db: 0x2000, 0x2dc: 0x2000, 0x2dd: 0x2000, + 0x2de: 0x2000, 0x2df: 0x2000, 0x2e0: 0x2000, 0x2e1: 0x2000, 0x2e2: 0x2000, 0x2e3: 0x2000, + 0x2e4: 0x2000, 0x2e5: 0x2000, 0x2e6: 0x2000, 0x2e7: 0x2000, 0x2e8: 0x2000, 0x2e9: 0x2000, + 0x2ea: 0x2000, 0x2eb: 0x2000, 0x2ec: 0x2000, 0x2ed: 0x2000, 0x2ee: 0x2000, 0x2ef: 0x2000, + // Block 0xc, offset 0x300 + 0x311: 0x2000, + 0x312: 0x2000, 0x313: 0x2000, 0x314: 0x2000, 0x315: 0x2000, 0x316: 0x2000, 0x317: 0x2000, + 0x318: 0x2000, 0x319: 0x2000, 0x31a: 0x2000, 0x31b: 0x2000, 0x31c: 0x2000, 0x31d: 0x2000, + 0x31e: 0x2000, 0x31f: 0x2000, 0x320: 0x2000, 0x321: 0x2000, 0x323: 0x2000, + 0x324: 0x2000, 0x325: 0x2000, 0x326: 0x2000, 0x327: 0x2000, 0x328: 0x2000, 0x329: 0x2000, + 0x331: 0x2000, 0x332: 0x2000, 0x333: 0x2000, 0x334: 0x2000, 0x335: 0x2000, + 0x336: 0x2000, 0x337: 0x2000, 0x338: 0x2000, 0x339: 0x2000, 0x33a: 0x2000, 0x33b: 0x2000, + 0x33c: 0x2000, 0x33d: 0x2000, 0x33e: 0x2000, 0x33f: 0x2000, + // Block 0xd, offset 0x340 + 0x340: 0x2000, 0x341: 0x2000, 0x343: 0x2000, 0x344: 0x2000, 0x345: 0x2000, + 0x346: 0x2000, 0x347: 0x2000, 0x348: 0x2000, 0x349: 0x2000, + // Block 0xe, offset 0x380 + 0x381: 0x2000, + 0x390: 0x2000, 0x391: 0x2000, + 0x392: 0x2000, 0x393: 0x2000, 0x394: 0x2000, 0x395: 0x2000, 0x396: 0x2000, 0x397: 0x2000, + 0x398: 0x2000, 0x399: 0x2000, 0x39a: 0x2000, 0x39b: 0x2000, 0x39c: 0x2000, 0x39d: 0x2000, + 0x39e: 0x2000, 0x39f: 0x2000, 0x3a0: 0x2000, 0x3a1: 0x2000, 0x3a2: 0x2000, 0x3a3: 0x2000, + 0x3a4: 0x2000, 0x3a5: 0x2000, 0x3a6: 0x2000, 0x3a7: 0x2000, 0x3a8: 0x2000, 0x3a9: 0x2000, + 0x3aa: 0x2000, 0x3ab: 0x2000, 0x3ac: 0x2000, 0x3ad: 0x2000, 0x3ae: 0x2000, 0x3af: 0x2000, + 0x3b0: 0x2000, 0x3b1: 0x2000, 0x3b2: 0x2000, 0x3b3: 0x2000, 0x3b4: 0x2000, 0x3b5: 0x2000, + 0x3b6: 0x2000, 0x3b7: 0x2000, 0x3b8: 0x2000, 0x3b9: 0x2000, 0x3ba: 0x2000, 0x3bb: 0x2000, + 0x3bc: 0x2000, 0x3bd: 0x2000, 0x3be: 0x2000, 0x3bf: 0x2000, + // Block 0xf, offset 0x3c0 + 0x3c0: 0x2000, 0x3c1: 0x2000, 0x3c2: 0x2000, 0x3c3: 0x2000, 0x3c4: 0x2000, 0x3c5: 0x2000, + 0x3c6: 0x2000, 0x3c7: 0x2000, 0x3c8: 0x2000, 0x3c9: 0x2000, 0x3ca: 0x2000, 0x3cb: 0x2000, + 0x3cc: 0x2000, 0x3cd: 0x2000, 0x3ce: 0x2000, 0x3cf: 0x2000, 0x3d1: 0x2000, + // Block 0x10, offset 0x400 + 0x400: 0x4000, 0x401: 0x4000, 0x402: 0x4000, 0x403: 0x4000, 0x404: 0x4000, 0x405: 0x4000, + 0x406: 0x4000, 0x407: 0x4000, 0x408: 0x4000, 0x409: 0x4000, 0x40a: 0x4000, 0x40b: 0x4000, + 0x40c: 0x4000, 
0x40d: 0x4000, 0x40e: 0x4000, 0x40f: 0x4000, 0x410: 0x4000, 0x411: 0x4000, + 0x412: 0x4000, 0x413: 0x4000, 0x414: 0x4000, 0x415: 0x4000, 0x416: 0x4000, 0x417: 0x4000, + 0x418: 0x4000, 0x419: 0x4000, 0x41a: 0x4000, 0x41b: 0x4000, 0x41c: 0x4000, 0x41d: 0x4000, + 0x41e: 0x4000, 0x41f: 0x4000, 0x420: 0x4000, 0x421: 0x4000, 0x422: 0x4000, 0x423: 0x4000, + 0x424: 0x4000, 0x425: 0x4000, 0x426: 0x4000, 0x427: 0x4000, 0x428: 0x4000, 0x429: 0x4000, + 0x42a: 0x4000, 0x42b: 0x4000, 0x42c: 0x4000, 0x42d: 0x4000, 0x42e: 0x4000, 0x42f: 0x4000, + 0x430: 0x4000, 0x431: 0x4000, 0x432: 0x4000, 0x433: 0x4000, 0x434: 0x4000, 0x435: 0x4000, + 0x436: 0x4000, 0x437: 0x4000, 0x438: 0x4000, 0x439: 0x4000, 0x43a: 0x4000, 0x43b: 0x4000, + 0x43c: 0x4000, 0x43d: 0x4000, 0x43e: 0x4000, 0x43f: 0x4000, + // Block 0x11, offset 0x440 + 0x440: 0x4000, 0x441: 0x4000, 0x442: 0x4000, 0x443: 0x4000, 0x444: 0x4000, 0x445: 0x4000, + 0x446: 0x4000, 0x447: 0x4000, 0x448: 0x4000, 0x449: 0x4000, 0x44a: 0x4000, 0x44b: 0x4000, + 0x44c: 0x4000, 0x44d: 0x4000, 0x44e: 0x4000, 0x44f: 0x4000, 0x450: 0x4000, 0x451: 0x4000, + 0x452: 0x4000, 0x453: 0x4000, 0x454: 0x4000, 0x455: 0x4000, 0x456: 0x4000, 0x457: 0x4000, + 0x458: 0x4000, 0x459: 0x4000, 0x45a: 0x4000, 0x45b: 0x4000, 0x45c: 0x4000, 0x45d: 0x4000, + 0x45e: 0x4000, 0x45f: 0x4000, + // Block 0x12, offset 0x480 + 0x490: 0x2000, + 0x493: 0x2000, 0x494: 0x2000, 0x495: 0x2000, 0x496: 0x2000, + 0x498: 0x2000, 0x499: 0x2000, 0x49c: 0x2000, 0x49d: 0x2000, + 0x4a0: 0x2000, 0x4a1: 0x2000, 0x4a2: 0x2000, + 0x4a4: 0x2000, 0x4a5: 0x2000, 0x4a6: 0x2000, 0x4a7: 0x2000, + 0x4b0: 0x2000, 0x4b2: 0x2000, 0x4b3: 0x2000, 0x4b5: 0x2000, + 0x4bb: 0x2000, + 0x4be: 0x2000, + // Block 0x13, offset 0x4c0 + 0x4f4: 0x2000, + 0x4ff: 0x2000, + // Block 0x14, offset 0x500 + 0x501: 0x2000, 0x502: 0x2000, 0x503: 0x2000, 0x504: 0x2000, + 0x529: 0xa009, + 0x52c: 0x2000, + // Block 0x15, offset 0x540 + 0x543: 0x2000, 0x545: 0x2000, + 0x549: 0x2000, + 0x553: 0x2000, 0x556: 0x2000, + 0x561: 0x2000, 0x562: 0x2000, + 0x566: 0x2000, + 0x56b: 0x2000, + // Block 0x16, offset 0x580 + 0x593: 0x2000, 0x594: 0x2000, + 0x59b: 0x2000, 0x59c: 0x2000, 0x59d: 0x2000, + 0x59e: 0x2000, 0x5a0: 0x2000, 0x5a1: 0x2000, 0x5a2: 0x2000, 0x5a3: 0x2000, + 0x5a4: 0x2000, 0x5a5: 0x2000, 0x5a6: 0x2000, 0x5a7: 0x2000, 0x5a8: 0x2000, 0x5a9: 0x2000, + 0x5aa: 0x2000, 0x5ab: 0x2000, + 0x5b0: 0x2000, 0x5b1: 0x2000, 0x5b2: 0x2000, 0x5b3: 0x2000, 0x5b4: 0x2000, 0x5b5: 0x2000, + 0x5b6: 0x2000, 0x5b7: 0x2000, 0x5b8: 0x2000, 0x5b9: 0x2000, + // Block 0x17, offset 0x5c0 + 0x5c9: 0x2000, + 0x5d0: 0x200a, 0x5d1: 0x200b, + 0x5d2: 0x200a, 0x5d3: 0x200c, 0x5d4: 0x2000, 0x5d5: 0x2000, 0x5d6: 0x2000, 0x5d7: 0x2000, + 0x5d8: 0x2000, 0x5d9: 0x2000, + 0x5f8: 0x2000, 0x5f9: 0x2000, + // Block 0x18, offset 0x600 + 0x612: 0x2000, 0x614: 0x2000, + 0x627: 0x2000, + // Block 0x19, offset 0x640 + 0x640: 0x2000, 0x642: 0x2000, 0x643: 0x2000, + 0x647: 0x2000, 0x648: 0x2000, 0x64b: 0x2000, + 0x64f: 0x2000, 0x651: 0x2000, + 0x655: 0x2000, + 0x65a: 0x2000, 0x65d: 0x2000, + 0x65e: 0x2000, 0x65f: 0x2000, 0x660: 0x2000, 0x663: 0x2000, + 0x665: 0x2000, 0x667: 0x2000, 0x668: 0x2000, 0x669: 0x2000, + 0x66a: 0x2000, 0x66b: 0x2000, 0x66c: 0x2000, 0x66e: 0x2000, + 0x674: 0x2000, 0x675: 0x2000, + 0x676: 0x2000, 0x677: 0x2000, + 0x67c: 0x2000, 0x67d: 0x2000, + // Block 0x1a, offset 0x680 + 0x688: 0x2000, + 0x68c: 0x2000, + 0x692: 0x2000, + 0x6a0: 0x2000, 0x6a1: 0x2000, + 0x6a4: 0x2000, 0x6a5: 0x2000, 0x6a6: 0x2000, 0x6a7: 0x2000, + 0x6aa: 0x2000, 0x6ab: 0x2000, 0x6ae: 0x2000, 0x6af: 0x2000, + // 
Block 0x1b, offset 0x6c0 + 0x6c2: 0x2000, 0x6c3: 0x2000, + 0x6c6: 0x2000, 0x6c7: 0x2000, + 0x6d5: 0x2000, + 0x6d9: 0x2000, + 0x6e5: 0x2000, + 0x6ff: 0x2000, + // Block 0x1c, offset 0x700 + 0x712: 0x2000, + 0x71a: 0x4000, 0x71b: 0x4000, + 0x729: 0x4000, + 0x72a: 0x4000, + // Block 0x1d, offset 0x740 + 0x769: 0x4000, + 0x76a: 0x4000, 0x76b: 0x4000, 0x76c: 0x4000, + 0x770: 0x4000, 0x773: 0x4000, + // Block 0x1e, offset 0x780 + 0x7a0: 0x2000, 0x7a1: 0x2000, 0x7a2: 0x2000, 0x7a3: 0x2000, + 0x7a4: 0x2000, 0x7a5: 0x2000, 0x7a6: 0x2000, 0x7a7: 0x2000, 0x7a8: 0x2000, 0x7a9: 0x2000, + 0x7aa: 0x2000, 0x7ab: 0x2000, 0x7ac: 0x2000, 0x7ad: 0x2000, 0x7ae: 0x2000, 0x7af: 0x2000, + 0x7b0: 0x2000, 0x7b1: 0x2000, 0x7b2: 0x2000, 0x7b3: 0x2000, 0x7b4: 0x2000, 0x7b5: 0x2000, + 0x7b6: 0x2000, 0x7b7: 0x2000, 0x7b8: 0x2000, 0x7b9: 0x2000, 0x7ba: 0x2000, 0x7bb: 0x2000, + 0x7bc: 0x2000, 0x7bd: 0x2000, 0x7be: 0x2000, 0x7bf: 0x2000, + // Block 0x1f, offset 0x7c0 + 0x7c0: 0x2000, 0x7c1: 0x2000, 0x7c2: 0x2000, 0x7c3: 0x2000, 0x7c4: 0x2000, 0x7c5: 0x2000, + 0x7c6: 0x2000, 0x7c7: 0x2000, 0x7c8: 0x2000, 0x7c9: 0x2000, 0x7ca: 0x2000, 0x7cb: 0x2000, + 0x7cc: 0x2000, 0x7cd: 0x2000, 0x7ce: 0x2000, 0x7cf: 0x2000, 0x7d0: 0x2000, 0x7d1: 0x2000, + 0x7d2: 0x2000, 0x7d3: 0x2000, 0x7d4: 0x2000, 0x7d5: 0x2000, 0x7d6: 0x2000, 0x7d7: 0x2000, + 0x7d8: 0x2000, 0x7d9: 0x2000, 0x7da: 0x2000, 0x7db: 0x2000, 0x7dc: 0x2000, 0x7dd: 0x2000, + 0x7de: 0x2000, 0x7df: 0x2000, 0x7e0: 0x2000, 0x7e1: 0x2000, 0x7e2: 0x2000, 0x7e3: 0x2000, + 0x7e4: 0x2000, 0x7e5: 0x2000, 0x7e6: 0x2000, 0x7e7: 0x2000, 0x7e8: 0x2000, 0x7e9: 0x2000, + 0x7eb: 0x2000, 0x7ec: 0x2000, 0x7ed: 0x2000, 0x7ee: 0x2000, 0x7ef: 0x2000, + 0x7f0: 0x2000, 0x7f1: 0x2000, 0x7f2: 0x2000, 0x7f3: 0x2000, 0x7f4: 0x2000, 0x7f5: 0x2000, + 0x7f6: 0x2000, 0x7f7: 0x2000, 0x7f8: 0x2000, 0x7f9: 0x2000, 0x7fa: 0x2000, 0x7fb: 0x2000, + 0x7fc: 0x2000, 0x7fd: 0x2000, 0x7fe: 0x2000, 0x7ff: 0x2000, + // Block 0x20, offset 0x800 + 0x800: 0x2000, 0x801: 0x2000, 0x802: 0x200d, 0x803: 0x2000, 0x804: 0x2000, 0x805: 0x2000, + 0x806: 0x2000, 0x807: 0x2000, 0x808: 0x2000, 0x809: 0x2000, 0x80a: 0x2000, 0x80b: 0x2000, + 0x80c: 0x2000, 0x80d: 0x2000, 0x80e: 0x2000, 0x80f: 0x2000, 0x810: 0x2000, 0x811: 0x2000, + 0x812: 0x2000, 0x813: 0x2000, 0x814: 0x2000, 0x815: 0x2000, 0x816: 0x2000, 0x817: 0x2000, + 0x818: 0x2000, 0x819: 0x2000, 0x81a: 0x2000, 0x81b: 0x2000, 0x81c: 0x2000, 0x81d: 0x2000, + 0x81e: 0x2000, 0x81f: 0x2000, 0x820: 0x2000, 0x821: 0x2000, 0x822: 0x2000, 0x823: 0x2000, + 0x824: 0x2000, 0x825: 0x2000, 0x826: 0x2000, 0x827: 0x2000, 0x828: 0x2000, 0x829: 0x2000, + 0x82a: 0x2000, 0x82b: 0x2000, 0x82c: 0x2000, 0x82d: 0x2000, 0x82e: 0x2000, 0x82f: 0x2000, + 0x830: 0x2000, 0x831: 0x2000, 0x832: 0x2000, 0x833: 0x2000, 0x834: 0x2000, 0x835: 0x2000, + 0x836: 0x2000, 0x837: 0x2000, 0x838: 0x2000, 0x839: 0x2000, 0x83a: 0x2000, 0x83b: 0x2000, + 0x83c: 0x2000, 0x83d: 0x2000, 0x83e: 0x2000, 0x83f: 0x2000, + // Block 0x21, offset 0x840 + 0x840: 0x2000, 0x841: 0x2000, 0x842: 0x2000, 0x843: 0x2000, 0x844: 0x2000, 0x845: 0x2000, + 0x846: 0x2000, 0x847: 0x2000, 0x848: 0x2000, 0x849: 0x2000, 0x84a: 0x2000, 0x84b: 0x2000, + 0x850: 0x2000, 0x851: 0x2000, + 0x852: 0x2000, 0x853: 0x2000, 0x854: 0x2000, 0x855: 0x2000, 0x856: 0x2000, 0x857: 0x2000, + 0x858: 0x2000, 0x859: 0x2000, 0x85a: 0x2000, 0x85b: 0x2000, 0x85c: 0x2000, 0x85d: 0x2000, + 0x85e: 0x2000, 0x85f: 0x2000, 0x860: 0x2000, 0x861: 0x2000, 0x862: 0x2000, 0x863: 0x2000, + 0x864: 0x2000, 0x865: 0x2000, 0x866: 0x2000, 0x867: 0x2000, 0x868: 0x2000, 0x869: 0x2000, + 0x86a: 
0x2000, 0x86b: 0x2000, 0x86c: 0x2000, 0x86d: 0x2000, 0x86e: 0x2000, 0x86f: 0x2000, + 0x870: 0x2000, 0x871: 0x2000, 0x872: 0x2000, 0x873: 0x2000, + // Block 0x22, offset 0x880 + 0x880: 0x2000, 0x881: 0x2000, 0x882: 0x2000, 0x883: 0x2000, 0x884: 0x2000, 0x885: 0x2000, + 0x886: 0x2000, 0x887: 0x2000, 0x888: 0x2000, 0x889: 0x2000, 0x88a: 0x2000, 0x88b: 0x2000, + 0x88c: 0x2000, 0x88d: 0x2000, 0x88e: 0x2000, 0x88f: 0x2000, + 0x892: 0x2000, 0x893: 0x2000, 0x894: 0x2000, 0x895: 0x2000, + 0x8a0: 0x200e, 0x8a1: 0x2000, 0x8a3: 0x2000, + 0x8a4: 0x2000, 0x8a5: 0x2000, 0x8a6: 0x2000, 0x8a7: 0x2000, 0x8a8: 0x2000, 0x8a9: 0x2000, + 0x8b2: 0x2000, 0x8b3: 0x2000, + 0x8b6: 0x2000, 0x8b7: 0x2000, + 0x8bc: 0x2000, 0x8bd: 0x2000, + // Block 0x23, offset 0x8c0 + 0x8c0: 0x2000, 0x8c1: 0x2000, + 0x8c6: 0x2000, 0x8c7: 0x2000, 0x8c8: 0x2000, 0x8cb: 0x200f, + 0x8ce: 0x2000, 0x8cf: 0x2000, 0x8d0: 0x2000, 0x8d1: 0x2000, + 0x8e2: 0x2000, 0x8e3: 0x2000, + 0x8e4: 0x2000, 0x8e5: 0x2000, + 0x8ef: 0x2000, + 0x8fd: 0x4000, 0x8fe: 0x4000, + // Block 0x24, offset 0x900 + 0x905: 0x2000, + 0x906: 0x2000, 0x909: 0x2000, + 0x90e: 0x2000, 0x90f: 0x2000, + 0x914: 0x4000, 0x915: 0x4000, + 0x91c: 0x2000, + 0x91e: 0x2000, + // Block 0x25, offset 0x940 + 0x940: 0x2000, 0x942: 0x2000, + 0x948: 0x4000, 0x949: 0x4000, 0x94a: 0x4000, 0x94b: 0x4000, + 0x94c: 0x4000, 0x94d: 0x4000, 0x94e: 0x4000, 0x94f: 0x4000, 0x950: 0x4000, 0x951: 0x4000, + 0x952: 0x4000, 0x953: 0x4000, + 0x960: 0x2000, 0x961: 0x2000, 0x963: 0x2000, + 0x964: 0x2000, 0x965: 0x2000, 0x967: 0x2000, 0x968: 0x2000, 0x969: 0x2000, + 0x96a: 0x2000, 0x96c: 0x2000, 0x96d: 0x2000, 0x96f: 0x2000, + 0x97f: 0x4000, + // Block 0x26, offset 0x980 + 0x993: 0x4000, + 0x99e: 0x2000, 0x99f: 0x2000, 0x9a1: 0x4000, + 0x9aa: 0x4000, 0x9ab: 0x4000, + 0x9bd: 0x4000, 0x9be: 0x4000, 0x9bf: 0x2000, + // Block 0x27, offset 0x9c0 + 0x9c4: 0x4000, 0x9c5: 0x4000, + 0x9c6: 0x2000, 0x9c7: 0x2000, 0x9c8: 0x2000, 0x9c9: 0x2000, 0x9ca: 0x2000, 0x9cb: 0x2000, + 0x9cc: 0x2000, 0x9cd: 0x2000, 0x9ce: 0x4000, 0x9cf: 0x2000, 0x9d0: 0x2000, 0x9d1: 0x2000, + 0x9d2: 0x2000, 0x9d3: 0x2000, 0x9d4: 0x4000, 0x9d5: 0x2000, 0x9d6: 0x2000, 0x9d7: 0x2000, + 0x9d8: 0x2000, 0x9d9: 0x2000, 0x9da: 0x2000, 0x9db: 0x2000, 0x9dc: 0x2000, 0x9dd: 0x2000, + 0x9de: 0x2000, 0x9df: 0x2000, 0x9e0: 0x2000, 0x9e1: 0x2000, 0x9e3: 0x2000, + 0x9e8: 0x2000, 0x9e9: 0x2000, + 0x9ea: 0x4000, 0x9eb: 0x2000, 0x9ec: 0x2000, 0x9ed: 0x2000, 0x9ee: 0x2000, 0x9ef: 0x2000, + 0x9f0: 0x2000, 0x9f1: 0x2000, 0x9f2: 0x4000, 0x9f3: 0x4000, 0x9f4: 0x2000, 0x9f5: 0x4000, + 0x9f6: 0x2000, 0x9f7: 0x2000, 0x9f8: 0x2000, 0x9f9: 0x2000, 0x9fa: 0x4000, 0x9fb: 0x2000, + 0x9fc: 0x2000, 0x9fd: 0x4000, 0x9fe: 0x2000, 0x9ff: 0x2000, + // Block 0x28, offset 0xa00 + 0xa05: 0x4000, + 0xa0a: 0x4000, 0xa0b: 0x4000, + 0xa28: 0x4000, + 0xa3d: 0x2000, + // Block 0x29, offset 0xa40 + 0xa4c: 0x4000, 0xa4e: 0x4000, + 0xa53: 0x4000, 0xa54: 0x4000, 0xa55: 0x4000, 0xa57: 0x4000, + 0xa76: 0x2000, 0xa77: 0x2000, 0xa78: 0x2000, 0xa79: 0x2000, 0xa7a: 0x2000, 0xa7b: 0x2000, + 0xa7c: 0x2000, 0xa7d: 0x2000, 0xa7e: 0x2000, 0xa7f: 0x2000, + // Block 0x2a, offset 0xa80 + 0xa95: 0x4000, 0xa96: 0x4000, 0xa97: 0x4000, + 0xab0: 0x4000, + 0xabf: 0x4000, + // Block 0x2b, offset 0xac0 + 0xae6: 0x6000, 0xae7: 0x6000, 0xae8: 0x6000, 0xae9: 0x6000, + 0xaea: 0x6000, 0xaeb: 0x6000, 0xaec: 0x6000, 0xaed: 0x6000, + // Block 0x2c, offset 0xb00 + 0xb05: 0x6010, + 0xb06: 0x6011, + // Block 0x2d, offset 0xb40 + 0xb5b: 0x4000, 0xb5c: 0x4000, + // Block 0x2e, offset 0xb80 + 0xb90: 0x4000, + 0xb95: 0x4000, 0xb96: 
0x2000, 0xb97: 0x2000, + 0xb98: 0x2000, 0xb99: 0x2000, + // Block 0x2f, offset 0xbc0 + 0xbc0: 0x4000, 0xbc1: 0x4000, 0xbc2: 0x4000, 0xbc3: 0x4000, 0xbc4: 0x4000, 0xbc5: 0x4000, + 0xbc6: 0x4000, 0xbc7: 0x4000, 0xbc8: 0x4000, 0xbc9: 0x4000, 0xbca: 0x4000, 0xbcb: 0x4000, + 0xbcc: 0x4000, 0xbcd: 0x4000, 0xbce: 0x4000, 0xbcf: 0x4000, 0xbd0: 0x4000, 0xbd1: 0x4000, + 0xbd2: 0x4000, 0xbd3: 0x4000, 0xbd4: 0x4000, 0xbd5: 0x4000, 0xbd6: 0x4000, 0xbd7: 0x4000, + 0xbd8: 0x4000, 0xbd9: 0x4000, 0xbdb: 0x4000, 0xbdc: 0x4000, 0xbdd: 0x4000, + 0xbde: 0x4000, 0xbdf: 0x4000, 0xbe0: 0x4000, 0xbe1: 0x4000, 0xbe2: 0x4000, 0xbe3: 0x4000, + 0xbe4: 0x4000, 0xbe5: 0x4000, 0xbe6: 0x4000, 0xbe7: 0x4000, 0xbe8: 0x4000, 0xbe9: 0x4000, + 0xbea: 0x4000, 0xbeb: 0x4000, 0xbec: 0x4000, 0xbed: 0x4000, 0xbee: 0x4000, 0xbef: 0x4000, + 0xbf0: 0x4000, 0xbf1: 0x4000, 0xbf2: 0x4000, 0xbf3: 0x4000, 0xbf4: 0x4000, 0xbf5: 0x4000, + 0xbf6: 0x4000, 0xbf7: 0x4000, 0xbf8: 0x4000, 0xbf9: 0x4000, 0xbfa: 0x4000, 0xbfb: 0x4000, + 0xbfc: 0x4000, 0xbfd: 0x4000, 0xbfe: 0x4000, 0xbff: 0x4000, + // Block 0x30, offset 0xc00 + 0xc00: 0x4000, 0xc01: 0x4000, 0xc02: 0x4000, 0xc03: 0x4000, 0xc04: 0x4000, 0xc05: 0x4000, + 0xc06: 0x4000, 0xc07: 0x4000, 0xc08: 0x4000, 0xc09: 0x4000, 0xc0a: 0x4000, 0xc0b: 0x4000, + 0xc0c: 0x4000, 0xc0d: 0x4000, 0xc0e: 0x4000, 0xc0f: 0x4000, 0xc10: 0x4000, 0xc11: 0x4000, + 0xc12: 0x4000, 0xc13: 0x4000, 0xc14: 0x4000, 0xc15: 0x4000, 0xc16: 0x4000, 0xc17: 0x4000, + 0xc18: 0x4000, 0xc19: 0x4000, 0xc1a: 0x4000, 0xc1b: 0x4000, 0xc1c: 0x4000, 0xc1d: 0x4000, + 0xc1e: 0x4000, 0xc1f: 0x4000, 0xc20: 0x4000, 0xc21: 0x4000, 0xc22: 0x4000, 0xc23: 0x4000, + 0xc24: 0x4000, 0xc25: 0x4000, 0xc26: 0x4000, 0xc27: 0x4000, 0xc28: 0x4000, 0xc29: 0x4000, + 0xc2a: 0x4000, 0xc2b: 0x4000, 0xc2c: 0x4000, 0xc2d: 0x4000, 0xc2e: 0x4000, 0xc2f: 0x4000, + 0xc30: 0x4000, 0xc31: 0x4000, 0xc32: 0x4000, 0xc33: 0x4000, + // Block 0x31, offset 0xc40 + 0xc40: 0x4000, 0xc41: 0x4000, 0xc42: 0x4000, 0xc43: 0x4000, 0xc44: 0x4000, 0xc45: 0x4000, + 0xc46: 0x4000, 0xc47: 0x4000, 0xc48: 0x4000, 0xc49: 0x4000, 0xc4a: 0x4000, 0xc4b: 0x4000, + 0xc4c: 0x4000, 0xc4d: 0x4000, 0xc4e: 0x4000, 0xc4f: 0x4000, 0xc50: 0x4000, 0xc51: 0x4000, + 0xc52: 0x4000, 0xc53: 0x4000, 0xc54: 0x4000, 0xc55: 0x4000, + 0xc70: 0x4000, 0xc71: 0x4000, 0xc72: 0x4000, 0xc73: 0x4000, 0xc74: 0x4000, 0xc75: 0x4000, + 0xc76: 0x4000, 0xc77: 0x4000, 0xc78: 0x4000, 0xc79: 0x4000, 0xc7a: 0x4000, 0xc7b: 0x4000, + // Block 0x32, offset 0xc80 + 0xc80: 0x9012, 0xc81: 0x4013, 0xc82: 0x4014, 0xc83: 0x4000, 0xc84: 0x4000, 0xc85: 0x4000, + 0xc86: 0x4000, 0xc87: 0x4000, 0xc88: 0x4000, 0xc89: 0x4000, 0xc8a: 0x4000, 0xc8b: 0x4000, + 0xc8c: 0x4015, 0xc8d: 0x4015, 0xc8e: 0x4000, 0xc8f: 0x4000, 0xc90: 0x4000, 0xc91: 0x4000, + 0xc92: 0x4000, 0xc93: 0x4000, 0xc94: 0x4000, 0xc95: 0x4000, 0xc96: 0x4000, 0xc97: 0x4000, + 0xc98: 0x4000, 0xc99: 0x4000, 0xc9a: 0x4000, 0xc9b: 0x4000, 0xc9c: 0x4000, 0xc9d: 0x4000, + 0xc9e: 0x4000, 0xc9f: 0x4000, 0xca0: 0x4000, 0xca1: 0x4000, 0xca2: 0x4000, 0xca3: 0x4000, + 0xca4: 0x4000, 0xca5: 0x4000, 0xca6: 0x4000, 0xca7: 0x4000, 0xca8: 0x4000, 0xca9: 0x4000, + 0xcaa: 0x4000, 0xcab: 0x4000, 0xcac: 0x4000, 0xcad: 0x4000, 0xcae: 0x4000, 0xcaf: 0x4000, + 0xcb0: 0x4000, 0xcb1: 0x4000, 0xcb2: 0x4000, 0xcb3: 0x4000, 0xcb4: 0x4000, 0xcb5: 0x4000, + 0xcb6: 0x4000, 0xcb7: 0x4000, 0xcb8: 0x4000, 0xcb9: 0x4000, 0xcba: 0x4000, 0xcbb: 0x4000, + 0xcbc: 0x4000, 0xcbd: 0x4000, 0xcbe: 0x4000, + // Block 0x33, offset 0xcc0 + 0xcc1: 0x4000, 0xcc2: 0x4000, 0xcc3: 0x4000, 0xcc4: 0x4000, 0xcc5: 0x4000, + 0xcc6: 0x4000, 
0xcc7: 0x4000, 0xcc8: 0x4000, 0xcc9: 0x4000, 0xcca: 0x4000, 0xccb: 0x4000, + 0xccc: 0x4000, 0xccd: 0x4000, 0xcce: 0x4000, 0xccf: 0x4000, 0xcd0: 0x4000, 0xcd1: 0x4000, + 0xcd2: 0x4000, 0xcd3: 0x4000, 0xcd4: 0x4000, 0xcd5: 0x4000, 0xcd6: 0x4000, 0xcd7: 0x4000, + 0xcd8: 0x4000, 0xcd9: 0x4000, 0xcda: 0x4000, 0xcdb: 0x4000, 0xcdc: 0x4000, 0xcdd: 0x4000, + 0xcde: 0x4000, 0xcdf: 0x4000, 0xce0: 0x4000, 0xce1: 0x4000, 0xce2: 0x4000, 0xce3: 0x4000, + 0xce4: 0x4000, 0xce5: 0x4000, 0xce6: 0x4000, 0xce7: 0x4000, 0xce8: 0x4000, 0xce9: 0x4000, + 0xcea: 0x4000, 0xceb: 0x4000, 0xcec: 0x4000, 0xced: 0x4000, 0xcee: 0x4000, 0xcef: 0x4000, + 0xcf0: 0x4000, 0xcf1: 0x4000, 0xcf2: 0x4000, 0xcf3: 0x4000, 0xcf4: 0x4000, 0xcf5: 0x4000, + 0xcf6: 0x4000, 0xcf7: 0x4000, 0xcf8: 0x4000, 0xcf9: 0x4000, 0xcfa: 0x4000, 0xcfb: 0x4000, + 0xcfc: 0x4000, 0xcfd: 0x4000, 0xcfe: 0x4000, 0xcff: 0x4000, + // Block 0x34, offset 0xd00 + 0xd00: 0x4000, 0xd01: 0x4000, 0xd02: 0x4000, 0xd03: 0x4000, 0xd04: 0x4000, 0xd05: 0x4000, + 0xd06: 0x4000, 0xd07: 0x4000, 0xd08: 0x4000, 0xd09: 0x4000, 0xd0a: 0x4000, 0xd0b: 0x4000, + 0xd0c: 0x4000, 0xd0d: 0x4000, 0xd0e: 0x4000, 0xd0f: 0x4000, 0xd10: 0x4000, 0xd11: 0x4000, + 0xd12: 0x4000, 0xd13: 0x4000, 0xd14: 0x4000, 0xd15: 0x4000, 0xd16: 0x4000, + 0xd19: 0x4016, 0xd1a: 0x4017, 0xd1b: 0x4000, 0xd1c: 0x4000, 0xd1d: 0x4000, + 0xd1e: 0x4000, 0xd1f: 0x4000, 0xd20: 0x4000, 0xd21: 0x4018, 0xd22: 0x4019, 0xd23: 0x401a, + 0xd24: 0x401b, 0xd25: 0x401c, 0xd26: 0x401d, 0xd27: 0x401e, 0xd28: 0x401f, 0xd29: 0x4020, + 0xd2a: 0x4021, 0xd2b: 0x4022, 0xd2c: 0x4000, 0xd2d: 0x4010, 0xd2e: 0x4000, 0xd2f: 0x4023, + 0xd30: 0x4000, 0xd31: 0x4024, 0xd32: 0x4000, 0xd33: 0x4025, 0xd34: 0x4000, 0xd35: 0x4026, + 0xd36: 0x4000, 0xd37: 0x401a, 0xd38: 0x4000, 0xd39: 0x4027, 0xd3a: 0x4000, 0xd3b: 0x4028, + 0xd3c: 0x4000, 0xd3d: 0x4020, 0xd3e: 0x4000, 0xd3f: 0x4029, + // Block 0x35, offset 0xd40 + 0xd40: 0x4000, 0xd41: 0x402a, 0xd42: 0x4000, 0xd43: 0x402b, 0xd44: 0x402c, 0xd45: 0x4000, + 0xd46: 0x4017, 0xd47: 0x4000, 0xd48: 0x402d, 0xd49: 0x4000, 0xd4a: 0x402e, 0xd4b: 0x402f, + 0xd4c: 0x4030, 0xd4d: 0x4017, 0xd4e: 0x4016, 0xd4f: 0x4017, 0xd50: 0x4000, 0xd51: 0x4000, + 0xd52: 0x4031, 0xd53: 0x4000, 0xd54: 0x4000, 0xd55: 0x4031, 0xd56: 0x4000, 0xd57: 0x4000, + 0xd58: 0x4032, 0xd59: 0x4000, 0xd5a: 0x4000, 0xd5b: 0x4032, 0xd5c: 0x4000, 0xd5d: 0x4000, + 0xd5e: 0x4033, 0xd5f: 0x402e, 0xd60: 0x4034, 0xd61: 0x4035, 0xd62: 0x4034, 0xd63: 0x4036, + 0xd64: 0x4037, 0xd65: 0x4024, 0xd66: 0x4035, 0xd67: 0x4025, 0xd68: 0x4038, 0xd69: 0x4038, + 0xd6a: 0x4039, 0xd6b: 0x4039, 0xd6c: 0x403a, 0xd6d: 0x403a, 0xd6e: 0x4000, 0xd6f: 0x4035, + 0xd70: 0x4000, 0xd71: 0x4000, 0xd72: 0x403b, 0xd73: 0x403c, 0xd74: 0x4000, 0xd75: 0x4000, + 0xd76: 0x4000, 0xd77: 0x4000, 0xd78: 0x4000, 0xd79: 0x4000, 0xd7a: 0x4000, 0xd7b: 0x403d, + 0xd7c: 0x401c, 0xd7d: 0x4000, 0xd7e: 0x4000, 0xd7f: 0x4000, + // Block 0x36, offset 0xd80 + 0xd85: 0x4000, + 0xd86: 0x4000, 0xd87: 0x4000, 0xd88: 0x4000, 0xd89: 0x4000, 0xd8a: 0x4000, 0xd8b: 0x4000, + 0xd8c: 0x4000, 0xd8d: 0x4000, 0xd8e: 0x4000, 0xd8f: 0x4000, 0xd90: 0x4000, 0xd91: 0x4000, + 0xd92: 0x4000, 0xd93: 0x4000, 0xd94: 0x4000, 0xd95: 0x4000, 0xd96: 0x4000, 0xd97: 0x4000, + 0xd98: 0x4000, 0xd99: 0x4000, 0xd9a: 0x4000, 0xd9b: 0x4000, 0xd9c: 0x4000, 0xd9d: 0x4000, + 0xd9e: 0x4000, 0xd9f: 0x4000, 0xda0: 0x4000, 0xda1: 0x4000, 0xda2: 0x4000, 0xda3: 0x4000, + 0xda4: 0x4000, 0xda5: 0x4000, 0xda6: 0x4000, 0xda7: 0x4000, 0xda8: 0x4000, 0xda9: 0x4000, + 0xdaa: 0x4000, 0xdab: 0x4000, 0xdac: 0x4000, 0xdad: 0x4000, 0xdae: 0x4000, 0xdaf: 
0x4000, + 0xdb1: 0x403e, 0xdb2: 0x403e, 0xdb3: 0x403e, 0xdb4: 0x403e, 0xdb5: 0x403e, + 0xdb6: 0x403e, 0xdb7: 0x403e, 0xdb8: 0x403e, 0xdb9: 0x403e, 0xdba: 0x403e, 0xdbb: 0x403e, + 0xdbc: 0x403e, 0xdbd: 0x403e, 0xdbe: 0x403e, 0xdbf: 0x403e, + // Block 0x37, offset 0xdc0 + 0xdc0: 0x4037, 0xdc1: 0x4037, 0xdc2: 0x4037, 0xdc3: 0x4037, 0xdc4: 0x4037, 0xdc5: 0x4037, + 0xdc6: 0x4037, 0xdc7: 0x4037, 0xdc8: 0x4037, 0xdc9: 0x4037, 0xdca: 0x4037, 0xdcb: 0x4037, + 0xdcc: 0x4037, 0xdcd: 0x4037, 0xdce: 0x4037, 0xdcf: 0x400e, 0xdd0: 0x403f, 0xdd1: 0x4040, + 0xdd2: 0x4041, 0xdd3: 0x4040, 0xdd4: 0x403f, 0xdd5: 0x4042, 0xdd6: 0x4043, 0xdd7: 0x4044, + 0xdd8: 0x4040, 0xdd9: 0x4041, 0xdda: 0x4040, 0xddb: 0x4045, 0xddc: 0x4009, 0xddd: 0x4045, + 0xdde: 0x4046, 0xddf: 0x4045, 0xde0: 0x4047, 0xde1: 0x400b, 0xde2: 0x400a, 0xde3: 0x400c, + 0xde4: 0x4048, 0xde5: 0x4000, 0xde6: 0x4000, 0xde7: 0x4000, 0xde8: 0x4000, 0xde9: 0x4000, + 0xdea: 0x4000, 0xdeb: 0x4000, 0xdec: 0x4000, 0xded: 0x4000, 0xdee: 0x4000, 0xdef: 0x4000, + 0xdf0: 0x4000, 0xdf1: 0x4000, 0xdf2: 0x4000, 0xdf3: 0x4000, 0xdf4: 0x4000, 0xdf5: 0x4000, + 0xdf6: 0x4000, 0xdf7: 0x4000, 0xdf8: 0x4000, 0xdf9: 0x4000, 0xdfa: 0x4000, 0xdfb: 0x4000, + 0xdfc: 0x4000, 0xdfd: 0x4000, 0xdfe: 0x4000, 0xdff: 0x4000, + // Block 0x38, offset 0xe00 + 0xe00: 0x4000, 0xe01: 0x4000, 0xe02: 0x4000, 0xe03: 0x4000, 0xe04: 0x4000, 0xe05: 0x4000, + 0xe06: 0x4000, 0xe07: 0x4000, 0xe08: 0x4000, 0xe09: 0x4000, 0xe0a: 0x4000, 0xe0b: 0x4000, + 0xe0c: 0x4000, 0xe0d: 0x4000, 0xe0e: 0x4000, 0xe10: 0x4000, 0xe11: 0x4000, + 0xe12: 0x4000, 0xe13: 0x4000, 0xe14: 0x4000, 0xe15: 0x4000, 0xe16: 0x4000, 0xe17: 0x4000, + 0xe18: 0x4000, 0xe19: 0x4000, 0xe1a: 0x4000, 0xe1b: 0x4000, 0xe1c: 0x4000, 0xe1d: 0x4000, + 0xe1e: 0x4000, 0xe1f: 0x4000, 0xe20: 0x4000, 0xe21: 0x4000, 0xe22: 0x4000, 0xe23: 0x4000, + 0xe24: 0x4000, 0xe25: 0x4000, 0xe26: 0x4000, 0xe27: 0x4000, 0xe28: 0x4000, 0xe29: 0x4000, + 0xe2a: 0x4000, 0xe2b: 0x4000, 0xe2c: 0x4000, 0xe2d: 0x4000, 0xe2e: 0x4000, 0xe2f: 0x4000, + 0xe30: 0x4000, 0xe31: 0x4000, 0xe32: 0x4000, 0xe33: 0x4000, 0xe34: 0x4000, 0xe35: 0x4000, + 0xe36: 0x4000, 0xe37: 0x4000, 0xe38: 0x4000, 0xe39: 0x4000, 0xe3a: 0x4000, + // Block 0x39, offset 0xe40 + 0xe40: 0x4000, 0xe41: 0x4000, 0xe42: 0x4000, 0xe43: 0x4000, 0xe44: 0x4000, 0xe45: 0x4000, + 0xe46: 0x4000, 0xe47: 0x4000, 0xe48: 0x4000, 0xe49: 0x4000, 0xe4a: 0x4000, 0xe4b: 0x4000, + 0xe4c: 0x4000, 0xe4d: 0x4000, 0xe4e: 0x4000, 0xe4f: 0x4000, 0xe50: 0x4000, 0xe51: 0x4000, + 0xe52: 0x4000, 0xe53: 0x4000, 0xe54: 0x4000, 0xe55: 0x4000, 0xe56: 0x4000, 0xe57: 0x4000, + 0xe58: 0x4000, 0xe59: 0x4000, 0xe5a: 0x4000, 0xe5b: 0x4000, 0xe5c: 0x4000, 0xe5d: 0x4000, + 0xe5e: 0x4000, 0xe5f: 0x4000, 0xe60: 0x4000, 0xe61: 0x4000, 0xe62: 0x4000, 0xe63: 0x4000, + 0xe70: 0x4000, 0xe71: 0x4000, 0xe72: 0x4000, 0xe73: 0x4000, 0xe74: 0x4000, 0xe75: 0x4000, + 0xe76: 0x4000, 0xe77: 0x4000, 0xe78: 0x4000, 0xe79: 0x4000, 0xe7a: 0x4000, 0xe7b: 0x4000, + 0xe7c: 0x4000, 0xe7d: 0x4000, 0xe7e: 0x4000, 0xe7f: 0x4000, + // Block 0x3a, offset 0xe80 + 0xe80: 0x4000, 0xe81: 0x4000, 0xe82: 0x4000, 0xe83: 0x4000, 0xe84: 0x4000, 0xe85: 0x4000, + 0xe86: 0x4000, 0xe87: 0x4000, 0xe88: 0x4000, 0xe89: 0x4000, 0xe8a: 0x4000, 0xe8b: 0x4000, + 0xe8c: 0x4000, 0xe8d: 0x4000, 0xe8e: 0x4000, 0xe8f: 0x4000, 0xe90: 0x4000, 0xe91: 0x4000, + 0xe92: 0x4000, 0xe93: 0x4000, 0xe94: 0x4000, 0xe95: 0x4000, 0xe96: 0x4000, 0xe97: 0x4000, + 0xe98: 0x4000, 0xe99: 0x4000, 0xe9a: 0x4000, 0xe9b: 0x4000, 0xe9c: 0x4000, 0xe9d: 0x4000, + 0xe9e: 0x4000, 0xea0: 0x4000, 0xea1: 0x4000, 0xea2: 0x4000, 
0xea3: 0x4000, + 0xea4: 0x4000, 0xea5: 0x4000, 0xea6: 0x4000, 0xea7: 0x4000, 0xea8: 0x4000, 0xea9: 0x4000, + 0xeaa: 0x4000, 0xeab: 0x4000, 0xeac: 0x4000, 0xead: 0x4000, 0xeae: 0x4000, 0xeaf: 0x4000, + 0xeb0: 0x4000, 0xeb1: 0x4000, 0xeb2: 0x4000, 0xeb3: 0x4000, 0xeb4: 0x4000, 0xeb5: 0x4000, + 0xeb6: 0x4000, 0xeb7: 0x4000, 0xeb8: 0x4000, 0xeb9: 0x4000, 0xeba: 0x4000, 0xebb: 0x4000, + 0xebc: 0x4000, 0xebd: 0x4000, 0xebe: 0x4000, 0xebf: 0x4000, + // Block 0x3b, offset 0xec0 + 0xec0: 0x4000, 0xec1: 0x4000, 0xec2: 0x4000, 0xec3: 0x4000, 0xec4: 0x4000, 0xec5: 0x4000, + 0xec6: 0x4000, 0xec7: 0x4000, 0xec8: 0x2000, 0xec9: 0x2000, 0xeca: 0x2000, 0xecb: 0x2000, + 0xecc: 0x2000, 0xecd: 0x2000, 0xece: 0x2000, 0xecf: 0x2000, 0xed0: 0x4000, 0xed1: 0x4000, + 0xed2: 0x4000, 0xed3: 0x4000, 0xed4: 0x4000, 0xed5: 0x4000, 0xed6: 0x4000, 0xed7: 0x4000, + 0xed8: 0x4000, 0xed9: 0x4000, 0xeda: 0x4000, 0xedb: 0x4000, 0xedc: 0x4000, 0xedd: 0x4000, + 0xede: 0x4000, 0xedf: 0x4000, 0xee0: 0x4000, 0xee1: 0x4000, 0xee2: 0x4000, 0xee3: 0x4000, + 0xee4: 0x4000, 0xee5: 0x4000, 0xee6: 0x4000, 0xee7: 0x4000, 0xee8: 0x4000, 0xee9: 0x4000, + 0xeea: 0x4000, 0xeeb: 0x4000, 0xeec: 0x4000, 0xeed: 0x4000, 0xeee: 0x4000, 0xeef: 0x4000, + 0xef0: 0x4000, 0xef1: 0x4000, 0xef2: 0x4000, 0xef3: 0x4000, 0xef4: 0x4000, 0xef5: 0x4000, + 0xef6: 0x4000, 0xef7: 0x4000, 0xef8: 0x4000, 0xef9: 0x4000, 0xefa: 0x4000, 0xefb: 0x4000, + 0xefc: 0x4000, 0xefd: 0x4000, 0xefe: 0x4000, 0xeff: 0x4000, + // Block 0x3c, offset 0xf00 + 0xf00: 0x4000, 0xf01: 0x4000, 0xf02: 0x4000, 0xf03: 0x4000, 0xf04: 0x4000, 0xf05: 0x4000, + 0xf06: 0x4000, 0xf07: 0x4000, 0xf08: 0x4000, 0xf09: 0x4000, 0xf0a: 0x4000, 0xf0b: 0x4000, + 0xf0c: 0x4000, 0xf0d: 0x4000, 0xf0e: 0x4000, 0xf0f: 0x4000, 0xf10: 0x4000, 0xf11: 0x4000, + 0xf12: 0x4000, 0xf13: 0x4000, 0xf14: 0x4000, 0xf15: 0x4000, 0xf16: 0x4000, 0xf17: 0x4000, + 0xf18: 0x4000, 0xf19: 0x4000, 0xf1a: 0x4000, 0xf1b: 0x4000, 0xf1c: 0x4000, 0xf1d: 0x4000, + 0xf1e: 0x4000, 0xf1f: 0x4000, 0xf20: 0x4000, 0xf21: 0x4000, 0xf22: 0x4000, 0xf23: 0x4000, + 0xf24: 0x4000, 0xf25: 0x4000, 0xf26: 0x4000, 0xf27: 0x4000, 0xf28: 0x4000, 0xf29: 0x4000, + 0xf2a: 0x4000, 0xf2b: 0x4000, 0xf2c: 0x4000, 0xf2d: 0x4000, 0xf2e: 0x4000, 0xf2f: 0x4000, + 0xf30: 0x4000, 0xf31: 0x4000, 0xf32: 0x4000, 0xf33: 0x4000, 0xf34: 0x4000, 0xf35: 0x4000, + 0xf36: 0x4000, 0xf37: 0x4000, 0xf38: 0x4000, 0xf39: 0x4000, 0xf3a: 0x4000, 0xf3b: 0x4000, + 0xf3c: 0x4000, 0xf3d: 0x4000, 0xf3e: 0x4000, + // Block 0x3d, offset 0xf40 + 0xf40: 0x4000, 0xf41: 0x4000, 0xf42: 0x4000, 0xf43: 0x4000, 0xf44: 0x4000, 0xf45: 0x4000, + 0xf46: 0x4000, 0xf47: 0x4000, 0xf48: 0x4000, 0xf49: 0x4000, 0xf4a: 0x4000, 0xf4b: 0x4000, + 0xf4c: 0x4000, 0xf50: 0x4000, 0xf51: 0x4000, + 0xf52: 0x4000, 0xf53: 0x4000, 0xf54: 0x4000, 0xf55: 0x4000, 0xf56: 0x4000, 0xf57: 0x4000, + 0xf58: 0x4000, 0xf59: 0x4000, 0xf5a: 0x4000, 0xf5b: 0x4000, 0xf5c: 0x4000, 0xf5d: 0x4000, + 0xf5e: 0x4000, 0xf5f: 0x4000, 0xf60: 0x4000, 0xf61: 0x4000, 0xf62: 0x4000, 0xf63: 0x4000, + 0xf64: 0x4000, 0xf65: 0x4000, 0xf66: 0x4000, 0xf67: 0x4000, 0xf68: 0x4000, 0xf69: 0x4000, + 0xf6a: 0x4000, 0xf6b: 0x4000, 0xf6c: 0x4000, 0xf6d: 0x4000, 0xf6e: 0x4000, 0xf6f: 0x4000, + 0xf70: 0x4000, 0xf71: 0x4000, 0xf72: 0x4000, 0xf73: 0x4000, 0xf74: 0x4000, 0xf75: 0x4000, + 0xf76: 0x4000, 0xf77: 0x4000, 0xf78: 0x4000, 0xf79: 0x4000, 0xf7a: 0x4000, 0xf7b: 0x4000, + 0xf7c: 0x4000, 0xf7d: 0x4000, 0xf7e: 0x4000, 0xf7f: 0x4000, + // Block 0x3e, offset 0xf80 + 0xf80: 0x4000, 0xf81: 0x4000, 0xf82: 0x4000, 0xf83: 0x4000, 0xf84: 0x4000, 0xf85: 0x4000, + 0xf86: 
0x4000, + // Block 0x3f, offset 0xfc0 + 0xfe0: 0x4000, 0xfe1: 0x4000, 0xfe2: 0x4000, 0xfe3: 0x4000, + 0xfe4: 0x4000, 0xfe5: 0x4000, 0xfe6: 0x4000, 0xfe7: 0x4000, 0xfe8: 0x4000, 0xfe9: 0x4000, + 0xfea: 0x4000, 0xfeb: 0x4000, 0xfec: 0x4000, 0xfed: 0x4000, 0xfee: 0x4000, 0xfef: 0x4000, + 0xff0: 0x4000, 0xff1: 0x4000, 0xff2: 0x4000, 0xff3: 0x4000, 0xff4: 0x4000, 0xff5: 0x4000, + 0xff6: 0x4000, 0xff7: 0x4000, 0xff8: 0x4000, 0xff9: 0x4000, 0xffa: 0x4000, 0xffb: 0x4000, + 0xffc: 0x4000, + // Block 0x40, offset 0x1000 + 0x1000: 0x4000, 0x1001: 0x4000, 0x1002: 0x4000, 0x1003: 0x4000, 0x1004: 0x4000, 0x1005: 0x4000, + 0x1006: 0x4000, 0x1007: 0x4000, 0x1008: 0x4000, 0x1009: 0x4000, 0x100a: 0x4000, 0x100b: 0x4000, + 0x100c: 0x4000, 0x100d: 0x4000, 0x100e: 0x4000, 0x100f: 0x4000, 0x1010: 0x4000, 0x1011: 0x4000, + 0x1012: 0x4000, 0x1013: 0x4000, 0x1014: 0x4000, 0x1015: 0x4000, 0x1016: 0x4000, 0x1017: 0x4000, + 0x1018: 0x4000, 0x1019: 0x4000, 0x101a: 0x4000, 0x101b: 0x4000, 0x101c: 0x4000, 0x101d: 0x4000, + 0x101e: 0x4000, 0x101f: 0x4000, 0x1020: 0x4000, 0x1021: 0x4000, 0x1022: 0x4000, 0x1023: 0x4000, + // Block 0x41, offset 0x1040 + 0x1040: 0x2000, 0x1041: 0x2000, 0x1042: 0x2000, 0x1043: 0x2000, 0x1044: 0x2000, 0x1045: 0x2000, + 0x1046: 0x2000, 0x1047: 0x2000, 0x1048: 0x2000, 0x1049: 0x2000, 0x104a: 0x2000, 0x104b: 0x2000, + 0x104c: 0x2000, 0x104d: 0x2000, 0x104e: 0x2000, 0x104f: 0x2000, 0x1050: 0x4000, 0x1051: 0x4000, + 0x1052: 0x4000, 0x1053: 0x4000, 0x1054: 0x4000, 0x1055: 0x4000, 0x1056: 0x4000, 0x1057: 0x4000, + 0x1058: 0x4000, 0x1059: 0x4000, + 0x1070: 0x4000, 0x1071: 0x4000, 0x1072: 0x4000, 0x1073: 0x4000, 0x1074: 0x4000, 0x1075: 0x4000, + 0x1076: 0x4000, 0x1077: 0x4000, 0x1078: 0x4000, 0x1079: 0x4000, 0x107a: 0x4000, 0x107b: 0x4000, + 0x107c: 0x4000, 0x107d: 0x4000, 0x107e: 0x4000, 0x107f: 0x4000, + // Block 0x42, offset 0x1080 + 0x1080: 0x4000, 0x1081: 0x4000, 0x1082: 0x4000, 0x1083: 0x4000, 0x1084: 0x4000, 0x1085: 0x4000, + 0x1086: 0x4000, 0x1087: 0x4000, 0x1088: 0x4000, 0x1089: 0x4000, 0x108a: 0x4000, 0x108b: 0x4000, + 0x108c: 0x4000, 0x108d: 0x4000, 0x108e: 0x4000, 0x108f: 0x4000, 0x1090: 0x4000, 0x1091: 0x4000, + 0x1092: 0x4000, 0x1094: 0x4000, 0x1095: 0x4000, 0x1096: 0x4000, 0x1097: 0x4000, + 0x1098: 0x4000, 0x1099: 0x4000, 0x109a: 0x4000, 0x109b: 0x4000, 0x109c: 0x4000, 0x109d: 0x4000, + 0x109e: 0x4000, 0x109f: 0x4000, 0x10a0: 0x4000, 0x10a1: 0x4000, 0x10a2: 0x4000, 0x10a3: 0x4000, + 0x10a4: 0x4000, 0x10a5: 0x4000, 0x10a6: 0x4000, 0x10a8: 0x4000, 0x10a9: 0x4000, + 0x10aa: 0x4000, 0x10ab: 0x4000, + // Block 0x43, offset 0x10c0 + 0x10c1: 0x9012, 0x10c2: 0x9012, 0x10c3: 0x9012, 0x10c4: 0x9012, 0x10c5: 0x9012, + 0x10c6: 0x9012, 0x10c7: 0x9012, 0x10c8: 0x9012, 0x10c9: 0x9012, 0x10ca: 0x9012, 0x10cb: 0x9012, + 0x10cc: 0x9012, 0x10cd: 0x9012, 0x10ce: 0x9012, 0x10cf: 0x9012, 0x10d0: 0x9012, 0x10d1: 0x9012, + 0x10d2: 0x9012, 0x10d3: 0x9012, 0x10d4: 0x9012, 0x10d5: 0x9012, 0x10d6: 0x9012, 0x10d7: 0x9012, + 0x10d8: 0x9012, 0x10d9: 0x9012, 0x10da: 0x9012, 0x10db: 0x9012, 0x10dc: 0x9012, 0x10dd: 0x9012, + 0x10de: 0x9012, 0x10df: 0x9012, 0x10e0: 0x9049, 0x10e1: 0x9049, 0x10e2: 0x9049, 0x10e3: 0x9049, + 0x10e4: 0x9049, 0x10e5: 0x9049, 0x10e6: 0x9049, 0x10e7: 0x9049, 0x10e8: 0x9049, 0x10e9: 0x9049, + 0x10ea: 0x9049, 0x10eb: 0x9049, 0x10ec: 0x9049, 0x10ed: 0x9049, 0x10ee: 0x9049, 0x10ef: 0x9049, + 0x10f0: 0x9049, 0x10f1: 0x9049, 0x10f2: 0x9049, 0x10f3: 0x9049, 0x10f4: 0x9049, 0x10f5: 0x9049, + 0x10f6: 0x9049, 0x10f7: 0x9049, 0x10f8: 0x9049, 0x10f9: 0x9049, 0x10fa: 0x9049, 0x10fb: 0x9049, + 0x10fc: 0x9049, 
0x10fd: 0x9049, 0x10fe: 0x9049, 0x10ff: 0x9049, + // Block 0x44, offset 0x1100 + 0x1100: 0x9049, 0x1101: 0x9049, 0x1102: 0x9049, 0x1103: 0x9049, 0x1104: 0x9049, 0x1105: 0x9049, + 0x1106: 0x9049, 0x1107: 0x9049, 0x1108: 0x9049, 0x1109: 0x9049, 0x110a: 0x9049, 0x110b: 0x9049, + 0x110c: 0x9049, 0x110d: 0x9049, 0x110e: 0x9049, 0x110f: 0x9049, 0x1110: 0x9049, 0x1111: 0x9049, + 0x1112: 0x9049, 0x1113: 0x9049, 0x1114: 0x9049, 0x1115: 0x9049, 0x1116: 0x9049, 0x1117: 0x9049, + 0x1118: 0x9049, 0x1119: 0x9049, 0x111a: 0x9049, 0x111b: 0x9049, 0x111c: 0x9049, 0x111d: 0x9049, + 0x111e: 0x9049, 0x111f: 0x904a, 0x1120: 0x904b, 0x1121: 0xb04c, 0x1122: 0xb04d, 0x1123: 0xb04d, + 0x1124: 0xb04e, 0x1125: 0xb04f, 0x1126: 0xb050, 0x1127: 0xb051, 0x1128: 0xb052, 0x1129: 0xb053, + 0x112a: 0xb054, 0x112b: 0xb055, 0x112c: 0xb056, 0x112d: 0xb057, 0x112e: 0xb058, 0x112f: 0xb059, + 0x1130: 0xb05a, 0x1131: 0xb05b, 0x1132: 0xb05c, 0x1133: 0xb05d, 0x1134: 0xb05e, 0x1135: 0xb05f, + 0x1136: 0xb060, 0x1137: 0xb061, 0x1138: 0xb062, 0x1139: 0xb063, 0x113a: 0xb064, 0x113b: 0xb065, + 0x113c: 0xb052, 0x113d: 0xb066, 0x113e: 0xb067, 0x113f: 0xb055, + // Block 0x45, offset 0x1140 + 0x1140: 0xb068, 0x1141: 0xb069, 0x1142: 0xb06a, 0x1143: 0xb06b, 0x1144: 0xb05a, 0x1145: 0xb056, + 0x1146: 0xb06c, 0x1147: 0xb06d, 0x1148: 0xb06b, 0x1149: 0xb06e, 0x114a: 0xb06b, 0x114b: 0xb06f, + 0x114c: 0xb06f, 0x114d: 0xb070, 0x114e: 0xb070, 0x114f: 0xb071, 0x1150: 0xb056, 0x1151: 0xb072, + 0x1152: 0xb073, 0x1153: 0xb072, 0x1154: 0xb074, 0x1155: 0xb073, 0x1156: 0xb075, 0x1157: 0xb075, + 0x1158: 0xb076, 0x1159: 0xb076, 0x115a: 0xb077, 0x115b: 0xb077, 0x115c: 0xb073, 0x115d: 0xb078, + 0x115e: 0xb079, 0x115f: 0xb067, 0x1160: 0xb07a, 0x1161: 0xb07b, 0x1162: 0xb07b, 0x1163: 0xb07b, + 0x1164: 0xb07b, 0x1165: 0xb07b, 0x1166: 0xb07b, 0x1167: 0xb07b, 0x1168: 0xb07b, 0x1169: 0xb07b, + 0x116a: 0xb07b, 0x116b: 0xb07b, 0x116c: 0xb07b, 0x116d: 0xb07b, 0x116e: 0xb07b, 0x116f: 0xb07b, + 0x1170: 0xb07c, 0x1171: 0xb07c, 0x1172: 0xb07c, 0x1173: 0xb07c, 0x1174: 0xb07c, 0x1175: 0xb07c, + 0x1176: 0xb07c, 0x1177: 0xb07c, 0x1178: 0xb07c, 0x1179: 0xb07c, 0x117a: 0xb07c, 0x117b: 0xb07c, + 0x117c: 0xb07c, 0x117d: 0xb07c, 0x117e: 0xb07c, + // Block 0x46, offset 0x1180 + 0x1182: 0xb07d, 0x1183: 0xb07e, 0x1184: 0xb07f, 0x1185: 0xb080, + 0x1186: 0xb07f, 0x1187: 0xb07e, 0x118a: 0xb081, 0x118b: 0xb082, + 0x118c: 0xb083, 0x118d: 0xb07f, 0x118e: 0xb080, 0x118f: 0xb07f, + 0x1192: 0xb084, 0x1193: 0xb085, 0x1194: 0xb084, 0x1195: 0xb086, 0x1196: 0xb084, 0x1197: 0xb087, + 0x119a: 0xb088, 0x119b: 0xb089, 0x119c: 0xb08a, + 0x11a0: 0x908b, 0x11a1: 0x908b, 0x11a2: 0x908c, 0x11a3: 0x908d, + 0x11a4: 0x908b, 0x11a5: 0x908e, 0x11a6: 0x908f, 0x11a8: 0xb090, 0x11a9: 0xb091, + 0x11aa: 0xb092, 0x11ab: 0xb091, 0x11ac: 0xb093, 0x11ad: 0xb094, 0x11ae: 0xb095, + 0x11bd: 0x2000, + // Block 0x47, offset 0x11c0 + 0x11e0: 0x4000, 0x11e1: 0x4000, + // Block 0x48, offset 0x1200 + 0x1200: 0x4000, 0x1201: 0x4000, 0x1202: 0x4000, 0x1203: 0x4000, 0x1204: 0x4000, 0x1205: 0x4000, + 0x1206: 0x4000, 0x1207: 0x4000, 0x1208: 0x4000, 0x1209: 0x4000, 0x120a: 0x4000, 0x120b: 0x4000, + 0x120c: 0x4000, 0x120d: 0x4000, 0x120e: 0x4000, 0x120f: 0x4000, 0x1210: 0x4000, 0x1211: 0x4000, + 0x1212: 0x4000, 0x1213: 0x4000, 0x1214: 0x4000, 0x1215: 0x4000, 0x1216: 0x4000, 0x1217: 0x4000, + 0x1218: 0x4000, 0x1219: 0x4000, 0x121a: 0x4000, 0x121b: 0x4000, 0x121c: 0x4000, 0x121d: 0x4000, + 0x121e: 0x4000, 0x121f: 0x4000, 0x1220: 0x4000, 0x1221: 0x4000, 0x1222: 0x4000, 0x1223: 0x4000, + 0x1224: 0x4000, 0x1225: 0x4000, 0x1226: 0x4000, 0x1227: 
0x4000, 0x1228: 0x4000, 0x1229: 0x4000, + 0x122a: 0x4000, 0x122b: 0x4000, 0x122c: 0x4000, 0x122d: 0x4000, 0x122e: 0x4000, 0x122f: 0x4000, + 0x1230: 0x4000, 0x1231: 0x4000, + // Block 0x49, offset 0x1240 + 0x1240: 0x4000, 0x1241: 0x4000, 0x1242: 0x4000, 0x1243: 0x4000, 0x1244: 0x4000, 0x1245: 0x4000, + 0x1246: 0x4000, 0x1247: 0x4000, 0x1248: 0x4000, 0x1249: 0x4000, 0x124a: 0x4000, 0x124b: 0x4000, + 0x124c: 0x4000, 0x124d: 0x4000, 0x124e: 0x4000, 0x124f: 0x4000, 0x1250: 0x4000, 0x1251: 0x4000, + 0x1252: 0x4000, 0x1253: 0x4000, 0x1254: 0x4000, 0x1255: 0x4000, 0x1256: 0x4000, 0x1257: 0x4000, + 0x1258: 0x4000, 0x1259: 0x4000, 0x125a: 0x4000, 0x125b: 0x4000, 0x125c: 0x4000, 0x125d: 0x4000, + 0x125e: 0x4000, 0x125f: 0x4000, 0x1260: 0x4000, 0x1261: 0x4000, 0x1262: 0x4000, 0x1263: 0x4000, + 0x1264: 0x4000, 0x1265: 0x4000, 0x1266: 0x4000, 0x1267: 0x4000, 0x1268: 0x4000, 0x1269: 0x4000, + 0x126a: 0x4000, 0x126b: 0x4000, 0x126c: 0x4000, 0x126d: 0x4000, 0x126e: 0x4000, 0x126f: 0x4000, + 0x1270: 0x4000, 0x1271: 0x4000, 0x1272: 0x4000, + // Block 0x4a, offset 0x1280 + 0x1280: 0x4000, 0x1281: 0x4000, 0x1282: 0x4000, 0x1283: 0x4000, 0x1284: 0x4000, 0x1285: 0x4000, + 0x1286: 0x4000, 0x1287: 0x4000, 0x1288: 0x4000, 0x1289: 0x4000, 0x128a: 0x4000, 0x128b: 0x4000, + 0x128c: 0x4000, 0x128d: 0x4000, 0x128e: 0x4000, 0x128f: 0x4000, 0x1290: 0x4000, 0x1291: 0x4000, + 0x1292: 0x4000, 0x1293: 0x4000, 0x1294: 0x4000, 0x1295: 0x4000, 0x1296: 0x4000, 0x1297: 0x4000, + 0x1298: 0x4000, 0x1299: 0x4000, 0x129a: 0x4000, 0x129b: 0x4000, 0x129c: 0x4000, 0x129d: 0x4000, + 0x129e: 0x4000, + // Block 0x4b, offset 0x12c0 + 0x12f0: 0x4000, 0x12f1: 0x4000, 0x12f2: 0x4000, 0x12f3: 0x4000, 0x12f4: 0x4000, 0x12f5: 0x4000, + 0x12f6: 0x4000, 0x12f7: 0x4000, 0x12f8: 0x4000, 0x12f9: 0x4000, 0x12fa: 0x4000, 0x12fb: 0x4000, + 0x12fc: 0x4000, 0x12fd: 0x4000, 0x12fe: 0x4000, 0x12ff: 0x4000, + // Block 0x4c, offset 0x1300 + 0x1300: 0x4000, 0x1301: 0x4000, 0x1302: 0x4000, 0x1303: 0x4000, 0x1304: 0x4000, 0x1305: 0x4000, + 0x1306: 0x4000, 0x1307: 0x4000, 0x1308: 0x4000, 0x1309: 0x4000, 0x130a: 0x4000, 0x130b: 0x4000, + 0x130c: 0x4000, 0x130d: 0x4000, 0x130e: 0x4000, 0x130f: 0x4000, 0x1310: 0x4000, 0x1311: 0x4000, + 0x1312: 0x4000, 0x1313: 0x4000, 0x1314: 0x4000, 0x1315: 0x4000, 0x1316: 0x4000, 0x1317: 0x4000, + 0x1318: 0x4000, 0x1319: 0x4000, 0x131a: 0x4000, 0x131b: 0x4000, 0x131c: 0x4000, 0x131d: 0x4000, + 0x131e: 0x4000, 0x131f: 0x4000, 0x1320: 0x4000, 0x1321: 0x4000, 0x1322: 0x4000, 0x1323: 0x4000, + 0x1324: 0x4000, 0x1325: 0x4000, 0x1326: 0x4000, 0x1327: 0x4000, 0x1328: 0x4000, 0x1329: 0x4000, + 0x132a: 0x4000, 0x132b: 0x4000, 0x132c: 0x4000, 0x132d: 0x4000, 0x132e: 0x4000, 0x132f: 0x4000, + 0x1330: 0x4000, 0x1331: 0x4000, 0x1332: 0x4000, 0x1333: 0x4000, 0x1334: 0x4000, 0x1335: 0x4000, + 0x1336: 0x4000, 0x1337: 0x4000, 0x1338: 0x4000, 0x1339: 0x4000, 0x133a: 0x4000, 0x133b: 0x4000, + // Block 0x4d, offset 0x1340 + 0x1344: 0x4000, + // Block 0x4e, offset 0x1380 + 0x138f: 0x4000, + // Block 0x4f, offset 0x13c0 + 0x13c0: 0x2000, 0x13c1: 0x2000, 0x13c2: 0x2000, 0x13c3: 0x2000, 0x13c4: 0x2000, 0x13c5: 0x2000, + 0x13c6: 0x2000, 0x13c7: 0x2000, 0x13c8: 0x2000, 0x13c9: 0x2000, 0x13ca: 0x2000, + 0x13d0: 0x2000, 0x13d1: 0x2000, + 0x13d2: 0x2000, 0x13d3: 0x2000, 0x13d4: 0x2000, 0x13d5: 0x2000, 0x13d6: 0x2000, 0x13d7: 0x2000, + 0x13d8: 0x2000, 0x13d9: 0x2000, 0x13da: 0x2000, 0x13db: 0x2000, 0x13dc: 0x2000, 0x13dd: 0x2000, + 0x13de: 0x2000, 0x13df: 0x2000, 0x13e0: 0x2000, 0x13e1: 0x2000, 0x13e2: 0x2000, 0x13e3: 0x2000, + 0x13e4: 0x2000, 0x13e5: 0x2000, 
0x13e6: 0x2000, 0x13e7: 0x2000, 0x13e8: 0x2000, 0x13e9: 0x2000, + 0x13ea: 0x2000, 0x13eb: 0x2000, 0x13ec: 0x2000, 0x13ed: 0x2000, + 0x13f0: 0x2000, 0x13f1: 0x2000, 0x13f2: 0x2000, 0x13f3: 0x2000, 0x13f4: 0x2000, 0x13f5: 0x2000, + 0x13f6: 0x2000, 0x13f7: 0x2000, 0x13f8: 0x2000, 0x13f9: 0x2000, 0x13fa: 0x2000, 0x13fb: 0x2000, + 0x13fc: 0x2000, 0x13fd: 0x2000, 0x13fe: 0x2000, 0x13ff: 0x2000, + // Block 0x50, offset 0x1400 + 0x1400: 0x2000, 0x1401: 0x2000, 0x1402: 0x2000, 0x1403: 0x2000, 0x1404: 0x2000, 0x1405: 0x2000, + 0x1406: 0x2000, 0x1407: 0x2000, 0x1408: 0x2000, 0x1409: 0x2000, 0x140a: 0x2000, 0x140b: 0x2000, + 0x140c: 0x2000, 0x140d: 0x2000, 0x140e: 0x2000, 0x140f: 0x2000, 0x1410: 0x2000, 0x1411: 0x2000, + 0x1412: 0x2000, 0x1413: 0x2000, 0x1414: 0x2000, 0x1415: 0x2000, 0x1416: 0x2000, 0x1417: 0x2000, + 0x1418: 0x2000, 0x1419: 0x2000, 0x141a: 0x2000, 0x141b: 0x2000, 0x141c: 0x2000, 0x141d: 0x2000, + 0x141e: 0x2000, 0x141f: 0x2000, 0x1420: 0x2000, 0x1421: 0x2000, 0x1422: 0x2000, 0x1423: 0x2000, + 0x1424: 0x2000, 0x1425: 0x2000, 0x1426: 0x2000, 0x1427: 0x2000, 0x1428: 0x2000, 0x1429: 0x2000, + 0x1430: 0x2000, 0x1431: 0x2000, 0x1432: 0x2000, 0x1433: 0x2000, 0x1434: 0x2000, 0x1435: 0x2000, + 0x1436: 0x2000, 0x1437: 0x2000, 0x1438: 0x2000, 0x1439: 0x2000, 0x143a: 0x2000, 0x143b: 0x2000, + 0x143c: 0x2000, 0x143d: 0x2000, 0x143e: 0x2000, 0x143f: 0x2000, + // Block 0x51, offset 0x1440 + 0x1440: 0x2000, 0x1441: 0x2000, 0x1442: 0x2000, 0x1443: 0x2000, 0x1444: 0x2000, 0x1445: 0x2000, + 0x1446: 0x2000, 0x1447: 0x2000, 0x1448: 0x2000, 0x1449: 0x2000, 0x144a: 0x2000, 0x144b: 0x2000, + 0x144c: 0x2000, 0x144d: 0x2000, 0x144e: 0x4000, 0x144f: 0x2000, 0x1450: 0x2000, 0x1451: 0x4000, + 0x1452: 0x4000, 0x1453: 0x4000, 0x1454: 0x4000, 0x1455: 0x4000, 0x1456: 0x4000, 0x1457: 0x4000, + 0x1458: 0x4000, 0x1459: 0x4000, 0x145a: 0x4000, 0x145b: 0x2000, 0x145c: 0x2000, 0x145d: 0x2000, + 0x145e: 0x2000, 0x145f: 0x2000, 0x1460: 0x2000, 0x1461: 0x2000, 0x1462: 0x2000, 0x1463: 0x2000, + 0x1464: 0x2000, 0x1465: 0x2000, 0x1466: 0x2000, 0x1467: 0x2000, 0x1468: 0x2000, 0x1469: 0x2000, + 0x146a: 0x2000, 0x146b: 0x2000, 0x146c: 0x2000, + // Block 0x52, offset 0x1480 + 0x1480: 0x4000, 0x1481: 0x4000, 0x1482: 0x4000, + 0x1490: 0x4000, 0x1491: 0x4000, + 0x1492: 0x4000, 0x1493: 0x4000, 0x1494: 0x4000, 0x1495: 0x4000, 0x1496: 0x4000, 0x1497: 0x4000, + 0x1498: 0x4000, 0x1499: 0x4000, 0x149a: 0x4000, 0x149b: 0x4000, 0x149c: 0x4000, 0x149d: 0x4000, + 0x149e: 0x4000, 0x149f: 0x4000, 0x14a0: 0x4000, 0x14a1: 0x4000, 0x14a2: 0x4000, 0x14a3: 0x4000, + 0x14a4: 0x4000, 0x14a5: 0x4000, 0x14a6: 0x4000, 0x14a7: 0x4000, 0x14a8: 0x4000, 0x14a9: 0x4000, + 0x14aa: 0x4000, 0x14ab: 0x4000, 0x14ac: 0x4000, 0x14ad: 0x4000, 0x14ae: 0x4000, 0x14af: 0x4000, + 0x14b0: 0x4000, 0x14b1: 0x4000, 0x14b2: 0x4000, 0x14b3: 0x4000, 0x14b4: 0x4000, 0x14b5: 0x4000, + 0x14b6: 0x4000, 0x14b7: 0x4000, 0x14b8: 0x4000, 0x14b9: 0x4000, 0x14ba: 0x4000, 0x14bb: 0x4000, + // Block 0x53, offset 0x14c0 + 0x14c0: 0x4000, 0x14c1: 0x4000, 0x14c2: 0x4000, 0x14c3: 0x4000, 0x14c4: 0x4000, 0x14c5: 0x4000, + 0x14c6: 0x4000, 0x14c7: 0x4000, 0x14c8: 0x4000, + 0x14d0: 0x4000, 0x14d1: 0x4000, + 0x14e0: 0x4000, 0x14e1: 0x4000, 0x14e2: 0x4000, 0x14e3: 0x4000, + 0x14e4: 0x4000, 0x14e5: 0x4000, + // Block 0x54, offset 0x1500 + 0x1500: 0x4000, 0x1501: 0x4000, 0x1502: 0x4000, 0x1503: 0x4000, 0x1504: 0x4000, 0x1505: 0x4000, + 0x1506: 0x4000, 0x1507: 0x4000, 0x1508: 0x4000, 0x1509: 0x4000, 0x150a: 0x4000, 0x150b: 0x4000, + 0x150c: 0x4000, 0x150d: 0x4000, 0x150e: 0x4000, 0x150f: 0x4000, 0x1510: 
0x4000, 0x1511: 0x4000, + 0x1512: 0x4000, 0x1513: 0x4000, 0x1514: 0x4000, 0x1515: 0x4000, 0x1516: 0x4000, 0x1517: 0x4000, + 0x1518: 0x4000, 0x1519: 0x4000, 0x151a: 0x4000, 0x151b: 0x4000, 0x151c: 0x4000, 0x151d: 0x4000, + 0x151e: 0x4000, 0x151f: 0x4000, 0x1520: 0x4000, + 0x152d: 0x4000, 0x152e: 0x4000, 0x152f: 0x4000, + 0x1530: 0x4000, 0x1531: 0x4000, 0x1532: 0x4000, 0x1533: 0x4000, 0x1534: 0x4000, 0x1535: 0x4000, + 0x1537: 0x4000, 0x1538: 0x4000, 0x1539: 0x4000, 0x153a: 0x4000, 0x153b: 0x4000, + 0x153c: 0x4000, 0x153d: 0x4000, 0x153e: 0x4000, 0x153f: 0x4000, + // Block 0x55, offset 0x1540 + 0x1540: 0x4000, 0x1541: 0x4000, 0x1542: 0x4000, 0x1543: 0x4000, 0x1544: 0x4000, 0x1545: 0x4000, + 0x1546: 0x4000, 0x1547: 0x4000, 0x1548: 0x4000, 0x1549: 0x4000, 0x154a: 0x4000, 0x154b: 0x4000, + 0x154c: 0x4000, 0x154d: 0x4000, 0x154e: 0x4000, 0x154f: 0x4000, 0x1550: 0x4000, 0x1551: 0x4000, + 0x1552: 0x4000, 0x1553: 0x4000, 0x1554: 0x4000, 0x1555: 0x4000, 0x1556: 0x4000, 0x1557: 0x4000, + 0x1558: 0x4000, 0x1559: 0x4000, 0x155a: 0x4000, 0x155b: 0x4000, 0x155c: 0x4000, 0x155d: 0x4000, + 0x155e: 0x4000, 0x155f: 0x4000, 0x1560: 0x4000, 0x1561: 0x4000, 0x1562: 0x4000, 0x1563: 0x4000, + 0x1564: 0x4000, 0x1565: 0x4000, 0x1566: 0x4000, 0x1567: 0x4000, 0x1568: 0x4000, 0x1569: 0x4000, + 0x156a: 0x4000, 0x156b: 0x4000, 0x156c: 0x4000, 0x156d: 0x4000, 0x156e: 0x4000, 0x156f: 0x4000, + 0x1570: 0x4000, 0x1571: 0x4000, 0x1572: 0x4000, 0x1573: 0x4000, 0x1574: 0x4000, 0x1575: 0x4000, + 0x1576: 0x4000, 0x1577: 0x4000, 0x1578: 0x4000, 0x1579: 0x4000, 0x157a: 0x4000, 0x157b: 0x4000, + 0x157c: 0x4000, 0x157e: 0x4000, 0x157f: 0x4000, + // Block 0x56, offset 0x1580 + 0x1580: 0x4000, 0x1581: 0x4000, 0x1582: 0x4000, 0x1583: 0x4000, 0x1584: 0x4000, 0x1585: 0x4000, + 0x1586: 0x4000, 0x1587: 0x4000, 0x1588: 0x4000, 0x1589: 0x4000, 0x158a: 0x4000, 0x158b: 0x4000, + 0x158c: 0x4000, 0x158d: 0x4000, 0x158e: 0x4000, 0x158f: 0x4000, 0x1590: 0x4000, 0x1591: 0x4000, + 0x1592: 0x4000, 0x1593: 0x4000, + 0x15a0: 0x4000, 0x15a1: 0x4000, 0x15a2: 0x4000, 0x15a3: 0x4000, + 0x15a4: 0x4000, 0x15a5: 0x4000, 0x15a6: 0x4000, 0x15a7: 0x4000, 0x15a8: 0x4000, 0x15a9: 0x4000, + 0x15aa: 0x4000, 0x15ab: 0x4000, 0x15ac: 0x4000, 0x15ad: 0x4000, 0x15ae: 0x4000, 0x15af: 0x4000, + 0x15b0: 0x4000, 0x15b1: 0x4000, 0x15b2: 0x4000, 0x15b3: 0x4000, 0x15b4: 0x4000, 0x15b5: 0x4000, + 0x15b6: 0x4000, 0x15b7: 0x4000, 0x15b8: 0x4000, 0x15b9: 0x4000, 0x15ba: 0x4000, 0x15bb: 0x4000, + 0x15bc: 0x4000, 0x15bd: 0x4000, 0x15be: 0x4000, 0x15bf: 0x4000, + // Block 0x57, offset 0x15c0 + 0x15c0: 0x4000, 0x15c1: 0x4000, 0x15c2: 0x4000, 0x15c3: 0x4000, 0x15c4: 0x4000, 0x15c5: 0x4000, + 0x15c6: 0x4000, 0x15c7: 0x4000, 0x15c8: 0x4000, 0x15c9: 0x4000, 0x15ca: 0x4000, + 0x15cf: 0x4000, 0x15d0: 0x4000, 0x15d1: 0x4000, + 0x15d2: 0x4000, 0x15d3: 0x4000, + 0x15e0: 0x4000, 0x15e1: 0x4000, 0x15e2: 0x4000, 0x15e3: 0x4000, + 0x15e4: 0x4000, 0x15e5: 0x4000, 0x15e6: 0x4000, 0x15e7: 0x4000, 0x15e8: 0x4000, 0x15e9: 0x4000, + 0x15ea: 0x4000, 0x15eb: 0x4000, 0x15ec: 0x4000, 0x15ed: 0x4000, 0x15ee: 0x4000, 0x15ef: 0x4000, + 0x15f0: 0x4000, 0x15f4: 0x4000, + 0x15f8: 0x4000, 0x15f9: 0x4000, 0x15fa: 0x4000, 0x15fb: 0x4000, + 0x15fc: 0x4000, 0x15fd: 0x4000, 0x15fe: 0x4000, 0x15ff: 0x4000, + // Block 0x58, offset 0x1600 + 0x1600: 0x4000, 0x1602: 0x4000, 0x1603: 0x4000, 0x1604: 0x4000, 0x1605: 0x4000, + 0x1606: 0x4000, 0x1607: 0x4000, 0x1608: 0x4000, 0x1609: 0x4000, 0x160a: 0x4000, 0x160b: 0x4000, + 0x160c: 0x4000, 0x160d: 0x4000, 0x160e: 0x4000, 0x160f: 0x4000, 0x1610: 0x4000, 0x1611: 0x4000, + 0x1612: 
0x4000, 0x1613: 0x4000, 0x1614: 0x4000, 0x1615: 0x4000, 0x1616: 0x4000, 0x1617: 0x4000, + 0x1618: 0x4000, 0x1619: 0x4000, 0x161a: 0x4000, 0x161b: 0x4000, 0x161c: 0x4000, 0x161d: 0x4000, + 0x161e: 0x4000, 0x161f: 0x4000, 0x1620: 0x4000, 0x1621: 0x4000, 0x1622: 0x4000, 0x1623: 0x4000, + 0x1624: 0x4000, 0x1625: 0x4000, 0x1626: 0x4000, 0x1627: 0x4000, 0x1628: 0x4000, 0x1629: 0x4000, + 0x162a: 0x4000, 0x162b: 0x4000, 0x162c: 0x4000, 0x162d: 0x4000, 0x162e: 0x4000, 0x162f: 0x4000, + 0x1630: 0x4000, 0x1631: 0x4000, 0x1632: 0x4000, 0x1633: 0x4000, 0x1634: 0x4000, 0x1635: 0x4000, + 0x1636: 0x4000, 0x1637: 0x4000, 0x1638: 0x4000, 0x1639: 0x4000, 0x163a: 0x4000, 0x163b: 0x4000, + 0x163c: 0x4000, 0x163d: 0x4000, 0x163e: 0x4000, 0x163f: 0x4000, + // Block 0x59, offset 0x1640 + 0x1640: 0x4000, 0x1641: 0x4000, 0x1642: 0x4000, 0x1643: 0x4000, 0x1644: 0x4000, 0x1645: 0x4000, + 0x1646: 0x4000, 0x1647: 0x4000, 0x1648: 0x4000, 0x1649: 0x4000, 0x164a: 0x4000, 0x164b: 0x4000, + 0x164c: 0x4000, 0x164d: 0x4000, 0x164e: 0x4000, 0x164f: 0x4000, 0x1650: 0x4000, 0x1651: 0x4000, + 0x1652: 0x4000, 0x1653: 0x4000, 0x1654: 0x4000, 0x1655: 0x4000, 0x1656: 0x4000, 0x1657: 0x4000, + 0x1658: 0x4000, 0x1659: 0x4000, 0x165a: 0x4000, 0x165b: 0x4000, 0x165c: 0x4000, 0x165d: 0x4000, + 0x165e: 0x4000, 0x165f: 0x4000, 0x1660: 0x4000, 0x1661: 0x4000, 0x1662: 0x4000, 0x1663: 0x4000, + 0x1664: 0x4000, 0x1665: 0x4000, 0x1666: 0x4000, 0x1667: 0x4000, 0x1668: 0x4000, 0x1669: 0x4000, + 0x166a: 0x4000, 0x166b: 0x4000, 0x166c: 0x4000, 0x166d: 0x4000, 0x166e: 0x4000, 0x166f: 0x4000, + 0x1670: 0x4000, 0x1671: 0x4000, 0x1672: 0x4000, 0x1673: 0x4000, 0x1674: 0x4000, 0x1675: 0x4000, + 0x1676: 0x4000, 0x1677: 0x4000, 0x1678: 0x4000, 0x1679: 0x4000, 0x167a: 0x4000, 0x167b: 0x4000, + 0x167c: 0x4000, 0x167f: 0x4000, + // Block 0x5a, offset 0x1680 + 0x1680: 0x4000, 0x1681: 0x4000, 0x1682: 0x4000, 0x1683: 0x4000, 0x1684: 0x4000, 0x1685: 0x4000, + 0x1686: 0x4000, 0x1687: 0x4000, 0x1688: 0x4000, 0x1689: 0x4000, 0x168a: 0x4000, 0x168b: 0x4000, + 0x168c: 0x4000, 0x168d: 0x4000, 0x168e: 0x4000, 0x168f: 0x4000, 0x1690: 0x4000, 0x1691: 0x4000, + 0x1692: 0x4000, 0x1693: 0x4000, 0x1694: 0x4000, 0x1695: 0x4000, 0x1696: 0x4000, 0x1697: 0x4000, + 0x1698: 0x4000, 0x1699: 0x4000, 0x169a: 0x4000, 0x169b: 0x4000, 0x169c: 0x4000, 0x169d: 0x4000, + 0x169e: 0x4000, 0x169f: 0x4000, 0x16a0: 0x4000, 0x16a1: 0x4000, 0x16a2: 0x4000, 0x16a3: 0x4000, + 0x16a4: 0x4000, 0x16a5: 0x4000, 0x16a6: 0x4000, 0x16a7: 0x4000, 0x16a8: 0x4000, 0x16a9: 0x4000, + 0x16aa: 0x4000, 0x16ab: 0x4000, 0x16ac: 0x4000, 0x16ad: 0x4000, 0x16ae: 0x4000, 0x16af: 0x4000, + 0x16b0: 0x4000, 0x16b1: 0x4000, 0x16b2: 0x4000, 0x16b3: 0x4000, 0x16b4: 0x4000, 0x16b5: 0x4000, + 0x16b6: 0x4000, 0x16b7: 0x4000, 0x16b8: 0x4000, 0x16b9: 0x4000, 0x16ba: 0x4000, 0x16bb: 0x4000, + 0x16bc: 0x4000, 0x16bd: 0x4000, + // Block 0x5b, offset 0x16c0 + 0x16cb: 0x4000, + 0x16cc: 0x4000, 0x16cd: 0x4000, 0x16ce: 0x4000, 0x16d0: 0x4000, 0x16d1: 0x4000, + 0x16d2: 0x4000, 0x16d3: 0x4000, 0x16d4: 0x4000, 0x16d5: 0x4000, 0x16d6: 0x4000, 0x16d7: 0x4000, + 0x16d8: 0x4000, 0x16d9: 0x4000, 0x16da: 0x4000, 0x16db: 0x4000, 0x16dc: 0x4000, 0x16dd: 0x4000, + 0x16de: 0x4000, 0x16df: 0x4000, 0x16e0: 0x4000, 0x16e1: 0x4000, 0x16e2: 0x4000, 0x16e3: 0x4000, + 0x16e4: 0x4000, 0x16e5: 0x4000, 0x16e6: 0x4000, 0x16e7: 0x4000, + 0x16fa: 0x4000, + // Block 0x5c, offset 0x1700 + 0x1715: 0x4000, 0x1716: 0x4000, + 0x1724: 0x4000, + // Block 0x5d, offset 0x1740 + 0x177b: 0x4000, + 0x177c: 0x4000, 0x177d: 0x4000, 0x177e: 0x4000, 0x177f: 0x4000, + // Block 
0x5e, offset 0x1780 + 0x1780: 0x4000, 0x1781: 0x4000, 0x1782: 0x4000, 0x1783: 0x4000, 0x1784: 0x4000, 0x1785: 0x4000, + 0x1786: 0x4000, 0x1787: 0x4000, 0x1788: 0x4000, 0x1789: 0x4000, 0x178a: 0x4000, 0x178b: 0x4000, + 0x178c: 0x4000, 0x178d: 0x4000, 0x178e: 0x4000, 0x178f: 0x4000, + // Block 0x5f, offset 0x17c0 + 0x17c0: 0x4000, 0x17c1: 0x4000, 0x17c2: 0x4000, 0x17c3: 0x4000, 0x17c4: 0x4000, 0x17c5: 0x4000, + 0x17cc: 0x4000, 0x17d0: 0x4000, 0x17d1: 0x4000, + 0x17d2: 0x4000, + 0x17eb: 0x4000, 0x17ec: 0x4000, + 0x17f4: 0x4000, 0x17f5: 0x4000, + 0x17f6: 0x4000, 0x17f7: 0x4000, 0x17f8: 0x4000, 0x17f9: 0x4000, + // Block 0x60, offset 0x1800 + 0x1810: 0x4000, 0x1811: 0x4000, + 0x1812: 0x4000, 0x1813: 0x4000, 0x1814: 0x4000, 0x1815: 0x4000, 0x1816: 0x4000, 0x1817: 0x4000, + 0x1818: 0x4000, 0x1819: 0x4000, 0x181a: 0x4000, 0x181b: 0x4000, 0x181c: 0x4000, 0x181d: 0x4000, + 0x181e: 0x4000, 0x181f: 0x4000, 0x1820: 0x4000, 0x1821: 0x4000, 0x1822: 0x4000, 0x1823: 0x4000, + 0x1824: 0x4000, 0x1825: 0x4000, 0x1826: 0x4000, 0x1827: 0x4000, 0x1828: 0x4000, 0x1829: 0x4000, + 0x182a: 0x4000, 0x182b: 0x4000, 0x182c: 0x4000, 0x182d: 0x4000, 0x182e: 0x4000, 0x182f: 0x4000, + 0x1830: 0x4000, 0x1831: 0x4000, 0x1832: 0x4000, 0x1833: 0x4000, 0x1834: 0x4000, 0x1835: 0x4000, + 0x1836: 0x4000, 0x1837: 0x4000, 0x1838: 0x4000, 0x1839: 0x4000, 0x183a: 0x4000, 0x183b: 0x4000, + 0x183c: 0x4000, 0x183d: 0x4000, 0x183e: 0x4000, + // Block 0x61, offset 0x1840 + 0x1840: 0x4000, 0x1841: 0x4000, 0x1842: 0x4000, 0x1843: 0x4000, 0x1844: 0x4000, 0x1845: 0x4000, + 0x1846: 0x4000, 0x1847: 0x4000, 0x1848: 0x4000, 0x1849: 0x4000, 0x184a: 0x4000, 0x184b: 0x4000, + 0x184c: 0x4000, 0x184d: 0x4000, 0x184e: 0x4000, 0x184f: 0x4000, 0x1850: 0x4000, 0x1851: 0x4000, + 0x1852: 0x4000, 0x1853: 0x4000, 0x1854: 0x4000, 0x1855: 0x4000, 0x1856: 0x4000, 0x1857: 0x4000, + 0x1858: 0x4000, 0x1859: 0x4000, 0x185a: 0x4000, 0x185b: 0x4000, 0x185c: 0x4000, 0x185d: 0x4000, + 0x185e: 0x4000, 0x185f: 0x4000, 0x1860: 0x4000, 0x1861: 0x4000, 0x1862: 0x4000, 0x1863: 0x4000, + 0x1864: 0x4000, 0x1865: 0x4000, 0x1866: 0x4000, 0x1867: 0x4000, 0x1868: 0x4000, 0x1869: 0x4000, + 0x186a: 0x4000, 0x186b: 0x4000, 0x186c: 0x4000, 0x186d: 0x4000, 0x186e: 0x4000, 0x186f: 0x4000, + 0x1870: 0x4000, 0x1873: 0x4000, 0x1874: 0x4000, 0x1875: 0x4000, + 0x1876: 0x4000, 0x187a: 0x4000, + 0x187c: 0x4000, 0x187d: 0x4000, 0x187e: 0x4000, 0x187f: 0x4000, + // Block 0x62, offset 0x1880 + 0x1880: 0x4000, 0x1881: 0x4000, 0x1882: 0x4000, 0x1883: 0x4000, 0x1884: 0x4000, 0x1885: 0x4000, + 0x1886: 0x4000, 0x1887: 0x4000, 0x1888: 0x4000, 0x1889: 0x4000, 0x188a: 0x4000, 0x188b: 0x4000, + 0x188c: 0x4000, 0x188d: 0x4000, 0x188e: 0x4000, 0x188f: 0x4000, 0x1890: 0x4000, 0x1891: 0x4000, + 0x1892: 0x4000, 0x1893: 0x4000, 0x1894: 0x4000, 0x1895: 0x4000, 0x1896: 0x4000, 0x1897: 0x4000, + 0x1898: 0x4000, 0x1899: 0x4000, 0x189a: 0x4000, 0x189b: 0x4000, 0x189c: 0x4000, 0x189d: 0x4000, + 0x189e: 0x4000, 0x189f: 0x4000, 0x18a0: 0x4000, 0x18a1: 0x4000, 0x18a2: 0x4000, + 0x18b0: 0x4000, 0x18b1: 0x4000, 0x18b2: 0x4000, 0x18b3: 0x4000, 0x18b4: 0x4000, 0x18b5: 0x4000, + 0x18b6: 0x4000, 0x18b7: 0x4000, 0x18b8: 0x4000, 0x18b9: 0x4000, + // Block 0x63, offset 0x18c0 + 0x18c0: 0x4000, 0x18c1: 0x4000, 0x18c2: 0x4000, + 0x18d0: 0x4000, 0x18d1: 0x4000, + 0x18d2: 0x4000, 0x18d3: 0x4000, 0x18d4: 0x4000, 0x18d5: 0x4000, 0x18d6: 0x4000, 0x18d7: 0x4000, + 0x18d8: 0x4000, 0x18d9: 0x4000, 0x18da: 0x4000, 0x18db: 0x4000, 0x18dc: 0x4000, 0x18dd: 0x4000, + 0x18de: 0x4000, 0x18df: 0x4000, 0x18e0: 0x4000, 0x18e1: 0x4000, 0x18e2: 0x4000, 
0x18e3: 0x4000, + 0x18e4: 0x4000, 0x18e5: 0x4000, 0x18e6: 0x4000, 0x18e7: 0x4000, 0x18e8: 0x4000, 0x18e9: 0x4000, + 0x18ea: 0x4000, 0x18eb: 0x4000, 0x18ec: 0x4000, 0x18ed: 0x4000, 0x18ee: 0x4000, 0x18ef: 0x4000, + 0x18f0: 0x4000, 0x18f1: 0x4000, 0x18f2: 0x4000, 0x18f3: 0x4000, 0x18f4: 0x4000, 0x18f5: 0x4000, + 0x18f6: 0x4000, 0x18f7: 0x4000, 0x18f8: 0x4000, 0x18f9: 0x4000, 0x18fa: 0x4000, 0x18fb: 0x4000, + 0x18fc: 0x4000, 0x18fd: 0x4000, 0x18fe: 0x4000, 0x18ff: 0x4000, + // Block 0x64, offset 0x1900 + 0x1900: 0x2000, 0x1901: 0x2000, 0x1902: 0x2000, 0x1903: 0x2000, 0x1904: 0x2000, 0x1905: 0x2000, + 0x1906: 0x2000, 0x1907: 0x2000, 0x1908: 0x2000, 0x1909: 0x2000, 0x190a: 0x2000, 0x190b: 0x2000, + 0x190c: 0x2000, 0x190d: 0x2000, 0x190e: 0x2000, 0x190f: 0x2000, 0x1910: 0x2000, 0x1911: 0x2000, + 0x1912: 0x2000, 0x1913: 0x2000, 0x1914: 0x2000, 0x1915: 0x2000, 0x1916: 0x2000, 0x1917: 0x2000, + 0x1918: 0x2000, 0x1919: 0x2000, 0x191a: 0x2000, 0x191b: 0x2000, 0x191c: 0x2000, 0x191d: 0x2000, + 0x191e: 0x2000, 0x191f: 0x2000, 0x1920: 0x2000, 0x1921: 0x2000, 0x1922: 0x2000, 0x1923: 0x2000, + 0x1924: 0x2000, 0x1925: 0x2000, 0x1926: 0x2000, 0x1927: 0x2000, 0x1928: 0x2000, 0x1929: 0x2000, + 0x192a: 0x2000, 0x192b: 0x2000, 0x192c: 0x2000, 0x192d: 0x2000, 0x192e: 0x2000, 0x192f: 0x2000, + 0x1930: 0x2000, 0x1931: 0x2000, 0x1932: 0x2000, 0x1933: 0x2000, 0x1934: 0x2000, 0x1935: 0x2000, + 0x1936: 0x2000, 0x1937: 0x2000, 0x1938: 0x2000, 0x1939: 0x2000, 0x193a: 0x2000, 0x193b: 0x2000, + 0x193c: 0x2000, 0x193d: 0x2000, +} + +// widthIndex: 22 blocks, 1408 entries, 1408 bytes +// Block 0 is the zero block. +var widthIndex = [1408]uint8{ + // Block 0x0, offset 0x0 + // Block 0x1, offset 0x40 + // Block 0x2, offset 0x80 + // Block 0x3, offset 0xc0 + 0xc2: 0x01, 0xc3: 0x02, 0xc4: 0x03, 0xc5: 0x04, 0xc7: 0x05, + 0xc9: 0x06, 0xcb: 0x07, 0xcc: 0x08, 0xcd: 0x09, 0xce: 0x0a, 0xcf: 0x0b, + 0xd0: 0x0c, 0xd1: 0x0d, + 0xe1: 0x02, 0xe2: 0x03, 0xe3: 0x04, 0xe4: 0x05, 0xe5: 0x06, 0xe6: 0x06, 0xe7: 0x06, + 0xe8: 0x06, 0xe9: 0x06, 0xea: 0x07, 0xeb: 0x06, 0xec: 0x06, 0xed: 0x08, 0xee: 0x09, 0xef: 0x0a, + 0xf0: 0x0f, 0xf3: 0x12, 0xf4: 0x13, + // Block 0x4, offset 0x100 + 0x104: 0x0e, 0x105: 0x0f, + // Block 0x5, offset 0x140 + 0x140: 0x10, 0x141: 0x11, 0x142: 0x12, 0x144: 0x13, 0x145: 0x14, 0x146: 0x15, 0x147: 0x16, + 0x148: 0x17, 0x149: 0x18, 0x14a: 0x19, 0x14c: 0x1a, 0x14f: 0x1b, + 0x151: 0x1c, 0x152: 0x08, 0x153: 0x1d, 0x154: 0x1e, 0x155: 0x1f, 0x156: 0x20, 0x157: 0x21, + 0x158: 0x22, 0x159: 0x23, 0x15a: 0x24, 0x15b: 0x25, 0x15c: 0x26, 0x15d: 0x27, 0x15e: 0x28, 0x15f: 0x29, + 0x166: 0x2a, + 0x16c: 0x2b, 0x16d: 0x2c, + 0x17a: 0x2d, 0x17b: 0x2e, 0x17c: 0x0e, 0x17d: 0x0e, 0x17e: 0x0e, 0x17f: 0x2f, + // Block 0x6, offset 0x180 + 0x180: 0x30, 0x181: 0x31, 0x182: 0x32, 0x183: 0x33, 0x184: 0x34, 0x185: 0x35, 0x186: 0x36, 0x187: 0x37, + 0x188: 0x38, 0x189: 0x39, 0x18a: 0x0e, 0x18b: 0x3a, 0x18c: 0x0e, 0x18d: 0x0e, 0x18e: 0x0e, 0x18f: 0x0e, + 0x190: 0x0e, 0x191: 0x0e, 0x192: 0x0e, 0x193: 0x0e, 0x194: 0x0e, 0x195: 0x0e, 0x196: 0x0e, 0x197: 0x0e, + 0x198: 0x0e, 0x199: 0x0e, 0x19a: 0x0e, 0x19b: 0x0e, 0x19c: 0x0e, 0x19d: 0x0e, 0x19e: 0x0e, 0x19f: 0x0e, + 0x1a0: 0x0e, 0x1a1: 0x0e, 0x1a2: 0x0e, 0x1a3: 0x0e, 0x1a4: 0x0e, 0x1a5: 0x0e, 0x1a6: 0x0e, 0x1a7: 0x0e, + 0x1a8: 0x0e, 0x1a9: 0x0e, 0x1aa: 0x0e, 0x1ab: 0x0e, 0x1ac: 0x0e, 0x1ad: 0x0e, 0x1ae: 0x0e, 0x1af: 0x0e, + 0x1b0: 0x0e, 0x1b1: 0x0e, 0x1b2: 0x0e, 0x1b3: 0x0e, 0x1b4: 0x0e, 0x1b5: 0x0e, 0x1b6: 0x0e, 0x1b7: 0x0e, + 0x1b8: 0x0e, 0x1b9: 0x0e, 0x1ba: 0x0e, 0x1bb: 0x0e, 0x1bc: 0x0e, 0x1bd: 0x0e, 
0x1be: 0x0e, 0x1bf: 0x0e, + // Block 0x7, offset 0x1c0 + 0x1c0: 0x0e, 0x1c1: 0x0e, 0x1c2: 0x0e, 0x1c3: 0x0e, 0x1c4: 0x0e, 0x1c5: 0x0e, 0x1c6: 0x0e, 0x1c7: 0x0e, + 0x1c8: 0x0e, 0x1c9: 0x0e, 0x1ca: 0x0e, 0x1cb: 0x0e, 0x1cc: 0x0e, 0x1cd: 0x0e, 0x1ce: 0x0e, 0x1cf: 0x0e, + 0x1d0: 0x0e, 0x1d1: 0x0e, 0x1d2: 0x0e, 0x1d3: 0x0e, 0x1d4: 0x0e, 0x1d5: 0x0e, 0x1d6: 0x0e, 0x1d7: 0x0e, + 0x1d8: 0x0e, 0x1d9: 0x0e, 0x1da: 0x0e, 0x1db: 0x0e, 0x1dc: 0x0e, 0x1dd: 0x0e, 0x1de: 0x0e, 0x1df: 0x0e, + 0x1e0: 0x0e, 0x1e1: 0x0e, 0x1e2: 0x0e, 0x1e3: 0x0e, 0x1e4: 0x0e, 0x1e5: 0x0e, 0x1e6: 0x0e, 0x1e7: 0x0e, + 0x1e8: 0x0e, 0x1e9: 0x0e, 0x1ea: 0x0e, 0x1eb: 0x0e, 0x1ec: 0x0e, 0x1ed: 0x0e, 0x1ee: 0x0e, 0x1ef: 0x0e, + 0x1f0: 0x0e, 0x1f1: 0x0e, 0x1f2: 0x0e, 0x1f3: 0x0e, 0x1f4: 0x0e, 0x1f5: 0x0e, 0x1f6: 0x0e, + 0x1f8: 0x0e, 0x1f9: 0x0e, 0x1fa: 0x0e, 0x1fb: 0x0e, 0x1fc: 0x0e, 0x1fd: 0x0e, 0x1fe: 0x0e, 0x1ff: 0x0e, + // Block 0x8, offset 0x200 + 0x200: 0x0e, 0x201: 0x0e, 0x202: 0x0e, 0x203: 0x0e, 0x204: 0x0e, 0x205: 0x0e, 0x206: 0x0e, 0x207: 0x0e, + 0x208: 0x0e, 0x209: 0x0e, 0x20a: 0x0e, 0x20b: 0x0e, 0x20c: 0x0e, 0x20d: 0x0e, 0x20e: 0x0e, 0x20f: 0x0e, + 0x210: 0x0e, 0x211: 0x0e, 0x212: 0x0e, 0x213: 0x0e, 0x214: 0x0e, 0x215: 0x0e, 0x216: 0x0e, 0x217: 0x0e, + 0x218: 0x0e, 0x219: 0x0e, 0x21a: 0x0e, 0x21b: 0x0e, 0x21c: 0x0e, 0x21d: 0x0e, 0x21e: 0x0e, 0x21f: 0x0e, + 0x220: 0x0e, 0x221: 0x0e, 0x222: 0x0e, 0x223: 0x0e, 0x224: 0x0e, 0x225: 0x0e, 0x226: 0x0e, 0x227: 0x0e, + 0x228: 0x0e, 0x229: 0x0e, 0x22a: 0x0e, 0x22b: 0x0e, 0x22c: 0x0e, 0x22d: 0x0e, 0x22e: 0x0e, 0x22f: 0x0e, + 0x230: 0x0e, 0x231: 0x0e, 0x232: 0x0e, 0x233: 0x0e, 0x234: 0x0e, 0x235: 0x0e, 0x236: 0x0e, 0x237: 0x0e, + 0x238: 0x0e, 0x239: 0x0e, 0x23a: 0x0e, 0x23b: 0x0e, 0x23c: 0x0e, 0x23d: 0x0e, 0x23e: 0x0e, 0x23f: 0x0e, + // Block 0x9, offset 0x240 + 0x240: 0x0e, 0x241: 0x0e, 0x242: 0x0e, 0x243: 0x0e, 0x244: 0x0e, 0x245: 0x0e, 0x246: 0x0e, 0x247: 0x0e, + 0x248: 0x0e, 0x249: 0x0e, 0x24a: 0x0e, 0x24b: 0x0e, 0x24c: 0x0e, 0x24d: 0x0e, 0x24e: 0x0e, 0x24f: 0x0e, + 0x250: 0x0e, 0x251: 0x0e, 0x252: 0x3b, 0x253: 0x3c, + 0x265: 0x3d, + 0x270: 0x0e, 0x271: 0x0e, 0x272: 0x0e, 0x273: 0x0e, 0x274: 0x0e, 0x275: 0x0e, 0x276: 0x0e, 0x277: 0x0e, + 0x278: 0x0e, 0x279: 0x0e, 0x27a: 0x0e, 0x27b: 0x0e, 0x27c: 0x0e, 0x27d: 0x0e, 0x27e: 0x0e, 0x27f: 0x0e, + // Block 0xa, offset 0x280 + 0x280: 0x0e, 0x281: 0x0e, 0x282: 0x0e, 0x283: 0x0e, 0x284: 0x0e, 0x285: 0x0e, 0x286: 0x0e, 0x287: 0x0e, + 0x288: 0x0e, 0x289: 0x0e, 0x28a: 0x0e, 0x28b: 0x0e, 0x28c: 0x0e, 0x28d: 0x0e, 0x28e: 0x0e, 0x28f: 0x0e, + 0x290: 0x0e, 0x291: 0x0e, 0x292: 0x0e, 0x293: 0x0e, 0x294: 0x0e, 0x295: 0x0e, 0x296: 0x0e, 0x297: 0x0e, + 0x298: 0x0e, 0x299: 0x0e, 0x29a: 0x0e, 0x29b: 0x0e, 0x29c: 0x0e, 0x29d: 0x0e, 0x29e: 0x3e, + // Block 0xb, offset 0x2c0 + 0x2c0: 0x08, 0x2c1: 0x08, 0x2c2: 0x08, 0x2c3: 0x08, 0x2c4: 0x08, 0x2c5: 0x08, 0x2c6: 0x08, 0x2c7: 0x08, + 0x2c8: 0x08, 0x2c9: 0x08, 0x2ca: 0x08, 0x2cb: 0x08, 0x2cc: 0x08, 0x2cd: 0x08, 0x2ce: 0x08, 0x2cf: 0x08, + 0x2d0: 0x08, 0x2d1: 0x08, 0x2d2: 0x08, 0x2d3: 0x08, 0x2d4: 0x08, 0x2d5: 0x08, 0x2d6: 0x08, 0x2d7: 0x08, + 0x2d8: 0x08, 0x2d9: 0x08, 0x2da: 0x08, 0x2db: 0x08, 0x2dc: 0x08, 0x2dd: 0x08, 0x2de: 0x08, 0x2df: 0x08, + 0x2e0: 0x08, 0x2e1: 0x08, 0x2e2: 0x08, 0x2e3: 0x08, 0x2e4: 0x08, 0x2e5: 0x08, 0x2e6: 0x08, 0x2e7: 0x08, + 0x2e8: 0x08, 0x2e9: 0x08, 0x2ea: 0x08, 0x2eb: 0x08, 0x2ec: 0x08, 0x2ed: 0x08, 0x2ee: 0x08, 0x2ef: 0x08, + 0x2f0: 0x08, 0x2f1: 0x08, 0x2f2: 0x08, 0x2f3: 0x08, 0x2f4: 0x08, 0x2f5: 0x08, 0x2f6: 0x08, 0x2f7: 0x08, + 0x2f8: 0x08, 0x2f9: 0x08, 0x2fa: 0x08, 0x2fb: 0x08, 
0x2fc: 0x08, 0x2fd: 0x08, 0x2fe: 0x08, 0x2ff: 0x08, + // Block 0xc, offset 0x300 + 0x300: 0x08, 0x301: 0x08, 0x302: 0x08, 0x303: 0x08, 0x304: 0x08, 0x305: 0x08, 0x306: 0x08, 0x307: 0x08, + 0x308: 0x08, 0x309: 0x08, 0x30a: 0x08, 0x30b: 0x08, 0x30c: 0x08, 0x30d: 0x08, 0x30e: 0x08, 0x30f: 0x08, + 0x310: 0x08, 0x311: 0x08, 0x312: 0x08, 0x313: 0x08, 0x314: 0x08, 0x315: 0x08, 0x316: 0x08, 0x317: 0x08, + 0x318: 0x08, 0x319: 0x08, 0x31a: 0x08, 0x31b: 0x08, 0x31c: 0x08, 0x31d: 0x08, 0x31e: 0x08, 0x31f: 0x08, + 0x320: 0x08, 0x321: 0x08, 0x322: 0x08, 0x323: 0x08, 0x324: 0x0e, 0x325: 0x0e, 0x326: 0x0e, 0x327: 0x0e, + 0x328: 0x0e, 0x329: 0x0e, 0x32a: 0x0e, 0x32b: 0x0e, + 0x338: 0x3f, 0x339: 0x40, 0x33c: 0x41, 0x33d: 0x42, 0x33e: 0x43, 0x33f: 0x44, + // Block 0xd, offset 0x340 + 0x37f: 0x45, + // Block 0xe, offset 0x380 + 0x380: 0x0e, 0x381: 0x0e, 0x382: 0x0e, 0x383: 0x0e, 0x384: 0x0e, 0x385: 0x0e, 0x386: 0x0e, 0x387: 0x0e, + 0x388: 0x0e, 0x389: 0x0e, 0x38a: 0x0e, 0x38b: 0x0e, 0x38c: 0x0e, 0x38d: 0x0e, 0x38e: 0x0e, 0x38f: 0x0e, + 0x390: 0x0e, 0x391: 0x0e, 0x392: 0x0e, 0x393: 0x0e, 0x394: 0x0e, 0x395: 0x0e, 0x396: 0x0e, 0x397: 0x0e, + 0x398: 0x0e, 0x399: 0x0e, 0x39a: 0x0e, 0x39b: 0x0e, 0x39c: 0x0e, 0x39d: 0x0e, 0x39e: 0x0e, 0x39f: 0x46, + 0x3a0: 0x0e, 0x3a1: 0x0e, 0x3a2: 0x0e, 0x3a3: 0x0e, 0x3a4: 0x0e, 0x3a5: 0x0e, 0x3a6: 0x0e, 0x3a7: 0x0e, + 0x3a8: 0x0e, 0x3a9: 0x0e, 0x3aa: 0x0e, 0x3ab: 0x47, + // Block 0xf, offset 0x3c0 + 0x3c0: 0x0e, 0x3c1: 0x0e, 0x3c2: 0x0e, 0x3c3: 0x0e, 0x3c4: 0x48, 0x3c5: 0x49, 0x3c6: 0x0e, 0x3c7: 0x0e, + 0x3c8: 0x0e, 0x3c9: 0x0e, 0x3ca: 0x0e, 0x3cb: 0x4a, + // Block 0x10, offset 0x400 + 0x400: 0x4b, 0x403: 0x4c, 0x404: 0x4d, 0x405: 0x4e, 0x406: 0x4f, + 0x408: 0x50, 0x409: 0x51, 0x40c: 0x52, 0x40d: 0x53, 0x40e: 0x54, 0x40f: 0x55, + 0x410: 0x3a, 0x411: 0x56, 0x412: 0x0e, 0x413: 0x57, 0x414: 0x58, 0x415: 0x59, 0x416: 0x5a, 0x417: 0x5b, + 0x418: 0x0e, 0x419: 0x5c, 0x41a: 0x0e, 0x41b: 0x5d, + 0x424: 0x5e, 0x425: 0x5f, 0x426: 0x60, 0x427: 0x61, + // Block 0x11, offset 0x440 + 0x456: 0x0b, 0x457: 0x06, + 0x458: 0x0c, 0x45b: 0x0d, 0x45f: 0x0e, + 0x460: 0x06, 0x461: 0x06, 0x462: 0x06, 0x463: 0x06, 0x464: 0x06, 0x465: 0x06, 0x466: 0x06, 0x467: 0x06, + 0x468: 0x06, 0x469: 0x06, 0x46a: 0x06, 0x46b: 0x06, 0x46c: 0x06, 0x46d: 0x06, 0x46e: 0x06, 0x46f: 0x06, + 0x470: 0x06, 0x471: 0x06, 0x472: 0x06, 0x473: 0x06, 0x474: 0x06, 0x475: 0x06, 0x476: 0x06, 0x477: 0x06, + 0x478: 0x06, 0x479: 0x06, 0x47a: 0x06, 0x47b: 0x06, 0x47c: 0x06, 0x47d: 0x06, 0x47e: 0x06, 0x47f: 0x06, + // Block 0x12, offset 0x480 + 0x484: 0x08, 0x485: 0x08, 0x486: 0x08, 0x487: 0x09, + // Block 0x13, offset 0x4c0 + 0x4c0: 0x08, 0x4c1: 0x08, 0x4c2: 0x08, 0x4c3: 0x08, 0x4c4: 0x08, 0x4c5: 0x08, 0x4c6: 0x08, 0x4c7: 0x08, + 0x4c8: 0x08, 0x4c9: 0x08, 0x4ca: 0x08, 0x4cb: 0x08, 0x4cc: 0x08, 0x4cd: 0x08, 0x4ce: 0x08, 0x4cf: 0x08, + 0x4d0: 0x08, 0x4d1: 0x08, 0x4d2: 0x08, 0x4d3: 0x08, 0x4d4: 0x08, 0x4d5: 0x08, 0x4d6: 0x08, 0x4d7: 0x08, + 0x4d8: 0x08, 0x4d9: 0x08, 0x4da: 0x08, 0x4db: 0x08, 0x4dc: 0x08, 0x4dd: 0x08, 0x4de: 0x08, 0x4df: 0x08, + 0x4e0: 0x08, 0x4e1: 0x08, 0x4e2: 0x08, 0x4e3: 0x08, 0x4e4: 0x08, 0x4e5: 0x08, 0x4e6: 0x08, 0x4e7: 0x08, + 0x4e8: 0x08, 0x4e9: 0x08, 0x4ea: 0x08, 0x4eb: 0x08, 0x4ec: 0x08, 0x4ed: 0x08, 0x4ee: 0x08, 0x4ef: 0x08, + 0x4f0: 0x08, 0x4f1: 0x08, 0x4f2: 0x08, 0x4f3: 0x08, 0x4f4: 0x08, 0x4f5: 0x08, 0x4f6: 0x08, 0x4f7: 0x08, + 0x4f8: 0x08, 0x4f9: 0x08, 0x4fa: 0x08, 0x4fb: 0x08, 0x4fc: 0x08, 0x4fd: 0x08, 0x4fe: 0x08, 0x4ff: 0x62, + // Block 0x14, offset 0x500 + 0x520: 0x10, + 0x530: 0x09, 0x531: 0x09, 0x532: 0x09, 
0x533: 0x09, 0x534: 0x09, 0x535: 0x09, 0x536: 0x09, 0x537: 0x09, + 0x538: 0x09, 0x539: 0x09, 0x53a: 0x09, 0x53b: 0x09, 0x53c: 0x09, 0x53d: 0x09, 0x53e: 0x09, 0x53f: 0x11, + // Block 0x15, offset 0x540 + 0x540: 0x09, 0x541: 0x09, 0x542: 0x09, 0x543: 0x09, 0x544: 0x09, 0x545: 0x09, 0x546: 0x09, 0x547: 0x09, + 0x548: 0x09, 0x549: 0x09, 0x54a: 0x09, 0x54b: 0x09, 0x54c: 0x09, 0x54d: 0x09, 0x54e: 0x09, 0x54f: 0x11, +} + +// inverseData contains 4-byte entries of the following format: +// <0 padding> +// The last byte of the UTF-8-encoded rune is xor-ed with the last byte of the +// UTF-8 encoding of the original rune. Mappings often have the following +// pattern: +// A -> A (U+FF21 -> U+0041) +// B -> B (U+FF22 -> U+0042) +// ... +// By xor-ing the last byte the same entry can be shared by many mappings. This +// reduces the total number of distinct entries by about two thirds. +// The resulting entry for the aforementioned mappings is +// { 0x01, 0xE0, 0x00, 0x00 } +// Using this entry to map U+FF21 (UTF-8 [EF BC A1]), we get +// E0 ^ A1 = 41. +// Similarly, for U+FF22 (UTF-8 [EF BC A2]), we get +// E0 ^ A2 = 42. +// Note that because of the xor-ing, the byte sequence stored in the entry is +// not valid UTF-8. +var inverseData = [150][4]byte{ + {0x00, 0x00, 0x00, 0x00}, + {0x03, 0xe3, 0x80, 0xa0}, + {0x03, 0xef, 0xbc, 0xa0}, + {0x03, 0xef, 0xbc, 0xe0}, + {0x03, 0xef, 0xbd, 0xe0}, + {0x03, 0xef, 0xbf, 0x02}, + {0x03, 0xef, 0xbf, 0x00}, + {0x03, 0xef, 0xbf, 0x0e}, + {0x03, 0xef, 0xbf, 0x0c}, + {0x03, 0xef, 0xbf, 0x0f}, + {0x03, 0xef, 0xbf, 0x39}, + {0x03, 0xef, 0xbf, 0x3b}, + {0x03, 0xef, 0xbf, 0x3f}, + {0x03, 0xef, 0xbf, 0x2a}, + {0x03, 0xef, 0xbf, 0x0d}, + {0x03, 0xef, 0xbf, 0x25}, + {0x03, 0xef, 0xbd, 0x1a}, + {0x03, 0xef, 0xbd, 0x26}, + {0x01, 0xa0, 0x00, 0x00}, + {0x03, 0xef, 0xbd, 0x25}, + {0x03, 0xef, 0xbd, 0x23}, + {0x03, 0xef, 0xbd, 0x2e}, + {0x03, 0xef, 0xbe, 0x07}, + {0x03, 0xef, 0xbe, 0x05}, + {0x03, 0xef, 0xbd, 0x06}, + {0x03, 0xef, 0xbd, 0x13}, + {0x03, 0xef, 0xbd, 0x0b}, + {0x03, 0xef, 0xbd, 0x16}, + {0x03, 0xef, 0xbd, 0x0c}, + {0x03, 0xef, 0xbd, 0x15}, + {0x03, 0xef, 0xbd, 0x0d}, + {0x03, 0xef, 0xbd, 0x1c}, + {0x03, 0xef, 0xbd, 0x02}, + {0x03, 0xef, 0xbd, 0x1f}, + {0x03, 0xef, 0xbd, 0x1d}, + {0x03, 0xef, 0xbd, 0x17}, + {0x03, 0xef, 0xbd, 0x08}, + {0x03, 0xef, 0xbd, 0x09}, + {0x03, 0xef, 0xbd, 0x0e}, + {0x03, 0xef, 0xbd, 0x04}, + {0x03, 0xef, 0xbd, 0x05}, + {0x03, 0xef, 0xbe, 0x3f}, + {0x03, 0xef, 0xbe, 0x00}, + {0x03, 0xef, 0xbd, 0x2c}, + {0x03, 0xef, 0xbe, 0x06}, + {0x03, 0xef, 0xbe, 0x0c}, + {0x03, 0xef, 0xbe, 0x0f}, + {0x03, 0xef, 0xbe, 0x0d}, + {0x03, 0xef, 0xbe, 0x0b}, + {0x03, 0xef, 0xbe, 0x19}, + {0x03, 0xef, 0xbe, 0x15}, + {0x03, 0xef, 0xbe, 0x11}, + {0x03, 0xef, 0xbe, 0x31}, + {0x03, 0xef, 0xbe, 0x33}, + {0x03, 0xef, 0xbd, 0x0f}, + {0x03, 0xef, 0xbe, 0x30}, + {0x03, 0xef, 0xbe, 0x3e}, + {0x03, 0xef, 0xbe, 0x32}, + {0x03, 0xef, 0xbe, 0x36}, + {0x03, 0xef, 0xbd, 0x14}, + {0x03, 0xef, 0xbe, 0x2e}, + {0x03, 0xef, 0xbd, 0x1e}, + {0x03, 0xef, 0xbe, 0x10}, + {0x03, 0xef, 0xbf, 0x13}, + {0x03, 0xef, 0xbf, 0x15}, + {0x03, 0xef, 0xbf, 0x17}, + {0x03, 0xef, 0xbf, 0x1f}, + {0x03, 0xef, 0xbf, 0x1d}, + {0x03, 0xef, 0xbf, 0x1b}, + {0x03, 0xef, 0xbf, 0x09}, + {0x03, 0xef, 0xbf, 0x0b}, + {0x03, 0xef, 0xbf, 0x37}, + {0x03, 0xef, 0xbe, 0x04}, + {0x01, 0xe0, 0x00, 0x00}, + {0x03, 0xe2, 0xa6, 0x1a}, + {0x03, 0xe2, 0xa6, 0x26}, + {0x03, 0xe3, 0x80, 0x23}, + {0x03, 0xe3, 0x80, 0x2e}, + {0x03, 0xe3, 0x80, 0x25}, + {0x03, 0xe3, 0x83, 0x1e}, + {0x03, 0xe3, 0x83, 0x14}, + {0x03, 0xe3, 0x82, 
0x06}, + {0x03, 0xe3, 0x82, 0x0b}, + {0x03, 0xe3, 0x82, 0x0c}, + {0x03, 0xe3, 0x82, 0x0d}, + {0x03, 0xe3, 0x82, 0x02}, + {0x03, 0xe3, 0x83, 0x0f}, + {0x03, 0xe3, 0x83, 0x08}, + {0x03, 0xe3, 0x83, 0x09}, + {0x03, 0xe3, 0x83, 0x2c}, + {0x03, 0xe3, 0x83, 0x0c}, + {0x03, 0xe3, 0x82, 0x13}, + {0x03, 0xe3, 0x82, 0x16}, + {0x03, 0xe3, 0x82, 0x15}, + {0x03, 0xe3, 0x82, 0x1c}, + {0x03, 0xe3, 0x82, 0x1f}, + {0x03, 0xe3, 0x82, 0x1d}, + {0x03, 0xe3, 0x82, 0x1a}, + {0x03, 0xe3, 0x82, 0x17}, + {0x03, 0xe3, 0x82, 0x08}, + {0x03, 0xe3, 0x82, 0x09}, + {0x03, 0xe3, 0x82, 0x0e}, + {0x03, 0xe3, 0x82, 0x04}, + {0x03, 0xe3, 0x82, 0x05}, + {0x03, 0xe3, 0x82, 0x3f}, + {0x03, 0xe3, 0x83, 0x00}, + {0x03, 0xe3, 0x83, 0x06}, + {0x03, 0xe3, 0x83, 0x05}, + {0x03, 0xe3, 0x83, 0x0d}, + {0x03, 0xe3, 0x83, 0x0b}, + {0x03, 0xe3, 0x83, 0x07}, + {0x03, 0xe3, 0x83, 0x19}, + {0x03, 0xe3, 0x83, 0x15}, + {0x03, 0xe3, 0x83, 0x11}, + {0x03, 0xe3, 0x83, 0x31}, + {0x03, 0xe3, 0x83, 0x33}, + {0x03, 0xe3, 0x83, 0x30}, + {0x03, 0xe3, 0x83, 0x3e}, + {0x03, 0xe3, 0x83, 0x32}, + {0x03, 0xe3, 0x83, 0x36}, + {0x03, 0xe3, 0x83, 0x2e}, + {0x03, 0xe3, 0x82, 0x07}, + {0x03, 0xe3, 0x85, 0x04}, + {0x03, 0xe3, 0x84, 0x10}, + {0x03, 0xe3, 0x85, 0x30}, + {0x03, 0xe3, 0x85, 0x0d}, + {0x03, 0xe3, 0x85, 0x13}, + {0x03, 0xe3, 0x85, 0x15}, + {0x03, 0xe3, 0x85, 0x17}, + {0x03, 0xe3, 0x85, 0x1f}, + {0x03, 0xe3, 0x85, 0x1d}, + {0x03, 0xe3, 0x85, 0x1b}, + {0x03, 0xe3, 0x85, 0x09}, + {0x03, 0xe3, 0x85, 0x0f}, + {0x03, 0xe3, 0x85, 0x0b}, + {0x03, 0xe3, 0x85, 0x37}, + {0x03, 0xe3, 0x85, 0x3b}, + {0x03, 0xe3, 0x85, 0x39}, + {0x03, 0xe3, 0x85, 0x3f}, + {0x02, 0xc2, 0x02, 0x00}, + {0x02, 0xc2, 0x0e, 0x00}, + {0x02, 0xc2, 0x0c, 0x00}, + {0x02, 0xc2, 0x00, 0x00}, + {0x03, 0xe2, 0x82, 0x0f}, + {0x03, 0xe2, 0x94, 0x2a}, + {0x03, 0xe2, 0x86, 0x39}, + {0x03, 0xe2, 0x86, 0x3b}, + {0x03, 0xe2, 0x86, 0x3f}, + {0x03, 0xe2, 0x96, 0x0d}, + {0x03, 0xe2, 0x97, 0x25}, +} + +// Total table size 14936 bytes (14KiB) diff --git a/vendor/golang.org/x/text/width/tables12.0.0.go b/vendor/golang.org/x/text/width/tables12.0.0.go new file mode 100644 index 000000000..85296297e --- /dev/null +++ b/vendor/golang.org/x/text/width/tables12.0.0.go @@ -0,0 +1,1351 @@ +// Code generated by running "go generate" in golang.org/x/text. DO NOT EDIT. + +//go:build go1.14 && !go1.16 +// +build go1.14,!go1.16 + +package width + +// UnicodeVersion is the Unicode version from which the tables in this package are derived. +const UnicodeVersion = "12.0.0" + +// lookup returns the trie value for the first UTF-8 encoding in s and +// the width in bytes of this encoding. The size will be 0 if s does not +// hold enough bytes to complete the encoding. len(s) must be greater than 0. +func (t *widthTrie) lookup(s []byte) (v uint16, sz int) { + c0 := s[0] + switch { + case c0 < 0x80: // is ASCII + return widthValues[c0], 1 + case c0 < 0xC2: + return 0, 1 // Illegal UTF-8: not a starter, not ASCII. + case c0 < 0xE0: // 2-byte UTF-8 + if len(s) < 2 { + return 0, 0 + } + i := widthIndex[c0] + c1 := s[1] + if c1 < 0x80 || 0xC0 <= c1 { + return 0, 1 // Illegal UTF-8: not a continuation byte. + } + return t.lookupValue(uint32(i), c1), 2 + case c0 < 0xF0: // 3-byte UTF-8 + if len(s) < 3 { + return 0, 0 + } + i := widthIndex[c0] + c1 := s[1] + if c1 < 0x80 || 0xC0 <= c1 { + return 0, 1 // Illegal UTF-8: not a continuation byte. + } + o := uint32(i)<<6 + uint32(c1) + i = widthIndex[o] + c2 := s[2] + if c2 < 0x80 || 0xC0 <= c2 { + return 0, 2 // Illegal UTF-8: not a continuation byte. 
+ } + return t.lookupValue(uint32(i), c2), 3 + case c0 < 0xF8: // 4-byte UTF-8 + if len(s) < 4 { + return 0, 0 + } + i := widthIndex[c0] + c1 := s[1] + if c1 < 0x80 || 0xC0 <= c1 { + return 0, 1 // Illegal UTF-8: not a continuation byte. + } + o := uint32(i)<<6 + uint32(c1) + i = widthIndex[o] + c2 := s[2] + if c2 < 0x80 || 0xC0 <= c2 { + return 0, 2 // Illegal UTF-8: not a continuation byte. + } + o = uint32(i)<<6 + uint32(c2) + i = widthIndex[o] + c3 := s[3] + if c3 < 0x80 || 0xC0 <= c3 { + return 0, 3 // Illegal UTF-8: not a continuation byte. + } + return t.lookupValue(uint32(i), c3), 4 + } + // Illegal rune + return 0, 1 +} + +// lookupUnsafe returns the trie value for the first UTF-8 encoding in s. +// s must start with a full and valid UTF-8 encoded rune. +func (t *widthTrie) lookupUnsafe(s []byte) uint16 { + c0 := s[0] + if c0 < 0x80 { // is ASCII + return widthValues[c0] + } + i := widthIndex[c0] + if c0 < 0xE0 { // 2-byte UTF-8 + return t.lookupValue(uint32(i), s[1]) + } + i = widthIndex[uint32(i)<<6+uint32(s[1])] + if c0 < 0xF0 { // 3-byte UTF-8 + return t.lookupValue(uint32(i), s[2]) + } + i = widthIndex[uint32(i)<<6+uint32(s[2])] + if c0 < 0xF8 { // 4-byte UTF-8 + return t.lookupValue(uint32(i), s[3]) + } + return 0 +} + +// lookupString returns the trie value for the first UTF-8 encoding in s and +// the width in bytes of this encoding. The size will be 0 if s does not +// hold enough bytes to complete the encoding. len(s) must be greater than 0. +func (t *widthTrie) lookupString(s string) (v uint16, sz int) { + c0 := s[0] + switch { + case c0 < 0x80: // is ASCII + return widthValues[c0], 1 + case c0 < 0xC2: + return 0, 1 // Illegal UTF-8: not a starter, not ASCII. + case c0 < 0xE0: // 2-byte UTF-8 + if len(s) < 2 { + return 0, 0 + } + i := widthIndex[c0] + c1 := s[1] + if c1 < 0x80 || 0xC0 <= c1 { + return 0, 1 // Illegal UTF-8: not a continuation byte. + } + return t.lookupValue(uint32(i), c1), 2 + case c0 < 0xF0: // 3-byte UTF-8 + if len(s) < 3 { + return 0, 0 + } + i := widthIndex[c0] + c1 := s[1] + if c1 < 0x80 || 0xC0 <= c1 { + return 0, 1 // Illegal UTF-8: not a continuation byte. + } + o := uint32(i)<<6 + uint32(c1) + i = widthIndex[o] + c2 := s[2] + if c2 < 0x80 || 0xC0 <= c2 { + return 0, 2 // Illegal UTF-8: not a continuation byte. + } + return t.lookupValue(uint32(i), c2), 3 + case c0 < 0xF8: // 4-byte UTF-8 + if len(s) < 4 { + return 0, 0 + } + i := widthIndex[c0] + c1 := s[1] + if c1 < 0x80 || 0xC0 <= c1 { + return 0, 1 // Illegal UTF-8: not a continuation byte. + } + o := uint32(i)<<6 + uint32(c1) + i = widthIndex[o] + c2 := s[2] + if c2 < 0x80 || 0xC0 <= c2 { + return 0, 2 // Illegal UTF-8: not a continuation byte. + } + o = uint32(i)<<6 + uint32(c2) + i = widthIndex[o] + c3 := s[3] + if c3 < 0x80 || 0xC0 <= c3 { + return 0, 3 // Illegal UTF-8: not a continuation byte. + } + return t.lookupValue(uint32(i), c3), 4 + } + // Illegal rune + return 0, 1 +} + +// lookupStringUnsafe returns the trie value for the first UTF-8 encoding in s. +// s must start with a full and valid UTF-8 encoded rune. 
+func (t *widthTrie) lookupStringUnsafe(s string) uint16 { + c0 := s[0] + if c0 < 0x80 { // is ASCII + return widthValues[c0] + } + i := widthIndex[c0] + if c0 < 0xE0 { // 2-byte UTF-8 + return t.lookupValue(uint32(i), s[1]) + } + i = widthIndex[uint32(i)<<6+uint32(s[1])] + if c0 < 0xF0 { // 3-byte UTF-8 + return t.lookupValue(uint32(i), s[2]) + } + i = widthIndex[uint32(i)<<6+uint32(s[2])] + if c0 < 0xF8 { // 4-byte UTF-8 + return t.lookupValue(uint32(i), s[3]) + } + return 0 +} + +// widthTrie. Total size: 14720 bytes (14.38 KiB). Checksum: 3f4f2516ded5489b. +type widthTrie struct{} + +func newWidthTrie(i int) *widthTrie { + return &widthTrie{} +} + +// lookupValue determines the type of block n and looks up the value for b. +func (t *widthTrie) lookupValue(n uint32, b byte) uint16 { + switch { + default: + return uint16(widthValues[n<<6+uint32(b)]) + } +} + +// widthValues: 104 blocks, 6656 entries, 13312 bytes +// The third block is the zero block. +var widthValues = [6656]uint16{ + // Block 0x0, offset 0x0 + 0x20: 0x6001, 0x21: 0x6002, 0x22: 0x6002, 0x23: 0x6002, + 0x24: 0x6002, 0x25: 0x6002, 0x26: 0x6002, 0x27: 0x6002, 0x28: 0x6002, 0x29: 0x6002, + 0x2a: 0x6002, 0x2b: 0x6002, 0x2c: 0x6002, 0x2d: 0x6002, 0x2e: 0x6002, 0x2f: 0x6002, + 0x30: 0x6002, 0x31: 0x6002, 0x32: 0x6002, 0x33: 0x6002, 0x34: 0x6002, 0x35: 0x6002, + 0x36: 0x6002, 0x37: 0x6002, 0x38: 0x6002, 0x39: 0x6002, 0x3a: 0x6002, 0x3b: 0x6002, + 0x3c: 0x6002, 0x3d: 0x6002, 0x3e: 0x6002, 0x3f: 0x6002, + // Block 0x1, offset 0x40 + 0x40: 0x6003, 0x41: 0x6003, 0x42: 0x6003, 0x43: 0x6003, 0x44: 0x6003, 0x45: 0x6003, + 0x46: 0x6003, 0x47: 0x6003, 0x48: 0x6003, 0x49: 0x6003, 0x4a: 0x6003, 0x4b: 0x6003, + 0x4c: 0x6003, 0x4d: 0x6003, 0x4e: 0x6003, 0x4f: 0x6003, 0x50: 0x6003, 0x51: 0x6003, + 0x52: 0x6003, 0x53: 0x6003, 0x54: 0x6003, 0x55: 0x6003, 0x56: 0x6003, 0x57: 0x6003, + 0x58: 0x6003, 0x59: 0x6003, 0x5a: 0x6003, 0x5b: 0x6003, 0x5c: 0x6003, 0x5d: 0x6003, + 0x5e: 0x6003, 0x5f: 0x6003, 0x60: 0x6004, 0x61: 0x6004, 0x62: 0x6004, 0x63: 0x6004, + 0x64: 0x6004, 0x65: 0x6004, 0x66: 0x6004, 0x67: 0x6004, 0x68: 0x6004, 0x69: 0x6004, + 0x6a: 0x6004, 0x6b: 0x6004, 0x6c: 0x6004, 0x6d: 0x6004, 0x6e: 0x6004, 0x6f: 0x6004, + 0x70: 0x6004, 0x71: 0x6004, 0x72: 0x6004, 0x73: 0x6004, 0x74: 0x6004, 0x75: 0x6004, + 0x76: 0x6004, 0x77: 0x6004, 0x78: 0x6004, 0x79: 0x6004, 0x7a: 0x6004, 0x7b: 0x6004, + 0x7c: 0x6004, 0x7d: 0x6004, 0x7e: 0x6004, + // Block 0x2, offset 0x80 + // Block 0x3, offset 0xc0 + 0xe1: 0x2000, 0xe2: 0x6005, 0xe3: 0x6005, + 0xe4: 0x2000, 0xe5: 0x6006, 0xe6: 0x6005, 0xe7: 0x2000, 0xe8: 0x2000, + 0xea: 0x2000, 0xec: 0x6007, 0xed: 0x2000, 0xee: 0x2000, 0xef: 0x6008, + 0xf0: 0x2000, 0xf1: 0x2000, 0xf2: 0x2000, 0xf3: 0x2000, 0xf4: 0x2000, + 0xf6: 0x2000, 0xf7: 0x2000, 0xf8: 0x2000, 0xf9: 0x2000, 0xfa: 0x2000, + 0xfc: 0x2000, 0xfd: 0x2000, 0xfe: 0x2000, 0xff: 0x2000, + // Block 0x4, offset 0x100 + 0x106: 0x2000, + 0x110: 0x2000, + 0x117: 0x2000, + 0x118: 0x2000, + 0x11e: 0x2000, 0x11f: 0x2000, 0x120: 0x2000, 0x121: 0x2000, + 0x126: 0x2000, 0x128: 0x2000, 0x129: 0x2000, + 0x12a: 0x2000, 0x12c: 0x2000, 0x12d: 0x2000, + 0x130: 0x2000, 0x132: 0x2000, 0x133: 0x2000, + 0x137: 0x2000, 0x138: 0x2000, 0x139: 0x2000, 0x13a: 0x2000, + 0x13c: 0x2000, 0x13e: 0x2000, + // Block 0x5, offset 0x140 + 0x141: 0x2000, + 0x151: 0x2000, + 0x153: 0x2000, + 0x15b: 0x2000, + 0x166: 0x2000, 0x167: 0x2000, + 0x16b: 0x2000, + 0x171: 0x2000, 0x172: 0x2000, 0x173: 0x2000, + 0x178: 0x2000, + 0x17f: 0x2000, + // Block 0x6, offset 0x180 + 0x180: 0x2000, 0x181: 0x2000, 0x182: 
0x2000, 0x184: 0x2000, + 0x188: 0x2000, 0x189: 0x2000, 0x18a: 0x2000, 0x18b: 0x2000, + 0x18d: 0x2000, + 0x192: 0x2000, 0x193: 0x2000, + 0x1a6: 0x2000, 0x1a7: 0x2000, + 0x1ab: 0x2000, + // Block 0x7, offset 0x1c0 + 0x1ce: 0x2000, 0x1d0: 0x2000, + 0x1d2: 0x2000, 0x1d4: 0x2000, 0x1d6: 0x2000, + 0x1d8: 0x2000, 0x1da: 0x2000, 0x1dc: 0x2000, + // Block 0x8, offset 0x200 + 0x211: 0x2000, + 0x221: 0x2000, + // Block 0x9, offset 0x240 + 0x244: 0x2000, + 0x247: 0x2000, 0x249: 0x2000, 0x24a: 0x2000, 0x24b: 0x2000, + 0x24d: 0x2000, 0x250: 0x2000, + 0x258: 0x2000, 0x259: 0x2000, 0x25a: 0x2000, 0x25b: 0x2000, 0x25d: 0x2000, + 0x25f: 0x2000, + // Block 0xa, offset 0x280 + 0x280: 0x2000, 0x281: 0x2000, 0x282: 0x2000, 0x283: 0x2000, 0x284: 0x2000, 0x285: 0x2000, + 0x286: 0x2000, 0x287: 0x2000, 0x288: 0x2000, 0x289: 0x2000, 0x28a: 0x2000, 0x28b: 0x2000, + 0x28c: 0x2000, 0x28d: 0x2000, 0x28e: 0x2000, 0x28f: 0x2000, 0x290: 0x2000, 0x291: 0x2000, + 0x292: 0x2000, 0x293: 0x2000, 0x294: 0x2000, 0x295: 0x2000, 0x296: 0x2000, 0x297: 0x2000, + 0x298: 0x2000, 0x299: 0x2000, 0x29a: 0x2000, 0x29b: 0x2000, 0x29c: 0x2000, 0x29d: 0x2000, + 0x29e: 0x2000, 0x29f: 0x2000, 0x2a0: 0x2000, 0x2a1: 0x2000, 0x2a2: 0x2000, 0x2a3: 0x2000, + 0x2a4: 0x2000, 0x2a5: 0x2000, 0x2a6: 0x2000, 0x2a7: 0x2000, 0x2a8: 0x2000, 0x2a9: 0x2000, + 0x2aa: 0x2000, 0x2ab: 0x2000, 0x2ac: 0x2000, 0x2ad: 0x2000, 0x2ae: 0x2000, 0x2af: 0x2000, + 0x2b0: 0x2000, 0x2b1: 0x2000, 0x2b2: 0x2000, 0x2b3: 0x2000, 0x2b4: 0x2000, 0x2b5: 0x2000, + 0x2b6: 0x2000, 0x2b7: 0x2000, 0x2b8: 0x2000, 0x2b9: 0x2000, 0x2ba: 0x2000, 0x2bb: 0x2000, + 0x2bc: 0x2000, 0x2bd: 0x2000, 0x2be: 0x2000, 0x2bf: 0x2000, + // Block 0xb, offset 0x2c0 + 0x2c0: 0x2000, 0x2c1: 0x2000, 0x2c2: 0x2000, 0x2c3: 0x2000, 0x2c4: 0x2000, 0x2c5: 0x2000, + 0x2c6: 0x2000, 0x2c7: 0x2000, 0x2c8: 0x2000, 0x2c9: 0x2000, 0x2ca: 0x2000, 0x2cb: 0x2000, + 0x2cc: 0x2000, 0x2cd: 0x2000, 0x2ce: 0x2000, 0x2cf: 0x2000, 0x2d0: 0x2000, 0x2d1: 0x2000, + 0x2d2: 0x2000, 0x2d3: 0x2000, 0x2d4: 0x2000, 0x2d5: 0x2000, 0x2d6: 0x2000, 0x2d7: 0x2000, + 0x2d8: 0x2000, 0x2d9: 0x2000, 0x2da: 0x2000, 0x2db: 0x2000, 0x2dc: 0x2000, 0x2dd: 0x2000, + 0x2de: 0x2000, 0x2df: 0x2000, 0x2e0: 0x2000, 0x2e1: 0x2000, 0x2e2: 0x2000, 0x2e3: 0x2000, + 0x2e4: 0x2000, 0x2e5: 0x2000, 0x2e6: 0x2000, 0x2e7: 0x2000, 0x2e8: 0x2000, 0x2e9: 0x2000, + 0x2ea: 0x2000, 0x2eb: 0x2000, 0x2ec: 0x2000, 0x2ed: 0x2000, 0x2ee: 0x2000, 0x2ef: 0x2000, + // Block 0xc, offset 0x300 + 0x311: 0x2000, + 0x312: 0x2000, 0x313: 0x2000, 0x314: 0x2000, 0x315: 0x2000, 0x316: 0x2000, 0x317: 0x2000, + 0x318: 0x2000, 0x319: 0x2000, 0x31a: 0x2000, 0x31b: 0x2000, 0x31c: 0x2000, 0x31d: 0x2000, + 0x31e: 0x2000, 0x31f: 0x2000, 0x320: 0x2000, 0x321: 0x2000, 0x323: 0x2000, + 0x324: 0x2000, 0x325: 0x2000, 0x326: 0x2000, 0x327: 0x2000, 0x328: 0x2000, 0x329: 0x2000, + 0x331: 0x2000, 0x332: 0x2000, 0x333: 0x2000, 0x334: 0x2000, 0x335: 0x2000, + 0x336: 0x2000, 0x337: 0x2000, 0x338: 0x2000, 0x339: 0x2000, 0x33a: 0x2000, 0x33b: 0x2000, + 0x33c: 0x2000, 0x33d: 0x2000, 0x33e: 0x2000, 0x33f: 0x2000, + // Block 0xd, offset 0x340 + 0x340: 0x2000, 0x341: 0x2000, 0x343: 0x2000, 0x344: 0x2000, 0x345: 0x2000, + 0x346: 0x2000, 0x347: 0x2000, 0x348: 0x2000, 0x349: 0x2000, + // Block 0xe, offset 0x380 + 0x381: 0x2000, + 0x390: 0x2000, 0x391: 0x2000, + 0x392: 0x2000, 0x393: 0x2000, 0x394: 0x2000, 0x395: 0x2000, 0x396: 0x2000, 0x397: 0x2000, + 0x398: 0x2000, 0x399: 0x2000, 0x39a: 0x2000, 0x39b: 0x2000, 0x39c: 0x2000, 0x39d: 0x2000, + 0x39e: 0x2000, 0x39f: 0x2000, 0x3a0: 0x2000, 0x3a1: 0x2000, 0x3a2: 0x2000, 0x3a3: 
0x2000, + 0x3a4: 0x2000, 0x3a5: 0x2000, 0x3a6: 0x2000, 0x3a7: 0x2000, 0x3a8: 0x2000, 0x3a9: 0x2000, + 0x3aa: 0x2000, 0x3ab: 0x2000, 0x3ac: 0x2000, 0x3ad: 0x2000, 0x3ae: 0x2000, 0x3af: 0x2000, + 0x3b0: 0x2000, 0x3b1: 0x2000, 0x3b2: 0x2000, 0x3b3: 0x2000, 0x3b4: 0x2000, 0x3b5: 0x2000, + 0x3b6: 0x2000, 0x3b7: 0x2000, 0x3b8: 0x2000, 0x3b9: 0x2000, 0x3ba: 0x2000, 0x3bb: 0x2000, + 0x3bc: 0x2000, 0x3bd: 0x2000, 0x3be: 0x2000, 0x3bf: 0x2000, + // Block 0xf, offset 0x3c0 + 0x3c0: 0x2000, 0x3c1: 0x2000, 0x3c2: 0x2000, 0x3c3: 0x2000, 0x3c4: 0x2000, 0x3c5: 0x2000, + 0x3c6: 0x2000, 0x3c7: 0x2000, 0x3c8: 0x2000, 0x3c9: 0x2000, 0x3ca: 0x2000, 0x3cb: 0x2000, + 0x3cc: 0x2000, 0x3cd: 0x2000, 0x3ce: 0x2000, 0x3cf: 0x2000, 0x3d1: 0x2000, + // Block 0x10, offset 0x400 + 0x400: 0x4000, 0x401: 0x4000, 0x402: 0x4000, 0x403: 0x4000, 0x404: 0x4000, 0x405: 0x4000, + 0x406: 0x4000, 0x407: 0x4000, 0x408: 0x4000, 0x409: 0x4000, 0x40a: 0x4000, 0x40b: 0x4000, + 0x40c: 0x4000, 0x40d: 0x4000, 0x40e: 0x4000, 0x40f: 0x4000, 0x410: 0x4000, 0x411: 0x4000, + 0x412: 0x4000, 0x413: 0x4000, 0x414: 0x4000, 0x415: 0x4000, 0x416: 0x4000, 0x417: 0x4000, + 0x418: 0x4000, 0x419: 0x4000, 0x41a: 0x4000, 0x41b: 0x4000, 0x41c: 0x4000, 0x41d: 0x4000, + 0x41e: 0x4000, 0x41f: 0x4000, 0x420: 0x4000, 0x421: 0x4000, 0x422: 0x4000, 0x423: 0x4000, + 0x424: 0x4000, 0x425: 0x4000, 0x426: 0x4000, 0x427: 0x4000, 0x428: 0x4000, 0x429: 0x4000, + 0x42a: 0x4000, 0x42b: 0x4000, 0x42c: 0x4000, 0x42d: 0x4000, 0x42e: 0x4000, 0x42f: 0x4000, + 0x430: 0x4000, 0x431: 0x4000, 0x432: 0x4000, 0x433: 0x4000, 0x434: 0x4000, 0x435: 0x4000, + 0x436: 0x4000, 0x437: 0x4000, 0x438: 0x4000, 0x439: 0x4000, 0x43a: 0x4000, 0x43b: 0x4000, + 0x43c: 0x4000, 0x43d: 0x4000, 0x43e: 0x4000, 0x43f: 0x4000, + // Block 0x11, offset 0x440 + 0x440: 0x4000, 0x441: 0x4000, 0x442: 0x4000, 0x443: 0x4000, 0x444: 0x4000, 0x445: 0x4000, + 0x446: 0x4000, 0x447: 0x4000, 0x448: 0x4000, 0x449: 0x4000, 0x44a: 0x4000, 0x44b: 0x4000, + 0x44c: 0x4000, 0x44d: 0x4000, 0x44e: 0x4000, 0x44f: 0x4000, 0x450: 0x4000, 0x451: 0x4000, + 0x452: 0x4000, 0x453: 0x4000, 0x454: 0x4000, 0x455: 0x4000, 0x456: 0x4000, 0x457: 0x4000, + 0x458: 0x4000, 0x459: 0x4000, 0x45a: 0x4000, 0x45b: 0x4000, 0x45c: 0x4000, 0x45d: 0x4000, + 0x45e: 0x4000, 0x45f: 0x4000, + // Block 0x12, offset 0x480 + 0x490: 0x2000, + 0x493: 0x2000, 0x494: 0x2000, 0x495: 0x2000, 0x496: 0x2000, + 0x498: 0x2000, 0x499: 0x2000, 0x49c: 0x2000, 0x49d: 0x2000, + 0x4a0: 0x2000, 0x4a1: 0x2000, 0x4a2: 0x2000, + 0x4a4: 0x2000, 0x4a5: 0x2000, 0x4a6: 0x2000, 0x4a7: 0x2000, + 0x4b0: 0x2000, 0x4b2: 0x2000, 0x4b3: 0x2000, 0x4b5: 0x2000, + 0x4bb: 0x2000, + 0x4be: 0x2000, + // Block 0x13, offset 0x4c0 + 0x4f4: 0x2000, + 0x4ff: 0x2000, + // Block 0x14, offset 0x500 + 0x501: 0x2000, 0x502: 0x2000, 0x503: 0x2000, 0x504: 0x2000, + 0x529: 0xa009, + 0x52c: 0x2000, + // Block 0x15, offset 0x540 + 0x543: 0x2000, 0x545: 0x2000, + 0x549: 0x2000, + 0x553: 0x2000, 0x556: 0x2000, + 0x561: 0x2000, 0x562: 0x2000, + 0x566: 0x2000, + 0x56b: 0x2000, + // Block 0x16, offset 0x580 + 0x593: 0x2000, 0x594: 0x2000, + 0x59b: 0x2000, 0x59c: 0x2000, 0x59d: 0x2000, + 0x59e: 0x2000, 0x5a0: 0x2000, 0x5a1: 0x2000, 0x5a2: 0x2000, 0x5a3: 0x2000, + 0x5a4: 0x2000, 0x5a5: 0x2000, 0x5a6: 0x2000, 0x5a7: 0x2000, 0x5a8: 0x2000, 0x5a9: 0x2000, + 0x5aa: 0x2000, 0x5ab: 0x2000, + 0x5b0: 0x2000, 0x5b1: 0x2000, 0x5b2: 0x2000, 0x5b3: 0x2000, 0x5b4: 0x2000, 0x5b5: 0x2000, + 0x5b6: 0x2000, 0x5b7: 0x2000, 0x5b8: 0x2000, 0x5b9: 0x2000, + // Block 0x17, offset 0x5c0 + 0x5c9: 0x2000, + 0x5d0: 0x200a, 0x5d1: 0x200b, + 
0x5d2: 0x200a, 0x5d3: 0x200c, 0x5d4: 0x2000, 0x5d5: 0x2000, 0x5d6: 0x2000, 0x5d7: 0x2000, + 0x5d8: 0x2000, 0x5d9: 0x2000, + 0x5f8: 0x2000, 0x5f9: 0x2000, + // Block 0x18, offset 0x600 + 0x612: 0x2000, 0x614: 0x2000, + 0x627: 0x2000, + // Block 0x19, offset 0x640 + 0x640: 0x2000, 0x642: 0x2000, 0x643: 0x2000, + 0x647: 0x2000, 0x648: 0x2000, 0x64b: 0x2000, + 0x64f: 0x2000, 0x651: 0x2000, + 0x655: 0x2000, + 0x65a: 0x2000, 0x65d: 0x2000, + 0x65e: 0x2000, 0x65f: 0x2000, 0x660: 0x2000, 0x663: 0x2000, + 0x665: 0x2000, 0x667: 0x2000, 0x668: 0x2000, 0x669: 0x2000, + 0x66a: 0x2000, 0x66b: 0x2000, 0x66c: 0x2000, 0x66e: 0x2000, + 0x674: 0x2000, 0x675: 0x2000, + 0x676: 0x2000, 0x677: 0x2000, + 0x67c: 0x2000, 0x67d: 0x2000, + // Block 0x1a, offset 0x680 + 0x688: 0x2000, + 0x68c: 0x2000, + 0x692: 0x2000, + 0x6a0: 0x2000, 0x6a1: 0x2000, + 0x6a4: 0x2000, 0x6a5: 0x2000, 0x6a6: 0x2000, 0x6a7: 0x2000, + 0x6aa: 0x2000, 0x6ab: 0x2000, 0x6ae: 0x2000, 0x6af: 0x2000, + // Block 0x1b, offset 0x6c0 + 0x6c2: 0x2000, 0x6c3: 0x2000, + 0x6c6: 0x2000, 0x6c7: 0x2000, + 0x6d5: 0x2000, + 0x6d9: 0x2000, + 0x6e5: 0x2000, + 0x6ff: 0x2000, + // Block 0x1c, offset 0x700 + 0x712: 0x2000, + 0x71a: 0x4000, 0x71b: 0x4000, + 0x729: 0x4000, + 0x72a: 0x4000, + // Block 0x1d, offset 0x740 + 0x769: 0x4000, + 0x76a: 0x4000, 0x76b: 0x4000, 0x76c: 0x4000, + 0x770: 0x4000, 0x773: 0x4000, + // Block 0x1e, offset 0x780 + 0x7a0: 0x2000, 0x7a1: 0x2000, 0x7a2: 0x2000, 0x7a3: 0x2000, + 0x7a4: 0x2000, 0x7a5: 0x2000, 0x7a6: 0x2000, 0x7a7: 0x2000, 0x7a8: 0x2000, 0x7a9: 0x2000, + 0x7aa: 0x2000, 0x7ab: 0x2000, 0x7ac: 0x2000, 0x7ad: 0x2000, 0x7ae: 0x2000, 0x7af: 0x2000, + 0x7b0: 0x2000, 0x7b1: 0x2000, 0x7b2: 0x2000, 0x7b3: 0x2000, 0x7b4: 0x2000, 0x7b5: 0x2000, + 0x7b6: 0x2000, 0x7b7: 0x2000, 0x7b8: 0x2000, 0x7b9: 0x2000, 0x7ba: 0x2000, 0x7bb: 0x2000, + 0x7bc: 0x2000, 0x7bd: 0x2000, 0x7be: 0x2000, 0x7bf: 0x2000, + // Block 0x1f, offset 0x7c0 + 0x7c0: 0x2000, 0x7c1: 0x2000, 0x7c2: 0x2000, 0x7c3: 0x2000, 0x7c4: 0x2000, 0x7c5: 0x2000, + 0x7c6: 0x2000, 0x7c7: 0x2000, 0x7c8: 0x2000, 0x7c9: 0x2000, 0x7ca: 0x2000, 0x7cb: 0x2000, + 0x7cc: 0x2000, 0x7cd: 0x2000, 0x7ce: 0x2000, 0x7cf: 0x2000, 0x7d0: 0x2000, 0x7d1: 0x2000, + 0x7d2: 0x2000, 0x7d3: 0x2000, 0x7d4: 0x2000, 0x7d5: 0x2000, 0x7d6: 0x2000, 0x7d7: 0x2000, + 0x7d8: 0x2000, 0x7d9: 0x2000, 0x7da: 0x2000, 0x7db: 0x2000, 0x7dc: 0x2000, 0x7dd: 0x2000, + 0x7de: 0x2000, 0x7df: 0x2000, 0x7e0: 0x2000, 0x7e1: 0x2000, 0x7e2: 0x2000, 0x7e3: 0x2000, + 0x7e4: 0x2000, 0x7e5: 0x2000, 0x7e6: 0x2000, 0x7e7: 0x2000, 0x7e8: 0x2000, 0x7e9: 0x2000, + 0x7eb: 0x2000, 0x7ec: 0x2000, 0x7ed: 0x2000, 0x7ee: 0x2000, 0x7ef: 0x2000, + 0x7f0: 0x2000, 0x7f1: 0x2000, 0x7f2: 0x2000, 0x7f3: 0x2000, 0x7f4: 0x2000, 0x7f5: 0x2000, + 0x7f6: 0x2000, 0x7f7: 0x2000, 0x7f8: 0x2000, 0x7f9: 0x2000, 0x7fa: 0x2000, 0x7fb: 0x2000, + 0x7fc: 0x2000, 0x7fd: 0x2000, 0x7fe: 0x2000, 0x7ff: 0x2000, + // Block 0x20, offset 0x800 + 0x800: 0x2000, 0x801: 0x2000, 0x802: 0x200d, 0x803: 0x2000, 0x804: 0x2000, 0x805: 0x2000, + 0x806: 0x2000, 0x807: 0x2000, 0x808: 0x2000, 0x809: 0x2000, 0x80a: 0x2000, 0x80b: 0x2000, + 0x80c: 0x2000, 0x80d: 0x2000, 0x80e: 0x2000, 0x80f: 0x2000, 0x810: 0x2000, 0x811: 0x2000, + 0x812: 0x2000, 0x813: 0x2000, 0x814: 0x2000, 0x815: 0x2000, 0x816: 0x2000, 0x817: 0x2000, + 0x818: 0x2000, 0x819: 0x2000, 0x81a: 0x2000, 0x81b: 0x2000, 0x81c: 0x2000, 0x81d: 0x2000, + 0x81e: 0x2000, 0x81f: 0x2000, 0x820: 0x2000, 0x821: 0x2000, 0x822: 0x2000, 0x823: 0x2000, + 0x824: 0x2000, 0x825: 0x2000, 0x826: 0x2000, 0x827: 0x2000, 0x828: 0x2000, 0x829: 0x2000, + 
0x82a: 0x2000, 0x82b: 0x2000, 0x82c: 0x2000, 0x82d: 0x2000, 0x82e: 0x2000, 0x82f: 0x2000, + 0x830: 0x2000, 0x831: 0x2000, 0x832: 0x2000, 0x833: 0x2000, 0x834: 0x2000, 0x835: 0x2000, + 0x836: 0x2000, 0x837: 0x2000, 0x838: 0x2000, 0x839: 0x2000, 0x83a: 0x2000, 0x83b: 0x2000, + 0x83c: 0x2000, 0x83d: 0x2000, 0x83e: 0x2000, 0x83f: 0x2000, + // Block 0x21, offset 0x840 + 0x840: 0x2000, 0x841: 0x2000, 0x842: 0x2000, 0x843: 0x2000, 0x844: 0x2000, 0x845: 0x2000, + 0x846: 0x2000, 0x847: 0x2000, 0x848: 0x2000, 0x849: 0x2000, 0x84a: 0x2000, 0x84b: 0x2000, + 0x850: 0x2000, 0x851: 0x2000, + 0x852: 0x2000, 0x853: 0x2000, 0x854: 0x2000, 0x855: 0x2000, 0x856: 0x2000, 0x857: 0x2000, + 0x858: 0x2000, 0x859: 0x2000, 0x85a: 0x2000, 0x85b: 0x2000, 0x85c: 0x2000, 0x85d: 0x2000, + 0x85e: 0x2000, 0x85f: 0x2000, 0x860: 0x2000, 0x861: 0x2000, 0x862: 0x2000, 0x863: 0x2000, + 0x864: 0x2000, 0x865: 0x2000, 0x866: 0x2000, 0x867: 0x2000, 0x868: 0x2000, 0x869: 0x2000, + 0x86a: 0x2000, 0x86b: 0x2000, 0x86c: 0x2000, 0x86d: 0x2000, 0x86e: 0x2000, 0x86f: 0x2000, + 0x870: 0x2000, 0x871: 0x2000, 0x872: 0x2000, 0x873: 0x2000, + // Block 0x22, offset 0x880 + 0x880: 0x2000, 0x881: 0x2000, 0x882: 0x2000, 0x883: 0x2000, 0x884: 0x2000, 0x885: 0x2000, + 0x886: 0x2000, 0x887: 0x2000, 0x888: 0x2000, 0x889: 0x2000, 0x88a: 0x2000, 0x88b: 0x2000, + 0x88c: 0x2000, 0x88d: 0x2000, 0x88e: 0x2000, 0x88f: 0x2000, + 0x892: 0x2000, 0x893: 0x2000, 0x894: 0x2000, 0x895: 0x2000, + 0x8a0: 0x200e, 0x8a1: 0x2000, 0x8a3: 0x2000, + 0x8a4: 0x2000, 0x8a5: 0x2000, 0x8a6: 0x2000, 0x8a7: 0x2000, 0x8a8: 0x2000, 0x8a9: 0x2000, + 0x8b2: 0x2000, 0x8b3: 0x2000, + 0x8b6: 0x2000, 0x8b7: 0x2000, + 0x8bc: 0x2000, 0x8bd: 0x2000, + // Block 0x23, offset 0x8c0 + 0x8c0: 0x2000, 0x8c1: 0x2000, + 0x8c6: 0x2000, 0x8c7: 0x2000, 0x8c8: 0x2000, 0x8cb: 0x200f, + 0x8ce: 0x2000, 0x8cf: 0x2000, 0x8d0: 0x2000, 0x8d1: 0x2000, + 0x8e2: 0x2000, 0x8e3: 0x2000, + 0x8e4: 0x2000, 0x8e5: 0x2000, + 0x8ef: 0x2000, + 0x8fd: 0x4000, 0x8fe: 0x4000, + // Block 0x24, offset 0x900 + 0x905: 0x2000, + 0x906: 0x2000, 0x909: 0x2000, + 0x90e: 0x2000, 0x90f: 0x2000, + 0x914: 0x4000, 0x915: 0x4000, + 0x91c: 0x2000, + 0x91e: 0x2000, + // Block 0x25, offset 0x940 + 0x940: 0x2000, 0x942: 0x2000, + 0x948: 0x4000, 0x949: 0x4000, 0x94a: 0x4000, 0x94b: 0x4000, + 0x94c: 0x4000, 0x94d: 0x4000, 0x94e: 0x4000, 0x94f: 0x4000, 0x950: 0x4000, 0x951: 0x4000, + 0x952: 0x4000, 0x953: 0x4000, + 0x960: 0x2000, 0x961: 0x2000, 0x963: 0x2000, + 0x964: 0x2000, 0x965: 0x2000, 0x967: 0x2000, 0x968: 0x2000, 0x969: 0x2000, + 0x96a: 0x2000, 0x96c: 0x2000, 0x96d: 0x2000, 0x96f: 0x2000, + 0x97f: 0x4000, + // Block 0x26, offset 0x980 + 0x993: 0x4000, + 0x99e: 0x2000, 0x99f: 0x2000, 0x9a1: 0x4000, + 0x9aa: 0x4000, 0x9ab: 0x4000, + 0x9bd: 0x4000, 0x9be: 0x4000, 0x9bf: 0x2000, + // Block 0x27, offset 0x9c0 + 0x9c4: 0x4000, 0x9c5: 0x4000, + 0x9c6: 0x2000, 0x9c7: 0x2000, 0x9c8: 0x2000, 0x9c9: 0x2000, 0x9ca: 0x2000, 0x9cb: 0x2000, + 0x9cc: 0x2000, 0x9cd: 0x2000, 0x9ce: 0x4000, 0x9cf: 0x2000, 0x9d0: 0x2000, 0x9d1: 0x2000, + 0x9d2: 0x2000, 0x9d3: 0x2000, 0x9d4: 0x4000, 0x9d5: 0x2000, 0x9d6: 0x2000, 0x9d7: 0x2000, + 0x9d8: 0x2000, 0x9d9: 0x2000, 0x9da: 0x2000, 0x9db: 0x2000, 0x9dc: 0x2000, 0x9dd: 0x2000, + 0x9de: 0x2000, 0x9df: 0x2000, 0x9e0: 0x2000, 0x9e1: 0x2000, 0x9e3: 0x2000, + 0x9e8: 0x2000, 0x9e9: 0x2000, + 0x9ea: 0x4000, 0x9eb: 0x2000, 0x9ec: 0x2000, 0x9ed: 0x2000, 0x9ee: 0x2000, 0x9ef: 0x2000, + 0x9f0: 0x2000, 0x9f1: 0x2000, 0x9f2: 0x4000, 0x9f3: 0x4000, 0x9f4: 0x2000, 0x9f5: 0x4000, + 0x9f6: 0x2000, 0x9f7: 0x2000, 0x9f8: 0x2000, 0x9f9: 
0x2000, 0x9fa: 0x4000, 0x9fb: 0x2000, + 0x9fc: 0x2000, 0x9fd: 0x4000, 0x9fe: 0x2000, 0x9ff: 0x2000, + // Block 0x28, offset 0xa00 + 0xa05: 0x4000, + 0xa0a: 0x4000, 0xa0b: 0x4000, + 0xa28: 0x4000, + 0xa3d: 0x2000, + // Block 0x29, offset 0xa40 + 0xa4c: 0x4000, 0xa4e: 0x4000, + 0xa53: 0x4000, 0xa54: 0x4000, 0xa55: 0x4000, 0xa57: 0x4000, + 0xa76: 0x2000, 0xa77: 0x2000, 0xa78: 0x2000, 0xa79: 0x2000, 0xa7a: 0x2000, 0xa7b: 0x2000, + 0xa7c: 0x2000, 0xa7d: 0x2000, 0xa7e: 0x2000, 0xa7f: 0x2000, + // Block 0x2a, offset 0xa80 + 0xa95: 0x4000, 0xa96: 0x4000, 0xa97: 0x4000, + 0xab0: 0x4000, + 0xabf: 0x4000, + // Block 0x2b, offset 0xac0 + 0xae6: 0x6000, 0xae7: 0x6000, 0xae8: 0x6000, 0xae9: 0x6000, + 0xaea: 0x6000, 0xaeb: 0x6000, 0xaec: 0x6000, 0xaed: 0x6000, + // Block 0x2c, offset 0xb00 + 0xb05: 0x6010, + 0xb06: 0x6011, + // Block 0x2d, offset 0xb40 + 0xb5b: 0x4000, 0xb5c: 0x4000, + // Block 0x2e, offset 0xb80 + 0xb90: 0x4000, + 0xb95: 0x4000, 0xb96: 0x2000, 0xb97: 0x2000, + 0xb98: 0x2000, 0xb99: 0x2000, + // Block 0x2f, offset 0xbc0 + 0xbc0: 0x4000, 0xbc1: 0x4000, 0xbc2: 0x4000, 0xbc3: 0x4000, 0xbc4: 0x4000, 0xbc5: 0x4000, + 0xbc6: 0x4000, 0xbc7: 0x4000, 0xbc8: 0x4000, 0xbc9: 0x4000, 0xbca: 0x4000, 0xbcb: 0x4000, + 0xbcc: 0x4000, 0xbcd: 0x4000, 0xbce: 0x4000, 0xbcf: 0x4000, 0xbd0: 0x4000, 0xbd1: 0x4000, + 0xbd2: 0x4000, 0xbd3: 0x4000, 0xbd4: 0x4000, 0xbd5: 0x4000, 0xbd6: 0x4000, 0xbd7: 0x4000, + 0xbd8: 0x4000, 0xbd9: 0x4000, 0xbdb: 0x4000, 0xbdc: 0x4000, 0xbdd: 0x4000, + 0xbde: 0x4000, 0xbdf: 0x4000, 0xbe0: 0x4000, 0xbe1: 0x4000, 0xbe2: 0x4000, 0xbe3: 0x4000, + 0xbe4: 0x4000, 0xbe5: 0x4000, 0xbe6: 0x4000, 0xbe7: 0x4000, 0xbe8: 0x4000, 0xbe9: 0x4000, + 0xbea: 0x4000, 0xbeb: 0x4000, 0xbec: 0x4000, 0xbed: 0x4000, 0xbee: 0x4000, 0xbef: 0x4000, + 0xbf0: 0x4000, 0xbf1: 0x4000, 0xbf2: 0x4000, 0xbf3: 0x4000, 0xbf4: 0x4000, 0xbf5: 0x4000, + 0xbf6: 0x4000, 0xbf7: 0x4000, 0xbf8: 0x4000, 0xbf9: 0x4000, 0xbfa: 0x4000, 0xbfb: 0x4000, + 0xbfc: 0x4000, 0xbfd: 0x4000, 0xbfe: 0x4000, 0xbff: 0x4000, + // Block 0x30, offset 0xc00 + 0xc00: 0x4000, 0xc01: 0x4000, 0xc02: 0x4000, 0xc03: 0x4000, 0xc04: 0x4000, 0xc05: 0x4000, + 0xc06: 0x4000, 0xc07: 0x4000, 0xc08: 0x4000, 0xc09: 0x4000, 0xc0a: 0x4000, 0xc0b: 0x4000, + 0xc0c: 0x4000, 0xc0d: 0x4000, 0xc0e: 0x4000, 0xc0f: 0x4000, 0xc10: 0x4000, 0xc11: 0x4000, + 0xc12: 0x4000, 0xc13: 0x4000, 0xc14: 0x4000, 0xc15: 0x4000, 0xc16: 0x4000, 0xc17: 0x4000, + 0xc18: 0x4000, 0xc19: 0x4000, 0xc1a: 0x4000, 0xc1b: 0x4000, 0xc1c: 0x4000, 0xc1d: 0x4000, + 0xc1e: 0x4000, 0xc1f: 0x4000, 0xc20: 0x4000, 0xc21: 0x4000, 0xc22: 0x4000, 0xc23: 0x4000, + 0xc24: 0x4000, 0xc25: 0x4000, 0xc26: 0x4000, 0xc27: 0x4000, 0xc28: 0x4000, 0xc29: 0x4000, + 0xc2a: 0x4000, 0xc2b: 0x4000, 0xc2c: 0x4000, 0xc2d: 0x4000, 0xc2e: 0x4000, 0xc2f: 0x4000, + 0xc30: 0x4000, 0xc31: 0x4000, 0xc32: 0x4000, 0xc33: 0x4000, + // Block 0x31, offset 0xc40 + 0xc40: 0x4000, 0xc41: 0x4000, 0xc42: 0x4000, 0xc43: 0x4000, 0xc44: 0x4000, 0xc45: 0x4000, + 0xc46: 0x4000, 0xc47: 0x4000, 0xc48: 0x4000, 0xc49: 0x4000, 0xc4a: 0x4000, 0xc4b: 0x4000, + 0xc4c: 0x4000, 0xc4d: 0x4000, 0xc4e: 0x4000, 0xc4f: 0x4000, 0xc50: 0x4000, 0xc51: 0x4000, + 0xc52: 0x4000, 0xc53: 0x4000, 0xc54: 0x4000, 0xc55: 0x4000, + 0xc70: 0x4000, 0xc71: 0x4000, 0xc72: 0x4000, 0xc73: 0x4000, 0xc74: 0x4000, 0xc75: 0x4000, + 0xc76: 0x4000, 0xc77: 0x4000, 0xc78: 0x4000, 0xc79: 0x4000, 0xc7a: 0x4000, 0xc7b: 0x4000, + // Block 0x32, offset 0xc80 + 0xc80: 0x9012, 0xc81: 0x4013, 0xc82: 0x4014, 0xc83: 0x4000, 0xc84: 0x4000, 0xc85: 0x4000, + 0xc86: 0x4000, 0xc87: 0x4000, 0xc88: 0x4000, 
0xc89: 0x4000, 0xc8a: 0x4000, 0xc8b: 0x4000, + 0xc8c: 0x4015, 0xc8d: 0x4015, 0xc8e: 0x4000, 0xc8f: 0x4000, 0xc90: 0x4000, 0xc91: 0x4000, + 0xc92: 0x4000, 0xc93: 0x4000, 0xc94: 0x4000, 0xc95: 0x4000, 0xc96: 0x4000, 0xc97: 0x4000, + 0xc98: 0x4000, 0xc99: 0x4000, 0xc9a: 0x4000, 0xc9b: 0x4000, 0xc9c: 0x4000, 0xc9d: 0x4000, + 0xc9e: 0x4000, 0xc9f: 0x4000, 0xca0: 0x4000, 0xca1: 0x4000, 0xca2: 0x4000, 0xca3: 0x4000, + 0xca4: 0x4000, 0xca5: 0x4000, 0xca6: 0x4000, 0xca7: 0x4000, 0xca8: 0x4000, 0xca9: 0x4000, + 0xcaa: 0x4000, 0xcab: 0x4000, 0xcac: 0x4000, 0xcad: 0x4000, 0xcae: 0x4000, 0xcaf: 0x4000, + 0xcb0: 0x4000, 0xcb1: 0x4000, 0xcb2: 0x4000, 0xcb3: 0x4000, 0xcb4: 0x4000, 0xcb5: 0x4000, + 0xcb6: 0x4000, 0xcb7: 0x4000, 0xcb8: 0x4000, 0xcb9: 0x4000, 0xcba: 0x4000, 0xcbb: 0x4000, + 0xcbc: 0x4000, 0xcbd: 0x4000, 0xcbe: 0x4000, + // Block 0x33, offset 0xcc0 + 0xcc1: 0x4000, 0xcc2: 0x4000, 0xcc3: 0x4000, 0xcc4: 0x4000, 0xcc5: 0x4000, + 0xcc6: 0x4000, 0xcc7: 0x4000, 0xcc8: 0x4000, 0xcc9: 0x4000, 0xcca: 0x4000, 0xccb: 0x4000, + 0xccc: 0x4000, 0xccd: 0x4000, 0xcce: 0x4000, 0xccf: 0x4000, 0xcd0: 0x4000, 0xcd1: 0x4000, + 0xcd2: 0x4000, 0xcd3: 0x4000, 0xcd4: 0x4000, 0xcd5: 0x4000, 0xcd6: 0x4000, 0xcd7: 0x4000, + 0xcd8: 0x4000, 0xcd9: 0x4000, 0xcda: 0x4000, 0xcdb: 0x4000, 0xcdc: 0x4000, 0xcdd: 0x4000, + 0xcde: 0x4000, 0xcdf: 0x4000, 0xce0: 0x4000, 0xce1: 0x4000, 0xce2: 0x4000, 0xce3: 0x4000, + 0xce4: 0x4000, 0xce5: 0x4000, 0xce6: 0x4000, 0xce7: 0x4000, 0xce8: 0x4000, 0xce9: 0x4000, + 0xcea: 0x4000, 0xceb: 0x4000, 0xcec: 0x4000, 0xced: 0x4000, 0xcee: 0x4000, 0xcef: 0x4000, + 0xcf0: 0x4000, 0xcf1: 0x4000, 0xcf2: 0x4000, 0xcf3: 0x4000, 0xcf4: 0x4000, 0xcf5: 0x4000, + 0xcf6: 0x4000, 0xcf7: 0x4000, 0xcf8: 0x4000, 0xcf9: 0x4000, 0xcfa: 0x4000, 0xcfb: 0x4000, + 0xcfc: 0x4000, 0xcfd: 0x4000, 0xcfe: 0x4000, 0xcff: 0x4000, + // Block 0x34, offset 0xd00 + 0xd00: 0x4000, 0xd01: 0x4000, 0xd02: 0x4000, 0xd03: 0x4000, 0xd04: 0x4000, 0xd05: 0x4000, + 0xd06: 0x4000, 0xd07: 0x4000, 0xd08: 0x4000, 0xd09: 0x4000, 0xd0a: 0x4000, 0xd0b: 0x4000, + 0xd0c: 0x4000, 0xd0d: 0x4000, 0xd0e: 0x4000, 0xd0f: 0x4000, 0xd10: 0x4000, 0xd11: 0x4000, + 0xd12: 0x4000, 0xd13: 0x4000, 0xd14: 0x4000, 0xd15: 0x4000, 0xd16: 0x4000, + 0xd19: 0x4016, 0xd1a: 0x4017, 0xd1b: 0x4000, 0xd1c: 0x4000, 0xd1d: 0x4000, + 0xd1e: 0x4000, 0xd1f: 0x4000, 0xd20: 0x4000, 0xd21: 0x4018, 0xd22: 0x4019, 0xd23: 0x401a, + 0xd24: 0x401b, 0xd25: 0x401c, 0xd26: 0x401d, 0xd27: 0x401e, 0xd28: 0x401f, 0xd29: 0x4020, + 0xd2a: 0x4021, 0xd2b: 0x4022, 0xd2c: 0x4000, 0xd2d: 0x4010, 0xd2e: 0x4000, 0xd2f: 0x4023, + 0xd30: 0x4000, 0xd31: 0x4024, 0xd32: 0x4000, 0xd33: 0x4025, 0xd34: 0x4000, 0xd35: 0x4026, + 0xd36: 0x4000, 0xd37: 0x401a, 0xd38: 0x4000, 0xd39: 0x4027, 0xd3a: 0x4000, 0xd3b: 0x4028, + 0xd3c: 0x4000, 0xd3d: 0x4020, 0xd3e: 0x4000, 0xd3f: 0x4029, + // Block 0x35, offset 0xd40 + 0xd40: 0x4000, 0xd41: 0x402a, 0xd42: 0x4000, 0xd43: 0x402b, 0xd44: 0x402c, 0xd45: 0x4000, + 0xd46: 0x4017, 0xd47: 0x4000, 0xd48: 0x402d, 0xd49: 0x4000, 0xd4a: 0x402e, 0xd4b: 0x402f, + 0xd4c: 0x4030, 0xd4d: 0x4017, 0xd4e: 0x4016, 0xd4f: 0x4017, 0xd50: 0x4000, 0xd51: 0x4000, + 0xd52: 0x4031, 0xd53: 0x4000, 0xd54: 0x4000, 0xd55: 0x4031, 0xd56: 0x4000, 0xd57: 0x4000, + 0xd58: 0x4032, 0xd59: 0x4000, 0xd5a: 0x4000, 0xd5b: 0x4032, 0xd5c: 0x4000, 0xd5d: 0x4000, + 0xd5e: 0x4033, 0xd5f: 0x402e, 0xd60: 0x4034, 0xd61: 0x4035, 0xd62: 0x4034, 0xd63: 0x4036, + 0xd64: 0x4037, 0xd65: 0x4024, 0xd66: 0x4035, 0xd67: 0x4025, 0xd68: 0x4038, 0xd69: 0x4038, + 0xd6a: 0x4039, 0xd6b: 0x4039, 0xd6c: 0x403a, 0xd6d: 0x403a, 0xd6e: 
0x4000, 0xd6f: 0x4035, + 0xd70: 0x4000, 0xd71: 0x4000, 0xd72: 0x403b, 0xd73: 0x403c, 0xd74: 0x4000, 0xd75: 0x4000, + 0xd76: 0x4000, 0xd77: 0x4000, 0xd78: 0x4000, 0xd79: 0x4000, 0xd7a: 0x4000, 0xd7b: 0x403d, + 0xd7c: 0x401c, 0xd7d: 0x4000, 0xd7e: 0x4000, 0xd7f: 0x4000, + // Block 0x36, offset 0xd80 + 0xd85: 0x4000, + 0xd86: 0x4000, 0xd87: 0x4000, 0xd88: 0x4000, 0xd89: 0x4000, 0xd8a: 0x4000, 0xd8b: 0x4000, + 0xd8c: 0x4000, 0xd8d: 0x4000, 0xd8e: 0x4000, 0xd8f: 0x4000, 0xd90: 0x4000, 0xd91: 0x4000, + 0xd92: 0x4000, 0xd93: 0x4000, 0xd94: 0x4000, 0xd95: 0x4000, 0xd96: 0x4000, 0xd97: 0x4000, + 0xd98: 0x4000, 0xd99: 0x4000, 0xd9a: 0x4000, 0xd9b: 0x4000, 0xd9c: 0x4000, 0xd9d: 0x4000, + 0xd9e: 0x4000, 0xd9f: 0x4000, 0xda0: 0x4000, 0xda1: 0x4000, 0xda2: 0x4000, 0xda3: 0x4000, + 0xda4: 0x4000, 0xda5: 0x4000, 0xda6: 0x4000, 0xda7: 0x4000, 0xda8: 0x4000, 0xda9: 0x4000, + 0xdaa: 0x4000, 0xdab: 0x4000, 0xdac: 0x4000, 0xdad: 0x4000, 0xdae: 0x4000, 0xdaf: 0x4000, + 0xdb1: 0x403e, 0xdb2: 0x403e, 0xdb3: 0x403e, 0xdb4: 0x403e, 0xdb5: 0x403e, + 0xdb6: 0x403e, 0xdb7: 0x403e, 0xdb8: 0x403e, 0xdb9: 0x403e, 0xdba: 0x403e, 0xdbb: 0x403e, + 0xdbc: 0x403e, 0xdbd: 0x403e, 0xdbe: 0x403e, 0xdbf: 0x403e, + // Block 0x37, offset 0xdc0 + 0xdc0: 0x4037, 0xdc1: 0x4037, 0xdc2: 0x4037, 0xdc3: 0x4037, 0xdc4: 0x4037, 0xdc5: 0x4037, + 0xdc6: 0x4037, 0xdc7: 0x4037, 0xdc8: 0x4037, 0xdc9: 0x4037, 0xdca: 0x4037, 0xdcb: 0x4037, + 0xdcc: 0x4037, 0xdcd: 0x4037, 0xdce: 0x4037, 0xdcf: 0x400e, 0xdd0: 0x403f, 0xdd1: 0x4040, + 0xdd2: 0x4041, 0xdd3: 0x4040, 0xdd4: 0x403f, 0xdd5: 0x4042, 0xdd6: 0x4043, 0xdd7: 0x4044, + 0xdd8: 0x4040, 0xdd9: 0x4041, 0xdda: 0x4040, 0xddb: 0x4045, 0xddc: 0x4009, 0xddd: 0x4045, + 0xdde: 0x4046, 0xddf: 0x4045, 0xde0: 0x4047, 0xde1: 0x400b, 0xde2: 0x400a, 0xde3: 0x400c, + 0xde4: 0x4048, 0xde5: 0x4000, 0xde6: 0x4000, 0xde7: 0x4000, 0xde8: 0x4000, 0xde9: 0x4000, + 0xdea: 0x4000, 0xdeb: 0x4000, 0xdec: 0x4000, 0xded: 0x4000, 0xdee: 0x4000, 0xdef: 0x4000, + 0xdf0: 0x4000, 0xdf1: 0x4000, 0xdf2: 0x4000, 0xdf3: 0x4000, 0xdf4: 0x4000, 0xdf5: 0x4000, + 0xdf6: 0x4000, 0xdf7: 0x4000, 0xdf8: 0x4000, 0xdf9: 0x4000, 0xdfa: 0x4000, 0xdfb: 0x4000, + 0xdfc: 0x4000, 0xdfd: 0x4000, 0xdfe: 0x4000, 0xdff: 0x4000, + // Block 0x38, offset 0xe00 + 0xe00: 0x4000, 0xe01: 0x4000, 0xe02: 0x4000, 0xe03: 0x4000, 0xe04: 0x4000, 0xe05: 0x4000, + 0xe06: 0x4000, 0xe07: 0x4000, 0xe08: 0x4000, 0xe09: 0x4000, 0xe0a: 0x4000, 0xe0b: 0x4000, + 0xe0c: 0x4000, 0xe0d: 0x4000, 0xe0e: 0x4000, 0xe10: 0x4000, 0xe11: 0x4000, + 0xe12: 0x4000, 0xe13: 0x4000, 0xe14: 0x4000, 0xe15: 0x4000, 0xe16: 0x4000, 0xe17: 0x4000, + 0xe18: 0x4000, 0xe19: 0x4000, 0xe1a: 0x4000, 0xe1b: 0x4000, 0xe1c: 0x4000, 0xe1d: 0x4000, + 0xe1e: 0x4000, 0xe1f: 0x4000, 0xe20: 0x4000, 0xe21: 0x4000, 0xe22: 0x4000, 0xe23: 0x4000, + 0xe24: 0x4000, 0xe25: 0x4000, 0xe26: 0x4000, 0xe27: 0x4000, 0xe28: 0x4000, 0xe29: 0x4000, + 0xe2a: 0x4000, 0xe2b: 0x4000, 0xe2c: 0x4000, 0xe2d: 0x4000, 0xe2e: 0x4000, 0xe2f: 0x4000, + 0xe30: 0x4000, 0xe31: 0x4000, 0xe32: 0x4000, 0xe33: 0x4000, 0xe34: 0x4000, 0xe35: 0x4000, + 0xe36: 0x4000, 0xe37: 0x4000, 0xe38: 0x4000, 0xe39: 0x4000, 0xe3a: 0x4000, + // Block 0x39, offset 0xe40 + 0xe40: 0x4000, 0xe41: 0x4000, 0xe42: 0x4000, 0xe43: 0x4000, 0xe44: 0x4000, 0xe45: 0x4000, + 0xe46: 0x4000, 0xe47: 0x4000, 0xe48: 0x4000, 0xe49: 0x4000, 0xe4a: 0x4000, 0xe4b: 0x4000, + 0xe4c: 0x4000, 0xe4d: 0x4000, 0xe4e: 0x4000, 0xe4f: 0x4000, 0xe50: 0x4000, 0xe51: 0x4000, + 0xe52: 0x4000, 0xe53: 0x4000, 0xe54: 0x4000, 0xe55: 0x4000, 0xe56: 0x4000, 0xe57: 0x4000, + 0xe58: 0x4000, 0xe59: 0x4000, 
0xe5a: 0x4000, 0xe5b: 0x4000, 0xe5c: 0x4000, 0xe5d: 0x4000, + 0xe5e: 0x4000, 0xe5f: 0x4000, 0xe60: 0x4000, 0xe61: 0x4000, 0xe62: 0x4000, 0xe63: 0x4000, + 0xe70: 0x4000, 0xe71: 0x4000, 0xe72: 0x4000, 0xe73: 0x4000, 0xe74: 0x4000, 0xe75: 0x4000, + 0xe76: 0x4000, 0xe77: 0x4000, 0xe78: 0x4000, 0xe79: 0x4000, 0xe7a: 0x4000, 0xe7b: 0x4000, + 0xe7c: 0x4000, 0xe7d: 0x4000, 0xe7e: 0x4000, 0xe7f: 0x4000, + // Block 0x3a, offset 0xe80 + 0xe80: 0x4000, 0xe81: 0x4000, 0xe82: 0x4000, 0xe83: 0x4000, 0xe84: 0x4000, 0xe85: 0x4000, + 0xe86: 0x4000, 0xe87: 0x4000, 0xe88: 0x4000, 0xe89: 0x4000, 0xe8a: 0x4000, 0xe8b: 0x4000, + 0xe8c: 0x4000, 0xe8d: 0x4000, 0xe8e: 0x4000, 0xe8f: 0x4000, 0xe90: 0x4000, 0xe91: 0x4000, + 0xe92: 0x4000, 0xe93: 0x4000, 0xe94: 0x4000, 0xe95: 0x4000, 0xe96: 0x4000, 0xe97: 0x4000, + 0xe98: 0x4000, 0xe99: 0x4000, 0xe9a: 0x4000, 0xe9b: 0x4000, 0xe9c: 0x4000, 0xe9d: 0x4000, + 0xe9e: 0x4000, 0xea0: 0x4000, 0xea1: 0x4000, 0xea2: 0x4000, 0xea3: 0x4000, + 0xea4: 0x4000, 0xea5: 0x4000, 0xea6: 0x4000, 0xea7: 0x4000, 0xea8: 0x4000, 0xea9: 0x4000, + 0xeaa: 0x4000, 0xeab: 0x4000, 0xeac: 0x4000, 0xead: 0x4000, 0xeae: 0x4000, 0xeaf: 0x4000, + 0xeb0: 0x4000, 0xeb1: 0x4000, 0xeb2: 0x4000, 0xeb3: 0x4000, 0xeb4: 0x4000, 0xeb5: 0x4000, + 0xeb6: 0x4000, 0xeb7: 0x4000, 0xeb8: 0x4000, 0xeb9: 0x4000, 0xeba: 0x4000, 0xebb: 0x4000, + 0xebc: 0x4000, 0xebd: 0x4000, 0xebe: 0x4000, 0xebf: 0x4000, + // Block 0x3b, offset 0xec0 + 0xec0: 0x4000, 0xec1: 0x4000, 0xec2: 0x4000, 0xec3: 0x4000, 0xec4: 0x4000, 0xec5: 0x4000, + 0xec6: 0x4000, 0xec7: 0x4000, 0xec8: 0x2000, 0xec9: 0x2000, 0xeca: 0x2000, 0xecb: 0x2000, + 0xecc: 0x2000, 0xecd: 0x2000, 0xece: 0x2000, 0xecf: 0x2000, 0xed0: 0x4000, 0xed1: 0x4000, + 0xed2: 0x4000, 0xed3: 0x4000, 0xed4: 0x4000, 0xed5: 0x4000, 0xed6: 0x4000, 0xed7: 0x4000, + 0xed8: 0x4000, 0xed9: 0x4000, 0xeda: 0x4000, 0xedb: 0x4000, 0xedc: 0x4000, 0xedd: 0x4000, + 0xede: 0x4000, 0xedf: 0x4000, 0xee0: 0x4000, 0xee1: 0x4000, 0xee2: 0x4000, 0xee3: 0x4000, + 0xee4: 0x4000, 0xee5: 0x4000, 0xee6: 0x4000, 0xee7: 0x4000, 0xee8: 0x4000, 0xee9: 0x4000, + 0xeea: 0x4000, 0xeeb: 0x4000, 0xeec: 0x4000, 0xeed: 0x4000, 0xeee: 0x4000, 0xeef: 0x4000, + 0xef0: 0x4000, 0xef1: 0x4000, 0xef2: 0x4000, 0xef3: 0x4000, 0xef4: 0x4000, 0xef5: 0x4000, + 0xef6: 0x4000, 0xef7: 0x4000, 0xef8: 0x4000, 0xef9: 0x4000, 0xefa: 0x4000, 0xefb: 0x4000, + 0xefc: 0x4000, 0xefd: 0x4000, 0xefe: 0x4000, 0xeff: 0x4000, + // Block 0x3c, offset 0xf00 + 0xf00: 0x4000, 0xf01: 0x4000, 0xf02: 0x4000, 0xf03: 0x4000, 0xf04: 0x4000, 0xf05: 0x4000, + 0xf06: 0x4000, 0xf07: 0x4000, 0xf08: 0x4000, 0xf09: 0x4000, 0xf0a: 0x4000, 0xf0b: 0x4000, + 0xf0c: 0x4000, 0xf0d: 0x4000, 0xf0e: 0x4000, 0xf0f: 0x4000, 0xf10: 0x4000, 0xf11: 0x4000, + 0xf12: 0x4000, 0xf13: 0x4000, 0xf14: 0x4000, 0xf15: 0x4000, 0xf16: 0x4000, 0xf17: 0x4000, + 0xf18: 0x4000, 0xf19: 0x4000, 0xf1a: 0x4000, 0xf1b: 0x4000, 0xf1c: 0x4000, 0xf1d: 0x4000, + 0xf1e: 0x4000, 0xf1f: 0x4000, 0xf20: 0x4000, 0xf21: 0x4000, 0xf22: 0x4000, 0xf23: 0x4000, + 0xf24: 0x4000, 0xf25: 0x4000, 0xf26: 0x4000, 0xf27: 0x4000, 0xf28: 0x4000, 0xf29: 0x4000, + 0xf2a: 0x4000, 0xf2b: 0x4000, 0xf2c: 0x4000, 0xf2d: 0x4000, 0xf2e: 0x4000, 0xf2f: 0x4000, + 0xf30: 0x4000, 0xf31: 0x4000, 0xf32: 0x4000, 0xf33: 0x4000, 0xf34: 0x4000, 0xf35: 0x4000, + 0xf36: 0x4000, 0xf37: 0x4000, 0xf38: 0x4000, 0xf39: 0x4000, 0xf3a: 0x4000, 0xf3b: 0x4000, + 0xf3c: 0x4000, 0xf3d: 0x4000, 0xf3e: 0x4000, + // Block 0x3d, offset 0xf40 + 0xf40: 0x4000, 0xf41: 0x4000, 0xf42: 0x4000, 0xf43: 0x4000, 0xf44: 0x4000, 0xf45: 0x4000, + 0xf46: 0x4000, 0xf47: 
0x4000, 0xf48: 0x4000, 0xf49: 0x4000, 0xf4a: 0x4000, 0xf4b: 0x4000, + 0xf4c: 0x4000, 0xf50: 0x4000, 0xf51: 0x4000, + 0xf52: 0x4000, 0xf53: 0x4000, 0xf54: 0x4000, 0xf55: 0x4000, 0xf56: 0x4000, 0xf57: 0x4000, + 0xf58: 0x4000, 0xf59: 0x4000, 0xf5a: 0x4000, 0xf5b: 0x4000, 0xf5c: 0x4000, 0xf5d: 0x4000, + 0xf5e: 0x4000, 0xf5f: 0x4000, 0xf60: 0x4000, 0xf61: 0x4000, 0xf62: 0x4000, 0xf63: 0x4000, + 0xf64: 0x4000, 0xf65: 0x4000, 0xf66: 0x4000, 0xf67: 0x4000, 0xf68: 0x4000, 0xf69: 0x4000, + 0xf6a: 0x4000, 0xf6b: 0x4000, 0xf6c: 0x4000, 0xf6d: 0x4000, 0xf6e: 0x4000, 0xf6f: 0x4000, + 0xf70: 0x4000, 0xf71: 0x4000, 0xf72: 0x4000, 0xf73: 0x4000, 0xf74: 0x4000, 0xf75: 0x4000, + 0xf76: 0x4000, 0xf77: 0x4000, 0xf78: 0x4000, 0xf79: 0x4000, 0xf7a: 0x4000, 0xf7b: 0x4000, + 0xf7c: 0x4000, 0xf7d: 0x4000, 0xf7e: 0x4000, 0xf7f: 0x4000, + // Block 0x3e, offset 0xf80 + 0xf80: 0x4000, 0xf81: 0x4000, 0xf82: 0x4000, 0xf83: 0x4000, 0xf84: 0x4000, 0xf85: 0x4000, + 0xf86: 0x4000, + // Block 0x3f, offset 0xfc0 + 0xfe0: 0x4000, 0xfe1: 0x4000, 0xfe2: 0x4000, 0xfe3: 0x4000, + 0xfe4: 0x4000, 0xfe5: 0x4000, 0xfe6: 0x4000, 0xfe7: 0x4000, 0xfe8: 0x4000, 0xfe9: 0x4000, + 0xfea: 0x4000, 0xfeb: 0x4000, 0xfec: 0x4000, 0xfed: 0x4000, 0xfee: 0x4000, 0xfef: 0x4000, + 0xff0: 0x4000, 0xff1: 0x4000, 0xff2: 0x4000, 0xff3: 0x4000, 0xff4: 0x4000, 0xff5: 0x4000, + 0xff6: 0x4000, 0xff7: 0x4000, 0xff8: 0x4000, 0xff9: 0x4000, 0xffa: 0x4000, 0xffb: 0x4000, + 0xffc: 0x4000, + // Block 0x40, offset 0x1000 + 0x1000: 0x4000, 0x1001: 0x4000, 0x1002: 0x4000, 0x1003: 0x4000, 0x1004: 0x4000, 0x1005: 0x4000, + 0x1006: 0x4000, 0x1007: 0x4000, 0x1008: 0x4000, 0x1009: 0x4000, 0x100a: 0x4000, 0x100b: 0x4000, + 0x100c: 0x4000, 0x100d: 0x4000, 0x100e: 0x4000, 0x100f: 0x4000, 0x1010: 0x4000, 0x1011: 0x4000, + 0x1012: 0x4000, 0x1013: 0x4000, 0x1014: 0x4000, 0x1015: 0x4000, 0x1016: 0x4000, 0x1017: 0x4000, + 0x1018: 0x4000, 0x1019: 0x4000, 0x101a: 0x4000, 0x101b: 0x4000, 0x101c: 0x4000, 0x101d: 0x4000, + 0x101e: 0x4000, 0x101f: 0x4000, 0x1020: 0x4000, 0x1021: 0x4000, 0x1022: 0x4000, 0x1023: 0x4000, + // Block 0x41, offset 0x1040 + 0x1040: 0x2000, 0x1041: 0x2000, 0x1042: 0x2000, 0x1043: 0x2000, 0x1044: 0x2000, 0x1045: 0x2000, + 0x1046: 0x2000, 0x1047: 0x2000, 0x1048: 0x2000, 0x1049: 0x2000, 0x104a: 0x2000, 0x104b: 0x2000, + 0x104c: 0x2000, 0x104d: 0x2000, 0x104e: 0x2000, 0x104f: 0x2000, 0x1050: 0x4000, 0x1051: 0x4000, + 0x1052: 0x4000, 0x1053: 0x4000, 0x1054: 0x4000, 0x1055: 0x4000, 0x1056: 0x4000, 0x1057: 0x4000, + 0x1058: 0x4000, 0x1059: 0x4000, + 0x1070: 0x4000, 0x1071: 0x4000, 0x1072: 0x4000, 0x1073: 0x4000, 0x1074: 0x4000, 0x1075: 0x4000, + 0x1076: 0x4000, 0x1077: 0x4000, 0x1078: 0x4000, 0x1079: 0x4000, 0x107a: 0x4000, 0x107b: 0x4000, + 0x107c: 0x4000, 0x107d: 0x4000, 0x107e: 0x4000, 0x107f: 0x4000, + // Block 0x42, offset 0x1080 + 0x1080: 0x4000, 0x1081: 0x4000, 0x1082: 0x4000, 0x1083: 0x4000, 0x1084: 0x4000, 0x1085: 0x4000, + 0x1086: 0x4000, 0x1087: 0x4000, 0x1088: 0x4000, 0x1089: 0x4000, 0x108a: 0x4000, 0x108b: 0x4000, + 0x108c: 0x4000, 0x108d: 0x4000, 0x108e: 0x4000, 0x108f: 0x4000, 0x1090: 0x4000, 0x1091: 0x4000, + 0x1092: 0x4000, 0x1094: 0x4000, 0x1095: 0x4000, 0x1096: 0x4000, 0x1097: 0x4000, + 0x1098: 0x4000, 0x1099: 0x4000, 0x109a: 0x4000, 0x109b: 0x4000, 0x109c: 0x4000, 0x109d: 0x4000, + 0x109e: 0x4000, 0x109f: 0x4000, 0x10a0: 0x4000, 0x10a1: 0x4000, 0x10a2: 0x4000, 0x10a3: 0x4000, + 0x10a4: 0x4000, 0x10a5: 0x4000, 0x10a6: 0x4000, 0x10a8: 0x4000, 0x10a9: 0x4000, + 0x10aa: 0x4000, 0x10ab: 0x4000, + // Block 0x43, offset 0x10c0 + 0x10c1: 0x9012, 0x10c2: 
0x9012, 0x10c3: 0x9012, 0x10c4: 0x9012, 0x10c5: 0x9012, + 0x10c6: 0x9012, 0x10c7: 0x9012, 0x10c8: 0x9012, 0x10c9: 0x9012, 0x10ca: 0x9012, 0x10cb: 0x9012, + 0x10cc: 0x9012, 0x10cd: 0x9012, 0x10ce: 0x9012, 0x10cf: 0x9012, 0x10d0: 0x9012, 0x10d1: 0x9012, + 0x10d2: 0x9012, 0x10d3: 0x9012, 0x10d4: 0x9012, 0x10d5: 0x9012, 0x10d6: 0x9012, 0x10d7: 0x9012, + 0x10d8: 0x9012, 0x10d9: 0x9012, 0x10da: 0x9012, 0x10db: 0x9012, 0x10dc: 0x9012, 0x10dd: 0x9012, + 0x10de: 0x9012, 0x10df: 0x9012, 0x10e0: 0x9049, 0x10e1: 0x9049, 0x10e2: 0x9049, 0x10e3: 0x9049, + 0x10e4: 0x9049, 0x10e5: 0x9049, 0x10e6: 0x9049, 0x10e7: 0x9049, 0x10e8: 0x9049, 0x10e9: 0x9049, + 0x10ea: 0x9049, 0x10eb: 0x9049, 0x10ec: 0x9049, 0x10ed: 0x9049, 0x10ee: 0x9049, 0x10ef: 0x9049, + 0x10f0: 0x9049, 0x10f1: 0x9049, 0x10f2: 0x9049, 0x10f3: 0x9049, 0x10f4: 0x9049, 0x10f5: 0x9049, + 0x10f6: 0x9049, 0x10f7: 0x9049, 0x10f8: 0x9049, 0x10f9: 0x9049, 0x10fa: 0x9049, 0x10fb: 0x9049, + 0x10fc: 0x9049, 0x10fd: 0x9049, 0x10fe: 0x9049, 0x10ff: 0x9049, + // Block 0x44, offset 0x1100 + 0x1100: 0x9049, 0x1101: 0x9049, 0x1102: 0x9049, 0x1103: 0x9049, 0x1104: 0x9049, 0x1105: 0x9049, + 0x1106: 0x9049, 0x1107: 0x9049, 0x1108: 0x9049, 0x1109: 0x9049, 0x110a: 0x9049, 0x110b: 0x9049, + 0x110c: 0x9049, 0x110d: 0x9049, 0x110e: 0x9049, 0x110f: 0x9049, 0x1110: 0x9049, 0x1111: 0x9049, + 0x1112: 0x9049, 0x1113: 0x9049, 0x1114: 0x9049, 0x1115: 0x9049, 0x1116: 0x9049, 0x1117: 0x9049, + 0x1118: 0x9049, 0x1119: 0x9049, 0x111a: 0x9049, 0x111b: 0x9049, 0x111c: 0x9049, 0x111d: 0x9049, + 0x111e: 0x9049, 0x111f: 0x904a, 0x1120: 0x904b, 0x1121: 0xb04c, 0x1122: 0xb04d, 0x1123: 0xb04d, + 0x1124: 0xb04e, 0x1125: 0xb04f, 0x1126: 0xb050, 0x1127: 0xb051, 0x1128: 0xb052, 0x1129: 0xb053, + 0x112a: 0xb054, 0x112b: 0xb055, 0x112c: 0xb056, 0x112d: 0xb057, 0x112e: 0xb058, 0x112f: 0xb059, + 0x1130: 0xb05a, 0x1131: 0xb05b, 0x1132: 0xb05c, 0x1133: 0xb05d, 0x1134: 0xb05e, 0x1135: 0xb05f, + 0x1136: 0xb060, 0x1137: 0xb061, 0x1138: 0xb062, 0x1139: 0xb063, 0x113a: 0xb064, 0x113b: 0xb065, + 0x113c: 0xb052, 0x113d: 0xb066, 0x113e: 0xb067, 0x113f: 0xb055, + // Block 0x45, offset 0x1140 + 0x1140: 0xb068, 0x1141: 0xb069, 0x1142: 0xb06a, 0x1143: 0xb06b, 0x1144: 0xb05a, 0x1145: 0xb056, + 0x1146: 0xb06c, 0x1147: 0xb06d, 0x1148: 0xb06b, 0x1149: 0xb06e, 0x114a: 0xb06b, 0x114b: 0xb06f, + 0x114c: 0xb06f, 0x114d: 0xb070, 0x114e: 0xb070, 0x114f: 0xb071, 0x1150: 0xb056, 0x1151: 0xb072, + 0x1152: 0xb073, 0x1153: 0xb072, 0x1154: 0xb074, 0x1155: 0xb073, 0x1156: 0xb075, 0x1157: 0xb075, + 0x1158: 0xb076, 0x1159: 0xb076, 0x115a: 0xb077, 0x115b: 0xb077, 0x115c: 0xb073, 0x115d: 0xb078, + 0x115e: 0xb079, 0x115f: 0xb067, 0x1160: 0xb07a, 0x1161: 0xb07b, 0x1162: 0xb07b, 0x1163: 0xb07b, + 0x1164: 0xb07b, 0x1165: 0xb07b, 0x1166: 0xb07b, 0x1167: 0xb07b, 0x1168: 0xb07b, 0x1169: 0xb07b, + 0x116a: 0xb07b, 0x116b: 0xb07b, 0x116c: 0xb07b, 0x116d: 0xb07b, 0x116e: 0xb07b, 0x116f: 0xb07b, + 0x1170: 0xb07c, 0x1171: 0xb07c, 0x1172: 0xb07c, 0x1173: 0xb07c, 0x1174: 0xb07c, 0x1175: 0xb07c, + 0x1176: 0xb07c, 0x1177: 0xb07c, 0x1178: 0xb07c, 0x1179: 0xb07c, 0x117a: 0xb07c, 0x117b: 0xb07c, + 0x117c: 0xb07c, 0x117d: 0xb07c, 0x117e: 0xb07c, + // Block 0x46, offset 0x1180 + 0x1182: 0xb07d, 0x1183: 0xb07e, 0x1184: 0xb07f, 0x1185: 0xb080, + 0x1186: 0xb07f, 0x1187: 0xb07e, 0x118a: 0xb081, 0x118b: 0xb082, + 0x118c: 0xb083, 0x118d: 0xb07f, 0x118e: 0xb080, 0x118f: 0xb07f, + 0x1192: 0xb084, 0x1193: 0xb085, 0x1194: 0xb084, 0x1195: 0xb086, 0x1196: 0xb084, 0x1197: 0xb087, + 0x119a: 0xb088, 0x119b: 0xb089, 0x119c: 0xb08a, + 0x11a0: 0x908b, 0x11a1: 0x908b, 
0x11a2: 0x908c, 0x11a3: 0x908d, + 0x11a4: 0x908b, 0x11a5: 0x908e, 0x11a6: 0x908f, 0x11a8: 0xb090, 0x11a9: 0xb091, + 0x11aa: 0xb092, 0x11ab: 0xb091, 0x11ac: 0xb093, 0x11ad: 0xb094, 0x11ae: 0xb095, + 0x11bd: 0x2000, + // Block 0x47, offset 0x11c0 + 0x11e0: 0x4000, 0x11e1: 0x4000, 0x11e2: 0x4000, 0x11e3: 0x4000, + // Block 0x48, offset 0x1200 + 0x1200: 0x4000, 0x1201: 0x4000, 0x1202: 0x4000, 0x1203: 0x4000, 0x1204: 0x4000, 0x1205: 0x4000, + 0x1206: 0x4000, 0x1207: 0x4000, 0x1208: 0x4000, 0x1209: 0x4000, 0x120a: 0x4000, 0x120b: 0x4000, + 0x120c: 0x4000, 0x120d: 0x4000, 0x120e: 0x4000, 0x120f: 0x4000, 0x1210: 0x4000, 0x1211: 0x4000, + 0x1212: 0x4000, 0x1213: 0x4000, 0x1214: 0x4000, 0x1215: 0x4000, 0x1216: 0x4000, 0x1217: 0x4000, + 0x1218: 0x4000, 0x1219: 0x4000, 0x121a: 0x4000, 0x121b: 0x4000, 0x121c: 0x4000, 0x121d: 0x4000, + 0x121e: 0x4000, 0x121f: 0x4000, 0x1220: 0x4000, 0x1221: 0x4000, 0x1222: 0x4000, 0x1223: 0x4000, + 0x1224: 0x4000, 0x1225: 0x4000, 0x1226: 0x4000, 0x1227: 0x4000, 0x1228: 0x4000, 0x1229: 0x4000, + 0x122a: 0x4000, 0x122b: 0x4000, 0x122c: 0x4000, 0x122d: 0x4000, 0x122e: 0x4000, 0x122f: 0x4000, + 0x1230: 0x4000, 0x1231: 0x4000, 0x1232: 0x4000, 0x1233: 0x4000, 0x1234: 0x4000, 0x1235: 0x4000, + 0x1236: 0x4000, 0x1237: 0x4000, + // Block 0x49, offset 0x1240 + 0x1240: 0x4000, 0x1241: 0x4000, 0x1242: 0x4000, 0x1243: 0x4000, 0x1244: 0x4000, 0x1245: 0x4000, + 0x1246: 0x4000, 0x1247: 0x4000, 0x1248: 0x4000, 0x1249: 0x4000, 0x124a: 0x4000, 0x124b: 0x4000, + 0x124c: 0x4000, 0x124d: 0x4000, 0x124e: 0x4000, 0x124f: 0x4000, 0x1250: 0x4000, 0x1251: 0x4000, + 0x1252: 0x4000, 0x1253: 0x4000, 0x1254: 0x4000, 0x1255: 0x4000, 0x1256: 0x4000, 0x1257: 0x4000, + 0x1258: 0x4000, 0x1259: 0x4000, 0x125a: 0x4000, 0x125b: 0x4000, 0x125c: 0x4000, 0x125d: 0x4000, + 0x125e: 0x4000, 0x125f: 0x4000, 0x1260: 0x4000, 0x1261: 0x4000, 0x1262: 0x4000, 0x1263: 0x4000, + 0x1264: 0x4000, 0x1265: 0x4000, 0x1266: 0x4000, 0x1267: 0x4000, 0x1268: 0x4000, 0x1269: 0x4000, + 0x126a: 0x4000, 0x126b: 0x4000, 0x126c: 0x4000, 0x126d: 0x4000, 0x126e: 0x4000, 0x126f: 0x4000, + 0x1270: 0x4000, 0x1271: 0x4000, 0x1272: 0x4000, + // Block 0x4a, offset 0x1280 + 0x1280: 0x4000, 0x1281: 0x4000, 0x1282: 0x4000, 0x1283: 0x4000, 0x1284: 0x4000, 0x1285: 0x4000, + 0x1286: 0x4000, 0x1287: 0x4000, 0x1288: 0x4000, 0x1289: 0x4000, 0x128a: 0x4000, 0x128b: 0x4000, + 0x128c: 0x4000, 0x128d: 0x4000, 0x128e: 0x4000, 0x128f: 0x4000, 0x1290: 0x4000, 0x1291: 0x4000, + 0x1292: 0x4000, 0x1293: 0x4000, 0x1294: 0x4000, 0x1295: 0x4000, 0x1296: 0x4000, 0x1297: 0x4000, + 0x1298: 0x4000, 0x1299: 0x4000, 0x129a: 0x4000, 0x129b: 0x4000, 0x129c: 0x4000, 0x129d: 0x4000, + 0x129e: 0x4000, + // Block 0x4b, offset 0x12c0 + 0x12d0: 0x4000, 0x12d1: 0x4000, + 0x12d2: 0x4000, + 0x12e4: 0x4000, 0x12e5: 0x4000, 0x12e6: 0x4000, 0x12e7: 0x4000, + 0x12f0: 0x4000, 0x12f1: 0x4000, 0x12f2: 0x4000, 0x12f3: 0x4000, 0x12f4: 0x4000, 0x12f5: 0x4000, + 0x12f6: 0x4000, 0x12f7: 0x4000, 0x12f8: 0x4000, 0x12f9: 0x4000, 0x12fa: 0x4000, 0x12fb: 0x4000, + 0x12fc: 0x4000, 0x12fd: 0x4000, 0x12fe: 0x4000, 0x12ff: 0x4000, + // Block 0x4c, offset 0x1300 + 0x1300: 0x4000, 0x1301: 0x4000, 0x1302: 0x4000, 0x1303: 0x4000, 0x1304: 0x4000, 0x1305: 0x4000, + 0x1306: 0x4000, 0x1307: 0x4000, 0x1308: 0x4000, 0x1309: 0x4000, 0x130a: 0x4000, 0x130b: 0x4000, + 0x130c: 0x4000, 0x130d: 0x4000, 0x130e: 0x4000, 0x130f: 0x4000, 0x1310: 0x4000, 0x1311: 0x4000, + 0x1312: 0x4000, 0x1313: 0x4000, 0x1314: 0x4000, 0x1315: 0x4000, 0x1316: 0x4000, 0x1317: 0x4000, + 0x1318: 0x4000, 0x1319: 0x4000, 0x131a: 0x4000, 0x131b: 
0x4000, 0x131c: 0x4000, 0x131d: 0x4000, + 0x131e: 0x4000, 0x131f: 0x4000, 0x1320: 0x4000, 0x1321: 0x4000, 0x1322: 0x4000, 0x1323: 0x4000, + 0x1324: 0x4000, 0x1325: 0x4000, 0x1326: 0x4000, 0x1327: 0x4000, 0x1328: 0x4000, 0x1329: 0x4000, + 0x132a: 0x4000, 0x132b: 0x4000, 0x132c: 0x4000, 0x132d: 0x4000, 0x132e: 0x4000, 0x132f: 0x4000, + 0x1330: 0x4000, 0x1331: 0x4000, 0x1332: 0x4000, 0x1333: 0x4000, 0x1334: 0x4000, 0x1335: 0x4000, + 0x1336: 0x4000, 0x1337: 0x4000, 0x1338: 0x4000, 0x1339: 0x4000, 0x133a: 0x4000, 0x133b: 0x4000, + // Block 0x4d, offset 0x1340 + 0x1344: 0x4000, + // Block 0x4e, offset 0x1380 + 0x138f: 0x4000, + // Block 0x4f, offset 0x13c0 + 0x13c0: 0x2000, 0x13c1: 0x2000, 0x13c2: 0x2000, 0x13c3: 0x2000, 0x13c4: 0x2000, 0x13c5: 0x2000, + 0x13c6: 0x2000, 0x13c7: 0x2000, 0x13c8: 0x2000, 0x13c9: 0x2000, 0x13ca: 0x2000, + 0x13d0: 0x2000, 0x13d1: 0x2000, + 0x13d2: 0x2000, 0x13d3: 0x2000, 0x13d4: 0x2000, 0x13d5: 0x2000, 0x13d6: 0x2000, 0x13d7: 0x2000, + 0x13d8: 0x2000, 0x13d9: 0x2000, 0x13da: 0x2000, 0x13db: 0x2000, 0x13dc: 0x2000, 0x13dd: 0x2000, + 0x13de: 0x2000, 0x13df: 0x2000, 0x13e0: 0x2000, 0x13e1: 0x2000, 0x13e2: 0x2000, 0x13e3: 0x2000, + 0x13e4: 0x2000, 0x13e5: 0x2000, 0x13e6: 0x2000, 0x13e7: 0x2000, 0x13e8: 0x2000, 0x13e9: 0x2000, + 0x13ea: 0x2000, 0x13eb: 0x2000, 0x13ec: 0x2000, 0x13ed: 0x2000, + 0x13f0: 0x2000, 0x13f1: 0x2000, 0x13f2: 0x2000, 0x13f3: 0x2000, 0x13f4: 0x2000, 0x13f5: 0x2000, + 0x13f6: 0x2000, 0x13f7: 0x2000, 0x13f8: 0x2000, 0x13f9: 0x2000, 0x13fa: 0x2000, 0x13fb: 0x2000, + 0x13fc: 0x2000, 0x13fd: 0x2000, 0x13fe: 0x2000, 0x13ff: 0x2000, + // Block 0x50, offset 0x1400 + 0x1400: 0x2000, 0x1401: 0x2000, 0x1402: 0x2000, 0x1403: 0x2000, 0x1404: 0x2000, 0x1405: 0x2000, + 0x1406: 0x2000, 0x1407: 0x2000, 0x1408: 0x2000, 0x1409: 0x2000, 0x140a: 0x2000, 0x140b: 0x2000, + 0x140c: 0x2000, 0x140d: 0x2000, 0x140e: 0x2000, 0x140f: 0x2000, 0x1410: 0x2000, 0x1411: 0x2000, + 0x1412: 0x2000, 0x1413: 0x2000, 0x1414: 0x2000, 0x1415: 0x2000, 0x1416: 0x2000, 0x1417: 0x2000, + 0x1418: 0x2000, 0x1419: 0x2000, 0x141a: 0x2000, 0x141b: 0x2000, 0x141c: 0x2000, 0x141d: 0x2000, + 0x141e: 0x2000, 0x141f: 0x2000, 0x1420: 0x2000, 0x1421: 0x2000, 0x1422: 0x2000, 0x1423: 0x2000, + 0x1424: 0x2000, 0x1425: 0x2000, 0x1426: 0x2000, 0x1427: 0x2000, 0x1428: 0x2000, 0x1429: 0x2000, + 0x1430: 0x2000, 0x1431: 0x2000, 0x1432: 0x2000, 0x1433: 0x2000, 0x1434: 0x2000, 0x1435: 0x2000, + 0x1436: 0x2000, 0x1437: 0x2000, 0x1438: 0x2000, 0x1439: 0x2000, 0x143a: 0x2000, 0x143b: 0x2000, + 0x143c: 0x2000, 0x143d: 0x2000, 0x143e: 0x2000, 0x143f: 0x2000, + // Block 0x51, offset 0x1440 + 0x1440: 0x2000, 0x1441: 0x2000, 0x1442: 0x2000, 0x1443: 0x2000, 0x1444: 0x2000, 0x1445: 0x2000, + 0x1446: 0x2000, 0x1447: 0x2000, 0x1448: 0x2000, 0x1449: 0x2000, 0x144a: 0x2000, 0x144b: 0x2000, + 0x144c: 0x2000, 0x144d: 0x2000, 0x144e: 0x4000, 0x144f: 0x2000, 0x1450: 0x2000, 0x1451: 0x4000, + 0x1452: 0x4000, 0x1453: 0x4000, 0x1454: 0x4000, 0x1455: 0x4000, 0x1456: 0x4000, 0x1457: 0x4000, + 0x1458: 0x4000, 0x1459: 0x4000, 0x145a: 0x4000, 0x145b: 0x2000, 0x145c: 0x2000, 0x145d: 0x2000, + 0x145e: 0x2000, 0x145f: 0x2000, 0x1460: 0x2000, 0x1461: 0x2000, 0x1462: 0x2000, 0x1463: 0x2000, + 0x1464: 0x2000, 0x1465: 0x2000, 0x1466: 0x2000, 0x1467: 0x2000, 0x1468: 0x2000, 0x1469: 0x2000, + 0x146a: 0x2000, 0x146b: 0x2000, 0x146c: 0x2000, + // Block 0x52, offset 0x1480 + 0x1480: 0x4000, 0x1481: 0x4000, 0x1482: 0x4000, + 0x1490: 0x4000, 0x1491: 0x4000, + 0x1492: 0x4000, 0x1493: 0x4000, 0x1494: 0x4000, 0x1495: 0x4000, 0x1496: 0x4000, 0x1497: 0x4000, + 
0x1498: 0x4000, 0x1499: 0x4000, 0x149a: 0x4000, 0x149b: 0x4000, 0x149c: 0x4000, 0x149d: 0x4000, + 0x149e: 0x4000, 0x149f: 0x4000, 0x14a0: 0x4000, 0x14a1: 0x4000, 0x14a2: 0x4000, 0x14a3: 0x4000, + 0x14a4: 0x4000, 0x14a5: 0x4000, 0x14a6: 0x4000, 0x14a7: 0x4000, 0x14a8: 0x4000, 0x14a9: 0x4000, + 0x14aa: 0x4000, 0x14ab: 0x4000, 0x14ac: 0x4000, 0x14ad: 0x4000, 0x14ae: 0x4000, 0x14af: 0x4000, + 0x14b0: 0x4000, 0x14b1: 0x4000, 0x14b2: 0x4000, 0x14b3: 0x4000, 0x14b4: 0x4000, 0x14b5: 0x4000, + 0x14b6: 0x4000, 0x14b7: 0x4000, 0x14b8: 0x4000, 0x14b9: 0x4000, 0x14ba: 0x4000, 0x14bb: 0x4000, + // Block 0x53, offset 0x14c0 + 0x14c0: 0x4000, 0x14c1: 0x4000, 0x14c2: 0x4000, 0x14c3: 0x4000, 0x14c4: 0x4000, 0x14c5: 0x4000, + 0x14c6: 0x4000, 0x14c7: 0x4000, 0x14c8: 0x4000, + 0x14d0: 0x4000, 0x14d1: 0x4000, + 0x14e0: 0x4000, 0x14e1: 0x4000, 0x14e2: 0x4000, 0x14e3: 0x4000, + 0x14e4: 0x4000, 0x14e5: 0x4000, + // Block 0x54, offset 0x1500 + 0x1500: 0x4000, 0x1501: 0x4000, 0x1502: 0x4000, 0x1503: 0x4000, 0x1504: 0x4000, 0x1505: 0x4000, + 0x1506: 0x4000, 0x1507: 0x4000, 0x1508: 0x4000, 0x1509: 0x4000, 0x150a: 0x4000, 0x150b: 0x4000, + 0x150c: 0x4000, 0x150d: 0x4000, 0x150e: 0x4000, 0x150f: 0x4000, 0x1510: 0x4000, 0x1511: 0x4000, + 0x1512: 0x4000, 0x1513: 0x4000, 0x1514: 0x4000, 0x1515: 0x4000, 0x1516: 0x4000, 0x1517: 0x4000, + 0x1518: 0x4000, 0x1519: 0x4000, 0x151a: 0x4000, 0x151b: 0x4000, 0x151c: 0x4000, 0x151d: 0x4000, + 0x151e: 0x4000, 0x151f: 0x4000, 0x1520: 0x4000, + 0x152d: 0x4000, 0x152e: 0x4000, 0x152f: 0x4000, + 0x1530: 0x4000, 0x1531: 0x4000, 0x1532: 0x4000, 0x1533: 0x4000, 0x1534: 0x4000, 0x1535: 0x4000, + 0x1537: 0x4000, 0x1538: 0x4000, 0x1539: 0x4000, 0x153a: 0x4000, 0x153b: 0x4000, + 0x153c: 0x4000, 0x153d: 0x4000, 0x153e: 0x4000, 0x153f: 0x4000, + // Block 0x55, offset 0x1540 + 0x1540: 0x4000, 0x1541: 0x4000, 0x1542: 0x4000, 0x1543: 0x4000, 0x1544: 0x4000, 0x1545: 0x4000, + 0x1546: 0x4000, 0x1547: 0x4000, 0x1548: 0x4000, 0x1549: 0x4000, 0x154a: 0x4000, 0x154b: 0x4000, + 0x154c: 0x4000, 0x154d: 0x4000, 0x154e: 0x4000, 0x154f: 0x4000, 0x1550: 0x4000, 0x1551: 0x4000, + 0x1552: 0x4000, 0x1553: 0x4000, 0x1554: 0x4000, 0x1555: 0x4000, 0x1556: 0x4000, 0x1557: 0x4000, + 0x1558: 0x4000, 0x1559: 0x4000, 0x155a: 0x4000, 0x155b: 0x4000, 0x155c: 0x4000, 0x155d: 0x4000, + 0x155e: 0x4000, 0x155f: 0x4000, 0x1560: 0x4000, 0x1561: 0x4000, 0x1562: 0x4000, 0x1563: 0x4000, + 0x1564: 0x4000, 0x1565: 0x4000, 0x1566: 0x4000, 0x1567: 0x4000, 0x1568: 0x4000, 0x1569: 0x4000, + 0x156a: 0x4000, 0x156b: 0x4000, 0x156c: 0x4000, 0x156d: 0x4000, 0x156e: 0x4000, 0x156f: 0x4000, + 0x1570: 0x4000, 0x1571: 0x4000, 0x1572: 0x4000, 0x1573: 0x4000, 0x1574: 0x4000, 0x1575: 0x4000, + 0x1576: 0x4000, 0x1577: 0x4000, 0x1578: 0x4000, 0x1579: 0x4000, 0x157a: 0x4000, 0x157b: 0x4000, + 0x157c: 0x4000, 0x157e: 0x4000, 0x157f: 0x4000, + // Block 0x56, offset 0x1580 + 0x1580: 0x4000, 0x1581: 0x4000, 0x1582: 0x4000, 0x1583: 0x4000, 0x1584: 0x4000, 0x1585: 0x4000, + 0x1586: 0x4000, 0x1587: 0x4000, 0x1588: 0x4000, 0x1589: 0x4000, 0x158a: 0x4000, 0x158b: 0x4000, + 0x158c: 0x4000, 0x158d: 0x4000, 0x158e: 0x4000, 0x158f: 0x4000, 0x1590: 0x4000, 0x1591: 0x4000, + 0x1592: 0x4000, 0x1593: 0x4000, + 0x15a0: 0x4000, 0x15a1: 0x4000, 0x15a2: 0x4000, 0x15a3: 0x4000, + 0x15a4: 0x4000, 0x15a5: 0x4000, 0x15a6: 0x4000, 0x15a7: 0x4000, 0x15a8: 0x4000, 0x15a9: 0x4000, + 0x15aa: 0x4000, 0x15ab: 0x4000, 0x15ac: 0x4000, 0x15ad: 0x4000, 0x15ae: 0x4000, 0x15af: 0x4000, + 0x15b0: 0x4000, 0x15b1: 0x4000, 0x15b2: 0x4000, 0x15b3: 0x4000, 0x15b4: 0x4000, 0x15b5: 0x4000, + 
0x15b6: 0x4000, 0x15b7: 0x4000, 0x15b8: 0x4000, 0x15b9: 0x4000, 0x15ba: 0x4000, 0x15bb: 0x4000, + 0x15bc: 0x4000, 0x15bd: 0x4000, 0x15be: 0x4000, 0x15bf: 0x4000, + // Block 0x57, offset 0x15c0 + 0x15c0: 0x4000, 0x15c1: 0x4000, 0x15c2: 0x4000, 0x15c3: 0x4000, 0x15c4: 0x4000, 0x15c5: 0x4000, + 0x15c6: 0x4000, 0x15c7: 0x4000, 0x15c8: 0x4000, 0x15c9: 0x4000, 0x15ca: 0x4000, + 0x15cf: 0x4000, 0x15d0: 0x4000, 0x15d1: 0x4000, + 0x15d2: 0x4000, 0x15d3: 0x4000, + 0x15e0: 0x4000, 0x15e1: 0x4000, 0x15e2: 0x4000, 0x15e3: 0x4000, + 0x15e4: 0x4000, 0x15e5: 0x4000, 0x15e6: 0x4000, 0x15e7: 0x4000, 0x15e8: 0x4000, 0x15e9: 0x4000, + 0x15ea: 0x4000, 0x15eb: 0x4000, 0x15ec: 0x4000, 0x15ed: 0x4000, 0x15ee: 0x4000, 0x15ef: 0x4000, + 0x15f0: 0x4000, 0x15f4: 0x4000, + 0x15f8: 0x4000, 0x15f9: 0x4000, 0x15fa: 0x4000, 0x15fb: 0x4000, + 0x15fc: 0x4000, 0x15fd: 0x4000, 0x15fe: 0x4000, 0x15ff: 0x4000, + // Block 0x58, offset 0x1600 + 0x1600: 0x4000, 0x1602: 0x4000, 0x1603: 0x4000, 0x1604: 0x4000, 0x1605: 0x4000, + 0x1606: 0x4000, 0x1607: 0x4000, 0x1608: 0x4000, 0x1609: 0x4000, 0x160a: 0x4000, 0x160b: 0x4000, + 0x160c: 0x4000, 0x160d: 0x4000, 0x160e: 0x4000, 0x160f: 0x4000, 0x1610: 0x4000, 0x1611: 0x4000, + 0x1612: 0x4000, 0x1613: 0x4000, 0x1614: 0x4000, 0x1615: 0x4000, 0x1616: 0x4000, 0x1617: 0x4000, + 0x1618: 0x4000, 0x1619: 0x4000, 0x161a: 0x4000, 0x161b: 0x4000, 0x161c: 0x4000, 0x161d: 0x4000, + 0x161e: 0x4000, 0x161f: 0x4000, 0x1620: 0x4000, 0x1621: 0x4000, 0x1622: 0x4000, 0x1623: 0x4000, + 0x1624: 0x4000, 0x1625: 0x4000, 0x1626: 0x4000, 0x1627: 0x4000, 0x1628: 0x4000, 0x1629: 0x4000, + 0x162a: 0x4000, 0x162b: 0x4000, 0x162c: 0x4000, 0x162d: 0x4000, 0x162e: 0x4000, 0x162f: 0x4000, + 0x1630: 0x4000, 0x1631: 0x4000, 0x1632: 0x4000, 0x1633: 0x4000, 0x1634: 0x4000, 0x1635: 0x4000, + 0x1636: 0x4000, 0x1637: 0x4000, 0x1638: 0x4000, 0x1639: 0x4000, 0x163a: 0x4000, 0x163b: 0x4000, + 0x163c: 0x4000, 0x163d: 0x4000, 0x163e: 0x4000, 0x163f: 0x4000, + // Block 0x59, offset 0x1640 + 0x1640: 0x4000, 0x1641: 0x4000, 0x1642: 0x4000, 0x1643: 0x4000, 0x1644: 0x4000, 0x1645: 0x4000, + 0x1646: 0x4000, 0x1647: 0x4000, 0x1648: 0x4000, 0x1649: 0x4000, 0x164a: 0x4000, 0x164b: 0x4000, + 0x164c: 0x4000, 0x164d: 0x4000, 0x164e: 0x4000, 0x164f: 0x4000, 0x1650: 0x4000, 0x1651: 0x4000, + 0x1652: 0x4000, 0x1653: 0x4000, 0x1654: 0x4000, 0x1655: 0x4000, 0x1656: 0x4000, 0x1657: 0x4000, + 0x1658: 0x4000, 0x1659: 0x4000, 0x165a: 0x4000, 0x165b: 0x4000, 0x165c: 0x4000, 0x165d: 0x4000, + 0x165e: 0x4000, 0x165f: 0x4000, 0x1660: 0x4000, 0x1661: 0x4000, 0x1662: 0x4000, 0x1663: 0x4000, + 0x1664: 0x4000, 0x1665: 0x4000, 0x1666: 0x4000, 0x1667: 0x4000, 0x1668: 0x4000, 0x1669: 0x4000, + 0x166a: 0x4000, 0x166b: 0x4000, 0x166c: 0x4000, 0x166d: 0x4000, 0x166e: 0x4000, 0x166f: 0x4000, + 0x1670: 0x4000, 0x1671: 0x4000, 0x1672: 0x4000, 0x1673: 0x4000, 0x1674: 0x4000, 0x1675: 0x4000, + 0x1676: 0x4000, 0x1677: 0x4000, 0x1678: 0x4000, 0x1679: 0x4000, 0x167a: 0x4000, 0x167b: 0x4000, + 0x167c: 0x4000, 0x167f: 0x4000, + // Block 0x5a, offset 0x1680 + 0x1680: 0x4000, 0x1681: 0x4000, 0x1682: 0x4000, 0x1683: 0x4000, 0x1684: 0x4000, 0x1685: 0x4000, + 0x1686: 0x4000, 0x1687: 0x4000, 0x1688: 0x4000, 0x1689: 0x4000, 0x168a: 0x4000, 0x168b: 0x4000, + 0x168c: 0x4000, 0x168d: 0x4000, 0x168e: 0x4000, 0x168f: 0x4000, 0x1690: 0x4000, 0x1691: 0x4000, + 0x1692: 0x4000, 0x1693: 0x4000, 0x1694: 0x4000, 0x1695: 0x4000, 0x1696: 0x4000, 0x1697: 0x4000, + 0x1698: 0x4000, 0x1699: 0x4000, 0x169a: 0x4000, 0x169b: 0x4000, 0x169c: 0x4000, 0x169d: 0x4000, + 0x169e: 0x4000, 0x169f: 0x4000, 0x16a0: 
0x4000, 0x16a1: 0x4000, 0x16a2: 0x4000, 0x16a3: 0x4000, + 0x16a4: 0x4000, 0x16a5: 0x4000, 0x16a6: 0x4000, 0x16a7: 0x4000, 0x16a8: 0x4000, 0x16a9: 0x4000, + 0x16aa: 0x4000, 0x16ab: 0x4000, 0x16ac: 0x4000, 0x16ad: 0x4000, 0x16ae: 0x4000, 0x16af: 0x4000, + 0x16b0: 0x4000, 0x16b1: 0x4000, 0x16b2: 0x4000, 0x16b3: 0x4000, 0x16b4: 0x4000, 0x16b5: 0x4000, + 0x16b6: 0x4000, 0x16b7: 0x4000, 0x16b8: 0x4000, 0x16b9: 0x4000, 0x16ba: 0x4000, 0x16bb: 0x4000, + 0x16bc: 0x4000, 0x16bd: 0x4000, + // Block 0x5b, offset 0x16c0 + 0x16cb: 0x4000, + 0x16cc: 0x4000, 0x16cd: 0x4000, 0x16ce: 0x4000, 0x16d0: 0x4000, 0x16d1: 0x4000, + 0x16d2: 0x4000, 0x16d3: 0x4000, 0x16d4: 0x4000, 0x16d5: 0x4000, 0x16d6: 0x4000, 0x16d7: 0x4000, + 0x16d8: 0x4000, 0x16d9: 0x4000, 0x16da: 0x4000, 0x16db: 0x4000, 0x16dc: 0x4000, 0x16dd: 0x4000, + 0x16de: 0x4000, 0x16df: 0x4000, 0x16e0: 0x4000, 0x16e1: 0x4000, 0x16e2: 0x4000, 0x16e3: 0x4000, + 0x16e4: 0x4000, 0x16e5: 0x4000, 0x16e6: 0x4000, 0x16e7: 0x4000, + 0x16fa: 0x4000, + // Block 0x5c, offset 0x1700 + 0x1715: 0x4000, 0x1716: 0x4000, + 0x1724: 0x4000, + // Block 0x5d, offset 0x1740 + 0x177b: 0x4000, + 0x177c: 0x4000, 0x177d: 0x4000, 0x177e: 0x4000, 0x177f: 0x4000, + // Block 0x5e, offset 0x1780 + 0x1780: 0x4000, 0x1781: 0x4000, 0x1782: 0x4000, 0x1783: 0x4000, 0x1784: 0x4000, 0x1785: 0x4000, + 0x1786: 0x4000, 0x1787: 0x4000, 0x1788: 0x4000, 0x1789: 0x4000, 0x178a: 0x4000, 0x178b: 0x4000, + 0x178c: 0x4000, 0x178d: 0x4000, 0x178e: 0x4000, 0x178f: 0x4000, + // Block 0x5f, offset 0x17c0 + 0x17c0: 0x4000, 0x17c1: 0x4000, 0x17c2: 0x4000, 0x17c3: 0x4000, 0x17c4: 0x4000, 0x17c5: 0x4000, + 0x17cc: 0x4000, 0x17d0: 0x4000, 0x17d1: 0x4000, + 0x17d2: 0x4000, 0x17d5: 0x4000, + 0x17eb: 0x4000, 0x17ec: 0x4000, + 0x17f4: 0x4000, 0x17f5: 0x4000, + 0x17f6: 0x4000, 0x17f7: 0x4000, 0x17f8: 0x4000, 0x17f9: 0x4000, 0x17fa: 0x4000, + // Block 0x60, offset 0x1800 + 0x1820: 0x4000, 0x1821: 0x4000, 0x1822: 0x4000, 0x1823: 0x4000, + 0x1824: 0x4000, 0x1825: 0x4000, 0x1826: 0x4000, 0x1827: 0x4000, 0x1828: 0x4000, 0x1829: 0x4000, + 0x182a: 0x4000, 0x182b: 0x4000, + // Block 0x61, offset 0x1840 + 0x184d: 0x4000, 0x184e: 0x4000, 0x184f: 0x4000, 0x1850: 0x4000, 0x1851: 0x4000, + 0x1852: 0x4000, 0x1853: 0x4000, 0x1854: 0x4000, 0x1855: 0x4000, 0x1856: 0x4000, 0x1857: 0x4000, + 0x1858: 0x4000, 0x1859: 0x4000, 0x185a: 0x4000, 0x185b: 0x4000, 0x185c: 0x4000, 0x185d: 0x4000, + 0x185e: 0x4000, 0x185f: 0x4000, 0x1860: 0x4000, 0x1861: 0x4000, 0x1862: 0x4000, 0x1863: 0x4000, + 0x1864: 0x4000, 0x1865: 0x4000, 0x1866: 0x4000, 0x1867: 0x4000, 0x1868: 0x4000, 0x1869: 0x4000, + 0x186a: 0x4000, 0x186b: 0x4000, 0x186c: 0x4000, 0x186d: 0x4000, 0x186e: 0x4000, 0x186f: 0x4000, + 0x1870: 0x4000, 0x1871: 0x4000, 0x1872: 0x4000, 0x1873: 0x4000, 0x1874: 0x4000, 0x1875: 0x4000, + 0x1876: 0x4000, 0x1877: 0x4000, 0x1878: 0x4000, 0x1879: 0x4000, 0x187a: 0x4000, 0x187b: 0x4000, + 0x187c: 0x4000, 0x187d: 0x4000, 0x187e: 0x4000, 0x187f: 0x4000, + // Block 0x62, offset 0x1880 + 0x1880: 0x4000, 0x1881: 0x4000, 0x1882: 0x4000, 0x1883: 0x4000, 0x1884: 0x4000, 0x1885: 0x4000, + 0x1886: 0x4000, 0x1887: 0x4000, 0x1888: 0x4000, 0x1889: 0x4000, 0x188a: 0x4000, 0x188b: 0x4000, + 0x188c: 0x4000, 0x188d: 0x4000, 0x188e: 0x4000, 0x188f: 0x4000, 0x1890: 0x4000, 0x1891: 0x4000, + 0x1892: 0x4000, 0x1893: 0x4000, 0x1894: 0x4000, 0x1895: 0x4000, 0x1896: 0x4000, 0x1897: 0x4000, + 0x1898: 0x4000, 0x1899: 0x4000, 0x189a: 0x4000, 0x189b: 0x4000, 0x189c: 0x4000, 0x189d: 0x4000, + 0x189e: 0x4000, 0x189f: 0x4000, 0x18a0: 0x4000, 0x18a1: 0x4000, 0x18a2: 0x4000, 0x18a3: 
0x4000, + 0x18a4: 0x4000, 0x18a5: 0x4000, 0x18a6: 0x4000, 0x18a7: 0x4000, 0x18a8: 0x4000, 0x18a9: 0x4000, + 0x18aa: 0x4000, 0x18ab: 0x4000, 0x18ac: 0x4000, 0x18ad: 0x4000, 0x18ae: 0x4000, 0x18af: 0x4000, + 0x18b0: 0x4000, 0x18b1: 0x4000, 0x18b3: 0x4000, 0x18b4: 0x4000, 0x18b5: 0x4000, + 0x18b6: 0x4000, 0x18ba: 0x4000, 0x18bb: 0x4000, + 0x18bc: 0x4000, 0x18bd: 0x4000, 0x18be: 0x4000, 0x18bf: 0x4000, + // Block 0x63, offset 0x18c0 + 0x18c0: 0x4000, 0x18c1: 0x4000, 0x18c2: 0x4000, 0x18c3: 0x4000, 0x18c4: 0x4000, 0x18c5: 0x4000, + 0x18c6: 0x4000, 0x18c7: 0x4000, 0x18c8: 0x4000, 0x18c9: 0x4000, 0x18ca: 0x4000, 0x18cb: 0x4000, + 0x18cc: 0x4000, 0x18cd: 0x4000, 0x18ce: 0x4000, 0x18cf: 0x4000, 0x18d0: 0x4000, 0x18d1: 0x4000, + 0x18d2: 0x4000, 0x18d3: 0x4000, 0x18d4: 0x4000, 0x18d5: 0x4000, 0x18d6: 0x4000, 0x18d7: 0x4000, + 0x18d8: 0x4000, 0x18d9: 0x4000, 0x18da: 0x4000, 0x18db: 0x4000, 0x18dc: 0x4000, 0x18dd: 0x4000, + 0x18de: 0x4000, 0x18df: 0x4000, 0x18e0: 0x4000, 0x18e1: 0x4000, 0x18e2: 0x4000, + 0x18e5: 0x4000, 0x18e6: 0x4000, 0x18e7: 0x4000, 0x18e8: 0x4000, 0x18e9: 0x4000, + 0x18ea: 0x4000, 0x18ee: 0x4000, 0x18ef: 0x4000, + 0x18f0: 0x4000, 0x18f1: 0x4000, 0x18f2: 0x4000, 0x18f3: 0x4000, 0x18f4: 0x4000, 0x18f5: 0x4000, + 0x18f6: 0x4000, 0x18f7: 0x4000, 0x18f8: 0x4000, 0x18f9: 0x4000, 0x18fa: 0x4000, 0x18fb: 0x4000, + 0x18fc: 0x4000, 0x18fd: 0x4000, 0x18fe: 0x4000, 0x18ff: 0x4000, + // Block 0x64, offset 0x1900 + 0x1900: 0x4000, 0x1901: 0x4000, 0x1902: 0x4000, 0x1903: 0x4000, 0x1904: 0x4000, 0x1905: 0x4000, + 0x1906: 0x4000, 0x1907: 0x4000, 0x1908: 0x4000, 0x1909: 0x4000, 0x190a: 0x4000, + 0x190d: 0x4000, 0x190e: 0x4000, 0x190f: 0x4000, 0x1910: 0x4000, 0x1911: 0x4000, + 0x1912: 0x4000, 0x1913: 0x4000, 0x1914: 0x4000, 0x1915: 0x4000, 0x1916: 0x4000, 0x1917: 0x4000, + 0x1918: 0x4000, 0x1919: 0x4000, 0x191a: 0x4000, 0x191b: 0x4000, 0x191c: 0x4000, 0x191d: 0x4000, + 0x191e: 0x4000, 0x191f: 0x4000, 0x1920: 0x4000, 0x1921: 0x4000, 0x1922: 0x4000, 0x1923: 0x4000, + 0x1924: 0x4000, 0x1925: 0x4000, 0x1926: 0x4000, 0x1927: 0x4000, 0x1928: 0x4000, 0x1929: 0x4000, + 0x192a: 0x4000, 0x192b: 0x4000, 0x192c: 0x4000, 0x192d: 0x4000, 0x192e: 0x4000, 0x192f: 0x4000, + 0x1930: 0x4000, 0x1931: 0x4000, 0x1932: 0x4000, 0x1933: 0x4000, 0x1934: 0x4000, 0x1935: 0x4000, + 0x1936: 0x4000, 0x1937: 0x4000, 0x1938: 0x4000, 0x1939: 0x4000, 0x193a: 0x4000, 0x193b: 0x4000, + 0x193c: 0x4000, 0x193d: 0x4000, 0x193e: 0x4000, 0x193f: 0x4000, + // Block 0x65, offset 0x1940 + 0x1970: 0x4000, 0x1971: 0x4000, 0x1972: 0x4000, 0x1973: 0x4000, + 0x1978: 0x4000, 0x1979: 0x4000, 0x197a: 0x4000, + // Block 0x66, offset 0x1980 + 0x1980: 0x4000, 0x1981: 0x4000, 0x1982: 0x4000, + 0x1990: 0x4000, 0x1991: 0x4000, + 0x1992: 0x4000, 0x1993: 0x4000, 0x1994: 0x4000, 0x1995: 0x4000, + // Block 0x67, offset 0x19c0 + 0x19c0: 0x2000, 0x19c1: 0x2000, 0x19c2: 0x2000, 0x19c3: 0x2000, 0x19c4: 0x2000, 0x19c5: 0x2000, + 0x19c6: 0x2000, 0x19c7: 0x2000, 0x19c8: 0x2000, 0x19c9: 0x2000, 0x19ca: 0x2000, 0x19cb: 0x2000, + 0x19cc: 0x2000, 0x19cd: 0x2000, 0x19ce: 0x2000, 0x19cf: 0x2000, 0x19d0: 0x2000, 0x19d1: 0x2000, + 0x19d2: 0x2000, 0x19d3: 0x2000, 0x19d4: 0x2000, 0x19d5: 0x2000, 0x19d6: 0x2000, 0x19d7: 0x2000, + 0x19d8: 0x2000, 0x19d9: 0x2000, 0x19da: 0x2000, 0x19db: 0x2000, 0x19dc: 0x2000, 0x19dd: 0x2000, + 0x19de: 0x2000, 0x19df: 0x2000, 0x19e0: 0x2000, 0x19e1: 0x2000, 0x19e2: 0x2000, 0x19e3: 0x2000, + 0x19e4: 0x2000, 0x19e5: 0x2000, 0x19e6: 0x2000, 0x19e7: 0x2000, 0x19e8: 0x2000, 0x19e9: 0x2000, + 0x19ea: 0x2000, 0x19eb: 0x2000, 0x19ec: 0x2000, 0x19ed: 0x2000, 
0x19ee: 0x2000, 0x19ef: 0x2000, + 0x19f0: 0x2000, 0x19f1: 0x2000, 0x19f2: 0x2000, 0x19f3: 0x2000, 0x19f4: 0x2000, 0x19f5: 0x2000, + 0x19f6: 0x2000, 0x19f7: 0x2000, 0x19f8: 0x2000, 0x19f9: 0x2000, 0x19fa: 0x2000, 0x19fb: 0x2000, + 0x19fc: 0x2000, 0x19fd: 0x2000, +} + +// widthIndex: 22 blocks, 1408 entries, 1408 bytes +// Block 0 is the zero block. +var widthIndex = [1408]uint8{ + // Block 0x0, offset 0x0 + // Block 0x1, offset 0x40 + // Block 0x2, offset 0x80 + // Block 0x3, offset 0xc0 + 0xc2: 0x01, 0xc3: 0x02, 0xc4: 0x03, 0xc5: 0x04, 0xc7: 0x05, + 0xc9: 0x06, 0xcb: 0x07, 0xcc: 0x08, 0xcd: 0x09, 0xce: 0x0a, 0xcf: 0x0b, + 0xd0: 0x0c, 0xd1: 0x0d, + 0xe1: 0x02, 0xe2: 0x03, 0xe3: 0x04, 0xe4: 0x05, 0xe5: 0x06, 0xe6: 0x06, 0xe7: 0x06, + 0xe8: 0x06, 0xe9: 0x06, 0xea: 0x07, 0xeb: 0x06, 0xec: 0x06, 0xed: 0x08, 0xee: 0x09, 0xef: 0x0a, + 0xf0: 0x0f, 0xf3: 0x12, 0xf4: 0x13, + // Block 0x4, offset 0x100 + 0x104: 0x0e, 0x105: 0x0f, + // Block 0x5, offset 0x140 + 0x140: 0x10, 0x141: 0x11, 0x142: 0x12, 0x144: 0x13, 0x145: 0x14, 0x146: 0x15, 0x147: 0x16, + 0x148: 0x17, 0x149: 0x18, 0x14a: 0x19, 0x14c: 0x1a, 0x14f: 0x1b, + 0x151: 0x1c, 0x152: 0x08, 0x153: 0x1d, 0x154: 0x1e, 0x155: 0x1f, 0x156: 0x20, 0x157: 0x21, + 0x158: 0x22, 0x159: 0x23, 0x15a: 0x24, 0x15b: 0x25, 0x15c: 0x26, 0x15d: 0x27, 0x15e: 0x28, 0x15f: 0x29, + 0x166: 0x2a, + 0x16c: 0x2b, 0x16d: 0x2c, + 0x17a: 0x2d, 0x17b: 0x2e, 0x17c: 0x0e, 0x17d: 0x0e, 0x17e: 0x0e, 0x17f: 0x2f, + // Block 0x6, offset 0x180 + 0x180: 0x30, 0x181: 0x31, 0x182: 0x32, 0x183: 0x33, 0x184: 0x34, 0x185: 0x35, 0x186: 0x36, 0x187: 0x37, + 0x188: 0x38, 0x189: 0x39, 0x18a: 0x0e, 0x18b: 0x3a, 0x18c: 0x0e, 0x18d: 0x0e, 0x18e: 0x0e, 0x18f: 0x0e, + 0x190: 0x0e, 0x191: 0x0e, 0x192: 0x0e, 0x193: 0x0e, 0x194: 0x0e, 0x195: 0x0e, 0x196: 0x0e, 0x197: 0x0e, + 0x198: 0x0e, 0x199: 0x0e, 0x19a: 0x0e, 0x19b: 0x0e, 0x19c: 0x0e, 0x19d: 0x0e, 0x19e: 0x0e, 0x19f: 0x0e, + 0x1a0: 0x0e, 0x1a1: 0x0e, 0x1a2: 0x0e, 0x1a3: 0x0e, 0x1a4: 0x0e, 0x1a5: 0x0e, 0x1a6: 0x0e, 0x1a7: 0x0e, + 0x1a8: 0x0e, 0x1a9: 0x0e, 0x1aa: 0x0e, 0x1ab: 0x0e, 0x1ac: 0x0e, 0x1ad: 0x0e, 0x1ae: 0x0e, 0x1af: 0x0e, + 0x1b0: 0x0e, 0x1b1: 0x0e, 0x1b2: 0x0e, 0x1b3: 0x0e, 0x1b4: 0x0e, 0x1b5: 0x0e, 0x1b6: 0x0e, 0x1b7: 0x0e, + 0x1b8: 0x0e, 0x1b9: 0x0e, 0x1ba: 0x0e, 0x1bb: 0x0e, 0x1bc: 0x0e, 0x1bd: 0x0e, 0x1be: 0x0e, 0x1bf: 0x0e, + // Block 0x7, offset 0x1c0 + 0x1c0: 0x0e, 0x1c1: 0x0e, 0x1c2: 0x0e, 0x1c3: 0x0e, 0x1c4: 0x0e, 0x1c5: 0x0e, 0x1c6: 0x0e, 0x1c7: 0x0e, + 0x1c8: 0x0e, 0x1c9: 0x0e, 0x1ca: 0x0e, 0x1cb: 0x0e, 0x1cc: 0x0e, 0x1cd: 0x0e, 0x1ce: 0x0e, 0x1cf: 0x0e, + 0x1d0: 0x0e, 0x1d1: 0x0e, 0x1d2: 0x0e, 0x1d3: 0x0e, 0x1d4: 0x0e, 0x1d5: 0x0e, 0x1d6: 0x0e, 0x1d7: 0x0e, + 0x1d8: 0x0e, 0x1d9: 0x0e, 0x1da: 0x0e, 0x1db: 0x0e, 0x1dc: 0x0e, 0x1dd: 0x0e, 0x1de: 0x0e, 0x1df: 0x0e, + 0x1e0: 0x0e, 0x1e1: 0x0e, 0x1e2: 0x0e, 0x1e3: 0x0e, 0x1e4: 0x0e, 0x1e5: 0x0e, 0x1e6: 0x0e, 0x1e7: 0x0e, + 0x1e8: 0x0e, 0x1e9: 0x0e, 0x1ea: 0x0e, 0x1eb: 0x0e, 0x1ec: 0x0e, 0x1ed: 0x0e, 0x1ee: 0x0e, 0x1ef: 0x0e, + 0x1f0: 0x0e, 0x1f1: 0x0e, 0x1f2: 0x0e, 0x1f3: 0x0e, 0x1f4: 0x0e, 0x1f5: 0x0e, 0x1f6: 0x0e, + 0x1f8: 0x0e, 0x1f9: 0x0e, 0x1fa: 0x0e, 0x1fb: 0x0e, 0x1fc: 0x0e, 0x1fd: 0x0e, 0x1fe: 0x0e, 0x1ff: 0x0e, + // Block 0x8, offset 0x200 + 0x200: 0x0e, 0x201: 0x0e, 0x202: 0x0e, 0x203: 0x0e, 0x204: 0x0e, 0x205: 0x0e, 0x206: 0x0e, 0x207: 0x0e, + 0x208: 0x0e, 0x209: 0x0e, 0x20a: 0x0e, 0x20b: 0x0e, 0x20c: 0x0e, 0x20d: 0x0e, 0x20e: 0x0e, 0x20f: 0x0e, + 0x210: 0x0e, 0x211: 0x0e, 0x212: 0x0e, 0x213: 0x0e, 0x214: 0x0e, 0x215: 0x0e, 0x216: 0x0e, 0x217: 0x0e, + 0x218: 0x0e, 0x219: 
0x0e, 0x21a: 0x0e, 0x21b: 0x0e, 0x21c: 0x0e, 0x21d: 0x0e, 0x21e: 0x0e, 0x21f: 0x0e, + 0x220: 0x0e, 0x221: 0x0e, 0x222: 0x0e, 0x223: 0x0e, 0x224: 0x0e, 0x225: 0x0e, 0x226: 0x0e, 0x227: 0x0e, + 0x228: 0x0e, 0x229: 0x0e, 0x22a: 0x0e, 0x22b: 0x0e, 0x22c: 0x0e, 0x22d: 0x0e, 0x22e: 0x0e, 0x22f: 0x0e, + 0x230: 0x0e, 0x231: 0x0e, 0x232: 0x0e, 0x233: 0x0e, 0x234: 0x0e, 0x235: 0x0e, 0x236: 0x0e, 0x237: 0x0e, + 0x238: 0x0e, 0x239: 0x0e, 0x23a: 0x0e, 0x23b: 0x0e, 0x23c: 0x0e, 0x23d: 0x0e, 0x23e: 0x0e, 0x23f: 0x0e, + // Block 0x9, offset 0x240 + 0x240: 0x0e, 0x241: 0x0e, 0x242: 0x0e, 0x243: 0x0e, 0x244: 0x0e, 0x245: 0x0e, 0x246: 0x0e, 0x247: 0x0e, + 0x248: 0x0e, 0x249: 0x0e, 0x24a: 0x0e, 0x24b: 0x0e, 0x24c: 0x0e, 0x24d: 0x0e, 0x24e: 0x0e, 0x24f: 0x0e, + 0x250: 0x0e, 0x251: 0x0e, 0x252: 0x3b, 0x253: 0x3c, + 0x265: 0x3d, + 0x270: 0x0e, 0x271: 0x0e, 0x272: 0x0e, 0x273: 0x0e, 0x274: 0x0e, 0x275: 0x0e, 0x276: 0x0e, 0x277: 0x0e, + 0x278: 0x0e, 0x279: 0x0e, 0x27a: 0x0e, 0x27b: 0x0e, 0x27c: 0x0e, 0x27d: 0x0e, 0x27e: 0x0e, 0x27f: 0x0e, + // Block 0xa, offset 0x280 + 0x280: 0x0e, 0x281: 0x0e, 0x282: 0x0e, 0x283: 0x0e, 0x284: 0x0e, 0x285: 0x0e, 0x286: 0x0e, 0x287: 0x0e, + 0x288: 0x0e, 0x289: 0x0e, 0x28a: 0x0e, 0x28b: 0x0e, 0x28c: 0x0e, 0x28d: 0x0e, 0x28e: 0x0e, 0x28f: 0x0e, + 0x290: 0x0e, 0x291: 0x0e, 0x292: 0x0e, 0x293: 0x0e, 0x294: 0x0e, 0x295: 0x0e, 0x296: 0x0e, 0x297: 0x0e, + 0x298: 0x0e, 0x299: 0x0e, 0x29a: 0x0e, 0x29b: 0x0e, 0x29c: 0x0e, 0x29d: 0x0e, 0x29e: 0x3e, + // Block 0xb, offset 0x2c0 + 0x2c0: 0x08, 0x2c1: 0x08, 0x2c2: 0x08, 0x2c3: 0x08, 0x2c4: 0x08, 0x2c5: 0x08, 0x2c6: 0x08, 0x2c7: 0x08, + 0x2c8: 0x08, 0x2c9: 0x08, 0x2ca: 0x08, 0x2cb: 0x08, 0x2cc: 0x08, 0x2cd: 0x08, 0x2ce: 0x08, 0x2cf: 0x08, + 0x2d0: 0x08, 0x2d1: 0x08, 0x2d2: 0x08, 0x2d3: 0x08, 0x2d4: 0x08, 0x2d5: 0x08, 0x2d6: 0x08, 0x2d7: 0x08, + 0x2d8: 0x08, 0x2d9: 0x08, 0x2da: 0x08, 0x2db: 0x08, 0x2dc: 0x08, 0x2dd: 0x08, 0x2de: 0x08, 0x2df: 0x08, + 0x2e0: 0x08, 0x2e1: 0x08, 0x2e2: 0x08, 0x2e3: 0x08, 0x2e4: 0x08, 0x2e5: 0x08, 0x2e6: 0x08, 0x2e7: 0x08, + 0x2e8: 0x08, 0x2e9: 0x08, 0x2ea: 0x08, 0x2eb: 0x08, 0x2ec: 0x08, 0x2ed: 0x08, 0x2ee: 0x08, 0x2ef: 0x08, + 0x2f0: 0x08, 0x2f1: 0x08, 0x2f2: 0x08, 0x2f3: 0x08, 0x2f4: 0x08, 0x2f5: 0x08, 0x2f6: 0x08, 0x2f7: 0x08, + 0x2f8: 0x08, 0x2f9: 0x08, 0x2fa: 0x08, 0x2fb: 0x08, 0x2fc: 0x08, 0x2fd: 0x08, 0x2fe: 0x08, 0x2ff: 0x08, + // Block 0xc, offset 0x300 + 0x300: 0x08, 0x301: 0x08, 0x302: 0x08, 0x303: 0x08, 0x304: 0x08, 0x305: 0x08, 0x306: 0x08, 0x307: 0x08, + 0x308: 0x08, 0x309: 0x08, 0x30a: 0x08, 0x30b: 0x08, 0x30c: 0x08, 0x30d: 0x08, 0x30e: 0x08, 0x30f: 0x08, + 0x310: 0x08, 0x311: 0x08, 0x312: 0x08, 0x313: 0x08, 0x314: 0x08, 0x315: 0x08, 0x316: 0x08, 0x317: 0x08, + 0x318: 0x08, 0x319: 0x08, 0x31a: 0x08, 0x31b: 0x08, 0x31c: 0x08, 0x31d: 0x08, 0x31e: 0x08, 0x31f: 0x08, + 0x320: 0x08, 0x321: 0x08, 0x322: 0x08, 0x323: 0x08, 0x324: 0x0e, 0x325: 0x0e, 0x326: 0x0e, 0x327: 0x0e, + 0x328: 0x0e, 0x329: 0x0e, 0x32a: 0x0e, 0x32b: 0x0e, + 0x338: 0x3f, 0x339: 0x40, 0x33c: 0x41, 0x33d: 0x42, 0x33e: 0x43, 0x33f: 0x44, + // Block 0xd, offset 0x340 + 0x37f: 0x45, + // Block 0xe, offset 0x380 + 0x380: 0x0e, 0x381: 0x0e, 0x382: 0x0e, 0x383: 0x0e, 0x384: 0x0e, 0x385: 0x0e, 0x386: 0x0e, 0x387: 0x0e, + 0x388: 0x0e, 0x389: 0x0e, 0x38a: 0x0e, 0x38b: 0x0e, 0x38c: 0x0e, 0x38d: 0x0e, 0x38e: 0x0e, 0x38f: 0x0e, + 0x390: 0x0e, 0x391: 0x0e, 0x392: 0x0e, 0x393: 0x0e, 0x394: 0x0e, 0x395: 0x0e, 0x396: 0x0e, 0x397: 0x0e, + 0x398: 0x0e, 0x399: 0x0e, 0x39a: 0x0e, 0x39b: 0x0e, 0x39c: 0x0e, 0x39d: 0x0e, 0x39e: 0x0e, 0x39f: 0x46, + 0x3a0: 0x0e, 
0x3a1: 0x0e, 0x3a2: 0x0e, 0x3a3: 0x0e, 0x3a4: 0x0e, 0x3a5: 0x0e, 0x3a6: 0x0e, 0x3a7: 0x0e, + 0x3a8: 0x0e, 0x3a9: 0x0e, 0x3aa: 0x0e, 0x3ab: 0x47, + // Block 0xf, offset 0x3c0 + 0x3c0: 0x0e, 0x3c1: 0x0e, 0x3c2: 0x0e, 0x3c3: 0x0e, 0x3c4: 0x48, 0x3c5: 0x49, 0x3c6: 0x0e, 0x3c7: 0x0e, + 0x3c8: 0x0e, 0x3c9: 0x0e, 0x3ca: 0x0e, 0x3cb: 0x4a, + // Block 0x10, offset 0x400 + 0x400: 0x4b, 0x403: 0x4c, 0x404: 0x4d, 0x405: 0x4e, 0x406: 0x4f, + 0x408: 0x50, 0x409: 0x51, 0x40c: 0x52, 0x40d: 0x53, 0x40e: 0x54, 0x40f: 0x55, + 0x410: 0x3a, 0x411: 0x56, 0x412: 0x0e, 0x413: 0x57, 0x414: 0x58, 0x415: 0x59, 0x416: 0x5a, 0x417: 0x5b, + 0x418: 0x0e, 0x419: 0x5c, 0x41a: 0x0e, 0x41b: 0x5d, 0x41f: 0x5e, + 0x424: 0x5f, 0x425: 0x60, 0x426: 0x61, 0x427: 0x62, + 0x429: 0x63, 0x42a: 0x64, + // Block 0x11, offset 0x440 + 0x456: 0x0b, 0x457: 0x06, + 0x458: 0x0c, 0x45b: 0x0d, 0x45f: 0x0e, + 0x460: 0x06, 0x461: 0x06, 0x462: 0x06, 0x463: 0x06, 0x464: 0x06, 0x465: 0x06, 0x466: 0x06, 0x467: 0x06, + 0x468: 0x06, 0x469: 0x06, 0x46a: 0x06, 0x46b: 0x06, 0x46c: 0x06, 0x46d: 0x06, 0x46e: 0x06, 0x46f: 0x06, + 0x470: 0x06, 0x471: 0x06, 0x472: 0x06, 0x473: 0x06, 0x474: 0x06, 0x475: 0x06, 0x476: 0x06, 0x477: 0x06, + 0x478: 0x06, 0x479: 0x06, 0x47a: 0x06, 0x47b: 0x06, 0x47c: 0x06, 0x47d: 0x06, 0x47e: 0x06, 0x47f: 0x06, + // Block 0x12, offset 0x480 + 0x484: 0x08, 0x485: 0x08, 0x486: 0x08, 0x487: 0x09, + // Block 0x13, offset 0x4c0 + 0x4c0: 0x08, 0x4c1: 0x08, 0x4c2: 0x08, 0x4c3: 0x08, 0x4c4: 0x08, 0x4c5: 0x08, 0x4c6: 0x08, 0x4c7: 0x08, + 0x4c8: 0x08, 0x4c9: 0x08, 0x4ca: 0x08, 0x4cb: 0x08, 0x4cc: 0x08, 0x4cd: 0x08, 0x4ce: 0x08, 0x4cf: 0x08, + 0x4d0: 0x08, 0x4d1: 0x08, 0x4d2: 0x08, 0x4d3: 0x08, 0x4d4: 0x08, 0x4d5: 0x08, 0x4d6: 0x08, 0x4d7: 0x08, + 0x4d8: 0x08, 0x4d9: 0x08, 0x4da: 0x08, 0x4db: 0x08, 0x4dc: 0x08, 0x4dd: 0x08, 0x4de: 0x08, 0x4df: 0x08, + 0x4e0: 0x08, 0x4e1: 0x08, 0x4e2: 0x08, 0x4e3: 0x08, 0x4e4: 0x08, 0x4e5: 0x08, 0x4e6: 0x08, 0x4e7: 0x08, + 0x4e8: 0x08, 0x4e9: 0x08, 0x4ea: 0x08, 0x4eb: 0x08, 0x4ec: 0x08, 0x4ed: 0x08, 0x4ee: 0x08, 0x4ef: 0x08, + 0x4f0: 0x08, 0x4f1: 0x08, 0x4f2: 0x08, 0x4f3: 0x08, 0x4f4: 0x08, 0x4f5: 0x08, 0x4f6: 0x08, 0x4f7: 0x08, + 0x4f8: 0x08, 0x4f9: 0x08, 0x4fa: 0x08, 0x4fb: 0x08, 0x4fc: 0x08, 0x4fd: 0x08, 0x4fe: 0x08, 0x4ff: 0x65, + // Block 0x14, offset 0x500 + 0x520: 0x10, + 0x530: 0x09, 0x531: 0x09, 0x532: 0x09, 0x533: 0x09, 0x534: 0x09, 0x535: 0x09, 0x536: 0x09, 0x537: 0x09, + 0x538: 0x09, 0x539: 0x09, 0x53a: 0x09, 0x53b: 0x09, 0x53c: 0x09, 0x53d: 0x09, 0x53e: 0x09, 0x53f: 0x11, + // Block 0x15, offset 0x540 + 0x540: 0x09, 0x541: 0x09, 0x542: 0x09, 0x543: 0x09, 0x544: 0x09, 0x545: 0x09, 0x546: 0x09, 0x547: 0x09, + 0x548: 0x09, 0x549: 0x09, 0x54a: 0x09, 0x54b: 0x09, 0x54c: 0x09, 0x54d: 0x09, 0x54e: 0x09, 0x54f: 0x11, +} + +// inverseData contains 4-byte entries of the following format: +// <0 padding> +// The last byte of the UTF-8-encoded rune is xor-ed with the last byte of the +// UTF-8 encoding of the original rune. Mappings often have the following +// pattern: +// A -> A (U+FF21 -> U+0041) +// B -> B (U+FF22 -> U+0042) +// ... +// By xor-ing the last byte the same entry can be shared by many mappings. This +// reduces the total number of distinct entries by about two thirds. +// The resulting entry for the aforementioned mappings is +// { 0x01, 0xE0, 0x00, 0x00 } +// Using this entry to map U+FF21 (UTF-8 [EF BC A1]), we get +// E0 ^ A1 = 41. +// Similarly, for U+FF22 (UTF-8 [EF BC A2]), we get +// E0 ^ A2 = 42. 
+// Note that because of the xor-ing, the byte sequence stored in the entry is +// not valid UTF-8. +var inverseData = [150][4]byte{ + {0x00, 0x00, 0x00, 0x00}, + {0x03, 0xe3, 0x80, 0xa0}, + {0x03, 0xef, 0xbc, 0xa0}, + {0x03, 0xef, 0xbc, 0xe0}, + {0x03, 0xef, 0xbd, 0xe0}, + {0x03, 0xef, 0xbf, 0x02}, + {0x03, 0xef, 0xbf, 0x00}, + {0x03, 0xef, 0xbf, 0x0e}, + {0x03, 0xef, 0xbf, 0x0c}, + {0x03, 0xef, 0xbf, 0x0f}, + {0x03, 0xef, 0xbf, 0x39}, + {0x03, 0xef, 0xbf, 0x3b}, + {0x03, 0xef, 0xbf, 0x3f}, + {0x03, 0xef, 0xbf, 0x2a}, + {0x03, 0xef, 0xbf, 0x0d}, + {0x03, 0xef, 0xbf, 0x25}, + {0x03, 0xef, 0xbd, 0x1a}, + {0x03, 0xef, 0xbd, 0x26}, + {0x01, 0xa0, 0x00, 0x00}, + {0x03, 0xef, 0xbd, 0x25}, + {0x03, 0xef, 0xbd, 0x23}, + {0x03, 0xef, 0xbd, 0x2e}, + {0x03, 0xef, 0xbe, 0x07}, + {0x03, 0xef, 0xbe, 0x05}, + {0x03, 0xef, 0xbd, 0x06}, + {0x03, 0xef, 0xbd, 0x13}, + {0x03, 0xef, 0xbd, 0x0b}, + {0x03, 0xef, 0xbd, 0x16}, + {0x03, 0xef, 0xbd, 0x0c}, + {0x03, 0xef, 0xbd, 0x15}, + {0x03, 0xef, 0xbd, 0x0d}, + {0x03, 0xef, 0xbd, 0x1c}, + {0x03, 0xef, 0xbd, 0x02}, + {0x03, 0xef, 0xbd, 0x1f}, + {0x03, 0xef, 0xbd, 0x1d}, + {0x03, 0xef, 0xbd, 0x17}, + {0x03, 0xef, 0xbd, 0x08}, + {0x03, 0xef, 0xbd, 0x09}, + {0x03, 0xef, 0xbd, 0x0e}, + {0x03, 0xef, 0xbd, 0x04}, + {0x03, 0xef, 0xbd, 0x05}, + {0x03, 0xef, 0xbe, 0x3f}, + {0x03, 0xef, 0xbe, 0x00}, + {0x03, 0xef, 0xbd, 0x2c}, + {0x03, 0xef, 0xbe, 0x06}, + {0x03, 0xef, 0xbe, 0x0c}, + {0x03, 0xef, 0xbe, 0x0f}, + {0x03, 0xef, 0xbe, 0x0d}, + {0x03, 0xef, 0xbe, 0x0b}, + {0x03, 0xef, 0xbe, 0x19}, + {0x03, 0xef, 0xbe, 0x15}, + {0x03, 0xef, 0xbe, 0x11}, + {0x03, 0xef, 0xbe, 0x31}, + {0x03, 0xef, 0xbe, 0x33}, + {0x03, 0xef, 0xbd, 0x0f}, + {0x03, 0xef, 0xbe, 0x30}, + {0x03, 0xef, 0xbe, 0x3e}, + {0x03, 0xef, 0xbe, 0x32}, + {0x03, 0xef, 0xbe, 0x36}, + {0x03, 0xef, 0xbd, 0x14}, + {0x03, 0xef, 0xbe, 0x2e}, + {0x03, 0xef, 0xbd, 0x1e}, + {0x03, 0xef, 0xbe, 0x10}, + {0x03, 0xef, 0xbf, 0x13}, + {0x03, 0xef, 0xbf, 0x15}, + {0x03, 0xef, 0xbf, 0x17}, + {0x03, 0xef, 0xbf, 0x1f}, + {0x03, 0xef, 0xbf, 0x1d}, + {0x03, 0xef, 0xbf, 0x1b}, + {0x03, 0xef, 0xbf, 0x09}, + {0x03, 0xef, 0xbf, 0x0b}, + {0x03, 0xef, 0xbf, 0x37}, + {0x03, 0xef, 0xbe, 0x04}, + {0x01, 0xe0, 0x00, 0x00}, + {0x03, 0xe2, 0xa6, 0x1a}, + {0x03, 0xe2, 0xa6, 0x26}, + {0x03, 0xe3, 0x80, 0x23}, + {0x03, 0xe3, 0x80, 0x2e}, + {0x03, 0xe3, 0x80, 0x25}, + {0x03, 0xe3, 0x83, 0x1e}, + {0x03, 0xe3, 0x83, 0x14}, + {0x03, 0xe3, 0x82, 0x06}, + {0x03, 0xe3, 0x82, 0x0b}, + {0x03, 0xe3, 0x82, 0x0c}, + {0x03, 0xe3, 0x82, 0x0d}, + {0x03, 0xe3, 0x82, 0x02}, + {0x03, 0xe3, 0x83, 0x0f}, + {0x03, 0xe3, 0x83, 0x08}, + {0x03, 0xe3, 0x83, 0x09}, + {0x03, 0xe3, 0x83, 0x2c}, + {0x03, 0xe3, 0x83, 0x0c}, + {0x03, 0xe3, 0x82, 0x13}, + {0x03, 0xe3, 0x82, 0x16}, + {0x03, 0xe3, 0x82, 0x15}, + {0x03, 0xe3, 0x82, 0x1c}, + {0x03, 0xe3, 0x82, 0x1f}, + {0x03, 0xe3, 0x82, 0x1d}, + {0x03, 0xe3, 0x82, 0x1a}, + {0x03, 0xe3, 0x82, 0x17}, + {0x03, 0xe3, 0x82, 0x08}, + {0x03, 0xe3, 0x82, 0x09}, + {0x03, 0xe3, 0x82, 0x0e}, + {0x03, 0xe3, 0x82, 0x04}, + {0x03, 0xe3, 0x82, 0x05}, + {0x03, 0xe3, 0x82, 0x3f}, + {0x03, 0xe3, 0x83, 0x00}, + {0x03, 0xe3, 0x83, 0x06}, + {0x03, 0xe3, 0x83, 0x05}, + {0x03, 0xe3, 0x83, 0x0d}, + {0x03, 0xe3, 0x83, 0x0b}, + {0x03, 0xe3, 0x83, 0x07}, + {0x03, 0xe3, 0x83, 0x19}, + {0x03, 0xe3, 0x83, 0x15}, + {0x03, 0xe3, 0x83, 0x11}, + {0x03, 0xe3, 0x83, 0x31}, + {0x03, 0xe3, 0x83, 0x33}, + {0x03, 0xe3, 0x83, 0x30}, + {0x03, 0xe3, 0x83, 0x3e}, + {0x03, 0xe3, 0x83, 0x32}, + {0x03, 0xe3, 0x83, 0x36}, + {0x03, 0xe3, 0x83, 0x2e}, + {0x03, 0xe3, 0x82, 0x07}, + 
{0x03, 0xe3, 0x85, 0x04}, + {0x03, 0xe3, 0x84, 0x10}, + {0x03, 0xe3, 0x85, 0x30}, + {0x03, 0xe3, 0x85, 0x0d}, + {0x03, 0xe3, 0x85, 0x13}, + {0x03, 0xe3, 0x85, 0x15}, + {0x03, 0xe3, 0x85, 0x17}, + {0x03, 0xe3, 0x85, 0x1f}, + {0x03, 0xe3, 0x85, 0x1d}, + {0x03, 0xe3, 0x85, 0x1b}, + {0x03, 0xe3, 0x85, 0x09}, + {0x03, 0xe3, 0x85, 0x0f}, + {0x03, 0xe3, 0x85, 0x0b}, + {0x03, 0xe3, 0x85, 0x37}, + {0x03, 0xe3, 0x85, 0x3b}, + {0x03, 0xe3, 0x85, 0x39}, + {0x03, 0xe3, 0x85, 0x3f}, + {0x02, 0xc2, 0x02, 0x00}, + {0x02, 0xc2, 0x0e, 0x00}, + {0x02, 0xc2, 0x0c, 0x00}, + {0x02, 0xc2, 0x00, 0x00}, + {0x03, 0xe2, 0x82, 0x0f}, + {0x03, 0xe2, 0x94, 0x2a}, + {0x03, 0xe2, 0x86, 0x39}, + {0x03, 0xe2, 0x86, 0x3b}, + {0x03, 0xe2, 0x86, 0x3f}, + {0x03, 0xe2, 0x96, 0x0d}, + {0x03, 0xe2, 0x97, 0x25}, +} + +// Total table size 15320 bytes (14KiB) diff --git a/vendor/golang.org/x/text/width/tables13.0.0.go b/vendor/golang.org/x/text/width/tables13.0.0.go new file mode 100644 index 000000000..bac3f1aee --- /dev/null +++ b/vendor/golang.org/x/text/width/tables13.0.0.go @@ -0,0 +1,1352 @@ +// Code generated by running "go generate" in golang.org/x/text. DO NOT EDIT. + +//go:build go1.16 +// +build go1.16 + +package width + +// UnicodeVersion is the Unicode version from which the tables in this package are derived. +const UnicodeVersion = "13.0.0" + +// lookup returns the trie value for the first UTF-8 encoding in s and +// the width in bytes of this encoding. The size will be 0 if s does not +// hold enough bytes to complete the encoding. len(s) must be greater than 0. +func (t *widthTrie) lookup(s []byte) (v uint16, sz int) { + c0 := s[0] + switch { + case c0 < 0x80: // is ASCII + return widthValues[c0], 1 + case c0 < 0xC2: + return 0, 1 // Illegal UTF-8: not a starter, not ASCII. + case c0 < 0xE0: // 2-byte UTF-8 + if len(s) < 2 { + return 0, 0 + } + i := widthIndex[c0] + c1 := s[1] + if c1 < 0x80 || 0xC0 <= c1 { + return 0, 1 // Illegal UTF-8: not a continuation byte. + } + return t.lookupValue(uint32(i), c1), 2 + case c0 < 0xF0: // 3-byte UTF-8 + if len(s) < 3 { + return 0, 0 + } + i := widthIndex[c0] + c1 := s[1] + if c1 < 0x80 || 0xC0 <= c1 { + return 0, 1 // Illegal UTF-8: not a continuation byte. + } + o := uint32(i)<<6 + uint32(c1) + i = widthIndex[o] + c2 := s[2] + if c2 < 0x80 || 0xC0 <= c2 { + return 0, 2 // Illegal UTF-8: not a continuation byte. + } + return t.lookupValue(uint32(i), c2), 3 + case c0 < 0xF8: // 4-byte UTF-8 + if len(s) < 4 { + return 0, 0 + } + i := widthIndex[c0] + c1 := s[1] + if c1 < 0x80 || 0xC0 <= c1 { + return 0, 1 // Illegal UTF-8: not a continuation byte. + } + o := uint32(i)<<6 + uint32(c1) + i = widthIndex[o] + c2 := s[2] + if c2 < 0x80 || 0xC0 <= c2 { + return 0, 2 // Illegal UTF-8: not a continuation byte. + } + o = uint32(i)<<6 + uint32(c2) + i = widthIndex[o] + c3 := s[3] + if c3 < 0x80 || 0xC0 <= c3 { + return 0, 3 // Illegal UTF-8: not a continuation byte. + } + return t.lookupValue(uint32(i), c3), 4 + } + // Illegal rune + return 0, 1 +} + +// lookupUnsafe returns the trie value for the first UTF-8 encoding in s. +// s must start with a full and valid UTF-8 encoded rune. 
+func (t *widthTrie) lookupUnsafe(s []byte) uint16 { + c0 := s[0] + if c0 < 0x80 { // is ASCII + return widthValues[c0] + } + i := widthIndex[c0] + if c0 < 0xE0 { // 2-byte UTF-8 + return t.lookupValue(uint32(i), s[1]) + } + i = widthIndex[uint32(i)<<6+uint32(s[1])] + if c0 < 0xF0 { // 3-byte UTF-8 + return t.lookupValue(uint32(i), s[2]) + } + i = widthIndex[uint32(i)<<6+uint32(s[2])] + if c0 < 0xF8 { // 4-byte UTF-8 + return t.lookupValue(uint32(i), s[3]) + } + return 0 +} + +// lookupString returns the trie value for the first UTF-8 encoding in s and +// the width in bytes of this encoding. The size will be 0 if s does not +// hold enough bytes to complete the encoding. len(s) must be greater than 0. +func (t *widthTrie) lookupString(s string) (v uint16, sz int) { + c0 := s[0] + switch { + case c0 < 0x80: // is ASCII + return widthValues[c0], 1 + case c0 < 0xC2: + return 0, 1 // Illegal UTF-8: not a starter, not ASCII. + case c0 < 0xE0: // 2-byte UTF-8 + if len(s) < 2 { + return 0, 0 + } + i := widthIndex[c0] + c1 := s[1] + if c1 < 0x80 || 0xC0 <= c1 { + return 0, 1 // Illegal UTF-8: not a continuation byte. + } + return t.lookupValue(uint32(i), c1), 2 + case c0 < 0xF0: // 3-byte UTF-8 + if len(s) < 3 { + return 0, 0 + } + i := widthIndex[c0] + c1 := s[1] + if c1 < 0x80 || 0xC0 <= c1 { + return 0, 1 // Illegal UTF-8: not a continuation byte. + } + o := uint32(i)<<6 + uint32(c1) + i = widthIndex[o] + c2 := s[2] + if c2 < 0x80 || 0xC0 <= c2 { + return 0, 2 // Illegal UTF-8: not a continuation byte. + } + return t.lookupValue(uint32(i), c2), 3 + case c0 < 0xF8: // 4-byte UTF-8 + if len(s) < 4 { + return 0, 0 + } + i := widthIndex[c0] + c1 := s[1] + if c1 < 0x80 || 0xC0 <= c1 { + return 0, 1 // Illegal UTF-8: not a continuation byte. + } + o := uint32(i)<<6 + uint32(c1) + i = widthIndex[o] + c2 := s[2] + if c2 < 0x80 || 0xC0 <= c2 { + return 0, 2 // Illegal UTF-8: not a continuation byte. + } + o = uint32(i)<<6 + uint32(c2) + i = widthIndex[o] + c3 := s[3] + if c3 < 0x80 || 0xC0 <= c3 { + return 0, 3 // Illegal UTF-8: not a continuation byte. + } + return t.lookupValue(uint32(i), c3), 4 + } + // Illegal rune + return 0, 1 +} + +// lookupStringUnsafe returns the trie value for the first UTF-8 encoding in s. +// s must start with a full and valid UTF-8 encoded rune. +func (t *widthTrie) lookupStringUnsafe(s string) uint16 { + c0 := s[0] + if c0 < 0x80 { // is ASCII + return widthValues[c0] + } + i := widthIndex[c0] + if c0 < 0xE0 { // 2-byte UTF-8 + return t.lookupValue(uint32(i), s[1]) + } + i = widthIndex[uint32(i)<<6+uint32(s[1])] + if c0 < 0xF0 { // 3-byte UTF-8 + return t.lookupValue(uint32(i), s[2]) + } + i = widthIndex[uint32(i)<<6+uint32(s[2])] + if c0 < 0xF8 { // 4-byte UTF-8 + return t.lookupValue(uint32(i), s[3]) + } + return 0 +} + +// widthTrie. Total size: 14848 bytes (14.50 KiB). Checksum: 17e24343536472f6. +type widthTrie struct{} + +func newWidthTrie(i int) *widthTrie { + return &widthTrie{} +} + +// lookupValue determines the type of block n and looks up the value for b. +func (t *widthTrie) lookupValue(n uint32, b byte) uint16 { + switch { + default: + return uint16(widthValues[n<<6+uint32(b)]) + } +} + +// widthValues: 105 blocks, 6720 entries, 13440 bytes +// The third block is the zero block. 
+var widthValues = [6720]uint16{ + // Block 0x0, offset 0x0 + 0x20: 0x6001, 0x21: 0x6002, 0x22: 0x6002, 0x23: 0x6002, + 0x24: 0x6002, 0x25: 0x6002, 0x26: 0x6002, 0x27: 0x6002, 0x28: 0x6002, 0x29: 0x6002, + 0x2a: 0x6002, 0x2b: 0x6002, 0x2c: 0x6002, 0x2d: 0x6002, 0x2e: 0x6002, 0x2f: 0x6002, + 0x30: 0x6002, 0x31: 0x6002, 0x32: 0x6002, 0x33: 0x6002, 0x34: 0x6002, 0x35: 0x6002, + 0x36: 0x6002, 0x37: 0x6002, 0x38: 0x6002, 0x39: 0x6002, 0x3a: 0x6002, 0x3b: 0x6002, + 0x3c: 0x6002, 0x3d: 0x6002, 0x3e: 0x6002, 0x3f: 0x6002, + // Block 0x1, offset 0x40 + 0x40: 0x6003, 0x41: 0x6003, 0x42: 0x6003, 0x43: 0x6003, 0x44: 0x6003, 0x45: 0x6003, + 0x46: 0x6003, 0x47: 0x6003, 0x48: 0x6003, 0x49: 0x6003, 0x4a: 0x6003, 0x4b: 0x6003, + 0x4c: 0x6003, 0x4d: 0x6003, 0x4e: 0x6003, 0x4f: 0x6003, 0x50: 0x6003, 0x51: 0x6003, + 0x52: 0x6003, 0x53: 0x6003, 0x54: 0x6003, 0x55: 0x6003, 0x56: 0x6003, 0x57: 0x6003, + 0x58: 0x6003, 0x59: 0x6003, 0x5a: 0x6003, 0x5b: 0x6003, 0x5c: 0x6003, 0x5d: 0x6003, + 0x5e: 0x6003, 0x5f: 0x6003, 0x60: 0x6004, 0x61: 0x6004, 0x62: 0x6004, 0x63: 0x6004, + 0x64: 0x6004, 0x65: 0x6004, 0x66: 0x6004, 0x67: 0x6004, 0x68: 0x6004, 0x69: 0x6004, + 0x6a: 0x6004, 0x6b: 0x6004, 0x6c: 0x6004, 0x6d: 0x6004, 0x6e: 0x6004, 0x6f: 0x6004, + 0x70: 0x6004, 0x71: 0x6004, 0x72: 0x6004, 0x73: 0x6004, 0x74: 0x6004, 0x75: 0x6004, + 0x76: 0x6004, 0x77: 0x6004, 0x78: 0x6004, 0x79: 0x6004, 0x7a: 0x6004, 0x7b: 0x6004, + 0x7c: 0x6004, 0x7d: 0x6004, 0x7e: 0x6004, + // Block 0x2, offset 0x80 + // Block 0x3, offset 0xc0 + 0xe1: 0x2000, 0xe2: 0x6005, 0xe3: 0x6005, + 0xe4: 0x2000, 0xe5: 0x6006, 0xe6: 0x6005, 0xe7: 0x2000, 0xe8: 0x2000, + 0xea: 0x2000, 0xec: 0x6007, 0xed: 0x2000, 0xee: 0x2000, 0xef: 0x6008, + 0xf0: 0x2000, 0xf1: 0x2000, 0xf2: 0x2000, 0xf3: 0x2000, 0xf4: 0x2000, + 0xf6: 0x2000, 0xf7: 0x2000, 0xf8: 0x2000, 0xf9: 0x2000, 0xfa: 0x2000, + 0xfc: 0x2000, 0xfd: 0x2000, 0xfe: 0x2000, 0xff: 0x2000, + // Block 0x4, offset 0x100 + 0x106: 0x2000, + 0x110: 0x2000, + 0x117: 0x2000, + 0x118: 0x2000, + 0x11e: 0x2000, 0x11f: 0x2000, 0x120: 0x2000, 0x121: 0x2000, + 0x126: 0x2000, 0x128: 0x2000, 0x129: 0x2000, + 0x12a: 0x2000, 0x12c: 0x2000, 0x12d: 0x2000, + 0x130: 0x2000, 0x132: 0x2000, 0x133: 0x2000, + 0x137: 0x2000, 0x138: 0x2000, 0x139: 0x2000, 0x13a: 0x2000, + 0x13c: 0x2000, 0x13e: 0x2000, + // Block 0x5, offset 0x140 + 0x141: 0x2000, + 0x151: 0x2000, + 0x153: 0x2000, + 0x15b: 0x2000, + 0x166: 0x2000, 0x167: 0x2000, + 0x16b: 0x2000, + 0x171: 0x2000, 0x172: 0x2000, 0x173: 0x2000, + 0x178: 0x2000, + 0x17f: 0x2000, + // Block 0x6, offset 0x180 + 0x180: 0x2000, 0x181: 0x2000, 0x182: 0x2000, 0x184: 0x2000, + 0x188: 0x2000, 0x189: 0x2000, 0x18a: 0x2000, 0x18b: 0x2000, + 0x18d: 0x2000, + 0x192: 0x2000, 0x193: 0x2000, + 0x1a6: 0x2000, 0x1a7: 0x2000, + 0x1ab: 0x2000, + // Block 0x7, offset 0x1c0 + 0x1ce: 0x2000, 0x1d0: 0x2000, + 0x1d2: 0x2000, 0x1d4: 0x2000, 0x1d6: 0x2000, + 0x1d8: 0x2000, 0x1da: 0x2000, 0x1dc: 0x2000, + // Block 0x8, offset 0x200 + 0x211: 0x2000, + 0x221: 0x2000, + // Block 0x9, offset 0x240 + 0x244: 0x2000, + 0x247: 0x2000, 0x249: 0x2000, 0x24a: 0x2000, 0x24b: 0x2000, + 0x24d: 0x2000, 0x250: 0x2000, + 0x258: 0x2000, 0x259: 0x2000, 0x25a: 0x2000, 0x25b: 0x2000, 0x25d: 0x2000, + 0x25f: 0x2000, + // Block 0xa, offset 0x280 + 0x280: 0x2000, 0x281: 0x2000, 0x282: 0x2000, 0x283: 0x2000, 0x284: 0x2000, 0x285: 0x2000, + 0x286: 0x2000, 0x287: 0x2000, 0x288: 0x2000, 0x289: 0x2000, 0x28a: 0x2000, 0x28b: 0x2000, + 0x28c: 0x2000, 0x28d: 0x2000, 0x28e: 0x2000, 0x28f: 0x2000, 0x290: 0x2000, 0x291: 0x2000, + 0x292: 0x2000, 0x293: 
0x2000, 0x294: 0x2000, 0x295: 0x2000, 0x296: 0x2000, 0x297: 0x2000, + 0x298: 0x2000, 0x299: 0x2000, 0x29a: 0x2000, 0x29b: 0x2000, 0x29c: 0x2000, 0x29d: 0x2000, + 0x29e: 0x2000, 0x29f: 0x2000, 0x2a0: 0x2000, 0x2a1: 0x2000, 0x2a2: 0x2000, 0x2a3: 0x2000, + 0x2a4: 0x2000, 0x2a5: 0x2000, 0x2a6: 0x2000, 0x2a7: 0x2000, 0x2a8: 0x2000, 0x2a9: 0x2000, + 0x2aa: 0x2000, 0x2ab: 0x2000, 0x2ac: 0x2000, 0x2ad: 0x2000, 0x2ae: 0x2000, 0x2af: 0x2000, + 0x2b0: 0x2000, 0x2b1: 0x2000, 0x2b2: 0x2000, 0x2b3: 0x2000, 0x2b4: 0x2000, 0x2b5: 0x2000, + 0x2b6: 0x2000, 0x2b7: 0x2000, 0x2b8: 0x2000, 0x2b9: 0x2000, 0x2ba: 0x2000, 0x2bb: 0x2000, + 0x2bc: 0x2000, 0x2bd: 0x2000, 0x2be: 0x2000, 0x2bf: 0x2000, + // Block 0xb, offset 0x2c0 + 0x2c0: 0x2000, 0x2c1: 0x2000, 0x2c2: 0x2000, 0x2c3: 0x2000, 0x2c4: 0x2000, 0x2c5: 0x2000, + 0x2c6: 0x2000, 0x2c7: 0x2000, 0x2c8: 0x2000, 0x2c9: 0x2000, 0x2ca: 0x2000, 0x2cb: 0x2000, + 0x2cc: 0x2000, 0x2cd: 0x2000, 0x2ce: 0x2000, 0x2cf: 0x2000, 0x2d0: 0x2000, 0x2d1: 0x2000, + 0x2d2: 0x2000, 0x2d3: 0x2000, 0x2d4: 0x2000, 0x2d5: 0x2000, 0x2d6: 0x2000, 0x2d7: 0x2000, + 0x2d8: 0x2000, 0x2d9: 0x2000, 0x2da: 0x2000, 0x2db: 0x2000, 0x2dc: 0x2000, 0x2dd: 0x2000, + 0x2de: 0x2000, 0x2df: 0x2000, 0x2e0: 0x2000, 0x2e1: 0x2000, 0x2e2: 0x2000, 0x2e3: 0x2000, + 0x2e4: 0x2000, 0x2e5: 0x2000, 0x2e6: 0x2000, 0x2e7: 0x2000, 0x2e8: 0x2000, 0x2e9: 0x2000, + 0x2ea: 0x2000, 0x2eb: 0x2000, 0x2ec: 0x2000, 0x2ed: 0x2000, 0x2ee: 0x2000, 0x2ef: 0x2000, + // Block 0xc, offset 0x300 + 0x311: 0x2000, + 0x312: 0x2000, 0x313: 0x2000, 0x314: 0x2000, 0x315: 0x2000, 0x316: 0x2000, 0x317: 0x2000, + 0x318: 0x2000, 0x319: 0x2000, 0x31a: 0x2000, 0x31b: 0x2000, 0x31c: 0x2000, 0x31d: 0x2000, + 0x31e: 0x2000, 0x31f: 0x2000, 0x320: 0x2000, 0x321: 0x2000, 0x323: 0x2000, + 0x324: 0x2000, 0x325: 0x2000, 0x326: 0x2000, 0x327: 0x2000, 0x328: 0x2000, 0x329: 0x2000, + 0x331: 0x2000, 0x332: 0x2000, 0x333: 0x2000, 0x334: 0x2000, 0x335: 0x2000, + 0x336: 0x2000, 0x337: 0x2000, 0x338: 0x2000, 0x339: 0x2000, 0x33a: 0x2000, 0x33b: 0x2000, + 0x33c: 0x2000, 0x33d: 0x2000, 0x33e: 0x2000, 0x33f: 0x2000, + // Block 0xd, offset 0x340 + 0x340: 0x2000, 0x341: 0x2000, 0x343: 0x2000, 0x344: 0x2000, 0x345: 0x2000, + 0x346: 0x2000, 0x347: 0x2000, 0x348: 0x2000, 0x349: 0x2000, + // Block 0xe, offset 0x380 + 0x381: 0x2000, + 0x390: 0x2000, 0x391: 0x2000, + 0x392: 0x2000, 0x393: 0x2000, 0x394: 0x2000, 0x395: 0x2000, 0x396: 0x2000, 0x397: 0x2000, + 0x398: 0x2000, 0x399: 0x2000, 0x39a: 0x2000, 0x39b: 0x2000, 0x39c: 0x2000, 0x39d: 0x2000, + 0x39e: 0x2000, 0x39f: 0x2000, 0x3a0: 0x2000, 0x3a1: 0x2000, 0x3a2: 0x2000, 0x3a3: 0x2000, + 0x3a4: 0x2000, 0x3a5: 0x2000, 0x3a6: 0x2000, 0x3a7: 0x2000, 0x3a8: 0x2000, 0x3a9: 0x2000, + 0x3aa: 0x2000, 0x3ab: 0x2000, 0x3ac: 0x2000, 0x3ad: 0x2000, 0x3ae: 0x2000, 0x3af: 0x2000, + 0x3b0: 0x2000, 0x3b1: 0x2000, 0x3b2: 0x2000, 0x3b3: 0x2000, 0x3b4: 0x2000, 0x3b5: 0x2000, + 0x3b6: 0x2000, 0x3b7: 0x2000, 0x3b8: 0x2000, 0x3b9: 0x2000, 0x3ba: 0x2000, 0x3bb: 0x2000, + 0x3bc: 0x2000, 0x3bd: 0x2000, 0x3be: 0x2000, 0x3bf: 0x2000, + // Block 0xf, offset 0x3c0 + 0x3c0: 0x2000, 0x3c1: 0x2000, 0x3c2: 0x2000, 0x3c3: 0x2000, 0x3c4: 0x2000, 0x3c5: 0x2000, + 0x3c6: 0x2000, 0x3c7: 0x2000, 0x3c8: 0x2000, 0x3c9: 0x2000, 0x3ca: 0x2000, 0x3cb: 0x2000, + 0x3cc: 0x2000, 0x3cd: 0x2000, 0x3ce: 0x2000, 0x3cf: 0x2000, 0x3d1: 0x2000, + // Block 0x10, offset 0x400 + 0x400: 0x4000, 0x401: 0x4000, 0x402: 0x4000, 0x403: 0x4000, 0x404: 0x4000, 0x405: 0x4000, + 0x406: 0x4000, 0x407: 0x4000, 0x408: 0x4000, 0x409: 0x4000, 0x40a: 0x4000, 0x40b: 0x4000, + 0x40c: 0x4000, 
0x40d: 0x4000, 0x40e: 0x4000, 0x40f: 0x4000, 0x410: 0x4000, 0x411: 0x4000, + 0x412: 0x4000, 0x413: 0x4000, 0x414: 0x4000, 0x415: 0x4000, 0x416: 0x4000, 0x417: 0x4000, + 0x418: 0x4000, 0x419: 0x4000, 0x41a: 0x4000, 0x41b: 0x4000, 0x41c: 0x4000, 0x41d: 0x4000, + 0x41e: 0x4000, 0x41f: 0x4000, 0x420: 0x4000, 0x421: 0x4000, 0x422: 0x4000, 0x423: 0x4000, + 0x424: 0x4000, 0x425: 0x4000, 0x426: 0x4000, 0x427: 0x4000, 0x428: 0x4000, 0x429: 0x4000, + 0x42a: 0x4000, 0x42b: 0x4000, 0x42c: 0x4000, 0x42d: 0x4000, 0x42e: 0x4000, 0x42f: 0x4000, + 0x430: 0x4000, 0x431: 0x4000, 0x432: 0x4000, 0x433: 0x4000, 0x434: 0x4000, 0x435: 0x4000, + 0x436: 0x4000, 0x437: 0x4000, 0x438: 0x4000, 0x439: 0x4000, 0x43a: 0x4000, 0x43b: 0x4000, + 0x43c: 0x4000, 0x43d: 0x4000, 0x43e: 0x4000, 0x43f: 0x4000, + // Block 0x11, offset 0x440 + 0x440: 0x4000, 0x441: 0x4000, 0x442: 0x4000, 0x443: 0x4000, 0x444: 0x4000, 0x445: 0x4000, + 0x446: 0x4000, 0x447: 0x4000, 0x448: 0x4000, 0x449: 0x4000, 0x44a: 0x4000, 0x44b: 0x4000, + 0x44c: 0x4000, 0x44d: 0x4000, 0x44e: 0x4000, 0x44f: 0x4000, 0x450: 0x4000, 0x451: 0x4000, + 0x452: 0x4000, 0x453: 0x4000, 0x454: 0x4000, 0x455: 0x4000, 0x456: 0x4000, 0x457: 0x4000, + 0x458: 0x4000, 0x459: 0x4000, 0x45a: 0x4000, 0x45b: 0x4000, 0x45c: 0x4000, 0x45d: 0x4000, + 0x45e: 0x4000, 0x45f: 0x4000, + // Block 0x12, offset 0x480 + 0x490: 0x2000, + 0x493: 0x2000, 0x494: 0x2000, 0x495: 0x2000, 0x496: 0x2000, + 0x498: 0x2000, 0x499: 0x2000, 0x49c: 0x2000, 0x49d: 0x2000, + 0x4a0: 0x2000, 0x4a1: 0x2000, 0x4a2: 0x2000, + 0x4a4: 0x2000, 0x4a5: 0x2000, 0x4a6: 0x2000, 0x4a7: 0x2000, + 0x4b0: 0x2000, 0x4b2: 0x2000, 0x4b3: 0x2000, 0x4b5: 0x2000, + 0x4bb: 0x2000, + 0x4be: 0x2000, + // Block 0x13, offset 0x4c0 + 0x4f4: 0x2000, + 0x4ff: 0x2000, + // Block 0x14, offset 0x500 + 0x501: 0x2000, 0x502: 0x2000, 0x503: 0x2000, 0x504: 0x2000, + 0x529: 0xa009, + 0x52c: 0x2000, + // Block 0x15, offset 0x540 + 0x543: 0x2000, 0x545: 0x2000, + 0x549: 0x2000, + 0x553: 0x2000, 0x556: 0x2000, + 0x561: 0x2000, 0x562: 0x2000, + 0x566: 0x2000, + 0x56b: 0x2000, + // Block 0x16, offset 0x580 + 0x593: 0x2000, 0x594: 0x2000, + 0x59b: 0x2000, 0x59c: 0x2000, 0x59d: 0x2000, + 0x59e: 0x2000, 0x5a0: 0x2000, 0x5a1: 0x2000, 0x5a2: 0x2000, 0x5a3: 0x2000, + 0x5a4: 0x2000, 0x5a5: 0x2000, 0x5a6: 0x2000, 0x5a7: 0x2000, 0x5a8: 0x2000, 0x5a9: 0x2000, + 0x5aa: 0x2000, 0x5ab: 0x2000, + 0x5b0: 0x2000, 0x5b1: 0x2000, 0x5b2: 0x2000, 0x5b3: 0x2000, 0x5b4: 0x2000, 0x5b5: 0x2000, + 0x5b6: 0x2000, 0x5b7: 0x2000, 0x5b8: 0x2000, 0x5b9: 0x2000, + // Block 0x17, offset 0x5c0 + 0x5c9: 0x2000, + 0x5d0: 0x200a, 0x5d1: 0x200b, + 0x5d2: 0x200a, 0x5d3: 0x200c, 0x5d4: 0x2000, 0x5d5: 0x2000, 0x5d6: 0x2000, 0x5d7: 0x2000, + 0x5d8: 0x2000, 0x5d9: 0x2000, + 0x5f8: 0x2000, 0x5f9: 0x2000, + // Block 0x18, offset 0x600 + 0x612: 0x2000, 0x614: 0x2000, + 0x627: 0x2000, + // Block 0x19, offset 0x640 + 0x640: 0x2000, 0x642: 0x2000, 0x643: 0x2000, + 0x647: 0x2000, 0x648: 0x2000, 0x64b: 0x2000, + 0x64f: 0x2000, 0x651: 0x2000, + 0x655: 0x2000, + 0x65a: 0x2000, 0x65d: 0x2000, + 0x65e: 0x2000, 0x65f: 0x2000, 0x660: 0x2000, 0x663: 0x2000, + 0x665: 0x2000, 0x667: 0x2000, 0x668: 0x2000, 0x669: 0x2000, + 0x66a: 0x2000, 0x66b: 0x2000, 0x66c: 0x2000, 0x66e: 0x2000, + 0x674: 0x2000, 0x675: 0x2000, + 0x676: 0x2000, 0x677: 0x2000, + 0x67c: 0x2000, 0x67d: 0x2000, + // Block 0x1a, offset 0x680 + 0x688: 0x2000, + 0x68c: 0x2000, + 0x692: 0x2000, + 0x6a0: 0x2000, 0x6a1: 0x2000, + 0x6a4: 0x2000, 0x6a5: 0x2000, 0x6a6: 0x2000, 0x6a7: 0x2000, + 0x6aa: 0x2000, 0x6ab: 0x2000, 0x6ae: 0x2000, 0x6af: 0x2000, + // 
Block 0x1b, offset 0x6c0 + 0x6c2: 0x2000, 0x6c3: 0x2000, + 0x6c6: 0x2000, 0x6c7: 0x2000, + 0x6d5: 0x2000, + 0x6d9: 0x2000, + 0x6e5: 0x2000, + 0x6ff: 0x2000, + // Block 0x1c, offset 0x700 + 0x712: 0x2000, + 0x71a: 0x4000, 0x71b: 0x4000, + 0x729: 0x4000, + 0x72a: 0x4000, + // Block 0x1d, offset 0x740 + 0x769: 0x4000, + 0x76a: 0x4000, 0x76b: 0x4000, 0x76c: 0x4000, + 0x770: 0x4000, 0x773: 0x4000, + // Block 0x1e, offset 0x780 + 0x7a0: 0x2000, 0x7a1: 0x2000, 0x7a2: 0x2000, 0x7a3: 0x2000, + 0x7a4: 0x2000, 0x7a5: 0x2000, 0x7a6: 0x2000, 0x7a7: 0x2000, 0x7a8: 0x2000, 0x7a9: 0x2000, + 0x7aa: 0x2000, 0x7ab: 0x2000, 0x7ac: 0x2000, 0x7ad: 0x2000, 0x7ae: 0x2000, 0x7af: 0x2000, + 0x7b0: 0x2000, 0x7b1: 0x2000, 0x7b2: 0x2000, 0x7b3: 0x2000, 0x7b4: 0x2000, 0x7b5: 0x2000, + 0x7b6: 0x2000, 0x7b7: 0x2000, 0x7b8: 0x2000, 0x7b9: 0x2000, 0x7ba: 0x2000, 0x7bb: 0x2000, + 0x7bc: 0x2000, 0x7bd: 0x2000, 0x7be: 0x2000, 0x7bf: 0x2000, + // Block 0x1f, offset 0x7c0 + 0x7c0: 0x2000, 0x7c1: 0x2000, 0x7c2: 0x2000, 0x7c3: 0x2000, 0x7c4: 0x2000, 0x7c5: 0x2000, + 0x7c6: 0x2000, 0x7c7: 0x2000, 0x7c8: 0x2000, 0x7c9: 0x2000, 0x7ca: 0x2000, 0x7cb: 0x2000, + 0x7cc: 0x2000, 0x7cd: 0x2000, 0x7ce: 0x2000, 0x7cf: 0x2000, 0x7d0: 0x2000, 0x7d1: 0x2000, + 0x7d2: 0x2000, 0x7d3: 0x2000, 0x7d4: 0x2000, 0x7d5: 0x2000, 0x7d6: 0x2000, 0x7d7: 0x2000, + 0x7d8: 0x2000, 0x7d9: 0x2000, 0x7da: 0x2000, 0x7db: 0x2000, 0x7dc: 0x2000, 0x7dd: 0x2000, + 0x7de: 0x2000, 0x7df: 0x2000, 0x7e0: 0x2000, 0x7e1: 0x2000, 0x7e2: 0x2000, 0x7e3: 0x2000, + 0x7e4: 0x2000, 0x7e5: 0x2000, 0x7e6: 0x2000, 0x7e7: 0x2000, 0x7e8: 0x2000, 0x7e9: 0x2000, + 0x7eb: 0x2000, 0x7ec: 0x2000, 0x7ed: 0x2000, 0x7ee: 0x2000, 0x7ef: 0x2000, + 0x7f0: 0x2000, 0x7f1: 0x2000, 0x7f2: 0x2000, 0x7f3: 0x2000, 0x7f4: 0x2000, 0x7f5: 0x2000, + 0x7f6: 0x2000, 0x7f7: 0x2000, 0x7f8: 0x2000, 0x7f9: 0x2000, 0x7fa: 0x2000, 0x7fb: 0x2000, + 0x7fc: 0x2000, 0x7fd: 0x2000, 0x7fe: 0x2000, 0x7ff: 0x2000, + // Block 0x20, offset 0x800 + 0x800: 0x2000, 0x801: 0x2000, 0x802: 0x200d, 0x803: 0x2000, 0x804: 0x2000, 0x805: 0x2000, + 0x806: 0x2000, 0x807: 0x2000, 0x808: 0x2000, 0x809: 0x2000, 0x80a: 0x2000, 0x80b: 0x2000, + 0x80c: 0x2000, 0x80d: 0x2000, 0x80e: 0x2000, 0x80f: 0x2000, 0x810: 0x2000, 0x811: 0x2000, + 0x812: 0x2000, 0x813: 0x2000, 0x814: 0x2000, 0x815: 0x2000, 0x816: 0x2000, 0x817: 0x2000, + 0x818: 0x2000, 0x819: 0x2000, 0x81a: 0x2000, 0x81b: 0x2000, 0x81c: 0x2000, 0x81d: 0x2000, + 0x81e: 0x2000, 0x81f: 0x2000, 0x820: 0x2000, 0x821: 0x2000, 0x822: 0x2000, 0x823: 0x2000, + 0x824: 0x2000, 0x825: 0x2000, 0x826: 0x2000, 0x827: 0x2000, 0x828: 0x2000, 0x829: 0x2000, + 0x82a: 0x2000, 0x82b: 0x2000, 0x82c: 0x2000, 0x82d: 0x2000, 0x82e: 0x2000, 0x82f: 0x2000, + 0x830: 0x2000, 0x831: 0x2000, 0x832: 0x2000, 0x833: 0x2000, 0x834: 0x2000, 0x835: 0x2000, + 0x836: 0x2000, 0x837: 0x2000, 0x838: 0x2000, 0x839: 0x2000, 0x83a: 0x2000, 0x83b: 0x2000, + 0x83c: 0x2000, 0x83d: 0x2000, 0x83e: 0x2000, 0x83f: 0x2000, + // Block 0x21, offset 0x840 + 0x840: 0x2000, 0x841: 0x2000, 0x842: 0x2000, 0x843: 0x2000, 0x844: 0x2000, 0x845: 0x2000, + 0x846: 0x2000, 0x847: 0x2000, 0x848: 0x2000, 0x849: 0x2000, 0x84a: 0x2000, 0x84b: 0x2000, + 0x850: 0x2000, 0x851: 0x2000, + 0x852: 0x2000, 0x853: 0x2000, 0x854: 0x2000, 0x855: 0x2000, 0x856: 0x2000, 0x857: 0x2000, + 0x858: 0x2000, 0x859: 0x2000, 0x85a: 0x2000, 0x85b: 0x2000, 0x85c: 0x2000, 0x85d: 0x2000, + 0x85e: 0x2000, 0x85f: 0x2000, 0x860: 0x2000, 0x861: 0x2000, 0x862: 0x2000, 0x863: 0x2000, + 0x864: 0x2000, 0x865: 0x2000, 0x866: 0x2000, 0x867: 0x2000, 0x868: 0x2000, 0x869: 0x2000, + 0x86a: 
0x2000, 0x86b: 0x2000, 0x86c: 0x2000, 0x86d: 0x2000, 0x86e: 0x2000, 0x86f: 0x2000, + 0x870: 0x2000, 0x871: 0x2000, 0x872: 0x2000, 0x873: 0x2000, + // Block 0x22, offset 0x880 + 0x880: 0x2000, 0x881: 0x2000, 0x882: 0x2000, 0x883: 0x2000, 0x884: 0x2000, 0x885: 0x2000, + 0x886: 0x2000, 0x887: 0x2000, 0x888: 0x2000, 0x889: 0x2000, 0x88a: 0x2000, 0x88b: 0x2000, + 0x88c: 0x2000, 0x88d: 0x2000, 0x88e: 0x2000, 0x88f: 0x2000, + 0x892: 0x2000, 0x893: 0x2000, 0x894: 0x2000, 0x895: 0x2000, + 0x8a0: 0x200e, 0x8a1: 0x2000, 0x8a3: 0x2000, + 0x8a4: 0x2000, 0x8a5: 0x2000, 0x8a6: 0x2000, 0x8a7: 0x2000, 0x8a8: 0x2000, 0x8a9: 0x2000, + 0x8b2: 0x2000, 0x8b3: 0x2000, + 0x8b6: 0x2000, 0x8b7: 0x2000, + 0x8bc: 0x2000, 0x8bd: 0x2000, + // Block 0x23, offset 0x8c0 + 0x8c0: 0x2000, 0x8c1: 0x2000, + 0x8c6: 0x2000, 0x8c7: 0x2000, 0x8c8: 0x2000, 0x8cb: 0x200f, + 0x8ce: 0x2000, 0x8cf: 0x2000, 0x8d0: 0x2000, 0x8d1: 0x2000, + 0x8e2: 0x2000, 0x8e3: 0x2000, + 0x8e4: 0x2000, 0x8e5: 0x2000, + 0x8ef: 0x2000, + 0x8fd: 0x4000, 0x8fe: 0x4000, + // Block 0x24, offset 0x900 + 0x905: 0x2000, + 0x906: 0x2000, 0x909: 0x2000, + 0x90e: 0x2000, 0x90f: 0x2000, + 0x914: 0x4000, 0x915: 0x4000, + 0x91c: 0x2000, + 0x91e: 0x2000, + // Block 0x25, offset 0x940 + 0x940: 0x2000, 0x942: 0x2000, + 0x948: 0x4000, 0x949: 0x4000, 0x94a: 0x4000, 0x94b: 0x4000, + 0x94c: 0x4000, 0x94d: 0x4000, 0x94e: 0x4000, 0x94f: 0x4000, 0x950: 0x4000, 0x951: 0x4000, + 0x952: 0x4000, 0x953: 0x4000, + 0x960: 0x2000, 0x961: 0x2000, 0x963: 0x2000, + 0x964: 0x2000, 0x965: 0x2000, 0x967: 0x2000, 0x968: 0x2000, 0x969: 0x2000, + 0x96a: 0x2000, 0x96c: 0x2000, 0x96d: 0x2000, 0x96f: 0x2000, + 0x97f: 0x4000, + // Block 0x26, offset 0x980 + 0x993: 0x4000, + 0x99e: 0x2000, 0x99f: 0x2000, 0x9a1: 0x4000, + 0x9aa: 0x4000, 0x9ab: 0x4000, + 0x9bd: 0x4000, 0x9be: 0x4000, 0x9bf: 0x2000, + // Block 0x27, offset 0x9c0 + 0x9c4: 0x4000, 0x9c5: 0x4000, + 0x9c6: 0x2000, 0x9c7: 0x2000, 0x9c8: 0x2000, 0x9c9: 0x2000, 0x9ca: 0x2000, 0x9cb: 0x2000, + 0x9cc: 0x2000, 0x9cd: 0x2000, 0x9ce: 0x4000, 0x9cf: 0x2000, 0x9d0: 0x2000, 0x9d1: 0x2000, + 0x9d2: 0x2000, 0x9d3: 0x2000, 0x9d4: 0x4000, 0x9d5: 0x2000, 0x9d6: 0x2000, 0x9d7: 0x2000, + 0x9d8: 0x2000, 0x9d9: 0x2000, 0x9da: 0x2000, 0x9db: 0x2000, 0x9dc: 0x2000, 0x9dd: 0x2000, + 0x9de: 0x2000, 0x9df: 0x2000, 0x9e0: 0x2000, 0x9e1: 0x2000, 0x9e3: 0x2000, + 0x9e8: 0x2000, 0x9e9: 0x2000, + 0x9ea: 0x4000, 0x9eb: 0x2000, 0x9ec: 0x2000, 0x9ed: 0x2000, 0x9ee: 0x2000, 0x9ef: 0x2000, + 0x9f0: 0x2000, 0x9f1: 0x2000, 0x9f2: 0x4000, 0x9f3: 0x4000, 0x9f4: 0x2000, 0x9f5: 0x4000, + 0x9f6: 0x2000, 0x9f7: 0x2000, 0x9f8: 0x2000, 0x9f9: 0x2000, 0x9fa: 0x4000, 0x9fb: 0x2000, + 0x9fc: 0x2000, 0x9fd: 0x4000, 0x9fe: 0x2000, 0x9ff: 0x2000, + // Block 0x28, offset 0xa00 + 0xa05: 0x4000, + 0xa0a: 0x4000, 0xa0b: 0x4000, + 0xa28: 0x4000, + 0xa3d: 0x2000, + // Block 0x29, offset 0xa40 + 0xa4c: 0x4000, 0xa4e: 0x4000, + 0xa53: 0x4000, 0xa54: 0x4000, 0xa55: 0x4000, 0xa57: 0x4000, + 0xa76: 0x2000, 0xa77: 0x2000, 0xa78: 0x2000, 0xa79: 0x2000, 0xa7a: 0x2000, 0xa7b: 0x2000, + 0xa7c: 0x2000, 0xa7d: 0x2000, 0xa7e: 0x2000, 0xa7f: 0x2000, + // Block 0x2a, offset 0xa80 + 0xa95: 0x4000, 0xa96: 0x4000, 0xa97: 0x4000, + 0xab0: 0x4000, + 0xabf: 0x4000, + // Block 0x2b, offset 0xac0 + 0xae6: 0x6000, 0xae7: 0x6000, 0xae8: 0x6000, 0xae9: 0x6000, + 0xaea: 0x6000, 0xaeb: 0x6000, 0xaec: 0x6000, 0xaed: 0x6000, + // Block 0x2c, offset 0xb00 + 0xb05: 0x6010, + 0xb06: 0x6011, + // Block 0x2d, offset 0xb40 + 0xb5b: 0x4000, 0xb5c: 0x4000, + // Block 0x2e, offset 0xb80 + 0xb90: 0x4000, + 0xb95: 0x4000, 0xb96: 
0x2000, 0xb97: 0x2000, + 0xb98: 0x2000, 0xb99: 0x2000, + // Block 0x2f, offset 0xbc0 + 0xbc0: 0x4000, 0xbc1: 0x4000, 0xbc2: 0x4000, 0xbc3: 0x4000, 0xbc4: 0x4000, 0xbc5: 0x4000, + 0xbc6: 0x4000, 0xbc7: 0x4000, 0xbc8: 0x4000, 0xbc9: 0x4000, 0xbca: 0x4000, 0xbcb: 0x4000, + 0xbcc: 0x4000, 0xbcd: 0x4000, 0xbce: 0x4000, 0xbcf: 0x4000, 0xbd0: 0x4000, 0xbd1: 0x4000, + 0xbd2: 0x4000, 0xbd3: 0x4000, 0xbd4: 0x4000, 0xbd5: 0x4000, 0xbd6: 0x4000, 0xbd7: 0x4000, + 0xbd8: 0x4000, 0xbd9: 0x4000, 0xbdb: 0x4000, 0xbdc: 0x4000, 0xbdd: 0x4000, + 0xbde: 0x4000, 0xbdf: 0x4000, 0xbe0: 0x4000, 0xbe1: 0x4000, 0xbe2: 0x4000, 0xbe3: 0x4000, + 0xbe4: 0x4000, 0xbe5: 0x4000, 0xbe6: 0x4000, 0xbe7: 0x4000, 0xbe8: 0x4000, 0xbe9: 0x4000, + 0xbea: 0x4000, 0xbeb: 0x4000, 0xbec: 0x4000, 0xbed: 0x4000, 0xbee: 0x4000, 0xbef: 0x4000, + 0xbf0: 0x4000, 0xbf1: 0x4000, 0xbf2: 0x4000, 0xbf3: 0x4000, 0xbf4: 0x4000, 0xbf5: 0x4000, + 0xbf6: 0x4000, 0xbf7: 0x4000, 0xbf8: 0x4000, 0xbf9: 0x4000, 0xbfa: 0x4000, 0xbfb: 0x4000, + 0xbfc: 0x4000, 0xbfd: 0x4000, 0xbfe: 0x4000, 0xbff: 0x4000, + // Block 0x30, offset 0xc00 + 0xc00: 0x4000, 0xc01: 0x4000, 0xc02: 0x4000, 0xc03: 0x4000, 0xc04: 0x4000, 0xc05: 0x4000, + 0xc06: 0x4000, 0xc07: 0x4000, 0xc08: 0x4000, 0xc09: 0x4000, 0xc0a: 0x4000, 0xc0b: 0x4000, + 0xc0c: 0x4000, 0xc0d: 0x4000, 0xc0e: 0x4000, 0xc0f: 0x4000, 0xc10: 0x4000, 0xc11: 0x4000, + 0xc12: 0x4000, 0xc13: 0x4000, 0xc14: 0x4000, 0xc15: 0x4000, 0xc16: 0x4000, 0xc17: 0x4000, + 0xc18: 0x4000, 0xc19: 0x4000, 0xc1a: 0x4000, 0xc1b: 0x4000, 0xc1c: 0x4000, 0xc1d: 0x4000, + 0xc1e: 0x4000, 0xc1f: 0x4000, 0xc20: 0x4000, 0xc21: 0x4000, 0xc22: 0x4000, 0xc23: 0x4000, + 0xc24: 0x4000, 0xc25: 0x4000, 0xc26: 0x4000, 0xc27: 0x4000, 0xc28: 0x4000, 0xc29: 0x4000, + 0xc2a: 0x4000, 0xc2b: 0x4000, 0xc2c: 0x4000, 0xc2d: 0x4000, 0xc2e: 0x4000, 0xc2f: 0x4000, + 0xc30: 0x4000, 0xc31: 0x4000, 0xc32: 0x4000, 0xc33: 0x4000, + // Block 0x31, offset 0xc40 + 0xc40: 0x4000, 0xc41: 0x4000, 0xc42: 0x4000, 0xc43: 0x4000, 0xc44: 0x4000, 0xc45: 0x4000, + 0xc46: 0x4000, 0xc47: 0x4000, 0xc48: 0x4000, 0xc49: 0x4000, 0xc4a: 0x4000, 0xc4b: 0x4000, + 0xc4c: 0x4000, 0xc4d: 0x4000, 0xc4e: 0x4000, 0xc4f: 0x4000, 0xc50: 0x4000, 0xc51: 0x4000, + 0xc52: 0x4000, 0xc53: 0x4000, 0xc54: 0x4000, 0xc55: 0x4000, + 0xc70: 0x4000, 0xc71: 0x4000, 0xc72: 0x4000, 0xc73: 0x4000, 0xc74: 0x4000, 0xc75: 0x4000, + 0xc76: 0x4000, 0xc77: 0x4000, 0xc78: 0x4000, 0xc79: 0x4000, 0xc7a: 0x4000, 0xc7b: 0x4000, + // Block 0x32, offset 0xc80 + 0xc80: 0x9012, 0xc81: 0x4013, 0xc82: 0x4014, 0xc83: 0x4000, 0xc84: 0x4000, 0xc85: 0x4000, + 0xc86: 0x4000, 0xc87: 0x4000, 0xc88: 0x4000, 0xc89: 0x4000, 0xc8a: 0x4000, 0xc8b: 0x4000, + 0xc8c: 0x4015, 0xc8d: 0x4015, 0xc8e: 0x4000, 0xc8f: 0x4000, 0xc90: 0x4000, 0xc91: 0x4000, + 0xc92: 0x4000, 0xc93: 0x4000, 0xc94: 0x4000, 0xc95: 0x4000, 0xc96: 0x4000, 0xc97: 0x4000, + 0xc98: 0x4000, 0xc99: 0x4000, 0xc9a: 0x4000, 0xc9b: 0x4000, 0xc9c: 0x4000, 0xc9d: 0x4000, + 0xc9e: 0x4000, 0xc9f: 0x4000, 0xca0: 0x4000, 0xca1: 0x4000, 0xca2: 0x4000, 0xca3: 0x4000, + 0xca4: 0x4000, 0xca5: 0x4000, 0xca6: 0x4000, 0xca7: 0x4000, 0xca8: 0x4000, 0xca9: 0x4000, + 0xcaa: 0x4000, 0xcab: 0x4000, 0xcac: 0x4000, 0xcad: 0x4000, 0xcae: 0x4000, 0xcaf: 0x4000, + 0xcb0: 0x4000, 0xcb1: 0x4000, 0xcb2: 0x4000, 0xcb3: 0x4000, 0xcb4: 0x4000, 0xcb5: 0x4000, + 0xcb6: 0x4000, 0xcb7: 0x4000, 0xcb8: 0x4000, 0xcb9: 0x4000, 0xcba: 0x4000, 0xcbb: 0x4000, + 0xcbc: 0x4000, 0xcbd: 0x4000, 0xcbe: 0x4000, + // Block 0x33, offset 0xcc0 + 0xcc1: 0x4000, 0xcc2: 0x4000, 0xcc3: 0x4000, 0xcc4: 0x4000, 0xcc5: 0x4000, + 0xcc6: 0x4000, 
0xcc7: 0x4000, 0xcc8: 0x4000, 0xcc9: 0x4000, 0xcca: 0x4000, 0xccb: 0x4000, + 0xccc: 0x4000, 0xccd: 0x4000, 0xcce: 0x4000, 0xccf: 0x4000, 0xcd0: 0x4000, 0xcd1: 0x4000, + 0xcd2: 0x4000, 0xcd3: 0x4000, 0xcd4: 0x4000, 0xcd5: 0x4000, 0xcd6: 0x4000, 0xcd7: 0x4000, + 0xcd8: 0x4000, 0xcd9: 0x4000, 0xcda: 0x4000, 0xcdb: 0x4000, 0xcdc: 0x4000, 0xcdd: 0x4000, + 0xcde: 0x4000, 0xcdf: 0x4000, 0xce0: 0x4000, 0xce1: 0x4000, 0xce2: 0x4000, 0xce3: 0x4000, + 0xce4: 0x4000, 0xce5: 0x4000, 0xce6: 0x4000, 0xce7: 0x4000, 0xce8: 0x4000, 0xce9: 0x4000, + 0xcea: 0x4000, 0xceb: 0x4000, 0xcec: 0x4000, 0xced: 0x4000, 0xcee: 0x4000, 0xcef: 0x4000, + 0xcf0: 0x4000, 0xcf1: 0x4000, 0xcf2: 0x4000, 0xcf3: 0x4000, 0xcf4: 0x4000, 0xcf5: 0x4000, + 0xcf6: 0x4000, 0xcf7: 0x4000, 0xcf8: 0x4000, 0xcf9: 0x4000, 0xcfa: 0x4000, 0xcfb: 0x4000, + 0xcfc: 0x4000, 0xcfd: 0x4000, 0xcfe: 0x4000, 0xcff: 0x4000, + // Block 0x34, offset 0xd00 + 0xd00: 0x4000, 0xd01: 0x4000, 0xd02: 0x4000, 0xd03: 0x4000, 0xd04: 0x4000, 0xd05: 0x4000, + 0xd06: 0x4000, 0xd07: 0x4000, 0xd08: 0x4000, 0xd09: 0x4000, 0xd0a: 0x4000, 0xd0b: 0x4000, + 0xd0c: 0x4000, 0xd0d: 0x4000, 0xd0e: 0x4000, 0xd0f: 0x4000, 0xd10: 0x4000, 0xd11: 0x4000, + 0xd12: 0x4000, 0xd13: 0x4000, 0xd14: 0x4000, 0xd15: 0x4000, 0xd16: 0x4000, + 0xd19: 0x4016, 0xd1a: 0x4017, 0xd1b: 0x4000, 0xd1c: 0x4000, 0xd1d: 0x4000, + 0xd1e: 0x4000, 0xd1f: 0x4000, 0xd20: 0x4000, 0xd21: 0x4018, 0xd22: 0x4019, 0xd23: 0x401a, + 0xd24: 0x401b, 0xd25: 0x401c, 0xd26: 0x401d, 0xd27: 0x401e, 0xd28: 0x401f, 0xd29: 0x4020, + 0xd2a: 0x4021, 0xd2b: 0x4022, 0xd2c: 0x4000, 0xd2d: 0x4010, 0xd2e: 0x4000, 0xd2f: 0x4023, + 0xd30: 0x4000, 0xd31: 0x4024, 0xd32: 0x4000, 0xd33: 0x4025, 0xd34: 0x4000, 0xd35: 0x4026, + 0xd36: 0x4000, 0xd37: 0x401a, 0xd38: 0x4000, 0xd39: 0x4027, 0xd3a: 0x4000, 0xd3b: 0x4028, + 0xd3c: 0x4000, 0xd3d: 0x4020, 0xd3e: 0x4000, 0xd3f: 0x4029, + // Block 0x35, offset 0xd40 + 0xd40: 0x4000, 0xd41: 0x402a, 0xd42: 0x4000, 0xd43: 0x402b, 0xd44: 0x402c, 0xd45: 0x4000, + 0xd46: 0x4017, 0xd47: 0x4000, 0xd48: 0x402d, 0xd49: 0x4000, 0xd4a: 0x402e, 0xd4b: 0x402f, + 0xd4c: 0x4030, 0xd4d: 0x4017, 0xd4e: 0x4016, 0xd4f: 0x4017, 0xd50: 0x4000, 0xd51: 0x4000, + 0xd52: 0x4031, 0xd53: 0x4000, 0xd54: 0x4000, 0xd55: 0x4031, 0xd56: 0x4000, 0xd57: 0x4000, + 0xd58: 0x4032, 0xd59: 0x4000, 0xd5a: 0x4000, 0xd5b: 0x4032, 0xd5c: 0x4000, 0xd5d: 0x4000, + 0xd5e: 0x4033, 0xd5f: 0x402e, 0xd60: 0x4034, 0xd61: 0x4035, 0xd62: 0x4034, 0xd63: 0x4036, + 0xd64: 0x4037, 0xd65: 0x4024, 0xd66: 0x4035, 0xd67: 0x4025, 0xd68: 0x4038, 0xd69: 0x4038, + 0xd6a: 0x4039, 0xd6b: 0x4039, 0xd6c: 0x403a, 0xd6d: 0x403a, 0xd6e: 0x4000, 0xd6f: 0x4035, + 0xd70: 0x4000, 0xd71: 0x4000, 0xd72: 0x403b, 0xd73: 0x403c, 0xd74: 0x4000, 0xd75: 0x4000, + 0xd76: 0x4000, 0xd77: 0x4000, 0xd78: 0x4000, 0xd79: 0x4000, 0xd7a: 0x4000, 0xd7b: 0x403d, + 0xd7c: 0x401c, 0xd7d: 0x4000, 0xd7e: 0x4000, 0xd7f: 0x4000, + // Block 0x36, offset 0xd80 + 0xd85: 0x4000, + 0xd86: 0x4000, 0xd87: 0x4000, 0xd88: 0x4000, 0xd89: 0x4000, 0xd8a: 0x4000, 0xd8b: 0x4000, + 0xd8c: 0x4000, 0xd8d: 0x4000, 0xd8e: 0x4000, 0xd8f: 0x4000, 0xd90: 0x4000, 0xd91: 0x4000, + 0xd92: 0x4000, 0xd93: 0x4000, 0xd94: 0x4000, 0xd95: 0x4000, 0xd96: 0x4000, 0xd97: 0x4000, + 0xd98: 0x4000, 0xd99: 0x4000, 0xd9a: 0x4000, 0xd9b: 0x4000, 0xd9c: 0x4000, 0xd9d: 0x4000, + 0xd9e: 0x4000, 0xd9f: 0x4000, 0xda0: 0x4000, 0xda1: 0x4000, 0xda2: 0x4000, 0xda3: 0x4000, + 0xda4: 0x4000, 0xda5: 0x4000, 0xda6: 0x4000, 0xda7: 0x4000, 0xda8: 0x4000, 0xda9: 0x4000, + 0xdaa: 0x4000, 0xdab: 0x4000, 0xdac: 0x4000, 0xdad: 0x4000, 0xdae: 0x4000, 0xdaf: 
0x4000, + 0xdb1: 0x403e, 0xdb2: 0x403e, 0xdb3: 0x403e, 0xdb4: 0x403e, 0xdb5: 0x403e, + 0xdb6: 0x403e, 0xdb7: 0x403e, 0xdb8: 0x403e, 0xdb9: 0x403e, 0xdba: 0x403e, 0xdbb: 0x403e, + 0xdbc: 0x403e, 0xdbd: 0x403e, 0xdbe: 0x403e, 0xdbf: 0x403e, + // Block 0x37, offset 0xdc0 + 0xdc0: 0x4037, 0xdc1: 0x4037, 0xdc2: 0x4037, 0xdc3: 0x4037, 0xdc4: 0x4037, 0xdc5: 0x4037, + 0xdc6: 0x4037, 0xdc7: 0x4037, 0xdc8: 0x4037, 0xdc9: 0x4037, 0xdca: 0x4037, 0xdcb: 0x4037, + 0xdcc: 0x4037, 0xdcd: 0x4037, 0xdce: 0x4037, 0xdcf: 0x400e, 0xdd0: 0x403f, 0xdd1: 0x4040, + 0xdd2: 0x4041, 0xdd3: 0x4040, 0xdd4: 0x403f, 0xdd5: 0x4042, 0xdd6: 0x4043, 0xdd7: 0x4044, + 0xdd8: 0x4040, 0xdd9: 0x4041, 0xdda: 0x4040, 0xddb: 0x4045, 0xddc: 0x4009, 0xddd: 0x4045, + 0xdde: 0x4046, 0xddf: 0x4045, 0xde0: 0x4047, 0xde1: 0x400b, 0xde2: 0x400a, 0xde3: 0x400c, + 0xde4: 0x4048, 0xde5: 0x4000, 0xde6: 0x4000, 0xde7: 0x4000, 0xde8: 0x4000, 0xde9: 0x4000, + 0xdea: 0x4000, 0xdeb: 0x4000, 0xdec: 0x4000, 0xded: 0x4000, 0xdee: 0x4000, 0xdef: 0x4000, + 0xdf0: 0x4000, 0xdf1: 0x4000, 0xdf2: 0x4000, 0xdf3: 0x4000, 0xdf4: 0x4000, 0xdf5: 0x4000, + 0xdf6: 0x4000, 0xdf7: 0x4000, 0xdf8: 0x4000, 0xdf9: 0x4000, 0xdfa: 0x4000, 0xdfb: 0x4000, + 0xdfc: 0x4000, 0xdfd: 0x4000, 0xdfe: 0x4000, 0xdff: 0x4000, + // Block 0x38, offset 0xe00 + 0xe00: 0x4000, 0xe01: 0x4000, 0xe02: 0x4000, 0xe03: 0x4000, 0xe04: 0x4000, 0xe05: 0x4000, + 0xe06: 0x4000, 0xe07: 0x4000, 0xe08: 0x4000, 0xe09: 0x4000, 0xe0a: 0x4000, 0xe0b: 0x4000, + 0xe0c: 0x4000, 0xe0d: 0x4000, 0xe0e: 0x4000, 0xe10: 0x4000, 0xe11: 0x4000, + 0xe12: 0x4000, 0xe13: 0x4000, 0xe14: 0x4000, 0xe15: 0x4000, 0xe16: 0x4000, 0xe17: 0x4000, + 0xe18: 0x4000, 0xe19: 0x4000, 0xe1a: 0x4000, 0xe1b: 0x4000, 0xe1c: 0x4000, 0xe1d: 0x4000, + 0xe1e: 0x4000, 0xe1f: 0x4000, 0xe20: 0x4000, 0xe21: 0x4000, 0xe22: 0x4000, 0xe23: 0x4000, + 0xe24: 0x4000, 0xe25: 0x4000, 0xe26: 0x4000, 0xe27: 0x4000, 0xe28: 0x4000, 0xe29: 0x4000, + 0xe2a: 0x4000, 0xe2b: 0x4000, 0xe2c: 0x4000, 0xe2d: 0x4000, 0xe2e: 0x4000, 0xe2f: 0x4000, + 0xe30: 0x4000, 0xe31: 0x4000, 0xe32: 0x4000, 0xe33: 0x4000, 0xe34: 0x4000, 0xe35: 0x4000, + 0xe36: 0x4000, 0xe37: 0x4000, 0xe38: 0x4000, 0xe39: 0x4000, 0xe3a: 0x4000, 0xe3b: 0x4000, + 0xe3c: 0x4000, 0xe3d: 0x4000, 0xe3e: 0x4000, 0xe3f: 0x4000, + // Block 0x39, offset 0xe40 + 0xe40: 0x4000, 0xe41: 0x4000, 0xe42: 0x4000, 0xe43: 0x4000, 0xe44: 0x4000, 0xe45: 0x4000, + 0xe46: 0x4000, 0xe47: 0x4000, 0xe48: 0x4000, 0xe49: 0x4000, 0xe4a: 0x4000, 0xe4b: 0x4000, + 0xe4c: 0x4000, 0xe4d: 0x4000, 0xe4e: 0x4000, 0xe4f: 0x4000, 0xe50: 0x4000, 0xe51: 0x4000, + 0xe52: 0x4000, 0xe53: 0x4000, 0xe54: 0x4000, 0xe55: 0x4000, 0xe56: 0x4000, 0xe57: 0x4000, + 0xe58: 0x4000, 0xe59: 0x4000, 0xe5a: 0x4000, 0xe5b: 0x4000, 0xe5c: 0x4000, 0xe5d: 0x4000, + 0xe5e: 0x4000, 0xe5f: 0x4000, 0xe60: 0x4000, 0xe61: 0x4000, 0xe62: 0x4000, 0xe63: 0x4000, + 0xe70: 0x4000, 0xe71: 0x4000, 0xe72: 0x4000, 0xe73: 0x4000, 0xe74: 0x4000, 0xe75: 0x4000, + 0xe76: 0x4000, 0xe77: 0x4000, 0xe78: 0x4000, 0xe79: 0x4000, 0xe7a: 0x4000, 0xe7b: 0x4000, + 0xe7c: 0x4000, 0xe7d: 0x4000, 0xe7e: 0x4000, 0xe7f: 0x4000, + // Block 0x3a, offset 0xe80 + 0xe80: 0x4000, 0xe81: 0x4000, 0xe82: 0x4000, 0xe83: 0x4000, 0xe84: 0x4000, 0xe85: 0x4000, + 0xe86: 0x4000, 0xe87: 0x4000, 0xe88: 0x4000, 0xe89: 0x4000, 0xe8a: 0x4000, 0xe8b: 0x4000, + 0xe8c: 0x4000, 0xe8d: 0x4000, 0xe8e: 0x4000, 0xe8f: 0x4000, 0xe90: 0x4000, 0xe91: 0x4000, + 0xe92: 0x4000, 0xe93: 0x4000, 0xe94: 0x4000, 0xe95: 0x4000, 0xe96: 0x4000, 0xe97: 0x4000, + 0xe98: 0x4000, 0xe99: 0x4000, 0xe9a: 0x4000, 0xe9b: 0x4000, 0xe9c: 0x4000, 
0xe9d: 0x4000, + 0xe9e: 0x4000, 0xea0: 0x4000, 0xea1: 0x4000, 0xea2: 0x4000, 0xea3: 0x4000, + 0xea4: 0x4000, 0xea5: 0x4000, 0xea6: 0x4000, 0xea7: 0x4000, 0xea8: 0x4000, 0xea9: 0x4000, + 0xeaa: 0x4000, 0xeab: 0x4000, 0xeac: 0x4000, 0xead: 0x4000, 0xeae: 0x4000, 0xeaf: 0x4000, + 0xeb0: 0x4000, 0xeb1: 0x4000, 0xeb2: 0x4000, 0xeb3: 0x4000, 0xeb4: 0x4000, 0xeb5: 0x4000, + 0xeb6: 0x4000, 0xeb7: 0x4000, 0xeb8: 0x4000, 0xeb9: 0x4000, 0xeba: 0x4000, 0xebb: 0x4000, + 0xebc: 0x4000, 0xebd: 0x4000, 0xebe: 0x4000, 0xebf: 0x4000, + // Block 0x3b, offset 0xec0 + 0xec0: 0x4000, 0xec1: 0x4000, 0xec2: 0x4000, 0xec3: 0x4000, 0xec4: 0x4000, 0xec5: 0x4000, + 0xec6: 0x4000, 0xec7: 0x4000, 0xec8: 0x2000, 0xec9: 0x2000, 0xeca: 0x2000, 0xecb: 0x2000, + 0xecc: 0x2000, 0xecd: 0x2000, 0xece: 0x2000, 0xecf: 0x2000, 0xed0: 0x4000, 0xed1: 0x4000, + 0xed2: 0x4000, 0xed3: 0x4000, 0xed4: 0x4000, 0xed5: 0x4000, 0xed6: 0x4000, 0xed7: 0x4000, + 0xed8: 0x4000, 0xed9: 0x4000, 0xeda: 0x4000, 0xedb: 0x4000, 0xedc: 0x4000, 0xedd: 0x4000, + 0xede: 0x4000, 0xedf: 0x4000, 0xee0: 0x4000, 0xee1: 0x4000, 0xee2: 0x4000, 0xee3: 0x4000, + 0xee4: 0x4000, 0xee5: 0x4000, 0xee6: 0x4000, 0xee7: 0x4000, 0xee8: 0x4000, 0xee9: 0x4000, + 0xeea: 0x4000, 0xeeb: 0x4000, 0xeec: 0x4000, 0xeed: 0x4000, 0xeee: 0x4000, 0xeef: 0x4000, + 0xef0: 0x4000, 0xef1: 0x4000, 0xef2: 0x4000, 0xef3: 0x4000, 0xef4: 0x4000, 0xef5: 0x4000, + 0xef6: 0x4000, 0xef7: 0x4000, 0xef8: 0x4000, 0xef9: 0x4000, 0xefa: 0x4000, 0xefb: 0x4000, + 0xefc: 0x4000, 0xefd: 0x4000, 0xefe: 0x4000, 0xeff: 0x4000, + // Block 0x3c, offset 0xf00 + 0xf00: 0x4000, 0xf01: 0x4000, 0xf02: 0x4000, 0xf03: 0x4000, 0xf04: 0x4000, 0xf05: 0x4000, + 0xf06: 0x4000, 0xf07: 0x4000, 0xf08: 0x4000, 0xf09: 0x4000, 0xf0a: 0x4000, 0xf0b: 0x4000, + 0xf0c: 0x4000, 0xf10: 0x4000, 0xf11: 0x4000, + 0xf12: 0x4000, 0xf13: 0x4000, 0xf14: 0x4000, 0xf15: 0x4000, 0xf16: 0x4000, 0xf17: 0x4000, + 0xf18: 0x4000, 0xf19: 0x4000, 0xf1a: 0x4000, 0xf1b: 0x4000, 0xf1c: 0x4000, 0xf1d: 0x4000, + 0xf1e: 0x4000, 0xf1f: 0x4000, 0xf20: 0x4000, 0xf21: 0x4000, 0xf22: 0x4000, 0xf23: 0x4000, + 0xf24: 0x4000, 0xf25: 0x4000, 0xf26: 0x4000, 0xf27: 0x4000, 0xf28: 0x4000, 0xf29: 0x4000, + 0xf2a: 0x4000, 0xf2b: 0x4000, 0xf2c: 0x4000, 0xf2d: 0x4000, 0xf2e: 0x4000, 0xf2f: 0x4000, + 0xf30: 0x4000, 0xf31: 0x4000, 0xf32: 0x4000, 0xf33: 0x4000, 0xf34: 0x4000, 0xf35: 0x4000, + 0xf36: 0x4000, 0xf37: 0x4000, 0xf38: 0x4000, 0xf39: 0x4000, 0xf3a: 0x4000, 0xf3b: 0x4000, + 0xf3c: 0x4000, 0xf3d: 0x4000, 0xf3e: 0x4000, 0xf3f: 0x4000, + // Block 0x3d, offset 0xf40 + 0xf40: 0x4000, 0xf41: 0x4000, 0xf42: 0x4000, 0xf43: 0x4000, 0xf44: 0x4000, 0xf45: 0x4000, + 0xf46: 0x4000, + // Block 0x3e, offset 0xf80 + 0xfa0: 0x4000, 0xfa1: 0x4000, 0xfa2: 0x4000, 0xfa3: 0x4000, + 0xfa4: 0x4000, 0xfa5: 0x4000, 0xfa6: 0x4000, 0xfa7: 0x4000, 0xfa8: 0x4000, 0xfa9: 0x4000, + 0xfaa: 0x4000, 0xfab: 0x4000, 0xfac: 0x4000, 0xfad: 0x4000, 0xfae: 0x4000, 0xfaf: 0x4000, + 0xfb0: 0x4000, 0xfb1: 0x4000, 0xfb2: 0x4000, 0xfb3: 0x4000, 0xfb4: 0x4000, 0xfb5: 0x4000, + 0xfb6: 0x4000, 0xfb7: 0x4000, 0xfb8: 0x4000, 0xfb9: 0x4000, 0xfba: 0x4000, 0xfbb: 0x4000, + 0xfbc: 0x4000, + // Block 0x3f, offset 0xfc0 + 0xfc0: 0x4000, 0xfc1: 0x4000, 0xfc2: 0x4000, 0xfc3: 0x4000, 0xfc4: 0x4000, 0xfc5: 0x4000, + 0xfc6: 0x4000, 0xfc7: 0x4000, 0xfc8: 0x4000, 0xfc9: 0x4000, 0xfca: 0x4000, 0xfcb: 0x4000, + 0xfcc: 0x4000, 0xfcd: 0x4000, 0xfce: 0x4000, 0xfcf: 0x4000, 0xfd0: 0x4000, 0xfd1: 0x4000, + 0xfd2: 0x4000, 0xfd3: 0x4000, 0xfd4: 0x4000, 0xfd5: 0x4000, 0xfd6: 0x4000, 0xfd7: 0x4000, + 0xfd8: 0x4000, 0xfd9: 0x4000, 0xfda: 
0x4000, 0xfdb: 0x4000, 0xfdc: 0x4000, 0xfdd: 0x4000, + 0xfde: 0x4000, 0xfdf: 0x4000, 0xfe0: 0x4000, 0xfe1: 0x4000, 0xfe2: 0x4000, 0xfe3: 0x4000, + // Block 0x40, offset 0x1000 + 0x1000: 0x2000, 0x1001: 0x2000, 0x1002: 0x2000, 0x1003: 0x2000, 0x1004: 0x2000, 0x1005: 0x2000, + 0x1006: 0x2000, 0x1007: 0x2000, 0x1008: 0x2000, 0x1009: 0x2000, 0x100a: 0x2000, 0x100b: 0x2000, + 0x100c: 0x2000, 0x100d: 0x2000, 0x100e: 0x2000, 0x100f: 0x2000, 0x1010: 0x4000, 0x1011: 0x4000, + 0x1012: 0x4000, 0x1013: 0x4000, 0x1014: 0x4000, 0x1015: 0x4000, 0x1016: 0x4000, 0x1017: 0x4000, + 0x1018: 0x4000, 0x1019: 0x4000, + 0x1030: 0x4000, 0x1031: 0x4000, 0x1032: 0x4000, 0x1033: 0x4000, 0x1034: 0x4000, 0x1035: 0x4000, + 0x1036: 0x4000, 0x1037: 0x4000, 0x1038: 0x4000, 0x1039: 0x4000, 0x103a: 0x4000, 0x103b: 0x4000, + 0x103c: 0x4000, 0x103d: 0x4000, 0x103e: 0x4000, 0x103f: 0x4000, + // Block 0x41, offset 0x1040 + 0x1040: 0x4000, 0x1041: 0x4000, 0x1042: 0x4000, 0x1043: 0x4000, 0x1044: 0x4000, 0x1045: 0x4000, + 0x1046: 0x4000, 0x1047: 0x4000, 0x1048: 0x4000, 0x1049: 0x4000, 0x104a: 0x4000, 0x104b: 0x4000, + 0x104c: 0x4000, 0x104d: 0x4000, 0x104e: 0x4000, 0x104f: 0x4000, 0x1050: 0x4000, 0x1051: 0x4000, + 0x1052: 0x4000, 0x1054: 0x4000, 0x1055: 0x4000, 0x1056: 0x4000, 0x1057: 0x4000, + 0x1058: 0x4000, 0x1059: 0x4000, 0x105a: 0x4000, 0x105b: 0x4000, 0x105c: 0x4000, 0x105d: 0x4000, + 0x105e: 0x4000, 0x105f: 0x4000, 0x1060: 0x4000, 0x1061: 0x4000, 0x1062: 0x4000, 0x1063: 0x4000, + 0x1064: 0x4000, 0x1065: 0x4000, 0x1066: 0x4000, 0x1068: 0x4000, 0x1069: 0x4000, + 0x106a: 0x4000, 0x106b: 0x4000, + // Block 0x42, offset 0x1080 + 0x1081: 0x9012, 0x1082: 0x9012, 0x1083: 0x9012, 0x1084: 0x9012, 0x1085: 0x9012, + 0x1086: 0x9012, 0x1087: 0x9012, 0x1088: 0x9012, 0x1089: 0x9012, 0x108a: 0x9012, 0x108b: 0x9012, + 0x108c: 0x9012, 0x108d: 0x9012, 0x108e: 0x9012, 0x108f: 0x9012, 0x1090: 0x9012, 0x1091: 0x9012, + 0x1092: 0x9012, 0x1093: 0x9012, 0x1094: 0x9012, 0x1095: 0x9012, 0x1096: 0x9012, 0x1097: 0x9012, + 0x1098: 0x9012, 0x1099: 0x9012, 0x109a: 0x9012, 0x109b: 0x9012, 0x109c: 0x9012, 0x109d: 0x9012, + 0x109e: 0x9012, 0x109f: 0x9012, 0x10a0: 0x9049, 0x10a1: 0x9049, 0x10a2: 0x9049, 0x10a3: 0x9049, + 0x10a4: 0x9049, 0x10a5: 0x9049, 0x10a6: 0x9049, 0x10a7: 0x9049, 0x10a8: 0x9049, 0x10a9: 0x9049, + 0x10aa: 0x9049, 0x10ab: 0x9049, 0x10ac: 0x9049, 0x10ad: 0x9049, 0x10ae: 0x9049, 0x10af: 0x9049, + 0x10b0: 0x9049, 0x10b1: 0x9049, 0x10b2: 0x9049, 0x10b3: 0x9049, 0x10b4: 0x9049, 0x10b5: 0x9049, + 0x10b6: 0x9049, 0x10b7: 0x9049, 0x10b8: 0x9049, 0x10b9: 0x9049, 0x10ba: 0x9049, 0x10bb: 0x9049, + 0x10bc: 0x9049, 0x10bd: 0x9049, 0x10be: 0x9049, 0x10bf: 0x9049, + // Block 0x43, offset 0x10c0 + 0x10c0: 0x9049, 0x10c1: 0x9049, 0x10c2: 0x9049, 0x10c3: 0x9049, 0x10c4: 0x9049, 0x10c5: 0x9049, + 0x10c6: 0x9049, 0x10c7: 0x9049, 0x10c8: 0x9049, 0x10c9: 0x9049, 0x10ca: 0x9049, 0x10cb: 0x9049, + 0x10cc: 0x9049, 0x10cd: 0x9049, 0x10ce: 0x9049, 0x10cf: 0x9049, 0x10d0: 0x9049, 0x10d1: 0x9049, + 0x10d2: 0x9049, 0x10d3: 0x9049, 0x10d4: 0x9049, 0x10d5: 0x9049, 0x10d6: 0x9049, 0x10d7: 0x9049, + 0x10d8: 0x9049, 0x10d9: 0x9049, 0x10da: 0x9049, 0x10db: 0x9049, 0x10dc: 0x9049, 0x10dd: 0x9049, + 0x10de: 0x9049, 0x10df: 0x904a, 0x10e0: 0x904b, 0x10e1: 0xb04c, 0x10e2: 0xb04d, 0x10e3: 0xb04d, + 0x10e4: 0xb04e, 0x10e5: 0xb04f, 0x10e6: 0xb050, 0x10e7: 0xb051, 0x10e8: 0xb052, 0x10e9: 0xb053, + 0x10ea: 0xb054, 0x10eb: 0xb055, 0x10ec: 0xb056, 0x10ed: 0xb057, 0x10ee: 0xb058, 0x10ef: 0xb059, + 0x10f0: 0xb05a, 0x10f1: 0xb05b, 0x10f2: 0xb05c, 0x10f3: 0xb05d, 0x10f4: 0xb05e, 0x10f5: 
0xb05f, + 0x10f6: 0xb060, 0x10f7: 0xb061, 0x10f8: 0xb062, 0x10f9: 0xb063, 0x10fa: 0xb064, 0x10fb: 0xb065, + 0x10fc: 0xb052, 0x10fd: 0xb066, 0x10fe: 0xb067, 0x10ff: 0xb055, + // Block 0x44, offset 0x1100 + 0x1100: 0xb068, 0x1101: 0xb069, 0x1102: 0xb06a, 0x1103: 0xb06b, 0x1104: 0xb05a, 0x1105: 0xb056, + 0x1106: 0xb06c, 0x1107: 0xb06d, 0x1108: 0xb06b, 0x1109: 0xb06e, 0x110a: 0xb06b, 0x110b: 0xb06f, + 0x110c: 0xb06f, 0x110d: 0xb070, 0x110e: 0xb070, 0x110f: 0xb071, 0x1110: 0xb056, 0x1111: 0xb072, + 0x1112: 0xb073, 0x1113: 0xb072, 0x1114: 0xb074, 0x1115: 0xb073, 0x1116: 0xb075, 0x1117: 0xb075, + 0x1118: 0xb076, 0x1119: 0xb076, 0x111a: 0xb077, 0x111b: 0xb077, 0x111c: 0xb073, 0x111d: 0xb078, + 0x111e: 0xb079, 0x111f: 0xb067, 0x1120: 0xb07a, 0x1121: 0xb07b, 0x1122: 0xb07b, 0x1123: 0xb07b, + 0x1124: 0xb07b, 0x1125: 0xb07b, 0x1126: 0xb07b, 0x1127: 0xb07b, 0x1128: 0xb07b, 0x1129: 0xb07b, + 0x112a: 0xb07b, 0x112b: 0xb07b, 0x112c: 0xb07b, 0x112d: 0xb07b, 0x112e: 0xb07b, 0x112f: 0xb07b, + 0x1130: 0xb07c, 0x1131: 0xb07c, 0x1132: 0xb07c, 0x1133: 0xb07c, 0x1134: 0xb07c, 0x1135: 0xb07c, + 0x1136: 0xb07c, 0x1137: 0xb07c, 0x1138: 0xb07c, 0x1139: 0xb07c, 0x113a: 0xb07c, 0x113b: 0xb07c, + 0x113c: 0xb07c, 0x113d: 0xb07c, 0x113e: 0xb07c, + // Block 0x45, offset 0x1140 + 0x1142: 0xb07d, 0x1143: 0xb07e, 0x1144: 0xb07f, 0x1145: 0xb080, + 0x1146: 0xb07f, 0x1147: 0xb07e, 0x114a: 0xb081, 0x114b: 0xb082, + 0x114c: 0xb083, 0x114d: 0xb07f, 0x114e: 0xb080, 0x114f: 0xb07f, + 0x1152: 0xb084, 0x1153: 0xb085, 0x1154: 0xb084, 0x1155: 0xb086, 0x1156: 0xb084, 0x1157: 0xb087, + 0x115a: 0xb088, 0x115b: 0xb089, 0x115c: 0xb08a, + 0x1160: 0x908b, 0x1161: 0x908b, 0x1162: 0x908c, 0x1163: 0x908d, + 0x1164: 0x908b, 0x1165: 0x908e, 0x1166: 0x908f, 0x1168: 0xb090, 0x1169: 0xb091, + 0x116a: 0xb092, 0x116b: 0xb091, 0x116c: 0xb093, 0x116d: 0xb094, 0x116e: 0xb095, + 0x117d: 0x2000, + // Block 0x46, offset 0x1180 + 0x11a0: 0x4000, 0x11a1: 0x4000, 0x11a2: 0x4000, 0x11a3: 0x4000, + 0x11a4: 0x4000, + 0x11b0: 0x4000, 0x11b1: 0x4000, + // Block 0x47, offset 0x11c0 + 0x11c0: 0x4000, 0x11c1: 0x4000, 0x11c2: 0x4000, 0x11c3: 0x4000, 0x11c4: 0x4000, 0x11c5: 0x4000, + 0x11c6: 0x4000, 0x11c7: 0x4000, 0x11c8: 0x4000, 0x11c9: 0x4000, 0x11ca: 0x4000, 0x11cb: 0x4000, + 0x11cc: 0x4000, 0x11cd: 0x4000, 0x11ce: 0x4000, 0x11cf: 0x4000, 0x11d0: 0x4000, 0x11d1: 0x4000, + 0x11d2: 0x4000, 0x11d3: 0x4000, 0x11d4: 0x4000, 0x11d5: 0x4000, 0x11d6: 0x4000, 0x11d7: 0x4000, + 0x11d8: 0x4000, 0x11d9: 0x4000, 0x11da: 0x4000, 0x11db: 0x4000, 0x11dc: 0x4000, 0x11dd: 0x4000, + 0x11de: 0x4000, 0x11df: 0x4000, 0x11e0: 0x4000, 0x11e1: 0x4000, 0x11e2: 0x4000, 0x11e3: 0x4000, + 0x11e4: 0x4000, 0x11e5: 0x4000, 0x11e6: 0x4000, 0x11e7: 0x4000, 0x11e8: 0x4000, 0x11e9: 0x4000, + 0x11ea: 0x4000, 0x11eb: 0x4000, 0x11ec: 0x4000, 0x11ed: 0x4000, 0x11ee: 0x4000, 0x11ef: 0x4000, + 0x11f0: 0x4000, 0x11f1: 0x4000, 0x11f2: 0x4000, 0x11f3: 0x4000, 0x11f4: 0x4000, 0x11f5: 0x4000, + 0x11f6: 0x4000, 0x11f7: 0x4000, + // Block 0x48, offset 0x1200 + 0x1200: 0x4000, 0x1201: 0x4000, 0x1202: 0x4000, 0x1203: 0x4000, 0x1204: 0x4000, 0x1205: 0x4000, + 0x1206: 0x4000, 0x1207: 0x4000, 0x1208: 0x4000, 0x1209: 0x4000, 0x120a: 0x4000, 0x120b: 0x4000, + 0x120c: 0x4000, 0x120d: 0x4000, 0x120e: 0x4000, 0x120f: 0x4000, 0x1210: 0x4000, 0x1211: 0x4000, + 0x1212: 0x4000, 0x1213: 0x4000, 0x1214: 0x4000, 0x1215: 0x4000, + // Block 0x49, offset 0x1240 + 0x1240: 0x4000, 0x1241: 0x4000, 0x1242: 0x4000, 0x1243: 0x4000, 0x1244: 0x4000, 0x1245: 0x4000, + 0x1246: 0x4000, 0x1247: 0x4000, 0x1248: 0x4000, + // Block 0x4a, offset 0x1280 
+ 0x1280: 0x4000, 0x1281: 0x4000, 0x1282: 0x4000, 0x1283: 0x4000, 0x1284: 0x4000, 0x1285: 0x4000, + 0x1286: 0x4000, 0x1287: 0x4000, 0x1288: 0x4000, 0x1289: 0x4000, 0x128a: 0x4000, 0x128b: 0x4000, + 0x128c: 0x4000, 0x128d: 0x4000, 0x128e: 0x4000, 0x128f: 0x4000, 0x1290: 0x4000, 0x1291: 0x4000, + 0x1292: 0x4000, 0x1293: 0x4000, 0x1294: 0x4000, 0x1295: 0x4000, 0x1296: 0x4000, 0x1297: 0x4000, + 0x1298: 0x4000, 0x1299: 0x4000, 0x129a: 0x4000, 0x129b: 0x4000, 0x129c: 0x4000, 0x129d: 0x4000, + 0x129e: 0x4000, + // Block 0x4b, offset 0x12c0 + 0x12d0: 0x4000, 0x12d1: 0x4000, + 0x12d2: 0x4000, + 0x12e4: 0x4000, 0x12e5: 0x4000, 0x12e6: 0x4000, 0x12e7: 0x4000, + 0x12f0: 0x4000, 0x12f1: 0x4000, 0x12f2: 0x4000, 0x12f3: 0x4000, 0x12f4: 0x4000, 0x12f5: 0x4000, + 0x12f6: 0x4000, 0x12f7: 0x4000, 0x12f8: 0x4000, 0x12f9: 0x4000, 0x12fa: 0x4000, 0x12fb: 0x4000, + 0x12fc: 0x4000, 0x12fd: 0x4000, 0x12fe: 0x4000, 0x12ff: 0x4000, + // Block 0x4c, offset 0x1300 + 0x1300: 0x4000, 0x1301: 0x4000, 0x1302: 0x4000, 0x1303: 0x4000, 0x1304: 0x4000, 0x1305: 0x4000, + 0x1306: 0x4000, 0x1307: 0x4000, 0x1308: 0x4000, 0x1309: 0x4000, 0x130a: 0x4000, 0x130b: 0x4000, + 0x130c: 0x4000, 0x130d: 0x4000, 0x130e: 0x4000, 0x130f: 0x4000, 0x1310: 0x4000, 0x1311: 0x4000, + 0x1312: 0x4000, 0x1313: 0x4000, 0x1314: 0x4000, 0x1315: 0x4000, 0x1316: 0x4000, 0x1317: 0x4000, + 0x1318: 0x4000, 0x1319: 0x4000, 0x131a: 0x4000, 0x131b: 0x4000, 0x131c: 0x4000, 0x131d: 0x4000, + 0x131e: 0x4000, 0x131f: 0x4000, 0x1320: 0x4000, 0x1321: 0x4000, 0x1322: 0x4000, 0x1323: 0x4000, + 0x1324: 0x4000, 0x1325: 0x4000, 0x1326: 0x4000, 0x1327: 0x4000, 0x1328: 0x4000, 0x1329: 0x4000, + 0x132a: 0x4000, 0x132b: 0x4000, 0x132c: 0x4000, 0x132d: 0x4000, 0x132e: 0x4000, 0x132f: 0x4000, + 0x1330: 0x4000, 0x1331: 0x4000, 0x1332: 0x4000, 0x1333: 0x4000, 0x1334: 0x4000, 0x1335: 0x4000, + 0x1336: 0x4000, 0x1337: 0x4000, 0x1338: 0x4000, 0x1339: 0x4000, 0x133a: 0x4000, 0x133b: 0x4000, + // Block 0x4d, offset 0x1340 + 0x1344: 0x4000, + // Block 0x4e, offset 0x1380 + 0x138f: 0x4000, + // Block 0x4f, offset 0x13c0 + 0x13c0: 0x2000, 0x13c1: 0x2000, 0x13c2: 0x2000, 0x13c3: 0x2000, 0x13c4: 0x2000, 0x13c5: 0x2000, + 0x13c6: 0x2000, 0x13c7: 0x2000, 0x13c8: 0x2000, 0x13c9: 0x2000, 0x13ca: 0x2000, + 0x13d0: 0x2000, 0x13d1: 0x2000, + 0x13d2: 0x2000, 0x13d3: 0x2000, 0x13d4: 0x2000, 0x13d5: 0x2000, 0x13d6: 0x2000, 0x13d7: 0x2000, + 0x13d8: 0x2000, 0x13d9: 0x2000, 0x13da: 0x2000, 0x13db: 0x2000, 0x13dc: 0x2000, 0x13dd: 0x2000, + 0x13de: 0x2000, 0x13df: 0x2000, 0x13e0: 0x2000, 0x13e1: 0x2000, 0x13e2: 0x2000, 0x13e3: 0x2000, + 0x13e4: 0x2000, 0x13e5: 0x2000, 0x13e6: 0x2000, 0x13e7: 0x2000, 0x13e8: 0x2000, 0x13e9: 0x2000, + 0x13ea: 0x2000, 0x13eb: 0x2000, 0x13ec: 0x2000, 0x13ed: 0x2000, + 0x13f0: 0x2000, 0x13f1: 0x2000, 0x13f2: 0x2000, 0x13f3: 0x2000, 0x13f4: 0x2000, 0x13f5: 0x2000, + 0x13f6: 0x2000, 0x13f7: 0x2000, 0x13f8: 0x2000, 0x13f9: 0x2000, 0x13fa: 0x2000, 0x13fb: 0x2000, + 0x13fc: 0x2000, 0x13fd: 0x2000, 0x13fe: 0x2000, 0x13ff: 0x2000, + // Block 0x50, offset 0x1400 + 0x1400: 0x2000, 0x1401: 0x2000, 0x1402: 0x2000, 0x1403: 0x2000, 0x1404: 0x2000, 0x1405: 0x2000, + 0x1406: 0x2000, 0x1407: 0x2000, 0x1408: 0x2000, 0x1409: 0x2000, 0x140a: 0x2000, 0x140b: 0x2000, + 0x140c: 0x2000, 0x140d: 0x2000, 0x140e: 0x2000, 0x140f: 0x2000, 0x1410: 0x2000, 0x1411: 0x2000, + 0x1412: 0x2000, 0x1413: 0x2000, 0x1414: 0x2000, 0x1415: 0x2000, 0x1416: 0x2000, 0x1417: 0x2000, + 0x1418: 0x2000, 0x1419: 0x2000, 0x141a: 0x2000, 0x141b: 0x2000, 0x141c: 0x2000, 0x141d: 0x2000, + 0x141e: 0x2000, 0x141f: 0x2000, 
0x1420: 0x2000, 0x1421: 0x2000, 0x1422: 0x2000, 0x1423: 0x2000, + 0x1424: 0x2000, 0x1425: 0x2000, 0x1426: 0x2000, 0x1427: 0x2000, 0x1428: 0x2000, 0x1429: 0x2000, + 0x1430: 0x2000, 0x1431: 0x2000, 0x1432: 0x2000, 0x1433: 0x2000, 0x1434: 0x2000, 0x1435: 0x2000, + 0x1436: 0x2000, 0x1437: 0x2000, 0x1438: 0x2000, 0x1439: 0x2000, 0x143a: 0x2000, 0x143b: 0x2000, + 0x143c: 0x2000, 0x143d: 0x2000, 0x143e: 0x2000, 0x143f: 0x2000, + // Block 0x51, offset 0x1440 + 0x1440: 0x2000, 0x1441: 0x2000, 0x1442: 0x2000, 0x1443: 0x2000, 0x1444: 0x2000, 0x1445: 0x2000, + 0x1446: 0x2000, 0x1447: 0x2000, 0x1448: 0x2000, 0x1449: 0x2000, 0x144a: 0x2000, 0x144b: 0x2000, + 0x144c: 0x2000, 0x144d: 0x2000, 0x144e: 0x4000, 0x144f: 0x2000, 0x1450: 0x2000, 0x1451: 0x4000, + 0x1452: 0x4000, 0x1453: 0x4000, 0x1454: 0x4000, 0x1455: 0x4000, 0x1456: 0x4000, 0x1457: 0x4000, + 0x1458: 0x4000, 0x1459: 0x4000, 0x145a: 0x4000, 0x145b: 0x2000, 0x145c: 0x2000, 0x145d: 0x2000, + 0x145e: 0x2000, 0x145f: 0x2000, 0x1460: 0x2000, 0x1461: 0x2000, 0x1462: 0x2000, 0x1463: 0x2000, + 0x1464: 0x2000, 0x1465: 0x2000, 0x1466: 0x2000, 0x1467: 0x2000, 0x1468: 0x2000, 0x1469: 0x2000, + 0x146a: 0x2000, 0x146b: 0x2000, 0x146c: 0x2000, + // Block 0x52, offset 0x1480 + 0x1480: 0x4000, 0x1481: 0x4000, 0x1482: 0x4000, + 0x1490: 0x4000, 0x1491: 0x4000, + 0x1492: 0x4000, 0x1493: 0x4000, 0x1494: 0x4000, 0x1495: 0x4000, 0x1496: 0x4000, 0x1497: 0x4000, + 0x1498: 0x4000, 0x1499: 0x4000, 0x149a: 0x4000, 0x149b: 0x4000, 0x149c: 0x4000, 0x149d: 0x4000, + 0x149e: 0x4000, 0x149f: 0x4000, 0x14a0: 0x4000, 0x14a1: 0x4000, 0x14a2: 0x4000, 0x14a3: 0x4000, + 0x14a4: 0x4000, 0x14a5: 0x4000, 0x14a6: 0x4000, 0x14a7: 0x4000, 0x14a8: 0x4000, 0x14a9: 0x4000, + 0x14aa: 0x4000, 0x14ab: 0x4000, 0x14ac: 0x4000, 0x14ad: 0x4000, 0x14ae: 0x4000, 0x14af: 0x4000, + 0x14b0: 0x4000, 0x14b1: 0x4000, 0x14b2: 0x4000, 0x14b3: 0x4000, 0x14b4: 0x4000, 0x14b5: 0x4000, + 0x14b6: 0x4000, 0x14b7: 0x4000, 0x14b8: 0x4000, 0x14b9: 0x4000, 0x14ba: 0x4000, 0x14bb: 0x4000, + // Block 0x53, offset 0x14c0 + 0x14c0: 0x4000, 0x14c1: 0x4000, 0x14c2: 0x4000, 0x14c3: 0x4000, 0x14c4: 0x4000, 0x14c5: 0x4000, + 0x14c6: 0x4000, 0x14c7: 0x4000, 0x14c8: 0x4000, + 0x14d0: 0x4000, 0x14d1: 0x4000, + 0x14e0: 0x4000, 0x14e1: 0x4000, 0x14e2: 0x4000, 0x14e3: 0x4000, + 0x14e4: 0x4000, 0x14e5: 0x4000, + // Block 0x54, offset 0x1500 + 0x1500: 0x4000, 0x1501: 0x4000, 0x1502: 0x4000, 0x1503: 0x4000, 0x1504: 0x4000, 0x1505: 0x4000, + 0x1506: 0x4000, 0x1507: 0x4000, 0x1508: 0x4000, 0x1509: 0x4000, 0x150a: 0x4000, 0x150b: 0x4000, + 0x150c: 0x4000, 0x150d: 0x4000, 0x150e: 0x4000, 0x150f: 0x4000, 0x1510: 0x4000, 0x1511: 0x4000, + 0x1512: 0x4000, 0x1513: 0x4000, 0x1514: 0x4000, 0x1515: 0x4000, 0x1516: 0x4000, 0x1517: 0x4000, + 0x1518: 0x4000, 0x1519: 0x4000, 0x151a: 0x4000, 0x151b: 0x4000, 0x151c: 0x4000, 0x151d: 0x4000, + 0x151e: 0x4000, 0x151f: 0x4000, 0x1520: 0x4000, + 0x152d: 0x4000, 0x152e: 0x4000, 0x152f: 0x4000, + 0x1530: 0x4000, 0x1531: 0x4000, 0x1532: 0x4000, 0x1533: 0x4000, 0x1534: 0x4000, 0x1535: 0x4000, + 0x1537: 0x4000, 0x1538: 0x4000, 0x1539: 0x4000, 0x153a: 0x4000, 0x153b: 0x4000, + 0x153c: 0x4000, 0x153d: 0x4000, 0x153e: 0x4000, 0x153f: 0x4000, + // Block 0x55, offset 0x1540 + 0x1540: 0x4000, 0x1541: 0x4000, 0x1542: 0x4000, 0x1543: 0x4000, 0x1544: 0x4000, 0x1545: 0x4000, + 0x1546: 0x4000, 0x1547: 0x4000, 0x1548: 0x4000, 0x1549: 0x4000, 0x154a: 0x4000, 0x154b: 0x4000, + 0x154c: 0x4000, 0x154d: 0x4000, 0x154e: 0x4000, 0x154f: 0x4000, 0x1550: 0x4000, 0x1551: 0x4000, + 0x1552: 0x4000, 0x1553: 0x4000, 0x1554: 0x4000, 0x1555: 
0x4000, 0x1556: 0x4000, 0x1557: 0x4000, + 0x1558: 0x4000, 0x1559: 0x4000, 0x155a: 0x4000, 0x155b: 0x4000, 0x155c: 0x4000, 0x155d: 0x4000, + 0x155e: 0x4000, 0x155f: 0x4000, 0x1560: 0x4000, 0x1561: 0x4000, 0x1562: 0x4000, 0x1563: 0x4000, + 0x1564: 0x4000, 0x1565: 0x4000, 0x1566: 0x4000, 0x1567: 0x4000, 0x1568: 0x4000, 0x1569: 0x4000, + 0x156a: 0x4000, 0x156b: 0x4000, 0x156c: 0x4000, 0x156d: 0x4000, 0x156e: 0x4000, 0x156f: 0x4000, + 0x1570: 0x4000, 0x1571: 0x4000, 0x1572: 0x4000, 0x1573: 0x4000, 0x1574: 0x4000, 0x1575: 0x4000, + 0x1576: 0x4000, 0x1577: 0x4000, 0x1578: 0x4000, 0x1579: 0x4000, 0x157a: 0x4000, 0x157b: 0x4000, + 0x157c: 0x4000, 0x157e: 0x4000, 0x157f: 0x4000, + // Block 0x56, offset 0x1580 + 0x1580: 0x4000, 0x1581: 0x4000, 0x1582: 0x4000, 0x1583: 0x4000, 0x1584: 0x4000, 0x1585: 0x4000, + 0x1586: 0x4000, 0x1587: 0x4000, 0x1588: 0x4000, 0x1589: 0x4000, 0x158a: 0x4000, 0x158b: 0x4000, + 0x158c: 0x4000, 0x158d: 0x4000, 0x158e: 0x4000, 0x158f: 0x4000, 0x1590: 0x4000, 0x1591: 0x4000, + 0x1592: 0x4000, 0x1593: 0x4000, + 0x15a0: 0x4000, 0x15a1: 0x4000, 0x15a2: 0x4000, 0x15a3: 0x4000, + 0x15a4: 0x4000, 0x15a5: 0x4000, 0x15a6: 0x4000, 0x15a7: 0x4000, 0x15a8: 0x4000, 0x15a9: 0x4000, + 0x15aa: 0x4000, 0x15ab: 0x4000, 0x15ac: 0x4000, 0x15ad: 0x4000, 0x15ae: 0x4000, 0x15af: 0x4000, + 0x15b0: 0x4000, 0x15b1: 0x4000, 0x15b2: 0x4000, 0x15b3: 0x4000, 0x15b4: 0x4000, 0x15b5: 0x4000, + 0x15b6: 0x4000, 0x15b7: 0x4000, 0x15b8: 0x4000, 0x15b9: 0x4000, 0x15ba: 0x4000, 0x15bb: 0x4000, + 0x15bc: 0x4000, 0x15bd: 0x4000, 0x15be: 0x4000, 0x15bf: 0x4000, + // Block 0x57, offset 0x15c0 + 0x15c0: 0x4000, 0x15c1: 0x4000, 0x15c2: 0x4000, 0x15c3: 0x4000, 0x15c4: 0x4000, 0x15c5: 0x4000, + 0x15c6: 0x4000, 0x15c7: 0x4000, 0x15c8: 0x4000, 0x15c9: 0x4000, 0x15ca: 0x4000, + 0x15cf: 0x4000, 0x15d0: 0x4000, 0x15d1: 0x4000, + 0x15d2: 0x4000, 0x15d3: 0x4000, + 0x15e0: 0x4000, 0x15e1: 0x4000, 0x15e2: 0x4000, 0x15e3: 0x4000, + 0x15e4: 0x4000, 0x15e5: 0x4000, 0x15e6: 0x4000, 0x15e7: 0x4000, 0x15e8: 0x4000, 0x15e9: 0x4000, + 0x15ea: 0x4000, 0x15eb: 0x4000, 0x15ec: 0x4000, 0x15ed: 0x4000, 0x15ee: 0x4000, 0x15ef: 0x4000, + 0x15f0: 0x4000, 0x15f4: 0x4000, + 0x15f8: 0x4000, 0x15f9: 0x4000, 0x15fa: 0x4000, 0x15fb: 0x4000, + 0x15fc: 0x4000, 0x15fd: 0x4000, 0x15fe: 0x4000, 0x15ff: 0x4000, + // Block 0x58, offset 0x1600 + 0x1600: 0x4000, 0x1601: 0x4000, 0x1602: 0x4000, 0x1603: 0x4000, 0x1604: 0x4000, 0x1605: 0x4000, + 0x1606: 0x4000, 0x1607: 0x4000, 0x1608: 0x4000, 0x1609: 0x4000, 0x160a: 0x4000, 0x160b: 0x4000, + 0x160c: 0x4000, 0x160d: 0x4000, 0x160e: 0x4000, 0x160f: 0x4000, 0x1610: 0x4000, 0x1611: 0x4000, + 0x1612: 0x4000, 0x1613: 0x4000, 0x1614: 0x4000, 0x1615: 0x4000, 0x1616: 0x4000, 0x1617: 0x4000, + 0x1618: 0x4000, 0x1619: 0x4000, 0x161a: 0x4000, 0x161b: 0x4000, 0x161c: 0x4000, 0x161d: 0x4000, + 0x161e: 0x4000, 0x161f: 0x4000, 0x1620: 0x4000, 0x1621: 0x4000, 0x1622: 0x4000, 0x1623: 0x4000, + 0x1624: 0x4000, 0x1625: 0x4000, 0x1626: 0x4000, 0x1627: 0x4000, 0x1628: 0x4000, 0x1629: 0x4000, + 0x162a: 0x4000, 0x162b: 0x4000, 0x162c: 0x4000, 0x162d: 0x4000, 0x162e: 0x4000, 0x162f: 0x4000, + 0x1630: 0x4000, 0x1631: 0x4000, 0x1632: 0x4000, 0x1633: 0x4000, 0x1634: 0x4000, 0x1635: 0x4000, + 0x1636: 0x4000, 0x1637: 0x4000, 0x1638: 0x4000, 0x1639: 0x4000, 0x163a: 0x4000, 0x163b: 0x4000, + 0x163c: 0x4000, 0x163d: 0x4000, 0x163e: 0x4000, + // Block 0x59, offset 0x1640 + 0x1640: 0x4000, 0x1642: 0x4000, 0x1643: 0x4000, 0x1644: 0x4000, 0x1645: 0x4000, + 0x1646: 0x4000, 0x1647: 0x4000, 0x1648: 0x4000, 0x1649: 0x4000, 0x164a: 0x4000, 0x164b: 
0x4000, + 0x164c: 0x4000, 0x164d: 0x4000, 0x164e: 0x4000, 0x164f: 0x4000, 0x1650: 0x4000, 0x1651: 0x4000, + 0x1652: 0x4000, 0x1653: 0x4000, 0x1654: 0x4000, 0x1655: 0x4000, 0x1656: 0x4000, 0x1657: 0x4000, + 0x1658: 0x4000, 0x1659: 0x4000, 0x165a: 0x4000, 0x165b: 0x4000, 0x165c: 0x4000, 0x165d: 0x4000, + 0x165e: 0x4000, 0x165f: 0x4000, 0x1660: 0x4000, 0x1661: 0x4000, 0x1662: 0x4000, 0x1663: 0x4000, + 0x1664: 0x4000, 0x1665: 0x4000, 0x1666: 0x4000, 0x1667: 0x4000, 0x1668: 0x4000, 0x1669: 0x4000, + 0x166a: 0x4000, 0x166b: 0x4000, 0x166c: 0x4000, 0x166d: 0x4000, 0x166e: 0x4000, 0x166f: 0x4000, + 0x1670: 0x4000, 0x1671: 0x4000, 0x1672: 0x4000, 0x1673: 0x4000, 0x1674: 0x4000, 0x1675: 0x4000, + 0x1676: 0x4000, 0x1677: 0x4000, 0x1678: 0x4000, 0x1679: 0x4000, 0x167a: 0x4000, 0x167b: 0x4000, + 0x167c: 0x4000, 0x167d: 0x4000, 0x167e: 0x4000, 0x167f: 0x4000, + // Block 0x5a, offset 0x1680 + 0x1680: 0x4000, 0x1681: 0x4000, 0x1682: 0x4000, 0x1683: 0x4000, 0x1684: 0x4000, 0x1685: 0x4000, + 0x1686: 0x4000, 0x1687: 0x4000, 0x1688: 0x4000, 0x1689: 0x4000, 0x168a: 0x4000, 0x168b: 0x4000, + 0x168c: 0x4000, 0x168d: 0x4000, 0x168e: 0x4000, 0x168f: 0x4000, 0x1690: 0x4000, 0x1691: 0x4000, + 0x1692: 0x4000, 0x1693: 0x4000, 0x1694: 0x4000, 0x1695: 0x4000, 0x1696: 0x4000, 0x1697: 0x4000, + 0x1698: 0x4000, 0x1699: 0x4000, 0x169a: 0x4000, 0x169b: 0x4000, 0x169c: 0x4000, 0x169d: 0x4000, + 0x169e: 0x4000, 0x169f: 0x4000, 0x16a0: 0x4000, 0x16a1: 0x4000, 0x16a2: 0x4000, 0x16a3: 0x4000, + 0x16a4: 0x4000, 0x16a5: 0x4000, 0x16a6: 0x4000, 0x16a7: 0x4000, 0x16a8: 0x4000, 0x16a9: 0x4000, + 0x16aa: 0x4000, 0x16ab: 0x4000, 0x16ac: 0x4000, 0x16ad: 0x4000, 0x16ae: 0x4000, 0x16af: 0x4000, + 0x16b0: 0x4000, 0x16b1: 0x4000, 0x16b2: 0x4000, 0x16b3: 0x4000, 0x16b4: 0x4000, 0x16b5: 0x4000, + 0x16b6: 0x4000, 0x16b7: 0x4000, 0x16b8: 0x4000, 0x16b9: 0x4000, 0x16ba: 0x4000, 0x16bb: 0x4000, + 0x16bc: 0x4000, 0x16bf: 0x4000, + // Block 0x5b, offset 0x16c0 + 0x16c0: 0x4000, 0x16c1: 0x4000, 0x16c2: 0x4000, 0x16c3: 0x4000, 0x16c4: 0x4000, 0x16c5: 0x4000, + 0x16c6: 0x4000, 0x16c7: 0x4000, 0x16c8: 0x4000, 0x16c9: 0x4000, 0x16ca: 0x4000, 0x16cb: 0x4000, + 0x16cc: 0x4000, 0x16cd: 0x4000, 0x16ce: 0x4000, 0x16cf: 0x4000, 0x16d0: 0x4000, 0x16d1: 0x4000, + 0x16d2: 0x4000, 0x16d3: 0x4000, 0x16d4: 0x4000, 0x16d5: 0x4000, 0x16d6: 0x4000, 0x16d7: 0x4000, + 0x16d8: 0x4000, 0x16d9: 0x4000, 0x16da: 0x4000, 0x16db: 0x4000, 0x16dc: 0x4000, 0x16dd: 0x4000, + 0x16de: 0x4000, 0x16df: 0x4000, 0x16e0: 0x4000, 0x16e1: 0x4000, 0x16e2: 0x4000, 0x16e3: 0x4000, + 0x16e4: 0x4000, 0x16e5: 0x4000, 0x16e6: 0x4000, 0x16e7: 0x4000, 0x16e8: 0x4000, 0x16e9: 0x4000, + 0x16ea: 0x4000, 0x16eb: 0x4000, 0x16ec: 0x4000, 0x16ed: 0x4000, 0x16ee: 0x4000, 0x16ef: 0x4000, + 0x16f0: 0x4000, 0x16f1: 0x4000, 0x16f2: 0x4000, 0x16f3: 0x4000, 0x16f4: 0x4000, 0x16f5: 0x4000, + 0x16f6: 0x4000, 0x16f7: 0x4000, 0x16f8: 0x4000, 0x16f9: 0x4000, 0x16fa: 0x4000, 0x16fb: 0x4000, + 0x16fc: 0x4000, 0x16fd: 0x4000, + // Block 0x5c, offset 0x1700 + 0x170b: 0x4000, + 0x170c: 0x4000, 0x170d: 0x4000, 0x170e: 0x4000, 0x1710: 0x4000, 0x1711: 0x4000, + 0x1712: 0x4000, 0x1713: 0x4000, 0x1714: 0x4000, 0x1715: 0x4000, 0x1716: 0x4000, 0x1717: 0x4000, + 0x1718: 0x4000, 0x1719: 0x4000, 0x171a: 0x4000, 0x171b: 0x4000, 0x171c: 0x4000, 0x171d: 0x4000, + 0x171e: 0x4000, 0x171f: 0x4000, 0x1720: 0x4000, 0x1721: 0x4000, 0x1722: 0x4000, 0x1723: 0x4000, + 0x1724: 0x4000, 0x1725: 0x4000, 0x1726: 0x4000, 0x1727: 0x4000, + 0x173a: 0x4000, + // Block 0x5d, offset 0x1740 + 0x1755: 0x4000, 0x1756: 0x4000, + 0x1764: 0x4000, + // Block 
0x5e, offset 0x1780 + 0x17bb: 0x4000, + 0x17bc: 0x4000, 0x17bd: 0x4000, 0x17be: 0x4000, 0x17bf: 0x4000, + // Block 0x5f, offset 0x17c0 + 0x17c0: 0x4000, 0x17c1: 0x4000, 0x17c2: 0x4000, 0x17c3: 0x4000, 0x17c4: 0x4000, 0x17c5: 0x4000, + 0x17c6: 0x4000, 0x17c7: 0x4000, 0x17c8: 0x4000, 0x17c9: 0x4000, 0x17ca: 0x4000, 0x17cb: 0x4000, + 0x17cc: 0x4000, 0x17cd: 0x4000, 0x17ce: 0x4000, 0x17cf: 0x4000, + // Block 0x60, offset 0x1800 + 0x1800: 0x4000, 0x1801: 0x4000, 0x1802: 0x4000, 0x1803: 0x4000, 0x1804: 0x4000, 0x1805: 0x4000, + 0x180c: 0x4000, 0x1810: 0x4000, 0x1811: 0x4000, + 0x1812: 0x4000, 0x1815: 0x4000, 0x1816: 0x4000, 0x1817: 0x4000, + 0x182b: 0x4000, 0x182c: 0x4000, + 0x1834: 0x4000, 0x1835: 0x4000, + 0x1836: 0x4000, 0x1837: 0x4000, 0x1838: 0x4000, 0x1839: 0x4000, 0x183a: 0x4000, 0x183b: 0x4000, + 0x183c: 0x4000, + // Block 0x61, offset 0x1840 + 0x1860: 0x4000, 0x1861: 0x4000, 0x1862: 0x4000, 0x1863: 0x4000, + 0x1864: 0x4000, 0x1865: 0x4000, 0x1866: 0x4000, 0x1867: 0x4000, 0x1868: 0x4000, 0x1869: 0x4000, + 0x186a: 0x4000, 0x186b: 0x4000, + // Block 0x62, offset 0x1880 + 0x188c: 0x4000, 0x188d: 0x4000, 0x188e: 0x4000, 0x188f: 0x4000, 0x1890: 0x4000, 0x1891: 0x4000, + 0x1892: 0x4000, 0x1893: 0x4000, 0x1894: 0x4000, 0x1895: 0x4000, 0x1896: 0x4000, 0x1897: 0x4000, + 0x1898: 0x4000, 0x1899: 0x4000, 0x189a: 0x4000, 0x189b: 0x4000, 0x189c: 0x4000, 0x189d: 0x4000, + 0x189e: 0x4000, 0x189f: 0x4000, 0x18a0: 0x4000, 0x18a1: 0x4000, 0x18a2: 0x4000, 0x18a3: 0x4000, + 0x18a4: 0x4000, 0x18a5: 0x4000, 0x18a6: 0x4000, 0x18a7: 0x4000, 0x18a8: 0x4000, 0x18a9: 0x4000, + 0x18aa: 0x4000, 0x18ab: 0x4000, 0x18ac: 0x4000, 0x18ad: 0x4000, 0x18ae: 0x4000, 0x18af: 0x4000, + 0x18b0: 0x4000, 0x18b1: 0x4000, 0x18b2: 0x4000, 0x18b3: 0x4000, 0x18b4: 0x4000, 0x18b5: 0x4000, + 0x18b6: 0x4000, 0x18b7: 0x4000, 0x18b8: 0x4000, 0x18b9: 0x4000, 0x18ba: 0x4000, + 0x18bc: 0x4000, 0x18bd: 0x4000, 0x18be: 0x4000, 0x18bf: 0x4000, + // Block 0x63, offset 0x18c0 + 0x18c0: 0x4000, 0x18c1: 0x4000, 0x18c2: 0x4000, 0x18c3: 0x4000, 0x18c4: 0x4000, 0x18c5: 0x4000, + 0x18c7: 0x4000, 0x18c8: 0x4000, 0x18c9: 0x4000, 0x18ca: 0x4000, 0x18cb: 0x4000, + 0x18cc: 0x4000, 0x18cd: 0x4000, 0x18ce: 0x4000, 0x18cf: 0x4000, 0x18d0: 0x4000, 0x18d1: 0x4000, + 0x18d2: 0x4000, 0x18d3: 0x4000, 0x18d4: 0x4000, 0x18d5: 0x4000, 0x18d6: 0x4000, 0x18d7: 0x4000, + 0x18d8: 0x4000, 0x18d9: 0x4000, 0x18da: 0x4000, 0x18db: 0x4000, 0x18dc: 0x4000, 0x18dd: 0x4000, + 0x18de: 0x4000, 0x18df: 0x4000, 0x18e0: 0x4000, 0x18e1: 0x4000, 0x18e2: 0x4000, 0x18e3: 0x4000, + 0x18e4: 0x4000, 0x18e5: 0x4000, 0x18e6: 0x4000, 0x18e7: 0x4000, 0x18e8: 0x4000, 0x18e9: 0x4000, + 0x18ea: 0x4000, 0x18eb: 0x4000, 0x18ec: 0x4000, 0x18ed: 0x4000, 0x18ee: 0x4000, 0x18ef: 0x4000, + 0x18f0: 0x4000, 0x18f1: 0x4000, 0x18f2: 0x4000, 0x18f3: 0x4000, 0x18f4: 0x4000, 0x18f5: 0x4000, + 0x18f6: 0x4000, 0x18f7: 0x4000, 0x18f8: 0x4000, 0x18fa: 0x4000, 0x18fb: 0x4000, + 0x18fc: 0x4000, 0x18fd: 0x4000, 0x18fe: 0x4000, 0x18ff: 0x4000, + // Block 0x64, offset 0x1900 + 0x1900: 0x4000, 0x1901: 0x4000, 0x1902: 0x4000, 0x1903: 0x4000, 0x1904: 0x4000, 0x1905: 0x4000, + 0x1906: 0x4000, 0x1907: 0x4000, 0x1908: 0x4000, 0x1909: 0x4000, 0x190a: 0x4000, 0x190b: 0x4000, + 0x190d: 0x4000, 0x190e: 0x4000, 0x190f: 0x4000, 0x1910: 0x4000, 0x1911: 0x4000, + 0x1912: 0x4000, 0x1913: 0x4000, 0x1914: 0x4000, 0x1915: 0x4000, 0x1916: 0x4000, 0x1917: 0x4000, + 0x1918: 0x4000, 0x1919: 0x4000, 0x191a: 0x4000, 0x191b: 0x4000, 0x191c: 0x4000, 0x191d: 0x4000, + 0x191e: 0x4000, 0x191f: 0x4000, 0x1920: 0x4000, 0x1921: 0x4000, 0x1922: 0x4000, 
0x1923: 0x4000, + 0x1924: 0x4000, 0x1925: 0x4000, 0x1926: 0x4000, 0x1927: 0x4000, 0x1928: 0x4000, 0x1929: 0x4000, + 0x192a: 0x4000, 0x192b: 0x4000, 0x192c: 0x4000, 0x192d: 0x4000, 0x192e: 0x4000, 0x192f: 0x4000, + 0x1930: 0x4000, 0x1931: 0x4000, 0x1932: 0x4000, 0x1933: 0x4000, 0x1934: 0x4000, 0x1935: 0x4000, + 0x1936: 0x4000, 0x1937: 0x4000, 0x1938: 0x4000, 0x1939: 0x4000, 0x193a: 0x4000, 0x193b: 0x4000, + 0x193c: 0x4000, 0x193d: 0x4000, 0x193e: 0x4000, 0x193f: 0x4000, + // Block 0x65, offset 0x1940 + 0x1970: 0x4000, 0x1971: 0x4000, 0x1972: 0x4000, 0x1973: 0x4000, 0x1974: 0x4000, + 0x1978: 0x4000, 0x1979: 0x4000, 0x197a: 0x4000, + // Block 0x66, offset 0x1980 + 0x1980: 0x4000, 0x1981: 0x4000, 0x1982: 0x4000, 0x1983: 0x4000, 0x1984: 0x4000, 0x1985: 0x4000, + 0x1986: 0x4000, + 0x1990: 0x4000, 0x1991: 0x4000, + 0x1992: 0x4000, 0x1993: 0x4000, 0x1994: 0x4000, 0x1995: 0x4000, 0x1996: 0x4000, 0x1997: 0x4000, + 0x1998: 0x4000, 0x1999: 0x4000, 0x199a: 0x4000, 0x199b: 0x4000, 0x199c: 0x4000, 0x199d: 0x4000, + 0x199e: 0x4000, 0x199f: 0x4000, 0x19a0: 0x4000, 0x19a1: 0x4000, 0x19a2: 0x4000, 0x19a3: 0x4000, + 0x19a4: 0x4000, 0x19a5: 0x4000, 0x19a6: 0x4000, 0x19a7: 0x4000, 0x19a8: 0x4000, + 0x19b0: 0x4000, 0x19b1: 0x4000, 0x19b2: 0x4000, 0x19b3: 0x4000, 0x19b4: 0x4000, 0x19b5: 0x4000, + 0x19b6: 0x4000, + // Block 0x67, offset 0x19c0 + 0x19c0: 0x4000, 0x19c1: 0x4000, 0x19c2: 0x4000, + 0x19d0: 0x4000, 0x19d1: 0x4000, + 0x19d2: 0x4000, 0x19d3: 0x4000, 0x19d4: 0x4000, 0x19d5: 0x4000, 0x19d6: 0x4000, + // Block 0x68, offset 0x1a00 + 0x1a00: 0x2000, 0x1a01: 0x2000, 0x1a02: 0x2000, 0x1a03: 0x2000, 0x1a04: 0x2000, 0x1a05: 0x2000, + 0x1a06: 0x2000, 0x1a07: 0x2000, 0x1a08: 0x2000, 0x1a09: 0x2000, 0x1a0a: 0x2000, 0x1a0b: 0x2000, + 0x1a0c: 0x2000, 0x1a0d: 0x2000, 0x1a0e: 0x2000, 0x1a0f: 0x2000, 0x1a10: 0x2000, 0x1a11: 0x2000, + 0x1a12: 0x2000, 0x1a13: 0x2000, 0x1a14: 0x2000, 0x1a15: 0x2000, 0x1a16: 0x2000, 0x1a17: 0x2000, + 0x1a18: 0x2000, 0x1a19: 0x2000, 0x1a1a: 0x2000, 0x1a1b: 0x2000, 0x1a1c: 0x2000, 0x1a1d: 0x2000, + 0x1a1e: 0x2000, 0x1a1f: 0x2000, 0x1a20: 0x2000, 0x1a21: 0x2000, 0x1a22: 0x2000, 0x1a23: 0x2000, + 0x1a24: 0x2000, 0x1a25: 0x2000, 0x1a26: 0x2000, 0x1a27: 0x2000, 0x1a28: 0x2000, 0x1a29: 0x2000, + 0x1a2a: 0x2000, 0x1a2b: 0x2000, 0x1a2c: 0x2000, 0x1a2d: 0x2000, 0x1a2e: 0x2000, 0x1a2f: 0x2000, + 0x1a30: 0x2000, 0x1a31: 0x2000, 0x1a32: 0x2000, 0x1a33: 0x2000, 0x1a34: 0x2000, 0x1a35: 0x2000, + 0x1a36: 0x2000, 0x1a37: 0x2000, 0x1a38: 0x2000, 0x1a39: 0x2000, 0x1a3a: 0x2000, 0x1a3b: 0x2000, + 0x1a3c: 0x2000, 0x1a3d: 0x2000, +} + +// widthIndex: 22 blocks, 1408 entries, 1408 bytes +// Block 0 is the zero block. 
+var widthIndex = [1408]uint8{ + // Block 0x0, offset 0x0 + // Block 0x1, offset 0x40 + // Block 0x2, offset 0x80 + // Block 0x3, offset 0xc0 + 0xc2: 0x01, 0xc3: 0x02, 0xc4: 0x03, 0xc5: 0x04, 0xc7: 0x05, + 0xc9: 0x06, 0xcb: 0x07, 0xcc: 0x08, 0xcd: 0x09, 0xce: 0x0a, 0xcf: 0x0b, + 0xd0: 0x0c, 0xd1: 0x0d, + 0xe1: 0x02, 0xe2: 0x03, 0xe3: 0x04, 0xe4: 0x05, 0xe5: 0x06, 0xe6: 0x06, 0xe7: 0x06, + 0xe8: 0x06, 0xe9: 0x06, 0xea: 0x07, 0xeb: 0x06, 0xec: 0x06, 0xed: 0x08, 0xee: 0x09, 0xef: 0x0a, + 0xf0: 0x0f, 0xf3: 0x12, 0xf4: 0x13, + // Block 0x4, offset 0x100 + 0x104: 0x0e, 0x105: 0x0f, + // Block 0x5, offset 0x140 + 0x140: 0x10, 0x141: 0x11, 0x142: 0x12, 0x144: 0x13, 0x145: 0x14, 0x146: 0x15, 0x147: 0x16, + 0x148: 0x17, 0x149: 0x18, 0x14a: 0x19, 0x14c: 0x1a, 0x14f: 0x1b, + 0x151: 0x1c, 0x152: 0x08, 0x153: 0x1d, 0x154: 0x1e, 0x155: 0x1f, 0x156: 0x20, 0x157: 0x21, + 0x158: 0x22, 0x159: 0x23, 0x15a: 0x24, 0x15b: 0x25, 0x15c: 0x26, 0x15d: 0x27, 0x15e: 0x28, 0x15f: 0x29, + 0x166: 0x2a, + 0x16c: 0x2b, 0x16d: 0x2c, + 0x17a: 0x2d, 0x17b: 0x2e, 0x17c: 0x0e, 0x17d: 0x0e, 0x17e: 0x0e, 0x17f: 0x2f, + // Block 0x6, offset 0x180 + 0x180: 0x30, 0x181: 0x31, 0x182: 0x32, 0x183: 0x33, 0x184: 0x34, 0x185: 0x35, 0x186: 0x36, 0x187: 0x37, + 0x188: 0x38, 0x189: 0x39, 0x18a: 0x0e, 0x18b: 0x0e, 0x18c: 0x0e, 0x18d: 0x0e, 0x18e: 0x0e, 0x18f: 0x0e, + 0x190: 0x0e, 0x191: 0x0e, 0x192: 0x0e, 0x193: 0x0e, 0x194: 0x0e, 0x195: 0x0e, 0x196: 0x0e, 0x197: 0x0e, + 0x198: 0x0e, 0x199: 0x0e, 0x19a: 0x0e, 0x19b: 0x0e, 0x19c: 0x0e, 0x19d: 0x0e, 0x19e: 0x0e, 0x19f: 0x0e, + 0x1a0: 0x0e, 0x1a1: 0x0e, 0x1a2: 0x0e, 0x1a3: 0x0e, 0x1a4: 0x0e, 0x1a5: 0x0e, 0x1a6: 0x0e, 0x1a7: 0x0e, + 0x1a8: 0x0e, 0x1a9: 0x0e, 0x1aa: 0x0e, 0x1ab: 0x0e, 0x1ac: 0x0e, 0x1ad: 0x0e, 0x1ae: 0x0e, 0x1af: 0x0e, + 0x1b0: 0x0e, 0x1b1: 0x0e, 0x1b2: 0x0e, 0x1b3: 0x0e, 0x1b4: 0x0e, 0x1b5: 0x0e, 0x1b6: 0x0e, 0x1b7: 0x0e, + 0x1b8: 0x0e, 0x1b9: 0x0e, 0x1ba: 0x0e, 0x1bb: 0x0e, 0x1bc: 0x0e, 0x1bd: 0x0e, 0x1be: 0x0e, 0x1bf: 0x0e, + // Block 0x7, offset 0x1c0 + 0x1c0: 0x0e, 0x1c1: 0x0e, 0x1c2: 0x0e, 0x1c3: 0x0e, 0x1c4: 0x0e, 0x1c5: 0x0e, 0x1c6: 0x0e, 0x1c7: 0x0e, + 0x1c8: 0x0e, 0x1c9: 0x0e, 0x1ca: 0x0e, 0x1cb: 0x0e, 0x1cc: 0x0e, 0x1cd: 0x0e, 0x1ce: 0x0e, 0x1cf: 0x0e, + 0x1d0: 0x0e, 0x1d1: 0x0e, 0x1d2: 0x0e, 0x1d3: 0x0e, 0x1d4: 0x0e, 0x1d5: 0x0e, 0x1d6: 0x0e, 0x1d7: 0x0e, + 0x1d8: 0x0e, 0x1d9: 0x0e, 0x1da: 0x0e, 0x1db: 0x0e, 0x1dc: 0x0e, 0x1dd: 0x0e, 0x1de: 0x0e, 0x1df: 0x0e, + 0x1e0: 0x0e, 0x1e1: 0x0e, 0x1e2: 0x0e, 0x1e3: 0x0e, 0x1e4: 0x0e, 0x1e5: 0x0e, 0x1e6: 0x0e, 0x1e7: 0x0e, + 0x1e8: 0x0e, 0x1e9: 0x0e, 0x1ea: 0x0e, 0x1eb: 0x0e, 0x1ec: 0x0e, 0x1ed: 0x0e, 0x1ee: 0x0e, 0x1ef: 0x0e, + 0x1f0: 0x0e, 0x1f1: 0x0e, 0x1f2: 0x0e, 0x1f3: 0x0e, 0x1f4: 0x0e, 0x1f5: 0x0e, 0x1f6: 0x0e, + 0x1f8: 0x0e, 0x1f9: 0x0e, 0x1fa: 0x0e, 0x1fb: 0x0e, 0x1fc: 0x0e, 0x1fd: 0x0e, 0x1fe: 0x0e, 0x1ff: 0x0e, + // Block 0x8, offset 0x200 + 0x200: 0x0e, 0x201: 0x0e, 0x202: 0x0e, 0x203: 0x0e, 0x204: 0x0e, 0x205: 0x0e, 0x206: 0x0e, 0x207: 0x0e, + 0x208: 0x0e, 0x209: 0x0e, 0x20a: 0x0e, 0x20b: 0x0e, 0x20c: 0x0e, 0x20d: 0x0e, 0x20e: 0x0e, 0x20f: 0x0e, + 0x210: 0x0e, 0x211: 0x0e, 0x212: 0x0e, 0x213: 0x0e, 0x214: 0x0e, 0x215: 0x0e, 0x216: 0x0e, 0x217: 0x0e, + 0x218: 0x0e, 0x219: 0x0e, 0x21a: 0x0e, 0x21b: 0x0e, 0x21c: 0x0e, 0x21d: 0x0e, 0x21e: 0x0e, 0x21f: 0x0e, + 0x220: 0x0e, 0x221: 0x0e, 0x222: 0x0e, 0x223: 0x0e, 0x224: 0x0e, 0x225: 0x0e, 0x226: 0x0e, 0x227: 0x0e, + 0x228: 0x0e, 0x229: 0x0e, 0x22a: 0x0e, 0x22b: 0x0e, 0x22c: 0x0e, 0x22d: 0x0e, 0x22e: 0x0e, 0x22f: 0x0e, + 0x230: 0x0e, 0x231: 0x0e, 0x232: 0x0e, 0x233: 0x0e, 
0x234: 0x0e, 0x235: 0x0e, 0x236: 0x0e, 0x237: 0x0e, + 0x238: 0x0e, 0x239: 0x0e, 0x23a: 0x0e, 0x23b: 0x0e, 0x23c: 0x0e, 0x23d: 0x0e, 0x23e: 0x0e, 0x23f: 0x0e, + // Block 0x9, offset 0x240 + 0x240: 0x0e, 0x241: 0x0e, 0x242: 0x0e, 0x243: 0x0e, 0x244: 0x0e, 0x245: 0x0e, 0x246: 0x0e, 0x247: 0x0e, + 0x248: 0x0e, 0x249: 0x0e, 0x24a: 0x0e, 0x24b: 0x0e, 0x24c: 0x0e, 0x24d: 0x0e, 0x24e: 0x0e, 0x24f: 0x0e, + 0x250: 0x0e, 0x251: 0x0e, 0x252: 0x3a, 0x253: 0x3b, + 0x265: 0x3c, + 0x270: 0x0e, 0x271: 0x0e, 0x272: 0x0e, 0x273: 0x0e, 0x274: 0x0e, 0x275: 0x0e, 0x276: 0x0e, 0x277: 0x0e, + 0x278: 0x0e, 0x279: 0x0e, 0x27a: 0x0e, 0x27b: 0x0e, 0x27c: 0x0e, 0x27d: 0x0e, 0x27e: 0x0e, 0x27f: 0x0e, + // Block 0xa, offset 0x280 + 0x280: 0x0e, 0x281: 0x0e, 0x282: 0x0e, 0x283: 0x0e, 0x284: 0x0e, 0x285: 0x0e, 0x286: 0x0e, 0x287: 0x0e, + 0x288: 0x0e, 0x289: 0x0e, 0x28a: 0x0e, 0x28b: 0x0e, 0x28c: 0x0e, 0x28d: 0x0e, 0x28e: 0x0e, 0x28f: 0x0e, + 0x290: 0x0e, 0x291: 0x0e, 0x292: 0x0e, 0x293: 0x0e, 0x294: 0x0e, 0x295: 0x0e, 0x296: 0x0e, 0x297: 0x0e, + 0x298: 0x0e, 0x299: 0x0e, 0x29a: 0x0e, 0x29b: 0x0e, 0x29c: 0x0e, 0x29d: 0x0e, 0x29e: 0x3d, + // Block 0xb, offset 0x2c0 + 0x2c0: 0x08, 0x2c1: 0x08, 0x2c2: 0x08, 0x2c3: 0x08, 0x2c4: 0x08, 0x2c5: 0x08, 0x2c6: 0x08, 0x2c7: 0x08, + 0x2c8: 0x08, 0x2c9: 0x08, 0x2ca: 0x08, 0x2cb: 0x08, 0x2cc: 0x08, 0x2cd: 0x08, 0x2ce: 0x08, 0x2cf: 0x08, + 0x2d0: 0x08, 0x2d1: 0x08, 0x2d2: 0x08, 0x2d3: 0x08, 0x2d4: 0x08, 0x2d5: 0x08, 0x2d6: 0x08, 0x2d7: 0x08, + 0x2d8: 0x08, 0x2d9: 0x08, 0x2da: 0x08, 0x2db: 0x08, 0x2dc: 0x08, 0x2dd: 0x08, 0x2de: 0x08, 0x2df: 0x08, + 0x2e0: 0x08, 0x2e1: 0x08, 0x2e2: 0x08, 0x2e3: 0x08, 0x2e4: 0x08, 0x2e5: 0x08, 0x2e6: 0x08, 0x2e7: 0x08, + 0x2e8: 0x08, 0x2e9: 0x08, 0x2ea: 0x08, 0x2eb: 0x08, 0x2ec: 0x08, 0x2ed: 0x08, 0x2ee: 0x08, 0x2ef: 0x08, + 0x2f0: 0x08, 0x2f1: 0x08, 0x2f2: 0x08, 0x2f3: 0x08, 0x2f4: 0x08, 0x2f5: 0x08, 0x2f6: 0x08, 0x2f7: 0x08, + 0x2f8: 0x08, 0x2f9: 0x08, 0x2fa: 0x08, 0x2fb: 0x08, 0x2fc: 0x08, 0x2fd: 0x08, 0x2fe: 0x08, 0x2ff: 0x08, + // Block 0xc, offset 0x300 + 0x300: 0x08, 0x301: 0x08, 0x302: 0x08, 0x303: 0x08, 0x304: 0x08, 0x305: 0x08, 0x306: 0x08, 0x307: 0x08, + 0x308: 0x08, 0x309: 0x08, 0x30a: 0x08, 0x30b: 0x08, 0x30c: 0x08, 0x30d: 0x08, 0x30e: 0x08, 0x30f: 0x08, + 0x310: 0x08, 0x311: 0x08, 0x312: 0x08, 0x313: 0x08, 0x314: 0x08, 0x315: 0x08, 0x316: 0x08, 0x317: 0x08, + 0x318: 0x08, 0x319: 0x08, 0x31a: 0x08, 0x31b: 0x08, 0x31c: 0x08, 0x31d: 0x08, 0x31e: 0x08, 0x31f: 0x08, + 0x320: 0x08, 0x321: 0x08, 0x322: 0x08, 0x323: 0x08, 0x324: 0x0e, 0x325: 0x0e, 0x326: 0x0e, 0x327: 0x0e, + 0x328: 0x0e, 0x329: 0x0e, 0x32a: 0x0e, 0x32b: 0x0e, + 0x338: 0x3e, 0x339: 0x3f, 0x33c: 0x40, 0x33d: 0x41, 0x33e: 0x42, 0x33f: 0x43, + // Block 0xd, offset 0x340 + 0x37f: 0x44, + // Block 0xe, offset 0x380 + 0x380: 0x0e, 0x381: 0x0e, 0x382: 0x0e, 0x383: 0x0e, 0x384: 0x0e, 0x385: 0x0e, 0x386: 0x0e, 0x387: 0x0e, + 0x388: 0x0e, 0x389: 0x0e, 0x38a: 0x0e, 0x38b: 0x0e, 0x38c: 0x0e, 0x38d: 0x0e, 0x38e: 0x0e, 0x38f: 0x0e, + 0x390: 0x0e, 0x391: 0x0e, 0x392: 0x0e, 0x393: 0x0e, 0x394: 0x0e, 0x395: 0x0e, 0x396: 0x0e, 0x397: 0x0e, + 0x398: 0x0e, 0x399: 0x0e, 0x39a: 0x0e, 0x39b: 0x0e, 0x39c: 0x0e, 0x39d: 0x0e, 0x39e: 0x0e, 0x39f: 0x45, + 0x3a0: 0x0e, 0x3a1: 0x0e, 0x3a2: 0x0e, 0x3a3: 0x0e, 0x3a4: 0x0e, 0x3a5: 0x0e, 0x3a6: 0x0e, 0x3a7: 0x0e, + 0x3a8: 0x0e, 0x3a9: 0x0e, 0x3aa: 0x0e, 0x3ab: 0x0e, 0x3ac: 0x0e, 0x3ad: 0x0e, 0x3ae: 0x0e, 0x3af: 0x0e, + 0x3b0: 0x0e, 0x3b1: 0x0e, 0x3b2: 0x0e, 0x3b3: 0x46, 0x3b4: 0x47, + // Block 0xf, offset 0x3c0 + 0x3c0: 0x0e, 0x3c1: 0x0e, 0x3c2: 0x0e, 0x3c3: 0x0e, 
0x3c4: 0x48, 0x3c5: 0x49, 0x3c6: 0x0e, 0x3c7: 0x0e, + 0x3c8: 0x0e, 0x3c9: 0x0e, 0x3ca: 0x0e, 0x3cb: 0x4a, + // Block 0x10, offset 0x400 + 0x400: 0x4b, 0x403: 0x4c, 0x404: 0x4d, 0x405: 0x4e, 0x406: 0x4f, + 0x408: 0x50, 0x409: 0x51, 0x40c: 0x52, 0x40d: 0x53, 0x40e: 0x54, 0x40f: 0x55, + 0x410: 0x56, 0x411: 0x57, 0x412: 0x0e, 0x413: 0x58, 0x414: 0x59, 0x415: 0x5a, 0x416: 0x5b, 0x417: 0x5c, + 0x418: 0x0e, 0x419: 0x5d, 0x41a: 0x0e, 0x41b: 0x5e, 0x41f: 0x5f, + 0x424: 0x60, 0x425: 0x61, 0x426: 0x0e, 0x427: 0x62, + 0x429: 0x63, 0x42a: 0x64, 0x42b: 0x65, + // Block 0x11, offset 0x440 + 0x456: 0x0b, 0x457: 0x06, + 0x458: 0x0c, 0x45b: 0x0d, 0x45f: 0x0e, + 0x460: 0x06, 0x461: 0x06, 0x462: 0x06, 0x463: 0x06, 0x464: 0x06, 0x465: 0x06, 0x466: 0x06, 0x467: 0x06, + 0x468: 0x06, 0x469: 0x06, 0x46a: 0x06, 0x46b: 0x06, 0x46c: 0x06, 0x46d: 0x06, 0x46e: 0x06, 0x46f: 0x06, + 0x470: 0x06, 0x471: 0x06, 0x472: 0x06, 0x473: 0x06, 0x474: 0x06, 0x475: 0x06, 0x476: 0x06, 0x477: 0x06, + 0x478: 0x06, 0x479: 0x06, 0x47a: 0x06, 0x47b: 0x06, 0x47c: 0x06, 0x47d: 0x06, 0x47e: 0x06, 0x47f: 0x06, + // Block 0x12, offset 0x480 + 0x484: 0x08, 0x485: 0x08, 0x486: 0x08, 0x487: 0x09, + // Block 0x13, offset 0x4c0 + 0x4c0: 0x08, 0x4c1: 0x08, 0x4c2: 0x08, 0x4c3: 0x08, 0x4c4: 0x08, 0x4c5: 0x08, 0x4c6: 0x08, 0x4c7: 0x08, + 0x4c8: 0x08, 0x4c9: 0x08, 0x4ca: 0x08, 0x4cb: 0x08, 0x4cc: 0x08, 0x4cd: 0x08, 0x4ce: 0x08, 0x4cf: 0x08, + 0x4d0: 0x08, 0x4d1: 0x08, 0x4d2: 0x08, 0x4d3: 0x08, 0x4d4: 0x08, 0x4d5: 0x08, 0x4d6: 0x08, 0x4d7: 0x08, + 0x4d8: 0x08, 0x4d9: 0x08, 0x4da: 0x08, 0x4db: 0x08, 0x4dc: 0x08, 0x4dd: 0x08, 0x4de: 0x08, 0x4df: 0x08, + 0x4e0: 0x08, 0x4e1: 0x08, 0x4e2: 0x08, 0x4e3: 0x08, 0x4e4: 0x08, 0x4e5: 0x08, 0x4e6: 0x08, 0x4e7: 0x08, + 0x4e8: 0x08, 0x4e9: 0x08, 0x4ea: 0x08, 0x4eb: 0x08, 0x4ec: 0x08, 0x4ed: 0x08, 0x4ee: 0x08, 0x4ef: 0x08, + 0x4f0: 0x08, 0x4f1: 0x08, 0x4f2: 0x08, 0x4f3: 0x08, 0x4f4: 0x08, 0x4f5: 0x08, 0x4f6: 0x08, 0x4f7: 0x08, + 0x4f8: 0x08, 0x4f9: 0x08, 0x4fa: 0x08, 0x4fb: 0x08, 0x4fc: 0x08, 0x4fd: 0x08, 0x4fe: 0x08, 0x4ff: 0x66, + // Block 0x14, offset 0x500 + 0x520: 0x10, + 0x530: 0x09, 0x531: 0x09, 0x532: 0x09, 0x533: 0x09, 0x534: 0x09, 0x535: 0x09, 0x536: 0x09, 0x537: 0x09, + 0x538: 0x09, 0x539: 0x09, 0x53a: 0x09, 0x53b: 0x09, 0x53c: 0x09, 0x53d: 0x09, 0x53e: 0x09, 0x53f: 0x11, + // Block 0x15, offset 0x540 + 0x540: 0x09, 0x541: 0x09, 0x542: 0x09, 0x543: 0x09, 0x544: 0x09, 0x545: 0x09, 0x546: 0x09, 0x547: 0x09, + 0x548: 0x09, 0x549: 0x09, 0x54a: 0x09, 0x54b: 0x09, 0x54c: 0x09, 0x54d: 0x09, 0x54e: 0x09, 0x54f: 0x11, +} + +// inverseData contains 4-byte entries of the following format: +// <0 padding> +// The last byte of the UTF-8-encoded rune is xor-ed with the last byte of the +// UTF-8 encoding of the original rune. Mappings often have the following +// pattern: +// A -> A (U+FF21 -> U+0041) +// B -> B (U+FF22 -> U+0042) +// ... +// By xor-ing the last byte the same entry can be shared by many mappings. This +// reduces the total number of distinct entries by about two thirds. +// The resulting entry for the aforementioned mappings is +// { 0x01, 0xE0, 0x00, 0x00 } +// Using this entry to map U+FF21 (UTF-8 [EF BC A1]), we get +// E0 ^ A1 = 41. +// Similarly, for U+FF22 (UTF-8 [EF BC A2]), we get +// E0 ^ A2 = 42. +// Note that because of the xor-ing, the byte sequence stored in the entry is +// not valid UTF-8. 
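The xor scheme described in the comment above can be checked by hand. The sketch below is editorial and not part of the vendored file: it decodes the shared entry { 0x01, 0xE0, 0x00, 0x00 } for U+FF21 exactly as described, assuming the entry layout <length> <modified UTF-8-encoded rune> <0 padding>.

package main

import "fmt"

func main() {
	// Shared inverseData-style entry: <length> <modified UTF-8 bytes> <0 padding>.
	entry := [4]byte{0x01, 0xE0, 0x00, 0x00}
	wide := []byte("\uFF21") // U+FF21 FULLWIDTH LATIN CAPITAL LETTER A: EF BC A1

	n := int(entry[0]) // byte length of the narrow form
	narrow := make([]byte, n)
	copy(narrow, entry[1:1+n])
	narrow[n-1] ^= wide[len(wide)-1] // 0xE0 ^ 0xA1 = 0x41 ('A')

	fmt.Printf("%q -> %q\n", wide, narrow) // prints "Ａ" -> "A"
}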
+var inverseData = [150][4]byte{ + {0x00, 0x00, 0x00, 0x00}, + {0x03, 0xe3, 0x80, 0xa0}, + {0x03, 0xef, 0xbc, 0xa0}, + {0x03, 0xef, 0xbc, 0xe0}, + {0x03, 0xef, 0xbd, 0xe0}, + {0x03, 0xef, 0xbf, 0x02}, + {0x03, 0xef, 0xbf, 0x00}, + {0x03, 0xef, 0xbf, 0x0e}, + {0x03, 0xef, 0xbf, 0x0c}, + {0x03, 0xef, 0xbf, 0x0f}, + {0x03, 0xef, 0xbf, 0x39}, + {0x03, 0xef, 0xbf, 0x3b}, + {0x03, 0xef, 0xbf, 0x3f}, + {0x03, 0xef, 0xbf, 0x2a}, + {0x03, 0xef, 0xbf, 0x0d}, + {0x03, 0xef, 0xbf, 0x25}, + {0x03, 0xef, 0xbd, 0x1a}, + {0x03, 0xef, 0xbd, 0x26}, + {0x01, 0xa0, 0x00, 0x00}, + {0x03, 0xef, 0xbd, 0x25}, + {0x03, 0xef, 0xbd, 0x23}, + {0x03, 0xef, 0xbd, 0x2e}, + {0x03, 0xef, 0xbe, 0x07}, + {0x03, 0xef, 0xbe, 0x05}, + {0x03, 0xef, 0xbd, 0x06}, + {0x03, 0xef, 0xbd, 0x13}, + {0x03, 0xef, 0xbd, 0x0b}, + {0x03, 0xef, 0xbd, 0x16}, + {0x03, 0xef, 0xbd, 0x0c}, + {0x03, 0xef, 0xbd, 0x15}, + {0x03, 0xef, 0xbd, 0x0d}, + {0x03, 0xef, 0xbd, 0x1c}, + {0x03, 0xef, 0xbd, 0x02}, + {0x03, 0xef, 0xbd, 0x1f}, + {0x03, 0xef, 0xbd, 0x1d}, + {0x03, 0xef, 0xbd, 0x17}, + {0x03, 0xef, 0xbd, 0x08}, + {0x03, 0xef, 0xbd, 0x09}, + {0x03, 0xef, 0xbd, 0x0e}, + {0x03, 0xef, 0xbd, 0x04}, + {0x03, 0xef, 0xbd, 0x05}, + {0x03, 0xef, 0xbe, 0x3f}, + {0x03, 0xef, 0xbe, 0x00}, + {0x03, 0xef, 0xbd, 0x2c}, + {0x03, 0xef, 0xbe, 0x06}, + {0x03, 0xef, 0xbe, 0x0c}, + {0x03, 0xef, 0xbe, 0x0f}, + {0x03, 0xef, 0xbe, 0x0d}, + {0x03, 0xef, 0xbe, 0x0b}, + {0x03, 0xef, 0xbe, 0x19}, + {0x03, 0xef, 0xbe, 0x15}, + {0x03, 0xef, 0xbe, 0x11}, + {0x03, 0xef, 0xbe, 0x31}, + {0x03, 0xef, 0xbe, 0x33}, + {0x03, 0xef, 0xbd, 0x0f}, + {0x03, 0xef, 0xbe, 0x30}, + {0x03, 0xef, 0xbe, 0x3e}, + {0x03, 0xef, 0xbe, 0x32}, + {0x03, 0xef, 0xbe, 0x36}, + {0x03, 0xef, 0xbd, 0x14}, + {0x03, 0xef, 0xbe, 0x2e}, + {0x03, 0xef, 0xbd, 0x1e}, + {0x03, 0xef, 0xbe, 0x10}, + {0x03, 0xef, 0xbf, 0x13}, + {0x03, 0xef, 0xbf, 0x15}, + {0x03, 0xef, 0xbf, 0x17}, + {0x03, 0xef, 0xbf, 0x1f}, + {0x03, 0xef, 0xbf, 0x1d}, + {0x03, 0xef, 0xbf, 0x1b}, + {0x03, 0xef, 0xbf, 0x09}, + {0x03, 0xef, 0xbf, 0x0b}, + {0x03, 0xef, 0xbf, 0x37}, + {0x03, 0xef, 0xbe, 0x04}, + {0x01, 0xe0, 0x00, 0x00}, + {0x03, 0xe2, 0xa6, 0x1a}, + {0x03, 0xe2, 0xa6, 0x26}, + {0x03, 0xe3, 0x80, 0x23}, + {0x03, 0xe3, 0x80, 0x2e}, + {0x03, 0xe3, 0x80, 0x25}, + {0x03, 0xe3, 0x83, 0x1e}, + {0x03, 0xe3, 0x83, 0x14}, + {0x03, 0xe3, 0x82, 0x06}, + {0x03, 0xe3, 0x82, 0x0b}, + {0x03, 0xe3, 0x82, 0x0c}, + {0x03, 0xe3, 0x82, 0x0d}, + {0x03, 0xe3, 0x82, 0x02}, + {0x03, 0xe3, 0x83, 0x0f}, + {0x03, 0xe3, 0x83, 0x08}, + {0x03, 0xe3, 0x83, 0x09}, + {0x03, 0xe3, 0x83, 0x2c}, + {0x03, 0xe3, 0x83, 0x0c}, + {0x03, 0xe3, 0x82, 0x13}, + {0x03, 0xe3, 0x82, 0x16}, + {0x03, 0xe3, 0x82, 0x15}, + {0x03, 0xe3, 0x82, 0x1c}, + {0x03, 0xe3, 0x82, 0x1f}, + {0x03, 0xe3, 0x82, 0x1d}, + {0x03, 0xe3, 0x82, 0x1a}, + {0x03, 0xe3, 0x82, 0x17}, + {0x03, 0xe3, 0x82, 0x08}, + {0x03, 0xe3, 0x82, 0x09}, + {0x03, 0xe3, 0x82, 0x0e}, + {0x03, 0xe3, 0x82, 0x04}, + {0x03, 0xe3, 0x82, 0x05}, + {0x03, 0xe3, 0x82, 0x3f}, + {0x03, 0xe3, 0x83, 0x00}, + {0x03, 0xe3, 0x83, 0x06}, + {0x03, 0xe3, 0x83, 0x05}, + {0x03, 0xe3, 0x83, 0x0d}, + {0x03, 0xe3, 0x83, 0x0b}, + {0x03, 0xe3, 0x83, 0x07}, + {0x03, 0xe3, 0x83, 0x19}, + {0x03, 0xe3, 0x83, 0x15}, + {0x03, 0xe3, 0x83, 0x11}, + {0x03, 0xe3, 0x83, 0x31}, + {0x03, 0xe3, 0x83, 0x33}, + {0x03, 0xe3, 0x83, 0x30}, + {0x03, 0xe3, 0x83, 0x3e}, + {0x03, 0xe3, 0x83, 0x32}, + {0x03, 0xe3, 0x83, 0x36}, + {0x03, 0xe3, 0x83, 0x2e}, + {0x03, 0xe3, 0x82, 0x07}, + {0x03, 0xe3, 0x85, 0x04}, + {0x03, 0xe3, 0x84, 0x10}, + {0x03, 0xe3, 0x85, 0x30}, + {0x03, 0xe3, 0x85, 
0x0d}, + {0x03, 0xe3, 0x85, 0x13}, + {0x03, 0xe3, 0x85, 0x15}, + {0x03, 0xe3, 0x85, 0x17}, + {0x03, 0xe3, 0x85, 0x1f}, + {0x03, 0xe3, 0x85, 0x1d}, + {0x03, 0xe3, 0x85, 0x1b}, + {0x03, 0xe3, 0x85, 0x09}, + {0x03, 0xe3, 0x85, 0x0f}, + {0x03, 0xe3, 0x85, 0x0b}, + {0x03, 0xe3, 0x85, 0x37}, + {0x03, 0xe3, 0x85, 0x3b}, + {0x03, 0xe3, 0x85, 0x39}, + {0x03, 0xe3, 0x85, 0x3f}, + {0x02, 0xc2, 0x02, 0x00}, + {0x02, 0xc2, 0x0e, 0x00}, + {0x02, 0xc2, 0x0c, 0x00}, + {0x02, 0xc2, 0x00, 0x00}, + {0x03, 0xe2, 0x82, 0x0f}, + {0x03, 0xe2, 0x94, 0x2a}, + {0x03, 0xe2, 0x86, 0x39}, + {0x03, 0xe2, 0x86, 0x3b}, + {0x03, 0xe2, 0x86, 0x3f}, + {0x03, 0xe2, 0x96, 0x0d}, + {0x03, 0xe2, 0x97, 0x25}, +} + +// Total table size 15448 bytes (15KiB) diff --git a/vendor/golang.org/x/text/width/tables9.0.0.go b/vendor/golang.org/x/text/width/tables9.0.0.go new file mode 100644 index 000000000..b3db84f6f --- /dev/null +++ b/vendor/golang.org/x/text/width/tables9.0.0.go @@ -0,0 +1,1287 @@ +// Code generated by running "go generate" in golang.org/x/text. DO NOT EDIT. + +//go:build !go1.10 +// +build !go1.10 + +package width + +// UnicodeVersion is the Unicode version from which the tables in this package are derived. +const UnicodeVersion = "9.0.0" + +// lookup returns the trie value for the first UTF-8 encoding in s and +// the width in bytes of this encoding. The size will be 0 if s does not +// hold enough bytes to complete the encoding. len(s) must be greater than 0. +func (t *widthTrie) lookup(s []byte) (v uint16, sz int) { + c0 := s[0] + switch { + case c0 < 0x80: // is ASCII + return widthValues[c0], 1 + case c0 < 0xC2: + return 0, 1 // Illegal UTF-8: not a starter, not ASCII. + case c0 < 0xE0: // 2-byte UTF-8 + if len(s) < 2 { + return 0, 0 + } + i := widthIndex[c0] + c1 := s[1] + if c1 < 0x80 || 0xC0 <= c1 { + return 0, 1 // Illegal UTF-8: not a continuation byte. + } + return t.lookupValue(uint32(i), c1), 2 + case c0 < 0xF0: // 3-byte UTF-8 + if len(s) < 3 { + return 0, 0 + } + i := widthIndex[c0] + c1 := s[1] + if c1 < 0x80 || 0xC0 <= c1 { + return 0, 1 // Illegal UTF-8: not a continuation byte. + } + o := uint32(i)<<6 + uint32(c1) + i = widthIndex[o] + c2 := s[2] + if c2 < 0x80 || 0xC0 <= c2 { + return 0, 2 // Illegal UTF-8: not a continuation byte. + } + return t.lookupValue(uint32(i), c2), 3 + case c0 < 0xF8: // 4-byte UTF-8 + if len(s) < 4 { + return 0, 0 + } + i := widthIndex[c0] + c1 := s[1] + if c1 < 0x80 || 0xC0 <= c1 { + return 0, 1 // Illegal UTF-8: not a continuation byte. + } + o := uint32(i)<<6 + uint32(c1) + i = widthIndex[o] + c2 := s[2] + if c2 < 0x80 || 0xC0 <= c2 { + return 0, 2 // Illegal UTF-8: not a continuation byte. + } + o = uint32(i)<<6 + uint32(c2) + i = widthIndex[o] + c3 := s[3] + if c3 < 0x80 || 0xC0 <= c3 { + return 0, 3 // Illegal UTF-8: not a continuation byte. + } + return t.lookupValue(uint32(i), c3), 4 + } + // Illegal rune + return 0, 1 +} + +// lookupUnsafe returns the trie value for the first UTF-8 encoding in s. +// s must start with a full and valid UTF-8 encoded rune. 
+func (t *widthTrie) lookupUnsafe(s []byte) uint16 { + c0 := s[0] + if c0 < 0x80 { // is ASCII + return widthValues[c0] + } + i := widthIndex[c0] + if c0 < 0xE0 { // 2-byte UTF-8 + return t.lookupValue(uint32(i), s[1]) + } + i = widthIndex[uint32(i)<<6+uint32(s[1])] + if c0 < 0xF0 { // 3-byte UTF-8 + return t.lookupValue(uint32(i), s[2]) + } + i = widthIndex[uint32(i)<<6+uint32(s[2])] + if c0 < 0xF8 { // 4-byte UTF-8 + return t.lookupValue(uint32(i), s[3]) + } + return 0 +} + +// lookupString returns the trie value for the first UTF-8 encoding in s and +// the width in bytes of this encoding. The size will be 0 if s does not +// hold enough bytes to complete the encoding. len(s) must be greater than 0. +func (t *widthTrie) lookupString(s string) (v uint16, sz int) { + c0 := s[0] + switch { + case c0 < 0x80: // is ASCII + return widthValues[c0], 1 + case c0 < 0xC2: + return 0, 1 // Illegal UTF-8: not a starter, not ASCII. + case c0 < 0xE0: // 2-byte UTF-8 + if len(s) < 2 { + return 0, 0 + } + i := widthIndex[c0] + c1 := s[1] + if c1 < 0x80 || 0xC0 <= c1 { + return 0, 1 // Illegal UTF-8: not a continuation byte. + } + return t.lookupValue(uint32(i), c1), 2 + case c0 < 0xF0: // 3-byte UTF-8 + if len(s) < 3 { + return 0, 0 + } + i := widthIndex[c0] + c1 := s[1] + if c1 < 0x80 || 0xC0 <= c1 { + return 0, 1 // Illegal UTF-8: not a continuation byte. + } + o := uint32(i)<<6 + uint32(c1) + i = widthIndex[o] + c2 := s[2] + if c2 < 0x80 || 0xC0 <= c2 { + return 0, 2 // Illegal UTF-8: not a continuation byte. + } + return t.lookupValue(uint32(i), c2), 3 + case c0 < 0xF8: // 4-byte UTF-8 + if len(s) < 4 { + return 0, 0 + } + i := widthIndex[c0] + c1 := s[1] + if c1 < 0x80 || 0xC0 <= c1 { + return 0, 1 // Illegal UTF-8: not a continuation byte. + } + o := uint32(i)<<6 + uint32(c1) + i = widthIndex[o] + c2 := s[2] + if c2 < 0x80 || 0xC0 <= c2 { + return 0, 2 // Illegal UTF-8: not a continuation byte. + } + o = uint32(i)<<6 + uint32(c2) + i = widthIndex[o] + c3 := s[3] + if c3 < 0x80 || 0xC0 <= c3 { + return 0, 3 // Illegal UTF-8: not a continuation byte. + } + return t.lookupValue(uint32(i), c3), 4 + } + // Illegal rune + return 0, 1 +} + +// lookupStringUnsafe returns the trie value for the first UTF-8 encoding in s. +// s must start with a full and valid UTF-8 encoded rune. +func (t *widthTrie) lookupStringUnsafe(s string) uint16 { + c0 := s[0] + if c0 < 0x80 { // is ASCII + return widthValues[c0] + } + i := widthIndex[c0] + if c0 < 0xE0 { // 2-byte UTF-8 + return t.lookupValue(uint32(i), s[1]) + } + i = widthIndex[uint32(i)<<6+uint32(s[1])] + if c0 < 0xF0 { // 3-byte UTF-8 + return t.lookupValue(uint32(i), s[2]) + } + i = widthIndex[uint32(i)<<6+uint32(s[2])] + if c0 < 0xF8 { // 4-byte UTF-8 + return t.lookupValue(uint32(i), s[3]) + } + return 0 +} + +// widthTrie. Total size: 14080 bytes (13.75 KiB). Checksum: 3b8aeb3dc03667a3. +type widthTrie struct{} + +func newWidthTrie(i int) *widthTrie { + return &widthTrie{} +} + +// lookupValue determines the type of block n and looks up the value for b. +func (t *widthTrie) lookupValue(n uint32, b byte) uint16 { + switch { + default: + return uint16(widthValues[n<<6+uint32(b)]) + } +} + +// widthValues: 99 blocks, 6336 entries, 12672 bytes +// The third block is the zero block. 
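The lookup helpers above are unexported and are normally reached through the package's public API rather than called directly. A minimal usage sketch, assuming the exported golang.org/x/text/width API as published upstream (LookupRune, Properties.Kind, Properties.Narrow), which is not itself shown in this patch:

package main

import (
	"fmt"

	"golang.org/x/text/width"
)

func main() {
	// LookupRune is ultimately backed by the widthTrie/widthValues tables in this file.
	p := width.LookupRune('Ａ') // U+FF21
	fmt.Println(p.Kind())          // expected: EastAsianFullwidth
	fmt.Printf("%c\n", p.Narrow()) // expected: A
}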
+var widthValues = [6336]uint16{ + // Block 0x0, offset 0x0 + 0x20: 0x6001, 0x21: 0x6002, 0x22: 0x6002, 0x23: 0x6002, + 0x24: 0x6002, 0x25: 0x6002, 0x26: 0x6002, 0x27: 0x6002, 0x28: 0x6002, 0x29: 0x6002, + 0x2a: 0x6002, 0x2b: 0x6002, 0x2c: 0x6002, 0x2d: 0x6002, 0x2e: 0x6002, 0x2f: 0x6002, + 0x30: 0x6002, 0x31: 0x6002, 0x32: 0x6002, 0x33: 0x6002, 0x34: 0x6002, 0x35: 0x6002, + 0x36: 0x6002, 0x37: 0x6002, 0x38: 0x6002, 0x39: 0x6002, 0x3a: 0x6002, 0x3b: 0x6002, + 0x3c: 0x6002, 0x3d: 0x6002, 0x3e: 0x6002, 0x3f: 0x6002, + // Block 0x1, offset 0x40 + 0x40: 0x6003, 0x41: 0x6003, 0x42: 0x6003, 0x43: 0x6003, 0x44: 0x6003, 0x45: 0x6003, + 0x46: 0x6003, 0x47: 0x6003, 0x48: 0x6003, 0x49: 0x6003, 0x4a: 0x6003, 0x4b: 0x6003, + 0x4c: 0x6003, 0x4d: 0x6003, 0x4e: 0x6003, 0x4f: 0x6003, 0x50: 0x6003, 0x51: 0x6003, + 0x52: 0x6003, 0x53: 0x6003, 0x54: 0x6003, 0x55: 0x6003, 0x56: 0x6003, 0x57: 0x6003, + 0x58: 0x6003, 0x59: 0x6003, 0x5a: 0x6003, 0x5b: 0x6003, 0x5c: 0x6003, 0x5d: 0x6003, + 0x5e: 0x6003, 0x5f: 0x6003, 0x60: 0x6004, 0x61: 0x6004, 0x62: 0x6004, 0x63: 0x6004, + 0x64: 0x6004, 0x65: 0x6004, 0x66: 0x6004, 0x67: 0x6004, 0x68: 0x6004, 0x69: 0x6004, + 0x6a: 0x6004, 0x6b: 0x6004, 0x6c: 0x6004, 0x6d: 0x6004, 0x6e: 0x6004, 0x6f: 0x6004, + 0x70: 0x6004, 0x71: 0x6004, 0x72: 0x6004, 0x73: 0x6004, 0x74: 0x6004, 0x75: 0x6004, + 0x76: 0x6004, 0x77: 0x6004, 0x78: 0x6004, 0x79: 0x6004, 0x7a: 0x6004, 0x7b: 0x6004, + 0x7c: 0x6004, 0x7d: 0x6004, 0x7e: 0x6004, + // Block 0x2, offset 0x80 + // Block 0x3, offset 0xc0 + 0xe1: 0x2000, 0xe2: 0x6005, 0xe3: 0x6005, + 0xe4: 0x2000, 0xe5: 0x6006, 0xe6: 0x6005, 0xe7: 0x2000, 0xe8: 0x2000, + 0xea: 0x2000, 0xec: 0x6007, 0xed: 0x2000, 0xee: 0x2000, 0xef: 0x6008, + 0xf0: 0x2000, 0xf1: 0x2000, 0xf2: 0x2000, 0xf3: 0x2000, 0xf4: 0x2000, + 0xf6: 0x2000, 0xf7: 0x2000, 0xf8: 0x2000, 0xf9: 0x2000, 0xfa: 0x2000, + 0xfc: 0x2000, 0xfd: 0x2000, 0xfe: 0x2000, 0xff: 0x2000, + // Block 0x4, offset 0x100 + 0x106: 0x2000, + 0x110: 0x2000, + 0x117: 0x2000, + 0x118: 0x2000, + 0x11e: 0x2000, 0x11f: 0x2000, 0x120: 0x2000, 0x121: 0x2000, + 0x126: 0x2000, 0x128: 0x2000, 0x129: 0x2000, + 0x12a: 0x2000, 0x12c: 0x2000, 0x12d: 0x2000, + 0x130: 0x2000, 0x132: 0x2000, 0x133: 0x2000, + 0x137: 0x2000, 0x138: 0x2000, 0x139: 0x2000, 0x13a: 0x2000, + 0x13c: 0x2000, 0x13e: 0x2000, + // Block 0x5, offset 0x140 + 0x141: 0x2000, + 0x151: 0x2000, + 0x153: 0x2000, + 0x15b: 0x2000, + 0x166: 0x2000, 0x167: 0x2000, + 0x16b: 0x2000, + 0x171: 0x2000, 0x172: 0x2000, 0x173: 0x2000, + 0x178: 0x2000, + 0x17f: 0x2000, + // Block 0x6, offset 0x180 + 0x180: 0x2000, 0x181: 0x2000, 0x182: 0x2000, 0x184: 0x2000, + 0x188: 0x2000, 0x189: 0x2000, 0x18a: 0x2000, 0x18b: 0x2000, + 0x18d: 0x2000, + 0x192: 0x2000, 0x193: 0x2000, + 0x1a6: 0x2000, 0x1a7: 0x2000, + 0x1ab: 0x2000, + // Block 0x7, offset 0x1c0 + 0x1ce: 0x2000, 0x1d0: 0x2000, + 0x1d2: 0x2000, 0x1d4: 0x2000, 0x1d6: 0x2000, + 0x1d8: 0x2000, 0x1da: 0x2000, 0x1dc: 0x2000, + // Block 0x8, offset 0x200 + 0x211: 0x2000, + 0x221: 0x2000, + // Block 0x9, offset 0x240 + 0x244: 0x2000, + 0x247: 0x2000, 0x249: 0x2000, 0x24a: 0x2000, 0x24b: 0x2000, + 0x24d: 0x2000, 0x250: 0x2000, + 0x258: 0x2000, 0x259: 0x2000, 0x25a: 0x2000, 0x25b: 0x2000, 0x25d: 0x2000, + 0x25f: 0x2000, + // Block 0xa, offset 0x280 + 0x280: 0x2000, 0x281: 0x2000, 0x282: 0x2000, 0x283: 0x2000, 0x284: 0x2000, 0x285: 0x2000, + 0x286: 0x2000, 0x287: 0x2000, 0x288: 0x2000, 0x289: 0x2000, 0x28a: 0x2000, 0x28b: 0x2000, + 0x28c: 0x2000, 0x28d: 0x2000, 0x28e: 0x2000, 0x28f: 0x2000, 0x290: 0x2000, 0x291: 0x2000, + 0x292: 0x2000, 0x293: 
0x2000, 0x294: 0x2000, 0x295: 0x2000, 0x296: 0x2000, 0x297: 0x2000, + 0x298: 0x2000, 0x299: 0x2000, 0x29a: 0x2000, 0x29b: 0x2000, 0x29c: 0x2000, 0x29d: 0x2000, + 0x29e: 0x2000, 0x29f: 0x2000, 0x2a0: 0x2000, 0x2a1: 0x2000, 0x2a2: 0x2000, 0x2a3: 0x2000, + 0x2a4: 0x2000, 0x2a5: 0x2000, 0x2a6: 0x2000, 0x2a7: 0x2000, 0x2a8: 0x2000, 0x2a9: 0x2000, + 0x2aa: 0x2000, 0x2ab: 0x2000, 0x2ac: 0x2000, 0x2ad: 0x2000, 0x2ae: 0x2000, 0x2af: 0x2000, + 0x2b0: 0x2000, 0x2b1: 0x2000, 0x2b2: 0x2000, 0x2b3: 0x2000, 0x2b4: 0x2000, 0x2b5: 0x2000, + 0x2b6: 0x2000, 0x2b7: 0x2000, 0x2b8: 0x2000, 0x2b9: 0x2000, 0x2ba: 0x2000, 0x2bb: 0x2000, + 0x2bc: 0x2000, 0x2bd: 0x2000, 0x2be: 0x2000, 0x2bf: 0x2000, + // Block 0xb, offset 0x2c0 + 0x2c0: 0x2000, 0x2c1: 0x2000, 0x2c2: 0x2000, 0x2c3: 0x2000, 0x2c4: 0x2000, 0x2c5: 0x2000, + 0x2c6: 0x2000, 0x2c7: 0x2000, 0x2c8: 0x2000, 0x2c9: 0x2000, 0x2ca: 0x2000, 0x2cb: 0x2000, + 0x2cc: 0x2000, 0x2cd: 0x2000, 0x2ce: 0x2000, 0x2cf: 0x2000, 0x2d0: 0x2000, 0x2d1: 0x2000, + 0x2d2: 0x2000, 0x2d3: 0x2000, 0x2d4: 0x2000, 0x2d5: 0x2000, 0x2d6: 0x2000, 0x2d7: 0x2000, + 0x2d8: 0x2000, 0x2d9: 0x2000, 0x2da: 0x2000, 0x2db: 0x2000, 0x2dc: 0x2000, 0x2dd: 0x2000, + 0x2de: 0x2000, 0x2df: 0x2000, 0x2e0: 0x2000, 0x2e1: 0x2000, 0x2e2: 0x2000, 0x2e3: 0x2000, + 0x2e4: 0x2000, 0x2e5: 0x2000, 0x2e6: 0x2000, 0x2e7: 0x2000, 0x2e8: 0x2000, 0x2e9: 0x2000, + 0x2ea: 0x2000, 0x2eb: 0x2000, 0x2ec: 0x2000, 0x2ed: 0x2000, 0x2ee: 0x2000, 0x2ef: 0x2000, + // Block 0xc, offset 0x300 + 0x311: 0x2000, + 0x312: 0x2000, 0x313: 0x2000, 0x314: 0x2000, 0x315: 0x2000, 0x316: 0x2000, 0x317: 0x2000, + 0x318: 0x2000, 0x319: 0x2000, 0x31a: 0x2000, 0x31b: 0x2000, 0x31c: 0x2000, 0x31d: 0x2000, + 0x31e: 0x2000, 0x31f: 0x2000, 0x320: 0x2000, 0x321: 0x2000, 0x323: 0x2000, + 0x324: 0x2000, 0x325: 0x2000, 0x326: 0x2000, 0x327: 0x2000, 0x328: 0x2000, 0x329: 0x2000, + 0x331: 0x2000, 0x332: 0x2000, 0x333: 0x2000, 0x334: 0x2000, 0x335: 0x2000, + 0x336: 0x2000, 0x337: 0x2000, 0x338: 0x2000, 0x339: 0x2000, 0x33a: 0x2000, 0x33b: 0x2000, + 0x33c: 0x2000, 0x33d: 0x2000, 0x33e: 0x2000, 0x33f: 0x2000, + // Block 0xd, offset 0x340 + 0x340: 0x2000, 0x341: 0x2000, 0x343: 0x2000, 0x344: 0x2000, 0x345: 0x2000, + 0x346: 0x2000, 0x347: 0x2000, 0x348: 0x2000, 0x349: 0x2000, + // Block 0xe, offset 0x380 + 0x381: 0x2000, + 0x390: 0x2000, 0x391: 0x2000, + 0x392: 0x2000, 0x393: 0x2000, 0x394: 0x2000, 0x395: 0x2000, 0x396: 0x2000, 0x397: 0x2000, + 0x398: 0x2000, 0x399: 0x2000, 0x39a: 0x2000, 0x39b: 0x2000, 0x39c: 0x2000, 0x39d: 0x2000, + 0x39e: 0x2000, 0x39f: 0x2000, 0x3a0: 0x2000, 0x3a1: 0x2000, 0x3a2: 0x2000, 0x3a3: 0x2000, + 0x3a4: 0x2000, 0x3a5: 0x2000, 0x3a6: 0x2000, 0x3a7: 0x2000, 0x3a8: 0x2000, 0x3a9: 0x2000, + 0x3aa: 0x2000, 0x3ab: 0x2000, 0x3ac: 0x2000, 0x3ad: 0x2000, 0x3ae: 0x2000, 0x3af: 0x2000, + 0x3b0: 0x2000, 0x3b1: 0x2000, 0x3b2: 0x2000, 0x3b3: 0x2000, 0x3b4: 0x2000, 0x3b5: 0x2000, + 0x3b6: 0x2000, 0x3b7: 0x2000, 0x3b8: 0x2000, 0x3b9: 0x2000, 0x3ba: 0x2000, 0x3bb: 0x2000, + 0x3bc: 0x2000, 0x3bd: 0x2000, 0x3be: 0x2000, 0x3bf: 0x2000, + // Block 0xf, offset 0x3c0 + 0x3c0: 0x2000, 0x3c1: 0x2000, 0x3c2: 0x2000, 0x3c3: 0x2000, 0x3c4: 0x2000, 0x3c5: 0x2000, + 0x3c6: 0x2000, 0x3c7: 0x2000, 0x3c8: 0x2000, 0x3c9: 0x2000, 0x3ca: 0x2000, 0x3cb: 0x2000, + 0x3cc: 0x2000, 0x3cd: 0x2000, 0x3ce: 0x2000, 0x3cf: 0x2000, 0x3d1: 0x2000, + // Block 0x10, offset 0x400 + 0x400: 0x4000, 0x401: 0x4000, 0x402: 0x4000, 0x403: 0x4000, 0x404: 0x4000, 0x405: 0x4000, + 0x406: 0x4000, 0x407: 0x4000, 0x408: 0x4000, 0x409: 0x4000, 0x40a: 0x4000, 0x40b: 0x4000, + 0x40c: 0x4000, 
0x40d: 0x4000, 0x40e: 0x4000, 0x40f: 0x4000, 0x410: 0x4000, 0x411: 0x4000, + 0x412: 0x4000, 0x413: 0x4000, 0x414: 0x4000, 0x415: 0x4000, 0x416: 0x4000, 0x417: 0x4000, + 0x418: 0x4000, 0x419: 0x4000, 0x41a: 0x4000, 0x41b: 0x4000, 0x41c: 0x4000, 0x41d: 0x4000, + 0x41e: 0x4000, 0x41f: 0x4000, 0x420: 0x4000, 0x421: 0x4000, 0x422: 0x4000, 0x423: 0x4000, + 0x424: 0x4000, 0x425: 0x4000, 0x426: 0x4000, 0x427: 0x4000, 0x428: 0x4000, 0x429: 0x4000, + 0x42a: 0x4000, 0x42b: 0x4000, 0x42c: 0x4000, 0x42d: 0x4000, 0x42e: 0x4000, 0x42f: 0x4000, + 0x430: 0x4000, 0x431: 0x4000, 0x432: 0x4000, 0x433: 0x4000, 0x434: 0x4000, 0x435: 0x4000, + 0x436: 0x4000, 0x437: 0x4000, 0x438: 0x4000, 0x439: 0x4000, 0x43a: 0x4000, 0x43b: 0x4000, + 0x43c: 0x4000, 0x43d: 0x4000, 0x43e: 0x4000, 0x43f: 0x4000, + // Block 0x11, offset 0x440 + 0x440: 0x4000, 0x441: 0x4000, 0x442: 0x4000, 0x443: 0x4000, 0x444: 0x4000, 0x445: 0x4000, + 0x446: 0x4000, 0x447: 0x4000, 0x448: 0x4000, 0x449: 0x4000, 0x44a: 0x4000, 0x44b: 0x4000, + 0x44c: 0x4000, 0x44d: 0x4000, 0x44e: 0x4000, 0x44f: 0x4000, 0x450: 0x4000, 0x451: 0x4000, + 0x452: 0x4000, 0x453: 0x4000, 0x454: 0x4000, 0x455: 0x4000, 0x456: 0x4000, 0x457: 0x4000, + 0x458: 0x4000, 0x459: 0x4000, 0x45a: 0x4000, 0x45b: 0x4000, 0x45c: 0x4000, 0x45d: 0x4000, + 0x45e: 0x4000, 0x45f: 0x4000, + // Block 0x12, offset 0x480 + 0x490: 0x2000, + 0x493: 0x2000, 0x494: 0x2000, 0x495: 0x2000, 0x496: 0x2000, + 0x498: 0x2000, 0x499: 0x2000, 0x49c: 0x2000, 0x49d: 0x2000, + 0x4a0: 0x2000, 0x4a1: 0x2000, 0x4a2: 0x2000, + 0x4a4: 0x2000, 0x4a5: 0x2000, 0x4a6: 0x2000, 0x4a7: 0x2000, + 0x4b0: 0x2000, 0x4b2: 0x2000, 0x4b3: 0x2000, 0x4b5: 0x2000, + 0x4bb: 0x2000, + 0x4be: 0x2000, + // Block 0x13, offset 0x4c0 + 0x4f4: 0x2000, + 0x4ff: 0x2000, + // Block 0x14, offset 0x500 + 0x501: 0x2000, 0x502: 0x2000, 0x503: 0x2000, 0x504: 0x2000, + 0x529: 0xa009, + 0x52c: 0x2000, + // Block 0x15, offset 0x540 + 0x543: 0x2000, 0x545: 0x2000, + 0x549: 0x2000, + 0x553: 0x2000, 0x556: 0x2000, + 0x561: 0x2000, 0x562: 0x2000, + 0x566: 0x2000, + 0x56b: 0x2000, + // Block 0x16, offset 0x580 + 0x593: 0x2000, 0x594: 0x2000, + 0x59b: 0x2000, 0x59c: 0x2000, 0x59d: 0x2000, + 0x59e: 0x2000, 0x5a0: 0x2000, 0x5a1: 0x2000, 0x5a2: 0x2000, 0x5a3: 0x2000, + 0x5a4: 0x2000, 0x5a5: 0x2000, 0x5a6: 0x2000, 0x5a7: 0x2000, 0x5a8: 0x2000, 0x5a9: 0x2000, + 0x5aa: 0x2000, 0x5ab: 0x2000, + 0x5b0: 0x2000, 0x5b1: 0x2000, 0x5b2: 0x2000, 0x5b3: 0x2000, 0x5b4: 0x2000, 0x5b5: 0x2000, + 0x5b6: 0x2000, 0x5b7: 0x2000, 0x5b8: 0x2000, 0x5b9: 0x2000, + // Block 0x17, offset 0x5c0 + 0x5c9: 0x2000, + 0x5d0: 0x200a, 0x5d1: 0x200b, + 0x5d2: 0x200a, 0x5d3: 0x200c, 0x5d4: 0x2000, 0x5d5: 0x2000, 0x5d6: 0x2000, 0x5d7: 0x2000, + 0x5d8: 0x2000, 0x5d9: 0x2000, + 0x5f8: 0x2000, 0x5f9: 0x2000, + // Block 0x18, offset 0x600 + 0x612: 0x2000, 0x614: 0x2000, + 0x627: 0x2000, + // Block 0x19, offset 0x640 + 0x640: 0x2000, 0x642: 0x2000, 0x643: 0x2000, + 0x647: 0x2000, 0x648: 0x2000, 0x64b: 0x2000, + 0x64f: 0x2000, 0x651: 0x2000, + 0x655: 0x2000, + 0x65a: 0x2000, 0x65d: 0x2000, + 0x65e: 0x2000, 0x65f: 0x2000, 0x660: 0x2000, 0x663: 0x2000, + 0x665: 0x2000, 0x667: 0x2000, 0x668: 0x2000, 0x669: 0x2000, + 0x66a: 0x2000, 0x66b: 0x2000, 0x66c: 0x2000, 0x66e: 0x2000, + 0x674: 0x2000, 0x675: 0x2000, + 0x676: 0x2000, 0x677: 0x2000, + 0x67c: 0x2000, 0x67d: 0x2000, + // Block 0x1a, offset 0x680 + 0x688: 0x2000, + 0x68c: 0x2000, + 0x692: 0x2000, + 0x6a0: 0x2000, 0x6a1: 0x2000, + 0x6a4: 0x2000, 0x6a5: 0x2000, 0x6a6: 0x2000, 0x6a7: 0x2000, + 0x6aa: 0x2000, 0x6ab: 0x2000, 0x6ae: 0x2000, 0x6af: 0x2000, + // 
Block 0x1b, offset 0x6c0 + 0x6c2: 0x2000, 0x6c3: 0x2000, + 0x6c6: 0x2000, 0x6c7: 0x2000, + 0x6d5: 0x2000, + 0x6d9: 0x2000, + 0x6e5: 0x2000, + 0x6ff: 0x2000, + // Block 0x1c, offset 0x700 + 0x712: 0x2000, + 0x71a: 0x4000, 0x71b: 0x4000, + 0x729: 0x4000, + 0x72a: 0x4000, + // Block 0x1d, offset 0x740 + 0x769: 0x4000, + 0x76a: 0x4000, 0x76b: 0x4000, 0x76c: 0x4000, + 0x770: 0x4000, 0x773: 0x4000, + // Block 0x1e, offset 0x780 + 0x7a0: 0x2000, 0x7a1: 0x2000, 0x7a2: 0x2000, 0x7a3: 0x2000, + 0x7a4: 0x2000, 0x7a5: 0x2000, 0x7a6: 0x2000, 0x7a7: 0x2000, 0x7a8: 0x2000, 0x7a9: 0x2000, + 0x7aa: 0x2000, 0x7ab: 0x2000, 0x7ac: 0x2000, 0x7ad: 0x2000, 0x7ae: 0x2000, 0x7af: 0x2000, + 0x7b0: 0x2000, 0x7b1: 0x2000, 0x7b2: 0x2000, 0x7b3: 0x2000, 0x7b4: 0x2000, 0x7b5: 0x2000, + 0x7b6: 0x2000, 0x7b7: 0x2000, 0x7b8: 0x2000, 0x7b9: 0x2000, 0x7ba: 0x2000, 0x7bb: 0x2000, + 0x7bc: 0x2000, 0x7bd: 0x2000, 0x7be: 0x2000, 0x7bf: 0x2000, + // Block 0x1f, offset 0x7c0 + 0x7c0: 0x2000, 0x7c1: 0x2000, 0x7c2: 0x2000, 0x7c3: 0x2000, 0x7c4: 0x2000, 0x7c5: 0x2000, + 0x7c6: 0x2000, 0x7c7: 0x2000, 0x7c8: 0x2000, 0x7c9: 0x2000, 0x7ca: 0x2000, 0x7cb: 0x2000, + 0x7cc: 0x2000, 0x7cd: 0x2000, 0x7ce: 0x2000, 0x7cf: 0x2000, 0x7d0: 0x2000, 0x7d1: 0x2000, + 0x7d2: 0x2000, 0x7d3: 0x2000, 0x7d4: 0x2000, 0x7d5: 0x2000, 0x7d6: 0x2000, 0x7d7: 0x2000, + 0x7d8: 0x2000, 0x7d9: 0x2000, 0x7da: 0x2000, 0x7db: 0x2000, 0x7dc: 0x2000, 0x7dd: 0x2000, + 0x7de: 0x2000, 0x7df: 0x2000, 0x7e0: 0x2000, 0x7e1: 0x2000, 0x7e2: 0x2000, 0x7e3: 0x2000, + 0x7e4: 0x2000, 0x7e5: 0x2000, 0x7e6: 0x2000, 0x7e7: 0x2000, 0x7e8: 0x2000, 0x7e9: 0x2000, + 0x7eb: 0x2000, 0x7ec: 0x2000, 0x7ed: 0x2000, 0x7ee: 0x2000, 0x7ef: 0x2000, + 0x7f0: 0x2000, 0x7f1: 0x2000, 0x7f2: 0x2000, 0x7f3: 0x2000, 0x7f4: 0x2000, 0x7f5: 0x2000, + 0x7f6: 0x2000, 0x7f7: 0x2000, 0x7f8: 0x2000, 0x7f9: 0x2000, 0x7fa: 0x2000, 0x7fb: 0x2000, + 0x7fc: 0x2000, 0x7fd: 0x2000, 0x7fe: 0x2000, 0x7ff: 0x2000, + // Block 0x20, offset 0x800 + 0x800: 0x2000, 0x801: 0x2000, 0x802: 0x200d, 0x803: 0x2000, 0x804: 0x2000, 0x805: 0x2000, + 0x806: 0x2000, 0x807: 0x2000, 0x808: 0x2000, 0x809: 0x2000, 0x80a: 0x2000, 0x80b: 0x2000, + 0x80c: 0x2000, 0x80d: 0x2000, 0x80e: 0x2000, 0x80f: 0x2000, 0x810: 0x2000, 0x811: 0x2000, + 0x812: 0x2000, 0x813: 0x2000, 0x814: 0x2000, 0x815: 0x2000, 0x816: 0x2000, 0x817: 0x2000, + 0x818: 0x2000, 0x819: 0x2000, 0x81a: 0x2000, 0x81b: 0x2000, 0x81c: 0x2000, 0x81d: 0x2000, + 0x81e: 0x2000, 0x81f: 0x2000, 0x820: 0x2000, 0x821: 0x2000, 0x822: 0x2000, 0x823: 0x2000, + 0x824: 0x2000, 0x825: 0x2000, 0x826: 0x2000, 0x827: 0x2000, 0x828: 0x2000, 0x829: 0x2000, + 0x82a: 0x2000, 0x82b: 0x2000, 0x82c: 0x2000, 0x82d: 0x2000, 0x82e: 0x2000, 0x82f: 0x2000, + 0x830: 0x2000, 0x831: 0x2000, 0x832: 0x2000, 0x833: 0x2000, 0x834: 0x2000, 0x835: 0x2000, + 0x836: 0x2000, 0x837: 0x2000, 0x838: 0x2000, 0x839: 0x2000, 0x83a: 0x2000, 0x83b: 0x2000, + 0x83c: 0x2000, 0x83d: 0x2000, 0x83e: 0x2000, 0x83f: 0x2000, + // Block 0x21, offset 0x840 + 0x840: 0x2000, 0x841: 0x2000, 0x842: 0x2000, 0x843: 0x2000, 0x844: 0x2000, 0x845: 0x2000, + 0x846: 0x2000, 0x847: 0x2000, 0x848: 0x2000, 0x849: 0x2000, 0x84a: 0x2000, 0x84b: 0x2000, + 0x850: 0x2000, 0x851: 0x2000, + 0x852: 0x2000, 0x853: 0x2000, 0x854: 0x2000, 0x855: 0x2000, 0x856: 0x2000, 0x857: 0x2000, + 0x858: 0x2000, 0x859: 0x2000, 0x85a: 0x2000, 0x85b: 0x2000, 0x85c: 0x2000, 0x85d: 0x2000, + 0x85e: 0x2000, 0x85f: 0x2000, 0x860: 0x2000, 0x861: 0x2000, 0x862: 0x2000, 0x863: 0x2000, + 0x864: 0x2000, 0x865: 0x2000, 0x866: 0x2000, 0x867: 0x2000, 0x868: 0x2000, 0x869: 0x2000, + 0x86a: 
0x2000, 0x86b: 0x2000, 0x86c: 0x2000, 0x86d: 0x2000, 0x86e: 0x2000, 0x86f: 0x2000, + 0x870: 0x2000, 0x871: 0x2000, 0x872: 0x2000, 0x873: 0x2000, + // Block 0x22, offset 0x880 + 0x880: 0x2000, 0x881: 0x2000, 0x882: 0x2000, 0x883: 0x2000, 0x884: 0x2000, 0x885: 0x2000, + 0x886: 0x2000, 0x887: 0x2000, 0x888: 0x2000, 0x889: 0x2000, 0x88a: 0x2000, 0x88b: 0x2000, + 0x88c: 0x2000, 0x88d: 0x2000, 0x88e: 0x2000, 0x88f: 0x2000, + 0x892: 0x2000, 0x893: 0x2000, 0x894: 0x2000, 0x895: 0x2000, + 0x8a0: 0x200e, 0x8a1: 0x2000, 0x8a3: 0x2000, + 0x8a4: 0x2000, 0x8a5: 0x2000, 0x8a6: 0x2000, 0x8a7: 0x2000, 0x8a8: 0x2000, 0x8a9: 0x2000, + 0x8b2: 0x2000, 0x8b3: 0x2000, + 0x8b6: 0x2000, 0x8b7: 0x2000, + 0x8bc: 0x2000, 0x8bd: 0x2000, + // Block 0x23, offset 0x8c0 + 0x8c0: 0x2000, 0x8c1: 0x2000, + 0x8c6: 0x2000, 0x8c7: 0x2000, 0x8c8: 0x2000, 0x8cb: 0x200f, + 0x8ce: 0x2000, 0x8cf: 0x2000, 0x8d0: 0x2000, 0x8d1: 0x2000, + 0x8e2: 0x2000, 0x8e3: 0x2000, + 0x8e4: 0x2000, 0x8e5: 0x2000, + 0x8ef: 0x2000, + 0x8fd: 0x4000, 0x8fe: 0x4000, + // Block 0x24, offset 0x900 + 0x905: 0x2000, + 0x906: 0x2000, 0x909: 0x2000, + 0x90e: 0x2000, 0x90f: 0x2000, + 0x914: 0x4000, 0x915: 0x4000, + 0x91c: 0x2000, + 0x91e: 0x2000, + // Block 0x25, offset 0x940 + 0x940: 0x2000, 0x942: 0x2000, + 0x948: 0x4000, 0x949: 0x4000, 0x94a: 0x4000, 0x94b: 0x4000, + 0x94c: 0x4000, 0x94d: 0x4000, 0x94e: 0x4000, 0x94f: 0x4000, 0x950: 0x4000, 0x951: 0x4000, + 0x952: 0x4000, 0x953: 0x4000, + 0x960: 0x2000, 0x961: 0x2000, 0x963: 0x2000, + 0x964: 0x2000, 0x965: 0x2000, 0x967: 0x2000, 0x968: 0x2000, 0x969: 0x2000, + 0x96a: 0x2000, 0x96c: 0x2000, 0x96d: 0x2000, 0x96f: 0x2000, + 0x97f: 0x4000, + // Block 0x26, offset 0x980 + 0x993: 0x4000, + 0x99e: 0x2000, 0x99f: 0x2000, 0x9a1: 0x4000, + 0x9aa: 0x4000, 0x9ab: 0x4000, + 0x9bd: 0x4000, 0x9be: 0x4000, 0x9bf: 0x2000, + // Block 0x27, offset 0x9c0 + 0x9c4: 0x4000, 0x9c5: 0x4000, + 0x9c6: 0x2000, 0x9c7: 0x2000, 0x9c8: 0x2000, 0x9c9: 0x2000, 0x9ca: 0x2000, 0x9cb: 0x2000, + 0x9cc: 0x2000, 0x9cd: 0x2000, 0x9ce: 0x4000, 0x9cf: 0x2000, 0x9d0: 0x2000, 0x9d1: 0x2000, + 0x9d2: 0x2000, 0x9d3: 0x2000, 0x9d4: 0x4000, 0x9d5: 0x2000, 0x9d6: 0x2000, 0x9d7: 0x2000, + 0x9d8: 0x2000, 0x9d9: 0x2000, 0x9da: 0x2000, 0x9db: 0x2000, 0x9dc: 0x2000, 0x9dd: 0x2000, + 0x9de: 0x2000, 0x9df: 0x2000, 0x9e0: 0x2000, 0x9e1: 0x2000, 0x9e3: 0x2000, + 0x9e8: 0x2000, 0x9e9: 0x2000, + 0x9ea: 0x4000, 0x9eb: 0x2000, 0x9ec: 0x2000, 0x9ed: 0x2000, 0x9ee: 0x2000, 0x9ef: 0x2000, + 0x9f0: 0x2000, 0x9f1: 0x2000, 0x9f2: 0x4000, 0x9f3: 0x4000, 0x9f4: 0x2000, 0x9f5: 0x4000, + 0x9f6: 0x2000, 0x9f7: 0x2000, 0x9f8: 0x2000, 0x9f9: 0x2000, 0x9fa: 0x4000, 0x9fb: 0x2000, + 0x9fc: 0x2000, 0x9fd: 0x4000, 0x9fe: 0x2000, 0x9ff: 0x2000, + // Block 0x28, offset 0xa00 + 0xa05: 0x4000, + 0xa0a: 0x4000, 0xa0b: 0x4000, + 0xa28: 0x4000, + 0xa3d: 0x2000, + // Block 0x29, offset 0xa40 + 0xa4c: 0x4000, 0xa4e: 0x4000, + 0xa53: 0x4000, 0xa54: 0x4000, 0xa55: 0x4000, 0xa57: 0x4000, + 0xa76: 0x2000, 0xa77: 0x2000, 0xa78: 0x2000, 0xa79: 0x2000, 0xa7a: 0x2000, 0xa7b: 0x2000, + 0xa7c: 0x2000, 0xa7d: 0x2000, 0xa7e: 0x2000, 0xa7f: 0x2000, + // Block 0x2a, offset 0xa80 + 0xa95: 0x4000, 0xa96: 0x4000, 0xa97: 0x4000, + 0xab0: 0x4000, + 0xabf: 0x4000, + // Block 0x2b, offset 0xac0 + 0xae6: 0x6000, 0xae7: 0x6000, 0xae8: 0x6000, 0xae9: 0x6000, + 0xaea: 0x6000, 0xaeb: 0x6000, 0xaec: 0x6000, 0xaed: 0x6000, + // Block 0x2c, offset 0xb00 + 0xb05: 0x6010, + 0xb06: 0x6011, + // Block 0x2d, offset 0xb40 + 0xb5b: 0x4000, 0xb5c: 0x4000, + // Block 0x2e, offset 0xb80 + 0xb90: 0x4000, + 0xb95: 0x4000, 0xb96: 
0x2000, 0xb97: 0x2000, + 0xb98: 0x2000, 0xb99: 0x2000, + // Block 0x2f, offset 0xbc0 + 0xbc0: 0x4000, 0xbc1: 0x4000, 0xbc2: 0x4000, 0xbc3: 0x4000, 0xbc4: 0x4000, 0xbc5: 0x4000, + 0xbc6: 0x4000, 0xbc7: 0x4000, 0xbc8: 0x4000, 0xbc9: 0x4000, 0xbca: 0x4000, 0xbcb: 0x4000, + 0xbcc: 0x4000, 0xbcd: 0x4000, 0xbce: 0x4000, 0xbcf: 0x4000, 0xbd0: 0x4000, 0xbd1: 0x4000, + 0xbd2: 0x4000, 0xbd3: 0x4000, 0xbd4: 0x4000, 0xbd5: 0x4000, 0xbd6: 0x4000, 0xbd7: 0x4000, + 0xbd8: 0x4000, 0xbd9: 0x4000, 0xbdb: 0x4000, 0xbdc: 0x4000, 0xbdd: 0x4000, + 0xbde: 0x4000, 0xbdf: 0x4000, 0xbe0: 0x4000, 0xbe1: 0x4000, 0xbe2: 0x4000, 0xbe3: 0x4000, + 0xbe4: 0x4000, 0xbe5: 0x4000, 0xbe6: 0x4000, 0xbe7: 0x4000, 0xbe8: 0x4000, 0xbe9: 0x4000, + 0xbea: 0x4000, 0xbeb: 0x4000, 0xbec: 0x4000, 0xbed: 0x4000, 0xbee: 0x4000, 0xbef: 0x4000, + 0xbf0: 0x4000, 0xbf1: 0x4000, 0xbf2: 0x4000, 0xbf3: 0x4000, 0xbf4: 0x4000, 0xbf5: 0x4000, + 0xbf6: 0x4000, 0xbf7: 0x4000, 0xbf8: 0x4000, 0xbf9: 0x4000, 0xbfa: 0x4000, 0xbfb: 0x4000, + 0xbfc: 0x4000, 0xbfd: 0x4000, 0xbfe: 0x4000, 0xbff: 0x4000, + // Block 0x30, offset 0xc00 + 0xc00: 0x4000, 0xc01: 0x4000, 0xc02: 0x4000, 0xc03: 0x4000, 0xc04: 0x4000, 0xc05: 0x4000, + 0xc06: 0x4000, 0xc07: 0x4000, 0xc08: 0x4000, 0xc09: 0x4000, 0xc0a: 0x4000, 0xc0b: 0x4000, + 0xc0c: 0x4000, 0xc0d: 0x4000, 0xc0e: 0x4000, 0xc0f: 0x4000, 0xc10: 0x4000, 0xc11: 0x4000, + 0xc12: 0x4000, 0xc13: 0x4000, 0xc14: 0x4000, 0xc15: 0x4000, 0xc16: 0x4000, 0xc17: 0x4000, + 0xc18: 0x4000, 0xc19: 0x4000, 0xc1a: 0x4000, 0xc1b: 0x4000, 0xc1c: 0x4000, 0xc1d: 0x4000, + 0xc1e: 0x4000, 0xc1f: 0x4000, 0xc20: 0x4000, 0xc21: 0x4000, 0xc22: 0x4000, 0xc23: 0x4000, + 0xc24: 0x4000, 0xc25: 0x4000, 0xc26: 0x4000, 0xc27: 0x4000, 0xc28: 0x4000, 0xc29: 0x4000, + 0xc2a: 0x4000, 0xc2b: 0x4000, 0xc2c: 0x4000, 0xc2d: 0x4000, 0xc2e: 0x4000, 0xc2f: 0x4000, + 0xc30: 0x4000, 0xc31: 0x4000, 0xc32: 0x4000, 0xc33: 0x4000, + // Block 0x31, offset 0xc40 + 0xc40: 0x4000, 0xc41: 0x4000, 0xc42: 0x4000, 0xc43: 0x4000, 0xc44: 0x4000, 0xc45: 0x4000, + 0xc46: 0x4000, 0xc47: 0x4000, 0xc48: 0x4000, 0xc49: 0x4000, 0xc4a: 0x4000, 0xc4b: 0x4000, + 0xc4c: 0x4000, 0xc4d: 0x4000, 0xc4e: 0x4000, 0xc4f: 0x4000, 0xc50: 0x4000, 0xc51: 0x4000, + 0xc52: 0x4000, 0xc53: 0x4000, 0xc54: 0x4000, 0xc55: 0x4000, + 0xc70: 0x4000, 0xc71: 0x4000, 0xc72: 0x4000, 0xc73: 0x4000, 0xc74: 0x4000, 0xc75: 0x4000, + 0xc76: 0x4000, 0xc77: 0x4000, 0xc78: 0x4000, 0xc79: 0x4000, 0xc7a: 0x4000, 0xc7b: 0x4000, + // Block 0x32, offset 0xc80 + 0xc80: 0x9012, 0xc81: 0x4013, 0xc82: 0x4014, 0xc83: 0x4000, 0xc84: 0x4000, 0xc85: 0x4000, + 0xc86: 0x4000, 0xc87: 0x4000, 0xc88: 0x4000, 0xc89: 0x4000, 0xc8a: 0x4000, 0xc8b: 0x4000, + 0xc8c: 0x4015, 0xc8d: 0x4015, 0xc8e: 0x4000, 0xc8f: 0x4000, 0xc90: 0x4000, 0xc91: 0x4000, + 0xc92: 0x4000, 0xc93: 0x4000, 0xc94: 0x4000, 0xc95: 0x4000, 0xc96: 0x4000, 0xc97: 0x4000, + 0xc98: 0x4000, 0xc99: 0x4000, 0xc9a: 0x4000, 0xc9b: 0x4000, 0xc9c: 0x4000, 0xc9d: 0x4000, + 0xc9e: 0x4000, 0xc9f: 0x4000, 0xca0: 0x4000, 0xca1: 0x4000, 0xca2: 0x4000, 0xca3: 0x4000, + 0xca4: 0x4000, 0xca5: 0x4000, 0xca6: 0x4000, 0xca7: 0x4000, 0xca8: 0x4000, 0xca9: 0x4000, + 0xcaa: 0x4000, 0xcab: 0x4000, 0xcac: 0x4000, 0xcad: 0x4000, 0xcae: 0x4000, 0xcaf: 0x4000, + 0xcb0: 0x4000, 0xcb1: 0x4000, 0xcb2: 0x4000, 0xcb3: 0x4000, 0xcb4: 0x4000, 0xcb5: 0x4000, + 0xcb6: 0x4000, 0xcb7: 0x4000, 0xcb8: 0x4000, 0xcb9: 0x4000, 0xcba: 0x4000, 0xcbb: 0x4000, + 0xcbc: 0x4000, 0xcbd: 0x4000, 0xcbe: 0x4000, + // Block 0x33, offset 0xcc0 + 0xcc1: 0x4000, 0xcc2: 0x4000, 0xcc3: 0x4000, 0xcc4: 0x4000, 0xcc5: 0x4000, + 0xcc6: 0x4000, 
0xcc7: 0x4000, 0xcc8: 0x4000, 0xcc9: 0x4000, 0xcca: 0x4000, 0xccb: 0x4000, + 0xccc: 0x4000, 0xccd: 0x4000, 0xcce: 0x4000, 0xccf: 0x4000, 0xcd0: 0x4000, 0xcd1: 0x4000, + 0xcd2: 0x4000, 0xcd3: 0x4000, 0xcd4: 0x4000, 0xcd5: 0x4000, 0xcd6: 0x4000, 0xcd7: 0x4000, + 0xcd8: 0x4000, 0xcd9: 0x4000, 0xcda: 0x4000, 0xcdb: 0x4000, 0xcdc: 0x4000, 0xcdd: 0x4000, + 0xcde: 0x4000, 0xcdf: 0x4000, 0xce0: 0x4000, 0xce1: 0x4000, 0xce2: 0x4000, 0xce3: 0x4000, + 0xce4: 0x4000, 0xce5: 0x4000, 0xce6: 0x4000, 0xce7: 0x4000, 0xce8: 0x4000, 0xce9: 0x4000, + 0xcea: 0x4000, 0xceb: 0x4000, 0xcec: 0x4000, 0xced: 0x4000, 0xcee: 0x4000, 0xcef: 0x4000, + 0xcf0: 0x4000, 0xcf1: 0x4000, 0xcf2: 0x4000, 0xcf3: 0x4000, 0xcf4: 0x4000, 0xcf5: 0x4000, + 0xcf6: 0x4000, 0xcf7: 0x4000, 0xcf8: 0x4000, 0xcf9: 0x4000, 0xcfa: 0x4000, 0xcfb: 0x4000, + 0xcfc: 0x4000, 0xcfd: 0x4000, 0xcfe: 0x4000, 0xcff: 0x4000, + // Block 0x34, offset 0xd00 + 0xd00: 0x4000, 0xd01: 0x4000, 0xd02: 0x4000, 0xd03: 0x4000, 0xd04: 0x4000, 0xd05: 0x4000, + 0xd06: 0x4000, 0xd07: 0x4000, 0xd08: 0x4000, 0xd09: 0x4000, 0xd0a: 0x4000, 0xd0b: 0x4000, + 0xd0c: 0x4000, 0xd0d: 0x4000, 0xd0e: 0x4000, 0xd0f: 0x4000, 0xd10: 0x4000, 0xd11: 0x4000, + 0xd12: 0x4000, 0xd13: 0x4000, 0xd14: 0x4000, 0xd15: 0x4000, 0xd16: 0x4000, + 0xd19: 0x4016, 0xd1a: 0x4017, 0xd1b: 0x4000, 0xd1c: 0x4000, 0xd1d: 0x4000, + 0xd1e: 0x4000, 0xd1f: 0x4000, 0xd20: 0x4000, 0xd21: 0x4018, 0xd22: 0x4019, 0xd23: 0x401a, + 0xd24: 0x401b, 0xd25: 0x401c, 0xd26: 0x401d, 0xd27: 0x401e, 0xd28: 0x401f, 0xd29: 0x4020, + 0xd2a: 0x4021, 0xd2b: 0x4022, 0xd2c: 0x4000, 0xd2d: 0x4010, 0xd2e: 0x4000, 0xd2f: 0x4023, + 0xd30: 0x4000, 0xd31: 0x4024, 0xd32: 0x4000, 0xd33: 0x4025, 0xd34: 0x4000, 0xd35: 0x4026, + 0xd36: 0x4000, 0xd37: 0x401a, 0xd38: 0x4000, 0xd39: 0x4027, 0xd3a: 0x4000, 0xd3b: 0x4028, + 0xd3c: 0x4000, 0xd3d: 0x4020, 0xd3e: 0x4000, 0xd3f: 0x4029, + // Block 0x35, offset 0xd40 + 0xd40: 0x4000, 0xd41: 0x402a, 0xd42: 0x4000, 0xd43: 0x402b, 0xd44: 0x402c, 0xd45: 0x4000, + 0xd46: 0x4017, 0xd47: 0x4000, 0xd48: 0x402d, 0xd49: 0x4000, 0xd4a: 0x402e, 0xd4b: 0x402f, + 0xd4c: 0x4030, 0xd4d: 0x4017, 0xd4e: 0x4016, 0xd4f: 0x4017, 0xd50: 0x4000, 0xd51: 0x4000, + 0xd52: 0x4031, 0xd53: 0x4000, 0xd54: 0x4000, 0xd55: 0x4031, 0xd56: 0x4000, 0xd57: 0x4000, + 0xd58: 0x4032, 0xd59: 0x4000, 0xd5a: 0x4000, 0xd5b: 0x4032, 0xd5c: 0x4000, 0xd5d: 0x4000, + 0xd5e: 0x4033, 0xd5f: 0x402e, 0xd60: 0x4034, 0xd61: 0x4035, 0xd62: 0x4034, 0xd63: 0x4036, + 0xd64: 0x4037, 0xd65: 0x4024, 0xd66: 0x4035, 0xd67: 0x4025, 0xd68: 0x4038, 0xd69: 0x4038, + 0xd6a: 0x4039, 0xd6b: 0x4039, 0xd6c: 0x403a, 0xd6d: 0x403a, 0xd6e: 0x4000, 0xd6f: 0x4035, + 0xd70: 0x4000, 0xd71: 0x4000, 0xd72: 0x403b, 0xd73: 0x403c, 0xd74: 0x4000, 0xd75: 0x4000, + 0xd76: 0x4000, 0xd77: 0x4000, 0xd78: 0x4000, 0xd79: 0x4000, 0xd7a: 0x4000, 0xd7b: 0x403d, + 0xd7c: 0x401c, 0xd7d: 0x4000, 0xd7e: 0x4000, 0xd7f: 0x4000, + // Block 0x36, offset 0xd80 + 0xd85: 0x4000, + 0xd86: 0x4000, 0xd87: 0x4000, 0xd88: 0x4000, 0xd89: 0x4000, 0xd8a: 0x4000, 0xd8b: 0x4000, + 0xd8c: 0x4000, 0xd8d: 0x4000, 0xd8e: 0x4000, 0xd8f: 0x4000, 0xd90: 0x4000, 0xd91: 0x4000, + 0xd92: 0x4000, 0xd93: 0x4000, 0xd94: 0x4000, 0xd95: 0x4000, 0xd96: 0x4000, 0xd97: 0x4000, + 0xd98: 0x4000, 0xd99: 0x4000, 0xd9a: 0x4000, 0xd9b: 0x4000, 0xd9c: 0x4000, 0xd9d: 0x4000, + 0xd9e: 0x4000, 0xd9f: 0x4000, 0xda0: 0x4000, 0xda1: 0x4000, 0xda2: 0x4000, 0xda3: 0x4000, + 0xda4: 0x4000, 0xda5: 0x4000, 0xda6: 0x4000, 0xda7: 0x4000, 0xda8: 0x4000, 0xda9: 0x4000, + 0xdaa: 0x4000, 0xdab: 0x4000, 0xdac: 0x4000, 0xdad: 0x4000, + 0xdb1: 0x403e, 0xdb2: 
0x403e, 0xdb3: 0x403e, 0xdb4: 0x403e, 0xdb5: 0x403e, + 0xdb6: 0x403e, 0xdb7: 0x403e, 0xdb8: 0x403e, 0xdb9: 0x403e, 0xdba: 0x403e, 0xdbb: 0x403e, + 0xdbc: 0x403e, 0xdbd: 0x403e, 0xdbe: 0x403e, 0xdbf: 0x403e, + // Block 0x37, offset 0xdc0 + 0xdc0: 0x4037, 0xdc1: 0x4037, 0xdc2: 0x4037, 0xdc3: 0x4037, 0xdc4: 0x4037, 0xdc5: 0x4037, + 0xdc6: 0x4037, 0xdc7: 0x4037, 0xdc8: 0x4037, 0xdc9: 0x4037, 0xdca: 0x4037, 0xdcb: 0x4037, + 0xdcc: 0x4037, 0xdcd: 0x4037, 0xdce: 0x4037, 0xdcf: 0x400e, 0xdd0: 0x403f, 0xdd1: 0x4040, + 0xdd2: 0x4041, 0xdd3: 0x4040, 0xdd4: 0x403f, 0xdd5: 0x4042, 0xdd6: 0x4043, 0xdd7: 0x4044, + 0xdd8: 0x4040, 0xdd9: 0x4041, 0xdda: 0x4040, 0xddb: 0x4045, 0xddc: 0x4009, 0xddd: 0x4045, + 0xdde: 0x4046, 0xddf: 0x4045, 0xde0: 0x4047, 0xde1: 0x400b, 0xde2: 0x400a, 0xde3: 0x400c, + 0xde4: 0x4048, 0xde5: 0x4000, 0xde6: 0x4000, 0xde7: 0x4000, 0xde8: 0x4000, 0xde9: 0x4000, + 0xdea: 0x4000, 0xdeb: 0x4000, 0xdec: 0x4000, 0xded: 0x4000, 0xdee: 0x4000, 0xdef: 0x4000, + 0xdf0: 0x4000, 0xdf1: 0x4000, 0xdf2: 0x4000, 0xdf3: 0x4000, 0xdf4: 0x4000, 0xdf5: 0x4000, + 0xdf6: 0x4000, 0xdf7: 0x4000, 0xdf8: 0x4000, 0xdf9: 0x4000, 0xdfa: 0x4000, 0xdfb: 0x4000, + 0xdfc: 0x4000, 0xdfd: 0x4000, 0xdfe: 0x4000, 0xdff: 0x4000, + // Block 0x38, offset 0xe00 + 0xe00: 0x4000, 0xe01: 0x4000, 0xe02: 0x4000, 0xe03: 0x4000, 0xe04: 0x4000, 0xe05: 0x4000, + 0xe06: 0x4000, 0xe07: 0x4000, 0xe08: 0x4000, 0xe09: 0x4000, 0xe0a: 0x4000, 0xe0b: 0x4000, + 0xe0c: 0x4000, 0xe0d: 0x4000, 0xe0e: 0x4000, 0xe10: 0x4000, 0xe11: 0x4000, + 0xe12: 0x4000, 0xe13: 0x4000, 0xe14: 0x4000, 0xe15: 0x4000, 0xe16: 0x4000, 0xe17: 0x4000, + 0xe18: 0x4000, 0xe19: 0x4000, 0xe1a: 0x4000, 0xe1b: 0x4000, 0xe1c: 0x4000, 0xe1d: 0x4000, + 0xe1e: 0x4000, 0xe1f: 0x4000, 0xe20: 0x4000, 0xe21: 0x4000, 0xe22: 0x4000, 0xe23: 0x4000, + 0xe24: 0x4000, 0xe25: 0x4000, 0xe26: 0x4000, 0xe27: 0x4000, 0xe28: 0x4000, 0xe29: 0x4000, + 0xe2a: 0x4000, 0xe2b: 0x4000, 0xe2c: 0x4000, 0xe2d: 0x4000, 0xe2e: 0x4000, 0xe2f: 0x4000, + 0xe30: 0x4000, 0xe31: 0x4000, 0xe32: 0x4000, 0xe33: 0x4000, 0xe34: 0x4000, 0xe35: 0x4000, + 0xe36: 0x4000, 0xe37: 0x4000, 0xe38: 0x4000, 0xe39: 0x4000, 0xe3a: 0x4000, + // Block 0x39, offset 0xe40 + 0xe40: 0x4000, 0xe41: 0x4000, 0xe42: 0x4000, 0xe43: 0x4000, 0xe44: 0x4000, 0xe45: 0x4000, + 0xe46: 0x4000, 0xe47: 0x4000, 0xe48: 0x4000, 0xe49: 0x4000, 0xe4a: 0x4000, 0xe4b: 0x4000, + 0xe4c: 0x4000, 0xe4d: 0x4000, 0xe4e: 0x4000, 0xe4f: 0x4000, 0xe50: 0x4000, 0xe51: 0x4000, + 0xe52: 0x4000, 0xe53: 0x4000, 0xe54: 0x4000, 0xe55: 0x4000, 0xe56: 0x4000, 0xe57: 0x4000, + 0xe58: 0x4000, 0xe59: 0x4000, 0xe5a: 0x4000, 0xe5b: 0x4000, 0xe5c: 0x4000, 0xe5d: 0x4000, + 0xe5e: 0x4000, 0xe5f: 0x4000, 0xe60: 0x4000, 0xe61: 0x4000, 0xe62: 0x4000, 0xe63: 0x4000, + 0xe70: 0x4000, 0xe71: 0x4000, 0xe72: 0x4000, 0xe73: 0x4000, 0xe74: 0x4000, 0xe75: 0x4000, + 0xe76: 0x4000, 0xe77: 0x4000, 0xe78: 0x4000, 0xe79: 0x4000, 0xe7a: 0x4000, 0xe7b: 0x4000, + 0xe7c: 0x4000, 0xe7d: 0x4000, 0xe7e: 0x4000, 0xe7f: 0x4000, + // Block 0x3a, offset 0xe80 + 0xe80: 0x4000, 0xe81: 0x4000, 0xe82: 0x4000, 0xe83: 0x4000, 0xe84: 0x4000, 0xe85: 0x4000, + 0xe86: 0x4000, 0xe87: 0x4000, 0xe88: 0x4000, 0xe89: 0x4000, 0xe8a: 0x4000, 0xe8b: 0x4000, + 0xe8c: 0x4000, 0xe8d: 0x4000, 0xe8e: 0x4000, 0xe8f: 0x4000, 0xe90: 0x4000, 0xe91: 0x4000, + 0xe92: 0x4000, 0xe93: 0x4000, 0xe94: 0x4000, 0xe95: 0x4000, 0xe96: 0x4000, 0xe97: 0x4000, + 0xe98: 0x4000, 0xe99: 0x4000, 0xe9a: 0x4000, 0xe9b: 0x4000, 0xe9c: 0x4000, 0xe9d: 0x4000, + 0xe9e: 0x4000, 0xea0: 0x4000, 0xea1: 0x4000, 0xea2: 0x4000, 0xea3: 0x4000, + 0xea4: 0x4000, 
0xea5: 0x4000, 0xea6: 0x4000, 0xea7: 0x4000, 0xea8: 0x4000, 0xea9: 0x4000, + 0xeaa: 0x4000, 0xeab: 0x4000, 0xeac: 0x4000, 0xead: 0x4000, 0xeae: 0x4000, 0xeaf: 0x4000, + 0xeb0: 0x4000, 0xeb1: 0x4000, 0xeb2: 0x4000, 0xeb3: 0x4000, 0xeb4: 0x4000, 0xeb5: 0x4000, + 0xeb6: 0x4000, 0xeb7: 0x4000, 0xeb8: 0x4000, 0xeb9: 0x4000, 0xeba: 0x4000, 0xebb: 0x4000, + 0xebc: 0x4000, 0xebd: 0x4000, 0xebe: 0x4000, 0xebf: 0x4000, + // Block 0x3b, offset 0xec0 + 0xec0: 0x4000, 0xec1: 0x4000, 0xec2: 0x4000, 0xec3: 0x4000, 0xec4: 0x4000, 0xec5: 0x4000, + 0xec6: 0x4000, 0xec7: 0x4000, 0xec8: 0x2000, 0xec9: 0x2000, 0xeca: 0x2000, 0xecb: 0x2000, + 0xecc: 0x2000, 0xecd: 0x2000, 0xece: 0x2000, 0xecf: 0x2000, 0xed0: 0x4000, 0xed1: 0x4000, + 0xed2: 0x4000, 0xed3: 0x4000, 0xed4: 0x4000, 0xed5: 0x4000, 0xed6: 0x4000, 0xed7: 0x4000, + 0xed8: 0x4000, 0xed9: 0x4000, 0xeda: 0x4000, 0xedb: 0x4000, 0xedc: 0x4000, 0xedd: 0x4000, + 0xede: 0x4000, 0xedf: 0x4000, 0xee0: 0x4000, 0xee1: 0x4000, 0xee2: 0x4000, 0xee3: 0x4000, + 0xee4: 0x4000, 0xee5: 0x4000, 0xee6: 0x4000, 0xee7: 0x4000, 0xee8: 0x4000, 0xee9: 0x4000, + 0xeea: 0x4000, 0xeeb: 0x4000, 0xeec: 0x4000, 0xeed: 0x4000, 0xeee: 0x4000, 0xeef: 0x4000, + 0xef0: 0x4000, 0xef1: 0x4000, 0xef2: 0x4000, 0xef3: 0x4000, 0xef4: 0x4000, 0xef5: 0x4000, + 0xef6: 0x4000, 0xef7: 0x4000, 0xef8: 0x4000, 0xef9: 0x4000, 0xefa: 0x4000, 0xefb: 0x4000, + 0xefc: 0x4000, 0xefd: 0x4000, 0xefe: 0x4000, 0xeff: 0x4000, + // Block 0x3c, offset 0xf00 + 0xf00: 0x4000, 0xf01: 0x4000, 0xf02: 0x4000, 0xf03: 0x4000, 0xf04: 0x4000, 0xf05: 0x4000, + 0xf06: 0x4000, 0xf07: 0x4000, 0xf08: 0x4000, 0xf09: 0x4000, 0xf0a: 0x4000, 0xf0b: 0x4000, + 0xf0c: 0x4000, 0xf0d: 0x4000, 0xf0e: 0x4000, 0xf0f: 0x4000, 0xf10: 0x4000, 0xf11: 0x4000, + 0xf12: 0x4000, 0xf13: 0x4000, 0xf14: 0x4000, 0xf15: 0x4000, 0xf16: 0x4000, 0xf17: 0x4000, + 0xf18: 0x4000, 0xf19: 0x4000, 0xf1a: 0x4000, 0xf1b: 0x4000, 0xf1c: 0x4000, 0xf1d: 0x4000, + 0xf1e: 0x4000, 0xf1f: 0x4000, 0xf20: 0x4000, 0xf21: 0x4000, 0xf22: 0x4000, 0xf23: 0x4000, + 0xf24: 0x4000, 0xf25: 0x4000, 0xf26: 0x4000, 0xf27: 0x4000, 0xf28: 0x4000, 0xf29: 0x4000, + 0xf2a: 0x4000, 0xf2b: 0x4000, 0xf2c: 0x4000, 0xf2d: 0x4000, 0xf2e: 0x4000, 0xf2f: 0x4000, + 0xf30: 0x4000, 0xf31: 0x4000, 0xf32: 0x4000, 0xf33: 0x4000, 0xf34: 0x4000, 0xf35: 0x4000, + 0xf36: 0x4000, 0xf37: 0x4000, 0xf38: 0x4000, 0xf39: 0x4000, 0xf3a: 0x4000, 0xf3b: 0x4000, + 0xf3c: 0x4000, 0xf3d: 0x4000, 0xf3e: 0x4000, + // Block 0x3d, offset 0xf40 + 0xf40: 0x4000, 0xf41: 0x4000, 0xf42: 0x4000, 0xf43: 0x4000, 0xf44: 0x4000, 0xf45: 0x4000, + 0xf46: 0x4000, 0xf47: 0x4000, 0xf48: 0x4000, 0xf49: 0x4000, 0xf4a: 0x4000, 0xf4b: 0x4000, + 0xf4c: 0x4000, 0xf50: 0x4000, 0xf51: 0x4000, + 0xf52: 0x4000, 0xf53: 0x4000, 0xf54: 0x4000, 0xf55: 0x4000, 0xf56: 0x4000, 0xf57: 0x4000, + 0xf58: 0x4000, 0xf59: 0x4000, 0xf5a: 0x4000, 0xf5b: 0x4000, 0xf5c: 0x4000, 0xf5d: 0x4000, + 0xf5e: 0x4000, 0xf5f: 0x4000, 0xf60: 0x4000, 0xf61: 0x4000, 0xf62: 0x4000, 0xf63: 0x4000, + 0xf64: 0x4000, 0xf65: 0x4000, 0xf66: 0x4000, 0xf67: 0x4000, 0xf68: 0x4000, 0xf69: 0x4000, + 0xf6a: 0x4000, 0xf6b: 0x4000, 0xf6c: 0x4000, 0xf6d: 0x4000, 0xf6e: 0x4000, 0xf6f: 0x4000, + 0xf70: 0x4000, 0xf71: 0x4000, 0xf72: 0x4000, 0xf73: 0x4000, 0xf74: 0x4000, 0xf75: 0x4000, + 0xf76: 0x4000, 0xf77: 0x4000, 0xf78: 0x4000, 0xf79: 0x4000, 0xf7a: 0x4000, 0xf7b: 0x4000, + 0xf7c: 0x4000, 0xf7d: 0x4000, 0xf7e: 0x4000, 0xf7f: 0x4000, + // Block 0x3e, offset 0xf80 + 0xf80: 0x4000, 0xf81: 0x4000, 0xf82: 0x4000, 0xf83: 0x4000, 0xf84: 0x4000, 0xf85: 0x4000, + 0xf86: 0x4000, + // Block 0x3f, offset 
0xfc0 + 0xfe0: 0x4000, 0xfe1: 0x4000, 0xfe2: 0x4000, 0xfe3: 0x4000, + 0xfe4: 0x4000, 0xfe5: 0x4000, 0xfe6: 0x4000, 0xfe7: 0x4000, 0xfe8: 0x4000, 0xfe9: 0x4000, + 0xfea: 0x4000, 0xfeb: 0x4000, 0xfec: 0x4000, 0xfed: 0x4000, 0xfee: 0x4000, 0xfef: 0x4000, + 0xff0: 0x4000, 0xff1: 0x4000, 0xff2: 0x4000, 0xff3: 0x4000, 0xff4: 0x4000, 0xff5: 0x4000, + 0xff6: 0x4000, 0xff7: 0x4000, 0xff8: 0x4000, 0xff9: 0x4000, 0xffa: 0x4000, 0xffb: 0x4000, + 0xffc: 0x4000, + // Block 0x40, offset 0x1000 + 0x1000: 0x4000, 0x1001: 0x4000, 0x1002: 0x4000, 0x1003: 0x4000, 0x1004: 0x4000, 0x1005: 0x4000, + 0x1006: 0x4000, 0x1007: 0x4000, 0x1008: 0x4000, 0x1009: 0x4000, 0x100a: 0x4000, 0x100b: 0x4000, + 0x100c: 0x4000, 0x100d: 0x4000, 0x100e: 0x4000, 0x100f: 0x4000, 0x1010: 0x4000, 0x1011: 0x4000, + 0x1012: 0x4000, 0x1013: 0x4000, 0x1014: 0x4000, 0x1015: 0x4000, 0x1016: 0x4000, 0x1017: 0x4000, + 0x1018: 0x4000, 0x1019: 0x4000, 0x101a: 0x4000, 0x101b: 0x4000, 0x101c: 0x4000, 0x101d: 0x4000, + 0x101e: 0x4000, 0x101f: 0x4000, 0x1020: 0x4000, 0x1021: 0x4000, 0x1022: 0x4000, 0x1023: 0x4000, + // Block 0x41, offset 0x1040 + 0x1040: 0x2000, 0x1041: 0x2000, 0x1042: 0x2000, 0x1043: 0x2000, 0x1044: 0x2000, 0x1045: 0x2000, + 0x1046: 0x2000, 0x1047: 0x2000, 0x1048: 0x2000, 0x1049: 0x2000, 0x104a: 0x2000, 0x104b: 0x2000, + 0x104c: 0x2000, 0x104d: 0x2000, 0x104e: 0x2000, 0x104f: 0x2000, 0x1050: 0x4000, 0x1051: 0x4000, + 0x1052: 0x4000, 0x1053: 0x4000, 0x1054: 0x4000, 0x1055: 0x4000, 0x1056: 0x4000, 0x1057: 0x4000, + 0x1058: 0x4000, 0x1059: 0x4000, + 0x1070: 0x4000, 0x1071: 0x4000, 0x1072: 0x4000, 0x1073: 0x4000, 0x1074: 0x4000, 0x1075: 0x4000, + 0x1076: 0x4000, 0x1077: 0x4000, 0x1078: 0x4000, 0x1079: 0x4000, 0x107a: 0x4000, 0x107b: 0x4000, + 0x107c: 0x4000, 0x107d: 0x4000, 0x107e: 0x4000, 0x107f: 0x4000, + // Block 0x42, offset 0x1080 + 0x1080: 0x4000, 0x1081: 0x4000, 0x1082: 0x4000, 0x1083: 0x4000, 0x1084: 0x4000, 0x1085: 0x4000, + 0x1086: 0x4000, 0x1087: 0x4000, 0x1088: 0x4000, 0x1089: 0x4000, 0x108a: 0x4000, 0x108b: 0x4000, + 0x108c: 0x4000, 0x108d: 0x4000, 0x108e: 0x4000, 0x108f: 0x4000, 0x1090: 0x4000, 0x1091: 0x4000, + 0x1092: 0x4000, 0x1094: 0x4000, 0x1095: 0x4000, 0x1096: 0x4000, 0x1097: 0x4000, + 0x1098: 0x4000, 0x1099: 0x4000, 0x109a: 0x4000, 0x109b: 0x4000, 0x109c: 0x4000, 0x109d: 0x4000, + 0x109e: 0x4000, 0x109f: 0x4000, 0x10a0: 0x4000, 0x10a1: 0x4000, 0x10a2: 0x4000, 0x10a3: 0x4000, + 0x10a4: 0x4000, 0x10a5: 0x4000, 0x10a6: 0x4000, 0x10a8: 0x4000, 0x10a9: 0x4000, + 0x10aa: 0x4000, 0x10ab: 0x4000, + // Block 0x43, offset 0x10c0 + 0x10c1: 0x9012, 0x10c2: 0x9012, 0x10c3: 0x9012, 0x10c4: 0x9012, 0x10c5: 0x9012, + 0x10c6: 0x9012, 0x10c7: 0x9012, 0x10c8: 0x9012, 0x10c9: 0x9012, 0x10ca: 0x9012, 0x10cb: 0x9012, + 0x10cc: 0x9012, 0x10cd: 0x9012, 0x10ce: 0x9012, 0x10cf: 0x9012, 0x10d0: 0x9012, 0x10d1: 0x9012, + 0x10d2: 0x9012, 0x10d3: 0x9012, 0x10d4: 0x9012, 0x10d5: 0x9012, 0x10d6: 0x9012, 0x10d7: 0x9012, + 0x10d8: 0x9012, 0x10d9: 0x9012, 0x10da: 0x9012, 0x10db: 0x9012, 0x10dc: 0x9012, 0x10dd: 0x9012, + 0x10de: 0x9012, 0x10df: 0x9012, 0x10e0: 0x9049, 0x10e1: 0x9049, 0x10e2: 0x9049, 0x10e3: 0x9049, + 0x10e4: 0x9049, 0x10e5: 0x9049, 0x10e6: 0x9049, 0x10e7: 0x9049, 0x10e8: 0x9049, 0x10e9: 0x9049, + 0x10ea: 0x9049, 0x10eb: 0x9049, 0x10ec: 0x9049, 0x10ed: 0x9049, 0x10ee: 0x9049, 0x10ef: 0x9049, + 0x10f0: 0x9049, 0x10f1: 0x9049, 0x10f2: 0x9049, 0x10f3: 0x9049, 0x10f4: 0x9049, 0x10f5: 0x9049, + 0x10f6: 0x9049, 0x10f7: 0x9049, 0x10f8: 0x9049, 0x10f9: 0x9049, 0x10fa: 0x9049, 0x10fb: 0x9049, + 0x10fc: 0x9049, 0x10fd: 0x9049, 0x10fe: 0x9049, 
0x10ff: 0x9049, + // Block 0x44, offset 0x1100 + 0x1100: 0x9049, 0x1101: 0x9049, 0x1102: 0x9049, 0x1103: 0x9049, 0x1104: 0x9049, 0x1105: 0x9049, + 0x1106: 0x9049, 0x1107: 0x9049, 0x1108: 0x9049, 0x1109: 0x9049, 0x110a: 0x9049, 0x110b: 0x9049, + 0x110c: 0x9049, 0x110d: 0x9049, 0x110e: 0x9049, 0x110f: 0x9049, 0x1110: 0x9049, 0x1111: 0x9049, + 0x1112: 0x9049, 0x1113: 0x9049, 0x1114: 0x9049, 0x1115: 0x9049, 0x1116: 0x9049, 0x1117: 0x9049, + 0x1118: 0x9049, 0x1119: 0x9049, 0x111a: 0x9049, 0x111b: 0x9049, 0x111c: 0x9049, 0x111d: 0x9049, + 0x111e: 0x9049, 0x111f: 0x904a, 0x1120: 0x904b, 0x1121: 0xb04c, 0x1122: 0xb04d, 0x1123: 0xb04d, + 0x1124: 0xb04e, 0x1125: 0xb04f, 0x1126: 0xb050, 0x1127: 0xb051, 0x1128: 0xb052, 0x1129: 0xb053, + 0x112a: 0xb054, 0x112b: 0xb055, 0x112c: 0xb056, 0x112d: 0xb057, 0x112e: 0xb058, 0x112f: 0xb059, + 0x1130: 0xb05a, 0x1131: 0xb05b, 0x1132: 0xb05c, 0x1133: 0xb05d, 0x1134: 0xb05e, 0x1135: 0xb05f, + 0x1136: 0xb060, 0x1137: 0xb061, 0x1138: 0xb062, 0x1139: 0xb063, 0x113a: 0xb064, 0x113b: 0xb065, + 0x113c: 0xb052, 0x113d: 0xb066, 0x113e: 0xb067, 0x113f: 0xb055, + // Block 0x45, offset 0x1140 + 0x1140: 0xb068, 0x1141: 0xb069, 0x1142: 0xb06a, 0x1143: 0xb06b, 0x1144: 0xb05a, 0x1145: 0xb056, + 0x1146: 0xb06c, 0x1147: 0xb06d, 0x1148: 0xb06b, 0x1149: 0xb06e, 0x114a: 0xb06b, 0x114b: 0xb06f, + 0x114c: 0xb06f, 0x114d: 0xb070, 0x114e: 0xb070, 0x114f: 0xb071, 0x1150: 0xb056, 0x1151: 0xb072, + 0x1152: 0xb073, 0x1153: 0xb072, 0x1154: 0xb074, 0x1155: 0xb073, 0x1156: 0xb075, 0x1157: 0xb075, + 0x1158: 0xb076, 0x1159: 0xb076, 0x115a: 0xb077, 0x115b: 0xb077, 0x115c: 0xb073, 0x115d: 0xb078, + 0x115e: 0xb079, 0x115f: 0xb067, 0x1160: 0xb07a, 0x1161: 0xb07b, 0x1162: 0xb07b, 0x1163: 0xb07b, + 0x1164: 0xb07b, 0x1165: 0xb07b, 0x1166: 0xb07b, 0x1167: 0xb07b, 0x1168: 0xb07b, 0x1169: 0xb07b, + 0x116a: 0xb07b, 0x116b: 0xb07b, 0x116c: 0xb07b, 0x116d: 0xb07b, 0x116e: 0xb07b, 0x116f: 0xb07b, + 0x1170: 0xb07c, 0x1171: 0xb07c, 0x1172: 0xb07c, 0x1173: 0xb07c, 0x1174: 0xb07c, 0x1175: 0xb07c, + 0x1176: 0xb07c, 0x1177: 0xb07c, 0x1178: 0xb07c, 0x1179: 0xb07c, 0x117a: 0xb07c, 0x117b: 0xb07c, + 0x117c: 0xb07c, 0x117d: 0xb07c, 0x117e: 0xb07c, + // Block 0x46, offset 0x1180 + 0x1182: 0xb07d, 0x1183: 0xb07e, 0x1184: 0xb07f, 0x1185: 0xb080, + 0x1186: 0xb07f, 0x1187: 0xb07e, 0x118a: 0xb081, 0x118b: 0xb082, + 0x118c: 0xb083, 0x118d: 0xb07f, 0x118e: 0xb080, 0x118f: 0xb07f, + 0x1192: 0xb084, 0x1193: 0xb085, 0x1194: 0xb084, 0x1195: 0xb086, 0x1196: 0xb084, 0x1197: 0xb087, + 0x119a: 0xb088, 0x119b: 0xb089, 0x119c: 0xb08a, + 0x11a0: 0x908b, 0x11a1: 0x908b, 0x11a2: 0x908c, 0x11a3: 0x908d, + 0x11a4: 0x908b, 0x11a5: 0x908e, 0x11a6: 0x908f, 0x11a8: 0xb090, 0x11a9: 0xb091, + 0x11aa: 0xb092, 0x11ab: 0xb091, 0x11ac: 0xb093, 0x11ad: 0xb094, 0x11ae: 0xb095, + 0x11bd: 0x2000, + // Block 0x47, offset 0x11c0 + 0x11e0: 0x4000, + // Block 0x48, offset 0x1200 + 0x1200: 0x4000, 0x1201: 0x4000, 0x1202: 0x4000, 0x1203: 0x4000, 0x1204: 0x4000, 0x1205: 0x4000, + 0x1206: 0x4000, 0x1207: 0x4000, 0x1208: 0x4000, 0x1209: 0x4000, 0x120a: 0x4000, 0x120b: 0x4000, + 0x120c: 0x4000, 0x120d: 0x4000, 0x120e: 0x4000, 0x120f: 0x4000, 0x1210: 0x4000, 0x1211: 0x4000, + 0x1212: 0x4000, 0x1213: 0x4000, 0x1214: 0x4000, 0x1215: 0x4000, 0x1216: 0x4000, 0x1217: 0x4000, + 0x1218: 0x4000, 0x1219: 0x4000, 0x121a: 0x4000, 0x121b: 0x4000, 0x121c: 0x4000, 0x121d: 0x4000, + 0x121e: 0x4000, 0x121f: 0x4000, 0x1220: 0x4000, 0x1221: 0x4000, 0x1222: 0x4000, 0x1223: 0x4000, + 0x1224: 0x4000, 0x1225: 0x4000, 0x1226: 0x4000, 0x1227: 0x4000, 0x1228: 0x4000, 0x1229: 0x4000, + 0x122a: 
0x4000, 0x122b: 0x4000, 0x122c: 0x4000, + // Block 0x49, offset 0x1240 + 0x1240: 0x4000, 0x1241: 0x4000, 0x1242: 0x4000, 0x1243: 0x4000, 0x1244: 0x4000, 0x1245: 0x4000, + 0x1246: 0x4000, 0x1247: 0x4000, 0x1248: 0x4000, 0x1249: 0x4000, 0x124a: 0x4000, 0x124b: 0x4000, + 0x124c: 0x4000, 0x124d: 0x4000, 0x124e: 0x4000, 0x124f: 0x4000, 0x1250: 0x4000, 0x1251: 0x4000, + 0x1252: 0x4000, 0x1253: 0x4000, 0x1254: 0x4000, 0x1255: 0x4000, 0x1256: 0x4000, 0x1257: 0x4000, + 0x1258: 0x4000, 0x1259: 0x4000, 0x125a: 0x4000, 0x125b: 0x4000, 0x125c: 0x4000, 0x125d: 0x4000, + 0x125e: 0x4000, 0x125f: 0x4000, 0x1260: 0x4000, 0x1261: 0x4000, 0x1262: 0x4000, 0x1263: 0x4000, + 0x1264: 0x4000, 0x1265: 0x4000, 0x1266: 0x4000, 0x1267: 0x4000, 0x1268: 0x4000, 0x1269: 0x4000, + 0x126a: 0x4000, 0x126b: 0x4000, 0x126c: 0x4000, 0x126d: 0x4000, 0x126e: 0x4000, 0x126f: 0x4000, + 0x1270: 0x4000, 0x1271: 0x4000, 0x1272: 0x4000, + // Block 0x4a, offset 0x1280 + 0x1280: 0x4000, 0x1281: 0x4000, + // Block 0x4b, offset 0x12c0 + 0x12c4: 0x4000, + // Block 0x4c, offset 0x1300 + 0x130f: 0x4000, + // Block 0x4d, offset 0x1340 + 0x1340: 0x2000, 0x1341: 0x2000, 0x1342: 0x2000, 0x1343: 0x2000, 0x1344: 0x2000, 0x1345: 0x2000, + 0x1346: 0x2000, 0x1347: 0x2000, 0x1348: 0x2000, 0x1349: 0x2000, 0x134a: 0x2000, + 0x1350: 0x2000, 0x1351: 0x2000, + 0x1352: 0x2000, 0x1353: 0x2000, 0x1354: 0x2000, 0x1355: 0x2000, 0x1356: 0x2000, 0x1357: 0x2000, + 0x1358: 0x2000, 0x1359: 0x2000, 0x135a: 0x2000, 0x135b: 0x2000, 0x135c: 0x2000, 0x135d: 0x2000, + 0x135e: 0x2000, 0x135f: 0x2000, 0x1360: 0x2000, 0x1361: 0x2000, 0x1362: 0x2000, 0x1363: 0x2000, + 0x1364: 0x2000, 0x1365: 0x2000, 0x1366: 0x2000, 0x1367: 0x2000, 0x1368: 0x2000, 0x1369: 0x2000, + 0x136a: 0x2000, 0x136b: 0x2000, 0x136c: 0x2000, 0x136d: 0x2000, + 0x1370: 0x2000, 0x1371: 0x2000, 0x1372: 0x2000, 0x1373: 0x2000, 0x1374: 0x2000, 0x1375: 0x2000, + 0x1376: 0x2000, 0x1377: 0x2000, 0x1378: 0x2000, 0x1379: 0x2000, 0x137a: 0x2000, 0x137b: 0x2000, + 0x137c: 0x2000, 0x137d: 0x2000, 0x137e: 0x2000, 0x137f: 0x2000, + // Block 0x4e, offset 0x1380 + 0x1380: 0x2000, 0x1381: 0x2000, 0x1382: 0x2000, 0x1383: 0x2000, 0x1384: 0x2000, 0x1385: 0x2000, + 0x1386: 0x2000, 0x1387: 0x2000, 0x1388: 0x2000, 0x1389: 0x2000, 0x138a: 0x2000, 0x138b: 0x2000, + 0x138c: 0x2000, 0x138d: 0x2000, 0x138e: 0x2000, 0x138f: 0x2000, 0x1390: 0x2000, 0x1391: 0x2000, + 0x1392: 0x2000, 0x1393: 0x2000, 0x1394: 0x2000, 0x1395: 0x2000, 0x1396: 0x2000, 0x1397: 0x2000, + 0x1398: 0x2000, 0x1399: 0x2000, 0x139a: 0x2000, 0x139b: 0x2000, 0x139c: 0x2000, 0x139d: 0x2000, + 0x139e: 0x2000, 0x139f: 0x2000, 0x13a0: 0x2000, 0x13a1: 0x2000, 0x13a2: 0x2000, 0x13a3: 0x2000, + 0x13a4: 0x2000, 0x13a5: 0x2000, 0x13a6: 0x2000, 0x13a7: 0x2000, 0x13a8: 0x2000, 0x13a9: 0x2000, + 0x13b0: 0x2000, 0x13b1: 0x2000, 0x13b2: 0x2000, 0x13b3: 0x2000, 0x13b4: 0x2000, 0x13b5: 0x2000, + 0x13b6: 0x2000, 0x13b7: 0x2000, 0x13b8: 0x2000, 0x13b9: 0x2000, 0x13ba: 0x2000, 0x13bb: 0x2000, + 0x13bc: 0x2000, 0x13bd: 0x2000, 0x13be: 0x2000, 0x13bf: 0x2000, + // Block 0x4f, offset 0x13c0 + 0x13c0: 0x2000, 0x13c1: 0x2000, 0x13c2: 0x2000, 0x13c3: 0x2000, 0x13c4: 0x2000, 0x13c5: 0x2000, + 0x13c6: 0x2000, 0x13c7: 0x2000, 0x13c8: 0x2000, 0x13c9: 0x2000, 0x13ca: 0x2000, 0x13cb: 0x2000, + 0x13cc: 0x2000, 0x13cd: 0x2000, 0x13ce: 0x4000, 0x13cf: 0x2000, 0x13d0: 0x2000, 0x13d1: 0x4000, + 0x13d2: 0x4000, 0x13d3: 0x4000, 0x13d4: 0x4000, 0x13d5: 0x4000, 0x13d6: 0x4000, 0x13d7: 0x4000, + 0x13d8: 0x4000, 0x13d9: 0x4000, 0x13da: 0x4000, 0x13db: 0x2000, 0x13dc: 0x2000, 0x13dd: 0x2000, + 0x13de: 0x2000, 
0x13df: 0x2000, 0x13e0: 0x2000, 0x13e1: 0x2000, 0x13e2: 0x2000, 0x13e3: 0x2000, + 0x13e4: 0x2000, 0x13e5: 0x2000, 0x13e6: 0x2000, 0x13e7: 0x2000, 0x13e8: 0x2000, 0x13e9: 0x2000, + 0x13ea: 0x2000, 0x13eb: 0x2000, 0x13ec: 0x2000, + // Block 0x50, offset 0x1400 + 0x1400: 0x4000, 0x1401: 0x4000, 0x1402: 0x4000, + 0x1410: 0x4000, 0x1411: 0x4000, + 0x1412: 0x4000, 0x1413: 0x4000, 0x1414: 0x4000, 0x1415: 0x4000, 0x1416: 0x4000, 0x1417: 0x4000, + 0x1418: 0x4000, 0x1419: 0x4000, 0x141a: 0x4000, 0x141b: 0x4000, 0x141c: 0x4000, 0x141d: 0x4000, + 0x141e: 0x4000, 0x141f: 0x4000, 0x1420: 0x4000, 0x1421: 0x4000, 0x1422: 0x4000, 0x1423: 0x4000, + 0x1424: 0x4000, 0x1425: 0x4000, 0x1426: 0x4000, 0x1427: 0x4000, 0x1428: 0x4000, 0x1429: 0x4000, + 0x142a: 0x4000, 0x142b: 0x4000, 0x142c: 0x4000, 0x142d: 0x4000, 0x142e: 0x4000, 0x142f: 0x4000, + 0x1430: 0x4000, 0x1431: 0x4000, 0x1432: 0x4000, 0x1433: 0x4000, 0x1434: 0x4000, 0x1435: 0x4000, + 0x1436: 0x4000, 0x1437: 0x4000, 0x1438: 0x4000, 0x1439: 0x4000, 0x143a: 0x4000, 0x143b: 0x4000, + // Block 0x51, offset 0x1440 + 0x1440: 0x4000, 0x1441: 0x4000, 0x1442: 0x4000, 0x1443: 0x4000, 0x1444: 0x4000, 0x1445: 0x4000, + 0x1446: 0x4000, 0x1447: 0x4000, 0x1448: 0x4000, + 0x1450: 0x4000, 0x1451: 0x4000, + // Block 0x52, offset 0x1480 + 0x1480: 0x4000, 0x1481: 0x4000, 0x1482: 0x4000, 0x1483: 0x4000, 0x1484: 0x4000, 0x1485: 0x4000, + 0x1486: 0x4000, 0x1487: 0x4000, 0x1488: 0x4000, 0x1489: 0x4000, 0x148a: 0x4000, 0x148b: 0x4000, + 0x148c: 0x4000, 0x148d: 0x4000, 0x148e: 0x4000, 0x148f: 0x4000, 0x1490: 0x4000, 0x1491: 0x4000, + 0x1492: 0x4000, 0x1493: 0x4000, 0x1494: 0x4000, 0x1495: 0x4000, 0x1496: 0x4000, 0x1497: 0x4000, + 0x1498: 0x4000, 0x1499: 0x4000, 0x149a: 0x4000, 0x149b: 0x4000, 0x149c: 0x4000, 0x149d: 0x4000, + 0x149e: 0x4000, 0x149f: 0x4000, 0x14a0: 0x4000, + 0x14ad: 0x4000, 0x14ae: 0x4000, 0x14af: 0x4000, + 0x14b0: 0x4000, 0x14b1: 0x4000, 0x14b2: 0x4000, 0x14b3: 0x4000, 0x14b4: 0x4000, 0x14b5: 0x4000, + 0x14b7: 0x4000, 0x14b8: 0x4000, 0x14b9: 0x4000, 0x14ba: 0x4000, 0x14bb: 0x4000, + 0x14bc: 0x4000, 0x14bd: 0x4000, 0x14be: 0x4000, 0x14bf: 0x4000, + // Block 0x53, offset 0x14c0 + 0x14c0: 0x4000, 0x14c1: 0x4000, 0x14c2: 0x4000, 0x14c3: 0x4000, 0x14c4: 0x4000, 0x14c5: 0x4000, + 0x14c6: 0x4000, 0x14c7: 0x4000, 0x14c8: 0x4000, 0x14c9: 0x4000, 0x14ca: 0x4000, 0x14cb: 0x4000, + 0x14cc: 0x4000, 0x14cd: 0x4000, 0x14ce: 0x4000, 0x14cf: 0x4000, 0x14d0: 0x4000, 0x14d1: 0x4000, + 0x14d2: 0x4000, 0x14d3: 0x4000, 0x14d4: 0x4000, 0x14d5: 0x4000, 0x14d6: 0x4000, 0x14d7: 0x4000, + 0x14d8: 0x4000, 0x14d9: 0x4000, 0x14da: 0x4000, 0x14db: 0x4000, 0x14dc: 0x4000, 0x14dd: 0x4000, + 0x14de: 0x4000, 0x14df: 0x4000, 0x14e0: 0x4000, 0x14e1: 0x4000, 0x14e2: 0x4000, 0x14e3: 0x4000, + 0x14e4: 0x4000, 0x14e5: 0x4000, 0x14e6: 0x4000, 0x14e7: 0x4000, 0x14e8: 0x4000, 0x14e9: 0x4000, + 0x14ea: 0x4000, 0x14eb: 0x4000, 0x14ec: 0x4000, 0x14ed: 0x4000, 0x14ee: 0x4000, 0x14ef: 0x4000, + 0x14f0: 0x4000, 0x14f1: 0x4000, 0x14f2: 0x4000, 0x14f3: 0x4000, 0x14f4: 0x4000, 0x14f5: 0x4000, + 0x14f6: 0x4000, 0x14f7: 0x4000, 0x14f8: 0x4000, 0x14f9: 0x4000, 0x14fa: 0x4000, 0x14fb: 0x4000, + 0x14fc: 0x4000, 0x14fe: 0x4000, 0x14ff: 0x4000, + // Block 0x54, offset 0x1500 + 0x1500: 0x4000, 0x1501: 0x4000, 0x1502: 0x4000, 0x1503: 0x4000, 0x1504: 0x4000, 0x1505: 0x4000, + 0x1506: 0x4000, 0x1507: 0x4000, 0x1508: 0x4000, 0x1509: 0x4000, 0x150a: 0x4000, 0x150b: 0x4000, + 0x150c: 0x4000, 0x150d: 0x4000, 0x150e: 0x4000, 0x150f: 0x4000, 0x1510: 0x4000, 0x1511: 0x4000, + 0x1512: 0x4000, 0x1513: 0x4000, + 0x1520: 0x4000, 0x1521: 
0x4000, 0x1522: 0x4000, 0x1523: 0x4000, + 0x1524: 0x4000, 0x1525: 0x4000, 0x1526: 0x4000, 0x1527: 0x4000, 0x1528: 0x4000, 0x1529: 0x4000, + 0x152a: 0x4000, 0x152b: 0x4000, 0x152c: 0x4000, 0x152d: 0x4000, 0x152e: 0x4000, 0x152f: 0x4000, + 0x1530: 0x4000, 0x1531: 0x4000, 0x1532: 0x4000, 0x1533: 0x4000, 0x1534: 0x4000, 0x1535: 0x4000, + 0x1536: 0x4000, 0x1537: 0x4000, 0x1538: 0x4000, 0x1539: 0x4000, 0x153a: 0x4000, 0x153b: 0x4000, + 0x153c: 0x4000, 0x153d: 0x4000, 0x153e: 0x4000, 0x153f: 0x4000, + // Block 0x55, offset 0x1540 + 0x1540: 0x4000, 0x1541: 0x4000, 0x1542: 0x4000, 0x1543: 0x4000, 0x1544: 0x4000, 0x1545: 0x4000, + 0x1546: 0x4000, 0x1547: 0x4000, 0x1548: 0x4000, 0x1549: 0x4000, 0x154a: 0x4000, + 0x154f: 0x4000, 0x1550: 0x4000, 0x1551: 0x4000, + 0x1552: 0x4000, 0x1553: 0x4000, + 0x1560: 0x4000, 0x1561: 0x4000, 0x1562: 0x4000, 0x1563: 0x4000, + 0x1564: 0x4000, 0x1565: 0x4000, 0x1566: 0x4000, 0x1567: 0x4000, 0x1568: 0x4000, 0x1569: 0x4000, + 0x156a: 0x4000, 0x156b: 0x4000, 0x156c: 0x4000, 0x156d: 0x4000, 0x156e: 0x4000, 0x156f: 0x4000, + 0x1570: 0x4000, 0x1574: 0x4000, + 0x1578: 0x4000, 0x1579: 0x4000, 0x157a: 0x4000, 0x157b: 0x4000, + 0x157c: 0x4000, 0x157d: 0x4000, 0x157e: 0x4000, 0x157f: 0x4000, + // Block 0x56, offset 0x1580 + 0x1580: 0x4000, 0x1582: 0x4000, 0x1583: 0x4000, 0x1584: 0x4000, 0x1585: 0x4000, + 0x1586: 0x4000, 0x1587: 0x4000, 0x1588: 0x4000, 0x1589: 0x4000, 0x158a: 0x4000, 0x158b: 0x4000, + 0x158c: 0x4000, 0x158d: 0x4000, 0x158e: 0x4000, 0x158f: 0x4000, 0x1590: 0x4000, 0x1591: 0x4000, + 0x1592: 0x4000, 0x1593: 0x4000, 0x1594: 0x4000, 0x1595: 0x4000, 0x1596: 0x4000, 0x1597: 0x4000, + 0x1598: 0x4000, 0x1599: 0x4000, 0x159a: 0x4000, 0x159b: 0x4000, 0x159c: 0x4000, 0x159d: 0x4000, + 0x159e: 0x4000, 0x159f: 0x4000, 0x15a0: 0x4000, 0x15a1: 0x4000, 0x15a2: 0x4000, 0x15a3: 0x4000, + 0x15a4: 0x4000, 0x15a5: 0x4000, 0x15a6: 0x4000, 0x15a7: 0x4000, 0x15a8: 0x4000, 0x15a9: 0x4000, + 0x15aa: 0x4000, 0x15ab: 0x4000, 0x15ac: 0x4000, 0x15ad: 0x4000, 0x15ae: 0x4000, 0x15af: 0x4000, + 0x15b0: 0x4000, 0x15b1: 0x4000, 0x15b2: 0x4000, 0x15b3: 0x4000, 0x15b4: 0x4000, 0x15b5: 0x4000, + 0x15b6: 0x4000, 0x15b7: 0x4000, 0x15b8: 0x4000, 0x15b9: 0x4000, 0x15ba: 0x4000, 0x15bb: 0x4000, + 0x15bc: 0x4000, 0x15bd: 0x4000, 0x15be: 0x4000, 0x15bf: 0x4000, + // Block 0x57, offset 0x15c0 + 0x15c0: 0x4000, 0x15c1: 0x4000, 0x15c2: 0x4000, 0x15c3: 0x4000, 0x15c4: 0x4000, 0x15c5: 0x4000, + 0x15c6: 0x4000, 0x15c7: 0x4000, 0x15c8: 0x4000, 0x15c9: 0x4000, 0x15ca: 0x4000, 0x15cb: 0x4000, + 0x15cc: 0x4000, 0x15cd: 0x4000, 0x15ce: 0x4000, 0x15cf: 0x4000, 0x15d0: 0x4000, 0x15d1: 0x4000, + 0x15d2: 0x4000, 0x15d3: 0x4000, 0x15d4: 0x4000, 0x15d5: 0x4000, 0x15d6: 0x4000, 0x15d7: 0x4000, + 0x15d8: 0x4000, 0x15d9: 0x4000, 0x15da: 0x4000, 0x15db: 0x4000, 0x15dc: 0x4000, 0x15dd: 0x4000, + 0x15de: 0x4000, 0x15df: 0x4000, 0x15e0: 0x4000, 0x15e1: 0x4000, 0x15e2: 0x4000, 0x15e3: 0x4000, + 0x15e4: 0x4000, 0x15e5: 0x4000, 0x15e6: 0x4000, 0x15e7: 0x4000, 0x15e8: 0x4000, 0x15e9: 0x4000, + 0x15ea: 0x4000, 0x15eb: 0x4000, 0x15ec: 0x4000, 0x15ed: 0x4000, 0x15ee: 0x4000, 0x15ef: 0x4000, + 0x15f0: 0x4000, 0x15f1: 0x4000, 0x15f2: 0x4000, 0x15f3: 0x4000, 0x15f4: 0x4000, 0x15f5: 0x4000, + 0x15f6: 0x4000, 0x15f7: 0x4000, 0x15f8: 0x4000, 0x15f9: 0x4000, 0x15fa: 0x4000, 0x15fb: 0x4000, + 0x15fc: 0x4000, 0x15ff: 0x4000, + // Block 0x58, offset 0x1600 + 0x1600: 0x4000, 0x1601: 0x4000, 0x1602: 0x4000, 0x1603: 0x4000, 0x1604: 0x4000, 0x1605: 0x4000, + 0x1606: 0x4000, 0x1607: 0x4000, 0x1608: 0x4000, 0x1609: 0x4000, 0x160a: 0x4000, 0x160b: 0x4000, 
+ 0x160c: 0x4000, 0x160d: 0x4000, 0x160e: 0x4000, 0x160f: 0x4000, 0x1610: 0x4000, 0x1611: 0x4000, + 0x1612: 0x4000, 0x1613: 0x4000, 0x1614: 0x4000, 0x1615: 0x4000, 0x1616: 0x4000, 0x1617: 0x4000, + 0x1618: 0x4000, 0x1619: 0x4000, 0x161a: 0x4000, 0x161b: 0x4000, 0x161c: 0x4000, 0x161d: 0x4000, + 0x161e: 0x4000, 0x161f: 0x4000, 0x1620: 0x4000, 0x1621: 0x4000, 0x1622: 0x4000, 0x1623: 0x4000, + 0x1624: 0x4000, 0x1625: 0x4000, 0x1626: 0x4000, 0x1627: 0x4000, 0x1628: 0x4000, 0x1629: 0x4000, + 0x162a: 0x4000, 0x162b: 0x4000, 0x162c: 0x4000, 0x162d: 0x4000, 0x162e: 0x4000, 0x162f: 0x4000, + 0x1630: 0x4000, 0x1631: 0x4000, 0x1632: 0x4000, 0x1633: 0x4000, 0x1634: 0x4000, 0x1635: 0x4000, + 0x1636: 0x4000, 0x1637: 0x4000, 0x1638: 0x4000, 0x1639: 0x4000, 0x163a: 0x4000, 0x163b: 0x4000, + 0x163c: 0x4000, 0x163d: 0x4000, + // Block 0x59, offset 0x1640 + 0x164b: 0x4000, + 0x164c: 0x4000, 0x164d: 0x4000, 0x164e: 0x4000, 0x1650: 0x4000, 0x1651: 0x4000, + 0x1652: 0x4000, 0x1653: 0x4000, 0x1654: 0x4000, 0x1655: 0x4000, 0x1656: 0x4000, 0x1657: 0x4000, + 0x1658: 0x4000, 0x1659: 0x4000, 0x165a: 0x4000, 0x165b: 0x4000, 0x165c: 0x4000, 0x165d: 0x4000, + 0x165e: 0x4000, 0x165f: 0x4000, 0x1660: 0x4000, 0x1661: 0x4000, 0x1662: 0x4000, 0x1663: 0x4000, + 0x1664: 0x4000, 0x1665: 0x4000, 0x1666: 0x4000, 0x1667: 0x4000, + 0x167a: 0x4000, + // Block 0x5a, offset 0x1680 + 0x1695: 0x4000, 0x1696: 0x4000, + 0x16a4: 0x4000, + // Block 0x5b, offset 0x16c0 + 0x16fb: 0x4000, + 0x16fc: 0x4000, 0x16fd: 0x4000, 0x16fe: 0x4000, 0x16ff: 0x4000, + // Block 0x5c, offset 0x1700 + 0x1700: 0x4000, 0x1701: 0x4000, 0x1702: 0x4000, 0x1703: 0x4000, 0x1704: 0x4000, 0x1705: 0x4000, + 0x1706: 0x4000, 0x1707: 0x4000, 0x1708: 0x4000, 0x1709: 0x4000, 0x170a: 0x4000, 0x170b: 0x4000, + 0x170c: 0x4000, 0x170d: 0x4000, 0x170e: 0x4000, 0x170f: 0x4000, + // Block 0x5d, offset 0x1740 + 0x1740: 0x4000, 0x1741: 0x4000, 0x1742: 0x4000, 0x1743: 0x4000, 0x1744: 0x4000, 0x1745: 0x4000, + 0x174c: 0x4000, 0x1750: 0x4000, 0x1751: 0x4000, + 0x1752: 0x4000, + 0x176b: 0x4000, 0x176c: 0x4000, + 0x1774: 0x4000, 0x1775: 0x4000, + 0x1776: 0x4000, + // Block 0x5e, offset 0x1780 + 0x1790: 0x4000, 0x1791: 0x4000, + 0x1792: 0x4000, 0x1793: 0x4000, 0x1794: 0x4000, 0x1795: 0x4000, 0x1796: 0x4000, 0x1797: 0x4000, + 0x1798: 0x4000, 0x1799: 0x4000, 0x179a: 0x4000, 0x179b: 0x4000, 0x179c: 0x4000, 0x179d: 0x4000, + 0x179e: 0x4000, 0x17a0: 0x4000, 0x17a1: 0x4000, 0x17a2: 0x4000, 0x17a3: 0x4000, + 0x17a4: 0x4000, 0x17a5: 0x4000, 0x17a6: 0x4000, 0x17a7: 0x4000, + 0x17b0: 0x4000, 0x17b3: 0x4000, 0x17b4: 0x4000, 0x17b5: 0x4000, + 0x17b6: 0x4000, 0x17b7: 0x4000, 0x17b8: 0x4000, 0x17b9: 0x4000, 0x17ba: 0x4000, 0x17bb: 0x4000, + 0x17bc: 0x4000, 0x17bd: 0x4000, 0x17be: 0x4000, + // Block 0x5f, offset 0x17c0 + 0x17c0: 0x4000, 0x17c1: 0x4000, 0x17c2: 0x4000, 0x17c3: 0x4000, 0x17c4: 0x4000, 0x17c5: 0x4000, + 0x17c6: 0x4000, 0x17c7: 0x4000, 0x17c8: 0x4000, 0x17c9: 0x4000, 0x17ca: 0x4000, 0x17cb: 0x4000, + 0x17d0: 0x4000, 0x17d1: 0x4000, + 0x17d2: 0x4000, 0x17d3: 0x4000, 0x17d4: 0x4000, 0x17d5: 0x4000, 0x17d6: 0x4000, 0x17d7: 0x4000, + 0x17d8: 0x4000, 0x17d9: 0x4000, 0x17da: 0x4000, 0x17db: 0x4000, 0x17dc: 0x4000, 0x17dd: 0x4000, + 0x17de: 0x4000, + // Block 0x60, offset 0x1800 + 0x1800: 0x4000, 0x1801: 0x4000, 0x1802: 0x4000, 0x1803: 0x4000, 0x1804: 0x4000, 0x1805: 0x4000, + 0x1806: 0x4000, 0x1807: 0x4000, 0x1808: 0x4000, 0x1809: 0x4000, 0x180a: 0x4000, 0x180b: 0x4000, + 0x180c: 0x4000, 0x180d: 0x4000, 0x180e: 0x4000, 0x180f: 0x4000, 0x1810: 0x4000, 0x1811: 0x4000, + // Block 0x61, offset 0x1840 
+ 0x1840: 0x4000, + // Block 0x62, offset 0x1880 + 0x1880: 0x2000, 0x1881: 0x2000, 0x1882: 0x2000, 0x1883: 0x2000, 0x1884: 0x2000, 0x1885: 0x2000, + 0x1886: 0x2000, 0x1887: 0x2000, 0x1888: 0x2000, 0x1889: 0x2000, 0x188a: 0x2000, 0x188b: 0x2000, + 0x188c: 0x2000, 0x188d: 0x2000, 0x188e: 0x2000, 0x188f: 0x2000, 0x1890: 0x2000, 0x1891: 0x2000, + 0x1892: 0x2000, 0x1893: 0x2000, 0x1894: 0x2000, 0x1895: 0x2000, 0x1896: 0x2000, 0x1897: 0x2000, + 0x1898: 0x2000, 0x1899: 0x2000, 0x189a: 0x2000, 0x189b: 0x2000, 0x189c: 0x2000, 0x189d: 0x2000, + 0x189e: 0x2000, 0x189f: 0x2000, 0x18a0: 0x2000, 0x18a1: 0x2000, 0x18a2: 0x2000, 0x18a3: 0x2000, + 0x18a4: 0x2000, 0x18a5: 0x2000, 0x18a6: 0x2000, 0x18a7: 0x2000, 0x18a8: 0x2000, 0x18a9: 0x2000, + 0x18aa: 0x2000, 0x18ab: 0x2000, 0x18ac: 0x2000, 0x18ad: 0x2000, 0x18ae: 0x2000, 0x18af: 0x2000, + 0x18b0: 0x2000, 0x18b1: 0x2000, 0x18b2: 0x2000, 0x18b3: 0x2000, 0x18b4: 0x2000, 0x18b5: 0x2000, + 0x18b6: 0x2000, 0x18b7: 0x2000, 0x18b8: 0x2000, 0x18b9: 0x2000, 0x18ba: 0x2000, 0x18bb: 0x2000, + 0x18bc: 0x2000, 0x18bd: 0x2000, +} + +// widthIndex: 22 blocks, 1408 entries, 1408 bytes +// Block 0 is the zero block. +var widthIndex = [1408]uint8{ + // Block 0x0, offset 0x0 + // Block 0x1, offset 0x40 + // Block 0x2, offset 0x80 + // Block 0x3, offset 0xc0 + 0xc2: 0x01, 0xc3: 0x02, 0xc4: 0x03, 0xc5: 0x04, 0xc7: 0x05, + 0xc9: 0x06, 0xcb: 0x07, 0xcc: 0x08, 0xcd: 0x09, 0xce: 0x0a, 0xcf: 0x0b, + 0xd0: 0x0c, 0xd1: 0x0d, + 0xe1: 0x02, 0xe2: 0x03, 0xe3: 0x04, 0xe4: 0x05, 0xe5: 0x06, 0xe6: 0x06, 0xe7: 0x06, + 0xe8: 0x06, 0xe9: 0x06, 0xea: 0x07, 0xeb: 0x06, 0xec: 0x06, 0xed: 0x08, 0xee: 0x09, 0xef: 0x0a, + 0xf0: 0x0f, 0xf3: 0x12, 0xf4: 0x13, + // Block 0x4, offset 0x100 + 0x104: 0x0e, 0x105: 0x0f, + // Block 0x5, offset 0x140 + 0x140: 0x10, 0x141: 0x11, 0x142: 0x12, 0x144: 0x13, 0x145: 0x14, 0x146: 0x15, 0x147: 0x16, + 0x148: 0x17, 0x149: 0x18, 0x14a: 0x19, 0x14c: 0x1a, 0x14f: 0x1b, + 0x151: 0x1c, 0x152: 0x08, 0x153: 0x1d, 0x154: 0x1e, 0x155: 0x1f, 0x156: 0x20, 0x157: 0x21, + 0x158: 0x22, 0x159: 0x23, 0x15a: 0x24, 0x15b: 0x25, 0x15c: 0x26, 0x15d: 0x27, 0x15e: 0x28, 0x15f: 0x29, + 0x166: 0x2a, + 0x16c: 0x2b, 0x16d: 0x2c, + 0x17a: 0x2d, 0x17b: 0x2e, 0x17c: 0x0e, 0x17d: 0x0e, 0x17e: 0x0e, 0x17f: 0x2f, + // Block 0x6, offset 0x180 + 0x180: 0x30, 0x181: 0x31, 0x182: 0x32, 0x183: 0x33, 0x184: 0x34, 0x185: 0x35, 0x186: 0x36, 0x187: 0x37, + 0x188: 0x38, 0x189: 0x39, 0x18a: 0x0e, 0x18b: 0x3a, 0x18c: 0x0e, 0x18d: 0x0e, 0x18e: 0x0e, 0x18f: 0x0e, + 0x190: 0x0e, 0x191: 0x0e, 0x192: 0x0e, 0x193: 0x0e, 0x194: 0x0e, 0x195: 0x0e, 0x196: 0x0e, 0x197: 0x0e, + 0x198: 0x0e, 0x199: 0x0e, 0x19a: 0x0e, 0x19b: 0x0e, 0x19c: 0x0e, 0x19d: 0x0e, 0x19e: 0x0e, 0x19f: 0x0e, + 0x1a0: 0x0e, 0x1a1: 0x0e, 0x1a2: 0x0e, 0x1a3: 0x0e, 0x1a4: 0x0e, 0x1a5: 0x0e, 0x1a6: 0x0e, 0x1a7: 0x0e, + 0x1a8: 0x0e, 0x1a9: 0x0e, 0x1aa: 0x0e, 0x1ab: 0x0e, 0x1ac: 0x0e, 0x1ad: 0x0e, 0x1ae: 0x0e, 0x1af: 0x0e, + 0x1b0: 0x0e, 0x1b1: 0x0e, 0x1b2: 0x0e, 0x1b3: 0x0e, 0x1b4: 0x0e, 0x1b5: 0x0e, 0x1b6: 0x0e, 0x1b7: 0x0e, + 0x1b8: 0x0e, 0x1b9: 0x0e, 0x1ba: 0x0e, 0x1bb: 0x0e, 0x1bc: 0x0e, 0x1bd: 0x0e, 0x1be: 0x0e, 0x1bf: 0x0e, + // Block 0x7, offset 0x1c0 + 0x1c0: 0x0e, 0x1c1: 0x0e, 0x1c2: 0x0e, 0x1c3: 0x0e, 0x1c4: 0x0e, 0x1c5: 0x0e, 0x1c6: 0x0e, 0x1c7: 0x0e, + 0x1c8: 0x0e, 0x1c9: 0x0e, 0x1ca: 0x0e, 0x1cb: 0x0e, 0x1cc: 0x0e, 0x1cd: 0x0e, 0x1ce: 0x0e, 0x1cf: 0x0e, + 0x1d0: 0x0e, 0x1d1: 0x0e, 0x1d2: 0x0e, 0x1d3: 0x0e, 0x1d4: 0x0e, 0x1d5: 0x0e, 0x1d6: 0x0e, 0x1d7: 0x0e, + 0x1d8: 0x0e, 0x1d9: 0x0e, 0x1da: 0x0e, 0x1db: 0x0e, 0x1dc: 0x0e, 0x1dd: 0x0e, 
0x1de: 0x0e, 0x1df: 0x0e, + 0x1e0: 0x0e, 0x1e1: 0x0e, 0x1e2: 0x0e, 0x1e3: 0x0e, 0x1e4: 0x0e, 0x1e5: 0x0e, 0x1e6: 0x0e, 0x1e7: 0x0e, + 0x1e8: 0x0e, 0x1e9: 0x0e, 0x1ea: 0x0e, 0x1eb: 0x0e, 0x1ec: 0x0e, 0x1ed: 0x0e, 0x1ee: 0x0e, 0x1ef: 0x0e, + 0x1f0: 0x0e, 0x1f1: 0x0e, 0x1f2: 0x0e, 0x1f3: 0x0e, 0x1f4: 0x0e, 0x1f5: 0x0e, 0x1f6: 0x0e, + 0x1f8: 0x0e, 0x1f9: 0x0e, 0x1fa: 0x0e, 0x1fb: 0x0e, 0x1fc: 0x0e, 0x1fd: 0x0e, 0x1fe: 0x0e, 0x1ff: 0x0e, + // Block 0x8, offset 0x200 + 0x200: 0x0e, 0x201: 0x0e, 0x202: 0x0e, 0x203: 0x0e, 0x204: 0x0e, 0x205: 0x0e, 0x206: 0x0e, 0x207: 0x0e, + 0x208: 0x0e, 0x209: 0x0e, 0x20a: 0x0e, 0x20b: 0x0e, 0x20c: 0x0e, 0x20d: 0x0e, 0x20e: 0x0e, 0x20f: 0x0e, + 0x210: 0x0e, 0x211: 0x0e, 0x212: 0x0e, 0x213: 0x0e, 0x214: 0x0e, 0x215: 0x0e, 0x216: 0x0e, 0x217: 0x0e, + 0x218: 0x0e, 0x219: 0x0e, 0x21a: 0x0e, 0x21b: 0x0e, 0x21c: 0x0e, 0x21d: 0x0e, 0x21e: 0x0e, 0x21f: 0x0e, + 0x220: 0x0e, 0x221: 0x0e, 0x222: 0x0e, 0x223: 0x0e, 0x224: 0x0e, 0x225: 0x0e, 0x226: 0x0e, 0x227: 0x0e, + 0x228: 0x0e, 0x229: 0x0e, 0x22a: 0x0e, 0x22b: 0x0e, 0x22c: 0x0e, 0x22d: 0x0e, 0x22e: 0x0e, 0x22f: 0x0e, + 0x230: 0x0e, 0x231: 0x0e, 0x232: 0x0e, 0x233: 0x0e, 0x234: 0x0e, 0x235: 0x0e, 0x236: 0x0e, 0x237: 0x0e, + 0x238: 0x0e, 0x239: 0x0e, 0x23a: 0x0e, 0x23b: 0x0e, 0x23c: 0x0e, 0x23d: 0x0e, 0x23e: 0x0e, 0x23f: 0x0e, + // Block 0x9, offset 0x240 + 0x240: 0x0e, 0x241: 0x0e, 0x242: 0x0e, 0x243: 0x0e, 0x244: 0x0e, 0x245: 0x0e, 0x246: 0x0e, 0x247: 0x0e, + 0x248: 0x0e, 0x249: 0x0e, 0x24a: 0x0e, 0x24b: 0x0e, 0x24c: 0x0e, 0x24d: 0x0e, 0x24e: 0x0e, 0x24f: 0x0e, + 0x250: 0x0e, 0x251: 0x0e, 0x252: 0x3b, 0x253: 0x3c, + 0x265: 0x3d, + 0x270: 0x0e, 0x271: 0x0e, 0x272: 0x0e, 0x273: 0x0e, 0x274: 0x0e, 0x275: 0x0e, 0x276: 0x0e, 0x277: 0x0e, + 0x278: 0x0e, 0x279: 0x0e, 0x27a: 0x0e, 0x27b: 0x0e, 0x27c: 0x0e, 0x27d: 0x0e, 0x27e: 0x0e, 0x27f: 0x0e, + // Block 0xa, offset 0x280 + 0x280: 0x0e, 0x281: 0x0e, 0x282: 0x0e, 0x283: 0x0e, 0x284: 0x0e, 0x285: 0x0e, 0x286: 0x0e, 0x287: 0x0e, + 0x288: 0x0e, 0x289: 0x0e, 0x28a: 0x0e, 0x28b: 0x0e, 0x28c: 0x0e, 0x28d: 0x0e, 0x28e: 0x0e, 0x28f: 0x0e, + 0x290: 0x0e, 0x291: 0x0e, 0x292: 0x0e, 0x293: 0x0e, 0x294: 0x0e, 0x295: 0x0e, 0x296: 0x0e, 0x297: 0x0e, + 0x298: 0x0e, 0x299: 0x0e, 0x29a: 0x0e, 0x29b: 0x0e, 0x29c: 0x0e, 0x29d: 0x0e, 0x29e: 0x3e, + // Block 0xb, offset 0x2c0 + 0x2c0: 0x08, 0x2c1: 0x08, 0x2c2: 0x08, 0x2c3: 0x08, 0x2c4: 0x08, 0x2c5: 0x08, 0x2c6: 0x08, 0x2c7: 0x08, + 0x2c8: 0x08, 0x2c9: 0x08, 0x2ca: 0x08, 0x2cb: 0x08, 0x2cc: 0x08, 0x2cd: 0x08, 0x2ce: 0x08, 0x2cf: 0x08, + 0x2d0: 0x08, 0x2d1: 0x08, 0x2d2: 0x08, 0x2d3: 0x08, 0x2d4: 0x08, 0x2d5: 0x08, 0x2d6: 0x08, 0x2d7: 0x08, + 0x2d8: 0x08, 0x2d9: 0x08, 0x2da: 0x08, 0x2db: 0x08, 0x2dc: 0x08, 0x2dd: 0x08, 0x2de: 0x08, 0x2df: 0x08, + 0x2e0: 0x08, 0x2e1: 0x08, 0x2e2: 0x08, 0x2e3: 0x08, 0x2e4: 0x08, 0x2e5: 0x08, 0x2e6: 0x08, 0x2e7: 0x08, + 0x2e8: 0x08, 0x2e9: 0x08, 0x2ea: 0x08, 0x2eb: 0x08, 0x2ec: 0x08, 0x2ed: 0x08, 0x2ee: 0x08, 0x2ef: 0x08, + 0x2f0: 0x08, 0x2f1: 0x08, 0x2f2: 0x08, 0x2f3: 0x08, 0x2f4: 0x08, 0x2f5: 0x08, 0x2f6: 0x08, 0x2f7: 0x08, + 0x2f8: 0x08, 0x2f9: 0x08, 0x2fa: 0x08, 0x2fb: 0x08, 0x2fc: 0x08, 0x2fd: 0x08, 0x2fe: 0x08, 0x2ff: 0x08, + // Block 0xc, offset 0x300 + 0x300: 0x08, 0x301: 0x08, 0x302: 0x08, 0x303: 0x08, 0x304: 0x08, 0x305: 0x08, 0x306: 0x08, 0x307: 0x08, + 0x308: 0x08, 0x309: 0x08, 0x30a: 0x08, 0x30b: 0x08, 0x30c: 0x08, 0x30d: 0x08, 0x30e: 0x08, 0x30f: 0x08, + 0x310: 0x08, 0x311: 0x08, 0x312: 0x08, 0x313: 0x08, 0x314: 0x08, 0x315: 0x08, 0x316: 0x08, 0x317: 0x08, + 0x318: 0x08, 0x319: 0x08, 0x31a: 0x08, 0x31b: 0x08, 
0x31c: 0x08, 0x31d: 0x08, 0x31e: 0x08, 0x31f: 0x08, + 0x320: 0x08, 0x321: 0x08, 0x322: 0x08, 0x323: 0x08, 0x324: 0x0e, 0x325: 0x0e, 0x326: 0x0e, 0x327: 0x0e, + 0x328: 0x0e, 0x329: 0x0e, 0x32a: 0x0e, 0x32b: 0x0e, + 0x338: 0x3f, 0x339: 0x40, 0x33c: 0x41, 0x33d: 0x42, 0x33e: 0x43, 0x33f: 0x44, + // Block 0xd, offset 0x340 + 0x37f: 0x45, + // Block 0xe, offset 0x380 + 0x380: 0x0e, 0x381: 0x0e, 0x382: 0x0e, 0x383: 0x0e, 0x384: 0x0e, 0x385: 0x0e, 0x386: 0x0e, 0x387: 0x0e, + 0x388: 0x0e, 0x389: 0x0e, 0x38a: 0x0e, 0x38b: 0x0e, 0x38c: 0x0e, 0x38d: 0x0e, 0x38e: 0x0e, 0x38f: 0x0e, + 0x390: 0x0e, 0x391: 0x0e, 0x392: 0x0e, 0x393: 0x0e, 0x394: 0x0e, 0x395: 0x0e, 0x396: 0x0e, 0x397: 0x0e, + 0x398: 0x0e, 0x399: 0x0e, 0x39a: 0x0e, 0x39b: 0x0e, 0x39c: 0x0e, 0x39d: 0x0e, 0x39e: 0x0e, 0x39f: 0x46, + 0x3a0: 0x0e, 0x3a1: 0x0e, 0x3a2: 0x0e, 0x3a3: 0x0e, 0x3a4: 0x0e, 0x3a5: 0x0e, 0x3a6: 0x0e, 0x3a7: 0x0e, + 0x3a8: 0x0e, 0x3a9: 0x0e, 0x3aa: 0x0e, 0x3ab: 0x47, + // Block 0xf, offset 0x3c0 + 0x3c0: 0x48, + // Block 0x10, offset 0x400 + 0x400: 0x49, 0x403: 0x4a, 0x404: 0x4b, 0x405: 0x4c, 0x406: 0x4d, + 0x408: 0x4e, 0x409: 0x4f, 0x40c: 0x50, 0x40d: 0x51, 0x40e: 0x52, 0x40f: 0x53, + 0x410: 0x3a, 0x411: 0x54, 0x412: 0x0e, 0x413: 0x55, 0x414: 0x56, 0x415: 0x57, 0x416: 0x58, 0x417: 0x59, + 0x418: 0x0e, 0x419: 0x5a, 0x41a: 0x0e, 0x41b: 0x5b, + 0x424: 0x5c, 0x425: 0x5d, 0x426: 0x5e, 0x427: 0x5f, + // Block 0x11, offset 0x440 + 0x456: 0x0b, 0x457: 0x06, + 0x458: 0x0c, 0x45b: 0x0d, 0x45f: 0x0e, + 0x460: 0x06, 0x461: 0x06, 0x462: 0x06, 0x463: 0x06, 0x464: 0x06, 0x465: 0x06, 0x466: 0x06, 0x467: 0x06, + 0x468: 0x06, 0x469: 0x06, 0x46a: 0x06, 0x46b: 0x06, 0x46c: 0x06, 0x46d: 0x06, 0x46e: 0x06, 0x46f: 0x06, + 0x470: 0x06, 0x471: 0x06, 0x472: 0x06, 0x473: 0x06, 0x474: 0x06, 0x475: 0x06, 0x476: 0x06, 0x477: 0x06, + 0x478: 0x06, 0x479: 0x06, 0x47a: 0x06, 0x47b: 0x06, 0x47c: 0x06, 0x47d: 0x06, 0x47e: 0x06, 0x47f: 0x06, + // Block 0x12, offset 0x480 + 0x484: 0x08, 0x485: 0x08, 0x486: 0x08, 0x487: 0x09, + // Block 0x13, offset 0x4c0 + 0x4c0: 0x08, 0x4c1: 0x08, 0x4c2: 0x08, 0x4c3: 0x08, 0x4c4: 0x08, 0x4c5: 0x08, 0x4c6: 0x08, 0x4c7: 0x08, + 0x4c8: 0x08, 0x4c9: 0x08, 0x4ca: 0x08, 0x4cb: 0x08, 0x4cc: 0x08, 0x4cd: 0x08, 0x4ce: 0x08, 0x4cf: 0x08, + 0x4d0: 0x08, 0x4d1: 0x08, 0x4d2: 0x08, 0x4d3: 0x08, 0x4d4: 0x08, 0x4d5: 0x08, 0x4d6: 0x08, 0x4d7: 0x08, + 0x4d8: 0x08, 0x4d9: 0x08, 0x4da: 0x08, 0x4db: 0x08, 0x4dc: 0x08, 0x4dd: 0x08, 0x4de: 0x08, 0x4df: 0x08, + 0x4e0: 0x08, 0x4e1: 0x08, 0x4e2: 0x08, 0x4e3: 0x08, 0x4e4: 0x08, 0x4e5: 0x08, 0x4e6: 0x08, 0x4e7: 0x08, + 0x4e8: 0x08, 0x4e9: 0x08, 0x4ea: 0x08, 0x4eb: 0x08, 0x4ec: 0x08, 0x4ed: 0x08, 0x4ee: 0x08, 0x4ef: 0x08, + 0x4f0: 0x08, 0x4f1: 0x08, 0x4f2: 0x08, 0x4f3: 0x08, 0x4f4: 0x08, 0x4f5: 0x08, 0x4f6: 0x08, 0x4f7: 0x08, + 0x4f8: 0x08, 0x4f9: 0x08, 0x4fa: 0x08, 0x4fb: 0x08, 0x4fc: 0x08, 0x4fd: 0x08, 0x4fe: 0x08, 0x4ff: 0x60, + // Block 0x14, offset 0x500 + 0x520: 0x10, + 0x530: 0x09, 0x531: 0x09, 0x532: 0x09, 0x533: 0x09, 0x534: 0x09, 0x535: 0x09, 0x536: 0x09, 0x537: 0x09, + 0x538: 0x09, 0x539: 0x09, 0x53a: 0x09, 0x53b: 0x09, 0x53c: 0x09, 0x53d: 0x09, 0x53e: 0x09, 0x53f: 0x11, + // Block 0x15, offset 0x540 + 0x540: 0x09, 0x541: 0x09, 0x542: 0x09, 0x543: 0x09, 0x544: 0x09, 0x545: 0x09, 0x546: 0x09, 0x547: 0x09, + 0x548: 0x09, 0x549: 0x09, 0x54a: 0x09, 0x54b: 0x09, 0x54c: 0x09, 0x54d: 0x09, 0x54e: 0x09, 0x54f: 0x11, +} + +// inverseData contains 4-byte entries of the following format: +// <0 padding> +// The last byte of the UTF-8-encoded rune is xor-ed with the last byte of the +// UTF-8 encoding 
of the original rune. Mappings often have the following +// pattern: +// A -> A (U+FF21 -> U+0041) +// B -> B (U+FF22 -> U+0042) +// ... +// By xor-ing the last byte the same entry can be shared by many mappings. This +// reduces the total number of distinct entries by about two thirds. +// The resulting entry for the aforementioned mappings is +// { 0x01, 0xE0, 0x00, 0x00 } +// Using this entry to map U+FF21 (UTF-8 [EF BC A1]), we get +// E0 ^ A1 = 41. +// Similarly, for U+FF22 (UTF-8 [EF BC A2]), we get +// E0 ^ A2 = 42. +// Note that because of the xor-ing, the byte sequence stored in the entry is +// not valid UTF-8. +var inverseData = [150][4]byte{ + {0x00, 0x00, 0x00, 0x00}, + {0x03, 0xe3, 0x80, 0xa0}, + {0x03, 0xef, 0xbc, 0xa0}, + {0x03, 0xef, 0xbc, 0xe0}, + {0x03, 0xef, 0xbd, 0xe0}, + {0x03, 0xef, 0xbf, 0x02}, + {0x03, 0xef, 0xbf, 0x00}, + {0x03, 0xef, 0xbf, 0x0e}, + {0x03, 0xef, 0xbf, 0x0c}, + {0x03, 0xef, 0xbf, 0x0f}, + {0x03, 0xef, 0xbf, 0x39}, + {0x03, 0xef, 0xbf, 0x3b}, + {0x03, 0xef, 0xbf, 0x3f}, + {0x03, 0xef, 0xbf, 0x2a}, + {0x03, 0xef, 0xbf, 0x0d}, + {0x03, 0xef, 0xbf, 0x25}, + {0x03, 0xef, 0xbd, 0x1a}, + {0x03, 0xef, 0xbd, 0x26}, + {0x01, 0xa0, 0x00, 0x00}, + {0x03, 0xef, 0xbd, 0x25}, + {0x03, 0xef, 0xbd, 0x23}, + {0x03, 0xef, 0xbd, 0x2e}, + {0x03, 0xef, 0xbe, 0x07}, + {0x03, 0xef, 0xbe, 0x05}, + {0x03, 0xef, 0xbd, 0x06}, + {0x03, 0xef, 0xbd, 0x13}, + {0x03, 0xef, 0xbd, 0x0b}, + {0x03, 0xef, 0xbd, 0x16}, + {0x03, 0xef, 0xbd, 0x0c}, + {0x03, 0xef, 0xbd, 0x15}, + {0x03, 0xef, 0xbd, 0x0d}, + {0x03, 0xef, 0xbd, 0x1c}, + {0x03, 0xef, 0xbd, 0x02}, + {0x03, 0xef, 0xbd, 0x1f}, + {0x03, 0xef, 0xbd, 0x1d}, + {0x03, 0xef, 0xbd, 0x17}, + {0x03, 0xef, 0xbd, 0x08}, + {0x03, 0xef, 0xbd, 0x09}, + {0x03, 0xef, 0xbd, 0x0e}, + {0x03, 0xef, 0xbd, 0x04}, + {0x03, 0xef, 0xbd, 0x05}, + {0x03, 0xef, 0xbe, 0x3f}, + {0x03, 0xef, 0xbe, 0x00}, + {0x03, 0xef, 0xbd, 0x2c}, + {0x03, 0xef, 0xbe, 0x06}, + {0x03, 0xef, 0xbe, 0x0c}, + {0x03, 0xef, 0xbe, 0x0f}, + {0x03, 0xef, 0xbe, 0x0d}, + {0x03, 0xef, 0xbe, 0x0b}, + {0x03, 0xef, 0xbe, 0x19}, + {0x03, 0xef, 0xbe, 0x15}, + {0x03, 0xef, 0xbe, 0x11}, + {0x03, 0xef, 0xbe, 0x31}, + {0x03, 0xef, 0xbe, 0x33}, + {0x03, 0xef, 0xbd, 0x0f}, + {0x03, 0xef, 0xbe, 0x30}, + {0x03, 0xef, 0xbe, 0x3e}, + {0x03, 0xef, 0xbe, 0x32}, + {0x03, 0xef, 0xbe, 0x36}, + {0x03, 0xef, 0xbd, 0x14}, + {0x03, 0xef, 0xbe, 0x2e}, + {0x03, 0xef, 0xbd, 0x1e}, + {0x03, 0xef, 0xbe, 0x10}, + {0x03, 0xef, 0xbf, 0x13}, + {0x03, 0xef, 0xbf, 0x15}, + {0x03, 0xef, 0xbf, 0x17}, + {0x03, 0xef, 0xbf, 0x1f}, + {0x03, 0xef, 0xbf, 0x1d}, + {0x03, 0xef, 0xbf, 0x1b}, + {0x03, 0xef, 0xbf, 0x09}, + {0x03, 0xef, 0xbf, 0x0b}, + {0x03, 0xef, 0xbf, 0x37}, + {0x03, 0xef, 0xbe, 0x04}, + {0x01, 0xe0, 0x00, 0x00}, + {0x03, 0xe2, 0xa6, 0x1a}, + {0x03, 0xe2, 0xa6, 0x26}, + {0x03, 0xe3, 0x80, 0x23}, + {0x03, 0xe3, 0x80, 0x2e}, + {0x03, 0xe3, 0x80, 0x25}, + {0x03, 0xe3, 0x83, 0x1e}, + {0x03, 0xe3, 0x83, 0x14}, + {0x03, 0xe3, 0x82, 0x06}, + {0x03, 0xe3, 0x82, 0x0b}, + {0x03, 0xe3, 0x82, 0x0c}, + {0x03, 0xe3, 0x82, 0x0d}, + {0x03, 0xe3, 0x82, 0x02}, + {0x03, 0xe3, 0x83, 0x0f}, + {0x03, 0xe3, 0x83, 0x08}, + {0x03, 0xe3, 0x83, 0x09}, + {0x03, 0xe3, 0x83, 0x2c}, + {0x03, 0xe3, 0x83, 0x0c}, + {0x03, 0xe3, 0x82, 0x13}, + {0x03, 0xe3, 0x82, 0x16}, + {0x03, 0xe3, 0x82, 0x15}, + {0x03, 0xe3, 0x82, 0x1c}, + {0x03, 0xe3, 0x82, 0x1f}, + {0x03, 0xe3, 0x82, 0x1d}, + {0x03, 0xe3, 0x82, 0x1a}, + {0x03, 0xe3, 0x82, 0x17}, + {0x03, 0xe3, 0x82, 0x08}, + {0x03, 0xe3, 0x82, 0x09}, + {0x03, 0xe3, 0x82, 0x0e}, + {0x03, 0xe3, 0x82, 0x04}, + {0x03, 
0xe3, 0x82, 0x05}, + {0x03, 0xe3, 0x82, 0x3f}, + {0x03, 0xe3, 0x83, 0x00}, + {0x03, 0xe3, 0x83, 0x06}, + {0x03, 0xe3, 0x83, 0x05}, + {0x03, 0xe3, 0x83, 0x0d}, + {0x03, 0xe3, 0x83, 0x0b}, + {0x03, 0xe3, 0x83, 0x07}, + {0x03, 0xe3, 0x83, 0x19}, + {0x03, 0xe3, 0x83, 0x15}, + {0x03, 0xe3, 0x83, 0x11}, + {0x03, 0xe3, 0x83, 0x31}, + {0x03, 0xe3, 0x83, 0x33}, + {0x03, 0xe3, 0x83, 0x30}, + {0x03, 0xe3, 0x83, 0x3e}, + {0x03, 0xe3, 0x83, 0x32}, + {0x03, 0xe3, 0x83, 0x36}, + {0x03, 0xe3, 0x83, 0x2e}, + {0x03, 0xe3, 0x82, 0x07}, + {0x03, 0xe3, 0x85, 0x04}, + {0x03, 0xe3, 0x84, 0x10}, + {0x03, 0xe3, 0x85, 0x30}, + {0x03, 0xe3, 0x85, 0x0d}, + {0x03, 0xe3, 0x85, 0x13}, + {0x03, 0xe3, 0x85, 0x15}, + {0x03, 0xe3, 0x85, 0x17}, + {0x03, 0xe3, 0x85, 0x1f}, + {0x03, 0xe3, 0x85, 0x1d}, + {0x03, 0xe3, 0x85, 0x1b}, + {0x03, 0xe3, 0x85, 0x09}, + {0x03, 0xe3, 0x85, 0x0f}, + {0x03, 0xe3, 0x85, 0x0b}, + {0x03, 0xe3, 0x85, 0x37}, + {0x03, 0xe3, 0x85, 0x3b}, + {0x03, 0xe3, 0x85, 0x39}, + {0x03, 0xe3, 0x85, 0x3f}, + {0x02, 0xc2, 0x02, 0x00}, + {0x02, 0xc2, 0x0e, 0x00}, + {0x02, 0xc2, 0x0c, 0x00}, + {0x02, 0xc2, 0x00, 0x00}, + {0x03, 0xe2, 0x82, 0x0f}, + {0x03, 0xe2, 0x94, 0x2a}, + {0x03, 0xe2, 0x86, 0x39}, + {0x03, 0xe2, 0x86, 0x3b}, + {0x03, 0xe2, 0x86, 0x3f}, + {0x03, 0xe2, 0x96, 0x0d}, + {0x03, 0xe2, 0x97, 0x25}, +} + +// Total table size 14680 bytes (14KiB) diff --git a/vendor/golang.org/x/text/width/transform.go b/vendor/golang.org/x/text/width/transform.go new file mode 100644 index 000000000..0049f700a --- /dev/null +++ b/vendor/golang.org/x/text/width/transform.go @@ -0,0 +1,239 @@ +// Copyright 2015 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package width + +import ( + "unicode/utf8" + + "golang.org/x/text/transform" +) + +type foldTransform struct { + transform.NopResetter +} + +func (foldTransform) Span(src []byte, atEOF bool) (n int, err error) { + for n < len(src) { + if src[n] < utf8.RuneSelf { + // ASCII fast path. + for n++; n < len(src) && src[n] < utf8.RuneSelf; n++ { + } + continue + } + v, size := trie.lookup(src[n:]) + if size == 0 { // incomplete UTF-8 encoding + if !atEOF { + err = transform.ErrShortSrc + } else { + n = len(src) + } + break + } + if elem(v)&tagNeedsFold != 0 { + err = transform.ErrEndOfSpan + break + } + n += size + } + return n, err +} + +func (foldTransform) Transform(dst, src []byte, atEOF bool) (nDst, nSrc int, err error) { + for nSrc < len(src) { + if src[nSrc] < utf8.RuneSelf { + // ASCII fast path. 
+ start, end := nSrc, len(src) + if d := len(dst) - nDst; d < end-start { + end = nSrc + d + } + for nSrc++; nSrc < end && src[nSrc] < utf8.RuneSelf; nSrc++ { + } + n := copy(dst[nDst:], src[start:nSrc]) + if nDst += n; nDst == len(dst) { + nSrc = start + n + if nSrc == len(src) { + return nDst, nSrc, nil + } + if src[nSrc] < utf8.RuneSelf { + return nDst, nSrc, transform.ErrShortDst + } + } + continue + } + v, size := trie.lookup(src[nSrc:]) + if size == 0 { // incomplete UTF-8 encoding + if !atEOF { + return nDst, nSrc, transform.ErrShortSrc + } + size = 1 // gobble 1 byte + } + if elem(v)&tagNeedsFold == 0 { + if size != copy(dst[nDst:], src[nSrc:nSrc+size]) { + return nDst, nSrc, transform.ErrShortDst + } + nDst += size + } else { + data := inverseData[byte(v)] + if len(dst)-nDst < int(data[0]) { + return nDst, nSrc, transform.ErrShortDst + } + i := 1 + for end := int(data[0]); i < end; i++ { + dst[nDst] = data[i] + nDst++ + } + dst[nDst] = data[i] ^ src[nSrc+size-1] + nDst++ + } + nSrc += size + } + return nDst, nSrc, nil +} + +type narrowTransform struct { + transform.NopResetter +} + +func (narrowTransform) Span(src []byte, atEOF bool) (n int, err error) { + for n < len(src) { + if src[n] < utf8.RuneSelf { + // ASCII fast path. + for n++; n < len(src) && src[n] < utf8.RuneSelf; n++ { + } + continue + } + v, size := trie.lookup(src[n:]) + if size == 0 { // incomplete UTF-8 encoding + if !atEOF { + err = transform.ErrShortSrc + } else { + n = len(src) + } + break + } + if k := elem(v).kind(); byte(v) == 0 || k != EastAsianFullwidth && k != EastAsianWide && k != EastAsianAmbiguous { + } else { + err = transform.ErrEndOfSpan + break + } + n += size + } + return n, err +} + +func (narrowTransform) Transform(dst, src []byte, atEOF bool) (nDst, nSrc int, err error) { + for nSrc < len(src) { + if src[nSrc] < utf8.RuneSelf { + // ASCII fast path. + start, end := nSrc, len(src) + if d := len(dst) - nDst; d < end-start { + end = nSrc + d + } + for nSrc++; nSrc < end && src[nSrc] < utf8.RuneSelf; nSrc++ { + } + n := copy(dst[nDst:], src[start:nSrc]) + if nDst += n; nDst == len(dst) { + nSrc = start + n + if nSrc == len(src) { + return nDst, nSrc, nil + } + if src[nSrc] < utf8.RuneSelf { + return nDst, nSrc, transform.ErrShortDst + } + } + continue + } + v, size := trie.lookup(src[nSrc:]) + if size == 0 { // incomplete UTF-8 encoding + if !atEOF { + return nDst, nSrc, transform.ErrShortSrc + } + size = 1 // gobble 1 byte + } + if k := elem(v).kind(); byte(v) == 0 || k != EastAsianFullwidth && k != EastAsianWide && k != EastAsianAmbiguous { + if size != copy(dst[nDst:], src[nSrc:nSrc+size]) { + return nDst, nSrc, transform.ErrShortDst + } + nDst += size + } else { + data := inverseData[byte(v)] + if len(dst)-nDst < int(data[0]) { + return nDst, nSrc, transform.ErrShortDst + } + i := 1 + for end := int(data[0]); i < end; i++ { + dst[nDst] = data[i] + nDst++ + } + dst[nDst] = data[i] ^ src[nSrc+size-1] + nDst++ + } + nSrc += size + } + return nDst, nSrc, nil +} + +type wideTransform struct { + transform.NopResetter +} + +func (wideTransform) Span(src []byte, atEOF bool) (n int, err error) { + for n < len(src) { + // TODO: Consider ASCII fast path. Special-casing ASCII handling can + // reduce the ns/op of BenchmarkWideASCII by about 30%. This is probably + // not enough to warrant the extra code and complexity. 
+ v, size := trie.lookup(src[n:]) + if size == 0 { // incomplete UTF-8 encoding + if !atEOF { + err = transform.ErrShortSrc + } else { + n = len(src) + } + break + } + if k := elem(v).kind(); byte(v) == 0 || k != EastAsianHalfwidth && k != EastAsianNarrow { + } else { + err = transform.ErrEndOfSpan + break + } + n += size + } + return n, err +} + +func (wideTransform) Transform(dst, src []byte, atEOF bool) (nDst, nSrc int, err error) { + for nSrc < len(src) { + // TODO: Consider ASCII fast path. Special-casing ASCII handling can + // reduce the ns/op of BenchmarkWideASCII by about 30%. This is probably + // not enough to warrant the extra code and complexity. + v, size := trie.lookup(src[nSrc:]) + if size == 0 { // incomplete UTF-8 encoding + if !atEOF { + return nDst, nSrc, transform.ErrShortSrc + } + size = 1 // gobble 1 byte + } + if k := elem(v).kind(); byte(v) == 0 || k != EastAsianHalfwidth && k != EastAsianNarrow { + if size != copy(dst[nDst:], src[nSrc:nSrc+size]) { + return nDst, nSrc, transform.ErrShortDst + } + nDst += size + } else { + data := inverseData[byte(v)] + if len(dst)-nDst < int(data[0]) { + return nDst, nSrc, transform.ErrShortDst + } + i := 1 + for end := int(data[0]); i < end; i++ { + dst[nDst] = data[i] + nDst++ + } + dst[nDst] = data[i] ^ src[nSrc+size-1] + nDst++ + } + nSrc += size + } + return nDst, nSrc, nil +} diff --git a/vendor/golang.org/x/text/width/trieval.go b/vendor/golang.org/x/text/width/trieval.go new file mode 100644 index 000000000..ca8e45fd1 --- /dev/null +++ b/vendor/golang.org/x/text/width/trieval.go @@ -0,0 +1,30 @@ +// Code generated by running "go generate" in golang.org/x/text. DO NOT EDIT. + +package width + +// elem is an entry of the width trie. The high byte is used to encode the type +// of the rune. The low byte is used to store the index to a mapping entry in +// the inverseData array. +type elem uint16 + +const ( + tagNeutral elem = iota << typeShift + tagAmbiguous + tagWide + tagNarrow + tagFullwidth + tagHalfwidth +) + +const ( + numTypeBits = 3 + typeShift = 16 - numTypeBits + + // tagNeedsFold is true for all fullwidth and halfwidth runes except for + // the Won sign U+20A9. + tagNeedsFold = 0x1000 + + // The Korean Won sign is halfwidth, but SHOULD NOT be mapped to a wide + // variant. + wonSign rune = 0x20A9 +) diff --git a/vendor/golang.org/x/text/width/width.go b/vendor/golang.org/x/text/width/width.go new file mode 100644 index 000000000..29c7509be --- /dev/null +++ b/vendor/golang.org/x/text/width/width.go @@ -0,0 +1,206 @@ +// Copyright 2015 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:generate stringer -type=Kind +//go:generate go run gen.go gen_common.go gen_trieval.go + +// Package width provides functionality for handling different widths in text. +// +// Wide characters behave like ideographs; they tend to allow line breaks after +// each character and remain upright in vertical text layout. Narrow characters +// are kept together in words or runs that are rotated sideways in vertical text +// layout. +// +// For more information, see https://unicode.org/reports/tr11/. +package width // import "golang.org/x/text/width" + +import ( + "unicode/utf8" + + "golang.org/x/text/transform" +) + +// TODO +// 1) Reduce table size by compressing blocks. +// 2) API proposition for computing display length +// (approximation, fixed pitch only). +// 3) Implement display length. 
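For orientation, here is a minimal usage sketch of the exported width API declared in this file (LookupString, Properties, and the Fold, Widen and Narrow transformers). It assumes the package is imported by its canonical path golang.org/x/text/width and is illustrative only, not part of the vendored source:

    package main

    import (
    	"fmt"

    	"golang.org/x/text/width"
    )

    func main() {
    	// Classify the first rune of a string and query its narrow variant.
    	p, _ := width.LookupString("Ｗ") // U+FF37 FULLWIDTH LATIN CAPITAL LETTER W
    	fmt.Println(p.Kind())            // EastAsianFullwidth
    	fmt.Printf("%c\n", p.Narrow())   // W

    	// The package-level transformers rewrite whole strings.
    	fmt.Println(width.Narrow.String("ＡＢＣ")) // "ABC"
    	fmt.Println(width.Widen.String("abc"))      // "ａｂｃ"
    	fmt.Println(width.Fold.String("ｱｲｳ"))       // "アイウ" (halfwidth katakana folded to canonical width)
    }
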
+ +// Kind indicates the type of width property as defined in https://unicode.org/reports/tr11/. +type Kind int + +const ( + // Neutral characters do not occur in legacy East Asian character sets. + Neutral Kind = iota + + // EastAsianAmbiguous characters that can be sometimes wide and sometimes + // narrow and require additional information not contained in the character + // code to further resolve their width. + EastAsianAmbiguous + + // EastAsianWide characters are wide in its usual form. They occur only in + // the context of East Asian typography. These runes may have explicit + // halfwidth counterparts. + EastAsianWide + + // EastAsianNarrow characters are narrow in its usual form. They often have + // fullwidth counterparts. + EastAsianNarrow + + // Note: there exist Narrow runes that do not have fullwidth or wide + // counterparts, despite what the definition says (e.g. U+27E6). + + // EastAsianFullwidth characters have a compatibility decompositions of type + // wide that map to a narrow counterpart. + EastAsianFullwidth + + // EastAsianHalfwidth characters have a compatibility decomposition of type + // narrow that map to a wide or ambiguous counterpart, plus U+20A9 ₩ WON + // SIGN. + EastAsianHalfwidth + + // Note: there exist runes that have a halfwidth counterparts but that are + // classified as Ambiguous, rather than wide (e.g. U+2190). +) + +// TODO: the generated tries need to return size 1 for invalid runes for the +// width to be computed correctly (each byte should render width 1) + +var trie = newWidthTrie(0) + +// Lookup reports the Properties of the first rune in b and the number of bytes +// of its UTF-8 encoding. +func Lookup(b []byte) (p Properties, size int) { + v, sz := trie.lookup(b) + return Properties{elem(v), b[sz-1]}, sz +} + +// LookupString reports the Properties of the first rune in s and the number of +// bytes of its UTF-8 encoding. +func LookupString(s string) (p Properties, size int) { + v, sz := trie.lookupString(s) + return Properties{elem(v), s[sz-1]}, sz +} + +// LookupRune reports the Properties of rune r. +func LookupRune(r rune) Properties { + var buf [4]byte + n := utf8.EncodeRune(buf[:], r) + v, _ := trie.lookup(buf[:n]) + last := byte(r) + if r >= utf8.RuneSelf { + last = 0x80 + byte(r&0x3f) + } + return Properties{elem(v), last} +} + +// Properties provides access to width properties of a rune. +type Properties struct { + elem elem + last byte +} + +func (e elem) kind() Kind { + return Kind(e >> typeShift) +} + +// Kind returns the Kind of a rune as defined in Unicode TR #11. +// See https://unicode.org/reports/tr11/ for more details. +func (p Properties) Kind() Kind { + return p.elem.kind() +} + +// Folded returns the folded variant of a rune or 0 if the rune is canonical. +func (p Properties) Folded() rune { + if p.elem&tagNeedsFold != 0 { + buf := inverseData[byte(p.elem)] + buf[buf[0]] ^= p.last + r, _ := utf8.DecodeRune(buf[1 : 1+buf[0]]) + return r + } + return 0 +} + +// Narrow returns the narrow variant of a rune or 0 if the rune is already +// narrow or doesn't have a narrow variant. +func (p Properties) Narrow() rune { + if k := p.elem.kind(); byte(p.elem) != 0 && (k == EastAsianFullwidth || k == EastAsianWide || k == EastAsianAmbiguous) { + buf := inverseData[byte(p.elem)] + buf[buf[0]] ^= p.last + r, _ := utf8.DecodeRune(buf[1 : 1+buf[0]]) + return r + } + return 0 +} + +// Wide returns the wide variant of a rune or 0 if the rune is already +// wide or doesn't have a wide variant. 
+func (p Properties) Wide() rune { + if k := p.elem.kind(); byte(p.elem) != 0 && (k == EastAsianHalfwidth || k == EastAsianNarrow) { + buf := inverseData[byte(p.elem)] + buf[buf[0]] ^= p.last + r, _ := utf8.DecodeRune(buf[1 : 1+buf[0]]) + return r + } + return 0 +} + +// TODO for Properties: +// - Add Fullwidth/Halfwidth or Inverted methods for computing variants +// mapping. +// - Add width information (including information on non-spacing runes). + +// Transformer implements the transform.Transformer interface. +type Transformer struct { + t transform.SpanningTransformer +} + +// Reset implements the transform.Transformer interface. +func (t Transformer) Reset() { t.t.Reset() } + +// Transform implements the transform.Transformer interface. +func (t Transformer) Transform(dst, src []byte, atEOF bool) (nDst, nSrc int, err error) { + return t.t.Transform(dst, src, atEOF) +} + +// Span implements the transform.SpanningTransformer interface. +func (t Transformer) Span(src []byte, atEOF bool) (n int, err error) { + return t.t.Span(src, atEOF) +} + +// Bytes returns a new byte slice with the result of applying t to b. +func (t Transformer) Bytes(b []byte) []byte { + b, _, _ = transform.Bytes(t, b) + return b +} + +// String returns a string with the result of applying t to s. +func (t Transformer) String(s string) string { + s, _, _ = transform.String(t, s) + return s +} + +var ( + // Fold is a transform that maps all runes to their canonical width. + // + // Note that the NFKC and NFKD transforms in golang.org/x/text/unicode/norm + // provide a more generic folding mechanism. + Fold Transformer = Transformer{foldTransform{}} + + // Widen is a transform that maps runes to their wide variant, if + // available. + Widen Transformer = Transformer{wideTransform{}} + + // Narrow is a transform that maps runes to their narrow variant, if + // available. + Narrow Transformer = Transformer{narrowTransform{}} +) + +// TODO: Consider the following options: +// - Treat Ambiguous runes that have a halfwidth counterpart as wide, or some +// generalized variant of this. +// - Consider a wide Won character to be the default width (or some generalized +// variant of this). +// - Filter the set of characters that gets converted (the preferred approach is +// to allow applying filters to transforms). diff --git a/vendor/golang.org/x/tools/go/analysis/analysis.go b/vendor/golang.org/x/tools/go/analysis/analysis.go new file mode 100644 index 000000000..d11505a16 --- /dev/null +++ b/vendor/golang.org/x/tools/go/analysis/analysis.go @@ -0,0 +1,242 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package analysis + +import ( + "flag" + "fmt" + "go/ast" + "go/token" + "go/types" + "reflect" + + "golang.org/x/tools/internal/analysisinternal" +) + +// An Analyzer describes an analysis function and its options. +type Analyzer struct { + // The Name of the analyzer must be a valid Go identifier + // as it may appear in command-line flags, URLs, and so on. + Name string + + // Doc is the documentation for the analyzer. + // The part before the first "\n\n" is the title + // (no capital or period, max ~60 letters). + Doc string + + // Flags defines any flags accepted by the analyzer. + // The manner in which these flags are exposed to the user + // depends on the driver which runs the analyzer. + Flags flag.FlagSet + + // Run applies the analyzer to a package. 
+ // It returns an error if the analyzer failed. + // + // On success, the Run function may return a result + // computed by the Analyzer; its type must match ResultType. + // The driver makes this result available as an input to + // another Analyzer that depends directly on this one (see + // Requires) when it analyzes the same package. + // + // To pass analysis results between packages (and thus + // potentially between address spaces), use Facts, which are + // serializable. + Run func(*Pass) (interface{}, error) + + // RunDespiteErrors allows the driver to invoke + // the Run method of this analyzer even on a + // package that contains parse or type errors. + RunDespiteErrors bool + + // Requires is a set of analyzers that must run successfully + // before this one on a given package. This analyzer may inspect + // the outputs produced by each analyzer in Requires. + // The graph over analyzers implied by Requires edges must be acyclic. + // + // Requires establishes a "horizontal" dependency between + // analysis passes (different analyzers, same package). + Requires []*Analyzer + + // ResultType is the type of the optional result of the Run function. + ResultType reflect.Type + + // FactTypes indicates that this analyzer imports and exports + // Facts of the specified concrete types. + // An analyzer that uses facts may assume that its import + // dependencies have been similarly analyzed before it runs. + // Facts must be pointers. + // + // FactTypes establishes a "vertical" dependency between + // analysis passes (same analyzer, different packages). + FactTypes []Fact +} + +func (a *Analyzer) String() string { return a.Name } + +func init() { + // Set the analysisinternal functions to be able to pass type errors + // to the Pass type without modifying the go/analysis API. + analysisinternal.SetTypeErrors = func(p interface{}, errors []types.Error) { + p.(*Pass).typeErrors = errors + } + analysisinternal.GetTypeErrors = func(p interface{}) []types.Error { + return p.(*Pass).typeErrors + } +} + +// A Pass provides information to the Run function that +// applies a specific analyzer to a single Go package. +// +// It forms the interface between the analysis logic and the driver +// program, and has both input and an output components. +// +// As in a compiler, one pass may depend on the result computed by another. +// +// The Run function should not call any of the Pass functions concurrently. +type Pass struct { + Analyzer *Analyzer // the identity of the current analyzer + + // syntax and type information + Fset *token.FileSet // file position information + Files []*ast.File // the abstract syntax tree of each file + OtherFiles []string // names of non-Go files of this package + IgnoredFiles []string // names of ignored source files in this package + Pkg *types.Package // type information about the package + TypesInfo *types.Info // type information about the syntax trees + TypesSizes types.Sizes // function for computing sizes of types + + // Report reports a Diagnostic, a finding about a specific location + // in the analyzed source code such as a potential mistake. + // It may be called by the Run function. + Report func(Diagnostic) + + // ResultOf provides the inputs to this analysis pass, which are + // the corresponding results of its prerequisite analyzers. + // The map keys are the elements of Analysis.Required, + // and the type of each corresponding value is the required + // analysis's ResultType. 
+ ResultOf map[*Analyzer]interface{} + + // -- facts -- + + // ImportObjectFact retrieves a fact associated with obj. + // Given a value ptr of type *T, where *T satisfies Fact, + // ImportObjectFact copies the value to *ptr. + // + // ImportObjectFact panics if called after the pass is complete. + // ImportObjectFact is not concurrency-safe. + ImportObjectFact func(obj types.Object, fact Fact) bool + + // ImportPackageFact retrieves a fact associated with package pkg, + // which must be this package or one of its dependencies. + // See comments for ImportObjectFact. + ImportPackageFact func(pkg *types.Package, fact Fact) bool + + // ExportObjectFact associates a fact of type *T with the obj, + // replacing any previous fact of that type. + // + // ExportObjectFact panics if it is called after the pass is + // complete, or if obj does not belong to the package being analyzed. + // ExportObjectFact is not concurrency-safe. + ExportObjectFact func(obj types.Object, fact Fact) + + // ExportPackageFact associates a fact with the current package. + // See comments for ExportObjectFact. + ExportPackageFact func(fact Fact) + + // AllPackageFacts returns a new slice containing all package facts of the analysis's FactTypes + // in unspecified order. + // WARNING: This is an experimental API and may change in the future. + AllPackageFacts func() []PackageFact + + // AllObjectFacts returns a new slice containing all object facts of the analysis's FactTypes + // in unspecified order. + // WARNING: This is an experimental API and may change in the future. + AllObjectFacts func() []ObjectFact + + // typeErrors contains types.Errors that are associated with the pkg. + typeErrors []types.Error + + /* Further fields may be added in future. */ + // For example, suggested or applied refactorings. +} + +// PackageFact is a package together with an associated fact. +// WARNING: This is an experimental API and may change in the future. +type PackageFact struct { + Package *types.Package + Fact Fact +} + +// ObjectFact is an object together with an associated fact. +// WARNING: This is an experimental API and may change in the future. +type ObjectFact struct { + Object types.Object + Fact Fact +} + +// Reportf is a helper function that reports a Diagnostic using the +// specified position and formatted error message. +func (pass *Pass) Reportf(pos token.Pos, format string, args ...interface{}) { + msg := fmt.Sprintf(format, args...) + pass.Report(Diagnostic{Pos: pos, Message: msg}) +} + +// The Range interface provides a range. It's equivalent to and satisfied by +// ast.Node. +type Range interface { + Pos() token.Pos // position of first character belonging to the node + End() token.Pos // position of first character immediately after the node +} + +// ReportRangef is a helper function that reports a Diagnostic using the +// range provided. ast.Node values can be passed in as the range because +// they satisfy the Range interface. +func (pass *Pass) ReportRangef(rng Range, format string, args ...interface{}) { + msg := fmt.Sprintf(format, args...) + pass.Report(Diagnostic{Pos: rng.Pos(), End: rng.End(), Message: msg}) +} + +func (pass *Pass) String() string { + return fmt.Sprintf("%s@%s", pass.Analyzer.Name, pass.Pkg.Path()) +} + +// A Fact is an intermediate fact produced during analysis. +// +// Each fact is associated with a named declaration (a types.Object) or +// with a package as a whole. A single object or package may have +// multiple associated facts, but only one of any particular fact type. 
+// +// A Fact represents a predicate such as "never returns", but does not +// represent the subject of the predicate such as "function F" or "package P". +// +// Facts may be produced in one analysis pass and consumed by another +// analysis pass even if these are in different address spaces. +// If package P imports Q, all facts about Q produced during +// analysis of that package will be available during later analysis of P. +// Facts are analogous to type export data in a build system: +// just as export data enables separate compilation of several passes, +// facts enable "separate analysis". +// +// Each pass (a, p) starts with the set of facts produced by the +// same analyzer a applied to the packages directly imported by p. +// The analysis may add facts to the set, and they may be exported in turn. +// An analysis's Run function may retrieve facts by calling +// Pass.Import{Object,Package}Fact and update them using +// Pass.Export{Object,Package}Fact. +// +// A fact is logically private to its Analysis. To pass values +// between different analyzers, use the results mechanism; +// see Analyzer.Requires, Analyzer.ResultType, and Pass.ResultOf. +// +// A Fact type must be a pointer. +// Facts are encoded and decoded using encoding/gob. +// A Fact may implement the GobEncoder/GobDecoder interfaces +// to customize its encoding. Fact encoding should not fail. +// +// A Fact should not be modified once exported. +type Fact interface { + AFact() // dummy method to avoid type errors +} diff --git a/vendor/golang.org/x/tools/go/analysis/diagnostic.go b/vendor/golang.org/x/tools/go/analysis/diagnostic.go new file mode 100644 index 000000000..cd462a0cb --- /dev/null +++ b/vendor/golang.org/x/tools/go/analysis/diagnostic.go @@ -0,0 +1,65 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package analysis + +import "go/token" + +// A Diagnostic is a message associated with a source location or range. +// +// An Analyzer may return a variety of diagnostics; the optional Category, +// which should be a constant, may be used to classify them. +// It is primarily intended to make it easy to look up documentation. +// +// If End is provided, the diagnostic is specified to apply to the range between +// Pos and End. +type Diagnostic struct { + Pos token.Pos + End token.Pos // optional + Category string // optional + Message string + + // SuggestedFixes contains suggested fixes for a diagnostic which can be used to perform + // edits to a file that address the diagnostic. + // TODO(matloob): Should multiple SuggestedFixes be allowed for a diagnostic? + // Diagnostics should not contain SuggestedFixes that overlap. + // Experimental: This API is experimental and may change in the future. + SuggestedFixes []SuggestedFix // optional + + // Experimental: This API is experimental and may change in the future. + Related []RelatedInformation // optional +} + +// RelatedInformation contains information related to a diagnostic. +// For example, a diagnostic that flags duplicated declarations of a +// variable may include one RelatedInformation per existing +// declaration. +type RelatedInformation struct { + Pos token.Pos + End token.Pos + Message string +} + +// A SuggestedFix is a code change associated with a Diagnostic that a user can choose +// to apply to their code. Usually the SuggestedFix is meant to fix the issue flagged +// by the diagnostic. 
+// TextEdits for a SuggestedFix should not overlap. TextEdits for a SuggestedFix +// should not contain edits for other packages. +// Experimental: This API is experimental and may change in the future. +type SuggestedFix struct { + // A description for this suggested fix to be shown to a user deciding + // whether to accept it. + Message string + TextEdits []TextEdit +} + +// A TextEdit represents the replacement of the code between Pos and End with the new text. +// Each TextEdit should apply to a single file. End should not be earlier in the file than Pos. +// Experimental: This API is experimental and may change in the future. +type TextEdit struct { + // For a pure insertion, End can either be set to Pos or token.NoPos. + Pos token.Pos + End token.Pos + NewText []byte +} diff --git a/vendor/golang.org/x/tools/go/analysis/doc.go b/vendor/golang.org/x/tools/go/analysis/doc.go new file mode 100644 index 000000000..94a3bd5d0 --- /dev/null +++ b/vendor/golang.org/x/tools/go/analysis/doc.go @@ -0,0 +1,321 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +/* + +Package analysis defines the interface between a modular static +analysis and an analysis driver program. + + +Background + +A static analysis is a function that inspects a package of Go code and +reports a set of diagnostics (typically mistakes in the code), and +perhaps produces other results as well, such as suggested refactorings +or other facts. An analysis that reports mistakes is informally called a +"checker". For example, the printf checker reports mistakes in +fmt.Printf format strings. + +A "modular" analysis is one that inspects one package at a time but can +save information from a lower-level package and use it when inspecting a +higher-level package, analogous to separate compilation in a toolchain. +The printf checker is modular: when it discovers that a function such as +log.Fatalf delegates to fmt.Printf, it records this fact, and checks +calls to that function too, including calls made from another package. + +By implementing a common interface, checkers from a variety of sources +can be easily selected, incorporated, and reused in a wide range of +driver programs including command-line tools (such as vet), text editors and +IDEs, build and test systems (such as go build, Bazel, or Buck), test +frameworks, code review tools, code-base indexers (such as SourceGraph), +documentation viewers (such as godoc), batch pipelines for large code +bases, and so on. + + +Analyzer + +The primary type in the API is Analyzer. An Analyzer statically +describes an analysis function: its name, documentation, flags, +relationship to other analyzers, and of course, its logic. + +To define an analysis, a user declares a (logically constant) variable +of type Analyzer. Here is a typical example from one of the analyzers in +the go/analysis/passes/ subdirectory: + + package unusedresult + + var Analyzer = &analysis.Analyzer{ + Name: "unusedresult", + Doc: "check for unused results of calls to some functions", + Run: run, + ... + } + + func run(pass *analysis.Pass) (interface{}, error) { + ... + } + +An analysis driver is a program such as vet that runs a set of +analyses and prints the diagnostics that they report. +The driver program must import the list of Analyzers it needs. +Typically each Analyzer resides in a separate package. 
+To add a new Analyzer to an existing driver, add another item to the list: + + import ( "unusedresult"; "nilness"; "printf" ) + + var analyses = []*analysis.Analyzer{ + unusedresult.Analyzer, + nilness.Analyzer, + printf.Analyzer, + } + +A driver may use the name, flags, and documentation to provide on-line +help that describes the analyses it performs. +The doc comment contains a brief one-line summary, +optionally followed by paragraphs of explanation. + +The Analyzer type has more fields besides those shown above: + + type Analyzer struct { + Name string + Doc string + Flags flag.FlagSet + Run func(*Pass) (interface{}, error) + RunDespiteErrors bool + ResultType reflect.Type + Requires []*Analyzer + FactTypes []Fact + } + +The Flags field declares a set of named (global) flag variables that +control analysis behavior. Unlike vet, analysis flags are not declared +directly in the command line FlagSet; it is up to the driver to set the +flag variables. A driver for a single analysis, a, might expose its flag +f directly on the command line as -f, whereas a driver for multiple +analyses might prefix the flag name by the analysis name (-a.f) to avoid +ambiguity. An IDE might expose the flags through a graphical interface, +and a batch pipeline might configure them from a config file. +See the "findcall" analyzer for an example of flags in action. + +The RunDespiteErrors flag indicates whether the analysis is equipped to +handle ill-typed code. If not, the driver will skip the analysis if +there were parse or type errors. +The optional ResultType field specifies the type of the result value +computed by this analysis and made available to other analyses. +The Requires field specifies a list of analyses upon which +this one depends and whose results it may access, and it constrains the +order in which a driver may run analyses. +The FactTypes field is discussed in the section on Modularity. +The analysis package provides a Validate function to perform basic +sanity checks on an Analyzer, such as that its Requires graph is +acyclic, its fact and result types are unique, and so on. + +Finally, the Run field contains a function to be called by the driver to +execute the analysis on a single package. The driver passes it an +instance of the Pass type. + + +Pass + +A Pass describes a single unit of work: the application of a particular +Analyzer to a particular package of Go code. +The Pass provides information to the Analyzer's Run function about the +package being analyzed, and provides operations to the Run function for +reporting diagnostics and other information back to the driver. + + type Pass struct { + Fset *token.FileSet + Files []*ast.File + OtherFiles []string + IgnoredFiles []string + Pkg *types.Package + TypesInfo *types.Info + ResultOf map[*Analyzer]interface{} + Report func(Diagnostic) + ... + } + +The Fset, Files, Pkg, and TypesInfo fields provide the syntax trees, +type information, and source positions for a single package of Go code. + +The OtherFiles field provides the names, but not the contents, of non-Go +files such as assembly that are part of this package. See the "asmdecl" +or "buildtags" analyzers for examples of loading non-Go files and reporting +diagnostics against them. + +The IgnoredFiles field provides the names, but not the contents, +of ignored Go and non-Go source files that are not part of this package +with the current build configuration but may be part of other build +configurations. 
See the "buildtags" analyzer for an example of loading +and checking IgnoredFiles. + +The ResultOf field provides the results computed by the analyzers +required by this one, as expressed in its Analyzer.Requires field. The +driver runs the required analyzers first and makes their results +available in this map. Each Analyzer must return a value of the type +described in its Analyzer.ResultType field. +For example, the "ctrlflow" analyzer returns a *ctrlflow.CFGs, which +provides a control-flow graph for each function in the package (see +golang.org/x/tools/go/cfg); the "inspect" analyzer returns a value that +enables other Analyzers to traverse the syntax trees of the package more +efficiently; and the "buildssa" analyzer constructs an SSA-form +intermediate representation. +Each of these Analyzers extends the capabilities of later Analyzers +without adding a dependency to the core API, so an analysis tool pays +only for the extensions it needs. + +The Report function emits a diagnostic, a message associated with a +source position. For most analyses, diagnostics are their primary +result. +For convenience, Pass provides a helper method, Reportf, to report a new +diagnostic by formatting a string. +Diagnostic is defined as: + + type Diagnostic struct { + Pos token.Pos + Category string // optional + Message string + } + +The optional Category field is a short identifier that classifies the +kind of message when an analysis produces several kinds of diagnostic. + +Many analyses want to associate diagnostics with a severity level. +Because Diagnostic does not have a severity level field, an Analyzer's +diagnostics effectively all have the same severity level. To separate which +diagnostics are high severity and which are low severity, expose multiple +Analyzers instead. Analyzers should also be separated when their +diagnostics belong in different groups, or could be tagged differently +before being shown to the end user. Analyzers should document their severity +level to help downstream tools surface diagnostics properly. + +Most Analyzers inspect typed Go syntax trees, but a few, such as asmdecl +and buildtag, inspect the raw text of Go source files or even non-Go +files such as assembly. To report a diagnostic against a line of a +raw text file, use the following sequence: + + content, err := ioutil.ReadFile(filename) + if err != nil { ... } + tf := fset.AddFile(filename, -1, len(content)) + tf.SetLinesForContent(content) + ... + pass.Reportf(tf.LineStart(line), "oops") + + +Modular analysis with Facts + +To improve efficiency and scalability, large programs are routinely +built using separate compilation: units of the program are compiled +separately, and recompiled only when one of their dependencies changes; +independent modules may be compiled in parallel. The same technique may +be applied to static analyses, for the same benefits. Such analyses are +described as "modular". + +A compiler’s type checker is an example of a modular static analysis. +Many other checkers we would like to apply to Go programs can be +understood as alternative or non-standard type systems. For example, +vet's printf checker infers whether a function has the "printf wrapper" +type, and it applies stricter checks to calls of such functions. In +addition, it records which functions are printf wrappers for use by +later analysis passes to identify other printf wrappers by induction. 
+A result such as “f is a printf wrapper” that is not interesting by +itself but serves as a stepping stone to an interesting result (such as +a diagnostic) is called a "fact". + +The analysis API allows an analysis to define new types of facts, to +associate facts of these types with objects (named entities) declared +within the current package, or with the package as a whole, and to query +for an existing fact of a given type associated with an object or +package. + +An Analyzer that uses facts must declare their types: + + var Analyzer = &analysis.Analyzer{ + Name: "printf", + FactTypes: []analysis.Fact{new(isWrapper)}, + ... + } + + type isWrapper struct{} // => *types.Func f “is a printf wrapper” + +The driver program ensures that facts for a pass’s dependencies are +generated before analyzing the package and is responsible for propagating +facts from one package to another, possibly across address spaces. +Consequently, Facts must be serializable. The API requires that drivers +use the gob encoding, an efficient, robust, self-describing binary +protocol. A fact type may implement the GobEncoder/GobDecoder interfaces +if the default encoding is unsuitable. Facts should be stateless. + +The Pass type has functions to import and export facts, +associated either with an object or with a package: + + type Pass struct { + ... + ExportObjectFact func(types.Object, Fact) + ImportObjectFact func(types.Object, Fact) bool + + ExportPackageFact func(fact Fact) + ImportPackageFact func(*types.Package, Fact) bool + } + +An Analyzer may only export facts associated with the current package or +its objects, though it may import facts from any package or object that +is an import dependency of the current package. + +Conceptually, ExportObjectFact(obj, fact) inserts fact into a hidden map keyed by +the pair (obj, TypeOf(fact)), and the ImportObjectFact function +retrieves the entry from this map and copies its value into the variable +pointed to by fact. This scheme assumes that the concrete type of fact +is a pointer; this assumption is checked by the Validate function. +See the "printf" analyzer for an example of object facts in action. + +Some driver implementations (such as those based on Bazel and Blaze) do +not currently apply analyzers to packages of the standard library. +Therefore, for best results, analyzer authors should not rely on +analysis facts being available for standard packages. +For example, although the printf checker is capable of deducing during +analysis of the log package that log.Printf is a printf wrapper, +this fact is built in to the analyzer so that it correctly checks +calls to log.Printf even when run in a driver that does not apply +it to standard packages. We would like to remove this limitation in future. + + +Testing an Analyzer + +The analysistest subpackage provides utilities for testing an Analyzer. +In a few lines of code, it is possible to run an analyzer on a package +of testdata files and check that it reported all the expected +diagnostics and facts (and no more). Expectations are expressed using +"// want ..." comments in the input code. + + +Standalone commands + +Analyzers are provided in the form of packages that a driver program is +expected to import. The vet command imports a set of several analyzers, +but users may wish to define their own analysis commands that perform +additional checks. 
To simplify the task of creating an analysis command, +either for a single analyzer or for a whole suite, we provide the +singlechecker and multichecker subpackages. + +The singlechecker package provides the main function for a command that +runs one analyzer. By convention, each analyzer such as +go/passes/findcall should be accompanied by a singlechecker-based +command such as go/analysis/passes/findcall/cmd/findcall, defined in its +entirety as: + + package main + + import ( + "golang.org/x/tools/go/analysis/passes/findcall" + "golang.org/x/tools/go/analysis/singlechecker" + ) + + func main() { singlechecker.Main(findcall.Analyzer) } + +A tool that provides multiple analyzers can use multichecker in a +similar way, giving it the list of Analyzers. + +*/ +package analysis diff --git a/vendor/golang.org/x/tools/go/analysis/passes/asmdecl/asmdecl.go b/vendor/golang.org/x/tools/go/analysis/passes/asmdecl/asmdecl.go new file mode 100644 index 000000000..eb0016b18 --- /dev/null +++ b/vendor/golang.org/x/tools/go/analysis/passes/asmdecl/asmdecl.go @@ -0,0 +1,802 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package asmdecl defines an Analyzer that reports mismatches between +// assembly files and Go declarations. +package asmdecl + +import ( + "bytes" + "fmt" + "go/ast" + "go/build" + "go/token" + "go/types" + "log" + "regexp" + "strconv" + "strings" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/internal/analysisutil" +) + +const Doc = "report mismatches between assembly files and Go declarations" + +var Analyzer = &analysis.Analyzer{ + Name: "asmdecl", + Doc: Doc, + Run: run, +} + +// 'kind' is a kind of assembly variable. +// The kinds 1, 2, 4, 8 stand for values of that size. +type asmKind int + +// These special kinds are not valid sizes. +const ( + asmString asmKind = 100 + iota + asmSlice + asmArray + asmInterface + asmEmptyInterface + asmStruct + asmComplex +) + +// An asmArch describes assembly parameters for an architecture +type asmArch struct { + name string + bigEndian bool + stack string + lr bool + // calculated during initialization + sizes types.Sizes + intSize int + ptrSize int + maxAlign int +} + +// An asmFunc describes the expected variables for a function on a given architecture. +type asmFunc struct { + arch *asmArch + size int // size of all arguments + vars map[string]*asmVar + varByOffset map[int]*asmVar +} + +// An asmVar describes a single assembly variable. 
+type asmVar struct { + name string + kind asmKind + typ string + off int + size int + inner []*asmVar +} + +var ( + asmArch386 = asmArch{name: "386", bigEndian: false, stack: "SP", lr: false} + asmArchArm = asmArch{name: "arm", bigEndian: false, stack: "R13", lr: true} + asmArchArm64 = asmArch{name: "arm64", bigEndian: false, stack: "RSP", lr: true} + asmArchAmd64 = asmArch{name: "amd64", bigEndian: false, stack: "SP", lr: false} + asmArchMips = asmArch{name: "mips", bigEndian: true, stack: "R29", lr: true} + asmArchMipsLE = asmArch{name: "mipsle", bigEndian: false, stack: "R29", lr: true} + asmArchMips64 = asmArch{name: "mips64", bigEndian: true, stack: "R29", lr: true} + asmArchMips64LE = asmArch{name: "mips64le", bigEndian: false, stack: "R29", lr: true} + asmArchPpc64 = asmArch{name: "ppc64", bigEndian: true, stack: "R1", lr: true} + asmArchPpc64LE = asmArch{name: "ppc64le", bigEndian: false, stack: "R1", lr: true} + asmArchRISCV64 = asmArch{name: "riscv64", bigEndian: false, stack: "SP", lr: true} + asmArchS390X = asmArch{name: "s390x", bigEndian: true, stack: "R15", lr: true} + asmArchWasm = asmArch{name: "wasm", bigEndian: false, stack: "SP", lr: false} + + arches = []*asmArch{ + &asmArch386, + &asmArchArm, + &asmArchArm64, + &asmArchAmd64, + &asmArchMips, + &asmArchMipsLE, + &asmArchMips64, + &asmArchMips64LE, + &asmArchPpc64, + &asmArchPpc64LE, + &asmArchRISCV64, + &asmArchS390X, + &asmArchWasm, + } +) + +func init() { + for _, arch := range arches { + arch.sizes = types.SizesFor("gc", arch.name) + if arch.sizes == nil { + // TODO(adonovan): fix: now that asmdecl is not in the standard + // library we cannot assume types.SizesFor is consistent with arches. + // For now, assume 64-bit norms and print a warning. + // But this warning should really be deferred until we attempt to use + // arch, which is very unlikely. Better would be + // to defer size computation until we have Pass.TypesSizes. + arch.sizes = types.SizesFor("gc", "amd64") + log.Printf("unknown architecture %s", arch.name) + } + arch.intSize = int(arch.sizes.Sizeof(types.Typ[types.Int])) + arch.ptrSize = int(arch.sizes.Sizeof(types.Typ[types.UnsafePointer])) + arch.maxAlign = int(arch.sizes.Alignof(types.Typ[types.Int64])) + } +} + +var ( + re = regexp.MustCompile + asmPlusBuild = re(`//\s+\+build\s+([^\n]+)`) + asmTEXT = re(`\bTEXT\b(.*)·([^\(]+)\(SB\)(?:\s*,\s*([0-9A-Z|+()]+))?(?:\s*,\s*\$(-?[0-9]+)(?:-([0-9]+))?)?`) + asmDATA = re(`\b(DATA|GLOBL)\b`) + asmNamedFP = re(`\$?([a-zA-Z0-9_\xFF-\x{10FFFF}]+)(?:\+([0-9]+))\(FP\)`) + asmUnnamedFP = re(`[^+\-0-9](([0-9]+)\(FP\))`) + asmSP = re(`[^+\-0-9](([0-9]+)\(([A-Z0-9]+)\))`) + asmOpcode = re(`^\s*(?:[A-Z0-9a-z_]+:)?\s*([A-Z]+)\s*([^,]*)(?:,\s*(.*))?`) + ppc64Suff = re(`([BHWD])(ZU|Z|U|BR)?$`) + abiSuff = re(`^(.+)$`) +) + +func run(pass *analysis.Pass) (interface{}, error) { + // No work if no assembly files. + var sfiles []string + for _, fname := range pass.OtherFiles { + if strings.HasSuffix(fname, ".s") { + sfiles = append(sfiles, fname) + } + } + if sfiles == nil { + return nil, nil + } + + // Gather declarations. knownFunc[name][arch] is func description. 
+ knownFunc := make(map[string]map[string]*asmFunc) + + for _, f := range pass.Files { + for _, decl := range f.Decls { + if decl, ok := decl.(*ast.FuncDecl); ok && decl.Body == nil { + knownFunc[decl.Name.Name] = asmParseDecl(pass, decl) + } + } + } + +Files: + for _, fname := range sfiles { + content, tf, err := analysisutil.ReadFile(pass.Fset, fname) + if err != nil { + return nil, err + } + + // Determine architecture from file name if possible. + var arch string + var archDef *asmArch + for _, a := range arches { + if strings.HasSuffix(fname, "_"+a.name+".s") { + arch = a.name + archDef = a + break + } + } + + lines := strings.SplitAfter(string(content), "\n") + var ( + fn *asmFunc + fnName string + localSize, argSize int + wroteSP bool + noframe bool + haveRetArg bool + retLine []int + ) + + flushRet := func() { + if fn != nil && fn.vars["ret"] != nil && !haveRetArg && len(retLine) > 0 { + v := fn.vars["ret"] + for _, line := range retLine { + pass.Reportf(analysisutil.LineStart(tf, line), "[%s] %s: RET without writing to %d-byte ret+%d(FP)", arch, fnName, v.size, v.off) + } + } + retLine = nil + } + trimABI := func(fnName string) string { + m := abiSuff.FindStringSubmatch(fnName) + if m != nil { + return m[1] + } + return fnName + } + for lineno, line := range lines { + lineno++ + + badf := func(format string, args ...interface{}) { + pass.Reportf(analysisutil.LineStart(tf, lineno), "[%s] %s: %s", arch, fnName, fmt.Sprintf(format, args...)) + } + + if arch == "" { + // Determine architecture from +build line if possible. + if m := asmPlusBuild.FindStringSubmatch(line); m != nil { + // There can be multiple architectures in a single +build line, + // so accumulate them all and then prefer the one that + // matches build.Default.GOARCH. + var archCandidates []*asmArch + for _, fld := range strings.Fields(m[1]) { + for _, a := range arches { + if a.name == fld { + archCandidates = append(archCandidates, a) + } + } + } + for _, a := range archCandidates { + if a.name == build.Default.GOARCH { + archCandidates = []*asmArch{a} + break + } + } + if len(archCandidates) > 0 { + arch = archCandidates[0].name + archDef = archCandidates[0] + } + } + } + + // Ignore comments and commented-out code. + if i := strings.Index(line, "//"); i >= 0 { + line = line[:i] + } + + if m := asmTEXT.FindStringSubmatch(line); m != nil { + flushRet() + if arch == "" { + // Arch not specified by filename or build tags. + // Fall back to build.Default.GOARCH. + for _, a := range arches { + if a.name == build.Default.GOARCH { + arch = a.name + archDef = a + break + } + } + if arch == "" { + log.Printf("%s: cannot determine architecture for assembly file", fname) + continue Files + } + } + fnName = m[2] + if pkgPath := strings.TrimSpace(m[1]); pkgPath != "" { + // The assembler uses Unicode division slash within + // identifiers to represent the directory separator. + pkgPath = strings.Replace(pkgPath, "∕", "/", -1) + if pkgPath != pass.Pkg.Path() { + // log.Printf("%s:%d: [%s] cannot check cross-package assembly function: %s is in package %s", fname, lineno, arch, fnName, pkgPath) + fn = nil + fnName = "" + continue + } + } + // Trim off optional ABI selector. 
+ fnName := trimABI(fnName) + flag := m[3] + fn = knownFunc[fnName][arch] + if fn != nil { + size, _ := strconv.Atoi(m[5]) + if size != fn.size && (flag != "7" && !strings.Contains(flag, "NOSPLIT") || size != 0) { + badf("wrong argument size %d; expected $...-%d", size, fn.size) + } + } + localSize, _ = strconv.Atoi(m[4]) + localSize += archDef.intSize + if archDef.lr && !strings.Contains(flag, "NOFRAME") { + // Account for caller's saved LR + localSize += archDef.intSize + } + argSize, _ = strconv.Atoi(m[5]) + noframe = strings.Contains(flag, "NOFRAME") + if fn == nil && !strings.Contains(fnName, "<>") && !noframe { + badf("function %s missing Go declaration", fnName) + } + wroteSP = false + haveRetArg = false + continue + } else if strings.Contains(line, "TEXT") && strings.Contains(line, "SB") { + // function, but not visible from Go (didn't match asmTEXT), so stop checking + flushRet() + fn = nil + fnName = "" + continue + } + + if strings.Contains(line, "RET") && !strings.Contains(line, "(SB)") { + // RET f(SB) is a tail call. It is okay to not write the results. + retLine = append(retLine, lineno) + } + + if fnName == "" { + continue + } + + if asmDATA.FindStringSubmatch(line) != nil { + fn = nil + } + + if archDef == nil { + continue + } + + if strings.Contains(line, ", "+archDef.stack) || strings.Contains(line, ",\t"+archDef.stack) || strings.Contains(line, "NOP "+archDef.stack) || strings.Contains(line, "NOP\t"+archDef.stack) { + wroteSP = true + continue + } + + if arch == "wasm" && strings.Contains(line, "CallImport") { + // CallImport is a call out to magic that can write the result. + haveRetArg = true + } + + for _, m := range asmSP.FindAllStringSubmatch(line, -1) { + if m[3] != archDef.stack || wroteSP || noframe { + continue + } + off := 0 + if m[1] != "" { + off, _ = strconv.Atoi(m[2]) + } + if off >= localSize { + if fn != nil { + v := fn.varByOffset[off-localSize] + if v != nil { + badf("%s should be %s+%d(FP)", m[1], v.name, off-localSize) + continue + } + } + if off >= localSize+argSize { + badf("use of %s points beyond argument frame", m[1]) + continue + } + badf("use of %s to access argument frame", m[1]) + } + } + + if fn == nil { + continue + } + + for _, m := range asmUnnamedFP.FindAllStringSubmatch(line, -1) { + off, _ := strconv.Atoi(m[2]) + v := fn.varByOffset[off] + if v != nil { + badf("use of unnamed argument %s; offset %d is %s+%d(FP)", m[1], off, v.name, v.off) + } else { + badf("use of unnamed argument %s", m[1]) + } + } + + for _, m := range asmNamedFP.FindAllStringSubmatch(line, -1) { + name := m[1] + off := 0 + if m[2] != "" { + off, _ = strconv.Atoi(m[2]) + } + if name == "ret" || strings.HasPrefix(name, "ret_") { + haveRetArg = true + } + v := fn.vars[name] + if v == nil { + // Allow argframe+0(FP). 
+ if name == "argframe" && off == 0 { + continue + } + v = fn.varByOffset[off] + if v != nil { + badf("unknown variable %s; offset %d is %s+%d(FP)", name, off, v.name, v.off) + } else { + badf("unknown variable %s", name) + } + continue + } + asmCheckVar(badf, fn, line, m[0], off, v, archDef) + } + } + flushRet() + } + return nil, nil +} + +func asmKindForType(t types.Type, size int) asmKind { + switch t := t.Underlying().(type) { + case *types.Basic: + switch t.Kind() { + case types.String: + return asmString + case types.Complex64, types.Complex128: + return asmComplex + } + return asmKind(size) + case *types.Pointer, *types.Chan, *types.Map, *types.Signature: + return asmKind(size) + case *types.Struct: + return asmStruct + case *types.Interface: + if t.Empty() { + return asmEmptyInterface + } + return asmInterface + case *types.Array: + return asmArray + case *types.Slice: + return asmSlice + } + panic("unreachable") +} + +// A component is an assembly-addressable component of a composite type, +// or a composite type itself. +type component struct { + size int + offset int + kind asmKind + typ string + suffix string // Such as _base for string base, _0_lo for lo half of first element of [1]uint64 on 32 bit machine. + outer string // The suffix for immediately containing composite type. +} + +func newComponent(suffix string, kind asmKind, typ string, offset, size int, outer string) component { + return component{suffix: suffix, kind: kind, typ: typ, offset: offset, size: size, outer: outer} +} + +// componentsOfType generates a list of components of type t. +// For example, given string, the components are the string itself, the base, and the length. +func componentsOfType(arch *asmArch, t types.Type) []component { + return appendComponentsRecursive(arch, t, nil, "", 0) +} + +// appendComponentsRecursive implements componentsOfType. +// Recursion is required to correct handle structs and arrays, +// which can contain arbitrary other types. 
+func appendComponentsRecursive(arch *asmArch, t types.Type, cc []component, suffix string, off int) []component { + s := t.String() + size := int(arch.sizes.Sizeof(t)) + kind := asmKindForType(t, size) + cc = append(cc, newComponent(suffix, kind, s, off, size, suffix)) + + switch kind { + case 8: + if arch.ptrSize == 4 { + w1, w2 := "lo", "hi" + if arch.bigEndian { + w1, w2 = w2, w1 + } + cc = append(cc, newComponent(suffix+"_"+w1, 4, "half "+s, off, 4, suffix)) + cc = append(cc, newComponent(suffix+"_"+w2, 4, "half "+s, off+4, 4, suffix)) + } + + case asmEmptyInterface: + cc = append(cc, newComponent(suffix+"_type", asmKind(arch.ptrSize), "interface type", off, arch.ptrSize, suffix)) + cc = append(cc, newComponent(suffix+"_data", asmKind(arch.ptrSize), "interface data", off+arch.ptrSize, arch.ptrSize, suffix)) + + case asmInterface: + cc = append(cc, newComponent(suffix+"_itable", asmKind(arch.ptrSize), "interface itable", off, arch.ptrSize, suffix)) + cc = append(cc, newComponent(suffix+"_data", asmKind(arch.ptrSize), "interface data", off+arch.ptrSize, arch.ptrSize, suffix)) + + case asmSlice: + cc = append(cc, newComponent(suffix+"_base", asmKind(arch.ptrSize), "slice base", off, arch.ptrSize, suffix)) + cc = append(cc, newComponent(suffix+"_len", asmKind(arch.intSize), "slice len", off+arch.ptrSize, arch.intSize, suffix)) + cc = append(cc, newComponent(suffix+"_cap", asmKind(arch.intSize), "slice cap", off+arch.ptrSize+arch.intSize, arch.intSize, suffix)) + + case asmString: + cc = append(cc, newComponent(suffix+"_base", asmKind(arch.ptrSize), "string base", off, arch.ptrSize, suffix)) + cc = append(cc, newComponent(suffix+"_len", asmKind(arch.intSize), "string len", off+arch.ptrSize, arch.intSize, suffix)) + + case asmComplex: + fsize := size / 2 + cc = append(cc, newComponent(suffix+"_real", asmKind(fsize), fmt.Sprintf("real(complex%d)", size*8), off, fsize, suffix)) + cc = append(cc, newComponent(suffix+"_imag", asmKind(fsize), fmt.Sprintf("imag(complex%d)", size*8), off+fsize, fsize, suffix)) + + case asmStruct: + tu := t.Underlying().(*types.Struct) + fields := make([]*types.Var, tu.NumFields()) + for i := 0; i < tu.NumFields(); i++ { + fields[i] = tu.Field(i) + } + offsets := arch.sizes.Offsetsof(fields) + for i, f := range fields { + cc = appendComponentsRecursive(arch, f.Type(), cc, suffix+"_"+f.Name(), off+int(offsets[i])) + } + + case asmArray: + tu := t.Underlying().(*types.Array) + elem := tu.Elem() + // Calculate offset of each element array. + fields := []*types.Var{ + types.NewVar(token.NoPos, nil, "fake0", elem), + types.NewVar(token.NoPos, nil, "fake1", elem), + } + offsets := arch.sizes.Offsetsof(fields) + elemoff := int(offsets[1]) + for i := 0; i < int(tu.Len()); i++ { + cc = appendComponentsRecursive(arch, elem, cc, suffix+"_"+strconv.Itoa(i), off+i*elemoff) + } + } + + return cc +} + +// asmParseDecl parses a function decl for expected assembly variables. +func asmParseDecl(pass *analysis.Pass, decl *ast.FuncDecl) map[string]*asmFunc { + var ( + arch *asmArch + fn *asmFunc + offset int + ) + + // addParams adds asmVars for each of the parameters in list. + // isret indicates whether the list are the arguments or the return values. + // TODO(adonovan): simplify by passing (*types.Signature).{Params,Results} + // instead of list. + addParams := func(list []*ast.Field, isret bool) { + argnum := 0 + for _, fld := range list { + t := pass.TypesInfo.Types[fld.Type].Type + + // Work around https://golang.org/issue/28277. 
+ if t == nil { + if ell, ok := fld.Type.(*ast.Ellipsis); ok { + t = types.NewSlice(pass.TypesInfo.Types[ell.Elt].Type) + } + } + + align := int(arch.sizes.Alignof(t)) + size := int(arch.sizes.Sizeof(t)) + offset += -offset & (align - 1) + cc := componentsOfType(arch, t) + + // names is the list of names with this type. + names := fld.Names + if len(names) == 0 { + // Anonymous args will be called arg, arg1, arg2, ... + // Similarly so for return values: ret, ret1, ret2, ... + name := "arg" + if isret { + name = "ret" + } + if argnum > 0 { + name += strconv.Itoa(argnum) + } + names = []*ast.Ident{ast.NewIdent(name)} + } + argnum += len(names) + + // Create variable for each name. + for _, id := range names { + name := id.Name + for _, c := range cc { + outer := name + c.outer + v := asmVar{ + name: name + c.suffix, + kind: c.kind, + typ: c.typ, + off: offset + c.offset, + size: c.size, + } + if vo := fn.vars[outer]; vo != nil { + vo.inner = append(vo.inner, &v) + } + fn.vars[v.name] = &v + for i := 0; i < v.size; i++ { + fn.varByOffset[v.off+i] = &v + } + } + offset += size + } + } + } + + m := make(map[string]*asmFunc) + for _, arch = range arches { + fn = &asmFunc{ + arch: arch, + vars: make(map[string]*asmVar), + varByOffset: make(map[int]*asmVar), + } + offset = 0 + addParams(decl.Type.Params.List, false) + if decl.Type.Results != nil && len(decl.Type.Results.List) > 0 { + offset += -offset & (arch.maxAlign - 1) + addParams(decl.Type.Results.List, true) + } + fn.size = offset + m[arch.name] = fn + } + + return m +} + +// asmCheckVar checks a single variable reference. +func asmCheckVar(badf func(string, ...interface{}), fn *asmFunc, line, expr string, off int, v *asmVar, archDef *asmArch) { + m := asmOpcode.FindStringSubmatch(line) + if m == nil { + if !strings.HasPrefix(strings.TrimSpace(line), "//") { + badf("cannot find assembly opcode") + } + return + } + + addr := strings.HasPrefix(expr, "$") + + // Determine operand sizes from instruction. + // Typically the suffix suffices, but there are exceptions. + var src, dst, kind asmKind + op := m[1] + switch fn.arch.name + "." + op { + case "386.FMOVLP": + src, dst = 8, 4 + case "arm.MOVD": + src = 8 + case "arm.MOVW": + src = 4 + case "arm.MOVH", "arm.MOVHU": + src = 2 + case "arm.MOVB", "arm.MOVBU": + src = 1 + // LEA* opcodes don't really read the second arg. + // They just take the address of it. + case "386.LEAL": + dst = 4 + addr = true + case "amd64.LEAQ": + dst = 8 + addr = true + default: + switch fn.arch.name { + case "386", "amd64": + if strings.HasPrefix(op, "F") && (strings.HasSuffix(op, "D") || strings.HasSuffix(op, "DP")) { + // FMOVDP, FXCHD, etc + src = 8 + break + } + if strings.HasPrefix(op, "P") && strings.HasSuffix(op, "RD") { + // PINSRD, PEXTRD, etc + src = 4 + break + } + if strings.HasPrefix(op, "F") && (strings.HasSuffix(op, "F") || strings.HasSuffix(op, "FP")) { + // FMOVFP, FXCHF, etc + src = 4 + break + } + if strings.HasSuffix(op, "SD") { + // MOVSD, SQRTSD, etc + src = 8 + break + } + if strings.HasSuffix(op, "SS") { + // MOVSS, SQRTSS, etc + src = 4 + break + } + if op == "MOVO" || op == "MOVOU" { + src = 16 + break + } + if strings.HasPrefix(op, "SET") { + // SETEQ, etc + src = 1 + break + } + switch op[len(op)-1] { + case 'B': + src = 1 + case 'W': + src = 2 + case 'L': + src = 4 + case 'D', 'Q': + src = 8 + } + case "ppc64", "ppc64le": + // Strip standard suffixes to reveal size letter. 
+ m := ppc64Suff.FindStringSubmatch(op) + if m != nil { + switch m[1][0] { + case 'B': + src = 1 + case 'H': + src = 2 + case 'W': + src = 4 + case 'D': + src = 8 + } + } + case "mips", "mipsle", "mips64", "mips64le": + switch op { + case "MOVB", "MOVBU": + src = 1 + case "MOVH", "MOVHU": + src = 2 + case "MOVW", "MOVWU", "MOVF": + src = 4 + case "MOVV", "MOVD": + src = 8 + } + case "s390x": + switch op { + case "MOVB", "MOVBZ": + src = 1 + case "MOVH", "MOVHZ": + src = 2 + case "MOVW", "MOVWZ", "FMOVS": + src = 4 + case "MOVD", "FMOVD": + src = 8 + } + } + } + if dst == 0 { + dst = src + } + + // Determine whether the match we're holding + // is the first or second argument. + if strings.Index(line, expr) > strings.Index(line, ",") { + kind = dst + } else { + kind = src + } + + vk := v.kind + vs := v.size + vt := v.typ + switch vk { + case asmInterface, asmEmptyInterface, asmString, asmSlice: + // allow reference to first word (pointer) + vk = v.inner[0].kind + vs = v.inner[0].size + vt = v.inner[0].typ + case asmComplex: + // Allow a single instruction to load both parts of a complex. + if int(kind) == vs { + kind = asmComplex + } + } + if addr { + vk = asmKind(archDef.ptrSize) + vs = archDef.ptrSize + vt = "address" + } + + if off != v.off { + var inner bytes.Buffer + for i, vi := range v.inner { + if len(v.inner) > 1 { + fmt.Fprintf(&inner, ",") + } + fmt.Fprintf(&inner, " ") + if i == len(v.inner)-1 { + fmt.Fprintf(&inner, "or ") + } + fmt.Fprintf(&inner, "%s+%d(FP)", vi.name, vi.off) + } + badf("invalid offset %s; expected %s+%d(FP)%s", expr, v.name, v.off, inner.String()) + return + } + if kind != 0 && kind != vk { + var inner bytes.Buffer + if len(v.inner) > 0 { + fmt.Fprintf(&inner, " containing") + for i, vi := range v.inner { + if i > 0 && len(v.inner) > 2 { + fmt.Fprintf(&inner, ",") + } + fmt.Fprintf(&inner, " ") + if i > 0 && i == len(v.inner)-1 { + fmt.Fprintf(&inner, "and ") + } + fmt.Fprintf(&inner, "%s+%d(FP)", vi.name, vi.off) + } + } + badf("invalid %s of %s; %s is %d-byte value%s", op, expr, vt, vs, inner.String()) + } +} diff --git a/vendor/golang.org/x/tools/go/analysis/passes/assign/assign.go b/vendor/golang.org/x/tools/go/analysis/passes/assign/assign.go new file mode 100644 index 000000000..3586638ef --- /dev/null +++ b/vendor/golang.org/x/tools/go/analysis/passes/assign/assign.go @@ -0,0 +1,76 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package assign defines an Analyzer that detects useless assignments. +package assign + +// TODO(adonovan): check also for assignments to struct fields inside +// methods that are on T instead of *T. + +import ( + "fmt" + "go/ast" + "go/token" + "reflect" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/analysis/passes/internal/analysisutil" + "golang.org/x/tools/go/ast/inspector" +) + +const Doc = `check for useless assignments + +This checker reports assignments of the form x = x or a[i] = a[i]. 
+These are almost always useless, and even when they aren't they are +usually a mistake.` + +var Analyzer = &analysis.Analyzer{ + Name: "assign", + Doc: Doc, + Requires: []*analysis.Analyzer{inspect.Analyzer}, + Run: run, +} + +func run(pass *analysis.Pass) (interface{}, error) { + inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + + nodeFilter := []ast.Node{ + (*ast.AssignStmt)(nil), + } + inspect.Preorder(nodeFilter, func(n ast.Node) { + stmt := n.(*ast.AssignStmt) + if stmt.Tok != token.ASSIGN { + return // ignore := + } + if len(stmt.Lhs) != len(stmt.Rhs) { + // If LHS and RHS have different cardinality, they can't be the same. + return + } + for i, lhs := range stmt.Lhs { + rhs := stmt.Rhs[i] + if analysisutil.HasSideEffects(pass.TypesInfo, lhs) || + analysisutil.HasSideEffects(pass.TypesInfo, rhs) { + continue // expressions may not be equal + } + if reflect.TypeOf(lhs) != reflect.TypeOf(rhs) { + continue // short-circuit the heavy-weight gofmt check + } + le := analysisutil.Format(pass.Fset, lhs) + re := analysisutil.Format(pass.Fset, rhs) + if le == re { + pass.Report(analysis.Diagnostic{ + Pos: stmt.Pos(), Message: fmt.Sprintf("self-assignment of %s to %s", re, le), + SuggestedFixes: []analysis.SuggestedFix{ + {Message: "Remove", TextEdits: []analysis.TextEdit{ + {Pos: stmt.Pos(), End: stmt.End(), NewText: []byte{}}, + }}, + }, + }) + } + } + }) + + return nil, nil +} diff --git a/vendor/golang.org/x/tools/go/analysis/passes/atomic/atomic.go b/vendor/golang.org/x/tools/go/analysis/passes/atomic/atomic.go new file mode 100644 index 000000000..9261db7e4 --- /dev/null +++ b/vendor/golang.org/x/tools/go/analysis/passes/atomic/atomic.go @@ -0,0 +1,96 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package atomic defines an Analyzer that checks for common mistakes +// using the sync/atomic package. 
+package atomic + +import ( + "go/ast" + "go/token" + "go/types" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/analysis/passes/internal/analysisutil" + "golang.org/x/tools/go/ast/inspector" +) + +const Doc = `check for common mistakes using the sync/atomic package + +The atomic checker looks for assignment statements of the form: + + x = atomic.AddUint64(&x, 1) + +which are not atomic.` + +var Analyzer = &analysis.Analyzer{ + Name: "atomic", + Doc: Doc, + Requires: []*analysis.Analyzer{inspect.Analyzer}, + RunDespiteErrors: true, + Run: run, +} + +func run(pass *analysis.Pass) (interface{}, error) { + inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + + nodeFilter := []ast.Node{ + (*ast.AssignStmt)(nil), + } + inspect.Preorder(nodeFilter, func(node ast.Node) { + n := node.(*ast.AssignStmt) + if len(n.Lhs) != len(n.Rhs) { + return + } + if len(n.Lhs) == 1 && n.Tok == token.DEFINE { + return + } + + for i, right := range n.Rhs { + call, ok := right.(*ast.CallExpr) + if !ok { + continue + } + sel, ok := call.Fun.(*ast.SelectorExpr) + if !ok { + continue + } + pkgIdent, _ := sel.X.(*ast.Ident) + pkgName, ok := pass.TypesInfo.Uses[pkgIdent].(*types.PkgName) + if !ok || pkgName.Imported().Path() != "sync/atomic" { + continue + } + + switch sel.Sel.Name { + case "AddInt32", "AddInt64", "AddUint32", "AddUint64", "AddUintptr": + checkAtomicAddAssignment(pass, n.Lhs[i], call) + } + } + }) + return nil, nil +} + +// checkAtomicAddAssignment walks the atomic.Add* method calls checking +// for assigning the return value to the same variable being used in the +// operation +func checkAtomicAddAssignment(pass *analysis.Pass, left ast.Expr, call *ast.CallExpr) { + if len(call.Args) != 2 { + return + } + arg := call.Args[0] + broken := false + + gofmt := func(e ast.Expr) string { return analysisutil.Format(pass.Fset, e) } + + if uarg, ok := arg.(*ast.UnaryExpr); ok && uarg.Op == token.AND { + broken = gofmt(left) == gofmt(uarg.X) + } else if star, ok := left.(*ast.StarExpr); ok { + broken = gofmt(star.X) == gofmt(arg) + } + + if broken { + pass.ReportRangef(left, "direct assignment to atomic value") + } +} diff --git a/vendor/golang.org/x/tools/go/analysis/passes/atomicalign/atomicalign.go b/vendor/golang.org/x/tools/go/analysis/passes/atomicalign/atomicalign.go new file mode 100644 index 000000000..e2e1a4f67 --- /dev/null +++ b/vendor/golang.org/x/tools/go/analysis/passes/atomicalign/atomicalign.go @@ -0,0 +1,117 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package atomicalign defines an Analyzer that checks for non-64-bit-aligned +// arguments to sync/atomic functions. On non-32-bit platforms, those functions +// panic if their argument variables are not 64-bit aligned. It is therefore +// the caller's responsibility to arrange for 64-bit alignment of such variables. 
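+// For example (an illustrative sketch), on a 32-bit platform the following is
+// reported, because the int64 field that follows a smaller field need not be
+// 64-bit aligned and the atomic call may fault at run time:
+//
+//	type counter struct {
+//		ready bool
+//		n     int64
+//	}
+//	var c counter
+//	atomic.AddInt64(&c.n, 1)
+//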
+// See https://golang.org/pkg/sync/atomic/#pkg-note-BUG +package atomicalign + +import ( + "go/ast" + "go/token" + "go/types" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/analysis/passes/internal/analysisutil" + "golang.org/x/tools/go/ast/inspector" +) + +const Doc = "check for non-64-bits-aligned arguments to sync/atomic functions" + +var Analyzer = &analysis.Analyzer{ + Name: "atomicalign", + Doc: Doc, + Requires: []*analysis.Analyzer{inspect.Analyzer}, + Run: run, +} + +func run(pass *analysis.Pass) (interface{}, error) { + if 8*pass.TypesSizes.Sizeof(types.Typ[types.Uintptr]) == 64 { + return nil, nil // 64-bit platform + } + if !analysisutil.Imports(pass.Pkg, "sync/atomic") { + return nil, nil // doesn't directly import sync/atomic + } + + inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + nodeFilter := []ast.Node{ + (*ast.CallExpr)(nil), + } + + inspect.Preorder(nodeFilter, func(node ast.Node) { + call := node.(*ast.CallExpr) + sel, ok := call.Fun.(*ast.SelectorExpr) + if !ok { + return + } + pkgIdent, ok := sel.X.(*ast.Ident) + if !ok { + return + } + pkgName, ok := pass.TypesInfo.Uses[pkgIdent].(*types.PkgName) + if !ok || pkgName.Imported().Path() != "sync/atomic" { + return + } + + switch sel.Sel.Name { + case "AddInt64", "AddUint64", + "LoadInt64", "LoadUint64", + "StoreInt64", "StoreUint64", + "SwapInt64", "SwapUint64", + "CompareAndSwapInt64", "CompareAndSwapUint64": + + // For all the listed functions, the expression to check is always the first function argument. + check64BitAlignment(pass, sel.Sel.Name, call.Args[0]) + } + }) + + return nil, nil +} + +func check64BitAlignment(pass *analysis.Pass, funcName string, arg ast.Expr) { + // Checks the argument is made of the address operator (&) applied to + // to a struct field (as opposed to a variable as the first word of + // uint64 and int64 variables can be relied upon to be 64-bit aligned. + unary, ok := arg.(*ast.UnaryExpr) + if !ok || unary.Op != token.AND { + return + } + + // Retrieve the types.Struct in order to get the offset of the + // atomically accessed field. + sel, ok := unary.X.(*ast.SelectorExpr) + if !ok { + return + } + tvar, ok := pass.TypesInfo.Selections[sel].Obj().(*types.Var) + if !ok || !tvar.IsField() { + return + } + + stype, ok := pass.TypesInfo.Types[sel.X].Type.Underlying().(*types.Struct) + if !ok { + return + } + + var offset int64 + var fields []*types.Var + for i := 0; i < stype.NumFields(); i++ { + f := stype.Field(i) + fields = append(fields, f) + if f == tvar { + // We're done, this is the field we were looking for, + // no need to fill the fields slice further. + offset = pass.TypesSizes.Offsetsof(fields)[i] + break + } + } + if offset&7 == 0 { + return // 64-bit aligned + } + + pass.ReportRangef(arg, "address of non 64-bit aligned field .%s passed to atomic.%s", tvar.Name(), funcName) +} diff --git a/vendor/golang.org/x/tools/go/analysis/passes/bools/bools.go b/vendor/golang.org/x/tools/go/analysis/passes/bools/bools.go new file mode 100644 index 000000000..5ae47d894 --- /dev/null +++ b/vendor/golang.org/x/tools/go/analysis/passes/bools/bools.go @@ -0,0 +1,221 @@ +// Copyright 2014 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package bools defines an Analyzer that detects common mistakes +// involving boolean operators. 
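+// For example, it reports conditions that are always true or always false,
+// such as
+//
+//	x == 1 && x == 2
+//	s != "a" || s != "b"
+//
+// as well as redundant ones such as 'x == 1 || x == 1'.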
+package bools + +import ( + "go/ast" + "go/token" + "go/types" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/analysis/passes/internal/analysisutil" + "golang.org/x/tools/go/ast/inspector" +) + +const Doc = "check for common mistakes involving boolean operators" + +var Analyzer = &analysis.Analyzer{ + Name: "bools", + Doc: Doc, + Requires: []*analysis.Analyzer{inspect.Analyzer}, + Run: run, +} + +func run(pass *analysis.Pass) (interface{}, error) { + inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + + nodeFilter := []ast.Node{ + (*ast.BinaryExpr)(nil), + } + seen := make(map[*ast.BinaryExpr]bool) + inspect.Preorder(nodeFilter, func(n ast.Node) { + e := n.(*ast.BinaryExpr) + if seen[e] { + // Already processed as a subexpression of an earlier node. + return + } + + var op boolOp + switch e.Op { + case token.LOR: + op = or + case token.LAND: + op = and + default: + return + } + + comm := op.commutativeSets(pass.TypesInfo, e, seen) + for _, exprs := range comm { + op.checkRedundant(pass, exprs) + op.checkSuspect(pass, exprs) + } + }) + return nil, nil +} + +type boolOp struct { + name string + tok token.Token // token corresponding to this operator + badEq token.Token // token corresponding to the equality test that should not be used with this operator +} + +var ( + or = boolOp{"or", token.LOR, token.NEQ} + and = boolOp{"and", token.LAND, token.EQL} +) + +// commutativeSets returns all side effect free sets of +// expressions in e that are connected by op. +// For example, given 'a || b || f() || c || d' with the or op, +// commutativeSets returns {{b, a}, {d, c}}. +// commutativeSets adds any expanded BinaryExprs to seen. +func (op boolOp) commutativeSets(info *types.Info, e *ast.BinaryExpr, seen map[*ast.BinaryExpr]bool) [][]ast.Expr { + exprs := op.split(e, seen) + + // Partition the slice of expressions into commutative sets. + i := 0 + var sets [][]ast.Expr + for j := 0; j <= len(exprs); j++ { + if j == len(exprs) || hasSideEffects(info, exprs[j]) { + if i < j { + sets = append(sets, exprs[i:j]) + } + i = j + 1 + } + } + + return sets +} + +// checkRedundant checks for expressions of the form +// e && e +// e || e +// Exprs must contain only side effect free expressions. +func (op boolOp) checkRedundant(pass *analysis.Pass, exprs []ast.Expr) { + seen := make(map[string]bool) + for _, e := range exprs { + efmt := analysisutil.Format(pass.Fset, e) + if seen[efmt] { + pass.ReportRangef(e, "redundant %s: %s %s %s", op.name, efmt, op.tok, efmt) + } else { + seen[efmt] = true + } + } +} + +// checkSuspect checks for expressions of the form +// x != c1 || x != c2 +// x == c1 && x == c2 +// where c1 and c2 are constant expressions. +// If c1 and c2 are the same then it's redundant; +// if c1 and c2 are different then it's always true or always false. +// Exprs must contain only side effect free expressions. +func (op boolOp) checkSuspect(pass *analysis.Pass, exprs []ast.Expr) { + // seen maps from expressions 'x' to equality expressions 'x != c'. + seen := make(map[string]string) + + for _, e := range exprs { + bin, ok := e.(*ast.BinaryExpr) + if !ok || bin.Op != op.badEq { + continue + } + + // In order to avoid false positives, restrict to cases + // in which one of the operands is constant. We're then + // interested in the other operand. + // In the rare case in which both operands are constant + // (e.g. 
runtime.GOOS and "windows"), we'll only catch + // mistakes if the LHS is repeated, which is how most + // code is written. + var x ast.Expr + switch { + case pass.TypesInfo.Types[bin.Y].Value != nil: + x = bin.X + case pass.TypesInfo.Types[bin.X].Value != nil: + x = bin.Y + default: + continue + } + + // e is of the form 'x != c' or 'x == c'. + xfmt := analysisutil.Format(pass.Fset, x) + efmt := analysisutil.Format(pass.Fset, e) + if prev, found := seen[xfmt]; found { + // checkRedundant handles the case in which efmt == prev. + if efmt != prev { + pass.ReportRangef(e, "suspect %s: %s %s %s", op.name, efmt, op.tok, prev) + } + } else { + seen[xfmt] = efmt + } + } +} + +// hasSideEffects reports whether evaluation of e has side effects. +func hasSideEffects(info *types.Info, e ast.Expr) bool { + safe := true + ast.Inspect(e, func(node ast.Node) bool { + switch n := node.(type) { + case *ast.CallExpr: + typVal := info.Types[n.Fun] + switch { + case typVal.IsType(): + // Type conversion, which is safe. + case typVal.IsBuiltin(): + // Builtin func, conservatively assumed to not + // be safe for now. + safe = false + return false + default: + // A non-builtin func or method call. + // Conservatively assume that all of them have + // side effects for now. + safe = false + return false + } + case *ast.UnaryExpr: + if n.Op == token.ARROW { + safe = false + return false + } + } + return true + }) + return !safe +} + +// split returns a slice of all subexpressions in e that are connected by op. +// For example, given 'a || (b || c) || d' with the or op, +// split returns []{d, c, b, a}. +// seen[e] is already true; any newly processed exprs are added to seen. +func (op boolOp) split(e ast.Expr, seen map[*ast.BinaryExpr]bool) (exprs []ast.Expr) { + for { + e = unparen(e) + if b, ok := e.(*ast.BinaryExpr); ok && b.Op == op.tok { + seen[b] = true + exprs = append(exprs, op.split(b.Y, seen)...) + e = b.X + } else { + exprs = append(exprs, e) + break + } + } + return +} + +// unparen returns e with any enclosing parentheses stripped. +func unparen(e ast.Expr) ast.Expr { + for { + p, ok := e.(*ast.ParenExpr) + if !ok { + return e + } + e = p.X + } +} diff --git a/vendor/golang.org/x/tools/go/analysis/passes/buildssa/buildssa.go b/vendor/golang.org/x/tools/go/analysis/passes/buildssa/buildssa.go new file mode 100644 index 000000000..02b7b18b3 --- /dev/null +++ b/vendor/golang.org/x/tools/go/analysis/passes/buildssa/buildssa.go @@ -0,0 +1,117 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package buildssa defines an Analyzer that constructs the SSA +// representation of an error-free package and returns the set of all +// functions within it. It does not report any diagnostics itself but +// may be used as an input to other analyzers. +// +// THIS INTERFACE IS EXPERIMENTAL AND MAY BE SUBJECT TO INCOMPATIBLE CHANGE. +package buildssa + +import ( + "go/ast" + "go/types" + "reflect" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/ssa" +) + +var Analyzer = &analysis.Analyzer{ + Name: "buildssa", + Doc: "build SSA-form IR for later passes", + Run: run, + ResultType: reflect.TypeOf(new(SSA)), +} + +// SSA provides SSA-form intermediate representation for all the +// non-blank source functions in the current package. 
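+//
+// A dependent analyzer typically lists buildssa.Analyzer in its Requires and
+// retrieves the result as follows (a minimal sketch):
+//
+//	ssaInput := pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA)
+//	for _, fn := range ssaInput.SrcFuncs {
+//		// inspect fn ...
+//	}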
+type SSA struct { + Pkg *ssa.Package + SrcFuncs []*ssa.Function +} + +func run(pass *analysis.Pass) (interface{}, error) { + // Plundered from ssautil.BuildPackage. + + // We must create a new Program for each Package because the + // analysis API provides no place to hang a Program shared by + // all Packages. Consequently, SSA Packages and Functions do not + // have a canonical representation across an analysis session of + // multiple packages. This is unlikely to be a problem in + // practice because the analysis API essentially forces all + // packages to be analysed independently, so any given call to + // Analysis.Run on a package will see only SSA objects belonging + // to a single Program. + + // Some Analyzers may need GlobalDebug, in which case we'll have + // to set it globally, but let's wait till we need it. + mode := ssa.BuilderMode(0) + + prog := ssa.NewProgram(pass.Fset, mode) + + // Create SSA packages for all imports. + // Order is not significant. + created := make(map[*types.Package]bool) + var createAll func(pkgs []*types.Package) + createAll = func(pkgs []*types.Package) { + for _, p := range pkgs { + if !created[p] { + created[p] = true + prog.CreatePackage(p, nil, nil, true) + createAll(p.Imports()) + } + } + } + createAll(pass.Pkg.Imports()) + + // Create and build the primary package. + ssapkg := prog.CreatePackage(pass.Pkg, pass.Files, pass.TypesInfo, false) + ssapkg.Build() + + // Compute list of source functions, including literals, + // in source order. + var funcs []*ssa.Function + for _, f := range pass.Files { + for _, decl := range f.Decls { + if fdecl, ok := decl.(*ast.FuncDecl); ok { + + // SSA will not build a Function + // for a FuncDecl named blank. + // That's arguably too strict but + // relaxing it would break uniqueness of + // names of package members. + if fdecl.Name.Name == "_" { + continue + } + + // (init functions have distinct Func + // objects named "init" and distinct + // ssa.Functions named "init#1", ...) + + fn := pass.TypesInfo.Defs[fdecl.Name].(*types.Func) + if fn == nil { + panic(fn) + } + + f := ssapkg.Prog.FuncValue(fn) + if f == nil { + panic(fn) + } + + var addAnons func(f *ssa.Function) + addAnons = func(f *ssa.Function) { + funcs = append(funcs, f) + for _, anon := range f.AnonFuncs { + addAnons(anon) + } + } + addAnons(f) + } + } + } + + return &SSA{Pkg: ssapkg, SrcFuncs: funcs}, nil +} diff --git a/vendor/golang.org/x/tools/go/analysis/passes/buildtag/buildtag.go b/vendor/golang.org/x/tools/go/analysis/passes/buildtag/buildtag.go new file mode 100644 index 000000000..c4407ad91 --- /dev/null +++ b/vendor/golang.org/x/tools/go/analysis/passes/buildtag/buildtag.go @@ -0,0 +1,367 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:build go1.16 +// +build go1.16 + +// Package buildtag defines an Analyzer that checks build tags. 
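+//
+// For example, a matching pair of constraints looks like
+//
+//	//go:build linux && amd64
+//	// +build linux,amd64
+//
+// The analyzer reports misplaced '+build' comments (they must appear before
+// the package clause and be separated from it by a blank line) as well as
+// '+build' lines that do not match the file's '//go:build' constraint.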
+package buildtag + +import ( + "go/ast" + "go/build/constraint" + "go/parser" + "go/token" + "strings" + "unicode" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/internal/analysisutil" +) + +const Doc = "check that +build tags are well-formed and correctly located" + +var Analyzer = &analysis.Analyzer{ + Name: "buildtag", + Doc: Doc, + Run: runBuildTag, +} + +func runBuildTag(pass *analysis.Pass) (interface{}, error) { + for _, f := range pass.Files { + checkGoFile(pass, f) + } + for _, name := range pass.OtherFiles { + if err := checkOtherFile(pass, name); err != nil { + return nil, err + } + } + for _, name := range pass.IgnoredFiles { + if strings.HasSuffix(name, ".go") { + f, err := parser.ParseFile(pass.Fset, name, nil, parser.ParseComments) + if err != nil { + // Not valid Go source code - not our job to diagnose, so ignore. + return nil, nil + } + checkGoFile(pass, f) + } else { + if err := checkOtherFile(pass, name); err != nil { + return nil, err + } + } + } + return nil, nil +} + +func checkGoFile(pass *analysis.Pass, f *ast.File) { + var check checker + check.init(pass) + defer check.finish() + + for _, group := range f.Comments { + // A +build comment is ignored after or adjoining the package declaration. + if group.End()+1 >= f.Package { + check.plusBuildOK = false + } + // A //go:build comment is ignored after the package declaration + // (but adjoining it is OK, in contrast to +build comments). + if group.Pos() >= f.Package { + check.goBuildOK = false + } + + // Check each line of a //-comment. + for _, c := range group.List { + // "+build" is ignored within or after a /*...*/ comment. + if !strings.HasPrefix(c.Text, "//") { + check.plusBuildOK = false + } + check.comment(c.Slash, c.Text) + } + } +} + +func checkOtherFile(pass *analysis.Pass, filename string) error { + var check checker + check.init(pass) + defer check.finish() + + // We cannot use the Go parser, since this may not be a Go source file. + // Read the raw bytes instead. + content, tf, err := analysisutil.ReadFile(pass.Fset, filename) + if err != nil { + return err + } + + check.file(token.Pos(tf.Base()), string(content)) + return nil +} + +type checker struct { + pass *analysis.Pass + plusBuildOK bool // "+build" lines still OK + goBuildOK bool // "go:build" lines still OK + crossCheck bool // cross-check go:build and +build lines when done reading file + inStar bool // currently in a /* */ comment + goBuildPos token.Pos // position of first go:build line found + plusBuildPos token.Pos // position of first "+build" line found + goBuild constraint.Expr // go:build constraint found + plusBuild constraint.Expr // AND of +build constraints found +} + +func (check *checker) init(pass *analysis.Pass) { + check.pass = pass + check.goBuildOK = true + check.plusBuildOK = true + check.crossCheck = true +} + +func (check *checker) file(pos token.Pos, text string) { + // Determine cutpoint where +build comments are no longer valid. + // They are valid in leading // comments in the file followed by + // a blank line. + // + // This must be done as a separate pass because of the + // requirement that the comment be followed by a blank line. 
+ var plusBuildCutoff int + fullText := text + for text != "" { + i := strings.Index(text, "\n") + if i < 0 { + i = len(text) + } else { + i++ + } + offset := len(fullText) - len(text) + line := text[:i] + text = text[i:] + line = strings.TrimSpace(line) + if !strings.HasPrefix(line, "//") && line != "" { + break + } + if line == "" { + plusBuildCutoff = offset + } + } + + // Process each line. + // Must stop once we hit goBuildOK == false + text = fullText + check.inStar = false + for text != "" { + i := strings.Index(text, "\n") + if i < 0 { + i = len(text) + } else { + i++ + } + offset := len(fullText) - len(text) + line := text[:i] + text = text[i:] + check.plusBuildOK = offset < plusBuildCutoff + + if strings.HasPrefix(line, "//") { + check.comment(pos+token.Pos(offset), line) + continue + } + + // Keep looking for the point at which //go:build comments + // stop being allowed. Skip over, cut out any /* */ comments. + for { + line = strings.TrimSpace(line) + if check.inStar { + i := strings.Index(line, "*/") + if i < 0 { + line = "" + break + } + line = line[i+len("*/"):] + check.inStar = false + continue + } + if strings.HasPrefix(line, "/*") { + check.inStar = true + line = line[len("/*"):] + continue + } + break + } + if line != "" { + // Found non-comment non-blank line. + // Ends space for valid //go:build comments, + // but also ends the fraction of the file we can + // reliably parse. From this point on we might + // incorrectly flag "comments" inside multiline + // string constants or anything else (this might + // not even be a Go program). So stop. + break + } + } +} + +func (check *checker) comment(pos token.Pos, text string) { + if strings.HasPrefix(text, "//") { + if strings.Contains(text, "+build") { + check.plusBuildLine(pos, text) + } + if strings.Contains(text, "//go:build") { + check.goBuildLine(pos, text) + } + } + if strings.HasPrefix(text, "/*") { + if i := strings.Index(text, "\n"); i >= 0 { + // multiline /* */ comment - process interior lines + check.inStar = true + i++ + pos += token.Pos(i) + text = text[i:] + for text != "" { + i := strings.Index(text, "\n") + if i < 0 { + i = len(text) + } else { + i++ + } + line := text[:i] + if strings.HasPrefix(line, "//") { + check.comment(pos, line) + } + pos += token.Pos(i) + text = text[i:] + } + check.inStar = false + } + } +} + +func (check *checker) goBuildLine(pos token.Pos, line string) { + if !constraint.IsGoBuild(line) { + if !strings.HasPrefix(line, "//go:build") && constraint.IsGoBuild("//"+strings.TrimSpace(line[len("//"):])) { + check.pass.Reportf(pos, "malformed //go:build line (space between // and go:build)") + } + return + } + if !check.goBuildOK || check.inStar { + check.pass.Reportf(pos, "misplaced //go:build comment") + check.crossCheck = false + return + } + + if check.goBuildPos == token.NoPos { + check.goBuildPos = pos + } else { + check.pass.Reportf(pos, "unexpected extra //go:build line") + check.crossCheck = false + } + + // testing hack: stop at // ERROR + if i := strings.Index(line, " // ERROR "); i >= 0 { + line = line[:i] + } + + x, err := constraint.Parse(line) + if err != nil { + check.pass.Reportf(pos, "%v", err) + check.crossCheck = false + return + } + + if check.goBuild == nil { + check.goBuild = x + } +} + +func (check *checker) plusBuildLine(pos token.Pos, line string) { + line = strings.TrimSpace(line) + if !constraint.IsPlusBuild(line) { + // Comment with +build but not at beginning. + // Only report early in file. 
+ if check.plusBuildOK && !strings.HasPrefix(line, "// want") { + check.pass.Reportf(pos, "possible malformed +build comment") + } + return + } + if !check.plusBuildOK { // inStar implies !plusBuildOK + check.pass.Reportf(pos, "misplaced +build comment") + check.crossCheck = false + } + + if check.plusBuildPos == token.NoPos { + check.plusBuildPos = pos + } + + // testing hack: stop at // ERROR + if i := strings.Index(line, " // ERROR "); i >= 0 { + line = line[:i] + } + + fields := strings.Fields(line[len("//"):]) + // IsPlusBuildConstraint check above implies fields[0] == "+build" + for _, arg := range fields[1:] { + for _, elem := range strings.Split(arg, ",") { + if strings.HasPrefix(elem, "!!") { + check.pass.Reportf(pos, "invalid double negative in build constraint: %s", arg) + check.crossCheck = false + continue + } + elem = strings.TrimPrefix(elem, "!") + for _, c := range elem { + if !unicode.IsLetter(c) && !unicode.IsDigit(c) && c != '_' && c != '.' { + check.pass.Reportf(pos, "invalid non-alphanumeric build constraint: %s", arg) + check.crossCheck = false + break + } + } + } + } + + if check.crossCheck { + y, err := constraint.Parse(line) + if err != nil { + // Should never happen - constraint.Parse never rejects a // +build line. + // Also, we just checked the syntax above. + // Even so, report. + check.pass.Reportf(pos, "%v", err) + check.crossCheck = false + return + } + if check.plusBuild == nil { + check.plusBuild = y + } else { + check.plusBuild = &constraint.AndExpr{X: check.plusBuild, Y: y} + } + } +} + +func (check *checker) finish() { + if !check.crossCheck || check.plusBuildPos == token.NoPos || check.goBuildPos == token.NoPos { + return + } + + // Have both //go:build and // +build, + // with no errors found (crossCheck still true). + // Check they match. + var want constraint.Expr + lines, err := constraint.PlusBuildLines(check.goBuild) + if err != nil { + check.pass.Reportf(check.goBuildPos, "%v", err) + return + } + for _, line := range lines { + y, err := constraint.Parse(line) + if err != nil { + // Definitely should not happen, but not the user's fault. + // Do not report. + return + } + if want == nil { + want = y + } else { + want = &constraint.AndExpr{X: want, Y: y} + } + } + if want.String() != check.plusBuild.String() { + check.pass.Reportf(check.plusBuildPos, "+build lines do not match //go:build condition") + return + } +} diff --git a/vendor/golang.org/x/tools/go/analysis/passes/buildtag/buildtag_old.go b/vendor/golang.org/x/tools/go/analysis/passes/buildtag/buildtag_old.go new file mode 100644 index 000000000..e9234925f --- /dev/null +++ b/vendor/golang.org/x/tools/go/analysis/passes/buildtag/buildtag_old.go @@ -0,0 +1,174 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// TODO(rsc): Delete this file once Go 1.17 comes out and we can retire Go 1.15 support. + +//go:build !go1.16 +// +build !go1.16 + +// Package buildtag defines an Analyzer that checks build tags. 
+package buildtag + +import ( + "bytes" + "fmt" + "go/ast" + "go/parser" + "strings" + "unicode" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/internal/analysisutil" +) + +const Doc = "check that +build tags are well-formed and correctly located" + +var Analyzer = &analysis.Analyzer{ + Name: "buildtag", + Doc: Doc, + Run: runBuildTag, +} + +func runBuildTag(pass *analysis.Pass) (interface{}, error) { + for _, f := range pass.Files { + checkGoFile(pass, f) + } + for _, name := range pass.OtherFiles { + if err := checkOtherFile(pass, name); err != nil { + return nil, err + } + } + for _, name := range pass.IgnoredFiles { + if strings.HasSuffix(name, ".go") { + f, err := parser.ParseFile(pass.Fset, name, nil, parser.ParseComments) + if err != nil { + // Not valid Go source code - not our job to diagnose, so ignore. + return nil, nil + } + checkGoFile(pass, f) + } else { + if err := checkOtherFile(pass, name); err != nil { + return nil, err + } + } + } + return nil, nil +} + +func checkGoFile(pass *analysis.Pass, f *ast.File) { + pastCutoff := false + for _, group := range f.Comments { + // A +build comment is ignored after or adjoining the package declaration. + if group.End()+1 >= f.Package { + pastCutoff = true + } + + // "+build" is ignored within or after a /*...*/ comment. + if !strings.HasPrefix(group.List[0].Text, "//") { + pastCutoff = true + continue + } + + // Check each line of a //-comment. + for _, c := range group.List { + if !strings.Contains(c.Text, "+build") { + continue + } + if err := checkLine(c.Text, pastCutoff); err != nil { + pass.Reportf(c.Pos(), "%s", err) + } + } + } +} + +func checkOtherFile(pass *analysis.Pass, filename string) error { + content, tf, err := analysisutil.ReadFile(pass.Fset, filename) + if err != nil { + return err + } + + // We must look at the raw lines, as build tags may appear in non-Go + // files such as assembly files. + lines := bytes.SplitAfter(content, nl) + + // Determine cutpoint where +build comments are no longer valid. + // They are valid in leading // comments in the file followed by + // a blank line. + // + // This must be done as a separate pass because of the + // requirement that the comment be followed by a blank line. + var cutoff int + for i, line := range lines { + line = bytes.TrimSpace(line) + if !bytes.HasPrefix(line, slashSlash) { + if len(line) > 0 { + break + } + cutoff = i + } + } + + for i, line := range lines { + line = bytes.TrimSpace(line) + if !bytes.HasPrefix(line, slashSlash) { + continue + } + if !bytes.Contains(line, []byte("+build")) { + continue + } + if err := checkLine(string(line), i >= cutoff); err != nil { + pass.Reportf(analysisutil.LineStart(tf, i+1), "%s", err) + continue + } + } + return nil +} + +// checkLine checks a line that starts with "//" and contains "+build". +func checkLine(line string, pastCutoff bool) error { + line = strings.TrimPrefix(line, "//") + line = strings.TrimSpace(line) + + if strings.HasPrefix(line, "+build") { + fields := strings.Fields(line) + if fields[0] != "+build" { + // Comment is something like +buildasdf not +build. + return fmt.Errorf("possible malformed +build comment") + } + if pastCutoff { + return fmt.Errorf("+build comment must appear before package clause and be followed by a blank line") + } + if err := checkArguments(fields); err != nil { + return err + } + } else { + // Comment with +build but not at beginning. 
+ if !pastCutoff { + return fmt.Errorf("possible malformed +build comment") + } + } + return nil +} + +func checkArguments(fields []string) error { + for _, arg := range fields[1:] { + for _, elem := range strings.Split(arg, ",") { + if strings.HasPrefix(elem, "!!") { + return fmt.Errorf("invalid double negative in build constraint: %s", arg) + } + elem = strings.TrimPrefix(elem, "!") + for _, c := range elem { + if !unicode.IsLetter(c) && !unicode.IsDigit(c) && c != '_' && c != '.' { + return fmt.Errorf("invalid non-alphanumeric build constraint: %s", arg) + } + } + } + } + return nil +} + +var ( + nl = []byte("\n") + slashSlash = []byte("//") +) diff --git a/vendor/golang.org/x/tools/go/analysis/passes/cgocall/cgocall.go b/vendor/golang.org/x/tools/go/analysis/passes/cgocall/cgocall.go new file mode 100644 index 000000000..5768d0b9b --- /dev/null +++ b/vendor/golang.org/x/tools/go/analysis/passes/cgocall/cgocall.go @@ -0,0 +1,376 @@ +// Copyright 2015 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package cgocall defines an Analyzer that detects some violations of +// the cgo pointer passing rules. +package cgocall + +import ( + "fmt" + "go/ast" + "go/format" + "go/parser" + "go/token" + "go/types" + "log" + "os" + "strconv" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/internal/analysisutil" +) + +const debug = false + +const Doc = `detect some violations of the cgo pointer passing rules + +Check for invalid cgo pointer passing. +This looks for code that uses cgo to call C code passing values +whose types are almost always invalid according to the cgo pointer +sharing rules. +Specifically, it warns about attempts to pass a Go chan, map, func, +or slice to C, either directly, or via a pointer, array, or struct.` + +var Analyzer = &analysis.Analyzer{ + Name: "cgocall", + Doc: Doc, + RunDespiteErrors: true, + Run: run, +} + +func run(pass *analysis.Pass) (interface{}, error) { + if !analysisutil.Imports(pass.Pkg, "runtime/cgo") { + return nil, nil // doesn't use cgo + } + + cgofiles, info, err := typeCheckCgoSourceFiles(pass.Fset, pass.Pkg, pass.Files, pass.TypesInfo, pass.TypesSizes) + if err != nil { + return nil, err + } + for _, f := range cgofiles { + checkCgo(pass.Fset, f, info, pass.Reportf) + } + return nil, nil +} + +func checkCgo(fset *token.FileSet, f *ast.File, info *types.Info, reportf func(token.Pos, string, ...interface{})) { + ast.Inspect(f, func(n ast.Node) bool { + call, ok := n.(*ast.CallExpr) + if !ok { + return true + } + + // Is this a C.f() call? + var name string + if sel, ok := analysisutil.Unparen(call.Fun).(*ast.SelectorExpr); ok { + if id, ok := sel.X.(*ast.Ident); ok && id.Name == "C" { + name = sel.Sel.Name + } + } + if name == "" { + return true // not a call we need to check + } + + // A call to C.CBytes passes a pointer but is always safe. + if name == "CBytes" { + return true + } + + if debug { + log.Printf("%s: call to C.%s", fset.Position(call.Lparen), name) + } + + for _, arg := range call.Args { + if !typeOKForCgoCall(cgoBaseType(info, arg), make(map[types.Type]bool)) { + reportf(arg.Pos(), "possibly passing Go type with embedded pointer to C") + break + } + + // Check for passing the address of a bad type. 
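+			// For example, given 'var s []int' and a hypothetical C function f,
+			// a call written as C.f(unsafe.Pointer(&s)) is reported below,
+			// because the address of a Go slice header is passed to C.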
+ if conv, ok := arg.(*ast.CallExpr); ok && len(conv.Args) == 1 && + isUnsafePointer(info, conv.Fun) { + arg = conv.Args[0] + } + if u, ok := arg.(*ast.UnaryExpr); ok && u.Op == token.AND { + if !typeOKForCgoCall(cgoBaseType(info, u.X), make(map[types.Type]bool)) { + reportf(arg.Pos(), "possibly passing Go type with embedded pointer to C") + break + } + } + } + return true + }) +} + +// typeCheckCgoSourceFiles returns type-checked syntax trees for the raw +// cgo files of a package (those that import "C"). Such files are not +// Go, so there may be gaps in type information around C.f references. +// +// This checker was initially written in vet to inspect raw cgo source +// files using partial type information. However, Analyzers in the new +// analysis API are presented with the type-checked, "cooked" Go ASTs +// resulting from cgo-processing files, so we must choose between +// working with the cooked file generated by cgo (which was tried but +// proved fragile) or locating the raw cgo file (e.g. from //line +// directives) and working with that, as we now do. +// +// Specifically, we must type-check the raw cgo source files (or at +// least the subtrees needed for this analyzer) in an environment that +// simulates the rest of the already type-checked package. +// +// For example, for each raw cgo source file in the original package, +// such as this one: +// +// package p +// import "C" +// import "fmt" +// type T int +// const k = 3 +// var x, y = fmt.Println() +// func f() { ... } +// func g() { ... C.malloc(k) ... } +// func (T) f(int) string { ... } +// +// we synthesize a new ast.File, shown below, that dot-imports the +// original "cooked" package using a special name ("·this·"), so that all +// references to package members resolve correctly. (References to +// unexported names cause an "unexported" error, which we ignore.) +// +// To avoid shadowing names imported from the cooked package, +// package-level declarations in the new source file are modified so +// that they do not declare any names. +// (The cgocall analysis is concerned with uses, not declarations.) +// Specifically, type declarations are discarded; +// all names in each var and const declaration are blanked out; +// each method is turned into a regular function by turning +// the receiver into the first parameter; +// and all functions are renamed to "_". +// +// package p +// import . "·this·" // declares T, k, x, y, f, g, T.f +// import "C" +// import "fmt" +// const _ = 3 +// var _, _ = fmt.Println() +// func _() { ... } +// func _() { ... C.malloc(k) ... } +// func _(T, int) string { ... } +// +// In this way, the raw function bodies and const/var initializer +// expressions are preserved but refer to the "cooked" objects imported +// from "·this·", and none of the transformed package-level declarations +// actually declares anything. In the example above, the reference to k +// in the argument of the call to C.malloc resolves to "·this·".k, which +// has an accurate type. +// +// This approach could in principle be generalized to more complex +// analyses on raw cgo files. One could synthesize a "C" package so that +// C.f would resolve to "·this·"._C_func_f, for example. But we have +// limited ourselves here to preserving function bodies and initializer +// expressions since that is all that the cgocall analyzer needs. 
+// +func typeCheckCgoSourceFiles(fset *token.FileSet, pkg *types.Package, files []*ast.File, info *types.Info, sizes types.Sizes) ([]*ast.File, *types.Info, error) { + const thispkg = "·this·" + + // Which files are cgo files? + var cgoFiles []*ast.File + importMap := map[string]*types.Package{thispkg: pkg} + for _, raw := range files { + // If f is a cgo-generated file, Position reports + // the original file, honoring //line directives. + filename := fset.Position(raw.Pos()).Filename + f, err := parser.ParseFile(fset, filename, nil, parser.Mode(0)) + if err != nil { + return nil, nil, fmt.Errorf("can't parse raw cgo file: %v", err) + } + found := false + for _, spec := range f.Imports { + if spec.Path.Value == `"C"` { + found = true + break + } + } + if !found { + continue // not a cgo file + } + + // Record the original import map. + for _, spec := range raw.Imports { + path, _ := strconv.Unquote(spec.Path.Value) + importMap[path] = imported(info, spec) + } + + // Add special dot-import declaration: + // import . "·this·" + var decls []ast.Decl + decls = append(decls, &ast.GenDecl{ + Tok: token.IMPORT, + Specs: []ast.Spec{ + &ast.ImportSpec{ + Name: &ast.Ident{Name: "."}, + Path: &ast.BasicLit{ + Kind: token.STRING, + Value: strconv.Quote(thispkg), + }, + }, + }, + }) + + // Transform declarations from the raw cgo file. + for _, decl := range f.Decls { + switch decl := decl.(type) { + case *ast.GenDecl: + switch decl.Tok { + case token.TYPE: + // Discard type declarations. + continue + case token.IMPORT: + // Keep imports. + case token.VAR, token.CONST: + // Blank the declared var/const names. + for _, spec := range decl.Specs { + spec := spec.(*ast.ValueSpec) + for i := range spec.Names { + spec.Names[i].Name = "_" + } + } + } + case *ast.FuncDecl: + // Blank the declared func name. + decl.Name.Name = "_" + + // Turn a method receiver: func (T) f(P) R {...} + // into regular parameter: func _(T, P) R {...} + if decl.Recv != nil { + var params []*ast.Field + params = append(params, decl.Recv.List...) + params = append(params, decl.Type.Params.List...) + decl.Type.Params.List = params + decl.Recv = nil + } + } + decls = append(decls, decl) + } + f.Decls = decls + if debug { + format.Node(os.Stderr, fset, f) // debugging + } + cgoFiles = append(cgoFiles, f) + } + if cgoFiles == nil { + return nil, nil, nil // nothing to do (can't happen?) + } + + // Type-check the synthetic files. + tc := &types.Config{ + FakeImportC: true, + Importer: importerFunc(func(path string) (*types.Package, error) { + return importMap[path], nil + }), + Sizes: sizes, + Error: func(error) {}, // ignore errors (e.g. unused import) + } + + // It's tempting to record the new types in the + // existing pass.TypesInfo, but we don't own it. + altInfo := &types.Info{ + Types: make(map[ast.Expr]types.TypeAndValue), + } + tc.Check(pkg.Path(), fset, cgoFiles, altInfo) + + return cgoFiles, altInfo, nil +} + +// cgoBaseType tries to look through type conversions involving +// unsafe.Pointer to find the real type. It converts: +// unsafe.Pointer(x) => x +// *(*unsafe.Pointer)(unsafe.Pointer(&x)) => x +func cgoBaseType(info *types.Info, arg ast.Expr) types.Type { + switch arg := arg.(type) { + case *ast.CallExpr: + if len(arg.Args) == 1 && isUnsafePointer(info, arg.Fun) { + return cgoBaseType(info, arg.Args[0]) + } + case *ast.StarExpr: + call, ok := arg.X.(*ast.CallExpr) + if !ok || len(call.Args) != 1 { + break + } + // Here arg is *f(v). 
+ t := info.Types[call.Fun].Type + if t == nil { + break + } + ptr, ok := t.Underlying().(*types.Pointer) + if !ok { + break + } + // Here arg is *(*p)(v) + elem, ok := ptr.Elem().Underlying().(*types.Basic) + if !ok || elem.Kind() != types.UnsafePointer { + break + } + // Here arg is *(*unsafe.Pointer)(v) + call, ok = call.Args[0].(*ast.CallExpr) + if !ok || len(call.Args) != 1 { + break + } + // Here arg is *(*unsafe.Pointer)(f(v)) + if !isUnsafePointer(info, call.Fun) { + break + } + // Here arg is *(*unsafe.Pointer)(unsafe.Pointer(v)) + u, ok := call.Args[0].(*ast.UnaryExpr) + if !ok || u.Op != token.AND { + break + } + // Here arg is *(*unsafe.Pointer)(unsafe.Pointer(&v)) + return cgoBaseType(info, u.X) + } + + return info.Types[arg].Type +} + +// typeOKForCgoCall reports whether the type of arg is OK to pass to a +// C function using cgo. This is not true for Go types with embedded +// pointers. m is used to avoid infinite recursion on recursive types. +func typeOKForCgoCall(t types.Type, m map[types.Type]bool) bool { + if t == nil || m[t] { + return true + } + m[t] = true + switch t := t.Underlying().(type) { + case *types.Chan, *types.Map, *types.Signature, *types.Slice: + return false + case *types.Pointer: + return typeOKForCgoCall(t.Elem(), m) + case *types.Array: + return typeOKForCgoCall(t.Elem(), m) + case *types.Struct: + for i := 0; i < t.NumFields(); i++ { + if !typeOKForCgoCall(t.Field(i).Type(), m) { + return false + } + } + } + return true +} + +func isUnsafePointer(info *types.Info, e ast.Expr) bool { + t := info.Types[e].Type + return t != nil && t.Underlying() == types.Typ[types.UnsafePointer] +} + +type importerFunc func(path string) (*types.Package, error) + +func (f importerFunc) Import(path string) (*types.Package, error) { return f(path) } + +// TODO(adonovan): make this a library function or method of Info. +func imported(info *types.Info, spec *ast.ImportSpec) *types.Package { + obj, ok := info.Implicits[spec] + if !ok { + obj = info.Defs[spec.Name] // renaming import + } + return obj.(*types.PkgName).Imported() +} diff --git a/vendor/golang.org/x/tools/go/analysis/passes/composite/composite.go b/vendor/golang.org/x/tools/go/analysis/passes/composite/composite.go new file mode 100644 index 000000000..4c3ac6647 --- /dev/null +++ b/vendor/golang.org/x/tools/go/analysis/passes/composite/composite.go @@ -0,0 +1,117 @@ +// Copyright 2012 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package composite defines an Analyzer that checks for unkeyed +// composite literals. +package composite + +import ( + "go/ast" + "go/types" + "strings" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/ast/inspector" +) + +const Doc = `check for unkeyed composite literals + +This analyzer reports a diagnostic for composite literals of struct +types imported from another package that do not use the field-keyed +syntax. Such literals are fragile because the addition of a new field +(even if unexported) to the struct will cause compilation to fail. 
+ +As an example, + + err = &net.DNSConfigError{err} + +should be replaced by: + + err = &net.DNSConfigError{Err: err} +` + +var Analyzer = &analysis.Analyzer{ + Name: "composites", + Doc: Doc, + Requires: []*analysis.Analyzer{inspect.Analyzer}, + RunDespiteErrors: true, + Run: run, +} + +var whitelist = true + +func init() { + Analyzer.Flags.BoolVar(&whitelist, "whitelist", whitelist, "use composite white list; for testing only") +} + +// runUnkeyedLiteral checks if a composite literal is a struct literal with +// unkeyed fields. +func run(pass *analysis.Pass) (interface{}, error) { + inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + + nodeFilter := []ast.Node{ + (*ast.CompositeLit)(nil), + } + inspect.Preorder(nodeFilter, func(n ast.Node) { + cl := n.(*ast.CompositeLit) + + typ := pass.TypesInfo.Types[cl].Type + if typ == nil { + // cannot determine composite literals' type, skip it + return + } + typeName := typ.String() + if whitelist && unkeyedLiteral[typeName] { + // skip whitelisted types + return + } + under := typ.Underlying() + for { + ptr, ok := under.(*types.Pointer) + if !ok { + break + } + under = ptr.Elem().Underlying() + } + if _, ok := under.(*types.Struct); !ok { + // skip non-struct composite literals + return + } + if isLocalType(pass, typ) { + // allow unkeyed locally defined composite literal + return + } + + // check if the CompositeLit contains an unkeyed field + allKeyValue := true + for _, e := range cl.Elts { + if _, ok := e.(*ast.KeyValueExpr); !ok { + allKeyValue = false + break + } + } + if allKeyValue { + // all the composite literal fields are keyed + return + } + + pass.ReportRangef(cl, "%s composite literal uses unkeyed fields", typeName) + }) + return nil, nil +} + +func isLocalType(pass *analysis.Pass, typ types.Type) bool { + switch x := typ.(type) { + case *types.Struct: + // struct literals are local types + return true + case *types.Pointer: + return isLocalType(pass, x.Elem()) + case *types.Named: + // names in package foo are local to foo_test too + return strings.TrimSuffix(x.Obj().Pkg().Path(), "_test") == strings.TrimSuffix(pass.Pkg.Path(), "_test") + } + return false +} diff --git a/vendor/golang.org/x/tools/go/analysis/passes/composite/whitelist.go b/vendor/golang.org/x/tools/go/analysis/passes/composite/whitelist.go new file mode 100644 index 000000000..1e5f5fd20 --- /dev/null +++ b/vendor/golang.org/x/tools/go/analysis/passes/composite/whitelist.go @@ -0,0 +1,34 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package composite + +// unkeyedLiteral is a white list of types in the standard packages +// that are used with unkeyed literals we deem to be acceptable. +var unkeyedLiteral = map[string]bool{ + // These image and image/color struct types are frozen. We will never add fields to them. + "image/color.Alpha16": true, + "image/color.Alpha": true, + "image/color.CMYK": true, + "image/color.Gray16": true, + "image/color.Gray": true, + "image/color.NRGBA64": true, + "image/color.NRGBA": true, + "image/color.NYCbCrA": true, + "image/color.RGBA64": true, + "image/color.RGBA": true, + "image/color.YCbCr": true, + "image.Point": true, + "image.Rectangle": true, + "image.Uniform": true, + + "unicode.Range16": true, + "unicode.Range32": true, + + // These three structs are used in generated test main files, + // but the generator can be trusted. 
+ "testing.InternalBenchmark": true, + "testing.InternalExample": true, + "testing.InternalTest": true, +} diff --git a/vendor/golang.org/x/tools/go/analysis/passes/copylock/copylock.go b/vendor/golang.org/x/tools/go/analysis/passes/copylock/copylock.go new file mode 100644 index 000000000..c4ebf7857 --- /dev/null +++ b/vendor/golang.org/x/tools/go/analysis/passes/copylock/copylock.go @@ -0,0 +1,300 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package copylock defines an Analyzer that checks for locks +// erroneously passed by value. +package copylock + +import ( + "bytes" + "fmt" + "go/ast" + "go/token" + "go/types" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/analysis/passes/internal/analysisutil" + "golang.org/x/tools/go/ast/inspector" +) + +const Doc = `check for locks erroneously passed by value + +Inadvertently copying a value containing a lock, such as sync.Mutex or +sync.WaitGroup, may cause both copies to malfunction. Generally such +values should be referred to through a pointer.` + +var Analyzer = &analysis.Analyzer{ + Name: "copylocks", + Doc: Doc, + Requires: []*analysis.Analyzer{inspect.Analyzer}, + RunDespiteErrors: true, + Run: run, +} + +func run(pass *analysis.Pass) (interface{}, error) { + inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + + nodeFilter := []ast.Node{ + (*ast.AssignStmt)(nil), + (*ast.CallExpr)(nil), + (*ast.CompositeLit)(nil), + (*ast.FuncDecl)(nil), + (*ast.FuncLit)(nil), + (*ast.GenDecl)(nil), + (*ast.RangeStmt)(nil), + (*ast.ReturnStmt)(nil), + } + inspect.Preorder(nodeFilter, func(node ast.Node) { + switch node := node.(type) { + case *ast.RangeStmt: + checkCopyLocksRange(pass, node) + case *ast.FuncDecl: + checkCopyLocksFunc(pass, node.Name.Name, node.Recv, node.Type) + case *ast.FuncLit: + checkCopyLocksFunc(pass, "func", nil, node.Type) + case *ast.CallExpr: + checkCopyLocksCallExpr(pass, node) + case *ast.AssignStmt: + checkCopyLocksAssign(pass, node) + case *ast.GenDecl: + checkCopyLocksGenDecl(pass, node) + case *ast.CompositeLit: + checkCopyLocksCompositeLit(pass, node) + case *ast.ReturnStmt: + checkCopyLocksReturnStmt(pass, node) + } + }) + return nil, nil +} + +// checkCopyLocksAssign checks whether an assignment +// copies a lock. +func checkCopyLocksAssign(pass *analysis.Pass, as *ast.AssignStmt) { + for i, x := range as.Rhs { + if path := lockPathRhs(pass, x); path != nil { + pass.ReportRangef(x, "assignment copies lock value to %v: %v", analysisutil.Format(pass.Fset, as.Lhs[i]), path) + } + } +} + +// checkCopyLocksGenDecl checks whether lock is copied +// in variable declaration. 
+func checkCopyLocksGenDecl(pass *analysis.Pass, gd *ast.GenDecl) { + if gd.Tok != token.VAR { + return + } + for _, spec := range gd.Specs { + valueSpec := spec.(*ast.ValueSpec) + for i, x := range valueSpec.Values { + if path := lockPathRhs(pass, x); path != nil { + pass.ReportRangef(x, "variable declaration copies lock value to %v: %v", valueSpec.Names[i].Name, path) + } + } + } +} + +// checkCopyLocksCompositeLit detects lock copy inside a composite literal +func checkCopyLocksCompositeLit(pass *analysis.Pass, cl *ast.CompositeLit) { + for _, x := range cl.Elts { + if node, ok := x.(*ast.KeyValueExpr); ok { + x = node.Value + } + if path := lockPathRhs(pass, x); path != nil { + pass.ReportRangef(x, "literal copies lock value from %v: %v", analysisutil.Format(pass.Fset, x), path) + } + } +} + +// checkCopyLocksReturnStmt detects lock copy in return statement +func checkCopyLocksReturnStmt(pass *analysis.Pass, rs *ast.ReturnStmt) { + for _, x := range rs.Results { + if path := lockPathRhs(pass, x); path != nil { + pass.ReportRangef(x, "return copies lock value: %v", path) + } + } +} + +// checkCopyLocksCallExpr detects lock copy in the arguments to a function call +func checkCopyLocksCallExpr(pass *analysis.Pass, ce *ast.CallExpr) { + var id *ast.Ident + switch fun := ce.Fun.(type) { + case *ast.Ident: + id = fun + case *ast.SelectorExpr: + id = fun.Sel + } + if fun, ok := pass.TypesInfo.Uses[id].(*types.Builtin); ok { + switch fun.Name() { + case "new", "len", "cap", "Sizeof": + return + } + } + for _, x := range ce.Args { + if path := lockPathRhs(pass, x); path != nil { + pass.ReportRangef(x, "call of %s copies lock value: %v", analysisutil.Format(pass.Fset, ce.Fun), path) + } + } +} + +// checkCopyLocksFunc checks whether a function might +// inadvertently copy a lock, by checking whether +// its receiver, parameters, or return values +// are locks. +func checkCopyLocksFunc(pass *analysis.Pass, name string, recv *ast.FieldList, typ *ast.FuncType) { + if recv != nil && len(recv.List) > 0 { + expr := recv.List[0].Type + if path := lockPath(pass.Pkg, pass.TypesInfo.Types[expr].Type); path != nil { + pass.ReportRangef(expr, "%s passes lock by value: %v", name, path) + } + } + + if typ.Params != nil { + for _, field := range typ.Params.List { + expr := field.Type + if path := lockPath(pass.Pkg, pass.TypesInfo.Types[expr].Type); path != nil { + pass.ReportRangef(expr, "%s passes lock by value: %v", name, path) + } + } + } + + // Don't check typ.Results. If T has a Lock field it's OK to write + // return T{} + // because that is returning the zero value. Leave result checking + // to the return statement. +} + +// checkCopyLocksRange checks whether a range statement +// might inadvertently copy a lock by checking whether +// any of the range variables are locks. 
+func checkCopyLocksRange(pass *analysis.Pass, r *ast.RangeStmt) { + checkCopyLocksRangeVar(pass, r.Tok, r.Key) + checkCopyLocksRangeVar(pass, r.Tok, r.Value) +} + +func checkCopyLocksRangeVar(pass *analysis.Pass, rtok token.Token, e ast.Expr) { + if e == nil { + return + } + id, isId := e.(*ast.Ident) + if isId && id.Name == "_" { + return + } + + var typ types.Type + if rtok == token.DEFINE { + if !isId { + return + } + obj := pass.TypesInfo.Defs[id] + if obj == nil { + return + } + typ = obj.Type() + } else { + typ = pass.TypesInfo.Types[e].Type + } + + if typ == nil { + return + } + if path := lockPath(pass.Pkg, typ); path != nil { + pass.Reportf(e.Pos(), "range var %s copies lock: %v", analysisutil.Format(pass.Fset, e), path) + } +} + +type typePath []types.Type + +// String pretty-prints a typePath. +func (path typePath) String() string { + n := len(path) + var buf bytes.Buffer + for i := range path { + if i > 0 { + fmt.Fprint(&buf, " contains ") + } + // The human-readable path is in reverse order, outermost to innermost. + fmt.Fprint(&buf, path[n-i-1].String()) + } + return buf.String() +} + +func lockPathRhs(pass *analysis.Pass, x ast.Expr) typePath { + if _, ok := x.(*ast.CompositeLit); ok { + return nil + } + if _, ok := x.(*ast.CallExpr); ok { + // A call may return a zero value. + return nil + } + if star, ok := x.(*ast.StarExpr); ok { + if _, ok := star.X.(*ast.CallExpr); ok { + // A call may return a pointer to a zero value. + return nil + } + } + return lockPath(pass.Pkg, pass.TypesInfo.Types[x].Type) +} + +// lockPath returns a typePath describing the location of a lock value +// contained in typ. If there is no contained lock, it returns nil. +func lockPath(tpkg *types.Package, typ types.Type) typePath { + if typ == nil { + return nil + } + + for { + atyp, ok := typ.Underlying().(*types.Array) + if !ok { + break + } + typ = atyp.Elem() + } + + // We're only interested in the case in which the underlying + // type is a struct. (Interfaces and pointers are safe to copy.) + styp, ok := typ.Underlying().(*types.Struct) + if !ok { + return nil + } + + // We're looking for cases in which a pointer to this type + // is a sync.Locker, but a value is not. This differentiates + // embedded interfaces from embedded values. + if types.Implements(types.NewPointer(typ), lockerType) && !types.Implements(typ, lockerType) { + return []types.Type{typ} + } + + // In go1.10, sync.noCopy did not implement Locker. + // (The Unlock method was added only in CL 121876.) + // TODO(adonovan): remove workaround when we drop go1.10. + if named, ok := typ.(*types.Named); ok && + named.Obj().Name() == "noCopy" && + named.Obj().Pkg().Path() == "sync" { + return []types.Type{typ} + } + + nfields := styp.NumFields() + for i := 0; i < nfields; i++ { + ftyp := styp.Field(i).Type() + subpath := lockPath(tpkg, ftyp) + if subpath != nil { + return append(subpath, typ) + } + } + + return nil +} + +var lockerType *types.Interface + +// Construct a sync.Locker interface type. 
+func init() { + nullary := types.NewSignature(nil, nil, nil, false) // func() + methods := []*types.Func{ + types.NewFunc(token.NoPos, nil, "Lock", nullary), + types.NewFunc(token.NoPos, nil, "Unlock", nullary), + } + lockerType = types.NewInterface(methods, nil).Complete() +} diff --git a/vendor/golang.org/x/tools/go/analysis/passes/ctrlflow/ctrlflow.go b/vendor/golang.org/x/tools/go/analysis/passes/ctrlflow/ctrlflow.go new file mode 100644 index 000000000..51600ffc7 --- /dev/null +++ b/vendor/golang.org/x/tools/go/analysis/passes/ctrlflow/ctrlflow.go @@ -0,0 +1,226 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package ctrlflow is an analysis that provides a syntactic +// control-flow graph (CFG) for the body of a function. +// It records whether a function cannot return. +// By itself, it does not report any diagnostics. +package ctrlflow + +import ( + "go/ast" + "go/types" + "log" + "reflect" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/ast/inspector" + "golang.org/x/tools/go/cfg" + "golang.org/x/tools/go/types/typeutil" +) + +var Analyzer = &analysis.Analyzer{ + Name: "ctrlflow", + Doc: "build a control-flow graph", + Run: run, + ResultType: reflect.TypeOf(new(CFGs)), + FactTypes: []analysis.Fact{new(noReturn)}, + Requires: []*analysis.Analyzer{inspect.Analyzer}, +} + +// noReturn is a fact indicating that a function does not return. +type noReturn struct{} + +func (*noReturn) AFact() {} + +func (*noReturn) String() string { return "noReturn" } + +// A CFGs holds the control-flow graphs +// for all the functions of the current package. +type CFGs struct { + defs map[*ast.Ident]types.Object // from Pass.TypesInfo.Defs + funcDecls map[*types.Func]*declInfo + funcLits map[*ast.FuncLit]*litInfo + pass *analysis.Pass // transient; nil after construction +} + +// CFGs has two maps: funcDecls for named functions and funcLits for +// unnamed ones. Unlike funcLits, the funcDecls map is not keyed by its +// syntax node, *ast.FuncDecl, because callMayReturn needs to do a +// look-up by *types.Func, and you can get from an *ast.FuncDecl to a +// *types.Func but not the other way. + +type declInfo struct { + decl *ast.FuncDecl + cfg *cfg.CFG // iff decl.Body != nil + started bool // to break cycles + noReturn bool +} + +type litInfo struct { + cfg *cfg.CFG + noReturn bool +} + +// FuncDecl returns the control-flow graph for a named function. +// It returns nil if decl.Body==nil. +func (c *CFGs) FuncDecl(decl *ast.FuncDecl) *cfg.CFG { + if decl.Body == nil { + return nil + } + fn := c.defs[decl.Name].(*types.Func) + return c.funcDecls[fn].cfg +} + +// FuncLit returns the control-flow graph for a literal function. +func (c *CFGs) FuncLit(lit *ast.FuncLit) *cfg.CFG { + return c.funcLits[lit].cfg +} + +func run(pass *analysis.Pass) (interface{}, error) { + inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + + // Because CFG construction consumes and produces noReturn + // facts, CFGs for exported FuncDecls must be built before 'run' + // returns; we cannot construct them lazily. + // (We could build CFGs for FuncLits lazily, + // but the benefit is marginal.) + + // Pass 1. Map types.Funcs to ast.FuncDecls in this package. 
+ funcDecls := make(map[*types.Func]*declInfo) // functions and methods + funcLits := make(map[*ast.FuncLit]*litInfo) + + var decls []*types.Func // keys(funcDecls), in order + var lits []*ast.FuncLit // keys(funcLits), in order + + nodeFilter := []ast.Node{ + (*ast.FuncDecl)(nil), + (*ast.FuncLit)(nil), + } + inspect.Preorder(nodeFilter, func(n ast.Node) { + switch n := n.(type) { + case *ast.FuncDecl: + // Type information may be incomplete. + if fn, ok := pass.TypesInfo.Defs[n.Name].(*types.Func); ok { + funcDecls[fn] = &declInfo{decl: n} + decls = append(decls, fn) + } + case *ast.FuncLit: + funcLits[n] = new(litInfo) + lits = append(lits, n) + } + }) + + c := &CFGs{ + defs: pass.TypesInfo.Defs, + funcDecls: funcDecls, + funcLits: funcLits, + pass: pass, + } + + // Pass 2. Build CFGs. + + // Build CFGs for named functions. + // Cycles in the static call graph are broken + // arbitrarily but deterministically. + // We create noReturn facts as discovered. + for _, fn := range decls { + c.buildDecl(fn, funcDecls[fn]) + } + + // Build CFGs for literal functions. + // These aren't relevant to facts (since they aren't named) + // but are required for the CFGs.FuncLit API. + for _, lit := range lits { + li := funcLits[lit] + if li.cfg == nil { + li.cfg = cfg.New(lit.Body, c.callMayReturn) + if !hasReachableReturn(li.cfg) { + li.noReturn = true + } + } + } + + // All CFGs are now built. + c.pass = nil + + return c, nil +} + +// di.cfg may be nil on return. +func (c *CFGs) buildDecl(fn *types.Func, di *declInfo) { + // buildDecl may call itself recursively for the same function, + // because cfg.New is passed the callMayReturn method, which + // builds the CFG of the callee, leading to recursion. + // The buildDecl call tree thus resembles the static call graph. + // We mark each node when we start working on it to break cycles. + + if !di.started { // break cycle + di.started = true + + if isIntrinsicNoReturn(fn) { + di.noReturn = true + } + if di.decl.Body != nil { + di.cfg = cfg.New(di.decl.Body, c.callMayReturn) + if !hasReachableReturn(di.cfg) { + di.noReturn = true + } + } + if di.noReturn { + c.pass.ExportObjectFact(fn, new(noReturn)) + } + + // debugging + if false { + log.Printf("CFG for %s:\n%s (noreturn=%t)\n", fn, di.cfg.Format(c.pass.Fset), di.noReturn) + } + } +} + +// callMayReturn reports whether the called function may return. +// It is passed to the CFG builder. +func (c *CFGs) callMayReturn(call *ast.CallExpr) (r bool) { + if id, ok := call.Fun.(*ast.Ident); ok && c.pass.TypesInfo.Uses[id] == panicBuiltin { + return false // panic never returns + } + + // Is this a static call? + fn := typeutil.StaticCallee(c.pass.TypesInfo, call) + if fn == nil { + return true // callee not statically known; be conservative + } + + // Function or method declared in this package? + if di, ok := c.funcDecls[fn]; ok { + c.buildDecl(fn, di) + return !di.noReturn + } + + // Not declared in this package. + // Is there a fact from another package? + return !c.pass.ImportObjectFact(fn, new(noReturn)) +} + +var panicBuiltin = types.Universe.Lookup("panic").(*types.Builtin) + +func hasReachableReturn(g *cfg.CFG) bool { + for _, b := range g.Blocks { + if b.Live && b.Return() != nil { + return true + } + } + return false +} + +// isIntrinsicNoReturn reports whether a function intrinsically never +// returns because it stops execution of the calling thread. +// It is the base case in the recursion. 
+func isIntrinsicNoReturn(fn *types.Func) bool { + // Add functions here as the need arises, but don't allocate memory. + path, name := fn.Pkg().Path(), fn.Name() + return path == "syscall" && (name == "Exit" || name == "ExitProcess" || name == "ExitThread") || + path == "runtime" && name == "Goexit" +} diff --git a/vendor/golang.org/x/tools/go/analysis/passes/deepequalerrors/deepequalerrors.go b/vendor/golang.org/x/tools/go/analysis/passes/deepequalerrors/deepequalerrors.go new file mode 100644 index 000000000..9ea137386 --- /dev/null +++ b/vendor/golang.org/x/tools/go/analysis/passes/deepequalerrors/deepequalerrors.go @@ -0,0 +1,115 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package deepequalerrors defines an Analyzer that checks for the use +// of reflect.DeepEqual with error values. +package deepequalerrors + +import ( + "go/ast" + "go/types" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/ast/inspector" + "golang.org/x/tools/go/types/typeutil" +) + +const Doc = `check for calls of reflect.DeepEqual on error values + +The deepequalerrors checker looks for calls of the form: + + reflect.DeepEqual(err1, err2) + +where err1 and err2 are errors. Using reflect.DeepEqual to compare +errors is discouraged.` + +var Analyzer = &analysis.Analyzer{ + Name: "deepequalerrors", + Doc: Doc, + Requires: []*analysis.Analyzer{inspect.Analyzer}, + Run: run, +} + +func run(pass *analysis.Pass) (interface{}, error) { + inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + + nodeFilter := []ast.Node{ + (*ast.CallExpr)(nil), + } + inspect.Preorder(nodeFilter, func(n ast.Node) { + call := n.(*ast.CallExpr) + fn, ok := typeutil.Callee(pass.TypesInfo, call).(*types.Func) + if !ok { + return + } + if fn.FullName() == "reflect.DeepEqual" && hasError(pass, call.Args[0]) && hasError(pass, call.Args[1]) { + pass.ReportRangef(call, "avoid using reflect.DeepEqual with errors") + } + }) + return nil, nil +} + +var errorType = types.Universe.Lookup("error").Type().Underlying().(*types.Interface) + +// hasError reports whether the type of e contains the type error. +// See containsError, below, for the meaning of "contains". +func hasError(pass *analysis.Pass, e ast.Expr) bool { + tv, ok := pass.TypesInfo.Types[e] + if !ok { // no type info, assume good + return false + } + return containsError(tv.Type) +} + +// Report whether any type that typ could store and that could be compared is the +// error type. This includes typ itself, as well as the types of struct field, slice +// and array elements, map keys and elements, and pointers. It does not include +// channel types (incomparable), arg and result types of a Signature (not stored), or +// methods of a named or interface type (not stored). +func containsError(typ types.Type) bool { + // Track types being processed, to avoid infinite recursion. + // Using types as keys here is OK because we are checking for the identical pointer, not + // type identity. See analysis/passes/printf/types.go. 
+ inProgress := make(map[types.Type]bool) + + var check func(t types.Type) bool + check = func(t types.Type) bool { + if t == errorType { + return true + } + if inProgress[t] { + return false + } + inProgress[t] = true + switch t := t.(type) { + case *types.Pointer: + return check(t.Elem()) + case *types.Slice: + return check(t.Elem()) + case *types.Array: + return check(t.Elem()) + case *types.Map: + return check(t.Key()) || check(t.Elem()) + case *types.Struct: + for i := 0; i < t.NumFields(); i++ { + if check(t.Field(i).Type()) { + return true + } + } + case *types.Named: + return check(t.Underlying()) + + // We list the remaining valid type kinds for completeness. + case *types.Basic: + case *types.Chan: // channels store values, but they are not comparable + case *types.Signature: + case *types.Tuple: // tuples are only part of signatures + case *types.Interface: + } + return false + } + + return check(typ) +} diff --git a/vendor/golang.org/x/tools/go/analysis/passes/errorsas/errorsas.go b/vendor/golang.org/x/tools/go/analysis/passes/errorsas/errorsas.go new file mode 100644 index 000000000..384f02557 --- /dev/null +++ b/vendor/golang.org/x/tools/go/analysis/passes/errorsas/errorsas.go @@ -0,0 +1,75 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// The errorsas package defines an Analyzer that checks that the second argument to +// errors.As is a pointer to a type implementing error. +package errorsas + +import ( + "go/ast" + "go/types" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/ast/inspector" + "golang.org/x/tools/go/types/typeutil" +) + +const Doc = `report passing non-pointer or non-error values to errors.As + +The errorsas analysis reports calls to errors.As where the type +of the second argument is not a pointer to a type implementing error.` + +var Analyzer = &analysis.Analyzer{ + Name: "errorsas", + Doc: Doc, + Requires: []*analysis.Analyzer{inspect.Analyzer}, + Run: run, +} + +func run(pass *analysis.Pass) (interface{}, error) { + switch pass.Pkg.Path() { + case "errors", "errors_test": + // These packages know how to use their own APIs. + // Sometimes they are testing what happens to incorrect programs. + return nil, nil + } + + inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + + nodeFilter := []ast.Node{ + (*ast.CallExpr)(nil), + } + inspect.Preorder(nodeFilter, func(n ast.Node) { + call := n.(*ast.CallExpr) + fn := typeutil.StaticCallee(pass.TypesInfo, call) + if fn == nil { + return // not a static call + } + if len(call.Args) < 2 { + return // not enough arguments, e.g. called with return values of another function + } + if fn.FullName() == "errors.As" && !pointerToInterfaceOrError(pass, call.Args[1]) { + pass.ReportRangef(call, "second argument to errors.As must be a non-nil pointer to either a type that implements error, or to any interface type") + } + }) + return nil, nil +} + +var errorType = types.Universe.Lookup("error").Type().Underlying().(*types.Interface) + +// pointerToInterfaceOrError reports whether the type of e is a pointer to an interface or a type implementing error, +// or is the empty interface. 
+func pointerToInterfaceOrError(pass *analysis.Pass, e ast.Expr) bool { + t := pass.TypesInfo.Types[e].Type + if it, ok := t.Underlying().(*types.Interface); ok && it.NumMethods() == 0 { + return true + } + pt, ok := t.Underlying().(*types.Pointer) + if !ok { + return false + } + _, ok = pt.Elem().Underlying().(*types.Interface) + return ok || types.Implements(pt.Elem(), errorType) +} diff --git a/vendor/golang.org/x/tools/go/analysis/passes/fieldalignment/fieldalignment.go b/vendor/golang.org/x/tools/go/analysis/passes/fieldalignment/fieldalignment.go new file mode 100644 index 000000000..78afe94ab --- /dev/null +++ b/vendor/golang.org/x/tools/go/analysis/passes/fieldalignment/fieldalignment.go @@ -0,0 +1,368 @@ +// Copyright 2020 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package fieldalignment defines an Analyzer that detects structs that would use less +// memory if their fields were sorted. +package fieldalignment + +import ( + "bytes" + "fmt" + "go/ast" + "go/format" + "go/token" + "go/types" + "sort" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/ast/inspector" +) + +const Doc = `find structs that would use less memory if their fields were sorted + +This analyzer find structs that can be rearranged to use less memory, and provides +a suggested edit with the optimal order. + +Note that there are two different diagnostics reported. One checks struct size, +and the other reports "pointer bytes" used. Pointer bytes is how many bytes of the +object that the garbage collector has to potentially scan for pointers, for example: + + struct { uint32; string } + +have 16 pointer bytes because the garbage collector has to scan up through the string's +inner pointer. + + struct { string; *uint32 } + +has 24 pointer bytes because it has to scan further through the *uint32. + + struct { string; uint32 } + +has 8 because it can stop immediately after the string pointer. +` + +var Analyzer = &analysis.Analyzer{ + Name: "fieldalignment", + Doc: Doc, + Requires: []*analysis.Analyzer{inspect.Analyzer}, + Run: run, +} + +func run(pass *analysis.Pass) (interface{}, error) { + inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + nodeFilter := []ast.Node{ + (*ast.StructType)(nil), + } + inspect.Preorder(nodeFilter, func(node ast.Node) { + var s *ast.StructType + var ok bool + if s, ok = node.(*ast.StructType); !ok { + return + } + if tv, ok := pass.TypesInfo.Types[s]; ok { + fieldalignment(pass, s, tv.Type.(*types.Struct)) + } + }) + return nil, nil +} + +var unsafePointerTyp = types.Unsafe.Scope().Lookup("Pointer").(*types.TypeName).Type() + +func fieldalignment(pass *analysis.Pass, node *ast.StructType, typ *types.Struct) { + wordSize := pass.TypesSizes.Sizeof(unsafePointerTyp) + maxAlign := pass.TypesSizes.Alignof(unsafePointerTyp) + + s := gcSizes{wordSize, maxAlign} + optimal, indexes := optimalOrder(typ, &s) + optsz, optptrs := s.Sizeof(optimal), s.ptrdata(optimal) + + var message string + if sz := s.Sizeof(typ); sz != optsz { + message = fmt.Sprintf("struct of size %d could be %d", sz, optsz) + } else if ptrs := s.ptrdata(typ); ptrs != optptrs { + message = fmt.Sprintf("struct with %d pointer bytes could be %d", ptrs, optptrs) + } else { + // Already optimal order. + return + } + + // Flatten the ast node since it could have multiple field names per list item while + // *types.Struct only have one item per field. 
+ // TODO: Preserve multi-named fields instead of flattening. + var flat []*ast.Field + for _, f := range node.Fields.List { + // TODO: Preserve comment, for now get rid of them. + // See https://github.com/golang/go/issues/20744 + f.Comment = nil + f.Doc = nil + if len(f.Names) <= 1 { + flat = append(flat, f) + continue + } + for _, name := range f.Names { + flat = append(flat, &ast.Field{ + Names: []*ast.Ident{name}, + Type: f.Type, + }) + } + } + + // Sort fields according to the optimal order. + var reordered []*ast.Field + for _, index := range indexes { + reordered = append(reordered, flat[index]) + } + + newStr := &ast.StructType{ + Fields: &ast.FieldList{ + List: reordered, + }, + } + + // Write the newly aligned struct node to get the content for suggested fixes. + var buf bytes.Buffer + if err := format.Node(&buf, token.NewFileSet(), newStr); err != nil { + return + } + + pass.Report(analysis.Diagnostic{ + Pos: node.Pos(), + End: node.Pos() + token.Pos(len("struct")), + Message: message, + SuggestedFixes: []analysis.SuggestedFix{{ + Message: "Rearrange fields", + TextEdits: []analysis.TextEdit{{ + Pos: node.Pos(), + End: node.End(), + NewText: buf.Bytes(), + }}, + }}, + }) +} + +func optimalOrder(str *types.Struct, sizes *gcSizes) (*types.Struct, []int) { + nf := str.NumFields() + + type elem struct { + index int + alignof int64 + sizeof int64 + ptrdata int64 + } + + elems := make([]elem, nf) + for i := 0; i < nf; i++ { + field := str.Field(i) + ft := field.Type() + elems[i] = elem{ + i, + sizes.Alignof(ft), + sizes.Sizeof(ft), + sizes.ptrdata(ft), + } + } + + sort.Slice(elems, func(i, j int) bool { + ei := &elems[i] + ej := &elems[j] + + // Place zero sized objects before non-zero sized objects. + zeroi := ei.sizeof == 0 + zeroj := ej.sizeof == 0 + if zeroi != zeroj { + return zeroi + } + + // Next, place more tightly aligned objects before less tightly aligned objects. + if ei.alignof != ej.alignof { + return ei.alignof > ej.alignof + } + + // Place pointerful objects before pointer-free objects. + noptrsi := ei.ptrdata == 0 + noptrsj := ej.ptrdata == 0 + if noptrsi != noptrsj { + return noptrsj + } + + if !noptrsi { + // If both have pointers... + + // ... then place objects with less trailing + // non-pointer bytes earlier. That is, place + // the field with the most trailing + // non-pointer bytes at the end of the + // pointerful section. + traili := ei.sizeof - ei.ptrdata + trailj := ej.sizeof - ej.ptrdata + if traili != trailj { + return traili < trailj + } + } + + // Lastly, order by size. + if ei.sizeof != ej.sizeof { + return ei.sizeof > ej.sizeof + } + + return false + }) + + fields := make([]*types.Var, nf) + indexes := make([]int, nf) + for i, e := range elems { + fields[i] = str.Field(e.index) + indexes[i] = e.index + } + return types.NewStruct(fields, nil), indexes +} + +// Code below based on go/types.StdSizes. + +type gcSizes struct { + WordSize int64 + MaxAlign int64 +} + +func (s *gcSizes) Alignof(T types.Type) int64 { + // For arrays and structs, alignment is defined in terms + // of alignment of the elements and fields, respectively. + switch t := T.Underlying().(type) { + case *types.Array: + // spec: "For a variable x of array type: unsafe.Alignof(x) + // is the same as unsafe.Alignof(x[0]), but at least 1." + return s.Alignof(t.Elem()) + case *types.Struct: + // spec: "For a variable x of struct type: unsafe.Alignof(x) + // is the largest of the values unsafe.Alignof(x.f) for each + // field f of x, but at least 1." 
+ max := int64(1) + for i, nf := 0, t.NumFields(); i < nf; i++ { + if a := s.Alignof(t.Field(i).Type()); a > max { + max = a + } + } + return max + } + a := s.Sizeof(T) // may be 0 + // spec: "For a variable x of any type: unsafe.Alignof(x) is at least 1." + if a < 1 { + return 1 + } + if a > s.MaxAlign { + return s.MaxAlign + } + return a +} + +var basicSizes = [...]byte{ + types.Bool: 1, + types.Int8: 1, + types.Int16: 2, + types.Int32: 4, + types.Int64: 8, + types.Uint8: 1, + types.Uint16: 2, + types.Uint32: 4, + types.Uint64: 8, + types.Float32: 4, + types.Float64: 8, + types.Complex64: 8, + types.Complex128: 16, +} + +func (s *gcSizes) Sizeof(T types.Type) int64 { + switch t := T.Underlying().(type) { + case *types.Basic: + k := t.Kind() + if int(k) < len(basicSizes) { + if s := basicSizes[k]; s > 0 { + return int64(s) + } + } + if k == types.String { + return s.WordSize * 2 + } + case *types.Array: + return t.Len() * s.Sizeof(t.Elem()) + case *types.Slice: + return s.WordSize * 3 + case *types.Struct: + nf := t.NumFields() + if nf == 0 { + return 0 + } + + var o int64 + max := int64(1) + for i := 0; i < nf; i++ { + ft := t.Field(i).Type() + a, sz := s.Alignof(ft), s.Sizeof(ft) + if a > max { + max = a + } + if i == nf-1 && sz == 0 && o != 0 { + sz = 1 + } + o = align(o, a) + sz + } + return align(o, max) + case *types.Interface: + return s.WordSize * 2 + } + return s.WordSize // catch-all +} + +// align returns the smallest y >= x such that y % a == 0. +func align(x, a int64) int64 { + y := x + a - 1 + return y - y%a +} + +func (s *gcSizes) ptrdata(T types.Type) int64 { + switch t := T.Underlying().(type) { + case *types.Basic: + switch t.Kind() { + case types.String, types.UnsafePointer: + return s.WordSize + } + return 0 + case *types.Chan, *types.Map, *types.Pointer, *types.Signature, *types.Slice: + return s.WordSize + case *types.Interface: + return 2 * s.WordSize + case *types.Array: + n := t.Len() + if n == 0 { + return 0 + } + a := s.ptrdata(t.Elem()) + if a == 0 { + return 0 + } + z := s.Sizeof(t.Elem()) + return (n-1)*z + a + case *types.Struct: + nf := t.NumFields() + if nf == 0 { + return 0 + } + + var o, p int64 + for i := 0; i < nf; i++ { + ft := t.Field(i).Type() + a, sz := s.Alignof(ft), s.Sizeof(ft) + fp := s.ptrdata(ft) + o = align(o, a) + if fp != 0 { + p = o + fp + } + o += sz + } + return p + } + + panic("impossible") +} diff --git a/vendor/golang.org/x/tools/go/analysis/passes/findcall/findcall.go b/vendor/golang.org/x/tools/go/analysis/passes/findcall/findcall.go new file mode 100644 index 000000000..27b1b8400 --- /dev/null +++ b/vendor/golang.org/x/tools/go/analysis/passes/findcall/findcall.go @@ -0,0 +1,98 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package findcall defines an Analyzer that serves as a trivial +// example and test of the Analysis API. It reports a diagnostic for +// every call to a function or method of the name specified by its +// -name flag. It also exports a fact for each declaration that +// matches the name, plus a package-level fact if the package contained +// one or more such declarations. 
+package findcall + +import ( + "fmt" + "go/ast" + "go/types" + + "golang.org/x/tools/go/analysis" +) + +const Doc = `find calls to a particular function + +The findcall analysis reports calls to functions or methods +of a particular name.` + +var Analyzer = &analysis.Analyzer{ + Name: "findcall", + Doc: Doc, + Run: run, + RunDespiteErrors: true, + FactTypes: []analysis.Fact{new(foundFact)}, +} + +var name string // -name flag + +func init() { + Analyzer.Flags.StringVar(&name, "name", name, "name of the function to find") +} + +func run(pass *analysis.Pass) (interface{}, error) { + for _, f := range pass.Files { + ast.Inspect(f, func(n ast.Node) bool { + if call, ok := n.(*ast.CallExpr); ok { + var id *ast.Ident + switch fun := call.Fun.(type) { + case *ast.Ident: + id = fun + case *ast.SelectorExpr: + id = fun.Sel + } + if id != nil && !pass.TypesInfo.Types[id].IsType() && id.Name == name { + pass.Report(analysis.Diagnostic{ + Pos: call.Lparen, + Message: fmt.Sprintf("call of %s(...)", id.Name), + SuggestedFixes: []analysis.SuggestedFix{{ + Message: fmt.Sprintf("Add '_TEST_'"), + TextEdits: []analysis.TextEdit{{ + Pos: call.Lparen, + End: call.Lparen, + NewText: []byte("_TEST_"), + }}, + }}, + }) + } + } + return true + }) + } + + // Export a fact for each matching function. + // + // These facts are produced only to test the testing + // infrastructure in the analysistest package. + // They are not consumed by the findcall Analyzer + // itself, as would happen in a more realistic example. + for _, f := range pass.Files { + for _, decl := range f.Decls { + if decl, ok := decl.(*ast.FuncDecl); ok && decl.Name.Name == name { + if obj, ok := pass.TypesInfo.Defs[decl.Name].(*types.Func); ok { + pass.ExportObjectFact(obj, new(foundFact)) + } + } + } + } + + if len(pass.AllObjectFacts()) > 0 { + pass.ExportPackageFact(new(foundFact)) + } + + return nil, nil +} + +// foundFact is a fact associated with functions that match -name. +// We use it to exercise the fact machinery in tests. +type foundFact struct{} + +func (*foundFact) String() string { return "found" } +func (*foundFact) AFact() {} diff --git a/vendor/golang.org/x/tools/go/analysis/passes/framepointer/framepointer.go b/vendor/golang.org/x/tools/go/analysis/passes/framepointer/framepointer.go new file mode 100644 index 000000000..741492e47 --- /dev/null +++ b/vendor/golang.org/x/tools/go/analysis/passes/framepointer/framepointer.go @@ -0,0 +1,91 @@ +// Copyright 2020 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package framepointer defines an Analyzer that reports assembly code +// that clobbers the frame pointer before saving it. +package framepointer + +import ( + "go/build" + "regexp" + "strings" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/internal/analysisutil" +) + +const Doc = "report assembly that clobbers the frame pointer before saving it" + +var Analyzer = &analysis.Analyzer{ + Name: "framepointer", + Doc: Doc, + Run: run, +} + +var ( + re = regexp.MustCompile + asmWriteBP = re(`,\s*BP$`) // TODO: can have false positive, e.g. for TESTQ BP,BP. Seems unlikely. + asmMentionBP = re(`\bBP\b`) + asmControlFlow = re(`^(J|RET)`) +) + +func run(pass *analysis.Pass) (interface{}, error) { + if build.Default.GOARCH != "amd64" { // TODO: arm64 also? + return nil, nil + } + if build.Default.GOOS != "linux" && build.Default.GOOS != "darwin" { + return nil, nil + } + + // Find assembly files to work on. 
+ var sfiles []string + for _, fname := range pass.OtherFiles { + if strings.HasSuffix(fname, ".s") && pass.Pkg.Path() != "runtime" { + sfiles = append(sfiles, fname) + } + } + + for _, fname := range sfiles { + content, tf, err := analysisutil.ReadFile(pass.Fset, fname) + if err != nil { + return nil, err + } + + lines := strings.SplitAfter(string(content), "\n") + active := false + for lineno, line := range lines { + lineno++ + + // Ignore comments and commented-out code. + if i := strings.Index(line, "//"); i >= 0 { + line = line[:i] + } + line = strings.TrimSpace(line) + + // We start checking code at a TEXT line for a frameless function. + if strings.HasPrefix(line, "TEXT") && strings.Contains(line, "(SB)") && strings.Contains(line, "$0") { + active = true + continue + } + if !active { + continue + } + + if asmWriteBP.MatchString(line) { // clobber of BP, function is not OK + pass.Reportf(analysisutil.LineStart(tf, lineno), "frame pointer is clobbered before saving") + active = false + continue + } + if asmMentionBP.MatchString(line) { // any other use of BP might be a read, so function is OK + active = false + continue + } + if asmControlFlow.MatchString(line) { // give up after any branch instruction + active = false + continue + } + } + } + return nil, nil +} diff --git a/vendor/golang.org/x/tools/go/analysis/passes/httpresponse/httpresponse.go b/vendor/golang.org/x/tools/go/analysis/passes/httpresponse/httpresponse.go new file mode 100644 index 000000000..fd9e2af2b --- /dev/null +++ b/vendor/golang.org/x/tools/go/analysis/passes/httpresponse/httpresponse.go @@ -0,0 +1,169 @@ +// Copyright 2016 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package httpresponse defines an Analyzer that checks for mistakes +// using HTTP responses. +package httpresponse + +import ( + "go/ast" + "go/types" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/analysis/passes/internal/analysisutil" + "golang.org/x/tools/go/ast/inspector" +) + +const Doc = `check for mistakes using HTTP responses + +A common mistake when using the net/http package is to defer a function +call to close the http.Response Body before checking the error that +determines whether the response is valid: + + resp, err := http.Head(url) + defer resp.Body.Close() + if err != nil { + log.Fatal(err) + } + // (defer statement belongs here) + +This checker helps uncover latent nil dereference bugs by reporting a +diagnostic for such mistakes.` + +var Analyzer = &analysis.Analyzer{ + Name: "httpresponse", + Doc: Doc, + Requires: []*analysis.Analyzer{inspect.Analyzer}, + Run: run, +} + +func run(pass *analysis.Pass) (interface{}, error) { + inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + + // Fast path: if the package doesn't import net/http, + // skip the traversal. + if !analysisutil.Imports(pass.Pkg, "net/http") { + return nil, nil + } + + nodeFilter := []ast.Node{ + (*ast.CallExpr)(nil), + } + inspect.WithStack(nodeFilter, func(n ast.Node, push bool, stack []ast.Node) bool { + if !push { + return true + } + call := n.(*ast.CallExpr) + if !isHTTPFuncOrMethodOnClient(pass.TypesInfo, call) { + return true // the function call is not related to this check. + } + + // Find the innermost containing block, and get the list + // of statements starting with the one containing call. 
+ stmts := restOfBlock(stack) + if len(stmts) < 2 { + return true // the call to the http function is the last statement of the block. + } + + asg, ok := stmts[0].(*ast.AssignStmt) + if !ok { + return true // the first statement is not assignment. + } + resp := rootIdent(asg.Lhs[0]) + if resp == nil { + return true // could not find the http.Response in the assignment. + } + + def, ok := stmts[1].(*ast.DeferStmt) + if !ok { + return true // the following statement is not a defer. + } + root := rootIdent(def.Call.Fun) + if root == nil { + return true // could not find the receiver of the defer call. + } + + if resp.Obj == root.Obj { + pass.ReportRangef(root, "using %s before checking for errors", resp.Name) + } + return true + }) + return nil, nil +} + +// isHTTPFuncOrMethodOnClient checks whether the given call expression is on +// either a function of the net/http package or a method of http.Client that +// returns (*http.Response, error). +func isHTTPFuncOrMethodOnClient(info *types.Info, expr *ast.CallExpr) bool { + fun, _ := expr.Fun.(*ast.SelectorExpr) + sig, _ := info.Types[fun].Type.(*types.Signature) + if sig == nil { + return false // the call is not of the form x.f() + } + + res := sig.Results() + if res.Len() != 2 { + return false // the function called does not return two values. + } + if ptr, ok := res.At(0).Type().(*types.Pointer); !ok || !isNamedType(ptr.Elem(), "net/http", "Response") { + return false // the first return type is not *http.Response. + } + + errorType := types.Universe.Lookup("error").Type() + if !types.Identical(res.At(1).Type(), errorType) { + return false // the second return type is not error + } + + typ := info.Types[fun.X].Type + if typ == nil { + id, ok := fun.X.(*ast.Ident) + return ok && id.Name == "http" // function in net/http package. + } + + if isNamedType(typ, "net/http", "Client") { + return true // method on http.Client. + } + ptr, ok := typ.(*types.Pointer) + return ok && isNamedType(ptr.Elem(), "net/http", "Client") // method on *http.Client. +} + +// restOfBlock, given a traversal stack, finds the innermost containing +// block and returns the suffix of its statements starting with the +// current node (the last element of stack). +func restOfBlock(stack []ast.Node) []ast.Stmt { + for i := len(stack) - 1; i >= 0; i-- { + if b, ok := stack[i].(*ast.BlockStmt); ok { + for j, v := range b.List { + if v == stack[i+1] { + return b.List[j:] + } + } + break + } + } + return nil +} + +// rootIdent finds the root identifier x in a chain of selections x.y.z, or nil if not found. +func rootIdent(n ast.Node) *ast.Ident { + switch n := n.(type) { + case *ast.SelectorExpr: + return rootIdent(n.X) + case *ast.Ident: + return n + default: + return nil + } +} + +// isNamedType reports whether t is the named type path.name. +func isNamedType(t types.Type, path, name string) bool { + n, ok := t.(*types.Named) + if !ok { + return false + } + obj := n.Obj() + return obj.Name() == name && obj.Pkg() != nil && obj.Pkg().Path() == path +} diff --git a/vendor/golang.org/x/tools/go/analysis/passes/ifaceassert/ifaceassert.go b/vendor/golang.org/x/tools/go/analysis/passes/ifaceassert/ifaceassert.go new file mode 100644 index 000000000..fd2285332 --- /dev/null +++ b/vendor/golang.org/x/tools/go/analysis/passes/ifaceassert/ifaceassert.go @@ -0,0 +1,105 @@ +// Copyright 2020 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +// Package ifaceassert defines an Analyzer that flags +// impossible interface-interface type assertions. +package ifaceassert + +import ( + "go/ast" + "go/types" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/ast/inspector" +) + +const Doc = `detect impossible interface-to-interface type assertions + +This checker flags type assertions v.(T) and corresponding type-switch cases +in which the static type V of v is an interface that cannot possibly implement +the target interface T. This occurs when V and T contain methods with the same +name but different signatures. Example: + + var v interface { + Read() + } + _ = v.(io.Reader) + +The Read method in v has a different signature than the Read method in +io.Reader, so this assertion cannot succeed. +` + +var Analyzer = &analysis.Analyzer{ + Name: "ifaceassert", + Doc: Doc, + Requires: []*analysis.Analyzer{inspect.Analyzer}, + Run: run, +} + +// assertableTo checks whether interface v can be asserted into t. It returns +// nil on success, or the first conflicting method on failure. +func assertableTo(v, t types.Type) *types.Func { + if t == nil || v == nil { + // not assertable to, but there is no missing method + return nil + } + // ensure that v and t are interfaces + V, _ := v.Underlying().(*types.Interface) + T, _ := t.Underlying().(*types.Interface) + if V == nil || T == nil { + return nil + } + if f, wrongType := types.MissingMethod(V, T, false); wrongType { + return f + } + return nil +} + +func run(pass *analysis.Pass) (interface{}, error) { + inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + nodeFilter := []ast.Node{ + (*ast.TypeAssertExpr)(nil), + (*ast.TypeSwitchStmt)(nil), + } + inspect.Preorder(nodeFilter, func(n ast.Node) { + var ( + assert *ast.TypeAssertExpr // v.(T) expression + targets []ast.Expr // interfaces T in v.(T) + ) + switch n := n.(type) { + case *ast.TypeAssertExpr: + // take care of v.(type) in *ast.TypeSwitchStmt + if n.Type == nil { + return + } + assert = n + targets = append(targets, n.Type) + case *ast.TypeSwitchStmt: + // retrieve type assertion from type switch's 'assign' field + switch t := n.Assign.(type) { + case *ast.ExprStmt: + assert = t.X.(*ast.TypeAssertExpr) + case *ast.AssignStmt: + assert = t.Rhs[0].(*ast.TypeAssertExpr) + } + // gather target types from case clauses + for _, c := range n.Body.List { + targets = append(targets, c.(*ast.CaseClause).List...) + } + } + V := pass.TypesInfo.TypeOf(assert.X) + for _, target := range targets { + T := pass.TypesInfo.TypeOf(target) + if f := assertableTo(V, T); f != nil { + pass.Reportf( + target.Pos(), + "impossible type assertion: no type can implement both %v and %v (conflicting types for %v method)", + V, T, f.Name(), + ) + } + } + }) + return nil, nil +} diff --git a/vendor/golang.org/x/tools/go/analysis/passes/inspect/inspect.go b/vendor/golang.org/x/tools/go/analysis/passes/inspect/inspect.go new file mode 100644 index 000000000..4bb652a72 --- /dev/null +++ b/vendor/golang.org/x/tools/go/analysis/passes/inspect/inspect.go @@ -0,0 +1,49 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package inspect defines an Analyzer that provides an AST inspector +// (golang.org/x/tools/go/ast/inspector.Inspector) for the syntax trees +// of a package. It is only a building block for other analyzers. 
+// +// Example of use in another analysis: +// +// import ( +// "golang.org/x/tools/go/analysis" +// "golang.org/x/tools/go/analysis/passes/inspect" +// "golang.org/x/tools/go/ast/inspector" +// ) +// +// var Analyzer = &analysis.Analyzer{ +// ... +// Requires: []*analysis.Analyzer{inspect.Analyzer}, +// } +// +// func run(pass *analysis.Pass) (interface{}, error) { +// inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) +// inspect.Preorder(nil, func(n ast.Node) { +// ... +// }) +// return nil +// } +// +package inspect + +import ( + "reflect" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/ast/inspector" +) + +var Analyzer = &analysis.Analyzer{ + Name: "inspect", + Doc: "optimize AST traversal for later passes", + Run: run, + RunDespiteErrors: true, + ResultType: reflect.TypeOf(new(inspector.Inspector)), +} + +func run(pass *analysis.Pass) (interface{}, error) { + return inspector.New(pass.Files), nil +} diff --git a/vendor/golang.org/x/tools/go/analysis/passes/internal/analysisutil/util.go b/vendor/golang.org/x/tools/go/analysis/passes/internal/analysisutil/util.go new file mode 100644 index 000000000..ac37e4784 --- /dev/null +++ b/vendor/golang.org/x/tools/go/analysis/passes/internal/analysisutil/util.go @@ -0,0 +1,120 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package analysisutil defines various helper functions +// used by two or more packages beneath go/analysis. +package analysisutil + +import ( + "bytes" + "go/ast" + "go/printer" + "go/token" + "go/types" + "io/ioutil" +) + +// Format returns a string representation of the expression. +func Format(fset *token.FileSet, x ast.Expr) string { + var b bytes.Buffer + printer.Fprint(&b, fset, x) + return b.String() +} + +// HasSideEffects reports whether evaluation of e has side effects. +func HasSideEffects(info *types.Info, e ast.Expr) bool { + safe := true + ast.Inspect(e, func(node ast.Node) bool { + switch n := node.(type) { + case *ast.CallExpr: + typVal := info.Types[n.Fun] + switch { + case typVal.IsType(): + // Type conversion, which is safe. + case typVal.IsBuiltin(): + // Builtin func, conservatively assumed to not + // be safe for now. + safe = false + return false + default: + // A non-builtin func or method call. + // Conservatively assume that all of them have + // side effects for now. + safe = false + return false + } + case *ast.UnaryExpr: + if n.Op == token.ARROW { + safe = false + return false + } + } + return true + }) + return !safe +} + +// Unparen returns e with any enclosing parentheses stripped. +func Unparen(e ast.Expr) ast.Expr { + for { + p, ok := e.(*ast.ParenExpr) + if !ok { + return e + } + e = p.X + } +} + +// ReadFile reads a file and adds it to the FileSet +// so that we can report errors against it using lineStart. +func ReadFile(fset *token.FileSet, filename string) ([]byte, *token.File, error) { + content, err := ioutil.ReadFile(filename) + if err != nil { + return nil, nil, err + } + tf := fset.AddFile(filename, -1, len(content)) + tf.SetLinesForContent(content) + return content, tf, nil +} + +// LineStart returns the position of the start of the specified line +// within file f, or NoPos if there is no line of that number. +func LineStart(f *token.File, line int) token.Pos { + // Use binary search to find the start offset of this line. 
+ // + // TODO(adonovan): eventually replace this function with the + // simpler and more efficient (*go/token.File).LineStart, added + // in go1.12. + + min := 0 // inclusive + max := f.Size() // exclusive + for { + offset := (min + max) / 2 + pos := f.Pos(offset) + posn := f.Position(pos) + if posn.Line == line { + return pos - (token.Pos(posn.Column) - 1) + } + + if min+1 >= max { + return token.NoPos + } + + if posn.Line < line { + min = offset + } else { + max = offset + } + } +} + +// Imports returns true if path is imported by pkg. +func Imports(pkg *types.Package, path string) bool { + for _, imp := range pkg.Imports() { + if imp.Path() == path { + return true + } + } + return false +} diff --git a/vendor/golang.org/x/tools/go/analysis/passes/loopclosure/loopclosure.go b/vendor/golang.org/x/tools/go/analysis/passes/loopclosure/loopclosure.go new file mode 100644 index 000000000..3ea91574d --- /dev/null +++ b/vendor/golang.org/x/tools/go/analysis/passes/loopclosure/loopclosure.go @@ -0,0 +1,165 @@ +// Copyright 2012 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package loopclosure defines an Analyzer that checks for references to +// enclosing loop variables from within nested functions. +package loopclosure + +import ( + "go/ast" + "go/types" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/ast/inspector" + "golang.org/x/tools/go/types/typeutil" +) + +const Doc = `check references to loop variables from within nested functions + +This analyzer checks for references to loop variables from within a +function literal inside the loop body. It checks only instances where +the function literal is called in a defer or go statement that is the +last statement in the loop body, as otherwise we would need whole +program analysis. + +For example: + + for i, v := range s { + go func() { + println(i, v) // not what you might expect + }() + } + +See: https://golang.org/doc/go_faq.html#closures_and_goroutines` + +var Analyzer = &analysis.Analyzer{ + Name: "loopclosure", + Doc: Doc, + Requires: []*analysis.Analyzer{inspect.Analyzer}, + Run: run, +} + +func run(pass *analysis.Pass) (interface{}, error) { + inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + + nodeFilter := []ast.Node{ + (*ast.RangeStmt)(nil), + (*ast.ForStmt)(nil), + } + inspect.Preorder(nodeFilter, func(n ast.Node) { + // Find the variables updated by the loop statement. + var vars []*ast.Ident + addVar := func(expr ast.Expr) { + if id, ok := expr.(*ast.Ident); ok { + vars = append(vars, id) + } + } + var body *ast.BlockStmt + switch n := n.(type) { + case *ast.RangeStmt: + body = n.Body + addVar(n.Key) + addVar(n.Value) + case *ast.ForStmt: + body = n.Body + switch post := n.Post.(type) { + case *ast.AssignStmt: + // e.g. for p = head; p != nil; p = p.next + for _, lhs := range post.Lhs { + addVar(lhs) + } + case *ast.IncDecStmt: + // e.g. for i := 0; i < n; i++ + addVar(post.X) + } + } + if vars == nil { + return + } + + // Inspect a go or defer statement + // if it's the last one in the loop body. + // (We give up if there are following statements, + // because it's hard to prove go isn't followed by wait, + // or defer by return.) + if len(body.List) == 0 { + return + } + // The function invoked in the last return statement. 
+ var fun ast.Expr + switch s := body.List[len(body.List)-1].(type) { + case *ast.GoStmt: + fun = s.Call.Fun + case *ast.DeferStmt: + fun = s.Call.Fun + case *ast.ExprStmt: // check for errgroup.Group.Go() + if call, ok := s.X.(*ast.CallExpr); ok { + fun = goInvokes(pass.TypesInfo, call) + } + } + lit, ok := fun.(*ast.FuncLit) + if !ok { + return + } + ast.Inspect(lit.Body, func(n ast.Node) bool { + id, ok := n.(*ast.Ident) + if !ok || id.Obj == nil { + return true + } + if pass.TypesInfo.Types[id].Type == nil { + // Not referring to a variable (e.g. struct field name) + return true + } + for _, v := range vars { + if v.Obj == id.Obj { + pass.ReportRangef(id, "loop variable %s captured by func literal", + id.Name) + } + } + return true + }) + }) + return nil, nil +} + +// goInvokes returns a function expression that would be called asynchronously +// (but not awaited) in another goroutine as a consequence of the call. +// For example, given the g.Go call below, it returns the function literal expression. +// +// import "sync/errgroup" +// var g errgroup.Group +// g.Go(func() error { ... }) +// +// Currently only "golang.org/x/sync/errgroup.Group()" is considered. +func goInvokes(info *types.Info, call *ast.CallExpr) ast.Expr { + f := typeutil.StaticCallee(info, call) + // Note: Currently only supports: golang.org/x/sync/errgroup.Go. + if f == nil || f.Name() != "Go" { + return nil + } + recv := f.Type().(*types.Signature).Recv() + if recv == nil { + return nil + } + rtype, ok := recv.Type().(*types.Pointer) + if !ok { + return nil + } + named, ok := rtype.Elem().(*types.Named) + if !ok { + return nil + } + if named.Obj().Name() != "Group" { + return nil + } + pkg := f.Pkg() + if pkg == nil { + return nil + } + if pkg.Path() != "golang.org/x/sync/errgroup" { + return nil + } + return call.Args[0] +} diff --git a/vendor/golang.org/x/tools/go/analysis/passes/lostcancel/lostcancel.go b/vendor/golang.org/x/tools/go/analysis/passes/lostcancel/lostcancel.go new file mode 100644 index 000000000..de6f840f6 --- /dev/null +++ b/vendor/golang.org/x/tools/go/analysis/passes/lostcancel/lostcancel.go @@ -0,0 +1,330 @@ +// Copyright 2016 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package lostcancel defines an Analyzer that checks for failure to +// call a context cancellation function. +package lostcancel + +import ( + "fmt" + "go/ast" + "go/types" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/ctrlflow" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/ast/inspector" + "golang.org/x/tools/go/cfg" +) + +const Doc = `check cancel func returned by context.WithCancel is called + +The cancellation function returned by context.WithCancel, WithTimeout, +and WithDeadline must be called or the new context will remain live +until its parent context is cancelled. +(The background context is never cancelled.)` + +var Analyzer = &analysis.Analyzer{ + Name: "lostcancel", + Doc: Doc, + Run: run, + Requires: []*analysis.Analyzer{ + inspect.Analyzer, + ctrlflow.Analyzer, + }, +} + +const debug = false + +var contextPackage = "context" + +// checkLostCancel reports a failure to the call the cancel function +// returned by context.WithCancel, either because the variable was +// assigned to the blank identifier, or because there exists a +// control-flow path from the call to a return statement and that path +// does not "use" the cancel function. 
Any reference to the variable +// counts as a use, even within a nested function literal. +// If the variable's scope is larger than the function +// containing the assignment, we assume that other uses exist. +// +// checkLostCancel analyzes a single named or literal function. +func run(pass *analysis.Pass) (interface{}, error) { + // Fast path: bypass check if file doesn't use context.WithCancel. + if !hasImport(pass.Pkg, contextPackage) { + return nil, nil + } + + // Call runFunc for each Func{Decl,Lit}. + inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + nodeTypes := []ast.Node{ + (*ast.FuncLit)(nil), + (*ast.FuncDecl)(nil), + } + inspect.Preorder(nodeTypes, func(n ast.Node) { + runFunc(pass, n) + }) + return nil, nil +} + +func runFunc(pass *analysis.Pass, node ast.Node) { + // Find scope of function node + var funcScope *types.Scope + switch v := node.(type) { + case *ast.FuncLit: + funcScope = pass.TypesInfo.Scopes[v.Type] + case *ast.FuncDecl: + funcScope = pass.TypesInfo.Scopes[v.Type] + } + + // Maps each cancel variable to its defining ValueSpec/AssignStmt. + cancelvars := make(map[*types.Var]ast.Node) + + // TODO(adonovan): opt: refactor to make a single pass + // over the AST using inspect.WithStack and node types + // {FuncDecl,FuncLit,CallExpr,SelectorExpr}. + + // Find the set of cancel vars to analyze. + stack := make([]ast.Node, 0, 32) + ast.Inspect(node, func(n ast.Node) bool { + switch n.(type) { + case *ast.FuncLit: + if len(stack) > 0 { + return false // don't stray into nested functions + } + case nil: + stack = stack[:len(stack)-1] // pop + return true + } + stack = append(stack, n) // push + + // Look for [{AssignStmt,ValueSpec} CallExpr SelectorExpr]: + // + // ctx, cancel := context.WithCancel(...) + // ctx, cancel = context.WithCancel(...) + // var ctx, cancel = context.WithCancel(...) + // + if !isContextWithCancel(pass.TypesInfo, n) || !isCall(stack[len(stack)-2]) { + return true + } + var id *ast.Ident // id of cancel var + stmt := stack[len(stack)-3] + switch stmt := stmt.(type) { + case *ast.ValueSpec: + if len(stmt.Names) > 1 { + id = stmt.Names[1] + } + case *ast.AssignStmt: + if len(stmt.Lhs) > 1 { + id, _ = stmt.Lhs[1].(*ast.Ident) + } + } + if id != nil { + if id.Name == "_" { + pass.ReportRangef(id, + "the cancel function returned by context.%s should be called, not discarded, to avoid a context leak", + n.(*ast.SelectorExpr).Sel.Name) + } else if v, ok := pass.TypesInfo.Uses[id].(*types.Var); ok { + // If the cancel variable is defined outside function scope, + // do not analyze it. + if funcScope.Contains(v.Pos()) { + cancelvars[v] = stmt + } + } else if v, ok := pass.TypesInfo.Defs[id].(*types.Var); ok { + cancelvars[v] = stmt + } + } + return true + }) + + if len(cancelvars) == 0 { + return // no need to inspect CFG + } + + // Obtain the CFG. + cfgs := pass.ResultOf[ctrlflow.Analyzer].(*ctrlflow.CFGs) + var g *cfg.CFG + var sig *types.Signature + switch node := node.(type) { + case *ast.FuncDecl: + sig, _ = pass.TypesInfo.Defs[node.Name].Type().(*types.Signature) + if node.Name.Name == "main" && sig.Recv() == nil && pass.Pkg.Name() == "main" { + // Returning from main.main terminates the process, + // so there's no need to cancel contexts. + return + } + g = cfgs.FuncDecl(node) + + case *ast.FuncLit: + sig, _ = pass.TypesInfo.Types[node.Type].Type.(*types.Signature) + g = cfgs.FuncLit(node) + } + if sig == nil { + return // missing type information + } + + // Print CFG. 
+ if debug { + fmt.Println(g.Format(pass.Fset)) + } + + // Examine the CFG for each variable in turn. + // (It would be more efficient to analyze all cancelvars in a + // single pass over the AST, but seldom is there more than one.) + for v, stmt := range cancelvars { + if ret := lostCancelPath(pass, g, v, stmt, sig); ret != nil { + lineno := pass.Fset.Position(stmt.Pos()).Line + pass.ReportRangef(stmt, "the %s function is not used on all paths (possible context leak)", v.Name()) + pass.ReportRangef(ret, "this return statement may be reached without using the %s var defined on line %d", v.Name(), lineno) + } + } +} + +func isCall(n ast.Node) bool { _, ok := n.(*ast.CallExpr); return ok } + +func hasImport(pkg *types.Package, path string) bool { + for _, imp := range pkg.Imports() { + if imp.Path() == path { + return true + } + } + return false +} + +// isContextWithCancel reports whether n is one of the qualified identifiers +// context.With{Cancel,Timeout,Deadline}. +func isContextWithCancel(info *types.Info, n ast.Node) bool { + sel, ok := n.(*ast.SelectorExpr) + if !ok { + return false + } + switch sel.Sel.Name { + case "WithCancel", "WithTimeout", "WithDeadline": + default: + return false + } + if x, ok := sel.X.(*ast.Ident); ok { + if pkgname, ok := info.Uses[x].(*types.PkgName); ok { + return pkgname.Imported().Path() == contextPackage + } + // Import failed, so we can't check package path. + // Just check the local package name (heuristic). + return x.Name == "context" + } + return false +} + +// lostCancelPath finds a path through the CFG, from stmt (which defines +// the 'cancel' variable v) to a return statement, that doesn't "use" v. +// If it finds one, it returns the return statement (which may be synthetic). +// sig is the function's type, if known. +func lostCancelPath(pass *analysis.Pass, g *cfg.CFG, v *types.Var, stmt ast.Node, sig *types.Signature) *ast.ReturnStmt { + vIsNamedResult := sig != nil && tupleContains(sig.Results(), v) + + // uses reports whether stmts contain a "use" of variable v. + uses := func(pass *analysis.Pass, v *types.Var, stmts []ast.Node) bool { + found := false + for _, stmt := range stmts { + ast.Inspect(stmt, func(n ast.Node) bool { + switch n := n.(type) { + case *ast.Ident: + if pass.TypesInfo.Uses[n] == v { + found = true + } + case *ast.ReturnStmt: + // A naked return statement counts as a use + // of the named result variables. + if n.Results == nil && vIsNamedResult { + found = true + } + } + return !found + }) + } + return found + } + + // blockUses computes "uses" for each block, caching the result. + memo := make(map[*cfg.Block]bool) + blockUses := func(pass *analysis.Pass, v *types.Var, b *cfg.Block) bool { + res, ok := memo[b] + if !ok { + res = uses(pass, v, b.Nodes) + memo[b] = res + } + return res + } + + // Find the var's defining block in the CFG, + // plus the rest of the statements of that block. + var defblock *cfg.Block + var rest []ast.Node +outer: + for _, b := range g.Blocks { + for i, n := range b.Nodes { + if n == stmt { + defblock = b + rest = b.Nodes[i+1:] + break outer + } + } + } + if defblock == nil { + panic("internal error: can't find defining block for cancel var") + } + + // Is v "used" in the remainder of its defining block? + if uses(pass, v, rest) { + return nil + } + + // Does the defining block return without using v? + if ret := defblock.Return(); ret != nil { + return ret + } + + // Search the CFG depth-first for a path, from defblock to a + // return block, in which v is never "used". 
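+	// For instance (illustrative sketch, not from the upstream source):
+	//
+	//	ctx, cancel := context.WithCancel(ctx)
+	//	if err != nil {
+	//		return err // cancel is never used on this path
+	//	}
+	//	defer cancel()
+	//
+	// The early return is reachable without any use of cancel, so it is reported.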
+ seen := make(map[*cfg.Block]bool) + var search func(blocks []*cfg.Block) *ast.ReturnStmt + search = func(blocks []*cfg.Block) *ast.ReturnStmt { + for _, b := range blocks { + if seen[b] { + continue + } + seen[b] = true + + // Prune the search if the block uses v. + if blockUses(pass, v, b) { + continue + } + + // Found path to return statement? + if ret := b.Return(); ret != nil { + if debug { + fmt.Printf("found path to return in block %s\n", b) + } + return ret // found + } + + // Recur + if ret := search(b.Succs); ret != nil { + if debug { + fmt.Printf(" from block %s\n", b) + } + return ret + } + } + return nil + } + return search(defblock.Succs) +} + +func tupleContains(tuple *types.Tuple, v *types.Var) bool { + for i := 0; i < tuple.Len(); i++ { + if tuple.At(i) == v { + return true + } + } + return false +} diff --git a/vendor/golang.org/x/tools/go/analysis/passes/nilfunc/nilfunc.go b/vendor/golang.org/x/tools/go/analysis/passes/nilfunc/nilfunc.go new file mode 100644 index 000000000..cd42c9897 --- /dev/null +++ b/vendor/golang.org/x/tools/go/analysis/passes/nilfunc/nilfunc.go @@ -0,0 +1,74 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package nilfunc defines an Analyzer that checks for useless +// comparisons against nil. +package nilfunc + +import ( + "go/ast" + "go/token" + "go/types" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/ast/inspector" +) + +const Doc = `check for useless comparisons between functions and nil + +A useless comparison is one like f == nil as opposed to f() == nil.` + +var Analyzer = &analysis.Analyzer{ + Name: "nilfunc", + Doc: Doc, + Requires: []*analysis.Analyzer{inspect.Analyzer}, + Run: run, +} + +func run(pass *analysis.Pass) (interface{}, error) { + inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + + nodeFilter := []ast.Node{ + (*ast.BinaryExpr)(nil), + } + inspect.Preorder(nodeFilter, func(n ast.Node) { + e := n.(*ast.BinaryExpr) + + // Only want == or != comparisons. + if e.Op != token.EQL && e.Op != token.NEQ { + return + } + + // Only want comparisons with a nil identifier on one side. + var e2 ast.Expr + switch { + case pass.TypesInfo.Types[e.X].IsNil(): + e2 = e.Y + case pass.TypesInfo.Types[e.Y].IsNil(): + e2 = e.X + default: + return + } + + // Only want identifiers or selector expressions. + var obj types.Object + switch v := e2.(type) { + case *ast.Ident: + obj = pass.TypesInfo.Uses[v] + case *ast.SelectorExpr: + obj = pass.TypesInfo.Uses[v.Sel] + default: + return + } + + // Only want functions. + if _, ok := obj.(*types.Func); !ok { + return + } + + pass.ReportRangef(e, "comparison of function %v %v nil is always %v", obj.Name(), e.Op, e.Op == token.NEQ) + }) + return nil, nil +} diff --git a/vendor/golang.org/x/tools/go/analysis/passes/nilness/nilness.go b/vendor/golang.org/x/tools/go/analysis/passes/nilness/nilness.go new file mode 100644 index 000000000..f0d2c7edf --- /dev/null +++ b/vendor/golang.org/x/tools/go/analysis/passes/nilness/nilness.go @@ -0,0 +1,354 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package nilness inspects the control-flow graph of an SSA function +// and reports errors such as nil pointer dereferences and degenerate +// nil pointer comparisons. 
+package nilness + +import ( + "fmt" + "go/token" + "go/types" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/buildssa" + "golang.org/x/tools/go/ssa" +) + +const Doc = `check for redundant or impossible nil comparisons + +The nilness checker inspects the control-flow graph of each function in +a package and reports nil pointer dereferences, degenerate nil +pointers, and panics with nil values. A degenerate comparison is of the form +x==nil or x!=nil where x is statically known to be nil or non-nil. These are +often a mistake, especially in control flow related to errors. Panics with nil +values are checked because they are not detectable by + + if r := recover(); r != nil { + +This check reports conditions such as: + + if f == nil { // impossible condition (f is a function) + } + +and: + + p := &v + ... + if p != nil { // tautological condition + } + +and: + + if p == nil { + print(*p) // nil dereference + } + +and: + + if p == nil { + panic(p) + } +` + +var Analyzer = &analysis.Analyzer{ + Name: "nilness", + Doc: Doc, + Run: run, + Requires: []*analysis.Analyzer{buildssa.Analyzer}, +} + +func run(pass *analysis.Pass) (interface{}, error) { + ssainput := pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA) + for _, fn := range ssainput.SrcFuncs { + runFunc(pass, fn) + } + return nil, nil +} + +func runFunc(pass *analysis.Pass, fn *ssa.Function) { + reportf := func(category string, pos token.Pos, format string, args ...interface{}) { + pass.Report(analysis.Diagnostic{ + Pos: pos, + Category: category, + Message: fmt.Sprintf(format, args...), + }) + } + + // notNil reports an error if v is provably nil. + notNil := func(stack []fact, instr ssa.Instruction, v ssa.Value, descr string) { + if nilnessOf(stack, v) == isnil { + reportf("nilderef", instr.Pos(), "nil dereference in "+descr) + } + } + + // visit visits reachable blocks of the CFG in dominance order, + // maintaining a stack of dominating nilness facts. + // + // By traversing the dom tree, we can pop facts off the stack as + // soon as we've visited a subtree. Had we traversed the CFG, + // we would need to retain the set of facts for each block. + seen := make([]bool, len(fn.Blocks)) // seen[i] means visit should ignore block i + var visit func(b *ssa.BasicBlock, stack []fact) + visit = func(b *ssa.BasicBlock, stack []fact) { + if seen[b.Index] { + return + } + seen[b.Index] = true + + // Report nil dereferences. + for _, instr := range b.Instrs { + switch instr := instr.(type) { + case ssa.CallInstruction: + notNil(stack, instr, instr.Common().Value, + instr.Common().Description()) + case *ssa.FieldAddr: + notNil(stack, instr, instr.X, "field selection") + case *ssa.IndexAddr: + notNil(stack, instr, instr.X, "index operation") + case *ssa.MapUpdate: + notNil(stack, instr, instr.Map, "map update") + case *ssa.Slice: + // A nilcheck occurs in ptr[:] iff ptr is a pointer to an array. 
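+				// e.g. (illustrative): given var a *[4]int, the expression a[:]
+				// implicitly dereferences a, so a nil dereference is reported
+				// here when a is provably nil.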
+ if _, ok := instr.X.Type().Underlying().(*types.Pointer); ok { + notNil(stack, instr, instr.X, "slice operation") + } + case *ssa.Store: + notNil(stack, instr, instr.Addr, "store") + case *ssa.TypeAssert: + if !instr.CommaOk { + notNil(stack, instr, instr.X, "type assertion") + } + case *ssa.UnOp: + if instr.Op == token.MUL { // *X + notNil(stack, instr, instr.X, "load") + } + } + } + + // Look for panics with nil value + for _, instr := range b.Instrs { + switch instr := instr.(type) { + case *ssa.Panic: + if nilnessOf(stack, instr.X) == isnil { + reportf("nilpanic", instr.Pos(), "panic with nil value") + } + } + } + + // For nil comparison blocks, report an error if the condition + // is degenerate, and push a nilness fact on the stack when + // visiting its true and false successor blocks. + if binop, tsucc, fsucc := eq(b); binop != nil { + xnil := nilnessOf(stack, binop.X) + ynil := nilnessOf(stack, binop.Y) + + if ynil != unknown && xnil != unknown && (xnil == isnil || ynil == isnil) { + // Degenerate condition: + // the nilness of both operands is known, + // and at least one of them is nil. + var adj string + if (xnil == ynil) == (binop.Op == token.EQL) { + adj = "tautological" + } else { + adj = "impossible" + } + reportf("cond", binop.Pos(), "%s condition: %s %s %s", adj, xnil, binop.Op, ynil) + + // If tsucc's or fsucc's sole incoming edge is impossible, + // it is unreachable. Prune traversal of it and + // all the blocks it dominates. + // (We could be more precise with full dataflow + // analysis of control-flow joins.) + var skip *ssa.BasicBlock + if xnil == ynil { + skip = fsucc + } else { + skip = tsucc + } + for _, d := range b.Dominees() { + if d == skip && len(d.Preds) == 1 { + continue + } + visit(d, stack) + } + return + } + + // "if x == nil" or "if nil == y" condition; x, y are unknown. + if xnil == isnil || ynil == isnil { + var newFacts facts + if xnil == isnil { + // x is nil, y is unknown: + // t successor learns y is nil. + newFacts = expandFacts(fact{binop.Y, isnil}) + } else { + // x is nil, y is unknown: + // t successor learns x is nil. + newFacts = expandFacts(fact{binop.X, isnil}) + } + + for _, d := range b.Dominees() { + // Successor blocks learn a fact + // only at non-critical edges. + // (We could do be more precise with full dataflow + // analysis of control-flow joins.) + s := stack + if len(d.Preds) == 1 { + if d == tsucc { + s = append(s, newFacts...) + } else if d == fsucc { + s = append(s, newFacts.negate()...) + } + } + visit(d, s) + } + return + } + } + + for _, d := range b.Dominees() { + visit(d, stack) + } + } + + // Visit the entry block. No need to visit fn.Recover. + if fn.Blocks != nil { + visit(fn.Blocks[0], make([]fact, 0, 20)) // 20 is plenty + } +} + +// A fact records that a block is dominated +// by the condition v == nil or v != nil. +type fact struct { + value ssa.Value + nilness nilness +} + +func (f fact) negate() fact { return fact{f.value, -f.nilness} } + +type nilness int + +const ( + isnonnil = -1 + unknown nilness = 0 + isnil = 1 +) + +var nilnessStrings = []string{"non-nil", "unknown", "nil"} + +func (n nilness) String() string { return nilnessStrings[n+1] } + +// nilnessOf reports whether v is definitely nil, definitely not nil, +// or unknown given the dominating stack of facts. +func nilnessOf(stack []fact, v ssa.Value) nilness { + switch v := v.(type) { + // unwrap ChangeInterface values recursively, to detect if underlying + // values have any facts recorded or are otherwise known with regard to nilness. 
+ // + // This work must be in addition to expanding facts about + // ChangeInterfaces during inference/fact gathering because this covers + // cases where the nilness of a value is intrinsic, rather than based + // on inferred facts, such as a zero value interface variable. That + // said, this work alone would only inform us when facts are about + // underlying values, rather than outer values, when the analysis is + // transitive in both directions. + case *ssa.ChangeInterface: + if underlying := nilnessOf(stack, v.X); underlying != unknown { + return underlying + } + } + + // Is value intrinsically nil or non-nil? + switch v := v.(type) { + case *ssa.Alloc, + *ssa.FieldAddr, + *ssa.FreeVar, + *ssa.Function, + *ssa.Global, + *ssa.IndexAddr, + *ssa.MakeChan, + *ssa.MakeClosure, + *ssa.MakeInterface, + *ssa.MakeMap, + *ssa.MakeSlice: + return isnonnil + case *ssa.Const: + if v.IsNil() { + return isnil + } else { + return isnonnil + } + } + + // Search dominating control-flow facts. + for _, f := range stack { + if f.value == v { + return f.nilness + } + } + return unknown +} + +// If b ends with an equality comparison, eq returns the operation and +// its true (equal) and false (not equal) successors. +func eq(b *ssa.BasicBlock) (op *ssa.BinOp, tsucc, fsucc *ssa.BasicBlock) { + if If, ok := b.Instrs[len(b.Instrs)-1].(*ssa.If); ok { + if binop, ok := If.Cond.(*ssa.BinOp); ok { + switch binop.Op { + case token.EQL: + return binop, b.Succs[0], b.Succs[1] + case token.NEQ: + return binop, b.Succs[1], b.Succs[0] + } + } + } + return nil, nil, nil +} + +// expandFacts takes a single fact and returns the set of facts that can be +// known about it or any of its related values. Some operations, like +// ChangeInterface, have transitive nilness, such that if you know the +// underlying value is nil, you also know the value itself is nil, and vice +// versa. This operation allows callers to match on any of the related values +// in analyses, rather than just the one form of the value that happend to +// appear in a comparison. +// +// This work must be in addition to unwrapping values within nilnessOf because +// while this work helps give facts about transitively known values based on +// inferred facts, the recursive check within nilnessOf covers cases where +// nilness facts are intrinsic to the underlying value, such as a zero value +// interface variables. +// +// ChangeInterface is the only expansion currently supported, but others, like +// Slice, could be added. At this time, this tool does not check slice +// operations in a way this expansion could help. See +// https://play.golang.org/p/mGqXEp7w4fR for an example. +func expandFacts(f fact) []fact { + ff := []fact{f} + +Loop: + for { + switch v := f.value.(type) { + case *ssa.ChangeInterface: + f = fact{v.X, f.nilness} + ff = append(ff, f) + default: + break Loop + } + } + + return ff +} + +type facts []fact + +func (ff facts) negate() facts { + nn := make([]fact, len(ff)) + for i, f := range ff { + nn[i] = f.negate() + } + return nn +} diff --git a/vendor/golang.org/x/tools/go/analysis/passes/pkgfact/pkgfact.go b/vendor/golang.org/x/tools/go/analysis/passes/pkgfact/pkgfact.go new file mode 100644 index 000000000..2262fc4f1 --- /dev/null +++ b/vendor/golang.org/x/tools/go/analysis/passes/pkgfact/pkgfact.go @@ -0,0 +1,127 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +// The pkgfact package is a demonstration and test of the package fact +// mechanism. +// +// The output of the pkgfact analysis is a set of key/values pairs +// gathered from the analyzed package and its imported dependencies. +// Each key/value pair comes from a top-level constant declaration +// whose name starts and ends with "_". For example: +// +// package p +// +// const _greeting_ = "hello" +// const _audience_ = "world" +// +// the pkgfact analysis output for package p would be: +// +// {"greeting": "hello", "audience": "world"}. +// +// In addition, the analysis reports a diagnostic at each import +// showing which key/value pairs it contributes. +package pkgfact + +import ( + "fmt" + "go/ast" + "go/token" + "go/types" + "reflect" + "sort" + "strings" + + "golang.org/x/tools/go/analysis" +) + +var Analyzer = &analysis.Analyzer{ + Name: "pkgfact", + Doc: "gather name/value pairs from constant declarations", + Run: run, + FactTypes: []analysis.Fact{new(pairsFact)}, + ResultType: reflect.TypeOf(map[string]string{}), +} + +// A pairsFact is a package-level fact that records +// an set of key=value strings accumulated from constant +// declarations in this package and its dependencies. +// Elements are ordered by keys, which are unique. +type pairsFact []string + +func (f *pairsFact) AFact() {} +func (f *pairsFact) String() string { return "pairs(" + strings.Join(*f, ", ") + ")" } + +func run(pass *analysis.Pass) (interface{}, error) { + result := make(map[string]string) + + // At each import, print the fact from the imported + // package and accumulate its information into the result. + // (Warning: accumulation leads to quadratic growth of work.) + doImport := func(spec *ast.ImportSpec) { + pkg := imported(pass.TypesInfo, spec) + var fact pairsFact + if pass.ImportPackageFact(pkg, &fact) { + for _, pair := range fact { + eq := strings.IndexByte(pair, '=') + result[pair[:eq]] = pair[1+eq:] + } + pass.ReportRangef(spec, "%s", strings.Join(fact, " ")) + } + } + + // At each "const _name_ = value", add a fact into env. + doConst := func(spec *ast.ValueSpec) { + if len(spec.Names) == len(spec.Values) { + for i := range spec.Names { + name := spec.Names[i].Name + if strings.HasPrefix(name, "_") && strings.HasSuffix(name, "_") { + + if key := strings.Trim(name, "_"); key != "" { + value := pass.TypesInfo.Types[spec.Values[i]].Value.String() + result[key] = value + } + } + } + } + } + + for _, f := range pass.Files { + for _, decl := range f.Decls { + if decl, ok := decl.(*ast.GenDecl); ok { + for _, spec := range decl.Specs { + switch decl.Tok { + case token.IMPORT: + doImport(spec.(*ast.ImportSpec)) + case token.CONST: + doConst(spec.(*ast.ValueSpec)) + } + } + } + } + } + + // Sort/deduplicate the result and save it as a package fact. 
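+	// For the package p shown in the package comment above, this produces
+	// the fact pairs(audience=world, greeting=hello) (illustrative).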
+ keys := make([]string, 0, len(result)) + for key := range result { + keys = append(keys, key) + } + sort.Strings(keys) + var fact pairsFact + for _, key := range keys { + fact = append(fact, fmt.Sprintf("%s=%s", key, result[key])) + } + if len(fact) > 0 { + pass.ExportPackageFact(&fact) + } + + return result, nil +} + +func imported(info *types.Info, spec *ast.ImportSpec) *types.Package { + obj, ok := info.Implicits[spec] + if !ok { + obj = info.Defs[spec.Name] // renaming import + } + return obj.(*types.PkgName).Imported() +} diff --git a/vendor/golang.org/x/tools/go/analysis/passes/printf/printf.go b/vendor/golang.org/x/tools/go/analysis/passes/printf/printf.go new file mode 100644 index 000000000..6589478af --- /dev/null +++ b/vendor/golang.org/x/tools/go/analysis/passes/printf/printf.go @@ -0,0 +1,1122 @@ +// Copyright 2010 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package printf defines an Analyzer that checks consistency +// of Printf format strings and arguments. +package printf + +import ( + "bytes" + "fmt" + "go/ast" + "go/constant" + "go/token" + "go/types" + "reflect" + "regexp" + "sort" + "strconv" + "strings" + "unicode/utf8" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/analysis/passes/internal/analysisutil" + "golang.org/x/tools/go/ast/inspector" + "golang.org/x/tools/go/types/typeutil" +) + +func init() { + Analyzer.Flags.Var(isPrint, "funcs", "comma-separated list of print function names to check") +} + +var Analyzer = &analysis.Analyzer{ + Name: "printf", + Doc: Doc, + Requires: []*analysis.Analyzer{inspect.Analyzer}, + Run: run, + ResultType: reflect.TypeOf((*Result)(nil)), + FactTypes: []analysis.Fact{new(isWrapper)}, +} + +const Doc = `check consistency of Printf format strings and arguments + +The check applies to known functions (for example, those in package fmt) +as well as any detected wrappers of known functions. + +A function that wants to avail itself of printf checking but is not +found by this analyzer's heuristics (for example, due to use of +dynamic calls) can insert a bogus call: + + if false { + _ = fmt.Sprintf(format, args...) // enable printf checking + } + +The -funcs flag specifies a comma-separated list of names of additional +known formatting functions or methods. If the name contains a period, +it must denote a specific function using one of the following forms: + + dir/pkg.Function + dir/pkg.Type.Method + (*dir/pkg.Type).Method + +Otherwise the name is interpreted as a case-insensitive unqualified +identifier such as "errorf". Either way, if a listed name ends in f, the +function is assumed to be Printf-like, taking a format string before the +argument list. Otherwise it is assumed to be Print-like, taking a list +of arguments with no format string. +` + +// Kind is a kind of fmt function behavior. +type Kind int + +const ( + KindNone Kind = iota // not a fmt wrapper function + KindPrint // function behaves like fmt.Print + KindPrintf // function behaves like fmt.Printf + KindErrorf // function behaves like fmt.Errorf +) + +func (kind Kind) String() string { + switch kind { + case KindPrint: + return "print" + case KindPrintf: + return "printf" + case KindErrorf: + return "errorf" + } + return "" +} + +// Result is the printf analyzer's result type. Clients may query the result +// to learn whether a function behaves like fmt.Print or fmt.Printf. 
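+//
+// A dependent analyzer would typically obtain it as (illustrative):
+//
+//	res := pass.ResultOf[printf.Analyzer].(*printf.Result)
+//	kind := res.Kind(fn) // fn is a *types.Func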
+type Result struct { + funcs map[*types.Func]Kind +} + +// Kind reports whether fn behaves like fmt.Print or fmt.Printf. +func (r *Result) Kind(fn *types.Func) Kind { + _, ok := isPrint[fn.FullName()] + if !ok { + // Next look up just "printf", for use with -printf.funcs. + _, ok = isPrint[strings.ToLower(fn.Name())] + } + if ok { + if strings.HasSuffix(fn.Name(), "f") { + return KindPrintf + } else { + return KindPrint + } + } + + return r.funcs[fn] +} + +// isWrapper is a fact indicating that a function is a print or printf wrapper. +type isWrapper struct{ Kind Kind } + +func (f *isWrapper) AFact() {} + +func (f *isWrapper) String() string { + switch f.Kind { + case KindPrintf: + return "printfWrapper" + case KindPrint: + return "printWrapper" + case KindErrorf: + return "errorfWrapper" + default: + return "unknownWrapper" + } +} + +func run(pass *analysis.Pass) (interface{}, error) { + res := &Result{ + funcs: make(map[*types.Func]Kind), + } + findPrintfLike(pass, res) + checkCall(pass) + return res, nil +} + +type printfWrapper struct { + obj *types.Func + fdecl *ast.FuncDecl + format *types.Var + args *types.Var + callers []printfCaller + failed bool // if true, not a printf wrapper +} + +type printfCaller struct { + w *printfWrapper + call *ast.CallExpr +} + +// maybePrintfWrapper decides whether decl (a declared function) may be a wrapper +// around a fmt.Printf or fmt.Print function. If so it returns a printfWrapper +// function describing the declaration. Later processing will analyze the +// graph of potential printf wrappers to pick out the ones that are true wrappers. +// A function may be a Printf or Print wrapper if its last argument is ...interface{}. +// If the next-to-last argument is a string, then this may be a Printf wrapper. +// Otherwise it may be a Print wrapper. +func maybePrintfWrapper(info *types.Info, decl ast.Decl) *printfWrapper { + // Look for functions with final argument type ...interface{}. + fdecl, ok := decl.(*ast.FuncDecl) + if !ok || fdecl.Body == nil { + return nil + } + fn, ok := info.Defs[fdecl.Name].(*types.Func) + // Type information may be incomplete. + if !ok { + return nil + } + + sig := fn.Type().(*types.Signature) + if !sig.Variadic() { + return nil // not variadic + } + + params := sig.Params() + nparams := params.Len() // variadic => nonzero + + args := params.At(nparams - 1) + iface, ok := args.Type().(*types.Slice).Elem().(*types.Interface) + if !ok || !iface.Empty() { + return nil // final (args) param is not ...interface{} + } + + // Is second last param 'format string'? + var format *types.Var + if nparams >= 2 { + if p := params.At(nparams - 2); p.Type() == types.Typ[types.String] { + format = p + } + } + + return &printfWrapper{ + obj: fn, + fdecl: fdecl, + format: format, + args: args, + } +} + +// findPrintfLike scans the entire package to find printf-like functions. +func findPrintfLike(pass *analysis.Pass, res *Result) (interface{}, error) { + // Gather potential wrappers and call graph between them. + byObj := make(map[*types.Func]*printfWrapper) + var wrappers []*printfWrapper + for _, file := range pass.Files { + for _, decl := range file.Decls { + w := maybePrintfWrapper(pass.TypesInfo, decl) + if w == nil { + continue + } + byObj[w.obj] = w + wrappers = append(wrappers, w) + } + } + + // Walk the graph to figure out which are really printf wrappers. + for _, w := range wrappers { + // Scan function for calls that could be to other printf-like functions. 
+ ast.Inspect(w.fdecl.Body, func(n ast.Node) bool { + if w.failed { + return false + } + + // TODO: Relax these checks; issue 26555. + if assign, ok := n.(*ast.AssignStmt); ok { + for _, lhs := range assign.Lhs { + if match(pass.TypesInfo, lhs, w.format) || + match(pass.TypesInfo, lhs, w.args) { + // Modifies the format + // string or args in + // some way, so not a + // simple wrapper. + w.failed = true + return false + } + } + } + if un, ok := n.(*ast.UnaryExpr); ok && un.Op == token.AND { + if match(pass.TypesInfo, un.X, w.format) || + match(pass.TypesInfo, un.X, w.args) { + // Taking the address of the + // format string or args, + // so not a simple wrapper. + w.failed = true + return false + } + } + + call, ok := n.(*ast.CallExpr) + if !ok || len(call.Args) == 0 || !match(pass.TypesInfo, call.Args[len(call.Args)-1], w.args) { + return true + } + + fn, kind := printfNameAndKind(pass, call) + if kind != 0 { + checkPrintfFwd(pass, w, call, kind, res) + return true + } + + // If the call is to another function in this package, + // maybe we will find out it is printf-like later. + // Remember this call for later checking. + if fn != nil && fn.Pkg() == pass.Pkg && byObj[fn] != nil { + callee := byObj[fn] + callee.callers = append(callee.callers, printfCaller{w, call}) + } + + return true + }) + } + return nil, nil +} + +func match(info *types.Info, arg ast.Expr, param *types.Var) bool { + id, ok := arg.(*ast.Ident) + return ok && info.ObjectOf(id) == param +} + +// checkPrintfFwd checks that a printf-forwarding wrapper is forwarding correctly. +// It diagnoses writing fmt.Printf(format, args) instead of fmt.Printf(format, args...). +func checkPrintfFwd(pass *analysis.Pass, w *printfWrapper, call *ast.CallExpr, kind Kind, res *Result) { + matched := kind == KindPrint || + kind != KindNone && len(call.Args) >= 2 && match(pass.TypesInfo, call.Args[len(call.Args)-2], w.format) + if !matched { + return + } + + if !call.Ellipsis.IsValid() { + typ, ok := pass.TypesInfo.Types[call.Fun].Type.(*types.Signature) + if !ok { + return + } + if len(call.Args) > typ.Params().Len() { + // If we're passing more arguments than what the + // print/printf function can take, adding an ellipsis + // would break the program. For example: + // + // func foo(arg1 string, arg2 ...interface{} { + // fmt.Printf("%s %v", arg1, arg2) + // } + return + } + desc := "printf" + if kind == KindPrint { + desc = "print" + } + pass.ReportRangef(call, "missing ... in args forwarded to %s-like function", desc) + return + } + fn := w.obj + var fact isWrapper + if !pass.ImportObjectFact(fn, &fact) { + fact.Kind = kind + pass.ExportObjectFact(fn, &fact) + res.funcs[fn] = kind + for _, caller := range w.callers { + checkPrintfFwd(pass, caller.w, caller.call, kind, res) + } + } +} + +// isPrint records the print functions. +// If a key ends in 'f' then it is assumed to be a formatted print. +// +// Keys are either values returned by (*types.Func).FullName, +// or case-insensitive identifiers such as "errorf". +// +// The -funcs flag adds to this set. +// +// The set below includes facts for many important standard library +// functions, even though the analysis is capable of deducing that, for +// example, fmt.Printf forwards to fmt.Fprintf. We avoid relying on the +// driver applying analyzers to standard packages because "go vet" does +// not do so with gccgo, and nor do some other build systems. +// TODO(adonovan): eliminate the redundant facts once this restriction +// is lifted. 
+// +var isPrint = stringSet{ + "fmt.Errorf": true, + "fmt.Fprint": true, + "fmt.Fprintf": true, + "fmt.Fprintln": true, + "fmt.Print": true, + "fmt.Printf": true, + "fmt.Println": true, + "fmt.Sprint": true, + "fmt.Sprintf": true, + "fmt.Sprintln": true, + + "runtime/trace.Logf": true, + + "log.Print": true, + "log.Printf": true, + "log.Println": true, + "log.Fatal": true, + "log.Fatalf": true, + "log.Fatalln": true, + "log.Panic": true, + "log.Panicf": true, + "log.Panicln": true, + "(*log.Logger).Fatal": true, + "(*log.Logger).Fatalf": true, + "(*log.Logger).Fatalln": true, + "(*log.Logger).Panic": true, + "(*log.Logger).Panicf": true, + "(*log.Logger).Panicln": true, + "(*log.Logger).Print": true, + "(*log.Logger).Printf": true, + "(*log.Logger).Println": true, + + "(*testing.common).Error": true, + "(*testing.common).Errorf": true, + "(*testing.common).Fatal": true, + "(*testing.common).Fatalf": true, + "(*testing.common).Log": true, + "(*testing.common).Logf": true, + "(*testing.common).Skip": true, + "(*testing.common).Skipf": true, + // *testing.T and B are detected by induction, but testing.TB is + // an interface and the inference can't follow dynamic calls. + "(testing.TB).Error": true, + "(testing.TB).Errorf": true, + "(testing.TB).Fatal": true, + "(testing.TB).Fatalf": true, + "(testing.TB).Log": true, + "(testing.TB).Logf": true, + "(testing.TB).Skip": true, + "(testing.TB).Skipf": true, +} + +// formatString returns the format string argument and its index within +// the given printf-like call expression. +// +// The last parameter before variadic arguments is assumed to be +// a format string. +// +// The first string literal or string constant is assumed to be a format string +// if the call's signature cannot be determined. +// +// If it cannot find any format string parameter, it returns ("", -1). +func formatString(pass *analysis.Pass, call *ast.CallExpr) (format string, idx int) { + typ := pass.TypesInfo.Types[call.Fun].Type + if typ != nil { + if sig, ok := typ.(*types.Signature); ok { + if !sig.Variadic() { + // Skip checking non-variadic functions. + return "", -1 + } + idx := sig.Params().Len() - 2 + if idx < 0 { + // Skip checking variadic functions without + // fixed arguments. + return "", -1 + } + s, ok := stringConstantArg(pass, call, idx) + if !ok { + // The last argument before variadic args isn't a string. + return "", -1 + } + return s, idx + } + } + + // Cannot determine call's signature. Fall back to scanning for the first + // string constant in the call. + for idx := range call.Args { + if s, ok := stringConstantArg(pass, call, idx); ok { + return s, idx + } + if pass.TypesInfo.Types[call.Args[idx]].Type == types.Typ[types.String] { + // Skip checking a call with a non-constant format + // string argument, since its contents are unavailable + // for validation. + return "", -1 + } + } + return "", -1 +} + +// stringConstantArg returns call's string constant argument at the index idx. +// +// ("", false) is returned if call's argument at the index idx isn't a string +// constant. +func stringConstantArg(pass *analysis.Pass, call *ast.CallExpr, idx int) (string, bool) { + if idx >= len(call.Args) { + return "", false + } + arg := call.Args[idx] + lit := pass.TypesInfo.Types[arg].Value + if lit != nil && lit.Kind() == constant.String { + return constant.StringVal(lit), true + } + return "", false +} + +// checkCall triggers the print-specific checks if the call invokes a print function. 
+func checkCall(pass *analysis.Pass) { + inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + nodeFilter := []ast.Node{ + (*ast.CallExpr)(nil), + } + inspect.Preorder(nodeFilter, func(n ast.Node) { + call := n.(*ast.CallExpr) + fn, kind := printfNameAndKind(pass, call) + switch kind { + case KindPrintf, KindErrorf: + checkPrintf(pass, kind, call, fn) + case KindPrint: + checkPrint(pass, call, fn) + } + }) +} + +func printfNameAndKind(pass *analysis.Pass, call *ast.CallExpr) (fn *types.Func, kind Kind) { + fn, _ = typeutil.Callee(pass.TypesInfo, call).(*types.Func) + if fn == nil { + return nil, 0 + } + + _, ok := isPrint[fn.FullName()] + if !ok { + // Next look up just "printf", for use with -printf.funcs. + _, ok = isPrint[strings.ToLower(fn.Name())] + } + if ok { + if fn.Name() == "Errorf" { + kind = KindErrorf + } else if strings.HasSuffix(fn.Name(), "f") { + kind = KindPrintf + } else { + kind = KindPrint + } + return fn, kind + } + + var fact isWrapper + if pass.ImportObjectFact(fn, &fact) { + return fn, fact.Kind + } + + return fn, KindNone +} + +// isFormatter reports whether t could satisfy fmt.Formatter. +// The only interface method to look for is "Format(State, rune)". +func isFormatter(typ types.Type) bool { + // If the type is an interface, the value it holds might satisfy fmt.Formatter. + if _, ok := typ.Underlying().(*types.Interface); ok { + return true + } + obj, _, _ := types.LookupFieldOrMethod(typ, false, nil, "Format") + fn, ok := obj.(*types.Func) + if !ok { + return false + } + sig := fn.Type().(*types.Signature) + return sig.Params().Len() == 2 && + sig.Results().Len() == 0 && + isNamed(sig.Params().At(0).Type(), "fmt", "State") && + types.Identical(sig.Params().At(1).Type(), types.Typ[types.Rune]) +} + +func isNamed(T types.Type, pkgpath, name string) bool { + named, ok := T.(*types.Named) + return ok && named.Obj().Pkg().Path() == pkgpath && named.Obj().Name() == name +} + +// formatState holds the parsed representation of a printf directive such as "%3.*[4]d". +// It is constructed by parsePrintfVerb. +type formatState struct { + verb rune // the format verb: 'd' for "%d" + format string // the full format directive from % through verb, "%.3d". + name string // Printf, Sprintf etc. + flags []byte // the list of # + etc. + argNums []int // the successive argument numbers that are consumed, adjusted to refer to actual arg in call + firstArg int // Index of first argument after the format in the Printf call. + // Used only during parse. + pass *analysis.Pass + call *ast.CallExpr + argNum int // Which argument we're expecting to format now. + hasIndex bool // Whether the argument is indexed. + indexPending bool // Whether we have an indexed argument that has not resolved. + nbytes int // number of bytes of the format string consumed. +} + +// checkPrintf checks a call to a formatted print routine such as Printf. +func checkPrintf(pass *analysis.Pass, kind Kind, call *ast.CallExpr, fn *types.Func) { + format, idx := formatString(pass, call) + if idx < 0 { + if false { + pass.Reportf(call.Lparen, "can't check non-constant format in call to %s", fn.FullName()) + } + return + } + + firstArg := idx + 1 // Arguments are immediately after format string. + if !strings.Contains(format, "%") { + if len(call.Args) > firstArg { + pass.Reportf(call.Lparen, "%s call has arguments but no formatting directives", fn.FullName()) + } + return + } + // Hard part: check formats against args. 
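+	// e.g. (illustrative): fmt.Printf("%d", "hello") is reported below
+	// because %d cannot format a string argument.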
+ argNum := firstArg + maxArgNum := firstArg + anyIndex := false + anyW := false + for i, w := 0, 0; i < len(format); i += w { + w = 1 + if format[i] != '%' { + continue + } + state := parsePrintfVerb(pass, call, fn.FullName(), format[i:], firstArg, argNum) + if state == nil { + return + } + w = len(state.format) + if !okPrintfArg(pass, call, state) { // One error per format is enough. + return + } + if state.hasIndex { + anyIndex = true + } + if state.verb == 'w' { + switch kind { + case KindNone, KindPrint: + pass.Reportf(call.Pos(), "%s does not support error-wrapping directive %%w", state.name) + return + case KindPrintf: + pass.Reportf(call.Pos(), "%s call has error-wrapping directive %%w, which is only supported for functions backed by fmt.Errorf", state.name) + return + } + if anyW { + pass.Reportf(call.Pos(), "%s call has more than one error-wrapping directive %%w", state.name) + return + } + anyW = true + } + if len(state.argNums) > 0 { + // Continue with the next sequential argument. + argNum = state.argNums[len(state.argNums)-1] + 1 + } + for _, n := range state.argNums { + if n >= maxArgNum { + maxArgNum = n + 1 + } + } + } + // Dotdotdot is hard. + if call.Ellipsis.IsValid() && maxArgNum >= len(call.Args)-1 { + return + } + // If any formats are indexed, extra arguments are ignored. + if anyIndex { + return + } + // There should be no leftover arguments. + if maxArgNum != len(call.Args) { + expect := maxArgNum - firstArg + numArgs := len(call.Args) - firstArg + pass.ReportRangef(call, "%s call needs %v but has %v", fn.FullName(), count(expect, "arg"), count(numArgs, "arg")) + } +} + +// parseFlags accepts any printf flags. +func (s *formatState) parseFlags() { + for s.nbytes < len(s.format) { + switch c := s.format[s.nbytes]; c { + case '#', '0', '+', '-', ' ': + s.flags = append(s.flags, c) + s.nbytes++ + default: + return + } + } +} + +// scanNum advances through a decimal number if present. +func (s *formatState) scanNum() { + for ; s.nbytes < len(s.format); s.nbytes++ { + c := s.format[s.nbytes] + if c < '0' || '9' < c { + return + } + } +} + +// parseIndex scans an index expression. It returns false if there is a syntax error. +func (s *formatState) parseIndex() bool { + if s.nbytes == len(s.format) || s.format[s.nbytes] != '[' { + return true + } + // Argument index present. + s.nbytes++ // skip '[' + start := s.nbytes + s.scanNum() + ok := true + if s.nbytes == len(s.format) || s.nbytes == start || s.format[s.nbytes] != ']' { + ok = false + s.nbytes = strings.Index(s.format, "]") + if s.nbytes < 0 { + s.pass.ReportRangef(s.call, "%s format %s is missing closing ]", s.name, s.format) + return false + } + } + arg32, err := strconv.ParseInt(s.format[start:s.nbytes], 10, 32) + if err != nil || !ok || arg32 <= 0 || arg32 > int64(len(s.call.Args)-s.firstArg) { + s.pass.ReportRangef(s.call, "%s format has invalid argument index [%s]", s.name, s.format[start:s.nbytes]) + return false + } + s.nbytes++ // skip ']' + arg := int(arg32) + arg += s.firstArg - 1 // We want to zero-index the actual arguments. + s.argNum = arg + s.hasIndex = true + s.indexPending = true + return true +} + +// parseNum scans a width or precision (or *). It returns false if there's a bad index expression. +func (s *formatState) parseNum() bool { + if s.nbytes < len(s.format) && s.format[s.nbytes] == '*' { + if s.indexPending { // Absorb it. 
+ s.indexPending = false + } + s.nbytes++ + s.argNums = append(s.argNums, s.argNum) + s.argNum++ + } else { + s.scanNum() + } + return true +} + +// parsePrecision scans for a precision. It returns false if there's a bad index expression. +func (s *formatState) parsePrecision() bool { + // If there's a period, there may be a precision. + if s.nbytes < len(s.format) && s.format[s.nbytes] == '.' { + s.flags = append(s.flags, '.') // Treat precision as a flag. + s.nbytes++ + if !s.parseIndex() { + return false + } + if !s.parseNum() { + return false + } + } + return true +} + +// parsePrintfVerb looks the formatting directive that begins the format string +// and returns a formatState that encodes what the directive wants, without looking +// at the actual arguments present in the call. The result is nil if there is an error. +func parsePrintfVerb(pass *analysis.Pass, call *ast.CallExpr, name, format string, firstArg, argNum int) *formatState { + state := &formatState{ + format: format, + name: name, + flags: make([]byte, 0, 5), + argNum: argNum, + argNums: make([]int, 0, 1), + nbytes: 1, // There's guaranteed to be a percent sign. + firstArg: firstArg, + pass: pass, + call: call, + } + // There may be flags. + state.parseFlags() + // There may be an index. + if !state.parseIndex() { + return nil + } + // There may be a width. + if !state.parseNum() { + return nil + } + // There may be a precision. + if !state.parsePrecision() { + return nil + } + // Now a verb, possibly prefixed by an index (which we may already have). + if !state.indexPending && !state.parseIndex() { + return nil + } + if state.nbytes == len(state.format) { + pass.ReportRangef(call.Fun, "%s format %s is missing verb at end of string", name, state.format) + return nil + } + verb, w := utf8.DecodeRuneInString(state.format[state.nbytes:]) + state.verb = verb + state.nbytes += w + if verb != '%' { + state.argNums = append(state.argNums, state.argNum) + } + state.format = state.format[:state.nbytes] + return state +} + +// printfArgType encodes the types of expressions a printf verb accepts. It is a bitmask. +type printfArgType int + +const ( + argBool printfArgType = 1 << iota + argInt + argRune + argString + argFloat + argComplex + argPointer + argError + anyType printfArgType = ^0 +) + +type printVerb struct { + verb rune // User may provide verb through Formatter; could be a rune. + flags string // known flags are all ASCII + typ printfArgType +} + +// Common flag sets for printf verbs. +const ( + noFlag = "" + numFlag = " -+.0" + sharpNumFlag = " -+.0#" + allFlags = " -+.0#" +) + +// printVerbs identifies which flags are known to printf for each verb. +var printVerbs = []printVerb{ + // '-' is a width modifier, always valid. + // '.' is a precision for float, max width for strings. + // '+' is required sign for numbers, Go format for %v. + // '#' is alternate format for several verbs. 
+ // ' ' is spacer for numbers + {'%', noFlag, 0}, + {'b', sharpNumFlag, argInt | argFloat | argComplex | argPointer}, + {'c', "-", argRune | argInt}, + {'d', numFlag, argInt | argPointer}, + {'e', sharpNumFlag, argFloat | argComplex}, + {'E', sharpNumFlag, argFloat | argComplex}, + {'f', sharpNumFlag, argFloat | argComplex}, + {'F', sharpNumFlag, argFloat | argComplex}, + {'g', sharpNumFlag, argFloat | argComplex}, + {'G', sharpNumFlag, argFloat | argComplex}, + {'o', sharpNumFlag, argInt | argPointer}, + {'O', sharpNumFlag, argInt | argPointer}, + {'p', "-#", argPointer}, + {'q', " -+.0#", argRune | argInt | argString}, + {'s', " -+.0", argString}, + {'t', "-", argBool}, + {'T', "-", anyType}, + {'U', "-#", argRune | argInt}, + {'v', allFlags, anyType}, + {'w', allFlags, argError}, + {'x', sharpNumFlag, argRune | argInt | argString | argPointer | argFloat | argComplex}, + {'X', sharpNumFlag, argRune | argInt | argString | argPointer | argFloat | argComplex}, +} + +// okPrintfArg compares the formatState to the arguments actually present, +// reporting any discrepancies it can discern. If the final argument is ellipsissed, +// there's little it can do for that. +func okPrintfArg(pass *analysis.Pass, call *ast.CallExpr, state *formatState) (ok bool) { + var v printVerb + found := false + // Linear scan is fast enough for a small list. + for _, v = range printVerbs { + if v.verb == state.verb { + found = true + break + } + } + + // Could current arg implement fmt.Formatter? + formatter := false + if state.argNum < len(call.Args) { + if tv, ok := pass.TypesInfo.Types[call.Args[state.argNum]]; ok { + formatter = isFormatter(tv.Type) + } + } + + if !formatter { + if !found { + pass.ReportRangef(call, "%s format %s has unknown verb %c", state.name, state.format, state.verb) + return false + } + for _, flag := range state.flags { + // TODO: Disable complaint about '0' for Go 1.10. To be fixed properly in 1.11. + // See issues 23598 and 23605. + if flag == '0' { + continue + } + if !strings.ContainsRune(v.flags, rune(flag)) { + pass.ReportRangef(call, "%s format %s has unrecognized flag %c", state.name, state.format, flag) + return false + } + } + } + // Verb is good. If len(state.argNums)>trueArgs, we have something like %.*s and all + // but the final arg must be an integer. 
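+	// e.g. (illustrative): in fmt.Printf("%.*s", 3, s) the 3 consumed by *
+	// must be an int; only the final argument s is matched against the verb.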
+ trueArgs := 1 + if state.verb == '%' { + trueArgs = 0 + } + nargs := len(state.argNums) + for i := 0; i < nargs-trueArgs; i++ { + argNum := state.argNums[i] + if !argCanBeChecked(pass, call, i, state) { + return + } + arg := call.Args[argNum] + if !matchArgType(pass, argInt, nil, arg) { + pass.ReportRangef(call, "%s format %s uses non-int %s as argument of *", state.name, state.format, analysisutil.Format(pass.Fset, arg)) + return false + } + } + + if state.verb == '%' || formatter { + return true + } + argNum := state.argNums[len(state.argNums)-1] + if !argCanBeChecked(pass, call, len(state.argNums)-1, state) { + return false + } + arg := call.Args[argNum] + if isFunctionValue(pass, arg) && state.verb != 'p' && state.verb != 'T' { + pass.ReportRangef(call, "%s format %s arg %s is a func value, not called", state.name, state.format, analysisutil.Format(pass.Fset, arg)) + return false + } + if !matchArgType(pass, v.typ, nil, arg) { + typeString := "" + if typ := pass.TypesInfo.Types[arg].Type; typ != nil { + typeString = typ.String() + } + pass.ReportRangef(call, "%s format %s has arg %s of wrong type %s", state.name, state.format, analysisutil.Format(pass.Fset, arg), typeString) + return false + } + if v.typ&argString != 0 && v.verb != 'T' && !bytes.Contains(state.flags, []byte{'#'}) { + if methodName, ok := recursiveStringer(pass, arg); ok { + pass.ReportRangef(call, "%s format %s with arg %s causes recursive %s method call", state.name, state.format, analysisutil.Format(pass.Fset, arg), methodName) + return false + } + } + return true +} + +// recursiveStringer reports whether the argument e is a potential +// recursive call to stringer or is an error, such as t and &t in these examples: +// +// func (t *T) String() string { printf("%s", t) } +// func (t T) Error() string { printf("%s", t) } +// func (t T) String() string { printf("%s", &t) } +func recursiveStringer(pass *analysis.Pass, e ast.Expr) (string, bool) { + typ := pass.TypesInfo.Types[e].Type + + // It's unlikely to be a recursive stringer if it has a Format method. + if isFormatter(typ) { + return "", false + } + + // Does e allow e.String() or e.Error()? + strObj, _, _ := types.LookupFieldOrMethod(typ, false, pass.Pkg, "String") + strMethod, strOk := strObj.(*types.Func) + errObj, _, _ := types.LookupFieldOrMethod(typ, false, pass.Pkg, "Error") + errMethod, errOk := errObj.(*types.Func) + if !strOk && !errOk { + return "", false + } + + // Is the expression e within the body of that String or Error method? + var method *types.Func + if strOk && strMethod.Pkg() == pass.Pkg && strMethod.Scope().Contains(e.Pos()) { + method = strMethod + } else if errOk && errMethod.Pkg() == pass.Pkg && errMethod.Scope().Contains(e.Pos()) { + method = errMethod + } else { + return "", false + } + + sig := method.Type().(*types.Signature) + if !isStringer(sig) { + return "", false + } + + // Is it the receiver r, or &r? + if u, ok := e.(*ast.UnaryExpr); ok && u.Op == token.AND { + e = u.X // strip off & from &r + } + if id, ok := e.(*ast.Ident); ok { + if pass.TypesInfo.Uses[id] == sig.Recv() { + return method.FullName(), true + } + } + return "", false +} + +// isStringer reports whether the method signature matches the String() definition in fmt.Stringer. +func isStringer(sig *types.Signature) bool { + return sig.Params().Len() == 0 && + sig.Results().Len() == 1 && + sig.Results().At(0).Type() == types.Typ[types.String] +} + +// isFunctionValue reports whether the expression is a function as opposed to a function call. 
+// It is almost always a mistake to print a function value. +func isFunctionValue(pass *analysis.Pass, e ast.Expr) bool { + if typ := pass.TypesInfo.Types[e].Type; typ != nil { + _, ok := typ.(*types.Signature) + return ok + } + return false +} + +// argCanBeChecked reports whether the specified argument is statically present; +// it may be beyond the list of arguments or in a terminal slice... argument, which +// means we can't see it. +func argCanBeChecked(pass *analysis.Pass, call *ast.CallExpr, formatArg int, state *formatState) bool { + argNum := state.argNums[formatArg] + if argNum <= 0 { + // Shouldn't happen, so catch it with prejudice. + panic("negative arg num") + } + if argNum < len(call.Args)-1 { + return true // Always OK. + } + if call.Ellipsis.IsValid() { + return false // We just can't tell; there could be many more arguments. + } + if argNum < len(call.Args) { + return true + } + // There are bad indexes in the format or there are fewer arguments than the format needs. + // This is the argument number relative to the format: Printf("%s", "hi") will give 1 for the "hi". + arg := argNum - state.firstArg + 1 // People think of arguments as 1-indexed. + pass.ReportRangef(call, "%s format %s reads arg #%d, but call has %v", state.name, state.format, arg, count(len(call.Args)-state.firstArg, "arg")) + return false +} + +// printFormatRE is the regexp we match and report as a possible format string +// in the first argument to unformatted prints like fmt.Print. +// We exclude the space flag, so that printing a string like "x % y" is not reported as a format. +var printFormatRE = regexp.MustCompile(`%` + flagsRE + numOptRE + `\.?` + numOptRE + indexOptRE + verbRE) + +const ( + flagsRE = `[+\-#]*` + indexOptRE = `(\[[0-9]+\])?` + numOptRE = `([0-9]+|` + indexOptRE + `\*)?` + verbRE = `[bcdefgopqstvxEFGTUX]` +) + +// checkPrint checks a call to an unformatted print routine such as Println. +func checkPrint(pass *analysis.Pass, call *ast.CallExpr, fn *types.Func) { + firstArg := 0 + typ := pass.TypesInfo.Types[call.Fun].Type + if typ == nil { + // Skip checking functions with unknown type. + return + } + if sig, ok := typ.(*types.Signature); ok { + if !sig.Variadic() { + // Skip checking non-variadic functions. + return + } + params := sig.Params() + firstArg = params.Len() - 1 + + typ := params.At(firstArg).Type() + typ = typ.(*types.Slice).Elem() + it, ok := typ.(*types.Interface) + if !ok || !it.Empty() { + // Skip variadic functions accepting non-interface{} args. + return + } + } + args := call.Args + if len(args) <= firstArg { + // Skip calls without variadic args. + return + } + args = args[firstArg:] + + if firstArg == 0 { + if sel, ok := call.Args[0].(*ast.SelectorExpr); ok { + if x, ok := sel.X.(*ast.Ident); ok { + if x.Name == "os" && strings.HasPrefix(sel.Sel.Name, "Std") { + pass.ReportRangef(call, "%s does not take io.Writer but has first arg %s", fn.FullName(), analysisutil.Format(pass.Fset, call.Args[0])) + } + } + } + } + + arg := args[0] + if lit, ok := arg.(*ast.BasicLit); ok && lit.Kind == token.STRING { + // Ignore trailing % character in lit.Value. + // The % in "abc 0.0%" couldn't be a formatting directive. + s := strings.TrimSuffix(lit.Value, `%"`) + if strings.Contains(s, "%") { + m := printFormatRE.FindStringSubmatch(s) + if m != nil { + pass.ReportRangef(call, "%s call has possible formatting directive %s", fn.FullName(), m[0]) + } + } + } + if strings.HasSuffix(fn.Name(), "ln") { + // The last item, if a string, should not have a newline. 
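+		// e.g. (illustrative): fmt.Println("done\n") is reported because
+		// Println already appends a newline.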
+ arg = args[len(args)-1] + if lit, ok := arg.(*ast.BasicLit); ok && lit.Kind == token.STRING { + str, _ := strconv.Unquote(lit.Value) + if strings.HasSuffix(str, "\n") { + pass.ReportRangef(call, "%s arg list ends with redundant newline", fn.FullName()) + } + } + } + for _, arg := range args { + if isFunctionValue(pass, arg) { + pass.ReportRangef(call, "%s arg %s is a func value, not called", fn.FullName(), analysisutil.Format(pass.Fset, arg)) + } + if methodName, ok := recursiveStringer(pass, arg); ok { + pass.ReportRangef(call, "%s arg %s causes recursive call to %s method", fn.FullName(), analysisutil.Format(pass.Fset, arg), methodName) + } + } +} + +// count(n, what) returns "1 what" or "N whats" +// (assuming the plural of what is whats). +func count(n int, what string) string { + if n == 1 { + return "1 " + what + } + return fmt.Sprintf("%d %ss", n, what) +} + +// stringSet is a set-of-nonempty-strings-valued flag. +// Note: elements without a '.' get lower-cased. +type stringSet map[string]bool + +func (ss stringSet) String() string { + var list []string + for name := range ss { + list = append(list, name) + } + sort.Strings(list) + return strings.Join(list, ",") +} + +func (ss stringSet) Set(flag string) error { + for _, name := range strings.Split(flag, ",") { + if len(name) == 0 { + return fmt.Errorf("empty string") + } + if !strings.Contains(name, ".") { + name = strings.ToLower(name) + } + ss[name] = true + } + return nil +} diff --git a/vendor/golang.org/x/tools/go/analysis/passes/printf/types.go b/vendor/golang.org/x/tools/go/analysis/passes/printf/types.go new file mode 100644 index 000000000..6a5fae44f --- /dev/null +++ b/vendor/golang.org/x/tools/go/analysis/passes/printf/types.go @@ -0,0 +1,246 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package printf + +import ( + "go/ast" + "go/types" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/internal/analysisutil" +) + +var errorType = types.Universe.Lookup("error").Type().Underlying().(*types.Interface) + +// matchArgType reports an error if printf verb t is not appropriate +// for operand arg. +// +// typ is used only for recursive calls; external callers must supply nil. +// +// (Recursion arises from the compound types {map,chan,slice} which +// may be printed with %d etc. if that is appropriate for their element +// types.) +func matchArgType(pass *analysis.Pass, t printfArgType, typ types.Type, arg ast.Expr) bool { + return matchArgTypeInternal(pass, t, typ, arg, make(map[types.Type]bool)) +} + +// matchArgTypeInternal is the internal version of matchArgType. It carries a map +// remembering what types are in progress so we don't recur when faced with recursive +// types or mutually recursive types. +func matchArgTypeInternal(pass *analysis.Pass, t printfArgType, typ types.Type, arg ast.Expr, inProgress map[types.Type]bool) bool { + // %v, %T accept any argument type. + if t == anyType { + return true + } + if typ == nil { + // external call + typ = pass.TypesInfo.Types[arg].Type + if typ == nil { + return true // probably a type check problem + } + } + + // %w accepts only errors. + if t == argError { + return types.ConvertibleTo(typ, errorType) + } + + // If the type implements fmt.Formatter, we have nothing to check. + if isFormatter(typ) { + return true + } + // If we can use a string, might arg (dynamically) implement the Stringer or Error interface? 
+ if t&argString != 0 && isConvertibleToString(pass, typ) { + return true + } + + typ = typ.Underlying() + if inProgress[typ] { + // We're already looking at this type. The call that started it will take care of it. + return true + } + inProgress[typ] = true + + switch typ := typ.(type) { + case *types.Signature: + return t == argPointer + + case *types.Map: + return t == argPointer || + // Recur: map[int]int matches %d. + (matchArgTypeInternal(pass, t, typ.Key(), arg, inProgress) && matchArgTypeInternal(pass, t, typ.Elem(), arg, inProgress)) + + case *types.Chan: + return t&argPointer != 0 + + case *types.Array: + // Same as slice. + if types.Identical(typ.Elem().Underlying(), types.Typ[types.Byte]) && t&argString != 0 { + return true // %s matches []byte + } + // Recur: []int matches %d. + return matchArgTypeInternal(pass, t, typ.Elem(), arg, inProgress) + + case *types.Slice: + // Same as array. + if types.Identical(typ.Elem().Underlying(), types.Typ[types.Byte]) && t&argString != 0 { + return true // %s matches []byte + } + if t == argPointer { + return true // %p prints a slice's 0th element + } + // Recur: []int matches %d. But watch out for + // type T []T + // If the element is a pointer type (type T[]*T), it's handled fine by the Pointer case below. + return matchArgTypeInternal(pass, t, typ.Elem(), arg, inProgress) + + case *types.Pointer: + // Ugly, but dealing with an edge case: a known pointer to an invalid type, + // probably something from a failed import. + if typ.Elem().String() == "invalid type" { + if false { + pass.Reportf(arg.Pos(), "printf argument %v is pointer to invalid or unknown type", analysisutil.Format(pass.Fset, arg)) + } + return true // special case + } + // If it's actually a pointer with %p, it prints as one. + if t == argPointer { + return true + } + + under := typ.Elem().Underlying() + switch under.(type) { + case *types.Struct: // see below + case *types.Array: // see below + case *types.Slice: // see below + case *types.Map: // see below + default: + // Check whether the rest can print pointers. + return t&argPointer != 0 + } + // If it's a top-level pointer to a struct, array, slice, or + // map, that's equivalent in our analysis to whether we can + // print the type being pointed to. Pointers in nested levels + // are not supported to minimize fmt running into loops. + if len(inProgress) > 1 { + return false + } + return matchArgTypeInternal(pass, t, under, arg, inProgress) + + case *types.Struct: + return matchStructArgType(pass, t, typ, arg, inProgress) + + case *types.Interface: + // There's little we can do. + // Whether any particular verb is valid depends on the argument. + // The user may have reasonable prior knowledge of the contents of the interface. 
+ return true + + case *types.Basic: + switch typ.Kind() { + case types.UntypedBool, + types.Bool: + return t&argBool != 0 + + case types.UntypedInt, + types.Int, + types.Int8, + types.Int16, + types.Int32, + types.Int64, + types.Uint, + types.Uint8, + types.Uint16, + types.Uint32, + types.Uint64, + types.Uintptr: + return t&argInt != 0 + + case types.UntypedFloat, + types.Float32, + types.Float64: + return t&argFloat != 0 + + case types.UntypedComplex, + types.Complex64, + types.Complex128: + return t&argComplex != 0 + + case types.UntypedString, + types.String: + return t&argString != 0 + + case types.UnsafePointer: + return t&(argPointer|argInt) != 0 + + case types.UntypedRune: + return t&(argInt|argRune) != 0 + + case types.UntypedNil: + return false + + case types.Invalid: + if false { + pass.Reportf(arg.Pos(), "printf argument %v has invalid or unknown type", analysisutil.Format(pass.Fset, arg)) + } + return true // Probably a type check problem. + } + panic("unreachable") + } + + return false +} + +func isConvertibleToString(pass *analysis.Pass, typ types.Type) bool { + if bt, ok := typ.(*types.Basic); ok && bt.Kind() == types.UntypedNil { + // We explicitly don't want untyped nil, which is + // convertible to both of the interfaces below, as it + // would just panic anyway. + return false + } + if types.ConvertibleTo(typ, errorType) { + return true // via .Error() + } + + // Does it implement fmt.Stringer? + if obj, _, _ := types.LookupFieldOrMethod(typ, false, nil, "String"); obj != nil { + if fn, ok := obj.(*types.Func); ok { + sig := fn.Type().(*types.Signature) + if sig.Params().Len() == 0 && + sig.Results().Len() == 1 && + sig.Results().At(0).Type() == types.Typ[types.String] { + return true + } + } + } + + return false +} + +// hasBasicType reports whether x's type is a types.Basic with the given kind. +func hasBasicType(pass *analysis.Pass, x ast.Expr, kind types.BasicKind) bool { + t := pass.TypesInfo.Types[x].Type + if t != nil { + t = t.Underlying() + } + b, ok := t.(*types.Basic) + return ok && b.Kind() == kind +} + +// matchStructArgType reports whether all the elements of the struct match the expected +// type. For instance, with "%d" all the elements must be printable with the "%d" format. +func matchStructArgType(pass *analysis.Pass, t printfArgType, typ *types.Struct, arg ast.Expr, inProgress map[types.Type]bool) bool { + for i := 0; i < typ.NumFields(); i++ { + typf := typ.Field(i) + if !matchArgTypeInternal(pass, t, typf.Type(), arg, inProgress) { + return false + } + if t&argString != 0 && !typf.Exported() && isConvertibleToString(pass, typf.Type()) { + // Issue #17798: unexported Stringer or error cannot be properly formatted. + return false + } + } + return true +} diff --git a/vendor/golang.org/x/tools/go/analysis/passes/reflectvaluecompare/reflectvaluecompare.go b/vendor/golang.org/x/tools/go/analysis/passes/reflectvaluecompare/reflectvaluecompare.go new file mode 100644 index 000000000..ef21f0e7d --- /dev/null +++ b/vendor/golang.org/x/tools/go/analysis/passes/reflectvaluecompare/reflectvaluecompare.go @@ -0,0 +1,99 @@ +// Copyright 2021 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package reflectvaluecompare defines an Analyzer that checks for accidentally +// using == or reflect.DeepEqual to compare reflect.Value values. +// See issues 43993 and 18871. 
+package reflectvaluecompare + +import ( + "go/ast" + "go/token" + "go/types" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/ast/inspector" + "golang.org/x/tools/go/types/typeutil" +) + +const Doc = `check for comparing reflect.Value values with == or reflect.DeepEqual + +The reflectvaluecompare checker looks for expressions of the form: + + v1 == v2 + v1 != v2 + reflect.DeepEqual(v1, v2) + +where v1 or v2 are reflect.Values. Comparing reflect.Values directly +is almost certainly not correct, as it compares the reflect package's +internal representation, not the underlying value. +Likely what is intended is: + + v1.Interface() == v2.Interface() + v1.Interface() != v2.Interface() + reflect.DeepEqual(v1.Interface(), v2.Interface()) +` + +var Analyzer = &analysis.Analyzer{ + Name: "reflectvaluecompare", + Doc: Doc, + Requires: []*analysis.Analyzer{inspect.Analyzer}, + Run: run, +} + +func run(pass *analysis.Pass) (interface{}, error) { + inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + + nodeFilter := []ast.Node{ + (*ast.BinaryExpr)(nil), + (*ast.CallExpr)(nil), + } + inspect.Preorder(nodeFilter, func(n ast.Node) { + switch n := n.(type) { + case *ast.BinaryExpr: + if n.Op != token.EQL && n.Op != token.NEQ { + return + } + if isReflectValue(pass, n.X) || isReflectValue(pass, n.Y) { + if n.Op == token.EQL { + pass.ReportRangef(n, "avoid using == with reflect.Value") + } else { + pass.ReportRangef(n, "avoid using != with reflect.Value") + } + } + case *ast.CallExpr: + fn, ok := typeutil.Callee(pass.TypesInfo, n).(*types.Func) + if !ok { + return + } + if fn.FullName() == "reflect.DeepEqual" && (isReflectValue(pass, n.Args[0]) || isReflectValue(pass, n.Args[1])) { + pass.ReportRangef(n, "avoid using reflect.DeepEqual with reflect.Value") + } + } + }) + return nil, nil +} + +// isReflectValue reports whether the type of e is reflect.Value. +func isReflectValue(pass *analysis.Pass, e ast.Expr) bool { + tv, ok := pass.TypesInfo.Types[e] + if !ok { // no type info, something else is wrong + return false + } + // See if the type is reflect.Value + named, ok := tv.Type.(*types.Named) + if !ok { + return false + } + if obj := named.Obj(); obj == nil || obj.Pkg() == nil || obj.Pkg().Path() != "reflect" || obj.Name() != "Value" { + return false + } + if _, ok := e.(*ast.CompositeLit); ok { + // This is reflect.Value{}. Don't treat that as an error. + // Users should probably use x.IsValid() rather than x == reflect.Value{}, but the latter isn't wrong. + return false + } + return true +} diff --git a/vendor/golang.org/x/tools/go/analysis/passes/shadow/shadow.go b/vendor/golang.org/x/tools/go/analysis/passes/shadow/shadow.go new file mode 100644 index 000000000..b160dcf5b --- /dev/null +++ b/vendor/golang.org/x/tools/go/analysis/passes/shadow/shadow.go @@ -0,0 +1,290 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package shadow defines an Analyzer that checks for shadowed variables. +package shadow + +import ( + "go/ast" + "go/token" + "go/types" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/ast/inspector" +) + +// NOTE: Experimental. Not part of the vet suite. + +const Doc = `check for possible unintended shadowing of variables + +This analyzer check for shadowed variables. 
+A shadowed variable is a variable declared in an inner scope +with the same name and type as a variable in an outer scope, +and where the outer variable is mentioned after the inner one +is declared. + +(This definition can be refined; the module generates too many +false positives and is not yet enabled by default.) + +For example: + + func BadRead(f *os.File, buf []byte) error { + var err error + for { + n, err := f.Read(buf) // shadows the function variable 'err' + if err != nil { + break // causes return of wrong value + } + foo(buf) + } + return err + } +` + +var Analyzer = &analysis.Analyzer{ + Name: "shadow", + Doc: Doc, + Requires: []*analysis.Analyzer{inspect.Analyzer}, + Run: run, +} + +// flags +var strict = false + +func init() { + Analyzer.Flags.BoolVar(&strict, "strict", strict, "whether to be strict about shadowing; can be noisy") +} + +func run(pass *analysis.Pass) (interface{}, error) { + inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + + spans := make(map[types.Object]span) + for id, obj := range pass.TypesInfo.Defs { + // Ignore identifiers that don't denote objects + // (package names, symbolic variables such as t + // in t := x.(type) of type switch headers). + if obj != nil { + growSpan(spans, obj, id.Pos(), id.End()) + } + } + for id, obj := range pass.TypesInfo.Uses { + growSpan(spans, obj, id.Pos(), id.End()) + } + for node, obj := range pass.TypesInfo.Implicits { + // A type switch with a short variable declaration + // such as t := x.(type) doesn't declare the symbolic + // variable (t in the example) at the switch header; + // instead a new variable t (with specific type) is + // declared implicitly for each case. Such variables + // are found in the types.Info.Implicits (not Defs) + // map. Add them here, assuming they are declared at + // the type cases' colon ":". + if cc, ok := node.(*ast.CaseClause); ok { + growSpan(spans, obj, cc.Colon, cc.Colon) + } + } + + nodeFilter := []ast.Node{ + (*ast.AssignStmt)(nil), + (*ast.GenDecl)(nil), + } + inspect.Preorder(nodeFilter, func(n ast.Node) { + switch n := n.(type) { + case *ast.AssignStmt: + checkShadowAssignment(pass, spans, n) + case *ast.GenDecl: + checkShadowDecl(pass, spans, n) + } + }) + return nil, nil +} + +// A span stores the minimum range of byte positions in the file in which a +// given variable (types.Object) is mentioned. It is lexically defined: it spans +// from the beginning of its first mention to the end of its last mention. +// A variable is considered shadowed (if strict is off) only if the +// shadowing variable is declared within the span of the shadowed variable. +// In other words, if a variable is shadowed but not used after the shadowed +// variable is declared, it is inconsequential and not worth complaining about. +// This simple check dramatically reduces the nuisance rate for the shadowing +// check, at least until something cleverer comes along. +// +// One wrinkle: A "naked return" is a silent use of a variable that the Span +// will not capture, but the compilers catch naked returns of shadowed +// variables so we don't need to. +// +// Cases this gets wrong (TODO): +// - If a for loop's continuation statement mentions a variable redeclared in +// the block, we should complain about it but don't. +// - A variable declared inside a function literal can falsely be identified +// as shadowing a variable in the outer function. +// +type span struct { + min token.Pos + max token.Pos +} + +// contains reports whether the position is inside the span. 
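+// For example, a variable whose first mention begins at offset 100 and whose
+// last mention ends at offset 200 has the half-open span [100, 200), so
+// contains(150) reports true while contains(250) reports false.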
+func (s span) contains(pos token.Pos) bool { + return s.min <= pos && pos < s.max +} + +// growSpan expands the span for the object to contain the source range [pos, end). +func growSpan(spans map[types.Object]span, obj types.Object, pos, end token.Pos) { + if strict { + return // No need + } + s, ok := spans[obj] + if ok { + if s.min > pos { + s.min = pos + } + if s.max < end { + s.max = end + } + } else { + s = span{pos, end} + } + spans[obj] = s +} + +// checkShadowAssignment checks for shadowing in a short variable declaration. +func checkShadowAssignment(pass *analysis.Pass, spans map[types.Object]span, a *ast.AssignStmt) { + if a.Tok != token.DEFINE { + return + } + if idiomaticShortRedecl(pass, a) { + return + } + for _, expr := range a.Lhs { + ident, ok := expr.(*ast.Ident) + if !ok { + pass.ReportRangef(expr, "invalid AST: short variable declaration of non-identifier") + return + } + checkShadowing(pass, spans, ident) + } +} + +// idiomaticShortRedecl reports whether this short declaration can be ignored for +// the purposes of shadowing, that is, that any redeclarations it contains are deliberate. +func idiomaticShortRedecl(pass *analysis.Pass, a *ast.AssignStmt) bool { + // Don't complain about deliberate redeclarations of the form + // i := i + // Such constructs are idiomatic in range loops to create a new variable + // for each iteration. Another example is + // switch n := n.(type) + if len(a.Rhs) != len(a.Lhs) { + return false + } + // We know it's an assignment, so the LHS must be all identifiers. (We check anyway.) + for i, expr := range a.Lhs { + lhs, ok := expr.(*ast.Ident) + if !ok { + pass.ReportRangef(expr, "invalid AST: short variable declaration of non-identifier") + return true // Don't do any more processing. + } + switch rhs := a.Rhs[i].(type) { + case *ast.Ident: + if lhs.Name != rhs.Name { + return false + } + case *ast.TypeAssertExpr: + if id, ok := rhs.X.(*ast.Ident); ok { + if lhs.Name != id.Name { + return false + } + } + default: + return false + } + } + return true +} + +// idiomaticRedecl reports whether this declaration spec can be ignored for +// the purposes of shadowing, that is, that any redeclarations it contains are deliberate. +func idiomaticRedecl(d *ast.ValueSpec) bool { + // Don't complain about deliberate redeclarations of the form + // var i, j = i, j + // Don't ignore redeclarations of the form + // var i = 3 + if len(d.Names) != len(d.Values) { + return false + } + for i, lhs := range d.Names { + rhs, ok := d.Values[i].(*ast.Ident) + if !ok || lhs.Name != rhs.Name { + return false + } + } + return true +} + +// checkShadowDecl checks for shadowing in a general variable declaration. +func checkShadowDecl(pass *analysis.Pass, spans map[types.Object]span, d *ast.GenDecl) { + if d.Tok != token.VAR { + return + } + for _, spec := range d.Specs { + valueSpec, ok := spec.(*ast.ValueSpec) + if !ok { + pass.ReportRangef(spec, "invalid AST: var GenDecl not ValueSpec") + return + } + // Don't complain about deliberate redeclarations of the form + // var i = i + if idiomaticRedecl(valueSpec) { + return + } + for _, ident := range valueSpec.Names { + checkShadowing(pass, spans, ident) + } + } +} + +// checkShadowing checks whether the identifier shadows an identifier in an outer scope. +func checkShadowing(pass *analysis.Pass, spans map[types.Object]span, ident *ast.Ident) { + if ident.Name == "_" { + // Can't shadow the blank identifier. 
+ return + } + obj := pass.TypesInfo.Defs[ident] + if obj == nil { + return + } + // obj.Parent.Parent is the surrounding scope. If we can find another declaration + // starting from there, we have a shadowed identifier. + _, shadowed := obj.Parent().Parent().LookupParent(obj.Name(), obj.Pos()) + if shadowed == nil { + return + } + // Don't complain if it's shadowing a universe-declared identifier; that's fine. + if shadowed.Parent() == types.Universe { + return + } + if strict { + // The shadowed identifier must appear before this one to be an instance of shadowing. + if shadowed.Pos() > ident.Pos() { + return + } + } else { + // Don't complain if the span of validity of the shadowed identifier doesn't include + // the shadowing identifier. + span, ok := spans[shadowed] + if !ok { + pass.ReportRangef(ident, "internal error: no range for %q", ident.Name) + return + } + if !span.contains(ident.Pos()) { + return + } + } + // Don't complain if the types differ: that implies the programmer really wants two different things. + if types.Identical(obj.Type(), shadowed.Type()) { + line := pass.Fset.Position(shadowed.Pos()).Line + pass.ReportRangef(ident, "declaration of %q shadows declaration at line %d", obj.Name(), line) + } +} diff --git a/vendor/golang.org/x/tools/go/analysis/passes/shift/dead.go b/vendor/golang.org/x/tools/go/analysis/passes/shift/dead.go new file mode 100644 index 000000000..43415a98d --- /dev/null +++ b/vendor/golang.org/x/tools/go/analysis/passes/shift/dead.go @@ -0,0 +1,101 @@ +// Copyright 2017 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package shift + +// Simplified dead code detector. +// Used for skipping shift checks on unreachable arch-specific code. + +import ( + "go/ast" + "go/constant" + "go/types" +) + +// updateDead puts unreachable "if" and "case" nodes into dead. +func updateDead(info *types.Info, dead map[ast.Node]bool, node ast.Node) { + if dead[node] { + // The node is already marked as dead. + return + } + + // setDead marks the node and all the children as dead. + setDead := func(n ast.Node) { + ast.Inspect(n, func(node ast.Node) bool { + if node != nil { + dead[node] = true + } + return true + }) + } + + switch stmt := node.(type) { + case *ast.IfStmt: + // "if" branch is dead if its condition evaluates + // to constant false. + v := info.Types[stmt.Cond].Value + if v == nil { + return + } + if !constant.BoolVal(v) { + setDead(stmt.Body) + return + } + if stmt.Else != nil { + setDead(stmt.Else) + } + case *ast.SwitchStmt: + // Case clause with empty switch tag is dead if it evaluates + // to constant false. + if stmt.Tag == nil { + BodyLoopBool: + for _, stmt := range stmt.Body.List { + cc := stmt.(*ast.CaseClause) + if cc.List == nil { + // Skip default case. + continue + } + for _, expr := range cc.List { + v := info.Types[expr].Value + if v == nil || v.Kind() != constant.Bool || constant.BoolVal(v) { + continue BodyLoopBool + } + } + setDead(cc) + } + return + } + + // Case clause is dead if its constant value doesn't match + // the constant value from the switch tag. + // TODO: This handles integer comparisons only. + v := info.Types[stmt.Tag].Value + if v == nil || v.Kind() != constant.Int { + return + } + tagN, ok := constant.Uint64Val(v) + if !ok { + return + } + BodyLoopInt: + for _, x := range stmt.Body.List { + cc := x.(*ast.CaseClause) + if cc.List == nil { + // Skip default case. 
+ continue + } + for _, expr := range cc.List { + v := info.Types[expr].Value + if v == nil { + continue BodyLoopInt + } + n, ok := constant.Uint64Val(v) + if !ok || tagN == n { + continue BodyLoopInt + } + } + setDead(cc) + } + } +} diff --git a/vendor/golang.org/x/tools/go/analysis/passes/shift/shift.go b/vendor/golang.org/x/tools/go/analysis/passes/shift/shift.go new file mode 100644 index 000000000..1f3df07cc --- /dev/null +++ b/vendor/golang.org/x/tools/go/analysis/passes/shift/shift.go @@ -0,0 +1,101 @@ +// Copyright 2014 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package shift defines an Analyzer that checks for shifts that exceed +// the width of an integer. +package shift + +// TODO(adonovan): integrate with ctrflow (CFG-based) dead code analysis. May +// have impedance mismatch due to its (non-)treatment of constant +// expressions (such as runtime.GOARCH=="386"). + +import ( + "go/ast" + "go/constant" + "go/token" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/analysis/passes/internal/analysisutil" + "golang.org/x/tools/go/ast/inspector" +) + +const Doc = "check for shifts that equal or exceed the width of the integer" + +var Analyzer = &analysis.Analyzer{ + Name: "shift", + Doc: Doc, + Requires: []*analysis.Analyzer{inspect.Analyzer}, + Run: run, +} + +func run(pass *analysis.Pass) (interface{}, error) { + inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + + // Do a complete pass to compute dead nodes. + dead := make(map[ast.Node]bool) + nodeFilter := []ast.Node{ + (*ast.IfStmt)(nil), + (*ast.SwitchStmt)(nil), + } + inspect.Preorder(nodeFilter, func(n ast.Node) { + // TODO(adonovan): move updateDead into this file. + updateDead(pass.TypesInfo, dead, n) + }) + + nodeFilter = []ast.Node{ + (*ast.AssignStmt)(nil), + (*ast.BinaryExpr)(nil), + } + inspect.Preorder(nodeFilter, func(node ast.Node) { + if dead[node] { + // Skip shift checks on unreachable nodes. + return + } + + switch node := node.(type) { + case *ast.BinaryExpr: + if node.Op == token.SHL || node.Op == token.SHR { + checkLongShift(pass, node, node.X, node.Y) + } + case *ast.AssignStmt: + if len(node.Lhs) != 1 || len(node.Rhs) != 1 { + return + } + if node.Tok == token.SHL_ASSIGN || node.Tok == token.SHR_ASSIGN { + checkLongShift(pass, node, node.Lhs[0], node.Rhs[0]) + } + } + }) + return nil, nil +} + +// checkLongShift checks if shift or shift-assign operations shift by more than +// the length of the underlying variable. +func checkLongShift(pass *analysis.Pass, node ast.Node, x, y ast.Expr) { + if pass.TypesInfo.Types[x].Value != nil { + // Ignore shifts of constants. + // These are frequently used for bit-twiddling tricks + // like ^uint(0) >> 63 for 32/64 bit detection and compatibility. 
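+ // (Shifts of non-constant operands are still checked below; for example,
+ // shifting an int8 variable by a constant 8 or more bits is reported.)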
+ return + } + + v := pass.TypesInfo.Types[y].Value + if v == nil { + return + } + amt, ok := constant.Int64Val(v) + if !ok { + return + } + t := pass.TypesInfo.Types[x].Type + if t == nil { + return + } + size := 8 * pass.TypesSizes.Sizeof(t) + if amt >= size { + ident := analysisutil.Format(pass.Fset, x) + pass.ReportRangef(node, "%s (%d bits) too small for shift of %d", ident, size, amt) + } +} diff --git a/vendor/golang.org/x/tools/go/analysis/passes/sigchanyzer/sigchanyzer.go b/vendor/golang.org/x/tools/go/analysis/passes/sigchanyzer/sigchanyzer.go new file mode 100644 index 000000000..0d6c8ebf1 --- /dev/null +++ b/vendor/golang.org/x/tools/go/analysis/passes/sigchanyzer/sigchanyzer.go @@ -0,0 +1,154 @@ +// Copyright 2020 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package sigchanyzer defines an Analyzer that detects +// misuse of unbuffered signal as argument to signal.Notify. +package sigchanyzer + +import ( + "bytes" + "go/ast" + "go/format" + "go/token" + "go/types" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/ast/inspector" +) + +const Doc = `check for unbuffered channel of os.Signal + +This checker reports call expression of the form signal.Notify(c <-chan os.Signal, sig ...os.Signal), +where c is an unbuffered channel, which can be at risk of missing the signal.` + +// Analyzer describes sigchanyzer analysis function detector. +var Analyzer = &analysis.Analyzer{ + Name: "sigchanyzer", + Doc: Doc, + Requires: []*analysis.Analyzer{inspect.Analyzer}, + Run: run, +} + +func run(pass *analysis.Pass) (interface{}, error) { + inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + + nodeFilter := []ast.Node{ + (*ast.CallExpr)(nil), + } + inspect.Preorder(nodeFilter, func(n ast.Node) { + call := n.(*ast.CallExpr) + if !isSignalNotify(pass.TypesInfo, call) { + return + } + var chanDecl *ast.CallExpr + switch arg := call.Args[0].(type) { + case *ast.Ident: + if decl, ok := findDecl(arg).(*ast.CallExpr); ok { + chanDecl = decl + } + case *ast.CallExpr: + // Only signal.Notify(make(chan os.Signal), os.Interrupt) is safe, + // conservatively treate others as not safe, see golang/go#45043 + if isBuiltinMake(pass.TypesInfo, arg) { + return + } + chanDecl = arg + } + if chanDecl == nil || len(chanDecl.Args) != 1 { + return + } + + // Make a copy of the channel's declaration to avoid + // mutating the AST. See https://golang.org/issue/46129. + chanDeclCopy := &ast.CallExpr{} + *chanDeclCopy = *chanDecl + chanDeclCopy.Args = append([]ast.Expr(nil), chanDecl.Args...) 
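+ // Append a buffer size of 1 so that the suggested fix rewrites, for
+ // example, make(chan os.Signal) into make(chan os.Signal, 1).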
+ chanDeclCopy.Args = append(chanDeclCopy.Args, &ast.BasicLit{ + Kind: token.INT, + Value: "1", + }) + + var buf bytes.Buffer + if err := format.Node(&buf, token.NewFileSet(), chanDeclCopy); err != nil { + return + } + pass.Report(analysis.Diagnostic{ + Pos: call.Pos(), + End: call.End(), + Message: "misuse of unbuffered os.Signal channel as argument to signal.Notify", + SuggestedFixes: []analysis.SuggestedFix{{ + Message: "Change to buffer channel", + TextEdits: []analysis.TextEdit{{ + Pos: chanDecl.Pos(), + End: chanDecl.End(), + NewText: buf.Bytes(), + }}, + }}, + }) + }) + return nil, nil +} + +func isSignalNotify(info *types.Info, call *ast.CallExpr) bool { + check := func(id *ast.Ident) bool { + obj := info.ObjectOf(id) + return obj.Name() == "Notify" && obj.Pkg().Path() == "os/signal" + } + switch fun := call.Fun.(type) { + case *ast.SelectorExpr: + return check(fun.Sel) + case *ast.Ident: + if fun, ok := findDecl(fun).(*ast.SelectorExpr); ok { + return check(fun.Sel) + } + return false + default: + return false + } +} + +func findDecl(arg *ast.Ident) ast.Node { + if arg.Obj == nil { + return nil + } + switch as := arg.Obj.Decl.(type) { + case *ast.AssignStmt: + if len(as.Lhs) != len(as.Rhs) { + return nil + } + for i, lhs := range as.Lhs { + lid, ok := lhs.(*ast.Ident) + if !ok { + continue + } + if lid.Obj == arg.Obj { + return as.Rhs[i] + } + } + case *ast.ValueSpec: + if len(as.Names) != len(as.Values) { + return nil + } + for i, name := range as.Names { + if name.Obj == arg.Obj { + return as.Values[i] + } + } + } + return nil +} + +func isBuiltinMake(info *types.Info, call *ast.CallExpr) bool { + typVal := info.Types[call.Fun] + if !typVal.IsBuiltin() { + return false + } + switch fun := call.Fun.(type) { + case *ast.Ident: + return info.ObjectOf(fun).Name() == "make" + default: + return false + } +} diff --git a/vendor/golang.org/x/tools/go/analysis/passes/sortslice/analyzer.go b/vendor/golang.org/x/tools/go/analysis/passes/sortslice/analyzer.go new file mode 100644 index 000000000..69a67939d --- /dev/null +++ b/vendor/golang.org/x/tools/go/analysis/passes/sortslice/analyzer.go @@ -0,0 +1,123 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package sortslice defines an Analyzer that checks for calls +// to sort.Slice that do not use a slice type as first argument. +package sortslice + +import ( + "bytes" + "fmt" + "go/ast" + "go/format" + "go/types" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/ast/inspector" + "golang.org/x/tools/go/types/typeutil" +) + +const Doc = `check the argument type of sort.Slice + +sort.Slice requires an argument of a slice type. 
Check that +the interface{} value passed to sort.Slice is actually a slice.` + +var Analyzer = &analysis.Analyzer{ + Name: "sortslice", + Doc: Doc, + Requires: []*analysis.Analyzer{inspect.Analyzer}, + Run: run, +} + +func run(pass *analysis.Pass) (interface{}, error) { + inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + + nodeFilter := []ast.Node{ + (*ast.CallExpr)(nil), + } + + inspect.Preorder(nodeFilter, func(n ast.Node) { + call := n.(*ast.CallExpr) + fn, _ := typeutil.Callee(pass.TypesInfo, call).(*types.Func) + if fn == nil { + return + } + + if fn.FullName() != "sort.Slice" { + return + } + + arg := call.Args[0] + typ := pass.TypesInfo.Types[arg].Type + switch typ.Underlying().(type) { + case *types.Slice, *types.Interface: + return + } + + var fixes []analysis.SuggestedFix + switch v := typ.Underlying().(type) { + case *types.Array: + var buf bytes.Buffer + format.Node(&buf, pass.Fset, &ast.SliceExpr{ + X: arg, + Slice3: false, + Lbrack: arg.End() + 1, + Rbrack: arg.End() + 3, + }) + fixes = append(fixes, analysis.SuggestedFix{ + Message: "Get a slice of the full array", + TextEdits: []analysis.TextEdit{{ + Pos: arg.Pos(), + End: arg.End(), + NewText: buf.Bytes(), + }}, + }) + case *types.Pointer: + _, ok := v.Elem().Underlying().(*types.Slice) + if !ok { + break + } + var buf bytes.Buffer + format.Node(&buf, pass.Fset, &ast.StarExpr{ + X: arg, + }) + fixes = append(fixes, analysis.SuggestedFix{ + Message: "Dereference the pointer to the slice", + TextEdits: []analysis.TextEdit{{ + Pos: arg.Pos(), + End: arg.End(), + NewText: buf.Bytes(), + }}, + }) + case *types.Signature: + if v.Params().Len() != 0 || v.Results().Len() != 1 { + break + } + if _, ok := v.Results().At(0).Type().Underlying().(*types.Slice); !ok { + break + } + var buf bytes.Buffer + format.Node(&buf, pass.Fset, &ast.CallExpr{ + Fun: arg, + }) + fixes = append(fixes, analysis.SuggestedFix{ + Message: "Call the function", + TextEdits: []analysis.TextEdit{{ + Pos: arg.Pos(), + End: arg.End(), + NewText: buf.Bytes(), + }}, + }) + } + + pass.Report(analysis.Diagnostic{ + Pos: call.Pos(), + End: call.End(), + Message: fmt.Sprintf("sort.Slice's argument must be a slice; is called with %s", typ.String()), + SuggestedFixes: fixes, + }) + }) + return nil, nil +} diff --git a/vendor/golang.org/x/tools/go/analysis/passes/stdmethods/stdmethods.go b/vendor/golang.org/x/tools/go/analysis/passes/stdmethods/stdmethods.go new file mode 100644 index 000000000..64a28ac0b --- /dev/null +++ b/vendor/golang.org/x/tools/go/analysis/passes/stdmethods/stdmethods.go @@ -0,0 +1,204 @@ +// Copyright 2010 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package stdmethods defines an Analyzer that checks for misspellings +// in the signatures of methods similar to well-known interfaces. +package stdmethods + +import ( + "go/ast" + "go/types" + "strings" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/ast/inspector" +) + +const Doc = `check signature of methods of well-known interfaces + +Sometimes a type may be intended to satisfy an interface but may fail to +do so because of a mistake in its method signature. +For example, the result of this WriteTo method should be (int64, error), +not error, to satisfy io.WriterTo: + + type myWriterTo struct{...} + func (myWriterTo) WriteTo(w io.Writer) error { ... 
} + +This check ensures that each method whose name matches one of several +well-known interface methods from the standard library has the correct +signature for that interface. + +Checked method names include: + Format GobEncode GobDecode MarshalJSON MarshalXML + Peek ReadByte ReadFrom ReadRune Scan Seek + UnmarshalJSON UnreadByte UnreadRune WriteByte + WriteTo +` + +var Analyzer = &analysis.Analyzer{ + Name: "stdmethods", + Doc: Doc, + Requires: []*analysis.Analyzer{inspect.Analyzer}, + Run: run, +} + +// canonicalMethods lists the input and output types for Go methods +// that are checked using dynamic interface checks. Because the +// checks are dynamic, such methods would not cause a compile error +// if they have the wrong signature: instead the dynamic check would +// fail, sometimes mysteriously. If a method is found with a name listed +// here but not the input/output types listed here, vet complains. +// +// A few of the canonical methods have very common names. +// For example, a type might implement a Scan method that +// has nothing to do with fmt.Scanner, but we still want to check +// the methods that are intended to implement fmt.Scanner. +// To do that, the arguments that have a = prefix are treated as +// signals that the canonical meaning is intended: if a Scan +// method doesn't have a fmt.ScanState as its first argument, +// we let it go. But if it does have a fmt.ScanState, then the +// rest has to match. +var canonicalMethods = map[string]struct{ args, results []string }{ + "As": {[]string{"interface{}"}, []string{"bool"}}, // errors.As + // "Flush": {{}, {"error"}}, // http.Flusher and jpeg.writer conflict + "Format": {[]string{"=fmt.State", "rune"}, []string{}}, // fmt.Formatter + "GobDecode": {[]string{"[]byte"}, []string{"error"}}, // gob.GobDecoder + "GobEncode": {[]string{}, []string{"[]byte", "error"}}, // gob.GobEncoder + "Is": {[]string{"error"}, []string{"bool"}}, // errors.Is + "MarshalJSON": {[]string{}, []string{"[]byte", "error"}}, // json.Marshaler + "MarshalXML": {[]string{"*xml.Encoder", "xml.StartElement"}, []string{"error"}}, // xml.Marshaler + "ReadByte": {[]string{}, []string{"byte", "error"}}, // io.ByteReader + "ReadFrom": {[]string{"=io.Reader"}, []string{"int64", "error"}}, // io.ReaderFrom + "ReadRune": {[]string{}, []string{"rune", "int", "error"}}, // io.RuneReader + "Scan": {[]string{"=fmt.ScanState", "rune"}, []string{"error"}}, // fmt.Scanner + "Seek": {[]string{"=int64", "int"}, []string{"int64", "error"}}, // io.Seeker + "UnmarshalJSON": {[]string{"[]byte"}, []string{"error"}}, // json.Unmarshaler + "UnmarshalXML": {[]string{"*xml.Decoder", "xml.StartElement"}, []string{"error"}}, // xml.Unmarshaler + "UnreadByte": {[]string{}, []string{"error"}}, + "UnreadRune": {[]string{}, []string{"error"}}, + "Unwrap": {[]string{}, []string{"error"}}, // errors.Unwrap + "WriteByte": {[]string{"byte"}, []string{"error"}}, // jpeg.writer (matching bufio.Writer) + "WriteTo": {[]string{"=io.Writer"}, []string{"int64", "error"}}, // io.WriterTo +} + +func run(pass *analysis.Pass) (interface{}, error) { + inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + + nodeFilter := []ast.Node{ + (*ast.FuncDecl)(nil), + (*ast.InterfaceType)(nil), + } + inspect.Preorder(nodeFilter, func(n ast.Node) { + switch n := n.(type) { + case *ast.FuncDecl: + if n.Recv != nil { + canonicalMethod(pass, n.Name) + } + case *ast.InterfaceType: + for _, field := range n.Methods.List { + for _, id := range field.Names { + canonicalMethod(pass, id) + } + } + } + }) 
+ return nil, nil +} + +func canonicalMethod(pass *analysis.Pass, id *ast.Ident) { + // Expected input/output. + expect, ok := canonicalMethods[id.Name] + if !ok { + return + } + + // Actual input/output + sign := pass.TypesInfo.Defs[id].Type().(*types.Signature) + args := sign.Params() + results := sign.Results() + + // Special case: WriteTo with more than one argument, + // not trying at all to implement io.WriterTo, + // comes up often enough to skip. + if id.Name == "WriteTo" && args.Len() > 1 { + return + } + + // Special case: Is, As and Unwrap only apply when type + // implements error. + if id.Name == "Is" || id.Name == "As" || id.Name == "Unwrap" { + if recv := sign.Recv(); recv == nil || !implementsError(recv.Type()) { + return + } + } + + // Do the =s (if any) all match? + if !matchParams(pass, expect.args, args, "=") || !matchParams(pass, expect.results, results, "=") { + return + } + + // Everything must match. + if !matchParams(pass, expect.args, args, "") || !matchParams(pass, expect.results, results, "") { + expectFmt := id.Name + "(" + argjoin(expect.args) + ")" + if len(expect.results) == 1 { + expectFmt += " " + argjoin(expect.results) + } else if len(expect.results) > 1 { + expectFmt += " (" + argjoin(expect.results) + ")" + } + + actual := typeString(sign) + actual = strings.TrimPrefix(actual, "func") + actual = id.Name + actual + + pass.ReportRangef(id, "method %s should have signature %s", actual, expectFmt) + } +} + +func typeString(typ types.Type) string { + return types.TypeString(typ, (*types.Package).Name) +} + +func argjoin(x []string) string { + y := make([]string, len(x)) + for i, s := range x { + if s[0] == '=' { + s = s[1:] + } + y[i] = s + } + return strings.Join(y, ", ") +} + +// Does each type in expect with the given prefix match the corresponding type in actual? +func matchParams(pass *analysis.Pass, expect []string, actual *types.Tuple, prefix string) bool { + for i, x := range expect { + if !strings.HasPrefix(x, prefix) { + continue + } + if i >= actual.Len() { + return false + } + if !matchParamType(x, actual.At(i).Type()) { + return false + } + } + if prefix == "" && actual.Len() > len(expect) { + return false + } + return true +} + +// Does this one type match? +func matchParamType(expect string, actual types.Type) bool { + expect = strings.TrimPrefix(expect, "=") + // Overkill but easy. + return typeString(actual) == expect +} + +var errorType = types.Universe.Lookup("error").Type().Underlying().(*types.Interface) + +func implementsError(actual types.Type) bool { + return types.Implements(actual, errorType) +} diff --git a/vendor/golang.org/x/tools/go/analysis/passes/stringintconv/string.go b/vendor/golang.org/x/tools/go/analysis/passes/stringintconv/string.go new file mode 100644 index 000000000..7a005901e --- /dev/null +++ b/vendor/golang.org/x/tools/go/analysis/passes/stringintconv/string.go @@ -0,0 +1,126 @@ +// Copyright 2020 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package stringintconv defines an Analyzer that flags type conversions +// from integers to strings. +package stringintconv + +import ( + "fmt" + "go/ast" + "go/types" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/ast/inspector" +) + +const Doc = `check for string(int) conversions + +This checker flags conversions of the form string(x) where x is an integer +(but not byte or rune) type. 
Such conversions are discouraged because they +return the UTF-8 representation of the Unicode code point x, and not a decimal +string representation of x as one might expect. Furthermore, if x denotes an +invalid code point, the conversion cannot be statically rejected. + +For conversions that intend on using the code point, consider replacing them +with string(rune(x)). Otherwise, strconv.Itoa and its equivalents return the +string representation of the value in the desired base. +` + +var Analyzer = &analysis.Analyzer{ + Name: "stringintconv", + Doc: Doc, + Requires: []*analysis.Analyzer{inspect.Analyzer}, + Run: run, +} + +func typeName(typ types.Type) string { + if v, _ := typ.(interface{ Name() string }); v != nil { + return v.Name() + } + if v, _ := typ.(interface{ Obj() *types.TypeName }); v != nil { + return v.Obj().Name() + } + return "" +} + +func run(pass *analysis.Pass) (interface{}, error) { + inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + nodeFilter := []ast.Node{ + (*ast.CallExpr)(nil), + } + inspect.Preorder(nodeFilter, func(n ast.Node) { + call := n.(*ast.CallExpr) + + // Retrieve target type name. + var tname *types.TypeName + switch fun := call.Fun.(type) { + case *ast.Ident: + tname, _ = pass.TypesInfo.Uses[fun].(*types.TypeName) + case *ast.SelectorExpr: + tname, _ = pass.TypesInfo.Uses[fun.Sel].(*types.TypeName) + } + if tname == nil { + return + } + target := tname.Name() + + // Check that target type T in T(v) has an underlying type of string. + T, _ := tname.Type().Underlying().(*types.Basic) + if T == nil || T.Kind() != types.String { + return + } + if s := T.Name(); target != s { + target += " (" + s + ")" + } + + // Check that type V of v has an underlying integral type that is not byte or rune. + if len(call.Args) != 1 { + return + } + v := call.Args[0] + vtyp := pass.TypesInfo.TypeOf(v) + V, _ := vtyp.Underlying().(*types.Basic) + if V == nil || V.Info()&types.IsInteger == 0 { + return + } + switch V.Kind() { + case types.Byte, types.Rune, types.UntypedRune: + return + } + + // Retrieve source type name. + source := typeName(vtyp) + if source == "" { + return + } + if s := V.Name(); source != s { + source += " (" + s + ")" + } + diag := analysis.Diagnostic{ + Pos: n.Pos(), + Message: fmt.Sprintf("conversion from %s to %s yields a string of one rune, not a string of digits (did you mean fmt.Sprint(x)?)", source, target), + SuggestedFixes: []analysis.SuggestedFix{ + { + Message: "Did you mean to convert a rune to a string?", + TextEdits: []analysis.TextEdit{ + { + Pos: v.Pos(), + End: v.Pos(), + NewText: []byte("rune("), + }, + { + Pos: v.End(), + End: v.End(), + NewText: []byte(")"), + }, + }, + }, + }, + } + pass.Report(diag) + }) + return nil, nil +} diff --git a/vendor/golang.org/x/tools/go/analysis/passes/structtag/structtag.go b/vendor/golang.org/x/tools/go/analysis/passes/structtag/structtag.go new file mode 100644 index 000000000..f0b15051c --- /dev/null +++ b/vendor/golang.org/x/tools/go/analysis/passes/structtag/structtag.go @@ -0,0 +1,313 @@ +// Copyright 2010 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package structtag defines an Analyzer that checks struct field tags +// are well formed. 
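+// For example, the field tag `json:"name",xml:"name"` (a comma instead of a
+// space between the key:"value" pairs) is reported as incompatible with
+// reflect.StructTag.Get.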
+package structtag + +import ( + "errors" + "go/ast" + "go/token" + "go/types" + "path/filepath" + "reflect" + "strconv" + "strings" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/ast/inspector" +) + +const Doc = `check that struct field tags conform to reflect.StructTag.Get + +Also report certain struct tags (json, xml) used with unexported fields.` + +var Analyzer = &analysis.Analyzer{ + Name: "structtag", + Doc: Doc, + Requires: []*analysis.Analyzer{inspect.Analyzer}, + RunDespiteErrors: true, + Run: run, +} + +func run(pass *analysis.Pass) (interface{}, error) { + inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + + nodeFilter := []ast.Node{ + (*ast.StructType)(nil), + } + inspect.Preorder(nodeFilter, func(n ast.Node) { + styp, ok := pass.TypesInfo.Types[n.(*ast.StructType)].Type.(*types.Struct) + // Type information may be incomplete. + if !ok { + return + } + var seen namesSeen + for i := 0; i < styp.NumFields(); i++ { + field := styp.Field(i) + tag := styp.Tag(i) + checkCanonicalFieldTag(pass, field, tag, &seen) + } + }) + return nil, nil +} + +// namesSeen keeps track of encoding tags by their key, name, and nested level +// from the initial struct. The level is taken into account because equal +// encoding key names only conflict when at the same level; otherwise, the lower +// level shadows the higher level. +type namesSeen map[uniqueName]token.Pos + +type uniqueName struct { + key string // "xml" or "json" + name string // the encoding name + level int // anonymous struct nesting level +} + +func (s *namesSeen) Get(key, name string, level int) (token.Pos, bool) { + if *s == nil { + *s = make(map[uniqueName]token.Pos) + } + pos, ok := (*s)[uniqueName{key, name, level}] + return pos, ok +} + +func (s *namesSeen) Set(key, name string, level int, pos token.Pos) { + if *s == nil { + *s = make(map[uniqueName]token.Pos) + } + (*s)[uniqueName{key, name, level}] = pos +} + +var checkTagDups = []string{"json", "xml"} +var checkTagSpaces = map[string]bool{"json": true, "xml": true, "asn1": true} + +// checkCanonicalFieldTag checks a single struct field tag. +func checkCanonicalFieldTag(pass *analysis.Pass, field *types.Var, tag string, seen *namesSeen) { + switch pass.Pkg.Path() { + case "encoding/json", "encoding/xml": + // These packages know how to use their own APIs. + // Sometimes they are testing what happens to incorrect programs. + return + } + + for _, key := range checkTagDups { + checkTagDuplicates(pass, tag, key, field, field, seen, 1) + } + + if err := validateStructTag(tag); err != nil { + pass.Reportf(field.Pos(), "struct field tag %#q not compatible with reflect.StructTag.Get: %s", tag, err) + } + + // Check for use of json or xml tags with unexported fields. + + // Embedded struct. Nothing to do for now, but that + // may change, depending on what happens with issue 7363. + // TODO(adonovan): investigate, now that that issue is fixed. + if field.Anonymous() { + return + } + + if field.Exported() { + return + } + + for _, enc := range [...]string{"json", "xml"} { + switch reflect.StructTag(tag).Get(enc) { + // Ignore warning if the field not exported and the tag is marked as + // ignored. + case "", "-": + default: + pass.Reportf(field.Pos(), "struct field %s has %s tag but is not exported", field.Name(), enc) + return + } + } +} + +// checkTagDuplicates checks a single struct field tag to see if any tags are +// duplicated. 
nearest is the field that's closest to the field being checked, +// while still being part of the top-level struct type. +func checkTagDuplicates(pass *analysis.Pass, tag, key string, nearest, field *types.Var, seen *namesSeen, level int) { + val := reflect.StructTag(tag).Get(key) + if val == "-" { + // Ignored, even if the field is anonymous. + return + } + if val == "" || val[0] == ',' { + if !field.Anonymous() { + // Ignored if the field isn't anonymous. + return + } + typ, ok := field.Type().Underlying().(*types.Struct) + if !ok { + return + } + for i := 0; i < typ.NumFields(); i++ { + field := typ.Field(i) + if !field.Exported() { + continue + } + tag := typ.Tag(i) + checkTagDuplicates(pass, tag, key, nearest, field, seen, level+1) + } + return + } + if key == "xml" && field.Name() == "XMLName" { + // XMLName defines the XML element name of the struct being + // checked. That name cannot collide with element or attribute + // names defined on other fields of the struct. Vet does not have a + // check for untagged fields of type struct defining their own name + // by containing a field named XMLName; see issue 18256. + return + } + if i := strings.Index(val, ","); i >= 0 { + if key == "xml" { + // Use a separate namespace for XML attributes. + for _, opt := range strings.Split(val[i:], ",") { + if opt == "attr" { + key += " attribute" // Key is part of the error message. + break + } + } + } + val = val[:i] + } + if pos, ok := seen.Get(key, val, level); ok { + alsoPos := pass.Fset.Position(pos) + alsoPos.Column = 0 + + // Make the "also at" position relative to the current position, + // to ensure that all warnings are unambiguous and correct. For + // example, via anonymous struct fields, it's possible for the + // two fields to be in different packages and directories. + thisPos := pass.Fset.Position(field.Pos()) + rel, err := filepath.Rel(filepath.Dir(thisPos.Filename), alsoPos.Filename) + if err != nil { + // Possibly because the paths are relative; leave the + // filename alone. + } else { + alsoPos.Filename = rel + } + + pass.Reportf(nearest.Pos(), "struct field %s repeats %s tag %q also at %s", field.Name(), key, val, alsoPos) + } else { + seen.Set(key, val, level, field.Pos()) + } +} + +var ( + errTagSyntax = errors.New("bad syntax for struct tag pair") + errTagKeySyntax = errors.New("bad syntax for struct tag key") + errTagValueSyntax = errors.New("bad syntax for struct tag value") + errTagValueSpace = errors.New("suspicious space in struct tag value") + errTagSpace = errors.New("key:\"value\" pairs not separated by spaces") +) + +// validateStructTag parses the struct tag and returns an error if it is not +// in the canonical format, which is a space-separated list of key:"value" +// settings. The value may contain spaces. +func validateStructTag(tag string) error { + // This code is based on the StructTag.Get code in package reflect. + + n := 0 + for ; tag != ""; n++ { + if n > 0 && tag != "" && tag[0] != ' ' { + // More restrictive than reflect, but catches likely mistakes + // like `x:"foo",y:"bar"`, which parses as `x:"foo" ,y:"bar"` with second key ",y". + return errTagSpace + } + // Skip leading space. + i := 0 + for i < len(tag) && tag[i] == ' ' { + i++ + } + tag = tag[i:] + if tag == "" { + break + } + + // Scan to colon. A space, a quote or a control character is a syntax error. 
+ // Strictly speaking, control chars include the range [0x7f, 0x9f], not just + // [0x00, 0x1f], but in practice, we ignore the multi-byte control characters + // as it is simpler to inspect the tag's bytes than the tag's runes. + i = 0 + for i < len(tag) && tag[i] > ' ' && tag[i] != ':' && tag[i] != '"' && tag[i] != 0x7f { + i++ + } + if i == 0 { + return errTagKeySyntax + } + if i+1 >= len(tag) || tag[i] != ':' { + return errTagSyntax + } + if tag[i+1] != '"' { + return errTagValueSyntax + } + key := tag[:i] + tag = tag[i+1:] + + // Scan quoted string to find value. + i = 1 + for i < len(tag) && tag[i] != '"' { + if tag[i] == '\\' { + i++ + } + i++ + } + if i >= len(tag) { + return errTagValueSyntax + } + qvalue := tag[:i+1] + tag = tag[i+1:] + + value, err := strconv.Unquote(qvalue) + if err != nil { + return errTagValueSyntax + } + + if !checkTagSpaces[key] { + continue + } + + switch key { + case "xml": + // If the first or last character in the XML tag is a space, it is + // suspicious. + if strings.Trim(value, " ") != value { + return errTagValueSpace + } + + // If there are multiple spaces, they are suspicious. + if strings.Count(value, " ") > 1 { + return errTagValueSpace + } + + // If there is no comma, skip the rest of the checks. + comma := strings.IndexRune(value, ',') + if comma < 0 { + continue + } + + // If the character before a comma is a space, this is suspicious. + if comma > 0 && value[comma-1] == ' ' { + return errTagValueSpace + } + value = value[comma+1:] + case "json": + // JSON allows using spaces in the name, so skip it. + comma := strings.IndexRune(value, ',') + if comma < 0 { + continue + } + value = value[comma+1:] + } + + if strings.IndexByte(value, ' ') >= 0 { + return errTagValueSpace + } + } + return nil +} diff --git a/vendor/golang.org/x/tools/go/analysis/passes/testinggoroutine/testinggoroutine.go b/vendor/golang.org/x/tools/go/analysis/passes/testinggoroutine/testinggoroutine.go new file mode 100644 index 000000000..d2b9a5640 --- /dev/null +++ b/vendor/golang.org/x/tools/go/analysis/passes/testinggoroutine/testinggoroutine.go @@ -0,0 +1,154 @@ +// Copyright 2020 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package testinggoroutine + +import ( + "go/ast" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/analysis/passes/internal/analysisutil" + "golang.org/x/tools/go/ast/inspector" +) + +const Doc = `report calls to (*testing.T).Fatal from goroutines started by a test. + +Functions that abruptly terminate a test, such as the Fatal, Fatalf, FailNow, and +Skip{,f,Now} methods of *testing.T, must be called from the test goroutine itself. +This checker detects calls to these functions that occur within a goroutine +started by the test. 
For example: + +func TestFoo(t *testing.T) { + go func() { + t.Fatal("oops") // error: (*T).Fatal called from non-test goroutine + }() +} +` + +var Analyzer = &analysis.Analyzer{ + Name: "testinggoroutine", + Doc: Doc, + Requires: []*analysis.Analyzer{inspect.Analyzer}, + Run: run, +} + +var forbidden = map[string]bool{ + "FailNow": true, + "Fatal": true, + "Fatalf": true, + "Skip": true, + "Skipf": true, + "SkipNow": true, +} + +func run(pass *analysis.Pass) (interface{}, error) { + inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + + if !analysisutil.Imports(pass.Pkg, "testing") { + return nil, nil + } + + // Filter out anything that isn't a function declaration. + onlyFuncs := []ast.Node{ + (*ast.FuncDecl)(nil), + } + + inspect.Nodes(onlyFuncs, func(node ast.Node, push bool) bool { + fnDecl, ok := node.(*ast.FuncDecl) + if !ok { + return false + } + + if !hasBenchmarkOrTestParams(fnDecl) { + return false + } + + // Now traverse the benchmark/test's body and check that none of the + // forbidden methods are invoked in the goroutines within the body. + ast.Inspect(fnDecl, func(n ast.Node) bool { + goStmt, ok := n.(*ast.GoStmt) + if !ok { + return true + } + + checkGoStmt(pass, goStmt) + + // No need to further traverse the GoStmt since right + // above we manually traversed it in the ast.Inspect(goStmt, ...) + return false + }) + + return false + }) + + return nil, nil +} + +func hasBenchmarkOrTestParams(fnDecl *ast.FuncDecl) bool { + // Check that the function's arguments include "*testing.T" or "*testing.B". + params := fnDecl.Type.Params.List + + for _, param := range params { + if _, ok := typeIsTestingDotTOrB(param.Type); ok { + return true + } + } + + return false +} + +func typeIsTestingDotTOrB(expr ast.Expr) (string, bool) { + starExpr, ok := expr.(*ast.StarExpr) + if !ok { + return "", false + } + selExpr, ok := starExpr.X.(*ast.SelectorExpr) + if !ok { + return "", false + } + + varPkg := selExpr.X.(*ast.Ident) + if varPkg.Name != "testing" { + return "", false + } + + varTypeName := selExpr.Sel.Name + ok = varTypeName == "B" || varTypeName == "T" + return varTypeName, ok +} + +// checkGoStmt traverses the goroutine and checks for the +// use of the forbidden *testing.(B, T) methods. +func checkGoStmt(pass *analysis.Pass, goStmt *ast.GoStmt) { + // Otherwise examine the goroutine to check for the forbidden methods. + ast.Inspect(goStmt, func(n ast.Node) bool { + selExpr, ok := n.(*ast.SelectorExpr) + if !ok { + return true + } + + _, bad := forbidden[selExpr.Sel.Name] + if !bad { + return true + } + + // Now filter out false positives by the import-path/type. + ident, ok := selExpr.X.(*ast.Ident) + if !ok { + return true + } + if ident.Obj == nil || ident.Obj.Decl == nil { + return true + } + field, ok := ident.Obj.Decl.(*ast.Field) + if !ok { + return true + } + if typeName, ok := typeIsTestingDotTOrB(field.Type); ok { + pass.ReportRangef(selExpr, "call to (*%s).%s from a non-test goroutine", typeName, selExpr.Sel) + } + return true + }) +} diff --git a/vendor/golang.org/x/tools/go/analysis/passes/tests/tests.go b/vendor/golang.org/x/tools/go/analysis/passes/tests/tests.go new file mode 100644 index 000000000..823227618 --- /dev/null +++ b/vendor/golang.org/x/tools/go/analysis/passes/tests/tests.go @@ -0,0 +1,188 @@ +// Copyright 2015 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +// Package tests defines an Analyzer that checks for common mistaken +// usages of tests and examples. +package tests + +import ( + "go/ast" + "go/types" + "strings" + "unicode" + "unicode/utf8" + + "golang.org/x/tools/go/analysis" +) + +const Doc = `check for common mistaken usages of tests and examples + +The tests checker walks Test, Benchmark and Example functions checking +malformed names, wrong signatures and examples documenting non-existent +identifiers. + +Please see the documentation for package testing in golang.org/pkg/testing +for the conventions that are enforced for Tests, Benchmarks, and Examples.` + +var Analyzer = &analysis.Analyzer{ + Name: "tests", + Doc: Doc, + Run: run, +} + +func run(pass *analysis.Pass) (interface{}, error) { + for _, f := range pass.Files { + if !strings.HasSuffix(pass.Fset.File(f.Pos()).Name(), "_test.go") { + continue + } + for _, decl := range f.Decls { + fn, ok := decl.(*ast.FuncDecl) + if !ok || fn.Recv != nil { + // Ignore non-functions or functions with receivers. + continue + } + + switch { + case strings.HasPrefix(fn.Name.Name, "Example"): + checkExample(pass, fn) + case strings.HasPrefix(fn.Name.Name, "Test"): + checkTest(pass, fn, "Test") + case strings.HasPrefix(fn.Name.Name, "Benchmark"): + checkTest(pass, fn, "Benchmark") + } + } + } + return nil, nil +} + +func isExampleSuffix(s string) bool { + r, size := utf8.DecodeRuneInString(s) + return size > 0 && unicode.IsLower(r) +} + +func isTestSuffix(name string) bool { + if len(name) == 0 { + // "Test" is ok. + return true + } + r, _ := utf8.DecodeRuneInString(name) + return !unicode.IsLower(r) +} + +func isTestParam(typ ast.Expr, wantType string) bool { + ptr, ok := typ.(*ast.StarExpr) + if !ok { + // Not a pointer. + return false + } + // No easy way of making sure it's a *testing.T or *testing.B: + // ensure the name of the type matches. + if name, ok := ptr.X.(*ast.Ident); ok { + return name.Name == wantType + } + if sel, ok := ptr.X.(*ast.SelectorExpr); ok { + return sel.Sel.Name == wantType + } + return false +} + +func lookup(pkg *types.Package, name string) []types.Object { + if o := pkg.Scope().Lookup(name); o != nil { + return []types.Object{o} + } + + var ret []types.Object + // Search through the imports to see if any of them define name. + // It's hard to tell in general which package is being tested, so + // for the purposes of the analysis, allow the object to appear + // in any of the imports. This guarantees there are no false positives + // because the example needs to use the object so it must be defined + // in the package or one if its imports. On the other hand, false + // negatives are possible, but should be rare. + for _, imp := range pkg.Imports() { + if obj := imp.Scope().Lookup(name); obj != nil { + ret = append(ret, obj) + } + } + return ret +} + +func checkExample(pass *analysis.Pass, fn *ast.FuncDecl) { + fnName := fn.Name.Name + if params := fn.Type.Params; len(params.List) != 0 { + pass.Reportf(fn.Pos(), "%s should be niladic", fnName) + } + if results := fn.Type.Results; results != nil && len(results.List) != 0 { + pass.Reportf(fn.Pos(), "%s should return nothing", fnName) + } + + if fnName == "Example" { + // Nothing more to do. + return + } + + var ( + exName = strings.TrimPrefix(fnName, "Example") + elems = strings.SplitN(exName, "_", 3) + ident = elems[0] + objs = lookup(pass.Pkg, ident) + ) + if ident != "" && len(objs) == 0 { + // Check ExampleFoo and ExampleBadFoo. 
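+ // For example, ExampleFoo is reported when neither the package nor any of
+ // its imports declare an identifier named Foo.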
+ pass.Reportf(fn.Pos(), "%s refers to unknown identifier: %s", fnName, ident) + // Abort since obj is absent and no subsequent checks can be performed. + return + } + if len(elems) < 2 { + // Nothing more to do. + return + } + + if ident == "" { + // Check Example_suffix and Example_BadSuffix. + if residual := strings.TrimPrefix(exName, "_"); !isExampleSuffix(residual) { + pass.Reportf(fn.Pos(), "%s has malformed example suffix: %s", fnName, residual) + } + return + } + + mmbr := elems[1] + if !isExampleSuffix(mmbr) { + // Check ExampleFoo_Method and ExampleFoo_BadMethod. + found := false + // Check if Foo.Method exists in this package or its imports. + for _, obj := range objs { + if obj, _, _ := types.LookupFieldOrMethod(obj.Type(), true, obj.Pkg(), mmbr); obj != nil { + found = true + break + } + } + if !found { + pass.Reportf(fn.Pos(), "%s refers to unknown field or method: %s.%s", fnName, ident, mmbr) + } + } + if len(elems) == 3 && !isExampleSuffix(elems[2]) { + // Check ExampleFoo_Method_suffix and ExampleFoo_Method_Badsuffix. + pass.Reportf(fn.Pos(), "%s has malformed example suffix: %s", fnName, elems[2]) + } +} + +func checkTest(pass *analysis.Pass, fn *ast.FuncDecl, prefix string) { + // Want functions with 0 results and 1 parameter. + if fn.Type.Results != nil && len(fn.Type.Results.List) > 0 || + fn.Type.Params == nil || + len(fn.Type.Params.List) != 1 || + len(fn.Type.Params.List[0].Names) > 1 { + return + } + + // The param must look like a *testing.T or *testing.B. + if !isTestParam(fn.Type.Params.List[0].Type, prefix[:1]) { + return + } + + if !isTestSuffix(fn.Name.Name[len(prefix):]) { + pass.Reportf(fn.Pos(), "%s has malformed name: first letter after '%s' must not be lowercase", fn.Name.Name, prefix) + } +} diff --git a/vendor/golang.org/x/tools/go/analysis/passes/unmarshal/unmarshal.go b/vendor/golang.org/x/tools/go/analysis/passes/unmarshal/unmarshal.go new file mode 100644 index 000000000..92b37caff --- /dev/null +++ b/vendor/golang.org/x/tools/go/analysis/passes/unmarshal/unmarshal.go @@ -0,0 +1,100 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// The unmarshal package defines an Analyzer that checks for passing +// non-pointer or non-interface types to unmarshal and decode functions. +package unmarshal + +import ( + "go/ast" + "go/types" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/ast/inspector" + "golang.org/x/tools/go/types/typeutil" +) + +const Doc = `report passing non-pointer or non-interface values to unmarshal + +The unmarshal analysis reports calls to functions such as json.Unmarshal +in which the argument type is not a pointer or an interface.` + +var Analyzer = &analysis.Analyzer{ + Name: "unmarshal", + Doc: Doc, + Requires: []*analysis.Analyzer{inspect.Analyzer}, + Run: run, +} + +func run(pass *analysis.Pass) (interface{}, error) { + switch pass.Pkg.Path() { + case "encoding/gob", "encoding/json", "encoding/xml", "encoding/asn1": + // These packages know how to use their own APIs. + // Sometimes they are testing what happens to incorrect programs. 
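+ // (In all other packages, a call such as json.Unmarshal(data, v) where v
+ // is neither a pointer nor an interface is reported by the checks below.)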
+ return nil, nil + } + + inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + + nodeFilter := []ast.Node{ + (*ast.CallExpr)(nil), + } + inspect.Preorder(nodeFilter, func(n ast.Node) { + call := n.(*ast.CallExpr) + fn := typeutil.StaticCallee(pass.TypesInfo, call) + if fn == nil { + return // not a static call + } + + // Classify the callee (without allocating memory). + argidx := -1 + recv := fn.Type().(*types.Signature).Recv() + if fn.Name() == "Unmarshal" && recv == nil { + // "encoding/json".Unmarshal + // "encoding/xml".Unmarshal + // "encoding/asn1".Unmarshal + switch fn.Pkg().Path() { + case "encoding/json", "encoding/xml", "encoding/asn1": + argidx = 1 // func([]byte, interface{}) + } + } else if fn.Name() == "Decode" && recv != nil { + // (*"encoding/json".Decoder).Decode + // (* "encoding/gob".Decoder).Decode + // (* "encoding/xml".Decoder).Decode + t := recv.Type() + if ptr, ok := t.(*types.Pointer); ok { + t = ptr.Elem() + } + tname := t.(*types.Named).Obj() + if tname.Name() == "Decoder" { + switch tname.Pkg().Path() { + case "encoding/json", "encoding/xml", "encoding/gob": + argidx = 0 // func(interface{}) + } + } + } + if argidx < 0 { + return // not a function we are interested in + } + + if len(call.Args) < argidx+1 { + return // not enough arguments, e.g. called with return values of another function + } + + t := pass.TypesInfo.Types[call.Args[argidx]].Type + switch t.Underlying().(type) { + case *types.Pointer, *types.Interface: + return + } + + switch argidx { + case 0: + pass.Reportf(call.Lparen, "call of %s passes non-pointer", fn.Name()) + case 1: + pass.Reportf(call.Lparen, "call of %s passes non-pointer as second argument", fn.Name()) + } + }) + return nil, nil +} diff --git a/vendor/golang.org/x/tools/go/analysis/passes/unreachable/unreachable.go b/vendor/golang.org/x/tools/go/analysis/passes/unreachable/unreachable.go new file mode 100644 index 000000000..90896dd1b --- /dev/null +++ b/vendor/golang.org/x/tools/go/analysis/passes/unreachable/unreachable.go @@ -0,0 +1,325 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package unreachable defines an Analyzer that checks for unreachable code. +package unreachable + +// TODO(adonovan): use the new cfg package, which is more precise. 
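+
+// For illustration (a hypothetical snippet, not part of this file), the
+// analyzer reports the println call below because it can never execute:
+//
+//	func f() int {
+//		return 1
+//		println("never reached") // unreachable code
+//	}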
+ +import ( + "go/ast" + "go/token" + "log" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/ast/inspector" +) + +const Doc = `check for unreachable code + +The unreachable analyzer finds statements that execution can never reach +because they are preceded by an return statement, a call to panic, an +infinite loop, or similar constructs.` + +var Analyzer = &analysis.Analyzer{ + Name: "unreachable", + Doc: Doc, + Requires: []*analysis.Analyzer{inspect.Analyzer}, + RunDespiteErrors: true, + Run: run, +} + +func run(pass *analysis.Pass) (interface{}, error) { + inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + + nodeFilter := []ast.Node{ + (*ast.FuncDecl)(nil), + (*ast.FuncLit)(nil), + } + inspect.Preorder(nodeFilter, func(n ast.Node) { + var body *ast.BlockStmt + switch n := n.(type) { + case *ast.FuncDecl: + body = n.Body + case *ast.FuncLit: + body = n.Body + } + if body == nil { + return + } + d := &deadState{ + pass: pass, + hasBreak: make(map[ast.Stmt]bool), + hasGoto: make(map[string]bool), + labels: make(map[string]ast.Stmt), + } + d.findLabels(body) + d.reachable = true + d.findDead(body) + }) + return nil, nil +} + +type deadState struct { + pass *analysis.Pass + hasBreak map[ast.Stmt]bool + hasGoto map[string]bool + labels map[string]ast.Stmt + breakTarget ast.Stmt + + reachable bool +} + +// findLabels gathers information about the labels defined and used by stmt +// and about which statements break, whether a label is involved or not. +func (d *deadState) findLabels(stmt ast.Stmt) { + switch x := stmt.(type) { + default: + log.Fatalf("%s: internal error in findLabels: unexpected statement %T", d.pass.Fset.Position(x.Pos()), x) + + case *ast.AssignStmt, + *ast.BadStmt, + *ast.DeclStmt, + *ast.DeferStmt, + *ast.EmptyStmt, + *ast.ExprStmt, + *ast.GoStmt, + *ast.IncDecStmt, + *ast.ReturnStmt, + *ast.SendStmt: + // no statements inside + + case *ast.BlockStmt: + for _, stmt := range x.List { + d.findLabels(stmt) + } + + case *ast.BranchStmt: + switch x.Tok { + case token.GOTO: + if x.Label != nil { + d.hasGoto[x.Label.Name] = true + } + + case token.BREAK: + stmt := d.breakTarget + if x.Label != nil { + stmt = d.labels[x.Label.Name] + } + if stmt != nil { + d.hasBreak[stmt] = true + } + } + + case *ast.IfStmt: + d.findLabels(x.Body) + if x.Else != nil { + d.findLabels(x.Else) + } + + case *ast.LabeledStmt: + d.labels[x.Label.Name] = x.Stmt + d.findLabels(x.Stmt) + + // These cases are all the same, but the x.Body only works + // when the specific type of x is known, so the cases cannot + // be merged. + case *ast.ForStmt: + outer := d.breakTarget + d.breakTarget = x + d.findLabels(x.Body) + d.breakTarget = outer + + case *ast.RangeStmt: + outer := d.breakTarget + d.breakTarget = x + d.findLabels(x.Body) + d.breakTarget = outer + + case *ast.SelectStmt: + outer := d.breakTarget + d.breakTarget = x + d.findLabels(x.Body) + d.breakTarget = outer + + case *ast.SwitchStmt: + outer := d.breakTarget + d.breakTarget = x + d.findLabels(x.Body) + d.breakTarget = outer + + case *ast.TypeSwitchStmt: + outer := d.breakTarget + d.breakTarget = x + d.findLabels(x.Body) + d.breakTarget = outer + + case *ast.CommClause: + for _, stmt := range x.Body { + d.findLabels(stmt) + } + + case *ast.CaseClause: + for _, stmt := range x.Body { + d.findLabels(stmt) + } + } +} + +// findDead walks the statement looking for dead code. +// If d.reachable is false on entry, stmt itself is dead. 
+// When findDead returns, d.reachable tells whether the +// statement following stmt is reachable. +func (d *deadState) findDead(stmt ast.Stmt) { + // Is this a labeled goto target? + // If so, assume it is reachable due to the goto. + // This is slightly conservative, in that we don't + // check that the goto is reachable, so + // L: goto L + // will not provoke a warning. + // But it's good enough. + if x, isLabel := stmt.(*ast.LabeledStmt); isLabel && d.hasGoto[x.Label.Name] { + d.reachable = true + } + + if !d.reachable { + switch stmt.(type) { + case *ast.EmptyStmt: + // do not warn about unreachable empty statements + default: + d.pass.Report(analysis.Diagnostic{ + Pos: stmt.Pos(), + End: stmt.End(), + Message: "unreachable code", + SuggestedFixes: []analysis.SuggestedFix{{ + Message: "Remove", + TextEdits: []analysis.TextEdit{{ + Pos: stmt.Pos(), + End: stmt.End(), + }}, + }}, + }) + d.reachable = true // silence error about next statement + } + } + + switch x := stmt.(type) { + default: + log.Fatalf("%s: internal error in findDead: unexpected statement %T", d.pass.Fset.Position(x.Pos()), x) + + case *ast.AssignStmt, + *ast.BadStmt, + *ast.DeclStmt, + *ast.DeferStmt, + *ast.EmptyStmt, + *ast.GoStmt, + *ast.IncDecStmt, + *ast.SendStmt: + // no control flow + + case *ast.BlockStmt: + for _, stmt := range x.List { + d.findDead(stmt) + } + + case *ast.BranchStmt: + switch x.Tok { + case token.BREAK, token.GOTO, token.FALLTHROUGH: + d.reachable = false + case token.CONTINUE: + // NOTE: We accept "continue" statements as terminating. + // They are not necessary in the spec definition of terminating, + // because a continue statement cannot be the final statement + // before a return. But for the more general problem of syntactically + // identifying dead code, continue redirects control flow just + // like the other terminating statements. + d.reachable = false + } + + case *ast.ExprStmt: + // Call to panic? + call, ok := x.X.(*ast.CallExpr) + if ok { + name, ok := call.Fun.(*ast.Ident) + if ok && name.Name == "panic" && name.Obj == nil { + d.reachable = false + } + } + + case *ast.ForStmt: + d.findDead(x.Body) + d.reachable = x.Cond != nil || d.hasBreak[x] + + case *ast.IfStmt: + d.findDead(x.Body) + if x.Else != nil { + r := d.reachable + d.reachable = true + d.findDead(x.Else) + d.reachable = d.reachable || r + } else { + // might not have executed if statement + d.reachable = true + } + + case *ast.LabeledStmt: + d.findDead(x.Stmt) + + case *ast.RangeStmt: + d.findDead(x.Body) + d.reachable = true + + case *ast.ReturnStmt: + d.reachable = false + + case *ast.SelectStmt: + // NOTE: Unlike switch and type switch below, we don't care + // whether a select has a default, because a select without a + // default blocks until one of the cases can run. That's different + // from a switch without a default, which behaves like it has + // a default with an empty body. 
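+		// For example (illustrative, not from this file), code following
+		//
+		//	switch x {
+		//	case 1:
+		//		return
+		//	}
+		//
+		// is considered reachable because there is no default clause,
+		// whereas a select whose every case ends in return leaves the
+		// following statement unreachable.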
+ anyReachable := false + for _, comm := range x.Body.List { + d.reachable = true + for _, stmt := range comm.(*ast.CommClause).Body { + d.findDead(stmt) + } + anyReachable = anyReachable || d.reachable + } + d.reachable = anyReachable || d.hasBreak[x] + + case *ast.SwitchStmt: + anyReachable := false + hasDefault := false + for _, cas := range x.Body.List { + cc := cas.(*ast.CaseClause) + if cc.List == nil { + hasDefault = true + } + d.reachable = true + for _, stmt := range cc.Body { + d.findDead(stmt) + } + anyReachable = anyReachable || d.reachable + } + d.reachable = anyReachable || d.hasBreak[x] || !hasDefault + + case *ast.TypeSwitchStmt: + anyReachable := false + hasDefault := false + for _, cas := range x.Body.List { + cc := cas.(*ast.CaseClause) + if cc.List == nil { + hasDefault = true + } + d.reachable = true + for _, stmt := range cc.Body { + d.findDead(stmt) + } + anyReachable = anyReachable || d.reachable + } + d.reachable = anyReachable || d.hasBreak[x] || !hasDefault + } +} diff --git a/vendor/golang.org/x/tools/go/analysis/passes/unsafeptr/unsafeptr.go b/vendor/golang.org/x/tools/go/analysis/passes/unsafeptr/unsafeptr.go new file mode 100644 index 000000000..ed86e5ebf --- /dev/null +++ b/vendor/golang.org/x/tools/go/analysis/passes/unsafeptr/unsafeptr.go @@ -0,0 +1,168 @@ +// Copyright 2014 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package unsafeptr defines an Analyzer that checks for invalid +// conversions of uintptr to unsafe.Pointer. +package unsafeptr + +import ( + "go/ast" + "go/token" + "go/types" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/analysis/passes/internal/analysisutil" + "golang.org/x/tools/go/ast/inspector" +) + +const Doc = `check for invalid conversions of uintptr to unsafe.Pointer + +The unsafeptr analyzer reports likely incorrect uses of unsafe.Pointer +to convert integers to pointers. A conversion from uintptr to +unsafe.Pointer is invalid if it implies that there is a uintptr-typed +word in memory that holds a pointer value, because that word will be +invisible to stack copying and to the garbage collector.` + +var Analyzer = &analysis.Analyzer{ + Name: "unsafeptr", + Doc: Doc, + Requires: []*analysis.Analyzer{inspect.Analyzer}, + Run: run, +} + +func run(pass *analysis.Pass) (interface{}, error) { + inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + + nodeFilter := []ast.Node{ + (*ast.CallExpr)(nil), + (*ast.StarExpr)(nil), + (*ast.UnaryExpr)(nil), + } + inspect.Preorder(nodeFilter, func(n ast.Node) { + switch x := n.(type) { + case *ast.CallExpr: + if len(x.Args) == 1 && + hasBasicType(pass.TypesInfo, x.Fun, types.UnsafePointer) && + hasBasicType(pass.TypesInfo, x.Args[0], types.Uintptr) && + !isSafeUintptr(pass.TypesInfo, x.Args[0]) { + pass.ReportRangef(x, "possible misuse of unsafe.Pointer") + } + case *ast.StarExpr: + if t := pass.TypesInfo.Types[x].Type; isReflectHeader(t) { + pass.ReportRangef(x, "possible misuse of %s", t) + } + case *ast.UnaryExpr: + if x.Op != token.AND { + return + } + if t := pass.TypesInfo.Types[x.X].Type; isReflectHeader(t) { + pass.ReportRangef(x, "possible misuse of %s", t) + } + } + }) + return nil, nil +} + +// isSafeUintptr reports whether x - already known to be a uintptr - +// is safe to convert to unsafe.Pointer. 
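+// For example (illustrative, not part of this file), the second conversion
+// below is reported because the uintptr is stored in a variable first:
+//
+//	u := uintptr(unsafe.Pointer(p))
+//	q := unsafe.Pointer(u) // possible misuse of unsafe.Pointer
+//
+// whereas unsafe.Pointer(uintptr(unsafe.Pointer(p)) + offset) is accepted
+// as pointer arithmetic under rule (3).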
+func isSafeUintptr(info *types.Info, x ast.Expr) bool { + // Check unsafe.Pointer safety rules according to + // https://golang.org/pkg/unsafe/#Pointer. + + switch x := analysisutil.Unparen(x).(type) { + case *ast.SelectorExpr: + // "(6) Conversion of a reflect.SliceHeader or + // reflect.StringHeader Data field to or from Pointer." + if x.Sel.Name != "Data" { + break + } + // reflect.SliceHeader and reflect.StringHeader are okay, + // but only if they are pointing at a real slice or string. + // It's not okay to do: + // var x SliceHeader + // x.Data = uintptr(unsafe.Pointer(...)) + // ... use x ... + // p := unsafe.Pointer(x.Data) + // because in the middle the garbage collector doesn't + // see x.Data as a pointer and so x.Data may be dangling + // by the time we get to the conversion at the end. + // For now approximate by saying that *Header is okay + // but Header is not. + pt, ok := info.Types[x.X].Type.(*types.Pointer) + if ok && isReflectHeader(pt.Elem()) { + return true + } + + case *ast.CallExpr: + // "(5) Conversion of the result of reflect.Value.Pointer or + // reflect.Value.UnsafeAddr from uintptr to Pointer." + if len(x.Args) != 0 { + break + } + sel, ok := x.Fun.(*ast.SelectorExpr) + if !ok { + break + } + switch sel.Sel.Name { + case "Pointer", "UnsafeAddr": + t, ok := info.Types[sel.X].Type.(*types.Named) + if ok && t.Obj().Pkg().Path() == "reflect" && t.Obj().Name() == "Value" { + return true + } + } + } + + // "(3) Conversion of a Pointer to a uintptr and back, with arithmetic." + return isSafeArith(info, x) +} + +// isSafeArith reports whether x is a pointer arithmetic expression that is safe +// to convert to unsafe.Pointer. +func isSafeArith(info *types.Info, x ast.Expr) bool { + switch x := analysisutil.Unparen(x).(type) { + case *ast.CallExpr: + // Base case: initial conversion from unsafe.Pointer to uintptr. + return len(x.Args) == 1 && + hasBasicType(info, x.Fun, types.Uintptr) && + hasBasicType(info, x.Args[0], types.UnsafePointer) + + case *ast.BinaryExpr: + // "It is valid both to add and to subtract offsets from a + // pointer in this way. It is also valid to use &^ to round + // pointers, usually for alignment." + switch x.Op { + case token.ADD, token.SUB, token.AND_NOT: + // TODO(mdempsky): Match compiler + // semantics. ADD allows a pointer on either + // side; SUB and AND_NOT don't care about RHS. + return isSafeArith(info, x.X) && !isSafeArith(info, x.Y) + } + } + + return false +} + +// hasBasicType reports whether x's type is a types.Basic with the given kind. +func hasBasicType(info *types.Info, x ast.Expr, kind types.BasicKind) bool { + t := info.Types[x].Type + if t != nil { + t = t.Underlying() + } + b, ok := t.(*types.Basic) + return ok && b.Kind() == kind +} + +// isReflectHeader reports whether t is reflect.SliceHeader or reflect.StringHeader. +func isReflectHeader(t types.Type) bool { + if named, ok := t.(*types.Named); ok { + if obj := named.Obj(); obj.Pkg() != nil && obj.Pkg().Path() == "reflect" { + switch obj.Name() { + case "SliceHeader", "StringHeader": + return true + } + } + } + return false +} diff --git a/vendor/golang.org/x/tools/go/analysis/passes/unusedresult/unusedresult.go b/vendor/golang.org/x/tools/go/analysis/passes/unusedresult/unusedresult.go new file mode 100644 index 000000000..bececee7e --- /dev/null +++ b/vendor/golang.org/x/tools/go/analysis/passes/unusedresult/unusedresult.go @@ -0,0 +1,131 @@ +// Copyright 2015 The Go Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package unusedresult defines an analyzer that checks for unused +// results of calls to certain pure functions. +package unusedresult + +import ( + "go/ast" + "go/token" + "go/types" + "sort" + "strings" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/analysis/passes/internal/analysisutil" + "golang.org/x/tools/go/ast/inspector" +) + +// TODO(adonovan): make this analysis modular: export a mustUseResult +// fact for each function that tail-calls one of the functions that we +// check, and check those functions too. + +const Doc = `check for unused results of calls to some functions + +Some functions like fmt.Errorf return a result and have no side effects, +so it is always a mistake to discard the result. This analyzer reports +calls to certain functions in which the result of the call is ignored. + +The set of functions may be controlled using flags.` + +var Analyzer = &analysis.Analyzer{ + Name: "unusedresult", + Doc: Doc, + Requires: []*analysis.Analyzer{inspect.Analyzer}, + Run: run, +} + +// flags +var funcs, stringMethods stringSetFlag + +func init() { + // TODO(adonovan): provide a comment syntax to allow users to + // add their functions to this set using facts. + funcs.Set("errors.New,fmt.Errorf,fmt.Sprintf,fmt.Sprint,sort.Reverse,context.WithValue,context.WithCancel,context.WithDeadline,context.WithTimeout") + Analyzer.Flags.Var(&funcs, "funcs", + "comma-separated list of functions whose results must be used") + + stringMethods.Set("Error,String") + Analyzer.Flags.Var(&stringMethods, "stringmethods", + "comma-separated list of names of methods of type func() string whose results must be used") +} + +func run(pass *analysis.Pass) (interface{}, error) { + inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + + nodeFilter := []ast.Node{ + (*ast.ExprStmt)(nil), + } + inspect.Preorder(nodeFilter, func(n ast.Node) { + call, ok := analysisutil.Unparen(n.(*ast.ExprStmt).X).(*ast.CallExpr) + if !ok { + return // not a call statement + } + fun := analysisutil.Unparen(call.Fun) + + if pass.TypesInfo.Types[fun].IsType() { + return // a conversion, not a call + } + + selector, ok := fun.(*ast.SelectorExpr) + if !ok { + return // neither a method call nor a qualified ident + } + + sel, ok := pass.TypesInfo.Selections[selector] + if ok && sel.Kind() == types.MethodVal { + // method (e.g. foo.String()) + obj := sel.Obj().(*types.Func) + sig := sel.Type().(*types.Signature) + if types.Identical(sig, sigNoArgsStringResult) { + if stringMethods[obj.Name()] { + pass.Reportf(call.Lparen, "result of (%s).%s call not used", + sig.Recv().Type(), obj.Name()) + } + } + } else if !ok { + // package-qualified function (e.g. fmt.Errorf) + obj := pass.TypesInfo.Uses[selector.Sel] + if obj, ok := obj.(*types.Func); ok { + qname := obj.Pkg().Path() + "." 
+ obj.Name() + if funcs[qname] { + pass.Reportf(call.Lparen, "result of %v call not used", qname) + } + } + } + }) + return nil, nil +} + +// func() string +var sigNoArgsStringResult = types.NewSignature(nil, nil, + types.NewTuple(types.NewVar(token.NoPos, nil, "", types.Typ[types.String])), + false) + +type stringSetFlag map[string]bool + +func (ss *stringSetFlag) String() string { + var items []string + for item := range *ss { + items = append(items, item) + } + sort.Strings(items) + return strings.Join(items, ",") +} + +func (ss *stringSetFlag) Set(s string) error { + m := make(map[string]bool) // clobber previous value + if s != "" { + for _, name := range strings.Split(s, ",") { + if name == "" { + continue // TODO: report error? proceed? + } + m[name] = true + } + } + *ss = m + return nil +} diff --git a/vendor/golang.org/x/tools/go/analysis/passes/unusedwrite/unusedwrite.go b/vendor/golang.org/x/tools/go/analysis/passes/unusedwrite/unusedwrite.go new file mode 100644 index 000000000..37a0e784b --- /dev/null +++ b/vendor/golang.org/x/tools/go/analysis/passes/unusedwrite/unusedwrite.go @@ -0,0 +1,184 @@ +// Copyright 2021 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package unusedwrite checks for unused writes to the elements of a struct or array object. +package unusedwrite + +import ( + "fmt" + "go/types" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/buildssa" + "golang.org/x/tools/go/ssa" +) + +// Doc is a documentation string. +const Doc = `checks for unused writes + +The analyzer reports instances of writes to struct fields and +arrays that are never read. Specifically, when a struct object +or an array is copied, its elements are copied implicitly by +the compiler, and any element write to this copy does nothing +with the original object. + +For example: + + type T struct { x int } + func f(input []T) { + for i, v := range input { // v is a copy + v.x = i // unused write to field x + } + } + +Another example is about non-pointer receiver: + + type T struct { x int } + func (t T) f() { // t is a copy + t.x = i // unused write to field x + } +` + +// Analyzer reports instances of writes to struct fields and arrays +//that are never read. +var Analyzer = &analysis.Analyzer{ + Name: "unusedwrite", + Doc: Doc, + Requires: []*analysis.Analyzer{buildssa.Analyzer}, + Run: run, +} + +func run(pass *analysis.Pass) (interface{}, error) { + // Check the writes to struct and array objects. + checkStore := func(store *ssa.Store) { + // Consider field/index writes to an object whose elements are copied and not shared. + // MapUpdate is excluded since only the reference of the map is copied. + switch addr := store.Addr.(type) { + case *ssa.FieldAddr: + if isDeadStore(store, addr.X, addr) { + // Report the bug. + pass.Reportf(store.Pos(), + "unused write to field %s", + getFieldName(addr.X.Type(), addr.Field)) + } + case *ssa.IndexAddr: + if isDeadStore(store, addr.X, addr) { + // Report the bug. + pass.Reportf(store.Pos(), + "unused write to array index %s", addr.Index) + } + } + } + + ssainput := pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA) + for _, fn := range ssainput.SrcFuncs { + // Visit each block. No need to visit fn.Recover. + for _, blk := range fn.Blocks { + for _, instr := range blk.Instrs { + // Identify writes. 
+ if store, ok := instr.(*ssa.Store); ok { + checkStore(store) + } + } + } + } + return nil, nil +} + +// isDeadStore determines whether a field/index write to an object is dead. +// Argument "obj" is the object, and "addr" is the instruction fetching the field/index. +func isDeadStore(store *ssa.Store, obj ssa.Value, addr ssa.Instruction) bool { + // Consider only struct or array objects. + if !hasStructOrArrayType(obj) { + return false + } + // Check liveness: if the value is used later, then don't report the write. + for _, ref := range *obj.Referrers() { + if ref == store || ref == addr { + continue + } + switch ins := ref.(type) { + case ssa.CallInstruction: + return false + case *ssa.FieldAddr: + // Check whether the same field is used. + if ins.X == obj { + if faddr, ok := addr.(*ssa.FieldAddr); ok { + if faddr.Field == ins.Field { + return false + } + } + } + // Otherwise another field is used, and this usage doesn't count. + continue + case *ssa.IndexAddr: + if ins.X == obj { + return false + } + continue // Otherwise another object is used + case *ssa.Lookup: + if ins.X == obj { + return false + } + continue // Otherwise another object is used + case *ssa.Store: + if ins.Val == obj { + return false + } + continue // Otherwise other object is stored + default: // consider live if the object is used in any other instruction + return false + } + } + return true +} + +// isStructOrArray returns whether the underlying type is struct or array. +func isStructOrArray(tp types.Type) bool { + if named, ok := tp.(*types.Named); ok { + tp = named.Underlying() + } + switch tp.(type) { + case *types.Array: + return true + case *types.Struct: + return true + } + return false +} + +// hasStructOrArrayType returns whether a value is of struct or array type. +func hasStructOrArrayType(v ssa.Value) bool { + if instr, ok := v.(ssa.Instruction); ok { + if alloc, ok := instr.(*ssa.Alloc); ok { + // Check the element type of an allocated register (which always has pointer type) + // e.g., for + // func (t T) f() { ...} + // the receiver object is of type *T: + // t0 = local T (t) *T + if tp, ok := alloc.Type().(*types.Pointer); ok { + return isStructOrArray(tp.Elem()) + } + return false + } + } + return isStructOrArray(v.Type()) +} + +// getFieldName returns the name of a field in a struct. +// It the field is not found, then it returns the string format of the index. +// +// For example, for struct T {x int, y int), getFieldName(*T, 1) returns "y". +func getFieldName(tp types.Type, index int) string { + if pt, ok := tp.(*types.Pointer); ok { + tp = pt.Elem() + } + if named, ok := tp.(*types.Named); ok { + tp = named.Underlying() + } + if stp, ok := tp.(*types.Struct); ok { + return stp.Field(index).Name() + } + return fmt.Sprintf("%d", index) +} diff --git a/vendor/golang.org/x/tools/go/analysis/validate.go b/vendor/golang.org/x/tools/go/analysis/validate.go new file mode 100644 index 000000000..23e57bf02 --- /dev/null +++ b/vendor/golang.org/x/tools/go/analysis/validate.go @@ -0,0 +1,130 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package analysis + +import ( + "fmt" + "reflect" + "strings" + "unicode" +) + +// Validate reports an error if any of the analyzers are misconfigured. +// Checks include: +// that the name is a valid identifier; +// that the Requires graph is acyclic; +// that analyzer fact types are unique; +// that each fact type is a pointer. 
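+//
+// A driver typically validates its analyzers before running them, for
+// example (illustrative sketch; myAnalyzer is hypothetical):
+//
+//	if err := analysis.Validate([]*analysis.Analyzer{myAnalyzer}); err != nil {
+//		log.Fatal(err)
+//	}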
+func Validate(analyzers []*Analyzer) error { + // Map each fact type to its sole generating analyzer. + factTypes := make(map[reflect.Type]*Analyzer) + + // Traverse the Requires graph, depth first. + const ( + white = iota + grey + black + finished + ) + color := make(map[*Analyzer]uint8) + var visit func(a *Analyzer) error + visit = func(a *Analyzer) error { + if a == nil { + return fmt.Errorf("nil *Analyzer") + } + if color[a] == white { + color[a] = grey + + // names + if !validIdent(a.Name) { + return fmt.Errorf("invalid analyzer name %q", a) + } + + if a.Doc == "" { + return fmt.Errorf("analyzer %q is undocumented", a) + } + + // fact types + for _, f := range a.FactTypes { + if f == nil { + return fmt.Errorf("analyzer %s has nil FactType", a) + } + t := reflect.TypeOf(f) + if prev := factTypes[t]; prev != nil { + return fmt.Errorf("fact type %s registered by two analyzers: %v, %v", + t, a, prev) + } + if t.Kind() != reflect.Ptr { + return fmt.Errorf("%s: fact type %s is not a pointer", a, t) + } + factTypes[t] = a + } + + // recursion + for _, req := range a.Requires { + if err := visit(req); err != nil { + return err + } + } + color[a] = black + } + + if color[a] == grey { + stack := []*Analyzer{a} + inCycle := map[string]bool{} + for len(stack) > 0 { + current := stack[len(stack)-1] + stack = stack[:len(stack)-1] + if color[current] == grey && !inCycle[current.Name] { + inCycle[current.Name] = true + stack = append(stack, current.Requires...) + } + } + return &CycleInRequiresGraphError{AnalyzerNames: inCycle} + } + + return nil + } + for _, a := range analyzers { + if err := visit(a); err != nil { + return err + } + } + + // Reject duplicates among analyzers. + // Precondition: color[a] == black. + // Postcondition: color[a] == finished. + for _, a := range analyzers { + if color[a] == finished { + return fmt.Errorf("duplicate analyzer: %s", a.Name) + } + color[a] = finished + } + + return nil +} + +func validIdent(name string) bool { + for i, r := range name { + if !(r == '_' || unicode.IsLetter(r) || i > 0 && unicode.IsDigit(r)) { + return false + } + } + return name != "" +} + +type CycleInRequiresGraphError struct { + AnalyzerNames map[string]bool +} + +func (e *CycleInRequiresGraphError) Error() string { + var b strings.Builder + b.WriteString("cycle detected involving the following analyzers:") + for n := range e.AnalyzerNames { + b.WriteByte(' ') + b.WriteString(n) + } + return b.String() +} diff --git a/vendor/golang.org/x/tools/go/ast/inspector/inspector.go b/vendor/golang.org/x/tools/go/ast/inspector/inspector.go new file mode 100644 index 000000000..af5e17fee --- /dev/null +++ b/vendor/golang.org/x/tools/go/ast/inspector/inspector.go @@ -0,0 +1,186 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package inspector provides helper functions for traversal over the +// syntax trees of a package, including node filtering by type, and +// materialization of the traversal stack. +// +// During construction, the inspector does a complete traversal and +// builds a list of push/pop events and their node type. Subsequent +// method calls that request a traversal scan this list, rather than walk +// the AST, and perform type filtering using efficient bit sets. +// +// Experiments suggest the inspector's traversals are about 2.5x faster +// than ast.Inspect, but it may take around 5 traversals for this +// benefit to amortize the inspector's construction cost. 
+// If efficiency is the primary concern, do not use Inspector for +// one-off traversals. +package inspector + +// There are four orthogonal features in a traversal: +// 1 type filtering +// 2 pruning +// 3 postorder calls to f +// 4 stack +// Rather than offer all of them in the API, +// only a few combinations are exposed: +// - Preorder is the fastest and has fewest features, +// but is the most commonly needed traversal. +// - Nodes and WithStack both provide pruning and postorder calls, +// even though few clients need it, because supporting two versions +// is not justified. +// More combinations could be supported by expressing them as +// wrappers around a more generic traversal, but this was measured +// and found to degrade performance significantly (30%). + +import ( + "go/ast" +) + +// An Inspector provides methods for inspecting +// (traversing) the syntax trees of a package. +type Inspector struct { + events []event +} + +// New returns an Inspector for the specified syntax trees. +func New(files []*ast.File) *Inspector { + return &Inspector{traverse(files)} +} + +// An event represents a push or a pop +// of an ast.Node during a traversal. +type event struct { + node ast.Node + typ uint64 // typeOf(node) + index int // 1 + index of corresponding pop event, or 0 if this is a pop +} + +// Preorder visits all the nodes of the files supplied to New in +// depth-first order. It calls f(n) for each node n before it visits +// n's children. +// +// The types argument, if non-empty, enables type-based filtering of +// events. The function f if is called only for nodes whose type +// matches an element of the types slice. +func (in *Inspector) Preorder(types []ast.Node, f func(ast.Node)) { + // Because it avoids postorder calls to f, and the pruning + // check, Preorder is almost twice as fast as Nodes. The two + // features seem to contribute similar slowdowns (~1.4x each). + + mask := maskOf(types) + for i := 0; i < len(in.events); { + ev := in.events[i] + if ev.typ&mask != 0 { + if ev.index > 0 { + f(ev.node) + } + } + i++ + } +} + +// Nodes visits the nodes of the files supplied to New in depth-first +// order. It calls f(n, true) for each node n before it visits n's +// children. If f returns true, Nodes invokes f recursively for each +// of the non-nil children of the node, followed by a call of +// f(n, false). +// +// The types argument, if non-empty, enables type-based filtering of +// events. The function f if is called only for nodes whose type +// matches an element of the types slice. +func (in *Inspector) Nodes(types []ast.Node, f func(n ast.Node, push bool) (proceed bool)) { + mask := maskOf(types) + for i := 0; i < len(in.events); { + ev := in.events[i] + if ev.typ&mask != 0 { + if ev.index > 0 { + // push + if !f(ev.node, true) { + i = ev.index // jump to corresponding pop + 1 + continue + } + } else { + // pop + f(ev.node, false) + } + } + i++ + } +} + +// WithStack visits nodes in a similar manner to Nodes, but it +// supplies each call to f an additional argument, the current +// traversal stack. The stack's first element is the outermost node, +// an *ast.File; its last is the innermost, n. 
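+//
+// A minimal usage sketch (illustrative, not part of this file; files is
+// assumed to be the package's parsed syntax trees):
+//
+//	in := inspector.New(files)
+//	in.WithStack([]ast.Node{(*ast.ReturnStmt)(nil)},
+//		func(n ast.Node, push bool, stack []ast.Node) bool {
+//			if push {
+//				fmt.Println("return statement at depth", len(stack))
+//			}
+//			return true
+//		})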
+func (in *Inspector) WithStack(types []ast.Node, f func(n ast.Node, push bool, stack []ast.Node) (proceed bool)) { + mask := maskOf(types) + var stack []ast.Node + for i := 0; i < len(in.events); { + ev := in.events[i] + if ev.index > 0 { + // push + stack = append(stack, ev.node) + if ev.typ&mask != 0 { + if !f(ev.node, true, stack) { + i = ev.index + stack = stack[:len(stack)-1] + continue + } + } + } else { + // pop + if ev.typ&mask != 0 { + f(ev.node, false, stack) + } + stack = stack[:len(stack)-1] + } + i++ + } +} + +// traverse builds the table of events representing a traversal. +func traverse(files []*ast.File) []event { + // Preallocate approximate number of events + // based on source file extent. + // This makes traverse faster by 4x (!). + var extent int + for _, f := range files { + extent += int(f.End() - f.Pos()) + } + // This estimate is based on the net/http package. + capacity := extent * 33 / 100 + if capacity > 1e6 { + capacity = 1e6 // impose some reasonable maximum + } + events := make([]event, 0, capacity) + + var stack []event + for _, f := range files { + ast.Inspect(f, func(n ast.Node) bool { + if n != nil { + // push + ev := event{ + node: n, + typ: typeOf(n), + index: len(events), // push event temporarily holds own index + } + stack = append(stack, ev) + events = append(events, ev) + } else { + // pop + ev := stack[len(stack)-1] + stack = stack[:len(stack)-1] + + events[ev.index].index = len(events) + 1 // make push refer to pop + + ev.index = 0 // turn ev into a pop event + events = append(events, ev) + } + return true + }) + } + + return events +} diff --git a/vendor/golang.org/x/tools/go/ast/inspector/typeof.go b/vendor/golang.org/x/tools/go/ast/inspector/typeof.go new file mode 100644 index 000000000..b6b00cf2e --- /dev/null +++ b/vendor/golang.org/x/tools/go/ast/inspector/typeof.go @@ -0,0 +1,220 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package inspector + +// This file defines func typeOf(ast.Node) uint64. +// +// The initial map-based implementation was too slow; +// see https://go-review.googlesource.com/c/tools/+/135655/1/go/ast/inspector/inspector.go#196 + +import "go/ast" + +const ( + nArrayType = iota + nAssignStmt + nBadDecl + nBadExpr + nBadStmt + nBasicLit + nBinaryExpr + nBlockStmt + nBranchStmt + nCallExpr + nCaseClause + nChanType + nCommClause + nComment + nCommentGroup + nCompositeLit + nDeclStmt + nDeferStmt + nEllipsis + nEmptyStmt + nExprStmt + nField + nFieldList + nFile + nForStmt + nFuncDecl + nFuncLit + nFuncType + nGenDecl + nGoStmt + nIdent + nIfStmt + nImportSpec + nIncDecStmt + nIndexExpr + nInterfaceType + nKeyValueExpr + nLabeledStmt + nMapType + nPackage + nParenExpr + nRangeStmt + nReturnStmt + nSelectStmt + nSelectorExpr + nSendStmt + nSliceExpr + nStarExpr + nStructType + nSwitchStmt + nTypeAssertExpr + nTypeSpec + nTypeSwitchStmt + nUnaryExpr + nValueSpec +) + +// typeOf returns a distinct single-bit value that represents the type of n. +// +// Various implementations were benchmarked with BenchmarkNewInspector: +// GOGC=off +// - type switch 4.9-5.5ms 2.1ms +// - binary search over a sorted list of types 5.5-5.9ms 2.5ms +// - linear scan, frequency-ordered list 5.9-6.1ms 2.7ms +// - linear scan, unordered list 6.4ms 2.7ms +// - hash table 6.5ms 3.1ms +// A perfect hash seemed like overkill. 
+// +// The compiler's switch statement is the clear winner +// as it produces a binary tree in code, +// with constant conditions and good branch prediction. +// (Sadly it is the most verbose in source code.) +// Binary search suffered from poor branch prediction. +// +func typeOf(n ast.Node) uint64 { + // Fast path: nearly half of all nodes are identifiers. + if _, ok := n.(*ast.Ident); ok { + return 1 << nIdent + } + + // These cases include all nodes encountered by ast.Inspect. + switch n.(type) { + case *ast.ArrayType: + return 1 << nArrayType + case *ast.AssignStmt: + return 1 << nAssignStmt + case *ast.BadDecl: + return 1 << nBadDecl + case *ast.BadExpr: + return 1 << nBadExpr + case *ast.BadStmt: + return 1 << nBadStmt + case *ast.BasicLit: + return 1 << nBasicLit + case *ast.BinaryExpr: + return 1 << nBinaryExpr + case *ast.BlockStmt: + return 1 << nBlockStmt + case *ast.BranchStmt: + return 1 << nBranchStmt + case *ast.CallExpr: + return 1 << nCallExpr + case *ast.CaseClause: + return 1 << nCaseClause + case *ast.ChanType: + return 1 << nChanType + case *ast.CommClause: + return 1 << nCommClause + case *ast.Comment: + return 1 << nComment + case *ast.CommentGroup: + return 1 << nCommentGroup + case *ast.CompositeLit: + return 1 << nCompositeLit + case *ast.DeclStmt: + return 1 << nDeclStmt + case *ast.DeferStmt: + return 1 << nDeferStmt + case *ast.Ellipsis: + return 1 << nEllipsis + case *ast.EmptyStmt: + return 1 << nEmptyStmt + case *ast.ExprStmt: + return 1 << nExprStmt + case *ast.Field: + return 1 << nField + case *ast.FieldList: + return 1 << nFieldList + case *ast.File: + return 1 << nFile + case *ast.ForStmt: + return 1 << nForStmt + case *ast.FuncDecl: + return 1 << nFuncDecl + case *ast.FuncLit: + return 1 << nFuncLit + case *ast.FuncType: + return 1 << nFuncType + case *ast.GenDecl: + return 1 << nGenDecl + case *ast.GoStmt: + return 1 << nGoStmt + case *ast.Ident: + return 1 << nIdent + case *ast.IfStmt: + return 1 << nIfStmt + case *ast.ImportSpec: + return 1 << nImportSpec + case *ast.IncDecStmt: + return 1 << nIncDecStmt + case *ast.IndexExpr: + return 1 << nIndexExpr + case *ast.InterfaceType: + return 1 << nInterfaceType + case *ast.KeyValueExpr: + return 1 << nKeyValueExpr + case *ast.LabeledStmt: + return 1 << nLabeledStmt + case *ast.MapType: + return 1 << nMapType + case *ast.Package: + return 1 << nPackage + case *ast.ParenExpr: + return 1 << nParenExpr + case *ast.RangeStmt: + return 1 << nRangeStmt + case *ast.ReturnStmt: + return 1 << nReturnStmt + case *ast.SelectStmt: + return 1 << nSelectStmt + case *ast.SelectorExpr: + return 1 << nSelectorExpr + case *ast.SendStmt: + return 1 << nSendStmt + case *ast.SliceExpr: + return 1 << nSliceExpr + case *ast.StarExpr: + return 1 << nStarExpr + case *ast.StructType: + return 1 << nStructType + case *ast.SwitchStmt: + return 1 << nSwitchStmt + case *ast.TypeAssertExpr: + return 1 << nTypeAssertExpr + case *ast.TypeSpec: + return 1 << nTypeSpec + case *ast.TypeSwitchStmt: + return 1 << nTypeSwitchStmt + case *ast.UnaryExpr: + return 1 << nUnaryExpr + case *ast.ValueSpec: + return 1 << nValueSpec + } + return 0 +} + +func maskOf(nodes []ast.Node) uint64 { + if nodes == nil { + return 1<<64 - 1 // match all node types + } + var mask uint64 + for _, n := range nodes { + mask |= typeOf(n) + } + return mask +} diff --git a/vendor/golang.org/x/tools/go/cfg/builder.go b/vendor/golang.org/x/tools/go/cfg/builder.go new file mode 100644 index 000000000..7f95a2961 --- /dev/null +++ 
b/vendor/golang.org/x/tools/go/cfg/builder.go @@ -0,0 +1,510 @@ +// Copyright 2016 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package cfg + +// This file implements the CFG construction pass. + +import ( + "fmt" + "go/ast" + "go/token" +) + +type builder struct { + cfg *CFG + mayReturn func(*ast.CallExpr) bool + current *Block + lblocks map[*ast.Object]*lblock // labeled blocks + targets *targets // linked stack of branch targets +} + +func (b *builder) stmt(_s ast.Stmt) { + // The label of the current statement. If non-nil, its _goto + // target is always set; its _break and _continue are set only + // within the body of switch/typeswitch/select/for/range. + // It is effectively an additional default-nil parameter of stmt(). + var label *lblock +start: + switch s := _s.(type) { + case *ast.BadStmt, + *ast.SendStmt, + *ast.IncDecStmt, + *ast.GoStmt, + *ast.DeferStmt, + *ast.EmptyStmt, + *ast.AssignStmt: + // No effect on control flow. + b.add(s) + + case *ast.ExprStmt: + b.add(s) + if call, ok := s.X.(*ast.CallExpr); ok && !b.mayReturn(call) { + // Calls to panic, os.Exit, etc, never return. + b.current = b.newBlock("unreachable.call") + } + + case *ast.DeclStmt: + // Treat each var ValueSpec as a separate statement. + d := s.Decl.(*ast.GenDecl) + if d.Tok == token.VAR { + for _, spec := range d.Specs { + if spec, ok := spec.(*ast.ValueSpec); ok { + b.add(spec) + } + } + } + + case *ast.LabeledStmt: + label = b.labeledBlock(s.Label) + b.jump(label._goto) + b.current = label._goto + _s = s.Stmt + goto start // effectively: tailcall stmt(g, s.Stmt, label) + + case *ast.ReturnStmt: + b.add(s) + b.current = b.newBlock("unreachable.return") + + case *ast.BranchStmt: + b.branchStmt(s) + + case *ast.BlockStmt: + b.stmtList(s.List) + + case *ast.IfStmt: + if s.Init != nil { + b.stmt(s.Init) + } + then := b.newBlock("if.then") + done := b.newBlock("if.done") + _else := done + if s.Else != nil { + _else = b.newBlock("if.else") + } + b.add(s.Cond) + b.ifelse(then, _else) + b.current = then + b.stmt(s.Body) + b.jump(done) + + if s.Else != nil { + b.current = _else + b.stmt(s.Else) + b.jump(done) + } + + b.current = done + + case *ast.SwitchStmt: + b.switchStmt(s, label) + + case *ast.TypeSwitchStmt: + b.typeSwitchStmt(s, label) + + case *ast.SelectStmt: + b.selectStmt(s, label) + + case *ast.ForStmt: + b.forStmt(s, label) + + case *ast.RangeStmt: + b.rangeStmt(s, label) + + default: + panic(fmt.Sprintf("unexpected statement kind: %T", s)) + } +} + +func (b *builder) stmtList(list []ast.Stmt) { + for _, s := range list { + b.stmt(s) + } +} + +func (b *builder) branchStmt(s *ast.BranchStmt) { + var block *Block + switch s.Tok { + case token.BREAK: + if s.Label != nil { + if lb := b.labeledBlock(s.Label); lb != nil { + block = lb._break + } + } else { + for t := b.targets; t != nil && block == nil; t = t.tail { + block = t._break + } + } + + case token.CONTINUE: + if s.Label != nil { + if lb := b.labeledBlock(s.Label); lb != nil { + block = lb._continue + } + } else { + for t := b.targets; t != nil && block == nil; t = t.tail { + block = t._continue + } + } + + case token.FALLTHROUGH: + for t := b.targets; t != nil && block == nil; t = t.tail { + block = t._fallthrough + } + + case token.GOTO: + if s.Label != nil { + block = b.labeledBlock(s.Label)._goto + } + } + if block == nil { + block = b.newBlock("undefined.branch") + } + b.jump(block) + b.current = b.newBlock("unreachable.branch") +} + +func (b 
*builder) switchStmt(s *ast.SwitchStmt, label *lblock) { + if s.Init != nil { + b.stmt(s.Init) + } + if s.Tag != nil { + b.add(s.Tag) + } + done := b.newBlock("switch.done") + if label != nil { + label._break = done + } + // We pull the default case (if present) down to the end. + // But each fallthrough label must point to the next + // body block in source order, so we preallocate a + // body block (fallthru) for the next case. + // Unfortunately this makes for a confusing block order. + var defaultBody *[]ast.Stmt + var defaultFallthrough *Block + var fallthru, defaultBlock *Block + ncases := len(s.Body.List) + for i, clause := range s.Body.List { + body := fallthru + if body == nil { + body = b.newBlock("switch.body") // first case only + } + + // Preallocate body block for the next case. + fallthru = done + if i+1 < ncases { + fallthru = b.newBlock("switch.body") + } + + cc := clause.(*ast.CaseClause) + if cc.List == nil { + // Default case. + defaultBody = &cc.Body + defaultFallthrough = fallthru + defaultBlock = body + continue + } + + var nextCond *Block + for _, cond := range cc.List { + nextCond = b.newBlock("switch.next") + b.add(cond) // one half of the tag==cond condition + b.ifelse(body, nextCond) + b.current = nextCond + } + b.current = body + b.targets = &targets{ + tail: b.targets, + _break: done, + _fallthrough: fallthru, + } + b.stmtList(cc.Body) + b.targets = b.targets.tail + b.jump(done) + b.current = nextCond + } + if defaultBlock != nil { + b.jump(defaultBlock) + b.current = defaultBlock + b.targets = &targets{ + tail: b.targets, + _break: done, + _fallthrough: defaultFallthrough, + } + b.stmtList(*defaultBody) + b.targets = b.targets.tail + } + b.jump(done) + b.current = done +} + +func (b *builder) typeSwitchStmt(s *ast.TypeSwitchStmt, label *lblock) { + if s.Init != nil { + b.stmt(s.Init) + } + if s.Assign != nil { + b.add(s.Assign) + } + + done := b.newBlock("typeswitch.done") + if label != nil { + label._break = done + } + var default_ *ast.CaseClause + for _, clause := range s.Body.List { + cc := clause.(*ast.CaseClause) + if cc.List == nil { + default_ = cc + continue + } + body := b.newBlock("typeswitch.body") + var next *Block + for _, casetype := range cc.List { + next = b.newBlock("typeswitch.next") + // casetype is a type, so don't call b.add(casetype). + // This block logically contains a type assertion, + // x.(casetype), but it's unclear how to represent x. + _ = casetype + b.ifelse(body, next) + b.current = next + } + b.current = body + b.typeCaseBody(cc, done) + b.current = next + } + if default_ != nil { + b.typeCaseBody(default_, done) + } else { + b.jump(done) + } + b.current = done +} + +func (b *builder) typeCaseBody(cc *ast.CaseClause, done *Block) { + b.targets = &targets{ + tail: b.targets, + _break: done, + } + b.stmtList(cc.Body) + b.targets = b.targets.tail + b.jump(done) +} + +func (b *builder) selectStmt(s *ast.SelectStmt, label *lblock) { + // First evaluate channel expressions. + // TODO(adonovan): fix: evaluate only channel exprs here. 
+ for _, clause := range s.Body.List { + if comm := clause.(*ast.CommClause).Comm; comm != nil { + b.stmt(comm) + } + } + + done := b.newBlock("select.done") + if label != nil { + label._break = done + } + + var defaultBody *[]ast.Stmt + for _, cc := range s.Body.List { + clause := cc.(*ast.CommClause) + if clause.Comm == nil { + defaultBody = &clause.Body + continue + } + body := b.newBlock("select.body") + next := b.newBlock("select.next") + b.ifelse(body, next) + b.current = body + b.targets = &targets{ + tail: b.targets, + _break: done, + } + switch comm := clause.Comm.(type) { + case *ast.ExprStmt: // <-ch + // nop + case *ast.AssignStmt: // x := <-states[state].Chan + b.add(comm.Lhs[0]) + } + b.stmtList(clause.Body) + b.targets = b.targets.tail + b.jump(done) + b.current = next + } + if defaultBody != nil { + b.targets = &targets{ + tail: b.targets, + _break: done, + } + b.stmtList(*defaultBody) + b.targets = b.targets.tail + b.jump(done) + } + b.current = done +} + +func (b *builder) forStmt(s *ast.ForStmt, label *lblock) { + // ...init... + // jump loop + // loop: + // if cond goto body else done + // body: + // ...body... + // jump post + // post: (target of continue) + // ...post... + // jump loop + // done: (target of break) + if s.Init != nil { + b.stmt(s.Init) + } + body := b.newBlock("for.body") + done := b.newBlock("for.done") // target of 'break' + loop := body // target of back-edge + if s.Cond != nil { + loop = b.newBlock("for.loop") + } + cont := loop // target of 'continue' + if s.Post != nil { + cont = b.newBlock("for.post") + } + if label != nil { + label._break = done + label._continue = cont + } + b.jump(loop) + b.current = loop + if loop != body { + b.add(s.Cond) + b.ifelse(body, done) + b.current = body + } + b.targets = &targets{ + tail: b.targets, + _break: done, + _continue: cont, + } + b.stmt(s.Body) + b.targets = b.targets.tail + b.jump(cont) + + if s.Post != nil { + b.current = cont + b.stmt(s.Post) + b.jump(loop) // back-edge + } + b.current = done +} + +func (b *builder) rangeStmt(s *ast.RangeStmt, label *lblock) { + b.add(s.X) + + if s.Key != nil { + b.add(s.Key) + } + if s.Value != nil { + b.add(s.Value) + } + + // ... + // loop: (target of continue) + // if ... goto body else done + // body: + // ... + // jump loop + // done: (target of break) + + loop := b.newBlock("range.loop") + b.jump(loop) + b.current = loop + + body := b.newBlock("range.body") + done := b.newBlock("range.done") + b.ifelse(body, done) + b.current = body + + if label != nil { + label._break = done + label._continue = loop + } + b.targets = &targets{ + tail: b.targets, + _break: done, + _continue: loop, + } + b.stmt(s.Body) + b.targets = b.targets.tail + b.jump(loop) // back-edge + b.current = done +} + +// -------- helpers -------- + +// Destinations associated with unlabeled for/switch/select stmts. +// We push/pop one of these as we enter/leave each construct and for +// each BranchStmt we scan for the innermost target of the right type. +// +type targets struct { + tail *targets // rest of stack + _break *Block + _continue *Block + _fallthrough *Block +} + +// Destinations associated with a labeled block. +// We populate these as labels are encountered in forward gotos or +// labeled statements. +// +type lblock struct { + _goto *Block + _break *Block + _continue *Block +} + +// labeledBlock returns the branch target associated with the +// specified label, creating it if needed. 
+// +func (b *builder) labeledBlock(label *ast.Ident) *lblock { + lb := b.lblocks[label.Obj] + if lb == nil { + lb = &lblock{_goto: b.newBlock(label.Name)} + if b.lblocks == nil { + b.lblocks = make(map[*ast.Object]*lblock) + } + b.lblocks[label.Obj] = lb + } + return lb +} + +// newBlock appends a new unconnected basic block to b.cfg's block +// slice and returns it. +// It does not automatically become the current block. +// comment is an optional string for more readable debugging output. +func (b *builder) newBlock(comment string) *Block { + g := b.cfg + block := &Block{ + Index: int32(len(g.Blocks)), + comment: comment, + } + block.Succs = block.succs2[:0] + g.Blocks = append(g.Blocks, block) + return block +} + +func (b *builder) add(n ast.Node) { + b.current.Nodes = append(b.current.Nodes, n) +} + +// jump adds an edge from the current block to the target block, +// and sets b.current to nil. +func (b *builder) jump(target *Block) { + b.current.Succs = append(b.current.Succs, target) + b.current = nil +} + +// ifelse emits edges from the current block to the t and f blocks, +// and sets b.current to nil. +func (b *builder) ifelse(t, f *Block) { + b.current.Succs = append(b.current.Succs, t, f) + b.current = nil +} diff --git a/vendor/golang.org/x/tools/go/cfg/cfg.go b/vendor/golang.org/x/tools/go/cfg/cfg.go new file mode 100644 index 000000000..3ebc65f60 --- /dev/null +++ b/vendor/golang.org/x/tools/go/cfg/cfg.go @@ -0,0 +1,150 @@ +// Copyright 2016 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package cfg constructs a simple control-flow graph (CFG) of the +// statements and expressions within a single function. +// +// Use cfg.New to construct the CFG for a function body. +// +// The blocks of the CFG contain all the function's non-control +// statements. The CFG does not contain control statements such as If, +// Switch, Select, and Branch, but does contain their subexpressions. +// For example, this source code: +// +// if x := f(); x != nil { +// T() +// } else { +// F() +// } +// +// produces this CFG: +// +// 1: x := f() +// x != nil +// succs: 2, 3 +// 2: T() +// succs: 4 +// 3: F() +// succs: 4 +// 4: +// +// The CFG does contain Return statements; even implicit returns are +// materialized (at the position of the function's closing brace). +// +// The CFG does not record conditions associated with conditional branch +// edges, nor the short-circuit semantics of the && and || operators, +// nor abnormal control flow caused by panic. If you need this +// information, use golang.org/x/tools/go/ssa instead. +// +package cfg + +import ( + "bytes" + "fmt" + "go/ast" + "go/format" + "go/token" +) + +// A CFG represents the control-flow graph of a single function. +// +// The entry point is Blocks[0]; there may be multiple return blocks. +type CFG struct { + Blocks []*Block // block[0] is entry; order otherwise undefined +} + +// A Block represents a basic block: a list of statements and +// expressions that are always evaluated sequentially. +// +// A block may have 0-2 successors: zero for a return block or a block +// that calls a function such as panic that never returns; one for a +// normal (jump) block; and two for a conditional (if) block. 
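+//
+// A minimal sketch of inspecting blocks (illustrative, not part of this
+// file; funcDecl and fset are assumed, and mayReturn naively treats every
+// call as returning):
+//
+//	g := cfg.New(funcDecl.Body, func(*ast.CallExpr) bool { return true })
+//	for _, b := range g.Blocks {
+//		fmt.Printf("%s has %d successors\n", b, len(b.Succs))
+//	}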
+type Block struct { + Nodes []ast.Node // statements, expressions, and ValueSpecs + Succs []*Block // successor nodes in the graph + Index int32 // index within CFG.Blocks + Live bool // block is reachable from entry + + comment string // for debugging + succs2 [2]*Block // underlying array for Succs +} + +// New returns a new control-flow graph for the specified function body, +// which must be non-nil. +// +// The CFG builder calls mayReturn to determine whether a given function +// call may return. For example, calls to panic, os.Exit, and log.Fatal +// do not return, so the builder can remove infeasible graph edges +// following such calls. The builder calls mayReturn only for a +// CallExpr beneath an ExprStmt. +func New(body *ast.BlockStmt, mayReturn func(*ast.CallExpr) bool) *CFG { + b := builder{ + mayReturn: mayReturn, + cfg: new(CFG), + } + b.current = b.newBlock("entry") + b.stmt(body) + + // Compute liveness (reachability from entry point), breadth-first. + q := make([]*Block, 0, len(b.cfg.Blocks)) + q = append(q, b.cfg.Blocks[0]) // entry point + for len(q) > 0 { + b := q[len(q)-1] + q = q[:len(q)-1] + + if !b.Live { + b.Live = true + q = append(q, b.Succs...) + } + } + + // Does control fall off the end of the function's body? + // Make implicit return explicit. + if b.current != nil && b.current.Live { + b.add(&ast.ReturnStmt{ + Return: body.End() - 1, + }) + } + + return b.cfg +} + +func (b *Block) String() string { + return fmt.Sprintf("block %d (%s)", b.Index, b.comment) +} + +// Return returns the return statement at the end of this block if present, nil otherwise. +func (b *Block) Return() (ret *ast.ReturnStmt) { + if len(b.Nodes) > 0 { + ret, _ = b.Nodes[len(b.Nodes)-1].(*ast.ReturnStmt) + } + return +} + +// Format formats the control-flow graph for ease of debugging. +func (g *CFG) Format(fset *token.FileSet) string { + var buf bytes.Buffer + for _, b := range g.Blocks { + fmt.Fprintf(&buf, ".%d: # %s\n", b.Index, b.comment) + for _, n := range b.Nodes { + fmt.Fprintf(&buf, "\t%s\n", formatNode(fset, n)) + } + if len(b.Succs) > 0 { + fmt.Fprintf(&buf, "\tsuccs:") + for _, succ := range b.Succs { + fmt.Fprintf(&buf, " %d", succ.Index) + } + buf.WriteByte('\n') + } + buf.WriteByte('\n') + } + return buf.String() +} + +func formatNode(fset *token.FileSet, n ast.Node) string { + var buf bytes.Buffer + format.Node(&buf, fset, n) + // Indent secondary lines by a tab. + return string(bytes.Replace(buf.Bytes(), []byte("\n"), []byte("\n\t"), -1)) +} diff --git a/vendor/golang.org/x/tools/go/gcexportdata/gcexportdata.go b/vendor/golang.org/x/tools/go/gcexportdata/gcexportdata.go new file mode 100644 index 000000000..fc8beea5d --- /dev/null +++ b/vendor/golang.org/x/tools/go/gcexportdata/gcexportdata.go @@ -0,0 +1,133 @@ +// Copyright 2016 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package gcexportdata provides functions for locating, reading, and +// writing export data files containing type information produced by the +// gc compiler. This package supports go1.7 export data format and all +// later versions. +// +// Although it might seem convenient for this package to live alongside +// go/types in the standard library, this would cause version skew +// problems for developer tools that use it, since they must be able to +// consume the outputs of the gc compiler both before and after a Go +// update such as from Go 1.7 to Go 1.8. 
Because this package lives in +// golang.org/x/tools, sites can update their version of this repo some +// time before the Go 1.8 release and rebuild and redeploy their +// developer tools, which will then be able to consume both Go 1.7 and +// Go 1.8 export data files, so they will work before and after the +// Go update. (See discussion at https://golang.org/issue/15651.) +// +package gcexportdata // import "golang.org/x/tools/go/gcexportdata" + +import ( + "bufio" + "bytes" + "fmt" + "go/token" + "go/types" + "io" + "io/ioutil" + + "golang.org/x/tools/go/internal/gcimporter" +) + +// Find returns the name of an object (.o) or archive (.a) file +// containing type information for the specified import path, +// using the workspace layout conventions of go/build. +// If no file was found, an empty filename is returned. +// +// A relative srcDir is interpreted relative to the current working directory. +// +// Find also returns the package's resolved (canonical) import path, +// reflecting the effects of srcDir and vendoring on importPath. +func Find(importPath, srcDir string) (filename, path string) { + return gcimporter.FindPkg(importPath, srcDir) +} + +// NewReader returns a reader for the export data section of an object +// (.o) or archive (.a) file read from r. The new reader may provide +// additional trailing data beyond the end of the export data. +func NewReader(r io.Reader) (io.Reader, error) { + buf := bufio.NewReader(r) + _, err := gcimporter.FindExportData(buf) + // If we ever switch to a zip-like archive format with the ToC + // at the end, we can return the correct portion of export data, + // but for now we must return the entire rest of the file. + return buf, err +} + +// Read reads export data from in, decodes it, and returns type +// information for the package. +// The package name is specified by path. +// File position information is added to fset. +// +// Read may inspect and add to the imports map to ensure that references +// within the export data to other packages are consistent. The caller +// must ensure that imports[path] does not exist, or exists but is +// incomplete (see types.Package.Complete), and Read inserts the +// resulting package into this map entry. +// +// On return, the state of the reader is undefined. +func Read(in io.Reader, fset *token.FileSet, imports map[string]*types.Package, path string) (*types.Package, error) { + data, err := ioutil.ReadAll(in) + if err != nil { + return nil, fmt.Errorf("reading export data for %q: %v", path, err) + } + + if bytes.HasPrefix(data, []byte("!")) { + return nil, fmt.Errorf("can't read export data for %q directly from an archive file (call gcexportdata.NewReader first to extract export data)", path) + } + + // The App Engine Go runtime v1.6 uses the old export data format. + // TODO(adonovan): delete once v1.7 has been around for a while. + if bytes.HasPrefix(data, []byte("package ")) { + return gcimporter.ImportData(imports, path, path, bytes.NewReader(data)) + } + + // The indexed export format starts with an 'i'; the older + // binary export format starts with a 'c', 'd', or 'v' + // (from "version"). Select appropriate importer. + if len(data) > 0 && data[0] == 'i' { + _, pkg, err := gcimporter.IImportData(fset, imports, data[1:], path) + return pkg, err + } + + _, pkg, err := gcimporter.BImportData(fset, imports, data, path) + return pkg, err +} + +// Write writes encoded type information for the specified package to out. +// The FileSet provides file position information for named objects. 
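+//
+// A minimal round trip might look like this (illustrative sketch; pkg and
+// fset are assumed to exist):
+//
+//	var buf bytes.Buffer
+//	if err := gcexportdata.Write(&buf, fset, pkg); err != nil {
+//		log.Fatal(err)
+//	}
+//	imports := make(map[string]*types.Package)
+//	pkg2, err := gcexportdata.Read(&buf, token.NewFileSet(), imports, pkg.Path())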
+func Write(out io.Writer, fset *token.FileSet, pkg *types.Package) error { + if _, err := io.WriteString(out, "i"); err != nil { + return err + } + return gcimporter.IExportData(out, fset, pkg) +} + +// ReadBundle reads an export bundle from in, decodes it, and returns type +// information for the packages. +// File position information is added to fset. +// +// ReadBundle may inspect and add to the imports map to ensure that references +// within the export bundle to other packages are consistent. +// +// On return, the state of the reader is undefined. +// +// Experimental: This API is experimental and may change in the future. +func ReadBundle(in io.Reader, fset *token.FileSet, imports map[string]*types.Package) ([]*types.Package, error) { + data, err := ioutil.ReadAll(in) + if err != nil { + return nil, fmt.Errorf("reading export bundle: %v", err) + } + return gcimporter.IImportBundle(fset, imports, data) +} + +// WriteBundle writes encoded type information for the specified packages to out. +// The FileSet provides file position information for named objects. +// +// Experimental: This API is experimental and may change in the future. +func WriteBundle(out io.Writer, fset *token.FileSet, pkgs []*types.Package) error { + return gcimporter.IExportBundle(out, fset, pkgs) +} diff --git a/vendor/golang.org/x/tools/go/gcexportdata/importer.go b/vendor/golang.org/x/tools/go/gcexportdata/importer.go new file mode 100644 index 000000000..efe221e7e --- /dev/null +++ b/vendor/golang.org/x/tools/go/gcexportdata/importer.go @@ -0,0 +1,73 @@ +// Copyright 2016 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gcexportdata + +import ( + "fmt" + "go/token" + "go/types" + "os" +) + +// NewImporter returns a new instance of the types.Importer interface +// that reads type information from export data files written by gc. +// The Importer also satisfies types.ImporterFrom. +// +// Export data files are located using "go build" workspace conventions +// and the build.Default context. +// +// Use this importer instead of go/importer.For("gc", ...) to avoid the +// version-skew problems described in the documentation of this package, +// or to control the FileSet or access the imports map populated during +// package loading. +// +func NewImporter(fset *token.FileSet, imports map[string]*types.Package) types.ImporterFrom { + return importer{fset, imports} +} + +type importer struct { + fset *token.FileSet + imports map[string]*types.Package +} + +func (imp importer) Import(importPath string) (*types.Package, error) { + return imp.ImportFrom(importPath, "", 0) +} + +func (imp importer) ImportFrom(importPath, srcDir string, mode types.ImportMode) (_ *types.Package, err error) { + filename, path := Find(importPath, srcDir) + if filename == "" { + if importPath == "unsafe" { + // Even for unsafe, call Find first in case + // the package was vendored. 
+ return types.Unsafe, nil + } + return nil, fmt.Errorf("can't find import: %s", importPath) + } + + if pkg, ok := imp.imports[path]; ok && pkg.Complete() { + return pkg, nil // cache hit + } + + // open file + f, err := os.Open(filename) + if err != nil { + return nil, err + } + defer func() { + f.Close() + if err != nil { + // add file name to error + err = fmt.Errorf("reading export data: %s: %v", filename, err) + } + }() + + r, err := NewReader(f) + if err != nil { + return nil, err + } + + return Read(r, imp.fset, imp.imports, path) +} diff --git a/vendor/golang.org/x/tools/go/internal/gcimporter/bexport.go b/vendor/golang.org/x/tools/go/internal/gcimporter/bexport.go new file mode 100644 index 000000000..a807d0aaa --- /dev/null +++ b/vendor/golang.org/x/tools/go/internal/gcimporter/bexport.go @@ -0,0 +1,852 @@ +// Copyright 2016 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Binary package export. +// This file was derived from $GOROOT/src/cmd/compile/internal/gc/bexport.go; +// see that file for specification of the format. + +package gcimporter + +import ( + "bytes" + "encoding/binary" + "fmt" + "go/ast" + "go/constant" + "go/token" + "go/types" + "math" + "math/big" + "sort" + "strings" +) + +// If debugFormat is set, each integer and string value is preceded by a marker +// and position information in the encoding. This mechanism permits an importer +// to recognize immediately when it is out of sync. The importer recognizes this +// mode automatically (i.e., it can import export data produced with debugging +// support even if debugFormat is not set at the time of import). This mode will +// lead to massively larger export data (by a factor of 2 to 3) and should only +// be enabled during development and debugging. +// +// NOTE: This flag is the first flag to enable if importing dies because of +// (suspected) format errors, and whenever a change is made to the format. +const debugFormat = false // default: false + +// If trace is set, debugging output is printed to std out. +const trace = false // default: false + +// Current export format version. Increase with each format change. +// Note: The latest binary (non-indexed) export format is at version 6. +// This exporter is still at level 4, but it doesn't matter since +// the binary importer can handle older versions just fine. +// 6: package height (CL 105038) -- NOT IMPLEMENTED HERE +// 5: improved position encoding efficiency (issue 20080, CL 41619) -- NOT IMPLEMEMTED HERE +// 4: type name objects support type aliases, uses aliasTag +// 3: Go1.8 encoding (same as version 2, aliasTag defined but never used) +// 2: removed unused bool in ODCL export (compiler only) +// 1: header format change (more regular), export package for _ struct fields +// 0: Go1.7 encoding +const exportVersion = 4 + +// trackAllTypes enables cycle tracking for all types, not just named +// types. The existing compiler invariants assume that unnamed types +// that are not completely set up are not used, or else there are spurious +// errors. +// If disabled, only named types are tracked, possibly leading to slightly +// less efficient encoding in rare cases. It also prevents the export of +// some corner-case type declarations (but those are not handled correctly +// with with the textual export format either). 
+// TODO(gri) enable and remove once issues caused by it are fixed +const trackAllTypes = false + +type exporter struct { + fset *token.FileSet + out bytes.Buffer + + // object -> index maps, indexed in order of serialization + strIndex map[string]int + pkgIndex map[*types.Package]int + typIndex map[types.Type]int + + // position encoding + posInfoFormat bool + prevFile string + prevLine int + + // debugging support + written int // bytes written + indent int // for trace +} + +// internalError represents an error generated inside this package. +type internalError string + +func (e internalError) Error() string { return "gcimporter: " + string(e) } + +func internalErrorf(format string, args ...interface{}) error { + return internalError(fmt.Sprintf(format, args...)) +} + +// BExportData returns binary export data for pkg. +// If no file set is provided, position info will be missing. +func BExportData(fset *token.FileSet, pkg *types.Package) (b []byte, err error) { + defer func() { + if e := recover(); e != nil { + if ierr, ok := e.(internalError); ok { + err = ierr + return + } + // Not an internal error; panic again. + panic(e) + } + }() + + p := exporter{ + fset: fset, + strIndex: map[string]int{"": 0}, // empty string is mapped to 0 + pkgIndex: make(map[*types.Package]int), + typIndex: make(map[types.Type]int), + posInfoFormat: true, // TODO(gri) might become a flag, eventually + } + + // write version info + // The version string must start with "version %d" where %d is the version + // number. Additional debugging information may follow after a blank; that + // text is ignored by the importer. + p.rawStringln(fmt.Sprintf("version %d", exportVersion)) + var debug string + if debugFormat { + debug = "debug" + } + p.rawStringln(debug) // cannot use p.bool since it's affected by debugFormat; also want to see this clearly + p.bool(trackAllTypes) + p.bool(p.posInfoFormat) + + // --- generic export data --- + + // populate type map with predeclared "known" types + for index, typ := range predeclared() { + p.typIndex[typ] = index + } + if len(p.typIndex) != len(predeclared()) { + return nil, internalError("duplicate entries in type map?") + } + + // write package data + p.pkg(pkg, true) + if trace { + p.tracef("\n") + } + + // write objects + objcount := 0 + scope := pkg.Scope() + for _, name := range scope.Names() { + if !ast.IsExported(name) { + continue + } + if trace { + p.tracef("\n") + } + p.obj(scope.Lookup(name)) + objcount++ + } + + // indicate end of list + if trace { + p.tracef("\n") + } + p.tag(endTag) + + // for self-verification only (redundant) + p.int(objcount) + + if trace { + p.tracef("\n") + } + + // --- end of export data --- + + return p.out.Bytes(), nil +} + +func (p *exporter) pkg(pkg *types.Package, emptypath bool) { + if pkg == nil { + panic(internalError("unexpected nil pkg")) + } + + // if we saw the package before, write its index (>= 0) + if i, ok := p.pkgIndex[pkg]; ok { + p.index('P', i) + return + } + + // otherwise, remember the package, write the package tag (< 0) and package data + if trace { + p.tracef("P%d = { ", len(p.pkgIndex)) + defer p.tracef("} ") + } + p.pkgIndex[pkg] = len(p.pkgIndex) + + p.tag(packageTag) + p.string(pkg.Name()) + if emptypath { + p.string("") + } else { + p.string(pkg.Path()) + } +} + +func (p *exporter) obj(obj types.Object) { + switch obj := obj.(type) { + case *types.Const: + p.tag(constTag) + p.pos(obj) + p.qualifiedName(obj) + p.typ(obj.Type()) + p.value(obj.Val()) + + case *types.TypeName: + if obj.IsAlias() { + 
p.tag(aliasTag) + p.pos(obj) + p.qualifiedName(obj) + } else { + p.tag(typeTag) + } + p.typ(obj.Type()) + + case *types.Var: + p.tag(varTag) + p.pos(obj) + p.qualifiedName(obj) + p.typ(obj.Type()) + + case *types.Func: + p.tag(funcTag) + p.pos(obj) + p.qualifiedName(obj) + sig := obj.Type().(*types.Signature) + p.paramList(sig.Params(), sig.Variadic()) + p.paramList(sig.Results(), false) + + default: + panic(internalErrorf("unexpected object %v (%T)", obj, obj)) + } +} + +func (p *exporter) pos(obj types.Object) { + if !p.posInfoFormat { + return + } + + file, line := p.fileLine(obj) + if file == p.prevFile { + // common case: write line delta + // delta == 0 means different file or no line change + delta := line - p.prevLine + p.int(delta) + if delta == 0 { + p.int(-1) // -1 means no file change + } + } else { + // different file + p.int(0) + // Encode filename as length of common prefix with previous + // filename, followed by (possibly empty) suffix. Filenames + // frequently share path prefixes, so this can save a lot + // of space and make export data size less dependent on file + // path length. The suffix is unlikely to be empty because + // file names tend to end in ".go". + n := commonPrefixLen(p.prevFile, file) + p.int(n) // n >= 0 + p.string(file[n:]) // write suffix only + p.prevFile = file + p.int(line) + } + p.prevLine = line +} + +func (p *exporter) fileLine(obj types.Object) (file string, line int) { + if p.fset != nil { + pos := p.fset.Position(obj.Pos()) + file = pos.Filename + line = pos.Line + } + return +} + +func commonPrefixLen(a, b string) int { + if len(a) > len(b) { + a, b = b, a + } + // len(a) <= len(b) + i := 0 + for i < len(a) && a[i] == b[i] { + i++ + } + return i +} + +func (p *exporter) qualifiedName(obj types.Object) { + p.string(obj.Name()) + p.pkg(obj.Pkg(), false) +} + +func (p *exporter) typ(t types.Type) { + if t == nil { + panic(internalError("nil type")) + } + + // Possible optimization: Anonymous pointer types *T where + // T is a named type are common. We could canonicalize all + // such types *T to a single type PT = *T. This would lead + // to at most one *T entry in typIndex, and all future *T's + // would be encoded as the respective index directly. Would + // save 1 byte (pointerTag) per *T and reduce the typIndex + // size (at the cost of a canonicalization map). We can do + // this later, without encoding format change. 
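
// A small worked example of the filename compression used by
// (*exporter).pos above: when the file changes, only the length of the
// common prefix with the previous filename plus the remaining suffix is
// written, since source paths tend to share long directory prefixes.
package main

import "fmt"

func commonPrefixLen(a, b string) int {
	if len(a) > len(b) {
		a, b = b, a
	}
	i := 0
	for i < len(a) && a[i] == b[i] {
		i++
	}
	return i
}

func main() {
	prev := "golang.org/x/tools/go/internal/gcimporter/bexport.go"
	next := "golang.org/x/tools/go/internal/gcimporter/bimport.go"
	n := commonPrefixLen(prev, next)
	// The exporter emits n and only the differing suffix.
	fmt.Printf("shared prefix: %d bytes, suffix: %q\n", n, next[n:])
}
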
+ + // if we saw the type before, write its index (>= 0) + if i, ok := p.typIndex[t]; ok { + p.index('T', i) + return + } + + // otherwise, remember the type, write the type tag (< 0) and type data + if trackAllTypes { + if trace { + p.tracef("T%d = {>\n", len(p.typIndex)) + defer p.tracef("<\n} ") + } + p.typIndex[t] = len(p.typIndex) + } + + switch t := t.(type) { + case *types.Named: + if !trackAllTypes { + // if we don't track all types, track named types now + p.typIndex[t] = len(p.typIndex) + } + + p.tag(namedTag) + p.pos(t.Obj()) + p.qualifiedName(t.Obj()) + p.typ(t.Underlying()) + if !types.IsInterface(t) { + p.assocMethods(t) + } + + case *types.Array: + p.tag(arrayTag) + p.int64(t.Len()) + p.typ(t.Elem()) + + case *types.Slice: + p.tag(sliceTag) + p.typ(t.Elem()) + + case *dddSlice: + p.tag(dddTag) + p.typ(t.elem) + + case *types.Struct: + p.tag(structTag) + p.fieldList(t) + + case *types.Pointer: + p.tag(pointerTag) + p.typ(t.Elem()) + + case *types.Signature: + p.tag(signatureTag) + p.paramList(t.Params(), t.Variadic()) + p.paramList(t.Results(), false) + + case *types.Interface: + p.tag(interfaceTag) + p.iface(t) + + case *types.Map: + p.tag(mapTag) + p.typ(t.Key()) + p.typ(t.Elem()) + + case *types.Chan: + p.tag(chanTag) + p.int(int(3 - t.Dir())) // hack + p.typ(t.Elem()) + + default: + panic(internalErrorf("unexpected type %T: %s", t, t)) + } +} + +func (p *exporter) assocMethods(named *types.Named) { + // Sort methods (for determinism). + var methods []*types.Func + for i := 0; i < named.NumMethods(); i++ { + methods = append(methods, named.Method(i)) + } + sort.Sort(methodsByName(methods)) + + p.int(len(methods)) + + if trace && methods != nil { + p.tracef("associated methods {>\n") + } + + for i, m := range methods { + if trace && i > 0 { + p.tracef("\n") + } + + p.pos(m) + name := m.Name() + p.string(name) + if !exported(name) { + p.pkg(m.Pkg(), false) + } + + sig := m.Type().(*types.Signature) + p.paramList(types.NewTuple(sig.Recv()), false) + p.paramList(sig.Params(), sig.Variadic()) + p.paramList(sig.Results(), false) + p.int(0) // dummy value for go:nointerface pragma - ignored by importer + } + + if trace && methods != nil { + p.tracef("<\n} ") + } +} + +type methodsByName []*types.Func + +func (x methodsByName) Len() int { return len(x) } +func (x methodsByName) Swap(i, j int) { x[i], x[j] = x[j], x[i] } +func (x methodsByName) Less(i, j int) bool { return x[i].Name() < x[j].Name() } + +func (p *exporter) fieldList(t *types.Struct) { + if trace && t.NumFields() > 0 { + p.tracef("fields {>\n") + defer p.tracef("<\n} ") + } + + p.int(t.NumFields()) + for i := 0; i < t.NumFields(); i++ { + if trace && i > 0 { + p.tracef("\n") + } + p.field(t.Field(i)) + p.string(t.Tag(i)) + } +} + +func (p *exporter) field(f *types.Var) { + if !f.IsField() { + panic(internalError("field expected")) + } + + p.pos(f) + p.fieldName(f) + p.typ(f.Type()) +} + +func (p *exporter) iface(t *types.Interface) { + // TODO(gri): enable importer to load embedded interfaces, + // then emit Embeddeds and ExplicitMethods separately here. 
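
// A tiny sketch isolating the memoization convention used by strIndex,
// pkgIndex and typIndex above: the first occurrence of a value is written
// as a negative tag followed by its data, and every later occurrence as
// its non-negative index (the payload is abbreviated here as a length).
package main

import "fmt"

const stringTag = -1 // stands in for the real negative tag constants

type enc struct {
	out   []int
	index map[string]int
}

func (e *enc) string(s string) {
	if i, ok := e.index[s]; ok {
		e.out = append(e.out, i) // seen before: emit its index (>= 0)
		return
	}
	e.index[s] = len(e.index)
	e.out = append(e.out, stringTag, len(s)) // first time: tag (< 0) + data
}

func main() {
	e := &enc{index: map[string]int{}}
	for _, s := range []string{"go/types", "fmt", "go/types"} {
		e.string(s)
	}
	fmt.Println(e.out) // [-1 8 -1 3 0]: the repeated "go/types" becomes index 0
}
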
+ p.int(0) + + n := t.NumMethods() + if trace && n > 0 { + p.tracef("methods {>\n") + defer p.tracef("<\n} ") + } + p.int(n) + for i := 0; i < n; i++ { + if trace && i > 0 { + p.tracef("\n") + } + p.method(t.Method(i)) + } +} + +func (p *exporter) method(m *types.Func) { + sig := m.Type().(*types.Signature) + if sig.Recv() == nil { + panic(internalError("method expected")) + } + + p.pos(m) + p.string(m.Name()) + if m.Name() != "_" && !ast.IsExported(m.Name()) { + p.pkg(m.Pkg(), false) + } + + // interface method; no need to encode receiver. + p.paramList(sig.Params(), sig.Variadic()) + p.paramList(sig.Results(), false) +} + +func (p *exporter) fieldName(f *types.Var) { + name := f.Name() + + if f.Anonymous() { + // anonymous field - we distinguish between 3 cases: + // 1) field name matches base type name and is exported + // 2) field name matches base type name and is not exported + // 3) field name doesn't match base type name (alias name) + bname := basetypeName(f.Type()) + if name == bname { + if ast.IsExported(name) { + name = "" // 1) we don't need to know the field name or package + } else { + name = "?" // 2) use unexported name "?" to force package export + } + } else { + // 3) indicate alias and export name as is + // (this requires an extra "@" but this is a rare case) + p.string("@") + } + } + + p.string(name) + if name != "" && !ast.IsExported(name) { + p.pkg(f.Pkg(), false) + } +} + +func basetypeName(typ types.Type) string { + switch typ := deref(typ).(type) { + case *types.Basic: + return typ.Name() + case *types.Named: + return typ.Obj().Name() + default: + return "" // unnamed type + } +} + +func (p *exporter) paramList(params *types.Tuple, variadic bool) { + // use negative length to indicate unnamed parameters + // (look at the first parameter only since either all + // names are present or all are absent) + n := params.Len() + if n > 0 && params.At(0).Name() == "" { + n = -n + } + p.int(n) + for i := 0; i < params.Len(); i++ { + q := params.At(i) + t := q.Type() + if variadic && i == params.Len()-1 { + t = &dddSlice{t.(*types.Slice).Elem()} + } + p.typ(t) + if n > 0 { + name := q.Name() + p.string(name) + if name != "_" { + p.pkg(q.Pkg(), false) + } + } + p.string("") // no compiler-specific info + } +} + +func (p *exporter) value(x constant.Value) { + if trace { + p.tracef("= ") + } + + switch x.Kind() { + case constant.Bool: + tag := falseTag + if constant.BoolVal(x) { + tag = trueTag + } + p.tag(tag) + + case constant.Int: + if v, exact := constant.Int64Val(x); exact { + // common case: x fits into an int64 - use compact encoding + p.tag(int64Tag) + p.int64(v) + return + } + // uncommon case: large x - use float encoding + // (powers of 2 will be encoded efficiently with exponent) + p.tag(floatTag) + p.float(constant.ToFloat(x)) + + case constant.Float: + p.tag(floatTag) + p.float(x) + + case constant.Complex: + p.tag(complexTag) + p.float(constant.Real(x)) + p.float(constant.Imag(x)) + + case constant.String: + p.tag(stringTag) + p.string(constant.StringVal(x)) + + case constant.Unknown: + // package contains type errors + p.tag(unknownTag) + + default: + panic(internalErrorf("unexpected value %v (%T)", x, x)) + } +} + +func (p *exporter) float(x constant.Value) { + if x.Kind() != constant.Float { + panic(internalErrorf("unexpected constant %v, want float", x)) + } + // extract sign (there is no -0) + sign := constant.Sign(x) + if sign == 0 { + // x == 0 + p.int(0) + return + } + // x != 0 + + var f big.Float + if v, exact := constant.Float64Val(x); exact { + // 
float64 + f.SetFloat64(v) + } else if num, denom := constant.Num(x), constant.Denom(x); num.Kind() == constant.Int { + // TODO(gri): add big.Rat accessor to constant.Value. + r := valueToRat(num) + f.SetRat(r.Quo(r, valueToRat(denom))) + } else { + // Value too large to represent as a fraction => inaccessible. + // TODO(gri): add big.Float accessor to constant.Value. + f.SetFloat64(math.MaxFloat64) // FIXME + } + + // extract exponent such that 0.5 <= m < 1.0 + var m big.Float + exp := f.MantExp(&m) + + // extract mantissa as *big.Int + // - set exponent large enough so mant satisfies mant.IsInt() + // - get *big.Int from mant + m.SetMantExp(&m, int(m.MinPrec())) + mant, acc := m.Int(nil) + if acc != big.Exact { + panic(internalError("internal error")) + } + + p.int(sign) + p.int(exp) + p.string(string(mant.Bytes())) +} + +func valueToRat(x constant.Value) *big.Rat { + // Convert little-endian to big-endian. + // I can't believe this is necessary. + bytes := constant.Bytes(x) + for i := 0; i < len(bytes)/2; i++ { + bytes[i], bytes[len(bytes)-1-i] = bytes[len(bytes)-1-i], bytes[i] + } + return new(big.Rat).SetInt(new(big.Int).SetBytes(bytes)) +} + +func (p *exporter) bool(b bool) bool { + if trace { + p.tracef("[") + defer p.tracef("= %v] ", b) + } + + x := 0 + if b { + x = 1 + } + p.int(x) + return b +} + +// ---------------------------------------------------------------------------- +// Low-level encoders + +func (p *exporter) index(marker byte, index int) { + if index < 0 { + panic(internalError("invalid index < 0")) + } + if debugFormat { + p.marker('t') + } + if trace { + p.tracef("%c%d ", marker, index) + } + p.rawInt64(int64(index)) +} + +func (p *exporter) tag(tag int) { + if tag >= 0 { + panic(internalError("invalid tag >= 0")) + } + if debugFormat { + p.marker('t') + } + if trace { + p.tracef("%s ", tagString[-tag]) + } + p.rawInt64(int64(tag)) +} + +func (p *exporter) int(x int) { + p.int64(int64(x)) +} + +func (p *exporter) int64(x int64) { + if debugFormat { + p.marker('i') + } + if trace { + p.tracef("%d ", x) + } + p.rawInt64(x) +} + +func (p *exporter) string(s string) { + if debugFormat { + p.marker('s') + } + if trace { + p.tracef("%q ", s) + } + // if we saw the string before, write its index (>= 0) + // (the empty string is mapped to 0) + if i, ok := p.strIndex[s]; ok { + p.rawInt64(int64(i)) + return + } + // otherwise, remember string and write its negative length and bytes + p.strIndex[s] = len(p.strIndex) + p.rawInt64(-int64(len(s))) + for i := 0; i < len(s); i++ { + p.rawByte(s[i]) + } +} + +// marker emits a marker byte and position information which makes +// it easy for a reader to detect if it is "out of sync". Used for +// debugFormat format only. +func (p *exporter) marker(m byte) { + p.rawByte(m) + // Enable this for help tracking down the location + // of an incorrect marker when running in debugFormat. + if false && trace { + p.tracef("#%d ", p.written) + } + p.rawInt64(int64(p.written)) +} + +// rawInt64 should only be used by low-level encoders. +func (p *exporter) rawInt64(x int64) { + var tmp [binary.MaxVarintLen64]byte + n := binary.PutVarint(tmp[:], x) + for i := 0; i < n; i++ { + p.rawByte(tmp[i]) + } +} + +// rawStringln should only be used to emit the initial version string. +func (p *exporter) rawStringln(s string) { + for i := 0; i < len(s); i++ { + p.rawByte(s[i]) + } + p.rawByte('\n') +} + +// rawByte is the bottleneck interface to write to p.out. 
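
// A minimal round trip of the varint encoding that rawInt64 above relies
// on: every integer in the export data is a signed varint written with
// binary.PutVarint and read back with binary.ReadVarint.
package main

import (
	"bytes"
	"encoding/binary"
	"fmt"
)

func main() {
	var tmp [binary.MaxVarintLen64]byte
	var out bytes.Buffer
	for _, x := range []int64{0, -1, 4, 300, -100000} {
		n := binary.PutVarint(tmp[:], x)
		out.Write(tmp[:n])
	}

	r := bytes.NewReader(out.Bytes())
	for {
		x, err := binary.ReadVarint(r)
		if err != nil {
			break // io.EOF after the last value
		}
		fmt.Println(x)
	}
}
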
+// rawByte escapes b as follows (any encoding does that +// hides '$'): +// +// '$' => '|' 'S' +// '|' => '|' '|' +// +// Necessary so other tools can find the end of the +// export data by searching for "$$". +// rawByte should only be used by low-level encoders. +func (p *exporter) rawByte(b byte) { + switch b { + case '$': + // write '$' as '|' 'S' + b = 'S' + fallthrough + case '|': + // write '|' as '|' '|' + p.out.WriteByte('|') + p.written++ + } + p.out.WriteByte(b) + p.written++ +} + +// tracef is like fmt.Printf but it rewrites the format string +// to take care of indentation. +func (p *exporter) tracef(format string, args ...interface{}) { + if strings.ContainsAny(format, "<>\n") { + var buf bytes.Buffer + for i := 0; i < len(format); i++ { + // no need to deal with runes + ch := format[i] + switch ch { + case '>': + p.indent++ + continue + case '<': + p.indent-- + continue + } + buf.WriteByte(ch) + if ch == '\n' { + for j := p.indent; j > 0; j-- { + buf.WriteString(". ") + } + } + } + format = buf.String() + } + fmt.Printf(format, args...) +} + +// Debugging support. +// (tagString is only used when tracing is enabled) +var tagString = [...]string{ + // Packages + -packageTag: "package", + + // Types + -namedTag: "named type", + -arrayTag: "array", + -sliceTag: "slice", + -dddTag: "ddd", + -structTag: "struct", + -pointerTag: "pointer", + -signatureTag: "signature", + -interfaceTag: "interface", + -mapTag: "map", + -chanTag: "chan", + + // Values + -falseTag: "false", + -trueTag: "true", + -int64Tag: "int64", + -floatTag: "float", + -fractionTag: "fraction", + -complexTag: "complex", + -stringTag: "string", + -unknownTag: "unknown", + + // Type aliases + -aliasTag: "alias", +} diff --git a/vendor/golang.org/x/tools/go/internal/gcimporter/bimport.go b/vendor/golang.org/x/tools/go/internal/gcimporter/bimport.go new file mode 100644 index 000000000..e9f73d14a --- /dev/null +++ b/vendor/golang.org/x/tools/go/internal/gcimporter/bimport.go @@ -0,0 +1,1039 @@ +// Copyright 2015 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// This file is a copy of $GOROOT/src/go/internal/gcimporter/bimport.go. + +package gcimporter + +import ( + "encoding/binary" + "fmt" + "go/constant" + "go/token" + "go/types" + "sort" + "strconv" + "strings" + "sync" + "unicode" + "unicode/utf8" +) + +type importer struct { + imports map[string]*types.Package + data []byte + importpath string + buf []byte // for reading strings + version int // export format version + + // object lists + strList []string // in order of appearance + pathList []string // in order of appearance + pkgList []*types.Package // in order of appearance + typList []types.Type // in order of appearance + interfaceList []*types.Interface // for delayed completion only + trackAllTypes bool + + // position encoding + posInfoFormat bool + prevFile string + prevLine int + fake fakeFileSet + + // debugging support + debugFormat bool + read int // bytes read +} + +// BImportData imports a package from the serialized package data +// and returns the number of bytes consumed and a reference to the package. +// If the export data version is not recognized or the format is otherwise +// compromised, an error is returned. 
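
// A self-contained sketch of the escaping scheme documented above: '$' and
// '|' are escaped on the way out so that the "$$" terminator can never
// appear inside the payload, and the reader reverses the mapping.
package main

import (
	"bytes"
	"fmt"
)

func escape(data []byte) []byte {
	var out bytes.Buffer
	for _, b := range data {
		switch b {
		case '$':
			out.WriteString("|S")
		case '|':
			out.WriteString("||")
		default:
			out.WriteByte(b)
		}
	}
	return out.Bytes()
}

func unescape(data []byte) []byte {
	var out bytes.Buffer
	for i := 0; i < len(data); i++ {
		b := data[i]
		if b == '|' {
			i++
			if data[i] == 'S' {
				b = '$'
			} // "||" stays '|'
		}
		out.WriteByte(b)
	}
	return out.Bytes()
}

func main() {
	in := []byte("a $ and a | inside the payload")
	enc := escape(in)
	fmt.Printf("%q\n", enc)                     // no bare '$' survives
	fmt.Println(bytes.Equal(unescape(enc), in)) // true
}
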
+func BImportData(fset *token.FileSet, imports map[string]*types.Package, data []byte, path string) (_ int, pkg *types.Package, err error) { + // catch panics and return them as errors + const currentVersion = 6 + version := -1 // unknown version + defer func() { + if e := recover(); e != nil { + // Return a (possibly nil or incomplete) package unchanged (see #16088). + if version > currentVersion { + err = fmt.Errorf("cannot import %q (%v), export data is newer version - update tool", path, e) + } else { + err = fmt.Errorf("cannot import %q (%v), possibly version skew - reinstall package", path, e) + } + } + }() + + p := importer{ + imports: imports, + data: data, + importpath: path, + version: version, + strList: []string{""}, // empty string is mapped to 0 + pathList: []string{""}, // empty string is mapped to 0 + fake: fakeFileSet{ + fset: fset, + files: make(map[string]*token.File), + }, + } + + // read version info + var versionstr string + if b := p.rawByte(); b == 'c' || b == 'd' { + // Go1.7 encoding; first byte encodes low-level + // encoding format (compact vs debug). + // For backward-compatibility only (avoid problems with + // old installed packages). Newly compiled packages use + // the extensible format string. + // TODO(gri) Remove this support eventually; after Go1.8. + if b == 'd' { + p.debugFormat = true + } + p.trackAllTypes = p.rawByte() == 'a' + p.posInfoFormat = p.int() != 0 + versionstr = p.string() + if versionstr == "v1" { + version = 0 + } + } else { + // Go1.8 extensible encoding + // read version string and extract version number (ignore anything after the version number) + versionstr = p.rawStringln(b) + if s := strings.SplitN(versionstr, " ", 3); len(s) >= 2 && s[0] == "version" { + if v, err := strconv.Atoi(s[1]); err == nil && v > 0 { + version = v + } + } + } + p.version = version + + // read version specific flags - extend as necessary + switch p.version { + // case currentVersion: + // ... + // fallthrough + case currentVersion, 5, 4, 3, 2, 1: + p.debugFormat = p.rawStringln(p.rawByte()) == "debug" + p.trackAllTypes = p.int() != 0 + p.posInfoFormat = p.int() != 0 + case 0: + // Go1.7 encoding format - nothing to do here + default: + errorf("unknown bexport format version %d (%q)", p.version, versionstr) + } + + // --- generic export data --- + + // populate typList with predeclared "known" types + p.typList = append(p.typList, predeclared()...) + + // read package data + pkg = p.pkg() + + // read objects of phase 1 only (see cmd/compile/internal/gc/bexport.go) + objcount := 0 + for { + tag := p.tagOrIndex() + if tag == endTag { + break + } + p.obj(tag) + objcount++ + } + + // self-verification + if count := p.int(); count != objcount { + errorf("got %d objects; want %d", objcount, count) + } + + // ignore compiler-specific import data + + // complete interfaces + // TODO(gri) re-investigate if we still need to do this in a delayed fashion + for _, typ := range p.interfaceList { + typ.Complete() + } + + // record all referenced packages as imports + list := append(([]*types.Package)(nil), p.pkgList[1:]...) 
+ sort.Sort(byPath(list)) + pkg.SetImports(list) + + // package was imported completely and without errors + pkg.MarkComplete() + + return p.read, pkg, nil +} + +func errorf(format string, args ...interface{}) { + panic(fmt.Sprintf(format, args...)) +} + +func (p *importer) pkg() *types.Package { + // if the package was seen before, i is its index (>= 0) + i := p.tagOrIndex() + if i >= 0 { + return p.pkgList[i] + } + + // otherwise, i is the package tag (< 0) + if i != packageTag { + errorf("unexpected package tag %d version %d", i, p.version) + } + + // read package data + name := p.string() + var path string + if p.version >= 5 { + path = p.path() + } else { + path = p.string() + } + if p.version >= 6 { + p.int() // package height; unused by go/types + } + + // we should never see an empty package name + if name == "" { + errorf("empty package name in import") + } + + // an empty path denotes the package we are currently importing; + // it must be the first package we see + if (path == "") != (len(p.pkgList) == 0) { + errorf("package path %q for pkg index %d", path, len(p.pkgList)) + } + + // if the package was imported before, use that one; otherwise create a new one + if path == "" { + path = p.importpath + } + pkg := p.imports[path] + if pkg == nil { + pkg = types.NewPackage(path, name) + p.imports[path] = pkg + } else if pkg.Name() != name { + errorf("conflicting names %s and %s for package %q", pkg.Name(), name, path) + } + p.pkgList = append(p.pkgList, pkg) + + return pkg +} + +// objTag returns the tag value for each object kind. +func objTag(obj types.Object) int { + switch obj.(type) { + case *types.Const: + return constTag + case *types.TypeName: + return typeTag + case *types.Var: + return varTag + case *types.Func: + return funcTag + default: + errorf("unexpected object: %v (%T)", obj, obj) // panics + panic("unreachable") + } +} + +func sameObj(a, b types.Object) bool { + // Because unnamed types are not canonicalized, we cannot simply compare types for + // (pointer) identity. + // Ideally we'd check equality of constant values as well, but this is good enough. + return objTag(a) == objTag(b) && types.Identical(a.Type(), b.Type()) +} + +func (p *importer) declare(obj types.Object) { + pkg := obj.Pkg() + if alt := pkg.Scope().Insert(obj); alt != nil { + // This can only trigger if we import a (non-type) object a second time. + // Excluding type aliases, this cannot happen because 1) we only import a package + // once; and b) we ignore compiler-specific export data which may contain + // functions whose inlined function bodies refer to other functions that + // were already imported. + // However, type aliases require reexporting the original type, so we need + // to allow it (see also the comment in cmd/compile/internal/gc/bimport.go, + // method importer.obj, switch case importing functions). + // TODO(gri) review/update this comment once the gc compiler handles type aliases. 
+ if !sameObj(obj, alt) { + errorf("inconsistent import:\n\t%v\npreviously imported as:\n\t%v\n", obj, alt) + } + } +} + +func (p *importer) obj(tag int) { + switch tag { + case constTag: + pos := p.pos() + pkg, name := p.qualifiedName() + typ := p.typ(nil, nil) + val := p.value() + p.declare(types.NewConst(pos, pkg, name, typ, val)) + + case aliasTag: + // TODO(gri) verify type alias hookup is correct + pos := p.pos() + pkg, name := p.qualifiedName() + typ := p.typ(nil, nil) + p.declare(types.NewTypeName(pos, pkg, name, typ)) + + case typeTag: + p.typ(nil, nil) + + case varTag: + pos := p.pos() + pkg, name := p.qualifiedName() + typ := p.typ(nil, nil) + p.declare(types.NewVar(pos, pkg, name, typ)) + + case funcTag: + pos := p.pos() + pkg, name := p.qualifiedName() + params, isddd := p.paramList() + result, _ := p.paramList() + sig := types.NewSignature(nil, params, result, isddd) + p.declare(types.NewFunc(pos, pkg, name, sig)) + + default: + errorf("unexpected object tag %d", tag) + } +} + +const deltaNewFile = -64 // see cmd/compile/internal/gc/bexport.go + +func (p *importer) pos() token.Pos { + if !p.posInfoFormat { + return token.NoPos + } + + file := p.prevFile + line := p.prevLine + delta := p.int() + line += delta + if p.version >= 5 { + if delta == deltaNewFile { + if n := p.int(); n >= 0 { + // file changed + file = p.path() + line = n + } + } + } else { + if delta == 0 { + if n := p.int(); n >= 0 { + // file changed + file = p.prevFile[:n] + p.string() + line = p.int() + } + } + } + p.prevFile = file + p.prevLine = line + + return p.fake.pos(file, line, 0) +} + +// Synthesize a token.Pos +type fakeFileSet struct { + fset *token.FileSet + files map[string]*token.File +} + +func (s *fakeFileSet) pos(file string, line, column int) token.Pos { + // TODO(mdempsky): Make use of column. + + // Since we don't know the set of needed file positions, we + // reserve maxlines positions per file. + const maxlines = 64 * 1024 + f := s.files[file] + if f == nil { + f = s.fset.AddFile(file, -1, maxlines) + s.files[file] = f + // Allocate the fake linebreak indices on first use. + // TODO(adonovan): opt: save ~512KB using a more complex scheme? + fakeLinesOnce.Do(func() { + fakeLines = make([]int, maxlines) + for i := range fakeLines { + fakeLines[i] = i + } + }) + f.SetLines(fakeLines) + } + + if line > maxlines { + line = 1 + } + + // Treat the file as if it contained only newlines + // and column=1: use the line number as the offset. + return f.Pos(line - 1) +} + +var ( + fakeLines []int + fakeLinesOnce sync.Once +) + +func (p *importer) qualifiedName() (pkg *types.Package, name string) { + name = p.string() + pkg = p.pkg() + return +} + +func (p *importer) record(t types.Type) { + p.typList = append(p.typList, t) +} + +// A dddSlice is a types.Type representing ...T parameters. +// It only appears for parameter types and does not escape +// the importer. +type dddSlice struct { + elem types.Type +} + +func (t *dddSlice) Underlying() types.Type { return t } +func (t *dddSlice) String() string { return "..." + t.elem.String() } + +// parent is the package which declared the type; parent == nil means +// the package currently imported. The parent package is needed for +// exported struct fields and interface methods which don't contain +// explicit package information in the export data. +// +// A non-nil tname is used as the "owner" of the result type; i.e., +// the result type is the underlying type of tname. tname is used +// to give interface methods a named receiver type where possible. 
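
// A minimal sketch of the fakeFileSet trick above: reserve a block of
// positions per file, pretend every line is one byte long via SetLines,
// and token.Pos values can then be synthesized from line numbers alone.
package main

import (
	"fmt"
	"go/token"
)

func main() {
	const maxlines = 64 * 1024
	fset := token.NewFileSet()
	f := fset.AddFile("fake.go", -1, maxlines)

	lines := make([]int, maxlines)
	for i := range lines {
		lines[i] = i // line i+1 starts at offset i
	}
	f.SetLines(lines)

	pos := f.Pos(42 - 1)            // fabricate a position for line 42
	fmt.Println(fset.Position(pos)) // fake.go:42:1
}
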
+func (p *importer) typ(parent *types.Package, tname *types.Named) types.Type { + // if the type was seen before, i is its index (>= 0) + i := p.tagOrIndex() + if i >= 0 { + return p.typList[i] + } + + // otherwise, i is the type tag (< 0) + switch i { + case namedTag: + // read type object + pos := p.pos() + parent, name := p.qualifiedName() + scope := parent.Scope() + obj := scope.Lookup(name) + + // if the object doesn't exist yet, create and insert it + if obj == nil { + obj = types.NewTypeName(pos, parent, name, nil) + scope.Insert(obj) + } + + if _, ok := obj.(*types.TypeName); !ok { + errorf("pkg = %s, name = %s => %s", parent, name, obj) + } + + // associate new named type with obj if it doesn't exist yet + t0 := types.NewNamed(obj.(*types.TypeName), nil, nil) + + // but record the existing type, if any + tname := obj.Type().(*types.Named) // tname is either t0 or the existing type + p.record(tname) + + // read underlying type + t0.SetUnderlying(p.typ(parent, t0)) + + // interfaces don't have associated methods + if types.IsInterface(t0) { + return tname + } + + // read associated methods + for i := p.int(); i > 0; i-- { + // TODO(gri) replace this with something closer to fieldName + pos := p.pos() + name := p.string() + if !exported(name) { + p.pkg() + } + + recv, _ := p.paramList() // TODO(gri) do we need a full param list for the receiver? + params, isddd := p.paramList() + result, _ := p.paramList() + p.int() // go:nointerface pragma - discarded + + sig := types.NewSignature(recv.At(0), params, result, isddd) + t0.AddMethod(types.NewFunc(pos, parent, name, sig)) + } + + return tname + + case arrayTag: + t := new(types.Array) + if p.trackAllTypes { + p.record(t) + } + + n := p.int64() + *t = *types.NewArray(p.typ(parent, nil), n) + return t + + case sliceTag: + t := new(types.Slice) + if p.trackAllTypes { + p.record(t) + } + + *t = *types.NewSlice(p.typ(parent, nil)) + return t + + case dddTag: + t := new(dddSlice) + if p.trackAllTypes { + p.record(t) + } + + t.elem = p.typ(parent, nil) + return t + + case structTag: + t := new(types.Struct) + if p.trackAllTypes { + p.record(t) + } + + *t = *types.NewStruct(p.fieldList(parent)) + return t + + case pointerTag: + t := new(types.Pointer) + if p.trackAllTypes { + p.record(t) + } + + *t = *types.NewPointer(p.typ(parent, nil)) + return t + + case signatureTag: + t := new(types.Signature) + if p.trackAllTypes { + p.record(t) + } + + params, isddd := p.paramList() + result, _ := p.paramList() + *t = *types.NewSignature(nil, params, result, isddd) + return t + + case interfaceTag: + // Create a dummy entry in the type list. This is safe because we + // cannot expect the interface type to appear in a cycle, as any + // such cycle must contain a named type which would have been + // first defined earlier. + // TODO(gri) Is this still true now that we have type aliases? + // See issue #23225. 
+ n := len(p.typList) + if p.trackAllTypes { + p.record(nil) + } + + var embeddeds []types.Type + for n := p.int(); n > 0; n-- { + p.pos() + embeddeds = append(embeddeds, p.typ(parent, nil)) + } + + t := newInterface(p.methodList(parent, tname), embeddeds) + p.interfaceList = append(p.interfaceList, t) + if p.trackAllTypes { + p.typList[n] = t + } + return t + + case mapTag: + t := new(types.Map) + if p.trackAllTypes { + p.record(t) + } + + key := p.typ(parent, nil) + val := p.typ(parent, nil) + *t = *types.NewMap(key, val) + return t + + case chanTag: + t := new(types.Chan) + if p.trackAllTypes { + p.record(t) + } + + dir := chanDir(p.int()) + val := p.typ(parent, nil) + *t = *types.NewChan(dir, val) + return t + + default: + errorf("unexpected type tag %d", i) // panics + panic("unreachable") + } +} + +func chanDir(d int) types.ChanDir { + // tag values must match the constants in cmd/compile/internal/gc/go.go + switch d { + case 1 /* Crecv */ : + return types.RecvOnly + case 2 /* Csend */ : + return types.SendOnly + case 3 /* Cboth */ : + return types.SendRecv + default: + errorf("unexpected channel dir %d", d) + return 0 + } +} + +func (p *importer) fieldList(parent *types.Package) (fields []*types.Var, tags []string) { + if n := p.int(); n > 0 { + fields = make([]*types.Var, n) + tags = make([]string, n) + for i := range fields { + fields[i], tags[i] = p.field(parent) + } + } + return +} + +func (p *importer) field(parent *types.Package) (*types.Var, string) { + pos := p.pos() + pkg, name, alias := p.fieldName(parent) + typ := p.typ(parent, nil) + tag := p.string() + + anonymous := false + if name == "" { + // anonymous field - typ must be T or *T and T must be a type name + switch typ := deref(typ).(type) { + case *types.Basic: // basic types are named types + pkg = nil // // objects defined in Universe scope have no package + name = typ.Name() + case *types.Named: + name = typ.Obj().Name() + default: + errorf("named base type expected") + } + anonymous = true + } else if alias { + // anonymous field: we have an explicit name because it's an alias + anonymous = true + } + + return types.NewField(pos, pkg, name, typ, anonymous), tag +} + +func (p *importer) methodList(parent *types.Package, baseType *types.Named) (methods []*types.Func) { + if n := p.int(); n > 0 { + methods = make([]*types.Func, n) + for i := range methods { + methods[i] = p.method(parent, baseType) + } + } + return +} + +func (p *importer) method(parent *types.Package, baseType *types.Named) *types.Func { + pos := p.pos() + pkg, name, _ := p.fieldName(parent) + // If we don't have a baseType, use a nil receiver. + // A receiver using the actual interface type (which + // we don't know yet) will be filled in when we call + // types.Interface.Complete. 
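
// A short sketch of the cycle-handling pattern used by (*importer).typ
// above: record a pointer to an empty placeholder first, then fill it in
// with *t = *types.NewX(...) once its components have been decoded, so any
// earlier reference to the recorded entry sees the completed type.
package main

import (
	"fmt"
	"go/types"
)

func main() {
	var typList []types.Type

	// Record a placeholder before the element type is known.
	t := new(types.Slice)
	typList = append(typList, t)

	// ... later, once the element type has been decoded ...
	*t = *types.NewSlice(types.Typ[types.Int])

	fmt.Println(typList[0]) // []int: the earlier entry now describes the slice
}
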
+ var recv *types.Var + if baseType != nil { + recv = types.NewVar(token.NoPos, parent, "", baseType) + } + params, isddd := p.paramList() + result, _ := p.paramList() + sig := types.NewSignature(recv, params, result, isddd) + return types.NewFunc(pos, pkg, name, sig) +} + +func (p *importer) fieldName(parent *types.Package) (pkg *types.Package, name string, alias bool) { + name = p.string() + pkg = parent + if pkg == nil { + // use the imported package instead + pkg = p.pkgList[0] + } + if p.version == 0 && name == "_" { + // version 0 didn't export a package for _ fields + return + } + switch name { + case "": + // 1) field name matches base type name and is exported: nothing to do + case "?": + // 2) field name matches base type name and is not exported: need package + name = "" + pkg = p.pkg() + case "@": + // 3) field name doesn't match type name (alias) + name = p.string() + alias = true + fallthrough + default: + if !exported(name) { + pkg = p.pkg() + } + } + return +} + +func (p *importer) paramList() (*types.Tuple, bool) { + n := p.int() + if n == 0 { + return nil, false + } + // negative length indicates unnamed parameters + named := true + if n < 0 { + n = -n + named = false + } + // n > 0 + params := make([]*types.Var, n) + isddd := false + for i := range params { + params[i], isddd = p.param(named) + } + return types.NewTuple(params...), isddd +} + +func (p *importer) param(named bool) (*types.Var, bool) { + t := p.typ(nil, nil) + td, isddd := t.(*dddSlice) + if isddd { + t = types.NewSlice(td.elem) + } + + var pkg *types.Package + var name string + if named { + name = p.string() + if name == "" { + errorf("expected named parameter") + } + if name != "_" { + pkg = p.pkg() + } + if i := strings.Index(name, "·"); i > 0 { + name = name[:i] // cut off gc-specific parameter numbering + } + } + + // read and discard compiler-specific info + p.string() + + return types.NewVar(token.NoPos, pkg, name, t), isddd +} + +func exported(name string) bool { + ch, _ := utf8.DecodeRuneInString(name) + return unicode.IsUpper(ch) +} + +func (p *importer) value() constant.Value { + switch tag := p.tagOrIndex(); tag { + case falseTag: + return constant.MakeBool(false) + case trueTag: + return constant.MakeBool(true) + case int64Tag: + return constant.MakeInt64(p.int64()) + case floatTag: + return p.float() + case complexTag: + re := p.float() + im := p.float() + return constant.BinaryOp(re, token.ADD, constant.MakeImag(im)) + case stringTag: + return constant.MakeString(p.string()) + case unknownTag: + return constant.MakeUnknown() + default: + errorf("unexpected value tag %d", tag) // panics + panic("unreachable") + } +} + +func (p *importer) float() constant.Value { + sign := p.int() + if sign == 0 { + return constant.MakeInt64(0) + } + + exp := p.int() + mant := []byte(p.string()) // big endian + + // remove leading 0's if any + for len(mant) > 0 && mant[0] == 0 { + mant = mant[1:] + } + + // convert to little endian + // TODO(gri) go/constant should have a more direct conversion function + // (e.g., once it supports a big.Float based implementation) + for i, j := 0, len(mant)-1; i < j; i, j = i+1, j-1 { + mant[i], mant[j] = mant[j], mant[i] + } + + // adjust exponent (constant.MakeFromBytes creates an integer value, + // but mant represents the mantissa bits such that 0.5 <= mant < 1.0) + exp -= len(mant) << 3 + if len(mant) > 0 { + for msd := mant[len(mant)-1]; msd&0x80 == 0; msd <<= 1 { + exp++ + } + } + + x := constant.MakeFromBytes(mant) + switch { + case exp < 0: + d := 
constant.Shift(constant.MakeInt64(1), token.SHL, uint(-exp)) + x = constant.BinaryOp(x, token.QUO, d) + case exp > 0: + x = constant.Shift(x, token.SHL, uint(exp)) + } + + if sign < 0 { + x = constant.UnaryOp(token.SUB, x, 0) + } + return x +} + +// ---------------------------------------------------------------------------- +// Low-level decoders + +func (p *importer) tagOrIndex() int { + if p.debugFormat { + p.marker('t') + } + + return int(p.rawInt64()) +} + +func (p *importer) int() int { + x := p.int64() + if int64(int(x)) != x { + errorf("exported integer too large") + } + return int(x) +} + +func (p *importer) int64() int64 { + if p.debugFormat { + p.marker('i') + } + + return p.rawInt64() +} + +func (p *importer) path() string { + if p.debugFormat { + p.marker('p') + } + // if the path was seen before, i is its index (>= 0) + // (the empty string is at index 0) + i := p.rawInt64() + if i >= 0 { + return p.pathList[i] + } + // otherwise, i is the negative path length (< 0) + a := make([]string, -i) + for n := range a { + a[n] = p.string() + } + s := strings.Join(a, "/") + p.pathList = append(p.pathList, s) + return s +} + +func (p *importer) string() string { + if p.debugFormat { + p.marker('s') + } + // if the string was seen before, i is its index (>= 0) + // (the empty string is at index 0) + i := p.rawInt64() + if i >= 0 { + return p.strList[i] + } + // otherwise, i is the negative string length (< 0) + if n := int(-i); n <= cap(p.buf) { + p.buf = p.buf[:n] + } else { + p.buf = make([]byte, n) + } + for i := range p.buf { + p.buf[i] = p.rawByte() + } + s := string(p.buf) + p.strList = append(p.strList, s) + return s +} + +func (p *importer) marker(want byte) { + if got := p.rawByte(); got != want { + errorf("incorrect marker: got %c; want %c (pos = %d)", got, want, p.read) + } + + pos := p.read + if n := int(p.rawInt64()); n != pos { + errorf("incorrect position: got %d; want %d", n, pos) + } +} + +// rawInt64 should only be used by low-level decoders. +func (p *importer) rawInt64() int64 { + i, err := binary.ReadVarint(p) + if err != nil { + errorf("read error: %v", err) + } + return i +} + +// rawStringln should only be used to read the initial version string. +func (p *importer) rawStringln(b byte) string { + p.buf = p.buf[:0] + for b != '\n' { + p.buf = append(p.buf, b) + b = p.rawByte() + } + return string(p.buf) +} + +// needed for binary.ReadVarint in rawInt64 +func (p *importer) ReadByte() (byte, error) { + return p.rawByte(), nil +} + +// byte is the bottleneck interface for reading p.data. +// It unescapes '|' 'S' to '$' and '|' '|' to '|'. +// rawByte should only be used by low-level decoders. +func (p *importer) rawByte() byte { + b := p.data[0] + r := 1 + if b == '|' { + b = p.data[1] + r = 2 + switch b { + case 'S': + b = '$' + case '|': + // nothing to do + default: + errorf("unexpected escape sequence in export data") + } + } + p.data = p.data[r:] + p.read += r + return b + +} + +// ---------------------------------------------------------------------------- +// Export format + +// Tags. Must be < 0. 
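
// A minimal sketch of the decoding counterpart to the exporter's rawInt64:
// the importer above satisfies io.ByteReader with its ReadByte method so
// that binary.ReadVarint can consume varints straight from p.data. The
// byteDecoder type here is just a stand-in with the same shape.
package main

import (
	"encoding/binary"
	"fmt"
)

type byteDecoder struct {
	data []byte
	read int // bytes consumed so far
}

// ReadByte makes byteDecoder an io.ByteReader.
func (d *byteDecoder) ReadByte() (byte, error) {
	b := d.data[0]
	d.data = d.data[1:]
	d.read++
	return b, nil
}

func main() {
	var buf [binary.MaxVarintLen64]byte
	n := binary.PutVarint(buf[:], -12345)

	d := &byteDecoder{data: buf[:n]}
	x, err := binary.ReadVarint(d)
	fmt.Println(x, err, d.read) // -12345 <nil> and the bytes consumed
}
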
+const ( + // Objects + packageTag = -(iota + 1) + constTag + typeTag + varTag + funcTag + endTag + + // Types + namedTag + arrayTag + sliceTag + dddTag + structTag + pointerTag + signatureTag + interfaceTag + mapTag + chanTag + + // Values + falseTag + trueTag + int64Tag + floatTag + fractionTag // not used by gc + complexTag + stringTag + nilTag // only used by gc (appears in exported inlined function bodies) + unknownTag // not used by gc (only appears in packages with errors) + + // Type aliases + aliasTag +) + +var predeclOnce sync.Once +var predecl []types.Type // initialized lazily + +func predeclared() []types.Type { + predeclOnce.Do(func() { + // initialize lazily to be sure that all + // elements have been initialized before + predecl = []types.Type{ // basic types + types.Typ[types.Bool], + types.Typ[types.Int], + types.Typ[types.Int8], + types.Typ[types.Int16], + types.Typ[types.Int32], + types.Typ[types.Int64], + types.Typ[types.Uint], + types.Typ[types.Uint8], + types.Typ[types.Uint16], + types.Typ[types.Uint32], + types.Typ[types.Uint64], + types.Typ[types.Uintptr], + types.Typ[types.Float32], + types.Typ[types.Float64], + types.Typ[types.Complex64], + types.Typ[types.Complex128], + types.Typ[types.String], + + // basic type aliases + types.Universe.Lookup("byte").Type(), + types.Universe.Lookup("rune").Type(), + + // error + types.Universe.Lookup("error").Type(), + + // untyped types + types.Typ[types.UntypedBool], + types.Typ[types.UntypedInt], + types.Typ[types.UntypedRune], + types.Typ[types.UntypedFloat], + types.Typ[types.UntypedComplex], + types.Typ[types.UntypedString], + types.Typ[types.UntypedNil], + + // package unsafe + types.Typ[types.UnsafePointer], + + // invalid type + types.Typ[types.Invalid], // only appears in packages with errors + + // used internally by gc; never used by this package or in .a files + anyType{}, + } + }) + return predecl +} + +type anyType struct{} + +func (t anyType) Underlying() types.Type { return t } +func (t anyType) String() string { return "any" } diff --git a/vendor/golang.org/x/tools/go/internal/gcimporter/exportdata.go b/vendor/golang.org/x/tools/go/internal/gcimporter/exportdata.go new file mode 100644 index 000000000..f33dc5613 --- /dev/null +++ b/vendor/golang.org/x/tools/go/internal/gcimporter/exportdata.go @@ -0,0 +1,93 @@ +// Copyright 2011 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// This file is a copy of $GOROOT/src/go/internal/gcimporter/exportdata.go. + +// This file implements FindExportData. + +package gcimporter + +import ( + "bufio" + "fmt" + "io" + "strconv" + "strings" +) + +func readGopackHeader(r *bufio.Reader) (name string, size int, err error) { + // See $GOROOT/include/ar.h. + hdr := make([]byte, 16+12+6+6+8+10+2) + _, err = io.ReadFull(r, hdr) + if err != nil { + return + } + // leave for debugging + if false { + fmt.Printf("header: %s", hdr) + } + s := strings.TrimSpace(string(hdr[16+12+6+6+8:][:10])) + size, err = strconv.Atoi(s) + if err != nil || hdr[len(hdr)-2] != '`' || hdr[len(hdr)-1] != '\n' { + err = fmt.Errorf("invalid archive header") + return + } + name = strings.TrimSpace(string(hdr[:16])) + return +} + +// FindExportData positions the reader r at the beginning of the +// export data section of an underlying GC-created object/archive +// file by reading from it. The reader must be positioned at the +// start of the file before calling this function. 
The hdr result +// is the string before the export data, either "$$" or "$$B". +// +func FindExportData(r *bufio.Reader) (hdr string, err error) { + // Read first line to make sure this is an object file. + line, err := r.ReadSlice('\n') + if err != nil { + err = fmt.Errorf("can't find export data (%v)", err) + return + } + + if string(line) == "!\n" { + // Archive file. Scan to __.PKGDEF. + var name string + if name, _, err = readGopackHeader(r); err != nil { + return + } + + // First entry should be __.PKGDEF. + if name != "__.PKGDEF" { + err = fmt.Errorf("go archive is missing __.PKGDEF") + return + } + + // Read first line of __.PKGDEF data, so that line + // is once again the first line of the input. + if line, err = r.ReadSlice('\n'); err != nil { + err = fmt.Errorf("can't find export data (%v)", err) + return + } + } + + // Now at __.PKGDEF in archive or still at beginning of file. + // Either way, line should begin with "go object ". + if !strings.HasPrefix(string(line), "go object ") { + err = fmt.Errorf("not a Go object file") + return + } + + // Skip over object header to export data. + // Begins after first line starting with $$. + for line[0] != '$' { + if line, err = r.ReadSlice('\n'); err != nil { + err = fmt.Errorf("can't find export data (%v)", err) + return + } + } + hdr = string(line) + + return +} diff --git a/vendor/golang.org/x/tools/go/internal/gcimporter/gcimporter.go b/vendor/golang.org/x/tools/go/internal/gcimporter/gcimporter.go new file mode 100644 index 000000000..e8cba6b23 --- /dev/null +++ b/vendor/golang.org/x/tools/go/internal/gcimporter/gcimporter.go @@ -0,0 +1,1078 @@ +// Copyright 2011 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// This file is a modified copy of $GOROOT/src/go/internal/gcimporter/gcimporter.go, +// but it also contains the original source-based importer code for Go1.6. +// Once we stop supporting 1.6, we can remove that code. + +// Package gcimporter provides various functions for reading +// gc-generated object files that can be used to implement the +// Importer interface defined by the Go 1.5 standard library package. +package gcimporter // import "golang.org/x/tools/go/internal/gcimporter" + +import ( + "bufio" + "errors" + "fmt" + "go/build" + "go/constant" + "go/token" + "go/types" + "io" + "io/ioutil" + "os" + "path/filepath" + "sort" + "strconv" + "strings" + "text/scanner" +) + +// debugging/development support +const debug = false + +var pkgExts = [...]string{".a", ".o"} + +// FindPkg returns the filename and unique package id for an import +// path based on package information provided by build.Import (using +// the build.Default build.Context). A relative srcDir is interpreted +// relative to the current working directory. +// If no file was found, an empty filename is returned. +// +func FindPkg(path, srcDir string) (filename, id string) { + if path == "" { + return + } + + var noext string + switch { + default: + // "x" -> "$GOPATH/pkg/$GOOS_$GOARCH/x.ext", "x" + // Don't require the source files to be present. 
+ if abs, err := filepath.Abs(srcDir); err == nil { // see issue 14282 + srcDir = abs + } + bp, _ := build.Import(path, srcDir, build.FindOnly|build.AllowBinary) + if bp.PkgObj == "" { + id = path // make sure we have an id to print in error message + return + } + noext = strings.TrimSuffix(bp.PkgObj, ".a") + id = bp.ImportPath + + case build.IsLocalImport(path): + // "./x" -> "/this/directory/x.ext", "/this/directory/x" + noext = filepath.Join(srcDir, path) + id = noext + + case filepath.IsAbs(path): + // for completeness only - go/build.Import + // does not support absolute imports + // "/x" -> "/x.ext", "/x" + noext = path + id = path + } + + if false { // for debugging + if path != id { + fmt.Printf("%s -> %s\n", path, id) + } + } + + // try extensions + for _, ext := range pkgExts { + filename = noext + ext + if f, err := os.Stat(filename); err == nil && !f.IsDir() { + return + } + } + + filename = "" // not found + return +} + +// ImportData imports a package by reading the gc-generated export data, +// adds the corresponding package object to the packages map indexed by id, +// and returns the object. +// +// The packages map must contains all packages already imported. The data +// reader position must be the beginning of the export data section. The +// filename is only used in error messages. +// +// If packages[id] contains the completely imported package, that package +// can be used directly, and there is no need to call this function (but +// there is also no harm but for extra time used). +// +func ImportData(packages map[string]*types.Package, filename, id string, data io.Reader) (pkg *types.Package, err error) { + // support for parser error handling + defer func() { + switch r := recover().(type) { + case nil: + // nothing to do + case importError: + err = r + default: + panic(r) // internal error + } + }() + + var p parser + p.init(filename, id, data, packages) + pkg = p.parseExport() + + return +} + +// Import imports a gc-generated package given its import path and srcDir, adds +// the corresponding package object to the packages map, and returns the object. +// The packages map must contain all packages already imported. +// +func Import(packages map[string]*types.Package, path, srcDir string, lookup func(path string) (io.ReadCloser, error)) (pkg *types.Package, err error) { + var rc io.ReadCloser + var filename, id string + if lookup != nil { + // With custom lookup specified, assume that caller has + // converted path to a canonical import path for use in the map. + if path == "unsafe" { + return types.Unsafe, nil + } + id = path + + // No need to re-import if the package was imported completely before. 
+ if pkg = packages[id]; pkg != nil && pkg.Complete() { + return + } + f, err := lookup(path) + if err != nil { + return nil, err + } + rc = f + } else { + filename, id = FindPkg(path, srcDir) + if filename == "" { + if path == "unsafe" { + return types.Unsafe, nil + } + return nil, fmt.Errorf("can't find import: %q", id) + } + + // no need to re-import if the package was imported completely before + if pkg = packages[id]; pkg != nil && pkg.Complete() { + return + } + + // open file + f, err := os.Open(filename) + if err != nil { + return nil, err + } + defer func() { + if err != nil { + // add file name to error + err = fmt.Errorf("%s: %v", filename, err) + } + }() + rc = f + } + defer rc.Close() + + var hdr string + buf := bufio.NewReader(rc) + if hdr, err = FindExportData(buf); err != nil { + return + } + + switch hdr { + case "$$\n": + // Work-around if we don't have a filename; happens only if lookup != nil. + // Either way, the filename is only needed for importer error messages, so + // this is fine. + if filename == "" { + filename = path + } + return ImportData(packages, filename, id, buf) + + case "$$B\n": + var data []byte + data, err = ioutil.ReadAll(buf) + if err != nil { + break + } + + // TODO(gri): allow clients of go/importer to provide a FileSet. + // Or, define a new standard go/types/gcexportdata package. + fset := token.NewFileSet() + + // The indexed export format starts with an 'i'; the older + // binary export format starts with a 'c', 'd', or 'v' + // (from "version"). Select appropriate importer. + if len(data) > 0 && data[0] == 'i' { + _, pkg, err = IImportData(fset, packages, data[1:], id) + } else { + _, pkg, err = BImportData(fset, packages, data, id) + } + + default: + err = fmt.Errorf("unknown export data header: %q", hdr) + } + + return +} + +// ---------------------------------------------------------------------------- +// Parser + +// TODO(gri) Imported objects don't have position information. +// Ideally use the debug table line info; alternatively +// create some fake position (or the position of the +// import). That way error messages referring to imported +// objects can print meaningful information. + +// parser parses the exports inside a gc compiler-produced +// object/archive file and populates its scope with the results. 
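A minimal sketch of how the Import entry point above is typically driven, before the parser internals that follow (illustration only: the package is internal to x/tools, so the import path below is usable only from inside that module, and whether a compiled .a/.o file is actually found depends on the local build layout):

	package main

	import (
		"fmt"
		"go/types"

		"golang.org/x/tools/go/internal/gcimporter"
	)

	func main() {
		// The packages map is shared across calls so that repeated imports
		// of the same id resolve to the same *types.Package.
		pkgs := make(map[string]*types.Package)

		// A nil lookup makes Import fall back to FindPkg and os.Open.
		pkg, err := gcimporter.Import(pkgs, "fmt", ".", nil)
		if err != nil {
			fmt.Println("import failed:", err)
			return
		}
		fmt.Println(pkg.Path(), "complete:", pkg.Complete())
	}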
+type parser struct { + scanner scanner.Scanner + tok rune // current token + lit string // literal string; only valid for Ident, Int, String tokens + id string // package id of imported package + sharedPkgs map[string]*types.Package // package id -> package object (across importer) + localPkgs map[string]*types.Package // package id -> package object (just this package) +} + +func (p *parser) init(filename, id string, src io.Reader, packages map[string]*types.Package) { + p.scanner.Init(src) + p.scanner.Error = func(_ *scanner.Scanner, msg string) { p.error(msg) } + p.scanner.Mode = scanner.ScanIdents | scanner.ScanInts | scanner.ScanChars | scanner.ScanStrings | scanner.ScanComments | scanner.SkipComments + p.scanner.Whitespace = 1<<'\t' | 1<<' ' + p.scanner.Filename = filename // for good error messages + p.next() + p.id = id + p.sharedPkgs = packages + if debug { + // check consistency of packages map + for _, pkg := range packages { + if pkg.Name() == "" { + fmt.Printf("no package name for %s\n", pkg.Path()) + } + } + } +} + +func (p *parser) next() { + p.tok = p.scanner.Scan() + switch p.tok { + case scanner.Ident, scanner.Int, scanner.Char, scanner.String, '·': + p.lit = p.scanner.TokenText() + default: + p.lit = "" + } + if debug { + fmt.Printf("%s: %q -> %q\n", scanner.TokenString(p.tok), p.scanner.TokenText(), p.lit) + } +} + +func declTypeName(pkg *types.Package, name string) *types.TypeName { + scope := pkg.Scope() + if obj := scope.Lookup(name); obj != nil { + return obj.(*types.TypeName) + } + obj := types.NewTypeName(token.NoPos, pkg, name, nil) + // a named type may be referred to before the underlying type + // is known - set it up + types.NewNamed(obj, nil, nil) + scope.Insert(obj) + return obj +} + +// ---------------------------------------------------------------------------- +// Error handling + +// Internal errors are boxed as importErrors. +type importError struct { + pos scanner.Position + err error +} + +func (e importError) Error() string { + return fmt.Sprintf("import error %s (byte offset = %d): %s", e.pos, e.pos.Offset, e.err) +} + +func (p *parser) error(err interface{}) { + if s, ok := err.(string); ok { + err = errors.New(s) + } + // panic with a runtime.Error if err is not an error + panic(importError{p.scanner.Pos(), err.(error)}) +} + +func (p *parser) errorf(format string, args ...interface{}) { + p.error(fmt.Sprintf(format, args...)) +} + +func (p *parser) expect(tok rune) string { + lit := p.lit + if p.tok != tok { + p.errorf("expected %s, got %s (%s)", scanner.TokenString(tok), scanner.TokenString(p.tok), lit) + } + p.next() + return lit +} + +func (p *parser) expectSpecial(tok string) { + sep := 'x' // not white space + i := 0 + for i < len(tok) && p.tok == rune(tok[i]) && sep > ' ' { + sep = p.scanner.Peek() // if sep <= ' ', there is white space before the next token + p.next() + i++ + } + if i < len(tok) { + p.errorf("expected %q, got %q", tok, tok[0:i]) + } +} + +func (p *parser) expectKeyword(keyword string) { + lit := p.expect(scanner.Ident) + if lit != keyword { + p.errorf("expected keyword %s, got %q", keyword, lit) + } +} + +// ---------------------------------------------------------------------------- +// Qualified and unqualified names + +// PackageId = string_lit . 
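+// A PackageId is a quoted package path such as "encoding/json"; the empty
+// string stands for the id of the package currently being imported (see
+// parsePackageID below).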
+// +func (p *parser) parsePackageID() string { + id, err := strconv.Unquote(p.expect(scanner.String)) + if err != nil { + p.error(err) + } + // id == "" stands for the imported package id + // (only known at time of package installation) + if id == "" { + id = p.id + } + return id +} + +// PackageName = ident . +// +func (p *parser) parsePackageName() string { + return p.expect(scanner.Ident) +} + +// dotIdentifier = ( ident | '·' ) { ident | int | '·' } . +func (p *parser) parseDotIdent() string { + ident := "" + if p.tok != scanner.Int { + sep := 'x' // not white space + for (p.tok == scanner.Ident || p.tok == scanner.Int || p.tok == '·') && sep > ' ' { + ident += p.lit + sep = p.scanner.Peek() // if sep <= ' ', there is white space before the next token + p.next() + } + } + if ident == "" { + p.expect(scanner.Ident) // use expect() for error handling + } + return ident +} + +// QualifiedName = "@" PackageId "." ( "?" | dotIdentifier ) . +// +func (p *parser) parseQualifiedName() (id, name string) { + p.expect('@') + id = p.parsePackageID() + p.expect('.') + // Per rev f280b8a485fd (10/2/2013), qualified names may be used for anonymous fields. + if p.tok == '?' { + p.next() + } else { + name = p.parseDotIdent() + } + return +} + +// getPkg returns the package for a given id. If the package is +// not found, create the package and add it to the p.localPkgs +// and p.sharedPkgs maps. name is the (expected) name of the +// package. If name == "", the package name is expected to be +// set later via an import clause in the export data. +// +// id identifies a package, usually by a canonical package path like +// "encoding/json" but possibly by a non-canonical import path like +// "./json". +// +func (p *parser) getPkg(id, name string) *types.Package { + // package unsafe is not in the packages maps - handle explicitly + if id == "unsafe" { + return types.Unsafe + } + + pkg := p.localPkgs[id] + if pkg == nil { + // first import of id from this package + pkg = p.sharedPkgs[id] + if pkg == nil { + // first import of id by this importer; + // add (possibly unnamed) pkg to shared packages + pkg = types.NewPackage(id, name) + p.sharedPkgs[id] = pkg + } + // add (possibly unnamed) pkg to local packages + if p.localPkgs == nil { + p.localPkgs = make(map[string]*types.Package) + } + p.localPkgs[id] = pkg + } else if name != "" { + // package exists already and we have an expected package name; + // make sure names match or set package name if necessary + if pname := pkg.Name(); pname == "" { + pkg.SetName(name) + } else if pname != name { + p.errorf("%s package name mismatch: %s (given) vs %s (expected)", id, pname, name) + } + } + return pkg +} + +// parseExportedName is like parseQualifiedName, but +// the package id is resolved to an imported *types.Package. +// +func (p *parser) parseExportedName() (pkg *types.Package, name string) { + id, name := p.parseQualifiedName() + pkg = p.getPkg(id, "") + return +} + +// ---------------------------------------------------------------------------- +// Types + +// BasicType = identifier . +// +func (p *parser) parseBasicType() types.Type { + id := p.expect(scanner.Ident) + obj := types.Universe.Lookup(id) + if obj, ok := obj.(*types.TypeName); ok { + return obj.Type() + } + p.errorf("not a basic type: %s", id) + return nil +} + +// ArrayType = "[" int_lit "]" Type . 
+// +func (p *parser) parseArrayType(parent *types.Package) types.Type { + // "[" already consumed and lookahead known not to be "]" + lit := p.expect(scanner.Int) + p.expect(']') + elem := p.parseType(parent) + n, err := strconv.ParseInt(lit, 10, 64) + if err != nil { + p.error(err) + } + return types.NewArray(elem, n) +} + +// MapType = "map" "[" Type "]" Type . +// +func (p *parser) parseMapType(parent *types.Package) types.Type { + p.expectKeyword("map") + p.expect('[') + key := p.parseType(parent) + p.expect(']') + elem := p.parseType(parent) + return types.NewMap(key, elem) +} + +// Name = identifier | "?" | QualifiedName . +// +// For unqualified and anonymous names, the returned package is the parent +// package unless parent == nil, in which case the returned package is the +// package being imported. (The parent package is not nil if the name +// is an unqualified struct field or interface method name belonging to a +// type declared in another package.) +// +// For qualified names, the returned package is nil (and not created if +// it doesn't exist yet) unless materializePkg is set (which creates an +// unnamed package with valid package path). In the latter case, a +// subsequent import clause is expected to provide a name for the package. +// +func (p *parser) parseName(parent *types.Package, materializePkg bool) (pkg *types.Package, name string) { + pkg = parent + if pkg == nil { + pkg = p.sharedPkgs[p.id] + } + switch p.tok { + case scanner.Ident: + name = p.lit + p.next() + case '?': + // anonymous + p.next() + case '@': + // exported name prefixed with package path + pkg = nil + var id string + id, name = p.parseQualifiedName() + if materializePkg { + pkg = p.getPkg(id, "") + } + default: + p.error("name expected") + } + return +} + +func deref(typ types.Type) types.Type { + if p, _ := typ.(*types.Pointer); p != nil { + return p.Elem() + } + return typ +} + +// Field = Name Type [ string_lit ] . +// +func (p *parser) parseField(parent *types.Package) (*types.Var, string) { + pkg, name := p.parseName(parent, true) + + if name == "_" { + // Blank fields should be package-qualified because they + // are unexported identifiers, but gc does not qualify them. + // Assuming that the ident belongs to the current package + // causes types to change during re-exporting, leading + // to spurious "can't assign A to B" errors from go/types. + // As a workaround, pretend all blank fields belong + // to the same unique dummy package. + const blankpkg = "<_>" + pkg = p.getPkg(blankpkg, blankpkg) + } + + typ := p.parseType(parent) + anonymous := false + if name == "" { + // anonymous field - typ must be T or *T and T must be a type name + switch typ := deref(typ).(type) { + case *types.Basic: // basic types are named types + pkg = nil // objects defined in Universe scope have no package + name = typ.Name() + case *types.Named: + name = typ.Obj().Name() + default: + p.errorf("anonymous field expected") + } + anonymous = true + } + tag := "" + if p.tok == scanner.String { + s := p.expect(scanner.String) + var err error + tag, err = strconv.Unquote(s) + if err != nil { + p.errorf("invalid struct tag %s: %s", s, err) + } + } + return types.NewField(token.NoPos, pkg, name, typ, anonymous), tag +} + +// StructType = "struct" "{" [ FieldList ] "}" . +// FieldList = Field { ";" Field } . 
+// +func (p *parser) parseStructType(parent *types.Package) types.Type { + var fields []*types.Var + var tags []string + + p.expectKeyword("struct") + p.expect('{') + for i := 0; p.tok != '}' && p.tok != scanner.EOF; i++ { + if i > 0 { + p.expect(';') + } + fld, tag := p.parseField(parent) + if tag != "" && tags == nil { + tags = make([]string, i) + } + if tags != nil { + tags = append(tags, tag) + } + fields = append(fields, fld) + } + p.expect('}') + + return types.NewStruct(fields, tags) +} + +// Parameter = ( identifier | "?" ) [ "..." ] Type [ string_lit ] . +// +func (p *parser) parseParameter() (par *types.Var, isVariadic bool) { + _, name := p.parseName(nil, false) + // remove gc-specific parameter numbering + if i := strings.Index(name, "·"); i >= 0 { + name = name[:i] + } + if p.tok == '.' { + p.expectSpecial("...") + isVariadic = true + } + typ := p.parseType(nil) + if isVariadic { + typ = types.NewSlice(typ) + } + // ignore argument tag (e.g. "noescape") + if p.tok == scanner.String { + p.next() + } + // TODO(gri) should we provide a package? + par = types.NewVar(token.NoPos, nil, name, typ) + return +} + +// Parameters = "(" [ ParameterList ] ")" . +// ParameterList = { Parameter "," } Parameter . +// +func (p *parser) parseParameters() (list []*types.Var, isVariadic bool) { + p.expect('(') + for p.tok != ')' && p.tok != scanner.EOF { + if len(list) > 0 { + p.expect(',') + } + par, variadic := p.parseParameter() + list = append(list, par) + if variadic { + if isVariadic { + p.error("... not on final argument") + } + isVariadic = true + } + } + p.expect(')') + + return +} + +// Signature = Parameters [ Result ] . +// Result = Type | Parameters . +// +func (p *parser) parseSignature(recv *types.Var) *types.Signature { + params, isVariadic := p.parseParameters() + + // optional result type + var results []*types.Var + if p.tok == '(' { + var variadic bool + results, variadic = p.parseParameters() + if variadic { + p.error("... not permitted on result type") + } + } + + return types.NewSignature(recv, types.NewTuple(params...), types.NewTuple(results...), isVariadic) +} + +// InterfaceType = "interface" "{" [ MethodList ] "}" . +// MethodList = Method { ";" Method } . +// Method = Name Signature . +// +// The methods of embedded interfaces are always "inlined" +// by the compiler and thus embedded interfaces are never +// visible in the export data. +// +func (p *parser) parseInterfaceType(parent *types.Package) types.Type { + var methods []*types.Func + + p.expectKeyword("interface") + p.expect('{') + for i := 0; p.tok != '}' && p.tok != scanner.EOF; i++ { + if i > 0 { + p.expect(';') + } + pkg, name := p.parseName(parent, true) + sig := p.parseSignature(nil) + methods = append(methods, types.NewFunc(token.NoPos, pkg, name, sig)) + } + p.expect('}') + + // Complete requires the type's embedded interfaces to be fully defined, + // but we do not define any + return newInterface(methods, nil).Complete() +} + +// ChanType = ( "chan" [ "<-" ] | "<-" "chan" ) Type . 
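+// The notation mirrors Go's channel syntax: "chan T" is bidirectional,
+// "chan<- T" is send-only, and "<-chan T" is receive-only.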
+// +func (p *parser) parseChanType(parent *types.Package) types.Type { + dir := types.SendRecv + if p.tok == scanner.Ident { + p.expectKeyword("chan") + if p.tok == '<' { + p.expectSpecial("<-") + dir = types.SendOnly + } + } else { + p.expectSpecial("<-") + p.expectKeyword("chan") + dir = types.RecvOnly + } + elem := p.parseType(parent) + return types.NewChan(dir, elem) +} + +// Type = +// BasicType | TypeName | ArrayType | SliceType | StructType | +// PointerType | FuncType | InterfaceType | MapType | ChanType | +// "(" Type ")" . +// +// BasicType = ident . +// TypeName = ExportedName . +// SliceType = "[" "]" Type . +// PointerType = "*" Type . +// FuncType = "func" Signature . +// +func (p *parser) parseType(parent *types.Package) types.Type { + switch p.tok { + case scanner.Ident: + switch p.lit { + default: + return p.parseBasicType() + case "struct": + return p.parseStructType(parent) + case "func": + // FuncType + p.next() + return p.parseSignature(nil) + case "interface": + return p.parseInterfaceType(parent) + case "map": + return p.parseMapType(parent) + case "chan": + return p.parseChanType(parent) + } + case '@': + // TypeName + pkg, name := p.parseExportedName() + return declTypeName(pkg, name).Type() + case '[': + p.next() // look ahead + if p.tok == ']' { + // SliceType + p.next() + return types.NewSlice(p.parseType(parent)) + } + return p.parseArrayType(parent) + case '*': + // PointerType + p.next() + return types.NewPointer(p.parseType(parent)) + case '<': + return p.parseChanType(parent) + case '(': + // "(" Type ")" + p.next() + typ := p.parseType(parent) + p.expect(')') + return typ + } + p.errorf("expected type, got %s (%q)", scanner.TokenString(p.tok), p.lit) + return nil +} + +// ---------------------------------------------------------------------------- +// Declarations + +// ImportDecl = "import" PackageName PackageId . +// +func (p *parser) parseImportDecl() { + p.expectKeyword("import") + name := p.parsePackageName() + p.getPkg(p.parsePackageID(), name) +} + +// int_lit = [ "+" | "-" ] { "0" ... "9" } . +// +func (p *parser) parseInt() string { + s := "" + switch p.tok { + case '-': + s = "-" + p.next() + case '+': + p.next() + } + return s + p.expect(scanner.Int) +} + +// number = int_lit [ "p" int_lit ] . +// +func (p *parser) parseNumber() (typ *types.Basic, val constant.Value) { + // mantissa + mant := constant.MakeFromLiteral(p.parseInt(), token.INT, 0) + if mant == nil { + panic("invalid mantissa") + } + + if p.lit == "p" { + // exponent (base 2) + p.next() + exp, err := strconv.ParseInt(p.parseInt(), 10, 0) + if err != nil { + p.error(err) + } + if exp < 0 { + denom := constant.MakeInt64(1) + denom = constant.Shift(denom, token.SHL, uint(-exp)) + typ = types.Typ[types.UntypedFloat] + val = constant.BinaryOp(mant, token.QUO, denom) + return + } + if exp > 0 { + mant = constant.Shift(mant, token.SHL, uint(exp)) + } + typ = types.Typ[types.UntypedFloat] + val = mant + return + } + + typ = types.Typ[types.UntypedInt] + val = mant + return +} + +// ConstDecl = "const" ExportedName [ Type ] "=" Literal . +// Literal = bool_lit | int_lit | float_lit | complex_lit | rune_lit | string_lit . +// bool_lit = "true" | "false" . +// complex_lit = "(" float_lit "+" float_lit "i" ")" . +// rune_lit = "(" int_lit "+" int_lit ")" . +// string_lit = `"` { unicode_char } `"` . 
+// +func (p *parser) parseConstDecl() { + p.expectKeyword("const") + pkg, name := p.parseExportedName() + + var typ0 types.Type + if p.tok != '=' { + // constant types are never structured - no need for parent type + typ0 = p.parseType(nil) + } + + p.expect('=') + var typ types.Type + var val constant.Value + switch p.tok { + case scanner.Ident: + // bool_lit + if p.lit != "true" && p.lit != "false" { + p.error("expected true or false") + } + typ = types.Typ[types.UntypedBool] + val = constant.MakeBool(p.lit == "true") + p.next() + + case '-', scanner.Int: + // int_lit + typ, val = p.parseNumber() + + case '(': + // complex_lit or rune_lit + p.next() + if p.tok == scanner.Char { + p.next() + p.expect('+') + typ = types.Typ[types.UntypedRune] + _, val = p.parseNumber() + p.expect(')') + break + } + _, re := p.parseNumber() + p.expect('+') + _, im := p.parseNumber() + p.expectKeyword("i") + p.expect(')') + typ = types.Typ[types.UntypedComplex] + val = constant.BinaryOp(re, token.ADD, constant.MakeImag(im)) + + case scanner.Char: + // rune_lit + typ = types.Typ[types.UntypedRune] + val = constant.MakeFromLiteral(p.lit, token.CHAR, 0) + p.next() + + case scanner.String: + // string_lit + typ = types.Typ[types.UntypedString] + val = constant.MakeFromLiteral(p.lit, token.STRING, 0) + p.next() + + default: + p.errorf("expected literal got %s", scanner.TokenString(p.tok)) + } + + if typ0 == nil { + typ0 = typ + } + + pkg.Scope().Insert(types.NewConst(token.NoPos, pkg, name, typ0, val)) +} + +// TypeDecl = "type" ExportedName Type . +// +func (p *parser) parseTypeDecl() { + p.expectKeyword("type") + pkg, name := p.parseExportedName() + obj := declTypeName(pkg, name) + + // The type object may have been imported before and thus already + // have a type associated with it. We still need to parse the type + // structure, but throw it away if the object already has a type. + // This ensures that all imports refer to the same type object for + // a given type declaration. + typ := p.parseType(pkg) + + if name := obj.Type().(*types.Named); name.Underlying() == nil { + name.SetUnderlying(typ) + } +} + +// VarDecl = "var" ExportedName Type . +// +func (p *parser) parseVarDecl() { + p.expectKeyword("var") + pkg, name := p.parseExportedName() + typ := p.parseType(pkg) + pkg.Scope().Insert(types.NewVar(token.NoPos, pkg, name, typ)) +} + +// Func = Signature [ Body ] . +// Body = "{" ... "}" . +// +func (p *parser) parseFunc(recv *types.Var) *types.Signature { + sig := p.parseSignature(recv) + if p.tok == '{' { + p.next() + for i := 1; i > 0; p.next() { + switch p.tok { + case '{': + i++ + case '}': + i-- + } + } + } + return sig +} + +// MethodDecl = "func" Receiver Name Func . +// Receiver = "(" ( identifier | "?" ) [ "*" ] ExportedName ")" . +// +func (p *parser) parseMethodDecl() { + // "func" already consumed + p.expect('(') + recv, _ := p.parseParameter() // receiver + p.expect(')') + + // determine receiver base type object + base := deref(recv.Type()).(*types.Named) + + // parse method name, signature, and possibly inlined body + _, name := p.parseName(nil, false) + sig := p.parseFunc(recv) + + // methods always belong to the same package as the base type object + pkg := base.Obj().Pkg() + + // add method to type unless type was imported before + // and method exists already + // TODO(gri) This leads to a quadratic algorithm - ok for now because method counts are small. + base.AddMethod(types.NewFunc(token.NoPos, pkg, name, sig)) +} + +// FuncDecl = "func" ExportedName Func . 
+// +func (p *parser) parseFuncDecl() { + // "func" already consumed + pkg, name := p.parseExportedName() + typ := p.parseFunc(nil) + pkg.Scope().Insert(types.NewFunc(token.NoPos, pkg, name, typ)) +} + +// Decl = [ ImportDecl | ConstDecl | TypeDecl | VarDecl | FuncDecl | MethodDecl ] "\n" . +// +func (p *parser) parseDecl() { + if p.tok == scanner.Ident { + switch p.lit { + case "import": + p.parseImportDecl() + case "const": + p.parseConstDecl() + case "type": + p.parseTypeDecl() + case "var": + p.parseVarDecl() + case "func": + p.next() // look ahead + if p.tok == '(' { + p.parseMethodDecl() + } else { + p.parseFuncDecl() + } + } + } + p.expect('\n') +} + +// ---------------------------------------------------------------------------- +// Export + +// Export = "PackageClause { Decl } "$$" . +// PackageClause = "package" PackageName [ "safe" ] "\n" . +// +func (p *parser) parseExport() *types.Package { + p.expectKeyword("package") + name := p.parsePackageName() + if p.tok == scanner.Ident && p.lit == "safe" { + // package was compiled with -u option - ignore + p.next() + } + p.expect('\n') + + pkg := p.getPkg(p.id, name) + + for p.tok != '$' && p.tok != scanner.EOF { + p.parseDecl() + } + + if ch := p.scanner.Peek(); p.tok != '$' || ch != '$' { + // don't call next()/expect() since reading past the + // export data may cause scanner errors (e.g. NUL chars) + p.errorf("expected '$$', got %s %c", scanner.TokenString(p.tok), ch) + } + + if n := p.scanner.ErrorCount; n != 0 { + p.errorf("expected no scanner errors, got %d", n) + } + + // Record all locally referenced packages as imports. + var imports []*types.Package + for id, pkg2 := range p.localPkgs { + if pkg2.Name() == "" { + p.errorf("%s package has no name", id) + } + if id == p.id { + continue // avoid self-edge + } + imports = append(imports, pkg2) + } + sort.Sort(byPath(imports)) + pkg.SetImports(imports) + + // package was imported completely and without errors + pkg.MarkComplete() + + return pkg +} + +type byPath []*types.Package + +func (a byPath) Len() int { return len(a) } +func (a byPath) Swap(i, j int) { a[i], a[j] = a[j], a[i] } +func (a byPath) Less(i, j int) bool { return a[i].Path() < a[j].Path() } diff --git a/vendor/golang.org/x/tools/go/internal/gcimporter/iexport.go b/vendor/golang.org/x/tools/go/internal/gcimporter/iexport.go new file mode 100644 index 000000000..d2fc8b6fa --- /dev/null +++ b/vendor/golang.org/x/tools/go/internal/gcimporter/iexport.go @@ -0,0 +1,781 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Indexed binary package export. +// This file was derived from $GOROOT/src/cmd/compile/internal/gc/iexport.go; +// see that file for specification of the format. + +package gcimporter + +import ( + "bytes" + "encoding/binary" + "go/ast" + "go/constant" + "go/token" + "go/types" + "io" + "math/big" + "reflect" + "sort" +) + +// Current indexed export format version. Increase with each format change. +// 0: Go1.11 encoding +const iexportVersion = 0 + +// Current bundled export format version. Increase with each format change. +// 0: initial implementation +const bundleVersion = 0 + +// IExportData writes indexed export data for pkg to out. +// +// If no file set is provided, position info will be missing. +// The package path of the top-level package will not be recorded, +// so that calls to IImportData can override with a provided package path. 
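IExportData below and IImportData (added later in this patch) form a round trip over the indexed format. A minimal sketch, assuming a caller inside the x/tools module (the package is internal) and a made-up example package:

	package main

	import (
		"bytes"
		"fmt"
		"go/ast"
		"go/importer"
		"go/parser"
		"go/token"
		"go/types"

		"golang.org/x/tools/go/internal/gcimporter"
	)

	func main() {
		// Type-check a tiny package so we have a *types.Package to export.
		const src = `package demo
	func Add(a, b int) int { return a + b }`
		fset := token.NewFileSet()
		file, err := parser.ParseFile(fset, "demo.go", src, 0)
		if err != nil {
			panic(err)
		}
		conf := types.Config{Importer: importer.Default()}
		pkg, err := conf.Check("example.com/demo", fset, []*ast.File{file}, nil)
		if err != nil {
			panic(err)
		}

		// Write indexed export data ...
		var buf bytes.Buffer
		if err := gcimporter.IExportData(&buf, fset, pkg); err != nil {
			panic(err)
		}

		// ... and read it back. The imports map plays the same role as the
		// packages map used by Import.
		imports := make(map[string]*types.Package)
		_, pkg2, err := gcimporter.IImportData(token.NewFileSet(), imports, buf.Bytes(), pkg.Path())
		if err != nil {
			panic(err)
		}
		fmt.Println(pkg2.Path(), pkg2.Scope().Names()) // example.com/demo [Add]
	}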
+func IExportData(out io.Writer, fset *token.FileSet, pkg *types.Package) error { + return iexportCommon(out, fset, false, []*types.Package{pkg}) +} + +// IExportBundle writes an indexed export bundle for pkgs to out. +func IExportBundle(out io.Writer, fset *token.FileSet, pkgs []*types.Package) error { + return iexportCommon(out, fset, true, pkgs) +} + +func iexportCommon(out io.Writer, fset *token.FileSet, bundle bool, pkgs []*types.Package) (err error) { + defer func() { + if e := recover(); e != nil { + if ierr, ok := e.(internalError); ok { + err = ierr + return + } + // Not an internal error; panic again. + panic(e) + } + }() + + p := iexporter{ + fset: fset, + allPkgs: map[*types.Package]bool{}, + stringIndex: map[string]uint64{}, + declIndex: map[types.Object]uint64{}, + typIndex: map[types.Type]uint64{}, + } + if !bundle { + p.localpkg = pkgs[0] + } + + for i, pt := range predeclared() { + p.typIndex[pt] = uint64(i) + } + if len(p.typIndex) > predeclReserved { + panic(internalErrorf("too many predeclared types: %d > %d", len(p.typIndex), predeclReserved)) + } + + // Initialize work queue with exported declarations. + for _, pkg := range pkgs { + scope := pkg.Scope() + for _, name := range scope.Names() { + if ast.IsExported(name) { + p.pushDecl(scope.Lookup(name)) + } + } + + if bundle { + // Ensure pkg and its imports are included in the index. + p.allPkgs[pkg] = true + for _, imp := range pkg.Imports() { + p.allPkgs[imp] = true + } + } + } + + // Loop until no more work. + for !p.declTodo.empty() { + p.doDecl(p.declTodo.popHead()) + } + + // Append indices to data0 section. + dataLen := uint64(p.data0.Len()) + w := p.newWriter() + w.writeIndex(p.declIndex) + + if bundle { + w.uint64(uint64(len(pkgs))) + for _, pkg := range pkgs { + w.pkg(pkg) + imps := pkg.Imports() + w.uint64(uint64(len(imps))) + for _, imp := range imps { + w.pkg(imp) + } + } + } + w.flush() + + // Assemble header. + var hdr intWriter + if bundle { + hdr.uint64(bundleVersion) + } + hdr.uint64(iexportVersion) + hdr.uint64(uint64(p.strings.Len())) + hdr.uint64(dataLen) + + // Flush output. + io.Copy(out, &hdr) + io.Copy(out, &p.strings) + io.Copy(out, &p.data0) + + return nil +} + +// writeIndex writes out an object index. mainIndex indicates whether +// we're writing out the main index, which is also read by +// non-compiler tools and includes a complete package description +// (i.e., name and height). +func (w *exportWriter) writeIndex(index map[types.Object]uint64) { + // Build a map from packages to objects from that package. + pkgObjs := map[*types.Package][]types.Object{} + + // For the main index, make sure to include every package that + // we reference, even if we're not exporting (or reexporting) + // any symbols from it. 
+ if w.p.localpkg != nil { + pkgObjs[w.p.localpkg] = nil + } + for pkg := range w.p.allPkgs { + pkgObjs[pkg] = nil + } + + for obj := range index { + pkgObjs[obj.Pkg()] = append(pkgObjs[obj.Pkg()], obj) + } + + var pkgs []*types.Package + for pkg, objs := range pkgObjs { + pkgs = append(pkgs, pkg) + + sort.Slice(objs, func(i, j int) bool { + return objs[i].Name() < objs[j].Name() + }) + } + + sort.Slice(pkgs, func(i, j int) bool { + return w.exportPath(pkgs[i]) < w.exportPath(pkgs[j]) + }) + + w.uint64(uint64(len(pkgs))) + for _, pkg := range pkgs { + w.string(w.exportPath(pkg)) + w.string(pkg.Name()) + w.uint64(uint64(0)) // package height is not needed for go/types + + objs := pkgObjs[pkg] + w.uint64(uint64(len(objs))) + for _, obj := range objs { + w.string(obj.Name()) + w.uint64(index[obj]) + } + } +} + +type iexporter struct { + fset *token.FileSet + out *bytes.Buffer + + localpkg *types.Package + + // allPkgs tracks all packages that have been referenced by + // the export data, so we can ensure to include them in the + // main index. + allPkgs map[*types.Package]bool + + declTodo objQueue + + strings intWriter + stringIndex map[string]uint64 + + data0 intWriter + declIndex map[types.Object]uint64 + typIndex map[types.Type]uint64 +} + +// stringOff returns the offset of s within the string section. +// If not already present, it's added to the end. +func (p *iexporter) stringOff(s string) uint64 { + off, ok := p.stringIndex[s] + if !ok { + off = uint64(p.strings.Len()) + p.stringIndex[s] = off + + p.strings.uint64(uint64(len(s))) + p.strings.WriteString(s) + } + return off +} + +// pushDecl adds n to the declaration work queue, if not already present. +func (p *iexporter) pushDecl(obj types.Object) { + // Package unsafe is known to the compiler and predeclared. + assert(obj.Pkg() != types.Unsafe) + + if _, ok := p.declIndex[obj]; ok { + return + } + + p.declIndex[obj] = ^uint64(0) // mark n present in work queue + p.declTodo.pushTail(obj) +} + +// exportWriter handles writing out individual data section chunks. +type exportWriter struct { + p *iexporter + + data intWriter + currPkg *types.Package + prevFile string + prevLine int64 +} + +func (w *exportWriter) exportPath(pkg *types.Package) string { + if pkg == w.p.localpkg { + return "" + } + return pkg.Path() +} + +func (p *iexporter) doDecl(obj types.Object) { + w := p.newWriter() + w.setPkg(obj.Pkg(), false) + + switch obj := obj.(type) { + case *types.Var: + w.tag('V') + w.pos(obj.Pos()) + w.typ(obj.Type(), obj.Pkg()) + + case *types.Func: + sig, _ := obj.Type().(*types.Signature) + if sig.Recv() != nil { + panic(internalErrorf("unexpected method: %v", sig)) + } + w.tag('F') + w.pos(obj.Pos()) + w.signature(sig) + + case *types.Const: + w.tag('C') + w.pos(obj.Pos()) + w.value(obj.Type(), obj.Val()) + + case *types.TypeName: + if obj.IsAlias() { + w.tag('A') + w.pos(obj.Pos()) + w.typ(obj.Type(), obj.Pkg()) + break + } + + // Defined type. 
+ w.tag('T') + w.pos(obj.Pos()) + + underlying := obj.Type().Underlying() + w.typ(underlying, obj.Pkg()) + + t := obj.Type() + if types.IsInterface(t) { + break + } + + named, ok := t.(*types.Named) + if !ok { + panic(internalErrorf("%s is not a defined type", t)) + } + + n := named.NumMethods() + w.uint64(uint64(n)) + for i := 0; i < n; i++ { + m := named.Method(i) + w.pos(m.Pos()) + w.string(m.Name()) + sig, _ := m.Type().(*types.Signature) + w.param(sig.Recv()) + w.signature(sig) + } + + default: + panic(internalErrorf("unexpected object: %v", obj)) + } + + p.declIndex[obj] = w.flush() +} + +func (w *exportWriter) tag(tag byte) { + w.data.WriteByte(tag) +} + +func (w *exportWriter) pos(pos token.Pos) { + if w.p.fset == nil { + w.int64(0) + return + } + + p := w.p.fset.Position(pos) + file := p.Filename + line := int64(p.Line) + + // When file is the same as the last position (common case), + // we can save a few bytes by delta encoding just the line + // number. + // + // Note: Because data objects may be read out of order (or not + // at all), we can only apply delta encoding within a single + // object. This is handled implicitly by tracking prevFile and + // prevLine as fields of exportWriter. + + if file == w.prevFile { + delta := line - w.prevLine + w.int64(delta) + if delta == deltaNewFile { + w.int64(-1) + } + } else { + w.int64(deltaNewFile) + w.int64(line) // line >= 0 + w.string(file) + w.prevFile = file + } + w.prevLine = line +} + +func (w *exportWriter) pkg(pkg *types.Package) { + // Ensure any referenced packages are declared in the main index. + w.p.allPkgs[pkg] = true + + w.string(w.exportPath(pkg)) +} + +func (w *exportWriter) qualifiedIdent(obj types.Object) { + // Ensure any referenced declarations are written out too. + w.p.pushDecl(obj) + + w.string(obj.Name()) + w.pkg(obj.Pkg()) +} + +func (w *exportWriter) typ(t types.Type, pkg *types.Package) { + w.data.uint64(w.p.typOff(t, pkg)) +} + +func (p *iexporter) newWriter() *exportWriter { + return &exportWriter{p: p} +} + +func (w *exportWriter) flush() uint64 { + off := uint64(w.p.data0.Len()) + io.Copy(&w.p.data0, &w.data) + return off +} + +func (p *iexporter) typOff(t types.Type, pkg *types.Package) uint64 { + off, ok := p.typIndex[t] + if !ok { + w := p.newWriter() + w.doTyp(t, pkg) + off = predeclReserved + w.flush() + p.typIndex[t] = off + } + return off +} + +func (w *exportWriter) startType(k itag) { + w.data.uint64(uint64(k)) +} + +func (w *exportWriter) doTyp(t types.Type, pkg *types.Package) { + switch t := t.(type) { + case *types.Named: + w.startType(definedType) + w.qualifiedIdent(t.Obj()) + + case *types.Pointer: + w.startType(pointerType) + w.typ(t.Elem(), pkg) + + case *types.Slice: + w.startType(sliceType) + w.typ(t.Elem(), pkg) + + case *types.Array: + w.startType(arrayType) + w.uint64(uint64(t.Len())) + w.typ(t.Elem(), pkg) + + case *types.Chan: + w.startType(chanType) + // 1 RecvOnly; 2 SendOnly; 3 SendRecv + var dir uint64 + switch t.Dir() { + case types.RecvOnly: + dir = 1 + case types.SendOnly: + dir = 2 + case types.SendRecv: + dir = 3 + } + w.uint64(dir) + w.typ(t.Elem(), pkg) + + case *types.Map: + w.startType(mapType) + w.typ(t.Key(), pkg) + w.typ(t.Elem(), pkg) + + case *types.Signature: + w.startType(signatureType) + w.setPkg(pkg, true) + w.signature(t) + + case *types.Struct: + w.startType(structType) + w.setPkg(pkg, true) + + n := t.NumFields() + w.uint64(uint64(n)) + for i := 0; i < n; i++ { + f := t.Field(i) + w.pos(f.Pos()) + w.string(f.Name()) + w.typ(f.Type(), pkg) + 
w.bool(f.Anonymous()) + w.string(t.Tag(i)) // note (or tag) + } + + case *types.Interface: + w.startType(interfaceType) + w.setPkg(pkg, true) + + n := t.NumEmbeddeds() + w.uint64(uint64(n)) + for i := 0; i < n; i++ { + f := t.Embedded(i) + w.pos(f.Obj().Pos()) + w.typ(f.Obj().Type(), f.Obj().Pkg()) + } + + n = t.NumExplicitMethods() + w.uint64(uint64(n)) + for i := 0; i < n; i++ { + m := t.ExplicitMethod(i) + w.pos(m.Pos()) + w.string(m.Name()) + sig, _ := m.Type().(*types.Signature) + w.signature(sig) + } + + default: + panic(internalErrorf("unexpected type: %v, %v", t, reflect.TypeOf(t))) + } +} + +func (w *exportWriter) setPkg(pkg *types.Package, write bool) { + if write { + w.pkg(pkg) + } + + w.currPkg = pkg +} + +func (w *exportWriter) signature(sig *types.Signature) { + w.paramList(sig.Params()) + w.paramList(sig.Results()) + if sig.Params().Len() > 0 { + w.bool(sig.Variadic()) + } +} + +func (w *exportWriter) paramList(tup *types.Tuple) { + n := tup.Len() + w.uint64(uint64(n)) + for i := 0; i < n; i++ { + w.param(tup.At(i)) + } +} + +func (w *exportWriter) param(obj types.Object) { + w.pos(obj.Pos()) + w.localIdent(obj) + w.typ(obj.Type(), obj.Pkg()) +} + +func (w *exportWriter) value(typ types.Type, v constant.Value) { + w.typ(typ, nil) + + switch b := typ.Underlying().(*types.Basic); b.Info() & types.IsConstType { + case types.IsBoolean: + w.bool(constant.BoolVal(v)) + case types.IsInteger: + var i big.Int + if i64, exact := constant.Int64Val(v); exact { + i.SetInt64(i64) + } else if ui64, exact := constant.Uint64Val(v); exact { + i.SetUint64(ui64) + } else { + i.SetString(v.ExactString(), 10) + } + w.mpint(&i, typ) + case types.IsFloat: + f := constantToFloat(v) + w.mpfloat(f, typ) + case types.IsComplex: + w.mpfloat(constantToFloat(constant.Real(v)), typ) + w.mpfloat(constantToFloat(constant.Imag(v)), typ) + case types.IsString: + w.string(constant.StringVal(v)) + default: + if b.Kind() == types.Invalid { + // package contains type errors + break + } + panic(internalErrorf("unexpected type %v (%v)", typ, typ.Underlying())) + } +} + +// constantToFloat converts a constant.Value with kind constant.Float to a +// big.Float. +func constantToFloat(x constant.Value) *big.Float { + x = constant.ToFloat(x) + // Use the same floating-point precision (512) as cmd/compile + // (see Mpprec in cmd/compile/internal/gc/mpfloat.go). + const mpprec = 512 + var f big.Float + f.SetPrec(mpprec) + if v, exact := constant.Float64Val(x); exact { + // float64 + f.SetFloat64(v) + } else if num, denom := constant.Num(x), constant.Denom(x); num.Kind() == constant.Int { + // TODO(gri): add big.Rat accessor to constant.Value. + n := valueToRat(num) + d := valueToRat(denom) + f.SetRat(n.Quo(n, d)) + } else { + // Value too large to represent as a fraction => inaccessible. + // TODO(gri): add big.Float accessor to constant.Value. + _, ok := f.SetString(x.ExactString()) + assert(ok) + } + return &f +} + +// mpint exports a multi-precision integer. +// +// For unsigned types, small values are written out as a single +// byte. Larger values are written out as a length-prefixed big-endian +// byte string, where the length prefix is encoded as its complement. +// For example, bytes 0, 1, and 2 directly represent the integer +// values 0, 1, and 2; while bytes 255, 254, and 253 indicate a 1-, +// 2-, and 3-byte big-endian string follow. 
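+//
+// For example, for an unsigned 2-byte type the single-byte encoding covers
+// the values 0 through 253; the value 300 is therefore written as the
+// length byte 254 (the complement of 2) followed by the big-endian bytes
+// 0x01 0x2C.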
+// +// Encoding for signed types use the same general approach as for +// unsigned types, except small values use zig-zag encoding and the +// bottom bit of length prefix byte for large values is reserved as a +// sign bit. +// +// The exact boundary between small and large encodings varies +// according to the maximum number of bytes needed to encode a value +// of type typ. As a special case, 8-bit types are always encoded as a +// single byte. +// +// TODO(mdempsky): Is this level of complexity really worthwhile? +func (w *exportWriter) mpint(x *big.Int, typ types.Type) { + basic, ok := typ.Underlying().(*types.Basic) + if !ok { + panic(internalErrorf("unexpected type %v (%T)", typ.Underlying(), typ.Underlying())) + } + + signed, maxBytes := intSize(basic) + + negative := x.Sign() < 0 + if !signed && negative { + panic(internalErrorf("negative unsigned integer; type %v, value %v", typ, x)) + } + + b := x.Bytes() + if len(b) > 0 && b[0] == 0 { + panic(internalErrorf("leading zeros")) + } + if uint(len(b)) > maxBytes { + panic(internalErrorf("bad mpint length: %d > %d (type %v, value %v)", len(b), maxBytes, typ, x)) + } + + maxSmall := 256 - maxBytes + if signed { + maxSmall = 256 - 2*maxBytes + } + if maxBytes == 1 { + maxSmall = 256 + } + + // Check if x can use small value encoding. + if len(b) <= 1 { + var ux uint + if len(b) == 1 { + ux = uint(b[0]) + } + if signed { + ux <<= 1 + if negative { + ux-- + } + } + if ux < maxSmall { + w.data.WriteByte(byte(ux)) + return + } + } + + n := 256 - uint(len(b)) + if signed { + n = 256 - 2*uint(len(b)) + if negative { + n |= 1 + } + } + if n < maxSmall || n >= 256 { + panic(internalErrorf("encoding mistake: %d, %v, %v => %d", len(b), signed, negative, n)) + } + + w.data.WriteByte(byte(n)) + w.data.Write(b) +} + +// mpfloat exports a multi-precision floating point number. +// +// The number's value is decomposed into mantissa × 2**exponent, where +// mantissa is an integer. The value is written out as mantissa (as a +// multi-precision integer) and then the exponent, except exponent is +// omitted if mantissa is zero. +func (w *exportWriter) mpfloat(f *big.Float, typ types.Type) { + if f.IsInf() { + panic("infinite constant") + } + + // Break into f = mant × 2**exp, with 0.5 <= mant < 1. + var mant big.Float + exp := int64(f.MantExp(&mant)) + + // Scale so that mant is an integer. + prec := mant.MinPrec() + mant.SetMantExp(&mant, int(prec)) + exp -= int64(prec) + + manti, acc := mant.Int(nil) + if acc != big.Exact { + panic(internalErrorf("mantissa scaling failed for %f (%s)", f, acc)) + } + w.mpint(manti, typ) + if manti.Sign() != 0 { + w.int64(exp) + } +} + +func (w *exportWriter) bool(b bool) bool { + var x uint64 + if b { + x = 1 + } + w.uint64(x) + return b +} + +func (w *exportWriter) int64(x int64) { w.data.int64(x) } +func (w *exportWriter) uint64(x uint64) { w.data.uint64(x) } +func (w *exportWriter) string(s string) { w.uint64(w.p.stringOff(s)) } + +func (w *exportWriter) localIdent(obj types.Object) { + // Anonymous parameters. 
+ if obj == nil { + w.string("") + return + } + + name := obj.Name() + if name == "_" { + w.string("_") + return + } + + w.string(name) +} + +type intWriter struct { + bytes.Buffer +} + +func (w *intWriter) int64(x int64) { + var buf [binary.MaxVarintLen64]byte + n := binary.PutVarint(buf[:], x) + w.Write(buf[:n]) +} + +func (w *intWriter) uint64(x uint64) { + var buf [binary.MaxVarintLen64]byte + n := binary.PutUvarint(buf[:], x) + w.Write(buf[:n]) +} + +func assert(cond bool) { + if !cond { + panic("internal error: assertion failed") + } +} + +// The below is copied from go/src/cmd/compile/internal/gc/syntax.go. + +// objQueue is a FIFO queue of types.Object. The zero value of objQueue is +// a ready-to-use empty queue. +type objQueue struct { + ring []types.Object + head, tail int +} + +// empty returns true if q contains no Nodes. +func (q *objQueue) empty() bool { + return q.head == q.tail +} + +// pushTail appends n to the tail of the queue. +func (q *objQueue) pushTail(obj types.Object) { + if len(q.ring) == 0 { + q.ring = make([]types.Object, 16) + } else if q.head+len(q.ring) == q.tail { + // Grow the ring. + nring := make([]types.Object, len(q.ring)*2) + // Copy the old elements. + part := q.ring[q.head%len(q.ring):] + if q.tail-q.head <= len(part) { + part = part[:q.tail-q.head] + copy(nring, part) + } else { + pos := copy(nring, part) + copy(nring[pos:], q.ring[:q.tail%len(q.ring)]) + } + q.ring, q.head, q.tail = nring, 0, q.tail-q.head + } + + q.ring[q.tail%len(q.ring)] = obj + q.tail++ +} + +// popHead pops a node from the head of the queue. It panics if q is empty. +func (q *objQueue) popHead() types.Object { + if q.empty() { + panic("dequeue empty") + } + obj := q.ring[q.head%len(q.ring)] + q.head++ + return obj +} diff --git a/vendor/golang.org/x/tools/go/internal/gcimporter/iimport.go b/vendor/golang.org/x/tools/go/internal/gcimporter/iimport.go new file mode 100644 index 000000000..8ed8bc62d --- /dev/null +++ b/vendor/golang.org/x/tools/go/internal/gcimporter/iimport.go @@ -0,0 +1,676 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Indexed package import. +// See cmd/compile/internal/gc/iexport.go for the export data format. + +// This file is a copy of $GOROOT/src/go/internal/gcimporter/iimport.go. + +package gcimporter + +import ( + "bytes" + "encoding/binary" + "fmt" + "go/constant" + "go/token" + "go/types" + "io" + "sort" +) + +type intReader struct { + *bytes.Reader + path string +} + +func (r *intReader) int64() int64 { + i, err := binary.ReadVarint(r.Reader) + if err != nil { + errorf("import %q: read varint error: %v", r.path, err) + } + return i +} + +func (r *intReader) uint64() uint64 { + i, err := binary.ReadUvarint(r.Reader) + if err != nil { + errorf("import %q: read varint error: %v", r.path, err) + } + return i +} + +const predeclReserved = 32 + +type itag uint64 + +const ( + // Types + definedType itag = iota + pointerType + sliceType + arrayType + chanType + mapType + signatureType + structType + interfaceType +) + +// IImportData imports a package from the serialized package data +// and returns 0 and a reference to the package. +// If the export data version is not recognized or the format is otherwise +// compromised, an error is returned. 
+func IImportData(fset *token.FileSet, imports map[string]*types.Package, data []byte, path string) (int, *types.Package, error) { + pkgs, err := iimportCommon(fset, imports, data, false, path) + if err != nil { + return 0, nil, err + } + return 0, pkgs[0], nil +} + +// IImportBundle imports a set of packages from the serialized package bundle. +func IImportBundle(fset *token.FileSet, imports map[string]*types.Package, data []byte) ([]*types.Package, error) { + return iimportCommon(fset, imports, data, true, "") +} + +func iimportCommon(fset *token.FileSet, imports map[string]*types.Package, data []byte, bundle bool, path string) (pkgs []*types.Package, err error) { + const currentVersion = 1 + version := int64(-1) + defer func() { + if e := recover(); e != nil { + if version > currentVersion { + err = fmt.Errorf("cannot import %q (%v), export data is newer version - update tool", path, e) + } else { + err = fmt.Errorf("cannot import %q (%v), possibly version skew - reinstall package", path, e) + } + } + }() + + r := &intReader{bytes.NewReader(data), path} + + if bundle { + bundleVersion := r.uint64() + switch bundleVersion { + case bundleVersion: + default: + errorf("unknown bundle format version %d", bundleVersion) + } + } + + version = int64(r.uint64()) + switch version { + case currentVersion, 0: + default: + errorf("unknown iexport format version %d", version) + } + + sLen := int64(r.uint64()) + dLen := int64(r.uint64()) + + whence, _ := r.Seek(0, io.SeekCurrent) + stringData := data[whence : whence+sLen] + declData := data[whence+sLen : whence+sLen+dLen] + r.Seek(sLen+dLen, io.SeekCurrent) + + p := iimporter{ + ipath: path, + version: int(version), + + stringData: stringData, + stringCache: make(map[uint64]string), + pkgCache: make(map[uint64]*types.Package), + + declData: declData, + pkgIndex: make(map[*types.Package]map[string]uint64), + typCache: make(map[uint64]types.Type), + + fake: fakeFileSet{ + fset: fset, + files: make(map[string]*token.File), + }, + } + + for i, pt := range predeclared() { + p.typCache[uint64(i)] = pt + } + + pkgList := make([]*types.Package, r.uint64()) + for i := range pkgList { + pkgPathOff := r.uint64() + pkgPath := p.stringAt(pkgPathOff) + pkgName := p.stringAt(r.uint64()) + _ = r.uint64() // package height; unused by go/types + + if pkgPath == "" { + pkgPath = path + } + pkg := imports[pkgPath] + if pkg == nil { + pkg = types.NewPackage(pkgPath, pkgName) + imports[pkgPath] = pkg + } else if pkg.Name() != pkgName { + errorf("conflicting names %s and %s for package %q", pkg.Name(), pkgName, path) + } + + p.pkgCache[pkgPathOff] = pkg + + nameIndex := make(map[string]uint64) + for nSyms := r.uint64(); nSyms > 0; nSyms-- { + name := p.stringAt(r.uint64()) + nameIndex[name] = r.uint64() + } + + p.pkgIndex[pkg] = nameIndex + pkgList[i] = pkg + } + + if bundle { + pkgs = make([]*types.Package, r.uint64()) + for i := range pkgs { + pkg := p.pkgAt(r.uint64()) + imps := make([]*types.Package, r.uint64()) + for j := range imps { + imps[j] = p.pkgAt(r.uint64()) + } + pkg.SetImports(imps) + pkgs[i] = pkg + } + } else { + if len(pkgList) == 0 { + errorf("no packages found for %s", path) + panic("unreachable") + } + pkgs = pkgList[:1] + + // record all referenced packages as imports + list := append(([]*types.Package)(nil), pkgList[1:]...) 
+ sort.Sort(byPath(list)) + pkgs[0].SetImports(list) + } + + for _, pkg := range pkgs { + if pkg.Complete() { + continue + } + + names := make([]string, 0, len(p.pkgIndex[pkg])) + for name := range p.pkgIndex[pkg] { + names = append(names, name) + } + sort.Strings(names) + for _, name := range names { + p.doDecl(pkg, name) + } + + // package was imported completely and without errors + pkg.MarkComplete() + } + + for _, typ := range p.interfaceList { + typ.Complete() + } + + return pkgs, nil +} + +type iimporter struct { + ipath string + version int + + stringData []byte + stringCache map[uint64]string + pkgCache map[uint64]*types.Package + + declData []byte + pkgIndex map[*types.Package]map[string]uint64 + typCache map[uint64]types.Type + + fake fakeFileSet + interfaceList []*types.Interface +} + +func (p *iimporter) doDecl(pkg *types.Package, name string) { + // See if we've already imported this declaration. + if obj := pkg.Scope().Lookup(name); obj != nil { + return + } + + off, ok := p.pkgIndex[pkg][name] + if !ok { + errorf("%v.%v not in index", pkg, name) + } + + r := &importReader{p: p, currPkg: pkg} + r.declReader.Reset(p.declData[off:]) + + r.obj(name) +} + +func (p *iimporter) stringAt(off uint64) string { + if s, ok := p.stringCache[off]; ok { + return s + } + + slen, n := binary.Uvarint(p.stringData[off:]) + if n <= 0 { + errorf("varint failed") + } + spos := off + uint64(n) + s := string(p.stringData[spos : spos+slen]) + p.stringCache[off] = s + return s +} + +func (p *iimporter) pkgAt(off uint64) *types.Package { + if pkg, ok := p.pkgCache[off]; ok { + return pkg + } + path := p.stringAt(off) + errorf("missing package %q in %q", path, p.ipath) + return nil +} + +func (p *iimporter) typAt(off uint64, base *types.Named) types.Type { + if t, ok := p.typCache[off]; ok && (base == nil || !isInterface(t)) { + return t + } + + if off < predeclReserved { + errorf("predeclared type missing from cache: %v", off) + } + + r := &importReader{p: p} + r.declReader.Reset(p.declData[off-predeclReserved:]) + t := r.doType(base) + + if base == nil || !isInterface(t) { + p.typCache[off] = t + } + return t +} + +type importReader struct { + p *iimporter + declReader bytes.Reader + currPkg *types.Package + prevFile string + prevLine int64 + prevColumn int64 +} + +func (r *importReader) obj(name string) { + tag := r.byte() + pos := r.pos() + + switch tag { + case 'A': + typ := r.typ() + + r.declare(types.NewTypeName(pos, r.currPkg, name, typ)) + + case 'C': + typ, val := r.value() + + r.declare(types.NewConst(pos, r.currPkg, name, typ, val)) + + case 'F': + sig := r.signature(nil) + + r.declare(types.NewFunc(pos, r.currPkg, name, sig)) + + case 'T': + // Types can be recursive. We need to setup a stub + // declaration before recursing. 
+ obj := types.NewTypeName(pos, r.currPkg, name, nil) + named := types.NewNamed(obj, nil, nil) + r.declare(obj) + + underlying := r.p.typAt(r.uint64(), named).Underlying() + named.SetUnderlying(underlying) + + if !isInterface(underlying) { + for n := r.uint64(); n > 0; n-- { + mpos := r.pos() + mname := r.ident() + recv := r.param() + msig := r.signature(recv) + + named.AddMethod(types.NewFunc(mpos, r.currPkg, mname, msig)) + } + } + + case 'V': + typ := r.typ() + + r.declare(types.NewVar(pos, r.currPkg, name, typ)) + + default: + errorf("unexpected tag: %v", tag) + } +} + +func (r *importReader) declare(obj types.Object) { + obj.Pkg().Scope().Insert(obj) +} + +func (r *importReader) value() (typ types.Type, val constant.Value) { + typ = r.typ() + + switch b := typ.Underlying().(*types.Basic); b.Info() & types.IsConstType { + case types.IsBoolean: + val = constant.MakeBool(r.bool()) + + case types.IsString: + val = constant.MakeString(r.string()) + + case types.IsInteger: + val = r.mpint(b) + + case types.IsFloat: + val = r.mpfloat(b) + + case types.IsComplex: + re := r.mpfloat(b) + im := r.mpfloat(b) + val = constant.BinaryOp(re, token.ADD, constant.MakeImag(im)) + + default: + if b.Kind() == types.Invalid { + val = constant.MakeUnknown() + return + } + errorf("unexpected type %v", typ) // panics + panic("unreachable") + } + + return +} + +func intSize(b *types.Basic) (signed bool, maxBytes uint) { + if (b.Info() & types.IsUntyped) != 0 { + return true, 64 + } + + switch b.Kind() { + case types.Float32, types.Complex64: + return true, 3 + case types.Float64, types.Complex128: + return true, 7 + } + + signed = (b.Info() & types.IsUnsigned) == 0 + switch b.Kind() { + case types.Int8, types.Uint8: + maxBytes = 1 + case types.Int16, types.Uint16: + maxBytes = 2 + case types.Int32, types.Uint32: + maxBytes = 4 + default: + maxBytes = 8 + } + + return +} + +func (r *importReader) mpint(b *types.Basic) constant.Value { + signed, maxBytes := intSize(b) + + maxSmall := 256 - maxBytes + if signed { + maxSmall = 256 - 2*maxBytes + } + if maxBytes == 1 { + maxSmall = 256 + } + + n, _ := r.declReader.ReadByte() + if uint(n) < maxSmall { + v := int64(n) + if signed { + v >>= 1 + if n&1 != 0 { + v = ^v + } + } + return constant.MakeInt64(v) + } + + v := -n + if signed { + v = -(n &^ 1) >> 1 + } + if v < 1 || uint(v) > maxBytes { + errorf("weird decoding: %v, %v => %v", n, signed, v) + } + + buf := make([]byte, v) + io.ReadFull(&r.declReader, buf) + + // convert to little endian + // TODO(gri) go/constant should have a more direct conversion function + // (e.g., once it supports a big.Float based implementation) + for i, j := 0, len(buf)-1; i < j; i, j = i+1, j-1 { + buf[i], buf[j] = buf[j], buf[i] + } + + x := constant.MakeFromBytes(buf) + if signed && n&1 != 0 { + x = constant.UnaryOp(token.SUB, x, 0) + } + return x +} + +func (r *importReader) mpfloat(b *types.Basic) constant.Value { + x := r.mpint(b) + if constant.Sign(x) == 0 { + return x + } + + exp := r.int64() + switch { + case exp > 0: + x = constant.Shift(x, token.SHL, uint(exp)) + // Ensure that the imported Kind is Float, else this constant may run into + // bitsize limits on overlarge integers. Eventually we can instead adopt + // the approach of CL 288632, but that CL relies on go/constant APIs that + // were introduced in go1.13. + // + // TODO(rFindley): sync the logic here with tip Go once we no longer + // support go1.12. 
+ x = constant.ToFloat(x) + case exp < 0: + d := constant.Shift(constant.MakeInt64(1), token.SHL, uint(-exp)) + x = constant.BinaryOp(x, token.QUO, d) + } + return x +} + +func (r *importReader) ident() string { + return r.string() +} + +func (r *importReader) qualifiedIdent() (*types.Package, string) { + name := r.string() + pkg := r.pkg() + return pkg, name +} + +func (r *importReader) pos() token.Pos { + if r.p.version >= 1 { + r.posv1() + } else { + r.posv0() + } + + if r.prevFile == "" && r.prevLine == 0 && r.prevColumn == 0 { + return token.NoPos + } + return r.p.fake.pos(r.prevFile, int(r.prevLine), int(r.prevColumn)) +} + +func (r *importReader) posv0() { + delta := r.int64() + if delta != deltaNewFile { + r.prevLine += delta + } else if l := r.int64(); l == -1 { + r.prevLine += deltaNewFile + } else { + r.prevFile = r.string() + r.prevLine = l + } +} + +func (r *importReader) posv1() { + delta := r.int64() + r.prevColumn += delta >> 1 + if delta&1 != 0 { + delta = r.int64() + r.prevLine += delta >> 1 + if delta&1 != 0 { + r.prevFile = r.string() + } + } +} + +func (r *importReader) typ() types.Type { + return r.p.typAt(r.uint64(), nil) +} + +func isInterface(t types.Type) bool { + _, ok := t.(*types.Interface) + return ok +} + +func (r *importReader) pkg() *types.Package { return r.p.pkgAt(r.uint64()) } +func (r *importReader) string() string { return r.p.stringAt(r.uint64()) } + +func (r *importReader) doType(base *types.Named) types.Type { + switch k := r.kind(); k { + default: + errorf("unexpected kind tag in %q: %v", r.p.ipath, k) + return nil + + case definedType: + pkg, name := r.qualifiedIdent() + r.p.doDecl(pkg, name) + return pkg.Scope().Lookup(name).(*types.TypeName).Type() + case pointerType: + return types.NewPointer(r.typ()) + case sliceType: + return types.NewSlice(r.typ()) + case arrayType: + n := r.uint64() + return types.NewArray(r.typ(), int64(n)) + case chanType: + dir := chanDir(int(r.uint64())) + return types.NewChan(dir, r.typ()) + case mapType: + return types.NewMap(r.typ(), r.typ()) + case signatureType: + r.currPkg = r.pkg() + return r.signature(nil) + + case structType: + r.currPkg = r.pkg() + + fields := make([]*types.Var, r.uint64()) + tags := make([]string, len(fields)) + for i := range fields { + fpos := r.pos() + fname := r.ident() + ftyp := r.typ() + emb := r.bool() + tag := r.string() + + fields[i] = types.NewField(fpos, r.currPkg, fname, ftyp, emb) + tags[i] = tag + } + return types.NewStruct(fields, tags) + + case interfaceType: + r.currPkg = r.pkg() + + embeddeds := make([]types.Type, r.uint64()) + for i := range embeddeds { + _ = r.pos() + embeddeds[i] = r.typ() + } + + methods := make([]*types.Func, r.uint64()) + for i := range methods { + mpos := r.pos() + mname := r.ident() + + // TODO(mdempsky): Matches bimport.go, but I + // don't agree with this. 
+ var recv *types.Var + if base != nil { + recv = types.NewVar(token.NoPos, r.currPkg, "", base) + } + + msig := r.signature(recv) + methods[i] = types.NewFunc(mpos, r.currPkg, mname, msig) + } + + typ := newInterface(methods, embeddeds) + r.p.interfaceList = append(r.p.interfaceList, typ) + return typ + } +} + +func (r *importReader) kind() itag { + return itag(r.uint64()) +} + +func (r *importReader) signature(recv *types.Var) *types.Signature { + params := r.paramList() + results := r.paramList() + variadic := params.Len() > 0 && r.bool() + return types.NewSignature(recv, params, results, variadic) +} + +func (r *importReader) paramList() *types.Tuple { + xs := make([]*types.Var, r.uint64()) + for i := range xs { + xs[i] = r.param() + } + return types.NewTuple(xs...) +} + +func (r *importReader) param() *types.Var { + pos := r.pos() + name := r.ident() + typ := r.typ() + return types.NewParam(pos, r.currPkg, name, typ) +} + +func (r *importReader) bool() bool { + return r.uint64() != 0 +} + +func (r *importReader) int64() int64 { + n, err := binary.ReadVarint(&r.declReader) + if err != nil { + errorf("readVarint: %v", err) + } + return n +} + +func (r *importReader) uint64() uint64 { + n, err := binary.ReadUvarint(&r.declReader) + if err != nil { + errorf("readUvarint: %v", err) + } + return n +} + +func (r *importReader) byte() byte { + x, err := r.declReader.ReadByte() + if err != nil { + errorf("declReader.ReadByte: %v", err) + } + return x +} diff --git a/vendor/golang.org/x/tools/go/internal/gcimporter/newInterface10.go b/vendor/golang.org/x/tools/go/internal/gcimporter/newInterface10.go new file mode 100644 index 000000000..8b163e3d0 --- /dev/null +++ b/vendor/golang.org/x/tools/go/internal/gcimporter/newInterface10.go @@ -0,0 +1,22 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:build !go1.11 +// +build !go1.11 + +package gcimporter + +import "go/types" + +func newInterface(methods []*types.Func, embeddeds []types.Type) *types.Interface { + named := make([]*types.Named, len(embeddeds)) + for i, e := range embeddeds { + var ok bool + named[i], ok = e.(*types.Named) + if !ok { + panic("embedding of non-defined interfaces in interfaces is not supported before Go 1.11") + } + } + return types.NewInterface(methods, named) +} diff --git a/vendor/golang.org/x/tools/go/internal/gcimporter/newInterface11.go b/vendor/golang.org/x/tools/go/internal/gcimporter/newInterface11.go new file mode 100644 index 000000000..49984f40f --- /dev/null +++ b/vendor/golang.org/x/tools/go/internal/gcimporter/newInterface11.go @@ -0,0 +1,14 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:build go1.11 +// +build go1.11 + +package gcimporter + +import "go/types" + +func newInterface(methods []*types.Func, embeddeds []types.Type) *types.Interface { + return types.NewInterfaceType(methods, embeddeds) +} diff --git a/vendor/golang.org/x/tools/go/internal/packagesdriver/sizes.go b/vendor/golang.org/x/tools/go/internal/packagesdriver/sizes.go new file mode 100644 index 000000000..18a002f82 --- /dev/null +++ b/vendor/golang.org/x/tools/go/internal/packagesdriver/sizes.go @@ -0,0 +1,49 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+
+// Package packagesdriver fetches type sizes for go/packages and go/analysis.
+package packagesdriver
+
+import (
+	"context"
+	"fmt"
+	"go/types"
+	"strings"
+
+	"golang.org/x/tools/internal/gocommand"
+)
+
+var debug = false
+
+func GetSizesGolist(ctx context.Context, inv gocommand.Invocation, gocmdRunner *gocommand.Runner) (types.Sizes, error) {
+	inv.Verb = "list"
+	inv.Args = []string{"-f", "{{context.GOARCH}} {{context.Compiler}}", "--", "unsafe"}
+	stdout, stderr, friendlyErr, rawErr := gocmdRunner.RunRaw(ctx, inv)
+	var goarch, compiler string
+	if rawErr != nil {
+		if rawErrMsg := rawErr.Error(); strings.Contains(rawErrMsg, "cannot find main module") || strings.Contains(rawErrMsg, "go.mod file not found") {
+			// User's running outside of a module. All bets are off. Get GOARCH and guess compiler is gc.
+			// TODO(matloob): Is this a problem in practice?
+			inv.Verb = "env"
+			inv.Args = []string{"GOARCH"}
+			envout, enverr := gocmdRunner.Run(ctx, inv)
+			if enverr != nil {
+				return nil, enverr
+			}
+			goarch = strings.TrimSpace(envout.String())
+			compiler = "gc"
+		} else {
+			return nil, friendlyErr
+		}
+	} else {
+		fields := strings.Fields(stdout.String())
+		if len(fields) < 2 {
+			return nil, fmt.Errorf("could not parse GOARCH and Go compiler in format \"<GOARCH> <compiler>\":\nstdout: <<%s>>\nstderr: <<%s>>",
+				stdout.String(), stderr.String())
+		}
+		goarch = fields[0]
+		compiler = fields[1]
+	}
+	return types.SizesFor(compiler, goarch), nil
+}
diff --git a/vendor/golang.org/x/tools/go/packages/doc.go b/vendor/golang.org/x/tools/go/packages/doc.go
new file mode 100644
index 000000000..4bfe28a51
--- /dev/null
+++ b/vendor/golang.org/x/tools/go/packages/doc.go
@@ -0,0 +1,221 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+/*
+Package packages loads Go packages for inspection and analysis.
+
+The Load function takes as input a list of patterns and returns a list of Package
+structs describing individual packages matched by those patterns.
+The LoadMode controls the amount of detail in the loaded packages.
+
+Load passes most patterns directly to the underlying build tool,
+but all patterns with the prefix "query=", where query is a
+non-empty string of letters from [a-z], are reserved and may be
+interpreted as query operators.
+
+Two query operators are currently supported: "file" and "pattern".
+
+The query "file=path/to/file.go" matches the package or packages enclosing
+the Go source file path/to/file.go. For example "file=~/go/src/fmt/print.go"
+might return the packages "fmt" and "fmt [fmt.test]".
+
+The query "pattern=string" causes "string" to be passed directly to
+the underlying build tool. In most cases this is unnecessary,
+but an application can use Load("pattern=" + x) as an escaping mechanism
+to ensure that x is not interpreted as a query operator if it contains '='.
+
+All other query operators are reserved for future use and currently
+cause Load to report an error.
+
+The Package struct provides basic information about the package, including
+
+ - ID, a unique identifier for the package in the returned set;
+ - GoFiles, the names of the package's Go source files;
+ - Imports, a map from source import strings to the Packages they name;
+ - Types, the type information for the package's exported symbols;
+ - Syntax, the parsed syntax trees for the package's source code; and
+ - TypeInfo, the result of a complete type-check of the package syntax trees.
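A minimal sketch of a caller reading these fields (illustrative only, not part of the vendored doc.go; the "./..." pattern and the particular Need* mode bits are arbitrary placeholder choices):

// Sketch: load packages under the current module and inspect the fields
// described above. Illustrative only; not part of the vendored sources.
package main

import (
	"fmt"
	"log"

	"golang.org/x/tools/go/packages"
)

func main() {
	cfg := &packages.Config{
		// The Need* bits select how much of the information above is
		// populated; requesting fewer bits makes Load cheaper.
		Mode: packages.NeedName | packages.NeedFiles | packages.NeedImports |
			packages.NeedTypes | packages.NeedSyntax | packages.NeedTypesInfo,
	}
	pkgs, err := packages.Load(cfg, "./...") // "./..." is a placeholder pattern
	if err != nil {
		log.Fatal(err) // catastrophic failure; per-package errors are handled below
	}
	if packages.PrintErrors(pkgs) > 0 {
		log.Fatal("packages contain errors")
	}
	for _, pkg := range pkgs {
		fmt.Printf("%s: %d files, %d direct imports\n",
			pkg.ID, len(pkg.GoFiles), len(pkg.Imports))
	}
}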
+ +(See the documentation for type Package for the complete list of fields +and more detailed descriptions.) + +For example, + + Load(nil, "bytes", "unicode...") + +returns four Package structs describing the standard library packages +bytes, unicode, unicode/utf16, and unicode/utf8. Note that one pattern +can match multiple packages and that a package might be matched by +multiple patterns: in general it is not possible to determine which +packages correspond to which patterns. + +Note that the list returned by Load contains only the packages matched +by the patterns. Their dependencies can be found by walking the import +graph using the Imports fields. + +The Load function can be configured by passing a pointer to a Config as +the first argument. A nil Config is equivalent to the zero Config, which +causes Load to run in LoadFiles mode, collecting minimal information. +See the documentation for type Config for details. + +As noted earlier, the Config.Mode controls the amount of detail +reported about the loaded packages. See the documentation for type LoadMode +for details. + +Most tools should pass their command-line arguments (after any flags) +uninterpreted to the loader, so that the loader can interpret them +according to the conventions of the underlying build system. +See the Example function for typical usage. + +*/ +package packages // import "golang.org/x/tools/go/packages" + +/* + +Motivation and design considerations + +The new package's design solves problems addressed by two existing +packages: go/build, which locates and describes packages, and +golang.org/x/tools/go/loader, which loads, parses and type-checks them. +The go/build.Package structure encodes too much of the 'go build' way +of organizing projects, leaving us in need of a data type that describes a +package of Go source code independent of the underlying build system. +We wanted something that works equally well with go build and vgo, and +also other build systems such as Bazel and Blaze, making it possible to +construct analysis tools that work in all these environments. +Tools such as errcheck and staticcheck were essentially unavailable to +the Go community at Google, and some of Google's internal tools for Go +are unavailable externally. +This new package provides a uniform way to obtain package metadata by +querying each of these build systems, optionally supporting their +preferred command-line notations for packages, so that tools integrate +neatly with users' build environments. The Metadata query function +executes an external query tool appropriate to the current workspace. + +Loading packages always returns the complete import graph "all the way down", +even if all you want is information about a single package, because the query +mechanisms of all the build systems we currently support ({go,vgo} list, and +blaze/bazel aspect-based query) cannot provide detailed information +about one package without visiting all its dependencies too, so there is +no additional asymptotic cost to providing transitive information. +(This property might not be true of a hypothetical 5th build system.) + +In calls to TypeCheck, all initial packages, and any package that +transitively depends on one of them, must be loaded from source. +Consider A->B->C->D->E: if A,C are initial, A,B,C must be loaded from +source; D may be loaded from export data, and E may not be loaded at all +(though it's possible that D's export data mentions it, so a +types.Package may be created for it and exposed.) 
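As noted above, a tool finds a package's dependencies by walking the import graph through the Imports fields. A minimal sketch of such a walk (illustrative only, not part of the vendored file; it assumes the NeedImports and NeedDeps mode bits so that Imports is populated transitively) follows; the packages.Visit helper offers a similar traversal with deterministic order.

// Sketch: print the IDs of a package and its transitive dependencies.
// Illustrative only; not part of the vendored sources.
package main

import (
	"fmt"
	"log"

	"golang.org/x/tools/go/packages"
)

func main() {
	cfg := &packages.Config{
		// NeedDeps extends the requested fields (here NeedName and
		// NeedImports) to every package reachable through Imports.
		Mode: packages.NeedName | packages.NeedImports | packages.NeedDeps,
	}
	roots, err := packages.Load(cfg, "bytes")
	if err != nil {
		log.Fatal(err)
	}
	seen := make(map[string]bool) // the graph is shared, so dedupe by ID
	var walk func(*packages.Package)
	walk = func(p *packages.Package) {
		if seen[p.ID] {
			return
		}
		seen[p.ID] = true
		fmt.Println(p.ID)
		for _, dep := range p.Imports {
			walk(dep)
		}
	}
	for _, r := range roots {
		walk(r)
	}
}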
+
+The old loader had a feature to suppress type-checking of function
+bodies on a per-package basis, primarily intended to reduce the work of
+obtaining type information for imported packages. Now that imports are
+satisfied by export data, the optimization no longer seems necessary.
+
+Despite some early attempts, the old loader did not exploit export data,
+instead always using the equivalent of WholeProgram mode. This was due
+to the complexity of mixing source and export data packages (now
+resolved by the upward traversal mentioned above), and because export data
+files were nearly always missing or stale. Now that 'go build' supports
+caching, all the underlying build systems can guarantee to produce
+export data in a reasonable (amortized) time.
+
+Test "main" packages synthesized by the build system are now reported as
+first-class packages, avoiding the need for clients (such as go/ssa) to
+reinvent this generation logic.
+
+One way in which go/packages is simpler than the old loader is in its
+treatment of in-package tests. In-package tests are packages that
+consist of all the files of the library under test, plus the test files.
+The old loader constructed in-package tests by a two-phase process of
+mutation called "augmentation": first it would construct and type check
+all the ordinary library packages and type-check the packages that
+depend on them; then it would add more (test) files to the package and
+type-check again. This two-phase approach had four major problems:
+1) in processing the tests, the loader modified the library package,
+ leaving no way for a client application to see both the test
+ package and the library package; one would mutate into the other.
+2) because test files can declare additional methods on types defined in
+ the library portion of the package, the dispatch of method calls in
+ the library portion was affected by the presence of the test files.
+ This should have been a clue that the packages were logically
+ different.
+3) this model of "augmentation" assumed at most one in-package test
+ per library package, which is true of projects using 'go build',
+ but not other build systems.
+4) because of the two-phase nature of test processing, all packages that
+ import the library package had to be processed before augmentation,
+ forcing a "one-shot" API and preventing the client from calling Load
+ several times in sequence as is now possible in WholeProgram mode.
+ (TypeCheck mode has a similar one-shot restriction for a different reason.)
+
+Early drafts of this package supported "multi-shot" operation.
+Although it allowed clients to make a sequence of calls (or concurrent
+calls) to Load, building up the graph of Packages incrementally,
+it was of marginal value: it complicated the API
+(since it allowed some options to vary across calls but not others),
+it complicated the implementation,
+it cannot be made to work in Types mode, as explained above,
+and it was less efficient than making one combined call (when this is possible).
+Among the clients we have inspected, none made multiple calls to load
+but could not be easily and satisfactorily modified to make only a single call.
+However, application changes may be required.
+For example, the ssadump command loads the user-specified packages
+and in addition the runtime package. It is tempting to simply append
+"runtime" to the user-provided list, but that does not work if the user
+specified an ad-hoc package such as [a.go b.go].
+Instead, ssadump no longer requests the runtime package, +but seeks it among the dependencies of the user-specified packages, +and emits an error if it is not found. + +Overlays: The Overlay field in the Config allows providing alternate contents +for Go source files, by providing a mapping from file path to contents. +go/packages will pull in new imports added in overlay files when go/packages +is run in LoadImports mode or greater. +Overlay support for the go list driver isn't complete yet: if the file doesn't +exist on disk, it will only be recognized in an overlay if it is a non-test file +and the package would be reported even without the overlay. + +Questions & Tasks + +- Add GOARCH/GOOS? + They are not portable concepts, but could be made portable. + Our goal has been to allow users to express themselves using the conventions + of the underlying build system: if the build system honors GOARCH + during a build and during a metadata query, then so should + applications built atop that query mechanism. + Conversely, if the target architecture of the build is determined by + command-line flags, the application can pass the relevant + flags through to the build system using a command such as: + myapp -query_flag="--cpu=amd64" -query_flag="--os=darwin" + However, this approach is low-level, unwieldy, and non-portable. + GOOS and GOARCH seem important enough to warrant a dedicated option. + +- How should we handle partial failures such as a mixture of good and + malformed patterns, existing and non-existent packages, successful and + failed builds, import failures, import cycles, and so on, in a call to + Load? + +- Support bazel, blaze, and go1.10 list, not just go1.11 list. + +- Handle (and test) various partial success cases, e.g. + a mixture of good packages and: + invalid patterns + nonexistent packages + empty packages + packages with malformed package or import declarations + unreadable files + import cycles + other parse errors + type errors + Make sure we record errors at the correct place in the graph. + +- Missing packages among initial arguments are not reported. + Return bogus packages for them, like golist does. + +- "undeclared name" errors (for example) are reported out of source file + order. I suspect this is due to the breadth-first resolution now used + by go/types. Is that a bug? Discuss with gri. + +*/ diff --git a/vendor/golang.org/x/tools/go/packages/external.go b/vendor/golang.org/x/tools/go/packages/external.go new file mode 100644 index 000000000..7242a0a7d --- /dev/null +++ b/vendor/golang.org/x/tools/go/packages/external.go @@ -0,0 +1,101 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// This file enables an external tool to intercept package requests. +// If the tool is present then its results are used in preference to +// the go list command. + +package packages + +import ( + "bytes" + "encoding/json" + "fmt" + exec "golang.org/x/sys/execabs" + "os" + "strings" +) + +// The Driver Protocol +// +// The driver, given the inputs to a call to Load, returns metadata about the packages specified. +// This allows for different build systems to support go/packages by telling go/packages how the +// packages' source is organized. +// The driver is a binary, either specified by the GOPACKAGESDRIVER environment variable or in +// the path as gopackagesdriver. It's given the inputs to load in its argv. 
See the package +// documentation in doc.go for the full description of the patterns that need to be supported. +// A driver receives as a JSON-serialized driverRequest struct in standard input and will +// produce a JSON-serialized driverResponse (see definition in packages.go) in its standard output. + +// driverRequest is used to provide the portion of Load's Config that is needed by a driver. +type driverRequest struct { + Mode LoadMode `json:"mode"` + // Env specifies the environment the underlying build system should be run in. + Env []string `json:"env"` + // BuildFlags are flags that should be passed to the underlying build system. + BuildFlags []string `json:"build_flags"` + // Tests specifies whether the patterns should also return test packages. + Tests bool `json:"tests"` + // Overlay maps file paths (relative to the driver's working directory) to the byte contents + // of overlay files. + Overlay map[string][]byte `json:"overlay"` +} + +// findExternalDriver returns the file path of a tool that supplies +// the build system package structure, or "" if not found." +// If GOPACKAGESDRIVER is set in the environment findExternalTool returns its +// value, otherwise it searches for a binary named gopackagesdriver on the PATH. +func findExternalDriver(cfg *Config) driver { + const toolPrefix = "GOPACKAGESDRIVER=" + tool := "" + for _, env := range cfg.Env { + if val := strings.TrimPrefix(env, toolPrefix); val != env { + tool = val + } + } + if tool != "" && tool == "off" { + return nil + } + if tool == "" { + var err error + tool, err = exec.LookPath("gopackagesdriver") + if err != nil { + return nil + } + } + return func(cfg *Config, words ...string) (*driverResponse, error) { + req, err := json.Marshal(driverRequest{ + Mode: cfg.Mode, + Env: cfg.Env, + BuildFlags: cfg.BuildFlags, + Tests: cfg.Tests, + Overlay: cfg.Overlay, + }) + if err != nil { + return nil, fmt.Errorf("failed to encode message to driver tool: %v", err) + } + + buf := new(bytes.Buffer) + stderr := new(bytes.Buffer) + cmd := exec.CommandContext(cfg.Context, tool, words...) + cmd.Dir = cfg.Dir + cmd.Env = cfg.Env + cmd.Stdin = bytes.NewReader(req) + cmd.Stdout = buf + cmd.Stderr = stderr + + if err := cmd.Run(); err != nil { + return nil, fmt.Errorf("%v: %v: %s", tool, err, cmd.Stderr) + } + if len(stderr.Bytes()) != 0 && os.Getenv("GOPACKAGESPRINTDRIVERERRORS") != "" { + fmt.Fprintf(os.Stderr, "%s stderr: <<%s>>\n", cmdDebugStr(cmd), stderr) + } + + var response driverResponse + if err := json.Unmarshal(buf.Bytes(), &response); err != nil { + return nil, err + } + return &response, nil + } +} diff --git a/vendor/golang.org/x/tools/go/packages/golist.go b/vendor/golang.org/x/tools/go/packages/golist.go new file mode 100644 index 000000000..0e1e7f11f --- /dev/null +++ b/vendor/golang.org/x/tools/go/packages/golist.go @@ -0,0 +1,1099 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package packages + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "go/types" + "io/ioutil" + "log" + "os" + "path" + "path/filepath" + "reflect" + "sort" + "strconv" + "strings" + "sync" + "unicode" + + exec "golang.org/x/sys/execabs" + "golang.org/x/tools/go/internal/packagesdriver" + "golang.org/x/tools/internal/gocommand" + "golang.org/x/tools/internal/packagesinternal" + "golang.org/x/xerrors" +) + +// debug controls verbose logging. 
+var debug, _ = strconv.ParseBool(os.Getenv("GOPACKAGESDEBUG")) + +// A goTooOldError reports that the go command +// found by exec.LookPath is too old to use the new go list behavior. +type goTooOldError struct { + error +} + +// responseDeduper wraps a driverResponse, deduplicating its contents. +type responseDeduper struct { + seenRoots map[string]bool + seenPackages map[string]*Package + dr *driverResponse +} + +func newDeduper() *responseDeduper { + return &responseDeduper{ + dr: &driverResponse{}, + seenRoots: map[string]bool{}, + seenPackages: map[string]*Package{}, + } +} + +// addAll fills in r with a driverResponse. +func (r *responseDeduper) addAll(dr *driverResponse) { + for _, pkg := range dr.Packages { + r.addPackage(pkg) + } + for _, root := range dr.Roots { + r.addRoot(root) + } +} + +func (r *responseDeduper) addPackage(p *Package) { + if r.seenPackages[p.ID] != nil { + return + } + r.seenPackages[p.ID] = p + r.dr.Packages = append(r.dr.Packages, p) +} + +func (r *responseDeduper) addRoot(id string) { + if r.seenRoots[id] { + return + } + r.seenRoots[id] = true + r.dr.Roots = append(r.dr.Roots, id) +} + +type golistState struct { + cfg *Config + ctx context.Context + + envOnce sync.Once + goEnvError error + goEnv map[string]string + + rootsOnce sync.Once + rootDirsError error + rootDirs map[string]string + + goVersionOnce sync.Once + goVersionError error + goVersion int // The X in Go 1.X. + + // vendorDirs caches the (non)existence of vendor directories. + vendorDirs map[string]bool +} + +// getEnv returns Go environment variables. Only specific variables are +// populated -- computing all of them is slow. +func (state *golistState) getEnv() (map[string]string, error) { + state.envOnce.Do(func() { + var b *bytes.Buffer + b, state.goEnvError = state.invokeGo("env", "-json", "GOMOD", "GOPATH") + if state.goEnvError != nil { + return + } + + state.goEnv = make(map[string]string) + decoder := json.NewDecoder(b) + if state.goEnvError = decoder.Decode(&state.goEnv); state.goEnvError != nil { + return + } + }) + return state.goEnv, state.goEnvError +} + +// mustGetEnv is a convenience function that can be used if getEnv has already succeeded. +func (state *golistState) mustGetEnv() map[string]string { + env, err := state.getEnv() + if err != nil { + panic(fmt.Sprintf("mustGetEnv: %v", err)) + } + return env +} + +// goListDriver uses the go list command to interpret the patterns and produce +// the build system package structure. +// See driver for more details. +func goListDriver(cfg *Config, patterns ...string) (*driverResponse, error) { + // Make sure that any asynchronous go commands are killed when we return. + parentCtx := cfg.Context + if parentCtx == nil { + parentCtx = context.Background() + } + ctx, cancel := context.WithCancel(parentCtx) + defer cancel() + + response := newDeduper() + + state := &golistState{ + cfg: cfg, + ctx: ctx, + vendorDirs: map[string]bool{}, + } + + // Fill in response.Sizes asynchronously if necessary. + var sizeserr error + var sizeswg sync.WaitGroup + if cfg.Mode&NeedTypesSizes != 0 || cfg.Mode&NeedTypes != 0 { + sizeswg.Add(1) + go func() { + var sizes types.Sizes + sizes, sizeserr = packagesdriver.GetSizesGolist(ctx, state.cfgInvocation(), cfg.gocmdRunner) + // types.SizesFor always returns nil or a *types.StdSizes. 
+ response.dr.Sizes, _ = sizes.(*types.StdSizes) + sizeswg.Done() + }() + } + + // Determine files requested in contains patterns + var containFiles []string + restPatterns := make([]string, 0, len(patterns)) + // Extract file= and other [querytype]= patterns. Report an error if querytype + // doesn't exist. +extractQueries: + for _, pattern := range patterns { + eqidx := strings.Index(pattern, "=") + if eqidx < 0 { + restPatterns = append(restPatterns, pattern) + } else { + query, value := pattern[:eqidx], pattern[eqidx+len("="):] + switch query { + case "file": + containFiles = append(containFiles, value) + case "pattern": + restPatterns = append(restPatterns, value) + case "": // not a reserved query + restPatterns = append(restPatterns, pattern) + default: + for _, rune := range query { + if rune < 'a' || rune > 'z' { // not a reserved query + restPatterns = append(restPatterns, pattern) + continue extractQueries + } + } + // Reject all other patterns containing "=" + return nil, fmt.Errorf("invalid query type %q in query pattern %q", query, pattern) + } + } + } + + // See if we have any patterns to pass through to go list. Zero initial + // patterns also requires a go list call, since it's the equivalent of + // ".". + if len(restPatterns) > 0 || len(patterns) == 0 { + dr, err := state.createDriverResponse(restPatterns...) + if err != nil { + return nil, err + } + response.addAll(dr) + } + + if len(containFiles) != 0 { + if err := state.runContainsQueries(response, containFiles); err != nil { + return nil, err + } + } + + // Only use go/packages' overlay processing if we're using a Go version + // below 1.16. Otherwise, go list handles it. + if goVersion, err := state.getGoVersion(); err == nil && goVersion < 16 { + modifiedPkgs, needPkgs, err := state.processGolistOverlay(response) + if err != nil { + return nil, err + } + + var containsCandidates []string + if len(containFiles) > 0 { + containsCandidates = append(containsCandidates, modifiedPkgs...) + containsCandidates = append(containsCandidates, needPkgs...) + } + if err := state.addNeededOverlayPackages(response, needPkgs); err != nil { + return nil, err + } + // Check candidate packages for containFiles. + if len(containFiles) > 0 { + for _, id := range containsCandidates { + pkg, ok := response.seenPackages[id] + if !ok { + response.addPackage(&Package{ + ID: id, + Errors: []Error{{ + Kind: ListError, + Msg: fmt.Sprintf("package %s expected but not seen", id), + }}, + }) + continue + } + for _, f := range containFiles { + for _, g := range pkg.GoFiles { + if sameFile(f, g) { + response.addRoot(id) + } + } + } + } + } + // Add root for any package that matches a pattern. This applies only to + // packages that are modified by overlays, since they are not added as + // roots automatically. + for _, pattern := range restPatterns { + match := matchPattern(pattern) + for _, pkgID := range modifiedPkgs { + pkg, ok := response.seenPackages[pkgID] + if !ok { + continue + } + if match(pkg.PkgPath) { + response.addRoot(pkg.ID) + } + } + } + } + + sizeswg.Wait() + if sizeserr != nil { + return nil, sizeserr + } + return response.dr, nil +} + +func (state *golistState) addNeededOverlayPackages(response *responseDeduper, pkgs []string) error { + if len(pkgs) == 0 { + return nil + } + dr, err := state.createDriverResponse(pkgs...) 
+ if err != nil { + return err + } + for _, pkg := range dr.Packages { + response.addPackage(pkg) + } + _, needPkgs, err := state.processGolistOverlay(response) + if err != nil { + return err + } + return state.addNeededOverlayPackages(response, needPkgs) +} + +func (state *golistState) runContainsQueries(response *responseDeduper, queries []string) error { + for _, query := range queries { + // TODO(matloob): Do only one query per directory. + fdir := filepath.Dir(query) + // Pass absolute path of directory to go list so that it knows to treat it as a directory, + // not a package path. + pattern, err := filepath.Abs(fdir) + if err != nil { + return fmt.Errorf("could not determine absolute path of file= query path %q: %v", query, err) + } + dirResponse, err := state.createDriverResponse(pattern) + + // If there was an error loading the package, or the package is returned + // with errors, try to load the file as an ad-hoc package. + // Usually the error will appear in a returned package, but may not if we're + // in module mode and the ad-hoc is located outside a module. + if err != nil || len(dirResponse.Packages) == 1 && len(dirResponse.Packages[0].GoFiles) == 0 && + len(dirResponse.Packages[0].Errors) == 1 { + var queryErr error + if dirResponse, queryErr = state.adhocPackage(pattern, query); queryErr != nil { + return err // return the original error + } + } + isRoot := make(map[string]bool, len(dirResponse.Roots)) + for _, root := range dirResponse.Roots { + isRoot[root] = true + } + for _, pkg := range dirResponse.Packages { + // Add any new packages to the main set + // We don't bother to filter packages that will be dropped by the changes of roots, + // that will happen anyway during graph construction outside this function. + // Over-reporting packages is not a problem. + response.addPackage(pkg) + // if the package was not a root one, it cannot have the file + if !isRoot[pkg.ID] { + continue + } + for _, pkgFile := range pkg.GoFiles { + if filepath.Base(query) == filepath.Base(pkgFile) { + response.addRoot(pkg.ID) + break + } + } + } + } + return nil +} + +// adhocPackage attempts to load or construct an ad-hoc package for a given +// query, if the original call to the driver produced inadequate results. +func (state *golistState) adhocPackage(pattern, query string) (*driverResponse, error) { + response, err := state.createDriverResponse(query) + if err != nil { + return nil, err + } + // If we get nothing back from `go list`, + // try to make this file into its own ad-hoc package. + // TODO(rstambler): Should this check against the original response? + if len(response.Packages) == 0 { + response.Packages = append(response.Packages, &Package{ + ID: "command-line-arguments", + PkgPath: query, + GoFiles: []string{query}, + CompiledGoFiles: []string{query}, + Imports: make(map[string]*Package), + }) + response.Roots = append(response.Roots, "command-line-arguments") + } + // Handle special cases. + if len(response.Packages) == 1 { + // golang/go#33482: If this is a file= query for ad-hoc packages where + // the file only exists on an overlay, and exists outside of a module, + // add the file to the package and remove the errors. + if response.Packages[0].ID == "command-line-arguments" || + filepath.ToSlash(response.Packages[0].PkgPath) == filepath.ToSlash(query) { + if len(response.Packages[0].GoFiles) == 0 { + filename := filepath.Join(pattern, filepath.Base(query)) // avoid recomputing abspath + // TODO(matloob): check if the file is outside of a root dir? 
+ for path := range state.cfg.Overlay { + if path == filename { + response.Packages[0].Errors = nil + response.Packages[0].GoFiles = []string{path} + response.Packages[0].CompiledGoFiles = []string{path} + } + } + } + } + } + return response, nil +} + +// Fields must match go list; +// see $GOROOT/src/cmd/go/internal/load/pkg.go. +type jsonPackage struct { + ImportPath string + Dir string + Name string + Export string + GoFiles []string + CompiledGoFiles []string + IgnoredGoFiles []string + IgnoredOtherFiles []string + CFiles []string + CgoFiles []string + CXXFiles []string + MFiles []string + HFiles []string + FFiles []string + SFiles []string + SwigFiles []string + SwigCXXFiles []string + SysoFiles []string + Imports []string + ImportMap map[string]string + Deps []string + Module *Module + TestGoFiles []string + TestImports []string + XTestGoFiles []string + XTestImports []string + ForTest string // q in a "p [q.test]" package, else "" + DepOnly bool + + Error *packagesinternal.PackageError + DepsErrors []*packagesinternal.PackageError +} + +type jsonPackageError struct { + ImportStack []string + Pos string + Err string +} + +func otherFiles(p *jsonPackage) [][]string { + return [][]string{p.CFiles, p.CXXFiles, p.MFiles, p.HFiles, p.FFiles, p.SFiles, p.SwigFiles, p.SwigCXXFiles, p.SysoFiles} +} + +// createDriverResponse uses the "go list" command to expand the pattern +// words and return a response for the specified packages. +func (state *golistState) createDriverResponse(words ...string) (*driverResponse, error) { + // go list uses the following identifiers in ImportPath and Imports: + // + // "p" -- importable package or main (command) + // "q.test" -- q's test executable + // "p [q.test]" -- variant of p as built for q's test executable + // "q_test [q.test]" -- q's external test package + // + // The packages p that are built differently for a test q.test + // are q itself, plus any helpers used by the external test q_test, + // typically including "testing" and all its dependencies. + + // Run "go list" for complete + // information on the specified packages. + buf, err := state.invokeGo("list", golistargs(state.cfg, words)...) + if err != nil { + return nil, err + } + seen := make(map[string]*jsonPackage) + pkgs := make(map[string]*Package) + additionalErrors := make(map[string][]Error) + // Decode the JSON and convert it to Package form. + var response driverResponse + for dec := json.NewDecoder(buf); dec.More(); { + p := new(jsonPackage) + if err := dec.Decode(p); err != nil { + return nil, fmt.Errorf("JSON decoding failed: %v", err) + } + + if p.ImportPath == "" { + // The documentation for go list says that “[e]rroneous packages will have + // a non-empty ImportPath”. If for some reason it comes back empty, we + // prefer to error out rather than silently discarding data or handing + // back a package without any way to refer to it. + if p.Error != nil { + return nil, Error{ + Pos: p.Error.Pos, + Msg: p.Error.Err, + } + } + return nil, fmt.Errorf("package missing import path: %+v", p) + } + + // Work around https://golang.org/issue/33157: + // go list -e, when given an absolute path, will find the package contained at + // that directory. But when no package exists there, it will return a fake package + // with an error and the ImportPath set to the absolute path provided to go list. + // Try to convert that absolute path to what its package path would be if it's + // contained in a known module or GOPATH entry. 
This will allow the package to be + // properly "reclaimed" when overlays are processed. + if filepath.IsAbs(p.ImportPath) && p.Error != nil { + pkgPath, ok, err := state.getPkgPath(p.ImportPath) + if err != nil { + return nil, err + } + if ok { + p.ImportPath = pkgPath + } + } + + if old, found := seen[p.ImportPath]; found { + // If one version of the package has an error, and the other doesn't, assume + // that this is a case where go list is reporting a fake dependency variant + // of the imported package: When a package tries to invalidly import another + // package, go list emits a variant of the imported package (with the same + // import path, but with an error on it, and the package will have a + // DepError set on it). An example of when this can happen is for imports of + // main packages: main packages can not be imported, but they may be + // separately matched and listed by another pattern. + // See golang.org/issue/36188 for more details. + + // The plan is that eventually, hopefully in Go 1.15, the error will be + // reported on the importing package rather than the duplicate "fake" + // version of the imported package. Once all supported versions of Go + // have the new behavior this logic can be deleted. + // TODO(matloob): delete the workaround logic once all supported versions of + // Go return the errors on the proper package. + + // There should be exactly one version of a package that doesn't have an + // error. + if old.Error == nil && p.Error == nil { + if !reflect.DeepEqual(p, old) { + return nil, fmt.Errorf("internal error: go list gives conflicting information for package %v", p.ImportPath) + } + continue + } + + // Determine if this package's error needs to be bubbled up. + // This is a hack, and we expect for go list to eventually set the error + // on the package. + if old.Error != nil { + var errkind string + if strings.Contains(old.Error.Err, "not an importable package") { + errkind = "not an importable package" + } else if strings.Contains(old.Error.Err, "use of internal package") && strings.Contains(old.Error.Err, "not allowed") { + errkind = "use of internal package not allowed" + } + if errkind != "" { + if len(old.Error.ImportStack) < 1 { + return nil, fmt.Errorf(`internal error: go list gave a %q error with empty import stack`, errkind) + } + importingPkg := old.Error.ImportStack[len(old.Error.ImportStack)-1] + if importingPkg == old.ImportPath { + // Using an older version of Go which put this package itself on top of import + // stack, instead of the importer. Look for importer in second from top + // position. + if len(old.Error.ImportStack) < 2 { + return nil, fmt.Errorf(`internal error: go list gave a %q error with an import stack without importing package`, errkind) + } + importingPkg = old.Error.ImportStack[len(old.Error.ImportStack)-2] + } + additionalErrors[importingPkg] = append(additionalErrors[importingPkg], Error{ + Pos: old.Error.Pos, + Msg: old.Error.Err, + Kind: ListError, + }) + } + } + + // Make sure that if there's a version of the package without an error, + // that's the one reported to the user. + if old.Error == nil { + continue + } + + // This package will replace the old one at the end of the loop. 
+ } + seen[p.ImportPath] = p + + pkg := &Package{ + Name: p.Name, + ID: p.ImportPath, + GoFiles: absJoin(p.Dir, p.GoFiles, p.CgoFiles), + CompiledGoFiles: absJoin(p.Dir, p.CompiledGoFiles), + OtherFiles: absJoin(p.Dir, otherFiles(p)...), + IgnoredFiles: absJoin(p.Dir, p.IgnoredGoFiles, p.IgnoredOtherFiles), + forTest: p.ForTest, + depsErrors: p.DepsErrors, + Module: p.Module, + } + + if (state.cfg.Mode&typecheckCgo) != 0 && len(p.CgoFiles) != 0 { + if len(p.CompiledGoFiles) > len(p.GoFiles) { + // We need the cgo definitions, which are in the first + // CompiledGoFile after the non-cgo ones. This is a hack but there + // isn't currently a better way to find it. We also need the pure + // Go files and unprocessed cgo files, all of which are already + // in pkg.GoFiles. + cgoTypes := p.CompiledGoFiles[len(p.GoFiles)] + pkg.CompiledGoFiles = append([]string{cgoTypes}, pkg.GoFiles...) + } else { + // golang/go#38990: go list silently fails to do cgo processing + pkg.CompiledGoFiles = nil + pkg.Errors = append(pkg.Errors, Error{ + Msg: "go list failed to return CompiledGoFiles. This may indicate failure to perform cgo processing; try building at the command line. See https://golang.org/issue/38990.", + Kind: ListError, + }) + } + } + + // Work around https://golang.org/issue/28749: + // cmd/go puts assembly, C, and C++ files in CompiledGoFiles. + // Filter out any elements of CompiledGoFiles that are also in OtherFiles. + // We have to keep this workaround in place until go1.12 is a distant memory. + if len(pkg.OtherFiles) > 0 { + other := make(map[string]bool, len(pkg.OtherFiles)) + for _, f := range pkg.OtherFiles { + other[f] = true + } + + out := pkg.CompiledGoFiles[:0] + for _, f := range pkg.CompiledGoFiles { + if other[f] { + continue + } + out = append(out, f) + } + pkg.CompiledGoFiles = out + } + + // Extract the PkgPath from the package's ID. + if i := strings.IndexByte(pkg.ID, ' '); i >= 0 { + pkg.PkgPath = pkg.ID[:i] + } else { + pkg.PkgPath = pkg.ID + } + + if pkg.PkgPath == "unsafe" { + pkg.GoFiles = nil // ignore fake unsafe.go file + } + + // Assume go list emits only absolute paths for Dir. + if p.Dir != "" && !filepath.IsAbs(p.Dir) { + log.Fatalf("internal error: go list returned non-absolute Package.Dir: %s", p.Dir) + } + + if p.Export != "" && !filepath.IsAbs(p.Export) { + pkg.ExportFile = filepath.Join(p.Dir, p.Export) + } else { + pkg.ExportFile = p.Export + } + + // imports + // + // Imports contains the IDs of all imported packages. + // ImportsMap records (path, ID) only where they differ. + ids := make(map[string]bool) + for _, id := range p.Imports { + ids[id] = true + } + pkg.Imports = make(map[string]*Package) + for path, id := range p.ImportMap { + pkg.Imports[path] = &Package{ID: id} // non-identity import + delete(ids, id) + } + for id := range ids { + if id == "C" { + continue + } + + pkg.Imports[id] = &Package{ID: id} // identity import + } + if !p.DepOnly { + response.Roots = append(response.Roots, pkg.ID) + } + + // Work around for pre-go.1.11 versions of go list. + // TODO(matloob): they should be handled by the fallback. + // Can we delete this? + if len(pkg.CompiledGoFiles) == 0 { + pkg.CompiledGoFiles = pkg.GoFiles + } + + // Temporary work-around for golang/go#39986. Parse filenames out of + // error messages. This happens if there are unrecoverable syntax + // errors in the source, so we can't match on a specific error message. 
+ if err := p.Error; err != nil && state.shouldAddFilenameFromError(p) { + addFilenameFromPos := func(pos string) bool { + split := strings.Split(pos, ":") + if len(split) < 1 { + return false + } + filename := strings.TrimSpace(split[0]) + if filename == "" { + return false + } + if !filepath.IsAbs(filename) { + filename = filepath.Join(state.cfg.Dir, filename) + } + info, _ := os.Stat(filename) + if info == nil { + return false + } + pkg.CompiledGoFiles = append(pkg.CompiledGoFiles, filename) + pkg.GoFiles = append(pkg.GoFiles, filename) + return true + } + found := addFilenameFromPos(err.Pos) + // In some cases, go list only reports the error position in the + // error text, not the error position. One such case is when the + // file's package name is a keyword (see golang.org/issue/39763). + if !found { + addFilenameFromPos(err.Err) + } + } + + if p.Error != nil { + msg := strings.TrimSpace(p.Error.Err) // Trim to work around golang.org/issue/32363. + // Address golang.org/issue/35964 by appending import stack to error message. + if msg == "import cycle not allowed" && len(p.Error.ImportStack) != 0 { + msg += fmt.Sprintf(": import stack: %v", p.Error.ImportStack) + } + pkg.Errors = append(pkg.Errors, Error{ + Pos: p.Error.Pos, + Msg: msg, + Kind: ListError, + }) + } + + pkgs[pkg.ID] = pkg + } + + for id, errs := range additionalErrors { + if p, ok := pkgs[id]; ok { + p.Errors = append(p.Errors, errs...) + } + } + for _, pkg := range pkgs { + response.Packages = append(response.Packages, pkg) + } + sort.Slice(response.Packages, func(i, j int) bool { return response.Packages[i].ID < response.Packages[j].ID }) + + return &response, nil +} + +func (state *golistState) shouldAddFilenameFromError(p *jsonPackage) bool { + if len(p.GoFiles) > 0 || len(p.CompiledGoFiles) > 0 { + return false + } + + goV, err := state.getGoVersion() + if err != nil { + return false + } + + // On Go 1.14 and earlier, only add filenames from errors if the import stack is empty. + // The import stack behaves differently for these versions than newer Go versions. + if goV < 15 { + return len(p.Error.ImportStack) == 0 + } + + // On Go 1.15 and later, only parse filenames out of error if there's no import stack, + // or the current package is at the top of the import stack. This is not guaranteed + // to work perfectly, but should avoid some cases where files in errors don't belong to this + // package. + return len(p.Error.ImportStack) == 0 || p.Error.ImportStack[len(p.Error.ImportStack)-1] == p.ImportPath +} + +func (state *golistState) getGoVersion() (int, error) { + state.goVersionOnce.Do(func() { + state.goVersion, state.goVersionError = gocommand.GoVersion(state.ctx, state.cfgInvocation(), state.cfg.gocmdRunner) + }) + return state.goVersion, state.goVersionError +} + +// getPkgPath finds the package path of a directory if it's relative to a root +// directory. +func (state *golistState) getPkgPath(dir string) (string, bool, error) { + absDir, err := filepath.Abs(dir) + if err != nil { + return "", false, err + } + roots, err := state.determineRootDirs() + if err != nil { + return "", false, err + } + + for rdir, rpath := range roots { + // Make sure that the directory is in the module, + // to avoid creating a path relative to another module. + if !strings.HasPrefix(absDir, rdir) { + continue + } + // TODO(matloob): This doesn't properly handle symlinks. 
+ r, err := filepath.Rel(rdir, dir) + if err != nil { + continue + } + if rpath != "" { + // We choose only one root even though the directory even it can belong in multiple modules + // or GOPATH entries. This is okay because we only need to work with absolute dirs when a + // file is missing from disk, for instance when gopls calls go/packages in an overlay. + // Once the file is saved, gopls, or the next invocation of the tool will get the correct + // result straight from golist. + // TODO(matloob): Implement module tiebreaking? + return path.Join(rpath, filepath.ToSlash(r)), true, nil + } + return filepath.ToSlash(r), true, nil + } + return "", false, nil +} + +// absJoin absolutizes and flattens the lists of files. +func absJoin(dir string, fileses ...[]string) (res []string) { + for _, files := range fileses { + for _, file := range files { + if !filepath.IsAbs(file) { + file = filepath.Join(dir, file) + } + res = append(res, file) + } + } + return res +} + +func golistargs(cfg *Config, words []string) []string { + const findFlags = NeedImports | NeedTypes | NeedSyntax | NeedTypesInfo + fullargs := []string{ + "-e", "-json", + fmt.Sprintf("-compiled=%t", cfg.Mode&(NeedCompiledGoFiles|NeedSyntax|NeedTypes|NeedTypesInfo|NeedTypesSizes) != 0), + fmt.Sprintf("-test=%t", cfg.Tests), + fmt.Sprintf("-export=%t", usesExportData(cfg)), + fmt.Sprintf("-deps=%t", cfg.Mode&NeedImports != 0), + // go list doesn't let you pass -test and -find together, + // probably because you'd just get the TestMain. + fmt.Sprintf("-find=%t", !cfg.Tests && cfg.Mode&findFlags == 0), + } + fullargs = append(fullargs, cfg.BuildFlags...) + fullargs = append(fullargs, "--") + fullargs = append(fullargs, words...) + return fullargs +} + +// cfgInvocation returns an Invocation that reflects cfg's settings. +func (state *golistState) cfgInvocation() gocommand.Invocation { + cfg := state.cfg + return gocommand.Invocation{ + BuildFlags: cfg.BuildFlags, + ModFile: cfg.modFile, + ModFlag: cfg.modFlag, + CleanEnv: cfg.Env != nil, + Env: cfg.Env, + Logf: cfg.Logf, + WorkingDir: cfg.Dir, + } +} + +// invokeGo returns the stdout of a go command invocation. +func (state *golistState) invokeGo(verb string, args ...string) (*bytes.Buffer, error) { + cfg := state.cfg + + inv := state.cfgInvocation() + + // For Go versions 1.16 and above, `go list` accepts overlays directly via + // the -overlay flag. Set it, if it's available. + // + // The check for "list" is not necessarily required, but we should avoid + // getting the go version if possible. + if verb == "list" { + goVersion, err := state.getGoVersion() + if err != nil { + return nil, err + } + if goVersion >= 16 { + filename, cleanup, err := state.writeOverlays() + if err != nil { + return nil, err + } + defer cleanup() + inv.Overlay = filename + } + } + inv.Verb = verb + inv.Args = args + gocmdRunner := cfg.gocmdRunner + if gocmdRunner == nil { + gocmdRunner = &gocommand.Runner{} + } + stdout, stderr, friendlyErr, err := gocmdRunner.RunRaw(cfg.Context, inv) + if err != nil { + // Check for 'go' executable not being found. + if ee, ok := err.(*exec.Error); ok && ee.Err == exec.ErrNotFound { + return nil, fmt.Errorf("'go list' driver requires 'go', but %s", exec.ErrNotFound) + } + + exitErr, ok := err.(*exec.ExitError) + if !ok { + // Catastrophic error: + // - context cancellation + return nil, xerrors.Errorf("couldn't run 'go': %w", err) + } + + // Old go version? 
+ if strings.Contains(stderr.String(), "flag provided but not defined") { + return nil, goTooOldError{fmt.Errorf("unsupported version of go: %s: %s", exitErr, stderr)} + } + + // Related to #24854 + if len(stderr.String()) > 0 && strings.Contains(stderr.String(), "unexpected directory layout") { + return nil, friendlyErr + } + + // Is there an error running the C compiler in cgo? This will be reported in the "Error" field + // and should be suppressed by go list -e. + // + // This condition is not perfect yet because the error message can include other error messages than runtime/cgo. + isPkgPathRune := func(r rune) bool { + // From https://golang.org/ref/spec#Import_declarations: + // Implementation restriction: A compiler may restrict ImportPaths to non-empty strings + // using only characters belonging to Unicode's L, M, N, P, and S general categories + // (the Graphic characters without spaces) and may also exclude the + // characters !"#$%&'()*,:;<=>?[\]^`{|} and the Unicode replacement character U+FFFD. + return unicode.IsOneOf([]*unicode.RangeTable{unicode.L, unicode.M, unicode.N, unicode.P, unicode.S}, r) && + !strings.ContainsRune("!\"#$%&'()*,:;<=>?[\\]^`{|}\uFFFD", r) + } + // golang/go#36770: Handle case where cmd/go prints module download messages before the error. + msg := stderr.String() + for strings.HasPrefix(msg, "go: downloading") { + msg = msg[strings.IndexRune(msg, '\n')+1:] + } + if len(stderr.String()) > 0 && strings.HasPrefix(stderr.String(), "# ") { + msg := msg[len("# "):] + if strings.HasPrefix(strings.TrimLeftFunc(msg, isPkgPathRune), "\n") { + return stdout, nil + } + // Treat pkg-config errors as a special case (golang.org/issue/36770). + if strings.HasPrefix(msg, "pkg-config") { + return stdout, nil + } + } + + // This error only appears in stderr. See golang.org/cl/166398 for a fix in go list to show + // the error in the Err section of stdout in case -e option is provided. + // This fix is provided for backwards compatibility. + if len(stderr.String()) > 0 && strings.Contains(stderr.String(), "named files must be .go files") { + output := fmt.Sprintf(`{"ImportPath": "command-line-arguments","Incomplete": true,"Error": {"Pos": "","Err": %q}}`, + strings.Trim(stderr.String(), "\n")) + return bytes.NewBufferString(output), nil + } + + // Similar to the previous error, but currently lacks a fix in Go. + if len(stderr.String()) > 0 && strings.Contains(stderr.String(), "named files must all be in one directory") { + output := fmt.Sprintf(`{"ImportPath": "command-line-arguments","Incomplete": true,"Error": {"Pos": "","Err": %q}}`, + strings.Trim(stderr.String(), "\n")) + return bytes.NewBufferString(output), nil + } + + // Backwards compatibility for Go 1.11 because 1.12 and 1.13 put the directory in the ImportPath. + // If the package doesn't exist, put the absolute path of the directory into the error message, + // as Go 1.13 list does. + const noSuchDirectory = "no such directory" + if len(stderr.String()) > 0 && strings.Contains(stderr.String(), noSuchDirectory) { + errstr := stderr.String() + abspath := strings.TrimSpace(errstr[strings.Index(errstr, noSuchDirectory)+len(noSuchDirectory):]) + output := fmt.Sprintf(`{"ImportPath": %q,"Incomplete": true,"Error": {"Pos": "","Err": %q}}`, + abspath, strings.Trim(stderr.String(), "\n")) + return bytes.NewBufferString(output), nil + } + + // Workaround for #29280: go list -e has incorrect behavior when an ad-hoc package doesn't exist. 
+ // Note that the error message we look for in this case is different that the one looked for above. + if len(stderr.String()) > 0 && strings.Contains(stderr.String(), "no such file or directory") { + output := fmt.Sprintf(`{"ImportPath": "command-line-arguments","Incomplete": true,"Error": {"Pos": "","Err": %q}}`, + strings.Trim(stderr.String(), "\n")) + return bytes.NewBufferString(output), nil + } + + // Workaround for #34273. go list -e with GO111MODULE=on has incorrect behavior when listing a + // directory outside any module. + if len(stderr.String()) > 0 && strings.Contains(stderr.String(), "outside available modules") { + output := fmt.Sprintf(`{"ImportPath": %q,"Incomplete": true,"Error": {"Pos": "","Err": %q}}`, + // TODO(matloob): command-line-arguments isn't correct here. + "command-line-arguments", strings.Trim(stderr.String(), "\n")) + return bytes.NewBufferString(output), nil + } + + // Another variation of the previous error + if len(stderr.String()) > 0 && strings.Contains(stderr.String(), "outside module root") { + output := fmt.Sprintf(`{"ImportPath": %q,"Incomplete": true,"Error": {"Pos": "","Err": %q}}`, + // TODO(matloob): command-line-arguments isn't correct here. + "command-line-arguments", strings.Trim(stderr.String(), "\n")) + return bytes.NewBufferString(output), nil + } + + // Workaround for an instance of golang.org/issue/26755: go list -e will return a non-zero exit + // status if there's a dependency on a package that doesn't exist. But it should return + // a zero exit status and set an error on that package. + if len(stderr.String()) > 0 && strings.Contains(stderr.String(), "no Go files in") { + // Don't clobber stdout if `go list` actually returned something. + if len(stdout.String()) > 0 { + return stdout, nil + } + // try to extract package name from string + stderrStr := stderr.String() + var importPath string + colon := strings.Index(stderrStr, ":") + if colon > 0 && strings.HasPrefix(stderrStr, "go build ") { + importPath = stderrStr[len("go build "):colon] + } + output := fmt.Sprintf(`{"ImportPath": %q,"Incomplete": true,"Error": {"Pos": "","Err": %q}}`, + importPath, strings.Trim(stderrStr, "\n")) + return bytes.NewBufferString(output), nil + } + + // Export mode entails a build. + // If that build fails, errors appear on stderr + // (despite the -e flag) and the Export field is blank. + // Do not fail in that case. + // The same is true if an ad-hoc package given to go list doesn't exist. + // TODO(matloob): Remove these once we can depend on go list to exit with a zero status with -e even when + // packages don't exist or a build fails. + if !usesExportData(cfg) && !containsGoFile(args) { + return nil, friendlyErr + } + } + return stdout, nil +} + +// OverlayJSON is the format overlay files are expected to be in. +// The Replace map maps from overlaid paths to replacement paths: +// the Go command will forward all reads trying to open +// each overlaid path to its replacement path, or consider the overlaid +// path not to exist if the replacement path is empty. +// +// From golang/go#39958. +type OverlayJSON struct { + Replace map[string]string `json:"replace,omitempty"` +} + +// writeOverlays writes out files for go list's -overlay flag, as described +// above. +func (state *golistState) writeOverlays() (filename string, cleanup func(), err error) { + // Do nothing if there are no overlays in the config. 
+ if len(state.cfg.Overlay) == 0 { + return "", func() {}, nil + } + dir, err := ioutil.TempDir("", "gopackages-*") + if err != nil { + return "", nil, err + } + // The caller must clean up this directory, unless this function returns an + // error. + cleanup = func() { + os.RemoveAll(dir) + } + defer func() { + if err != nil { + cleanup() + } + }() + overlays := map[string]string{} + for k, v := range state.cfg.Overlay { + // Create a unique filename for the overlaid files, to avoid + // creating nested directories. + noSeparator := strings.Join(strings.Split(filepath.ToSlash(k), "/"), "") + f, err := ioutil.TempFile(dir, fmt.Sprintf("*-%s", noSeparator)) + if err != nil { + return "", func() {}, err + } + if _, err := f.Write(v); err != nil { + return "", func() {}, err + } + if err := f.Close(); err != nil { + return "", func() {}, err + } + overlays[k] = f.Name() + } + b, err := json.Marshal(OverlayJSON{Replace: overlays}) + if err != nil { + return "", func() {}, err + } + // Write out the overlay file that contains the filepath mappings. + filename = filepath.Join(dir, "overlay.json") + if err := ioutil.WriteFile(filename, b, 0665); err != nil { + return "", func() {}, err + } + return filename, cleanup, nil +} + +func containsGoFile(s []string) bool { + for _, f := range s { + if strings.HasSuffix(f, ".go") { + return true + } + } + return false +} + +func cmdDebugStr(cmd *exec.Cmd) string { + env := make(map[string]string) + for _, kv := range cmd.Env { + split := strings.SplitN(kv, "=", 2) + k, v := split[0], split[1] + env[k] = v + } + + var args []string + for _, arg := range cmd.Args { + quoted := strconv.Quote(arg) + if quoted[1:len(quoted)-1] != arg || strings.Contains(arg, " ") { + args = append(args, quoted) + } else { + args = append(args, arg) + } + } + return fmt.Sprintf("GOROOT=%v GOPATH=%v GO111MODULE=%v GOPROXY=%v PWD=%v %v", env["GOROOT"], env["GOPATH"], env["GO111MODULE"], env["GOPROXY"], env["PWD"], strings.Join(args, " ")) +} diff --git a/vendor/golang.org/x/tools/go/packages/golist_overlay.go b/vendor/golang.org/x/tools/go/packages/golist_overlay.go new file mode 100644 index 000000000..9576b472f --- /dev/null +++ b/vendor/golang.org/x/tools/go/packages/golist_overlay.go @@ -0,0 +1,575 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package packages + +import ( + "encoding/json" + "fmt" + "go/parser" + "go/token" + "os" + "path/filepath" + "regexp" + "sort" + "strconv" + "strings" + + "golang.org/x/tools/internal/gocommand" +) + +// processGolistOverlay provides rudimentary support for adding +// files that don't exist on disk to an overlay. The results can be +// sometimes incorrect. +// TODO(matloob): Handle unsupported cases, including the following: +// - determining the correct package to add given a new import path +func (state *golistState) processGolistOverlay(response *responseDeduper) (modifiedPkgs, needPkgs []string, err error) { + havePkgs := make(map[string]string) // importPath -> non-test package ID + needPkgsSet := make(map[string]bool) + modifiedPkgsSet := make(map[string]bool) + + pkgOfDir := make(map[string][]*Package) + for _, pkg := range response.dr.Packages { + // This is an approximation of import path to id. This can be + // wrong for tests, vendored packages, and a number of other cases. 
+ havePkgs[pkg.PkgPath] = pkg.ID + dir, err := commonDir(pkg.GoFiles) + if err != nil { + return nil, nil, err + } + if dir != "" { + pkgOfDir[dir] = append(pkgOfDir[dir], pkg) + } + } + + // If no new imports are added, it is safe to avoid loading any needPkgs. + // Otherwise, it's hard to tell which package is actually being loaded + // (due to vendoring) and whether any modified package will show up + // in the transitive set of dependencies (because new imports are added, + // potentially modifying the transitive set of dependencies). + var overlayAddsImports bool + + // If both a package and its test package are created by the overlay, we + // need the real package first. Process all non-test files before test + // files, and make the whole process deterministic while we're at it. + var overlayFiles []string + for opath := range state.cfg.Overlay { + overlayFiles = append(overlayFiles, opath) + } + sort.Slice(overlayFiles, func(i, j int) bool { + iTest := strings.HasSuffix(overlayFiles[i], "_test.go") + jTest := strings.HasSuffix(overlayFiles[j], "_test.go") + if iTest != jTest { + return !iTest // non-tests are before tests. + } + return overlayFiles[i] < overlayFiles[j] + }) + for _, opath := range overlayFiles { + contents := state.cfg.Overlay[opath] + base := filepath.Base(opath) + dir := filepath.Dir(opath) + var pkg *Package // if opath belongs to both a package and its test variant, this will be the test variant + var testVariantOf *Package // if opath is a test file, this is the package it is testing + var fileExists bool + isTestFile := strings.HasSuffix(opath, "_test.go") + pkgName, ok := extractPackageName(opath, contents) + if !ok { + // Don't bother adding a file that doesn't even have a parsable package statement + // to the overlay. + continue + } + // If all the overlay files belong to a different package, change the + // package name to that package. + maybeFixPackageName(pkgName, isTestFile, pkgOfDir[dir]) + nextPackage: + for _, p := range response.dr.Packages { + if pkgName != p.Name && p.ID != "command-line-arguments" { + continue + } + for _, f := range p.GoFiles { + if !sameFile(filepath.Dir(f), dir) { + continue + } + // Make sure to capture information on the package's test variant, if needed. + if isTestFile && !hasTestFiles(p) { + // TODO(matloob): Are there packages other than the 'production' variant + // of a package that this can match? This shouldn't match the test main package + // because the file is generated in another directory. + testVariantOf = p + continue nextPackage + } else if !isTestFile && hasTestFiles(p) { + // We're examining a test variant, but the overlaid file is + // a non-test file. Because the overlay implementation + // (currently) only adds a file to one package, skip this + // package, so that we can add the file to the production + // variant of the package. (https://golang.org/issue/36857 + // tracks handling overlays on both the production and test + // variant of a package). + continue nextPackage + } + if pkg != nil && p != pkg && pkg.PkgPath == p.PkgPath { + // We have already seen the production version of the + // for which p is a test variant. + if hasTestFiles(p) { + testVariantOf = pkg + } + } + pkg = p + if filepath.Base(f) == base { + fileExists = true + } + } + } + // The overlay could have included an entirely new package or an + // ad-hoc package. An ad-hoc package is one that we have manually + // constructed from inadequate `go list` results for a file= query. + // It will have the ID command-line-arguments. 
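Earlier in this pass the overlay file names were ordered so that non-test files are processed before test files. A standalone sketch of that comparator on hypothetical names:

    package main

    import (
    	"fmt"
    	"sort"
    	"strings"
    )

    func main() {
    	files := []string{"/d/b_test.go", "/d/a_test.go", "/d/a.go"}
    	sort.Slice(files, func(i, j int) bool {
    		iTest := strings.HasSuffix(files[i], "_test.go")
    		jTest := strings.HasSuffix(files[j], "_test.go")
    		if iTest != jTest {
    			return !iTest // non-tests sort before tests
    		}
    		return files[i] < files[j]
    	})
    	fmt.Println(files) // [/d/a.go /d/a_test.go /d/b_test.go]
    }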
+ if pkg == nil || pkg.ID == "command-line-arguments" { + // Try to find the module or gopath dir the file is contained in. + // Then for modules, add the module opath to the beginning. + pkgPath, ok, err := state.getPkgPath(dir) + if err != nil { + return nil, nil, err + } + if !ok { + break + } + var forTest string // only set for x tests + isXTest := strings.HasSuffix(pkgName, "_test") + if isXTest { + forTest = pkgPath + pkgPath += "_test" + } + id := pkgPath + if isTestFile { + if isXTest { + id = fmt.Sprintf("%s [%s.test]", pkgPath, forTest) + } else { + id = fmt.Sprintf("%s [%s.test]", pkgPath, pkgPath) + } + } + if pkg != nil { + // TODO(rstambler): We should change the package's path and ID + // here. The only issue is that this messes with the roots. + } else { + // Try to reclaim a package with the same ID, if it exists in the response. + for _, p := range response.dr.Packages { + if reclaimPackage(p, id, opath, contents) { + pkg = p + break + } + } + // Otherwise, create a new package. + if pkg == nil { + pkg = &Package{ + PkgPath: pkgPath, + ID: id, + Name: pkgName, + Imports: make(map[string]*Package), + } + response.addPackage(pkg) + havePkgs[pkg.PkgPath] = id + // Add the production package's sources for a test variant. + if isTestFile && !isXTest && testVariantOf != nil { + pkg.GoFiles = append(pkg.GoFiles, testVariantOf.GoFiles...) + pkg.CompiledGoFiles = append(pkg.CompiledGoFiles, testVariantOf.CompiledGoFiles...) + // Add the package under test and its imports to the test variant. + pkg.forTest = testVariantOf.PkgPath + for k, v := range testVariantOf.Imports { + pkg.Imports[k] = &Package{ID: v.ID} + } + } + if isXTest { + pkg.forTest = forTest + } + } + } + } + if !fileExists { + pkg.GoFiles = append(pkg.GoFiles, opath) + // TODO(matloob): Adding the file to CompiledGoFiles can exhibit the wrong behavior + // if the file will be ignored due to its build tags. + pkg.CompiledGoFiles = append(pkg.CompiledGoFiles, opath) + modifiedPkgsSet[pkg.ID] = true + } + imports, err := extractImports(opath, contents) + if err != nil { + // Let the parser or type checker report errors later. + continue + } + for _, imp := range imports { + // TODO(rstambler): If the package is an x test and the import has + // a test variant, make sure to replace it. + if _, found := pkg.Imports[imp]; found { + continue + } + overlayAddsImports = true + id, ok := havePkgs[imp] + if !ok { + var err error + id, err = state.resolveImport(dir, imp) + if err != nil { + return nil, nil, err + } + } + pkg.Imports[imp] = &Package{ID: id} + // Add dependencies to the non-test variant version of this package as well. + if testVariantOf != nil { + testVariantOf.Imports[imp] = &Package{ID: id} + } + } + } + + // toPkgPath guesses the package path given the id. + toPkgPath := func(sourceDir, id string) (string, error) { + if i := strings.IndexByte(id, ' '); i >= 0 { + return state.resolveImport(sourceDir, id[:i]) + } + return state.resolveImport(sourceDir, id) + } + + // Now that new packages have been created, do another pass to determine + // the new set of missing packages. 
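A worked example of the test-variant IDs constructed above, for a hypothetical import path example.com/m/foo: an in-package test keeps the package path, an external (x) test gets a _test-suffixed path, and both are tagged with the package under test.

    package main

    import "fmt"

    func main() {
    	pkgPath := "example.com/m/foo" // hypothetical

    	// In-package test file (package foo, file foo_test.go):
    	fmt.Printf("%s [%s.test]\n", pkgPath, pkgPath) // example.com/m/foo [example.com/m/foo.test]

    	// External test file (package foo_test): forTest is the package under test.
    	forTest := pkgPath
    	xPath := pkgPath + "_test"
    	fmt.Printf("%s [%s.test]\n", xPath, forTest) // example.com/m/foo_test [example.com/m/foo.test]
    }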
+ for _, pkg := range response.dr.Packages { + for _, imp := range pkg.Imports { + if len(pkg.GoFiles) == 0 { + return nil, nil, fmt.Errorf("cannot resolve imports for package %q with no Go files", pkg.PkgPath) + } + pkgPath, err := toPkgPath(filepath.Dir(pkg.GoFiles[0]), imp.ID) + if err != nil { + return nil, nil, err + } + if _, ok := havePkgs[pkgPath]; !ok { + needPkgsSet[pkgPath] = true + } + } + } + + if overlayAddsImports { + needPkgs = make([]string, 0, len(needPkgsSet)) + for pkg := range needPkgsSet { + needPkgs = append(needPkgs, pkg) + } + } + modifiedPkgs = make([]string, 0, len(modifiedPkgsSet)) + for pkg := range modifiedPkgsSet { + modifiedPkgs = append(modifiedPkgs, pkg) + } + return modifiedPkgs, needPkgs, err +} + +// resolveImport finds the ID of a package given its import path. +// In particular, it will find the right vendored copy when in GOPATH mode. +func (state *golistState) resolveImport(sourceDir, importPath string) (string, error) { + env, err := state.getEnv() + if err != nil { + return "", err + } + if env["GOMOD"] != "" { + return importPath, nil + } + + searchDir := sourceDir + for { + vendorDir := filepath.Join(searchDir, "vendor") + exists, ok := state.vendorDirs[vendorDir] + if !ok { + info, err := os.Stat(vendorDir) + exists = err == nil && info.IsDir() + state.vendorDirs[vendorDir] = exists + } + + if exists { + vendoredPath := filepath.Join(vendorDir, importPath) + if info, err := os.Stat(vendoredPath); err == nil && info.IsDir() { + // We should probably check for .go files here, but shame on anyone who fools us. + path, ok, err := state.getPkgPath(vendoredPath) + if err != nil { + return "", err + } + if ok { + return path, nil + } + } + } + + // We know we've hit the top of the filesystem when we Dir / and get /, + // or C:\ and get C:\, etc. + next := filepath.Dir(searchDir) + if next == searchDir { + break + } + searchDir = next + } + return importPath, nil +} + +func hasTestFiles(p *Package) bool { + for _, f := range p.GoFiles { + if strings.HasSuffix(f, "_test.go") { + return true + } + } + return false +} + +// determineRootDirs returns a mapping from absolute directories that could +// contain code to their corresponding import path prefixes. +func (state *golistState) determineRootDirs() (map[string]string, error) { + env, err := state.getEnv() + if err != nil { + return nil, err + } + if env["GOMOD"] != "" { + state.rootsOnce.Do(func() { + state.rootDirs, state.rootDirsError = state.determineRootDirsModules() + }) + } else { + state.rootsOnce.Do(func() { + state.rootDirs, state.rootDirsError = state.determineRootDirsGOPATH() + }) + } + return state.rootDirs, state.rootDirsError +} + +func (state *golistState) determineRootDirsModules() (map[string]string, error) { + // List all of the modules--the first will be the directory for the main + // module. Any replaced modules will also need to be treated as roots. + // Editing files in the module cache isn't a great idea, so we don't + // plan to ever support that. + out, err := state.invokeGo("list", "-m", "-json", "all") + if err != nil { + // 'go list all' will fail if we're outside of a module and + // GO111MODULE=on. Try falling back without 'all'. 
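The vendor lookup in resolveImport above walks from the source directory toward the filesystem root, probing each vendor directory in turn (GOPATH mode only). A standalone sketch that lists the candidate paths for hypothetical inputs:

    package main

    import (
    	"fmt"
    	"path/filepath"
    )

    // vendorCandidates lists, in order, the vendored locations that would be
    // probed for importPath starting from sourceDir.
    func vendorCandidates(sourceDir, importPath string) []string {
    	var out []string
    	dir := sourceDir
    	for {
    		out = append(out, filepath.Join(dir, "vendor", importPath))
    		next := filepath.Dir(dir)
    		if next == dir { // reached the filesystem root
    			break
    		}
    		dir = next
    	}
    	return out
    }

    func main() {
    	for _, p := range vendorCandidates("/gopath/src/proj/pkg", "a/b") {
    		fmt.Println(p)
    	}
    	// /gopath/src/proj/pkg/vendor/a/b, /gopath/src/proj/vendor/a/b, ... up to /vendor/a/b
    }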
+ var innerErr error + out, innerErr = state.invokeGo("list", "-m", "-json") + if innerErr != nil { + return nil, err + } + } + roots := map[string]string{} + modules := map[string]string{} + var i int + for dec := json.NewDecoder(out); dec.More(); { + mod := new(gocommand.ModuleJSON) + if err := dec.Decode(mod); err != nil { + return nil, err + } + if mod.Dir != "" && mod.Path != "" { + // This is a valid module; add it to the map. + absDir, err := filepath.Abs(mod.Dir) + if err != nil { + return nil, err + } + modules[absDir] = mod.Path + // The first result is the main module. + if i == 0 || mod.Replace != nil && mod.Replace.Path != "" { + roots[absDir] = mod.Path + } + } + i++ + } + return roots, nil +} + +func (state *golistState) determineRootDirsGOPATH() (map[string]string, error) { + m := map[string]string{} + for _, dir := range filepath.SplitList(state.mustGetEnv()["GOPATH"]) { + absDir, err := filepath.Abs(dir) + if err != nil { + return nil, err + } + m[filepath.Join(absDir, "src")] = "" + } + return m, nil +} + +func extractImports(filename string, contents []byte) ([]string, error) { + f, err := parser.ParseFile(token.NewFileSet(), filename, contents, parser.ImportsOnly) // TODO(matloob): reuse fileset? + if err != nil { + return nil, err + } + var res []string + for _, imp := range f.Imports { + quotedPath := imp.Path.Value + path, err := strconv.Unquote(quotedPath) + if err != nil { + return nil, err + } + res = append(res, path) + } + return res, nil +} + +// reclaimPackage attempts to reuse a package that failed to load in an overlay. +// +// If the package has errors and has no Name, GoFiles, or Imports, +// then it's possible that it doesn't yet exist on disk. +func reclaimPackage(pkg *Package, id string, filename string, contents []byte) bool { + // TODO(rstambler): Check the message of the actual error? + // It differs between $GOPATH and module mode. + if pkg.ID != id { + return false + } + if len(pkg.Errors) != 1 { + return false + } + if pkg.Name != "" || pkg.ExportFile != "" { + return false + } + if len(pkg.GoFiles) > 0 || len(pkg.CompiledGoFiles) > 0 || len(pkg.OtherFiles) > 0 { + return false + } + if len(pkg.Imports) > 0 { + return false + } + pkgName, ok := extractPackageName(filename, contents) + if !ok { + return false + } + pkg.Name = pkgName + pkg.Errors = nil + return true +} + +func extractPackageName(filename string, contents []byte) (string, bool) { + // TODO(rstambler): Check the message of the actual error? + // It differs between $GOPATH and module mode. + f, err := parser.ParseFile(token.NewFileSet(), filename, contents, parser.PackageClauseOnly) // TODO(matloob): reuse fileset? + if err != nil { + return "", false + } + return f.Name.Name, true +} + +// commonDir returns the directory that all files are in, "" if files is empty, +// or an error if they aren't in the same directory. +func commonDir(files []string) (string, error) { + seen := make(map[string]bool) + for _, f := range files { + seen[filepath.Dir(f)] = true + } + if len(seen) > 1 { + return "", fmt.Errorf("files (%v) are in more than one directory: %v", files, seen) + } + for k := range seen { + // seen has only one element; return it. + return k, nil + } + return "", nil // no files +} + +// It is possible that the files in the disk directory dir have a different package +// name from newName, which is deduced from the overlays. If they all have a different +// package name, and they all have the same package name, then that name becomes +// the package name. 
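go list -m -json emits a stream of JSON objects rather than an array, which is why determineRootDirsModules above decodes in a loop. A standalone sketch of that decode pattern over hypothetical output:

    package main

    import (
    	"encoding/json"
    	"fmt"
    	"log"
    	"strings"
    )

    // modJSON covers only the fields the root computation relies on.
    type modJSON struct {
    	Path string
    	Dir  string
    }

    func main() {
    	out := `{"Path":"example.com/m","Dir":"/work/m"}
    {"Path":"example.com/dep","Dir":"/home/u/go/pkg/mod/example.com/dep@v1.0.0"}`
    	for dec := json.NewDecoder(strings.NewReader(out)); dec.More(); {
    		var m modJSON
    		if err := dec.Decode(&m); err != nil {
    			log.Fatal(err)
    		}
    		fmt.Println(m.Path, "->", m.Dir)
    	}
    }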
+// It returns true if it changes the package name, false otherwise. +func maybeFixPackageName(newName string, isTestFile bool, pkgsOfDir []*Package) { + names := make(map[string]int) + for _, p := range pkgsOfDir { + names[p.Name]++ + } + if len(names) != 1 { + // some files are in different packages + return + } + var oldName string + for k := range names { + oldName = k + } + if newName == oldName { + return + } + // We might have a case where all of the package names in the directory are + // the same, but the overlay file is for an x test, which belongs to its + // own package. If the x test does not yet exist on disk, we may not yet + // have its package name on disk, but we should not rename the packages. + // + // We use a heuristic to determine if this file belongs to an x test: + // The test file should have a package name whose package name has a _test + // suffix or looks like "newName_test". + maybeXTest := strings.HasPrefix(oldName+"_test", newName) || strings.HasSuffix(newName, "_test") + if isTestFile && maybeXTest { + return + } + for _, p := range pkgsOfDir { + p.Name = newName + } +} + +// This function is copy-pasted from +// https://github.com/golang/go/blob/9706f510a5e2754595d716bd64be8375997311fb/src/cmd/go/internal/search/search.go#L360. +// It should be deleted when we remove support for overlays from go/packages. +// +// NOTE: This does not handle any ./... or ./ style queries, as this function +// doesn't know the working directory. +// +// matchPattern(pattern)(name) reports whether +// name matches pattern. Pattern is a limited glob +// pattern in which '...' means 'any string' and there +// is no other special syntax. +// Unfortunately, there are two special cases. Quoting "go help packages": +// +// First, /... at the end of the pattern can match an empty string, +// so that net/... matches both net and packages in its subdirectories, like net/http. +// Second, any slash-separated pattern element containing a wildcard never +// participates in a match of the "vendor" element in the path of a vendored +// package, so that ./... does not match packages in subdirectories of +// ./vendor or ./mycode/vendor, but ./vendor/... and ./mycode/vendor/... do. +// Note, however, that a directory named vendor that itself contains code +// is not a vendored package: cmd/vendor would be a command named vendor, +// and the pattern cmd/... matches it. +func matchPattern(pattern string) func(name string) bool { + // Convert pattern to regular expression. + // The strategy for the trailing /... is to nest it in an explicit ? expression. + // The strategy for the vendor exclusion is to change the unmatchable + // vendor strings to a disallowed code point (vendorChar) and to use + // "(anything but that codepoint)*" as the implementation of the ... wildcard. + // This is a bit complicated but the obvious alternative, + // namely a hand-written search like in most shell glob matchers, + // is too easy to make accidentally exponential. + // Using package regexp guarantees linear-time matching. 
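As a quick illustration of the rules quoted above, a sketch of behavior one could check next to this file; matchPattern is unexported, so this assumes placement in the same package and is not part of the upstream sources:

    package packages

    import "fmt"

    // matchPatternSketch is illustrative only.
    func matchPatternSketch() {
    	m := matchPattern("net/...")
    	fmt.Println(m("net"), m("net/http"), m("nett")) // true true false

    	v := matchPattern("example.com/...")
    	// A wildcard element never matches a "vendor" path element:
    	fmt.Println(v("example.com/vendor/foo")) // false
    }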
+ + const vendorChar = "\x00" + + if strings.Contains(pattern, vendorChar) { + return func(name string) bool { return false } + } + + re := regexp.QuoteMeta(pattern) + re = replaceVendor(re, vendorChar) + switch { + case strings.HasSuffix(re, `/`+vendorChar+`/\.\.\.`): + re = strings.TrimSuffix(re, `/`+vendorChar+`/\.\.\.`) + `(/vendor|/` + vendorChar + `/\.\.\.)` + case re == vendorChar+`/\.\.\.`: + re = `(/vendor|/` + vendorChar + `/\.\.\.)` + case strings.HasSuffix(re, `/\.\.\.`): + re = strings.TrimSuffix(re, `/\.\.\.`) + `(/\.\.\.)?` + } + re = strings.ReplaceAll(re, `\.\.\.`, `[^`+vendorChar+`]*`) + + reg := regexp.MustCompile(`^` + re + `$`) + + return func(name string) bool { + if strings.Contains(name, vendorChar) { + return false + } + return reg.MatchString(replaceVendor(name, vendorChar)) + } +} + +// replaceVendor returns the result of replacing +// non-trailing vendor path elements in x with repl. +func replaceVendor(x, repl string) string { + if !strings.Contains(x, "vendor") { + return x + } + elem := strings.Split(x, "/") + for i := 0; i < len(elem)-1; i++ { + if elem[i] == "vendor" { + elem[i] = repl + } + } + return strings.Join(elem, "/") +} diff --git a/vendor/golang.org/x/tools/go/packages/loadmode_string.go b/vendor/golang.org/x/tools/go/packages/loadmode_string.go new file mode 100644 index 000000000..7ea37e7ee --- /dev/null +++ b/vendor/golang.org/x/tools/go/packages/loadmode_string.go @@ -0,0 +1,57 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package packages + +import ( + "fmt" + "strings" +) + +var allModes = []LoadMode{ + NeedName, + NeedFiles, + NeedCompiledGoFiles, + NeedImports, + NeedDeps, + NeedExportsFile, + NeedTypes, + NeedSyntax, + NeedTypesInfo, + NeedTypesSizes, +} + +var modeStrings = []string{ + "NeedName", + "NeedFiles", + "NeedCompiledGoFiles", + "NeedImports", + "NeedDeps", + "NeedExportsFile", + "NeedTypes", + "NeedSyntax", + "NeedTypesInfo", + "NeedTypesSizes", +} + +func (mod LoadMode) String() string { + m := mod + if m == 0 { + return "LoadMode(0)" + } + var out []string + for i, x := range allModes { + if x > m { + break + } + if (m & x) != 0 { + out = append(out, modeStrings[i]) + m = m ^ x + } + } + if m != 0 { + out = append(out, "Unknown") + } + return fmt.Sprintf("LoadMode(%s)", strings.Join(out, "|")) +} diff --git a/vendor/golang.org/x/tools/go/packages/packages.go b/vendor/golang.org/x/tools/go/packages/packages.go new file mode 100644 index 000000000..8a1a2d681 --- /dev/null +++ b/vendor/golang.org/x/tools/go/packages/packages.go @@ -0,0 +1,1239 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package packages + +// See doc.go for package documentation and implementation notes. + +import ( + "context" + "encoding/json" + "fmt" + "go/ast" + "go/parser" + "go/scanner" + "go/token" + "go/types" + "io/ioutil" + "log" + "os" + "path/filepath" + "strings" + "sync" + "time" + + "golang.org/x/tools/go/gcexportdata" + "golang.org/x/tools/internal/gocommand" + "golang.org/x/tools/internal/packagesinternal" + "golang.org/x/tools/internal/typesinternal" +) + +// A LoadMode controls the amount of detail to return when loading. +// The bits below can be combined to specify which fields should be +// filled in the result packages. 
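For instance, the mode bits compose with bitwise OR, and the String method defined above renders the combination; a minimal sketch:

    package main

    import (
    	"fmt"

    	"golang.org/x/tools/go/packages"
    )

    func main() {
    	mode := packages.NeedName | packages.NeedFiles | packages.NeedImports
    	fmt.Println(mode) // LoadMode(NeedName|NeedFiles|NeedImports)
    }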
+// The zero value is a special case, equivalent to combining +// the NeedName, NeedFiles, and NeedCompiledGoFiles bits. +// ID and Errors (if present) will always be filled. +// Load may return more information than requested. +type LoadMode int + +// TODO(matloob): When a V2 of go/packages is released, rename NeedExportsFile to +// NeedExportFile to make it consistent with the Package field it's adding. + +const ( + // NeedName adds Name and PkgPath. + NeedName LoadMode = 1 << iota + + // NeedFiles adds GoFiles and OtherFiles. + NeedFiles + + // NeedCompiledGoFiles adds CompiledGoFiles. + NeedCompiledGoFiles + + // NeedImports adds Imports. If NeedDeps is not set, the Imports field will contain + // "placeholder" Packages with only the ID set. + NeedImports + + // NeedDeps adds the fields requested by the LoadMode in the packages in Imports. + NeedDeps + + // NeedExportsFile adds ExportFile. + NeedExportsFile + + // NeedTypes adds Types, Fset, and IllTyped. + NeedTypes + + // NeedSyntax adds Syntax. + NeedSyntax + + // NeedTypesInfo adds TypesInfo. + NeedTypesInfo + + // NeedTypesSizes adds TypesSizes. + NeedTypesSizes + + // typecheckCgo enables full support for type checking cgo. Requires Go 1.15+. + // Modifies CompiledGoFiles and Types, and has no effect on its own. + typecheckCgo + + // NeedModule adds Module. + NeedModule +) + +const ( + // Deprecated: LoadFiles exists for historical compatibility + // and should not be used. Please directly specify the needed fields using the Need values. + LoadFiles = NeedName | NeedFiles | NeedCompiledGoFiles + + // Deprecated: LoadImports exists for historical compatibility + // and should not be used. Please directly specify the needed fields using the Need values. + LoadImports = LoadFiles | NeedImports + + // Deprecated: LoadTypes exists for historical compatibility + // and should not be used. Please directly specify the needed fields using the Need values. + LoadTypes = LoadImports | NeedTypes | NeedTypesSizes + + // Deprecated: LoadSyntax exists for historical compatibility + // and should not be used. Please directly specify the needed fields using the Need values. + LoadSyntax = LoadTypes | NeedSyntax | NeedTypesInfo + + // Deprecated: LoadAllSyntax exists for historical compatibility + // and should not be used. Please directly specify the needed fields using the Need values. + LoadAllSyntax = LoadSyntax | NeedDeps +) + +// A Config specifies details about how packages should be loaded. +// The zero value is a valid configuration. +// Calls to Load do not modify this struct. +type Config struct { + // Mode controls the level of information returned for each package. + Mode LoadMode + + // Context specifies the context for the load operation. + // If the context is cancelled, the loader may stop early + // and return an ErrCancelled error. + // If Context is nil, the load cannot be cancelled. + Context context.Context + + // Logf is the logger for the config. + // If the user provides a logger, debug logging is enabled. + // If the GOPACKAGESDEBUG environment variable is set to true, + // but the logger is nil, default to log.Printf. + Logf func(format string, args ...interface{}) + + // Dir is the directory in which to run the build system's query tool + // that provides information about the packages. + // If Dir is empty, the tool is run in the current directory. + Dir string + + // Env is the environment to use when invoking the build system's query tool. + // If Env is nil, the current environment is used. 
+ // As in os/exec's Cmd, only the last value in the slice for + // each environment key is used. To specify the setting of only + // a few variables, append to the current environment, as in: + // + // opt.Env = append(os.Environ(), "GOOS=plan9", "GOARCH=386") + // + Env []string + + // gocmdRunner guards go command calls from concurrency errors. + gocmdRunner *gocommand.Runner + + // BuildFlags is a list of command-line flags to be passed through to + // the build system's query tool. + BuildFlags []string + + // modFile will be used for -modfile in go command invocations. + modFile string + + // modFlag will be used for -modfile in go command invocations. + modFlag string + + // Fset provides source position information for syntax trees and types. + // If Fset is nil, Load will use a new fileset, but preserve Fset's value. + Fset *token.FileSet + + // ParseFile is called to read and parse each file + // when preparing a package's type-checked syntax tree. + // It must be safe to call ParseFile simultaneously from multiple goroutines. + // If ParseFile is nil, the loader will uses parser.ParseFile. + // + // ParseFile should parse the source from src and use filename only for + // recording position information. + // + // An application may supply a custom implementation of ParseFile + // to change the effective file contents or the behavior of the parser, + // or to modify the syntax tree. For example, selectively eliminating + // unwanted function bodies can significantly accelerate type checking. + ParseFile func(fset *token.FileSet, filename string, src []byte) (*ast.File, error) + + // If Tests is set, the loader includes not just the packages + // matching a particular pattern but also any related test packages, + // including test-only variants of the package and the test executable. + // + // For example, when using the go command, loading "fmt" with Tests=true + // returns four packages, with IDs "fmt" (the standard package), + // "fmt [fmt.test]" (the package as compiled for the test), + // "fmt_test" (the test functions from source files in package fmt_test), + // and "fmt.test" (the test binary). + // + // In build systems with explicit names for tests, + // setting Tests may have no effect. + Tests bool + + // Overlay provides a mapping of absolute file paths to file contents. + // If the file with the given path already exists, the parser will use the + // alternative file contents provided by the map. + // + // Overlays provide incomplete support for when a given file doesn't + // already exist on disk. See the package doc above for more details. + Overlay map[string][]byte +} + +// driver is the type for functions that query the build system for the +// packages named by the patterns. +type driver func(cfg *Config, patterns ...string) (*driverResponse, error) + +// driverResponse contains the results for a driver query. +type driverResponse struct { + // NotHandled is returned if the request can't be handled by the current + // driver. If an external driver returns a response with NotHandled, the + // rest of the driverResponse is ignored, and go/packages will fallback + // to the next driver. If go/packages is extended in the future to support + // lists of multiple drivers, go/packages will fall back to the next driver. + NotHandled bool + + // Sizes, if not nil, is the types.Sizes to use when type checking. + Sizes *types.StdSizes + + // Roots is the set of package IDs that make up the root packages. 
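The ParseFile hook described above can trim syntax the caller does not need. A sketch of one such hook that drops function bodies, under the assumption that only signatures matter for the files it is applied to; the package name is hypothetical:

    package parsehook

    import (
    	"go/ast"
    	"go/parser"
    	"go/token"
    )

    // ParseWithoutBodies parses a file and discards function bodies, which can
    // significantly speed up type checking when bodies are not needed.
    func ParseWithoutBodies(fset *token.FileSet, filename string, src []byte) (*ast.File, error) {
    	f, err := parser.ParseFile(fset, filename, src, parser.ParseComments)
    	if err != nil {
    		return nil, err
    	}
    	for _, d := range f.Decls {
    		if fn, ok := d.(*ast.FuncDecl); ok {
    			fn.Body = nil // keep only the signature
    		}
    	}
    	return f, nil
    }

If adopted, it would be installed as cfg.ParseFile = parsehook.ParseWithoutBodies, which matches the field's signature above.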
+ // We have to encode this separately because when we encode a single package + // we cannot know if it is one of the roots as that requires knowledge of the + // graph it is part of. + Roots []string `json:",omitempty"` + + // Packages is the full set of packages in the graph. + // The packages are not connected into a graph. + // The Imports if populated will be stubs that only have their ID set. + // Imports will be connected and then type and syntax information added in a + // later pass (see refine). + Packages []*Package +} + +// Load loads and returns the Go packages named by the given patterns. +// +// Config specifies loading options; +// nil behaves the same as an empty Config. +// +// Load returns an error if any of the patterns was invalid +// as defined by the underlying build system. +// It may return an empty list of packages without an error, +// for instance for an empty expansion of a valid wildcard. +// Errors associated with a particular package are recorded in the +// corresponding Package's Errors list, and do not cause Load to +// return an error. Clients may need to handle such errors before +// proceeding with further analysis. The PrintErrors function is +// provided for convenient display of all errors. +func Load(cfg *Config, patterns ...string) ([]*Package, error) { + l := newLoader(cfg) + response, err := defaultDriver(&l.Config, patterns...) + if err != nil { + return nil, err + } + l.sizes = response.Sizes + return l.refine(response.Roots, response.Packages...) +} + +// defaultDriver is a driver that implements go/packages' fallback behavior. +// It will try to request to an external driver, if one exists. If there's +// no external driver, or the driver returns a response with NotHandled set, +// defaultDriver will fall back to the go list driver. +func defaultDriver(cfg *Config, patterns ...string) (*driverResponse, error) { + driver := findExternalDriver(cfg) + if driver == nil { + driver = goListDriver + } + response, err := driver(cfg, patterns...) + if err != nil { + return response, err + } else if response.NotHandled { + return goListDriver(cfg, patterns...) + } + return response, nil +} + +// A Package describes a loaded Go package. +type Package struct { + // ID is a unique identifier for a package, + // in a syntax provided by the underlying build system. + // + // Because the syntax varies based on the build system, + // clients should treat IDs as opaque and not attempt to + // interpret them. + ID string + + // Name is the package name as it appears in the package source code. + Name string + + // PkgPath is the package path as used by the go/types package. + PkgPath string + + // Errors contains any errors encountered querying the metadata + // of the package, or while parsing or type-checking its files. + Errors []Error + + // GoFiles lists the absolute file paths of the package's Go source files. + GoFiles []string + + // CompiledGoFiles lists the absolute file paths of the package's source + // files that are suitable for type checking. + // This may differ from GoFiles if files are processed before compilation. + CompiledGoFiles []string + + // OtherFiles lists the absolute file paths of the package's non-Go source files, + // including assembly, C, C++, Fortran, Objective-C, SWIG, and so on. + OtherFiles []string + + // IgnoredFiles lists source files that are not part of the package + // using the current build configuration but that might be part of + // the package using other build configurations. 
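A minimal sketch of the calling pattern described above: Load reports pattern-level failures through its error result, while per-package problems land in each Package's Errors list and can be surfaced with PrintErrors.

    package main

    import (
    	"fmt"
    	"os"

    	"golang.org/x/tools/go/packages"
    )

    func main() {
    	cfg := &packages.Config{Mode: packages.NeedName | packages.NeedFiles | packages.NeedImports}
    	pkgs, err := packages.Load(cfg, "./...")
    	if err != nil {
    		fmt.Fprintln(os.Stderr, err) // the query itself failed
    		os.Exit(1)
    	}
    	if packages.PrintErrors(pkgs) > 0 {
    		os.Exit(1) // per-package errors were recorded on the results
    	}
    	for _, p := range pkgs {
    		fmt.Println(p.ID, p.Name)
    	}
    }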
+ IgnoredFiles []string + + // ExportFile is the absolute path to a file containing type + // information for the package as provided by the build system. + ExportFile string + + // Imports maps import paths appearing in the package's Go source files + // to corresponding loaded Packages. + Imports map[string]*Package + + // Types provides type information for the package. + // The NeedTypes LoadMode bit sets this field for packages matching the + // patterns; type information for dependencies may be missing or incomplete, + // unless NeedDeps and NeedImports are also set. + Types *types.Package + + // Fset provides position information for Types, TypesInfo, and Syntax. + // It is set only when Types is set. + Fset *token.FileSet + + // IllTyped indicates whether the package or any dependency contains errors. + // It is set only when Types is set. + IllTyped bool + + // Syntax is the package's syntax trees, for the files listed in CompiledGoFiles. + // + // The NeedSyntax LoadMode bit populates this field for packages matching the patterns. + // If NeedDeps and NeedImports are also set, this field will also be populated + // for dependencies. + Syntax []*ast.File + + // TypesInfo provides type information about the package's syntax trees. + // It is set only when Syntax is set. + TypesInfo *types.Info + + // TypesSizes provides the effective size function for types in TypesInfo. + TypesSizes types.Sizes + + // forTest is the package under test, if any. + forTest string + + // depsErrors is the DepsErrors field from the go list response, if any. + depsErrors []*packagesinternal.PackageError + + // module is the module information for the package if it exists. + Module *Module +} + +// Module provides module information for a package. +type Module struct { + Path string // module path + Version string // module version + Replace *Module // replaced by this module + Time *time.Time // time version was created + Main bool // is this the main module? + Indirect bool // is this module only an indirect dependency of main module? + Dir string // directory holding files for this module, if any + GoMod string // path to go.mod file used when loading this module, if any + GoVersion string // go version used in module + Error *ModuleError // error loading module +} + +// ModuleError holds errors loading a module. +type ModuleError struct { + Err string // the error itself +} + +func init() { + packagesinternal.GetForTest = func(p interface{}) string { + return p.(*Package).forTest + } + packagesinternal.GetDepsErrors = func(p interface{}) []*packagesinternal.PackageError { + return p.(*Package).depsErrors + } + packagesinternal.GetGoCmdRunner = func(config interface{}) *gocommand.Runner { + return config.(*Config).gocmdRunner + } + packagesinternal.SetGoCmdRunner = func(config interface{}, runner *gocommand.Runner) { + config.(*Config).gocmdRunner = runner + } + packagesinternal.SetModFile = func(config interface{}, value string) { + config.(*Config).modFile = value + } + packagesinternal.SetModFlag = func(config interface{}, value string) { + config.(*Config).modFlag = value + } + packagesinternal.TypecheckCgo = int(typecheckCgo) +} + +// An Error describes a problem with a package's metadata, syntax, or types. +type Error struct { + Pos string // "file:line:col" or "file:line" or "" or "-" + Msg string + Kind ErrorKind +} + +// ErrorKind describes the source of the error, allowing the user to +// differentiate between errors generated by the driver, the parser, or the +// type-checker. 
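A small sketch of using the Kind classification declared just below to distinguish error sources on a loaded package; the helper and its package name are hypothetical:

    package pkgerrs

    import "golang.org/x/tools/go/packages"

    // typeErrorCount reports how many of a package's recorded errors came from
    // the type checker rather than the driver or the parser.
    func typeErrorCount(p *packages.Package) int {
    	n := 0
    	for _, e := range p.Errors {
    		if e.Kind == packages.TypeError {
    			n++
    		}
    	}
    	return n
    }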
+type ErrorKind int + +const ( + UnknownError ErrorKind = iota + ListError + ParseError + TypeError +) + +func (err Error) Error() string { + pos := err.Pos + if pos == "" { + pos = "-" // like token.Position{}.String() + } + return pos + ": " + err.Msg +} + +// flatPackage is the JSON form of Package +// It drops all the type and syntax fields, and transforms the Imports +// +// TODO(adonovan): identify this struct with Package, effectively +// publishing the JSON protocol. +type flatPackage struct { + ID string + Name string `json:",omitempty"` + PkgPath string `json:",omitempty"` + Errors []Error `json:",omitempty"` + GoFiles []string `json:",omitempty"` + CompiledGoFiles []string `json:",omitempty"` + OtherFiles []string `json:",omitempty"` + IgnoredFiles []string `json:",omitempty"` + ExportFile string `json:",omitempty"` + Imports map[string]string `json:",omitempty"` +} + +// MarshalJSON returns the Package in its JSON form. +// For the most part, the structure fields are written out unmodified, and +// the type and syntax fields are skipped. +// The imports are written out as just a map of path to package id. +// The errors are written using a custom type that tries to preserve the +// structure of error types we know about. +// +// This method exists to enable support for additional build systems. It is +// not intended for use by clients of the API and we may change the format. +func (p *Package) MarshalJSON() ([]byte, error) { + flat := &flatPackage{ + ID: p.ID, + Name: p.Name, + PkgPath: p.PkgPath, + Errors: p.Errors, + GoFiles: p.GoFiles, + CompiledGoFiles: p.CompiledGoFiles, + OtherFiles: p.OtherFiles, + IgnoredFiles: p.IgnoredFiles, + ExportFile: p.ExportFile, + } + if len(p.Imports) > 0 { + flat.Imports = make(map[string]string, len(p.Imports)) + for path, ipkg := range p.Imports { + flat.Imports[path] = ipkg.ID + } + } + return json.Marshal(flat) +} + +// UnmarshalJSON reads in a Package from its JSON format. +// See MarshalJSON for details about the format accepted. +func (p *Package) UnmarshalJSON(b []byte) error { + flat := &flatPackage{} + if err := json.Unmarshal(b, &flat); err != nil { + return err + } + *p = Package{ + ID: flat.ID, + Name: flat.Name, + PkgPath: flat.PkgPath, + Errors: flat.Errors, + GoFiles: flat.GoFiles, + CompiledGoFiles: flat.CompiledGoFiles, + OtherFiles: flat.OtherFiles, + ExportFile: flat.ExportFile, + } + if len(flat.Imports) > 0 { + p.Imports = make(map[string]*Package, len(flat.Imports)) + for path, id := range flat.Imports { + p.Imports[path] = &Package{ID: id} + } + } + return nil +} + +func (p *Package) String() string { return p.ID } + +// loaderPackage augments Package with state used during the loading phase +type loaderPackage struct { + *Package + importErrors map[string]error // maps each bad import to its error + loadOnce sync.Once + color uint8 // for cycle detection + needsrc bool // load from source (Mode >= LoadTypes) + needtypes bool // type information is either requested or depended on + initial bool // package was matched by a pattern +} + +// loader holds the working state of a single call to load. +type loader struct { + pkgs map[string]*loaderPackage + Config + sizes types.Sizes + parseCache map[string]*parseValue + parseCacheMu sync.Mutex + exportMu sync.Mutex // enforces mutual exclusion of exportdata operations + + // Config.Mode contains the implied mode (see impliedLoadMode). + // Implied mode contains all the fields we need the data for. + // In requestedMode there are the actually requested fields. 
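A minimal sketch of the JSON form produced by MarshalJSON above: type and syntax fields are dropped and Imports collapse to a path-to-ID map.

    package main

    import (
    	"encoding/json"
    	"fmt"
    	"log"

    	"golang.org/x/tools/go/packages"
    )

    func main() {
    	p := &packages.Package{
    		ID:      "example.com/m/foo", // hypothetical package
    		Name:    "foo",
    		PkgPath: "example.com/m/foo",
    		Imports: map[string]*packages.Package{"fmt": {ID: "fmt"}},
    	}
    	b, err := json.Marshal(p)
    	if err != nil {
    		log.Fatal(err)
    	}
    	fmt.Println(string(b))
    	// {"ID":"example.com/m/foo","Name":"foo","PkgPath":"example.com/m/foo","Imports":{"fmt":"fmt"}}
    }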
+ // We'll zero them out before returning packages to the user. + // This makes it easier for us to get the conditions where + // we need certain modes right. + requestedMode LoadMode +} + +type parseValue struct { + f *ast.File + err error + ready chan struct{} +} + +func newLoader(cfg *Config) *loader { + ld := &loader{ + parseCache: map[string]*parseValue{}, + } + if cfg != nil { + ld.Config = *cfg + // If the user has provided a logger, use it. + ld.Config.Logf = cfg.Logf + } + if ld.Config.Logf == nil { + // If the GOPACKAGESDEBUG environment variable is set to true, + // but the user has not provided a logger, default to log.Printf. + if debug { + ld.Config.Logf = log.Printf + } else { + ld.Config.Logf = func(format string, args ...interface{}) {} + } + } + if ld.Config.Mode == 0 { + ld.Config.Mode = NeedName | NeedFiles | NeedCompiledGoFiles // Preserve zero behavior of Mode for backwards compatibility. + } + if ld.Config.Env == nil { + ld.Config.Env = os.Environ() + } + if ld.Config.gocmdRunner == nil { + ld.Config.gocmdRunner = &gocommand.Runner{} + } + if ld.Context == nil { + ld.Context = context.Background() + } + if ld.Dir == "" { + if dir, err := os.Getwd(); err == nil { + ld.Dir = dir + } + } + + // Save the actually requested fields. We'll zero them out before returning packages to the user. + ld.requestedMode = ld.Mode + ld.Mode = impliedLoadMode(ld.Mode) + + if ld.Mode&NeedTypes != 0 || ld.Mode&NeedSyntax != 0 { + if ld.Fset == nil { + ld.Fset = token.NewFileSet() + } + + // ParseFile is required even in LoadTypes mode + // because we load source if export data is missing. + if ld.ParseFile == nil { + ld.ParseFile = func(fset *token.FileSet, filename string, src []byte) (*ast.File, error) { + const mode = parser.AllErrors | parser.ParseComments + return parser.ParseFile(fset, filename, src, mode) + } + } + } + + return ld +} + +// refine connects the supplied packages into a graph and then adds type and +// and syntax information as requested by the LoadMode. +func (ld *loader) refine(roots []string, list ...*Package) ([]*Package, error) { + rootMap := make(map[string]int, len(roots)) + for i, root := range roots { + rootMap[root] = i + } + ld.pkgs = make(map[string]*loaderPackage) + // first pass, fixup and build the map and roots + var initial = make([]*loaderPackage, len(roots)) + for _, pkg := range list { + rootIndex := -1 + if i, found := rootMap[pkg.ID]; found { + rootIndex = i + } + + // Overlays can invalidate export data. + // TODO(matloob): make this check fine-grained based on dependencies on overlaid files + exportDataInvalid := len(ld.Overlay) > 0 || pkg.ExportFile == "" && pkg.PkgPath != "unsafe" + // This package needs type information if the caller requested types and the package is + // either a root, or it's a non-root and the user requested dependencies ... + needtypes := (ld.Mode&NeedTypes|NeedTypesInfo != 0 && (rootIndex >= 0 || ld.Mode&NeedDeps != 0)) + // This package needs source if the call requested source (or types info, which implies source) + // and the package is either a root, or itas a non- root and the user requested dependencies... + needsrc := ((ld.Mode&(NeedSyntax|NeedTypesInfo) != 0 && (rootIndex >= 0 || ld.Mode&NeedDeps != 0)) || + // ... or if we need types and the exportData is invalid. We fall back to (incompletely) + // typechecking packages from source if they fail to compile. 
+ (ld.Mode&NeedTypes|NeedTypesInfo != 0 && exportDataInvalid)) && pkg.PkgPath != "unsafe" + lpkg := &loaderPackage{ + Package: pkg, + needtypes: needtypes, + needsrc: needsrc, + } + ld.pkgs[lpkg.ID] = lpkg + if rootIndex >= 0 { + initial[rootIndex] = lpkg + lpkg.initial = true + } + } + for i, root := range roots { + if initial[i] == nil { + return nil, fmt.Errorf("root package %v is missing", root) + } + } + + // Materialize the import graph. + + const ( + white = 0 // new + grey = 1 // in progress + black = 2 // complete + ) + + // visit traverses the import graph, depth-first, + // and materializes the graph as Packages.Imports. + // + // Valid imports are saved in the Packages.Import map. + // Invalid imports (cycles and missing nodes) are saved in the importErrors map. + // Thus, even in the presence of both kinds of errors, the Import graph remains a DAG. + // + // visit returns whether the package needs src or has a transitive + // dependency on a package that does. These are the only packages + // for which we load source code. + var stack []*loaderPackage + var visit func(lpkg *loaderPackage) bool + var srcPkgs []*loaderPackage + visit = func(lpkg *loaderPackage) bool { + switch lpkg.color { + case black: + return lpkg.needsrc + case grey: + panic("internal error: grey node") + } + lpkg.color = grey + stack = append(stack, lpkg) // push + stubs := lpkg.Imports // the structure form has only stubs with the ID in the Imports + // If NeedImports isn't set, the imports fields will all be zeroed out. + if ld.Mode&NeedImports != 0 { + lpkg.Imports = make(map[string]*Package, len(stubs)) + for importPath, ipkg := range stubs { + var importErr error + imp := ld.pkgs[ipkg.ID] + if imp == nil { + // (includes package "C" when DisableCgo) + importErr = fmt.Errorf("missing package: %q", ipkg.ID) + } else if imp.color == grey { + importErr = fmt.Errorf("import cycle: %s", stack) + } + if importErr != nil { + if lpkg.importErrors == nil { + lpkg.importErrors = make(map[string]error) + } + lpkg.importErrors[importPath] = importErr + continue + } + + if visit(imp) { + lpkg.needsrc = true + } + lpkg.Imports[importPath] = imp.Package + } + } + if lpkg.needsrc { + srcPkgs = append(srcPkgs, lpkg) + } + if ld.Mode&NeedTypesSizes != 0 { + lpkg.TypesSizes = ld.sizes + } + stack = stack[:len(stack)-1] // pop + lpkg.color = black + + return lpkg.needsrc + } + + if ld.Mode&NeedImports == 0 { + // We do this to drop the stub import packages that we are not even going to try to resolve. + for _, lpkg := range initial { + lpkg.Imports = nil + } + } else { + // For each initial package, create its import DAG. + for _, lpkg := range initial { + visit(lpkg) + } + } + if ld.Mode&NeedImports != 0 && ld.Mode&NeedTypes != 0 { + for _, lpkg := range srcPkgs { + // Complete type information is required for the + // immediate dependencies of each source package. + for _, ipkg := range lpkg.Imports { + imp := ld.pkgs[ipkg.ID] + imp.needtypes = true + } + } + } + // Load type data and syntax if needed, starting at + // the initial packages (roots of the import DAG). + if ld.Mode&NeedTypes != 0 || ld.Mode&NeedSyntax != 0 { + var wg sync.WaitGroup + for _, lpkg := range initial { + wg.Add(1) + go func(lpkg *loaderPackage) { + ld.loadRecursive(lpkg) + wg.Done() + }(lpkg) + } + wg.Wait() + } + + result := make([]*Package, len(initial)) + for i, lpkg := range initial { + result[i] = lpkg.Package + } + for i := range ld.pkgs { + // Clear all unrequested fields, + // to catch programs that use more than they request. 
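A standalone sketch of the white/grey/black traversal used above, reduced to cycle detection over a toy import map; node names are hypothetical:

    package main

    import "fmt"

    const (
    	white = 0 // not yet visited
    	grey  = 1 // on the current DFS stack
    	black = 2 // fully processed
    )

    func main() {
    	imports := map[string][]string{"a": {"b"}, "b": {"c"}, "c": {"a"}} // contains a cycle
    	color := map[string]int{}
    	var visit func(n string)
    	visit = func(n string) {
    		color[n] = grey
    		for _, m := range imports[n] {
    			switch color[m] {
    			case grey:
    				fmt.Println("import cycle back to", m)
    			case white:
    				visit(m)
    			}
    		}
    		color[n] = black
    	}
    	visit("a") // import cycle back to a
    }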
+ if ld.requestedMode&NeedName == 0 { + ld.pkgs[i].Name = "" + ld.pkgs[i].PkgPath = "" + } + if ld.requestedMode&NeedFiles == 0 { + ld.pkgs[i].GoFiles = nil + ld.pkgs[i].OtherFiles = nil + ld.pkgs[i].IgnoredFiles = nil + } + if ld.requestedMode&NeedCompiledGoFiles == 0 { + ld.pkgs[i].CompiledGoFiles = nil + } + if ld.requestedMode&NeedImports == 0 { + ld.pkgs[i].Imports = nil + } + if ld.requestedMode&NeedExportsFile == 0 { + ld.pkgs[i].ExportFile = "" + } + if ld.requestedMode&NeedTypes == 0 { + ld.pkgs[i].Types = nil + ld.pkgs[i].Fset = nil + ld.pkgs[i].IllTyped = false + } + if ld.requestedMode&NeedSyntax == 0 { + ld.pkgs[i].Syntax = nil + } + if ld.requestedMode&NeedTypesInfo == 0 { + ld.pkgs[i].TypesInfo = nil + } + if ld.requestedMode&NeedTypesSizes == 0 { + ld.pkgs[i].TypesSizes = nil + } + if ld.requestedMode&NeedModule == 0 { + ld.pkgs[i].Module = nil + } + } + + return result, nil +} + +// loadRecursive loads the specified package and its dependencies, +// recursively, in parallel, in topological order. +// It is atomic and idempotent. +// Precondition: ld.Mode&NeedTypes. +func (ld *loader) loadRecursive(lpkg *loaderPackage) { + lpkg.loadOnce.Do(func() { + // Load the direct dependencies, in parallel. + var wg sync.WaitGroup + for _, ipkg := range lpkg.Imports { + imp := ld.pkgs[ipkg.ID] + wg.Add(1) + go func(imp *loaderPackage) { + ld.loadRecursive(imp) + wg.Done() + }(imp) + } + wg.Wait() + ld.loadPackage(lpkg) + }) +} + +// loadPackage loads the specified package. +// It must be called only once per Package, +// after immediate dependencies are loaded. +// Precondition: ld.Mode & NeedTypes. +func (ld *loader) loadPackage(lpkg *loaderPackage) { + if lpkg.PkgPath == "unsafe" { + // Fill in the blanks to avoid surprises. + lpkg.Types = types.Unsafe + lpkg.Fset = ld.Fset + lpkg.Syntax = []*ast.File{} + lpkg.TypesInfo = new(types.Info) + lpkg.TypesSizes = ld.sizes + return + } + + // Call NewPackage directly with explicit name. + // This avoids skew between golist and go/types when the files' + // package declarations are inconsistent. + lpkg.Types = types.NewPackage(lpkg.PkgPath, lpkg.Name) + lpkg.Fset = ld.Fset + + // Subtle: we populate all Types fields with an empty Package + // before loading export data so that export data processing + // never has to create a types.Package for an indirect dependency, + // which would then require that such created packages be explicitly + // inserted back into the Import graph as a final step after export data loading. + // The Diamond test exercises this case. + if !lpkg.needtypes && !lpkg.needsrc { + return + } + if !lpkg.needsrc { + ld.loadFromExportData(lpkg) + return // not a source package, don't get syntax trees + } + + appendError := func(err error) { + // Convert various error types into the one true Error. + var errs []Error + switch err := err.(type) { + case Error: + // from driver + errs = append(errs, err) + + case *os.PathError: + // from parser + errs = append(errs, Error{ + Pos: err.Path + ":1", + Msg: err.Err.Error(), + Kind: ParseError, + }) + + case scanner.ErrorList: + // from parser + for _, err := range err { + errs = append(errs, Error{ + Pos: err.Pos.String(), + Msg: err.Msg, + Kind: ParseError, + }) + } + + case types.Error: + // from type checker + errs = append(errs, Error{ + Pos: err.Fset.Position(err.Pos).String(), + Msg: err.Msg, + Kind: TypeError, + }) + + default: + // unexpected impoverished error from parser? 
+ errs = append(errs, Error{ + Pos: "-", + Msg: err.Error(), + Kind: UnknownError, + }) + + // If you see this error message, please file a bug. + log.Printf("internal error: error %q (%T) without position", err, err) + } + + lpkg.Errors = append(lpkg.Errors, errs...) + } + + if ld.Config.Mode&NeedTypes != 0 && len(lpkg.CompiledGoFiles) == 0 && lpkg.ExportFile != "" { + // The config requested loading sources and types, but sources are missing. + // Add an error to the package and fall back to loading from export data. + appendError(Error{"-", fmt.Sprintf("sources missing for package %s", lpkg.ID), ParseError}) + ld.loadFromExportData(lpkg) + return // can't get syntax trees for this package + } + + files, errs := ld.parseFiles(lpkg.CompiledGoFiles) + for _, err := range errs { + appendError(err) + } + + lpkg.Syntax = files + if ld.Config.Mode&NeedTypes == 0 { + return + } + + lpkg.TypesInfo = &types.Info{ + Types: make(map[ast.Expr]types.TypeAndValue), + Defs: make(map[*ast.Ident]types.Object), + Uses: make(map[*ast.Ident]types.Object), + Implicits: make(map[ast.Node]types.Object), + Scopes: make(map[ast.Node]*types.Scope), + Selections: make(map[*ast.SelectorExpr]*types.Selection), + } + lpkg.TypesSizes = ld.sizes + + importer := importerFunc(func(path string) (*types.Package, error) { + if path == "unsafe" { + return types.Unsafe, nil + } + + // The imports map is keyed by import path. + ipkg := lpkg.Imports[path] + if ipkg == nil { + if err := lpkg.importErrors[path]; err != nil { + return nil, err + } + // There was skew between the metadata and the + // import declarations, likely due to an edit + // race, or because the ParseFile feature was + // used to supply alternative file contents. + return nil, fmt.Errorf("no metadata for %s", path) + } + + if ipkg.Types != nil && ipkg.Types.Complete() { + return ipkg.Types, nil + } + log.Fatalf("internal error: package %q without types was imported from %q", path, lpkg) + panic("unreachable") + }) + + // type-check + tc := &types.Config{ + Importer: importer, + + // Type-check bodies of functions only in non-initial packages. + // Example: for import graph A->B->C and initial packages {A,C}, + // we can ignore function bodies in B. + IgnoreFuncBodies: ld.Mode&NeedDeps == 0 && !lpkg.initial, + + Error: appendError, + Sizes: ld.sizes, + } + if (ld.Mode & typecheckCgo) != 0 { + if !typesinternal.SetUsesCgo(tc) { + appendError(Error{ + Msg: "typecheckCgo requires Go 1.15+", + Kind: ListError, + }) + return + } + } + types.NewChecker(tc, ld.Fset, lpkg.Types, lpkg.TypesInfo).Files(lpkg.Syntax) + + lpkg.importErrors = nil // no longer needed + + // If !Cgo, the type-checker uses FakeImportC mode, so + // it doesn't invoke the importer for import "C", + // nor report an error for the import, + // or for any undefined C.f reference. + // We must detect this explicitly and correctly + // mark the package as IllTyped (by reporting an error). + // TODO(adonovan): if these errors are annoying, + // we could just set IllTyped quietly. + if tc.FakeImportC { + outer: + for _, f := range lpkg.Syntax { + for _, imp := range f.Imports { + if imp.Path.Value == `"C"` { + err := types.Error{Fset: ld.Fset, Pos: imp.Pos(), Msg: `import "C" ignored`} + appendError(err) + break outer + } + } + } + } + + // Record accumulated errors. 
+ illTyped := len(lpkg.Errors) > 0 + if !illTyped { + for _, imp := range lpkg.Imports { + if imp.IllTyped { + illTyped = true + break + } + } + } + lpkg.IllTyped = illTyped +} + +// An importFunc is an implementation of the single-method +// types.Importer interface based on a function value. +type importerFunc func(path string) (*types.Package, error) + +func (f importerFunc) Import(path string) (*types.Package, error) { return f(path) } + +// We use a counting semaphore to limit +// the number of parallel I/O calls per process. +var ioLimit = make(chan bool, 20) + +func (ld *loader) parseFile(filename string) (*ast.File, error) { + ld.parseCacheMu.Lock() + v, ok := ld.parseCache[filename] + if ok { + // cache hit + ld.parseCacheMu.Unlock() + <-v.ready + } else { + // cache miss + v = &parseValue{ready: make(chan struct{})} + ld.parseCache[filename] = v + ld.parseCacheMu.Unlock() + + var src []byte + for f, contents := range ld.Config.Overlay { + if sameFile(f, filename) { + src = contents + } + } + var err error + if src == nil { + ioLimit <- true // wait + src, err = ioutil.ReadFile(filename) + <-ioLimit // signal + } + if err != nil { + v.err = err + } else { + v.f, v.err = ld.ParseFile(ld.Fset, filename, src) + } + + close(v.ready) + } + return v.f, v.err +} + +// parseFiles reads and parses the Go source files and returns the ASTs +// of the ones that could be at least partially parsed, along with a +// list of I/O and parse errors encountered. +// +// Because files are scanned in parallel, the token.Pos +// positions of the resulting ast.Files are not ordered. +// +func (ld *loader) parseFiles(filenames []string) ([]*ast.File, []error) { + var wg sync.WaitGroup + n := len(filenames) + parsed := make([]*ast.File, n) + errors := make([]error, n) + for i, file := range filenames { + if ld.Config.Context.Err() != nil { + parsed[i] = nil + errors[i] = ld.Config.Context.Err() + continue + } + wg.Add(1) + go func(i int, filename string) { + parsed[i], errors[i] = ld.parseFile(filename) + wg.Done() + }(i, file) + } + wg.Wait() + + // Eliminate nils, preserving order. + var o int + for _, f := range parsed { + if f != nil { + parsed[o] = f + o++ + } + } + parsed = parsed[:o] + + o = 0 + for _, err := range errors { + if err != nil { + errors[o] = err + o++ + } + } + errors = errors[:o] + + return parsed, errors +} + +// sameFile returns true if x and y have the same basename and denote +// the same file. +// +func sameFile(x, y string) bool { + if x == y { + // It could be the case that y doesn't exist. + // For instance, it may be an overlay file that + // hasn't been written to disk. To handle that case + // let x == y through. (We added the exact absolute path + // string to the CompiledGoFiles list, so the unwritten + // overlay case implies x==y.) + return true + } + if strings.EqualFold(filepath.Base(x), filepath.Base(y)) { // (optimisation) + if xi, err := os.Stat(x); err == nil { + if yi, err := os.Stat(y); err == nil { + return os.SameFile(xi, yi) + } + } + } + return false +} + +// loadFromExportData returns type information for the specified +// package, loading it from an export data file on the first request. 
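A standalone sketch of the counting-semaphore pattern behind ioLimit above, capping concurrent reads at an arbitrary limit of three:

    package main

    import (
    	"fmt"
    	"io/ioutil"
    	"sync"
    )

    var sem = make(chan struct{}, 3) // at most 3 reads in flight

    func readAll(paths []string) [][]byte {
    	out := make([][]byte, len(paths))
    	var wg sync.WaitGroup
    	for i, p := range paths {
    		wg.Add(1)
    		go func(i int, p string) {
    			defer wg.Done()
    			sem <- struct{}{} // acquire a slot
    			data, err := ioutil.ReadFile(p)
    			<-sem // release it
    			if err == nil {
    				out[i] = data
    			}
    		}(i, p)
    	}
    	wg.Wait()
    	return out
    }

    func main() {
    	fmt.Println(len(readAll([]string{"go.mod", "go.sum"}))) // 2
    }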
+func (ld *loader) loadFromExportData(lpkg *loaderPackage) (*types.Package, error) { + if lpkg.PkgPath == "" { + log.Fatalf("internal error: Package %s has no PkgPath", lpkg) + } + + // Because gcexportdata.Read has the potential to create or + // modify the types.Package for each node in the transitive + // closure of dependencies of lpkg, all exportdata operations + // must be sequential. (Finer-grained locking would require + // changes to the gcexportdata API.) + // + // The exportMu lock guards the Package.Pkg field and the + // types.Package it points to, for each Package in the graph. + // + // Not all accesses to Package.Pkg need to be protected by exportMu: + // graph ordering ensures that direct dependencies of source + // packages are fully loaded before the importer reads their Pkg field. + ld.exportMu.Lock() + defer ld.exportMu.Unlock() + + if tpkg := lpkg.Types; tpkg != nil && tpkg.Complete() { + return tpkg, nil // cache hit + } + + lpkg.IllTyped = true // fail safe + + if lpkg.ExportFile == "" { + // Errors while building export data will have been printed to stderr. + return nil, fmt.Errorf("no export data file") + } + f, err := os.Open(lpkg.ExportFile) + if err != nil { + return nil, err + } + defer f.Close() + + // Read gc export data. + // + // We don't currently support gccgo export data because all + // underlying workspaces use the gc toolchain. (Even build + // systems that support gccgo don't use it for workspace + // queries.) + r, err := gcexportdata.NewReader(f) + if err != nil { + return nil, fmt.Errorf("reading %s: %v", lpkg.ExportFile, err) + } + + // Build the view. + // + // The gcexportdata machinery has no concept of package ID. + // It identifies packages by their PkgPath, which although not + // globally unique is unique within the scope of one invocation + // of the linker, type-checker, or gcexportdata. + // + // So, we must build a PkgPath-keyed view of the global + // (conceptually ID-keyed) cache of packages and pass it to + // gcexportdata. The view must contain every existing + // package that might possibly be mentioned by the + // current package---its transitive closure. + // + // In loadPackage, we unconditionally create a types.Package for + // each dependency so that export data loading does not + // create new ones. + // + // TODO(adonovan): it would be simpler and more efficient + // if the export data machinery invoked a callback to + // get-or-create a package instead of a map. + // + view := make(map[string]*types.Package) // view seen by gcexportdata + seen := make(map[*loaderPackage]bool) // all visited packages + var visit func(pkgs map[string]*Package) + visit = func(pkgs map[string]*Package) { + for _, p := range pkgs { + lpkg := ld.pkgs[p.ID] + if !seen[lpkg] { + seen[lpkg] = true + view[lpkg.PkgPath] = lpkg.Types + visit(lpkg.Imports) + } + } + } + visit(lpkg.Imports) + + viewLen := len(view) + 1 // adding the self package + // Parse the export data. + // (May modify incomplete packages in view but not create new ones.) + tpkg, err := gcexportdata.Read(r, ld.Fset, view, lpkg.PkgPath) + if err != nil { + return nil, fmt.Errorf("reading %s: %v", lpkg.ExportFile, err) + } + if viewLen != len(view) { + log.Fatalf("Unexpected package creation during export data loading") + } + + lpkg.Types = tpkg + lpkg.IllTyped = false + + return tpkg, nil +} + +// impliedLoadMode returns loadMode with its dependencies. 
+func impliedLoadMode(loadMode LoadMode) LoadMode { + if loadMode&NeedTypesInfo != 0 && loadMode&NeedImports == 0 { + // If NeedTypesInfo, go/packages needs to do typechecking itself so it can + // associate type info with the AST. To do so, we need the export data + // for dependencies, which means we need to ask for the direct dependencies. + // NeedImports is used to ask for the direct dependencies. + loadMode |= NeedImports + } + + if loadMode&NeedDeps != 0 && loadMode&NeedImports == 0 { + // With NeedDeps we need to load at least direct dependencies. + // NeedImports is used to ask for the direct dependencies. + loadMode |= NeedImports + } + + return loadMode +} + +func usesExportData(cfg *Config) bool { + return cfg.Mode&NeedExportsFile != 0 || cfg.Mode&NeedTypes != 0 && cfg.Mode&NeedDeps == 0 +} diff --git a/vendor/golang.org/x/tools/go/packages/visit.go b/vendor/golang.org/x/tools/go/packages/visit.go new file mode 100644 index 000000000..a1dcc40b7 --- /dev/null +++ b/vendor/golang.org/x/tools/go/packages/visit.go @@ -0,0 +1,59 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package packages + +import ( + "fmt" + "os" + "sort" +) + +// Visit visits all the packages in the import graph whose roots are +// pkgs, calling the optional pre function the first time each package +// is encountered (preorder), and the optional post function after a +// package's dependencies have been visited (postorder). +// The boolean result of pre(pkg) determines whether +// the imports of package pkg are visited. +func Visit(pkgs []*Package, pre func(*Package) bool, post func(*Package)) { + seen := make(map[*Package]bool) + var visit func(*Package) + visit = func(pkg *Package) { + if !seen[pkg] { + seen[pkg] = true + + if pre == nil || pre(pkg) { + paths := make([]string, 0, len(pkg.Imports)) + for path := range pkg.Imports { + paths = append(paths, path) + } + sort.Strings(paths) // Imports is a map, this makes visit stable + for _, path := range paths { + visit(pkg.Imports[path]) + } + } + + if post != nil { + post(pkg) + } + } + } + for _, pkg := range pkgs { + visit(pkg) + } +} + +// PrintErrors prints to os.Stderr the accumulated errors of all +// packages in the import graph rooted at pkgs, dependencies first. +// PrintErrors returns the number of errors printed. +func PrintErrors(pkgs []*Package) int { + var n int + Visit(pkgs, nil, func(pkg *Package) { + for _, err := range pkg.Errors { + fmt.Fprintln(os.Stderr, err) + n++ + } + }) + return n +} diff --git a/vendor/golang.org/x/tools/go/ssa/blockopt.go b/vendor/golang.org/x/tools/go/ssa/blockopt.go new file mode 100644 index 000000000..e79260a21 --- /dev/null +++ b/vendor/golang.org/x/tools/go/ssa/blockopt.go @@ -0,0 +1,187 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package ssa + +// Simple block optimizations to simplify the control flow graph. + +// TODO(adonovan): opt: instead of creating several "unreachable" blocks +// per function in the Builder, reuse a single one (e.g. at Blocks[1]) +// to reduce garbage. + +import ( + "fmt" + "os" +) + +// If true, perform sanity checking and show progress at each +// successive iteration of optimizeBlocks. Very verbose. +const debugBlockOpt = false + +// markReachable sets Index=-1 for all blocks reachable from b. 
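Editorial note (not part of the vendored file): Visit and PrintErrors are the public helpers for walking a load result. A typical caller, sketched with an illustrative mode and pattern, looks like this:

package main

import (
	"fmt"
	"log"
	"os"

	"golang.org/x/tools/go/packages"
)

func main() {
	cfg := &packages.Config{
		Mode: packages.NeedName | packages.NeedImports | packages.NeedDeps,
	}
	pkgs, err := packages.Load(cfg, "./...")
	if err != nil {
		log.Fatal(err) // failure to run the underlying build tool
	}
	// Per-package errors (list/parse/type errors) are printed here,
	// dependencies first, and counted.
	if n := packages.PrintErrors(pkgs); n > 0 {
		os.Exit(1)
	}
	// Postorder walk of the import graph; pre == nil means "follow all imports".
	packages.Visit(pkgs, nil, func(p *packages.Package) {
		fmt.Println(p.PkgPath)
	})
}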
+func markReachable(b *BasicBlock) { + b.Index = -1 + for _, succ := range b.Succs { + if succ.Index == 0 { + markReachable(succ) + } + } +} + +// deleteUnreachableBlocks marks all reachable blocks of f and +// eliminates (nils) all others, including possibly cyclic subgraphs. +// +func deleteUnreachableBlocks(f *Function) { + const white, black = 0, -1 + // We borrow b.Index temporarily as the mark bit. + for _, b := range f.Blocks { + b.Index = white + } + markReachable(f.Blocks[0]) + if f.Recover != nil { + markReachable(f.Recover) + } + for i, b := range f.Blocks { + if b.Index == white { + for _, c := range b.Succs { + if c.Index == black { + c.removePred(b) // delete white->black edge + } + } + if debugBlockOpt { + fmt.Fprintln(os.Stderr, "unreachable", b) + } + f.Blocks[i] = nil // delete b + } + } + f.removeNilBlocks() +} + +// jumpThreading attempts to apply simple jump-threading to block b, +// in which a->b->c become a->c if b is just a Jump. +// The result is true if the optimization was applied. +// +func jumpThreading(f *Function, b *BasicBlock) bool { + if b.Index == 0 { + return false // don't apply to entry block + } + if b.Instrs == nil { + return false + } + if _, ok := b.Instrs[0].(*Jump); !ok { + return false // not just a jump + } + c := b.Succs[0] + if c == b { + return false // don't apply to degenerate jump-to-self. + } + if c.hasPhi() { + return false // not sound without more effort + } + for j, a := range b.Preds { + a.replaceSucc(b, c) + + // If a now has two edges to c, replace its degenerate If by Jump. + if len(a.Succs) == 2 && a.Succs[0] == c && a.Succs[1] == c { + jump := new(Jump) + jump.setBlock(a) + a.Instrs[len(a.Instrs)-1] = jump + a.Succs = a.Succs[:1] + c.removePred(b) + } else { + if j == 0 { + c.replacePred(b, a) + } else { + c.Preds = append(c.Preds, a) + } + } + + if debugBlockOpt { + fmt.Fprintln(os.Stderr, "jumpThreading", a, b, c) + } + } + f.Blocks[b.Index] = nil // delete b + return true +} + +// fuseBlocks attempts to apply the block fusion optimization to block +// a, in which a->b becomes ab if len(a.Succs)==len(b.Preds)==1. +// The result is true if the optimization was applied. +// +func fuseBlocks(f *Function, a *BasicBlock) bool { + if len(a.Succs) != 1 { + return false + } + b := a.Succs[0] + if len(b.Preds) != 1 { + return false + } + + // Degenerate &&/|| ops may result in a straight-line CFG + // containing φ-nodes. (Ideally we'd replace such them with + // their sole operand but that requires Referrers, built later.) + if b.hasPhi() { + return false // not sound without further effort + } + + // Eliminate jump at end of A, then copy all of B across. + a.Instrs = append(a.Instrs[:len(a.Instrs)-1], b.Instrs...) + for _, instr := range b.Instrs { + instr.setBlock(a) + } + + // A inherits B's successors + a.Succs = append(a.succs2[:0], b.Succs...) + + // Fix up Preds links of all successors of B. + for _, c := range b.Succs { + c.replacePred(b, a) + } + + if debugBlockOpt { + fmt.Fprintln(os.Stderr, "fuseBlocks", a, b) + } + + f.Blocks[b.Index] = nil // delete b + return true +} + +// optimizeBlocks() performs some simple block optimizations on a +// completed function: dead block elimination, block fusion, jump +// threading. +// +func optimizeBlocks(f *Function) { + deleteUnreachableBlocks(f) + + // Loop until no further progress. 
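Editorial note (not part of the vendored file): jumpThreading and fuseBlocks above rewrite the ssa package's BasicBlock graph in place. To make the shape of jump threading concrete, here is a toy, self-contained sketch on an invented CFG type (this Block is not ssa.BasicBlock and omits the phi and predecessor bookkeeping):

package main

import "fmt"

// Block is a toy CFG node: a name, successor edges, and a flag meaning
// "this block contains nothing but an unconditional jump".
type Block struct {
	Name     string
	Succs    []*Block
	JumpOnly bool
}

// threadJumps rewrites a->b->c into a->c whenever b is only a jump,
// mirroring the effect of jumpThreading above.
func threadJumps(blocks []*Block) {
	for _, a := range blocks {
		for i, b := range a.Succs {
			for b.JumpOnly && len(b.Succs) == 1 && b.Succs[0] != b {
				b = b.Succs[0] // skip over the trivial block
			}
			a.Succs[i] = b
		}
	}
}

func main() {
	c := &Block{Name: "c"}
	b := &Block{Name: "b", Succs: []*Block{c}, JumpOnly: true}
	a := &Block{Name: "a", Succs: []*Block{b}}
	threadJumps([]*Block{a, b, c})
	fmt.Println(a.Succs[0].Name) // "c": the edge now bypasses b
}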
+ changed := true + for changed { + changed = false + + if debugBlockOpt { + f.WriteTo(os.Stderr) + mustSanityCheck(f, nil) + } + + for _, b := range f.Blocks { + // f.Blocks will temporarily contain nils to indicate + // deleted blocks; we remove them at the end. + if b == nil { + continue + } + + // Fuse blocks. b->c becomes bc. + if fuseBlocks(f, b) { + changed = true + } + + // a->b->c becomes a->c if b contains only a Jump. + if jumpThreading(f, b) { + changed = true + continue // (b was disconnected) + } + } + } + f.removeNilBlocks() +} diff --git a/vendor/golang.org/x/tools/go/ssa/builder.go b/vendor/golang.org/x/tools/go/ssa/builder.go new file mode 100644 index 000000000..2d0fdaa4e --- /dev/null +++ b/vendor/golang.org/x/tools/go/ssa/builder.go @@ -0,0 +1,2386 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package ssa + +// This file implements the BUILD phase of SSA construction. +// +// SSA construction has two phases, CREATE and BUILD. In the CREATE phase +// (create.go), all packages are constructed and type-checked and +// definitions of all package members are created, method-sets are +// computed, and wrapper methods are synthesized. +// ssa.Packages are created in arbitrary order. +// +// In the BUILD phase (builder.go), the builder traverses the AST of +// each Go source function and generates SSA instructions for the +// function body. Initializer expressions for package-level variables +// are emitted to the package's init() function in the order specified +// by go/types.Info.InitOrder, then code for each function in the +// package is generated in lexical order. +// The BUILD phases for distinct packages are independent and are +// executed in parallel. +// +// TODO(adonovan): indeed, building functions is now embarrassingly parallel. +// Audit for concurrency then benchmark using more goroutines. +// +// The builder's and Program's indices (maps) are populated and +// mutated during the CREATE phase, but during the BUILD phase they +// remain constant. The sole exception is Prog.methodSets and its +// related maps, which are protected by a dedicated mutex. + +import ( + "fmt" + "go/ast" + "go/constant" + "go/token" + "go/types" + "os" + "sync" +) + +type opaqueType struct { + types.Type + name string +} + +func (t *opaqueType) String() string { return t.name } + +var ( + varOk = newVar("ok", tBool) + varIndex = newVar("index", tInt) + + // Type constants. + tBool = types.Typ[types.Bool] + tByte = types.Typ[types.Byte] + tInt = types.Typ[types.Int] + tInvalid = types.Typ[types.Invalid] + tString = types.Typ[types.String] + tUntypedNil = types.Typ[types.UntypedNil] + tRangeIter = &opaqueType{nil, "iter"} // the type of all "range" iterators + tEface = types.NewInterface(nil, nil).Complete() + + // SSA Value constants. + vZero = intConst(0) + vOne = intConst(1) + vTrue = NewConst(constant.MakeBool(true), tBool) +) + +// builder holds state associated with the package currently being built. +// Its methods contain all the logic for AST-to-SSA conversion. +type builder struct{} + +// cond emits to fn code to evaluate boolean condition e and jump +// to t or f depending on its value, performing various simplifications. +// +// Postcondition: fn.currentBlock is nil. 
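Editorial note (not part of the vendored file): the CREATE/BUILD split described in the package comment is what callers see as two steps — construct the program, then Build it. A hedged sketch of a typical driver using the companion golang.org/x/tools/go/ssa/ssautil API (not defined in this file):

package main

import (
	"fmt"
	"log"

	"golang.org/x/tools/go/packages"
	"golang.org/x/tools/go/ssa"
	"golang.org/x/tools/go/ssa/ssautil"
)

func main() {
	cfg := &packages.Config{
		Mode: packages.NeedName | packages.NeedFiles | packages.NeedCompiledGoFiles |
			packages.NeedImports | packages.NeedDeps | packages.NeedTypes |
			packages.NeedSyntax | packages.NeedTypesInfo,
	}
	initial, err := packages.Load(cfg, "./...")
	if err != nil {
		log.Fatal(err)
	}

	// CREATE phase: one ssa.Package per well-typed packages.Package.
	prog, ssaPkgs := ssautil.Packages(initial, ssa.SanityCheckFunctions)

	// BUILD phase: generate SSA instructions for every function body.
	prog.Build()

	for _, p := range ssaPkgs {
		if p != nil { // nil for packages that failed to type-check
			fmt.Println(p.Pkg.Path())
		}
	}
}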
+// +func (b *builder) cond(fn *Function, e ast.Expr, t, f *BasicBlock) { + switch e := e.(type) { + case *ast.ParenExpr: + b.cond(fn, e.X, t, f) + return + + case *ast.BinaryExpr: + switch e.Op { + case token.LAND: + ltrue := fn.newBasicBlock("cond.true") + b.cond(fn, e.X, ltrue, f) + fn.currentBlock = ltrue + b.cond(fn, e.Y, t, f) + return + + case token.LOR: + lfalse := fn.newBasicBlock("cond.false") + b.cond(fn, e.X, t, lfalse) + fn.currentBlock = lfalse + b.cond(fn, e.Y, t, f) + return + } + + case *ast.UnaryExpr: + if e.Op == token.NOT { + b.cond(fn, e.X, f, t) + return + } + } + + // A traditional compiler would simplify "if false" (etc) here + // but we do not, for better fidelity to the source code. + // + // The value of a constant condition may be platform-specific, + // and may cause blocks that are reachable in some configuration + // to be hidden from subsequent analyses such as bug-finding tools. + emitIf(fn, b.expr(fn, e), t, f) +} + +// logicalBinop emits code to fn to evaluate e, a &&- or +// ||-expression whose reified boolean value is wanted. +// The value is returned. +// +func (b *builder) logicalBinop(fn *Function, e *ast.BinaryExpr) Value { + rhs := fn.newBasicBlock("binop.rhs") + done := fn.newBasicBlock("binop.done") + + // T(e) = T(e.X) = T(e.Y) after untyped constants have been + // eliminated. + // TODO(adonovan): not true; MyBool==MyBool yields UntypedBool. + t := fn.Pkg.typeOf(e) + + var short Value // value of the short-circuit path + switch e.Op { + case token.LAND: + b.cond(fn, e.X, rhs, done) + short = NewConst(constant.MakeBool(false), t) + + case token.LOR: + b.cond(fn, e.X, done, rhs) + short = NewConst(constant.MakeBool(true), t) + } + + // Is rhs unreachable? + if rhs.Preds == nil { + // Simplify false&&y to false, true||y to true. + fn.currentBlock = done + return short + } + + // Is done unreachable? + if done.Preds == nil { + // Simplify true&&y (or false||y) to y. + fn.currentBlock = rhs + return b.expr(fn, e.Y) + } + + // All edges from e.X to done carry the short-circuit value. + var edges []Value + for range done.Preds { + edges = append(edges, short) + } + + // The edge from e.Y to done carries the value of e.Y. + fn.currentBlock = rhs + edges = append(edges, b.expr(fn, e.Y)) + emitJump(fn, done) + fn.currentBlock = done + + phi := &Phi{Edges: edges, Comment: e.Op.String()} + phi.pos = e.OpPos + phi.typ = t + return done.emit(phi) +} + +// exprN lowers a multi-result expression e to SSA form, emitting code +// to fn and returning a single Value whose type is a *types.Tuple. +// The caller must access the components via Extract. +// +// Multi-result expressions include CallExprs in a multi-value +// assignment or return statement, and "value,ok" uses of +// TypeAssertExpr, IndexExpr (when X is a map), and UnaryExpr (when Op +// is token.ARROW). +// +func (b *builder) exprN(fn *Function, e ast.Expr) Value { + typ := fn.Pkg.typeOf(e).(*types.Tuple) + switch e := e.(type) { + case *ast.ParenExpr: + return b.exprN(fn, e.X) + + case *ast.CallExpr: + // Currently, no built-in function nor type conversion + // has multiple results, so we can avoid some of the + // cases for single-valued CallExpr. 
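Editorial note (not part of the vendored file): cond and logicalBinop above lower short-circuit && and || into extra blocks joined by a φ-node. One way to see the result, modeled on the ssa package's own example; the source string and function name are illustrative:

package main

import (
	"go/ast"
	"go/importer"
	"go/parser"
	"go/token"
	"go/types"
	"log"
	"os"

	"golang.org/x/tools/go/ssa"
	"golang.org/x/tools/go/ssa/ssautil"
)

const src = `package p
func And(a, b bool) bool { return a && b }
`

func main() {
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "p.go", src, 0)
	if err != nil {
		log.Fatal(err)
	}
	pkg := types.NewPackage("p", "")
	ssaPkg, _, err := ssautil.BuildPackage(
		&types.Config{Importer: importer.Default()},
		fset, pkg, []*ast.File{f}, ssa.SanityCheckFunctions)
	if err != nil {
		log.Fatal(err)
	}
	// The listing shows the binop.rhs / binop.done blocks and a phi that
	// merges the short-circuit constant with the value of b.
	ssaPkg.Func("And").WriteTo(os.Stdout)
}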
+ var c Call + b.setCall(fn, e, &c.Call) + c.typ = typ + return fn.emit(&c) + + case *ast.IndexExpr: + mapt := fn.Pkg.typeOf(e.X).Underlying().(*types.Map) + lookup := &Lookup{ + X: b.expr(fn, e.X), + Index: emitConv(fn, b.expr(fn, e.Index), mapt.Key()), + CommaOk: true, + } + lookup.setType(typ) + lookup.setPos(e.Lbrack) + return fn.emit(lookup) + + case *ast.TypeAssertExpr: + return emitTypeTest(fn, b.expr(fn, e.X), typ.At(0).Type(), e.Lparen) + + case *ast.UnaryExpr: // must be receive <- + unop := &UnOp{ + Op: token.ARROW, + X: b.expr(fn, e.X), + CommaOk: true, + } + unop.setType(typ) + unop.setPos(e.OpPos) + return fn.emit(unop) + } + panic(fmt.Sprintf("exprN(%T) in %s", e, fn)) +} + +// builtin emits to fn SSA instructions to implement a call to the +// built-in function obj with the specified arguments +// and return type. It returns the value defined by the result. +// +// The result is nil if no special handling was required; in this case +// the caller should treat this like an ordinary library function +// call. +// +func (b *builder) builtin(fn *Function, obj *types.Builtin, args []ast.Expr, typ types.Type, pos token.Pos) Value { + switch obj.Name() { + case "make": + switch typ.Underlying().(type) { + case *types.Slice: + n := b.expr(fn, args[1]) + m := n + if len(args) == 3 { + m = b.expr(fn, args[2]) + } + if m, ok := m.(*Const); ok { + // treat make([]T, n, m) as new([m]T)[:n] + cap := m.Int64() + at := types.NewArray(typ.Underlying().(*types.Slice).Elem(), cap) + alloc := emitNew(fn, at, pos) + alloc.Comment = "makeslice" + v := &Slice{ + X: alloc, + High: n, + } + v.setPos(pos) + v.setType(typ) + return fn.emit(v) + } + v := &MakeSlice{ + Len: n, + Cap: m, + } + v.setPos(pos) + v.setType(typ) + return fn.emit(v) + + case *types.Map: + var res Value + if len(args) == 2 { + res = b.expr(fn, args[1]) + } + v := &MakeMap{Reserve: res} + v.setPos(pos) + v.setType(typ) + return fn.emit(v) + + case *types.Chan: + var sz Value = vZero + if len(args) == 2 { + sz = b.expr(fn, args[1]) + } + v := &MakeChan{Size: sz} + v.setPos(pos) + v.setType(typ) + return fn.emit(v) + } + + case "new": + alloc := emitNew(fn, deref(typ), pos) + alloc.Comment = "new" + return alloc + + case "len", "cap": + // Special case: len or cap of an array or *array is + // based on the type, not the value which may be nil. + // We must still evaluate the value, though. (If it + // was side-effect free, the whole call would have + // been constant-folded.) + t := deref(fn.Pkg.typeOf(args[0])).Underlying() + if at, ok := t.(*types.Array); ok { + b.expr(fn, args[0]) // for effects only + return intConst(at.Len()) + } + // Otherwise treat as normal. + + case "panic": + fn.emit(&Panic{ + X: emitConv(fn, b.expr(fn, args[0]), tEface), + pos: pos, + }) + fn.currentBlock = fn.newBasicBlock("unreachable") + return vTrue // any non-nil Value will do + } + return nil // treat all others as a regular function call +} + +// addr lowers a single-result addressable expression e to SSA form, +// emitting code to fn and returning the location (an lvalue) defined +// by the expression. +// +// If escaping is true, addr marks the base variable of the +// addressable expression e as being a potentially escaping pointer +// value. For example, in this code: +// +// a := A{ +// b: [1]B{B{c: 1}} +// } +// return &a.b[0].c +// +// the application of & causes a.b[0].c to have its address taken, +// which means that ultimately the local variable a must be +// heap-allocated. This is a simple but very conservative escape +// analysis. 
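Editorial note (not part of the vendored file): the "make" case above rewrites make([]T, n, m) with a constant m into new([m]T)[:n]. The two forms are observably equivalent, which a short program can confirm:

package main

import "fmt"

func main() {
	n := 2

	a := make([]int, n, 4) // what the source says
	b := new([4]int)[:n]   // what the builder emits when the cap is constant

	fmt.Println(len(a), cap(a)) // 2 4
	fmt.Println(len(b), cap(b)) // 2 4
}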
+// +// Operations forming potentially escaping pointers include: +// - &x, including when implicit in method call or composite literals. +// - a[:] iff a is an array (not *array) +// - references to variables in lexically enclosing functions. +// +func (b *builder) addr(fn *Function, e ast.Expr, escaping bool) lvalue { + switch e := e.(type) { + case *ast.Ident: + if isBlankIdent(e) { + return blank{} + } + obj := fn.Pkg.objectOf(e) + v := fn.Prog.packageLevelValue(obj) // var (address) + if v == nil { + v = fn.lookup(obj, escaping) + } + return &address{addr: v, pos: e.Pos(), expr: e} + + case *ast.CompositeLit: + t := deref(fn.Pkg.typeOf(e)) + var v *Alloc + if escaping { + v = emitNew(fn, t, e.Lbrace) + } else { + v = fn.addLocal(t, e.Lbrace) + } + v.Comment = "complit" + var sb storebuf + b.compLit(fn, v, e, true, &sb) + sb.emit(fn) + return &address{addr: v, pos: e.Lbrace, expr: e} + + case *ast.ParenExpr: + return b.addr(fn, e.X, escaping) + + case *ast.SelectorExpr: + sel, ok := fn.Pkg.info.Selections[e] + if !ok { + // qualified identifier + return b.addr(fn, e.Sel, escaping) + } + if sel.Kind() != types.FieldVal { + panic(sel) + } + wantAddr := true + v := b.receiver(fn, e.X, wantAddr, escaping, sel) + last := len(sel.Index()) - 1 + return &address{ + addr: emitFieldSelection(fn, v, sel.Index()[last], true, e.Sel), + pos: e.Sel.Pos(), + expr: e.Sel, + } + + case *ast.IndexExpr: + var x Value + var et types.Type + switch t := fn.Pkg.typeOf(e.X).Underlying().(type) { + case *types.Array: + x = b.addr(fn, e.X, escaping).address(fn) + et = types.NewPointer(t.Elem()) + case *types.Pointer: // *array + x = b.expr(fn, e.X) + et = types.NewPointer(t.Elem().Underlying().(*types.Array).Elem()) + case *types.Slice: + x = b.expr(fn, e.X) + et = types.NewPointer(t.Elem()) + case *types.Map: + return &element{ + m: b.expr(fn, e.X), + k: emitConv(fn, b.expr(fn, e.Index), t.Key()), + t: t.Elem(), + pos: e.Lbrack, + } + default: + panic("unexpected container type in IndexExpr: " + t.String()) + } + v := &IndexAddr{ + X: x, + Index: emitConv(fn, b.expr(fn, e.Index), tInt), + } + v.setPos(e.Lbrack) + v.setType(et) + return &address{addr: fn.emit(v), pos: e.Lbrack, expr: e} + + case *ast.StarExpr: + return &address{addr: b.expr(fn, e.X), pos: e.Star, expr: e} + } + + panic(fmt.Sprintf("unexpected address expression: %T", e)) +} + +type store struct { + lhs lvalue + rhs Value +} + +type storebuf struct{ stores []store } + +func (sb *storebuf) store(lhs lvalue, rhs Value) { + sb.stores = append(sb.stores, store{lhs, rhs}) +} + +func (sb *storebuf) emit(fn *Function) { + for _, s := range sb.stores { + s.lhs.store(fn, s.rhs) + } +} + +// assign emits to fn code to initialize the lvalue loc with the value +// of expression e. If isZero is true, assign assumes that loc holds +// the zero value for its type. +// +// This is equivalent to loc.store(fn, b.expr(fn, e)), but may generate +// better code in some cases, e.g., for composite literals in an +// addressable location. +// +// If sb is not nil, assign generates code to evaluate expression e, but +// not to update loc. Instead, the necessary stores are appended to the +// storebuf sb so that they can be executed later. This allows correct +// in-place update of existing variables when the RHS is a composite +// literal that may reference parts of the LHS. +// +func (b *builder) assign(fn *Function, loc lvalue, e ast.Expr, isZero bool, sb *storebuf) { + // Can we initialize it in place? 
+ if e, ok := unparen(e).(*ast.CompositeLit); ok { + // A CompositeLit never evaluates to a pointer, + // so if the type of the location is a pointer, + // an &-operation is implied. + if _, ok := loc.(blank); !ok { // avoid calling blank.typ() + if isPointer(loc.typ()) { + ptr := b.addr(fn, e, true).address(fn) + // copy address + if sb != nil { + sb.store(loc, ptr) + } else { + loc.store(fn, ptr) + } + return + } + } + + if _, ok := loc.(*address); ok { + if isInterface(loc.typ()) { + // e.g. var x interface{} = T{...} + // Can't in-place initialize an interface value. + // Fall back to copying. + } else { + // x = T{...} or x := T{...} + addr := loc.address(fn) + if sb != nil { + b.compLit(fn, addr, e, isZero, sb) + } else { + var sb storebuf + b.compLit(fn, addr, e, isZero, &sb) + sb.emit(fn) + } + + // Subtle: emit debug ref for aggregate types only; + // slice and map are handled by store ops in compLit. + switch loc.typ().Underlying().(type) { + case *types.Struct, *types.Array: + emitDebugRef(fn, e, addr, true) + } + + return + } + } + } + + // simple case: just copy + rhs := b.expr(fn, e) + if sb != nil { + sb.store(loc, rhs) + } else { + loc.store(fn, rhs) + } +} + +// expr lowers a single-result expression e to SSA form, emitting code +// to fn and returning the Value defined by the expression. +// +func (b *builder) expr(fn *Function, e ast.Expr) Value { + e = unparen(e) + + tv := fn.Pkg.info.Types[e] + + // Is expression a constant? + if tv.Value != nil { + return NewConst(tv.Value, tv.Type) + } + + var v Value + if tv.Addressable() { + // Prefer pointer arithmetic ({Index,Field}Addr) followed + // by Load over subelement extraction (e.g. Index, Field), + // to avoid large copies. + v = b.addr(fn, e, false).load(fn) + } else { + v = b.expr0(fn, e, tv) + } + if fn.debugInfo() { + emitDebugRef(fn, e, v, false) + } + return v +} + +func (b *builder) expr0(fn *Function, e ast.Expr, tv types.TypeAndValue) Value { + switch e := e.(type) { + case *ast.BasicLit: + panic("non-constant BasicLit") // unreachable + + case *ast.FuncLit: + fn2 := &Function{ + name: fmt.Sprintf("%s$%d", fn.Name(), 1+len(fn.AnonFuncs)), + Signature: fn.Pkg.typeOf(e.Type).Underlying().(*types.Signature), + pos: e.Type.Func, + parent: fn, + Pkg: fn.Pkg, + Prog: fn.Prog, + syntax: e, + } + fn.AnonFuncs = append(fn.AnonFuncs, fn2) + b.buildFunction(fn2) + if fn2.FreeVars == nil { + return fn2 + } + v := &MakeClosure{Fn: fn2} + v.setType(tv.Type) + for _, fv := range fn2.FreeVars { + v.Bindings = append(v.Bindings, fv.outer) + fv.outer = nil + } + return fn.emit(v) + + case *ast.TypeAssertExpr: // single-result form only + return emitTypeAssert(fn, b.expr(fn, e.X), tv.Type, e.Lparen) + + case *ast.CallExpr: + if fn.Pkg.info.Types[e.Fun].IsType() { + // Explicit type conversion, e.g. string(x) or big.Int(x) + x := b.expr(fn, e.Args[0]) + y := emitConv(fn, x, tv.Type) + if y != x { + switch y := y.(type) { + case *Convert: + y.pos = e.Lparen + case *ChangeType: + y.pos = e.Lparen + case *MakeInterface: + y.pos = e.Lparen + } + } + return y + } + // Call to "intrinsic" built-ins, e.g. new, make, panic. + if id, ok := unparen(e.Fun).(*ast.Ident); ok { + if obj, ok := fn.Pkg.info.Uses[id].(*types.Builtin); ok { + if v := b.builtin(fn, obj, e.Args, tv.Type, e.Lparen); v != nil { + return v + } + } + } + // Regular function call. + var v Call + b.setCall(fn, e, &v.Call) + v.setType(tv.Type) + return fn.emit(&v) + + case *ast.UnaryExpr: + switch e.Op { + case token.AND: // &X --- potentially escaping. 
+ addr := b.addr(fn, e.X, true) + if _, ok := unparen(e.X).(*ast.StarExpr); ok { + // &*p must panic if p is nil (http://golang.org/s/go12nil). + // For simplicity, we'll just (suboptimally) rely + // on the side effects of a load. + // TODO(adonovan): emit dedicated nilcheck. + addr.load(fn) + } + return addr.address(fn) + case token.ADD: + return b.expr(fn, e.X) + case token.NOT, token.ARROW, token.SUB, token.XOR: // ! <- - ^ + v := &UnOp{ + Op: e.Op, + X: b.expr(fn, e.X), + } + v.setPos(e.OpPos) + v.setType(tv.Type) + return fn.emit(v) + default: + panic(e.Op) + } + + case *ast.BinaryExpr: + switch e.Op { + case token.LAND, token.LOR: + return b.logicalBinop(fn, e) + case token.SHL, token.SHR: + fallthrough + case token.ADD, token.SUB, token.MUL, token.QUO, token.REM, token.AND, token.OR, token.XOR, token.AND_NOT: + return emitArith(fn, e.Op, b.expr(fn, e.X), b.expr(fn, e.Y), tv.Type, e.OpPos) + + case token.EQL, token.NEQ, token.GTR, token.LSS, token.LEQ, token.GEQ: + cmp := emitCompare(fn, e.Op, b.expr(fn, e.X), b.expr(fn, e.Y), e.OpPos) + // The type of x==y may be UntypedBool. + return emitConv(fn, cmp, types.Default(tv.Type)) + default: + panic("illegal op in BinaryExpr: " + e.Op.String()) + } + + case *ast.SliceExpr: + var low, high, max Value + var x Value + switch fn.Pkg.typeOf(e.X).Underlying().(type) { + case *types.Array: + // Potentially escaping. + x = b.addr(fn, e.X, true).address(fn) + case *types.Basic, *types.Slice, *types.Pointer: // *array + x = b.expr(fn, e.X) + default: + panic("unreachable") + } + if e.High != nil { + high = b.expr(fn, e.High) + } + if e.Low != nil { + low = b.expr(fn, e.Low) + } + if e.Slice3 { + max = b.expr(fn, e.Max) + } + v := &Slice{ + X: x, + Low: low, + High: high, + Max: max, + } + v.setPos(e.Lbrack) + v.setType(tv.Type) + return fn.emit(v) + + case *ast.Ident: + obj := fn.Pkg.info.Uses[e] + // Universal built-in or nil? + switch obj := obj.(type) { + case *types.Builtin: + return &Builtin{name: obj.Name(), sig: tv.Type.(*types.Signature)} + case *types.Nil: + return nilConst(tv.Type) + } + // Package-level func or var? + if v := fn.Prog.packageLevelValue(obj); v != nil { + if _, ok := obj.(*types.Var); ok { + return emitLoad(fn, v) // var (address) + } + return v // (func) + } + // Local var. + return emitLoad(fn, fn.lookup(obj, false)) // var (address) + + case *ast.SelectorExpr: + sel, ok := fn.Pkg.info.Selections[e] + if !ok { + // builtin unsafe.{Add,Slice} + if obj, ok := fn.Pkg.info.Uses[e.Sel].(*types.Builtin); ok { + return &Builtin{name: obj.Name(), sig: tv.Type.(*types.Signature)} + } + // qualified identifier + return b.expr(fn, e.Sel) + } + switch sel.Kind() { + case types.MethodExpr: + // (*T).f or T.f, the method f from the method-set of type T. + // The result is a "thunk". + return emitConv(fn, makeThunk(fn.Prog, sel), tv.Type) + + case types.MethodVal: + // e.f where e is an expression and f is a method. + // The result is a "bound". + obj := sel.Obj().(*types.Func) + rt := recvType(obj) + wantAddr := isPointer(rt) + escaping := true + v := b.receiver(fn, e.X, wantAddr, escaping, sel) + if isInterface(rt) { + // If v has interface type I, + // we must emit a check that v is non-nil. + // We use: typeassert v.(I). 
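Editorial note (not part of the vendored file): the MethodExpr ("thunk") and MethodVal ("bound") cases above correspond to the two ways Go source abstracts over a method. A small illustration of the distinction (type and names are invented):

package main

import "fmt"

type Counter struct{ n int }

func (c Counter) Value() int { return c.n }

func main() {
	// Method expression: the receiver becomes an explicit first parameter.
	// The builder wraps this form as a "thunk".
	f := Counter.Value
	fmt.Println(f(Counter{n: 3})) // 3

	// Method value: the receiver is evaluated and bound immediately.
	// The builder wraps this form as a "bound" closure.
	c := Counter{n: 7}
	g := c.Value
	c.n = 8          // does not affect g: the receiver was copied at bind time
	fmt.Println(g()) // 7
}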
+ emitTypeAssert(fn, v, rt, token.NoPos) + } + c := &MakeClosure{ + Fn: makeBound(fn.Prog, obj), + Bindings: []Value{v}, + } + c.setPos(e.Sel.Pos()) + c.setType(tv.Type) + return fn.emit(c) + + case types.FieldVal: + indices := sel.Index() + last := len(indices) - 1 + v := b.expr(fn, e.X) + v = emitImplicitSelections(fn, v, indices[:last]) + v = emitFieldSelection(fn, v, indices[last], false, e.Sel) + return v + } + + panic("unexpected expression-relative selector") + + case *ast.IndexExpr: + switch t := fn.Pkg.typeOf(e.X).Underlying().(type) { + case *types.Array: + // Non-addressable array (in a register). + v := &Index{ + X: b.expr(fn, e.X), + Index: emitConv(fn, b.expr(fn, e.Index), tInt), + } + v.setPos(e.Lbrack) + v.setType(t.Elem()) + return fn.emit(v) + + case *types.Map: + // Maps are not addressable. + mapt := fn.Pkg.typeOf(e.X).Underlying().(*types.Map) + v := &Lookup{ + X: b.expr(fn, e.X), + Index: emitConv(fn, b.expr(fn, e.Index), mapt.Key()), + } + v.setPos(e.Lbrack) + v.setType(mapt.Elem()) + return fn.emit(v) + + case *types.Basic: // => string + // Strings are not addressable. + v := &Lookup{ + X: b.expr(fn, e.X), + Index: b.expr(fn, e.Index), + } + v.setPos(e.Lbrack) + v.setType(tByte) + return fn.emit(v) + + case *types.Slice, *types.Pointer: // *array + // Addressable slice/array; use IndexAddr and Load. + return b.addr(fn, e, false).load(fn) + + default: + panic("unexpected container type in IndexExpr: " + t.String()) + } + + case *ast.CompositeLit, *ast.StarExpr: + // Addressable types (lvalues) + return b.addr(fn, e, false).load(fn) + } + + panic(fmt.Sprintf("unexpected expr: %T", e)) +} + +// stmtList emits to fn code for all statements in list. +func (b *builder) stmtList(fn *Function, list []ast.Stmt) { + for _, s := range list { + b.stmt(fn, s) + } +} + +// receiver emits to fn code for expression e in the "receiver" +// position of selection e.f (where f may be a field or a method) and +// returns the effective receiver after applying the implicit field +// selections of sel. +// +// wantAddr requests that the result is an an address. If +// !sel.Indirect(), this may require that e be built in addr() mode; it +// must thus be addressable. +// +// escaping is defined as per builder.addr(). +// +func (b *builder) receiver(fn *Function, e ast.Expr, wantAddr, escaping bool, sel *types.Selection) Value { + var v Value + if wantAddr && !sel.Indirect() && !isPointer(fn.Pkg.typeOf(e)) { + v = b.addr(fn, e, escaping).address(fn) + } else { + v = b.expr(fn, e) + } + + last := len(sel.Index()) - 1 + v = emitImplicitSelections(fn, v, sel.Index()[:last]) + if !wantAddr && isPointer(v.Type()) { + v = emitLoad(fn, v) + } + return v +} + +// setCallFunc populates the function parts of a CallCommon structure +// (Func, Method, Recv, Args[0]) based on the kind of invocation +// occurring in e. +// +func (b *builder) setCallFunc(fn *Function, e *ast.CallExpr, c *CallCommon) { + c.pos = e.Lparen + + // Is this a method call? + if selector, ok := unparen(e.Fun).(*ast.SelectorExpr); ok { + sel, ok := fn.Pkg.info.Selections[selector] + if ok && sel.Kind() == types.MethodVal { + obj := sel.Obj().(*types.Func) + recv := recvType(obj) + wantAddr := isPointer(recv) + escaping := true + v := b.receiver(fn, selector.X, wantAddr, escaping, sel) + if isInterface(recv) { + // Invoke-mode call. + c.Value = v + c.Method = obj + } else { + // "Call"-mode call. 
+ c.Value = fn.Prog.declaredFunc(obj) + c.Args = append(c.Args, v) + } + return + } + + // sel.Kind()==MethodExpr indicates T.f() or (*T).f(): + // a statically dispatched call to the method f in the + // method-set of T or *T. T may be an interface. + // + // e.Fun would evaluate to a concrete method, interface + // wrapper function, or promotion wrapper. + // + // For now, we evaluate it in the usual way. + // + // TODO(adonovan): opt: inline expr() here, to make the + // call static and to avoid generation of wrappers. + // It's somewhat tricky as it may consume the first + // actual parameter if the call is "invoke" mode. + // + // Examples: + // type T struct{}; func (T) f() {} // "call" mode + // type T interface { f() } // "invoke" mode + // + // type S struct{ T } + // + // var s S + // S.f(s) + // (*S).f(&s) + // + // Suggested approach: + // - consume the first actual parameter expression + // and build it with b.expr(). + // - apply implicit field selections. + // - use MethodVal logic to populate fields of c. + } + + // Evaluate the function operand in the usual way. + c.Value = b.expr(fn, e.Fun) +} + +// emitCallArgs emits to f code for the actual parameters of call e to +// a (possibly built-in) function of effective type sig. +// The argument values are appended to args, which is then returned. +// +func (b *builder) emitCallArgs(fn *Function, sig *types.Signature, e *ast.CallExpr, args []Value) []Value { + // f(x, y, z...): pass slice z straight through. + if e.Ellipsis != 0 { + for i, arg := range e.Args { + v := emitConv(fn, b.expr(fn, arg), sig.Params().At(i).Type()) + args = append(args, v) + } + return args + } + + offset := len(args) // 1 if call has receiver, 0 otherwise + + // Evaluate actual parameter expressions. + // + // If this is a chained call of the form f(g()) where g has + // multiple return values (MRV), they are flattened out into + // args; a suffix of them may end up in a varargs slice. + for _, arg := range e.Args { + v := b.expr(fn, arg) + if ttuple, ok := v.Type().(*types.Tuple); ok { // MRV chain + for i, n := 0, ttuple.Len(); i < n; i++ { + args = append(args, emitExtract(fn, v, i)) + } + } else { + args = append(args, v) + } + } + + // Actual->formal assignability conversions for normal parameters. + np := sig.Params().Len() // number of normal parameters + if sig.Variadic() { + np-- + } + for i := 0; i < np; i++ { + args[offset+i] = emitConv(fn, args[offset+i], sig.Params().At(i).Type()) + } + + // Actual->formal assignability conversions for variadic parameter, + // and construction of slice. + if sig.Variadic() { + varargs := args[offset+np:] + st := sig.Params().At(np).Type().(*types.Slice) + vt := st.Elem() + if len(varargs) == 0 { + args = append(args, nilConst(st)) + } else { + // Replace a suffix of args with a slice containing it. + at := types.NewArray(vt, int64(len(varargs))) + a := emitNew(fn, at, token.NoPos) + a.setPos(e.Rparen) + a.Comment = "varargs" + for i, arg := range varargs { + iaddr := &IndexAddr{ + X: a, + Index: intConst(int64(i)), + } + iaddr.setType(types.NewPointer(vt)) + fn.emit(iaddr) + emitStore(fn, iaddr, arg, arg.Pos()) + } + s := &Slice{X: a} + s.setType(st) + args[offset+np] = fn.emit(s) + args = args[:offset+np+1] + } + } + return args +} + +// setCall emits to fn code to evaluate all the parameters of a function +// call e, and populates *c with those values. +// +func (b *builder) setCall(fn *Function, e *ast.CallExpr, c *CallCommon) { + // First deal with the f(...) part and optional receiver. 
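Editorial note (not part of the vendored file): emitCallArgs above distinguishes passing an existing slice straight through (f(xs...)) from allocating a fresh "varargs" backing array for loose arguments. The difference is visible at the language level:

package main

import "fmt"

func head(nums ...int) []int { return nums }

func main() {
	xs := []int{1, 2, 3}

	a := head(xs...)   // the slice is passed through unchanged
	b := head(1, 2, 3) // a new varargs array/slice is allocated for the call

	a[0] = 99
	fmt.Println(xs[0]) // 99: a aliases xs
	fmt.Println(b[0])  // 1: b has its own backing array
}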
+ b.setCallFunc(fn, e, c) + + // Then append the other actual parameters. + sig, _ := fn.Pkg.typeOf(e.Fun).Underlying().(*types.Signature) + if sig == nil { + panic(fmt.Sprintf("no signature for call of %s", e.Fun)) + } + c.Args = b.emitCallArgs(fn, sig, e, c.Args) +} + +// assignOp emits to fn code to perform loc = val. +func (b *builder) assignOp(fn *Function, loc lvalue, val Value, op token.Token, pos token.Pos) { + oldv := loc.load(fn) + loc.store(fn, emitArith(fn, op, oldv, emitConv(fn, val, oldv.Type()), loc.typ(), pos)) +} + +// localValueSpec emits to fn code to define all of the vars in the +// function-local ValueSpec, spec. +// +func (b *builder) localValueSpec(fn *Function, spec *ast.ValueSpec) { + switch { + case len(spec.Values) == len(spec.Names): + // e.g. var x, y = 0, 1 + // 1:1 assignment + for i, id := range spec.Names { + if !isBlankIdent(id) { + fn.addLocalForIdent(id) + } + lval := b.addr(fn, id, false) // non-escaping + b.assign(fn, lval, spec.Values[i], true, nil) + } + + case len(spec.Values) == 0: + // e.g. var x, y int + // Locals are implicitly zero-initialized. + for _, id := range spec.Names { + if !isBlankIdent(id) { + lhs := fn.addLocalForIdent(id) + if fn.debugInfo() { + emitDebugRef(fn, id, lhs, true) + } + } + } + + default: + // e.g. var x, y = pos() + tuple := b.exprN(fn, spec.Values[0]) + for i, id := range spec.Names { + if !isBlankIdent(id) { + fn.addLocalForIdent(id) + lhs := b.addr(fn, id, false) // non-escaping + lhs.store(fn, emitExtract(fn, tuple, i)) + } + } + } +} + +// assignStmt emits code to fn for a parallel assignment of rhss to lhss. +// isDef is true if this is a short variable declaration (:=). +// +// Note the similarity with localValueSpec. +// +func (b *builder) assignStmt(fn *Function, lhss, rhss []ast.Expr, isDef bool) { + // Side effects of all LHSs and RHSs must occur in left-to-right order. + lvals := make([]lvalue, len(lhss)) + isZero := make([]bool, len(lhss)) + for i, lhs := range lhss { + var lval lvalue = blank{} + if !isBlankIdent(lhs) { + if isDef { + if obj := fn.Pkg.info.Defs[lhs.(*ast.Ident)]; obj != nil { + fn.addNamedLocal(obj) + isZero[i] = true + } + } + lval = b.addr(fn, lhs, false) // non-escaping + } + lvals[i] = lval + } + if len(lhss) == len(rhss) { + // Simple assignment: x = f() (!isDef) + // Parallel assignment: x, y = f(), g() (!isDef) + // or short var decl: x, y := f(), g() (isDef) + // + // In all cases, the RHSs may refer to the LHSs, + // so we need a storebuf. + var sb storebuf + for i := range rhss { + b.assign(fn, lvals[i], rhss[i], isZero[i], &sb) + } + sb.emit(fn) + } else { + // e.g. x, y = pos() + tuple := b.exprN(fn, rhss[0]) + emitDebugRef(fn, rhss[0], tuple, false) + for i, lval := range lvals { + lval.store(fn, emitExtract(fn, tuple, i)) + } + } +} + +// arrayLen returns the length of the array whose composite literal elements are elts. +func (b *builder) arrayLen(fn *Function, elts []ast.Expr) int64 { + var max int64 = -1 + var i int64 = -1 + for _, e := range elts { + if kv, ok := e.(*ast.KeyValueExpr); ok { + i = b.expr(fn, kv.Key).(*Const).Int64() + } else { + i++ + } + if i > max { + max = i + } + } + return max + 1 +} + +// compLit emits to fn code to initialize a composite literal e at +// address addr with type typ. +// +// Nested composite literals are recursively initialized in place +// where possible. If isZero is true, compLit assumes that addr +// holds the zero value for typ. 
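Editorial note (not part of the vendored file): assignStmt above routes all stores through a storebuf so that every RHS is evaluated before any LHS is updated, which is what Go's parallel assignment requires. For example:

package main

import "fmt"

func main() {
	x, y := 1, 2
	x, y = y, x+y     // both RHSs see the old values of x and y
	fmt.Println(x, y) // 2 3

	type T struct{ a, b int }
	t := T{a: 1, b: 2}
	t = T{a: t.b, b: t.a} // the composite literal reads t before t is overwritten
	fmt.Println(t)        // {2 1}
}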
+// +// Because the elements of a composite literal may refer to the +// variables being updated, as in the second line below, +// x := T{a: 1} +// x = T{a: x.a} +// all the reads must occur before all the writes. Thus all stores to +// loc are emitted to the storebuf sb for later execution. +// +// A CompositeLit may have pointer type only in the recursive (nested) +// case when the type name is implicit. e.g. in []*T{{}}, the inner +// literal has type *T behaves like &T{}. +// In that case, addr must hold a T, not a *T. +// +func (b *builder) compLit(fn *Function, addr Value, e *ast.CompositeLit, isZero bool, sb *storebuf) { + typ := deref(fn.Pkg.typeOf(e)) + switch t := typ.Underlying().(type) { + case *types.Struct: + if !isZero && len(e.Elts) != t.NumFields() { + // memclear + sb.store(&address{addr, e.Lbrace, nil}, + zeroValue(fn, deref(addr.Type()))) + isZero = true + } + for i, e := range e.Elts { + fieldIndex := i + pos := e.Pos() + if kv, ok := e.(*ast.KeyValueExpr); ok { + fname := kv.Key.(*ast.Ident).Name + for i, n := 0, t.NumFields(); i < n; i++ { + sf := t.Field(i) + if sf.Name() == fname { + fieldIndex = i + pos = kv.Colon + e = kv.Value + break + } + } + } + sf := t.Field(fieldIndex) + faddr := &FieldAddr{ + X: addr, + Field: fieldIndex, + } + faddr.setType(types.NewPointer(sf.Type())) + fn.emit(faddr) + b.assign(fn, &address{addr: faddr, pos: pos, expr: e}, e, isZero, sb) + } + + case *types.Array, *types.Slice: + var at *types.Array + var array Value + switch t := t.(type) { + case *types.Slice: + at = types.NewArray(t.Elem(), b.arrayLen(fn, e.Elts)) + alloc := emitNew(fn, at, e.Lbrace) + alloc.Comment = "slicelit" + array = alloc + case *types.Array: + at = t + array = addr + + if !isZero && int64(len(e.Elts)) != at.Len() { + // memclear + sb.store(&address{array, e.Lbrace, nil}, + zeroValue(fn, deref(array.Type()))) + } + } + + var idx *Const + for _, e := range e.Elts { + pos := e.Pos() + if kv, ok := e.(*ast.KeyValueExpr); ok { + idx = b.expr(fn, kv.Key).(*Const) + pos = kv.Colon + e = kv.Value + } else { + var idxval int64 + if idx != nil { + idxval = idx.Int64() + 1 + } + idx = intConst(idxval) + } + iaddr := &IndexAddr{ + X: array, + Index: idx, + } + iaddr.setType(types.NewPointer(at.Elem())) + fn.emit(iaddr) + if t != at { // slice + // backing array is unaliased => storebuf not needed. + b.assign(fn, &address{addr: iaddr, pos: pos, expr: e}, e, true, nil) + } else { + b.assign(fn, &address{addr: iaddr, pos: pos, expr: e}, e, true, sb) + } + } + + if t != at { // slice + s := &Slice{X: array} + s.setPos(e.Lbrace) + s.setType(typ) + sb.store(&address{addr: addr, pos: e.Lbrace, expr: e}, fn.emit(s)) + } + + case *types.Map: + m := &MakeMap{Reserve: intConst(int64(len(e.Elts)))} + m.setPos(e.Lbrace) + m.setType(typ) + fn.emit(m) + for _, e := range e.Elts { + e := e.(*ast.KeyValueExpr) + + // If a key expression in a map literal is itself a + // composite literal, the type may be omitted. + // For example: + // map[*struct{}]bool{{}: true} + // An &-operation may be implied: + // map[*struct{}]bool{&struct{}{}: true} + var key Value + if _, ok := unparen(e.Key).(*ast.CompositeLit); ok && isPointer(t.Key()) { + // A CompositeLit never evaluates to a pointer, + // so if the type of the location is a pointer, + // an &-operation is implied. 
+ key = b.addr(fn, e.Key, true).address(fn) + } else { + key = b.expr(fn, e.Key) + } + + loc := element{ + m: m, + k: emitConv(fn, key, t.Key()), + t: t.Elem(), + pos: e.Colon, + } + + // We call assign() only because it takes care + // of any &-operation required in the recursive + // case, e.g., + // map[int]*struct{}{0: {}} implies &struct{}{}. + // In-place update is of course impossible, + // and no storebuf is needed. + b.assign(fn, &loc, e.Value, true, nil) + } + sb.store(&address{addr: addr, pos: e.Lbrace, expr: e}, m) + + default: + panic("unexpected CompositeLit type: " + t.String()) + } +} + +// switchStmt emits to fn code for the switch statement s, optionally +// labelled by label. +// +func (b *builder) switchStmt(fn *Function, s *ast.SwitchStmt, label *lblock) { + // We treat SwitchStmt like a sequential if-else chain. + // Multiway dispatch can be recovered later by ssautil.Switches() + // to those cases that are free of side effects. + if s.Init != nil { + b.stmt(fn, s.Init) + } + var tag Value = vTrue + if s.Tag != nil { + tag = b.expr(fn, s.Tag) + } + done := fn.newBasicBlock("switch.done") + if label != nil { + label._break = done + } + // We pull the default case (if present) down to the end. + // But each fallthrough label must point to the next + // body block in source order, so we preallocate a + // body block (fallthru) for the next case. + // Unfortunately this makes for a confusing block order. + var dfltBody *[]ast.Stmt + var dfltFallthrough *BasicBlock + var fallthru, dfltBlock *BasicBlock + ncases := len(s.Body.List) + for i, clause := range s.Body.List { + body := fallthru + if body == nil { + body = fn.newBasicBlock("switch.body") // first case only + } + + // Preallocate body block for the next case. + fallthru = done + if i+1 < ncases { + fallthru = fn.newBasicBlock("switch.body") + } + + cc := clause.(*ast.CaseClause) + if cc.List == nil { + // Default case. + dfltBody = &cc.Body + dfltFallthrough = fallthru + dfltBlock = body + continue + } + + var nextCond *BasicBlock + for _, cond := range cc.List { + nextCond = fn.newBasicBlock("switch.next") + // TODO(adonovan): opt: when tag==vTrue, we'd + // get better code if we use b.cond(cond) + // instead of BinOp(EQL, tag, b.expr(cond)) + // followed by If. Don't forget conversions + // though. + cond := emitCompare(fn, token.EQL, tag, b.expr(fn, cond), token.NoPos) + emitIf(fn, cond, body, nextCond) + fn.currentBlock = nextCond + } + fn.currentBlock = body + fn.targets = &targets{ + tail: fn.targets, + _break: done, + _fallthrough: fallthru, + } + b.stmtList(fn, cc.Body) + fn.targets = fn.targets.tail + emitJump(fn, done) + fn.currentBlock = nextCond + } + if dfltBlock != nil { + emitJump(fn, dfltBlock) + fn.currentBlock = dfltBlock + fn.targets = &targets{ + tail: fn.targets, + _break: done, + _fallthrough: dfltFallthrough, + } + b.stmtList(fn, *dfltBody) + fn.targets = fn.targets.tail + } + emitJump(fn, done) + fn.currentBlock = done +} + +// typeSwitchStmt emits to fn code for the type switch statement s, optionally +// labelled by label. +// +func (b *builder) typeSwitchStmt(fn *Function, s *ast.TypeSwitchStmt, label *lblock) { + // We treat TypeSwitchStmt like a sequential if-else chain. + // Multiway dispatch can be recovered later by ssautil.Switches(). + + // Typeswitch lowering: + // + // var x X + // switch y := x.(type) { + // case T1, T2: S1 // >1 (y := x) + // case nil: SN // nil (y := x) + // default: SD // 0 types (y := x) + // case T3: S3 // 1 type (y := x.(T3)) + // } + // + // ...s.Init... 
+ // x := eval x + // .caseT1: + // t1, ok1 := typeswitch,ok x + // if ok1 then goto S1 else goto .caseT2 + // .caseT2: + // t2, ok2 := typeswitch,ok x + // if ok2 then goto S1 else goto .caseNil + // .S1: + // y := x + // ...S1... + // goto done + // .caseNil: + // if t2, ok2 := typeswitch,ok x + // if x == nil then goto SN else goto .caseT3 + // .SN: + // y := x + // ...SN... + // goto done + // .caseT3: + // t3, ok3 := typeswitch,ok x + // if ok3 then goto S3 else goto default + // .S3: + // y := t3 + // ...S3... + // goto done + // .default: + // y := x + // ...SD... + // goto done + // .done: + + if s.Init != nil { + b.stmt(fn, s.Init) + } + + var x Value + switch ass := s.Assign.(type) { + case *ast.ExprStmt: // x.(type) + x = b.expr(fn, unparen(ass.X).(*ast.TypeAssertExpr).X) + case *ast.AssignStmt: // y := x.(type) + x = b.expr(fn, unparen(ass.Rhs[0]).(*ast.TypeAssertExpr).X) + } + + done := fn.newBasicBlock("typeswitch.done") + if label != nil { + label._break = done + } + var default_ *ast.CaseClause + for _, clause := range s.Body.List { + cc := clause.(*ast.CaseClause) + if cc.List == nil { + default_ = cc + continue + } + body := fn.newBasicBlock("typeswitch.body") + var next *BasicBlock + var casetype types.Type + var ti Value // ti, ok := typeassert,ok x + for _, cond := range cc.List { + next = fn.newBasicBlock("typeswitch.next") + casetype = fn.Pkg.typeOf(cond) + var condv Value + if casetype == tUntypedNil { + condv = emitCompare(fn, token.EQL, x, nilConst(x.Type()), token.NoPos) + ti = x + } else { + yok := emitTypeTest(fn, x, casetype, cc.Case) + ti = emitExtract(fn, yok, 0) + condv = emitExtract(fn, yok, 1) + } + emitIf(fn, condv, body, next) + fn.currentBlock = next + } + if len(cc.List) != 1 { + ti = x + } + fn.currentBlock = body + b.typeCaseBody(fn, cc, ti, done) + fn.currentBlock = next + } + if default_ != nil { + b.typeCaseBody(fn, default_, x, done) + } else { + emitJump(fn, done) + } + fn.currentBlock = done +} + +func (b *builder) typeCaseBody(fn *Function, cc *ast.CaseClause, x Value, done *BasicBlock) { + if obj := fn.Pkg.info.Implicits[cc]; obj != nil { + // In a switch y := x.(type), each case clause + // implicitly declares a distinct object y. + // In a single-type case, y has that type. + // In multi-type cases, 'case nil' and default, + // y has the same type as the interface operand. + emitStore(fn, fn.addNamedLocal(obj), x, obj.Pos()) + } + fn.targets = &targets{ + tail: fn.targets, + _break: done, + } + b.stmtList(fn, cc.Body) + fn.targets = fn.targets.tail + emitJump(fn, done) +} + +// selectStmt emits to fn code for the select statement s, optionally +// labelled by label. +// +func (b *builder) selectStmt(fn *Function, s *ast.SelectStmt, label *lblock) { + // A blocking select of a single case degenerates to a + // simple send or receive. + // TODO(adonovan): opt: is this optimization worth its weight? + if len(s.Body.List) == 1 { + clause := s.Body.List[0].(*ast.CommClause) + if clause.Comm != nil { + b.stmt(fn, clause.Comm) + done := fn.newBasicBlock("select.done") + if label != nil { + label._break = done + } + fn.targets = &targets{ + tail: fn.targets, + _break: done, + } + b.stmtList(fn, clause.Body) + fn.targets = fn.targets.tail + emitJump(fn, done) + fn.currentBlock = done + return + } + } + + // First evaluate all channels in all cases, and find + // the directions of each state. 
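Editorial note (not part of the vendored file): typeCaseBody stores into the per-clause variable recorded in info.Implicits — in a single-type case it has that case's type, otherwise it keeps the interface operand's type. In source terms:

package main

import "fmt"

func describe(x interface{}) {
	switch y := x.(type) {
	case int:
		// Single-type clause: y is an int here.
		fmt.Println("int:", y+1)
	case string, error:
		// Multi-type clause: y keeps the interface type.
		fmt.Printf("string or error: %T\n", y)
	default:
		fmt.Printf("something else: %T\n", y)
	}
}

func main() {
	describe(41)
	describe("hi")
	describe(3.14)
}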
+ var states []*SelectState + blocking := true + debugInfo := fn.debugInfo() + for _, clause := range s.Body.List { + var st *SelectState + switch comm := clause.(*ast.CommClause).Comm.(type) { + case nil: // default case + blocking = false + continue + + case *ast.SendStmt: // ch<- i + ch := b.expr(fn, comm.Chan) + st = &SelectState{ + Dir: types.SendOnly, + Chan: ch, + Send: emitConv(fn, b.expr(fn, comm.Value), + ch.Type().Underlying().(*types.Chan).Elem()), + Pos: comm.Arrow, + } + if debugInfo { + st.DebugNode = comm + } + + case *ast.AssignStmt: // x := <-ch + recv := unparen(comm.Rhs[0]).(*ast.UnaryExpr) + st = &SelectState{ + Dir: types.RecvOnly, + Chan: b.expr(fn, recv.X), + Pos: recv.OpPos, + } + if debugInfo { + st.DebugNode = recv + } + + case *ast.ExprStmt: // <-ch + recv := unparen(comm.X).(*ast.UnaryExpr) + st = &SelectState{ + Dir: types.RecvOnly, + Chan: b.expr(fn, recv.X), + Pos: recv.OpPos, + } + if debugInfo { + st.DebugNode = recv + } + } + states = append(states, st) + } + + // We dispatch on the (fair) result of Select using a + // sequential if-else chain, in effect: + // + // idx, recvOk, r0...r_n-1 := select(...) + // if idx == 0 { // receive on channel 0 (first receive => r0) + // x, ok := r0, recvOk + // ...state0... + // } else if v == 1 { // send on channel 1 + // ...state1... + // } else { + // ...default... + // } + sel := &Select{ + States: states, + Blocking: blocking, + } + sel.setPos(s.Select) + var vars []*types.Var + vars = append(vars, varIndex, varOk) + for _, st := range states { + if st.Dir == types.RecvOnly { + tElem := st.Chan.Type().Underlying().(*types.Chan).Elem() + vars = append(vars, anonVar(tElem)) + } + } + sel.setType(types.NewTuple(vars...)) + + fn.emit(sel) + idx := emitExtract(fn, sel, 0) + + done := fn.newBasicBlock("select.done") + if label != nil { + label._break = done + } + + var defaultBody *[]ast.Stmt + state := 0 + r := 2 // index in 'sel' tuple of value; increments if st.Dir==RECV + for _, cc := range s.Body.List { + clause := cc.(*ast.CommClause) + if clause.Comm == nil { + defaultBody = &clause.Body + continue + } + body := fn.newBasicBlock("select.body") + next := fn.newBasicBlock("select.next") + emitIf(fn, emitCompare(fn, token.EQL, idx, intConst(int64(state)), token.NoPos), body, next) + fn.currentBlock = body + fn.targets = &targets{ + tail: fn.targets, + _break: done, + } + switch comm := clause.Comm.(type) { + case *ast.ExprStmt: // <-ch + if debugInfo { + v := emitExtract(fn, sel, r) + emitDebugRef(fn, states[state].DebugNode.(ast.Expr), v, false) + } + r++ + + case *ast.AssignStmt: // x := <-states[state].Chan + if comm.Tok == token.DEFINE { + fn.addLocalForIdent(comm.Lhs[0].(*ast.Ident)) + } + x := b.addr(fn, comm.Lhs[0], false) // non-escaping + v := emitExtract(fn, sel, r) + if debugInfo { + emitDebugRef(fn, states[state].DebugNode.(ast.Expr), v, false) + } + x.store(fn, v) + + if len(comm.Lhs) == 2 { // x, ok := ... + if comm.Tok == token.DEFINE { + fn.addLocalForIdent(comm.Lhs[1].(*ast.Ident)) + } + ok := b.addr(fn, comm.Lhs[1], false) // non-escaping + ok.store(fn, emitExtract(fn, sel, 1)) + } + r++ + } + b.stmtList(fn, clause.Body) + fn.targets = fn.targets.tail + emitJump(fn, done) + fn.currentBlock = next + state++ + } + if defaultBody != nil { + fn.targets = &targets{ + tail: fn.targets, + _break: done, + } + b.stmtList(fn, *defaultBody) + fn.targets = fn.targets.tail + } else { + // A blocking select must match some case. + // (This should really be a runtime.errorString, not a string.) 
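Editorial note (not part of the vendored file): the Blocking flag computed above is cleared by a default clause, and the generated dispatch then falls through to the default body when no communication is ready. The source-level behavior being modeled:

package main

import "fmt"

func main() {
	ch := make(chan int) // unbuffered, nothing ready

	select {
	case v := <-ch:
		fmt.Println("received", v)
	default:
		// Non-blocking select: taken immediately because no case is ready.
		fmt.Println("nothing ready")
	}
}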
+ fn.emit(&Panic{ + X: emitConv(fn, stringConst("blocking select matched no case"), tEface), + }) + fn.currentBlock = fn.newBasicBlock("unreachable") + } + emitJump(fn, done) + fn.currentBlock = done +} + +// forStmt emits to fn code for the for statement s, optionally +// labelled by label. +// +func (b *builder) forStmt(fn *Function, s *ast.ForStmt, label *lblock) { + // ...init... + // jump loop + // loop: + // if cond goto body else done + // body: + // ...body... + // jump post + // post: (target of continue) + // ...post... + // jump loop + // done: (target of break) + if s.Init != nil { + b.stmt(fn, s.Init) + } + body := fn.newBasicBlock("for.body") + done := fn.newBasicBlock("for.done") // target of 'break' + loop := body // target of back-edge + if s.Cond != nil { + loop = fn.newBasicBlock("for.loop") + } + cont := loop // target of 'continue' + if s.Post != nil { + cont = fn.newBasicBlock("for.post") + } + if label != nil { + label._break = done + label._continue = cont + } + emitJump(fn, loop) + fn.currentBlock = loop + if loop != body { + b.cond(fn, s.Cond, body, done) + fn.currentBlock = body + } + fn.targets = &targets{ + tail: fn.targets, + _break: done, + _continue: cont, + } + b.stmt(fn, s.Body) + fn.targets = fn.targets.tail + emitJump(fn, cont) + + if s.Post != nil { + fn.currentBlock = cont + b.stmt(fn, s.Post) + emitJump(fn, loop) // back-edge + } + fn.currentBlock = done +} + +// rangeIndexed emits to fn the header for an integer-indexed loop +// over array, *array or slice value x. +// The v result is defined only if tv is non-nil. +// forPos is the position of the "for" token. +// +func (b *builder) rangeIndexed(fn *Function, x Value, tv types.Type, pos token.Pos) (k, v Value, loop, done *BasicBlock) { + // + // length = len(x) + // index = -1 + // loop: (target of continue) + // index++ + // if index < length goto body else done + // body: + // k = index + // v = x[index] + // ...body... + // jump loop + // done: (target of break) + + // Determine number of iterations. + var length Value + if arr, ok := deref(x.Type()).Underlying().(*types.Array); ok { + // For array or *array, the number of iterations is + // known statically thanks to the type. We avoid a + // data dependence upon x, permitting later dead-code + // elimination if x is pure, static unrolling, etc. + // Ranging over a nil *array may have >0 iterations. + // We still generate code for x, in case it has effects. + length = intConst(arr.Len()) + } else { + // length = len(x). 
+ var c Call + c.Call.Value = makeLen(x.Type()) + c.Call.Args = []Value{x} + c.setType(tInt) + length = fn.emit(&c) + } + + index := fn.addLocal(tInt, token.NoPos) + emitStore(fn, index, intConst(-1), pos) + + loop = fn.newBasicBlock("rangeindex.loop") + emitJump(fn, loop) + fn.currentBlock = loop + + incr := &BinOp{ + Op: token.ADD, + X: emitLoad(fn, index), + Y: vOne, + } + incr.setType(tInt) + emitStore(fn, index, fn.emit(incr), pos) + + body := fn.newBasicBlock("rangeindex.body") + done = fn.newBasicBlock("rangeindex.done") + emitIf(fn, emitCompare(fn, token.LSS, incr, length, token.NoPos), body, done) + fn.currentBlock = body + + k = emitLoad(fn, index) + if tv != nil { + switch t := x.Type().Underlying().(type) { + case *types.Array: + instr := &Index{ + X: x, + Index: k, + } + instr.setType(t.Elem()) + instr.setPos(x.Pos()) + v = fn.emit(instr) + + case *types.Pointer: // *array + instr := &IndexAddr{ + X: x, + Index: k, + } + instr.setType(types.NewPointer(t.Elem().Underlying().(*types.Array).Elem())) + instr.setPos(x.Pos()) + v = emitLoad(fn, fn.emit(instr)) + + case *types.Slice: + instr := &IndexAddr{ + X: x, + Index: k, + } + instr.setType(types.NewPointer(t.Elem())) + instr.setPos(x.Pos()) + v = emitLoad(fn, fn.emit(instr)) + + default: + panic("rangeIndexed x:" + t.String()) + } + } + return +} + +// rangeIter emits to fn the header for a loop using +// Range/Next/Extract to iterate over map or string value x. +// tk and tv are the types of the key/value results k and v, or nil +// if the respective component is not wanted. +// +func (b *builder) rangeIter(fn *Function, x Value, tk, tv types.Type, pos token.Pos) (k, v Value, loop, done *BasicBlock) { + // + // it = range x + // loop: (target of continue) + // okv = next it (ok, key, value) + // ok = extract okv #0 + // if ok goto body else done + // body: + // k = extract okv #1 + // v = extract okv #2 + // ...body... + // jump loop + // done: (target of break) + // + + if tk == nil { + tk = tInvalid + } + if tv == nil { + tv = tInvalid + } + + rng := &Range{X: x} + rng.setPos(pos) + rng.setType(tRangeIter) + it := fn.emit(rng) + + loop = fn.newBasicBlock("rangeiter.loop") + emitJump(fn, loop) + fn.currentBlock = loop + + _, isString := x.Type().Underlying().(*types.Basic) + + okv := &Next{ + Iter: it, + IsString: isString, + } + okv.setType(types.NewTuple( + varOk, + newVar("k", tk), + newVar("v", tv), + )) + fn.emit(okv) + + body := fn.newBasicBlock("rangeiter.body") + done = fn.newBasicBlock("rangeiter.done") + emitIf(fn, emitExtract(fn, okv, 0), body, done) + fn.currentBlock = body + + if tk != tInvalid { + k = emitExtract(fn, okv, 1) + } + if tv != tInvalid { + v = emitExtract(fn, okv, 2) + } + return +} + +// rangeChan emits to fn the header for a loop that receives from +// channel x until it fails. +// tk is the channel's element type, or nil if the k result is +// not wanted +// pos is the position of the '=' or ':=' token. +// +func (b *builder) rangeChan(fn *Function, x Value, tk types.Type, pos token.Pos) (k Value, loop, done *BasicBlock) { + // + // loop: (target of continue) + // ko = <-x (key, ok) + // ok = extract ko #1 + // if ok goto body else done + // body: + // k = extract ko #0 + // ... 
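Editorial note (not part of the vendored file): rangeChan below lowers a range over a channel into a receive-with-ok loop that exits once the channel is closed and drained. The equivalent source-level behavior:

package main

import "fmt"

func main() {
	ch := make(chan int, 3)
	ch <- 1
	ch <- 2
	ch <- 3
	close(ch)

	// Receives until the buffer is drained and the channel is closed,
	// i.e. until the ok result of the receive becomes false.
	for v := range ch {
		fmt.Println(v)
	}
}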
+ // goto loop + // done: (target of break) + + loop = fn.newBasicBlock("rangechan.loop") + emitJump(fn, loop) + fn.currentBlock = loop + recv := &UnOp{ + Op: token.ARROW, + X: x, + CommaOk: true, + } + recv.setPos(pos) + recv.setType(types.NewTuple( + newVar("k", x.Type().Underlying().(*types.Chan).Elem()), + varOk, + )) + ko := fn.emit(recv) + body := fn.newBasicBlock("rangechan.body") + done = fn.newBasicBlock("rangechan.done") + emitIf(fn, emitExtract(fn, ko, 1), body, done) + fn.currentBlock = body + if tk != nil { + k = emitExtract(fn, ko, 0) + } + return +} + +// rangeStmt emits to fn code for the range statement s, optionally +// labelled by label. +// +func (b *builder) rangeStmt(fn *Function, s *ast.RangeStmt, label *lblock) { + var tk, tv types.Type + if s.Key != nil && !isBlankIdent(s.Key) { + tk = fn.Pkg.typeOf(s.Key) + } + if s.Value != nil && !isBlankIdent(s.Value) { + tv = fn.Pkg.typeOf(s.Value) + } + + // If iteration variables are defined (:=), this + // occurs once outside the loop. + // + // Unlike a short variable declaration, a RangeStmt + // using := never redeclares an existing variable; it + // always creates a new one. + if s.Tok == token.DEFINE { + if tk != nil { + fn.addLocalForIdent(s.Key.(*ast.Ident)) + } + if tv != nil { + fn.addLocalForIdent(s.Value.(*ast.Ident)) + } + } + + x := b.expr(fn, s.X) + + var k, v Value + var loop, done *BasicBlock + switch rt := x.Type().Underlying().(type) { + case *types.Slice, *types.Array, *types.Pointer: // *array + k, v, loop, done = b.rangeIndexed(fn, x, tv, s.For) + + case *types.Chan: + k, loop, done = b.rangeChan(fn, x, tk, s.For) + + case *types.Map, *types.Basic: // string + k, v, loop, done = b.rangeIter(fn, x, tk, tv, s.For) + + default: + panic("Cannot range over: " + rt.String()) + } + + // Evaluate both LHS expressions before we update either. + var kl, vl lvalue + if tk != nil { + kl = b.addr(fn, s.Key, false) // non-escaping + } + if tv != nil { + vl = b.addr(fn, s.Value, false) // non-escaping + } + if tk != nil { + kl.store(fn, k) + } + if tv != nil { + vl.store(fn, v) + } + + if label != nil { + label._break = done + label._continue = loop + } + + fn.targets = &targets{ + tail: fn.targets, + _break: done, + _continue: loop, + } + b.stmt(fn, s.Body) + fn.targets = fn.targets.tail + emitJump(fn, loop) // back-edge + fn.currentBlock = done +} + +// stmt lowers statement s to SSA form, emitting code to fn. +func (b *builder) stmt(fn *Function, _s ast.Stmt) { + // The label of the current statement. If non-nil, its _goto + // target is always set; its _break and _continue are set only + // within the body of switch/typeswitch/select/for/range. + // It is effectively an additional default-nil parameter of stmt(). + var label *lblock +start: + switch s := _s.(type) { + case *ast.EmptyStmt: + // ignore. (Usually removed by gofmt.) 
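The block diagrams in the comments above show how for and range statements are lowered into explicitly linked basic blocks (for.body, rangeindex.loop, rangeiter.loop, rangechan.loop, and their .done counterparts). A minimal sketch of how a client could observe that lowering, assuming fn is an *ssa.Function taken from an already-built program:

package main

import (
	"fmt"
	"os"

	"golang.org/x/tools/go/ssa"
)

// dumpCFG prints a built function and its block graph; the Comment
// field carries labels such as "for.body" or "rangechan.loop" chosen
// by newBasicBlock in the builder above.
func dumpCFG(fn *ssa.Function) {
	fn.WriteTo(os.Stdout) // full instruction listing
	for _, b := range fn.Blocks {
		fmt.Printf("block %d %q -> %v\n", b.Index, b.Comment, b.Succs)
	}
}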
+ + case *ast.DeclStmt: // Con, Var or Typ + d := s.Decl.(*ast.GenDecl) + if d.Tok == token.VAR { + for _, spec := range d.Specs { + if vs, ok := spec.(*ast.ValueSpec); ok { + b.localValueSpec(fn, vs) + } + } + } + + case *ast.LabeledStmt: + label = fn.labelledBlock(s.Label) + emitJump(fn, label._goto) + fn.currentBlock = label._goto + _s = s.Stmt + goto start // effectively: tailcall stmt(fn, s.Stmt, label) + + case *ast.ExprStmt: + b.expr(fn, s.X) + + case *ast.SendStmt: + fn.emit(&Send{ + Chan: b.expr(fn, s.Chan), + X: emitConv(fn, b.expr(fn, s.Value), + fn.Pkg.typeOf(s.Chan).Underlying().(*types.Chan).Elem()), + pos: s.Arrow, + }) + + case *ast.IncDecStmt: + op := token.ADD + if s.Tok == token.DEC { + op = token.SUB + } + loc := b.addr(fn, s.X, false) + b.assignOp(fn, loc, NewConst(constant.MakeInt64(1), loc.typ()), op, s.Pos()) + + case *ast.AssignStmt: + switch s.Tok { + case token.ASSIGN, token.DEFINE: + b.assignStmt(fn, s.Lhs, s.Rhs, s.Tok == token.DEFINE) + + default: // +=, etc. + op := s.Tok + token.ADD - token.ADD_ASSIGN + b.assignOp(fn, b.addr(fn, s.Lhs[0], false), b.expr(fn, s.Rhs[0]), op, s.Pos()) + } + + case *ast.GoStmt: + // The "intrinsics" new/make/len/cap are forbidden here. + // panic is treated like an ordinary function call. + v := Go{pos: s.Go} + b.setCall(fn, s.Call, &v.Call) + fn.emit(&v) + + case *ast.DeferStmt: + // The "intrinsics" new/make/len/cap are forbidden here. + // panic is treated like an ordinary function call. + v := Defer{pos: s.Defer} + b.setCall(fn, s.Call, &v.Call) + fn.emit(&v) + + // A deferred call can cause recovery from panic, + // and control resumes at the Recover block. + createRecoverBlock(fn) + + case *ast.ReturnStmt: + var results []Value + if len(s.Results) == 1 && fn.Signature.Results().Len() > 1 { + // Return of one expression in a multi-valued function. + tuple := b.exprN(fn, s.Results[0]) + ttuple := tuple.Type().(*types.Tuple) + for i, n := 0, ttuple.Len(); i < n; i++ { + results = append(results, + emitConv(fn, emitExtract(fn, tuple, i), + fn.Signature.Results().At(i).Type())) + } + } else { + // 1:1 return, or no-arg return in non-void function. + for i, r := range s.Results { + v := emitConv(fn, b.expr(fn, r), fn.Signature.Results().At(i).Type()) + results = append(results, v) + } + } + if fn.namedResults != nil { + // Function has named result parameters (NRPs). + // Perform parallel assignment of return operands to NRPs. + for i, r := range results { + emitStore(fn, fn.namedResults[i], r, s.Return) + } + } + // Run function calls deferred in this + // function when explicitly returning from it. + fn.emit(new(RunDefers)) + if fn.namedResults != nil { + // Reload NRPs to form the result tuple. 
+ results = results[:0] + for _, r := range fn.namedResults { + results = append(results, emitLoad(fn, r)) + } + } + fn.emit(&Return{Results: results, pos: s.Return}) + fn.currentBlock = fn.newBasicBlock("unreachable") + + case *ast.BranchStmt: + var block *BasicBlock + switch s.Tok { + case token.BREAK: + if s.Label != nil { + block = fn.labelledBlock(s.Label)._break + } else { + for t := fn.targets; t != nil && block == nil; t = t.tail { + block = t._break + } + } + + case token.CONTINUE: + if s.Label != nil { + block = fn.labelledBlock(s.Label)._continue + } else { + for t := fn.targets; t != nil && block == nil; t = t.tail { + block = t._continue + } + } + + case token.FALLTHROUGH: + for t := fn.targets; t != nil && block == nil; t = t.tail { + block = t._fallthrough + } + + case token.GOTO: + block = fn.labelledBlock(s.Label)._goto + } + emitJump(fn, block) + fn.currentBlock = fn.newBasicBlock("unreachable") + + case *ast.BlockStmt: + b.stmtList(fn, s.List) + + case *ast.IfStmt: + if s.Init != nil { + b.stmt(fn, s.Init) + } + then := fn.newBasicBlock("if.then") + done := fn.newBasicBlock("if.done") + els := done + if s.Else != nil { + els = fn.newBasicBlock("if.else") + } + b.cond(fn, s.Cond, then, els) + fn.currentBlock = then + b.stmt(fn, s.Body) + emitJump(fn, done) + + if s.Else != nil { + fn.currentBlock = els + b.stmt(fn, s.Else) + emitJump(fn, done) + } + + fn.currentBlock = done + + case *ast.SwitchStmt: + b.switchStmt(fn, s, label) + + case *ast.TypeSwitchStmt: + b.typeSwitchStmt(fn, s, label) + + case *ast.SelectStmt: + b.selectStmt(fn, s, label) + + case *ast.ForStmt: + b.forStmt(fn, s, label) + + case *ast.RangeStmt: + b.rangeStmt(fn, s, label) + + default: + panic(fmt.Sprintf("unexpected statement kind: %T", s)) + } +} + +// buildFunction builds SSA code for the body of function fn. Idempotent. +func (b *builder) buildFunction(fn *Function) { + if fn.Blocks != nil { + return // building already started + } + + var recvField *ast.FieldList + var body *ast.BlockStmt + var functype *ast.FuncType + switch n := fn.syntax.(type) { + case nil: + return // not a Go source function. (Synthetic, or from object file.) + case *ast.FuncDecl: + functype = n.Type + recvField = n.Recv + body = n.Body + case *ast.FuncLit: + functype = n.Type + body = n.Body + default: + panic(n) + } + + if body == nil { + // External function. + if fn.Params == nil { + // This condition ensures we add a non-empty + // params list once only, but we may attempt + // the degenerate empty case repeatedly. + // TODO(adonovan): opt: don't do that. + + // We set Function.Params even though there is no body + // code to reference them. This simplifies clients. + if recv := fn.Signature.Recv(); recv != nil { + fn.addParamObj(recv) + } + params := fn.Signature.Params() + for i, n := 0, params.Len(); i < n; i++ { + fn.addParamObj(params.At(i)) + } + } + return + } + if fn.Prog.mode&LogSource != 0 { + defer logStack("build function %s @ %s", fn, fn.Prog.Fset.Position(fn.pos))() + } + fn.startBody() + fn.createSyntacticParams(recvField, functype) + b.stmt(fn, body) + if cb := fn.currentBlock; cb != nil && (cb == fn.Blocks[0] || cb == fn.Recover || cb.Preds != nil) { + // Control fell off the end of the function's body block. + // + // Block optimizations eliminate the current block, if + // unreachable. It is a builder invariant that + // if this no-arg return is ill-typed for + // fn.Signature.Results, this block must be + // unreachable. The sanity checker checks this. 
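The DeferStmt and ReturnStmt cases above interact with the recover machinery: a deferred call may recover from a panic, after which control resumes at the function's Recover block and the current values of any named results are returned (see createRecoverBlock in emit.go). A small source-level sketch of the pattern this machinery supports:

package sketch

import "fmt"

// safeDiv relies on the Recover block: if the division panics, the
// deferred closure stores into the named result err, and the Recover
// block returns the current values of (q, err).
func safeDiv(a, b int) (q int, err error) {
	defer func() {
		if r := recover(); r != nil {
			err = fmt.Errorf("recovered: %v", r)
		}
	}()
	q = a / b // may panic on b == 0
	return
}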
+ fn.emit(new(RunDefers)) + fn.emit(new(Return)) + } + fn.finishBody() +} + +// buildFuncDecl builds SSA code for the function or method declared +// by decl in package pkg. +// +func (b *builder) buildFuncDecl(pkg *Package, decl *ast.FuncDecl) { + id := decl.Name + if isBlankIdent(id) { + return // discard + } + fn := pkg.values[pkg.info.Defs[id]].(*Function) + if decl.Recv == nil && id.Name == "init" { + var v Call + v.Call.Value = fn + v.setType(types.NewTuple()) + pkg.init.emit(&v) + } + b.buildFunction(fn) +} + +// Build calls Package.Build for each package in prog. +// Building occurs in parallel unless the BuildSerially mode flag was set. +// +// Build is intended for whole-program analysis; a typical compiler +// need only build a single package. +// +// Build is idempotent and thread-safe. +// +func (prog *Program) Build() { + var wg sync.WaitGroup + for _, p := range prog.packages { + if prog.mode&BuildSerially != 0 { + p.Build() + } else { + wg.Add(1) + go func(p *Package) { + p.Build() + wg.Done() + }(p) + } + } + wg.Wait() +} + +// Build builds SSA code for all functions and vars in package p. +// +// Precondition: CreatePackage must have been called for all of p's +// direct imports (and hence its direct imports must have been +// error-free). +// +// Build is idempotent and thread-safe. +// +func (p *Package) Build() { p.buildOnce.Do(p.build) } + +func (p *Package) build() { + if p.info == nil { + return // synthetic package, e.g. "testmain" + } + + // Ensure we have runtime type info for all exported members. + // TODO(adonovan): ideally belongs in memberFromObject, but + // that would require package creation in topological order. + for name, mem := range p.Members { + if ast.IsExported(name) { + p.Prog.needMethodsOf(mem.Type()) + } + } + if p.Prog.mode&LogSource != 0 { + defer logStack("build %s", p)() + } + init := p.init + init.startBody() + + var done *BasicBlock + + if p.Prog.mode&BareInits == 0 { + // Make init() skip if package is already initialized. + initguard := p.Var("init$guard") + doinit := init.newBasicBlock("init.start") + done = init.newBasicBlock("init.done") + emitIf(init, emitLoad(init, initguard), done, doinit) + init.currentBlock = doinit + emitStore(init, initguard, vTrue, token.NoPos) + + // Call the init() function of each package we import. + for _, pkg := range p.Pkg.Imports() { + prereq := p.Prog.packages[pkg] + if prereq == nil { + panic(fmt.Sprintf("Package(%q).Build(): unsatisfied import: Program.CreatePackage(%q) was not called", p.Pkg.Path(), pkg.Path())) + } + var v Call + v.Call.Value = prereq.init + v.Call.pos = init.pos + v.setType(types.NewTuple()) + init.emit(&v) + } + } + + var b builder + + // Initialize package-level vars in correct order. 
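Build and build above consult several BuilderMode bits (BuildSerially, BareInits, LogSource, SanityCheckFunctions, PrintPackages, GlobalDebug). A rough sketch of combining some of them when the Program is created with NewProgram (defined in create.go below); the particular flag choice here is only illustrative:

package main

import (
	"go/token"

	"golang.org/x/tools/go/ssa"
)

// newCheckedProgram creates a Program whose packages will be built
// serially, sanity-checked, and printed as they are built.
func newCheckedProgram(fset *token.FileSet) *ssa.Program {
	mode := ssa.BuildSerially | ssa.SanityCheckFunctions | ssa.PrintPackages
	return ssa.NewProgram(fset, mode)
}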
+ for _, varinit := range p.info.InitOrder { + if init.Prog.mode&LogSource != 0 { + fmt.Fprintf(os.Stderr, "build global initializer %v @ %s\n", + varinit.Lhs, p.Prog.Fset.Position(varinit.Rhs.Pos())) + } + if len(varinit.Lhs) == 1 { + // 1:1 initialization: var x, y = a(), b() + var lval lvalue + if v := varinit.Lhs[0]; v.Name() != "_" { + lval = &address{addr: p.values[v].(*Global), pos: v.Pos()} + } else { + lval = blank{} + } + b.assign(init, lval, varinit.Rhs, true, nil) + } else { + // n:1 initialization: var x, y := f() + tuple := b.exprN(init, varinit.Rhs) + for i, v := range varinit.Lhs { + if v.Name() == "_" { + continue + } + emitStore(init, p.values[v].(*Global), emitExtract(init, tuple, i), v.Pos()) + } + } + } + + // Build all package-level functions, init functions + // and methods, including unreachable/blank ones. + // We build them in source order, but it's not significant. + for _, file := range p.files { + for _, decl := range file.Decls { + if decl, ok := decl.(*ast.FuncDecl); ok { + b.buildFuncDecl(p, decl) + } + } + } + + // Finish up init(). + if p.Prog.mode&BareInits == 0 { + emitJump(init, done) + init.currentBlock = done + } + init.emit(new(Return)) + init.finishBody() + + p.info = nil // We no longer need ASTs or go/types deductions. + + if p.Prog.mode&SanityCheckFunctions != 0 { + sanityCheckPackage(p) + } +} + +// Like ObjectOf, but panics instead of returning nil. +// Only valid during p's create and build phases. +func (p *Package) objectOf(id *ast.Ident) types.Object { + if o := p.info.ObjectOf(id); o != nil { + return o + } + panic(fmt.Sprintf("no types.Object for ast.Ident %s @ %s", + id.Name, p.Prog.Fset.Position(id.Pos()))) +} + +// Like TypeOf, but panics instead of returning nil. +// Only valid during p's create and build phases. +func (p *Package) typeOf(e ast.Expr) types.Type { + if T := p.info.TypeOf(e); T != nil { + return T + } + panic(fmt.Sprintf("no type for %T @ %s", + e, p.Prog.Fset.Position(e.Pos()))) +} diff --git a/vendor/golang.org/x/tools/go/ssa/const.go b/vendor/golang.org/x/tools/go/ssa/const.go new file mode 100644 index 000000000..f43792e7f --- /dev/null +++ b/vendor/golang.org/x/tools/go/ssa/const.go @@ -0,0 +1,169 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package ssa + +// This file defines the Const SSA value type. + +import ( + "fmt" + "go/constant" + "go/token" + "go/types" + "strconv" +) + +// NewConst returns a new constant of the specified value and type. +// val must be valid according to the specification of Const.Value. +// +func NewConst(val constant.Value, typ types.Type) *Const { + return &Const{typ, val} +} + +// intConst returns an 'int' constant that evaluates to i. +// (i is an int64 in case the host is narrower than the target.) +func intConst(i int64) *Const { + return NewConst(constant.MakeInt64(i), tInt) +} + +// nilConst returns a nil constant of the specified type, which may +// be any reference type, including interfaces. +// +func nilConst(typ types.Type) *Const { + return NewConst(nil, typ) +} + +// stringConst returns a 'string' constant that evaluates to s. +func stringConst(s string) *Const { + return NewConst(constant.MakeString(s), tString) +} + +// zeroConst returns a new "zero" constant of the specified type, +// which must not be an array or struct type: the zero values of +// aggregates are well-defined but cannot be represented by Const. 
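A small sketch of the Const API: NewConst above pairs a go/constant value with a type, and the accessors defined just below (Name, Int64, IsNil, and so on) read it back. Nothing beyond the standard go/constant and go/types packages is assumed.

package main

import (
	"fmt"
	"go/constant"
	"go/types"

	"golang.org/x/tools/go/ssa"
)

func main() {
	c := ssa.NewConst(constant.MakeInt64(42), types.Typ[types.Int])
	// Name/RelString render constants as "value:type", e.g. "42:int".
	fmt.Println(c.Name(), c.Int64(), c.IsNil()) // 42:int 42 false
}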
+// +func zeroConst(t types.Type) *Const { + switch t := t.(type) { + case *types.Basic: + switch { + case t.Info()&types.IsBoolean != 0: + return NewConst(constant.MakeBool(false), t) + case t.Info()&types.IsNumeric != 0: + return NewConst(constant.MakeInt64(0), t) + case t.Info()&types.IsString != 0: + return NewConst(constant.MakeString(""), t) + case t.Kind() == types.UnsafePointer: + fallthrough + case t.Kind() == types.UntypedNil: + return nilConst(t) + default: + panic(fmt.Sprint("zeroConst for unexpected type:", t)) + } + case *types.Pointer, *types.Slice, *types.Interface, *types.Chan, *types.Map, *types.Signature: + return nilConst(t) + case *types.Named: + return NewConst(zeroConst(t.Underlying()).Value, t) + case *types.Array, *types.Struct, *types.Tuple: + panic(fmt.Sprint("zeroConst applied to aggregate:", t)) + } + panic(fmt.Sprint("zeroConst: unexpected ", t)) +} + +func (c *Const) RelString(from *types.Package) string { + var s string + if c.Value == nil { + s = "nil" + } else if c.Value.Kind() == constant.String { + s = constant.StringVal(c.Value) + const max = 20 + // TODO(adonovan): don't cut a rune in half. + if len(s) > max { + s = s[:max-3] + "..." // abbreviate + } + s = strconv.Quote(s) + } else { + s = c.Value.String() + } + return s + ":" + relType(c.Type(), from) +} + +func (c *Const) Name() string { + return c.RelString(nil) +} + +func (c *Const) String() string { + return c.Name() +} + +func (c *Const) Type() types.Type { + return c.typ +} + +func (c *Const) Referrers() *[]Instruction { + return nil +} + +func (c *Const) Parent() *Function { return nil } + +func (c *Const) Pos() token.Pos { + return token.NoPos +} + +// IsNil returns true if this constant represents a typed or untyped nil value. +func (c *Const) IsNil() bool { + return c.Value == nil +} + +// TODO(adonovan): move everything below into golang.org/x/tools/go/ssa/interp. + +// Int64 returns the numeric value of this constant truncated to fit +// a signed 64-bit integer. +// +func (c *Const) Int64() int64 { + switch x := constant.ToInt(c.Value); x.Kind() { + case constant.Int: + if i, ok := constant.Int64Val(x); ok { + return i + } + return 0 + case constant.Float: + f, _ := constant.Float64Val(x) + return int64(f) + } + panic(fmt.Sprintf("unexpected constant value: %T", c.Value)) +} + +// Uint64 returns the numeric value of this constant truncated to fit +// an unsigned 64-bit integer. +// +func (c *Const) Uint64() uint64 { + switch x := constant.ToInt(c.Value); x.Kind() { + case constant.Int: + if u, ok := constant.Uint64Val(x); ok { + return u + } + return 0 + case constant.Float: + f, _ := constant.Float64Val(x) + return uint64(f) + } + panic(fmt.Sprintf("unexpected constant value: %T", c.Value)) +} + +// Float64 returns the numeric value of this constant truncated to fit +// a float64. +// +func (c *Const) Float64() float64 { + f, _ := constant.Float64Val(c.Value) + return f +} + +// Complex128 returns the complex value of this constant truncated to +// fit a complex128. +// +func (c *Const) Complex128() complex128 { + re, _ := constant.Float64Val(constant.Real(c.Value)) + im, _ := constant.Float64Val(constant.Imag(c.Value)) + return complex(re, im) +} diff --git a/vendor/golang.org/x/tools/go/ssa/create.go b/vendor/golang.org/x/tools/go/ssa/create.go new file mode 100644 index 000000000..85163a0c5 --- /dev/null +++ b/vendor/golang.org/x/tools/go/ssa/create.go @@ -0,0 +1,270 @@ +// Copyright 2013 The Go Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package ssa + +// This file implements the CREATE phase of SSA construction. +// See builder.go for explanation. + +import ( + "fmt" + "go/ast" + "go/token" + "go/types" + "os" + "sync" + + "golang.org/x/tools/go/types/typeutil" +) + +// NewProgram returns a new SSA Program. +// +// mode controls diagnostics and checking during SSA construction. +// +func NewProgram(fset *token.FileSet, mode BuilderMode) *Program { + prog := &Program{ + Fset: fset, + imported: make(map[string]*Package), + packages: make(map[*types.Package]*Package), + thunks: make(map[selectionKey]*Function), + bounds: make(map[*types.Func]*Function), + mode: mode, + } + + h := typeutil.MakeHasher() // protected by methodsMu, in effect + prog.methodSets.SetHasher(h) + prog.canon.SetHasher(h) + + return prog +} + +// memberFromObject populates package pkg with a member for the +// typechecker object obj. +// +// For objects from Go source code, syntax is the associated syntax +// tree (for funcs and vars only); it will be used during the build +// phase. +// +func memberFromObject(pkg *Package, obj types.Object, syntax ast.Node) { + name := obj.Name() + switch obj := obj.(type) { + case *types.Builtin: + if pkg.Pkg != types.Unsafe { + panic("unexpected builtin object: " + obj.String()) + } + + case *types.TypeName: + pkg.Members[name] = &Type{ + object: obj, + pkg: pkg, + } + + case *types.Const: + c := &NamedConst{ + object: obj, + Value: NewConst(obj.Val(), obj.Type()), + pkg: pkg, + } + pkg.values[obj] = c.Value + pkg.Members[name] = c + + case *types.Var: + g := &Global{ + Pkg: pkg, + name: name, + object: obj, + typ: types.NewPointer(obj.Type()), // address + pos: obj.Pos(), + } + pkg.values[obj] = g + pkg.Members[name] = g + + case *types.Func: + sig := obj.Type().(*types.Signature) + if sig.Recv() == nil && name == "init" { + pkg.ninit++ + name = fmt.Sprintf("init#%d", pkg.ninit) + } + fn := &Function{ + name: name, + object: obj, + Signature: sig, + syntax: syntax, + pos: obj.Pos(), + Pkg: pkg, + Prog: pkg.Prog, + } + if syntax == nil { + fn.Synthetic = "loaded from gc object file" + } + + pkg.values[obj] = fn + if sig.Recv() == nil { + pkg.Members[name] = fn // package-level function + } + + default: // (incl. *types.Package) + panic("unexpected Object type: " + obj.String()) + } +} + +// membersFromDecl populates package pkg with members for each +// typechecker object (var, func, const or type) associated with the +// specified decl. 
+// +func membersFromDecl(pkg *Package, decl ast.Decl) { + switch decl := decl.(type) { + case *ast.GenDecl: // import, const, type or var + switch decl.Tok { + case token.CONST: + for _, spec := range decl.Specs { + for _, id := range spec.(*ast.ValueSpec).Names { + if !isBlankIdent(id) { + memberFromObject(pkg, pkg.info.Defs[id], nil) + } + } + } + + case token.VAR: + for _, spec := range decl.Specs { + for _, id := range spec.(*ast.ValueSpec).Names { + if !isBlankIdent(id) { + memberFromObject(pkg, pkg.info.Defs[id], spec) + } + } + } + + case token.TYPE: + for _, spec := range decl.Specs { + id := spec.(*ast.TypeSpec).Name + if !isBlankIdent(id) { + memberFromObject(pkg, pkg.info.Defs[id], nil) + } + } + } + + case *ast.FuncDecl: + id := decl.Name + if !isBlankIdent(id) { + memberFromObject(pkg, pkg.info.Defs[id], decl) + } + } +} + +// CreatePackage constructs and returns an SSA Package from the +// specified type-checked, error-free file ASTs, and populates its +// Members mapping. +// +// importable determines whether this package should be returned by a +// subsequent call to ImportedPackage(pkg.Path()). +// +// The real work of building SSA form for each function is not done +// until a subsequent call to Package.Build(). +// +func (prog *Program) CreatePackage(pkg *types.Package, files []*ast.File, info *types.Info, importable bool) *Package { + p := &Package{ + Prog: prog, + Members: make(map[string]Member), + values: make(map[types.Object]Value), + Pkg: pkg, + info: info, // transient (CREATE and BUILD phases) + files: files, // transient (CREATE and BUILD phases) + } + + // Add init() function. + p.init = &Function{ + name: "init", + Signature: new(types.Signature), + Synthetic: "package initializer", + Pkg: p, + Prog: prog, + } + p.Members[p.init.name] = p.init + + // CREATE phase. + // Allocate all package members: vars, funcs, consts and types. + if len(files) > 0 { + // Go source package. + for _, file := range files { + for _, decl := range file.Decls { + membersFromDecl(p, decl) + } + } + } else { + // GC-compiled binary package (or "unsafe") + // No code. + // No position information. + scope := p.Pkg.Scope() + for _, name := range scope.Names() { + obj := scope.Lookup(name) + memberFromObject(p, obj, nil) + if obj, ok := obj.(*types.TypeName); ok { + if named, ok := obj.Type().(*types.Named); ok { + for i, n := 0, named.NumMethods(); i < n; i++ { + memberFromObject(p, named.Method(i), nil) + } + } + } + } + } + + if prog.mode&BareInits == 0 { + // Add initializer guard variable. + initguard := &Global{ + Pkg: p, + name: "init$guard", + typ: types.NewPointer(tBool), + } + p.Members[initguard.Name()] = initguard + } + + if prog.mode&GlobalDebug != 0 { + p.SetDebugMode(true) + } + + if prog.mode&PrintPackages != 0 { + printMu.Lock() + p.WriteTo(os.Stdout) + printMu.Unlock() + } + + if importable { + prog.imported[p.Pkg.Path()] = p + } + prog.packages[p.Pkg] = p + + return p +} + +// printMu serializes printing of Packages/Functions to stdout. +var printMu sync.Mutex + +// AllPackages returns a new slice containing all packages in the +// program prog in unspecified order. +// +func (prog *Program) AllPackages() []*Package { + pkgs := make([]*Package, 0, len(prog.packages)) + for _, pkg := range prog.packages { + pkgs = append(pkgs, pkg) + } + return pkgs +} + +// ImportedPackage returns the importable Package whose PkgPath +// is path, or nil if no such Package has been created. 
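A sketch of the CREATE and BUILD phases from the client's side, using CreatePackage and Package.Build as documented above. fset, files, info and tpkg are assumed to come from a successful go/parser + go/types run, and (per the precondition on Build) CreatePackage must also have been called for each direct import.

package main

import (
	"go/ast"
	"go/token"
	"go/types"

	"golang.org/x/tools/go/ssa"
)

// createAndBuild allocates the package's members (CREATE) and then
// builds SSA code for its function bodies (BUILD).
func createAndBuild(fset *token.FileSet, tpkg *types.Package, files []*ast.File, info *types.Info) *ssa.Package {
	prog := ssa.NewProgram(fset, ssa.SanityCheckFunctions)
	p := prog.CreatePackage(tpkg, files, info, true) // importable
	p.Build()
	return p
}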
+// +// A parameter to CreatePackage determines whether a package should be +// considered importable. For example, no import declaration can resolve +// to the ad-hoc main package created by 'go build foo.go'. +// +// TODO(adonovan): rethink this function and the "importable" concept; +// most packages are importable. This function assumes that all +// types.Package.Path values are unique within the ssa.Program, which is +// false---yet this function remains very convenient. +// Clients should use (*Program).Package instead where possible. +// SSA doesn't really need a string-keyed map of packages. +// +func (prog *Program) ImportedPackage(path string) *Package { + return prog.imported[path] +} diff --git a/vendor/golang.org/x/tools/go/ssa/doc.go b/vendor/golang.org/x/tools/go/ssa/doc.go new file mode 100644 index 000000000..1a13640f9 --- /dev/null +++ b/vendor/golang.org/x/tools/go/ssa/doc.go @@ -0,0 +1,125 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package ssa defines a representation of the elements of Go programs +// (packages, types, functions, variables and constants) using a +// static single-assignment (SSA) form intermediate representation +// (IR) for the bodies of functions. +// +// THIS INTERFACE IS EXPERIMENTAL AND IS LIKELY TO CHANGE. +// +// For an introduction to SSA form, see +// http://en.wikipedia.org/wiki/Static_single_assignment_form. +// This page provides a broader reading list: +// http://www.dcs.gla.ac.uk/~jsinger/ssa.html. +// +// The level of abstraction of the SSA form is intentionally close to +// the source language to facilitate construction of source analysis +// tools. It is not intended for machine code generation. +// +// All looping, branching and switching constructs are replaced with +// unstructured control flow. Higher-level control flow constructs +// such as multi-way branch can be reconstructed as needed; see +// ssautil.Switches() for an example. +// +// The simplest way to create the SSA representation of a package is +// to load typed syntax trees using golang.org/x/tools/go/packages, then +// invoke the ssautil.Packages helper function. See ExampleLoadPackages +// and ExampleWholeProgram for examples. +// The resulting ssa.Program contains all the packages and their +// members, but SSA code is not created for function bodies until a +// subsequent call to (*Package).Build or (*Program).Build. +// +// The builder initially builds a naive SSA form in which all local +// variables are addresses of stack locations with explicit loads and +// stores. Registerisation of eligible locals and φ-node insertion +// using dominance and dataflow are then performed as a second pass +// called "lifting" to improve the accuracy and performance of +// subsequent analyses; this pass can be skipped by setting the +// NaiveForm builder flag. +// +// The primary interfaces of this package are: +// +// - Member: a named member of a Go package. +// - Value: an expression that yields a value. +// - Instruction: a statement that consumes values and performs computation. +// - Node: a Value or Instruction (emphasizing its membership in the SSA value graph) +// +// A computation that yields a result implements both the Value and +// Instruction interfaces. The following table shows for each +// concrete type which of these interfaces it implements. +// +// Value? Instruction? Member? 
+// *Alloc ✔ ✔ +// *BinOp ✔ ✔ +// *Builtin ✔ +// *Call ✔ ✔ +// *ChangeInterface ✔ ✔ +// *ChangeType ✔ ✔ +// *Const ✔ +// *Convert ✔ ✔ +// *DebugRef ✔ +// *Defer ✔ +// *Extract ✔ ✔ +// *Field ✔ ✔ +// *FieldAddr ✔ ✔ +// *FreeVar ✔ +// *Function ✔ ✔ (func) +// *Global ✔ ✔ (var) +// *Go ✔ +// *If ✔ +// *Index ✔ ✔ +// *IndexAddr ✔ ✔ +// *Jump ✔ +// *Lookup ✔ ✔ +// *MakeChan ✔ ✔ +// *MakeClosure ✔ ✔ +// *MakeInterface ✔ ✔ +// *MakeMap ✔ ✔ +// *MakeSlice ✔ ✔ +// *MapUpdate ✔ +// *NamedConst ✔ (const) +// *Next ✔ ✔ +// *Panic ✔ +// *Parameter ✔ +// *Phi ✔ ✔ +// *Range ✔ ✔ +// *Return ✔ +// *RunDefers ✔ +// *Select ✔ ✔ +// *Send ✔ +// *Slice ✔ ✔ +// *Store ✔ +// *Type ✔ (type) +// *TypeAssert ✔ ✔ +// *UnOp ✔ ✔ +// +// Other key types in this package include: Program, Package, Function +// and BasicBlock. +// +// The program representation constructed by this package is fully +// resolved internally, i.e. it does not rely on the names of Values, +// Packages, Functions, Types or BasicBlocks for the correct +// interpretation of the program. Only the identities of objects and +// the topology of the SSA and type graphs are semantically +// significant. (There is one exception: Ids, used to identify field +// and method names, contain strings.) Avoidance of name-based +// operations simplifies the implementation of subsequent passes and +// can make them very efficient. Many objects are nonetheless named +// to aid in debugging, but it is not essential that the names be +// either accurate or unambiguous. The public API exposes a number of +// name-based maps for client convenience. +// +// The ssa/ssautil package provides various utilities that depend only +// on the public API of this package. +// +// TODO(adonovan): Consider the exceptional control-flow implications +// of defer and recover(). +// +// TODO(adonovan): write a how-to document for all the various cases +// of trying to determine corresponding elements across the four +// domains of source locations, ast.Nodes, types.Objects, +// ssa.Values/Instructions. +// +package ssa // import "golang.org/x/tools/go/ssa" diff --git a/vendor/golang.org/x/tools/go/ssa/dom.go b/vendor/golang.org/x/tools/go/ssa/dom.go new file mode 100644 index 000000000..822fe9772 --- /dev/null +++ b/vendor/golang.org/x/tools/go/ssa/dom.go @@ -0,0 +1,341 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package ssa + +// This file defines algorithms related to dominance. + +// Dominator tree construction ---------------------------------------- +// +// We use the algorithm described in Lengauer & Tarjan. 1979. A fast +// algorithm for finding dominators in a flowgraph. +// http://doi.acm.org/10.1145/357062.357071 +// +// We also apply the optimizations to SLT described in Georgiadis et +// al, Finding Dominators in Practice, JGAA 2006, +// http://jgaa.info/accepted/2006/GeorgiadisTarjanWerneck2006.10.1.pdf +// to avoid the need for buckets of size > 1. + +import ( + "bytes" + "fmt" + "math/big" + "os" + "sort" +) + +// Idom returns the block that immediately dominates b: +// its parent in the dominator tree, if any. +// Neither the entry node (b.Index==0) nor recover node +// (b==b.Parent().Recover()) have a parent. +// +func (b *BasicBlock) Idom() *BasicBlock { return b.dom.idom } + +// Dominees returns the list of blocks that b immediately dominates: +// its children in the dominator tree. 
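The package documentation above points at golang.org/x/tools/go/packages plus the ssautil.Packages helper as the simplest route to an ssa.Program. A minimal sketch of that route; the load pattern, mode and error handling are illustrative only:

package main

import (
	"log"

	"golang.org/x/tools/go/packages"
	"golang.org/x/tools/go/ssa"
	"golang.org/x/tools/go/ssa/ssautil"
)

func main() {
	cfg := &packages.Config{Mode: packages.LoadAllSyntax}
	initial, err := packages.Load(cfg, "fmt")
	if err != nil {
		log.Fatal(err)
	}
	prog, _ := ssautil.Packages(initial, ssa.SanityCheckFunctions)
	prog.Build() // SSA for all function bodies in all packages
}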
+// +func (b *BasicBlock) Dominees() []*BasicBlock { return b.dom.children } + +// Dominates reports whether b dominates c. +func (b *BasicBlock) Dominates(c *BasicBlock) bool { + return b.dom.pre <= c.dom.pre && c.dom.post <= b.dom.post +} + +type byDomPreorder []*BasicBlock + +func (a byDomPreorder) Len() int { return len(a) } +func (a byDomPreorder) Swap(i, j int) { a[i], a[j] = a[j], a[i] } +func (a byDomPreorder) Less(i, j int) bool { return a[i].dom.pre < a[j].dom.pre } + +// DomPreorder returns a new slice containing the blocks of f in +// dominator tree preorder. +// +func (f *Function) DomPreorder() []*BasicBlock { + n := len(f.Blocks) + order := make(byDomPreorder, n) + copy(order, f.Blocks) + sort.Sort(order) + return order +} + +// domInfo contains a BasicBlock's dominance information. +type domInfo struct { + idom *BasicBlock // immediate dominator (parent in domtree) + children []*BasicBlock // nodes immediately dominated by this one + pre, post int32 // pre- and post-order numbering within domtree +} + +// ltState holds the working state for Lengauer-Tarjan algorithm +// (during which domInfo.pre is repurposed for CFG DFS preorder number). +type ltState struct { + // Each slice is indexed by b.Index. + sdom []*BasicBlock // b's semidominator + parent []*BasicBlock // b's parent in DFS traversal of CFG + ancestor []*BasicBlock // b's ancestor with least sdom +} + +// dfs implements the depth-first search part of the LT algorithm. +func (lt *ltState) dfs(v *BasicBlock, i int32, preorder []*BasicBlock) int32 { + preorder[i] = v + v.dom.pre = i // For now: DFS preorder of spanning tree of CFG + i++ + lt.sdom[v.Index] = v + lt.link(nil, v) + for _, w := range v.Succs { + if lt.sdom[w.Index] == nil { + lt.parent[w.Index] = v + i = lt.dfs(w, i, preorder) + } + } + return i +} + +// eval implements the EVAL part of the LT algorithm. +func (lt *ltState) eval(v *BasicBlock) *BasicBlock { + // TODO(adonovan): opt: do path compression per simple LT. + u := v + for ; lt.ancestor[v.Index] != nil; v = lt.ancestor[v.Index] { + if lt.sdom[v.Index].dom.pre < lt.sdom[u.Index].dom.pre { + u = v + } + } + return u +} + +// link implements the LINK part of the LT algorithm. +func (lt *ltState) link(v, w *BasicBlock) { + lt.ancestor[w.Index] = v +} + +// buildDomTree computes the dominator tree of f using the LT algorithm. +// Precondition: all blocks are reachable (e.g. optimizeBlocks has been run). +// +func buildDomTree(f *Function) { + // The step numbers refer to the original LT paper; the + // reordering is due to Georgiadis. + + // Clear any previous domInfo. + for _, b := range f.Blocks { + b.dom = domInfo{} + } + + n := len(f.Blocks) + // Allocate space for 5 contiguous [n]*BasicBlock arrays: + // sdom, parent, ancestor, preorder, buckets. + space := make([]*BasicBlock, 5*n) + lt := ltState{ + sdom: space[0:n], + parent: space[n : 2*n], + ancestor: space[2*n : 3*n], + } + + // Step 1. Number vertices by depth-first preorder. + preorder := space[3*n : 4*n] + root := f.Blocks[0] + prenum := lt.dfs(root, 0, preorder) + recover := f.Recover + if recover != nil { + lt.dfs(recover, prenum, preorder) + } + + buckets := space[4*n : 5*n] + copy(buckets, preorder) + + // In reverse preorder... + for i := int32(n) - 1; i > 0; i-- { + w := preorder[i] + + // Step 3. Implicitly define the immediate dominator of each node. + for v := buckets[i]; v != w; v = buckets[v.dom.pre] { + u := lt.eval(v) + if lt.sdom[u.Index].dom.pre < i { + v.dom.idom = u + } else { + v.dom.idom = w + } + } + + // Step 2. 
Compute the semidominators of all nodes. + lt.sdom[w.Index] = lt.parent[w.Index] + for _, v := range w.Preds { + u := lt.eval(v) + if lt.sdom[u.Index].dom.pre < lt.sdom[w.Index].dom.pre { + lt.sdom[w.Index] = lt.sdom[u.Index] + } + } + + lt.link(lt.parent[w.Index], w) + + if lt.parent[w.Index] == lt.sdom[w.Index] { + w.dom.idom = lt.parent[w.Index] + } else { + buckets[i] = buckets[lt.sdom[w.Index].dom.pre] + buckets[lt.sdom[w.Index].dom.pre] = w + } + } + + // The final 'Step 3' is now outside the loop. + for v := buckets[0]; v != root; v = buckets[v.dom.pre] { + v.dom.idom = root + } + + // Step 4. Explicitly define the immediate dominator of each + // node, in preorder. + for _, w := range preorder[1:] { + if w == root || w == recover { + w.dom.idom = nil + } else { + if w.dom.idom != lt.sdom[w.Index] { + w.dom.idom = w.dom.idom.dom.idom + } + // Calculate Children relation as inverse of Idom. + w.dom.idom.dom.children = append(w.dom.idom.dom.children, w) + } + } + + pre, post := numberDomTree(root, 0, 0) + if recover != nil { + numberDomTree(recover, pre, post) + } + + // printDomTreeDot(os.Stderr, f) // debugging + // printDomTreeText(os.Stderr, root, 0) // debugging + + if f.Prog.mode&SanityCheckFunctions != 0 { + sanityCheckDomTree(f) + } +} + +// numberDomTree sets the pre- and post-order numbers of a depth-first +// traversal of the dominator tree rooted at v. These are used to +// answer dominance queries in constant time. +// +func numberDomTree(v *BasicBlock, pre, post int32) (int32, int32) { + v.dom.pre = pre + pre++ + for _, child := range v.dom.children { + pre, post = numberDomTree(child, pre, post) + } + v.dom.post = post + post++ + return pre, post +} + +// Testing utilities ---------------------------------------- + +// sanityCheckDomTree checks the correctness of the dominator tree +// computed by the LT algorithm by comparing against the dominance +// relation computed by a naive Kildall-style forward dataflow +// analysis (Algorithm 10.16 from the "Dragon" book). +// +func sanityCheckDomTree(f *Function) { + n := len(f.Blocks) + + // D[i] is the set of blocks that dominate f.Blocks[i], + // represented as a bit-set of block indices. + D := make([]big.Int, n) + + one := big.NewInt(1) + + // all is the set of all blocks; constant. + var all big.Int + all.Set(one).Lsh(&all, uint(n)).Sub(&all, one) + + // Initialization. + for i, b := range f.Blocks { + if i == 0 || b == f.Recover { + // A root is dominated only by itself. + D[i].SetBit(&D[0], 0, 1) + } else { + // All other blocks are (initially) dominated + // by every block. + D[i].Set(&all) + } + } + + // Iteration until fixed point. + for changed := true; changed; { + changed = false + for i, b := range f.Blocks { + if i == 0 || b == f.Recover { + continue + } + // Compute intersection across predecessors. + var x big.Int + x.Set(&all) + for _, pred := range b.Preds { + x.And(&x, &D[pred.Index]) + } + x.SetBit(&x, i, 1) // a block always dominates itself. + if D[i].Cmp(&x) != 0 { + D[i].Set(&x) + changed = true + } + } + } + + // Check the entire relation. O(n^2). + // The Recover block (if any) must be treated specially so we skip it. 
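A sketch of the exported dominance API described above (Idom, Dominees, Dominates, DomPreorder), assuming fn comes from a built program so its dominator tree exists:

package main

import (
	"fmt"

	"golang.org/x/tools/go/ssa"
)

// printDomTree lists each block's immediate dominator and children
// in dominator-tree preorder.
func printDomTree(fn *ssa.Function) {
	for _, b := range fn.DomPreorder() {
		if idom := b.Idom(); idom != nil {
			fmt.Printf("idom(%s) = %s  children=%v\n", b, idom, b.Dominees())
		}
	}
}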
+ ok := true + for i := 0; i < n; i++ { + for j := 0; j < n; j++ { + b, c := f.Blocks[i], f.Blocks[j] + if c == f.Recover { + continue + } + actual := b.Dominates(c) + expected := D[j].Bit(i) == 1 + if actual != expected { + fmt.Fprintf(os.Stderr, "dominates(%s, %s)==%t, want %t\n", b, c, actual, expected) + ok = false + } + } + } + + preorder := f.DomPreorder() + for _, b := range f.Blocks { + if got := preorder[b.dom.pre]; got != b { + fmt.Fprintf(os.Stderr, "preorder[%d]==%s, want %s\n", b.dom.pre, got, b) + ok = false + } + } + + if !ok { + panic("sanityCheckDomTree failed for " + f.String()) + } + +} + +// Printing functions ---------------------------------------- + +// printDomTree prints the dominator tree as text, using indentation. +func printDomTreeText(buf *bytes.Buffer, v *BasicBlock, indent int) { + fmt.Fprintf(buf, "%*s%s\n", 4*indent, "", v) + for _, child := range v.dom.children { + printDomTreeText(buf, child, indent+1) + } +} + +// printDomTreeDot prints the dominator tree of f in AT&T GraphViz +// (.dot) format. +func printDomTreeDot(buf *bytes.Buffer, f *Function) { + fmt.Fprintln(buf, "//", f) + fmt.Fprintln(buf, "digraph domtree {") + for i, b := range f.Blocks { + v := b.dom + fmt.Fprintf(buf, "\tn%d [label=\"%s (%d, %d)\",shape=\"rectangle\"];\n", v.pre, b, v.pre, v.post) + // TODO(adonovan): improve appearance of edges + // belonging to both dominator tree and CFG. + + // Dominator tree edge. + if i != 0 { + fmt.Fprintf(buf, "\tn%d -> n%d [style=\"solid\",weight=100];\n", v.idom.dom.pre, v.pre) + } + // CFG edges. + for _, pred := range b.Preds { + fmt.Fprintf(buf, "\tn%d -> n%d [style=\"dotted\",weight=0];\n", pred.dom.pre, v.pre) + } + } + fmt.Fprintln(buf, "}") +} diff --git a/vendor/golang.org/x/tools/go/ssa/emit.go b/vendor/golang.org/x/tools/go/ssa/emit.go new file mode 100644 index 000000000..df9ca4ff0 --- /dev/null +++ b/vendor/golang.org/x/tools/go/ssa/emit.go @@ -0,0 +1,478 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package ssa + +// Helpers for emitting SSA instructions. + +import ( + "fmt" + "go/ast" + "go/token" + "go/types" +) + +// emitNew emits to f a new (heap Alloc) instruction allocating an +// object of type typ. pos is the optional source location. +// +func emitNew(f *Function, typ types.Type, pos token.Pos) *Alloc { + v := &Alloc{Heap: true} + v.setType(types.NewPointer(typ)) + v.setPos(pos) + f.emit(v) + return v +} + +// emitLoad emits to f an instruction to load the address addr into a +// new temporary, and returns the value so defined. +// +func emitLoad(f *Function, addr Value) *UnOp { + v := &UnOp{Op: token.MUL, X: addr} + v.setType(deref(addr.Type())) + f.emit(v) + return v +} + +// emitDebugRef emits to f a DebugRef pseudo-instruction associating +// expression e with value v. +// +func emitDebugRef(f *Function, e ast.Expr, v Value, isAddr bool) { + if !f.debugInfo() { + return // debugging not enabled + } + if v == nil || e == nil { + panic("nil") + } + var obj types.Object + e = unparen(e) + if id, ok := e.(*ast.Ident); ok { + if isBlankIdent(id) { + return + } + obj = f.Pkg.objectOf(id) + switch obj.(type) { + case *types.Nil, *types.Const, *types.Builtin: + return + } + } + f.emit(&DebugRef{ + X: v, + Expr: e, + IsAddr: isAddr, + object: obj, + }) +} + +// emitArith emits to f code to compute the binary operation op(x, y) +// where op is an eager shift, logical or arithmetic operation. 
+// (Use emitCompare() for comparisons and Builder.logicalBinop() for +// non-eager operations.) +// +func emitArith(f *Function, op token.Token, x, y Value, t types.Type, pos token.Pos) Value { + switch op { + case token.SHL, token.SHR: + x = emitConv(f, x, t) + // y may be signed or an 'untyped' constant. + // TODO(adonovan): whence signed values? + if b, ok := y.Type().Underlying().(*types.Basic); ok && b.Info()&types.IsUnsigned == 0 { + y = emitConv(f, y, types.Typ[types.Uint64]) + } + + case token.ADD, token.SUB, token.MUL, token.QUO, token.REM, token.AND, token.OR, token.XOR, token.AND_NOT: + x = emitConv(f, x, t) + y = emitConv(f, y, t) + + default: + panic("illegal op in emitArith: " + op.String()) + + } + v := &BinOp{ + Op: op, + X: x, + Y: y, + } + v.setPos(pos) + v.setType(t) + return f.emit(v) +} + +// emitCompare emits to f code compute the boolean result of +// comparison comparison 'x op y'. +// +func emitCompare(f *Function, op token.Token, x, y Value, pos token.Pos) Value { + xt := x.Type().Underlying() + yt := y.Type().Underlying() + + // Special case to optimise a tagless SwitchStmt so that + // these are equivalent + // switch { case e: ...} + // switch true { case e: ... } + // if e==true { ... } + // even in the case when e's type is an interface. + // TODO(adonovan): opt: generalise to x==true, false!=y, etc. + if x == vTrue && op == token.EQL { + if yt, ok := yt.(*types.Basic); ok && yt.Info()&types.IsBoolean != 0 { + return y + } + } + + if types.Identical(xt, yt) { + // no conversion necessary + } else if _, ok := xt.(*types.Interface); ok { + y = emitConv(f, y, x.Type()) + } else if _, ok := yt.(*types.Interface); ok { + x = emitConv(f, x, y.Type()) + } else if _, ok := x.(*Const); ok { + x = emitConv(f, x, y.Type()) + } else if _, ok := y.(*Const); ok { + y = emitConv(f, y, x.Type()) + } else { + // other cases, e.g. channels. No-op. + } + + v := &BinOp{ + Op: op, + X: x, + Y: y, + } + v.setPos(pos) + v.setType(tBool) + return f.emit(v) +} + +// isValuePreserving returns true if a conversion from ut_src to +// ut_dst is value-preserving, i.e. just a change of type. +// Precondition: neither argument is a named type. +// +func isValuePreserving(ut_src, ut_dst types.Type) bool { + // Identical underlying types? + if structTypesIdentical(ut_dst, ut_src) { + return true + } + + switch ut_dst.(type) { + case *types.Chan: + // Conversion between channel types? + _, ok := ut_src.(*types.Chan) + return ok + + case *types.Pointer: + // Conversion between pointers with identical base types? + _, ok := ut_src.(*types.Pointer) + return ok + } + return false +} + +// emitConv emits to f code to convert Value val to exactly type typ, +// and returns the converted value. Implicit conversions are required +// by language assignability rules in assignments, parameter passing, +// etc. +// +func emitConv(f *Function, val Value, typ types.Type) Value { + t_src := val.Type() + + // Identical types? Conversion is a no-op. + if types.Identical(t_src, typ) { + return val + } + + ut_dst := typ.Underlying() + ut_src := t_src.Underlying() + + // Just a change of type, but not value or representation? + if isValuePreserving(ut_src, ut_dst) { + c := &ChangeType{X: val} + c.setType(typ) + return f.emit(c) + } + + // Conversion to, or construction of a value of, an interface type? + if _, ok := ut_dst.(*types.Interface); ok { + // Assignment from one interface type to another? 
+ if _, ok := ut_src.(*types.Interface); ok { + c := &ChangeInterface{X: val} + c.setType(typ) + return f.emit(c) + } + + // Untyped nil constant? Return interface-typed nil constant. + if ut_src == tUntypedNil { + return nilConst(typ) + } + + // Convert (non-nil) "untyped" literals to their default type. + if t, ok := ut_src.(*types.Basic); ok && t.Info()&types.IsUntyped != 0 { + val = emitConv(f, val, types.Default(ut_src)) + } + + f.Pkg.Prog.needMethodsOf(val.Type()) + mi := &MakeInterface{X: val} + mi.setType(typ) + return f.emit(mi) + } + + // Conversion of a compile-time constant value? + if c, ok := val.(*Const); ok { + if _, ok := ut_dst.(*types.Basic); ok || c.IsNil() { + // Conversion of a compile-time constant to + // another constant type results in a new + // constant of the destination type and + // (initially) the same abstract value. + // We don't truncate the value yet. + return NewConst(c.Value, typ) + } + + // We're converting from constant to non-constant type, + // e.g. string -> []byte/[]rune. + } + + // Conversion from slice to array pointer? + if slice, ok := ut_src.(*types.Slice); ok { + if ptr, ok := ut_dst.(*types.Pointer); ok { + if arr, ok := ptr.Elem().(*types.Array); ok && types.Identical(slice.Elem(), arr.Elem()) { + c := &Convert{X: val} + c.setType(ut_dst) + return f.emit(c) + } + } + } + // A representation-changing conversion? + // At least one of {ut_src,ut_dst} must be *Basic. + // (The other may be []byte or []rune.) + _, ok1 := ut_src.(*types.Basic) + _, ok2 := ut_dst.(*types.Basic) + if ok1 || ok2 { + c := &Convert{X: val} + c.setType(typ) + return f.emit(c) + } + + panic(fmt.Sprintf("in %s: cannot convert %s (%s) to %s", f, val, val.Type(), typ)) +} + +// emitStore emits to f an instruction to store value val at location +// addr, applying implicit conversions as required by assignability rules. +// +func emitStore(f *Function, addr, val Value, pos token.Pos) *Store { + s := &Store{ + Addr: addr, + Val: emitConv(f, val, deref(addr.Type())), + pos: pos, + } + f.emit(s) + return s +} + +// emitJump emits to f a jump to target, and updates the control-flow graph. +// Postcondition: f.currentBlock is nil. +// +func emitJump(f *Function, target *BasicBlock) { + b := f.currentBlock + b.emit(new(Jump)) + addEdge(b, target) + f.currentBlock = nil +} + +// emitIf emits to f a conditional jump to tblock or fblock based on +// cond, and updates the control-flow graph. +// Postcondition: f.currentBlock is nil. +// +func emitIf(f *Function, cond Value, tblock, fblock *BasicBlock) { + b := f.currentBlock + b.emit(&If{Cond: cond}) + addEdge(b, tblock) + addEdge(b, fblock) + f.currentBlock = nil +} + +// emitExtract emits to f an instruction to extract the index'th +// component of tuple. It returns the extracted value. +// +func emitExtract(f *Function, tuple Value, index int) Value { + e := &Extract{Tuple: tuple, Index: index} + e.setType(tuple.Type().(*types.Tuple).At(index).Type()) + return f.emit(e) +} + +// emitTypeAssert emits to f a type assertion value := x.(t) and +// returns the value. x.Type() must be an interface. +// +func emitTypeAssert(f *Function, x Value, t types.Type, pos token.Pos) Value { + a := &TypeAssert{X: x, AssertedType: t} + a.setPos(pos) + a.setType(t) + return f.emit(a) +} + +// emitTypeTest emits to f a type test value,ok := x.(t) and returns +// a (value, ok) tuple. x.Type() must be an interface. 
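emitConv's case analysis above maps Go conversions onto different SSA instructions. A source-level sketch of which instruction each common conversion is expected to become; the comments reflect the cases above, not output from the tool:

package sketch

import "io"

type Celsius float64

func conversions(f float64, s string, rw io.ReadWriter) {
	_ = Celsius(f)     // ChangeType: value-preserving, identical underlying types
	_ = []byte(s)      // Convert: representation-changing (string -> []byte)
	_ = interface{}(f) // MakeInterface: non-interface value into an interface
	_ = io.Reader(rw)  // ChangeInterface: interface to interface
	_ = io.Reader(nil) // untyped nil becomes an interface-typed nil constant
}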
+// +func emitTypeTest(f *Function, x Value, t types.Type, pos token.Pos) Value { + a := &TypeAssert{ + X: x, + AssertedType: t, + CommaOk: true, + } + a.setPos(pos) + a.setType(types.NewTuple( + newVar("value", t), + varOk, + )) + return f.emit(a) +} + +// emitTailCall emits to f a function call in tail position. The +// caller is responsible for all fields of 'call' except its type. +// Intended for wrapper methods. +// Precondition: f does/will not use deferred procedure calls. +// Postcondition: f.currentBlock is nil. +// +func emitTailCall(f *Function, call *Call) { + tresults := f.Signature.Results() + nr := tresults.Len() + if nr == 1 { + call.typ = tresults.At(0).Type() + } else { + call.typ = tresults + } + tuple := f.emit(call) + var ret Return + switch nr { + case 0: + // no-op + case 1: + ret.Results = []Value{tuple} + default: + for i := 0; i < nr; i++ { + v := emitExtract(f, tuple, i) + // TODO(adonovan): in principle, this is required: + // v = emitConv(f, o.Type, f.Signature.Results[i].Type) + // but in practice emitTailCall is only used when + // the types exactly match. + ret.Results = append(ret.Results, v) + } + } + f.emit(&ret) + f.currentBlock = nil +} + +// emitImplicitSelections emits to f code to apply the sequence of +// implicit field selections specified by indices to base value v, and +// returns the selected value. +// +// If v is the address of a struct, the result will be the address of +// a field; if it is the value of a struct, the result will be the +// value of a field. +// +func emitImplicitSelections(f *Function, v Value, indices []int) Value { + for _, index := range indices { + fld := deref(v.Type()).Underlying().(*types.Struct).Field(index) + + if isPointer(v.Type()) { + instr := &FieldAddr{ + X: v, + Field: index, + } + instr.setType(types.NewPointer(fld.Type())) + v = f.emit(instr) + // Load the field's value iff indirectly embedded. + if isPointer(fld.Type()) { + v = emitLoad(f, v) + } + } else { + instr := &Field{ + X: v, + Field: index, + } + instr.setType(fld.Type()) + v = f.emit(instr) + } + } + return v +} + +// emitFieldSelection emits to f code to select the index'th field of v. +// +// If wantAddr, the input must be a pointer-to-struct and the result +// will be the field's address; otherwise the result will be the +// field's value. +// Ident id is used for position and debug info. +// +func emitFieldSelection(f *Function, v Value, index int, wantAddr bool, id *ast.Ident) Value { + fld := deref(v.Type()).Underlying().(*types.Struct).Field(index) + if isPointer(v.Type()) { + instr := &FieldAddr{ + X: v, + Field: index, + } + instr.setPos(id.Pos()) + instr.setType(types.NewPointer(fld.Type())) + v = f.emit(instr) + // Load the field's value iff we don't want its address. + if !wantAddr { + v = emitLoad(f, v) + } + } else { + instr := &Field{ + X: v, + Field: index, + } + instr.setPos(id.Pos()) + instr.setType(fld.Type()) + v = f.emit(instr) + } + emitDebugRef(f, id, v, wantAddr) + return v +} + +// zeroValue emits to f code to produce a zero value of type t, +// and returns it. +// +func zeroValue(f *Function, t types.Type) Value { + switch t.Underlying().(type) { + case *types.Struct, *types.Array: + return emitLoad(f, f.addLocal(t, token.NoPos)) + default: + return zeroConst(t) + } +} + +// createRecoverBlock emits to f a block of code to return after a +// recovered panic, and sets f.Recover to it. 
+// +// If f's result parameters are named, the code loads and returns +// their current values, otherwise it returns the zero values of their +// type. +// +// Idempotent. +// +func createRecoverBlock(f *Function) { + if f.Recover != nil { + return // already created + } + saved := f.currentBlock + + f.Recover = f.newBasicBlock("recover") + f.currentBlock = f.Recover + + var results []Value + if f.namedResults != nil { + // Reload NRPs to form value tuple. + for _, r := range f.namedResults { + results = append(results, emitLoad(f, r)) + } + } else { + R := f.Signature.Results() + for i, n := 0, R.Len(); i < n; i++ { + T := R.At(i).Type() + + // Return zero value of each result type. + results = append(results, zeroValue(f, T)) + } + } + f.emit(&Return{Results: results}) + + f.currentBlock = saved +} diff --git a/vendor/golang.org/x/tools/go/ssa/func.go b/vendor/golang.org/x/tools/go/ssa/func.go new file mode 100644 index 000000000..0b99bc9ba --- /dev/null +++ b/vendor/golang.org/x/tools/go/ssa/func.go @@ -0,0 +1,691 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package ssa + +// This file implements the Function and BasicBlock types. + +import ( + "bytes" + "fmt" + "go/ast" + "go/token" + "go/types" + "io" + "os" + "strings" +) + +// addEdge adds a control-flow graph edge from from to to. +func addEdge(from, to *BasicBlock) { + from.Succs = append(from.Succs, to) + to.Preds = append(to.Preds, from) +} + +// Parent returns the function that contains block b. +func (b *BasicBlock) Parent() *Function { return b.parent } + +// String returns a human-readable label of this block. +// It is not guaranteed unique within the function. +// +func (b *BasicBlock) String() string { + return fmt.Sprintf("%d", b.Index) +} + +// emit appends an instruction to the current basic block. +// If the instruction defines a Value, it is returned. +// +func (b *BasicBlock) emit(i Instruction) Value { + i.setBlock(b) + b.Instrs = append(b.Instrs, i) + v, _ := i.(Value) + return v +} + +// predIndex returns the i such that b.Preds[i] == c or panics if +// there is none. +func (b *BasicBlock) predIndex(c *BasicBlock) int { + for i, pred := range b.Preds { + if pred == c { + return i + } + } + panic(fmt.Sprintf("no edge %s -> %s", c, b)) +} + +// hasPhi returns true if b.Instrs contains φ-nodes. +func (b *BasicBlock) hasPhi() bool { + _, ok := b.Instrs[0].(*Phi) + return ok +} + +// phis returns the prefix of b.Instrs containing all the block's φ-nodes. +func (b *BasicBlock) phis() []Instruction { + for i, instr := range b.Instrs { + if _, ok := instr.(*Phi); !ok { + return b.Instrs[:i] + } + } + return nil // unreachable in well-formed blocks +} + +// replacePred replaces all occurrences of p in b's predecessor list with q. +// Ordinarily there should be at most one. +// +func (b *BasicBlock) replacePred(p, q *BasicBlock) { + for i, pred := range b.Preds { + if pred == p { + b.Preds[i] = q + } + } +} + +// replaceSucc replaces all occurrences of p in b's successor list with q. +// Ordinarily there should be at most one. +// +func (b *BasicBlock) replaceSucc(p, q *BasicBlock) { + for i, succ := range b.Succs { + if succ == p { + b.Succs[i] = q + } + } +} + +// removePred removes all occurrences of p in b's +// predecessor list and φ-nodes. +// Ordinarily there should be at most one. 
+// +func (b *BasicBlock) removePred(p *BasicBlock) { + phis := b.phis() + + // We must preserve edge order for φ-nodes. + j := 0 + for i, pred := range b.Preds { + if pred != p { + b.Preds[j] = b.Preds[i] + // Strike out φ-edge too. + for _, instr := range phis { + phi := instr.(*Phi) + phi.Edges[j] = phi.Edges[i] + } + j++ + } + } + // Nil out b.Preds[j:] and φ-edges[j:] to aid GC. + for i := j; i < len(b.Preds); i++ { + b.Preds[i] = nil + for _, instr := range phis { + instr.(*Phi).Edges[i] = nil + } + } + b.Preds = b.Preds[:j] + for _, instr := range phis { + phi := instr.(*Phi) + phi.Edges = phi.Edges[:j] + } +} + +// Destinations associated with unlabelled for/switch/select stmts. +// We push/pop one of these as we enter/leave each construct and for +// each BranchStmt we scan for the innermost target of the right type. +// +type targets struct { + tail *targets // rest of stack + _break *BasicBlock + _continue *BasicBlock + _fallthrough *BasicBlock +} + +// Destinations associated with a labelled block. +// We populate these as labels are encountered in forward gotos or +// labelled statements. +// +type lblock struct { + _goto *BasicBlock + _break *BasicBlock + _continue *BasicBlock +} + +// labelledBlock returns the branch target associated with the +// specified label, creating it if needed. +// +func (f *Function) labelledBlock(label *ast.Ident) *lblock { + lb := f.lblocks[label.Obj] + if lb == nil { + lb = &lblock{_goto: f.newBasicBlock(label.Name)} + if f.lblocks == nil { + f.lblocks = make(map[*ast.Object]*lblock) + } + f.lblocks[label.Obj] = lb + } + return lb +} + +// addParam adds a (non-escaping) parameter to f.Params of the +// specified name, type and source position. +// +func (f *Function) addParam(name string, typ types.Type, pos token.Pos) *Parameter { + v := &Parameter{ + name: name, + typ: typ, + pos: pos, + parent: f, + } + f.Params = append(f.Params, v) + return v +} + +func (f *Function) addParamObj(obj types.Object) *Parameter { + name := obj.Name() + if name == "" { + name = fmt.Sprintf("arg%d", len(f.Params)) + } + param := f.addParam(name, obj.Type(), obj.Pos()) + param.object = obj + return param +} + +// addSpilledParam declares a parameter that is pre-spilled to the +// stack; the function body will load/store the spilled location. +// Subsequent lifting will eliminate spills where possible. +// +func (f *Function) addSpilledParam(obj types.Object) { + param := f.addParamObj(obj) + spill := &Alloc{Comment: obj.Name()} + spill.setType(types.NewPointer(obj.Type())) + spill.setPos(obj.Pos()) + f.objects[obj] = spill + f.Locals = append(f.Locals, spill) + f.emit(spill) + f.emit(&Store{Addr: spill, Val: param}) +} + +// startBody initializes the function prior to generating SSA code for its body. +// Precondition: f.Type() already set. +// +func (f *Function) startBody() { + f.currentBlock = f.newBasicBlock("entry") + f.objects = make(map[types.Object]Value) // needed for some synthetics, e.g. init +} + +// createSyntacticParams populates f.Params and generates code (spills +// and named result locals) for all the parameters declared in the +// syntax. In addition it populates the f.objects mapping. +// +// Preconditions: +// f.startBody() was called. +// Postcondition: +// len(f.Params) == len(f.Signature.Params) + (f.Signature.Recv() ? 1 : 0) +// +func (f *Function) createSyntacticParams(recv *ast.FieldList, functype *ast.FuncType) { + // Receiver (at most one inner iteration). 
+ if recv != nil { + for _, field := range recv.List { + for _, n := range field.Names { + f.addSpilledParam(f.Pkg.info.Defs[n]) + } + // Anonymous receiver? No need to spill. + if field.Names == nil { + f.addParamObj(f.Signature.Recv()) + } + } + } + + // Parameters. + if functype.Params != nil { + n := len(f.Params) // 1 if has recv, 0 otherwise + for _, field := range functype.Params.List { + for _, n := range field.Names { + f.addSpilledParam(f.Pkg.info.Defs[n]) + } + // Anonymous parameter? No need to spill. + if field.Names == nil { + f.addParamObj(f.Signature.Params().At(len(f.Params) - n)) + } + } + } + + // Named results. + if functype.Results != nil { + for _, field := range functype.Results.List { + // Implicit "var" decl of locals for named results. + for _, n := range field.Names { + f.namedResults = append(f.namedResults, f.addLocalForIdent(n)) + } + } + } +} + +type setNumable interface { + setNum(int) +} + +// numberRegisters assigns numbers to all SSA registers +// (value-defining Instructions) in f, to aid debugging. +// (Non-Instruction Values are named at construction.) +// +func numberRegisters(f *Function) { + v := 0 + for _, b := range f.Blocks { + for _, instr := range b.Instrs { + switch instr.(type) { + case Value: + instr.(setNumable).setNum(v) + v++ + } + } + } +} + +// buildReferrers populates the def/use information in all non-nil +// Value.Referrers slice. +// Precondition: all such slices are initially empty. +func buildReferrers(f *Function) { + var rands []*Value + for _, b := range f.Blocks { + for _, instr := range b.Instrs { + rands = instr.Operands(rands[:0]) // recycle storage + for _, rand := range rands { + if r := *rand; r != nil { + if ref := r.Referrers(); ref != nil { + *ref = append(*ref, instr) + } + } + } + } + } +} + +// finishBody() finalizes the function after SSA code generation of its body. +func (f *Function) finishBody() { + f.objects = nil + f.currentBlock = nil + f.lblocks = nil + + // Don't pin the AST in memory (except in debug mode). + if n := f.syntax; n != nil && !f.debugInfo() { + f.syntax = extentNode{n.Pos(), n.End()} + } + + // Remove from f.Locals any Allocs that escape to the heap. + j := 0 + for _, l := range f.Locals { + if !l.Heap { + f.Locals[j] = l + j++ + } + } + // Nil out f.Locals[j:] to aid GC. + for i := j; i < len(f.Locals); i++ { + f.Locals[i] = nil + } + f.Locals = f.Locals[:j] + + optimizeBlocks(f) + + buildReferrers(f) + + buildDomTree(f) + + if f.Prog.mode&NaiveForm == 0 { + // For debugging pre-state of lifting pass: + // numberRegisters(f) + // f.WriteTo(os.Stderr) + lift(f) + } + + f.namedResults = nil // (used by lifting) + + numberRegisters(f) + + if f.Prog.mode&PrintFunctions != 0 { + printMu.Lock() + f.WriteTo(os.Stdout) + printMu.Unlock() + } + + if f.Prog.mode&SanityCheckFunctions != 0 { + mustSanityCheck(f, nil) + } +} + +// removeNilBlocks eliminates nils from f.Blocks and updates each +// BasicBlock.Index. Use this after any pass that may delete blocks. +// +func (f *Function) removeNilBlocks() { + j := 0 + for _, b := range f.Blocks { + if b != nil { + b.Index = j + f.Blocks[j] = b + j++ + } + } + // Nil out f.Blocks[j:] to aid GC. + for i := j; i < len(f.Blocks); i++ { + f.Blocks[i] = nil + } + f.Blocks = f.Blocks[:j] +} + +// SetDebugMode sets the debug mode for package pkg. If true, all its +// functions will include full debug info. 
This greatly increases the +// size of the instruction stream, and causes Functions to depend upon +// the ASTs, potentially keeping them live in memory for longer. +// +func (pkg *Package) SetDebugMode(debug bool) { + // TODO(adonovan): do we want ast.File granularity? + pkg.debug = debug +} + +// debugInfo reports whether debug info is wanted for this function. +func (f *Function) debugInfo() bool { + return f.Pkg != nil && f.Pkg.debug +} + +// addNamedLocal creates a local variable, adds it to function f and +// returns it. Its name and type are taken from obj. Subsequent +// calls to f.lookup(obj) will return the same local. +// +func (f *Function) addNamedLocal(obj types.Object) *Alloc { + l := f.addLocal(obj.Type(), obj.Pos()) + l.Comment = obj.Name() + f.objects[obj] = l + return l +} + +func (f *Function) addLocalForIdent(id *ast.Ident) *Alloc { + return f.addNamedLocal(f.Pkg.info.Defs[id]) +} + +// addLocal creates an anonymous local variable of type typ, adds it +// to function f and returns it. pos is the optional source location. +// +func (f *Function) addLocal(typ types.Type, pos token.Pos) *Alloc { + v := &Alloc{} + v.setType(types.NewPointer(typ)) + v.setPos(pos) + f.Locals = append(f.Locals, v) + f.emit(v) + return v +} + +// lookup returns the address of the named variable identified by obj +// that is local to function f or one of its enclosing functions. +// If escaping, the reference comes from a potentially escaping pointer +// expression and the referent must be heap-allocated. +// +func (f *Function) lookup(obj types.Object, escaping bool) Value { + if v, ok := f.objects[obj]; ok { + if alloc, ok := v.(*Alloc); ok && escaping { + alloc.Heap = true + } + return v // function-local var (address) + } + + // Definition must be in an enclosing function; + // plumb it through intervening closures. + if f.parent == nil { + panic("no ssa.Value for " + obj.String()) + } + outer := f.parent.lookup(obj, true) // escaping + v := &FreeVar{ + name: obj.Name(), + typ: outer.Type(), + pos: outer.Pos(), + outer: outer, + parent: f, + } + f.objects[obj] = v + f.FreeVars = append(f.FreeVars, v) + return v +} + +// emit emits the specified instruction to function f. +func (f *Function) emit(instr Instruction) Value { + return f.currentBlock.emit(instr) +} + +// RelString returns the full name of this function, qualified by +// package name, receiver type, etc. +// +// The specific formatting rules are not guaranteed and may change. +// +// Examples: +// "math.IsNaN" // a package-level function +// "(*bytes.Buffer).Bytes" // a declared method or a wrapper +// "(*bytes.Buffer).Bytes$thunk" // thunk (func wrapping method; receiver is param 0) +// "(*bytes.Buffer).Bytes$bound" // bound (func wrapping method; receiver supplied by closure) +// "main.main$1" // an anonymous function in main +// "main.init#1" // a declared init function +// "main.init" // the synthesized package initializer +// +// When these functions are referred to from within the same package +// (i.e. from == f.Pkg.Object), they are rendered without the package path. +// For example: "IsNaN", "(*Buffer).Bytes", etc. +// +// All non-synthetic functions have distinct package-qualified names. +// (But two methods may have the same name "(T).f" if one is a synthetic +// wrapper promoting a non-exported method "f" from another package; in +// that case, the strings are equal but the identifiers "f" are distinct.) +// +func (f *Function) RelString(from *types.Package) string { + // Anonymous? 
+ if f.parent != nil { + // An anonymous function's Name() looks like "parentName$1", + // but its String() should include the type/package/etc. + parent := f.parent.RelString(from) + for i, anon := range f.parent.AnonFuncs { + if anon == f { + return fmt.Sprintf("%s$%d", parent, 1+i) + } + } + + return f.name // should never happen + } + + // Method (declared or wrapper)? + if recv := f.Signature.Recv(); recv != nil { + return f.relMethod(from, recv.Type()) + } + + // Thunk? + if f.method != nil { + return f.relMethod(from, f.method.Recv()) + } + + // Bound? + if len(f.FreeVars) == 1 && strings.HasSuffix(f.name, "$bound") { + return f.relMethod(from, f.FreeVars[0].Type()) + } + + // Package-level function? + // Prefix with package name for cross-package references only. + if p := f.pkg(); p != nil && p != from { + return fmt.Sprintf("%s.%s", p.Path(), f.name) + } + + // Unknown. + return f.name +} + +func (f *Function) relMethod(from *types.Package, recv types.Type) string { + return fmt.Sprintf("(%s).%s", relType(recv, from), f.name) +} + +// writeSignature writes to buf the signature sig in declaration syntax. +func writeSignature(buf *bytes.Buffer, from *types.Package, name string, sig *types.Signature, params []*Parameter) { + buf.WriteString("func ") + if recv := sig.Recv(); recv != nil { + buf.WriteString("(") + if n := params[0].Name(); n != "" { + buf.WriteString(n) + buf.WriteString(" ") + } + types.WriteType(buf, params[0].Type(), types.RelativeTo(from)) + buf.WriteString(") ") + } + buf.WriteString(name) + types.WriteSignature(buf, sig, types.RelativeTo(from)) +} + +func (f *Function) pkg() *types.Package { + if f.Pkg != nil { + return f.Pkg.Pkg + } + return nil +} + +var _ io.WriterTo = (*Function)(nil) // *Function implements io.Writer + +func (f *Function) WriteTo(w io.Writer) (int64, error) { + var buf bytes.Buffer + WriteFunction(&buf, f) + n, err := w.Write(buf.Bytes()) + return int64(n), err +} + +// WriteFunction writes to buf a human-readable "disassembly" of f. +func WriteFunction(buf *bytes.Buffer, f *Function) { + fmt.Fprintf(buf, "# Name: %s\n", f.String()) + if f.Pkg != nil { + fmt.Fprintf(buf, "# Package: %s\n", f.Pkg.Pkg.Path()) + } + if syn := f.Synthetic; syn != "" { + fmt.Fprintln(buf, "# Synthetic:", syn) + } + if pos := f.Pos(); pos.IsValid() { + fmt.Fprintf(buf, "# Location: %s\n", f.Prog.Fset.Position(pos)) + } + + if f.parent != nil { + fmt.Fprintf(buf, "# Parent: %s\n", f.parent.Name()) + } + + if f.Recover != nil { + fmt.Fprintf(buf, "# Recover: %s\n", f.Recover) + } + + from := f.pkg() + + if f.FreeVars != nil { + buf.WriteString("# Free variables:\n") + for i, fv := range f.FreeVars { + fmt.Fprintf(buf, "# % 3d:\t%s %s\n", i, fv.Name(), relType(fv.Type(), from)) + } + } + + if len(f.Locals) > 0 { + buf.WriteString("# Locals:\n") + for i, l := range f.Locals { + fmt.Fprintf(buf, "# % 3d:\t%s %s\n", i, l.Name(), relType(deref(l.Type()), from)) + } + } + writeSignature(buf, from, f.Name(), f.Signature, f.Params) + buf.WriteString(":\n") + + if f.Blocks == nil { + buf.WriteString("\t(external)\n") + } + + // NB. column calculations are confused by non-ASCII + // characters and assume 8-space tabs. + const punchcard = 80 // for old time's sake. + const tabwidth = 8 + for _, b := range f.Blocks { + if b == nil { + // Corrupt CFG. 
+ fmt.Fprintf(buf, ".nil:\n") + continue + } + n, _ := fmt.Fprintf(buf, "%d:", b.Index) + bmsg := fmt.Sprintf("%s P:%d S:%d", b.Comment, len(b.Preds), len(b.Succs)) + fmt.Fprintf(buf, "%*s%s\n", punchcard-1-n-len(bmsg), "", bmsg) + + if false { // CFG debugging + fmt.Fprintf(buf, "\t# CFG: %s --> %s --> %s\n", b.Preds, b, b.Succs) + } + for _, instr := range b.Instrs { + buf.WriteString("\t") + switch v := instr.(type) { + case Value: + l := punchcard - tabwidth + // Left-align the instruction. + if name := v.Name(); name != "" { + n, _ := fmt.Fprintf(buf, "%s = ", name) + l -= n + } + n, _ := buf.WriteString(instr.String()) + l -= n + // Right-align the type if there's space. + if t := v.Type(); t != nil { + buf.WriteByte(' ') + ts := relType(t, from) + l -= len(ts) + len(" ") // (spaces before and after type) + if l > 0 { + fmt.Fprintf(buf, "%*s", l, "") + } + buf.WriteString(ts) + } + case nil: + // Be robust against bad transforms. + buf.WriteString("") + default: + buf.WriteString(instr.String()) + } + buf.WriteString("\n") + } + } + fmt.Fprintf(buf, "\n") +} + +// newBasicBlock adds to f a new basic block and returns it. It does +// not automatically become the current block for subsequent calls to emit. +// comment is an optional string for more readable debugging output. +// +func (f *Function) newBasicBlock(comment string) *BasicBlock { + b := &BasicBlock{ + Index: len(f.Blocks), + Comment: comment, + parent: f, + } + b.Succs = b.succs2[:0] + f.Blocks = append(f.Blocks, b) + return b +} + +// NewFunction returns a new synthetic Function instance belonging to +// prog, with its name and signature fields set as specified. +// +// The caller is responsible for initializing the remaining fields of +// the function object, e.g. Pkg, Params, Blocks. +// +// It is practically impossible for clients to construct well-formed +// SSA functions/packages/programs directly, so we assume this is the +// job of the Builder alone. NewFunction exists to provide clients a +// little flexibility. For example, analysis tools may wish to +// construct fake Functions for the root of the callgraph, a fake +// "reflect" package, etc. +// +// TODO(adonovan): think harder about the API here. +// +func (prog *Program) NewFunction(name string, sig *types.Signature, provenance string) *Function { + return &Function{Prog: prog, name: name, Signature: sig, Synthetic: provenance} +} + +type extentNode [2]token.Pos + +func (n extentNode) Pos() token.Pos { return n[0] } +func (n extentNode) End() token.Pos { return n[1] } + +// Syntax returns an ast.Node whose Pos/End methods provide the +// lexical extent of the function if it was defined by Go source code +// (f.Synthetic==""), or nil otherwise. +// +// If f was built with debug information (see Package.SetDebugRef), +// the result is the *ast.FuncDecl or *ast.FuncLit that declared the +// function. Otherwise, it is an opaque Node providing only position +// information; this avoids pinning the AST in memory. +// +func (f *Function) Syntax() ast.Node { return f.syntax } diff --git a/vendor/golang.org/x/tools/go/ssa/identical.go b/vendor/golang.org/x/tools/go/ssa/identical.go new file mode 100644 index 000000000..e8026967b --- /dev/null +++ b/vendor/golang.org/x/tools/go/ssa/identical.go @@ -0,0 +1,12 @@ +// Copyright 2017 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +//go:build go1.8 +// +build go1.8 + +package ssa + +import "go/types" + +var structTypesIdentical = types.IdenticalIgnoreTags diff --git a/vendor/golang.org/x/tools/go/ssa/identical_17.go b/vendor/golang.org/x/tools/go/ssa/identical_17.go new file mode 100644 index 000000000..575aa5dfc --- /dev/null +++ b/vendor/golang.org/x/tools/go/ssa/identical_17.go @@ -0,0 +1,12 @@ +// Copyright 2017 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:build !go1.8 +// +build !go1.8 + +package ssa + +import "go/types" + +var structTypesIdentical = types.Identical diff --git a/vendor/golang.org/x/tools/go/ssa/lift.go b/vendor/golang.org/x/tools/go/ssa/lift.go new file mode 100644 index 000000000..048e9b032 --- /dev/null +++ b/vendor/golang.org/x/tools/go/ssa/lift.go @@ -0,0 +1,653 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package ssa + +// This file defines the lifting pass which tries to "lift" Alloc +// cells (new/local variables) into SSA registers, replacing loads +// with the dominating stored value, eliminating loads and stores, and +// inserting φ-nodes as needed. + +// Cited papers and resources: +// +// Ron Cytron et al. 1991. Efficiently computing SSA form... +// http://doi.acm.org/10.1145/115372.115320 +// +// Cooper, Harvey, Kennedy. 2001. A Simple, Fast Dominance Algorithm. +// Software Practice and Experience 2001, 4:1-10. +// http://www.hipersoft.rice.edu/grads/publications/dom14.pdf +// +// Daniel Berlin, llvmdev mailing list, 2012. +// http://lists.cs.uiuc.edu/pipermail/llvmdev/2012-January/046638.html +// (Be sure to expand the whole thread.) + +// TODO(adonovan): opt: there are many optimizations worth evaluating, and +// the conventional wisdom for SSA construction is that a simple +// algorithm well engineered often beats those of better asymptotic +// complexity on all but the most egregious inputs. +// +// Danny Berlin suggests that the Cooper et al. algorithm for +// computing the dominance frontier is superior to Cytron et al. +// Furthermore he recommends that rather than computing the DF for the +// whole function then renaming all alloc cells, it may be cheaper to +// compute the DF for each alloc cell separately and throw it away. +// +// Consider exploiting liveness information to avoid creating dead +// φ-nodes which we then immediately remove. +// +// Also see many other "TODO: opt" suggestions in the code. + +import ( + "fmt" + "go/token" + "go/types" + "math/big" + "os" +) + +// If true, show diagnostic information at each step of lifting. +// Very verbose. +const debugLifting = false + +// domFrontier maps each block to the set of blocks in its dominance +// frontier. The outer slice is conceptually a map keyed by +// Block.Index. The inner slice is conceptually a set, possibly +// containing duplicates. +// +// TODO(adonovan): opt: measure impact of dups; consider a packed bit +// representation, e.g. big.Int, and bitwise parallel operations for +// the union step in the Children loop. +// +// domFrontier's methods mutate the slice's elements but not its +// length, so their receivers needn't be pointers. 
+// +type domFrontier [][]*BasicBlock + +func (df domFrontier) add(u, v *BasicBlock) { + p := &df[u.Index] + *p = append(*p, v) +} + +// build builds the dominance frontier df for the dominator (sub)tree +// rooted at u, using the Cytron et al. algorithm. +// +// TODO(adonovan): opt: consider Berlin approach, computing pruned SSA +// by pruning the entire IDF computation, rather than merely pruning +// the DF -> IDF step. +func (df domFrontier) build(u *BasicBlock) { + // Encounter each node u in postorder of dom tree. + for _, child := range u.dom.children { + df.build(child) + } + for _, vb := range u.Succs { + if v := vb.dom; v.idom != u { + df.add(u, vb) + } + } + for _, w := range u.dom.children { + for _, vb := range df[w.Index] { + // TODO(adonovan): opt: use word-parallel bitwise union. + if v := vb.dom; v.idom != u { + df.add(u, vb) + } + } + } +} + +func buildDomFrontier(fn *Function) domFrontier { + df := make(domFrontier, len(fn.Blocks)) + df.build(fn.Blocks[0]) + if fn.Recover != nil { + df.build(fn.Recover) + } + return df +} + +func removeInstr(refs []Instruction, instr Instruction) []Instruction { + i := 0 + for _, ref := range refs { + if ref == instr { + continue + } + refs[i] = ref + i++ + } + for j := i; j != len(refs); j++ { + refs[j] = nil // aid GC + } + return refs[:i] +} + +// lift replaces local and new Allocs accessed only with +// load/store by SSA registers, inserting φ-nodes where necessary. +// The result is a program in classical pruned SSA form. +// +// Preconditions: +// - fn has no dead blocks (blockopt has run). +// - Def/use info (Operands and Referrers) is up-to-date. +// - The dominator tree is up-to-date. +// +func lift(fn *Function) { + // TODO(adonovan): opt: lots of little optimizations may be + // worthwhile here, especially if they cause us to avoid + // buildDomFrontier. For example: + // + // - Alloc never loaded? Eliminate. + // - Alloc never stored? Replace all loads with a zero constant. + // - Alloc stored once? Replace loads with dominating store; + // don't forget that an Alloc is itself an effective store + // of zero. + // - Alloc used only within a single block? + // Use degenerate algorithm avoiding φ-nodes. + // - Consider synergy with scalar replacement of aggregates (SRA). + // e.g. *(&x.f) where x is an Alloc. + // Perhaps we'd get better results if we generated this as x.f + // i.e. Field(x, .f) instead of Load(FieldIndex(x, .f)). + // Unclear. + // + // But we will start with the simplest correct code. + df := buildDomFrontier(fn) + + if debugLifting { + title := false + for i, blocks := range df { + if blocks != nil { + if !title { + fmt.Fprintf(os.Stderr, "Dominance frontier of %s:\n", fn) + title = true + } + fmt.Fprintf(os.Stderr, "\t%s: %s\n", fn.Blocks[i], blocks) + } + } + } + + newPhis := make(newPhiMap) + + // During this pass we will replace some BasicBlock.Instrs + // (allocs, loads and stores) with nil, keeping a count in + // BasicBlock.gaps. At the end we will reset Instrs to the + // concatenation of all non-dead newPhis and non-nil Instrs + // for the block, reusing the original array if space permits. + + // While we're here, we also eliminate 'rundefers' + // instructions in functions that contain no 'defer' + // instructions. + usesDefer := false + + // A counter used to generate ~unique ids for Phi nodes, as an + // aid to debugging. We use large numbers to make them highly + // visible. All nodes are renumbered later. + fresh := 1000 + + // Determine which allocs we can lift and number them densely. 
+ // The renaming phase uses this numbering for compact maps. + numAllocs := 0 + for _, b := range fn.Blocks { + b.gaps = 0 + b.rundefers = 0 + for _, instr := range b.Instrs { + switch instr := instr.(type) { + case *Alloc: + index := -1 + if liftAlloc(df, instr, newPhis, &fresh) { + index = numAllocs + numAllocs++ + } + instr.index = index + case *Defer: + usesDefer = true + case *RunDefers: + b.rundefers++ + } + } + } + + // renaming maps an alloc (keyed by index) to its replacement + // value. Initially the renaming contains nil, signifying the + // zero constant of the appropriate type; we construct the + // Const lazily at most once on each path through the domtree. + // TODO(adonovan): opt: cache per-function not per subtree. + renaming := make([]Value, numAllocs) + + // Renaming. + rename(fn.Blocks[0], renaming, newPhis) + + // Eliminate dead φ-nodes. + removeDeadPhis(fn.Blocks, newPhis) + + // Prepend remaining live φ-nodes to each block. + for _, b := range fn.Blocks { + nps := newPhis[b] + j := len(nps) + + rundefersToKill := b.rundefers + if usesDefer { + rundefersToKill = 0 + } + + if j+b.gaps+rundefersToKill == 0 { + continue // fast path: no new phis or gaps + } + + // Compact nps + non-nil Instrs into a new slice. + // TODO(adonovan): opt: compact in situ (rightwards) + // if Instrs has sufficient space or slack. + dst := make([]Instruction, len(b.Instrs)+j-b.gaps-rundefersToKill) + for i, np := range nps { + dst[i] = np.phi + } + for _, instr := range b.Instrs { + if instr == nil { + continue + } + if !usesDefer { + if _, ok := instr.(*RunDefers); ok { + continue + } + } + dst[j] = instr + j++ + } + b.Instrs = dst + } + + // Remove any fn.Locals that were lifted. + j := 0 + for _, l := range fn.Locals { + if l.index < 0 { + fn.Locals[j] = l + j++ + } + } + // Nil out fn.Locals[j:] to aid GC. + for i := j; i < len(fn.Locals); i++ { + fn.Locals[i] = nil + } + fn.Locals = fn.Locals[:j] +} + +// removeDeadPhis removes φ-nodes not transitively needed by a +// non-Phi, non-DebugRef instruction. +func removeDeadPhis(blocks []*BasicBlock, newPhis newPhiMap) { + // First pass: find the set of "live" φ-nodes: those reachable + // from some non-Phi instruction. + // + // We compute reachability in reverse, starting from each φ, + // rather than forwards, starting from each live non-Phi + // instruction, because this way visits much less of the + // Value graph. + livePhis := make(map[*Phi]bool) + for _, npList := range newPhis { + for _, np := range npList { + phi := np.phi + if !livePhis[phi] && phiHasDirectReferrer(phi) { + markLivePhi(livePhis, phi) + } + } + } + + // Existing φ-nodes due to && and || operators + // are all considered live (see Go issue 19622). + for _, b := range blocks { + for _, phi := range b.phis() { + markLivePhi(livePhis, phi.(*Phi)) + } + } + + // Second pass: eliminate unused phis from newPhis. + for block, npList := range newPhis { + j := 0 + for _, np := range npList { + if livePhis[np.phi] { + npList[j] = np + j++ + } else { + // discard it, first removing it from referrers + for _, val := range np.phi.Edges { + if refs := val.Referrers(); refs != nil { + *refs = removeInstr(*refs, np.phi) + } + } + np.phi.block = nil + } + } + newPhis[block] = npList[:j] + } +} + +// markLivePhi marks phi, and all φ-nodes transitively reachable via +// its Operands, live. 
+func markLivePhi(livePhis map[*Phi]bool, phi *Phi) { + livePhis[phi] = true + for _, rand := range phi.Operands(nil) { + if q, ok := (*rand).(*Phi); ok { + if !livePhis[q] { + markLivePhi(livePhis, q) + } + } + } +} + +// phiHasDirectReferrer reports whether phi is directly referred to by +// a non-Phi instruction. Such instructions are the +// roots of the liveness traversal. +func phiHasDirectReferrer(phi *Phi) bool { + for _, instr := range *phi.Referrers() { + if _, ok := instr.(*Phi); !ok { + return true + } + } + return false +} + +type blockSet struct{ big.Int } // (inherit methods from Int) + +// add adds b to the set and returns true if the set changed. +func (s *blockSet) add(b *BasicBlock) bool { + i := b.Index + if s.Bit(i) != 0 { + return false + } + s.SetBit(&s.Int, i, 1) + return true +} + +// take removes an arbitrary element from a set s and +// returns its index, or returns -1 if empty. +func (s *blockSet) take() int { + l := s.BitLen() + for i := 0; i < l; i++ { + if s.Bit(i) == 1 { + s.SetBit(&s.Int, i, 0) + return i + } + } + return -1 +} + +// newPhi is a pair of a newly introduced φ-node and the lifted Alloc +// it replaces. +type newPhi struct { + phi *Phi + alloc *Alloc +} + +// newPhiMap records for each basic block, the set of newPhis that +// must be prepended to the block. +type newPhiMap map[*BasicBlock][]newPhi + +// liftAlloc determines whether alloc can be lifted into registers, +// and if so, it populates newPhis with all the φ-nodes it may require +// and returns true. +// +// fresh is a source of fresh ids for phi nodes. +// +func liftAlloc(df domFrontier, alloc *Alloc, newPhis newPhiMap, fresh *int) bool { + // Don't lift aggregates into registers, because we don't have + // a way to express their zero-constants. + switch deref(alloc.Type()).Underlying().(type) { + case *types.Array, *types.Struct: + return false + } + + // Don't lift named return values in functions that defer + // calls that may recover from panic. + if fn := alloc.Parent(); fn.Recover != nil { + for _, nr := range fn.namedResults { + if nr == alloc { + return false + } + } + } + + // Compute defblocks, the set of blocks containing a + // definition of the alloc cell. + var defblocks blockSet + for _, instr := range *alloc.Referrers() { + // Bail out if we discover the alloc is not liftable; + // the only operations permitted to use the alloc are + // loads/stores into the cell, and DebugRef. + switch instr := instr.(type) { + case *Store: + if instr.Val == alloc { + return false // address used as value + } + if instr.Addr != alloc { + panic("Alloc.Referrers is inconsistent") + } + defblocks.add(instr.Block()) + case *UnOp: + if instr.Op != token.MUL { + return false // not a load + } + if instr.X != alloc { + panic("Alloc.Referrers is inconsistent") + } + case *DebugRef: + // ok + default: + return false // some other instruction + } + } + // The Alloc itself counts as a (zero) definition of the cell. + defblocks.add(alloc.Block()) + + if debugLifting { + fmt.Fprintln(os.Stderr, "\tlifting ", alloc, alloc.Name()) + } + + fn := alloc.Parent() + + // Φ-insertion. + // + // What follows is the body of the main loop of the insert-φ + // function described by Cytron et al, but instead of using + // counter tricks, we just reset the 'hasAlready' and 'work' + // sets each iteration. These are bitmaps so it's pretty cheap. + // + // TODO(adonovan): opt: recycle slice storage for W, + // hasAlready, defBlocks across liftAlloc calls. 
+ var hasAlready blockSet + + // Initialize W and work to defblocks. + var work blockSet = defblocks // blocks seen + var W blockSet // blocks to do + W.Set(&defblocks.Int) + + // Traverse iterated dominance frontier, inserting φ-nodes. + for i := W.take(); i != -1; i = W.take() { + u := fn.Blocks[i] + for _, v := range df[u.Index] { + if hasAlready.add(v) { + // Create φ-node. + // It will be prepended to v.Instrs later, if needed. + phi := &Phi{ + Edges: make([]Value, len(v.Preds)), + Comment: alloc.Comment, + } + // This is merely a debugging aid: + phi.setNum(*fresh) + *fresh++ + + phi.pos = alloc.Pos() + phi.setType(deref(alloc.Type())) + phi.block = v + if debugLifting { + fmt.Fprintf(os.Stderr, "\tplace %s = %s at block %s\n", phi.Name(), phi, v) + } + newPhis[v] = append(newPhis[v], newPhi{phi, alloc}) + + if work.add(v) { + W.add(v) + } + } + } + } + + return true +} + +// replaceAll replaces all intraprocedural uses of x with y, +// updating x.Referrers and y.Referrers. +// Precondition: x.Referrers() != nil, i.e. x must be local to some function. +// +func replaceAll(x, y Value) { + var rands []*Value + pxrefs := x.Referrers() + pyrefs := y.Referrers() + for _, instr := range *pxrefs { + rands = instr.Operands(rands[:0]) // recycle storage + for _, rand := range rands { + if *rand != nil { + if *rand == x { + *rand = y + } + } + } + if pyrefs != nil { + *pyrefs = append(*pyrefs, instr) // dups ok + } + } + *pxrefs = nil // x is now unreferenced +} + +// renamed returns the value to which alloc is being renamed, +// constructing it lazily if it's the implicit zero initialization. +// +func renamed(renaming []Value, alloc *Alloc) Value { + v := renaming[alloc.index] + if v == nil { + v = zeroConst(deref(alloc.Type())) + renaming[alloc.index] = v + } + return v +} + +// rename implements the (Cytron et al) SSA renaming algorithm, a +// preorder traversal of the dominator tree replacing all loads of +// Alloc cells with the value stored to that cell by the dominating +// store instruction. For lifting, we need only consider loads, +// stores and φ-nodes. +// +// renaming is a map from *Alloc (keyed by index number) to its +// dominating stored value; newPhis[x] is the set of new φ-nodes to be +// prepended to block x. +// +func rename(u *BasicBlock, renaming []Value, newPhis newPhiMap) { + // Each φ-node becomes the new name for its associated Alloc. + for _, np := range newPhis[u] { + phi := np.phi + alloc := np.alloc + renaming[alloc.index] = phi + } + + // Rename loads and stores of allocs. + for i, instr := range u.Instrs { + switch instr := instr.(type) { + case *Alloc: + if instr.index >= 0 { // store of zero to Alloc cell + // Replace dominated loads by the zero value. + renaming[instr.index] = nil + if debugLifting { + fmt.Fprintf(os.Stderr, "\tkill alloc %s\n", instr) + } + // Delete the Alloc. + u.Instrs[i] = nil + u.gaps++ + } + + case *Store: + if alloc, ok := instr.Addr.(*Alloc); ok && alloc.index >= 0 { // store to Alloc cell + // Replace dominated loads by the stored value. + renaming[alloc.index] = instr.Val + if debugLifting { + fmt.Fprintf(os.Stderr, "\tkill store %s; new value: %s\n", + instr, instr.Val.Name()) + } + // Remove the store from the referrer list of the stored value. + if refs := instr.Val.Referrers(); refs != nil { + *refs = removeInstr(*refs, instr) + } + // Delete the Store. 
+ u.Instrs[i] = nil + u.gaps++ + } + + case *UnOp: + if instr.Op == token.MUL { + if alloc, ok := instr.X.(*Alloc); ok && alloc.index >= 0 { // load of Alloc cell + newval := renamed(renaming, alloc) + if debugLifting { + fmt.Fprintf(os.Stderr, "\tupdate load %s = %s with %s\n", + instr.Name(), instr, newval.Name()) + } + // Replace all references to + // the loaded value by the + // dominating stored value. + replaceAll(instr, newval) + // Delete the Load. + u.Instrs[i] = nil + u.gaps++ + } + } + + case *DebugRef: + if alloc, ok := instr.X.(*Alloc); ok && alloc.index >= 0 { // ref of Alloc cell + if instr.IsAddr { + instr.X = renamed(renaming, alloc) + instr.IsAddr = false + + // Add DebugRef to instr.X's referrers. + if refs := instr.X.Referrers(); refs != nil { + *refs = append(*refs, instr) + } + } else { + // A source expression denotes the address + // of an Alloc that was optimized away. + instr.X = nil + + // Delete the DebugRef. + u.Instrs[i] = nil + u.gaps++ + } + } + } + } + + // For each φ-node in a CFG successor, rename the edge. + for _, v := range u.Succs { + phis := newPhis[v] + if len(phis) == 0 { + continue + } + i := v.predIndex(u) + for _, np := range phis { + phi := np.phi + alloc := np.alloc + newval := renamed(renaming, alloc) + if debugLifting { + fmt.Fprintf(os.Stderr, "\tsetphi %s edge %s -> %s (#%d) (alloc=%s) := %s\n", + phi.Name(), u, v, i, alloc.Name(), newval.Name()) + } + phi.Edges[i] = newval + if prefs := newval.Referrers(); prefs != nil { + *prefs = append(*prefs, phi) + } + } + } + + // Continue depth-first recursion over domtree, pushing a + // fresh copy of the renaming map for each subtree. + for i, v := range u.dom.children { + r := renaming + if i < len(u.dom.children)-1 { + // On all but the final iteration, we must make + // a copy to avoid destructive update. + r = make([]Value, len(renaming)) + copy(r, renaming) + } + rename(v, r, newPhis) + } + +} diff --git a/vendor/golang.org/x/tools/go/ssa/lvalue.go b/vendor/golang.org/x/tools/go/ssa/lvalue.go new file mode 100644 index 000000000..4d85be3ec --- /dev/null +++ b/vendor/golang.org/x/tools/go/ssa/lvalue.go @@ -0,0 +1,120 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package ssa + +// lvalues are the union of addressable expressions and map-index +// expressions. + +import ( + "go/ast" + "go/token" + "go/types" +) + +// An lvalue represents an assignable location that may appear on the +// left-hand side of an assignment. This is a generalization of a +// pointer to permit updates to elements of maps. +// +type lvalue interface { + store(fn *Function, v Value) // stores v into the location + load(fn *Function) Value // loads the contents of the location + address(fn *Function) Value // address of the location + typ() types.Type // returns the type of the location +} + +// An address is an lvalue represented by a true pointer. +type address struct { + addr Value + pos token.Pos // source position + expr ast.Expr // source syntax of the value (not address) [debug mode] +} + +func (a *address) load(fn *Function) Value { + load := emitLoad(fn, a.addr) + load.pos = a.pos + return load +} + +func (a *address) store(fn *Function, v Value) { + store := emitStore(fn, a.addr, v, a.pos) + if a.expr != nil { + // store.Val is v, converted for assignability. 
+ emitDebugRef(fn, a.expr, store.Val, false) + } +} + +func (a *address) address(fn *Function) Value { + if a.expr != nil { + emitDebugRef(fn, a.expr, a.addr, true) + } + return a.addr +} + +func (a *address) typ() types.Type { + return deref(a.addr.Type()) +} + +// An element is an lvalue represented by m[k], the location of an +// element of a map or string. These locations are not addressable +// since pointers cannot be formed from them, but they do support +// load(), and in the case of maps, store(). +// +type element struct { + m, k Value // map or string + t types.Type // map element type or string byte type + pos token.Pos // source position of colon ({k:v}) or lbrack (m[k]=v) +} + +func (e *element) load(fn *Function) Value { + l := &Lookup{ + X: e.m, + Index: e.k, + } + l.setPos(e.pos) + l.setType(e.t) + return fn.emit(l) +} + +func (e *element) store(fn *Function, v Value) { + up := &MapUpdate{ + Map: e.m, + Key: e.k, + Value: emitConv(fn, v, e.t), + } + up.pos = e.pos + fn.emit(up) +} + +func (e *element) address(fn *Function) Value { + panic("map/string elements are not addressable") +} + +func (e *element) typ() types.Type { + return e.t +} + +// A blank is a dummy variable whose name is "_". +// It is not reified: loads are illegal and stores are ignored. +// +type blank struct{} + +func (bl blank) load(fn *Function) Value { + panic("blank.load is illegal") +} + +func (bl blank) store(fn *Function, v Value) { + // no-op +} + +func (bl blank) address(fn *Function) Value { + panic("blank var is not addressable") +} + +func (bl blank) typ() types.Type { + // This should be the type of the blank Ident; the typechecker + // doesn't provide this yet, but fortunately, we don't need it + // yet either. + panic("blank.typ is unimplemented") +} diff --git a/vendor/golang.org/x/tools/go/ssa/methods.go b/vendor/golang.org/x/tools/go/ssa/methods.go new file mode 100644 index 000000000..9cf383916 --- /dev/null +++ b/vendor/golang.org/x/tools/go/ssa/methods.go @@ -0,0 +1,239 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package ssa + +// This file defines utilities for population of method sets. + +import ( + "fmt" + "go/types" +) + +// MethodValue returns the Function implementing method sel, building +// wrapper methods on demand. It returns nil if sel denotes an +// abstract (interface) method. +// +// Precondition: sel.Kind() == MethodVal. +// +// Thread-safe. +// +// EXCLUSIVE_LOCKS_ACQUIRED(prog.methodsMu) +// +func (prog *Program) MethodValue(sel *types.Selection) *Function { + if sel.Kind() != types.MethodVal { + panic(fmt.Sprintf("MethodValue(%s) kind != MethodVal", sel)) + } + T := sel.Recv() + if isInterface(T) { + return nil // abstract method + } + if prog.mode&LogSource != 0 { + defer logStack("MethodValue %s %v", T, sel)() + } + + prog.methodsMu.Lock() + defer prog.methodsMu.Unlock() + + return prog.addMethod(prog.createMethodSet(T), sel) +} + +// LookupMethod returns the implementation of the method of type T +// identified by (pkg, name). It returns nil if the method exists but +// is abstract, and panics if T has no such method. 
+// +func (prog *Program) LookupMethod(T types.Type, pkg *types.Package, name string) *Function { + sel := prog.MethodSets.MethodSet(T).Lookup(pkg, name) + if sel == nil { + panic(fmt.Sprintf("%s has no method %s", T, types.Id(pkg, name))) + } + return prog.MethodValue(sel) +} + +// methodSet contains the (concrete) methods of a non-interface type. +type methodSet struct { + mapping map[string]*Function // populated lazily + complete bool // mapping contains all methods +} + +// Precondition: !isInterface(T). +// EXCLUSIVE_LOCKS_REQUIRED(prog.methodsMu) +func (prog *Program) createMethodSet(T types.Type) *methodSet { + mset, ok := prog.methodSets.At(T).(*methodSet) + if !ok { + mset = &methodSet{mapping: make(map[string]*Function)} + prog.methodSets.Set(T, mset) + } + return mset +} + +// EXCLUSIVE_LOCKS_REQUIRED(prog.methodsMu) +func (prog *Program) addMethod(mset *methodSet, sel *types.Selection) *Function { + if sel.Kind() == types.MethodExpr { + panic(sel) + } + id := sel.Obj().Id() + fn := mset.mapping[id] + if fn == nil { + obj := sel.Obj().(*types.Func) + + needsPromotion := len(sel.Index()) > 1 + needsIndirection := !isPointer(recvType(obj)) && isPointer(sel.Recv()) + if needsPromotion || needsIndirection { + fn = makeWrapper(prog, sel) + } else { + fn = prog.declaredFunc(obj) + } + if fn.Signature.Recv() == nil { + panic(fn) // missing receiver + } + mset.mapping[id] = fn + } + return fn +} + +// RuntimeTypes returns a new unordered slice containing all +// concrete types in the program for which a complete (non-empty) +// method set is required at run-time. +// +// Thread-safe. +// +// EXCLUSIVE_LOCKS_ACQUIRED(prog.methodsMu) +// +func (prog *Program) RuntimeTypes() []types.Type { + prog.methodsMu.Lock() + defer prog.methodsMu.Unlock() + + var res []types.Type + prog.methodSets.Iterate(func(T types.Type, v interface{}) { + if v.(*methodSet).complete { + res = append(res, T) + } + }) + return res +} + +// declaredFunc returns the concrete function/method denoted by obj. +// Panic ensues if there is none. +// +func (prog *Program) declaredFunc(obj *types.Func) *Function { + if v := prog.packageLevelValue(obj); v != nil { + return v.(*Function) + } + panic("no concrete method: " + obj.String()) +} + +// needMethodsOf ensures that runtime type information (including the +// complete method set) is available for the specified type T and all +// its subcomponents. +// +// needMethodsOf must be called for at least every type that is an +// operand of some MakeInterface instruction, and for the type of +// every exported package member. +// +// Precondition: T is not a method signature (*Signature with Recv()!=nil). +// +// Thread-safe. (Called via emitConv from multiple builder goroutines.) +// +// TODO(adonovan): make this faster. It accounts for 20% of SSA build time. +// +// EXCLUSIVE_LOCKS_ACQUIRED(prog.methodsMu) +// +func (prog *Program) needMethodsOf(T types.Type) { + prog.methodsMu.Lock() + prog.needMethods(T, false) + prog.methodsMu.Unlock() +} + +// Precondition: T is not a method signature (*Signature with Recv()!=nil). +// Recursive case: skip => don't create methods for T. +// +// EXCLUSIVE_LOCKS_REQUIRED(prog.methodsMu) +// +func (prog *Program) needMethods(T types.Type, skip bool) { + // Each package maintains its own set of types it has visited. 
+ if prevSkip, ok := prog.runtimeTypes.At(T).(bool); ok { + // needMethods(T) was previously called + if !prevSkip || skip { + return // already seen, with same or false 'skip' value + } + } + prog.runtimeTypes.Set(T, skip) + + tmset := prog.MethodSets.MethodSet(T) + + if !skip && !isInterface(T) && tmset.Len() > 0 { + // Create methods of T. + mset := prog.createMethodSet(T) + if !mset.complete { + mset.complete = true + n := tmset.Len() + for i := 0; i < n; i++ { + prog.addMethod(mset, tmset.At(i)) + } + } + } + + // Recursion over signatures of each method. + for i := 0; i < tmset.Len(); i++ { + sig := tmset.At(i).Type().(*types.Signature) + prog.needMethods(sig.Params(), false) + prog.needMethods(sig.Results(), false) + } + + switch t := T.(type) { + case *types.Basic: + // nop + + case *types.Interface: + // nop---handled by recursion over method set. + + case *types.Pointer: + prog.needMethods(t.Elem(), false) + + case *types.Slice: + prog.needMethods(t.Elem(), false) + + case *types.Chan: + prog.needMethods(t.Elem(), false) + + case *types.Map: + prog.needMethods(t.Key(), false) + prog.needMethods(t.Elem(), false) + + case *types.Signature: + if t.Recv() != nil { + panic(fmt.Sprintf("Signature %s has Recv %s", t, t.Recv())) + } + prog.needMethods(t.Params(), false) + prog.needMethods(t.Results(), false) + + case *types.Named: + // A pointer-to-named type can be derived from a named + // type via reflection. It may have methods too. + prog.needMethods(types.NewPointer(T), false) + + // Consider 'type T struct{S}' where S has methods. + // Reflection provides no way to get from T to struct{S}, + // only to S, so the method set of struct{S} is unwanted, + // so set 'skip' flag during recursion. + prog.needMethods(t.Underlying(), true) + + case *types.Array: + prog.needMethods(t.Elem(), false) + + case *types.Struct: + for i, n := 0, t.NumFields(); i < n; i++ { + prog.needMethods(t.Field(i).Type(), false) + } + + case *types.Tuple: + for i, n := 0, t.Len(); i < n; i++ { + prog.needMethods(t.At(i).Type(), false) + } + + default: + panic(T) + } +} diff --git a/vendor/golang.org/x/tools/go/ssa/mode.go b/vendor/golang.org/x/tools/go/ssa/mode.go new file mode 100644 index 000000000..298f24b91 --- /dev/null +++ b/vendor/golang.org/x/tools/go/ssa/mode.go @@ -0,0 +1,105 @@ +// Copyright 2015 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package ssa + +// This file defines the BuilderMode type and its command-line flag. + +import ( + "bytes" + "fmt" +) + +// BuilderMode is a bitmask of options for diagnostics and checking. +// +// *BuilderMode satisfies the flag.Value interface. Example: +// +// var mode = ssa.BuilderMode(0) +// func init() { flag.Var(&mode, "build", ssa.BuilderModeDoc) } +// +type BuilderMode uint + +const ( + PrintPackages BuilderMode = 1 << iota // Print package inventory to stdout + PrintFunctions // Print function SSA code to stdout + LogSource // Log source locations as SSA builder progresses + SanityCheckFunctions // Perform sanity checking of function bodies + NaiveForm // Build naïve SSA form: don't replace local loads/stores with registers + BuildSerially // Build packages serially, not in parallel. + GlobalDebug // Enable debug info for all packages + BareInits // Build init functions without guards or calls to dependent inits +) + +const BuilderModeDoc = `Options controlling the SSA builder. 
+The value is a sequence of zero or more of these letters: +C perform sanity [C]hecking of the SSA form. +D include [D]ebug info for every function. +P print [P]ackage inventory. +F print [F]unction SSA code. +S log [S]ource locations as SSA builder progresses. +L build distinct packages seria[L]ly instead of in parallel. +N build [N]aive SSA form: don't replace local loads/stores with registers. +I build bare [I]nit functions: no init guards or calls to dependent inits. +` + +func (m BuilderMode) String() string { + var buf bytes.Buffer + if m&GlobalDebug != 0 { + buf.WriteByte('D') + } + if m&PrintPackages != 0 { + buf.WriteByte('P') + } + if m&PrintFunctions != 0 { + buf.WriteByte('F') + } + if m&LogSource != 0 { + buf.WriteByte('S') + } + if m&SanityCheckFunctions != 0 { + buf.WriteByte('C') + } + if m&NaiveForm != 0 { + buf.WriteByte('N') + } + if m&BuildSerially != 0 { + buf.WriteByte('L') + } + if m&BareInits != 0 { + buf.WriteByte('I') + } + return buf.String() +} + +// Set parses the flag characters in s and updates *m. +func (m *BuilderMode) Set(s string) error { + var mode BuilderMode + for _, c := range s { + switch c { + case 'D': + mode |= GlobalDebug + case 'P': + mode |= PrintPackages + case 'F': + mode |= PrintFunctions + case 'S': + mode |= LogSource | BuildSerially + case 'C': + mode |= SanityCheckFunctions + case 'N': + mode |= NaiveForm + case 'L': + mode |= BuildSerially + case 'I': + mode |= BareInits + default: + return fmt.Errorf("unknown BuilderMode option: %q", c) + } + } + *m = mode + return nil +} + +// Get returns m. +func (m BuilderMode) Get() interface{} { return m } diff --git a/vendor/golang.org/x/tools/go/ssa/print.go b/vendor/golang.org/x/tools/go/ssa/print.go new file mode 100644 index 000000000..3333ba41a --- /dev/null +++ b/vendor/golang.org/x/tools/go/ssa/print.go @@ -0,0 +1,431 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package ssa + +// This file implements the String() methods for all Value and +// Instruction types. + +import ( + "bytes" + "fmt" + "go/types" + "io" + "reflect" + "sort" + + "golang.org/x/tools/go/types/typeutil" +) + +// relName returns the name of v relative to i. +// In most cases, this is identical to v.Name(), but references to +// Functions (including methods) and Globals use RelString and +// all types are displayed with relType, so that only cross-package +// references are package-qualified. +// +func relName(v Value, i Instruction) string { + var from *types.Package + if i != nil { + from = i.Parent().pkg() + } + switch v := v.(type) { + case Member: // *Function or *Global + return v.RelString(from) + case *Const: + return v.RelString(from) + } + return v.Name() +} + +func relType(t types.Type, from *types.Package) string { + return types.TypeString(t, types.RelativeTo(from)) +} + +func relString(m Member, from *types.Package) string { + // NB: not all globals have an Object (e.g. init$guard), + // so use Package().Object not Object.Package(). + if pkg := m.Package().Pkg; pkg != nil && pkg != from { + return fmt.Sprintf("%s.%s", pkg.Path(), m.Name()) + } + return m.Name() +} + +// Value.String() +// +// This method is provided only for debugging. +// It never appears in disassembly, which uses Value.Name(). 
+ +func (v *Parameter) String() string { + from := v.Parent().pkg() + return fmt.Sprintf("parameter %s : %s", v.Name(), relType(v.Type(), from)) +} + +func (v *FreeVar) String() string { + from := v.Parent().pkg() + return fmt.Sprintf("freevar %s : %s", v.Name(), relType(v.Type(), from)) +} + +func (v *Builtin) String() string { + return fmt.Sprintf("builtin %s", v.Name()) +} + +// Instruction.String() + +func (v *Alloc) String() string { + op := "local" + if v.Heap { + op = "new" + } + from := v.Parent().pkg() + return fmt.Sprintf("%s %s (%s)", op, relType(deref(v.Type()), from), v.Comment) +} + +func (v *Phi) String() string { + var b bytes.Buffer + b.WriteString("phi [") + for i, edge := range v.Edges { + if i > 0 { + b.WriteString(", ") + } + // Be robust against malformed CFG. + if v.block == nil { + b.WriteString("??") + continue + } + block := -1 + if i < len(v.block.Preds) { + block = v.block.Preds[i].Index + } + fmt.Fprintf(&b, "%d: ", block) + edgeVal := "" // be robust + if edge != nil { + edgeVal = relName(edge, v) + } + b.WriteString(edgeVal) + } + b.WriteString("]") + if v.Comment != "" { + b.WriteString(" #") + b.WriteString(v.Comment) + } + return b.String() +} + +func printCall(v *CallCommon, prefix string, instr Instruction) string { + var b bytes.Buffer + b.WriteString(prefix) + if !v.IsInvoke() { + b.WriteString(relName(v.Value, instr)) + } else { + fmt.Fprintf(&b, "invoke %s.%s", relName(v.Value, instr), v.Method.Name()) + } + b.WriteString("(") + for i, arg := range v.Args { + if i > 0 { + b.WriteString(", ") + } + b.WriteString(relName(arg, instr)) + } + if v.Signature().Variadic() { + b.WriteString("...") + } + b.WriteString(")") + return b.String() +} + +func (c *CallCommon) String() string { + return printCall(c, "", nil) +} + +func (v *Call) String() string { + return printCall(&v.Call, "", v) +} + +func (v *BinOp) String() string { + return fmt.Sprintf("%s %s %s", relName(v.X, v), v.Op.String(), relName(v.Y, v)) +} + +func (v *UnOp) String() string { + return fmt.Sprintf("%s%s%s", v.Op, relName(v.X, v), commaOk(v.CommaOk)) +} + +func printConv(prefix string, v, x Value) string { + from := v.Parent().pkg() + return fmt.Sprintf("%s %s <- %s (%s)", + prefix, + relType(v.Type(), from), + relType(x.Type(), from), + relName(x, v.(Instruction))) +} + +func (v *ChangeType) String() string { return printConv("changetype", v, v.X) } +func (v *Convert) String() string { return printConv("convert", v, v.X) } +func (v *ChangeInterface) String() string { return printConv("change interface", v, v.X) } +func (v *MakeInterface) String() string { return printConv("make", v, v.X) } + +func (v *MakeClosure) String() string { + var b bytes.Buffer + fmt.Fprintf(&b, "make closure %s", relName(v.Fn, v)) + if v.Bindings != nil { + b.WriteString(" [") + for i, c := range v.Bindings { + if i > 0 { + b.WriteString(", ") + } + b.WriteString(relName(c, v)) + } + b.WriteString("]") + } + return b.String() +} + +func (v *MakeSlice) String() string { + from := v.Parent().pkg() + return fmt.Sprintf("make %s %s %s", + relType(v.Type(), from), + relName(v.Len, v), + relName(v.Cap, v)) +} + +func (v *Slice) String() string { + var b bytes.Buffer + b.WriteString("slice ") + b.WriteString(relName(v.X, v)) + b.WriteString("[") + if v.Low != nil { + b.WriteString(relName(v.Low, v)) + } + b.WriteString(":") + if v.High != nil { + b.WriteString(relName(v.High, v)) + } + if v.Max != nil { + b.WriteString(":") + b.WriteString(relName(v.Max, v)) + } + b.WriteString("]") + return b.String() +} + +func (v 
*MakeMap) String() string { + res := "" + if v.Reserve != nil { + res = relName(v.Reserve, v) + } + from := v.Parent().pkg() + return fmt.Sprintf("make %s %s", relType(v.Type(), from), res) +} + +func (v *MakeChan) String() string { + from := v.Parent().pkg() + return fmt.Sprintf("make %s %s", relType(v.Type(), from), relName(v.Size, v)) +} + +func (v *FieldAddr) String() string { + st := deref(v.X.Type()).Underlying().(*types.Struct) + // Be robust against a bad index. + name := "?" + if 0 <= v.Field && v.Field < st.NumFields() { + name = st.Field(v.Field).Name() + } + return fmt.Sprintf("&%s.%s [#%d]", relName(v.X, v), name, v.Field) +} + +func (v *Field) String() string { + st := v.X.Type().Underlying().(*types.Struct) + // Be robust against a bad index. + name := "?" + if 0 <= v.Field && v.Field < st.NumFields() { + name = st.Field(v.Field).Name() + } + return fmt.Sprintf("%s.%s [#%d]", relName(v.X, v), name, v.Field) +} + +func (v *IndexAddr) String() string { + return fmt.Sprintf("&%s[%s]", relName(v.X, v), relName(v.Index, v)) +} + +func (v *Index) String() string { + return fmt.Sprintf("%s[%s]", relName(v.X, v), relName(v.Index, v)) +} + +func (v *Lookup) String() string { + return fmt.Sprintf("%s[%s]%s", relName(v.X, v), relName(v.Index, v), commaOk(v.CommaOk)) +} + +func (v *Range) String() string { + return "range " + relName(v.X, v) +} + +func (v *Next) String() string { + return "next " + relName(v.Iter, v) +} + +func (v *TypeAssert) String() string { + from := v.Parent().pkg() + return fmt.Sprintf("typeassert%s %s.(%s)", commaOk(v.CommaOk), relName(v.X, v), relType(v.AssertedType, from)) +} + +func (v *Extract) String() string { + return fmt.Sprintf("extract %s #%d", relName(v.Tuple, v), v.Index) +} + +func (s *Jump) String() string { + // Be robust against malformed CFG. + block := -1 + if s.block != nil && len(s.block.Succs) == 1 { + block = s.block.Succs[0].Index + } + return fmt.Sprintf("jump %d", block) +} + +func (s *If) String() string { + // Be robust against malformed CFG. 
+ tblock, fblock := -1, -1 + if s.block != nil && len(s.block.Succs) == 2 { + tblock = s.block.Succs[0].Index + fblock = s.block.Succs[1].Index + } + return fmt.Sprintf("if %s goto %d else %d", relName(s.Cond, s), tblock, fblock) +} + +func (s *Go) String() string { + return printCall(&s.Call, "go ", s) +} + +func (s *Panic) String() string { + return "panic " + relName(s.X, s) +} + +func (s *Return) String() string { + var b bytes.Buffer + b.WriteString("return") + for i, r := range s.Results { + if i == 0 { + b.WriteString(" ") + } else { + b.WriteString(", ") + } + b.WriteString(relName(r, s)) + } + return b.String() +} + +func (*RunDefers) String() string { + return "rundefers" +} + +func (s *Send) String() string { + return fmt.Sprintf("send %s <- %s", relName(s.Chan, s), relName(s.X, s)) +} + +func (s *Defer) String() string { + return printCall(&s.Call, "defer ", s) +} + +func (s *Select) String() string { + var b bytes.Buffer + for i, st := range s.States { + if i > 0 { + b.WriteString(", ") + } + if st.Dir == types.RecvOnly { + b.WriteString("<-") + b.WriteString(relName(st.Chan, s)) + } else { + b.WriteString(relName(st.Chan, s)) + b.WriteString("<-") + b.WriteString(relName(st.Send, s)) + } + } + non := "" + if !s.Blocking { + non = "non" + } + return fmt.Sprintf("select %sblocking [%s]", non, b.String()) +} + +func (s *Store) String() string { + return fmt.Sprintf("*%s = %s", relName(s.Addr, s), relName(s.Val, s)) +} + +func (s *MapUpdate) String() string { + return fmt.Sprintf("%s[%s] = %s", relName(s.Map, s), relName(s.Key, s), relName(s.Value, s)) +} + +func (s *DebugRef) String() string { + p := s.Parent().Prog.Fset.Position(s.Pos()) + var descr interface{} + if s.object != nil { + descr = s.object // e.g. "var x int" + } else { + descr = reflect.TypeOf(s.Expr) // e.g. "*ast.CallExpr" + } + var addr string + if s.IsAddr { + addr = "address of " + } + return fmt.Sprintf("; %s%s @ %d:%d is %s", addr, descr, p.Line, p.Column, s.X.Name()) +} + +func (p *Package) String() string { + return "package " + p.Pkg.Path() +} + +var _ io.WriterTo = (*Package)(nil) // *Package implements io.Writer + +func (p *Package) WriteTo(w io.Writer) (int64, error) { + var buf bytes.Buffer + WritePackage(&buf, p) + n, err := w.Write(buf.Bytes()) + return int64(n), err +} + +// WritePackage writes to buf a human-readable summary of p. 
+func WritePackage(buf *bytes.Buffer, p *Package) { + fmt.Fprintf(buf, "%s:\n", p) + + var names []string + maxname := 0 + for name := range p.Members { + if l := len(name); l > maxname { + maxname = l + } + names = append(names, name) + } + + from := p.Pkg + sort.Strings(names) + for _, name := range names { + switch mem := p.Members[name].(type) { + case *NamedConst: + fmt.Fprintf(buf, " const %-*s %s = %s\n", + maxname, name, mem.Name(), mem.Value.RelString(from)) + + case *Function: + fmt.Fprintf(buf, " func %-*s %s\n", + maxname, name, relType(mem.Type(), from)) + + case *Type: + fmt.Fprintf(buf, " type %-*s %s\n", + maxname, name, relType(mem.Type().Underlying(), from)) + for _, meth := range typeutil.IntuitiveMethodSet(mem.Type(), &p.Prog.MethodSets) { + fmt.Fprintf(buf, " %s\n", types.SelectionString(meth, types.RelativeTo(from))) + } + + case *Global: + fmt.Fprintf(buf, " var %-*s %s\n", + maxname, name, relType(mem.Type().(*types.Pointer).Elem(), from)) + } + } + + fmt.Fprintf(buf, "\n") +} + +func commaOk(x bool) string { + if x { + return ",ok" + } + return "" +} diff --git a/vendor/golang.org/x/tools/go/ssa/sanity.go b/vendor/golang.org/x/tools/go/ssa/sanity.go new file mode 100644 index 000000000..16df7e4f0 --- /dev/null +++ b/vendor/golang.org/x/tools/go/ssa/sanity.go @@ -0,0 +1,539 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package ssa + +// An optional pass for sanity-checking invariants of the SSA representation. +// Currently it checks CFG invariants but little at the instruction level. + +import ( + "fmt" + "go/types" + "io" + "os" + "strings" +) + +type sanity struct { + reporter io.Writer + fn *Function + block *BasicBlock + instrs map[Instruction]struct{} + insane bool +} + +// sanityCheck performs integrity checking of the SSA representation +// of the function fn and returns true if it was valid. Diagnostics +// are written to reporter if non-nil, os.Stderr otherwise. Some +// diagnostics are only warnings and do not imply a negative result. +// +// Sanity-checking is intended to facilitate the debugging of code +// transformation passes. +// +func sanityCheck(fn *Function, reporter io.Writer) bool { + if reporter == nil { + reporter = os.Stderr + } + return (&sanity{reporter: reporter}).checkFunction(fn) +} + +// mustSanityCheck is like sanityCheck but panics instead of returning +// a negative result. +// +func mustSanityCheck(fn *Function, reporter io.Writer) { + if !sanityCheck(fn, reporter) { + fn.WriteTo(os.Stderr) + panic("SanityCheck failed") + } +} + +func (s *sanity) diagnostic(prefix, format string, args ...interface{}) { + fmt.Fprintf(s.reporter, "%s: function %s", prefix, s.fn) + if s.block != nil { + fmt.Fprintf(s.reporter, ", block %s", s.block) + } + io.WriteString(s.reporter, ": ") + fmt.Fprintf(s.reporter, format, args...) + io.WriteString(s.reporter, "\n") +} + +func (s *sanity) errorf(format string, args ...interface{}) { + s.insane = true + s.diagnostic("Error", format, args...) +} + +func (s *sanity) warnf(format string, args ...interface{}) { + s.diagnostic("Warning", format, args...) +} + +// findDuplicate returns an arbitrary basic block that appeared more +// than once in blocks, or nil if all were unique. 
+func findDuplicate(blocks []*BasicBlock) *BasicBlock { + if len(blocks) < 2 { + return nil + } + if blocks[0] == blocks[1] { + return blocks[0] + } + // Slow path: + m := make(map[*BasicBlock]bool) + for _, b := range blocks { + if m[b] { + return b + } + m[b] = true + } + return nil +} + +func (s *sanity) checkInstr(idx int, instr Instruction) { + switch instr := instr.(type) { + case *If, *Jump, *Return, *Panic: + s.errorf("control flow instruction not at end of block") + case *Phi: + if idx == 0 { + // It suffices to apply this check to just the first phi node. + if dup := findDuplicate(s.block.Preds); dup != nil { + s.errorf("phi node in block with duplicate predecessor %s", dup) + } + } else { + prev := s.block.Instrs[idx-1] + if _, ok := prev.(*Phi); !ok { + s.errorf("Phi instruction follows a non-Phi: %T", prev) + } + } + if ne, np := len(instr.Edges), len(s.block.Preds); ne != np { + s.errorf("phi node has %d edges but %d predecessors", ne, np) + + } else { + for i, e := range instr.Edges { + if e == nil { + s.errorf("phi node '%s' has no value for edge #%d from %s", instr.Comment, i, s.block.Preds[i]) + } + } + } + + case *Alloc: + if !instr.Heap { + found := false + for _, l := range s.fn.Locals { + if l == instr { + found = true + break + } + } + if !found { + s.errorf("local alloc %s = %s does not appear in Function.Locals", instr.Name(), instr) + } + } + + case *BinOp: + case *Call: + case *ChangeInterface: + case *ChangeType: + case *Convert: + if _, ok := instr.X.Type().Underlying().(*types.Slice); ok { + if ptr, ok := instr.Type().Underlying().(*types.Pointer); ok { + if _, ok := ptr.Elem().(*types.Array); ok { + break + } + } + } + if _, ok := instr.X.Type().Underlying().(*types.Basic); !ok { + if _, ok := instr.Type().Underlying().(*types.Basic); !ok { + s.errorf("convert %s -> %s: at least one type must be basic", instr.X.Type(), instr.Type()) + } + } + + case *Defer: + case *Extract: + case *Field: + case *FieldAddr: + case *Go: + case *Index: + case *IndexAddr: + case *Lookup: + case *MakeChan: + case *MakeClosure: + numFree := len(instr.Fn.(*Function).FreeVars) + numBind := len(instr.Bindings) + if numFree != numBind { + s.errorf("MakeClosure has %d Bindings for function %s with %d free vars", + numBind, instr.Fn, numFree) + + } + if recv := instr.Type().(*types.Signature).Recv(); recv != nil { + s.errorf("MakeClosure's type includes receiver %s", recv.Type()) + } + + case *MakeInterface: + case *MakeMap: + case *MakeSlice: + case *MapUpdate: + case *Next: + case *Range: + case *RunDefers: + case *Select: + case *Send: + case *Slice: + case *Store: + case *TypeAssert: + case *UnOp: + case *DebugRef: + // TODO(adonovan): implement checks. + default: + panic(fmt.Sprintf("Unknown instruction type: %T", instr)) + } + + if call, ok := instr.(CallInstruction); ok { + if call.Common().Signature() == nil { + s.errorf("nil signature: %s", call) + } + } + + // Check that value-defining instructions have valid types + // and a valid referrer list. + if v, ok := instr.(Value); ok { + t := v.Type() + if t == nil { + s.errorf("no type: %s = %s", v.Name(), v) + } else if t == tRangeIter { + // not a proper type; ignore. 
+ } else if b, ok := t.Underlying().(*types.Basic); ok && b.Info()&types.IsUntyped != 0 { + s.errorf("instruction has 'untyped' result: %s = %s : %s", v.Name(), v, t) + } + s.checkReferrerList(v) + } + + // Untyped constants are legal as instruction Operands(), + // for example: + // _ = "foo"[0] + // or: + // if wordsize==64 {...} + + // All other non-Instruction Values can be found via their + // enclosing Function or Package. +} + +func (s *sanity) checkFinalInstr(instr Instruction) { + switch instr := instr.(type) { + case *If: + if nsuccs := len(s.block.Succs); nsuccs != 2 { + s.errorf("If-terminated block has %d successors; expected 2", nsuccs) + return + } + if s.block.Succs[0] == s.block.Succs[1] { + s.errorf("If-instruction has same True, False target blocks: %s", s.block.Succs[0]) + return + } + + case *Jump: + if nsuccs := len(s.block.Succs); nsuccs != 1 { + s.errorf("Jump-terminated block has %d successors; expected 1", nsuccs) + return + } + + case *Return: + if nsuccs := len(s.block.Succs); nsuccs != 0 { + s.errorf("Return-terminated block has %d successors; expected none", nsuccs) + return + } + if na, nf := len(instr.Results), s.fn.Signature.Results().Len(); nf != na { + s.errorf("%d-ary return in %d-ary function", na, nf) + } + + case *Panic: + if nsuccs := len(s.block.Succs); nsuccs != 0 { + s.errorf("Panic-terminated block has %d successors; expected none", nsuccs) + return + } + + default: + s.errorf("non-control flow instruction at end of block") + } +} + +func (s *sanity) checkBlock(b *BasicBlock, index int) { + s.block = b + + if b.Index != index { + s.errorf("block has incorrect Index %d", b.Index) + } + if b.parent != s.fn { + s.errorf("block has incorrect parent %s", b.parent) + } + + // Check all blocks are reachable. + // (The entry block is always implicitly reachable, + // as is the Recover block, if any.) + if (index > 0 && b != b.parent.Recover) && len(b.Preds) == 0 { + s.warnf("unreachable block") + if b.Instrs == nil { + // Since this block is about to be pruned, + // tolerating transient problems in it + // simplifies other optimizations. + return + } + } + + // Check predecessor and successor relations are dual, + // and that all blocks in CFG belong to same function. + for _, a := range b.Preds { + found := false + for _, bb := range a.Succs { + if bb == b { + found = true + break + } + } + if !found { + s.errorf("expected successor edge in predecessor %s; found only: %s", a, a.Succs) + } + if a.parent != s.fn { + s.errorf("predecessor %s belongs to different function %s", a, a.parent) + } + } + for _, c := range b.Succs { + found := false + for _, bb := range c.Preds { + if bb == b { + found = true + break + } + } + if !found { + s.errorf("expected predecessor edge in successor %s; found only: %s", c, c.Preds) + } + if c.parent != s.fn { + s.errorf("successor %s belongs to different function %s", c, c.parent) + } + } + + // Check each instruction is sane. + n := len(b.Instrs) + if n == 0 { + s.errorf("basic block contains no instructions") + } + var rands [10]*Value // reuse storage + for j, instr := range b.Instrs { + if instr == nil { + s.errorf("nil instruction at index %d", j) + continue + } + if b2 := instr.Block(); b2 == nil { + s.errorf("nil Block() for instruction at index %d", j) + continue + } else if b2 != b { + s.errorf("wrong Block() (%s) for instruction at index %d ", b2, j) + continue + } + if j < n-1 { + s.checkInstr(j, instr) + } else { + s.checkFinalInstr(instr) + } + + // Check Instruction.Operands. 
+ operands: + for i, op := range instr.Operands(rands[:0]) { + if op == nil { + s.errorf("nil operand pointer %d of %s", i, instr) + continue + } + val := *op + if val == nil { + continue // a nil operand is ok + } + + // Check that "untyped" types only appear on constant operands. + if _, ok := (*op).(*Const); !ok { + if basic, ok := (*op).Type().(*types.Basic); ok { + if basic.Info()&types.IsUntyped != 0 { + s.errorf("operand #%d of %s is untyped: %s", i, instr, basic) + } + } + } + + // Check that Operands that are also Instructions belong to same function. + // TODO(adonovan): also check their block dominates block b. + if val, ok := val.(Instruction); ok { + if val.Block() == nil { + s.errorf("operand %d of %s is an instruction (%s) that belongs to no block", i, instr, val) + } else if val.Parent() != s.fn { + s.errorf("operand %d of %s is an instruction (%s) from function %s", i, instr, val, val.Parent()) + } + } + + // Check that each function-local operand of + // instr refers back to instr. (NB: quadratic) + switch val := val.(type) { + case *Const, *Global, *Builtin: + continue // not local + case *Function: + if val.parent == nil { + continue // only anon functions are local + } + } + + // TODO(adonovan): check val.Parent() != nil <=> val.Referrers() is defined. + + if refs := val.Referrers(); refs != nil { + for _, ref := range *refs { + if ref == instr { + continue operands + } + } + s.errorf("operand %d of %s (%s) does not refer to us", i, instr, val) + } else { + s.errorf("operand %d of %s (%s) has no referrers", i, instr, val) + } + } + } +} + +func (s *sanity) checkReferrerList(v Value) { + refs := v.Referrers() + if refs == nil { + s.errorf("%s has missing referrer list", v.Name()) + return + } + for i, ref := range *refs { + if _, ok := s.instrs[ref]; !ok { + s.errorf("%s.Referrers()[%d] = %s is not an instruction belonging to this function", v.Name(), i, ref) + } + } +} + +func (s *sanity) checkFunction(fn *Function) bool { + // TODO(adonovan): check Function invariants: + // - check params match signature + // - check transient fields are nil + // - warn if any fn.Locals do not appear among block instructions. + s.fn = fn + if fn.Prog == nil { + s.errorf("nil Prog") + } + + _ = fn.String() // must not crash + _ = fn.RelString(fn.pkg()) // must not crash + + // All functions have a package, except delegates (which are + // shared across packages, or duplicated as weak symbols in a + // separate-compilation model), and error.Error. + if fn.Pkg == nil { + if strings.HasPrefix(fn.Synthetic, "wrapper ") || + strings.HasPrefix(fn.Synthetic, "bound ") || + strings.HasPrefix(fn.Synthetic, "thunk ") || + strings.HasSuffix(fn.name, "Error") { + // ok + } else { + s.errorf("nil Pkg") + } + } + if src, syn := fn.Synthetic == "", fn.Syntax() != nil; src != syn { + s.errorf("got fromSource=%t, hasSyntax=%t; want same values", src, syn) + } + for i, l := range fn.Locals { + if l.Parent() != fn { + s.errorf("Local %s at index %d has wrong parent", l.Name(), i) + } + if l.Heap { + s.errorf("Local %s at index %d has Heap flag set", l.Name(), i) + } + } + // Build the set of valid referrers. + s.instrs = make(map[Instruction]struct{}) + for _, b := range fn.Blocks { + for _, instr := range b.Instrs { + s.instrs[instr] = struct{}{} + } + } + for i, p := range fn.Params { + if p.Parent() != fn { + s.errorf("Param %s at index %d has wrong parent", p.Name(), i) + } + // Check common suffix of Signature and Params match type. 
+ if sig := fn.Signature; sig != nil { + j := i - len(fn.Params) + sig.Params().Len() // index within sig.Params + if j < 0 { + continue + } + if !types.Identical(p.Type(), sig.Params().At(j).Type()) { + s.errorf("Param %s at index %d has wrong type (%s, versus %s in Signature)", p.Name(), i, p.Type(), sig.Params().At(j).Type()) + + } + } + s.checkReferrerList(p) + } + for i, fv := range fn.FreeVars { + if fv.Parent() != fn { + s.errorf("FreeVar %s at index %d has wrong parent", fv.Name(), i) + } + s.checkReferrerList(fv) + } + + if fn.Blocks != nil && len(fn.Blocks) == 0 { + // Function _had_ blocks (so it's not external) but + // they were "optimized" away, even the entry block. + s.errorf("Blocks slice is non-nil but empty") + } + for i, b := range fn.Blocks { + if b == nil { + s.warnf("nil *BasicBlock at f.Blocks[%d]", i) + continue + } + s.checkBlock(b, i) + } + if fn.Recover != nil && fn.Blocks[fn.Recover.Index] != fn.Recover { + s.errorf("Recover block is not in Blocks slice") + } + + s.block = nil + for i, anon := range fn.AnonFuncs { + if anon.Parent() != fn { + s.errorf("AnonFuncs[%d]=%s but %s.Parent()=%s", i, anon, anon, anon.Parent()) + } + } + s.fn = nil + return !s.insane +} + +// sanityCheckPackage checks invariants of packages upon creation. +// It does not require that the package is built. +// Unlike sanityCheck (for functions), it just panics at the first error. +func sanityCheckPackage(pkg *Package) { + if pkg.Pkg == nil { + panic(fmt.Sprintf("Package %s has no Object", pkg)) + } + _ = pkg.String() // must not crash + + for name, mem := range pkg.Members { + if name != mem.Name() { + panic(fmt.Sprintf("%s: %T.Name() = %s, want %s", + pkg.Pkg.Path(), mem, mem.Name(), name)) + } + obj := mem.Object() + if obj == nil { + // This check is sound because fields + // {Global,Function}.object have type + // types.Object. (If they were declared as + // *types.{Var,Func}, we'd have a non-empty + // interface containing a nil pointer.) + + continue // not all members have typechecker objects + } + if obj.Name() != name { + if obj.Name() == "init" && strings.HasPrefix(mem.Name(), "init#") { + // Ok. The name of a declared init function varies between + // its types.Func ("init") and its ssa.Function ("init#%d"). + } else { + panic(fmt.Sprintf("%s: %T.Object().Name() = %s, want %s", + pkg.Pkg.Path(), mem, obj.Name(), name)) + } + } + if obj.Pos() != mem.Pos() { + panic(fmt.Sprintf("%s Pos=%d obj.Pos=%d", mem, mem.Pos(), obj.Pos())) + } + } +} diff --git a/vendor/golang.org/x/tools/go/ssa/source.go b/vendor/golang.org/x/tools/go/ssa/source.go new file mode 100644 index 000000000..8d9cca170 --- /dev/null +++ b/vendor/golang.org/x/tools/go/ssa/source.go @@ -0,0 +1,293 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package ssa + +// This file defines utilities for working with source positions +// or source-level named entities ("objects"). + +// TODO(adonovan): test that {Value,Instruction}.Pos() positions match +// the originating syntax, as specified. + +import ( + "go/ast" + "go/token" + "go/types" +) + +// EnclosingFunction returns the function that contains the syntax +// node denoted by path. +// +// Syntax associated with package-level variable specifications is +// enclosed by the package's init() function. +// +// Returns nil if not found; reasons might include: +// - the node is not enclosed by any function. 
+// - the node is within an anonymous function (FuncLit) and +// its SSA function has not been created yet +// (pkg.Build() has not yet been called). +// +func EnclosingFunction(pkg *Package, path []ast.Node) *Function { + // Start with package-level function... + fn := findEnclosingPackageLevelFunction(pkg, path) + if fn == nil { + return nil // not in any function + } + + // ...then walk down the nested anonymous functions. + n := len(path) +outer: + for i := range path { + if lit, ok := path[n-1-i].(*ast.FuncLit); ok { + for _, anon := range fn.AnonFuncs { + if anon.Pos() == lit.Type.Func { + fn = anon + continue outer + } + } + // SSA function not found: + // - package not yet built, or maybe + // - builder skipped FuncLit in dead block + // (in principle; but currently the Builder + // generates even dead FuncLits). + return nil + } + } + return fn +} + +// HasEnclosingFunction returns true if the AST node denoted by path +// is contained within the declaration of some function or +// package-level variable. +// +// Unlike EnclosingFunction, the behaviour of this function does not +// depend on whether SSA code for pkg has been built, so it can be +// used to quickly reject check inputs that will cause +// EnclosingFunction to fail, prior to SSA building. +// +func HasEnclosingFunction(pkg *Package, path []ast.Node) bool { + return findEnclosingPackageLevelFunction(pkg, path) != nil +} + +// findEnclosingPackageLevelFunction returns the Function +// corresponding to the package-level function enclosing path. +// +func findEnclosingPackageLevelFunction(pkg *Package, path []ast.Node) *Function { + if n := len(path); n >= 2 { // [... {Gen,Func}Decl File] + switch decl := path[n-2].(type) { + case *ast.GenDecl: + if decl.Tok == token.VAR && n >= 3 { + // Package-level 'var' initializer. + return pkg.init + } + + case *ast.FuncDecl: + if decl.Recv == nil && decl.Name.Name == "init" { + // Explicit init() function. + for _, b := range pkg.init.Blocks { + for _, instr := range b.Instrs { + if instr, ok := instr.(*Call); ok { + if callee, ok := instr.Call.Value.(*Function); ok && callee.Pkg == pkg && callee.Pos() == decl.Name.NamePos { + return callee + } + } + } + } + // Hack: return non-nil when SSA is not yet + // built so that HasEnclosingFunction works. + return pkg.init + } + // Declared function/method. + return findNamedFunc(pkg, decl.Name.NamePos) + } + } + return nil // not in any function +} + +// findNamedFunc returns the named function whose FuncDecl.Ident is at +// position pos. +// +func findNamedFunc(pkg *Package, pos token.Pos) *Function { + // Look at all package members and method sets of named types. + // Not very efficient. + for _, mem := range pkg.Members { + switch mem := mem.(type) { + case *Function: + if mem.Pos() == pos { + return mem + } + case *Type: + mset := pkg.Prog.MethodSets.MethodSet(types.NewPointer(mem.Type())) + for i, n := 0, mset.Len(); i < n; i++ { + // Don't call Program.Method: avoid creating wrappers. + obj := mset.At(i).Obj().(*types.Func) + if obj.Pos() == pos { + return pkg.values[obj].(*Function) + } + } + } + } + return nil +} + +// ValueForExpr returns the SSA Value that corresponds to non-constant +// expression e. +// +// It returns nil if no value was found, e.g. +// - the expression is not lexically contained within f; +// - f was not built with debug information; or +// - e is a constant expression. (For efficiency, no debug +// information is stored for constants. Use +// go/types.Info.Types[e].Value instead.) 
+// - e is a reference to nil or a built-in function. +// - the value was optimised away. +// +// If e is an addressable expression used in an lvalue context, +// value is the address denoted by e, and isAddr is true. +// +// The types of e (or &e, if isAddr) and the result are equal +// (modulo "untyped" bools resulting from comparisons). +// +// (Tip: to find the ssa.Value given a source position, use +// astutil.PathEnclosingInterval to locate the ast.Node, then +// EnclosingFunction to locate the Function, then ValueForExpr to find +// the ssa.Value.) +// +func (f *Function) ValueForExpr(e ast.Expr) (value Value, isAddr bool) { + if f.debugInfo() { // (opt) + e = unparen(e) + for _, b := range f.Blocks { + for _, instr := range b.Instrs { + if ref, ok := instr.(*DebugRef); ok { + if ref.Expr == e { + return ref.X, ref.IsAddr + } + } + } + } + } + return +} + +// --- Lookup functions for source-level named entities (types.Objects) --- + +// Package returns the SSA Package corresponding to the specified +// type-checker package object. +// It returns nil if no such SSA package has been created. +// +func (prog *Program) Package(obj *types.Package) *Package { + return prog.packages[obj] +} + +// packageLevelValue returns the package-level value corresponding to +// the specified named object, which may be a package-level const +// (*Const), var (*Global) or func (*Function) of some package in +// prog. It returns nil if the object is not found. +// +func (prog *Program) packageLevelValue(obj types.Object) Value { + if pkg, ok := prog.packages[obj.Pkg()]; ok { + return pkg.values[obj] + } + return nil +} + +// FuncValue returns the concrete Function denoted by the source-level +// named function obj, or nil if obj denotes an interface method. +// +// TODO(adonovan): check the invariant that obj.Type() matches the +// result's Signature, both in the params/results and in the receiver. +// +func (prog *Program) FuncValue(obj *types.Func) *Function { + fn, _ := prog.packageLevelValue(obj).(*Function) + return fn +} + +// ConstValue returns the SSA Value denoted by the source-level named +// constant obj. +// +func (prog *Program) ConstValue(obj *types.Const) *Const { + // TODO(adonovan): opt: share (don't reallocate) + // Consts for const objects and constant ast.Exprs. + + // Universal constant? {true,false,nil} + if obj.Parent() == types.Universe { + return NewConst(obj.Val(), obj.Type()) + } + // Package-level named constant? + if v := prog.packageLevelValue(obj); v != nil { + return v.(*Const) + } + return NewConst(obj.Val(), obj.Type()) +} + +// VarValue returns the SSA Value that corresponds to a specific +// identifier denoting the source-level named variable obj. +// +// VarValue returns nil if a local variable was not found, perhaps +// because its package was not built, the debug information was not +// requested during SSA construction, or the value was optimized away. +// +// ref is the path to an ast.Ident (e.g. from PathEnclosingInterval), +// and that ident must resolve to obj. +// +// pkg is the package enclosing the reference. (A reference to a var +// always occurs within a function, so we need to know where to find it.) +// +// If the identifier is a field selector and its base expression is +// non-addressable, then VarValue returns the value of that field. +// For example: +// func f() struct {x int} +// f().x // VarValue(x) returns a *Field instruction of type int +// +// All other identifiers denote addressable locations (variables). 
+// For them, VarValue may return either the variable's address or its +// value, even when the expression is evaluated only for its value; the +// situation is reported by isAddr, the second component of the result. +// +// If !isAddr, the returned value is the one associated with the +// specific identifier. For example, +// var x int // VarValue(x) returns Const 0 here +// x = 1 // VarValue(x) returns Const 1 here +// +// It is not specified whether the value or the address is returned in +// any particular case, as it may depend upon optimizations performed +// during SSA code generation, such as registerization, constant +// folding, avoidance of materialization of subexpressions, etc. +// +func (prog *Program) VarValue(obj *types.Var, pkg *Package, ref []ast.Node) (value Value, isAddr bool) { + // All references to a var are local to some function, possibly init. + fn := EnclosingFunction(pkg, ref) + if fn == nil { + return // e.g. def of struct field; SSA not built? + } + + id := ref[0].(*ast.Ident) + + // Defining ident of a parameter? + if id.Pos() == obj.Pos() { + for _, param := range fn.Params { + if param.Object() == obj { + return param, false + } + } + } + + // Other ident? + for _, b := range fn.Blocks { + for _, instr := range b.Instrs { + if dr, ok := instr.(*DebugRef); ok { + if dr.Pos() == id.Pos() { + return dr.X, dr.IsAddr + } + } + } + } + + // Defining ident of package-level var? + if v := prog.packageLevelValue(obj); v != nil { + return v.(*Global), true + } + + return // e.g. debug info not requested, or var optimized away +} diff --git a/vendor/golang.org/x/tools/go/ssa/ssa.go b/vendor/golang.org/x/tools/go/ssa/ssa.go new file mode 100644 index 000000000..d3faf4438 --- /dev/null +++ b/vendor/golang.org/x/tools/go/ssa/ssa.go @@ -0,0 +1,1696 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package ssa + +// This package defines a high-level intermediate representation for +// Go programs using static single-assignment (SSA) form. + +import ( + "fmt" + "go/ast" + "go/constant" + "go/token" + "go/types" + "sync" + + "golang.org/x/tools/go/types/typeutil" +) + +// A Program is a partial or complete Go program converted to SSA form. +type Program struct { + Fset *token.FileSet // position information for the files of this Program + imported map[string]*Package // all importable Packages, keyed by import path + packages map[*types.Package]*Package // all loaded Packages, keyed by object + mode BuilderMode // set of mode bits for SSA construction + MethodSets typeutil.MethodSetCache // cache of type-checker's method-sets + + methodsMu sync.Mutex // guards the following maps: + methodSets typeutil.Map // maps type to its concrete methodSet + runtimeTypes typeutil.Map // types for which rtypes are needed + canon typeutil.Map // type canonicalization map + bounds map[*types.Func]*Function // bounds for curried x.Method closures + thunks map[selectionKey]*Function // thunks for T.Method expressions +} + +// A Package is a single analyzed Go package containing Members for +// all package-level functions, variables, constants and types it +// declares. These may be accessed directly via Members, or via the +// type-specific accessor methods Func, Type, Var and Const. +// +// Members also contains entries for "init" (the synthetic package +// initializer) and "init#%d", the nth declared init function, +// and unspecified other things too. 
+// +type Package struct { + Prog *Program // the owning program + Pkg *types.Package // the corresponding go/types.Package + Members map[string]Member // all package members keyed by name (incl. init and init#%d) + values map[types.Object]Value // package members (incl. types and methods), keyed by object + init *Function // Func("init"); the package's init function + debug bool // include full debug info in this package + + // The following fields are set transiently, then cleared + // after building. + buildOnce sync.Once // ensures package building occurs once + ninit int32 // number of init functions + info *types.Info // package type information + files []*ast.File // package ASTs +} + +// A Member is a member of a Go package, implemented by *NamedConst, +// *Global, *Function, or *Type; they are created by package-level +// const, var, func and type declarations respectively. +// +type Member interface { + Name() string // declared name of the package member + String() string // package-qualified name of the package member + RelString(*types.Package) string // like String, but relative refs are unqualified + Object() types.Object // typechecker's object for this member, if any + Pos() token.Pos // position of member's declaration, if known + Type() types.Type // type of the package member + Token() token.Token // token.{VAR,FUNC,CONST,TYPE} + Package() *Package // the containing package +} + +// A Type is a Member of a Package representing a package-level named type. +type Type struct { + object *types.TypeName + pkg *Package +} + +// A NamedConst is a Member of a Package representing a package-level +// named constant. +// +// Pos() returns the position of the declaring ast.ValueSpec.Names[*] +// identifier. +// +// NB: a NamedConst is not a Value; it contains a constant Value, which +// it augments with the name and position of its 'const' declaration. +// +type NamedConst struct { + object *types.Const + Value *Const + pkg *Package +} + +// A Value is an SSA value that can be referenced by an instruction. +type Value interface { + // Name returns the name of this value, and determines how + // this Value appears when used as an operand of an + // Instruction. + // + // This is the same as the source name for Parameters, + // Builtins, Functions, FreeVars, Globals. + // For constants, it is a representation of the constant's value + // and type. For all other Values this is the name of the + // virtual register defined by the instruction. + // + // The name of an SSA Value is not semantically significant, + // and may not even be unique within a function. + Name() string + + // If this value is an Instruction, String returns its + // disassembled form; otherwise it returns unspecified + // human-readable information about the Value, such as its + // kind, name and type. + String() string + + // Type returns the type of this value. Many instructions + // (e.g. IndexAddr) change their behaviour depending on the + // types of their operands. + Type() types.Type + + // Parent returns the function to which this Value belongs. + // It returns nil for named Functions, Builtin, Const and Global. + Parent() *Function + + // Referrers returns the list of instructions that have this + // value as one of their operands; it may contain duplicates + // if an instruction has a repeated operand. + // + // Referrers actually returns a pointer through which the + // caller may perform mutations to the object's state. + // + // Referrers is currently only defined if Parent()!=nil, + // i.e. 
for the function-local values FreeVar, Parameter, + // Functions (iff anonymous) and all value-defining instructions. + // It returns nil for named Functions, Builtin, Const and Global. + // + // Instruction.Operands contains the inverse of this relation. + Referrers() *[]Instruction + + // Pos returns the location of the AST token most closely + // associated with the operation that gave rise to this value, + // or token.NoPos if it was not explicit in the source. + // + // For each ast.Node type, a particular token is designated as + // the closest location for the expression, e.g. the Lparen + // for an *ast.CallExpr. This permits a compact but + // approximate mapping from Values to source positions for use + // in diagnostic messages, for example. + // + // (Do not use this position to determine which Value + // corresponds to an ast.Expr; use Function.ValueForExpr + // instead. NB: it requires that the function was built with + // debug information.) + Pos() token.Pos +} + +// An Instruction is an SSA instruction that computes a new Value or +// has some effect. +// +// An Instruction that defines a value (e.g. BinOp) also implements +// the Value interface; an Instruction that only has an effect (e.g. Store) +// does not. +// +type Instruction interface { + // String returns the disassembled form of this value. + // + // Examples of Instructions that are Values: + // "x + y" (BinOp) + // "len([])" (Call) + // Note that the name of the Value is not printed. + // + // Examples of Instructions that are not Values: + // "return x" (Return) + // "*y = x" (Store) + // + // (The separation Value.Name() from Value.String() is useful + // for some analyses which distinguish the operation from the + // value it defines, e.g., 'y = local int' is both an allocation + // of memory 'local int' and a definition of a pointer y.) + String() string + + // Parent returns the function to which this instruction + // belongs. + Parent() *Function + + // Block returns the basic block to which this instruction + // belongs. + Block() *BasicBlock + + // setBlock sets the basic block to which this instruction belongs. + setBlock(*BasicBlock) + + // Operands returns the operands of this instruction: the + // set of Values it references. + // + // Specifically, it appends their addresses to rands, a + // user-provided slice, and returns the resulting slice, + // permitting avoidance of memory allocation. + // + // The operands are appended in undefined order, but the order + // is consistent for a given Instruction; the addresses are + // always non-nil but may point to a nil Value. Clients may + // store through the pointers, e.g. to effect a value + // renaming. + // + // Value.Referrers is a subset of the inverse of this + // relation. (Referrers are not tracked for all types of + // Values.) + Operands(rands []*Value) []*Value + + // Pos returns the location of the AST token most closely + // associated with the operation that gave rise to this + // instruction, or token.NoPos if it was not explicit in the + // source. + // + // For each ast.Node type, a particular token is designated as + // the closest location for the expression, e.g. the Go token + // for an *ast.GoStmt. This permits a compact but approximate + // mapping from Instructions to source positions for use in + // diagnostic messages, for example. + // + // (Do not use this position to determine which Instruction + // corresponds to an ast.Expr; see the notes for Value.Pos. 
+ // This position may be used to determine which non-Value + // Instruction corresponds to some ast.Stmts, but not all: If + // and Jump instructions have no Pos(), for example.) + Pos() token.Pos +} + +// A Node is a node in the SSA value graph. Every concrete type that +// implements Node is also either a Value, an Instruction, or both. +// +// Node contains the methods common to Value and Instruction, plus the +// Operands and Referrers methods generalized to return nil for +// non-Instructions and non-Values, respectively. +// +// Node is provided to simplify SSA graph algorithms. Clients should +// use the more specific and informative Value or Instruction +// interfaces where appropriate. +// +type Node interface { + // Common methods: + String() string + Pos() token.Pos + Parent() *Function + + // Partial methods: + Operands(rands []*Value) []*Value // nil for non-Instructions + Referrers() *[]Instruction // nil for non-Values +} + +// Function represents the parameters, results, and code of a function +// or method. +// +// If Blocks is nil, this indicates an external function for which no +// Go source code is available. In this case, FreeVars and Locals +// are nil too. Clients performing whole-program analysis must +// handle external functions specially. +// +// Blocks contains the function's control-flow graph (CFG). +// Blocks[0] is the function entry point; block order is not otherwise +// semantically significant, though it may affect the readability of +// the disassembly. +// To iterate over the blocks in dominance order, use DomPreorder(). +// +// Recover is an optional second entry point to which control resumes +// after a recovered panic. The Recover block may contain only a return +// statement, preceded by a load of the function's named return +// parameters, if any. +// +// A nested function (Parent()!=nil) that refers to one or more +// lexically enclosing local variables ("free variables") has FreeVars. +// Such functions cannot be called directly but require a +// value created by MakeClosure which, via its Bindings, supplies +// values for these parameters. +// +// If the function is a method (Signature.Recv() != nil) then the first +// element of Params is the receiver parameter. +// +// A Go package may declare many functions called "init". +// For each one, Object().Name() returns "init" but Name() returns +// "init#1", etc, in declaration order. +// +// Pos() returns the declaring ast.FuncLit.Type.Func or the position +// of the ast.FuncDecl.Name, if the function was explicit in the +// source. Synthetic wrappers, for which Synthetic != "", may share +// the same position as the function they wrap. +// Syntax.Pos() always returns the position of the declaring "func" token. +// +// Type() returns the function's Signature. 
+// +type Function struct { + name string + object types.Object // a declared *types.Func or one of its wrappers + method *types.Selection // info about provenance of synthetic methods + Signature *types.Signature + pos token.Pos + + Synthetic string // provenance of synthetic function; "" for true source functions + syntax ast.Node // *ast.Func{Decl,Lit}; replaced with simple ast.Node after build, unless debug mode + parent *Function // enclosing function if anon; nil if global + Pkg *Package // enclosing package; nil for shared funcs (wrappers and error.Error) + Prog *Program // enclosing program + Params []*Parameter // function parameters; for methods, includes receiver + FreeVars []*FreeVar // free variables whose values must be supplied by closure + Locals []*Alloc // local variables of this function + Blocks []*BasicBlock // basic blocks of the function; nil => external + Recover *BasicBlock // optional; control transfers here after recovered panic + AnonFuncs []*Function // anonymous functions directly beneath this one + referrers []Instruction // referring instructions (iff Parent() != nil) + + // The following fields are set transiently during building, + // then cleared. + currentBlock *BasicBlock // where to emit code + objects map[types.Object]Value // addresses of local variables + namedResults []*Alloc // tuple of named results + targets *targets // linked stack of branch targets + lblocks map[*ast.Object]*lblock // labelled blocks +} + +// BasicBlock represents an SSA basic block. +// +// The final element of Instrs is always an explicit transfer of +// control (If, Jump, Return, or Panic). +// +// A block may contain no Instructions only if it is unreachable, +// i.e., Preds is nil. Empty blocks are typically pruned. +// +// BasicBlocks and their Preds/Succs relation form a (possibly cyclic) +// graph independent of the SSA Value graph: the control-flow graph or +// CFG. It is illegal for multiple edges to exist between the same +// pair of blocks. +// +// Each BasicBlock is also a node in the dominator tree of the CFG. +// The tree may be navigated using Idom()/Dominees() and queried using +// Dominates(). +// +// The order of Preds and Succs is significant (to Phi and If +// instructions, respectively). +// +type BasicBlock struct { + Index int // index of this block within Parent().Blocks + Comment string // optional label; no semantic significance + parent *Function // parent function + Instrs []Instruction // instructions in order + Preds, Succs []*BasicBlock // predecessors and successors + succs2 [2]*BasicBlock // initial space for Succs + dom domInfo // dominator tree info + gaps int // number of nil Instrs (transient) + rundefers int // number of rundefers (transient) +} + +// Pure values ---------------------------------------- + +// A FreeVar represents a free variable of the function to which it +// belongs. +// +// FreeVars are used to implement anonymous functions, whose free +// variables are lexically captured in a closure formed by +// MakeClosure. The value of such a free var is an Alloc or another +// FreeVar and is considered a potentially escaping heap address, with +// pointer type. +// +// FreeVars are also used to implement bound method closures. Such a +// free var represents the receiver value and may be of any type that +// has concrete methods. +// +// Pos() returns the position of the value that was captured, which +// belongs to an enclosing function. 
+// +type FreeVar struct { + name string + typ types.Type + pos token.Pos + parent *Function + referrers []Instruction + + // Transiently needed during building. + outer Value // the Value captured from the enclosing context. +} + +// A Parameter represents an input parameter of a function. +// +type Parameter struct { + name string + object types.Object // a *types.Var; nil for non-source locals + typ types.Type + pos token.Pos + parent *Function + referrers []Instruction +} + +// A Const represents the value of a constant expression. +// +// The underlying type of a constant may be any boolean, numeric, or +// string type. In addition, a Const may represent the nil value of +// any reference type---interface, map, channel, pointer, slice, or +// function---but not "untyped nil". +// +// All source-level constant expressions are represented by a Const +// of the same type and value. +// +// Value holds the value of the constant, independent of its Type(), +// using go/constant representation, or nil for a typed nil value. +// +// Pos() returns token.NoPos. +// +// Example printed form: +// 42:int +// "hello":untyped string +// 3+4i:MyComplex +// +type Const struct { + typ types.Type + Value constant.Value +} + +// A Global is a named Value holding the address of a package-level +// variable. +// +// Pos() returns the position of the ast.ValueSpec.Names[*] +// identifier. +// +type Global struct { + name string + object types.Object // a *types.Var; may be nil for synthetics e.g. init$guard + typ types.Type + pos token.Pos + + Pkg *Package +} + +// A Builtin represents a specific use of a built-in function, e.g. len. +// +// Builtins are immutable values. Builtins do not have addresses. +// Builtins can only appear in CallCommon.Value. +// +// Name() indicates the function: one of the built-in functions from the +// Go spec (excluding "make" and "new") or one of these ssa-defined +// intrinsics: +// +// // wrapnilchk returns ptr if non-nil, panics otherwise. +// // (For use in indirection wrappers.) +// func ssa:wrapnilchk(ptr *T, recvType, methodName string) *T +// +// Object() returns a *types.Builtin for built-ins defined by the spec, +// nil for others. +// +// Type() returns a *types.Signature representing the effective +// signature of the built-in for this call. +// +type Builtin struct { + name string + sig *types.Signature +} + +// Value-defining instructions ---------------------------------------- + +// The Alloc instruction reserves space for a variable of the given type, +// zero-initializes it, and yields its address. +// +// Alloc values are always addresses, and have pointer types, so the +// type of the allocated variable is actually +// Type().Underlying().(*types.Pointer).Elem(). +// +// If Heap is false, Alloc allocates space in the function's +// activation record (frame); we refer to an Alloc(Heap=false) as a +// "local" alloc. Each local Alloc returns the same address each time +// it is executed within the same activation; the space is +// re-initialized to zero. +// +// If Heap is true, Alloc allocates space in the heap; we +// refer to an Alloc(Heap=true) as a "new" alloc. Each new Alloc +// returns a different address each time it is executed. +// +// When Alloc is applied to a channel, map or slice type, it returns +// the address of an uninitialized (nil) reference of that kind; store +// the result of MakeSlice, MakeMap or MakeChan in that location to +// instantiate these types. 
+// +// Pos() returns the ast.CompositeLit.Lbrace for a composite literal, +// or the ast.CallExpr.Rparen for a call to new() or for a call that +// allocates a varargs slice. +// +// Example printed form: +// t0 = local int +// t1 = new int +// +type Alloc struct { + register + Comment string + Heap bool + index int // dense numbering; for lifting +} + +// The Phi instruction represents an SSA φ-node, which combines values +// that differ across incoming control-flow edges and yields a new +// value. Within a block, all φ-nodes must appear before all non-φ +// nodes. +// +// Pos() returns the position of the && or || for short-circuit +// control-flow joins, or that of the *Alloc for φ-nodes inserted +// during SSA renaming. +// +// Example printed form: +// t2 = phi [0: t0, 1: t1] +// +type Phi struct { + register + Comment string // a hint as to its purpose + Edges []Value // Edges[i] is value for Block().Preds[i] +} + +// The Call instruction represents a function or method call. +// +// The Call instruction yields the function result if there is exactly +// one. Otherwise it returns a tuple, the components of which are +// accessed via Extract. +// +// See CallCommon for generic function call documentation. +// +// Pos() returns the ast.CallExpr.Lparen, if explicit in the source. +// +// Example printed form: +// t2 = println(t0, t1) +// t4 = t3() +// t7 = invoke t5.Println(...t6) +// +type Call struct { + register + Call CallCommon +} + +// The BinOp instruction yields the result of binary operation X Op Y. +// +// Pos() returns the ast.BinaryExpr.OpPos, if explicit in the source. +// +// Example printed form: +// t1 = t0 + 1:int +// +type BinOp struct { + register + // One of: + // ADD SUB MUL QUO REM + - * / % + // AND OR XOR SHL SHR AND_NOT & | ^ << >> &^ + // EQL NEQ LSS LEQ GTR GEQ == != < <= < >= + Op token.Token + X, Y Value +} + +// The UnOp instruction yields the result of Op X. +// ARROW is channel receive. +// MUL is pointer indirection (load). +// XOR is bitwise complement. +// SUB is negation. +// NOT is logical negation. +// +// If CommaOk and Op=ARROW, the result is a 2-tuple of the value above +// and a boolean indicating the success of the receive. The +// components of the tuple are accessed using Extract. +// +// Pos() returns the ast.UnaryExpr.OpPos, if explicit in the source. +// For receive operations (ARROW) implicit in ranging over a channel, +// Pos() returns the ast.RangeStmt.For. +// For implicit memory loads (STAR), Pos() returns the position of the +// most closely associated source-level construct; the details are not +// specified. +// +// Example printed form: +// t0 = *x +// t2 = <-t1,ok +// +type UnOp struct { + register + Op token.Token // One of: NOT SUB ARROW MUL XOR ! - <- * ^ + X Value + CommaOk bool +} + +// The ChangeType instruction applies to X a value-preserving type +// change to Type(). +// +// Type changes are permitted: +// - between a named type and its underlying type. +// - between two named types of the same underlying type. +// - between (possibly named) pointers to identical base types. +// - from a bidirectional channel to a read- or write-channel, +// optionally adding/removing a name. +// +// This operation cannot fail dynamically. +// +// Pos() returns the ast.CallExpr.Lparen, if the instruction arose +// from an explicit conversion in the source. 
+// +// Example printed form: +// t1 = changetype *int <- IntPtr (t0) +// +type ChangeType struct { + register + X Value +} + +// The Convert instruction yields the conversion of value X to type +// Type(). One or both of those types is basic (but possibly named). +// +// A conversion may change the value and representation of its operand. +// Conversions are permitted: +// - between real numeric types. +// - between complex numeric types. +// - between string and []byte or []rune. +// - between pointers and unsafe.Pointer. +// - between unsafe.Pointer and uintptr. +// - from (Unicode) integer to (UTF-8) string. +// - from slice to array pointer. +// A conversion may imply a type name change also. +// +// Conversions of untyped string/number/bool constants to a specific +// representation are eliminated during SSA construction. +// +// Pos() returns the ast.CallExpr.Lparen, if the instruction arose +// from an explicit conversion in the source. +// +// Example printed form: +// t1 = convert []byte <- string (t0) +// +type Convert struct { + register + X Value +} + +// ChangeInterface constructs a value of one interface type from a +// value of another interface type known to be assignable to it. +// This operation cannot fail. +// +// Pos() returns the ast.CallExpr.Lparen if the instruction arose from +// an explicit T(e) conversion; the ast.TypeAssertExpr.Lparen if the +// instruction arose from an explicit e.(T) operation; or token.NoPos +// otherwise. +// +// Example printed form: +// t1 = change interface interface{} <- I (t0) +// +type ChangeInterface struct { + register + X Value +} + +// MakeInterface constructs an instance of an interface type from a +// value of a concrete type. +// +// Use Program.MethodSets.MethodSet(X.Type()) to find the method-set +// of X, and Program.MethodValue(m) to find the implementation of a method. +// +// To construct the zero value of an interface type T, use: +// NewConst(constant.MakeNil(), T, pos) +// +// Pos() returns the ast.CallExpr.Lparen, if the instruction arose +// from an explicit conversion in the source. +// +// Example printed form: +// t1 = make interface{} <- int (42:int) +// t2 = make Stringer <- t0 +// +type MakeInterface struct { + register + X Value +} + +// The MakeClosure instruction yields a closure value whose code is +// Fn and whose free variables' values are supplied by Bindings. +// +// Type() returns a (possibly named) *types.Signature. +// +// Pos() returns the ast.FuncLit.Type.Func for a function literal +// closure or the ast.SelectorExpr.Sel for a bound method closure. +// +// Example printed form: +// t0 = make closure anon@1.2 [x y z] +// t1 = make closure bound$(main.I).add [i] +// +type MakeClosure struct { + register + Fn Value // always a *Function + Bindings []Value // values for each free variable in Fn.FreeVars +} + +// The MakeMap instruction creates a new hash-table-based map object +// and yields a value of kind map. +// +// Type() returns a (possibly named) *types.Map. +// +// Pos() returns the ast.CallExpr.Lparen, if created by make(map), or +// the ast.CompositeLit.Lbrack if created by a literal. +// +// Example printed form: +// t1 = make map[string]int t0 +// t1 = make StringIntMap t0 +// +type MakeMap struct { + register + Reserve Value // initial space reservation; nil => default +} + +// The MakeChan instruction creates a new channel object and yields a +// value of kind chan. +// +// Type() returns a (possibly named) *types.Chan. 
+// +// Pos() returns the ast.CallExpr.Lparen for the make(chan) that +// created it. +// +// Example printed form: +// t0 = make chan int 0 +// t0 = make IntChan 0 +// +type MakeChan struct { + register + Size Value // int; size of buffer; zero => synchronous. +} + +// The MakeSlice instruction yields a slice of length Len backed by a +// newly allocated array of length Cap. +// +// Both Len and Cap must be non-nil Values of integer type. +// +// (Alloc(types.Array) followed by Slice will not suffice because +// Alloc can only create arrays of constant length.) +// +// Type() returns a (possibly named) *types.Slice. +// +// Pos() returns the ast.CallExpr.Lparen for the make([]T) that +// created it. +// +// Example printed form: +// t1 = make []string 1:int t0 +// t1 = make StringSlice 1:int t0 +// +type MakeSlice struct { + register + Len Value + Cap Value +} + +// The Slice instruction yields a slice of an existing string, slice +// or *array X between optional integer bounds Low and High. +// +// Dynamically, this instruction panics if X evaluates to a nil *array +// pointer. +// +// Type() returns string if the type of X was string, otherwise a +// *types.Slice with the same element type as X. +// +// Pos() returns the ast.SliceExpr.Lbrack if created by a x[:] slice +// operation, the ast.CompositeLit.Lbrace if created by a literal, or +// NoPos if not explicit in the source (e.g. a variadic argument slice). +// +// Example printed form: +// t1 = slice t0[1:] +// +type Slice struct { + register + X Value // slice, string, or *array + Low, High, Max Value // each may be nil +} + +// The FieldAddr instruction yields the address of Field of *struct X. +// +// The field is identified by its index within the field list of the +// struct type of X. +// +// Dynamically, this instruction panics if X evaluates to a nil +// pointer. +// +// Type() returns a (possibly named) *types.Pointer. +// +// Pos() returns the position of the ast.SelectorExpr.Sel for the +// field, if explicit in the source. +// +// Example printed form: +// t1 = &t0.name [#1] +// +type FieldAddr struct { + register + X Value // *struct + Field int // field is X.Type().Underlying().(*types.Pointer).Elem().Underlying().(*types.Struct).Field(Field) +} + +// The Field instruction yields the Field of struct X. +// +// The field is identified by its index within the field list of the +// struct type of X; by using numeric indices we avoid ambiguity of +// package-local identifiers and permit compact representations. +// +// Pos() returns the position of the ast.SelectorExpr.Sel for the +// field, if explicit in the source. +// +// Example printed form: +// t1 = t0.name [#1] +// +type Field struct { + register + X Value // struct + Field int // index into X.Type().(*types.Struct).Fields +} + +// The IndexAddr instruction yields the address of the element at +// index Index of collection X. Index is an integer expression. +// +// The elements of maps and strings are not addressable; use Lookup or +// MapUpdate instead. +// +// Dynamically, this instruction panics if X evaluates to a nil *array +// pointer. +// +// Type() returns a (possibly named) *types.Pointer. +// +// Pos() returns the ast.IndexExpr.Lbrack for the index operation, if +// explicit in the source. +// +// Example printed form: +// t2 = &t0[t1] +// +type IndexAddr struct { + register + X Value // slice or *array, + Index Value // numeric index +} + +// The Index instruction yields element Index of array X. 
+//
+// Pos() returns the ast.IndexExpr.Lbrack for the index operation, if
+// explicit in the source.
+//
+// Example printed form:
+//	t2 = t0[t1]
+//
+type Index struct {
+	register
+	X     Value // array
+	Index Value // integer index
+}
+
+// The Lookup instruction yields element Index of collection X, a map
+// or string. Index is an integer expression if X is a string or the
+// appropriate key type if X is a map.
+//
+// If CommaOk, the result is a 2-tuple of the value above and a
+// boolean indicating the result of a map membership test for the key.
+// The components of the tuple are accessed using Extract.
+//
+// Pos() returns the ast.IndexExpr.Lbrack, if explicit in the source.
+//
+// Example printed form:
+//	t2 = t0[t1]
+//	t5 = t3[t4],ok
+//
+type Lookup struct {
+	register
+	X       Value // string or map
+	Index   Value // numeric or key-typed index
+	CommaOk bool  // return a value,ok pair
+}
+
+// SelectState is a helper for Select.
+// It represents one goal state and its corresponding communication.
+//
+type SelectState struct {
+	Dir       types.ChanDir // direction of case (SendOnly or RecvOnly)
+	Chan      Value         // channel to use (for send or receive)
+	Send      Value         // value to send (for send)
+	Pos       token.Pos     // position of token.ARROW
+	DebugNode ast.Node      // ast.SendStmt or ast.UnaryExpr(<-) [debug mode]
+}
+
+// The Select instruction tests whether (or blocks until) one
+// of the specified sent or received states is entered.
+//
+// Let n be the number of States for which Dir==RECV and T_i (0<=i<n)
+// be the element type of each such state's Chan.
+// Select returns an n+2-tuple
+//    (index int, recvOk bool, r_0 T_0, ... r_n-1 T_n-1)
+// The tuple's components, described below, must be accessed via the
+// Extract instruction.
+//
+// If Blocking, select waits until exactly one state holds, i.e. a
+// channel becomes ready for the designated operation of sending or
+// receiving; select makes this state the chosen one.
+//
+// If !Blocking, select immediately returns: either some state holds,
+// i.e. a channel is ready for the designated operation of sending or
+// receiving, and that state is the chosen one; or no state holds, in
+// which case the chosen index is -1.
+//
+// index is the index within States of the chosen state, or -1 for a
+// non-blocking select with no ready state. recvOk reports whether
+// the receive on the chosen channel (if any) succeeded, and
+// r_0 ... r_n-1 are the values received by the RECV states; only the
+// value for the chosen state is meaningful.
+//
+// Pos() returns the ast.SelectStmt.Select.
+//
+// Example printed form:
+//	t3 = select nonblocking [<-t0, t1<-t2]
+//	t4 = select blocking []
+//
+type Select struct {
+	register
+	States   []*SelectState
+	Blocking bool
+}
+
+// The Range instruction yields an iterator over the domain and range
+// of X, which must be a string or map.
+//
+// Elements are accessed via Next.
+//
+// Type() returns an opaque and degenerate "rangeIter" type.
+//
+// Pos() returns the ast.RangeStmt.For.
+//
+// Example printed form:
+//	t0 = range "hello":string
+//
+type Range struct {
+	register
+	X Value // string or map
+}
+
+// The Next instruction reads and advances the (map or string)
+// iterator Iter and returns a 3-tuple value (ok, k, v). If the
+// iterator is not exhausted, ok is true and k and v are the next
+// elements of the domain and range, respectively. Otherwise ok is
+// false and k and v are undefined.
+//
+// Components of the tuple are accessed using Extract.
+//
+// The IsString field distinguishes iterators over strings from those
+// over maps, as the Type() alone is insufficient: consider
+// map[int]rune.
+//
+// Type() returns a *types.Tuple for the triple (ok, k, v).
+// The types of k and/or v may be types.Invalid.
+//
+// Example printed form:
+//	t1 = next t0
+//
+type Next struct {
+	register
+	Iter     Value
+	IsString bool // true => string iterator; false => map iterator.
+}
+
+// The TypeAssert instruction tests whether interface value X has type
+// AssertedType.
+//
+// If !CommaOk, on success it returns v, the result of the conversion
+// (defined below); on failure it panics.
+//
+// If CommaOk: on success it returns a pair (v, true) where v is the
+// result of the conversion; on failure it returns (z, false) where z
+// is AssertedType's zero value. The components of the pair must be
+// accessed using the Extract instruction.
+//
+// If AssertedType is a concrete type, TypeAssert checks whether the
+// dynamic type in interface X is equal to it, and if so, the result
+// of the conversion is a copy of the value in the interface.
+//
+// If AssertedType is an interface, TypeAssert checks whether the
+// dynamic type of the interface is assignable to it, and if so, the
+// result of the conversion is a copy of the interface value X.
+// If AssertedType is a superinterface of X.Type(), the operation will
+// fail iff the operand is nil. (Contrast with ChangeInterface, which
+// performs no nil-check.)
+//
+// Type() reflects the actual type of the result, possibly a
+// 2-types.Tuple; AssertedType is the asserted type.
+//
+// Pos() returns the ast.CallExpr.Lparen if the instruction arose from
+// an explicit T(e) conversion; the ast.TypeAssertExpr.Lparen if the
+// instruction arose from an explicit e.(T) operation; or the
+// ast.CaseClause.Case if the instruction arose from a case of a
+// type-switch statement.
+//
+// Example printed form:
+//	t1 = typeassert t0.(int)
+//	t3 = typeassert,ok t2.(T)
+//
+type TypeAssert struct {
+	register
+	X            Value
+	AssertedType types.Type
+	CommaOk      bool
+}
+
+// The Extract instruction yields component Index of Tuple.
+//
+// This is used to access the results of instructions with multiple
+// return values, such as Call, TypeAssert, Next, UnOp(ARROW) and
+// IndexExpr(Map).
+// +// Example printed form: +// t1 = extract t0 #1 +// +type Extract struct { + register + Tuple Value + Index int +} + +// Instructions executed for effect. They do not yield a value. -------------------- + +// The Jump instruction transfers control to the sole successor of its +// owning block. +// +// A Jump must be the last instruction of its containing BasicBlock. +// +// Pos() returns NoPos. +// +// Example printed form: +// jump done +// +type Jump struct { + anInstruction +} + +// The If instruction transfers control to one of the two successors +// of its owning block, depending on the boolean Cond: the first if +// true, the second if false. +// +// An If instruction must be the last instruction of its containing +// BasicBlock. +// +// Pos() returns NoPos. +// +// Example printed form: +// if t0 goto done else body +// +type If struct { + anInstruction + Cond Value +} + +// The Return instruction returns values and control back to the calling +// function. +// +// len(Results) is always equal to the number of results in the +// function's signature. +// +// If len(Results) > 1, Return returns a tuple value with the specified +// components which the caller must access using Extract instructions. +// +// There is no instruction to return a ready-made tuple like those +// returned by a "value,ok"-mode TypeAssert, Lookup or UnOp(ARROW) or +// a tail-call to a function with multiple result parameters. +// +// Return must be the last instruction of its containing BasicBlock. +// Such a block has no successors. +// +// Pos() returns the ast.ReturnStmt.Return, if explicit in the source. +// +// Example printed form: +// return +// return nil:I, 2:int +// +type Return struct { + anInstruction + Results []Value + pos token.Pos +} + +// The RunDefers instruction pops and invokes the entire stack of +// procedure calls pushed by Defer instructions in this function. +// +// It is legal to encounter multiple 'rundefers' instructions in a +// single control-flow path through a function; this is useful in +// the combined init() function, for example. +// +// Pos() returns NoPos. +// +// Example printed form: +// rundefers +// +type RunDefers struct { + anInstruction +} + +// The Panic instruction initiates a panic with value X. +// +// A Panic instruction must be the last instruction of its containing +// BasicBlock, which must have no successors. +// +// NB: 'go panic(x)' and 'defer panic(x)' do not use this instruction; +// they are treated as calls to a built-in function. +// +// Pos() returns the ast.CallExpr.Lparen if this panic was explicit +// in the source. +// +// Example printed form: +// panic t0 +// +type Panic struct { + anInstruction + X Value // an interface{} + pos token.Pos +} + +// The Go instruction creates a new goroutine and calls the specified +// function within it. +// +// See CallCommon for generic function call documentation. +// +// Pos() returns the ast.GoStmt.Go. +// +// Example printed form: +// go println(t0, t1) +// go t3() +// go invoke t5.Println(...t6) +// +type Go struct { + anInstruction + Call CallCommon + pos token.Pos +} + +// The Defer instruction pushes the specified call onto a stack of +// functions to be called by a RunDefers instruction or by a panic. +// +// See CallCommon for generic function call documentation. +// +// Pos() returns the ast.DeferStmt.Defer. 
+// +// Example printed form: +// defer println(t0, t1) +// defer t3() +// defer invoke t5.Println(...t6) +// +type Defer struct { + anInstruction + Call CallCommon + pos token.Pos +} + +// The Send instruction sends X on channel Chan. +// +// Pos() returns the ast.SendStmt.Arrow, if explicit in the source. +// +// Example printed form: +// send t0 <- t1 +// +type Send struct { + anInstruction + Chan, X Value + pos token.Pos +} + +// The Store instruction stores Val at address Addr. +// Stores can be of arbitrary types. +// +// Pos() returns the position of the source-level construct most closely +// associated with the memory store operation. +// Since implicit memory stores are numerous and varied and depend upon +// implementation choices, the details are not specified. +// +// Example printed form: +// *x = y +// +type Store struct { + anInstruction + Addr Value + Val Value + pos token.Pos +} + +// The MapUpdate instruction updates the association of Map[Key] to +// Value. +// +// Pos() returns the ast.KeyValueExpr.Colon or ast.IndexExpr.Lbrack, +// if explicit in the source. +// +// Example printed form: +// t0[t1] = t2 +// +type MapUpdate struct { + anInstruction + Map Value + Key Value + Value Value + pos token.Pos +} + +// A DebugRef instruction maps a source-level expression Expr to the +// SSA value X that represents the value (!IsAddr) or address (IsAddr) +// of that expression. +// +// DebugRef is a pseudo-instruction: it has no dynamic effect. +// +// Pos() returns Expr.Pos(), the start position of the source-level +// expression. This is not the same as the "designated" token as +// documented at Value.Pos(). e.g. CallExpr.Pos() does not return the +// position of the ("designated") Lparen token. +// +// If Expr is an *ast.Ident denoting a var or func, Object() returns +// the object; though this information can be obtained from the type +// checker, including it here greatly facilitates debugging. +// For non-Ident expressions, Object() returns nil. +// +// DebugRefs are generated only for functions built with debugging +// enabled; see Package.SetDebugMode() and the GlobalDebug builder +// mode flag. +// +// DebugRefs are not emitted for ast.Idents referring to constants or +// predeclared identifiers, since they are trivial and numerous. +// Nor are they emitted for ast.ParenExprs. +// +// (By representing these as instructions, rather than out-of-band, +// consistency is maintained during transformation passes by the +// ordinary SSA renaming machinery.) +// +// Example printed form: +// ; *ast.CallExpr @ 102:9 is t5 +// ; var x float64 @ 109:72 is x +// ; address of *ast.CompositeLit @ 216:10 is t0 +// +type DebugRef struct { + anInstruction + Expr ast.Expr // the referring expression (never *ast.ParenExpr) + object types.Object // the identity of the source var/func + IsAddr bool // Expr is addressable and X is the address it denotes + X Value // the value or address of Expr +} + +// Embeddable mix-ins and helpers for common parts of other structs. ----------- + +// register is a mix-in embedded by all SSA values that are also +// instructions, i.e. virtual registers, and provides a uniform +// implementation of most of the Value interface: Value.Name() is a +// numbered register (e.g. "t0"); the other methods are field accessors. +// +// Temporary names are automatically assigned to each register on +// completion of building a function in SSA form. +// +// Clients must not assume that the 'id' value (and the Name() derived +// from it) is unique within a function. 
As always in this API, +// semantics are determined only by identity; names exist only to +// facilitate debugging. +// +type register struct { + anInstruction + num int // "name" of virtual register, e.g. "t0". Not guaranteed unique. + typ types.Type // type of virtual register + pos token.Pos // position of source expression, or NoPos + referrers []Instruction +} + +// anInstruction is a mix-in embedded by all Instructions. +// It provides the implementations of the Block and setBlock methods. +type anInstruction struct { + block *BasicBlock // the basic block of this instruction +} + +// CallCommon is contained by Go, Defer and Call to hold the +// common parts of a function or method call. +// +// Each CallCommon exists in one of two modes, function call and +// interface method invocation, or "call" and "invoke" for short. +// +// 1. "call" mode: when Method is nil (!IsInvoke), a CallCommon +// represents an ordinary function call of the value in Value, +// which may be a *Builtin, a *Function or any other value of kind +// 'func'. +// +// Value may be one of: +// (a) a *Function, indicating a statically dispatched call +// to a package-level function, an anonymous function, or +// a method of a named type. +// (b) a *MakeClosure, indicating an immediately applied +// function literal with free variables. +// (c) a *Builtin, indicating a statically dispatched call +// to a built-in function. +// (d) any other value, indicating a dynamically dispatched +// function call. +// StaticCallee returns the identity of the callee in cases +// (a) and (b), nil otherwise. +// +// Args contains the arguments to the call. If Value is a method, +// Args[0] contains the receiver parameter. +// +// Example printed form: +// t2 = println(t0, t1) +// go t3() +// defer t5(...t6) +// +// 2. "invoke" mode: when Method is non-nil (IsInvoke), a CallCommon +// represents a dynamically dispatched call to an interface method. +// In this mode, Value is the interface value and Method is the +// interface's abstract method. Note: an abstract method may be +// shared by multiple interfaces due to embedding; Value.Type() +// provides the specific interface used for this call. +// +// Value is implicitly supplied to the concrete method implementation +// as the receiver parameter; in other words, Args[0] holds not the +// receiver but the first true argument. +// +// Example printed form: +// t1 = invoke t0.String() +// go invoke t3.Run(t2) +// defer invoke t4.Handle(...t5) +// +// For all calls to variadic functions (Signature().Variadic()), +// the last element of Args is a slice. +// +type CallCommon struct { + Value Value // receiver (invoke mode) or func value (call mode) + Method *types.Func // abstract method (invoke mode) + Args []Value // actual parameters (in static method call, includes receiver) + pos token.Pos // position of CallExpr.Lparen, iff explicit in source +} + +// IsInvoke returns true if this call has "invoke" (not "call") mode. +func (c *CallCommon) IsInvoke() bool { + return c.Method != nil +} + +func (c *CallCommon) Pos() token.Pos { return c.pos } + +// Signature returns the signature of the called function. +// +// For an "invoke"-mode call, the signature of the interface method is +// returned. +// +// In either "call" or "invoke" mode, if the callee is a method, its +// receiver is represented by sig.Recv, not sig.Params().At(0). 
+// +func (c *CallCommon) Signature() *types.Signature { + if c.Method != nil { + return c.Method.Type().(*types.Signature) + } + return c.Value.Type().Underlying().(*types.Signature) +} + +// StaticCallee returns the callee if this is a trivially static +// "call"-mode call to a function. +func (c *CallCommon) StaticCallee() *Function { + switch fn := c.Value.(type) { + case *Function: + return fn + case *MakeClosure: + return fn.Fn.(*Function) + } + return nil +} + +// Description returns a description of the mode of this call suitable +// for a user interface, e.g., "static method call". +func (c *CallCommon) Description() string { + switch fn := c.Value.(type) { + case *Builtin: + return "built-in function call" + case *MakeClosure: + return "static function closure call" + case *Function: + if fn.Signature.Recv() != nil { + return "static method call" + } + return "static function call" + } + if c.IsInvoke() { + return "dynamic method call" // ("invoke" mode) + } + return "dynamic function call" +} + +// The CallInstruction interface, implemented by *Go, *Defer and *Call, +// exposes the common parts of function-calling instructions, +// yet provides a way back to the Value defined by *Call alone. +// +type CallInstruction interface { + Instruction + Common() *CallCommon // returns the common parts of the call + Value() *Call // returns the result value of the call (*Call) or nil (*Go, *Defer) +} + +func (s *Call) Common() *CallCommon { return &s.Call } +func (s *Defer) Common() *CallCommon { return &s.Call } +func (s *Go) Common() *CallCommon { return &s.Call } + +func (s *Call) Value() *Call { return s } +func (s *Defer) Value() *Call { return nil } +func (s *Go) Value() *Call { return nil } + +func (v *Builtin) Type() types.Type { return v.sig } +func (v *Builtin) Name() string { return v.name } +func (*Builtin) Referrers() *[]Instruction { return nil } +func (v *Builtin) Pos() token.Pos { return token.NoPos } +func (v *Builtin) Object() types.Object { return types.Universe.Lookup(v.name) } +func (v *Builtin) Parent() *Function { return nil } + +func (v *FreeVar) Type() types.Type { return v.typ } +func (v *FreeVar) Name() string { return v.name } +func (v *FreeVar) Referrers() *[]Instruction { return &v.referrers } +func (v *FreeVar) Pos() token.Pos { return v.pos } +func (v *FreeVar) Parent() *Function { return v.parent } + +func (v *Global) Type() types.Type { return v.typ } +func (v *Global) Name() string { return v.name } +func (v *Global) Parent() *Function { return nil } +func (v *Global) Pos() token.Pos { return v.pos } +func (v *Global) Referrers() *[]Instruction { return nil } +func (v *Global) Token() token.Token { return token.VAR } +func (v *Global) Object() types.Object { return v.object } +func (v *Global) String() string { return v.RelString(nil) } +func (v *Global) Package() *Package { return v.Pkg } +func (v *Global) RelString(from *types.Package) string { return relString(v, from) } + +func (v *Function) Name() string { return v.name } +func (v *Function) Type() types.Type { return v.Signature } +func (v *Function) Pos() token.Pos { return v.pos } +func (v *Function) Token() token.Token { return token.FUNC } +func (v *Function) Object() types.Object { return v.object } +func (v *Function) String() string { return v.RelString(nil) } +func (v *Function) Package() *Package { return v.Pkg } +func (v *Function) Parent() *Function { return v.parent } +func (v *Function) Referrers() *[]Instruction { + if v.parent != nil { + return &v.referrers + } + return nil +} + 
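Read from the client side, the call/invoke distinction above usually reduces to the pattern sketched here. The package and helper names (callinfo, describeCalls) are invented for this illustration; the sketch relies only on the CallInstruction, CallCommon, StaticCallee, IsInvoke and Description APIs documented above.

package callinfo

import (
	"fmt"

	"golang.org/x/tools/go/ssa"
)

// describeCalls prints one line per call site in fn (including go and defer
// statements), using Common to reach the shared CallCommon and
// StaticCallee/IsInvoke to tell the two modes apart.
// fn.Blocks is nil for external functions, so the loop is simply empty then.
func describeCalls(fn *ssa.Function) {
	for _, b := range fn.Blocks {
		for _, instr := range b.Instrs {
			call, ok := instr.(ssa.CallInstruction) // *ssa.Call, *ssa.Go or *ssa.Defer
			if !ok {
				continue
			}
			c := call.Common()
			switch {
			case c.StaticCallee() != nil:
				fmt.Printf("%s: static call to %s\n", fn, c.StaticCallee())
			case c.IsInvoke():
				fmt.Printf("%s: invoke of interface method %s\n", fn, c.Method.Name())
			default:
				fmt.Printf("%s: %s\n", fn, c.Description()) // e.g. "dynamic function call"
			}
		}
	}
}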
+func (v *Parameter) Type() types.Type { return v.typ } +func (v *Parameter) Name() string { return v.name } +func (v *Parameter) Object() types.Object { return v.object } +func (v *Parameter) Referrers() *[]Instruction { return &v.referrers } +func (v *Parameter) Pos() token.Pos { return v.pos } +func (v *Parameter) Parent() *Function { return v.parent } + +func (v *Alloc) Type() types.Type { return v.typ } +func (v *Alloc) Referrers() *[]Instruction { return &v.referrers } +func (v *Alloc) Pos() token.Pos { return v.pos } + +func (v *register) Type() types.Type { return v.typ } +func (v *register) setType(typ types.Type) { v.typ = typ } +func (v *register) Name() string { return fmt.Sprintf("t%d", v.num) } +func (v *register) setNum(num int) { v.num = num } +func (v *register) Referrers() *[]Instruction { return &v.referrers } +func (v *register) Pos() token.Pos { return v.pos } +func (v *register) setPos(pos token.Pos) { v.pos = pos } + +func (v *anInstruction) Parent() *Function { return v.block.parent } +func (v *anInstruction) Block() *BasicBlock { return v.block } +func (v *anInstruction) setBlock(block *BasicBlock) { v.block = block } +func (v *anInstruction) Referrers() *[]Instruction { return nil } + +func (t *Type) Name() string { return t.object.Name() } +func (t *Type) Pos() token.Pos { return t.object.Pos() } +func (t *Type) Type() types.Type { return t.object.Type() } +func (t *Type) Token() token.Token { return token.TYPE } +func (t *Type) Object() types.Object { return t.object } +func (t *Type) String() string { return t.RelString(nil) } +func (t *Type) Package() *Package { return t.pkg } +func (t *Type) RelString(from *types.Package) string { return relString(t, from) } + +func (c *NamedConst) Name() string { return c.object.Name() } +func (c *NamedConst) Pos() token.Pos { return c.object.Pos() } +func (c *NamedConst) String() string { return c.RelString(nil) } +func (c *NamedConst) Type() types.Type { return c.object.Type() } +func (c *NamedConst) Token() token.Token { return token.CONST } +func (c *NamedConst) Object() types.Object { return c.object } +func (c *NamedConst) Package() *Package { return c.pkg } +func (c *NamedConst) RelString(from *types.Package) string { return relString(c, from) } + +func (d *DebugRef) Object() types.Object { return d.object } + +// Func returns the package-level function of the specified name, +// or nil if not found. +// +func (p *Package) Func(name string) (f *Function) { + f, _ = p.Members[name].(*Function) + return +} + +// Var returns the package-level variable of the specified name, +// or nil if not found. +// +func (p *Package) Var(name string) (g *Global) { + g, _ = p.Members[name].(*Global) + return +} + +// Const returns the package-level constant of the specified name, +// or nil if not found. +// +func (p *Package) Const(name string) (c *NamedConst) { + c, _ = p.Members[name].(*NamedConst) + return +} + +// Type returns the package-level type of the specified name, +// or nil if not found. 
+// +func (p *Package) Type(name string) (t *Type) { + t, _ = p.Members[name].(*Type) + return +} + +func (v *Call) Pos() token.Pos { return v.Call.pos } +func (s *Defer) Pos() token.Pos { return s.pos } +func (s *Go) Pos() token.Pos { return s.pos } +func (s *MapUpdate) Pos() token.Pos { return s.pos } +func (s *Panic) Pos() token.Pos { return s.pos } +func (s *Return) Pos() token.Pos { return s.pos } +func (s *Send) Pos() token.Pos { return s.pos } +func (s *Store) Pos() token.Pos { return s.pos } +func (s *If) Pos() token.Pos { return token.NoPos } +func (s *Jump) Pos() token.Pos { return token.NoPos } +func (s *RunDefers) Pos() token.Pos { return token.NoPos } +func (s *DebugRef) Pos() token.Pos { return s.Expr.Pos() } + +// Operands. + +func (v *Alloc) Operands(rands []*Value) []*Value { + return rands +} + +func (v *BinOp) Operands(rands []*Value) []*Value { + return append(rands, &v.X, &v.Y) +} + +func (c *CallCommon) Operands(rands []*Value) []*Value { + rands = append(rands, &c.Value) + for i := range c.Args { + rands = append(rands, &c.Args[i]) + } + return rands +} + +func (s *Go) Operands(rands []*Value) []*Value { + return s.Call.Operands(rands) +} + +func (s *Call) Operands(rands []*Value) []*Value { + return s.Call.Operands(rands) +} + +func (s *Defer) Operands(rands []*Value) []*Value { + return s.Call.Operands(rands) +} + +func (v *ChangeInterface) Operands(rands []*Value) []*Value { + return append(rands, &v.X) +} + +func (v *ChangeType) Operands(rands []*Value) []*Value { + return append(rands, &v.X) +} + +func (v *Convert) Operands(rands []*Value) []*Value { + return append(rands, &v.X) +} + +func (s *DebugRef) Operands(rands []*Value) []*Value { + return append(rands, &s.X) +} + +func (v *Extract) Operands(rands []*Value) []*Value { + return append(rands, &v.Tuple) +} + +func (v *Field) Operands(rands []*Value) []*Value { + return append(rands, &v.X) +} + +func (v *FieldAddr) Operands(rands []*Value) []*Value { + return append(rands, &v.X) +} + +func (s *If) Operands(rands []*Value) []*Value { + return append(rands, &s.Cond) +} + +func (v *Index) Operands(rands []*Value) []*Value { + return append(rands, &v.X, &v.Index) +} + +func (v *IndexAddr) Operands(rands []*Value) []*Value { + return append(rands, &v.X, &v.Index) +} + +func (*Jump) Operands(rands []*Value) []*Value { + return rands +} + +func (v *Lookup) Operands(rands []*Value) []*Value { + return append(rands, &v.X, &v.Index) +} + +func (v *MakeChan) Operands(rands []*Value) []*Value { + return append(rands, &v.Size) +} + +func (v *MakeClosure) Operands(rands []*Value) []*Value { + rands = append(rands, &v.Fn) + for i := range v.Bindings { + rands = append(rands, &v.Bindings[i]) + } + return rands +} + +func (v *MakeInterface) Operands(rands []*Value) []*Value { + return append(rands, &v.X) +} + +func (v *MakeMap) Operands(rands []*Value) []*Value { + return append(rands, &v.Reserve) +} + +func (v *MakeSlice) Operands(rands []*Value) []*Value { + return append(rands, &v.Len, &v.Cap) +} + +func (v *MapUpdate) Operands(rands []*Value) []*Value { + return append(rands, &v.Map, &v.Key, &v.Value) +} + +func (v *Next) Operands(rands []*Value) []*Value { + return append(rands, &v.Iter) +} + +func (s *Panic) Operands(rands []*Value) []*Value { + return append(rands, &s.X) +} + +func (v *Phi) Operands(rands []*Value) []*Value { + for i := range v.Edges { + rands = append(rands, &v.Edges[i]) + } + return rands +} + +func (v *Range) Operands(rands []*Value) []*Value { + return append(rands, &v.X) +} + +func (s *Return) 
Operands(rands []*Value) []*Value { + for i := range s.Results { + rands = append(rands, &s.Results[i]) + } + return rands +} + +func (*RunDefers) Operands(rands []*Value) []*Value { + return rands +} + +func (v *Select) Operands(rands []*Value) []*Value { + for i := range v.States { + rands = append(rands, &v.States[i].Chan, &v.States[i].Send) + } + return rands +} + +func (s *Send) Operands(rands []*Value) []*Value { + return append(rands, &s.Chan, &s.X) +} + +func (v *Slice) Operands(rands []*Value) []*Value { + return append(rands, &v.X, &v.Low, &v.High, &v.Max) +} + +func (s *Store) Operands(rands []*Value) []*Value { + return append(rands, &s.Addr, &s.Val) +} + +func (v *TypeAssert) Operands(rands []*Value) []*Value { + return append(rands, &v.X) +} + +func (v *UnOp) Operands(rands []*Value) []*Value { + return append(rands, &v.X) +} + +// Non-Instruction Values: +func (v *Builtin) Operands(rands []*Value) []*Value { return rands } +func (v *FreeVar) Operands(rands []*Value) []*Value { return rands } +func (v *Const) Operands(rands []*Value) []*Value { return rands } +func (v *Function) Operands(rands []*Value) []*Value { return rands } +func (v *Global) Operands(rands []*Value) []*Value { return rands } +func (v *Parameter) Operands(rands []*Value) []*Value { return rands } diff --git a/vendor/golang.org/x/tools/go/ssa/ssautil/load.go b/vendor/golang.org/x/tools/go/ssa/ssautil/load.go new file mode 100644 index 000000000..eab12dc55 --- /dev/null +++ b/vendor/golang.org/x/tools/go/ssa/ssautil/load.go @@ -0,0 +1,175 @@ +// Copyright 2015 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package ssautil + +// This file defines utility functions for constructing programs in SSA form. + +import ( + "go/ast" + "go/token" + "go/types" + + "golang.org/x/tools/go/loader" + "golang.org/x/tools/go/packages" + "golang.org/x/tools/go/ssa" +) + +// Packages creates an SSA program for a set of packages. +// +// The packages must have been loaded from source syntax using the +// golang.org/x/tools/go/packages.Load function in LoadSyntax or +// LoadAllSyntax mode. +// +// Packages creates an SSA package for each well-typed package in the +// initial list, plus all their dependencies. The resulting list of +// packages corresponds to the list of initial packages, and may contain +// a nil if SSA code could not be constructed for the corresponding initial +// package due to type errors. +// +// Code for bodies of functions is not built until Build is called on +// the resulting Program. SSA code is constructed only for the initial +// packages with well-typed syntax trees. +// +// The mode parameter controls diagnostics and checking during SSA construction. +// +func Packages(initial []*packages.Package, mode ssa.BuilderMode) (*ssa.Program, []*ssa.Package) { + return doPackages(initial, mode, false) +} + +// AllPackages creates an SSA program for a set of packages plus all +// their dependencies. +// +// The packages must have been loaded from source syntax using the +// golang.org/x/tools/go/packages.Load function in LoadAllSyntax mode. +// +// AllPackages creates an SSA package for each well-typed package in the +// initial list, plus all their dependencies. The resulting list of +// packages corresponds to the list of initial packages, and may contain +// a nil if SSA code could not be constructed for the corresponding +// initial package due to type errors. 
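A minimal end-to-end sketch of the workflow this comment describes, shown only for orientation and not part of the patch: load syntax with go/packages, hand the result to Packages, and call Build before inspecting function bodies. The "./..." pattern and the SanityCheckFunctions builder mode are arbitrary choices for the example.

package main

import (
	"fmt"
	"log"

	"golang.org/x/tools/go/packages"
	"golang.org/x/tools/go/ssa"
	"golang.org/x/tools/go/ssa/ssautil"
)

func main() {
	// LoadAllSyntax provides syntax for dependencies too; LoadSyntax would
	// restrict SSA construction to the initial packages.
	cfg := &packages.Config{Mode: packages.LoadAllSyntax}
	initial, err := packages.Load(cfg, "./...")
	if err != nil {
		log.Fatal(err)
	}

	prog, pkgs := ssautil.Packages(initial, ssa.SanityCheckFunctions)
	prog.Build() // function bodies are only constructed here

	for _, p := range pkgs {
		if p == nil {
			continue // type errors in the corresponding initial package
		}
		if f := p.Func("main"); f != nil {
			fmt.Println("has a main function:", f)
		}
		fmt.Println("built SSA for", p.Pkg.Path())
	}
}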
+// +// Code for bodies of functions is not built until Build is called on +// the resulting Program. SSA code is constructed for all packages with +// well-typed syntax trees. +// +// The mode parameter controls diagnostics and checking during SSA construction. +// +func AllPackages(initial []*packages.Package, mode ssa.BuilderMode) (*ssa.Program, []*ssa.Package) { + return doPackages(initial, mode, true) +} + +func doPackages(initial []*packages.Package, mode ssa.BuilderMode, deps bool) (*ssa.Program, []*ssa.Package) { + + var fset *token.FileSet + if len(initial) > 0 { + fset = initial[0].Fset + } + + prog := ssa.NewProgram(fset, mode) + + isInitial := make(map[*packages.Package]bool, len(initial)) + for _, p := range initial { + isInitial[p] = true + } + + ssamap := make(map[*packages.Package]*ssa.Package) + packages.Visit(initial, nil, func(p *packages.Package) { + if p.Types != nil && !p.IllTyped { + var files []*ast.File + if deps || isInitial[p] { + files = p.Syntax + } + ssamap[p] = prog.CreatePackage(p.Types, files, p.TypesInfo, true) + } + }) + + var ssapkgs []*ssa.Package + for _, p := range initial { + ssapkgs = append(ssapkgs, ssamap[p]) // may be nil + } + return prog, ssapkgs +} + +// CreateProgram returns a new program in SSA form, given a program +// loaded from source. An SSA package is created for each transitively +// error-free package of lprog. +// +// Code for bodies of functions is not built until Build is called +// on the result. +// +// The mode parameter controls diagnostics and checking during SSA construction. +// +// Deprecated: Use golang.org/x/tools/go/packages and the Packages +// function instead; see ssa.ExampleLoadPackages. +// +func CreateProgram(lprog *loader.Program, mode ssa.BuilderMode) *ssa.Program { + prog := ssa.NewProgram(lprog.Fset, mode) + + for _, info := range lprog.AllPackages { + if info.TransitivelyErrorFree { + prog.CreatePackage(info.Pkg, info.Files, &info.Info, info.Importable) + } + } + + return prog +} + +// BuildPackage builds an SSA program with IR for a single package. +// +// It populates pkg by type-checking the specified file ASTs. All +// dependencies are loaded using the importer specified by tc, which +// typically loads compiler export data; SSA code cannot be built for +// those packages. BuildPackage then constructs an ssa.Program with all +// dependency packages created, and builds and returns the SSA package +// corresponding to pkg. +// +// The caller must have set pkg.Path() to the import path. +// +// The operation fails if there were any type-checking or import errors. +// +// See ../example_test.go for an example. +// +func BuildPackage(tc *types.Config, fset *token.FileSet, pkg *types.Package, files []*ast.File, mode ssa.BuilderMode) (*ssa.Package, *types.Info, error) { + if fset == nil { + panic("no token.FileSet") + } + if pkg.Path() == "" { + panic("package has no import path") + } + + info := &types.Info{ + Types: make(map[ast.Expr]types.TypeAndValue), + Defs: make(map[*ast.Ident]types.Object), + Uses: make(map[*ast.Ident]types.Object), + Implicits: make(map[ast.Node]types.Object), + Scopes: make(map[ast.Node]*types.Scope), + Selections: make(map[*ast.SelectorExpr]*types.Selection), + } + if err := types.NewChecker(tc, fset, pkg, info).Files(files); err != nil { + return nil, nil, err + } + + prog := ssa.NewProgram(fset, mode) + + // Create SSA packages for all imports. + // Order is not significant. 
+ created := make(map[*types.Package]bool) + var createAll func(pkgs []*types.Package) + createAll = func(pkgs []*types.Package) { + for _, p := range pkgs { + if !created[p] { + created[p] = true + prog.CreatePackage(p, nil, nil, true) + createAll(p.Imports()) + } + } + } + createAll(pkg.Imports()) + + // Create and build the primary package. + ssapkg := prog.CreatePackage(pkg, files, info, false) + ssapkg.Build() + return ssapkg, info, nil +} diff --git a/vendor/golang.org/x/tools/go/ssa/ssautil/switch.go b/vendor/golang.org/x/tools/go/ssa/ssautil/switch.go new file mode 100644 index 000000000..db03bf555 --- /dev/null +++ b/vendor/golang.org/x/tools/go/ssa/ssautil/switch.go @@ -0,0 +1,234 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package ssautil + +// This file implements discovery of switch and type-switch constructs +// from low-level control flow. +// +// Many techniques exist for compiling a high-level switch with +// constant cases to efficient machine code. The optimal choice will +// depend on the data type, the specific case values, the code in the +// body of each case, and the hardware. +// Some examples: +// - a lookup table (for a switch that maps constants to constants) +// - a computed goto +// - a binary tree +// - a perfect hash +// - a two-level switch (to partition constant strings by their first byte). + +import ( + "bytes" + "fmt" + "go/token" + "go/types" + + "golang.org/x/tools/go/ssa" +) + +// A ConstCase represents a single constant comparison. +// It is part of a Switch. +type ConstCase struct { + Block *ssa.BasicBlock // block performing the comparison + Body *ssa.BasicBlock // body of the case + Value *ssa.Const // case comparand +} + +// A TypeCase represents a single type assertion. +// It is part of a Switch. +type TypeCase struct { + Block *ssa.BasicBlock // block performing the type assert + Body *ssa.BasicBlock // body of the case + Type types.Type // case type + Binding ssa.Value // value bound by this case +} + +// A Switch is a logical high-level control flow operation +// (a multiway branch) discovered by analysis of a CFG containing +// only if/else chains. It is not part of the ssa.Instruction set. +// +// One of ConstCases and TypeCases has length >= 2; +// the other is nil. +// +// In a value switch, the list of cases may contain duplicate constants. +// A type switch may contain duplicate types, or types assignable +// to an interface type also in the list. +// TODO(adonovan): eliminate such duplicates. +// +type Switch struct { + Start *ssa.BasicBlock // block containing start of if/else chain + X ssa.Value // the switch operand + ConstCases []ConstCase // ordered list of constant comparisons + TypeCases []TypeCase // ordered list of type assertions + Default *ssa.BasicBlock // successor if all comparisons fail +} + +func (sw *Switch) String() string { + // We represent each block by the String() of its + // first Instruction, e.g. "print(42:int)". 
+ var buf bytes.Buffer + if sw.ConstCases != nil { + fmt.Fprintf(&buf, "switch %s {\n", sw.X.Name()) + for _, c := range sw.ConstCases { + fmt.Fprintf(&buf, "case %s: %s\n", c.Value, c.Body.Instrs[0]) + } + } else { + fmt.Fprintf(&buf, "switch %s.(type) {\n", sw.X.Name()) + for _, c := range sw.TypeCases { + fmt.Fprintf(&buf, "case %s %s: %s\n", + c.Binding.Name(), c.Type, c.Body.Instrs[0]) + } + } + if sw.Default != nil { + fmt.Fprintf(&buf, "default: %s\n", sw.Default.Instrs[0]) + } + fmt.Fprintf(&buf, "}") + return buf.String() +} + +// Switches examines the control-flow graph of fn and returns the +// set of inferred value and type switches. A value switch tests an +// ssa.Value for equality against two or more compile-time constant +// values. Switches involving link-time constants (addresses) are +// ignored. A type switch type-asserts an ssa.Value against two or +// more types. +// +// The switches are returned in dominance order. +// +// The resulting switches do not necessarily correspond to uses of the +// 'switch' keyword in the source: for example, a single source-level +// switch statement with non-constant cases may result in zero, one or +// many Switches, one per plural sequence of constant cases. +// Switches may even be inferred from if/else- or goto-based control flow. +// (In general, the control flow constructs of the source program +// cannot be faithfully reproduced from the SSA representation.) +// +func Switches(fn *ssa.Function) []Switch { + // Traverse the CFG in dominance order, so we don't + // enter an if/else-chain in the middle. + var switches []Switch + seen := make(map[*ssa.BasicBlock]bool) // TODO(adonovan): opt: use ssa.blockSet + for _, b := range fn.DomPreorder() { + if x, k := isComparisonBlock(b); x != nil { + // Block b starts a switch. + sw := Switch{Start: b, X: x} + valueSwitch(&sw, k, seen) + if len(sw.ConstCases) > 1 { + switches = append(switches, sw) + } + } + + if y, x, T := isTypeAssertBlock(b); y != nil { + // Block b starts a type switch. + sw := Switch{Start: b, X: x} + typeSwitch(&sw, y, T, seen) + if len(sw.TypeCases) > 1 { + switches = append(switches, sw) + } + } + } + return switches +} + +func valueSwitch(sw *Switch, k *ssa.Const, seen map[*ssa.BasicBlock]bool) { + b := sw.Start + x := sw.X + for x == sw.X { + if seen[b] { + break + } + seen[b] = true + + sw.ConstCases = append(sw.ConstCases, ConstCase{ + Block: b, + Body: b.Succs[0], + Value: k, + }) + b = b.Succs[1] + if len(b.Instrs) > 2 { + // Block b contains not just 'if x == k', + // so it may have side effects that + // make it unsafe to elide. + break + } + if len(b.Preds) != 1 { + // Block b has multiple predecessors, + // so it cannot be treated as a case. + break + } + x, k = isComparisonBlock(b) + } + sw.Default = b +} + +func typeSwitch(sw *Switch, y ssa.Value, T types.Type, seen map[*ssa.BasicBlock]bool) { + b := sw.Start + x := sw.X + for x == sw.X { + if seen[b] { + break + } + seen[b] = true + + sw.TypeCases = append(sw.TypeCases, TypeCase{ + Block: b, + Body: b.Succs[0], + Type: T, + Binding: y, + }) + b = b.Succs[1] + if len(b.Instrs) > 4 { + // Block b contains not just + // {TypeAssert; Extract #0; Extract #1; If} + // so it may have side effects that + // make it unsafe to elide. + break + } + if len(b.Preds) != 1 { + // Block b has multiple predecessors, + // so it cannot be treated as a case. 
+ break + } + y, x, T = isTypeAssertBlock(b) + } + sw.Default = b +} + +// isComparisonBlock returns the operands (v, k) if a block ends with +// a comparison v==k, where k is a compile-time constant. +// +func isComparisonBlock(b *ssa.BasicBlock) (v ssa.Value, k *ssa.Const) { + if n := len(b.Instrs); n >= 2 { + if i, ok := b.Instrs[n-1].(*ssa.If); ok { + if binop, ok := i.Cond.(*ssa.BinOp); ok && binop.Block() == b && binop.Op == token.EQL { + if k, ok := binop.Y.(*ssa.Const); ok { + return binop.X, k + } + if k, ok := binop.X.(*ssa.Const); ok { + return binop.Y, k + } + } + } + } + return +} + +// isTypeAssertBlock returns the operands (y, x, T) if a block ends with +// a type assertion "if y, ok := x.(T); ok {". +// +func isTypeAssertBlock(b *ssa.BasicBlock) (y, x ssa.Value, T types.Type) { + if n := len(b.Instrs); n >= 4 { + if i, ok := b.Instrs[n-1].(*ssa.If); ok { + if ext1, ok := i.Cond.(*ssa.Extract); ok && ext1.Block() == b && ext1.Index == 1 { + if ta, ok := ext1.Tuple.(*ssa.TypeAssert); ok && ta.Block() == b { + // hack: relies upon instruction ordering. + if ext0, ok := b.Instrs[n-3].(*ssa.Extract); ok { + return ext0, ta.X, ta.AssertedType + } + } + } + } + } + return +} diff --git a/vendor/golang.org/x/tools/go/ssa/ssautil/visit.go b/vendor/golang.org/x/tools/go/ssa/ssautil/visit.go new file mode 100644 index 000000000..3424e8a30 --- /dev/null +++ b/vendor/golang.org/x/tools/go/ssa/ssautil/visit.go @@ -0,0 +1,79 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package ssautil // import "golang.org/x/tools/go/ssa/ssautil" + +import "golang.org/x/tools/go/ssa" + +// This file defines utilities for visiting the SSA representation of +// a Program. +// +// TODO(adonovan): test coverage. + +// AllFunctions finds and returns the set of functions potentially +// needed by program prog, as determined by a simple linker-style +// reachability algorithm starting from the members and method-sets of +// each package. The result may include anonymous functions and +// synthetic wrappers. +// +// Precondition: all packages are built. +// +func AllFunctions(prog *ssa.Program) map[*ssa.Function]bool { + visit := visitor{ + prog: prog, + seen: make(map[*ssa.Function]bool), + } + visit.program() + return visit.seen +} + +type visitor struct { + prog *ssa.Program + seen map[*ssa.Function]bool +} + +func (visit *visitor) program() { + for _, pkg := range visit.prog.AllPackages() { + for _, mem := range pkg.Members { + if fn, ok := mem.(*ssa.Function); ok { + visit.function(fn) + } + } + } + for _, T := range visit.prog.RuntimeTypes() { + mset := visit.prog.MethodSets.MethodSet(T) + for i, n := 0, mset.Len(); i < n; i++ { + visit.function(visit.prog.MethodValue(mset.At(i))) + } + } +} + +func (visit *visitor) function(fn *ssa.Function) { + if !visit.seen[fn] { + visit.seen[fn] = true + var buf [10]*ssa.Value // avoid alloc in common case + for _, b := range fn.Blocks { + for _, instr := range b.Instrs { + for _, op := range instr.Operands(buf[:0]) { + if fn, ok := (*op).(*ssa.Function); ok { + visit.function(fn) + } + } + } + } + } +} + +// MainPackages returns the subset of the specified packages +// named "main" that define a main function. +// The result may include synthetic "testmain" packages. 
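The two utilities above compose naturally. The hypothetical helper below assumes an already-built *ssa.Program (the precondition stated for AllFunctions) and reports every switch shape that Switches recovers from the reachable functions; the package name ssainspect is invented for the sketch.

package ssainspect

import (
	"fmt"

	"golang.org/x/tools/go/ssa"
	"golang.org/x/tools/go/ssa/ssautil"
)

// reportSwitches walks every function AllFunctions considers reachable in an
// already-built program and prints the switches reconstructed from each CFG.
func reportSwitches(prog *ssa.Program) {
	for fn := range ssautil.AllFunctions(prog) {
		for _, sw := range ssautil.Switches(fn) {
			fmt.Printf("%s: switch on %s (%d const cases, %d type cases)\n",
				fn, sw.X.Name(), len(sw.ConstCases), len(sw.TypeCases))
		}
	}
}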
+func MainPackages(pkgs []*ssa.Package) []*ssa.Package { + var mains []*ssa.Package + for _, pkg := range pkgs { + if pkg.Pkg.Name() == "main" && pkg.Func("main") != nil { + mains = append(mains, pkg) + } + } + return mains +} diff --git a/vendor/golang.org/x/tools/go/ssa/testmain.go b/vendor/golang.org/x/tools/go/ssa/testmain.go new file mode 100644 index 000000000..c4256d1ef --- /dev/null +++ b/vendor/golang.org/x/tools/go/ssa/testmain.go @@ -0,0 +1,274 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package ssa + +// CreateTestMainPackage synthesizes a main package that runs all the +// tests of the supplied packages. +// It is closely coupled to $GOROOT/src/cmd/go/test.go and $GOROOT/src/testing. +// +// TODO(adonovan): throws this all away now that x/tools/go/packages +// provides access to the actual synthetic test main files. + +import ( + "bytes" + "fmt" + "go/ast" + "go/parser" + "go/types" + "log" + "os" + "strings" + "text/template" +) + +// FindTests returns the Test, Benchmark, and Example functions +// (as defined by "go test") defined in the specified package, +// and its TestMain function, if any. +// +// Deprecated: Use golang.org/x/tools/go/packages to access synthetic +// testmain packages. +func FindTests(pkg *Package) (tests, benchmarks, examples []*Function, main *Function) { + prog := pkg.Prog + + // The first two of these may be nil: if the program doesn't import "testing", + // it can't contain any tests, but it may yet contain Examples. + var testSig *types.Signature // func(*testing.T) + var benchmarkSig *types.Signature // func(*testing.B) + var exampleSig = types.NewSignature(nil, nil, nil, false) // func() + + // Obtain the types from the parameters of testing.MainStart. + if testingPkg := prog.ImportedPackage("testing"); testingPkg != nil { + mainStart := testingPkg.Func("MainStart") + params := mainStart.Signature.Params() + testSig = funcField(params.At(1).Type()) + benchmarkSig = funcField(params.At(2).Type()) + + // Does the package define this function? + // func TestMain(*testing.M) + if f := pkg.Func("TestMain"); f != nil { + sig := f.Type().(*types.Signature) + starM := mainStart.Signature.Results().At(0).Type() // *testing.M + if sig.Results().Len() == 0 && + sig.Params().Len() == 1 && + types.Identical(sig.Params().At(0).Type(), starM) { + main = f + } + } + } + + // TODO(adonovan): use a stable order, e.g. lexical. + for _, mem := range pkg.Members { + if f, ok := mem.(*Function); ok && + ast.IsExported(f.Name()) && + strings.HasSuffix(prog.Fset.Position(f.Pos()).Filename, "_test.go") { + + switch { + case testSig != nil && isTestSig(f, "Test", testSig): + tests = append(tests, f) + case benchmarkSig != nil && isTestSig(f, "Benchmark", benchmarkSig): + benchmarks = append(benchmarks, f) + case isTestSig(f, "Example", exampleSig): + examples = append(examples, f) + default: + continue + } + } + } + return +} + +// Like isTest, but checks the signature too. +func isTestSig(f *Function, prefix string, sig *types.Signature) bool { + return isTest(f.Name(), prefix) && types.Identical(f.Signature, sig) +} + +// Given the type of one of the three slice parameters of testing.Main, +// returns the function type. 
+func funcField(slice types.Type) *types.Signature { + return slice.(*types.Slice).Elem().Underlying().(*types.Struct).Field(1).Type().(*types.Signature) +} + +// isTest tells whether name looks like a test (or benchmark, according to prefix). +// It is a Test (say) if there is a character after Test that is not a lower-case letter. +// We don't want TesticularCancer. +// Plundered from $GOROOT/src/cmd/go/test.go +func isTest(name, prefix string) bool { + if !strings.HasPrefix(name, prefix) { + return false + } + if len(name) == len(prefix) { // "Test" is ok + return true + } + return ast.IsExported(name[len(prefix):]) +} + +// CreateTestMainPackage creates and returns a synthetic "testmain" +// package for the specified package if it defines tests, benchmarks or +// executable examples, or nil otherwise. The new package is named +// "main" and provides a function named "main" that runs the tests, +// similar to the one that would be created by the 'go test' tool. +// +// Subsequent calls to prog.AllPackages include the new package. +// The package pkg must belong to the program prog. +// +// Deprecated: Use golang.org/x/tools/go/packages to access synthetic +// testmain packages. +func (prog *Program) CreateTestMainPackage(pkg *Package) *Package { + if pkg.Prog != prog { + log.Fatal("Package does not belong to Program") + } + + // Template data + var data struct { + Pkg *Package + Tests, Benchmarks, Examples []*Function + Main *Function + Go18 bool + } + data.Pkg = pkg + + // Enumerate tests. + data.Tests, data.Benchmarks, data.Examples, data.Main = FindTests(pkg) + if data.Main == nil && + data.Tests == nil && data.Benchmarks == nil && data.Examples == nil { + return nil + } + + // Synthesize source for testmain package. + path := pkg.Pkg.Path() + "$testmain" + tmpl := testmainTmpl + if testingPkg := prog.ImportedPackage("testing"); testingPkg != nil { + // In Go 1.8, testing.MainStart's first argument is an interface, not a func. + data.Go18 = types.IsInterface(testingPkg.Func("MainStart").Signature.Params().At(0).Type()) + } else { + // The program does not import "testing", but FindTests + // returned non-nil, which must mean there were Examples + // but no Test, Benchmark, or TestMain functions. + + // We'll simply call them from testmain.main; this will + // ensure they don't panic, but will not check any + // "Output:" comments. + // (We should not execute an Example that has no + // "Output:" comment, but it's impossible to tell here.) + tmpl = examplesOnlyTmpl + } + var buf bytes.Buffer + if err := tmpl.Execute(&buf, data); err != nil { + log.Fatalf("internal error expanding template for %s: %v", path, err) + } + if false { // debugging + fmt.Fprintln(os.Stderr, buf.String()) + } + + // Parse and type-check the testmain package. + f, err := parser.ParseFile(prog.Fset, path+".go", &buf, parser.Mode(0)) + if err != nil { + log.Fatalf("internal error parsing %s: %v", path, err) + } + conf := types.Config{ + DisableUnusedImportCheck: true, + Importer: importer{pkg}, + } + files := []*ast.File{f} + info := &types.Info{ + Types: make(map[ast.Expr]types.TypeAndValue), + Defs: make(map[*ast.Ident]types.Object), + Uses: make(map[*ast.Ident]types.Object), + Implicits: make(map[ast.Node]types.Object), + Scopes: make(map[ast.Node]*types.Scope), + Selections: make(map[*ast.SelectorExpr]*types.Selection), + } + testmainPkg, err := conf.Check(path, prog.Fset, files, info) + if err != nil { + log.Fatalf("internal error type-checking %s: %v", path, err) + } + + // Create and build SSA code. 
+ testmain := prog.CreatePackage(testmainPkg, files, info, false) + testmain.SetDebugMode(false) + testmain.Build() + testmain.Func("main").Synthetic = "test main function" + testmain.Func("init").Synthetic = "package initializer" + return testmain +} + +// An implementation of types.Importer for an already loaded SSA program. +type importer struct { + pkg *Package // package under test; may be non-importable +} + +func (imp importer) Import(path string) (*types.Package, error) { + if p := imp.pkg.Prog.ImportedPackage(path); p != nil { + return p.Pkg, nil + } + if path == imp.pkg.Pkg.Path() { + return imp.pkg.Pkg, nil + } + return nil, fmt.Errorf("not found") // can't happen +} + +var testmainTmpl = template.Must(template.New("testmain").Parse(` +package main + +import "io" +import "os" +import "testing" +import p {{printf "%q" .Pkg.Pkg.Path}} + +{{if .Go18}} +type deps struct{} + +func (deps) ImportPath() string { return "" } +func (deps) MatchString(pat, str string) (bool, error) { return true, nil } +func (deps) SetPanicOnExit0(bool) {} +func (deps) StartCPUProfile(io.Writer) error { return nil } +func (deps) StartTestLog(io.Writer) {} +func (deps) StopCPUProfile() {} +func (deps) StopTestLog() error { return nil } +func (deps) WriteHeapProfile(io.Writer) error { return nil } +func (deps) WriteProfileTo(string, io.Writer, int) error { return nil } + +var match deps +{{else}} +func match(_, _ string) (bool, error) { return true, nil } +{{end}} + +func main() { + tests := []testing.InternalTest{ +{{range .Tests}} + { {{printf "%q" .Name}}, p.{{.Name}} }, +{{end}} + } + benchmarks := []testing.InternalBenchmark{ +{{range .Benchmarks}} + { {{printf "%q" .Name}}, p.{{.Name}} }, +{{end}} + } + examples := []testing.InternalExample{ +{{range .Examples}} + {Name: {{printf "%q" .Name}}, F: p.{{.Name}}}, +{{end}} + } + m := testing.MainStart(match, tests, benchmarks, examples) +{{with .Main}} + p.{{.Name}}(m) +{{else}} + os.Exit(m.Run()) +{{end}} +} + +`)) + +var examplesOnlyTmpl = template.Must(template.New("examples").Parse(` +package main + +import p {{printf "%q" .Pkg.Pkg.Path}} + +func main() { +{{range .Examples}} + p.{{.Name}}() +{{end}} +} +`)) diff --git a/vendor/golang.org/x/tools/go/ssa/util.go b/vendor/golang.org/x/tools/go/ssa/util.go new file mode 100644 index 000000000..a09949a31 --- /dev/null +++ b/vendor/golang.org/x/tools/go/ssa/util.go @@ -0,0 +1,89 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package ssa + +// This file defines a number of miscellaneous utility functions. + +import ( + "fmt" + "go/ast" + "go/token" + "go/types" + "io" + "os" + + "golang.org/x/tools/go/ast/astutil" +) + +//// AST utilities + +func unparen(e ast.Expr) ast.Expr { return astutil.Unparen(e) } + +// isBlankIdent returns true iff e is an Ident with name "_". +// They have no associated types.Object, and thus no type. +// +func isBlankIdent(e ast.Expr) bool { + id, ok := e.(*ast.Ident) + return ok && id.Name == "_" +} + +//// Type utilities. Some of these belong in go/types. + +// isPointer returns true for types whose underlying type is a pointer. +func isPointer(typ types.Type) bool { + _, ok := typ.Underlying().(*types.Pointer) + return ok +} + +func isInterface(T types.Type) bool { return types.IsInterface(T) } + +// deref returns a pointer's element type; otherwise it returns typ. 
+func deref(typ types.Type) types.Type { + if p, ok := typ.Underlying().(*types.Pointer); ok { + return p.Elem() + } + return typ +} + +// recvType returns the receiver type of method obj. +func recvType(obj *types.Func) types.Type { + return obj.Type().(*types.Signature).Recv().Type() +} + +// logStack prints the formatted "start" message to stderr and +// returns a closure that prints the corresponding "end" message. +// Call using 'defer logStack(...)()' to show builder stack on panic. +// Don't forget trailing parens! +// +func logStack(format string, args ...interface{}) func() { + msg := fmt.Sprintf(format, args...) + io.WriteString(os.Stderr, msg) + io.WriteString(os.Stderr, "\n") + return func() { + io.WriteString(os.Stderr, msg) + io.WriteString(os.Stderr, " end\n") + } +} + +// newVar creates a 'var' for use in a types.Tuple. +func newVar(name string, typ types.Type) *types.Var { + return types.NewParam(token.NoPos, nil, name, typ) +} + +// anonVar creates an anonymous 'var' for use in a types.Tuple. +func anonVar(typ types.Type) *types.Var { + return newVar("", typ) +} + +var lenResults = types.NewTuple(anonVar(tInt)) + +// makeLen returns the len builtin specialized to type func(T)int. +func makeLen(T types.Type) *Builtin { + lenParams := types.NewTuple(anonVar(T)) + return &Builtin{ + name: "len", + sig: types.NewSignature(nil, lenParams, lenResults, false), + } +} diff --git a/vendor/golang.org/x/tools/go/ssa/wrappers.go b/vendor/golang.org/x/tools/go/ssa/wrappers.go new file mode 100644 index 000000000..a4ae71d8c --- /dev/null +++ b/vendor/golang.org/x/tools/go/ssa/wrappers.go @@ -0,0 +1,290 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package ssa + +// This file defines synthesis of Functions that delegate to declared +// methods; they come in three kinds: +// +// (1) wrappers: methods that wrap declared methods, performing +// implicit pointer indirections and embedded field selections. +// +// (2) thunks: funcs that wrap declared methods. Like wrappers, +// thunks perform indirections and field selections. The thunk's +// first parameter is used as the receiver for the method call. +// +// (3) bounds: funcs that wrap declared methods. The bound's sole +// free variable, supplied by a closure, is used as the receiver +// for the method call. No indirections or field selections are +// performed since they can be done before the call. + +import ( + "fmt" + + "go/types" +) + +// -- wrappers ----------------------------------------------------------- + +// makeWrapper returns a synthetic method that delegates to the +// declared method denoted by meth.Obj(), first performing any +// necessary pointer indirections or field selections implied by meth. +// +// The resulting method's receiver type is meth.Recv(). +// +// This function is versatile but quite subtle! Consider the +// following axes of variation when making changes: +// - optional receiver indirection +// - optional implicit field selections +// - meth.Obj() may denote a concrete or an interface method +// - the result may be a thunk or a wrapper. 
+// +// EXCLUSIVE_LOCKS_REQUIRED(prog.methodsMu) +// +func makeWrapper(prog *Program, sel *types.Selection) *Function { + obj := sel.Obj().(*types.Func) // the declared function + sig := sel.Type().(*types.Signature) // type of this wrapper + + var recv *types.Var // wrapper's receiver or thunk's params[0] + name := obj.Name() + var description string + var start int // first regular param + if sel.Kind() == types.MethodExpr { + name += "$thunk" + description = "thunk" + recv = sig.Params().At(0) + start = 1 + } else { + description = "wrapper" + recv = sig.Recv() + } + + description = fmt.Sprintf("%s for %s", description, sel.Obj()) + if prog.mode&LogSource != 0 { + defer logStack("make %s to (%s)", description, recv.Type())() + } + fn := &Function{ + name: name, + method: sel, + object: obj, + Signature: sig, + Synthetic: description, + Prog: prog, + pos: obj.Pos(), + } + fn.startBody() + fn.addSpilledParam(recv) + createParams(fn, start) + + indices := sel.Index() + + var v Value = fn.Locals[0] // spilled receiver + if isPointer(sel.Recv()) { + v = emitLoad(fn, v) + + // For simple indirection wrappers, perform an informative nil-check: + // "value method (T).f called using nil *T pointer" + if len(indices) == 1 && !isPointer(recvType(obj)) { + var c Call + c.Call.Value = &Builtin{ + name: "ssa:wrapnilchk", + sig: types.NewSignature(nil, + types.NewTuple(anonVar(sel.Recv()), anonVar(tString), anonVar(tString)), + types.NewTuple(anonVar(sel.Recv())), false), + } + c.Call.Args = []Value{ + v, + stringConst(deref(sel.Recv()).String()), + stringConst(sel.Obj().Name()), + } + c.setType(v.Type()) + v = fn.emit(&c) + } + } + + // Invariant: v is a pointer, either + // value of *A receiver param, or + // address of A spilled receiver. + + // We use pointer arithmetic (FieldAddr possibly followed by + // Load) in preference to value extraction (Field possibly + // preceded by Load). + + v = emitImplicitSelections(fn, v, indices[:len(indices)-1]) + + // Invariant: v is a pointer, either + // value of implicit *C field, or + // address of implicit C field. + + var c Call + if r := recvType(obj); !isInterface(r) { // concrete method + if !isPointer(r) { + v = emitLoad(fn, v) + } + c.Call.Value = prog.declaredFunc(obj) + c.Call.Args = append(c.Call.Args, v) + } else { + c.Call.Method = obj + c.Call.Value = emitLoad(fn, v) + } + for _, arg := range fn.Params[1:] { + c.Call.Args = append(c.Call.Args, arg) + } + emitTailCall(fn, &c) + fn.finishBody() + return fn +} + +// createParams creates parameters for wrapper method fn based on its +// Signature.Params, which do not include the receiver. +// start is the index of the first regular parameter to use. +// +func createParams(fn *Function, start int) { + tparams := fn.Signature.Params() + for i, n := start, tparams.Len(); i < n; i++ { + fn.addParamObj(tparams.At(i)) + } +} + +// -- bounds ----------------------------------------------------------- + +// makeBound returns a bound method wrapper (or "bound"), a synthetic +// function that delegates to a concrete or interface method denoted +// by obj. The resulting function has no receiver, but has one free +// variable which will be used as the method's receiver in the +// tail-call. +// +// Use MakeClosure with such a wrapper to construct a bound method +// closure. 
e.g.: +// +// type T int or: type T interface { meth() } +// func (t T) meth() +// var t T +// f := t.meth +// f() // calls t.meth() +// +// f is a closure of a synthetic wrapper defined as if by: +// +// f := func() { return t.meth() } +// +// Unlike makeWrapper, makeBound need perform no indirection or field +// selections because that can be done before the closure is +// constructed. +// +// EXCLUSIVE_LOCKS_ACQUIRED(meth.Prog.methodsMu) +// +func makeBound(prog *Program, obj *types.Func) *Function { + prog.methodsMu.Lock() + defer prog.methodsMu.Unlock() + fn, ok := prog.bounds[obj] + if !ok { + description := fmt.Sprintf("bound method wrapper for %s", obj) + if prog.mode&LogSource != 0 { + defer logStack("%s", description)() + } + fn = &Function{ + name: obj.Name() + "$bound", + object: obj, + Signature: changeRecv(obj.Type().(*types.Signature), nil), // drop receiver + Synthetic: description, + Prog: prog, + pos: obj.Pos(), + } + + fv := &FreeVar{name: "recv", typ: recvType(obj), parent: fn} + fn.FreeVars = []*FreeVar{fv} + fn.startBody() + createParams(fn, 0) + var c Call + + if !isInterface(recvType(obj)) { // concrete + c.Call.Value = prog.declaredFunc(obj) + c.Call.Args = []Value{fv} + } else { + c.Call.Value = fv + c.Call.Method = obj + } + for _, arg := range fn.Params { + c.Call.Args = append(c.Call.Args, arg) + } + emitTailCall(fn, &c) + fn.finishBody() + + prog.bounds[obj] = fn + } + return fn +} + +// -- thunks ----------------------------------------------------------- + +// makeThunk returns a thunk, a synthetic function that delegates to a +// concrete or interface method denoted by sel.Obj(). The resulting +// function has no receiver, but has an additional (first) regular +// parameter. +// +// Precondition: sel.Kind() == types.MethodExpr. +// +// type T int or: type T interface { meth() } +// func (t T) meth() +// f := T.meth +// var t T +// f(t) // calls t.meth() +// +// f is a synthetic wrapper defined as if by: +// +// f := func(t T) { return t.meth() } +// +// TODO(adonovan): opt: currently the stub is created even when used +// directly in a function call: C.f(i, 0). This is less efficient +// than inlining the stub. +// +// EXCLUSIVE_LOCKS_ACQUIRED(meth.Prog.methodsMu) +// +func makeThunk(prog *Program, sel *types.Selection) *Function { + if sel.Kind() != types.MethodExpr { + panic(sel) + } + + key := selectionKey{ + kind: sel.Kind(), + recv: sel.Recv(), + obj: sel.Obj(), + index: fmt.Sprint(sel.Index()), + indirect: sel.Indirect(), + } + + prog.methodsMu.Lock() + defer prog.methodsMu.Unlock() + + // Canonicalize key.recv to avoid constructing duplicate thunks. + canonRecv, ok := prog.canon.At(key.recv).(types.Type) + if !ok { + canonRecv = key.recv + prog.canon.Set(key.recv, canonRecv) + } + key.recv = canonRecv + + fn, ok := prog.thunks[key] + if !ok { + fn = makeWrapper(prog, sel) + if fn.Signature.Recv() != nil { + panic(fn) // unexpected receiver + } + prog.thunks[key] = fn + } + return fn +} + +func changeRecv(s *types.Signature, recv *types.Var) *types.Signature { + return types.NewSignature(recv, s.Params(), s.Results(), s.Variadic()) +} + +// selectionKey is like types.Selection but a usable map key. 
+type selectionKey struct { + kind types.SelectionKind + recv types.Type // canonicalized via Program.canon + obj types.Object + index string + indirect bool +} diff --git a/vendor/golang.org/x/tools/go/types/objectpath/objectpath.go b/vendor/golang.org/x/tools/go/types/objectpath/objectpath.go new file mode 100644 index 000000000..cffd7acbe --- /dev/null +++ b/vendor/golang.org/x/tools/go/types/objectpath/objectpath.go @@ -0,0 +1,524 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package objectpath defines a naming scheme for types.Objects +// (that is, named entities in Go programs) relative to their enclosing +// package. +// +// Type-checker objects are canonical, so they are usually identified by +// their address in memory (a pointer), but a pointer has meaning only +// within one address space. By contrast, objectpath names allow the +// identity of an object to be sent from one program to another, +// establishing a correspondence between types.Object variables that are +// distinct but logically equivalent. +// +// A single object may have multiple paths. In this example, +// type A struct{ X int } +// type B A +// the field X has two paths due to its membership of both A and B. +// The For(obj) function always returns one of these paths, arbitrarily +// but consistently. +package objectpath + +import ( + "fmt" + "strconv" + "strings" + + "go/types" +) + +// A Path is an opaque name that identifies a types.Object +// relative to its package. Conceptually, the name consists of a +// sequence of destructuring operations applied to the package scope +// to obtain the original object. +// The name does not include the package itself. +type Path string + +// Encoding +// +// An object path is a textual and (with training) human-readable encoding +// of a sequence of destructuring operators, starting from a types.Package. +// The sequences represent a path through the package/object/type graph. +// We classify these operators by their type: +// +// PO package->object Package.Scope.Lookup +// OT object->type Object.Type +// TT type->type Type.{Elem,Key,Params,Results,Underlying} [EKPRU] +// TO type->object Type.{At,Field,Method,Obj} [AFMO] +// +// All valid paths start with a package and end at an object +// and thus may be defined by the regular language: +// +// objectpath = PO (OT TT* TO)* +// +// The concrete encoding follows directly: +// - The only PO operator is Package.Scope.Lookup, which requires an identifier. +// - The only OT operator is Object.Type, +// which we encode as '.' because dot cannot appear in an identifier. +// - The TT operators are encoded as [EKPRU]. +// - The OT operators are encoded as [AFMO]; +// three of these (At,Field,Method) require an integer operand, +// which is encoded as a string of decimal digits. +// These indices are stable across different representations +// of the same package, even source and export data. +// +// In the example below, +// +// package p +// +// type T interface { +// f() (a string, b struct{ X int }) +// } +// +// field X has the path "T.UM0.RA1.F0", +// representing the following sequence of operations: +// +// p.Lookup("T") T +// .Type().Underlying().Method(0). f +// .Type().Results().At(1) b +// .Type().Field(0) X +// +// The encoding is not maximally compact---every R or P is +// followed by an A, for example---but this simplifies the +// encoder and decoder. 
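To make the encoding concrete, here is a self-contained sketch (not part of the vendored file) that type-checks the example package from the comment above, asks For for the path of the field X, and decodes the path again with objectpath.Object, the decoder this package provides. Per the encoding described above it should print T.UM0.RA1.F0 and then true.

package main

import (
	"fmt"
	"go/ast"
	"go/parser"
	"go/token"
	"go/types"
	"log"

	"golang.org/x/tools/go/types/objectpath"
)

const src = `package p

type T interface {
	f() (a string, b struct{ X int })
}
`

func main() {
	fset := token.NewFileSet()
	file, err := parser.ParseFile(fset, "p.go", src, 0)
	if err != nil {
		log.Fatal(err)
	}
	conf := types.Config{} // no imports in src, so no Importer is needed
	pkg, err := conf.Check("p", fset, []*ast.File{file}, nil)
	if err != nil {
		log.Fatal(err)
	}

	// Dig out the field X by the same sequence of operations the comment
	// above describes: T -> underlying interface -> method f -> result b -> X.
	obj := pkg.Scope().Lookup("T").
		Type().Underlying().(*types.Interface).Method(0).
		Type().(*types.Signature).Results().At(1).
		Type().(*types.Struct).Field(0)

	path, err := objectpath.For(obj)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(path) // expected: T.UM0.RA1.F0

	back, err := objectpath.Object(pkg, path)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(back == obj) // true: decoding recovers the identical object
}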
+// +const ( + // object->type operators + opType = '.' // .Type() (Object) + + // type->type operators + opElem = 'E' // .Elem() (Pointer, Slice, Array, Chan, Map) + opKey = 'K' // .Key() (Map) + opParams = 'P' // .Params() (Signature) + opResults = 'R' // .Results() (Signature) + opUnderlying = 'U' // .Underlying() (Named) + + // type->object operators + opAt = 'A' // .At(i) (Tuple) + opField = 'F' // .Field(i) (Struct) + opMethod = 'M' // .Method(i) (Named or Interface; not Struct: "promoted" names are ignored) + opObj = 'O' // .Obj() (Named) +) + +// The For function returns the path to an object relative to its package, +// or an error if the object is not accessible from the package's Scope. +// +// The For function guarantees to return a path only for the following objects: +// - package-level types +// - exported package-level non-types +// - methods +// - parameter and result variables +// - struct fields +// These objects are sufficient to define the API of their package. +// The objects described by a package's export data are drawn from this set. +// +// For does not return a path for predeclared names, imported package +// names, local names, and unexported package-level names (except +// types). +// +// Example: given this definition, +// +// package p +// +// type T interface { +// f() (a string, b struct{ X int }) +// } +// +// For(X) would return a path that denotes the following sequence of operations: +// +// p.Scope().Lookup("T") (TypeName T) +// .Type().Underlying().Method(0). (method Func f) +// .Type().Results().At(1) (field Var b) +// .Type().Field(0) (field Var X) +// +// where p is the package (*types.Package) to which X belongs. +func For(obj types.Object) (Path, error) { + pkg := obj.Pkg() + + // This table lists the cases of interest. + // + // Object Action + // ------ ------ + // nil reject + // builtin reject + // pkgname reject + // label reject + // var + // package-level accept + // func param/result accept + // local reject + // struct field accept + // const + // package-level accept + // local reject + // func + // package-level accept + // init functions reject + // concrete method accept + // interface method accept + // type + // package-level accept + // local reject + // + // The only accessible package-level objects are members of pkg itself. + // + // The cases are handled in four steps: + // + // 1. reject nil and builtin + // 2. accept package-level objects + // 3. reject obviously invalid objects + // 4. search the API for the path to the param/result/field/method. + + // 1. reference to nil or builtin? + if pkg == nil { + return "", fmt.Errorf("predeclared %s has no path", obj) + } + scope := pkg.Scope() + + // 2. package-level object? + if scope.Lookup(obj.Name()) == obj { + // Only exported objects (and non-exported types) have a path. + // Non-exported types may be referenced by other objects. + if _, ok := obj.(*types.TypeName); !ok && !obj.Exported() { + return "", fmt.Errorf("no path for non-exported %v", obj) + } + return Path(obj.Name()), nil + } + + // 3. Not a package-level object. + // Reject obviously non-viable cases. + switch obj := obj.(type) { + case *types.Const, // Only package-level constants have a path. + *types.TypeName, // Only package-level types have a path. + *types.Label, // Labels are function-local. + *types.PkgName: // PkgNames are file-local. + return "", fmt.Errorf("no path for %v", obj) + + case *types.Var: + // Could be: + // - a field (obj.IsField()) + // - a func parameter or result + // - a local var. 
+ // Sadly there is no way to distinguish + // a param/result from a local + // so we must proceed to the find. + + case *types.Func: + // A func, if not package-level, must be a method. + if recv := obj.Type().(*types.Signature).Recv(); recv == nil { + return "", fmt.Errorf("func is not a method: %v", obj) + } + // TODO(adonovan): opt: if the method is concrete, + // do a specialized version of the rest of this function so + // that it's O(1) not O(|scope|). Basically 'find' is needed + // only for struct fields and interface methods. + + default: + panic(obj) + } + + // 4. Search the API for the path to the var (field/param/result) or method. + + // First inspect package-level named types. + // In the presence of path aliases, these give + // the best paths because non-types may + // refer to types, but not the reverse. + empty := make([]byte, 0, 48) // initial space + names := scope.Names() + for _, name := range names { + o := scope.Lookup(name) + tname, ok := o.(*types.TypeName) + if !ok { + continue // handle non-types in second pass + } + + path := append(empty, name...) + path = append(path, opType) + + T := o.Type() + + if tname.IsAlias() { + // type alias + if r := find(obj, T, path); r != nil { + return Path(r), nil + } + } else { + // defined (named) type + if r := find(obj, T.Underlying(), append(path, opUnderlying)); r != nil { + return Path(r), nil + } + } + } + + // Then inspect everything else: + // non-types, and declared methods of defined types. + for _, name := range names { + o := scope.Lookup(name) + path := append(empty, name...) + if _, ok := o.(*types.TypeName); !ok { + if o.Exported() { + // exported non-type (const, var, func) + if r := find(obj, o.Type(), append(path, opType)); r != nil { + return Path(r), nil + } + } + continue + } + + // Inspect declared methods of defined types. + if T, ok := o.Type().(*types.Named); ok { + path = append(path, opType) + for i := 0; i < T.NumMethods(); i++ { + m := T.Method(i) + path2 := appendOpArg(path, opMethod, i) + if m == obj { + return Path(path2), nil // found declared method + } + if r := find(obj, m.Type(), append(path2, opType)); r != nil { + return Path(r), nil + } + } + } + } + + return "", fmt.Errorf("can't find path for %v in %s", obj, pkg.Path()) +} + +func appendOpArg(path []byte, op byte, arg int) []byte { + path = append(path, op) + path = strconv.AppendInt(path, int64(arg), 10) + return path +} + +// find finds obj within type T, returning the path to it, or nil if not found. +func find(obj types.Object, T types.Type, path []byte) []byte { + switch T := T.(type) { + case *types.Basic, *types.Named: + // Named types belonging to pkg were handled already, + // so T must belong to another package. No path. 
+ return nil + case *types.Pointer: + return find(obj, T.Elem(), append(path, opElem)) + case *types.Slice: + return find(obj, T.Elem(), append(path, opElem)) + case *types.Array: + return find(obj, T.Elem(), append(path, opElem)) + case *types.Chan: + return find(obj, T.Elem(), append(path, opElem)) + case *types.Map: + if r := find(obj, T.Key(), append(path, opKey)); r != nil { + return r + } + return find(obj, T.Elem(), append(path, opElem)) + case *types.Signature: + if r := find(obj, T.Params(), append(path, opParams)); r != nil { + return r + } + return find(obj, T.Results(), append(path, opResults)) + case *types.Struct: + for i := 0; i < T.NumFields(); i++ { + f := T.Field(i) + path2 := appendOpArg(path, opField, i) + if f == obj { + return path2 // found field var + } + if r := find(obj, f.Type(), append(path2, opType)); r != nil { + return r + } + } + return nil + case *types.Tuple: + for i := 0; i < T.Len(); i++ { + v := T.At(i) + path2 := appendOpArg(path, opAt, i) + if v == obj { + return path2 // found param/result var + } + if r := find(obj, v.Type(), append(path2, opType)); r != nil { + return r + } + } + return nil + case *types.Interface: + for i := 0; i < T.NumMethods(); i++ { + m := T.Method(i) + path2 := appendOpArg(path, opMethod, i) + if m == obj { + return path2 // found interface method + } + if r := find(obj, m.Type(), append(path2, opType)); r != nil { + return r + } + } + return nil + } + panic(T) +} + +// Object returns the object denoted by path p within the package pkg. +func Object(pkg *types.Package, p Path) (types.Object, error) { + if p == "" { + return nil, fmt.Errorf("empty path") + } + + pathstr := string(p) + var pkgobj, suffix string + if dot := strings.IndexByte(pathstr, opType); dot < 0 { + pkgobj = pathstr + } else { + pkgobj = pathstr[:dot] + suffix = pathstr[dot:] // suffix starts with "." + } + + obj := pkg.Scope().Lookup(pkgobj) + if obj == nil { + return nil, fmt.Errorf("package %s does not contain %q", pkg.Path(), pkgobj) + } + + // abstraction of *types.{Pointer,Slice,Array,Chan,Map} + type hasElem interface { + Elem() types.Type + } + // abstraction of *types.{Interface,Named} + type hasMethods interface { + Method(int) *types.Func + NumMethods() int + } + + // The loop state is the pair (t, obj), + // exactly one of which is non-nil, initially obj. + // All suffixes start with '.' (the only object->type operation), + // followed by optional type->type operations, + // then a type->object operation. + // The cycle then repeats. + var t types.Type + for suffix != "" { + code := suffix[0] + suffix = suffix[1:] + + // Codes [AFM] have an integer operand. + var index int + switch code { + case opAt, opField, opMethod: + rest := strings.TrimLeft(suffix, "0123456789") + numerals := suffix[:len(suffix)-len(rest)] + suffix = rest + i, err := strconv.Atoi(numerals) + if err != nil { + return nil, fmt.Errorf("invalid path: bad numeric operand %q for code %q", numerals, code) + } + index = int(i) + case opObj: + // no operand + default: + // The suffix must end with a type->object operation. 
+ if suffix == "" { + return nil, fmt.Errorf("invalid path: ends with %q, want [AFMO]", code) + } + } + + if code == opType { + if t != nil { + return nil, fmt.Errorf("invalid path: unexpected %q in type context", opType) + } + t = obj.Type() + obj = nil + continue + } + + if t == nil { + return nil, fmt.Errorf("invalid path: code %q in object context", code) + } + + // Inv: t != nil, obj == nil + + switch code { + case opElem: + hasElem, ok := t.(hasElem) // Pointer, Slice, Array, Chan, Map + if !ok { + return nil, fmt.Errorf("cannot apply %q to %s (got %T, want pointer, slice, array, chan or map)", code, t, t) + } + t = hasElem.Elem() + + case opKey: + mapType, ok := t.(*types.Map) + if !ok { + return nil, fmt.Errorf("cannot apply %q to %s (got %T, want map)", code, t, t) + } + t = mapType.Key() + + case opParams: + sig, ok := t.(*types.Signature) + if !ok { + return nil, fmt.Errorf("cannot apply %q to %s (got %T, want signature)", code, t, t) + } + t = sig.Params() + + case opResults: + sig, ok := t.(*types.Signature) + if !ok { + return nil, fmt.Errorf("cannot apply %q to %s (got %T, want signature)", code, t, t) + } + t = sig.Results() + + case opUnderlying: + named, ok := t.(*types.Named) + if !ok { + return nil, fmt.Errorf("cannot apply %q to %s (got %s, want named)", code, t, t) + } + t = named.Underlying() + + case opAt: + tuple, ok := t.(*types.Tuple) + if !ok { + return nil, fmt.Errorf("cannot apply %q to %s (got %s, want tuple)", code, t, t) + } + if n := tuple.Len(); index >= n { + return nil, fmt.Errorf("tuple index %d out of range [0-%d)", index, n) + } + obj = tuple.At(index) + t = nil + + case opField: + structType, ok := t.(*types.Struct) + if !ok { + return nil, fmt.Errorf("cannot apply %q to %s (got %T, want struct)", code, t, t) + } + if n := structType.NumFields(); index >= n { + return nil, fmt.Errorf("field index %d out of range [0-%d)", index, n) + } + obj = structType.Field(index) + t = nil + + case opMethod: + hasMethods, ok := t.(hasMethods) // Interface or Named + if !ok { + return nil, fmt.Errorf("cannot apply %q to %s (got %s, want interface or named)", code, t, t) + } + if n := hasMethods.NumMethods(); index >= n { + return nil, fmt.Errorf("method index %d out of range [0-%d)", index, n) + } + obj = hasMethods.Method(index) + t = nil + + case opObj: + named, ok := t.(*types.Named) + if !ok { + return nil, fmt.Errorf("cannot apply %q to %s (got %s, want named)", code, t, t) + } + obj = named.Obj() + t = nil + + default: + return nil, fmt.Errorf("invalid path: unknown code %q", code) + } + } + + if obj.Pkg() != pkg { + return nil, fmt.Errorf("path denotes %s, which belongs to a different package", obj) + } + + return obj, nil // success +} diff --git a/vendor/golang.org/x/tools/go/types/typeutil/callee.go b/vendor/golang.org/x/tools/go/types/typeutil/callee.go new file mode 100644 index 000000000..38f596daf --- /dev/null +++ b/vendor/golang.org/x/tools/go/types/typeutil/callee.go @@ -0,0 +1,46 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package typeutil + +import ( + "go/ast" + "go/types" + + "golang.org/x/tools/go/ast/astutil" +) + +// Callee returns the named target of a function call, if any: +// a function, method, builtin, or variable. 
+func Callee(info *types.Info, call *ast.CallExpr) types.Object { + var obj types.Object + switch fun := astutil.Unparen(call.Fun).(type) { + case *ast.Ident: + obj = info.Uses[fun] // type, var, builtin, or declared func + case *ast.SelectorExpr: + if sel, ok := info.Selections[fun]; ok { + obj = sel.Obj() // method or field + } else { + obj = info.Uses[fun.Sel] // qualified identifier? + } + } + if _, ok := obj.(*types.TypeName); ok { + return nil // T(x) is a conversion, not a call + } + return obj +} + +// StaticCallee returns the target (function or method) of a static +// function call, if any. It returns nil for calls to builtins. +func StaticCallee(info *types.Info, call *ast.CallExpr) *types.Func { + if f, ok := Callee(info, call).(*types.Func); ok && !interfaceMethod(f) { + return f + } + return nil +} + +func interfaceMethod(f *types.Func) bool { + recv := f.Type().(*types.Signature).Recv() + return recv != nil && types.IsInterface(recv.Type()) +} diff --git a/vendor/golang.org/x/tools/go/types/typeutil/imports.go b/vendor/golang.org/x/tools/go/types/typeutil/imports.go new file mode 100644 index 000000000..9c441dba9 --- /dev/null +++ b/vendor/golang.org/x/tools/go/types/typeutil/imports.go @@ -0,0 +1,31 @@ +// Copyright 2014 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package typeutil + +import "go/types" + +// Dependencies returns all dependencies of the specified packages. +// +// Dependent packages appear in topological order: if package P imports +// package Q, Q appears earlier than P in the result. +// The algorithm follows import statements in the order they +// appear in the source code, so the result is a total order. +// +func Dependencies(pkgs ...*types.Package) []*types.Package { + var result []*types.Package + seen := make(map[*types.Package]bool) + var visit func(pkgs []*types.Package) + visit = func(pkgs []*types.Package) { + for _, p := range pkgs { + if !seen[p] { + seen[p] = true + visit(p.Imports()) + result = append(result, p) + } + } + } + visit(pkgs) + return result +} diff --git a/vendor/golang.org/x/tools/go/types/typeutil/map.go b/vendor/golang.org/x/tools/go/types/typeutil/map.go new file mode 100644 index 000000000..c7f754500 --- /dev/null +++ b/vendor/golang.org/x/tools/go/types/typeutil/map.go @@ -0,0 +1,313 @@ +// Copyright 2014 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package typeutil defines various utilities for types, such as Map, +// a mapping from types.Type to interface{} values. +package typeutil // import "golang.org/x/tools/go/types/typeutil" + +import ( + "bytes" + "fmt" + "go/types" + "reflect" +) + +// Map is a hash-table-based mapping from types (types.Type) to +// arbitrary interface{} values. The concrete types that implement +// the Type interface are pointers. Since they are not canonicalized, +// == cannot be used to check for equivalence, and thus we cannot +// simply use a Go map. +// +// Just as with map[K]V, a nil *Map is a valid empty map. +// +// Not thread-safe. +// +type Map struct { + hasher Hasher // shared by many Maps + table map[uint32][]entry // maps hash to bucket; entry.key==nil means unused + length int // number of map entries +} + +// entry is an entry (key/value association) in a hash bucket. +type entry struct { + key types.Type + value interface{} +} + +// SetHasher sets the hasher used by Map. 
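// Illustrative sketch (hypothetical source, not part of the vendored file):
// resolving the static target of a call with the Callee/StaticCallee helpers
// defined above.
package main

import (
	"fmt"
	"go/ast"
	"go/parser"
	"go/token"
	"go/types"

	"golang.org/x/tools/go/types/typeutil"
)

func main() {
	const src = `package p
func f() {}
func g() { f() }
`
	fset := token.NewFileSet()
	file, err := parser.ParseFile(fset, "p.go", src, 0)
	if err != nil {
		panic(err)
	}

	// Callee consults info.Uses and info.Selections, so both must be requested.
	info := &types.Info{
		Uses:       map[*ast.Ident]types.Object{},
		Selections: map[*ast.SelectorExpr]*types.Selection{},
	}
	if _, err := (&types.Config{}).Check("p", fset, []*ast.File{file}, info); err != nil {
		panic(err)
	}

	ast.Inspect(file, func(n ast.Node) bool {
		if call, ok := n.(*ast.CallExpr); ok {
			fmt.Println(typeutil.StaticCallee(info, call)) // func p.f()
		}
		return true
	})
}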
+// +// All Hashers are functionally equivalent but contain internal state +// used to cache the results of hashing previously seen types. +// +// A single Hasher created by MakeHasher() may be shared among many +// Maps. This is recommended if the instances have many keys in +// common, as it will amortize the cost of hash computation. +// +// A Hasher may grow without bound as new types are seen. Even when a +// type is deleted from the map, the Hasher never shrinks, since other +// types in the map may reference the deleted type indirectly. +// +// Hashers are not thread-safe, and read-only operations such as +// Map.Lookup require updates to the hasher, so a full Mutex lock (not a +// read-lock) is require around all Map operations if a shared +// hasher is accessed from multiple threads. +// +// If SetHasher is not called, the Map will create a private hasher at +// the first call to Insert. +// +func (m *Map) SetHasher(hasher Hasher) { + m.hasher = hasher +} + +// Delete removes the entry with the given key, if any. +// It returns true if the entry was found. +// +func (m *Map) Delete(key types.Type) bool { + if m != nil && m.table != nil { + hash := m.hasher.Hash(key) + bucket := m.table[hash] + for i, e := range bucket { + if e.key != nil && types.Identical(key, e.key) { + // We can't compact the bucket as it + // would disturb iterators. + bucket[i] = entry{} + m.length-- + return true + } + } + } + return false +} + +// At returns the map entry for the given key. +// The result is nil if the entry is not present. +// +func (m *Map) At(key types.Type) interface{} { + if m != nil && m.table != nil { + for _, e := range m.table[m.hasher.Hash(key)] { + if e.key != nil && types.Identical(key, e.key) { + return e.value + } + } + } + return nil +} + +// Set sets the map entry for key to val, +// and returns the previous entry, if any. +func (m *Map) Set(key types.Type, value interface{}) (prev interface{}) { + if m.table != nil { + hash := m.hasher.Hash(key) + bucket := m.table[hash] + var hole *entry + for i, e := range bucket { + if e.key == nil { + hole = &bucket[i] + } else if types.Identical(key, e.key) { + prev = e.value + bucket[i].value = value + return + } + } + + if hole != nil { + *hole = entry{key, value} // overwrite deleted entry + } else { + m.table[hash] = append(bucket, entry{key, value}) + } + } else { + if m.hasher.memo == nil { + m.hasher = MakeHasher() + } + hash := m.hasher.Hash(key) + m.table = map[uint32][]entry{hash: {entry{key, value}}} + } + + m.length++ + return +} + +// Len returns the number of map entries. +func (m *Map) Len() int { + if m != nil { + return m.length + } + return 0 +} + +// Iterate calls function f on each entry in the map in unspecified order. +// +// If f should mutate the map, Iterate provides the same guarantees as +// Go maps: if f deletes a map entry that Iterate has not yet reached, +// f will not be invoked for it, but if f inserts a map entry that +// Iterate has not yet reached, whether or not f will be invoked for +// it is unspecified. +// +func (m *Map) Iterate(f func(key types.Type, value interface{})) { + if m != nil { + for _, bucket := range m.table { + for _, e := range bucket { + if e.key != nil { + f(e.key, e.value) + } + } + } + } +} + +// Keys returns a new slice containing the set of map keys. +// The order is unspecified. 
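// Illustrative sketch (not part of the vendored file): the Map above keys on
// type identity (types.Identical), not pointer equality, so two structurally
// identical but distinct types.Type values address the same entry.
package main

import (
	"fmt"
	"go/types"

	"golang.org/x/tools/go/types/typeutil"
)

func main() {
	var m typeutil.Map // the zero value is an empty, ready-to-use map

	k1 := types.NewSlice(types.Typ[types.Int]) // []int
	k2 := types.NewSlice(types.Typ[types.Int]) // a distinct but identical []int

	m.Set(k1, "slice of int")
	fmt.Println(m.At(k2)) // "slice of int": identical key, same entry
	fmt.Println(m.Len())  // 1

	m.Delete(k2)
	fmt.Println(m.Len()) // 0
}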
+func (m *Map) Keys() []types.Type { + keys := make([]types.Type, 0, m.Len()) + m.Iterate(func(key types.Type, _ interface{}) { + keys = append(keys, key) + }) + return keys +} + +func (m *Map) toString(values bool) string { + if m == nil { + return "{}" + } + var buf bytes.Buffer + fmt.Fprint(&buf, "{") + sep := "" + m.Iterate(func(key types.Type, value interface{}) { + fmt.Fprint(&buf, sep) + sep = ", " + fmt.Fprint(&buf, key) + if values { + fmt.Fprintf(&buf, ": %q", value) + } + }) + fmt.Fprint(&buf, "}") + return buf.String() +} + +// String returns a string representation of the map's entries. +// Values are printed using fmt.Sprintf("%v", v). +// Order is unspecified. +// +func (m *Map) String() string { + return m.toString(true) +} + +// KeysString returns a string representation of the map's key set. +// Order is unspecified. +// +func (m *Map) KeysString() string { + return m.toString(false) +} + +//////////////////////////////////////////////////////////////////////// +// Hasher + +// A Hasher maps each type to its hash value. +// For efficiency, a hasher uses memoization; thus its memory +// footprint grows monotonically over time. +// Hashers are not thread-safe. +// Hashers have reference semantics. +// Call MakeHasher to create a Hasher. +type Hasher struct { + memo map[types.Type]uint32 +} + +// MakeHasher returns a new Hasher instance. +func MakeHasher() Hasher { + return Hasher{make(map[types.Type]uint32)} +} + +// Hash computes a hash value for the given type t such that +// Identical(t, t') => Hash(t) == Hash(t'). +func (h Hasher) Hash(t types.Type) uint32 { + hash, ok := h.memo[t] + if !ok { + hash = h.hashFor(t) + h.memo[t] = hash + } + return hash +} + +// hashString computes the Fowler–Noll–Vo hash of s. +func hashString(s string) uint32 { + var h uint32 + for i := 0; i < len(s); i++ { + h ^= uint32(s[i]) + h *= 16777619 + } + return h +} + +// hashFor computes the hash of t. +func (h Hasher) hashFor(t types.Type) uint32 { + // See Identical for rationale. + switch t := t.(type) { + case *types.Basic: + return uint32(t.Kind()) + + case *types.Array: + return 9043 + 2*uint32(t.Len()) + 3*h.Hash(t.Elem()) + + case *types.Slice: + return 9049 + 2*h.Hash(t.Elem()) + + case *types.Struct: + var hash uint32 = 9059 + for i, n := 0, t.NumFields(); i < n; i++ { + f := t.Field(i) + if f.Anonymous() { + hash += 8861 + } + hash += hashString(t.Tag(i)) + hash += hashString(f.Name()) // (ignore f.Pkg) + hash += h.Hash(f.Type()) + } + return hash + + case *types.Pointer: + return 9067 + 2*h.Hash(t.Elem()) + + case *types.Signature: + var hash uint32 = 9091 + if t.Variadic() { + hash *= 8863 + } + return hash + 3*h.hashTuple(t.Params()) + 5*h.hashTuple(t.Results()) + + case *types.Interface: + var hash uint32 = 9103 + for i, n := 0, t.NumMethods(); i < n; i++ { + // See go/types.identicalMethods for rationale. + // Method order is not significant. + // Ignore m.Pkg(). + m := t.Method(i) + hash += 3*hashString(m.Name()) + 5*h.Hash(m.Type()) + } + return hash + + case *types.Map: + return 9109 + 2*h.Hash(t.Key()) + 3*h.Hash(t.Elem()) + + case *types.Chan: + return 9127 + 2*uint32(t.Dir()) + 3*h.Hash(t.Elem()) + + case *types.Named: + // Not safe with a copying GC; objects may move. + return uint32(reflect.ValueOf(t.Obj()).Pointer()) + + case *types.Tuple: + return h.hashTuple(t) + } + panic(t) +} + +func (h Hasher) hashTuple(tuple *types.Tuple) uint32 { + // See go/types.identicalTypes for rationale. 
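// Illustrative sketch (not part of the vendored file) of the Hasher contract
// stated above: identical types hash equally, and one Hasher may be shared by
// several Maps via SetHasher so its memo table is reused.
package main

import (
	"fmt"
	"go/types"

	"golang.org/x/tools/go/types/typeutil"
)

func main() {
	h := typeutil.MakeHasher()

	t1 := types.NewPointer(types.Typ[types.String]) // *string
	t2 := types.NewPointer(types.Typ[types.String]) // a distinct *string type
	fmt.Println(types.Identical(t1, t2), h.Hash(t1) == h.Hash(t2)) // true true

	var a, b typeutil.Map
	a.SetHasher(h) // both maps now amortize hashing of common types
	b.SetHasher(h)
}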
+ n := tuple.Len() + var hash uint32 = 9137 + 2*uint32(n) + for i := 0; i < n; i++ { + hash += 3 * h.Hash(tuple.At(i).Type()) + } + return hash +} diff --git a/vendor/golang.org/x/tools/go/types/typeutil/methodsetcache.go b/vendor/golang.org/x/tools/go/types/typeutil/methodsetcache.go new file mode 100644 index 000000000..32084610f --- /dev/null +++ b/vendor/golang.org/x/tools/go/types/typeutil/methodsetcache.go @@ -0,0 +1,72 @@ +// Copyright 2014 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// This file implements a cache of method sets. + +package typeutil + +import ( + "go/types" + "sync" +) + +// A MethodSetCache records the method set of each type T for which +// MethodSet(T) is called so that repeat queries are fast. +// The zero value is a ready-to-use cache instance. +type MethodSetCache struct { + mu sync.Mutex + named map[*types.Named]struct{ value, pointer *types.MethodSet } // method sets for named N and *N + others map[types.Type]*types.MethodSet // all other types +} + +// MethodSet returns the method set of type T. It is thread-safe. +// +// If cache is nil, this function is equivalent to types.NewMethodSet(T). +// Utility functions can thus expose an optional *MethodSetCache +// parameter to clients that care about performance. +// +func (cache *MethodSetCache) MethodSet(T types.Type) *types.MethodSet { + if cache == nil { + return types.NewMethodSet(T) + } + cache.mu.Lock() + defer cache.mu.Unlock() + + switch T := T.(type) { + case *types.Named: + return cache.lookupNamed(T).value + + case *types.Pointer: + if N, ok := T.Elem().(*types.Named); ok { + return cache.lookupNamed(N).pointer + } + } + + // all other types + // (The map uses pointer equivalence, not type identity.) + mset := cache.others[T] + if mset == nil { + mset = types.NewMethodSet(T) + if cache.others == nil { + cache.others = make(map[types.Type]*types.MethodSet) + } + cache.others[T] = mset + } + return mset +} + +func (cache *MethodSetCache) lookupNamed(named *types.Named) struct{ value, pointer *types.MethodSet } { + if cache.named == nil { + cache.named = make(map[*types.Named]struct{ value, pointer *types.MethodSet }) + } + // Avoid recomputing mset(*T) for each distinct Pointer + // instance whose underlying type is a named type. + msets, ok := cache.named[named] + if !ok { + msets.value = types.NewMethodSet(named) + msets.pointer = types.NewMethodSet(types.NewPointer(named)) + cache.named[named] = msets + } + return msets +} diff --git a/vendor/golang.org/x/tools/go/types/typeutil/ui.go b/vendor/golang.org/x/tools/go/types/typeutil/ui.go new file mode 100644 index 000000000..9849c24ce --- /dev/null +++ b/vendor/golang.org/x/tools/go/types/typeutil/ui.go @@ -0,0 +1,52 @@ +// Copyright 2014 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package typeutil + +// This file defines utilities for user interfaces that display types. + +import "go/types" + +// IntuitiveMethodSet returns the intuitive method set of a type T, +// which is the set of methods you can call on an addressable value of +// that type. +// +// The result always contains MethodSet(T), and is exactly MethodSet(T) +// for interface types and for pointer-to-concrete types. +// For all other concrete types T, the result additionally +// contains each method belonging to *T if there is no identically +// named method on T itself. 
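// Illustrative sketch (not part of the vendored file): the zero MethodSetCache
// above is ready to use, and a repeated query for the same type returns the
// cached *types.MethodSet.
package main

import (
	"fmt"
	"go/types"

	"golang.org/x/tools/go/types/typeutil"
)

func main() {
	var cache typeutil.MethodSetCache

	T := types.NewPointer(types.Typ[types.Int]) // any types.Type works
	first := cache.MethodSet(T)
	second := cache.MethodSet(T)
	fmt.Println(first == second) // true: the second lookup hits the cache
	fmt.Println(first.Len())     // 0: *int has no methods
}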
+// +// This corresponds to user intuition about method sets; +// this function is intended only for user interfaces. +// +// The order of the result is as for types.MethodSet(T). +// +func IntuitiveMethodSet(T types.Type, msets *MethodSetCache) []*types.Selection { + isPointerToConcrete := func(T types.Type) bool { + ptr, ok := T.(*types.Pointer) + return ok && !types.IsInterface(ptr.Elem()) + } + + var result []*types.Selection + mset := msets.MethodSet(T) + if types.IsInterface(T) || isPointerToConcrete(T) { + for i, n := 0, mset.Len(); i < n; i++ { + result = append(result, mset.At(i)) + } + } else { + // T is some other concrete type. + // Report methods of T and *T, preferring those of T. + pmset := msets.MethodSet(types.NewPointer(T)) + for i, n := 0, pmset.Len(); i < n; i++ { + meth := pmset.At(i) + if m := mset.Lookup(meth.Obj().Pkg(), meth.Obj().Name()); m != nil { + meth = m + } + result = append(result, meth) + } + + } + return result +} diff --git a/vendor/golang.org/x/tools/imports/forward.go b/vendor/golang.org/x/tools/imports/forward.go new file mode 100644 index 000000000..8be18a66b --- /dev/null +++ b/vendor/golang.org/x/tools/imports/forward.go @@ -0,0 +1,77 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package imports implements a Go pretty-printer (like package "go/format") +// that also adds or removes import statements as necessary. +package imports // import "golang.org/x/tools/imports" + +import ( + "io/ioutil" + "log" + + "golang.org/x/tools/internal/gocommand" + intimp "golang.org/x/tools/internal/imports" +) + +// Options specifies options for processing files. +type Options struct { + Fragment bool // Accept fragment of a source file (no package statement) + AllErrors bool // Report all errors (not just the first 10 on different lines) + + Comments bool // Print comments (true if nil *Options provided) + TabIndent bool // Use tabs for indent (true if nil *Options provided) + TabWidth int // Tab width (8 if nil *Options provided) + + FormatOnly bool // Disable the insertion and deletion of imports +} + +// Debug controls verbose logging. +var Debug = false + +// LocalPrefix is a comma-separated string of import path prefixes, which, if +// set, instructs Process to sort the import paths with the given prefixes +// into another group after 3rd-party packages. +var LocalPrefix string + +// Process formats and adjusts imports for the provided file. +// If opt is nil the defaults are used, and if src is nil the source +// is read from the filesystem. +// +// Note that filename's directory influences which imports can be chosen, +// so it is important that filename be accurate. +// To process data ``as if'' it were in filename, pass the data as a non-nil src. 
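// Illustrative sketch (hypothetical file name, not part of the vendored file):
// Process formats a fragment and inserts the missing import; the file name is
// only a hint used when resolving imports.
package main

import (
	"fmt"
	"log"

	"golang.org/x/tools/imports"
)

func main() {
	src := []byte("package main\n\nfunc main() { fmt.Println(\"hi\") }\n")

	out, err := imports.Process("example.go", src, nil) // nil Options: defaults
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("%s", out) // the result now imports "fmt"
}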
+func Process(filename string, src []byte, opt *Options) ([]byte, error) { + var err error + if src == nil { + src, err = ioutil.ReadFile(filename) + if err != nil { + return nil, err + } + } + if opt == nil { + opt = &Options{Comments: true, TabIndent: true, TabWidth: 8} + } + intopt := &intimp.Options{ + Env: &intimp.ProcessEnv{ + GocmdRunner: &gocommand.Runner{}, + }, + LocalPrefix: LocalPrefix, + AllErrors: opt.AllErrors, + Comments: opt.Comments, + FormatOnly: opt.FormatOnly, + Fragment: opt.Fragment, + TabIndent: opt.TabIndent, + TabWidth: opt.TabWidth, + } + if Debug { + intopt.Env.Logf = log.Printf + } + return intimp.Process(filename, src, intopt) +} + +// VendorlessPath returns the devendorized version of the import path ipath. +// For example, VendorlessPath("foo/bar/vendor/a/b") returns "a/b". +func VendorlessPath(ipath string) string { + return intimp.VendorlessPath(ipath) +} diff --git a/vendor/golang.org/x/tools/internal/analysisinternal/analysis.go b/vendor/golang.org/x/tools/internal/analysisinternal/analysis.go new file mode 100644 index 000000000..01f6e829f --- /dev/null +++ b/vendor/golang.org/x/tools/internal/analysisinternal/analysis.go @@ -0,0 +1,425 @@ +// Copyright 2020 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package analysisinternal exposes internal-only fields from go/analysis. +package analysisinternal + +import ( + "bytes" + "fmt" + "go/ast" + "go/token" + "go/types" + "strings" + + "golang.org/x/tools/go/ast/astutil" + "golang.org/x/tools/internal/lsp/fuzzy" +) + +var ( + GetTypeErrors func(p interface{}) []types.Error + SetTypeErrors func(p interface{}, errors []types.Error) +) + +func TypeErrorEndPos(fset *token.FileSet, src []byte, start token.Pos) token.Pos { + // Get the end position for the type error. + offset, end := fset.PositionFor(start, false).Offset, start + if offset >= len(src) { + return end + } + if width := bytes.IndexAny(src[offset:], " \n,():;[]+-*"); width > 0 { + end = start + token.Pos(width) + } + return end +} + +func ZeroValue(fset *token.FileSet, f *ast.File, pkg *types.Package, typ types.Type) ast.Expr { + under := typ + if n, ok := typ.(*types.Named); ok { + under = n.Underlying() + } + switch u := under.(type) { + case *types.Basic: + switch { + case u.Info()&types.IsNumeric != 0: + return &ast.BasicLit{Kind: token.INT, Value: "0"} + case u.Info()&types.IsBoolean != 0: + return &ast.Ident{Name: "false"} + case u.Info()&types.IsString != 0: + return &ast.BasicLit{Kind: token.STRING, Value: `""`} + default: + panic("unknown basic type") + } + case *types.Chan, *types.Interface, *types.Map, *types.Pointer, *types.Signature, *types.Slice, *types.Array: + return ast.NewIdent("nil") + case *types.Struct: + texpr := TypeExpr(fset, f, pkg, typ) // typ because we want the name here. 
+ if texpr == nil { + return nil + } + return &ast.CompositeLit{ + Type: texpr, + } + } + return nil +} + +// IsZeroValue checks whether the given expression is a 'zero value' (as determined by output of +// analysisinternal.ZeroValue) +func IsZeroValue(expr ast.Expr) bool { + switch e := expr.(type) { + case *ast.BasicLit: + return e.Value == "0" || e.Value == `""` + case *ast.Ident: + return e.Name == "nil" || e.Name == "false" + default: + return false + } +} + +func TypeExpr(fset *token.FileSet, f *ast.File, pkg *types.Package, typ types.Type) ast.Expr { + switch t := typ.(type) { + case *types.Basic: + switch t.Kind() { + case types.UnsafePointer: + return &ast.SelectorExpr{X: ast.NewIdent("unsafe"), Sel: ast.NewIdent("Pointer")} + default: + return ast.NewIdent(t.Name()) + } + case *types.Pointer: + x := TypeExpr(fset, f, pkg, t.Elem()) + if x == nil { + return nil + } + return &ast.UnaryExpr{ + Op: token.MUL, + X: x, + } + case *types.Array: + elt := TypeExpr(fset, f, pkg, t.Elem()) + if elt == nil { + return nil + } + return &ast.ArrayType{ + Len: &ast.BasicLit{ + Kind: token.INT, + Value: fmt.Sprintf("%d", t.Len()), + }, + Elt: elt, + } + case *types.Slice: + elt := TypeExpr(fset, f, pkg, t.Elem()) + if elt == nil { + return nil + } + return &ast.ArrayType{ + Elt: elt, + } + case *types.Map: + key := TypeExpr(fset, f, pkg, t.Key()) + value := TypeExpr(fset, f, pkg, t.Elem()) + if key == nil || value == nil { + return nil + } + return &ast.MapType{ + Key: key, + Value: value, + } + case *types.Chan: + dir := ast.ChanDir(t.Dir()) + if t.Dir() == types.SendRecv { + dir = ast.SEND | ast.RECV + } + value := TypeExpr(fset, f, pkg, t.Elem()) + if value == nil { + return nil + } + return &ast.ChanType{ + Dir: dir, + Value: value, + } + case *types.Signature: + var params []*ast.Field + for i := 0; i < t.Params().Len(); i++ { + p := TypeExpr(fset, f, pkg, t.Params().At(i).Type()) + if p == nil { + return nil + } + params = append(params, &ast.Field{ + Type: p, + Names: []*ast.Ident{ + { + Name: t.Params().At(i).Name(), + }, + }, + }) + } + var returns []*ast.Field + for i := 0; i < t.Results().Len(); i++ { + r := TypeExpr(fset, f, pkg, t.Results().At(i).Type()) + if r == nil { + return nil + } + returns = append(returns, &ast.Field{ + Type: r, + }) + } + return &ast.FuncType{ + Params: &ast.FieldList{ + List: params, + }, + Results: &ast.FieldList{ + List: returns, + }, + } + case *types.Named: + if t.Obj().Pkg() == nil { + return ast.NewIdent(t.Obj().Name()) + } + if t.Obj().Pkg() == pkg { + return ast.NewIdent(t.Obj().Name()) + } + pkgName := t.Obj().Pkg().Name() + // If the file already imports the package under another name, use that. + for _, group := range astutil.Imports(fset, f) { + for _, cand := range group { + if strings.Trim(cand.Path.Value, `"`) == t.Obj().Pkg().Path() { + if cand.Name != nil && cand.Name.Name != "" { + pkgName = cand.Name.Name + } + } + } + } + if pkgName == "." { + return ast.NewIdent(t.Obj().Name()) + } + return &ast.SelectorExpr{ + X: ast.NewIdent(pkgName), + Sel: ast.NewIdent(t.Obj().Name()), + } + case *types.Struct: + return ast.NewIdent(t.String()) + case *types.Interface: + return ast.NewIdent(t.String()) + default: + return nil + } +} + +type TypeErrorPass string + +const ( + NoNewVars TypeErrorPass = "nonewvars" + NoResultValues TypeErrorPass = "noresultvalues" + UndeclaredName TypeErrorPass = "undeclaredname" +) + +// StmtToInsertVarBefore returns the ast.Stmt before which we can safely insert a new variable. 
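// Illustrative sketch (not part of the vendored file; analysisinternal is an
// internal package, so this only compiles from within the x/tools module):
// synthesize the zero-value expression for a type with ZeroValue and print it.
package main

import (
	"go/printer"
	"go/token"
	"go/types"
	"os"

	"golang.org/x/tools/internal/analysisinternal"
)

func main() {
	fset := token.NewFileSet()

	// For basic and reference types, ZeroValue does not consult the file or
	// package arguments, so nil suffices in this sketch.
	expr := analysisinternal.ZeroValue(fset, nil, nil, types.Typ[types.Float64])
	printer.Fprint(os.Stdout, fset, expr) // prints: 0
}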
+// Some examples: +// +// Basic Example: +// z := 1 +// y := z + x +// If x is undeclared, then this function would return `y := z + x`, so that we +// can insert `x := ` on the line before `y := z + x`. +// +// If stmt example: +// if z == 1 { +// } else if z == y {} +// If y is undeclared, then this function would return `if z == 1 {`, because we cannot +// insert a statement between an if and an else if statement. As a result, we need to find +// the top of the if chain to insert `y := ` before. +func StmtToInsertVarBefore(path []ast.Node) ast.Stmt { + enclosingIndex := -1 + for i, p := range path { + if _, ok := p.(ast.Stmt); ok { + enclosingIndex = i + break + } + } + if enclosingIndex == -1 { + return nil + } + enclosingStmt := path[enclosingIndex] + switch enclosingStmt.(type) { + case *ast.IfStmt: + // The enclosingStmt is inside of the if declaration, + // We need to check if we are in an else-if stmt and + // get the base if statement. + return baseIfStmt(path, enclosingIndex) + case *ast.CaseClause: + // Get the enclosing switch stmt if the enclosingStmt is + // inside of the case statement. + for i := enclosingIndex + 1; i < len(path); i++ { + if node, ok := path[i].(*ast.SwitchStmt); ok { + return node + } else if node, ok := path[i].(*ast.TypeSwitchStmt); ok { + return node + } + } + } + if len(path) <= enclosingIndex+1 { + return enclosingStmt.(ast.Stmt) + } + // Check if the enclosing statement is inside another node. + switch expr := path[enclosingIndex+1].(type) { + case *ast.IfStmt: + // Get the base if statement. + return baseIfStmt(path, enclosingIndex+1) + case *ast.ForStmt: + if expr.Init == enclosingStmt || expr.Post == enclosingStmt { + return expr + } + } + return enclosingStmt.(ast.Stmt) +} + +// baseIfStmt walks up the if/else-if chain until we get to +// the top of the current if chain. +func baseIfStmt(path []ast.Node, index int) ast.Stmt { + stmt := path[index] + for i := index + 1; i < len(path); i++ { + if node, ok := path[i].(*ast.IfStmt); ok && node.Else == stmt { + stmt = node + continue + } + break + } + return stmt.(ast.Stmt) +} + +// WalkASTWithParent walks the AST rooted at n. The semantics are +// similar to ast.Inspect except it does not call f(nil). +func WalkASTWithParent(n ast.Node, f func(n ast.Node, parent ast.Node) bool) { + var ancestors []ast.Node + ast.Inspect(n, func(n ast.Node) (recurse bool) { + if n == nil { + ancestors = ancestors[:len(ancestors)-1] + return false + } + + var parent ast.Node + if len(ancestors) > 0 { + parent = ancestors[len(ancestors)-1] + } + ancestors = append(ancestors, n) + return f(n, parent) + }) +} + +// FindMatchingIdents finds all identifiers in 'node' that match any of the given types. +// 'pos' represents the position at which the identifiers may be inserted. 'pos' must be within +// the scope of each of identifier we select. Otherwise, we will insert a variable at 'pos' that +// is unrecognized. +func FindMatchingIdents(typs []types.Type, node ast.Node, pos token.Pos, info *types.Info, pkg *types.Package) map[types.Type][]*ast.Ident { + matches := map[types.Type][]*ast.Ident{} + // Initialize matches to contain the variable types we are searching for. + for _, typ := range typs { + if typ == nil { + continue + } + matches[typ] = []*ast.Ident{} + } + seen := map[types.Object]struct{}{} + ast.Inspect(node, func(n ast.Node) bool { + if n == nil { + return false + } + // Prevent circular definitions. 
If 'pos' is within an assignment statement, do not + // allow any identifiers in that assignment statement to be selected. Otherwise, + // we could do the following, where 'x' satisfies the type of 'f0': + // + // x := fakeStruct{f0: x} + // + assignment, ok := n.(*ast.AssignStmt) + if ok && pos > assignment.Pos() && pos <= assignment.End() { + return false + } + if n.End() > pos { + return n.Pos() <= pos + } + ident, ok := n.(*ast.Ident) + if !ok || ident.Name == "_" { + return true + } + obj := info.Defs[ident] + if obj == nil || obj.Type() == nil { + return true + } + if _, ok := obj.(*types.TypeName); ok { + return true + } + // Prevent duplicates in matches' values. + if _, ok = seen[obj]; ok { + return true + } + seen[obj] = struct{}{} + // Find the scope for the given position. Then, check whether the object + // exists within the scope. + innerScope := pkg.Scope().Innermost(pos) + if innerScope == nil { + return true + } + _, foundObj := innerScope.LookupParent(ident.Name, pos) + if foundObj != obj { + return true + } + // The object must match one of the types that we are searching for. + if idents, ok := matches[obj.Type()]; ok { + matches[obj.Type()] = append(idents, ast.NewIdent(ident.Name)) + } + // If the object type does not exactly match any of the target types, greedily + // find the first target type that the object type can satisfy. + for typ := range matches { + if obj.Type() == typ { + continue + } + if equivalentTypes(obj.Type(), typ) { + matches[typ] = append(matches[typ], ast.NewIdent(ident.Name)) + } + } + return true + }) + return matches +} + +func equivalentTypes(want, got types.Type) bool { + if want == got || types.Identical(want, got) { + return true + } + // Code segment to help check for untyped equality from (golang/go#32146). + if rhs, ok := want.(*types.Basic); ok && rhs.Info()&types.IsUntyped > 0 { + if lhs, ok := got.Underlying().(*types.Basic); ok { + return rhs.Info()&types.IsConstType == lhs.Info()&types.IsConstType + } + } + return types.AssignableTo(want, got) +} + +// FindBestMatch employs fuzzy matching to evaluate the similarity of each given identifier to the +// given pattern. We return the identifier whose name is most similar to the pattern. +func FindBestMatch(pattern string, idents []*ast.Ident) ast.Expr { + fuzz := fuzzy.NewMatcher(pattern) + var bestFuzz ast.Expr + highScore := float32(0) // minimum score is 0 (no match) + for _, ident := range idents { + // TODO: Improve scoring algorithm. + score := fuzz.Score(ident.Name) + if score > highScore { + highScore = score + bestFuzz = ident + } else if score == 0 { + // Order matters in the fuzzy matching algorithm. If we find no match + // when matching the target to the identifier, try matching the identifier + // to the target. + revFuzz := fuzzy.NewMatcher(ident.Name) + revScore := revFuzz.Score(pattern) + if revScore > highScore { + highScore = revScore + bestFuzz = ident + } + } + } + return bestFuzz +} diff --git a/vendor/golang.org/x/tools/internal/event/core/event.go b/vendor/golang.org/x/tools/internal/event/core/event.go new file mode 100644 index 000000000..a6cf0e64a --- /dev/null +++ b/vendor/golang.org/x/tools/internal/event/core/event.go @@ -0,0 +1,85 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package core provides support for event based telemetry. 
+package core + +import ( + "fmt" + "time" + + "golang.org/x/tools/internal/event/label" +) + +// Event holds the information about an event of note that occurred. +type Event struct { + at time.Time + + // As events are often on the stack, storing the first few labels directly + // in the event can avoid an allocation at all for the very common cases of + // simple events. + // The length needs to be large enough to cope with the majority of events + // but no so large as to cause undue stack pressure. + // A log message with two values will use 3 labels (one for each value and + // one for the message itself). + + static [3]label.Label // inline storage for the first few labels + dynamic []label.Label // dynamically sized storage for remaining labels +} + +// eventLabelMap implements label.Map for a the labels of an Event. +type eventLabelMap struct { + event Event +} + +func (ev Event) At() time.Time { return ev.at } + +func (ev Event) Format(f fmt.State, r rune) { + if !ev.at.IsZero() { + fmt.Fprint(f, ev.at.Format("2006/01/02 15:04:05 ")) + } + for index := 0; ev.Valid(index); index++ { + if l := ev.Label(index); l.Valid() { + fmt.Fprintf(f, "\n\t%v", l) + } + } +} + +func (ev Event) Valid(index int) bool { + return index >= 0 && index < len(ev.static)+len(ev.dynamic) +} + +func (ev Event) Label(index int) label.Label { + if index < len(ev.static) { + return ev.static[index] + } + return ev.dynamic[index-len(ev.static)] +} + +func (ev Event) Find(key label.Key) label.Label { + for _, l := range ev.static { + if l.Key() == key { + return l + } + } + for _, l := range ev.dynamic { + if l.Key() == key { + return l + } + } + return label.Label{} +} + +func MakeEvent(static [3]label.Label, labels []label.Label) Event { + return Event{ + static: static, + dynamic: labels, + } +} + +// CloneEvent event returns a copy of the event with the time adjusted to at. +func CloneEvent(ev Event, at time.Time) Event { + ev.at = at + return ev +} diff --git a/vendor/golang.org/x/tools/internal/event/core/export.go b/vendor/golang.org/x/tools/internal/event/core/export.go new file mode 100644 index 000000000..05f3a9a57 --- /dev/null +++ b/vendor/golang.org/x/tools/internal/event/core/export.go @@ -0,0 +1,70 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package core + +import ( + "context" + "sync/atomic" + "time" + "unsafe" + + "golang.org/x/tools/internal/event/label" +) + +// Exporter is a function that handles events. +// It may return a modified context and event. +type Exporter func(context.Context, Event, label.Map) context.Context + +var ( + exporter unsafe.Pointer +) + +// SetExporter sets the global exporter function that handles all events. +// The exporter is called synchronously from the event call site, so it should +// return quickly so as not to hold up user code. +func SetExporter(e Exporter) { + p := unsafe.Pointer(&e) + if e == nil { + // &e is always valid, and so p is always valid, but for the early abort + // of ProcessEvent to be efficient it needs to make the nil check on the + // pointer without having to dereference it, so we make the nil function + // also a nil pointer + p = nil + } + atomic.StorePointer(&exporter, p) +} + +// deliver is called to deliver an event to the supplied exporter. +// it will fill in the time. 
+func deliver(ctx context.Context, exporter Exporter, ev Event) context.Context { + // add the current time to the event + ev.at = time.Now() + // hand the event off to the current exporter + return exporter(ctx, ev, ev) +} + +// Export is called to deliver an event to the global exporter if set. +func Export(ctx context.Context, ev Event) context.Context { + // get the global exporter and abort early if there is not one + exporterPtr := (*Exporter)(atomic.LoadPointer(&exporter)) + if exporterPtr == nil { + return ctx + } + return deliver(ctx, *exporterPtr, ev) +} + +// ExportPair is called to deliver a start event to the supplied exporter. +// It also returns a function that will deliver the end event to the same +// exporter. +// It will fill in the time. +func ExportPair(ctx context.Context, begin, end Event) (context.Context, func()) { + // get the global exporter and abort early if there is not one + exporterPtr := (*Exporter)(atomic.LoadPointer(&exporter)) + if exporterPtr == nil { + return ctx, func() {} + } + ctx = deliver(ctx, *exporterPtr, begin) + return ctx, func() { deliver(ctx, *exporterPtr, end) } +} diff --git a/vendor/golang.org/x/tools/internal/event/core/fast.go b/vendor/golang.org/x/tools/internal/event/core/fast.go new file mode 100644 index 000000000..06c1d4615 --- /dev/null +++ b/vendor/golang.org/x/tools/internal/event/core/fast.go @@ -0,0 +1,77 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package core + +import ( + "context" + + "golang.org/x/tools/internal/event/keys" + "golang.org/x/tools/internal/event/label" +) + +// Log1 takes a message and one label delivers a log event to the exporter. +// It is a customized version of Print that is faster and does no allocation. +func Log1(ctx context.Context, message string, t1 label.Label) { + Export(ctx, MakeEvent([3]label.Label{ + keys.Msg.Of(message), + t1, + }, nil)) +} + +// Log2 takes a message and two labels and delivers a log event to the exporter. +// It is a customized version of Print that is faster and does no allocation. +func Log2(ctx context.Context, message string, t1 label.Label, t2 label.Label) { + Export(ctx, MakeEvent([3]label.Label{ + keys.Msg.Of(message), + t1, + t2, + }, nil)) +} + +// Metric1 sends a label event to the exporter with the supplied labels. +func Metric1(ctx context.Context, t1 label.Label) context.Context { + return Export(ctx, MakeEvent([3]label.Label{ + keys.Metric.New(), + t1, + }, nil)) +} + +// Metric2 sends a label event to the exporter with the supplied labels. +func Metric2(ctx context.Context, t1, t2 label.Label) context.Context { + return Export(ctx, MakeEvent([3]label.Label{ + keys.Metric.New(), + t1, + t2, + }, nil)) +} + +// Start1 sends a span start event with the supplied label list to the exporter. +// It also returns a function that will end the span, which should normally be +// deferred. +func Start1(ctx context.Context, name string, t1 label.Label) (context.Context, func()) { + return ExportPair(ctx, + MakeEvent([3]label.Label{ + keys.Start.Of(name), + t1, + }, nil), + MakeEvent([3]label.Label{ + keys.End.New(), + }, nil)) +} + +// Start2 sends a span start event with the supplied label list to the exporter. +// It also returns a function that will end the span, which should normally be +// deferred. 
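// Illustrative sketch (not part of the vendored file; these are internal
// packages, so this only compiles from within the x/tools module): the
// allocation-free fast paths above mirror event.Log/event.Start for the
// common one- and two-label cases. nameKey is an assumed, illustrative key.
package main

import (
	"context"

	"golang.org/x/tools/internal/event/core"
	"golang.org/x/tools/internal/event/keys"
)

var nameKey = keys.New("name", "which item was processed")

func main() {
	ctx := context.Background()

	core.Log1(ctx, "processed item", nameKey.Of("a.go"))

	ctx, done := core.Start1(ctx, "process", nameKey.Of("a.go"))
	defer done() // emits the matching end event
	_ = ctx
}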
+func Start2(ctx context.Context, name string, t1, t2 label.Label) (context.Context, func()) { + return ExportPair(ctx, + MakeEvent([3]label.Label{ + keys.Start.Of(name), + t1, + t2, + }, nil), + MakeEvent([3]label.Label{ + keys.End.New(), + }, nil)) +} diff --git a/vendor/golang.org/x/tools/internal/event/doc.go b/vendor/golang.org/x/tools/internal/event/doc.go new file mode 100644 index 000000000..5dc6e6bab --- /dev/null +++ b/vendor/golang.org/x/tools/internal/event/doc.go @@ -0,0 +1,7 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package event provides a set of packages that cover the main +// concepts of telemetry in an implementation agnostic way. +package event diff --git a/vendor/golang.org/x/tools/internal/event/event.go b/vendor/golang.org/x/tools/internal/event/event.go new file mode 100644 index 000000000..4d55e577d --- /dev/null +++ b/vendor/golang.org/x/tools/internal/event/event.go @@ -0,0 +1,127 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package event + +import ( + "context" + + "golang.org/x/tools/internal/event/core" + "golang.org/x/tools/internal/event/keys" + "golang.org/x/tools/internal/event/label" +) + +// Exporter is a function that handles events. +// It may return a modified context and event. +type Exporter func(context.Context, core.Event, label.Map) context.Context + +// SetExporter sets the global exporter function that handles all events. +// The exporter is called synchronously from the event call site, so it should +// return quickly so as not to hold up user code. +func SetExporter(e Exporter) { + core.SetExporter(core.Exporter(e)) +} + +// Log takes a message and a label list and combines them into a single event +// before delivering them to the exporter. +func Log(ctx context.Context, message string, labels ...label.Label) { + core.Export(ctx, core.MakeEvent([3]label.Label{ + keys.Msg.Of(message), + }, labels)) +} + +// IsLog returns true if the event was built by the Log function. +// It is intended to be used in exporters to identify the semantics of the +// event when deciding what to do with it. +func IsLog(ev core.Event) bool { + return ev.Label(0).Key() == keys.Msg +} + +// Error takes a message and a label list and combines them into a single event +// before delivering them to the exporter. It captures the error in the +// delivered event. +func Error(ctx context.Context, message string, err error, labels ...label.Label) { + core.Export(ctx, core.MakeEvent([3]label.Label{ + keys.Msg.Of(message), + keys.Err.Of(err), + }, labels)) +} + +// IsError returns true if the event was built by the Error function. +// It is intended to be used in exporters to identify the semantics of the +// event when deciding what to do with it. +func IsError(ev core.Event) bool { + return ev.Label(0).Key() == keys.Msg && + ev.Label(1).Key() == keys.Err +} + +// Metric sends a label event to the exporter with the supplied labels. +func Metric(ctx context.Context, labels ...label.Label) { + core.Export(ctx, core.MakeEvent([3]label.Label{ + keys.Metric.New(), + }, labels)) +} + +// IsMetric returns true if the event was built by the Metric function. +// It is intended to be used in exporters to identify the semantics of the +// event when deciding what to do with it. 
+func IsMetric(ev core.Event) bool { + return ev.Label(0).Key() == keys.Metric +} + +// Label sends a label event to the exporter with the supplied labels. +func Label(ctx context.Context, labels ...label.Label) context.Context { + return core.Export(ctx, core.MakeEvent([3]label.Label{ + keys.Label.New(), + }, labels)) +} + +// IsLabel returns true if the event was built by the Label function. +// It is intended to be used in exporters to identify the semantics of the +// event when deciding what to do with it. +func IsLabel(ev core.Event) bool { + return ev.Label(0).Key() == keys.Label +} + +// Start sends a span start event with the supplied label list to the exporter. +// It also returns a function that will end the span, which should normally be +// deferred. +func Start(ctx context.Context, name string, labels ...label.Label) (context.Context, func()) { + return core.ExportPair(ctx, + core.MakeEvent([3]label.Label{ + keys.Start.Of(name), + }, labels), + core.MakeEvent([3]label.Label{ + keys.End.New(), + }, nil)) +} + +// IsStart returns true if the event was built by the Start function. +// It is intended to be used in exporters to identify the semantics of the +// event when deciding what to do with it. +func IsStart(ev core.Event) bool { + return ev.Label(0).Key() == keys.Start +} + +// IsEnd returns true if the event was built by the End function. +// It is intended to be used in exporters to identify the semantics of the +// event when deciding what to do with it. +func IsEnd(ev core.Event) bool { + return ev.Label(0).Key() == keys.End +} + +// Detach returns a context without an associated span. +// This allows the creation of spans that are not children of the current span. +func Detach(ctx context.Context) context.Context { + return core.Export(ctx, core.MakeEvent([3]label.Label{ + keys.Detach.New(), + }, nil)) +} + +// IsDetach returns true if the event was built by the Detach function. +// It is intended to be used in exporters to identify the semantics of the +// event when deciding what to do with it. +func IsDetach(ev core.Event) bool { + return ev.Label(0).Key() == keys.Detach +} diff --git a/vendor/golang.org/x/tools/internal/event/keys/keys.go b/vendor/golang.org/x/tools/internal/event/keys/keys.go new file mode 100644 index 000000000..a02206e30 --- /dev/null +++ b/vendor/golang.org/x/tools/internal/event/keys/keys.go @@ -0,0 +1,564 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package keys + +import ( + "fmt" + "io" + "math" + "strconv" + + "golang.org/x/tools/internal/event/label" +) + +// Value represents a key for untyped values. +type Value struct { + name string + description string +} + +// New creates a new Key for untyped values. +func New(name, description string) *Value { + return &Value{name: name, description: description} +} + +func (k *Value) Name() string { return k.name } +func (k *Value) Description() string { return k.description } + +func (k *Value) Format(w io.Writer, buf []byte, l label.Label) { + fmt.Fprint(w, k.From(l)) +} + +// Get can be used to get a label for the key from a label.Map. +func (k *Value) Get(lm label.Map) interface{} { + if t := lm.Find(k); t.Valid() { + return k.From(t) + } + return nil +} + +// From can be used to get a value from a Label. +func (k *Value) From(t label.Label) interface{} { return t.UnpackValue() } + +// Of creates a new Label with this key and the supplied value. 
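// Illustrative sketch (not part of the vendored file; these are internal
// packages, so this only compiles from within the x/tools module): install an
// exporter, define a typed key, and emit a log event that carries a label.
// bytesKey is an assumed, illustrative key.
package main

import (
	"context"
	"fmt"

	"golang.org/x/tools/internal/event"
	"golang.org/x/tools/internal/event/core"
	"golang.org/x/tools/internal/event/keys"
	"golang.org/x/tools/internal/event/label"
)

var bytesKey = keys.NewInt("bytes", "number of bytes processed")

func main() {
	// The exporter is called synchronously for every event.
	event.SetExporter(func(ctx context.Context, ev core.Event, lm label.Map) context.Context {
		if event.IsLog(ev) {
			fmt.Println("log event, bytes =", bytesKey.Get(lm))
		}
		return ctx
	})

	ctx := context.Background()
	event.Log(ctx, "copied payload", bytesKey.Of(42))

	// Start returns a func that emits the matching end event when called.
	ctx, done := event.Start(ctx, "copy")
	defer done()
	_ = ctx
}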
+func (k *Value) Of(value interface{}) label.Label { return label.OfValue(k, value) } + +// Tag represents a key for tagging labels that have no value. +// These are used when the existence of the label is the entire information it +// carries, such as marking events to be of a specific kind, or from a specific +// package. +type Tag struct { + name string + description string +} + +// NewTag creates a new Key for tagging labels. +func NewTag(name, description string) *Tag { + return &Tag{name: name, description: description} +} + +func (k *Tag) Name() string { return k.name } +func (k *Tag) Description() string { return k.description } + +func (k *Tag) Format(w io.Writer, buf []byte, l label.Label) {} + +// New creates a new Label with this key. +func (k *Tag) New() label.Label { return label.OfValue(k, nil) } + +// Int represents a key +type Int struct { + name string + description string +} + +// NewInt creates a new Key for int values. +func NewInt(name, description string) *Int { + return &Int{name: name, description: description} +} + +func (k *Int) Name() string { return k.name } +func (k *Int) Description() string { return k.description } + +func (k *Int) Format(w io.Writer, buf []byte, l label.Label) { + w.Write(strconv.AppendInt(buf, int64(k.From(l)), 10)) +} + +// Of creates a new Label with this key and the supplied value. +func (k *Int) Of(v int) label.Label { return label.Of64(k, uint64(v)) } + +// Get can be used to get a label for the key from a label.Map. +func (k *Int) Get(lm label.Map) int { + if t := lm.Find(k); t.Valid() { + return k.From(t) + } + return 0 +} + +// From can be used to get a value from a Label. +func (k *Int) From(t label.Label) int { return int(t.Unpack64()) } + +// Int8 represents a key +type Int8 struct { + name string + description string +} + +// NewInt8 creates a new Key for int8 values. +func NewInt8(name, description string) *Int8 { + return &Int8{name: name, description: description} +} + +func (k *Int8) Name() string { return k.name } +func (k *Int8) Description() string { return k.description } + +func (k *Int8) Format(w io.Writer, buf []byte, l label.Label) { + w.Write(strconv.AppendInt(buf, int64(k.From(l)), 10)) +} + +// Of creates a new Label with this key and the supplied value. +func (k *Int8) Of(v int8) label.Label { return label.Of64(k, uint64(v)) } + +// Get can be used to get a label for the key from a label.Map. +func (k *Int8) Get(lm label.Map) int8 { + if t := lm.Find(k); t.Valid() { + return k.From(t) + } + return 0 +} + +// From can be used to get a value from a Label. +func (k *Int8) From(t label.Label) int8 { return int8(t.Unpack64()) } + +// Int16 represents a key +type Int16 struct { + name string + description string +} + +// NewInt16 creates a new Key for int16 values. +func NewInt16(name, description string) *Int16 { + return &Int16{name: name, description: description} +} + +func (k *Int16) Name() string { return k.name } +func (k *Int16) Description() string { return k.description } + +func (k *Int16) Format(w io.Writer, buf []byte, l label.Label) { + w.Write(strconv.AppendInt(buf, int64(k.From(l)), 10)) +} + +// Of creates a new Label with this key and the supplied value. +func (k *Int16) Of(v int16) label.Label { return label.Of64(k, uint64(v)) } + +// Get can be used to get a label for the key from a label.Map. +func (k *Int16) Get(lm label.Map) int16 { + if t := lm.Find(k); t.Valid() { + return k.From(t) + } + return 0 +} + +// From can be used to get a value from a Label. 
+func (k *Int16) From(t label.Label) int16 { return int16(t.Unpack64()) } + +// Int32 represents a key +type Int32 struct { + name string + description string +} + +// NewInt32 creates a new Key for int32 values. +func NewInt32(name, description string) *Int32 { + return &Int32{name: name, description: description} +} + +func (k *Int32) Name() string { return k.name } +func (k *Int32) Description() string { return k.description } + +func (k *Int32) Format(w io.Writer, buf []byte, l label.Label) { + w.Write(strconv.AppendInt(buf, int64(k.From(l)), 10)) +} + +// Of creates a new Label with this key and the supplied value. +func (k *Int32) Of(v int32) label.Label { return label.Of64(k, uint64(v)) } + +// Get can be used to get a label for the key from a label.Map. +func (k *Int32) Get(lm label.Map) int32 { + if t := lm.Find(k); t.Valid() { + return k.From(t) + } + return 0 +} + +// From can be used to get a value from a Label. +func (k *Int32) From(t label.Label) int32 { return int32(t.Unpack64()) } + +// Int64 represents a key +type Int64 struct { + name string + description string +} + +// NewInt64 creates a new Key for int64 values. +func NewInt64(name, description string) *Int64 { + return &Int64{name: name, description: description} +} + +func (k *Int64) Name() string { return k.name } +func (k *Int64) Description() string { return k.description } + +func (k *Int64) Format(w io.Writer, buf []byte, l label.Label) { + w.Write(strconv.AppendInt(buf, k.From(l), 10)) +} + +// Of creates a new Label with this key and the supplied value. +func (k *Int64) Of(v int64) label.Label { return label.Of64(k, uint64(v)) } + +// Get can be used to get a label for the key from a label.Map. +func (k *Int64) Get(lm label.Map) int64 { + if t := lm.Find(k); t.Valid() { + return k.From(t) + } + return 0 +} + +// From can be used to get a value from a Label. +func (k *Int64) From(t label.Label) int64 { return int64(t.Unpack64()) } + +// UInt represents a key +type UInt struct { + name string + description string +} + +// NewUInt creates a new Key for uint values. +func NewUInt(name, description string) *UInt { + return &UInt{name: name, description: description} +} + +func (k *UInt) Name() string { return k.name } +func (k *UInt) Description() string { return k.description } + +func (k *UInt) Format(w io.Writer, buf []byte, l label.Label) { + w.Write(strconv.AppendUint(buf, uint64(k.From(l)), 10)) +} + +// Of creates a new Label with this key and the supplied value. +func (k *UInt) Of(v uint) label.Label { return label.Of64(k, uint64(v)) } + +// Get can be used to get a label for the key from a label.Map. +func (k *UInt) Get(lm label.Map) uint { + if t := lm.Find(k); t.Valid() { + return k.From(t) + } + return 0 +} + +// From can be used to get a value from a Label. +func (k *UInt) From(t label.Label) uint { return uint(t.Unpack64()) } + +// UInt8 represents a key +type UInt8 struct { + name string + description string +} + +// NewUInt8 creates a new Key for uint8 values. +func NewUInt8(name, description string) *UInt8 { + return &UInt8{name: name, description: description} +} + +func (k *UInt8) Name() string { return k.name } +func (k *UInt8) Description() string { return k.description } + +func (k *UInt8) Format(w io.Writer, buf []byte, l label.Label) { + w.Write(strconv.AppendUint(buf, uint64(k.From(l)), 10)) +} + +// Of creates a new Label with this key and the supplied value. 
+func (k *UInt8) Of(v uint8) label.Label { return label.Of64(k, uint64(v)) } + +// Get can be used to get a label for the key from a label.Map. +func (k *UInt8) Get(lm label.Map) uint8 { + if t := lm.Find(k); t.Valid() { + return k.From(t) + } + return 0 +} + +// From can be used to get a value from a Label. +func (k *UInt8) From(t label.Label) uint8 { return uint8(t.Unpack64()) } + +// UInt16 represents a key +type UInt16 struct { + name string + description string +} + +// NewUInt16 creates a new Key for uint16 values. +func NewUInt16(name, description string) *UInt16 { + return &UInt16{name: name, description: description} +} + +func (k *UInt16) Name() string { return k.name } +func (k *UInt16) Description() string { return k.description } + +func (k *UInt16) Format(w io.Writer, buf []byte, l label.Label) { + w.Write(strconv.AppendUint(buf, uint64(k.From(l)), 10)) +} + +// Of creates a new Label with this key and the supplied value. +func (k *UInt16) Of(v uint16) label.Label { return label.Of64(k, uint64(v)) } + +// Get can be used to get a label for the key from a label.Map. +func (k *UInt16) Get(lm label.Map) uint16 { + if t := lm.Find(k); t.Valid() { + return k.From(t) + } + return 0 +} + +// From can be used to get a value from a Label. +func (k *UInt16) From(t label.Label) uint16 { return uint16(t.Unpack64()) } + +// UInt32 represents a key +type UInt32 struct { + name string + description string +} + +// NewUInt32 creates a new Key for uint32 values. +func NewUInt32(name, description string) *UInt32 { + return &UInt32{name: name, description: description} +} + +func (k *UInt32) Name() string { return k.name } +func (k *UInt32) Description() string { return k.description } + +func (k *UInt32) Format(w io.Writer, buf []byte, l label.Label) { + w.Write(strconv.AppendUint(buf, uint64(k.From(l)), 10)) +} + +// Of creates a new Label with this key and the supplied value. +func (k *UInt32) Of(v uint32) label.Label { return label.Of64(k, uint64(v)) } + +// Get can be used to get a label for the key from a label.Map. +func (k *UInt32) Get(lm label.Map) uint32 { + if t := lm.Find(k); t.Valid() { + return k.From(t) + } + return 0 +} + +// From can be used to get a value from a Label. +func (k *UInt32) From(t label.Label) uint32 { return uint32(t.Unpack64()) } + +// UInt64 represents a key +type UInt64 struct { + name string + description string +} + +// NewUInt64 creates a new Key for uint64 values. +func NewUInt64(name, description string) *UInt64 { + return &UInt64{name: name, description: description} +} + +func (k *UInt64) Name() string { return k.name } +func (k *UInt64) Description() string { return k.description } + +func (k *UInt64) Format(w io.Writer, buf []byte, l label.Label) { + w.Write(strconv.AppendUint(buf, k.From(l), 10)) +} + +// Of creates a new Label with this key and the supplied value. +func (k *UInt64) Of(v uint64) label.Label { return label.Of64(k, v) } + +// Get can be used to get a label for the key from a label.Map. +func (k *UInt64) Get(lm label.Map) uint64 { + if t := lm.Find(k); t.Valid() { + return k.From(t) + } + return 0 +} + +// From can be used to get a value from a Label. +func (k *UInt64) From(t label.Label) uint64 { return t.Unpack64() } + +// Float32 represents a key +type Float32 struct { + name string + description string +} + +// NewFloat32 creates a new Key for float32 values. 
+func NewFloat32(name, description string) *Float32 {
+	return &Float32{name: name, description: description}
+}
+
+func (k *Float32) Name() string        { return k.name }
+func (k *Float32) Description() string { return k.description }
+
+func (k *Float32) Format(w io.Writer, buf []byte, l label.Label) {
+	w.Write(strconv.AppendFloat(buf, float64(k.From(l)), 'E', -1, 32))
+}
+
+// Of creates a new Label with this key and the supplied value.
+func (k *Float32) Of(v float32) label.Label {
+	return label.Of64(k, uint64(math.Float32bits(v)))
+}
+
+// Get can be used to get a label for the key from a label.Map.
+func (k *Float32) Get(lm label.Map) float32 {
+	if t := lm.Find(k); t.Valid() {
+		return k.From(t)
+	}
+	return 0
+}
+
+// From can be used to get a value from a Label.
+func (k *Float32) From(t label.Label) float32 {
+	return math.Float32frombits(uint32(t.Unpack64()))
+}
+
+// Float64 represents a key
+type Float64 struct {
+	name        string
+	description string
+}
+
+// NewFloat64 creates a new Key for float64 values.
+func NewFloat64(name, description string) *Float64 {
+	return &Float64{name: name, description: description}
+}
+
+func (k *Float64) Name() string        { return k.name }
+func (k *Float64) Description() string { return k.description }
+
+func (k *Float64) Format(w io.Writer, buf []byte, l label.Label) {
+	w.Write(strconv.AppendFloat(buf, k.From(l), 'E', -1, 64))
+}
+
+// Of creates a new Label with this key and the supplied value.
+func (k *Float64) Of(v float64) label.Label {
+	return label.Of64(k, math.Float64bits(v))
+}
+
+// Get can be used to get a label for the key from a label.Map.
+func (k *Float64) Get(lm label.Map) float64 {
+	if t := lm.Find(k); t.Valid() {
+		return k.From(t)
+	}
+	return 0
+}
+
+// From can be used to get a value from a Label.
+func (k *Float64) From(t label.Label) float64 {
+	return math.Float64frombits(t.Unpack64())
+}
+
+// String represents a key
+type String struct {
+	name        string
+	description string
+}
+
+// NewString creates a new Key for string values.
+func NewString(name, description string) *String {
+	return &String{name: name, description: description}
+}
+
+func (k *String) Name() string        { return k.name }
+func (k *String) Description() string { return k.description }
+
+func (k *String) Format(w io.Writer, buf []byte, l label.Label) {
+	w.Write(strconv.AppendQuote(buf, k.From(l)))
+}
+
+// Of creates a new Label with this key and the supplied value.
+func (k *String) Of(v string) label.Label { return label.OfString(k, v) }
+
+// Get can be used to get a label for the key from a label.Map.
+func (k *String) Get(lm label.Map) string {
+	if t := lm.Find(k); t.Valid() {
+		return k.From(t)
+	}
+	return ""
+}
+
+// From can be used to get a value from a Label.
+func (k *String) From(t label.Label) string { return t.UnpackString() }
+
+// Boolean represents a key
+type Boolean struct {
+	name        string
+	description string
+}
+
+// NewBoolean creates a new Key for bool values.
+func NewBoolean(name, description string) *Boolean {
+	return &Boolean{name: name, description: description}
+}
+
+func (k *Boolean) Name() string        { return k.name }
+func (k *Boolean) Description() string { return k.description }
+
+func (k *Boolean) Format(w io.Writer, buf []byte, l label.Label) {
+	w.Write(strconv.AppendBool(buf, k.From(l)))
+}
+
+// Of creates a new Label with this key and the supplied value.
+func (k *Boolean) Of(v bool) label.Label {
+	if v {
+		return label.Of64(k, 1)
+	}
+	return label.Of64(k, 0)
+}
+
+// Get can be used to get a label for the key from a label.Map.
+func (k *Boolean) Get(lm label.Map) bool {
+	if t := lm.Find(k); t.Valid() {
+		return k.From(t)
+	}
+	return false
+}
+
+// From can be used to get a value from a Label.
+func (k *Boolean) From(t label.Label) bool { return t.Unpack64() > 0 }
+
+// Error represents a key
+type Error struct {
+	name        string
+	description string
+}
+
+// NewError creates a new Key for error values.
+func NewError(name, description string) *Error {
+	return &Error{name: name, description: description}
+}
+
+func (k *Error) Name() string        { return k.name }
+func (k *Error) Description() string { return k.description }
+
+func (k *Error) Format(w io.Writer, buf []byte, l label.Label) {
+	io.WriteString(w, k.From(l).Error())
+}
+
+// Of creates a new Label with this key and the supplied value.
+func (k *Error) Of(v error) label.Label { return label.OfValue(k, v) }
+
+// Get can be used to get a label for the key from a label.Map.
+func (k *Error) Get(lm label.Map) error {
+	if t := lm.Find(k); t.Valid() {
+		return k.From(t)
+	}
+	return nil
+}
+
+// From can be used to get a value from a Label.
+func (k *Error) From(t label.Label) error {
+	err, _ := t.UnpackValue().(error)
+	return err
+}
diff --git a/vendor/golang.org/x/tools/internal/event/keys/standard.go b/vendor/golang.org/x/tools/internal/event/keys/standard.go
new file mode 100644
index 000000000..7e9586659
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/event/keys/standard.go
@@ -0,0 +1,22 @@
+// Copyright 2020 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package keys
+
+var (
+	// Msg is a key used to add message strings to label lists.
+	Msg = NewString("message", "a readable message")
+	// Label is a key used to indicate an event adds labels to the context.
+	Label = NewTag("label", "a label context marker")
+	// Start is used for things like traces that have a name.
+	Start = NewString("start", "span start")
+	// End is a key used to indicate an event marks the end of a span.
+	End = NewTag("end", "a span end marker")
+	// Detach is a key used to indicate an event detaches the context from its span.
+	Detach = NewTag("detach", "a span detach marker")
+	// Err is a key used to add error values to label lists.
+	Err = NewError("error", "an error that occurred")
+	// Metric is a key used to indicate an event records metrics.
+	Metric = NewTag("metric", "a metric event marker")
+)
diff --git a/vendor/golang.org/x/tools/internal/event/label/label.go b/vendor/golang.org/x/tools/internal/event/label/label.go
new file mode 100644
index 000000000..0f526e1f9
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/event/label/label.go
@@ -0,0 +1,215 @@
+// Copyright 2019 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package label
+
+import (
+	"fmt"
+	"io"
+	"reflect"
+	"unsafe"
+)
+
+// Key is used as the identity of a Label.
+// Keys are intended to be compared by pointer only, the name should be unique
+// for communicating with external systems, but it is not required or enforced.
+type Key interface {
+	// Name returns the key name.
+	Name() string
+	// Description returns a string that can be used to describe the value.
+ Description() string + + // Format is used in formatting to append the value of the label to the + // supplied buffer. + // The formatter may use the supplied buf as a scratch area to avoid + // allocations. + Format(w io.Writer, buf []byte, l Label) +} + +// Label holds a key and value pair. +// It is normally used when passing around lists of labels. +type Label struct { + key Key + packed uint64 + untyped interface{} +} + +// Map is the interface to a collection of Labels indexed by key. +type Map interface { + // Find returns the label that matches the supplied key. + Find(key Key) Label +} + +// List is the interface to something that provides an iterable +// list of labels. +// Iteration should start from 0 and continue until Valid returns false. +type List interface { + // Valid returns true if the index is within range for the list. + // It does not imply the label at that index will itself be valid. + Valid(index int) bool + // Label returns the label at the given index. + Label(index int) Label +} + +// list implements LabelList for a list of Labels. +type list struct { + labels []Label +} + +// filter wraps a LabelList filtering out specific labels. +type filter struct { + keys []Key + underlying List +} + +// listMap implements LabelMap for a simple list of labels. +type listMap struct { + labels []Label +} + +// mapChain implements LabelMap for a list of underlying LabelMap. +type mapChain struct { + maps []Map +} + +// OfValue creates a new label from the key and value. +// This method is for implementing new key types, label creation should +// normally be done with the Of method of the key. +func OfValue(k Key, value interface{}) Label { return Label{key: k, untyped: value} } + +// UnpackValue assumes the label was built using LabelOfValue and returns the value +// that was passed to that constructor. +// This method is for implementing new key types, for type safety normal +// access should be done with the From method of the key. +func (t Label) UnpackValue() interface{} { return t.untyped } + +// Of64 creates a new label from a key and a uint64. This is often +// used for non uint64 values that can be packed into a uint64. +// This method is for implementing new key types, label creation should +// normally be done with the Of method of the key. +func Of64(k Key, v uint64) Label { return Label{key: k, packed: v} } + +// Unpack64 assumes the label was built using LabelOf64 and returns the value that +// was passed to that constructor. +// This method is for implementing new key types, for type safety normal +// access should be done with the From method of the key. +func (t Label) Unpack64() uint64 { return t.packed } + +type stringptr unsafe.Pointer + +// OfString creates a new label from a key and a string. +// This method is for implementing new key types, label creation should +// normally be done with the Of method of the key. +func OfString(k Key, v string) Label { + hdr := (*reflect.StringHeader)(unsafe.Pointer(&v)) + return Label{ + key: k, + packed: uint64(hdr.Len), + untyped: stringptr(hdr.Data), + } +} + +// UnpackString assumes the label was built using LabelOfString and returns the +// value that was passed to that constructor. +// This method is for implementing new key types, for type safety normal +// access should be done with the From method of the key. 
+func (t Label) UnpackString() string { + var v string + hdr := (*reflect.StringHeader)(unsafe.Pointer(&v)) + hdr.Data = uintptr(t.untyped.(stringptr)) + hdr.Len = int(t.packed) + return v +} + +// Valid returns true if the Label is a valid one (it has a key). +func (t Label) Valid() bool { return t.key != nil } + +// Key returns the key of this Label. +func (t Label) Key() Key { return t.key } + +// Format is used for debug printing of labels. +func (t Label) Format(f fmt.State, r rune) { + if !t.Valid() { + io.WriteString(f, `nil`) + return + } + io.WriteString(f, t.Key().Name()) + io.WriteString(f, "=") + var buf [128]byte + t.Key().Format(f, buf[:0], t) +} + +func (l *list) Valid(index int) bool { + return index >= 0 && index < len(l.labels) +} + +func (l *list) Label(index int) Label { + return l.labels[index] +} + +func (f *filter) Valid(index int) bool { + return f.underlying.Valid(index) +} + +func (f *filter) Label(index int) Label { + l := f.underlying.Label(index) + for _, f := range f.keys { + if l.Key() == f { + return Label{} + } + } + return l +} + +func (lm listMap) Find(key Key) Label { + for _, l := range lm.labels { + if l.Key() == key { + return l + } + } + return Label{} +} + +func (c mapChain) Find(key Key) Label { + for _, src := range c.maps { + l := src.Find(key) + if l.Valid() { + return l + } + } + return Label{} +} + +var emptyList = &list{} + +func NewList(labels ...Label) List { + if len(labels) == 0 { + return emptyList + } + return &list{labels: labels} +} + +func Filter(l List, keys ...Key) List { + if len(keys) == 0 { + return l + } + return &filter{keys: keys, underlying: l} +} + +func NewMap(labels ...Label) Map { + return listMap{labels: labels} +} + +func MergeMaps(srcs ...Map) Map { + var nonNil []Map + for _, src := range srcs { + if src != nil { + nonNil = append(nonNil, src) + } + } + if len(nonNil) == 1 { + return nonNil[0] + } + return mapChain{maps: nonNil} +} diff --git a/vendor/golang.org/x/tools/internal/fastwalk/fastwalk.go b/vendor/golang.org/x/tools/internal/fastwalk/fastwalk.go new file mode 100644 index 000000000..9887f7e7a --- /dev/null +++ b/vendor/golang.org/x/tools/internal/fastwalk/fastwalk.go @@ -0,0 +1,196 @@ +// Copyright 2016 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package fastwalk provides a faster version of filepath.Walk for file system +// scanning tools. +package fastwalk + +import ( + "errors" + "os" + "path/filepath" + "runtime" + "sync" +) + +// ErrTraverseLink is used as a return value from WalkFuncs to indicate that the +// symlink named in the call may be traversed. +var ErrTraverseLink = errors.New("fastwalk: traverse symlink, assuming target is a directory") + +// ErrSkipFiles is a used as a return value from WalkFuncs to indicate that the +// callback should not be called for any other files in the current directory. +// Child directories will still be traversed. +var ErrSkipFiles = errors.New("fastwalk: skip remaining files in directory") + +// Walk is a faster implementation of filepath.Walk. +// +// filepath.Walk's design necessarily calls os.Lstat on each file, +// even if the caller needs less info. +// Many tools need only the type of each file. +// On some platforms, this information is provided directly by the readdir +// system call, avoiding the need to stat each file individually. +// fastwalk_unix.go contains a fork of the syscall routines. 
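The keys and label packages vendored above form a small typed key/value API: each key type knows how to pack, unpack, and format its value, and a label.Map indexes labels by key. The sketch below is based only on the signatures shown above and is illustrative: both packages are internal to golang.org/x/tools, so it compiles only from within that module, and the key names are invented for the example.

package main

import (
	"fmt"

	"golang.org/x/tools/internal/event/keys"
	"golang.org/x/tools/internal/event/label"
)

var (
	// Each key carries a name, a description, and typed Of/Get/From helpers.
	attempts = keys.NewInt("attempts", "number of retries performed")
	endpoint = keys.NewString("endpoint", "URL that was called") // example keys, not part of x/tools
)

func main() {
	// Of packs a typed value into a label.Label; NewMap indexes labels by key.
	lm := label.NewMap(attempts.Of(3), endpoint.Of("https://example.com"))

	// Get finds the key in the map and unpacks the typed value again.
	fmt.Println(attempts.Get(lm)) // 3

	// Find returns the raw label; Valid reports whether the key was present.
	if l := lm.Find(endpoint); l.Valid() {
		fmt.Println(l.Key().Name(), "=", endpoint.From(l)) // endpoint = https://example.com
	}
}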
+// +// See golang.org/issue/16399 +// +// Walk walks the file tree rooted at root, calling walkFn for +// each file or directory in the tree, including root. +// +// If fastWalk returns filepath.SkipDir, the directory is skipped. +// +// Unlike filepath.Walk: +// * file stat calls must be done by the user. +// The only provided metadata is the file type, which does not include +// any permission bits. +// * multiple goroutines stat the filesystem concurrently. The provided +// walkFn must be safe for concurrent use. +// * fastWalk can follow symlinks if walkFn returns the TraverseLink +// sentinel error. It is the walkFn's responsibility to prevent +// fastWalk from going into symlink cycles. +func Walk(root string, walkFn func(path string, typ os.FileMode) error) error { + // TODO(bradfitz): make numWorkers configurable? We used a + // minimum of 4 to give the kernel more info about multiple + // things we want, in hopes its I/O scheduling can take + // advantage of that. Hopefully most are in cache. Maybe 4 is + // even too low of a minimum. Profile more. + numWorkers := 4 + if n := runtime.NumCPU(); n > numWorkers { + numWorkers = n + } + + // Make sure to wait for all workers to finish, otherwise + // walkFn could still be called after returning. This Wait call + // runs after close(e.donec) below. + var wg sync.WaitGroup + defer wg.Wait() + + w := &walker{ + fn: walkFn, + enqueuec: make(chan walkItem, numWorkers), // buffered for performance + workc: make(chan walkItem, numWorkers), // buffered for performance + donec: make(chan struct{}), + + // buffered for correctness & not leaking goroutines: + resc: make(chan error, numWorkers), + } + defer close(w.donec) + + for i := 0; i < numWorkers; i++ { + wg.Add(1) + go w.doWork(&wg) + } + todo := []walkItem{{dir: root}} + out := 0 + for { + workc := w.workc + var workItem walkItem + if len(todo) == 0 { + workc = nil + } else { + workItem = todo[len(todo)-1] + } + select { + case workc <- workItem: + todo = todo[:len(todo)-1] + out++ + case it := <-w.enqueuec: + todo = append(todo, it) + case err := <-w.resc: + out-- + if err != nil { + return err + } + if out == 0 && len(todo) == 0 { + // It's safe to quit here, as long as the buffered + // enqueue channel isn't also readable, which might + // happen if the worker sends both another unit of + // work and its result before the other select was + // scheduled and both w.resc and w.enqueuec were + // readable. + select { + case it := <-w.enqueuec: + todo = append(todo, it) + default: + return nil + } + } + } + } +} + +// doWork reads directories as instructed (via workc) and runs the +// user's callback function. 
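fastwalk.Walk's behaviour is driven entirely by the callback's return values: fastwalk.ErrTraverseLink asks the walker to follow a symlink, fastwalk.ErrSkipFiles skips the remaining files in the current directory, and filepath.SkipDir prunes a directory. A minimal caller sketch follows; the root path is an arbitrary example and, since the package is internal to golang.org/x/tools, the snippet is illustrative rather than something an external module can import.

package main

import (
	"fmt"
	"os"

	"golang.org/x/tools/internal/fastwalk"
)

func main() {
	// The callback runs from several goroutines at once, so it must be safe
	// for concurrent use.
	err := fastwalk.Walk("/usr/local/go/src", func(path string, typ os.FileMode) error {
		if typ == os.ModeSymlink {
			// Follow the symlink; guarding against symlink cycles is the
			// callback's responsibility.
			return fastwalk.ErrTraverseLink
		}
		if typ.IsRegular() {
			fmt.Println(path)
			// Stop visiting files in this directory but keep descending
			// into its subdirectories.
			return fastwalk.ErrSkipFiles
		}
		return nil
	})
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
	}
}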
+func (w *walker) doWork(wg *sync.WaitGroup) { + defer wg.Done() + for { + select { + case <-w.donec: + return + case it := <-w.workc: + select { + case <-w.donec: + return + case w.resc <- w.walk(it.dir, !it.callbackDone): + } + } + } +} + +type walker struct { + fn func(path string, typ os.FileMode) error + + donec chan struct{} // closed on fastWalk's return + workc chan walkItem // to workers + enqueuec chan walkItem // from workers + resc chan error // from workers +} + +type walkItem struct { + dir string + callbackDone bool // callback already called; don't do it again +} + +func (w *walker) enqueue(it walkItem) { + select { + case w.enqueuec <- it: + case <-w.donec: + } +} + +func (w *walker) onDirEnt(dirName, baseName string, typ os.FileMode) error { + joined := dirName + string(os.PathSeparator) + baseName + if typ == os.ModeDir { + w.enqueue(walkItem{dir: joined}) + return nil + } + + err := w.fn(joined, typ) + if typ == os.ModeSymlink { + if err == ErrTraverseLink { + // Set callbackDone so we don't call it twice for both the + // symlink-as-symlink and the symlink-as-directory later: + w.enqueue(walkItem{dir: joined, callbackDone: true}) + return nil + } + if err == filepath.SkipDir { + // Permit SkipDir on symlinks too. + return nil + } + } + return err +} + +func (w *walker) walk(root string, runUserCallback bool) error { + if runUserCallback { + err := w.fn(root, os.ModeDir) + if err == filepath.SkipDir { + return nil + } + if err != nil { + return err + } + } + + return readDir(root, w.onDirEnt) +} diff --git a/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_dirent_fileno.go b/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_dirent_fileno.go new file mode 100644 index 000000000..d58595dbd --- /dev/null +++ b/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_dirent_fileno.go @@ -0,0 +1,14 @@ +// Copyright 2016 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:build freebsd || openbsd || netbsd +// +build freebsd openbsd netbsd + +package fastwalk + +import "syscall" + +func direntInode(dirent *syscall.Dirent) uint64 { + return uint64(dirent.Fileno) +} diff --git a/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_dirent_ino.go b/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_dirent_ino.go new file mode 100644 index 000000000..ea02b9ebf --- /dev/null +++ b/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_dirent_ino.go @@ -0,0 +1,15 @@ +// Copyright 2016 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:build (linux || darwin) && !appengine +// +build linux darwin +// +build !appengine + +package fastwalk + +import "syscall" + +func direntInode(dirent *syscall.Dirent) uint64 { + return uint64(dirent.Ino) +} diff --git a/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_dirent_namlen_bsd.go b/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_dirent_namlen_bsd.go new file mode 100644 index 000000000..d5c9c321e --- /dev/null +++ b/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_dirent_namlen_bsd.go @@ -0,0 +1,14 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +//go:build darwin || freebsd || openbsd || netbsd +// +build darwin freebsd openbsd netbsd + +package fastwalk + +import "syscall" + +func direntNamlen(dirent *syscall.Dirent) uint64 { + return uint64(dirent.Namlen) +} diff --git a/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_dirent_namlen_linux.go b/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_dirent_namlen_linux.go new file mode 100644 index 000000000..c82e57df8 --- /dev/null +++ b/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_dirent_namlen_linux.go @@ -0,0 +1,29 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:build linux && !appengine +// +build linux,!appengine + +package fastwalk + +import ( + "bytes" + "syscall" + "unsafe" +) + +func direntNamlen(dirent *syscall.Dirent) uint64 { + const fixedHdr = uint16(unsafe.Offsetof(syscall.Dirent{}.Name)) + nameBuf := (*[unsafe.Sizeof(dirent.Name)]byte)(unsafe.Pointer(&dirent.Name[0])) + const nameBufLen = uint16(len(nameBuf)) + limit := dirent.Reclen - fixedHdr + if limit > nameBufLen { + limit = nameBufLen + } + nameLen := bytes.IndexByte(nameBuf[:limit], 0) + if nameLen < 0 { + panic("failed to find terminating 0 byte in dirent") + } + return uint64(nameLen) +} diff --git a/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_portable.go b/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_portable.go new file mode 100644 index 000000000..085d31160 --- /dev/null +++ b/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_portable.go @@ -0,0 +1,38 @@ +// Copyright 2016 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:build appengine || (!linux && !darwin && !freebsd && !openbsd && !netbsd) +// +build appengine !linux,!darwin,!freebsd,!openbsd,!netbsd + +package fastwalk + +import ( + "io/ioutil" + "os" +) + +// readDir calls fn for each directory entry in dirName. +// It does not descend into directories or follow symlinks. +// If fn returns a non-nil error, readDir returns with that error +// immediately. +func readDir(dirName string, fn func(dirName, entName string, typ os.FileMode) error) error { + fis, err := ioutil.ReadDir(dirName) + if err != nil { + return err + } + skipFiles := false + for _, fi := range fis { + if fi.Mode().IsRegular() && skipFiles { + continue + } + if err := fn(dirName, fi.Name(), fi.Mode()&os.ModeType); err != nil { + if err == ErrSkipFiles { + skipFiles = true + continue + } + return err + } + } + return nil +} diff --git a/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_unix.go b/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_unix.go new file mode 100644 index 000000000..58bd87841 --- /dev/null +++ b/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_unix.go @@ -0,0 +1,153 @@ +// Copyright 2016 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:build (linux || darwin || freebsd || openbsd || netbsd) && !appengine +// +build linux darwin freebsd openbsd netbsd +// +build !appengine + +package fastwalk + +import ( + "fmt" + "os" + "syscall" + "unsafe" +) + +const blockSize = 8 << 10 + +// unknownFileMode is a sentinel (and bogus) os.FileMode +// value used to represent a syscall.DT_UNKNOWN Dirent.Type. 
+const unknownFileMode os.FileMode = os.ModeNamedPipe | os.ModeSocket | os.ModeDevice + +func readDir(dirName string, fn func(dirName, entName string, typ os.FileMode) error) error { + fd, err := open(dirName, 0, 0) + if err != nil { + return &os.PathError{Op: "open", Path: dirName, Err: err} + } + defer syscall.Close(fd) + + // The buffer must be at least a block long. + buf := make([]byte, blockSize) // stack-allocated; doesn't escape + bufp := 0 // starting read position in buf + nbuf := 0 // end valid data in buf + skipFiles := false + for { + if bufp >= nbuf { + bufp = 0 + nbuf, err = readDirent(fd, buf) + if err != nil { + return os.NewSyscallError("readdirent", err) + } + if nbuf <= 0 { + return nil + } + } + consumed, name, typ := parseDirEnt(buf[bufp:nbuf]) + bufp += consumed + if name == "" || name == "." || name == ".." { + continue + } + // Fallback for filesystems (like old XFS) that don't + // support Dirent.Type and have DT_UNKNOWN (0) there + // instead. + if typ == unknownFileMode { + fi, err := os.Lstat(dirName + "/" + name) + if err != nil { + // It got deleted in the meantime. + if os.IsNotExist(err) { + continue + } + return err + } + typ = fi.Mode() & os.ModeType + } + if skipFiles && typ.IsRegular() { + continue + } + if err := fn(dirName, name, typ); err != nil { + if err == ErrSkipFiles { + skipFiles = true + continue + } + return err + } + } +} + +func parseDirEnt(buf []byte) (consumed int, name string, typ os.FileMode) { + // golang.org/issue/37269 + dirent := &syscall.Dirent{} + copy((*[unsafe.Sizeof(syscall.Dirent{})]byte)(unsafe.Pointer(dirent))[:], buf) + if v := unsafe.Offsetof(dirent.Reclen) + unsafe.Sizeof(dirent.Reclen); uintptr(len(buf)) < v { + panic(fmt.Sprintf("buf size of %d smaller than dirent header size %d", len(buf), v)) + } + if len(buf) < int(dirent.Reclen) { + panic(fmt.Sprintf("buf size %d < record length %d", len(buf), dirent.Reclen)) + } + consumed = int(dirent.Reclen) + if direntInode(dirent) == 0 { // File absent in directory. + return + } + switch dirent.Type { + case syscall.DT_REG: + typ = 0 + case syscall.DT_DIR: + typ = os.ModeDir + case syscall.DT_LNK: + typ = os.ModeSymlink + case syscall.DT_BLK: + typ = os.ModeDevice + case syscall.DT_FIFO: + typ = os.ModeNamedPipe + case syscall.DT_SOCK: + typ = os.ModeSocket + case syscall.DT_UNKNOWN: + typ = unknownFileMode + default: + // Skip weird things. + // It's probably a DT_WHT (http://lwn.net/Articles/325369/) + // or something. Revisit if/when this package is moved outside + // of goimports. goimports only cares about regular files, + // symlinks, and directories. + return + } + + nameBuf := (*[unsafe.Sizeof(dirent.Name)]byte)(unsafe.Pointer(&dirent.Name[0])) + nameLen := direntNamlen(dirent) + + // Special cases for common things: + if nameLen == 1 && nameBuf[0] == '.' { + name = "." + } else if nameLen == 2 && nameBuf[0] == '.' && nameBuf[1] == '.' { + name = ".." + } else { + name = string(nameBuf[:nameLen]) + } + return +} + +// According to https://golang.org/doc/go1.14#runtime +// A consequence of the implementation of preemption is that on Unix systems, including Linux and macOS +// systems, programs built with Go 1.14 will receive more signals than programs built with earlier releases. +// +// This causes syscall.Open and syscall.ReadDirent sometimes fail with EINTR errors. +// We need to retry in this case. 
+func open(path string, mode int, perm uint32) (fd int, err error) { + for { + fd, err := syscall.Open(path, mode, perm) + if err != syscall.EINTR { + return fd, err + } + } +} + +func readDirent(fd int, buf []byte) (n int, err error) { + for { + nbuf, err := syscall.ReadDirent(fd, buf) + if err != syscall.EINTR { + return nbuf, err + } + } +} diff --git a/vendor/golang.org/x/tools/internal/gocommand/invoke.go b/vendor/golang.org/x/tools/internal/gocommand/invoke.go new file mode 100644 index 000000000..8659a0c5d --- /dev/null +++ b/vendor/golang.org/x/tools/internal/gocommand/invoke.go @@ -0,0 +1,273 @@ +// Copyright 2020 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package gocommand is a helper for calling the go command. +package gocommand + +import ( + "bytes" + "context" + "fmt" + exec "golang.org/x/sys/execabs" + "io" + "os" + "regexp" + "strconv" + "strings" + "sync" + "time" + + "golang.org/x/tools/internal/event" +) + +// An Runner will run go command invocations and serialize +// them if it sees a concurrency error. +type Runner struct { + // once guards the runner initialization. + once sync.Once + + // inFlight tracks available workers. + inFlight chan struct{} + + // serialized guards the ability to run a go command serially, + // to avoid deadlocks when claiming workers. + serialized chan struct{} +} + +const maxInFlight = 10 + +func (runner *Runner) initialize() { + runner.once.Do(func() { + runner.inFlight = make(chan struct{}, maxInFlight) + runner.serialized = make(chan struct{}, 1) + }) +} + +// 1.13: go: updates to go.mod needed, but contents have changed +// 1.14: go: updating go.mod: existing contents have changed since last read +var modConcurrencyError = regexp.MustCompile(`go:.*go.mod.*contents have changed`) + +// Run is a convenience wrapper around RunRaw. +// It returns only stdout and a "friendly" error. +func (runner *Runner) Run(ctx context.Context, inv Invocation) (*bytes.Buffer, error) { + stdout, _, friendly, _ := runner.RunRaw(ctx, inv) + return stdout, friendly +} + +// RunPiped runs the invocation serially, always waiting for any concurrent +// invocations to complete first. +func (runner *Runner) RunPiped(ctx context.Context, inv Invocation, stdout, stderr io.Writer) error { + _, err := runner.runPiped(ctx, inv, stdout, stderr) + return err +} + +// RunRaw runs the invocation, serializing requests only if they fight over +// go.mod changes. +func (runner *Runner) RunRaw(ctx context.Context, inv Invocation) (*bytes.Buffer, *bytes.Buffer, error, error) { + // Make sure the runner is always initialized. + runner.initialize() + + // First, try to run the go command concurrently. + stdout, stderr, friendlyErr, err := runner.runConcurrent(ctx, inv) + + // If we encounter a load concurrency error, we need to retry serially. + if friendlyErr == nil || !modConcurrencyError.MatchString(friendlyErr.Error()) { + return stdout, stderr, friendlyErr, err + } + event.Error(ctx, "Load concurrency error, will retry serially", err) + + // Run serially by calling runPiped. + stdout.Reset() + stderr.Reset() + friendlyErr, err = runner.runPiped(ctx, inv, stdout, stderr) + return stdout, stderr, friendlyErr, err +} + +func (runner *Runner) runConcurrent(ctx context.Context, inv Invocation) (*bytes.Buffer, *bytes.Buffer, error, error) { + // Wait for 1 worker to become available. 
+ select { + case <-ctx.Done(): + return nil, nil, nil, ctx.Err() + case runner.inFlight <- struct{}{}: + defer func() { <-runner.inFlight }() + } + + stdout, stderr := &bytes.Buffer{}, &bytes.Buffer{} + friendlyErr, err := inv.runWithFriendlyError(ctx, stdout, stderr) + return stdout, stderr, friendlyErr, err +} + +func (runner *Runner) runPiped(ctx context.Context, inv Invocation, stdout, stderr io.Writer) (error, error) { + // Make sure the runner is always initialized. + runner.initialize() + + // Acquire the serialization lock. This avoids deadlocks between two + // runPiped commands. + select { + case <-ctx.Done(): + return nil, ctx.Err() + case runner.serialized <- struct{}{}: + defer func() { <-runner.serialized }() + } + + // Wait for all in-progress go commands to return before proceeding, + // to avoid load concurrency errors. + for i := 0; i < maxInFlight; i++ { + select { + case <-ctx.Done(): + return nil, ctx.Err() + case runner.inFlight <- struct{}{}: + // Make sure we always "return" any workers we took. + defer func() { <-runner.inFlight }() + } + } + + return inv.runWithFriendlyError(ctx, stdout, stderr) +} + +// An Invocation represents a call to the go command. +type Invocation struct { + Verb string + Args []string + BuildFlags []string + ModFlag string + ModFile string + Overlay string + // If CleanEnv is set, the invocation will run only with the environment + // in Env, not starting with os.Environ. + CleanEnv bool + Env []string + WorkingDir string + Logf func(format string, args ...interface{}) +} + +func (i *Invocation) runWithFriendlyError(ctx context.Context, stdout, stderr io.Writer) (friendlyError error, rawError error) { + rawError = i.run(ctx, stdout, stderr) + if rawError != nil { + friendlyError = rawError + // Check for 'go' executable not being found. + if ee, ok := rawError.(*exec.Error); ok && ee.Err == exec.ErrNotFound { + friendlyError = fmt.Errorf("go command required, not found: %v", ee) + } + if ctx.Err() != nil { + friendlyError = ctx.Err() + } + friendlyError = fmt.Errorf("err: %v: stderr: %s", friendlyError, stderr) + } + return +} + +func (i *Invocation) run(ctx context.Context, stdout, stderr io.Writer) error { + log := i.Logf + if log == nil { + log = func(string, ...interface{}) {} + } + + goArgs := []string{i.Verb} + + appendModFile := func() { + if i.ModFile != "" { + goArgs = append(goArgs, "-modfile="+i.ModFile) + } + } + appendModFlag := func() { + if i.ModFlag != "" { + goArgs = append(goArgs, "-mod="+i.ModFlag) + } + } + appendOverlayFlag := func() { + if i.Overlay != "" { + goArgs = append(goArgs, "-overlay="+i.Overlay) + } + } + + switch i.Verb { + case "env", "version": + goArgs = append(goArgs, i.Args...) + case "mod": + // mod needs the sub-verb before flags. + goArgs = append(goArgs, i.Args[0]) + appendModFile() + goArgs = append(goArgs, i.Args[1:]...) + case "get": + goArgs = append(goArgs, i.BuildFlags...) + appendModFile() + goArgs = append(goArgs, i.Args...) + + default: // notably list and build. + goArgs = append(goArgs, i.BuildFlags...) + appendModFile() + appendModFlag() + appendOverlayFlag() + goArgs = append(goArgs, i.Args...) + } + cmd := exec.Command("go", goArgs...) + cmd.Stdout = stdout + cmd.Stderr = stderr + // On darwin the cwd gets resolved to the real path, which breaks anything that + // expects the working directory to keep the original path, including the + // go command when dealing with modules. 
+ // The Go stdlib has a special feature where if the cwd and the PWD are the + // same node then it trusts the PWD, so by setting it in the env for the child + // process we fix up all the paths returned by the go command. + if !i.CleanEnv { + cmd.Env = os.Environ() + } + cmd.Env = append(cmd.Env, i.Env...) + if i.WorkingDir != "" { + cmd.Env = append(cmd.Env, "PWD="+i.WorkingDir) + cmd.Dir = i.WorkingDir + } + defer func(start time.Time) { log("%s for %v", time.Since(start), cmdDebugStr(cmd)) }(time.Now()) + + return runCmdContext(ctx, cmd) +} + +// runCmdContext is like exec.CommandContext except it sends os.Interrupt +// before os.Kill. +func runCmdContext(ctx context.Context, cmd *exec.Cmd) error { + if err := cmd.Start(); err != nil { + return err + } + resChan := make(chan error, 1) + go func() { + resChan <- cmd.Wait() + }() + + select { + case err := <-resChan: + return err + case <-ctx.Done(): + } + // Cancelled. Interrupt and see if it ends voluntarily. + cmd.Process.Signal(os.Interrupt) + select { + case err := <-resChan: + return err + case <-time.After(time.Second): + } + // Didn't shut down in response to interrupt. Kill it hard. + cmd.Process.Kill() + return <-resChan +} + +func cmdDebugStr(cmd *exec.Cmd) string { + env := make(map[string]string) + for _, kv := range cmd.Env { + split := strings.SplitN(kv, "=", 2) + k, v := split[0], split[1] + env[k] = v + } + + var args []string + for _, arg := range cmd.Args { + quoted := strconv.Quote(arg) + if quoted[1:len(quoted)-1] != arg || strings.Contains(arg, " ") { + args = append(args, quoted) + } else { + args = append(args, arg) + } + } + return fmt.Sprintf("GOROOT=%v GOPATH=%v GO111MODULE=%v GOPROXY=%v PWD=%v %v", env["GOROOT"], env["GOPATH"], env["GO111MODULE"], env["GOPROXY"], env["PWD"], strings.Join(args, " ")) +} diff --git a/vendor/golang.org/x/tools/internal/gocommand/vendor.go b/vendor/golang.org/x/tools/internal/gocommand/vendor.go new file mode 100644 index 000000000..5e75bd6d8 --- /dev/null +++ b/vendor/golang.org/x/tools/internal/gocommand/vendor.go @@ -0,0 +1,107 @@ +// Copyright 2020 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gocommand + +import ( + "bytes" + "context" + "fmt" + "os" + "path/filepath" + "regexp" + "strings" + "time" + + "golang.org/x/mod/semver" +) + +// ModuleJSON holds information about a module. +type ModuleJSON struct { + Path string // module path + Version string // module version + Versions []string // available module versions (with -versions) + Replace *ModuleJSON // replaced by this module + Time *time.Time // time version was created + Update *ModuleJSON // available update, if any (with -u) + Main bool // is this the main module? + Indirect bool // is this module only an indirect dependency of main module? + Dir string // directory holding files for this module, if any + GoMod string // path to go.mod file used when loading this module, if any + GoVersion string // go version used in module +} + +var modFlagRegexp = regexp.MustCompile(`-mod[ =](\w+)`) + +// VendorEnabled reports whether vendoring is enabled. It takes a *Runner to execute Go commands +// with the supplied context.Context and Invocation. The Invocation can contain pre-defined fields, +// of which only Verb and Args are modified to run the appropriate Go command. 
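The Runner/Invocation pair above wraps the go tool: Runner caps the number of concurrent go commands and falls back to running serially when the go.mod concurrency error is detected, while Invocation describes a single call. A minimal caller sketch, using only the exported fields shown above; the verb and arguments are arbitrary examples, and the package is internal to golang.org/x/tools.

package main

import (
	"context"
	"fmt"
	"log"

	"golang.org/x/tools/internal/gocommand"
)

func main() {
	// The zero-value Runner is ready to use; it initializes itself lazily.
	runner := &gocommand.Runner{}

	inv := gocommand.Invocation{
		Verb: "list",
		Args: []string{"-m"},
		Logf: log.Printf, // optional debug logging of each command and its duration
	}

	// Run returns stdout and a "friendly" error that already includes stderr.
	stdout, err := runner.Run(context.Background(), inv)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Print(stdout.String())
}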
+// Inspired by setDefaultBuildMod in modload/init.go +func VendorEnabled(ctx context.Context, inv Invocation, r *Runner) (*ModuleJSON, bool, error) { + mainMod, go114, err := getMainModuleAnd114(ctx, inv, r) + if err != nil { + return nil, false, err + } + + // We check the GOFLAGS to see if there is anything overridden or not. + inv.Verb = "env" + inv.Args = []string{"GOFLAGS"} + stdout, err := r.Run(ctx, inv) + if err != nil { + return nil, false, err + } + goflags := string(bytes.TrimSpace(stdout.Bytes())) + matches := modFlagRegexp.FindStringSubmatch(goflags) + var modFlag string + if len(matches) != 0 { + modFlag = matches[1] + } + if modFlag != "" { + // Don't override an explicit '-mod=' argument. + return mainMod, modFlag == "vendor", nil + } + if mainMod == nil || !go114 { + return mainMod, false, nil + } + // Check 1.14's automatic vendor mode. + if fi, err := os.Stat(filepath.Join(mainMod.Dir, "vendor")); err == nil && fi.IsDir() { + if mainMod.GoVersion != "" && semver.Compare("v"+mainMod.GoVersion, "v1.14") >= 0 { + // The Go version is at least 1.14, and a vendor directory exists. + // Set -mod=vendor by default. + return mainMod, true, nil + } + } + return mainMod, false, nil +} + +// getMainModuleAnd114 gets the main module's information and whether the +// go command in use is 1.14+. This is the information needed to figure out +// if vendoring should be enabled. +func getMainModuleAnd114(ctx context.Context, inv Invocation, r *Runner) (*ModuleJSON, bool, error) { + const format = `{{.Path}} +{{.Dir}} +{{.GoMod}} +{{.GoVersion}} +{{range context.ReleaseTags}}{{if eq . "go1.14"}}{{.}}{{end}}{{end}} +` + inv.Verb = "list" + inv.Args = []string{"-m", "-f", format} + stdout, err := r.Run(ctx, inv) + if err != nil { + return nil, false, err + } + + lines := strings.Split(stdout.String(), "\n") + if len(lines) < 5 { + return nil, false, fmt.Errorf("unexpected stdout: %q", stdout.String()) + } + mod := &ModuleJSON{ + Path: lines[0], + Dir: lines[1], + GoMod: lines[2], + GoVersion: lines[3], + Main: true, + } + return mod, lines[4] == "go1.14", nil +} diff --git a/vendor/golang.org/x/tools/internal/gocommand/version.go b/vendor/golang.org/x/tools/internal/gocommand/version.go new file mode 100644 index 000000000..713043680 --- /dev/null +++ b/vendor/golang.org/x/tools/internal/gocommand/version.go @@ -0,0 +1,51 @@ +// Copyright 2020 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gocommand + +import ( + "context" + "fmt" + "strings" +) + +// GoVersion checks the go version by running "go list" with modules off. +// It returns the X in Go 1.X. +func GoVersion(ctx context.Context, inv Invocation, r *Runner) (int, error) { + inv.Verb = "list" + inv.Args = []string{"-e", "-f", `{{context.ReleaseTags}}`, `--`, `unsafe`} + inv.Env = append(append([]string{}, inv.Env...), "GO111MODULE=off") + // Unset any unneeded flags, and remove them from BuildFlags, if they're + // present. + inv.ModFile = "" + inv.ModFlag = "" + var buildFlags []string + for _, flag := range inv.BuildFlags { + // Flags can be prefixed by one or two dashes. 
+ f := strings.TrimPrefix(strings.TrimPrefix(flag, "-"), "-") + if strings.HasPrefix(f, "mod=") || strings.HasPrefix(f, "modfile=") { + continue + } + buildFlags = append(buildFlags, flag) + } + inv.BuildFlags = buildFlags + stdoutBytes, err := r.Run(ctx, inv) + if err != nil { + return 0, err + } + stdout := stdoutBytes.String() + if len(stdout) < 3 { + return 0, fmt.Errorf("bad ReleaseTags output: %q", stdout) + } + // Split up "[go1.1 go1.15]" + tags := strings.Fields(stdout[1 : len(stdout)-2]) + for i := len(tags) - 1; i >= 0; i-- { + var version int + if _, err := fmt.Sscanf(tags[i], "go1.%d", &version); err != nil { + continue + } + return version, nil + } + return 0, fmt.Errorf("no parseable ReleaseTags in %v", tags) +} diff --git a/vendor/golang.org/x/tools/internal/gopathwalk/walk.go b/vendor/golang.org/x/tools/internal/gopathwalk/walk.go new file mode 100644 index 000000000..925ff5356 --- /dev/null +++ b/vendor/golang.org/x/tools/internal/gopathwalk/walk.go @@ -0,0 +1,264 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package gopathwalk is like filepath.Walk but specialized for finding Go +// packages, particularly in $GOPATH and $GOROOT. +package gopathwalk + +import ( + "bufio" + "bytes" + "fmt" + "io/ioutil" + "log" + "os" + "path/filepath" + "strings" + "time" + + "golang.org/x/tools/internal/fastwalk" +) + +// Options controls the behavior of a Walk call. +type Options struct { + // If Logf is non-nil, debug logging is enabled through this function. + Logf func(format string, args ...interface{}) + // Search module caches. Also disables legacy goimports ignore rules. + ModulesEnabled bool +} + +// RootType indicates the type of a Root. +type RootType int + +const ( + RootUnknown RootType = iota + RootGOROOT + RootGOPATH + RootCurrentModule + RootModuleCache + RootOther +) + +// A Root is a starting point for a Walk. +type Root struct { + Path string + Type RootType +} + +// Walk walks Go source directories ($GOROOT, $GOPATH, etc) to find packages. +// For each package found, add will be called (concurrently) with the absolute +// paths of the containing source directory and the package directory. +// add will be called concurrently. +func Walk(roots []Root, add func(root Root, dir string), opts Options) { + WalkSkip(roots, add, func(Root, string) bool { return false }, opts) +} + +// WalkSkip walks Go source directories ($GOROOT, $GOPATH, etc) to find packages. +// For each package found, add will be called (concurrently) with the absolute +// paths of the containing source directory and the package directory. +// For each directory that will be scanned, skip will be called (concurrently) +// with the absolute paths of the containing source directory and the directory. +// If skip returns false on a directory it will be processed. +// add will be called concurrently. +// skip will be called concurrently. +func WalkSkip(roots []Root, add func(root Root, dir string), skip func(root Root, dir string) bool, opts Options) { + for _, root := range roots { + walkDir(root, add, skip, opts) + } +} + +// walkDir creates a walker and starts fastwalk with this walker. 
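gopathwalk.Walk fans out over a set of roots and calls the add callback concurrently for every candidate package directory, so callers must synchronize any shared state themselves. A sketch under assumed GOROOT/GOPATH locations (the paths below are placeholders, not detected values); as with the other snippets, the package is internal to golang.org/x/tools.

package main

import (
	"fmt"
	"sync"

	"golang.org/x/tools/internal/gopathwalk"
)

func main() {
	roots := []gopathwalk.Root{
		{Path: "/usr/local/go/src", Type: gopathwalk.RootGOROOT},
		{Path: "/home/user/go/src", Type: gopathwalk.RootGOPATH},
	}

	var (
		mu   sync.Mutex
		dirs []string
	)
	// add is invoked concurrently, so guard the shared slice with a mutex.
	add := func(root gopathwalk.Root, dir string) {
		mu.Lock()
		defer mu.Unlock()
		dirs = append(dirs, dir)
	}

	// Leaving ModulesEnabled false keeps the legacy .goimportsignore handling active.
	gopathwalk.Walk(roots, add, gopathwalk.Options{ModulesEnabled: false})
	fmt.Println(len(dirs), "candidate package directories")
}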
+func walkDir(root Root, add func(Root, string), skip func(root Root, dir string) bool, opts Options) { + if _, err := os.Stat(root.Path); os.IsNotExist(err) { + if opts.Logf != nil { + opts.Logf("skipping nonexistent directory: %v", root.Path) + } + return + } + start := time.Now() + if opts.Logf != nil { + opts.Logf("gopathwalk: scanning %s", root.Path) + } + w := &walker{ + root: root, + add: add, + skip: skip, + opts: opts, + } + w.init() + if err := fastwalk.Walk(root.Path, w.walk); err != nil { + log.Printf("gopathwalk: scanning directory %v: %v", root.Path, err) + } + + if opts.Logf != nil { + opts.Logf("gopathwalk: scanned %s in %v", root.Path, time.Since(start)) + } +} + +// walker is the callback for fastwalk.Walk. +type walker struct { + root Root // The source directory to scan. + add func(Root, string) // The callback that will be invoked for every possible Go package dir. + skip func(Root, string) bool // The callback that will be invoked for every dir. dir is skipped if it returns true. + opts Options // Options passed to Walk by the user. + + ignoredDirs []os.FileInfo // The ignored directories, loaded from .goimportsignore files. +} + +// init initializes the walker based on its Options +func (w *walker) init() { + var ignoredPaths []string + if w.root.Type == RootModuleCache { + ignoredPaths = []string{"cache"} + } + if !w.opts.ModulesEnabled && w.root.Type == RootGOPATH { + ignoredPaths = w.getIgnoredDirs(w.root.Path) + ignoredPaths = append(ignoredPaths, "v", "mod") + } + + for _, p := range ignoredPaths { + full := filepath.Join(w.root.Path, p) + if fi, err := os.Stat(full); err == nil { + w.ignoredDirs = append(w.ignoredDirs, fi) + if w.opts.Logf != nil { + w.opts.Logf("Directory added to ignore list: %s", full) + } + } else if w.opts.Logf != nil { + w.opts.Logf("Error statting ignored directory: %v", err) + } + } +} + +// getIgnoredDirs reads an optional config file at /.goimportsignore +// of relative directories to ignore when scanning for go files. +// The provided path is one of the $GOPATH entries with "src" appended. +func (w *walker) getIgnoredDirs(path string) []string { + file := filepath.Join(path, ".goimportsignore") + slurp, err := ioutil.ReadFile(file) + if w.opts.Logf != nil { + if err != nil { + w.opts.Logf("%v", err) + } else { + w.opts.Logf("Read %s", file) + } + } + if err != nil { + return nil + } + + var ignoredDirs []string + bs := bufio.NewScanner(bytes.NewReader(slurp)) + for bs.Scan() { + line := strings.TrimSpace(bs.Text()) + if line == "" || strings.HasPrefix(line, "#") { + continue + } + ignoredDirs = append(ignoredDirs, line) + } + return ignoredDirs +} + +// shouldSkipDir reports whether the file should be skipped or not. +func (w *walker) shouldSkipDir(fi os.FileInfo, dir string) bool { + for _, ignoredDir := range w.ignoredDirs { + if os.SameFile(fi, ignoredDir) { + return true + } + } + if w.skip != nil { + // Check with the user specified callback. + return w.skip(w.root, dir) + } + return false +} + +// walk walks through the given path. +func (w *walker) walk(path string, typ os.FileMode) error { + dir := filepath.Dir(path) + if typ.IsRegular() { + if dir == w.root.Path && (w.root.Type == RootGOROOT || w.root.Type == RootGOPATH) { + // Doesn't make sense to have regular files + // directly in your $GOPATH/src or $GOROOT/src. 
+ return fastwalk.ErrSkipFiles + } + if !strings.HasSuffix(path, ".go") { + return nil + } + + w.add(w.root, dir) + return fastwalk.ErrSkipFiles + } + if typ == os.ModeDir { + base := filepath.Base(path) + if base == "" || base[0] == '.' || base[0] == '_' || + base == "testdata" || + (w.root.Type == RootGOROOT && w.opts.ModulesEnabled && base == "vendor") || + (!w.opts.ModulesEnabled && base == "node_modules") { + return filepath.SkipDir + } + fi, err := os.Lstat(path) + if err == nil && w.shouldSkipDir(fi, path) { + return filepath.SkipDir + } + return nil + } + if typ == os.ModeSymlink { + base := filepath.Base(path) + if strings.HasPrefix(base, ".#") { + // Emacs noise. + return nil + } + fi, err := os.Lstat(path) + if err != nil { + // Just ignore it. + return nil + } + if w.shouldTraverse(dir, fi) { + return fastwalk.ErrTraverseLink + } + } + return nil +} + +// shouldTraverse reports whether the symlink fi, found in dir, +// should be followed. It makes sure symlinks were never visited +// before to avoid symlink loops. +func (w *walker) shouldTraverse(dir string, fi os.FileInfo) bool { + path := filepath.Join(dir, fi.Name()) + target, err := filepath.EvalSymlinks(path) + if err != nil { + return false + } + ts, err := os.Stat(target) + if err != nil { + fmt.Fprintln(os.Stderr, err) + return false + } + if !ts.IsDir() { + return false + } + if w.shouldSkipDir(ts, dir) { + return false + } + // Check for symlink loops by statting each directory component + // and seeing if any are the same file as ts. + for { + parent := filepath.Dir(path) + if parent == path { + // Made it to the root without seeing a cycle. + // Use this symlink. + return true + } + parentInfo, err := os.Stat(parent) + if err != nil { + return false + } + if os.SameFile(ts, parentInfo) { + // Cycle. Don't traverse. + return false + } + path = parent + } + +} diff --git a/vendor/golang.org/x/tools/internal/imports/fix.go b/vendor/golang.org/x/tools/internal/imports/fix.go new file mode 100644 index 000000000..d859617b7 --- /dev/null +++ b/vendor/golang.org/x/tools/internal/imports/fix.go @@ -0,0 +1,1730 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package imports + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "go/ast" + "go/build" + "go/parser" + "go/token" + "io/ioutil" + "os" + "path" + "path/filepath" + "reflect" + "sort" + "strconv" + "strings" + "sync" + "unicode" + "unicode/utf8" + + "golang.org/x/tools/go/ast/astutil" + "golang.org/x/tools/internal/gocommand" + "golang.org/x/tools/internal/gopathwalk" +) + +// importToGroup is a list of functions which map from an import path to +// a group number. 
+var importToGroup = []func(localPrefix, importPath string) (num int, ok bool){ + func(localPrefix, importPath string) (num int, ok bool) { + if localPrefix == "" { + return + } + for _, p := range strings.Split(localPrefix, ",") { + if strings.HasPrefix(importPath, p) || strings.TrimSuffix(p, "/") == importPath { + return 3, true + } + } + return + }, + func(_, importPath string) (num int, ok bool) { + if strings.HasPrefix(importPath, "appengine") { + return 2, true + } + return + }, + func(_, importPath string) (num int, ok bool) { + firstComponent := strings.Split(importPath, "/")[0] + if strings.Contains(firstComponent, ".") { + return 1, true + } + return + }, +} + +func importGroup(localPrefix, importPath string) int { + for _, fn := range importToGroup { + if n, ok := fn(localPrefix, importPath); ok { + return n + } + } + return 0 +} + +type ImportFixType int + +const ( + AddImport ImportFixType = iota + DeleteImport + SetImportName +) + +type ImportFix struct { + // StmtInfo represents the import statement this fix will add, remove, or change. + StmtInfo ImportInfo + // IdentName is the identifier that this fix will add or remove. + IdentName string + // FixType is the type of fix this is (AddImport, DeleteImport, SetImportName). + FixType ImportFixType + Relevance float64 // see pkg +} + +// An ImportInfo represents a single import statement. +type ImportInfo struct { + ImportPath string // import path, e.g. "crypto/rand". + Name string // import name, e.g. "crand", or "" if none. +} + +// A packageInfo represents what's known about a package. +type packageInfo struct { + name string // real package name, if known. + exports map[string]bool // known exports. +} + +// parseOtherFiles parses all the Go files in srcDir except filename, including +// test files if filename looks like a test. +func parseOtherFiles(fset *token.FileSet, srcDir, filename string) []*ast.File { + // This could use go/packages but it doesn't buy much, and it fails + // with https://golang.org/issue/26296 in LoadFiles mode in some cases. + considerTests := strings.HasSuffix(filename, "_test.go") + + fileBase := filepath.Base(filename) + packageFileInfos, err := ioutil.ReadDir(srcDir) + if err != nil { + return nil + } + + var files []*ast.File + for _, fi := range packageFileInfos { + if fi.Name() == fileBase || !strings.HasSuffix(fi.Name(), ".go") { + continue + } + if !considerTests && strings.HasSuffix(fi.Name(), "_test.go") { + continue + } + + f, err := parser.ParseFile(fset, filepath.Join(srcDir, fi.Name()), nil, 0) + if err != nil { + continue + } + + files = append(files, f) + } + + return files +} + +// addGlobals puts the names of package vars into the provided map. +func addGlobals(f *ast.File, globals map[string]bool) { + for _, decl := range f.Decls { + genDecl, ok := decl.(*ast.GenDecl) + if !ok { + continue + } + + for _, spec := range genDecl.Specs { + valueSpec, ok := spec.(*ast.ValueSpec) + if !ok { + continue + } + globals[valueSpec.Names[0].Name] = true + } + } +} + +// collectReferences builds a map of selector expressions, from +// left hand side (X) to a set of right hand sides (Sel). +func collectReferences(f *ast.File) references { + refs := references{} + + var visitor visitFn + visitor = func(node ast.Node) ast.Visitor { + if node == nil { + return visitor + } + switch v := node.(type) { + case *ast.SelectorExpr: + xident, ok := v.X.(*ast.Ident) + if !ok { + break + } + if xident.Obj != nil { + // If the parser can resolve it, it's not a package ref. 
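The importToGroup/importGroup logic shown just above decides which block an import path is sorted into: 0 for the standard library, 1 for third-party paths whose first element contains a dot, 2 for appengine paths, and 3 for paths matching the caller's local prefix, with earlier rules taking precedence. The standalone function below merely restates that rule to make the precedence concrete; it is an illustration, not the package's own (unexported) implementation.

package main

import (
	"fmt"
	"strings"
)

// group mirrors importGroup: higher-numbered groups sort into later import blocks.
func group(localPrefix, importPath string) int {
	for _, p := range strings.Split(localPrefix, ",") {
		if p != "" && (strings.HasPrefix(importPath, p) || strings.TrimSuffix(p, "/") == importPath) {
			return 3 // local packages
		}
	}
	if strings.HasPrefix(importPath, "appengine") {
		return 2 // appengine packages
	}
	if strings.Contains(strings.Split(importPath, "/")[0], ".") {
		return 1 // third-party packages: the first path element has a dot
	}
	return 0 // standard library
}

func main() {
	for _, p := range []string{"fmt", "github.com/pkg/errors", "appengine/datastore", "example.com/app/internal/db"} {
		fmt.Println(group("example.com/app", p), p)
	}
}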
+ break + } + if !ast.IsExported(v.Sel.Name) { + // Whatever this is, it's not exported from a package. + break + } + pkgName := xident.Name + r := refs[pkgName] + if r == nil { + r = make(map[string]bool) + refs[pkgName] = r + } + r[v.Sel.Name] = true + } + return visitor + } + ast.Walk(visitor, f) + return refs +} + +// collectImports returns all the imports in f. +// Unnamed imports (., _) and "C" are ignored. +func collectImports(f *ast.File) []*ImportInfo { + var imports []*ImportInfo + for _, imp := range f.Imports { + var name string + if imp.Name != nil { + name = imp.Name.Name + } + if imp.Path.Value == `"C"` || name == "_" || name == "." { + continue + } + path := strings.Trim(imp.Path.Value, `"`) + imports = append(imports, &ImportInfo{ + Name: name, + ImportPath: path, + }) + } + return imports +} + +// findMissingImport searches pass's candidates for an import that provides +// pkg, containing all of syms. +func (p *pass) findMissingImport(pkg string, syms map[string]bool) *ImportInfo { + for _, candidate := range p.candidates { + pkgInfo, ok := p.knownPackages[candidate.ImportPath] + if !ok { + continue + } + if p.importIdentifier(candidate) != pkg { + continue + } + + allFound := true + for right := range syms { + if !pkgInfo.exports[right] { + allFound = false + break + } + } + + if allFound { + return candidate + } + } + return nil +} + +// references is set of references found in a Go file. The first map key is the +// left hand side of a selector expression, the second key is the right hand +// side, and the value should always be true. +type references map[string]map[string]bool + +// A pass contains all the inputs and state necessary to fix a file's imports. +// It can be modified in some ways during use; see comments below. +type pass struct { + // Inputs. These must be set before a call to load, and not modified after. + fset *token.FileSet // fset used to parse f and its siblings. + f *ast.File // the file being fixed. + srcDir string // the directory containing f. + env *ProcessEnv // the environment to use for go commands, etc. + loadRealPackageNames bool // if true, load package names from disk rather than guessing them. + otherFiles []*ast.File // sibling files. + + // Intermediate state, generated by load. + existingImports map[string]*ImportInfo + allRefs references + missingRefs references + + // Inputs to fix. These can be augmented between successive fix calls. + lastTry bool // indicates that this is the last call and fix should clean up as best it can. + candidates []*ImportInfo // candidate imports in priority order. + knownPackages map[string]*packageInfo // information about all known packages. +} + +// loadPackageNames saves the package names for everything referenced by imports. 
+func (p *pass) loadPackageNames(imports []*ImportInfo) error { + if p.env.Logf != nil { + p.env.Logf("loading package names for %v packages", len(imports)) + defer func() { + p.env.Logf("done loading package names for %v packages", len(imports)) + }() + } + var unknown []string + for _, imp := range imports { + if _, ok := p.knownPackages[imp.ImportPath]; ok { + continue + } + unknown = append(unknown, imp.ImportPath) + } + + resolver, err := p.env.GetResolver() + if err != nil { + return err + } + + names, err := resolver.loadPackageNames(unknown, p.srcDir) + if err != nil { + return err + } + + for path, name := range names { + p.knownPackages[path] = &packageInfo{ + name: name, + exports: map[string]bool{}, + } + } + return nil +} + +// importIdentifier returns the identifier that imp will introduce. It will +// guess if the package name has not been loaded, e.g. because the source +// is not available. +func (p *pass) importIdentifier(imp *ImportInfo) string { + if imp.Name != "" { + return imp.Name + } + known := p.knownPackages[imp.ImportPath] + if known != nil && known.name != "" { + return known.name + } + return ImportPathToAssumedName(imp.ImportPath) +} + +// load reads in everything necessary to run a pass, and reports whether the +// file already has all the imports it needs. It fills in p.missingRefs with the +// file's missing symbols, if any, or removes unused imports if not. +func (p *pass) load() ([]*ImportFix, bool) { + p.knownPackages = map[string]*packageInfo{} + p.missingRefs = references{} + p.existingImports = map[string]*ImportInfo{} + + // Load basic information about the file in question. + p.allRefs = collectReferences(p.f) + + // Load stuff from other files in the same package: + // global variables so we know they don't need resolving, and imports + // that we might want to mimic. + globals := map[string]bool{} + for _, otherFile := range p.otherFiles { + // Don't load globals from files that are in the same directory + // but a different package. Using them to suggest imports is OK. + if p.f.Name.Name == otherFile.Name.Name { + addGlobals(otherFile, globals) + } + p.candidates = append(p.candidates, collectImports(otherFile)...) + } + + // Resolve all the import paths we've seen to package names, and store + // f's imports by the identifier they introduce. + imports := collectImports(p.f) + if p.loadRealPackageNames { + err := p.loadPackageNames(append(imports, p.candidates...)) + if err != nil { + if p.env.Logf != nil { + p.env.Logf("loading package names: %v", err) + } + return nil, false + } + } + for _, imp := range imports { + p.existingImports[p.importIdentifier(imp)] = imp + } + + // Find missing references. + for left, rights := range p.allRefs { + if globals[left] { + continue + } + _, ok := p.existingImports[left] + if !ok { + p.missingRefs[left] = rights + continue + } + } + if len(p.missingRefs) != 0 { + return nil, false + } + + return p.fix() +} + +// fix attempts to satisfy missing imports using p.candidates. If it finds +// everything, or if p.lastTry is true, it updates fixes to add the imports it found, +// delete anything unused, and update import names, and returns true. +func (p *pass) fix() ([]*ImportFix, bool) { + // Find missing imports. + var selected []*ImportInfo + for left, rights := range p.missingRefs { + if imp := p.findMissingImport(left, rights); imp != nil { + selected = append(selected, imp) + } + } + + if !p.lastTry && len(selected) != len(p.missingRefs) { + return nil, false + } + + // Found everything, or giving up. 
Add the new imports and remove any unused. + var fixes []*ImportFix + for _, imp := range p.existingImports { + // We deliberately ignore globals here, because we can't be sure + // they're in the same package. People do things like put multiple + // main packages in the same directory, and we don't want to + // remove imports if they happen to have the same name as a var in + // a different package. + if _, ok := p.allRefs[p.importIdentifier(imp)]; !ok { + fixes = append(fixes, &ImportFix{ + StmtInfo: *imp, + IdentName: p.importIdentifier(imp), + FixType: DeleteImport, + }) + continue + } + + // An existing import may need to update its import name to be correct. + if name := p.importSpecName(imp); name != imp.Name { + fixes = append(fixes, &ImportFix{ + StmtInfo: ImportInfo{ + Name: name, + ImportPath: imp.ImportPath, + }, + IdentName: p.importIdentifier(imp), + FixType: SetImportName, + }) + } + } + + for _, imp := range selected { + fixes = append(fixes, &ImportFix{ + StmtInfo: ImportInfo{ + Name: p.importSpecName(imp), + ImportPath: imp.ImportPath, + }, + IdentName: p.importIdentifier(imp), + FixType: AddImport, + }) + } + + return fixes, true +} + +// importSpecName gets the import name of imp in the import spec. +// +// When the import identifier matches the assumed import name, the import name does +// not appear in the import spec. +func (p *pass) importSpecName(imp *ImportInfo) string { + // If we did not load the real package names, or the name is already set, + // we just return the existing name. + if !p.loadRealPackageNames || imp.Name != "" { + return imp.Name + } + + ident := p.importIdentifier(imp) + if ident == ImportPathToAssumedName(imp.ImportPath) { + return "" // ident not needed since the assumed and real names are the same. + } + return ident +} + +// apply will perform the fixes on f in order. +func apply(fset *token.FileSet, f *ast.File, fixes []*ImportFix) { + for _, fix := range fixes { + switch fix.FixType { + case DeleteImport: + astutil.DeleteNamedImport(fset, f, fix.StmtInfo.Name, fix.StmtInfo.ImportPath) + case AddImport: + astutil.AddNamedImport(fset, f, fix.StmtInfo.Name, fix.StmtInfo.ImportPath) + case SetImportName: + // Find the matching import path and change the name. + for _, spec := range f.Imports { + path := strings.Trim(spec.Path.Value, `"`) + if path == fix.StmtInfo.ImportPath { + spec.Name = &ast.Ident{ + Name: fix.StmtInfo.Name, + NamePos: spec.Pos(), + } + } + } + } + } +} + +// assumeSiblingImportsValid assumes that siblings' use of packages is valid, +// adding the exports they use. +func (p *pass) assumeSiblingImportsValid() { + for _, f := range p.otherFiles { + refs := collectReferences(f) + imports := collectImports(f) + importsByName := map[string]*ImportInfo{} + for _, imp := range imports { + importsByName[p.importIdentifier(imp)] = imp + } + for left, rights := range refs { + if imp, ok := importsByName[left]; ok { + if m, ok := stdlib[imp.ImportPath]; ok { + // We have the stdlib in memory; no need to guess. + rights = copyExports(m) + } + p.addCandidate(imp, &packageInfo{ + // no name; we already know it. + exports: rights, + }) + } + } + } +} + +// addCandidate adds a candidate import to p, and merges in the information +// in pkg. 
+func (p *pass) addCandidate(imp *ImportInfo, pkg *packageInfo) { + p.candidates = append(p.candidates, imp) + if existing, ok := p.knownPackages[imp.ImportPath]; ok { + if existing.name == "" { + existing.name = pkg.name + } + for export := range pkg.exports { + existing.exports[export] = true + } + } else { + p.knownPackages[imp.ImportPath] = pkg + } +} + +// fixImports adds and removes imports from f so that all its references are +// satisfied and there are no unused imports. +// +// This is declared as a variable rather than a function so goimports can +// easily be extended by adding a file with an init function. +var fixImports = fixImportsDefault + +func fixImportsDefault(fset *token.FileSet, f *ast.File, filename string, env *ProcessEnv) error { + fixes, err := getFixes(fset, f, filename, env) + if err != nil { + return err + } + apply(fset, f, fixes) + return err +} + +// getFixes gets the import fixes that need to be made to f in order to fix the imports. +// It does not modify the ast. +func getFixes(fset *token.FileSet, f *ast.File, filename string, env *ProcessEnv) ([]*ImportFix, error) { + abs, err := filepath.Abs(filename) + if err != nil { + return nil, err + } + srcDir := filepath.Dir(abs) + if env.Logf != nil { + env.Logf("fixImports(filename=%q), abs=%q, srcDir=%q ...", filename, abs, srcDir) + } + + // First pass: looking only at f, and using the naive algorithm to + // derive package names from import paths, see if the file is already + // complete. We can't add any imports yet, because we don't know + // if missing references are actually package vars. + p := &pass{fset: fset, f: f, srcDir: srcDir, env: env} + if fixes, done := p.load(); done { + return fixes, nil + } + + otherFiles := parseOtherFiles(fset, srcDir, filename) + + // Second pass: add information from other files in the same package, + // like their package vars and imports. + p.otherFiles = otherFiles + if fixes, done := p.load(); done { + return fixes, nil + } + + // Now we can try adding imports from the stdlib. + p.assumeSiblingImportsValid() + addStdlibCandidates(p, p.missingRefs) + if fixes, done := p.fix(); done { + return fixes, nil + } + + // Third pass: get real package names where we had previously used + // the naive algorithm. + p = &pass{fset: fset, f: f, srcDir: srcDir, env: env} + p.loadRealPackageNames = true + p.otherFiles = otherFiles + if fixes, done := p.load(); done { + return fixes, nil + } + + if err := addStdlibCandidates(p, p.missingRefs); err != nil { + return nil, err + } + p.assumeSiblingImportsValid() + if fixes, done := p.fix(); done { + return fixes, nil + } + + // Go look for candidates in $GOPATH, etc. We don't necessarily load + // the real exports of sibling imports, so keep assuming their contents. + if err := addExternalCandidates(p, p.missingRefs, filename); err != nil { + return nil, err + } + + p.lastTry = true + fixes, _ := p.fix() + return fixes, nil +} + +// MaxRelevance is the highest relevance, used for the standard library. +// Chosen arbitrarily to match pre-existing gopls code. +const MaxRelevance = 7.0 + +// getCandidatePkgs works with the passed callback to find all acceptable packages. +// It deduplicates by import path, and uses a cached stdlib rather than reading +// from disk. 
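+//
+// Roughly, the flow is: roots accepted by rootFound are scanned, candidate
+// directories are offered to dirFound, surviving packages have their names
+// loaded and passed to packageNameLoaded, and only then are exports loaded
+// and handed to exportsLoaded. The in-memory stdlib table is pushed through
+// the same callbacks up front, which is why GOROOT roots are filtered out of
+// the disk scan below.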
+func getCandidatePkgs(ctx context.Context, wrappedCallback *scanCallback, filename, filePkg string, env *ProcessEnv) error { + notSelf := func(p *pkg) bool { + return p.packageName != filePkg || p.dir != filepath.Dir(filename) + } + goenv, err := env.goEnv() + if err != nil { + return err + } + + var mu sync.Mutex // to guard asynchronous access to dupCheck + dupCheck := map[string]struct{}{} + + // Start off with the standard library. + for importPath, exports := range stdlib { + p := &pkg{ + dir: filepath.Join(goenv["GOROOT"], "src", importPath), + importPathShort: importPath, + packageName: path.Base(importPath), + relevance: MaxRelevance, + } + dupCheck[importPath] = struct{}{} + if notSelf(p) && wrappedCallback.dirFound(p) && wrappedCallback.packageNameLoaded(p) { + wrappedCallback.exportsLoaded(p, exports) + } + } + + scanFilter := &scanCallback{ + rootFound: func(root gopathwalk.Root) bool { + // Exclude goroot results -- getting them is relatively expensive, not cached, + // and generally redundant with the in-memory version. + return root.Type != gopathwalk.RootGOROOT && wrappedCallback.rootFound(root) + }, + dirFound: wrappedCallback.dirFound, + packageNameLoaded: func(pkg *pkg) bool { + mu.Lock() + defer mu.Unlock() + if _, ok := dupCheck[pkg.importPathShort]; ok { + return false + } + dupCheck[pkg.importPathShort] = struct{}{} + return notSelf(pkg) && wrappedCallback.packageNameLoaded(pkg) + }, + exportsLoaded: func(pkg *pkg, exports []string) { + // If we're an x_test, load the package under test's test variant. + if strings.HasSuffix(filePkg, "_test") && pkg.dir == filepath.Dir(filename) { + var err error + _, exports, err = loadExportsFromFiles(ctx, env, pkg.dir, true) + if err != nil { + return + } + } + wrappedCallback.exportsLoaded(pkg, exports) + }, + } + resolver, err := env.GetResolver() + if err != nil { + return err + } + return resolver.scan(ctx, scanFilter) +} + +func ScoreImportPaths(ctx context.Context, env *ProcessEnv, paths []string) (map[string]float64, error) { + result := make(map[string]float64) + resolver, err := env.GetResolver() + if err != nil { + return nil, err + } + for _, path := range paths { + result[path] = resolver.scoreImportPath(ctx, path) + } + return result, nil +} + +func PrimeCache(ctx context.Context, env *ProcessEnv) error { + // Fully scan the disk for directories, but don't actually read any Go files. + callback := &scanCallback{ + rootFound: func(gopathwalk.Root) bool { + return true + }, + dirFound: func(pkg *pkg) bool { + return false + }, + packageNameLoaded: func(pkg *pkg) bool { + return false + }, + } + return getCandidatePkgs(ctx, callback, "", "", env) +} + +func candidateImportName(pkg *pkg) string { + if ImportPathToAssumedName(pkg.importPathShort) != pkg.packageName { + return pkg.packageName + } + return "" +} + +// GetAllCandidates calls wrapped for each package whose name starts with +// searchPrefix, and can be imported from filename with the package name filePkg. +func GetAllCandidates(ctx context.Context, wrapped func(ImportFix), searchPrefix, filename, filePkg string, env *ProcessEnv) error { + callback := &scanCallback{ + rootFound: func(gopathwalk.Root) bool { + return true + }, + dirFound: func(pkg *pkg) bool { + if !canUse(filename, pkg.dir) { + return false + } + // Try the assumed package name first, then a simpler path match + // in case of packages named vN, which are not uncommon. 
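+			// For example (hypothetical path): ImportPathToAssumedName("github.com/foo/bar/v2")
+			// is "bar", while path.Base of the same path is "v2"; a searchPrefix
+			// matching either keeps the candidate.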
+ return strings.HasPrefix(ImportPathToAssumedName(pkg.importPathShort), searchPrefix) || + strings.HasPrefix(path.Base(pkg.importPathShort), searchPrefix) + }, + packageNameLoaded: func(pkg *pkg) bool { + if !strings.HasPrefix(pkg.packageName, searchPrefix) { + return false + } + wrapped(ImportFix{ + StmtInfo: ImportInfo{ + ImportPath: pkg.importPathShort, + Name: candidateImportName(pkg), + }, + IdentName: pkg.packageName, + FixType: AddImport, + Relevance: pkg.relevance, + }) + return false + }, + } + return getCandidatePkgs(ctx, callback, filename, filePkg, env) +} + +// GetImportPaths calls wrapped for each package whose import path starts with +// searchPrefix, and can be imported from filename with the package name filePkg. +func GetImportPaths(ctx context.Context, wrapped func(ImportFix), searchPrefix, filename, filePkg string, env *ProcessEnv) error { + callback := &scanCallback{ + rootFound: func(gopathwalk.Root) bool { + return true + }, + dirFound: func(pkg *pkg) bool { + if !canUse(filename, pkg.dir) { + return false + } + return strings.HasPrefix(pkg.importPathShort, searchPrefix) + }, + packageNameLoaded: func(pkg *pkg) bool { + wrapped(ImportFix{ + StmtInfo: ImportInfo{ + ImportPath: pkg.importPathShort, + Name: candidateImportName(pkg), + }, + IdentName: pkg.packageName, + FixType: AddImport, + Relevance: pkg.relevance, + }) + return false + }, + } + return getCandidatePkgs(ctx, callback, filename, filePkg, env) +} + +// A PackageExport is a package and its exports. +type PackageExport struct { + Fix *ImportFix + Exports []string +} + +// GetPackageExports returns all known packages with name pkg and their exports. +func GetPackageExports(ctx context.Context, wrapped func(PackageExport), searchPkg, filename, filePkg string, env *ProcessEnv) error { + callback := &scanCallback{ + rootFound: func(gopathwalk.Root) bool { + return true + }, + dirFound: func(pkg *pkg) bool { + return pkgIsCandidate(filename, references{searchPkg: nil}, pkg) + }, + packageNameLoaded: func(pkg *pkg) bool { + return pkg.packageName == searchPkg + }, + exportsLoaded: func(pkg *pkg, exports []string) { + sort.Strings(exports) + wrapped(PackageExport{ + Fix: &ImportFix{ + StmtInfo: ImportInfo{ + ImportPath: pkg.importPathShort, + Name: candidateImportName(pkg), + }, + IdentName: pkg.packageName, + FixType: AddImport, + Relevance: pkg.relevance, + }, + Exports: exports, + }) + }, + } + return getCandidatePkgs(ctx, callback, filename, filePkg, env) +} + +var RequiredGoEnvVars = []string{"GO111MODULE", "GOFLAGS", "GOINSECURE", "GOMOD", "GOMODCACHE", "GONOPROXY", "GONOSUMDB", "GOPATH", "GOPROXY", "GOROOT", "GOSUMDB"} + +// ProcessEnv contains environment variables and settings that affect the use of +// the go command, the go/build package, etc. +type ProcessEnv struct { + GocmdRunner *gocommand.Runner + + BuildFlags []string + ModFlag string + ModFile string + + // Env overrides the OS environment, and can be used to specify + // GOPROXY, GO111MODULE, etc. PATH cannot be set here, because + // exec.Command will not honor it. + // Specifying all of RequiredGoEnvVars avoids a call to `go env`. + Env map[string]string + + WorkingDir string + + // If Logf is non-nil, debug logging is enabled through this function. 
+ Logf func(format string, args ...interface{}) + + initialized bool + + resolver Resolver +} + +func (e *ProcessEnv) goEnv() (map[string]string, error) { + if err := e.init(); err != nil { + return nil, err + } + return e.Env, nil +} + +func (e *ProcessEnv) matchFile(dir, name string) (bool, error) { + bctx, err := e.buildContext() + if err != nil { + return false, err + } + return bctx.MatchFile(dir, name) +} + +// CopyConfig copies the env's configuration into a new env. +func (e *ProcessEnv) CopyConfig() *ProcessEnv { + copy := &ProcessEnv{ + GocmdRunner: e.GocmdRunner, + initialized: e.initialized, + BuildFlags: e.BuildFlags, + Logf: e.Logf, + WorkingDir: e.WorkingDir, + resolver: nil, + Env: map[string]string{}, + } + for k, v := range e.Env { + copy.Env[k] = v + } + return copy +} + +func (e *ProcessEnv) init() error { + if e.initialized { + return nil + } + + foundAllRequired := true + for _, k := range RequiredGoEnvVars { + if _, ok := e.Env[k]; !ok { + foundAllRequired = false + break + } + } + if foundAllRequired { + e.initialized = true + return nil + } + + if e.Env == nil { + e.Env = map[string]string{} + } + + goEnv := map[string]string{} + stdout, err := e.invokeGo(context.TODO(), "env", append([]string{"-json"}, RequiredGoEnvVars...)...) + if err != nil { + return err + } + if err := json.Unmarshal(stdout.Bytes(), &goEnv); err != nil { + return err + } + for k, v := range goEnv { + e.Env[k] = v + } + e.initialized = true + return nil +} + +func (e *ProcessEnv) env() []string { + var env []string // the gocommand package will prepend os.Environ. + for k, v := range e.Env { + env = append(env, k+"="+v) + } + return env +} + +func (e *ProcessEnv) GetResolver() (Resolver, error) { + if e.resolver != nil { + return e.resolver, nil + } + if err := e.init(); err != nil { + return nil, err + } + if len(e.Env["GOMOD"]) == 0 { + e.resolver = newGopathResolver(e) + return e.resolver, nil + } + e.resolver = newModuleResolver(e) + return e.resolver, nil +} + +func (e *ProcessEnv) buildContext() (*build.Context, error) { + ctx := build.Default + goenv, err := e.goEnv() + if err != nil { + return nil, err + } + ctx.GOROOT = goenv["GOROOT"] + ctx.GOPATH = goenv["GOPATH"] + + // As of Go 1.14, build.Context has a Dir field + // (see golang.org/issue/34860). + // Populate it only if present. + rc := reflect.ValueOf(&ctx).Elem() + dir := rc.FieldByName("Dir") + if dir.IsValid() && dir.Kind() == reflect.String { + dir.SetString(e.WorkingDir) + } + + // Since Go 1.11, go/build.Context.Import may invoke 'go list' depending on + // the value in GO111MODULE in the process's environment. We always want to + // run in GOPATH mode when calling Import, so we need to prevent this from + // happening. In Go 1.16, GO111MODULE defaults to "on", so this problem comes + // up more frequently. + // + // HACK: setting any of the Context I/O hooks prevents Import from invoking + // 'go list', regardless of GO111MODULE. This is undocumented, but it's + // unlikely to change before GOPATH support is removed. 
+ ctx.ReadDir = ioutil.ReadDir + + return &ctx, nil +} + +func (e *ProcessEnv) invokeGo(ctx context.Context, verb string, args ...string) (*bytes.Buffer, error) { + inv := gocommand.Invocation{ + Verb: verb, + Args: args, + BuildFlags: e.BuildFlags, + Env: e.env(), + Logf: e.Logf, + WorkingDir: e.WorkingDir, + } + return e.GocmdRunner.Run(ctx, inv) +} + +func addStdlibCandidates(pass *pass, refs references) error { + goenv, err := pass.env.goEnv() + if err != nil { + return err + } + add := func(pkg string) { + // Prevent self-imports. + if path.Base(pkg) == pass.f.Name.Name && filepath.Join(goenv["GOROOT"], "src", pkg) == pass.srcDir { + return + } + exports := copyExports(stdlib[pkg]) + pass.addCandidate( + &ImportInfo{ImportPath: pkg}, + &packageInfo{name: path.Base(pkg), exports: exports}) + } + for left := range refs { + if left == "rand" { + // Make sure we try crypto/rand before math/rand. + add("crypto/rand") + add("math/rand") + continue + } + for importPath := range stdlib { + if path.Base(importPath) == left { + add(importPath) + } + } + } + return nil +} + +// A Resolver does the build-system-specific parts of goimports. +type Resolver interface { + // loadPackageNames loads the package names in importPaths. + loadPackageNames(importPaths []string, srcDir string) (map[string]string, error) + // scan works with callback to search for packages. See scanCallback for details. + scan(ctx context.Context, callback *scanCallback) error + // loadExports returns the set of exported symbols in the package at dir. + // loadExports may be called concurrently. + loadExports(ctx context.Context, pkg *pkg, includeTest bool) (string, []string, error) + // scoreImportPath returns the relevance for an import path. + scoreImportPath(ctx context.Context, path string) float64 + + ClearForNewScan() +} + +// A scanCallback controls a call to scan and receives its results. +// In general, minor errors will be silently discarded; a user should not +// expect to receive a full series of calls for everything. +type scanCallback struct { + // rootFound is called before scanning a new root dir. If it returns true, + // the root will be scanned. Returning false will not necessarily prevent + // directories from that root making it to dirFound. + rootFound func(gopathwalk.Root) bool + // dirFound is called when a directory is found that is possibly a Go package. + // pkg will be populated with everything except packageName. + // If it returns true, the package's name will be loaded. + dirFound func(pkg *pkg) bool + // packageNameLoaded is called when a package is found and its name is loaded. + // If it returns true, the package's exports will be loaded. + packageNameLoaded func(pkg *pkg) bool + // exportsLoaded is called when a package's exports have been loaded. + exportsLoaded func(pkg *pkg, exports []string) +} + +func addExternalCandidates(pass *pass, refs references, filename string) error { + var mu sync.Mutex + found := make(map[string][]pkgDistance) + callback := &scanCallback{ + rootFound: func(gopathwalk.Root) bool { + return true // We want everything. + }, + dirFound: func(pkg *pkg) bool { + return pkgIsCandidate(filename, refs, pkg) + }, + packageNameLoaded: func(pkg *pkg) bool { + if _, want := refs[pkg.packageName]; !want { + return false + } + if pkg.dir == pass.srcDir && pass.f.Name.Name == pkg.packageName { + // The candidate is in the same directory and has the + // same package name. Don't try to import ourselves. 
+ return false + } + if !canUse(filename, pkg.dir) { + return false + } + mu.Lock() + defer mu.Unlock() + found[pkg.packageName] = append(found[pkg.packageName], pkgDistance{pkg, distance(pass.srcDir, pkg.dir)}) + return false // We'll do our own loading after we sort. + }, + } + resolver, err := pass.env.GetResolver() + if err != nil { + return err + } + if err = resolver.scan(context.Background(), callback); err != nil { + return err + } + + // Search for imports matching potential package references. + type result struct { + imp *ImportInfo + pkg *packageInfo + } + results := make(chan result, len(refs)) + + ctx, cancel := context.WithCancel(context.TODO()) + var wg sync.WaitGroup + defer func() { + cancel() + wg.Wait() + }() + var ( + firstErr error + firstErrOnce sync.Once + ) + for pkgName, symbols := range refs { + wg.Add(1) + go func(pkgName string, symbols map[string]bool) { + defer wg.Done() + + found, err := findImport(ctx, pass, found[pkgName], pkgName, symbols, filename) + + if err != nil { + firstErrOnce.Do(func() { + firstErr = err + cancel() + }) + return + } + + if found == nil { + return // No matching package. + } + + imp := &ImportInfo{ + ImportPath: found.importPathShort, + } + + pkg := &packageInfo{ + name: pkgName, + exports: symbols, + } + results <- result{imp, pkg} + }(pkgName, symbols) + } + go func() { + wg.Wait() + close(results) + }() + + for result := range results { + pass.addCandidate(result.imp, result.pkg) + } + return firstErr +} + +// notIdentifier reports whether ch is an invalid identifier character. +func notIdentifier(ch rune) bool { + return !('a' <= ch && ch <= 'z' || 'A' <= ch && ch <= 'Z' || + '0' <= ch && ch <= '9' || + ch == '_' || + ch >= utf8.RuneSelf && (unicode.IsLetter(ch) || unicode.IsDigit(ch))) +} + +// ImportPathToAssumedName returns the assumed package name of an import path. +// It does this using only string parsing of the import path. +// It picks the last element of the path that does not look like a major +// version, and then picks the valid identifier off the start of that element. +// It is used to determine if a local rename should be added to an import for +// clarity. +// This function could be moved to a standard package and exported if we want +// for use in other tools. +func ImportPathToAssumedName(importPath string) string { + base := path.Base(importPath) + if strings.HasPrefix(base, "v") { + if _, err := strconv.Atoi(base[1:]); err == nil { + dir := path.Dir(importPath) + if dir != "." { + base = path.Base(dir) + } + } + } + base = strings.TrimPrefix(base, "go-") + if i := strings.IndexFunc(base, notIdentifier); i >= 0 { + base = base[:i] + } + return base +} + +// gopathResolver implements resolver for GOPATH workspaces. +type gopathResolver struct { + env *ProcessEnv + walked bool + cache *dirInfoCache + scanSema chan struct{} // scanSema prevents concurrent scans. 
+} + +func newGopathResolver(env *ProcessEnv) *gopathResolver { + r := &gopathResolver{ + env: env, + cache: &dirInfoCache{ + dirs: map[string]*directoryPackageInfo{}, + listeners: map[*int]cacheListener{}, + }, + scanSema: make(chan struct{}, 1), + } + r.scanSema <- struct{}{} + return r +} + +func (r *gopathResolver) ClearForNewScan() { + <-r.scanSema + r.cache = &dirInfoCache{ + dirs: map[string]*directoryPackageInfo{}, + listeners: map[*int]cacheListener{}, + } + r.walked = false + r.scanSema <- struct{}{} +} + +func (r *gopathResolver) loadPackageNames(importPaths []string, srcDir string) (map[string]string, error) { + names := map[string]string{} + bctx, err := r.env.buildContext() + if err != nil { + return nil, err + } + for _, path := range importPaths { + names[path] = importPathToName(bctx, path, srcDir) + } + return names, nil +} + +// importPathToName finds out the actual package name, as declared in its .go files. +func importPathToName(bctx *build.Context, importPath, srcDir string) string { + // Fast path for standard library without going to disk. + if _, ok := stdlib[importPath]; ok { + return path.Base(importPath) // stdlib packages always match their paths. + } + + buildPkg, err := bctx.Import(importPath, srcDir, build.FindOnly) + if err != nil { + return "" + } + pkgName, err := packageDirToName(buildPkg.Dir) + if err != nil { + return "" + } + return pkgName +} + +// packageDirToName is a faster version of build.Import if +// the only thing desired is the package name. Given a directory, +// packageDirToName then only parses one file in the package, +// trusting that the files in the directory are consistent. +func packageDirToName(dir string) (packageName string, err error) { + d, err := os.Open(dir) + if err != nil { + return "", err + } + names, err := d.Readdirnames(-1) + d.Close() + if err != nil { + return "", err + } + sort.Strings(names) // to have predictable behavior + var lastErr error + var nfile int + for _, name := range names { + if !strings.HasSuffix(name, ".go") { + continue + } + if strings.HasSuffix(name, "_test.go") { + continue + } + nfile++ + fullFile := filepath.Join(dir, name) + + fset := token.NewFileSet() + f, err := parser.ParseFile(fset, fullFile, nil, parser.PackageClauseOnly) + if err != nil { + lastErr = err + continue + } + pkgName := f.Name.Name + if pkgName == "documentation" { + // Special case from go/build.ImportDir, not + // handled by ctx.MatchFile. + continue + } + if pkgName == "main" { + // Also skip package main, assuming it's a +build ignore generator or example. + // Since you can't import a package main anyway, there's no harm here. + continue + } + return pkgName, nil + } + if lastErr != nil { + return "", lastErr + } + return "", fmt.Errorf("no importable package found in %d Go files", nfile) +} + +type pkg struct { + dir string // absolute file path to pkg directory ("/usr/lib/go/src/net/http") + importPathShort string // vendorless import path ("net/http", "a/b") + packageName string // package name loaded from source if requested + relevance float64 // a weakly-defined score of how relevant a package is. 0 is most relevant. +} + +type pkgDistance struct { + pkg *pkg + distance int // relative distance to target +} + +// byDistanceOrImportPathShortLength sorts by relative distance breaking ties +// on the short import path length and then the import string itself. 
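+//
+// For illustration (hypothetical candidates): given the (distance, path)
+// pairs (3, "a/b"), (1, "github.com/x/verylongpathname"), (1, "net/http")
+// and (-1, "c/d"), the sorted order is "net/http",
+// "github.com/x/verylongpathname", "a/b", "c/d". A distance of -1 sorts
+// last; ties on distance are broken by the shorter, then lexically smaller,
+// import path.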
+type byDistanceOrImportPathShortLength []pkgDistance + +func (s byDistanceOrImportPathShortLength) Len() int { return len(s) } +func (s byDistanceOrImportPathShortLength) Less(i, j int) bool { + di, dj := s[i].distance, s[j].distance + if di == -1 { + return false + } + if dj == -1 { + return true + } + if di != dj { + return di < dj + } + + vi, vj := s[i].pkg.importPathShort, s[j].pkg.importPathShort + if len(vi) != len(vj) { + return len(vi) < len(vj) + } + return vi < vj +} +func (s byDistanceOrImportPathShortLength) Swap(i, j int) { s[i], s[j] = s[j], s[i] } + +func distance(basepath, targetpath string) int { + p, err := filepath.Rel(basepath, targetpath) + if err != nil { + return -1 + } + if p == "." { + return 0 + } + return strings.Count(p, string(filepath.Separator)) + 1 +} + +func (r *gopathResolver) scan(ctx context.Context, callback *scanCallback) error { + add := func(root gopathwalk.Root, dir string) { + // We assume cached directories have not changed. We can skip them and their + // children. + if _, ok := r.cache.Load(dir); ok { + return + } + + importpath := filepath.ToSlash(dir[len(root.Path)+len("/"):]) + info := directoryPackageInfo{ + status: directoryScanned, + dir: dir, + rootType: root.Type, + nonCanonicalImportPath: VendorlessPath(importpath), + } + r.cache.Store(dir, info) + } + processDir := func(info directoryPackageInfo) { + // Skip this directory if we were not able to get the package information successfully. + if scanned, err := info.reachedStatus(directoryScanned); !scanned || err != nil { + return + } + + p := &pkg{ + importPathShort: info.nonCanonicalImportPath, + dir: info.dir, + relevance: MaxRelevance - 1, + } + if info.rootType == gopathwalk.RootGOROOT { + p.relevance = MaxRelevance + } + + if !callback.dirFound(p) { + return + } + var err error + p.packageName, err = r.cache.CachePackageName(info) + if err != nil { + return + } + + if !callback.packageNameLoaded(p) { + return + } + if _, exports, err := r.loadExports(ctx, p, false); err == nil { + callback.exportsLoaded(p, exports) + } + } + stop := r.cache.ScanAndListen(ctx, processDir) + defer stop() + + goenv, err := r.env.goEnv() + if err != nil { + return err + } + var roots []gopathwalk.Root + roots = append(roots, gopathwalk.Root{filepath.Join(goenv["GOROOT"], "src"), gopathwalk.RootGOROOT}) + for _, p := range filepath.SplitList(goenv["GOPATH"]) { + roots = append(roots, gopathwalk.Root{filepath.Join(p, "src"), gopathwalk.RootGOPATH}) + } + // The callback is not necessarily safe to use in the goroutine below. Process roots eagerly. + roots = filterRoots(roots, callback.rootFound) + // We can't cancel walks, because we need them to finish to have a usable + // cache. Instead, run them in a separate goroutine and detach. 
+ scanDone := make(chan struct{}) + go func() { + select { + case <-ctx.Done(): + return + case <-r.scanSema: + } + defer func() { r.scanSema <- struct{}{} }() + gopathwalk.Walk(roots, add, gopathwalk.Options{Logf: r.env.Logf, ModulesEnabled: false}) + close(scanDone) + }() + select { + case <-ctx.Done(): + case <-scanDone: + } + return nil +} + +func (r *gopathResolver) scoreImportPath(ctx context.Context, path string) float64 { + if _, ok := stdlib[path]; ok { + return MaxRelevance + } + return MaxRelevance - 1 +} + +func filterRoots(roots []gopathwalk.Root, include func(gopathwalk.Root) bool) []gopathwalk.Root { + var result []gopathwalk.Root + for _, root := range roots { + if !include(root) { + continue + } + result = append(result, root) + } + return result +} + +func (r *gopathResolver) loadExports(ctx context.Context, pkg *pkg, includeTest bool) (string, []string, error) { + if info, ok := r.cache.Load(pkg.dir); ok && !includeTest { + return r.cache.CacheExports(ctx, r.env, info) + } + return loadExportsFromFiles(ctx, r.env, pkg.dir, includeTest) +} + +// VendorlessPath returns the devendorized version of the import path ipath. +// For example, VendorlessPath("foo/bar/vendor/a/b") returns "a/b". +func VendorlessPath(ipath string) string { + // Devendorize for use in import statement. + if i := strings.LastIndex(ipath, "/vendor/"); i >= 0 { + return ipath[i+len("/vendor/"):] + } + if strings.HasPrefix(ipath, "vendor/") { + return ipath[len("vendor/"):] + } + return ipath +} + +func loadExportsFromFiles(ctx context.Context, env *ProcessEnv, dir string, includeTest bool) (string, []string, error) { + // Look for non-test, buildable .go files which could provide exports. + all, err := ioutil.ReadDir(dir) + if err != nil { + return "", nil, err + } + var files []os.FileInfo + for _, fi := range all { + name := fi.Name() + if !strings.HasSuffix(name, ".go") || (!includeTest && strings.HasSuffix(name, "_test.go")) { + continue + } + match, err := env.matchFile(dir, fi.Name()) + if err != nil || !match { + continue + } + files = append(files, fi) + } + + if len(files) == 0 { + return "", nil, fmt.Errorf("dir %v contains no buildable, non-test .go files", dir) + } + + var pkgName string + var exports []string + fset := token.NewFileSet() + for _, fi := range files { + select { + case <-ctx.Done(): + return "", nil, ctx.Err() + default: + } + + fullFile := filepath.Join(dir, fi.Name()) + f, err := parser.ParseFile(fset, fullFile, nil, 0) + if err != nil { + if env.Logf != nil { + env.Logf("error parsing %v: %v", fullFile, err) + } + continue + } + if f.Name.Name == "documentation" { + // Special case from go/build.ImportDir, not + // handled by MatchFile above. + continue + } + if includeTest && strings.HasSuffix(f.Name.Name, "_test") { + // x_test package. We want internal test files only. + continue + } + pkgName = f.Name.Name + for name := range f.Scope.Objects { + if ast.IsExported(name) { + exports = append(exports, name) + } + } + } + + if env.Logf != nil { + sortedExports := append([]string(nil), exports...) + sort.Strings(sortedExports) + env.Logf("loaded exports in dir %v (package %v): %v", dir, pkgName, strings.Join(sortedExports, ", ")) + } + return pkgName, exports, nil +} + +// findImport searches for a package with the given symbols. 
+// If no package is found, findImport returns ("", false, nil) +func findImport(ctx context.Context, pass *pass, candidates []pkgDistance, pkgName string, symbols map[string]bool, filename string) (*pkg, error) { + // Sort the candidates by their import package length, + // assuming that shorter package names are better than long + // ones. Note that this sorts by the de-vendored name, so + // there's no "penalty" for vendoring. + sort.Sort(byDistanceOrImportPathShortLength(candidates)) + if pass.env.Logf != nil { + for i, c := range candidates { + pass.env.Logf("%s candidate %d/%d: %v in %v", pkgName, i+1, len(candidates), c.pkg.importPathShort, c.pkg.dir) + } + } + resolver, err := pass.env.GetResolver() + if err != nil { + return nil, err + } + + // Collect exports for packages with matching names. + rescv := make([]chan *pkg, len(candidates)) + for i := range candidates { + rescv[i] = make(chan *pkg, 1) + } + const maxConcurrentPackageImport = 4 + loadExportsSem := make(chan struct{}, maxConcurrentPackageImport) + + ctx, cancel := context.WithCancel(ctx) + var wg sync.WaitGroup + defer func() { + cancel() + wg.Wait() + }() + + wg.Add(1) + go func() { + defer wg.Done() + for i, c := range candidates { + select { + case loadExportsSem <- struct{}{}: + case <-ctx.Done(): + return + } + + wg.Add(1) + go func(c pkgDistance, resc chan<- *pkg) { + defer func() { + <-loadExportsSem + wg.Done() + }() + + if pass.env.Logf != nil { + pass.env.Logf("loading exports in dir %s (seeking package %s)", c.pkg.dir, pkgName) + } + // If we're an x_test, load the package under test's test variant. + includeTest := strings.HasSuffix(pass.f.Name.Name, "_test") && c.pkg.dir == pass.srcDir + _, exports, err := resolver.loadExports(ctx, c.pkg, includeTest) + if err != nil { + if pass.env.Logf != nil { + pass.env.Logf("loading exports in dir %s (seeking package %s): %v", c.pkg.dir, pkgName, err) + } + resc <- nil + return + } + + exportsMap := make(map[string]bool, len(exports)) + for _, sym := range exports { + exportsMap[sym] = true + } + + // If it doesn't have the right + // symbols, send nil to mean no match. + for symbol := range symbols { + if !exportsMap[symbol] { + resc <- nil + return + } + } + resc <- c.pkg + }(c, rescv[i]) + } + }() + + for _, resc := range rescv { + pkg := <-resc + if pkg == nil { + continue + } + return pkg, nil + } + return nil, nil +} + +// pkgIsCandidate reports whether pkg is a candidate for satisfying the +// finding which package pkgIdent in the file named by filename is trying +// to refer to. +// +// This check is purely lexical and is meant to be as fast as possible +// because it's run over all $GOPATH directories to filter out poor +// candidates in order to limit the CPU and I/O later parsing the +// exports in candidate packages. +// +// filename is the file being formatted. +// pkgIdent is the package being searched for, like "client" (if +// searching for "client.New") +func pkgIsCandidate(filename string, refs references, pkg *pkg) bool { + // Check "internal" and "vendor" visibility: + if !canUse(filename, pkg.dir) { + return false + } + + // Speed optimization to minimize disk I/O: + // the last two components on disk must contain the + // package name somewhere. 
+ // + // This permits mismatch naming like directory + // "go-foo" being package "foo", or "pkg.v3" being "pkg", + // or directory "google.golang.org/api/cloudbilling/v1" + // being package "cloudbilling", but doesn't + // permit a directory "foo" to be package + // "bar", which is strongly discouraged + // anyway. There's no reason goimports needs + // to be slow just to accommodate that. + for pkgIdent := range refs { + lastTwo := lastTwoComponents(pkg.importPathShort) + if strings.Contains(lastTwo, pkgIdent) { + return true + } + if hasHyphenOrUpperASCII(lastTwo) && !hasHyphenOrUpperASCII(pkgIdent) { + lastTwo = lowerASCIIAndRemoveHyphen(lastTwo) + if strings.Contains(lastTwo, pkgIdent) { + return true + } + } + } + return false +} + +func hasHyphenOrUpperASCII(s string) bool { + for i := 0; i < len(s); i++ { + b := s[i] + if b == '-' || ('A' <= b && b <= 'Z') { + return true + } + } + return false +} + +func lowerASCIIAndRemoveHyphen(s string) (ret string) { + buf := make([]byte, 0, len(s)) + for i := 0; i < len(s); i++ { + b := s[i] + switch { + case b == '-': + continue + case 'A' <= b && b <= 'Z': + buf = append(buf, b+('a'-'A')) + default: + buf = append(buf, b) + } + } + return string(buf) +} + +// canUse reports whether the package in dir is usable from filename, +// respecting the Go "internal" and "vendor" visibility rules. +func canUse(filename, dir string) bool { + // Fast path check, before any allocations. If it doesn't contain vendor + // or internal, it's not tricky: + // Note that this can false-negative on directories like "notinternal", + // but we check it correctly below. This is just a fast path. + if !strings.Contains(dir, "vendor") && !strings.Contains(dir, "internal") { + return true + } + + dirSlash := filepath.ToSlash(dir) + if !strings.Contains(dirSlash, "/vendor/") && !strings.Contains(dirSlash, "/internal/") && !strings.HasSuffix(dirSlash, "/internal") { + return true + } + // Vendor or internal directory only visible from children of parent. + // That means the path from the current directory to the target directory + // can contain ../vendor or ../internal but not ../foo/vendor or ../foo/internal + // or bar/vendor or bar/internal. + // After stripping all the leading ../, the only okay place to see vendor or internal + // is at the very beginning of the path. + absfile, err := filepath.Abs(filename) + if err != nil { + return false + } + absdir, err := filepath.Abs(dir) + if err != nil { + return false + } + rel, err := filepath.Rel(absfile, absdir) + if err != nil { + return false + } + relSlash := filepath.ToSlash(rel) + if i := strings.LastIndex(relSlash, "../"); i >= 0 { + relSlash = relSlash[i+len("../"):] + } + return !strings.Contains(relSlash, "/vendor/") && !strings.Contains(relSlash, "/internal/") && !strings.HasSuffix(relSlash, "/internal") +} + +// lastTwoComponents returns at most the last two path components +// of v, using either / or \ as the path separator. 
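+//
+// For example, lastTwoComponents("golang.org/x/tools/imports") is
+// "/tools/imports" (the leading separator is kept); a value with fewer than
+// two separators is returned unchanged.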
+func lastTwoComponents(v string) string { + nslash := 0 + for i := len(v) - 1; i >= 0; i-- { + if v[i] == '/' || v[i] == '\\' { + nslash++ + if nslash == 2 { + return v[i:] + } + } + } + return v +} + +type visitFn func(node ast.Node) ast.Visitor + +func (fn visitFn) Visit(node ast.Node) ast.Visitor { + return fn(node) +} + +func copyExports(pkg []string) map[string]bool { + m := make(map[string]bool, len(pkg)) + for _, v := range pkg { + m[v] = true + } + return m +} diff --git a/vendor/golang.org/x/tools/internal/imports/imports.go b/vendor/golang.org/x/tools/internal/imports/imports.go new file mode 100644 index 000000000..2815edc33 --- /dev/null +++ b/vendor/golang.org/x/tools/internal/imports/imports.go @@ -0,0 +1,346 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:generate go run mkstdlib.go + +// Package imports implements a Go pretty-printer (like package "go/format") +// that also adds or removes import statements as necessary. +package imports + +import ( + "bufio" + "bytes" + "fmt" + "go/ast" + "go/format" + "go/parser" + "go/printer" + "go/token" + "io" + "regexp" + "strconv" + "strings" + + "golang.org/x/tools/go/ast/astutil" +) + +// Options is golang.org/x/tools/imports.Options with extra internal-only options. +type Options struct { + Env *ProcessEnv // The environment to use. Note: this contains the cached module and filesystem state. + + // LocalPrefix is a comma-separated string of import path prefixes, which, if + // set, instructs Process to sort the import paths with the given prefixes + // into another group after 3rd-party packages. + LocalPrefix string + + Fragment bool // Accept fragment of a source file (no package statement) + AllErrors bool // Report all errors (not just the first 10 on different lines) + + Comments bool // Print comments (true if nil *Options provided) + TabIndent bool // Use tabs for indent (true if nil *Options provided) + TabWidth int // Tab width (8 if nil *Options provided) + + FormatOnly bool // Disable the insertion and deletion of imports +} + +// Process implements golang.org/x/tools/imports.Process with explicit context in opt.Env. +func Process(filename string, src []byte, opt *Options) (formatted []byte, err error) { + fileSet := token.NewFileSet() + file, adjust, err := parse(fileSet, filename, src, opt) + if err != nil { + return nil, err + } + + if !opt.FormatOnly { + if err := fixImports(fileSet, file, filename, opt.Env); err != nil { + return nil, err + } + } + return formatFile(fileSet, file, src, adjust, opt) +} + +// FixImports returns a list of fixes to the imports that, when applied, +// will leave the imports in the same state as Process. src and opt must +// be specified. +// +// Note that filename's directory influences which imports can be chosen, +// so it is important that filename be accurate. +func FixImports(filename string, src []byte, opt *Options) (fixes []*ImportFix, err error) { + fileSet := token.NewFileSet() + file, _, err := parse(fileSet, filename, src, opt) + if err != nil { + return nil, err + } + + return getFixes(fileSet, file, filename, opt.Env) +} + +// ApplyFixes applies all of the fixes to the file and formats it. extraMode +// is added in when parsing the file. src and opts must be specified, but no +// env is needed. 
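+//
+// A minimal usage sketch (assuming the caller already has src, a filename,
+// and an *Options with Env, Comments, TabIndent and TabWidth populated):
+//
+//	fixes, err := FixImports("main.go", src, opt)
+//	if err != nil {
+//		return err
+//	}
+//	formatted, err := ApplyFixes(fixes, "main.go", src, opt, 0)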
+func ApplyFixes(fixes []*ImportFix, filename string, src []byte, opt *Options, extraMode parser.Mode) (formatted []byte, err error) { + // Don't use parse() -- we don't care about fragments or statement lists + // here, and we need to work with unparseable files. + fileSet := token.NewFileSet() + parserMode := parser.Mode(0) + if opt.Comments { + parserMode |= parser.ParseComments + } + if opt.AllErrors { + parserMode |= parser.AllErrors + } + parserMode |= extraMode + + file, err := parser.ParseFile(fileSet, filename, src, parserMode) + if file == nil { + return nil, err + } + + // Apply the fixes to the file. + apply(fileSet, file, fixes) + + return formatFile(fileSet, file, src, nil, opt) +} + +func formatFile(fileSet *token.FileSet, file *ast.File, src []byte, adjust func(orig []byte, src []byte) []byte, opt *Options) ([]byte, error) { + mergeImports(fileSet, file) + sortImports(opt.LocalPrefix, fileSet, file) + imps := astutil.Imports(fileSet, file) + var spacesBefore []string // import paths we need spaces before + for _, impSection := range imps { + // Within each block of contiguous imports, see if any + // import lines are in different group numbers. If so, + // we'll need to put a space between them so it's + // compatible with gofmt. + lastGroup := -1 + for _, importSpec := range impSection { + importPath, _ := strconv.Unquote(importSpec.Path.Value) + groupNum := importGroup(opt.LocalPrefix, importPath) + if groupNum != lastGroup && lastGroup != -1 { + spacesBefore = append(spacesBefore, importPath) + } + lastGroup = groupNum + } + + } + + printerMode := printer.UseSpaces + if opt.TabIndent { + printerMode |= printer.TabIndent + } + printConfig := &printer.Config{Mode: printerMode, Tabwidth: opt.TabWidth} + + var buf bytes.Buffer + err := printConfig.Fprint(&buf, fileSet, file) + if err != nil { + return nil, err + } + out := buf.Bytes() + if adjust != nil { + out = adjust(src, out) + } + if len(spacesBefore) > 0 { + out, err = addImportSpaces(bytes.NewReader(out), spacesBefore) + if err != nil { + return nil, err + } + } + + out, err = format.Source(out) + if err != nil { + return nil, err + } + return out, nil +} + +// parse parses src, which was read from filename, +// as a Go source file or statement list. +func parse(fset *token.FileSet, filename string, src []byte, opt *Options) (*ast.File, func(orig, src []byte) []byte, error) { + parserMode := parser.Mode(0) + if opt.Comments { + parserMode |= parser.ParseComments + } + if opt.AllErrors { + parserMode |= parser.AllErrors + } + + // Try as whole source file. + file, err := parser.ParseFile(fset, filename, src, parserMode) + if err == nil { + return file, nil, nil + } + // If the error is that the source file didn't begin with a + // package line and we accept fragmented input, fall through to + // try as a source fragment. Stop and return on any other error. + if !opt.Fragment || !strings.Contains(err.Error(), "expected 'package'") { + return nil, nil, err + } + + // If this is a declaration list, make it a source file + // by inserting a package clause. + // Insert using a ;, not a newline, so that parse errors are on + // the correct line. + const prefix = "package main;" + psrc := append([]byte(prefix), src...) + file, err = parser.ParseFile(fset, filename, psrc, parserMode) + if err == nil { + // Gofmt will turn the ; into a \n. + // Do that ourselves now and update the file contents, + // so that positions and line numbers are correct going forward. 
+ psrc[len(prefix)-1] = '\n' + fset.File(file.Package).SetLinesForContent(psrc) + + // If a main function exists, we will assume this is a main + // package and leave the file. + if containsMainFunc(file) { + return file, nil, nil + } + + adjust := func(orig, src []byte) []byte { + // Remove the package clause. + src = src[len(prefix):] + return matchSpace(orig, src) + } + return file, adjust, nil + } + // If the error is that the source file didn't begin with a + // declaration, fall through to try as a statement list. + // Stop and return on any other error. + if !strings.Contains(err.Error(), "expected declaration") { + return nil, nil, err + } + + // If this is a statement list, make it a source file + // by inserting a package clause and turning the list + // into a function body. This handles expressions too. + // Insert using a ;, not a newline, so that the line numbers + // in fsrc match the ones in src. + fsrc := append(append([]byte("package p; func _() {"), src...), '}') + file, err = parser.ParseFile(fset, filename, fsrc, parserMode) + if err == nil { + adjust := func(orig, src []byte) []byte { + // Remove the wrapping. + // Gofmt has turned the ; into a \n\n. + src = src[len("package p\n\nfunc _() {"):] + src = src[:len(src)-len("}\n")] + // Gofmt has also indented the function body one level. + // Remove that indent. + src = bytes.Replace(src, []byte("\n\t"), []byte("\n"), -1) + return matchSpace(orig, src) + } + return file, adjust, nil + } + + // Failed, and out of options. + return nil, nil, err +} + +// containsMainFunc checks if a file contains a function declaration with the +// function signature 'func main()' +func containsMainFunc(file *ast.File) bool { + for _, decl := range file.Decls { + if f, ok := decl.(*ast.FuncDecl); ok { + if f.Name.Name != "main" { + continue + } + + if len(f.Type.Params.List) != 0 { + continue + } + + if f.Type.Results != nil && len(f.Type.Results.List) != 0 { + continue + } + + return true + } + } + + return false +} + +func cutSpace(b []byte) (before, middle, after []byte) { + i := 0 + for i < len(b) && (b[i] == ' ' || b[i] == '\t' || b[i] == '\n') { + i++ + } + j := len(b) + for j > 0 && (b[j-1] == ' ' || b[j-1] == '\t' || b[j-1] == '\n') { + j-- + } + if i <= j { + return b[:i], b[i:j], b[j:] + } + return nil, nil, b[j:] +} + +// matchSpace reformats src to use the same space context as orig. +// 1) If orig begins with blank lines, matchSpace inserts them at the beginning of src. +// 2) matchSpace copies the indentation of the first non-blank line in orig +// to every non-blank line in src. +// 3) matchSpace copies the trailing space from orig and uses it in place +// of src's trailing space. 
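+//
+// For example, matchSpace([]byte("\n\t\tx := 1\n"), []byte("y := 2")) should
+// return "\n\t\ty := 2\n": the leading newline, the two-tab indent and the
+// trailing newline are all taken from orig.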
+func matchSpace(orig []byte, src []byte) []byte { + before, _, after := cutSpace(orig) + i := bytes.LastIndex(before, []byte{'\n'}) + before, indent := before[:i+1], before[i+1:] + + _, src, _ = cutSpace(src) + + var b bytes.Buffer + b.Write(before) + for len(src) > 0 { + line := src + if i := bytes.IndexByte(line, '\n'); i >= 0 { + line, src = line[:i+1], line[i+1:] + } else { + src = nil + } + if len(line) > 0 && line[0] != '\n' { // not blank + b.Write(indent) + } + b.Write(line) + } + b.Write(after) + return b.Bytes() +} + +var impLine = regexp.MustCompile(`^\s+(?:[\w\.]+\s+)?"(.+)"`) + +func addImportSpaces(r io.Reader, breaks []string) ([]byte, error) { + var out bytes.Buffer + in := bufio.NewReader(r) + inImports := false + done := false + for { + s, err := in.ReadString('\n') + if err == io.EOF { + break + } else if err != nil { + return nil, err + } + + if !inImports && !done && strings.HasPrefix(s, "import") { + inImports = true + } + if inImports && (strings.HasPrefix(s, "var") || + strings.HasPrefix(s, "func") || + strings.HasPrefix(s, "const") || + strings.HasPrefix(s, "type")) { + done = true + inImports = false + } + if inImports && len(breaks) > 0 { + if m := impLine.FindStringSubmatch(s); m != nil { + if m[1] == breaks[0] { + out.WriteByte('\n') + breaks = breaks[1:] + } + } + } + + fmt.Fprint(&out, s) + } + return out.Bytes(), nil +} diff --git a/vendor/golang.org/x/tools/internal/imports/mod.go b/vendor/golang.org/x/tools/internal/imports/mod.go new file mode 100644 index 000000000..dff6d5536 --- /dev/null +++ b/vendor/golang.org/x/tools/internal/imports/mod.go @@ -0,0 +1,695 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package imports + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "io/ioutil" + "os" + "path" + "path/filepath" + "regexp" + "sort" + "strconv" + "strings" + + "golang.org/x/mod/module" + "golang.org/x/tools/internal/gocommand" + "golang.org/x/tools/internal/gopathwalk" +) + +// ModuleResolver implements resolver for modules using the go command as little +// as feasible. +type ModuleResolver struct { + env *ProcessEnv + moduleCacheDir string + dummyVendorMod *gocommand.ModuleJSON // If vendoring is enabled, the pseudo-module that represents the /vendor directory. + roots []gopathwalk.Root + scanSema chan struct{} // scanSema prevents concurrent scans and guards scannedRoots. + scannedRoots map[gopathwalk.Root]bool + + initialized bool + main *gocommand.ModuleJSON + modsByModPath []*gocommand.ModuleJSON // All modules, ordered by # of path components in module Path... + modsByDir []*gocommand.ModuleJSON // ...or Dir. + + // moduleCacheCache stores information about the module cache. 
+ moduleCacheCache *dirInfoCache + otherCache *dirInfoCache +} + +func newModuleResolver(e *ProcessEnv) *ModuleResolver { + r := &ModuleResolver{ + env: e, + scanSema: make(chan struct{}, 1), + } + r.scanSema <- struct{}{} + return r +} + +func (r *ModuleResolver) init() error { + if r.initialized { + return nil + } + + goenv, err := r.env.goEnv() + if err != nil { + return err + } + inv := gocommand.Invocation{ + BuildFlags: r.env.BuildFlags, + ModFlag: r.env.ModFlag, + ModFile: r.env.ModFile, + Env: r.env.env(), + Logf: r.env.Logf, + WorkingDir: r.env.WorkingDir, + } + mainMod, vendorEnabled, err := gocommand.VendorEnabled(context.TODO(), inv, r.env.GocmdRunner) + if err != nil { + return err + } + + if mainMod != nil && vendorEnabled { + // Vendor mode is on, so all the non-Main modules are irrelevant, + // and we need to search /vendor for everything. + r.main = mainMod + r.dummyVendorMod = &gocommand.ModuleJSON{ + Path: "", + Dir: filepath.Join(mainMod.Dir, "vendor"), + } + r.modsByModPath = []*gocommand.ModuleJSON{mainMod, r.dummyVendorMod} + r.modsByDir = []*gocommand.ModuleJSON{mainMod, r.dummyVendorMod} + } else { + // Vendor mode is off, so run go list -m ... to find everything. + err := r.initAllMods() + // We expect an error when running outside of a module with + // GO111MODULE=on. Other errors are fatal. + if err != nil { + if errMsg := err.Error(); !strings.Contains(errMsg, "working directory is not part of a module") && !strings.Contains(errMsg, "go.mod file not found") { + return err + } + } + } + + if gmc := r.env.Env["GOMODCACHE"]; gmc != "" { + r.moduleCacheDir = gmc + } else { + gopaths := filepath.SplitList(goenv["GOPATH"]) + if len(gopaths) == 0 { + return fmt.Errorf("empty GOPATH") + } + r.moduleCacheDir = filepath.Join(gopaths[0], "/pkg/mod") + } + + sort.Slice(r.modsByModPath, func(i, j int) bool { + count := func(x int) int { + return strings.Count(r.modsByModPath[x].Path, "/") + } + return count(j) < count(i) // descending order + }) + sort.Slice(r.modsByDir, func(i, j int) bool { + count := func(x int) int { + return strings.Count(r.modsByDir[x].Dir, "/") + } + return count(j) < count(i) // descending order + }) + + r.roots = []gopathwalk.Root{ + {filepath.Join(goenv["GOROOT"], "/src"), gopathwalk.RootGOROOT}, + } + if r.main != nil { + r.roots = append(r.roots, gopathwalk.Root{r.main.Dir, gopathwalk.RootCurrentModule}) + } + if vendorEnabled { + r.roots = append(r.roots, gopathwalk.Root{r.dummyVendorMod.Dir, gopathwalk.RootOther}) + } else { + addDep := func(mod *gocommand.ModuleJSON) { + if mod.Replace == nil { + // This is redundant with the cache, but we'll skip it cheaply enough. + r.roots = append(r.roots, gopathwalk.Root{mod.Dir, gopathwalk.RootModuleCache}) + } else { + r.roots = append(r.roots, gopathwalk.Root{mod.Dir, gopathwalk.RootOther}) + } + } + // Walk dependent modules before scanning the full mod cache, direct deps first. 
+ for _, mod := range r.modsByModPath { + if !mod.Indirect && !mod.Main { + addDep(mod) + } + } + for _, mod := range r.modsByModPath { + if mod.Indirect && !mod.Main { + addDep(mod) + } + } + r.roots = append(r.roots, gopathwalk.Root{r.moduleCacheDir, gopathwalk.RootModuleCache}) + } + + r.scannedRoots = map[gopathwalk.Root]bool{} + if r.moduleCacheCache == nil { + r.moduleCacheCache = &dirInfoCache{ + dirs: map[string]*directoryPackageInfo{}, + listeners: map[*int]cacheListener{}, + } + } + if r.otherCache == nil { + r.otherCache = &dirInfoCache{ + dirs: map[string]*directoryPackageInfo{}, + listeners: map[*int]cacheListener{}, + } + } + r.initialized = true + return nil +} + +func (r *ModuleResolver) initAllMods() error { + stdout, err := r.env.invokeGo(context.TODO(), "list", "-m", "-e", "-json", "...") + if err != nil { + return err + } + for dec := json.NewDecoder(stdout); dec.More(); { + mod := &gocommand.ModuleJSON{} + if err := dec.Decode(mod); err != nil { + return err + } + if mod.Dir == "" { + if r.env.Logf != nil { + r.env.Logf("module %v has not been downloaded and will be ignored", mod.Path) + } + // Can't do anything with a module that's not downloaded. + continue + } + // golang/go#36193: the go command doesn't always clean paths. + mod.Dir = filepath.Clean(mod.Dir) + r.modsByModPath = append(r.modsByModPath, mod) + r.modsByDir = append(r.modsByDir, mod) + if mod.Main { + r.main = mod + } + } + return nil +} + +func (r *ModuleResolver) ClearForNewScan() { + <-r.scanSema + r.scannedRoots = map[gopathwalk.Root]bool{} + r.otherCache = &dirInfoCache{ + dirs: map[string]*directoryPackageInfo{}, + listeners: map[*int]cacheListener{}, + } + r.scanSema <- struct{}{} +} + +func (r *ModuleResolver) ClearForNewMod() { + <-r.scanSema + *r = ModuleResolver{ + env: r.env, + moduleCacheCache: r.moduleCacheCache, + otherCache: r.otherCache, + scanSema: r.scanSema, + } + r.init() + r.scanSema <- struct{}{} +} + +// findPackage returns the module and directory that contains the package at +// the given import path, or returns nil, "" if no module is in scope. +func (r *ModuleResolver) findPackage(importPath string) (*gocommand.ModuleJSON, string) { + // This can't find packages in the stdlib, but that's harmless for all + // the existing code paths. + for _, m := range r.modsByModPath { + if !strings.HasPrefix(importPath, m.Path) { + continue + } + pathInModule := importPath[len(m.Path):] + pkgDir := filepath.Join(m.Dir, pathInModule) + if r.dirIsNestedModule(pkgDir, m) { + continue + } + + if info, ok := r.cacheLoad(pkgDir); ok { + if loaded, err := info.reachedStatus(nameLoaded); loaded { + if err != nil { + continue // No package in this dir. + } + return m, pkgDir + } + if scanned, err := info.reachedStatus(directoryScanned); scanned && err != nil { + continue // Dir is unreadable, etc. + } + // This is slightly wrong: a directory doesn't have to have an + // importable package to count as a package for package-to-module + // resolution. package main or _test files should count but + // don't. + // TODO(heschi): fix this. + if _, err := r.cachePackageName(info); err == nil { + return m, pkgDir + } + } + + // Not cached. Read the filesystem. + pkgFiles, err := ioutil.ReadDir(pkgDir) + if err != nil { + continue + } + // A module only contains a package if it has buildable go + // files in that directory. If not, it could be provided by an + // outer module. See #29736. 
+ for _, fi := range pkgFiles { + if ok, _ := r.env.matchFile(pkgDir, fi.Name()); ok { + return m, pkgDir + } + } + } + return nil, "" +} + +func (r *ModuleResolver) cacheLoad(dir string) (directoryPackageInfo, bool) { + if info, ok := r.moduleCacheCache.Load(dir); ok { + return info, ok + } + return r.otherCache.Load(dir) +} + +func (r *ModuleResolver) cacheStore(info directoryPackageInfo) { + if info.rootType == gopathwalk.RootModuleCache { + r.moduleCacheCache.Store(info.dir, info) + } else { + r.otherCache.Store(info.dir, info) + } +} + +func (r *ModuleResolver) cacheKeys() []string { + return append(r.moduleCacheCache.Keys(), r.otherCache.Keys()...) +} + +// cachePackageName caches the package name for a dir already in the cache. +func (r *ModuleResolver) cachePackageName(info directoryPackageInfo) (string, error) { + if info.rootType == gopathwalk.RootModuleCache { + return r.moduleCacheCache.CachePackageName(info) + } + return r.otherCache.CachePackageName(info) +} + +func (r *ModuleResolver) cacheExports(ctx context.Context, env *ProcessEnv, info directoryPackageInfo) (string, []string, error) { + if info.rootType == gopathwalk.RootModuleCache { + return r.moduleCacheCache.CacheExports(ctx, env, info) + } + return r.otherCache.CacheExports(ctx, env, info) +} + +// findModuleByDir returns the module that contains dir, or nil if no such +// module is in scope. +func (r *ModuleResolver) findModuleByDir(dir string) *gocommand.ModuleJSON { + // This is quite tricky and may not be correct. dir could be: + // - a package in the main module. + // - a replace target underneath the main module's directory. + // - a nested module in the above. + // - a replace target somewhere totally random. + // - a nested module in the above. + // - in the mod cache. + // - in /vendor/ in -mod=vendor mode. + // - nested module? Dunno. + // Rumor has it that replace targets cannot contain other replace targets. + for _, m := range r.modsByDir { + if !strings.HasPrefix(dir, m.Dir) { + continue + } + + if r.dirIsNestedModule(dir, m) { + continue + } + + return m + } + return nil +} + +// dirIsNestedModule reports if dir is contained in a nested module underneath +// mod, not actually in mod. +func (r *ModuleResolver) dirIsNestedModule(dir string, mod *gocommand.ModuleJSON) bool { + if !strings.HasPrefix(dir, mod.Dir) { + return false + } + if r.dirInModuleCache(dir) { + // Nested modules in the module cache are pruned, + // so it cannot be a nested module. + return false + } + if mod != nil && mod == r.dummyVendorMod { + // The /vendor pseudomodule is flattened and doesn't actually count. 
+ return false + } + modDir, _ := r.modInfo(dir) + if modDir == "" { + return false + } + return modDir != mod.Dir +} + +func (r *ModuleResolver) modInfo(dir string) (modDir string, modName string) { + readModName := func(modFile string) string { + modBytes, err := ioutil.ReadFile(modFile) + if err != nil { + return "" + } + return modulePath(modBytes) + } + + if r.dirInModuleCache(dir) { + if matches := modCacheRegexp.FindStringSubmatch(dir); len(matches) == 3 { + index := strings.Index(dir, matches[1]+"@"+matches[2]) + modDir := filepath.Join(dir[:index], matches[1]+"@"+matches[2]) + return modDir, readModName(filepath.Join(modDir, "go.mod")) + } + } + for { + if info, ok := r.cacheLoad(dir); ok { + return info.moduleDir, info.moduleName + } + f := filepath.Join(dir, "go.mod") + info, err := os.Stat(f) + if err == nil && !info.IsDir() { + return dir, readModName(f) + } + + d := filepath.Dir(dir) + if len(d) >= len(dir) { + return "", "" // reached top of file system, no go.mod + } + dir = d + } +} + +func (r *ModuleResolver) dirInModuleCache(dir string) bool { + if r.moduleCacheDir == "" { + return false + } + return strings.HasPrefix(dir, r.moduleCacheDir) +} + +func (r *ModuleResolver) loadPackageNames(importPaths []string, srcDir string) (map[string]string, error) { + if err := r.init(); err != nil { + return nil, err + } + names := map[string]string{} + for _, path := range importPaths { + _, packageDir := r.findPackage(path) + if packageDir == "" { + continue + } + name, err := packageDirToName(packageDir) + if err != nil { + continue + } + names[path] = name + } + return names, nil +} + +func (r *ModuleResolver) scan(ctx context.Context, callback *scanCallback) error { + if err := r.init(); err != nil { + return err + } + + processDir := func(info directoryPackageInfo) { + // Skip this directory if we were not able to get the package information successfully. + if scanned, err := info.reachedStatus(directoryScanned); !scanned || err != nil { + return + } + pkg, err := r.canonicalize(info) + if err != nil { + return + } + + if !callback.dirFound(pkg) { + return + } + pkg.packageName, err = r.cachePackageName(info) + if err != nil { + return + } + + if !callback.packageNameLoaded(pkg) { + return + } + _, exports, err := r.loadExports(ctx, pkg, false) + if err != nil { + return + } + callback.exportsLoaded(pkg, exports) + } + + // Start processing everything in the cache, and listen for the new stuff + // we discover in the walk below. + stop1 := r.moduleCacheCache.ScanAndListen(ctx, processDir) + defer stop1() + stop2 := r.otherCache.ScanAndListen(ctx, processDir) + defer stop2() + + // We assume cached directories are fully cached, including all their + // children, and have not changed. We can skip them. + skip := func(root gopathwalk.Root, dir string) bool { + info, ok := r.cacheLoad(dir) + if !ok { + return false + } + // This directory can be skipped as long as we have already scanned it. + // Packages with errors will continue to have errors, so there is no need + // to rescan them. + packageScanned, _ := info.reachedStatus(directoryScanned) + return packageScanned + } + + // Add anything new to the cache, and process it if we're still listening. + add := func(root gopathwalk.Root, dir string) { + r.cacheStore(r.scanDirForPackage(root, dir)) + } + + // r.roots and the callback are not necessarily safe to use in the + // goroutine below. Process them eagerly. 
+ roots := filterRoots(r.roots, callback.rootFound) + // We can't cancel walks, because we need them to finish to have a usable + // cache. Instead, run them in a separate goroutine and detach. + scanDone := make(chan struct{}) + go func() { + select { + case <-ctx.Done(): + return + case <-r.scanSema: + } + defer func() { r.scanSema <- struct{}{} }() + // We have the lock on r.scannedRoots, and no other scans can run. + for _, root := range roots { + if ctx.Err() != nil { + return + } + + if r.scannedRoots[root] { + continue + } + gopathwalk.WalkSkip([]gopathwalk.Root{root}, add, skip, gopathwalk.Options{Logf: r.env.Logf, ModulesEnabled: true}) + r.scannedRoots[root] = true + } + close(scanDone) + }() + select { + case <-ctx.Done(): + case <-scanDone: + } + return nil +} + +func (r *ModuleResolver) scoreImportPath(ctx context.Context, path string) float64 { + if _, ok := stdlib[path]; ok { + return MaxRelevance + } + mod, _ := r.findPackage(path) + return modRelevance(mod) +} + +func modRelevance(mod *gocommand.ModuleJSON) float64 { + var relevance float64 + switch { + case mod == nil: // out of scope + return MaxRelevance - 4 + case mod.Indirect: + relevance = MaxRelevance - 3 + case !mod.Main: + relevance = MaxRelevance - 2 + default: + relevance = MaxRelevance - 1 // main module ties with stdlib + } + + _, versionString, ok := module.SplitPathVersion(mod.Path) + if ok { + index := strings.Index(versionString, "v") + if index == -1 { + return relevance + } + if versionNumber, err := strconv.ParseFloat(versionString[index+1:], 64); err == nil { + relevance += versionNumber / 1000 + } + } + + return relevance +} + +// canonicalize gets the result of canonicalizing the packages using the results +// of initializing the resolver from 'go list -m'. +func (r *ModuleResolver) canonicalize(info directoryPackageInfo) (*pkg, error) { + // Packages in GOROOT are already canonical, regardless of the std/cmd modules. + if info.rootType == gopathwalk.RootGOROOT { + return &pkg{ + importPathShort: info.nonCanonicalImportPath, + dir: info.dir, + packageName: path.Base(info.nonCanonicalImportPath), + relevance: MaxRelevance, + }, nil + } + + importPath := info.nonCanonicalImportPath + mod := r.findModuleByDir(info.dir) + // Check if the directory is underneath a module that's in scope. + if mod != nil { + // It is. If dir is the target of a replace directive, + // our guessed import path is wrong. Use the real one. + if mod.Dir == info.dir { + importPath = mod.Path + } else { + dirInMod := info.dir[len(mod.Dir)+len("/"):] + importPath = path.Join(mod.Path, filepath.ToSlash(dirInMod)) + } + } else if !strings.HasPrefix(importPath, info.moduleName) { + // The module's name doesn't match the package's import path. It + // probably needs a replace directive we don't have. + return nil, fmt.Errorf("package in %q is not valid without a replace statement", info.dir) + } + + res := &pkg{ + importPathShort: importPath, + dir: info.dir, + relevance: modRelevance(mod), + } + // We may have discovered a package that has a different version + // in scope already. Canonicalize to that one if possible. 
+ if _, canonicalDir := r.findPackage(importPath); canonicalDir != "" { + res.dir = canonicalDir + } + return res, nil +} + +func (r *ModuleResolver) loadExports(ctx context.Context, pkg *pkg, includeTest bool) (string, []string, error) { + if err := r.init(); err != nil { + return "", nil, err + } + if info, ok := r.cacheLoad(pkg.dir); ok && !includeTest { + return r.cacheExports(ctx, r.env, info) + } + return loadExportsFromFiles(ctx, r.env, pkg.dir, includeTest) +} + +func (r *ModuleResolver) scanDirForPackage(root gopathwalk.Root, dir string) directoryPackageInfo { + subdir := "" + if dir != root.Path { + subdir = dir[len(root.Path)+len("/"):] + } + importPath := filepath.ToSlash(subdir) + if strings.HasPrefix(importPath, "vendor/") { + // Only enter vendor directories if they're explicitly requested as a root. + return directoryPackageInfo{ + status: directoryScanned, + err: fmt.Errorf("unwanted vendor directory"), + } + } + switch root.Type { + case gopathwalk.RootCurrentModule: + importPath = path.Join(r.main.Path, filepath.ToSlash(subdir)) + case gopathwalk.RootModuleCache: + matches := modCacheRegexp.FindStringSubmatch(subdir) + if len(matches) == 0 { + return directoryPackageInfo{ + status: directoryScanned, + err: fmt.Errorf("invalid module cache path: %v", subdir), + } + } + modPath, err := module.UnescapePath(filepath.ToSlash(matches[1])) + if err != nil { + if r.env.Logf != nil { + r.env.Logf("decoding module cache path %q: %v", subdir, err) + } + return directoryPackageInfo{ + status: directoryScanned, + err: fmt.Errorf("decoding module cache path %q: %v", subdir, err), + } + } + importPath = path.Join(modPath, filepath.ToSlash(matches[3])) + } + + modDir, modName := r.modInfo(dir) + result := directoryPackageInfo{ + status: directoryScanned, + dir: dir, + rootType: root.Type, + nonCanonicalImportPath: importPath, + moduleDir: modDir, + moduleName: modName, + } + if root.Type == gopathwalk.RootGOROOT { + // stdlib packages are always in scope, despite the confusing go.mod + return result + } + return result +} + +// modCacheRegexp splits a path in a module cache into module, module version, and package. +var modCacheRegexp = regexp.MustCompile(`(.*)@([^/\\]*)(.*)`) + +var ( + slashSlash = []byte("//") + moduleStr = []byte("module") +) + +// modulePath returns the module path from the gomod file text. +// If it cannot find a module path, it returns an empty string. +// It is tolerant of unrelated problems in the go.mod file. +// +// Copied from cmd/go/internal/modfile. +func modulePath(mod []byte) string { + for len(mod) > 0 { + line := mod + mod = nil + if i := bytes.IndexByte(line, '\n'); i >= 0 { + line, mod = line[:i], line[i+1:] + } + if i := bytes.Index(line, slashSlash); i >= 0 { + line = line[:i] + } + line = bytes.TrimSpace(line) + if !bytes.HasPrefix(line, moduleStr) { + continue + } + line = line[len(moduleStr):] + n := len(line) + line = bytes.TrimSpace(line) + if len(line) == n || len(line) == 0 { + continue + } + + if line[0] == '"' || line[0] == '`' { + p, err := strconv.Unquote(string(line)) + if err != nil { + return "" // malformed quoted string or multiline module path + } + return p + } + + return string(line) + } + return "" // missing module path +} diff --git a/vendor/golang.org/x/tools/internal/imports/mod_cache.go b/vendor/golang.org/x/tools/internal/imports/mod_cache.go new file mode 100644 index 000000000..18dada495 --- /dev/null +++ b/vendor/golang.org/x/tools/internal/imports/mod_cache.go @@ -0,0 +1,236 @@ +// Copyright 2019 The Go Authors. 
All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package imports + +import ( + "context" + "fmt" + "sync" + + "golang.org/x/tools/internal/gopathwalk" +) + +// To find packages to import, the resolver needs to know about all of the +// the packages that could be imported. This includes packages that are +// already in modules that are in (1) the current module, (2) replace targets, +// and (3) packages in the module cache. Packages in (1) and (2) may change over +// time, as the client may edit the current module and locally replaced modules. +// The module cache (which includes all of the packages in (3)) can only +// ever be added to. +// +// The resolver can thus save state about packages in the module cache +// and guarantee that this will not change over time. To obtain information +// about new modules added to the module cache, the module cache should be +// rescanned. +// +// It is OK to serve information about modules that have been deleted, +// as they do still exist. +// TODO(suzmue): can we share information with the caller about +// what module needs to be downloaded to import this package? + +type directoryPackageStatus int + +const ( + _ directoryPackageStatus = iota + directoryScanned + nameLoaded + exportsLoaded +) + +type directoryPackageInfo struct { + // status indicates the extent to which this struct has been filled in. + status directoryPackageStatus + // err is non-nil when there was an error trying to reach status. + err error + + // Set when status >= directoryScanned. + + // dir is the absolute directory of this package. + dir string + rootType gopathwalk.RootType + // nonCanonicalImportPath is the package's expected import path. It may + // not actually be importable at that path. + nonCanonicalImportPath string + + // Module-related information. + moduleDir string // The directory that is the module root of this dir. + moduleName string // The module name that contains this dir. + + // Set when status >= nameLoaded. + + packageName string // the package name, as declared in the source. + + // Set when status >= exportsLoaded. + + exports []string +} + +// reachedStatus returns true when info has a status at least target and any error associated with +// an attempt to reach target. +func (info *directoryPackageInfo) reachedStatus(target directoryPackageStatus) (bool, error) { + if info.err == nil { + return info.status >= target, nil + } + if info.status == target { + return true, info.err + } + return true, nil +} + +// dirInfoCache is a concurrency safe map for storing information about +// directories that may contain packages. +// +// The information in this cache is built incrementally. Entries are initialized in scan. +// No new keys should be added in any other functions, as all directories containing +// packages are identified in scan. +// +// Other functions, including loadExports and findPackage, may update entries in this cache +// as they discover new things about the directory. +// +// The information in the cache is not expected to change for the cache's +// lifetime, so there is no protection against competing writes. Users should +// take care not to hold the cache across changes to the underlying files. +// +// TODO(suzmue): consider other concurrency strategies and data structures (RWLocks, sync.Map, etc) +type dirInfoCache struct { + mu sync.Mutex + // dirs stores information about packages in directories, keyed by absolute path. 
+ dirs map[string]*directoryPackageInfo + listeners map[*int]cacheListener +} + +type cacheListener func(directoryPackageInfo) + +// ScanAndListen calls listener on all the items in the cache, and on anything +// newly added. The returned stop function waits for all in-flight callbacks to +// finish and blocks new ones. +func (d *dirInfoCache) ScanAndListen(ctx context.Context, listener cacheListener) func() { + ctx, cancel := context.WithCancel(ctx) + + // Flushing out all the callbacks is tricky without knowing how many there + // are going to be. Setting an arbitrary limit makes it much easier. + const maxInFlight = 10 + sema := make(chan struct{}, maxInFlight) + for i := 0; i < maxInFlight; i++ { + sema <- struct{}{} + } + + cookie := new(int) // A unique ID we can use for the listener. + + // We can't hold mu while calling the listener. + d.mu.Lock() + var keys []string + for key := range d.dirs { + keys = append(keys, key) + } + d.listeners[cookie] = func(info directoryPackageInfo) { + select { + case <-ctx.Done(): + return + case <-sema: + } + listener(info) + sema <- struct{}{} + } + d.mu.Unlock() + + stop := func() { + cancel() + d.mu.Lock() + delete(d.listeners, cookie) + d.mu.Unlock() + for i := 0; i < maxInFlight; i++ { + <-sema + } + } + + // Process the pre-existing keys. + for _, k := range keys { + select { + case <-ctx.Done(): + return stop + default: + } + if v, ok := d.Load(k); ok { + listener(v) + } + } + + return stop +} + +// Store stores the package info for dir. +func (d *dirInfoCache) Store(dir string, info directoryPackageInfo) { + d.mu.Lock() + _, old := d.dirs[dir] + d.dirs[dir] = &info + var listeners []cacheListener + for _, l := range d.listeners { + listeners = append(listeners, l) + } + d.mu.Unlock() + + if !old { + for _, l := range listeners { + l(info) + } + } +} + +// Load returns a copy of the directoryPackageInfo for absolute directory dir. +func (d *dirInfoCache) Load(dir string) (directoryPackageInfo, bool) { + d.mu.Lock() + defer d.mu.Unlock() + info, ok := d.dirs[dir] + if !ok { + return directoryPackageInfo{}, false + } + return *info, true +} + +// Keys returns the keys currently present in d. +func (d *dirInfoCache) Keys() (keys []string) { + d.mu.Lock() + defer d.mu.Unlock() + for key := range d.dirs { + keys = append(keys, key) + } + return keys +} + +func (d *dirInfoCache) CachePackageName(info directoryPackageInfo) (string, error) { + if loaded, err := info.reachedStatus(nameLoaded); loaded { + return info.packageName, err + } + if scanned, err := info.reachedStatus(directoryScanned); !scanned || err != nil { + return "", fmt.Errorf("cannot read package name, scan error: %v", err) + } + info.packageName, info.err = packageDirToName(info.dir) + info.status = nameLoaded + d.Store(info.dir, info) + return info.packageName, info.err +} + +func (d *dirInfoCache) CacheExports(ctx context.Context, env *ProcessEnv, info directoryPackageInfo) (string, []string, error) { + if reached, _ := info.reachedStatus(exportsLoaded); reached { + return info.packageName, info.exports, info.err + } + if reached, err := info.reachedStatus(nameLoaded); reached && err != nil { + return "", nil, err + } + info.packageName, info.exports, info.err = loadExportsFromFiles(ctx, env, info.dir, false) + if info.err == context.Canceled || info.err == context.DeadlineExceeded { + return info.packageName, info.exports, info.err + } + // The cache structure wants things to proceed linearly. We can skip a + // step here, but only if we succeed. 
+ if info.status == nameLoaded || info.err == nil { + info.status = exportsLoaded + } else { + info.status = nameLoaded + } + d.Store(info.dir, info) + return info.packageName, info.exports, info.err +} diff --git a/vendor/golang.org/x/tools/internal/imports/sortimports.go b/vendor/golang.org/x/tools/internal/imports/sortimports.go new file mode 100644 index 000000000..be8ffa25f --- /dev/null +++ b/vendor/golang.org/x/tools/internal/imports/sortimports.go @@ -0,0 +1,280 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Hacked up copy of go/ast/import.go + +package imports + +import ( + "go/ast" + "go/token" + "sort" + "strconv" +) + +// sortImports sorts runs of consecutive import lines in import blocks in f. +// It also removes duplicate imports when it is possible to do so without data loss. +func sortImports(localPrefix string, fset *token.FileSet, f *ast.File) { + for i, d := range f.Decls { + d, ok := d.(*ast.GenDecl) + if !ok || d.Tok != token.IMPORT { + // Not an import declaration, so we're done. + // Imports are always first. + break + } + + if len(d.Specs) == 0 { + // Empty import block, remove it. + f.Decls = append(f.Decls[:i], f.Decls[i+1:]...) + } + + if !d.Lparen.IsValid() { + // Not a block: sorted by default. + continue + } + + // Identify and sort runs of specs on successive lines. + i := 0 + specs := d.Specs[:0] + for j, s := range d.Specs { + if j > i && fset.Position(s.Pos()).Line > 1+fset.Position(d.Specs[j-1].End()).Line { + // j begins a new run. End this one. + specs = append(specs, sortSpecs(localPrefix, fset, f, d.Specs[i:j])...) + i = j + } + } + specs = append(specs, sortSpecs(localPrefix, fset, f, d.Specs[i:])...) + d.Specs = specs + + // Deduping can leave a blank line before the rparen; clean that up. + if len(d.Specs) > 0 { + lastSpec := d.Specs[len(d.Specs)-1] + lastLine := fset.Position(lastSpec.Pos()).Line + if rParenLine := fset.Position(d.Rparen).Line; rParenLine > lastLine+1 { + fset.File(d.Rparen).MergeLine(rParenLine - 1) + } + } + } +} + +// mergeImports merges all the import declarations into the first one. +// Taken from golang.org/x/tools/ast/astutil. +func mergeImports(fset *token.FileSet, f *ast.File) { + if len(f.Decls) <= 1 { + return + } + + // Merge all the import declarations into the first one. + var first *ast.GenDecl + for i := 0; i < len(f.Decls); i++ { + decl := f.Decls[i] + gen, ok := decl.(*ast.GenDecl) + if !ok || gen.Tok != token.IMPORT || declImports(gen, "C") { + continue + } + if first == nil { + first = gen + continue // Don't touch the first one. + } + // We now know there is more than one package in this import + // declaration. Ensure that it ends up parenthesized. + first.Lparen = first.Pos() + // Move the imports of the other import declaration to the first one. + for _, spec := range gen.Specs { + spec.(*ast.ImportSpec).Path.ValuePos = first.Pos() + first.Specs = append(first.Specs, spec) + } + f.Decls = append(f.Decls[:i], f.Decls[i+1:]...) + i-- + } +} + +// declImports reports whether gen contains an import of path. +// Taken from golang.org/x/tools/ast/astutil. 
+func declImports(gen *ast.GenDecl, path string) bool { + if gen.Tok != token.IMPORT { + return false + } + for _, spec := range gen.Specs { + impspec := spec.(*ast.ImportSpec) + if importPath(impspec) == path { + return true + } + } + return false +} + +func importPath(s ast.Spec) string { + t, err := strconv.Unquote(s.(*ast.ImportSpec).Path.Value) + if err == nil { + return t + } + return "" +} + +func importName(s ast.Spec) string { + n := s.(*ast.ImportSpec).Name + if n == nil { + return "" + } + return n.Name +} + +func importComment(s ast.Spec) string { + c := s.(*ast.ImportSpec).Comment + if c == nil { + return "" + } + return c.Text() +} + +// collapse indicates whether prev may be removed, leaving only next. +func collapse(prev, next ast.Spec) bool { + if importPath(next) != importPath(prev) || importName(next) != importName(prev) { + return false + } + return prev.(*ast.ImportSpec).Comment == nil +} + +type posSpan struct { + Start token.Pos + End token.Pos +} + +func sortSpecs(localPrefix string, fset *token.FileSet, f *ast.File, specs []ast.Spec) []ast.Spec { + // Can't short-circuit here even if specs are already sorted, + // since they might yet need deduplication. + // A lone import, however, may be safely ignored. + if len(specs) <= 1 { + return specs + } + + // Record positions for specs. + pos := make([]posSpan, len(specs)) + for i, s := range specs { + pos[i] = posSpan{s.Pos(), s.End()} + } + + // Identify comments in this range. + // Any comment from pos[0].Start to the final line counts. + lastLine := fset.Position(pos[len(pos)-1].End).Line + cstart := len(f.Comments) + cend := len(f.Comments) + for i, g := range f.Comments { + if g.Pos() < pos[0].Start { + continue + } + if i < cstart { + cstart = i + } + if fset.Position(g.End()).Line > lastLine { + cend = i + break + } + } + comments := f.Comments[cstart:cend] + + // Assign each comment to the import spec preceding it. + importComment := map[*ast.ImportSpec][]*ast.CommentGroup{} + specIndex := 0 + for _, g := range comments { + for specIndex+1 < len(specs) && pos[specIndex+1].Start <= g.Pos() { + specIndex++ + } + s := specs[specIndex].(*ast.ImportSpec) + importComment[s] = append(importComment[s], g) + } + + // Sort the import specs by import path. + // Remove duplicates, when possible without data loss. + // Reassign the import paths to have the same position sequence. + // Reassign each comment to abut the end of its spec. + // Sort the comments by new position. + sort.Sort(byImportSpec{localPrefix, specs}) + + // Dedup. Thanks to our sorting, we can just consider + // adjacent pairs of imports. + deduped := specs[:0] + for i, s := range specs { + if i == len(specs)-1 || !collapse(s, specs[i+1]) { + deduped = append(deduped, s) + } else { + p := s.Pos() + fset.File(p).MergeLine(fset.Position(p).Line) + } + } + specs = deduped + + // Fix up comment positions + for i, s := range specs { + s := s.(*ast.ImportSpec) + if s.Name != nil { + s.Name.NamePos = pos[i].Start + } + s.Path.ValuePos = pos[i].Start + s.EndPos = pos[i].End + nextSpecPos := pos[i].End + + for _, g := range importComment[s] { + for _, c := range g.List { + c.Slash = pos[i].End + nextSpecPos = c.End() + } + } + if i < len(specs)-1 { + pos[i+1].Start = nextSpecPos + pos[i+1].End = nextSpecPos + } + } + + sort.Sort(byCommentPos(comments)) + + // Fixup comments can insert blank lines, because import specs are on different lines. + // We remove those blank lines here by merging import spec to the first import spec line. 
+ firstSpecLine := fset.Position(specs[0].Pos()).Line + for _, s := range specs[1:] { + p := s.Pos() + line := fset.File(p).Line(p) + for previousLine := line - 1; previousLine >= firstSpecLine; { + fset.File(p).MergeLine(previousLine) + previousLine-- + } + } + return specs +} + +type byImportSpec struct { + localPrefix string + specs []ast.Spec // slice of *ast.ImportSpec +} + +func (x byImportSpec) Len() int { return len(x.specs) } +func (x byImportSpec) Swap(i, j int) { x.specs[i], x.specs[j] = x.specs[j], x.specs[i] } +func (x byImportSpec) Less(i, j int) bool { + ipath := importPath(x.specs[i]) + jpath := importPath(x.specs[j]) + + igroup := importGroup(x.localPrefix, ipath) + jgroup := importGroup(x.localPrefix, jpath) + if igroup != jgroup { + return igroup < jgroup + } + + if ipath != jpath { + return ipath < jpath + } + iname := importName(x.specs[i]) + jname := importName(x.specs[j]) + + if iname != jname { + return iname < jname + } + return importComment(x.specs[i]) < importComment(x.specs[j]) +} + +type byCommentPos []*ast.CommentGroup + +func (x byCommentPos) Len() int { return len(x) } +func (x byCommentPos) Swap(i, j int) { x[i], x[j] = x[j], x[i] } +func (x byCommentPos) Less(i, j int) bool { return x[i].Pos() < x[j].Pos() } diff --git a/vendor/golang.org/x/tools/internal/imports/zstdlib.go b/vendor/golang.org/x/tools/internal/imports/zstdlib.go new file mode 100644 index 000000000..ccdd4e0ff --- /dev/null +++ b/vendor/golang.org/x/tools/internal/imports/zstdlib.go @@ -0,0 +1,10733 @@ +// Code generated by mkstdlib.go. DO NOT EDIT. + +package imports + +var stdlib = map[string][]string{ + "archive/tar": []string{ + "ErrFieldTooLong", + "ErrHeader", + "ErrWriteAfterClose", + "ErrWriteTooLong", + "FileInfoHeader", + "Format", + "FormatGNU", + "FormatPAX", + "FormatUSTAR", + "FormatUnknown", + "Header", + "NewReader", + "NewWriter", + "Reader", + "TypeBlock", + "TypeChar", + "TypeCont", + "TypeDir", + "TypeFifo", + "TypeGNULongLink", + "TypeGNULongName", + "TypeGNUSparse", + "TypeLink", + "TypeReg", + "TypeRegA", + "TypeSymlink", + "TypeXGlobalHeader", + "TypeXHeader", + "Writer", + }, + "archive/zip": []string{ + "Compressor", + "Decompressor", + "Deflate", + "ErrAlgorithm", + "ErrChecksum", + "ErrFormat", + "File", + "FileHeader", + "FileInfoHeader", + "NewReader", + "NewWriter", + "OpenReader", + "ReadCloser", + "Reader", + "RegisterCompressor", + "RegisterDecompressor", + "Store", + "Writer", + }, + "bufio": []string{ + "ErrAdvanceTooFar", + "ErrBadReadCount", + "ErrBufferFull", + "ErrFinalToken", + "ErrInvalidUnreadByte", + "ErrInvalidUnreadRune", + "ErrNegativeAdvance", + "ErrNegativeCount", + "ErrTooLong", + "MaxScanTokenSize", + "NewReadWriter", + "NewReader", + "NewReaderSize", + "NewScanner", + "NewWriter", + "NewWriterSize", + "ReadWriter", + "Reader", + "ScanBytes", + "ScanLines", + "ScanRunes", + "ScanWords", + "Scanner", + "SplitFunc", + "Writer", + }, + "bytes": []string{ + "Buffer", + "Compare", + "Contains", + "ContainsAny", + "ContainsRune", + "Count", + "Equal", + "EqualFold", + "ErrTooLarge", + "Fields", + "FieldsFunc", + "HasPrefix", + "HasSuffix", + "Index", + "IndexAny", + "IndexByte", + "IndexFunc", + "IndexRune", + "Join", + "LastIndex", + "LastIndexAny", + "LastIndexByte", + "LastIndexFunc", + "Map", + "MinRead", + "NewBuffer", + "NewBufferString", + "NewReader", + "Reader", + "Repeat", + "Replace", + "ReplaceAll", + "Runes", + "Split", + "SplitAfter", + "SplitAfterN", + "SplitN", + "Title", + "ToLower", + "ToLowerSpecial", + "ToTitle", + 
"ToTitleSpecial", + "ToUpper", + "ToUpperSpecial", + "ToValidUTF8", + "Trim", + "TrimFunc", + "TrimLeft", + "TrimLeftFunc", + "TrimPrefix", + "TrimRight", + "TrimRightFunc", + "TrimSpace", + "TrimSuffix", + }, + "compress/bzip2": []string{ + "NewReader", + "StructuralError", + }, + "compress/flate": []string{ + "BestCompression", + "BestSpeed", + "CorruptInputError", + "DefaultCompression", + "HuffmanOnly", + "InternalError", + "NewReader", + "NewReaderDict", + "NewWriter", + "NewWriterDict", + "NoCompression", + "ReadError", + "Reader", + "Resetter", + "WriteError", + "Writer", + }, + "compress/gzip": []string{ + "BestCompression", + "BestSpeed", + "DefaultCompression", + "ErrChecksum", + "ErrHeader", + "Header", + "HuffmanOnly", + "NewReader", + "NewWriter", + "NewWriterLevel", + "NoCompression", + "Reader", + "Writer", + }, + "compress/lzw": []string{ + "LSB", + "MSB", + "NewReader", + "NewWriter", + "Order", + }, + "compress/zlib": []string{ + "BestCompression", + "BestSpeed", + "DefaultCompression", + "ErrChecksum", + "ErrDictionary", + "ErrHeader", + "HuffmanOnly", + "NewReader", + "NewReaderDict", + "NewWriter", + "NewWriterLevel", + "NewWriterLevelDict", + "NoCompression", + "Resetter", + "Writer", + }, + "container/heap": []string{ + "Fix", + "Init", + "Interface", + "Pop", + "Push", + "Remove", + }, + "container/list": []string{ + "Element", + "List", + "New", + }, + "container/ring": []string{ + "New", + "Ring", + }, + "context": []string{ + "Background", + "CancelFunc", + "Canceled", + "Context", + "DeadlineExceeded", + "TODO", + "WithCancel", + "WithDeadline", + "WithTimeout", + "WithValue", + }, + "crypto": []string{ + "BLAKE2b_256", + "BLAKE2b_384", + "BLAKE2b_512", + "BLAKE2s_256", + "Decrypter", + "DecrypterOpts", + "Hash", + "MD4", + "MD5", + "MD5SHA1", + "PrivateKey", + "PublicKey", + "RIPEMD160", + "RegisterHash", + "SHA1", + "SHA224", + "SHA256", + "SHA384", + "SHA3_224", + "SHA3_256", + "SHA3_384", + "SHA3_512", + "SHA512", + "SHA512_224", + "SHA512_256", + "Signer", + "SignerOpts", + }, + "crypto/aes": []string{ + "BlockSize", + "KeySizeError", + "NewCipher", + }, + "crypto/cipher": []string{ + "AEAD", + "Block", + "BlockMode", + "NewCBCDecrypter", + "NewCBCEncrypter", + "NewCFBDecrypter", + "NewCFBEncrypter", + "NewCTR", + "NewGCM", + "NewGCMWithNonceSize", + "NewGCMWithTagSize", + "NewOFB", + "Stream", + "StreamReader", + "StreamWriter", + }, + "crypto/des": []string{ + "BlockSize", + "KeySizeError", + "NewCipher", + "NewTripleDESCipher", + }, + "crypto/dsa": []string{ + "ErrInvalidPublicKey", + "GenerateKey", + "GenerateParameters", + "L1024N160", + "L2048N224", + "L2048N256", + "L3072N256", + "ParameterSizes", + "Parameters", + "PrivateKey", + "PublicKey", + "Sign", + "Verify", + }, + "crypto/ecdsa": []string{ + "GenerateKey", + "PrivateKey", + "PublicKey", + "Sign", + "SignASN1", + "Verify", + "VerifyASN1", + }, + "crypto/ed25519": []string{ + "GenerateKey", + "NewKeyFromSeed", + "PrivateKey", + "PrivateKeySize", + "PublicKey", + "PublicKeySize", + "SeedSize", + "Sign", + "SignatureSize", + "Verify", + }, + "crypto/elliptic": []string{ + "Curve", + "CurveParams", + "GenerateKey", + "Marshal", + "MarshalCompressed", + "P224", + "P256", + "P384", + "P521", + "Unmarshal", + "UnmarshalCompressed", + }, + "crypto/hmac": []string{ + "Equal", + "New", + }, + "crypto/md5": []string{ + "BlockSize", + "New", + "Size", + "Sum", + }, + "crypto/rand": []string{ + "Int", + "Prime", + "Read", + "Reader", + }, + "crypto/rc4": []string{ + "Cipher", + "KeySizeError", + 
"NewCipher", + }, + "crypto/rsa": []string{ + "CRTValue", + "DecryptOAEP", + "DecryptPKCS1v15", + "DecryptPKCS1v15SessionKey", + "EncryptOAEP", + "EncryptPKCS1v15", + "ErrDecryption", + "ErrMessageTooLong", + "ErrVerification", + "GenerateKey", + "GenerateMultiPrimeKey", + "OAEPOptions", + "PKCS1v15DecryptOptions", + "PSSOptions", + "PSSSaltLengthAuto", + "PSSSaltLengthEqualsHash", + "PrecomputedValues", + "PrivateKey", + "PublicKey", + "SignPKCS1v15", + "SignPSS", + "VerifyPKCS1v15", + "VerifyPSS", + }, + "crypto/sha1": []string{ + "BlockSize", + "New", + "Size", + "Sum", + }, + "crypto/sha256": []string{ + "BlockSize", + "New", + "New224", + "Size", + "Size224", + "Sum224", + "Sum256", + }, + "crypto/sha512": []string{ + "BlockSize", + "New", + "New384", + "New512_224", + "New512_256", + "Size", + "Size224", + "Size256", + "Size384", + "Sum384", + "Sum512", + "Sum512_224", + "Sum512_256", + }, + "crypto/subtle": []string{ + "ConstantTimeByteEq", + "ConstantTimeCompare", + "ConstantTimeCopy", + "ConstantTimeEq", + "ConstantTimeLessOrEq", + "ConstantTimeSelect", + }, + "crypto/tls": []string{ + "Certificate", + "CertificateRequestInfo", + "CipherSuite", + "CipherSuiteName", + "CipherSuites", + "Client", + "ClientAuthType", + "ClientHelloInfo", + "ClientSessionCache", + "ClientSessionState", + "Config", + "Conn", + "ConnectionState", + "CurveID", + "CurveP256", + "CurveP384", + "CurveP521", + "Dial", + "DialWithDialer", + "Dialer", + "ECDSAWithP256AndSHA256", + "ECDSAWithP384AndSHA384", + "ECDSAWithP521AndSHA512", + "ECDSAWithSHA1", + "Ed25519", + "InsecureCipherSuites", + "Listen", + "LoadX509KeyPair", + "NewLRUClientSessionCache", + "NewListener", + "NoClientCert", + "PKCS1WithSHA1", + "PKCS1WithSHA256", + "PKCS1WithSHA384", + "PKCS1WithSHA512", + "PSSWithSHA256", + "PSSWithSHA384", + "PSSWithSHA512", + "RecordHeaderError", + "RenegotiateFreelyAsClient", + "RenegotiateNever", + "RenegotiateOnceAsClient", + "RenegotiationSupport", + "RequestClientCert", + "RequireAndVerifyClientCert", + "RequireAnyClientCert", + "Server", + "SignatureScheme", + "TLS_AES_128_GCM_SHA256", + "TLS_AES_256_GCM_SHA384", + "TLS_CHACHA20_POLY1305_SHA256", + "TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA", + "TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256", + "TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256", + "TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA", + "TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384", + "TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305", + "TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256", + "TLS_ECDHE_ECDSA_WITH_RC4_128_SHA", + "TLS_ECDHE_RSA_WITH_3DES_EDE_CBC_SHA", + "TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA", + "TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256", + "TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256", + "TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA", + "TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384", + "TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305", + "TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256", + "TLS_ECDHE_RSA_WITH_RC4_128_SHA", + "TLS_FALLBACK_SCSV", + "TLS_RSA_WITH_3DES_EDE_CBC_SHA", + "TLS_RSA_WITH_AES_128_CBC_SHA", + "TLS_RSA_WITH_AES_128_CBC_SHA256", + "TLS_RSA_WITH_AES_128_GCM_SHA256", + "TLS_RSA_WITH_AES_256_CBC_SHA", + "TLS_RSA_WITH_AES_256_GCM_SHA384", + "TLS_RSA_WITH_RC4_128_SHA", + "VerifyClientCertIfGiven", + "VersionSSL30", + "VersionTLS10", + "VersionTLS11", + "VersionTLS12", + "VersionTLS13", + "X25519", + "X509KeyPair", + }, + "crypto/x509": []string{ + "CANotAuthorizedForExtKeyUsage", + "CANotAuthorizedForThisName", + "CertPool", + "Certificate", + "CertificateInvalidError", + "CertificateRequest", + "ConstraintViolationError", + "CreateCertificate", + 
"CreateCertificateRequest", + "CreateRevocationList", + "DSA", + "DSAWithSHA1", + "DSAWithSHA256", + "DecryptPEMBlock", + "ECDSA", + "ECDSAWithSHA1", + "ECDSAWithSHA256", + "ECDSAWithSHA384", + "ECDSAWithSHA512", + "Ed25519", + "EncryptPEMBlock", + "ErrUnsupportedAlgorithm", + "Expired", + "ExtKeyUsage", + "ExtKeyUsageAny", + "ExtKeyUsageClientAuth", + "ExtKeyUsageCodeSigning", + "ExtKeyUsageEmailProtection", + "ExtKeyUsageIPSECEndSystem", + "ExtKeyUsageIPSECTunnel", + "ExtKeyUsageIPSECUser", + "ExtKeyUsageMicrosoftCommercialCodeSigning", + "ExtKeyUsageMicrosoftKernelCodeSigning", + "ExtKeyUsageMicrosoftServerGatedCrypto", + "ExtKeyUsageNetscapeServerGatedCrypto", + "ExtKeyUsageOCSPSigning", + "ExtKeyUsageServerAuth", + "ExtKeyUsageTimeStamping", + "HostnameError", + "IncompatibleUsage", + "IncorrectPasswordError", + "InsecureAlgorithmError", + "InvalidReason", + "IsEncryptedPEMBlock", + "KeyUsage", + "KeyUsageCRLSign", + "KeyUsageCertSign", + "KeyUsageContentCommitment", + "KeyUsageDataEncipherment", + "KeyUsageDecipherOnly", + "KeyUsageDigitalSignature", + "KeyUsageEncipherOnly", + "KeyUsageKeyAgreement", + "KeyUsageKeyEncipherment", + "MD2WithRSA", + "MD5WithRSA", + "MarshalECPrivateKey", + "MarshalPKCS1PrivateKey", + "MarshalPKCS1PublicKey", + "MarshalPKCS8PrivateKey", + "MarshalPKIXPublicKey", + "NameConstraintsWithoutSANs", + "NameMismatch", + "NewCertPool", + "NotAuthorizedToSign", + "PEMCipher", + "PEMCipher3DES", + "PEMCipherAES128", + "PEMCipherAES192", + "PEMCipherAES256", + "PEMCipherDES", + "ParseCRL", + "ParseCertificate", + "ParseCertificateRequest", + "ParseCertificates", + "ParseDERCRL", + "ParseECPrivateKey", + "ParsePKCS1PrivateKey", + "ParsePKCS1PublicKey", + "ParsePKCS8PrivateKey", + "ParsePKIXPublicKey", + "PublicKeyAlgorithm", + "PureEd25519", + "RSA", + "RevocationList", + "SHA1WithRSA", + "SHA256WithRSA", + "SHA256WithRSAPSS", + "SHA384WithRSA", + "SHA384WithRSAPSS", + "SHA512WithRSA", + "SHA512WithRSAPSS", + "SignatureAlgorithm", + "SystemCertPool", + "SystemRootsError", + "TooManyConstraints", + "TooManyIntermediates", + "UnconstrainedName", + "UnhandledCriticalExtension", + "UnknownAuthorityError", + "UnknownPublicKeyAlgorithm", + "UnknownSignatureAlgorithm", + "VerifyOptions", + }, + "crypto/x509/pkix": []string{ + "AlgorithmIdentifier", + "AttributeTypeAndValue", + "AttributeTypeAndValueSET", + "CertificateList", + "Extension", + "Name", + "RDNSequence", + "RelativeDistinguishedNameSET", + "RevokedCertificate", + "TBSCertificateList", + }, + "database/sql": []string{ + "ColumnType", + "Conn", + "DB", + "DBStats", + "Drivers", + "ErrConnDone", + "ErrNoRows", + "ErrTxDone", + "IsolationLevel", + "LevelDefault", + "LevelLinearizable", + "LevelReadCommitted", + "LevelReadUncommitted", + "LevelRepeatableRead", + "LevelSerializable", + "LevelSnapshot", + "LevelWriteCommitted", + "Named", + "NamedArg", + "NullBool", + "NullFloat64", + "NullInt32", + "NullInt64", + "NullString", + "NullTime", + "Open", + "OpenDB", + "Out", + "RawBytes", + "Register", + "Result", + "Row", + "Rows", + "Scanner", + "Stmt", + "Tx", + "TxOptions", + }, + "database/sql/driver": []string{ + "Bool", + "ColumnConverter", + "Conn", + "ConnBeginTx", + "ConnPrepareContext", + "Connector", + "DefaultParameterConverter", + "Driver", + "DriverContext", + "ErrBadConn", + "ErrRemoveArgument", + "ErrSkip", + "Execer", + "ExecerContext", + "Int32", + "IsScanValue", + "IsValue", + "IsolationLevel", + "NamedValue", + "NamedValueChecker", + "NotNull", + "Null", + "Pinger", + "Queryer", + "QueryerContext", 
+ "Result", + "ResultNoRows", + "Rows", + "RowsAffected", + "RowsColumnTypeDatabaseTypeName", + "RowsColumnTypeLength", + "RowsColumnTypeNullable", + "RowsColumnTypePrecisionScale", + "RowsColumnTypeScanType", + "RowsNextResultSet", + "SessionResetter", + "Stmt", + "StmtExecContext", + "StmtQueryContext", + "String", + "Tx", + "TxOptions", + "Validator", + "Value", + "ValueConverter", + "Valuer", + }, + "debug/dwarf": []string{ + "AddrType", + "ArrayType", + "Attr", + "AttrAbstractOrigin", + "AttrAccessibility", + "AttrAddrBase", + "AttrAddrClass", + "AttrAlignment", + "AttrAllocated", + "AttrArtificial", + "AttrAssociated", + "AttrBaseTypes", + "AttrBinaryScale", + "AttrBitOffset", + "AttrBitSize", + "AttrByteSize", + "AttrCallAllCalls", + "AttrCallAllSourceCalls", + "AttrCallAllTailCalls", + "AttrCallColumn", + "AttrCallDataLocation", + "AttrCallDataValue", + "AttrCallFile", + "AttrCallLine", + "AttrCallOrigin", + "AttrCallPC", + "AttrCallParameter", + "AttrCallReturnPC", + "AttrCallTailCall", + "AttrCallTarget", + "AttrCallTargetClobbered", + "AttrCallValue", + "AttrCalling", + "AttrCommonRef", + "AttrCompDir", + "AttrConstExpr", + "AttrConstValue", + "AttrContainingType", + "AttrCount", + "AttrDataBitOffset", + "AttrDataLocation", + "AttrDataMemberLoc", + "AttrDecimalScale", + "AttrDecimalSign", + "AttrDeclColumn", + "AttrDeclFile", + "AttrDeclLine", + "AttrDeclaration", + "AttrDefaultValue", + "AttrDefaulted", + "AttrDeleted", + "AttrDescription", + "AttrDigitCount", + "AttrDiscr", + "AttrDiscrList", + "AttrDiscrValue", + "AttrDwoName", + "AttrElemental", + "AttrEncoding", + "AttrEndianity", + "AttrEntrypc", + "AttrEnumClass", + "AttrExplicit", + "AttrExportSymbols", + "AttrExtension", + "AttrExternal", + "AttrFrameBase", + "AttrFriend", + "AttrHighpc", + "AttrIdentifierCase", + "AttrImport", + "AttrInline", + "AttrIsOptional", + "AttrLanguage", + "AttrLinkageName", + "AttrLocation", + "AttrLoclistsBase", + "AttrLowerBound", + "AttrLowpc", + "AttrMacroInfo", + "AttrMacros", + "AttrMainSubprogram", + "AttrMutable", + "AttrName", + "AttrNamelistItem", + "AttrNoreturn", + "AttrObjectPointer", + "AttrOrdering", + "AttrPictureString", + "AttrPriority", + "AttrProducer", + "AttrPrototyped", + "AttrPure", + "AttrRanges", + "AttrRank", + "AttrRecursive", + "AttrReference", + "AttrReturnAddr", + "AttrRnglistsBase", + "AttrRvalueReference", + "AttrSegment", + "AttrSibling", + "AttrSignature", + "AttrSmall", + "AttrSpecification", + "AttrStartScope", + "AttrStaticLink", + "AttrStmtList", + "AttrStrOffsetsBase", + "AttrStride", + "AttrStrideSize", + "AttrStringLength", + "AttrStringLengthBitSize", + "AttrStringLengthByteSize", + "AttrThreadsScaled", + "AttrTrampoline", + "AttrType", + "AttrUpperBound", + "AttrUseLocation", + "AttrUseUTF8", + "AttrVarParam", + "AttrVirtuality", + "AttrVisibility", + "AttrVtableElemLoc", + "BasicType", + "BoolType", + "CharType", + "Class", + "ClassAddrPtr", + "ClassAddress", + "ClassBlock", + "ClassConstant", + "ClassExprLoc", + "ClassFlag", + "ClassLinePtr", + "ClassLocList", + "ClassLocListPtr", + "ClassMacPtr", + "ClassRangeListPtr", + "ClassReference", + "ClassReferenceAlt", + "ClassReferenceSig", + "ClassRngList", + "ClassRngListsPtr", + "ClassStrOffsetsPtr", + "ClassString", + "ClassStringAlt", + "ClassUnknown", + "CommonType", + "ComplexType", + "Data", + "DecodeError", + "DotDotDotType", + "Entry", + "EnumType", + "EnumValue", + "ErrUnknownPC", + "Field", + "FloatType", + "FuncType", + "IntType", + "LineEntry", + "LineFile", + "LineReader", + 
"LineReaderPos", + "New", + "Offset", + "PtrType", + "QualType", + "Reader", + "StructField", + "StructType", + "Tag", + "TagAccessDeclaration", + "TagArrayType", + "TagAtomicType", + "TagBaseType", + "TagCallSite", + "TagCallSiteParameter", + "TagCatchDwarfBlock", + "TagClassType", + "TagCoarrayType", + "TagCommonDwarfBlock", + "TagCommonInclusion", + "TagCompileUnit", + "TagCondition", + "TagConstType", + "TagConstant", + "TagDwarfProcedure", + "TagDynamicType", + "TagEntryPoint", + "TagEnumerationType", + "TagEnumerator", + "TagFileType", + "TagFormalParameter", + "TagFriend", + "TagGenericSubrange", + "TagImmutableType", + "TagImportedDeclaration", + "TagImportedModule", + "TagImportedUnit", + "TagInheritance", + "TagInlinedSubroutine", + "TagInterfaceType", + "TagLabel", + "TagLexDwarfBlock", + "TagMember", + "TagModule", + "TagMutableType", + "TagNamelist", + "TagNamelistItem", + "TagNamespace", + "TagPackedType", + "TagPartialUnit", + "TagPointerType", + "TagPtrToMemberType", + "TagReferenceType", + "TagRestrictType", + "TagRvalueReferenceType", + "TagSetType", + "TagSharedType", + "TagSkeletonUnit", + "TagStringType", + "TagStructType", + "TagSubprogram", + "TagSubrangeType", + "TagSubroutineType", + "TagTemplateAlias", + "TagTemplateTypeParameter", + "TagTemplateValueParameter", + "TagThrownType", + "TagTryDwarfBlock", + "TagTypeUnit", + "TagTypedef", + "TagUnionType", + "TagUnspecifiedParameters", + "TagUnspecifiedType", + "TagVariable", + "TagVariant", + "TagVariantPart", + "TagVolatileType", + "TagWithStmt", + "Type", + "TypedefType", + "UcharType", + "UintType", + "UnspecifiedType", + "UnsupportedType", + "VoidType", + }, + "debug/elf": []string{ + "ARM_MAGIC_TRAMP_NUMBER", + "COMPRESS_HIOS", + "COMPRESS_HIPROC", + "COMPRESS_LOOS", + "COMPRESS_LOPROC", + "COMPRESS_ZLIB", + "Chdr32", + "Chdr64", + "Class", + "CompressionType", + "DF_BIND_NOW", + "DF_ORIGIN", + "DF_STATIC_TLS", + "DF_SYMBOLIC", + "DF_TEXTREL", + "DT_ADDRRNGHI", + "DT_ADDRRNGLO", + "DT_AUDIT", + "DT_AUXILIARY", + "DT_BIND_NOW", + "DT_CHECKSUM", + "DT_CONFIG", + "DT_DEBUG", + "DT_DEPAUDIT", + "DT_ENCODING", + "DT_FEATURE", + "DT_FILTER", + "DT_FINI", + "DT_FINI_ARRAY", + "DT_FINI_ARRAYSZ", + "DT_FLAGS", + "DT_FLAGS_1", + "DT_GNU_CONFLICT", + "DT_GNU_CONFLICTSZ", + "DT_GNU_HASH", + "DT_GNU_LIBLIST", + "DT_GNU_LIBLISTSZ", + "DT_GNU_PRELINKED", + "DT_HASH", + "DT_HIOS", + "DT_HIPROC", + "DT_INIT", + "DT_INIT_ARRAY", + "DT_INIT_ARRAYSZ", + "DT_JMPREL", + "DT_LOOS", + "DT_LOPROC", + "DT_MIPS_AUX_DYNAMIC", + "DT_MIPS_BASE_ADDRESS", + "DT_MIPS_COMPACT_SIZE", + "DT_MIPS_CONFLICT", + "DT_MIPS_CONFLICTNO", + "DT_MIPS_CXX_FLAGS", + "DT_MIPS_DELTA_CLASS", + "DT_MIPS_DELTA_CLASSSYM", + "DT_MIPS_DELTA_CLASSSYM_NO", + "DT_MIPS_DELTA_CLASS_NO", + "DT_MIPS_DELTA_INSTANCE", + "DT_MIPS_DELTA_INSTANCE_NO", + "DT_MIPS_DELTA_RELOC", + "DT_MIPS_DELTA_RELOC_NO", + "DT_MIPS_DELTA_SYM", + "DT_MIPS_DELTA_SYM_NO", + "DT_MIPS_DYNSTR_ALIGN", + "DT_MIPS_FLAGS", + "DT_MIPS_GOTSYM", + "DT_MIPS_GP_VALUE", + "DT_MIPS_HIDDEN_GOTIDX", + "DT_MIPS_HIPAGENO", + "DT_MIPS_ICHECKSUM", + "DT_MIPS_INTERFACE", + "DT_MIPS_INTERFACE_SIZE", + "DT_MIPS_IVERSION", + "DT_MIPS_LIBLIST", + "DT_MIPS_LIBLISTNO", + "DT_MIPS_LOCALPAGE_GOTIDX", + "DT_MIPS_LOCAL_GOTIDX", + "DT_MIPS_LOCAL_GOTNO", + "DT_MIPS_MSYM", + "DT_MIPS_OPTIONS", + "DT_MIPS_PERF_SUFFIX", + "DT_MIPS_PIXIE_INIT", + "DT_MIPS_PLTGOT", + "DT_MIPS_PROTECTED_GOTIDX", + "DT_MIPS_RLD_MAP", + "DT_MIPS_RLD_MAP_REL", + "DT_MIPS_RLD_TEXT_RESOLVE_ADDR", + "DT_MIPS_RLD_VERSION", + "DT_MIPS_RWPLT", + 
"DT_MIPS_SYMBOL_LIB", + "DT_MIPS_SYMTABNO", + "DT_MIPS_TIME_STAMP", + "DT_MIPS_UNREFEXTNO", + "DT_MOVEENT", + "DT_MOVESZ", + "DT_MOVETAB", + "DT_NEEDED", + "DT_NULL", + "DT_PLTGOT", + "DT_PLTPAD", + "DT_PLTPADSZ", + "DT_PLTREL", + "DT_PLTRELSZ", + "DT_POSFLAG_1", + "DT_PPC64_GLINK", + "DT_PPC64_OPD", + "DT_PPC64_OPDSZ", + "DT_PPC64_OPT", + "DT_PPC_GOT", + "DT_PPC_OPT", + "DT_PREINIT_ARRAY", + "DT_PREINIT_ARRAYSZ", + "DT_REL", + "DT_RELA", + "DT_RELACOUNT", + "DT_RELAENT", + "DT_RELASZ", + "DT_RELCOUNT", + "DT_RELENT", + "DT_RELSZ", + "DT_RPATH", + "DT_RUNPATH", + "DT_SONAME", + "DT_SPARC_REGISTER", + "DT_STRSZ", + "DT_STRTAB", + "DT_SYMBOLIC", + "DT_SYMENT", + "DT_SYMINENT", + "DT_SYMINFO", + "DT_SYMINSZ", + "DT_SYMTAB", + "DT_SYMTAB_SHNDX", + "DT_TEXTREL", + "DT_TLSDESC_GOT", + "DT_TLSDESC_PLT", + "DT_USED", + "DT_VALRNGHI", + "DT_VALRNGLO", + "DT_VERDEF", + "DT_VERDEFNUM", + "DT_VERNEED", + "DT_VERNEEDNUM", + "DT_VERSYM", + "Data", + "Dyn32", + "Dyn64", + "DynFlag", + "DynTag", + "EI_ABIVERSION", + "EI_CLASS", + "EI_DATA", + "EI_NIDENT", + "EI_OSABI", + "EI_PAD", + "EI_VERSION", + "ELFCLASS32", + "ELFCLASS64", + "ELFCLASSNONE", + "ELFDATA2LSB", + "ELFDATA2MSB", + "ELFDATANONE", + "ELFMAG", + "ELFOSABI_86OPEN", + "ELFOSABI_AIX", + "ELFOSABI_ARM", + "ELFOSABI_AROS", + "ELFOSABI_CLOUDABI", + "ELFOSABI_FENIXOS", + "ELFOSABI_FREEBSD", + "ELFOSABI_HPUX", + "ELFOSABI_HURD", + "ELFOSABI_IRIX", + "ELFOSABI_LINUX", + "ELFOSABI_MODESTO", + "ELFOSABI_NETBSD", + "ELFOSABI_NONE", + "ELFOSABI_NSK", + "ELFOSABI_OPENBSD", + "ELFOSABI_OPENVMS", + "ELFOSABI_SOLARIS", + "ELFOSABI_STANDALONE", + "ELFOSABI_TRU64", + "EM_386", + "EM_486", + "EM_56800EX", + "EM_68HC05", + "EM_68HC08", + "EM_68HC11", + "EM_68HC12", + "EM_68HC16", + "EM_68K", + "EM_78KOR", + "EM_8051", + "EM_860", + "EM_88K", + "EM_960", + "EM_AARCH64", + "EM_ALPHA", + "EM_ALPHA_STD", + "EM_ALTERA_NIOS2", + "EM_AMDGPU", + "EM_ARC", + "EM_ARCA", + "EM_ARC_COMPACT", + "EM_ARC_COMPACT2", + "EM_ARM", + "EM_AVR", + "EM_AVR32", + "EM_BA1", + "EM_BA2", + "EM_BLACKFIN", + "EM_BPF", + "EM_C166", + "EM_CDP", + "EM_CE", + "EM_CLOUDSHIELD", + "EM_COGE", + "EM_COLDFIRE", + "EM_COOL", + "EM_COREA_1ST", + "EM_COREA_2ND", + "EM_CR", + "EM_CR16", + "EM_CRAYNV2", + "EM_CRIS", + "EM_CRX", + "EM_CSR_KALIMBA", + "EM_CUDA", + "EM_CYPRESS_M8C", + "EM_D10V", + "EM_D30V", + "EM_DSP24", + "EM_DSPIC30F", + "EM_DXP", + "EM_ECOG1", + "EM_ECOG16", + "EM_ECOG1X", + "EM_ECOG2", + "EM_ETPU", + "EM_EXCESS", + "EM_F2MC16", + "EM_FIREPATH", + "EM_FR20", + "EM_FR30", + "EM_FT32", + "EM_FX66", + "EM_H8S", + "EM_H8_300", + "EM_H8_300H", + "EM_H8_500", + "EM_HUANY", + "EM_IA_64", + "EM_INTEL205", + "EM_INTEL206", + "EM_INTEL207", + "EM_INTEL208", + "EM_INTEL209", + "EM_IP2K", + "EM_JAVELIN", + "EM_K10M", + "EM_KM32", + "EM_KMX16", + "EM_KMX32", + "EM_KMX8", + "EM_KVARC", + "EM_L10M", + "EM_LANAI", + "EM_LATTICEMICO32", + "EM_M16C", + "EM_M32", + "EM_M32C", + "EM_M32R", + "EM_MANIK", + "EM_MAX", + "EM_MAXQ30", + "EM_MCHP_PIC", + "EM_MCST_ELBRUS", + "EM_ME16", + "EM_METAG", + "EM_MICROBLAZE", + "EM_MIPS", + "EM_MIPS_RS3_LE", + "EM_MIPS_RS4_BE", + "EM_MIPS_X", + "EM_MMA", + "EM_MMDSP_PLUS", + "EM_MMIX", + "EM_MN10200", + "EM_MN10300", + "EM_MOXIE", + "EM_MSP430", + "EM_NCPU", + "EM_NDR1", + "EM_NDS32", + "EM_NONE", + "EM_NORC", + "EM_NS32K", + "EM_OPEN8", + "EM_OPENRISC", + "EM_PARISC", + "EM_PCP", + "EM_PDP10", + "EM_PDP11", + "EM_PDSP", + "EM_PJ", + "EM_PPC", + "EM_PPC64", + "EM_PRISM", + "EM_QDSP6", + "EM_R32C", + "EM_RCE", + "EM_RH32", + "EM_RISCV", + "EM_RL78", + "EM_RS08", + 
"EM_RX", + "EM_S370", + "EM_S390", + "EM_SCORE7", + "EM_SEP", + "EM_SE_C17", + "EM_SE_C33", + "EM_SH", + "EM_SHARC", + "EM_SLE9X", + "EM_SNP1K", + "EM_SPARC", + "EM_SPARC32PLUS", + "EM_SPARCV9", + "EM_ST100", + "EM_ST19", + "EM_ST200", + "EM_ST7", + "EM_ST9PLUS", + "EM_STARCORE", + "EM_STM8", + "EM_STXP7X", + "EM_SVX", + "EM_TILE64", + "EM_TILEGX", + "EM_TILEPRO", + "EM_TINYJ", + "EM_TI_ARP32", + "EM_TI_C2000", + "EM_TI_C5500", + "EM_TI_C6000", + "EM_TI_PRU", + "EM_TMM_GPP", + "EM_TPC", + "EM_TRICORE", + "EM_TRIMEDIA", + "EM_TSK3000", + "EM_UNICORE", + "EM_V800", + "EM_V850", + "EM_VAX", + "EM_VIDEOCORE", + "EM_VIDEOCORE3", + "EM_VIDEOCORE5", + "EM_VISIUM", + "EM_VPP500", + "EM_X86_64", + "EM_XCORE", + "EM_XGATE", + "EM_XIMO16", + "EM_XTENSA", + "EM_Z80", + "EM_ZSP", + "ET_CORE", + "ET_DYN", + "ET_EXEC", + "ET_HIOS", + "ET_HIPROC", + "ET_LOOS", + "ET_LOPROC", + "ET_NONE", + "ET_REL", + "EV_CURRENT", + "EV_NONE", + "ErrNoSymbols", + "File", + "FileHeader", + "FormatError", + "Header32", + "Header64", + "ImportedSymbol", + "Machine", + "NT_FPREGSET", + "NT_PRPSINFO", + "NT_PRSTATUS", + "NType", + "NewFile", + "OSABI", + "Open", + "PF_MASKOS", + "PF_MASKPROC", + "PF_R", + "PF_W", + "PF_X", + "PT_AARCH64_ARCHEXT", + "PT_AARCH64_UNWIND", + "PT_ARM_ARCHEXT", + "PT_ARM_EXIDX", + "PT_DYNAMIC", + "PT_GNU_EH_FRAME", + "PT_GNU_MBIND_HI", + "PT_GNU_MBIND_LO", + "PT_GNU_PROPERTY", + "PT_GNU_RELRO", + "PT_GNU_STACK", + "PT_HIOS", + "PT_HIPROC", + "PT_INTERP", + "PT_LOAD", + "PT_LOOS", + "PT_LOPROC", + "PT_MIPS_ABIFLAGS", + "PT_MIPS_OPTIONS", + "PT_MIPS_REGINFO", + "PT_MIPS_RTPROC", + "PT_NOTE", + "PT_NULL", + "PT_OPENBSD_BOOTDATA", + "PT_OPENBSD_RANDOMIZE", + "PT_OPENBSD_WXNEEDED", + "PT_PAX_FLAGS", + "PT_PHDR", + "PT_S390_PGSTE", + "PT_SHLIB", + "PT_SUNWSTACK", + "PT_SUNW_EH_FRAME", + "PT_TLS", + "Prog", + "Prog32", + "Prog64", + "ProgFlag", + "ProgHeader", + "ProgType", + "R_386", + "R_386_16", + "R_386_32", + "R_386_32PLT", + "R_386_8", + "R_386_COPY", + "R_386_GLOB_DAT", + "R_386_GOT32", + "R_386_GOT32X", + "R_386_GOTOFF", + "R_386_GOTPC", + "R_386_IRELATIVE", + "R_386_JMP_SLOT", + "R_386_NONE", + "R_386_PC16", + "R_386_PC32", + "R_386_PC8", + "R_386_PLT32", + "R_386_RELATIVE", + "R_386_SIZE32", + "R_386_TLS_DESC", + "R_386_TLS_DESC_CALL", + "R_386_TLS_DTPMOD32", + "R_386_TLS_DTPOFF32", + "R_386_TLS_GD", + "R_386_TLS_GD_32", + "R_386_TLS_GD_CALL", + "R_386_TLS_GD_POP", + "R_386_TLS_GD_PUSH", + "R_386_TLS_GOTDESC", + "R_386_TLS_GOTIE", + "R_386_TLS_IE", + "R_386_TLS_IE_32", + "R_386_TLS_LDM", + "R_386_TLS_LDM_32", + "R_386_TLS_LDM_CALL", + "R_386_TLS_LDM_POP", + "R_386_TLS_LDM_PUSH", + "R_386_TLS_LDO_32", + "R_386_TLS_LE", + "R_386_TLS_LE_32", + "R_386_TLS_TPOFF", + "R_386_TLS_TPOFF32", + "R_390", + "R_390_12", + "R_390_16", + "R_390_20", + "R_390_32", + "R_390_64", + "R_390_8", + "R_390_COPY", + "R_390_GLOB_DAT", + "R_390_GOT12", + "R_390_GOT16", + "R_390_GOT20", + "R_390_GOT32", + "R_390_GOT64", + "R_390_GOTENT", + "R_390_GOTOFF", + "R_390_GOTOFF16", + "R_390_GOTOFF64", + "R_390_GOTPC", + "R_390_GOTPCDBL", + "R_390_GOTPLT12", + "R_390_GOTPLT16", + "R_390_GOTPLT20", + "R_390_GOTPLT32", + "R_390_GOTPLT64", + "R_390_GOTPLTENT", + "R_390_GOTPLTOFF16", + "R_390_GOTPLTOFF32", + "R_390_GOTPLTOFF64", + "R_390_JMP_SLOT", + "R_390_NONE", + "R_390_PC16", + "R_390_PC16DBL", + "R_390_PC32", + "R_390_PC32DBL", + "R_390_PC64", + "R_390_PLT16DBL", + "R_390_PLT32", + "R_390_PLT32DBL", + "R_390_PLT64", + "R_390_RELATIVE", + "R_390_TLS_DTPMOD", + "R_390_TLS_DTPOFF", + "R_390_TLS_GD32", + "R_390_TLS_GD64", + 
"R_390_TLS_GDCALL", + "R_390_TLS_GOTIE12", + "R_390_TLS_GOTIE20", + "R_390_TLS_GOTIE32", + "R_390_TLS_GOTIE64", + "R_390_TLS_IE32", + "R_390_TLS_IE64", + "R_390_TLS_IEENT", + "R_390_TLS_LDCALL", + "R_390_TLS_LDM32", + "R_390_TLS_LDM64", + "R_390_TLS_LDO32", + "R_390_TLS_LDO64", + "R_390_TLS_LE32", + "R_390_TLS_LE64", + "R_390_TLS_LOAD", + "R_390_TLS_TPOFF", + "R_AARCH64", + "R_AARCH64_ABS16", + "R_AARCH64_ABS32", + "R_AARCH64_ABS64", + "R_AARCH64_ADD_ABS_LO12_NC", + "R_AARCH64_ADR_GOT_PAGE", + "R_AARCH64_ADR_PREL_LO21", + "R_AARCH64_ADR_PREL_PG_HI21", + "R_AARCH64_ADR_PREL_PG_HI21_NC", + "R_AARCH64_CALL26", + "R_AARCH64_CONDBR19", + "R_AARCH64_COPY", + "R_AARCH64_GLOB_DAT", + "R_AARCH64_GOT_LD_PREL19", + "R_AARCH64_IRELATIVE", + "R_AARCH64_JUMP26", + "R_AARCH64_JUMP_SLOT", + "R_AARCH64_LD64_GOTOFF_LO15", + "R_AARCH64_LD64_GOTPAGE_LO15", + "R_AARCH64_LD64_GOT_LO12_NC", + "R_AARCH64_LDST128_ABS_LO12_NC", + "R_AARCH64_LDST16_ABS_LO12_NC", + "R_AARCH64_LDST32_ABS_LO12_NC", + "R_AARCH64_LDST64_ABS_LO12_NC", + "R_AARCH64_LDST8_ABS_LO12_NC", + "R_AARCH64_LD_PREL_LO19", + "R_AARCH64_MOVW_SABS_G0", + "R_AARCH64_MOVW_SABS_G1", + "R_AARCH64_MOVW_SABS_G2", + "R_AARCH64_MOVW_UABS_G0", + "R_AARCH64_MOVW_UABS_G0_NC", + "R_AARCH64_MOVW_UABS_G1", + "R_AARCH64_MOVW_UABS_G1_NC", + "R_AARCH64_MOVW_UABS_G2", + "R_AARCH64_MOVW_UABS_G2_NC", + "R_AARCH64_MOVW_UABS_G3", + "R_AARCH64_NONE", + "R_AARCH64_NULL", + "R_AARCH64_P32_ABS16", + "R_AARCH64_P32_ABS32", + "R_AARCH64_P32_ADD_ABS_LO12_NC", + "R_AARCH64_P32_ADR_GOT_PAGE", + "R_AARCH64_P32_ADR_PREL_LO21", + "R_AARCH64_P32_ADR_PREL_PG_HI21", + "R_AARCH64_P32_CALL26", + "R_AARCH64_P32_CONDBR19", + "R_AARCH64_P32_COPY", + "R_AARCH64_P32_GLOB_DAT", + "R_AARCH64_P32_GOT_LD_PREL19", + "R_AARCH64_P32_IRELATIVE", + "R_AARCH64_P32_JUMP26", + "R_AARCH64_P32_JUMP_SLOT", + "R_AARCH64_P32_LD32_GOT_LO12_NC", + "R_AARCH64_P32_LDST128_ABS_LO12_NC", + "R_AARCH64_P32_LDST16_ABS_LO12_NC", + "R_AARCH64_P32_LDST32_ABS_LO12_NC", + "R_AARCH64_P32_LDST64_ABS_LO12_NC", + "R_AARCH64_P32_LDST8_ABS_LO12_NC", + "R_AARCH64_P32_LD_PREL_LO19", + "R_AARCH64_P32_MOVW_SABS_G0", + "R_AARCH64_P32_MOVW_UABS_G0", + "R_AARCH64_P32_MOVW_UABS_G0_NC", + "R_AARCH64_P32_MOVW_UABS_G1", + "R_AARCH64_P32_PREL16", + "R_AARCH64_P32_PREL32", + "R_AARCH64_P32_RELATIVE", + "R_AARCH64_P32_TLSDESC", + "R_AARCH64_P32_TLSDESC_ADD_LO12_NC", + "R_AARCH64_P32_TLSDESC_ADR_PAGE21", + "R_AARCH64_P32_TLSDESC_ADR_PREL21", + "R_AARCH64_P32_TLSDESC_CALL", + "R_AARCH64_P32_TLSDESC_LD32_LO12_NC", + "R_AARCH64_P32_TLSDESC_LD_PREL19", + "R_AARCH64_P32_TLSGD_ADD_LO12_NC", + "R_AARCH64_P32_TLSGD_ADR_PAGE21", + "R_AARCH64_P32_TLSIE_ADR_GOTTPREL_PAGE21", + "R_AARCH64_P32_TLSIE_LD32_GOTTPREL_LO12_NC", + "R_AARCH64_P32_TLSIE_LD_GOTTPREL_PREL19", + "R_AARCH64_P32_TLSLE_ADD_TPREL_HI12", + "R_AARCH64_P32_TLSLE_ADD_TPREL_LO12", + "R_AARCH64_P32_TLSLE_ADD_TPREL_LO12_NC", + "R_AARCH64_P32_TLSLE_MOVW_TPREL_G0", + "R_AARCH64_P32_TLSLE_MOVW_TPREL_G0_NC", + "R_AARCH64_P32_TLSLE_MOVW_TPREL_G1", + "R_AARCH64_P32_TLS_DTPMOD", + "R_AARCH64_P32_TLS_DTPREL", + "R_AARCH64_P32_TLS_TPREL", + "R_AARCH64_P32_TSTBR14", + "R_AARCH64_PREL16", + "R_AARCH64_PREL32", + "R_AARCH64_PREL64", + "R_AARCH64_RELATIVE", + "R_AARCH64_TLSDESC", + "R_AARCH64_TLSDESC_ADD", + "R_AARCH64_TLSDESC_ADD_LO12_NC", + "R_AARCH64_TLSDESC_ADR_PAGE21", + "R_AARCH64_TLSDESC_ADR_PREL21", + "R_AARCH64_TLSDESC_CALL", + "R_AARCH64_TLSDESC_LD64_LO12_NC", + "R_AARCH64_TLSDESC_LDR", + "R_AARCH64_TLSDESC_LD_PREL19", + "R_AARCH64_TLSDESC_OFF_G0_NC", + "R_AARCH64_TLSDESC_OFF_G1", + 
"R_AARCH64_TLSGD_ADD_LO12_NC", + "R_AARCH64_TLSGD_ADR_PAGE21", + "R_AARCH64_TLSGD_ADR_PREL21", + "R_AARCH64_TLSGD_MOVW_G0_NC", + "R_AARCH64_TLSGD_MOVW_G1", + "R_AARCH64_TLSIE_ADR_GOTTPREL_PAGE21", + "R_AARCH64_TLSIE_LD64_GOTTPREL_LO12_NC", + "R_AARCH64_TLSIE_LD_GOTTPREL_PREL19", + "R_AARCH64_TLSIE_MOVW_GOTTPREL_G0_NC", + "R_AARCH64_TLSIE_MOVW_GOTTPREL_G1", + "R_AARCH64_TLSLD_ADR_PAGE21", + "R_AARCH64_TLSLD_ADR_PREL21", + "R_AARCH64_TLSLD_LDST128_DTPREL_LO12", + "R_AARCH64_TLSLD_LDST128_DTPREL_LO12_NC", + "R_AARCH64_TLSLE_ADD_TPREL_HI12", + "R_AARCH64_TLSLE_ADD_TPREL_LO12", + "R_AARCH64_TLSLE_ADD_TPREL_LO12_NC", + "R_AARCH64_TLSLE_LDST128_TPREL_LO12", + "R_AARCH64_TLSLE_LDST128_TPREL_LO12_NC", + "R_AARCH64_TLSLE_MOVW_TPREL_G0", + "R_AARCH64_TLSLE_MOVW_TPREL_G0_NC", + "R_AARCH64_TLSLE_MOVW_TPREL_G1", + "R_AARCH64_TLSLE_MOVW_TPREL_G1_NC", + "R_AARCH64_TLSLE_MOVW_TPREL_G2", + "R_AARCH64_TLS_DTPMOD64", + "R_AARCH64_TLS_DTPREL64", + "R_AARCH64_TLS_TPREL64", + "R_AARCH64_TSTBR14", + "R_ALPHA", + "R_ALPHA_BRADDR", + "R_ALPHA_COPY", + "R_ALPHA_GLOB_DAT", + "R_ALPHA_GPDISP", + "R_ALPHA_GPREL32", + "R_ALPHA_GPRELHIGH", + "R_ALPHA_GPRELLOW", + "R_ALPHA_GPVALUE", + "R_ALPHA_HINT", + "R_ALPHA_IMMED_BR_HI32", + "R_ALPHA_IMMED_GP_16", + "R_ALPHA_IMMED_GP_HI32", + "R_ALPHA_IMMED_LO32", + "R_ALPHA_IMMED_SCN_HI32", + "R_ALPHA_JMP_SLOT", + "R_ALPHA_LITERAL", + "R_ALPHA_LITUSE", + "R_ALPHA_NONE", + "R_ALPHA_OP_PRSHIFT", + "R_ALPHA_OP_PSUB", + "R_ALPHA_OP_PUSH", + "R_ALPHA_OP_STORE", + "R_ALPHA_REFLONG", + "R_ALPHA_REFQUAD", + "R_ALPHA_RELATIVE", + "R_ALPHA_SREL16", + "R_ALPHA_SREL32", + "R_ALPHA_SREL64", + "R_ARM", + "R_ARM_ABS12", + "R_ARM_ABS16", + "R_ARM_ABS32", + "R_ARM_ABS32_NOI", + "R_ARM_ABS8", + "R_ARM_ALU_PCREL_15_8", + "R_ARM_ALU_PCREL_23_15", + "R_ARM_ALU_PCREL_7_0", + "R_ARM_ALU_PC_G0", + "R_ARM_ALU_PC_G0_NC", + "R_ARM_ALU_PC_G1", + "R_ARM_ALU_PC_G1_NC", + "R_ARM_ALU_PC_G2", + "R_ARM_ALU_SBREL_19_12_NC", + "R_ARM_ALU_SBREL_27_20_CK", + "R_ARM_ALU_SB_G0", + "R_ARM_ALU_SB_G0_NC", + "R_ARM_ALU_SB_G1", + "R_ARM_ALU_SB_G1_NC", + "R_ARM_ALU_SB_G2", + "R_ARM_AMP_VCALL9", + "R_ARM_BASE_ABS", + "R_ARM_CALL", + "R_ARM_COPY", + "R_ARM_GLOB_DAT", + "R_ARM_GNU_VTENTRY", + "R_ARM_GNU_VTINHERIT", + "R_ARM_GOT32", + "R_ARM_GOTOFF", + "R_ARM_GOTOFF12", + "R_ARM_GOTPC", + "R_ARM_GOTRELAX", + "R_ARM_GOT_ABS", + "R_ARM_GOT_BREL12", + "R_ARM_GOT_PREL", + "R_ARM_IRELATIVE", + "R_ARM_JUMP24", + "R_ARM_JUMP_SLOT", + "R_ARM_LDC_PC_G0", + "R_ARM_LDC_PC_G1", + "R_ARM_LDC_PC_G2", + "R_ARM_LDC_SB_G0", + "R_ARM_LDC_SB_G1", + "R_ARM_LDC_SB_G2", + "R_ARM_LDRS_PC_G0", + "R_ARM_LDRS_PC_G1", + "R_ARM_LDRS_PC_G2", + "R_ARM_LDRS_SB_G0", + "R_ARM_LDRS_SB_G1", + "R_ARM_LDRS_SB_G2", + "R_ARM_LDR_PC_G1", + "R_ARM_LDR_PC_G2", + "R_ARM_LDR_SBREL_11_10_NC", + "R_ARM_LDR_SB_G0", + "R_ARM_LDR_SB_G1", + "R_ARM_LDR_SB_G2", + "R_ARM_ME_TOO", + "R_ARM_MOVT_ABS", + "R_ARM_MOVT_BREL", + "R_ARM_MOVT_PREL", + "R_ARM_MOVW_ABS_NC", + "R_ARM_MOVW_BREL", + "R_ARM_MOVW_BREL_NC", + "R_ARM_MOVW_PREL_NC", + "R_ARM_NONE", + "R_ARM_PC13", + "R_ARM_PC24", + "R_ARM_PLT32", + "R_ARM_PLT32_ABS", + "R_ARM_PREL31", + "R_ARM_PRIVATE_0", + "R_ARM_PRIVATE_1", + "R_ARM_PRIVATE_10", + "R_ARM_PRIVATE_11", + "R_ARM_PRIVATE_12", + "R_ARM_PRIVATE_13", + "R_ARM_PRIVATE_14", + "R_ARM_PRIVATE_15", + "R_ARM_PRIVATE_2", + "R_ARM_PRIVATE_3", + "R_ARM_PRIVATE_4", + "R_ARM_PRIVATE_5", + "R_ARM_PRIVATE_6", + "R_ARM_PRIVATE_7", + "R_ARM_PRIVATE_8", + "R_ARM_PRIVATE_9", + "R_ARM_RABS32", + "R_ARM_RBASE", + "R_ARM_REL32", + "R_ARM_REL32_NOI", + "R_ARM_RELATIVE", + "R_ARM_RPC24", + 
"R_ARM_RREL32", + "R_ARM_RSBREL32", + "R_ARM_RXPC25", + "R_ARM_SBREL31", + "R_ARM_SBREL32", + "R_ARM_SWI24", + "R_ARM_TARGET1", + "R_ARM_TARGET2", + "R_ARM_THM_ABS5", + "R_ARM_THM_ALU_ABS_G0_NC", + "R_ARM_THM_ALU_ABS_G1_NC", + "R_ARM_THM_ALU_ABS_G2_NC", + "R_ARM_THM_ALU_ABS_G3", + "R_ARM_THM_ALU_PREL_11_0", + "R_ARM_THM_GOT_BREL12", + "R_ARM_THM_JUMP11", + "R_ARM_THM_JUMP19", + "R_ARM_THM_JUMP24", + "R_ARM_THM_JUMP6", + "R_ARM_THM_JUMP8", + "R_ARM_THM_MOVT_ABS", + "R_ARM_THM_MOVT_BREL", + "R_ARM_THM_MOVT_PREL", + "R_ARM_THM_MOVW_ABS_NC", + "R_ARM_THM_MOVW_BREL", + "R_ARM_THM_MOVW_BREL_NC", + "R_ARM_THM_MOVW_PREL_NC", + "R_ARM_THM_PC12", + "R_ARM_THM_PC22", + "R_ARM_THM_PC8", + "R_ARM_THM_RPC22", + "R_ARM_THM_SWI8", + "R_ARM_THM_TLS_CALL", + "R_ARM_THM_TLS_DESCSEQ16", + "R_ARM_THM_TLS_DESCSEQ32", + "R_ARM_THM_XPC22", + "R_ARM_TLS_CALL", + "R_ARM_TLS_DESCSEQ", + "R_ARM_TLS_DTPMOD32", + "R_ARM_TLS_DTPOFF32", + "R_ARM_TLS_GD32", + "R_ARM_TLS_GOTDESC", + "R_ARM_TLS_IE12GP", + "R_ARM_TLS_IE32", + "R_ARM_TLS_LDM32", + "R_ARM_TLS_LDO12", + "R_ARM_TLS_LDO32", + "R_ARM_TLS_LE12", + "R_ARM_TLS_LE32", + "R_ARM_TLS_TPOFF32", + "R_ARM_V4BX", + "R_ARM_XPC25", + "R_INFO", + "R_INFO32", + "R_MIPS", + "R_MIPS_16", + "R_MIPS_26", + "R_MIPS_32", + "R_MIPS_64", + "R_MIPS_ADD_IMMEDIATE", + "R_MIPS_CALL16", + "R_MIPS_CALL_HI16", + "R_MIPS_CALL_LO16", + "R_MIPS_DELETE", + "R_MIPS_GOT16", + "R_MIPS_GOT_DISP", + "R_MIPS_GOT_HI16", + "R_MIPS_GOT_LO16", + "R_MIPS_GOT_OFST", + "R_MIPS_GOT_PAGE", + "R_MIPS_GPREL16", + "R_MIPS_GPREL32", + "R_MIPS_HI16", + "R_MIPS_HIGHER", + "R_MIPS_HIGHEST", + "R_MIPS_INSERT_A", + "R_MIPS_INSERT_B", + "R_MIPS_JALR", + "R_MIPS_LITERAL", + "R_MIPS_LO16", + "R_MIPS_NONE", + "R_MIPS_PC16", + "R_MIPS_PJUMP", + "R_MIPS_REL16", + "R_MIPS_REL32", + "R_MIPS_RELGOT", + "R_MIPS_SCN_DISP", + "R_MIPS_SHIFT5", + "R_MIPS_SHIFT6", + "R_MIPS_SUB", + "R_MIPS_TLS_DTPMOD32", + "R_MIPS_TLS_DTPMOD64", + "R_MIPS_TLS_DTPREL32", + "R_MIPS_TLS_DTPREL64", + "R_MIPS_TLS_DTPREL_HI16", + "R_MIPS_TLS_DTPREL_LO16", + "R_MIPS_TLS_GD", + "R_MIPS_TLS_GOTTPREL", + "R_MIPS_TLS_LDM", + "R_MIPS_TLS_TPREL32", + "R_MIPS_TLS_TPREL64", + "R_MIPS_TLS_TPREL_HI16", + "R_MIPS_TLS_TPREL_LO16", + "R_PPC", + "R_PPC64", + "R_PPC64_ADDR14", + "R_PPC64_ADDR14_BRNTAKEN", + "R_PPC64_ADDR14_BRTAKEN", + "R_PPC64_ADDR16", + "R_PPC64_ADDR16_DS", + "R_PPC64_ADDR16_HA", + "R_PPC64_ADDR16_HI", + "R_PPC64_ADDR16_HIGH", + "R_PPC64_ADDR16_HIGHA", + "R_PPC64_ADDR16_HIGHER", + "R_PPC64_ADDR16_HIGHERA", + "R_PPC64_ADDR16_HIGHEST", + "R_PPC64_ADDR16_HIGHESTA", + "R_PPC64_ADDR16_LO", + "R_PPC64_ADDR16_LO_DS", + "R_PPC64_ADDR24", + "R_PPC64_ADDR32", + "R_PPC64_ADDR64", + "R_PPC64_ADDR64_LOCAL", + "R_PPC64_DTPMOD64", + "R_PPC64_DTPREL16", + "R_PPC64_DTPREL16_DS", + "R_PPC64_DTPREL16_HA", + "R_PPC64_DTPREL16_HI", + "R_PPC64_DTPREL16_HIGH", + "R_PPC64_DTPREL16_HIGHA", + "R_PPC64_DTPREL16_HIGHER", + "R_PPC64_DTPREL16_HIGHERA", + "R_PPC64_DTPREL16_HIGHEST", + "R_PPC64_DTPREL16_HIGHESTA", + "R_PPC64_DTPREL16_LO", + "R_PPC64_DTPREL16_LO_DS", + "R_PPC64_DTPREL64", + "R_PPC64_ENTRY", + "R_PPC64_GOT16", + "R_PPC64_GOT16_DS", + "R_PPC64_GOT16_HA", + "R_PPC64_GOT16_HI", + "R_PPC64_GOT16_LO", + "R_PPC64_GOT16_LO_DS", + "R_PPC64_GOT_DTPREL16_DS", + "R_PPC64_GOT_DTPREL16_HA", + "R_PPC64_GOT_DTPREL16_HI", + "R_PPC64_GOT_DTPREL16_LO_DS", + "R_PPC64_GOT_TLSGD16", + "R_PPC64_GOT_TLSGD16_HA", + "R_PPC64_GOT_TLSGD16_HI", + "R_PPC64_GOT_TLSGD16_LO", + "R_PPC64_GOT_TLSLD16", + "R_PPC64_GOT_TLSLD16_HA", + "R_PPC64_GOT_TLSLD16_HI", + "R_PPC64_GOT_TLSLD16_LO", + 
"R_PPC64_GOT_TPREL16_DS", + "R_PPC64_GOT_TPREL16_HA", + "R_PPC64_GOT_TPREL16_HI", + "R_PPC64_GOT_TPREL16_LO_DS", + "R_PPC64_IRELATIVE", + "R_PPC64_JMP_IREL", + "R_PPC64_JMP_SLOT", + "R_PPC64_NONE", + "R_PPC64_PLT16_LO_DS", + "R_PPC64_PLTGOT16", + "R_PPC64_PLTGOT16_DS", + "R_PPC64_PLTGOT16_HA", + "R_PPC64_PLTGOT16_HI", + "R_PPC64_PLTGOT16_LO", + "R_PPC64_PLTGOT_LO_DS", + "R_PPC64_REL14", + "R_PPC64_REL14_BRNTAKEN", + "R_PPC64_REL14_BRTAKEN", + "R_PPC64_REL16", + "R_PPC64_REL16DX_HA", + "R_PPC64_REL16_HA", + "R_PPC64_REL16_HI", + "R_PPC64_REL16_LO", + "R_PPC64_REL24", + "R_PPC64_REL24_NOTOC", + "R_PPC64_REL32", + "R_PPC64_REL64", + "R_PPC64_SECTOFF_DS", + "R_PPC64_SECTOFF_LO_DS", + "R_PPC64_TLS", + "R_PPC64_TLSGD", + "R_PPC64_TLSLD", + "R_PPC64_TOC", + "R_PPC64_TOC16", + "R_PPC64_TOC16_DS", + "R_PPC64_TOC16_HA", + "R_PPC64_TOC16_HI", + "R_PPC64_TOC16_LO", + "R_PPC64_TOC16_LO_DS", + "R_PPC64_TOCSAVE", + "R_PPC64_TPREL16", + "R_PPC64_TPREL16_DS", + "R_PPC64_TPREL16_HA", + "R_PPC64_TPREL16_HI", + "R_PPC64_TPREL16_HIGH", + "R_PPC64_TPREL16_HIGHA", + "R_PPC64_TPREL16_HIGHER", + "R_PPC64_TPREL16_HIGHERA", + "R_PPC64_TPREL16_HIGHEST", + "R_PPC64_TPREL16_HIGHESTA", + "R_PPC64_TPREL16_LO", + "R_PPC64_TPREL16_LO_DS", + "R_PPC64_TPREL64", + "R_PPC_ADDR14", + "R_PPC_ADDR14_BRNTAKEN", + "R_PPC_ADDR14_BRTAKEN", + "R_PPC_ADDR16", + "R_PPC_ADDR16_HA", + "R_PPC_ADDR16_HI", + "R_PPC_ADDR16_LO", + "R_PPC_ADDR24", + "R_PPC_ADDR32", + "R_PPC_COPY", + "R_PPC_DTPMOD32", + "R_PPC_DTPREL16", + "R_PPC_DTPREL16_HA", + "R_PPC_DTPREL16_HI", + "R_PPC_DTPREL16_LO", + "R_PPC_DTPREL32", + "R_PPC_EMB_BIT_FLD", + "R_PPC_EMB_MRKREF", + "R_PPC_EMB_NADDR16", + "R_PPC_EMB_NADDR16_HA", + "R_PPC_EMB_NADDR16_HI", + "R_PPC_EMB_NADDR16_LO", + "R_PPC_EMB_NADDR32", + "R_PPC_EMB_RELSDA", + "R_PPC_EMB_RELSEC16", + "R_PPC_EMB_RELST_HA", + "R_PPC_EMB_RELST_HI", + "R_PPC_EMB_RELST_LO", + "R_PPC_EMB_SDA21", + "R_PPC_EMB_SDA2I16", + "R_PPC_EMB_SDA2REL", + "R_PPC_EMB_SDAI16", + "R_PPC_GLOB_DAT", + "R_PPC_GOT16", + "R_PPC_GOT16_HA", + "R_PPC_GOT16_HI", + "R_PPC_GOT16_LO", + "R_PPC_GOT_TLSGD16", + "R_PPC_GOT_TLSGD16_HA", + "R_PPC_GOT_TLSGD16_HI", + "R_PPC_GOT_TLSGD16_LO", + "R_PPC_GOT_TLSLD16", + "R_PPC_GOT_TLSLD16_HA", + "R_PPC_GOT_TLSLD16_HI", + "R_PPC_GOT_TLSLD16_LO", + "R_PPC_GOT_TPREL16", + "R_PPC_GOT_TPREL16_HA", + "R_PPC_GOT_TPREL16_HI", + "R_PPC_GOT_TPREL16_LO", + "R_PPC_JMP_SLOT", + "R_PPC_LOCAL24PC", + "R_PPC_NONE", + "R_PPC_PLT16_HA", + "R_PPC_PLT16_HI", + "R_PPC_PLT16_LO", + "R_PPC_PLT32", + "R_PPC_PLTREL24", + "R_PPC_PLTREL32", + "R_PPC_REL14", + "R_PPC_REL14_BRNTAKEN", + "R_PPC_REL14_BRTAKEN", + "R_PPC_REL24", + "R_PPC_REL32", + "R_PPC_RELATIVE", + "R_PPC_SDAREL16", + "R_PPC_SECTOFF", + "R_PPC_SECTOFF_HA", + "R_PPC_SECTOFF_HI", + "R_PPC_SECTOFF_LO", + "R_PPC_TLS", + "R_PPC_TPREL16", + "R_PPC_TPREL16_HA", + "R_PPC_TPREL16_HI", + "R_PPC_TPREL16_LO", + "R_PPC_TPREL32", + "R_PPC_UADDR16", + "R_PPC_UADDR32", + "R_RISCV", + "R_RISCV_32", + "R_RISCV_32_PCREL", + "R_RISCV_64", + "R_RISCV_ADD16", + "R_RISCV_ADD32", + "R_RISCV_ADD64", + "R_RISCV_ADD8", + "R_RISCV_ALIGN", + "R_RISCV_BRANCH", + "R_RISCV_CALL", + "R_RISCV_CALL_PLT", + "R_RISCV_COPY", + "R_RISCV_GNU_VTENTRY", + "R_RISCV_GNU_VTINHERIT", + "R_RISCV_GOT_HI20", + "R_RISCV_GPREL_I", + "R_RISCV_GPREL_S", + "R_RISCV_HI20", + "R_RISCV_JAL", + "R_RISCV_JUMP_SLOT", + "R_RISCV_LO12_I", + "R_RISCV_LO12_S", + "R_RISCV_NONE", + "R_RISCV_PCREL_HI20", + "R_RISCV_PCREL_LO12_I", + "R_RISCV_PCREL_LO12_S", + "R_RISCV_RELATIVE", + "R_RISCV_RELAX", + "R_RISCV_RVC_BRANCH", + "R_RISCV_RVC_JUMP", + 
"R_RISCV_RVC_LUI", + "R_RISCV_SET16", + "R_RISCV_SET32", + "R_RISCV_SET6", + "R_RISCV_SET8", + "R_RISCV_SUB16", + "R_RISCV_SUB32", + "R_RISCV_SUB6", + "R_RISCV_SUB64", + "R_RISCV_SUB8", + "R_RISCV_TLS_DTPMOD32", + "R_RISCV_TLS_DTPMOD64", + "R_RISCV_TLS_DTPREL32", + "R_RISCV_TLS_DTPREL64", + "R_RISCV_TLS_GD_HI20", + "R_RISCV_TLS_GOT_HI20", + "R_RISCV_TLS_TPREL32", + "R_RISCV_TLS_TPREL64", + "R_RISCV_TPREL_ADD", + "R_RISCV_TPREL_HI20", + "R_RISCV_TPREL_I", + "R_RISCV_TPREL_LO12_I", + "R_RISCV_TPREL_LO12_S", + "R_RISCV_TPREL_S", + "R_SPARC", + "R_SPARC_10", + "R_SPARC_11", + "R_SPARC_13", + "R_SPARC_16", + "R_SPARC_22", + "R_SPARC_32", + "R_SPARC_5", + "R_SPARC_6", + "R_SPARC_64", + "R_SPARC_7", + "R_SPARC_8", + "R_SPARC_COPY", + "R_SPARC_DISP16", + "R_SPARC_DISP32", + "R_SPARC_DISP64", + "R_SPARC_DISP8", + "R_SPARC_GLOB_DAT", + "R_SPARC_GLOB_JMP", + "R_SPARC_GOT10", + "R_SPARC_GOT13", + "R_SPARC_GOT22", + "R_SPARC_H44", + "R_SPARC_HH22", + "R_SPARC_HI22", + "R_SPARC_HIPLT22", + "R_SPARC_HIX22", + "R_SPARC_HM10", + "R_SPARC_JMP_SLOT", + "R_SPARC_L44", + "R_SPARC_LM22", + "R_SPARC_LO10", + "R_SPARC_LOPLT10", + "R_SPARC_LOX10", + "R_SPARC_M44", + "R_SPARC_NONE", + "R_SPARC_OLO10", + "R_SPARC_PC10", + "R_SPARC_PC22", + "R_SPARC_PCPLT10", + "R_SPARC_PCPLT22", + "R_SPARC_PCPLT32", + "R_SPARC_PC_HH22", + "R_SPARC_PC_HM10", + "R_SPARC_PC_LM22", + "R_SPARC_PLT32", + "R_SPARC_PLT64", + "R_SPARC_REGISTER", + "R_SPARC_RELATIVE", + "R_SPARC_UA16", + "R_SPARC_UA32", + "R_SPARC_UA64", + "R_SPARC_WDISP16", + "R_SPARC_WDISP19", + "R_SPARC_WDISP22", + "R_SPARC_WDISP30", + "R_SPARC_WPLT30", + "R_SYM32", + "R_SYM64", + "R_TYPE32", + "R_TYPE64", + "R_X86_64", + "R_X86_64_16", + "R_X86_64_32", + "R_X86_64_32S", + "R_X86_64_64", + "R_X86_64_8", + "R_X86_64_COPY", + "R_X86_64_DTPMOD64", + "R_X86_64_DTPOFF32", + "R_X86_64_DTPOFF64", + "R_X86_64_GLOB_DAT", + "R_X86_64_GOT32", + "R_X86_64_GOT64", + "R_X86_64_GOTOFF64", + "R_X86_64_GOTPC32", + "R_X86_64_GOTPC32_TLSDESC", + "R_X86_64_GOTPC64", + "R_X86_64_GOTPCREL", + "R_X86_64_GOTPCREL64", + "R_X86_64_GOTPCRELX", + "R_X86_64_GOTPLT64", + "R_X86_64_GOTTPOFF", + "R_X86_64_IRELATIVE", + "R_X86_64_JMP_SLOT", + "R_X86_64_NONE", + "R_X86_64_PC16", + "R_X86_64_PC32", + "R_X86_64_PC32_BND", + "R_X86_64_PC64", + "R_X86_64_PC8", + "R_X86_64_PLT32", + "R_X86_64_PLT32_BND", + "R_X86_64_PLTOFF64", + "R_X86_64_RELATIVE", + "R_X86_64_RELATIVE64", + "R_X86_64_REX_GOTPCRELX", + "R_X86_64_SIZE32", + "R_X86_64_SIZE64", + "R_X86_64_TLSDESC", + "R_X86_64_TLSDESC_CALL", + "R_X86_64_TLSGD", + "R_X86_64_TLSLD", + "R_X86_64_TPOFF32", + "R_X86_64_TPOFF64", + "Rel32", + "Rel64", + "Rela32", + "Rela64", + "SHF_ALLOC", + "SHF_COMPRESSED", + "SHF_EXECINSTR", + "SHF_GROUP", + "SHF_INFO_LINK", + "SHF_LINK_ORDER", + "SHF_MASKOS", + "SHF_MASKPROC", + "SHF_MERGE", + "SHF_OS_NONCONFORMING", + "SHF_STRINGS", + "SHF_TLS", + "SHF_WRITE", + "SHN_ABS", + "SHN_COMMON", + "SHN_HIOS", + "SHN_HIPROC", + "SHN_HIRESERVE", + "SHN_LOOS", + "SHN_LOPROC", + "SHN_LORESERVE", + "SHN_UNDEF", + "SHN_XINDEX", + "SHT_DYNAMIC", + "SHT_DYNSYM", + "SHT_FINI_ARRAY", + "SHT_GNU_ATTRIBUTES", + "SHT_GNU_HASH", + "SHT_GNU_LIBLIST", + "SHT_GNU_VERDEF", + "SHT_GNU_VERNEED", + "SHT_GNU_VERSYM", + "SHT_GROUP", + "SHT_HASH", + "SHT_HIOS", + "SHT_HIPROC", + "SHT_HIUSER", + "SHT_INIT_ARRAY", + "SHT_LOOS", + "SHT_LOPROC", + "SHT_LOUSER", + "SHT_NOBITS", + "SHT_NOTE", + "SHT_NULL", + "SHT_PREINIT_ARRAY", + "SHT_PROGBITS", + "SHT_REL", + "SHT_RELA", + "SHT_SHLIB", + "SHT_STRTAB", + "SHT_SYMTAB", + "SHT_SYMTAB_SHNDX", + "STB_GLOBAL", + 
"STB_HIOS", + "STB_HIPROC", + "STB_LOCAL", + "STB_LOOS", + "STB_LOPROC", + "STB_WEAK", + "STT_COMMON", + "STT_FILE", + "STT_FUNC", + "STT_HIOS", + "STT_HIPROC", + "STT_LOOS", + "STT_LOPROC", + "STT_NOTYPE", + "STT_OBJECT", + "STT_SECTION", + "STT_TLS", + "STV_DEFAULT", + "STV_HIDDEN", + "STV_INTERNAL", + "STV_PROTECTED", + "ST_BIND", + "ST_INFO", + "ST_TYPE", + "ST_VISIBILITY", + "Section", + "Section32", + "Section64", + "SectionFlag", + "SectionHeader", + "SectionIndex", + "SectionType", + "Sym32", + "Sym32Size", + "Sym64", + "Sym64Size", + "SymBind", + "SymType", + "SymVis", + "Symbol", + "Type", + "Version", + }, + "debug/gosym": []string{ + "DecodingError", + "Func", + "LineTable", + "NewLineTable", + "NewTable", + "Obj", + "Sym", + "Table", + "UnknownFileError", + "UnknownLineError", + }, + "debug/macho": []string{ + "ARM64_RELOC_ADDEND", + "ARM64_RELOC_BRANCH26", + "ARM64_RELOC_GOT_LOAD_PAGE21", + "ARM64_RELOC_GOT_LOAD_PAGEOFF12", + "ARM64_RELOC_PAGE21", + "ARM64_RELOC_PAGEOFF12", + "ARM64_RELOC_POINTER_TO_GOT", + "ARM64_RELOC_SUBTRACTOR", + "ARM64_RELOC_TLVP_LOAD_PAGE21", + "ARM64_RELOC_TLVP_LOAD_PAGEOFF12", + "ARM64_RELOC_UNSIGNED", + "ARM_RELOC_BR24", + "ARM_RELOC_HALF", + "ARM_RELOC_HALF_SECTDIFF", + "ARM_RELOC_LOCAL_SECTDIFF", + "ARM_RELOC_PAIR", + "ARM_RELOC_PB_LA_PTR", + "ARM_RELOC_SECTDIFF", + "ARM_RELOC_VANILLA", + "ARM_THUMB_32BIT_BRANCH", + "ARM_THUMB_RELOC_BR22", + "Cpu", + "Cpu386", + "CpuAmd64", + "CpuArm", + "CpuArm64", + "CpuPpc", + "CpuPpc64", + "Dylib", + "DylibCmd", + "Dysymtab", + "DysymtabCmd", + "ErrNotFat", + "FatArch", + "FatArchHeader", + "FatFile", + "File", + "FileHeader", + "FlagAllModsBound", + "FlagAllowStackExecution", + "FlagAppExtensionSafe", + "FlagBindAtLoad", + "FlagBindsToWeak", + "FlagCanonical", + "FlagDeadStrippableDylib", + "FlagDyldLink", + "FlagForceFlat", + "FlagHasTLVDescriptors", + "FlagIncrLink", + "FlagLazyInit", + "FlagNoFixPrebinding", + "FlagNoHeapExecution", + "FlagNoMultiDefs", + "FlagNoReexportedDylibs", + "FlagNoUndefs", + "FlagPIE", + "FlagPrebindable", + "FlagPrebound", + "FlagRootSafe", + "FlagSetuidSafe", + "FlagSplitSegs", + "FlagSubsectionsViaSymbols", + "FlagTwoLevel", + "FlagWeakDefines", + "FormatError", + "GENERIC_RELOC_LOCAL_SECTDIFF", + "GENERIC_RELOC_PAIR", + "GENERIC_RELOC_PB_LA_PTR", + "GENERIC_RELOC_SECTDIFF", + "GENERIC_RELOC_TLV", + "GENERIC_RELOC_VANILLA", + "Load", + "LoadBytes", + "LoadCmd", + "LoadCmdDylib", + "LoadCmdDylinker", + "LoadCmdDysymtab", + "LoadCmdRpath", + "LoadCmdSegment", + "LoadCmdSegment64", + "LoadCmdSymtab", + "LoadCmdThread", + "LoadCmdUnixThread", + "Magic32", + "Magic64", + "MagicFat", + "NewFatFile", + "NewFile", + "Nlist32", + "Nlist64", + "Open", + "OpenFat", + "Regs386", + "RegsAMD64", + "Reloc", + "RelocTypeARM", + "RelocTypeARM64", + "RelocTypeGeneric", + "RelocTypeX86_64", + "Rpath", + "RpathCmd", + "Section", + "Section32", + "Section64", + "SectionHeader", + "Segment", + "Segment32", + "Segment64", + "SegmentHeader", + "Symbol", + "Symtab", + "SymtabCmd", + "Thread", + "Type", + "TypeBundle", + "TypeDylib", + "TypeExec", + "TypeObj", + "X86_64_RELOC_BRANCH", + "X86_64_RELOC_GOT", + "X86_64_RELOC_GOT_LOAD", + "X86_64_RELOC_SIGNED", + "X86_64_RELOC_SIGNED_1", + "X86_64_RELOC_SIGNED_2", + "X86_64_RELOC_SIGNED_4", + "X86_64_RELOC_SUBTRACTOR", + "X86_64_RELOC_TLV", + "X86_64_RELOC_UNSIGNED", + }, + "debug/pe": []string{ + "COFFSymbol", + "COFFSymbolSize", + "DataDirectory", + "File", + "FileHeader", + "FormatError", + "IMAGE_DIRECTORY_ENTRY_ARCHITECTURE", + 
"IMAGE_DIRECTORY_ENTRY_BASERELOC", + "IMAGE_DIRECTORY_ENTRY_BOUND_IMPORT", + "IMAGE_DIRECTORY_ENTRY_COM_DESCRIPTOR", + "IMAGE_DIRECTORY_ENTRY_DEBUG", + "IMAGE_DIRECTORY_ENTRY_DELAY_IMPORT", + "IMAGE_DIRECTORY_ENTRY_EXCEPTION", + "IMAGE_DIRECTORY_ENTRY_EXPORT", + "IMAGE_DIRECTORY_ENTRY_GLOBALPTR", + "IMAGE_DIRECTORY_ENTRY_IAT", + "IMAGE_DIRECTORY_ENTRY_IMPORT", + "IMAGE_DIRECTORY_ENTRY_LOAD_CONFIG", + "IMAGE_DIRECTORY_ENTRY_RESOURCE", + "IMAGE_DIRECTORY_ENTRY_SECURITY", + "IMAGE_DIRECTORY_ENTRY_TLS", + "IMAGE_DLLCHARACTERISTICS_APPCONTAINER", + "IMAGE_DLLCHARACTERISTICS_DYNAMIC_BASE", + "IMAGE_DLLCHARACTERISTICS_FORCE_INTEGRITY", + "IMAGE_DLLCHARACTERISTICS_GUARD_CF", + "IMAGE_DLLCHARACTERISTICS_HIGH_ENTROPY_VA", + "IMAGE_DLLCHARACTERISTICS_NO_BIND", + "IMAGE_DLLCHARACTERISTICS_NO_ISOLATION", + "IMAGE_DLLCHARACTERISTICS_NO_SEH", + "IMAGE_DLLCHARACTERISTICS_NX_COMPAT", + "IMAGE_DLLCHARACTERISTICS_TERMINAL_SERVER_AWARE", + "IMAGE_DLLCHARACTERISTICS_WDM_DRIVER", + "IMAGE_FILE_32BIT_MACHINE", + "IMAGE_FILE_AGGRESIVE_WS_TRIM", + "IMAGE_FILE_BYTES_REVERSED_HI", + "IMAGE_FILE_BYTES_REVERSED_LO", + "IMAGE_FILE_DEBUG_STRIPPED", + "IMAGE_FILE_DLL", + "IMAGE_FILE_EXECUTABLE_IMAGE", + "IMAGE_FILE_LARGE_ADDRESS_AWARE", + "IMAGE_FILE_LINE_NUMS_STRIPPED", + "IMAGE_FILE_LOCAL_SYMS_STRIPPED", + "IMAGE_FILE_MACHINE_AM33", + "IMAGE_FILE_MACHINE_AMD64", + "IMAGE_FILE_MACHINE_ARM", + "IMAGE_FILE_MACHINE_ARM64", + "IMAGE_FILE_MACHINE_ARMNT", + "IMAGE_FILE_MACHINE_EBC", + "IMAGE_FILE_MACHINE_I386", + "IMAGE_FILE_MACHINE_IA64", + "IMAGE_FILE_MACHINE_M32R", + "IMAGE_FILE_MACHINE_MIPS16", + "IMAGE_FILE_MACHINE_MIPSFPU", + "IMAGE_FILE_MACHINE_MIPSFPU16", + "IMAGE_FILE_MACHINE_POWERPC", + "IMAGE_FILE_MACHINE_POWERPCFP", + "IMAGE_FILE_MACHINE_R4000", + "IMAGE_FILE_MACHINE_SH3", + "IMAGE_FILE_MACHINE_SH3DSP", + "IMAGE_FILE_MACHINE_SH4", + "IMAGE_FILE_MACHINE_SH5", + "IMAGE_FILE_MACHINE_THUMB", + "IMAGE_FILE_MACHINE_UNKNOWN", + "IMAGE_FILE_MACHINE_WCEMIPSV2", + "IMAGE_FILE_NET_RUN_FROM_SWAP", + "IMAGE_FILE_RELOCS_STRIPPED", + "IMAGE_FILE_REMOVABLE_RUN_FROM_SWAP", + "IMAGE_FILE_SYSTEM", + "IMAGE_FILE_UP_SYSTEM_ONLY", + "IMAGE_SUBSYSTEM_EFI_APPLICATION", + "IMAGE_SUBSYSTEM_EFI_BOOT_SERVICE_DRIVER", + "IMAGE_SUBSYSTEM_EFI_ROM", + "IMAGE_SUBSYSTEM_EFI_RUNTIME_DRIVER", + "IMAGE_SUBSYSTEM_NATIVE", + "IMAGE_SUBSYSTEM_NATIVE_WINDOWS", + "IMAGE_SUBSYSTEM_OS2_CUI", + "IMAGE_SUBSYSTEM_POSIX_CUI", + "IMAGE_SUBSYSTEM_UNKNOWN", + "IMAGE_SUBSYSTEM_WINDOWS_BOOT_APPLICATION", + "IMAGE_SUBSYSTEM_WINDOWS_CE_GUI", + "IMAGE_SUBSYSTEM_WINDOWS_CUI", + "IMAGE_SUBSYSTEM_WINDOWS_GUI", + "IMAGE_SUBSYSTEM_XBOX", + "ImportDirectory", + "NewFile", + "Open", + "OptionalHeader32", + "OptionalHeader64", + "Reloc", + "Section", + "SectionHeader", + "SectionHeader32", + "StringTable", + "Symbol", + }, + "debug/plan9obj": []string{ + "File", + "FileHeader", + "Magic386", + "Magic64", + "MagicAMD64", + "MagicARM", + "NewFile", + "Open", + "Section", + "SectionHeader", + "Sym", + }, + "embed": []string{ + "FS", + }, + "encoding": []string{ + "BinaryMarshaler", + "BinaryUnmarshaler", + "TextMarshaler", + "TextUnmarshaler", + }, + "encoding/ascii85": []string{ + "CorruptInputError", + "Decode", + "Encode", + "MaxEncodedLen", + "NewDecoder", + "NewEncoder", + }, + "encoding/asn1": []string{ + "BitString", + "ClassApplication", + "ClassContextSpecific", + "ClassPrivate", + "ClassUniversal", + "Enumerated", + "Flag", + "Marshal", + "MarshalWithParams", + "NullBytes", + "NullRawValue", + "ObjectIdentifier", + "RawContent", + "RawValue", + "StructuralError", + 
"SyntaxError", + "TagBMPString", + "TagBitString", + "TagBoolean", + "TagEnum", + "TagGeneralString", + "TagGeneralizedTime", + "TagIA5String", + "TagInteger", + "TagNull", + "TagNumericString", + "TagOID", + "TagOctetString", + "TagPrintableString", + "TagSequence", + "TagSet", + "TagT61String", + "TagUTCTime", + "TagUTF8String", + "Unmarshal", + "UnmarshalWithParams", + }, + "encoding/base32": []string{ + "CorruptInputError", + "Encoding", + "HexEncoding", + "NewDecoder", + "NewEncoder", + "NewEncoding", + "NoPadding", + "StdEncoding", + "StdPadding", + }, + "encoding/base64": []string{ + "CorruptInputError", + "Encoding", + "NewDecoder", + "NewEncoder", + "NewEncoding", + "NoPadding", + "RawStdEncoding", + "RawURLEncoding", + "StdEncoding", + "StdPadding", + "URLEncoding", + }, + "encoding/binary": []string{ + "BigEndian", + "ByteOrder", + "LittleEndian", + "MaxVarintLen16", + "MaxVarintLen32", + "MaxVarintLen64", + "PutUvarint", + "PutVarint", + "Read", + "ReadUvarint", + "ReadVarint", + "Size", + "Uvarint", + "Varint", + "Write", + }, + "encoding/csv": []string{ + "ErrBareQuote", + "ErrFieldCount", + "ErrQuote", + "ErrTrailingComma", + "NewReader", + "NewWriter", + "ParseError", + "Reader", + "Writer", + }, + "encoding/gob": []string{ + "CommonType", + "Decoder", + "Encoder", + "GobDecoder", + "GobEncoder", + "NewDecoder", + "NewEncoder", + "Register", + "RegisterName", + }, + "encoding/hex": []string{ + "Decode", + "DecodeString", + "DecodedLen", + "Dump", + "Dumper", + "Encode", + "EncodeToString", + "EncodedLen", + "ErrLength", + "InvalidByteError", + "NewDecoder", + "NewEncoder", + }, + "encoding/json": []string{ + "Compact", + "Decoder", + "Delim", + "Encoder", + "HTMLEscape", + "Indent", + "InvalidUTF8Error", + "InvalidUnmarshalError", + "Marshal", + "MarshalIndent", + "Marshaler", + "MarshalerError", + "NewDecoder", + "NewEncoder", + "Number", + "RawMessage", + "SyntaxError", + "Token", + "Unmarshal", + "UnmarshalFieldError", + "UnmarshalTypeError", + "Unmarshaler", + "UnsupportedTypeError", + "UnsupportedValueError", + "Valid", + }, + "encoding/pem": []string{ + "Block", + "Decode", + "Encode", + "EncodeToMemory", + }, + "encoding/xml": []string{ + "Attr", + "CharData", + "Comment", + "CopyToken", + "Decoder", + "Directive", + "Encoder", + "EndElement", + "Escape", + "EscapeText", + "HTMLAutoClose", + "HTMLEntity", + "Header", + "Marshal", + "MarshalIndent", + "Marshaler", + "MarshalerAttr", + "Name", + "NewDecoder", + "NewEncoder", + "NewTokenDecoder", + "ProcInst", + "StartElement", + "SyntaxError", + "TagPathError", + "Token", + "TokenReader", + "Unmarshal", + "UnmarshalError", + "Unmarshaler", + "UnmarshalerAttr", + "UnsupportedTypeError", + }, + "errors": []string{ + "As", + "Is", + "New", + "Unwrap", + }, + "expvar": []string{ + "Do", + "Float", + "Func", + "Get", + "Handler", + "Int", + "KeyValue", + "Map", + "NewFloat", + "NewInt", + "NewMap", + "NewString", + "Publish", + "String", + "Var", + }, + "flag": []string{ + "Arg", + "Args", + "Bool", + "BoolVar", + "CommandLine", + "ContinueOnError", + "Duration", + "DurationVar", + "ErrHelp", + "ErrorHandling", + "ExitOnError", + "Flag", + "FlagSet", + "Float64", + "Float64Var", + "Func", + "Getter", + "Int", + "Int64", + "Int64Var", + "IntVar", + "Lookup", + "NArg", + "NFlag", + "NewFlagSet", + "PanicOnError", + "Parse", + "Parsed", + "PrintDefaults", + "Set", + "String", + "StringVar", + "Uint", + "Uint64", + "Uint64Var", + "UintVar", + "UnquoteUsage", + "Usage", + "Value", + "Var", + "Visit", + "VisitAll", + }, + "fmt": 
[]string{ + "Errorf", + "Formatter", + "Fprint", + "Fprintf", + "Fprintln", + "Fscan", + "Fscanf", + "Fscanln", + "GoStringer", + "Print", + "Printf", + "Println", + "Scan", + "ScanState", + "Scanf", + "Scanln", + "Scanner", + "Sprint", + "Sprintf", + "Sprintln", + "Sscan", + "Sscanf", + "Sscanln", + "State", + "Stringer", + }, + "go/ast": []string{ + "ArrayType", + "AssignStmt", + "Bad", + "BadDecl", + "BadExpr", + "BadStmt", + "BasicLit", + "BinaryExpr", + "BlockStmt", + "BranchStmt", + "CallExpr", + "CaseClause", + "ChanDir", + "ChanType", + "CommClause", + "Comment", + "CommentGroup", + "CommentMap", + "CompositeLit", + "Con", + "Decl", + "DeclStmt", + "DeferStmt", + "Ellipsis", + "EmptyStmt", + "Expr", + "ExprStmt", + "Field", + "FieldFilter", + "FieldList", + "File", + "FileExports", + "Filter", + "FilterDecl", + "FilterFile", + "FilterFuncDuplicates", + "FilterImportDuplicates", + "FilterPackage", + "FilterUnassociatedComments", + "ForStmt", + "Fprint", + "Fun", + "FuncDecl", + "FuncLit", + "FuncType", + "GenDecl", + "GoStmt", + "Ident", + "IfStmt", + "ImportSpec", + "Importer", + "IncDecStmt", + "IndexExpr", + "Inspect", + "InterfaceType", + "IsExported", + "KeyValueExpr", + "LabeledStmt", + "Lbl", + "MapType", + "MergeMode", + "MergePackageFiles", + "NewCommentMap", + "NewIdent", + "NewObj", + "NewPackage", + "NewScope", + "Node", + "NotNilFilter", + "ObjKind", + "Object", + "Package", + "PackageExports", + "ParenExpr", + "Pkg", + "Print", + "RECV", + "RangeStmt", + "ReturnStmt", + "SEND", + "Scope", + "SelectStmt", + "SelectorExpr", + "SendStmt", + "SliceExpr", + "SortImports", + "Spec", + "StarExpr", + "Stmt", + "StructType", + "SwitchStmt", + "Typ", + "TypeAssertExpr", + "TypeSpec", + "TypeSwitchStmt", + "UnaryExpr", + "ValueSpec", + "Var", + "Visitor", + "Walk", + }, + "go/build": []string{ + "AllowBinary", + "ArchChar", + "Context", + "Default", + "FindOnly", + "IgnoreVendor", + "Import", + "ImportComment", + "ImportDir", + "ImportMode", + "IsLocalImport", + "MultiplePackageError", + "NoGoError", + "Package", + "ToolDir", + }, + "go/build/constraint": []string{ + "AndExpr", + "Expr", + "IsGoBuild", + "IsPlusBuild", + "NotExpr", + "OrExpr", + "Parse", + "PlusBuildLines", + "SyntaxError", + "TagExpr", + }, + "go/constant": []string{ + "BinaryOp", + "BitLen", + "Bool", + "BoolVal", + "Bytes", + "Compare", + "Complex", + "Denom", + "Float", + "Float32Val", + "Float64Val", + "Imag", + "Int", + "Int64Val", + "Kind", + "Make", + "MakeBool", + "MakeFloat64", + "MakeFromBytes", + "MakeFromLiteral", + "MakeImag", + "MakeInt64", + "MakeString", + "MakeUint64", + "MakeUnknown", + "Num", + "Real", + "Shift", + "Sign", + "String", + "StringVal", + "ToComplex", + "ToFloat", + "ToInt", + "Uint64Val", + "UnaryOp", + "Unknown", + "Val", + "Value", + }, + "go/doc": []string{ + "AllDecls", + "AllMethods", + "Example", + "Examples", + "Filter", + "Func", + "IllegalPrefixes", + "IsPredeclared", + "Mode", + "New", + "NewFromFiles", + "Note", + "Package", + "PreserveAST", + "Synopsis", + "ToHTML", + "ToText", + "Type", + "Value", + }, + "go/format": []string{ + "Node", + "Source", + }, + "go/importer": []string{ + "Default", + "For", + "ForCompiler", + "Lookup", + }, + "go/parser": []string{ + "AllErrors", + "DeclarationErrors", + "ImportsOnly", + "Mode", + "PackageClauseOnly", + "ParseComments", + "ParseDir", + "ParseExpr", + "ParseExprFrom", + "ParseFile", + "SpuriousErrors", + "Trace", + }, + "go/printer": []string{ + "CommentedNode", + "Config", + "Fprint", + "Mode", + "RawFormat", + 
"SourcePos", + "TabIndent", + "UseSpaces", + }, + "go/scanner": []string{ + "Error", + "ErrorHandler", + "ErrorList", + "Mode", + "PrintError", + "ScanComments", + "Scanner", + }, + "go/token": []string{ + "ADD", + "ADD_ASSIGN", + "AND", + "AND_ASSIGN", + "AND_NOT", + "AND_NOT_ASSIGN", + "ARROW", + "ASSIGN", + "BREAK", + "CASE", + "CHAN", + "CHAR", + "COLON", + "COMMA", + "COMMENT", + "CONST", + "CONTINUE", + "DEC", + "DEFAULT", + "DEFER", + "DEFINE", + "ELLIPSIS", + "ELSE", + "EOF", + "EQL", + "FALLTHROUGH", + "FLOAT", + "FOR", + "FUNC", + "File", + "FileSet", + "GEQ", + "GO", + "GOTO", + "GTR", + "HighestPrec", + "IDENT", + "IF", + "ILLEGAL", + "IMAG", + "IMPORT", + "INC", + "INT", + "INTERFACE", + "IsExported", + "IsIdentifier", + "IsKeyword", + "LAND", + "LBRACE", + "LBRACK", + "LEQ", + "LOR", + "LPAREN", + "LSS", + "Lookup", + "LowestPrec", + "MAP", + "MUL", + "MUL_ASSIGN", + "NEQ", + "NOT", + "NewFileSet", + "NoPos", + "OR", + "OR_ASSIGN", + "PACKAGE", + "PERIOD", + "Pos", + "Position", + "QUO", + "QUO_ASSIGN", + "RANGE", + "RBRACE", + "RBRACK", + "REM", + "REM_ASSIGN", + "RETURN", + "RPAREN", + "SELECT", + "SEMICOLON", + "SHL", + "SHL_ASSIGN", + "SHR", + "SHR_ASSIGN", + "STRING", + "STRUCT", + "SUB", + "SUB_ASSIGN", + "SWITCH", + "TYPE", + "Token", + "UnaryPrec", + "VAR", + "XOR", + "XOR_ASSIGN", + }, + "go/types": []string{ + "Array", + "AssertableTo", + "AssignableTo", + "Basic", + "BasicInfo", + "BasicKind", + "Bool", + "Builtin", + "Byte", + "Chan", + "ChanDir", + "CheckExpr", + "Checker", + "Comparable", + "Complex128", + "Complex64", + "Config", + "Const", + "ConvertibleTo", + "DefPredeclaredTestFuncs", + "Default", + "Error", + "Eval", + "ExprString", + "FieldVal", + "Float32", + "Float64", + "Func", + "Id", + "Identical", + "IdenticalIgnoreTags", + "Implements", + "ImportMode", + "Importer", + "ImporterFrom", + "Info", + "Initializer", + "Int", + "Int16", + "Int32", + "Int64", + "Int8", + "Interface", + "Invalid", + "IsBoolean", + "IsComplex", + "IsConstType", + "IsFloat", + "IsInteger", + "IsInterface", + "IsNumeric", + "IsOrdered", + "IsString", + "IsUnsigned", + "IsUntyped", + "Label", + "LookupFieldOrMethod", + "Map", + "MethodExpr", + "MethodSet", + "MethodVal", + "MissingMethod", + "Named", + "NewArray", + "NewChan", + "NewChecker", + "NewConst", + "NewField", + "NewFunc", + "NewInterface", + "NewInterfaceType", + "NewLabel", + "NewMap", + "NewMethodSet", + "NewNamed", + "NewPackage", + "NewParam", + "NewPkgName", + "NewPointer", + "NewScope", + "NewSignature", + "NewSlice", + "NewStruct", + "NewTuple", + "NewTypeName", + "NewVar", + "Nil", + "Object", + "ObjectString", + "Package", + "PkgName", + "Pointer", + "Qualifier", + "RecvOnly", + "RelativeTo", + "Rune", + "Scope", + "Selection", + "SelectionKind", + "SelectionString", + "SendOnly", + "SendRecv", + "Signature", + "Sizes", + "SizesFor", + "Slice", + "StdSizes", + "String", + "Struct", + "Tuple", + "Typ", + "Type", + "TypeAndValue", + "TypeName", + "TypeString", + "Uint", + "Uint16", + "Uint32", + "Uint64", + "Uint8", + "Uintptr", + "Universe", + "Unsafe", + "UnsafePointer", + "UntypedBool", + "UntypedComplex", + "UntypedFloat", + "UntypedInt", + "UntypedNil", + "UntypedRune", + "UntypedString", + "Var", + "WriteExpr", + "WriteSignature", + "WriteType", + }, + "hash": []string{ + "Hash", + "Hash32", + "Hash64", + }, + "hash/adler32": []string{ + "Checksum", + "New", + "Size", + }, + "hash/crc32": []string{ + "Castagnoli", + "Checksum", + "ChecksumIEEE", + "IEEE", + "IEEETable", + "Koopman", + "MakeTable", + 
"New", + "NewIEEE", + "Size", + "Table", + "Update", + }, + "hash/crc64": []string{ + "Checksum", + "ECMA", + "ISO", + "MakeTable", + "New", + "Size", + "Table", + "Update", + }, + "hash/fnv": []string{ + "New128", + "New128a", + "New32", + "New32a", + "New64", + "New64a", + }, + "hash/maphash": []string{ + "Hash", + "MakeSeed", + "Seed", + }, + "html": []string{ + "EscapeString", + "UnescapeString", + }, + "html/template": []string{ + "CSS", + "ErrAmbigContext", + "ErrBadHTML", + "ErrBranchEnd", + "ErrEndContext", + "ErrNoSuchTemplate", + "ErrOutputContext", + "ErrPartialCharset", + "ErrPartialEscape", + "ErrPredefinedEscaper", + "ErrRangeLoopReentry", + "ErrSlashAmbig", + "Error", + "ErrorCode", + "FuncMap", + "HTML", + "HTMLAttr", + "HTMLEscape", + "HTMLEscapeString", + "HTMLEscaper", + "IsTrue", + "JS", + "JSEscape", + "JSEscapeString", + "JSEscaper", + "JSStr", + "Must", + "New", + "OK", + "ParseFS", + "ParseFiles", + "ParseGlob", + "Srcset", + "Template", + "URL", + "URLQueryEscaper", + }, + "image": []string{ + "Alpha", + "Alpha16", + "Black", + "CMYK", + "Config", + "Decode", + "DecodeConfig", + "ErrFormat", + "Gray", + "Gray16", + "Image", + "NRGBA", + "NRGBA64", + "NYCbCrA", + "NewAlpha", + "NewAlpha16", + "NewCMYK", + "NewGray", + "NewGray16", + "NewNRGBA", + "NewNRGBA64", + "NewNYCbCrA", + "NewPaletted", + "NewRGBA", + "NewRGBA64", + "NewUniform", + "NewYCbCr", + "Opaque", + "Paletted", + "PalettedImage", + "Point", + "Pt", + "RGBA", + "RGBA64", + "Rect", + "Rectangle", + "RegisterFormat", + "Transparent", + "Uniform", + "White", + "YCbCr", + "YCbCrSubsampleRatio", + "YCbCrSubsampleRatio410", + "YCbCrSubsampleRatio411", + "YCbCrSubsampleRatio420", + "YCbCrSubsampleRatio422", + "YCbCrSubsampleRatio440", + "YCbCrSubsampleRatio444", + "ZP", + "ZR", + }, + "image/color": []string{ + "Alpha", + "Alpha16", + "Alpha16Model", + "AlphaModel", + "Black", + "CMYK", + "CMYKModel", + "CMYKToRGB", + "Color", + "Gray", + "Gray16", + "Gray16Model", + "GrayModel", + "Model", + "ModelFunc", + "NRGBA", + "NRGBA64", + "NRGBA64Model", + "NRGBAModel", + "NYCbCrA", + "NYCbCrAModel", + "Opaque", + "Palette", + "RGBA", + "RGBA64", + "RGBA64Model", + "RGBAModel", + "RGBToCMYK", + "RGBToYCbCr", + "Transparent", + "White", + "YCbCr", + "YCbCrModel", + "YCbCrToRGB", + }, + "image/color/palette": []string{ + "Plan9", + "WebSafe", + }, + "image/draw": []string{ + "Draw", + "DrawMask", + "Drawer", + "FloydSteinberg", + "Image", + "Op", + "Over", + "Quantizer", + "Src", + }, + "image/gif": []string{ + "Decode", + "DecodeAll", + "DecodeConfig", + "DisposalBackground", + "DisposalNone", + "DisposalPrevious", + "Encode", + "EncodeAll", + "GIF", + "Options", + }, + "image/jpeg": []string{ + "Decode", + "DecodeConfig", + "DefaultQuality", + "Encode", + "FormatError", + "Options", + "Reader", + "UnsupportedError", + }, + "image/png": []string{ + "BestCompression", + "BestSpeed", + "CompressionLevel", + "Decode", + "DecodeConfig", + "DefaultCompression", + "Encode", + "Encoder", + "EncoderBuffer", + "EncoderBufferPool", + "FormatError", + "NoCompression", + "UnsupportedError", + }, + "index/suffixarray": []string{ + "Index", + "New", + }, + "io": []string{ + "ByteReader", + "ByteScanner", + "ByteWriter", + "Closer", + "Copy", + "CopyBuffer", + "CopyN", + "Discard", + "EOF", + "ErrClosedPipe", + "ErrNoProgress", + "ErrShortBuffer", + "ErrShortWrite", + "ErrUnexpectedEOF", + "LimitReader", + "LimitedReader", + "MultiReader", + "MultiWriter", + "NewSectionReader", + "NopCloser", + "Pipe", + "PipeReader", + "PipeWriter", 
+ "ReadAll", + "ReadAtLeast", + "ReadCloser", + "ReadFull", + "ReadSeekCloser", + "ReadSeeker", + "ReadWriteCloser", + "ReadWriteSeeker", + "ReadWriter", + "Reader", + "ReaderAt", + "ReaderFrom", + "RuneReader", + "RuneScanner", + "SectionReader", + "SeekCurrent", + "SeekEnd", + "SeekStart", + "Seeker", + "StringWriter", + "TeeReader", + "WriteCloser", + "WriteSeeker", + "WriteString", + "Writer", + "WriterAt", + "WriterTo", + }, + "io/fs": []string{ + "DirEntry", + "ErrClosed", + "ErrExist", + "ErrInvalid", + "ErrNotExist", + "ErrPermission", + "FS", + "File", + "FileInfo", + "FileMode", + "Glob", + "GlobFS", + "ModeAppend", + "ModeCharDevice", + "ModeDevice", + "ModeDir", + "ModeExclusive", + "ModeIrregular", + "ModeNamedPipe", + "ModePerm", + "ModeSetgid", + "ModeSetuid", + "ModeSocket", + "ModeSticky", + "ModeSymlink", + "ModeTemporary", + "ModeType", + "PathError", + "ReadDir", + "ReadDirFS", + "ReadDirFile", + "ReadFile", + "ReadFileFS", + "SkipDir", + "Stat", + "StatFS", + "Sub", + "SubFS", + "ValidPath", + "WalkDir", + "WalkDirFunc", + }, + "io/ioutil": []string{ + "Discard", + "NopCloser", + "ReadAll", + "ReadDir", + "ReadFile", + "TempDir", + "TempFile", + "WriteFile", + }, + "log": []string{ + "Default", + "Fatal", + "Fatalf", + "Fatalln", + "Flags", + "LUTC", + "Ldate", + "Llongfile", + "Lmicroseconds", + "Lmsgprefix", + "Logger", + "Lshortfile", + "LstdFlags", + "Ltime", + "New", + "Output", + "Panic", + "Panicf", + "Panicln", + "Prefix", + "Print", + "Printf", + "Println", + "SetFlags", + "SetOutput", + "SetPrefix", + "Writer", + }, + "log/syslog": []string{ + "Dial", + "LOG_ALERT", + "LOG_AUTH", + "LOG_AUTHPRIV", + "LOG_CRIT", + "LOG_CRON", + "LOG_DAEMON", + "LOG_DEBUG", + "LOG_EMERG", + "LOG_ERR", + "LOG_FTP", + "LOG_INFO", + "LOG_KERN", + "LOG_LOCAL0", + "LOG_LOCAL1", + "LOG_LOCAL2", + "LOG_LOCAL3", + "LOG_LOCAL4", + "LOG_LOCAL5", + "LOG_LOCAL6", + "LOG_LOCAL7", + "LOG_LPR", + "LOG_MAIL", + "LOG_NEWS", + "LOG_NOTICE", + "LOG_SYSLOG", + "LOG_USER", + "LOG_UUCP", + "LOG_WARNING", + "New", + "NewLogger", + "Priority", + "Writer", + }, + "math": []string{ + "Abs", + "Acos", + "Acosh", + "Asin", + "Asinh", + "Atan", + "Atan2", + "Atanh", + "Cbrt", + "Ceil", + "Copysign", + "Cos", + "Cosh", + "Dim", + "E", + "Erf", + "Erfc", + "Erfcinv", + "Erfinv", + "Exp", + "Exp2", + "Expm1", + "FMA", + "Float32bits", + "Float32frombits", + "Float64bits", + "Float64frombits", + "Floor", + "Frexp", + "Gamma", + "Hypot", + "Ilogb", + "Inf", + "IsInf", + "IsNaN", + "J0", + "J1", + "Jn", + "Ldexp", + "Lgamma", + "Ln10", + "Ln2", + "Log", + "Log10", + "Log10E", + "Log1p", + "Log2", + "Log2E", + "Logb", + "Max", + "MaxFloat32", + "MaxFloat64", + "MaxInt16", + "MaxInt32", + "MaxInt64", + "MaxInt8", + "MaxUint16", + "MaxUint32", + "MaxUint64", + "MaxUint8", + "Min", + "MinInt16", + "MinInt32", + "MinInt64", + "MinInt8", + "Mod", + "Modf", + "NaN", + "Nextafter", + "Nextafter32", + "Phi", + "Pi", + "Pow", + "Pow10", + "Remainder", + "Round", + "RoundToEven", + "Signbit", + "Sin", + "Sincos", + "Sinh", + "SmallestNonzeroFloat32", + "SmallestNonzeroFloat64", + "Sqrt", + "Sqrt2", + "SqrtE", + "SqrtPhi", + "SqrtPi", + "Tan", + "Tanh", + "Trunc", + "Y0", + "Y1", + "Yn", + }, + "math/big": []string{ + "Above", + "Accuracy", + "AwayFromZero", + "Below", + "ErrNaN", + "Exact", + "Float", + "Int", + "Jacobi", + "MaxBase", + "MaxExp", + "MaxPrec", + "MinExp", + "NewFloat", + "NewInt", + "NewRat", + "ParseFloat", + "Rat", + "RoundingMode", + "ToNearestAway", + "ToNearestEven", + "ToNegativeInf", + 
"ToPositiveInf", + "ToZero", + "Word", + }, + "math/bits": []string{ + "Add", + "Add32", + "Add64", + "Div", + "Div32", + "Div64", + "LeadingZeros", + "LeadingZeros16", + "LeadingZeros32", + "LeadingZeros64", + "LeadingZeros8", + "Len", + "Len16", + "Len32", + "Len64", + "Len8", + "Mul", + "Mul32", + "Mul64", + "OnesCount", + "OnesCount16", + "OnesCount32", + "OnesCount64", + "OnesCount8", + "Rem", + "Rem32", + "Rem64", + "Reverse", + "Reverse16", + "Reverse32", + "Reverse64", + "Reverse8", + "ReverseBytes", + "ReverseBytes16", + "ReverseBytes32", + "ReverseBytes64", + "RotateLeft", + "RotateLeft16", + "RotateLeft32", + "RotateLeft64", + "RotateLeft8", + "Sub", + "Sub32", + "Sub64", + "TrailingZeros", + "TrailingZeros16", + "TrailingZeros32", + "TrailingZeros64", + "TrailingZeros8", + "UintSize", + }, + "math/cmplx": []string{ + "Abs", + "Acos", + "Acosh", + "Asin", + "Asinh", + "Atan", + "Atanh", + "Conj", + "Cos", + "Cosh", + "Cot", + "Exp", + "Inf", + "IsInf", + "IsNaN", + "Log", + "Log10", + "NaN", + "Phase", + "Polar", + "Pow", + "Rect", + "Sin", + "Sinh", + "Sqrt", + "Tan", + "Tanh", + }, + "math/rand": []string{ + "ExpFloat64", + "Float32", + "Float64", + "Int", + "Int31", + "Int31n", + "Int63", + "Int63n", + "Intn", + "New", + "NewSource", + "NewZipf", + "NormFloat64", + "Perm", + "Rand", + "Read", + "Seed", + "Shuffle", + "Source", + "Source64", + "Uint32", + "Uint64", + "Zipf", + }, + "mime": []string{ + "AddExtensionType", + "BEncoding", + "ErrInvalidMediaParameter", + "ExtensionsByType", + "FormatMediaType", + "ParseMediaType", + "QEncoding", + "TypeByExtension", + "WordDecoder", + "WordEncoder", + }, + "mime/multipart": []string{ + "ErrMessageTooLarge", + "File", + "FileHeader", + "Form", + "NewReader", + "NewWriter", + "Part", + "Reader", + "Writer", + }, + "mime/quotedprintable": []string{ + "NewReader", + "NewWriter", + "Reader", + "Writer", + }, + "net": []string{ + "Addr", + "AddrError", + "Buffers", + "CIDRMask", + "Conn", + "DNSConfigError", + "DNSError", + "DefaultResolver", + "Dial", + "DialIP", + "DialTCP", + "DialTimeout", + "DialUDP", + "DialUnix", + "Dialer", + "ErrClosed", + "ErrWriteToConnected", + "Error", + "FileConn", + "FileListener", + "FilePacketConn", + "FlagBroadcast", + "FlagLoopback", + "FlagMulticast", + "FlagPointToPoint", + "FlagUp", + "Flags", + "HardwareAddr", + "IP", + "IPAddr", + "IPConn", + "IPMask", + "IPNet", + "IPv4", + "IPv4Mask", + "IPv4allrouter", + "IPv4allsys", + "IPv4bcast", + "IPv4len", + "IPv4zero", + "IPv6interfacelocalallnodes", + "IPv6len", + "IPv6linklocalallnodes", + "IPv6linklocalallrouters", + "IPv6loopback", + "IPv6unspecified", + "IPv6zero", + "Interface", + "InterfaceAddrs", + "InterfaceByIndex", + "InterfaceByName", + "Interfaces", + "InvalidAddrError", + "JoinHostPort", + "Listen", + "ListenConfig", + "ListenIP", + "ListenMulticastUDP", + "ListenPacket", + "ListenTCP", + "ListenUDP", + "ListenUnix", + "ListenUnixgram", + "Listener", + "LookupAddr", + "LookupCNAME", + "LookupHost", + "LookupIP", + "LookupMX", + "LookupNS", + "LookupPort", + "LookupSRV", + "LookupTXT", + "MX", + "NS", + "OpError", + "PacketConn", + "ParseCIDR", + "ParseError", + "ParseIP", + "ParseMAC", + "Pipe", + "ResolveIPAddr", + "ResolveTCPAddr", + "ResolveUDPAddr", + "ResolveUnixAddr", + "Resolver", + "SRV", + "SplitHostPort", + "TCPAddr", + "TCPConn", + "TCPListener", + "UDPAddr", + "UDPConn", + "UnixAddr", + "UnixConn", + "UnixListener", + "UnknownNetworkError", + }, + "net/http": []string{ + "CanonicalHeaderKey", + "Client", + "CloseNotifier", + 
"ConnState", + "Cookie", + "CookieJar", + "DefaultClient", + "DefaultMaxHeaderBytes", + "DefaultMaxIdleConnsPerHost", + "DefaultServeMux", + "DefaultTransport", + "DetectContentType", + "Dir", + "ErrAbortHandler", + "ErrBodyNotAllowed", + "ErrBodyReadAfterClose", + "ErrContentLength", + "ErrHandlerTimeout", + "ErrHeaderTooLong", + "ErrHijacked", + "ErrLineTooLong", + "ErrMissingBoundary", + "ErrMissingContentLength", + "ErrMissingFile", + "ErrNoCookie", + "ErrNoLocation", + "ErrNotMultipart", + "ErrNotSupported", + "ErrServerClosed", + "ErrShortBody", + "ErrSkipAltProtocol", + "ErrUnexpectedTrailer", + "ErrUseLastResponse", + "ErrWriteAfterFlush", + "Error", + "FS", + "File", + "FileServer", + "FileSystem", + "Flusher", + "Get", + "Handle", + "HandleFunc", + "Handler", + "HandlerFunc", + "Head", + "Header", + "Hijacker", + "ListenAndServe", + "ListenAndServeTLS", + "LocalAddrContextKey", + "MaxBytesReader", + "MethodConnect", + "MethodDelete", + "MethodGet", + "MethodHead", + "MethodOptions", + "MethodPatch", + "MethodPost", + "MethodPut", + "MethodTrace", + "NewFileTransport", + "NewRequest", + "NewRequestWithContext", + "NewServeMux", + "NoBody", + "NotFound", + "NotFoundHandler", + "ParseHTTPVersion", + "ParseTime", + "Post", + "PostForm", + "ProtocolError", + "ProxyFromEnvironment", + "ProxyURL", + "PushOptions", + "Pusher", + "ReadRequest", + "ReadResponse", + "Redirect", + "RedirectHandler", + "Request", + "Response", + "ResponseWriter", + "RoundTripper", + "SameSite", + "SameSiteDefaultMode", + "SameSiteLaxMode", + "SameSiteNoneMode", + "SameSiteStrictMode", + "Serve", + "ServeContent", + "ServeFile", + "ServeMux", + "ServeTLS", + "Server", + "ServerContextKey", + "SetCookie", + "StateActive", + "StateClosed", + "StateHijacked", + "StateIdle", + "StateNew", + "StatusAccepted", + "StatusAlreadyReported", + "StatusBadGateway", + "StatusBadRequest", + "StatusConflict", + "StatusContinue", + "StatusCreated", + "StatusEarlyHints", + "StatusExpectationFailed", + "StatusFailedDependency", + "StatusForbidden", + "StatusFound", + "StatusGatewayTimeout", + "StatusGone", + "StatusHTTPVersionNotSupported", + "StatusIMUsed", + "StatusInsufficientStorage", + "StatusInternalServerError", + "StatusLengthRequired", + "StatusLocked", + "StatusLoopDetected", + "StatusMethodNotAllowed", + "StatusMisdirectedRequest", + "StatusMovedPermanently", + "StatusMultiStatus", + "StatusMultipleChoices", + "StatusNetworkAuthenticationRequired", + "StatusNoContent", + "StatusNonAuthoritativeInfo", + "StatusNotAcceptable", + "StatusNotExtended", + "StatusNotFound", + "StatusNotImplemented", + "StatusNotModified", + "StatusOK", + "StatusPartialContent", + "StatusPaymentRequired", + "StatusPermanentRedirect", + "StatusPreconditionFailed", + "StatusPreconditionRequired", + "StatusProcessing", + "StatusProxyAuthRequired", + "StatusRequestEntityTooLarge", + "StatusRequestHeaderFieldsTooLarge", + "StatusRequestTimeout", + "StatusRequestURITooLong", + "StatusRequestedRangeNotSatisfiable", + "StatusResetContent", + "StatusSeeOther", + "StatusServiceUnavailable", + "StatusSwitchingProtocols", + "StatusTeapot", + "StatusTemporaryRedirect", + "StatusText", + "StatusTooEarly", + "StatusTooManyRequests", + "StatusUnauthorized", + "StatusUnavailableForLegalReasons", + "StatusUnprocessableEntity", + "StatusUnsupportedMediaType", + "StatusUpgradeRequired", + "StatusUseProxy", + "StatusVariantAlsoNegotiates", + "StripPrefix", + "TimeFormat", + "TimeoutHandler", + "TrailerPrefix", + "Transport", + }, + "net/http/cgi": []string{ + 
"Handler", + "Request", + "RequestFromMap", + "Serve", + }, + "net/http/cookiejar": []string{ + "Jar", + "New", + "Options", + "PublicSuffixList", + }, + "net/http/fcgi": []string{ + "ErrConnClosed", + "ErrRequestAborted", + "ProcessEnv", + "Serve", + }, + "net/http/httptest": []string{ + "DefaultRemoteAddr", + "NewRecorder", + "NewRequest", + "NewServer", + "NewTLSServer", + "NewUnstartedServer", + "ResponseRecorder", + "Server", + }, + "net/http/httptrace": []string{ + "ClientTrace", + "ContextClientTrace", + "DNSDoneInfo", + "DNSStartInfo", + "GotConnInfo", + "WithClientTrace", + "WroteRequestInfo", + }, + "net/http/httputil": []string{ + "BufferPool", + "ClientConn", + "DumpRequest", + "DumpRequestOut", + "DumpResponse", + "ErrClosed", + "ErrLineTooLong", + "ErrPersistEOF", + "ErrPipeline", + "NewChunkedReader", + "NewChunkedWriter", + "NewClientConn", + "NewProxyClientConn", + "NewServerConn", + "NewSingleHostReverseProxy", + "ReverseProxy", + "ServerConn", + }, + "net/http/pprof": []string{ + "Cmdline", + "Handler", + "Index", + "Profile", + "Symbol", + "Trace", + }, + "net/mail": []string{ + "Address", + "AddressParser", + "ErrHeaderNotPresent", + "Header", + "Message", + "ParseAddress", + "ParseAddressList", + "ParseDate", + "ReadMessage", + }, + "net/rpc": []string{ + "Accept", + "Call", + "Client", + "ClientCodec", + "DefaultDebugPath", + "DefaultRPCPath", + "DefaultServer", + "Dial", + "DialHTTP", + "DialHTTPPath", + "ErrShutdown", + "HandleHTTP", + "NewClient", + "NewClientWithCodec", + "NewServer", + "Register", + "RegisterName", + "Request", + "Response", + "ServeCodec", + "ServeConn", + "ServeRequest", + "Server", + "ServerCodec", + "ServerError", + }, + "net/rpc/jsonrpc": []string{ + "Dial", + "NewClient", + "NewClientCodec", + "NewServerCodec", + "ServeConn", + }, + "net/smtp": []string{ + "Auth", + "CRAMMD5Auth", + "Client", + "Dial", + "NewClient", + "PlainAuth", + "SendMail", + "ServerInfo", + }, + "net/textproto": []string{ + "CanonicalMIMEHeaderKey", + "Conn", + "Dial", + "Error", + "MIMEHeader", + "NewConn", + "NewReader", + "NewWriter", + "Pipeline", + "ProtocolError", + "Reader", + "TrimBytes", + "TrimString", + "Writer", + }, + "net/url": []string{ + "Error", + "EscapeError", + "InvalidHostError", + "Parse", + "ParseQuery", + "ParseRequestURI", + "PathEscape", + "PathUnescape", + "QueryEscape", + "QueryUnescape", + "URL", + "User", + "UserPassword", + "Userinfo", + "Values", + }, + "os": []string{ + "Args", + "Chdir", + "Chmod", + "Chown", + "Chtimes", + "Clearenv", + "Create", + "CreateTemp", + "DevNull", + "DirEntry", + "DirFS", + "Environ", + "ErrClosed", + "ErrDeadlineExceeded", + "ErrExist", + "ErrInvalid", + "ErrNoDeadline", + "ErrNotExist", + "ErrPermission", + "ErrProcessDone", + "Executable", + "Exit", + "Expand", + "ExpandEnv", + "File", + "FileInfo", + "FileMode", + "FindProcess", + "Getegid", + "Getenv", + "Geteuid", + "Getgid", + "Getgroups", + "Getpagesize", + "Getpid", + "Getppid", + "Getuid", + "Getwd", + "Hostname", + "Interrupt", + "IsExist", + "IsNotExist", + "IsPathSeparator", + "IsPermission", + "IsTimeout", + "Kill", + "Lchown", + "Link", + "LinkError", + "LookupEnv", + "Lstat", + "Mkdir", + "MkdirAll", + "MkdirTemp", + "ModeAppend", + "ModeCharDevice", + "ModeDevice", + "ModeDir", + "ModeExclusive", + "ModeIrregular", + "ModeNamedPipe", + "ModePerm", + "ModeSetgid", + "ModeSetuid", + "ModeSocket", + "ModeSticky", + "ModeSymlink", + "ModeTemporary", + "ModeType", + "NewFile", + "NewSyscallError", + "O_APPEND", + "O_CREATE", + "O_EXCL", + 
"O_RDONLY", + "O_RDWR", + "O_SYNC", + "O_TRUNC", + "O_WRONLY", + "Open", + "OpenFile", + "PathError", + "PathListSeparator", + "PathSeparator", + "Pipe", + "ProcAttr", + "Process", + "ProcessState", + "ReadDir", + "ReadFile", + "Readlink", + "Remove", + "RemoveAll", + "Rename", + "SEEK_CUR", + "SEEK_END", + "SEEK_SET", + "SameFile", + "Setenv", + "Signal", + "StartProcess", + "Stat", + "Stderr", + "Stdin", + "Stdout", + "Symlink", + "SyscallError", + "TempDir", + "Truncate", + "Unsetenv", + "UserCacheDir", + "UserConfigDir", + "UserHomeDir", + "WriteFile", + }, + "os/exec": []string{ + "Cmd", + "Command", + "CommandContext", + "ErrNotFound", + "Error", + "ExitError", + "LookPath", + }, + "os/signal": []string{ + "Ignore", + "Ignored", + "Notify", + "NotifyContext", + "Reset", + "Stop", + }, + "os/user": []string{ + "Current", + "Group", + "Lookup", + "LookupGroup", + "LookupGroupId", + "LookupId", + "UnknownGroupError", + "UnknownGroupIdError", + "UnknownUserError", + "UnknownUserIdError", + "User", + }, + "path": []string{ + "Base", + "Clean", + "Dir", + "ErrBadPattern", + "Ext", + "IsAbs", + "Join", + "Match", + "Split", + }, + "path/filepath": []string{ + "Abs", + "Base", + "Clean", + "Dir", + "ErrBadPattern", + "EvalSymlinks", + "Ext", + "FromSlash", + "Glob", + "HasPrefix", + "IsAbs", + "Join", + "ListSeparator", + "Match", + "Rel", + "Separator", + "SkipDir", + "Split", + "SplitList", + "ToSlash", + "VolumeName", + "Walk", + "WalkDir", + "WalkFunc", + }, + "plugin": []string{ + "Open", + "Plugin", + "Symbol", + }, + "reflect": []string{ + "Append", + "AppendSlice", + "Array", + "ArrayOf", + "Bool", + "BothDir", + "Chan", + "ChanDir", + "ChanOf", + "Complex128", + "Complex64", + "Copy", + "DeepEqual", + "Float32", + "Float64", + "Func", + "FuncOf", + "Indirect", + "Int", + "Int16", + "Int32", + "Int64", + "Int8", + "Interface", + "Invalid", + "Kind", + "MakeChan", + "MakeFunc", + "MakeMap", + "MakeMapWithSize", + "MakeSlice", + "Map", + "MapIter", + "MapOf", + "Method", + "New", + "NewAt", + "Ptr", + "PtrTo", + "RecvDir", + "Select", + "SelectCase", + "SelectDefault", + "SelectDir", + "SelectRecv", + "SelectSend", + "SendDir", + "Slice", + "SliceHeader", + "SliceOf", + "String", + "StringHeader", + "Struct", + "StructField", + "StructOf", + "StructTag", + "Swapper", + "Type", + "TypeOf", + "Uint", + "Uint16", + "Uint32", + "Uint64", + "Uint8", + "Uintptr", + "UnsafePointer", + "Value", + "ValueError", + "ValueOf", + "Zero", + }, + "regexp": []string{ + "Compile", + "CompilePOSIX", + "Match", + "MatchReader", + "MatchString", + "MustCompile", + "MustCompilePOSIX", + "QuoteMeta", + "Regexp", + }, + "regexp/syntax": []string{ + "ClassNL", + "Compile", + "DotNL", + "EmptyBeginLine", + "EmptyBeginText", + "EmptyEndLine", + "EmptyEndText", + "EmptyNoWordBoundary", + "EmptyOp", + "EmptyOpContext", + "EmptyWordBoundary", + "ErrInternalError", + "ErrInvalidCharClass", + "ErrInvalidCharRange", + "ErrInvalidEscape", + "ErrInvalidNamedCapture", + "ErrInvalidPerlOp", + "ErrInvalidRepeatOp", + "ErrInvalidRepeatSize", + "ErrInvalidUTF8", + "ErrMissingBracket", + "ErrMissingParen", + "ErrMissingRepeatArgument", + "ErrTrailingBackslash", + "ErrUnexpectedParen", + "Error", + "ErrorCode", + "Flags", + "FoldCase", + "Inst", + "InstAlt", + "InstAltMatch", + "InstCapture", + "InstEmptyWidth", + "InstFail", + "InstMatch", + "InstNop", + "InstOp", + "InstRune", + "InstRune1", + "InstRuneAny", + "InstRuneAnyNotNL", + "IsWordChar", + "Literal", + "MatchNL", + "NonGreedy", + "OneLine", + "Op", + "OpAlternate", 
+ "OpAnyChar", + "OpAnyCharNotNL", + "OpBeginLine", + "OpBeginText", + "OpCapture", + "OpCharClass", + "OpConcat", + "OpEmptyMatch", + "OpEndLine", + "OpEndText", + "OpLiteral", + "OpNoMatch", + "OpNoWordBoundary", + "OpPlus", + "OpQuest", + "OpRepeat", + "OpStar", + "OpWordBoundary", + "POSIX", + "Parse", + "Perl", + "PerlX", + "Prog", + "Regexp", + "Simple", + "UnicodeGroups", + "WasDollar", + }, + "runtime": []string{ + "BlockProfile", + "BlockProfileRecord", + "Breakpoint", + "CPUProfile", + "Caller", + "Callers", + "CallersFrames", + "Compiler", + "Error", + "Frame", + "Frames", + "Func", + "FuncForPC", + "GC", + "GOARCH", + "GOMAXPROCS", + "GOOS", + "GOROOT", + "Goexit", + "GoroutineProfile", + "Gosched", + "KeepAlive", + "LockOSThread", + "MemProfile", + "MemProfileRate", + "MemProfileRecord", + "MemStats", + "MutexProfile", + "NumCPU", + "NumCgoCall", + "NumGoroutine", + "ReadMemStats", + "ReadTrace", + "SetBlockProfileRate", + "SetCPUProfileRate", + "SetCgoTraceback", + "SetFinalizer", + "SetMutexProfileFraction", + "Stack", + "StackRecord", + "StartTrace", + "StopTrace", + "ThreadCreateProfile", + "TypeAssertionError", + "UnlockOSThread", + "Version", + }, + "runtime/debug": []string{ + "BuildInfo", + "FreeOSMemory", + "GCStats", + "Module", + "PrintStack", + "ReadBuildInfo", + "ReadGCStats", + "SetGCPercent", + "SetMaxStack", + "SetMaxThreads", + "SetPanicOnFault", + "SetTraceback", + "Stack", + "WriteHeapDump", + }, + "runtime/metrics": []string{ + "All", + "Description", + "Float64Histogram", + "KindBad", + "KindFloat64", + "KindFloat64Histogram", + "KindUint64", + "Read", + "Sample", + "Value", + "ValueKind", + }, + "runtime/pprof": []string{ + "Do", + "ForLabels", + "Label", + "LabelSet", + "Labels", + "Lookup", + "NewProfile", + "Profile", + "Profiles", + "SetGoroutineLabels", + "StartCPUProfile", + "StopCPUProfile", + "WithLabels", + "WriteHeapProfile", + }, + "runtime/trace": []string{ + "IsEnabled", + "Log", + "Logf", + "NewTask", + "Region", + "Start", + "StartRegion", + "Stop", + "Task", + "WithRegion", + }, + "sort": []string{ + "Float64Slice", + "Float64s", + "Float64sAreSorted", + "IntSlice", + "Interface", + "Ints", + "IntsAreSorted", + "IsSorted", + "Reverse", + "Search", + "SearchFloat64s", + "SearchInts", + "SearchStrings", + "Slice", + "SliceIsSorted", + "SliceStable", + "Sort", + "Stable", + "StringSlice", + "Strings", + "StringsAreSorted", + }, + "strconv": []string{ + "AppendBool", + "AppendFloat", + "AppendInt", + "AppendQuote", + "AppendQuoteRune", + "AppendQuoteRuneToASCII", + "AppendQuoteRuneToGraphic", + "AppendQuoteToASCII", + "AppendQuoteToGraphic", + "AppendUint", + "Atoi", + "CanBackquote", + "ErrRange", + "ErrSyntax", + "FormatBool", + "FormatComplex", + "FormatFloat", + "FormatInt", + "FormatUint", + "IntSize", + "IsGraphic", + "IsPrint", + "Itoa", + "NumError", + "ParseBool", + "ParseComplex", + "ParseFloat", + "ParseInt", + "ParseUint", + "Quote", + "QuoteRune", + "QuoteRuneToASCII", + "QuoteRuneToGraphic", + "QuoteToASCII", + "QuoteToGraphic", + "Unquote", + "UnquoteChar", + }, + "strings": []string{ + "Builder", + "Compare", + "Contains", + "ContainsAny", + "ContainsRune", + "Count", + "EqualFold", + "Fields", + "FieldsFunc", + "HasPrefix", + "HasSuffix", + "Index", + "IndexAny", + "IndexByte", + "IndexFunc", + "IndexRune", + "Join", + "LastIndex", + "LastIndexAny", + "LastIndexByte", + "LastIndexFunc", + "Map", + "NewReader", + "NewReplacer", + "Reader", + "Repeat", + "Replace", + "ReplaceAll", + "Replacer", + "Split", + "SplitAfter", + 
"SplitAfterN", + "SplitN", + "Title", + "ToLower", + "ToLowerSpecial", + "ToTitle", + "ToTitleSpecial", + "ToUpper", + "ToUpperSpecial", + "ToValidUTF8", + "Trim", + "TrimFunc", + "TrimLeft", + "TrimLeftFunc", + "TrimPrefix", + "TrimRight", + "TrimRightFunc", + "TrimSpace", + "TrimSuffix", + }, + "sync": []string{ + "Cond", + "Locker", + "Map", + "Mutex", + "NewCond", + "Once", + "Pool", + "RWMutex", + "WaitGroup", + }, + "sync/atomic": []string{ + "AddInt32", + "AddInt64", + "AddUint32", + "AddUint64", + "AddUintptr", + "CompareAndSwapInt32", + "CompareAndSwapInt64", + "CompareAndSwapPointer", + "CompareAndSwapUint32", + "CompareAndSwapUint64", + "CompareAndSwapUintptr", + "LoadInt32", + "LoadInt64", + "LoadPointer", + "LoadUint32", + "LoadUint64", + "LoadUintptr", + "StoreInt32", + "StoreInt64", + "StorePointer", + "StoreUint32", + "StoreUint64", + "StoreUintptr", + "SwapInt32", + "SwapInt64", + "SwapPointer", + "SwapUint32", + "SwapUint64", + "SwapUintptr", + "Value", + }, + "syscall": []string{ + "AF_ALG", + "AF_APPLETALK", + "AF_ARP", + "AF_ASH", + "AF_ATM", + "AF_ATMPVC", + "AF_ATMSVC", + "AF_AX25", + "AF_BLUETOOTH", + "AF_BRIDGE", + "AF_CAIF", + "AF_CAN", + "AF_CCITT", + "AF_CHAOS", + "AF_CNT", + "AF_COIP", + "AF_DATAKIT", + "AF_DECnet", + "AF_DLI", + "AF_E164", + "AF_ECMA", + "AF_ECONET", + "AF_ENCAP", + "AF_FILE", + "AF_HYLINK", + "AF_IEEE80211", + "AF_IEEE802154", + "AF_IMPLINK", + "AF_INET", + "AF_INET6", + "AF_INET6_SDP", + "AF_INET_SDP", + "AF_IPX", + "AF_IRDA", + "AF_ISDN", + "AF_ISO", + "AF_IUCV", + "AF_KEY", + "AF_LAT", + "AF_LINK", + "AF_LLC", + "AF_LOCAL", + "AF_MAX", + "AF_MPLS", + "AF_NATM", + "AF_NDRV", + "AF_NETBEUI", + "AF_NETBIOS", + "AF_NETGRAPH", + "AF_NETLINK", + "AF_NETROM", + "AF_NS", + "AF_OROUTE", + "AF_OSI", + "AF_PACKET", + "AF_PHONET", + "AF_PPP", + "AF_PPPOX", + "AF_PUP", + "AF_RDS", + "AF_RESERVED_36", + "AF_ROSE", + "AF_ROUTE", + "AF_RXRPC", + "AF_SCLUSTER", + "AF_SECURITY", + "AF_SIP", + "AF_SLOW", + "AF_SNA", + "AF_SYSTEM", + "AF_TIPC", + "AF_UNIX", + "AF_UNSPEC", + "AF_VENDOR00", + "AF_VENDOR01", + "AF_VENDOR02", + "AF_VENDOR03", + "AF_VENDOR04", + "AF_VENDOR05", + "AF_VENDOR06", + "AF_VENDOR07", + "AF_VENDOR08", + "AF_VENDOR09", + "AF_VENDOR10", + "AF_VENDOR11", + "AF_VENDOR12", + "AF_VENDOR13", + "AF_VENDOR14", + "AF_VENDOR15", + "AF_VENDOR16", + "AF_VENDOR17", + "AF_VENDOR18", + "AF_VENDOR19", + "AF_VENDOR20", + "AF_VENDOR21", + "AF_VENDOR22", + "AF_VENDOR23", + "AF_VENDOR24", + "AF_VENDOR25", + "AF_VENDOR26", + "AF_VENDOR27", + "AF_VENDOR28", + "AF_VENDOR29", + "AF_VENDOR30", + "AF_VENDOR31", + "AF_VENDOR32", + "AF_VENDOR33", + "AF_VENDOR34", + "AF_VENDOR35", + "AF_VENDOR36", + "AF_VENDOR37", + "AF_VENDOR38", + "AF_VENDOR39", + "AF_VENDOR40", + "AF_VENDOR41", + "AF_VENDOR42", + "AF_VENDOR43", + "AF_VENDOR44", + "AF_VENDOR45", + "AF_VENDOR46", + "AF_VENDOR47", + "AF_WANPIPE", + "AF_X25", + "AI_CANONNAME", + "AI_NUMERICHOST", + "AI_PASSIVE", + "APPLICATION_ERROR", + "ARPHRD_ADAPT", + "ARPHRD_APPLETLK", + "ARPHRD_ARCNET", + "ARPHRD_ASH", + "ARPHRD_ATM", + "ARPHRD_AX25", + "ARPHRD_BIF", + "ARPHRD_CHAOS", + "ARPHRD_CISCO", + "ARPHRD_CSLIP", + "ARPHRD_CSLIP6", + "ARPHRD_DDCMP", + "ARPHRD_DLCI", + "ARPHRD_ECONET", + "ARPHRD_EETHER", + "ARPHRD_ETHER", + "ARPHRD_EUI64", + "ARPHRD_FCAL", + "ARPHRD_FCFABRIC", + "ARPHRD_FCPL", + "ARPHRD_FCPP", + "ARPHRD_FDDI", + "ARPHRD_FRAD", + "ARPHRD_FRELAY", + "ARPHRD_HDLC", + "ARPHRD_HIPPI", + "ARPHRD_HWX25", + "ARPHRD_IEEE1394", + "ARPHRD_IEEE802", + "ARPHRD_IEEE80211", + "ARPHRD_IEEE80211_PRISM", + 
"ARPHRD_IEEE80211_RADIOTAP", + "ARPHRD_IEEE802154", + "ARPHRD_IEEE802154_PHY", + "ARPHRD_IEEE802_TR", + "ARPHRD_INFINIBAND", + "ARPHRD_IPDDP", + "ARPHRD_IPGRE", + "ARPHRD_IRDA", + "ARPHRD_LAPB", + "ARPHRD_LOCALTLK", + "ARPHRD_LOOPBACK", + "ARPHRD_METRICOM", + "ARPHRD_NETROM", + "ARPHRD_NONE", + "ARPHRD_PIMREG", + "ARPHRD_PPP", + "ARPHRD_PRONET", + "ARPHRD_RAWHDLC", + "ARPHRD_ROSE", + "ARPHRD_RSRVD", + "ARPHRD_SIT", + "ARPHRD_SKIP", + "ARPHRD_SLIP", + "ARPHRD_SLIP6", + "ARPHRD_STRIP", + "ARPHRD_TUNNEL", + "ARPHRD_TUNNEL6", + "ARPHRD_VOID", + "ARPHRD_X25", + "AUTHTYPE_CLIENT", + "AUTHTYPE_SERVER", + "Accept", + "Accept4", + "AcceptEx", + "Access", + "Acct", + "AddrinfoW", + "Adjtime", + "Adjtimex", + "AllThreadsSyscall", + "AllThreadsSyscall6", + "AttachLsf", + "B0", + "B1000000", + "B110", + "B115200", + "B1152000", + "B1200", + "B134", + "B14400", + "B150", + "B1500000", + "B1800", + "B19200", + "B200", + "B2000000", + "B230400", + "B2400", + "B2500000", + "B28800", + "B300", + "B3000000", + "B3500000", + "B38400", + "B4000000", + "B460800", + "B4800", + "B50", + "B500000", + "B57600", + "B576000", + "B600", + "B7200", + "B75", + "B76800", + "B921600", + "B9600", + "BASE_PROTOCOL", + "BIOCFEEDBACK", + "BIOCFLUSH", + "BIOCGBLEN", + "BIOCGDIRECTION", + "BIOCGDIRFILT", + "BIOCGDLT", + "BIOCGDLTLIST", + "BIOCGETBUFMODE", + "BIOCGETIF", + "BIOCGETZMAX", + "BIOCGFEEDBACK", + "BIOCGFILDROP", + "BIOCGHDRCMPLT", + "BIOCGRSIG", + "BIOCGRTIMEOUT", + "BIOCGSEESENT", + "BIOCGSTATS", + "BIOCGSTATSOLD", + "BIOCGTSTAMP", + "BIOCIMMEDIATE", + "BIOCLOCK", + "BIOCPROMISC", + "BIOCROTZBUF", + "BIOCSBLEN", + "BIOCSDIRECTION", + "BIOCSDIRFILT", + "BIOCSDLT", + "BIOCSETBUFMODE", + "BIOCSETF", + "BIOCSETFNR", + "BIOCSETIF", + "BIOCSETWF", + "BIOCSETZBUF", + "BIOCSFEEDBACK", + "BIOCSFILDROP", + "BIOCSHDRCMPLT", + "BIOCSRSIG", + "BIOCSRTIMEOUT", + "BIOCSSEESENT", + "BIOCSTCPF", + "BIOCSTSTAMP", + "BIOCSUDPF", + "BIOCVERSION", + "BPF_A", + "BPF_ABS", + "BPF_ADD", + "BPF_ALIGNMENT", + "BPF_ALIGNMENT32", + "BPF_ALU", + "BPF_AND", + "BPF_B", + "BPF_BUFMODE_BUFFER", + "BPF_BUFMODE_ZBUF", + "BPF_DFLTBUFSIZE", + "BPF_DIRECTION_IN", + "BPF_DIRECTION_OUT", + "BPF_DIV", + "BPF_H", + "BPF_IMM", + "BPF_IND", + "BPF_JA", + "BPF_JEQ", + "BPF_JGE", + "BPF_JGT", + "BPF_JMP", + "BPF_JSET", + "BPF_K", + "BPF_LD", + "BPF_LDX", + "BPF_LEN", + "BPF_LSH", + "BPF_MAJOR_VERSION", + "BPF_MAXBUFSIZE", + "BPF_MAXINSNS", + "BPF_MEM", + "BPF_MEMWORDS", + "BPF_MINBUFSIZE", + "BPF_MINOR_VERSION", + "BPF_MISC", + "BPF_MSH", + "BPF_MUL", + "BPF_NEG", + "BPF_OR", + "BPF_RELEASE", + "BPF_RET", + "BPF_RSH", + "BPF_ST", + "BPF_STX", + "BPF_SUB", + "BPF_TAX", + "BPF_TXA", + "BPF_T_BINTIME", + "BPF_T_BINTIME_FAST", + "BPF_T_BINTIME_MONOTONIC", + "BPF_T_BINTIME_MONOTONIC_FAST", + "BPF_T_FAST", + "BPF_T_FLAG_MASK", + "BPF_T_FORMAT_MASK", + "BPF_T_MICROTIME", + "BPF_T_MICROTIME_FAST", + "BPF_T_MICROTIME_MONOTONIC", + "BPF_T_MICROTIME_MONOTONIC_FAST", + "BPF_T_MONOTONIC", + "BPF_T_MONOTONIC_FAST", + "BPF_T_NANOTIME", + "BPF_T_NANOTIME_FAST", + "BPF_T_NANOTIME_MONOTONIC", + "BPF_T_NANOTIME_MONOTONIC_FAST", + "BPF_T_NONE", + "BPF_T_NORMAL", + "BPF_W", + "BPF_X", + "BRKINT", + "Bind", + "BindToDevice", + "BpfBuflen", + "BpfDatalink", + "BpfHdr", + "BpfHeadercmpl", + "BpfInsn", + "BpfInterface", + "BpfJump", + "BpfProgram", + "BpfStat", + "BpfStats", + "BpfStmt", + "BpfTimeout", + "BpfTimeval", + "BpfVersion", + "BpfZbuf", + "BpfZbufHeader", + "ByHandleFileInformation", + "BytePtrFromString", + "ByteSliceFromString", + "CCR0_FLUSH", + 
"CERT_CHAIN_POLICY_AUTHENTICODE", + "CERT_CHAIN_POLICY_AUTHENTICODE_TS", + "CERT_CHAIN_POLICY_BASE", + "CERT_CHAIN_POLICY_BASIC_CONSTRAINTS", + "CERT_CHAIN_POLICY_EV", + "CERT_CHAIN_POLICY_MICROSOFT_ROOT", + "CERT_CHAIN_POLICY_NT_AUTH", + "CERT_CHAIN_POLICY_SSL", + "CERT_E_CN_NO_MATCH", + "CERT_E_EXPIRED", + "CERT_E_PURPOSE", + "CERT_E_ROLE", + "CERT_E_UNTRUSTEDROOT", + "CERT_STORE_ADD_ALWAYS", + "CERT_STORE_DEFER_CLOSE_UNTIL_LAST_FREE_FLAG", + "CERT_STORE_PROV_MEMORY", + "CERT_TRUST_HAS_EXCLUDED_NAME_CONSTRAINT", + "CERT_TRUST_HAS_NOT_DEFINED_NAME_CONSTRAINT", + "CERT_TRUST_HAS_NOT_PERMITTED_NAME_CONSTRAINT", + "CERT_TRUST_HAS_NOT_SUPPORTED_CRITICAL_EXT", + "CERT_TRUST_HAS_NOT_SUPPORTED_NAME_CONSTRAINT", + "CERT_TRUST_INVALID_BASIC_CONSTRAINTS", + "CERT_TRUST_INVALID_EXTENSION", + "CERT_TRUST_INVALID_NAME_CONSTRAINTS", + "CERT_TRUST_INVALID_POLICY_CONSTRAINTS", + "CERT_TRUST_IS_CYCLIC", + "CERT_TRUST_IS_EXPLICIT_DISTRUST", + "CERT_TRUST_IS_NOT_SIGNATURE_VALID", + "CERT_TRUST_IS_NOT_TIME_VALID", + "CERT_TRUST_IS_NOT_VALID_FOR_USAGE", + "CERT_TRUST_IS_OFFLINE_REVOCATION", + "CERT_TRUST_IS_REVOKED", + "CERT_TRUST_IS_UNTRUSTED_ROOT", + "CERT_TRUST_NO_ERROR", + "CERT_TRUST_NO_ISSUANCE_CHAIN_POLICY", + "CERT_TRUST_REVOCATION_STATUS_UNKNOWN", + "CFLUSH", + "CLOCAL", + "CLONE_CHILD_CLEARTID", + "CLONE_CHILD_SETTID", + "CLONE_CSIGNAL", + "CLONE_DETACHED", + "CLONE_FILES", + "CLONE_FS", + "CLONE_IO", + "CLONE_NEWIPC", + "CLONE_NEWNET", + "CLONE_NEWNS", + "CLONE_NEWPID", + "CLONE_NEWUSER", + "CLONE_NEWUTS", + "CLONE_PARENT", + "CLONE_PARENT_SETTID", + "CLONE_PID", + "CLONE_PTRACE", + "CLONE_SETTLS", + "CLONE_SIGHAND", + "CLONE_SYSVSEM", + "CLONE_THREAD", + "CLONE_UNTRACED", + "CLONE_VFORK", + "CLONE_VM", + "CPUID_CFLUSH", + "CREAD", + "CREATE_ALWAYS", + "CREATE_NEW", + "CREATE_NEW_PROCESS_GROUP", + "CREATE_UNICODE_ENVIRONMENT", + "CRYPT_DEFAULT_CONTAINER_OPTIONAL", + "CRYPT_DELETEKEYSET", + "CRYPT_MACHINE_KEYSET", + "CRYPT_NEWKEYSET", + "CRYPT_SILENT", + "CRYPT_VERIFYCONTEXT", + "CS5", + "CS6", + "CS7", + "CS8", + "CSIZE", + "CSTART", + "CSTATUS", + "CSTOP", + "CSTOPB", + "CSUSP", + "CTL_MAXNAME", + "CTL_NET", + "CTL_QUERY", + "CTRL_BREAK_EVENT", + "CTRL_CLOSE_EVENT", + "CTRL_C_EVENT", + "CTRL_LOGOFF_EVENT", + "CTRL_SHUTDOWN_EVENT", + "CancelIo", + "CancelIoEx", + "CertAddCertificateContextToStore", + "CertChainContext", + "CertChainElement", + "CertChainPara", + "CertChainPolicyPara", + "CertChainPolicyStatus", + "CertCloseStore", + "CertContext", + "CertCreateCertificateContext", + "CertEnhKeyUsage", + "CertEnumCertificatesInStore", + "CertFreeCertificateChain", + "CertFreeCertificateContext", + "CertGetCertificateChain", + "CertInfo", + "CertOpenStore", + "CertOpenSystemStore", + "CertRevocationCrlInfo", + "CertRevocationInfo", + "CertSimpleChain", + "CertTrustListInfo", + "CertTrustStatus", + "CertUsageMatch", + "CertVerifyCertificateChainPolicy", + "Chdir", + "CheckBpfVersion", + "Chflags", + "Chmod", + "Chown", + "Chroot", + "Clearenv", + "Close", + "CloseHandle", + "CloseOnExec", + "Closesocket", + "CmsgLen", + "CmsgSpace", + "Cmsghdr", + "CommandLineToArgv", + "ComputerName", + "Conn", + "Connect", + "ConnectEx", + "ConvertSidToStringSid", + "ConvertStringSidToSid", + "CopySid", + "Creat", + "CreateDirectory", + "CreateFile", + "CreateFileMapping", + "CreateHardLink", + "CreateIoCompletionPort", + "CreatePipe", + "CreateProcess", + "CreateProcessAsUser", + "CreateSymbolicLink", + "CreateToolhelp32Snapshot", + "Credential", + "CryptAcquireContext", + "CryptGenRandom", + "CryptReleaseContext", 
+ "DIOCBSFLUSH", + "DIOCOSFPFLUSH", + "DLL", + "DLLError", + "DLT_A429", + "DLT_A653_ICM", + "DLT_AIRONET_HEADER", + "DLT_AOS", + "DLT_APPLE_IP_OVER_IEEE1394", + "DLT_ARCNET", + "DLT_ARCNET_LINUX", + "DLT_ATM_CLIP", + "DLT_ATM_RFC1483", + "DLT_AURORA", + "DLT_AX25", + "DLT_AX25_KISS", + "DLT_BACNET_MS_TP", + "DLT_BLUETOOTH_HCI_H4", + "DLT_BLUETOOTH_HCI_H4_WITH_PHDR", + "DLT_CAN20B", + "DLT_CAN_SOCKETCAN", + "DLT_CHAOS", + "DLT_CHDLC", + "DLT_CISCO_IOS", + "DLT_C_HDLC", + "DLT_C_HDLC_WITH_DIR", + "DLT_DBUS", + "DLT_DECT", + "DLT_DOCSIS", + "DLT_DVB_CI", + "DLT_ECONET", + "DLT_EN10MB", + "DLT_EN3MB", + "DLT_ENC", + "DLT_ERF", + "DLT_ERF_ETH", + "DLT_ERF_POS", + "DLT_FC_2", + "DLT_FC_2_WITH_FRAME_DELIMS", + "DLT_FDDI", + "DLT_FLEXRAY", + "DLT_FRELAY", + "DLT_FRELAY_WITH_DIR", + "DLT_GCOM_SERIAL", + "DLT_GCOM_T1E1", + "DLT_GPF_F", + "DLT_GPF_T", + "DLT_GPRS_LLC", + "DLT_GSMTAP_ABIS", + "DLT_GSMTAP_UM", + "DLT_HDLC", + "DLT_HHDLC", + "DLT_HIPPI", + "DLT_IBM_SN", + "DLT_IBM_SP", + "DLT_IEEE802", + "DLT_IEEE802_11", + "DLT_IEEE802_11_RADIO", + "DLT_IEEE802_11_RADIO_AVS", + "DLT_IEEE802_15_4", + "DLT_IEEE802_15_4_LINUX", + "DLT_IEEE802_15_4_NOFCS", + "DLT_IEEE802_15_4_NONASK_PHY", + "DLT_IEEE802_16_MAC_CPS", + "DLT_IEEE802_16_MAC_CPS_RADIO", + "DLT_IPFILTER", + "DLT_IPMB", + "DLT_IPMB_LINUX", + "DLT_IPNET", + "DLT_IPOIB", + "DLT_IPV4", + "DLT_IPV6", + "DLT_IP_OVER_FC", + "DLT_JUNIPER_ATM1", + "DLT_JUNIPER_ATM2", + "DLT_JUNIPER_ATM_CEMIC", + "DLT_JUNIPER_CHDLC", + "DLT_JUNIPER_ES", + "DLT_JUNIPER_ETHER", + "DLT_JUNIPER_FIBRECHANNEL", + "DLT_JUNIPER_FRELAY", + "DLT_JUNIPER_GGSN", + "DLT_JUNIPER_ISM", + "DLT_JUNIPER_MFR", + "DLT_JUNIPER_MLFR", + "DLT_JUNIPER_MLPPP", + "DLT_JUNIPER_MONITOR", + "DLT_JUNIPER_PIC_PEER", + "DLT_JUNIPER_PPP", + "DLT_JUNIPER_PPPOE", + "DLT_JUNIPER_PPPOE_ATM", + "DLT_JUNIPER_SERVICES", + "DLT_JUNIPER_SRX_E2E", + "DLT_JUNIPER_ST", + "DLT_JUNIPER_VP", + "DLT_JUNIPER_VS", + "DLT_LAPB_WITH_DIR", + "DLT_LAPD", + "DLT_LIN", + "DLT_LINUX_EVDEV", + "DLT_LINUX_IRDA", + "DLT_LINUX_LAPD", + "DLT_LINUX_PPP_WITHDIRECTION", + "DLT_LINUX_SLL", + "DLT_LOOP", + "DLT_LTALK", + "DLT_MATCHING_MAX", + "DLT_MATCHING_MIN", + "DLT_MFR", + "DLT_MOST", + "DLT_MPEG_2_TS", + "DLT_MPLS", + "DLT_MTP2", + "DLT_MTP2_WITH_PHDR", + "DLT_MTP3", + "DLT_MUX27010", + "DLT_NETANALYZER", + "DLT_NETANALYZER_TRANSPARENT", + "DLT_NFC_LLCP", + "DLT_NFLOG", + "DLT_NG40", + "DLT_NULL", + "DLT_PCI_EXP", + "DLT_PFLOG", + "DLT_PFSYNC", + "DLT_PPI", + "DLT_PPP", + "DLT_PPP_BSDOS", + "DLT_PPP_ETHER", + "DLT_PPP_PPPD", + "DLT_PPP_SERIAL", + "DLT_PPP_WITH_DIR", + "DLT_PPP_WITH_DIRECTION", + "DLT_PRISM_HEADER", + "DLT_PRONET", + "DLT_RAIF1", + "DLT_RAW", + "DLT_RAWAF_MASK", + "DLT_RIO", + "DLT_SCCP", + "DLT_SITA", + "DLT_SLIP", + "DLT_SLIP_BSDOS", + "DLT_STANAG_5066_D_PDU", + "DLT_SUNATM", + "DLT_SYMANTEC_FIREWALL", + "DLT_TZSP", + "DLT_USB", + "DLT_USB_LINUX", + "DLT_USB_LINUX_MMAPPED", + "DLT_USER0", + "DLT_USER1", + "DLT_USER10", + "DLT_USER11", + "DLT_USER12", + "DLT_USER13", + "DLT_USER14", + "DLT_USER15", + "DLT_USER2", + "DLT_USER3", + "DLT_USER4", + "DLT_USER5", + "DLT_USER6", + "DLT_USER7", + "DLT_USER8", + "DLT_USER9", + "DLT_WIHART", + "DLT_X2E_SERIAL", + "DLT_X2E_XORAYA", + "DNSMXData", + "DNSPTRData", + "DNSRecord", + "DNSSRVData", + "DNSTXTData", + "DNS_INFO_NO_RECORDS", + "DNS_TYPE_A", + "DNS_TYPE_A6", + "DNS_TYPE_AAAA", + "DNS_TYPE_ADDRS", + "DNS_TYPE_AFSDB", + "DNS_TYPE_ALL", + "DNS_TYPE_ANY", + "DNS_TYPE_ATMA", + "DNS_TYPE_AXFR", + "DNS_TYPE_CERT", + "DNS_TYPE_CNAME", + "DNS_TYPE_DHCID", + "DNS_TYPE_DNAME", + 
"DNS_TYPE_DNSKEY", + "DNS_TYPE_DS", + "DNS_TYPE_EID", + "DNS_TYPE_GID", + "DNS_TYPE_GPOS", + "DNS_TYPE_HINFO", + "DNS_TYPE_ISDN", + "DNS_TYPE_IXFR", + "DNS_TYPE_KEY", + "DNS_TYPE_KX", + "DNS_TYPE_LOC", + "DNS_TYPE_MAILA", + "DNS_TYPE_MAILB", + "DNS_TYPE_MB", + "DNS_TYPE_MD", + "DNS_TYPE_MF", + "DNS_TYPE_MG", + "DNS_TYPE_MINFO", + "DNS_TYPE_MR", + "DNS_TYPE_MX", + "DNS_TYPE_NAPTR", + "DNS_TYPE_NBSTAT", + "DNS_TYPE_NIMLOC", + "DNS_TYPE_NS", + "DNS_TYPE_NSAP", + "DNS_TYPE_NSAPPTR", + "DNS_TYPE_NSEC", + "DNS_TYPE_NULL", + "DNS_TYPE_NXT", + "DNS_TYPE_OPT", + "DNS_TYPE_PTR", + "DNS_TYPE_PX", + "DNS_TYPE_RP", + "DNS_TYPE_RRSIG", + "DNS_TYPE_RT", + "DNS_TYPE_SIG", + "DNS_TYPE_SINK", + "DNS_TYPE_SOA", + "DNS_TYPE_SRV", + "DNS_TYPE_TEXT", + "DNS_TYPE_TKEY", + "DNS_TYPE_TSIG", + "DNS_TYPE_UID", + "DNS_TYPE_UINFO", + "DNS_TYPE_UNSPEC", + "DNS_TYPE_WINS", + "DNS_TYPE_WINSR", + "DNS_TYPE_WKS", + "DNS_TYPE_X25", + "DT_BLK", + "DT_CHR", + "DT_DIR", + "DT_FIFO", + "DT_LNK", + "DT_REG", + "DT_SOCK", + "DT_UNKNOWN", + "DT_WHT", + "DUPLICATE_CLOSE_SOURCE", + "DUPLICATE_SAME_ACCESS", + "DeleteFile", + "DetachLsf", + "DeviceIoControl", + "Dirent", + "DnsNameCompare", + "DnsQuery", + "DnsRecordListFree", + "DnsSectionAdditional", + "DnsSectionAnswer", + "DnsSectionAuthority", + "DnsSectionQuestion", + "Dup", + "Dup2", + "Dup3", + "DuplicateHandle", + "E2BIG", + "EACCES", + "EADDRINUSE", + "EADDRNOTAVAIL", + "EADV", + "EAFNOSUPPORT", + "EAGAIN", + "EALREADY", + "EAUTH", + "EBADARCH", + "EBADE", + "EBADEXEC", + "EBADF", + "EBADFD", + "EBADMACHO", + "EBADMSG", + "EBADR", + "EBADRPC", + "EBADRQC", + "EBADSLT", + "EBFONT", + "EBUSY", + "ECANCELED", + "ECAPMODE", + "ECHILD", + "ECHO", + "ECHOCTL", + "ECHOE", + "ECHOK", + "ECHOKE", + "ECHONL", + "ECHOPRT", + "ECHRNG", + "ECOMM", + "ECONNABORTED", + "ECONNREFUSED", + "ECONNRESET", + "EDEADLK", + "EDEADLOCK", + "EDESTADDRREQ", + "EDEVERR", + "EDOM", + "EDOOFUS", + "EDOTDOT", + "EDQUOT", + "EEXIST", + "EFAULT", + "EFBIG", + "EFER_LMA", + "EFER_LME", + "EFER_NXE", + "EFER_SCE", + "EFTYPE", + "EHOSTDOWN", + "EHOSTUNREACH", + "EHWPOISON", + "EIDRM", + "EILSEQ", + "EINPROGRESS", + "EINTR", + "EINVAL", + "EIO", + "EIPSEC", + "EISCONN", + "EISDIR", + "EISNAM", + "EKEYEXPIRED", + "EKEYREJECTED", + "EKEYREVOKED", + "EL2HLT", + "EL2NSYNC", + "EL3HLT", + "EL3RST", + "ELAST", + "ELF_NGREG", + "ELF_PRARGSZ", + "ELIBACC", + "ELIBBAD", + "ELIBEXEC", + "ELIBMAX", + "ELIBSCN", + "ELNRNG", + "ELOOP", + "EMEDIUMTYPE", + "EMFILE", + "EMLINK", + "EMSGSIZE", + "EMT_TAGOVF", + "EMULTIHOP", + "EMUL_ENABLED", + "EMUL_LINUX", + "EMUL_LINUX32", + "EMUL_MAXID", + "EMUL_NATIVE", + "ENAMETOOLONG", + "ENAVAIL", + "ENDRUNDISC", + "ENEEDAUTH", + "ENETDOWN", + "ENETRESET", + "ENETUNREACH", + "ENFILE", + "ENOANO", + "ENOATTR", + "ENOBUFS", + "ENOCSI", + "ENODATA", + "ENODEV", + "ENOENT", + "ENOEXEC", + "ENOKEY", + "ENOLCK", + "ENOLINK", + "ENOMEDIUM", + "ENOMEM", + "ENOMSG", + "ENONET", + "ENOPKG", + "ENOPOLICY", + "ENOPROTOOPT", + "ENOSPC", + "ENOSR", + "ENOSTR", + "ENOSYS", + "ENOTBLK", + "ENOTCAPABLE", + "ENOTCONN", + "ENOTDIR", + "ENOTEMPTY", + "ENOTNAM", + "ENOTRECOVERABLE", + "ENOTSOCK", + "ENOTSUP", + "ENOTTY", + "ENOTUNIQ", + "ENXIO", + "EN_SW_CTL_INF", + "EN_SW_CTL_PREC", + "EN_SW_CTL_ROUND", + "EN_SW_DATACHAIN", + "EN_SW_DENORM", + "EN_SW_INVOP", + "EN_SW_OVERFLOW", + "EN_SW_PRECLOSS", + "EN_SW_UNDERFLOW", + "EN_SW_ZERODIV", + "EOPNOTSUPP", + "EOVERFLOW", + "EOWNERDEAD", + "EPERM", + "EPFNOSUPPORT", + "EPIPE", + "EPOLLERR", + "EPOLLET", + "EPOLLHUP", + "EPOLLIN", + "EPOLLMSG", + "EPOLLONESHOT", 
+ "EPOLLOUT", + "EPOLLPRI", + "EPOLLRDBAND", + "EPOLLRDHUP", + "EPOLLRDNORM", + "EPOLLWRBAND", + "EPOLLWRNORM", + "EPOLL_CLOEXEC", + "EPOLL_CTL_ADD", + "EPOLL_CTL_DEL", + "EPOLL_CTL_MOD", + "EPOLL_NONBLOCK", + "EPROCLIM", + "EPROCUNAVAIL", + "EPROGMISMATCH", + "EPROGUNAVAIL", + "EPROTO", + "EPROTONOSUPPORT", + "EPROTOTYPE", + "EPWROFF", + "ERANGE", + "EREMCHG", + "EREMOTE", + "EREMOTEIO", + "ERESTART", + "ERFKILL", + "EROFS", + "ERPCMISMATCH", + "ERROR_ACCESS_DENIED", + "ERROR_ALREADY_EXISTS", + "ERROR_BROKEN_PIPE", + "ERROR_BUFFER_OVERFLOW", + "ERROR_DIR_NOT_EMPTY", + "ERROR_ENVVAR_NOT_FOUND", + "ERROR_FILE_EXISTS", + "ERROR_FILE_NOT_FOUND", + "ERROR_HANDLE_EOF", + "ERROR_INSUFFICIENT_BUFFER", + "ERROR_IO_PENDING", + "ERROR_MOD_NOT_FOUND", + "ERROR_MORE_DATA", + "ERROR_NETNAME_DELETED", + "ERROR_NOT_FOUND", + "ERROR_NO_MORE_FILES", + "ERROR_OPERATION_ABORTED", + "ERROR_PATH_NOT_FOUND", + "ERROR_PRIVILEGE_NOT_HELD", + "ERROR_PROC_NOT_FOUND", + "ESHLIBVERS", + "ESHUTDOWN", + "ESOCKTNOSUPPORT", + "ESPIPE", + "ESRCH", + "ESRMNT", + "ESTALE", + "ESTRPIPE", + "ETHERCAP_JUMBO_MTU", + "ETHERCAP_VLAN_HWTAGGING", + "ETHERCAP_VLAN_MTU", + "ETHERMIN", + "ETHERMTU", + "ETHERMTU_JUMBO", + "ETHERTYPE_8023", + "ETHERTYPE_AARP", + "ETHERTYPE_ACCTON", + "ETHERTYPE_AEONIC", + "ETHERTYPE_ALPHA", + "ETHERTYPE_AMBER", + "ETHERTYPE_AMOEBA", + "ETHERTYPE_AOE", + "ETHERTYPE_APOLLO", + "ETHERTYPE_APOLLODOMAIN", + "ETHERTYPE_APPLETALK", + "ETHERTYPE_APPLITEK", + "ETHERTYPE_ARGONAUT", + "ETHERTYPE_ARP", + "ETHERTYPE_AT", + "ETHERTYPE_ATALK", + "ETHERTYPE_ATOMIC", + "ETHERTYPE_ATT", + "ETHERTYPE_ATTSTANFORD", + "ETHERTYPE_AUTOPHON", + "ETHERTYPE_AXIS", + "ETHERTYPE_BCLOOP", + "ETHERTYPE_BOFL", + "ETHERTYPE_CABLETRON", + "ETHERTYPE_CHAOS", + "ETHERTYPE_COMDESIGN", + "ETHERTYPE_COMPUGRAPHIC", + "ETHERTYPE_COUNTERPOINT", + "ETHERTYPE_CRONUS", + "ETHERTYPE_CRONUSVLN", + "ETHERTYPE_DCA", + "ETHERTYPE_DDE", + "ETHERTYPE_DEBNI", + "ETHERTYPE_DECAM", + "ETHERTYPE_DECCUST", + "ETHERTYPE_DECDIAG", + "ETHERTYPE_DECDNS", + "ETHERTYPE_DECDTS", + "ETHERTYPE_DECEXPER", + "ETHERTYPE_DECLAST", + "ETHERTYPE_DECLTM", + "ETHERTYPE_DECMUMPS", + "ETHERTYPE_DECNETBIOS", + "ETHERTYPE_DELTACON", + "ETHERTYPE_DIDDLE", + "ETHERTYPE_DLOG1", + "ETHERTYPE_DLOG2", + "ETHERTYPE_DN", + "ETHERTYPE_DOGFIGHT", + "ETHERTYPE_DSMD", + "ETHERTYPE_ECMA", + "ETHERTYPE_ENCRYPT", + "ETHERTYPE_ES", + "ETHERTYPE_EXCELAN", + "ETHERTYPE_EXPERDATA", + "ETHERTYPE_FLIP", + "ETHERTYPE_FLOWCONTROL", + "ETHERTYPE_FRARP", + "ETHERTYPE_GENDYN", + "ETHERTYPE_HAYES", + "ETHERTYPE_HIPPI_FP", + "ETHERTYPE_HITACHI", + "ETHERTYPE_HP", + "ETHERTYPE_IEEEPUP", + "ETHERTYPE_IEEEPUPAT", + "ETHERTYPE_IMLBL", + "ETHERTYPE_IMLBLDIAG", + "ETHERTYPE_IP", + "ETHERTYPE_IPAS", + "ETHERTYPE_IPV6", + "ETHERTYPE_IPX", + "ETHERTYPE_IPXNEW", + "ETHERTYPE_KALPANA", + "ETHERTYPE_LANBRIDGE", + "ETHERTYPE_LANPROBE", + "ETHERTYPE_LAT", + "ETHERTYPE_LBACK", + "ETHERTYPE_LITTLE", + "ETHERTYPE_LLDP", + "ETHERTYPE_LOGICRAFT", + "ETHERTYPE_LOOPBACK", + "ETHERTYPE_MATRA", + "ETHERTYPE_MAX", + "ETHERTYPE_MERIT", + "ETHERTYPE_MICP", + "ETHERTYPE_MOPDL", + "ETHERTYPE_MOPRC", + "ETHERTYPE_MOTOROLA", + "ETHERTYPE_MPLS", + "ETHERTYPE_MPLS_MCAST", + "ETHERTYPE_MUMPS", + "ETHERTYPE_NBPCC", + "ETHERTYPE_NBPCLAIM", + "ETHERTYPE_NBPCLREQ", + "ETHERTYPE_NBPCLRSP", + "ETHERTYPE_NBPCREQ", + "ETHERTYPE_NBPCRSP", + "ETHERTYPE_NBPDG", + "ETHERTYPE_NBPDGB", + "ETHERTYPE_NBPDLTE", + "ETHERTYPE_NBPRAR", + "ETHERTYPE_NBPRAS", + "ETHERTYPE_NBPRST", + "ETHERTYPE_NBPSCD", + "ETHERTYPE_NBPVCD", + "ETHERTYPE_NBS", + 
"ETHERTYPE_NCD", + "ETHERTYPE_NESTAR", + "ETHERTYPE_NETBEUI", + "ETHERTYPE_NOVELL", + "ETHERTYPE_NS", + "ETHERTYPE_NSAT", + "ETHERTYPE_NSCOMPAT", + "ETHERTYPE_NTRAILER", + "ETHERTYPE_OS9", + "ETHERTYPE_OS9NET", + "ETHERTYPE_PACER", + "ETHERTYPE_PAE", + "ETHERTYPE_PCS", + "ETHERTYPE_PLANNING", + "ETHERTYPE_PPP", + "ETHERTYPE_PPPOE", + "ETHERTYPE_PPPOEDISC", + "ETHERTYPE_PRIMENTS", + "ETHERTYPE_PUP", + "ETHERTYPE_PUPAT", + "ETHERTYPE_QINQ", + "ETHERTYPE_RACAL", + "ETHERTYPE_RATIONAL", + "ETHERTYPE_RAWFR", + "ETHERTYPE_RCL", + "ETHERTYPE_RDP", + "ETHERTYPE_RETIX", + "ETHERTYPE_REVARP", + "ETHERTYPE_SCA", + "ETHERTYPE_SECTRA", + "ETHERTYPE_SECUREDATA", + "ETHERTYPE_SGITW", + "ETHERTYPE_SG_BOUNCE", + "ETHERTYPE_SG_DIAG", + "ETHERTYPE_SG_NETGAMES", + "ETHERTYPE_SG_RESV", + "ETHERTYPE_SIMNET", + "ETHERTYPE_SLOW", + "ETHERTYPE_SLOWPROTOCOLS", + "ETHERTYPE_SNA", + "ETHERTYPE_SNMP", + "ETHERTYPE_SONIX", + "ETHERTYPE_SPIDER", + "ETHERTYPE_SPRITE", + "ETHERTYPE_STP", + "ETHERTYPE_TALARIS", + "ETHERTYPE_TALARISMC", + "ETHERTYPE_TCPCOMP", + "ETHERTYPE_TCPSM", + "ETHERTYPE_TEC", + "ETHERTYPE_TIGAN", + "ETHERTYPE_TRAIL", + "ETHERTYPE_TRANSETHER", + "ETHERTYPE_TYMSHARE", + "ETHERTYPE_UBBST", + "ETHERTYPE_UBDEBUG", + "ETHERTYPE_UBDIAGLOOP", + "ETHERTYPE_UBDL", + "ETHERTYPE_UBNIU", + "ETHERTYPE_UBNMC", + "ETHERTYPE_VALID", + "ETHERTYPE_VARIAN", + "ETHERTYPE_VAXELN", + "ETHERTYPE_VEECO", + "ETHERTYPE_VEXP", + "ETHERTYPE_VGLAB", + "ETHERTYPE_VINES", + "ETHERTYPE_VINESECHO", + "ETHERTYPE_VINESLOOP", + "ETHERTYPE_VITAL", + "ETHERTYPE_VLAN", + "ETHERTYPE_VLTLMAN", + "ETHERTYPE_VPROD", + "ETHERTYPE_VURESERVED", + "ETHERTYPE_WATERLOO", + "ETHERTYPE_WELLFLEET", + "ETHERTYPE_X25", + "ETHERTYPE_X75", + "ETHERTYPE_XNSSM", + "ETHERTYPE_XTP", + "ETHER_ADDR_LEN", + "ETHER_ALIGN", + "ETHER_CRC_LEN", + "ETHER_CRC_POLY_BE", + "ETHER_CRC_POLY_LE", + "ETHER_HDR_LEN", + "ETHER_MAX_DIX_LEN", + "ETHER_MAX_LEN", + "ETHER_MAX_LEN_JUMBO", + "ETHER_MIN_LEN", + "ETHER_PPPOE_ENCAP_LEN", + "ETHER_TYPE_LEN", + "ETHER_VLAN_ENCAP_LEN", + "ETH_P_1588", + "ETH_P_8021Q", + "ETH_P_802_2", + "ETH_P_802_3", + "ETH_P_AARP", + "ETH_P_ALL", + "ETH_P_AOE", + "ETH_P_ARCNET", + "ETH_P_ARP", + "ETH_P_ATALK", + "ETH_P_ATMFATE", + "ETH_P_ATMMPOA", + "ETH_P_AX25", + "ETH_P_BPQ", + "ETH_P_CAIF", + "ETH_P_CAN", + "ETH_P_CONTROL", + "ETH_P_CUST", + "ETH_P_DDCMP", + "ETH_P_DEC", + "ETH_P_DIAG", + "ETH_P_DNA_DL", + "ETH_P_DNA_RC", + "ETH_P_DNA_RT", + "ETH_P_DSA", + "ETH_P_ECONET", + "ETH_P_EDSA", + "ETH_P_FCOE", + "ETH_P_FIP", + "ETH_P_HDLC", + "ETH_P_IEEE802154", + "ETH_P_IEEEPUP", + "ETH_P_IEEEPUPAT", + "ETH_P_IP", + "ETH_P_IPV6", + "ETH_P_IPX", + "ETH_P_IRDA", + "ETH_P_LAT", + "ETH_P_LINK_CTL", + "ETH_P_LOCALTALK", + "ETH_P_LOOP", + "ETH_P_MOBITEX", + "ETH_P_MPLS_MC", + "ETH_P_MPLS_UC", + "ETH_P_PAE", + "ETH_P_PAUSE", + "ETH_P_PHONET", + "ETH_P_PPPTALK", + "ETH_P_PPP_DISC", + "ETH_P_PPP_MP", + "ETH_P_PPP_SES", + "ETH_P_PUP", + "ETH_P_PUPAT", + "ETH_P_RARP", + "ETH_P_SCA", + "ETH_P_SLOW", + "ETH_P_SNAP", + "ETH_P_TEB", + "ETH_P_TIPC", + "ETH_P_TRAILER", + "ETH_P_TR_802_2", + "ETH_P_WAN_PPP", + "ETH_P_WCCP", + "ETH_P_X25", + "ETIME", + "ETIMEDOUT", + "ETOOMANYREFS", + "ETXTBSY", + "EUCLEAN", + "EUNATCH", + "EUSERS", + "EVFILT_AIO", + "EVFILT_FS", + "EVFILT_LIO", + "EVFILT_MACHPORT", + "EVFILT_PROC", + "EVFILT_READ", + "EVFILT_SIGNAL", + "EVFILT_SYSCOUNT", + "EVFILT_THREADMARKER", + "EVFILT_TIMER", + "EVFILT_USER", + "EVFILT_VM", + "EVFILT_VNODE", + "EVFILT_WRITE", + "EV_ADD", + "EV_CLEAR", + "EV_DELETE", + "EV_DISABLE", + "EV_DISPATCH", + "EV_DROP", + 
"EV_ENABLE", + "EV_EOF", + "EV_ERROR", + "EV_FLAG0", + "EV_FLAG1", + "EV_ONESHOT", + "EV_OOBAND", + "EV_POLL", + "EV_RECEIPT", + "EV_SYSFLAGS", + "EWINDOWS", + "EWOULDBLOCK", + "EXDEV", + "EXFULL", + "EXTA", + "EXTB", + "EXTPROC", + "Environ", + "EpollCreate", + "EpollCreate1", + "EpollCtl", + "EpollEvent", + "EpollWait", + "Errno", + "EscapeArg", + "Exchangedata", + "Exec", + "Exit", + "ExitProcess", + "FD_CLOEXEC", + "FD_SETSIZE", + "FILE_ACTION_ADDED", + "FILE_ACTION_MODIFIED", + "FILE_ACTION_REMOVED", + "FILE_ACTION_RENAMED_NEW_NAME", + "FILE_ACTION_RENAMED_OLD_NAME", + "FILE_APPEND_DATA", + "FILE_ATTRIBUTE_ARCHIVE", + "FILE_ATTRIBUTE_DIRECTORY", + "FILE_ATTRIBUTE_HIDDEN", + "FILE_ATTRIBUTE_NORMAL", + "FILE_ATTRIBUTE_READONLY", + "FILE_ATTRIBUTE_REPARSE_POINT", + "FILE_ATTRIBUTE_SYSTEM", + "FILE_BEGIN", + "FILE_CURRENT", + "FILE_END", + "FILE_FLAG_BACKUP_SEMANTICS", + "FILE_FLAG_OPEN_REPARSE_POINT", + "FILE_FLAG_OVERLAPPED", + "FILE_LIST_DIRECTORY", + "FILE_MAP_COPY", + "FILE_MAP_EXECUTE", + "FILE_MAP_READ", + "FILE_MAP_WRITE", + "FILE_NOTIFY_CHANGE_ATTRIBUTES", + "FILE_NOTIFY_CHANGE_CREATION", + "FILE_NOTIFY_CHANGE_DIR_NAME", + "FILE_NOTIFY_CHANGE_FILE_NAME", + "FILE_NOTIFY_CHANGE_LAST_ACCESS", + "FILE_NOTIFY_CHANGE_LAST_WRITE", + "FILE_NOTIFY_CHANGE_SIZE", + "FILE_SHARE_DELETE", + "FILE_SHARE_READ", + "FILE_SHARE_WRITE", + "FILE_SKIP_COMPLETION_PORT_ON_SUCCESS", + "FILE_SKIP_SET_EVENT_ON_HANDLE", + "FILE_TYPE_CHAR", + "FILE_TYPE_DISK", + "FILE_TYPE_PIPE", + "FILE_TYPE_REMOTE", + "FILE_TYPE_UNKNOWN", + "FILE_WRITE_ATTRIBUTES", + "FLUSHO", + "FORMAT_MESSAGE_ALLOCATE_BUFFER", + "FORMAT_MESSAGE_ARGUMENT_ARRAY", + "FORMAT_MESSAGE_FROM_HMODULE", + "FORMAT_MESSAGE_FROM_STRING", + "FORMAT_MESSAGE_FROM_SYSTEM", + "FORMAT_MESSAGE_IGNORE_INSERTS", + "FORMAT_MESSAGE_MAX_WIDTH_MASK", + "FSCTL_GET_REPARSE_POINT", + "F_ADDFILESIGS", + "F_ADDSIGS", + "F_ALLOCATEALL", + "F_ALLOCATECONTIG", + "F_CANCEL", + "F_CHKCLEAN", + "F_CLOSEM", + "F_DUP2FD", + "F_DUP2FD_CLOEXEC", + "F_DUPFD", + "F_DUPFD_CLOEXEC", + "F_EXLCK", + "F_FLUSH_DATA", + "F_FREEZE_FS", + "F_FSCTL", + "F_FSDIRMASK", + "F_FSIN", + "F_FSINOUT", + "F_FSOUT", + "F_FSPRIV", + "F_FSVOID", + "F_FULLFSYNC", + "F_GETFD", + "F_GETFL", + "F_GETLEASE", + "F_GETLK", + "F_GETLK64", + "F_GETLKPID", + "F_GETNOSIGPIPE", + "F_GETOWN", + "F_GETOWN_EX", + "F_GETPATH", + "F_GETPATH_MTMINFO", + "F_GETPIPE_SZ", + "F_GETPROTECTIONCLASS", + "F_GETSIG", + "F_GLOBAL_NOCACHE", + "F_LOCK", + "F_LOG2PHYS", + "F_LOG2PHYS_EXT", + "F_MARKDEPENDENCY", + "F_MAXFD", + "F_NOCACHE", + "F_NODIRECT", + "F_NOTIFY", + "F_OGETLK", + "F_OK", + "F_OSETLK", + "F_OSETLKW", + "F_PARAM_MASK", + "F_PARAM_MAX", + "F_PATHPKG_CHECK", + "F_PEOFPOSMODE", + "F_PREALLOCATE", + "F_RDADVISE", + "F_RDAHEAD", + "F_RDLCK", + "F_READAHEAD", + "F_READBOOTSTRAP", + "F_SETBACKINGSTORE", + "F_SETFD", + "F_SETFL", + "F_SETLEASE", + "F_SETLK", + "F_SETLK64", + "F_SETLKW", + "F_SETLKW64", + "F_SETLK_REMOTE", + "F_SETNOSIGPIPE", + "F_SETOWN", + "F_SETOWN_EX", + "F_SETPIPE_SZ", + "F_SETPROTECTIONCLASS", + "F_SETSIG", + "F_SETSIZE", + "F_SHLCK", + "F_TEST", + "F_THAW_FS", + "F_TLOCK", + "F_ULOCK", + "F_UNLCK", + "F_UNLCKSYS", + "F_VOLPOSMODE", + "F_WRITEBOOTSTRAP", + "F_WRLCK", + "Faccessat", + "Fallocate", + "Fbootstraptransfer_t", + "Fchdir", + "Fchflags", + "Fchmod", + "Fchmodat", + "Fchown", + "Fchownat", + "FcntlFlock", + "FdSet", + "Fdatasync", + "FileNotifyInformation", + "Filetime", + "FindClose", + "FindFirstFile", + "FindNextFile", + "Flock", + "Flock_t", + "FlushBpf", + "FlushFileBuffers", + 
"FlushViewOfFile", + "ForkExec", + "ForkLock", + "FormatMessage", + "Fpathconf", + "FreeAddrInfoW", + "FreeEnvironmentStrings", + "FreeLibrary", + "Fsid", + "Fstat", + "Fstatat", + "Fstatfs", + "Fstore_t", + "Fsync", + "Ftruncate", + "FullPath", + "Futimes", + "Futimesat", + "GENERIC_ALL", + "GENERIC_EXECUTE", + "GENERIC_READ", + "GENERIC_WRITE", + "GUID", + "GetAcceptExSockaddrs", + "GetAdaptersInfo", + "GetAddrInfoW", + "GetCommandLine", + "GetComputerName", + "GetConsoleMode", + "GetCurrentDirectory", + "GetCurrentProcess", + "GetEnvironmentStrings", + "GetEnvironmentVariable", + "GetExitCodeProcess", + "GetFileAttributes", + "GetFileAttributesEx", + "GetFileExInfoStandard", + "GetFileExMaxInfoLevel", + "GetFileInformationByHandle", + "GetFileType", + "GetFullPathName", + "GetHostByName", + "GetIfEntry", + "GetLastError", + "GetLengthSid", + "GetLongPathName", + "GetProcAddress", + "GetProcessTimes", + "GetProtoByName", + "GetQueuedCompletionStatus", + "GetServByName", + "GetShortPathName", + "GetStartupInfo", + "GetStdHandle", + "GetSystemTimeAsFileTime", + "GetTempPath", + "GetTimeZoneInformation", + "GetTokenInformation", + "GetUserNameEx", + "GetUserProfileDirectory", + "GetVersion", + "Getcwd", + "Getdents", + "Getdirentries", + "Getdtablesize", + "Getegid", + "Getenv", + "Geteuid", + "Getfsstat", + "Getgid", + "Getgroups", + "Getpagesize", + "Getpeername", + "Getpgid", + "Getpgrp", + "Getpid", + "Getppid", + "Getpriority", + "Getrlimit", + "Getrusage", + "Getsid", + "Getsockname", + "Getsockopt", + "GetsockoptByte", + "GetsockoptICMPv6Filter", + "GetsockoptIPMreq", + "GetsockoptIPMreqn", + "GetsockoptIPv6MTUInfo", + "GetsockoptIPv6Mreq", + "GetsockoptInet4Addr", + "GetsockoptInt", + "GetsockoptUcred", + "Gettid", + "Gettimeofday", + "Getuid", + "Getwd", + "Getxattr", + "HANDLE_FLAG_INHERIT", + "HKEY_CLASSES_ROOT", + "HKEY_CURRENT_CONFIG", + "HKEY_CURRENT_USER", + "HKEY_DYN_DATA", + "HKEY_LOCAL_MACHINE", + "HKEY_PERFORMANCE_DATA", + "HKEY_USERS", + "HUPCL", + "Handle", + "Hostent", + "ICANON", + "ICMP6_FILTER", + "ICMPV6_FILTER", + "ICMPv6Filter", + "ICRNL", + "IEXTEN", + "IFAN_ARRIVAL", + "IFAN_DEPARTURE", + "IFA_ADDRESS", + "IFA_ANYCAST", + "IFA_BROADCAST", + "IFA_CACHEINFO", + "IFA_F_DADFAILED", + "IFA_F_DEPRECATED", + "IFA_F_HOMEADDRESS", + "IFA_F_NODAD", + "IFA_F_OPTIMISTIC", + "IFA_F_PERMANENT", + "IFA_F_SECONDARY", + "IFA_F_TEMPORARY", + "IFA_F_TENTATIVE", + "IFA_LABEL", + "IFA_LOCAL", + "IFA_MAX", + "IFA_MULTICAST", + "IFA_ROUTE", + "IFA_UNSPEC", + "IFF_ALLMULTI", + "IFF_ALTPHYS", + "IFF_AUTOMEDIA", + "IFF_BROADCAST", + "IFF_CANTCHANGE", + "IFF_CANTCONFIG", + "IFF_DEBUG", + "IFF_DRV_OACTIVE", + "IFF_DRV_RUNNING", + "IFF_DYING", + "IFF_DYNAMIC", + "IFF_LINK0", + "IFF_LINK1", + "IFF_LINK2", + "IFF_LOOPBACK", + "IFF_MASTER", + "IFF_MONITOR", + "IFF_MULTICAST", + "IFF_NOARP", + "IFF_NOTRAILERS", + "IFF_NO_PI", + "IFF_OACTIVE", + "IFF_ONE_QUEUE", + "IFF_POINTOPOINT", + "IFF_POINTTOPOINT", + "IFF_PORTSEL", + "IFF_PPROMISC", + "IFF_PROMISC", + "IFF_RENAMING", + "IFF_RUNNING", + "IFF_SIMPLEX", + "IFF_SLAVE", + "IFF_SMART", + "IFF_STATICARP", + "IFF_TAP", + "IFF_TUN", + "IFF_TUN_EXCL", + "IFF_UP", + "IFF_VNET_HDR", + "IFLA_ADDRESS", + "IFLA_BROADCAST", + "IFLA_COST", + "IFLA_IFALIAS", + "IFLA_IFNAME", + "IFLA_LINK", + "IFLA_LINKINFO", + "IFLA_LINKMODE", + "IFLA_MAP", + "IFLA_MASTER", + "IFLA_MAX", + "IFLA_MTU", + "IFLA_NET_NS_PID", + "IFLA_OPERSTATE", + "IFLA_PRIORITY", + "IFLA_PROTINFO", + "IFLA_QDISC", + "IFLA_STATS", + "IFLA_TXQLEN", + "IFLA_UNSPEC", + "IFLA_WEIGHT", + 
"IFLA_WIRELESS", + "IFNAMSIZ", + "IFT_1822", + "IFT_A12MPPSWITCH", + "IFT_AAL2", + "IFT_AAL5", + "IFT_ADSL", + "IFT_AFLANE8023", + "IFT_AFLANE8025", + "IFT_ARAP", + "IFT_ARCNET", + "IFT_ARCNETPLUS", + "IFT_ASYNC", + "IFT_ATM", + "IFT_ATMDXI", + "IFT_ATMFUNI", + "IFT_ATMIMA", + "IFT_ATMLOGICAL", + "IFT_ATMRADIO", + "IFT_ATMSUBINTERFACE", + "IFT_ATMVCIENDPT", + "IFT_ATMVIRTUAL", + "IFT_BGPPOLICYACCOUNTING", + "IFT_BLUETOOTH", + "IFT_BRIDGE", + "IFT_BSC", + "IFT_CARP", + "IFT_CCTEMUL", + "IFT_CELLULAR", + "IFT_CEPT", + "IFT_CES", + "IFT_CHANNEL", + "IFT_CNR", + "IFT_COFFEE", + "IFT_COMPOSITELINK", + "IFT_DCN", + "IFT_DIGITALPOWERLINE", + "IFT_DIGITALWRAPPEROVERHEADCHANNEL", + "IFT_DLSW", + "IFT_DOCSCABLEDOWNSTREAM", + "IFT_DOCSCABLEMACLAYER", + "IFT_DOCSCABLEUPSTREAM", + "IFT_DOCSCABLEUPSTREAMCHANNEL", + "IFT_DS0", + "IFT_DS0BUNDLE", + "IFT_DS1FDL", + "IFT_DS3", + "IFT_DTM", + "IFT_DUMMY", + "IFT_DVBASILN", + "IFT_DVBASIOUT", + "IFT_DVBRCCDOWNSTREAM", + "IFT_DVBRCCMACLAYER", + "IFT_DVBRCCUPSTREAM", + "IFT_ECONET", + "IFT_ENC", + "IFT_EON", + "IFT_EPLRS", + "IFT_ESCON", + "IFT_ETHER", + "IFT_FAITH", + "IFT_FAST", + "IFT_FASTETHER", + "IFT_FASTETHERFX", + "IFT_FDDI", + "IFT_FIBRECHANNEL", + "IFT_FRAMERELAYINTERCONNECT", + "IFT_FRAMERELAYMPI", + "IFT_FRDLCIENDPT", + "IFT_FRELAY", + "IFT_FRELAYDCE", + "IFT_FRF16MFRBUNDLE", + "IFT_FRFORWARD", + "IFT_G703AT2MB", + "IFT_G703AT64K", + "IFT_GIF", + "IFT_GIGABITETHERNET", + "IFT_GR303IDT", + "IFT_GR303RDT", + "IFT_H323GATEKEEPER", + "IFT_H323PROXY", + "IFT_HDH1822", + "IFT_HDLC", + "IFT_HDSL2", + "IFT_HIPERLAN2", + "IFT_HIPPI", + "IFT_HIPPIINTERFACE", + "IFT_HOSTPAD", + "IFT_HSSI", + "IFT_HY", + "IFT_IBM370PARCHAN", + "IFT_IDSL", + "IFT_IEEE1394", + "IFT_IEEE80211", + "IFT_IEEE80212", + "IFT_IEEE8023ADLAG", + "IFT_IFGSN", + "IFT_IMT", + "IFT_INFINIBAND", + "IFT_INTERLEAVE", + "IFT_IP", + "IFT_IPFORWARD", + "IFT_IPOVERATM", + "IFT_IPOVERCDLC", + "IFT_IPOVERCLAW", + "IFT_IPSWITCH", + "IFT_IPXIP", + "IFT_ISDN", + "IFT_ISDNBASIC", + "IFT_ISDNPRIMARY", + "IFT_ISDNS", + "IFT_ISDNU", + "IFT_ISO88022LLC", + "IFT_ISO88023", + "IFT_ISO88024", + "IFT_ISO88025", + "IFT_ISO88025CRFPINT", + "IFT_ISO88025DTR", + "IFT_ISO88025FIBER", + "IFT_ISO88026", + "IFT_ISUP", + "IFT_L2VLAN", + "IFT_L3IPVLAN", + "IFT_L3IPXVLAN", + "IFT_LAPB", + "IFT_LAPD", + "IFT_LAPF", + "IFT_LINEGROUP", + "IFT_LOCALTALK", + "IFT_LOOP", + "IFT_MEDIAMAILOVERIP", + "IFT_MFSIGLINK", + "IFT_MIOX25", + "IFT_MODEM", + "IFT_MPC", + "IFT_MPLS", + "IFT_MPLSTUNNEL", + "IFT_MSDSL", + "IFT_MVL", + "IFT_MYRINET", + "IFT_NFAS", + "IFT_NSIP", + "IFT_OPTICALCHANNEL", + "IFT_OPTICALTRANSPORT", + "IFT_OTHER", + "IFT_P10", + "IFT_P80", + "IFT_PARA", + "IFT_PDP", + "IFT_PFLOG", + "IFT_PFLOW", + "IFT_PFSYNC", + "IFT_PLC", + "IFT_PON155", + "IFT_PON622", + "IFT_POS", + "IFT_PPP", + "IFT_PPPMULTILINKBUNDLE", + "IFT_PROPATM", + "IFT_PROPBWAP2MP", + "IFT_PROPCNLS", + "IFT_PROPDOCSWIRELESSDOWNSTREAM", + "IFT_PROPDOCSWIRELESSMACLAYER", + "IFT_PROPDOCSWIRELESSUPSTREAM", + "IFT_PROPMUX", + "IFT_PROPVIRTUAL", + "IFT_PROPWIRELESSP2P", + "IFT_PTPSERIAL", + "IFT_PVC", + "IFT_Q2931", + "IFT_QLLC", + "IFT_RADIOMAC", + "IFT_RADSL", + "IFT_REACHDSL", + "IFT_RFC1483", + "IFT_RS232", + "IFT_RSRB", + "IFT_SDLC", + "IFT_SDSL", + "IFT_SHDSL", + "IFT_SIP", + "IFT_SIPSIG", + "IFT_SIPTG", + "IFT_SLIP", + "IFT_SMDSDXI", + "IFT_SMDSICIP", + "IFT_SONET", + "IFT_SONETOVERHEADCHANNEL", + "IFT_SONETPATH", + "IFT_SONETVT", + "IFT_SRP", + "IFT_SS7SIGLINK", + "IFT_STACKTOSTACK", + "IFT_STARLAN", + "IFT_STF", + "IFT_T1", + "IFT_TDLC", + 
"IFT_TELINK", + "IFT_TERMPAD", + "IFT_TR008", + "IFT_TRANSPHDLC", + "IFT_TUNNEL", + "IFT_ULTRA", + "IFT_USB", + "IFT_V11", + "IFT_V35", + "IFT_V36", + "IFT_V37", + "IFT_VDSL", + "IFT_VIRTUALIPADDRESS", + "IFT_VIRTUALTG", + "IFT_VOICEDID", + "IFT_VOICEEM", + "IFT_VOICEEMFGD", + "IFT_VOICEENCAP", + "IFT_VOICEFGDEANA", + "IFT_VOICEFXO", + "IFT_VOICEFXS", + "IFT_VOICEOVERATM", + "IFT_VOICEOVERCABLE", + "IFT_VOICEOVERFRAMERELAY", + "IFT_VOICEOVERIP", + "IFT_X213", + "IFT_X25", + "IFT_X25DDN", + "IFT_X25HUNTGROUP", + "IFT_X25MLP", + "IFT_X25PLE", + "IFT_XETHER", + "IGNBRK", + "IGNCR", + "IGNORE", + "IGNPAR", + "IMAXBEL", + "INFINITE", + "INLCR", + "INPCK", + "INVALID_FILE_ATTRIBUTES", + "IN_ACCESS", + "IN_ALL_EVENTS", + "IN_ATTRIB", + "IN_CLASSA_HOST", + "IN_CLASSA_MAX", + "IN_CLASSA_NET", + "IN_CLASSA_NSHIFT", + "IN_CLASSB_HOST", + "IN_CLASSB_MAX", + "IN_CLASSB_NET", + "IN_CLASSB_NSHIFT", + "IN_CLASSC_HOST", + "IN_CLASSC_NET", + "IN_CLASSC_NSHIFT", + "IN_CLASSD_HOST", + "IN_CLASSD_NET", + "IN_CLASSD_NSHIFT", + "IN_CLOEXEC", + "IN_CLOSE", + "IN_CLOSE_NOWRITE", + "IN_CLOSE_WRITE", + "IN_CREATE", + "IN_DELETE", + "IN_DELETE_SELF", + "IN_DONT_FOLLOW", + "IN_EXCL_UNLINK", + "IN_IGNORED", + "IN_ISDIR", + "IN_LINKLOCALNETNUM", + "IN_LOOPBACKNET", + "IN_MASK_ADD", + "IN_MODIFY", + "IN_MOVE", + "IN_MOVED_FROM", + "IN_MOVED_TO", + "IN_MOVE_SELF", + "IN_NONBLOCK", + "IN_ONESHOT", + "IN_ONLYDIR", + "IN_OPEN", + "IN_Q_OVERFLOW", + "IN_RFC3021_HOST", + "IN_RFC3021_MASK", + "IN_RFC3021_NET", + "IN_RFC3021_NSHIFT", + "IN_UNMOUNT", + "IOC_IN", + "IOC_INOUT", + "IOC_OUT", + "IOC_VENDOR", + "IOC_WS2", + "IO_REPARSE_TAG_SYMLINK", + "IPMreq", + "IPMreqn", + "IPPROTO_3PC", + "IPPROTO_ADFS", + "IPPROTO_AH", + "IPPROTO_AHIP", + "IPPROTO_APES", + "IPPROTO_ARGUS", + "IPPROTO_AX25", + "IPPROTO_BHA", + "IPPROTO_BLT", + "IPPROTO_BRSATMON", + "IPPROTO_CARP", + "IPPROTO_CFTP", + "IPPROTO_CHAOS", + "IPPROTO_CMTP", + "IPPROTO_COMP", + "IPPROTO_CPHB", + "IPPROTO_CPNX", + "IPPROTO_DCCP", + "IPPROTO_DDP", + "IPPROTO_DGP", + "IPPROTO_DIVERT", + "IPPROTO_DIVERT_INIT", + "IPPROTO_DIVERT_RESP", + "IPPROTO_DONE", + "IPPROTO_DSTOPTS", + "IPPROTO_EGP", + "IPPROTO_EMCON", + "IPPROTO_ENCAP", + "IPPROTO_EON", + "IPPROTO_ESP", + "IPPROTO_ETHERIP", + "IPPROTO_FRAGMENT", + "IPPROTO_GGP", + "IPPROTO_GMTP", + "IPPROTO_GRE", + "IPPROTO_HELLO", + "IPPROTO_HMP", + "IPPROTO_HOPOPTS", + "IPPROTO_ICMP", + "IPPROTO_ICMPV6", + "IPPROTO_IDP", + "IPPROTO_IDPR", + "IPPROTO_IDRP", + "IPPROTO_IGMP", + "IPPROTO_IGP", + "IPPROTO_IGRP", + "IPPROTO_IL", + "IPPROTO_INLSP", + "IPPROTO_INP", + "IPPROTO_IP", + "IPPROTO_IPCOMP", + "IPPROTO_IPCV", + "IPPROTO_IPEIP", + "IPPROTO_IPIP", + "IPPROTO_IPPC", + "IPPROTO_IPV4", + "IPPROTO_IPV6", + "IPPROTO_IPV6_ICMP", + "IPPROTO_IRTP", + "IPPROTO_KRYPTOLAN", + "IPPROTO_LARP", + "IPPROTO_LEAF1", + "IPPROTO_LEAF2", + "IPPROTO_MAX", + "IPPROTO_MAXID", + "IPPROTO_MEAS", + "IPPROTO_MH", + "IPPROTO_MHRP", + "IPPROTO_MICP", + "IPPROTO_MOBILE", + "IPPROTO_MPLS", + "IPPROTO_MTP", + "IPPROTO_MUX", + "IPPROTO_ND", + "IPPROTO_NHRP", + "IPPROTO_NONE", + "IPPROTO_NSP", + "IPPROTO_NVPII", + "IPPROTO_OLD_DIVERT", + "IPPROTO_OSPFIGP", + "IPPROTO_PFSYNC", + "IPPROTO_PGM", + "IPPROTO_PIGP", + "IPPROTO_PIM", + "IPPROTO_PRM", + "IPPROTO_PUP", + "IPPROTO_PVP", + "IPPROTO_RAW", + "IPPROTO_RCCMON", + "IPPROTO_RDP", + "IPPROTO_ROUTING", + "IPPROTO_RSVP", + "IPPROTO_RVD", + "IPPROTO_SATEXPAK", + "IPPROTO_SATMON", + "IPPROTO_SCCSP", + "IPPROTO_SCTP", + "IPPROTO_SDRP", + "IPPROTO_SEND", + "IPPROTO_SEP", + "IPPROTO_SKIP", + "IPPROTO_SPACER", + 
"IPPROTO_SRPC", + "IPPROTO_ST", + "IPPROTO_SVMTP", + "IPPROTO_SWIPE", + "IPPROTO_TCF", + "IPPROTO_TCP", + "IPPROTO_TLSP", + "IPPROTO_TP", + "IPPROTO_TPXX", + "IPPROTO_TRUNK1", + "IPPROTO_TRUNK2", + "IPPROTO_TTP", + "IPPROTO_UDP", + "IPPROTO_UDPLITE", + "IPPROTO_VINES", + "IPPROTO_VISA", + "IPPROTO_VMTP", + "IPPROTO_VRRP", + "IPPROTO_WBEXPAK", + "IPPROTO_WBMON", + "IPPROTO_WSN", + "IPPROTO_XNET", + "IPPROTO_XTP", + "IPV6_2292DSTOPTS", + "IPV6_2292HOPLIMIT", + "IPV6_2292HOPOPTS", + "IPV6_2292NEXTHOP", + "IPV6_2292PKTINFO", + "IPV6_2292PKTOPTIONS", + "IPV6_2292RTHDR", + "IPV6_ADDRFORM", + "IPV6_ADD_MEMBERSHIP", + "IPV6_AUTHHDR", + "IPV6_AUTH_LEVEL", + "IPV6_AUTOFLOWLABEL", + "IPV6_BINDANY", + "IPV6_BINDV6ONLY", + "IPV6_BOUND_IF", + "IPV6_CHECKSUM", + "IPV6_DEFAULT_MULTICAST_HOPS", + "IPV6_DEFAULT_MULTICAST_LOOP", + "IPV6_DEFHLIM", + "IPV6_DONTFRAG", + "IPV6_DROP_MEMBERSHIP", + "IPV6_DSTOPTS", + "IPV6_ESP_NETWORK_LEVEL", + "IPV6_ESP_TRANS_LEVEL", + "IPV6_FAITH", + "IPV6_FLOWINFO_MASK", + "IPV6_FLOWLABEL_MASK", + "IPV6_FRAGTTL", + "IPV6_FW_ADD", + "IPV6_FW_DEL", + "IPV6_FW_FLUSH", + "IPV6_FW_GET", + "IPV6_FW_ZERO", + "IPV6_HLIMDEC", + "IPV6_HOPLIMIT", + "IPV6_HOPOPTS", + "IPV6_IPCOMP_LEVEL", + "IPV6_IPSEC_POLICY", + "IPV6_JOIN_ANYCAST", + "IPV6_JOIN_GROUP", + "IPV6_LEAVE_ANYCAST", + "IPV6_LEAVE_GROUP", + "IPV6_MAXHLIM", + "IPV6_MAXOPTHDR", + "IPV6_MAXPACKET", + "IPV6_MAX_GROUP_SRC_FILTER", + "IPV6_MAX_MEMBERSHIPS", + "IPV6_MAX_SOCK_SRC_FILTER", + "IPV6_MIN_MEMBERSHIPS", + "IPV6_MMTU", + "IPV6_MSFILTER", + "IPV6_MTU", + "IPV6_MTU_DISCOVER", + "IPV6_MULTICAST_HOPS", + "IPV6_MULTICAST_IF", + "IPV6_MULTICAST_LOOP", + "IPV6_NEXTHOP", + "IPV6_OPTIONS", + "IPV6_PATHMTU", + "IPV6_PIPEX", + "IPV6_PKTINFO", + "IPV6_PMTUDISC_DO", + "IPV6_PMTUDISC_DONT", + "IPV6_PMTUDISC_PROBE", + "IPV6_PMTUDISC_WANT", + "IPV6_PORTRANGE", + "IPV6_PORTRANGE_DEFAULT", + "IPV6_PORTRANGE_HIGH", + "IPV6_PORTRANGE_LOW", + "IPV6_PREFER_TEMPADDR", + "IPV6_RECVDSTOPTS", + "IPV6_RECVDSTPORT", + "IPV6_RECVERR", + "IPV6_RECVHOPLIMIT", + "IPV6_RECVHOPOPTS", + "IPV6_RECVPATHMTU", + "IPV6_RECVPKTINFO", + "IPV6_RECVRTHDR", + "IPV6_RECVTCLASS", + "IPV6_ROUTER_ALERT", + "IPV6_RTABLE", + "IPV6_RTHDR", + "IPV6_RTHDRDSTOPTS", + "IPV6_RTHDR_LOOSE", + "IPV6_RTHDR_STRICT", + "IPV6_RTHDR_TYPE_0", + "IPV6_RXDSTOPTS", + "IPV6_RXHOPOPTS", + "IPV6_SOCKOPT_RESERVED1", + "IPV6_TCLASS", + "IPV6_UNICAST_HOPS", + "IPV6_USE_MIN_MTU", + "IPV6_V6ONLY", + "IPV6_VERSION", + "IPV6_VERSION_MASK", + "IPV6_XFRM_POLICY", + "IP_ADD_MEMBERSHIP", + "IP_ADD_SOURCE_MEMBERSHIP", + "IP_AUTH_LEVEL", + "IP_BINDANY", + "IP_BLOCK_SOURCE", + "IP_BOUND_IF", + "IP_DEFAULT_MULTICAST_LOOP", + "IP_DEFAULT_MULTICAST_TTL", + "IP_DF", + "IP_DIVERTFL", + "IP_DONTFRAG", + "IP_DROP_MEMBERSHIP", + "IP_DROP_SOURCE_MEMBERSHIP", + "IP_DUMMYNET3", + "IP_DUMMYNET_CONFIGURE", + "IP_DUMMYNET_DEL", + "IP_DUMMYNET_FLUSH", + "IP_DUMMYNET_GET", + "IP_EF", + "IP_ERRORMTU", + "IP_ESP_NETWORK_LEVEL", + "IP_ESP_TRANS_LEVEL", + "IP_FAITH", + "IP_FREEBIND", + "IP_FW3", + "IP_FW_ADD", + "IP_FW_DEL", + "IP_FW_FLUSH", + "IP_FW_GET", + "IP_FW_NAT_CFG", + "IP_FW_NAT_DEL", + "IP_FW_NAT_GET_CONFIG", + "IP_FW_NAT_GET_LOG", + "IP_FW_RESETLOG", + "IP_FW_TABLE_ADD", + "IP_FW_TABLE_DEL", + "IP_FW_TABLE_FLUSH", + "IP_FW_TABLE_GETSIZE", + "IP_FW_TABLE_LIST", + "IP_FW_ZERO", + "IP_HDRINCL", + "IP_IPCOMP_LEVEL", + "IP_IPSECFLOWINFO", + "IP_IPSEC_LOCAL_AUTH", + "IP_IPSEC_LOCAL_CRED", + "IP_IPSEC_LOCAL_ID", + "IP_IPSEC_POLICY", + "IP_IPSEC_REMOTE_AUTH", + "IP_IPSEC_REMOTE_CRED", + "IP_IPSEC_REMOTE_ID", + "IP_MAXPACKET", + 
"IP_MAX_GROUP_SRC_FILTER", + "IP_MAX_MEMBERSHIPS", + "IP_MAX_SOCK_MUTE_FILTER", + "IP_MAX_SOCK_SRC_FILTER", + "IP_MAX_SOURCE_FILTER", + "IP_MF", + "IP_MINFRAGSIZE", + "IP_MINTTL", + "IP_MIN_MEMBERSHIPS", + "IP_MSFILTER", + "IP_MSS", + "IP_MTU", + "IP_MTU_DISCOVER", + "IP_MULTICAST_IF", + "IP_MULTICAST_IFINDEX", + "IP_MULTICAST_LOOP", + "IP_MULTICAST_TTL", + "IP_MULTICAST_VIF", + "IP_NAT__XXX", + "IP_OFFMASK", + "IP_OLD_FW_ADD", + "IP_OLD_FW_DEL", + "IP_OLD_FW_FLUSH", + "IP_OLD_FW_GET", + "IP_OLD_FW_RESETLOG", + "IP_OLD_FW_ZERO", + "IP_ONESBCAST", + "IP_OPTIONS", + "IP_ORIGDSTADDR", + "IP_PASSSEC", + "IP_PIPEX", + "IP_PKTINFO", + "IP_PKTOPTIONS", + "IP_PMTUDISC", + "IP_PMTUDISC_DO", + "IP_PMTUDISC_DONT", + "IP_PMTUDISC_PROBE", + "IP_PMTUDISC_WANT", + "IP_PORTRANGE", + "IP_PORTRANGE_DEFAULT", + "IP_PORTRANGE_HIGH", + "IP_PORTRANGE_LOW", + "IP_RECVDSTADDR", + "IP_RECVDSTPORT", + "IP_RECVERR", + "IP_RECVIF", + "IP_RECVOPTS", + "IP_RECVORIGDSTADDR", + "IP_RECVPKTINFO", + "IP_RECVRETOPTS", + "IP_RECVRTABLE", + "IP_RECVTOS", + "IP_RECVTTL", + "IP_RETOPTS", + "IP_RF", + "IP_ROUTER_ALERT", + "IP_RSVP_OFF", + "IP_RSVP_ON", + "IP_RSVP_VIF_OFF", + "IP_RSVP_VIF_ON", + "IP_RTABLE", + "IP_SENDSRCADDR", + "IP_STRIPHDR", + "IP_TOS", + "IP_TRAFFIC_MGT_BACKGROUND", + "IP_TRANSPARENT", + "IP_TTL", + "IP_UNBLOCK_SOURCE", + "IP_XFRM_POLICY", + "IPv6MTUInfo", + "IPv6Mreq", + "ISIG", + "ISTRIP", + "IUCLC", + "IUTF8", + "IXANY", + "IXOFF", + "IXON", + "IfAddrmsg", + "IfAnnounceMsghdr", + "IfData", + "IfInfomsg", + "IfMsghdr", + "IfaMsghdr", + "IfmaMsghdr", + "IfmaMsghdr2", + "ImplementsGetwd", + "Inet4Pktinfo", + "Inet6Pktinfo", + "InotifyAddWatch", + "InotifyEvent", + "InotifyInit", + "InotifyInit1", + "InotifyRmWatch", + "InterfaceAddrMessage", + "InterfaceAnnounceMessage", + "InterfaceInfo", + "InterfaceMessage", + "InterfaceMulticastAddrMessage", + "InvalidHandle", + "Ioperm", + "Iopl", + "Iovec", + "IpAdapterInfo", + "IpAddrString", + "IpAddressString", + "IpMaskString", + "Issetugid", + "KEY_ALL_ACCESS", + "KEY_CREATE_LINK", + "KEY_CREATE_SUB_KEY", + "KEY_ENUMERATE_SUB_KEYS", + "KEY_EXECUTE", + "KEY_NOTIFY", + "KEY_QUERY_VALUE", + "KEY_READ", + "KEY_SET_VALUE", + "KEY_WOW64_32KEY", + "KEY_WOW64_64KEY", + "KEY_WRITE", + "Kevent", + "Kevent_t", + "Kill", + "Klogctl", + "Kqueue", + "LANG_ENGLISH", + "LAYERED_PROTOCOL", + "LCNT_OVERLOAD_FLUSH", + "LINUX_REBOOT_CMD_CAD_OFF", + "LINUX_REBOOT_CMD_CAD_ON", + "LINUX_REBOOT_CMD_HALT", + "LINUX_REBOOT_CMD_KEXEC", + "LINUX_REBOOT_CMD_POWER_OFF", + "LINUX_REBOOT_CMD_RESTART", + "LINUX_REBOOT_CMD_RESTART2", + "LINUX_REBOOT_CMD_SW_SUSPEND", + "LINUX_REBOOT_MAGIC1", + "LINUX_REBOOT_MAGIC2", + "LOCK_EX", + "LOCK_NB", + "LOCK_SH", + "LOCK_UN", + "LazyDLL", + "LazyProc", + "Lchown", + "Linger", + "Link", + "Listen", + "Listxattr", + "LoadCancelIoEx", + "LoadConnectEx", + "LoadCreateSymbolicLink", + "LoadDLL", + "LoadGetAddrInfo", + "LoadLibrary", + "LoadSetFileCompletionNotificationModes", + "LocalFree", + "Log2phys_t", + "LookupAccountName", + "LookupAccountSid", + "LookupSID", + "LsfJump", + "LsfSocket", + "LsfStmt", + "Lstat", + "MADV_AUTOSYNC", + "MADV_CAN_REUSE", + "MADV_CORE", + "MADV_DOFORK", + "MADV_DONTFORK", + "MADV_DONTNEED", + "MADV_FREE", + "MADV_FREE_REUSABLE", + "MADV_FREE_REUSE", + "MADV_HUGEPAGE", + "MADV_HWPOISON", + "MADV_MERGEABLE", + "MADV_NOCORE", + "MADV_NOHUGEPAGE", + "MADV_NORMAL", + "MADV_NOSYNC", + "MADV_PROTECT", + "MADV_RANDOM", + "MADV_REMOVE", + "MADV_SEQUENTIAL", + "MADV_SPACEAVAIL", + "MADV_UNMERGEABLE", + "MADV_WILLNEED", + 
"MADV_ZERO_WIRED_PAGES", + "MAP_32BIT", + "MAP_ALIGNED_SUPER", + "MAP_ALIGNMENT_16MB", + "MAP_ALIGNMENT_1TB", + "MAP_ALIGNMENT_256TB", + "MAP_ALIGNMENT_4GB", + "MAP_ALIGNMENT_64KB", + "MAP_ALIGNMENT_64PB", + "MAP_ALIGNMENT_MASK", + "MAP_ALIGNMENT_SHIFT", + "MAP_ANON", + "MAP_ANONYMOUS", + "MAP_COPY", + "MAP_DENYWRITE", + "MAP_EXECUTABLE", + "MAP_FILE", + "MAP_FIXED", + "MAP_FLAGMASK", + "MAP_GROWSDOWN", + "MAP_HASSEMAPHORE", + "MAP_HUGETLB", + "MAP_INHERIT", + "MAP_INHERIT_COPY", + "MAP_INHERIT_DEFAULT", + "MAP_INHERIT_DONATE_COPY", + "MAP_INHERIT_NONE", + "MAP_INHERIT_SHARE", + "MAP_JIT", + "MAP_LOCKED", + "MAP_NOCACHE", + "MAP_NOCORE", + "MAP_NOEXTEND", + "MAP_NONBLOCK", + "MAP_NORESERVE", + "MAP_NOSYNC", + "MAP_POPULATE", + "MAP_PREFAULT_READ", + "MAP_PRIVATE", + "MAP_RENAME", + "MAP_RESERVED0080", + "MAP_RESERVED0100", + "MAP_SHARED", + "MAP_STACK", + "MAP_TRYFIXED", + "MAP_TYPE", + "MAP_WIRED", + "MAXIMUM_REPARSE_DATA_BUFFER_SIZE", + "MAXLEN_IFDESCR", + "MAXLEN_PHYSADDR", + "MAX_ADAPTER_ADDRESS_LENGTH", + "MAX_ADAPTER_DESCRIPTION_LENGTH", + "MAX_ADAPTER_NAME_LENGTH", + "MAX_COMPUTERNAME_LENGTH", + "MAX_INTERFACE_NAME_LEN", + "MAX_LONG_PATH", + "MAX_PATH", + "MAX_PROTOCOL_CHAIN", + "MCL_CURRENT", + "MCL_FUTURE", + "MNT_DETACH", + "MNT_EXPIRE", + "MNT_FORCE", + "MSG_BCAST", + "MSG_CMSG_CLOEXEC", + "MSG_COMPAT", + "MSG_CONFIRM", + "MSG_CONTROLMBUF", + "MSG_CTRUNC", + "MSG_DONTROUTE", + "MSG_DONTWAIT", + "MSG_EOF", + "MSG_EOR", + "MSG_ERRQUEUE", + "MSG_FASTOPEN", + "MSG_FIN", + "MSG_FLUSH", + "MSG_HAVEMORE", + "MSG_HOLD", + "MSG_IOVUSRSPACE", + "MSG_LENUSRSPACE", + "MSG_MCAST", + "MSG_MORE", + "MSG_NAMEMBUF", + "MSG_NBIO", + "MSG_NEEDSA", + "MSG_NOSIGNAL", + "MSG_NOTIFICATION", + "MSG_OOB", + "MSG_PEEK", + "MSG_PROXY", + "MSG_RCVMORE", + "MSG_RST", + "MSG_SEND", + "MSG_SYN", + "MSG_TRUNC", + "MSG_TRYHARD", + "MSG_USERFLAGS", + "MSG_WAITALL", + "MSG_WAITFORONE", + "MSG_WAITSTREAM", + "MS_ACTIVE", + "MS_ASYNC", + "MS_BIND", + "MS_DEACTIVATE", + "MS_DIRSYNC", + "MS_INVALIDATE", + "MS_I_VERSION", + "MS_KERNMOUNT", + "MS_KILLPAGES", + "MS_MANDLOCK", + "MS_MGC_MSK", + "MS_MGC_VAL", + "MS_MOVE", + "MS_NOATIME", + "MS_NODEV", + "MS_NODIRATIME", + "MS_NOEXEC", + "MS_NOSUID", + "MS_NOUSER", + "MS_POSIXACL", + "MS_PRIVATE", + "MS_RDONLY", + "MS_REC", + "MS_RELATIME", + "MS_REMOUNT", + "MS_RMT_MASK", + "MS_SHARED", + "MS_SILENT", + "MS_SLAVE", + "MS_STRICTATIME", + "MS_SYNC", + "MS_SYNCHRONOUS", + "MS_UNBINDABLE", + "Madvise", + "MapViewOfFile", + "MaxTokenInfoClass", + "Mclpool", + "MibIfRow", + "Mkdir", + "Mkdirat", + "Mkfifo", + "Mknod", + "Mknodat", + "Mlock", + "Mlockall", + "Mmap", + "Mount", + "MoveFile", + "Mprotect", + "Msghdr", + "Munlock", + "Munlockall", + "Munmap", + "MustLoadDLL", + "NAME_MAX", + "NETLINK_ADD_MEMBERSHIP", + "NETLINK_AUDIT", + "NETLINK_BROADCAST_ERROR", + "NETLINK_CONNECTOR", + "NETLINK_DNRTMSG", + "NETLINK_DROP_MEMBERSHIP", + "NETLINK_ECRYPTFS", + "NETLINK_FIB_LOOKUP", + "NETLINK_FIREWALL", + "NETLINK_GENERIC", + "NETLINK_INET_DIAG", + "NETLINK_IP6_FW", + "NETLINK_ISCSI", + "NETLINK_KOBJECT_UEVENT", + "NETLINK_NETFILTER", + "NETLINK_NFLOG", + "NETLINK_NO_ENOBUFS", + "NETLINK_PKTINFO", + "NETLINK_RDMA", + "NETLINK_ROUTE", + "NETLINK_SCSITRANSPORT", + "NETLINK_SELINUX", + "NETLINK_UNUSED", + "NETLINK_USERSOCK", + "NETLINK_XFRM", + "NET_RT_DUMP", + "NET_RT_DUMP2", + "NET_RT_FLAGS", + "NET_RT_IFLIST", + "NET_RT_IFLIST2", + "NET_RT_IFLISTL", + "NET_RT_IFMALIST", + "NET_RT_MAXID", + "NET_RT_OIFLIST", + "NET_RT_OOIFLIST", + "NET_RT_STAT", + "NET_RT_STATS", + "NET_RT_TABLE", + 
"NET_RT_TRASH", + "NLA_ALIGNTO", + "NLA_F_NESTED", + "NLA_F_NET_BYTEORDER", + "NLA_HDRLEN", + "NLMSG_ALIGNTO", + "NLMSG_DONE", + "NLMSG_ERROR", + "NLMSG_HDRLEN", + "NLMSG_MIN_TYPE", + "NLMSG_NOOP", + "NLMSG_OVERRUN", + "NLM_F_ACK", + "NLM_F_APPEND", + "NLM_F_ATOMIC", + "NLM_F_CREATE", + "NLM_F_DUMP", + "NLM_F_ECHO", + "NLM_F_EXCL", + "NLM_F_MATCH", + "NLM_F_MULTI", + "NLM_F_REPLACE", + "NLM_F_REQUEST", + "NLM_F_ROOT", + "NOFLSH", + "NOTE_ABSOLUTE", + "NOTE_ATTRIB", + "NOTE_CHILD", + "NOTE_DELETE", + "NOTE_EOF", + "NOTE_EXEC", + "NOTE_EXIT", + "NOTE_EXITSTATUS", + "NOTE_EXTEND", + "NOTE_FFAND", + "NOTE_FFCOPY", + "NOTE_FFCTRLMASK", + "NOTE_FFLAGSMASK", + "NOTE_FFNOP", + "NOTE_FFOR", + "NOTE_FORK", + "NOTE_LINK", + "NOTE_LOWAT", + "NOTE_NONE", + "NOTE_NSECONDS", + "NOTE_PCTRLMASK", + "NOTE_PDATAMASK", + "NOTE_REAP", + "NOTE_RENAME", + "NOTE_RESOURCEEND", + "NOTE_REVOKE", + "NOTE_SECONDS", + "NOTE_SIGNAL", + "NOTE_TRACK", + "NOTE_TRACKERR", + "NOTE_TRIGGER", + "NOTE_TRUNCATE", + "NOTE_USECONDS", + "NOTE_VM_ERROR", + "NOTE_VM_PRESSURE", + "NOTE_VM_PRESSURE_SUDDEN_TERMINATE", + "NOTE_VM_PRESSURE_TERMINATE", + "NOTE_WRITE", + "NameCanonical", + "NameCanonicalEx", + "NameDisplay", + "NameDnsDomain", + "NameFullyQualifiedDN", + "NameSamCompatible", + "NameServicePrincipal", + "NameUniqueId", + "NameUnknown", + "NameUserPrincipal", + "Nanosleep", + "NetApiBufferFree", + "NetGetJoinInformation", + "NetSetupDomainName", + "NetSetupUnjoined", + "NetSetupUnknownStatus", + "NetSetupWorkgroupName", + "NetUserGetInfo", + "NetlinkMessage", + "NetlinkRIB", + "NetlinkRouteAttr", + "NetlinkRouteRequest", + "NewCallback", + "NewCallbackCDecl", + "NewLazyDLL", + "NlAttr", + "NlMsgerr", + "NlMsghdr", + "NsecToFiletime", + "NsecToTimespec", + "NsecToTimeval", + "Ntohs", + "OCRNL", + "OFDEL", + "OFILL", + "OFIOGETBMAP", + "OID_PKIX_KP_SERVER_AUTH", + "OID_SERVER_GATED_CRYPTO", + "OID_SGC_NETSCAPE", + "OLCUC", + "ONLCR", + "ONLRET", + "ONOCR", + "ONOEOT", + "OPEN_ALWAYS", + "OPEN_EXISTING", + "OPOST", + "O_ACCMODE", + "O_ALERT", + "O_ALT_IO", + "O_APPEND", + "O_ASYNC", + "O_CLOEXEC", + "O_CREAT", + "O_DIRECT", + "O_DIRECTORY", + "O_DSYNC", + "O_EVTONLY", + "O_EXCL", + "O_EXEC", + "O_EXLOCK", + "O_FSYNC", + "O_LARGEFILE", + "O_NDELAY", + "O_NOATIME", + "O_NOCTTY", + "O_NOFOLLOW", + "O_NONBLOCK", + "O_NOSIGPIPE", + "O_POPUP", + "O_RDONLY", + "O_RDWR", + "O_RSYNC", + "O_SHLOCK", + "O_SYMLINK", + "O_SYNC", + "O_TRUNC", + "O_TTY_INIT", + "O_WRONLY", + "Open", + "OpenCurrentProcessToken", + "OpenProcess", + "OpenProcessToken", + "Openat", + "Overlapped", + "PACKET_ADD_MEMBERSHIP", + "PACKET_BROADCAST", + "PACKET_DROP_MEMBERSHIP", + "PACKET_FASTROUTE", + "PACKET_HOST", + "PACKET_LOOPBACK", + "PACKET_MR_ALLMULTI", + "PACKET_MR_MULTICAST", + "PACKET_MR_PROMISC", + "PACKET_MULTICAST", + "PACKET_OTHERHOST", + "PACKET_OUTGOING", + "PACKET_RECV_OUTPUT", + "PACKET_RX_RING", + "PACKET_STATISTICS", + "PAGE_EXECUTE_READ", + "PAGE_EXECUTE_READWRITE", + "PAGE_EXECUTE_WRITECOPY", + "PAGE_READONLY", + "PAGE_READWRITE", + "PAGE_WRITECOPY", + "PARENB", + "PARMRK", + "PARODD", + "PENDIN", + "PFL_HIDDEN", + "PFL_MATCHES_PROTOCOL_ZERO", + "PFL_MULTIPLE_PROTO_ENTRIES", + "PFL_NETWORKDIRECT_PROVIDER", + "PFL_RECOMMENDED_PROTO_ENTRY", + "PF_FLUSH", + "PKCS_7_ASN_ENCODING", + "PMC5_PIPELINE_FLUSH", + "PRIO_PGRP", + "PRIO_PROCESS", + "PRIO_USER", + "PRI_IOFLUSH", + "PROCESS_QUERY_INFORMATION", + "PROCESS_TERMINATE", + "PROT_EXEC", + "PROT_GROWSDOWN", + "PROT_GROWSUP", + "PROT_NONE", + "PROT_READ", + "PROT_WRITE", + "PROV_DH_SCHANNEL", + 
"PROV_DSS", + "PROV_DSS_DH", + "PROV_EC_ECDSA_FULL", + "PROV_EC_ECDSA_SIG", + "PROV_EC_ECNRA_FULL", + "PROV_EC_ECNRA_SIG", + "PROV_FORTEZZA", + "PROV_INTEL_SEC", + "PROV_MS_EXCHANGE", + "PROV_REPLACE_OWF", + "PROV_RNG", + "PROV_RSA_AES", + "PROV_RSA_FULL", + "PROV_RSA_SCHANNEL", + "PROV_RSA_SIG", + "PROV_SPYRUS_LYNKS", + "PROV_SSL", + "PR_CAPBSET_DROP", + "PR_CAPBSET_READ", + "PR_CLEAR_SECCOMP_FILTER", + "PR_ENDIAN_BIG", + "PR_ENDIAN_LITTLE", + "PR_ENDIAN_PPC_LITTLE", + "PR_FPEMU_NOPRINT", + "PR_FPEMU_SIGFPE", + "PR_FP_EXC_ASYNC", + "PR_FP_EXC_DISABLED", + "PR_FP_EXC_DIV", + "PR_FP_EXC_INV", + "PR_FP_EXC_NONRECOV", + "PR_FP_EXC_OVF", + "PR_FP_EXC_PRECISE", + "PR_FP_EXC_RES", + "PR_FP_EXC_SW_ENABLE", + "PR_FP_EXC_UND", + "PR_GET_DUMPABLE", + "PR_GET_ENDIAN", + "PR_GET_FPEMU", + "PR_GET_FPEXC", + "PR_GET_KEEPCAPS", + "PR_GET_NAME", + "PR_GET_PDEATHSIG", + "PR_GET_SECCOMP", + "PR_GET_SECCOMP_FILTER", + "PR_GET_SECUREBITS", + "PR_GET_TIMERSLACK", + "PR_GET_TIMING", + "PR_GET_TSC", + "PR_GET_UNALIGN", + "PR_MCE_KILL", + "PR_MCE_KILL_CLEAR", + "PR_MCE_KILL_DEFAULT", + "PR_MCE_KILL_EARLY", + "PR_MCE_KILL_GET", + "PR_MCE_KILL_LATE", + "PR_MCE_KILL_SET", + "PR_SECCOMP_FILTER_EVENT", + "PR_SECCOMP_FILTER_SYSCALL", + "PR_SET_DUMPABLE", + "PR_SET_ENDIAN", + "PR_SET_FPEMU", + "PR_SET_FPEXC", + "PR_SET_KEEPCAPS", + "PR_SET_NAME", + "PR_SET_PDEATHSIG", + "PR_SET_PTRACER", + "PR_SET_SECCOMP", + "PR_SET_SECCOMP_FILTER", + "PR_SET_SECUREBITS", + "PR_SET_TIMERSLACK", + "PR_SET_TIMING", + "PR_SET_TSC", + "PR_SET_UNALIGN", + "PR_TASK_PERF_EVENTS_DISABLE", + "PR_TASK_PERF_EVENTS_ENABLE", + "PR_TIMING_STATISTICAL", + "PR_TIMING_TIMESTAMP", + "PR_TSC_ENABLE", + "PR_TSC_SIGSEGV", + "PR_UNALIGN_NOPRINT", + "PR_UNALIGN_SIGBUS", + "PTRACE_ARCH_PRCTL", + "PTRACE_ATTACH", + "PTRACE_CONT", + "PTRACE_DETACH", + "PTRACE_EVENT_CLONE", + "PTRACE_EVENT_EXEC", + "PTRACE_EVENT_EXIT", + "PTRACE_EVENT_FORK", + "PTRACE_EVENT_VFORK", + "PTRACE_EVENT_VFORK_DONE", + "PTRACE_GETCRUNCHREGS", + "PTRACE_GETEVENTMSG", + "PTRACE_GETFPREGS", + "PTRACE_GETFPXREGS", + "PTRACE_GETHBPREGS", + "PTRACE_GETREGS", + "PTRACE_GETREGSET", + "PTRACE_GETSIGINFO", + "PTRACE_GETVFPREGS", + "PTRACE_GETWMMXREGS", + "PTRACE_GET_THREAD_AREA", + "PTRACE_KILL", + "PTRACE_OLDSETOPTIONS", + "PTRACE_O_MASK", + "PTRACE_O_TRACECLONE", + "PTRACE_O_TRACEEXEC", + "PTRACE_O_TRACEEXIT", + "PTRACE_O_TRACEFORK", + "PTRACE_O_TRACESYSGOOD", + "PTRACE_O_TRACEVFORK", + "PTRACE_O_TRACEVFORKDONE", + "PTRACE_PEEKDATA", + "PTRACE_PEEKTEXT", + "PTRACE_PEEKUSR", + "PTRACE_POKEDATA", + "PTRACE_POKETEXT", + "PTRACE_POKEUSR", + "PTRACE_SETCRUNCHREGS", + "PTRACE_SETFPREGS", + "PTRACE_SETFPXREGS", + "PTRACE_SETHBPREGS", + "PTRACE_SETOPTIONS", + "PTRACE_SETREGS", + "PTRACE_SETREGSET", + "PTRACE_SETSIGINFO", + "PTRACE_SETVFPREGS", + "PTRACE_SETWMMXREGS", + "PTRACE_SET_SYSCALL", + "PTRACE_SET_THREAD_AREA", + "PTRACE_SINGLEBLOCK", + "PTRACE_SINGLESTEP", + "PTRACE_SYSCALL", + "PTRACE_SYSEMU", + "PTRACE_SYSEMU_SINGLESTEP", + "PTRACE_TRACEME", + "PT_ATTACH", + "PT_ATTACHEXC", + "PT_CONTINUE", + "PT_DATA_ADDR", + "PT_DENY_ATTACH", + "PT_DETACH", + "PT_FIRSTMACH", + "PT_FORCEQUOTA", + "PT_KILL", + "PT_MASK", + "PT_READ_D", + "PT_READ_I", + "PT_READ_U", + "PT_SIGEXC", + "PT_STEP", + "PT_TEXT_ADDR", + "PT_TEXT_END_ADDR", + "PT_THUPDATE", + "PT_TRACE_ME", + "PT_WRITE_D", + "PT_WRITE_I", + "PT_WRITE_U", + "ParseDirent", + "ParseNetlinkMessage", + "ParseNetlinkRouteAttr", + "ParseRoutingMessage", + "ParseRoutingSockaddr", + "ParseSocketControlMessage", + "ParseUnixCredentials", + "ParseUnixRights", + 
"PathMax", + "Pathconf", + "Pause", + "Pipe", + "Pipe2", + "PivotRoot", + "Pointer", + "PostQueuedCompletionStatus", + "Pread", + "Proc", + "ProcAttr", + "Process32First", + "Process32Next", + "ProcessEntry32", + "ProcessInformation", + "Protoent", + "PtraceAttach", + "PtraceCont", + "PtraceDetach", + "PtraceGetEventMsg", + "PtraceGetRegs", + "PtracePeekData", + "PtracePeekText", + "PtracePokeData", + "PtracePokeText", + "PtraceRegs", + "PtraceSetOptions", + "PtraceSetRegs", + "PtraceSingleStep", + "PtraceSyscall", + "Pwrite", + "REG_BINARY", + "REG_DWORD", + "REG_DWORD_BIG_ENDIAN", + "REG_DWORD_LITTLE_ENDIAN", + "REG_EXPAND_SZ", + "REG_FULL_RESOURCE_DESCRIPTOR", + "REG_LINK", + "REG_MULTI_SZ", + "REG_NONE", + "REG_QWORD", + "REG_QWORD_LITTLE_ENDIAN", + "REG_RESOURCE_LIST", + "REG_RESOURCE_REQUIREMENTS_LIST", + "REG_SZ", + "RLIMIT_AS", + "RLIMIT_CORE", + "RLIMIT_CPU", + "RLIMIT_DATA", + "RLIMIT_FSIZE", + "RLIMIT_NOFILE", + "RLIMIT_STACK", + "RLIM_INFINITY", + "RTAX_ADVMSS", + "RTAX_AUTHOR", + "RTAX_BRD", + "RTAX_CWND", + "RTAX_DST", + "RTAX_FEATURES", + "RTAX_FEATURE_ALLFRAG", + "RTAX_FEATURE_ECN", + "RTAX_FEATURE_SACK", + "RTAX_FEATURE_TIMESTAMP", + "RTAX_GATEWAY", + "RTAX_GENMASK", + "RTAX_HOPLIMIT", + "RTAX_IFA", + "RTAX_IFP", + "RTAX_INITCWND", + "RTAX_INITRWND", + "RTAX_LABEL", + "RTAX_LOCK", + "RTAX_MAX", + "RTAX_MTU", + "RTAX_NETMASK", + "RTAX_REORDERING", + "RTAX_RTO_MIN", + "RTAX_RTT", + "RTAX_RTTVAR", + "RTAX_SRC", + "RTAX_SRCMASK", + "RTAX_SSTHRESH", + "RTAX_TAG", + "RTAX_UNSPEC", + "RTAX_WINDOW", + "RTA_ALIGNTO", + "RTA_AUTHOR", + "RTA_BRD", + "RTA_CACHEINFO", + "RTA_DST", + "RTA_FLOW", + "RTA_GATEWAY", + "RTA_GENMASK", + "RTA_IFA", + "RTA_IFP", + "RTA_IIF", + "RTA_LABEL", + "RTA_MAX", + "RTA_METRICS", + "RTA_MULTIPATH", + "RTA_NETMASK", + "RTA_OIF", + "RTA_PREFSRC", + "RTA_PRIORITY", + "RTA_SRC", + "RTA_SRCMASK", + "RTA_TABLE", + "RTA_TAG", + "RTA_UNSPEC", + "RTCF_DIRECTSRC", + "RTCF_DOREDIRECT", + "RTCF_LOG", + "RTCF_MASQ", + "RTCF_NAT", + "RTCF_VALVE", + "RTF_ADDRCLASSMASK", + "RTF_ADDRCONF", + "RTF_ALLONLINK", + "RTF_ANNOUNCE", + "RTF_BLACKHOLE", + "RTF_BROADCAST", + "RTF_CACHE", + "RTF_CLONED", + "RTF_CLONING", + "RTF_CONDEMNED", + "RTF_DEFAULT", + "RTF_DELCLONE", + "RTF_DONE", + "RTF_DYNAMIC", + "RTF_FLOW", + "RTF_FMASK", + "RTF_GATEWAY", + "RTF_GWFLAG_COMPAT", + "RTF_HOST", + "RTF_IFREF", + "RTF_IFSCOPE", + "RTF_INTERFACE", + "RTF_IRTT", + "RTF_LINKRT", + "RTF_LLDATA", + "RTF_LLINFO", + "RTF_LOCAL", + "RTF_MASK", + "RTF_MODIFIED", + "RTF_MPATH", + "RTF_MPLS", + "RTF_MSS", + "RTF_MTU", + "RTF_MULTICAST", + "RTF_NAT", + "RTF_NOFORWARD", + "RTF_NONEXTHOP", + "RTF_NOPMTUDISC", + "RTF_PERMANENT_ARP", + "RTF_PINNED", + "RTF_POLICY", + "RTF_PRCLONING", + "RTF_PROTO1", + "RTF_PROTO2", + "RTF_PROTO3", + "RTF_REINSTATE", + "RTF_REJECT", + "RTF_RNH_LOCKED", + "RTF_SOURCE", + "RTF_SRC", + "RTF_STATIC", + "RTF_STICKY", + "RTF_THROW", + "RTF_TUNNEL", + "RTF_UP", + "RTF_USETRAILERS", + "RTF_WASCLONED", + "RTF_WINDOW", + "RTF_XRESOLVE", + "RTM_ADD", + "RTM_BASE", + "RTM_CHANGE", + "RTM_CHGADDR", + "RTM_DELACTION", + "RTM_DELADDR", + "RTM_DELADDRLABEL", + "RTM_DELETE", + "RTM_DELLINK", + "RTM_DELMADDR", + "RTM_DELNEIGH", + "RTM_DELQDISC", + "RTM_DELROUTE", + "RTM_DELRULE", + "RTM_DELTCLASS", + "RTM_DELTFILTER", + "RTM_DESYNC", + "RTM_F_CLONED", + "RTM_F_EQUALIZE", + "RTM_F_NOTIFY", + "RTM_F_PREFIX", + "RTM_GET", + "RTM_GET2", + "RTM_GETACTION", + "RTM_GETADDR", + "RTM_GETADDRLABEL", + "RTM_GETANYCAST", + "RTM_GETDCB", + "RTM_GETLINK", + "RTM_GETMULTICAST", + "RTM_GETNEIGH", + 
"RTM_GETNEIGHTBL", + "RTM_GETQDISC", + "RTM_GETROUTE", + "RTM_GETRULE", + "RTM_GETTCLASS", + "RTM_GETTFILTER", + "RTM_IEEE80211", + "RTM_IFANNOUNCE", + "RTM_IFINFO", + "RTM_IFINFO2", + "RTM_LLINFO_UPD", + "RTM_LOCK", + "RTM_LOSING", + "RTM_MAX", + "RTM_MAXSIZE", + "RTM_MISS", + "RTM_NEWACTION", + "RTM_NEWADDR", + "RTM_NEWADDRLABEL", + "RTM_NEWLINK", + "RTM_NEWMADDR", + "RTM_NEWMADDR2", + "RTM_NEWNDUSEROPT", + "RTM_NEWNEIGH", + "RTM_NEWNEIGHTBL", + "RTM_NEWPREFIX", + "RTM_NEWQDISC", + "RTM_NEWROUTE", + "RTM_NEWRULE", + "RTM_NEWTCLASS", + "RTM_NEWTFILTER", + "RTM_NR_FAMILIES", + "RTM_NR_MSGTYPES", + "RTM_OIFINFO", + "RTM_OLDADD", + "RTM_OLDDEL", + "RTM_OOIFINFO", + "RTM_REDIRECT", + "RTM_RESOLVE", + "RTM_RTTUNIT", + "RTM_SETDCB", + "RTM_SETGATE", + "RTM_SETLINK", + "RTM_SETNEIGHTBL", + "RTM_VERSION", + "RTNH_ALIGNTO", + "RTNH_F_DEAD", + "RTNH_F_ONLINK", + "RTNH_F_PERVASIVE", + "RTNLGRP_IPV4_IFADDR", + "RTNLGRP_IPV4_MROUTE", + "RTNLGRP_IPV4_ROUTE", + "RTNLGRP_IPV4_RULE", + "RTNLGRP_IPV6_IFADDR", + "RTNLGRP_IPV6_IFINFO", + "RTNLGRP_IPV6_MROUTE", + "RTNLGRP_IPV6_PREFIX", + "RTNLGRP_IPV6_ROUTE", + "RTNLGRP_IPV6_RULE", + "RTNLGRP_LINK", + "RTNLGRP_ND_USEROPT", + "RTNLGRP_NEIGH", + "RTNLGRP_NONE", + "RTNLGRP_NOTIFY", + "RTNLGRP_TC", + "RTN_ANYCAST", + "RTN_BLACKHOLE", + "RTN_BROADCAST", + "RTN_LOCAL", + "RTN_MAX", + "RTN_MULTICAST", + "RTN_NAT", + "RTN_PROHIBIT", + "RTN_THROW", + "RTN_UNICAST", + "RTN_UNREACHABLE", + "RTN_UNSPEC", + "RTN_XRESOLVE", + "RTPROT_BIRD", + "RTPROT_BOOT", + "RTPROT_DHCP", + "RTPROT_DNROUTED", + "RTPROT_GATED", + "RTPROT_KERNEL", + "RTPROT_MRT", + "RTPROT_NTK", + "RTPROT_RA", + "RTPROT_REDIRECT", + "RTPROT_STATIC", + "RTPROT_UNSPEC", + "RTPROT_XORP", + "RTPROT_ZEBRA", + "RTV_EXPIRE", + "RTV_HOPCOUNT", + "RTV_MTU", + "RTV_RPIPE", + "RTV_RTT", + "RTV_RTTVAR", + "RTV_SPIPE", + "RTV_SSTHRESH", + "RTV_WEIGHT", + "RT_CACHING_CONTEXT", + "RT_CLASS_DEFAULT", + "RT_CLASS_LOCAL", + "RT_CLASS_MAIN", + "RT_CLASS_MAX", + "RT_CLASS_UNSPEC", + "RT_DEFAULT_FIB", + "RT_NORTREF", + "RT_SCOPE_HOST", + "RT_SCOPE_LINK", + "RT_SCOPE_NOWHERE", + "RT_SCOPE_SITE", + "RT_SCOPE_UNIVERSE", + "RT_TABLEID_MAX", + "RT_TABLE_COMPAT", + "RT_TABLE_DEFAULT", + "RT_TABLE_LOCAL", + "RT_TABLE_MAIN", + "RT_TABLE_MAX", + "RT_TABLE_UNSPEC", + "RUSAGE_CHILDREN", + "RUSAGE_SELF", + "RUSAGE_THREAD", + "Radvisory_t", + "RawConn", + "RawSockaddr", + "RawSockaddrAny", + "RawSockaddrDatalink", + "RawSockaddrInet4", + "RawSockaddrInet6", + "RawSockaddrLinklayer", + "RawSockaddrNetlink", + "RawSockaddrUnix", + "RawSyscall", + "RawSyscall6", + "Read", + "ReadConsole", + "ReadDirectoryChanges", + "ReadDirent", + "ReadFile", + "Readlink", + "Reboot", + "Recvfrom", + "Recvmsg", + "RegCloseKey", + "RegEnumKeyEx", + "RegOpenKeyEx", + "RegQueryInfoKey", + "RegQueryValueEx", + "RemoveDirectory", + "Removexattr", + "Rename", + "Renameat", + "Revoke", + "Rlimit", + "Rmdir", + "RouteMessage", + "RouteRIB", + "RoutingMessage", + "RtAttr", + "RtGenmsg", + "RtMetrics", + "RtMsg", + "RtMsghdr", + "RtNexthop", + "Rusage", + "SCM_BINTIME", + "SCM_CREDENTIALS", + "SCM_CREDS", + "SCM_RIGHTS", + "SCM_TIMESTAMP", + "SCM_TIMESTAMPING", + "SCM_TIMESTAMPNS", + "SCM_TIMESTAMP_MONOTONIC", + "SHUT_RD", + "SHUT_RDWR", + "SHUT_WR", + "SID", + "SIDAndAttributes", + "SIGABRT", + "SIGALRM", + "SIGBUS", + "SIGCHLD", + "SIGCLD", + "SIGCONT", + "SIGEMT", + "SIGFPE", + "SIGHUP", + "SIGILL", + "SIGINFO", + "SIGINT", + "SIGIO", + "SIGIOT", + "SIGKILL", + "SIGLIBRT", + "SIGLWP", + "SIGPIPE", + "SIGPOLL", + "SIGPROF", + "SIGPWR", + "SIGQUIT", + "SIGSEGV", + 
"SIGSTKFLT", + "SIGSTOP", + "SIGSYS", + "SIGTERM", + "SIGTHR", + "SIGTRAP", + "SIGTSTP", + "SIGTTIN", + "SIGTTOU", + "SIGUNUSED", + "SIGURG", + "SIGUSR1", + "SIGUSR2", + "SIGVTALRM", + "SIGWINCH", + "SIGXCPU", + "SIGXFSZ", + "SIOCADDDLCI", + "SIOCADDMULTI", + "SIOCADDRT", + "SIOCAIFADDR", + "SIOCAIFGROUP", + "SIOCALIFADDR", + "SIOCARPIPLL", + "SIOCATMARK", + "SIOCAUTOADDR", + "SIOCAUTONETMASK", + "SIOCBRDGADD", + "SIOCBRDGADDS", + "SIOCBRDGARL", + "SIOCBRDGDADDR", + "SIOCBRDGDEL", + "SIOCBRDGDELS", + "SIOCBRDGFLUSH", + "SIOCBRDGFRL", + "SIOCBRDGGCACHE", + "SIOCBRDGGFD", + "SIOCBRDGGHT", + "SIOCBRDGGIFFLGS", + "SIOCBRDGGMA", + "SIOCBRDGGPARAM", + "SIOCBRDGGPRI", + "SIOCBRDGGRL", + "SIOCBRDGGSIFS", + "SIOCBRDGGTO", + "SIOCBRDGIFS", + "SIOCBRDGRTS", + "SIOCBRDGSADDR", + "SIOCBRDGSCACHE", + "SIOCBRDGSFD", + "SIOCBRDGSHT", + "SIOCBRDGSIFCOST", + "SIOCBRDGSIFFLGS", + "SIOCBRDGSIFPRIO", + "SIOCBRDGSMA", + "SIOCBRDGSPRI", + "SIOCBRDGSPROTO", + "SIOCBRDGSTO", + "SIOCBRDGSTXHC", + "SIOCDARP", + "SIOCDELDLCI", + "SIOCDELMULTI", + "SIOCDELRT", + "SIOCDEVPRIVATE", + "SIOCDIFADDR", + "SIOCDIFGROUP", + "SIOCDIFPHYADDR", + "SIOCDLIFADDR", + "SIOCDRARP", + "SIOCGARP", + "SIOCGDRVSPEC", + "SIOCGETKALIVE", + "SIOCGETLABEL", + "SIOCGETPFLOW", + "SIOCGETPFSYNC", + "SIOCGETSGCNT", + "SIOCGETVIFCNT", + "SIOCGETVLAN", + "SIOCGHIWAT", + "SIOCGIFADDR", + "SIOCGIFADDRPREF", + "SIOCGIFALIAS", + "SIOCGIFALTMTU", + "SIOCGIFASYNCMAP", + "SIOCGIFBOND", + "SIOCGIFBR", + "SIOCGIFBRDADDR", + "SIOCGIFCAP", + "SIOCGIFCONF", + "SIOCGIFCOUNT", + "SIOCGIFDATA", + "SIOCGIFDESCR", + "SIOCGIFDEVMTU", + "SIOCGIFDLT", + "SIOCGIFDSTADDR", + "SIOCGIFENCAP", + "SIOCGIFFIB", + "SIOCGIFFLAGS", + "SIOCGIFGATTR", + "SIOCGIFGENERIC", + "SIOCGIFGMEMB", + "SIOCGIFGROUP", + "SIOCGIFHARDMTU", + "SIOCGIFHWADDR", + "SIOCGIFINDEX", + "SIOCGIFKPI", + "SIOCGIFMAC", + "SIOCGIFMAP", + "SIOCGIFMEDIA", + "SIOCGIFMEM", + "SIOCGIFMETRIC", + "SIOCGIFMTU", + "SIOCGIFNAME", + "SIOCGIFNETMASK", + "SIOCGIFPDSTADDR", + "SIOCGIFPFLAGS", + "SIOCGIFPHYS", + "SIOCGIFPRIORITY", + "SIOCGIFPSRCADDR", + "SIOCGIFRDOMAIN", + "SIOCGIFRTLABEL", + "SIOCGIFSLAVE", + "SIOCGIFSTATUS", + "SIOCGIFTIMESLOT", + "SIOCGIFTXQLEN", + "SIOCGIFVLAN", + "SIOCGIFWAKEFLAGS", + "SIOCGIFXFLAGS", + "SIOCGLIFADDR", + "SIOCGLIFPHYADDR", + "SIOCGLIFPHYRTABLE", + "SIOCGLIFPHYTTL", + "SIOCGLINKSTR", + "SIOCGLOWAT", + "SIOCGPGRP", + "SIOCGPRIVATE_0", + "SIOCGPRIVATE_1", + "SIOCGRARP", + "SIOCGSPPPPARAMS", + "SIOCGSTAMP", + "SIOCGSTAMPNS", + "SIOCGVH", + "SIOCGVNETID", + "SIOCIFCREATE", + "SIOCIFCREATE2", + "SIOCIFDESTROY", + "SIOCIFGCLONERS", + "SIOCINITIFADDR", + "SIOCPROTOPRIVATE", + "SIOCRSLVMULTI", + "SIOCRTMSG", + "SIOCSARP", + "SIOCSDRVSPEC", + "SIOCSETKALIVE", + "SIOCSETLABEL", + "SIOCSETPFLOW", + "SIOCSETPFSYNC", + "SIOCSETVLAN", + "SIOCSHIWAT", + "SIOCSIFADDR", + "SIOCSIFADDRPREF", + "SIOCSIFALTMTU", + "SIOCSIFASYNCMAP", + "SIOCSIFBOND", + "SIOCSIFBR", + "SIOCSIFBRDADDR", + "SIOCSIFCAP", + "SIOCSIFDESCR", + "SIOCSIFDSTADDR", + "SIOCSIFENCAP", + "SIOCSIFFIB", + "SIOCSIFFLAGS", + "SIOCSIFGATTR", + "SIOCSIFGENERIC", + "SIOCSIFHWADDR", + "SIOCSIFHWBROADCAST", + "SIOCSIFKPI", + "SIOCSIFLINK", + "SIOCSIFLLADDR", + "SIOCSIFMAC", + "SIOCSIFMAP", + "SIOCSIFMEDIA", + "SIOCSIFMEM", + "SIOCSIFMETRIC", + "SIOCSIFMTU", + "SIOCSIFNAME", + "SIOCSIFNETMASK", + "SIOCSIFPFLAGS", + "SIOCSIFPHYADDR", + "SIOCSIFPHYS", + "SIOCSIFPRIORITY", + "SIOCSIFRDOMAIN", + "SIOCSIFRTLABEL", + "SIOCSIFRVNET", + "SIOCSIFSLAVE", + "SIOCSIFTIMESLOT", + "SIOCSIFTXQLEN", + "SIOCSIFVLAN", + "SIOCSIFVNET", + "SIOCSIFXFLAGS", + 
"SIOCSLIFPHYADDR", + "SIOCSLIFPHYRTABLE", + "SIOCSLIFPHYTTL", + "SIOCSLINKSTR", + "SIOCSLOWAT", + "SIOCSPGRP", + "SIOCSRARP", + "SIOCSSPPPPARAMS", + "SIOCSVH", + "SIOCSVNETID", + "SIOCZIFDATA", + "SIO_GET_EXTENSION_FUNCTION_POINTER", + "SIO_GET_INTERFACE_LIST", + "SIO_KEEPALIVE_VALS", + "SIO_UDP_CONNRESET", + "SOCK_CLOEXEC", + "SOCK_DCCP", + "SOCK_DGRAM", + "SOCK_FLAGS_MASK", + "SOCK_MAXADDRLEN", + "SOCK_NONBLOCK", + "SOCK_NOSIGPIPE", + "SOCK_PACKET", + "SOCK_RAW", + "SOCK_RDM", + "SOCK_SEQPACKET", + "SOCK_STREAM", + "SOL_AAL", + "SOL_ATM", + "SOL_DECNET", + "SOL_ICMPV6", + "SOL_IP", + "SOL_IPV6", + "SOL_IRDA", + "SOL_PACKET", + "SOL_RAW", + "SOL_SOCKET", + "SOL_TCP", + "SOL_X25", + "SOMAXCONN", + "SO_ACCEPTCONN", + "SO_ACCEPTFILTER", + "SO_ATTACH_FILTER", + "SO_BINDANY", + "SO_BINDTODEVICE", + "SO_BINTIME", + "SO_BROADCAST", + "SO_BSDCOMPAT", + "SO_DEBUG", + "SO_DETACH_FILTER", + "SO_DOMAIN", + "SO_DONTROUTE", + "SO_DONTTRUNC", + "SO_ERROR", + "SO_KEEPALIVE", + "SO_LABEL", + "SO_LINGER", + "SO_LINGER_SEC", + "SO_LISTENINCQLEN", + "SO_LISTENQLEN", + "SO_LISTENQLIMIT", + "SO_MARK", + "SO_NETPROC", + "SO_NKE", + "SO_NOADDRERR", + "SO_NOHEADER", + "SO_NOSIGPIPE", + "SO_NOTIFYCONFLICT", + "SO_NO_CHECK", + "SO_NO_DDP", + "SO_NO_OFFLOAD", + "SO_NP_EXTENSIONS", + "SO_NREAD", + "SO_NWRITE", + "SO_OOBINLINE", + "SO_OVERFLOWED", + "SO_PASSCRED", + "SO_PASSSEC", + "SO_PEERCRED", + "SO_PEERLABEL", + "SO_PEERNAME", + "SO_PEERSEC", + "SO_PRIORITY", + "SO_PROTOCOL", + "SO_PROTOTYPE", + "SO_RANDOMPORT", + "SO_RCVBUF", + "SO_RCVBUFFORCE", + "SO_RCVLOWAT", + "SO_RCVTIMEO", + "SO_RESTRICTIONS", + "SO_RESTRICT_DENYIN", + "SO_RESTRICT_DENYOUT", + "SO_RESTRICT_DENYSET", + "SO_REUSEADDR", + "SO_REUSEPORT", + "SO_REUSESHAREUID", + "SO_RTABLE", + "SO_RXQ_OVFL", + "SO_SECURITY_AUTHENTICATION", + "SO_SECURITY_ENCRYPTION_NETWORK", + "SO_SECURITY_ENCRYPTION_TRANSPORT", + "SO_SETFIB", + "SO_SNDBUF", + "SO_SNDBUFFORCE", + "SO_SNDLOWAT", + "SO_SNDTIMEO", + "SO_SPLICE", + "SO_TIMESTAMP", + "SO_TIMESTAMPING", + "SO_TIMESTAMPNS", + "SO_TIMESTAMP_MONOTONIC", + "SO_TYPE", + "SO_UPCALLCLOSEWAIT", + "SO_UPDATE_ACCEPT_CONTEXT", + "SO_UPDATE_CONNECT_CONTEXT", + "SO_USELOOPBACK", + "SO_USER_COOKIE", + "SO_VENDOR", + "SO_WANTMORE", + "SO_WANTOOBFLAG", + "SSLExtraCertChainPolicyPara", + "STANDARD_RIGHTS_ALL", + "STANDARD_RIGHTS_EXECUTE", + "STANDARD_RIGHTS_READ", + "STANDARD_RIGHTS_REQUIRED", + "STANDARD_RIGHTS_WRITE", + "STARTF_USESHOWWINDOW", + "STARTF_USESTDHANDLES", + "STD_ERROR_HANDLE", + "STD_INPUT_HANDLE", + "STD_OUTPUT_HANDLE", + "SUBLANG_ENGLISH_US", + "SW_FORCEMINIMIZE", + "SW_HIDE", + "SW_MAXIMIZE", + "SW_MINIMIZE", + "SW_NORMAL", + "SW_RESTORE", + "SW_SHOW", + "SW_SHOWDEFAULT", + "SW_SHOWMAXIMIZED", + "SW_SHOWMINIMIZED", + "SW_SHOWMINNOACTIVE", + "SW_SHOWNA", + "SW_SHOWNOACTIVATE", + "SW_SHOWNORMAL", + "SYMBOLIC_LINK_FLAG_DIRECTORY", + "SYNCHRONIZE", + "SYSCTL_VERSION", + "SYSCTL_VERS_0", + "SYSCTL_VERS_1", + "SYSCTL_VERS_MASK", + "SYS_ABORT2", + "SYS_ACCEPT", + "SYS_ACCEPT4", + "SYS_ACCEPT_NOCANCEL", + "SYS_ACCESS", + "SYS_ACCESS_EXTENDED", + "SYS_ACCT", + "SYS_ADD_KEY", + "SYS_ADD_PROFIL", + "SYS_ADJFREQ", + "SYS_ADJTIME", + "SYS_ADJTIMEX", + "SYS_AFS_SYSCALL", + "SYS_AIO_CANCEL", + "SYS_AIO_ERROR", + "SYS_AIO_FSYNC", + "SYS_AIO_READ", + "SYS_AIO_RETURN", + "SYS_AIO_SUSPEND", + "SYS_AIO_SUSPEND_NOCANCEL", + "SYS_AIO_WRITE", + "SYS_ALARM", + "SYS_ARCH_PRCTL", + "SYS_ARM_FADVISE64_64", + "SYS_ARM_SYNC_FILE_RANGE", + "SYS_ATGETMSG", + "SYS_ATPGETREQ", + "SYS_ATPGETRSP", + "SYS_ATPSNDREQ", + "SYS_ATPSNDRSP", + 
"SYS_ATPUTMSG", + "SYS_ATSOCKET", + "SYS_AUDIT", + "SYS_AUDITCTL", + "SYS_AUDITON", + "SYS_AUDIT_SESSION_JOIN", + "SYS_AUDIT_SESSION_PORT", + "SYS_AUDIT_SESSION_SELF", + "SYS_BDFLUSH", + "SYS_BIND", + "SYS_BINDAT", + "SYS_BREAK", + "SYS_BRK", + "SYS_BSDTHREAD_CREATE", + "SYS_BSDTHREAD_REGISTER", + "SYS_BSDTHREAD_TERMINATE", + "SYS_CAPGET", + "SYS_CAPSET", + "SYS_CAP_ENTER", + "SYS_CAP_FCNTLS_GET", + "SYS_CAP_FCNTLS_LIMIT", + "SYS_CAP_GETMODE", + "SYS_CAP_GETRIGHTS", + "SYS_CAP_IOCTLS_GET", + "SYS_CAP_IOCTLS_LIMIT", + "SYS_CAP_NEW", + "SYS_CAP_RIGHTS_GET", + "SYS_CAP_RIGHTS_LIMIT", + "SYS_CHDIR", + "SYS_CHFLAGS", + "SYS_CHFLAGSAT", + "SYS_CHMOD", + "SYS_CHMOD_EXTENDED", + "SYS_CHOWN", + "SYS_CHOWN32", + "SYS_CHROOT", + "SYS_CHUD", + "SYS_CLOCK_ADJTIME", + "SYS_CLOCK_GETCPUCLOCKID2", + "SYS_CLOCK_GETRES", + "SYS_CLOCK_GETTIME", + "SYS_CLOCK_NANOSLEEP", + "SYS_CLOCK_SETTIME", + "SYS_CLONE", + "SYS_CLOSE", + "SYS_CLOSEFROM", + "SYS_CLOSE_NOCANCEL", + "SYS_CONNECT", + "SYS_CONNECTAT", + "SYS_CONNECT_NOCANCEL", + "SYS_COPYFILE", + "SYS_CPUSET", + "SYS_CPUSET_GETAFFINITY", + "SYS_CPUSET_GETID", + "SYS_CPUSET_SETAFFINITY", + "SYS_CPUSET_SETID", + "SYS_CREAT", + "SYS_CREATE_MODULE", + "SYS_CSOPS", + "SYS_DELETE", + "SYS_DELETE_MODULE", + "SYS_DUP", + "SYS_DUP2", + "SYS_DUP3", + "SYS_EACCESS", + "SYS_EPOLL_CREATE", + "SYS_EPOLL_CREATE1", + "SYS_EPOLL_CTL", + "SYS_EPOLL_CTL_OLD", + "SYS_EPOLL_PWAIT", + "SYS_EPOLL_WAIT", + "SYS_EPOLL_WAIT_OLD", + "SYS_EVENTFD", + "SYS_EVENTFD2", + "SYS_EXCHANGEDATA", + "SYS_EXECVE", + "SYS_EXIT", + "SYS_EXIT_GROUP", + "SYS_EXTATTRCTL", + "SYS_EXTATTR_DELETE_FD", + "SYS_EXTATTR_DELETE_FILE", + "SYS_EXTATTR_DELETE_LINK", + "SYS_EXTATTR_GET_FD", + "SYS_EXTATTR_GET_FILE", + "SYS_EXTATTR_GET_LINK", + "SYS_EXTATTR_LIST_FD", + "SYS_EXTATTR_LIST_FILE", + "SYS_EXTATTR_LIST_LINK", + "SYS_EXTATTR_SET_FD", + "SYS_EXTATTR_SET_FILE", + "SYS_EXTATTR_SET_LINK", + "SYS_FACCESSAT", + "SYS_FADVISE64", + "SYS_FADVISE64_64", + "SYS_FALLOCATE", + "SYS_FANOTIFY_INIT", + "SYS_FANOTIFY_MARK", + "SYS_FCHDIR", + "SYS_FCHFLAGS", + "SYS_FCHMOD", + "SYS_FCHMODAT", + "SYS_FCHMOD_EXTENDED", + "SYS_FCHOWN", + "SYS_FCHOWN32", + "SYS_FCHOWNAT", + "SYS_FCHROOT", + "SYS_FCNTL", + "SYS_FCNTL64", + "SYS_FCNTL_NOCANCEL", + "SYS_FDATASYNC", + "SYS_FEXECVE", + "SYS_FFCLOCK_GETCOUNTER", + "SYS_FFCLOCK_GETESTIMATE", + "SYS_FFCLOCK_SETESTIMATE", + "SYS_FFSCTL", + "SYS_FGETATTRLIST", + "SYS_FGETXATTR", + "SYS_FHOPEN", + "SYS_FHSTAT", + "SYS_FHSTATFS", + "SYS_FILEPORT_MAKEFD", + "SYS_FILEPORT_MAKEPORT", + "SYS_FKTRACE", + "SYS_FLISTXATTR", + "SYS_FLOCK", + "SYS_FORK", + "SYS_FPATHCONF", + "SYS_FREEBSD6_FTRUNCATE", + "SYS_FREEBSD6_LSEEK", + "SYS_FREEBSD6_MMAP", + "SYS_FREEBSD6_PREAD", + "SYS_FREEBSD6_PWRITE", + "SYS_FREEBSD6_TRUNCATE", + "SYS_FREMOVEXATTR", + "SYS_FSCTL", + "SYS_FSETATTRLIST", + "SYS_FSETXATTR", + "SYS_FSGETPATH", + "SYS_FSTAT", + "SYS_FSTAT64", + "SYS_FSTAT64_EXTENDED", + "SYS_FSTATAT", + "SYS_FSTATAT64", + "SYS_FSTATFS", + "SYS_FSTATFS64", + "SYS_FSTATV", + "SYS_FSTATVFS1", + "SYS_FSTAT_EXTENDED", + "SYS_FSYNC", + "SYS_FSYNC_NOCANCEL", + "SYS_FSYNC_RANGE", + "SYS_FTIME", + "SYS_FTRUNCATE", + "SYS_FTRUNCATE64", + "SYS_FUTEX", + "SYS_FUTIMENS", + "SYS_FUTIMES", + "SYS_FUTIMESAT", + "SYS_GETATTRLIST", + "SYS_GETAUDIT", + "SYS_GETAUDIT_ADDR", + "SYS_GETAUID", + "SYS_GETCONTEXT", + "SYS_GETCPU", + "SYS_GETCWD", + "SYS_GETDENTS", + "SYS_GETDENTS64", + "SYS_GETDIRENTRIES", + "SYS_GETDIRENTRIES64", + "SYS_GETDIRENTRIESATTR", + "SYS_GETDTABLECOUNT", + "SYS_GETDTABLESIZE", + "SYS_GETEGID", + 
"SYS_GETEGID32", + "SYS_GETEUID", + "SYS_GETEUID32", + "SYS_GETFH", + "SYS_GETFSSTAT", + "SYS_GETFSSTAT64", + "SYS_GETGID", + "SYS_GETGID32", + "SYS_GETGROUPS", + "SYS_GETGROUPS32", + "SYS_GETHOSTUUID", + "SYS_GETITIMER", + "SYS_GETLCID", + "SYS_GETLOGIN", + "SYS_GETLOGINCLASS", + "SYS_GETPEERNAME", + "SYS_GETPGID", + "SYS_GETPGRP", + "SYS_GETPID", + "SYS_GETPMSG", + "SYS_GETPPID", + "SYS_GETPRIORITY", + "SYS_GETRESGID", + "SYS_GETRESGID32", + "SYS_GETRESUID", + "SYS_GETRESUID32", + "SYS_GETRLIMIT", + "SYS_GETRTABLE", + "SYS_GETRUSAGE", + "SYS_GETSGROUPS", + "SYS_GETSID", + "SYS_GETSOCKNAME", + "SYS_GETSOCKOPT", + "SYS_GETTHRID", + "SYS_GETTID", + "SYS_GETTIMEOFDAY", + "SYS_GETUID", + "SYS_GETUID32", + "SYS_GETVFSSTAT", + "SYS_GETWGROUPS", + "SYS_GETXATTR", + "SYS_GET_KERNEL_SYMS", + "SYS_GET_MEMPOLICY", + "SYS_GET_ROBUST_LIST", + "SYS_GET_THREAD_AREA", + "SYS_GTTY", + "SYS_IDENTITYSVC", + "SYS_IDLE", + "SYS_INITGROUPS", + "SYS_INIT_MODULE", + "SYS_INOTIFY_ADD_WATCH", + "SYS_INOTIFY_INIT", + "SYS_INOTIFY_INIT1", + "SYS_INOTIFY_RM_WATCH", + "SYS_IOCTL", + "SYS_IOPERM", + "SYS_IOPL", + "SYS_IOPOLICYSYS", + "SYS_IOPRIO_GET", + "SYS_IOPRIO_SET", + "SYS_IO_CANCEL", + "SYS_IO_DESTROY", + "SYS_IO_GETEVENTS", + "SYS_IO_SETUP", + "SYS_IO_SUBMIT", + "SYS_IPC", + "SYS_ISSETUGID", + "SYS_JAIL", + "SYS_JAIL_ATTACH", + "SYS_JAIL_GET", + "SYS_JAIL_REMOVE", + "SYS_JAIL_SET", + "SYS_KDEBUG_TRACE", + "SYS_KENV", + "SYS_KEVENT", + "SYS_KEVENT64", + "SYS_KEXEC_LOAD", + "SYS_KEYCTL", + "SYS_KILL", + "SYS_KLDFIND", + "SYS_KLDFIRSTMOD", + "SYS_KLDLOAD", + "SYS_KLDNEXT", + "SYS_KLDSTAT", + "SYS_KLDSYM", + "SYS_KLDUNLOAD", + "SYS_KLDUNLOADF", + "SYS_KQUEUE", + "SYS_KQUEUE1", + "SYS_KTIMER_CREATE", + "SYS_KTIMER_DELETE", + "SYS_KTIMER_GETOVERRUN", + "SYS_KTIMER_GETTIME", + "SYS_KTIMER_SETTIME", + "SYS_KTRACE", + "SYS_LCHFLAGS", + "SYS_LCHMOD", + "SYS_LCHOWN", + "SYS_LCHOWN32", + "SYS_LGETFH", + "SYS_LGETXATTR", + "SYS_LINK", + "SYS_LINKAT", + "SYS_LIO_LISTIO", + "SYS_LISTEN", + "SYS_LISTXATTR", + "SYS_LLISTXATTR", + "SYS_LOCK", + "SYS_LOOKUP_DCOOKIE", + "SYS_LPATHCONF", + "SYS_LREMOVEXATTR", + "SYS_LSEEK", + "SYS_LSETXATTR", + "SYS_LSTAT", + "SYS_LSTAT64", + "SYS_LSTAT64_EXTENDED", + "SYS_LSTATV", + "SYS_LSTAT_EXTENDED", + "SYS_LUTIMES", + "SYS_MAC_SYSCALL", + "SYS_MADVISE", + "SYS_MADVISE1", + "SYS_MAXSYSCALL", + "SYS_MBIND", + "SYS_MIGRATE_PAGES", + "SYS_MINCORE", + "SYS_MINHERIT", + "SYS_MKCOMPLEX", + "SYS_MKDIR", + "SYS_MKDIRAT", + "SYS_MKDIR_EXTENDED", + "SYS_MKFIFO", + "SYS_MKFIFOAT", + "SYS_MKFIFO_EXTENDED", + "SYS_MKNOD", + "SYS_MKNODAT", + "SYS_MLOCK", + "SYS_MLOCKALL", + "SYS_MMAP", + "SYS_MMAP2", + "SYS_MODCTL", + "SYS_MODFIND", + "SYS_MODFNEXT", + "SYS_MODIFY_LDT", + "SYS_MODNEXT", + "SYS_MODSTAT", + "SYS_MODWATCH", + "SYS_MOUNT", + "SYS_MOVE_PAGES", + "SYS_MPROTECT", + "SYS_MPX", + "SYS_MQUERY", + "SYS_MQ_GETSETATTR", + "SYS_MQ_NOTIFY", + "SYS_MQ_OPEN", + "SYS_MQ_TIMEDRECEIVE", + "SYS_MQ_TIMEDSEND", + "SYS_MQ_UNLINK", + "SYS_MREMAP", + "SYS_MSGCTL", + "SYS_MSGGET", + "SYS_MSGRCV", + "SYS_MSGRCV_NOCANCEL", + "SYS_MSGSND", + "SYS_MSGSND_NOCANCEL", + "SYS_MSGSYS", + "SYS_MSYNC", + "SYS_MSYNC_NOCANCEL", + "SYS_MUNLOCK", + "SYS_MUNLOCKALL", + "SYS_MUNMAP", + "SYS_NAME_TO_HANDLE_AT", + "SYS_NANOSLEEP", + "SYS_NEWFSTATAT", + "SYS_NFSCLNT", + "SYS_NFSSERVCTL", + "SYS_NFSSVC", + "SYS_NFSTAT", + "SYS_NICE", + "SYS_NLSTAT", + "SYS_NMOUNT", + "SYS_NSTAT", + "SYS_NTP_ADJTIME", + "SYS_NTP_GETTIME", + "SYS_OABI_SYSCALL_BASE", + "SYS_OBREAK", + "SYS_OLDFSTAT", + "SYS_OLDLSTAT", + "SYS_OLDOLDUNAME", + "SYS_OLDSTAT", + 
"SYS_OLDUNAME", + "SYS_OPEN", + "SYS_OPENAT", + "SYS_OPENBSD_POLL", + "SYS_OPEN_BY_HANDLE_AT", + "SYS_OPEN_EXTENDED", + "SYS_OPEN_NOCANCEL", + "SYS_OVADVISE", + "SYS_PACCEPT", + "SYS_PATHCONF", + "SYS_PAUSE", + "SYS_PCICONFIG_IOBASE", + "SYS_PCICONFIG_READ", + "SYS_PCICONFIG_WRITE", + "SYS_PDFORK", + "SYS_PDGETPID", + "SYS_PDKILL", + "SYS_PERF_EVENT_OPEN", + "SYS_PERSONALITY", + "SYS_PID_HIBERNATE", + "SYS_PID_RESUME", + "SYS_PID_SHUTDOWN_SOCKETS", + "SYS_PID_SUSPEND", + "SYS_PIPE", + "SYS_PIPE2", + "SYS_PIVOT_ROOT", + "SYS_PMC_CONTROL", + "SYS_PMC_GET_INFO", + "SYS_POLL", + "SYS_POLLTS", + "SYS_POLL_NOCANCEL", + "SYS_POSIX_FADVISE", + "SYS_POSIX_FALLOCATE", + "SYS_POSIX_OPENPT", + "SYS_POSIX_SPAWN", + "SYS_PPOLL", + "SYS_PRCTL", + "SYS_PREAD", + "SYS_PREAD64", + "SYS_PREADV", + "SYS_PREAD_NOCANCEL", + "SYS_PRLIMIT64", + "SYS_PROCCTL", + "SYS_PROCESS_POLICY", + "SYS_PROCESS_VM_READV", + "SYS_PROCESS_VM_WRITEV", + "SYS_PROC_INFO", + "SYS_PROF", + "SYS_PROFIL", + "SYS_PSELECT", + "SYS_PSELECT6", + "SYS_PSET_ASSIGN", + "SYS_PSET_CREATE", + "SYS_PSET_DESTROY", + "SYS_PSYNCH_CVBROAD", + "SYS_PSYNCH_CVCLRPREPOST", + "SYS_PSYNCH_CVSIGNAL", + "SYS_PSYNCH_CVWAIT", + "SYS_PSYNCH_MUTEXDROP", + "SYS_PSYNCH_MUTEXWAIT", + "SYS_PSYNCH_RW_DOWNGRADE", + "SYS_PSYNCH_RW_LONGRDLOCK", + "SYS_PSYNCH_RW_RDLOCK", + "SYS_PSYNCH_RW_UNLOCK", + "SYS_PSYNCH_RW_UNLOCK2", + "SYS_PSYNCH_RW_UPGRADE", + "SYS_PSYNCH_RW_WRLOCK", + "SYS_PSYNCH_RW_YIELDWRLOCK", + "SYS_PTRACE", + "SYS_PUTPMSG", + "SYS_PWRITE", + "SYS_PWRITE64", + "SYS_PWRITEV", + "SYS_PWRITE_NOCANCEL", + "SYS_QUERY_MODULE", + "SYS_QUOTACTL", + "SYS_RASCTL", + "SYS_RCTL_ADD_RULE", + "SYS_RCTL_GET_LIMITS", + "SYS_RCTL_GET_RACCT", + "SYS_RCTL_GET_RULES", + "SYS_RCTL_REMOVE_RULE", + "SYS_READ", + "SYS_READAHEAD", + "SYS_READDIR", + "SYS_READLINK", + "SYS_READLINKAT", + "SYS_READV", + "SYS_READV_NOCANCEL", + "SYS_READ_NOCANCEL", + "SYS_REBOOT", + "SYS_RECV", + "SYS_RECVFROM", + "SYS_RECVFROM_NOCANCEL", + "SYS_RECVMMSG", + "SYS_RECVMSG", + "SYS_RECVMSG_NOCANCEL", + "SYS_REMAP_FILE_PAGES", + "SYS_REMOVEXATTR", + "SYS_RENAME", + "SYS_RENAMEAT", + "SYS_REQUEST_KEY", + "SYS_RESTART_SYSCALL", + "SYS_REVOKE", + "SYS_RFORK", + "SYS_RMDIR", + "SYS_RTPRIO", + "SYS_RTPRIO_THREAD", + "SYS_RT_SIGACTION", + "SYS_RT_SIGPENDING", + "SYS_RT_SIGPROCMASK", + "SYS_RT_SIGQUEUEINFO", + "SYS_RT_SIGRETURN", + "SYS_RT_SIGSUSPEND", + "SYS_RT_SIGTIMEDWAIT", + "SYS_RT_TGSIGQUEUEINFO", + "SYS_SBRK", + "SYS_SCHED_GETAFFINITY", + "SYS_SCHED_GETPARAM", + "SYS_SCHED_GETSCHEDULER", + "SYS_SCHED_GET_PRIORITY_MAX", + "SYS_SCHED_GET_PRIORITY_MIN", + "SYS_SCHED_RR_GET_INTERVAL", + "SYS_SCHED_SETAFFINITY", + "SYS_SCHED_SETPARAM", + "SYS_SCHED_SETSCHEDULER", + "SYS_SCHED_YIELD", + "SYS_SCTP_GENERIC_RECVMSG", + "SYS_SCTP_GENERIC_SENDMSG", + "SYS_SCTP_GENERIC_SENDMSG_IOV", + "SYS_SCTP_PEELOFF", + "SYS_SEARCHFS", + "SYS_SECURITY", + "SYS_SELECT", + "SYS_SELECT_NOCANCEL", + "SYS_SEMCONFIG", + "SYS_SEMCTL", + "SYS_SEMGET", + "SYS_SEMOP", + "SYS_SEMSYS", + "SYS_SEMTIMEDOP", + "SYS_SEM_CLOSE", + "SYS_SEM_DESTROY", + "SYS_SEM_GETVALUE", + "SYS_SEM_INIT", + "SYS_SEM_OPEN", + "SYS_SEM_POST", + "SYS_SEM_TRYWAIT", + "SYS_SEM_UNLINK", + "SYS_SEM_WAIT", + "SYS_SEM_WAIT_NOCANCEL", + "SYS_SEND", + "SYS_SENDFILE", + "SYS_SENDFILE64", + "SYS_SENDMMSG", + "SYS_SENDMSG", + "SYS_SENDMSG_NOCANCEL", + "SYS_SENDTO", + "SYS_SENDTO_NOCANCEL", + "SYS_SETATTRLIST", + "SYS_SETAUDIT", + "SYS_SETAUDIT_ADDR", + "SYS_SETAUID", + "SYS_SETCONTEXT", + "SYS_SETDOMAINNAME", + "SYS_SETEGID", + "SYS_SETEUID", + "SYS_SETFIB", + "SYS_SETFSGID", + 
"SYS_SETFSGID32", + "SYS_SETFSUID", + "SYS_SETFSUID32", + "SYS_SETGID", + "SYS_SETGID32", + "SYS_SETGROUPS", + "SYS_SETGROUPS32", + "SYS_SETHOSTNAME", + "SYS_SETITIMER", + "SYS_SETLCID", + "SYS_SETLOGIN", + "SYS_SETLOGINCLASS", + "SYS_SETNS", + "SYS_SETPGID", + "SYS_SETPRIORITY", + "SYS_SETPRIVEXEC", + "SYS_SETREGID", + "SYS_SETREGID32", + "SYS_SETRESGID", + "SYS_SETRESGID32", + "SYS_SETRESUID", + "SYS_SETRESUID32", + "SYS_SETREUID", + "SYS_SETREUID32", + "SYS_SETRLIMIT", + "SYS_SETRTABLE", + "SYS_SETSGROUPS", + "SYS_SETSID", + "SYS_SETSOCKOPT", + "SYS_SETTID", + "SYS_SETTID_WITH_PID", + "SYS_SETTIMEOFDAY", + "SYS_SETUID", + "SYS_SETUID32", + "SYS_SETWGROUPS", + "SYS_SETXATTR", + "SYS_SET_MEMPOLICY", + "SYS_SET_ROBUST_LIST", + "SYS_SET_THREAD_AREA", + "SYS_SET_TID_ADDRESS", + "SYS_SGETMASK", + "SYS_SHARED_REGION_CHECK_NP", + "SYS_SHARED_REGION_MAP_AND_SLIDE_NP", + "SYS_SHMAT", + "SYS_SHMCTL", + "SYS_SHMDT", + "SYS_SHMGET", + "SYS_SHMSYS", + "SYS_SHM_OPEN", + "SYS_SHM_UNLINK", + "SYS_SHUTDOWN", + "SYS_SIGACTION", + "SYS_SIGALTSTACK", + "SYS_SIGNAL", + "SYS_SIGNALFD", + "SYS_SIGNALFD4", + "SYS_SIGPENDING", + "SYS_SIGPROCMASK", + "SYS_SIGQUEUE", + "SYS_SIGQUEUEINFO", + "SYS_SIGRETURN", + "SYS_SIGSUSPEND", + "SYS_SIGSUSPEND_NOCANCEL", + "SYS_SIGTIMEDWAIT", + "SYS_SIGWAIT", + "SYS_SIGWAITINFO", + "SYS_SOCKET", + "SYS_SOCKETCALL", + "SYS_SOCKETPAIR", + "SYS_SPLICE", + "SYS_SSETMASK", + "SYS_SSTK", + "SYS_STACK_SNAPSHOT", + "SYS_STAT", + "SYS_STAT64", + "SYS_STAT64_EXTENDED", + "SYS_STATFS", + "SYS_STATFS64", + "SYS_STATV", + "SYS_STATVFS1", + "SYS_STAT_EXTENDED", + "SYS_STIME", + "SYS_STTY", + "SYS_SWAPCONTEXT", + "SYS_SWAPCTL", + "SYS_SWAPOFF", + "SYS_SWAPON", + "SYS_SYMLINK", + "SYS_SYMLINKAT", + "SYS_SYNC", + "SYS_SYNCFS", + "SYS_SYNC_FILE_RANGE", + "SYS_SYSARCH", + "SYS_SYSCALL", + "SYS_SYSCALL_BASE", + "SYS_SYSFS", + "SYS_SYSINFO", + "SYS_SYSLOG", + "SYS_TEE", + "SYS_TGKILL", + "SYS_THREAD_SELFID", + "SYS_THR_CREATE", + "SYS_THR_EXIT", + "SYS_THR_KILL", + "SYS_THR_KILL2", + "SYS_THR_NEW", + "SYS_THR_SELF", + "SYS_THR_SET_NAME", + "SYS_THR_SUSPEND", + "SYS_THR_WAKE", + "SYS_TIME", + "SYS_TIMERFD_CREATE", + "SYS_TIMERFD_GETTIME", + "SYS_TIMERFD_SETTIME", + "SYS_TIMER_CREATE", + "SYS_TIMER_DELETE", + "SYS_TIMER_GETOVERRUN", + "SYS_TIMER_GETTIME", + "SYS_TIMER_SETTIME", + "SYS_TIMES", + "SYS_TKILL", + "SYS_TRUNCATE", + "SYS_TRUNCATE64", + "SYS_TUXCALL", + "SYS_UGETRLIMIT", + "SYS_ULIMIT", + "SYS_UMASK", + "SYS_UMASK_EXTENDED", + "SYS_UMOUNT", + "SYS_UMOUNT2", + "SYS_UNAME", + "SYS_UNDELETE", + "SYS_UNLINK", + "SYS_UNLINKAT", + "SYS_UNMOUNT", + "SYS_UNSHARE", + "SYS_USELIB", + "SYS_USTAT", + "SYS_UTIME", + "SYS_UTIMENSAT", + "SYS_UTIMES", + "SYS_UTRACE", + "SYS_UUIDGEN", + "SYS_VADVISE", + "SYS_VFORK", + "SYS_VHANGUP", + "SYS_VM86", + "SYS_VM86OLD", + "SYS_VMSPLICE", + "SYS_VM_PRESSURE_MONITOR", + "SYS_VSERVER", + "SYS_WAIT4", + "SYS_WAIT4_NOCANCEL", + "SYS_WAIT6", + "SYS_WAITEVENT", + "SYS_WAITID", + "SYS_WAITID_NOCANCEL", + "SYS_WAITPID", + "SYS_WATCHEVENT", + "SYS_WORKQ_KERNRETURN", + "SYS_WORKQ_OPEN", + "SYS_WRITE", + "SYS_WRITEV", + "SYS_WRITEV_NOCANCEL", + "SYS_WRITE_NOCANCEL", + "SYS_YIELD", + "SYS__LLSEEK", + "SYS__LWP_CONTINUE", + "SYS__LWP_CREATE", + "SYS__LWP_CTL", + "SYS__LWP_DETACH", + "SYS__LWP_EXIT", + "SYS__LWP_GETNAME", + "SYS__LWP_GETPRIVATE", + "SYS__LWP_KILL", + "SYS__LWP_PARK", + "SYS__LWP_SELF", + "SYS__LWP_SETNAME", + "SYS__LWP_SETPRIVATE", + "SYS__LWP_SUSPEND", + "SYS__LWP_UNPARK", + "SYS__LWP_UNPARK_ALL", + "SYS__LWP_WAIT", + "SYS__LWP_WAKEUP", + "SYS__NEWSELECT", + 
"SYS__PSET_BIND", + "SYS__SCHED_GETAFFINITY", + "SYS__SCHED_GETPARAM", + "SYS__SCHED_SETAFFINITY", + "SYS__SCHED_SETPARAM", + "SYS__SYSCTL", + "SYS__UMTX_LOCK", + "SYS__UMTX_OP", + "SYS__UMTX_UNLOCK", + "SYS___ACL_ACLCHECK_FD", + "SYS___ACL_ACLCHECK_FILE", + "SYS___ACL_ACLCHECK_LINK", + "SYS___ACL_DELETE_FD", + "SYS___ACL_DELETE_FILE", + "SYS___ACL_DELETE_LINK", + "SYS___ACL_GET_FD", + "SYS___ACL_GET_FILE", + "SYS___ACL_GET_LINK", + "SYS___ACL_SET_FD", + "SYS___ACL_SET_FILE", + "SYS___ACL_SET_LINK", + "SYS___CLONE", + "SYS___DISABLE_THREADSIGNAL", + "SYS___GETCWD", + "SYS___GETLOGIN", + "SYS___GET_TCB", + "SYS___MAC_EXECVE", + "SYS___MAC_GETFSSTAT", + "SYS___MAC_GET_FD", + "SYS___MAC_GET_FILE", + "SYS___MAC_GET_LCID", + "SYS___MAC_GET_LCTX", + "SYS___MAC_GET_LINK", + "SYS___MAC_GET_MOUNT", + "SYS___MAC_GET_PID", + "SYS___MAC_GET_PROC", + "SYS___MAC_MOUNT", + "SYS___MAC_SET_FD", + "SYS___MAC_SET_FILE", + "SYS___MAC_SET_LCTX", + "SYS___MAC_SET_LINK", + "SYS___MAC_SET_PROC", + "SYS___MAC_SYSCALL", + "SYS___OLD_SEMWAIT_SIGNAL", + "SYS___OLD_SEMWAIT_SIGNAL_NOCANCEL", + "SYS___POSIX_CHOWN", + "SYS___POSIX_FCHOWN", + "SYS___POSIX_LCHOWN", + "SYS___POSIX_RENAME", + "SYS___PTHREAD_CANCELED", + "SYS___PTHREAD_CHDIR", + "SYS___PTHREAD_FCHDIR", + "SYS___PTHREAD_KILL", + "SYS___PTHREAD_MARKCANCEL", + "SYS___PTHREAD_SIGMASK", + "SYS___QUOTACTL", + "SYS___SEMCTL", + "SYS___SEMWAIT_SIGNAL", + "SYS___SEMWAIT_SIGNAL_NOCANCEL", + "SYS___SETLOGIN", + "SYS___SETUGID", + "SYS___SET_TCB", + "SYS___SIGACTION_SIGTRAMP", + "SYS___SIGTIMEDWAIT", + "SYS___SIGWAIT", + "SYS___SIGWAIT_NOCANCEL", + "SYS___SYSCTL", + "SYS___TFORK", + "SYS___THREXIT", + "SYS___THRSIGDIVERT", + "SYS___THRSLEEP", + "SYS___THRWAKEUP", + "S_ARCH1", + "S_ARCH2", + "S_BLKSIZE", + "S_IEXEC", + "S_IFBLK", + "S_IFCHR", + "S_IFDIR", + "S_IFIFO", + "S_IFLNK", + "S_IFMT", + "S_IFREG", + "S_IFSOCK", + "S_IFWHT", + "S_IREAD", + "S_IRGRP", + "S_IROTH", + "S_IRUSR", + "S_IRWXG", + "S_IRWXO", + "S_IRWXU", + "S_ISGID", + "S_ISTXT", + "S_ISUID", + "S_ISVTX", + "S_IWGRP", + "S_IWOTH", + "S_IWRITE", + "S_IWUSR", + "S_IXGRP", + "S_IXOTH", + "S_IXUSR", + "S_LOGIN_SET", + "SecurityAttributes", + "Seek", + "Select", + "Sendfile", + "Sendmsg", + "SendmsgN", + "Sendto", + "Servent", + "SetBpf", + "SetBpfBuflen", + "SetBpfDatalink", + "SetBpfHeadercmpl", + "SetBpfImmediate", + "SetBpfInterface", + "SetBpfPromisc", + "SetBpfTimeout", + "SetCurrentDirectory", + "SetEndOfFile", + "SetEnvironmentVariable", + "SetFileAttributes", + "SetFileCompletionNotificationModes", + "SetFilePointer", + "SetFileTime", + "SetHandleInformation", + "SetKevent", + "SetLsfPromisc", + "SetNonblock", + "Setdomainname", + "Setegid", + "Setenv", + "Seteuid", + "Setfsgid", + "Setfsuid", + "Setgid", + "Setgroups", + "Sethostname", + "Setlogin", + "Setpgid", + "Setpriority", + "Setprivexec", + "Setregid", + "Setresgid", + "Setresuid", + "Setreuid", + "Setrlimit", + "Setsid", + "Setsockopt", + "SetsockoptByte", + "SetsockoptICMPv6Filter", + "SetsockoptIPMreq", + "SetsockoptIPMreqn", + "SetsockoptIPv6Mreq", + "SetsockoptInet4Addr", + "SetsockoptInt", + "SetsockoptLinger", + "SetsockoptString", + "SetsockoptTimeval", + "Settimeofday", + "Setuid", + "Setxattr", + "Shutdown", + "SidTypeAlias", + "SidTypeComputer", + "SidTypeDeletedAccount", + "SidTypeDomain", + "SidTypeGroup", + "SidTypeInvalid", + "SidTypeLabel", + "SidTypeUnknown", + "SidTypeUser", + "SidTypeWellKnownGroup", + "Signal", + "SizeofBpfHdr", + "SizeofBpfInsn", + "SizeofBpfProgram", + "SizeofBpfStat", + "SizeofBpfVersion", + 
"SizeofBpfZbuf", + "SizeofBpfZbufHeader", + "SizeofCmsghdr", + "SizeofICMPv6Filter", + "SizeofIPMreq", + "SizeofIPMreqn", + "SizeofIPv6MTUInfo", + "SizeofIPv6Mreq", + "SizeofIfAddrmsg", + "SizeofIfAnnounceMsghdr", + "SizeofIfData", + "SizeofIfInfomsg", + "SizeofIfMsghdr", + "SizeofIfaMsghdr", + "SizeofIfmaMsghdr", + "SizeofIfmaMsghdr2", + "SizeofInet4Pktinfo", + "SizeofInet6Pktinfo", + "SizeofInotifyEvent", + "SizeofLinger", + "SizeofMsghdr", + "SizeofNlAttr", + "SizeofNlMsgerr", + "SizeofNlMsghdr", + "SizeofRtAttr", + "SizeofRtGenmsg", + "SizeofRtMetrics", + "SizeofRtMsg", + "SizeofRtMsghdr", + "SizeofRtNexthop", + "SizeofSockFilter", + "SizeofSockFprog", + "SizeofSockaddrAny", + "SizeofSockaddrDatalink", + "SizeofSockaddrInet4", + "SizeofSockaddrInet6", + "SizeofSockaddrLinklayer", + "SizeofSockaddrNetlink", + "SizeofSockaddrUnix", + "SizeofTCPInfo", + "SizeofUcred", + "SlicePtrFromStrings", + "SockFilter", + "SockFprog", + "Sockaddr", + "SockaddrDatalink", + "SockaddrGen", + "SockaddrInet4", + "SockaddrInet6", + "SockaddrLinklayer", + "SockaddrNetlink", + "SockaddrUnix", + "Socket", + "SocketControlMessage", + "SocketDisableIPv6", + "Socketpair", + "Splice", + "StartProcess", + "StartupInfo", + "Stat", + "Stat_t", + "Statfs", + "Statfs_t", + "Stderr", + "Stdin", + "Stdout", + "StringBytePtr", + "StringByteSlice", + "StringSlicePtr", + "StringToSid", + "StringToUTF16", + "StringToUTF16Ptr", + "Symlink", + "Sync", + "SyncFileRange", + "SysProcAttr", + "SysProcIDMap", + "Syscall", + "Syscall12", + "Syscall15", + "Syscall18", + "Syscall6", + "Syscall9", + "Sysctl", + "SysctlUint32", + "Sysctlnode", + "Sysinfo", + "Sysinfo_t", + "Systemtime", + "TCGETS", + "TCIFLUSH", + "TCIOFLUSH", + "TCOFLUSH", + "TCPInfo", + "TCPKeepalive", + "TCP_CA_NAME_MAX", + "TCP_CONGCTL", + "TCP_CONGESTION", + "TCP_CONNECTIONTIMEOUT", + "TCP_CORK", + "TCP_DEFER_ACCEPT", + "TCP_INFO", + "TCP_KEEPALIVE", + "TCP_KEEPCNT", + "TCP_KEEPIDLE", + "TCP_KEEPINIT", + "TCP_KEEPINTVL", + "TCP_LINGER2", + "TCP_MAXBURST", + "TCP_MAXHLEN", + "TCP_MAXOLEN", + "TCP_MAXSEG", + "TCP_MAXWIN", + "TCP_MAX_SACK", + "TCP_MAX_WINSHIFT", + "TCP_MD5SIG", + "TCP_MD5SIG_MAXKEYLEN", + "TCP_MINMSS", + "TCP_MINMSSOVERLOAD", + "TCP_MSS", + "TCP_NODELAY", + "TCP_NOOPT", + "TCP_NOPUSH", + "TCP_NSTATES", + "TCP_QUICKACK", + "TCP_RXT_CONNDROPTIME", + "TCP_RXT_FINDROP", + "TCP_SACK_ENABLE", + "TCP_SYNCNT", + "TCP_VENDOR", + "TCP_WINDOW_CLAMP", + "TCSAFLUSH", + "TCSETS", + "TF_DISCONNECT", + "TF_REUSE_SOCKET", + "TF_USE_DEFAULT_WORKER", + "TF_USE_KERNEL_APC", + "TF_USE_SYSTEM_THREAD", + "TF_WRITE_BEHIND", + "TH32CS_INHERIT", + "TH32CS_SNAPALL", + "TH32CS_SNAPHEAPLIST", + "TH32CS_SNAPMODULE", + "TH32CS_SNAPMODULE32", + "TH32CS_SNAPPROCESS", + "TH32CS_SNAPTHREAD", + "TIME_ZONE_ID_DAYLIGHT", + "TIME_ZONE_ID_STANDARD", + "TIME_ZONE_ID_UNKNOWN", + "TIOCCBRK", + "TIOCCDTR", + "TIOCCONS", + "TIOCDCDTIMESTAMP", + "TIOCDRAIN", + "TIOCDSIMICROCODE", + "TIOCEXCL", + "TIOCEXT", + "TIOCFLAG_CDTRCTS", + "TIOCFLAG_CLOCAL", + "TIOCFLAG_CRTSCTS", + "TIOCFLAG_MDMBUF", + "TIOCFLAG_PPS", + "TIOCFLAG_SOFTCAR", + "TIOCFLUSH", + "TIOCGDEV", + "TIOCGDRAINWAIT", + "TIOCGETA", + "TIOCGETD", + "TIOCGFLAGS", + "TIOCGICOUNT", + "TIOCGLCKTRMIOS", + "TIOCGLINED", + "TIOCGPGRP", + "TIOCGPTN", + "TIOCGQSIZE", + "TIOCGRANTPT", + "TIOCGRS485", + "TIOCGSERIAL", + "TIOCGSID", + "TIOCGSIZE", + "TIOCGSOFTCAR", + "TIOCGTSTAMP", + "TIOCGWINSZ", + "TIOCINQ", + "TIOCIXOFF", + "TIOCIXON", + "TIOCLINUX", + "TIOCMBIC", + "TIOCMBIS", + "TIOCMGDTRWAIT", + "TIOCMGET", + "TIOCMIWAIT", + "TIOCMODG", + 
"TIOCMODS", + "TIOCMSDTRWAIT", + "TIOCMSET", + "TIOCM_CAR", + "TIOCM_CD", + "TIOCM_CTS", + "TIOCM_DCD", + "TIOCM_DSR", + "TIOCM_DTR", + "TIOCM_LE", + "TIOCM_RI", + "TIOCM_RNG", + "TIOCM_RTS", + "TIOCM_SR", + "TIOCM_ST", + "TIOCNOTTY", + "TIOCNXCL", + "TIOCOUTQ", + "TIOCPKT", + "TIOCPKT_DATA", + "TIOCPKT_DOSTOP", + "TIOCPKT_FLUSHREAD", + "TIOCPKT_FLUSHWRITE", + "TIOCPKT_IOCTL", + "TIOCPKT_NOSTOP", + "TIOCPKT_START", + "TIOCPKT_STOP", + "TIOCPTMASTER", + "TIOCPTMGET", + "TIOCPTSNAME", + "TIOCPTYGNAME", + "TIOCPTYGRANT", + "TIOCPTYUNLK", + "TIOCRCVFRAME", + "TIOCREMOTE", + "TIOCSBRK", + "TIOCSCONS", + "TIOCSCTTY", + "TIOCSDRAINWAIT", + "TIOCSDTR", + "TIOCSERCONFIG", + "TIOCSERGETLSR", + "TIOCSERGETMULTI", + "TIOCSERGSTRUCT", + "TIOCSERGWILD", + "TIOCSERSETMULTI", + "TIOCSERSWILD", + "TIOCSER_TEMT", + "TIOCSETA", + "TIOCSETAF", + "TIOCSETAW", + "TIOCSETD", + "TIOCSFLAGS", + "TIOCSIG", + "TIOCSLCKTRMIOS", + "TIOCSLINED", + "TIOCSPGRP", + "TIOCSPTLCK", + "TIOCSQSIZE", + "TIOCSRS485", + "TIOCSSERIAL", + "TIOCSSIZE", + "TIOCSSOFTCAR", + "TIOCSTART", + "TIOCSTAT", + "TIOCSTI", + "TIOCSTOP", + "TIOCSTSTAMP", + "TIOCSWINSZ", + "TIOCTIMESTAMP", + "TIOCUCNTL", + "TIOCVHANGUP", + "TIOCXMTFRAME", + "TOKEN_ADJUST_DEFAULT", + "TOKEN_ADJUST_GROUPS", + "TOKEN_ADJUST_PRIVILEGES", + "TOKEN_ADJUST_SESSIONID", + "TOKEN_ALL_ACCESS", + "TOKEN_ASSIGN_PRIMARY", + "TOKEN_DUPLICATE", + "TOKEN_EXECUTE", + "TOKEN_IMPERSONATE", + "TOKEN_QUERY", + "TOKEN_QUERY_SOURCE", + "TOKEN_READ", + "TOKEN_WRITE", + "TOSTOP", + "TRUNCATE_EXISTING", + "TUNATTACHFILTER", + "TUNDETACHFILTER", + "TUNGETFEATURES", + "TUNGETIFF", + "TUNGETSNDBUF", + "TUNGETVNETHDRSZ", + "TUNSETDEBUG", + "TUNSETGROUP", + "TUNSETIFF", + "TUNSETLINK", + "TUNSETNOCSUM", + "TUNSETOFFLOAD", + "TUNSETOWNER", + "TUNSETPERSIST", + "TUNSETSNDBUF", + "TUNSETTXFILTER", + "TUNSETVNETHDRSZ", + "Tee", + "TerminateProcess", + "Termios", + "Tgkill", + "Time", + "Time_t", + "Times", + "Timespec", + "TimespecToNsec", + "Timeval", + "Timeval32", + "TimevalToNsec", + "Timex", + "Timezoneinformation", + "Tms", + "Token", + "TokenAccessInformation", + "TokenAuditPolicy", + "TokenDefaultDacl", + "TokenElevation", + "TokenElevationType", + "TokenGroups", + "TokenGroupsAndPrivileges", + "TokenHasRestrictions", + "TokenImpersonationLevel", + "TokenIntegrityLevel", + "TokenLinkedToken", + "TokenLogonSid", + "TokenMandatoryPolicy", + "TokenOrigin", + "TokenOwner", + "TokenPrimaryGroup", + "TokenPrivileges", + "TokenRestrictedSids", + "TokenSandBoxInert", + "TokenSessionId", + "TokenSessionReference", + "TokenSource", + "TokenStatistics", + "TokenType", + "TokenUIAccess", + "TokenUser", + "TokenVirtualizationAllowed", + "TokenVirtualizationEnabled", + "Tokenprimarygroup", + "Tokenuser", + "TranslateAccountName", + "TranslateName", + "TransmitFile", + "TransmitFileBuffers", + "Truncate", + "UNIX_PATH_MAX", + "USAGE_MATCH_TYPE_AND", + "USAGE_MATCH_TYPE_OR", + "UTF16FromString", + "UTF16PtrFromString", + "UTF16ToString", + "Ucred", + "Umask", + "Uname", + "Undelete", + "UnixCredentials", + "UnixRights", + "Unlink", + "Unlinkat", + "UnmapViewOfFile", + "Unmount", + "Unsetenv", + "Unshare", + "UserInfo10", + "Ustat", + "Ustat_t", + "Utimbuf", + "Utime", + "Utimes", + "UtimesNano", + "Utsname", + "VDISCARD", + "VDSUSP", + "VEOF", + "VEOL", + "VEOL2", + "VERASE", + "VERASE2", + "VINTR", + "VKILL", + "VLNEXT", + "VMIN", + "VQUIT", + "VREPRINT", + "VSTART", + "VSTATUS", + "VSTOP", + "VSUSP", + "VSWTC", + "VT0", + "VT1", + "VTDLY", + "VTIME", + "VWERASE", + "VirtualLock", + "VirtualUnlock", + 
"WAIT_ABANDONED", + "WAIT_FAILED", + "WAIT_OBJECT_0", + "WAIT_TIMEOUT", + "WALL", + "WALLSIG", + "WALTSIG", + "WCLONE", + "WCONTINUED", + "WCOREFLAG", + "WEXITED", + "WLINUXCLONE", + "WNOHANG", + "WNOTHREAD", + "WNOWAIT", + "WNOZOMBIE", + "WOPTSCHECKED", + "WORDSIZE", + "WSABuf", + "WSACleanup", + "WSADESCRIPTION_LEN", + "WSAData", + "WSAEACCES", + "WSAECONNABORTED", + "WSAECONNRESET", + "WSAEnumProtocols", + "WSAID_CONNECTEX", + "WSAIoctl", + "WSAPROTOCOL_LEN", + "WSAProtocolChain", + "WSAProtocolInfo", + "WSARecv", + "WSARecvFrom", + "WSASYS_STATUS_LEN", + "WSASend", + "WSASendTo", + "WSASendto", + "WSAStartup", + "WSTOPPED", + "WTRAPPED", + "WUNTRACED", + "Wait4", + "WaitForSingleObject", + "WaitStatus", + "Win32FileAttributeData", + "Win32finddata", + "Write", + "WriteConsole", + "WriteFile", + "X509_ASN_ENCODING", + "XCASE", + "XP1_CONNECTIONLESS", + "XP1_CONNECT_DATA", + "XP1_DISCONNECT_DATA", + "XP1_EXPEDITED_DATA", + "XP1_GRACEFUL_CLOSE", + "XP1_GUARANTEED_DELIVERY", + "XP1_GUARANTEED_ORDER", + "XP1_IFS_HANDLES", + "XP1_MESSAGE_ORIENTED", + "XP1_MULTIPOINT_CONTROL_PLANE", + "XP1_MULTIPOINT_DATA_PLANE", + "XP1_PARTIAL_MESSAGE", + "XP1_PSEUDO_STREAM", + "XP1_QOS_SUPPORTED", + "XP1_SAN_SUPPORT_SDP", + "XP1_SUPPORT_BROADCAST", + "XP1_SUPPORT_MULTIPOINT", + "XP1_UNI_RECV", + "XP1_UNI_SEND", + }, + "syscall/js": []string{ + "CopyBytesToGo", + "CopyBytesToJS", + "Error", + "Func", + "FuncOf", + "Global", + "Null", + "Type", + "TypeBoolean", + "TypeFunction", + "TypeNull", + "TypeNumber", + "TypeObject", + "TypeString", + "TypeSymbol", + "TypeUndefined", + "Undefined", + "Value", + "ValueError", + "ValueOf", + "Wrapper", + }, + "testing": []string{ + "AllocsPerRun", + "B", + "Benchmark", + "BenchmarkResult", + "Cover", + "CoverBlock", + "CoverMode", + "Coverage", + "Init", + "InternalBenchmark", + "InternalExample", + "InternalTest", + "M", + "Main", + "MainStart", + "PB", + "RegisterCover", + "RunBenchmarks", + "RunExamples", + "RunTests", + "Short", + "T", + "TB", + "Verbose", + }, + "testing/fstest": []string{ + "MapFS", + "MapFile", + "TestFS", + }, + "testing/iotest": []string{ + "DataErrReader", + "ErrReader", + "ErrTimeout", + "HalfReader", + "NewReadLogger", + "NewWriteLogger", + "OneByteReader", + "TestReader", + "TimeoutReader", + "TruncateWriter", + }, + "testing/quick": []string{ + "Check", + "CheckEqual", + "CheckEqualError", + "CheckError", + "Config", + "Generator", + "SetupError", + "Value", + }, + "text/scanner": []string{ + "Char", + "Comment", + "EOF", + "Float", + "GoTokens", + "GoWhitespace", + "Ident", + "Int", + "Position", + "RawString", + "ScanChars", + "ScanComments", + "ScanFloats", + "ScanIdents", + "ScanInts", + "ScanRawStrings", + "ScanStrings", + "Scanner", + "SkipComments", + "String", + "TokenString", + }, + "text/tabwriter": []string{ + "AlignRight", + "Debug", + "DiscardEmptyColumns", + "Escape", + "FilterHTML", + "NewWriter", + "StripEscape", + "TabIndent", + "Writer", + }, + "text/template": []string{ + "ExecError", + "FuncMap", + "HTMLEscape", + "HTMLEscapeString", + "HTMLEscaper", + "IsTrue", + "JSEscape", + "JSEscapeString", + "JSEscaper", + "Must", + "New", + "ParseFS", + "ParseFiles", + "ParseGlob", + "Template", + "URLQueryEscaper", + }, + "text/template/parse": []string{ + "ActionNode", + "BoolNode", + "BranchNode", + "ChainNode", + "CommandNode", + "CommentNode", + "DotNode", + "FieldNode", + "IdentifierNode", + "IfNode", + "IsEmptyTree", + "ListNode", + "Mode", + "New", + "NewIdentifier", + "NilNode", + "Node", + "NodeAction", + "NodeBool", + 
"NodeChain", + "NodeCommand", + "NodeComment", + "NodeDot", + "NodeField", + "NodeIdentifier", + "NodeIf", + "NodeList", + "NodeNil", + "NodeNumber", + "NodePipe", + "NodeRange", + "NodeString", + "NodeTemplate", + "NodeText", + "NodeType", + "NodeVariable", + "NodeWith", + "NumberNode", + "Parse", + "ParseComments", + "PipeNode", + "Pos", + "RangeNode", + "StringNode", + "TemplateNode", + "TextNode", + "Tree", + "VariableNode", + "WithNode", + }, + "time": []string{ + "ANSIC", + "After", + "AfterFunc", + "April", + "August", + "Date", + "December", + "Duration", + "February", + "FixedZone", + "Friday", + "Hour", + "January", + "July", + "June", + "Kitchen", + "LoadLocation", + "LoadLocationFromTZData", + "Local", + "Location", + "March", + "May", + "Microsecond", + "Millisecond", + "Minute", + "Monday", + "Month", + "Nanosecond", + "NewTicker", + "NewTimer", + "November", + "Now", + "October", + "Parse", + "ParseDuration", + "ParseError", + "ParseInLocation", + "RFC1123", + "RFC1123Z", + "RFC3339", + "RFC3339Nano", + "RFC822", + "RFC822Z", + "RFC850", + "RubyDate", + "Saturday", + "Second", + "September", + "Since", + "Sleep", + "Stamp", + "StampMicro", + "StampMilli", + "StampNano", + "Sunday", + "Thursday", + "Tick", + "Ticker", + "Time", + "Timer", + "Tuesday", + "UTC", + "Unix", + "UnixDate", + "Until", + "Wednesday", + "Weekday", + }, + "unicode": []string{ + "ASCII_Hex_Digit", + "Adlam", + "Ahom", + "Anatolian_Hieroglyphs", + "Arabic", + "Armenian", + "Avestan", + "AzeriCase", + "Balinese", + "Bamum", + "Bassa_Vah", + "Batak", + "Bengali", + "Bhaiksuki", + "Bidi_Control", + "Bopomofo", + "Brahmi", + "Braille", + "Buginese", + "Buhid", + "C", + "Canadian_Aboriginal", + "Carian", + "CaseRange", + "CaseRanges", + "Categories", + "Caucasian_Albanian", + "Cc", + "Cf", + "Chakma", + "Cham", + "Cherokee", + "Chorasmian", + "Co", + "Common", + "Coptic", + "Cs", + "Cuneiform", + "Cypriot", + "Cyrillic", + "Dash", + "Deprecated", + "Deseret", + "Devanagari", + "Diacritic", + "Digit", + "Dives_Akuru", + "Dogra", + "Duployan", + "Egyptian_Hieroglyphs", + "Elbasan", + "Elymaic", + "Ethiopic", + "Extender", + "FoldCategory", + "FoldScript", + "Georgian", + "Glagolitic", + "Gothic", + "Grantha", + "GraphicRanges", + "Greek", + "Gujarati", + "Gunjala_Gondi", + "Gurmukhi", + "Han", + "Hangul", + "Hanifi_Rohingya", + "Hanunoo", + "Hatran", + "Hebrew", + "Hex_Digit", + "Hiragana", + "Hyphen", + "IDS_Binary_Operator", + "IDS_Trinary_Operator", + "Ideographic", + "Imperial_Aramaic", + "In", + "Inherited", + "Inscriptional_Pahlavi", + "Inscriptional_Parthian", + "Is", + "IsControl", + "IsDigit", + "IsGraphic", + "IsLetter", + "IsLower", + "IsMark", + "IsNumber", + "IsOneOf", + "IsPrint", + "IsPunct", + "IsSpace", + "IsSymbol", + "IsTitle", + "IsUpper", + "Javanese", + "Join_Control", + "Kaithi", + "Kannada", + "Katakana", + "Kayah_Li", + "Kharoshthi", + "Khitan_Small_Script", + "Khmer", + "Khojki", + "Khudawadi", + "L", + "Lao", + "Latin", + "Lepcha", + "Letter", + "Limbu", + "Linear_A", + "Linear_B", + "Lisu", + "Ll", + "Lm", + "Lo", + "Logical_Order_Exception", + "Lower", + "LowerCase", + "Lt", + "Lu", + "Lycian", + "Lydian", + "M", + "Mahajani", + "Makasar", + "Malayalam", + "Mandaic", + "Manichaean", + "Marchen", + "Mark", + "Masaram_Gondi", + "MaxASCII", + "MaxCase", + "MaxLatin1", + "MaxRune", + "Mc", + "Me", + "Medefaidrin", + "Meetei_Mayek", + "Mende_Kikakui", + "Meroitic_Cursive", + "Meroitic_Hieroglyphs", + "Miao", + "Mn", + "Modi", + "Mongolian", + "Mro", + "Multani", + "Myanmar", + "N", + 
"Nabataean", + "Nandinagari", + "Nd", + "New_Tai_Lue", + "Newa", + "Nko", + "Nl", + "No", + "Noncharacter_Code_Point", + "Number", + "Nushu", + "Nyiakeng_Puachue_Hmong", + "Ogham", + "Ol_Chiki", + "Old_Hungarian", + "Old_Italic", + "Old_North_Arabian", + "Old_Permic", + "Old_Persian", + "Old_Sogdian", + "Old_South_Arabian", + "Old_Turkic", + "Oriya", + "Osage", + "Osmanya", + "Other", + "Other_Alphabetic", + "Other_Default_Ignorable_Code_Point", + "Other_Grapheme_Extend", + "Other_ID_Continue", + "Other_ID_Start", + "Other_Lowercase", + "Other_Math", + "Other_Uppercase", + "P", + "Pahawh_Hmong", + "Palmyrene", + "Pattern_Syntax", + "Pattern_White_Space", + "Pau_Cin_Hau", + "Pc", + "Pd", + "Pe", + "Pf", + "Phags_Pa", + "Phoenician", + "Pi", + "Po", + "Prepended_Concatenation_Mark", + "PrintRanges", + "Properties", + "Ps", + "Psalter_Pahlavi", + "Punct", + "Quotation_Mark", + "Radical", + "Range16", + "Range32", + "RangeTable", + "Regional_Indicator", + "Rejang", + "ReplacementChar", + "Runic", + "S", + "STerm", + "Samaritan", + "Saurashtra", + "Sc", + "Scripts", + "Sentence_Terminal", + "Sharada", + "Shavian", + "Siddham", + "SignWriting", + "SimpleFold", + "Sinhala", + "Sk", + "Sm", + "So", + "Soft_Dotted", + "Sogdian", + "Sora_Sompeng", + "Soyombo", + "Space", + "SpecialCase", + "Sundanese", + "Syloti_Nagri", + "Symbol", + "Syriac", + "Tagalog", + "Tagbanwa", + "Tai_Le", + "Tai_Tham", + "Tai_Viet", + "Takri", + "Tamil", + "Tangut", + "Telugu", + "Terminal_Punctuation", + "Thaana", + "Thai", + "Tibetan", + "Tifinagh", + "Tirhuta", + "Title", + "TitleCase", + "To", + "ToLower", + "ToTitle", + "ToUpper", + "TurkishCase", + "Ugaritic", + "Unified_Ideograph", + "Upper", + "UpperCase", + "UpperLower", + "Vai", + "Variation_Selector", + "Version", + "Wancho", + "Warang_Citi", + "White_Space", + "Yezidi", + "Yi", + "Z", + "Zanabazar_Square", + "Zl", + "Zp", + "Zs", + }, + "unicode/utf16": []string{ + "Decode", + "DecodeRune", + "Encode", + "EncodeRune", + "IsSurrogate", + }, + "unicode/utf8": []string{ + "DecodeLastRune", + "DecodeLastRuneInString", + "DecodeRune", + "DecodeRuneInString", + "EncodeRune", + "FullRune", + "FullRuneInString", + "MaxRune", + "RuneCount", + "RuneCountInString", + "RuneError", + "RuneLen", + "RuneSelf", + "RuneStart", + "UTFMax", + "Valid", + "ValidRune", + "ValidString", + }, + "unsafe": []string{ + "Alignof", + "ArbitraryType", + "Offsetof", + "Pointer", + "Sizeof", + }, +} diff --git a/vendor/golang.org/x/tools/internal/lsp/fuzzy/input.go b/vendor/golang.org/x/tools/internal/lsp/fuzzy/input.go new file mode 100644 index 000000000..ac377035e --- /dev/null +++ b/vendor/golang.org/x/tools/internal/lsp/fuzzy/input.go @@ -0,0 +1,168 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package fuzzy + +import ( + "unicode" +) + +// RuneRole specifies the role of a rune in the context of an input. +type RuneRole byte + +const ( + // RNone specifies a rune without any role in the input (i.e., whitespace/non-ASCII). + RNone RuneRole = iota + // RSep specifies a rune with the role of segment separator. + RSep + // RTail specifies a rune which is a lower-case tail in a word in the input. + RTail + // RUCTail specifies a rune which is an upper-case tail in a word in the input. + RUCTail + // RHead specifies a rune which is the first character in a word in the input. 
+ RHead +) + +// RuneRoles detects the roles of each byte rune in an input string and stores it in the output +// slice. The rune role depends on the input type. Stops when it parsed all the runes in the string +// or when it filled the output. If output is nil, then it gets created. +func RuneRoles(str string, reuse []RuneRole) []RuneRole { + var output []RuneRole + if cap(reuse) < len(str) { + output = make([]RuneRole, 0, len(str)) + } else { + output = reuse[:0] + } + + prev, prev2 := rtNone, rtNone + for i := 0; i < len(str); i++ { + r := rune(str[i]) + + role := RNone + + curr := rtLower + if str[i] <= unicode.MaxASCII { + curr = runeType(rt[str[i]] - '0') + } + + if curr == rtLower { + if prev == rtNone || prev == rtPunct { + role = RHead + } else { + role = RTail + } + } else if curr == rtUpper { + role = RHead + + if prev == rtUpper { + // This and previous characters are both upper case. + + if i+1 == len(str) { + // This is last character, previous was also uppercase -> this is UCTail + // i.e., (current char is C): aBC / BC / ABC + role = RUCTail + } + } + } else if curr == rtPunct { + switch r { + case '.', ':': + role = RSep + } + } + if curr != rtLower { + if i > 1 && output[i-1] == RHead && prev2 == rtUpper && (output[i-2] == RHead || output[i-2] == RUCTail) { + // The previous two characters were uppercase. The current one is not a lower case, so the + // previous one can't be a HEAD. Make it a UCTail. + // i.e., (last char is current char - B must be a UCTail): ABC / ZABC / AB. + output[i-1] = RUCTail + } + } + + output = append(output, role) + prev2 = prev + prev = curr + } + return output +} + +type runeType byte + +const ( + rtNone runeType = iota + rtPunct + rtLower + rtUpper +) + +const rt = "00000000000000000000000000000000000000000000001122222222221000000333333333333333333333333330000002222222222222222222222222200000" + +// LastSegment returns the substring representing the last segment from the input, where each +// byte has an associated RuneRole in the roles slice. This makes sense only for inputs of Symbol +// or Filename type. +func LastSegment(input string, roles []RuneRole) string { + // Exclude ending separators. + end := len(input) - 1 + for end >= 0 && roles[end] == RSep { + end-- + } + if end < 0 { + return "" + } + + start := end - 1 + for start >= 0 && roles[start] != RSep { + start-- + } + + return input[start+1 : end+1] +} + +// ToLower transforms the input string to lower case, which is stored in the output byte slice. +// The lower casing considers only ASCII values - non ASCII values are left unmodified. +// Stops when parsed all input or when it filled the output slice. If output is nil, then it gets +// created. +func ToLower(input string, reuse []byte) []byte { + output := reuse + if cap(reuse) < len(input) { + output = make([]byte, len(input)) + } + + for i := 0; i < len(input); i++ { + r := rune(input[i]) + if r <= unicode.MaxASCII { + if 'A' <= r && r <= 'Z' { + r += 'a' - 'A' + } + } + output[i] = byte(r) + } + return output[:len(input)] +} + +// WordConsumer defines a consumer for a word delimited by the [start,end) byte offsets in an input +// (start is inclusive, end is exclusive). +type WordConsumer func(start, end int) + +// Words find word delimiters in an input based on its bytes' mappings to rune roles. The offset +// delimiters for each word are fed to the provided consumer function. 
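+//
+// A small illustrative sketch, assuming a camelCase identifier such as "fooBar"
+// (the identifier and the words shown are examples chosen here, not upstream
+// documentation): Words splits the input at the head runes computed by RuneRoles.
+//
+//	roles := RuneRoles("fooBar", nil)
+//	Words(roles, func(start, end int) {
+//		_ = "fooBar"[start:end] // example output: "foo", then "Bar"
+//	})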
+func Words(roles []RuneRole, consume WordConsumer) { + var wordStart int + for i, r := range roles { + switch r { + case RUCTail, RTail: + case RHead, RNone, RSep: + if i != wordStart { + consume(wordStart, i) + } + wordStart = i + if r != RHead { + // Skip this character. + wordStart = i + 1 + } + } + } + if wordStart != len(roles) { + consume(wordStart, len(roles)) + } +} diff --git a/vendor/golang.org/x/tools/internal/lsp/fuzzy/matcher.go b/vendor/golang.org/x/tools/internal/lsp/fuzzy/matcher.go new file mode 100644 index 000000000..16a643097 --- /dev/null +++ b/vendor/golang.org/x/tools/internal/lsp/fuzzy/matcher.go @@ -0,0 +1,398 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package fuzzy implements a fuzzy matching algorithm. +package fuzzy + +import ( + "bytes" + "fmt" +) + +const ( + // MaxInputSize is the maximum size of the input scored against the fuzzy matcher. Longer inputs + // will be truncated to this size. + MaxInputSize = 127 + // MaxPatternSize is the maximum size of the pattern used to construct the fuzzy matcher. Longer + // inputs are truncated to this size. + MaxPatternSize = 63 +) + +type scoreVal int + +func (s scoreVal) val() int { + return int(s) >> 1 +} + +func (s scoreVal) prevK() int { + return int(s) & 1 +} + +func score(val int, prevK int /*0 or 1*/) scoreVal { + return scoreVal(val<<1 + prevK) +} + +// Matcher implements a fuzzy matching algorithm for scoring candidates against a pattern. +// The matcher does not support parallel usage. +type Matcher struct { + pattern string + patternLower []byte // lower-case version of the pattern + patternShort []byte // first characters of the pattern + caseSensitive bool // set if the pattern is mix-cased + + patternRoles []RuneRole // the role of each character in the pattern + roles []RuneRole // the role of each character in the tested string + + scores [MaxInputSize + 1][MaxPatternSize + 1][2]scoreVal + + scoreScale float32 + + lastCandidateLen int // in bytes + lastCandidateMatched bool + + // Here we save the last candidate in lower-case. This is basically a byte slice we reuse for + // performance reasons, so the slice is not reallocated for every candidate. + lowerBuf [MaxInputSize]byte + rolesBuf [MaxInputSize]RuneRole +} + +func (m *Matcher) bestK(i, j int) int { + if m.scores[i][j][0].val() < m.scores[i][j][1].val() { + return 1 + } + return 0 +} + +// NewMatcher returns a new fuzzy matcher for scoring candidates against the provided pattern. +func NewMatcher(pattern string) *Matcher { + if len(pattern) > MaxPatternSize { + pattern = pattern[:MaxPatternSize] + } + + m := &Matcher{ + pattern: pattern, + patternLower: ToLower(pattern, nil), + } + + for i, c := range m.patternLower { + if pattern[i] != c { + m.caseSensitive = true + break + } + } + + if len(pattern) > 3 { + m.patternShort = m.patternLower[:3] + } else { + m.patternShort = m.patternLower + } + + m.patternRoles = RuneRoles(pattern, nil) + + if len(pattern) > 0 { + maxCharScore := 4 + m.scoreScale = 1 / float32(maxCharScore*len(pattern)) + } + + return m +} + +// Score returns the score returned by matching the candidate to the pattern. +// This is not designed for parallel use. Multiple candidates must be scored sequentially. +// Returns a score between 0 and 1 (0 - no match, 1 - perfect match). 
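+//
+// A minimal usage sketch, assuming a hypothetical pattern "fprintf" and a few
+// hypothetical candidates (the pattern and candidates are illustrative only):
+//
+//	m := NewMatcher("fprintf")
+//	m.Score("Fprintf")     // 1: matches and has the pattern's length, treated as a perfect match
+//	m.Score("fmt.Fprintf") // a partial-match score between 0 and 1
+//	m.Score("Sprintf")     // 0: the pattern is not a subsequence of the candidate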
+func (m *Matcher) Score(candidate string) float32 { + if len(candidate) > MaxInputSize { + candidate = candidate[:MaxInputSize] + } + lower := ToLower(candidate, m.lowerBuf[:]) + m.lastCandidateLen = len(candidate) + + if len(m.pattern) == 0 { + // Empty patterns perfectly match candidates. + return 1 + } + + if m.match(candidate, lower) { + sc := m.computeScore(candidate, lower) + if sc > minScore/2 && !m.poorMatch() { + m.lastCandidateMatched = true + if len(m.pattern) == len(candidate) { + // Perfect match. + return 1 + } + + if sc < 0 { + sc = 0 + } + normalizedScore := float32(sc) * m.scoreScale + if normalizedScore > 1 { + normalizedScore = 1 + } + + return normalizedScore + } + } + + m.lastCandidateMatched = false + return 0 +} + +const minScore = -10000 + +// MatchedRanges returns matches ranges for the last scored string as a flattened array of +// [begin, end) byte offset pairs. +func (m *Matcher) MatchedRanges() []int { + if len(m.pattern) == 0 || !m.lastCandidateMatched { + return nil + } + i, j := m.lastCandidateLen, len(m.pattern) + if m.scores[i][j][0].val() < minScore/2 && m.scores[i][j][1].val() < minScore/2 { + return nil + } + + var ret []int + k := m.bestK(i, j) + for i > 0 { + take := (k == 1) + k = m.scores[i][j][k].prevK() + if take { + if len(ret) == 0 || ret[len(ret)-1] != i { + ret = append(ret, i) + ret = append(ret, i-1) + } else { + ret[len(ret)-1] = i - 1 + } + j-- + } + i-- + } + // Reverse slice. + for i := 0; i < len(ret)/2; i++ { + ret[i], ret[len(ret)-1-i] = ret[len(ret)-1-i], ret[i] + } + return ret +} + +func (m *Matcher) match(candidate string, candidateLower []byte) bool { + i, j := 0, 0 + for ; i < len(candidateLower) && j < len(m.patternLower); i++ { + if candidateLower[i] == m.patternLower[j] { + j++ + } + } + if j != len(m.patternLower) { + return false + } + + // The input passes the simple test against pattern, so it is time to classify its characters. + // Character roles are used below to find the last segment. + m.roles = RuneRoles(candidate, m.rolesBuf[:]) + + return true +} + +func (m *Matcher) computeScore(candidate string, candidateLower []byte) int { + pattLen, candLen := len(m.pattern), len(candidate) + + for j := 0; j <= len(m.pattern); j++ { + m.scores[0][j][0] = minScore << 1 + m.scores[0][j][1] = minScore << 1 + } + m.scores[0][0][0] = score(0, 0) // Start with 0. + + segmentsLeft, lastSegStart := 1, 0 + for i := 0; i < candLen; i++ { + if m.roles[i] == RSep { + segmentsLeft++ + lastSegStart = i + 1 + } + } + + // A per-character bonus for a consecutive match. + consecutiveBonus := 2 + wordIdx := 0 // Word count within segment. + for i := 1; i <= candLen; i++ { + + role := m.roles[i-1] + isHead := role == RHead + + if isHead { + wordIdx++ + } else if role == RSep && segmentsLeft > 1 { + wordIdx = 0 + segmentsLeft-- + } + + var skipPenalty int + if i == 1 || (i-1) == lastSegStart { + // Skipping the start of first or last segment. + skipPenalty++ + } + + for j := 0; j <= pattLen; j++ { + // By default, we don't have a match. Fill in the skip data. + m.scores[i][j][1] = minScore << 1 + + // Compute the skip score. + k := 0 + if m.scores[i-1][j][0].val() < m.scores[i-1][j][1].val() { + k = 1 + } + + skipScore := m.scores[i-1][j][k].val() + // Do not penalize missing characters after the last matched segment. + if j != pattLen { + skipScore -= skipPenalty + } + m.scores[i][j][0] = score(skipScore, k) + + if j == 0 || candidateLower[i-1] != m.patternLower[j-1] { + // Not a match. 
+ continue + } + pRole := m.patternRoles[j-1] + + if role == RTail && pRole == RHead { + if j > 1 { + // Not a match: a head in the pattern matches a tail character in the candidate. + continue + } + // Special treatment for the first character of the pattern. We allow + // matches in the middle of a word if they are long enough, at least + // min(3, pattern.length) characters. + if !bytes.HasPrefix(candidateLower[i-1:], m.patternShort) { + continue + } + } + + // Compute the char score. + var charScore int + // Bonus 1: the char is in the candidate's last segment. + if segmentsLeft <= 1 { + charScore++ + } + // Bonus 2: Case match or a Head in the pattern aligns with one in the word. + // Single-case patterns lack segmentation signals and we assume any character + // can be a head of a segment. + if candidate[i-1] == m.pattern[j-1] || role == RHead && (!m.caseSensitive || pRole == RHead) { + charScore++ + } + + // Penalty 1: pattern char is Head, candidate char is Tail. + if role == RTail && pRole == RHead { + charScore-- + } + // Penalty 2: first pattern character matched in the middle of a word. + if j == 1 && role == RTail { + charScore -= 4 + } + + // Third dimension encodes whether there is a gap between the previous match and the current + // one. + for k := 0; k < 2; k++ { + sc := m.scores[i-1][j-1][k].val() + charScore + + isConsecutive := k == 1 || i-1 == 0 || i-1 == lastSegStart + if isConsecutive { + // Bonus 3: a consecutive match. First character match also gets a bonus to + // ensure prefix final match score normalizes to 1.0. + // Logically, this is a part of charScore, but we have to compute it here because it + // only applies for consecutive matches (k == 1). + sc += consecutiveBonus + } + if k == 0 { + // Penalty 3: Matching inside a segment (and previous char wasn't matched). Penalize for the lack + // of alignment. + if role == RTail || role == RUCTail { + sc -= 3 + } + } + + if sc > m.scores[i][j][1].val() { + m.scores[i][j][1] = score(sc, k) + } + } + } + } + + result := m.scores[len(candidate)][len(m.pattern)][m.bestK(len(candidate), len(m.pattern))].val() + + return result +} + +// ScoreTable returns the score table computed for the provided candidate. Used only for debugging. 
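
NewMatcher, Score, and MatchedRanges above are the whole surface of the matcher: build a matcher once per pattern, then score candidates one at a time (the struct reuses internal buffers, so it is not safe for concurrent use). The package sits under internal/ and cannot be imported by third-party code, so the following is only an illustrative sketch of how the API is driven, using the identifiers from this file:

    // Illustrative only: fuzzy is an internal package of golang.org/x/tools
    // and is not importable from outside that module.
    package main

    import (
        "fmt"

        "golang.org/x/tools/internal/lsp/fuzzy"
    )

    func main() {
        m := fuzzy.NewMatcher("fmtpr") // pattern typed by the user

        for _, cand := range []string{"fmt.Println", "format.Parse", "os.Open"} {
            score := m.Score(cand) // 0 = no match, 1 = perfect match
            // MatchedRanges reports [begin, end) byte offsets for the
            // candidate scored most recently.
            fmt.Printf("%-14s score=%.2f ranges=%v\n", cand, score, m.MatchedRanges())
        }
    }
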
+func (m *Matcher) ScoreTable(candidate string) string { + var buf bytes.Buffer + + var line1, line2, separator bytes.Buffer + line1.WriteString("\t") + line2.WriteString("\t") + for j := 0; j < len(m.pattern); j++ { + line1.WriteString(fmt.Sprintf("%c\t\t", m.pattern[j])) + separator.WriteString("----------------") + } + + buf.WriteString(line1.String()) + buf.WriteString("\n") + buf.WriteString(separator.String()) + buf.WriteString("\n") + + for i := 1; i <= len(candidate); i++ { + line1.Reset() + line2.Reset() + + line1.WriteString(fmt.Sprintf("%c\t", candidate[i-1])) + line2.WriteString("\t") + + for j := 1; j <= len(m.pattern); j++ { + line1.WriteString(fmt.Sprintf("M%6d(%c)\t", m.scores[i][j][0].val(), dir(m.scores[i][j][0].prevK()))) + line2.WriteString(fmt.Sprintf("H%6d(%c)\t", m.scores[i][j][1].val(), dir(m.scores[i][j][1].prevK()))) + } + buf.WriteString(line1.String()) + buf.WriteString("\n") + buf.WriteString(line2.String()) + buf.WriteString("\n") + buf.WriteString(separator.String()) + buf.WriteString("\n") + } + + return buf.String() +} + +func dir(prevK int) rune { + if prevK == 0 { + return 'M' + } + return 'H' +} + +func (m *Matcher) poorMatch() bool { + if len(m.pattern) < 2 { + return false + } + + i, j := m.lastCandidateLen, len(m.pattern) + k := m.bestK(i, j) + + var counter, len int + for i > 0 { + take := (k == 1) + k = m.scores[i][j][k].prevK() + if take { + len++ + if k == 0 && len < 3 && m.roles[i-1] == RTail { + // Short match in the middle of a word + counter++ + if counter > 1 { + return true + } + } + j-- + } else { + len = 0 + } + i-- + } + return false +} diff --git a/vendor/golang.org/x/tools/internal/packagesinternal/packages.go b/vendor/golang.org/x/tools/internal/packagesinternal/packages.go new file mode 100644 index 000000000..9702094c5 --- /dev/null +++ b/vendor/golang.org/x/tools/internal/packagesinternal/packages.go @@ -0,0 +1,28 @@ +// Copyright 2020 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package packagesinternal exposes internal-only fields from go/packages. +package packagesinternal + +import ( + "golang.org/x/tools/internal/gocommand" +) + +var GetForTest = func(p interface{}) string { return "" } +var GetDepsErrors = func(p interface{}) []*PackageError { return nil } + +type PackageError struct { + ImportStack []string // shortest path from package named on command line to this one + Pos string // position of error (if present, file:line:col) + Err string // the error itself +} + +var GetGoCmdRunner = func(config interface{}) *gocommand.Runner { return nil } + +var SetGoCmdRunner = func(config interface{}, runner *gocommand.Runner) {} + +var TypecheckCgo int + +var SetModFlag = func(config interface{}, value string) {} +var SetModFile = func(config interface{}, value string) {} diff --git a/vendor/golang.org/x/tools/internal/typesinternal/errorcode.go b/vendor/golang.org/x/tools/internal/typesinternal/errorcode.go new file mode 100644 index 000000000..fa2834e2a --- /dev/null +++ b/vendor/golang.org/x/tools/internal/typesinternal/errorcode.go @@ -0,0 +1,1368 @@ +// Copyright 2020 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package typesinternal + +//go:generate stringer -type=ErrorCode + +type ErrorCode int + +// This file defines the error codes that can be produced during type-checking. 
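
The packagesinternal file above relies on a recurring x/tools pattern: exported package-level function variables (GetForTest, GetDepsErrors, SetModFlag, and so on) start out as harmless stubs and are overwritten by go/packages at init time, which gives internal callers access to unexported state without widening the public API. A reduced, single-file sketch of the idea; getHidden and pkg are invented names for illustration:

    package main

    import "fmt"

    // getHidden plays the role of packagesinternal.GetDepsErrors: a
    // package-level function variable that defaults to a harmless stub.
    var getHidden = func(p interface{}) string { return "" }

    // pkg stands in for the public type whose unexported field we expose.
    type pkg struct{ hidden string }

    // In the real code the assignment happens in go/packages' init, so the
    // internal package can read unexported state without a public accessor.
    func init() {
        getHidden = func(p interface{}) string { return p.(*pkg).hidden }
    }

    func main() {
        fmt.Println(getHidden(&pkg{hidden: "only visible through the hook"}))
    }
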
+// Collectively, these codes provide an identifier that may be used to +// implement special handling for certain types of errors. +// +// Error codes should be fine-grained enough that the exact nature of the error +// can be easily determined, but coarse enough that they are not an +// implementation detail of the type checking algorithm. As a rule-of-thumb, +// errors should be considered equivalent if there is a theoretical refactoring +// of the type checker in which they are emitted in exactly one place. For +// example, the type checker emits different error messages for "too many +// arguments" and "too few arguments", but one can imagine an alternative type +// checker where this check instead just emits a single "wrong number of +// arguments", so these errors should have the same code. +// +// Error code names should be as brief as possible while retaining accuracy and +// distinctiveness. In most cases names should start with an adjective +// describing the nature of the error (e.g. "invalid", "unused", "misplaced"), +// and end with a noun identifying the relevant language object. For example, +// "DuplicateDecl" or "InvalidSliceExpr". For brevity, naming follows the +// convention that "bad" implies a problem with syntax, and "invalid" implies a +// problem with types. + +const ( + _ ErrorCode = iota + + // Test is reserved for errors that only apply while in self-test mode. + Test + + /* package names */ + + // BlankPkgName occurs when a package name is the blank identifier "_". + // + // Per the spec: + // "The PackageName must not be the blank identifier." + BlankPkgName + + // MismatchedPkgName occurs when a file's package name doesn't match the + // package name already established by other files. + MismatchedPkgName + + // InvalidPkgUse occurs when a package identifier is used outside of a + // selector expression. + // + // Example: + // import "fmt" + // + // var _ = fmt + InvalidPkgUse + + /* imports */ + + // BadImportPath occurs when an import path is not valid. + BadImportPath + + // BrokenImport occurs when importing a package fails. + // + // Example: + // import "amissingpackage" + BrokenImport + + // ImportCRenamed occurs when the special import "C" is renamed. "C" is a + // pseudo-package, and must not be renamed. + // + // Example: + // import _ "C" + ImportCRenamed + + // UnusedImport occurs when an import is unused. + // + // Example: + // import "fmt" + // + // func main() {} + UnusedImport + + /* initialization */ + + // InvalidInitCycle occurs when an invalid cycle is detected within the + // initialization graph. + // + // Example: + // var x int = f() + // + // func f() int { return x } + InvalidInitCycle + + /* decls */ + + // DuplicateDecl occurs when an identifier is declared multiple times. + // + // Example: + // var x = 1 + // var x = 2 + DuplicateDecl + + // InvalidDeclCycle occurs when a declaration cycle is not valid. + // + // Example: + // import "unsafe" + // + // type T struct { + // a [n]int + // } + // + // var n = unsafe.Sizeof(T{}) + InvalidDeclCycle + + // InvalidTypeCycle occurs when a cycle in type definitions results in a + // type that is not well-defined. + // + // Example: + // import "unsafe" + // + // type T [unsafe.Sizeof(T{})]int + InvalidTypeCycle + + /* decls > const */ + + // InvalidConstInit occurs when a const declaration has a non-constant + // initializer. 
+ // + // Example: + // var x int + // const _ = x + InvalidConstInit + + // InvalidConstVal occurs when a const value cannot be converted to its + // target type. + // + // TODO(findleyr): this error code and example are not very clear. Consider + // removing it. + // + // Example: + // const _ = 1 << "hello" + InvalidConstVal + + // InvalidConstType occurs when the underlying type in a const declaration + // is not a valid constant type. + // + // Example: + // const c *int = 4 + InvalidConstType + + /* decls > var (+ other variable assignment codes) */ + + // UntypedNil occurs when the predeclared (untyped) value nil is used to + // initialize a variable declared without an explicit type. + // + // Example: + // var x = nil + UntypedNil + + // WrongAssignCount occurs when the number of values on the right-hand side + // of an assignment or or initialization expression does not match the number + // of variables on the left-hand side. + // + // Example: + // var x = 1, 2 + WrongAssignCount + + // UnassignableOperand occurs when the left-hand side of an assignment is + // not assignable. + // + // Example: + // func f() { + // const c = 1 + // c = 2 + // } + UnassignableOperand + + // NoNewVar occurs when a short variable declaration (':=') does not declare + // new variables. + // + // Example: + // func f() { + // x := 1 + // x := 2 + // } + NoNewVar + + // MultiValAssignOp occurs when an assignment operation (+=, *=, etc) does + // not have single-valued left-hand or right-hand side. + // + // Per the spec: + // "In assignment operations, both the left- and right-hand expression lists + // must contain exactly one single-valued expression" + // + // Example: + // func f() int { + // x, y := 1, 2 + // x, y += 1 + // return x + y + // } + MultiValAssignOp + + // InvalidIfaceAssign occurs when a value of type T is used as an + // interface, but T does not implement a method of the expected interface. + // + // Example: + // type I interface { + // f() + // } + // + // type T int + // + // var x I = T(1) + InvalidIfaceAssign + + // InvalidChanAssign occurs when a chan assignment is invalid. + // + // Per the spec, a value x is assignable to a channel type T if: + // "x is a bidirectional channel value, T is a channel type, x's type V and + // T have identical element types, and at least one of V or T is not a + // defined type." + // + // Example: + // type T1 chan int + // type T2 chan int + // + // var x T1 + // // Invalid assignment because both types are named + // var _ T2 = x + InvalidChanAssign + + // IncompatibleAssign occurs when the type of the right-hand side expression + // in an assignment cannot be assigned to the type of the variable being + // assigned. + // + // Example: + // var x []int + // var _ int = x + IncompatibleAssign + + // UnaddressableFieldAssign occurs when trying to assign to a struct field + // in a map value. + // + // Example: + // func f() { + // m := make(map[string]struct{i int}) + // m["foo"].i = 42 + // } + UnaddressableFieldAssign + + /* decls > type (+ other type expression codes) */ + + // NotAType occurs when the identifier used as the underlying type in a type + // declaration or the right-hand side of a type alias does not denote a type. + // + // Example: + // var S = 2 + // + // type T S + NotAType + + // InvalidArrayLen occurs when an array length is not a constant value. + // + // Example: + // var n = 3 + // var _ = [n]int{} + InvalidArrayLen + + // BlankIfaceMethod occurs when a method name is '_'. 
+ // + // Per the spec: + // "The name of each explicitly specified method must be unique and not + // blank." + // + // Example: + // type T interface { + // _(int) + // } + BlankIfaceMethod + + // IncomparableMapKey occurs when a map key type does not support the == and + // != operators. + // + // Per the spec: + // "The comparison operators == and != must be fully defined for operands of + // the key type; thus the key type must not be a function, map, or slice." + // + // Example: + // var x map[T]int + // + // type T []int + IncomparableMapKey + + // InvalidIfaceEmbed occurs when a non-interface type is embedded in an + // interface. + // + // Example: + // type T struct {} + // + // func (T) m() + // + // type I interface { + // T + // } + InvalidIfaceEmbed + + // InvalidPtrEmbed occurs when an embedded field is of the pointer form *T, + // and T itself is itself a pointer, an unsafe.Pointer, or an interface. + // + // Per the spec: + // "An embedded field must be specified as a type name T or as a pointer to + // a non-interface type name *T, and T itself may not be a pointer type." + // + // Example: + // type T *int + // + // type S struct { + // *T + // } + InvalidPtrEmbed + + /* decls > func and method */ + + // BadRecv occurs when a method declaration does not have exactly one + // receiver parameter. + // + // Example: + // func () _() {} + BadRecv + + // InvalidRecv occurs when a receiver type expression is not of the form T + // or *T, or T is a pointer type. + // + // Example: + // type T struct {} + // + // func (**T) m() {} + InvalidRecv + + // DuplicateFieldAndMethod occurs when an identifier appears as both a field + // and method name. + // + // Example: + // type T struct { + // m int + // } + // + // func (T) m() {} + DuplicateFieldAndMethod + + // DuplicateMethod occurs when two methods on the same receiver type have + // the same name. + // + // Example: + // type T struct {} + // func (T) m() {} + // func (T) m(i int) int { return i } + DuplicateMethod + + /* decls > special */ + + // InvalidBlank occurs when a blank identifier is used as a value or type. + // + // Per the spec: + // "The blank identifier may appear as an operand only on the left-hand side + // of an assignment." + // + // Example: + // var x = _ + InvalidBlank + + // InvalidIota occurs when the predeclared identifier iota is used outside + // of a constant declaration. + // + // Example: + // var x = iota + InvalidIota + + // MissingInitBody occurs when an init function is missing its body. + // + // Example: + // func init() + MissingInitBody + + // InvalidInitSig occurs when an init function declares parameters or + // results. + // + // Example: + // func init() int { return 1 } + InvalidInitSig + + // InvalidInitDecl occurs when init is declared as anything other than a + // function. + // + // Example: + // var init = 1 + InvalidInitDecl + + // InvalidMainDecl occurs when main is declared as anything other than a + // function, in a main package. + InvalidMainDecl + + /* exprs */ + + // TooManyValues occurs when a function returns too many values for the + // expression context in which it is used. + // + // Example: + // func ReturnTwo() (int, int) { + // return 1, 2 + // } + // + // var x = ReturnTwo() + TooManyValues + + // NotAnExpr occurs when a type expression is used where a value expression + // is expected. 
+ // + // Example: + // type T struct {} + // + // func f() { + // T + // } + NotAnExpr + + /* exprs > const */ + + // TruncatedFloat occurs when a float constant is truncated to an integer + // value. + // + // Example: + // var _ int = 98.6 + TruncatedFloat + + // NumericOverflow occurs when a numeric constant overflows its target type. + // + // Example: + // var x int8 = 1000 + NumericOverflow + + /* exprs > operation */ + + // UndefinedOp occurs when an operator is not defined for the type(s) used + // in an operation. + // + // Example: + // var c = "a" - "b" + UndefinedOp + + // MismatchedTypes occurs when operand types are incompatible in a binary + // operation. + // + // Example: + // var a = "hello" + // var b = 1 + // var c = a - b + MismatchedTypes + + // DivByZero occurs when a division operation is provable at compile + // time to be a division by zero. + // + // Example: + // const divisor = 0 + // var x int = 1/divisor + DivByZero + + // NonNumericIncDec occurs when an increment or decrement operator is + // applied to a non-numeric value. + // + // Example: + // func f() { + // var c = "c" + // c++ + // } + NonNumericIncDec + + /* exprs > ptr */ + + // UnaddressableOperand occurs when the & operator is applied to an + // unaddressable expression. + // + // Example: + // var x = &1 + UnaddressableOperand + + // InvalidIndirection occurs when a non-pointer value is indirected via the + // '*' operator. + // + // Example: + // var x int + // var y = *x + InvalidIndirection + + /* exprs > [] */ + + // NonIndexableOperand occurs when an index operation is applied to a value + // that cannot be indexed. + // + // Example: + // var x = 1 + // var y = x[1] + NonIndexableOperand + + // InvalidIndex occurs when an index argument is not of integer type, + // negative, or out-of-bounds. + // + // Example: + // var s = [...]int{1,2,3} + // var x = s[5] + // + // Example: + // var s = []int{1,2,3} + // var _ = s[-1] + // + // Example: + // var s = []int{1,2,3} + // var i string + // var _ = s[i] + InvalidIndex + + // SwappedSliceIndices occurs when constant indices in a slice expression + // are decreasing in value. + // + // Example: + // var _ = []int{1,2,3}[2:1] + SwappedSliceIndices + + /* operators > slice */ + + // NonSliceableOperand occurs when a slice operation is applied to a value + // whose type is not sliceable, or is unaddressable. + // + // Example: + // var x = [...]int{1, 2, 3}[:1] + // + // Example: + // var x = 1 + // var y = 1[:1] + NonSliceableOperand + + // InvalidSliceExpr occurs when a three-index slice expression (a[x:y:z]) is + // applied to a string. + // + // Example: + // var s = "hello" + // var x = s[1:2:3] + InvalidSliceExpr + + /* exprs > shift */ + + // InvalidShiftCount occurs when the right-hand side of a shift operation is + // either non-integer, negative, or too large. + // + // Example: + // var ( + // x string + // y int = 1 << x + // ) + InvalidShiftCount + + // InvalidShiftOperand occurs when the shifted operand is not an integer. + // + // Example: + // var s = "hello" + // var x = s << 2 + InvalidShiftOperand + + /* exprs > chan */ + + // InvalidReceive occurs when there is a channel receive from a value that + // is either not a channel, or is a send-only channel. + // + // Example: + // func f() { + // var x = 1 + // <-x + // } + InvalidReceive + + // InvalidSend occurs when there is a channel send to a value that is not a + // channel, or is a receive-only channel. + // + // Example: + // func f() { + // var x = 1 + // x <- "hello!" 
+ // } + InvalidSend + + /* exprs > literal */ + + // DuplicateLitKey occurs when an index is duplicated in a slice, array, or + // map literal. + // + // Example: + // var _ = []int{0:1, 0:2} + // + // Example: + // var _ = map[string]int{"a": 1, "a": 2} + DuplicateLitKey + + // MissingLitKey occurs when a map literal is missing a key expression. + // + // Example: + // var _ = map[string]int{1} + MissingLitKey + + // InvalidLitIndex occurs when the key in a key-value element of a slice or + // array literal is not an integer constant. + // + // Example: + // var i = 0 + // var x = []string{i: "world"} + InvalidLitIndex + + // OversizeArrayLit occurs when an array literal exceeds its length. + // + // Example: + // var _ = [2]int{1,2,3} + OversizeArrayLit + + // MixedStructLit occurs when a struct literal contains a mix of positional + // and named elements. + // + // Example: + // var _ = struct{i, j int}{i: 1, 2} + MixedStructLit + + // InvalidStructLit occurs when a positional struct literal has an incorrect + // number of values. + // + // Example: + // var _ = struct{i, j int}{1,2,3} + InvalidStructLit + + // MissingLitField occurs when a struct literal refers to a field that does + // not exist on the struct type. + // + // Example: + // var _ = struct{i int}{j: 2} + MissingLitField + + // DuplicateLitField occurs when a struct literal contains duplicated + // fields. + // + // Example: + // var _ = struct{i int}{i: 1, i: 2} + DuplicateLitField + + // UnexportedLitField occurs when a positional struct literal implicitly + // assigns an unexported field of an imported type. + UnexportedLitField + + // InvalidLitField occurs when a field name is not a valid identifier. + // + // Example: + // var _ = struct{i int}{1: 1} + InvalidLitField + + // UntypedLit occurs when a composite literal omits a required type + // identifier. + // + // Example: + // type outer struct{ + // inner struct { i int } + // } + // + // var _ = outer{inner: {1}} + UntypedLit + + // InvalidLit occurs when a composite literal expression does not match its + // type. + // + // Example: + // type P *struct{ + // x int + // } + // var _ = P {} + InvalidLit + + /* exprs > selector */ + + // AmbiguousSelector occurs when a selector is ambiguous. + // + // Example: + // type E1 struct { i int } + // type E2 struct { i int } + // type T struct { E1; E2 } + // + // var x T + // var _ = x.i + AmbiguousSelector + + // UndeclaredImportedName occurs when a package-qualified identifier is + // undeclared by the imported package. + // + // Example: + // import "go/types" + // + // var _ = types.NotAnActualIdentifier + UndeclaredImportedName + + // UnexportedName occurs when a selector refers to an unexported identifier + // of an imported package. + // + // Example: + // import "reflect" + // + // type _ reflect.flag + UnexportedName + + // UndeclaredName occurs when an identifier is not declared in the current + // scope. + // + // Example: + // var x T + UndeclaredName + + // MissingFieldOrMethod occurs when a selector references a field or method + // that does not exist. + // + // Example: + // type T struct {} + // + // var x = T{}.f + MissingFieldOrMethod + + /* exprs > ... */ + + // BadDotDotDotSyntax occurs when a "..." occurs in a context where it is + // not valid. + // + // Example: + // var _ = map[int][...]int{0: {}} + BadDotDotDotSyntax + + // NonVariadicDotDotDot occurs when a "..." is used on the final argument to + // a non-variadic function. 
+ // + // Example: + // func printArgs(s []string) { + // for _, a := range s { + // println(a) + // } + // } + // + // func f() { + // s := []string{"a", "b", "c"} + // printArgs(s...) + // } + NonVariadicDotDotDot + + // MisplacedDotDotDot occurs when a "..." is used somewhere other than the + // final argument to a function call. + // + // Example: + // func printArgs(args ...int) { + // for _, a := range args { + // println(a) + // } + // } + // + // func f() { + // a := []int{1,2,3} + // printArgs(0, a...) + // } + MisplacedDotDotDot + + // InvalidDotDotDotOperand occurs when a "..." operator is applied to a + // single-valued operand. + // + // Example: + // func printArgs(args ...int) { + // for _, a := range args { + // println(a) + // } + // } + // + // func f() { + // a := 1 + // printArgs(a...) + // } + // + // Example: + // func args() (int, int) { + // return 1, 2 + // } + // + // func printArgs(args ...int) { + // for _, a := range args { + // println(a) + // } + // } + // + // func g() { + // printArgs(args()...) + // } + InvalidDotDotDotOperand + + // InvalidDotDotDot occurs when a "..." is used in a non-variadic built-in + // function. + // + // Example: + // var s = []int{1, 2, 3} + // var l = len(s...) + InvalidDotDotDot + + /* exprs > built-in */ + + // UncalledBuiltin occurs when a built-in function is used as a + // function-valued expression, instead of being called. + // + // Per the spec: + // "The built-in functions do not have standard Go types, so they can only + // appear in call expressions; they cannot be used as function values." + // + // Example: + // var _ = copy + UncalledBuiltin + + // InvalidAppend occurs when append is called with a first argument that is + // not a slice. + // + // Example: + // var _ = append(1, 2) + InvalidAppend + + // InvalidCap occurs when an argument to the cap built-in function is not of + // supported type. + // + // See https://golang.org/ref/spec#Lengthand_capacity for information on + // which underlying types are supported as arguments to cap and len. + // + // Example: + // var s = 2 + // var x = cap(s) + InvalidCap + + // InvalidClose occurs when close(...) is called with an argument that is + // not of channel type, or that is a receive-only channel. + // + // Example: + // func f() { + // var x int + // close(x) + // } + InvalidClose + + // InvalidCopy occurs when the arguments are not of slice type or do not + // have compatible type. + // + // See https://golang.org/ref/spec#Appendingand_copying_slices for more + // information on the type requirements for the copy built-in. + // + // Example: + // func f() { + // var x []int + // y := []int64{1,2,3} + // copy(x, y) + // } + InvalidCopy + + // InvalidComplex occurs when the complex built-in function is called with + // arguments with incompatible types. + // + // Example: + // var _ = complex(float32(1), float64(2)) + InvalidComplex + + // InvalidDelete occurs when the delete built-in function is called with a + // first argument that is not a map. + // + // Example: + // func f() { + // m := "hello" + // delete(m, "e") + // } + InvalidDelete + + // InvalidImag occurs when the imag built-in function is called with an + // argument that does not have complex type. + // + // Example: + // var _ = imag(int(1)) + InvalidImag + + // InvalidLen occurs when an argument to the len built-in function is not of + // supported type. 
+ // + // See https://golang.org/ref/spec#Lengthand_capacity for information on + // which underlying types are supported as arguments to cap and len. + // + // Example: + // var s = 2 + // var x = len(s) + InvalidLen + + // SwappedMakeArgs occurs when make is called with three arguments, and its + // length argument is larger than its capacity argument. + // + // Example: + // var x = make([]int, 3, 2) + SwappedMakeArgs + + // InvalidMake occurs when make is called with an unsupported type argument. + // + // See https://golang.org/ref/spec#Makingslices_maps_and_channels for + // information on the types that may be created using make. + // + // Example: + // var x = make(int) + InvalidMake + + // InvalidReal occurs when the real built-in function is called with an + // argument that does not have complex type. + // + // Example: + // var _ = real(int(1)) + InvalidReal + + /* exprs > assertion */ + + // InvalidAssert occurs when a type assertion is applied to a + // value that is not of interface type. + // + // Example: + // var x = 1 + // var _ = x.(float64) + InvalidAssert + + // ImpossibleAssert occurs for a type assertion x.(T) when the value x of + // interface cannot have dynamic type T, due to a missing or mismatching + // method on T. + // + // Example: + // type T int + // + // func (t *T) m() int { return int(*t) } + // + // type I interface { m() int } + // + // var x I + // var _ = x.(T) + ImpossibleAssert + + /* exprs > conversion */ + + // InvalidConversion occurs when the argument type cannot be converted to the + // target. + // + // See https://golang.org/ref/spec#Conversions for the rules of + // convertibility. + // + // Example: + // var x float64 + // var _ = string(x) + InvalidConversion + + // InvalidUntypedConversion occurs when an there is no valid implicit + // conversion from an untyped value satisfying the type constraints of the + // context in which it is used. + // + // Example: + // var _ = 1 + "" + InvalidUntypedConversion + + /* offsetof */ + + // BadOffsetofSyntax occurs when unsafe.Offsetof is called with an argument + // that is not a selector expression. + // + // Example: + // import "unsafe" + // + // var x int + // var _ = unsafe.Offsetof(x) + BadOffsetofSyntax + + // InvalidOffsetof occurs when unsafe.Offsetof is called with a method + // selector, rather than a field selector, or when the field is embedded via + // a pointer. + // + // Per the spec: + // + // "If f is an embedded field, it must be reachable without pointer + // indirections through fields of the struct. " + // + // Example: + // import "unsafe" + // + // type T struct { f int } + // type S struct { *T } + // var s S + // var _ = unsafe.Offsetof(s.f) + // + // Example: + // import "unsafe" + // + // type S struct{} + // + // func (S) m() {} + // + // var s S + // var _ = unsafe.Offsetof(s.m) + InvalidOffsetof + + /* control flow > scope */ + + // UnusedExpr occurs when a side-effect free expression is used as a + // statement. Such a statement has no effect. + // + // Example: + // func f(i int) { + // i*i + // } + UnusedExpr + + // UnusedVar occurs when a variable is declared but unused. + // + // Example: + // func f() { + // x := 1 + // } + UnusedVar + + // MissingReturn occurs when a function with results is missing a return + // statement. + // + // Example: + // func f() int {} + MissingReturn + + // WrongResultCount occurs when a return statement returns an incorrect + // number of values. 
+ // + // Example: + // func ReturnOne() int { + // return 1, 2 + // } + WrongResultCount + + // OutOfScopeResult occurs when the name of a value implicitly returned by + // an empty return statement is shadowed in a nested scope. + // + // Example: + // func factor(n int) (i int) { + // for i := 2; i < n; i++ { + // if n%i == 0 { + // return + // } + // } + // return 0 + // } + OutOfScopeResult + + /* control flow > if */ + + // InvalidCond occurs when an if condition is not a boolean expression. + // + // Example: + // func checkReturn(i int) { + // if i { + // panic("non-zero return") + // } + // } + InvalidCond + + /* control flow > for */ + + // InvalidPostDecl occurs when there is a declaration in a for-loop post + // statement. + // + // Example: + // func f() { + // for i := 0; i < 10; j := 0 {} + // } + InvalidPostDecl + + // InvalidChanRange occurs when a send-only channel used in a range + // expression. + // + // Example: + // func sum(c chan<- int) { + // s := 0 + // for i := range c { + // s += i + // } + // } + InvalidChanRange + + // InvalidIterVar occurs when two iteration variables are used while ranging + // over a channel. + // + // Example: + // func f(c chan int) { + // for k, v := range c { + // println(k, v) + // } + // } + InvalidIterVar + + // InvalidRangeExpr occurs when the type of a range expression is not array, + // slice, string, map, or channel. + // + // Example: + // func f(i int) { + // for j := range i { + // println(j) + // } + // } + InvalidRangeExpr + + /* control flow > switch */ + + // MisplacedBreak occurs when a break statement is not within a for, switch, + // or select statement of the innermost function definition. + // + // Example: + // func f() { + // break + // } + MisplacedBreak + + // MisplacedContinue occurs when a continue statement is not within a for + // loop of the innermost function definition. + // + // Example: + // func sumeven(n int) int { + // proceed := func() { + // continue + // } + // sum := 0 + // for i := 1; i <= n; i++ { + // if i % 2 != 0 { + // proceed() + // } + // sum += i + // } + // return sum + // } + MisplacedContinue + + // MisplacedFallthrough occurs when a fallthrough statement is not within an + // expression switch. + // + // Example: + // func typename(i interface{}) string { + // switch i.(type) { + // case int64: + // fallthrough + // case int: + // return "int" + // } + // return "unsupported" + // } + MisplacedFallthrough + + // DuplicateCase occurs when a type or expression switch has duplicate + // cases. + // + // Example: + // func printInt(i int) { + // switch i { + // case 1: + // println("one") + // case 1: + // println("One") + // } + // } + DuplicateCase + + // DuplicateDefault occurs when a type or expression switch has multiple + // default clauses. + // + // Example: + // func printInt(i int) { + // switch i { + // case 1: + // println("one") + // default: + // println("One") + // default: + // println("1") + // } + // } + DuplicateDefault + + // BadTypeKeyword occurs when a .(type) expression is used anywhere other + // than a type switch. + // + // Example: + // type I interface { + // m() + // } + // var t I + // var _ = t.(type) + BadTypeKeyword + + // InvalidTypeSwitch occurs when .(type) is used on an expression that is + // not of interface type. + // + // Example: + // func f(i int) { + // switch x := i.(type) {} + // } + InvalidTypeSwitch + + // InvalidExprSwitch occurs when a switch expression is not comparable. 
+ // + // Example: + // func _() { + // var a struct{ _ func() } + // switch a /* ERROR cannot switch on a */ { + // } + // } + InvalidExprSwitch + + /* control flow > select */ + + // InvalidSelectCase occurs when a select case is not a channel send or + // receive. + // + // Example: + // func checkChan(c <-chan int) bool { + // select { + // case c: + // return true + // default: + // return false + // } + // } + InvalidSelectCase + + /* control flow > labels and jumps */ + + // UndeclaredLabel occurs when an undeclared label is jumped to. + // + // Example: + // func f() { + // goto L + // } + UndeclaredLabel + + // DuplicateLabel occurs when a label is declared more than once. + // + // Example: + // func f() int { + // L: + // L: + // return 1 + // } + DuplicateLabel + + // MisplacedLabel occurs when a break or continue label is not on a for, + // switch, or select statement. + // + // Example: + // func f() { + // L: + // a := []int{1,2,3} + // for _, e := range a { + // if e > 10 { + // break L + // } + // println(a) + // } + // } + MisplacedLabel + + // UnusedLabel occurs when a label is declared but not used. + // + // Example: + // func f() { + // L: + // } + UnusedLabel + + // JumpOverDecl occurs when a label jumps over a variable declaration. + // + // Example: + // func f() int { + // goto L + // x := 2 + // L: + // x++ + // return x + // } + JumpOverDecl + + // JumpIntoBlock occurs when a forward jump goes to a label inside a nested + // block. + // + // Example: + // func f(x int) { + // goto L + // if x > 0 { + // L: + // print("inside block") + // } + // } + JumpIntoBlock + + /* control flow > calls */ + + // InvalidMethodExpr occurs when a pointer method is called but the argument + // is not addressable. + // + // Example: + // type T struct {} + // + // func (*T) m() int { return 1 } + // + // var _ = T.m(T{}) + InvalidMethodExpr + + // WrongArgCount occurs when too few or too many arguments are passed by a + // function call. + // + // Example: + // func f(i int) {} + // var x = f() + WrongArgCount + + // InvalidCall occurs when an expression is called that is not of function + // type. + // + // Example: + // var x = "x" + // var y = x() + InvalidCall + + /* control flow > suspended */ + + // UnusedResults occurs when a restricted expression-only built-in function + // is suspended via go or defer. Such a suspension discards the results of + // these side-effect free built-in functions, and therefore is ineffectual. + // + // Example: + // func f(a []int) int { + // defer len(a) + // return i + // } + UnusedResults + + // InvalidDefer occurs when a deferred expression is not a function call, + // for example if the expression is a type conversion. + // + // Example: + // func f(i int) int { + // defer int32(i) + // return i + // } + InvalidDefer + + // InvalidGo occurs when a go expression is not a function call, for example + // if the expression is a type conversion. + // + // Example: + // func f(i int) int { + // go int32(i) + // return i + // } + InvalidGo +) diff --git a/vendor/golang.org/x/tools/internal/typesinternal/errorcode_string.go b/vendor/golang.org/x/tools/internal/typesinternal/errorcode_string.go new file mode 100644 index 000000000..3e5842a5f --- /dev/null +++ b/vendor/golang.org/x/tools/internal/typesinternal/errorcode_string.go @@ -0,0 +1,153 @@ +// Code generated by "stringer -type=ErrorCode"; DO NOT EDIT. 
+ +package typesinternal + +import "strconv" + +func _() { + // An "invalid array index" compiler error signifies that the constant values have changed. + // Re-run the stringer command to generate them again. + var x [1]struct{} + _ = x[Test-1] + _ = x[BlankPkgName-2] + _ = x[MismatchedPkgName-3] + _ = x[InvalidPkgUse-4] + _ = x[BadImportPath-5] + _ = x[BrokenImport-6] + _ = x[ImportCRenamed-7] + _ = x[UnusedImport-8] + _ = x[InvalidInitCycle-9] + _ = x[DuplicateDecl-10] + _ = x[InvalidDeclCycle-11] + _ = x[InvalidTypeCycle-12] + _ = x[InvalidConstInit-13] + _ = x[InvalidConstVal-14] + _ = x[InvalidConstType-15] + _ = x[UntypedNil-16] + _ = x[WrongAssignCount-17] + _ = x[UnassignableOperand-18] + _ = x[NoNewVar-19] + _ = x[MultiValAssignOp-20] + _ = x[InvalidIfaceAssign-21] + _ = x[InvalidChanAssign-22] + _ = x[IncompatibleAssign-23] + _ = x[UnaddressableFieldAssign-24] + _ = x[NotAType-25] + _ = x[InvalidArrayLen-26] + _ = x[BlankIfaceMethod-27] + _ = x[IncomparableMapKey-28] + _ = x[InvalidIfaceEmbed-29] + _ = x[InvalidPtrEmbed-30] + _ = x[BadRecv-31] + _ = x[InvalidRecv-32] + _ = x[DuplicateFieldAndMethod-33] + _ = x[DuplicateMethod-34] + _ = x[InvalidBlank-35] + _ = x[InvalidIota-36] + _ = x[MissingInitBody-37] + _ = x[InvalidInitSig-38] + _ = x[InvalidInitDecl-39] + _ = x[InvalidMainDecl-40] + _ = x[TooManyValues-41] + _ = x[NotAnExpr-42] + _ = x[TruncatedFloat-43] + _ = x[NumericOverflow-44] + _ = x[UndefinedOp-45] + _ = x[MismatchedTypes-46] + _ = x[DivByZero-47] + _ = x[NonNumericIncDec-48] + _ = x[UnaddressableOperand-49] + _ = x[InvalidIndirection-50] + _ = x[NonIndexableOperand-51] + _ = x[InvalidIndex-52] + _ = x[SwappedSliceIndices-53] + _ = x[NonSliceableOperand-54] + _ = x[InvalidSliceExpr-55] + _ = x[InvalidShiftCount-56] + _ = x[InvalidShiftOperand-57] + _ = x[InvalidReceive-58] + _ = x[InvalidSend-59] + _ = x[DuplicateLitKey-60] + _ = x[MissingLitKey-61] + _ = x[InvalidLitIndex-62] + _ = x[OversizeArrayLit-63] + _ = x[MixedStructLit-64] + _ = x[InvalidStructLit-65] + _ = x[MissingLitField-66] + _ = x[DuplicateLitField-67] + _ = x[UnexportedLitField-68] + _ = x[InvalidLitField-69] + _ = x[UntypedLit-70] + _ = x[InvalidLit-71] + _ = x[AmbiguousSelector-72] + _ = x[UndeclaredImportedName-73] + _ = x[UnexportedName-74] + _ = x[UndeclaredName-75] + _ = x[MissingFieldOrMethod-76] + _ = x[BadDotDotDotSyntax-77] + _ = x[NonVariadicDotDotDot-78] + _ = x[MisplacedDotDotDot-79] + _ = x[InvalidDotDotDotOperand-80] + _ = x[InvalidDotDotDot-81] + _ = x[UncalledBuiltin-82] + _ = x[InvalidAppend-83] + _ = x[InvalidCap-84] + _ = x[InvalidClose-85] + _ = x[InvalidCopy-86] + _ = x[InvalidComplex-87] + _ = x[InvalidDelete-88] + _ = x[InvalidImag-89] + _ = x[InvalidLen-90] + _ = x[SwappedMakeArgs-91] + _ = x[InvalidMake-92] + _ = x[InvalidReal-93] + _ = x[InvalidAssert-94] + _ = x[ImpossibleAssert-95] + _ = x[InvalidConversion-96] + _ = x[InvalidUntypedConversion-97] + _ = x[BadOffsetofSyntax-98] + _ = x[InvalidOffsetof-99] + _ = x[UnusedExpr-100] + _ = x[UnusedVar-101] + _ = x[MissingReturn-102] + _ = x[WrongResultCount-103] + _ = x[OutOfScopeResult-104] + _ = x[InvalidCond-105] + _ = x[InvalidPostDecl-106] + _ = x[InvalidChanRange-107] + _ = x[InvalidIterVar-108] + _ = x[InvalidRangeExpr-109] + _ = x[MisplacedBreak-110] + _ = x[MisplacedContinue-111] + _ = x[MisplacedFallthrough-112] + _ = x[DuplicateCase-113] + _ = x[DuplicateDefault-114] + _ = x[BadTypeKeyword-115] + _ = x[InvalidTypeSwitch-116] + _ = x[InvalidExprSwitch-117] + _ = x[InvalidSelectCase-118] + _ = x[UndeclaredLabel-119] + 
_ = x[DuplicateLabel-120] + _ = x[MisplacedLabel-121] + _ = x[UnusedLabel-122] + _ = x[JumpOverDecl-123] + _ = x[JumpIntoBlock-124] + _ = x[InvalidMethodExpr-125] + _ = x[WrongArgCount-126] + _ = x[InvalidCall-127] + _ = x[UnusedResults-128] + _ = x[InvalidDefer-129] + _ = x[InvalidGo-130] +} + +const _ErrorCode_name = "TestBlankPkgNameMismatchedPkgNameInvalidPkgUseBadImportPathBrokenImportImportCRenamedUnusedImportInvalidInitCycleDuplicateDeclInvalidDeclCycleInvalidTypeCycleInvalidConstInitInvalidConstValInvalidConstTypeUntypedNilWrongAssignCountUnassignableOperandNoNewVarMultiValAssignOpInvalidIfaceAssignInvalidChanAssignIncompatibleAssignUnaddressableFieldAssignNotATypeInvalidArrayLenBlankIfaceMethodIncomparableMapKeyInvalidIfaceEmbedInvalidPtrEmbedBadRecvInvalidRecvDuplicateFieldAndMethodDuplicateMethodInvalidBlankInvalidIotaMissingInitBodyInvalidInitSigInvalidInitDeclInvalidMainDeclTooManyValuesNotAnExprTruncatedFloatNumericOverflowUndefinedOpMismatchedTypesDivByZeroNonNumericIncDecUnaddressableOperandInvalidIndirectionNonIndexableOperandInvalidIndexSwappedSliceIndicesNonSliceableOperandInvalidSliceExprInvalidShiftCountInvalidShiftOperandInvalidReceiveInvalidSendDuplicateLitKeyMissingLitKeyInvalidLitIndexOversizeArrayLitMixedStructLitInvalidStructLitMissingLitFieldDuplicateLitFieldUnexportedLitFieldInvalidLitFieldUntypedLitInvalidLitAmbiguousSelectorUndeclaredImportedNameUnexportedNameUndeclaredNameMissingFieldOrMethodBadDotDotDotSyntaxNonVariadicDotDotDotMisplacedDotDotDotInvalidDotDotDotOperandInvalidDotDotDotUncalledBuiltinInvalidAppendInvalidCapInvalidCloseInvalidCopyInvalidComplexInvalidDeleteInvalidImagInvalidLenSwappedMakeArgsInvalidMakeInvalidRealInvalidAssertImpossibleAssertInvalidConversionInvalidUntypedConversionBadOffsetofSyntaxInvalidOffsetofUnusedExprUnusedVarMissingReturnWrongResultCountOutOfScopeResultInvalidCondInvalidPostDeclInvalidChanRangeInvalidIterVarInvalidRangeExprMisplacedBreakMisplacedContinueMisplacedFallthroughDuplicateCaseDuplicateDefaultBadTypeKeywordInvalidTypeSwitchInvalidExprSwitchInvalidSelectCaseUndeclaredLabelDuplicateLabelMisplacedLabelUnusedLabelJumpOverDeclJumpIntoBlockInvalidMethodExprWrongArgCountInvalidCallUnusedResultsInvalidDeferInvalidGo" + +var _ErrorCode_index = [...]uint16{0, 4, 16, 33, 46, 59, 71, 85, 97, 113, 126, 142, 158, 174, 189, 205, 215, 231, 250, 258, 274, 292, 309, 327, 351, 359, 374, 390, 408, 425, 440, 447, 458, 481, 496, 508, 519, 534, 548, 563, 578, 591, 600, 614, 629, 640, 655, 664, 680, 700, 718, 737, 749, 768, 787, 803, 820, 839, 853, 864, 879, 892, 907, 923, 937, 953, 968, 985, 1003, 1018, 1028, 1038, 1055, 1077, 1091, 1105, 1125, 1143, 1163, 1181, 1204, 1220, 1235, 1248, 1258, 1270, 1281, 1295, 1308, 1319, 1329, 1344, 1355, 1366, 1379, 1395, 1412, 1436, 1453, 1468, 1478, 1487, 1500, 1516, 1532, 1543, 1558, 1574, 1588, 1604, 1618, 1635, 1655, 1668, 1684, 1698, 1715, 1732, 1749, 1764, 1778, 1792, 1803, 1815, 1828, 1845, 1858, 1869, 1882, 1894, 1903} + +func (i ErrorCode) String() string { + i -= 1 + if i < 0 || i >= ErrorCode(len(_ErrorCode_index)-1) { + return "ErrorCode(" + strconv.FormatInt(int64(i+1), 10) + ")" + } + return _ErrorCode_name[_ErrorCode_index[i]:_ErrorCode_index[i+1]] +} diff --git a/vendor/golang.org/x/tools/internal/typesinternal/types.go b/vendor/golang.org/x/tools/internal/typesinternal/types.go new file mode 100644 index 000000000..c3e1a397d --- /dev/null +++ b/vendor/golang.org/x/tools/internal/typesinternal/types.go @@ -0,0 +1,45 @@ +// Copyright 2020 The Go Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package typesinternal provides access to internal go/types APIs that are not +// yet exported. +package typesinternal + +import ( + "go/token" + "go/types" + "reflect" + "unsafe" +) + +func SetUsesCgo(conf *types.Config) bool { + v := reflect.ValueOf(conf).Elem() + + f := v.FieldByName("go115UsesCgo") + if !f.IsValid() { + f = v.FieldByName("UsesCgo") + if !f.IsValid() { + return false + } + } + + addr := unsafe.Pointer(f.UnsafeAddr()) + *(*bool)(addr) = true + + return true +} + +func ReadGo116ErrorData(terr types.Error) (ErrorCode, token.Pos, token.Pos, bool) { + var data [3]int + // By coincidence all of these fields are ints, which simplifies things. + v := reflect.ValueOf(terr) + for i, name := range []string{"go116code", "go116start", "go116end"} { + f := v.FieldByName(name) + if !f.IsValid() { + return 0, 0, 0, false + } + data[i] = int(f.Int()) + } + return ErrorCode(data[0]), token.Pos(data[1]), token.Pos(data[2]), true +} diff --git a/vendor/golang.org/x/xerrors/LICENSE b/vendor/golang.org/x/xerrors/LICENSE new file mode 100644 index 000000000..e4a47e17f --- /dev/null +++ b/vendor/golang.org/x/xerrors/LICENSE @@ -0,0 +1,27 @@ +Copyright (c) 2019 The Go Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/golang.org/x/xerrors/PATENTS b/vendor/golang.org/x/xerrors/PATENTS new file mode 100644 index 000000000..733099041 --- /dev/null +++ b/vendor/golang.org/x/xerrors/PATENTS @@ -0,0 +1,22 @@ +Additional IP Rights Grant (Patents) + +"This implementation" means the copyrightable works distributed by +Google as part of the Go project. 
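
Returning to typesinternal above: ReadGo116ErrorData is how a tool turns a plain types.Error into one of the ErrorCode values documented earlier, plus a start/end span. The package is internal to x/tools, so the sketch below is illustrative only; it assumes the import were allowed and feeds the type checker a file with a deliberate error:

    // Illustrative only: typesinternal cannot be imported by third-party code.
    package main

    import (
        "fmt"
        "go/ast"
        "go/parser"
        "go/token"
        "go/types"

        "golang.org/x/tools/internal/typesinternal"
    )

    const src = `package p

    var x int = "not an int" // deliberate type error
    `

    func main() {
        fset := token.NewFileSet()
        f, err := parser.ParseFile(fset, "p.go", src, 0)
        if err != nil {
            panic(err)
        }

        conf := types.Config{
            Error: func(e error) {
                terr, ok := e.(types.Error)
                if !ok {
                    return
                }
                // Reflects the unexported error code and span out of
                // types.Error (available on Go 1.16 and later).
                if code, start, end, ok := typesinternal.ReadGo116ErrorData(terr); ok {
                    fmt.Printf("%s: %s [code=%v span=%v..%v]\n",
                        fset.Position(terr.Pos), terr.Msg, code, start, end)
                }
            },
        }
        // Check reports errors through conf.Error; the returned error is
        // ignored here because the callback already printed it.
        conf.Check("p", fset, []*ast.File{f}, nil)
    }
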
+ +Google hereby grants to You a perpetual, worldwide, non-exclusive, +no-charge, royalty-free, irrevocable (except as stated in this section) +patent license to make, have made, use, offer to sell, sell, import, +transfer and otherwise run, modify and propagate the contents of this +implementation of Go, where such license applies only to those patent +claims, both currently owned or controlled by Google and acquired in +the future, licensable by Google that are necessarily infringed by this +implementation of Go. This grant does not include claims that would be +infringed only as a consequence of further modification of this +implementation. If you or your agent or exclusive licensee institute or +order or agree to the institution of patent litigation against any +entity (including a cross-claim or counterclaim in a lawsuit) alleging +that this implementation of Go or any code incorporated within this +implementation of Go constitutes direct or contributory patent +infringement, or inducement of patent infringement, then any patent +rights granted to you under this License for this implementation of Go +shall terminate as of the date such litigation is filed. diff --git a/vendor/golang.org/x/xerrors/README b/vendor/golang.org/x/xerrors/README new file mode 100644 index 000000000..aac7867a5 --- /dev/null +++ b/vendor/golang.org/x/xerrors/README @@ -0,0 +1,2 @@ +This repository holds the transition packages for the new Go 1.13 error values. +See golang.org/design/29934-error-values. diff --git a/vendor/golang.org/x/xerrors/adaptor.go b/vendor/golang.org/x/xerrors/adaptor.go new file mode 100644 index 000000000..4317f2483 --- /dev/null +++ b/vendor/golang.org/x/xerrors/adaptor.go @@ -0,0 +1,193 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package xerrors + +import ( + "bytes" + "fmt" + "io" + "reflect" + "strconv" +) + +// FormatError calls the FormatError method of f with an errors.Printer +// configured according to s and verb, and writes the result to s. +func FormatError(f Formatter, s fmt.State, verb rune) { + // Assuming this function is only called from the Format method, and given + // that FormatError takes precedence over Format, it cannot be called from + // any package that supports errors.Formatter. It is therefore safe to + // disregard that State may be a specific printer implementation and use one + // of our choice instead. + + // limitations: does not support printing error as Go struct. + + var ( + sep = " " // separator before next error + p = &state{State: s} + direct = true + ) + + var err error = f + + switch verb { + // Note that this switch must match the preference order + // for ordinary string printing (%#v before %+v, and so on). + + case 'v': + if s.Flag('#') { + if stringer, ok := err.(fmt.GoStringer); ok { + io.WriteString(&p.buf, stringer.GoString()) + goto exit + } + // proceed as if it were %v + } else if s.Flag('+') { + p.printDetail = true + sep = "\n - " + } + case 's': + case 'q', 'x', 'X': + // Use an intermediate buffer in the rare cases that precision, + // truncation, or one of the alternative verbs (q, x, and X) are + // specified. 
+ direct = false + + default: + p.buf.WriteString("%!") + p.buf.WriteRune(verb) + p.buf.WriteByte('(') + switch { + case err != nil: + p.buf.WriteString(reflect.TypeOf(f).String()) + default: + p.buf.WriteString("") + } + p.buf.WriteByte(')') + io.Copy(s, &p.buf) + return + } + +loop: + for { + switch v := err.(type) { + case Formatter: + err = v.FormatError((*printer)(p)) + case fmt.Formatter: + v.Format(p, 'v') + break loop + default: + io.WriteString(&p.buf, v.Error()) + break loop + } + if err == nil { + break + } + if p.needColon || !p.printDetail { + p.buf.WriteByte(':') + p.needColon = false + } + p.buf.WriteString(sep) + p.inDetail = false + p.needNewline = false + } + +exit: + width, okW := s.Width() + prec, okP := s.Precision() + + if !direct || (okW && width > 0) || okP { + // Construct format string from State s. + format := []byte{'%'} + if s.Flag('-') { + format = append(format, '-') + } + if s.Flag('+') { + format = append(format, '+') + } + if s.Flag(' ') { + format = append(format, ' ') + } + if okW { + format = strconv.AppendInt(format, int64(width), 10) + } + if okP { + format = append(format, '.') + format = strconv.AppendInt(format, int64(prec), 10) + } + format = append(format, string(verb)...) + fmt.Fprintf(s, string(format), p.buf.String()) + } else { + io.Copy(s, &p.buf) + } +} + +var detailSep = []byte("\n ") + +// state tracks error printing state. It implements fmt.State. +type state struct { + fmt.State + buf bytes.Buffer + + printDetail bool + inDetail bool + needColon bool + needNewline bool +} + +func (s *state) Write(b []byte) (n int, err error) { + if s.printDetail { + if len(b) == 0 { + return 0, nil + } + if s.inDetail && s.needColon { + s.needNewline = true + if b[0] == '\n' { + b = b[1:] + } + } + k := 0 + for i, c := range b { + if s.needNewline { + if s.inDetail && s.needColon { + s.buf.WriteByte(':') + s.needColon = false + } + s.buf.Write(detailSep) + s.needNewline = false + } + if c == '\n' { + s.buf.Write(b[k:i]) + k = i + 1 + s.needNewline = true + } + } + s.buf.Write(b[k:]) + if !s.inDetail { + s.needColon = true + } + } else if !s.inDetail { + s.buf.Write(b) + } + return len(b), nil +} + +// printer wraps a state to implement an xerrors.Printer. +type printer state + +func (s *printer) Print(args ...interface{}) { + if !s.inDetail || s.printDetail { + fmt.Fprint((*state)(s), args...) + } +} + +func (s *printer) Printf(format string, args ...interface{}) { + if !s.inDetail || s.printDetail { + fmt.Fprintf((*state)(s), format, args...) + } +} + +func (s *printer) Detail() bool { + s.inDetail = true + return s.printDetail +} diff --git a/vendor/golang.org/x/xerrors/codereview.cfg b/vendor/golang.org/x/xerrors/codereview.cfg new file mode 100644 index 000000000..3f8b14b64 --- /dev/null +++ b/vendor/golang.org/x/xerrors/codereview.cfg @@ -0,0 +1 @@ +issuerepo: golang/go diff --git a/vendor/golang.org/x/xerrors/doc.go b/vendor/golang.org/x/xerrors/doc.go new file mode 100644 index 000000000..eef99d9d5 --- /dev/null +++ b/vendor/golang.org/x/xerrors/doc.go @@ -0,0 +1,22 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package xerrors implements functions to manipulate errors. 
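
adaptor.go above is the formatting engine behind xerrors: an error that implements xerrors.Formatter hands its message (and, on request, extra detail) to a Printer, and FormatError adapts that to fmt's %v and %+v verbs. A short sketch of an error type using this public interface; queryError and its fields are invented for illustration:

    package main

    import (
        "fmt"

        "golang.org/x/xerrors"
    )

    // queryError reports a failing query; with %+v it also shows the SQL text
    // and the frame recorded at construction time.
    type queryError struct {
        msg   string
        sql   string
        frame xerrors.Frame
    }

    func newQueryError(msg, sql string) error {
        return &queryError{msg: msg, sql: sql, frame: xerrors.Caller(1)}
    }

    func (e *queryError) Error() string { return e.msg }

    // Format delegates to xerrors.FormatError, the function defined in
    // adaptor.go above.
    func (e *queryError) Format(s fmt.State, v rune) { xerrors.FormatError(e, s, v) }

    // FormatError always prints the short message; the extra line and the
    // frame only appear when the Printer reports that detail was requested.
    func (e *queryError) FormatError(p xerrors.Printer) (next error) {
        p.Print(e.msg)
        if p.Detail() {
            p.Printf("query: %s", e.sql)
        }
        e.frame.Format(p)
        return nil
    }

    func main() {
        err := newQueryError("query failed", "SELECT 1")
        fmt.Printf("%v\n", err)  // just the message
        fmt.Printf("%+v\n", err) // message plus query text and caller frame
    }

With %v only the message prints; with %+v the Printer switches into detail mode and the extra lines are rendered as the indented block that the state.Write machinery above produces.
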
+// +// This package is based on the Go 2 proposal for error values: +// https://golang.org/design/29934-error-values +// +// These functions were incorporated into the standard library's errors package +// in Go 1.13: +// - Is +// - As +// - Unwrap +// +// Also, Errorf's %w verb was incorporated into fmt.Errorf. +// +// Use this package to get equivalent behavior in all supported Go versions. +// +// No other features of this package were included in Go 1.13, and at present +// there are no plans to include any of them. +package xerrors // import "golang.org/x/xerrors" diff --git a/vendor/golang.org/x/xerrors/errors.go b/vendor/golang.org/x/xerrors/errors.go new file mode 100644 index 000000000..e88d3772d --- /dev/null +++ b/vendor/golang.org/x/xerrors/errors.go @@ -0,0 +1,33 @@ +// Copyright 2011 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package xerrors + +import "fmt" + +// errorString is a trivial implementation of error. +type errorString struct { + s string + frame Frame +} + +// New returns an error that formats as the given text. +// +// The returned error contains a Frame set to the caller's location and +// implements Formatter to show this information when printed with details. +func New(text string) error { + return &errorString{text, Caller(1)} +} + +func (e *errorString) Error() string { + return e.s +} + +func (e *errorString) Format(s fmt.State, v rune) { FormatError(e, s, v) } + +func (e *errorString) FormatError(p Printer) (next error) { + p.Print(e.s) + e.frame.Format(p) + return nil +} diff --git a/vendor/golang.org/x/xerrors/fmt.go b/vendor/golang.org/x/xerrors/fmt.go new file mode 100644 index 000000000..829862ddf --- /dev/null +++ b/vendor/golang.org/x/xerrors/fmt.go @@ -0,0 +1,187 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package xerrors + +import ( + "fmt" + "strings" + "unicode" + "unicode/utf8" + + "golang.org/x/xerrors/internal" +) + +const percentBangString = "%!" + +// Errorf formats according to a format specifier and returns the string as a +// value that satisfies error. +// +// The returned error includes the file and line number of the caller when +// formatted with additional detail enabled. If the last argument is an error +// the returned error's Format method will return it if the format string ends +// with ": %s", ": %v", or ": %w". If the last argument is an error and the +// format string ends with ": %w", the returned error implements an Unwrap +// method returning it. +// +// If the format specifier includes a %w verb with an error operand in a +// position other than at the end, the returned error will still implement an +// Unwrap method returning the operand, but the error's Format method will not +// return the wrapped error. +// +// It is invalid to include more than one %w verb or to supply it with an +// operand that does not implement the error interface. The %w verb is otherwise +// a synonym for %v. +func Errorf(format string, a ...interface{}) error { + format = formatPlusW(format) + // Support a ": %[wsv]" suffix, which works well with xerrors.Formatter. 
+ wrap := strings.HasSuffix(format, ": %w") + idx, format2, ok := parsePercentW(format) + percentWElsewhere := !wrap && idx >= 0 + if !percentWElsewhere && (wrap || strings.HasSuffix(format, ": %s") || strings.HasSuffix(format, ": %v")) { + err := errorAt(a, len(a)-1) + if err == nil { + return &noWrapError{fmt.Sprintf(format, a...), nil, Caller(1)} + } + // TODO: this is not entirely correct. The error value could be + // printed elsewhere in format if it mixes numbered with unnumbered + // substitutions. With relatively small changes to doPrintf we can + // have it optionally ignore extra arguments and pass the argument + // list in its entirety. + msg := fmt.Sprintf(format[:len(format)-len(": %s")], a[:len(a)-1]...) + frame := Frame{} + if internal.EnableTrace { + frame = Caller(1) + } + if wrap { + return &wrapError{msg, err, frame} + } + return &noWrapError{msg, err, frame} + } + // Support %w anywhere. + // TODO: don't repeat the wrapped error's message when %w occurs in the middle. + msg := fmt.Sprintf(format2, a...) + if idx < 0 { + return &noWrapError{msg, nil, Caller(1)} + } + err := errorAt(a, idx) + if !ok || err == nil { + // Too many %ws or argument of %w is not an error. Approximate the Go + // 1.13 fmt.Errorf message. + return &noWrapError{fmt.Sprintf("%sw(%s)", percentBangString, msg), nil, Caller(1)} + } + frame := Frame{} + if internal.EnableTrace { + frame = Caller(1) + } + return &wrapError{msg, err, frame} +} + +func errorAt(args []interface{}, i int) error { + if i < 0 || i >= len(args) { + return nil + } + err, ok := args[i].(error) + if !ok { + return nil + } + return err +} + +// formatPlusW is used to avoid the vet check that will barf at %w. +func formatPlusW(s string) string { + return s +} + +// Return the index of the only %w in format, or -1 if none. +// Also return a rewritten format string with %w replaced by %v, and +// false if there is more than one %w. +// TODO: handle "%[N]w". +func parsePercentW(format string) (idx int, newFormat string, ok bool) { + // Loosely copied from golang.org/x/tools/go/analysis/passes/printf/printf.go. + idx = -1 + ok = true + n := 0 + sz := 0 + var isW bool + for i := 0; i < len(format); i += sz { + if format[i] != '%' { + sz = 1 + continue + } + // "%%" is not a format directive. + if i+1 < len(format) && format[i+1] == '%' { + sz = 2 + continue + } + sz, isW = parsePrintfVerb(format[i:]) + if isW { + if idx >= 0 { + ok = false + } else { + idx = n + } + // "Replace" the last character, the 'w', with a 'v'. + p := i + sz - 1 + format = format[:p] + "v" + format[p+1:] + } + n++ + } + return idx, format, ok +} + +// Parse the printf verb starting with a % at s[0]. +// Return how many bytes it occupies and whether the verb is 'w'. +func parsePrintfVerb(s string) (int, bool) { + // Assume only that the directive is a sequence of non-letters followed by a single letter. 
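+	// For example, parsePrintfVerb("%+3.2w") returns (6, true): the four
+	// non-letter bytes after '%' are scanned before the verb 'w' is reached.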
+ sz := 0 + var r rune + for i := 1; i < len(s); i += sz { + r, sz = utf8.DecodeRuneInString(s[i:]) + if unicode.IsLetter(r) { + return i + sz, r == 'w' + } + } + return len(s), false +} + +type noWrapError struct { + msg string + err error + frame Frame +} + +func (e *noWrapError) Error() string { + return fmt.Sprint(e) +} + +func (e *noWrapError) Format(s fmt.State, v rune) { FormatError(e, s, v) } + +func (e *noWrapError) FormatError(p Printer) (next error) { + p.Print(e.msg) + e.frame.Format(p) + return e.err +} + +type wrapError struct { + msg string + err error + frame Frame +} + +func (e *wrapError) Error() string { + return fmt.Sprint(e) +} + +func (e *wrapError) Format(s fmt.State, v rune) { FormatError(e, s, v) } + +func (e *wrapError) FormatError(p Printer) (next error) { + p.Print(e.msg) + e.frame.Format(p) + return e.err +} + +func (e *wrapError) Unwrap() error { + return e.err +} diff --git a/vendor/golang.org/x/xerrors/format.go b/vendor/golang.org/x/xerrors/format.go new file mode 100644 index 000000000..1bc9c26b9 --- /dev/null +++ b/vendor/golang.org/x/xerrors/format.go @@ -0,0 +1,34 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package xerrors + +// A Formatter formats error messages. +type Formatter interface { + error + + // FormatError prints the receiver's first error and returns the next error in + // the error chain, if any. + FormatError(p Printer) (next error) +} + +// A Printer formats error messages. +// +// The most common implementation of Printer is the one provided by package fmt +// during Printf (as of Go 1.13). Localization packages such as golang.org/x/text/message +// typically provide their own implementations. +type Printer interface { + // Print appends args to the message output. + Print(args ...interface{}) + + // Printf writes a formatted string. + Printf(format string, args ...interface{}) + + // Detail reports whether error detail is requested. + // After the first call to Detail, all text written to the Printer + // is formatted as additional detail, or ignored when + // detail has not been requested. + // If Detail returns false, the caller can avoid printing the detail at all. + Detail() bool +} diff --git a/vendor/golang.org/x/xerrors/frame.go b/vendor/golang.org/x/xerrors/frame.go new file mode 100644 index 000000000..0de628ec5 --- /dev/null +++ b/vendor/golang.org/x/xerrors/frame.go @@ -0,0 +1,56 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package xerrors + +import ( + "runtime" +) + +// A Frame contains part of a call stack. +type Frame struct { + // Make room for three PCs: the one we were asked for, what it called, + // and possibly a PC for skipPleaseUseCallersFrames. See: + // https://go.googlesource.com/go/+/032678e0fb/src/runtime/extern.go#169 + frames [3]uintptr +} + +// Caller returns a Frame that describes a frame on the caller's stack. +// The argument skip is the number of frames to skip over. +// Caller(0) returns the frame for the caller of Caller. +func Caller(skip int) Frame { + var s Frame + runtime.Callers(skip+1, s.frames[:]) + return s +} + +// location reports the file, line, and function of a frame. +// +// The returned function may be "" even if file and line are not. 
+func (f Frame) location() (function, file string, line int) { + frames := runtime.CallersFrames(f.frames[:]) + if _, ok := frames.Next(); !ok { + return "", "", 0 + } + fr, ok := frames.Next() + if !ok { + return "", "", 0 + } + return fr.Function, fr.File, fr.Line +} + +// Format prints the stack as error detail. +// It should be called from an error's Format implementation +// after printing any other error detail. +func (f Frame) Format(p Printer) { + if p.Detail() { + function, file, line := f.location() + if function != "" { + p.Printf("%s\n ", function) + } + if file != "" { + p.Printf("%s:%d\n", file, line) + } + } +} diff --git a/vendor/golang.org/x/xerrors/go.mod b/vendor/golang.org/x/xerrors/go.mod new file mode 100644 index 000000000..870d4f612 --- /dev/null +++ b/vendor/golang.org/x/xerrors/go.mod @@ -0,0 +1,3 @@ +module golang.org/x/xerrors + +go 1.11 diff --git a/vendor/golang.org/x/xerrors/internal/internal.go b/vendor/golang.org/x/xerrors/internal/internal.go new file mode 100644 index 000000000..89f4eca5d --- /dev/null +++ b/vendor/golang.org/x/xerrors/internal/internal.go @@ -0,0 +1,8 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package internal + +// EnableTrace indicates whether stack information should be recorded in errors. +var EnableTrace = true diff --git a/vendor/golang.org/x/xerrors/wrap.go b/vendor/golang.org/x/xerrors/wrap.go new file mode 100644 index 000000000..9a3b51037 --- /dev/null +++ b/vendor/golang.org/x/xerrors/wrap.go @@ -0,0 +1,106 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package xerrors + +import ( + "reflect" +) + +// A Wrapper provides context around another error. +type Wrapper interface { + // Unwrap returns the next error in the error chain. + // If there is no next error, Unwrap returns nil. + Unwrap() error +} + +// Opaque returns an error with the same error formatting as err +// but that does not match err and cannot be unwrapped. +func Opaque(err error) error { + return noWrapper{err} +} + +type noWrapper struct { + error +} + +func (e noWrapper) FormatError(p Printer) (next error) { + if f, ok := e.error.(Formatter); ok { + return f.FormatError(p) + } + p.Print(e.error) + return nil +} + +// Unwrap returns the result of calling the Unwrap method on err, if err implements +// Unwrap. Otherwise, Unwrap returns nil. +func Unwrap(err error) error { + u, ok := err.(Wrapper) + if !ok { + return nil + } + return u.Unwrap() +} + +// Is reports whether any error in err's chain matches target. +// +// An error is considered to match a target if it is equal to that target or if +// it implements a method Is(error) bool such that Is(target) returns true. +func Is(err, target error) bool { + if target == nil { + return err == target + } + + isComparable := reflect.TypeOf(target).Comparable() + for { + if isComparable && err == target { + return true + } + if x, ok := err.(interface{ Is(error) bool }); ok && x.Is(target) { + return true + } + // TODO: consider supporing target.Is(err). This would allow + // user-definable predicates, but also may allow for coping with sloppy + // APIs, thereby making it easier to get away with them. 
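+		// For example, with err := Errorf("a: %w", Errorf("b: %w", sentinel))
+		// for some sentinel error, Is(err, sentinel) reports true after
+		// unwrapping twice.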
+ if err = Unwrap(err); err == nil { + return false + } + } +} + +// As finds the first error in err's chain that matches the type to which target +// points, and if so, sets the target to its value and returns true. An error +// matches a type if it is assignable to the target type, or if it has a method +// As(interface{}) bool such that As(target) returns true. As will panic if target +// is not a non-nil pointer to a type which implements error or is of interface type. +// +// The As method should set the target to its value and return true if err +// matches the type to which target points. +func As(err error, target interface{}) bool { + if target == nil { + panic("errors: target cannot be nil") + } + val := reflect.ValueOf(target) + typ := val.Type() + if typ.Kind() != reflect.Ptr || val.IsNil() { + panic("errors: target must be a non-nil pointer") + } + if e := typ.Elem(); e.Kind() != reflect.Interface && !e.Implements(errorType) { + panic("errors: *target must be interface or implement error") + } + targetType := typ.Elem() + for err != nil { + if reflect.TypeOf(err).AssignableTo(targetType) { + val.Elem().Set(reflect.ValueOf(err)) + return true + } + if x, ok := err.(interface{ As(interface{}) bool }); ok && x.As(target) { + return true + } + err = Unwrap(err) + } + return false +} + +var errorType = reflect.TypeOf((*error)(nil)).Elem() diff --git a/vendor/gopkg.in/ini.v1/.gitignore b/vendor/gopkg.in/ini.v1/.gitignore new file mode 100644 index 000000000..12411127b --- /dev/null +++ b/vendor/gopkg.in/ini.v1/.gitignore @@ -0,0 +1,6 @@ +testdata/conf_out.ini +ini.sublime-project +ini.sublime-workspace +testdata/conf_reflect.ini +.idea +/.vscode diff --git a/vendor/gopkg.in/ini.v1/LICENSE b/vendor/gopkg.in/ini.v1/LICENSE new file mode 100644 index 000000000..d361bbcdf --- /dev/null +++ b/vendor/gopkg.in/ini.v1/LICENSE @@ -0,0 +1,191 @@ +Apache License +Version 2.0, January 2004 +http://www.apache.org/licenses/ + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. + +"License" shall mean the terms and conditions for use, reproduction, and +distribution as defined by Sections 1 through 9 of this document. + +"Licensor" shall mean the copyright owner or entity authorized by the copyright +owner that is granting the License. + +"Legal Entity" shall mean the union of the acting entity and all other entities +that control, are controlled by, or are under common control with that entity. +For the purposes of this definition, "control" means (i) the power, direct or +indirect, to cause the direction or management of such entity, whether by +contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the +outstanding shares, or (iii) beneficial ownership of such entity. + +"You" (or "Your") shall mean an individual or Legal Entity exercising +permissions granted by this License. + +"Source" form shall mean the preferred form for making modifications, including +but not limited to software source code, documentation source, and configuration +files. + +"Object" form shall mean any form resulting from mechanical transformation or +translation of a Source form, including but not limited to compiled object code, +generated documentation, and conversions to other media types. + +"Work" shall mean the work of authorship, whether in Source or Object form, made +available under the License, as indicated by a copyright notice that is included +in or attached to the work (an example is provided in the Appendix below). 
+ +"Derivative Works" shall mean any work, whether in Source or Object form, that +is based on (or derived from) the Work and for which the editorial revisions, +annotations, elaborations, or other modifications represent, as a whole, an +original work of authorship. For the purposes of this License, Derivative Works +shall not include works that remain separable from, or merely link (or bind by +name) to the interfaces of, the Work and Derivative Works thereof. + +"Contribution" shall mean any work of authorship, including the original version +of the Work and any modifications or additions to that Work or Derivative Works +thereof, that is intentionally submitted to Licensor for inclusion in the Work +by the copyright owner or by an individual or Legal Entity authorized to submit +on behalf of the copyright owner. For the purposes of this definition, +"submitted" means any form of electronic, verbal, or written communication sent +to the Licensor or its representatives, including but not limited to +communication on electronic mailing lists, source code control systems, and +issue tracking systems that are managed by, or on behalf of, the Licensor for +the purpose of discussing and improving the Work, but excluding communication +that is conspicuously marked or otherwise designated in writing by the copyright +owner as "Not a Contribution." + +"Contributor" shall mean Licensor and any individual or Legal Entity on behalf +of whom a Contribution has been received by Licensor and subsequently +incorporated within the Work. + +2. Grant of Copyright License. + +Subject to the terms and conditions of this License, each Contributor hereby +grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, +irrevocable copyright license to reproduce, prepare Derivative Works of, +publicly display, publicly perform, sublicense, and distribute the Work and such +Derivative Works in Source or Object form. + +3. Grant of Patent License. + +Subject to the terms and conditions of this License, each Contributor hereby +grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, +irrevocable (except as stated in this section) patent license to make, have +made, use, offer to sell, sell, import, and otherwise transfer the Work, where +such license applies only to those patent claims licensable by such Contributor +that are necessarily infringed by their Contribution(s) alone or by combination +of their Contribution(s) with the Work to which such Contribution(s) was +submitted. If You institute patent litigation against any entity (including a +cross-claim or counterclaim in a lawsuit) alleging that the Work or a +Contribution incorporated within the Work constitutes direct or contributory +patent infringement, then any patent licenses granted to You under this License +for that Work shall terminate as of the date such litigation is filed. + +4. Redistribution. 
+ +You may reproduce and distribute copies of the Work or Derivative Works thereof +in any medium, with or without modifications, and in Source or Object form, +provided that You meet the following conditions: + +You must give any other recipients of the Work or Derivative Works a copy of +this License; and +You must cause any modified files to carry prominent notices stating that You +changed the files; and +You must retain, in the Source form of any Derivative Works that You distribute, +all copyright, patent, trademark, and attribution notices from the Source form +of the Work, excluding those notices that do not pertain to any part of the +Derivative Works; and +If the Work includes a "NOTICE" text file as part of its distribution, then any +Derivative Works that You distribute must include a readable copy of the +attribution notices contained within such NOTICE file, excluding those notices +that do not pertain to any part of the Derivative Works, in at least one of the +following places: within a NOTICE text file distributed as part of the +Derivative Works; within the Source form or documentation, if provided along +with the Derivative Works; or, within a display generated by the Derivative +Works, if and wherever such third-party notices normally appear. The contents of +the NOTICE file are for informational purposes only and do not modify the +License. You may add Your own attribution notices within Derivative Works that +You distribute, alongside or as an addendum to the NOTICE text from the Work, +provided that such additional attribution notices cannot be construed as +modifying the License. +You may add Your own copyright statement to Your modifications and may provide +additional or different license terms and conditions for use, reproduction, or +distribution of Your modifications, or for any such Derivative Works as a whole, +provided Your use, reproduction, and distribution of the Work otherwise complies +with the conditions stated in this License. + +5. Submission of Contributions. + +Unless You explicitly state otherwise, any Contribution intentionally submitted +for inclusion in the Work by You to the Licensor shall be under the terms and +conditions of this License, without any additional terms or conditions. +Notwithstanding the above, nothing herein shall supersede or modify the terms of +any separate license agreement you may have executed with Licensor regarding +such Contributions. + +6. Trademarks. + +This License does not grant permission to use the trade names, trademarks, +service marks, or product names of the Licensor, except as required for +reasonable and customary use in describing the origin of the Work and +reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. + +Unless required by applicable law or agreed to in writing, Licensor provides the +Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, +including, without limitation, any warranties or conditions of TITLE, +NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are +solely responsible for determining the appropriateness of using or +redistributing the Work and assume any risks associated with Your exercise of +permissions under this License. + +8. Limitation of Liability. 
+ +In no event and under no legal theory, whether in tort (including negligence), +contract, or otherwise, unless required by applicable law (such as deliberate +and grossly negligent acts) or agreed to in writing, shall any Contributor be +liable to You for damages, including any direct, indirect, special, incidental, +or consequential damages of any character arising as a result of this License or +out of the use or inability to use the Work (including but not limited to +damages for loss of goodwill, work stoppage, computer failure or malfunction, or +any and all other commercial damages or losses), even if such Contributor has +been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. + +While redistributing the Work or Derivative Works thereof, You may choose to +offer, and charge a fee for, acceptance of support, warranty, indemnity, or +other liability obligations and/or rights consistent with this License. However, +in accepting such obligations, You may act only on Your own behalf and on Your +sole responsibility, not on behalf of any other Contributor, and only if You +agree to indemnify, defend, and hold each Contributor harmless for any liability +incurred by, or claims asserted against, such Contributor by reason of your +accepting any such warranty or additional liability. + +END OF TERMS AND CONDITIONS + +APPENDIX: How to apply the Apache License to your work + +To apply the Apache License to your work, attach the following boilerplate +notice, with the fields enclosed by brackets "[]" replaced with your own +identifying information. (Don't include the brackets!) The text should be +enclosed in the appropriate comment syntax for the file format. We also +recommend that a file or class name and description of purpose be included on +the same "printed page" as the copyright notice for easier identification within +third-party archives. + + Copyright 2014 Unknwon + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/vendor/gopkg.in/ini.v1/Makefile b/vendor/gopkg.in/ini.v1/Makefile new file mode 100644 index 000000000..f3b0dae2d --- /dev/null +++ b/vendor/gopkg.in/ini.v1/Makefile @@ -0,0 +1,15 @@ +.PHONY: build test bench vet coverage + +build: vet bench + +test: + go test -v -cover -race + +bench: + go test -v -cover -test.bench=. 
-test.benchmem + +vet: + go vet + +coverage: + go test -coverprofile=c.out && go tool cover -html=c.out && rm c.out diff --git a/vendor/gopkg.in/ini.v1/README.md b/vendor/gopkg.in/ini.v1/README.md new file mode 100644 index 000000000..5d65658b2 --- /dev/null +++ b/vendor/gopkg.in/ini.v1/README.md @@ -0,0 +1,43 @@ +# INI + +[![GitHub Workflow Status](https://img.shields.io/github/workflow/status/go-ini/ini/Go?logo=github&style=for-the-badge)](https://github.com/go-ini/ini/actions?query=workflow%3AGo) +[![codecov](https://img.shields.io/codecov/c/github/go-ini/ini/master?logo=codecov&style=for-the-badge)](https://codecov.io/gh/go-ini/ini) +[![GoDoc](https://img.shields.io/badge/GoDoc-Reference-blue?style=for-the-badge&logo=go)](https://pkg.go.dev/github.com/go-ini/ini?tab=doc) +[![Sourcegraph](https://img.shields.io/badge/view%20on-Sourcegraph-brightgreen.svg?style=for-the-badge&logo=sourcegraph)](https://sourcegraph.com/github.com/go-ini/ini) + +![](https://avatars0.githubusercontent.com/u/10216035?v=3&s=200) + +Package ini provides INI file read and write functionality in Go. + +## Features + +- Load from multiple data sources(file, `[]byte`, `io.Reader` and `io.ReadCloser`) with overwrites. +- Read with recursion values. +- Read with parent-child sections. +- Read with auto-increment key names. +- Read with multiple-line values. +- Read with tons of helper methods. +- Read and convert values to Go types. +- Read and **WRITE** comments of sections and keys. +- Manipulate sections, keys and comments with ease. +- Keep sections and keys in order as you parse and save. + +## Installation + +The minimum requirement of Go is **1.6**. + +```sh +$ go get gopkg.in/ini.v1 +``` + +Please add `-u` flag to update in the future. + +## Getting Help + +- [Getting Started](https://ini.unknwon.io/docs/intro/getting_started) +- [API Documentation](https://gowalker.org/gopkg.in/ini.v1) +- 中国大陆镜像:https://ini.unknwon.cn + +## License + +This project is under Apache v2 License. See the [LICENSE](LICENSE) file for the full license text. diff --git a/vendor/gopkg.in/ini.v1/codecov.yml b/vendor/gopkg.in/ini.v1/codecov.yml new file mode 100644 index 000000000..fc947f230 --- /dev/null +++ b/vendor/gopkg.in/ini.v1/codecov.yml @@ -0,0 +1,9 @@ +coverage: + range: "60...95" + status: + project: + default: + threshold: 1% + +comment: + layout: 'diff, files' diff --git a/vendor/gopkg.in/ini.v1/data_source.go b/vendor/gopkg.in/ini.v1/data_source.go new file mode 100644 index 000000000..c3a541f1d --- /dev/null +++ b/vendor/gopkg.in/ini.v1/data_source.go @@ -0,0 +1,76 @@ +// Copyright 2019 Unknwon +// +// Licensed under the Apache License, Version 2.0 (the "License"): you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. + +package ini + +import ( + "bytes" + "fmt" + "io" + "io/ioutil" + "os" +) + +var ( + _ dataSource = (*sourceFile)(nil) + _ dataSource = (*sourceData)(nil) + _ dataSource = (*sourceReadCloser)(nil) +) + +// dataSource is an interface that returns object which can be read and closed. 
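+// For example, Load("my.ini", []byte("app_mode = development")) wraps the
+// file name in a sourceFile and the byte slice in a sourceData via
+// parseDataSource below.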
+type dataSource interface { + ReadCloser() (io.ReadCloser, error) +} + +// sourceFile represents an object that contains content on the local file system. +type sourceFile struct { + name string +} + +func (s sourceFile) ReadCloser() (_ io.ReadCloser, err error) { + return os.Open(s.name) +} + +// sourceData represents an object that contains content in memory. +type sourceData struct { + data []byte +} + +func (s *sourceData) ReadCloser() (io.ReadCloser, error) { + return ioutil.NopCloser(bytes.NewReader(s.data)), nil +} + +// sourceReadCloser represents an input stream with Close method. +type sourceReadCloser struct { + reader io.ReadCloser +} + +func (s *sourceReadCloser) ReadCloser() (io.ReadCloser, error) { + return s.reader, nil +} + +func parseDataSource(source interface{}) (dataSource, error) { + switch s := source.(type) { + case string: + return sourceFile{s}, nil + case []byte: + return &sourceData{s}, nil + case io.ReadCloser: + return &sourceReadCloser{s}, nil + case io.Reader: + return &sourceReadCloser{ioutil.NopCloser(s)}, nil + default: + return nil, fmt.Errorf("error parsing data source: unknown type %q", s) + } +} diff --git a/vendor/gopkg.in/ini.v1/deprecated.go b/vendor/gopkg.in/ini.v1/deprecated.go new file mode 100644 index 000000000..e8bda06e6 --- /dev/null +++ b/vendor/gopkg.in/ini.v1/deprecated.go @@ -0,0 +1,25 @@ +// Copyright 2019 Unknwon +// +// Licensed under the Apache License, Version 2.0 (the "License"): you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. + +package ini + +const ( + // Deprecated: Use "DefaultSection" instead. + DEFAULT_SECTION = DefaultSection +) + +var ( + // Deprecated: AllCapsUnderscore converts to format ALL_CAPS_UNDERSCORE. + AllCapsUnderscore = SnackCase +) diff --git a/vendor/gopkg.in/ini.v1/error.go b/vendor/gopkg.in/ini.v1/error.go new file mode 100644 index 000000000..d88347c54 --- /dev/null +++ b/vendor/gopkg.in/ini.v1/error.go @@ -0,0 +1,34 @@ +// Copyright 2016 Unknwon +// +// Licensed under the Apache License, Version 2.0 (the "License"): you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. + +package ini + +import ( + "fmt" +) + +// ErrDelimiterNotFound indicates the error type of no delimiter is found which there should be one. +type ErrDelimiterNotFound struct { + Line string +} + +// IsErrDelimiterNotFound returns true if the given error is an instance of ErrDelimiterNotFound. 
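+// For example, parsing a line such as "no delimiter here" fails with this
+// error unless options like AllowBooleanKeys or SkipUnrecognizableLines are
+// enabled.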
+func IsErrDelimiterNotFound(err error) bool { + _, ok := err.(ErrDelimiterNotFound) + return ok +} + +func (err ErrDelimiterNotFound) Error() string { + return fmt.Sprintf("key-value delimiter not found: %s", err.Line) +} diff --git a/vendor/gopkg.in/ini.v1/file.go b/vendor/gopkg.in/ini.v1/file.go new file mode 100644 index 000000000..b96d172cf --- /dev/null +++ b/vendor/gopkg.in/ini.v1/file.go @@ -0,0 +1,517 @@ +// Copyright 2017 Unknwon +// +// Licensed under the Apache License, Version 2.0 (the "License"): you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. + +package ini + +import ( + "bytes" + "errors" + "fmt" + "io" + "io/ioutil" + "os" + "strings" + "sync" +) + +// File represents a combination of one or more INI files in memory. +type File struct { + options LoadOptions + dataSources []dataSource + + // Should make things safe, but sometimes doesn't matter. + BlockMode bool + lock sync.RWMutex + + // To keep data in order. + sectionList []string + // To keep track of the index of a section with same name. + // This meta list is only used with non-unique section names are allowed. + sectionIndexes []int + + // Actual data is stored here. + sections map[string][]*Section + + NameMapper + ValueMapper +} + +// newFile initializes File object with given data sources. +func newFile(dataSources []dataSource, opts LoadOptions) *File { + if len(opts.KeyValueDelimiters) == 0 { + opts.KeyValueDelimiters = "=:" + } + if len(opts.KeyValueDelimiterOnWrite) == 0 { + opts.KeyValueDelimiterOnWrite = "=" + } + if len(opts.ChildSectionDelimiter) == 0 { + opts.ChildSectionDelimiter = "." + } + + return &File{ + BlockMode: true, + dataSources: dataSources, + sections: make(map[string][]*Section), + options: opts, + } +} + +// Empty returns an empty file object. +func Empty(opts ...LoadOptions) *File { + var opt LoadOptions + if len(opts) > 0 { + opt = opts[0] + } + + // Ignore error here, we are sure our data is good. + f, _ := LoadSources(opt, []byte("")) + return f +} + +// NewSection creates a new section. +func (f *File) NewSection(name string) (*Section, error) { + if len(name) == 0 { + return nil, errors.New("empty section name") + } + + if (f.options.Insensitive || f.options.InsensitiveSections) && name != DefaultSection { + name = strings.ToLower(name) + } + + if f.BlockMode { + f.lock.Lock() + defer f.lock.Unlock() + } + + if !f.options.AllowNonUniqueSections && inSlice(name, f.sectionList) { + return f.sections[name][0], nil + } + + f.sectionList = append(f.sectionList, name) + + // NOTE: Append to indexes must happen before appending to sections, + // otherwise index will have off-by-one problem. + f.sectionIndexes = append(f.sectionIndexes, len(f.sections[name])) + + sec := newSection(f, name) + f.sections[name] = append(f.sections[name], sec) + + return sec, nil +} + +// NewRawSection creates a new section with an unparseable body. 
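+// Sections listed in LoadOptions.UnparseableSections are stored this way,
+// keeping their body as raw text instead of key/value pairs.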
+func (f *File) NewRawSection(name, body string) (*Section, error) { + section, err := f.NewSection(name) + if err != nil { + return nil, err + } + + section.isRawSection = true + section.rawBody = body + return section, nil +} + +// NewSections creates a list of sections. +func (f *File) NewSections(names ...string) (err error) { + for _, name := range names { + if _, err = f.NewSection(name); err != nil { + return err + } + } + return nil +} + +// GetSection returns section by given name. +func (f *File) GetSection(name string) (*Section, error) { + secs, err := f.SectionsByName(name) + if err != nil { + return nil, err + } + + return secs[0], err +} + +// SectionsByName returns all sections with given name. +func (f *File) SectionsByName(name string) ([]*Section, error) { + if len(name) == 0 { + name = DefaultSection + } + if f.options.Insensitive || f.options.InsensitiveSections { + name = strings.ToLower(name) + } + + if f.BlockMode { + f.lock.RLock() + defer f.lock.RUnlock() + } + + secs := f.sections[name] + if len(secs) == 0 { + return nil, fmt.Errorf("section %q does not exist", name) + } + + return secs, nil +} + +// Section assumes named section exists and returns a zero-value when not. +func (f *File) Section(name string) *Section { + sec, err := f.GetSection(name) + if err != nil { + // Note: It's OK here because the only possible error is empty section name, + // but if it's empty, this piece of code won't be executed. + sec, _ = f.NewSection(name) + return sec + } + return sec +} + +// SectionWithIndex assumes named section exists and returns a new section when not. +func (f *File) SectionWithIndex(name string, index int) *Section { + secs, err := f.SectionsByName(name) + if err != nil || len(secs) <= index { + // NOTE: It's OK here because the only possible error is empty section name, + // but if it's empty, this piece of code won't be executed. + newSec, _ := f.NewSection(name) + return newSec + } + + return secs[index] +} + +// Sections returns a list of Section stored in the current instance. +func (f *File) Sections() []*Section { + if f.BlockMode { + f.lock.RLock() + defer f.lock.RUnlock() + } + + sections := make([]*Section, len(f.sectionList)) + for i, name := range f.sectionList { + sections[i] = f.sections[name][f.sectionIndexes[i]] + } + return sections +} + +// ChildSections returns a list of child sections of given section name. +func (f *File) ChildSections(name string) []*Section { + return f.Section(name).ChildSections() +} + +// SectionStrings returns list of section names. +func (f *File) SectionStrings() []string { + list := make([]string, len(f.sectionList)) + copy(list, f.sectionList) + return list +} + +// DeleteSection deletes a section or all sections with given name. +func (f *File) DeleteSection(name string) { + secs, err := f.SectionsByName(name) + if err != nil { + return + } + + for i := 0; i < len(secs); i++ { + // For non-unique sections, it is always needed to remove the first one so + // in the next iteration, the subsequent section continue having index 0. + // Ignoring the error as index 0 never returns an error. + _ = f.DeleteSectionWithIndex(name, 0) + } +} + +// DeleteSectionWithIndex deletes a section with given name and index. 
+func (f *File) DeleteSectionWithIndex(name string, index int) error { + if !f.options.AllowNonUniqueSections && index != 0 { + return fmt.Errorf("delete section with non-zero index is only allowed when non-unique sections is enabled") + } + + if len(name) == 0 { + name = DefaultSection + } + if f.options.Insensitive || f.options.InsensitiveSections { + name = strings.ToLower(name) + } + + if f.BlockMode { + f.lock.Lock() + defer f.lock.Unlock() + } + + // Count occurrences of the sections + occurrences := 0 + + sectionListCopy := make([]string, len(f.sectionList)) + copy(sectionListCopy, f.sectionList) + + for i, s := range sectionListCopy { + if s != name { + continue + } + + if occurrences == index { + if len(f.sections[name]) <= 1 { + delete(f.sections, name) // The last one in the map + } else { + f.sections[name] = append(f.sections[name][:index], f.sections[name][index+1:]...) + } + + // Fix section lists + f.sectionList = append(f.sectionList[:i], f.sectionList[i+1:]...) + f.sectionIndexes = append(f.sectionIndexes[:i], f.sectionIndexes[i+1:]...) + + } else if occurrences > index { + // Fix the indices of all following sections with this name. + f.sectionIndexes[i-1]-- + } + + occurrences++ + } + + return nil +} + +func (f *File) reload(s dataSource) error { + r, err := s.ReadCloser() + if err != nil { + return err + } + defer r.Close() + + return f.parse(r) +} + +// Reload reloads and parses all data sources. +func (f *File) Reload() (err error) { + for _, s := range f.dataSources { + if err = f.reload(s); err != nil { + // In loose mode, we create an empty default section for nonexistent files. + if os.IsNotExist(err) && f.options.Loose { + _ = f.parse(bytes.NewBuffer(nil)) + continue + } + return err + } + if f.options.ShortCircuit { + return nil + } + } + return nil +} + +// Append appends one or more data sources and reloads automatically. +func (f *File) Append(source interface{}, others ...interface{}) error { + ds, err := parseDataSource(source) + if err != nil { + return err + } + f.dataSources = append(f.dataSources, ds) + for _, s := range others { + ds, err = parseDataSource(s) + if err != nil { + return err + } + f.dataSources = append(f.dataSources, ds) + } + return f.Reload() +} + +func (f *File) writeToBuffer(indent string) (*bytes.Buffer, error) { + equalSign := DefaultFormatLeft + f.options.KeyValueDelimiterOnWrite + DefaultFormatRight + + if PrettyFormat || PrettyEqual { + equalSign = fmt.Sprintf(" %s ", f.options.KeyValueDelimiterOnWrite) + } + + // Use buffer to make sure target is safe until finish encoding. 
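+	// WriteTo/WriteToIndent and SaveTo/SaveToIndent below all render through
+	// this buffer before touching the destination writer or file.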
+ buf := bytes.NewBuffer(nil) + for i, sname := range f.sectionList { + sec := f.SectionWithIndex(sname, f.sectionIndexes[i]) + if len(sec.Comment) > 0 { + // Support multiline comments + lines := strings.Split(sec.Comment, LineBreak) + for i := range lines { + if lines[i][0] != '#' && lines[i][0] != ';' { + lines[i] = "; " + lines[i] + } else { + lines[i] = lines[i][:1] + " " + strings.TrimSpace(lines[i][1:]) + } + + if _, err := buf.WriteString(lines[i] + LineBreak); err != nil { + return nil, err + } + } + } + + if i > 0 || DefaultHeader || (i == 0 && strings.ToUpper(sec.name) != DefaultSection) { + if _, err := buf.WriteString("[" + sname + "]" + LineBreak); err != nil { + return nil, err + } + } else { + // Write nothing if default section is empty + if len(sec.keyList) == 0 { + continue + } + } + + if sec.isRawSection { + if _, err := buf.WriteString(sec.rawBody); err != nil { + return nil, err + } + + if PrettySection { + // Put a line between sections + if _, err := buf.WriteString(LineBreak); err != nil { + return nil, err + } + } + continue + } + + // Count and generate alignment length and buffer spaces using the + // longest key. Keys may be modified if they contain certain characters so + // we need to take that into account in our calculation. + alignLength := 0 + if PrettyFormat { + for _, kname := range sec.keyList { + keyLength := len(kname) + // First case will surround key by ` and second by """ + if strings.Contains(kname, "\"") || strings.ContainsAny(kname, f.options.KeyValueDelimiters) { + keyLength += 2 + } else if strings.Contains(kname, "`") { + keyLength += 6 + } + + if keyLength > alignLength { + alignLength = keyLength + } + } + } + alignSpaces := bytes.Repeat([]byte(" "), alignLength) + + KeyList: + for _, kname := range sec.keyList { + key := sec.Key(kname) + if len(key.Comment) > 0 { + if len(indent) > 0 && sname != DefaultSection { + buf.WriteString(indent) + } + + // Support multiline comments + lines := strings.Split(key.Comment, LineBreak) + for i := range lines { + if lines[i][0] != '#' && lines[i][0] != ';' { + lines[i] = "; " + strings.TrimSpace(lines[i]) + } else { + lines[i] = lines[i][:1] + " " + strings.TrimSpace(lines[i][1:]) + } + + if _, err := buf.WriteString(lines[i] + LineBreak); err != nil { + return nil, err + } + } + } + + if len(indent) > 0 && sname != DefaultSection { + buf.WriteString(indent) + } + + switch { + case key.isAutoIncrement: + kname = "-" + case strings.Contains(kname, "\"") || strings.ContainsAny(kname, f.options.KeyValueDelimiters): + kname = "`" + kname + "`" + case strings.Contains(kname, "`"): + kname = `"""` + kname + `"""` + } + + for _, val := range key.ValueWithShadows() { + if _, err := buf.WriteString(kname); err != nil { + return nil, err + } + + if key.isBooleanType { + if kname != sec.keyList[len(sec.keyList)-1] { + buf.WriteString(LineBreak) + } + continue KeyList + } + + // Write out alignment spaces before "=" sign + if PrettyFormat { + buf.Write(alignSpaces[:alignLength-len(kname)]) + } + + // In case key value contains "\n", "`", "\"", "#" or ";" + if strings.ContainsAny(val, "\n`") { + val = `"""` + val + `"""` + } else if !f.options.IgnoreInlineComment && strings.ContainsAny(val, "#;") { + val = "`" + val + "`" + } else if len(strings.TrimSpace(val)) != len(val) { + val = `"` + val + `"` + } + if _, err := buf.WriteString(equalSign + val + LineBreak); err != nil { + return nil, err + } + } + + for _, val := range key.nestedValues { + if _, err := buf.WriteString(indent + " " + val + LineBreak); err != 
nil { + return nil, err + } + } + } + + if PrettySection { + // Put a line between sections + if _, err := buf.WriteString(LineBreak); err != nil { + return nil, err + } + } + } + + return buf, nil +} + +// WriteToIndent writes content into io.Writer with given indention. +// If PrettyFormat has been set to be true, +// it will align "=" sign with spaces under each section. +func (f *File) WriteToIndent(w io.Writer, indent string) (int64, error) { + buf, err := f.writeToBuffer(indent) + if err != nil { + return 0, err + } + return buf.WriteTo(w) +} + +// WriteTo writes file content into io.Writer. +func (f *File) WriteTo(w io.Writer) (int64, error) { + return f.WriteToIndent(w, "") +} + +// SaveToIndent writes content to file system with given value indention. +func (f *File) SaveToIndent(filename, indent string) error { + // Note: Because we are truncating with os.Create, + // so it's safer to save to a temporary file location and rename after done. + buf, err := f.writeToBuffer(indent) + if err != nil { + return err + } + + return ioutil.WriteFile(filename, buf.Bytes(), 0666) +} + +// SaveTo writes content to file system. +func (f *File) SaveTo(filename string) error { + return f.SaveToIndent(filename, "") +} diff --git a/vendor/gopkg.in/ini.v1/helper.go b/vendor/gopkg.in/ini.v1/helper.go new file mode 100644 index 000000000..f9d80a682 --- /dev/null +++ b/vendor/gopkg.in/ini.v1/helper.go @@ -0,0 +1,24 @@ +// Copyright 2019 Unknwon +// +// Licensed under the Apache License, Version 2.0 (the "License"): you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. + +package ini + +func inSlice(str string, s []string) bool { + for _, v := range s { + if str == v { + return true + } + } + return false +} diff --git a/vendor/gopkg.in/ini.v1/ini.go b/vendor/gopkg.in/ini.v1/ini.go new file mode 100644 index 000000000..23f07422e --- /dev/null +++ b/vendor/gopkg.in/ini.v1/ini.go @@ -0,0 +1,176 @@ +// +build go1.6 + +// Copyright 2014 Unknwon +// +// Licensed under the Apache License, Version 2.0 (the "License"): you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. + +// Package ini provides INI file read and write functionality in Go. +package ini + +import ( + "os" + "regexp" + "runtime" + "strings" +) + +const ( + // DefaultSection is the name of default section. You can use this constant or the string literal. + // In most of cases, an empty string is all you need to access the section. + DefaultSection = "DEFAULT" + + // Maximum allowed depth when recursively substituing variable names. + depthValues = 99 +) + +var ( + // LineBreak is the delimiter to determine or compose a new line. 
+ // This variable will be changed to "\r\n" automatically on Windows at package init time. + LineBreak = "\n" + + // Variable regexp pattern: %(variable)s + varPattern = regexp.MustCompile(`%\(([^)]+)\)s`) + + // DefaultHeader explicitly writes default section header. + DefaultHeader = false + + // PrettySection indicates whether to put a line between sections. + PrettySection = true + // PrettyFormat indicates whether to align "=" sign with spaces to produce pretty output + // or reduce all possible spaces for compact format. + PrettyFormat = true + // PrettyEqual places spaces around "=" sign even when PrettyFormat is false. + PrettyEqual = false + // DefaultFormatLeft places custom spaces on the left when PrettyFormat and PrettyEqual are both disabled. + DefaultFormatLeft = "" + // DefaultFormatRight places custom spaces on the right when PrettyFormat and PrettyEqual are both disabled. + DefaultFormatRight = "" +) + +var inTest = len(os.Args) > 0 && strings.HasSuffix(strings.TrimSuffix(os.Args[0], ".exe"), ".test") + +func init() { + if runtime.GOOS == "windows" && !inTest { + LineBreak = "\r\n" + } +} + +// LoadOptions contains all customized options used for load data source(s). +type LoadOptions struct { + // Loose indicates whether the parser should ignore nonexistent files or return error. + Loose bool + // Insensitive indicates whether the parser forces all section and key names to lowercase. + Insensitive bool + // InsensitiveSections indicates whether the parser forces all section to lowercase. + InsensitiveSections bool + // InsensitiveKeys indicates whether the parser forces all key names to lowercase. + InsensitiveKeys bool + // IgnoreContinuation indicates whether to ignore continuation lines while parsing. + IgnoreContinuation bool + // IgnoreInlineComment indicates whether to ignore comments at the end of value and treat it as part of value. + IgnoreInlineComment bool + // SkipUnrecognizableLines indicates whether to skip unrecognizable lines that do not conform to key/value pairs. + SkipUnrecognizableLines bool + // ShortCircuit indicates whether to ignore other configuration sources after loaded the first available configuration source. + ShortCircuit bool + // AllowBooleanKeys indicates whether to allow boolean type keys or treat as value is missing. + // This type of keys are mostly used in my.cnf. + AllowBooleanKeys bool + // AllowShadows indicates whether to keep track of keys with same name under same section. + AllowShadows bool + // AllowNestedValues indicates whether to allow AWS-like nested values. + // Docs: http://docs.aws.amazon.com/cli/latest/topic/config-vars.html#nested-values + AllowNestedValues bool + // AllowPythonMultilineValues indicates whether to allow Python-like multi-line values. + // Docs: https://docs.python.org/3/library/configparser.html#supported-ini-file-structure + // Relevant quote: Values can also span multiple lines, as long as they are indented deeper + // than the first line of the value. + AllowPythonMultilineValues bool + // SpaceBeforeInlineComment indicates whether to allow comment symbols (\# and \;) inside value. + // Docs: https://docs.python.org/2/library/configparser.html + // Quote: Comments may appear on their own in an otherwise empty line, or may be entered in lines holding values or section names. + // In the latter case, they need to be preceded by a whitespace character to be recognized as a comment. 
+ SpaceBeforeInlineComment bool + // UnescapeValueDoubleQuotes indicates whether to unescape double quotes inside value to regular format + // when value is surrounded by double quotes, e.g. key="a \"value\"" => key=a "value" + UnescapeValueDoubleQuotes bool + // UnescapeValueCommentSymbols indicates to unescape comment symbols (\# and \;) inside value to regular format + // when value is NOT surrounded by any quotes. + // Note: UNSTABLE, behavior might change to only unescape inside double quotes but may noy necessary at all. + UnescapeValueCommentSymbols bool + // UnparseableSections stores a list of blocks that are allowed with raw content which do not otherwise + // conform to key/value pairs. Specify the names of those blocks here. + UnparseableSections []string + // KeyValueDelimiters is the sequence of delimiters that are used to separate key and value. By default, it is "=:". + KeyValueDelimiters string + // KeyValueDelimiterOnWrite is the delimiter that are used to separate key and value output. By default, it is "=". + KeyValueDelimiterOnWrite string + // ChildSectionDelimiter is the delimiter that is used to separate child sections. By default, it is ".". + ChildSectionDelimiter string + // PreserveSurroundedQuote indicates whether to preserve surrounded quote (single and double quotes). + PreserveSurroundedQuote bool + // DebugFunc is called to collect debug information (currently only useful to debug parsing Python-style multiline values). + DebugFunc DebugFunc + // ReaderBufferSize is the buffer size of the reader in bytes. + ReaderBufferSize int + // AllowNonUniqueSections indicates whether to allow sections with the same name multiple times. + AllowNonUniqueSections bool +} + +// DebugFunc is the type of function called to log parse events. +type DebugFunc func(message string) + +// LoadSources allows caller to apply customized options for loading from data source(s). +func LoadSources(opts LoadOptions, source interface{}, others ...interface{}) (_ *File, err error) { + sources := make([]dataSource, len(others)+1) + sources[0], err = parseDataSource(source) + if err != nil { + return nil, err + } + for i := range others { + sources[i+1], err = parseDataSource(others[i]) + if err != nil { + return nil, err + } + } + f := newFile(sources, opts) + if err = f.Reload(); err != nil { + return nil, err + } + return f, nil +} + +// Load loads and parses from INI data sources. +// Arguments can be mixed of file name with string type, or raw data in []byte. +// It will return error if list contains nonexistent files. +func Load(source interface{}, others ...interface{}) (*File, error) { + return LoadSources(LoadOptions{}, source, others...) +} + +// LooseLoad has exactly same functionality as Load function +// except it ignores nonexistent files instead of returning error. +func LooseLoad(source interface{}, others ...interface{}) (*File, error) { + return LoadSources(LoadOptions{Loose: true}, source, others...) +} + +// InsensitiveLoad has exactly same functionality as Load function +// except it forces all section and key names to be lowercased. +func InsensitiveLoad(source interface{}, others ...interface{}) (*File, error) { + return LoadSources(LoadOptions{Insensitive: true}, source, others...) +} + +// ShadowLoad has exactly same functionality as Load function +// except it allows have shadow keys. +func ShadowLoad(source interface{}, others ...interface{}) (*File, error) { + return LoadSources(LoadOptions{AllowShadows: true}, source, others...) 
+} diff --git a/vendor/gopkg.in/ini.v1/key.go b/vendor/gopkg.in/ini.v1/key.go new file mode 100644 index 000000000..8baafd9ea --- /dev/null +++ b/vendor/gopkg.in/ini.v1/key.go @@ -0,0 +1,829 @@ +// Copyright 2014 Unknwon +// +// Licensed under the Apache License, Version 2.0 (the "License"): you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. + +package ini + +import ( + "bytes" + "errors" + "fmt" + "strconv" + "strings" + "time" +) + +// Key represents a key under a section. +type Key struct { + s *Section + Comment string + name string + value string + isAutoIncrement bool + isBooleanType bool + + isShadow bool + shadows []*Key + + nestedValues []string +} + +// newKey simply return a key object with given values. +func newKey(s *Section, name, val string) *Key { + return &Key{ + s: s, + name: name, + value: val, + } +} + +func (k *Key) addShadow(val string) error { + if k.isShadow { + return errors.New("cannot add shadow to another shadow key") + } else if k.isAutoIncrement || k.isBooleanType { + return errors.New("cannot add shadow to auto-increment or boolean key") + } + + // Deduplicate shadows based on their values. + if k.value == val { + return nil + } + for i := range k.shadows { + if k.shadows[i].value == val { + return nil + } + } + + shadow := newKey(k.s, k.name, val) + shadow.isShadow = true + k.shadows = append(k.shadows, shadow) + return nil +} + +// AddShadow adds a new shadow key to itself. +func (k *Key) AddShadow(val string) error { + if !k.s.f.options.AllowShadows { + return errors.New("shadow key is not allowed") + } + return k.addShadow(val) +} + +func (k *Key) addNestedValue(val string) error { + if k.isAutoIncrement || k.isBooleanType { + return errors.New("cannot add nested value to auto-increment or boolean key") + } + + k.nestedValues = append(k.nestedValues, val) + return nil +} + +// AddNestedValue adds a nested value to the key. +func (k *Key) AddNestedValue(val string) error { + if !k.s.f.options.AllowNestedValues { + return errors.New("nested value is not allowed") + } + return k.addNestedValue(val) +} + +// ValueMapper represents a mapping function for values, e.g. os.ExpandEnv +type ValueMapper func(string) string + +// Name returns name of key. +func (k *Key) Name() string { + return k.name +} + +// Value returns raw value of key for performance purpose. +func (k *Key) Value() string { + return k.value +} + +// ValueWithShadows returns raw values of key and its shadows if any. +func (k *Key) ValueWithShadows() []string { + if len(k.shadows) == 0 { + return []string{k.value} + } + vals := make([]string, len(k.shadows)+1) + vals[0] = k.value + for i := range k.shadows { + vals[i+1] = k.shadows[i].value + } + return vals +} + +// NestedValues returns nested values stored in the key. +// It is possible returned value is nil if no nested values stored in the key. +func (k *Key) NestedValues() []string { + return k.nestedValues +} + +// transformValue takes a raw value and transforms to its final string. 
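+// For example, with another key NAME=ini in the same or the default section,
+// a raw value of "Hello %(NAME)s!" becomes "Hello ini!". A minimal sketch,
+// assuming both keys live in the default section:
+//
+//	cfg, _ := Load([]byte("NAME = ini\ngreeting = Hello %(NAME)s!"))
+//	fmt.Println(cfg.Section("").Key("greeting").String()) // Hello ini!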
+func (k *Key) transformValue(val string) string { + if k.s.f.ValueMapper != nil { + val = k.s.f.ValueMapper(val) + } + + // Fail-fast if no indicate char found for recursive value + if !strings.Contains(val, "%") { + return val + } + for i := 0; i < depthValues; i++ { + vr := varPattern.FindString(val) + if len(vr) == 0 { + break + } + + // Take off leading '%(' and trailing ')s'. + noption := vr[2 : len(vr)-2] + + // Search in the same section. + // If not found or found the key itself, then search again in default section. + nk, err := k.s.GetKey(noption) + if err != nil || k == nk { + nk, _ = k.s.f.Section("").GetKey(noption) + if nk == nil { + // Stop when no results found in the default section, + // and returns the value as-is. + break + } + } + + // Substitute by new value and take off leading '%(' and trailing ')s'. + val = strings.Replace(val, vr, nk.value, -1) + } + return val +} + +// String returns string representation of value. +func (k *Key) String() string { + return k.transformValue(k.value) +} + +// Validate accepts a validate function which can +// return modifed result as key value. +func (k *Key) Validate(fn func(string) string) string { + return fn(k.String()) +} + +// parseBool returns the boolean value represented by the string. +// +// It accepts 1, t, T, TRUE, true, True, YES, yes, Yes, y, ON, on, On, +// 0, f, F, FALSE, false, False, NO, no, No, n, OFF, off, Off. +// Any other value returns an error. +func parseBool(str string) (value bool, err error) { + switch str { + case "1", "t", "T", "true", "TRUE", "True", "YES", "yes", "Yes", "y", "ON", "on", "On": + return true, nil + case "0", "f", "F", "false", "FALSE", "False", "NO", "no", "No", "n", "OFF", "off", "Off": + return false, nil + } + return false, fmt.Errorf("parsing \"%s\": invalid syntax", str) +} + +// Bool returns bool type value. +func (k *Key) Bool() (bool, error) { + return parseBool(k.String()) +} + +// Float64 returns float64 type value. +func (k *Key) Float64() (float64, error) { + return strconv.ParseFloat(k.String(), 64) +} + +// Int returns int type value. +func (k *Key) Int() (int, error) { + v, err := strconv.ParseInt(k.String(), 0, 64) + return int(v), err +} + +// Int64 returns int64 type value. +func (k *Key) Int64() (int64, error) { + return strconv.ParseInt(k.String(), 0, 64) +} + +// Uint returns uint type valued. +func (k *Key) Uint() (uint, error) { + u, e := strconv.ParseUint(k.String(), 0, 64) + return uint(u), e +} + +// Uint64 returns uint64 type value. +func (k *Key) Uint64() (uint64, error) { + return strconv.ParseUint(k.String(), 0, 64) +} + +// Duration returns time.Duration type value. +func (k *Key) Duration() (time.Duration, error) { + return time.ParseDuration(k.String()) +} + +// TimeFormat parses with given format and returns time.Time type value. +func (k *Key) TimeFormat(format string) (time.Time, error) { + return time.Parse(format, k.String()) +} + +// Time parses with RFC3339 format and returns time.Time type value. +func (k *Key) Time() (time.Time, error) { + return k.TimeFormat(time.RFC3339) +} + +// MustString returns default value if key value is empty. +func (k *Key) MustString(defaultVal string) string { + val := k.String() + if len(val) == 0 { + k.value = defaultVal + return defaultVal + } + return val +} + +// MustBool always returns value without error, +// it returns false if error occurs. 
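+// For example, MustBool(true) returns true and overwrites the stored value
+// when the raw value is empty or not a recognized boolean.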
+func (k *Key) MustBool(defaultVal ...bool) bool { + val, err := k.Bool() + if len(defaultVal) > 0 && err != nil { + k.value = strconv.FormatBool(defaultVal[0]) + return defaultVal[0] + } + return val +} + +// MustFloat64 always returns value without error, +// it returns 0.0 if error occurs. +func (k *Key) MustFloat64(defaultVal ...float64) float64 { + val, err := k.Float64() + if len(defaultVal) > 0 && err != nil { + k.value = strconv.FormatFloat(defaultVal[0], 'f', -1, 64) + return defaultVal[0] + } + return val +} + +// MustInt always returns value without error, +// it returns 0 if error occurs. +func (k *Key) MustInt(defaultVal ...int) int { + val, err := k.Int() + if len(defaultVal) > 0 && err != nil { + k.value = strconv.FormatInt(int64(defaultVal[0]), 10) + return defaultVal[0] + } + return val +} + +// MustInt64 always returns value without error, +// it returns 0 if error occurs. +func (k *Key) MustInt64(defaultVal ...int64) int64 { + val, err := k.Int64() + if len(defaultVal) > 0 && err != nil { + k.value = strconv.FormatInt(defaultVal[0], 10) + return defaultVal[0] + } + return val +} + +// MustUint always returns value without error, +// it returns 0 if error occurs. +func (k *Key) MustUint(defaultVal ...uint) uint { + val, err := k.Uint() + if len(defaultVal) > 0 && err != nil { + k.value = strconv.FormatUint(uint64(defaultVal[0]), 10) + return defaultVal[0] + } + return val +} + +// MustUint64 always returns value without error, +// it returns 0 if error occurs. +func (k *Key) MustUint64(defaultVal ...uint64) uint64 { + val, err := k.Uint64() + if len(defaultVal) > 0 && err != nil { + k.value = strconv.FormatUint(defaultVal[0], 10) + return defaultVal[0] + } + return val +} + +// MustDuration always returns value without error, +// it returns zero value if error occurs. +func (k *Key) MustDuration(defaultVal ...time.Duration) time.Duration { + val, err := k.Duration() + if len(defaultVal) > 0 && err != nil { + k.value = defaultVal[0].String() + return defaultVal[0] + } + return val +} + +// MustTimeFormat always parses with given format and returns value without error, +// it returns zero value if error occurs. +func (k *Key) MustTimeFormat(format string, defaultVal ...time.Time) time.Time { + val, err := k.TimeFormat(format) + if len(defaultVal) > 0 && err != nil { + k.value = defaultVal[0].Format(format) + return defaultVal[0] + } + return val +} + +// MustTime always parses with RFC3339 format and returns value without error, +// it returns zero value if error occurs. +func (k *Key) MustTime(defaultVal ...time.Time) time.Time { + return k.MustTimeFormat(time.RFC3339, defaultVal...) +} + +// In always returns value without error, +// it returns default value if error occurs or doesn't fit into candidates. +func (k *Key) In(defaultVal string, candidates []string) string { + val := k.String() + for _, cand := range candidates { + if val == cand { + return val + } + } + return defaultVal +} + +// InFloat64 always returns value without error, +// it returns default value if error occurs or doesn't fit into candidates. +func (k *Key) InFloat64(defaultVal float64, candidates []float64) float64 { + val := k.MustFloat64() + for _, cand := range candidates { + if val == cand { + return val + } + } + return defaultVal +} + +// InInt always returns value without error, +// it returns default value if error occurs or doesn't fit into candidates. 
+func (k *Key) InInt(defaultVal int, candidates []int) int { + val := k.MustInt() + for _, cand := range candidates { + if val == cand { + return val + } + } + return defaultVal +} + +// InInt64 always returns value without error, +// it returns default value if error occurs or doesn't fit into candidates. +func (k *Key) InInt64(defaultVal int64, candidates []int64) int64 { + val := k.MustInt64() + for _, cand := range candidates { + if val == cand { + return val + } + } + return defaultVal +} + +// InUint always returns value without error, +// it returns default value if error occurs or doesn't fit into candidates. +func (k *Key) InUint(defaultVal uint, candidates []uint) uint { + val := k.MustUint() + for _, cand := range candidates { + if val == cand { + return val + } + } + return defaultVal +} + +// InUint64 always returns value without error, +// it returns default value if error occurs or doesn't fit into candidates. +func (k *Key) InUint64(defaultVal uint64, candidates []uint64) uint64 { + val := k.MustUint64() + for _, cand := range candidates { + if val == cand { + return val + } + } + return defaultVal +} + +// InTimeFormat always parses with given format and returns value without error, +// it returns default value if error occurs or doesn't fit into candidates. +func (k *Key) InTimeFormat(format string, defaultVal time.Time, candidates []time.Time) time.Time { + val := k.MustTimeFormat(format) + for _, cand := range candidates { + if val == cand { + return val + } + } + return defaultVal +} + +// InTime always parses with RFC3339 format and returns value without error, +// it returns default value if error occurs or doesn't fit into candidates. +func (k *Key) InTime(defaultVal time.Time, candidates []time.Time) time.Time { + return k.InTimeFormat(time.RFC3339, defaultVal, candidates) +} + +// RangeFloat64 checks if value is in given range inclusively, +// and returns default value if it's not. +func (k *Key) RangeFloat64(defaultVal, min, max float64) float64 { + val := k.MustFloat64() + if val < min || val > max { + return defaultVal + } + return val +} + +// RangeInt checks if value is in given range inclusively, +// and returns default value if it's not. +func (k *Key) RangeInt(defaultVal, min, max int) int { + val := k.MustInt() + if val < min || val > max { + return defaultVal + } + return val +} + +// RangeInt64 checks if value is in given range inclusively, +// and returns default value if it's not. +func (k *Key) RangeInt64(defaultVal, min, max int64) int64 { + val := k.MustInt64() + if val < min || val > max { + return defaultVal + } + return val +} + +// RangeTimeFormat checks if value with given format is in given range inclusively, +// and returns default value if it's not. +func (k *Key) RangeTimeFormat(format string, defaultVal, min, max time.Time) time.Time { + val := k.MustTimeFormat(format) + if val.Unix() < min.Unix() || val.Unix() > max.Unix() { + return defaultVal + } + return val +} + +// RangeTime checks if value with RFC3339 format is in given range inclusively, +// and returns default value if it's not. +func (k *Key) RangeTime(defaultVal, min, max time.Time) time.Time { + return k.RangeTimeFormat(time.RFC3339, defaultVal, min, max) +} + +// Strings returns list of string divided by given delimiter. 
+func (k *Key) Strings(delim string) []string { + str := k.String() + if len(str) == 0 { + return []string{} + } + + runes := []rune(str) + vals := make([]string, 0, 2) + var buf bytes.Buffer + escape := false + idx := 0 + for { + if escape { + escape = false + if runes[idx] != '\\' && !strings.HasPrefix(string(runes[idx:]), delim) { + buf.WriteRune('\\') + } + buf.WriteRune(runes[idx]) + } else { + if runes[idx] == '\\' { + escape = true + } else if strings.HasPrefix(string(runes[idx:]), delim) { + idx += len(delim) - 1 + vals = append(vals, strings.TrimSpace(buf.String())) + buf.Reset() + } else { + buf.WriteRune(runes[idx]) + } + } + idx++ + if idx == len(runes) { + break + } + } + + if buf.Len() > 0 { + vals = append(vals, strings.TrimSpace(buf.String())) + } + + return vals +} + +// StringsWithShadows returns list of string divided by given delimiter. +// Shadows will also be appended if any. +func (k *Key) StringsWithShadows(delim string) []string { + vals := k.ValueWithShadows() + results := make([]string, 0, len(vals)*2) + for i := range vals { + if len(vals) == 0 { + continue + } + + results = append(results, strings.Split(vals[i], delim)...) + } + + for i := range results { + results[i] = k.transformValue(strings.TrimSpace(results[i])) + } + return results +} + +// Float64s returns list of float64 divided by given delimiter. Any invalid input will be treated as zero value. +func (k *Key) Float64s(delim string) []float64 { + vals, _ := k.parseFloat64s(k.Strings(delim), true, false) + return vals +} + +// Ints returns list of int divided by given delimiter. Any invalid input will be treated as zero value. +func (k *Key) Ints(delim string) []int { + vals, _ := k.parseInts(k.Strings(delim), true, false) + return vals +} + +// Int64s returns list of int64 divided by given delimiter. Any invalid input will be treated as zero value. +func (k *Key) Int64s(delim string) []int64 { + vals, _ := k.parseInt64s(k.Strings(delim), true, false) + return vals +} + +// Uints returns list of uint divided by given delimiter. Any invalid input will be treated as zero value. +func (k *Key) Uints(delim string) []uint { + vals, _ := k.parseUints(k.Strings(delim), true, false) + return vals +} + +// Uint64s returns list of uint64 divided by given delimiter. Any invalid input will be treated as zero value. +func (k *Key) Uint64s(delim string) []uint64 { + vals, _ := k.parseUint64s(k.Strings(delim), true, false) + return vals +} + +// Bools returns list of bool divided by given delimiter. Any invalid input will be treated as zero value. +func (k *Key) Bools(delim string) []bool { + vals, _ := k.parseBools(k.Strings(delim), true, false) + return vals +} + +// TimesFormat parses with given format and returns list of time.Time divided by given delimiter. +// Any invalid input will be treated as zero value (0001-01-01 00:00:00 +0000 UTC). +func (k *Key) TimesFormat(format, delim string) []time.Time { + vals, _ := k.parseTimesFormat(format, k.Strings(delim), true, false) + return vals +} + +// Times parses with RFC3339 format and returns list of time.Time divided by given delimiter. +// Any invalid input will be treated as zero value (0001-01-01 00:00:00 +0000 UTC). +func (k *Key) Times(delim string) []time.Time { + return k.TimesFormat(time.RFC3339, delim) +} + +// ValidFloat64s returns list of float64 divided by given delimiter. If some value is not float, then +// it will not be included to result list. 
+func (k *Key) ValidFloat64s(delim string) []float64 { + vals, _ := k.parseFloat64s(k.Strings(delim), false, false) + return vals +} + +// ValidInts returns list of int divided by given delimiter. If some value is not integer, then it will +// not be included to result list. +func (k *Key) ValidInts(delim string) []int { + vals, _ := k.parseInts(k.Strings(delim), false, false) + return vals +} + +// ValidInt64s returns list of int64 divided by given delimiter. If some value is not 64-bit integer, +// then it will not be included to result list. +func (k *Key) ValidInt64s(delim string) []int64 { + vals, _ := k.parseInt64s(k.Strings(delim), false, false) + return vals +} + +// ValidUints returns list of uint divided by given delimiter. If some value is not unsigned integer, +// then it will not be included to result list. +func (k *Key) ValidUints(delim string) []uint { + vals, _ := k.parseUints(k.Strings(delim), false, false) + return vals +} + +// ValidUint64s returns list of uint64 divided by given delimiter. If some value is not 64-bit unsigned +// integer, then it will not be included to result list. +func (k *Key) ValidUint64s(delim string) []uint64 { + vals, _ := k.parseUint64s(k.Strings(delim), false, false) + return vals +} + +// ValidBools returns list of bool divided by given delimiter. If some value is not 64-bit unsigned +// integer, then it will not be included to result list. +func (k *Key) ValidBools(delim string) []bool { + vals, _ := k.parseBools(k.Strings(delim), false, false) + return vals +} + +// ValidTimesFormat parses with given format and returns list of time.Time divided by given delimiter. +func (k *Key) ValidTimesFormat(format, delim string) []time.Time { + vals, _ := k.parseTimesFormat(format, k.Strings(delim), false, false) + return vals +} + +// ValidTimes parses with RFC3339 format and returns list of time.Time divided by given delimiter. +func (k *Key) ValidTimes(delim string) []time.Time { + return k.ValidTimesFormat(time.RFC3339, delim) +} + +// StrictFloat64s returns list of float64 divided by given delimiter or error on first invalid input. +func (k *Key) StrictFloat64s(delim string) ([]float64, error) { + return k.parseFloat64s(k.Strings(delim), false, true) +} + +// StrictInts returns list of int divided by given delimiter or error on first invalid input. +func (k *Key) StrictInts(delim string) ([]int, error) { + return k.parseInts(k.Strings(delim), false, true) +} + +// StrictInt64s returns list of int64 divided by given delimiter or error on first invalid input. +func (k *Key) StrictInt64s(delim string) ([]int64, error) { + return k.parseInt64s(k.Strings(delim), false, true) +} + +// StrictUints returns list of uint divided by given delimiter or error on first invalid input. +func (k *Key) StrictUints(delim string) ([]uint, error) { + return k.parseUints(k.Strings(delim), false, true) +} + +// StrictUint64s returns list of uint64 divided by given delimiter or error on first invalid input. +func (k *Key) StrictUint64s(delim string) ([]uint64, error) { + return k.parseUint64s(k.Strings(delim), false, true) +} + +// StrictBools returns list of bool divided by given delimiter or error on first invalid input. +func (k *Key) StrictBools(delim string) ([]bool, error) { + return k.parseBools(k.Strings(delim), false, true) +} + +// StrictTimesFormat parses with given format and returns list of time.Time divided by given delimiter +// or error on first invalid input. 
+func (k *Key) StrictTimesFormat(format, delim string) ([]time.Time, error) { + return k.parseTimesFormat(format, k.Strings(delim), false, true) +} + +// StrictTimes parses with RFC3339 format and returns list of time.Time divided by given delimiter +// or error on first invalid input. +func (k *Key) StrictTimes(delim string) ([]time.Time, error) { + return k.StrictTimesFormat(time.RFC3339, delim) +} + +// parseBools transforms strings to bools. +func (k *Key) parseBools(strs []string, addInvalid, returnOnInvalid bool) ([]bool, error) { + vals := make([]bool, 0, len(strs)) + parser := func(str string) (interface{}, error) { + val, err := parseBool(str) + return val, err + } + rawVals, err := k.doParse(strs, addInvalid, returnOnInvalid, parser) + if err == nil { + for _, val := range rawVals { + vals = append(vals, val.(bool)) + } + } + return vals, err +} + +// parseFloat64s transforms strings to float64s. +func (k *Key) parseFloat64s(strs []string, addInvalid, returnOnInvalid bool) ([]float64, error) { + vals := make([]float64, 0, len(strs)) + parser := func(str string) (interface{}, error) { + val, err := strconv.ParseFloat(str, 64) + return val, err + } + rawVals, err := k.doParse(strs, addInvalid, returnOnInvalid, parser) + if err == nil { + for _, val := range rawVals { + vals = append(vals, val.(float64)) + } + } + return vals, err +} + +// parseInts transforms strings to ints. +func (k *Key) parseInts(strs []string, addInvalid, returnOnInvalid bool) ([]int, error) { + vals := make([]int, 0, len(strs)) + parser := func(str string) (interface{}, error) { + val, err := strconv.ParseInt(str, 0, 64) + return val, err + } + rawVals, err := k.doParse(strs, addInvalid, returnOnInvalid, parser) + if err == nil { + for _, val := range rawVals { + vals = append(vals, int(val.(int64))) + } + } + return vals, err +} + +// parseInt64s transforms strings to int64s. +func (k *Key) parseInt64s(strs []string, addInvalid, returnOnInvalid bool) ([]int64, error) { + vals := make([]int64, 0, len(strs)) + parser := func(str string) (interface{}, error) { + val, err := strconv.ParseInt(str, 0, 64) + return val, err + } + + rawVals, err := k.doParse(strs, addInvalid, returnOnInvalid, parser) + if err == nil { + for _, val := range rawVals { + vals = append(vals, val.(int64)) + } + } + return vals, err +} + +// parseUints transforms strings to uints. +func (k *Key) parseUints(strs []string, addInvalid, returnOnInvalid bool) ([]uint, error) { + vals := make([]uint, 0, len(strs)) + parser := func(str string) (interface{}, error) { + val, err := strconv.ParseUint(str, 0, 64) + return val, err + } + + rawVals, err := k.doParse(strs, addInvalid, returnOnInvalid, parser) + if err == nil { + for _, val := range rawVals { + vals = append(vals, uint(val.(uint64))) + } + } + return vals, err +} + +// parseUint64s transforms strings to uint64s. +func (k *Key) parseUint64s(strs []string, addInvalid, returnOnInvalid bool) ([]uint64, error) { + vals := make([]uint64, 0, len(strs)) + parser := func(str string) (interface{}, error) { + val, err := strconv.ParseUint(str, 0, 64) + return val, err + } + rawVals, err := k.doParse(strs, addInvalid, returnOnInvalid, parser) + if err == nil { + for _, val := range rawVals { + vals = append(vals, val.(uint64)) + } + } + return vals, err +} + + +type Parser func(str string) (interface{}, error) + + +// parseTimesFormat transforms strings to times in given format. 
+func (k *Key) parseTimesFormat(format string, strs []string, addInvalid, returnOnInvalid bool) ([]time.Time, error) { + vals := make([]time.Time, 0, len(strs)) + parser := func(str string) (interface{}, error) { + val, err := time.Parse(format, str) + return val, err + } + rawVals, err := k.doParse(strs, addInvalid, returnOnInvalid, parser) + if err == nil { + for _, val := range rawVals { + vals = append(vals, val.(time.Time)) + } + } + return vals, err +} + + +// doParse transforms strings to different types +func (k *Key) doParse(strs []string, addInvalid, returnOnInvalid bool, parser Parser) ([]interface{}, error) { + vals := make([]interface{}, 0, len(strs)) + for _, str := range strs { + val, err := parser(str) + if err != nil && returnOnInvalid { + return nil, err + } + if err == nil || addInvalid { + vals = append(vals, val) + } + } + return vals, nil +} + +// SetValue changes key value. +func (k *Key) SetValue(v string) { + if k.s.f.BlockMode { + k.s.f.lock.Lock() + defer k.s.f.lock.Unlock() + } + + k.value = v + k.s.keysHash[k.name] = v +} diff --git a/vendor/gopkg.in/ini.v1/parser.go b/vendor/gopkg.in/ini.v1/parser.go new file mode 100644 index 000000000..65147166f --- /dev/null +++ b/vendor/gopkg.in/ini.v1/parser.go @@ -0,0 +1,535 @@ +// Copyright 2015 Unknwon +// +// Licensed under the Apache License, Version 2.0 (the "License"): you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. + +package ini + +import ( + "bufio" + "bytes" + "fmt" + "io" + "regexp" + "strconv" + "strings" + "unicode" +) + +const minReaderBufferSize = 4096 + +var pythonMultiline = regexp.MustCompile(`^([\t\f ]+)(.*)`) + +type parserOptions struct { + IgnoreContinuation bool + IgnoreInlineComment bool + AllowPythonMultilineValues bool + SpaceBeforeInlineComment bool + UnescapeValueDoubleQuotes bool + UnescapeValueCommentSymbols bool + PreserveSurroundedQuote bool + DebugFunc DebugFunc + ReaderBufferSize int +} + +type parser struct { + buf *bufio.Reader + options parserOptions + + isEOF bool + count int + comment *bytes.Buffer +} + +func (p *parser) debug(format string, args ...interface{}) { + if p.options.DebugFunc != nil { + p.options.DebugFunc(fmt.Sprintf(format, args...)) + } +} + +func newParser(r io.Reader, opts parserOptions) *parser { + size := opts.ReaderBufferSize + if size < minReaderBufferSize { + size = minReaderBufferSize + } + + return &parser{ + buf: bufio.NewReaderSize(r, size), + options: opts, + count: 1, + comment: &bytes.Buffer{}, + } +} + +// BOM handles header of UTF-8, UTF-16 LE and UTF-16 BE's BOM format. 
+// http://en.wikipedia.org/wiki/Byte_order_mark#Representations_of_byte_order_marks_by_encoding +func (p *parser) BOM() error { + mask, err := p.buf.Peek(2) + if err != nil && err != io.EOF { + return err + } else if len(mask) < 2 { + return nil + } + + switch { + case mask[0] == 254 && mask[1] == 255: + fallthrough + case mask[0] == 255 && mask[1] == 254: + _, err = p.buf.Read(mask) + if err != nil { + return err + } + case mask[0] == 239 && mask[1] == 187: + mask, err := p.buf.Peek(3) + if err != nil && err != io.EOF { + return err + } else if len(mask) < 3 { + return nil + } + if mask[2] == 191 { + _, err = p.buf.Read(mask) + if err != nil { + return err + } + } + } + return nil +} + +func (p *parser) readUntil(delim byte) ([]byte, error) { + data, err := p.buf.ReadBytes(delim) + if err != nil { + if err == io.EOF { + p.isEOF = true + } else { + return nil, err + } + } + return data, nil +} + +func cleanComment(in []byte) ([]byte, bool) { + i := bytes.IndexAny(in, "#;") + if i == -1 { + return nil, false + } + return in[i:], true +} + +func readKeyName(delimiters string, in []byte) (string, int, error) { + line := string(in) + + // Check if key name surrounded by quotes. + var keyQuote string + if line[0] == '"' { + if len(line) > 6 && string(line[0:3]) == `"""` { + keyQuote = `"""` + } else { + keyQuote = `"` + } + } else if line[0] == '`' { + keyQuote = "`" + } + + // Get out key name + var endIdx int + if len(keyQuote) > 0 { + startIdx := len(keyQuote) + // FIXME: fail case -> """"""name"""=value + pos := strings.Index(line[startIdx:], keyQuote) + if pos == -1 { + return "", -1, fmt.Errorf("missing closing key quote: %s", line) + } + pos += startIdx + + // Find key-value delimiter + i := strings.IndexAny(line[pos+startIdx:], delimiters) + if i < 0 { + return "", -1, ErrDelimiterNotFound{line} + } + endIdx = pos + i + return strings.TrimSpace(line[startIdx:pos]), endIdx + startIdx + 1, nil + } + + endIdx = strings.IndexAny(line, delimiters) + if endIdx < 0 { + return "", -1, ErrDelimiterNotFound{line} + } + return strings.TrimSpace(line[0:endIdx]), endIdx + 1, nil +} + +func (p *parser) readMultilines(line, val, valQuote string) (string, error) { + for { + data, err := p.readUntil('\n') + if err != nil { + return "", err + } + next := string(data) + + pos := strings.LastIndex(next, valQuote) + if pos > -1 { + val += next[:pos] + + comment, has := cleanComment([]byte(next[pos:])) + if has { + p.comment.Write(bytes.TrimSpace(comment)) + } + break + } + val += next + if p.isEOF { + return "", fmt.Errorf("missing closing key quote from %q to %q", line, next) + } + } + return val, nil +} + +func (p *parser) readContinuationLines(val string) (string, error) { + for { + data, err := p.readUntil('\n') + if err != nil { + return "", err + } + next := strings.TrimSpace(string(data)) + + if len(next) == 0 { + break + } + val += next + if val[len(val)-1] != '\\' { + break + } + val = val[:len(val)-1] + } + return val, nil +} + +// hasSurroundedQuote check if and only if the first and last characters +// are quotes \" or \'. +// It returns false if any other parts also contain same kind of quotes. 
+func hasSurroundedQuote(in string, quote byte) bool { + return len(in) >= 2 && in[0] == quote && in[len(in)-1] == quote && + strings.IndexByte(in[1:], quote) == len(in)-2 +} + +func (p *parser) readValue(in []byte, bufferSize int) (string, error) { + + line := strings.TrimLeftFunc(string(in), unicode.IsSpace) + if len(line) == 0 { + if p.options.AllowPythonMultilineValues && len(in) > 0 && in[len(in)-1] == '\n' { + return p.readPythonMultilines(line, bufferSize) + } + return "", nil + } + + var valQuote string + if len(line) > 3 && string(line[0:3]) == `"""` { + valQuote = `"""` + } else if line[0] == '`' { + valQuote = "`" + } else if p.options.UnescapeValueDoubleQuotes && line[0] == '"' { + valQuote = `"` + } + + if len(valQuote) > 0 { + startIdx := len(valQuote) + pos := strings.LastIndex(line[startIdx:], valQuote) + // Check for multi-line value + if pos == -1 { + return p.readMultilines(line, line[startIdx:], valQuote) + } + + if p.options.UnescapeValueDoubleQuotes && valQuote == `"` { + return strings.Replace(line[startIdx:pos+startIdx], `\"`, `"`, -1), nil + } + return line[startIdx : pos+startIdx], nil + } + + lastChar := line[len(line)-1] + // Won't be able to reach here if value only contains whitespace + line = strings.TrimSpace(line) + trimmedLastChar := line[len(line)-1] + + // Check continuation lines when desired + if !p.options.IgnoreContinuation && trimmedLastChar == '\\' { + return p.readContinuationLines(line[:len(line)-1]) + } + + // Check if ignore inline comment + if !p.options.IgnoreInlineComment { + var i int + if p.options.SpaceBeforeInlineComment { + i = strings.Index(line, " #") + if i == -1 { + i = strings.Index(line, " ;") + } + + } else { + i = strings.IndexAny(line, "#;") + } + + if i > -1 { + p.comment.WriteString(line[i:]) + line = strings.TrimSpace(line[:i]) + } + + } + + // Trim single and double quotes + if (hasSurroundedQuote(line, '\'') || + hasSurroundedQuote(line, '"')) && !p.options.PreserveSurroundedQuote { + line = line[1 : len(line)-1] + } else if len(valQuote) == 0 && p.options.UnescapeValueCommentSymbols { + if strings.Contains(line, `\;`) { + line = strings.Replace(line, `\;`, ";", -1) + } + if strings.Contains(line, `\#`) { + line = strings.Replace(line, `\#`, "#", -1) + } + } else if p.options.AllowPythonMultilineValues && lastChar == '\n' { + return p.readPythonMultilines(line, bufferSize) + } + + return line, nil +} + +func (p *parser) readPythonMultilines(line string, bufferSize int) (string, error) { + parserBufferPeekResult, _ := p.buf.Peek(bufferSize) + peekBuffer := bytes.NewBuffer(parserBufferPeekResult) + + indentSize := 0 + for { + peekData, peekErr := peekBuffer.ReadBytes('\n') + if peekErr != nil { + if peekErr == io.EOF { + p.debug("readPythonMultilines: io.EOF, peekData: %q, line: %q", string(peekData), line) + return line, nil + } + + p.debug("readPythonMultilines: failed to peek with error: %v", peekErr) + return "", peekErr + } + + p.debug("readPythonMultilines: parsing %q", string(peekData)) + + peekMatches := pythonMultiline.FindStringSubmatch(string(peekData)) + p.debug("readPythonMultilines: matched %d parts", len(peekMatches)) + for n, v := range peekMatches { + p.debug(" %d: %q", n, v) + } + + // Return if not a Python multiline value. + if len(peekMatches) != 3 { + p.debug("readPythonMultilines: end of value, got: %q", line) + return line, nil + } + + // Determine indent size and line prefix. 
+ currentIndentSize := len(peekMatches[1]) + if indentSize < 1 { + indentSize = currentIndentSize + p.debug("readPythonMultilines: indent size is %d", indentSize) + } + + // Make sure each line is indented at least as far as first line. + if currentIndentSize < indentSize { + p.debug("readPythonMultilines: end of value, current indent: %d, expected indent: %d, line: %q", currentIndentSize, indentSize, line) + return line, nil + } + + // Advance the parser reader (buffer) in-sync with the peek buffer. + _, err := p.buf.Discard(len(peekData)) + if err != nil { + p.debug("readPythonMultilines: failed to skip to the end, returning error") + return "", err + } + + // Handle indented empty line. + line += "\n" + peekMatches[1][indentSize:] + peekMatches[2] + } +} + +// parse parses data through an io.Reader. +func (f *File) parse(reader io.Reader) (err error) { + p := newParser(reader, parserOptions{ + IgnoreContinuation: f.options.IgnoreContinuation, + IgnoreInlineComment: f.options.IgnoreInlineComment, + AllowPythonMultilineValues: f.options.AllowPythonMultilineValues, + SpaceBeforeInlineComment: f.options.SpaceBeforeInlineComment, + UnescapeValueDoubleQuotes: f.options.UnescapeValueDoubleQuotes, + UnescapeValueCommentSymbols: f.options.UnescapeValueCommentSymbols, + PreserveSurroundedQuote: f.options.PreserveSurroundedQuote, + DebugFunc: f.options.DebugFunc, + ReaderBufferSize: f.options.ReaderBufferSize, + }) + if err = p.BOM(); err != nil { + return fmt.Errorf("BOM: %v", err) + } + + // Ignore error because default section name is never empty string. + name := DefaultSection + if f.options.Insensitive || f.options.InsensitiveSections { + name = strings.ToLower(DefaultSection) + } + section, _ := f.NewSection(name) + + // This "last" is not strictly equivalent to "previous one" if current key is not the first nested key + var isLastValueEmpty bool + var lastRegularKey *Key + + var line []byte + var inUnparseableSection bool + + // NOTE: Iterate and increase `currentPeekSize` until + // the size of the parser buffer is found. + // TODO(unknwon): When Golang 1.10 is the lowest version supported, replace with `parserBufferSize := p.buf.Size()`. + parserBufferSize := 0 + // NOTE: Peek 4kb at a time. + currentPeekSize := minReaderBufferSize + + if f.options.AllowPythonMultilineValues { + for { + peekBytes, _ := p.buf.Peek(currentPeekSize) + peekBytesLength := len(peekBytes) + + if parserBufferSize >= peekBytesLength { + break + } + + currentPeekSize *= 2 + parserBufferSize = peekBytesLength + } + } + + for !p.isEOF { + line, err = p.readUntil('\n') + if err != nil { + return err + } + + if f.options.AllowNestedValues && + isLastValueEmpty && len(line) > 0 { + if line[0] == ' ' || line[0] == '\t' { + err = lastRegularKey.addNestedValue(string(bytes.TrimSpace(line))) + if err != nil { + return err + } + continue + } + } + + line = bytes.TrimLeftFunc(line, unicode.IsSpace) + if len(line) == 0 { + continue + } + + // Comments + if line[0] == '#' || line[0] == ';' { + // Note: we do not care ending line break, + // it is needed for adding second line, + // so just clean it once at the end when set to value. 
+ p.comment.Write(line) + continue + } + + // Section + if line[0] == '[' { + // Read to the next ']' (TODO: support quoted strings) + closeIdx := bytes.LastIndexByte(line, ']') + if closeIdx == -1 { + return fmt.Errorf("unclosed section: %s", line) + } + + name := string(line[1:closeIdx]) + section, err = f.NewSection(name) + if err != nil { + return err + } + + comment, has := cleanComment(line[closeIdx+1:]) + if has { + p.comment.Write(comment) + } + + section.Comment = strings.TrimSpace(p.comment.String()) + + // Reset auto-counter and comments + p.comment.Reset() + p.count = 1 + + inUnparseableSection = false + for i := range f.options.UnparseableSections { + if f.options.UnparseableSections[i] == name || + ((f.options.Insensitive || f.options.InsensitiveSections) && strings.EqualFold(f.options.UnparseableSections[i], name)) { + inUnparseableSection = true + continue + } + } + continue + } + + if inUnparseableSection { + section.isRawSection = true + section.rawBody += string(line) + continue + } + + kname, offset, err := readKeyName(f.options.KeyValueDelimiters, line) + if err != nil { + // Treat as boolean key when desired, and whole line is key name. + if IsErrDelimiterNotFound(err) { + switch { + case f.options.AllowBooleanKeys: + kname, err := p.readValue(line, parserBufferSize) + if err != nil { + return err + } + key, err := section.NewBooleanKey(kname) + if err != nil { + return err + } + key.Comment = strings.TrimSpace(p.comment.String()) + p.comment.Reset() + continue + + case f.options.SkipUnrecognizableLines: + continue + } + } + return err + } + + // Auto increment. + isAutoIncr := false + if kname == "-" { + isAutoIncr = true + kname = "#" + strconv.Itoa(p.count) + p.count++ + } + + value, err := p.readValue(line[offset:], parserBufferSize) + if err != nil { + return err + } + isLastValueEmpty = len(value) == 0 + + key, err := section.NewKey(kname, value) + if err != nil { + return err + } + key.isAutoIncrement = isAutoIncr + key.Comment = strings.TrimSpace(p.comment.String()) + p.comment.Reset() + lastRegularKey = key + } + return nil +} diff --git a/vendor/gopkg.in/ini.v1/section.go b/vendor/gopkg.in/ini.v1/section.go new file mode 100644 index 000000000..afaa97c97 --- /dev/null +++ b/vendor/gopkg.in/ini.v1/section.go @@ -0,0 +1,256 @@ +// Copyright 2014 Unknwon +// +// Licensed under the Apache License, Version 2.0 (the "License"): you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. + +package ini + +import ( + "errors" + "fmt" + "strings" +) + +// Section represents a config section. +type Section struct { + f *File + Comment string + name string + keys map[string]*Key + keyList []string + keysHash map[string]string + + isRawSection bool + rawBody string +} + +func newSection(f *File, name string) *Section { + return &Section{ + f: f, + name: name, + keys: make(map[string]*Key), + keyList: make([]string, 0, 10), + keysHash: make(map[string]string), + } +} + +// Name returns name of Section. +func (s *Section) Name() string { + return s.name +} + +// Body returns rawBody of Section if the section was marked as unparseable. 
+// It still follows the other rules of the INI format surrounding leading/trailing whitespace. +func (s *Section) Body() string { + return strings.TrimSpace(s.rawBody) +} + +// SetBody updates body content only if section is raw. +func (s *Section) SetBody(body string) { + if !s.isRawSection { + return + } + s.rawBody = body +} + +// NewKey creates a new key to given section. +func (s *Section) NewKey(name, val string) (*Key, error) { + if len(name) == 0 { + return nil, errors.New("error creating new key: empty key name") + } else if s.f.options.Insensitive || s.f.options.InsensitiveKeys { + name = strings.ToLower(name) + } + + if s.f.BlockMode { + s.f.lock.Lock() + defer s.f.lock.Unlock() + } + + if inSlice(name, s.keyList) { + if s.f.options.AllowShadows { + if err := s.keys[name].addShadow(val); err != nil { + return nil, err + } + } else { + s.keys[name].value = val + s.keysHash[name] = val + } + return s.keys[name], nil + } + + s.keyList = append(s.keyList, name) + s.keys[name] = newKey(s, name, val) + s.keysHash[name] = val + return s.keys[name], nil +} + +// NewBooleanKey creates a new boolean type key to given section. +func (s *Section) NewBooleanKey(name string) (*Key, error) { + key, err := s.NewKey(name, "true") + if err != nil { + return nil, err + } + + key.isBooleanType = true + return key, nil +} + +// GetKey returns key in section by given name. +func (s *Section) GetKey(name string) (*Key, error) { + if s.f.BlockMode { + s.f.lock.RLock() + } + if s.f.options.Insensitive || s.f.options.InsensitiveKeys { + name = strings.ToLower(name) + } + key := s.keys[name] + if s.f.BlockMode { + s.f.lock.RUnlock() + } + + if key == nil { + // Check if it is a child-section. + sname := s.name + for { + if i := strings.LastIndex(sname, s.f.options.ChildSectionDelimiter); i > -1 { + sname = sname[:i] + sec, err := s.f.GetSection(sname) + if err != nil { + continue + } + return sec.GetKey(name) + } + break + } + return nil, fmt.Errorf("error when getting key of section %q: key %q not exists", s.name, name) + } + return key, nil +} + +// HasKey returns true if section contains a key with given name. +func (s *Section) HasKey(name string) bool { + key, _ := s.GetKey(name) + return key != nil +} + +// Deprecated: Use "HasKey" instead. +func (s *Section) Haskey(name string) bool { + return s.HasKey(name) +} + +// HasValue returns true if section contains given raw value. +func (s *Section) HasValue(value string) bool { + if s.f.BlockMode { + s.f.lock.RLock() + defer s.f.lock.RUnlock() + } + + for _, k := range s.keys { + if value == k.value { + return true + } + } + return false +} + +// Key assumes named Key exists in section and returns a zero-value when not. +func (s *Section) Key(name string) *Key { + key, err := s.GetKey(name) + if err != nil { + // It's OK here because the only possible error is empty key name, + // but if it's empty, this piece of code won't be executed. + key, _ = s.NewKey(name, "") + return key + } + return key +} + +// Keys returns list of keys of section. +func (s *Section) Keys() []*Key { + keys := make([]*Key, len(s.keyList)) + for i := range s.keyList { + keys[i] = s.Key(s.keyList[i]) + } + return keys +} + +// ParentKeys returns list of keys of parent section. 
+func (s *Section) ParentKeys() []*Key { + var parentKeys []*Key + sname := s.name + for { + if i := strings.LastIndex(sname, s.f.options.ChildSectionDelimiter); i > -1 { + sname = sname[:i] + sec, err := s.f.GetSection(sname) + if err != nil { + continue + } + parentKeys = append(parentKeys, sec.Keys()...) + } else { + break + } + + } + return parentKeys +} + +// KeyStrings returns list of key names of section. +func (s *Section) KeyStrings() []string { + list := make([]string, len(s.keyList)) + copy(list, s.keyList) + return list +} + +// KeysHash returns keys hash consisting of names and values. +func (s *Section) KeysHash() map[string]string { + if s.f.BlockMode { + s.f.lock.RLock() + defer s.f.lock.RUnlock() + } + + hash := map[string]string{} + for key, value := range s.keysHash { + hash[key] = value + } + return hash +} + +// DeleteKey deletes a key from section. +func (s *Section) DeleteKey(name string) { + if s.f.BlockMode { + s.f.lock.Lock() + defer s.f.lock.Unlock() + } + + for i, k := range s.keyList { + if k == name { + s.keyList = append(s.keyList[:i], s.keyList[i+1:]...) + delete(s.keys, name) + delete(s.keysHash, name) + return + } + } +} + +// ChildSections returns a list of child sections of current section. +// For example, "[parent.child1]" and "[parent.child12]" are child sections +// of section "[parent]". +func (s *Section) ChildSections() []*Section { + prefix := s.name + s.f.options.ChildSectionDelimiter + children := make([]*Section, 0, 3) + for _, name := range s.f.sectionList { + if strings.HasPrefix(name, prefix) { + children = append(children, s.f.sections[name]...) + } + } + return children +} diff --git a/vendor/gopkg.in/ini.v1/struct.go b/vendor/gopkg.in/ini.v1/struct.go new file mode 100644 index 000000000..a486b2fe0 --- /dev/null +++ b/vendor/gopkg.in/ini.v1/struct.go @@ -0,0 +1,747 @@ +// Copyright 2014 Unknwon +// +// Licensed under the Apache License, Version 2.0 (the "License"): you may +// not use this file except in compliance with the License. You may obtain +// a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +// License for the specific language governing permissions and limitations +// under the License. + +package ini + +import ( + "bytes" + "errors" + "fmt" + "reflect" + "strings" + "time" + "unicode" +) + +// NameMapper represents a ini tag name mapper. +type NameMapper func(string) string + +// Built-in name getters. +var ( + // SnackCase converts to format SNACK_CASE. + SnackCase NameMapper = func(raw string) string { + newstr := make([]rune, 0, len(raw)) + for i, chr := range raw { + if isUpper := 'A' <= chr && chr <= 'Z'; isUpper { + if i > 0 { + newstr = append(newstr, '_') + } + } + newstr = append(newstr, unicode.ToUpper(chr)) + } + return string(newstr) + } + // TitleUnderscore converts to format title_underscore. 
+ TitleUnderscore NameMapper = func(raw string) string { + newstr := make([]rune, 0, len(raw)) + for i, chr := range raw { + if isUpper := 'A' <= chr && chr <= 'Z'; isUpper { + if i > 0 { + newstr = append(newstr, '_') + } + chr -= 'A' - 'a' + } + newstr = append(newstr, chr) + } + return string(newstr) + } +) + +func (s *Section) parseFieldName(raw, actual string) string { + if len(actual) > 0 { + return actual + } + if s.f.NameMapper != nil { + return s.f.NameMapper(raw) + } + return raw +} + +func parseDelim(actual string) string { + if len(actual) > 0 { + return actual + } + return "," +} + +var reflectTime = reflect.TypeOf(time.Now()).Kind() + +// setSliceWithProperType sets proper values to slice based on its type. +func setSliceWithProperType(key *Key, field reflect.Value, delim string, allowShadow, isStrict bool) error { + var strs []string + if allowShadow { + strs = key.StringsWithShadows(delim) + } else { + strs = key.Strings(delim) + } + + numVals := len(strs) + if numVals == 0 { + return nil + } + + var vals interface{} + var err error + + sliceOf := field.Type().Elem().Kind() + switch sliceOf { + case reflect.String: + vals = strs + case reflect.Int: + vals, err = key.parseInts(strs, true, false) + case reflect.Int64: + vals, err = key.parseInt64s(strs, true, false) + case reflect.Uint: + vals, err = key.parseUints(strs, true, false) + case reflect.Uint64: + vals, err = key.parseUint64s(strs, true, false) + case reflect.Float64: + vals, err = key.parseFloat64s(strs, true, false) + case reflect.Bool: + vals, err = key.parseBools(strs, true, false) + case reflectTime: + vals, err = key.parseTimesFormat(time.RFC3339, strs, true, false) + default: + return fmt.Errorf("unsupported type '[]%s'", sliceOf) + } + if err != nil && isStrict { + return err + } + + slice := reflect.MakeSlice(field.Type(), numVals, numVals) + for i := 0; i < numVals; i++ { + switch sliceOf { + case reflect.String: + slice.Index(i).Set(reflect.ValueOf(vals.([]string)[i])) + case reflect.Int: + slice.Index(i).Set(reflect.ValueOf(vals.([]int)[i])) + case reflect.Int64: + slice.Index(i).Set(reflect.ValueOf(vals.([]int64)[i])) + case reflect.Uint: + slice.Index(i).Set(reflect.ValueOf(vals.([]uint)[i])) + case reflect.Uint64: + slice.Index(i).Set(reflect.ValueOf(vals.([]uint64)[i])) + case reflect.Float64: + slice.Index(i).Set(reflect.ValueOf(vals.([]float64)[i])) + case reflect.Bool: + slice.Index(i).Set(reflect.ValueOf(vals.([]bool)[i])) + case reflectTime: + slice.Index(i).Set(reflect.ValueOf(vals.([]time.Time)[i])) + } + } + field.Set(slice) + return nil +} + +func wrapStrictError(err error, isStrict bool) error { + if isStrict { + return err + } + return nil +} + +// setWithProperType sets proper value to field based on its type, +// but it does not return error for failing parsing, +// because we want to use default value that is already assigned to struct. 
+func setWithProperType(t reflect.Type, key *Key, field reflect.Value, delim string, allowShadow, isStrict bool) error { + vt := t + isPtr := t.Kind() == reflect.Ptr + if isPtr { + vt = t.Elem() + } + switch vt.Kind() { + case reflect.String: + stringVal := key.String() + if isPtr { + field.Set(reflect.ValueOf(&stringVal)) + } else if len(stringVal) > 0 { + field.SetString(key.String()) + } + case reflect.Bool: + boolVal, err := key.Bool() + if err != nil { + return wrapStrictError(err, isStrict) + } + if isPtr { + field.Set(reflect.ValueOf(&boolVal)) + } else { + field.SetBool(boolVal) + } + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + // ParseDuration will not return err for `0`, so check the type name + if vt.Name() == "Duration" { + durationVal, err := key.Duration() + if err != nil { + if intVal, err := key.Int64(); err == nil { + field.SetInt(intVal) + return nil + } + return wrapStrictError(err, isStrict) + } + if isPtr { + field.Set(reflect.ValueOf(&durationVal)) + } else if int64(durationVal) > 0 { + field.Set(reflect.ValueOf(durationVal)) + } + return nil + } + + intVal, err := key.Int64() + if err != nil { + return wrapStrictError(err, isStrict) + } + if isPtr { + pv := reflect.New(t.Elem()) + pv.Elem().SetInt(intVal) + field.Set(pv) + } else { + field.SetInt(intVal) + } + // byte is an alias for uint8, so supporting uint8 breaks support for byte + case reflect.Uint, reflect.Uint16, reflect.Uint32, reflect.Uint64: + durationVal, err := key.Duration() + // Skip zero value + if err == nil && uint64(durationVal) > 0 { + if isPtr { + field.Set(reflect.ValueOf(&durationVal)) + } else { + field.Set(reflect.ValueOf(durationVal)) + } + return nil + } + + uintVal, err := key.Uint64() + if err != nil { + return wrapStrictError(err, isStrict) + } + if isPtr { + pv := reflect.New(t.Elem()) + pv.Elem().SetUint(uintVal) + field.Set(pv) + } else { + field.SetUint(uintVal) + } + + case reflect.Float32, reflect.Float64: + floatVal, err := key.Float64() + if err != nil { + return wrapStrictError(err, isStrict) + } + if isPtr { + pv := reflect.New(t.Elem()) + pv.Elem().SetFloat(floatVal) + field.Set(pv) + } else { + field.SetFloat(floatVal) + } + case reflectTime: + timeVal, err := key.Time() + if err != nil { + return wrapStrictError(err, isStrict) + } + if isPtr { + field.Set(reflect.ValueOf(&timeVal)) + } else { + field.Set(reflect.ValueOf(timeVal)) + } + case reflect.Slice: + return setSliceWithProperType(key, field, delim, allowShadow, isStrict) + default: + return fmt.Errorf("unsupported type %q", t) + } + return nil +} + +func parseTagOptions(tag string) (rawName string, omitEmpty bool, allowShadow bool, allowNonUnique bool, extends bool) { + opts := strings.SplitN(tag, ",", 5) + rawName = opts[0] + for _, opt := range opts[1:] { + omitEmpty = omitEmpty || (opt == "omitempty") + allowShadow = allowShadow || (opt == "allowshadow") + allowNonUnique = allowNonUnique || (opt == "nonunique") + extends = extends || (opt == "extends") + } + return rawName, omitEmpty, allowShadow, allowNonUnique, extends +} + +// mapToField maps the given value to the matching field of the given section. +// The sectionIndex is the index (if non unique sections are enabled) to which the value should be added. 
+func (s *Section) mapToField(val reflect.Value, isStrict bool, sectionIndex int, sectionName string) error { + if val.Kind() == reflect.Ptr { + val = val.Elem() + } + typ := val.Type() + + for i := 0; i < typ.NumField(); i++ { + field := val.Field(i) + tpField := typ.Field(i) + + tag := tpField.Tag.Get("ini") + if tag == "-" { + continue + } + + rawName, _, allowShadow, allowNonUnique, extends := parseTagOptions(tag) + fieldName := s.parseFieldName(tpField.Name, rawName) + if len(fieldName) == 0 || !field.CanSet() { + continue + } + + isStruct := tpField.Type.Kind() == reflect.Struct + isStructPtr := tpField.Type.Kind() == reflect.Ptr && tpField.Type.Elem().Kind() == reflect.Struct + isAnonymousPtr := tpField.Type.Kind() == reflect.Ptr && tpField.Anonymous + if isAnonymousPtr { + field.Set(reflect.New(tpField.Type.Elem())) + } + + if extends && (isAnonymousPtr || (isStruct && tpField.Anonymous)) { + if isStructPtr && field.IsNil() { + field.Set(reflect.New(tpField.Type.Elem())) + } + fieldSection := s + if rawName != "" { + sectionName = s.name + s.f.options.ChildSectionDelimiter + rawName + if secs, err := s.f.SectionsByName(sectionName); err == nil && sectionIndex < len(secs) { + fieldSection = secs[sectionIndex] + } + } + if err := fieldSection.mapToField(field, isStrict, sectionIndex, sectionName); err != nil { + return fmt.Errorf("map to field %q: %v", fieldName, err) + } + } else if isAnonymousPtr || isStruct || isStructPtr { + if secs, err := s.f.SectionsByName(fieldName); err == nil { + if len(secs) <= sectionIndex { + return fmt.Errorf("there are not enough sections (%d <= %d) for the field %q", len(secs), sectionIndex, fieldName) + } + // Only set the field to non-nil struct value if we have a section for it. + // Otherwise, we end up with a non-nil struct ptr even though there is no data. + if isStructPtr && field.IsNil() { + field.Set(reflect.New(tpField.Type.Elem())) + } + if err = secs[sectionIndex].mapToField(field, isStrict, sectionIndex, fieldName); err != nil { + return fmt.Errorf("map to field %q: %v", fieldName, err) + } + continue + } + } + + // Map non-unique sections + if allowNonUnique && tpField.Type.Kind() == reflect.Slice { + newField, err := s.mapToSlice(fieldName, field, isStrict) + if err != nil { + return fmt.Errorf("map to slice %q: %v", fieldName, err) + } + + field.Set(newField) + continue + } + + if key, err := s.GetKey(fieldName); err == nil { + delim := parseDelim(tpField.Tag.Get("delim")) + if err = setWithProperType(tpField.Type, key, field, delim, allowShadow, isStrict); err != nil { + return fmt.Errorf("set field %q: %v", fieldName, err) + } + } + } + return nil +} + +// mapToSlice maps all sections with the same name and returns the new value. +// The type of the Value must be a slice. +func (s *Section) mapToSlice(secName string, val reflect.Value, isStrict bool) (reflect.Value, error) { + secs, err := s.f.SectionsByName(secName) + if err != nil { + return reflect.Value{}, err + } + + typ := val.Type().Elem() + for i, sec := range secs { + elem := reflect.New(typ) + if err = sec.mapToField(elem, isStrict, i, sec.name); err != nil { + return reflect.Value{}, fmt.Errorf("map to field from section %q: %v", secName, err) + } + + val = reflect.Append(val, elem.Elem()) + } + return val, nil +} + +// mapTo maps a section to object v. 
+func (s *Section) mapTo(v interface{}, isStrict bool) error { + typ := reflect.TypeOf(v) + val := reflect.ValueOf(v) + if typ.Kind() == reflect.Ptr { + typ = typ.Elem() + val = val.Elem() + } else { + return errors.New("not a pointer to a struct") + } + + if typ.Kind() == reflect.Slice { + newField, err := s.mapToSlice(s.name, val, isStrict) + if err != nil { + return err + } + + val.Set(newField) + return nil + } + + return s.mapToField(val, isStrict, 0, s.name) +} + +// MapTo maps section to given struct. +func (s *Section) MapTo(v interface{}) error { + return s.mapTo(v, false) +} + +// StrictMapTo maps section to given struct in strict mode, +// which returns all possible error including value parsing error. +func (s *Section) StrictMapTo(v interface{}) error { + return s.mapTo(v, true) +} + +// MapTo maps file to given struct. +func (f *File) MapTo(v interface{}) error { + return f.Section("").MapTo(v) +} + +// StrictMapTo maps file to given struct in strict mode, +// which returns all possible error including value parsing error. +func (f *File) StrictMapTo(v interface{}) error { + return f.Section("").StrictMapTo(v) +} + +// MapToWithMapper maps data sources to given struct with name mapper. +func MapToWithMapper(v interface{}, mapper NameMapper, source interface{}, others ...interface{}) error { + cfg, err := Load(source, others...) + if err != nil { + return err + } + cfg.NameMapper = mapper + return cfg.MapTo(v) +} + +// StrictMapToWithMapper maps data sources to given struct with name mapper in strict mode, +// which returns all possible error including value parsing error. +func StrictMapToWithMapper(v interface{}, mapper NameMapper, source interface{}, others ...interface{}) error { + cfg, err := Load(source, others...) + if err != nil { + return err + } + cfg.NameMapper = mapper + return cfg.StrictMapTo(v) +} + +// MapTo maps data sources to given struct. +func MapTo(v, source interface{}, others ...interface{}) error { + return MapToWithMapper(v, nil, source, others...) +} + +// StrictMapTo maps data sources to given struct in strict mode, +// which returns all possible error including value parsing error. +func StrictMapTo(v, source interface{}, others ...interface{}) error { + return StrictMapToWithMapper(v, nil, source, others...) +} + +// reflectSliceWithProperType does the opposite thing as setSliceWithProperType. 
+func reflectSliceWithProperType(key *Key, field reflect.Value, delim string, allowShadow bool) error { + slice := field.Slice(0, field.Len()) + if field.Len() == 0 { + return nil + } + sliceOf := field.Type().Elem().Kind() + + if allowShadow { + var keyWithShadows *Key + for i := 0; i < field.Len(); i++ { + var val string + switch sliceOf { + case reflect.String: + val = slice.Index(i).String() + case reflect.Int, reflect.Int64: + val = fmt.Sprint(slice.Index(i).Int()) + case reflect.Uint, reflect.Uint64: + val = fmt.Sprint(slice.Index(i).Uint()) + case reflect.Float64: + val = fmt.Sprint(slice.Index(i).Float()) + case reflect.Bool: + val = fmt.Sprint(slice.Index(i).Bool()) + case reflectTime: + val = slice.Index(i).Interface().(time.Time).Format(time.RFC3339) + default: + return fmt.Errorf("unsupported type '[]%s'", sliceOf) + } + + if i == 0 { + keyWithShadows = newKey(key.s, key.name, val) + } else { + _ = keyWithShadows.AddShadow(val) + } + } + *key = *keyWithShadows + return nil + } + + var buf bytes.Buffer + for i := 0; i < field.Len(); i++ { + switch sliceOf { + case reflect.String: + buf.WriteString(slice.Index(i).String()) + case reflect.Int, reflect.Int64: + buf.WriteString(fmt.Sprint(slice.Index(i).Int())) + case reflect.Uint, reflect.Uint64: + buf.WriteString(fmt.Sprint(slice.Index(i).Uint())) + case reflect.Float64: + buf.WriteString(fmt.Sprint(slice.Index(i).Float())) + case reflect.Bool: + buf.WriteString(fmt.Sprint(slice.Index(i).Bool())) + case reflectTime: + buf.WriteString(slice.Index(i).Interface().(time.Time).Format(time.RFC3339)) + default: + return fmt.Errorf("unsupported type '[]%s'", sliceOf) + } + buf.WriteString(delim) + } + key.SetValue(buf.String()[:buf.Len()-len(delim)]) + return nil +} + +// reflectWithProperType does the opposite thing as setWithProperType. +func reflectWithProperType(t reflect.Type, key *Key, field reflect.Value, delim string, allowShadow bool) error { + switch t.Kind() { + case reflect.String: + key.SetValue(field.String()) + case reflect.Bool: + key.SetValue(fmt.Sprint(field.Bool())) + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + key.SetValue(fmt.Sprint(field.Int())) + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64: + key.SetValue(fmt.Sprint(field.Uint())) + case reflect.Float32, reflect.Float64: + key.SetValue(fmt.Sprint(field.Float())) + case reflectTime: + key.SetValue(fmt.Sprint(field.Interface().(time.Time).Format(time.RFC3339))) + case reflect.Slice: + return reflectSliceWithProperType(key, field, delim, allowShadow) + case reflect.Ptr: + if !field.IsNil() { + return reflectWithProperType(t.Elem(), key, field.Elem(), delim, allowShadow) + } + default: + return fmt.Errorf("unsupported type %q", t) + } + return nil +} + +// CR: copied from encoding/json/encode.go with modifications of time.Time support. +// TODO: add more test coverage. 
+func isEmptyValue(v reflect.Value) bool { + switch v.Kind() { + case reflect.Array, reflect.Map, reflect.Slice, reflect.String: + return v.Len() == 0 + case reflect.Bool: + return !v.Bool() + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + return v.Int() == 0 + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: + return v.Uint() == 0 + case reflect.Float32, reflect.Float64: + return v.Float() == 0 + case reflect.Interface, reflect.Ptr: + return v.IsNil() + case reflectTime: + t, ok := v.Interface().(time.Time) + return ok && t.IsZero() + } + return false +} + +// StructReflector is the interface implemented by struct types that can extract themselves into INI objects. +type StructReflector interface { + ReflectINIStruct(*File) error +} + +func (s *Section) reflectFrom(val reflect.Value) error { + if val.Kind() == reflect.Ptr { + val = val.Elem() + } + typ := val.Type() + + for i := 0; i < typ.NumField(); i++ { + if !val.Field(i).CanInterface() { + continue + } + + field := val.Field(i) + tpField := typ.Field(i) + + tag := tpField.Tag.Get("ini") + if tag == "-" { + continue + } + + rawName, omitEmpty, allowShadow, allowNonUnique, extends := parseTagOptions(tag) + if omitEmpty && isEmptyValue(field) { + continue + } + + if r, ok := field.Interface().(StructReflector); ok { + return r.ReflectINIStruct(s.f) + } + + fieldName := s.parseFieldName(tpField.Name, rawName) + if len(fieldName) == 0 || !field.CanSet() { + continue + } + + if extends && tpField.Anonymous && (tpField.Type.Kind() == reflect.Ptr || tpField.Type.Kind() == reflect.Struct) { + if err := s.reflectFrom(field); err != nil { + return fmt.Errorf("reflect from field %q: %v", fieldName, err) + } + continue + } + + if (tpField.Type.Kind() == reflect.Ptr && tpField.Type.Elem().Kind() == reflect.Struct) || + (tpField.Type.Kind() == reflect.Struct && tpField.Type.Name() != "Time") { + // Note: The only error here is section doesn't exist. + sec, err := s.f.GetSection(fieldName) + if err != nil { + // Note: fieldName can never be empty here, ignore error. + sec, _ = s.f.NewSection(fieldName) + } + + // Add comment from comment tag + if len(sec.Comment) == 0 { + sec.Comment = tpField.Tag.Get("comment") + } + + if err = sec.reflectFrom(field); err != nil { + return fmt.Errorf("reflect from field %q: %v", fieldName, err) + } + continue + } + + if allowNonUnique && tpField.Type.Kind() == reflect.Slice { + slice := field.Slice(0, field.Len()) + if field.Len() == 0 { + return nil + } + sliceOf := field.Type().Elem().Kind() + + for i := 0; i < field.Len(); i++ { + if sliceOf != reflect.Struct && sliceOf != reflect.Ptr { + return fmt.Errorf("field %q is not a slice of pointer or struct", fieldName) + } + + sec, err := s.f.NewSection(fieldName) + if err != nil { + return err + } + + // Add comment from comment tag + if len(sec.Comment) == 0 { + sec.Comment = tpField.Tag.Get("comment") + } + + if err := sec.reflectFrom(slice.Index(i)); err != nil { + return fmt.Errorf("reflect from field %q: %v", fieldName, err) + } + } + continue + } + + // Note: Same reason as section. 
+ key, err := s.GetKey(fieldName) + if err != nil { + key, _ = s.NewKey(fieldName, "") + } + + // Add comment from comment tag + if len(key.Comment) == 0 { + key.Comment = tpField.Tag.Get("comment") + } + + delim := parseDelim(tpField.Tag.Get("delim")) + if err = reflectWithProperType(tpField.Type, key, field, delim, allowShadow); err != nil { + return fmt.Errorf("reflect field %q: %v", fieldName, err) + } + + } + return nil +} + +// ReflectFrom reflects section from given struct. It overwrites existing ones. +func (s *Section) ReflectFrom(v interface{}) error { + typ := reflect.TypeOf(v) + val := reflect.ValueOf(v) + + if s.name != DefaultSection && s.f.options.AllowNonUniqueSections && + (typ.Kind() == reflect.Slice || typ.Kind() == reflect.Ptr) { + // Clear sections to make sure none exists before adding the new ones + s.f.DeleteSection(s.name) + + if typ.Kind() == reflect.Ptr { + sec, err := s.f.NewSection(s.name) + if err != nil { + return err + } + return sec.reflectFrom(val.Elem()) + } + + slice := val.Slice(0, val.Len()) + sliceOf := val.Type().Elem().Kind() + if sliceOf != reflect.Ptr { + return fmt.Errorf("not a slice of pointers") + } + + for i := 0; i < slice.Len(); i++ { + sec, err := s.f.NewSection(s.name) + if err != nil { + return err + } + + err = sec.reflectFrom(slice.Index(i)) + if err != nil { + return fmt.Errorf("reflect from %dth field: %v", i, err) + } + } + + return nil + } + + if typ.Kind() == reflect.Ptr { + val = val.Elem() + } else { + return errors.New("not a pointer to a struct") + } + + return s.reflectFrom(val) +} + +// ReflectFrom reflects file from given struct. +func (f *File) ReflectFrom(v interface{}) error { + return f.Section("").ReflectFrom(v) +} + +// ReflectFromWithMapper reflects data sources from given struct with name mapper. +func ReflectFromWithMapper(cfg *File, v interface{}, mapper NameMapper) error { + cfg.NameMapper = mapper + return cfg.ReflectFrom(v) +} + +// ReflectFrom reflects data sources from given struct. +func ReflectFrom(cfg *File, v interface{}) error { + return ReflectFromWithMapper(cfg, v, nil) +} diff --git a/vendor/honnef.co/go/tools/LICENSE b/vendor/honnef.co/go/tools/LICENSE new file mode 100644 index 000000000..dfd031454 --- /dev/null +++ b/vendor/honnef.co/go/tools/LICENSE @@ -0,0 +1,20 @@ +Copyright (c) 2016 Dominik Honnef + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
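For orientation, the reflection helpers vendored in the hunk above (reflectWithProperType, Section.ReflectFrom, File.ReflectFrom and ReflectFrom from gopkg.in/ini.v1) serialize Go structs into INI sections and keys, honoring the ini, comment, delim and omitempty tag options handled there. Below is a minimal usage sketch, not part of the vendored diff: the Config struct, its field names and tag values are hypothetical; only ini.Empty, ini.ReflectFrom and File.WriteTo come from the vendored API.

package main

import (
	"os"

	"gopkg.in/ini.v1"
)

// Config is a hypothetical example struct; the tags map to the
// rawName/omitEmpty/delim/comment handling in the vendored reflectFrom code.
type Config struct {
	Name  string   `ini:"name" comment:"service name"`
	Port  int      `ini:"port"`
	Debug bool     `ini:"debug,omitempty"`
	Hosts []string `ini:"hosts" delim:","`
}

func main() {
	cfg := ini.Empty()
	// ReflectFrom writes the struct into the default section of cfg.
	if err := ini.ReflectFrom(cfg, &Config{
		Name:  "demo",
		Port:  8080,
		Hosts: []string{"a.example", "b.example"},
	}); err != nil {
		panic(err)
	}
	// Debug is skipped because omitempty is set and the field holds its zero value.
	if _, err := cfg.WriteTo(os.Stdout); err != nil {
		panic(err)
	}
}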
diff --git a/vendor/honnef.co/go/tools/LICENSE-THIRD-PARTY b/vendor/honnef.co/go/tools/LICENSE-THIRD-PARTY new file mode 100644 index 000000000..f2c0fa93a --- /dev/null +++ b/vendor/honnef.co/go/tools/LICENSE-THIRD-PARTY @@ -0,0 +1,121 @@ +Staticcheck and its related tools make use of third party projects, +either by reusing their code, or by statically linking them into +resulting binaries. These projects are: + +* The Go Programming Language - https://golang.org/ + golang.org/x/mod - https://github.com/golang/mod + golang.org/x/tools - https://github.com/golang/tools + golang.org/x/sys - https://github.com/golang/sys + golang.org/x/xerrors - https://github.com/golang/xerrors + + Copyright (c) 2009 The Go Authors. All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are + met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following disclaimer + in the documentation and/or other materials provided with the + distribution. + * Neither the name of Google Inc. nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +* github.com/BurntSushi/toml - https://github.com/BurntSushi/toml + + The MIT License (MIT) + + Copyright (c) 2013 TOML authors + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in + all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN + THE SOFTWARE. + +* gogrep - https://github.com/mvdan/gogrep + + Copyright (c) 2017, Daniel Martí. All rights reserved. 
+ + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are + met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following disclaimer + in the documentation and/or other materials provided with the + distribution. + * Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +* gosmith - https://github.com/dvyukov/gosmith + + Copyright (c) 2014 Dmitry Vyukov. All rights reserved. + + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are + met: + + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following disclaimer + in the documentation and/or other materials provided with the + distribution. + * The name of Dmitry Vyukov may be used to endorse or promote + products derived from this software without specific prior written permission. + + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/honnef.co/go/tools/analysis/code/code.go b/vendor/honnef.co/go/tools/analysis/code/code.go new file mode 100644 index 000000000..27b01c4ec --- /dev/null +++ b/vendor/honnef.co/go/tools/analysis/code/code.go @@ -0,0 +1,294 @@ +// Package code answers structural and type questions about Go code. 
+package code + +import ( + "flag" + "fmt" + "go/ast" + "go/constant" + "go/token" + "go/types" + "strings" + + "honnef.co/go/tools/analysis/facts" + "honnef.co/go/tools/go/ast/astutil" + "honnef.co/go/tools/go/types/typeutil" + + "golang.org/x/tools/go/analysis" +) + +type Positioner interface { + Pos() token.Pos +} + +func IsOfType(pass *analysis.Pass, expr ast.Expr, name string) bool { + return typeutil.IsType(pass.TypesInfo.TypeOf(expr), name) +} + +func IsInTest(pass *analysis.Pass, node Positioner) bool { + // FIXME(dh): this doesn't work for global variables with + // initializers + f := pass.Fset.File(node.Pos()) + return f != nil && strings.HasSuffix(f.Name(), "_test.go") +} + +// IsMain reports whether the package being processed is a package +// main. +func IsMain(pass *analysis.Pass) bool { + return pass.Pkg.Name() == "main" +} + +// IsMainLike reports whether the package being processed is a +// main-like package. A main-like package is a package that is +// package main, or that is intended to be used by a tool framework +// such as cobra to implement a command. +// +// Note that this function errs on the side of false positives; it may +// return true for packages that aren't main-like. IsMainLike is +// intended for analyses that wish to suppress diagnostics for +// main-like packages to avoid false positives. +func IsMainLike(pass *analysis.Pass) bool { + if pass.Pkg.Name() == "main" { + return true + } + for _, imp := range pass.Pkg.Imports() { + if imp.Path() == "github.com/spf13/cobra" { + return true + } + } + return false +} + +func SelectorName(pass *analysis.Pass, expr *ast.SelectorExpr) string { + info := pass.TypesInfo + sel := info.Selections[expr] + if sel == nil { + if x, ok := expr.X.(*ast.Ident); ok { + pkg, ok := info.ObjectOf(x).(*types.PkgName) + if !ok { + // This shouldn't happen + return fmt.Sprintf("%s.%s", x.Name, expr.Sel.Name) + } + return fmt.Sprintf("%s.%s", pkg.Imported().Path(), expr.Sel.Name) + } + panic(fmt.Sprintf("unsupported selector: %v", expr)) + } + return fmt.Sprintf("(%s).%s", sel.Recv(), sel.Obj().Name()) +} + +func IsNil(pass *analysis.Pass, expr ast.Expr) bool { + return pass.TypesInfo.Types[expr].IsNil() +} + +func BoolConst(pass *analysis.Pass, expr ast.Expr) bool { + val := pass.TypesInfo.ObjectOf(expr.(*ast.Ident)).(*types.Const).Val() + return constant.BoolVal(val) +} + +func IsBoolConst(pass *analysis.Pass, expr ast.Expr) bool { + // We explicitly don't support typed bools because more often than + // not, custom bool types are used as binary enums and the + // explicit comparison is desired. 
+ + ident, ok := expr.(*ast.Ident) + if !ok { + return false + } + obj := pass.TypesInfo.ObjectOf(ident) + c, ok := obj.(*types.Const) + if !ok { + return false + } + basic, ok := c.Type().(*types.Basic) + if !ok { + return false + } + if basic.Kind() != types.UntypedBool && basic.Kind() != types.Bool { + return false + } + return true +} + +func ExprToInt(pass *analysis.Pass, expr ast.Expr) (int64, bool) { + tv := pass.TypesInfo.Types[expr] + if tv.Value == nil { + return 0, false + } + if tv.Value.Kind() != constant.Int { + return 0, false + } + return constant.Int64Val(tv.Value) +} + +func ExprToString(pass *analysis.Pass, expr ast.Expr) (string, bool) { + val := pass.TypesInfo.Types[expr].Value + if val == nil { + return "", false + } + if val.Kind() != constant.String { + return "", false + } + return constant.StringVal(val), true +} + +func CallName(pass *analysis.Pass, call *ast.CallExpr) string { + switch fun := astutil.Unparen(call.Fun).(type) { + case *ast.SelectorExpr: + fn, ok := pass.TypesInfo.ObjectOf(fun.Sel).(*types.Func) + if !ok { + return "" + } + return typeutil.FuncName(fn) + case *ast.Ident: + obj := pass.TypesInfo.ObjectOf(fun) + switch obj := obj.(type) { + case *types.Func: + return typeutil.FuncName(obj) + case *types.Builtin: + return obj.Name() + default: + return "" + } + default: + return "" + } +} + +func IsCallTo(pass *analysis.Pass, node ast.Node, name string) bool { + call, ok := node.(*ast.CallExpr) + if !ok { + return false + } + return CallName(pass, call) == name +} + +func IsCallToAny(pass *analysis.Pass, node ast.Node, names ...string) bool { + call, ok := node.(*ast.CallExpr) + if !ok { + return false + } + q := CallName(pass, call) + for _, name := range names { + if q == name { + return true + } + } + return false +} + +func File(pass *analysis.Pass, node Positioner) *ast.File { + m := pass.ResultOf[facts.TokenFile].(map[*token.File]*ast.File) + return m[pass.Fset.File(node.Pos())] +} + +// IsGenerated reports whether pos is in a generated file, It ignores +// //line directives. +func IsGenerated(pass *analysis.Pass, pos token.Pos) bool { + _, ok := Generator(pass, pos) + return ok +} + +// Generator returns the generator that generated the file containing +// pos. It ignores //line directives. +func Generator(pass *analysis.Pass, pos token.Pos) (facts.Generator, bool) { + file := pass.Fset.PositionFor(pos, false).Filename + m := pass.ResultOf[facts.Generated].(map[string]facts.Generator) + g, ok := m[file] + return g, ok +} + +// MayHaveSideEffects reports whether expr may have side effects. If +// the purity argument is nil, this function implements a purely +// syntactic check, meaning that any function call may have side +// effects, regardless of the called function's body. Otherwise, +// purity will be consulted to determine the purity of function calls. +func MayHaveSideEffects(pass *analysis.Pass, expr ast.Expr, purity facts.PurityResult) bool { + switch expr := expr.(type) { + case *ast.BadExpr: + return true + case *ast.Ellipsis: + return MayHaveSideEffects(pass, expr.Elt, purity) + case *ast.FuncLit: + // the literal itself cannot have side effects, only calling it + // might, which is handled by CallExpr. 
+ return false + case *ast.ArrayType, *ast.StructType, *ast.FuncType, *ast.InterfaceType, *ast.MapType, *ast.ChanType: + // types cannot have side effects + return false + case *ast.BasicLit: + return false + case *ast.BinaryExpr: + return MayHaveSideEffects(pass, expr.X, purity) || MayHaveSideEffects(pass, expr.Y, purity) + case *ast.CallExpr: + if purity == nil { + return true + } + switch obj := typeutil.Callee(pass.TypesInfo, expr).(type) { + case *types.Func: + if _, ok := purity[obj]; !ok { + return true + } + case *types.Builtin: + switch obj.Name() { + case "len", "cap": + default: + return true + } + default: + return true + } + for _, arg := range expr.Args { + if MayHaveSideEffects(pass, arg, purity) { + return true + } + } + return false + case *ast.CompositeLit: + if MayHaveSideEffects(pass, expr.Type, purity) { + return true + } + for _, elt := range expr.Elts { + if MayHaveSideEffects(pass, elt, purity) { + return true + } + } + return false + case *ast.Ident: + return false + case *ast.IndexExpr: + return MayHaveSideEffects(pass, expr.X, purity) || MayHaveSideEffects(pass, expr.Index, purity) + case *ast.KeyValueExpr: + return MayHaveSideEffects(pass, expr.Key, purity) || MayHaveSideEffects(pass, expr.Value, purity) + case *ast.SelectorExpr: + return MayHaveSideEffects(pass, expr.X, purity) + case *ast.SliceExpr: + return MayHaveSideEffects(pass, expr.X, purity) || + MayHaveSideEffects(pass, expr.Low, purity) || + MayHaveSideEffects(pass, expr.High, purity) || + MayHaveSideEffects(pass, expr.Max, purity) + case *ast.StarExpr: + return MayHaveSideEffects(pass, expr.X, purity) + case *ast.TypeAssertExpr: + return MayHaveSideEffects(pass, expr.X, purity) + case *ast.UnaryExpr: + if MayHaveSideEffects(pass, expr.X, purity) { + return true + } + return expr.Op == token.ARROW + case *ast.ParenExpr: + return MayHaveSideEffects(pass, expr.X, purity) + case nil: + return false + default: + panic(fmt.Sprintf("internal error: unhandled type %T", expr)) + } +} + +func IsGoVersion(pass *analysis.Pass, minor int) bool { + f, ok := pass.Analyzer.Flags.Lookup("go").Value.(flag.Getter) + if !ok { + panic("requested Go version, but analyzer has no version flag") + } + version := f.Get().(int) + return version >= minor +} diff --git a/vendor/honnef.co/go/tools/analysis/code/visit.go b/vendor/honnef.co/go/tools/analysis/code/visit.go new file mode 100644 index 000000000..f8bf2d169 --- /dev/null +++ b/vendor/honnef.co/go/tools/analysis/code/visit.go @@ -0,0 +1,51 @@ +package code + +import ( + "bytes" + "go/ast" + "go/format" + + "honnef.co/go/tools/pattern" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/ast/inspector" +) + +func Preorder(pass *analysis.Pass, fn func(ast.Node), types ...ast.Node) { + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).Preorder(types, fn) +} + +func PreorderStack(pass *analysis.Pass, fn func(ast.Node, []ast.Node), types ...ast.Node) { + pass.ResultOf[inspect.Analyzer].(*inspector.Inspector).WithStack(types, func(n ast.Node, push bool, stack []ast.Node) (proceed bool) { + if push { + fn(n, stack) + } + return true + }) +} + +func Match(pass *analysis.Pass, q pattern.Pattern, node ast.Node) (*pattern.Matcher, bool) { + // Note that we ignore q.Relevant – callers of Match usually use + // AST inspectors that already filter on nodes we're interested + // in. 
+ m := &pattern.Matcher{TypesInfo: pass.TypesInfo} + ok := m.Match(q.Root, node) + return m, ok +} + +func MatchAndEdit(pass *analysis.Pass, before, after pattern.Pattern, node ast.Node) (*pattern.Matcher, []analysis.TextEdit, bool) { + m, ok := Match(pass, before, node) + if !ok { + return m, nil, false + } + r := pattern.NodeToAST(after.Root, m.State) + buf := &bytes.Buffer{} + format.Node(buf, pass.Fset, r) + edit := []analysis.TextEdit{{ + Pos: node.Pos(), + End: node.End(), + NewText: buf.Bytes(), + }} + return m, edit, true +} diff --git a/vendor/honnef.co/go/tools/analysis/edit/edit.go b/vendor/honnef.co/go/tools/analysis/edit/edit.go new file mode 100644 index 000000000..90bc5f8cc --- /dev/null +++ b/vendor/honnef.co/go/tools/analysis/edit/edit.go @@ -0,0 +1,74 @@ +package edit + +import ( + "bytes" + "go/ast" + "go/format" + "go/token" + + "golang.org/x/tools/go/analysis" + "honnef.co/go/tools/pattern" +) + +type Ranger interface { + Pos() token.Pos + End() token.Pos +} + +type Range [2]token.Pos + +func (r Range) Pos() token.Pos { return r[0] } +func (r Range) End() token.Pos { return r[1] } + +func ReplaceWithString(fset *token.FileSet, old Ranger, new string) analysis.TextEdit { + return analysis.TextEdit{ + Pos: old.Pos(), + End: old.End(), + NewText: []byte(new), + } +} + +func ReplaceWithNode(fset *token.FileSet, old Ranger, new ast.Node) analysis.TextEdit { + buf := &bytes.Buffer{} + if err := format.Node(buf, fset, new); err != nil { + panic("internal error: " + err.Error()) + } + return analysis.TextEdit{ + Pos: old.Pos(), + End: old.End(), + NewText: buf.Bytes(), + } +} + +func ReplaceWithPattern(pass *analysis.Pass, after pattern.Pattern, state pattern.State, node Ranger) analysis.TextEdit { + r := pattern.NodeToAST(after.Root, state) + buf := &bytes.Buffer{} + format.Node(buf, pass.Fset, r) + return analysis.TextEdit{ + Pos: node.Pos(), + End: node.End(), + NewText: buf.Bytes(), + } +} + +func Delete(old Ranger) analysis.TextEdit { + return analysis.TextEdit{ + Pos: old.Pos(), + End: old.End(), + NewText: nil, + } +} + +func Fix(msg string, edits ...analysis.TextEdit) analysis.SuggestedFix { + return analysis.SuggestedFix{ + Message: msg, + TextEdits: edits, + } +} + +func Selector(x, sel string) *ast.SelectorExpr { + return &ast.SelectorExpr{ + X: &ast.Ident{Name: x}, + Sel: &ast.Ident{Name: sel}, + } +} diff --git a/vendor/honnef.co/go/tools/analysis/facts/deprecated.go b/vendor/honnef.co/go/tools/analysis/facts/deprecated.go new file mode 100644 index 000000000..dbc5ede5c --- /dev/null +++ b/vendor/honnef.co/go/tools/analysis/facts/deprecated.go @@ -0,0 +1,145 @@ +package facts + +import ( + "go/ast" + "go/token" + "go/types" + "reflect" + "strings" + + "golang.org/x/tools/go/analysis" +) + +type IsDeprecated struct{ Msg string } + +func (*IsDeprecated) AFact() {} +func (d *IsDeprecated) String() string { return "Deprecated: " + d.Msg } + +type DeprecatedResult struct { + Objects map[types.Object]*IsDeprecated + Packages map[*types.Package]*IsDeprecated +} + +var Deprecated = &analysis.Analyzer{ + Name: "fact_deprecated", + Doc: "Mark deprecated objects", + Run: deprecated, + FactTypes: []analysis.Fact{(*IsDeprecated)(nil)}, + ResultType: reflect.TypeOf(DeprecatedResult{}), +} + +func deprecated(pass *analysis.Pass) (interface{}, error) { + var names []*ast.Ident + + extractDeprecatedMessage := func(docs []*ast.CommentGroup) string { + for _, doc := range docs { + if doc == nil { + continue + } + parts := strings.Split(doc.Text(), "\n\n") + for _, part := range 
parts { + if !strings.HasPrefix(part, "Deprecated: ") { + continue + } + alt := part[len("Deprecated: "):] + alt = strings.Replace(alt, "\n", " ", -1) + return alt + } + } + return "" + } + doDocs := func(names []*ast.Ident, docs []*ast.CommentGroup) { + alt := extractDeprecatedMessage(docs) + if alt == "" { + return + } + + for _, name := range names { + obj := pass.TypesInfo.ObjectOf(name) + pass.ExportObjectFact(obj, &IsDeprecated{alt}) + } + } + + var docs []*ast.CommentGroup + for _, f := range pass.Files { + docs = append(docs, f.Doc) + } + if alt := extractDeprecatedMessage(docs); alt != "" { + // Don't mark package syscall as deprecated, even though + // it is. A lot of people still use it for simple + // constants like SIGKILL, and I am not comfortable + // telling them to use x/sys for that. + if pass.Pkg.Path() != "syscall" { + pass.ExportPackageFact(&IsDeprecated{alt}) + } + } + + docs = docs[:0] + for _, f := range pass.Files { + fn := func(node ast.Node) bool { + if node == nil { + return true + } + var ret bool + switch node := node.(type) { + case *ast.GenDecl: + switch node.Tok { + case token.TYPE, token.CONST, token.VAR: + docs = append(docs, node.Doc) + return true + default: + return false + } + case *ast.FuncDecl: + docs = append(docs, node.Doc) + names = []*ast.Ident{node.Name} + ret = false + case *ast.TypeSpec: + docs = append(docs, node.Doc) + names = []*ast.Ident{node.Name} + ret = true + case *ast.ValueSpec: + docs = append(docs, node.Doc) + names = node.Names + ret = false + case *ast.File: + return true + case *ast.StructType: + for _, field := range node.Fields.List { + doDocs(field.Names, []*ast.CommentGroup{field.Doc}) + } + return false + case *ast.InterfaceType: + for _, field := range node.Methods.List { + doDocs(field.Names, []*ast.CommentGroup{field.Doc}) + } + return false + default: + return false + } + if len(names) == 0 || len(docs) == 0 { + return ret + } + doDocs(names, docs) + + docs = docs[:0] + names = nil + return ret + } + ast.Inspect(f, fn) + } + + out := DeprecatedResult{ + Objects: map[types.Object]*IsDeprecated{}, + Packages: map[*types.Package]*IsDeprecated{}, + } + + for _, fact := range pass.AllObjectFacts() { + out.Objects[fact.Object] = fact.Fact.(*IsDeprecated) + } + for _, fact := range pass.AllPackageFacts() { + out.Packages[fact.Package] = fact.Fact.(*IsDeprecated) + } + + return out, nil +} diff --git a/vendor/honnef.co/go/tools/analysis/facts/directives.go b/vendor/honnef.co/go/tools/analysis/facts/directives.go new file mode 100644 index 000000000..800fce2e0 --- /dev/null +++ b/vendor/honnef.co/go/tools/analysis/facts/directives.go @@ -0,0 +1,20 @@ +package facts + +import ( + "reflect" + + "golang.org/x/tools/go/analysis" + "honnef.co/go/tools/analysis/lint" +) + +func directives(pass *analysis.Pass) (interface{}, error) { + return lint.ParseDirectives(pass.Files, pass.Fset), nil +} + +var Directives = &analysis.Analyzer{ + Name: "directives", + Doc: "extracts linter directives", + Run: directives, + RunDespiteErrors: true, + ResultType: reflect.TypeOf([]lint.Directive{}), +} diff --git a/vendor/honnef.co/go/tools/analysis/facts/generated.go b/vendor/honnef.co/go/tools/analysis/facts/generated.go new file mode 100644 index 000000000..058fd8922 --- /dev/null +++ b/vendor/honnef.co/go/tools/analysis/facts/generated.go @@ -0,0 +1,97 @@ +package facts + +import ( + "bufio" + "bytes" + "io" + "os" + "reflect" + "strings" + + "golang.org/x/tools/go/analysis" +) + +type Generator int + +// A list of known generators we can detect 
+const ( + Unknown Generator = iota + Goyacc + Cgo + Stringer + ProtocGenGo +) + +var ( + // used by cgo before Go 1.11 + oldCgo = []byte("// Created by cgo - DO NOT EDIT") + prefix = []byte("// Code generated ") + suffix = []byte(" DO NOT EDIT.") + nl = []byte("\n") + crnl = []byte("\r\n") +) + +func isGenerated(path string) (Generator, bool) { + f, err := os.Open(path) + if err != nil { + return 0, false + } + defer f.Close() + br := bufio.NewReader(f) + for { + s, err := br.ReadBytes('\n') + if err != nil && err != io.EOF { + return 0, false + } + s = bytes.TrimSuffix(s, crnl) + s = bytes.TrimSuffix(s, nl) + if bytes.HasPrefix(s, prefix) && bytes.HasSuffix(s, suffix) { + if len(s)-len(suffix) < len(prefix) { + return Unknown, true + } + + text := string(s[len(prefix) : len(s)-len(suffix)]) + switch text { + case "by goyacc.": + return Goyacc, true + case "by cmd/cgo;": + return Cgo, true + case "by protoc-gen-go.": + return ProtocGenGo, true + } + if strings.HasPrefix(text, `by "stringer `) { + return Stringer, true + } + if strings.HasPrefix(text, `by goyacc `) { + return Goyacc, true + } + + return Unknown, true + } + if bytes.Equal(s, oldCgo) { + return Cgo, true + } + if err == io.EOF { + break + } + } + return 0, false +} + +var Generated = &analysis.Analyzer{ + Name: "isgenerated", + Doc: "annotate file names that have been code generated", + Run: func(pass *analysis.Pass) (interface{}, error) { + m := map[string]Generator{} + for _, f := range pass.Files { + path := pass.Fset.PositionFor(f.Pos(), false).Filename + g, ok := isGenerated(path) + if ok { + m[path] = g + } + } + return m, nil + }, + RunDespiteErrors: true, + ResultType: reflect.TypeOf(map[string]Generator{}), +} diff --git a/vendor/honnef.co/go/tools/analysis/facts/nilness/nilness.go b/vendor/honnef.co/go/tools/analysis/facts/nilness/nilness.go new file mode 100644 index 000000000..4d49d1327 --- /dev/null +++ b/vendor/honnef.co/go/tools/analysis/facts/nilness/nilness.go @@ -0,0 +1,242 @@ +package nilness + +import ( + "fmt" + "go/token" + "go/types" + "reflect" + + "honnef.co/go/tools/go/ir" + "honnef.co/go/tools/go/types/typeutil" + "honnef.co/go/tools/internal/passes/buildir" + + "golang.org/x/tools/go/analysis" +) + +// neverReturnsNilFact denotes that a function's return value will never +// be nil (typed or untyped). The analysis errs on the side of false +// negatives. +type neverReturnsNilFact struct { + Rets []neverNilness +} + +func (*neverReturnsNilFact) AFact() {} +func (fact *neverReturnsNilFact) String() string { + return fmt.Sprintf("never returns nil: %v", fact.Rets) +} + +type Result struct { + m map[*types.Func][]neverNilness +} + +var Analysis = &analysis.Analyzer{ + Name: "nilness", + Doc: "Annotates return values that will never be nil (typed or untyped)", + Run: run, + Requires: []*analysis.Analyzer{buildir.Analyzer}, + FactTypes: []analysis.Fact{(*neverReturnsNilFact)(nil)}, + ResultType: reflect.TypeOf((*Result)(nil)), +} + +// MayReturnNil reports whether the ret's return value of fn might be +// a typed or untyped nil value. The value of ret is zero-based. When +// globalOnly is true, the only possible nil values are global +// variables. +// +// The analysis has false positives: MayReturnNil can incorrectly +// report true, but never incorrectly reports false. 
+func (r *Result) MayReturnNil(fn *types.Func, ret int) (yes bool, globalOnly bool) { + if !typeutil.IsPointerLike(fn.Type().(*types.Signature).Results().At(ret).Type()) { + return false, false + } + if len(r.m[fn]) == 0 { + return true, false + } + + v := r.m[fn][ret] + return v != neverNil, v == onlyGlobal +} + +func run(pass *analysis.Pass) (interface{}, error) { + seen := map[*ir.Function]struct{}{} + out := &Result{ + m: map[*types.Func][]neverNilness{}, + } + for _, fn := range pass.ResultOf[buildir.Analyzer].(*buildir.IR).SrcFuncs { + impl(pass, fn, seen) + } + + for _, fact := range pass.AllObjectFacts() { + out.m[fact.Object.(*types.Func)] = fact.Fact.(*neverReturnsNilFact).Rets + } + + return out, nil +} + +type neverNilness uint8 + +const ( + neverNil neverNilness = 1 + onlyGlobal neverNilness = 2 + nilly neverNilness = 3 +) + +func (n neverNilness) String() string { + switch n { + case neverNil: + return "never" + case onlyGlobal: + return "global" + case nilly: + return "nil" + default: + return "BUG" + } +} + +func impl(pass *analysis.Pass, fn *ir.Function, seenFns map[*ir.Function]struct{}) []neverNilness { + if fn.Object() == nil { + // TODO(dh): support closures + return nil + } + if fact := new(neverReturnsNilFact); pass.ImportObjectFact(fn.Object(), fact) { + return fact.Rets + } + if fn.Pkg != pass.ResultOf[buildir.Analyzer].(*buildir.IR).Pkg { + return nil + } + if fn.Blocks == nil { + return nil + } + if _, ok := seenFns[fn]; ok { + // break recursion + return nil + } + + seenFns[fn] = struct{}{} + + seen := map[ir.Value]struct{}{} + + var mightReturnNil func(v ir.Value) neverNilness + mightReturnNil = func(v ir.Value) neverNilness { + if _, ok := seen[v]; ok { + // break cycle + return nilly + } + if !typeutil.IsPointerLike(v.Type()) { + return neverNil + } + seen[v] = struct{}{} + switch v := v.(type) { + case *ir.MakeInterface: + return mightReturnNil(v.X) + case *ir.Convert: + return mightReturnNil(v.X) + case *ir.Slice: + return mightReturnNil(v.X) + case *ir.Phi: + ret := neverNil + for _, e := range v.Edges { + if n := mightReturnNil(e); n > ret { + ret = n + } + } + return ret + case *ir.Extract: + switch d := v.Tuple.(type) { + case *ir.Call: + if callee := d.Call.StaticCallee(); callee != nil { + ret := impl(pass, callee, seenFns) + if len(ret) == 0 { + return nilly + } + return ret[v.Index] + } else { + return nilly + } + case *ir.TypeAssert, *ir.Next, *ir.Select, *ir.MapLookup, *ir.TypeSwitch, *ir.Recv: + // we don't need to look at the Extract's index + // because we've already checked its type. 
+ return nilly + default: + panic(fmt.Sprintf("internal error: unhandled type %T", d)) + } + case *ir.Call: + if callee := v.Call.StaticCallee(); callee != nil { + ret := impl(pass, callee, seenFns) + if len(ret) == 0 { + return nilly + } + return ret[0] + } else { + return nilly + } + case *ir.BinOp, *ir.UnOp, *ir.Alloc, *ir.FieldAddr, *ir.IndexAddr, *ir.Global, *ir.MakeSlice, *ir.MakeClosure, *ir.Function, *ir.MakeMap, *ir.MakeChan: + return neverNil + case *ir.Sigma: + iff, ok := v.From.Control().(*ir.If) + if !ok { + return nilly + } + binop, ok := iff.Cond.(*ir.BinOp) + if !ok { + return nilly + } + isNil := func(v ir.Value) bool { + k, ok := v.(*ir.Const) + if !ok { + return false + } + return k.Value == nil + } + if binop.X == v.X && isNil(binop.Y) || binop.Y == v.X && isNil(binop.X) { + op := binop.Op + if v.From.Succs[0] != v.Block() { + // we're in the false branch, negate op + switch op { + case token.EQL: + op = token.NEQ + case token.NEQ: + op = token.EQL + default: + panic(fmt.Sprintf("internal error: unhandled token %v", op)) + } + } + switch op { + case token.EQL: + return nilly + case token.NEQ: + return neverNil + default: + panic(fmt.Sprintf("internal error: unhandled token %v", op)) + } + } + return nilly + case *ir.ChangeType: + return mightReturnNil(v.X) + case *ir.Load: + if _, ok := v.X.(*ir.Global); ok { + return onlyGlobal + } + return nilly + case *ir.TypeAssert, *ir.ChangeInterface, *ir.Field, *ir.Const, *ir.Index, *ir.MapLookup, *ir.Parameter, *ir.Recv, *ir.TypeSwitch: + return nilly + default: + panic(fmt.Sprintf("internal error: unhandled type %T", v)) + } + } + ret := fn.Exit.Control().(*ir.Return) + out := make([]neverNilness, len(ret.Results)) + export := false + for i, v := range ret.Results { + v := mightReturnNil(v) + out[i] = v + if v != nilly && typeutil.IsPointerLike(fn.Signature.Results().At(i).Type()) { + export = true + } + } + if export { + pass.ExportObjectFact(fn.Object(), &neverReturnsNilFact{out}) + } + return out +} diff --git a/vendor/honnef.co/go/tools/analysis/facts/purity.go b/vendor/honnef.co/go/tools/analysis/facts/purity.go new file mode 100644 index 000000000..582b6209e --- /dev/null +++ b/vendor/honnef.co/go/tools/analysis/facts/purity.go @@ -0,0 +1,178 @@ +package facts + +import ( + "go/types" + "reflect" + + "honnef.co/go/tools/go/ir" + "honnef.co/go/tools/go/ir/irutil" + "honnef.co/go/tools/internal/passes/buildir" + + "golang.org/x/tools/go/analysis" +) + +type IsPure struct{} + +func (*IsPure) AFact() {} +func (d *IsPure) String() string { return "is pure" } + +type PurityResult map[*types.Func]*IsPure + +var Purity = &analysis.Analyzer{ + Name: "fact_purity", + Doc: "Mark pure functions", + Run: purity, + Requires: []*analysis.Analyzer{buildir.Analyzer}, + FactTypes: []analysis.Fact{(*IsPure)(nil)}, + ResultType: reflect.TypeOf(PurityResult{}), +} + +var pureStdlib = map[string]struct{}{ + "errors.New": {}, + "fmt.Errorf": {}, + "fmt.Sprintf": {}, + "fmt.Sprint": {}, + "sort.Reverse": {}, + "strings.Map": {}, + "strings.Repeat": {}, + "strings.Replace": {}, + "strings.Title": {}, + "strings.ToLower": {}, + "strings.ToLowerSpecial": {}, + "strings.ToTitle": {}, + "strings.ToTitleSpecial": {}, + "strings.ToUpper": {}, + "strings.ToUpperSpecial": {}, + "strings.Trim": {}, + "strings.TrimFunc": {}, + "strings.TrimLeft": {}, + "strings.TrimLeftFunc": {}, + "strings.TrimPrefix": {}, + "strings.TrimRight": {}, + "strings.TrimRightFunc": {}, + "strings.TrimSpace": {}, + "strings.TrimSuffix": {}, + "(*net/http.Request).WithContext": 
{}, +} + +func purity(pass *analysis.Pass) (interface{}, error) { + seen := map[*ir.Function]struct{}{} + irpkg := pass.ResultOf[buildir.Analyzer].(*buildir.IR).Pkg + var check func(fn *ir.Function) (ret bool) + check = func(fn *ir.Function) (ret bool) { + if fn.Object() == nil { + // TODO(dh): support closures + return false + } + if pass.ImportObjectFact(fn.Object(), new(IsPure)) { + return true + } + if fn.Pkg != irpkg { + // Function is in another package but wasn't marked as + // pure, ergo it isn't pure + return false + } + // Break recursion + if _, ok := seen[fn]; ok { + return false + } + + seen[fn] = struct{}{} + defer func() { + if ret { + pass.ExportObjectFact(fn.Object(), &IsPure{}) + } + }() + + if irutil.IsStub(fn) { + return false + } + + if _, ok := pureStdlib[fn.Object().(*types.Func).FullName()]; ok { + return true + } + + if fn.Signature.Results().Len() == 0 { + // A function with no return values is empty or is doing some + // work we cannot see (for example because of build tags); + // don't consider it pure. + return false + } + + for _, param := range fn.Params { + // TODO(dh): this may not be strictly correct. pure code + // can, to an extent, operate on non-basic types. + if _, ok := param.Type().Underlying().(*types.Basic); !ok { + return false + } + } + + // Don't consider external functions pure. + if fn.Blocks == nil { + return false + } + checkCall := func(common *ir.CallCommon) bool { + if common.IsInvoke() { + return false + } + builtin, ok := common.Value.(*ir.Builtin) + if !ok { + if common.StaticCallee() != fn { + if common.StaticCallee() == nil { + return false + } + if !check(common.StaticCallee()) { + return false + } + } + } else { + switch builtin.Name() { + case "len", "cap": + default: + return false + } + } + return true + } + for _, b := range fn.Blocks { + for _, ins := range b.Instrs { + switch ins := ins.(type) { + case *ir.Call: + if !checkCall(ins.Common()) { + return false + } + case *ir.Defer: + if !checkCall(&ins.Call) { + return false + } + case *ir.Select: + return false + case *ir.Send: + return false + case *ir.Go: + return false + case *ir.Panic: + return false + case *ir.Store: + return false + case *ir.FieldAddr: + return false + case *ir.Alloc: + return false + case *ir.Load: + return false + } + } + } + return true + } + for _, fn := range pass.ResultOf[buildir.Analyzer].(*buildir.IR).SrcFuncs { + check(fn) + } + + out := PurityResult{} + for _, fact := range pass.AllObjectFacts() { + out[fact.Object.(*types.Func)] = fact.Fact.(*IsPure) + } + return out, nil +} diff --git a/vendor/honnef.co/go/tools/analysis/facts/token.go b/vendor/honnef.co/go/tools/analysis/facts/token.go new file mode 100644 index 000000000..26e76ff73 --- /dev/null +++ b/vendor/honnef.co/go/tools/analysis/facts/token.go @@ -0,0 +1,24 @@ +package facts + +import ( + "go/ast" + "go/token" + "reflect" + + "golang.org/x/tools/go/analysis" +) + +var TokenFile = &analysis.Analyzer{ + Name: "tokenfileanalyzer", + Doc: "creates a mapping of *token.File to *ast.File", + Run: func(pass *analysis.Pass) (interface{}, error) { + m := map[*token.File]*ast.File{} + for _, af := range pass.Files { + tf := pass.Fset.File(af.Pos()) + m[tf] = af + } + return m, nil + }, + RunDespiteErrors: true, + ResultType: reflect.TypeOf(map[*token.File]*ast.File{}), +} diff --git a/vendor/honnef.co/go/tools/analysis/facts/typedness/typedness.go b/vendor/honnef.co/go/tools/analysis/facts/typedness/typedness.go new file mode 100644 index 000000000..5e5644711 --- /dev/null +++ 
b/vendor/honnef.co/go/tools/analysis/facts/typedness/typedness.go @@ -0,0 +1,242 @@ +package typedness + +import ( + "fmt" + "go/token" + "go/types" + "reflect" + + "honnef.co/go/tools/go/ir" + "honnef.co/go/tools/go/ir/irutil" + "honnef.co/go/tools/internal/passes/buildir" + + "golang.org/x/tools/go/analysis" +) + +// alwaysTypedFact denotes that a function's return value will never +// be untyped nil. The analysis errs on the side of false negatives. +type alwaysTypedFact struct { + Rets uint8 +} + +func (*alwaysTypedFact) AFact() {} +func (fact *alwaysTypedFact) String() string { + return fmt.Sprintf("always typed: %08b", fact.Rets) +} + +type Result struct { + m map[*types.Func]uint8 +} + +var Analysis = &analysis.Analyzer{ + Name: "typedness", + Doc: "Annotates return values that are always typed values", + Run: run, + Requires: []*analysis.Analyzer{buildir.Analyzer}, + FactTypes: []analysis.Fact{(*alwaysTypedFact)(nil)}, + ResultType: reflect.TypeOf((*Result)(nil)), +} + +// MustReturnTyped reports whether the ret's return value of fn must +// be a typed value, i.e. an interface value containing a concrete +// type or trivially a concrete type. The value of ret is zero-based. +// +// The analysis has false negatives: MustReturnTyped may incorrectly +// report false, but never incorrectly reports true. +func (r *Result) MustReturnTyped(fn *types.Func, ret int) bool { + if _, ok := fn.Type().(*types.Signature).Results().At(ret).Type().Underlying().(*types.Interface); !ok { + return true + } + return (r.m[fn] & (1 << ret)) != 0 +} + +func run(pass *analysis.Pass) (interface{}, error) { + seen := map[*ir.Function]struct{}{} + out := &Result{ + m: map[*types.Func]uint8{}, + } + for _, fn := range pass.ResultOf[buildir.Analyzer].(*buildir.IR).SrcFuncs { + impl(pass, fn, seen) + } + + for _, fact := range pass.AllObjectFacts() { + out.m[fact.Object.(*types.Func)] = fact.Fact.(*alwaysTypedFact).Rets + } + + return out, nil +} + +func impl(pass *analysis.Pass, fn *ir.Function, seenFns map[*ir.Function]struct{}) (out uint8) { + if fn.Signature.Results().Len() > 8 { + return 0 + } + if fn.Object() == nil { + // TODO(dh): support closures + return 0 + } + if fact := new(alwaysTypedFact); pass.ImportObjectFact(fn.Object(), fact) { + return fact.Rets + } + if fn.Pkg != pass.ResultOf[buildir.Analyzer].(*buildir.IR).Pkg { + return 0 + } + if fn.Blocks == nil { + return 0 + } + if irutil.IsStub(fn) { + return 0 + } + if _, ok := seenFns[fn]; ok { + // break recursion + return 0 + } + + seenFns[fn] = struct{}{} + defer func() { + for i := 0; i < fn.Signature.Results().Len(); i++ { + if _, ok := fn.Signature.Results().At(i).Type().Underlying().(*types.Interface); !ok { + // we don't need facts to know that non-interface + // types can't be untyped nil. zeroing out those bits + // may result in all bits being zero, in which case we + // don't have to save any fact. 
+ out &= ^(1 << i) + } + } + if out > 0 { + pass.ExportObjectFact(fn.Object(), &alwaysTypedFact{out}) + } + }() + + isUntypedNil := func(v ir.Value) bool { + k, ok := v.(*ir.Const) + if !ok { + return false + } + if _, ok := k.Type().Underlying().(*types.Interface); !ok { + return false + } + return k.Value == nil + } + + var do func(v ir.Value, seen map[ir.Value]struct{}) bool + do = func(v ir.Value, seen map[ir.Value]struct{}) bool { + if _, ok := seen[v]; ok { + // break cycle + return false + } + seen[v] = struct{}{} + switch v := v.(type) { + case *ir.Const: + // can't be a typed nil, because then we'd be returning the + // result of MakeInterface. + return false + case *ir.ChangeInterface: + return do(v.X, seen) + case *ir.Extract: + call, ok := v.Tuple.(*ir.Call) + if !ok { + // We only care about extracts of function results. For + // everything else (e.g. channel receives and map + // lookups), we can either not deduce any information, or + // will see a MakeInterface. + return false + } + if callee := call.Call.StaticCallee(); callee != nil { + return impl(pass, callee, seenFns)&(1<interface conversions, which + // don't tell us anything about the nilness. + return false + case *ir.MapLookup, *ir.Index, *ir.Recv, *ir.Parameter, *ir.Load, *ir.Field: + // All other instructions that tell us nothing about the + // typedness of interface values. + return false + default: + panic(fmt.Sprintf("internal error: unhandled type %T", v)) + } + } + + ret := fn.Exit.Control().(*ir.Return) + for i, v := range ret.Results { + if _, ok := fn.Signature.Results().At(i).Type().Underlying().(*types.Interface); ok { + if do(v, map[ir.Value]struct{}{}) { + out |= 1 << i + } + } + } + return out +} diff --git a/vendor/honnef.co/go/tools/analysis/lint/lint.go b/vendor/honnef.co/go/tools/analysis/lint/lint.go new file mode 100644 index 000000000..fdc4cb5dc --- /dev/null +++ b/vendor/honnef.co/go/tools/analysis/lint/lint.go @@ -0,0 +1,198 @@ +// Package lint provides abstractions on top of go/analysis. +package lint + +import ( + "flag" + "fmt" + "go/ast" + "go/build" + "go/token" + "strconv" + "strings" + + "golang.org/x/tools/go/analysis" +) + +type Analyzer struct { + // The analyzer's documentation. Unlike go/analysis.Analyzer.Doc, + // this field is structured, providing access to severity, options + // etc. 
+ Doc *Documentation + Analyzer *analysis.Analyzer +} + +func (a *Analyzer) initialize() { + a.Analyzer.Doc = a.Doc.String() + if a.Analyzer.Flags.Usage == nil { + fs := flag.NewFlagSet("", flag.PanicOnError) + fs.Var(newVersionFlag(), "go", "Target Go version") + a.Analyzer.Flags = *fs + } +} + +func InitializeAnalyzers(docs map[string]*Documentation, analyzers map[string]*analysis.Analyzer) []*Analyzer { + out := make([]*Analyzer, 0, len(analyzers)) + for k, v := range analyzers { + v.Name = k + a := &Analyzer{ + Doc: docs[k], + Analyzer: v, + } + a.initialize() + out = append(out, a) + } + return out +} + +type Severity int + +const ( + SeverityNone Severity = iota + SeverityError + SeverityDeprecated + SeverityWarning + SeverityInfo + SeverityHint +) + +type Documentation struct { + Title string + Text string + Since string + NonDefault bool + Options []string + Severity Severity +} + +func Markdownify(m map[string]*Documentation) map[string]*Documentation { + for _, v := range m { + v.Title = toMarkdown(v.Title) + v.Text = toMarkdown(v.Text) + } + return m +} + +func toMarkdown(s string) string { + return strings.ReplaceAll(s, `\'`, "`") +} + +func (doc *Documentation) String() string { + if doc == nil { + return "Error: No documentation." + } + + b := &strings.Builder{} + fmt.Fprintf(b, "%s\n\n", doc.Title) + if doc.Text != "" { + fmt.Fprintf(b, "%s\n\n", doc.Text) + } + fmt.Fprint(b, "Available since\n ") + if doc.Since == "" { + fmt.Fprint(b, "unreleased") + } else { + fmt.Fprintf(b, "%s", doc.Since) + } + if doc.NonDefault { + fmt.Fprint(b, ", non-default") + } + fmt.Fprint(b, "\n") + if len(doc.Options) > 0 { + fmt.Fprintf(b, "\nOptions\n") + for _, opt := range doc.Options { + fmt.Fprintf(b, " %s", opt) + } + fmt.Fprint(b, "\n") + } + return b.String() +} + +func newVersionFlag() flag.Getter { + tags := build.Default.ReleaseTags + v := tags[len(tags)-1][2:] + version := new(VersionFlag) + if err := version.Set(v); err != nil { + panic(fmt.Sprintf("internal error: %s", err)) + } + return version +} + +type VersionFlag int + +func (v *VersionFlag) String() string { + return fmt.Sprintf("1.%d", *v) +} + +func (v *VersionFlag) Set(s string) error { + if len(s) < 3 { + return fmt.Errorf("invalid Go version: %q", s) + } + if s[0] != '1' { + return fmt.Errorf("invalid Go version: %q", s) + } + if s[1] != '.' { + return fmt.Errorf("invalid Go version: %q", s) + } + i, err := strconv.Atoi(s[2:]) + if err != nil { + return fmt.Errorf("invalid Go version: %q", s) + } + *v = VersionFlag(i) + return nil +} + +func (v *VersionFlag) Get() interface{} { + return int(*v) +} + +// ExhaustiveTypeSwitch panics when called. It can be used to ensure +// that type switches are exhaustive. +func ExhaustiveTypeSwitch(v interface{}) { + panic(fmt.Sprintf("internal error: unhandled case %T", v)) +} + +// A directive is a comment of the form '//lint: +// [arguments...]'. It represents instructions to the static analysis +// tool. 
+type Directive struct { + Command string + Arguments []string + Directive *ast.Comment + Node ast.Node +} + +func parseDirective(s string) (cmd string, args []string) { + if !strings.HasPrefix(s, "//lint:") { + return "", nil + } + s = strings.TrimPrefix(s, "//lint:") + fields := strings.Split(s, " ") + return fields[0], fields[1:] +} + +func ParseDirectives(files []*ast.File, fset *token.FileSet) []Directive { + var dirs []Directive + for _, f := range files { + // OPT(dh): in our old code, we skip all the comment map work if we + // couldn't find any directives, benchmark if that's actually + // worth doing + cm := ast.NewCommentMap(fset, f, f.Comments) + for node, cgs := range cm { + for _, cg := range cgs { + for _, c := range cg.List { + if !strings.HasPrefix(c.Text, "//lint:") { + continue + } + cmd, args := parseDirective(c.Text) + d := Directive{ + Command: cmd, + Arguments: args, + Directive: c, + Node: node, + } + dirs = append(dirs, d) + } + } + } + } + return dirs +} diff --git a/vendor/honnef.co/go/tools/analysis/report/report.go b/vendor/honnef.co/go/tools/analysis/report/report.go new file mode 100644 index 000000000..f7cd64ab2 --- /dev/null +++ b/vendor/honnef.co/go/tools/analysis/report/report.go @@ -0,0 +1,247 @@ +package report + +import ( + "bytes" + "fmt" + "go/ast" + "go/format" + "go/token" + "path/filepath" + "strconv" + "strings" + + "honnef.co/go/tools/analysis/facts" + "honnef.co/go/tools/go/ast/astutil" + + "golang.org/x/tools/go/analysis" +) + +type Options struct { + ShortRange bool + FilterGenerated bool + Fixes []analysis.SuggestedFix + Related []analysis.RelatedInformation +} + +type Option func(*Options) + +func ShortRange() Option { + return func(opts *Options) { + opts.ShortRange = true + } +} + +func FilterGenerated() Option { + return func(opts *Options) { + opts.FilterGenerated = true + } +} + +func Fixes(fixes ...analysis.SuggestedFix) Option { + return func(opts *Options) { + opts.Fixes = append(opts.Fixes, fixes...) + } +} + +func Related(node Positioner, message string) Option { + return func(opts *Options) { + pos, end, ok := getRange(node, opts.ShortRange) + if !ok { + return + } + r := analysis.RelatedInformation{ + Pos: pos, + End: end, + Message: message, + } + opts.Related = append(opts.Related, r) + } +} + +type Positioner interface { + Pos() token.Pos +} + +type fullPositioner interface { + Pos() token.Pos + End() token.Pos +} + +type sourcer interface { + Source() ast.Node +} + +// shortRange returns the position and end of the main component of an +// AST node. For nodes that have no body, the short range is identical +// to the node's Pos and End. For nodes that do have a body, the short +// range excludes the body. 
+func shortRange(node ast.Node) (pos, end token.Pos) { + switch node := node.(type) { + case *ast.File: + return node.Pos(), node.Name.End() + case *ast.CaseClause: + return node.Pos(), node.Colon + 1 + case *ast.CommClause: + return node.Pos(), node.Colon + 1 + case *ast.DeferStmt: + return node.Pos(), node.Defer + token.Pos(len("defer")) + case *ast.ExprStmt: + return shortRange(node.X) + case *ast.ForStmt: + if node.Post != nil { + return node.For, node.Post.End() + } else if node.Cond != nil { + return node.For, node.Cond.End() + } else if node.Init != nil { + // +1 to catch the semicolon, for gofmt'ed code + return node.Pos(), node.Init.End() + 1 + } else { + return node.Pos(), node.For + token.Pos(len("for")) + } + case *ast.FuncDecl: + return node.Pos(), node.Type.End() + case *ast.FuncLit: + return node.Pos(), node.Type.End() + case *ast.GoStmt: + if _, ok := astutil.Unparen(node.Call.Fun).(*ast.FuncLit); ok { + return node.Pos(), node.Go + token.Pos(len("go")) + } else { + return node.Pos(), node.End() + } + case *ast.IfStmt: + return node.Pos(), node.Cond.End() + case *ast.RangeStmt: + return node.Pos(), node.X.End() + case *ast.SelectStmt: + return node.Pos(), node.Pos() + token.Pos(len("select")) + case *ast.SwitchStmt: + if node.Tag != nil { + return node.Pos(), node.Tag.End() + } else if node.Init != nil { + // +1 to catch the semicolon, for gofmt'ed code + return node.Pos(), node.Init.End() + 1 + } else { + return node.Pos(), node.Pos() + token.Pos(len("switch")) + } + case *ast.TypeSwitchStmt: + return node.Pos(), node.Assign.End() + default: + return node.Pos(), node.End() + } +} + +func HasRange(node Positioner) bool { + // we don't know if getRange will be called with shortRange set to + // true, so make sure that both work. + _, _, ok := getRange(node, false) + if !ok { + return false + } + _, _, ok = getRange(node, true) + return ok +} + +func getRange(node Positioner, short bool) (pos, end token.Pos, ok bool) { + switch n := node.(type) { + case sourcer: + s := n.Source() + if s == nil { + return 0, 0, false + } + if short { + p, e := shortRange(s) + return p, e, true + } + return s.Pos(), s.End(), true + case fullPositioner: + if short { + p, e := shortRange(n) + return p, e, true + } + return n.Pos(), n.End(), true + default: + return n.Pos(), token.NoPos, true + } +} + +func Report(pass *analysis.Pass, node Positioner, message string, opts ...Option) { + cfg := &Options{} + for _, opt := range opts { + opt(cfg) + } + + file := DisplayPosition(pass.Fset, node.Pos()).Filename + if cfg.FilterGenerated { + m := pass.ResultOf[facts.Generated].(map[string]facts.Generator) + if _, ok := m[file]; ok { + return + } + } + + pos, end, ok := getRange(node, cfg.ShortRange) + if !ok { + panic(fmt.Sprintf("no valid position for reporting node %v", node)) + } + d := analysis.Diagnostic{ + Pos: pos, + End: end, + Message: message, + SuggestedFixes: cfg.Fixes, + Related: cfg.Related, + } + pass.Report(d) +} + +func Render(pass *analysis.Pass, x interface{}) string { + var buf bytes.Buffer + if err := format.Node(&buf, pass.Fset, x); err != nil { + panic(err) + } + return buf.String() +} + +func RenderArgs(pass *analysis.Pass, args []ast.Expr) string { + var ss []string + for _, arg := range args { + ss = append(ss, Render(pass, arg)) + } + return strings.Join(ss, ", ") +} + +func DisplayPosition(fset *token.FileSet, p token.Pos) token.Position { + if p == token.NoPos { + return token.Position{} + } + + // Only use the adjusted position if it points to another Go file. 
+ // This means we'll point to the original file for cgo files, but + // we won't point to a YACC grammar file. + pos := fset.PositionFor(p, false) + adjPos := fset.PositionFor(p, true) + + if filepath.Ext(adjPos.Filename) == ".go" { + return adjPos + } + + return pos +} + +func Ordinal(n int) string { + suffix := "th" + if n < 10 || n > 20 { + switch n % 10 { + case 0: + suffix = "th" + case 1: + suffix = "st" + case 2: + suffix = "nd" + case 3: + suffix = "rd" + default: + suffix = "th" + } + } + + return strconv.Itoa(n) + suffix +} diff --git a/vendor/honnef.co/go/tools/config/config.go b/vendor/honnef.co/go/tools/config/config.go new file mode 100644 index 000000000..14de76f4c --- /dev/null +++ b/vendor/honnef.co/go/tools/config/config.go @@ -0,0 +1,245 @@ +package config + +import ( + "bytes" + "fmt" + "go/ast" + "go/token" + "os" + "path/filepath" + "reflect" + "strings" + + "github.com/BurntSushi/toml" + "golang.org/x/tools/go/analysis" +) + +// Dir looks at a list of absolute file names, which should make up a +// single package, and returns the path of the directory that may +// contain a staticcheck.conf file. It returns the empty string if no +// such directory could be determined, for example because all files +// were located in Go's build cache. +func Dir(files []string) string { + if len(files) == 0 { + return "" + } + cache, err := os.UserCacheDir() + if err != nil { + cache = "" + } + var path string + for _, p := range files { + // FIXME(dh): using strings.HasPrefix isn't technically + // correct, but it should be good enough for now. + if cache != "" && strings.HasPrefix(p, cache) { + // File in the build cache of the standard Go build system + continue + } + path = p + break + } + + if path == "" { + // The package only consists of generated files. + return "" + } + + dir := filepath.Dir(path) + return dir +} + +func dirAST(files []*ast.File, fset *token.FileSet) string { + names := make([]string, len(files)) + for i, f := range files { + names[i] = fset.PositionFor(f.Pos(), true).Filename + } + return Dir(names) +} + +var Analyzer = &analysis.Analyzer{ + Name: "config", + Doc: "loads configuration for the current package tree", + Run: func(pass *analysis.Pass) (interface{}, error) { + dir := dirAST(pass.Files, pass.Fset) + if dir == "" { + cfg := DefaultConfig + return &cfg, nil + } + cfg, err := Load(dir) + if err != nil { + return nil, fmt.Errorf("error loading staticcheck.conf: %s", err) + } + return &cfg, nil + }, + RunDespiteErrors: true, + ResultType: reflect.TypeOf((*Config)(nil)), +} + +func For(pass *analysis.Pass) *Config { + return pass.ResultOf[Analyzer].(*Config) +} + +func mergeLists(a, b []string) []string { + out := make([]string, 0, len(a)+len(b)) + for _, el := range b { + if el == "inherit" { + out = append(out, a...) 
+ } else { + out = append(out, el) + } + } + + return out +} + +func normalizeList(list []string) []string { + if len(list) > 1 { + nlist := make([]string, 0, len(list)) + nlist = append(nlist, list[0]) + for i, el := range list[1:] { + if el != list[i] { + nlist = append(nlist, el) + } + } + list = nlist + } + + for _, el := range list { + if el == "inherit" { + // This should never happen, because the default config + // should not use "inherit" + panic(`unresolved "inherit"`) + } + } + + return list +} + +func (cfg Config) Merge(ocfg Config) Config { + if ocfg.Checks != nil { + cfg.Checks = mergeLists(cfg.Checks, ocfg.Checks) + } + if ocfg.Initialisms != nil { + cfg.Initialisms = mergeLists(cfg.Initialisms, ocfg.Initialisms) + } + if ocfg.DotImportWhitelist != nil { + cfg.DotImportWhitelist = mergeLists(cfg.DotImportWhitelist, ocfg.DotImportWhitelist) + } + if ocfg.HTTPStatusCodeWhitelist != nil { + cfg.HTTPStatusCodeWhitelist = mergeLists(cfg.HTTPStatusCodeWhitelist, ocfg.HTTPStatusCodeWhitelist) + } + return cfg +} + +type Config struct { + // TODO(dh): this implementation makes it impossible for external + // clients to add their own checkers with configuration. At the + // moment, we don't really care about that; we don't encourage + // that people use this package. In the future, we may. The + // obvious solution would be using map[string]interface{}, but + // that's obviously subpar. + + Checks []string `toml:"checks"` + Initialisms []string `toml:"initialisms"` + DotImportWhitelist []string `toml:"dot_import_whitelist"` + HTTPStatusCodeWhitelist []string `toml:"http_status_code_whitelist"` +} + +func (c Config) String() string { + buf := &bytes.Buffer{} + + fmt.Fprintf(buf, "Checks: %#v\n", c.Checks) + fmt.Fprintf(buf, "Initialisms: %#v\n", c.Initialisms) + fmt.Fprintf(buf, "DotImportWhitelist: %#v\n", c.DotImportWhitelist) + fmt.Fprintf(buf, "HTTPStatusCodeWhitelist: %#v", c.HTTPStatusCodeWhitelist) + + return buf.String() +} + +var DefaultConfig = Config{ + Checks: []string{"all", "-ST1000", "-ST1003", "-ST1016", "-ST1020", "-ST1021", "-ST1022", "-ST1023"}, + Initialisms: []string{ + "ACL", "API", "ASCII", "CPU", "CSS", "DNS", + "EOF", "GUID", "HTML", "HTTP", "HTTPS", "ID", + "IP", "JSON", "QPS", "RAM", "RPC", "SLA", + "SMTP", "SQL", "SSH", "TCP", "TLS", "TTL", + "UDP", "UI", "GID", "UID", "UUID", "URI", + "URL", "UTF8", "VM", "XML", "XMPP", "XSRF", + "XSS", "SIP", "RTP", "AMQP", "DB", "TS", + }, + DotImportWhitelist: []string{}, + HTTPStatusCodeWhitelist: []string{"200", "400", "404", "500"}, +} + +const ConfigName = "staticcheck.conf" + +func parseConfigs(dir string) ([]Config, error) { + var out []Config + + // TODO(dh): consider stopping at the GOPATH/module boundary + for dir != "" { + f, err := os.Open(filepath.Join(dir, ConfigName)) + if os.IsNotExist(err) { + ndir := filepath.Dir(dir) + if ndir == dir { + break + } + dir = ndir + continue + } + if err != nil { + return nil, err + } + var cfg Config + _, err = toml.DecodeReader(f, &cfg) + f.Close() + if err != nil { + return nil, err + } + out = append(out, cfg) + ndir := filepath.Dir(dir) + if ndir == dir { + break + } + dir = ndir + } + out = append(out, DefaultConfig) + if len(out) < 2 { + return out, nil + } + for i := 0; i < len(out)/2; i++ { + out[i], out[len(out)-1-i] = out[len(out)-1-i], out[i] + } + return out, nil +} + +func mergeConfigs(confs []Config) Config { + if len(confs) == 0 { + // This shouldn't happen because we always have at least a + // default config. 
+ panic("trying to merge zero configs") + } + if len(confs) == 1 { + return confs[0] + } + conf := confs[0] + for _, oconf := range confs[1:] { + conf = conf.Merge(oconf) + } + return conf +} + +func Load(dir string) (Config, error) { + confs, err := parseConfigs(dir) + if err != nil { + return Config{}, err + } + conf := mergeConfigs(confs) + + conf.Checks = normalizeList(conf.Checks) + conf.Initialisms = normalizeList(conf.Initialisms) + conf.DotImportWhitelist = normalizeList(conf.DotImportWhitelist) + conf.HTTPStatusCodeWhitelist = normalizeList(conf.HTTPStatusCodeWhitelist) + + return conf, nil +} diff --git a/vendor/honnef.co/go/tools/config/example.conf b/vendor/honnef.co/go/tools/config/example.conf new file mode 100644 index 000000000..106da5bcb --- /dev/null +++ b/vendor/honnef.co/go/tools/config/example.conf @@ -0,0 +1,10 @@ +checks = ["all", "-ST1000", "-ST1003", "-ST1016", "-ST1020", "-ST1021", "-ST1022", "-ST1023"] +initialisms = ["ACL", "API", "ASCII", "CPU", "CSS", "DNS", + "EOF", "GUID", "HTML", "HTTP", "HTTPS", "ID", + "IP", "JSON", "QPS", "RAM", "RPC", "SLA", + "SMTP", "SQL", "SSH", "TCP", "TLS", "TTL", + "UDP", "UI", "GID", "UID", "UUID", "URI", + "URL", "UTF8", "VM", "XML", "XMPP", "XSRF", + "XSS", "SIP", "RTP", "AMQP", "DB", "TS"] +dot_import_whitelist = [] +http_status_code_whitelist = ["200", "400", "404", "500"] diff --git a/vendor/honnef.co/go/tools/go/ast/astutil/upstream.go b/vendor/honnef.co/go/tools/go/ast/astutil/upstream.go new file mode 100644 index 000000000..fc647c4d3 --- /dev/null +++ b/vendor/honnef.co/go/tools/go/ast/astutil/upstream.go @@ -0,0 +1,20 @@ +package astutil + +import ( + "go/ast" + "go/token" + _ "unsafe" + + "golang.org/x/tools/go/ast/astutil" +) + +type Cursor = astutil.Cursor +type ApplyFunc = astutil.ApplyFunc + +func Apply(root ast.Node, pre, post ApplyFunc) (result ast.Node) { + return astutil.Apply(root, pre, post) +} + +func PathEnclosingInterval(root *ast.File, start, end token.Pos) (path []ast.Node, exact bool) { + return astutil.PathEnclosingInterval(root, start, end) +} diff --git a/vendor/honnef.co/go/tools/go/ast/astutil/util.go b/vendor/honnef.co/go/tools/go/ast/astutil/util.go new file mode 100644 index 000000000..fac33780d --- /dev/null +++ b/vendor/honnef.co/go/tools/go/ast/astutil/util.go @@ -0,0 +1,299 @@ +package astutil + +import ( + "fmt" + "go/ast" + "go/token" + "reflect" + "strings" +) + +func IsIdent(expr ast.Expr, ident string) bool { + id, ok := expr.(*ast.Ident) + return ok && id.Name == ident +} + +// isBlank returns whether id is the blank identifier "_". +// If id == nil, the answer is false. 
+func IsBlank(id ast.Expr) bool { + ident, _ := id.(*ast.Ident) + return ident != nil && ident.Name == "_" +} + +func IsIntLiteral(expr ast.Expr, literal string) bool { + lit, ok := expr.(*ast.BasicLit) + return ok && lit.Kind == token.INT && lit.Value == literal +} + +// Deprecated: use IsIntLiteral instead +func IsZero(expr ast.Expr) bool { + return IsIntLiteral(expr, "0") +} + +func Preamble(f *ast.File) string { + cutoff := f.Package + if f.Doc != nil { + cutoff = f.Doc.Pos() + } + var out []string + for _, cmt := range f.Comments { + if cmt.Pos() >= cutoff { + break + } + out = append(out, cmt.Text()) + } + return strings.Join(out, "\n") +} + +func GroupSpecs(fset *token.FileSet, specs []ast.Spec) [][]ast.Spec { + if len(specs) == 0 { + return nil + } + groups := make([][]ast.Spec, 1) + groups[0] = append(groups[0], specs[0]) + + for _, spec := range specs[1:] { + g := groups[len(groups)-1] + if fset.PositionFor(spec.Pos(), false).Line-1 != + fset.PositionFor(g[len(g)-1].End(), false).Line { + + groups = append(groups, nil) + } + + groups[len(groups)-1] = append(groups[len(groups)-1], spec) + } + + return groups +} + +// Unparen returns e with any enclosing parentheses stripped. +func Unparen(e ast.Expr) ast.Expr { + for { + p, ok := e.(*ast.ParenExpr) + if !ok { + return e + } + e = p.X + } +} + +func CopyExpr(node ast.Expr) ast.Expr { + switch node := node.(type) { + case *ast.BasicLit: + cp := *node + return &cp + case *ast.BinaryExpr: + cp := *node + cp.X = CopyExpr(cp.X) + cp.Y = CopyExpr(cp.Y) + return &cp + case *ast.CallExpr: + cp := *node + cp.Fun = CopyExpr(cp.Fun) + cp.Args = make([]ast.Expr, len(node.Args)) + for i, v := range node.Args { + cp.Args[i] = CopyExpr(v) + } + return &cp + case *ast.CompositeLit: + cp := *node + cp.Type = CopyExpr(cp.Type) + cp.Elts = make([]ast.Expr, len(node.Elts)) + for i, v := range node.Elts { + cp.Elts[i] = CopyExpr(v) + } + return &cp + case *ast.Ident: + cp := *node + return &cp + case *ast.IndexExpr: + cp := *node + cp.X = CopyExpr(cp.X) + cp.Index = CopyExpr(cp.Index) + return &cp + case *ast.KeyValueExpr: + cp := *node + cp.Key = CopyExpr(cp.Key) + cp.Value = CopyExpr(cp.Value) + return &cp + case *ast.ParenExpr: + cp := *node + cp.X = CopyExpr(cp.X) + return &cp + case *ast.SelectorExpr: + cp := *node + cp.X = CopyExpr(cp.X) + cp.Sel = CopyExpr(cp.Sel).(*ast.Ident) + return &cp + case *ast.SliceExpr: + cp := *node + cp.X = CopyExpr(cp.X) + cp.Low = CopyExpr(cp.Low) + cp.High = CopyExpr(cp.High) + cp.Max = CopyExpr(cp.Max) + return &cp + case *ast.StarExpr: + cp := *node + cp.X = CopyExpr(cp.X) + return &cp + case *ast.TypeAssertExpr: + cp := *node + cp.X = CopyExpr(cp.X) + cp.Type = CopyExpr(cp.Type) + return &cp + case *ast.UnaryExpr: + cp := *node + cp.X = CopyExpr(cp.X) + return &cp + case *ast.MapType: + cp := *node + cp.Key = CopyExpr(cp.Key) + cp.Value = CopyExpr(cp.Value) + return &cp + case *ast.ArrayType: + cp := *node + cp.Len = CopyExpr(cp.Len) + cp.Elt = CopyExpr(cp.Elt) + return &cp + case *ast.Ellipsis: + cp := *node + cp.Elt = CopyExpr(cp.Elt) + return &cp + case *ast.InterfaceType: + cp := *node + return &cp + case *ast.StructType: + cp := *node + return &cp + case *ast.FuncLit: + // TODO(dh): implement copying of function literals. 
+ return nil + case *ast.ChanType: + cp := *node + cp.Value = CopyExpr(cp.Value) + return &cp + case nil: + return nil + default: + panic(fmt.Sprintf("unreachable: %T", node)) + } +} + +func Equal(a, b ast.Node) bool { + if a == b { + return true + } + if a == nil || b == nil { + return false + } + if reflect.TypeOf(a) != reflect.TypeOf(b) { + return false + } + + switch a := a.(type) { + case *ast.BasicLit: + b := b.(*ast.BasicLit) + return a.Kind == b.Kind && a.Value == b.Value + case *ast.BinaryExpr: + b := b.(*ast.BinaryExpr) + return Equal(a.X, b.X) && a.Op == b.Op && Equal(a.Y, b.Y) + case *ast.CallExpr: + b := b.(*ast.CallExpr) + if len(a.Args) != len(b.Args) { + return false + } + for i, arg := range a.Args { + if !Equal(arg, b.Args[i]) { + return false + } + } + return Equal(a.Fun, b.Fun) && + (a.Ellipsis == token.NoPos && b.Ellipsis == token.NoPos || a.Ellipsis != token.NoPos && b.Ellipsis != token.NoPos) + case *ast.CompositeLit: + b := b.(*ast.CompositeLit) + if len(a.Elts) != len(b.Elts) { + return false + } + for i, elt := range b.Elts { + if !Equal(elt, b.Elts[i]) { + return false + } + } + return Equal(a.Type, b.Type) && a.Incomplete == b.Incomplete + case *ast.Ident: + b := b.(*ast.Ident) + return a.Name == b.Name + case *ast.IndexExpr: + b := b.(*ast.IndexExpr) + return Equal(a.X, b.X) && Equal(a.Index, b.Index) + case *ast.KeyValueExpr: + b := b.(*ast.KeyValueExpr) + return Equal(a.Key, b.Key) && Equal(a.Value, b.Value) + case *ast.ParenExpr: + b := b.(*ast.ParenExpr) + return Equal(a.X, b.X) + case *ast.SelectorExpr: + b := b.(*ast.SelectorExpr) + return Equal(a.X, b.X) && Equal(a.Sel, b.Sel) + case *ast.SliceExpr: + b := b.(*ast.SliceExpr) + return Equal(a.X, b.X) && Equal(a.Low, b.Low) && Equal(a.High, b.High) && Equal(a.Max, b.Max) && a.Slice3 == b.Slice3 + case *ast.StarExpr: + b := b.(*ast.StarExpr) + return Equal(a.X, b.X) + case *ast.TypeAssertExpr: + b := b.(*ast.TypeAssertExpr) + return Equal(a.X, b.X) && Equal(a.Type, b.Type) + case *ast.UnaryExpr: + b := b.(*ast.UnaryExpr) + return a.Op == b.Op && Equal(a.X, b.X) + case *ast.MapType: + b := b.(*ast.MapType) + return Equal(a.Key, b.Key) && Equal(a.Value, b.Value) + case *ast.ArrayType: + b := b.(*ast.ArrayType) + return Equal(a.Len, b.Len) && Equal(a.Elt, b.Elt) + case *ast.Ellipsis: + b := b.(*ast.Ellipsis) + return Equal(a.Elt, b.Elt) + case *ast.InterfaceType: + b := b.(*ast.InterfaceType) + return a.Incomplete == b.Incomplete && Equal(a.Methods, b.Methods) + case *ast.StructType: + b := b.(*ast.StructType) + return a.Incomplete == b.Incomplete && Equal(a.Fields, b.Fields) + case *ast.FuncLit: + // TODO(dh): support function literals + return false + case *ast.ChanType: + b := b.(*ast.ChanType) + return a.Dir == b.Dir && (a.Arrow == token.NoPos && b.Arrow == token.NoPos || a.Arrow != token.NoPos && b.Arrow != token.NoPos) + case *ast.FieldList: + b := b.(*ast.FieldList) + if len(a.List) != len(b.List) { + return false + } + for i, fieldA := range a.List { + if !Equal(fieldA, b.List[i]) { + return false + } + } + return true + case *ast.Field: + b := b.(*ast.Field) + if len(a.Names) != len(b.Names) { + return false + } + for j, name := range a.Names { + if !Equal(name, b.Names[j]) { + return false + } + } + if !Equal(a.Type, b.Type) || !Equal(a.Tag, b.Tag) { + return false + } + return true + default: + panic(fmt.Sprintf("unreachable: %T", a)) + } +} diff --git a/vendor/honnef.co/go/tools/go/ir/LICENSE b/vendor/honnef.co/go/tools/go/ir/LICENSE new file mode 100644 index 000000000..aee48041e --- /dev/null +++ 
b/vendor/honnef.co/go/tools/go/ir/LICENSE @@ -0,0 +1,28 @@ +Copyright (c) 2009 The Go Authors. All rights reserved. +Copyright (c) 2016 Dominik Honnef. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/honnef.co/go/tools/go/ir/UPSTREAM b/vendor/honnef.co/go/tools/go/ir/UPSTREAM new file mode 100644 index 000000000..b12782503 --- /dev/null +++ b/vendor/honnef.co/go/tools/go/ir/UPSTREAM @@ -0,0 +1,9 @@ +This package started as a copy of golang.org/x/tools/go/ssa, imported from an unknown commit in 2016. +It has since been heavily modified to match our own needs in an IR. +The changes are too many to list here, and it is best to consider this package independent of go/ssa. + +Upstream changes still get applied when they address bugs in portions of code we have inherited. + +The last upstream commit we've looked at was: +640c1dea83015e5271a001c99370762fc63dc280 + diff --git a/vendor/honnef.co/go/tools/go/ir/blockopt.go b/vendor/honnef.co/go/tools/go/ir/blockopt.go new file mode 100644 index 000000000..d7a0e3567 --- /dev/null +++ b/vendor/honnef.co/go/tools/go/ir/blockopt.go @@ -0,0 +1,209 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package ir + +// Simple block optimizations to simplify the control flow graph. + +// TODO(adonovan): opt: instead of creating several "unreachable" blocks +// per function in the Builder, reuse a single one (e.g. at Blocks[1]) +// to reduce garbage. + +import ( + "fmt" + "os" +) + +// If true, perform sanity checking and show progress at each +// successive iteration of optimizeBlocks. Very verbose. +const debugBlockOpt = false + +// markReachable sets Index=-1 for all blocks reachable from b. +func markReachable(b *BasicBlock) { + b.gaps = -1 + for _, succ := range b.Succs { + if succ.gaps == 0 { + markReachable(succ) + } + } +} + +// deleteUnreachableBlocks marks all reachable blocks of f and +// eliminates (nils) all others, including possibly cyclic subgraphs. 
+// +func deleteUnreachableBlocks(f *Function) { + const white, black = 0, -1 + // We borrow b.gaps temporarily as the mark bit. + for _, b := range f.Blocks { + b.gaps = white + } + markReachable(f.Blocks[0]) + // In SSI form, we need the exit to be reachable for correct + // post-dominance information. In original form, however, we + // cannot unconditionally mark it reachable because we won't + // be adding fake edges, and this breaks the calculation of + // dominance information. + markReachable(f.Exit) + for i, b := range f.Blocks { + if b.gaps == white { + for _, c := range b.Succs { + if c.gaps == black { + c.removePred(b) // delete white->black edge + } + } + if debugBlockOpt { + fmt.Fprintln(os.Stderr, "unreachable", b) + } + f.Blocks[i] = nil // delete b + } + } + f.removeNilBlocks() +} + +// jumpThreading attempts to apply simple jump-threading to block b, +// in which a->b->c become a->c if b is just a Jump. +// The result is true if the optimization was applied. +// +func jumpThreading(f *Function, b *BasicBlock) bool { + if b.Index == 0 { + return false // don't apply to entry block + } + if b.Instrs == nil { + return false + } + for _, pred := range b.Preds { + switch pred.Control().(type) { + case *ConstantSwitch: + // don't optimize away the head blocks of switch statements + return false + } + } + if _, ok := b.Instrs[0].(*Jump); !ok { + return false // not just a jump + } + c := b.Succs[0] + if c == b { + return false // don't apply to degenerate jump-to-self. + } + if c.hasPhi() { + return false // not sound without more effort + } + for j, a := range b.Preds { + a.replaceSucc(b, c) + + // If a now has two edges to c, replace its degenerate If by Jump. + if len(a.Succs) == 2 && a.Succs[0] == c && a.Succs[1] == c { + jump := new(Jump) + jump.setBlock(a) + a.Instrs[len(a.Instrs)-1] = jump + a.Succs = a.Succs[:1] + c.removePred(b) + } else { + if j == 0 { + c.replacePred(b, a) + } else { + c.Preds = append(c.Preds, a) + } + } + + if debugBlockOpt { + fmt.Fprintln(os.Stderr, "jumpThreading", a, b, c) + } + } + f.Blocks[b.Index] = nil // delete b + return true +} + +// fuseBlocks attempts to apply the block fusion optimization to block +// a, in which a->b becomes ab if len(a.Succs)==len(b.Preds)==1. +// The result is true if the optimization was applied. +// +func fuseBlocks(f *Function, a *BasicBlock) bool { + if len(a.Succs) != 1 { + return false + } + if a.Succs[0] == f.Exit { + return false + } + b := a.Succs[0] + if len(b.Preds) != 1 { + return false + } + if _, ok := a.Instrs[len(a.Instrs)-1].(*Panic); ok { + // panics aren't simple jumps, they have side effects. + return false + } + + // Degenerate &&/|| ops may result in a straight-line CFG + // containing φ-nodes. (Ideally we'd replace such them with + // their sole operand but that requires Referrers, built later.) + if b.hasPhi() { + return false // not sound without further effort + } + + // Eliminate jump at end of A, then copy all of B across. + a.Instrs = append(a.Instrs[:len(a.Instrs)-1], b.Instrs...) + for _, instr := range b.Instrs { + instr.setBlock(a) + } + + // A inherits B's successors + a.Succs = append(a.succs2[:0], b.Succs...) + + // Fix up Preds links of all successors of B. 
+ for _, c := range b.Succs { + c.replacePred(b, a) + } + + if debugBlockOpt { + fmt.Fprintln(os.Stderr, "fuseBlocks", a, b) + } + + f.Blocks[b.Index] = nil // delete b + return true +} + +// optimizeBlocks() performs some simple block optimizations on a +// completed function: dead block elimination, block fusion, jump +// threading. +// +func optimizeBlocks(f *Function) { + if debugBlockOpt { + f.WriteTo(os.Stderr) + mustSanityCheck(f, nil) + } + + deleteUnreachableBlocks(f) + + // Loop until no further progress. + changed := true + for changed { + changed = false + + if debugBlockOpt { + f.WriteTo(os.Stderr) + mustSanityCheck(f, nil) + } + + for _, b := range f.Blocks { + // f.Blocks will temporarily contain nils to indicate + // deleted blocks; we remove them at the end. + if b == nil { + continue + } + + // Fuse blocks. b->c becomes bc. + if fuseBlocks(f, b) { + changed = true + } + + // a->b->c becomes a->c if b contains only a Jump. + if jumpThreading(f, b) { + changed = true + continue // (b was disconnected) + } + } + } + f.removeNilBlocks() +} diff --git a/vendor/honnef.co/go/tools/go/ir/builder.go b/vendor/honnef.co/go/tools/go/ir/builder.go new file mode 100644 index 000000000..0cc1bedcd --- /dev/null +++ b/vendor/honnef.co/go/tools/go/ir/builder.go @@ -0,0 +1,2479 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package ir + +// This file implements the BUILD phase of IR construction. +// +// IR construction has two phases, CREATE and BUILD. In the CREATE phase +// (create.go), all packages are constructed and type-checked and +// definitions of all package members are created, method-sets are +// computed, and wrapper methods are synthesized. +// ir.Packages are created in arbitrary order. +// +// In the BUILD phase (builder.go), the builder traverses the AST of +// each Go source function and generates IR instructions for the +// function body. Initializer expressions for package-level variables +// are emitted to the package's init() function in the order specified +// by go/types.Info.InitOrder, then code for each function in the +// package is generated in lexical order. +// +// The builder's and Program's indices (maps) are populated and +// mutated during the CREATE phase, but during the BUILD phase they +// remain constant. The sole exception is Prog.methodSets and its +// related maps, which are protected by a dedicated mutex. + +import ( + "fmt" + "go/ast" + "go/constant" + "go/token" + "go/types" + "os" +) + +type opaqueType struct { + types.Type + name string +} + +func (t *opaqueType) String() string { return t.name } + +var ( + varOk = newVar("ok", tBool) + varIndex = newVar("index", tInt) + + // Type constants. + tBool = types.Typ[types.Bool] + tByte = types.Typ[types.Byte] + tInt = types.Typ[types.Int] + tInvalid = types.Typ[types.Invalid] + tString = types.Typ[types.String] + tUntypedNil = types.Typ[types.UntypedNil] + tRangeIter = &opaqueType{nil, "iter"} // the type of all "range" iterators + tEface = types.NewInterfaceType(nil, nil).Complete() +) + +// builder holds state associated with the package currently being built. +// Its methods contain all the logic for AST-to-IR conversion. +type builder struct { + printFunc string + + blocksets [5]BlockSet +} + +// cond emits to fn code to evaluate boolean condition e and jump +// to t or f depending on its value, performing various simplifications. +// +// Postcondition: fn.currentBlock is nil. 
+// +func (b *builder) cond(fn *Function, e ast.Expr, t, f *BasicBlock) *If { + switch e := e.(type) { + case *ast.ParenExpr: + return b.cond(fn, e.X, t, f) + + case *ast.BinaryExpr: + switch e.Op { + case token.LAND: + ltrue := fn.newBasicBlock("cond.true") + b.cond(fn, e.X, ltrue, f) + fn.currentBlock = ltrue + return b.cond(fn, e.Y, t, f) + + case token.LOR: + lfalse := fn.newBasicBlock("cond.false") + b.cond(fn, e.X, t, lfalse) + fn.currentBlock = lfalse + return b.cond(fn, e.Y, t, f) + } + + case *ast.UnaryExpr: + if e.Op == token.NOT { + return b.cond(fn, e.X, f, t) + } + } + + // A traditional compiler would simplify "if false" (etc) here + // but we do not, for better fidelity to the source code. + // + // The value of a constant condition may be platform-specific, + // and may cause blocks that are reachable in some configuration + // to be hidden from subsequent analyses such as bug-finding tools. + return emitIf(fn, b.expr(fn, e), t, f, e) +} + +// logicalBinop emits code to fn to evaluate e, a &&- or +// ||-expression whose reified boolean value is wanted. +// The value is returned. +// +func (b *builder) logicalBinop(fn *Function, e *ast.BinaryExpr) Value { + rhs := fn.newBasicBlock("binop.rhs") + done := fn.newBasicBlock("binop.done") + + // T(e) = T(e.X) = T(e.Y) after untyped constants have been + // eliminated. + // TODO(adonovan): not true; MyBool==MyBool yields UntypedBool. + t := fn.Pkg.typeOf(e) + + var short Value // value of the short-circuit path + switch e.Op { + case token.LAND: + b.cond(fn, e.X, rhs, done) + short = emitConst(fn, NewConst(constant.MakeBool(false), t)) + + case token.LOR: + b.cond(fn, e.X, done, rhs) + short = emitConst(fn, NewConst(constant.MakeBool(true), t)) + } + + // Is rhs unreachable? + if rhs.Preds == nil { + // Simplify false&&y to false, true||y to true. + fn.currentBlock = done + return short + } + + // Is done unreachable? + if done.Preds == nil { + // Simplify true&&y (or false||y) to y. + fn.currentBlock = rhs + return b.expr(fn, e.Y) + } + + // All edges from e.X to done carry the short-circuit value. + var edges []Value + for range done.Preds { + edges = append(edges, short) + } + + // The edge from e.Y to done carries the value of e.Y. + fn.currentBlock = rhs + edges = append(edges, b.expr(fn, e.Y)) + emitJump(fn, done, e) + fn.currentBlock = done + + phi := &Phi{Edges: edges} + phi.typ = t + return done.emit(phi, e) +} + +// exprN lowers a multi-result expression e to IR form, emitting code +// to fn and returning a single Value whose type is a *types.Tuple. +// The caller must access the components via Extract. +// +// Multi-result expressions include CallExprs in a multi-value +// assignment or return statement, and "value,ok" uses of +// TypeAssertExpr, IndexExpr (when X is a map), and Recv. +// +func (b *builder) exprN(fn *Function, e ast.Expr) Value { + typ := fn.Pkg.typeOf(e).(*types.Tuple) + switch e := e.(type) { + case *ast.ParenExpr: + return b.exprN(fn, e.X) + + case *ast.CallExpr: + // Currently, no built-in function nor type conversion + // has multiple results, so we can avoid some of the + // cases for single-valued CallExpr. 
+ var c Call + b.setCall(fn, e, &c.Call) + c.typ = typ + return fn.emit(&c, e) + + case *ast.IndexExpr: + mapt := fn.Pkg.typeOf(e.X).Underlying().(*types.Map) + lookup := &MapLookup{ + X: b.expr(fn, e.X), + Index: emitConv(fn, b.expr(fn, e.Index), mapt.Key(), e), + CommaOk: true, + } + lookup.setType(typ) + return fn.emit(lookup, e) + + case *ast.TypeAssertExpr: + return emitTypeTest(fn, b.expr(fn, e.X), typ.At(0).Type(), e) + + case *ast.UnaryExpr: // must be receive <- + return emitRecv(fn, b.expr(fn, e.X), true, typ, e) + } + panic(fmt.Sprintf("exprN(%T) in %s", e, fn)) +} + +// builtin emits to fn IR instructions to implement a call to the +// built-in function obj with the specified arguments +// and return type. It returns the value defined by the result. +// +// The result is nil if no special handling was required; in this case +// the caller should treat this like an ordinary library function +// call. +// +func (b *builder) builtin(fn *Function, obj *types.Builtin, args []ast.Expr, typ types.Type, source ast.Node) Value { + switch obj.Name() { + case "make": + switch typ.Underlying().(type) { + case *types.Slice: + n := b.expr(fn, args[1]) + m := n + if len(args) == 3 { + m = b.expr(fn, args[2]) + } + if m, ok := m.(*Const); ok { + // treat make([]T, n, m) as new([m]T)[:n] + cap := m.Int64() + at := types.NewArray(typ.Underlying().(*types.Slice).Elem(), cap) + alloc := emitNew(fn, at, source) + v := &Slice{ + X: alloc, + High: n, + } + v.setType(typ) + return fn.emit(v, source) + } + v := &MakeSlice{ + Len: n, + Cap: m, + } + v.setType(typ) + return fn.emit(v, source) + + case *types.Map: + var res Value + if len(args) == 2 { + res = b.expr(fn, args[1]) + } + v := &MakeMap{Reserve: res} + v.setType(typ) + return fn.emit(v, source) + + case *types.Chan: + var sz Value = emitConst(fn, intConst(0)) + if len(args) == 2 { + sz = b.expr(fn, args[1]) + } + v := &MakeChan{Size: sz} + v.setType(typ) + return fn.emit(v, source) + } + + case "new": + alloc := emitNew(fn, deref(typ), source) + return alloc + + case "len", "cap": + // Special case: len or cap of an array or *array is + // based on the type, not the value which may be nil. + // We must still evaluate the value, though. (If it + // was side-effect free, the whole call would have + // been constant-folded.) + t := deref(fn.Pkg.typeOf(args[0])).Underlying() + if at, ok := t.(*types.Array); ok { + b.expr(fn, args[0]) // for effects only + return emitConst(fn, intConst(at.Len())) + } + // Otherwise treat as normal. + + case "panic": + fn.emit(&Panic{ + X: emitConv(fn, b.expr(fn, args[0]), tEface, source), + }, source) + addEdge(fn.currentBlock, fn.Exit) + fn.currentBlock = fn.newBasicBlock("unreachable") + return emitConst(fn, NewConst(constant.MakeBool(true), tBool)) // any non-nil Value will do + } + return nil // treat all others as a regular function call +} + +// addr lowers a single-result addressable expression e to IR form, +// emitting code to fn and returning the location (an lvalue) defined +// by the expression. +// +// If escaping is true, addr marks the base variable of the +// addressable expression e as being a potentially escaping pointer +// value. For example, in this code: +// +// a := A{ +// b: [1]B{B{c: 1}} +// } +// return &a.b[0].c +// +// the application of & causes a.b[0].c to have its address taken, +// which means that ultimately the local variable a must be +// heap-allocated. This is a simple but very conservative escape +// analysis. 
+// +// Operations forming potentially escaping pointers include: +// - &x, including when implicit in method call or composite literals. +// - a[:] iff a is an array (not *array) +// - references to variables in lexically enclosing functions. +// +func (b *builder) addr(fn *Function, e ast.Expr, escaping bool) lvalue { + switch e := e.(type) { + case *ast.Ident: + if isBlankIdent(e) { + return blank{} + } + obj := fn.Pkg.objectOf(e) + v := fn.Prog.packageLevelValue(obj) // var (address) + if v == nil { + v = fn.lookup(obj, escaping) + } + return &address{addr: v, expr: e} + + case *ast.CompositeLit: + t := deref(fn.Pkg.typeOf(e)) + var v *Alloc + if escaping { + v = emitNew(fn, t, e) + } else { + v = fn.addLocal(t, e) + } + var sb storebuf + b.compLit(fn, v, e, true, &sb) + sb.emit(fn) + return &address{addr: v, expr: e} + + case *ast.ParenExpr: + return b.addr(fn, e.X, escaping) + + case *ast.SelectorExpr: + sel, ok := fn.Pkg.info.Selections[e] + if !ok { + // qualified identifier + return b.addr(fn, e.Sel, escaping) + } + if sel.Kind() != types.FieldVal { + panic(sel) + } + wantAddr := true + v := b.receiver(fn, e.X, wantAddr, escaping, sel, e) + last := len(sel.Index()) - 1 + return &address{ + addr: emitFieldSelection(fn, v, sel.Index()[last], true, e.Sel), + expr: e.Sel, + } + + case *ast.IndexExpr: + var x Value + var et types.Type + switch t := fn.Pkg.typeOf(e.X).Underlying().(type) { + case *types.Array: + x = b.addr(fn, e.X, escaping).address(fn) + et = types.NewPointer(t.Elem()) + case *types.Pointer: // *array + x = b.expr(fn, e.X) + et = types.NewPointer(t.Elem().Underlying().(*types.Array).Elem()) + case *types.Slice: + x = b.expr(fn, e.X) + et = types.NewPointer(t.Elem()) + case *types.Map: + return &element{ + m: b.expr(fn, e.X), + k: emitConv(fn, b.expr(fn, e.Index), t.Key(), e.Index), + t: t.Elem(), + } + default: + panic("unexpected container type in IndexExpr: " + t.String()) + } + v := &IndexAddr{ + X: x, + Index: emitConv(fn, b.expr(fn, e.Index), tInt, e.Index), + } + v.setType(et) + return &address{addr: fn.emit(v, e), expr: e} + + case *ast.StarExpr: + return &address{addr: b.expr(fn, e.X), expr: e} + } + + panic(fmt.Sprintf("unexpected address expression: %T", e)) +} + +type store struct { + lhs lvalue + rhs Value + source ast.Node +} + +type storebuf struct{ stores []store } + +func (sb *storebuf) store(lhs lvalue, rhs Value, source ast.Node) { + sb.stores = append(sb.stores, store{lhs, rhs, source}) +} + +func (sb *storebuf) emit(fn *Function) { + for _, s := range sb.stores { + s.lhs.store(fn, s.rhs, s.source) + } +} + +// assign emits to fn code to initialize the lvalue loc with the value +// of expression e. If isZero is true, assign assumes that loc holds +// the zero value for its type. +// +// This is equivalent to loc.store(fn, b.expr(fn, e)), but may generate +// better code in some cases, e.g., for composite literals in an +// addressable location. +// +// If sb is not nil, assign generates code to evaluate expression e, but +// not to update loc. Instead, the necessary stores are appended to the +// storebuf sb so that they can be executed later. This allows correct +// in-place update of existing variables when the RHS is a composite +// literal that may reference parts of the LHS. +// +func (b *builder) assign(fn *Function, loc lvalue, e ast.Expr, isZero bool, sb *storebuf, source ast.Node) { + // Can we initialize it in place? 
+ if e, ok := unparen(e).(*ast.CompositeLit); ok { + // A CompositeLit never evaluates to a pointer, + // so if the type of the location is a pointer, + // an &-operation is implied. + if _, ok := loc.(blank); !ok { // avoid calling blank.typ() + if isPointer(loc.typ()) { + ptr := b.addr(fn, e, true).address(fn) + // copy address + if sb != nil { + sb.store(loc, ptr, source) + } else { + loc.store(fn, ptr, source) + } + return + } + } + + if _, ok := loc.(*address); ok { + if isInterface(loc.typ()) { + // e.g. var x interface{} = T{...} + // Can't in-place initialize an interface value. + // Fall back to copying. + } else { + // x = T{...} or x := T{...} + addr := loc.address(fn) + if sb != nil { + b.compLit(fn, addr, e, isZero, sb) + } else { + var sb storebuf + b.compLit(fn, addr, e, isZero, &sb) + sb.emit(fn) + } + + // Subtle: emit debug ref for aggregate types only; + // slice and map are handled by store ops in compLit. + switch loc.typ().Underlying().(type) { + case *types.Struct, *types.Array: + emitDebugRef(fn, e, addr, true) + } + + return + } + } + } + + // simple case: just copy + rhs := b.expr(fn, e) + if sb != nil { + sb.store(loc, rhs, source) + } else { + loc.store(fn, rhs, source) + } +} + +// expr lowers a single-result expression e to IR form, emitting code +// to fn and returning the Value defined by the expression. +// +func (b *builder) expr(fn *Function, e ast.Expr) Value { + e = unparen(e) + + tv := fn.Pkg.info.Types[e] + + // Is expression a constant? + if tv.Value != nil { + return emitConst(fn, NewConst(tv.Value, tv.Type)) + } + + var v Value + if tv.Addressable() { + // Prefer pointer arithmetic ({Index,Field}Addr) followed + // by Load over subelement extraction (e.g. Index, Field), + // to avoid large copies. + v = b.addr(fn, e, false).load(fn, e) + } else { + v = b.expr0(fn, e, tv) + } + if fn.debugInfo() { + emitDebugRef(fn, e, v, false) + } + return v +} + +func (b *builder) expr0(fn *Function, e ast.Expr, tv types.TypeAndValue) Value { + switch e := e.(type) { + case *ast.BasicLit: + panic("non-constant BasicLit") // unreachable + + case *ast.FuncLit: + fn2 := &Function{ + name: fmt.Sprintf("%s$%d", fn.Name(), 1+len(fn.AnonFuncs)), + Signature: fn.Pkg.typeOf(e.Type).Underlying().(*types.Signature), + parent: fn, + Pkg: fn.Pkg, + Prog: fn.Prog, + functionBody: new(functionBody), + } + fn2.source = e + fn.AnonFuncs = append(fn.AnonFuncs, fn2) + fn2.initHTML(b.printFunc) + b.buildFunction(fn2) + if fn2.FreeVars == nil { + return fn2 + } + v := &MakeClosure{Fn: fn2} + v.setType(tv.Type) + for _, fv := range fn2.FreeVars { + v.Bindings = append(v.Bindings, fv.outer) + fv.outer = nil + } + return fn.emit(v, e) + + case *ast.TypeAssertExpr: // single-result form only + return emitTypeAssert(fn, b.expr(fn, e.X), tv.Type, e) + + case *ast.CallExpr: + if fn.Pkg.info.Types[e.Fun].IsType() { + // Explicit type conversion, e.g. string(x) or big.Int(x) + x := b.expr(fn, e.Args[0]) + y := emitConv(fn, x, tv.Type, e) + return y + } + // Call to "intrinsic" built-ins, e.g. new, make, panic. + if id, ok := unparen(e.Fun).(*ast.Ident); ok { + if obj, ok := fn.Pkg.info.Uses[id].(*types.Builtin); ok { + if v := b.builtin(fn, obj, e.Args, tv.Type, e); v != nil { + return v + } + } + } + // Regular function call. + var v Call + b.setCall(fn, e, &v.Call) + v.setType(tv.Type) + return fn.emit(&v, e) + + case *ast.UnaryExpr: + switch e.Op { + case token.AND: // &X --- potentially escaping. 
+ addr := b.addr(fn, e.X, true) + if _, ok := unparen(e.X).(*ast.StarExpr); ok { + // &*p must panic if p is nil (http://golang.org/s/go12nil). + // For simplicity, we'll just (suboptimally) rely + // on the side effects of a load. + // TODO(adonovan): emit dedicated nilcheck. + addr.load(fn, e) + } + return addr.address(fn) + case token.ADD: + return b.expr(fn, e.X) + case token.NOT, token.SUB, token.XOR: // ! <- - ^ + v := &UnOp{ + Op: e.Op, + X: b.expr(fn, e.X), + } + v.setType(tv.Type) + return fn.emit(v, e) + case token.ARROW: + return emitRecv(fn, b.expr(fn, e.X), false, tv.Type, e) + default: + panic(e.Op) + } + + case *ast.BinaryExpr: + switch e.Op { + case token.LAND, token.LOR: + return b.logicalBinop(fn, e) + case token.SHL, token.SHR: + fallthrough + case token.ADD, token.SUB, token.MUL, token.QUO, token.REM, token.AND, token.OR, token.XOR, token.AND_NOT: + return emitArith(fn, e.Op, b.expr(fn, e.X), b.expr(fn, e.Y), tv.Type, e) + + case token.EQL, token.NEQ, token.GTR, token.LSS, token.LEQ, token.GEQ: + cmp := emitCompare(fn, e.Op, b.expr(fn, e.X), b.expr(fn, e.Y), e) + // The type of x==y may be UntypedBool. + return emitConv(fn, cmp, types.Default(tv.Type), e) + default: + panic("illegal op in BinaryExpr: " + e.Op.String()) + } + + case *ast.SliceExpr: + var low, high, max Value + var x Value + switch fn.Pkg.typeOf(e.X).Underlying().(type) { + case *types.Array: + // Potentially escaping. + x = b.addr(fn, e.X, true).address(fn) + case *types.Basic, *types.Slice, *types.Pointer: // *array + x = b.expr(fn, e.X) + default: + panic("unreachable") + } + if e.High != nil { + high = b.expr(fn, e.High) + } + if e.Low != nil { + low = b.expr(fn, e.Low) + } + if e.Slice3 { + max = b.expr(fn, e.Max) + } + v := &Slice{ + X: x, + Low: low, + High: high, + Max: max, + } + v.setType(tv.Type) + return fn.emit(v, e) + + case *ast.Ident: + obj := fn.Pkg.info.Uses[e] + // Universal built-in or nil? + switch obj := obj.(type) { + case *types.Builtin: + return &Builtin{name: obj.Name(), sig: tv.Type.(*types.Signature)} + case *types.Nil: + return emitConst(fn, nilConst(tv.Type)) + } + // Package-level func or var? + if v := fn.Prog.packageLevelValue(obj); v != nil { + if _, ok := obj.(*types.Var); ok { + return emitLoad(fn, v, e) // var (address) + } + return v // (func) + } + // Local var. + return emitLoad(fn, fn.lookup(obj, false), e) // var (address) + + case *ast.SelectorExpr: + sel, ok := fn.Pkg.info.Selections[e] + if !ok { + // builtin unsafe.{Add,Slice} + if obj, ok := fn.Pkg.info.Uses[e.Sel].(*types.Builtin); ok { + return &Builtin{name: "Unsafe" + obj.Name(), sig: tv.Type.(*types.Signature)} + } + // qualified identifier + return b.expr(fn, e.Sel) + } + switch sel.Kind() { + case types.MethodExpr: + // (*T).f or T.f, the method f from the method-set of type T. + // The result is a "thunk". + return emitConv(fn, makeThunk(fn.Prog, sel), tv.Type, e) + + case types.MethodVal: + // e.f where e is an expression and f is a method. + // The result is a "bound". + obj := sel.Obj().(*types.Func) + rt := recvType(obj) + wantAddr := isPointer(rt) + escaping := true + v := b.receiver(fn, e.X, wantAddr, escaping, sel, e) + if isInterface(rt) { + // If v has interface type I, + // we must emit a check that v is non-nil. + // We use: typeassert v.(I). 
+ emitTypeAssert(fn, v, rt, e) + } + c := &MakeClosure{ + Fn: makeBound(fn.Prog, obj), + Bindings: []Value{v}, + } + c.source = e.Sel + c.setType(tv.Type) + return fn.emit(c, e) + + case types.FieldVal: + indices := sel.Index() + last := len(indices) - 1 + v := b.expr(fn, e.X) + v = emitImplicitSelections(fn, v, indices[:last], e) + v = emitFieldSelection(fn, v, indices[last], false, e.Sel) + return v + } + + panic("unexpected expression-relative selector") + + case *ast.IndexExpr: + switch t := fn.Pkg.typeOf(e.X).Underlying().(type) { + case *types.Array: + // Non-addressable array (in a register). + v := &Index{ + X: b.expr(fn, e.X), + Index: emitConv(fn, b.expr(fn, e.Index), tInt, e.Index), + } + v.setType(t.Elem()) + return fn.emit(v, e) + + case *types.Map: + // Maps are not addressable. + mapt := fn.Pkg.typeOf(e.X).Underlying().(*types.Map) + v := &MapLookup{ + X: b.expr(fn, e.X), + Index: emitConv(fn, b.expr(fn, e.Index), mapt.Key(), e.Index), + } + v.setType(mapt.Elem()) + return fn.emit(v, e) + + case *types.Basic: // => string + // Strings are not addressable. + v := &StringLookup{ + X: b.expr(fn, e.X), + Index: b.expr(fn, e.Index), + } + v.setType(tByte) + return fn.emit(v, e) + + case *types.Slice, *types.Pointer: // *array + // Addressable slice/array; use IndexAddr and Load. + return b.addr(fn, e, false).load(fn, e) + + default: + panic("unexpected container type in IndexExpr: " + t.String()) + } + + case *ast.CompositeLit, *ast.StarExpr: + // Addressable types (lvalues) + return b.addr(fn, e, false).load(fn, e) + } + + panic(fmt.Sprintf("unexpected expr: %T", e)) +} + +// stmtList emits to fn code for all statements in list. +func (b *builder) stmtList(fn *Function, list []ast.Stmt) { + for _, s := range list { + b.stmt(fn, s) + } +} + +// receiver emits to fn code for expression e in the "receiver" +// position of selection e.f (where f may be a field or a method) and +// returns the effective receiver after applying the implicit field +// selections of sel. +// +// wantAddr requests that the result is an an address. If +// !sel.Indirect(), this may require that e be built in addr() mode; it +// must thus be addressable. +// +// escaping is defined as per builder.addr(). +// +func (b *builder) receiver(fn *Function, e ast.Expr, wantAddr, escaping bool, sel *types.Selection, source ast.Node) Value { + var v Value + if wantAddr && !sel.Indirect() && !isPointer(fn.Pkg.typeOf(e)) { + v = b.addr(fn, e, escaping).address(fn) + } else { + v = b.expr(fn, e) + } + + last := len(sel.Index()) - 1 + v = emitImplicitSelections(fn, v, sel.Index()[:last], source) + if !wantAddr && isPointer(v.Type()) { + v = emitLoad(fn, v, e) + } + return v +} + +// setCallFunc populates the function parts of a CallCommon structure +// (Func, Method, Recv, Args[0]) based on the kind of invocation +// occurring in e. +// +func (b *builder) setCallFunc(fn *Function, e *ast.CallExpr, c *CallCommon) { + // Is this a method call? + if selector, ok := unparen(e.Fun).(*ast.SelectorExpr); ok { + sel, ok := fn.Pkg.info.Selections[selector] + if ok && sel.Kind() == types.MethodVal { + obj := sel.Obj().(*types.Func) + recv := recvType(obj) + wantAddr := isPointer(recv) + escaping := true + v := b.receiver(fn, selector.X, wantAddr, escaping, sel, selector) + if isInterface(recv) { + // Invoke-mode call. + c.Value = v + c.Method = obj + } else { + // "Call"-mode call. 
+ c.Value = fn.Prog.declaredFunc(obj) + c.Args = append(c.Args, v) + } + return + } + + // sel.Kind()==MethodExpr indicates T.f() or (*T).f(): + // a statically dispatched call to the method f in the + // method-set of T or *T. T may be an interface. + // + // e.Fun would evaluate to a concrete method, interface + // wrapper function, or promotion wrapper. + // + // For now, we evaluate it in the usual way. + // + // TODO(adonovan): opt: inline expr() here, to make the + // call static and to avoid generation of wrappers. + // It's somewhat tricky as it may consume the first + // actual parameter if the call is "invoke" mode. + // + // Examples: + // type T struct{}; func (T) f() {} // "call" mode + // type T interface { f() } // "invoke" mode + // + // type S struct{ T } + // + // var s S + // S.f(s) + // (*S).f(&s) + // + // Suggested approach: + // - consume the first actual parameter expression + // and build it with b.expr(). + // - apply implicit field selections. + // - use MethodVal logic to populate fields of c. + } + + // Evaluate the function operand in the usual way. + c.Value = b.expr(fn, e.Fun) +} + +// emitCallArgs emits to f code for the actual parameters of call e to +// a (possibly built-in) function of effective type sig. +// The argument values are appended to args, which is then returned. +// +func (b *builder) emitCallArgs(fn *Function, sig *types.Signature, e *ast.CallExpr, args []Value) []Value { + // f(x, y, z...): pass slice z straight through. + if e.Ellipsis != 0 { + for i, arg := range e.Args { + v := emitConv(fn, b.expr(fn, arg), sig.Params().At(i).Type(), arg) + args = append(args, v) + } + return args + } + + offset := len(args) // 1 if call has receiver, 0 otherwise + + // Evaluate actual parameter expressions. + // + // If this is a chained call of the form f(g()) where g has + // multiple return values (MRV), they are flattened out into + // args; a suffix of them may end up in a varargs slice. + for _, arg := range e.Args { + v := b.expr(fn, arg) + if ttuple, ok := v.Type().(*types.Tuple); ok { // MRV chain + for i, n := 0, ttuple.Len(); i < n; i++ { + args = append(args, emitExtract(fn, v, i, arg)) + } + } else { + args = append(args, v) + } + } + + // Actual->formal assignability conversions for normal parameters. + np := sig.Params().Len() // number of normal parameters + if sig.Variadic() { + np-- + } + for i := 0; i < np; i++ { + args[offset+i] = emitConv(fn, args[offset+i], sig.Params().At(i).Type(), args[offset+i].Source()) + } + + // Actual->formal assignability conversions for variadic parameter, + // and construction of slice. + if sig.Variadic() { + varargs := args[offset+np:] + st := sig.Params().At(np).Type().(*types.Slice) + vt := st.Elem() + if len(varargs) == 0 { + args = append(args, emitConst(fn, nilConst(st))) + } else { + // Replace a suffix of args with a slice containing it. + at := types.NewArray(vt, int64(len(varargs))) + a := emitNew(fn, at, e) + a.source = e + for i, arg := range varargs { + iaddr := &IndexAddr{ + X: a, + Index: emitConst(fn, intConst(int64(i))), + } + iaddr.setType(types.NewPointer(vt)) + fn.emit(iaddr, e) + emitStore(fn, iaddr, arg, arg.Source()) + } + s := &Slice{X: a} + s.setType(st) + args[offset+np] = fn.emit(s, args[offset+np].Source()) + args = args[:offset+np+1] + } + } + return args +} + +// setCall emits to fn code to evaluate all the parameters of a function +// call e, and populates *c with those values. 
+// +func (b *builder) setCall(fn *Function, e *ast.CallExpr, c *CallCommon) { + // First deal with the f(...) part and optional receiver. + b.setCallFunc(fn, e, c) + + // Then append the other actual parameters. + sig, _ := fn.Pkg.typeOf(e.Fun).Underlying().(*types.Signature) + if sig == nil { + panic(fmt.Sprintf("no signature for call of %s", e.Fun)) + } + c.Args = b.emitCallArgs(fn, sig, e, c.Args) +} + +// assignOp emits to fn code to perform loc = val. +func (b *builder) assignOp(fn *Function, loc lvalue, val Value, op token.Token, source ast.Node) { + oldv := loc.load(fn, source) + loc.store(fn, emitArith(fn, op, oldv, emitConv(fn, val, oldv.Type(), source), loc.typ(), source), source) +} + +// localValueSpec emits to fn code to define all of the vars in the +// function-local ValueSpec, spec. +// +func (b *builder) localValueSpec(fn *Function, spec *ast.ValueSpec) { + switch { + case len(spec.Values) == len(spec.Names): + // e.g. var x, y = 0, 1 + // 1:1 assignment + for i, id := range spec.Names { + if !isBlankIdent(id) { + fn.addLocalForIdent(id) + } + lval := b.addr(fn, id, false) // non-escaping + b.assign(fn, lval, spec.Values[i], true, nil, spec) + } + + case len(spec.Values) == 0: + // e.g. var x, y int + // Locals are implicitly zero-initialized. + for _, id := range spec.Names { + if !isBlankIdent(id) { + lhs := fn.addLocalForIdent(id) + if fn.debugInfo() { + emitDebugRef(fn, id, lhs, true) + } + } + } + + default: + // e.g. var x, y = pos() + tuple := b.exprN(fn, spec.Values[0]) + for i, id := range spec.Names { + if !isBlankIdent(id) { + fn.addLocalForIdent(id) + lhs := b.addr(fn, id, false) // non-escaping + lhs.store(fn, emitExtract(fn, tuple, i, id), id) + } + } + } +} + +// assignStmt emits code to fn for a parallel assignment of rhss to lhss. +// isDef is true if this is a short variable declaration (:=). +// +// Note the similarity with localValueSpec. +// +func (b *builder) assignStmt(fn *Function, lhss, rhss []ast.Expr, isDef bool, source ast.Node) { + // Side effects of all LHSs and RHSs must occur in left-to-right order. + lvals := make([]lvalue, len(lhss)) + isZero := make([]bool, len(lhss)) + for i, lhs := range lhss { + var lval lvalue = blank{} + if !isBlankIdent(lhs) { + if isDef { + if obj := fn.Pkg.info.Defs[lhs.(*ast.Ident)]; obj != nil { + fn.addNamedLocal(obj, lhs) + isZero[i] = true + } + } + lval = b.addr(fn, lhs, false) // non-escaping + } + lvals[i] = lval + } + if len(lhss) == len(rhss) { + // Simple assignment: x = f() (!isDef) + // Parallel assignment: x, y = f(), g() (!isDef) + // or short var decl: x, y := f(), g() (isDef) + // + // In all cases, the RHSs may refer to the LHSs, + // so we need a storebuf. + var sb storebuf + for i := range rhss { + b.assign(fn, lvals[i], rhss[i], isZero[i], &sb, source) + } + sb.emit(fn) + } else { + // e.g. x, y = pos() + tuple := b.exprN(fn, rhss[0]) + emitDebugRef(fn, rhss[0], tuple, false) + for i, lval := range lvals { + lval.store(fn, emitExtract(fn, tuple, i, source), source) + } + } +} + +// arrayLen returns the length of the array whose composite literal elements are elts. +func (b *builder) arrayLen(fn *Function, elts []ast.Expr) int64 { + var max int64 = -1 + var i int64 = -1 + for _, e := range elts { + if kv, ok := e.(*ast.KeyValueExpr); ok { + i = b.expr(fn, kv.Key).(*Const).Int64() + } else { + i++ + } + if i > max { + max = i + } + } + return max + 1 +} + +// compLit emits to fn code to initialize a composite literal e at +// address addr with type typ. 
+// +// Nested composite literals are recursively initialized in place +// where possible. If isZero is true, compLit assumes that addr +// holds the zero value for typ. +// +// Because the elements of a composite literal may refer to the +// variables being updated, as in the second line below, +// x := T{a: 1} +// x = T{a: x.a} +// all the reads must occur before all the writes. Thus all stores to +// loc are emitted to the storebuf sb for later execution. +// +// A CompositeLit may have pointer type only in the recursive (nested) +// case when the type name is implicit. e.g. in []*T{{}}, the inner +// literal has type *T behaves like &T{}. +// In that case, addr must hold a T, not a *T. +// +func (b *builder) compLit(fn *Function, addr Value, e *ast.CompositeLit, isZero bool, sb *storebuf) { + typ := deref(fn.Pkg.typeOf(e)) + switch t := typ.Underlying().(type) { + case *types.Struct: + if !isZero && len(e.Elts) != t.NumFields() { + // memclear + sb.store(&address{addr, nil}, zeroValue(fn, deref(addr.Type()), e), e) + isZero = true + } + for i, e := range e.Elts { + fieldIndex := i + if kv, ok := e.(*ast.KeyValueExpr); ok { + fname := kv.Key.(*ast.Ident).Name + for i, n := 0, t.NumFields(); i < n; i++ { + sf := t.Field(i) + if sf.Name() == fname { + fieldIndex = i + e = kv.Value + break + } + } + } + sf := t.Field(fieldIndex) + faddr := &FieldAddr{ + X: addr, + Field: fieldIndex, + } + faddr.setType(types.NewPointer(sf.Type())) + fn.emit(faddr, e) + b.assign(fn, &address{addr: faddr, expr: e}, e, isZero, sb, e) + } + + case *types.Array, *types.Slice: + var at *types.Array + var array Value + switch t := t.(type) { + case *types.Slice: + at = types.NewArray(t.Elem(), b.arrayLen(fn, e.Elts)) + alloc := emitNew(fn, at, e) + array = alloc + case *types.Array: + at = t + array = addr + + if !isZero && int64(len(e.Elts)) != at.Len() { + // memclear + sb.store(&address{array, nil}, zeroValue(fn, deref(array.Type()), e), e) + } + } + + var idx *Const + for _, e := range e.Elts { + if kv, ok := e.(*ast.KeyValueExpr); ok { + idx = b.expr(fn, kv.Key).(*Const) + e = kv.Value + } else { + var idxval int64 + if idx != nil { + idxval = idx.Int64() + 1 + } + idx = emitConst(fn, intConst(idxval)) + } + iaddr := &IndexAddr{ + X: array, + Index: idx, + } + iaddr.setType(types.NewPointer(at.Elem())) + fn.emit(iaddr, e) + if t != at { // slice + // backing array is unaliased => storebuf not needed. + b.assign(fn, &address{addr: iaddr, expr: e}, e, true, nil, e) + } else { + b.assign(fn, &address{addr: iaddr, expr: e}, e, true, sb, e) + } + } + + if t != at { // slice + s := &Slice{X: array} + s.setType(typ) + sb.store(&address{addr: addr, expr: e}, fn.emit(s, e), e) + } + + case *types.Map: + m := &MakeMap{Reserve: emitConst(fn, intConst(int64(len(e.Elts))))} + m.setType(typ) + fn.emit(m, e) + for _, e := range e.Elts { + e := e.(*ast.KeyValueExpr) + + // If a key expression in a map literal is itself a + // composite literal, the type may be omitted. + // For example: + // map[*struct{}]bool{{}: true} + // An &-operation may be implied: + // map[*struct{}]bool{&struct{}{}: true} + var key Value + if _, ok := unparen(e.Key).(*ast.CompositeLit); ok && isPointer(t.Key()) { + // A CompositeLit never evaluates to a pointer, + // so if the type of the location is a pointer, + // an &-operation is implied. 
+ key = b.addr(fn, e.Key, true).address(fn) + } else { + key = b.expr(fn, e.Key) + } + + loc := element{ + m: m, + k: emitConv(fn, key, t.Key(), e), + t: t.Elem(), + } + + // We call assign() only because it takes care + // of any &-operation required in the recursive + // case, e.g., + // map[int]*struct{}{0: {}} implies &struct{}{}. + // In-place update is of course impossible, + // and no storebuf is needed. + b.assign(fn, &loc, e.Value, true, nil, e) + } + sb.store(&address{addr: addr, expr: e}, m, e) + + default: + panic("unexpected CompositeLit type: " + t.String()) + } +} + +func (b *builder) switchStmt(fn *Function, s *ast.SwitchStmt, label *lblock) { + if s.Tag == nil { + b.switchStmtDynamic(fn, s, label) + return + } + dynamic := false + for _, iclause := range s.Body.List { + clause := iclause.(*ast.CaseClause) + for _, cond := range clause.List { + if fn.Pkg.info.Types[unparen(cond)].Value == nil { + dynamic = true + break + } + } + } + + if dynamic { + b.switchStmtDynamic(fn, s, label) + return + } + + if s.Init != nil { + b.stmt(fn, s.Init) + } + + entry := fn.currentBlock + tag := b.expr(fn, s.Tag) + + heads := make([]*BasicBlock, 0, len(s.Body.List)) + bodies := make([]*BasicBlock, len(s.Body.List)) + conds := make([]Value, 0, len(s.Body.List)) + + hasDefault := false + done := fn.newBasicBlock("switch.done") + if label != nil { + label._break = done + } + for i, stmt := range s.Body.List { + body := fn.newBasicBlock(fmt.Sprintf("switch.body.%d", i)) + bodies[i] = body + cas := stmt.(*ast.CaseClause) + if cas.List == nil { + // default branch + hasDefault = true + head := fn.newBasicBlock(fmt.Sprintf("switch.head.%d", i)) + conds = append(conds, nil) + heads = append(heads, head) + fn.currentBlock = head + emitJump(fn, body, cas) + } + for j, cond := range stmt.(*ast.CaseClause).List { + fn.currentBlock = entry + head := fn.newBasicBlock(fmt.Sprintf("switch.head.%d.%d", i, j)) + conds = append(conds, b.expr(fn, cond)) + heads = append(heads, head) + fn.currentBlock = head + emitJump(fn, body, cond) + } + } + + for i, stmt := range s.Body.List { + clause := stmt.(*ast.CaseClause) + body := bodies[i] + fn.currentBlock = body + fallthru := done + if i+1 < len(bodies) { + fallthru = bodies[i+1] + } + fn.targets = &targets{ + tail: fn.targets, + _break: done, + _fallthrough: fallthru, + } + b.stmtList(fn, clause.Body) + fn.targets = fn.targets.tail + emitJump(fn, done, stmt) + } + + if !hasDefault { + head := fn.newBasicBlock("switch.head.implicit-default") + body := fn.newBasicBlock("switch.body.implicit-default") + fn.currentBlock = head + emitJump(fn, body, s) + fn.currentBlock = body + emitJump(fn, done, s) + heads = append(heads, head) + conds = append(conds, nil) + } + + if len(heads) != len(conds) { + panic(fmt.Sprintf("internal error: %d heads for %d conds", len(heads), len(conds))) + } + for _, head := range heads { + addEdge(entry, head) + } + fn.currentBlock = entry + entry.emit(&ConstantSwitch{ + Tag: tag, + Conds: conds, + }, s) + fn.currentBlock = done +} + +// switchStmt emits to fn code for the switch statement s, optionally +// labelled by label. +// +func (b *builder) switchStmtDynamic(fn *Function, s *ast.SwitchStmt, label *lblock) { + // We treat SwitchStmt like a sequential if-else chain. + // Multiway dispatch can be recovered later by irutil.Switches() + // to those cases that are free of side effects. 
+ if s.Init != nil { + b.stmt(fn, s.Init) + } + kTrue := emitConst(fn, NewConst(constant.MakeBool(true), tBool)) + + var tagv Value = kTrue + var tagSource ast.Node = s + if s.Tag != nil { + tagv = b.expr(fn, s.Tag) + tagSource = s.Tag + } + // lifting only considers loads and stores, but we want different + // sigma nodes for the different comparisons. use a temporary and + // load it in every branch. + tag := fn.addLocal(tagv.Type(), tagSource) + emitStore(fn, tag, tagv, tagSource) + + done := fn.newBasicBlock("switch.done") + if label != nil { + label._break = done + } + // We pull the default case (if present) down to the end. + // But each fallthrough label must point to the next + // body block in source order, so we preallocate a + // body block (fallthru) for the next case. + // Unfortunately this makes for a confusing block order. + var dfltBody *[]ast.Stmt + var dfltFallthrough *BasicBlock + var fallthru, dfltBlock *BasicBlock + ncases := len(s.Body.List) + for i, clause := range s.Body.List { + body := fallthru + if body == nil { + body = fn.newBasicBlock("switch.body") // first case only + } + + // Preallocate body block for the next case. + fallthru = done + if i+1 < ncases { + fallthru = fn.newBasicBlock("switch.body") + } + + cc := clause.(*ast.CaseClause) + if cc.List == nil { + // Default case. + dfltBody = &cc.Body + dfltFallthrough = fallthru + dfltBlock = body + continue + } + + var nextCond *BasicBlock + for _, cond := range cc.List { + nextCond = fn.newBasicBlock("switch.next") + if tagv == kTrue { + // emit a proper if/else chain instead of a comparison + // of a value against true. + // + // NOTE(dh): adonovan had a todo saying "don't forget + // conversions though". As far as I can tell, there + // aren't any conversions that we need to take care of + // here. `case bool(a) && bool(b)` as well as `case + // bool(a && b)` are being taken care of by b.cond, + // and `case a` where a is not of type bool is + // invalid. + b.cond(fn, cond, body, nextCond) + } else { + cond := emitCompare(fn, token.EQL, emitLoad(fn, tag, cond), b.expr(fn, cond), cond) + emitIf(fn, cond, body, nextCond, cond.Source()) + } + + fn.currentBlock = nextCond + } + fn.currentBlock = body + fn.targets = &targets{ + tail: fn.targets, + _break: done, + _fallthrough: fallthru, + } + b.stmtList(fn, cc.Body) + fn.targets = fn.targets.tail + emitJump(fn, done, s) + fn.currentBlock = nextCond + } + if dfltBlock != nil { + // The lack of a Source for the jump doesn't matter, block + // fusing will get rid of the jump later. 
+ + emitJump(fn, dfltBlock, s) + fn.currentBlock = dfltBlock + fn.targets = &targets{ + tail: fn.targets, + _break: done, + _fallthrough: dfltFallthrough, + } + b.stmtList(fn, *dfltBody) + fn.targets = fn.targets.tail + } + emitJump(fn, done, s) + fn.currentBlock = done +} + +func (b *builder) typeSwitchStmt(fn *Function, s *ast.TypeSwitchStmt, label *lblock) { + if s.Init != nil { + b.stmt(fn, s.Init) + } + + var tag Value + switch e := s.Assign.(type) { + case *ast.ExprStmt: // x.(type) + tag = b.expr(fn, unparen(e.X).(*ast.TypeAssertExpr).X) + case *ast.AssignStmt: // y := x.(type) + tag = b.expr(fn, unparen(e.Rhs[0]).(*ast.TypeAssertExpr).X) + default: + panic("unreachable") + } + tagPtr := fn.addLocal(tag.Type(), tag.Source()) + emitStore(fn, tagPtr, tag, tag.Source()) + + // +1 in case there's no explicit default case + heads := make([]*BasicBlock, 0, len(s.Body.List)+1) + + entry := fn.currentBlock + done := fn.newBasicBlock("done") + if label != nil { + label._break = done + } + + // set up type switch and constant switch, populate their conditions + tswtch := &TypeSwitch{ + Tag: emitLoad(fn, tagPtr, tag.Source()), + Conds: make([]types.Type, 0, len(s.Body.List)+1), + } + cswtch := &ConstantSwitch{ + Conds: make([]Value, 0, len(s.Body.List)+1), + } + + rets := make([]types.Type, 0, len(s.Body.List)+1) + index := 0 + var default_ *ast.CaseClause + for _, clause := range s.Body.List { + cc := clause.(*ast.CaseClause) + if obj := fn.Pkg.info.Implicits[cc]; obj != nil { + fn.addNamedLocal(obj, cc) + } + if cc.List == nil { + // default case + default_ = cc + } else { + for _, expr := range cc.List { + tswtch.Conds = append(tswtch.Conds, fn.Pkg.typeOf(expr)) + cswtch.Conds = append(cswtch.Conds, emitConst(fn, intConst(int64(index)))) + index++ + } + if len(cc.List) == 1 { + rets = append(rets, fn.Pkg.typeOf(cc.List[0])) + } else { + for range cc.List { + rets = append(rets, tag.Type()) + } + } + } + } + + // default branch + rets = append(rets, tag.Type()) + + var vars []*types.Var + vars = append(vars, varIndex) + for _, typ := range rets { + vars = append(vars, anonVar(typ)) + } + tswtch.setType(types.NewTuple(vars...)) + // default branch + fn.currentBlock = entry + fn.emit(tswtch, s) + cswtch.Conds = append(cswtch.Conds, emitConst(fn, intConst(int64(-1)))) + // in theory we should add a local and stores/loads for tswtch, to + // generate sigma nodes in the branches. however, there isn't any + // useful information we could possibly attach to it. + cswtch.Tag = emitExtract(fn, tswtch, 0, s) + fn.emit(cswtch, s) + + // build heads and bodies + index = 0 + for _, clause := range s.Body.List { + cc := clause.(*ast.CaseClause) + if cc.List == nil { + continue + } + + body := fn.newBasicBlock("typeswitch.body") + for _, expr := range cc.List { + head := fn.newBasicBlock("typeswitch.head") + heads = append(heads, head) + fn.currentBlock = head + + if obj := fn.Pkg.info.Implicits[cc]; obj != nil { + // In a switch y := x.(type), each case clause + // implicitly declares a distinct object y. + // In a single-type case, y has that type. + // In multi-type cases, 'case nil' and default, + // y has the same type as the interface operand. 
+ + l := fn.objects[obj] + if rets[index] == tUntypedNil { + emitStore(fn, l, emitConst(fn, nilConst(tswtch.Tag.Type())), s.Assign) + } else { + x := emitExtract(fn, tswtch, index+1, s.Assign) + emitStore(fn, l, x, nil) + } + } + + emitJump(fn, body, expr) + index++ + } + fn.currentBlock = body + fn.targets = &targets{ + tail: fn.targets, + _break: done, + } + b.stmtList(fn, cc.Body) + fn.targets = fn.targets.tail + emitJump(fn, done, clause) + } + + if default_ == nil { + // implicit default + heads = append(heads, done) + } else { + body := fn.newBasicBlock("typeswitch.default") + heads = append(heads, body) + fn.currentBlock = body + fn.targets = &targets{ + tail: fn.targets, + _break: done, + } + if obj := fn.Pkg.info.Implicits[default_]; obj != nil { + l := fn.objects[obj] + x := emitExtract(fn, tswtch, index+1, s.Assign) + emitStore(fn, l, x, s) + } + b.stmtList(fn, default_.Body) + fn.targets = fn.targets.tail + emitJump(fn, done, s) + } + + fn.currentBlock = entry + for _, head := range heads { + addEdge(entry, head) + } + fn.currentBlock = done +} + +// selectStmt emits to fn code for the select statement s, optionally +// labelled by label. +// +func (b *builder) selectStmt(fn *Function, s *ast.SelectStmt, label *lblock) (noreturn bool) { + if len(s.Body.List) == 0 { + instr := &Select{Blocking: true} + instr.setType(types.NewTuple(varIndex, varOk)) + fn.emit(instr, s) + fn.emit(new(Unreachable), s) + addEdge(fn.currentBlock, fn.Exit) + return true + } + + // A blocking select of a single case degenerates to a + // simple send or receive. + // TODO(adonovan): opt: is this optimization worth its weight? + if len(s.Body.List) == 1 { + clause := s.Body.List[0].(*ast.CommClause) + if clause.Comm != nil { + b.stmt(fn, clause.Comm) + done := fn.newBasicBlock("select.done") + if label != nil { + label._break = done + } + fn.targets = &targets{ + tail: fn.targets, + _break: done, + } + b.stmtList(fn, clause.Body) + fn.targets = fn.targets.tail + emitJump(fn, done, clause) + fn.currentBlock = done + return false + } + } + + // First evaluate all channels in all cases, and find + // the directions of each state. + var states []*SelectState + blocking := true + debugInfo := fn.debugInfo() + for _, clause := range s.Body.List { + var st *SelectState + switch comm := clause.(*ast.CommClause).Comm.(type) { + case nil: // default case + blocking = false + continue + + case *ast.SendStmt: // ch<- i + ch := b.expr(fn, comm.Chan) + st = &SelectState{ + Dir: types.SendOnly, + Chan: ch, + Send: emitConv(fn, b.expr(fn, comm.Value), + ch.Type().Underlying().(*types.Chan).Elem(), comm), + Pos: comm.Arrow, + } + if debugInfo { + st.DebugNode = comm + } + + case *ast.AssignStmt: // x := <-ch + recv := unparen(comm.Rhs[0]).(*ast.UnaryExpr) + st = &SelectState{ + Dir: types.RecvOnly, + Chan: b.expr(fn, recv.X), + Pos: recv.OpPos, + } + if debugInfo { + st.DebugNode = recv + } + + case *ast.ExprStmt: // <-ch + recv := unparen(comm.X).(*ast.UnaryExpr) + st = &SelectState{ + Dir: types.RecvOnly, + Chan: b.expr(fn, recv.X), + Pos: recv.OpPos, + } + if debugInfo { + st.DebugNode = recv + } + } + states = append(states, st) + } + + // We dispatch on the (fair) result of Select using a + // switch on the returned index. 
+ sel := &Select{ + States: states, + Blocking: blocking, + } + sel.source = s + var vars []*types.Var + vars = append(vars, varIndex, varOk) + for _, st := range states { + if st.Dir == types.RecvOnly { + tElem := st.Chan.Type().Underlying().(*types.Chan).Elem() + vars = append(vars, anonVar(tElem)) + } + } + sel.setType(types.NewTuple(vars...)) + fn.emit(sel, s) + idx := emitExtract(fn, sel, 0, s) + + done := fn.newBasicBlock("select.done") + if label != nil { + label._break = done + } + + entry := fn.currentBlock + swtch := &ConstantSwitch{ + Tag: idx, + // one condition per case + Conds: make([]Value, 0, len(s.Body.List)+1), + } + // note that we don't need heads; a select case can only have a single condition + var bodies []*BasicBlock + + state := 0 + r := 2 // index in 'sel' tuple of value; increments if st.Dir==RECV + for _, cc := range s.Body.List { + clause := cc.(*ast.CommClause) + if clause.Comm == nil { + body := fn.newBasicBlock("select.default") + fn.currentBlock = body + bodies = append(bodies, body) + fn.targets = &targets{ + tail: fn.targets, + _break: done, + } + b.stmtList(fn, clause.Body) + emitJump(fn, done, s) + fn.targets = fn.targets.tail + swtch.Conds = append(swtch.Conds, emitConst(fn, intConst(-1))) + continue + } + swtch.Conds = append(swtch.Conds, emitConst(fn, intConst(int64(state)))) + body := fn.newBasicBlock("select.body") + fn.currentBlock = body + bodies = append(bodies, body) + fn.targets = &targets{ + tail: fn.targets, + _break: done, + } + switch comm := clause.Comm.(type) { + case *ast.ExprStmt: // <-ch + if debugInfo { + v := emitExtract(fn, sel, r, comm) + emitDebugRef(fn, states[state].DebugNode.(ast.Expr), v, false) + } + r++ + + case *ast.AssignStmt: // x := <-states[state].Chan + if comm.Tok == token.DEFINE { + fn.addLocalForIdent(comm.Lhs[0].(*ast.Ident)) + } + x := b.addr(fn, comm.Lhs[0], false) // non-escaping + v := emitExtract(fn, sel, r, comm) + if debugInfo { + emitDebugRef(fn, states[state].DebugNode.(ast.Expr), v, false) + } + x.store(fn, v, comm) + + if len(comm.Lhs) == 2 { // x, ok := ... + if comm.Tok == token.DEFINE { + fn.addLocalForIdent(comm.Lhs[1].(*ast.Ident)) + } + ok := b.addr(fn, comm.Lhs[1], false) // non-escaping + ok.store(fn, emitExtract(fn, sel, 1, comm), comm) + } + r++ + } + b.stmtList(fn, clause.Body) + fn.targets = fn.targets.tail + emitJump(fn, done, s) + state++ + } + fn.currentBlock = entry + fn.emit(swtch, s) + for _, body := range bodies { + addEdge(entry, body) + } + fn.currentBlock = done + return false +} + +// forStmt emits to fn code for the for statement s, optionally +// labelled by label. +// +func (b *builder) forStmt(fn *Function, s *ast.ForStmt, label *lblock) { + // ...init... + // jump loop + // loop: + // if cond goto body else done + // body: + // ...body... + // jump post + // post: (target of continue) + // ...post... 
+ // jump loop + // done: (target of break) + if s.Init != nil { + b.stmt(fn, s.Init) + } + body := fn.newBasicBlock("for.body") + done := fn.newBasicBlock("for.done") // target of 'break' + loop := body // target of back-edge + if s.Cond != nil { + loop = fn.newBasicBlock("for.loop") + } + cont := loop // target of 'continue' + if s.Post != nil { + cont = fn.newBasicBlock("for.post") + } + if label != nil { + label._break = done + label._continue = cont + } + emitJump(fn, loop, s) + fn.currentBlock = loop + if loop != body { + b.cond(fn, s.Cond, body, done) + fn.currentBlock = body + } + fn.targets = &targets{ + tail: fn.targets, + _break: done, + _continue: cont, + } + b.stmt(fn, s.Body) + fn.targets = fn.targets.tail + emitJump(fn, cont, s) + + if s.Post != nil { + fn.currentBlock = cont + b.stmt(fn, s.Post) + emitJump(fn, loop, s) // back-edge + } + fn.currentBlock = done +} + +// rangeIndexed emits to fn the header for an integer-indexed loop +// over array, *array or slice value x. +// The v result is defined only if tv is non-nil. +// forPos is the position of the "for" token. +// +func (b *builder) rangeIndexed(fn *Function, x Value, tv types.Type, source ast.Node) (k, v Value, loop, done *BasicBlock) { + // + // length = len(x) + // index = -1 + // loop: (target of continue) + // index++ + // if index < length goto body else done + // body: + // k = index + // v = x[index] + // ...body... + // jump loop + // done: (target of break) + + // Determine number of iterations. + var length Value + if arr, ok := deref(x.Type()).Underlying().(*types.Array); ok { + // For array or *array, the number of iterations is + // known statically thanks to the type. We avoid a + // data dependence upon x, permitting later dead-code + // elimination if x is pure, static unrolling, etc. + // Ranging over a nil *array may have >0 iterations. + // We still generate code for x, in case it has effects. + length = emitConst(fn, intConst(arr.Len())) + } else { + // length = len(x). + var c Call + c.Call.Value = makeLen(x.Type()) + c.Call.Args = []Value{x} + c.setType(tInt) + length = fn.emit(&c, source) + } + + index := fn.addLocal(tInt, source) + emitStore(fn, index, emitConst(fn, intConst(-1)), source) + + loop = fn.newBasicBlock("rangeindex.loop") + emitJump(fn, loop, source) + fn.currentBlock = loop + + incr := &BinOp{ + Op: token.ADD, + X: emitLoad(fn, index, source), + Y: emitConst(fn, intConst(1)), + } + incr.setType(tInt) + emitStore(fn, index, fn.emit(incr, source), source) + + body := fn.newBasicBlock("rangeindex.body") + done = fn.newBasicBlock("rangeindex.done") + emitIf(fn, emitCompare(fn, token.LSS, incr, length, source), body, done, source) + fn.currentBlock = body + + k = emitLoad(fn, index, source) + if tv != nil { + switch t := x.Type().Underlying().(type) { + case *types.Array: + instr := &Index{ + X: x, + Index: k, + } + instr.setType(t.Elem()) + v = fn.emit(instr, source) + + case *types.Pointer: // *array + instr := &IndexAddr{ + X: x, + Index: k, + } + instr.setType(types.NewPointer(t.Elem().Underlying().(*types.Array).Elem())) + v = emitLoad(fn, fn.emit(instr, source), source) + + case *types.Slice: + instr := &IndexAddr{ + X: x, + Index: k, + } + instr.setType(types.NewPointer(t.Elem())) + v = emitLoad(fn, fn.emit(instr, source), source) + + default: + panic("rangeIndexed x:" + t.String()) + } + } + return +} + +// rangeIter emits to fn the header for a loop using +// Range/Next/Extract to iterate over map or string value x. 
+// tk and tv are the types of the key/value results k and v, or nil +// if the respective component is not wanted. +// +func (b *builder) rangeIter(fn *Function, x Value, tk, tv types.Type, source ast.Node) (k, v Value, loop, done *BasicBlock) { + // + // it = range x + // loop: (target of continue) + // okv = next it (ok, key, value) + // ok = extract okv #0 + // if ok goto body else done + // body: + // k = extract okv #1 + // v = extract okv #2 + // ...body... + // jump loop + // done: (target of break) + // + + if tk == nil { + tk = tInvalid + } + if tv == nil { + tv = tInvalid + } + + rng := &Range{X: x} + rng.setType(tRangeIter) + it := fn.emit(rng, source) + + loop = fn.newBasicBlock("rangeiter.loop") + emitJump(fn, loop, source) + fn.currentBlock = loop + + _, isString := x.Type().Underlying().(*types.Basic) + + okv := &Next{ + Iter: it, + IsString: isString, + } + okv.setType(types.NewTuple( + varOk, + newVar("k", tk), + newVar("v", tv), + )) + fn.emit(okv, source) + + body := fn.newBasicBlock("rangeiter.body") + done = fn.newBasicBlock("rangeiter.done") + emitIf(fn, emitExtract(fn, okv, 0, source), body, done, source) + fn.currentBlock = body + + if tk != tInvalid { + k = emitExtract(fn, okv, 1, source) + } + if tv != tInvalid { + v = emitExtract(fn, okv, 2, source) + } + return +} + +// rangeChan emits to fn the header for a loop that receives from +// channel x until it fails. +// tk is the channel's element type, or nil if the k result is +// not wanted +// pos is the position of the '=' or ':=' token. +// +func (b *builder) rangeChan(fn *Function, x Value, tk types.Type, source ast.Node) (k Value, loop, done *BasicBlock) { + // + // loop: (target of continue) + // ko = <-x (key, ok) + // ok = extract ko #1 + // if ok goto body else done + // body: + // k = extract ko #0 + // ... + // goto loop + // done: (target of break) + + loop = fn.newBasicBlock("rangechan.loop") + emitJump(fn, loop, source) + fn.currentBlock = loop + retv := emitRecv(fn, x, true, types.NewTuple(newVar("k", x.Type().Underlying().(*types.Chan).Elem()), varOk), source) + body := fn.newBasicBlock("rangechan.body") + done = fn.newBasicBlock("rangechan.done") + emitIf(fn, emitExtract(fn, retv, 1, source), body, done, source) + fn.currentBlock = body + if tk != nil { + k = emitExtract(fn, retv, 0, source) + } + return +} + +// rangeStmt emits to fn code for the range statement s, optionally +// labelled by label. +// +func (b *builder) rangeStmt(fn *Function, s *ast.RangeStmt, label *lblock, source ast.Node) { + var tk, tv types.Type + if s.Key != nil && !isBlankIdent(s.Key) { + tk = fn.Pkg.typeOf(s.Key) + } + if s.Value != nil && !isBlankIdent(s.Value) { + tv = fn.Pkg.typeOf(s.Value) + } + + // If iteration variables are defined (:=), this + // occurs once outside the loop. + // + // Unlike a short variable declaration, a RangeStmt + // using := never redeclares an existing variable; it + // always creates a new one. 
+ if s.Tok == token.DEFINE { + if tk != nil { + fn.addLocalForIdent(s.Key.(*ast.Ident)) + } + if tv != nil { + fn.addLocalForIdent(s.Value.(*ast.Ident)) + } + } + + x := b.expr(fn, s.X) + + var k, v Value + var loop, done *BasicBlock + switch rt := x.Type().Underlying().(type) { + case *types.Slice, *types.Array, *types.Pointer: // *array + k, v, loop, done = b.rangeIndexed(fn, x, tv, source) + + case *types.Chan: + k, loop, done = b.rangeChan(fn, x, tk, source) + + case *types.Map, *types.Basic: // string + k, v, loop, done = b.rangeIter(fn, x, tk, tv, source) + + default: + panic("Cannot range over: " + rt.String()) + } + + // Evaluate both LHS expressions before we update either. + var kl, vl lvalue + if tk != nil { + kl = b.addr(fn, s.Key, false) // non-escaping + } + if tv != nil { + vl = b.addr(fn, s.Value, false) // non-escaping + } + if tk != nil { + kl.store(fn, k, s) + } + if tv != nil { + vl.store(fn, v, s) + } + + if label != nil { + label._break = done + label._continue = loop + } + + fn.targets = &targets{ + tail: fn.targets, + _break: done, + _continue: loop, + } + b.stmt(fn, s.Body) + fn.targets = fn.targets.tail + emitJump(fn, loop, source) // back-edge + fn.currentBlock = done +} + +// stmt lowers statement s to IR form, emitting code to fn. +func (b *builder) stmt(fn *Function, _s ast.Stmt) { + // The label of the current statement. If non-nil, its _goto + // target is always set; its _break and _continue are set only + // within the body of switch/typeswitch/select/for/range. + // It is effectively an additional default-nil parameter of stmt(). + var label *lblock +start: + switch s := _s.(type) { + case *ast.EmptyStmt: + // ignore. (Usually removed by gofmt.) + + case *ast.DeclStmt: // Con, Var or Typ + d := s.Decl.(*ast.GenDecl) + if d.Tok == token.VAR { + for _, spec := range d.Specs { + if vs, ok := spec.(*ast.ValueSpec); ok { + b.localValueSpec(fn, vs) + } + } + } + + case *ast.LabeledStmt: + label = fn.labelledBlock(s.Label) + emitJump(fn, label._goto, s) + fn.currentBlock = label._goto + _s = s.Stmt + goto start // effectively: tailcall stmt(fn, s.Stmt, label) + + case *ast.ExprStmt: + b.expr(fn, s.X) + + case *ast.SendStmt: + instr := &Send{ + Chan: b.expr(fn, s.Chan), + X: emitConv(fn, b.expr(fn, s.Value), + fn.Pkg.typeOf(s.Chan).Underlying().(*types.Chan).Elem(), s), + } + fn.emit(instr, s) + + case *ast.IncDecStmt: + op := token.ADD + if s.Tok == token.DEC { + op = token.SUB + } + loc := b.addr(fn, s.X, false) + b.assignOp(fn, loc, emitConst(fn, NewConst(constant.MakeInt64(1), loc.typ())), op, s) + + case *ast.AssignStmt: + switch s.Tok { + case token.ASSIGN, token.DEFINE: + b.assignStmt(fn, s.Lhs, s.Rhs, s.Tok == token.DEFINE, _s) + + default: // +=, etc. + op := s.Tok + token.ADD - token.ADD_ASSIGN + b.assignOp(fn, b.addr(fn, s.Lhs[0], false), b.expr(fn, s.Rhs[0]), op, s) + } + + case *ast.GoStmt: + // The "intrinsics" new/make/len/cap are forbidden here. + // panic is treated like an ordinary function call. + v := Go{} + b.setCall(fn, s.Call, &v.Call) + fn.emit(&v, s) + + case *ast.DeferStmt: + // The "intrinsics" new/make/len/cap are forbidden here. + // panic is treated like an ordinary function call. + v := Defer{} + b.setCall(fn, s.Call, &v.Call) + fn.hasDefer = true + fn.emit(&v, s) + + case *ast.ReturnStmt: + // TODO(dh): we could emit tighter position information by + // using the ith returned expression + + var results []Value + if len(s.Results) == 1 && fn.Signature.Results().Len() > 1 { + // Return of one expression in a multi-valued function. 
+ tuple := b.exprN(fn, s.Results[0]) + ttuple := tuple.Type().(*types.Tuple) + for i, n := 0, ttuple.Len(); i < n; i++ { + results = append(results, + emitConv(fn, emitExtract(fn, tuple, i, s), + fn.Signature.Results().At(i).Type(), s)) + } + } else { + // 1:1 return, or no-arg return in non-void function. + for i, r := range s.Results { + v := emitConv(fn, b.expr(fn, r), fn.Signature.Results().At(i).Type(), s) + results = append(results, v) + } + } + + ret := fn.results() + for i, r := range results { + emitStore(fn, ret[i], r, s) + } + + emitJump(fn, fn.Exit, s) + fn.currentBlock = fn.newBasicBlock("unreachable") + + case *ast.BranchStmt: + var block *BasicBlock + switch s.Tok { + case token.BREAK: + if s.Label != nil { + block = fn.labelledBlock(s.Label)._break + } else { + for t := fn.targets; t != nil && block == nil; t = t.tail { + block = t._break + } + } + + case token.CONTINUE: + if s.Label != nil { + block = fn.labelledBlock(s.Label)._continue + } else { + for t := fn.targets; t != nil && block == nil; t = t.tail { + block = t._continue + } + } + + case token.FALLTHROUGH: + for t := fn.targets; t != nil && block == nil; t = t.tail { + block = t._fallthrough + } + + case token.GOTO: + block = fn.labelledBlock(s.Label)._goto + } + j := emitJump(fn, block, s) + j.Comment = s.Tok.String() + fn.currentBlock = fn.newBasicBlock("unreachable") + + case *ast.BlockStmt: + b.stmtList(fn, s.List) + + case *ast.IfStmt: + if s.Init != nil { + b.stmt(fn, s.Init) + } + then := fn.newBasicBlock("if.then") + done := fn.newBasicBlock("if.done") + els := done + if s.Else != nil { + els = fn.newBasicBlock("if.else") + } + instr := b.cond(fn, s.Cond, then, els) + instr.source = s + fn.currentBlock = then + b.stmt(fn, s.Body) + emitJump(fn, done, s) + + if s.Else != nil { + fn.currentBlock = els + b.stmt(fn, s.Else) + emitJump(fn, done, s) + } + + fn.currentBlock = done + + case *ast.SwitchStmt: + b.switchStmt(fn, s, label) + + case *ast.TypeSwitchStmt: + b.typeSwitchStmt(fn, s, label) + + case *ast.SelectStmt: + if b.selectStmt(fn, s, label) { + // the select has no cases, it blocks forever + fn.currentBlock = fn.newBasicBlock("unreachable") + } + + case *ast.ForStmt: + b.forStmt(fn, s, label) + + case *ast.RangeStmt: + b.rangeStmt(fn, s, label, s) + + default: + panic(fmt.Sprintf("unexpected statement kind: %T", s)) + } +} + +// buildFunction builds IR code for the body of function fn. Idempotent. +func (b *builder) buildFunction(fn *Function) { + if fn.Blocks != nil { + return // building already started + } + + var recvField *ast.FieldList + var body *ast.BlockStmt + var functype *ast.FuncType + switch n := fn.source.(type) { + case nil: + return // not a Go source function. (Synthetic, or from object file.) + case *ast.FuncDecl: + functype = n.Type + recvField = n.Recv + body = n.Body + case *ast.FuncLit: + functype = n.Type + body = n.Body + default: + panic(n) + } + + if fn.Package().Pkg.Path() == "syscall" && fn.Name() == "Exit" { + // syscall.Exit is a stub and the way os.Exit terminates the + // process. Note that there are other functions in the runtime + // that also terminate or unwind that we cannot analyze. + // However, they aren't stubs, so buildExits ends up getting + // called on them, so that's where we handle those special + // cases. + fn.NoReturn = AlwaysExits + } + + if body == nil { + // External function. + if fn.Params == nil { + // This condition ensures we add a non-empty + // params list once only, but we may attempt + // the degenerate empty case repeatedly. 
+ // TODO(adonovan): opt: don't do that. + + // We set Function.Params even though there is no body + // code to reference them. This simplifies clients. + if recv := fn.Signature.Recv(); recv != nil { + // XXX synthesize an ast.Node + fn.addParamObj(recv, nil) + } + params := fn.Signature.Params() + for i, n := 0, params.Len(); i < n; i++ { + // XXX synthesize an ast.Node + fn.addParamObj(params.At(i), nil) + } + } + return + } + if fn.Prog.mode&LogSource != 0 { + defer logStack("build function %s @ %s", fn, fn.Prog.Fset.Position(fn.Pos()))() + } + fn.blocksets = b.blocksets + fn.Blocks = make([]*BasicBlock, 0, avgBlocks) + fn.startBody() + fn.createSyntacticParams(recvField, functype) + fn.exitBlock() + b.stmt(fn, body) + if cb := fn.currentBlock; cb != nil && (cb == fn.Blocks[0] || cb.Preds != nil) { + // Control fell off the end of the function's body block. + // + // Block optimizations eliminate the current block, if + // unreachable. It is a builder invariant that + // if this no-arg return is ill-typed for + // fn.Signature.Results, this block must be + // unreachable. The sanity checker checks this. + // fn.emit(new(RunDefers)) + // fn.emit(new(Return)) + emitJump(fn, fn.Exit, nil) + } + optimizeBlocks(fn) + buildFakeExits(fn) + b.buildExits(fn) + b.addUnreachables(fn) + fn.finishBody() + b.blocksets = fn.blocksets + fn.functionBody = nil +} + +// buildFuncDecl builds IR code for the function or method declared +// by decl in package pkg. +// +func (b *builder) buildFuncDecl(pkg *Package, decl *ast.FuncDecl) { + id := decl.Name + if isBlankIdent(id) { + return // discard + } + fn := pkg.values[pkg.info.Defs[id]].(*Function) + if decl.Recv == nil && id.Name == "init" { + var v Call + v.Call.Value = fn + v.setType(types.NewTuple()) + pkg.init.emit(&v, decl) + } + fn.source = decl + b.buildFunction(fn) +} + +// Build calls Package.Build for each package in prog. +// +// Build is intended for whole-program analysis; a typical compiler +// need only build a single package. +// +// Build is idempotent and thread-safe. +// +func (prog *Program) Build() { + for _, p := range prog.packages { + p.Build() + } +} + +// Build builds IR code for all functions and vars in package p. +// +// Precondition: CreatePackage must have been called for all of p's +// direct imports (and hence its direct imports must have been +// error-free). +// +// Build is idempotent and thread-safe. +// +func (p *Package) Build() { p.buildOnce.Do(p.build) } + +func (p *Package) build() { + if p.info == nil { + return // synthetic package, e.g. "testmain" + } + + // Ensure we have runtime type info for all exported members. + // TODO(adonovan): ideally belongs in memberFromObject, but + // that would require package creation in topological order. + for name, mem := range p.Members { + if ast.IsExported(name) { + p.Prog.needMethodsOf(mem.Type()) + } + } + if p.Prog.mode&LogSource != 0 { + defer logStack("build %s", p)() + } + init := p.init + init.startBody() + init.exitBlock() + + var done *BasicBlock + + // Make init() skip if package is already initialized. + initguard := p.Var("init$guard") + doinit := init.newBasicBlock("init.start") + done = init.Exit + emitIf(init, emitLoad(init, initguard, nil), done, doinit, nil) + init.currentBlock = doinit + emitStore(init, initguard, emitConst(init, NewConst(constant.MakeBool(true), tBool)), nil) + + // Call the init() function of each package we import. 
+ for _, pkg := range p.Pkg.Imports() { + prereq := p.Prog.packages[pkg] + if prereq == nil { + panic(fmt.Sprintf("Package(%q).Build(): unsatisfied import: Program.CreatePackage(%q) was not called", p.Pkg.Path(), pkg.Path())) + } + var v Call + v.Call.Value = prereq.init + v.setType(types.NewTuple()) + init.emit(&v, nil) + } + + b := builder{ + printFunc: p.printFunc, + } + + // Initialize package-level vars in correct order. + for _, varinit := range p.info.InitOrder { + if init.Prog.mode&LogSource != 0 { + fmt.Fprintf(os.Stderr, "build global initializer %v @ %s\n", + varinit.Lhs, p.Prog.Fset.Position(varinit.Rhs.Pos())) + } + if len(varinit.Lhs) == 1 { + // 1:1 initialization: var x, y = a(), b() + var lval lvalue + if v := varinit.Lhs[0]; v.Name() != "_" { + lval = &address{addr: p.values[v].(*Global)} + } else { + lval = blank{} + } + // TODO(dh): do emit position information + b.assign(init, lval, varinit.Rhs, true, nil, nil) + } else { + // n:1 initialization: var x, y := f() + tuple := b.exprN(init, varinit.Rhs) + for i, v := range varinit.Lhs { + if v.Name() == "_" { + continue + } + emitStore(init, p.values[v].(*Global), emitExtract(init, tuple, i, nil), nil) + } + } + } + + // Build all package-level functions, init functions + // and methods, including unreachable/blank ones. + // We build them in source order, but it's not significant. + for _, file := range p.files { + for _, decl := range file.Decls { + if decl, ok := decl.(*ast.FuncDecl); ok { + b.buildFuncDecl(p, decl) + } + } + } + + // Finish up init(). + emitJump(init, done, nil) + init.finishBody() + + p.info = nil // We no longer need ASTs or go/types deductions. + + if p.Prog.mode&SanityCheckFunctions != 0 { + sanityCheckPackage(p) + } +} + +// Like ObjectOf, but panics instead of returning nil. +// Only valid during p's create and build phases. +func (p *Package) objectOf(id *ast.Ident) types.Object { + if o := p.info.ObjectOf(id); o != nil { + return o + } + panic(fmt.Sprintf("no types.Object for ast.Ident %s @ %s", + id.Name, p.Prog.Fset.Position(id.Pos()))) +} + +// Like TypeOf, but panics instead of returning nil. +// Only valid during p's create and build phases. +func (p *Package) typeOf(e ast.Expr) types.Type { + if T := p.info.TypeOf(e); T != nil { + return T + } + panic(fmt.Sprintf("no type for %T @ %s", + e, p.Prog.Fset.Position(e.Pos()))) +} diff --git a/vendor/honnef.co/go/tools/go/ir/const.go b/vendor/honnef.co/go/tools/go/ir/const.go new file mode 100644 index 000000000..7cdf006e8 --- /dev/null +++ b/vendor/honnef.co/go/tools/go/ir/const.go @@ -0,0 +1,153 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package ir + +// This file defines the Const SSA value type. + +import ( + "fmt" + "go/constant" + "go/types" + "strconv" +) + +// NewConst returns a new constant of the specified value and type. +// val must be valid according to the specification of Const.Value. +// +func NewConst(val constant.Value, typ types.Type) *Const { + return &Const{ + register: register{ + typ: typ, + }, + Value: val, + } +} + +// intConst returns an 'int' constant that evaluates to i. +// (i is an int64 in case the host is narrower than the target.) +func intConst(i int64) *Const { + return NewConst(constant.MakeInt64(i), tInt) +} + +// nilConst returns a nil constant of the specified type, which may +// be any reference type, including interfaces. 
+// +func nilConst(typ types.Type) *Const { + return NewConst(nil, typ) +} + +// stringConst returns a 'string' constant that evaluates to s. +func stringConst(s string) *Const { + return NewConst(constant.MakeString(s), tString) +} + +// zeroConst returns a new "zero" constant of the specified type, +// which must not be an array or struct type: the zero values of +// aggregates are well-defined but cannot be represented by Const. +// +func zeroConst(t types.Type) *Const { + switch t := t.(type) { + case *types.Basic: + switch { + case t.Info()&types.IsBoolean != 0: + return NewConst(constant.MakeBool(false), t) + case t.Info()&types.IsNumeric != 0: + return NewConst(constant.MakeInt64(0), t) + case t.Info()&types.IsString != 0: + return NewConst(constant.MakeString(""), t) + case t.Kind() == types.UnsafePointer: + fallthrough + case t.Kind() == types.UntypedNil: + return nilConst(t) + default: + panic(fmt.Sprint("zeroConst for unexpected type:", t)) + } + case *types.Pointer, *types.Slice, *types.Interface, *types.Chan, *types.Map, *types.Signature: + return nilConst(t) + case *types.Named: + return NewConst(zeroConst(t.Underlying()).Value, t) + case *types.Array, *types.Struct, *types.Tuple: + panic(fmt.Sprint("zeroConst applied to aggregate:", t)) + } + panic(fmt.Sprint("zeroConst: unexpected ", t)) +} + +func (c *Const) RelString(from *types.Package) string { + var p string + if c.Value == nil { + p = "nil" + } else if c.Value.Kind() == constant.String { + v := constant.StringVal(c.Value) + const max = 20 + // TODO(adonovan): don't cut a rune in half. + if len(v) > max { + v = v[:max-3] + "..." // abbreviate + } + p = strconv.Quote(v) + } else { + p = c.Value.String() + } + return fmt.Sprintf("Const <%s> {%s}", relType(c.Type(), from), p) +} + +func (c *Const) String() string { + return c.RelString(c.Parent().pkg()) +} + +// IsNil returns true if this constant represents a typed or untyped nil value. +func (c *Const) IsNil() bool { + return c.Value == nil +} + +// Int64 returns the numeric value of this constant truncated to fit +// a signed 64-bit integer. +// +func (c *Const) Int64() int64 { + switch x := constant.ToInt(c.Value); x.Kind() { + case constant.Int: + if i, ok := constant.Int64Val(x); ok { + return i + } + return 0 + case constant.Float: + f, _ := constant.Float64Val(x) + return int64(f) + } + panic(fmt.Sprintf("unexpected constant value: %T", c.Value)) +} + +// Uint64 returns the numeric value of this constant truncated to fit +// an unsigned 64-bit integer. +// +func (c *Const) Uint64() uint64 { + switch x := constant.ToInt(c.Value); x.Kind() { + case constant.Int: + if u, ok := constant.Uint64Val(x); ok { + return u + } + return 0 + case constant.Float: + f, _ := constant.Float64Val(x) + return uint64(f) + } + panic(fmt.Sprintf("unexpected constant value: %T", c.Value)) +} + +// Float64 returns the numeric value of this constant truncated to fit +// a float64. +// +func (c *Const) Float64() float64 { + f, _ := constant.Float64Val(c.Value) + return f +} + +// Complex128 returns the complex value of this constant truncated to +// fit a complex128. 
+// +func (c *Const) Complex128() complex128 { + re, _ := constant.Float64Val(constant.Real(c.Value)) + im, _ := constant.Float64Val(constant.Imag(c.Value)) + return complex(re, im) +} diff --git a/vendor/honnef.co/go/tools/go/ir/create.go b/vendor/honnef.co/go/tools/go/ir/create.go new file mode 100644 index 000000000..5e7f6ed94 --- /dev/null +++ b/vendor/honnef.co/go/tools/go/ir/create.go @@ -0,0 +1,288 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package ir + +// This file implements the CREATE phase of IR construction. +// See builder.go for explanation. + +import ( + "fmt" + "go/ast" + "go/token" + "go/types" + "os" + "sync" + + "honnef.co/go/tools/go/types/typeutil" +) + +// measured on the standard library and rounded up to powers of two, +// on average there are 8 blocks and 16 instructions per block in a +// function. +const avgBlocks = 8 +const avgInstructionsPerBlock = 16 + +// NewProgram returns a new IR Program. +// +// mode controls diagnostics and checking during IR construction. +// +func NewProgram(fset *token.FileSet, mode BuilderMode) *Program { + prog := &Program{ + Fset: fset, + imported: make(map[string]*Package), + packages: make(map[*types.Package]*Package), + thunks: make(map[selectionKey]*Function), + bounds: make(map[*types.Func]*Function), + mode: mode, + } + + h := typeutil.MakeHasher() // protected by methodsMu, in effect + prog.methodSets.SetHasher(h) + prog.canon.SetHasher(h) + + return prog +} + +// memberFromObject populates package pkg with a member for the +// typechecker object obj. +// +// For objects from Go source code, syntax is the associated syntax +// tree (for funcs and vars only); it will be used during the build +// phase. +// +func memberFromObject(pkg *Package, obj types.Object, syntax ast.Node) { + name := obj.Name() + switch obj := obj.(type) { + case *types.Builtin: + if pkg.Pkg != types.Unsafe { + panic("unexpected builtin object: " + obj.String()) + } + + case *types.TypeName: + pkg.Members[name] = &Type{ + object: obj, + pkg: pkg, + } + + case *types.Const: + c := &NamedConst{ + object: obj, + Value: NewConst(obj.Val(), obj.Type()), + pkg: pkg, + } + pkg.values[obj] = c.Value + pkg.Members[name] = c + + case *types.Var: + g := &Global{ + Pkg: pkg, + name: name, + object: obj, + typ: types.NewPointer(obj.Type()), // address + } + pkg.values[obj] = g + pkg.Members[name] = g + + case *types.Func: + sig := obj.Type().(*types.Signature) + if sig.Recv() == nil && name == "init" { + pkg.ninit++ + name = fmt.Sprintf("init#%d", pkg.ninit) + } + fn := &Function{ + name: name, + object: obj, + Signature: sig, + Pkg: pkg, + Prog: pkg.Prog, + } + + fn.source = syntax + fn.initHTML(pkg.printFunc) + if syntax == nil { + fn.Synthetic = SyntheticLoadedFromExportData + } else { + // Note: we initialize fn.Blocks in + // (*builder).buildFunction and not here because Blocks + // being nil is used to indicate that building of the + // function hasn't started yet. + + fn.functionBody = &functionBody{ + scratchInstructions: make([]Instruction, avgBlocks*avgInstructionsPerBlock), + } + } + + pkg.values[obj] = fn + pkg.Functions = append(pkg.Functions, fn) + if sig.Recv() == nil { + pkg.Members[name] = fn // package-level function + } + + default: // (incl. 
*types.Package) + panic("unexpected Object type: " + obj.String()) + } +} + +// membersFromDecl populates package pkg with members for each +// typechecker object (var, func, const or type) associated with the +// specified decl. +// +func membersFromDecl(pkg *Package, decl ast.Decl) { + switch decl := decl.(type) { + case *ast.GenDecl: // import, const, type or var + switch decl.Tok { + case token.CONST: + for _, spec := range decl.Specs { + for _, id := range spec.(*ast.ValueSpec).Names { + if !isBlankIdent(id) { + memberFromObject(pkg, pkg.info.Defs[id], nil) + } + } + } + + case token.VAR: + for _, spec := range decl.Specs { + for _, id := range spec.(*ast.ValueSpec).Names { + if !isBlankIdent(id) { + memberFromObject(pkg, pkg.info.Defs[id], spec) + } + } + } + + case token.TYPE: + for _, spec := range decl.Specs { + id := spec.(*ast.TypeSpec).Name + if !isBlankIdent(id) { + memberFromObject(pkg, pkg.info.Defs[id], nil) + } + } + } + + case *ast.FuncDecl: + id := decl.Name + if !isBlankIdent(id) { + memberFromObject(pkg, pkg.info.Defs[id], decl) + } + } +} + +// CreatePackage constructs and returns an IR Package from the +// specified type-checked, error-free file ASTs, and populates its +// Members mapping. +// +// importable determines whether this package should be returned by a +// subsequent call to ImportedPackage(pkg.Path()). +// +// The real work of building IR form for each function is not done +// until a subsequent call to Package.Build(). +// +func (prog *Program) CreatePackage(pkg *types.Package, files []*ast.File, info *types.Info, importable bool) *Package { + p := &Package{ + Prog: prog, + Members: make(map[string]Member), + values: make(map[types.Object]Value), + Pkg: pkg, + info: info, // transient (CREATE and BUILD phases) + files: files, // transient (CREATE and BUILD phases) + printFunc: prog.PrintFunc, + } + + // Add init() function. + p.init = &Function{ + name: "init", + Signature: new(types.Signature), + Synthetic: SyntheticPackageInitializer, + Pkg: p, + Prog: prog, + functionBody: new(functionBody), + } + p.init.initHTML(prog.PrintFunc) + p.Members[p.init.name] = p.init + p.Functions = append(p.Functions, p.init) + + // CREATE phase. + // Allocate all package members: vars, funcs, consts and types. + if len(files) > 0 { + // Go source package. + for _, file := range files { + for _, decl := range file.Decls { + membersFromDecl(p, decl) + } + } + } else { + // GC-compiled binary package (or "unsafe") + // No code. + // No position information. + scope := p.Pkg.Scope() + for _, name := range scope.Names() { + obj := scope.Lookup(name) + memberFromObject(p, obj, nil) + if obj, ok := obj.(*types.TypeName); ok { + if named, ok := obj.Type().(*types.Named); ok { + for i, n := 0, named.NumMethods(); i < n; i++ { + memberFromObject(p, named.Method(i), nil) + } + } + } + } + } + + // Add initializer guard variable. + initguard := &Global{ + Pkg: p, + name: "init$guard", + typ: types.NewPointer(tBool), + } + p.Members[initguard.Name()] = initguard + + if prog.mode&GlobalDebug != 0 { + p.SetDebugMode(true) + } + + if prog.mode&PrintPackages != 0 { + printMu.Lock() + p.WriteTo(os.Stdout) + printMu.Unlock() + } + + if importable { + prog.imported[p.Pkg.Path()] = p + } + prog.packages[p.Pkg] = p + + return p +} + +// printMu serializes printing of Packages/Functions to stdout. +var printMu sync.Mutex + +// AllPackages returns a new slice containing all packages in the +// program prog in unspecified order. 
+// +func (prog *Program) AllPackages() []*Package { + pkgs := make([]*Package, 0, len(prog.packages)) + for _, pkg := range prog.packages { + pkgs = append(pkgs, pkg) + } + return pkgs +} + +// ImportedPackage returns the importable Package whose PkgPath +// is path, or nil if no such Package has been created. +// +// A parameter to CreatePackage determines whether a package should be +// considered importable. For example, no import declaration can resolve +// to the ad-hoc main package created by 'go build foo.go'. +// +// TODO(adonovan): rethink this function and the "importable" concept; +// most packages are importable. This function assumes that all +// types.Package.Path values are unique within the ir.Program, which is +// false---yet this function remains very convenient. +// Clients should use (*Program).Package instead where possible. +// IR doesn't really need a string-keyed map of packages. +// +func (prog *Program) ImportedPackage(path string) *Package { + return prog.imported[path] +} diff --git a/vendor/honnef.co/go/tools/go/ir/doc.go b/vendor/honnef.co/go/tools/go/ir/doc.go new file mode 100644 index 000000000..0765d439e --- /dev/null +++ b/vendor/honnef.co/go/tools/go/ir/doc.go @@ -0,0 +1,129 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package ir defines a representation of the elements of Go programs +// (packages, types, functions, variables and constants) using a +// static single-information (SSI) form intermediate representation +// (IR) for the bodies of functions. +// +// THIS INTERFACE IS EXPERIMENTAL AND IS LIKELY TO CHANGE. +// +// For an introduction to SSA form, upon which SSI builds, see +// http://en.wikipedia.org/wiki/Static_single_assignment_form. +// This page provides a broader reading list: +// http://www.dcs.gla.ac.uk/~jsinger/ssa.html. +// +// For an introduction to SSI form, see The static single information +// form by C. Scott Ananian. +// +// The level of abstraction of the IR form is intentionally close to +// the source language to facilitate construction of source analysis +// tools. It is not intended for machine code generation. +// +// The simplest way to create the IR of a package is +// to load typed syntax trees using golang.org/x/tools/go/packages, then +// invoke the irutil.Packages helper function. See ExampleLoadPackages +// and ExampleWholeProgram for examples. +// The resulting ir.Program contains all the packages and their +// members, but IR code is not created for function bodies until a +// subsequent call to (*Package).Build or (*Program).Build. +// +// The builder initially builds a naive IR form in which all local +// variables are addresses of stack locations with explicit loads and +// stores. Registerization of eligible locals and φ-node insertion +// using dominance and dataflow are then performed as a second pass +// called "lifting" to improve the accuracy and performance of +// subsequent analyses; this pass can be skipped by setting the +// NaiveForm builder flag. +// +// The primary interfaces of this package are: +// +// - Member: a named member of a Go package. +// - Value: an expression that yields a value. +// - Instruction: a statement that consumes values and performs computation. +// - Node: a Value or Instruction (emphasizing its membership in the IR value graph) +// +// A computation that yields a result implements both the Value and +// Instruction interfaces. 
The following table shows for each +// concrete type which of these interfaces it implements. +// +// Value? Instruction? Member? +// *Alloc ✔ ✔ +// *BinOp ✔ ✔ +// *BlankStore ✔ +// *Builtin ✔ +// *Call ✔ ✔ +// *ChangeInterface ✔ ✔ +// *ChangeType ✔ ✔ +// *Const ✔ ✔ +// *Convert ✔ ✔ +// *DebugRef ✔ +// *Defer ✔ ✔ +// *Extract ✔ ✔ +// *Field ✔ ✔ +// *FieldAddr ✔ ✔ +// *FreeVar ✔ +// *Function ✔ ✔ (func) +// *Global ✔ ✔ (var) +// *Go ✔ ✔ +// *If ✔ +// *Index ✔ ✔ +// *IndexAddr ✔ ✔ +// *Jump ✔ +// *Load ✔ ✔ +// *MakeChan ✔ ✔ +// *MakeClosure ✔ ✔ +// *MakeInterface ✔ ✔ +// *MakeMap ✔ ✔ +// *MakeSlice ✔ ✔ +// *MapLookup ✔ ✔ +// *MapUpdate ✔ ✔ +// *NamedConst ✔ (const) +// *Next ✔ ✔ +// *Panic ✔ +// *Parameter ✔ ✔ +// *Phi ✔ ✔ +// *Range ✔ ✔ +// *Recv ✔ ✔ +// *Return ✔ +// *RunDefers ✔ +// *Select ✔ ✔ +// *Send ✔ ✔ +// *Sigma ✔ ✔ +// *Slice ✔ ✔ +// *Store ✔ ✔ +// *StringLookup ✔ ✔ +// *Type ✔ (type) +// *TypeAssert ✔ ✔ +// *UnOp ✔ ✔ +// *Unreachable ✔ +// +// Other key types in this package include: Program, Package, Function +// and BasicBlock. +// +// The program representation constructed by this package is fully +// resolved internally, i.e. it does not rely on the names of Values, +// Packages, Functions, Types or BasicBlocks for the correct +// interpretation of the program. Only the identities of objects and +// the topology of the IR and type graphs are semantically +// significant. (There is one exception: Ids, used to identify field +// and method names, contain strings.) Avoidance of name-based +// operations simplifies the implementation of subsequent passes and +// can make them very efficient. Many objects are nonetheless named +// to aid in debugging, but it is not essential that the names be +// either accurate or unambiguous. The public API exposes a number of +// name-based maps for client convenience. +// +// The ir/irutil package provides various utilities that depend only +// on the public API of this package. +// +// TODO(adonovan): Consider the exceptional control-flow implications +// of defer and recover(). +// +// TODO(adonovan): write a how-to document for all the various cases +// of trying to determine corresponding elements across the four +// domains of source locations, ast.Nodes, types.Objects, +// ir.Values/Instructions. +// +package ir diff --git a/vendor/honnef.co/go/tools/go/ir/dom.go b/vendor/honnef.co/go/tools/go/ir/dom.go new file mode 100644 index 000000000..13ecd47cb --- /dev/null +++ b/vendor/honnef.co/go/tools/go/ir/dom.go @@ -0,0 +1,469 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package ir + +// This file defines algorithms related to dominance. + +// Dominator tree construction ---------------------------------------- +// +// We use the algorithm described in Lengauer & Tarjan. 1979. A fast +// algorithm for finding dominators in a flowgraph. +// http://doi.acm.org/10.1145/357062.357071 +// +// We also apply the optimizations to SLT described in Georgiadis et +// al, Finding Dominators in Practice, JGAA 2006, +// http://jgaa.info/accepted/2006/GeorgiadisTarjanWerneck2006.10.1.pdf +// to avoid the need for buckets of size > 1. + +import ( + "bytes" + "fmt" + "io" + "math/big" + "os" + "sort" +) + +// Idom returns the block that immediately dominates b: +// its parent in the dominator tree, if any. +// The entry node (b.Index==0) does not have a parent. 
+// +func (b *BasicBlock) Idom() *BasicBlock { return b.dom.idom } + +// Dominees returns the list of blocks that b immediately dominates: +// its children in the dominator tree. +// +func (b *BasicBlock) Dominees() []*BasicBlock { return b.dom.children } + +// Dominates reports whether b dominates c. +func (b *BasicBlock) Dominates(c *BasicBlock) bool { + return b.dom.pre <= c.dom.pre && c.dom.post <= b.dom.post +} + +type byDomPreorder []*BasicBlock + +func (a byDomPreorder) Len() int { return len(a) } +func (a byDomPreorder) Swap(i, j int) { a[i], a[j] = a[j], a[i] } +func (a byDomPreorder) Less(i, j int) bool { return a[i].dom.pre < a[j].dom.pre } + +// DomPreorder returns a new slice containing the blocks of f in +// dominator tree preorder. +// +func (f *Function) DomPreorder() []*BasicBlock { + n := len(f.Blocks) + order := make(byDomPreorder, n) + copy(order, f.Blocks) + sort.Sort(order) + return order +} + +// domInfo contains a BasicBlock's dominance information. +type domInfo struct { + idom *BasicBlock // immediate dominator (parent in domtree) + children []*BasicBlock // nodes immediately dominated by this one + pre, post int32 // pre- and post-order numbering within domtree +} + +// buildDomTree computes the dominator tree of f using the LT algorithm. +// Precondition: all blocks are reachable (e.g. optimizeBlocks has been run). +// +func buildDomTree(fn *Function) { + // The step numbers refer to the original LT paper; the + // reordering is due to Georgiadis. + + // Clear any previous domInfo. + for _, b := range fn.Blocks { + b.dom = domInfo{} + } + + idoms := make([]*BasicBlock, len(fn.Blocks)) + + order := make([]*BasicBlock, 0, len(fn.Blocks)) + seen := fn.blockset(0) + var dfs func(b *BasicBlock) + dfs = func(b *BasicBlock) { + if !seen.Add(b) { + return + } + for _, succ := range b.Succs { + dfs(succ) + } + if fn.fakeExits.Has(b) { + dfs(fn.Exit) + } + order = append(order, b) + b.post = len(order) - 1 + } + dfs(fn.Blocks[0]) + + for i := 0; i < len(order)/2; i++ { + o := len(order) - i - 1 + order[i], order[o] = order[o], order[i] + } + + idoms[fn.Blocks[0].Index] = fn.Blocks[0] + changed := true + for changed { + changed = false + // iterate over all nodes in reverse postorder, except for the + // entry node + for _, b := range order[1:] { + var newIdom *BasicBlock + do := func(p *BasicBlock) { + if idoms[p.Index] == nil { + return + } + if newIdom == nil { + newIdom = p + } else { + finger1 := p + finger2 := newIdom + for finger1 != finger2 { + for finger1.post < finger2.post { + finger1 = idoms[finger1.Index] + } + for finger2.post < finger1.post { + finger2 = idoms[finger2.Index] + } + } + newIdom = finger1 + } + } + for _, p := range b.Preds { + do(p) + } + if b == fn.Exit { + for _, p := range fn.Blocks { + if fn.fakeExits.Has(p) { + do(p) + } + } + } + + if idoms[b.Index] != newIdom { + idoms[b.Index] = newIdom + changed = true + } + } + } + + for i, b := range idoms { + fn.Blocks[i].dom.idom = b + if b == nil { + // malformed CFG + continue + } + if i == b.Index { + continue + } + b.dom.children = append(b.dom.children, fn.Blocks[i]) + } + + numberDomTree(fn.Blocks[0], 0, 0) + + // printDomTreeDot(os.Stderr, fn) // debugging + // printDomTreeText(os.Stderr, root, 0) // debugging + + if fn.Prog.mode&SanityCheckFunctions != 0 { + sanityCheckDomTree(fn) + } +} + +// buildPostDomTree is like buildDomTree, but builds the post-dominator tree instead. 
+func buildPostDomTree(fn *Function) { + // The step numbers refer to the original LT paper; the + // reordering is due to Georgiadis. + + // Clear any previous domInfo. + for _, b := range fn.Blocks { + b.pdom = domInfo{} + } + + idoms := make([]*BasicBlock, len(fn.Blocks)) + + order := make([]*BasicBlock, 0, len(fn.Blocks)) + seen := fn.blockset(0) + var dfs func(b *BasicBlock) + dfs = func(b *BasicBlock) { + if !seen.Add(b) { + return + } + for _, pred := range b.Preds { + dfs(pred) + } + if b == fn.Exit { + for _, p := range fn.Blocks { + if fn.fakeExits.Has(p) { + dfs(p) + } + } + } + order = append(order, b) + b.post = len(order) - 1 + } + dfs(fn.Exit) + + for i := 0; i < len(order)/2; i++ { + o := len(order) - i - 1 + order[i], order[o] = order[o], order[i] + } + + idoms[fn.Exit.Index] = fn.Exit + changed := true + for changed { + changed = false + // iterate over all nodes in reverse postorder, except for the + // exit node + for _, b := range order[1:] { + var newIdom *BasicBlock + do := func(p *BasicBlock) { + if idoms[p.Index] == nil { + return + } + if newIdom == nil { + newIdom = p + } else { + finger1 := p + finger2 := newIdom + for finger1 != finger2 { + for finger1.post < finger2.post { + finger1 = idoms[finger1.Index] + } + for finger2.post < finger1.post { + finger2 = idoms[finger2.Index] + } + } + newIdom = finger1 + } + } + for _, p := range b.Succs { + do(p) + } + if fn.fakeExits.Has(b) { + do(fn.Exit) + } + + if idoms[b.Index] != newIdom { + idoms[b.Index] = newIdom + changed = true + } + } + } + + for i, b := range idoms { + fn.Blocks[i].pdom.idom = b + if b == nil { + // malformed CFG + continue + } + if i == b.Index { + continue + } + b.pdom.children = append(b.pdom.children, fn.Blocks[i]) + } + + numberPostDomTree(fn.Exit, 0, 0) + + // printPostDomTreeDot(os.Stderr, fn) // debugging + // printPostDomTreeText(os.Stderr, fn.Exit, 0) // debugging + + if fn.Prog.mode&SanityCheckFunctions != 0 { // XXX + sanityCheckDomTree(fn) // XXX + } +} + +// numberDomTree sets the pre- and post-order numbers of a depth-first +// traversal of the dominator tree rooted at v. These are used to +// answer dominance queries in constant time. +// +func numberDomTree(v *BasicBlock, pre, post int32) (int32, int32) { + v.dom.pre = pre + pre++ + for _, child := range v.dom.children { + pre, post = numberDomTree(child, pre, post) + } + v.dom.post = post + post++ + return pre, post +} + +// numberPostDomTree sets the pre- and post-order numbers of a depth-first +// traversal of the post-dominator tree rooted at v. These are used to +// answer post-dominance queries in constant time. +// +func numberPostDomTree(v *BasicBlock, pre, post int32) (int32, int32) { + v.pdom.pre = pre + pre++ + for _, child := range v.pdom.children { + pre, post = numberPostDomTree(child, pre, post) + } + v.pdom.post = post + post++ + return pre, post +} + +// Testing utilities ---------------------------------------- + +// sanityCheckDomTree checks the correctness of the dominator tree +// computed by the LT algorithm by comparing against the dominance +// relation computed by a naive Kildall-style forward dataflow +// analysis (Algorithm 10.16 from the "Dragon" book). +// +func sanityCheckDomTree(f *Function) { + n := len(f.Blocks) + + // D[i] is the set of blocks that dominate f.Blocks[i], + // represented as a bit-set of block indices. + D := make([]big.Int, n) + + one := big.NewInt(1) + + // all is the set of all blocks; constant. 
+ var all big.Int + all.Set(one).Lsh(&all, uint(n)).Sub(&all, one) + + // Initialization. + for i := range f.Blocks { + if i == 0 { + // A root is dominated only by itself. + D[i].SetBit(&D[0], 0, 1) + } else { + // All other blocks are (initially) dominated + // by every block. + D[i].Set(&all) + } + } + + // Iteration until fixed point. + for changed := true; changed; { + changed = false + for i, b := range f.Blocks { + if i == 0 { + continue + } + // Compute intersection across predecessors. + var x big.Int + x.Set(&all) + for _, pred := range b.Preds { + x.And(&x, &D[pred.Index]) + } + if b == f.Exit { + for _, p := range f.Blocks { + if f.fakeExits.Has(p) { + x.And(&x, &D[p.Index]) + } + } + } + x.SetBit(&x, i, 1) // a block always dominates itself. + if D[i].Cmp(&x) != 0 { + D[i].Set(&x) + changed = true + } + } + } + + // Check the entire relation. O(n^2). + ok := true + for i := 0; i < n; i++ { + for j := 0; j < n; j++ { + b, c := f.Blocks[i], f.Blocks[j] + actual := b.Dominates(c) + expected := D[j].Bit(i) == 1 + if actual != expected { + fmt.Fprintf(os.Stderr, "dominates(%s, %s)==%t, want %t\n", b, c, actual, expected) + ok = false + } + } + } + + preorder := f.DomPreorder() + for _, b := range f.Blocks { + if got := preorder[b.dom.pre]; got != b { + fmt.Fprintf(os.Stderr, "preorder[%d]==%s, want %s\n", b.dom.pre, got, b) + ok = false + } + } + + if !ok { + panic("sanityCheckDomTree failed for " + f.String()) + } + +} + +// Printing functions ---------------------------------------- + +// printDomTree prints the dominator tree as text, using indentation. +//lint:ignore U1000 used during debugging +func printDomTreeText(buf *bytes.Buffer, v *BasicBlock, indent int) { + fmt.Fprintf(buf, "%*s%s\n", 4*indent, "", v) + for _, child := range v.dom.children { + printDomTreeText(buf, child, indent+1) + } +} + +// printDomTreeDot prints the dominator tree of f in AT&T GraphViz +// (.dot) format. +//lint:ignore U1000 used during debugging +func printDomTreeDot(buf io.Writer, f *Function) { + fmt.Fprintln(buf, "//", f) + fmt.Fprintln(buf, "digraph domtree {") + for i, b := range f.Blocks { + v := b.dom + fmt.Fprintf(buf, "\tn%d [label=\"%s (%d, %d)\",shape=\"rectangle\"];\n", v.pre, b, v.pre, v.post) + // TODO(adonovan): improve appearance of edges + // belonging to both dominator tree and CFG. + + // Dominator tree edge. + if i != 0 { + fmt.Fprintf(buf, "\tn%d -> n%d [style=\"solid\",weight=100];\n", v.idom.dom.pre, v.pre) + } + // CFG edges. + for _, pred := range b.Preds { + fmt.Fprintf(buf, "\tn%d -> n%d [style=\"dotted\",weight=0];\n", pred.dom.pre, v.pre) + } + + if f.fakeExits.Has(b) { + fmt.Fprintf(buf, "\tn%d -> n%d [style=\"dotted\",weight=0,color=red];\n", b.dom.pre, f.Exit.dom.pre) + } + } + fmt.Fprintln(buf, "}") +} + +// printDomTree prints the dominator tree as text, using indentation. +//lint:ignore U1000 used during debugging +func printPostDomTreeText(buf io.Writer, v *BasicBlock, indent int) { + fmt.Fprintf(buf, "%*s%s\n", 4*indent, "", v) + for _, child := range v.pdom.children { + printPostDomTreeText(buf, child, indent+1) + } +} + +// printDomTreeDot prints the dominator tree of f in AT&T GraphViz +// (.dot) format. 
+//lint:ignore U1000 used during debugging +func printPostDomTreeDot(buf io.Writer, f *Function) { + fmt.Fprintln(buf, "//", f) + fmt.Fprintln(buf, "digraph pdomtree {") + for _, b := range f.Blocks { + v := b.pdom + fmt.Fprintf(buf, "\tn%d [label=\"%s (%d, %d)\",shape=\"rectangle\"];\n", v.pre, b, v.pre, v.post) + // TODO(adonovan): improve appearance of edges + // belonging to both dominator tree and CFG. + + // Dominator tree edge. + if b != f.Exit { + fmt.Fprintf(buf, "\tn%d -> n%d [style=\"solid\",weight=100];\n", v.idom.pdom.pre, v.pre) + } + // CFG edges. + for _, pred := range b.Preds { + fmt.Fprintf(buf, "\tn%d -> n%d [style=\"dotted\",weight=0];\n", pred.pdom.pre, v.pre) + } + + if f.fakeExits.Has(b) { + fmt.Fprintf(buf, "\tn%d -> n%d [style=\"dotted\",weight=0,color=red];\n", b.dom.pre, f.Exit.dom.pre) + } + } + fmt.Fprintln(buf, "}") +} diff --git a/vendor/honnef.co/go/tools/go/ir/emit.go b/vendor/honnef.co/go/tools/go/ir/emit.go new file mode 100644 index 000000000..f7629646a --- /dev/null +++ b/vendor/honnef.co/go/tools/go/ir/emit.go @@ -0,0 +1,461 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package ir + +// Helpers for emitting IR instructions. + +import ( + "fmt" + "go/ast" + "go/constant" + "go/token" + "go/types" +) + +// emitNew emits to f a new (heap Alloc) instruction allocating an +// object of type typ. pos is the optional source location. +// +func emitNew(f *Function, typ types.Type, source ast.Node) *Alloc { + v := &Alloc{Heap: true} + v.setType(types.NewPointer(typ)) + f.emit(v, source) + return v +} + +// emitLoad emits to f an instruction to load the address addr into a +// new temporary, and returns the value so defined. +// +func emitLoad(f *Function, addr Value, source ast.Node) *Load { + v := &Load{X: addr} + v.setType(deref(addr.Type())) + f.emit(v, source) + return v +} + +func emitRecv(f *Function, ch Value, commaOk bool, typ types.Type, source ast.Node) Value { + recv := &Recv{ + Chan: ch, + CommaOk: commaOk, + } + recv.setType(typ) + return f.emit(recv, source) +} + +// emitDebugRef emits to f a DebugRef pseudo-instruction associating +// expression e with value v. +// +func emitDebugRef(f *Function, e ast.Expr, v Value, isAddr bool) { + if !f.debugInfo() { + return // debugging not enabled + } + if v == nil || e == nil { + panic("nil") + } + var obj types.Object + e = unparen(e) + if id, ok := e.(*ast.Ident); ok { + if isBlankIdent(id) { + return + } + obj = f.Pkg.objectOf(id) + switch obj.(type) { + case *types.Nil, *types.Const, *types.Builtin: + return + } + } + f.emit(&DebugRef{ + X: v, + Expr: e, + IsAddr: isAddr, + object: obj, + }, nil) +} + +// emitArith emits to f code to compute the binary operation op(x, y) +// where op is an eager shift, logical or arithmetic operation. +// (Use emitCompare() for comparisons and Builder.logicalBinop() for +// non-eager operations.) +// +func emitArith(f *Function, op token.Token, x, y Value, t types.Type, source ast.Node) Value { + switch op { + case token.SHL, token.SHR: + x = emitConv(f, x, t, source) + // y may be signed or an 'untyped' constant. + // TODO(adonovan): whence signed values? 
+ if b, ok := y.Type().Underlying().(*types.Basic); ok && b.Info()&types.IsUnsigned == 0 { + y = emitConv(f, y, types.Typ[types.Uint64], source) + } + + case token.ADD, token.SUB, token.MUL, token.QUO, token.REM, token.AND, token.OR, token.XOR, token.AND_NOT: + x = emitConv(f, x, t, source) + y = emitConv(f, y, t, source) + + default: + panic("illegal op in emitArith: " + op.String()) + + } + v := &BinOp{ + Op: op, + X: x, + Y: y, + } + v.setType(t) + return f.emit(v, source) +} + +// emitCompare emits to f code compute the boolean result of +// comparison comparison 'x op y'. +// +func emitCompare(f *Function, op token.Token, x, y Value, source ast.Node) Value { + xt := x.Type().Underlying() + yt := y.Type().Underlying() + + // Special case to optimise a tagless SwitchStmt so that + // these are equivalent + // switch { case e: ...} + // switch true { case e: ... } + // if e==true { ... } + // even in the case when e's type is an interface. + // TODO(adonovan): opt: generalise to x==true, false!=y, etc. + if x, ok := x.(*Const); ok && op == token.EQL && x.Value != nil && x.Value.Kind() == constant.Bool && constant.BoolVal(x.Value) { + if yt, ok := yt.(*types.Basic); ok && yt.Info()&types.IsBoolean != 0 { + return y + } + } + + if types.Identical(xt, yt) { + // no conversion necessary + } else if _, ok := xt.(*types.Interface); ok { + y = emitConv(f, y, x.Type(), source) + } else if _, ok := yt.(*types.Interface); ok { + x = emitConv(f, x, y.Type(), source) + } else if _, ok := x.(*Const); ok { + x = emitConv(f, x, y.Type(), source) + } else if _, ok := y.(*Const); ok { + y = emitConv(f, y, x.Type(), source) + //lint:ignore SA9003 no-op + } else { + // other cases, e.g. channels. No-op. + } + + v := &BinOp{ + Op: op, + X: x, + Y: y, + } + v.setType(tBool) + return f.emit(v, source) +} + +// isValuePreserving returns true if a conversion from ut_src to +// ut_dst is value-preserving, i.e. just a change of type. +// Precondition: neither argument is a named type. +// +func isValuePreserving(ut_src, ut_dst types.Type) bool { + // Identical underlying types? + if structTypesIdentical(ut_dst, ut_src) { + return true + } + + switch ut_dst.(type) { + case *types.Chan: + // Conversion between channel types? + _, ok := ut_src.(*types.Chan) + return ok + + case *types.Pointer: + // Conversion between pointers with identical base types? + _, ok := ut_src.(*types.Pointer) + return ok + } + return false +} + +// emitConv emits to f code to convert Value val to exactly type typ, +// and returns the converted value. Implicit conversions are required +// by language assignability rules in assignments, parameter passing, +// etc. +// +func emitConv(f *Function, val Value, typ types.Type, source ast.Node) Value { + t_src := val.Type() + + // Identical types? Conversion is a no-op. + if types.Identical(t_src, typ) { + return val + } + + ut_dst := typ.Underlying() + ut_src := t_src.Underlying() + + // Just a change of type, but not value or representation? + if isValuePreserving(ut_src, ut_dst) { + c := &ChangeType{X: val} + c.setType(typ) + return f.emit(c, source) + } + + // Conversion to, or construction of a value of, an interface type? + if _, ok := ut_dst.(*types.Interface); ok { + // Assignment from one interface type to another? + if _, ok := ut_src.(*types.Interface); ok { + c := &ChangeInterface{X: val} + c.setType(typ) + return f.emit(c, source) + } + + // Untyped nil constant? Return interface-typed nil constant. 
+ if ut_src == tUntypedNil { + return emitConst(f, nilConst(typ)) + } + + // Convert (non-nil) "untyped" literals to their default type. + if t, ok := ut_src.(*types.Basic); ok && t.Info()&types.IsUntyped != 0 { + val = emitConv(f, val, types.Default(ut_src), source) + } + + f.Pkg.Prog.needMethodsOf(val.Type()) + mi := &MakeInterface{X: val} + mi.setType(typ) + return f.emit(mi, source) + } + + // Conversion of a compile-time constant value? + if c, ok := val.(*Const); ok { + if _, ok := ut_dst.(*types.Basic); ok || c.IsNil() { + // Conversion of a compile-time constant to + // another constant type results in a new + // constant of the destination type and + // (initially) the same abstract value. + // We don't truncate the value yet. + return emitConst(f, NewConst(c.Value, typ)) + } + + // We're converting from constant to non-constant type, + // e.g. string -> []byte/[]rune. + } + + // Conversion from slice to array pointer? + if slice, ok := ut_src.(*types.Slice); ok { + if ptr, ok := ut_dst.(*types.Pointer); ok { + if arr, ok := ptr.Elem().(*types.Array); ok && types.Identical(slice.Elem(), arr.Elem()) { + c := &Convert{X: val} + c.setType(ut_dst) + return f.emit(c, source) + } + } + } + + // A representation-changing conversion? + // At least one of {ut_src,ut_dst} must be *Basic. + // (The other may be []byte or []rune.) + _, ok1 := ut_src.(*types.Basic) + _, ok2 := ut_dst.(*types.Basic) + if ok1 || ok2 { + c := &Convert{X: val} + c.setType(typ) + return f.emit(c, source) + } + + panic(fmt.Sprintf("in %s: cannot convert %s (%s) to %s", f, val, val.Type(), typ)) +} + +// emitStore emits to f an instruction to store value val at location +// addr, applying implicit conversions as required by assignability rules. +// +func emitStore(f *Function, addr, val Value, source ast.Node) *Store { + s := &Store{ + Addr: addr, + Val: emitConv(f, val, deref(addr.Type()), source), + } + // make sure we call getMem after the call to emitConv, which may + // itself update the memory state + f.emit(s, source) + return s +} + +// emitJump emits to f a jump to target, and updates the control-flow graph. +// Postcondition: f.currentBlock is nil. +// +func emitJump(f *Function, target *BasicBlock, source ast.Node) *Jump { + b := f.currentBlock + j := new(Jump) + b.emit(j, source) + addEdge(b, target) + f.currentBlock = nil + return j +} + +// emitIf emits to f a conditional jump to tblock or fblock based on +// cond, and updates the control-flow graph. +// Postcondition: f.currentBlock is nil. +// +func emitIf(f *Function, cond Value, tblock, fblock *BasicBlock, source ast.Node) *If { + b := f.currentBlock + stmt := &If{Cond: cond} + b.emit(stmt, source) + addEdge(b, tblock) + addEdge(b, fblock) + f.currentBlock = nil + return stmt +} + +// emitExtract emits to f an instruction to extract the index'th +// component of tuple. It returns the extracted value. +// +func emitExtract(f *Function, tuple Value, index int, source ast.Node) Value { + e := &Extract{Tuple: tuple, Index: index} + e.setType(tuple.Type().(*types.Tuple).At(index).Type()) + return f.emit(e, source) +} + +// emitTypeAssert emits to f a type assertion value := x.(t) and +// returns the value. x.Type() must be an interface. +// +func emitTypeAssert(f *Function, x Value, t types.Type, source ast.Node) Value { + a := &TypeAssert{X: x, AssertedType: t} + a.setType(t) + return f.emit(a, source) +} + +// emitTypeTest emits to f a type test value,ok := x.(t) and returns +// a (value, ok) tuple. x.Type() must be an interface. 
+// +func emitTypeTest(f *Function, x Value, t types.Type, source ast.Node) Value { + a := &TypeAssert{ + X: x, + AssertedType: t, + CommaOk: true, + } + a.setType(types.NewTuple( + newVar("value", t), + varOk, + )) + return f.emit(a, source) +} + +// emitTailCall emits to f a function call in tail position. The +// caller is responsible for all fields of 'call' except its type. +// Intended for wrapper methods. +// Precondition: f does/will not use deferred procedure calls. +// Postcondition: f.currentBlock is nil. +// +func emitTailCall(f *Function, call *Call, source ast.Node) { + tresults := f.Signature.Results() + nr := tresults.Len() + if nr == 1 { + call.typ = tresults.At(0).Type() + } else { + call.typ = tresults + } + tuple := f.emit(call, source) + var ret Return + switch nr { + case 0: + // no-op + case 1: + ret.Results = []Value{tuple} + default: + for i := 0; i < nr; i++ { + v := emitExtract(f, tuple, i, source) + // TODO(adonovan): in principle, this is required: + // v = emitConv(f, o.Type, f.Signature.Results[i].Type) + // but in practice emitTailCall is only used when + // the types exactly match. + ret.Results = append(ret.Results, v) + } + } + + f.Exit = f.newBasicBlock("exit") + emitJump(f, f.Exit, source) + f.currentBlock = f.Exit + f.emit(&ret, source) + f.currentBlock = nil +} + +// emitImplicitSelections emits to f code to apply the sequence of +// implicit field selections specified by indices to base value v, and +// returns the selected value. +// +// If v is the address of a struct, the result will be the address of +// a field; if it is the value of a struct, the result will be the +// value of a field. +// +func emitImplicitSelections(f *Function, v Value, indices []int, source ast.Node) Value { + for _, index := range indices { + fld := deref(v.Type()).Underlying().(*types.Struct).Field(index) + + if isPointer(v.Type()) { + instr := &FieldAddr{ + X: v, + Field: index, + } + instr.setType(types.NewPointer(fld.Type())) + v = f.emit(instr, source) + // Load the field's value iff indirectly embedded. + if isPointer(fld.Type()) { + v = emitLoad(f, v, source) + } + } else { + instr := &Field{ + X: v, + Field: index, + } + instr.setType(fld.Type()) + v = f.emit(instr, source) + } + } + return v +} + +// emitFieldSelection emits to f code to select the index'th field of v. +// +// If wantAddr, the input must be a pointer-to-struct and the result +// will be the field's address; otherwise the result will be the +// field's value. +// Ident id is used for position and debug info. +// +func emitFieldSelection(f *Function, v Value, index int, wantAddr bool, id *ast.Ident) Value { + fld := deref(v.Type()).Underlying().(*types.Struct).Field(index) + if isPointer(v.Type()) { + instr := &FieldAddr{ + X: v, + Field: index, + } + instr.setSource(id) + instr.setType(types.NewPointer(fld.Type())) + v = f.emit(instr, id) + // Load the field's value iff we don't want its address. + if !wantAddr { + v = emitLoad(f, v, id) + } + } else { + instr := &Field{ + X: v, + Field: index, + } + instr.setSource(id) + instr.setType(fld.Type()) + v = f.emit(instr, id) + } + emitDebugRef(f, id, v, wantAddr) + return v +} + +// zeroValue emits to f code to produce a zero value of type t, +// and returns it. 
+// +func zeroValue(f *Function, t types.Type, source ast.Node) Value { + switch t.Underlying().(type) { + case *types.Struct, *types.Array: + return emitLoad(f, f.addLocal(t, source), source) + default: + return emitConst(f, zeroConst(t)) + } +} + +func emitConst(f *Function, c *Const) *Const { + f.consts = append(f.consts, c) + return c +} diff --git a/vendor/honnef.co/go/tools/go/ir/exits.go b/vendor/honnef.co/go/tools/go/ir/exits.go new file mode 100644 index 000000000..0abf58089 --- /dev/null +++ b/vendor/honnef.co/go/tools/go/ir/exits.go @@ -0,0 +1,317 @@ +package ir + +import ( + "go/types" +) + +func (b *builder) buildExits(fn *Function) { + if obj := fn.Object(); obj != nil { + switch obj.Pkg().Path() { + case "runtime": + switch obj.Name() { + case "exit": + fn.NoReturn = AlwaysExits + return + case "throw": + fn.NoReturn = AlwaysExits + return + case "Goexit": + fn.NoReturn = AlwaysUnwinds + return + } + case "github.com/sirupsen/logrus": + switch obj.(*types.Func).FullName() { + case "(*github.com/sirupsen/logrus.Logger).Exit": + // Technically, this method does not unconditionally exit + // the process. It dynamically calls a function stored in + // the logger. If the function is nil, it defaults to + // os.Exit. + // + // The main intent of this method is to terminate the + // process, and that's what the vast majority of people + // will use it for. We'll happily accept some false + // negatives to avoid a lot of false positives. + fn.NoReturn = AlwaysExits + return + case "(*github.com/sirupsen/logrus.Logger).Panic", + "(*github.com/sirupsen/logrus.Logger).Panicf", + "(*github.com/sirupsen/logrus.Logger).Panicln": + + // These methods will always panic, but that's not + // statically known from the code alone, because they + // take a detour through the generic Log methods. + fn.NoReturn = AlwaysUnwinds + return + case "(*github.com/sirupsen/logrus.Entry).Panicf", + "(*github.com/sirupsen/logrus.Entry).Panicln": + + // Entry.Panic has an explicit panic, but Panicf and + // Panicln do not, relying fully on the generic Log + // method. + fn.NoReturn = AlwaysUnwinds + return + case "(*github.com/sirupsen/logrus.Logger).Log", + "(*github.com/sirupsen/logrus.Logger).Logf", + "(*github.com/sirupsen/logrus.Logger).Logln": + // TODO(dh): we cannot handle these cases. Whether they + // exit or unwind depends on the level, which is set + // via the first argument. We don't currently support + // call-site-specific exit information. 
+ } + case "github.com/golang/glog": + switch obj.(*types.Func).FullName() { + case "github.com/golang/glog.Exit", + "github.com/golang/glog.ExitDepth", + "github.com/golang/glog.Exitf", + "github.com/golang/glog.Exitln", + "github.com/golang/glog.Fatal", + "github.com/golang/glog.FatalDepth", + "github.com/golang/glog.Fatalf", + "github.com/golang/glog.Fatalln": + // all of these call os.Exit after logging + fn.NoReturn = AlwaysExits + } + } + } + + isRecoverCall := func(instr Instruction) bool { + if instr, ok := instr.(*Call); ok { + if builtin, ok := instr.Call.Value.(*Builtin); ok { + if builtin.Name() == "recover" { + return true + } + } + } + return false + } + + both := NewBlockSet(len(fn.Blocks)) + exits := NewBlockSet(len(fn.Blocks)) + unwinds := NewBlockSet(len(fn.Blocks)) + recovers := false + for _, u := range fn.Blocks { + for _, instr := range u.Instrs { + instrSwitch: + switch instr := instr.(type) { + case *Defer: + if recovers { + // avoid doing extra work, we already know that this function calls recover + continue + } + call := instr.Call.StaticCallee() + if call == nil { + // not a static call, so we can't be sure the + // deferred call isn't calling recover + recovers = true + break + } + if call.Package() == fn.Package() { + b.buildFunction(call) + } + if len(call.Blocks) == 0 { + // external function, we don't know what's + // happening inside it + // + // TODO(dh): this includes functions from + // imported packages, due to how go/analysis + // works. We could introduce another fact, + // like we've done for exiting and unwinding. + recovers = true + break + } + for _, y := range call.Blocks { + for _, instr2 := range y.Instrs { + if isRecoverCall(instr2) { + recovers = true + break instrSwitch + } + } + } + + case *Panic: + both.Add(u) + unwinds.Add(u) + + case CallInstruction: + switch instr.(type) { + case *Defer, *Call: + default: + continue + } + if instr.Common().IsInvoke() { + // give up + return + } + var call *Function + switch instr.Common().Value.(type) { + case *Function, *MakeClosure: + call = instr.Common().StaticCallee() + case *Builtin: + // the only builtins that affect control flow are + // panic and recover, and we've already handled + // those + continue + default: + // dynamic dispatch + return + } + // buildFunction is idempotent. if we're part of a + // (mutually) recursive call chain, then buildFunction + // will immediately return, and fn.WillExit will be false. + if call.Package() == fn.Package() { + b.buildFunction(call) + } + switch call.NoReturn { + case AlwaysExits: + both.Add(u) + exits.Add(u) + case AlwaysUnwinds: + both.Add(u) + unwinds.Add(u) + case NeverReturns: + both.Add(u) + } + } + } + } + + // depth-first search trying to find a path to the exit block that + // doesn't cross any of the blacklisted blocks + seen := NewBlockSet(len(fn.Blocks)) + var findPath func(root *BasicBlock, bl *BlockSet) bool + findPath = func(root *BasicBlock, bl *BlockSet) bool { + if root == fn.Exit { + return true + } + if seen.Has(root) { + return false + } + if bl.Has(root) { + return false + } + seen.Add(root) + for _, succ := range root.Succs { + if findPath(succ, bl) { + return true + } + } + return false + } + findPathEntry := func(root *BasicBlock, bl *BlockSet) bool { + if bl.Num() == 0 { + return true + } + seen.Clear() + return findPath(root, bl) + } + + if !findPathEntry(fn.Blocks[0], exits) { + fn.NoReturn = AlwaysExits + } else if !recovers { + // Only consider unwinding and "never returns" if we don't + // call recover. 
If we do call recover, then panics don't + // bubble up the stack. + + // TODO(dh): the position of the defer matters. If we + // unconditionally terminate before we defer a recover, then + // the recover is ineffective. + + if !findPathEntry(fn.Blocks[0], unwinds) { + fn.NoReturn = AlwaysUnwinds + } else if !findPathEntry(fn.Blocks[0], both) { + fn.NoReturn = NeverReturns + } + } +} + +func (b *builder) addUnreachables(fn *Function) { + var unreachable *BasicBlock + + for _, bb := range fn.Blocks { + instrLoop: + for i, instr := range bb.Instrs { + if instr, ok := instr.(*Call); ok { + var call *Function + switch v := instr.Common().Value.(type) { + case *Function: + call = v + case *MakeClosure: + call = v.Fn.(*Function) + } + if call == nil { + continue + } + if call.Package() == fn.Package() { + // make sure we have information on all functions in this package + b.buildFunction(call) + } + switch call.NoReturn { + case AlwaysExits: + // This call will cause the process to terminate. + // Remove remaining instructions in the block and + // replace any control flow with Unreachable. + for _, succ := range bb.Succs { + succ.removePred(bb) + } + bb.Succs = bb.Succs[:0] + + bb.Instrs = bb.Instrs[:i+1] + bb.emit(new(Unreachable), instr.Source()) + addEdge(bb, fn.Exit) + break instrLoop + + case AlwaysUnwinds: + // This call will cause the goroutine to terminate + // and defers to run (i.e. a panic or + // runtime.Goexit). Remove remaining instructions + // in the block and replace any control flow with + // an unconditional jump to the exit block. + for _, succ := range bb.Succs { + succ.removePred(bb) + } + bb.Succs = bb.Succs[:0] + + bb.Instrs = bb.Instrs[:i+1] + bb.emit(new(Jump), instr.Source()) + addEdge(bb, fn.Exit) + break instrLoop + + case NeverReturns: + // This call will either cause the goroutine to + // terminate, or the process to terminate. Remove + // remaining instructions in the block and replace + // any control flow with a conditional jump to + // either the exit block, or Unreachable. + for _, succ := range bb.Succs { + succ.removePred(bb) + } + bb.Succs = bb.Succs[:0] + + bb.Instrs = bb.Instrs[:i+1] + var c Call + c.Call.Value = &Builtin{ + name: "ir:noreturnWasPanic", + sig: types.NewSignature(nil, + types.NewTuple(), + types.NewTuple(anonVar(types.Typ[types.Bool])), + false, + ), + } + c.setType(types.Typ[types.Bool]) + + if unreachable == nil { + unreachable = fn.newBasicBlock("unreachable") + unreachable.emit(&Unreachable{}, nil) + addEdge(unreachable, fn.Exit) + } + + bb.emit(&c, instr.Source()) + bb.emit(&If{Cond: &c}, instr.Source()) + addEdge(bb, fn.Exit) + addEdge(bb, unreachable) + break instrLoop + } + } + } + } +} diff --git a/vendor/honnef.co/go/tools/go/ir/func.go b/vendor/honnef.co/go/tools/go/ir/func.go new file mode 100644 index 000000000..69e381cd6 --- /dev/null +++ b/vendor/honnef.co/go/tools/go/ir/func.go @@ -0,0 +1,983 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package ir + +// This file implements the Function and BasicBlock types. + +import ( + "bytes" + "fmt" + "go/ast" + "go/constant" + "go/format" + "go/token" + "go/types" + "io" + "os" + "strings" +) + +// addEdge adds a control-flow graph edge from from to to. +func addEdge(from, to *BasicBlock) { + from.Succs = append(from.Succs, to) + to.Preds = append(to.Preds, from) +} + +// Control returns the last instruction in the block. 
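The reachability test in buildExits above asks whether the function's exit block can still be reached from the entry without passing through any blacklisted block; only when every path is cut off does the function get classified as always exiting, always unwinding, or never returning. A minimal sketch of that test on plain adjacency lists, with hypothetical block numbering and not part of the vendored file:

package main

import "fmt"

// reachesExit reports whether exit can be reached from root without
// entering any blocked node, mirroring findPath/findPathEntry above.
func reachesExit(succs [][]int, root, exit int, blocked map[int]bool) bool {
	seen := make(map[int]bool)
	var dfs func(n int) bool
	dfs = func(n int) bool {
		if n == exit {
			return true
		}
		if seen[n] || blocked[n] {
			return false
		}
		seen[n] = true
		for _, s := range succs[n] {
			if dfs(s) {
				return true
			}
		}
		return false
	}
	return dfs(root)
}

func main() {
	// Hypothetical CFG: 0 -> {1, 2}, 1 -> 3, 2 -> 3; block 3 is the exit.
	succs := [][]int{{1, 2}, {3}, {3}, nil}

	// Block 2 always exits the process: the exit is still reachable via block 1.
	fmt.Println(reachesExit(succs, 0, 3, map[int]bool{2: true})) // true

	// Both branches are cut off: no path remains to the exit block.
	fmt.Println(reachesExit(succs, 0, 3, map[int]bool{1: true, 2: true})) // false
}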
+func (b *BasicBlock) Control() Instruction { + if len(b.Instrs) == 0 { + return nil + } + return b.Instrs[len(b.Instrs)-1] +} + +// SigmaFor returns the sigma node for v coming from pred. +func (b *BasicBlock) SigmaFor(v Value, pred *BasicBlock) *Sigma { + for _, instr := range b.Instrs { + sigma, ok := instr.(*Sigma) + if !ok { + // no more sigmas + return nil + } + if sigma.From == pred && sigma.X == v { + return sigma + } + } + return nil +} + +// Parent returns the function that contains block b. +func (b *BasicBlock) Parent() *Function { return b.parent } + +// String returns a human-readable label of this block. +// It is not guaranteed unique within the function. +// +func (b *BasicBlock) String() string { + return fmt.Sprintf("%d", b.Index) +} + +// emit appends an instruction to the current basic block. +// If the instruction defines a Value, it is returned. +// +func (b *BasicBlock) emit(i Instruction, source ast.Node) Value { + i.setSource(source) + i.setBlock(b) + b.Instrs = append(b.Instrs, i) + v, _ := i.(Value) + return v +} + +// predIndex returns the i such that b.Preds[i] == c or panics if +// there is none. +func (b *BasicBlock) predIndex(c *BasicBlock) int { + for i, pred := range b.Preds { + if pred == c { + return i + } + } + panic(fmt.Sprintf("no edge %s -> %s", c, b)) +} + +// succIndex returns the i such that b.Succs[i] == c or -1 if there is none. +func (b *BasicBlock) succIndex(c *BasicBlock) int { + for i, succ := range b.Succs { + if succ == c { + return i + } + } + return -1 +} + +// hasPhi returns true if b.Instrs contains φ-nodes. +func (b *BasicBlock) hasPhi() bool { + _, ok := b.Instrs[0].(*Phi) + return ok +} + +func (b *BasicBlock) Phis() []Instruction { + return b.phis() +} + +// phis returns the prefix of b.Instrs containing all the block's φ-nodes. +func (b *BasicBlock) phis() []Instruction { + for i, instr := range b.Instrs { + if _, ok := instr.(*Phi); !ok { + return b.Instrs[:i] + } + } + return nil // unreachable in well-formed blocks +} + +// replacePred replaces all occurrences of p in b's predecessor list with q. +// Ordinarily there should be at most one. +// +func (b *BasicBlock) replacePred(p, q *BasicBlock) { + for i, pred := range b.Preds { + if pred == p { + b.Preds[i] = q + } + } +} + +// replaceSucc replaces all occurrences of p in b's successor list with q. +// Ordinarily there should be at most one. +// +func (b *BasicBlock) replaceSucc(p, q *BasicBlock) { + for i, succ := range b.Succs { + if succ == p { + b.Succs[i] = q + } + } +} + +// removePred removes all occurrences of p in b's +// predecessor list and φ-nodes. +// Ordinarily there should be at most one. +// +func (b *BasicBlock) removePred(p *BasicBlock) { + phis := b.phis() + + // We must preserve edge order for φ-nodes. + j := 0 + for i, pred := range b.Preds { + if pred != p { + b.Preds[j] = b.Preds[i] + // Strike out φ-edge too. + for _, instr := range phis { + phi := instr.(*Phi) + phi.Edges[j] = phi.Edges[i] + } + j++ + } + } + // Nil out b.Preds[j:] and φ-edges[j:] to aid GC. + for i := j; i < len(b.Preds); i++ { + b.Preds[i] = nil + for _, instr := range phis { + instr.(*Phi).Edges[i] = nil + } + } + b.Preds = b.Preds[:j] + for _, instr := range phis { + phi := instr.(*Phi) + phi.Edges = phi.Edges[:j] + } +} + +// Destinations associated with unlabelled for/switch/select stmts. +// We push/pop one of these as we enter/leave each construct and for +// each BranchStmt we scan for the innermost target of the right type. 
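removePred above compacts b.Preds and the operand lists of all φ-nodes in lockstep, because the i-th φ operand corresponds to the i-th predecessor edge. A minimal sketch of that parallel compaction on plain slices, with made-up block names and values, not part of the vendored file:

package main

import "fmt"

func main() {
	// A block with three predecessors and one φ-node; phiEdges[i] is the
	// value flowing in along preds[i] (hypothetical names and values).
	preds := []string{"b1", "b2", "b3"}
	phiEdges := []int{10, 20, 30}

	// Remove predecessor "b2", striking out its φ operand in the same
	// pass so the operands stay aligned with the surviving edges.
	remove := "b2"
	j := 0
	for i, p := range preds {
		if p != remove {
			preds[j] = preds[i]
			phiEdges[j] = phiEdges[i]
			j++
		}
	}
	preds, phiEdges = preds[:j], phiEdges[:j]

	fmt.Println(preds, phiEdges) // [b1 b3] [10 30]
}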
+// +type targets struct { + tail *targets // rest of stack + _break *BasicBlock + _continue *BasicBlock + _fallthrough *BasicBlock +} + +// Destinations associated with a labelled block. +// We populate these as labels are encountered in forward gotos or +// labelled statements. +// +type lblock struct { + _goto *BasicBlock + _break *BasicBlock + _continue *BasicBlock +} + +// labelledBlock returns the branch target associated with the +// specified label, creating it if needed. +// +func (f *Function) labelledBlock(label *ast.Ident) *lblock { + lb := f.lblocks[label.Obj] + if lb == nil { + lb = &lblock{_goto: f.newBasicBlock(label.Name)} + if f.lblocks == nil { + f.lblocks = make(map[*ast.Object]*lblock) + } + f.lblocks[label.Obj] = lb + } + return lb +} + +// addParam adds a (non-escaping) parameter to f.Params of the +// specified name, type and source position. +// +func (f *Function) addParam(name string, typ types.Type, source ast.Node) *Parameter { + var b *BasicBlock + if len(f.Blocks) > 0 { + b = f.Blocks[0] + } + v := &Parameter{ + name: name, + } + v.setBlock(b) + v.setType(typ) + v.setSource(source) + f.Params = append(f.Params, v) + if b != nil { + // There may be no blocks if this function has no body. We + // still create params, but aren't interested in the + // instruction. + f.Blocks[0].Instrs = append(f.Blocks[0].Instrs, v) + } + return v +} + +func (f *Function) addParamObj(obj types.Object, source ast.Node) *Parameter { + name := obj.Name() + if name == "" { + name = fmt.Sprintf("arg%d", len(f.Params)) + } + param := f.addParam(name, obj.Type(), source) + param.object = obj + return param +} + +// addSpilledParam declares a parameter that is pre-spilled to the +// stack; the function body will load/store the spilled location. +// Subsequent lifting will eliminate spills where possible. +// +func (f *Function) addSpilledParam(obj types.Object, source ast.Node) { + param := f.addParamObj(obj, source) + spill := &Alloc{} + spill.setType(types.NewPointer(obj.Type())) + spill.source = source + f.objects[obj] = spill + f.Locals = append(f.Locals, spill) + f.emit(spill, source) + emitStore(f, spill, param, source) + // f.emit(&Store{Addr: spill, Val: param}) +} + +// startBody initializes the function prior to generating IR code for its body. +// Precondition: f.Type() already set. +// +func (f *Function) startBody() { + entry := f.newBasicBlock("entry") + f.currentBlock = entry + f.objects = make(map[types.Object]Value) // needed for some synthetics, e.g. init +} + +func (f *Function) blockset(i int) *BlockSet { + bs := &f.blocksets[i] + if len(bs.values) != len(f.Blocks) { + if cap(bs.values) >= len(f.Blocks) { + bs.values = bs.values[:len(f.Blocks)] + bs.Clear() + } else { + bs.values = make([]bool, len(f.Blocks)) + } + } else { + bs.Clear() + } + return bs +} + +func (f *Function) exitBlock() { + old := f.currentBlock + + f.Exit = f.newBasicBlock("exit") + f.currentBlock = f.Exit + + ret := f.results() + results := make([]Value, len(ret)) + // Run function calls deferred in this + // function when explicitly returning from it. + f.emit(new(RunDefers), nil) + for i, r := range ret { + results[i] = emitLoad(f, r, nil) + } + + f.emit(&Return{Results: results}, nil) + f.currentBlock = old +} + +// createSyntacticParams populates f.Params and generates code (spills +// and named result locals) for all the parameters declared in the +// syntax. In addition it populates the f.objects mapping. +// +// Preconditions: +// f.startBody() was called. 
+// Postcondition: +// len(f.Params) == len(f.Signature.Params) + (f.Signature.Recv() ? 1 : 0) +// +func (f *Function) createSyntacticParams(recv *ast.FieldList, functype *ast.FuncType) { + // Receiver (at most one inner iteration). + if recv != nil { + for _, field := range recv.List { + for _, n := range field.Names { + f.addSpilledParam(f.Pkg.info.Defs[n], n) + } + // Anonymous receiver? No need to spill. + if field.Names == nil { + f.addParamObj(f.Signature.Recv(), field) + } + } + } + + // Parameters. + if functype.Params != nil { + n := len(f.Params) // 1 if has recv, 0 otherwise + for _, field := range functype.Params.List { + for _, n := range field.Names { + f.addSpilledParam(f.Pkg.info.Defs[n], n) + } + // Anonymous parameter? No need to spill. + if field.Names == nil { + f.addParamObj(f.Signature.Params().At(len(f.Params)-n), field) + } + } + } + + // Named results. + if functype.Results != nil { + for _, field := range functype.Results.List { + // Implicit "var" decl of locals for named results. + for _, n := range field.Names { + f.namedResults = append(f.namedResults, f.addLocalForIdent(n)) + } + } + + if len(f.namedResults) == 0 { + sig := f.Signature.Results() + for i := 0; i < sig.Len(); i++ { + // XXX position information + v := f.addLocal(sig.At(i).Type(), nil) + f.implicitResults = append(f.implicitResults, v) + } + } + } +} + +func numberNodes(f *Function) { + var base ID + for _, b := range f.Blocks { + for _, instr := range b.Instrs { + if instr == nil { + continue + } + base++ + instr.setID(base) + } + } +} + +// buildReferrers populates the def/use information in all non-nil +// Value.Referrers slice. +// Precondition: all such slices are initially empty. +func buildReferrers(f *Function) { + var rands []*Value + for _, b := range f.Blocks { + for _, instr := range b.Instrs { + rands = instr.Operands(rands[:0]) // recycle storage + for _, rand := range rands { + if r := *rand; r != nil { + if ref := r.Referrers(); ref != nil { + if len(*ref) == 0 { + // per median, each value has two referrers, so we can avoid one call into growslice + // + // Note: we experimented with allocating + // sequential scratch space, but we + // couldn't find a value that gave better + // performance than making many individual + // allocations + *ref = make([]Instruction, 1, 2) + (*ref)[0] = instr + } else { + *ref = append(*ref, instr) + } + } + } + } + } + } +} + +func (f *Function) emitConsts() { + if len(f.Blocks) == 0 { + f.consts = nil + return + } + + // TODO(dh): our deduplication only works on booleans and + // integers. other constants are represented as pointers to + // things. 
+ if len(f.consts) == 0 { + return + } else if len(f.consts) <= 32 { + f.emitConstsFew() + } else { + f.emitConstsMany() + } +} + +func (f *Function) emitConstsFew() { + dedup := make([]*Const, 0, 32) + for _, c := range f.consts { + if len(*c.Referrers()) == 0 { + continue + } + found := false + for _, d := range dedup { + if c.typ == d.typ && c.Value == d.Value { + replaceAll(c, d) + found = true + break + } + } + if !found { + dedup = append(dedup, c) + } + } + + instrs := make([]Instruction, len(f.Blocks[0].Instrs)+len(dedup)) + for i, c := range dedup { + instrs[i] = c + c.setBlock(f.Blocks[0]) + } + copy(instrs[len(dedup):], f.Blocks[0].Instrs) + f.Blocks[0].Instrs = instrs + f.consts = nil +} + +func (f *Function) emitConstsMany() { + type constKey struct { + typ types.Type + value constant.Value + } + + m := make(map[constKey]Value, len(f.consts)) + areNil := 0 + for i, c := range f.consts { + if len(*c.Referrers()) == 0 { + f.consts[i] = nil + areNil++ + continue + } + + k := constKey{ + typ: c.typ, + value: c.Value, + } + if dup, ok := m[k]; !ok { + m[k] = c + } else { + f.consts[i] = nil + areNil++ + replaceAll(c, dup) + } + } + + instrs := make([]Instruction, len(f.Blocks[0].Instrs)+len(f.consts)-areNil) + i := 0 + for _, c := range f.consts { + if c != nil { + instrs[i] = c + c.setBlock(f.Blocks[0]) + i++ + } + } + copy(instrs[i:], f.Blocks[0].Instrs) + f.Blocks[0].Instrs = instrs + f.consts = nil +} + +// buildFakeExits ensures that every block in the function is +// reachable in reverse from the Exit block. This is required to build +// a full post-dominator tree, and to ensure the exit block's +// inclusion in the dominator tree. +func buildFakeExits(fn *Function) { + // Find back-edges via forward DFS + fn.fakeExits = BlockSet{values: make([]bool, len(fn.Blocks))} + seen := fn.blockset(0) + backEdges := fn.blockset(1) + + var dfs func(b *BasicBlock) + dfs = func(b *BasicBlock) { + if !seen.Add(b) { + backEdges.Add(b) + return + } + for _, pred := range b.Succs { + dfs(pred) + } + } + dfs(fn.Blocks[0]) +buildLoop: + for { + seen := fn.blockset(2) + var dfs func(b *BasicBlock) + dfs = func(b *BasicBlock) { + if !seen.Add(b) { + return + } + for _, pred := range b.Preds { + dfs(pred) + } + if b == fn.Exit { + for _, b := range fn.Blocks { + if fn.fakeExits.Has(b) { + dfs(b) + } + } + } + } + dfs(fn.Exit) + + for _, b := range fn.Blocks { + if !seen.Has(b) && backEdges.Has(b) { + // Block b is not reachable from the exit block. Add a + // fake jump from b to exit, then try again. Note that we + // only add one fake edge at a time, as it may make + // multiple blocks reachable. + // + // We only consider those blocks that have back edges. + // Any unreachable block that doesn't have a back edge + // must flow into a loop, which by definition has a + // back edge. Thus, by looking for loops, we should + // need fewer fake edges overall. + fn.fakeExits.Add(b) + continue buildLoop + } + } + + break + } +} + +// finishBody() finalizes the function after IR code generation of its body. +func (f *Function) finishBody() { + f.objects = nil + f.currentBlock = nil + f.lblocks = nil + + // Remove from f.Locals any Allocs that escape to the heap. + j := 0 + for _, l := range f.Locals { + if !l.Heap { + f.Locals[j] = l + j++ + } + } + // Nil out f.Locals[j:] to aid GC. 
+ for i := j; i < len(f.Locals); i++ { + f.Locals[i] = nil + } + f.Locals = f.Locals[:j] + + optimizeBlocks(f) + buildFakeExits(f) + buildReferrers(f) + buildDomTree(f) + buildPostDomTree(f) + + if f.Prog.mode&NaiveForm == 0 { + lift(f) + } + + // emit constants after lifting, because lifting may produce new constants. + f.emitConsts() + + f.namedResults = nil // (used by lifting) + f.implicitResults = nil + + numberNodes(f) + + defer f.wr.Close() + f.wr.WriteFunc("start", "start", f) + + if f.Prog.mode&PrintFunctions != 0 { + printMu.Lock() + f.WriteTo(os.Stdout) + printMu.Unlock() + } + + if f.Prog.mode&SanityCheckFunctions != 0 { + mustSanityCheck(f, nil) + } +} + +func isUselessPhi(phi *Phi) (Value, bool) { + var v0 Value + for _, e := range phi.Edges { + if e == phi { + continue + } + if v0 == nil { + v0 = e + } + if v0 != e { + if v0, ok := v0.(*Const); ok { + if e, ok := e.(*Const); ok { + if v0.typ == e.typ && v0.Value == e.Value { + continue + } + } + } + return nil, false + } + } + return v0, true +} + +func (f *Function) RemoveNilBlocks() { + f.removeNilBlocks() +} + +// removeNilBlocks eliminates nils from f.Blocks and updates each +// BasicBlock.Index. Use this after any pass that may delete blocks. +// +func (f *Function) removeNilBlocks() { + j := 0 + for _, b := range f.Blocks { + if b != nil { + b.Index = j + f.Blocks[j] = b + j++ + } + } + // Nil out f.Blocks[j:] to aid GC. + for i := j; i < len(f.Blocks); i++ { + f.Blocks[i] = nil + } + f.Blocks = f.Blocks[:j] +} + +// SetDebugMode sets the debug mode for package pkg. If true, all its +// functions will include full debug info. This greatly increases the +// size of the instruction stream, and causes Functions to depend upon +// the ASTs, potentially keeping them live in memory for longer. +// +func (pkg *Package) SetDebugMode(debug bool) { + // TODO(adonovan): do we want ast.File granularity? + pkg.debug = debug +} + +// debugInfo reports whether debug info is wanted for this function. +func (f *Function) debugInfo() bool { + return f.Pkg != nil && f.Pkg.debug +} + +// addNamedLocal creates a local variable, adds it to function f and +// returns it. Its name and type are taken from obj. Subsequent +// calls to f.lookup(obj) will return the same local. +// +func (f *Function) addNamedLocal(obj types.Object, source ast.Node) *Alloc { + l := f.addLocal(obj.Type(), source) + f.objects[obj] = l + return l +} + +func (f *Function) addLocalForIdent(id *ast.Ident) *Alloc { + return f.addNamedLocal(f.Pkg.info.Defs[id], id) +} + +// addLocal creates an anonymous local variable of type typ, adds it +// to function f and returns it. pos is the optional source location. +// +func (f *Function) addLocal(typ types.Type, source ast.Node) *Alloc { + v := &Alloc{} + v.setType(types.NewPointer(typ)) + f.Locals = append(f.Locals, v) + f.emit(v, source) + return v +} + +// lookup returns the address of the named variable identified by obj +// that is local to function f or one of its enclosing functions. +// If escaping, the reference comes from a potentially escaping pointer +// expression and the referent must be heap-allocated. +// +func (f *Function) lookup(obj types.Object, escaping bool) Value { + if v, ok := f.objects[obj]; ok { + if alloc, ok := v.(*Alloc); ok && escaping { + alloc.Heap = true + } + return v // function-local var (address) + } + + // Definition must be in an enclosing function; + // plumb it through intervening closures. 
+ if f.parent == nil { + panic("no ir.Value for " + obj.String()) + } + outer := f.parent.lookup(obj, true) // escaping + v := &FreeVar{ + name: obj.Name(), + typ: outer.Type(), + outer: outer, + parent: f, + } + f.objects[obj] = v + f.FreeVars = append(f.FreeVars, v) + return v +} + +// emit emits the specified instruction to function f. +func (f *Function) emit(instr Instruction, source ast.Node) Value { + return f.currentBlock.emit(instr, source) +} + +// RelString returns the full name of this function, qualified by +// package name, receiver type, etc. +// +// The specific formatting rules are not guaranteed and may change. +// +// Examples: +// "math.IsNaN" // a package-level function +// "(*bytes.Buffer).Bytes" // a declared method or a wrapper +// "(*bytes.Buffer).Bytes$thunk" // thunk (func wrapping method; receiver is param 0) +// "(*bytes.Buffer).Bytes$bound" // bound (func wrapping method; receiver supplied by closure) +// "main.main$1" // an anonymous function in main +// "main.init#1" // a declared init function +// "main.init" // the synthesized package initializer +// +// When these functions are referred to from within the same package +// (i.e. from == f.Pkg.Object), they are rendered without the package path. +// For example: "IsNaN", "(*Buffer).Bytes", etc. +// +// All non-synthetic functions have distinct package-qualified names. +// (But two methods may have the same name "(T).f" if one is a synthetic +// wrapper promoting a non-exported method "f" from another package; in +// that case, the strings are equal but the identifiers "f" are distinct.) +// +func (f *Function) RelString(from *types.Package) string { + // Anonymous? + if f.parent != nil { + // An anonymous function's Name() looks like "parentName$1", + // but its String() should include the type/package/etc. + parent := f.parent.RelString(from) + for i, anon := range f.parent.AnonFuncs { + if anon == f { + return fmt.Sprintf("%s$%d", parent, 1+i) + } + } + + return f.name // should never happen + } + + // Method (declared or wrapper)? + if recv := f.Signature.Recv(); recv != nil { + return f.relMethod(from, recv.Type()) + } + + // Thunk? + if f.method != nil { + return f.relMethod(from, f.method.Recv()) + } + + // Bound? + if len(f.FreeVars) == 1 && strings.HasSuffix(f.name, "$bound") { + return f.relMethod(from, f.FreeVars[0].Type()) + } + + // Package-level function? + // Prefix with package name for cross-package references only. + if p := f.pkg(); p != nil && p != from { + return fmt.Sprintf("%s.%s", p.Path(), f.name) + } + + // Unknown. + return f.name +} + +func (f *Function) relMethod(from *types.Package, recv types.Type) string { + return fmt.Sprintf("(%s).%s", relType(recv, from), f.name) +} + +// writeSignature writes to buf the signature sig in declaration syntax. 
+func writeSignature(buf *bytes.Buffer, from *types.Package, name string, sig *types.Signature, params []*Parameter) { + buf.WriteString("func ") + if recv := sig.Recv(); recv != nil { + buf.WriteString("(") + if n := params[0].Name(); n != "" { + buf.WriteString(n) + buf.WriteString(" ") + } + types.WriteType(buf, params[0].Type(), types.RelativeTo(from)) + buf.WriteString(") ") + } + buf.WriteString(name) + types.WriteSignature(buf, sig, types.RelativeTo(from)) +} + +func (f *Function) pkg() *types.Package { + if f.Pkg != nil { + return f.Pkg.Pkg + } + return nil +} + +var _ io.WriterTo = (*Function)(nil) // *Function implements io.Writer + +func (f *Function) WriteTo(w io.Writer) (int64, error) { + var buf bytes.Buffer + WriteFunction(&buf, f) + n, err := w.Write(buf.Bytes()) + return int64(n), err +} + +// WriteFunction writes to buf a human-readable "disassembly" of f. +func WriteFunction(buf *bytes.Buffer, f *Function) { + fmt.Fprintf(buf, "# Name: %s\n", f.String()) + if f.Pkg != nil { + fmt.Fprintf(buf, "# Package: %s\n", f.Pkg.Pkg.Path()) + } + if syn := f.Synthetic; syn != 0 { + fmt.Fprintln(buf, "# Synthetic:", syn) + } + if pos := f.Pos(); pos.IsValid() { + fmt.Fprintf(buf, "# Location: %s\n", f.Prog.Fset.Position(pos)) + } + + if f.parent != nil { + fmt.Fprintf(buf, "# Parent: %s\n", f.parent.Name()) + } + + from := f.pkg() + + if f.FreeVars != nil { + buf.WriteString("# Free variables:\n") + for i, fv := range f.FreeVars { + fmt.Fprintf(buf, "# % 3d:\t%s %s\n", i, fv.Name(), relType(fv.Type(), from)) + } + } + + if len(f.Locals) > 0 { + buf.WriteString("# Locals:\n") + for i, l := range f.Locals { + fmt.Fprintf(buf, "# % 3d:\t%s %s\n", i, l.Name(), relType(deref(l.Type()), from)) + } + } + writeSignature(buf, from, f.Name(), f.Signature, f.Params) + buf.WriteString(":\n") + + if f.Blocks == nil { + buf.WriteString("\t(external)\n") + } + + for _, b := range f.Blocks { + if b == nil { + // Corrupt CFG. + fmt.Fprintf(buf, ".nil:\n") + continue + } + fmt.Fprintf(buf, "b%d:", b.Index) + if len(b.Preds) > 0 { + fmt.Fprint(buf, " ←") + for _, pred := range b.Preds { + fmt.Fprintf(buf, " b%d", pred.Index) + } + } + if b.Comment != "" { + fmt.Fprintf(buf, " # %s", b.Comment) + } + buf.WriteByte('\n') + + if false { // CFG debugging + fmt.Fprintf(buf, "\t# CFG: %s --> %s --> %s\n", b.Preds, b, b.Succs) + } + + buf2 := &bytes.Buffer{} + for _, instr := range b.Instrs { + buf.WriteString("\t") + switch v := instr.(type) { + case Value: + // Left-align the instruction. + if name := v.Name(); name != "" { + fmt.Fprintf(buf, "%s = ", name) + } + buf.WriteString(instr.String()) + case nil: + // Be robust against bad transforms. + buf.WriteString("") + default: + buf.WriteString(instr.String()) + } + buf.WriteString("\n") + + if f.Prog.mode&PrintSource != 0 { + if s := instr.Source(); s != nil { + buf2.Reset() + format.Node(buf2, f.Prog.Fset, s) + for { + line, err := buf2.ReadString('\n') + if len(line) == 0 { + break + } + buf.WriteString("\t\t> ") + buf.WriteString(line) + if line[len(line)-1] != '\n' { + buf.WriteString("\n") + } + if err != nil { + break + } + } + } + } + } + buf.WriteString("\n") + } +} + +// newBasicBlock adds to f a new basic block and returns it. It does +// not automatically become the current block for subsequent calls to emit. +// comment is an optional string for more readable debugging output. 
+// +func (f *Function) newBasicBlock(comment string) *BasicBlock { + var instrs []Instruction + if len(f.functionBody.scratchInstructions) > 0 { + instrs = f.functionBody.scratchInstructions[0:0:avgInstructionsPerBlock] + f.functionBody.scratchInstructions = f.functionBody.scratchInstructions[avgInstructionsPerBlock:] + } else { + instrs = make([]Instruction, 0, avgInstructionsPerBlock) + } + + b := &BasicBlock{ + Index: len(f.Blocks), + Comment: comment, + parent: f, + Instrs: instrs, + } + b.Succs = b.succs2[:0] + f.Blocks = append(f.Blocks, b) + return b +} + +// NewFunction returns a new synthetic Function instance belonging to +// prog, with its name and signature fields set as specified. +// +// The caller is responsible for initializing the remaining fields of +// the function object, e.g. Pkg, Params, Blocks. +// +// It is practically impossible for clients to construct well-formed +// IR functions/packages/programs directly, so we assume this is the +// job of the Builder alone. NewFunction exists to provide clients a +// little flexibility. For example, analysis tools may wish to +// construct fake Functions for the root of the callgraph, a fake +// "reflect" package, etc. +// +// TODO(adonovan): think harder about the API here. +// +func (prog *Program) NewFunction(name string, sig *types.Signature, provenance Synthetic) *Function { + return &Function{Prog: prog, name: name, Signature: sig, Synthetic: provenance} +} + +//lint:ignore U1000 we may make use of this for functions loaded from export data +type extentNode [2]token.Pos + +func (n extentNode) Pos() token.Pos { return n[0] } +func (n extentNode) End() token.Pos { return n[1] } + +func (f *Function) initHTML(name string) { + if name == "" { + return + } + if rel := f.RelString(nil); rel == name { + f.wr = NewHTMLWriter("ir.html", rel, "") + } +} diff --git a/vendor/honnef.co/go/tools/go/ir/html.go b/vendor/honnef.co/go/tools/go/ir/html.go new file mode 100644 index 000000000..35b421a70 --- /dev/null +++ b/vendor/honnef.co/go/tools/go/ir/html.go @@ -0,0 +1,1124 @@ +// Copyright 2015 The Go Authors. All rights reserved. +// Copyright 2019 Dominik Honnef. All rights reserved. 
+ +package ir + +import ( + "bytes" + "fmt" + "go/types" + "html" + "io" + "log" + "os" + "os/exec" + "path/filepath" + "reflect" + "sort" + "strings" +) + +func live(f *Function) []bool { + max := 0 + var ops []*Value + + for _, b := range f.Blocks { + for _, instr := range b.Instrs { + if int(instr.ID()) > max { + max = int(instr.ID()) + } + } + } + + out := make([]bool, max+1) + var q []Node + for _, b := range f.Blocks { + for _, instr := range b.Instrs { + switch instr.(type) { + case *BlankStore, *Call, *ConstantSwitch, *Defer, *Go, *If, *Jump, *MapUpdate, *Next, *Panic, *Recv, *Return, *RunDefers, *Send, *Store, *Unreachable: + out[instr.ID()] = true + q = append(q, instr) + } + } + } + + for len(q) > 0 { + v := q[len(q)-1] + q = q[:len(q)-1] + for _, op := range v.Operands(ops) { + if *op == nil { + continue + } + if !out[(*op).ID()] { + out[(*op).ID()] = true + q = append(q, *op) + } + } + } + + return out +} + +type funcPrinter interface { + startBlock(b *BasicBlock, reachable bool) + endBlock(b *BasicBlock) + value(v Node, live bool) + startDepCycle() + endDepCycle() + named(n string, vals []Value) +} + +func namedValues(f *Function) map[types.Object][]Value { + names := map[types.Object][]Value{} + for _, b := range f.Blocks { + for _, instr := range b.Instrs { + if instr, ok := instr.(*DebugRef); ok { + if obj := instr.object; obj != nil { + names[obj] = append(names[obj], instr.X) + } + } + } + } + // XXX deduplicate values + return names +} + +func fprintFunc(p funcPrinter, f *Function) { + // XXX does our IR form preserve unreachable blocks? + // reachable, live := findlive(f) + + l := live(f) + for _, b := range f.Blocks { + // XXX + // p.startBlock(b, reachable[b.Index]) + p.startBlock(b, true) + + end := len(b.Instrs) - 1 + if end < 0 { + end = 0 + } + for _, v := range b.Instrs[:end] { + if _, ok := v.(*DebugRef); !ok { + p.value(v, l[v.ID()]) + } + } + p.endBlock(b) + } + + names := namedValues(f) + keys := make([]types.Object, 0, len(names)) + for key := range names { + keys = append(keys, key) + } + sort.Slice(keys, func(i, j int) bool { + return keys[i].Pos() < keys[j].Pos() + }) + for _, key := range keys { + p.named(key.Name(), names[key]) + } +} + +func opName(v Node) string { + switch v := v.(type) { + case *Call: + if v.Common().IsInvoke() { + return "Invoke" + } + return "Call" + case *Alloc: + if v.Heap { + return "HeapAlloc" + } + return "StackAlloc" + case *Select: + if v.Blocking { + return "SelectBlocking" + } + return "SelectNonBlocking" + default: + return reflect.ValueOf(v).Type().Elem().Name() + } +} + +type HTMLWriter struct { + w io.WriteCloser + path string + dot *dotWriter +} + +func NewHTMLWriter(path string, funcname, cfgMask string) *HTMLWriter { + out, err := os.OpenFile(path, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0644) + if err != nil { + log.Fatalf("%v", err) + } + pwd, err := os.Getwd() + if err != nil { + log.Fatalf("%v", err) + } + html := HTMLWriter{w: out, path: filepath.Join(pwd, path)} + html.dot = newDotWriter() + html.start(funcname) + return &html +} + +func (w *HTMLWriter) start(name string) { + if w == nil { + return + } + w.WriteString("") + w.WriteString(` + + + + + +`) + w.WriteString("") + w.WriteString("

") + w.WriteString(html.EscapeString(name)) + w.WriteString("

") + w.WriteString(` +help +
+ +

+Click on a value or block to toggle highlighting of that value/block +and its uses. (Values and blocks are highlighted by ID, and IDs of +dead items may be reused, so not all highlights necessarily correspond +to the clicked item.) +

+ +

+Faded out values and blocks are dead code that has not been eliminated. +

+ +

+Values printed in italics have a dependency cycle. +

+ +

+CFG: Dashed edge is for unlikely branches. Blue color is for backward edges. +Edge with a dot means that this edge follows the order in which blocks were laid out. +

+ +
+`) + w.WriteString("") + w.WriteString("") +} + +func (w *HTMLWriter) Close() { + if w == nil { + return + } + io.WriteString(w.w, "") + io.WriteString(w.w, "
") + io.WriteString(w.w, "") + io.WriteString(w.w, "") + w.w.Close() + fmt.Printf("dumped IR to %v\n", w.path) +} + +// WriteFunc writes f in a column headed by title. +// phase is used for collapsing columns and should be unique across the table. +func (w *HTMLWriter) WriteFunc(phase, title string, f *Function) { + if w == nil { + return + } + w.WriteColumn(phase, title, "", funcHTML(f, phase, w.dot)) +} + +// WriteColumn writes raw HTML in a column headed by title. +// It is intended for pre- and post-compilation log output. +func (w *HTMLWriter) WriteColumn(phase, title, class, html string) { + if w == nil { + return + } + id := strings.Replace(phase, " ", "-", -1) + // collapsed column + w.Printf("
%v
", id, phase) + + if class == "" { + w.Printf("", id) + } else { + w.Printf("", id, class) + } + w.WriteString("

" + title + "

") + w.WriteString(html) + w.WriteString("") +} + +func (w *HTMLWriter) Printf(msg string, v ...interface{}) { + if _, err := fmt.Fprintf(w.w, msg, v...); err != nil { + log.Fatalf("%v", err) + } +} + +func (w *HTMLWriter) WriteString(s string) { + if _, err := io.WriteString(w.w, s); err != nil { + log.Fatalf("%v", err) + } +} + +func valueHTML(v Node) string { + if v == nil { + return "<nil>" + } + // TODO: Using the value ID as the class ignores the fact + // that value IDs get recycled and that some values + // are transmuted into other values. + class := fmt.Sprintf("t%d", v.ID()) + var label string + switch v := v.(type) { + case *Function: + label = v.RelString(nil) + case *Builtin: + label = v.Name() + default: + label = class + } + return fmt.Sprintf("%s", class, label) +} + +func valueLongHTML(v Node) string { + // TODO: Any intra-value formatting? + // I'm wary of adding too much visual noise, + // but a little bit might be valuable. + // We already have visual noise in the form of punctuation + // maybe we could replace some of that with formatting. + s := fmt.Sprintf("", v.ID()) + + linenumber := "(?)" + if v.Pos().IsValid() { + line := v.Parent().Prog.Fset.Position(v.Pos()).Line + linenumber = fmt.Sprintf("(%d)", line, line) + } + + s += fmt.Sprintf("%s %s = %s", valueHTML(v), linenumber, opName(v)) + + if v, ok := v.(Value); ok { + s += " <" + html.EscapeString(v.Type().String()) + ">" + } + + switch v := v.(type) { + case *Parameter: + s += fmt.Sprintf(" {%s}", html.EscapeString(v.name)) + case *BinOp: + s += fmt.Sprintf(" {%s}", html.EscapeString(v.Op.String())) + case *UnOp: + s += fmt.Sprintf(" {%s}", html.EscapeString(v.Op.String())) + case *Extract: + name := v.Tuple.Type().(*types.Tuple).At(v.Index).Name() + s += fmt.Sprintf(" [%d] (%s)", v.Index, name) + case *Field: + st := v.X.Type().Underlying().(*types.Struct) + // Be robust against a bad index. + name := "?" + if 0 <= v.Field && v.Field < st.NumFields() { + name = st.Field(v.Field).Name() + } + s += fmt.Sprintf(" [%d] (%s)", v.Field, name) + case *FieldAddr: + st := deref(v.X.Type()).Underlying().(*types.Struct) + // Be robust against a bad index. + name := "?" + if 0 <= v.Field && v.Field < st.NumFields() { + name = st.Field(v.Field).Name() + } + + s += fmt.Sprintf(" [%d] (%s)", v.Field, name) + case *Recv: + s += fmt.Sprintf(" {%t}", v.CommaOk) + case *Call: + if v.Common().IsInvoke() { + s += fmt.Sprintf(" {%s}", html.EscapeString(v.Common().Method.FullName())) + } + case *Const: + if v.Value == nil { + s += " {<nil>}" + } else { + s += fmt.Sprintf(" {%s}", html.EscapeString(v.Value.String())) + } + case *Sigma: + s += fmt.Sprintf(" [#%s]", v.From) + } + for _, a := range v.Operands(nil) { + s += fmt.Sprintf(" %s", valueHTML(*a)) + } + + // OPT(dh): we're calling namedValues many times on the same function. + allNames := namedValues(v.Parent()) + var names []string + for name, values := range allNames { + for _, value := range values { + if v == value { + names = append(names, name.Name()) + break + } + } + } + if len(names) != 0 { + s += " (" + strings.Join(names, ", ") + ")" + } + + s += "" + return s +} + +func blockHTML(b *BasicBlock) string { + // TODO: Using the value ID as the class ignores the fact + // that value IDs get recycled and that some values + // are transmuted into other values. 
+ s := html.EscapeString(b.String()) + return fmt.Sprintf("%s", s, s) +} + +func blockLongHTML(b *BasicBlock) string { + var kind string + var term Instruction + if len(b.Instrs) > 0 { + term = b.Control() + kind = opName(term) + } + // TODO: improve this for HTML? + s := fmt.Sprintf("%s", b.Index, kind) + + if term != nil { + ops := term.Operands(nil) + if len(ops) > 0 { + var ss []string + for _, op := range ops { + ss = append(ss, valueHTML(*op)) + } + s += " " + strings.Join(ss, ", ") + } + } + if len(b.Succs) > 0 { + s += " →" // right arrow + for _, c := range b.Succs { + s += " " + blockHTML(c) + } + } + return s +} + +func funcHTML(f *Function, phase string, dot *dotWriter) string { + buf := new(bytes.Buffer) + if dot != nil { + dot.writeFuncSVG(buf, phase, f) + } + fmt.Fprint(buf, "") + p := htmlFuncPrinter{w: buf} + fprintFunc(p, f) + + // fprintFunc(&buf, f) // TODO: HTML, not text,
for line breaks, etc. + fmt.Fprint(buf, "
") + return buf.String() +} + +type htmlFuncPrinter struct { + w io.Writer +} + +func (p htmlFuncPrinter) startBlock(b *BasicBlock, reachable bool) { + var dead string + if !reachable { + dead = "dead-block" + } + fmt.Fprintf(p.w, "
    ", b, dead) + fmt.Fprintf(p.w, "
  • %s:", blockHTML(b)) + if len(b.Preds) > 0 { + io.WriteString(p.w, " ←") // left arrow + for _, pred := range b.Preds { + fmt.Fprintf(p.w, " %s", blockHTML(pred)) + } + } + if len(b.Instrs) > 0 { + io.WriteString(p.w, ``) + } + io.WriteString(p.w, "
  • ") + if len(b.Instrs) > 0 { // start list of values + io.WriteString(p.w, "
  • ") + io.WriteString(p.w, "
      ") + } +} + +func (p htmlFuncPrinter) endBlock(b *BasicBlock) { + if len(b.Instrs) > 0 { // end list of values + io.WriteString(p.w, "
    ") + io.WriteString(p.w, "
  • ") + } + io.WriteString(p.w, "
  • ") + fmt.Fprint(p.w, blockLongHTML(b)) + io.WriteString(p.w, "
  • ") + io.WriteString(p.w, "
") +} + +func (p htmlFuncPrinter) value(v Node, live bool) { + var dead string + if !live { + dead = "dead-value" + } + fmt.Fprintf(p.w, "
  • ", dead) + fmt.Fprint(p.w, valueLongHTML(v)) + io.WriteString(p.w, "
  • ") +} + +func (p htmlFuncPrinter) startDepCycle() { + fmt.Fprintln(p.w, "") +} + +func (p htmlFuncPrinter) endDepCycle() { + fmt.Fprintln(p.w, "") +} + +func (p htmlFuncPrinter) named(n string, vals []Value) { + fmt.Fprintf(p.w, "
  • name %s: ", n) + for _, val := range vals { + fmt.Fprintf(p.w, "%s ", valueHTML(val)) + } + fmt.Fprintf(p.w, "
  • ") +} + +type dotWriter struct { + path string + broken bool +} + +// newDotWriter returns non-nil value when mask is valid. +// dotWriter will generate SVGs only for the phases specified in the mask. +// mask can contain following patterns and combinations of them: +// * - all of them; +// x-y - x through y, inclusive; +// x,y - x and y, but not the passes between. +func newDotWriter() *dotWriter { + path, err := exec.LookPath("dot") + if err != nil { + fmt.Println(err) + return nil + } + return &dotWriter{path: path} +} + +func (d *dotWriter) writeFuncSVG(w io.Writer, phase string, f *Function) { + if d.broken { + return + } + cmd := exec.Command(d.path, "-Tsvg") + pipe, err := cmd.StdinPipe() + if err != nil { + d.broken = true + fmt.Println(err) + return + } + buf := new(bytes.Buffer) + cmd.Stdout = buf + bufErr := new(bytes.Buffer) + cmd.Stderr = bufErr + err = cmd.Start() + if err != nil { + d.broken = true + fmt.Println(err) + return + } + fmt.Fprint(pipe, `digraph "" { margin=0; size="4,40"; ranksep=.2; `) + id := strings.Replace(phase, " ", "-", -1) + fmt.Fprintf(pipe, `id="g_graph_%s";`, id) + fmt.Fprintf(pipe, `node [style=filled,fillcolor=white,fontsize=16,fontname="Menlo,Times,serif",margin="0.01,0.03"];`) + fmt.Fprintf(pipe, `edge [fontsize=16,fontname="Menlo,Times,serif"];`) + for _, b := range f.Blocks { + layout := "" + fmt.Fprintf(pipe, `%v [label="%v%s\n%v",id="graph_node_%v_%v"];`, b, b, layout, b.Control().String(), id, b) + } + indexOf := make([]int, len(f.Blocks)) + for i, b := range f.Blocks { + indexOf[b.Index] = i + } + + // XXX + /* + ponums := make([]int32, len(f.Blocks)) + _ = postorderWithNumbering(f, ponums) + isBackEdge := func(from, to int) bool { + return ponums[from] <= ponums[to] + } + */ + isBackEdge := func(from, to int) bool { return false } + + for _, b := range f.Blocks { + for i, s := range b.Succs { + style := "solid" + color := "black" + arrow := "vee" + if isBackEdge(b.Index, s.Index) { + color = "blue" + } + fmt.Fprintf(pipe, `%v -> %v [label=" %d ",style="%s",color="%s",arrowhead="%s"];`, b, s, i, style, color, arrow) + } + } + fmt.Fprint(pipe, "}") + pipe.Close() + err = cmd.Wait() + if err != nil { + d.broken = true + fmt.Printf("dot: %v\n%v\n", err, bufErr.String()) + return + } + + svgID := "svg_graph_" + id + fmt.Fprintf(w, `
    `, svgID, svgID) + // For now, an awful hack: edit the html as it passes through + // our fingers, finding ' 0 { + fset = initial[0].Fset + } + + prog := ir.NewProgram(fset, mode) + if opts != nil { + prog.PrintFunc = opts.PrintFunc + } + + isInitial := make(map[*packages.Package]bool, len(initial)) + for _, p := range initial { + isInitial[p] = true + } + + irmap := make(map[*packages.Package]*ir.Package) + packages.Visit(initial, nil, func(p *packages.Package) { + if p.Types != nil && !p.IllTyped { + var files []*ast.File + if deps || isInitial[p] { + files = p.Syntax + } + irmap[p] = prog.CreatePackage(p.Types, files, p.TypesInfo, true) + } + }) + + var irpkgs []*ir.Package + for _, p := range initial { + irpkgs = append(irpkgs, irmap[p]) // may be nil + } + return prog, irpkgs +} + +// CreateProgram returns a new program in IR form, given a program +// loaded from source. An IR package is created for each transitively +// error-free package of lprog. +// +// Code for bodies of functions is not built until Build is called +// on the result. +// +// The mode parameter controls diagnostics and checking during IR construction. +// +// Deprecated: use golang.org/x/tools/go/packages and the Packages +// function instead; see ir.ExampleLoadPackages. +// +func CreateProgram(lprog *loader.Program, mode ir.BuilderMode) *ir.Program { + prog := ir.NewProgram(lprog.Fset, mode) + + for _, info := range lprog.AllPackages { + if info.TransitivelyErrorFree { + prog.CreatePackage(info.Pkg, info.Files, &info.Info, info.Importable) + } + } + + return prog +} + +// BuildPackage builds an IR program with IR for a single package. +// +// It populates pkg by type-checking the specified file ASTs. All +// dependencies are loaded using the importer specified by tc, which +// typically loads compiler export data; IR code cannot be built for +// those packages. BuildPackage then constructs an ir.Program with all +// dependency packages created, and builds and returns the IR package +// corresponding to pkg. +// +// The caller must have set pkg.Path() to the import path. +// +// The operation fails if there were any type-checking or import errors. +// +// See ../ir/example_test.go for an example. +// +func BuildPackage(tc *types.Config, fset *token.FileSet, pkg *types.Package, files []*ast.File, mode ir.BuilderMode) (*ir.Package, *types.Info, error) { + if fset == nil { + panic("no token.FileSet") + } + if pkg.Path() == "" { + panic("package has no import path") + } + + info := &types.Info{ + Types: make(map[ast.Expr]types.TypeAndValue), + Defs: make(map[*ast.Ident]types.Object), + Uses: make(map[*ast.Ident]types.Object), + Implicits: make(map[ast.Node]types.Object), + Scopes: make(map[ast.Node]*types.Scope), + Selections: make(map[*ast.SelectorExpr]*types.Selection), + } + if err := types.NewChecker(tc, fset, pkg, info).Files(files); err != nil { + return nil, nil, err + } + + prog := ir.NewProgram(fset, mode) + + // Create IR packages for all imports. + // Order is not significant. + created := make(map[*types.Package]bool) + var createAll func(pkgs []*types.Package) + createAll = func(pkgs []*types.Package) { + for _, p := range pkgs { + if !created[p] { + created[p] = true + prog.CreatePackage(p, nil, nil, true) + createAll(p.Imports()) + } + } + } + createAll(pkg.Imports()) + + // Create and build the primary package. 
+ irpkg := prog.CreatePackage(pkg, files, info, false) + irpkg.Build() + return irpkg, info, nil +} diff --git a/vendor/honnef.co/go/tools/go/ir/irutil/loops.go b/vendor/honnef.co/go/tools/go/ir/irutil/loops.go new file mode 100644 index 000000000..751cc680b --- /dev/null +++ b/vendor/honnef.co/go/tools/go/ir/irutil/loops.go @@ -0,0 +1,54 @@ +package irutil + +import "honnef.co/go/tools/go/ir" + +type Loop struct{ *ir.BlockSet } + +func FindLoops(fn *ir.Function) []Loop { + if fn.Blocks == nil { + return nil + } + tree := fn.DomPreorder() + var sets []Loop + for _, h := range tree { + for _, n := range h.Preds { + if !h.Dominates(n) { + continue + } + // n is a back-edge to h + // h is the loop header + if n == h { + set := Loop{ir.NewBlockSet(len(fn.Blocks))} + set.Add(n) + sets = append(sets, set) + continue + } + set := Loop{ir.NewBlockSet(len(fn.Blocks))} + set.Add(h) + set.Add(n) + for _, b := range allPredsBut(n, h, nil) { + set.Add(b) + } + sets = append(sets, set) + } + } + return sets +} + +func allPredsBut(b, but *ir.BasicBlock, list []*ir.BasicBlock) []*ir.BasicBlock { +outer: + for _, pred := range b.Preds { + if pred == but { + continue + } + for _, p := range list { + // TODO improve big-o complexity of this function + if pred == p { + continue outer + } + } + list = append(list, pred) + list = allPredsBut(pred, but, list) + } + return list +} diff --git a/vendor/honnef.co/go/tools/go/ir/irutil/stub.go b/vendor/honnef.co/go/tools/go/ir/irutil/stub.go new file mode 100644 index 000000000..4311c7dbe --- /dev/null +++ b/vendor/honnef.co/go/tools/go/ir/irutil/stub.go @@ -0,0 +1,32 @@ +package irutil + +import ( + "honnef.co/go/tools/go/ir" +) + +// IsStub reports whether a function is a stub. A function is +// considered a stub if it has no instructions or if all it does is +// return a constant value. +func IsStub(fn *ir.Function) bool { + for _, b := range fn.Blocks { + for _, instr := range b.Instrs { + switch instr.(type) { + case *ir.Const: + // const naturally has no side-effects + case *ir.Panic: + // panic is a stub if it only uses constants + case *ir.Return: + // return is a stub if it only uses constants + case *ir.DebugRef: + case *ir.Jump: + // if there are no disallowed instructions, then we're + // only jumping to the exit block (or possibly + // somewhere else that's stubby?) + default: + // all other instructions are assumed to do actual work + return false + } + } + } + return true +} diff --git a/vendor/honnef.co/go/tools/go/ir/irutil/switch.go b/vendor/honnef.co/go/tools/go/ir/irutil/switch.go new file mode 100644 index 000000000..e7654e008 --- /dev/null +++ b/vendor/honnef.co/go/tools/go/ir/irutil/switch.go @@ -0,0 +1,264 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package irutil + +// This file implements discovery of switch and type-switch constructs +// from low-level control flow. +// +// Many techniques exist for compiling a high-level switch with +// constant cases to efficient machine code. The optimal choice will +// depend on the data type, the specific case values, the code in the +// body of each case, and the hardware. +// Some examples: +// - a lookup table (for a switch that maps constants to constants) +// - a computed goto +// - a binary tree +// - a perfect hash +// - a two-level switch (to partition constant strings by their first byte). 
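// An illustrative sketch of how these helpers fit together: type-check a toy
// package, build its IR with BuildPackage (load.go above), then inspect every
// function with FindLoops, IsStub and Switches (defined later in this file).
// The package path, source text and use of importer.Default are assumptions
// made only for this sketch; error handling is abbreviated.
//
//	fset := token.NewFileSet()
//	file, err := parser.ParseFile(fset, "p.go", `package p
//	func f(n int) int {
//		s := 0
//		for i := 0; i < n; i++ {
//			s += i
//		}
//		return s
//	}`, 0)
//	if err != nil {
//		panic(err)
//	}
//	conf := &types.Config{Importer: importer.Default()}
//	pkg := types.NewPackage("p", "p")
//	irpkg, _, err := irutil.BuildPackage(conf, fset, pkg, []*ast.File{file}, ir.SanityCheckFunctions)
//	if err != nil {
//		panic(err)
//	}
//	for _, m := range irpkg.Members {
//		if fn, ok := m.(*ir.Function); ok {
//			fmt.Println(fn.Name(),
//				"loops:", len(irutil.FindLoops(fn)),
//				"stub:", irutil.IsStub(fn),
//				"switches:", len(irutil.Switches(fn)))
//		}
//	}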
+ +import ( + "bytes" + "fmt" + "go/token" + "go/types" + + "honnef.co/go/tools/go/ir" +) + +// A ConstCase represents a single constant comparison. +// It is part of a Switch. +type ConstCase struct { + Block *ir.BasicBlock // block performing the comparison + Body *ir.BasicBlock // body of the case + Value *ir.Const // case comparand +} + +// A TypeCase represents a single type assertion. +// It is part of a Switch. +type TypeCase struct { + Block *ir.BasicBlock // block performing the type assert + Body *ir.BasicBlock // body of the case + Type types.Type // case type + Binding ir.Value // value bound by this case +} + +// A Switch is a logical high-level control flow operation +// (a multiway branch) discovered by analysis of a CFG containing +// only if/else chains. It is not part of the ir.Instruction set. +// +// One of ConstCases and TypeCases has length >= 2; +// the other is nil. +// +// In a value switch, the list of cases may contain duplicate constants. +// A type switch may contain duplicate types, or types assignable +// to an interface type also in the list. +// TODO(adonovan): eliminate such duplicates. +// +type Switch struct { + Start *ir.BasicBlock // block containing start of if/else chain + X ir.Value // the switch operand + ConstCases []ConstCase // ordered list of constant comparisons + TypeCases []TypeCase // ordered list of type assertions + Default *ir.BasicBlock // successor if all comparisons fail +} + +func (sw *Switch) String() string { + // We represent each block by the String() of its + // first Instruction, e.g. "print(42:int)". + var buf bytes.Buffer + if sw.ConstCases != nil { + fmt.Fprintf(&buf, "switch %s {\n", sw.X.Name()) + for _, c := range sw.ConstCases { + fmt.Fprintf(&buf, "case %s: %s\n", c.Value.Name(), c.Body.Instrs[0]) + } + } else { + fmt.Fprintf(&buf, "switch %s.(type) {\n", sw.X.Name()) + for _, c := range sw.TypeCases { + fmt.Fprintf(&buf, "case %s %s: %s\n", + c.Binding.Name(), c.Type, c.Body.Instrs[0]) + } + } + if sw.Default != nil { + fmt.Fprintf(&buf, "default: %s\n", sw.Default.Instrs[0]) + } + fmt.Fprintf(&buf, "}") + return buf.String() +} + +// Switches examines the control-flow graph of fn and returns the +// set of inferred value and type switches. A value switch tests an +// ir.Value for equality against two or more compile-time constant +// values. Switches involving link-time constants (addresses) are +// ignored. A type switch type-asserts an ir.Value against two or +// more types. +// +// The switches are returned in dominance order. +// +// The resulting switches do not necessarily correspond to uses of the +// 'switch' keyword in the source: for example, a single source-level +// switch statement with non-constant cases may result in zero, one or +// many Switches, one per plural sequence of constant cases. +// Switches may even be inferred from if/else- or goto-based control flow. +// (In general, the control flow constructs of the source program +// cannot be faithfully reproduced from the IR.) +// +func Switches(fn *ir.Function) []Switch { + // Traverse the CFG in dominance order, so we don't + // enter an if/else-chain in the middle. + var switches []Switch + seen := make(map[*ir.BasicBlock]bool) // TODO(adonovan): opt: use ir.blockSet + for _, b := range fn.DomPreorder() { + if x, k := isComparisonBlock(b); x != nil { + // Block b starts a switch. 
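			// Such a chain of comparison blocks corresponds to source of the
			// form `if x == k1 { ... } else if x == k2 { ... } else { ... }`:
			// valueSwitch below follows each block's false edge (Succs[1]),
			// adding one ConstCase per comparison, and stops as soon as the
			// chain leaves this simple shape.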
+ sw := Switch{Start: b, X: x} + valueSwitch(&sw, k, seen) + if len(sw.ConstCases) > 1 { + switches = append(switches, sw) + } + } + + if y, x, T := isTypeAssertBlock(b); y != nil { + // Block b starts a type switch. + sw := Switch{Start: b, X: x} + typeSwitch(&sw, y, T, seen) + if len(sw.TypeCases) > 1 { + switches = append(switches, sw) + } + } + } + return switches +} + +func isSameX(x1 ir.Value, x2 ir.Value) bool { + if x1 == x2 { + return true + } + if x2, ok := x2.(*ir.Sigma); ok { + return isSameX(x1, x2.X) + } + return false +} + +func valueSwitch(sw *Switch, k *ir.Const, seen map[*ir.BasicBlock]bool) { + b := sw.Start + x := sw.X + for isSameX(sw.X, x) { + if seen[b] { + break + } + seen[b] = true + + sw.ConstCases = append(sw.ConstCases, ConstCase{ + Block: b, + Body: b.Succs[0], + Value: k, + }) + b = b.Succs[1] + n := 0 + for _, instr := range b.Instrs { + switch instr.(type) { + case *ir.If, *ir.BinOp: + n++ + case *ir.Sigma, *ir.Phi, *ir.DebugRef: + default: + n += 1000 + } + } + if n != 2 { + // Block b contains not just 'if x == k' and σ/ϕ nodes, + // so it may have side effects that + // make it unsafe to elide. + break + } + if len(b.Preds) != 1 { + // Block b has multiple predecessors, + // so it cannot be treated as a case. + break + } + x, k = isComparisonBlock(b) + } + sw.Default = b +} + +func typeSwitch(sw *Switch, y ir.Value, T types.Type, seen map[*ir.BasicBlock]bool) { + b := sw.Start + x := sw.X + for isSameX(sw.X, x) { + if seen[b] { + break + } + seen[b] = true + + sw.TypeCases = append(sw.TypeCases, TypeCase{ + Block: b, + Body: b.Succs[0], + Type: T, + Binding: y, + }) + b = b.Succs[1] + n := 0 + for _, instr := range b.Instrs { + switch instr.(type) { + case *ir.TypeAssert, *ir.Extract, *ir.If: + n++ + case *ir.Sigma, *ir.Phi: + default: + n += 1000 + } + } + if n != 4 { + // Block b contains not just + // {TypeAssert; Extract #0; Extract #1; If} + // so it may have side effects that + // make it unsafe to elide. + break + } + if len(b.Preds) != 1 { + // Block b has multiple predecessors, + // so it cannot be treated as a case. + break + } + y, x, T = isTypeAssertBlock(b) + } + sw.Default = b +} + +// isComparisonBlock returns the operands (v, k) if a block ends with +// a comparison v==k, where k is a compile-time constant. +// +func isComparisonBlock(b *ir.BasicBlock) (v ir.Value, k *ir.Const) { + if n := len(b.Instrs); n >= 2 { + if i, ok := b.Instrs[n-1].(*ir.If); ok { + if binop, ok := i.Cond.(*ir.BinOp); ok && binop.Block() == b && binop.Op == token.EQL { + if k, ok := binop.Y.(*ir.Const); ok { + return binop.X, k + } + if k, ok := binop.X.(*ir.Const); ok { + return binop.Y, k + } + } + } + } + return +} + +// isTypeAssertBlock returns the operands (y, x, T) if a block ends with +// a type assertion "if y, ok := x.(T); ok {". +// +func isTypeAssertBlock(b *ir.BasicBlock) (y, x ir.Value, T types.Type) { + if n := len(b.Instrs); n >= 4 { + if i, ok := b.Instrs[n-1].(*ir.If); ok { + if ext1, ok := i.Cond.(*ir.Extract); ok && ext1.Block() == b && ext1.Index == 1 { + if ta, ok := ext1.Tuple.(*ir.TypeAssert); ok && ta.Block() == b { + // hack: relies upon instruction ordering. 
+ if ext0, ok := b.Instrs[n-3].(*ir.Extract); ok { + return ext0, ta.X, ta.AssertedType + } + } + } + } + } + return +} diff --git a/vendor/honnef.co/go/tools/go/ir/irutil/terminates.go b/vendor/honnef.co/go/tools/go/ir/irutil/terminates.go new file mode 100644 index 000000000..84e7503bb --- /dev/null +++ b/vendor/honnef.co/go/tools/go/ir/irutil/terminates.go @@ -0,0 +1,70 @@ +package irutil + +import ( + "go/types" + + "honnef.co/go/tools/go/ir" +) + +// Terminates reports whether fn is supposed to return, that is if it +// has at least one theoretic path that returns from the function. +// Explicit panics do not count as terminating. +func Terminates(fn *ir.Function) bool { + if fn.Blocks == nil { + // assuming that a function terminates is the conservative + // choice + return true + } + + for _, block := range fn.Blocks { + if _, ok := block.Control().(*ir.Return); ok { + if len(block.Preds) == 0 { + return true + } + for _, pred := range block.Preds { + switch ctrl := pred.Control().(type) { + case *ir.Panic: + // explicit panics do not count as terminating + case *ir.If: + // Check if we got here by receiving from a closed + // time.Tick channel – this cannot happen at + // runtime and thus doesn't constitute termination + iff := ctrl + if !ok { + return true + } + ex, ok := iff.Cond.(*ir.Extract) + if !ok { + return true + } + if ex.Index != 1 { + return true + } + recv, ok := ex.Tuple.(*ir.Recv) + if !ok { + return true + } + call, ok := recv.Chan.(*ir.Call) + if !ok { + return true + } + fn, ok := call.Common().Value.(*ir.Function) + if !ok { + return true + } + fn2, ok := fn.Object().(*types.Func) + if !ok { + return true + } + if fn2.FullName() != "time.Tick" { + return true + } + default: + // we've reached the exit block + return true + } + } + } + } + return false +} diff --git a/vendor/honnef.co/go/tools/go/ir/irutil/util.go b/vendor/honnef.co/go/tools/go/ir/irutil/util.go new file mode 100644 index 000000000..49b978a96 --- /dev/null +++ b/vendor/honnef.co/go/tools/go/ir/irutil/util.go @@ -0,0 +1,165 @@ +package irutil + +import ( + "go/types" + "strings" + + "honnef.co/go/tools/go/ir" + "honnef.co/go/tools/go/types/typeutil" +) + +func Reachable(from, to *ir.BasicBlock) bool { + if from == to { + return true + } + if from.Dominates(to) { + return true + } + + found := false + Walk(from, func(b *ir.BasicBlock) bool { + if b == to { + found = true + return false + } + return true + }) + return found +} + +func Walk(b *ir.BasicBlock, fn func(*ir.BasicBlock) bool) { + seen := map[*ir.BasicBlock]bool{} + wl := []*ir.BasicBlock{b} + for len(wl) > 0 { + b := wl[len(wl)-1] + wl = wl[:len(wl)-1] + if seen[b] { + continue + } + seen[b] = true + if !fn(b) { + continue + } + wl = append(wl, b.Succs...) 
+ } +} + +func Vararg(x *ir.Slice) ([]ir.Value, bool) { + var out []ir.Value + slice, ok := x.X.(*ir.Alloc) + if !ok { + return nil, false + } + for _, ref := range *slice.Referrers() { + if ref == x { + continue + } + if ref.Block() != x.Block() { + return nil, false + } + idx, ok := ref.(*ir.IndexAddr) + if !ok { + return nil, false + } + if len(*idx.Referrers()) != 1 { + return nil, false + } + store, ok := (*idx.Referrers())[0].(*ir.Store) + if !ok { + return nil, false + } + out = append(out, store.Val) + } + return out, true +} + +func CallName(call *ir.CallCommon) string { + if call.IsInvoke() { + return "" + } + switch v := call.Value.(type) { + case *ir.Function: + fn, ok := v.Object().(*types.Func) + if !ok { + return "" + } + return typeutil.FuncName(fn) + case *ir.Builtin: + return v.Name() + } + return "" +} + +func IsCallTo(call *ir.CallCommon, name string) bool { return CallName(call) == name } + +func IsCallToAny(call *ir.CallCommon, names ...string) bool { + q := CallName(call) + for _, name := range names { + if q == name { + return true + } + } + return false +} + +func FilterDebug(instr []ir.Instruction) []ir.Instruction { + var out []ir.Instruction + for _, ins := range instr { + if _, ok := ins.(*ir.DebugRef); !ok { + out = append(out, ins) + } + } + return out +} + +func IsExample(fn *ir.Function) bool { + if !strings.HasPrefix(fn.Name(), "Example") { + return false + } + f := fn.Prog.Fset.File(fn.Pos()) + if f == nil { + return false + } + return strings.HasSuffix(f.Name(), "_test.go") +} + +// Flatten recursively returns the underlying value of an ir.Sigma or +// ir.Phi node. If all edges in an ir.Phi node are the same (after +// flattening), the flattened edge will get returned. If flattening is +// not possible, nil is returned. +func Flatten(v ir.Value) ir.Value { + failed := false + seen := map[ir.Value]struct{}{} + var out ir.Value + var dfs func(v ir.Value) + dfs = func(v ir.Value) { + if failed { + return + } + if _, ok := seen[v]; ok { + return + } + seen[v] = struct{}{} + + switch v := v.(type) { + case *ir.Sigma: + dfs(v.X) + case *ir.Phi: + for _, e := range v.Edges { + dfs(e) + } + default: + if out == nil { + out = v + } else if out != v { + failed = true + } + } + } + dfs(v) + + if failed { + return nil + } + return out +} diff --git a/vendor/honnef.co/go/tools/go/ir/irutil/visit.go b/vendor/honnef.co/go/tools/go/ir/irutil/visit.go new file mode 100644 index 000000000..f6d0503dd --- /dev/null +++ b/vendor/honnef.co/go/tools/go/ir/irutil/visit.go @@ -0,0 +1,79 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package irutil + +import "honnef.co/go/tools/go/ir" + +// This file defines utilities for visiting the IR of +// a Program. +// +// TODO(adonovan): test coverage. + +// AllFunctions finds and returns the set of functions potentially +// needed by program prog, as determined by a simple linker-style +// reachability algorithm starting from the members and method-sets of +// each package. The result may include anonymous functions and +// synthetic wrappers. +// +// Precondition: all packages are built. 
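//
// A typical use, sketched here for illustration only (it assumes the initial
// packages were loaded with golang.org/x/tools/go/packages and converted with
// the Packages helper from this package's load.go):
//
//	prog, _ := irutil.Packages(initial, 0, nil)
//	prog.Build()
//	for fn := range irutil.AllFunctions(prog) {
//		fmt.Println(fn)
//	}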
+// +func AllFunctions(prog *ir.Program) map[*ir.Function]bool { + visit := visitor{ + prog: prog, + seen: make(map[*ir.Function]bool), + } + visit.program() + return visit.seen +} + +type visitor struct { + prog *ir.Program + seen map[*ir.Function]bool +} + +func (visit *visitor) program() { + for _, pkg := range visit.prog.AllPackages() { + for _, mem := range pkg.Members { + if fn, ok := mem.(*ir.Function); ok { + visit.function(fn) + } + } + } + for _, T := range visit.prog.RuntimeTypes() { + mset := visit.prog.MethodSets.MethodSet(T) + for i, n := 0, mset.Len(); i < n; i++ { + visit.function(visit.prog.MethodValue(mset.At(i))) + } + } +} + +func (visit *visitor) function(fn *ir.Function) { + if !visit.seen[fn] { + visit.seen[fn] = true + var buf [10]*ir.Value // avoid alloc in common case + for _, b := range fn.Blocks { + for _, instr := range b.Instrs { + for _, op := range instr.Operands(buf[:0]) { + if fn, ok := (*op).(*ir.Function); ok { + visit.function(fn) + } + } + } + } + } +} + +// MainPackages returns the subset of the specified packages +// named "main" that define a main function. +// The result may include synthetic "testmain" packages. +func MainPackages(pkgs []*ir.Package) []*ir.Package { + var mains []*ir.Package + for _, pkg := range pkgs { + if pkg.Pkg.Name() == "main" && pkg.Func("main") != nil { + mains = append(mains, pkg) + } + } + return mains +} diff --git a/vendor/honnef.co/go/tools/go/ir/lift.go b/vendor/honnef.co/go/tools/go/ir/lift.go new file mode 100644 index 000000000..336470464 --- /dev/null +++ b/vendor/honnef.co/go/tools/go/ir/lift.go @@ -0,0 +1,1075 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package ir + +// This file defines the lifting pass which tries to "lift" Alloc +// cells (new/local variables) into SSA registers, replacing loads +// with the dominating stored value, eliminating loads and stores, and +// inserting φ- and σ-nodes as needed. + +// Cited papers and resources: +// +// Ron Cytron et al. 1991. Efficiently computing SSA form... +// http://doi.acm.org/10.1145/115372.115320 +// +// Cooper, Harvey, Kennedy. 2001. A Simple, Fast Dominance Algorithm. +// Software Practice and Experience 2001, 4:1-10. +// http://www.hipersoft.rice.edu/grads/publications/dom14.pdf +// +// Daniel Berlin, llvmdev mailing list, 2012. +// http://lists.cs.uiuc.edu/pipermail/llvmdev/2012-January/046638.html +// (Be sure to expand the whole thread.) +// +// C. Scott Ananian. 1997. The static single information form. +// +// Jeremy Singer. 2006. Static program analysis based on virtual register renaming. + +// TODO(adonovan): opt: there are many optimizations worth evaluating, and +// the conventional wisdom for SSA construction is that a simple +// algorithm well engineered often beats those of better asymptotic +// complexity on all but the most egregious inputs. +// +// Danny Berlin suggests that the Cooper et al. algorithm for +// computing the dominance frontier is superior to Cytron et al. +// Furthermore he recommends that rather than computing the DF for the +// whole function then renaming all alloc cells, it may be cheaper to +// compute the DF for each alloc cell separately and throw it away. +// +// Consider exploiting liveness information to avoid creating dead +// φ-nodes which we then immediately remove. +// +// Also see many other "TODO: opt" suggestions in the code. 
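//
// The effect of this pass can be observed by building the same files twice
// with the BuilderMode flags defined in mode.go and comparing the printed IR.
// This is only an illustrative sketch; conf, fset and files are assumed to be
// set up as for irutil.BuildPackage, and errors are ignored for brevity.
//
//	// Naïve form: loads and stores of local Alloc cells are kept.
//	irutil.BuildPackage(conf, fset, types.NewPackage("p", "p"), files, ir.PrintFunctions|ir.NaiveForm)
//	// Default: locals are lifted to registers, with φ- and σ-nodes inserted.
//	irutil.BuildPackage(conf, fset, types.NewPackage("p", "p"), files, ir.PrintFunctions)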
+ +import ( + "fmt" + "go/types" + "os" +) + +// If true, show diagnostic information at each step of lifting. +// Very verbose. +const debugLifting = false + +// domFrontier maps each block to the set of blocks in its dominance +// frontier. The outer slice is conceptually a map keyed by +// Block.Index. The inner slice is conceptually a set, possibly +// containing duplicates. +// +// TODO(adonovan): opt: measure impact of dups; consider a packed bit +// representation, e.g. big.Int, and bitwise parallel operations for +// the union step in the Children loop. +// +// domFrontier's methods mutate the slice's elements but not its +// length, so their receivers needn't be pointers. +// +type domFrontier [][]*BasicBlock + +func (df domFrontier) add(u, v *BasicBlock) { + df[u.Index] = append(df[u.Index], v) +} + +// build builds the dominance frontier df for the dominator tree of +// fn, using the algorithm found in A Simple, Fast Dominance +// Algorithm, Figure 5. +// +// TODO(adonovan): opt: consider Berlin approach, computing pruned SSA +// by pruning the entire IDF computation, rather than merely pruning +// the DF -> IDF step. +func (df domFrontier) build(fn *Function) { + for _, b := range fn.Blocks { + preds := b.Preds[0:len(b.Preds):len(b.Preds)] + if b == fn.Exit { + for i, v := range fn.fakeExits.values { + if v { + preds = append(preds, fn.Blocks[i]) + } + } + } + if len(preds) >= 2 { + for _, p := range preds { + runner := p + for runner != b.dom.idom { + df.add(runner, b) + runner = runner.dom.idom + } + } + } + } +} + +func buildDomFrontier(fn *Function) domFrontier { + df := make(domFrontier, len(fn.Blocks)) + df.build(fn) + return df +} + +type postDomFrontier [][]*BasicBlock + +func (rdf postDomFrontier) add(u, v *BasicBlock) { + rdf[u.Index] = append(rdf[u.Index], v) +} + +func (rdf postDomFrontier) build(fn *Function) { + for _, b := range fn.Blocks { + succs := b.Succs[0:len(b.Succs):len(b.Succs)] + if fn.fakeExits.Has(b) { + succs = append(succs, fn.Exit) + } + if len(succs) >= 2 { + for _, s := range succs { + runner := s + for runner != b.pdom.idom { + rdf.add(runner, b) + runner = runner.pdom.idom + } + } + } + } +} + +func buildPostDomFrontier(fn *Function) postDomFrontier { + rdf := make(postDomFrontier, len(fn.Blocks)) + rdf.build(fn) + return rdf +} + +func removeInstr(refs []Instruction, instr Instruction) []Instruction { + i := 0 + for _, ref := range refs { + if ref == instr { + continue + } + refs[i] = ref + i++ + } + for j := i; j != len(refs); j++ { + refs[j] = nil // aid GC + } + return refs[:i] +} + +func clearInstrs(instrs []Instruction) { + for i := range instrs { + instrs[i] = nil + } +} + +// lift replaces local and new Allocs accessed only with +// load/store by IR registers, inserting φ- and σ-nodes where necessary. +// The result is a program in pruned SSI form. +// +// Preconditions: +// - fn has no dead blocks (blockopt has run). +// - Def/use info (Operands and Referrers) is up-to-date. +// - The dominator tree is up-to-date. +// +func lift(fn *Function) { + // TODO(adonovan): opt: lots of little optimizations may be + // worthwhile here, especially if they cause us to avoid + // buildDomFrontier. For example: + // + // - Alloc never loaded? Eliminate. + // - Alloc never stored? Replace all loads with a zero constant. + // - Alloc stored once? Replace loads with dominating store; + // don't forget that an Alloc is itself an effective store + // of zero. + // - Alloc used only within a single block? + // Use degenerate algorithm avoiding φ-nodes. 
+ // - Consider synergy with scalar replacement of aggregates (SRA). + // e.g. *(&x.f) where x is an Alloc. + // Perhaps we'd get better results if we generated this as x.f + // i.e. Field(x, .f) instead of Load(FieldIndex(x, .f)). + // Unclear. + // + // But we will start with the simplest correct code. + var df domFrontier + var rdf postDomFrontier + var closure *closure + var newPhis newPhiMap + var newSigmas newSigmaMap + + // During this pass we will replace some BasicBlock.Instrs + // (allocs, loads and stores) with nil, keeping a count in + // BasicBlock.gaps. At the end we will reset Instrs to the + // concatenation of all non-dead newPhis and non-nil Instrs + // for the block, reusing the original array if space permits. + + // While we're here, we also eliminate 'rundefers' + // instructions in functions that contain no 'defer' + // instructions. + usesDefer := false + + // Determine which allocs we can lift and number them densely. + // The renaming phase uses this numbering for compact maps. + numAllocs := 0 + for _, b := range fn.Blocks { + b.gaps = 0 + b.rundefers = 0 + for _, instr := range b.Instrs { + switch instr := instr.(type) { + case *Alloc: + if !liftable(instr) { + instr.index = -1 + continue + } + index := -1 + if numAllocs == 0 { + df = buildDomFrontier(fn) + rdf = buildPostDomFrontier(fn) + if len(fn.Blocks) > 2 { + closure = transitiveClosure(fn) + } + newPhis = make(newPhiMap, len(fn.Blocks)) + newSigmas = make(newSigmaMap, len(fn.Blocks)) + + if debugLifting { + title := false + for i, blocks := range df { + if blocks != nil { + if !title { + fmt.Fprintf(os.Stderr, "Dominance frontier of %s:\n", fn) + title = true + } + fmt.Fprintf(os.Stderr, "\t%s: %s\n", fn.Blocks[i], blocks) + } + } + } + } + liftAlloc(closure, df, rdf, instr, newPhis, newSigmas) + index = numAllocs + numAllocs++ + instr.index = index + case *Defer: + usesDefer = true + case *RunDefers: + b.rundefers++ + } + } + } + + if numAllocs > 0 { + // renaming maps an alloc (keyed by index) to its replacement + // value. Initially the renaming contains nil, signifying the + // zero constant of the appropriate type; we construct the + // Const lazily at most once on each path through the domtree. + // TODO(adonovan): opt: cache per-function not per subtree. + renaming := make([]Value, numAllocs) + + // Renaming. + rename(fn.Blocks[0], renaming, newPhis, newSigmas) + + simplifyPhis(newPhis) + + // Eliminate dead φ- and σ-nodes. + markLiveNodes(fn.Blocks, newPhis, newSigmas) + } + + // Prepend remaining live φ-nodes to each block and possibly kill rundefers. 
+ for _, b := range fn.Blocks { + var head []Instruction + if numAllocs > 0 { + nps := newPhis[b.Index] + head = make([]Instruction, 0, len(nps)) + for _, pred := range b.Preds { + nss := newSigmas[pred.Index] + idx := pred.succIndex(b) + for _, newSigma := range nss { + if sigma := newSigma.sigmas[idx]; sigma != nil && sigma.live { + head = append(head, sigma) + + // we didn't populate referrers before, as most + // sigma nodes will be killed + if refs := sigma.X.Referrers(); refs != nil { + *refs = append(*refs, sigma) + } + } else if sigma != nil { + sigma.block = nil + } + } + } + for _, np := range nps { + if np.phi.live { + head = append(head, np.phi) + } else { + for _, edge := range np.phi.Edges { + if refs := edge.Referrers(); refs != nil { + *refs = removeInstr(*refs, np.phi) + } + } + np.phi.block = nil + } + } + } + + rundefersToKill := b.rundefers + if usesDefer { + rundefersToKill = 0 + } + + j := len(head) + if j+b.gaps+rundefersToKill == 0 { + continue // fast path: no new phis or gaps + } + + // We could do straight copies instead of element-wise copies + // when both b.gaps and rundefersToKill are zero. However, + // that seems to only be the case ~1% of the time, which + // doesn't seem worth the extra branch. + + // Remove dead instructions, add phis and sigmas + ns := len(b.Instrs) + j - b.gaps - rundefersToKill + if ns <= cap(b.Instrs) { + // b.Instrs has enough capacity to store all instructions + + // OPT(dh): check cap vs the actually required space; if + // there is a big enough difference, it may be worth + // allocating a new slice, to avoid pinning memory. + dst := b.Instrs[:cap(b.Instrs)] + i := len(dst) - 1 + for n := len(b.Instrs) - 1; n >= 0; n-- { + instr := dst[n] + if instr == nil { + continue + } + if !usesDefer { + if _, ok := instr.(*RunDefers); ok { + continue + } + } + dst[i] = instr + i-- + } + off := i + 1 - len(head) + // aid GC + clearInstrs(dst[:off]) + dst = dst[off:] + copy(dst, head) + b.Instrs = dst + } else { + // not enough space, so allocate a new slice and copy + // over. + dst := make([]Instruction, ns) + copy(dst, head) + + for _, instr := range b.Instrs { + if instr == nil { + continue + } + if !usesDefer { + if _, ok := instr.(*RunDefers); ok { + continue + } + } + dst[j] = instr + j++ + } + b.Instrs = dst + } + } + + // Remove any fn.Locals that were lifted. + j := 0 + for _, l := range fn.Locals { + if l.index < 0 { + fn.Locals[j] = l + j++ + } + } + // Nil out fn.Locals[j:] to aid GC. + for i := j; i < len(fn.Locals); i++ { + fn.Locals[i] = nil + } + fn.Locals = fn.Locals[:j] +} + +func hasDirectReferrer(instr Instruction) bool { + for _, instr := range *instr.Referrers() { + switch instr.(type) { + case *Phi, *Sigma: + // ignore + default: + return true + } + } + return false +} + +func markLiveNodes(blocks []*BasicBlock, newPhis newPhiMap, newSigmas newSigmaMap) { + // Phi and sigma nodes are considered live if a non-phi, non-sigma + // node uses them. Once we find a node that is live, we mark all + // of its operands as used, too. + for _, npList := range newPhis { + for _, np := range npList { + phi := np.phi + if !phi.live && hasDirectReferrer(phi) { + markLivePhi(phi) + } + } + } + for _, npList := range newSigmas { + for _, np := range npList { + for _, sigma := range np.sigmas { + if sigma != nil && !sigma.live && hasDirectReferrer(sigma) { + markLiveSigma(sigma) + } + } + } + } + // Existing φ-nodes due to && and || operators + // are all considered live (see Go issue 19622). 
+ for _, b := range blocks { + for _, phi := range b.phis() { + markLivePhi(phi.(*Phi)) + } + } +} + +func markLivePhi(phi *Phi) { + phi.live = true + for _, rand := range phi.Edges { + switch rand := rand.(type) { + case *Phi: + if !rand.live { + markLivePhi(rand) + } + case *Sigma: + if !rand.live { + markLiveSigma(rand) + } + } + } +} + +func markLiveSigma(sigma *Sigma) { + sigma.live = true + switch rand := sigma.X.(type) { + case *Phi: + if !rand.live { + markLivePhi(rand) + } + case *Sigma: + if !rand.live { + markLiveSigma(rand) + } + } +} + +// simplifyPhis replaces trivial phis with non-phi alternatives. Phi +// nodes where all edges are identical, or consist of only the phi +// itself and one other value, may be replaced with the value. +func simplifyPhis(newPhis newPhiMap) { + // find all phis that are trivial and can be replaced with a + // non-phi value. run until we reach a fixpoint, because replacing + // a phi may make other phis trivial. + for changed := true; changed; { + changed = false + for _, npList := range newPhis { + for _, np := range npList { + if np.phi.live { + // we're reusing 'live' to mean 'dead' in the context of simplifyPhis + continue + } + if r, ok := isUselessPhi(np.phi); ok { + // useless phi, replace its uses with the + // replacement value. the dead phi pass will clean + // up the phi afterwards. + replaceAll(np.phi, r) + np.phi.live = true + changed = true + } + } + } + } + + for _, npList := range newPhis { + for _, np := range npList { + np.phi.live = false + } + } +} + +type BlockSet struct { + idx int + values []bool + count int +} + +func NewBlockSet(size int) *BlockSet { + return &BlockSet{values: make([]bool, size)} +} + +func (s *BlockSet) Set(s2 *BlockSet) { + copy(s.values, s2.values) + s.count = 0 + for _, v := range s.values { + if v { + s.count++ + } + } +} + +func (s *BlockSet) Num() int { + return s.count +} + +func (s *BlockSet) Has(b *BasicBlock) bool { + if b.Index >= len(s.values) { + return false + } + return s.values[b.Index] +} + +// add adds b to the set and returns true if the set changed. +func (s *BlockSet) Add(b *BasicBlock) bool { + if s.values[b.Index] { + return false + } + s.count++ + s.values[b.Index] = true + s.idx = b.Index + + return true +} + +func (s *BlockSet) Clear() { + for j := range s.values { + s.values[j] = false + } + s.count = 0 +} + +// take removes an arbitrary element from a set s and +// returns its index, or returns -1 if empty. 
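// A set can therefore be drained with a loop of the following form
// (an illustrative sketch, where visit stands in for arbitrary per-block work
// and fn is the function whose blocks the set indexes):
//
//	for i := set.Take(); i != -1; i = set.Take() {
//		visit(fn.Blocks[i])
//	}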
+func (s *BlockSet) Take() int { + // [i, end] + for i := s.idx; i < len(s.values); i++ { + if s.values[i] { + s.values[i] = false + s.idx = i + s.count-- + return i + } + } + + // [start, i) + for i := 0; i < s.idx; i++ { + if s.values[i] { + s.values[i] = false + s.idx = i + s.count-- + return i + } + } + + return -1 +} + +type closure struct { + span []uint32 + reachables []interval +} + +type interval uint32 + +const ( + flagMask = 1 << 31 + numBits = 20 + lengthBits = 32 - numBits - 1 + lengthMask = (1<>numBits + } else { + // large interval + i++ + start = uint32(inv & numMask) + end = uint32(r[i]) + } + if idx >= start && idx <= end { + return true + } + } + return false +} + +func (c closure) reachable(id int) []interval { + return c.reachables[c.span[id]:c.span[id+1]] +} + +func (c closure) walk(current *BasicBlock, b *BasicBlock, visited []bool) { + visited[b.Index] = true + for _, succ := range b.Succs { + if visited[succ.Index] { + continue + } + visited[succ.Index] = true + c.walk(current, succ, visited) + } +} + +func transitiveClosure(fn *Function) *closure { + reachable := make([]bool, len(fn.Blocks)) + c := &closure{} + c.span = make([]uint32, len(fn.Blocks)+1) + + addInterval := func(start, end uint32) { + if l := end - start; l <= 1<= 0 { // store of zero to Alloc cell + // Replace dominated loads by the zero value. + renaming[instr.index] = nil + if debugLifting { + fmt.Fprintf(os.Stderr, "\tkill alloc %s\n", instr) + } + // Delete the Alloc. + u.Instrs[i] = nil + u.gaps++ + } + + case *Store: + if alloc, ok := instr.Addr.(*Alloc); ok && alloc.index >= 0 { // store to Alloc cell + // Replace dominated loads by the stored value. + renaming[alloc.index] = instr.Val + if debugLifting { + fmt.Fprintf(os.Stderr, "\tkill store %s; new value: %s\n", + instr, instr.Val.Name()) + } + if refs := instr.Addr.Referrers(); refs != nil { + *refs = removeInstr(*refs, instr) + } + if refs := instr.Val.Referrers(); refs != nil { + *refs = removeInstr(*refs, instr) + } + // Delete the Store. + u.Instrs[i] = nil + u.gaps++ + } + + case *Load: + if alloc, ok := instr.X.(*Alloc); ok && alloc.index >= 0 { // load of Alloc cell + // In theory, we wouldn't be able to replace loads + // directly, because a loaded value could be used in + // different branches, in which case it should be + // replaced with different sigma nodes. But we can't + // simply defer replacement, either, because then + // later stores might incorrectly affect this load. + // + // To avoid doing renaming on _all_ values (instead of + // just loads and stores like we're doing), we make + // sure during code generation that each load is only + // used in one block. For example, in constant switch + // statements, where the tag is only evaluated once, + // we store it in a temporary and load it for each + // comparison, so that we have individual loads to + // replace. + newval := renamed(u.Parent(), renaming, alloc) + if debugLifting { + fmt.Fprintf(os.Stderr, "\tupdate load %s = %s with %s\n", + instr.Name(), instr, newval) + } + replaceAll(instr, newval) + u.Instrs[i] = nil + u.gaps++ + } + + case *DebugRef: + if x, ok := instr.X.(*Alloc); ok && x.index >= 0 { + if instr.IsAddr { + instr.X = renamed(u.Parent(), renaming, x) + instr.IsAddr = false + + // Add DebugRef to instr.X's referrers. + if refs := instr.X.Referrers(); refs != nil { + *refs = append(*refs, instr) + } + } else { + // A source expression denotes the address + // of an Alloc that was optimized away. + instr.X = nil + + // Delete the DebugRef. 
+ u.Instrs[i] = nil + u.gaps++ + } + } + } + } + + // update all outgoing sigma nodes with the dominating store + for _, sigmas := range newSigmas[u.Index] { + for _, sigma := range sigmas.sigmas { + if sigma == nil { + continue + } + sigma.X = renamed(u.Parent(), renaming, sigmas.alloc) + } + } + + // For each φ-node in a CFG successor, rename the edge. + for succi, v := range u.Succs { + phis := newPhis[v.Index] + if len(phis) == 0 { + continue + } + i := v.predIndex(u) + for _, np := range phis { + phi := np.phi + alloc := np.alloc + // if there's a sigma node, use it, else use the dominating value + var newval Value + for _, sigmas := range newSigmas[u.Index] { + if sigmas.alloc == alloc && sigmas.sigmas[succi] != nil { + newval = sigmas.sigmas[succi] + break + } + } + if newval == nil { + newval = renamed(u.Parent(), renaming, alloc) + } + if debugLifting { + fmt.Fprintf(os.Stderr, "\tsetphi %s edge %s -> %s (#%d) (alloc=%s) := %s\n", + phi.Name(), u, v, i, alloc.Name(), newval.Name()) + } + phi.Edges[i] = newval + if prefs := newval.Referrers(); prefs != nil { + *prefs = append(*prefs, phi) + } + } + } + + // Continue depth-first recursion over domtree, pushing a + // fresh copy of the renaming map for each subtree. + r := make([]Value, len(renaming)) + for _, v := range u.dom.children { + // XXX add debugging + copy(r, renaming) + + // on entry to a block, the incoming sigma nodes become the new values for their alloc + if idx := u.succIndex(v); idx != -1 { + for _, sigma := range newSigmas[u.Index] { + if sigma.sigmas[idx] != nil { + r[sigma.alloc.index] = sigma.sigmas[idx] + } + } + } + rename(v, r, newPhis, newSigmas) + } + +} diff --git a/vendor/honnef.co/go/tools/go/ir/lvalue.go b/vendor/honnef.co/go/tools/go/ir/lvalue.go new file mode 100644 index 000000000..f676a1f7a --- /dev/null +++ b/vendor/honnef.co/go/tools/go/ir/lvalue.go @@ -0,0 +1,116 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package ir + +// lvalues are the union of addressable expressions and map-index +// expressions. + +import ( + "go/ast" + "go/types" +) + +// An lvalue represents an assignable location that may appear on the +// left-hand side of an assignment. This is a generalization of a +// pointer to permit updates to elements of maps. +// +type lvalue interface { + store(fn *Function, v Value, source ast.Node) // stores v into the location + load(fn *Function, source ast.Node) Value // loads the contents of the location + address(fn *Function) Value // address of the location + typ() types.Type // returns the type of the location +} + +// An address is an lvalue represented by a true pointer. +type address struct { + addr Value + expr ast.Expr // source syntax of the value (not address) [debug mode] +} + +func (a *address) load(fn *Function, source ast.Node) Value { + return emitLoad(fn, a.addr, source) +} + +func (a *address) store(fn *Function, v Value, source ast.Node) { + store := emitStore(fn, a.addr, v, source) + if a.expr != nil { + // store.Val is v, converted for assignability. + emitDebugRef(fn, a.expr, store.Val, false) + } +} + +func (a *address) address(fn *Function) Value { + if a.expr != nil { + emitDebugRef(fn, a.expr, a.addr, true) + } + return a.addr +} + +func (a *address) typ() types.Type { + return deref(a.addr.Type()) +} + +// An element is an lvalue represented by m[k], the location of an +// element of a map. 
These locations are not addressable +// since pointers cannot be formed from them, but they do support +// load() and store(). +// +type element struct { + m, k Value // map + t types.Type // map element type +} + +func (e *element) load(fn *Function, source ast.Node) Value { + l := &MapLookup{ + X: e.m, + Index: e.k, + } + l.setType(e.t) + return fn.emit(l, source) +} + +func (e *element) store(fn *Function, v Value, source ast.Node) { + up := &MapUpdate{ + Map: e.m, + Key: e.k, + Value: emitConv(fn, v, e.t, source), + } + fn.emit(up, source) +} + +func (e *element) address(fn *Function) Value { + panic("map elements are not addressable") +} + +func (e *element) typ() types.Type { + return e.t +} + +// A blank is a dummy variable whose name is "_". +// It is not reified: loads are illegal and stores are ignored. +// +type blank struct{} + +func (bl blank) load(fn *Function, source ast.Node) Value { + panic("blank.load is illegal") +} + +func (bl blank) store(fn *Function, v Value, source ast.Node) { + s := &BlankStore{ + Val: v, + } + fn.emit(s, source) +} + +func (bl blank) address(fn *Function) Value { + panic("blank var is not addressable") +} + +func (bl blank) typ() types.Type { + // This should be the type of the blank Ident; the typechecker + // doesn't provide this yet, but fortunately, we don't need it + // yet either. + panic("blank.typ is unimplemented") +} diff --git a/vendor/honnef.co/go/tools/go/ir/methods.go b/vendor/honnef.co/go/tools/go/ir/methods.go new file mode 100644 index 000000000..517f448b8 --- /dev/null +++ b/vendor/honnef.co/go/tools/go/ir/methods.go @@ -0,0 +1,239 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package ir + +// This file defines utilities for population of method sets. + +import ( + "fmt" + "go/types" +) + +// MethodValue returns the Function implementing method sel, building +// wrapper methods on demand. It returns nil if sel denotes an +// abstract (interface) method. +// +// Precondition: sel.Kind() == MethodVal. +// +// Thread-safe. +// +// EXCLUSIVE_LOCKS_ACQUIRED(prog.methodsMu) +// +func (prog *Program) MethodValue(sel *types.Selection) *Function { + if sel.Kind() != types.MethodVal { + panic(fmt.Sprintf("MethodValue(%s) kind != MethodVal", sel)) + } + T := sel.Recv() + if isInterface(T) { + return nil // abstract method + } + if prog.mode&LogSource != 0 { + defer logStack("MethodValue %s %v", T, sel)() + } + + prog.methodsMu.Lock() + defer prog.methodsMu.Unlock() + + return prog.addMethod(prog.createMethodSet(T), sel) +} + +// LookupMethod returns the implementation of the method of type T +// identified by (pkg, name). It returns nil if the method exists but +// is abstract, and panics if T has no such method. +// +func (prog *Program) LookupMethod(T types.Type, pkg *types.Package, name string) *Function { + sel := prog.MethodSets.MethodSet(T).Lookup(pkg, name) + if sel == nil { + panic(fmt.Sprintf("%s has no method %s", T, types.Id(pkg, name))) + } + return prog.MethodValue(sel) +} + +// methodSet contains the (concrete) methods of a non-interface type. +type methodSet struct { + mapping map[string]*Function // populated lazily + complete bool // mapping contains all methods +} + +// Precondition: !isInterface(T). 
+// EXCLUSIVE_LOCKS_REQUIRED(prog.methodsMu) +func (prog *Program) createMethodSet(T types.Type) *methodSet { + mset, ok := prog.methodSets.At(T).(*methodSet) + if !ok { + mset = &methodSet{mapping: make(map[string]*Function)} + prog.methodSets.Set(T, mset) + } + return mset +} + +// EXCLUSIVE_LOCKS_REQUIRED(prog.methodsMu) +func (prog *Program) addMethod(mset *methodSet, sel *types.Selection) *Function { + if sel.Kind() == types.MethodExpr { + panic(sel) + } + id := sel.Obj().Id() + fn := mset.mapping[id] + if fn == nil { + obj := sel.Obj().(*types.Func) + + needsPromotion := len(sel.Index()) > 1 + needsIndirection := !isPointer(recvType(obj)) && isPointer(sel.Recv()) + if needsPromotion || needsIndirection { + fn = makeWrapper(prog, sel) + } else { + fn = prog.declaredFunc(obj) + } + if fn.Signature.Recv() == nil { + panic(fn) // missing receiver + } + mset.mapping[id] = fn + } + return fn +} + +// RuntimeTypes returns a new unordered slice containing all +// concrete types in the program for which a complete (non-empty) +// method set is required at run-time. +// +// Thread-safe. +// +// EXCLUSIVE_LOCKS_ACQUIRED(prog.methodsMu) +// +func (prog *Program) RuntimeTypes() []types.Type { + prog.methodsMu.Lock() + defer prog.methodsMu.Unlock() + + var res []types.Type + prog.methodSets.Iterate(func(T types.Type, v interface{}) { + if v.(*methodSet).complete { + res = append(res, T) + } + }) + return res +} + +// declaredFunc returns the concrete function/method denoted by obj. +// Panic ensues if there is none. +// +func (prog *Program) declaredFunc(obj *types.Func) *Function { + if v := prog.packageLevelValue(obj); v != nil { + return v.(*Function) + } + panic("no concrete method: " + obj.String()) +} + +// needMethodsOf ensures that runtime type information (including the +// complete method set) is available for the specified type T and all +// its subcomponents. +// +// needMethodsOf must be called for at least every type that is an +// operand of some MakeInterface instruction, and for the type of +// every exported package member. +// +// Precondition: T is not a method signature (*Signature with Recv()!=nil). +// +// Thread-safe. (Called via emitConv from multiple builder goroutines.) +// +// TODO(adonovan): make this faster. It accounts for 20% of SSA build time. +// +// EXCLUSIVE_LOCKS_ACQUIRED(prog.methodsMu) +// +func (prog *Program) needMethodsOf(T types.Type) { + prog.methodsMu.Lock() + prog.needMethods(T, false) + prog.methodsMu.Unlock() +} + +// Precondition: T is not a method signature (*Signature with Recv()!=nil). +// Recursive case: skip => don't create methods for T. +// +// EXCLUSIVE_LOCKS_REQUIRED(prog.methodsMu) +// +func (prog *Program) needMethods(T types.Type, skip bool) { + // Each package maintains its own set of types it has visited. + if prevSkip, ok := prog.runtimeTypes.At(T).(bool); ok { + // needMethods(T) was previously called + if !prevSkip || skip { + return // already seen, with same or false 'skip' value + } + } + prog.runtimeTypes.Set(T, skip) + + tmset := prog.MethodSets.MethodSet(T) + + if !skip && !isInterface(T) && tmset.Len() > 0 { + // Create methods of T. + mset := prog.createMethodSet(T) + if !mset.complete { + mset.complete = true + n := tmset.Len() + for i := 0; i < n; i++ { + prog.addMethod(mset, tmset.At(i)) + } + } + } + + // Recursion over signatures of each method. 
+ for i := 0; i < tmset.Len(); i++ { + sig := tmset.At(i).Type().(*types.Signature) + prog.needMethods(sig.Params(), false) + prog.needMethods(sig.Results(), false) + } + + switch t := T.(type) { + case *types.Basic: + // nop + + case *types.Interface: + // nop---handled by recursion over method set. + + case *types.Pointer: + prog.needMethods(t.Elem(), false) + + case *types.Slice: + prog.needMethods(t.Elem(), false) + + case *types.Chan: + prog.needMethods(t.Elem(), false) + + case *types.Map: + prog.needMethods(t.Key(), false) + prog.needMethods(t.Elem(), false) + + case *types.Signature: + if t.Recv() != nil { + panic(fmt.Sprintf("Signature %s has Recv %s", t, t.Recv())) + } + prog.needMethods(t.Params(), false) + prog.needMethods(t.Results(), false) + + case *types.Named: + // A pointer-to-named type can be derived from a named + // type via reflection. It may have methods too. + prog.needMethods(types.NewPointer(T), false) + + // Consider 'type T struct{S}' where S has methods. + // Reflection provides no way to get from T to struct{S}, + // only to S, so the method set of struct{S} is unwanted, + // so set 'skip' flag during recursion. + prog.needMethods(t.Underlying(), true) + + case *types.Array: + prog.needMethods(t.Elem(), false) + + case *types.Struct: + for i, n := 0, t.NumFields(); i < n; i++ { + prog.needMethods(t.Field(i).Type(), false) + } + + case *types.Tuple: + for i, n := 0, t.Len(); i < n; i++ { + prog.needMethods(t.At(i).Type(), false) + } + + default: + panic(T) + } +} diff --git a/vendor/honnef.co/go/tools/go/ir/mode.go b/vendor/honnef.co/go/tools/go/ir/mode.go new file mode 100644 index 000000000..da548fdbb --- /dev/null +++ b/vendor/honnef.co/go/tools/go/ir/mode.go @@ -0,0 +1,98 @@ +// Copyright 2015 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package ir + +// This file defines the BuilderMode type and its command-line flag. + +import ( + "bytes" + "fmt" +) + +// BuilderMode is a bitmask of options for diagnostics and checking. +// +// *BuilderMode satisfies the flag.Value interface. Example: +// +// var mode = ir.BuilderMode(0) +// func init() { flag.Var(&mode, "build", ir.BuilderModeDoc) } +// +type BuilderMode uint + +const ( + PrintPackages BuilderMode = 1 << iota // Print package inventory to stdout + PrintFunctions // Print function IR code to stdout + PrintSource // Print source code when printing function IR + LogSource // Log source locations as IR builder progresses + SanityCheckFunctions // Perform sanity checking of function bodies + NaiveForm // Build naïve IR form: don't replace local loads/stores with registers + GlobalDebug // Enable debug info for all packages +) + +const BuilderModeDoc = `Options controlling the IR builder. +The value is a sequence of zero or more of these letters: +C perform sanity [C]hecking of the IR form. +D include [D]ebug info for every function. +P print [P]ackage inventory. +F print [F]unction IR code. +A print [A]ST nodes responsible for IR instructions +S log [S]ource locations as IR builder progresses. +N build [N]aive IR form: don't replace local loads/stores with registers. 
+` + +func (m BuilderMode) String() string { + var buf bytes.Buffer + if m&GlobalDebug != 0 { + buf.WriteByte('D') + } + if m&PrintPackages != 0 { + buf.WriteByte('P') + } + if m&PrintFunctions != 0 { + buf.WriteByte('F') + } + if m&PrintSource != 0 { + buf.WriteByte('A') + } + if m&LogSource != 0 { + buf.WriteByte('S') + } + if m&SanityCheckFunctions != 0 { + buf.WriteByte('C') + } + if m&NaiveForm != 0 { + buf.WriteByte('N') + } + return buf.String() +} + +// Set parses the flag characters in s and updates *m. +func (m *BuilderMode) Set(s string) error { + var mode BuilderMode + for _, c := range s { + switch c { + case 'D': + mode |= GlobalDebug + case 'P': + mode |= PrintPackages + case 'F': + mode |= PrintFunctions + case 'A': + mode |= PrintSource + case 'S': + mode |= LogSource + case 'C': + mode |= SanityCheckFunctions + case 'N': + mode |= NaiveForm + default: + return fmt.Errorf("unknown BuilderMode option: %q", c) + } + } + *m = mode + return nil +} + +// Get returns m. +func (m BuilderMode) Get() interface{} { return m } diff --git a/vendor/honnef.co/go/tools/go/ir/print.go b/vendor/honnef.co/go/tools/go/ir/print.go new file mode 100644 index 000000000..f6ed431b6 --- /dev/null +++ b/vendor/honnef.co/go/tools/go/ir/print.go @@ -0,0 +1,472 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package ir + +// This file implements the String() methods for all Value and +// Instruction types. + +import ( + "bytes" + "fmt" + "go/types" + "io" + "reflect" + "sort" + + "honnef.co/go/tools/go/types/typeutil" +) + +// relName returns the name of v relative to i. +// In most cases, this is identical to v.Name(), but references to +// Functions (including methods) and Globals use RelString and +// all types are displayed with relType, so that only cross-package +// references are package-qualified. +// +func relName(v Value, i Instruction) string { + if v == nil { + return "" + } + var from *types.Package + if i != nil { + from = i.Parent().pkg() + } + switch v := v.(type) { + case Member: // *Function or *Global + return v.RelString(from) + } + return v.Name() +} + +func relType(t types.Type, from *types.Package) string { + return types.TypeString(t, types.RelativeTo(from)) +} + +func relString(m Member, from *types.Package) string { + // NB: not all globals have an Object (e.g. init$guard), + // so use Package().Object not Object.Package(). + if pkg := m.Package().Pkg; pkg != nil && pkg != from { + return fmt.Sprintf("%s.%s", pkg.Path(), m.Name()) + } + return m.Name() +} + +// Value.String() +// +// This method is provided only for debugging. +// It never appears in disassembly, which uses Value.Name(). 
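// As a quick illustration of the BuilderMode handling defined in mode.go above
// (a sketch, not taken from the original source): Set accepts the option
// letters in any order, while String re-emits them in a fixed order.
//
//	var mode ir.BuilderMode
//	_ = mode.Set("NFC")
//	fmt.Println(mode) // prints "FCN"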
+ +func (v *Parameter) String() string { + from := v.Parent().pkg() + return fmt.Sprintf("Parameter <%s> {%s}", relType(v.Type(), from), v.name) +} + +func (v *FreeVar) String() string { + from := v.Parent().pkg() + return fmt.Sprintf("FreeVar <%s> %s", relType(v.Type(), from), v.Name()) +} + +func (v *Builtin) String() string { + return fmt.Sprintf("Builtin %s", v.Name()) +} + +// Instruction.String() + +func (v *Alloc) String() string { + from := v.Parent().pkg() + storage := "Stack" + if v.Heap { + storage = "Heap" + } + return fmt.Sprintf("%sAlloc <%s>", storage, relType(v.Type(), from)) +} + +func (v *Sigma) String() string { + from := v.Parent().pkg() + s := fmt.Sprintf("Sigma <%s> [b%d] %s", relType(v.Type(), from), v.From.Index, v.X.Name()) + return s +} + +func (v *Phi) String() string { + var b bytes.Buffer + fmt.Fprintf(&b, "Phi <%s>", v.Type()) + for i, edge := range v.Edges { + b.WriteString(" ") + // Be robust against malformed CFG. + if v.block == nil { + b.WriteString("??") + continue + } + block := -1 + if i < len(v.block.Preds) { + block = v.block.Preds[i].Index + } + fmt.Fprintf(&b, "%d:", block) + edgeVal := "" // be robust + if edge != nil { + edgeVal = relName(edge, v) + } + b.WriteString(edgeVal) + } + return b.String() +} + +func printCall(v *CallCommon, prefix string, instr Instruction) string { + var b bytes.Buffer + if !v.IsInvoke() { + if value, ok := instr.(Value); ok { + fmt.Fprintf(&b, "%s <%s> %s", prefix, relType(value.Type(), instr.Parent().pkg()), relName(v.Value, instr)) + } else { + fmt.Fprintf(&b, "%s %s", prefix, relName(v.Value, instr)) + } + } else { + if value, ok := instr.(Value); ok { + fmt.Fprintf(&b, "%sInvoke <%s> %s.%s", prefix, relType(value.Type(), instr.Parent().pkg()), relName(v.Value, instr), v.Method.Name()) + } else { + fmt.Fprintf(&b, "%sInvoke %s.%s", prefix, relName(v.Value, instr), v.Method.Name()) + } + } + for _, arg := range v.Args { + b.WriteString(" ") + b.WriteString(relName(arg, instr)) + } + return b.String() +} + +func (c *CallCommon) String() string { + return printCall(c, "", nil) +} + +func (v *Call) String() string { + return printCall(&v.Call, "Call", v) +} + +func (v *BinOp) String() string { + return fmt.Sprintf("BinOp <%s> {%s} %s %s", relType(v.Type(), v.Parent().pkg()), v.Op.String(), relName(v.X, v), relName(v.Y, v)) +} + +func (v *UnOp) String() string { + return fmt.Sprintf("UnOp <%s> {%s} %s", relType(v.Type(), v.Parent().pkg()), v.Op.String(), relName(v.X, v)) +} + +func (v *Load) String() string { + return fmt.Sprintf("Load <%s> %s", relType(v.Type(), v.Parent().pkg()), relName(v.X, v)) +} + +func printConv(prefix string, v, x Value) string { + from := v.Parent().pkg() + return fmt.Sprintf("%s <%s> %s", + prefix, + relType(v.Type(), from), + relName(x, v.(Instruction))) +} + +func (v *ChangeType) String() string { return printConv("ChangeType", v, v.X) } +func (v *Convert) String() string { return printConv("Convert", v, v.X) } +func (v *ChangeInterface) String() string { return printConv("ChangeInterface", v, v.X) } +func (v *MakeInterface) String() string { return printConv("MakeInterface", v, v.X) } + +func (v *MakeClosure) String() string { + from := v.Parent().pkg() + var b bytes.Buffer + fmt.Fprintf(&b, "MakeClosure <%s> %s", relType(v.Type(), from), relName(v.Fn, v)) + if v.Bindings != nil { + for _, c := range v.Bindings { + b.WriteString(" ") + b.WriteString(relName(c, v)) + } + } + return b.String() +} + +func (v *MakeSlice) String() string { + from := v.Parent().pkg() + return 
fmt.Sprintf("MakeSlice <%s> %s %s", + relType(v.Type(), from), + relName(v.Len, v), + relName(v.Cap, v)) +} + +func (v *Slice) String() string { + from := v.Parent().pkg() + return fmt.Sprintf("Slice <%s> %s %s %s %s", + relType(v.Type(), from), relName(v.X, v), relName(v.Low, v), relName(v.High, v), relName(v.Max, v)) +} + +func (v *MakeMap) String() string { + res := "" + if v.Reserve != nil { + res = relName(v.Reserve, v) + } + from := v.Parent().pkg() + return fmt.Sprintf("MakeMap <%s> %s", relType(v.Type(), from), res) +} + +func (v *MakeChan) String() string { + from := v.Parent().pkg() + return fmt.Sprintf("MakeChan <%s> %s", relType(v.Type(), from), relName(v.Size, v)) +} + +func (v *FieldAddr) String() string { + from := v.Parent().pkg() + st := deref(v.X.Type()).Underlying().(*types.Struct) + // Be robust against a bad index. + name := "?" + if 0 <= v.Field && v.Field < st.NumFields() { + name = st.Field(v.Field).Name() + } + return fmt.Sprintf("FieldAddr <%s> [%d] (%s) %s", relType(v.Type(), from), v.Field, name, relName(v.X, v)) +} + +func (v *Field) String() string { + st := v.X.Type().Underlying().(*types.Struct) + // Be robust against a bad index. + name := "?" + if 0 <= v.Field && v.Field < st.NumFields() { + name = st.Field(v.Field).Name() + } + from := v.Parent().pkg() + return fmt.Sprintf("Field <%s> [%d] (%s) %s", relType(v.Type(), from), v.Field, name, relName(v.X, v)) +} + +func (v *IndexAddr) String() string { + from := v.Parent().pkg() + return fmt.Sprintf("IndexAddr <%s> %s %s", relType(v.Type(), from), relName(v.X, v), relName(v.Index, v)) +} + +func (v *Index) String() string { + from := v.Parent().pkg() + return fmt.Sprintf("Index <%s> %s %s", relType(v.Type(), from), relName(v.X, v), relName(v.Index, v)) +} + +func (v *MapLookup) String() string { + from := v.Parent().pkg() + return fmt.Sprintf("MapLookup <%s> %s %s", relType(v.Type(), from), relName(v.X, v), relName(v.Index, v)) +} + +func (v *StringLookup) String() string { + from := v.Parent().pkg() + return fmt.Sprintf("StringLookup <%s> %s %s", relType(v.Type(), from), relName(v.X, v), relName(v.Index, v)) +} + +func (v *Range) String() string { + from := v.Parent().pkg() + return fmt.Sprintf("Range <%s> %s", relType(v.Type(), from), relName(v.X, v)) +} + +func (v *Next) String() string { + from := v.Parent().pkg() + return fmt.Sprintf("Next <%s> %s", relType(v.Type(), from), relName(v.Iter, v)) +} + +func (v *TypeAssert) String() string { + from := v.Parent().pkg() + return fmt.Sprintf("TypeAssert <%s> %s", relType(v.Type(), from), relName(v.X, v)) +} + +func (v *Extract) String() string { + from := v.Parent().pkg() + name := v.Tuple.Type().(*types.Tuple).At(v.Index).Name() + return fmt.Sprintf("Extract <%s> [%d] (%s) %s", relType(v.Type(), from), v.Index, name, relName(v.Tuple, v)) +} + +func (s *Jump) String() string { + // Be robust against malformed CFG. + block := -1 + if s.block != nil && len(s.block.Succs) == 1 { + block = s.block.Succs[0].Index + } + str := fmt.Sprintf("Jump → b%d", block) + if s.Comment != "" { + str = fmt.Sprintf("%s # %s", str, s.Comment) + } + return str +} + +func (s *Unreachable) String() string { + // Be robust against malformed CFG. + block := -1 + if s.block != nil && len(s.block.Succs) == 1 { + block = s.block.Succs[0].Index + } + return fmt.Sprintf("Unreachable → b%d", block) +} + +func (s *If) String() string { + // Be robust against malformed CFG. 
+ tblock, fblock := -1, -1 + if s.block != nil && len(s.block.Succs) == 2 { + tblock = s.block.Succs[0].Index + fblock = s.block.Succs[1].Index + } + return fmt.Sprintf("If %s → b%d b%d", relName(s.Cond, s), tblock, fblock) +} + +func (s *ConstantSwitch) String() string { + var b bytes.Buffer + fmt.Fprintf(&b, "ConstantSwitch %s", relName(s.Tag, s)) + for _, cond := range s.Conds { + fmt.Fprintf(&b, " %s", relName(cond, s)) + } + fmt.Fprint(&b, " →") + for _, succ := range s.block.Succs { + fmt.Fprintf(&b, " b%d", succ.Index) + } + return b.String() +} + +func (s *TypeSwitch) String() string { + from := s.Parent().pkg() + var b bytes.Buffer + fmt.Fprintf(&b, "TypeSwitch <%s> %s", relType(s.typ, from), relName(s.Tag, s)) + for _, cond := range s.Conds { + fmt.Fprintf(&b, " %q", relType(cond, s.block.parent.pkg())) + } + return b.String() +} + +func (s *Go) String() string { + return printCall(&s.Call, "Go", s) +} + +func (s *Panic) String() string { + // Be robust against malformed CFG. + block := -1 + if s.block != nil && len(s.block.Succs) == 1 { + block = s.block.Succs[0].Index + } + return fmt.Sprintf("Panic %s → b%d", relName(s.X, s), block) +} + +func (s *Return) String() string { + var b bytes.Buffer + b.WriteString("Return") + for _, r := range s.Results { + b.WriteString(" ") + b.WriteString(relName(r, s)) + } + return b.String() +} + +func (*RunDefers) String() string { + return "RunDefers" +} + +func (s *Send) String() string { + return fmt.Sprintf("Send %s %s", relName(s.Chan, s), relName(s.X, s)) +} + +func (recv *Recv) String() string { + from := recv.Parent().pkg() + return fmt.Sprintf("Recv <%s> %s", relType(recv.Type(), from), relName(recv.Chan, recv)) +} + +func (s *Defer) String() string { + return printCall(&s.Call, "Defer", s) +} + +func (s *Select) String() string { + var b bytes.Buffer + for i, st := range s.States { + if i > 0 { + b.WriteString(", ") + } + if st.Dir == types.RecvOnly { + b.WriteString("<-") + b.WriteString(relName(st.Chan, s)) + } else { + b.WriteString(relName(st.Chan, s)) + b.WriteString("<-") + b.WriteString(relName(st.Send, s)) + } + } + non := "" + if !s.Blocking { + non = "Non" + } + from := s.Parent().pkg() + return fmt.Sprintf("Select%sBlocking <%s> [%s]", non, relType(s.Type(), from), b.String()) +} + +func (s *Store) String() string { + return fmt.Sprintf("Store {%s} %s %s", + s.Val.Type(), relName(s.Addr, s), relName(s.Val, s)) +} + +func (s *BlankStore) String() string { + return fmt.Sprintf("BlankStore %s", relName(s.Val, s)) +} + +func (s *MapUpdate) String() string { + return fmt.Sprintf("MapUpdate %s %s %s", relName(s.Map, s), relName(s.Key, s), relName(s.Value, s)) +} + +func (s *DebugRef) String() string { + p := s.Parent().Prog.Fset.Position(s.Pos()) + var descr interface{} + if s.object != nil { + descr = s.object // e.g. "var x int" + } else { + descr = reflect.TypeOf(s.Expr) // e.g. "*ast.CallExpr" + } + var addr string + if s.IsAddr { + addr = "address of " + } + return fmt.Sprintf("; %s%s @ %d:%d is %s", addr, descr, p.Line, p.Column, s.X.Name()) +} + +func (p *Package) String() string { + return "package " + p.Pkg.Path() +} + +var _ io.WriterTo = (*Package)(nil) // *Package implements io.Writer + +func (p *Package) WriteTo(w io.Writer) (int64, error) { + var buf bytes.Buffer + WritePackage(&buf, p) + n, err := w.Write(buf.Bytes()) + return int64(n), err +} + +// WritePackage writes to buf a human-readable summary of p. 
+func WritePackage(buf *bytes.Buffer, p *Package) { + fmt.Fprintf(buf, "%s:\n", p) + + var names []string + maxname := 0 + for name := range p.Members { + if l := len(name); l > maxname { + maxname = l + } + names = append(names, name) + } + + from := p.Pkg + sort.Strings(names) + for _, name := range names { + switch mem := p.Members[name].(type) { + case *NamedConst: + fmt.Fprintf(buf, " const %-*s %s = %s\n", + maxname, name, mem.Name(), mem.Value.RelString(from)) + + case *Function: + fmt.Fprintf(buf, " func %-*s %s\n", + maxname, name, relType(mem.Type(), from)) + + case *Type: + fmt.Fprintf(buf, " type %-*s %s\n", + maxname, name, relType(mem.Type().Underlying(), from)) + for _, meth := range typeutil.IntuitiveMethodSet(mem.Type(), &p.Prog.MethodSets) { + fmt.Fprintf(buf, " %s\n", types.SelectionString(meth, types.RelativeTo(from))) + } + + case *Global: + fmt.Fprintf(buf, " var %-*s %s\n", + maxname, name, relType(mem.Type().(*types.Pointer).Elem(), from)) + } + } + + fmt.Fprintf(buf, "\n") +} diff --git a/vendor/honnef.co/go/tools/go/ir/sanity.go b/vendor/honnef.co/go/tools/go/ir/sanity.go new file mode 100644 index 000000000..1d4c5f74a --- /dev/null +++ b/vendor/honnef.co/go/tools/go/ir/sanity.go @@ -0,0 +1,561 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package ir + +// An optional pass for sanity-checking invariants of the IR representation. +// Currently it checks CFG invariants but little at the instruction level. + +import ( + "fmt" + "go/types" + "io" + "os" + "strings" +) + +type sanity struct { + reporter io.Writer + fn *Function + block *BasicBlock + instrs map[Instruction]struct{} + insane bool +} + +// sanityCheck performs integrity checking of the IR representation +// of the function fn and returns true if it was valid. Diagnostics +// are written to reporter if non-nil, os.Stderr otherwise. Some +// diagnostics are only warnings and do not imply a negative result. +// +// Sanity-checking is intended to facilitate the debugging of code +// transformation passes. +// +func sanityCheck(fn *Function, reporter io.Writer) bool { + if reporter == nil { + reporter = os.Stderr + } + return (&sanity{reporter: reporter}).checkFunction(fn) +} + +// mustSanityCheck is like sanityCheck but panics instead of returning +// a negative result. +// +func mustSanityCheck(fn *Function, reporter io.Writer) { + if !sanityCheck(fn, reporter) { + fn.WriteTo(os.Stderr) + panic("SanityCheck failed") + } +} + +func (s *sanity) diagnostic(prefix, format string, args ...interface{}) { + fmt.Fprintf(s.reporter, "%s: function %s", prefix, s.fn) + if s.block != nil { + fmt.Fprintf(s.reporter, ", block %s", s.block) + } + io.WriteString(s.reporter, ": ") + fmt.Fprintf(s.reporter, format, args...) + io.WriteString(s.reporter, "\n") +} + +func (s *sanity) errorf(format string, args ...interface{}) { + s.insane = true + s.diagnostic("Error", format, args...) +} + +func (s *sanity) warnf(format string, args ...interface{}) { + s.diagnostic("Warning", format, args...) +} + +// findDuplicate returns an arbitrary basic block that appeared more +// than once in blocks, or nil if all were unique. 
+func findDuplicate(blocks []*BasicBlock) *BasicBlock { + if len(blocks) < 2 { + return nil + } + if blocks[0] == blocks[1] { + return blocks[0] + } + // Slow path: + m := make(map[*BasicBlock]bool) + for _, b := range blocks { + if m[b] { + return b + } + m[b] = true + } + return nil +} + +func (s *sanity) checkInstr(idx int, instr Instruction) { + switch instr := instr.(type) { + case *If, *Jump, *Return, *Panic, *Unreachable, *ConstantSwitch: + s.errorf("control flow instruction not at end of block") + case *Sigma: + if idx > 0 { + prev := s.block.Instrs[idx-1] + if _, ok := prev.(*Sigma); !ok { + s.errorf("Sigma instruction follows a non-Sigma: %T", prev) + } + } + case *Phi: + if idx == 0 { + // It suffices to apply this check to just the first phi node. + if dup := findDuplicate(s.block.Preds); dup != nil { + s.errorf("phi node in block with duplicate predecessor %s", dup) + } + } else { + prev := s.block.Instrs[idx-1] + switch prev.(type) { + case *Phi, *Sigma: + default: + s.errorf("Phi instruction follows a non-Phi, non-Sigma: %T", prev) + } + } + if ne, np := len(instr.Edges), len(s.block.Preds); ne != np { + s.errorf("phi node has %d edges but %d predecessors", ne, np) + + } else { + for i, e := range instr.Edges { + if e == nil { + s.errorf("phi node '%v' has no value for edge #%d from %s", instr, i, s.block.Preds[i]) + } + } + } + + case *Alloc: + if !instr.Heap { + found := false + for _, l := range s.fn.Locals { + if l == instr { + found = true + break + } + } + if !found { + s.errorf("local alloc %s = %s does not appear in Function.Locals", instr.Name(), instr) + } + } + + case *BinOp: + case *Call: + case *ChangeInterface: + case *ChangeType: + case *Convert: + if _, ok := instr.X.Type().Underlying().(*types.Slice); ok { + if ptr, ok := instr.Type().Underlying().(*types.Pointer); ok { + if _, ok := ptr.Elem().(*types.Array); ok { + break + } + } + } + if _, ok := instr.X.Type().Underlying().(*types.Basic); !ok { + if _, ok := instr.Type().Underlying().(*types.Basic); !ok { + s.errorf("convert %s -> %s: at least one type must be basic", instr.X.Type(), instr.Type()) + } + } + + case *Defer: + case *Extract: + case *Field: + case *FieldAddr: + case *Go: + case *Index: + case *IndexAddr: + case *MapLookup: + case *StringLookup: + case *MakeChan: + case *MakeClosure: + numFree := len(instr.Fn.(*Function).FreeVars) + numBind := len(instr.Bindings) + if numFree != numBind { + s.errorf("MakeClosure has %d Bindings for function %s with %d free vars", + numBind, instr.Fn, numFree) + + } + if recv := instr.Type().(*types.Signature).Recv(); recv != nil { + s.errorf("MakeClosure's type includes receiver %s", recv.Type()) + } + + case *MakeInterface: + case *MakeMap: + case *MakeSlice: + case *MapUpdate: + case *Next: + case *Range: + case *RunDefers: + case *Select: + case *Send: + case *Slice: + case *Store: + case *TypeAssert: + case *UnOp: + case *DebugRef: + case *BlankStore: + case *Load: + case *Parameter: + case *Const: + case *Recv: + case *TypeSwitch: + default: + panic(fmt.Sprintf("Unknown instruction type: %T", instr)) + } + + if call, ok := instr.(CallInstruction); ok { + if call.Common().Signature() == nil { + s.errorf("nil signature: %s", call) + } + } + + // Check that value-defining instructions have valid types + // and a valid referrer list. + if v, ok := instr.(Value); ok { + t := v.Type() + if t == nil { + s.errorf("no type: %s = %s", v.Name(), v) + } else if t == tRangeIter { + // not a proper type; ignore. 
+ } else if b, ok := t.Underlying().(*types.Basic); ok && b.Info()&types.IsUntyped != 0 { + if _, ok := v.(*Const); !ok { + s.errorf("instruction has 'untyped' result: %s = %s : %s", v.Name(), v, t) + } + } + s.checkReferrerList(v) + } + + // Untyped constants are legal as instruction Operands(), + // for example: + // _ = "foo"[0] + // or: + // if wordsize==64 {...} + + // All other non-Instruction Values can be found via their + // enclosing Function or Package. +} + +func (s *sanity) checkFinalInstr(instr Instruction) { + switch instr := instr.(type) { + case *If: + if nsuccs := len(s.block.Succs); nsuccs != 2 { + s.errorf("If-terminated block has %d successors; expected 2", nsuccs) + return + } + if s.block.Succs[0] == s.block.Succs[1] { + s.errorf("If-instruction has same True, False target blocks: %s", s.block.Succs[0]) + return + } + + case *Jump: + if nsuccs := len(s.block.Succs); nsuccs != 1 { + s.errorf("Jump-terminated block has %d successors; expected 1", nsuccs) + return + } + + case *Return: + if nsuccs := len(s.block.Succs); nsuccs != 0 { + s.errorf("Return-terminated block has %d successors; expected none", nsuccs) + return + } + if na, nf := len(instr.Results), s.fn.Signature.Results().Len(); nf != na { + s.errorf("%d-ary return in %d-ary function", na, nf) + } + + case *Panic: + if nsuccs := len(s.block.Succs); nsuccs != 1 { + s.errorf("Panic-terminated block has %d successors; expected one", nsuccs) + return + } + + case *Unreachable: + if nsuccs := len(s.block.Succs); nsuccs != 1 { + s.errorf("Unreachable-terminated block has %d successors; expected one", nsuccs) + return + } + + case *ConstantSwitch: + + default: + s.errorf("non-control flow instruction at end of block") + } +} + +func (s *sanity) checkBlock(b *BasicBlock, index int) { + s.block = b + + if b.Index != index { + s.errorf("block has incorrect Index %d", b.Index) + } + if b.parent != s.fn { + s.errorf("block has incorrect parent %s", b.parent) + } + + // Check all blocks are reachable. + // (The entry block is always implicitly reachable, the exit block may be unreachable.) + if index > 1 && len(b.Preds) == 0 { + s.warnf("unreachable block") + if b.Instrs == nil { + // Since this block is about to be pruned, + // tolerating transient problems in it + // simplifies other optimizations. + return + } + } + + // Check predecessor and successor relations are dual, + // and that all blocks in CFG belong to same function. + for _, a := range b.Preds { + found := false + for _, bb := range a.Succs { + if bb == b { + found = true + break + } + } + if !found { + s.errorf("expected successor edge in predecessor %s; found only: %s", a, a.Succs) + } + if a.parent != s.fn { + s.errorf("predecessor %s belongs to different function %s", a, a.parent) + } + } + for _, c := range b.Succs { + found := false + for _, bb := range c.Preds { + if bb == b { + found = true + break + } + } + if !found { + s.errorf("expected predecessor edge in successor %s; found only: %s", c, c.Preds) + } + if c.parent != s.fn { + s.errorf("successor %s belongs to different function %s", c, c.parent) + } + } + + // Check each instruction is sane. 
+ n := len(b.Instrs) + if n == 0 { + s.errorf("basic block contains no instructions") + } + var rands [10]*Value // reuse storage + for j, instr := range b.Instrs { + if instr == nil { + s.errorf("nil instruction at index %d", j) + continue + } + if b2 := instr.Block(); b2 == nil { + s.errorf("nil Block() for instruction at index %d", j) + continue + } else if b2 != b { + s.errorf("wrong Block() (%s) for instruction at index %d ", b2, j) + continue + } + if j < n-1 { + s.checkInstr(j, instr) + } else { + s.checkFinalInstr(instr) + } + + // Check Instruction.Operands. + operands: + for i, op := range instr.Operands(rands[:0]) { + if op == nil { + s.errorf("nil operand pointer %d of %s", i, instr) + continue + } + val := *op + if val == nil { + continue // a nil operand is ok + } + + // Check that "untyped" types only appear on constant operands. + if _, ok := (*op).(*Const); !ok { + if basic, ok := (*op).Type().(*types.Basic); ok { + if basic.Info()&types.IsUntyped != 0 { + s.errorf("operand #%d of %s is untyped: %s", i, instr, basic) + } + } + } + + // Check that Operands that are also Instructions belong to same function. + // TODO(adonovan): also check their block dominates block b. + if val, ok := val.(Instruction); ok { + if val.Block() == nil { + s.errorf("operand %d of %s is an instruction (%s) that belongs to no block", i, instr, val) + } else if val.Parent() != s.fn { + s.errorf("operand %d of %s is an instruction (%s) from function %s", i, instr, val, val.Parent()) + } + } + + // Check that each function-local operand of + // instr refers back to instr. (NB: quadratic) + switch val := val.(type) { + case *Const, *Global, *Builtin: + continue // not local + case *Function: + if val.parent == nil { + continue // only anon functions are local + } + } + + // TODO(adonovan): check val.Parent() != nil <=> val.Referrers() is defined. + + if refs := val.Referrers(); refs != nil { + for _, ref := range *refs { + if ref == instr { + continue operands + } + } + s.errorf("operand %d of %s (%s) does not refer to us", i, instr, val) + } else { + s.errorf("operand %d of %s (%s) has no referrers", i, instr, val) + } + } + } +} + +func (s *sanity) checkReferrerList(v Value) { + refs := v.Referrers() + if refs == nil { + s.errorf("%s has missing referrer list", v.Name()) + return + } + for i, ref := range *refs { + if _, ok := s.instrs[ref]; !ok { + if val, ok := ref.(Value); ok { + s.errorf("%s.Referrers()[%d] = %s = %s is not an instruction belonging to this function", v.Name(), i, val.Name(), val) + } else { + s.errorf("%s.Referrers()[%d] = %s is not an instruction belonging to this function", v.Name(), i, ref) + } + } + } +} + +func (s *sanity) checkFunction(fn *Function) bool { + // TODO(adonovan): check Function invariants: + // - check params match signature + // - check transient fields are nil + // - warn if any fn.Locals do not appear among block instructions. + s.fn = fn + if fn.Prog == nil { + s.errorf("nil Prog") + } + + _ = fn.String() // must not crash + _ = fn.RelString(fn.pkg()) // must not crash + + // All functions have a package, except delegates (which are + // shared across packages, or duplicated as weak symbols in a + // separate-compilation model), and error.Error. 
+ if fn.Pkg == nil { + switch fn.Synthetic { + case SyntheticWrapper, SyntheticBound, SyntheticThunk: + default: + if !strings.HasSuffix(fn.name, "Error") { + s.errorf("nil Pkg") + } + } + } + if src, syn := fn.Synthetic == 0, fn.source != nil; src != syn { + s.errorf("got fromSource=%t, hasSyntax=%t; want same values", src, syn) + } + for i, l := range fn.Locals { + if l.Parent() != fn { + s.errorf("Local %s at index %d has wrong parent", l.Name(), i) + } + if l.Heap { + s.errorf("Local %s at index %d has Heap flag set", l.Name(), i) + } + } + // Build the set of valid referrers. + s.instrs = make(map[Instruction]struct{}) + for _, b := range fn.Blocks { + for _, instr := range b.Instrs { + s.instrs[instr] = struct{}{} + } + } + for i, p := range fn.Params { + if p.Parent() != fn { + s.errorf("Param %s at index %d has wrong parent", p.Name(), i) + } + // Check common suffix of Signature and Params match type. + if sig := fn.Signature; sig != nil { + j := i - len(fn.Params) + sig.Params().Len() // index within sig.Params + if j < 0 { + continue + } + if !types.Identical(p.Type(), sig.Params().At(j).Type()) { + s.errorf("Param %s at index %d has wrong type (%s, versus %s in Signature)", p.Name(), i, p.Type(), sig.Params().At(j).Type()) + + } + } + + s.checkReferrerList(p) + } + for i, fv := range fn.FreeVars { + if fv.Parent() != fn { + s.errorf("FreeVar %s at index %d has wrong parent", fv.Name(), i) + } + s.checkReferrerList(fv) + } + + if fn.Blocks != nil && len(fn.Blocks) == 0 { + // Function _had_ blocks (so it's not external) but + // they were "optimized" away, even the entry block. + s.errorf("Blocks slice is non-nil but empty") + } + for i, b := range fn.Blocks { + if b == nil { + s.warnf("nil *BasicBlock at f.Blocks[%d]", i) + continue + } + s.checkBlock(b, i) + } + + s.block = nil + for i, anon := range fn.AnonFuncs { + if anon.Parent() != fn { + s.errorf("AnonFuncs[%d]=%s but %s.Parent()=%s", i, anon, anon, anon.Parent()) + } + } + s.fn = nil + return !s.insane +} + +// sanityCheckPackage checks invariants of packages upon creation. +// It does not require that the package is built. +// Unlike sanityCheck (for functions), it just panics at the first error. +func sanityCheckPackage(pkg *Package) { + if pkg.Pkg == nil { + panic(fmt.Sprintf("Package %s has no Object", pkg)) + } + _ = pkg.String() // must not crash + + for name, mem := range pkg.Members { + if name != mem.Name() { + panic(fmt.Sprintf("%s: %T.Name() = %s, want %s", + pkg.Pkg.Path(), mem, mem.Name(), name)) + } + obj := mem.Object() + if obj == nil { + // This check is sound because fields + // {Global,Function}.object have type + // types.Object. (If they were declared as + // *types.{Var,Func}, we'd have a non-empty + // interface containing a nil pointer.) + + continue // not all members have typechecker objects + } + if obj.Name() != name { + if obj.Name() == "init" && strings.HasPrefix(mem.Name(), "init#") { + // Ok. The name of a declared init function varies between + // its types.Func ("init") and its ir.Function ("init#%d"). + } else { + panic(fmt.Sprintf("%s: %T.Object().Name() = %s, want %s", + pkg.Pkg.Path(), mem, obj.Name(), name)) + } + } + } +} diff --git a/vendor/honnef.co/go/tools/go/ir/source.go b/vendor/honnef.co/go/tools/go/ir/source.go new file mode 100644 index 000000000..93d1ccbd2 --- /dev/null +++ b/vendor/honnef.co/go/tools/go/ir/source.go @@ -0,0 +1,270 @@ +// Copyright 2013 The Go Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package ir + +// This file defines utilities for working with source positions +// or source-level named entities ("objects"). + +// TODO(adonovan): test that {Value,Instruction}.Pos() positions match +// the originating syntax, as specified. + +import ( + "go/ast" + "go/token" + "go/types" +) + +// EnclosingFunction returns the function that contains the syntax +// node denoted by path. +// +// Syntax associated with package-level variable specifications is +// enclosed by the package's init() function. +// +// Returns nil if not found; reasons might include: +// - the node is not enclosed by any function. +// - the node is within an anonymous function (FuncLit) and +// its IR function has not been created yet +// (pkg.Build() has not yet been called). +// +func EnclosingFunction(pkg *Package, path []ast.Node) *Function { + // Start with package-level function... + fn := findEnclosingPackageLevelFunction(pkg, path) + if fn == nil { + return nil // not in any function + } + + // ...then walk down the nested anonymous functions. + n := len(path) +outer: + for i := range path { + if lit, ok := path[n-1-i].(*ast.FuncLit); ok { + for _, anon := range fn.AnonFuncs { + if anon.Pos() == lit.Type.Func { + fn = anon + continue outer + } + } + // IR function not found: + // - package not yet built, or maybe + // - builder skipped FuncLit in dead block + // (in principle; but currently the Builder + // generates even dead FuncLits). + return nil + } + } + return fn +} + +// HasEnclosingFunction returns true if the AST node denoted by path +// is contained within the declaration of some function or +// package-level variable. +// +// Unlike EnclosingFunction, the behaviour of this function does not +// depend on whether IR code for pkg has been built, so it can be +// used to quickly reject check inputs that will cause +// EnclosingFunction to fail, prior to IR building. +// +func HasEnclosingFunction(pkg *Package, path []ast.Node) bool { + return findEnclosingPackageLevelFunction(pkg, path) != nil +} + +// findEnclosingPackageLevelFunction returns the Function +// corresponding to the package-level function enclosing path. +// +func findEnclosingPackageLevelFunction(pkg *Package, path []ast.Node) *Function { + if n := len(path); n >= 2 { // [... {Gen,Func}Decl File] + switch decl := path[n-2].(type) { + case *ast.GenDecl: + if decl.Tok == token.VAR && n >= 3 { + // Package-level 'var' initializer. + return pkg.init + } + + case *ast.FuncDecl: + // Declared function/method. + fn := findNamedFunc(pkg, decl.Pos()) + if fn == nil && decl.Recv == nil && decl.Name.Name == "init" { + // Hack: return non-nil when IR is not yet + // built so that HasEnclosingFunction works. + return pkg.init + } + return fn + } + } + return nil // not in any function +} + +// findNamedFunc returns the named function whose FuncDecl.Ident is at +// position pos. +// +func findNamedFunc(pkg *Package, pos token.Pos) *Function { + for _, fn := range pkg.Functions { + if fn.Pos() == pos { + return fn + } + } + return nil +} + +// ValueForExpr returns the IR Value that corresponds to non-constant +// expression e. +// +// It returns nil if no value was found, e.g. +// - the expression is not lexically contained within f; +// - f was not built with debug information; or +// - e is a constant expression. (For efficiency, no debug +// information is stored for constants. Use +// go/types.Info.Types[e].Value instead.) 
+// - e is a reference to nil or a built-in function. +// - the value was optimised away. +// +// If e is an addressable expression used in an lvalue context, +// value is the address denoted by e, and isAddr is true. +// +// The types of e (or &e, if isAddr) and the result are equal +// (modulo "untyped" bools resulting from comparisons). +// +// (Tip: to find the ir.Value given a source position, use +// astutil.PathEnclosingInterval to locate the ast.Node, then +// EnclosingFunction to locate the Function, then ValueForExpr to find +// the ir.Value.) +// +func (f *Function) ValueForExpr(e ast.Expr) (value Value, isAddr bool) { + if f.debugInfo() { // (opt) + e = unparen(e) + for _, b := range f.Blocks { + for _, instr := range b.Instrs { + if ref, ok := instr.(*DebugRef); ok { + if ref.Expr == e { + return ref.X, ref.IsAddr + } + } + } + } + } + return +} + +// --- Lookup functions for source-level named entities (types.Objects) --- + +// Package returns the IR Package corresponding to the specified +// type-checker package object. +// It returns nil if no such IR package has been created. +// +func (prog *Program) Package(obj *types.Package) *Package { + return prog.packages[obj] +} + +// packageLevelValue returns the package-level value corresponding to +// the specified named object, which may be a package-level const +// (*Const), var (*Global) or func (*Function) of some package in +// prog. It returns nil if the object is not found. +// +func (prog *Program) packageLevelValue(obj types.Object) Value { + if pkg, ok := prog.packages[obj.Pkg()]; ok { + return pkg.values[obj] + } + return nil +} + +// FuncValue returns the concrete Function denoted by the source-level +// named function obj, or nil if obj denotes an interface method. +// +// TODO(adonovan): check the invariant that obj.Type() matches the +// result's Signature, both in the params/results and in the receiver. +// +func (prog *Program) FuncValue(obj *types.Func) *Function { + fn, _ := prog.packageLevelValue(obj).(*Function) + return fn +} + +// ConstValue returns the IR Value denoted by the source-level named +// constant obj. +// +func (prog *Program) ConstValue(obj *types.Const) *Const { + // TODO(adonovan): opt: share (don't reallocate) + // Consts for const objects and constant ast.Exprs. + + // Universal constant? {true,false,nil} + if obj.Parent() == types.Universe { + return NewConst(obj.Val(), obj.Type()) + } + // Package-level named constant? + if v := prog.packageLevelValue(obj); v != nil { + return v.(*Const) + } + return NewConst(obj.Val(), obj.Type()) +} + +// VarValue returns the IR Value that corresponds to a specific +// identifier denoting the source-level named variable obj. +// +// VarValue returns nil if a local variable was not found, perhaps +// because its package was not built, the debug information was not +// requested during IR construction, or the value was optimized away. +// +// ref is the path to an ast.Ident (e.g. from PathEnclosingInterval), +// and that ident must resolve to obj. +// +// pkg is the package enclosing the reference. (A reference to a var +// always occurs within a function, so we need to know where to find it.) +// +// If the identifier is a field selector and its base expression is +// non-addressable, then VarValue returns the value of that field. +// For example: +// func f() struct {x int} +// f().x // VarValue(x) returns a *Field instruction of type int +// +// All other identifiers denote addressable locations (variables). 
+// For them, VarValue may return either the variable's address or its +// value, even when the expression is evaluated only for its value; the +// situation is reported by isAddr, the second component of the result. +// +// If !isAddr, the returned value is the one associated with the +// specific identifier. For example, +// var x int // VarValue(x) returns Const 0 here +// x = 1 // VarValue(x) returns Const 1 here +// +// It is not specified whether the value or the address is returned in +// any particular case, as it may depend upon optimizations performed +// during IR code generation, such as registerization, constant +// folding, avoidance of materialization of subexpressions, etc. +// +func (prog *Program) VarValue(obj *types.Var, pkg *Package, ref []ast.Node) (value Value, isAddr bool) { + // All references to a var are local to some function, possibly init. + fn := EnclosingFunction(pkg, ref) + if fn == nil { + return // e.g. def of struct field; IR not built? + } + + id := ref[0].(*ast.Ident) + + // Defining ident of a parameter? + if id.Pos() == obj.Pos() { + for _, param := range fn.Params { + if param.Object() == obj { + return param, false + } + } + } + + // Other ident? + for _, b := range fn.Blocks { + for _, instr := range b.Instrs { + if dr, ok := instr.(*DebugRef); ok { + if dr.Pos() == id.Pos() { + return dr.X, dr.IsAddr + } + } + } + } + + // Defining ident of package-level var? + if v := prog.packageLevelValue(obj); v != nil { + return v.(*Global), true + } + + return // e.g. debug info not requested, or var optimized away +} diff --git a/vendor/honnef.co/go/tools/go/ir/ssa.go b/vendor/honnef.co/go/tools/go/ir/ssa.go new file mode 100644 index 000000000..6dfdfcd80 --- /dev/null +++ b/vendor/honnef.co/go/tools/go/ir/ssa.go @@ -0,0 +1,1898 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package ir + +// This package defines a high-level intermediate representation for +// Go programs using static single-information (SSI) form. + +import ( + "fmt" + "go/ast" + "go/constant" + "go/token" + "go/types" + "sync" + + "honnef.co/go/tools/go/types/typeutil" +) + +type ID int + +// A Program is a partial or complete Go program converted to IR form. +type Program struct { + Fset *token.FileSet // position information for the files of this Program + PrintFunc string // create ir.html for function specified in PrintFunc + imported map[string]*Package // all importable Packages, keyed by import path + packages map[*types.Package]*Package // all loaded Packages, keyed by object + mode BuilderMode // set of mode bits for IR construction + MethodSets typeutil.MethodSetCache // cache of type-checker's method-sets + + methodsMu sync.Mutex // guards the following maps: + methodSets typeutil.Map // maps type to its concrete methodSet + runtimeTypes typeutil.Map // types for which rtypes are needed + canon typeutil.Map // type canonicalization map + bounds map[*types.Func]*Function // bounds for curried x.Method closures + thunks map[selectionKey]*Function // thunks for T.Method expressions +} + +// A Package is a single analyzed Go package containing Members for +// all package-level functions, variables, constants and types it +// declares. These may be accessed directly via Members, or via the +// type-specific accessor methods Func, Type, Var and Const. 
+// +// Members also contains entries for "init" (the synthetic package +// initializer) and "init#%d", the nth declared init function, +// and unspecified other things too. +// +type Package struct { + Prog *Program // the owning program + Pkg *types.Package // the corresponding go/types.Package + Members map[string]Member // all package members keyed by name (incl. init and init#%d) + Functions []*Function // all functions, excluding anonymous ones + values map[types.Object]Value // package members (incl. types and methods), keyed by object + init *Function // Func("init"); the package's init function + debug bool // include full debug info in this package + printFunc string // which function to print in HTML form + + // The following fields are set transiently, then cleared + // after building. + buildOnce sync.Once // ensures package building occurs once + ninit int32 // number of init functions + info *types.Info // package type information + files []*ast.File // package ASTs +} + +// A Member is a member of a Go package, implemented by *NamedConst, +// *Global, *Function, or *Type; they are created by package-level +// const, var, func and type declarations respectively. +// +type Member interface { + Name() string // declared name of the package member + String() string // package-qualified name of the package member + RelString(*types.Package) string // like String, but relative refs are unqualified + Object() types.Object // typechecker's object for this member, if any + Type() types.Type // type of the package member + Token() token.Token // token.{VAR,FUNC,CONST,TYPE} + Package() *Package // the containing package +} + +// A Type is a Member of a Package representing a package-level named type. +type Type struct { + object *types.TypeName + pkg *Package +} + +// A NamedConst is a Member of a Package representing a package-level +// named constant. +// +// Pos() returns the position of the declaring ast.ValueSpec.Names[*] +// identifier. +// +// NB: a NamedConst is not a Value; it contains a constant Value, which +// it augments with the name and position of its 'const' declaration. +// +type NamedConst struct { + object *types.Const + Value *Const + pkg *Package +} + +// A Value is an IR value that can be referenced by an instruction. +type Value interface { + setID(ID) + + // Name returns the name of this value, and determines how + // this Value appears when used as an operand of an + // Instruction. + // + // This is the same as the source name for Parameters, + // Builtins, Functions, FreeVars, Globals. + // For constants, it is a representation of the constant's value + // and type. For all other Values this is the name of the + // virtual register defined by the instruction. + // + // The name of an IR Value is not semantically significant, + // and may not even be unique within a function. + Name() string + + // ID returns the ID of this value. IDs are unique within a single + // function and are densely numbered, but may contain gaps. + // Values and other Instructions share the same ID space. + // Globally, values are identified by their addresses. However, + // IDs exist to facilitate efficient storage of mappings between + // values and data when analysing functions. + // + // NB: IDs are allocated late in the IR construction process and + // are not available to early stages of said process. 
+ ID() ID + + // If this value is an Instruction, String returns its + // disassembled form; otherwise it returns unspecified + // human-readable information about the Value, such as its + // kind, name and type. + String() string + + // Type returns the type of this value. Many instructions + // (e.g. IndexAddr) change their behaviour depending on the + // types of their operands. + Type() types.Type + + // Parent returns the function to which this Value belongs. + // It returns nil for named Functions, Builtin and Global. + Parent() *Function + + // Referrers returns the list of instructions that have this + // value as one of their operands; it may contain duplicates + // if an instruction has a repeated operand. + // + // Referrers actually returns a pointer through which the + // caller may perform mutations to the object's state. + // + // Referrers is currently only defined if Parent()!=nil, + // i.e. for the function-local values FreeVar, Parameter, + // Functions (iff anonymous) and all value-defining instructions. + // It returns nil for named Functions, Builtin and Global. + // + // Instruction.Operands contains the inverse of this relation. + Referrers() *[]Instruction + + Operands(rands []*Value) []*Value // nil for non-Instructions + + // Source returns the AST node responsible for creating this + // value. A single AST node may be responsible for more than one + // value, and not all values have an associated AST node. + // + // Do not use this method to find a Value given an ast.Expr; use + // ValueForExpr instead. + Source() ast.Node + + // Pos returns Source().Pos() if Source is not nil, else it + // returns token.NoPos. + Pos() token.Pos +} + +// An Instruction is an IR instruction that computes a new Value or +// has some effect. +// +// An Instruction that defines a value (e.g. BinOp) also implements +// the Value interface; an Instruction that only has an effect (e.g. Store) +// does not. +// +type Instruction interface { + setSource(ast.Node) + setID(ID) + + // String returns the disassembled form of this value. + // + // Examples of Instructions that are Values: + // "BinOp {+} t1 t2" (BinOp) + // "Call len t1" (Call) + // Note that the name of the Value is not printed. + // + // Examples of Instructions that are not Values: + // "Return t1" (Return) + // "Store {int} t2 t1" (Store) + // + // (The separation of Value.Name() from Value.String() is useful + // for some analyses which distinguish the operation from the + // value it defines, e.g., 'y = local int' is both an allocation + // of memory 'local int' and a definition of a pointer y.) + String() string + + // ID returns the ID of this instruction. IDs are unique within a single + // function and are densely numbered, but may contain gaps. + // Globally, instructions are identified by their addresses. However, + // IDs exist to facilitate efficient storage of mappings between + // instructions and data when analysing functions. + // + // NB: IDs are allocated late in the IR construction process and + // are not available to early stages of said process. + ID() ID + + // Parent returns the function to which this instruction + // belongs. + Parent() *Function + + // Block returns the basic block to which this instruction + // belongs. + Block() *BasicBlock + + // setBlock sets the basic block to which this instruction belongs. + setBlock(*BasicBlock) + + // Operands returns the operands of this instruction: the + // set of Values it references. 
+ // + // Specifically, it appends their addresses to rands, a + // user-provided slice, and returns the resulting slice, + // permitting avoidance of memory allocation. + // + // The operands are appended in undefined order, but the order + // is consistent for a given Instruction; the addresses are + // always non-nil but may point to a nil Value. Clients may + // store through the pointers, e.g. to effect a value + // renaming. + // + // Value.Referrers is a subset of the inverse of this + // relation. (Referrers are not tracked for all types of + // Values.) + Operands(rands []*Value) []*Value + + Referrers() *[]Instruction // nil for non-Values + + // Source returns the AST node responsible for creating this + // instruction. A single AST node may be responsible for more than + // one instruction, and not all instructions have an associated + // AST node. + Source() ast.Node + + // Pos returns Source().Pos() if Source is not nil, else it + // returns token.NoPos. + Pos() token.Pos +} + +// A Node is a node in the IR value graph. Every concrete type that +// implements Node is also either a Value, an Instruction, or both. +// +// Node contains the methods common to Value and Instruction, plus the +// Operands and Referrers methods generalized to return nil for +// non-Instructions and non-Values, respectively. +// +// Node is provided to simplify IR graph algorithms. Clients should +// use the more specific and informative Value or Instruction +// interfaces where appropriate. +// +type Node interface { + setID(ID) + + // Common methods: + ID() ID + String() string + Source() ast.Node + Pos() token.Pos + Parent() *Function + + // Partial methods: + Operands(rands []*Value) []*Value // nil for non-Instructions + Referrers() *[]Instruction // nil for non-Values +} + +type Synthetic int + +const ( + SyntheticLoadedFromExportData Synthetic = iota + 1 + SyntheticPackageInitializer + SyntheticThunk + SyntheticWrapper + SyntheticBound +) + +func (syn Synthetic) String() string { + switch syn { + case SyntheticLoadedFromExportData: + return "loaded from export data" + case SyntheticPackageInitializer: + return "package initializer" + case SyntheticThunk: + return "thunk" + case SyntheticWrapper: + return "wrapper" + case SyntheticBound: + return "bound" + default: + return fmt.Sprintf("Synthetic(%d)", syn) + } +} + +// Function represents the parameters, results, and code of a function +// or method. +// +// If Blocks is nil, this indicates an external function for which no +// Go source code is available. In this case, FreeVars and Locals +// are nil too. Clients performing whole-program analysis must +// handle external functions specially. +// +// Blocks contains the function's control-flow graph (CFG). +// Blocks[0] is the function entry point; block order is not otherwise +// semantically significant, though it may affect the readability of +// the disassembly. +// To iterate over the blocks in dominance order, use DomPreorder(). +// +// A nested function (Parent()!=nil) that refers to one or more +// lexically enclosing local variables ("free variables") has FreeVars. +// Such functions cannot be called directly but require a +// value created by MakeClosure which, via its Bindings, supplies +// values for these parameters. +// +// If the function is a method (Signature.Recv() != nil) then the first +// element of Params is the receiver parameter. +// +// A Go package may declare many functions called "init". 
+// For each one, Object().Name() returns "init" but Name() returns +// "init#1", etc, in declaration order. +// +// Pos() returns the declaring ast.FuncLit.Type.Func or the position +// of the ast.FuncDecl.Name, if the function was explicit in the +// source. Synthetic wrappers, for which Synthetic != "", may share +// the same position as the function they wrap. +// Syntax.Pos() always returns the position of the declaring "func" token. +// +// Type() returns the function's Signature. +// +type Function struct { + node + + name string + object types.Object // a declared *types.Func or one of its wrappers + method *types.Selection // info about provenance of synthetic methods + Signature *types.Signature + + Synthetic Synthetic + parent *Function // enclosing function if anon; nil if global + Pkg *Package // enclosing package; nil for shared funcs (wrappers and error.Error) + Prog *Program // enclosing program + Params []*Parameter // function parameters; for methods, includes receiver + FreeVars []*FreeVar // free variables whose values must be supplied by closure + Locals []*Alloc // local variables of this function + Blocks []*BasicBlock // basic blocks of the function; nil => external + Exit *BasicBlock // The function's exit block + AnonFuncs []*Function // anonymous functions directly beneath this one + referrers []Instruction // referring instructions (iff Parent() != nil) + NoReturn NoReturn // Calling this function will always terminate control flow. + + *functionBody +} + +type NoReturn uint8 + +const ( + Returns NoReturn = iota + AlwaysExits + AlwaysUnwinds + NeverReturns +) + +type functionBody struct { + // The following fields are set transiently during building, + // then cleared. + currentBlock *BasicBlock // where to emit code + objects map[types.Object]Value // addresses of local variables + namedResults []*Alloc // tuple of named results + implicitResults []*Alloc // tuple of results + targets *targets // linked stack of branch targets + lblocks map[*ast.Object]*lblock // labelled blocks + consts []*Const + wr *HTMLWriter + fakeExits BlockSet + blocksets [5]BlockSet + hasDefer bool + + // a contiguous block of instructions that will be used by blocks, + // to avoid making multiple allocations. + scratchInstructions []Instruction +} + +func (fn *Function) results() []*Alloc { + if len(fn.namedResults) > 0 { + return fn.namedResults + } + return fn.implicitResults +} + +// BasicBlock represents an IR basic block. +// +// The final element of Instrs is always an explicit transfer of +// control (If, Jump, Return, Panic, or Unreachable). +// +// A block may contain no Instructions only if it is unreachable, +// i.e., Preds is nil. Empty blocks are typically pruned. +// +// BasicBlocks and their Preds/Succs relation form a (possibly cyclic) +// graph independent of the IR Value graph: the control-flow graph or +// CFG. It is illegal for multiple edges to exist between the same +// pair of blocks. +// +// Each BasicBlock is also a node in the dominator tree of the CFG. +// The tree may be navigated using Idom()/Dominees() and queried using +// Dominates(). +// +// The order of Preds and Succs is significant (to Phi and If +// instructions, respectively). 
+// +type BasicBlock struct { + Index int // index of this block within Parent().Blocks + Comment string // optional label; no semantic significance + parent *Function // parent function + Instrs []Instruction // instructions in order + Preds, Succs []*BasicBlock // predecessors and successors + succs2 [2]*BasicBlock // initial space for Succs + dom domInfo // dominator tree info + pdom domInfo // post-dominator tree info + post int + gaps int // number of nil Instrs (transient) + rundefers int // number of rundefers (transient) +} + +// Pure values ---------------------------------------- + +// A FreeVar represents a free variable of the function to which it +// belongs. +// +// FreeVars are used to implement anonymous functions, whose free +// variables are lexically captured in a closure formed by +// MakeClosure. The value of such a free var is an Alloc or another +// FreeVar and is considered a potentially escaping heap address, with +// pointer type. +// +// FreeVars are also used to implement bound method closures. Such a +// free var represents the receiver value and may be of any type that +// has concrete methods. +// +// Pos() returns the position of the value that was captured, which +// belongs to an enclosing function. +// +type FreeVar struct { + node + + name string + typ types.Type + parent *Function + referrers []Instruction + + // Transiently needed during building. + outer Value // the Value captured from the enclosing context. +} + +// A Parameter represents an input parameter of a function. +// +type Parameter struct { + register + + name string + object types.Object // a *types.Var; nil for non-source locals +} + +// A Const represents the value of a constant expression. +// +// The underlying type of a constant may be any boolean, numeric, or +// string type. In addition, a Const may represent the nil value of +// any reference type---interface, map, channel, pointer, slice, or +// function---but not "untyped nil". +// +// All source-level constant expressions are represented by a Const +// of the same type and value. +// +// Value holds the exact value of the constant, independent of its +// Type(), using the same representation as package go/constant uses for +// constants, or nil for a typed nil value. +// +// Pos() returns token.NoPos. +// +// Example printed form: +// Const {42} +// Const {"test"} +// Const {(3 + 4i)} +// +type Const struct { + register + + Value constant.Value +} + +// A Global is a named Value holding the address of a package-level +// variable. +// +// Pos() returns the position of the ast.ValueSpec.Names[*] +// identifier. +// +type Global struct { + node + + name string + object types.Object // a *types.Var; may be nil for synthetics e.g. init$guard + typ types.Type + + Pkg *Package +} + +// A Builtin represents a specific use of a built-in function, e.g. len. +// +// Builtins are immutable values. Builtins do not have addresses. +// Builtins can only appear in CallCommon.Func. +// +// Name() indicates the function: one of the built-in functions from the +// Go spec (excluding "make" and "new") or one of these ir-defined +// intrinsics: +// +// // wrapnilchk returns ptr if non-nil, panics otherwise. +// // (For use in indirection wrappers.) +// func ir:wrapnilchk(ptr *T, recvType, methodName string) *T +// +// // noreturnWasPanic returns true if the previously called +// // function panicked, false if it exited the process. 
+// func ir:noreturnWasPanic() bool +// +// Object() returns a *types.Builtin for built-ins defined by the spec, +// nil for others. +// +// Type() returns a *types.Signature representing the effective +// signature of the built-in for this call. +// +type Builtin struct { + node + + name string + sig *types.Signature +} + +// Value-defining instructions ---------------------------------------- + +// The Alloc instruction reserves space for a variable of the given type, +// zero-initializes it, and yields its address. +// +// Alloc values are always addresses, and have pointer types, so the +// type of the allocated variable is actually +// Type().Underlying().(*types.Pointer).Elem(). +// +// If Heap is false, Alloc allocates space in the function's +// activation record (frame); we refer to an Alloc(Heap=false) as a +// "stack" alloc. Each stack Alloc returns the same address each time +// it is executed within the same activation; the space is +// re-initialized to zero. +// +// If Heap is true, Alloc allocates space in the heap; we +// refer to an Alloc(Heap=true) as a "heap" alloc. Each heap Alloc +// returns a different address each time it is executed. +// +// When Alloc is applied to a channel, map or slice type, it returns +// the address of an uninitialized (nil) reference of that kind; store +// the result of MakeSlice, MakeMap or MakeChan in that location to +// instantiate these types. +// +// Pos() returns the ast.CompositeLit.Lbrace for a composite literal, +// or the ast.CallExpr.Rparen for a call to new() or for a call that +// allocates a varargs slice. +// +// Example printed form: +// t1 = StackAlloc <*int> +// t2 = HeapAlloc <*int> (new) +// +type Alloc struct { + register + Heap bool + index int // dense numbering; for lifting +} + +var _ Instruction = (*Sigma)(nil) +var _ Value = (*Sigma)(nil) + +// The Sigma instruction represents an SSI σ-node, which splits values +// at branches in the control flow. +// +// Conceptually, σ-nodes exist at the end of blocks that branch and +// constitute parallel assignments to one value per destination block. +// However, such a representation would be awkward to work with, so +// instead we place σ-nodes at the beginning of branch targets. The +// From field denotes to which incoming edge the node applies. +// +// Within a block, all σ-nodes must appear before all non-σ nodes. +// +// Example printed form: +// t2 = Sigma [#0] t1 (x) +// +type Sigma struct { + register + From *BasicBlock + X Value + + live bool // used during lifting +} + +// The Phi instruction represents an SSA φ-node, which combines values +// that differ across incoming control-flow edges and yields a new +// value. Within a block, all φ-nodes must appear before all non-φ, non-σ +// nodes. +// +// Pos() returns the position of the && or || for short-circuit +// control-flow joins, or that of the *Alloc for φ-nodes inserted +// during SSA renaming. +// +// Example printed form: +// t3 = Phi 2:t1 4:t2 (x) +// +type Phi struct { + register + Edges []Value // Edges[i] is value for Block().Preds[i] + + live bool // used during lifting +} + +// The Call instruction represents a function or method call. +// +// The Call instruction yields the function result if there is exactly +// one. Otherwise it returns a tuple, the components of which are +// accessed via Extract. +// +// See CallCommon for generic function call documentation. +// +// Pos() returns the ast.CallExpr.Lparen, if explicit in the source. 
+// +// Example printed form: +// t3 = Call <()> println t1 t2 +// t4 = Call <()> foo$1 +// t6 = Invoke t5.String +// +type Call struct { + register + Call CallCommon +} + +// The BinOp instruction yields the result of binary operation X Op Y. +// +// Pos() returns the ast.BinaryExpr.OpPos, if explicit in the source. +// +// Example printed form: +// t3 = BinOp {+} t2 t1 +// +type BinOp struct { + register + // One of: + // ADD SUB MUL QUO REM + - * / % + // AND OR XOR SHL SHR AND_NOT & | ^ << >> &^ + // EQL NEQ LSS LEQ GTR GEQ == != < <= < >= + Op token.Token + X, Y Value +} + +// The UnOp instruction yields the result of Op X. +// XOR is bitwise complement. +// SUB is negation. +// NOT is logical negation. +// +// +// Example printed form: +// t2 = UnOp {^} t1 +// +type UnOp struct { + register + Op token.Token // One of: NOT SUB XOR ! - ^ + X Value +} + +// The Load instruction loads a value from a memory address. +// +// For implicit memory loads, Pos() returns the position of the +// most closely associated source-level construct; the details are not +// specified. +// +// Example printed form: +// t2 = Load t1 +// +type Load struct { + register + X Value +} + +// The ChangeType instruction applies to X a value-preserving type +// change to Type(). +// +// Type changes are permitted: +// - between a named type and its underlying type. +// - between two named types of the same underlying type. +// - between (possibly named) pointers to identical base types. +// - from a bidirectional channel to a read- or write-channel, +// optionally adding/removing a name. +// +// This operation cannot fail dynamically. +// +// Pos() returns the ast.CallExpr.Lparen, if the instruction arose +// from an explicit conversion in the source. +// +// Example printed form: +// t2 = ChangeType <*T> t1 +// +type ChangeType struct { + register + X Value +} + +// The Convert instruction yields the conversion of value X to type +// Type(). One or both of those types is basic (but possibly named). +// +// A conversion may change the value and representation of its operand. +// Conversions are permitted: +// - between real numeric types. +// - between complex numeric types. +// - between string and []byte or []rune. +// - between pointers and unsafe.Pointer. +// - between unsafe.Pointer and uintptr. +// - from (Unicode) integer to (UTF-8) string. +// - from slice to array pointer. +// A conversion may imply a type name change also. +// +// Conversions of untyped string/number/bool constants to a specific +// representation are eliminated during IR construction. +// +// Pos() returns the ast.CallExpr.Lparen, if the instruction arose +// from an explicit conversion in the source. +// +// Example printed form: +// t2 = Convert <[]byte> t1 +// +type Convert struct { + register + X Value +} + +// ChangeInterface constructs a value of one interface type from a +// value of another interface type known to be assignable to it. +// This operation cannot fail. +// +// Pos() returns the ast.CallExpr.Lparen if the instruction arose from +// an explicit T(e) conversion; the ast.TypeAssertExpr.Lparen if the +// instruction arose from an explicit e.(T) operation; or token.NoPos +// otherwise. +// +// Example printed form: +// t2 = ChangeInterface t1 +// +type ChangeInterface struct { + register + X Value +} + +// MakeInterface constructs an instance of an interface type from a +// value of a concrete type. 
+// +// Use Program.MethodSets.MethodSet(X.Type()) to find the method-set +// of X, and Program.MethodValue(m) to find the implementation of a method. +// +// To construct the zero value of an interface type T, use: +// NewConst(constant.MakeNil(), T, pos) +// +// Pos() returns the ast.CallExpr.Lparen, if the instruction arose +// from an explicit conversion in the source. +// +// Example printed form: +// t2 = MakeInterface t1 +// +type MakeInterface struct { + register + X Value +} + +// The MakeClosure instruction yields a closure value whose code is +// Fn and whose free variables' values are supplied by Bindings. +// +// Type() returns a (possibly named) *types.Signature. +// +// Pos() returns the ast.FuncLit.Type.Func for a function literal +// closure or the ast.SelectorExpr.Sel for a bound method closure. +// +// Example printed form: +// t1 = MakeClosure foo$1 t1 t2 +// t5 = MakeClosure (T).foo$bound t4 +// +type MakeClosure struct { + register + Fn Value // always a *Function + Bindings []Value // values for each free variable in Fn.FreeVars +} + +// The MakeMap instruction creates a new hash-table-based map object +// and yields a value of kind map. +// +// Type() returns a (possibly named) *types.Map. +// +// Pos() returns the ast.CallExpr.Lparen, if created by make(map), or +// the ast.CompositeLit.Lbrack if created by a literal. +// +// Example printed form: +// t1 = MakeMap +// t2 = MakeMap t1 +// +type MakeMap struct { + register + Reserve Value // initial space reservation; nil => default +} + +// The MakeChan instruction creates a new channel object and yields a +// value of kind chan. +// +// Type() returns a (possibly named) *types.Chan. +// +// Pos() returns the ast.CallExpr.Lparen for the make(chan) that +// created it. +// +// Example printed form: +// t3 = MakeChan t1 +// t4 = MakeChan t2 +// +type MakeChan struct { + register + Size Value // int; size of buffer; zero => synchronous. +} + +// The MakeSlice instruction yields a slice of length Len backed by a +// newly allocated array of length Cap. +// +// Both Len and Cap must be non-nil Values of integer type. +// +// (Alloc(types.Array) followed by Slice will not suffice because +// Alloc can only create arrays of constant length.) +// +// Type() returns a (possibly named) *types.Slice. +// +// Pos() returns the ast.CallExpr.Lparen for the make([]T) that +// created it. +// +// Example printed form: +// t3 = MakeSlice <[]string> t1 t2 +// t4 = MakeSlice t1 t2 +// +type MakeSlice struct { + register + Len Value + Cap Value +} + +// The Slice instruction yields a slice of an existing string, slice +// or *array X between optional integer bounds Low and High. +// +// Dynamically, this instruction panics if X evaluates to a nil *array +// pointer. +// +// Type() returns string if the type of X was string, otherwise a +// *types.Slice with the same element type as X. +// +// Pos() returns the ast.SliceExpr.Lbrack if created by a x[:] slice +// operation, the ast.CompositeLit.Lbrace if created by a literal, or +// NoPos if not explicit in the source (e.g. a variadic argument slice). +// +// Example printed form: +// t4 = Slice <[]int> t3 t2 t1 +// +type Slice struct { + register + X Value // slice, string, or *array + Low, High, Max Value // each may be nil +} + +// The FieldAddr instruction yields the address of Field of *struct X. +// +// The field is identified by its index within the field list of the +// struct type of X. +// +// Dynamically, this instruction panics if X evaluates to a nil +// pointer. 
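+//
+// An illustrative sketch (editorial addition, not part of the upstream
+// file; p and y stand in for the actual registers): an assignment through
+// a struct pointer, such as
+//
+//	p.X = y   // p has type *struct{ X int }
+//
+// is built as a FieldAddr followed by a Store, roughly
+//
+//	t1 = FieldAddr <*int> [0] (X) p
+//	Store {int} t1 y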
+// +// Type() returns a (possibly named) *types.Pointer. +// +// Pos() returns the position of the ast.SelectorExpr.Sel for the +// field, if explicit in the source. +// +// Example printed form: +// t2 = FieldAddr <*int> [0] (X) t1 +// +type FieldAddr struct { + register + X Value // *struct + Field int // field is X.Type().Underlying().(*types.Pointer).Elem().Underlying().(*types.Struct).Field(Field) +} + +// The Field instruction yields the Field of struct X. +// +// The field is identified by its index within the field list of the +// struct type of X; by using numeric indices we avoid ambiguity of +// package-local identifiers and permit compact representations. +// +// Pos() returns the position of the ast.SelectorExpr.Sel for the +// field, if explicit in the source. +// +// Example printed form: +// t2 = FieldAddr [0] (X) t1 +// +type Field struct { + register + X Value // struct + Field int // index into X.Type().(*types.Struct).Fields +} + +// The IndexAddr instruction yields the address of the element at +// index Index of collection X. Index is an integer expression. +// +// The elements of maps and strings are not addressable; use StringLookup, MapLookup or +// MapUpdate instead. +// +// Dynamically, this instruction panics if X evaluates to a nil *array +// pointer. +// +// Type() returns a (possibly named) *types.Pointer. +// +// Pos() returns the ast.IndexExpr.Lbrack for the index operation, if +// explicit in the source. +// +// Example printed form: +// t3 = IndexAddr <*int> t2 t1 +// +type IndexAddr struct { + register + X Value // slice or *array, + Index Value // numeric index +} + +// The Index instruction yields element Index of array X. +// +// Pos() returns the ast.IndexExpr.Lbrack for the index operation, if +// explicit in the source. +// +// Example printed form: +// t3 = Index t2 t1 +// +type Index struct { + register + X Value // array + Index Value // integer index +} + +// The MapLookup instruction yields element Index of collection X, a map. +// +// If CommaOk, the result is a 2-tuple of the value above and a +// boolean indicating the result of a map membership test for the key. +// The components of the tuple are accessed using Extract. +// +// Pos() returns the ast.IndexExpr.Lbrack, if explicit in the source. +// +// Example printed form: +// t4 = MapLookup t3 t1 +// t6 = MapLookup <(string, bool)> t3 t2 +// +type MapLookup struct { + register + X Value // map + Index Value // key-typed index + CommaOk bool // return a value,ok pair +} + +// The StringLookup instruction yields element Index of collection X, a string. +// Index is an integer expression. +// +// Pos() returns the ast.IndexExpr.Lbrack, if explicit in the source. +// +// Example printed form: +// t3 = StringLookup t2 t1 +// +type StringLookup struct { + register + X Value // string + Index Value // numeric index +} + +// SelectState is a helper for Select. +// It represents one goal state and its corresponding communication. +// +type SelectState struct { + Dir types.ChanDir // direction of case (SendOnly or RecvOnly) + Chan Value // channel to use (for send or receive) + Send Value // value to send (for send) + Pos token.Pos // position of token.ARROW + DebugNode ast.Node // ast.SendStmt or ast.UnaryExpr(<-) [debug mode] +} + +// The Select instruction tests whether (or blocks until) one +// of the specified sent or received states is entered. +// +// Let n be the number of States for which Dir==RECV and Tᵢ (0 ≤ i < n) +// be the element type of each such state's Chan. 
+// Select returns an n+2-tuple +// (index int, recvOk bool, r₀ T₀, ... rₙ-1 Tₙ-1) +// The tuple's components, described below, must be accessed via the +// Extract instruction. +// +// If Blocking, select waits until exactly one state holds, i.e. a +// channel becomes ready for the designated operation of sending or +// receiving; select chooses one among the ready states +// pseudorandomly, performs the send or receive operation, and sets +// 'index' to the index of the chosen channel. +// +// If !Blocking, select doesn't block if no states hold; instead it +// returns immediately with index equal to -1. +// +// If the chosen channel was used for a receive, the rᵢ component is +// set to the received value, where i is the index of that state among +// all n receive states; otherwise rᵢ has the zero value of type Tᵢ. +// Note that the receive index i is not the same as the state +// index index. +// +// The second component of the triple, recvOk, is a boolean whose value +// is true iff the selected operation was a receive and the receive +// successfully yielded a value. +// +// Pos() returns the ast.SelectStmt.Select. +// +// Example printed form: +// t6 = SelectNonBlocking <(index int, ok bool, int)> [<-t4, t5<-t1] +// t11 = SelectBlocking <(index int, ok bool)> [] +// +type Select struct { + register + States []*SelectState + Blocking bool +} + +// The Range instruction yields an iterator over the domain and range +// of X, which must be a string or map. +// +// Elements are accessed via Next. +// +// Type() returns an opaque and degenerate "rangeIter" type. +// +// Pos() returns the ast.RangeStmt.For. +// +// Example printed form: +// t2 = Range t1 +// +type Range struct { + register + X Value // string or map +} + +// The Next instruction reads and advances the (map or string) +// iterator Iter and returns a 3-tuple value (ok, k, v). If the +// iterator is not exhausted, ok is true and k and v are the next +// elements of the domain and range, respectively. Otherwise ok is +// false and k and v are undefined. +// +// Components of the tuple are accessed using Extract. +// +// The IsString field distinguishes iterators over strings from those +// over maps, as the Type() alone is insufficient: consider +// map[int]rune. +// +// Type() returns a *types.Tuple for the triple (ok, k, v). +// The types of k and/or v may be types.Invalid. +// +// Example printed form: +// t5 = Next <(ok bool, k int, v rune)> t2 +// t5 = Next <(ok bool, k invalid type, v invalid type)> t2 +// +type Next struct { + register + Iter Value + IsString bool // true => string iterator; false => map iterator. +} + +// The TypeAssert instruction tests whether interface value X has type +// AssertedType. +// +// If !CommaOk, on success it returns v, the result of the conversion +// (defined below); on failure it panics. +// +// If CommaOk: on success it returns a pair (v, true) where v is the +// result of the conversion; on failure it returns (z, false) where z +// is AssertedType's zero value. The components of the pair must be +// accessed using the Extract instruction. +// +// If AssertedType is a concrete type, TypeAssert checks whether the +// dynamic type in interface X is equal to it, and if so, the result +// of the conversion is a copy of the value in the interface. +// +// If AssertedType is an interface, TypeAssert checks whether the +// dynamic type of the interface is assignable to it, and if so, the +// result of the conversion is a copy of the interface value X. 
+// If AssertedType is a superinterface of X.Type(), the operation will +// fail iff the operand is nil. (Contrast with ChangeInterface, which +// performs no nil-check.) +// +// Type() reflects the actual type of the result, possibly a +// 2-types.Tuple; AssertedType is the asserted type. +// +// Pos() returns the ast.CallExpr.Lparen if the instruction arose from +// an explicit T(e) conversion; the ast.TypeAssertExpr.Lparen if the +// instruction arose from an explicit e.(T) operation; or the +// ast.CaseClause.Case if the instruction arose from a case of a +// type-switch statement. +// +// Example printed form: +// t2 = TypeAssert t1 +// t4 = TypeAssert <(value fmt.Stringer, ok bool)> t1 +// +type TypeAssert struct { + register + X Value + AssertedType types.Type + CommaOk bool +} + +// The Extract instruction yields component Index of Tuple. +// +// This is used to access the results of instructions with multiple +// return values, such as Call, TypeAssert, Next, Recv, +// MapLookup and others. +// +// Example printed form: +// t7 = Extract [1] (ok) t4 +// +type Extract struct { + register + Tuple Value + Index int +} + +// Instructions executed for effect. They do not yield a value. -------------------- + +// The Jump instruction transfers control to the sole successor of its +// owning block. +// +// A Jump must be the last instruction of its containing BasicBlock. +// +// Pos() returns NoPos. +// +// Example printed form: +// Jump → b1 +// +type Jump struct { + anInstruction + Comment string +} + +// The Unreachable pseudo-instruction signals that execution cannot +// continue after the preceding function call because it terminates +// the process. +// +// The instruction acts as a control instruction, jumping to the exit +// block. However, this jump will never execute. +// +// An Unreachable instruction must be the last instruction of its +// containing BasicBlock. +// +// Example printed form: +// Unreachable → b1 +// +type Unreachable struct { + anInstruction +} + +// The If instruction transfers control to one of the two successors +// of its owning block, depending on the boolean Cond: the first if +// true, the second if false. +// +// An If instruction must be the last instruction of its containing +// BasicBlock. +// +// Pos() returns the *ast.IfStmt, if explicit in the source. +// +// Example printed form: +// If t2 → b1 b2 +// +type If struct { + anInstruction + Cond Value +} + +type ConstantSwitch struct { + anInstruction + Tag Value + // Constant branch conditions. A nil Value denotes the (implicit + // or explicit) default branch. + Conds []Value +} + +type TypeSwitch struct { + register + Tag Value + Conds []types.Type +} + +// The Return instruction returns values and control back to the calling +// function. +// +// len(Results) is always equal to the number of results in the +// function's signature. +// +// If len(Results) > 1, Return returns a tuple value with the specified +// components which the caller must access using Extract instructions. +// +// There is no instruction to return a ready-made tuple like those +// returned by a "value,ok"-mode TypeAssert, MapLookup or Recv or +// a tail-call to a function with multiple result parameters. +// +// Return must be the last instruction of its containing BasicBlock. +// Such a block has no successors. +// +// Pos() returns the ast.ReturnStmt.Return, if explicit in the source. 
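+//
+// An illustrative sketch (editorial addition, not part of the upstream
+// file): as with calls, tuple-typed results are consumed via Extract, so
+// a comma-ok assertion
+//
+//	s, ok := x.(fmt.Stringer)
+//
+// is built roughly as
+//
+//	t2 = TypeAssert <(value fmt.Stringer, ok bool)> t1
+//	t3 = Extract <fmt.Stringer> [0] (value) t2
+//	t4 = Extract <bool> [1] (ok) t2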
+// +// Example printed form: +// Return +// Return t1 t2 +// +type Return struct { + anInstruction + Results []Value +} + +// The RunDefers instruction pops and invokes the entire stack of +// procedure calls pushed by Defer instructions in this function. +// +// It is legal to encounter multiple 'rundefers' instructions in a +// single control-flow path through a function; this is useful in +// the combined init() function, for example. +// +// Pos() returns NoPos. +// +// Example printed form: +// RunDefers +// +type RunDefers struct { + anInstruction +} + +// The Panic instruction initiates a panic with value X. +// +// A Panic instruction must be the last instruction of its containing +// BasicBlock, which must have one successor, the exit block. +// +// NB: 'go panic(x)' and 'defer panic(x)' do not use this instruction; +// they are treated as calls to a built-in function. +// +// Pos() returns the ast.CallExpr.Lparen if this panic was explicit +// in the source. +// +// Example printed form: +// Panic t1 +// +type Panic struct { + anInstruction + X Value // an interface{} +} + +// The Go instruction creates a new goroutine and calls the specified +// function within it. +// +// See CallCommon for generic function call documentation. +// +// Pos() returns the ast.GoStmt.Go. +// +// Example printed form: +// Go println t1 +// Go t3 +// GoInvoke t4.Bar t2 +// +type Go struct { + anInstruction + Call CallCommon +} + +// The Defer instruction pushes the specified call onto a stack of +// functions to be called by a RunDefers instruction or by a panic. +// +// See CallCommon for generic function call documentation. +// +// Pos() returns the ast.DeferStmt.Defer. +// +// Example printed form: +// Defer println t1 +// Defer t3 +// DeferInvoke t4.Bar t2 +// +type Defer struct { + anInstruction + Call CallCommon +} + +// The Send instruction sends X on channel Chan. +// +// Pos() returns the ast.SendStmt.Arrow, if explicit in the source. +// +// Example printed form: +// Send t2 t1 +// +type Send struct { + anInstruction + Chan, X Value +} + +// The Recv instruction receives from channel Chan. +// +// If CommaOk, the result is a 2-tuple of the value above +// and a boolean indicating the success of the receive. The +// components of the tuple are accessed using Extract. +// +// Pos() returns the ast.UnaryExpr.OpPos, if explicit in the source. +// For receive operations implicit in ranging over a channel, +// Pos() returns the ast.RangeStmt.For. +// +// Example printed form: +// t2 = Recv t1 +// t3 = Recv <(int, bool)> t1 +type Recv struct { + register + Chan Value + CommaOk bool +} + +// The Store instruction stores Val at address Addr. +// Stores can be of arbitrary types. +// +// Pos() returns the position of the source-level construct most closely +// associated with the memory store operation. +// Since implicit memory stores are numerous and varied and depend upon +// implementation choices, the details are not specified. +// +// Example printed form: +// Store {int} t2 t1 +// +type Store struct { + anInstruction + Addr Value + Val Value +} + +// The BlankStore instruction is emitted for assignments to the blank +// identifier. +// +// BlankStore is a pseudo-instruction: it has no dynamic effect. +// +// Pos() returns NoPos. +// +// Example printed form: +// BlankStore t1 +// +type BlankStore struct { + anInstruction + Val Value +} + +// The MapUpdate instruction updates the association of Map[Key] to +// Value. 
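+//
+// An illustrative sketch (editorial addition, not part of the upstream
+// file; m, k and v stand in for registers): the statement
+//
+//	m[k] = v
+//
+// is represented by a single MapUpdate, roughly "MapUpdate m k v";
+// reading m[k] instead uses MapLookup, since map elements are not
+// addressable.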
+// +// Pos() returns the ast.KeyValueExpr.Colon or ast.IndexExpr.Lbrack, +// if explicit in the source. +// +// Example printed form: +// MapUpdate t3 t1 t2 +// +type MapUpdate struct { + anInstruction + Map Value + Key Value + Value Value +} + +// A DebugRef instruction maps a source-level expression Expr to the +// IR value X that represents the value (!IsAddr) or address (IsAddr) +// of that expression. +// +// DebugRef is a pseudo-instruction: it has no dynamic effect. +// +// Pos() returns Expr.Pos(), the start position of the source-level +// expression. This is not the same as the "designated" token as +// documented at Value.Pos(). e.g. CallExpr.Pos() does not return the +// position of the ("designated") Lparen token. +// +// DebugRefs are generated only for functions built with debugging +// enabled; see Package.SetDebugMode() and the GlobalDebug builder +// mode flag. +// +// DebugRefs are not emitted for ast.Idents referring to constants or +// predeclared identifiers, since they are trivial and numerous. +// Nor are they emitted for ast.ParenExprs. +// +// (By representing these as instructions, rather than out-of-band, +// consistency is maintained during transformation passes by the +// ordinary SSA renaming machinery.) +// +// Example printed form: +// ; *ast.CallExpr @ 102:9 is t5 +// ; var x float64 @ 109:72 is x +// ; address of *ast.CompositeLit @ 216:10 is t0 +// +type DebugRef struct { + anInstruction + Expr ast.Expr // the referring expression (never *ast.ParenExpr) + object types.Object // the identity of the source var/func + IsAddr bool // Expr is addressable and X is the address it denotes + X Value // the value or address of Expr +} + +// Embeddable mix-ins and helpers for common parts of other structs. ----------- + +// register is a mix-in embedded by all IR values that are also +// instructions, i.e. virtual registers, and provides a uniform +// implementation of most of the Value interface: Value.Name() is a +// numbered register (e.g. "t0"); the other methods are field accessors. +// +// Temporary names are automatically assigned to each register on +// completion of building a function in IR form. +// +type register struct { + anInstruction + typ types.Type // type of virtual register + referrers []Instruction +} + +type node struct { + source ast.Node + id ID +} + +func (n *node) setID(id ID) { n.id = id } +func (n node) ID() ID { return n.id } + +func (n *node) setSource(source ast.Node) { n.source = source } +func (n *node) Source() ast.Node { return n.source } + +func (n *node) Pos() token.Pos { + if n.source != nil { + return n.source.Pos() + } + return token.NoPos +} + +// anInstruction is a mix-in embedded by all Instructions. +// It provides the implementations of the Block and setBlock methods. +type anInstruction struct { + node + block *BasicBlock // the basic block of this instruction +} + +// CallCommon is contained by Go, Defer and Call to hold the +// common parts of a function or method call. +// +// Each CallCommon exists in one of two modes, function call and +// interface method invocation, or "call" and "invoke" for short. +// +// 1. "call" mode: when Method is nil (!IsInvoke), a CallCommon +// represents an ordinary function call of the value in Value, +// which may be a *Builtin, a *Function or any other value of kind +// 'func'. +// +// Value may be one of: +// (a) a *Function, indicating a statically dispatched call +// to a package-level function, an anonymous function, or +// a method of a named type. 
+// (b) a *MakeClosure, indicating an immediately applied +// function literal with free variables. +// (c) a *Builtin, indicating a statically dispatched call +// to a built-in function. +// (d) any other value, indicating a dynamically dispatched +// function call. +// StaticCallee returns the identity of the callee in cases +// (a) and (b), nil otherwise. +// +// Args contains the arguments to the call. If Value is a method, +// Args[0] contains the receiver parameter. +// +// Example printed form: +// t3 = Call <()> println t1 t2 +// Go t3 +// Defer t3 +// +// 2. "invoke" mode: when Method is non-nil (IsInvoke), a CallCommon +// represents a dynamically dispatched call to an interface method. +// In this mode, Value is the interface value and Method is the +// interface's abstract method. Note: an abstract method may be +// shared by multiple interfaces due to embedding; Value.Type() +// provides the specific interface used for this call. +// +// Value is implicitly supplied to the concrete method implementation +// as the receiver parameter; in other words, Args[0] holds not the +// receiver but the first true argument. +// +// Example printed form: +// t6 = Invoke t5.String +// GoInvoke t4.Bar t2 +// DeferInvoke t4.Bar t2 +// +// For all calls to variadic functions (Signature().Variadic()), +// the last element of Args is a slice. +// +type CallCommon struct { + Value Value // receiver (invoke mode) or func value (call mode) + Method *types.Func // abstract method (invoke mode) + Args []Value // actual parameters (in static method call, includes receiver) + Results Value +} + +// IsInvoke returns true if this call has "invoke" (not "call") mode. +func (c *CallCommon) IsInvoke() bool { + return c.Method != nil +} + +// Signature returns the signature of the called function. +// +// For an "invoke"-mode call, the signature of the interface method is +// returned. +// +// In either "call" or "invoke" mode, if the callee is a method, its +// receiver is represented by sig.Recv, not sig.Params().At(0). +// +func (c *CallCommon) Signature() *types.Signature { + if c.Method != nil { + return c.Method.Type().(*types.Signature) + } + return c.Value.Type().Underlying().(*types.Signature) +} + +// StaticCallee returns the callee if this is a trivially static +// "call"-mode call to a function. +func (c *CallCommon) StaticCallee() *Function { + switch fn := c.Value.(type) { + case *Function: + return fn + case *MakeClosure: + return fn.Fn.(*Function) + } + return nil +} + +// Description returns a description of the mode of this call suitable +// for a user interface, e.g., "static method call". +func (c *CallCommon) Description() string { + switch fn := c.Value.(type) { + case *Builtin: + return "built-in function call" + case *MakeClosure: + return "static function closure call" + case *Function: + if fn.Signature.Recv() != nil { + return "static method call" + } + return "static function call" + } + if c.IsInvoke() { + return "dynamic method call" // ("invoke" mode) + } + return "dynamic function call" +} + +// The CallInstruction interface, implemented by *Go, *Defer and *Call, +// exposes the common parts of function-calling instructions, +// yet provides a way back to the Value defined by *Call alone. 
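+//
+// An illustrative sketch (editorial addition, not part of the upstream
+// file; assumes a client package that imports honnef.co/go/tools/go/ir
+// and holds an ir.Instruction in instr): the interface lets Call, Go and
+// Defer be treated uniformly, e.g.
+//
+//	if call, ok := instr.(ir.CallInstruction); ok {
+//		if callee := call.Common().StaticCallee(); callee != nil {
+//			// statically dispatched call to callee
+//		}
+//	}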
+// +type CallInstruction interface { + Instruction + Common() *CallCommon // returns the common parts of the call + Value() *Call +} + +func (s *Call) Common() *CallCommon { return &s.Call } +func (s *Defer) Common() *CallCommon { return &s.Call } +func (s *Go) Common() *CallCommon { return &s.Call } + +func (s *Call) Value() *Call { return s } +func (s *Defer) Value() *Call { return nil } +func (s *Go) Value() *Call { return nil } + +func (v *Builtin) Type() types.Type { return v.sig } +func (v *Builtin) Name() string { return v.name } +func (*Builtin) Referrers() *[]Instruction { return nil } +func (v *Builtin) Pos() token.Pos { return token.NoPos } +func (v *Builtin) Object() types.Object { return types.Universe.Lookup(v.name) } +func (v *Builtin) Parent() *Function { return nil } + +func (v *FreeVar) Type() types.Type { return v.typ } +func (v *FreeVar) Name() string { return v.name } +func (v *FreeVar) Referrers() *[]Instruction { return &v.referrers } +func (v *FreeVar) Parent() *Function { return v.parent } + +func (v *Global) Type() types.Type { return v.typ } +func (v *Global) Name() string { return v.name } +func (v *Global) Parent() *Function { return nil } +func (v *Global) Referrers() *[]Instruction { return nil } +func (v *Global) Token() token.Token { return token.VAR } +func (v *Global) Object() types.Object { return v.object } +func (v *Global) String() string { return v.RelString(nil) } +func (v *Global) Package() *Package { return v.Pkg } +func (v *Global) RelString(from *types.Package) string { return relString(v, from) } + +func (v *Function) Name() string { return v.name } +func (v *Function) Type() types.Type { return v.Signature } +func (v *Function) Token() token.Token { return token.FUNC } +func (v *Function) Object() types.Object { return v.object } +func (v *Function) String() string { return v.RelString(nil) } +func (v *Function) Package() *Package { return v.Pkg } +func (v *Function) Parent() *Function { return v.parent } +func (v *Function) Referrers() *[]Instruction { + if v.parent != nil { + return &v.referrers + } + return nil +} + +func (v *Parameter) Object() types.Object { return v.object } + +func (v *Alloc) Type() types.Type { return v.typ } +func (v *Alloc) Referrers() *[]Instruction { return &v.referrers } + +func (v *register) Type() types.Type { return v.typ } +func (v *register) setType(typ types.Type) { v.typ = typ } +func (v *register) Name() string { return fmt.Sprintf("t%d", v.id) } +func (v *register) Referrers() *[]Instruction { return &v.referrers } + +func (v *anInstruction) Parent() *Function { return v.block.parent } +func (v *anInstruction) Block() *BasicBlock { return v.block } +func (v *anInstruction) setBlock(block *BasicBlock) { v.block = block } +func (v *anInstruction) Referrers() *[]Instruction { return nil } + +func (t *Type) Name() string { return t.object.Name() } +func (t *Type) Pos() token.Pos { return t.object.Pos() } +func (t *Type) Type() types.Type { return t.object.Type() } +func (t *Type) Token() token.Token { return token.TYPE } +func (t *Type) Object() types.Object { return t.object } +func (t *Type) String() string { return t.RelString(nil) } +func (t *Type) Package() *Package { return t.pkg } +func (t *Type) RelString(from *types.Package) string { return relString(t, from) } + +func (c *NamedConst) Name() string { return c.object.Name() } +func (c *NamedConst) Pos() token.Pos { return c.object.Pos() } +func (c *NamedConst) String() string { return c.RelString(nil) } +func (c *NamedConst) Type() types.Type { 
return c.object.Type() } +func (c *NamedConst) Token() token.Token { return token.CONST } +func (c *NamedConst) Object() types.Object { return c.object } +func (c *NamedConst) Package() *Package { return c.pkg } +func (c *NamedConst) RelString(from *types.Package) string { return relString(c, from) } + +// Func returns the package-level function of the specified name, +// or nil if not found. +// +func (p *Package) Func(name string) (f *Function) { + f, _ = p.Members[name].(*Function) + return +} + +// Var returns the package-level variable of the specified name, +// or nil if not found. +// +func (p *Package) Var(name string) (g *Global) { + g, _ = p.Members[name].(*Global) + return +} + +// Const returns the package-level constant of the specified name, +// or nil if not found. +// +func (p *Package) Const(name string) (c *NamedConst) { + c, _ = p.Members[name].(*NamedConst) + return +} + +// Type returns the package-level type of the specified name, +// or nil if not found. +// +func (p *Package) Type(name string) (t *Type) { + t, _ = p.Members[name].(*Type) + return +} + +func (s *DebugRef) Pos() token.Pos { return s.Expr.Pos() } + +// Operands. + +func (v *Alloc) Operands(rands []*Value) []*Value { + return rands +} + +func (v *BinOp) Operands(rands []*Value) []*Value { + return append(rands, &v.X, &v.Y) +} + +func (c *CallCommon) Operands(rands []*Value) []*Value { + rands = append(rands, &c.Value) + for i := range c.Args { + rands = append(rands, &c.Args[i]) + } + return rands +} + +func (s *Go) Operands(rands []*Value) []*Value { + return s.Call.Operands(rands) +} + +func (s *Call) Operands(rands []*Value) []*Value { + return s.Call.Operands(rands) +} + +func (s *Defer) Operands(rands []*Value) []*Value { + return s.Call.Operands(rands) +} + +func (v *ChangeInterface) Operands(rands []*Value) []*Value { + return append(rands, &v.X) +} + +func (v *ChangeType) Operands(rands []*Value) []*Value { + return append(rands, &v.X) +} + +func (v *Convert) Operands(rands []*Value) []*Value { + return append(rands, &v.X) +} + +func (s *DebugRef) Operands(rands []*Value) []*Value { + return append(rands, &s.X) +} + +func (v *Extract) Operands(rands []*Value) []*Value { + return append(rands, &v.Tuple) +} + +func (v *Field) Operands(rands []*Value) []*Value { + return append(rands, &v.X) +} + +func (v *FieldAddr) Operands(rands []*Value) []*Value { + return append(rands, &v.X) +} + +func (s *If) Operands(rands []*Value) []*Value { + return append(rands, &s.Cond) +} + +func (s *ConstantSwitch) Operands(rands []*Value) []*Value { + rands = append(rands, &s.Tag) + for i := range s.Conds { + rands = append(rands, &s.Conds[i]) + } + return rands +} + +func (s *TypeSwitch) Operands(rands []*Value) []*Value { + rands = append(rands, &s.Tag) + return rands +} + +func (v *Index) Operands(rands []*Value) []*Value { + return append(rands, &v.X, &v.Index) +} + +func (v *IndexAddr) Operands(rands []*Value) []*Value { + return append(rands, &v.X, &v.Index) +} + +func (*Jump) Operands(rands []*Value) []*Value { + return rands +} + +func (*Unreachable) Operands(rands []*Value) []*Value { + return rands +} + +func (v *MapLookup) Operands(rands []*Value) []*Value { + return append(rands, &v.X, &v.Index) +} + +func (v *StringLookup) Operands(rands []*Value) []*Value { + return append(rands, &v.X, &v.Index) +} + +func (v *MakeChan) Operands(rands []*Value) []*Value { + return append(rands, &v.Size) +} + +func (v *MakeClosure) Operands(rands []*Value) []*Value { + rands = append(rands, &v.Fn) + for i := range 
v.Bindings { + rands = append(rands, &v.Bindings[i]) + } + return rands +} + +func (v *MakeInterface) Operands(rands []*Value) []*Value { + return append(rands, &v.X) +} + +func (v *MakeMap) Operands(rands []*Value) []*Value { + return append(rands, &v.Reserve) +} + +func (v *MakeSlice) Operands(rands []*Value) []*Value { + return append(rands, &v.Len, &v.Cap) +} + +func (v *MapUpdate) Operands(rands []*Value) []*Value { + return append(rands, &v.Map, &v.Key, &v.Value) +} + +func (v *Next) Operands(rands []*Value) []*Value { + return append(rands, &v.Iter) +} + +func (s *Panic) Operands(rands []*Value) []*Value { + return append(rands, &s.X) +} + +func (v *Sigma) Operands(rands []*Value) []*Value { + return append(rands, &v.X) +} + +func (v *Phi) Operands(rands []*Value) []*Value { + for i := range v.Edges { + rands = append(rands, &v.Edges[i]) + } + return rands +} + +func (v *Range) Operands(rands []*Value) []*Value { + return append(rands, &v.X) +} + +func (s *Return) Operands(rands []*Value) []*Value { + for i := range s.Results { + rands = append(rands, &s.Results[i]) + } + return rands +} + +func (*RunDefers) Operands(rands []*Value) []*Value { + return rands +} + +func (v *Select) Operands(rands []*Value) []*Value { + for i := range v.States { + rands = append(rands, &v.States[i].Chan, &v.States[i].Send) + } + return rands +} + +func (s *Send) Operands(rands []*Value) []*Value { + return append(rands, &s.Chan, &s.X) +} + +func (recv *Recv) Operands(rands []*Value) []*Value { + return append(rands, &recv.Chan) +} + +func (v *Slice) Operands(rands []*Value) []*Value { + return append(rands, &v.X, &v.Low, &v.High, &v.Max) +} + +func (s *Store) Operands(rands []*Value) []*Value { + return append(rands, &s.Addr, &s.Val) +} + +func (s *BlankStore) Operands(rands []*Value) []*Value { + return append(rands, &s.Val) +} + +func (v *TypeAssert) Operands(rands []*Value) []*Value { + return append(rands, &v.X) +} + +func (v *UnOp) Operands(rands []*Value) []*Value { + return append(rands, &v.X) +} + +func (v *Load) Operands(rands []*Value) []*Value { + return append(rands, &v.X) +} + +// Non-Instruction Values: +func (v *Builtin) Operands(rands []*Value) []*Value { return rands } +func (v *FreeVar) Operands(rands []*Value) []*Value { return rands } +func (v *Const) Operands(rands []*Value) []*Value { return rands } +func (v *Function) Operands(rands []*Value) []*Value { return rands } +func (v *Global) Operands(rands []*Value) []*Value { return rands } +func (v *Parameter) Operands(rands []*Value) []*Value { return rands } diff --git a/vendor/honnef.co/go/tools/go/ir/staticcheck.conf b/vendor/honnef.co/go/tools/go/ir/staticcheck.conf new file mode 100644 index 000000000..d7b38bc35 --- /dev/null +++ b/vendor/honnef.co/go/tools/go/ir/staticcheck.conf @@ -0,0 +1,3 @@ +# ssa/... is mostly imported from upstream and we don't want to +# deviate from it too much, hence disabling SA1019 +checks = ["inherit", "-SA1019"] diff --git a/vendor/honnef.co/go/tools/go/ir/util.go b/vendor/honnef.co/go/tools/go/ir/util.go new file mode 100644 index 000000000..343a6320a --- /dev/null +++ b/vendor/honnef.co/go/tools/go/ir/util.go @@ -0,0 +1,89 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package ir + +// This file defines a number of miscellaneous utility functions. 
+ +import ( + "fmt" + "go/ast" + "go/token" + "go/types" + "io" + "os" + + "honnef.co/go/tools/go/ast/astutil" +) + +//// AST utilities + +func unparen(e ast.Expr) ast.Expr { return astutil.Unparen(e) } + +// isBlankIdent returns true iff e is an Ident with name "_". +// They have no associated types.Object, and thus no type. +// +func isBlankIdent(e ast.Expr) bool { + id, ok := e.(*ast.Ident) + return ok && id.Name == "_" +} + +//// Type utilities. Some of these belong in go/types. + +// isPointer returns true for types whose underlying type is a pointer. +func isPointer(typ types.Type) bool { + _, ok := typ.Underlying().(*types.Pointer) + return ok +} + +func isInterface(T types.Type) bool { return types.IsInterface(T) } + +// deref returns a pointer's element type; otherwise it returns typ. +func deref(typ types.Type) types.Type { + if p, ok := typ.Underlying().(*types.Pointer); ok { + return p.Elem() + } + return typ +} + +// recvType returns the receiver type of method obj. +func recvType(obj *types.Func) types.Type { + return obj.Type().(*types.Signature).Recv().Type() +} + +// logStack prints the formatted "start" message to stderr and +// returns a closure that prints the corresponding "end" message. +// Call using 'defer logStack(...)()' to show builder stack on panic. +// Don't forget trailing parens! +// +func logStack(format string, args ...interface{}) func() { + msg := fmt.Sprintf(format, args...) + io.WriteString(os.Stderr, msg) + io.WriteString(os.Stderr, "\n") + return func() { + io.WriteString(os.Stderr, msg) + io.WriteString(os.Stderr, " end\n") + } +} + +// newVar creates a 'var' for use in a types.Tuple. +func newVar(name string, typ types.Type) *types.Var { + return types.NewParam(token.NoPos, nil, name, typ) +} + +// anonVar creates an anonymous 'var' for use in a types.Tuple. +func anonVar(typ types.Type) *types.Var { + return newVar("", typ) +} + +var lenResults = types.NewTuple(anonVar(tInt)) + +// makeLen returns the len builtin specialized to type func(T)int. +func makeLen(T types.Type) *Builtin { + lenParams := types.NewTuple(anonVar(T)) + return &Builtin{ + name: "len", + sig: types.NewSignature(nil, lenParams, lenResults, false), + } +} diff --git a/vendor/honnef.co/go/tools/go/ir/wrappers.go b/vendor/honnef.co/go/tools/go/ir/wrappers.go new file mode 100644 index 000000000..1d51b5dc9 --- /dev/null +++ b/vendor/honnef.co/go/tools/go/ir/wrappers.go @@ -0,0 +1,290 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package ir + +// This file defines synthesis of Functions that delegate to declared +// methods; they come in three kinds: +// +// (1) wrappers: methods that wrap declared methods, performing +// implicit pointer indirections and embedded field selections. +// +// (2) thunks: funcs that wrap declared methods. Like wrappers, +// thunks perform indirections and field selections. The thunk's +// first parameter is used as the receiver for the method call. +// +// (3) bounds: funcs that wrap declared methods. The bound's sole +// free variable, supplied by a closure, is used as the receiver +// for the method call. No indirections or field selections are +// performed since they can be done before the call. 
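+//
+// An illustrative note (editorial addition, not part of the upstream
+// file): in source terms, a method expression f := T.meth gives rise to
+// a thunk, a method value g := t.meth to a bound, and a method reached
+// through an embedded field or an extra pointer indirection to a
+// wrapper; see makeWrapper, makeBound and makeThunk below.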
+ +import ( + "fmt" + + "go/types" +) + +// -- wrappers ----------------------------------------------------------- + +// makeWrapper returns a synthetic method that delegates to the +// declared method denoted by meth.Obj(), first performing any +// necessary pointer indirections or field selections implied by meth. +// +// The resulting method's receiver type is meth.Recv(). +// +// This function is versatile but quite subtle! Consider the +// following axes of variation when making changes: +// - optional receiver indirection +// - optional implicit field selections +// - meth.Obj() may denote a concrete or an interface method +// - the result may be a thunk or a wrapper. +// +// EXCLUSIVE_LOCKS_REQUIRED(prog.methodsMu) +// +func makeWrapper(prog *Program, sel *types.Selection) *Function { + obj := sel.Obj().(*types.Func) // the declared function + sig := sel.Type().(*types.Signature) // type of this wrapper + + var recv *types.Var // wrapper's receiver or thunk's params[0] + name := obj.Name() + var description Synthetic + var start int // first regular param + if sel.Kind() == types.MethodExpr { + name += "$thunk" + description = SyntheticThunk + recv = sig.Params().At(0) + start = 1 + } else { + description = SyntheticWrapper + recv = sig.Recv() + } + + if prog.mode&LogSource != 0 { + defer logStack("make %s to (%s)", description, recv.Type())() + } + fn := &Function{ + name: name, + method: sel, + object: obj, + Signature: sig, + Synthetic: description, + Prog: prog, + functionBody: new(functionBody), + } + fn.initHTML(prog.PrintFunc) + fn.startBody() + fn.addSpilledParam(recv, nil) + createParams(fn, start) + + indices := sel.Index() + + var v Value = fn.Locals[0] // spilled receiver + if isPointer(sel.Recv()) { + v = emitLoad(fn, v, nil) + + // For simple indirection wrappers, perform an informative nil-check: + // "value method (T).f called using nil *T pointer" + if len(indices) == 1 && !isPointer(recvType(obj)) { + var c Call + c.Call.Value = &Builtin{ + name: "ir:wrapnilchk", + sig: types.NewSignature(nil, + types.NewTuple(anonVar(sel.Recv()), anonVar(tString), anonVar(tString)), + types.NewTuple(anonVar(sel.Recv())), false), + } + c.Call.Args = []Value{ + v, + emitConst(fn, stringConst(deref(sel.Recv()).String())), + emitConst(fn, stringConst(sel.Obj().Name())), + } + c.setType(v.Type()) + v = fn.emit(&c, nil) + } + } + + // Invariant: v is a pointer, either + // value of *A receiver param, or + // address of A spilled receiver. + + // We use pointer arithmetic (FieldAddr possibly followed by + // Load) in preference to value extraction (Field possibly + // preceded by Load). + + v = emitImplicitSelections(fn, v, indices[:len(indices)-1], nil) + + // Invariant: v is a pointer, either + // value of implicit *C field, or + // address of implicit C field. + + var c Call + if r := recvType(obj); !isInterface(r) { // concrete method + if !isPointer(r) { + v = emitLoad(fn, v, nil) + } + c.Call.Value = prog.declaredFunc(obj) + c.Call.Args = append(c.Call.Args, v) + } else { + c.Call.Method = obj + c.Call.Value = emitLoad(fn, v, nil) + } + for _, arg := range fn.Params[1:] { + c.Call.Args = append(c.Call.Args, arg) + } + emitTailCall(fn, &c, nil) + fn.finishBody() + return fn +} + +// createParams creates parameters for wrapper method fn based on its +// Signature.Params, which do not include the receiver. +// start is the index of the first regular parameter to use. 
+// +func createParams(fn *Function, start int) { + tparams := fn.Signature.Params() + for i, n := start, tparams.Len(); i < n; i++ { + fn.addParamObj(tparams.At(i), nil) + } +} + +// -- bounds ----------------------------------------------------------- + +// makeBound returns a bound method wrapper (or "bound"), a synthetic +// function that delegates to a concrete or interface method denoted +// by obj. The resulting function has no receiver, but has one free +// variable which will be used as the method's receiver in the +// tail-call. +// +// Use MakeClosure with such a wrapper to construct a bound method +// closure. e.g.: +// +// type T int or: type T interface { meth() } +// func (t T) meth() +// var t T +// f := t.meth +// f() // calls t.meth() +// +// f is a closure of a synthetic wrapper defined as if by: +// +// f := func() { return t.meth() } +// +// Unlike makeWrapper, makeBound need perform no indirection or field +// selections because that can be done before the closure is +// constructed. +// +// EXCLUSIVE_LOCKS_ACQUIRED(meth.Prog.methodsMu) +// +func makeBound(prog *Program, obj *types.Func) *Function { + prog.methodsMu.Lock() + defer prog.methodsMu.Unlock() + fn, ok := prog.bounds[obj] + if !ok { + if prog.mode&LogSource != 0 { + defer logStack("%s", SyntheticBound)() + } + fn = &Function{ + name: obj.Name() + "$bound", + object: obj, + Signature: changeRecv(obj.Type().(*types.Signature), nil), // drop receiver + Synthetic: SyntheticBound, + Prog: prog, + functionBody: new(functionBody), + } + fn.initHTML(prog.PrintFunc) + + fv := &FreeVar{name: "recv", typ: recvType(obj), parent: fn} + fn.FreeVars = []*FreeVar{fv} + fn.startBody() + createParams(fn, 0) + var c Call + + if !isInterface(recvType(obj)) { // concrete + c.Call.Value = prog.declaredFunc(obj) + c.Call.Args = []Value{fv} + } else { + c.Call.Value = fv + c.Call.Method = obj + } + for _, arg := range fn.Params { + c.Call.Args = append(c.Call.Args, arg) + } + emitTailCall(fn, &c, nil) + fn.finishBody() + + prog.bounds[obj] = fn + } + return fn +} + +// -- thunks ----------------------------------------------------------- + +// makeThunk returns a thunk, a synthetic function that delegates to a +// concrete or interface method denoted by sel.Obj(). The resulting +// function has no receiver, but has an additional (first) regular +// parameter. +// +// Precondition: sel.Kind() == types.MethodExpr. +// +// type T int or: type T interface { meth() } +// func (t T) meth() +// f := T.meth +// var t T +// f(t) // calls t.meth() +// +// f is a synthetic wrapper defined as if by: +// +// f := func(t T) { return t.meth() } +// +// TODO(adonovan): opt: currently the stub is created even when used +// directly in a function call: C.f(i, 0). This is less efficient +// than inlining the stub. +// +// EXCLUSIVE_LOCKS_ACQUIRED(meth.Prog.methodsMu) +// +func makeThunk(prog *Program, sel *types.Selection) *Function { + if sel.Kind() != types.MethodExpr { + panic(sel) + } + + key := selectionKey{ + kind: sel.Kind(), + recv: sel.Recv(), + obj: sel.Obj(), + index: fmt.Sprint(sel.Index()), + indirect: sel.Indirect(), + } + + prog.methodsMu.Lock() + defer prog.methodsMu.Unlock() + + // Canonicalize key.recv to avoid constructing duplicate thunks. 
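+	// (Editorial note, not upstream: structurally identical receiver types
+	// may be represented by distinct types.Type values, so they are mapped
+	// to a single canonical representative before being used as part of
+	// the thunks map key.)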
+ canonRecv, ok := prog.canon.At(key.recv).(types.Type) + if !ok { + canonRecv = key.recv + prog.canon.Set(key.recv, canonRecv) + } + key.recv = canonRecv + + fn, ok := prog.thunks[key] + if !ok { + fn = makeWrapper(prog, sel) + if fn.Signature.Recv() != nil { + panic(fn) // unexpected receiver + } + prog.thunks[key] = fn + } + return fn +} + +func changeRecv(s *types.Signature, recv *types.Var) *types.Signature { + return types.NewSignature(recv, s.Params(), s.Results(), s.Variadic()) +} + +// selectionKey is like types.Selection but a usable map key. +type selectionKey struct { + kind types.SelectionKind + recv types.Type // canonicalized via Program.canon + obj types.Object + index string + indirect bool +} diff --git a/vendor/honnef.co/go/tools/go/ir/write.go b/vendor/honnef.co/go/tools/go/ir/write.go new file mode 100644 index 000000000..b936bc985 --- /dev/null +++ b/vendor/honnef.co/go/tools/go/ir/write.go @@ -0,0 +1,5 @@ +package ir + +func NewJump(parent *BasicBlock) *Jump { + return &Jump{anInstruction{block: parent}, ""} +} diff --git a/vendor/honnef.co/go/tools/go/types/typeutil/upstream.go b/vendor/honnef.co/go/tools/go/types/typeutil/upstream.go new file mode 100644 index 000000000..d35d08e00 --- /dev/null +++ b/vendor/honnef.co/go/tools/go/types/typeutil/upstream.go @@ -0,0 +1,25 @@ +package typeutil + +import ( + "go/ast" + "go/types" + _ "unsafe" + + "golang.org/x/tools/go/types/typeutil" +) + +type MethodSetCache = typeutil.MethodSetCache +type Map = typeutil.Map +type Hasher = typeutil.Hasher + +func Callee(info *types.Info, call *ast.CallExpr) types.Object { + return typeutil.Callee(info, call) +} + +func IntuitiveMethodSet(T types.Type, msets *MethodSetCache) []*types.Selection { + return typeutil.IntuitiveMethodSet(T, msets) +} + +func MakeHasher() Hasher { + return typeutil.MakeHasher() +} diff --git a/vendor/honnef.co/go/tools/go/types/typeutil/util.go b/vendor/honnef.co/go/tools/go/types/typeutil/util.go new file mode 100644 index 000000000..b0aca16bd --- /dev/null +++ b/vendor/honnef.co/go/tools/go/types/typeutil/util.go @@ -0,0 +1,131 @@ +package typeutil + +import ( + "bytes" + "go/types" + "sync" +) + +var bufferPool = &sync.Pool{ + New: func() interface{} { + buf := bytes.NewBuffer(nil) + buf.Grow(64) + return buf + }, +} + +func FuncName(f *types.Func) string { + buf := bufferPool.Get().(*bytes.Buffer) + buf.Reset() + if f.Type() != nil { + sig := f.Type().(*types.Signature) + if recv := sig.Recv(); recv != nil { + buf.WriteByte('(') + if _, ok := recv.Type().(*types.Interface); ok { + // gcimporter creates abstract methods of + // named interfaces using the interface type + // (not the named type) as the receiver. + // Don't print it in full. + buf.WriteString("interface") + } else { + types.WriteType(buf, recv.Type(), nil) + } + buf.WriteByte(')') + buf.WriteByte('.') + } else if f.Pkg() != nil { + writePackage(buf, f.Pkg()) + } + } + buf.WriteString(f.Name()) + s := buf.String() + bufferPool.Put(buf) + return s +} + +func writePackage(buf *bytes.Buffer, pkg *types.Package) { + if pkg == nil { + return + } + s := pkg.Path() + if s != "" { + buf.WriteString(s) + buf.WriteByte('.') + } +} + +// Dereference returns a pointer's element type; otherwise it returns +// T. +func Dereference(T types.Type) types.Type { + if p, ok := T.Underlying().(*types.Pointer); ok { + return p.Elem() + } + return T +} + +// DereferenceR returns a pointer's element type; otherwise it returns +// T. 
If the element type is itself a pointer, DereferenceR will be +// applied recursively. +func DereferenceR(T types.Type) types.Type { + if p, ok := T.Underlying().(*types.Pointer); ok { + return DereferenceR(p.Elem()) + } + return T +} + +func IsObject(obj types.Object, name string) bool { + var path string + if pkg := obj.Pkg(); pkg != nil { + path = pkg.Path() + "." + } + return path+obj.Name() == name +} + +// OPT(dh): IsType is kind of expensive; should we really use it? +func IsType(T types.Type, name string) bool { return types.TypeString(T, nil) == name } + +func IsPointerLike(T types.Type) bool { + switch T := T.Underlying().(type) { + case *types.Interface, *types.Chan, *types.Map, *types.Signature, *types.Pointer, *types.Slice: + return true + case *types.Basic: + return T.Kind() == types.UnsafePointer + } + return false +} + +type Field struct { + Var *types.Var + Tag string + Path []int +} + +// FlattenFields recursively flattens T and embedded structs, +// returning a list of fields. If multiple fields with the same name +// exist, all will be returned. +func FlattenFields(T *types.Struct) []Field { + return flattenFields(T, nil, nil) +} + +func flattenFields(T *types.Struct, path []int, seen map[types.Type]bool) []Field { + if seen == nil { + seen = map[types.Type]bool{} + } + if seen[T] { + return nil + } + seen[T] = true + var out []Field + for i := 0; i < T.NumFields(); i++ { + field := T.Field(i) + tag := T.Tag(i) + np := append(path[:len(path):len(path)], i) + if field.Anonymous() { + if s, ok := Dereference(field.Type()).Underlying().(*types.Struct); ok { + out = append(out, flattenFields(s, np, seen)...) + } + } else { + out = append(out, Field{field, tag, np}) + } + } + return out +} diff --git a/vendor/honnef.co/go/tools/internal/passes/buildir/buildir.go b/vendor/honnef.co/go/tools/internal/passes/buildir/buildir.go new file mode 100644 index 000000000..51dfaef53 --- /dev/null +++ b/vendor/honnef.co/go/tools/internal/passes/buildir/buildir.go @@ -0,0 +1,107 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package buildir defines an Analyzer that constructs the IR +// of an error-free package and returns the set of all +// functions within it. It does not report any diagnostics itself but +// may be used as an input to other analyzers. +// +// THIS INTERFACE IS EXPERIMENTAL AND MAY BE SUBJECT TO INCOMPATIBLE CHANGE. +package buildir + +import ( + "go/ast" + "go/types" + "reflect" + + "honnef.co/go/tools/go/ir" + + "golang.org/x/tools/go/analysis" +) + +type noReturn struct { + Kind ir.NoReturn +} + +func (*noReturn) AFact() {} + +var Analyzer = &analysis.Analyzer{ + Name: "buildir", + Doc: "build IR for later passes", + Run: run, + ResultType: reflect.TypeOf(new(IR)), + FactTypes: []analysis.Fact{new(noReturn)}, +} + +// IR provides intermediate representation for all the +// non-blank source functions in the current package. +type IR struct { + Pkg *ir.Package + SrcFuncs []*ir.Function +} + +func run(pass *analysis.Pass) (interface{}, error) { + // Plundered from ssautil.BuildPackage. + + // We must create a new Program for each Package because the + // analysis API provides no place to hang a Program shared by + // all Packages. Consequently, IR Packages and Functions do not + // have a canonical representation across an analysis session of + // multiple packages. 
This is unlikely to be a problem in + // practice because the analysis API essentially forces all + // packages to be analysed independently, so any given call to + // Analysis.Run on a package will see only IR objects belonging + // to a single Program. + + mode := ir.GlobalDebug + + prog := ir.NewProgram(pass.Fset, mode) + + // Create IR packages for all imports. + // Order is not significant. + created := make(map[*types.Package]bool) + var createAll func(pkgs []*types.Package) + createAll = func(pkgs []*types.Package) { + for _, p := range pkgs { + if !created[p] { + created[p] = true + irpkg := prog.CreatePackage(p, nil, nil, true) + for _, fn := range irpkg.Functions { + if ast.IsExported(fn.Name()) { + var noRet noReturn + if pass.ImportObjectFact(fn.Object(), &noRet) { + fn.NoReturn = noRet.Kind + } + } + } + createAll(p.Imports()) + } + } + } + createAll(pass.Pkg.Imports()) + + // Create and build the primary package. + irpkg := prog.CreatePackage(pass.Pkg, pass.Files, pass.TypesInfo, false) + irpkg.Build() + + // Compute list of source functions, including literals, + // in source order. + var addAnons func(f *ir.Function) + funcs := make([]*ir.Function, len(irpkg.Functions)) + copy(funcs, irpkg.Functions) + addAnons = func(f *ir.Function) { + for _, anon := range f.AnonFuncs { + funcs = append(funcs, anon) + addAnons(anon) + } + } + for _, fn := range irpkg.Functions { + addAnons(fn) + if fn.NoReturn > 0 { + pass.ExportObjectFact(fn.Object(), &noReturn{fn.NoReturn}) + } + } + + return &IR{Pkg: irpkg, SrcFuncs: funcs}, nil +} diff --git a/vendor/honnef.co/go/tools/internal/sharedcheck/lint.go b/vendor/honnef.co/go/tools/internal/sharedcheck/lint.go new file mode 100644 index 000000000..2e839614a --- /dev/null +++ b/vendor/honnef.co/go/tools/internal/sharedcheck/lint.go @@ -0,0 +1,206 @@ +package sharedcheck + +import ( + "fmt" + "go/ast" + "go/token" + "go/types" + + "honnef.co/go/tools/analysis/code" + "honnef.co/go/tools/analysis/edit" + "honnef.co/go/tools/analysis/facts" + "honnef.co/go/tools/analysis/report" + "honnef.co/go/tools/go/ast/astutil" + "honnef.co/go/tools/go/ir" + "honnef.co/go/tools/go/ir/irutil" + "honnef.co/go/tools/internal/passes/buildir" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" +) + +func CheckRangeStringRunes(pass *analysis.Pass) (interface{}, error) { + for _, fn := range pass.ResultOf[buildir.Analyzer].(*buildir.IR).SrcFuncs { + cb := func(node ast.Node) bool { + rng, ok := node.(*ast.RangeStmt) + if !ok || !astutil.IsBlank(rng.Key) { + return true + } + + v, _ := fn.ValueForExpr(rng.X) + + // Check that we're converting from string to []rune + val, _ := v.(*ir.Convert) + if val == nil { + return true + } + Tsrc, ok := val.X.Type().Underlying().(*types.Basic) + if !ok || Tsrc.Kind() != types.String { + return true + } + Tdst, ok := val.Type().(*types.Slice) + if !ok { + return true + } + TdstElem, ok := Tdst.Elem().(*types.Basic) + if !ok || TdstElem.Kind() != types.Int32 { + return true + } + + // Check that the result of the conversion is only used to + // range over + refs := val.Referrers() + if refs == nil { + return true + } + + // Expect two refs: one for obtaining the length of the slice, + // one for accessing the elements + if len(irutil.FilterDebug(*refs)) != 2 { + // TODO(dh): right now, we check that only one place + // refers to our slice. This will miss cases such as + // ranging over the slice twice. 
Ideally, we'd ensure that + // the slice is only used for ranging over (without + // accessing the key), but that is harder to do because in + // IR form, ranging over a slice looks like an ordinary + // loop with index increments and slice accesses. We'd + // have to look at the associated AST node to check that + // it's a range statement. + return true + } + + pass.Reportf(rng.Pos(), "should range over string, not []rune(string)") + + return true + } + if source := fn.Source(); source != nil { + ast.Inspect(source, cb) + } + } + return nil, nil +} + +// RedundantTypeInDeclarationChecker returns a checker that flags variable declarations with redundantly specified types. +// That is, it flags 'var v T = e' where e's type is identical to T and 'var v = e' (or 'v := e') would have the same effect. +// +// It does not flag variables under the following conditions, to reduce the number of false positives: +// - global variables – these often specify types to aid godoc +// - files that use cgo – cgo code generation and pointer checking emits redundant types +// +// It does not flag variables under the following conditions, unless flagHelpfulTypes is true, to reduce the number of noisy positives: +// - packages that import syscall or unsafe – these sometimes use this form of assignment to make sure types are as expected +// - variables named the blank identifier – a pattern used to confirm the types of variables +// - named untyped constants on the rhs – the explicitness might aid readability +func RedundantTypeInDeclarationChecker(verb string, flagHelpfulTypes bool) *analysis.Analyzer { + fn := func(pass *analysis.Pass) (interface{}, error) { + eval := func(expr ast.Expr) (types.TypeAndValue, error) { + info := &types.Info{ + Types: map[ast.Expr]types.TypeAndValue{}, + } + err := types.CheckExpr(pass.Fset, pass.Pkg, expr.Pos(), expr, info) + return info.Types[expr], err + } + + if !flagHelpfulTypes { + // Don't look at code in low-level packages + for _, imp := range pass.Pkg.Imports() { + if imp.Path() == "syscall" || imp.Path() == "unsafe" { + return nil, nil + } + } + } + + fn := func(node ast.Node) { + decl := node.(*ast.GenDecl) + if decl.Tok != token.VAR { + return + } + + gen, _ := code.Generator(pass, decl.Pos()) + if gen == facts.Cgo { + // TODO(dh): remove this exception once we can use UsesCgo + return + } + + // Delay looking up parent AST nodes until we have to + checkedDecl := false + + specLoop: + for _, spec := range decl.Specs { + spec := spec.(*ast.ValueSpec) + if spec.Type == nil { + continue + } + if len(spec.Names) != len(spec.Values) { + continue + } + Tlhs := pass.TypesInfo.TypeOf(spec.Type) + for i, v := range spec.Values { + if !flagHelpfulTypes && spec.Names[i].Name == "_" { + continue specLoop + } + Trhs := pass.TypesInfo.TypeOf(v) + if !types.Identical(Tlhs, Trhs) { + continue specLoop + } + + // Some expressions are untyped and get converted to the lhs type implicitly. + // This applies to untyped constants, shift operations with an untyped lhs, and possibly others. + // + // Check if the type is truly redundant, i.e. if the type on the lhs doesn't match the default type of the untyped constant. + tv, err := eval(v) + if err != nil { + panic(err) + } + if b, ok := tv.Type.(*types.Basic); ok && (b.Info()&types.IsUntyped) != 0 { + switch v := v.(type) { + case *ast.Ident: + // Only flag named constant rhs if it's a predeclared identifier. + // Don't flag other named constants, as the explicit type may aid readability. 
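+							// (Editorial note, not upstream: predeclared identifiers such as
+							// true, false and iota live in the universe scope and have a nil
+							// Pkg(); a non-nil Pkg() therefore indicates a user-defined named
+							// constant.)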
+ if pass.TypesInfo.ObjectOf(v).Pkg() != nil && !flagHelpfulTypes { + continue specLoop + } + case *ast.SelectorExpr: + // Constant selector expressions can only refer to named constants that arent predeclared. + if !flagHelpfulTypes { + continue specLoop + } + default: + // don't skip if the type on the lhs matches the default type of the constant + if Tlhs != types.Default(b) { + continue specLoop + } + } + } + } + + if !checkedDecl { + // Don't flag global variables. These often have explicit types for godoc's sake. + path, _ := astutil.PathEnclosingInterval(code.File(pass, decl), decl.Pos(), decl.Pos()) + pathLoop: + for _, el := range path { + switch el.(type) { + case *ast.FuncDecl, *ast.FuncLit: + checkedDecl = true + break pathLoop + } + } + if !checkedDecl { + // decl is not inside a function + break specLoop + } + } + + report.Report(pass, spec.Type, fmt.Sprintf("%s omit type %s from declaration; it will be inferred from the right-hand side", verb, report.Render(pass, spec.Type)), report.FilterGenerated(), + report.Fixes(edit.Fix("Remove redundant type", edit.Delete(spec.Type)))) + } + } + code.Preorder(pass, fn, (*ast.GenDecl)(nil)) + return nil, nil + } + + return &analysis.Analyzer{ + Run: fn, + Requires: []*analysis.Analyzer{facts.Generated, inspect.Analyzer, facts.TokenFile, facts.Generated}, + } +} diff --git a/vendor/honnef.co/go/tools/knowledge/arg.go b/vendor/honnef.co/go/tools/knowledge/arg.go new file mode 100644 index 000000000..7ac0b358d --- /dev/null +++ b/vendor/honnef.co/go/tools/knowledge/arg.go @@ -0,0 +1,64 @@ +package knowledge + +var args = map[string]int{ + "(*encoding/json.Decoder).Decode.v": 0, + "(*encoding/json.Encoder).Encode.v": 0, + "(*encoding/xml.Decoder).Decode.v": 0, + "(*encoding/xml.Encoder).Encode.v": 0, + "(*sync.Pool).Put.x": 0, + "(*text/template.Template).Parse.text": 0, + "(io.Seeker).Seek.offset": 0, + "(time.Time).Sub.u": 0, + "append.elems": 1, + "append.slice": 0, + "bytes.Equal.a": 0, + "bytes.Equal.b": 1, + "encoding/binary.Write.data": 2, + "errors.New.text": 0, + "fmt.Fprintf.format": 1, + "fmt.Printf.format": 0, + "fmt.Sprintf.a[0]": 1, + "fmt.Sprintf.format": 0, + "json.Marshal.v": 0, + "json.Unmarshal.v": 1, + "len.v": 0, + "make.size[0]": 1, + "make.size[1]": 2, + "make.t": 0, + "net/url.Parse.rawurl": 0, + "os.OpenFile.flag": 1, + "os/exec.Command.name": 0, + "os/signal.Notify.c": 0, + "regexp.Compile.expr": 0, + "runtime.SetFinalizer.finalizer": 1, + "runtime.SetFinalizer.obj": 0, + "sort.Sort.data": 0, + "strconv.AppendFloat.bitSize": 4, + "strconv.AppendFloat.fmt": 2, + "strconv.AppendInt.base": 2, + "strconv.AppendUint.base": 2, + "strconv.FormatComplex.bitSize": 3, + "strconv.FormatComplex.fmt": 1, + "strconv.FormatFloat.bitSize": 3, + "strconv.FormatFloat.fmt": 1, + "strconv.FormatInt.base": 1, + "strconv.FormatUint.base": 1, + "strconv.ParseComplex.bitSize": 1, + "strconv.ParseFloat.bitSize": 1, + "strconv.ParseInt.base": 1, + "strconv.ParseInt.bitSize": 2, + "strconv.ParseUint.base": 1, + "strconv.ParseUint.bitSize": 2, + "time.Parse.layout": 0, + "time.Sleep.d": 0, + "xml.Marshal.v": 0, + "xml.Unmarshal.v": 1, +} + +func Arg(name string) int { + n, ok := args[name] + if !ok { + panic("unknown argument " + name) + } + return n +} diff --git a/vendor/honnef.co/go/tools/knowledge/deprecated.go b/vendor/honnef.co/go/tools/knowledge/deprecated.go new file mode 100644 index 000000000..8fa84fd44 --- /dev/null +++ b/vendor/honnef.co/go/tools/knowledge/deprecated.go @@ -0,0 +1,217 @@ +package knowledge + +const ( + 
DeprecatedNeverUse = -1 + DeprecatedUseNoLonger = -2 +) + +type Deprecation struct { + DeprecatedSince int + AlternativeAvailableSince int +} + +// go/importer.ForCompiler contains "Deprecated:", but it refers to a single argument, not the whole function. +// Luckily, the notice starts in the middle of a paragraph, and as such isn't detected by us. + +var StdlibDeprecations = map[string]Deprecation{ + // FIXME(dh): AllowBinary isn't being detected as deprecated + // because the comment has a newline right after "Deprecated:" + "go/build.AllowBinary": {7, 7}, + "(archive/zip.FileHeader).CompressedSize": {1, 1}, + "(archive/zip.FileHeader).UncompressedSize": {1, 1}, + "(archive/zip.FileHeader).ModifiedTime": {10, 10}, + "(archive/zip.FileHeader).ModifiedDate": {10, 10}, + "(*archive/zip.FileHeader).ModTime": {10, 10}, + "(*archive/zip.FileHeader).SetModTime": {10, 10}, + "(go/doc.Package).Bugs": {1, 1}, + "os.SEEK_SET": {7, 7}, + "os.SEEK_CUR": {7, 7}, + "os.SEEK_END": {7, 7}, + "(net.Dialer).Cancel": {7, 7}, + "runtime.CPUProfile": {9, 0}, + "compress/flate.ReadError": {6, DeprecatedUseNoLonger}, + "compress/flate.WriteError": {6, DeprecatedUseNoLonger}, + "path/filepath.HasPrefix": {0, DeprecatedNeverUse}, + "(net/http.Transport).Dial": {7, 7}, + "(net/http.Transport).DialTLS": {14, 14}, + "(*net/http.Transport).CancelRequest": {6, 5}, + "net/http.ErrWriteAfterFlush": {7, DeprecatedUseNoLonger}, + "net/http.ErrHeaderTooLong": {8, DeprecatedUseNoLonger}, + "net/http.ErrShortBody": {8, DeprecatedUseNoLonger}, + "net/http.ErrMissingContentLength": {8, DeprecatedUseNoLonger}, + "net/http/httputil.ErrPersistEOF": {0, DeprecatedUseNoLonger}, + "net/http/httputil.ErrClosed": {0, DeprecatedUseNoLonger}, + "net/http/httputil.ErrPipeline": {0, DeprecatedUseNoLonger}, + "net/http/httputil.ServerConn": {0, 0}, + "net/http/httputil.NewServerConn": {0, 0}, + "net/http/httputil.ClientConn": {0, 0}, + "net/http/httputil.NewClientConn": {0, 0}, + "net/http/httputil.NewProxyClientConn": {0, 0}, + "(net/http.Request).Cancel": {7, 7}, + "(text/template/parse.PipeNode).Line": {1, DeprecatedUseNoLonger}, + "(text/template/parse.ActionNode).Line": {1, DeprecatedUseNoLonger}, + "(text/template/parse.BranchNode).Line": {1, DeprecatedUseNoLonger}, + "(text/template/parse.TemplateNode).Line": {1, DeprecatedUseNoLonger}, + "database/sql/driver.ColumnConverter": {9, 9}, + "database/sql/driver.Execer": {8, 8}, + "database/sql/driver.Queryer": {8, 8}, + "(database/sql/driver.Conn).Begin": {8, 8}, + "(database/sql/driver.Stmt).Exec": {8, 8}, + "(database/sql/driver.Stmt).Query": {8, 8}, + "syscall.StringByteSlice": {1, 1}, + "syscall.StringBytePtr": {1, 1}, + "syscall.StringSlicePtr": {1, 1}, + "syscall.StringToUTF16": {1, 1}, + "syscall.StringToUTF16Ptr": {1, 1}, + "(*regexp.Regexp).Copy": {12, DeprecatedUseNoLonger}, + "(archive/tar.Header).Xattrs": {10, 10}, + "archive/tar.TypeRegA": {11, 1}, + "go/types.NewInterface": {11, 11}, + "(*go/types.Interface).Embedded": {11, 11}, + "go/importer.For": {12, 12}, + "encoding/json.InvalidUTF8Error": {2, DeprecatedUseNoLonger}, + "encoding/json.UnmarshalFieldError": {2, DeprecatedUseNoLonger}, + "encoding/csv.ErrTrailingComma": {2, DeprecatedUseNoLonger}, + "(encoding/csv.Reader).TrailingComma": {2, DeprecatedUseNoLonger}, + "(net.Dialer).DualStack": {12, 12}, + "net/http.ErrUnexpectedTrailer": {12, DeprecatedUseNoLonger}, + "net/http.CloseNotifier": {11, 7}, + // This is hairy. 
The notice says "Not all errors in the http package related to protocol errors are of type ProtocolError", but doesn't that imply that some errors do? + "net/http.ProtocolError": {8, DeprecatedUseNoLonger}, + "(crypto/x509.CertificateRequest).Attributes": {5, 3}, + + // These functions have no direct alternative, but they are insecure and should no longer be used. + "crypto/x509.IsEncryptedPEMBlock": {16, DeprecatedNeverUse}, + "crypto/x509.DecryptPEMBlock": {16, DeprecatedNeverUse}, + "crypto/x509.EncryptPEMBlock": {16, DeprecatedNeverUse}, + "crypto/dsa": {16, DeprecatedNeverUse}, + + // This function has no alternative, but also no purpose. + "(*crypto/rc4.Cipher).Reset": {12, DeprecatedNeverUse}, + "(net/http/httptest.ResponseRecorder).HeaderMap": {11, 7}, + "image.ZP": {13, 0}, + "image.ZR": {13, 0}, + "(*debug/gosym.LineTable).LineToPC": {2, 2}, + "(*debug/gosym.LineTable).PCToLine": {2, 2}, + "crypto/tls.VersionSSL30": {13, DeprecatedNeverUse}, + "(crypto/tls.Config).NameToCertificate": {14, DeprecatedUseNoLonger}, + "(*crypto/tls.Config).BuildNameToCertificate": {14, DeprecatedUseNoLonger}, + "(crypto/tls.Config).SessionTicketKey": {16, 5}, + // No alternative, no use + "(crypto/tls.ConnectionState).NegotiatedProtocolIsMutual": {16, DeprecatedNeverUse}, + // No alternative, but insecure + "(crypto/tls.ConnectionState).TLSUnique": {16, DeprecatedNeverUse}, + "image/jpeg.Reader": {4, DeprecatedNeverUse}, + + // All of these have been deprecated in favour of external libraries + "syscall.AttachLsf": {7, 0}, + "syscall.DetachLsf": {7, 0}, + "syscall.LsfSocket": {7, 0}, + "syscall.SetLsfPromisc": {7, 0}, + "syscall.LsfJump": {7, 0}, + "syscall.LsfStmt": {7, 0}, + "syscall.BpfStmt": {7, 0}, + "syscall.BpfJump": {7, 0}, + "syscall.BpfBuflen": {7, 0}, + "syscall.SetBpfBuflen": {7, 0}, + "syscall.BpfDatalink": {7, 0}, + "syscall.SetBpfDatalink": {7, 0}, + "syscall.SetBpfPromisc": {7, 0}, + "syscall.FlushBpf": {7, 0}, + "syscall.BpfInterface": {7, 0}, + "syscall.SetBpfInterface": {7, 0}, + "syscall.BpfTimeout": {7, 0}, + "syscall.SetBpfTimeout": {7, 0}, + "syscall.BpfStats": {7, 0}, + "syscall.SetBpfImmediate": {7, 0}, + "syscall.SetBpf": {7, 0}, + "syscall.CheckBpfVersion": {7, 0}, + "syscall.BpfHeadercmpl": {7, 0}, + "syscall.SetBpfHeadercmpl": {7, 0}, + "syscall.RouteRIB": {8, 0}, + "syscall.RoutingMessage": {8, 0}, + "syscall.RouteMessage": {8, 0}, + "syscall.InterfaceMessage": {8, 0}, + "syscall.InterfaceAddrMessage": {8, 0}, + "syscall.ParseRoutingMessage": {8, 0}, + "syscall.ParseRoutingSockaddr": {8, 0}, + "syscall.InterfaceAnnounceMessage": {7, 0}, + "syscall.InterfaceMulticastAddrMessage": {7, 0}, + "syscall.FormatMessage": {5, 0}, + "syscall.PostQueuedCompletionStatus": {17, 0}, + "syscall.GetQueuedCompletionStatus": {17, 0}, + "syscall.CreateIoCompletionPort": {17, 0}, + + // Not marked as deprecated with a recognizable header, but deprecated nonetheless. 
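+	// For reference, {16, 16} reads as "deprecated since Go 1.16, with an
+	// alternative (the io and os packages) available since Go 1.16"; the
+	// negative sentinels defined above are used in the second position for
+	// entries that have no Go-version alternative.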
+ "io/ioutil": {16, 16}, +} + +// Last imported from Go at 32b73ae18026e8a9dc4c5aa49999b1ea445bc68c with the following numbers of deprecations: +// +// chulak go@master ./src $ rg -c "Deprecated: " +// vendor/golang.org/x/crypto/curve25519/curve25519.go:1 +// vendor/golang.org/x/text/transform/transform.go:1 +// cmd/compile/internal/types/sym.go:2 +// syscall/route_netbsd.go:1 +// syscall/route_bsd.go:7 +// syscall/lsf_linux.go:6 +// syscall/exec_unix.go:1 +// syscall/route_darwin.go:1 +// syscall/route_freebsd.go:2 +// syscall/route_dragonfly.go:2 +// syscall/bpf_darwin.go:18 +// syscall/route_openbsd.go:1 +// syscall/syscall_windows.go:6 +// syscall/syscall.go:3 +// syscall/bpf_bsd.go:18 +// cmd/compile/internal/types2/type.go:2 +// syscall/exec_plan9.go:1 +// cmd/internal/obj/textflag.go:1 +// cmd/internal/obj/link.go:5 +// cmd/vendor/golang.org/x/sys/unix/zsysnum_darwin_arm64.go:1 +// cmd/vendor/golang.org/x/mod/semver/semver.go:1 +// cmd/vendor/golang.org/x/sys/windows/syscall_windows.go:2 +// cmd/vendor/golang.org/x/sys/unix/zsysnum_darwin_amd64.go:1 +// cmd/vendor/golang.org/x/mod/modfile/rule.go:2 +// cmd/vendor/golang.org/x/sys/windows/security_windows.go:1 +// cmd/go/internal/modcmd/edit.go:1 +// cmd/go/testdata/mod/example.com_deprecated_a_v1.9.0.txt:2 +// cmd/go/testdata/mod/example.com_undeprecated_v1.0.0.txt:2 +// cmd/go/testdata/mod/example.com_deprecated_b_v1.9.0.txt:2 +// encoding/csv/reader.go:2 +// encoding/json/decode.go:1 +// encoding/json/encode.go:1 +// cmd/go/testdata/script/mod_list_deprecated.txt:2 +// cmd/go/testdata/script/mod_deprecate_message.txt:4 +// cmd/go/testdata/script/mod_list_deprecated_replace.txt:1 +// runtime/cpuprof.go:1 +// cmd/go/testdata/script/mod_edit.txt:1 +// crypto/tls/common.go:6 +// crypto/rc4/rc4.go:1 +// crypto/dsa/dsa.go:1 +// path/filepath/path_unix.go:1 +// path/filepath/path_windows.go:1 +// path/filepath/path_plan9.go:1 +// regexp/regexp.go:1 +// crypto/x509/pem_decrypt.go:3 +// crypto/x509/x509.go:1 +// archive/zip/struct.go:6 +// archive/tar/common.go:2 +// debug/gosym/pclntab.go:2 +// compress/flate/inflate.go:2 +// image/geom.go:2 +// image/jpeg/reader.go:1 +// os/file.go:1 +// net/dial.go:2 +// net/http/server.go:2 +// net/http/socks_bundle.go:1 +// net/http/httputil/persist.go:8 +// net/http/request.go:6 +// net/http/transport.go:3 +// net/http/httptest/recorder.go:1 +// go/doc/doc.go:1 +// go/types/errorcodes.go:1 +// go/types/type.go:2 +// database/sql/driver/driver.go:6 +// text/template/parse/node.go:5 +// go/importer/importer.go:2 diff --git a/vendor/honnef.co/go/tools/pattern/convert.go b/vendor/honnef.co/go/tools/pattern/convert.go new file mode 100644 index 000000000..dfcd1560d --- /dev/null +++ b/vendor/honnef.co/go/tools/pattern/convert.go @@ -0,0 +1,242 @@ +package pattern + +import ( + "fmt" + "go/ast" + "go/token" + "go/types" + "reflect" +) + +var astTypes = map[string]reflect.Type{ + "Ellipsis": reflect.TypeOf(ast.Ellipsis{}), + "RangeStmt": reflect.TypeOf(ast.RangeStmt{}), + "AssignStmt": reflect.TypeOf(ast.AssignStmt{}), + "IndexExpr": reflect.TypeOf(ast.IndexExpr{}), + "Ident": reflect.TypeOf(ast.Ident{}), + "ValueSpec": reflect.TypeOf(ast.ValueSpec{}), + "GenDecl": reflect.TypeOf(ast.GenDecl{}), + "BinaryExpr": reflect.TypeOf(ast.BinaryExpr{}), + "ForStmt": reflect.TypeOf(ast.ForStmt{}), + "ArrayType": reflect.TypeOf(ast.ArrayType{}), + "DeferStmt": reflect.TypeOf(ast.DeferStmt{}), + "MapType": reflect.TypeOf(ast.MapType{}), + "ReturnStmt": reflect.TypeOf(ast.ReturnStmt{}), + "SliceExpr": 
reflect.TypeOf(ast.SliceExpr{}), + "StarExpr": reflect.TypeOf(ast.StarExpr{}), + "UnaryExpr": reflect.TypeOf(ast.UnaryExpr{}), + "SendStmt": reflect.TypeOf(ast.SendStmt{}), + "SelectStmt": reflect.TypeOf(ast.SelectStmt{}), + "ImportSpec": reflect.TypeOf(ast.ImportSpec{}), + "IfStmt": reflect.TypeOf(ast.IfStmt{}), + "GoStmt": reflect.TypeOf(ast.GoStmt{}), + "Field": reflect.TypeOf(ast.Field{}), + "SelectorExpr": reflect.TypeOf(ast.SelectorExpr{}), + "StructType": reflect.TypeOf(ast.StructType{}), + "KeyValueExpr": reflect.TypeOf(ast.KeyValueExpr{}), + "FuncType": reflect.TypeOf(ast.FuncType{}), + "FuncLit": reflect.TypeOf(ast.FuncLit{}), + "FuncDecl": reflect.TypeOf(ast.FuncDecl{}), + "ChanType": reflect.TypeOf(ast.ChanType{}), + "CallExpr": reflect.TypeOf(ast.CallExpr{}), + "CaseClause": reflect.TypeOf(ast.CaseClause{}), + "CommClause": reflect.TypeOf(ast.CommClause{}), + "CompositeLit": reflect.TypeOf(ast.CompositeLit{}), + "EmptyStmt": reflect.TypeOf(ast.EmptyStmt{}), + "SwitchStmt": reflect.TypeOf(ast.SwitchStmt{}), + "TypeSwitchStmt": reflect.TypeOf(ast.TypeSwitchStmt{}), + "TypeAssertExpr": reflect.TypeOf(ast.TypeAssertExpr{}), + "TypeSpec": reflect.TypeOf(ast.TypeSpec{}), + "InterfaceType": reflect.TypeOf(ast.InterfaceType{}), + "BranchStmt": reflect.TypeOf(ast.BranchStmt{}), + "IncDecStmt": reflect.TypeOf(ast.IncDecStmt{}), + "BasicLit": reflect.TypeOf(ast.BasicLit{}), +} + +func ASTToNode(node interface{}) Node { + switch node := node.(type) { + case *ast.File: + panic("cannot convert *ast.File to Node") + case nil: + return Nil{} + case string: + return String(node) + case token.Token: + return Token(node) + case *ast.ExprStmt: + return ASTToNode(node.X) + case *ast.BlockStmt: + if node == nil { + return Nil{} + } + return ASTToNode(node.List) + case *ast.FieldList: + if node == nil { + return Nil{} + } + return ASTToNode(node.List) + case *ast.BasicLit: + if node == nil { + return Nil{} + } + case *ast.ParenExpr: + return ASTToNode(node.X) + } + + if node, ok := node.(ast.Node); ok { + name := reflect.TypeOf(node).Elem().Name() + T, ok := structNodes[name] + if !ok { + panic(fmt.Sprintf("internal error: unhandled type %T", node)) + } + + if reflect.ValueOf(node).IsNil() { + return Nil{} + } + v := reflect.ValueOf(node).Elem() + objs := make([]Node, T.NumField()) + for i := 0; i < T.NumField(); i++ { + f := v.FieldByName(T.Field(i).Name) + objs[i] = ASTToNode(f.Interface()) + } + + n, err := populateNode(name, objs, false) + if err != nil { + panic(fmt.Sprintf("internal error: %s", err)) + } + return n + } + + s := reflect.ValueOf(node) + if s.Kind() == reflect.Slice { + if s.Len() == 0 { + return List{} + } + if s.Len() == 1 { + return ASTToNode(s.Index(0).Interface()) + } + + tail := List{} + for i := s.Len() - 1; i >= 0; i-- { + head := ASTToNode(s.Index(i).Interface()) + l := List{ + Head: head, + Tail: tail, + } + tail = l + } + return tail + } + + panic(fmt.Sprintf("internal error: unhandled type %T", node)) +} + +func NodeToAST(node Node, state State) interface{} { + switch node := node.(type) { + case Binding: + v, ok := state[node.Name] + if !ok { + // really we want to return an error here + panic("XXX") + } + switch v := v.(type) { + case types.Object: + return &ast.Ident{Name: v.Name()} + default: + return v + } + case Builtin, Any, Object, Function, Not, Or: + panic("XXX") + case List: + if (node == List{}) { + return []ast.Node{} + } + x := []ast.Node{NodeToAST(node.Head, state).(ast.Node)} + x = append(x, NodeToAST(node.Tail, state).([]ast.Node)...) 
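+		// At this point x holds the head followed by the flattened tail:
+		// the cons-style List from the pattern has been rebuilt as an
+		// ordinary []ast.Node slice (assuming the tail is itself a List).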
+ return x + case Token: + return token.Token(node) + case String: + return string(node) + case Nil: + return nil + } + + name := reflect.TypeOf(node).Name() + T, ok := astTypes[name] + if !ok { + panic(fmt.Sprintf("internal error: unhandled type %T", node)) + } + v := reflect.ValueOf(node) + out := reflect.New(T) + for i := 0; i < T.NumField(); i++ { + fNode := v.FieldByName(T.Field(i).Name) + if (fNode == reflect.Value{}) { + continue + } + fAST := out.Elem().FieldByName(T.Field(i).Name) + switch fAST.Type().Kind() { + case reflect.Slice: + c := reflect.ValueOf(NodeToAST(fNode.Interface().(Node), state)) + if c.Kind() != reflect.Slice { + // it's a single node in the pattern, we have to wrap + // it in a slice + slice := reflect.MakeSlice(fAST.Type(), 1, 1) + slice.Index(0).Set(c) + c = slice + } + switch fAST.Interface().(type) { + case []ast.Node: + switch cc := c.Interface().(type) { + case []ast.Node: + fAST.Set(c) + case []ast.Expr: + var slice []ast.Node + for _, el := range cc { + slice = append(slice, el) + } + fAST.Set(reflect.ValueOf(slice)) + default: + panic("XXX") + } + case []ast.Expr: + switch cc := c.Interface().(type) { + case []ast.Node: + var slice []ast.Expr + for _, el := range cc { + slice = append(slice, el.(ast.Expr)) + } + fAST.Set(reflect.ValueOf(slice)) + case []ast.Expr: + fAST.Set(c) + default: + panic("XXX") + } + default: + panic("XXX") + } + case reflect.Int: + c := reflect.ValueOf(NodeToAST(fNode.Interface().(Node), state)) + switch c.Kind() { + case reflect.String: + tok, ok := tokensByString[c.Interface().(string)] + if !ok { + // really we want to return an error here + panic("XXX") + } + fAST.SetInt(int64(tok)) + case reflect.Int: + fAST.Set(c) + default: + panic(fmt.Sprintf("internal error: unexpected kind %s", c.Kind())) + } + default: + r := NodeToAST(fNode.Interface().(Node), state) + if r != nil { + fAST.Set(reflect.ValueOf(r)) + } + } + } + + return out.Interface().(ast.Node) +} diff --git a/vendor/honnef.co/go/tools/pattern/doc.go b/vendor/honnef.co/go/tools/pattern/doc.go new file mode 100644 index 000000000..974617543 --- /dev/null +++ b/vendor/honnef.co/go/tools/pattern/doc.go @@ -0,0 +1,273 @@ +/* +Package pattern implements a simple language for pattern matching Go ASTs. + +Design decisions and trade-offs + +The language is designed specifically for the task of filtering ASTs +to simplify the implementation of analyses in staticcheck. +It is also intended to be trivial to parse and execute. + +To that end, we make certain decisions that make the language more +suited to its task, while making certain queries infeasible. + +Furthermore, it is fully expected that the majority of analyses will still require ordinary Go code +to further process the filtered AST, to make use of type information and to enforce complex invariants. +It is not our goal to design a scripting language for writing entire checks in. + +The language + +At its core, patterns are a representation of Go ASTs, allowing for the use of placeholders to enable pattern matching. +Their syntax is inspired by LISP and Haskell, but unlike LISP, the core unit of patterns isn't the list, but the node. +There is a fixed set of nodes, identified by name, and with the exception of the Or node, all nodes have a fixed number of arguments. +In addition to nodes, there are atoms, which represent basic units such as strings or the nil value. + +Pattern matching is implemented via bindings, represented by the Binding node. 
+A Binding can match nodes and associate them with names, to later recall the nodes. +This allows for expressing "this node must be equal to that node" constraints. + +To simplify writing and reading patterns, a small amount of additional syntax exists on top of nodes and atoms. +This additional syntax doesn't add any new features of its own, it simply provides shortcuts to creating nodes and atoms. + +To show an example of a pattern, first consider this snippet of Go code: + + if x := fn(); x != nil { + for _, v := range x { + println(v, x) + } + } + +The corresponding AST expressed as an idiomatic pattern would look as follows: + + (IfStmt + (AssignStmt (Ident "x") ":=" (CallExpr (Ident "fn") [])) + (BinaryExpr (Ident "x") "!=" (Ident "nil")) + (RangeStmt + (Ident "_") (Ident "v") ":=" (Ident "x") + (CallExpr (Ident "println") [(Ident "v") (Ident "x")])) + nil) + +Two things are worth noting about this representation. +First, the [el1 el2 ...] syntax is a short-hand for creating lists. +It is a short-hand for el1:el2:[], which itself is a short-hand for (List el1 (List el2 (List nil nil)). +Second, note the absence of a lot of lists in places that normally accept lists. +For example, assignment assigns a number of right-hands to a number of left-hands, yet our AssignStmt is lacking any form of list. +This is due to the fact that a single node can match a list of exactly one element. +Thus, the two following forms have identical matching behavior: + + (AssignStmt (Ident "x") ":=" (CallExpr (Ident "fn") [])) + (AssignStmt [(Ident "x")] ":=" [(CallExpr (Ident "fn") [])]) + +This section serves as an overview of the language's syntax. +More in-depth explanations of the matching behavior as well as an exhaustive list of node types follows in the coming sections. + +Pattern matching + +TODO write about pattern matching + +- inspired by haskell syntax, but much, much simpler and naive + +Node types + +The language contains two kinds of nodes: those that map to nodes in the AST, and those that implement additional logic. + +Nodes that map directly to AST nodes are named identically to the types in the go/ast package. +What follows is an exhaustive list of these nodes: + + (ArrayType len elt) + (AssignStmt lhs tok rhs) + (BasicLit kind value) + (BinaryExpr x op y) + (BranchStmt tok label) + (CallExpr fun args) + (CaseClause list body) + (ChanType dir value) + (CommClause comm body) + (CompositeLit type elts) + (DeferStmt call) + (Ellipsis elt) + (EmptyStmt) + (Field names type tag) + (ForStmt init cond post body) + (FuncDecl recv name type body) + (FuncLit type body) + (FuncType params results) + (GenDecl specs) + (GoStmt call) + (Ident name) + (IfStmt init cond body else) + (ImportSpec name path) + (IncDecStmt x tok) + (IndexExpr x index) + (InterfaceType methods) + (KeyValueExpr key value) + (MapType key value) + (RangeStmt key value tok x body) + (ReturnStmt results) + (SelectStmt body) + (SelectorExpr x sel) + (SendStmt chan value) + (SliceExpr x low high max) + (StarExpr x) + (StructType fields) + (SwitchStmt init tag body) + (TypeAssertExpr) + (TypeSpec name type) + (TypeSwitchStmt init assign body) + (UnaryExpr op x) + (ValueSpec names type values) + +Additionally, there are the String, Token and nil atoms. +Strings are double-quoted string literals, as in (Ident "someName"). 
+Tokens are also represented as double-quoted string literals, but are converted to token.Token values in contexts that require tokens, +such as in (BinaryExpr x "<" y), where "<" is transparently converted to token.LSS during matching. +The keyword 'nil' denotes the nil value, which represents the absence of any value. + +We also define the (List head tail) node, which is used to represent sequences of elements as a singly linked list. +The head is a single element, and the tail is the remainder of the list. +For example, + + (List "foo" (List "bar" (List "baz" (List nil nil)))) + +represents a list of three elements, "foo", "bar" and "baz". There is dedicated syntax for writing lists, which looks as follows: + + ["foo" "bar" "baz"] + +This syntax is itself syntactic sugar for the following form: + + "foo":"bar":"baz":[] + +This form is of particular interest for pattern matching, as it allows matching on the head and tail. For example, + + "foo":"bar":_ + +would match any list with at least two elements, where the first two elements are "foo" and "bar". This is equivalent to writing + + (List "foo" (List "bar" _)) + +Note that it is not possible to match from the end of the list. +That is, there is no way to express a query such as "a list of any length where the last element is foo". + +Note that unlike in LISP, nil and empty lists are distinct from one another. +In patterns, with respect to lists, nil is akin to Go's untyped nil. +It will match a nil ast.Node, but it will not match a nil []ast.Expr. Nil will, however, match pointers to named types such as *ast.Ident. +Similarly, lists are akin to Go's +slices. An empty list will match both a nil and an empty []ast.Expr, but it will not match a nil ast.Node. + +Due to the difference between nil and empty lists, an empty list is represented as (List nil nil), i.e. a list with no head or tail. +Similarly, a list of one element is represented as (List el (List nil nil)). Unlike in LISP, it cannot be represented by (List el nil). + +Finally, there are nodes that implement special logic or matching behavior. + +(Any) matches any value. The underscore (_) maps to this node, making the following two forms equivalent: + + (Ident _) + (Ident (Any)) + +(Builtin name) matches a built-in identifier or function by name. +This is a type-aware variant of (Ident name). +Instead of only comparing the name, it resolves the object behind the name and makes sure it's a pre-declared identifier. + +For example, in the following piece of code + + func fn() { + println(true) + true := false + println(true) + } + +the pattern + + (Builtin "true") + +will match exactly once, on the first use of 'true' in the function. +Subsequent occurrences of 'true' no longer refer to the pre-declared identifier. + +(Object name) matches an identifier by name, but yields the +types.Object it refers to. + +(Function name) matches ast.Idents and ast.SelectorExprs that refer to a function with a given fully qualified name. +For example, "net/url.PathEscape" matches the PathEscape function in the net/url package, +and "(net/url.EscapeError).Error" refers to the Error method on the net/url.EscapeError type, +either on an instance of the type, or on the type itself. 
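+
+As an illustrative aside before the concrete examples below: patterns like the ones above might be driven from Go code via the MustParse and Match functions defined in this package.
+The following sketch is an assumed, minimal example; the pattern, expression and identifiers are illustrative only:
+
+	package main
+
+	import (
+		"fmt"
+		"go/ast"
+		"go/parser"
+
+		"honnef.co/go/tools/pattern"
+	)
+
+	func main() {
+		// Matches self-comparison of an identifier, e.g. "a == a",
+		// binding the identifier to the name "lhs".
+		pat := pattern.MustParse(`(BinaryExpr lhs@(Ident _) "==" lhs)`)
+
+		expr, err := parser.ParseExpr("a == a")
+		if err != nil {
+			panic(err)
+		}
+		if m, ok := pattern.Match(pat.Root, expr); ok {
+			fmt.Println("matched; lhs =", m.State["lhs"].(*ast.Ident).Name)
+		}
+	}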
+ +For example, the following patterns match the following lines of code: + + (CallExpr (Function "fmt.Println") _) // pattern 1 + (CallExpr (Function "(net/url.EscapeError).Error") _) // pattern 2 + + fmt.Println("hello, world") // matches pattern 1 + var x url.EscapeError + x.Error() // matches pattern 2 + (url.EscapeError).Error(x) // also matches pattern 2 + +(Binding name node) creates or uses a binding. +Bindings work like variable assignments, allowing referring to already matched nodes. +As an example, bindings are necessary to match self-assignment of the form "x = x", +since we need to express that the right-hand side is identical to the left-hand side. + +If a binding's node is not nil, the matcher will attempt to match a node according to the pattern. +If a binding's node is nil, the binding will either recall an existing value, or match the Any node. +It is an error to provide a non-nil node to a binding that has already been bound. + +Referring back to the earlier example, the following pattern will match self-assignment of idents: + + (AssignStmt (Binding "lhs" (Ident _)) "=" (Binding "lhs" nil)) + +Because bindings are a crucial component of pattern matching, there is special syntax for creating and recalling bindings. +Lower-case names refer to bindings. If standing on its own, the name "foo" will be equivalent to (Binding "foo" nil). +If a name is followed by an at-sign (@) then it will create a binding for the node that follows. +Together, this allows us to rewrite the earlier example as follows: + + (AssignStmt lhs@(Ident _) "=" lhs) + +(Or nodes...) is a variadic node that tries matching each node until one succeeds. For example, the following pattern matches all idents of name "foo" or "bar": + + (Ident (Or "foo" "bar")) + +We could also have written + + (Or (Ident "foo") (Ident "bar")) + +and achieved the same result. We can also mix different kinds of nodes: + + (Or (Ident "foo") (CallExpr (Ident "bar") _)) + +When using bindings inside of nodes used inside Or, all or none of the bindings will be bound. +That is, partially matched nodes that ultimately failed to match will not produce any bindings observable outside of the matching attempt. +We can thus write + + (Or (Ident name) (CallExpr name)) + +and 'name' will either be a String if the first option matched, or an Ident or SelectorExpr if the second option matched. + +(Not node) + +The Not node negates a match. For example, (Not (Ident _)) will match all nodes that aren't identifiers. + +ChanDir(0) + +Automatic unnesting of AST nodes + +The Go AST has several types of nodes that wrap other nodes. +To simplify matching, we automatically unwrap some of these nodes. + +These nodes are ExprStmt (for using expressions in a statement context), +ParenExpr (for parenthesized expressions), +DeclStmt (for declarations in a statement context), +and LabeledStmt (for labeled statements). + +Thus, the query + + (FuncLit _ [(CallExpr _ _)] + +will match a function literal containing a single function call, +even though in the actual Go AST, the CallExpr is nested inside an ExprStmt, +as function bodies are made up of sequences of statements. + +On the flip-side, there is no way to specifically match these wrapper nodes. 
+For example, there is no way of searching for unnecessary parentheses, like in the following piece of Go code: + + ((x)) += 2 + +*/ +package pattern diff --git a/vendor/honnef.co/go/tools/pattern/fuzz.go b/vendor/honnef.co/go/tools/pattern/fuzz.go new file mode 100644 index 000000000..52e7df974 --- /dev/null +++ b/vendor/honnef.co/go/tools/pattern/fuzz.go @@ -0,0 +1,50 @@ +// +build gofuzz + +package pattern + +import ( + "go/ast" + goparser "go/parser" + "go/token" + "os" + "path/filepath" + "strings" +) + +var files []*ast.File + +func init() { + fset := token.NewFileSet() + filepath.Walk("/usr/lib/go/src", func(path string, info os.FileInfo, err error) error { + if err != nil { + // XXX error handling + panic(err) + } + if !strings.HasSuffix(path, ".go") { + return nil + } + f, err := goparser.ParseFile(fset, path, nil, 0) + if err != nil { + return nil + } + files = append(files, f) + return nil + }) +} + +func Fuzz(data []byte) int { + p := &Parser{} + pat, err := p.Parse(string(data)) + if err != nil { + if strings.Contains(err.Error(), "internal error") { + panic(err) + } + return 0 + } + _ = pat.Root.String() + + for _, f := range files { + Match(pat.Root, f) + } + return 1 +} diff --git a/vendor/honnef.co/go/tools/pattern/lexer.go b/vendor/honnef.co/go/tools/pattern/lexer.go new file mode 100644 index 000000000..fb72e392b --- /dev/null +++ b/vendor/honnef.co/go/tools/pattern/lexer.go @@ -0,0 +1,221 @@ +package pattern + +import ( + "fmt" + "go/token" + "unicode" + "unicode/utf8" +) + +type lexer struct { + f *token.File + + input string + start int + pos int + width int + items chan item +} + +type itemType int + +const eof = -1 + +const ( + itemError itemType = iota + itemLeftParen + itemRightParen + itemLeftBracket + itemRightBracket + itemTypeName + itemVariable + itemAt + itemColon + itemBlank + itemString + itemEOF +) + +func (typ itemType) String() string { + switch typ { + case itemError: + return "ERROR" + case itemLeftParen: + return "(" + case itemRightParen: + return ")" + case itemLeftBracket: + return "[" + case itemRightBracket: + return "]" + case itemTypeName: + return "TYPE" + case itemVariable: + return "VAR" + case itemAt: + return "@" + case itemColon: + return ":" + case itemBlank: + return "_" + case itemString: + return "STRING" + case itemEOF: + return "EOF" + default: + return fmt.Sprintf("itemType(%d)", typ) + } +} + +type item struct { + typ itemType + val string + pos int +} + +type stateFn func(*lexer) stateFn + +func (l *lexer) run() { + for state := lexStart; state != nil; { + state = state(l) + } + close(l.items) +} + +func (l *lexer) emitValue(t itemType, value string) { + l.items <- item{t, value, l.start} + l.start = l.pos +} + +func (l *lexer) emit(t itemType) { + l.items <- item{t, l.input[l.start:l.pos], l.start} + l.start = l.pos +} + +func lexStart(l *lexer) stateFn { + switch r := l.next(); { + case r == eof: + l.emit(itemEOF) + return nil + case unicode.IsSpace(r): + l.ignore() + case r == '(': + l.emit(itemLeftParen) + case r == ')': + l.emit(itemRightParen) + case r == '[': + l.emit(itemLeftBracket) + case r == ']': + l.emit(itemRightBracket) + case r == '@': + l.emit(itemAt) + case r == ':': + l.emit(itemColon) + case r == '_': + l.emit(itemBlank) + case r == '"': + l.backup() + return lexString + case unicode.IsUpper(r): + l.backup() + return lexType + case unicode.IsLower(r): + l.backup() + return lexVariable + default: + return l.errorf("unexpected character %c", r) + } + return lexStart +} + +func (l *lexer) next() (r rune) { + if 
l.pos >= len(l.input) { + l.width = 0 + return eof + } + r, l.width = utf8.DecodeRuneInString(l.input[l.pos:]) + + if r == '\n' { + l.f.AddLine(l.pos) + } + + l.pos += l.width + + return r +} + +func (l *lexer) ignore() { + l.start = l.pos +} + +func (l *lexer) backup() { + l.pos -= l.width +} + +func (l *lexer) errorf(format string, args ...interface{}) stateFn { + // TODO(dh): emit position information in errors + l.items <- item{ + itemError, + fmt.Sprintf(format, args...), + l.start, + } + return nil +} + +func isAlphaNumeric(r rune) bool { + return r >= '0' && r <= '9' || + r >= 'a' && r <= 'z' || + r >= 'A' && r <= 'Z' +} + +func lexString(l *lexer) stateFn { + l.next() // skip quote + escape := false + + var runes []rune + for { + switch r := l.next(); r { + case eof: + return l.errorf("unterminated string") + case '"': + if !escape { + l.emitValue(itemString, string(runes)) + return lexStart + } else { + runes = append(runes, '"') + escape = false + } + case '\\': + if escape { + runes = append(runes, '\\') + escape = false + } else { + escape = true + } + default: + runes = append(runes, r) + } + } +} + +func lexType(l *lexer) stateFn { + l.next() + for { + if !isAlphaNumeric(l.next()) { + l.backup() + l.emit(itemTypeName) + return lexStart + } + } +} + +func lexVariable(l *lexer) stateFn { + l.next() + for { + if !isAlphaNumeric(l.next()) { + l.backup() + l.emit(itemVariable) + return lexStart + } + } +} diff --git a/vendor/honnef.co/go/tools/pattern/match.go b/vendor/honnef.co/go/tools/pattern/match.go new file mode 100644 index 000000000..ebbbdd469 --- /dev/null +++ b/vendor/honnef.co/go/tools/pattern/match.go @@ -0,0 +1,547 @@ +package pattern + +import ( + "fmt" + "go/ast" + "go/token" + "go/types" + "reflect" +) + +var tokensByString = map[string]Token{ + "INT": Token(token.INT), + "FLOAT": Token(token.FLOAT), + "IMAG": Token(token.IMAG), + "CHAR": Token(token.CHAR), + "STRING": Token(token.STRING), + "+": Token(token.ADD), + "-": Token(token.SUB), + "*": Token(token.MUL), + "/": Token(token.QUO), + "%": Token(token.REM), + "&": Token(token.AND), + "|": Token(token.OR), + "^": Token(token.XOR), + "<<": Token(token.SHL), + ">>": Token(token.SHR), + "&^": Token(token.AND_NOT), + "+=": Token(token.ADD_ASSIGN), + "-=": Token(token.SUB_ASSIGN), + "*=": Token(token.MUL_ASSIGN), + "/=": Token(token.QUO_ASSIGN), + "%=": Token(token.REM_ASSIGN), + "&=": Token(token.AND_ASSIGN), + "|=": Token(token.OR_ASSIGN), + "^=": Token(token.XOR_ASSIGN), + "<<=": Token(token.SHL_ASSIGN), + ">>=": Token(token.SHR_ASSIGN), + "&^=": Token(token.AND_NOT_ASSIGN), + "&&": Token(token.LAND), + "||": Token(token.LOR), + "<-": Token(token.ARROW), + "++": Token(token.INC), + "--": Token(token.DEC), + "==": Token(token.EQL), + "<": Token(token.LSS), + ">": Token(token.GTR), + "=": Token(token.ASSIGN), + "!": Token(token.NOT), + "!=": Token(token.NEQ), + "<=": Token(token.LEQ), + ">=": Token(token.GEQ), + ":=": Token(token.DEFINE), + "...": Token(token.ELLIPSIS), + "IMPORT": Token(token.IMPORT), + "VAR": Token(token.VAR), + "TYPE": Token(token.TYPE), + "CONST": Token(token.CONST), + "BREAK": Token(token.BREAK), + "CONTINUE": Token(token.CONTINUE), + "GOTO": Token(token.GOTO), + "FALLTHROUGH": Token(token.FALLTHROUGH), +} + +func maybeToken(node Node) (Node, bool) { + if node, ok := node.(String); ok { + if tok, ok := tokensByString[string(node)]; ok { + return tok, true + } + return node, false + } + return node, false +} + +func isNil(v interface{}) bool { + if v == nil { + return true + } + if _, ok := 
v.(Nil); ok { + return true + } + return false +} + +type matcher interface { + Match(*Matcher, interface{}) (interface{}, bool) +} + +type State = map[string]interface{} + +type Matcher struct { + TypesInfo *types.Info + State State +} + +func (m *Matcher) fork() *Matcher { + state := make(State, len(m.State)) + for k, v := range m.State { + state[k] = v + } + return &Matcher{ + TypesInfo: m.TypesInfo, + State: state, + } +} + +func (m *Matcher) merge(mc *Matcher) { + m.State = mc.State +} + +func (m *Matcher) Match(a Node, b ast.Node) bool { + m.State = State{} + _, ok := match(m, a, b) + return ok +} + +func Match(a Node, b ast.Node) (*Matcher, bool) { + m := &Matcher{} + ret := m.Match(a, b) + return m, ret +} + +// Match two items, which may be (Node, AST) or (AST, AST) +func match(m *Matcher, l, r interface{}) (interface{}, bool) { + if _, ok := r.(Node); ok { + panic("Node mustn't be on right side of match") + } + + switch l := l.(type) { + case *ast.ParenExpr: + return match(m, l.X, r) + case *ast.ExprStmt: + return match(m, l.X, r) + case *ast.DeclStmt: + return match(m, l.Decl, r) + case *ast.LabeledStmt: + return match(m, l.Stmt, r) + case *ast.BlockStmt: + return match(m, l.List, r) + case *ast.FieldList: + return match(m, l.List, r) + } + + switch r := r.(type) { + case *ast.ParenExpr: + return match(m, l, r.X) + case *ast.ExprStmt: + return match(m, l, r.X) + case *ast.DeclStmt: + return match(m, l, r.Decl) + case *ast.LabeledStmt: + return match(m, l, r.Stmt) + case *ast.BlockStmt: + if r == nil { + return match(m, l, nil) + } + return match(m, l, r.List) + case *ast.FieldList: + if r == nil { + return match(m, l, nil) + } + return match(m, l, r.List) + case *ast.BasicLit: + if r == nil { + return match(m, l, nil) + } + } + + if l, ok := l.(matcher); ok { + return l.Match(m, r) + } + + if l, ok := l.(Node); ok { + // Matching of pattern with concrete value + return matchNodeAST(m, l, r) + } + + if l == nil || r == nil { + return nil, l == r + } + + { + ln, ok1 := l.(ast.Node) + rn, ok2 := r.(ast.Node) + if ok1 && ok2 { + return matchAST(m, ln, rn) + } + } + + { + obj, ok := l.(types.Object) + if ok { + switch r := r.(type) { + case *ast.Ident: + return obj, obj == m.TypesInfo.ObjectOf(r) + case *ast.SelectorExpr: + return obj, obj == m.TypesInfo.ObjectOf(r.Sel) + default: + return obj, false + } + } + } + + { + ln, ok1 := l.([]ast.Expr) + rn, ok2 := r.([]ast.Expr) + if ok1 || ok2 { + if ok1 && !ok2 { + rn = []ast.Expr{r.(ast.Expr)} + } else if !ok1 && ok2 { + ln = []ast.Expr{l.(ast.Expr)} + } + + if len(ln) != len(rn) { + return nil, false + } + for i, ll := range ln { + if _, ok := match(m, ll, rn[i]); !ok { + return nil, false + } + } + return r, true + } + } + + { + ln, ok1 := l.([]ast.Stmt) + rn, ok2 := r.([]ast.Stmt) + if ok1 || ok2 { + if ok1 && !ok2 { + rn = []ast.Stmt{r.(ast.Stmt)} + } else if !ok1 && ok2 { + ln = []ast.Stmt{l.(ast.Stmt)} + } + + if len(ln) != len(rn) { + return nil, false + } + for i, ll := range ln { + if _, ok := match(m, ll, rn[i]); !ok { + return nil, false + } + } + return r, true + } + } + + { + ln, ok1 := l.([]*ast.Field) + rn, ok2 := r.([]*ast.Field) + if ok1 || ok2 { + if ok1 && !ok2 { + rn = []*ast.Field{r.(*ast.Field)} + } else if !ok1 && ok2 { + ln = []*ast.Field{l.(*ast.Field)} + } + + if len(ln) != len(rn) { + return nil, false + } + for i, ll := range ln { + if _, ok := match(m, ll, rn[i]); !ok { + return nil, false + } + } + return r, true + } + } + + panic(fmt.Sprintf("unsupported comparison: %T and %T", l, r)) +} + +// Match a Node 
with an AST node +func matchNodeAST(m *Matcher, a Node, b interface{}) (interface{}, bool) { + switch b := b.(type) { + case []ast.Stmt: + // 'a' is not a List or we'd be using its Match + // implementation. + + if len(b) != 1 { + return nil, false + } + return match(m, a, b[0]) + case []ast.Expr: + // 'a' is not a List or we'd be using its Match + // implementation. + + if len(b) != 1 { + return nil, false + } + return match(m, a, b[0]) + case ast.Node: + ra := reflect.ValueOf(a) + rb := reflect.ValueOf(b).Elem() + + if ra.Type().Name() != rb.Type().Name() { + return nil, false + } + + for i := 0; i < ra.NumField(); i++ { + af := ra.Field(i) + fieldName := ra.Type().Field(i).Name + bf := rb.FieldByName(fieldName) + if (bf == reflect.Value{}) { + panic(fmt.Sprintf("internal error: could not find field %s in type %t when comparing with %T", fieldName, b, a)) + } + ai := af.Interface() + bi := bf.Interface() + if ai == nil { + return b, bi == nil + } + if _, ok := match(m, ai.(Node), bi); !ok { + return b, false + } + } + return b, true + case nil: + return nil, a == Nil{} + default: + panic(fmt.Sprintf("unhandled type %T", b)) + } +} + +// Match two AST nodes +func matchAST(m *Matcher, a, b ast.Node) (interface{}, bool) { + ra := reflect.ValueOf(a) + rb := reflect.ValueOf(b) + + if ra.Type() != rb.Type() { + return nil, false + } + if ra.IsNil() || rb.IsNil() { + return rb, ra.IsNil() == rb.IsNil() + } + + ra = ra.Elem() + rb = rb.Elem() + for i := 0; i < ra.NumField(); i++ { + af := ra.Field(i) + bf := rb.Field(i) + if af.Type() == rtTokPos || af.Type() == rtObject || af.Type() == rtCommentGroup { + continue + } + + switch af.Kind() { + case reflect.Slice: + if af.Len() != bf.Len() { + return nil, false + } + for j := 0; j < af.Len(); j++ { + if _, ok := match(m, af.Index(j).Interface().(ast.Node), bf.Index(j).Interface().(ast.Node)); !ok { + return nil, false + } + } + case reflect.String: + if af.String() != bf.String() { + return nil, false + } + case reflect.Int: + if af.Int() != bf.Int() { + return nil, false + } + case reflect.Bool: + if af.Bool() != bf.Bool() { + return nil, false + } + case reflect.Ptr, reflect.Interface: + if _, ok := match(m, af.Interface(), bf.Interface()); !ok { + return nil, false + } + default: + panic(fmt.Sprintf("internal error: unhandled kind %s (%T)", af.Kind(), af.Interface())) + } + } + return b, true +} + +func (b Binding) Match(m *Matcher, node interface{}) (interface{}, bool) { + if isNil(b.Node) { + v, ok := m.State[b.Name] + if ok { + // Recall value + return match(m, v, node) + } + // Matching anything + b.Node = Any{} + } + + // Store value + if _, ok := m.State[b.Name]; ok { + panic(fmt.Sprintf("binding already created: %s", b.Name)) + } + new, ret := match(m, b.Node, node) + if ret { + m.State[b.Name] = new + } + return new, ret +} + +func (Any) Match(m *Matcher, node interface{}) (interface{}, bool) { + return node, true +} + +func (l List) Match(m *Matcher, node interface{}) (interface{}, bool) { + v := reflect.ValueOf(node) + if v.Kind() == reflect.Slice { + if isNil(l.Head) { + return node, v.Len() == 0 + } + if v.Len() == 0 { + return nil, false + } + // OPT(dh): don't check the entire tail if head didn't match + _, ok1 := match(m, l.Head, v.Index(0).Interface()) + _, ok2 := match(m, l.Tail, v.Slice(1, v.Len()).Interface()) + return node, ok1 && ok2 + } + // Our empty list does not equal an untyped Go nil. This way, we can + // tell apart an if with no else and an if with an empty else. 
+ return nil, false +} + +func (s String) Match(m *Matcher, node interface{}) (interface{}, bool) { + switch o := node.(type) { + case token.Token: + if tok, ok := maybeToken(s); ok { + return match(m, tok, node) + } + return nil, false + case string: + return o, string(s) == o + default: + return nil, false + } +} + +func (tok Token) Match(m *Matcher, node interface{}) (interface{}, bool) { + o, ok := node.(token.Token) + if !ok { + return nil, false + } + return o, token.Token(tok) == o +} + +func (Nil) Match(m *Matcher, node interface{}) (interface{}, bool) { + return nil, isNil(node) || reflect.ValueOf(node).IsNil() +} + +func (builtin Builtin) Match(m *Matcher, node interface{}) (interface{}, bool) { + r, ok := match(m, Ident(builtin), node) + if !ok { + return nil, false + } + ident := r.(*ast.Ident) + obj := m.TypesInfo.ObjectOf(ident) + if obj != types.Universe.Lookup(ident.Name) { + return nil, false + } + return ident, true +} + +func (obj Object) Match(m *Matcher, node interface{}) (interface{}, bool) { + r, ok := match(m, Ident(obj), node) + if !ok { + return nil, false + } + ident := r.(*ast.Ident) + + id := m.TypesInfo.ObjectOf(ident) + _, ok = match(m, obj.Name, ident.Name) + return id, ok +} + +func (fn Function) Match(m *Matcher, node interface{}) (interface{}, bool) { + var name string + var obj types.Object + + r, ok := match(m, Or{Nodes: []Node{Ident{Any{}}, SelectorExpr{Any{}, Any{}}}}, node) + if !ok { + return nil, false + } + + switch r := r.(type) { + case *ast.Ident: + obj = m.TypesInfo.ObjectOf(r) + switch obj := obj.(type) { + case *types.Func: + // OPT(dh): optimize this similar to code.FuncName + name = obj.FullName() + case *types.Builtin: + name = obj.Name() + default: + return nil, false + } + case *ast.SelectorExpr: + var ok bool + obj, ok = m.TypesInfo.ObjectOf(r.Sel).(*types.Func) + if !ok { + return nil, false + } + // OPT(dh): optimize this similar to code.FuncName + name = obj.(*types.Func).FullName() + default: + panic("unreachable") + } + _, ok = match(m, fn.Name, name) + return obj, ok +} + +func (or Or) Match(m *Matcher, node interface{}) (interface{}, bool) { + for _, opt := range or.Nodes { + mc := m.fork() + if ret, ok := match(mc, opt, node); ok { + m.merge(mc) + return ret, true + } + } + return nil, false +} + +func (not Not) Match(m *Matcher, node interface{}) (interface{}, bool) { + _, ok := match(m, not.Node, node) + if ok { + return nil, false + } + return node, true +} + +var ( + // Types of fields in go/ast structs that we want to skip + rtTokPos = reflect.TypeOf(token.Pos(0)) + rtObject = reflect.TypeOf((*ast.Object)(nil)) + rtCommentGroup = reflect.TypeOf((*ast.CommentGroup)(nil)) +) + +var ( + _ matcher = Binding{} + _ matcher = Any{} + _ matcher = List{} + _ matcher = String("") + _ matcher = Token(0) + _ matcher = Nil{} + _ matcher = Builtin{} + _ matcher = Object{} + _ matcher = Function{} + _ matcher = Or{} + _ matcher = Not{} +) diff --git a/vendor/honnef.co/go/tools/pattern/parser.go b/vendor/honnef.co/go/tools/pattern/parser.go new file mode 100644 index 000000000..68cc54b2e --- /dev/null +++ b/vendor/honnef.co/go/tools/pattern/parser.go @@ -0,0 +1,463 @@ +package pattern + +import ( + "fmt" + "go/ast" + "go/token" + "reflect" +) + +type Pattern struct { + Root Node + // Relevant contains instances of ast.Node that could potentially + // initiate a successful match of the pattern. 
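+	// For example (per nodeToASTTypes below), a pattern rooted at
+	// (BinaryExpr ...) is only relevant to *ast.BinaryExpr, so a caller can
+	// limit its AST traversal to that node type.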
+ Relevant []reflect.Type +} + +func MustParse(s string) Pattern { + p := &Parser{AllowTypeInfo: true} + pat, err := p.Parse(s) + if err != nil { + panic(err) + } + return pat +} + +func roots(node Node) []reflect.Type { + switch node := node.(type) { + case Or: + var out []reflect.Type + for _, el := range node.Nodes { + out = append(out, roots(el)...) + } + return out + case Not: + return roots(node.Node) + case Binding: + return roots(node.Node) + case Nil, nil: + // this branch is reached via bindings + return allTypes + default: + Ts, ok := nodeToASTTypes[reflect.TypeOf(node)] + if !ok { + panic(fmt.Sprintf("internal error: unhandled type %T", node)) + } + return Ts + } +} + +var allTypes = []reflect.Type{ + reflect.TypeOf((*ast.RangeStmt)(nil)), + reflect.TypeOf((*ast.AssignStmt)(nil)), + reflect.TypeOf((*ast.IndexExpr)(nil)), + reflect.TypeOf((*ast.Ident)(nil)), + reflect.TypeOf((*ast.ValueSpec)(nil)), + reflect.TypeOf((*ast.GenDecl)(nil)), + reflect.TypeOf((*ast.BinaryExpr)(nil)), + reflect.TypeOf((*ast.ForStmt)(nil)), + reflect.TypeOf((*ast.ArrayType)(nil)), + reflect.TypeOf((*ast.DeferStmt)(nil)), + reflect.TypeOf((*ast.MapType)(nil)), + reflect.TypeOf((*ast.ReturnStmt)(nil)), + reflect.TypeOf((*ast.SliceExpr)(nil)), + reflect.TypeOf((*ast.StarExpr)(nil)), + reflect.TypeOf((*ast.UnaryExpr)(nil)), + reflect.TypeOf((*ast.SendStmt)(nil)), + reflect.TypeOf((*ast.SelectStmt)(nil)), + reflect.TypeOf((*ast.ImportSpec)(nil)), + reflect.TypeOf((*ast.IfStmt)(nil)), + reflect.TypeOf((*ast.GoStmt)(nil)), + reflect.TypeOf((*ast.Field)(nil)), + reflect.TypeOf((*ast.SelectorExpr)(nil)), + reflect.TypeOf((*ast.StructType)(nil)), + reflect.TypeOf((*ast.KeyValueExpr)(nil)), + reflect.TypeOf((*ast.FuncType)(nil)), + reflect.TypeOf((*ast.FuncLit)(nil)), + reflect.TypeOf((*ast.FuncDecl)(nil)), + reflect.TypeOf((*ast.ChanType)(nil)), + reflect.TypeOf((*ast.CallExpr)(nil)), + reflect.TypeOf((*ast.CaseClause)(nil)), + reflect.TypeOf((*ast.CommClause)(nil)), + reflect.TypeOf((*ast.CompositeLit)(nil)), + reflect.TypeOf((*ast.EmptyStmt)(nil)), + reflect.TypeOf((*ast.SwitchStmt)(nil)), + reflect.TypeOf((*ast.TypeSwitchStmt)(nil)), + reflect.TypeOf((*ast.TypeAssertExpr)(nil)), + reflect.TypeOf((*ast.TypeSpec)(nil)), + reflect.TypeOf((*ast.InterfaceType)(nil)), + reflect.TypeOf((*ast.BranchStmt)(nil)), + reflect.TypeOf((*ast.IncDecStmt)(nil)), + reflect.TypeOf((*ast.BasicLit)(nil)), +} + +var nodeToASTTypes = map[reflect.Type][]reflect.Type{ + reflect.TypeOf(String("")): nil, + reflect.TypeOf(Token(0)): nil, + reflect.TypeOf(List{}): {reflect.TypeOf((*ast.BlockStmt)(nil)), reflect.TypeOf((*ast.FieldList)(nil))}, + reflect.TypeOf(Builtin{}): {reflect.TypeOf((*ast.Ident)(nil))}, + reflect.TypeOf(Object{}): {reflect.TypeOf((*ast.Ident)(nil))}, + reflect.TypeOf(Function{}): {reflect.TypeOf((*ast.Ident)(nil)), reflect.TypeOf((*ast.SelectorExpr)(nil))}, + reflect.TypeOf(Any{}): allTypes, + reflect.TypeOf(RangeStmt{}): {reflect.TypeOf((*ast.RangeStmt)(nil))}, + reflect.TypeOf(AssignStmt{}): {reflect.TypeOf((*ast.AssignStmt)(nil))}, + reflect.TypeOf(IndexExpr{}): {reflect.TypeOf((*ast.IndexExpr)(nil))}, + reflect.TypeOf(Ident{}): {reflect.TypeOf((*ast.Ident)(nil))}, + reflect.TypeOf(ValueSpec{}): {reflect.TypeOf((*ast.ValueSpec)(nil))}, + reflect.TypeOf(GenDecl{}): {reflect.TypeOf((*ast.GenDecl)(nil))}, + reflect.TypeOf(BinaryExpr{}): {reflect.TypeOf((*ast.BinaryExpr)(nil))}, + reflect.TypeOf(ForStmt{}): {reflect.TypeOf((*ast.ForStmt)(nil))}, + reflect.TypeOf(ArrayType{}): {reflect.TypeOf((*ast.ArrayType)(nil))}, + 
reflect.TypeOf(DeferStmt{}): {reflect.TypeOf((*ast.DeferStmt)(nil))}, + reflect.TypeOf(MapType{}): {reflect.TypeOf((*ast.MapType)(nil))}, + reflect.TypeOf(ReturnStmt{}): {reflect.TypeOf((*ast.ReturnStmt)(nil))}, + reflect.TypeOf(SliceExpr{}): {reflect.TypeOf((*ast.SliceExpr)(nil))}, + reflect.TypeOf(StarExpr{}): {reflect.TypeOf((*ast.StarExpr)(nil))}, + reflect.TypeOf(UnaryExpr{}): {reflect.TypeOf((*ast.UnaryExpr)(nil))}, + reflect.TypeOf(SendStmt{}): {reflect.TypeOf((*ast.SendStmt)(nil))}, + reflect.TypeOf(SelectStmt{}): {reflect.TypeOf((*ast.SelectStmt)(nil))}, + reflect.TypeOf(ImportSpec{}): {reflect.TypeOf((*ast.ImportSpec)(nil))}, + reflect.TypeOf(IfStmt{}): {reflect.TypeOf((*ast.IfStmt)(nil))}, + reflect.TypeOf(GoStmt{}): {reflect.TypeOf((*ast.GoStmt)(nil))}, + reflect.TypeOf(Field{}): {reflect.TypeOf((*ast.Field)(nil))}, + reflect.TypeOf(SelectorExpr{}): {reflect.TypeOf((*ast.SelectorExpr)(nil))}, + reflect.TypeOf(StructType{}): {reflect.TypeOf((*ast.StructType)(nil))}, + reflect.TypeOf(KeyValueExpr{}): {reflect.TypeOf((*ast.KeyValueExpr)(nil))}, + reflect.TypeOf(FuncType{}): {reflect.TypeOf((*ast.FuncType)(nil))}, + reflect.TypeOf(FuncLit{}): {reflect.TypeOf((*ast.FuncLit)(nil))}, + reflect.TypeOf(FuncDecl{}): {reflect.TypeOf((*ast.FuncDecl)(nil))}, + reflect.TypeOf(ChanType{}): {reflect.TypeOf((*ast.ChanType)(nil))}, + reflect.TypeOf(CallExpr{}): {reflect.TypeOf((*ast.CallExpr)(nil))}, + reflect.TypeOf(CaseClause{}): {reflect.TypeOf((*ast.CaseClause)(nil))}, + reflect.TypeOf(CommClause{}): {reflect.TypeOf((*ast.CommClause)(nil))}, + reflect.TypeOf(CompositeLit{}): {reflect.TypeOf((*ast.CompositeLit)(nil))}, + reflect.TypeOf(EmptyStmt{}): {reflect.TypeOf((*ast.EmptyStmt)(nil))}, + reflect.TypeOf(SwitchStmt{}): {reflect.TypeOf((*ast.SwitchStmt)(nil))}, + reflect.TypeOf(TypeSwitchStmt{}): {reflect.TypeOf((*ast.TypeSwitchStmt)(nil))}, + reflect.TypeOf(TypeAssertExpr{}): {reflect.TypeOf((*ast.TypeAssertExpr)(nil))}, + reflect.TypeOf(TypeSpec{}): {reflect.TypeOf((*ast.TypeSpec)(nil))}, + reflect.TypeOf(InterfaceType{}): {reflect.TypeOf((*ast.InterfaceType)(nil))}, + reflect.TypeOf(BranchStmt{}): {reflect.TypeOf((*ast.BranchStmt)(nil))}, + reflect.TypeOf(IncDecStmt{}): {reflect.TypeOf((*ast.IncDecStmt)(nil))}, + reflect.TypeOf(BasicLit{}): {reflect.TypeOf((*ast.BasicLit)(nil))}, +} + +var requiresTypeInfo = map[string]bool{ + "Function": true, + "Builtin": true, + "Object": true, +} + +type Parser struct { + // Allow nodes that rely on type information + AllowTypeInfo bool + + lex *lexer + cur item + last *item + items chan item +} + +func (p *Parser) Parse(s string) (Pattern, error) { + p.cur = item{} + p.last = nil + p.items = nil + + fset := token.NewFileSet() + p.lex = &lexer{ + f: fset.AddFile("